diff --git a/.gitignore b/.gitignore index f4b66da..e62030a 100644 --- a/.gitignore +++ b/.gitignore @@ -31,4 +31,7 @@ share/python-wheels/ *.egg-info/ .installed.cfg *.egg -MANIFEST \ No newline at end of file +MANIFEST + +# macOS +**/.DS_Store diff --git a/README.md b/README.md index df92a68..f0341c3 100644 --- a/README.md +++ b/README.md @@ -1,26 +1,53 @@ -# Purpose -The purpose of RoverGUI is to provide a live camera view to anyone operating the rover via a web browser! While RoverGUI is meant to be used by anyone, it is important to keep in mind that non-autonomous members will likely -be using the GUI as a live camera view like this would not be allowed for Autonomous operation during URC. +# RoverGUI -# Dependencies -The following dependencies are under the assertion that both the frontend and backend are running on an **x86-64 linux operation system**. Each bullet is a link to the corresponding dependency's download/install page. +RoverGUI provides a live camera view to anyone operating the rover via a web browser! Non-Autonomous members will likely be using the GUI, so it's designed to be easy to use. -## Frontend Dependencies -* [NodeJS](https://nodejs.org/en/download) -* [NPM](https://docs.npmjs.com/downloading-and-installing-node-js-and-npm) +## Usage + +First, open two `ssh` windows to the Rover. Run both the backend and frontend, starting **with the backend first**: + +In window 1: + +```console +~ $ cd RoverGUI/backend +~/RoverGUI/backend $ cargo run +``` + +In window 2: + +```console +~ $ cd RoverGUI/react-app +~/RoverGUI/react-app $ npm run start --host +``` + +From your laptop, **open the GUI in a web browser with its URL** (i.e., open this link in Firefox/Chrome: [http://192.168.1.68:5173](http://192.168.1.68:5173), where `192.168.1.68` is the Rover's IP address). You should then be presented with a page with a dropdown list of available cameras. Select a camera -- then, a live stream should be visible! 
+ +## Dependencies + +The following dependencies are under the assertion that both the frontend and backend are running on an **x86-64 Linux operating system**. Each bullet is a link to the corresponding dependency's download/install page. + +### Frontend + +- [NodeJS](https://nodejs.org/en/download) +- [NPM](https://docs.npmjs.com/downloading-and-installing-node-js-and-npm) > [!TIP] > It is recommended to install NodeJS using the **nvm** package manager on linux. This simplifies the process of both installing and upgrading NodeJS versions. This also automatically installs NPM for you! The NodeJS download page should provide instructions given that **Linux**, **nvm**, and **npm** are selected. -## Backend Dependencies -* [Rust](https://rust-lang.org/tools/install/) -* Video4Linux (v4l) +### Backend + +- [Rust](https://rust-lang.org/tools/install/) +- Video4Linux (v4l) > [!IMPORTANT] -> Video4Linux is a kernel-level driver dependency. Usually it is included with almost all linux distros. If it isn't, please consult your distro's documentation! +> Video4Linux is a kernel-level driver dependency. It is usually included with most Linux distros. If it isn't, please consult your distro's documentation for installation instructions! + +## Instructions + +### Frontend + +#### Installing Frontend's Package Dependencies -# Frontend (`/react-app`) -## Installing Frontend's Package Dependencies Before running the frontend, you will need to install the package's dependencies using your favorite terminal! ```bash @@ -54,25 +81,18 @@ npm notice # Done! ``` -## Running the frontend +#### Running the frontend + ```bash # To start the frontend, simply run the following command in the frontend's folder. -npm run start +npm run start --host # Your output should look like this: -Compiled successfully! - -You can now view react-app in the browser. 
- - Local: http://localhost:3000 - On Your Network: http://192.168.2.62:3000 +VITE v7.3.1 ready in 96 ms -Note that the development build is not optimized. -To create a production build, use npm run build. - -webpack compiled successfully - -# Done! +➜ Local: http://localhost:5173/ +➜ Network: http://192.168.1.68:5173/ +➜ press h + enter to show help ``` The URLs that are outputted by this command (i.e. http://localhost:3000) are the same URLs that you use to see the GUI from your web browser. You may hold ctrl and then click on the links to view them or simply copy & paste them into your browser. @@ -88,11 +108,14 @@ The URLs that are outputted by this command (i.e. http://localhost:3000) are the Would you like to run the app on another port instead? › (Y/n) ``` -## Stopping the frontend -In the same terminal in which you are running the frontend, simply do `Ctrl+C` to send a SIGINT signal to interrupt the frontend (this stops it)! +#### Stopping the frontend + +In the same terminal in which you are running the frontend, press `Ctrl+C` to send a SIGINT signal to interrupt the frontend (this stops it)! + +### Backend (`/backend`) + +#### Building the backend crate and its Cargo dependencies -# Backend (`/backend`) -## Building the backend crate and its Cargo dependencies Before running the backend, you will need to build the frontend along with all of its dependencies. ```bash @@ -135,6 +158,7 @@ cargo build ``` ## Running the backend + ```bash # Simply run: cargo run @@ -176,8 +200,14 @@ arget(s) in 0.24s The output of this command tells important information about the backend like its address, port, and its available routes. In this case, the frontend would need to access the backend from the URL `http://127.0.0.1:3600`. -## Stopping the backend +### Stopping the backend + In the same terminal in which you are running the backend, simply do `Ctrl+C` to send a SIGINT signal to interrupt the backend (this stops it)! 
-# Using RoverGUI -Simply open the GUI in a web browser with its URL (i.e. http://localhost:3000). You should then be presented with a page that has a dropdown for the different camera paths that are available on the system. Simply select a path and then the corresponding live camera stream should be visible! +### The "fake" backend + +There's a `fake_backend` binary you can use to test the frontend on a non-Linux computer (i.e., macOS or Windows). To use it, just type: `cargo run --bin fake_backend -- 1`, where `1` is the number of fake cameras. You can set the number of cameras to any value you'd like (up to 255). + +Then, open up the frontend as described above. It'll connect successfully! + +You can press `Ctrl+C` to stop the fake backend from running. diff --git a/backend/Cargo.toml b/backend/Cargo.toml index 3136bb9..3e0e2ff 100644 --- a/backend/Cargo.toml +++ b/backend/Cargo.toml @@ -3,6 +3,10 @@ name = "backend-rs" version = "0.1.0" edition = "2021" +[[bin]] +name = "fake_backend" +path = "src/fake_backend.rs" + # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] @@ -12,6 +16,9 @@ serde = { version = "1.0.210", features = ["derive"] } serde_json = "1.0.128" tokio-util = { version = "0.7.12", features = ["io"] } webrtc = "0.11.0" -v4l = "0.14.0" openh264 = "0.6.2" jpeg-decoder = "0.3.1" +mp4 = "0.14.0" + +[target.'cfg(target_os = "linux")'.dependencies] +v4l = "0.14.0" diff --git a/backend/fake_stream.mp4 b/backend/fake_stream.mp4 new file mode 100644 index 0000000..b11552f Binary files /dev/null and b/backend/fake_stream.mp4 differ diff --git a/backend/src/fake_backend.rs b/backend/src/fake_backend.rs new file mode 100644 index 0000000..f8dc297 --- /dev/null +++ b/backend/src/fake_backend.rs @@ -0,0 +1,487 @@ +use std::{ + env::Args, + error::Error, + path::PathBuf, + sync::{Arc, Mutex}, +}; + +use rocket::{routes, Config}; + +use crate::utils::{CameraMode, FakeCamera, Fraction}; + +struct AppState { + cameras: Arc>>, +} + +mod utils { 
+ use std::path::PathBuf; + + #[derive(Debug, Clone, Copy)] + pub struct CameraMode { + pub width: u32, + pub height: u32, + pub frame_interval: Fraction, + } + + impl core::fmt::Display for CameraMode { + fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result { + write!( + f, + "{}x{} @{}fps", + self.width, self.height, self.frame_interval.denominator + ) + } + } + + #[derive(Debug, Clone, Copy)] + pub struct Fraction { + #[expect(unused, reason = "buggy FPS impl in `Display`")] + pub numerator: u32, + pub denominator: u32, + } + + /// A fake camera. Imitates a V4L2 camera. + pub struct FakeCamera { + /// The camera's capture settings. + pub camera_mode: CameraMode, + + /// A list of all supported camera modes for this device. + pub supported_camera_modes: Vec, + + /// File representation on disk. + /// + /// Could become stale if unplugged. + pub path: PathBuf, + } +} + +#[rocket::main] +async fn main() -> Result<(), Box> { + let num_cameras: u8 = { + // ensure args given correctly + let mut args: Args = std::env::args(); + if args.len() != 2 { + panic!( + "You ran this script with {} argument(s)! \ + Please run like so: \ + `cargo run --bin fake_backend -- {{NUM_CAMERAS}}`", + args.len() + ); + } + + // skip binary name + _ = args.next(); + + // grab number of cameras + str::parse( + args.next() + .expect("one argument found but couldn't access it") + .as_str(), + ) + .expect("number of cameras should be a value in [0, 255].") + }; + + // initialize the fake cameras + let fake_cameras: Vec = Vec::from_iter((0..num_cameras).map(|n| { + let camera_mode: CameraMode = CameraMode { + width: 1_000 * (n + 1) as u32, + height: 2_000 * (n + 1) as u32, + frame_interval: Fraction { + numerator: 1, + denominator: (n + 1) as u32, + }, + }; + + FakeCamera { + camera_mode, + supported_camera_modes: vec![camera_mode], + path: PathBuf::from(format!("/fake/camera{n}")), + } + })); + + // Create a Rocket instance with the default configuration. 
+ rocket::build() + // This is arbitrary and can be changed at any time through a config file or it can be left hardcoded. + .configure(Config::figment().merge(("port", 3600))) + .mount( + "/stream", + routes![ + api::get_available_cameras, + api::get_camera_feed, + api::get_camera_mode, + api::get_camera_modes, + api::put_camera_mode_set + ], + ) + .manage(AppState { + cameras: Arc::new(Mutex::new(fake_cameras)), + }) + .launch() + .await?; + + Ok(()) +} + +mod api { + use std::{collections::HashMap, io::Cursor, path::PathBuf, sync::Arc, time::Duration}; + + use rocket::{get, http::Status, post, put, serde::json::Json, tokio::time, State}; + use webrtc::peer_connection::sdp::session_description::RTCSessionDescription; + use webrtc::{ + api::{ + interceptor_registry, + media_engine::{MediaEngine, MIME_TYPE_H264}, + APIBuilder, + }, + interceptor::registry::Registry, + media::Sample, + peer_connection::configuration::RTCConfiguration, + rtp_transceiver::rtp_codec::RTCRtpCodecCapability, + track::track_local::{track_local_static_sample::TrackLocalStaticSample, TrackLocal}, + }; + + use crate::AppState; + + struct FakeVideoSample { + bytes: Vec, + duration: Duration, + } + + struct Mp4BitstreamConverter { + length_size: u8, + sps: Vec>, + pps: Vec>, + } + + impl Mp4BitstreamConverter { + fn for_mp4_track(track: &mp4::Mp4Track) -> Result { + let avcc = &track + .trak + .mdia + .minf + .stbl + .stsd + .avc1 + .as_ref() + .ok_or(Status::InternalServerError)? 
+ .avcc; + + Ok(Self { + length_size: avcc.length_size_minus_one + 1, + sps: avcc + .sequence_parameter_sets + .iter() + .map(|v| v.bytes.clone()) + .collect(), + pps: avcc + .picture_parameter_sets + .iter() + .map(|v| v.bytes.clone()) + .collect(), + }) + } + + fn convert_packet(&self, packet: &[u8], out: &mut Vec) { + out.clear(); + let mut stream = packet; + let mut should_prefix_sps_pps = false; + + while !stream.is_empty() { + let mut nal_size: usize = 0; + for _ in 0..self.length_size { + if stream.is_empty() { + return; + } + nal_size = (nal_size << 8) | usize::from(stream[0]); + stream = &stream[1..]; + } + + if nal_size == 0 || stream.len() < nal_size { + return; + } + + let nal = &stream[..nal_size]; + stream = &stream[nal_size..]; + + if !nal.is_empty() && (nal[0] & 0x1F) == 5 { + should_prefix_sps_pps = true; + } + + out.extend([0, 0, 0, 1]); + out.extend(nal); + } + + if should_prefix_sps_pps { + let mut with_params = Vec::with_capacity(out.len() + 128); + for sps in &self.sps { + with_params.extend([0, 0, 0, 1]); + with_params.extend(sps); + } + for pps in &self.pps { + with_params.extend([0, 0, 0, 1]); + with_params.extend(pps); + } + with_params.extend(out.iter().copied()); + *out = with_params; + } + } + } + + fn load_fake_video_samples(path: PathBuf) -> Result, Status> { + let mp4_bytes = std::fs::read(path).map_err(|_| Status::InternalServerError)?; + let mut mp4_reader = + mp4::Mp4Reader::read_header(Cursor::new(&mp4_bytes), mp4_bytes.len() as u64) + .map_err(|_| Status::InternalServerError)?; + + let (_, track) = mp4_reader + .tracks() + .iter() + .find(|(_, t)| t.media_type().ok() == Some(mp4::MediaType::H264)) + .ok_or(Status::InternalServerError)?; + + let track_id = track.track_id(); + let timescale = track.timescale(); + let sample_count = track.sample_count(); + let converter = Mp4BitstreamConverter::for_mp4_track(track)?; + let mut converted = Vec::new(); + let mut out = Vec::new(); + + for i in 1..=sample_count { + let Some(sample) = 
mp4_reader + .read_sample(track_id, i) + .map_err(|_| Status::InternalServerError)? + else { + continue; + }; + + converter.convert_packet(&sample.bytes, &mut out); + if out.is_empty() { + continue; + } + + let duration_ms = ((u64::from(sample.duration) * 1_000) / u64::from(timescale)).max(1); + converted.push(FakeVideoSample { + bytes: out.clone(), + duration: Duration::from_millis(duration_ms), + }); + } + + if converted.is_empty() { + return Err(Status::InternalServerError); + } + + Ok(converted) + } + + // Fetch all the available v4l cameras in the system + #[get("/cameras")] + pub async fn get_available_cameras( + state: &State, + ) -> Result>, (Status, &'static str)> { + Ok(Json( + state + .cameras + .lock() + .expect("not poisoned") + .iter() + .map(|c| c.path.clone()) + .collect(), + )) + } + + // Start a WebRTC stream by creating and returning an offer + #[post("/cameras//start", data = "")] + pub async fn get_camera_feed( + camera_path: &str, + offer: Json, + state: &State, + ) -> Result, Status> { + // warning: slop code below + + if state + .cameras + .lock() + .expect("not poisoned") + .iter() + .all(|c| c.path.to_string_lossy() != camera_path) + { + return Err(Status::NotFound); + } + + let mut media_engine = MediaEngine::default(); + media_engine + .register_default_codecs() + .map_err(|_| Status::InternalServerError)?; + let mut registry = Registry::new(); + registry = interceptor_registry::register_default_interceptors(registry, &mut media_engine) + .map_err(|_| Status::InternalServerError)?; + let rtc_api = APIBuilder::new() + .with_media_engine(media_engine) + .with_interceptor_registry(registry) + .build(); + + let peer_connection = Arc::new( + rtc_api + .new_peer_connection(RTCConfiguration::default()) + .await + .map_err(|_| Status::InternalServerError)?, + ); + let video_track = Arc::new(TrackLocalStaticSample::new( + RTCRtpCodecCapability { + mime_type: MIME_TYPE_H264.to_owned(), + ..Default::default() + }, + "video".to_owned(), + 
"webrtc".to_owned(), + )); + let rtp_sender = peer_connection + .add_track(Arc::clone(&video_track) as Arc) + .await + .map_err(|_| Status::InternalServerError)?; + + rocket::tokio::spawn(async move { + let mut rtcp_buf = vec![0u8; 1500]; + while rtp_sender.read(&mut rtcp_buf).await.is_ok() {} + }); + + let samples = load_fake_video_samples( + PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("fake_stream.mp4"), + )?; + let notify_tx = Arc::new(rocket::tokio::sync::Notify::new()); + let notify_rx = notify_tx.clone(); + + rocket::tokio::spawn(async move { + notify_rx.notified().await; + loop { + for sample in &samples { + if video_track + .write_sample(&Sample { + data: sample.bytes.clone().into(), + duration: sample.duration, + ..Default::default() + }) + .await + .is_err() + { + return; + } + time::sleep(sample.duration).await; + } + } + }); + + peer_connection.on_ice_connection_state_change(Box::new( + move |connection_state: webrtc::ice_transport::ice_connection_state::RTCIceConnectionState| { + if connection_state + == webrtc::ice_transport::ice_connection_state::RTCIceConnectionState::Connected + { + notify_tx.notify_waiters(); + } + + Box::pin(async {}) + }, + )); + + peer_connection + .set_remote_description(offer.into_inner()) + .await + .map_err(|_| Status::InternalServerError)?; + let answer = peer_connection + .create_answer(None) + .await + .map_err(|_| Status::InternalServerError)?; + let mut ice_gather_rx = peer_connection.gathering_complete_promise().await; + peer_connection + .set_local_description(answer) + .await + .map_err(|_| Status::InternalServerError)?; + ice_gather_rx.recv().await; + + Ok(Json( + peer_connection + .local_description() + .await + .ok_or(Status::InternalServerError)?, + )) + } + + // Get the current camera mode + #[get("/cameras//modes/current")] + pub async fn get_camera_mode( + camera_path: &str, + state: &State, + ) -> Result { + // grab the mutex + let locked_cameras = state.cameras.lock().expect("not poisoned"); + + // find 
the camera; error if we don't find it + let Some(camera) = locked_cameras + .iter() + .find(|c| c.path.to_string_lossy() == camera_path) + else { + return Err((Status::NotFound, "Camera with given value not found")); + }; + + Ok(camera.camera_mode.to_string()) + } + + // Get all the available camera modes + #[get("/cameras//modes")] + pub async fn get_camera_modes( + camera_path: &str, + state: &State, + ) -> Result>, (Status, &'static str)> { + // grab the mutex + let locked_cameras = state.cameras.lock().expect("not poisoned"); + + // find the camera; error if we don't find it + let Some(camera) = locked_cameras + .iter() + .find(|c| c.path.to_string_lossy() == camera_path) + else { + return Err((Status::NotFound, "Camera with given value not found")); + }; + + // list all the supported camera modes in a hashmap + Ok(Json( + camera + .supported_camera_modes + .iter() + .enumerate() + .map(|(i, camera_mode)| (i, camera_mode.to_string())) + .collect(), + )) + } + + // Set the current camera mode for the camera path + #[put("/cameras//modes/set/")] + pub async fn put_camera_mode_set( + camera_path: &str, + mode_id: usize, + state: &State, + ) -> Result<(), (Status, &'static str)> { + // grab the mutex + let mut locked_cameras = state.cameras.lock().expect("not poisoned"); + + // find the requested camera + let Some(camera) = locked_cameras + .iter_mut() + .find(|c| c.path.to_string_lossy() == camera_path) + else { + return Err((Status::NotFound, "Camera with given value not found")); + }; + + // find the requested camera mode + let Some(new_camera_mode) = camera.supported_camera_modes.get(mode_id) else { + return Err(( + Status::NotFound, + "Camera mode at given index was not found.", + )); + }; + + // swap to the new mode + camera.camera_mode = *new_camera_mode; + + Ok(()) + } +} diff --git a/backend/src/main.rs b/backend/src/main.rs index 5d1c702..06f827e 100644 --- a/backend/src/main.rs +++ b/backend/src/main.rs @@ -1,105 +1,17 @@ -use std::{collections::HashMap, 
error::Error, path::PathBuf}; - -use rocket::{get, http::Status, post, put, routes, serde::json::Json, Config, State}; -use utils::{CameraMode, H264CameraReader, WebcamManager}; -use v4l::Device; -use webrtc::peer_connection::sdp::session_description::RTCSessionDescription; - +#[cfg(target_os = "linux")] mod utils; -// Fetch all the available v4l cameras in the system -#[get("/cameras")] -async fn get_available_cameras( - state: &State, -) -> Result>, (Status, &'static str)> { - Ok(Json(state.available_camera_paths.clone())) -} - -// Start a WebRTC stream by creating and returning an offer -#[post("/cameras//start", data = "")] -async fn get_camera_feed( - camera_path: &str, - offer: Json, - state: &State, -) -> Result, Status> { - let webcam_manager = &state.webcam_manager; - let local_description = webcam_manager - .add_client(camera_path.to_owned(), offer.into_inner()) - .await - .map_err(|_| Status::InternalServerError)?; - - Ok(Json(local_description)) -} - -// Get the current camera mode -#[get("/cameras//modes/current")] -async fn get_camera_mode( - camera_path: &str, - state: &State, -) -> Result { - let camera_handles_mutex = state.webcam_manager.camera_handles(); - let camera_handles = &mut *camera_handles_mutex.lock().await; - let handle = camera_handles - .iter() - .find(|handle| handle.camera_path() == camera_path) - .ok_or((Status::BadRequest, "Invalid / Inactive Camera Path"))?; - - Ok(handle.current_mode().to_string()) -} - -// Get all the available camera modes -#[get("/cameras//modes")] -async fn get_camera_modes( - camera_path: &str, - state: &State, -) -> Result>, (Status, &'static str)> { - let camera_handles_mutex = state.webcam_manager.camera_handles(); - let camera_handles = &mut *camera_handles_mutex.lock().await; - let handle = camera_handles - .iter() - .find(|handle| handle.camera_path() == camera_path) - .ok_or((Status::BadRequest, "Invalid / Inactive Camera Path"))?; - - // Using a HashMap to make sure that all modes stay in their 
exact order represented in the Vec of the handle. - let mut mapped_modes: HashMap = HashMap::new(); - for i in 0..handle.camera_modes().len() { - let mode = handle.camera_modes()[i]; - mapped_modes.insert(i, mode.to_string()); - } - - Ok(Json(mapped_modes)) -} +#[cfg(target_os = "linux")] +#[rocket::main] +async fn main() -> Result<(), Box> { + use std::path::PathBuf; -// Set the current camera mode for the camera path -#[put("/cameras//modes/set/")] -async fn put_camera_mode_set( - camera_path: &str, - mode_id: usize, - state: &State, -) -> Result<(), (Status, &'static str)> { - let camera_handles_mutex = state.webcam_manager.camera_handles(); - let camera_handles = &mut *camera_handles_mutex.lock().await; - let handle = camera_handles - .iter_mut() - .find(|handle| handle.camera_path() == camera_path) - .ok_or((Status::BadRequest, "Invalid / Inactive Camera Path"))?; - handle.update_camera_mode(mode_id).await.map_err(|_| { - ( - Status::BadRequest, - "Failed to Update Camera Mode; index may be invalid", - ) - })?; + use rocket::{routes, Config}; + use utils::{CameraMode, H264CameraReader, WebcamManager}; + use v4l::Device; - Ok(()) -} + use crate::api::AppState; -struct AppState { - webcam_manager: WebcamManager, - available_camera_paths: Vec, -} - -#[rocket::main] -async fn main() -> Result<(), Box> { let mut available_camera_paths: Vec = Vec::new(); // Only add cameras that are streamable with h264. @@ -109,7 +21,12 @@ async fn main() -> Result<(), Box> { let Ok(modes) = CameraMode::fetch_all(&device) else { continue; }; - let initial_mode = *modes.first().ok_or("Error Creating a CameraThreadHandle: Failed to initialize camera as no valid Camera operating modes were provided by video4linux. (Check the camera as this was an OS-level issue!)")?; + let initial_mode: CameraMode = *modes.first().ok_or( + "Error creating a `CameraThreadHandle`: \ + Failed to initialize camera, as no valid camera operating modes \ + were provided by Video4Linux. 
\ + (Check the camera, as this was an OS-level issue!)", + )?; // If can't query or create a stream, then it can't be displayed. if H264CameraReader::new(&mut device, initial_mode).is_ok() { @@ -124,11 +41,11 @@ async fn main() -> Result<(), Box> { .mount( "/stream", routes![ - get_available_cameras, - get_camera_feed, - get_camera_mode, - get_camera_modes, - put_camera_mode_set + api::get_available_cameras, + api::get_camera_feed, + api::get_camera_mode, + api::get_camera_modes, + api::put_camera_mode_set ], ) .manage(AppState { @@ -140,3 +57,114 @@ async fn main() -> Result<(), Box> { Ok(()) } + +#[cfg(not(target_os = "linux"))] +fn main() -> Result<(), u8> { + eprintln!( + "The camera backend is unsupported on non-Linux machines. \ + For testing, please use the `fake_backend` instead. \ + Otherwise, please run the real backend on the Rover -- it isn't \ + supposed to run on your personal device!" + ); + Err(1) +} + +#[cfg(target_os = "linux")] +mod api { + use std::{collections::HashMap, path::PathBuf}; + + use super::utils::WebcamManager; + use rocket::{get, http::Status, post, put, serde::json::Json, State}; + use webrtc::peer_connection::sdp::session_description::RTCSessionDescription; + + pub struct AppState { + pub webcam_manager: WebcamManager, + pub available_camera_paths: Vec, + } + + // Fetch all the available v4l cameras in the system + #[get("/cameras")] + pub async fn get_available_cameras( + state: &State, + ) -> Result>, (Status, &'static str)> { + Ok(Json(state.available_camera_paths.clone())) + } + + // Start a WebRTC stream by creating and returning an offer + #[post("/cameras//start", data = "")] + pub async fn get_camera_feed( + camera_path: &str, + offer: Json, + state: &State, + ) -> Result, Status> { + let webcam_manager = &state.webcam_manager; + let local_description = webcam_manager + .add_client(camera_path.to_owned(), offer.into_inner()) + .await + .map_err(|_| Status::InternalServerError)?; + + Ok(Json(local_description)) + } + + // 
Get the current camera mode + #[get("/cameras//modes/current")] + pub async fn get_camera_mode( + camera_path: &str, + state: &State, + ) -> Result { + let camera_handles_mutex = state.webcam_manager.camera_handles(); + let camera_handles = &mut *camera_handles_mutex.lock().await; + let handle = camera_handles + .iter() + .find(|handle| handle.camera_path() == camera_path) + .ok_or((Status::BadRequest, "Invalid / Inactive Camera Path"))?; + + Ok(handle.current_mode().to_string()) + } + + // Get all the available camera modes + #[get("/cameras//modes")] + pub async fn get_camera_modes( + camera_path: &str, + state: &State, + ) -> Result>, (Status, &'static str)> { + let camera_handles_mutex = state.webcam_manager.camera_handles(); + let camera_handles = &mut *camera_handles_mutex.lock().await; + let handle = camera_handles + .iter() + .find(|handle| handle.camera_path() == camera_path) + .ok_or((Status::BadRequest, "Invalid / Inactive Camera Path"))?; + + // Using a HashMap to make sure that all modes stay in their exact order represented in the Vec of the handle. 
+ let mut mapped_modes: HashMap = HashMap::new(); + for i in 0..handle.camera_modes().len() { + let mode = handle.camera_modes()[i]; + mapped_modes.insert(i, mode.to_string()); + } + + Ok(Json(mapped_modes)) + } + + // Set the current camera mode for the camera path + #[put("/cameras//modes/set/")] + pub async fn put_camera_mode_set( + camera_path: &str, + mode_id: usize, + state: &State, + ) -> Result<(), (Status, &'static str)> { + let camera_handles_mutex = state.webcam_manager.camera_handles(); + let camera_handles = &mut *camera_handles_mutex.lock().await; + let handle = camera_handles + .iter_mut() + .find(|handle| handle.camera_path() == camera_path) + .ok_or((Status::BadRequest, "Invalid / Inactive Camera Path"))?; + handle.update_camera_mode(mode_id).await.map_err(|_| { + ( + Status::BadRequest, + "Failed to Update Camera Mode; index may be invalid", + ) + })?; + + Ok(()) + } +} diff --git a/backend/src/utils.rs b/backend/src/utils.rs index e75a33b..cf1ebb7 100644 --- a/backend/src/utils.rs +++ b/backend/src/utils.rs @@ -339,7 +339,12 @@ impl CameraThreadHandle { .ok_or("V4l Node Not Found")?; let mut device = Device::new(node.index())?; let modes = CameraMode::fetch_all(&device)?; - let initial_mode = *modes.last().ok_or("Error Creating a CameraThreadHandle: Failed to initialize camera as no valid Camera operating modes were provided by video4linux. (Check the camera as this was an OS-level issue!)")?; // The last camera mode tends to be the one with the best resolution and fps. + let initial_mode = *modes.last().ok_or( + "Error creating a CameraThreadHandle: \ + Failed to initialize camera, as no valid camera operating \ + modes were provided by Video4Linux. \ + (Check the camera, as this was an OS-level issue!)", + )?; // The last camera mode tends to be the one with the best resolution and fps. 
thread::spawn(move || { let mut reader = H264CameraReader::new(&mut device, initial_mode).unwrap(); let mut rtc_txs: Vec>> = Vec::new(); @@ -379,9 +384,12 @@ impl CameraThreadHandle { camera_path, manual_shutdown_needed, cam_mode_tx, - current_mode: *modes - .last() - .ok_or("Error Creating a CameraThreadHandle: Failed to initialize camera as no valid Camera operating modes were provided by video4linux. (Check the camera as this was an OS-level issue!)")?, + current_mode: *modes.last().ok_or( + "Error creating a CameraThreadHandle: \ + Failed to initialize camera, as no valid camera operating \ + modes were provided by Video4Linux. \ + (Check the camera, as this was an OS-level issue!)", + )?, camera_modes: modes, sink_flush_needed, tx_sink, diff --git a/react-app/src/App.tsx b/react-app/src/App.tsx index d42d7b2..84dea05 100644 --- a/react-app/src/App.tsx +++ b/react-app/src/App.tsx @@ -12,6 +12,7 @@ function App() { const [cameras, setCameras] = useState([]); //getting available devices from server const connection = useRef(null); + const videoDivRef = useRef(null); /*//Effect to get the camera names from the server useEffect(() => { @@ -28,6 +29,8 @@ function App() { const response = await fetch("/stream/cameras"); const cameras = await response.json(); + console.log(`Found cameras: ${cameras}`); + setCameras(["", ...cameras]); })(); }, []); @@ -41,47 +44,112 @@ function App() { //to control the camera feed const [selectedCamera, setSelectedCamera] = useState(""); - const handleCameraChange = async (event: React.ChangeEvent) => { + const handleCameraChange = async ( + event: React.ChangeEvent, + ) => { const selectedCameraPath = event.target.value; + console.info( + `stream: camera selection changed to: \`${selectedCameraPath}\``, + ); if (connection.current !== null) { + console.log("stream: closing current connection.", selectedCameraPath); connection.current.close(); + if (videoDivRef.current) { + videoDivRef.current.innerHTML = ""; + } } if 
(selectedCameraPath === "") { + console.log( + "stream: cannot stream for empty path. early returning...", + selectedCameraPath, + ); return; } const peerConnection = new RTCPeerConnection(); + peerConnection.onconnectionstatechange = () => { + console.info("stream: peer connection change", { + selectedCameraPath, + state: peerConnection.connectionState, + }); + }; + + peerConnection.oniceconnectionstatechange = () => { + console.info("stream: ice connection state changed", { + selectedCameraPath, + state: peerConnection.iceConnectionState, + }); + }; + peerConnection.ontrack = (e) => { const el = document.createElement(e.track.kind) as HTMLMediaElement; el.srcObject = e.streams[0] ?? null; + if (el.srcObject === null) { + console.error( + "stream: video `src` was set to `null` for path: ", + selectedCameraPath, + ); + } + el.autoplay = true; el.controls = true; - - document.getElementById("videoDiv")?.appendChild(el); + el.onerror = (error) => { + console.error("stream: html media element err: ", { + selectedCameraPath, + kind: e.track.kind, + error, + mediaError: el.error, + }); + }; + + if (videoDivRef.current) { + videoDivRef.current.appendChild(el); + console.debug( + "stream: added HTMLMediaElement to videoDiv.", + selectedCameraPath, + ); + } else { + console.error( + "stream: videoDiv missing; cannot append media element.", + selectedCameraPath, + ); + } }; peerConnection.onicecandidate = async (e) => { if (e.candidate === null || connection.current !== null) return; connection.current = peerConnection; - const response = await fetch( - `/stream/cameras/${encodeURIComponent(selectedCameraPath)}/start`, - { - method: "POST", - headers: { - "Content-Type": "application/json", + try { + const response = await fetch( + `/stream/cameras/${encodeURIComponent(selectedCameraPath)}/start`, + { + method: "POST", + headers: { + "Content-Type": "application/json", + }, + body: JSON.stringify(peerConnection.localDescription), }, - body: 
JSON.stringify(peerConnection.localDescription), - }, - ); - const remoteOffer = await response.json(); - peerConnection.setRemoteDescription( - new RTCSessionDescription(remoteOffer), - ); - - setSelectedCamera(selectedCameraPath); + ); + if (!response.ok) { + throw new Error( + `failed to start stream! http err: ${response.status}`, + ); + } + const remoteOffer = await response.json(); + await peerConnection.setRemoteDescription( + new RTCSessionDescription(remoteOffer), + ); + + setSelectedCamera(selectedCameraPath); + } catch (error) { + console.error("stream: failed to create rtc stream session", { + selectedCameraPath, + error, + }); + } // IMPORTANT: Calls to the API should only run after this point. @@ -103,61 +171,69 @@ function App() { peerConnection.addTransceiver("video", { direction: "sendrecv" }); peerConnection.addTransceiver("audio", { direction: "sendrecv" }); - const offer = await peerConnection.createOffer(); - peerConnection.setLocalDescription(offer); + try { + const offer = await peerConnection.createOffer(); + await peerConnection.setLocalDescription(offer); + } catch (error) { + console.error("stream: failed to do offer/local desc", { + selectedCameraPath, + error, + }); + } }; return ( -
-
- - < select +
+
+ + - { - selectedCamera && ( +
+
+ {selectedCamera && (
-
{/*
Camera Frame
*/} - < div className="slider-container" > - - < input +
+ + ) => setFpsSlider(Number(event.target.value)) + onChange={(event: React.ChangeEvent) => + setFpsSlider(Number(event.target.value)) } /> - < label htmlFor="resolutionSlider" > Resolution: - < input + + ) => setResolutionSlider(Number(event.target.value))} + onChange={(event: React.ChangeEvent) => + setResolutionSlider(Number(event.target.value)) + } />
)} +
); diff --git a/react-app/vite.config.ts b/react-app/vite.config.ts index ff154a9..074086f 100644 --- a/react-app/vite.config.ts +++ b/react-app/vite.config.ts @@ -1,11 +1,12 @@ -import { defineConfig } from 'vite' -import react from '@vitejs/plugin-react' +import react from "@vitejs/plugin-react"; +import { defineConfig } from "vite"; export default defineConfig({ - plugins: [react()], - server: { - proxy: { - '/api': 'http://localhost:3600' - } - } -}) \ No newline at end of file + plugins: [react()], + server: { + proxy: { + "/api": "http://localhost:3600", + "/stream": "http://localhost:3600", + }, + }, +});