From 67b763f959517287fba4360391c61803bf4343d8 Mon Sep 17 00:00:00 2001 From: Jure Rebernik Date: Sat, 28 Feb 2026 12:57:09 +0100 Subject: [PATCH 1/3] uvc: remove unused VideoSaver node Signed-off-by: Jure Rebernik --- cpp/uvc/src/uvc_example.cpp | 35 ----------------------------------- 1 file changed, 35 deletions(-) diff --git a/cpp/uvc/src/uvc_example.cpp b/cpp/uvc/src/uvc_example.cpp index 017140357..ed63bed55 100644 --- a/cpp/uvc/src/uvc_example.cpp +++ b/cpp/uvc/src/uvc_example.cpp @@ -8,13 +8,11 @@ #include #include #include -#include #include #include "depthai/depthai.hpp" #include "depthai/pipeline/MessageQueue.hpp" #include "depthai/pipeline/datatype/ImgFrame.hpp" -#include "depthai/pipeline/datatype/MessageGroup.hpp" #include "uvc_example.hpp" extern "C" { @@ -48,39 +46,6 @@ void signalHandler(int signum) { events_stop(sigint_events); } -// Custom host node for saving video data -class VideoSaver : public dai::node::CustomNode { - public: - VideoSaver() : fileHandle("video.encoded", std::ios::binary) { - if(!fileHandle.is_open()) { - throw std::runtime_error("Could not open video.encoded for writing"); - } - } - - ~VideoSaver() { - if(fileHandle.is_open()) { - fileHandle.close(); - } - } - - std::shared_ptr processGroup(std::shared_ptr message) override { - if(!fileHandle.is_open()) return nullptr; - - // Get raw data and write to file - auto frame = message->get("data"); - unsigned char* frameData = frame->getData().data(); - size_t frameSize = frame->getData().size(); - std::cout << "Storing frame of size: " << frameSize << std::endl; - fileHandle.write(reinterpret_cast(frameData), frameSize); - - // Don't send anything back - return nullptr; - } - - private: - std::ofstream fileHandle; -}; - extern "C" void depthai_uvc_get_buffer(struct video_source *s, struct video_buffer *buf) { unsigned int frame_size, size; uint8_t *f; From e0d2a1ac55c9052f78b80bf595f8e96970116216 Mon Sep 17 00:00:00 2001 From: Jure Rebernik Date: Sat, 28 Feb 2026 12:58:47 +0100 
Subject: [PATCH 2/3] uvc: add uncompressed NV12 streaming support - Add runtime UVC format selection (mjpeg|uncompressed/nv12) - Stream uncompressed NV12 frames from DepthAI with stride/plane-aware repack - Configure UVC configfs uncompressed descriptor as NV12 (uvc-gadget submodule) - Update app default startup format and README Signed-off-by: Jure Rebernik --- cpp/uvc/README.md | 7 +++ cpp/uvc/oakapp.toml | 2 +- cpp/uvc/src/uvc_example.cpp | 109 +++++++++++++++++++++++++++++++----- cpp/uvc/uvc-gadget | 2 +- cpp/uvc/uvc-start.sh | 36 +++++++++--- 5 files changed, 131 insertions(+), 25 deletions(-) diff --git a/cpp/uvc/README.md b/cpp/uvc/README.md index 5ba384164..f6445907a 100644 --- a/cpp/uvc/README.md +++ b/cpp/uvc/README.md @@ -33,3 +33,10 @@ Running the example in standalone mode builds and deploys it as an OAK app so th ``` `oakctl` uses the provided `oakapp.toml` to build the C++ project inside the Luxonis base container and deploy it to the device. Configuration tweaks such as changing the camera resolution or registering more topics should be done in `src/uvc_example.cpp`, then re-run `oakctl app run ./cpp/uvc`. + +### Video format selection + +The example supports two UVC stream formats controlled by the `UVC_FORMAT` environment variable: + +- `nv12` / `uncompressed` (default): Uses DepthAI `NV12` output and exposes UVC uncompressed NV12 format. +- `mjpeg`: Uses `VideoEncoder` and exposes UVC MJPEG format. 
diff --git a/cpp/uvc/oakapp.toml b/cpp/uvc/oakapp.toml index 1bd516f31..0e730f6e4 100644 --- a/cpp/uvc/oakapp.toml +++ b/cpp/uvc/oakapp.toml @@ -8,7 +8,7 @@ identifier = "com.example.streaming.uvc" app_version = "3.0.0" # Command to run when the container starts -entrypoint = ["bash", "-c", "/app/uvc-start.sh start"] +entrypoint = ["bash", "-c", "export UVC_FORMAT=nv12 && /app/uvc-start.sh start"] # Here is the place where you can install all the dependencies that are needed at run-time prepare_container = [ diff --git a/cpp/uvc/src/uvc_example.cpp b/cpp/uvc/src/uvc_example.cpp index ed63bed55..6020571ad 100644 --- a/cpp/uvc/src/uvc_example.cpp +++ b/cpp/uvc/src/uvc_example.cpp @@ -6,12 +6,18 @@ */ #include +#include +#include #include +#include #include #include +#include +#include #include "depthai/depthai.hpp" #include "depthai/pipeline/MessageQueue.hpp" +#include "depthai/pipeline/datatype/Buffer.hpp" #include "depthai/pipeline/datatype/ImgFrame.hpp" #include "uvc_example.hpp" @@ -35,6 +41,28 @@ std::atomic quitEvent(false); std::shared_ptr inputQueue{nullptr}; std::shared_ptr outputQueue; +enum class StreamFormat { + MJPEG, + UNCOMPRESSED, +}; + +static StreamFormat gStreamFormat = StreamFormat::UNCOMPRESSED; +static std::vector gNv12Buffer; + +static StreamFormat parseStreamFormat() { + const char* format = std::getenv("UVC_FORMAT"); + if(format == nullptr) return StreamFormat::UNCOMPRESSED; + + std::string formatStr(format); + std::transform(formatStr.begin(), formatStr.end(), formatStr.begin(), [](unsigned char c) { return std::tolower(c); }); + + if(formatStr == "mjpeg") return StreamFormat::MJPEG; + if(formatStr == "uncompressed" || formatStr == "nv12") return StreamFormat::UNCOMPRESSED; + + std::cerr << "Unknown UVC_FORMAT=\"" << formatStr << "\", defaulting to uncompressed NV12." << std::endl; + return StreamFormat::UNCOMPRESSED; +} + /* Necessary for and only used by signal handler. 
*/ static struct events *sigint_events; @@ -48,22 +76,64 @@ void signalHandler(int signum) { extern "C" void depthai_uvc_get_buffer(struct video_source *s, struct video_buffer *buf) { unsigned int frame_size, size; - uint8_t *f; + const uint8_t *f; if(quitEvent) { std::cout << "depthai_uvc_get_buffer(): Stopping capture due to quit event." << std::endl; return; } - auto frame = outputQueue->get(); - if(frame == nullptr) { - std::cerr << "depthai_uvc_get_buffer(): No frame available." << std::endl; - return; + if(gStreamFormat == StreamFormat::MJPEG) { + auto frame = outputQueue->get(); + if(frame == nullptr || frame->getData().empty()) { + std::cerr << "depthai_uvc_get_buffer(): No MJPEG frame available." << std::endl; + return; + } + f = frame->getData().data(); + frame_size = frame->getData().size(); + } else { + auto frame = outputQueue->get(); + if(frame == nullptr) { + std::cerr << "depthai_uvc_get_buffer(): No uncompressed frame available." << std::endl; + return; + } + if(frame->getType() != dai::ImgFrame::Type::NV12) { + std::cerr << "depthai_uvc_get_buffer(): Unexpected frame type for uncompressed mode: " << static_cast(frame->getType()) << std::endl; + return; + } + + const auto width = frame->getWidth(); + const auto height = frame->getHeight(); + const auto stride = frame->getStride(); + const auto uvPlaneOffset = frame->getPlaneStride(0); + const auto compactNv12FrameSize = (width * height * 3) / 2; + const auto expectedSrcBytes = uvPlaneOffset + (stride * (height / 2)); + const auto& data = frame->getData(); + + if(data.size() < expectedSrcBytes) { + std::cerr << "depthai_uvc_get_buffer(): NV12 frame smaller than expected: have " + << data.size() << " need " << expectedSrcBytes << std::endl; + return; + } + + gNv12Buffer.resize(compactNv12FrameSize); + const auto* src = data.data(); + auto* dst = gNv12Buffer.data(); + + for(uint32_t y = 0; y < height; ++y) { + memcpy(dst + (y * width), src + (y * stride), width); + } + + const auto* uvSrc = src + 
uvPlaneOffset; + auto* uvDst = dst + (width * height); + for(uint32_t y = 0; y < height / 2; ++y) { + memcpy(uvDst + (y * width), uvSrc + (y * stride), width); + } + + f = gNv12Buffer.data(); + frame_size = static_cast(gNv12Buffer.size()); } - f = frame->getData().data(); - frame_size = frame->getData().size(); - size = std::min(frame_size, buf->size); memcpy(buf->mem, f, size); buf->bytesused = size; @@ -90,6 +160,8 @@ int main() { struct video_source* src; struct uvc_stream* stream; + gStreamFormat = parseStreamFormat(); + depthai_uvc_register_get_buffer(depthai_uvc_get_buffer); fc = configfs_parse_uvc_function("uvc.0"); @@ -141,13 +213,20 @@ int main() { // Create nodes auto camRgb = pipeline.create()->build(socket); inputQueue = camRgb->inputControl.createInputQueue(); - auto output = camRgb->requestOutput(std::make_pair(1920, 1080), dai::ImgFrame::Type::NV12); - - // Create video encoder node - auto encoded = pipeline.create(); - encoded->setDefaultProfilePreset(30, dai::VideoEncoderProperties::Profile::MJPEG); - output->link(encoded->input); - outputQueue = encoded->bitstream.createOutputQueue(1, false); + constexpr uint32_t width = 1920; + constexpr uint32_t height = 1080; + auto output = camRgb->requestOutput(std::make_pair(width, height), dai::ImgFrame::Type::NV12); + + if(gStreamFormat == StreamFormat::MJPEG) { + auto encoded = pipeline.create(); + encoded->setDefaultProfilePreset(30, dai::VideoEncoderProperties::Profile::MJPEG); + output->link(encoded->input); + outputQueue = encoded->bitstream.createOutputQueue(1, false); + std::cout << "Configured UVC stream format: MJPEG" << std::endl; + } else { + outputQueue = output->createOutputQueue(1, false); + std::cout << "Configured UVC stream format: uncompressed NV12" << std::endl; + } // Start pipeline pipeline.start(); diff --git a/cpp/uvc/uvc-gadget b/cpp/uvc/uvc-gadget index 860b0b506..83cd15e39 160000 --- a/cpp/uvc/uvc-gadget +++ b/cpp/uvc/uvc-gadget @@ -1 +1 @@ -Subproject commit 
860b0b506bf2e17662458d5c642eaba5bfc07831 +Subproject commit 83cd15e39fe17166bd496f26cd53bc09519e0ca2 diff --git a/cpp/uvc/uvc-start.sh b/cpp/uvc/uvc-start.sh index cbf9a3233..739c77e67 100644 --- a/cpp/uvc/uvc-start.sh +++ b/cpp/uvc/uvc-start.sh @@ -25,11 +25,14 @@ fi MANUF="Luxonis" PRODUCT="Luxonis UVC Camera" UDC=$(ls /sys/class/udc | head -n1) # will identify the 'first' UDC +: "${UVC_FORMAT:=uncompressed}" +UVC_FORMAT=$(echo "$UVC_FORMAT" | tr '[:upper:]' '[:lower:]') log "=== Detecting platform:" log " product : $PRODUCT" log " udc : $UDC" log " serial : $SERIAL" +log " format : $UVC_FORMAT" remove_uvc_gadget() { if [ ! -d /sys/kernel/config/usb_gadget/g1/functions/uvc.0 ]; then @@ -101,6 +104,20 @@ create_frame() { EOF } +configure_uncompressed_nv12_descriptor() { + FUNCTION=$1 + NAME=$2 + + FRAME_DIR="functions/$FUNCTION/streaming/uncompressed/$NAME/1080p" + FORMAT_DIR="functions/$FUNCTION/streaming/uncompressed/$NAME" + + # NV12 is 12bpp (4:2:0), frame size is width * height * 3 / 2. 
+ echo 12 > "$FORMAT_DIR/bBitsPerPixel" + echo $(( 1920 * 1080 * 3 / 2 )) > "$FRAME_DIR/dwMaxVideoFrameBufferSize" + # UVC GUID for NV12: 4e 56 31 32 00 00 10 00 80 00 00 aa 00 38 9b 71 + echo -ne '\x4e\x56\x31\x32\x00\x00\x10\x00\x80\x00\x00\xaa\x00\x38\x9b\x71' > "$FORMAT_DIR/guidFormat" +} + create_uvc() { # Example usage: # create_uvc @@ -113,17 +130,20 @@ create_uvc() { pushd "$GADGET/g1" >/dev/null mkdir "functions/$FUNCTION" - # create_frame "$FUNCTION" 640 360 uncompressed u - # create_frame "$FUNCTION" 1280 720 uncompressed u - # create_frame "$FUNCTION" 320 180 uncompressed u - create_frame "$FUNCTION" 1920 1080 mjpeg m - # create_frame "$FUNCTION" 640 480 mjpeg m - # create_frame "$FUNCTION" 640 360 mjpeg m + if [ "$UVC_FORMAT" = "mjpeg" ]; then + create_frame "$FUNCTION" 1920 1080 mjpeg m + else + create_frame "$FUNCTION" 1920 1080 uncompressed u + configure_uncompressed_nv12_descriptor "$FUNCTION" "u" + fi mkdir "functions/$FUNCTION/streaming/header/h" cd "functions/$FUNCTION/streaming/header/h" - # ln -s ../../uncompressed/u - ln -s ../../mjpeg/m + if [ "$UVC_FORMAT" = "mjpeg" ]; then + ln -s ../../mjpeg/m + else + ln -s ../../uncompressed/u + fi cd ../../class/fs ln -s ../../header/h cd ../../class/hs From d5442136455e9456d21569afac3e0ca65c86ae1f Mon Sep 17 00:00:00 2001 From: Jure Rebernik Date: Sat, 28 Feb 2026 13:52:29 +0100 Subject: [PATCH 3/3] uvc: bump oakapp version to 3.1.0 Signed-off-by: Jure Rebernik --- cpp/uvc/oakapp.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cpp/uvc/oakapp.toml b/cpp/uvc/oakapp.toml index 0e730f6e4..51c46fbcc 100644 --- a/cpp/uvc/oakapp.toml +++ b/cpp/uvc/oakapp.toml @@ -5,7 +5,7 @@ # Application metadata identifier = "com.example.streaming.uvc" -app_version = "3.0.0" +app_version = "3.1.0" # Command to run when the container starts entrypoint = ["bash", "-c", "export UVC_FORMAT=nv12 && /app/uvc-start.sh start"]