diff --git a/.github/workflows/bom-test.yaml b/.github/workflows/bom-test.yaml
new file mode 100644
index 0000000..6e6fa85
--- /dev/null
+++ b/.github/workflows/bom-test.yaml
@@ -0,0 +1,79 @@
+
+name: HIL tests
+on:
+  workflow_dispatch:
+    inputs:
+      distinct_id:
+        description: "Run identifier"
+        required: false
+      reservation_name:
+        required: false
+        description: "Reservation Name"
+        type: string
+      hold_reservation:
+        description: "Hold Testbed Reservation"
+        required: false
+        type: boolean
+      depthai_version:
+        description: "DepthAI version to test against (e.g., 3.3.0)"
+        required: true
+        type: string
+      additional_options:
+        description: "Additional options:"
+        required: false
+        type: string
+
+env:
+  DISTINCT_ID: ${{ github.event.inputs.distinct_id }}
+  DEPTHAI_VERSION: ${{ github.event.inputs.depthai_version }}
+  HIL_FRAMEWORK_TOKEN: ${{ secrets.HIL_FRAMEWORK_TOKEN }}
+  HUBAI_API_KEY: ${{ secrets.HUBAI_API_KEY }}
+
+jobs:
+  id:
+    name: Workflow ID Provider
+    runs-on: ubuntu-latest
+    steps:
+      - name: echo distinct ID ${{ env.DISTINCT_ID }}
+        run: echo $DISTINCT_ID
+  BOM-test:
+    runs-on: ["self-hosted", "testbed-runner"]
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Run tests
+        # yamllint disable rule:line-length
+        run: |
+          pip install hil-framework --upgrade \
+            --index-url "https://__token__:$HIL_FRAMEWORK_TOKEN@gitlab.luxonis.com/api/v4/projects/213/packages/pypi/simple"
+
+          if [[ -n "${{ github.event.inputs.reservation_name }}" ]]; then
+            RESERVATION_OPTION="--reservation-name ${{ github.event.inputs.reservation_name }}"
+          else
+            export RESERVATION_NAME="https://github.com/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID#modelconverter"
+            RESERVATION_OPTION="--reservation-name $RESERVATION_NAME"
+          fi
+
+          if [[ "${{ github.event.inputs.hold_reservation }}" == 'true' ]]; then
+            HOLD_RESERVATION="--hold-reservation"
+          fi
+
+          if [[ -n "${{ github.event.inputs.additional_options }}" ]]; then
+            ADDITIONAL_OPTIONS="${{ github.event.inputs.additional_options }}"
+          fi
+
+          # Validate the version format up front; previously a bad version
+          # left DEPTHAI_VERSION_CHECKED empty and a blank third argument was
+          # passed to run_hil_tests.sh, failing with a confusing usage error.
+          if [[ "$DEPTHAI_VERSION" =~ ^3\.[0-9]{1,2}\.[0-9]{1,2}$ ]]; then
+            DEPTHAI_VERSION_CHECKED="$DEPTHAI_VERSION"
+          else
+            echo "Invalid depthai_version '$DEPTHAI_VERSION' (expected e.g. 3.3.0)" >&2
+            exit 1
+          fi
+
+          exec hil_runner \
+            --models "oak4_pro or oak4_d or oak4_s" \
+            $HOLD_RESERVATION \
+            --wait \
+            $ADDITIONAL_OPTIONS \
+            $RESERVATION_OPTION \
+            --sync-workspace --rsync-args="--exclude=venv" \
+            --commands "cd /tmp/modelconverter && ./tests/test_benchmark/run_hil_tests.sh $HUBAI_API_KEY $HIL_FRAMEWORK_TOKEN $DEPTHAI_VERSION_CHECKED"
diff --git a/tests/test_benchmark/__init__.py b/tests/test_benchmark/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/test_benchmark/benchmark_targets.json b/tests/test_benchmark/benchmark_targets.json
new file mode 100644
index 0000000..6c359a6
--- /dev/null
+++ b/tests/test_benchmark/benchmark_targets.json
@@ -0,0 +1,29 @@
+{
+    "rvc4": {
+        "luxonis/yolov6-nano:r2-coco-512x288": {
+            "expected_fps": 750.00,
+            "tolerance_low": 0.10,
+            "tolerance_high": 0.30
+        },
+        "luxonis/yolov8-instance-segmentation-nano:coco-512x288": {
+            "expected_fps": 554.68,
+            "tolerance_low": 0.10,
+            "tolerance_high": 0.30
+        },
+        "luxonis/yolov8-nano-pose-estimation:coco-512x288": {
+            "expected_fps": 609.11,
+            "tolerance_low": 0.10,
+            "tolerance_high": 0.30
+        },
+        "luxonis/fastsam-s:512x288": {
+            "expected_fps": 475.68,
+            "tolerance_low": 0.10,
+            "tolerance_high": 0.30
+        },
+        "luxonis/deeplab-v3-plus:512x288": {
+            "expected_fps": 339.42,
+            "tolerance_low": 0.10,
+            "tolerance_high": 0.30
+        }
+    }
+}
diff --git a/tests/test_benchmark/conftest.py b/tests/test_benchmark/conftest.py
new file mode 100644
index 0000000..9d91a9c
--- /dev/null
+++ b/tests/test_benchmark/conftest.py
@@ -0,0 +1,40 @@
+import os
+
+import pytest
+
+
+def pytest_addoption(parser: pytest.Parser) -> None:
+    """Register the CLI options consumed by the benchmark tests."""
+    parser.addoption(
+        "--device-ip",
+        action="store",
+        default=None,
+        help="IP address of the target device.",
+    )
+    parser.addoption(
+        "--benchmark-target",
+        action="store",
+        default="rvc4",
+        help="Target platform to benchmark (default: rvc4).",
+    )
+
+
+def pytest_configure(config: pytest.Config) -> None:
+    """Abort the whole session early when HUBAI_API_KEY is missing."""
+    if not os.environ.get("HUBAI_API_KEY"):
+        pytest.exit(
+            "HUBAI_API_KEY environment variable is not set.",
+            returncode=1,
+        )
+
+
+@pytest.fixture
+def device_ip(request: pytest.FixtureRequest) -> str | None:
+    """IP address passed via --device-ip, or None when not given."""
+    return request.config.getoption("--device-ip")
+
+
+@pytest.fixture
+def benchmark_target(request: pytest.FixtureRequest) -> str:
+    """Target platform passed via --benchmark-target (defaults to rvc4)."""
+    return request.config.getoption("--benchmark-target")
diff --git a/tests/test_benchmark/run_hil_tests.sh b/tests/test_benchmark/run_hil_tests.sh
new file mode 100755
index 0000000..e0ae2f0
--- /dev/null
+++ b/tests/test_benchmark/run_hil_tests.sh
@@ -0,0 +1,44 @@
+#!/usr/bin/env bash
+
+set -e  # Exit immediately if a command fails
+
+# Check if required arguments were provided
+if [ -z "$1" ] || [ -z "$2" ] || [ -z "$3" ]; then
+    echo "Usage: $0 <HUBAI_API_KEY> <PAT_TOKEN> <DEPTHAI_VERSION>" >&2
+    exit 1
+fi
+
+# Export variables from input arguments
+export HUBAI_API_KEY="$1"
+export PAT_TOKEN="$2"
+export DEPTHAI_VERSION="$3"
+
+# Navigate to project directory
+cd /tmp/modelconverter
+
+# Create virtual environment
+python3 -m venv venv
+
+# Activate virtual environment
+source venv/bin/activate
+
+# Install dependencies
+pip install -r requirements.txt
+pip install pytest
+
+pip install hil-framework --upgrade \
+    --index-url "https://__token__:$PAT_TOKEN@gitlab.luxonis.com/api/v4/projects/213/packages/pypi/simple" \
+    > /dev/null
+
+pip install --upgrade \
+    --extra-index-url "https://artifacts.luxonis.com/artifactory/luxonis-python-snapshot-local/" \
+    --extra-index-url https://artifacts.luxonis.com/artifactory/luxonis-python-release-local \
+    "depthai==${DEPTHAI_VERSION}"
+
+# Extract hostname of first rvc4 device
+# NOTE(review): HIL_TESTBED is expected to be set by the runner environment,
+# not by this script — confirm against the hil-framework runner docs.
+hostname=$(hil_camera -t "$HIL_TESTBED" -n test all info -j \
+    | jq -r '.[] | select(.platform=="rvc4") | .hostname' \
+    | head -n1)
+
+# Run tests
+pytest -s -v tests/test_benchmark/ --device-ip "$hostname"
\ No newline at end of file
diff --git
a/tests/test_benchmark/test_benchmark_regression.py b/tests/test_benchmark/test_benchmark_regression.py
new file mode 100644
index 0000000..bcd91b6
--- /dev/null
+++ b/tests/test_benchmark/test_benchmark_regression.py
@@ -0,0 +1,80 @@
+import json
+from pathlib import Path
+
+import pytest
+
+from modelconverter.packages import get_benchmark
+from modelconverter.utils.types import Target
+
+TARGETS_FILE = Path(__file__).parent / "benchmark_targets.json"
+
+# Baseline FPS figures and per-model tolerance bands, keyed by target platform.
+_targets_data = json.loads(TARGETS_FILE.read_text())
+
+
+def _model_slugs(target: str) -> list[str]:
+    """Return the model slugs configured for *target* (empty list if unknown)."""
+    return list(_targets_data.get(target, {}).keys())
+
+
+def _model_id(slug: str) -> str:
+    """Take out the `luxonis` and use the remainder of the slug to name
+    the test."""
+    return slug.rsplit("/", 1)[-1]
+
+
+@pytest.mark.parametrize(
+    "model_slug",
+    _model_slugs("rvc4"),
+    ids=[_model_id(s) for s in _model_slugs("rvc4")],
+)
+def test_benchmark_fps(
+    model_slug: str,
+    device_ip: str | None,
+    benchmark_target: str,
+) -> None:
+    """Benchmark *model_slug* on the target device and assert the measured
+    FPS stays within the tolerance band around the recorded baseline."""
+    model_config = _targets_data[benchmark_target][model_slug]
+    expected_fps = model_config["expected_fps"]
+
+    if expected_fps is None:
+        pytest.skip(
+            f"No expected_fps set for {model_slug}. "
+            "Establish a baseline and add it to benchmark_targets.json."
+        )
+
+    target_enum = Target(benchmark_target)
+
+    bench = get_benchmark(target_enum, model_slug)
+    # Disable power/DSP measurements: only throughput (FPS) is compared here.
+    configuration = {
+        **bench.default_configuration,
+        "power_benchmark": False,
+        "dsp_benchmark": False,
+    }
+    if device_ip is not None:
+        configuration["device_ip"] = device_ip
+
+    result = bench.benchmark(configuration)
+    actual_fps = result.fps
+
+    tolerance_low = model_config["tolerance_low"]
+    tolerance_high = model_config["tolerance_high"]
+    fps_min = expected_fps * (1 - tolerance_low)
+    fps_max = expected_fps * (1 + tolerance_high)
+
+    deviation_pct = ((actual_fps - expected_fps) / expected_fps) * 100
+
+    print(
+        f"Benchmark result for {model_slug}: "
+        f"actual={actual_fps:.2f} FPS, expected={expected_fps:.2f} FPS "
+        f"(deviation: {deviation_pct:+.1f}%)"
+    )
+
+    assert fps_min <= actual_fps <= fps_max, (
+        f"FPS regression for {model_slug}: "
+        f"actual={actual_fps:.2f}, expected={expected_fps:.2f} "
+        f"(deviation: {deviation_pct:+.1f}%, "
+        f"allowed range: [{fps_min:.2f}, {fps_max:.2f}])"
+    )