From cb7ab0718b3e94522f387a398f99e85c185e2a9f Mon Sep 17 00:00:00 2001 From: ISnowFoxI Date: Wed, 3 Sep 2025 12:36:10 +0400 Subject: [PATCH 1/8] initial implementation of input device listener and change logic, input device level listener functionality --- example/ios/Podfile.lock | 16 +- .../project.pbxproj | 5 +- example/src/CameraPage.tsx | 46 +- package/ios/Core/CameraConfiguration.swift | 12 +- package/ios/Core/CameraSession+Audio.swift | 83 ++- .../Core/CameraSession+Configuration.swift | 1 + package/ios/Core/CameraSession+Video.swift | 2 +- package/ios/Core/CameraSession.swift | 612 +++++++++--------- .../ios/Core/Types/RecordVideoOptions.swift | 1 + package/ios/React/AudioInputDevicesManager.m | 14 + .../ios/React/AudioInputDevicesManager.swift | 68 ++ package/ios/React/AudioInputLevelManager.m | 14 + .../ios/React/AudioInputLevelManager.swift | 128 ++++ .../ios/React/CameraView+RecordVideo.swift | 2 +- package/ios/React/CameraView.swift | 11 +- package/ios/React/CameraViewManager.m | 1 + package/src/AudioInputDevices.ts | 22 + package/src/Camera.tsx | 3 +- package/src/NativeCameraView.ts | 2 + package/src/hooks/useAudioInputDevices.ts | 23 + package/src/index.ts | 5 +- package/src/listeners/AudioInputLevel.ts | 17 + package/src/types/AudioInputDevice.ts | 5 + package/src/types/CameraProps.ts | 7 + 24 files changed, 761 insertions(+), 339 deletions(-) create mode 100644 package/ios/React/AudioInputDevicesManager.m create mode 100644 package/ios/React/AudioInputDevicesManager.swift create mode 100644 package/ios/React/AudioInputLevelManager.m create mode 100644 package/ios/React/AudioInputLevelManager.swift create mode 100644 package/src/AudioInputDevices.ts create mode 100644 package/src/hooks/useAudioInputDevices.ts create mode 100644 package/src/listeners/AudioInputLevel.ts create mode 100644 package/src/types/AudioInputDevice.ts diff --git a/example/ios/Podfile.lock b/example/ios/Podfile.lock index 9a08834c6b..ea97d74b89 100644 --- 
a/example/ios/Podfile.lock +++ b/example/ios/Podfile.lock @@ -1768,16 +1768,16 @@ PODS: - ReactCommon/turbomodule/core - Yoga - SocketRocket (0.7.0) - - VisionCamera (4.6.3): - - VisionCamera/Core (= 4.6.3) - - VisionCamera/FrameProcessors (= 4.6.3) - - VisionCamera/React (= 4.6.3) - - VisionCamera/Core (4.6.3) - - VisionCamera/FrameProcessors (4.6.3): + - VisionCamera (4.7.1): + - VisionCamera/Core (= 4.7.1) + - VisionCamera/FrameProcessors (= 4.7.1) + - VisionCamera/React (= 4.7.1) + - VisionCamera/Core (4.7.1) + - VisionCamera/FrameProcessors (4.7.1): - React - React-callinvoker - react-native-worklets-core - - VisionCamera/React (4.6.3): + - VisionCamera/React (4.7.1): - React-Core - VisionCamera/FrameProcessors - Yoga (0.0.0) @@ -2097,7 +2097,7 @@ SPEC CHECKSUMS: RNStaticSafeAreaInsets: 055ddbf5e476321720457cdaeec0ff2ba40ec1b8 RNVectorIcons: 6382277afab3c54658e9d555ee0faa7a37827136 SocketRocket: abac6f5de4d4d62d24e11868d7a2f427e0ef940d - VisionCamera: 88df4dae7196c93ecd331f105f0e5d7d95702cb3 + VisionCamera: d785f2775f8b200d713f6e74b6ebdee2b8c8cb5c Yoga: aa3df615739504eebb91925fc9c58b4922ea9a08 PODFILE CHECKSUM: 2ad84241179871ca890f7c65c855d117862f1a68 diff --git a/example/ios/VisionCameraExample.xcodeproj/project.pbxproj b/example/ios/VisionCameraExample.xcodeproj/project.pbxproj index 829ad9100d..8f5892d0c2 100644 --- a/example/ios/VisionCameraExample.xcodeproj/project.pbxproj +++ b/example/ios/VisionCameraExample.xcodeproj/project.pbxproj @@ -181,7 +181,6 @@ LastUpgradeCheck = 1250; TargetAttributes = { 13B07F861A680F5B00A75B9A = { - DevelopmentTeam = CJW62Q77E7; LastSwiftMigration = 1240; }; }; @@ -422,7 +421,7 @@ CLANG_CXX_LANGUAGE_STANDARD = "c++20"; CLANG_ENABLE_MODULES = YES; CURRENT_PROJECT_VERSION = 1; - DEVELOPMENT_TEAM = CJW62Q77E7; + DEVELOPMENT_TEAM = ""; ENABLE_BITCODE = NO; INFOPLIST_FILE = VisionCameraExample/Info.plist; INFOPLIST_KEY_CFBundleDisplayName = "Vision Camera"; @@ -455,7 +454,7 @@ CLANG_CXX_LANGUAGE_STANDARD = "c++20"; 
CLANG_ENABLE_MODULES = YES; CURRENT_PROJECT_VERSION = 1; - DEVELOPMENT_TEAM = CJW62Q77E7; + DEVELOPMENT_TEAM = ""; INFOPLIST_FILE = VisionCameraExample/Info.plist; INFOPLIST_KEY_CFBundleDisplayName = "Vision Camera"; INFOPLIST_KEY_LSApplicationCategoryType = "public.app-category.photography"; diff --git a/example/src/CameraPage.tsx b/example/src/CameraPage.tsx index af65e71462..4007be5f2c 100644 --- a/example/src/CameraPage.tsx +++ b/example/src/CameraPage.tsx @@ -1,17 +1,19 @@ import * as React from 'react' import { useRef, useState, useCallback, useMemo } from 'react' -import type { GestureResponderEvent } from 'react-native' +import type { EmitterSubscription, GestureResponderEvent } from 'react-native' import { StyleSheet, Text, View } from 'react-native' import type { PinchGestureHandlerGestureEvent } from 'react-native-gesture-handler' import { PinchGestureHandler, TapGestureHandler } from 'react-native-gesture-handler' import type { CameraProps, CameraRuntimeError, PhotoFile, VideoFile } from 'react-native-vision-camera' import { runAtTargetFps, + useAudioInputDevices, useCameraDevice, useCameraFormat, useFrameProcessor, useLocationPermission, useMicrophonePermission, + AudioInputLevel, } from 'react-native-vision-camera' import { Camera } from 'react-native-vision-camera' import { CONTENT_SPACING, CONTROL_BUTTON_SIZE, MAX_ZOOM_FACTOR, SAFE_AREA_PADDING, SCREEN_HEIGHT, SCREEN_WIDTH } from './Constants' @@ -29,6 +31,7 @@ import { useIsFocused } from '@react-navigation/core' import { usePreferredCameraDevice } from './hooks/usePreferredCameraDevice' import { examplePlugin } from './frame-processors/ExamplePlugin' import { exampleKotlinSwiftPlugin } from './frame-processors/ExampleKotlinSwiftPlugin' +import { useSafeAreaInsets } from 'react-native-safe-area-context' const ReanimatedCamera = Reanimated.createAnimatedComponent(Camera) Reanimated.addWhitelistedNativeProps({ @@ -39,13 +42,15 @@ const SCALE_FULL_ZOOM = 3 type Props = NativeStackScreenProps export 
function CameraPage({ navigation }: Props): React.ReactElement { + const audioInputDevices = useAudioInputDevices() + const { bottom } = useSafeAreaInsets() + const [selectedMic, setSelectedMic] = useState(audioInputDevices[0]) const camera = useRef(null) const [isCameraInitialized, setIsCameraInitialized] = useState(false) const microphone = useMicrophonePermission() const location = useLocationPermission() const zoom = useSharedValue(1) const isPressingButton = useSharedValue(false) - // check if camera page is active const isFocussed = useIsFocused() const isForeground = useIsForeground() @@ -183,12 +188,23 @@ export function CameraPage({ navigation }: Props): React.ReactElement { runAtTargetFps(10, () => { 'worklet' - console.log(`${frame.timestamp}: ${frame.width}x${frame.height} ${frame.pixelFormat} Frame (${frame.orientation})`) examplePlugin(frame) exampleKotlinSwiftPlugin(frame) }) }, []) + useEffect(() => { + let listener: EmitterSubscription | null = null + if (selectedMic?.uid) { + listener = AudioInputLevel.addAudioLevelChangedListener((level) => { + console.log('Current Audio device level:', level) + }) + } + return () => { + listener?.remove() + } + }, [selectedMic?.uid]) + const videoHdr = format?.supportsVideoHdr && enableHdr const photoHdr = format?.supportsPhotoHdr && enableHdr && !videoHdr @@ -201,6 +217,7 @@ export function CameraPage({ navigation }: Props): React.ReactElement { + + {audioInputDevices.map((item) => ( + setSelectedMic(item)} style={styles.microphoneButton}> + + {item.portName} + + + ))} + ) } @@ -322,4 +348,18 @@ const styles = StyleSheet.create({ justifyContent: 'center', alignItems: 'center', }, + microphoneContainer: { + position: 'absolute', + left: 12, + top: 100, + }, + microphoneButton: { + height: 48, + }, + microphoneButtonText: { + color: 'white', + }, + microphoneButtonSelectedText: { + color: 'blue', + }, }) diff --git a/package/ios/Core/CameraConfiguration.swift b/package/ios/Core/CameraConfiguration.swift index 
a4b94dd377..0fecb5c79c 100644 --- a/package/ios/Core/CameraConfiguration.swift +++ b/package/ios/Core/CameraConfiguration.swift @@ -16,6 +16,9 @@ final class CameraConfiguration { // Input var cameraId: String? + var audioInputDeviceUid: String? + + // Outputs var photo: OutputConfiguration = .disabled @@ -57,6 +60,7 @@ final class CameraConfiguration { if let other { // copy over all values cameraId = other.cameraId + audioInputDeviceUid = other.audioInputDeviceUid photo = other.photo video = other.video codeScanner = other.codeScanner @@ -83,7 +87,7 @@ final class CameraConfiguration { /** Throw this to abort calls to configure { ... } and apply no changes. */ - @frozen + enum AbortThrow: Error { case abort } @@ -101,6 +105,7 @@ final class CameraConfiguration { let audioSessionChanged: Bool let locationChanged: Bool + /** Returns `true` when props that affect the AVCaptureSession configuration (i.e. props that require beginConfiguration()) have changed. @@ -141,14 +146,15 @@ final class CameraConfiguration { exposureChanged = inputChanged || left?.exposure != right.exposure // audio session - audioSessionChanged = left?.audio != right.audio + audioSessionChanged = left?.audio != right.audio || left?.audioInputDeviceUid != right.audioInputDeviceUid // location locationChanged = left?.enableLocation != right.enableLocation + } } - @frozen + enum OutputConfiguration: Equatable { case disabled case enabled(config: T) diff --git a/package/ios/Core/CameraSession+Audio.swift b/package/ios/Core/CameraSession+Audio.swift index 3d67c8c6bc..7da96fc607 100644 --- a/package/ios/Core/CameraSession+Audio.swift +++ b/package/ios/Core/CameraSession+Audio.swift @@ -10,25 +10,17 @@ import AVFoundation import Foundation extension CameraSession { + /** Configures the Audio session and activates it. If the session was active it will shortly be deactivated before configuration. 
The Audio Session will be configured to allow background music, haptics (vibrations) and system sound playback while recording. Background audio is allowed to play on speakers or bluetooth speakers. */ - final func activateAudioSession() throws { + final func activateAudioSession() throws { VisionLogger.log(level: .info, message: "Activating Audio Session...") do { - let audioSession = AVAudioSession.sharedInstance() - - try audioSession.updateCategory(AVAudioSession.Category.playAndRecord, - mode: .videoRecording, - options: [.mixWithOthers, - .allowBluetoothA2DP, - .defaultToSpeaker, - .allowAirPlay]) - if #available(iOS 14.5, *) { // prevents the audio session from being interrupted by a phone call try audioSession.setPrefersNoInterruptionsFromSystemAlerts(true) @@ -51,10 +43,9 @@ extension CameraSession { } } } - + final func deactivateAudioSession() { VisionLogger.log(level: .info, message: "Deactivating Audio Session...") - audioCaptureSession.stopRunning() VisionLogger.log(level: .info, message: "Audio Session deactivated!") } @@ -84,7 +75,7 @@ extension CameraSession { CameraQueues.audioQueue.async { VisionLogger.log(level: .info, message: "Resuming interrupted Audio Session...") // restart audio session because interruption is over - try? self.activateAudioSession() + try? self.activateAudioSession() } } } else { @@ -94,4 +85,70 @@ extension CameraSession { () } } + + func setAudioConfig () { + do { + try audioSession.updateCategory(AVAudioSession.Category.playAndRecord, + mode: .videoRecording, + options: [.mixWithOthers, + .allowBluetoothA2DP, + .allowBluetooth, + .defaultToSpeaker, + .allowAirPlay])} + catch let error as NSError { + VisionLogger.log(level: .error, message: "Failed to update audio category! 
Error \(error.code): \(error.description)") + } + } + // Activates AvAudioSession + func enableAudioSession () { + do { + try audioSession.setActive(true) + } + catch let error as NSError { + VisionLogger.log(level: .error, message: "Failed to activate audio session! Error \(error.code): \(error.description)") + } + } + + // Deactivates AvAudioSession + func disableAudioSession () { + do { + try audioSession.setActive(false) + } + catch let error as NSError { + VisionLogger.log(level: .error, message: "Failed to deactivate audio session! Error \(error.code): \(error.description)") + } + } + // Gets the preffered audio input my comparing input device uids + func getPreferredAudioInput() -> AVAudioSessionPortDescription? { + guard let availableInputs = audioSession.availableInputs else { + VisionLogger.log(level: .error, message: "No available inputs detected") + return nil + } + + if configuration?.audioInputDeviceUid == nil { + VisionLogger.log(level: .error, message: "No audio input uid specificed, reverting to first/default input") + return availableInputs.first + } + + return availableInputs.first(where: { $0.uid == configuration?.audioInputDeviceUid}) + } + + // Sets the preffered audio input device by the user + func setPreferredAudioInput() { + guard let preferredInput = getPreferredAudioInput() else { + return + } + disableAudioSession() + do { + try audioSession.setPreferredInput(preferredInput) + enableAudioSession() + } catch { + VisionLogger.log(level: .error, message: "Error setting preferred audio input: \(error.localizedDescription)") + } + } + + } + + + diff --git a/package/ios/Core/CameraSession+Configuration.swift b/package/ios/Core/CameraSession+Configuration.swift index ecd2a94daf..1491939124 100644 --- a/package/ios/Core/CameraSession+Configuration.swift +++ b/package/ios/Core/CameraSession+Configuration.swift @@ -365,6 +365,7 @@ extension CameraSession { // Audio Input (Microphone) if enableAudio { VisionLogger.log(level: .info, message: 
"Adding Audio input...") + guard let microphone = AVCaptureDevice.default(for: .audio) else { throw CameraError.device(.microphoneUnavailable) } diff --git a/package/ios/Core/CameraSession+Video.swift b/package/ios/Core/CameraSession+Video.swift index 8e57710f24..2a5cbddce1 100644 --- a/package/ios/Core/CameraSession+Video.swift +++ b/package/ios/Core/CameraSession+Video.swift @@ -107,7 +107,7 @@ extension CameraSession { // Activate Audio Session asynchronously CameraQueues.audioQueue.async { do { - try self.activateAudioSession() + try self.activateAudioSession() } catch { self.onConfigureError(error) } diff --git a/package/ios/Core/CameraSession.swift b/package/ios/Core/CameraSession.swift index 10b0f3399c..8df41d41e8 100644 --- a/package/ios/Core/CameraSession.swift +++ b/package/ios/Core/CameraSession.swift @@ -14,319 +14,327 @@ import Foundation All changes to the session have to be controlled via the `configure` function. */ final class CameraSession: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate { - // Configuration - private var isInitialized = false - var configuration: CameraConfiguration? - var currentConfigureCall: DispatchTime = .now() - // Capture Session - let captureSession = AVCaptureSession() - let audioCaptureSession = AVCaptureSession() - // Inputs & Outputs - var videoDeviceInput: AVCaptureDeviceInput? - var audioDeviceInput: AVCaptureDeviceInput? - var photoOutput: AVCapturePhotoOutput? - var videoOutput: AVCaptureVideoDataOutput? - var audioOutput: AVCaptureAudioDataOutput? - var codeScannerOutput: AVCaptureMetadataOutput? - // State - var metadataProvider = MetadataProvider() - var recordingSession: RecordingSession? - var didCancelRecording = false - var orientationManager = OrientationManager() - - // Callbacks - weak var delegate: CameraSessionDelegate? 
- - // Public accessors - var maxZoom: Double { - if let device = videoDeviceInput?.device { - return device.activeFormat.videoMaxZoomFactor + // Configuration + private var isInitialized = false + var configuration: CameraConfiguration? + var currentConfigureCall: DispatchTime = .now() + // Capture Session + let captureSession = AVCaptureSession() + let audioCaptureSession = AVCaptureSession() + let audioSession = AVAudioSession.sharedInstance() + // Inputs & Outputs + var videoDeviceInput: AVCaptureDeviceInput? + var audioDeviceInput: AVCaptureDeviceInput? + var photoOutput: AVCapturePhotoOutput? + var videoOutput: AVCaptureVideoDataOutput? + var audioOutput: AVCaptureAudioDataOutput? + var codeScannerOutput: AVCaptureMetadataOutput? + // State + var metadataProvider = MetadataProvider() + var recordingSession: RecordingSession? + var didCancelRecording = false + var orientationManager = OrientationManager() + + // Callbacks + weak var delegate: CameraSessionDelegate? + + // Public accessors + var maxZoom: Double { + if let device = videoDeviceInput?.device { + return device.activeFormat.videoMaxZoomFactor + } + return 1.0 } - return 1.0 - } - - /** - Create a new instance of the `CameraSession`. - The `onError` callback is used for any runtime errors. - */ - override init() { - super.init() - NotificationCenter.default.addObserver(self, - selector: #selector(sessionRuntimeError), - name: .AVCaptureSessionRuntimeError, - object: captureSession) - NotificationCenter.default.addObserver(self, - selector: #selector(sessionRuntimeError), - name: .AVCaptureSessionRuntimeError, - object: audioCaptureSession) - NotificationCenter.default.addObserver(self, - selector: #selector(audioSessionInterrupted), - name: AVAudioSession.interruptionNotification, - object: AVAudioSession.sharedInstance) - } - - private func initialize() { - if isInitialized { - return + + /** + Create a new instance of the `CameraSession`. + The `onError` callback is used for any runtime errors. 
+ */ + override init() { + super.init() + NotificationCenter.default.addObserver(self, + selector: #selector(sessionRuntimeError), + name: .AVCaptureSessionRuntimeError, + object: captureSession) + NotificationCenter.default.addObserver(self, + selector: #selector(sessionRuntimeError), + name: .AVCaptureSessionRuntimeError, + object: audioCaptureSession) + NotificationCenter.default.addObserver(self, + selector: #selector(audioSessionInterrupted), + name: AVAudioSession.interruptionNotification, + object: AVAudioSession.sharedInstance) + // Set Audio configuration to be use by the camera + setAudioConfig() } - orientationManager.delegate = self - isInitialized = true - } - - deinit { - NotificationCenter.default.removeObserver(self, - name: .AVCaptureSessionRuntimeError, - object: captureSession) - NotificationCenter.default.removeObserver(self, - name: .AVCaptureSessionRuntimeError, - object: audioCaptureSession) - NotificationCenter.default.removeObserver(self, - name: AVAudioSession.interruptionNotification, - object: AVAudioSession.sharedInstance) - } - - /** - Creates a PreviewView for the current Capture Session - */ - func createPreviewView(frame: CGRect) -> PreviewView { - return PreviewView(frame: frame, session: captureSession) - } - - func onConfigureError(_ error: Error) { - if let error = error as? CameraError { - // It's a typed Error - delegate?.onError(error) - } else { - // It's any kind of unknown error - let cameraError = CameraError.unknown(message: error.localizedDescription) - delegate?.onError(cameraError) + + private func initialize() { + if isInitialized { + return + } + orientationManager.delegate = self + + isInitialized = true } - } - - /** - Update the session configuration. - Any changes in here will be re-configured only if required, and under a lock (in this case, the serial cameraQueue DispatchQueue). - The `configuration` object is a copy of the currently active configuration that can be modified by the caller in the lambda. 
- */ - func configure(_ lambda: @escaping (_ configuration: CameraConfiguration) throws -> Void) { - initialize() - - VisionLogger.log(level: .info, message: "configure { ... }: Waiting for lock...") - - // Set up Camera (Video) Capture Session (on camera queue, acts like a lock) - CameraQueues.cameraQueue.async { - // Let caller configure a new configuration for the Camera. - let config = CameraConfiguration(copyOf: self.configuration) - do { - try lambda(config) - } catch CameraConfiguration.AbortThrow.abort { - // call has been aborted and changes shall be discarded - return - } catch { - // another error occured, possibly while trying to parse enums - self.onConfigureError(error) - return - } - let difference = CameraConfiguration.Difference(between: self.configuration, and: config) - - VisionLogger.log(level: .info, message: "configure { ... }: Updating CameraSession Configuration... \(difference)") - - do { - // If needed, configure the AVCaptureSession (inputs, outputs) - if difference.isSessionConfigurationDirty { - self.captureSession.beginConfiguration() - - // 1. Update input device - if difference.inputChanged { - try self.configureDevice(configuration: config) - } - // 2. Update outputs - if difference.outputsChanged { - try self.configureOutputs(configuration: config) - } - // 3. Update Video Stabilization - if difference.videoStabilizationChanged { - self.configureVideoStabilization(configuration: config) - } - // 4. 
Update target output orientation - if difference.orientationChanged { - self.orientationManager.setTargetOutputOrientation(config.outputOrientation) - } + + deinit { + NotificationCenter.default.removeObserver(self, + name: .AVCaptureSessionRuntimeError, + object: captureSession) + NotificationCenter.default.removeObserver(self, + name: .AVCaptureSessionRuntimeError, + object: audioCaptureSession) + NotificationCenter.default.removeObserver(self, + name: AVAudioSession.interruptionNotification, + object: AVAudioSession.sharedInstance) + + } + + /** + Creates a PreviewView for the current Capture Session + */ + func createPreviewView(frame: CGRect) -> PreviewView { + return PreviewView(frame: frame, session: captureSession) + } + + func onConfigureError(_ error: Error) { + if let error = error as? CameraError { + // It's a typed Error + delegate?.onError(error) + } else { + // It's any kind of unknown error + let cameraError = CameraError.unknown(message: error.localizedDescription) + delegate?.onError(cameraError) } - - guard let device = self.videoDeviceInput?.device else { - throw CameraError.device(.noDevice) + } + + /** + Update the session configuration. + Any changes in here will be re-configured only if required, and under a lock (in this case, the serial cameraQueue DispatchQueue). + The `configuration` object is a copy of the currently active configuration that can be modified by the caller in the lambda. + */ + func configure(_ lambda: @escaping (_ configuration: CameraConfiguration) throws -> Void) { + initialize() + + VisionLogger.log(level: .info, message: "configure { ... }: Waiting for lock...") + + // Set up Camera (Video) Capture Session (on camera queue, acts like a lock) + CameraQueues.cameraQueue.async { + // Let caller configure a new configuration for the Camera. 
+ let config = CameraConfiguration(copyOf: self.configuration) + do { + try lambda(config) + } catch CameraConfiguration.AbortThrow.abort { + // call has been aborted and changes shall be discarded + return + } catch { + // another error occured, possibly while trying to parse enums + self.onConfigureError(error) + return + } + let difference = CameraConfiguration.Difference(between: self.configuration, and: config) + + VisionLogger.log(level: .info, message: "configure { ... }: Updating CameraSession Configuration... \(difference)") + + do { + // If needed, configure the AVCaptureSession (inputs, outputs) + if difference.isSessionConfigurationDirty { + self.captureSession.beginConfiguration() + + // 1. Update input device + if difference.inputChanged { + try self.configureDevice(configuration: config) + } + // 2. Update outputs + if difference.outputsChanged { + try self.configureOutputs(configuration: config) + } + // 3. Update Video Stabilization + if difference.videoStabilizationChanged { + self.configureVideoStabilization(configuration: config) + } + // 4. Update target output orientation + if difference.orientationChanged { + self.orientationManager.setTargetOutputOrientation(config.outputOrientation) + } + } + + guard let device = self.videoDeviceInput?.device else { + throw CameraError.device(.noDevice) + } + + // If needed, configure the AVCaptureDevice (format, zoom, low-light-boost, ..) + if difference.isDeviceConfigurationDirty { + try device.lockForConfiguration() + defer { + device.unlockForConfiguration() + } + + // 5. Configure format + if difference.formatChanged { + try self.configureFormat(configuration: config, device: device) + } + // 6. After step 2. and 4., we also need to configure some output properties that depend on format. + // This needs to be done AFTER we updated the `format`, as this controls the supported properties. 
+ if difference.outputsChanged || difference.formatChanged { + self.configureVideoOutputFormat(configuration: config) + self.configurePhotoOutputFormat(configuration: config) + } + // 7. Configure side-props (fps, lowLightBoost) + if difference.sidePropsChanged { + try self.configureSideProps(configuration: config, device: device) + } + // 8. Configure zoom + if difference.zoomChanged { + self.configureZoom(configuration: config, device: device) + } + // 9. Configure exposure bias + if difference.exposureChanged { + self.configureExposure(configuration: config, device: device) + } + } + + if difference.isSessionConfigurationDirty { + // We commit the session config updates AFTER the device config, + // that way we can also batch those changes into one update instead of doing two updates. + self.captureSession.commitConfiguration() + } + + // 10. Start or stop the session if needed + self.checkIsActive(configuration: config) + + // 11. Enable or disable the Torch if needed (requires session to be running) + if difference.torchChanged { + try device.lockForConfiguration() + defer { + device.unlockForConfiguration() + } + try self.configureTorch(configuration: config, device: device) + } + + // After configuring, set this to the new configuration. 
+ self.configuration = config + } catch { + self.onConfigureError(error) + } + + // Set up Audio Capture Session (on audio queue) + if difference.audioSessionChanged { + CameraQueues.audioQueue.async { + do { + // Set the preferred input device on props change + self.setPreferredAudioInput() + + // Lock Capture Session for configuration + VisionLogger.log(level: .info, message: "Beginning AudioSession configuration...") + self.audioCaptureSession.beginConfiguration() + + try self.configureAudioSession(configuration: config) + + // Unlock Capture Session again and submit configuration to Hardware + self.audioCaptureSession.commitConfiguration() + VisionLogger.log(level: .info, message: "Committed AudioSession configuration!") + } catch { + self.onConfigureError(error) + } + } + } + + // Set up Location streaming (on location queue) + if difference.locationChanged { + CameraQueues.locationQueue.async { + do { + VisionLogger.log(level: .info, message: "Beginning Location Output configuration...") + try self.configureLocationOutput(configuration: config) + VisionLogger.log(level: .info, message: "Finished Location Output configuration!") + } catch { + self.onConfigureError(error) + } + } + } } - - // If needed, configure the AVCaptureDevice (format, zoom, low-light-boost, ..) - if difference.isDeviceConfigurationDirty { - try device.lockForConfiguration() - defer { - device.unlockForConfiguration() - } - - // 5. Configure format - if difference.formatChanged { - try self.configureFormat(configuration: config, device: device) - } - // 6. After step 2. and 4., we also need to configure some output properties that depend on format. - // This needs to be done AFTER we updated the `format`, as this controls the supported properties. - if difference.outputsChanged || difference.formatChanged { - self.configureVideoOutputFormat(configuration: config) - self.configurePhotoOutputFormat(configuration: config) - } - // 7. 
Configure side-props (fps, lowLightBoost) - if difference.sidePropsChanged { - try self.configureSideProps(configuration: config, device: device) - } - // 8. Configure zoom - if difference.zoomChanged { - self.configureZoom(configuration: config, device: device) - } - // 9. Configure exposure bias - if difference.exposureChanged { - self.configureExposure(configuration: config, device: device) - } + } + + /** + Starts or stops the CaptureSession if needed (`isActive`) + */ + private func checkIsActive(configuration: CameraConfiguration) { + if configuration.isActive == captureSession.isRunning { + return } - - if difference.isSessionConfigurationDirty { - // We commit the session config updates AFTER the device config, - // that way we can also batch those changes into one update instead of doing two updates. - self.captureSession.commitConfiguration() + + // Start/Stop session + if configuration.isActive { + captureSession.startRunning() + delegate?.onCameraStarted() + } else { + captureSession.stopRunning() + delegate?.onCameraStopped() } - - // 10. Start or stop the session if needed - self.checkIsActive(configuration: config) - - // 11. Enable or disable the Torch if needed (requires session to be running) - if difference.torchChanged { - try device.lockForConfiguration() - defer { - device.unlockForConfiguration() - } - try self.configureTorch(configuration: config, device: device) + } + + public final func captureOutput(_ captureOutput: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) { + switch captureOutput { + case is AVCaptureVideoDataOutput: + onVideoFrame(sampleBuffer: sampleBuffer, orientation: connection.orientation, isMirrored: connection.isVideoMirrored) + case is AVCaptureAudioDataOutput: + onAudioFrame(sampleBuffer: sampleBuffer) + default: + break } - - // After configuring, set this to the new configuration. 
- self.configuration = config - } catch { - self.onConfigureError(error) - } - - // Set up Audio Capture Session (on audio queue) - if difference.audioSessionChanged { - CameraQueues.audioQueue.async { - do { - // Lock Capture Session for configuration - VisionLogger.log(level: .info, message: "Beginning AudioSession configuration...") - self.audioCaptureSession.beginConfiguration() - - try self.configureAudioSession(configuration: config) - - // Unlock Capture Session again and submit configuration to Hardware - self.audioCaptureSession.commitConfiguration() - VisionLogger.log(level: .info, message: "Committed AudioSession configuration!") - } catch { - self.onConfigureError(error) - } + } + + private final func onVideoFrame(sampleBuffer: CMSampleBuffer, orientation: Orientation, isMirrored: Bool) { + if let recordingSession { + do { + // Write the Video Buffer to the .mov/.mp4 file + try recordingSession.append(buffer: sampleBuffer, ofType: .video) + } catch let error as CameraError { + delegate?.onError(error) + } catch { + delegate?.onError(.capture(.unknown(message: error.localizedDescription))) + } } - } - - // Set up Location streaming (on location queue) - if difference.locationChanged { - CameraQueues.locationQueue.async { - do { - VisionLogger.log(level: .info, message: "Beginning Location Output configuration...") - try self.configureLocationOutput(configuration: config) - VisionLogger.log(level: .info, message: "Finished Location Output configuration!") - } catch { - self.onConfigureError(error) - } + + if let delegate { + // Call Frame Processor (delegate) for every Video Frame + delegate.onFrame(sampleBuffer: sampleBuffer, orientation: orientation, isMirrored: isMirrored) } - } } - } - - /** - Starts or stops the CaptureSession if needed (`isActive`) - */ - private func checkIsActive(configuration: CameraConfiguration) { - if configuration.isActive == captureSession.isRunning { - return - } - - // Start/Stop session - if configuration.isActive { - 
captureSession.startRunning() - delegate?.onCameraStarted() - } else { - captureSession.stopRunning() - delegate?.onCameraStopped() - } - } - - public final func captureOutput(_ captureOutput: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) { - switch captureOutput { - case is AVCaptureVideoDataOutput: - onVideoFrame(sampleBuffer: sampleBuffer, orientation: connection.orientation, isMirrored: connection.isVideoMirrored) - case is AVCaptureAudioDataOutput: - onAudioFrame(sampleBuffer: sampleBuffer) - default: - break - } - } - - private final func onVideoFrame(sampleBuffer: CMSampleBuffer, orientation: Orientation, isMirrored: Bool) { - if let recordingSession { - do { - // Write the Video Buffer to the .mov/.mp4 file - try recordingSession.append(buffer: sampleBuffer, ofType: .video) - } catch let error as CameraError { - delegate?.onError(error) - } catch { - delegate?.onError(.capture(.unknown(message: error.localizedDescription))) - } - } - - if let delegate { - // Call Frame Processor (delegate) for every Video Frame - delegate.onFrame(sampleBuffer: sampleBuffer, orientation: orientation, isMirrored: isMirrored) - } - } - - private final func onAudioFrame(sampleBuffer: CMSampleBuffer) { - if let recordingSession { - do { - // Synchronize the Audio Buffer with the Video Session's time because it's two separate - // AVCaptureSessions, then write it to the .mov/.mp4 file - audioCaptureSession.synchronizeBuffer(sampleBuffer, toSession: captureSession) - try recordingSession.append(buffer: sampleBuffer, ofType: .audio) - } catch let error as CameraError { - delegate?.onError(error) - } catch { - delegate?.onError(.capture(.unknown(message: error.localizedDescription))) - } - } - } - - // pragma MARK: Notifications - - @objc - func sessionRuntimeError(notification: Notification) { - VisionLogger.log(level: .error, message: "Unexpected Camera Runtime Error occured!") - guard let error = 
notification.userInfo?[AVCaptureSessionErrorKey] as? AVError else { - return + + private final func onAudioFrame(sampleBuffer: CMSampleBuffer) { + if let recordingSession { + do { + // Synchronize the Audio Buffer with the Video Session's time because it's two separate + // AVCaptureSessions, then write it to the .mov/.mp4 file + audioCaptureSession.synchronizeBuffer(sampleBuffer, toSession: captureSession) + try recordingSession.append(buffer: sampleBuffer, ofType: .audio) + } catch let error as CameraError { + delegate?.onError(error) + } catch { + delegate?.onError(.capture(.unknown(message: error.localizedDescription))) + } + } } - - // Notify consumer about runtime error - delegate?.onError(.unknown(message: error._nsError.description, cause: error._nsError)) - - let shouldRestart = configuration?.isActive == true - if shouldRestart { - // restart capture session after an error occured - CameraQueues.cameraQueue.async { - self.captureSession.startRunning() - } + + // pragma MARK: Notifications + + @objc + func sessionRuntimeError(notification: Notification) { + VisionLogger.log(level: .error, message: "Unexpected Camera Runtime Error occured!") + guard let error = notification.userInfo?[AVCaptureSessionErrorKey] as? AVError else { + return + } + + // Notify consumer about runtime error + delegate?.onError(.unknown(message: error._nsError.description, cause: error._nsError)) + + let shouldRestart = configuration?.isActive == true + if shouldRestart { + // restart capture session after an error occured + CameraQueues.cameraQueue.async { + self.captureSession.startRunning() + } + } } - } } diff --git a/package/ios/Core/Types/RecordVideoOptions.swift b/package/ios/Core/Types/RecordVideoOptions.swift index d12e05eb79..ae40728fe4 100644 --- a/package/ios/Core/Types/RecordVideoOptions.swift +++ b/package/ios/Core/Types/RecordVideoOptions.swift @@ -14,6 +14,7 @@ struct RecordVideoOptions { var flash: Torch = .off var codec: AVVideoCodecType? 
//
//  AudioInputDevicesManager.swift
//  VisionCamera
//
//  Created by SnowFox (Y.O) on 19.08.25.
//

import AVFoundation
import Foundation

/// Exposes the list of available audio input devices (microphones) to JS and
/// emits an `AudioInputDevicesChanged` event whenever the audio route changes
/// (e.g. a headset or USB microphone is plugged in or out).
@objc(AudioInputDevicesManager)
final class AudioInputDevicesManager: RCTEventEmitter {
  private let audioSession = AVAudioSession.sharedInstance()
  private let notificationCenter = NotificationCenter.default
  private let devicesChangedEventName = "AudioInputDevicesChanged"

  override func supportedEvents() -> [String]! {
    return [devicesChangedEventName]
  }

  override static func requiresMainQueueSetup() -> Bool {
    return false
  }

  /// Initial device list, exported as a module constant so JS can read it
  /// synchronously at startup. Later changes are delivered via the event.
  override func constantsToExport() -> [AnyHashable: Any]! {
    let devices = getDevicesJson()
    VisionLogger.log(level: .info, message: "Found \(devices.count) initial Audio Input Devices.")
    return [
      "availableAudioInputDevices": devices,
    ]
  }

  /// Route changed - re-query the available inputs and notify JS listeners.
  @objc private func handleAvailableInputsChanged(_ notification: Notification) {
    let devices = getDevicesJson()
    VisionLogger.log(level: .info, message: "Audio Input Devices changed - found \(devices.count) Audio Devices.")
    sendEvent(withName: devicesChangedEventName, body: devices)
  }

  /// Serializes the session's available inputs into plain dictionaries for the bridge.
  private func getDevicesJson() -> [[String: Any]] {
    return audioSession.availableInputs?.map { input in
      [
        "portName": input.portName,
        "portType": input.portType.rawValue,
        "uid": input.uid,
      ]
    } ?? []
  }

  // RCTEventEmitter lifecycle: only observe while JS has at least one listener.
  override func startObserving() {
    notificationCenter.addObserver(self,
                                   selector: #selector(handleAvailableInputsChanged(_:)),
                                   name: AVAudioSession.routeChangeNotification,
                                   object: audioSession)
  }

  override func stopObserving() {
    notificationCenter.removeObserver(self)
  }
}
//
//  AudioInputLevelManager.swift
//  VisionCamera
//
//  Created by SnowFox (Y.O) on 24.08.25.
//

import AVFAudio
import Foundation

/// Emits the current microphone input level to JS (`AudioInputLevelChanged`)
/// while at least one listener is registered. Rebuilds the metering tap on
/// audio route changes and pauses/resumes the engine around audio
/// interruptions and app background/foreground transitions.
@objc(AudioInputLevelManager)
final class AudioInputLevelManager: RCTEventEmitter {
  private let audioSession = AVAudioSession.sharedInstance()
  private let notificationCenter = NotificationCenter.default
  private var audioEngine = AVAudioEngine()
  private let audioLevelChangedEventName = "AudioInputLevelChanged"

  override func invalidate() {
    notificationCenter.removeObserver(self)
    removeInputNode()
  }

  override static func requiresMainQueueSetup() -> Bool {
    return false
  }

  override func supportedEvents() -> [String]! {
    return [audioLevelChangedEventName]
  }

  /// (Re-)creates the engine and installs a level-metering tap on its input node.
  private func startInputNode() {
    // Tear down any previous engine first - otherwise the old tap keeps firing
    // (duplicate events) and the old running engine leaks.
    removeInputNode()

    audioEngine = AVAudioEngine()
    let inputNode = audioEngine.inputNode
    let format = inputNode.inputFormat(forBus: 0)

    // A sample rate of 0 means there is no usable input route right now.
    guard format.sampleRate > 0 else {
      return
    }

    // [weak self]: the engine retains the tap closure; a strong capture would
    // create a retain cycle (self -> engine -> closure -> self).
    inputNode.installTap(onBus: 0, bufferSize: 1024, format: format) { [weak self] buffer, _ in
      guard let self = self else { return }
      let level = self.getSoundLevel(buffer: buffer)
      self.sendEvent(withName: self.audioLevelChangedEventName, body: level)
    }

    do {
      try audioEngine.start()
    } catch {
      VisionLogger.log(level: .error, message: "Failed to start audio engine: \(error.localizedDescription)")
    }
  }

  /// Route changed (e.g. headset plugged in/out) - rebuild the tap on the new input.
  @objc private func restartInputNode(_ notification: Notification) {
    guard let userInfo = notification.userInfo,
          let reasonValue = userInfo[AVAudioSessionRouteChangeReasonKey] as? UInt,
          let reason = AVAudioSession.RouteChangeReason(rawValue: reasonValue) else {
      return
    }

    // Ignore notifications without a known reason.
    guard reason != .unknown else {
      return
    }

    startInputNode()
  }

  private func removeInputNode() {
    audioEngine.stop()
    audioEngine.inputNode.removeTap(onBus: 0)
  }

  /// RMS of the buffer converted to a positive scale: 20*log10(rms) is dBFS
  /// (negative), shifted by +100 and clamped at 0. `ulpOfOne` avoids log10(0)
  /// on a silent buffer.
  private func getSoundLevel(buffer: AVAudioPCMBuffer) -> Float {
    guard let channelData = buffer.floatChannelData else { return 0 }
    let samples = Array(UnsafeBufferPointer(start: channelData[0], count: Int(buffer.frameLength)))
    let rms = sqrt(samples.map { $0 * $0 }.reduce(0, +) / Float(buffer.frameLength) + Float.ulpOfOne)
    let level = 20 * log10(rms)
    return max(level + 100, 0)
  }

  @objc func handleInterruption(notification: Notification) {
    guard let info = notification.userInfo,
          let type = info[AVAudioSessionInterruptionTypeKey] as? UInt else { return }

    if type == AVAudioSession.InterruptionType.began.rawValue {
      // Pause the engine while the interruption (phone call, Siri, ...) lasts.
      if audioEngine.isRunning {
        audioEngine.pause()
        VisionLogger.log(level: .info, message: "audio engine interrupted status: paused")
      }
    } else if type == AVAudioSession.InterruptionType.ended.rawValue {
      do {
        try audioEngine.start()
        VisionLogger.log(level: .info, message: "audio engine interrupted status: started")
      } catch {
        VisionLogger.log(level: .error, message: "Failed to restart audio engine after interruption: \(error.localizedDescription)")
      }
    }
  }

  @objc func appMovedToBackground() {
    if audioEngine.isRunning {
      audioEngine.pause()
    }
  }

  @objc func appMovedToForeground() {
    do {
      try audioEngine.start()
    } catch {
      VisionLogger.log(level: .error, message: "Failed to resume audio engine in foreground: \(error.localizedDescription)")
    }
  }

  // RCTEventEmitter lifecycle: only meter while JS has at least one listener.
  override func startObserving() {
    // Start metering immediately - previously levels were only emitted after
    // the first route change, so a fresh JS listener received no events.
    startInputNode()
    notificationCenter.addObserver(self, selector: #selector(restartInputNode(_:)), name: AVAudioSession.routeChangeNotification, object: audioSession)
    // handleInterruption was previously defined but never subscribed.
    notificationCenter.addObserver(self, selector: #selector(handleInterruption(notification:)), name: AVAudioSession.interruptionNotification, object: audioSession)
    notificationCenter.addObserver(self, selector: #selector(appMovedToBackground), name: UIApplication.willResignActiveNotification, object: nil)
    notificationCenter.addObserver(self, selector: #selector(appMovedToForeground), name: UIApplication.didBecomeActiveNotification, object: nil)
  }

  override func stopObserving() {
    notificationCenter.removeObserver(self)
    removeInputNode()
  }
}
+ @objc var enableDepthData = false @objc var enablePortraitEffectsMatteDelivery = false @objc var enableBufferCompression = false @@ -183,7 +185,7 @@ public final class CameraView: UIView, CameraSessionDelegate, PreviewViewDelegat VisionLogger.log(level: .info, message: "Updating \(changedProps.count) props: [\(changedProps.joined(separator: ", "))]") let now = DispatchTime.now() currentConfigureCall = now - + cameraSession.configure { [self] config in // Check if we're still the latest call to configure { ... } guard currentConfigureCall == now else { @@ -196,7 +198,7 @@ public final class CameraView: UIView, CameraSessionDelegate, PreviewViewDelegat // Input Camera Device config.cameraId = cameraId as? String config.isMirrored = isMirrored - + // Photo if photo { config.photo = .enabled(config: CameraConfiguration.Photo(qualityBalance: getPhotoQualityBalance(), @@ -272,13 +274,16 @@ public final class CameraView: UIView, CameraSessionDelegate, PreviewViewDelegat // isActive config.isActive = isActive + + // Preffered audio input device uid + config.audioInputDeviceUid = audioInputDeviceUid as String? 
} // Store `zoom` offset for native pinch-gesture if changedProps.contains("zoom") { pinchScaleOffset = zoom.doubleValue } - + // Prevent phone from going to sleep UIApplication.shared.isIdleTimerDisabled = isActive } diff --git a/package/ios/React/CameraViewManager.m b/package/ios/React/CameraViewManager.m index 527c9bc0fd..d39ca0f581 100644 --- a/package/ios/React/CameraViewManager.m +++ b/package/ios/React/CameraViewManager.m @@ -26,6 +26,7 @@ @interface RCT_EXTERN_REMAP_MODULE (CameraView, CameraViewManager, RCTViewManage // Camera View Properties RCT_EXPORT_VIEW_PROPERTY(isActive, BOOL); RCT_EXPORT_VIEW_PROPERTY(cameraId, NSString); +RCT_EXPORT_VIEW_PROPERTY(audioInputDeviceUid, NSString); RCT_EXPORT_VIEW_PROPERTY(enableDepthData, BOOL); RCT_EXPORT_VIEW_PROPERTY(enablePortraitEffectsMatteDelivery, BOOL); RCT_EXPORT_VIEW_PROPERTY(enableBufferCompression, BOOL); diff --git a/package/src/AudioInputDevices.ts b/package/src/AudioInputDevices.ts new file mode 100644 index 0000000000..44f1d09846 --- /dev/null +++ b/package/src/AudioInputDevices.ts @@ -0,0 +1,22 @@ +import { NativeModules, NativeEventEmitter } from 'react-native' +import type { AudioInputDevice } from './types/AudioInputDevice' + +const AudioInputDevicesManager = NativeModules.AudioInputDevices as { + getConstants: () => { + availableAudioInputDevices: AudioInputDevice[] + } +} + +const constants = AudioInputDevicesManager.getConstants() + +const DEVICES_CHANGED_NAME = 'AudioInputDevicesChanged' +// eslint-disable-next-line @typescript-eslint/no-explicit-any +const eventEmitter = new NativeEventEmitter(AudioInputDevicesManager as any) + +export const AudioInputDevices = { + // userPreferredCameraDevice: constants.userPreferredCameraDevice, + getAvailableAudioInputDevices: () => constants.availableAudioInputDevices, + addAudioInputChangedListener: (callback: (newDevices: AudioInputDevice[]) => void) => { + return eventEmitter.addListener(DEVICES_CHANGED_NAME, callback) + }, +} diff --git 
a/package/src/Camera.tsx b/package/src/Camera.tsx index afe056a761..fced373a69 100644 --- a/package/src/Camera.tsx +++ b/package/src/Camera.tsx @@ -616,7 +616,7 @@ export class Camera extends React.PureComponent { /** @internal */ public render(): React.ReactNode { // We remove the big `device` object from the props because we only need to pass `cameraId` to native. - const { device, frameProcessor, codeScanner, enableFpsGraph, fps, videoBitRate, ...props } = this.props + const { device, audioInputDevice, frameProcessor, codeScanner, enableFpsGraph, fps, videoBitRate, ...props } = this.props // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition if (device == null) { @@ -650,6 +650,7 @@ export class Camera extends React.PureComponent { & { // private intermediate props cameraId: string + audioInputDeviceUid?: string enableFrameProcessor: boolean codeScannerOptions?: Omit minFps?: number diff --git a/package/src/hooks/useAudioInputDevices.ts b/package/src/hooks/useAudioInputDevices.ts new file mode 100644 index 0000000000..14dd33bd6d --- /dev/null +++ b/package/src/hooks/useAudioInputDevices.ts @@ -0,0 +1,23 @@ +import { useEffect, useState } from 'react' +import { AudioInputDevices } from '../AudioInputDevices' +import type { AudioInputDevice } from '../types/AudioInputDevice' + +/** + * Get the audio input devices for the current audio session. + * + * Built in microphone is always available after permissions, + * while `external` devices might be plugged in or out at any point, + * so the result of this function might update over time. 
+ */ +export function useAudioInputDevices(): AudioInputDevice[] { + const [devices, setDevices] = useState(AudioInputDevices.getAvailableAudioInputDevices) + + useEffect(() => { + const listener = AudioInputDevices.addAudioInputChangedListener((newDevices) => { + setDevices(newDevices) + }) + return () => listener.remove() + }, []) + + return devices +} diff --git a/package/src/index.ts b/package/src/index.ts index a375739263..3d3cd7afbe 100644 --- a/package/src/index.ts +++ b/package/src/index.ts @@ -20,6 +20,9 @@ export * from './devices/getCameraFormat' export * from './devices/getCameraDevice' export * from './devices/Templates' +// Listeners +export * from './listeners/AudioInputLevel' + // Hooks export * from './hooks/useCameraDevice' export * from './hooks/useCameraDevices' @@ -27,7 +30,7 @@ export * from './hooks/useCameraFormat' export * from './hooks/useCameraPermission' export * from './hooks/useCodeScanner' export * from './hooks/useFrameProcessor' - +export * from './hooks/useAudioInputDevices' // Frame Processors export * from './frame-processors/runAsync' export * from './frame-processors/runAtTargetFps' diff --git a/package/src/listeners/AudioInputLevel.ts b/package/src/listeners/AudioInputLevel.ts new file mode 100644 index 0000000000..e3c7be30ba --- /dev/null +++ b/package/src/listeners/AudioInputLevel.ts @@ -0,0 +1,17 @@ +import { NativeModules, NativeEventEmitter } from 'react-native' + +interface AudioInputLevelManagerType {} + +const AudioInputLevelManager = NativeModules.AudioInputLevel as AudioInputLevelManagerType +const AUDIO_LEVEL_CHANGED_NAME = 'AudioInputLevelChanged' +// eslint-disable-next-line @typescript-eslint/no-explicit-any +const eventEmitter = new NativeEventEmitter(AudioInputLevelManager as any) + +export const AudioInputLevel = { + /** + * Used for listening to audio levels of the current microphone. 
+ */ + addAudioLevelChangedListener: (callback: (level: number) => void) => { + return eventEmitter.addListener(AUDIO_LEVEL_CHANGED_NAME, callback) + }, +} diff --git a/package/src/types/AudioInputDevice.ts b/package/src/types/AudioInputDevice.ts new file mode 100644 index 0000000000..45ec912729 --- /dev/null +++ b/package/src/types/AudioInputDevice.ts @@ -0,0 +1,5 @@ +export interface AudioInputDevice { + portName: string + portType: string + uid: string +} diff --git a/package/src/types/CameraProps.ts b/package/src/types/CameraProps.ts index 18bfc9b618..9576728406 100644 --- a/package/src/types/CameraProps.ts +++ b/package/src/types/CameraProps.ts @@ -7,6 +7,7 @@ import type { ISharedValue } from 'react-native-worklets-core' import type { SkImage } from '@shopify/react-native-skia' import type { OutputOrientation } from './OutputOrientation' import type { Orientation } from './Orientation' +import type { AudioInputDevice } from './AudioInputDevice' export interface ReadonlyFrameProcessor { frameProcessor: (frame: Frame) => void @@ -51,6 +52,12 @@ export interface CameraProps extends ViewProps { * ``` */ device: CameraDevice + + /** + * Audio Input device to be used as the microphone. + */ + audioInputDevice?: AudioInputDevice + /** * Whether the Camera should actively stream video frames, or not. See the [documentation about the `isActive` prop](https://react-native-vision-camera.com/docs/guides/lifecycle#the-isactive-prop) for more information. 
* From 1d0aead336bdae4584cf05f3bbc525ae8b2f162c Mon Sep 17 00:00:00 2001 From: ISnowFoxI Date: Wed, 24 Sep 2025 21:17:54 +0400 Subject: [PATCH 2/8] working on audio switch --- .cursor/mcp.json | 11 ++ .../android/app/src/main/AndroidManifest.xml | 3 + example/src/CameraPage.tsx | 34 ++--- .../camera/core/CameraConfiguration.kt | 14 +- .../core/CameraSession+Configuration.kt | 39 +++++- .../camera/core/CameraSession+Video.kt | 4 +- .../com/mrousavy/camera/core/CameraSession.kt | 2 +- .../camera/react/AudioInputDevicesManager.kt | 128 ++++++++++++++++++ .../camera/react/AudioInputLevelManager.kt | 4 + .../mrousavy/camera/react/CameraPackage.kt | 3 +- .../camera/react/CameraView+Events.kt | 2 +- .../com/mrousavy/camera/react/CameraView.kt | 4 + .../camera/react/CameraViewManager.kt | 5 + package/src/listeners/AudioInputLevel.ts | 28 ++-- 14 files changed, 239 insertions(+), 42 deletions(-) create mode 100644 .cursor/mcp.json create mode 100644 package/android/src/main/java/com/mrousavy/camera/react/AudioInputDevicesManager.kt create mode 100644 package/android/src/main/java/com/mrousavy/camera/react/AudioInputLevelManager.kt diff --git a/.cursor/mcp.json b/.cursor/mcp.json new file mode 100644 index 0000000000..0f9eee6a18 --- /dev/null +++ b/.cursor/mcp.json @@ -0,0 +1,11 @@ +{ + "mcpServers": { + "RadonAi": { + "url": "http://127.0.0.1:52696/mcp", + "type": "http", + "headers": { + "nonce": "066ca2c6-60c7-4025-9dd3-bfc520d9b007" + } + } + } +} \ No newline at end of file diff --git a/example/android/app/src/main/AndroidManifest.xml b/example/android/app/src/main/AndroidManifest.xml index 5c7a7ab0e4..a9fb699239 100644 --- a/example/android/app/src/main/AndroidManifest.xml +++ b/example/android/app/src/main/AndroidManifest.xml @@ -9,6 +9,9 @@ + + + { - let listener: EmitterSubscription | null = null - if (selectedMic?.uid) { - listener = AudioInputLevel.addAudioLevelChangedListener((level) => { - console.log('Current Audio device level:', level) - }) - } - return () => 
{ - listener?.remove() - } - }, [selectedMic?.uid]) + // useEffect(() => { + // let listener: EmitterSubscription | null = null + // if (selectedMic?.uid) { + // listener = AudioInputLevel.addAudioLevelChangedListener((level) => { + // console.log('Current Audio device level:', level) + // }) + // } + // return () => { + // listener?.remove() + // } + // }, [selectedMic?.uid]) const videoHdr = format?.supportsVideoHdr && enableHdr const photoHdr = format?.supportsPhotoHdr && enableHdr && !videoHdr @@ -230,7 +230,7 @@ export function CameraPage({ navigation }: Props): React.ReactElement { onPreviewOrientationChanged={(o) => console.log(`Preview orientation changed to ${o}!`)} onUIRotationChanged={(degrees) => console.log(`UI Rotation changed: ${degrees}°`)} format={format} - fps={fps} + // fps={fps} photoHdr={photoHdr} videoHdr={videoHdr} photoQualityBalance="quality" @@ -238,13 +238,13 @@ export function CameraPage({ navigation }: Props): React.ReactElement { enableZoomGesture={false} animatedProps={cameraAnimatedProps} exposure={0} - enableFpsGraph={true} + // enableFpsGraph={true} outputOrientation="device" photo={true} video={true} audio={microphone.hasPermission} enableLocation={location.hasPermission} - frameProcessor={frameProcessor} + // frameProcessor={frameProcessor} /> @@ -304,7 +304,7 @@ export function CameraPage({ navigation }: Props): React.ReactElement { {audioInputDevices.map((item) => ( setSelectedMic(item)} style={styles.microphoneButton}> - {item.portName} + {item.portType} ))} diff --git a/package/android/src/main/java/com/mrousavy/camera/core/CameraConfiguration.kt b/package/android/src/main/java/com/mrousavy/camera/core/CameraConfiguration.kt index f91ce830bb..bd9383ff25 100644 --- a/package/android/src/main/java/com/mrousavy/camera/core/CameraConfiguration.kt +++ b/package/android/src/main/java/com/mrousavy/camera/core/CameraConfiguration.kt @@ -13,8 +13,9 @@ import com.mrousavy.camera.core.types.VideoStabilizationMode data class 
CameraConfiguration( // Input var cameraId: String? = null, - // Outputs + + var audioInputDeviceUid: String? = null, var preview: Output = Output.Disabled.create(), var photo: Output = Output.Disabled.create(), var video: Output