Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
20 changes: 10 additions & 10 deletions example/ios/Podfile.lock
Original file line number Diff line number Diff line change
Expand Up @@ -1768,16 +1768,16 @@ PODS:
- ReactCommon/turbomodule/core
- Yoga
- SocketRocket (0.7.0)
- VisionCamera (4.7.2):
- VisionCamera/Core (= 4.7.2)
- VisionCamera/FrameProcessors (= 4.7.2)
- VisionCamera/React (= 4.7.2)
- VisionCamera/Core (4.7.2)
- VisionCamera/FrameProcessors (4.7.2):
- VisionCamera (4.7.3):
- VisionCamera/Core (= 4.7.3)
- VisionCamera/FrameProcessors (= 4.7.3)
- VisionCamera/React (= 4.7.3)
- VisionCamera/Core (4.7.3)
- VisionCamera/FrameProcessors (4.7.3):
- React
- React-callinvoker
- react-native-worklets-core
- VisionCamera/React (4.7.2):
- VisionCamera/React (4.7.3):
- React-Core
- VisionCamera/FrameProcessors
- Yoga (0.0.0)
Expand Down Expand Up @@ -2097,9 +2097,9 @@ SPEC CHECKSUMS:
RNStaticSafeAreaInsets: 4696b82d3a11ba6f3a790159ddb7290f04abd275
RNVectorIcons: 182892e7d1a2f27b52d3c627eca5d2665a22ee28
SocketRocket: abac6f5de4d4d62d24e11868d7a2f427e0ef940d
VisionCamera: 4146fa2612c154f893a42a9b1feedf868faa6b23
Yoga: aa3df615739504eebb91925fc9c58b4922ea9a08
VisionCamera: 0044a94f7489f19e19d5938e97dfc36f4784af3c
Yoga: 055f92ad73f8c8600a93f0e25ac0b2344c3b07e6

PODFILE CHECKSUM: 2ad84241179871ca890f7c65c855d117862f1a68

COCOAPODS: 1.15.2
COCOAPODS: 1.16.2
1 change: 1 addition & 0 deletions example/src/CameraPage.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -221,6 +221,7 @@ export function CameraPage({ navigation }: Props): React.ReactElement {
enableZoomGesture={false}
animatedProps={cameraAnimatedProps}
exposure={0}
whiteBalance={5000}
enableFpsGraph={true}
outputOrientation="device"
photo={true}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,7 @@ std::vector<jsi::PropNameID> FrameHostObject::getPropertyNames(jsi::Runtime& rt)
return result;
}

#define JSI_FUNC [=](jsi::Runtime & runtime, const jsi::Value& thisValue, const jsi::Value* arguments, size_t count) -> jsi::Value
#define JSI_FUNC [=](jsi::Runtime & runtime, const jsi::Value& thisValue, const jsi::Value* arguments, size_t count)->jsi::Value

jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& propName) {
auto name = propName.utf8(runtime);
Expand Down
13 changes: 10 additions & 3 deletions package/ios/Core/CameraConfiguration.swift
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,9 @@ final class CameraConfiguration {
// Exposure
var exposure: Float?

// White Balance
var whiteBalance: Float?

// isActive (Start/Stop)
var isActive = false

Expand All @@ -71,6 +74,7 @@ final class CameraConfiguration {
torch = other.torch
zoom = other.zoom
exposure = other.exposure
whiteBalance = other.whiteBalance
isActive = other.isActive
audio = other.audio
} else {
Expand Down Expand Up @@ -98,6 +102,7 @@ final class CameraConfiguration {
let torchChanged: Bool
let zoomChanged: Bool
let exposureChanged: Bool
let whiteBalanceChanged: Bool

let audioSessionChanged: Bool
let locationChanged: Bool
Expand All @@ -112,10 +117,10 @@ final class CameraConfiguration {

/**
Returns `true` when props that affect the AVCaptureDevice configuration (i.e. props that require lockForConfiguration()) have changed.
[`formatChanged`, `sidePropsChanged`, `zoomChanged`, `exposureChanged`]
[`formatChanged`, `sidePropsChanged`, `zoomChanged`, `exposureChanged`, `whiteBalanceChanged`]
*/
var isDeviceConfigurationDirty: Bool {
return isSessionConfigurationDirty || formatChanged || sidePropsChanged || zoomChanged || exposureChanged
return isSessionConfigurationDirty || formatChanged || sidePropsChanged || zoomChanged || exposureChanged || whiteBalanceChanged
}

init(between left: CameraConfiguration?, and right: CameraConfiguration) {
Expand All @@ -139,6 +144,8 @@ final class CameraConfiguration {
zoomChanged = formatChanged || left?.zoom != right.zoom
// exposure (depends on device)
exposureChanged = inputChanged || left?.exposure != right.exposure
// white balance (depends on device)
whiteBalanceChanged = inputChanged || left?.whiteBalance != right.whiteBalance

// audio session
audioSessionChanged = left?.audio != right.audio
Expand All @@ -153,7 +160,7 @@ final class CameraConfiguration {
case disabled
case enabled(config: T)

public static func == (lhs: OutputConfiguration, rhs: OutputConfiguration) -> Bool {
static func == (lhs: OutputConfiguration, rhs: OutputConfiguration) -> Bool {
switch (lhs, rhs) {
case (.disabled, .disabled):
return true
Expand Down
41 changes: 41 additions & 0 deletions package/ios/Core/CameraSession+Configuration.swift
Original file line number Diff line number Diff line change
Expand Up @@ -336,6 +336,47 @@ extension CameraSession {
device.setExposureTargetBias(clamped)
}

// pragma MARK: White Balance

/**
 Configures white balance (`whiteBalance`) as a color temperature value in Kelvin.

 When `whiteBalance` is set, the device's white balance is locked to gains derived
 from the given temperature. When it is `nil` (prop removed or never set), the device
 is returned to continuous auto white balance so a previous lock does not persist.

 - Note: Must be called while the device is locked for configuration
   (`lockForConfiguration()`), like the other `configure*` methods in this extension.
*/
func configureWhiteBalance(configuration: CameraConfiguration, device: AVCaptureDevice) {
  guard let whiteBalance = configuration.whiteBalance else {
    // The prop was removed — restore automatic white balance instead of leaving
    // the device locked to the previously applied temperature.
    if device.isWhiteBalanceModeSupported(.continuousAutoWhiteBalance) {
      device.whiteBalanceMode = .continuousAutoWhiteBalance
    }
    return
  }

  guard device.isLockingWhiteBalanceWithCustomDeviceGainsSupported else {
    VisionLogger.log(level: .warning, message: "White balance lock mode with gains is not supported on this device!")
    return
  }

  // Clamp temperature to a sane range (typically 3000K to 8000K).
  let clampedTemperature = min(max(whiteBalance, 3000), 8000)

  // Convert the temperature (with neutral tint) to device-specific RGB gains.
  let tempAndTint = AVCaptureDevice.WhiteBalanceTemperatureAndTintValues(
    temperature: clampedTemperature,
    tint: 0
  )
  let gains = device.deviceWhiteBalanceGains(for: tempAndTint)

  // Gains passed to setWhiteBalanceModeLocked must lie in [1.0, maxWhiteBalanceGain],
  // otherwise AVFoundation throws an NSInvalidArgumentException.
  let clampedGains = AVCaptureDevice.WhiteBalanceGains(
    redGain: min(max(gains.redGain, 1.0), device.maxWhiteBalanceGain),
    greenGain: min(max(gains.greenGain, 1.0), device.maxWhiteBalanceGain),
    blueGain: min(max(gains.blueGain, 1.0), device.maxWhiteBalanceGain)
  )

  // Lock the white balance to the computed gains.
  device.setWhiteBalanceModeLocked(with: clampedGains, completionHandler: nil)

  VisionLogger.log(
    level: .info,
    message: "White balance set to \(clampedTemperature)K (R:\(clampedGains.redGain), G:\(clampedGains.greenGain), B:\(clampedGains.blueGain))"
  )
}

// pragma MARK: Audio

/**
Expand Down
8 changes: 6 additions & 2 deletions package/ios/Core/CameraSession.swift
Original file line number Diff line number Diff line change
Expand Up @@ -187,6 +187,10 @@ final class CameraSession: NSObject, AVCaptureVideoDataOutputSampleBufferDelegat
if difference.exposureChanged {
self.configureExposure(configuration: config, device: device)
}
// 10. Configure white balance
if difference.whiteBalanceChanged {
self.configureWhiteBalance(configuration: config, device: device)
}
}

if difference.isSessionConfigurationDirty {
Expand All @@ -195,7 +199,7 @@ final class CameraSession: NSObject, AVCaptureVideoDataOutputSampleBufferDelegat
self.captureSession.commitConfiguration()
}

// 10. Start or stop the session if needed
// 11. Start or stop the session if needed
self.checkIsActive(configuration: config)

// 12. Enable or disable the Torch if needed (requires session to be running)
Expand Down Expand Up @@ -265,7 +269,7 @@ final class CameraSession: NSObject, AVCaptureVideoDataOutputSampleBufferDelegat
}
}

public final func captureOutput(_ captureOutput: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
final func captureOutput(_ captureOutput: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
switch captureOutput {
case is AVCaptureVideoDataOutput:
onVideoFrame(sampleBuffer: sampleBuffer, orientation: connection.orientation, isMirrored: connection.isVideoMirrored)
Expand Down
2 changes: 1 addition & 1 deletion package/ios/Core/PreviewView.swift
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@ final class PreviewView: UIView {
}
}

override public static var layerClass: AnyClass {
override static var layerClass: AnyClass {
return AVCaptureVideoPreviewLayer.self
}

Expand Down
2 changes: 1 addition & 1 deletion package/ios/Core/Recording/Track.swift
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,7 @@ final class Track {
/**
Returns the last timestamp that was actually written to the track.
*/
public private(set) var lastTimestamp: CMTime?
private(set) var lastTimestamp: CMTime?

/**
Gets the natural size of the asset writer, or zero if it is not a visual track.
Expand Down
8 changes: 4 additions & 4 deletions package/ios/Core/Recording/TrackTimeline.swift
Original file line number Diff line number Diff line change
Expand Up @@ -25,22 +25,22 @@ final class TrackTimeline {
Represents whether the timeline has been marked as finished or not.
A timeline will automatically be marked as finished when a timestamp arrives that appears after a stop().
*/
public private(set) var isFinished = false
private(set) var isFinished = false

/**
Gets the latency of the buffers in this timeline.
This is computed by (currentTime - mostRecentBuffer.timestamp)
*/
public private(set) var latency: CMTime = .zero
private(set) var latency: CMTime = .zero

/**
Get the first actually written timestamp of this timeline
*/
public private(set) var firstTimestamp: CMTime?
private(set) var firstTimestamp: CMTime?
/**
Get the last actually written timestamp of this timeline.
*/
public private(set) var lastTimestamp: CMTime?
private(set) var lastTimestamp: CMTime?

init(ofTrackType type: TrackType, withClock clock: CMClock) {
trackType = type
Expand Down
2 changes: 1 addition & 1 deletion package/ios/FrameProcessors/FrameHostObject.mm
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@
return result;
}

#define JSI_FUNC [=](jsi::Runtime & runtime, const jsi::Value& thisValue, const jsi::Value* arguments, size_t count) -> jsi::Value
#define JSI_FUNC [=](jsi::Runtime & runtime, const jsi::Value& thisValue, const jsi::Value* arguments, size_t count)->jsi::Value

jsi::Value FrameHostObject::get(jsi::Runtime& runtime, const jsi::PropNameID& propName) {
auto name = propName.utf8(runtime);
Expand Down
4 changes: 4 additions & 0 deletions package/ios/React/CameraView.swift
Original file line number Diff line number Diff line change
Expand Up @@ -61,6 +61,7 @@ public final class CameraView: UIView, CameraSessionDelegate, PreviewViewDelegat
@objc var torch = "off"
@objc var zoom: NSNumber = 1.0 // in "factor"
@objc var exposure: NSNumber = 0.0
@objc var whiteBalance: NSNumber? // in Kelvin
@objc var videoStabilizationMode: NSString?
@objc var resizeMode: NSString = "cover" {
didSet {
Expand Down Expand Up @@ -270,6 +271,9 @@ public final class CameraView: UIView, CameraSessionDelegate, PreviewViewDelegat
// Exposure
config.exposure = exposure.floatValue

// White Balance
config.whiteBalance = whiteBalance?.floatValue

// isActive
config.isActive = isActive
}
Expand Down
1 change: 1 addition & 0 deletions package/ios/React/CameraViewManager.m
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,7 @@ @interface RCT_EXTERN_REMAP_MODULE (CameraView, CameraViewManager, RCTViewManage
RCT_EXPORT_VIEW_PROPERTY(torch, NSString);
RCT_EXPORT_VIEW_PROPERTY(zoom, NSNumber);
RCT_EXPORT_VIEW_PROPERTY(exposure, NSNumber);
RCT_EXPORT_VIEW_PROPERTY(whiteBalance, NSNumber);
RCT_EXPORT_VIEW_PROPERTY(enableZoomGesture, BOOL);
RCT_EXPORT_VIEW_PROPERTY(outputOrientation, NSString);
RCT_EXPORT_VIEW_PROPERTY(resizeMode, NSString);
Expand Down
2 changes: 1 addition & 1 deletion package/ios/React/Utils/Promise.swift
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ import Foundation
* Represents a JavaScript Promise instance. `reject()` and `resolve()` should only be called once.
*/
class Promise {
public private(set) var didResolve = false
private(set) var didResolve = false

init(resolver: @escaping RCTPromiseResolveBlock, rejecter: @escaping RCTPromiseRejectBlock) {
self.resolver = resolver
Expand Down
6 changes: 3 additions & 3 deletions package/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -31,11 +31,11 @@
"typescript": "tsc --noEmit false",
"lint": "eslint \"**/*.{js,ts,tsx}\" --fix",
"lint-ci": "eslint \"**/*.{js,ts,tsx}\" -f @jamesacarr/github-actions",
"start": "cd example && bun start",
"start": "cd ../example && bun start",
"build": "bob build",
"release": "bob build && release-it",
"pods": "cd example && bun pods",
"bootstrap": "bun && cd example && bun && bun pods",
"pods": "cd ../example && bun pods",
"bootstrap": "bun && cd ../example && bun && bun pods",
"check-android": "scripts/ktlint.sh && scripts/clang-format.sh",
"check-ios": "scripts/swiftlint.sh && scripts/swiftformat.sh && scripts/clang-format.sh",
"check-js": "bun lint --fix && bun typescript",
Expand Down
17 changes: 17 additions & 0 deletions package/src/types/CameraProps.ts
Original file line number Diff line number Diff line change
Expand Up @@ -149,6 +149,23 @@ export interface CameraProps extends ViewProps {
* The value between min- and max supported exposure is considered the default, neutral value.
*/
exposure?: number
/**
* Specifies the White Balance of the current camera as a color temperature in Kelvin.
*
* This locks the white balance to a specific temperature value in Kelvin (e.g. 5000 for daylight, 3000 for warm indoor lighting).
*
* When set, the camera will use a fixed white balance instead of auto white balance.
*
* Common values:
* - `3000` - Warm indoor lighting
* - `4000` - Fluorescent lighting
* - `5000` - Daylight
* - `6000` - Cloudy daylight
* - `7000` - Shade
*
* @platform iOS
*/
whiteBalance?: number
//#endregion

//#region Format/Preset selection
Expand Down