Skip to content

Commit

Permalink
Merge branch 'develop' into feature/misc
Browse files Browse the repository at this point in the history
  • Loading branch information
melpon authored Sep 5, 2023
2 parents 492ceb8 + 5d710be commit 05e399c
Show file tree
Hide file tree
Showing 9 changed files with 226 additions and 56 deletions.
5 changes: 5 additions & 0 deletions CHANGES.md
Original file line number Diff line number Diff line change
Expand Up @@ -11,12 +11,17 @@

## develop

- [CHANGE] `Sora.Config` 中にあるキャプチャラに関するフィールドを `Sora.CameraConfig` に移動する
- 修正方法は (TODO: @miosakuma がドキュメントへのリンクに差し替える) を参照して下さい
- @melpon
- [UPDATE] SoraClientContext を利用してコードを短くする
- @melpon
- [UPDATE] Sora C++ SDK を `2023.11.0` に上げる
- @melpon
- [UPDATE] libwebrtc を `m116.5845.6.1` に上げる
- @melpon
- [ADD] 接続中にキャプチャラを切り替える機能を実装
- @melpon
- [ADD] デバイスを掴まないようにする `NoVideoDevice`, `NoAudioDevice` を追加
- @melpon
- [ADD] ハードウェアエンコーダを利用するかどうかを設定する `UseHardwareEncoder` を追加
Expand Down
94 changes: 78 additions & 16 deletions Sora/Sora.cs
Original file line number Diff line number Diff line change
Expand Up @@ -83,6 +83,44 @@ public class Rule
public List<List<Rule>> Rules = new List<List<Rule>>();
}

// Capturer configuration, shared by Config (initial connect) and
// Sora.SwitchCamera (switching the capturer mid-connection).
public class CameraConfig
{
    // Capturer implementation to use; defaults to a physical capture device.
    public CapturerType CapturerType = Sora.CapturerType.DeviceCamera;
    // Unity camera to capture from; only used when CapturerType is UnityCamera.
    public UnityEngine.Camera UnityCamera = null;
    // Depth-buffer bit count for the RenderTexture backing a Unity camera capture.
    public int UnityCameraRenderTargetDepthBuffer = 16;
    // Capture device name; only used when CapturerType is DeviceCamera.
    public string VideoCapturerDevice = "";
    public int VideoWidth = 640;
    public int VideoHeight = 480;
    public int VideoFps = 30;

    // Builds a config that captures the rendered output of a Unity camera.
    public static CameraConfig FromUnityCamera(UnityEngine.Camera unityCamera, int unityCameraRenderTargetDepthBuffer, int videoWidth, int videoHeight, int videoFps)
    {
        var config = new CameraConfig();
        config.CapturerType = Sora.CapturerType.UnityCamera;
        config.UnityCamera = unityCamera;
        config.UnityCameraRenderTargetDepthBuffer = unityCameraRenderTargetDepthBuffer;
        config.VideoCapturerDevice = "";
        config.VideoWidth = videoWidth;
        config.VideoHeight = videoHeight;
        config.VideoFps = videoFps;
        return config;
    }
    // Builds a config that captures from a physical camera device.
    public static CameraConfig FromDeviceCamera(string videoCapturerDevice, int videoWidth, int videoHeight, int videoFps)
    {
        var config = new CameraConfig();
        config.CapturerType = Sora.CapturerType.DeviceCamera;
        config.UnityCamera = null;
        config.UnityCameraRenderTargetDepthBuffer = 16;
        config.VideoCapturerDevice = videoCapturerDevice;
        config.VideoWidth = videoWidth;
        config.VideoHeight = videoHeight;
        config.VideoFps = videoFps;
        return config;
    }
}

public class Config
{
public string SignalingUrl = "";
Expand All @@ -102,15 +140,9 @@ public class Config
public SimulcastRidType? SimulcastRid = null;
public bool NoVideoDevice = false;
public bool NoAudioDevice = false;
public CapturerType CapturerType = Sora.CapturerType.DeviceCamera;
public UnityEngine.Camera UnityCamera = null;
public int UnityCameraRenderTargetDepthBuffer = 16;
public CameraConfig CameraConfig = new CameraConfig();
public bool Video = true;
public bool Audio = true;
public string VideoCapturerDevice = "";
public int VideoWidth = 640;
public int VideoHeight = 480;
public int VideoFps = 30;
public VideoCodecType VideoCodecType = VideoCodecType.VP9;
public string VideoVp9Params = "";
public string VideoAv1Params = "";
Expand Down Expand Up @@ -241,10 +273,10 @@ public Sora()
public void Connect(Config config)
{
IntPtr unityCameraTexture = IntPtr.Zero;
if (config.CapturerType == CapturerType.UnityCamera)
if (config.CameraConfig.CapturerType == CapturerType.UnityCamera)
{
unityCamera = config.UnityCamera;
var texture = new UnityEngine.RenderTexture(config.VideoWidth, config.VideoHeight, config.UnityCameraRenderTargetDepthBuffer, UnityEngine.RenderTextureFormat.BGRA32);
unityCamera = config.CameraConfig.UnityCamera;
var texture = new UnityEngine.RenderTexture(config.CameraConfig.VideoWidth, config.CameraConfig.VideoHeight, config.CameraConfig.UnityCameraRenderTargetDepthBuffer, UnityEngine.RenderTextureFormat.BGRA32);
unityCamera.targetTexture = texture;
unityCamera.enabled = true;
unityCameraTexture = texture.GetNativeTexturePtr();
Expand Down Expand Up @@ -286,14 +318,14 @@ public void Connect(Config config)
cc.insecure = config.Insecure;
cc.no_video_device = config.NoVideoDevice;
cc.no_audio_device = config.NoAudioDevice;
cc.capturer_type = (int)config.CapturerType;
cc.unity_camera_texture = unityCameraTexture.ToInt64();
cc.video = config.Video;
cc.audio = config.Audio;
cc.video_capturer_device = config.VideoCapturerDevice;
cc.video_width = config.VideoWidth;
cc.video_height = config.VideoHeight;
cc.video_fps = config.VideoFps;
cc.camera_config.capturer_type = (int)config.CameraConfig.CapturerType;
cc.camera_config.unity_camera_texture = unityCameraTexture.ToInt64();
cc.camera_config.video_capturer_device = config.CameraConfig.VideoCapturerDevice;
cc.camera_config.video_width = config.CameraConfig.VideoWidth;
cc.camera_config.video_height = config.CameraConfig.VideoHeight;
cc.camera_config.video_fps = config.CameraConfig.VideoFps;
cc.video_codec_type = config.VideoCodecType.ToString();
cc.video_vp9_params = config.VideoVp9Params;
cc.video_av1_params = config.VideoAv1Params;
Expand Down Expand Up @@ -388,6 +420,34 @@ public void Disconnect()
sora_disconnect(p);
}

// Switches the active capturer while connected.
// Releases the Unity camera used by the previous capturer (if any), then
// prepares the new capturer — creating a fresh BGRA32 RenderTexture when a
// Unity camera is requested — and hands the serialized settings to the
// native sora_switch_camera.
public void SwitchCamera(CameraConfig config)
{
    // Detach and disable the previously captured Unity camera, if any.
    if (unityCamera != null)
    {
        unityCamera.enabled = false;
        unityCamera.targetTexture = null;
        unityCamera = null;
    }

    var texturePtr = IntPtr.Zero;
    if (config.CapturerType == CapturerType.UnityCamera)
    {
        unityCamera = config.UnityCamera;
        // Render target that the native side reads from each frame.
        var renderTarget = new UnityEngine.RenderTexture(
            config.VideoWidth,
            config.VideoHeight,
            config.UnityCameraRenderTargetDepthBuffer,
            UnityEngine.RenderTextureFormat.BGRA32);
        unityCamera.targetTexture = renderTarget;
        unityCamera.enabled = true;
        texturePtr = renderTarget.GetNativeTexturePtr();
    }

    var cc = new SoraConf.Internal.CameraConfig();
    cc.capturer_type = (int)config.CapturerType;
    cc.unity_camera_texture = texturePtr.ToInt64();
    cc.video_capturer_device = config.VideoCapturerDevice;
    cc.video_width = config.VideoWidth;
    cc.video_height = config.VideoHeight;
    cc.video_fps = config.VideoFps;
    sora_switch_camera(p, Jsonif.Json.ToJson(cc));
}

// Unity 側でレンダリングが完了した時(yield return new WaitForEndOfFrame() の後)に呼ぶイベント
// 指定した Unity カメラの映像を Sora 側のテクスチャにレンダリングしたりする
public void OnRender()
Expand Down Expand Up @@ -821,6 +881,8 @@ public string ConnectedSignalingURL
[DllImport(DllName)]
private static extern void sora_disconnect(IntPtr p);
[DllImport(DllName)]
private static extern void sora_switch_camera(IntPtr p, string config);
[DllImport(DllName)]
private static extern IntPtr sora_get_texture_update_callback();
[DllImport(DllName)]
private static extern void sora_destroy(IntPtr p);
Expand Down
16 changes: 10 additions & 6 deletions proto/sora_conf_internal.proto
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,15 @@ message ForwardingFilter {
repeated Rules rules = 2;
}

// Capturer settings, split out of ConnectConfig so the same message can be
// reused by the camera-switching API (sora_switch_camera).
// Field numbers keep the values these fields had when they lived directly in
// ConnectConfig (17-19 and 22-24); do not renumber them.
message CameraConfig {
  int32 capturer_type = 17;
  int64 unity_camera_texture = 18;
  string video_capturer_device = 19;
  int32 video_width = 22;
  int32 video_height = 23;
  int32 video_fps = 24;
}

message ConnectConfig {
string unity_version = 1;
repeated string signaling_url = 2;
Expand All @@ -56,14 +65,9 @@ message ConnectConfig {
string simulcast_rid = 16;
bool no_video_device = 160;
bool no_audio_device = 161;
int32 capturer_type = 17;
int64 unity_camera_texture = 18;
string video_capturer_device = 19;
CameraConfig camera_config = 17;
bool video = 20;
bool audio = 21;
int32 video_width = 22;
int32 video_height = 23;
int32 video_fps = 24;
string video_codec_type = 25;
string video_vp9_params = 250;
string video_av1_params = 251;
Expand Down
134 changes: 100 additions & 34 deletions src/sora.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,7 @@ Sora::~Sora() {
capturer_ = nullptr;
unity_adm_ = nullptr;

video_sender_ = nullptr;
audio_track_ = nullptr;
video_track_ = nullptr;

Expand Down Expand Up @@ -134,6 +135,40 @@ void Sora::DispatchEvents() {
}
}

// Flattens a webrtc::VideoFrame into the sora_conf::VideoFrame mirror struct
// that is serialized to JSON for the managed (Unity) side.
// Pointers — to the frame, its buffer, and the raw plane data — cross the
// boundary as int64_t values and are only valid while `frame` is alive.
static sora_conf::VideoFrame VideoFrameToConfig(
    const webrtc::VideoFrame& frame) {
  sora_conf::VideoFrame result;
  result.baseptr = reinterpret_cast<int64_t>(&frame);
  result.id = frame.id();
  result.timestamp_us = frame.timestamp_us();
  result.timestamp = frame.timestamp();
  result.ntp_time_ms = frame.ntp_time_ms();
  result.rotation = (int)frame.rotation();

  auto buffer = frame.video_frame_buffer();
  auto& out = result.video_frame_buffer;
  out.baseptr = reinterpret_cast<int64_t>(buffer.get());
  out.type = (sora_conf::VideoFrameBuffer::Type)buffer->type();
  out.width = buffer->width();
  out.height = buffer->height();
  switch (buffer->type()) {
    case webrtc::VideoFrameBuffer::Type::kI420: {
      auto i420 = buffer->GetI420();
      out.i420_stride_y = i420->StrideY();
      out.i420_stride_u = i420->StrideU();
      out.i420_stride_v = i420->StrideV();
      out.i420_data_y = reinterpret_cast<int64_t>(i420->DataY());
      out.i420_data_u = reinterpret_cast<int64_t>(i420->DataU());
      out.i420_data_v = reinterpret_cast<int64_t>(i420->DataV());
      break;
    }
    case webrtc::VideoFrameBuffer::Type::kNV12: {
      auto nv12 = buffer->GetNV12();
      out.nv12_stride_y = nv12->StrideY();
      out.nv12_stride_uv = nv12->StrideUV();
      out.nv12_data_y = reinterpret_cast<int64_t>(nv12->DataY());
      out.nv12_data_uv = reinterpret_cast<int64_t>(nv12->DataUV());
      break;
    }
    default:
      // Other buffer types (e.g. native textures) expose no plane data here.
      break;
  }
  return result;
}

void Sora::Connect(const sora_conf::internal::ConnectConfig& cc) {
auto on_disconnect = [this](int error_code, std::string reason) {
PushEvent([this, error_code, reason = std::move(reason)]() {
Expand Down Expand Up @@ -249,52 +284,25 @@ void Sora::DoConnect(const sora_conf::internal::ConnectConfig& cc,
if (on_capturer_frame_) {
on_frame = [on_frame =
on_capturer_frame_](const webrtc::VideoFrame& frame) {
sora_conf::VideoFrame f;
f.baseptr = reinterpret_cast<int64_t>(&frame);
f.id = frame.id();
f.timestamp_us = frame.timestamp_us();
f.timestamp = frame.timestamp();
f.ntp_time_ms = frame.ntp_time_ms();
f.rotation = (int)frame.rotation();
auto& v = f.video_frame_buffer;
auto vfb = frame.video_frame_buffer();
v.baseptr = reinterpret_cast<int64_t>(vfb.get());
v.type = (sora_conf::VideoFrameBuffer::Type)vfb->type();
v.width = vfb->width();
v.height = vfb->height();
if (vfb->type() == webrtc::VideoFrameBuffer::Type::kI420) {
auto p = vfb->GetI420();
v.i420_stride_y = p->StrideY();
v.i420_stride_u = p->StrideU();
v.i420_stride_v = p->StrideV();
v.i420_data_y = reinterpret_cast<int64_t>(p->DataY());
v.i420_data_u = reinterpret_cast<int64_t>(p->DataU());
v.i420_data_v = reinterpret_cast<int64_t>(p->DataV());
}
if (vfb->type() == webrtc::VideoFrameBuffer::Type::kNV12) {
auto p = vfb->GetNV12();
v.nv12_stride_y = p->StrideY();
v.nv12_stride_uv = p->StrideUV();
v.nv12_data_y = reinterpret_cast<int64_t>(p->DataY());
v.nv12_data_uv = reinterpret_cast<int64_t>(p->DataUV());
}
sora_conf::VideoFrame f = VideoFrameToConfig(frame);
on_frame(jsonif::to_json(f));
};
}

auto capturer = CreateVideoCapturer(
cc.capturer_type, (void*)cc.unity_camera_texture, cc.no_video_device,
cc.video_capturer_device, cc.video_width, cc.video_height, cc.video_fps,
on_frame, sora_context_->signaling_thread(), env, android_context,
unity_context_);
cc.camera_config.capturer_type,
(void*)cc.camera_config.unity_camera_texture, cc.no_video_device,
cc.camera_config.video_capturer_device, cc.camera_config.video_width,
cc.camera_config.video_height, cc.camera_config.video_fps, on_frame,
signaling_thread_.get(), env, android_context);
if (!cc.no_video_device && !capturer) {
on_disconnect((int)sora_conf::ErrorCode::INTERNAL_ERROR,
"Capturer Init Failed");
return;
}

capturer_ = capturer;
capturer_type_ = cc.capturer_type;
capturer_type_ = cc.camera_config.capturer_type;

std::string audio_track_id = rtc::CreateRandomString(16);
audio_track_ = sora_context_->peer_connection_factory()->CreateAudioTrack(
Expand Down Expand Up @@ -487,6 +495,63 @@ void Sora::Disconnect() {
signaling_->Disconnect();
}

// Public entry point for switching the capturer while connected.
// The actual work happens in DoSwitchCamera on the io_context thread; `cc`
// is copied into the posted handler, and `shared_from_this()` keeps this
// Sora instance alive until the handler runs.
void Sora::SwitchCamera(const sora_conf::internal::CameraConfig& cc) {
  RTC_LOG(LS_INFO) << "SwitchCamera: " << jsonif::to_json(cc);
  boost::asio::post(*ioc_, [self = shared_from_this(), cc = cc]() {
    self->DoSwitchCamera(cc);
  });
}
// Runs on the io_context thread (posted from SwitchCamera).
// Stops and destroys the current capturer, creates a new one from `cc`,
// wraps it in a fresh video track, and swaps that track into the renderer
// and the sending RtpSender.
void Sora::DoSwitchCamera(const sora_conf::internal::CameraConfig& cc) {
  // Same frame-forwarding hook as in DoConnect: serialize each captured
  // frame to JSON and hand it to the user callback, if one is registered.
  std::function<void(const webrtc::VideoFrame& frame)> on_frame;
  if (on_capturer_frame_) {
    on_frame = [on_frame =
                    on_capturer_frame_](const webrtc::VideoFrame& frame) {
      sora_conf::VideoFrame f = VideoFrameToConfig(frame);
      on_frame(jsonif::to_json(f));
    };
  }

  void* env = sora::GetJNIEnv();
  void* android_context = GetAndroidApplicationContext(env);

  // Stop the current capturer before replacing it.
  // capturer_type_ == 0 is the device camera; anything else is a Unity camera.
#if defined(SORA_UNITY_SDK_ANDROID)
  if (capturer_ != nullptr && capturer_type_ == 0) {
    static_cast<sora::AndroidCapturer*>(capturer_.get())->Stop();
  }
#endif
  if (capturer_ != nullptr && capturer_type_ != 0) {
    static_cast<UnityCameraCapturer*>(capturer_.get())->Stop();
  }
  capturer_ = nullptr;

  auto capturer = CreateVideoCapturer(
      cc.capturer_type, (void*)cc.unity_camera_texture, false,
      cc.video_capturer_device, cc.video_width, cc.video_height, cc.video_fps,
      on_frame, signaling_thread_.get(), env, android_context);
  if (!capturer) {
    // The old capturer is already gone at this point, so the stream simply
    // stops producing frames until the next successful SwitchCamera.
    RTC_LOG(LS_ERROR) << "Failed to CreateVideoCapturer";
    return;
  }

  capturer_ = capturer;
  capturer_type_ = cc.capturer_type;

  std::string video_track_id = rtc::CreateRandomString(16);
  auto video_track = factory_->CreateVideoTrack(video_track_id, capturer.get());
  if (video_track_ == nullptr) {
    // No previous video track (e.g. connected without a video device):
    // register the new track with the renderer and notify the application.
    auto track_id = renderer_->AddTrack(video_track.get());
    PushEvent([this, track_id]() {
      if (on_add_track_) {
        on_add_track_(track_id, "");
      }
    });
  } else {
    renderer_->ReplaceTrack(video_track_.get(), video_track.get());
  }
  // video_sender_ is only assigned once the offer negotiated a sending video
  // transceiver (see OnSetOffer); guard against connections where no video
  // sender exists (e.g. recvonly / video disabled) to avoid a null deref.
  if (video_sender_ != nullptr) {
    video_sender_->SetTrack(video_track.get());
  }
  video_track_ = video_track;
}

void Sora::RenderCallbackStatic(int event_id) {
auto sora = (Sora*)IdPointer::Instance().Lookup(event_id);
if (sora == nullptr) {
Expand Down Expand Up @@ -754,6 +819,7 @@ void Sora::OnSetOffer(std::string offer) {
webrtc::RTCErrorOr<rtc::scoped_refptr<webrtc::RtpSenderInterface>>
video_result = signaling_->GetPeerConnection()->AddTrack(video_track_,
{stream_id});
video_sender_ = video_result.value();
}

if (video_track_ != nullptr) {
Expand Down
3 changes: 3 additions & 0 deletions src/sora.h
Original file line number Diff line number Diff line change
Expand Up @@ -54,6 +54,7 @@ class Sora : public std::enable_shared_from_this<Sora>,

void Connect(const sora_conf::internal::ConnectConfig& cc);
void Disconnect();
void SwitchCamera(const sora_conf::internal::CameraConfig& cc);

static void UNITY_INTERFACE_API RenderCallbackStatic(int event_id);
int GetRenderCallbackEventID() const;
Expand Down Expand Up @@ -95,6 +96,7 @@ class Sora : public std::enable_shared_from_this<Sora>,
private:
void DoConnect(const sora_conf::internal::ConnectConfig& config,
std::function<void(int, std::string)> on_disconnect);
void DoSwitchCamera(const sora_conf::internal::CameraConfig& cc);

static rtc::scoped_refptr<UnityAudioDevice> CreateADM(
webrtc::TaskQueueFactory* task_queue_factory,
Expand Down Expand Up @@ -146,6 +148,7 @@ class Sora : public std::enable_shared_from_this<Sora>,
std::unique_ptr<UnityRenderer> renderer_;
rtc::scoped_refptr<webrtc::AudioTrackInterface> audio_track_;
rtc::scoped_refptr<webrtc::VideoTrackInterface> video_track_;
rtc::scoped_refptr<webrtc::RtpSenderInterface> video_sender_;
std::function<void(ptrid_t, std::string)> on_add_track_;
std::function<void(ptrid_t, std::string)> on_remove_track_;
std::function<void(std::string)> on_set_offer_;
Expand Down
Loading

0 comments on commit 05e399c

Please sign in to comment.