From 9d996824465410b4f25cf1d20bdee01d7091533a Mon Sep 17 00:00:00 2001 From: Yuwei Huang Date: Tue, 9 May 2023 16:01:33 -0700 Subject: [PATCH 01/42] [M114] Fix bug of messages being delivered before data channel is open If the caller calls RegisterObserver() on the network thread while the state is not kOpen but there are queued received data, those received data will be immediately delivered to the observer before the state is transitioned to kOpen, which may break the observer's assertions and cause problems. The problem turns out to be that, when SctpDataChannel::RegisterObserver calls DeliverQueuedReceivedData(), the data will be passed to the observer without checking the |state_| first, meanwhile SctpDataChannel::UpdateState does effectively check the state and null-check |observer_| before delivering the received data. This CL fixes this by simply making DeliverQueuedReceivedData() also check `state_ == kOpen`. In case the state transitions to kOpen after RegisterObserver() is called, the first DeliverQueuedReceivedData() call will be no-op, while the second DeliverQueuedReceivedData() call will do the work. (cherry picked from commit 20838941240536b52e24e47ce99ad6c2175dba1a) No-Try: True Bug: chromium:1442696 Change-Id: If25ce6a038d704939b1a8ae73d7ced110448b050 Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/304687 Reviewed-by: Tomas Gunnarsson Commit-Queue: Tomas Gunnarsson Cr-Original-Commit-Position: refs/heads/main@{#40036} Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/305380 Reviewed-by: Mirko Bonadei Cr-Commit-Position: refs/branch-heads/5735@{#1} Cr-Branched-From: df7df199abd619e75b9f1d9a7e12fc3f3f748775-refs/heads/main@{#39949} --- pc/sctp_data_channel.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pc/sctp_data_channel.cc b/pc/sctp_data_channel.cc index 3fed03ca11..dda9110bfc 100644 --- a/pc/sctp_data_channel.cc +++ b/pc/sctp_data_channel.cc @@ -843,7 +843,7 @@ void SctpDataChannel::SetState(DataState state) { // RTC_RUN_ON(network_thread_). void SctpDataChannel::DeliverQueuedReceivedData() { - if (!observer_) { + if (!observer_ || state_ != kOpen) { return; } From ecab2a49da48f0279a3b6422dab602614630005e Mon Sep 17 00:00:00 2001 From: Philipp Hancke Date: Tue, 9 May 2023 09:41:03 +0200 Subject: [PATCH 02/42] [m114] Attempt to recycle a stopped data m-line before creating a new one MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit which avoids an infinitely growing SDP if the remote end rejects the datachannel section. This will reactivate the m-line even if all datachannels are closed. 
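As an illustration of the recycling idea only (not part of the patch; SectionOptions below is a simplified stand-in for the real MediaDescriptionOptions), a minimal standalone sketch: the offerer first tries to reactivate a stopped m=application section before minting a new mid, so a rejected data section does not add one more m-line on every renegotiation.

#include <string>
#include <vector>

// Simplified stand-in for MediaDescriptionOptions (illustrative only).
struct SectionOptions {
  std::string mid;
  bool is_data = false;
  bool stopped = false;
};

void AddOrRecycleDataSection(std::vector<SectionOptions>& sections,
                             const std::string& new_mid) {
  for (auto& section : sections) {
    if (section.is_data && section.stopped) {
      section.stopped = false;  // Recycle the stopped (rejected) m-line.
      return;
    }
  }
  // No stopped data section to recycle: append a new one with a fresh mid.
  sections.push_back({new_mid, /*is_data=*/true, /*stopped=*/false});
}
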
BUG=chromium:1442604 (cherry picked from commit 522380ff734174faab694e1b67c9d20fffa8738e) No-Try: True Change-Id: If60f93b406271163df692d96102baab701923602 Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/304241 Reviewed-by: Harald Alvestrand Commit-Queue: Philipp Hancke Reviewed-by: Henrik Boström Cr-Original-Commit-Position: refs/heads/main@{#40029} Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/305420 Commit-Queue: Harald Alvestrand Reviewed-by: Mirko Bonadei Cr-Commit-Position: refs/branch-heads/5735@{#2} Cr-Branched-From: df7df199abd619e75b9f1d9a7e12fc3f3f748775-refs/heads/main@{#39949} --- pc/data_channel_controller.cc | 2 +- pc/data_channel_controller.h | 5 +- pc/data_channel_controller_unittest.cc | 6 +- pc/peer_connection.cc | 7 ++- pc/sdp_offer_answer.cc | 40 +++++++++++-- pc/sdp_offer_answer_unittest.cc | 80 ++++++++++++++++++++++++++ 6 files changed, 126 insertions(+), 14 deletions(-) diff --git a/pc/data_channel_controller.cc b/pc/data_channel_controller.cc index 0a0d93e236..166e181cfe 100644 --- a/pc/data_channel_controller.cc +++ b/pc/data_channel_controller.cc @@ -28,7 +28,7 @@ DataChannelController::~DataChannelController() { << "Missing call to PrepareForShutdown?"; } -bool DataChannelController::HasDataChannelsForTest() const { +bool DataChannelController::HasDataChannels() const { auto has_channels = [&] { RTC_DCHECK_RUN_ON(network_thread()); return !sctp_data_channels_n_.empty(); diff --git a/pc/data_channel_controller.h b/pc/data_channel_controller.h index ac47c8aa28..1b5c8beadb 100644 --- a/pc/data_channel_controller.h +++ b/pc/data_channel_controller.h @@ -87,8 +87,9 @@ class DataChannelController : public SctpDataChannelControllerInterface, const InternalDataChannelInit& config); void AllocateSctpSids(rtc::SSLRole role); - // Used by tests to check if data channels are currently tracked. - bool HasDataChannelsForTest() const; + // Check if data channels are currently tracked. Used to decide whether a + // rejected m=application section should be reoffered. + bool HasDataChannels() const; // At some point in time, a data channel has existed. bool HasUsedDataChannels() const; diff --git a/pc/data_channel_controller_unittest.cc b/pc/data_channel_controller_unittest.cc index ea72e85218..3b8adb6819 100644 --- a/pc/data_channel_controller_unittest.cc +++ b/pc/data_channel_controller_unittest.cc @@ -105,17 +105,17 @@ TEST_F(DataChannelControllerTest, CreateDataChannelEarlyRelease) { TEST_F(DataChannelControllerTest, CreateDataChannelEarlyClose) { DataChannelControllerForTest dcc(pc_.get()); - EXPECT_FALSE(dcc.HasDataChannelsForTest()); + EXPECT_FALSE(dcc.HasDataChannels()); EXPECT_FALSE(dcc.HasUsedDataChannels()); auto ret = dcc.InternalCreateDataChannelWithProxy( "label", InternalDataChannelInit(DataChannelInit())); ASSERT_TRUE(ret.ok()); auto channel = ret.MoveValue(); - EXPECT_TRUE(dcc.HasDataChannelsForTest()); + EXPECT_TRUE(dcc.HasDataChannels()); EXPECT_TRUE(dcc.HasUsedDataChannels()); channel->Close(); run_loop_.Flush(); - EXPECT_FALSE(dcc.HasDataChannelsForTest()); + EXPECT_FALSE(dcc.HasDataChannels()); EXPECT_TRUE(dcc.HasUsedDataChannels()); } diff --git a/pc/peer_connection.cc b/pc/peer_connection.cc index 4839fcf0a1..627e4afcfa 100644 --- a/pc/peer_connection.cc +++ b/pc/peer_connection.cc @@ -1422,9 +1422,10 @@ PeerConnection::CreateDataChannelOrError(const std::string& label, rtc::scoped_refptr channel = ret.MoveValue(); - // Trigger the onRenegotiationNeeded event for - // the first SCTP DataChannel. 
- if (first_datachannel) { + // Check the onRenegotiationNeeded event (with plan-b backward compat) + if (configuration_.sdp_semantics == SdpSemantics::kUnifiedPlan || + (configuration_.sdp_semantics == SdpSemantics::kPlanB_DEPRECATED && + first_datachannel)) { sdp_handler_->UpdateNegotiationNeeded(); } NoteUsageEvent(UsageEvent::DATA_ADDED); diff --git a/pc/sdp_offer_answer.cc b/pc/sdp_offer_answer.cc index 59f0b01dfd..0cb10b7d07 100644 --- a/pc/sdp_offer_answer.cc +++ b/pc/sdp_offer_answer.cc @@ -3342,8 +3342,20 @@ bool SdpOfferAnswerHandler::CheckIfNegotiationIsNeeded() { // 4. If connection has created any RTCDataChannels, and no m= section in // description has been negotiated yet for data, return true. if (data_channel_controller()->HasUsedDataChannels()) { - if (!cricket::GetFirstDataContent(description->description()->contents())) + const cricket::ContentInfo* data_content = + cricket::GetFirstDataContent(description->description()->contents()); + if (!data_content) { return true; + } + // The remote end might have rejected the data content. + const cricket::ContentInfo* remote_data_content = + current_remote_description() + ? current_remote_description()->description()->GetContentByName( + data_content->name) + : nullptr; + if (remote_data_content && remote_data_content->rejected) { + return true; + } } if (!ConfiguredForMedia()) { return false; @@ -4263,10 +4275,28 @@ void SdpOfferAnswerHandler::GetOptionsForUnifiedPlanOffer( } // Lastly, add a m-section if we have requested local data channels and an // m section does not already exist. - if (!pc_->GetDataMid() && data_channel_controller()->HasUsedDataChannels()) { - session_options->media_description_options.push_back( - GetMediaDescriptionOptionsForActiveData( - mid_generator_.GenerateString())); + if (!pc_->GetDataMid() && data_channel_controller()->HasUsedDataChannels() && + data_channel_controller()->HasDataChannels()) { + // Attempt to recycle a stopped m-line. + // TODO(crbug.com/1442604): GetDataMid() should return the mid if one was + // ever created but rejected. 
+ bool recycled = false; + for (size_t i = 0; i < session_options->media_description_options.size(); + i++) { + auto media_description = session_options->media_description_options[i]; + if (media_description.type == cricket::MEDIA_TYPE_DATA && + media_description.stopped) { + session_options->media_description_options[i] = + GetMediaDescriptionOptionsForActiveData(media_description.mid); + recycled = true; + break; + } + } + if (!recycled) { + session_options->media_description_options.push_back( + GetMediaDescriptionOptionsForActiveData( + mid_generator_.GenerateString())); + } } } diff --git a/pc/sdp_offer_answer_unittest.cc b/pc/sdp_offer_answer_unittest.cc index 33b1e746d9..4c4554f12c 100644 --- a/pc/sdp_offer_answer_unittest.cc +++ b/pc/sdp_offer_answer_unittest.cc @@ -34,6 +34,7 @@ #include "modules/audio_processing/include/audio_processing.h" #include "p2p/base/port_allocator.h" #include "pc/peer_connection_wrapper.h" +#include "pc/session_description.h" #include "pc/test/fake_audio_capture_module.h" #include "pc/test/mock_peer_connection_observers.h" #include "rtc_base/rtc_certificate_generator.h" @@ -467,4 +468,83 @@ TEST_F(SdpOfferAnswerTest, RollbackPreservesAddTrackMid) { EXPECT_EQ(saved_mid, first_transceiver->mid()); } +#ifdef WEBRTC_HAVE_SCTP + +TEST_F(SdpOfferAnswerTest, RejectedDataChannelsDoNotGetReoffered) { + auto pc = CreatePeerConnection(); + EXPECT_TRUE(pc->pc()->CreateDataChannelOrError("dc", nullptr).ok()); + EXPECT_TRUE(pc->CreateOfferAndSetAsLocal()); + auto mid = pc->pc()->local_description()->description()->contents()[0].mid(); + + // An answer that rejects the datachannel content. + std::string sdp = + "v=0\r\n" + "o=- 4131505339648218884 3 IN IP4 **-----**\r\n" + "s=-\r\n" + "t=0 0\r\n" + "a=ice-ufrag:zGWFZ+fVXDeN6UoI/136\r\n" + "a=ice-pwd:9AUNgUqRNI5LSIrC1qFD2iTR\r\n" + "a=fingerprint:sha-256 " + "AD:52:52:E0:B1:37:34:21:0E:15:8E:B7:56:56:7B:B4:39:0E:6D:1C:F5:84:A7:EE:" + "B5:27:3E:30:B1:7D:69:42\r\n" + "a=setup:passive\r\n" + "m=application 0 UDP/DTLS/SCTP webrtc-datachannel\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=sctp-port:5000\r\n" + "a=max-message-size:262144\r\n" + "a=mid:" + + mid + "\r\n"; + auto answer = CreateSessionDescription(SdpType::kAnswer, sdp); + ASSERT_TRUE(pc->SetRemoteDescription(std::move(answer))); + // The subsequent offer should not recycle the m-line since the existing data + // channel is closed. + auto offer = pc->CreateOffer(); + const auto& offer_contents = offer->description()->contents(); + ASSERT_EQ(offer_contents.size(), 1u); + EXPECT_EQ(offer_contents[0].mid(), mid); + EXPECT_EQ(offer_contents[0].rejected, true); +} + +TEST_F(SdpOfferAnswerTest, RejectedDataChannelsDoGetReofferedWhenActive) { + auto pc = CreatePeerConnection(); + EXPECT_TRUE(pc->pc()->CreateDataChannelOrError("dc", nullptr).ok()); + EXPECT_TRUE(pc->CreateOfferAndSetAsLocal()); + auto mid = pc->pc()->local_description()->description()->contents()[0].mid(); + + // An answer that rejects the datachannel content. 
+ std::string sdp = + "v=0\r\n" + "o=- 4131505339648218884 3 IN IP4 **-----**\r\n" + "s=-\r\n" + "t=0 0\r\n" + "a=ice-ufrag:zGWFZ+fVXDeN6UoI/136\r\n" + "a=ice-pwd:9AUNgUqRNI5LSIrC1qFD2iTR\r\n" + "a=fingerprint:sha-256 " + "AD:52:52:E0:B1:37:34:21:0E:15:8E:B7:56:56:7B:B4:39:0E:6D:1C:F5:84:A7:EE:" + "B5:27:3E:30:B1:7D:69:42\r\n" + "a=setup:passive\r\n" + "m=application 0 UDP/DTLS/SCTP webrtc-datachannel\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=sctp-port:5000\r\n" + "a=max-message-size:262144\r\n" + "a=mid:" + + mid + "\r\n"; + auto answer = CreateSessionDescription(SdpType::kAnswer, sdp); + ASSERT_TRUE(pc->SetRemoteDescription(std::move(answer))); + + // The subsequent offer should recycle the m-line when there is a new data + // channel. + EXPECT_TRUE(pc->pc()->CreateDataChannelOrError("dc2", nullptr).ok()); + EXPECT_TRUE(pc->pc()->ShouldFireNegotiationNeededEvent( + pc->observer()->latest_negotiation_needed_event())); + + auto offer = pc->CreateOffer(); + const auto& offer_contents = offer->description()->contents(); + ASSERT_EQ(offer_contents.size(), 1u); + EXPECT_EQ(offer_contents[0].mid(), mid); + EXPECT_EQ(offer_contents[0].rejected, false); +} + +#endif // WEBRTC_HAVE_SCTP + } // namespace webrtc From e46e37b6f831763aceaf5f5bd081a47cbd562890 Mon Sep 17 00:00:00 2001 From: Tommi Date: Thu, 1 Jun 2023 16:08:52 +0200 Subject: [PATCH 03/42] [M114] Move transceiver iteration loop over to the signaling thread. This is required for ReportTransportStats since iterating over the transceiver list from the network thread is not safe. (cherry picked from commit dba22d31909298161318e00d43a80cdb0abc940f) No-Try: true Bug: chromium:1446274, webrtc:12692 Change-Id: I7c514df9f029112c4b1da85826af91217850fb26 Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/307340 Reviewed-by: Harald Alvestrand Commit-Queue: Tomas Gunnarsson Cr-Original-Commit-Position: refs/heads/main@{#40197} Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/308001 Reviewed-by: Mirko Bonadei Cr-Commit-Position: refs/branch-heads/5735@{#3} Cr-Branched-From: df7df199abd619e75b9f1d9a7e12fc3f3f748775-refs/heads/main@{#39949} --- pc/peer_connection.cc | 35 +++++++++++++++++---------- pc/peer_connection.h | 3 ++- pc/peer_connection_integrationtest.cc | 8 ++++++ 3 files changed, 32 insertions(+), 14 deletions(-) diff --git a/pc/peer_connection.cc b/pc/peer_connection.cc index 627e4afcfa..f7aec9ebcc 100644 --- a/pc/peer_connection.cc +++ b/pc/peer_connection.cc @@ -735,9 +735,6 @@ JsepTransportController* PeerConnection::InitializeTransportController_n( transport_controller_->SubscribeIceConnectionState( [this](cricket::IceConnectionState s) { RTC_DCHECK_RUN_ON(network_thread()); - if (s == cricket::kIceConnectionConnected) { - ReportTransportStats(); - } signaling_thread()->PostTask( SafeTask(signaling_thread_safety_.flag(), [this, s]() { RTC_DCHECK_RUN_ON(signaling_thread()); @@ -2401,6 +2398,20 @@ void PeerConnection::OnTransportControllerConnectionState( case cricket::kIceConnectionConnected: RTC_LOG(LS_INFO) << "Changing to ICE connected state because " "all transports are writable."; + { + std::vector transceivers; + if (ConfiguredForMedia()) { + transceivers = rtp_manager()->transceivers()->List(); + } + + network_thread()->PostTask( + SafeTask(network_thread_safety_, + [this, transceivers = std::move(transceivers)] { + RTC_DCHECK_RUN_ON(network_thread()); + ReportTransportStats(std::move(transceivers)); + })); + } + SetIceConnectionState(PeerConnectionInterface::kIceConnectionConnected); 
NoteUsageEvent(UsageEvent::ICE_STATE_CONNECTED); break; @@ -2743,20 +2754,18 @@ void PeerConnection::OnTransportControllerGatheringState( } // Runs on network_thread(). -void PeerConnection::ReportTransportStats() { +void PeerConnection::ReportTransportStats( + std::vector transceivers) { TRACE_EVENT0("webrtc", "PeerConnection::ReportTransportStats"); rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; std::map> media_types_by_transport_name; - if (ConfiguredForMedia()) { - for (const auto& transceiver : - rtp_manager()->transceivers()->UnsafeList()) { - if (transceiver->internal()->channel()) { - std::string transport_name( - transceiver->internal()->channel()->transport_name()); - media_types_by_transport_name[transport_name].insert( - transceiver->media_type()); - } + for (const auto& transceiver : transceivers) { + if (transceiver->internal()->channel()) { + std::string transport_name( + transceiver->internal()->channel()->transport_name()); + media_types_by_transport_name[transport_name].insert( + transceiver->media_type()); } } diff --git a/pc/peer_connection.h b/pc/peer_connection.h index 32d304e6e7..21b9a5a430 100644 --- a/pc/peer_connection.h +++ b/pc/peer_connection.h @@ -569,7 +569,8 @@ class PeerConnection : public PeerConnectionInternal, // Invoked when TransportController connection completion is signaled. // Reports stats for all transports in use. - void ReportTransportStats() RTC_RUN_ON(network_thread()); + void ReportTransportStats(std::vector transceivers) + RTC_RUN_ON(network_thread()); // Gather the usage of IPv4/IPv6 as best connection. static void ReportBestConnectionState(const cricket::TransportStats& stats); diff --git a/pc/peer_connection_integrationtest.cc b/pc/peer_connection_integrationtest.cc index 812833000b..07c5e0c0d3 100644 --- a/pc/peer_connection_integrationtest.cc +++ b/pc/peer_connection_integrationtest.cc @@ -1831,6 +1831,10 @@ TEST_P(PeerConnectionIntegrationTest, EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionConnected, callee()->ice_connection_state(), kDefaultTimeout); + // Part of reporting the stats will occur on the network thread, so flush it + // before checking NumEvents. + SendTask(network_thread(), [] {}); + EXPECT_METRIC_EQ(1, webrtc::metrics::NumEvents( "WebRTC.PeerConnection.CandidatePairType_UDP", webrtc::kIceCandidatePairHostNameHostName)); @@ -1959,6 +1963,10 @@ TEST_P(PeerConnectionIntegrationIceStatesTest, MAYBE_VerifyBestConnection) { EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionConnected, callee()->ice_connection_state(), kDefaultTimeout); + // Part of reporting the stats will occur on the network thread, so flush it + // before checking NumEvents. + SendTask(network_thread(), [] {}); + // TODO(bugs.webrtc.org/9456): Fix it. const int num_best_ipv4 = webrtc::metrics::NumEvents( "WebRTC.PeerConnection.IPMetrics", webrtc::kBestConnections_IPv4); From d20849d0710783142175551d81dfe0dcbffcf2b1 Mon Sep 17 00:00:00 2001 From: Philipp Hancke Date: Thu, 15 Jun 2023 07:21:56 +0200 Subject: [PATCH 04/42] [M114] sdp: reject duplicate ssrcs in ssrc-groups while not really covered by https://www.rfc-editor.org/rfc/rfc5576.html#section-4.2 and using the same SSRC for RTX and primary payload may work since payload type demuxing *could* be used is not a good idea. This also applies to flexfec's FEC-FR. For the nonstandard SIM ssrc-group duplicates make no sense. This rejects duplicates for unknown ssrc-groups as well. 
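A minimal standalone sketch of the duplicate check (illustrative only, mirroring the linear search the patch adds to ParseSsrcGroupAttribute()): an ssrc-group line such as "a=ssrc-group:FID 1234 1234" repeats an SSRC and is treated as a parse failure.

#include <algorithm>
#include <cstdint>
#include <vector>

// Returns false if the SSRC is already present in the group (duplicate),
// otherwise appends it. Illustrative only, not part of the patch.
bool AddSsrcToGroup(std::vector<uint32_t>& ssrcs, uint32_t ssrc) {
  if (std::find(ssrcs.begin(), ssrcs.end(), ssrc) != ssrcs.end()) {
    return false;  // Duplicate SSRC in the ssrc-group -> reject the line.
  }
  ssrcs.push_back(ssrc);
  return true;
}
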
BUG=chromium:1454860 (cherry picked from commit 6a38a3eb38f732b89ca0d8e36c43a434670c4ef5) No-Try: true Change-Id: I3e86101dbd5d6c4099f2fdb7b4a52d5cd0809c5f Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/308820 Reviewed-by: Taylor Brandstetter Reviewed-by: Harald Alvestrand Commit-Queue: Philipp Hancke Cr-Original-Commit-Position: refs/heads/main@{#40292} Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/309601 Cr-Commit-Position: refs/branch-heads/5735@{#4} Cr-Branched-From: df7df199abd619e75b9f1d9a7e12fc3f3f748775-refs/heads/main@{#39949} --- pc/webrtc_sdp.cc | 5 +++++ pc/webrtc_sdp_unittest.cc | 26 ++++++++++++++++++++++++++ 2 files changed, 31 insertions(+) diff --git a/pc/webrtc_sdp.cc b/pc/webrtc_sdp.cc index 6d041ff0a1..4beb6b0faa 100644 --- a/pc/webrtc_sdp.cc +++ b/pc/webrtc_sdp.cc @@ -3542,6 +3542,11 @@ bool ParseSsrcGroupAttribute(absl::string_view line, if (!GetValueFromString(line, fields[i], &ssrc, error)) { return false; } + // Reject duplicates. While not forbidden by RFC 5576, + // they don't make sense. + if (absl::c_linear_search(ssrcs, ssrc)) { + return ParseFailed(line, "Duplicate SSRC in ssrc-group", error); + } ssrcs.push_back(ssrc); } ssrc_groups->push_back(SsrcGroup(semantics, ssrcs)); diff --git a/pc/webrtc_sdp_unittest.cc b/pc/webrtc_sdp_unittest.cc index 0e1e62e53f..b2c751717e 100644 --- a/pc/webrtc_sdp_unittest.cc +++ b/pc/webrtc_sdp_unittest.cc @@ -5056,3 +5056,29 @@ TEST_F(WebRtcSdpTest, RejectSessionLevelMediaLevelExtmapMixedUsage) { JsepSessionDescription jdesc(kDummyType); EXPECT_FALSE(SdpDeserialize(sdp, &jdesc)); } + +TEST_F(WebRtcSdpTest, RejectDuplicateSsrcInSsrcGroup) { + std::string sdp = + "v=0\r\n" + "o=- 0 3 IN IP4 127.0.0.1\r\n" + "s=-\r\n" + "t=0 0\r\n" + "a=group:BUNDLE 0\r\n" + "a=fingerprint:sha-1 " + "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n" + "a=setup:actpass\r\n" + "a=ice-ufrag:ETEn\r\n" + "a=ice-pwd:OtSK0WpNtpUjkY4+86js7Z/l\r\n" + "m=video 9 UDP/TLS/RTP/SAVPF 96 97\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=rtcp-mux\r\n" + "a=sendonly\r\n" + "a=mid:0\r\n" + "a=rtpmap:96 VP8/90000\r\n" + "a=rtpmap:97 rtx/90000\r\n" + "a=fmtp:97 apt=96\r\n" + "a=ssrc-group:FID 1234 1234\r\n" + "a=ssrc:1234 cname:test\r\n"; + JsepSessionDescription jdesc(kDummyType); + EXPECT_FALSE(SdpDeserialize(sdp, &jdesc)); +} From 3cccc9febd3ffa16f753b0f6945cdd22238fa1e6 Mon Sep 17 00:00:00 2001 From: CloudWebRTC Date: Thu, 7 Oct 2021 09:47:06 +0800 Subject: [PATCH 05/42] Add Apache-2.0 license and some note to README.md. (#9) * Add Apache-2.0 license and some notes to README.md. * Add user link. * update. Updated readme detailing changes from original (#42) Adding membrane framework (#51) Co-authored-by: David Zhao --- NOTICE | 26 ++++++++++++++++++++++ README.md | 65 +++++++++++++++++++++++++++++++++++-------------------- 2 files changed, 67 insertions(+), 24 deletions(-) create mode 100644 NOTICE diff --git a/NOTICE b/NOTICE new file mode 100644 index 0000000000..3972578ec4 --- /dev/null +++ b/NOTICE @@ -0,0 +1,26 @@ +################################################################################### + +The following modifications follow Apache License 2.0 from shiguredo. 
+ +https://github.com/webrtc-sdk/webrtc/commit/dfec53e93a0a1cb93f444caf50f844ec0068c7b7 +https://github.com/webrtc-sdk/webrtc/commit/403b4678543c5d4ac77bd1ea5753c02637b3bb89 +https://github.com/webrtc-sdk/webrtc/commit/77d5d685a90fb4bded17835ae72ec6671b26d696 + +Apache License 2.0 + +Copyright 2019-2021, Wandbox LLC (Original Author) +Copyright 2019-2021, Shiguredo Inc. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +##################################################################################### \ No newline at end of file diff --git a/README.md b/README.md index 332efcc58b..de413d3bdb 100644 --- a/README.md +++ b/README.md @@ -1,32 +1,49 @@ -**WebRTC is a free, open software project** that provides browsers and mobile -applications with Real-Time Communications (RTC) capabilities via simple APIs. -The WebRTC components have been optimized to best serve this purpose. +# WebRTC-SDK -**Our mission:** To enable rich, high-quality RTC applications to be -developed for the browser, mobile platforms, and IoT devices, and allow them -all to communicate via a common set of protocols. +This repository contains a fork of WebRTC from Google with various improvements. -The WebRTC initiative is a project supported by Google, Mozilla and Opera, -amongst others. +## Changes -### Development +### All -See [here][native-dev] for instructions on how to get started -developing with the native code. +- Dynamically acquire decoder to mitigate decoder limitations #25 #26 -[Authoritative list](native-api.md) of directories that contain the -native API header files. +### Android -### More info +- Support for video simulcast #3 - * Official web site: http://www.webrtc.org - * Master source code repo: https://webrtc.googlesource.com/src - * Samples and reference apps: https://github.com/webrtc - * Mailing list: http://groups.google.com/group/discuss-webrtc - * Continuous build: https://ci.chromium.org/p/webrtc/g/ci/console - * [Coding style guide](g3doc/style-guide.md) - * [Code of conduct](CODE_OF_CONDUCT.md) - * [Reporting bugs](docs/bug-reporting.md) - * [Documentation](g3doc/sitemap.md) +### iOS + +- Do not request microphone permissions for playback-only #2 #5 +- Improvements to AVAudioSession interactions #7 #8 +- Support for video simulcast #4 +- Support for voice processing bypass #15 + +### Mac + +- Support for video simulcast #10 +- Remove hardcoded limitation of outputting to only right speaker on MBP #22 +- Screen capture support #24 #36 #37 +- Support for audio output device selection #35 +- Cross-platform RTCMTLVideoView #40 + +### Windows + +- Fixed unable to acquire Mic when built-in AEC is enabled #29 + +## LICENSE + +- [Google WebRTC](https://chromium.googlesource.com/external/webrtc.git), is licensed under [BSD license](/LICENSE). + +- Contains patches from [shiguredo-webrtc-build](https://github.com/shiguredo-webrtc-build), licensed under [Apache 2.0](/NOTICE). + +- Contains changes from LiveKit, licensed under Apache 2.0. 
+ +## Who is using this project + +- [flutter-webrtc](https://github.com/flutter-webrtc/flutter-webrtc) + +- [LiveKit](https://github.com/livekit) + +- [Membrane Framework](https://github.com/membraneframework/membrane_rtc_engine) -[native-dev]: https://webrtc.googlesource.com/src/+/main/docs/native-code/index.md From 272127d457ab48e36241e82549870405864851f6 Mon Sep 17 00:00:00 2001 From: CloudWebRTC Date: Sat, 11 Sep 2021 19:27:32 +0800 Subject: [PATCH 06/42] Audio Device Optimization allow listen-only mode in AudioUnit, adjust when category changes (#2) release mic when category changes (#5) Change defaults to iOS defaults (#7) Sync audio session config (#8) feat: support bypass voice processing for iOS. (#15) Remove MacBookPro audio pan right code (#22) fix: Fix can't open mic alone when built-in AEC is enabled. (#29) feat: add audio device changes detect for windows. (#41) fix Linux compile (#47) AudioUnit: Don't rely on category switch for mic indicator to turn off (#52) Stop recording on mute (turn off mic indicator) (#55) Cherry pick audio selection from m97 release (#35) [Mac] Allow audio device selection (#21) RTCAudioDeviceModule.outputDevice / inputDevice getter and setter (#80) Co-authored-by: Hiroshi Horie <548776+hiroshihorie@users.noreply.github.com> Co-authored-by: David Zhao --- audio/audio_send_stream.cc | 5 + audio/audio_send_stream.h | 1 + audio/audio_state.cc | 64 +++- audio/audio_state.h | 5 + audio/channel_send.cc | 4 +- audio/channel_send.h | 2 + call/audio_send_stream.h | 1 + call/audio_state.h | 3 + media/engine/webrtc_voice_engine.cc | 3 + media/engine/webrtc_voice_engine.h | 3 +- .../audio_device_data_observer.cc | 4 + modules/audio_device/audio_device_generic.h | 4 + modules/audio_device/audio_device_impl.cc | 42 ++- modules/audio_device/audio_device_impl.h | 11 +- modules/audio_device/include/audio_device.h | 19 +- modules/audio_device/mac/audio_device_mac.cc | 293 ++++++++++------- modules/audio_device/mac/audio_device_mac.h | 14 +- .../audio_device/win/audio_device_core_win.cc | 70 +++++ .../audio_device/win/audio_device_core_win.h | 33 ++ sdk/BUILD.gn | 9 + .../RTCAudioDeviceModule+Private.h | 31 ++ .../api/peerconnection/RTCAudioDeviceModule.h | 56 ++++ .../peerconnection/RTCAudioDeviceModule.mm | 294 ++++++++++++++++++ .../api/peerconnection/RTCIODevice+Private.h | 28 ++ sdk/objc/api/peerconnection/RTCIODevice.h | 41 +++ sdk/objc/api/peerconnection/RTCIODevice.mm | 71 +++++ .../RTCPeerConnectionFactory+Native.h | 12 +- .../peerconnection/RTCPeerConnectionFactory.h | 10 + .../RTCPeerConnectionFactory.mm | 69 +++- .../RTCPeerConnectionFactoryBuilder.mm | 3 +- .../audio/RTCAudioSession+Configuration.mm | 6 +- sdk/objc/components/audio/RTCAudioSession.mm | 3 +- .../audio/RTCAudioSessionConfiguration.m | 8 +- sdk/objc/native/api/audio_device_module.mm | 4 + sdk/objc/native/src/audio/audio_device_ios.h | 12 +- sdk/objc/native/src/audio/audio_device_ios.mm | 113 +++++-- .../src/audio/audio_device_module_ios.h | 3 + .../src/audio/audio_device_module_ios.mm | 6 + .../src/audio/voice_processing_audio_unit.h | 2 +- .../src/audio/voice_processing_audio_unit.mm | 28 +- 40 files changed, 1187 insertions(+), 203 deletions(-) create mode 100644 sdk/objc/api/peerconnection/RTCAudioDeviceModule+Private.h create mode 100644 sdk/objc/api/peerconnection/RTCAudioDeviceModule.h create mode 100644 sdk/objc/api/peerconnection/RTCAudioDeviceModule.mm create mode 100644 sdk/objc/api/peerconnection/RTCIODevice+Private.h create mode 100644 sdk/objc/api/peerconnection/RTCIODevice.h 
create mode 100644 sdk/objc/api/peerconnection/RTCIODevice.mm diff --git a/audio/audio_send_stream.cc b/audio/audio_send_stream.cc index e0ad1a8bd6..7e033849ff 100644 --- a/audio/audio_send_stream.cc +++ b/audio/audio_send_stream.cc @@ -424,6 +424,11 @@ void AudioSendStream::SetMuted(bool muted) { channel_send_->SetInputMute(muted); } +bool AudioSendStream::GetMuted() { + RTC_DCHECK_RUN_ON(&worker_thread_checker_); + return channel_send_->InputMute(); +} + webrtc::AudioSendStream::Stats AudioSendStream::GetStats() const { return GetStats(true); } diff --git a/audio/audio_send_stream.h b/audio/audio_send_stream.h index 6cda9c3d52..422c815e38 100644 --- a/audio/audio_send_stream.h +++ b/audio/audio_send_stream.h @@ -96,6 +96,7 @@ class AudioSendStream final : public webrtc::AudioSendStream, int payload_frequency, int event, int duration_ms) override; + bool GetMuted() override; void SetMuted(bool muted) override; webrtc::AudioSendStream::Stats GetStats() const override; webrtc::AudioSendStream::Stats GetStats( diff --git a/audio/audio_state.cc b/audio/audio_state.cc index 6f20e7b128..3a21d9b3a9 100644 --- a/audio/audio_state.cc +++ b/audio/audio_state.cc @@ -98,14 +98,26 @@ void AudioState::AddSendingStream(webrtc::AudioSendStream* stream, UpdateAudioTransportWithSendingStreams(); // Make sure recording is initialized; start recording if enabled. - auto* adm = config_.audio_device_module.get(); - if (!adm->Recording()) { - if (adm->InitRecording() == 0) { - if (recording_enabled_) { - adm->StartRecording(); + if (ShouldRecord()) { + auto* adm = config_.audio_device_module.get(); + if (!adm->Recording()) { + if (adm->InitRecording() == 0) { + if (recording_enabled_) { + + // TODO: Verify if the following windows only logic is still required. +#if defined(WEBRTC_WIN) + if (adm->BuiltInAECIsAvailable() && !adm->Playing()) { + if (!adm->PlayoutIsInitialized()) { + adm->InitPlayout(); + } + adm->StartPlayout(); + } +#endif + adm->StartRecording(); + } + } else { + RTC_DLOG_F(LS_ERROR) << "Failed to initialize recording."; } - } else { - RTC_DLOG_F(LS_ERROR) << "Failed to initialize recording."; } } } @@ -115,7 +127,8 @@ void AudioState::RemoveSendingStream(webrtc::AudioSendStream* stream) { auto count = sending_streams_.erase(stream); RTC_DCHECK_EQ(1, count); UpdateAudioTransportWithSendingStreams(); - if (sending_streams_.empty()) { + + if (!ShouldRecord()) { config_.audio_device_module->StopRecording(); } } @@ -143,7 +156,7 @@ void AudioState::SetRecording(bool enabled) { if (recording_enabled_ != enabled) { recording_enabled_ = enabled; if (enabled) { - if (!sending_streams_.empty()) { + if (ShouldRecord()) { config_.audio_device_module->StartRecording(); } } else { @@ -203,6 +216,39 @@ void AudioState::UpdateNullAudioPollerState() { null_audio_poller_.Stop(); } } + +void AudioState::OnMuteStreamChanged() { + + auto* adm = config_.audio_device_module.get(); + bool should_record = ShouldRecord(); + + if (should_record && !adm->Recording()) { + if (adm->InitRecording() == 0) { + adm->StartRecording(); + } + } else if (!should_record && adm->Recording()) { + adm->StopRecording(); + } +} + +bool AudioState::ShouldRecord() { + // no streams to send + if (sending_streams_.empty()) { + return false; + } + + int stream_count = sending_streams_.size(); + + int muted_count = 0; + for (const auto& kv : sending_streams_) { + if (kv.first->GetMuted()) { + muted_count++; + } + } + + return muted_count != stream_count; +} + } // namespace internal rtc::scoped_refptr AudioState::Create( diff --git 
a/audio/audio_state.h b/audio/audio_state.h index 88aaaa3697..f21cca771e 100644 --- a/audio/audio_state.h +++ b/audio/audio_state.h @@ -47,6 +47,8 @@ class AudioState : public webrtc::AudioState { void SetStereoChannelSwapping(bool enable) override; + void OnMuteStreamChanged() override; + AudioDeviceModule* audio_device_module() { RTC_DCHECK(config_.audio_device_module); return config_.audio_device_module.get(); @@ -64,6 +66,9 @@ class AudioState : public webrtc::AudioState { void UpdateAudioTransportWithSendingStreams(); void UpdateNullAudioPollerState() RTC_RUN_ON(&thread_checker_); + // Returns true when at least 1 stream exists and all streams are not muted. + bool ShouldRecord(); + SequenceChecker thread_checker_; SequenceChecker process_thread_checker_{SequenceChecker::kDetached}; const webrtc::AudioState::Config config_; diff --git a/audio/channel_send.cc b/audio/channel_send.cc index 91a17d4d72..8d33fa7107 100644 --- a/audio/channel_send.cc +++ b/audio/channel_send.cc @@ -98,6 +98,8 @@ class ChannelSend : public ChannelSendInterface, // Muting, Volume and Level. void SetInputMute(bool enable) override; + bool InputMute() const override; + // Stats. ANAStats GetANAStatistics() const override; @@ -161,8 +163,6 @@ class ChannelSend : public ChannelSendInterface, size_t payloadSize, int64_t absolute_capture_timestamp_ms) override; - bool InputMute() const; - int32_t SendRtpAudio(AudioFrameType frameType, uint8_t payloadType, uint32_t rtp_timestamp, diff --git a/audio/channel_send.h b/audio/channel_send.h index 08d3f92f1d..d969452185 100644 --- a/audio/channel_send.h +++ b/audio/channel_send.h @@ -95,6 +95,8 @@ class ChannelSendInterface { virtual bool SendTelephoneEventOutband(int event, int duration_ms) = 0; virtual void OnBitrateAllocation(BitrateAllocationUpdate update) = 0; virtual int GetTargetBitrate() const = 0; + + virtual bool InputMute() const = 0; virtual void SetInputMute(bool muted) = 0; virtual void ProcessAndEncodeAudio( diff --git a/call/audio_send_stream.h b/call/audio_send_stream.h index 9c2fad652f..7e73ab2ce6 100644 --- a/call/audio_send_stream.h +++ b/call/audio_send_stream.h @@ -190,6 +190,7 @@ class AudioSendStream : public AudioSender { int event, int duration_ms) = 0; + virtual bool GetMuted() = 0; virtual void SetMuted(bool muted) = 0; virtual Stats GetStats() const = 0; diff --git a/call/audio_state.h b/call/audio_state.h index 79fb5cf981..85f04758dd 100644 --- a/call/audio_state.h +++ b/call/audio_state.h @@ -59,6 +59,9 @@ class AudioState : public rtc::RefCountInterface { virtual void SetStereoChannelSwapping(bool enable) = 0; + // Notify the AudioState that a stream updated it's mute state. + virtual void OnMuteStreamChanged() = 0; + static rtc::scoped_refptr Create( const AudioState::Config& config); diff --git a/media/engine/webrtc_voice_engine.cc b/media/engine/webrtc_voice_engine.cc index 55cebbaa54..35e2657876 100644 --- a/media/engine/webrtc_voice_engine.cc +++ b/media/engine/webrtc_voice_engine.cc @@ -2285,6 +2285,9 @@ bool WebRtcVoiceMediaChannel::MuteStream(uint32_t ssrc, bool muted) { ap->set_output_will_be_muted(all_muted); } + // Notfy the AudioState that the mute state has updated. 
+ engine_->audio_state()->OnMuteStreamChanged(); + return true; } diff --git a/media/engine/webrtc_voice_engine.h b/media/engine/webrtc_voice_engine.h index f5d8080723..a75a707788 100644 --- a/media/engine/webrtc_voice_engine.h +++ b/media/engine/webrtc_voice_engine.h @@ -93,6 +93,8 @@ class WebRtcVoiceEngine final : public VoiceEngineInterface { absl::optional GetAudioDeviceStats() override; + // Moved to public so WebRtcVoiceMediaChannel can access it. + webrtc::AudioState* audio_state(); private: // Every option that is "set" will be applied. Every option not "set" will be @@ -105,7 +107,6 @@ class WebRtcVoiceEngine final : public VoiceEngineInterface { webrtc::AudioDeviceModule* adm(); webrtc::AudioProcessing* apm() const; - webrtc::AudioState* audio_state(); std::vector CollectCodecs( const std::vector& specs) const; diff --git a/modules/audio_device/audio_device_data_observer.cc b/modules/audio_device/audio_device_data_observer.cc index 0524830327..88a8301c4f 100644 --- a/modules/audio_device/audio_device_data_observer.cc +++ b/modules/audio_device/audio_device_data_observer.cc @@ -307,6 +307,10 @@ class ADMWrapper : public AudioDeviceModule, public AudioTransport { } #endif // WEBRTC_IOS + int32_t SetAudioDeviceSink(AudioDeviceSink* sink) const override { + return impl_->SetAudioDeviceSink(sink); + } + protected: rtc::scoped_refptr impl_; AudioDeviceDataObserver* legacy_observer_ = nullptr; diff --git a/modules/audio_device/audio_device_generic.h b/modules/audio_device/audio_device_generic.h index 41e24eb3b0..0585129de4 100644 --- a/modules/audio_device/audio_device_generic.h +++ b/modules/audio_device/audio_device_generic.h @@ -135,6 +135,10 @@ class AudioDeviceGeneric { virtual int GetRecordAudioParameters(AudioParameters* params) const; #endif // WEBRTC_IOS + virtual int32_t SetAudioDeviceSink(AudioDeviceSink* sink) { return -1; } + virtual int32_t GetPlayoutDevice() const { return -1; } + virtual int32_t GetRecordingDevice() const { return -1; } + virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) = 0; virtual ~AudioDeviceGeneric() {} diff --git a/modules/audio_device/audio_device_impl.cc b/modules/audio_device/audio_device_impl.cc index 092b98f2bf..80e208ceec 100644 --- a/modules/audio_device/audio_device_impl.cc +++ b/modules/audio_device/audio_device_impl.cc @@ -72,15 +72,17 @@ namespace webrtc { rtc::scoped_refptr AudioDeviceModule::Create( AudioLayer audio_layer, - TaskQueueFactory* task_queue_factory) { + TaskQueueFactory* task_queue_factory, + bool bypass_voice_processing) { RTC_DLOG(LS_INFO) << __FUNCTION__; - return AudioDeviceModule::CreateForTest(audio_layer, task_queue_factory); + return AudioDeviceModule::CreateForTest(audio_layer, task_queue_factory, bypass_voice_processing); } // static rtc::scoped_refptr AudioDeviceModule::CreateForTest( AudioLayer audio_layer, - TaskQueueFactory* task_queue_factory) { + TaskQueueFactory* task_queue_factory, + bool bypass_voice_processing) { RTC_DLOG(LS_INFO) << __FUNCTION__; // The "AudioDeviceModule::kWindowsCoreAudio2" audio layer has its own @@ -93,7 +95,7 @@ rtc::scoped_refptr AudioDeviceModule::CreateForTest( // Create the generic reference counted (platform independent) implementation. auto audio_device = rtc::make_ref_counted( - audio_layer, task_queue_factory); + audio_layer, task_queue_factory, bypass_voice_processing); // Ensure that the current platform is supported. 
if (audio_device->CheckPlatform() == -1) { @@ -116,8 +118,13 @@ rtc::scoped_refptr AudioDeviceModule::CreateForTest( AudioDeviceModuleImpl::AudioDeviceModuleImpl( AudioLayer audio_layer, - TaskQueueFactory* task_queue_factory) - : audio_layer_(audio_layer), audio_device_buffer_(task_queue_factory) { + TaskQueueFactory* task_queue_factory, + bool bypass_voice_processing) + : audio_layer_(audio_layer), + #if defined(WEBRTC_IOS) + bypass_voice_processing_(bypass_voice_processing), + #endif + audio_device_buffer_(task_queue_factory) { RTC_DLOG(LS_INFO) << __FUNCTION__; } @@ -280,7 +287,7 @@ int32_t AudioDeviceModuleImpl::CreatePlatformSpecificObjects() { #if defined(WEBRTC_IOS) if (audio_layer == kPlatformDefaultAudio) { audio_device_.reset( - new ios_adm::AudioDeviceIOS(/*bypass_voice_processing=*/false)); + new ios_adm::AudioDeviceIOS(/*bypass_voice_processing=*/bypass_voice_processing_)); RTC_LOG(LS_INFO) << "iPhone Audio APIs will be utilized."; } // END #if defined(WEBRTC_IOS) @@ -937,6 +944,27 @@ int AudioDeviceModuleImpl::GetRecordAudioParameters( } #endif // WEBRTC_IOS +int32_t AudioDeviceModuleImpl::SetAudioDeviceSink(AudioDeviceSink* sink) const { + RTC_LOG(LS_INFO) << __FUNCTION__ << "(" << sink << ")"; + int32_t ok = audio_device_->SetAudioDeviceSink(sink); + RTC_LOG(LS_INFO) << "output: " << ok; + return ok; +} + +int32_t AudioDeviceModuleImpl::GetPlayoutDevice() const { + RTC_LOG(LS_INFO) << __FUNCTION__; + int32_t r = audio_device_->GetPlayoutDevice(); + RTC_LOG(LS_INFO) << "output: " << r; + return r; +} + +int32_t AudioDeviceModuleImpl::GetRecordingDevice() const { + RTC_LOG(LS_INFO) << __FUNCTION__; + int32_t r = audio_device_->GetRecordingDevice(); + RTC_LOG(LS_INFO) << "output: " << r; + return r; +} + AudioDeviceModuleImpl::PlatformType AudioDeviceModuleImpl::Platform() const { RTC_LOG(LS_INFO) << __FUNCTION__; return platform_type_; diff --git a/modules/audio_device/audio_device_impl.h b/modules/audio_device/audio_device_impl.h index 45f73dcd65..40ffa4d303 100644 --- a/modules/audio_device/audio_device_impl.h +++ b/modules/audio_device/audio_device_impl.h @@ -43,7 +43,8 @@ class AudioDeviceModuleImpl : public AudioDeviceModuleForTest { int32_t AttachAudioBuffer(); AudioDeviceModuleImpl(AudioLayer audio_layer, - TaskQueueFactory* task_queue_factory); + TaskQueueFactory* task_queue_factory, + bool bypass_voice_processing = false); ~AudioDeviceModuleImpl() override; // Retrieve the currently utilized audio layer @@ -145,6 +146,10 @@ class AudioDeviceModuleImpl : public AudioDeviceModuleForTest { int GetRecordAudioParameters(AudioParameters* params) const override; #endif // WEBRTC_IOS + int32_t SetAudioDeviceSink(AudioDeviceSink* sink) const override; + int32_t GetPlayoutDevice() const override; + int32_t GetRecordingDevice() const override; + #if defined(WEBRTC_ANDROID) // Only use this acccessor for test purposes on Android. AudioManager* GetAndroidAudioManagerForTest() { @@ -165,7 +170,9 @@ class AudioDeviceModuleImpl : public AudioDeviceModuleForTest { AudioLayer audio_layer_; PlatformType platform_type_ = kPlatformNotSupported; bool initialized_ = false; -#if defined(WEBRTC_ANDROID) +#if defined(WEBRTC_IOS) + bool bypass_voice_processing_; +#elif defined(WEBRTC_ANDROID) // Should be declared first to ensure that it outlives other resources. 
std::unique_ptr audio_manager_android_; #endif diff --git a/modules/audio_device/include/audio_device.h b/modules/audio_device/include/audio_device.h index 936ee6cb04..58019cc24f 100644 --- a/modules/audio_device/include/audio_device.h +++ b/modules/audio_device/include/audio_device.h @@ -21,6 +21,15 @@ namespace webrtc { class AudioDeviceModuleForTest; +// Sink for callbacks related to a audio device. +class AudioDeviceSink { + public: + virtual ~AudioDeviceSink() = default; + + // input/output devices updated or default device changed + virtual void OnDevicesUpdated() = 0; +}; + class AudioDeviceModule : public rtc::RefCountInterface { public: enum AudioLayer { @@ -56,12 +65,14 @@ class AudioDeviceModule : public rtc::RefCountInterface { // Creates a default ADM for usage in production code. static rtc::scoped_refptr Create( AudioLayer audio_layer, - TaskQueueFactory* task_queue_factory); + TaskQueueFactory* task_queue_factory, + bool bypass_voice_processing = false); // Creates an ADM with support for extra test methods. Don't use this factory // in production code. static rtc::scoped_refptr CreateForTest( AudioLayer audio_layer, - TaskQueueFactory* task_queue_factory); + TaskQueueFactory* task_queue_factory, + bool bypass_voice_processing = false); // Retrieve the currently utilized audio layer virtual int32_t ActiveAudioLayer(AudioLayer* audioLayer) const = 0; @@ -171,6 +182,10 @@ class AudioDeviceModule : public rtc::RefCountInterface { virtual int GetRecordAudioParameters(AudioParameters* params) const = 0; #endif // WEBRTC_IOS + virtual int32_t SetAudioDeviceSink(AudioDeviceSink* sink) const { return -1; } + virtual int32_t GetPlayoutDevice() const { return -1; } + virtual int32_t GetRecordingDevice() const { return -1; } + protected: ~AudioDeviceModule() override {} }; diff --git a/modules/audio_device/mac/audio_device_mac.cc b/modules/audio_device/mac/audio_device_mac.cc index ed7b0e4669..0a371737b3 100644 --- a/modules/audio_device/mac/audio_device_mac.cc +++ b/modules/audio_device/mac/audio_device_mac.cc @@ -119,8 +119,6 @@ AudioDeviceMac::AudioDeviceMac() _twoDevices(true), _doStop(false), _doStopRec(false), - _macBookPro(false), - _macBookProPanRight(false), _captureLatencyUs(0), _renderLatencyUs(0), _captureDelayUs(0), @@ -277,8 +275,11 @@ AudioDeviceGeneric::InitStatus AudioDeviceMac::Init() { // but now must be explicitly specified. HAL would otherwise try to use the // main thread to issue notifications. AudioObjectPropertyAddress propertyAddress = { - kAudioHardwarePropertyRunLoop, kAudioObjectPropertyScopeGlobal, - kAudioObjectPropertyElementMaster}; + kAudioHardwarePropertyRunLoop, + kAudioObjectPropertyScopeGlobal, + kAudioObjectPropertyElementMaster + }; + CFRunLoopRef runLoop = NULL; UInt32 size = sizeof(CFRunLoopRef); int aoerr = AudioObjectSetPropertyData( @@ -294,22 +295,15 @@ AudioDeviceGeneric::InitStatus AudioDeviceMac::Init() { WEBRTC_CA_LOG_ERR(AudioObjectAddPropertyListener( kAudioObjectSystemObject, &propertyAddress, &objectListenerProc, this)); - // Determine if this is a MacBook Pro - _macBookPro = false; - _macBookProPanRight = false; - char buf[128]; - size_t length = sizeof(buf); - memset(buf, 0, length); + // Listen for default output device change. 
+ propertyAddress.mSelector = kAudioHardwarePropertyDefaultOutputDevice; + WEBRTC_CA_LOG_ERR(AudioObjectAddPropertyListener( + kAudioObjectSystemObject, &propertyAddress, &objectListenerProc, this)); - int intErr = sysctlbyname("hw.model", buf, &length, NULL, 0); - if (intErr != 0) { - RTC_LOG(LS_ERROR) << "Error in sysctlbyname(): " << err; - } else { - RTC_LOG(LS_VERBOSE) << "Hardware model: " << buf; - if (strncmp(buf, "MacBookPro", 10) == 0) { - _macBookPro = true; - } - } + // Listen for default input device change. + propertyAddress.mSelector = kAudioHardwarePropertyDefaultInputDevice; + WEBRTC_CA_LOG_ERR(AudioObjectAddPropertyListener( + kAudioObjectSystemObject, &propertyAddress, &objectListenerProc, this)); _initialized = true; @@ -337,9 +331,21 @@ int32_t AudioDeviceMac::Terminate() { OSStatus err = noErr; int retVal = 0; + // Remove listeners for global scope. AudioObjectPropertyAddress propertyAddress = { - kAudioHardwarePropertyDevices, kAudioObjectPropertyScopeGlobal, - kAudioObjectPropertyElementMaster}; + kAudioHardwarePropertyDevices, // selector + kAudioObjectPropertyScopeGlobal, // scope + kAudioObjectPropertyElementMaster // element + }; + + WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener( + kAudioObjectSystemObject, &propertyAddress, &objectListenerProc, this)); + + propertyAddress.mSelector = kAudioHardwarePropertyDefaultOutputDevice; + WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener( + kAudioObjectSystemObject, &propertyAddress, &objectListenerProc, this)); + + propertyAddress.mSelector = kAudioHardwarePropertyDefaultInputDevice; WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener( kAudioObjectSystemObject, &propertyAddress, &objectListenerProc, this)); @@ -789,6 +795,14 @@ int16_t AudioDeviceMac::PlayoutDevices() { MaxNumberDevices); } +int32_t AudioDeviceMac::GetPlayoutDevice() const { + if (_outputDeviceIsSpecified) { + return _outputDeviceIndex; + } + + return 0; +} + int32_t AudioDeviceMac::SetPlayoutDevice(uint16_t index) { MutexLock lock(&mutex_); @@ -830,13 +844,11 @@ int32_t AudioDeviceMac::PlayoutDeviceName(uint16_t index, } memset(name, 0, kAdmMaxDeviceNameSize); - - if (guid != NULL) { - memset(guid, 0, kAdmMaxGuidSize); - } + memset(guid, 0, kAdmMaxGuidSize); return GetDeviceName(kAudioDevicePropertyScopeOutput, index, - rtc::ArrayView(name, kAdmMaxDeviceNameSize)); + rtc::ArrayView(name, kAdmMaxDeviceNameSize), + rtc::ArrayView(guid, kAdmMaxGuidSize)); } int32_t AudioDeviceMac::RecordingDeviceName(uint16_t index, @@ -855,7 +867,8 @@ int32_t AudioDeviceMac::RecordingDeviceName(uint16_t index, } return GetDeviceName(kAudioDevicePropertyScopeInput, index, - rtc::ArrayView(name, kAdmMaxDeviceNameSize)); + rtc::ArrayView(name, kAdmMaxDeviceNameSize), + rtc::ArrayView(guid, kAdmMaxGuidSize)); } int16_t AudioDeviceMac::RecordingDevices() { @@ -864,6 +877,14 @@ int16_t AudioDeviceMac::RecordingDevices() { MaxNumberDevices); } +int32_t AudioDeviceMac::GetRecordingDevice() const { + if (_inputDeviceIsSpecified) { + return _inputDeviceIndex; + } + + return 0; +} + int32_t AudioDeviceMac::SetRecordingDevice(uint16_t index) { if (_recIsInitialized) { return -1; @@ -979,34 +1000,8 @@ int32_t AudioDeviceMac::InitPlayout() { _renderDeviceIsAlive = 1; _doStop = false; - // The internal microphone of a MacBook Pro is located under the left speaker - // grille. When the internal speakers are in use, we want to fully stereo - // pan to the right. 
AudioObjectPropertyAddress propertyAddress = { kAudioDevicePropertyDataSource, kAudioDevicePropertyScopeOutput, 0}; - if (_macBookPro) { - _macBookProPanRight = false; - Boolean hasProperty = - AudioObjectHasProperty(_outputDeviceID, &propertyAddress); - if (hasProperty) { - UInt32 dataSource = 0; - size = sizeof(dataSource); - WEBRTC_CA_LOG_WARN(AudioObjectGetPropertyData( - _outputDeviceID, &propertyAddress, 0, NULL, &size, &dataSource)); - - if (dataSource == 'ispk') { - _macBookProPanRight = true; - RTC_LOG(LS_VERBOSE) - << "MacBook Pro using internal speakers; stereo panning right"; - } else { - RTC_LOG(LS_VERBOSE) << "MacBook Pro not using internal speakers"; - } - - // Add a listener to determine if the status changes. - WEBRTC_CA_LOG_WARN(AudioObjectAddPropertyListener( - _outputDeviceID, &propertyAddress, &objectListenerProc, this)); - } - } // Get current stream description propertyAddress.mSelector = kAudioDevicePropertyStreamFormat; @@ -1297,7 +1292,7 @@ int32_t AudioDeviceMac::StartRecording() { while (CaptureWorkerThread()) { } }, - "CaptureWorkerThread", + "Audio_CaptureWorkerThread", rtc::ThreadAttributes().SetPriority(rtc::ThreadPriority::kRealtime)); OSStatus err = noErr; @@ -1390,7 +1385,11 @@ int32_t AudioDeviceMac::StopRecording() { // Remove listeners. AudioObjectPropertyAddress propertyAddress = { - kAudioDevicePropertyStreamFormat, kAudioDevicePropertyScopeInput, 0}; + kAudioDevicePropertyStreamFormat, // selector + kAudioDevicePropertyScopeInput, // scope + 0, // element + }; + WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener( _inputDeviceID, &propertyAddress, &objectListenerProc, this)); @@ -1434,7 +1433,7 @@ int32_t AudioDeviceMac::StartPlayout() { while (RenderWorkerThread()) { } }, - "RenderWorkerThread", + "Audio_RenderWorkerThread", rtc::ThreadAttributes().SetPriority(rtc::ThreadPriority::kRealtime)); if (_twoDevices || !_recording) { @@ -1503,7 +1502,11 @@ int32_t AudioDeviceMac::StopPlayout() { // Remove listeners. 
AudioObjectPropertyAddress propertyAddress = { - kAudioDevicePropertyStreamFormat, kAudioDevicePropertyScopeOutput, 0}; + kAudioDevicePropertyStreamFormat, // selector + kAudioDevicePropertyScopeOutput, // scope + 0, // element + }; + WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener( _outputDeviceID, &propertyAddress, &objectListenerProc, this)); @@ -1511,16 +1514,6 @@ int32_t AudioDeviceMac::StopPlayout() { WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener( _outputDeviceID, &propertyAddress, &objectListenerProc, this)); - if (_macBookPro) { - Boolean hasProperty = - AudioObjectHasProperty(_outputDeviceID, &propertyAddress); - if (hasProperty) { - propertyAddress.mSelector = kAudioDevicePropertyDataSource; - WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener( - _outputDeviceID, &propertyAddress, &objectListenerProc, this)); - } - } - _playIsInitialized = false; _playing = false; @@ -1548,8 +1541,11 @@ int32_t AudioDeviceMac::GetNumberDevices(const AudioObjectPropertyScope scope, OSStatus err = noErr; AudioObjectPropertyAddress propertyAddress = { - kAudioHardwarePropertyDevices, kAudioObjectPropertyScopeGlobal, - kAudioObjectPropertyElementMaster}; + kAudioHardwarePropertyDevices, + kAudioObjectPropertyScopeGlobal, + kAudioObjectPropertyElementMaster, + }; + UInt32 size = 0; WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyDataSize( kAudioObjectSystemObject, &propertyAddress, 0, NULL, &size)); @@ -1648,7 +1644,8 @@ int32_t AudioDeviceMac::GetNumberDevices(const AudioObjectPropertyScope scope, int32_t AudioDeviceMac::GetDeviceName(const AudioObjectPropertyScope scope, const uint16_t index, - rtc::ArrayView name) { + rtc::ArrayView name, + rtc::ArrayView guid) { OSStatus err = noErr; AudioDeviceID deviceIds[MaxNumberDevices]; @@ -1685,10 +1682,9 @@ int32_t AudioDeviceMac::GetDeviceName(const AudioObjectPropertyScope scope, isDefaultDevice = true; } } - AudioObjectPropertyAddress propertyAddress = {kAudioDevicePropertyDeviceName, scope, 0}; - + rtc::SimpleStringBuilder guid_ss(guid); if (isDefaultDevice) { std::array devName; UInt32 len = devName.size(); @@ -1698,6 +1694,7 @@ int32_t AudioDeviceMac::GetDeviceName(const AudioObjectPropertyScope scope, rtc::SimpleStringBuilder ss(name); ss.AppendFormat("default (%s)", devName.data()); + guid_ss << "default"; } else { if (index < numberDevices) { usedID = deviceIds[index]; @@ -1705,7 +1702,7 @@ int32_t AudioDeviceMac::GetDeviceName(const AudioObjectPropertyScope scope, usedID = index; } UInt32 len = name.size(); - + guid_ss << std::to_string(deviceIds[index]); WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData( usedID, &propertyAddress, 0, NULL, &len, name.data())); } @@ -1916,6 +1913,66 @@ OSStatus AudioDeviceMac::implObjectListenerProc( HandleDataSourceChange(objectId, addresses[i]); } else if (addresses[i].mSelector == kAudioDeviceProcessorOverload) { HandleProcessorOverload(addresses[i]); + } else if (addresses[i].mSelector == kAudioHardwarePropertyDefaultOutputDevice) { + RTC_LOG(LS_VERBOSE) << "kAudioHardwarePropertyDefaultOutputDevice"; + // default audio output device changed + HandleDefaultOutputDeviceChange(); + } else if (addresses[i].mSelector == kAudioHardwarePropertyDefaultInputDevice) { + RTC_LOG(LS_VERBOSE) << "kAudioHardwarePropertyDefaultInputDevice"; + // default audio input device changed + HandleDefaultInputDeviceChange(); + } + } + + return 0; +} + +int32_t AudioDeviceMac::HandleDefaultOutputDeviceChange() { + + if (SpeakerIsInitialized()) { + RTC_LOG(LS_WARNING) << "Default audio output device has changed"; + int32_t 
renderDeviceIsAlive = _renderDeviceIsAlive; + bool wasPlaying = _playing && renderDeviceIsAlive == 1; + + if (wasPlaying && _outputDeviceIsSpecified && _outputDeviceIndex == 0) { + + StopPlayout(); + + // default is already selected _outputDeviceIndex(0) + // re-init and start playout + InitPlayout(); + StartPlayout(); + } + + // Notify default output device updated + if (audio_device_module_sink_) { + audio_device_module_sink_->OnDevicesUpdated(); + } + } + + return 0; +} + +int32_t AudioDeviceMac::HandleDefaultInputDeviceChange() { + + if (MicrophoneIsInitialized()) { + RTC_LOG(LS_WARNING) << "Default audio input device has changed"; + int32_t captureDeviceIsAlive = _captureDeviceIsAlive; + bool wasRecording = _recording && captureDeviceIsAlive == 1; + + if (wasRecording && _inputDeviceIsSpecified && _inputDeviceIndex == 0) { + + StopRecording(); + + // default is already selected _inputDeviceIndex(0) + // re-init and start recording + InitRecording(); + StartRecording(); + } + + // Notify default input device updated + if (audio_device_module_sink_) { + audio_device_module_sink_->OnDevicesUpdated(); } } @@ -1938,9 +1995,29 @@ int32_t AudioDeviceMac::HandleDeviceChange() { &size, &deviceIsAlive); if (err == kAudioHardwareBadDeviceError || deviceIsAlive == 0) { - RTC_LOG(LS_WARNING) << "Capture device is not alive (probably removed)"; - _captureDeviceIsAlive = 0; - _mixerManager.CloseMicrophone(); + RTC_LOG(LS_WARNING) << "Audio input device is not alive (probably removed) deviceID: " << _inputDeviceID; + //AtomicSet32(&_captureDeviceIsAlive, 0); + + // Logic to switch to default device (if exists) + // when the current device is not alive anymore + int32_t captureDeviceIsAlive = _captureDeviceIsAlive; + bool wasRecording = _recording && captureDeviceIsAlive == 1; + + StopRecording(); + + // was playing & default device exists + if (wasRecording && SetRecordingDevice(0) == 0) { + InitRecording(); + StartRecording(); + } else { + _mixerManager.CloseMicrophone(); + } + + // Notify input device removed + if (audio_device_module_sink_) { + audio_device_module_sink_->OnDevicesUpdated(); + } + } else if (err != noErr) { logCAMsg(rtc::LS_ERROR, "Error in AudioDeviceGetProperty()", (const char*)&err); @@ -1957,9 +2034,29 @@ int32_t AudioDeviceMac::HandleDeviceChange() { &size, &deviceIsAlive); if (err == kAudioHardwareBadDeviceError || deviceIsAlive == 0) { - RTC_LOG(LS_WARNING) << "Render device is not alive (probably removed)"; - _renderDeviceIsAlive = 0; - _mixerManager.CloseSpeaker(); + RTC_LOG(LS_WARNING) << "Audio output device is not alive (probably removed) deviceID: " << _outputDeviceID; + // AtomicSet32(&_renderDeviceIsAlive, 0); // StopPlayout() does this + + // Logic to switch to default device (if exists) + // when the current device is not alive anymore + int32_t renderDeviceIsAlive = _renderDeviceIsAlive; + bool wasPlaying = _playing && renderDeviceIsAlive == 1; + + StopPlayout(); + + // was playing & default device exists + if (wasPlaying && SetPlayoutDevice(0) == 0) { + InitPlayout(); + StartPlayout(); + } else { + _mixerManager.CloseSpeaker(); + } + + // Notify output device removed + if (audio_device_module_sink_) { + audio_device_module_sink_->OnDevicesUpdated(); + } + } else if (err != noErr) { logCAMsg(rtc::LS_ERROR, "Error in AudioDeviceGetProperty()", (const char*)&err); @@ -2061,28 +2158,10 @@ int32_t AudioDeviceMac::HandleStreamFormatChange( int32_t AudioDeviceMac::HandleDataSourceChange( const AudioObjectID objectId, const AudioObjectPropertyAddress propertyAddress) { - 
OSStatus err = noErr; - - if (_macBookPro && - propertyAddress.mScope == kAudioDevicePropertyScopeOutput) { - RTC_LOG(LS_VERBOSE) << "Data source changed"; - - _macBookProPanRight = false; - UInt32 dataSource = 0; - UInt32 size = sizeof(UInt32); - WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData( - objectId, &propertyAddress, 0, NULL, &size, &dataSource)); - if (dataSource == 'ispk') { - _macBookProPanRight = true; - RTC_LOG(LS_VERBOSE) - << "MacBook Pro using internal speakers; stereo panning right"; - } else { - RTC_LOG(LS_VERBOSE) << "MacBook Pro not using internal speakers"; - } - } return 0; } + int32_t AudioDeviceMac::HandleProcessorOverload( const AudioObjectPropertyAddress propertyAddress) { // TODO(xians): we probably want to notify the user in some way of the @@ -2400,24 +2479,6 @@ bool AudioDeviceMac::RenderWorkerThread() { uint32_t nOutSamples = nSamples * _outDesiredFormat.mChannelsPerFrame; SInt16* pPlayBuffer = (SInt16*)&playBuffer; - if (_macBookProPanRight && (_playChannels == 2)) { - // Mix entirely into the right channel and zero the left channel. - SInt32 sampleInt32 = 0; - for (uint32_t sampleIdx = 0; sampleIdx < nOutSamples; sampleIdx += 2) { - sampleInt32 = pPlayBuffer[sampleIdx]; - sampleInt32 += pPlayBuffer[sampleIdx + 1]; - sampleInt32 /= 2; - - if (sampleInt32 > 32767) { - sampleInt32 = 32767; - } else if (sampleInt32 < -32768) { - sampleInt32 = -32768; - } - - pPlayBuffer[sampleIdx] = 0; - pPlayBuffer[sampleIdx + 1] = static_cast(sampleInt32); - } - } PaUtil_WriteRingBuffer(_paRenderBuffer, pPlayBuffer, nOutSamples); diff --git a/modules/audio_device/mac/audio_device_mac.h b/modules/audio_device/mac/audio_device_mac.h index bb06395d03..6cb5482a84 100644 --- a/modules/audio_device/mac/audio_device_mac.h +++ b/modules/audio_device/mac/audio_device_mac.h @@ -154,6 +154,13 @@ class AudioDeviceMac : public AudioDeviceGeneric { virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) RTC_LOCKS_EXCLUDED(mutex_); + virtual int32_t SetAudioDeviceSink(AudioDeviceSink* sink) RTC_LOCKS_EXCLUDED(mutex_) { + audio_device_module_sink_ = sink; + return 0; + } + virtual int32_t GetPlayoutDevice() const; + virtual int32_t GetRecordingDevice() const; + private: int32_t InitSpeakerLocked() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); int32_t InitMicrophoneLocked() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); @@ -180,7 +187,8 @@ class AudioDeviceMac : public AudioDeviceGeneric { int32_t GetDeviceName(AudioObjectPropertyScope scope, uint16_t index, - rtc::ArrayView name); + rtc::ArrayView name, + rtc::ArrayView guid); int32_t InitDevice(uint16_t userDeviceIndex, AudioDeviceID& deviceId, @@ -201,6 +209,8 @@ class AudioDeviceMac : public AudioDeviceGeneric { const AudioObjectPropertyAddress addresses[]); int32_t HandleDeviceChange(); + int32_t HandleDefaultOutputDeviceChange(); + int32_t HandleDefaultInputDeviceChange(); int32_t HandleStreamFormatChange(AudioObjectID objectId, AudioObjectPropertyAddress propertyAddress); @@ -343,6 +353,8 @@ class AudioDeviceMac : public AudioDeviceGeneric { // Typing detection // 0x5c is key "9", after that comes function keys. 
bool prev_key_state_[0x5d]; + + AudioDeviceSink *audio_device_module_sink_ = nullptr; }; } // namespace webrtc diff --git a/modules/audio_device/win/audio_device_core_win.cc b/modules/audio_device/win/audio_device_core_win.cc index aa8b6a9ebe..c1c2b32a9b 100644 --- a/modules/audio_device/win/audio_device_core_win.cc +++ b/modules/audio_device/win/audio_device_core_win.cc @@ -461,6 +461,8 @@ AudioDeviceWindowsCore::AudioDeviceWindowsCore() _playChannelsPrioList[0] = 2; // stereo is prio 1 _playChannelsPrioList[1] = 1; // mono is prio 2 + _deviceStateListener = new DeviceStateListener(); + HRESULT hr; // We know that this API will work since it has already been verified in @@ -474,6 +476,8 @@ AudioDeviceWindowsCore::AudioDeviceWindowsCore() reinterpret_cast(&_ptrEnumerator)); RTC_DCHECK(_ptrEnumerator); + _ptrEnumerator->RegisterEndpointNotificationCallback(_deviceStateListener); + // DMO initialization for built-in WASAPI AEC. { IMediaObject* ptrDMO = NULL; @@ -499,6 +503,8 @@ AudioDeviceWindowsCore::~AudioDeviceWindowsCore() { Terminate(); + _ptrEnumerator->UnregisterEndpointNotificationCallback(_deviceStateListener); + // The IMMDeviceEnumerator is created during construction. Must release // it here and not in Terminate() since we don't recreate it in Init(). SAFE_RELEASE(_ptrEnumerator); @@ -535,6 +541,11 @@ AudioDeviceWindowsCore::~AudioDeviceWindowsCore() { _hShutdownCaptureEvent = NULL; } + if(NULL != _deviceStateListener) { + delete _deviceStateListener; + _deviceStateListener = NULL; + } + if (_avrtLibrary) { BOOL freeOK = FreeLibrary(_avrtLibrary); if (!freeOK) { @@ -3894,6 +3905,65 @@ int32_t AudioDeviceWindowsCore::_GetDeviceID(IMMDevice* pDevice, return 0; } +int32_t AudioDeviceWindowsCore::SetAudioDeviceSink(AudioDeviceSink* sink) { + _deviceStateListener->SetAudioDeviceSink(sink); + return 0; +} + +void AudioDeviceWindowsCore::DeviceStateListener::SetAudioDeviceSink(AudioDeviceSink *sink) { + callback_ = sink; +} + +HRESULT AudioDeviceWindowsCore::DeviceStateListener::OnDeviceStateChanged(LPCWSTR pwstrDeviceId, DWORD dwNewState) { + RTC_DLOG(LS_INFO) << "AudioDeviceWindowsCore::OnDeviceStateChanged => " << pwstrDeviceId << ", NewState => " << dwNewState; + if(callback_) callback_->OnDevicesUpdated(); + return S_OK; +} + +HRESULT AudioDeviceWindowsCore::DeviceStateListener::OnDeviceAdded(LPCWSTR pwstrDeviceId) { + RTC_DLOG(LS_INFO) << "AudioDeviceWindowsCore::OnDeviceAdded => " << pwstrDeviceId; + return S_OK; +} + +HRESULT AudioDeviceWindowsCore::DeviceStateListener::OnDeviceRemoved(LPCWSTR pwstrDeviceId) { + RTC_DLOG(LS_INFO) << "AudioDeviceWindowsCore::OnDeviceRemoved => " << pwstrDeviceId; + return S_OK; +} + +HRESULT AudioDeviceWindowsCore::DeviceStateListener::OnDefaultDeviceChanged(EDataFlow flow, ERole role, LPCWSTR pwstrDefaultDeviceId) { + RTC_DLOG(LS_INFO) << "AudioDeviceWindowsCore::OnDefaultDeviceChanged => " << pwstrDefaultDeviceId; + return S_OK; +} + +HRESULT AudioDeviceWindowsCore::DeviceStateListener::OnPropertyValueChanged(LPCWSTR pwstrDeviceId, const PROPERTYKEY key) { + //RTC_DLOG(LS_INFO) << "AudioDeviceWindowsCore::OnPropertyValueChanged => " << pwstrDeviceId; + return S_OK; +} + +ULONG AudioDeviceWindowsCore::DeviceStateListener::AddRef() { + ULONG new_ref = InterlockedIncrement(&ref_count_); + // RTC_DLOG(LS_INFO) << "__AddRef => " << new_ref; + return new_ref; +} + +ULONG AudioDeviceWindowsCore::DeviceStateListener::Release() { + ULONG new_ref = InterlockedDecrement(&ref_count_); + // RTC_DLOG(LS_INFO) << "__Release => " << new_ref; + return new_ref; 
+} + +HRESULT AudioDeviceWindowsCore::DeviceStateListener::QueryInterface(REFIID iid, void** object) { + if (object == nullptr) { + return E_POINTER; + } + if (iid == IID_IUnknown || iid == __uuidof(IMMNotificationClient)) { + *object = static_cast(this); + return S_OK; + } + *object = nullptr; + return E_NOINTERFACE; +} + // ---------------------------------------------------------------------------- // _GetDefaultDevice // ---------------------------------------------------------------------------- diff --git a/modules/audio_device/win/audio_device_core_win.h b/modules/audio_device/win/audio_device_core_win.h index 380effb449..10b6a92b7f 100644 --- a/modules/audio_device/win/audio_device_core_win.h +++ b/modules/audio_device/win/audio_device_core_win.h @@ -22,6 +22,8 @@ #include #include // IMediaObject #include // MMDevice +#include +#include #include "api/scoped_refptr.h" #include "modules/audio_device/audio_device_generic.h" @@ -50,6 +52,34 @@ class AudioDeviceWindowsCore : public AudioDeviceGeneric { AudioDeviceWindowsCore(); ~AudioDeviceWindowsCore(); + class DeviceStateListener : public IMMNotificationClient { + public: + virtual ~DeviceStateListener() = default; + HRESULT __stdcall OnDeviceStateChanged(LPCWSTR pwstrDeviceId, + DWORD dwNewState) override; + HRESULT __stdcall OnDeviceAdded(LPCWSTR pwstrDeviceId) override; + + HRESULT __stdcall OnDeviceRemoved(LPCWSTR pwstrDeviceId) override; + + HRESULT + __stdcall OnDefaultDeviceChanged(EDataFlow flow, + ERole role, + LPCWSTR pwstrDefaultDeviceId) override; + + HRESULT __stdcall OnPropertyValueChanged(LPCWSTR pwstrDeviceId, + const PROPERTYKEY key) override; + // IUnknown (required by IMMNotificationClient). + ULONG __stdcall AddRef() override; + ULONG __stdcall Release() override; + HRESULT __stdcall QueryInterface(REFIID iid, void** object) override; + + void SetAudioDeviceSink(AudioDeviceSink *sink); + + private: + LONG ref_count_ = 1; + AudioDeviceSink *callback_ = nullptr; + }; + static bool CoreAudioIsSupported(); // Retrieve the currently utilized audio layer @@ -150,6 +180,8 @@ class AudioDeviceWindowsCore : public AudioDeviceGeneric { virtual int32_t EnableBuiltInAEC(bool enable); + virtual int32_t SetAudioDeviceSink(AudioDeviceSink* sink); + public: virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer); @@ -237,6 +269,7 @@ class AudioDeviceWindowsCore : public AudioDeviceGeneric { IAudioEndpointVolume* _ptrCaptureVolume; ISimpleAudioVolume* _ptrRenderSimpleVolume; + DeviceStateListener *_deviceStateListener = nullptr; // DirectX Media Object (DMO) for the built-in AEC. 
rtc::scoped_refptr _dmo; rtc::scoped_refptr _mediaBuffer; diff --git a/sdk/BUILD.gn b/sdk/BUILD.gn index ff89b21721..ccaef5504e 100644 --- a/sdk/BUILD.gn +++ b/sdk/BUILD.gn @@ -943,6 +943,11 @@ if (is_ios || is_mac) { ] configs += [ "..:no_global_constructors" ] sources = [ + "objc/api/peerconnection/RTCAudioDeviceModule.h", + "objc/api/peerconnection/RTCAudioDeviceModule+Private.h", + "objc/api/peerconnection/RTCAudioDeviceModule.mm", + "objc/api/peerconnection/RTCIODevice.h", + "objc/api/peerconnection/RTCIODevice.mm", "objc/api/peerconnection/RTCAudioSource+Private.h", "objc/api/peerconnection/RTCAudioSource.h", "objc/api/peerconnection/RTCAudioSource.mm", @@ -1308,6 +1313,8 @@ if (is_ios || is_mac) { "objc/helpers/RTCCameraPreviewView.h", "objc/helpers/RTCDispatcher.h", "objc/helpers/UIDevice+RTCDevice.h", + "objc/api/peerconnection/RTCAudioDeviceModule.h", + "objc/api/peerconnection/RTCIODevice.h", "objc/api/peerconnection/RTCAudioSource.h", "objc/api/peerconnection/RTCAudioTrack.h", "objc/api/peerconnection/RTCConfiguration.h", @@ -1422,6 +1429,8 @@ if (is_ios || is_mac) { output_name = "WebRTC" sources = [ + "objc/api/peerconnection/RTCAudioDeviceModule.h", + "objc/api/peerconnection/RTCIODevice.h", "objc/api/peerconnection/RTCAudioSource.h", "objc/api/peerconnection/RTCAudioTrack.h", "objc/api/peerconnection/RTCCertificate.h", diff --git a/sdk/objc/api/peerconnection/RTCAudioDeviceModule+Private.h b/sdk/objc/api/peerconnection/RTCAudioDeviceModule+Private.h new file mode 100644 index 0000000000..4eb91b93c7 --- /dev/null +++ b/sdk/objc/api/peerconnection/RTCAudioDeviceModule+Private.h @@ -0,0 +1,31 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import "RTCAudioDeviceModule.h" +#import "sdk/objc/native/api/audio_device_module.h" + +#include "rtc_base/thread.h" + +NS_ASSUME_NONNULL_BEGIN + +@interface RTCAudioDeviceModule () + +- (instancetype)initWithNativeModule:(rtc::scoped_refptr )module + workerThread:(rtc::Thread *)workerThread; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/api/peerconnection/RTCAudioDeviceModule.h b/sdk/objc/api/peerconnection/RTCAudioDeviceModule.h new file mode 100644 index 0000000000..1a9e339bd6 --- /dev/null +++ b/sdk/objc/api/peerconnection/RTCAudioDeviceModule.h @@ -0,0 +1,56 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#import +#import + +#import "RTCMacros.h" +#import "RTCIODevice.h" + +NS_ASSUME_NONNULL_BEGIN + +typedef void (^RTCOnAudioDevicesDidUpdate)(); + +RTC_OBJC_EXPORT +@interface RTC_OBJC_TYPE (RTCAudioDeviceModule) : NSObject + +@property(nonatomic, readonly) NSArray *outputDevices; +@property(nonatomic, readonly) NSArray *inputDevices; + +@property(nonatomic, readonly) BOOL playing; +@property(nonatomic, readonly) BOOL recording; + +@property(nonatomic, assign) RTC_OBJC_TYPE(RTCIODevice) *outputDevice; +@property(nonatomic, assign) RTC_OBJC_TYPE(RTCIODevice) *inputDevice; + +// Executes low-level API's in sequence to switch the device +// Use outputDevice / inputDevice property unless you need to know if setting the device is +// successful. +- (BOOL)trySetOutputDevice:(nullable RTCIODevice *)device; +- (BOOL)trySetInputDevice:(nullable RTCIODevice *)device; + +- (BOOL)setDevicesUpdatedHandler: (nullable RTCOnAudioDevicesDidUpdate) handler; + +- (BOOL)startPlayout; +- (BOOL)stopPlayout; +- (BOOL)initPlayout; +- (BOOL)startRecording; +- (BOOL)stopRecording; +- (BOOL)initRecording; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/api/peerconnection/RTCAudioDeviceModule.mm b/sdk/objc/api/peerconnection/RTCAudioDeviceModule.mm new file mode 100644 index 0000000000..5c116fae53 --- /dev/null +++ b/sdk/objc/api/peerconnection/RTCAudioDeviceModule.mm @@ -0,0 +1,294 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#import "RTCAudioDeviceModule.h" +#import "RTCAudioDeviceModule+Private.h" +#import "RTCIODevice+Private.h" +#import "base/RTCLogging.h" + +#import "sdk/objc/native/api/audio_device_module.h" + +class AudioDeviceSink : public webrtc::AudioDeviceSink { + public: + AudioDeviceSink() {} + + void OnDevicesUpdated() override { + + RTCLogInfo(@"AudioDeviceSink OnDevicesUpdated"); + + if (callback_handler_) { + callback_handler_(); + } + } + + // private: + RTCOnAudioDevicesDidUpdate callback_handler_; +}; + +@implementation RTC_OBJC_TYPE (RTCAudioDeviceModule) { + rtc::Thread *_workerThread; + rtc::scoped_refptr _native; + AudioDeviceSink *_sink; +} + +- (instancetype)initWithNativeModule:(rtc::scoped_refptr )module + workerThread:(rtc::Thread * )workerThread { + + RTCLogInfo(@"RTCAudioDeviceModule initWithNativeModule:workerThread:"); + + self = [super init]; + _native = module; + _workerThread = workerThread; + + _sink = new AudioDeviceSink(); + + _workerThread->BlockingCall([self] { + _native->SetAudioDeviceSink(_sink); + }); + + return self; +} + +- (NSArray *)outputDevices { + + return _workerThread->BlockingCall([self] { + return [self _outputDevices]; + }); +} + +- (NSArray *)inputDevices { + return _workerThread->BlockingCall([self] { + return [self _inputDevices]; + }); +} + +- (RTCIODevice *)outputDevice { + return _workerThread->BlockingCall([self] { + + NSArray *devices = [self _outputDevices]; + int16_t devicesCount = (int16_t)([devices count]); + int16_t index = _native->GetPlayoutDevice(); + + if (devicesCount == 0 || index <= -1 || index > (devicesCount - 1)) { + return (RTC_OBJC_TYPE(RTCIODevice) *)nil; + } + + return (RTC_OBJC_TYPE(RTCIODevice) *)[devices objectAtIndex:index]; + }); +} + +- (void)setOutputDevice: (RTCIODevice *)device { + [self trySetOutputDevice: device]; +} + +- (BOOL)trySetOutputDevice: (RTCIODevice *)device { + + return _workerThread->BlockingCall([self, device] { + + NSUInteger index = 0; + NSArray *devices = [self _outputDevices]; + + if ([devices count] == 0) { + return NO; + } + + if (device != nil) { + index = [devices indexOfObjectPassingTest:^BOOL(RTCIODevice *e, NSUInteger i, BOOL *stop) { + return (*stop = [e.deviceId isEqualToString:device.deviceId]); + }]; + if (index == NSNotFound) { + return NO; + } + } + + _native->StopPlayout(); + + if (_native->SetPlayoutDevice(index) == 0 + && _native->InitPlayout() == 0 + && _native->StartPlayout() == 0) { + + return YES; + } + + return NO; + }); +} + +- (RTCIODevice *)inputDevice { + + return _workerThread->BlockingCall([self] { + + NSArray *devices = [self _inputDevices]; + int16_t devicesCount = (int16_t)([devices count]); + int16_t index = _native->GetRecordingDevice(); + + if (devicesCount == 0 || index <= -1 || index > (devicesCount - 1)) { + return (RTC_OBJC_TYPE(RTCIODevice) *)nil; + } + + return (RTC_OBJC_TYPE(RTCIODevice) *)[devices objectAtIndex:index]; + }); +} + +- (void)setInputDevice: (RTCIODevice *)device { + [self trySetInputDevice: device]; +} + +- (BOOL)trySetInputDevice: (RTCIODevice *)device { + + return _workerThread->BlockingCall([self, device] { + + NSUInteger index = 0; + NSArray *devices = [self _inputDevices]; + + if ([devices count] == 0) { + return NO; + } + + if (device != nil) { + index = [devices indexOfObjectPassingTest:^BOOL(RTCIODevice *e, NSUInteger i, BOOL *stop) { + return (*stop = [e.deviceId isEqualToString:device.deviceId]); + }]; + if (index == NSNotFound) { + return NO; + } + } + + _native->StopRecording(); + + if 
(_native->SetRecordingDevice(index) == 0 + && _native->InitRecording() == 0 + && _native->StartRecording() == 0) { + + return YES; + } + + return NO; + }); +} + +- (BOOL)playing { + + return _workerThread->BlockingCall([self] { + return _native->Playing(); + }); +} + +- (BOOL)recording { + + return _workerThread->BlockingCall([self] { + return _native->Recording(); + }); +} + +#pragma mark - Low-level access + +- (BOOL)startPlayout { + + return _workerThread->BlockingCall([self] { + return _native->StartPlayout() == 0; + }); +} + +- (BOOL)stopPlayout { + + return _workerThread->BlockingCall([self] { + return _native->StopPlayout() == 0; + }); +} + +- (BOOL)initPlayout { + + return _workerThread->BlockingCall([self] { + return _native->InitPlayout() == 0; + }); +} + +- (BOOL)startRecording { + + return _workerThread->BlockingCall([self] { + return _native->StartRecording() == 0; + }); +} + +- (BOOL)stopRecording { + + return _workerThread->BlockingCall([self] { + return _native->StopRecording() == 0; + }); +} + +- (BOOL)initRecording { + + return _workerThread->BlockingCall([self] { + return _native->InitRecording() == 0; + }); +} + +- (BOOL)setDevicesUpdatedHandler: (nullable RTCOnAudioDevicesDidUpdate) handler { + _sink->callback_handler_ = handler; + return YES; +} + +#pragma mark - Private + +- (NSArray *)_outputDevices { + + char guid[webrtc::kAdmMaxGuidSize + 1] = {0}; + char name[webrtc::kAdmMaxDeviceNameSize + 1] = {0}; + + NSMutableArray *result = [NSMutableArray array]; + + int16_t count = _native->PlayoutDevices(); + + if (count > 0) { + for (int i = 0; i < count; i++) { + _native->PlayoutDeviceName(i, name, guid); + NSString *strGUID = [[NSString alloc] initWithCString:guid encoding:NSUTF8StringEncoding]; + NSString *strName = [[NSString alloc] initWithCString:name encoding:NSUTF8StringEncoding]; + RTCIODevice *device = [[RTCIODevice alloc] initWithType:RTCIODeviceTypeOutput deviceId:strGUID name:strName]; + [result addObject: device]; + } + } + + return result; +} + +- (NSArray *)_inputDevices { + + char guid[webrtc::kAdmMaxGuidSize + 1] = {0}; + char name[webrtc::kAdmMaxDeviceNameSize + 1] = {0}; + + NSMutableArray *result = [NSMutableArray array]; + + int16_t count = _native->RecordingDevices(); + + if (count > 0) { + for (int i = 0; i < count; i++) { + _native->RecordingDeviceName(i, name, guid); + NSString *strGUID = [[NSString alloc] initWithCString:guid encoding:NSUTF8StringEncoding]; + NSString *strName = [[NSString alloc] initWithCString:name encoding:NSUTF8StringEncoding]; + RTCIODevice *device = [[RTCIODevice alloc] initWithType:RTCIODeviceTypeInput deviceId:strGUID name:strName]; + [result addObject: device]; + } + } + + return result; +} + +@end diff --git a/sdk/objc/api/peerconnection/RTCIODevice+Private.h b/sdk/objc/api/peerconnection/RTCIODevice+Private.h new file mode 100644 index 0000000000..0eb09b83a4 --- /dev/null +++ b/sdk/objc/api/peerconnection/RTCIODevice+Private.h @@ -0,0 +1,28 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +#import "RTCIODevice.h" + +NS_ASSUME_NONNULL_BEGIN + +@interface RTCIODevice () + +- (instancetype)initWithType:(RTCIODeviceType)type + deviceId:(NSString *)deviceId + name:(NSString* )name; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/api/peerconnection/RTCIODevice.h b/sdk/objc/api/peerconnection/RTCIODevice.h new file mode 100644 index 0000000000..f44d532081 --- /dev/null +++ b/sdk/objc/api/peerconnection/RTCIODevice.h @@ -0,0 +1,41 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import + +#import "RTCMacros.h" + +NS_ASSUME_NONNULL_BEGIN + +typedef NS_ENUM(NSInteger, RTCIODeviceType) { + RTCIODeviceTypeOutput, + RTCIODeviceTypeInput, +}; + +RTC_OBJC_EXPORT +@interface RTC_OBJC_TYPE(RTCIODevice) : NSObject + ++ (instancetype)defaultDeviceWithType: (RTCIODeviceType)type; +- (instancetype)init NS_UNAVAILABLE; + +@property(nonatomic, readonly) BOOL isDefault; +@property(nonatomic, readonly) RTCIODeviceType type; +@property(nonatomic, copy, readonly) NSString *deviceId; +@property(nonatomic, copy, readonly) NSString *name; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/api/peerconnection/RTCIODevice.mm b/sdk/objc/api/peerconnection/RTCIODevice.mm new file mode 100644 index 0000000000..27e1255e8e --- /dev/null +++ b/sdk/objc/api/peerconnection/RTCIODevice.mm @@ -0,0 +1,71 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#import "RTCIODevice.h" +#import "RTCIODevice+Private.h" + +NSString *const kDefaultDeviceId = @"default"; + +@implementation RTCIODevice + +@synthesize type = _type; +@synthesize deviceId = _deviceId; +@synthesize name = _name; + ++ (instancetype)defaultDeviceWithType: (RTCIODeviceType)type { + return [[self alloc] initWithType: type + deviceId: kDefaultDeviceId + name: @""]; +} + +- (instancetype)initWithType: (RTCIODeviceType)type + deviceId: (NSString *)deviceId + name: (NSString* )name { + if (self = [super init]) { + _type = type; + _deviceId = deviceId; + _name = name; + } + return self; +} + +#pragma mark - IODevice + +- (BOOL)isDefault { + return [_deviceId isEqualToString: kDefaultDeviceId]; +} + +#pragma mark - Equatable + +- (BOOL)isEqual: (id)object { + if (self == object) { + return YES; + } + if (object == nil) { + return NO; + } + if (![object isMemberOfClass:[self class]]) { + return NO; + } + + return [_deviceId isEqualToString:((RTC_OBJC_TYPE(RTCIODevice) *)object).deviceId]; +} + +- (NSUInteger)hash { + return [_deviceId hash]; +} + +@end diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Native.h b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Native.h index f361b9f0ea..902925936b 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Native.h +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Native.h @@ -50,7 +50,8 @@ NS_ASSUME_NONNULL_BEGIN audioDeviceModule: (nullable webrtc::AudioDeviceModule *)audioDeviceModule audioProcessingModule: - (rtc::scoped_refptr)audioProcessingModule; + (rtc::scoped_refptr)audioProcessingModule + bypassVoiceProcessing:(BOOL)bypassVoiceProcessing; - (instancetype) initWithNativeAudioEncoderFactory: @@ -65,12 +66,19 @@ NS_ASSUME_NONNULL_BEGIN audioProcessingModule: (rtc::scoped_refptr)audioProcessingModule networkControllerFactory:(std::unique_ptr) - networkControllerFactory; + networkControllerFactory + bypassVoiceProcessing:(BOOL)bypassVoiceProcessing; - (instancetype) initWithEncoderFactory:(nullable id)encoderFactory decoderFactory:(nullable id)decoderFactory; +- (instancetype) + initWithBypassVoiceProcessing:(BOOL)bypassVoiceProcessing + encoderFactory:(nullable id)encoderFactory + decoderFactory: + (nullable id)decoderFactory; + /** Initialize an RTCPeerConnection with a configuration, constraints, and * dependencies. */ diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h index 5575af98c9..7757323acd 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h @@ -23,6 +23,7 @@ NS_ASSUME_NONNULL_BEGIN @class RTC_OBJC_TYPE(RTCVideoSource); @class RTC_OBJC_TYPE(RTCVideoTrack); @class RTC_OBJC_TYPE(RTCPeerConnectionFactoryOptions); +@class RTC_OBJC_TYPE(RTCAudioDeviceModule); @protocol RTC_OBJC_TYPE (RTCPeerConnectionDelegate); @protocol RTC_OBJC_TYPE @@ -51,6 +52,15 @@ RTC_OBJC_EXPORT decoderFactory:(nullable id)decoderFactory audioDevice:(nullable id)audioDevice; +/* Initialize object with bypass voice processing */ +- (instancetype) + initWithBypassVoiceProcessing:(BOOL)bypassVoiceProcessing + encoderFactory:(nullable id)encoderFactory + decoderFactory: + (nullable id)decoderFactory; + +@property(nonatomic, readonly) RTCAudioDeviceModule *audioDeviceModule; + /** Initialize an RTCAudioSource with constraints. 
*/ - (RTC_OBJC_TYPE(RTCAudioSource) *)audioSourceWithConstraints: (nullable RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints; diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm index 62b55543d4..9e2b2f35c0 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm @@ -14,6 +14,9 @@ #import "RTCPeerConnectionFactory+Private.h" #import "RTCPeerConnectionFactoryOptions+Private.h" +#import "RTCAudioDeviceModule.h" +#import "RTCAudioDeviceModule+Private.h" + #import "RTCAudioSource+Private.h" #import "RTCAudioTrack+Private.h" #import "RTCMediaConstraints+Private.h" @@ -55,14 +58,17 @@ @implementation RTC_OBJC_TYPE (RTCPeerConnectionFactory) { std::unique_ptr _networkThread; std::unique_ptr _workerThread; std::unique_ptr _signalingThread; + rtc::scoped_refptr _nativeAudioDeviceModule; + BOOL _hasStartedAecDump; } @synthesize nativeFactory = _nativeFactory; +@synthesize audioDeviceModule = _audioDeviceModule; -- (rtc::scoped_refptr)audioDeviceModule { +- (rtc::scoped_refptr)createAudioDeviceModule:(BOOL)bypassVoiceProcessing { #if defined(WEBRTC_IOS) - return webrtc::CreateAudioDeviceModule(); + return webrtc::CreateAudioDeviceModule(bypassVoiceProcessing); #else return nullptr; #endif @@ -76,8 +82,9 @@ - (instancetype)init { RTCVideoEncoderFactoryH264) alloc] init]) nativeVideoDecoderFactory:webrtc::ObjCToNativeVideoDecoderFactory([[RTC_OBJC_TYPE( RTCVideoDecoderFactoryH264) alloc] init]) - audioDeviceModule:[self audioDeviceModule].get() - audioProcessingModule:nullptr]; + audioDeviceModule:[self createAudioDeviceModule:NO].get() + audioProcessingModule:nullptr + bypassVoiceProcessing:NO]; } - (instancetype) @@ -105,14 +112,42 @@ - (instancetype)init { if (audioDevice) { audio_device_module = webrtc::CreateAudioDeviceModule(audioDevice); } else { - audio_device_module = [self audioDeviceModule]; + audio_device_module = [self createAudioDeviceModule:NO]; + } + return [self initWithNativeAudioEncoderFactory:webrtc::CreateBuiltinAudioEncoderFactory() + nativeAudioDecoderFactory:webrtc::CreateBuiltinAudioDecoderFactory() + nativeVideoEncoderFactory:std::move(native_encoder_factory) + nativeVideoDecoderFactory:std::move(native_decoder_factory) + audioDeviceModule:audio_device_module.get() + audioProcessingModule:nullptr + bypassVoiceProcessing:NO]; +#endif +} + +- (instancetype) + initWithBypassVoiceProcessing:(BOOL)bypassVoiceProcessing + encoderFactory:(nullable id)encoderFactory + decoderFactory: + (nullable id)decoderFactory { +#ifdef HAVE_NO_MEDIA + return [self initWithNoMedia]; +#else + std::unique_ptr native_encoder_factory; + std::unique_ptr native_decoder_factory; + if (encoderFactory) { + native_encoder_factory = webrtc::ObjCToNativeVideoEncoderFactory(encoderFactory); } + if (decoderFactory) { + native_decoder_factory = webrtc::ObjCToNativeVideoDecoderFactory(decoderFactory); + } + rtc::scoped_refptr audio_device_module = [self createAudioDeviceModule:bypassVoiceProcessing]; return [self initWithNativeAudioEncoderFactory:webrtc::CreateBuiltinAudioEncoderFactory() nativeAudioDecoderFactory:webrtc::CreateBuiltinAudioDecoderFactory() nativeVideoEncoderFactory:std::move(native_encoder_factory) nativeVideoDecoderFactory:std::move(native_decoder_factory) audioDeviceModule:audio_device_module.get() - audioProcessingModule:nullptr]; + audioProcessingModule:nullptr + bypassVoiceProcessing:bypassVoiceProcessing]; #endif } @@ -161,14 +196,16 @@ - 
(instancetype)initWithNativeAudioEncoderFactory: (std::unique_ptr)videoDecoderFactory audioDeviceModule:(webrtc::AudioDeviceModule *)audioDeviceModule audioProcessingModule: - (rtc::scoped_refptr)audioProcessingModule { + (rtc::scoped_refptr)audioProcessingModule + bypassVoiceProcessing:(BOOL)bypassVoiceProcessing { return [self initWithNativeAudioEncoderFactory:audioEncoderFactory nativeAudioDecoderFactory:audioDecoderFactory nativeVideoEncoderFactory:std::move(videoEncoderFactory) nativeVideoDecoderFactory:std::move(videoDecoderFactory) audioDeviceModule:audioDeviceModule audioProcessingModule:audioProcessingModule - networkControllerFactory:nullptr]; + networkControllerFactory:nullptr + bypassVoiceProcessing:NO]; } - (instancetype)initWithNativeAudioEncoderFactory: (rtc::scoped_refptr)audioEncoderFactory @@ -183,7 +220,8 @@ - (instancetype)initWithNativeAudioEncoderFactory: (rtc::scoped_refptr)audioProcessingModule networkControllerFactory: (std::unique_ptr) - networkControllerFactory { + networkControllerFactory + bypassVoiceProcessing:(BOOL)bypassVoiceProcessing { if (self = [self initNative]) { webrtc::PeerConnectionFactoryDependencies dependencies; dependencies.network_thread = _networkThread.get(); @@ -196,7 +234,18 @@ - (instancetype)initWithNativeAudioEncoderFactory: dependencies.task_queue_factory = webrtc::CreateDefaultTaskQueueFactory(dependencies.trials.get()); cricket::MediaEngineDependencies media_deps; - media_deps.adm = std::move(audioDeviceModule); + + // always create ADM on worker thread + _nativeAudioDeviceModule = _workerThread->BlockingCall([&dependencies, &bypassVoiceProcessing]() { + return webrtc::AudioDeviceModule::Create(webrtc::AudioDeviceModule::AudioLayer::kPlatformDefaultAudio, + dependencies.task_queue_factory.get(), + bypassVoiceProcessing == YES); + }); + + _audioDeviceModule = [[RTCAudioDeviceModule alloc] initWithNativeModule: _nativeAudioDeviceModule + workerThread: _workerThread.get()]; + + media_deps.adm = _nativeAudioDeviceModule; media_deps.task_queue_factory = dependencies.task_queue_factory.get(); media_deps.audio_encoder_factory = std::move(audioEncoderFactory); media_deps.audio_decoder_factory = std::move(audioDecoderFactory); diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.mm b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.mm index 627909a0e3..0981fb3879 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.mm +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.mm @@ -39,7 +39,8 @@ + (RTCPeerConnectionFactoryBuilder *)builder { nativeVideoEncoderFactory:std::move(_videoEncoderFactory) nativeVideoDecoderFactory:std::move(_videoDecoderFactory) audioDeviceModule:_audioDeviceModule.get() - audioProcessingModule:_audioProcessingModule]; + audioProcessingModule:_audioProcessingModule + bypassVoiceProcessing:NO]; } - (void)setVideoEncoderFactory:(std::unique_ptr)videoEncoderFactory { diff --git a/sdk/objc/components/audio/RTCAudioSession+Configuration.mm b/sdk/objc/components/audio/RTCAudioSession+Configuration.mm index 449f31e9dd..b123e2002e 100644 --- a/sdk/objc/components/audio/RTCAudioSession+Configuration.mm +++ b/sdk/objc/components/audio/RTCAudioSession+Configuration.mm @@ -55,7 +55,8 @@ - (BOOL)setConfiguration:(RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configur if (![self setCategory:configuration.category withOptions:configuration.categoryOptions error:&categoryError]) { - RTCLogError(@"Failed to set category: %@", + RTCLogError(@"Failed to set category to %@: %@", 
+ self.category, categoryError.localizedDescription); error = categoryError; } else { @@ -66,7 +67,8 @@ - (BOOL)setConfiguration:(RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configur if (self.mode != configuration.mode) { NSError *modeError = nil; if (![self setMode:configuration.mode error:&modeError]) { - RTCLogError(@"Failed to set mode: %@", + RTCLogError(@"Failed to set mode to %@: %@", + self.mode, modeError.localizedDescription); error = modeError; } else { diff --git a/sdk/objc/components/audio/RTCAudioSession.mm b/sdk/objc/components/audio/RTCAudioSession.mm index 550a426d36..e6da689946 100644 --- a/sdk/objc/components/audio/RTCAudioSession.mm +++ b/sdk/objc/components/audio/RTCAudioSession.mm @@ -540,8 +540,7 @@ - (void)handleRouteChangeNotification:(NSNotification *)notification { RTCLog(@"Audio route changed: OldDeviceUnavailable"); break; case AVAudioSessionRouteChangeReasonCategoryChange: - RTCLog(@"Audio route changed: CategoryChange to :%@", - self.session.category); + RTCLog(@"Audio route changed: CategoryChange to :%@", self.session.category); break; case AVAudioSessionRouteChangeReasonOverride: RTCLog(@"Audio route changed: Override"); diff --git a/sdk/objc/components/audio/RTCAudioSessionConfiguration.m b/sdk/objc/components/audio/RTCAudioSessionConfiguration.m index 39e9ac13ec..7591d86f7d 100644 --- a/sdk/objc/components/audio/RTCAudioSessionConfiguration.m +++ b/sdk/objc/components/audio/RTCAudioSessionConfiguration.m @@ -65,15 +65,17 @@ @implementation RTC_OBJC_TYPE (RTCAudioSessionConfiguration) - (instancetype)init { if (self = [super init]) { + // Use AVAudioSession values for default + AVAudioSession *session = [AVAudioSession sharedInstance]; // Use a category which supports simultaneous recording and playback. // By default, using this category implies that our app’s audio is // nonmixable, hence activating the session will interrupt any other // audio sessions which are also nonmixable. - _category = AVAudioSessionCategoryPlayAndRecord; - _categoryOptions = AVAudioSessionCategoryOptionAllowBluetooth; + _category = session.category; + _categoryOptions = session.categoryOptions; // Specify mode for two-way voice communication (e.g. VoIP). - _mode = AVAudioSessionModeVoiceChat; + _mode = session.mode; // Set the session's sample rate or the hardware sample rate. // It is essential that we use the same sample rate as stream format diff --git a/sdk/objc/native/api/audio_device_module.mm b/sdk/objc/native/api/audio_device_module.mm index 4e7b681e69..ada25bd9ee 100644 --- a/sdk/objc/native/api/audio_device_module.mm +++ b/sdk/objc/native/api/audio_device_module.mm @@ -13,7 +13,11 @@ #include "api/make_ref_counted.h" #include "rtc_base/logging.h" +#if defined(WEBRTC_IOS) #include "sdk/objc/native/src/audio/audio_device_module_ios.h" +#endif + +#include "modules/audio_device/include/audio_device.h" namespace webrtc { diff --git a/sdk/objc/native/src/audio/audio_device_ios.h b/sdk/objc/native/src/audio/audio_device_ios.h index a86acb56fe..605cf402d4 100644 --- a/sdk/objc/native/src/audio/audio_device_ios.h +++ b/sdk/objc/native/src/audio/audio_device_ios.h @@ -172,6 +172,8 @@ class AudioDeviceIOS : public AudioDeviceGeneric, void HandlePlayoutGlitchDetected(); void HandleOutputVolumeChange(); + bool RestartAudioUnit(bool enable_input); + // Uses current `playout_parameters_` and `record_parameters_` to inform the // audio device buffer (ADB) about our internal audio parameters. 
void UpdateAudioDeviceBuffer(); @@ -200,7 +202,7 @@ class AudioDeviceIOS : public AudioDeviceGeneric, // Activates our audio session, creates and initializes the voice-processing // audio unit and verifies that we got the preferred native audio parameters. - bool InitPlayOrRecord(); + bool InitPlayOrRecord(bool enable_input); // Closes and deletes the voice-processing I/O unit. void ShutdownPlayOrRecord(); @@ -260,19 +262,19 @@ class AudioDeviceIOS : public AudioDeviceGeneric, // will be changed dynamically to account for this behavior. rtc::BufferT record_audio_buffer_; + bool recording_is_initialized_; + // Set to 1 when recording is active and 0 otherwise. std::atomic recording_; + bool playout_is_initialized_; + // Set to 1 when playout is active and 0 otherwise. std::atomic playing_; // Set to true after successful call to Init(), false otherwise. bool initialized_ RTC_GUARDED_BY(thread_); - // Set to true after successful call to InitRecording() or InitPlayout(), - // false otherwise. - bool audio_is_initialized_; - // Set to true if audio session is interrupted, false otherwise. bool is_interrupted_; diff --git a/sdk/objc/native/src/audio/audio_device_ios.mm b/sdk/objc/native/src/audio/audio_device_ios.mm index dd2c11bdd2..2021ec314c 100644 --- a/sdk/objc/native/src/audio/audio_device_ios.mm +++ b/sdk/objc/native/src/audio/audio_device_ios.mm @@ -61,6 +61,16 @@ const UInt16 kFixedPlayoutDelayEstimate = 30; const UInt16 kFixedRecordDelayEstimate = 30; +enum AudioDeviceMessageType : uint32_t { + kMessageTypeInterruptionBegin, + kMessageTypeInterruptionEnd, + kMessageTypeValidRouteChange, + kMessageTypeCanPlayOrRecordChange, + kMessageTypePlayoutGlitchDetected, + kMessageOutputVolumeChange, + kMessageTypeAudioWillRecord, +}; + using ios::CheckAndLogError; #if !defined(NDEBUG) @@ -94,10 +104,11 @@ static void LogDeviceInfo() { : bypass_voice_processing_(bypass_voice_processing), audio_device_buffer_(nullptr), audio_unit_(nullptr), + recording_is_initialized_(false), recording_(0), + playout_is_initialized_(false), playing_(0), initialized_(false), - audio_is_initialized_(false), is_interrupted_(false), has_configured_session_(false), num_detected_playout_glitches_(0), @@ -176,48 +187,57 @@ static void LogDeviceInfo() { LOGI() << "InitPlayout"; RTC_DCHECK_RUN_ON(thread_); RTC_DCHECK(initialized_); - RTC_DCHECK(!audio_is_initialized_); + RTC_DCHECK(!playout_is_initialized_); RTC_DCHECK(!playing_.load()); - if (!audio_is_initialized_) { - if (!InitPlayOrRecord()) { + if (!recording_is_initialized_) { + // recording not initialized yet, init with no input + if (!InitPlayOrRecord(false)) { RTC_LOG_F(LS_ERROR) << "InitPlayOrRecord failed for InitPlayout!"; return -1; } } - audio_is_initialized_ = true; + + playout_is_initialized_ = true; + return 0; } bool AudioDeviceIOS::PlayoutIsInitialized() const { RTC_DCHECK_RUN_ON(thread_); - return audio_is_initialized_; + return playout_is_initialized_; } bool AudioDeviceIOS::RecordingIsInitialized() const { RTC_DCHECK_RUN_ON(thread_); - return audio_is_initialized_; + return recording_is_initialized_; } int32_t AudioDeviceIOS::InitRecording() { LOGI() << "InitRecording"; RTC_DCHECK_RUN_ON(thread_); RTC_DCHECK(initialized_); - RTC_DCHECK(!audio_is_initialized_); + RTC_DCHECK(!recording_is_initialized_); RTC_DCHECK(!recording_.load()); - if (!audio_is_initialized_) { - if (!InitPlayOrRecord()) { + if (!playout_is_initialized_) { + // playout not initialized yet, init with input + if (!InitPlayOrRecord(true)) { RTC_LOG_F(LS_ERROR) << "InitPlayOrRecord 
failed for InitRecording!"; return -1; } + } else { + // playout already initialized, restart audio unit with input + RestartAudioUnit(true); } - audio_is_initialized_ = true; + + recording_is_initialized_ = true; + return 0; } int32_t AudioDeviceIOS::StartPlayout() { LOGI() << "StartPlayout"; RTC_DCHECK_RUN_ON(thread_); - RTC_DCHECK(audio_is_initialized_); + RTC_DCHECK(playout_is_initialized_); RTC_DCHECK(!playing_.load()); RTC_DCHECK(audio_unit_); if (fine_audio_buffer_) { @@ -242,14 +262,16 @@ static void LogDeviceInfo() { int32_t AudioDeviceIOS::StopPlayout() { LOGI() << "StopPlayout"; RTC_DCHECK_RUN_ON(thread_); - if (!audio_is_initialized_ || !playing_.load()) { + if (!playout_is_initialized_ || !playing_.load()) { return 0; } if (!recording_.load()) { ShutdownPlayOrRecord(); - audio_is_initialized_ = false; + + recording_is_initialized_ = false; } playing_.store(0, std::memory_order_release); + playout_is_initialized_ = false; // Derive average number of calls to OnGetPlayoutData() between detected // audio glitches and add the result to a histogram. @@ -273,7 +295,7 @@ static void LogDeviceInfo() { int32_t AudioDeviceIOS::StartRecording() { LOGI() << "StartRecording"; RTC_DCHECK_RUN_ON(thread_); - RTC_DCHECK(audio_is_initialized_); + RTC_DCHECK(recording_is_initialized_); RTC_DCHECK(!recording_.load()); RTC_DCHECK(audio_unit_); if (fine_audio_buffer_) { @@ -296,14 +318,19 @@ static void LogDeviceInfo() { int32_t AudioDeviceIOS::StopRecording() { LOGI() << "StopRecording"; RTC_DCHECK_RUN_ON(thread_); - if (!audio_is_initialized_ || !recording_.load()) { + if (!recording_is_initialized_ || !recording_.load()) { return 0; } if (!playing_.load()) { ShutdownPlayOrRecord(); - audio_is_initialized_ = false; + + playout_is_initialized_ = false; + } else if (playout_is_initialized_) { + // restart audio unit with no input + RestartAudioUnit(false); } recording_.store(0, std::memory_order_release); + recording_is_initialized_ = false; return 0; } @@ -445,7 +472,7 @@ static void LogDeviceInfo() { // Exclude extreme delta values since they do most likely not correspond // to a real glitch. Instead, the most probable cause is that a headset // has been plugged in or out. There are more direct ways to detect - // audio device changes (see HandleValidRouteChange()) but experiments + // audio device changes (see ValidRouteChange()) but experiments // show that using it leads to more complex implementations. // TODO(henrika): more tests might be needed to come up with an even // better upper limit. @@ -579,7 +606,7 @@ static void LogDeviceInfo() { SetupAudioBuffersForActiveAudioSession(); // Initialize the audio unit again with the new sample rate. - if (!audio_unit_->Initialize(playout_parameters_.sample_rate())) { + if (!audio_unit_->Initialize(playout_parameters_.sample_rate(), recording_is_initialized_)) { RTCLogError(@"Failed to initialize the audio unit with sample rate: %d", playout_parameters_.sample_rate()); return; @@ -633,6 +660,46 @@ static void LogDeviceInfo() { last_output_volume_change_time_ = rtc::TimeMillis(); } +bool AudioDeviceIOS::RestartAudioUnit(bool enable_input) { + RTC_DCHECK_RUN_ON(&io_thread_checker_); + + LOGI() << "RestartAudioUnit"; + + // If we don't have an audio unit yet, or the audio unit is uninitialized, + // there is no work to do. 
+ if (!audio_unit_ || audio_unit_->GetState() < VoiceProcessingAudioUnit::kInitialized) { + return false; + } + + bool restart_audio_unit = false; + if (audio_unit_->GetState() == VoiceProcessingAudioUnit::kStarted) { + audio_unit_->Stop(); + PrepareForNewStart(); + restart_audio_unit = true; + } + + if (audio_unit_->GetState() == VoiceProcessingAudioUnit::kInitialized) { + audio_unit_->Uninitialize(); + } + + // Initialize the audio unit again with the same sample rate. + const double sample_rate = playout_parameters_.sample_rate(); + + if (!audio_unit_->Initialize(sample_rate, enable_input)) { + RTCLogError(@"Failed to initialize the audio unit with sample rate: %f", sample_rate); + return false; + } + + // Restart the audio unit if it was already running. + if (restart_audio_unit && !audio_unit_->Start()) { + RTCLogError(@"Failed to start audio unit with sample rate: %f", sample_rate); + return false; + } + + LOGI() << "Successfully enabled audio unit for recording."; + return true; +} + void AudioDeviceIOS::UpdateAudioDeviceBuffer() { LOGI() << "UpdateAudioDevicebuffer"; // AttachAudioBuffer() is called at construction by the main class but check @@ -726,7 +793,7 @@ static void LogDeviceInfo() { // If we're not initialized we don't need to do anything. Audio unit will // be initialized on initialization. - if (!audio_is_initialized_) return; + if (!playout_is_initialized_ && !recording_is_initialized_) return; // If we're initialized, we must have an audio unit. RTC_DCHECK(audio_unit_); @@ -764,7 +831,7 @@ static void LogDeviceInfo() { RTCLog(@"Initializing audio unit for UpdateAudioUnit"); ConfigureAudioSession(); SetupAudioBuffersForActiveAudioSession(); - if (!audio_unit_->Initialize(playout_parameters_.sample_rate())) { + if (!audio_unit_->Initialize(playout_parameters_.sample_rate(), recording_is_initialized_)) { RTCLogError(@"Failed to initialize audio unit."); return; } @@ -854,7 +921,7 @@ static void LogDeviceInfo() { RTCLog(@"Unconfigured audio session."); } -bool AudioDeviceIOS::InitPlayOrRecord() { +bool AudioDeviceIOS::InitPlayOrRecord(bool enable_input) { LOGI() << "InitPlayOrRecord"; RTC_DCHECK_RUN_ON(thread_); @@ -890,7 +957,7 @@ static void LogDeviceInfo() { return false; } SetupAudioBuffersForActiveAudioSession(); - audio_unit_->Initialize(playout_parameters_.sample_rate()); + audio_unit_->Initialize(playout_parameters_.sample_rate(), enable_input); } // Release the lock. 
diff --git a/sdk/objc/native/src/audio/audio_device_module_ios.h b/sdk/objc/native/src/audio/audio_device_module_ios.h index 189d7e6c9c..2f9b95a0a8 100644 --- a/sdk/objc/native/src/audio/audio_device_module_ios.h +++ b/sdk/objc/native/src/audio/audio_device_module_ios.h @@ -129,6 +129,9 @@ class AudioDeviceModuleIOS : public AudioDeviceModule { int GetPlayoutAudioParameters(AudioParameters* params) const override; int GetRecordAudioParameters(AudioParameters* params) const override; #endif // WEBRTC_IOS + + int32_t SetAudioDeviceSink(AudioDeviceSink* sink) const override; + private: const bool bypass_voice_processing_; bool initialized_ = false; diff --git a/sdk/objc/native/src/audio/audio_device_module_ios.mm b/sdk/objc/native/src/audio/audio_device_module_ios.mm index 5effef3abd..5f93a06ee8 100644 --- a/sdk/objc/native/src/audio/audio_device_module_ios.mm +++ b/sdk/objc/native/src/audio/audio_device_module_ios.mm @@ -665,5 +665,11 @@ return r; } #endif // WEBRTC_IOS + + int32_t AudioDeviceModuleIOS::SetAudioDeviceSink(AudioDeviceSink* sink) const { + // not implemented + RTC_LOG(LS_WARNING) << __FUNCTION__ << "(" << sink << ") Not implemented"; + return -1; + } } } diff --git a/sdk/objc/native/src/audio/voice_processing_audio_unit.h b/sdk/objc/native/src/audio/voice_processing_audio_unit.h index ed9dd98568..b474cda104 100644 --- a/sdk/objc/native/src/audio/voice_processing_audio_unit.h +++ b/sdk/objc/native/src/audio/voice_processing_audio_unit.h @@ -75,7 +75,7 @@ class VoiceProcessingAudioUnit { VoiceProcessingAudioUnit::State GetState() const; // Initializes the underlying audio unit with the given sample rate. - bool Initialize(Float64 sample_rate); + bool Initialize(Float64 sample_rate, bool enable_input); // Starts the underlying audio unit. OSStatus Start(); diff --git a/sdk/objc/native/src/audio/voice_processing_audio_unit.mm b/sdk/objc/native/src/audio/voice_processing_audio_unit.mm index 3905b6857a..b3daacb334 100644 --- a/sdk/objc/native/src/audio/voice_processing_audio_unit.mm +++ b/sdk/objc/native/src/audio/voice_processing_audio_unit.mm @@ -111,19 +111,6 @@ static OSStatus GetAGCState(AudioUnit audio_unit, UInt32* enabled) { return false; } - // Enable input on the input scope of the input element. - UInt32 enable_input = 1; - result = AudioUnitSetProperty(vpio_unit_, kAudioOutputUnitProperty_EnableIO, - kAudioUnitScope_Input, kInputBus, &enable_input, - sizeof(enable_input)); - if (result != noErr) { - DisposeAudioUnit(); - RTCLogError(@"Failed to enable input on input scope of input element. " - "Error=%ld.", - (long)result); - return false; - } - // Enable output on the output scope of the output element. UInt32 enable_output = 1; result = AudioUnitSetProperty(vpio_unit_, kAudioOutputUnitProperty_EnableIO, @@ -193,7 +180,7 @@ static OSStatus GetAGCState(AudioUnit audio_unit, UInt32* enabled) { return state_; } -bool VoiceProcessingAudioUnit::Initialize(Float64 sample_rate) { +bool VoiceProcessingAudioUnit::Initialize(Float64 sample_rate, bool enable_input) { RTC_DCHECK_GE(state_, kUninitialized); RTCLog(@"Initializing audio unit with sample rate: %f", sample_rate); @@ -204,6 +191,19 @@ static OSStatus GetAGCState(AudioUnit audio_unit, UInt32* enabled) { LogStreamDescription(format); #endif + UInt32 _enable_input = enable_input ? 
1 : 0; + RTCLog(@"Initializing AudioUnit, _enable_input=%d", (int) _enable_input); + result = AudioUnitSetProperty(vpio_unit_, kAudioOutputUnitProperty_EnableIO, + kAudioUnitScope_Input, kInputBus, &_enable_input, + sizeof(_enable_input)); + if (result != noErr) { + DisposeAudioUnit(); + RTCLogError(@"Failed to enable input on input scope of input element. " + "Error=%ld.", + (long)result); + return false; + } + // Set the format on the output scope of the input element/bus. result = AudioUnitSetProperty(vpio_unit_, kAudioUnitProperty_StreamFormat, From ee030264e2274a2c90548a99b448782049e48fb4 Mon Sep 17 00:00:00 2001 From: CloudWebRTC Date: Sat, 11 Sep 2021 19:28:25 +0800 Subject: [PATCH 07/42] Simulcast support for iOS/Android. Simulcast support for iOS SDK (#4) Support for simulcast in Android SDK (#3) include simulcast headers for mac also (#10) Fix simulcast using hardware encoder on Android (#48) Co-authored-by: Hiroshi Horie <548776+hiroshihorie@users.noreply.github.com> Co-authored-by: Angelika Serwa --- sdk/BUILD.gn | 23 ++++++++++ sdk/android/BUILD.gn | 18 ++++++++ .../api/org/webrtc/SimulcastVideoEncoder.java | 28 ++++++++++++ .../webrtc/SimulcastVideoEncoderFactory.java | 43 +++++++++++++++++++ .../java/org/webrtc/HardwareVideoEncoder.java | 15 ++++++- .../src/jni/simulcast_video_encoder.cc | 34 +++++++++++++++ sdk/android/src/jni/simulcast_video_encoder.h | 22 ++++++++++ .../video_codec/RTCVideoEncoderSimulcast.h | 13 ++++++ .../video_codec/RTCVideoEncoderSimulcast.mm | 26 +++++++++++ .../RTCVideoEncoderFactorySimulcast.h | 16 +++++++ .../RTCVideoEncoderFactorySimulcast.mm | 39 +++++++++++++++++ sdk/objc/native/src/audio/audio_device_ios.mm | 10 ----- 12 files changed, 275 insertions(+), 12 deletions(-) create mode 100644 sdk/android/api/org/webrtc/SimulcastVideoEncoder.java create mode 100644 sdk/android/api/org/webrtc/SimulcastVideoEncoderFactory.java create mode 100644 sdk/android/src/jni/simulcast_video_encoder.cc create mode 100644 sdk/android/src/jni/simulcast_video_encoder.h create mode 100644 sdk/objc/api/video_codec/RTCVideoEncoderSimulcast.h create mode 100644 sdk/objc/api/video_codec/RTCVideoEncoderSimulcast.mm create mode 100644 sdk/objc/components/video_codec/RTCVideoEncoderFactorySimulcast.h create mode 100644 sdk/objc/components/video_codec/RTCVideoEncoderFactorySimulcast.mm diff --git a/sdk/BUILD.gn b/sdk/BUILD.gn index ccaef5504e..a3e47737ef 100644 --- a/sdk/BUILD.gn +++ b/sdk/BUILD.gn @@ -741,6 +741,7 @@ if (is_ios || is_mac) { ] deps = [ + ":simulcast", ":base_objc", ":native_video", ":videocodec_objc", @@ -842,6 +843,22 @@ if (is_ios || is_mac) { ] } + rtc_library("simulcast") { + sources = [ + "objc/components/video_codec/RTCVideoEncoderFactorySimulcast.h", + "objc/components/video_codec/RTCVideoEncoderFactorySimulcast.mm", + "objc/api/video_codec/RTCVideoEncoderSimulcast.h", + "objc/api/video_codec/RTCVideoEncoderSimulcast.mm", + ] + + deps = [ + ":base_objc", + ":wrapped_native_codec_objc", + "../media:rtc_media_base", + "../media:rtc_simulcast_encoder_adapter", + ] + } + rtc_library("mediaconstraints_objc") { configs += [ "..:no_global_constructors" ] sources = [ @@ -1360,6 +1377,9 @@ if (is_ios || is_mac) { "objc/api/video_codec/RTCVideoEncoderAV1.h", "objc/api/video_frame_buffer/RTCNativeI420Buffer.h", "objc/api/video_frame_buffer/RTCNativeMutableI420Buffer.h", + # Added for Simulcast support + "objc/components/video_codec/RTCVideoEncoderFactorySimulcast.h", + "objc/api/video_codec/RTCVideoEncoderSimulcast.h", ] if (!build_with_chromium) { @@ -1510,6 
+1530,9 @@ if (is_ios || is_mac) { "objc/components/video_codec/RTCVideoEncoderH264.h", "objc/components/video_frame_buffer/RTCCVPixelBuffer.h", "objc/helpers/RTCDispatcher.h", + # Added for Simulcast support + "objc/components/video_codec/RTCVideoEncoderFactorySimulcast.h", + "objc/api/video_codec/RTCVideoEncoderSimulcast.h", ] if (!build_with_chromium) { sources += [ diff --git a/sdk/android/BUILD.gn b/sdk/android/BUILD.gn index 5bf25ffc6d..b88d228839 100644 --- a/sdk/android/BUILD.gn +++ b/sdk/android/BUILD.gn @@ -522,6 +522,8 @@ if (is_android) { sources = [ "api/org/webrtc/SoftwareVideoDecoderFactory.java", "api/org/webrtc/SoftwareVideoEncoderFactory.java", + "api/org/webrtc/SimulcastVideoEncoder.java", + "api/org/webrtc/SimulcastVideoEncoderFactory.java", ] deps = [ @@ -884,6 +886,21 @@ if (current_os == "linux" || is_android) { ] } + rtc_library("simulcast_jni") { + visibility = [ "*" ] + allow_poison = [ "software_video_codecs" ] + sources = [ + "src/jni/simulcast_video_encoder.cc", + "src/jni/simulcast_video_encoder.h" + ] + deps = [ + ":base_jni", + ":video_jni", + ":native_api_codecs", + "../../media:rtc_simulcast_encoder_adapter" + ] + } + rtc_library("swcodecs_jni") { visibility = [ "*" ] allow_poison = [ "software_video_codecs" ] @@ -897,6 +914,7 @@ if (current_os == "linux" || is_android) { ":libvpx_vp8_jni", ":libvpx_vp9_jni", ":native_api_jni", + ":simulcast_jni", ":video_jni", "../../api/video_codecs:builtin_video_decoder_factory", "../../api/video_codecs:builtin_video_encoder_factory", diff --git a/sdk/android/api/org/webrtc/SimulcastVideoEncoder.java b/sdk/android/api/org/webrtc/SimulcastVideoEncoder.java new file mode 100644 index 0000000000..af6c8f61c7 --- /dev/null +++ b/sdk/android/api/org/webrtc/SimulcastVideoEncoder.java @@ -0,0 +1,28 @@ +package org.webrtc; + +public class SimulcastVideoEncoder extends WrappedNativeVideoEncoder { + + static native long nativeCreateEncoder(VideoEncoderFactory primary, VideoEncoderFactory fallback, VideoCodecInfo info); + + VideoEncoderFactory primary; + VideoEncoderFactory fallback; + VideoCodecInfo info; + + public SimulcastVideoEncoder(VideoEncoderFactory primary, VideoEncoderFactory fallback, VideoCodecInfo info) { + this.primary = primary; + this.fallback = fallback; + this.info = info; + } + + @Override + public long createNativeVideoEncoder() { + return nativeCreateEncoder(primary, fallback, info); + } + + @Override + public boolean isHardwareEncoder() { + return false; + } + +} + diff --git a/sdk/android/api/org/webrtc/SimulcastVideoEncoderFactory.java b/sdk/android/api/org/webrtc/SimulcastVideoEncoderFactory.java new file mode 100644 index 0000000000..97b4f32087 --- /dev/null +++ b/sdk/android/api/org/webrtc/SimulcastVideoEncoderFactory.java @@ -0,0 +1,43 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +package org.webrtc; + +import androidx.annotation.Nullable; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Arrays; + +public class SimulcastVideoEncoderFactory implements VideoEncoderFactory { + + VideoEncoderFactory primary; + VideoEncoderFactory fallback; + + public SimulcastVideoEncoderFactory(VideoEncoderFactory primary, VideoEncoderFactory fallback) { + this.primary = primary; + this.fallback = fallback; + } + + @Nullable + @Override + public VideoEncoder createEncoder(VideoCodecInfo info) { + return new SimulcastVideoEncoder(primary, fallback, info); + } + + @Override + public VideoCodecInfo[] getSupportedCodecs() { + List codecs = new ArrayList(); + codecs.addAll(Arrays.asList(primary.getSupportedCodecs())); + codecs.addAll(Arrays.asList(fallback.getSupportedCodecs())); + return codecs.toArray(new VideoCodecInfo[codecs.size()]); + } + +} diff --git a/sdk/android/src/java/org/webrtc/HardwareVideoEncoder.java b/sdk/android/src/java/org/webrtc/HardwareVideoEncoder.java index 94dfdf0728..6d0edcf972 100644 --- a/sdk/android/src/java/org/webrtc/HardwareVideoEncoder.java +++ b/sdk/android/src/java/org/webrtc/HardwareVideoEncoder.java @@ -45,8 +45,8 @@ class HardwareVideoEncoder implements VideoEncoder { private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000; private static final int DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US = 100000; - // Size of the input frames should be multiple of 16 for the H/W encoder. - private static final int REQUIRED_RESOLUTION_ALIGNMENT = 16; + // Size of the input frames should be multiple of 2 for the H/W encoder. + private static final int REQUIRED_RESOLUTION_ALIGNMENT = 2; /** * Keeps track of the number of output buffers that have been passed down the pipeline and not yet @@ -210,6 +210,11 @@ public VideoCodecStatus initEncode(Settings settings, Callback callback) { this.callback = callback; automaticResizeOn = settings.automaticResizeOn; + if (settings.width % REQUIRED_RESOLUTION_ALIGNMENT != 0 + || settings.height % REQUIRED_RESOLUTION_ALIGNMENT != 0) { + Logging.e(TAG, "MediaCodec requires 2x2 alignment."); + return VideoCodecStatus.ERR_SIZE; + } this.width = settings.width; this.height = settings.height; useSurfaceMode = canUseSurface(); @@ -533,6 +538,12 @@ private VideoCodecStatus resetCodec(int newWidth, int newHeight, boolean newUseS if (status != VideoCodecStatus.OK) { return status; } + + if (newWidth % REQUIRED_RESOLUTION_ALIGNMENT != 0 + || newHeight % REQUIRED_RESOLUTION_ALIGNMENT != 0) { + Logging.e(TAG, "MediaCodec requires 2x2 alignment."); + return VideoCodecStatus.ERR_SIZE; + } width = newWidth; height = newHeight; useSurfaceMode = newUseSurfaceMode; diff --git a/sdk/android/src/jni/simulcast_video_encoder.cc b/sdk/android/src/jni/simulcast_video_encoder.cc new file mode 100644 index 0000000000..da31fbbfa5 --- /dev/null +++ b/sdk/android/src/jni/simulcast_video_encoder.cc @@ -0,0 +1,34 @@ +#include + +#include "sdk/android/src/jni/jni_helpers.h" +#include "sdk/android/src/jni/video_encoder_factory_wrapper.h" +#include "sdk/android/src/jni/video_codec_info.h" +#include "sdk/android/native_api/codecs/wrapper.h" +#include "media/engine/simulcast_encoder_adapter.h" +#include "rtc_base/logging.h" + +using namespace webrtc; +using namespace webrtc::jni; + +#ifdef __cplusplus +extern "C" { +#endif + +// (VideoEncoderFactory primary, VideoEncoderFactory fallback, VideoCodecInfo info) +JNIEXPORT jlong JNICALL Java_org_webrtc_SimulcastVideoEncoder_nativeCreateEncoder(JNIEnv *env, jclass 
klass, jobject primary, jobject fallback, jobject info) { + RTC_LOG(LS_INFO) << "Create simulcast video encoder"; + JavaParamRef info_ref(info); + SdpVideoFormat format = VideoCodecInfoToSdpVideoFormat(env, info_ref); + + // TODO: The impact is minor, but this may leak, so it should be fixed in the future. + // https://github.com/shiguredo-webrtc-build/webrtc-build/pull/16#pullrequestreview-600675795 + return NativeToJavaPointer(std::make_unique( + JavaToNativeVideoEncoderFactory(env, primary).release(), + JavaToNativeVideoEncoderFactory(env, fallback).release(), + format).release()); +} + + +#ifdef __cplusplus +} +#endif diff --git a/sdk/android/src/jni/simulcast_video_encoder.h b/sdk/android/src/jni/simulcast_video_encoder.h new file mode 100644 index 0000000000..519be778e8 --- /dev/null +++ b/sdk/android/src/jni/simulcast_video_encoder.h @@ -0,0 +1,22 @@ +/* DO NOT EDIT THIS FILE - it is machine generated */ +#include +/* Header for class org_webrtc_SimulcastVideoEncoder */ + +#ifndef _Included_org_webrtc_SimulcastVideoEncoder +#define _Included_org_webrtc_SimulcastVideoEncoder +#ifdef __cplusplus +extern "C" { +#endif +/* + * Class: org_webrtc_SimulcastVideoEncoder + * Method: nativeCreateEncoder + * Signature: (Lorg/webrtc/VideoEncoderFactory;Lorg/webrtc/VideoEncoderFactory;Lorg/webrtc/VideoCodecInfo;)J + */ + +JNIEXPORT jlong JNICALL Java_org_webrtc_SimulcastVideoEncoder_nativeCreateEncoder + (JNIEnv *, jclass, jobject, jobject, jobject); + +#ifdef __cplusplus +} +#endif +#endif diff --git a/sdk/objc/api/video_codec/RTCVideoEncoderSimulcast.h b/sdk/objc/api/video_codec/RTCVideoEncoderSimulcast.h new file mode 100644 index 0000000000..4f1b55c713 --- /dev/null +++ b/sdk/objc/api/video_codec/RTCVideoEncoderSimulcast.h @@ -0,0 +1,13 @@ +#import "RTCMacros.h" +#import "RTCVideoEncoder.h" +#import "RTCVideoEncoderFactory.h" +#import "RTCVideoCodecInfo.h" + +RTC_OBJC_EXPORT +@interface RTC_OBJC_TYPE (RTCVideoEncoderSimulcast) : NSObject + ++ (id)simulcastEncoderWithPrimary:(id)primary + fallback:(id)fallback + videoCodecInfo:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)videoCodecInfo; + +@end diff --git a/sdk/objc/api/video_codec/RTCVideoEncoderSimulcast.mm b/sdk/objc/api/video_codec/RTCVideoEncoderSimulcast.mm new file mode 100644 index 0000000000..c7d5e2939c --- /dev/null +++ b/sdk/objc/api/video_codec/RTCVideoEncoderSimulcast.mm @@ -0,0 +1,26 @@ +#import + +#import "RTCMacros.h" +#import "RTCVideoEncoderSimulcast.h" +#import "RTCWrappedNativeVideoEncoder.h" +#import "api/peerconnection/RTCVideoCodecInfo+Private.h" + +#include "native/api/video_encoder_factory.h" +#include "media/engine/simulcast_encoder_adapter.h" + +@implementation RTC_OBJC_TYPE (RTCVideoEncoderSimulcast) + ++ (id)simulcastEncoderWithPrimary:(id)primary + fallback:(id)fallback + videoCodecInfo:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)videoCodecInfo { + auto nativePrimary = webrtc::ObjCToNativeVideoEncoderFactory(primary); + auto nativeFallback = webrtc::ObjCToNativeVideoEncoderFactory(fallback); + auto nativeFormat = [videoCodecInfo nativeSdpVideoFormat]; + return [[RTC_OBJC_TYPE(RTCWrappedNativeVideoEncoder) alloc] + initWithNativeEncoder: std::make_unique( + nativePrimary.release(), + nativeFallback.release(), + std::move(nativeFormat))]; +} + +@end diff --git a/sdk/objc/components/video_codec/RTCVideoEncoderFactorySimulcast.h b/sdk/objc/components/video_codec/RTCVideoEncoderFactorySimulcast.h new file mode 100644 index 0000000000..4070af22e4 --- /dev/null +++ b/sdk/objc/components/video_codec/RTCVideoEncoderFactorySimulcast.h @@ -0,0 +1,16 @@ +#import + +#import "RTCMacros.h" 
+#import "RTCVideoEncoderFactory.h" + +NS_ASSUME_NONNULL_BEGIN + +RTC_OBJC_EXPORT +@interface RTC_OBJC_TYPE (RTCVideoEncoderFactorySimulcast) : NSObject + +- (instancetype)initWithPrimary:(id)primary + fallback:(id)fallback; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/components/video_codec/RTCVideoEncoderFactorySimulcast.mm b/sdk/objc/components/video_codec/RTCVideoEncoderFactorySimulcast.mm new file mode 100644 index 0000000000..e2ca5867c0 --- /dev/null +++ b/sdk/objc/components/video_codec/RTCVideoEncoderFactorySimulcast.mm @@ -0,0 +1,39 @@ +#import + +#import "RTCMacros.h" +#import "RTCVideoCodecInfo.h" +#import "RTCVideoEncoderFactorySimulcast.h" +#import "api/video_codec/RTCVideoEncoderSimulcast.h" + +@interface RTC_OBJC_TYPE (RTCVideoEncoderFactorySimulcast) () + +@property id primary; +@property id fallback; + +@end + + +@implementation RTC_OBJC_TYPE (RTCVideoEncoderFactorySimulcast) + +@synthesize primary = _primary; +@synthesize fallback = _fallback; + +- (instancetype)initWithPrimary:(id)primary + fallback:(id)fallback { + if (self = [super init]) { + _primary = primary; + _fallback = fallback; + } + return self; +} + +- (nullable id)createEncoder: (RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info { + return [RTCVideoEncoderSimulcast simulcastEncoderWithPrimary: _primary fallback: _fallback videoCodecInfo: info]; +} + +- (NSArray *)supportedCodecs { + return [[_primary supportedCodecs] arrayByAddingObjectsFromArray: [_fallback supportedCodecs]]; +} + + +@end diff --git a/sdk/objc/native/src/audio/audio_device_ios.mm b/sdk/objc/native/src/audio/audio_device_ios.mm index 2021ec314c..1689aa39e1 100644 --- a/sdk/objc/native/src/audio/audio_device_ios.mm +++ b/sdk/objc/native/src/audio/audio_device_ios.mm @@ -61,16 +61,6 @@ const UInt16 kFixedPlayoutDelayEstimate = 30; const UInt16 kFixedRecordDelayEstimate = 30; -enum AudioDeviceMessageType : uint32_t { - kMessageTypeInterruptionBegin, - kMessageTypeInterruptionEnd, - kMessageTypeValidRouteChange, - kMessageTypeCanPlayOrRecordChange, - kMessageTypePlayoutGlitchDetected, - kMessageOutputVolumeChange, - kMessageTypeAudioWillRecord, -}; - using ios::CheckAndLogError; #if !defined(NDEBUG) From 61fc7d615ac87d67762a94b4c700aa4c33f79a84 Mon Sep 17 00:00:00 2001 From: davidliu Date: Tue, 10 May 2022 21:31:59 +0900 Subject: [PATCH 08/42] Android improvements. Start/Stop receiving stream method for VideoTrack (#25) Properly remove observer upon deconstruction (#26) feat: Expose setCodecPreferences/getCapabilities for android. (#61) fix: add WrappedVideoDecoderFactory.java. 
(#74) Exposing Adapter types in PeerConnectionFactory (#78) Co-authored-by: davidliu Co-authored-by: Mohamed Risaldar UT <73091075+MohamedRisaldarTA@users.noreply.github.com> --- api/media_stream_interface.cc | 4 + api/media_stream_interface.h | 2 + media/base/media_channel.h | 2 + media/engine/webrtc_video_engine.cc | 29 ++++ media/engine/webrtc_video_engine.h | 5 + pc/media_stream_track_proxy.h | 2 + pc/video_rtp_receiver.cc | 41 +++++- pc/video_rtp_receiver.h | 11 +- pc/video_track.cc | 13 ++ pc/video_track.h | 4 + sdk/android/BUILD.gn | 5 + .../api/org/webrtc/PeerConnectionFactory.java | 27 +++- .../api/org/webrtc/RtpCapabilities.java | 131 ++++++++++++++++++ .../api/org/webrtc/RtpTransceiver.java | 6 + sdk/android/api/org/webrtc/VideoTrack.java | 20 +++ .../webrtc/WrappedVideoDecoderFactory.java | 75 ++++++++++ .../src/jni/pc/peer_connection_factory.cc | 18 +++ sdk/android/src/jni/pc/rtp_capabilities.cc | 114 +++++++++++++++ sdk/android/src/jni/pc/rtp_capabilities.h | 41 ++++++ sdk/android/src/jni/pc/rtp_transceiver.cc | 11 ++ sdk/android/src/jni/video_track.cc | 11 ++ sdk/objc/api/peerconnection/RTCVideoTrack.h | 3 + sdk/objc/api/peerconnection/RTCVideoTrack.mm | 8 ++ 23 files changed, 574 insertions(+), 9 deletions(-) create mode 100644 sdk/android/api/org/webrtc/RtpCapabilities.java create mode 100644 sdk/android/api/org/webrtc/WrappedVideoDecoderFactory.java create mode 100644 sdk/android/src/jni/pc/rtp_capabilities.cc create mode 100644 sdk/android/src/jni/pc/rtp_capabilities.h diff --git a/api/media_stream_interface.cc b/api/media_stream_interface.cc index e07907917b..5362522262 100644 --- a/api/media_stream_interface.cc +++ b/api/media_stream_interface.cc @@ -18,6 +18,10 @@ const char* const MediaStreamTrackInterface::kVideoKind = const char* const MediaStreamTrackInterface::kAudioKind = cricket::kMediaTypeAudio; +bool VideoTrackInterface::should_receive() const { + return true; +} + VideoTrackInterface::ContentHint VideoTrackInterface::content_hint() const { return ContentHint::kNone; } diff --git a/api/media_stream_interface.h b/api/media_stream_interface.h index 9d336739e4..6d9b4aa6ca 100644 --- a/api/media_stream_interface.h +++ b/api/media_stream_interface.h @@ -188,6 +188,8 @@ class RTC_EXPORT VideoTrackInterface virtual VideoTrackSourceInterface* GetSource() const = 0; + virtual void set_should_receive(bool should_receive) {} + virtual bool should_receive() const; virtual ContentHint content_hint() const; virtual void set_content_hint(ContentHint hint) {} diff --git a/media/base/media_channel.h b/media/base/media_channel.h index 378042f1b2..f66ee7769a 100644 --- a/media/base/media_channel.h +++ b/media/base/media_channel.h @@ -997,6 +997,8 @@ class VideoMediaReceiveChannelInterface : public MediaReceiveChannelInterface { bool nack_enabled, webrtc::RtcpMode rtcp_mode, absl::optional rtx_time) = 0; + virtual void StartReceive(uint32_t ssrc) {} + virtual void StopReceive(uint32_t ssrc) {} }; } // namespace cricket diff --git a/media/engine/webrtc_video_engine.cc b/media/engine/webrtc_video_engine.cc index 83581bf9fd..8a63ffcbb3 100644 --- a/media/engine/webrtc_video_engine.cc +++ b/media/engine/webrtc_video_engine.cc @@ -928,6 +928,24 @@ void WebRtcVideoChannel::RequestEncoderSwitch( } } +void WebRtcVideoChannel::StartReceive(uint32_t ssrc) { + RTC_DCHECK_RUN_ON(&thread_checker_); + WebRtcVideoReceiveStream* stream = FindReceiveStream(ssrc); + if(!stream) { + return; + } + stream->StartStream(); +} + +void WebRtcVideoChannel::StopReceive(uint32_t ssrc) { + 
RTC_DCHECK_RUN_ON(&thread_checker_); + WebRtcVideoReceiveStream* stream = FindReceiveStream(ssrc); + if(!stream) { + return; + } + stream->StopStream(); +} + bool WebRtcVideoChannel::ApplyChangedParams( const ChangedSendParameters& changed_params) { RTC_DCHECK_RUN_ON(&thread_checker_); @@ -3182,6 +3200,17 @@ void WebRtcVideoChannel::WebRtcVideoReceiveStream::SetRecvParameters( } } +void WebRtcVideoChannel::WebRtcVideoReceiveStream::StartStream(){ + if (stream_) { + stream_->Start(); + } +} +void WebRtcVideoChannel::WebRtcVideoReceiveStream::StopStream(){ + if (stream_) { + stream_->Stop(); + } +} + void WebRtcVideoChannel::WebRtcVideoReceiveStream::RecreateReceiveStream() { RTC_DCHECK(stream_); absl::optional base_minimum_playout_delay_ms; diff --git a/media/engine/webrtc_video_engine.h b/media/engine/webrtc_video_engine.h index 2cdd05ffd6..a3e433f002 100644 --- a/media/engine/webrtc_video_engine.h +++ b/media/engine/webrtc_video_engine.h @@ -270,6 +270,8 @@ class WebRtcVideoChannel : public VideoMediaChannel, webrtc::RtcpMode rtcp_mode, absl::optional rtx_time) override; + void StartReceive(uint32_t ssrc) override; + void StopReceive(uint32_t ssrc) override; private: class WebRtcVideoReceiveStream; @@ -537,6 +539,9 @@ class WebRtcVideoChannel : public VideoMediaChannel, void SetDepacketizerToDecoderFrameTransformer( rtc::scoped_refptr frame_transformer); + + void StartStream(); + void StopStream(); void SetLocalSsrc(uint32_t local_ssrc); void UpdateRtxSsrc(uint32_t ssrc); diff --git a/pc/media_stream_track_proxy.h b/pc/media_stream_track_proxy.h index 2af3aedb22..fab23d17ec 100644 --- a/pc/media_stream_track_proxy.h +++ b/pc/media_stream_track_proxy.h @@ -55,6 +55,8 @@ PROXY_SECONDARY_METHOD2(void, PROXY_SECONDARY_METHOD1(void, RemoveSink, rtc::VideoSinkInterface*) PROXY_SECONDARY_METHOD0(void, RequestRefreshFrame) BYPASS_PROXY_CONSTMETHOD0(VideoTrackSourceInterface*, GetSource) +PROXY_CONSTMETHOD0(bool, should_receive) +PROXY_METHOD1(void, set_should_receive, bool) PROXY_METHOD1(void, RegisterObserver, ObserverInterface*) PROXY_METHOD1(void, UnregisterObserver, ObserverInterface*) diff --git a/pc/video_rtp_receiver.cc b/pc/video_rtp_receiver.cc index 8a2e65c162..63a2d72879 100644 --- a/pc/video_rtp_receiver.cc +++ b/pc/video_rtp_receiver.cc @@ -41,15 +41,20 @@ VideoRtpReceiver::VideoRtpReceiver( rtc::Thread::Current(), worker_thread, VideoTrack::Create(receiver_id, source_, worker_thread))), - attachment_id_(GenerateUniqueId()) { + cached_track_should_receive_(track_->should_receive()), + attachment_id_(GenerateUniqueId()), + worker_thread_safety_(PendingTaskSafetyFlag::CreateDetachedInactive()) { RTC_DCHECK(worker_thread_); SetStreams(streams); + track_->RegisterObserver(this); RTC_DCHECK_EQ(source_->state(), MediaSourceInterface::kInitializing); } VideoRtpReceiver::~VideoRtpReceiver() { RTC_DCHECK_RUN_ON(&signaling_thread_checker_); RTC_DCHECK(!media_channel_); + + track_->UnregisterObserver(this); } std::vector VideoRtpReceiver::stream_ids() const { @@ -114,6 +119,39 @@ void VideoRtpReceiver::Stop() { track_->internal()->set_ended(); } +void VideoRtpReceiver::OnChanged() { + RTC_DCHECK_RUN_ON(&signaling_thread_checker_); + if (cached_track_should_receive_ != track_->should_receive()) { + cached_track_should_receive_ = track_->should_receive(); + worker_thread_->PostTask( + [this, receive = cached_track_should_receive_]() { + RTC_DCHECK_RUN_ON(worker_thread_); + if(receive) { + StartMediaChannel(); + } else { + StopMediaChannel(); + } + }); + } +} + +void 
VideoRtpReceiver::StartMediaChannel() { + RTC_DCHECK_RUN_ON(worker_thread_); + if (!media_channel_) { + return; + } + media_channel_->StartReceive(signaled_ssrc_.value_or(0)); + OnGenerateKeyFrame(); +} + +void VideoRtpReceiver::StopMediaChannel() { + RTC_DCHECK_RUN_ON(worker_thread_); + if (!media_channel_) { + return; + } + media_channel_->StopReceive(signaled_ssrc_.value_or(0)); +} + void VideoRtpReceiver::RestartMediaChannel(absl::optional ssrc) { RTC_DCHECK_RUN_ON(&signaling_thread_checker_); MediaSourceInterface::SourceState state = source_->state(); @@ -209,6 +247,7 @@ void VideoRtpReceiver::set_transport( void VideoRtpReceiver::SetStreams( const std::vector>& streams) { RTC_DCHECK_RUN_ON(&signaling_thread_checker_); + // Remove remote track from any streams that are going away. for (const auto& existing_stream : streams_) { bool removed = true; diff --git a/pc/video_rtp_receiver.h b/pc/video_rtp_receiver.h index ef88016052..491efe2931 100644 --- a/pc/video_rtp_receiver.h +++ b/pc/video_rtp_receiver.h @@ -42,7 +42,8 @@ namespace webrtc { -class VideoRtpReceiver : public RtpReceiverInternal { +class VideoRtpReceiver : public RtpReceiverInternal, + public ObserverInterface { public: // An SSRC of 0 will create a receiver that will match the first SSRC it // sees. Must be called on signaling thread. @@ -60,6 +61,9 @@ class VideoRtpReceiver : public RtpReceiverInternal { rtc::scoped_refptr video_track() const { return track_; } + // ObserverInterface implementation + void OnChanged() override; + // RtpReceiverInterface implementation rtc::scoped_refptr track() const override { return track_; @@ -115,6 +119,8 @@ class VideoRtpReceiver : public RtpReceiverInternal { cricket::MediaReceiveChannelInterface* media_channel); private: + void StartMediaChannel(); + void StopMediaChannel(); void RestartMediaChannel(absl::optional ssrc) RTC_RUN_ON(&signaling_thread_checker_); void RestartMediaChannel_w(absl::optional ssrc, @@ -162,6 +168,8 @@ class VideoRtpReceiver : public RtpReceiverInternal { RTC_GUARDED_BY(&signaling_thread_checker_) = nullptr; bool received_first_packet_ RTC_GUARDED_BY(&signaling_thread_checker_) = false; + + bool cached_track_should_receive_ RTC_GUARDED_BY(&signaling_thread_checker_); const int attachment_id_; rtc::scoped_refptr frame_decryptor_ RTC_GUARDED_BY(worker_thread_); @@ -177,6 +185,7 @@ class VideoRtpReceiver : public RtpReceiverInternal { // or switched. 
bool saved_generate_keyframe_ RTC_GUARDED_BY(worker_thread_) = false; bool saved_encoded_sink_enabled_ RTC_GUARDED_BY(worker_thread_) = false; + const rtc::scoped_refptr worker_thread_safety_; }; } // namespace webrtc diff --git a/pc/video_track.cc b/pc/video_track.cc index 0bf8687af3..8922cdaf1f 100644 --- a/pc/video_track.cc +++ b/pc/video_track.cc @@ -76,6 +76,19 @@ VideoTrackSourceInterface* VideoTrack::GetSourceInternal() const { return video_source_->internal(); } +void VideoTrack::set_should_receive(bool receive) { + RTC_DCHECK_RUN_ON(&signaling_thread_); + if (should_receive_ == receive) + return; + should_receive_ = receive; + Notifier::FireOnChanged(); +} + +bool VideoTrack::should_receive() const { + RTC_DCHECK_RUN_ON(&signaling_thread_); + return should_receive_; +} + VideoTrackInterface::ContentHint VideoTrack::content_hint() const { RTC_DCHECK_RUN_ON(&signaling_thread_); return content_hint_; diff --git a/pc/video_track.h b/pc/video_track.h index 13a51c454b..b56c64ef20 100644 --- a/pc/video_track.h +++ b/pc/video_track.h @@ -48,6 +48,9 @@ class VideoTrack : public MediaStreamTrack, void RequestRefreshFrame() override; VideoTrackSourceInterface* GetSource() const override; + void set_should_receive(bool should_receive) override; + bool should_receive() const override; + ContentHint content_hint() const override; void set_content_hint(ContentHint hint) override; bool set_enabled(bool enable) override; @@ -81,6 +84,7 @@ class VideoTrack : public MediaStreamTrack, // be queried without blocking on the worker thread by callers that don't // use an api proxy to call the `enabled()` method. bool enabled_w_ RTC_GUARDED_BY(worker_thread_) = true; + bool should_receive_ RTC_GUARDED_BY(signaling_thread_) = true; }; } // namespace webrtc diff --git a/sdk/android/BUILD.gn b/sdk/android/BUILD.gn index b88d228839..73cfe51730 100644 --- a/sdk/android/BUILD.gn +++ b/sdk/android/BUILD.gn @@ -287,6 +287,7 @@ if (is_android) { "api/org/webrtc/RTCStatsCollectorCallback.java", "api/org/webrtc/RTCStatsReport.java", "api/org/webrtc/RtcCertificatePem.java", + "api/org/webrtc/RtpCapabilities.java", "api/org/webrtc/RtpParameters.java", "api/org/webrtc/RtpReceiver.java", "api/org/webrtc/RtpSender.java", @@ -359,6 +360,7 @@ if (is_android) { sources = [ "api/org/webrtc/DefaultVideoDecoderFactory.java", "api/org/webrtc/DefaultVideoEncoderFactory.java", + "api/org/webrtc/WrappedVideoDecoderFactory.java", ] deps = [ @@ -736,6 +738,8 @@ if (current_os == "linux" || is_android) { "src/jni/pc/rtc_certificate.h", "src/jni/pc/rtc_stats_collector_callback_wrapper.cc", "src/jni/pc/rtc_stats_collector_callback_wrapper.h", + "src/jni/pc/rtp_capabilities.cc", + "src/jni/pc/rtp_capabilities.h", "src/jni/pc/rtp_parameters.cc", "src/jni/pc/rtp_parameters.h", "src/jni/pc/rtp_receiver.cc", @@ -1409,6 +1413,7 @@ if (current_os == "linux" || is_android) { "api/org/webrtc/RTCStatsCollectorCallback.java", "api/org/webrtc/RTCStatsReport.java", "api/org/webrtc/RtcCertificatePem.java", + "api/org/webrtc/RtpCapabilities.java", "api/org/webrtc/RtpParameters.java", "api/org/webrtc/RtpReceiver.java", "api/org/webrtc/RtpSender.java", diff --git a/sdk/android/api/org/webrtc/PeerConnectionFactory.java b/sdk/android/api/org/webrtc/PeerConnectionFactory.java index ca67b3afc1..1817d41beb 100644 --- a/sdk/android/api/org/webrtc/PeerConnectionFactory.java +++ b/sdk/android/api/org/webrtc/PeerConnectionFactory.java @@ -18,6 +18,7 @@ import org.webrtc.PeerConnection; import org.webrtc.audio.AudioDeviceModule; import 
org.webrtc.audio.JavaAudioDeviceModule; +import org.webrtc.RtpCapabilities; /** * Java wrapper for a C++ PeerConnectionFactoryInterface. Main entry point to @@ -134,13 +135,13 @@ public static class Options { // Keep in sync with webrtc/rtc_base/network.h! // // These bit fields are defined for `networkIgnoreMask` below. - static final int ADAPTER_TYPE_UNKNOWN = 0; - static final int ADAPTER_TYPE_ETHERNET = 1 << 0; - static final int ADAPTER_TYPE_WIFI = 1 << 1; - static final int ADAPTER_TYPE_CELLULAR = 1 << 2; - static final int ADAPTER_TYPE_VPN = 1 << 3; - static final int ADAPTER_TYPE_LOOPBACK = 1 << 4; - static final int ADAPTER_TYPE_ANY = 1 << 5; + public static final int ADAPTER_TYPE_UNKNOWN = 0; + public static final int ADAPTER_TYPE_ETHERNET = 1 << 0; + public static final int ADAPTER_TYPE_WIFI = 1 << 1; + public static final int ADAPTER_TYPE_CELLULAR = 1 << 2; + public static final int ADAPTER_TYPE_VPN = 1 << 3; + public static final int ADAPTER_TYPE_LOOPBACK = 1 << 4; + public static final int ADAPTER_TYPE_ANY = 1 << 5; public int networkIgnoreMask; public boolean disableEncryption; @@ -471,6 +472,16 @@ public AudioTrack createAudioTrack(String id, AudioSource source) { return new AudioTrack(nativeCreateAudioTrack(nativeFactory, id, source.getNativeAudioSource())); } + public RtpCapabilities getRtpReceiverCapabilities(MediaStreamTrack.MediaType mediaType) { + checkPeerConnectionFactoryExists(); + return nativeGetRtpReceiverCapabilities(nativeFactory, mediaType); + } + + public RtpCapabilities getRtpSenderCapabilities(MediaStreamTrack.MediaType mediaType) { + checkPeerConnectionFactoryExists(); + return nativeGetRtpSenderCapabilities(nativeFactory, mediaType); + } + // Starts recording an AEC dump. Ownership of the file is transfered to the // native code. If an AEC dump is already in progress, it will be stopped and // a new one will start using the provided file. @@ -615,4 +626,6 @@ private static native boolean nativeStartAecDump( private static native void nativeInjectLoggable(JNILogging jniLogging, int severity); private static native void nativeDeleteLoggable(); private static native void nativePrintStackTrace(int tid); + private static native RtpCapabilities nativeGetRtpSenderCapabilities(long factory, MediaStreamTrack.MediaType mediaType); + private static native RtpCapabilities nativeGetRtpReceiverCapabilities(long factory, MediaStreamTrack.MediaType mediaType); } diff --git a/sdk/android/api/org/webrtc/RtpCapabilities.java b/sdk/android/api/org/webrtc/RtpCapabilities.java new file mode 100644 index 0000000000..612acb8cf6 --- /dev/null +++ b/sdk/android/api/org/webrtc/RtpCapabilities.java @@ -0,0 +1,131 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.webrtc; + +import androidx.annotation.Nullable; +import java.util.List; +import java.util.Map; +import org.webrtc.MediaStreamTrack; + +public class RtpCapabilities { + public static class CodecCapability { + public int preferredPayloadType; + // Name used to identify the codec. Equivalent to MIME subtype. + public String name; + // The media type of this codec. Equivalent to MIME top-level type. + public MediaStreamTrack.MediaType kind; + // Clock rate in Hertz. + public Integer clockRate; + // The number of audio channels used. Set to null for video codecs. + public Integer numChannels; + // The "format specific parameters" field from the "a=fmtp" line in the SDP + public Map parameters; + // The MIME type of the codec. This is a convenience field. + public String mimeType; + + public CodecCapability() {} + + @CalledByNative("CodecCapability") + CodecCapability(int preferredPayloadType, String name, MediaStreamTrack.MediaType kind, + Integer clockRate, Integer numChannels, String mimeType, Map parameters) { + this.preferredPayloadType = preferredPayloadType; + this.name = name; + this.kind = kind; + this.clockRate = clockRate; + this.numChannels = numChannels; + this.parameters = parameters; + this.mimeType = mimeType; + } + + @CalledByNative("CodecCapability") + int getPreferredPayloadType() { + return preferredPayloadType; + } + + @CalledByNative("CodecCapability") + String getName() { + return name; + } + + @CalledByNative("CodecCapability") + MediaStreamTrack.MediaType getKind() { + return kind; + } + + @CalledByNative("CodecCapability") + Integer getClockRate() { + return clockRate; + } + + @CalledByNative("CodecCapability") + Integer getNumChannels() { + return numChannels; + } + + @CalledByNative("CodecCapability") + Map getParameters() { + return parameters; + } + } + + public static class HeaderExtensionCapability { + private final String uri; + private final int preferredId; + private final boolean preferredEncrypted; + + @CalledByNative("HeaderExtensionCapability") + HeaderExtensionCapability(String uri, int preferredId, boolean preferredEncrypted) { + this.uri = uri; + this.preferredId = preferredId; + this.preferredEncrypted = preferredEncrypted; + } + + @CalledByNative("HeaderExtensionCapability") + public String getUri() { + return uri; + } + + @CalledByNative("HeaderExtensionCapability") + public int getPreferredId() { + return preferredId; + } + + @CalledByNative("HeaderExtensionCapability") + public boolean getPreferredEncrypted() { + return preferredEncrypted; + } + } + + public List codecs; + public List headerExtensions; + + @CalledByNative + RtpCapabilities(List codecs, List headerExtensions) { + this.headerExtensions = headerExtensions; + this.codecs = codecs; + } + + @CalledByNative + public List getHeaderExtensions() { + return headerExtensions; + } + + @CalledByNative + List getCodecs() { + return codecs; + } +} \ No newline at end of file diff --git a/sdk/android/api/org/webrtc/RtpTransceiver.java b/sdk/android/api/org/webrtc/RtpTransceiver.java index 1102bd7eb1..b60ac6450a 100644 --- a/sdk/android/api/org/webrtc/RtpTransceiver.java +++ b/sdk/android/api/org/webrtc/RtpTransceiver.java @@ -215,6 +215,11 @@ public void stop() { nativeStopInternal(nativeRtpTransceiver); } + public void setCodecPreferences(List codecs) { + checkRtpTransceiverExists(); + nativeSetCodecPreferences(nativeRtpTransceiver, codecs); + } + /** * The StopInternal method stops the RtpTransceiver, like Stop, but goes * immediately to Stopped state. 
@@ -263,4 +268,5 @@ private void checkRtpTransceiverExists() { private static native void nativeStopStandard(long rtpTransceiver); private static native boolean nativeSetDirection( long rtpTransceiver, RtpTransceiverDirection rtpTransceiverDirection); + private static native void nativeSetCodecPreferences(long rtpTransceiver, List codecs); } diff --git a/sdk/android/api/org/webrtc/VideoTrack.java b/sdk/android/api/org/webrtc/VideoTrack.java index 5593d424f3..bd5022f39a 100644 --- a/sdk/android/api/org/webrtc/VideoTrack.java +++ b/sdk/android/api/org/webrtc/VideoTrack.java @@ -54,6 +54,24 @@ public void removeSink(VideoSink sink) { } } + /** + * For a remote video track, starts/stops receiving the video stream. + * + * If this is a local video track, this is a no-op. + */ + public void setShouldReceive(boolean shouldReceive){ + nativeSetShouldReceive(getNativeMediaStreamTrack(), shouldReceive); + } + + /** + * The current receive status for a remote video track. + * + * This has no meaning for a local video track. + */ + public boolean shouldReceive(){ + return nativeGetShouldReceive(getNativeMediaStreamTrack()); + } + @Override public void dispose() { for (long nativeSink : sinks.values()) { @@ -73,4 +91,6 @@ long getNativeVideoTrack() { private static native void nativeRemoveSink(long track, long nativeSink); private static native long nativeWrapSink(VideoSink sink); private static native void nativeFreeSink(long sink); + private static native void nativeSetShouldReceive(long track, boolean shouldReceive); + private static native boolean nativeGetShouldReceive(long track); } diff --git a/sdk/android/api/org/webrtc/WrappedVideoDecoderFactory.java b/sdk/android/api/org/webrtc/WrappedVideoDecoderFactory.java new file mode 100644 index 0000000000..a7acd37289 --- /dev/null +++ b/sdk/android/api/org/webrtc/WrappedVideoDecoderFactory.java @@ -0,0 +1,75 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.webrtc; + +import android.media.MediaCodecInfo; +import androidx.annotation.Nullable; + +import java.util.Arrays; +import java.util.LinkedHashSet; + +public class WrappedVideoDecoderFactory implements VideoDecoderFactory { + public WrappedVideoDecoderFactory(@Nullable EglBase.Context eglContext) { + this.hardwareVideoDecoderFactory = new HardwareVideoDecoderFactory(eglContext); + this.platformSoftwareVideoDecoderFactory = new PlatformSoftwareVideoDecoderFactory(eglContext); + } + + private final VideoDecoderFactory hardwareVideoDecoderFactory; + private final VideoDecoderFactory hardwareVideoDecoderFactoryWithoutEglContext = new HardwareVideoDecoderFactory(null) ; + private final VideoDecoderFactory softwareVideoDecoderFactory = new SoftwareVideoDecoderFactory(); + @Nullable + private final VideoDecoderFactory platformSoftwareVideoDecoderFactory; + + @Override + public VideoDecoder createDecoder(VideoCodecInfo codecType) { + VideoDecoder softwareDecoder = this.softwareVideoDecoderFactory.createDecoder(codecType); + VideoDecoder hardwareDecoder = this.hardwareVideoDecoderFactory.createDecoder(codecType); + if (softwareDecoder == null && this.platformSoftwareVideoDecoderFactory != null) { + softwareDecoder = this.platformSoftwareVideoDecoderFactory.createDecoder(codecType); + } + + if(hardwareDecoder != null && disableSurfaceTextureFrame(hardwareDecoder.getImplementationName())) { + hardwareDecoder.release(); + hardwareDecoder = this.hardwareVideoDecoderFactoryWithoutEglContext.createDecoder(codecType); + } + + if (hardwareDecoder != null && softwareDecoder != null) { + return new VideoDecoderFallback(softwareDecoder, hardwareDecoder); + } else { + return hardwareDecoder != null ? hardwareDecoder : softwareDecoder; + } + } + + private boolean disableSurfaceTextureFrame(String name) { + if (name.startsWith("OMX.qcom.") || name.startsWith("OMX.hisi.")) { + return true; + } + return false; + } + + @Override + public VideoCodecInfo[] getSupportedCodecs() { + LinkedHashSet supportedCodecInfos = new LinkedHashSet(); + supportedCodecInfos.addAll(Arrays.asList(this.softwareVideoDecoderFactory.getSupportedCodecs())); + supportedCodecInfos.addAll(Arrays.asList(this.hardwareVideoDecoderFactory.getSupportedCodecs())); + if (this.platformSoftwareVideoDecoderFactory != null) { + supportedCodecInfos.addAll(Arrays.asList(this.platformSoftwareVideoDecoderFactory.getSupportedCodecs())); + } + + return (VideoCodecInfo[])supportedCodecInfos.toArray(new VideoCodecInfo[supportedCodecInfos.size()]); + } +} diff --git a/sdk/android/src/jni/pc/peer_connection_factory.cc b/sdk/android/src/jni/pc/peer_connection_factory.cc index 4c682089db..f4a1b16e89 100644 --- a/sdk/android/src/jni/pc/peer_connection_factory.cc +++ b/sdk/android/src/jni/pc/peer_connection_factory.cc @@ -39,7 +39,9 @@ #include "sdk/android/src/jni/logging/log_sink.h" #include "sdk/android/src/jni/pc/android_network_monitor.h" #include "sdk/android/src/jni/pc/audio.h" +#include "sdk/android/src/jni/pc/rtp_capabilities.h" #include "sdk/android/src/jni/pc/ice_candidate.h" +#include "sdk/android/src/jni/pc/media_stream_track.h" #include "sdk/android/src/jni/pc/owned_factory_and_threads.h" #include "sdk/android/src/jni/pc/peer_connection.h" #include "sdk/android/src/jni/pc/ssl_certificate_verifier_wrapper.h" @@ -393,6 +395,22 @@ jlong JNI_PeerConnectionFactory_CreateAudioTrack( return jlongFromPointer(track.release()); } +ScopedJavaLocalRef JNI_PeerConnectionFactory_GetRtpSenderCapabilities( + JNIEnv* jni, + jlong native_factory, + 
const JavaParamRef& media_type) { + auto factory = PeerConnectionFactoryFromJava(native_factory); + return NativeToJavaRtpCapabilities(jni, factory->GetRtpSenderCapabilities(JavaToNativeMediaType(jni, media_type))); +} + +ScopedJavaLocalRef JNI_PeerConnectionFactory_GetRtpReceiverCapabilities( + JNIEnv* jni, + jlong native_factory, + const JavaParamRef& media_type) { + auto factory = PeerConnectionFactoryFromJava(native_factory); + return NativeToJavaRtpCapabilities(jni, factory->GetRtpReceiverCapabilities(JavaToNativeMediaType(jni, media_type))); +} + static jboolean JNI_PeerConnectionFactory_StartAecDump( JNIEnv* jni, jlong native_factory, diff --git a/sdk/android/src/jni/pc/rtp_capabilities.cc b/sdk/android/src/jni/pc/rtp_capabilities.cc new file mode 100644 index 0000000000..fb71491ee1 --- /dev/null +++ b/sdk/android/src/jni/pc/rtp_capabilities.cc @@ -0,0 +1,114 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "sdk/android/src/jni/pc/rtp_capabilities.h" + +#include "sdk/android/generated_peerconnection_jni/RtpCapabilities_jni.h" +#include "sdk/android/native_api/jni/java_types.h" +#include "sdk/android/src/jni/jni_helpers.h" +#include "sdk/android/src/jni/pc/media_stream_track.h" + +namespace webrtc { +namespace jni { + +namespace { + +ScopedJavaLocalRef NativeToJavaRtpCodecParameter( + JNIEnv* env, + const RtpCodecCapability& codec) { + return Java_CodecCapability_Constructor(env, codec.preferred_payload_type.value(), + NativeToJavaString(env, codec.name), + NativeToJavaMediaType(env, codec.kind), + NativeToJavaInteger(env, codec.clock_rate), + NativeToJavaInteger(env, codec.num_channels), + NativeToJavaString(env, codec.mime_type()), + NativeToJavaStringMap(env, codec.parameters)); +} + +ScopedJavaLocalRef NativeToJavaRtpHeaderExtensionParameter( + JNIEnv* env, + const RtpHeaderExtensionCapability& extension) { + return Java_HeaderExtensionCapability_Constructor( + env, NativeToJavaString(env, extension.uri), extension.preferred_id.value(), + extension.preferred_encrypt); +} +} // namespace + +RtpCapabilities JavaToNativeRtpCapabilities(JNIEnv* jni, + const JavaRef& j_capabilities) { + RtpCapabilities capabilities; + + ScopedJavaLocalRef j_header_extensions = + Java_RtpCapabilities_getHeaderExtensions(jni, j_capabilities); + for (const JavaRef& j_header_extension : + Iterable(jni, j_header_extensions)) { + RtpHeaderExtensionCapability header_extension; + header_extension.uri = JavaToStdString( + jni, Java_HeaderExtensionCapability_getUri(jni, j_header_extension)); + header_extension.preferred_id = Java_HeaderExtensionCapability_getPreferredId(jni, j_header_extension); + header_extension.preferred_encrypt = + Java_HeaderExtensionCapability_getPreferredEncrypted(jni, j_header_extension); + capabilities.header_extensions.push_back(header_extension); + } + + // Convert codecs. 
+ ScopedJavaLocalRef j_codecs = + Java_RtpCapabilities_getCodecs(jni, j_capabilities); + for (const JavaRef& j_codec : Iterable(jni, j_codecs)) { + RtpCodecCapability codec; + codec.preferred_payload_type = Java_CodecCapability_getPreferredPayloadType(jni, j_codec); + codec.name = JavaToStdString(jni, Java_CodecCapability_getName(jni, j_codec)); + codec.kind = JavaToNativeMediaType(jni, Java_CodecCapability_getKind(jni, j_codec)); + codec.clock_rate = + JavaToNativeOptionalInt(jni, Java_CodecCapability_getClockRate(jni, j_codec)); + codec.num_channels = + JavaToNativeOptionalInt(jni, Java_CodecCapability_getNumChannels(jni, j_codec)); + auto parameters_map = + JavaToNativeStringMap(jni, Java_CodecCapability_getParameters(jni, j_codec)); + codec.parameters.insert(parameters_map.begin(), parameters_map.end()); + capabilities.codecs.push_back(codec); + } + return capabilities; +} + +ScopedJavaLocalRef NativeToJavaRtpCapabilities( + JNIEnv* env, + const RtpCapabilities& capabilities) { + return Java_RtpCapabilities_Constructor( + env, NativeToJavaList(env, capabilities.codecs, &NativeToJavaRtpCodecParameter), + NativeToJavaList(env, capabilities.header_extensions, + &NativeToJavaRtpHeaderExtensionParameter) + ); +} + +RtpCodecCapability JavaToNativeRtpCodecCapability(JNIEnv* jni, + const JavaRef& j_codec) { + RtpCodecCapability codec; + codec.preferred_payload_type = Java_CodecCapability_getPreferredPayloadType(jni, j_codec); + codec.name = JavaToStdString(jni, Java_CodecCapability_getName(jni, j_codec)); + codec.kind = JavaToNativeMediaType(jni, Java_CodecCapability_getKind(jni, j_codec)); + codec.clock_rate = + JavaToNativeOptionalInt(jni, Java_CodecCapability_getClockRate(jni, j_codec)); + codec.num_channels = + JavaToNativeOptionalInt(jni, Java_CodecCapability_getNumChannels(jni, j_codec)); + auto parameters_map = + JavaToNativeStringMap(jni, Java_CodecCapability_getParameters(jni, j_codec)); + codec.parameters.insert(parameters_map.begin(), parameters_map.end()); + return codec; +} + +} // namespace jni +} // namespace webrtc diff --git a/sdk/android/src/jni/pc/rtp_capabilities.h b/sdk/android/src/jni/pc/rtp_capabilities.h new file mode 100644 index 0000000000..4acf611c82 --- /dev/null +++ b/sdk/android/src/jni/pc/rtp_capabilities.h @@ -0,0 +1,41 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef SDK_ANDROID_SRC_JNI_PC_RTP_CAPABLILITES_H_ +#define SDK_ANDROID_SRC_JNI_PC_RTP_CAPABLILITES_H_ + +#include + +#include "api/rtp_parameters.h" +#include "sdk/android/native_api/jni/scoped_java_ref.h" + +namespace webrtc { +namespace jni { + +RtpCapabilities JavaToNativeRtpCapabilities(JNIEnv* jni, + const JavaRef& j_capabilities); + +ScopedJavaLocalRef NativeToJavaRtpCapabilities( + JNIEnv* jni, + const RtpCapabilities& capabilities); + +RtpCodecCapability JavaToNativeRtpCodecCapability(JNIEnv* jni, + const JavaRef& j_codec_capability); + +} // namespace jni +} // namespace webrtc + +#endif // SDK_ANDROID_SRC_JNI_PC_RTP_CAPABLILITES_H_ diff --git a/sdk/android/src/jni/pc/rtp_transceiver.cc b/sdk/android/src/jni/pc/rtp_transceiver.cc index 1d468461f1..34cba6ab5f 100644 --- a/sdk/android/src/jni/pc/rtp_transceiver.cc +++ b/sdk/android/src/jni/pc/rtp_transceiver.cc @@ -16,6 +16,7 @@ #include "sdk/android/native_api/jni/java_types.h" #include "sdk/android/src/jni/jni_helpers.h" #include "sdk/android/src/jni/pc/media_stream_track.h" +#include "sdk/android/src/jni/pc/rtp_capabilities.h" #include "sdk/android/src/jni/pc/rtp_parameters.h" #include "sdk/android/src/jni/pc/rtp_receiver.h" #include "sdk/android/src/jni/pc/rtp_sender.h" @@ -139,6 +140,16 @@ ScopedJavaLocalRef JNI_RtpTransceiver_CurrentDirection( : nullptr; } + +void JNI_RtpTransceiver_SetCodecPreferences(JNIEnv* jni, + jlong j_rtp_transceiver_pointer, + const JavaParamRef& j_codecs) { + std::vector codecs = + JavaListToNativeVector( + jni, j_codecs, &JavaToNativeRtpCodecCapability); + reinterpret_cast(j_rtp_transceiver_pointer)->SetCodecPreferences(codecs); +} + void JNI_RtpTransceiver_StopInternal(JNIEnv* jni, jlong j_rtp_transceiver_pointer) { reinterpret_cast(j_rtp_transceiver_pointer) diff --git a/sdk/android/src/jni/video_track.cc b/sdk/android/src/jni/video_track.cc index eb343ebdb3..2078359cbc 100644 --- a/sdk/android/src/jni/video_track.cc +++ b/sdk/android/src/jni/video_track.cc @@ -44,5 +44,16 @@ static void JNI_VideoTrack_FreeSink(JNIEnv* jni, jlong j_native_sink) { delete reinterpret_cast*>(j_native_sink); } +static void JNI_VideoTrack_SetShouldReceive(JNIEnv* jni, + jlong j_native_track, + jboolean should_receive) { + reinterpret_cast(j_native_track)->set_should_receive(should_receive); +} + +static jboolean JNI_VideoTrack_GetShouldReceive(JNIEnv* jni, + jlong j_native_track) { + return reinterpret_cast(j_native_track)->should_receive(); +} + } // namespace jni } // namespace webrtc diff --git a/sdk/objc/api/peerconnection/RTCVideoTrack.h b/sdk/objc/api/peerconnection/RTCVideoTrack.h index 5382b7169f..56d25c1568 100644 --- a/sdk/objc/api/peerconnection/RTCVideoTrack.h +++ b/sdk/objc/api/peerconnection/RTCVideoTrack.h @@ -25,6 +25,9 @@ RTC_OBJC_EXPORT /** The video source for this video track. */ @property(nonatomic, readonly) RTC_OBJC_TYPE(RTCVideoSource) *source; +/** The receive state, if this is a remote video track. */ +@property(nonatomic, assign) BOOL shouldReceive; + - (instancetype)init NS_UNAVAILABLE; /** Register a renderer that will render all frames received on this track. 
*/ diff --git a/sdk/objc/api/peerconnection/RTCVideoTrack.mm b/sdk/objc/api/peerconnection/RTCVideoTrack.mm index d3296f6279..df294d2f3e 100644 --- a/sdk/objc/api/peerconnection/RTCVideoTrack.mm +++ b/sdk/objc/api/peerconnection/RTCVideoTrack.mm @@ -70,6 +70,14 @@ - (void)dealloc { return _source; } +- (BOOL)shouldReceive { + return self.nativeVideoTrack->should_receive(); +} + +- (void)setShouldReceive:(BOOL)shouldReceive { + self.nativeVideoTrack->set_should_receive(shouldReceive); +} + - (void)addRenderer:(id)renderer { if (!_workerThread->IsCurrent()) { _workerThread->BlockingCall([renderer, self] { [self addRenderer:renderer]; }); From 0228e1d975df04b01b2d9a5e38d317c3bc4b41ce Mon Sep 17 00:00:00 2001 From: CloudWebRTC Date: Wed, 18 May 2022 13:04:36 +0800 Subject: [PATCH 09/42] Darwin improvements [Mac/iOS] feat: Add RTCYUVHelper for darwin. (#28) Cross-platform `RTCMTLVideoView` for both iOS / macOS (#40) rotationOverride should not be assign (#44) [ObjC] Expose properties / methods required for AV1 codec support (#60) Workaround: Render PixelBuffer in RTCMTLVideoView (#58) Improve iOS/macOS H264 encoder (#70) fix: fix video encoder not resuming correctly upon foregrounding (#75). Co-authored-by: Hiroshi Horie <548776+hiroshihorie@users.noreply.github.com> --- media/engine/webrtc_video_engine.cc | 17 +- sdk/BUILD.gn | 22 +- .../peerconnection/RTCPeerConnectionFactory.h | 8 + .../RTCPeerConnectionFactory.mm | 17 ++ .../RTCRtpCapabilities+Private.h | 33 +++ .../api/peerconnection/RTCRtpCapabilities.h | 40 +++ .../api/peerconnection/RTCRtpCapabilities.mm | 50 ++++ .../RTCRtpCodecCapability+Private.h | 33 +++ .../peerconnection/RTCRtpCodecCapability.h | 63 +++++ .../peerconnection/RTCRtpCodecCapability.mm | 138 +++++++++++ .../peerconnection/RTCRtpCodecParameters.h | 1 + .../peerconnection/RTCRtpCodecParameters.mm | 1 + .../peerconnection/RTCRtpEncodingParameters.h | 4 + .../RTCRtpEncodingParameters.mm | 7 + .../api/peerconnection/RTCRtpTransceiver.h | 4 + .../api/peerconnection/RTCRtpTransceiver.mm | 32 +++ .../renderer/metal/RTCMTLNSVideoView.h | 16 +- .../renderer/metal/RTCMTLNSVideoView.m | 122 ---------- .../renderer/metal/RTCMTLVideoView.h | 20 +- .../renderer/metal/RTCMTLVideoView.m | 68 ++++-- .../video_codec/RTCVideoEncoderH264.mm | 230 +++++++++++++----- sdk/objc/helpers/RTCYUVHelper.h | 118 +++++++++ sdk/objc/helpers/RTCYUVHelper.mm | 179 ++++++++++++++ video/video_stream_encoder.cc | 12 +- 24 files changed, 999 insertions(+), 236 deletions(-) create mode 100644 sdk/objc/api/peerconnection/RTCRtpCapabilities+Private.h create mode 100644 sdk/objc/api/peerconnection/RTCRtpCapabilities.h create mode 100644 sdk/objc/api/peerconnection/RTCRtpCapabilities.mm create mode 100644 sdk/objc/api/peerconnection/RTCRtpCodecCapability+Private.h create mode 100644 sdk/objc/api/peerconnection/RTCRtpCodecCapability.h create mode 100644 sdk/objc/api/peerconnection/RTCRtpCodecCapability.mm delete mode 100644 sdk/objc/components/renderer/metal/RTCMTLNSVideoView.m create mode 100644 sdk/objc/helpers/RTCYUVHelper.h create mode 100644 sdk/objc/helpers/RTCYUVHelper.mm diff --git a/media/engine/webrtc_video_engine.cc b/media/engine/webrtc_video_engine.cc index 8a63ffcbb3..14deb20eac 100644 --- a/media/engine/webrtc_video_engine.cc +++ b/media/engine/webrtc_video_engine.cc @@ -642,18 +642,15 @@ WebRtcVideoEngine::GetRtpHeaderExtensions() const { webrtc::RtpExtension::kVideoContentTypeUri, webrtc::RtpExtension::kVideoTimingUri, webrtc::RtpExtension::kColorSpaceUri, webrtc::RtpExtension::kMidUri, - 
webrtc::RtpExtension::kRidUri, webrtc::RtpExtension::kRepairedRidUri}) { + webrtc::RtpExtension::kRidUri, webrtc::RtpExtension::kRepairedRidUri, + // "WebRTC-DependencyDescriptorAdvertised" + webrtc::RtpExtension::kDependencyDescriptorUri}) { result.emplace_back(uri, id++, webrtc::RtpTransceiverDirection::kSendRecv); } result.emplace_back(webrtc::RtpExtension::kGenericFrameDescriptorUri00, id++, IsEnabled(trials_, "WebRTC-GenericDescriptorAdvertised") ? webrtc::RtpTransceiverDirection::kSendRecv : webrtc::RtpTransceiverDirection::kStopped); - result.emplace_back( - webrtc::RtpExtension::kDependencyDescriptorUri, id++, - IsEnabled(trials_, "WebRTC-DependencyDescriptorAdvertised") - ? webrtc::RtpTransceiverDirection::kSendRecv - : webrtc::RtpTransceiverDirection::kStopped); result.emplace_back( webrtc::RtpExtension::kVideoLayersAllocationUri, id++, @@ -931,7 +928,7 @@ void WebRtcVideoChannel::RequestEncoderSwitch( void WebRtcVideoChannel::StartReceive(uint32_t ssrc) { RTC_DCHECK_RUN_ON(&thread_checker_); WebRtcVideoReceiveStream* stream = FindReceiveStream(ssrc); - if(!stream) { + if (!stream) { return; } stream->StartStream(); @@ -940,7 +937,7 @@ void WebRtcVideoChannel::StartReceive(uint32_t ssrc) { void WebRtcVideoChannel::StopReceive(uint32_t ssrc) { RTC_DCHECK_RUN_ON(&thread_checker_); WebRtcVideoReceiveStream* stream = FindReceiveStream(ssrc); - if(!stream) { + if (!stream) { return; } stream->StopStream(); @@ -3200,12 +3197,12 @@ void WebRtcVideoChannel::WebRtcVideoReceiveStream::SetRecvParameters( } } -void WebRtcVideoChannel::WebRtcVideoReceiveStream::StartStream(){ +void WebRtcVideoChannel::WebRtcVideoReceiveStream::StartStream() { if (stream_) { stream_->Start(); } } -void WebRtcVideoChannel::WebRtcVideoReceiveStream::StopStream(){ +void WebRtcVideoChannel::WebRtcVideoReceiveStream::StopStream() { if (stream_) { stream_->Stop(); } diff --git a/sdk/BUILD.gn b/sdk/BUILD.gn index a3e47737ef..41de1bb2af 100644 --- a/sdk/BUILD.gn +++ b/sdk/BUILD.gn @@ -143,12 +143,15 @@ if (is_ios || is_mac) { "objc/helpers/RTCDispatcher+Private.h", "objc/helpers/RTCDispatcher.h", "objc/helpers/RTCDispatcher.m", + "objc/helpers/RTCYUVHelper.h", + "objc/helpers/RTCYUVHelper.mm", "objc/helpers/scoped_cftyperef.h", ] deps = [ ":base_objc", "../rtc_base:checks", + "//third_party/libyuv", ] absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] @@ -635,17 +638,13 @@ if (is_ios || is_mac) { "Metal.framework", "MetalKit.framework", ] - if (is_ios) { + if (is_ios || is_mac) { sources += [ "objc/components/renderer/metal/RTCMTLVideoView.h", "objc/components/renderer/metal/RTCMTLVideoView.m", ] } if (is_mac) { - sources += [ - "objc/components/renderer/metal/RTCMTLNSVideoView.h", - "objc/components/renderer/metal/RTCMTLNSVideoView.m", - ] frameworks += [ "AppKit.framework" ] } deps = [ @@ -1032,6 +1031,12 @@ if (is_ios || is_mac) { "objc/api/peerconnection/RTCRtcpParameters+Private.h", "objc/api/peerconnection/RTCRtcpParameters.h", "objc/api/peerconnection/RTCRtcpParameters.mm", + "objc/api/peerconnection/RTCRtpCapabilities.h", + "objc/api/peerconnection/RTCRtpCapabilities.mm", + "objc/api/peerconnection/RTCRtpCapabilities+Private.h", + "objc/api/peerconnection/RTCRtpCodecCapability.h", + "objc/api/peerconnection/RTCRtpCodecCapability.mm", + "objc/api/peerconnection/RTCRtpCodecCapability+Private.h", "objc/api/peerconnection/RTCRtpCodecParameters+Private.h", "objc/api/peerconnection/RTCRtpCodecParameters.h", "objc/api/peerconnection/RTCRtpCodecParameters.mm", @@ -1329,6 +1334,7 @@ if (is_ios || is_mac) { 
"objc/components/video_frame_buffer/RTCCVPixelBuffer.h", "objc/helpers/RTCCameraPreviewView.h", "objc/helpers/RTCDispatcher.h", + "objc/helpers/RTCYUVHelper.h", "objc/helpers/UIDevice+RTCDevice.h", "objc/api/peerconnection/RTCAudioDeviceModule.h", "objc/api/peerconnection/RTCIODevice.h", @@ -1352,6 +1358,8 @@ if (is_ios || is_mac) { "objc/api/peerconnection/RTCPeerConnectionFactory.h", "objc/api/peerconnection/RTCPeerConnectionFactoryOptions.h", "objc/api/peerconnection/RTCRtcpParameters.h", + "objc/api/peerconnection/RTCRtpCapabilities.h", + "objc/api/peerconnection/RTCRtpCodecCapability.h", "objc/api/peerconnection/RTCRtpCodecParameters.h", "objc/api/peerconnection/RTCRtpEncodingParameters.h", "objc/api/peerconnection/RTCRtpHeaderExtension.h", @@ -1474,6 +1482,8 @@ if (is_ios || is_mac) { "objc/api/peerconnection/RTCPeerConnectionFactory.h", "objc/api/peerconnection/RTCPeerConnectionFactoryOptions.h", "objc/api/peerconnection/RTCRtcpParameters.h", + "objc/api/peerconnection/RTCRtpCapabilities.h", + "objc/api/peerconnection/RTCRtpCodecCapability.h", "objc/api/peerconnection/RTCRtpCodecParameters.h", "objc/api/peerconnection/RTCRtpEncodingParameters.h", "objc/api/peerconnection/RTCRtpHeaderExtension.h", @@ -1517,6 +1527,7 @@ if (is_ios || is_mac) { "objc/base/RTCYUVPlanarBuffer.h", "objc/components/capturer/RTCCameraVideoCapturer.h", "objc/components/capturer/RTCFileVideoCapturer.h", + "objc/components/renderer/metal/RTCMTLVideoView.h", "objc/components/renderer/metal/RTCMTLNSVideoView.h", "objc/components/renderer/opengl/RTCNSGLVideoView.h", "objc/components/renderer/opengl/RTCVideoViewShading.h", @@ -1530,6 +1541,7 @@ if (is_ios || is_mac) { "objc/components/video_codec/RTCVideoEncoderH264.h", "objc/components/video_frame_buffer/RTCCVPixelBuffer.h", "objc/helpers/RTCDispatcher.h", + "objc/helpers/RTCYUVHelper.h", # Added for Simulcast support "objc/components/video_codec/RTCVideoEncoderFactorySimulcast.h", "objc/api/video_codec/RTCVideoEncoderSimulcast.h", diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h index 7757323acd..d74b99bd95 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h @@ -24,6 +24,10 @@ NS_ASSUME_NONNULL_BEGIN @class RTC_OBJC_TYPE(RTCVideoTrack); @class RTC_OBJC_TYPE(RTCPeerConnectionFactoryOptions); @class RTC_OBJC_TYPE(RTCAudioDeviceModule); +@class RTC_OBJC_TYPE(RTCRtpCapabilities); + +typedef NS_ENUM(NSInteger, RTCRtpMediaType); + @protocol RTC_OBJC_TYPE (RTCPeerConnectionDelegate); @protocol RTC_OBJC_TYPE @@ -61,6 +65,10 @@ RTC_OBJC_EXPORT @property(nonatomic, readonly) RTCAudioDeviceModule *audioDeviceModule; +- (RTC_OBJC_TYPE(RTCRtpCapabilities) *)rtpSenderCapabilitiesFor:(RTCRtpMediaType)mediaType; + +- (RTC_OBJC_TYPE(RTCRtpCapabilities) *)rtpReceiverCapabilitiesFor:(RTCRtpMediaType)mediaType; + /** Initialize an RTCAudioSource with constraints. 
*/ - (RTC_OBJC_TYPE(RTCAudioSource) *)audioSourceWithConstraints: (nullable RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints; diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm index 9e2b2f35c0..49ff38161f 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm @@ -24,6 +24,9 @@ #import "RTCPeerConnection+Private.h" #import "RTCVideoSource+Private.h" #import "RTCVideoTrack+Private.h" +#import "RTCRtpReceiver+Private.h" +#import "RTCRtpCapabilities+Private.h" +#import "RTCRtpCodecCapability+Private.h" #import "base/RTCLogging.h" #import "base/RTCVideoDecoderFactory.h" #import "base/RTCVideoEncoderFactory.h" @@ -124,6 +127,20 @@ - (instancetype)init { #endif } +- (RTC_OBJC_TYPE(RTCRtpCapabilities) *)rtpSenderCapabilitiesFor:(RTCRtpMediaType)mediaType { + + webrtc::RtpCapabilities capabilities = _nativeFactory->GetRtpSenderCapabilities([RTCRtpReceiver nativeMediaTypeForMediaType: mediaType]); + + return [[RTCRtpCapabilities alloc] initWithNativeCapabilities: capabilities]; +} + +- (RTC_OBJC_TYPE(RTCRtpCapabilities) *)rtpReceiverCapabilitiesFor:(RTCRtpMediaType)mediaType { + + webrtc::RtpCapabilities capabilities = _nativeFactory->GetRtpReceiverCapabilities([RTCRtpReceiver nativeMediaTypeForMediaType: mediaType]); + + return [[RTCRtpCapabilities alloc] initWithNativeCapabilities: capabilities]; +} + - (instancetype) initWithBypassVoiceProcessing:(BOOL)bypassVoiceProcessing encoderFactory:(nullable id)encoderFactory diff --git a/sdk/objc/api/peerconnection/RTCRtpCapabilities+Private.h b/sdk/objc/api/peerconnection/RTCRtpCapabilities+Private.h new file mode 100644 index 0000000000..d5fff9e016 --- /dev/null +++ b/sdk/objc/api/peerconnection/RTCRtpCapabilities+Private.h @@ -0,0 +1,33 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import "RTCRtpCapabilities.h" + +#include "api/rtp_parameters.h" + +NS_ASSUME_NONNULL_BEGIN + +@interface RTC_OBJC_TYPE (RTCRtpCapabilities) +() + + @property(nonatomic, readonly) webrtc::RtpCapabilities nativeCapabilities; + +- (instancetype)initWithNativeCapabilities:(const webrtc::RtpCapabilities &)nativeCapabilities + NS_DESIGNATED_INITIALIZER; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/api/peerconnection/RTCRtpCapabilities.h b/sdk/objc/api/peerconnection/RTCRtpCapabilities.h new file mode 100644 index 0000000000..7c84732ad1 --- /dev/null +++ b/sdk/objc/api/peerconnection/RTCRtpCapabilities.h @@ -0,0 +1,40 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import + +#import "RTCMacros.h" + +@class RTC_OBJC_TYPE(RTCRtpCodecCapability); + +NS_ASSUME_NONNULL_BEGIN + +RTC_OBJC_EXPORT +@interface RTC_OBJC_TYPE (RTCRtpCapabilities) : NSObject + +- (instancetype)init NS_UNAVAILABLE; + +@property(nonatomic, readonly) NSArray *codecs; + +// Not implemented. +// std::vector header_extensions; + +// Not implemented. +// std::vector fec; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/api/peerconnection/RTCRtpCapabilities.mm b/sdk/objc/api/peerconnection/RTCRtpCapabilities.mm new file mode 100644 index 0000000000..32664e7c9f --- /dev/null +++ b/sdk/objc/api/peerconnection/RTCRtpCapabilities.mm @@ -0,0 +1,50 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import "RTCRtpCapabilities+Private.h" +#import "RTCRtpCodecCapability+Private.h" + +#import "RTCMediaStreamTrack.h" +#import "helpers/NSString+StdString.h" + +#include "media/base/media_constants.h" +#include "rtc_base/checks.h" + +@implementation RTC_OBJC_TYPE (RTCRtpCapabilities) + +@synthesize nativeCapabilities = _nativeCapabilities; + +- (instancetype)initWithNativeCapabilities:(const webrtc::RtpCapabilities &)nativeCapabilities { + if (self = [super init]) { + _nativeCapabilities = nativeCapabilities; + } + + return self; +} + +- (NSArray *)codecs { + NSMutableArray *result = [NSMutableArray array]; + + for (auto &element : _nativeCapabilities.codecs) { + RTCRtpCodecCapability *object = + [[RTCRtpCodecCapability alloc] initWithNativeCodecCapability:element]; + [result addObject:object]; + } + + return result; +} + +@end diff --git a/sdk/objc/api/peerconnection/RTCRtpCodecCapability+Private.h b/sdk/objc/api/peerconnection/RTCRtpCodecCapability+Private.h new file mode 100644 index 0000000000..43b12d6b7d --- /dev/null +++ b/sdk/objc/api/peerconnection/RTCRtpCodecCapability+Private.h @@ -0,0 +1,33 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#import "RTCRtpCodecCapability.h" + +#include "api/rtp_parameters.h" + +NS_ASSUME_NONNULL_BEGIN + +@interface RTC_OBJC_TYPE (RTCRtpCodecCapability) +() + + @property(nonatomic, readonly) webrtc::RtpCodecCapability nativeCodecCapability; + +- (instancetype)initWithNativeCodecCapability: + (const webrtc::RtpCodecCapability &)nativeCodecCapability NS_DESIGNATED_INITIALIZER; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/api/peerconnection/RTCRtpCodecCapability.h b/sdk/objc/api/peerconnection/RTCRtpCodecCapability.h new file mode 100644 index 0000000000..01f1d7eb46 --- /dev/null +++ b/sdk/objc/api/peerconnection/RTCRtpCodecCapability.h @@ -0,0 +1,63 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import + +#import "RTCMacros.h" + +typedef NS_ENUM(NSInteger, RTCRtpMediaType); + +NS_ASSUME_NONNULL_BEGIN + +RTC_OBJC_EXPORT +@interface RTC_OBJC_TYPE (RTCRtpCodecCapability) : NSObject + +// Build MIME "type/subtype" string from `name` and `kind`. +@property(nonatomic, readonly) NSString *mimeType; + +// Used to identify the codec. Equivalent to MIME subtype. +@property(nonatomic, copy) NSString *name; + +// The media type of this codec. Equivalent to MIME top-level type. +@property(nonatomic, assign) RTCRtpMediaType kind; + +// Clock rate in Hertz. If unset, the codec is applicable to any clock rate. +@property(nonatomic, copy, nullable) NSNumber *clockRate; + +// Default payload type for this codec. Mainly needed for codecs that use +// that have statically assigned payload types. +@property(nonatomic, copy, nullable) NSNumber *preferredPayloadType; + +// The number of audio channels supported. Unused for video codecs. +@property(nonatomic, copy, nullable) NSNumber *numChannels; + +// Codec-specific parameters that must be signaled to the remote party. +// +// Corresponds to "a=fmtp" parameters in SDP. +// +// Contrary to ORTC, these parameters are named using all lowercase strings. +// This helps make the mapping to SDP simpler, if an application is using SDP. +// Boolean values are represented by the string "1". +// std::map parameters; +@property(nonatomic, copy) NSDictionary *parameters; + +// Feedback mechanisms supported for this codec. +// std::vector rtcp_feedback; +// Not implemented. + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/api/peerconnection/RTCRtpCodecCapability.mm b/sdk/objc/api/peerconnection/RTCRtpCodecCapability.mm new file mode 100644 index 0000000000..f310bf6829 --- /dev/null +++ b/sdk/objc/api/peerconnection/RTCRtpCodecCapability.mm @@ -0,0 +1,138 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import "RTCRtpCodecCapability+Private.h" + +#import "RTCRtpReceiver+Private.h" + +#import "RTCMediaStreamTrack.h" +#import "helpers/NSString+StdString.h" + +#include "media/base/media_constants.h" +#include "rtc_base/checks.h" + +@implementation RTC_OBJC_TYPE (RTCRtpCodecCapability) + +@synthesize nativeCodecCapability = _nativeCodecCapability; + +- (instancetype)init { + return [self initWithNativeCodecCapability:webrtc::RtpCodecCapability()]; +} + +- (instancetype)initWithNativeCodecCapability: + (const webrtc::RtpCodecCapability &)nativeCodecCapability { + if (self = [super init]) { + _nativeCodecCapability = nativeCodecCapability; + } + + return self; +} + +- (NSString *)mimeType { + return [NSString stringWithUTF8String:_nativeCodecCapability.mime_type().c_str()]; +} + +- (NSString *)name { + return [NSString stringWithUTF8String:_nativeCodecCapability.name.c_str()]; +} + +- (void)setName:(NSString *)name { + _nativeCodecCapability.name = std::string([name UTF8String]); +} + +- (RTCRtpMediaType)kind { + return [RTCRtpReceiver mediaTypeForNativeMediaType:_nativeCodecCapability.kind]; +} + +- (void)setKind:(RTCRtpMediaType)kind { + _nativeCodecCapability.kind = [RTCRtpReceiver nativeMediaTypeForMediaType:kind]; +} + +- (NSNumber *)clockRate { + if (!_nativeCodecCapability.clock_rate) { + return nil; + } + + return [NSNumber numberWithInt:*_nativeCodecCapability.clock_rate]; +} + +- (void)setClockRate:(NSNumber *)clockRate { + if (clockRate == nil) { + _nativeCodecCapability.clock_rate = absl::optional(); + return; + } + + _nativeCodecCapability.clock_rate = absl::optional(clockRate.intValue); +} + +- (NSNumber *)preferredPayloadType { + if (!_nativeCodecCapability.preferred_payload_type) { + return nil; + } + + return [NSNumber numberWithInt:*_nativeCodecCapability.preferred_payload_type]; +} + +- (void)setPreferredPayloadType:(NSNumber *)preferredPayloadType { + if (preferredPayloadType == nil) { + _nativeCodecCapability.preferred_payload_type = absl::optional(); + return; + } + + _nativeCodecCapability.preferred_payload_type = + absl::optional(preferredPayloadType.intValue); +} + +- (NSNumber *)numChannels { + if (!_nativeCodecCapability.num_channels) { + return nil; + } + + return [NSNumber numberWithInt:*_nativeCodecCapability.num_channels]; +} + +- (void)setNumChannels:(NSNumber *)numChannels { + if (numChannels == nil) { + _nativeCodecCapability.num_channels = absl::optional(); + return; + } + + _nativeCodecCapability.num_channels = absl::optional(numChannels.intValue); +} + +- (NSDictionary *)parameters { + NSMutableDictionary *result = [NSMutableDictionary dictionary]; + auto _parameters = _nativeCodecCapability.parameters; + for (auto it = _parameters.begin(); it != _parameters.end(); ++it) { + [result setObject:[NSString stringForStdString:it->second] + forKey:[NSString stringForStdString:it->first]]; + } + + return result; +} + +- (void)setParameters:(NSDictionary *)parameters { + std::map _parameters; + for (NSString *paramKey in parameters.allKeys) { + std::string key = [NSString stdStringForString:paramKey]; + std::string value = [NSString 
stdStringForString:parameters[paramKey]]; + _parameters[key] = value; + } + + _nativeCodecCapability.parameters = _parameters; +} + +@end diff --git a/sdk/objc/api/peerconnection/RTCRtpCodecParameters.h b/sdk/objc/api/peerconnection/RTCRtpCodecParameters.h index 6135223720..4d24d3ccd6 100644 --- a/sdk/objc/api/peerconnection/RTCRtpCodecParameters.h +++ b/sdk/objc/api/peerconnection/RTCRtpCodecParameters.h @@ -30,6 +30,7 @@ RTC_EXTERN const NSString *const kRTCComfortNoiseCodecName; RTC_EXTERN const NSString *const kRTCVp8CodecName; RTC_EXTERN const NSString *const kRTCVp9CodecName; RTC_EXTERN const NSString *const kRTCH264CodecName; +RTC_EXTERN const NSString *const kRTCAv1CodecName; /** Defined in https://www.w3.org/TR/webrtc/#idl-def-rtcrtpcodecparameters */ RTC_OBJC_EXPORT diff --git a/sdk/objc/api/peerconnection/RTCRtpCodecParameters.mm b/sdk/objc/api/peerconnection/RTCRtpCodecParameters.mm index 6201e57b93..42a310cb79 100644 --- a/sdk/objc/api/peerconnection/RTCRtpCodecParameters.mm +++ b/sdk/objc/api/peerconnection/RTCRtpCodecParameters.mm @@ -32,6 +32,7 @@ const NSString * const kRTCVp8CodecName = @(cricket::kVp8CodecName); const NSString * const kRTCVp9CodecName = @(cricket::kVp9CodecName); const NSString * const kRTCH264CodecName = @(cricket::kH264CodecName); +const NSString * const kRTCAv1CodecName = @(cricket::kAv1CodecName); @implementation RTC_OBJC_TYPE (RTCRtpCodecParameters) diff --git a/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.h b/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.h index 07f6b7a39c..af0c6993bc 100644 --- a/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.h +++ b/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.h @@ -69,6 +69,10 @@ RTC_OBJC_EXPORT https://w3c.github.io/webrtc-extensions/#dom-rtcrtpencodingparameters-adaptiveptime */ @property(nonatomic, assign) BOOL adaptiveAudioPacketTime; +/** A case-sensitive identifier of the scalability mode to be used for this stream. 
+ https://w3c.github.io/webrtc-svc/#rtcrtpencodingparameters */ +@property(nonatomic, copy, nullable) NSString *scalabilityMode; + - (instancetype)init; @end diff --git a/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.mm b/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.mm index d6087dafb0..aecb88b6f6 100644 --- a/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.mm +++ b/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.mm @@ -25,6 +25,7 @@ @implementation RTC_OBJC_TYPE (RTCRtpEncodingParameters) @synthesize bitratePriority = _bitratePriority; @synthesize networkPriority = _networkPriority; @synthesize adaptiveAudioPacketTime = _adaptiveAudioPacketTime; +@synthesize scalabilityMode = _scalabilityMode; - (instancetype)init { webrtc::RtpEncodingParameters nativeParameters; @@ -59,6 +60,9 @@ - (instancetype)initWithNativeParameters: if (nativeParameters.ssrc) { _ssrc = [NSNumber numberWithUnsignedLong:*nativeParameters.ssrc]; } + if (nativeParameters.scalability_mode) { + _scalabilityMode = [NSString stringWithUTF8String:nativeParameters.scalability_mode->c_str()]; + } _bitratePriority = nativeParameters.bitrate_priority; _networkPriority = [RTC_OBJC_TYPE(RTCRtpEncodingParameters) priorityFromNativePriority:nativeParameters.network_priority]; @@ -92,6 +96,9 @@ - (instancetype)initWithNativeParameters: if (_ssrc != nil) { parameters.ssrc = absl::optional(_ssrc.unsignedLongValue); } + if (_scalabilityMode != nil) { + parameters.scalability_mode = absl::optional(std::string([_scalabilityMode UTF8String])); + } parameters.bitrate_priority = _bitratePriority; parameters.network_priority = [RTC_OBJC_TYPE(RTCRtpEncodingParameters) nativePriorityFromPriority:_networkPriority]; diff --git a/sdk/objc/api/peerconnection/RTCRtpTransceiver.h b/sdk/objc/api/peerconnection/RTCRtpTransceiver.h index fd59013639..3ffea8efd7 100644 --- a/sdk/objc/api/peerconnection/RTCRtpTransceiver.h +++ b/sdk/objc/api/peerconnection/RTCRtpTransceiver.h @@ -14,6 +14,8 @@ #import "RTCRtpReceiver.h" #import "RTCRtpSender.h" +@class RTC_OBJC_TYPE(RTCRtpCodecCapability); + NS_ASSUME_NONNULL_BEGIN extern NSString *const kRTCRtpTransceiverErrorDomain; @@ -104,6 +106,8 @@ RTC_OBJC_EXPORT */ @property(nonatomic, readonly) RTCRtpTransceiverDirection direction; +@property(nonatomic, copy) NSArray *codecPreferences; + /** The currentDirection attribute indicates the current direction negotiated * for this transceiver. 
If this transceiver has never been represented in an * offer/answer exchange, or if the transceiver is stopped, the value is not diff --git a/sdk/objc/api/peerconnection/RTCRtpTransceiver.mm b/sdk/objc/api/peerconnection/RTCRtpTransceiver.mm index ae1cf79864..acb1b8032a 100644 --- a/sdk/objc/api/peerconnection/RTCRtpTransceiver.mm +++ b/sdk/objc/api/peerconnection/RTCRtpTransceiver.mm @@ -14,6 +14,8 @@ #import "RTCRtpParameters+Private.h" #import "RTCRtpReceiver+Private.h" #import "RTCRtpSender+Private.h" +#import "RTCRtpCodecCapability.h" +#import "RTCRtpCodecCapability+Private.h" #import "base/RTCLogging.h" #import "helpers/NSString+StdString.h" @@ -65,6 +67,36 @@ - (NSString *)mid { } } +- (void)setCodecPreferences:(NSArray *)codecPreferences { + + std::vector objects; + + for (RTCRtpCodecCapability *object in codecPreferences) { + objects.push_back(object.nativeCodecCapability); + } + + //webrtc::RTCError error = + _nativeRtpTransceiver->SetCodecPreferences(rtc::ArrayView(objects.data(), objects.size())); + + // if (!error.ok()) { + // [NSException raise:@"setCodecPreferences" format:@"SDK returned error: %@", [NSString stringWithUTF8String: error.message()]]; + // } +} + +- (NSArray *)codecPreferences { + + NSMutableArray *result = [NSMutableArray array]; + + std::vector capabilities = _nativeRtpTransceiver->codec_preferences(); + + for (auto & element : capabilities) { + RTCRtpCodecCapability *object = [[RTCRtpCodecCapability alloc] initWithNativeCodecCapability: element]; + [result addObject: object]; + } + + return result; +} + @synthesize sender = _sender; @synthesize receiver = _receiver; diff --git a/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.h b/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.h index f70e2ad5ee..d30b83037f 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.h +++ b/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.h @@ -8,17 +8,5 @@ * be found in the AUTHORS file in the root of the source tree. */ -#import - -#import "RTCVideoRenderer.h" - -NS_AVAILABLE_MAC(10.11) - -RTC_OBJC_EXPORT -@interface RTC_OBJC_TYPE (RTCMTLNSVideoView) : NSView - -@property(nonatomic, weak) id delegate; - -+ (BOOL)isMetalAvailable; - -@end +// Deprecated: Use RTCMTLVideoView instead +@compatibility_alias RTCMTLNSVideoView RTCMTLVideoView; diff --git a/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.m b/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.m deleted file mode 100644 index 625fb1caa7..0000000000 --- a/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.m +++ /dev/null @@ -1,122 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#import "RTCMTLNSVideoView.h" - -#import -#import - -#import "base/RTCVideoFrame.h" - -#import "RTCMTLI420Renderer.h" - -@interface RTC_OBJC_TYPE (RTCMTLNSVideoView) -() @property(nonatomic) id renderer; -@property(nonatomic, strong) MTKView *metalView; -@property(atomic, strong) RTC_OBJC_TYPE(RTCVideoFrame) * videoFrame; -@end - -@implementation RTC_OBJC_TYPE (RTCMTLNSVideoView) { - id _renderer; -} - -@synthesize delegate = _delegate; -@synthesize renderer = _renderer; -@synthesize metalView = _metalView; -@synthesize videoFrame = _videoFrame; - -- (instancetype)initWithFrame:(CGRect)frameRect { - self = [super initWithFrame:frameRect]; - if (self) { - [self configure]; - } - return self; -} - -- (instancetype)initWithCoder:(NSCoder *)aCoder { - self = [super initWithCoder:aCoder]; - if (self) { - [self configure]; - } - return self; -} - -#pragma mark - Private - -+ (BOOL)isMetalAvailable { - return [MTLCopyAllDevices() count] > 0; -} - -- (void)configure { - if ([[self class] isMetalAvailable]) { - _metalView = [[MTKView alloc] initWithFrame:self.bounds]; - [self addSubview:_metalView]; - _metalView.layerContentsPlacement = NSViewLayerContentsPlacementScaleProportionallyToFit; - _metalView.translatesAutoresizingMaskIntoConstraints = NO; - _metalView.framebufferOnly = YES; - _metalView.delegate = self; - - _renderer = [[RTCMTLI420Renderer alloc] init]; - if (![(RTCMTLI420Renderer *)_renderer addRenderingDestination:_metalView]) { - _renderer = nil; - }; - } -} - -- (void)updateConstraints { - NSDictionary *views = NSDictionaryOfVariableBindings(_metalView); - - NSArray *constraintsHorizontal = - [NSLayoutConstraint constraintsWithVisualFormat:@"H:|-0-[_metalView]-0-|" - options:0 - metrics:nil - views:views]; - [self addConstraints:constraintsHorizontal]; - - NSArray *constraintsVertical = - [NSLayoutConstraint constraintsWithVisualFormat:@"V:|-0-[_metalView]-0-|" - options:0 - metrics:nil - views:views]; - [self addConstraints:constraintsVertical]; - [super updateConstraints]; -} - -#pragma mark - MTKViewDelegate methods -- (void)drawInMTKView:(nonnull MTKView *)view { - if (self.videoFrame == nil) { - return; - } - if (view == self.metalView) { - [_renderer drawFrame:self.videoFrame]; - } -} - -- (void)mtkView:(MTKView *)view drawableSizeWillChange:(CGSize)size { -} - -#pragma mark - RTC_OBJC_TYPE(RTCVideoRenderer) - -- (void)setSize:(CGSize)size { - _metalView.drawableSize = size; - dispatch_async(dispatch_get_main_queue(), ^{ - [self.delegate videoView:self didChangeVideoSize:size]; - }); - [_metalView draw]; -} - -- (void)renderFrame:(nullable RTC_OBJC_TYPE(RTCVideoFrame) *)frame { - if (frame == nil) { - return; - } - self.videoFrame = [frame newI420VideoFrame]; -} - -@end diff --git a/sdk/objc/components/renderer/metal/RTCMTLVideoView.h b/sdk/objc/components/renderer/metal/RTCMTLVideoView.h index 3320d12076..bed02ffa92 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLVideoView.h +++ b/sdk/objc/components/renderer/metal/RTCMTLVideoView.h @@ -10,6 +10,10 @@ #import +#if TARGET_OS_OSX +#import +#endif + #import "RTCMacros.h" #import "RTCVideoFrame.h" #import "RTCVideoRenderer.h" @@ -22,14 +26,26 @@ NS_ASSUME_NONNULL_BEGIN * It has id property that renders video frames in the view's * bounds using Metal. 
*/ +#if TARGET_OS_IPHONE NS_CLASS_AVAILABLE_IOS(9) +#elif TARGET_OS_OSX +NS_AVAILABLE_MAC(10.11) +#endif RTC_OBJC_EXPORT -@interface RTC_OBJC_TYPE (RTCMTLVideoView) : UIView +@interface RTC_OBJC_TYPE (RTCMTLVideoView) : + +#if TARGET_OS_IPHONE + UIView +#elif TARGET_OS_OSX + NSView +#endif @property(nonatomic, weak) id delegate; +#if TARGET_OS_IPHONE @property(nonatomic) UIViewContentMode videoContentMode; +#endif /** @abstract Enables/disables rendering. */ @@ -39,6 +55,8 @@ RTC_OBJC_EXPORT */ @property(nonatomic, nullable) NSValue* rotationOverride; ++ (BOOL)isMetalAvailable; + @end NS_ASSUME_NONNULL_END diff --git a/sdk/objc/components/renderer/metal/RTCMTLVideoView.m b/sdk/objc/components/renderer/metal/RTCMTLVideoView.m index c5d9e4385f..8b2ec1aaa3 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLVideoView.m +++ b/sdk/objc/components/renderer/metal/RTCMTLVideoView.m @@ -51,6 +51,14 @@ @implementation RTC_OBJC_TYPE (RTCMTLVideoView) @synthesize lastFrameTimeNs = _lastFrameTimeNs; @synthesize rotationOverride = _rotationOverride; ++ (BOOL)isMetalAvailable { +#if TARGET_OS_IPHONE + return MTLCreateSystemDefaultDevice() != nil; +#elif TARGET_OS_OSX + return [MTLCopyAllDevices() count] > 0; +#endif +} + - (instancetype)initWithFrame:(CGRect)frameRect { self = [super initWithFrame:frameRect]; if (self) { @@ -75,6 +83,7 @@ - (void)setEnabled:(BOOL)enabled { self.metalView.paused = !enabled; } +#if TARGET_OS_IPHONE - (UIViewContentMode)videoContentMode { return self.metalView.contentMode; } @@ -82,13 +91,10 @@ - (UIViewContentMode)videoContentMode { - (void)setVideoContentMode:(UIViewContentMode)mode { self.metalView.contentMode = mode; } +#endif #pragma mark - Private -+ (BOOL)isMetalAvailable { - return MTLCreateSystemDefaultDevice() != nil; -} - + (MTKView *)createMetalView:(CGRect)frame { return [[MTKViewClass alloc] initWithFrame:frame]; } @@ -102,7 +108,7 @@ + (RTCMTLI420Renderer *)createI420Renderer { } + (RTCMTLRGBRenderer *)createRGBRenderer { - return [[RTCMTLRGBRenderer alloc] init]; + return [[RTCMTLRGBRendererClass alloc] init]; } - (void)configure { @@ -111,19 +117,24 @@ - (void)configure { self.metalView = [RTC_OBJC_TYPE(RTCMTLVideoView) createMetalView:self.bounds]; self.metalView.delegate = self; +#if TARGET_OS_IPHONE self.metalView.contentMode = UIViewContentModeScaleAspectFill; +#elif TARGET_OS_OSX + self.metalView.layerContentsPlacement = NSViewLayerContentsPlacementScaleProportionallyToFit; +#endif + [self addSubview:self.metalView]; self.videoFrameSize = CGSizeZero; } +#if TARGET_OS_IPHONE - (void)setMultipleTouchEnabled:(BOOL)multipleTouchEnabled { - [super setMultipleTouchEnabled:multipleTouchEnabled]; - self.metalView.multipleTouchEnabled = multipleTouchEnabled; + [super setMultipleTouchEnabled:multipleTouchEnabled]; + self.metalView.multipleTouchEnabled = multipleTouchEnabled; } +#endif -- (void)layoutSubviews { - [super layoutSubviews]; - +- (void)performLayout { CGRect bounds = self.bounds; self.metalView.frame = bounds; if (!CGSizeEqualToSize(self.videoFrameSize, CGSizeZero)) { @@ -203,10 +214,10 @@ - (void)setRotationOverride:(NSValue *)rotationOverride { [self setNeedsLayout]; } -- (RTCVideoRotation)frameRotation { +- (RTCVideoRotation)videoRotation { if (self.rotationOverride) { RTCVideoRotation rotation; - if (@available(iOS 11, *)) { + if (@available(iOS 11, macos 10.13, *)) { [self.rotationOverride getValue:&rotation size:sizeof(rotation)]; } else { [self.rotationOverride getValue:&rotation]; @@ -220,10 +231,10 @@ - (RTCVideoRotation)frameRotation { - 
(CGSize)drawableSize { // Flip width/height if the rotations are not the same. CGSize videoFrameSize = self.videoFrameSize; - RTCVideoRotation frameRotation = [self frameRotation]; + RTCVideoRotation videoRotation = [self videoRotation]; BOOL useLandscape = - (frameRotation == RTCVideoRotation_0) || (frameRotation == RTCVideoRotation_180); + (videoRotation == RTCVideoRotation_0) || (videoRotation == RTCVideoRotation_180); BOOL sizeIsLandscape = (self.videoFrame.rotation == RTCVideoRotation_0) || (self.videoFrame.rotation == RTCVideoRotation_180); @@ -259,7 +270,34 @@ - (void)renderFrame:(nullable RTC_OBJC_TYPE(RTCVideoFrame) *)frame { RTCLogInfo(@"Incoming frame is nil. Exiting render callback."); return; } - self.videoFrame = frame; + + // Workaround to support RTCCVPixelBuffer rendering. + // RTCMTLRGBRenderer seems to be broken at the moment. + BOOL useI420 = NO; + if ([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) { + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer; + const OSType pixelFormat = CVPixelBufferGetPixelFormatType(buffer.pixelBuffer); + useI420 = pixelFormat == kCVPixelFormatType_32BGRA || pixelFormat == kCVPixelFormatType_32ARGB; + } + self.videoFrame = useI420 ? [frame newI420VideoFrame] : frame; +} + +#pragma mark - Cross platform + +#if TARGET_OS_IPHONE +- (void)layoutSubviews { + [super layoutSubviews]; + [self performLayout]; +} +#elif TARGET_OS_OSX +- (void)layout { + [super layout]; + [self performLayout]; +} + +- (void)setNeedsLayout { + self.needsLayout = YES; } +#endif @end diff --git a/sdk/objc/components/video_codec/RTCVideoEncoderH264.mm b/sdk/objc/components/video_codec/RTCVideoEncoderH264.mm index 2160d79ae5..d3dd33aef6 100644 --- a/sdk/objc/components/video_codec/RTCVideoEncoderH264.mm +++ b/sdk/objc/components/video_codec/RTCVideoEncoderH264.mm @@ -54,14 +54,42 @@ - (void)frameWasEncoded : (OSStatus)status flags : (VTEncodeInfoFlags)infoFlags // The ratio between kVTCompressionPropertyKey_DataRateLimits and // kVTCompressionPropertyKey_AverageBitRate. The data rate limit is set higher // than the average bit rate to avoid undershooting the target. -const float kLimitToAverageBitRateFactor = 1.5f; +const float kLimitToAverageBitRateFactor = 10.0f; // These thresholds deviate from the default h264 QP thresholds, as they // have been found to work better on devices that support VideoToolbox const int kLowH264QpThreshold = 28; const int kHighH264QpThreshold = 39; +const int kBitsPerByte = 8; const OSType kNV12PixelFormat = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange; +typedef NS_ENUM(NSInteger, RTCVideoEncodeMode) { + Variable = 0, + Constant = 1, +}; + +NSArray *CreateRateLimitArray(uint32_t computedBitrateBps, RTCVideoEncodeMode mode) { + switch (mode) { + case Variable: { + // 5 seconds should be an okay interval for VBR to enforce the long-term + // limit. + float avgInterval = 5.0; + uint32_t avgBytesPerSecond = computedBitrateBps / kBitsPerByte * avgInterval; + // And the peak bitrate is measured per-second in a way similar to CBR. + float peakInterval = 1.0; + uint32_t peakBytesPerSecond = + computedBitrateBps * kLimitToAverageBitRateFactor / kBitsPerByte; + return @[ @(peakBytesPerSecond), @(peakInterval), @(avgBytesPerSecond), @(avgInterval) ]; + } + case Constant: { + // CBR should be enforces with granularity of a second. 
+ float targetInterval = 1.0; + int32_t targetBitrate = computedBitrateBps / kBitsPerByte; + return @[ @(targetBitrate), @(targetInterval) ]; + } + } +} + // Struct that we pass to the encoder per frame to encode. We receive it again // in the encoder callback. struct RTCFrameEncodeParams { @@ -180,10 +208,13 @@ void compressionOutputCallback(void *encoder, // no specific VideoToolbox profile for the specified level, AutoLevel will be // returned. The user must initialize the encoder with a resolution and // framerate conforming to the selected H264 level regardless. -CFStringRef ExtractProfile(const webrtc::H264ProfileLevelId &profile_level_id) { +CFStringRef ExtractProfile(const webrtc::H264ProfileLevelId &profile_level_id, bool screenSharing) { switch (profile_level_id.profile) { case webrtc::H264Profile::kProfileConstrainedBaseline: case webrtc::H264Profile::kProfileBaseline: + if (screenSharing) { + return kVTProfileLevel_H264_Baseline_AutoLevel; + } switch (profile_level_id.level) { case webrtc::H264Level::kLevel3: return kVTProfileLevel_H264_Baseline_3_0; @@ -319,8 +350,8 @@ NSUInteger GetMaxSampleRate(const webrtc::H264ProfileLevelId &profile_level_id) @implementation RTC_OBJC_TYPE (RTCVideoEncoderH264) { RTC_OBJC_TYPE(RTCVideoCodecInfo) * _codecInfo; - std::unique_ptr _bitrateAdjuster; uint32_t _targetBitrateBps; + uint32_t _targetFrameRate; uint32_t _encoderBitrateBps; uint32_t _encoderFrameRate; uint32_t _maxAllowedFrameRate; @@ -330,10 +361,17 @@ @implementation RTC_OBJC_TYPE (RTCVideoEncoderH264) { int32_t _width; int32_t _height; VTCompressionSessionRef _compressionSession; - RTCVideoCodecMode _mode; + CVPixelBufferPoolRef _pixelBufferPool; + RTCVideoCodecMode _codecMode; + unsigned int _maxQP; + unsigned int _minBitrate; + unsigned int _maxBitrate; + RTCVideoEncodeMode _encodeMode; webrtc::H264BitstreamParser _h264BitstreamParser; std::vector _frameScaleBuffer; + + CMTime _previousPresentationTimeStamp; } // .5 is set as a mininum to prevent overcompensating for large temporary @@ -346,12 +384,14 @@ @implementation RTC_OBJC_TYPE (RTCVideoEncoderH264) { - (instancetype)initWithCodecInfo:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)codecInfo { if (self = [super init]) { _codecInfo = codecInfo; - _bitrateAdjuster.reset(new webrtc::BitrateAdjuster(.5, .95)); _packetizationMode = RTCH264PacketizationModeNonInterleaved; _profile_level_id = webrtc::ParseSdpForH264ProfileLevelId([codecInfo nativeSdpVideoFormat].parameters); + _previousPresentationTimeStamp = kCMTimeZero; RTC_DCHECK(_profile_level_id); - RTC_LOG(LS_INFO) << "Using profile " << CFStringToString(ExtractProfile(*_profile_level_id)); + RTC_LOG(LS_INFO) << "Using profile " + << CFStringToString(ExtractProfile( + *_profile_level_id, _codecMode == RTCVideoCodecModeScreensharing)); RTC_CHECK([codecInfo.name isEqualToString:kRTCVideoCodecH264Name]); } return self; @@ -368,7 +408,12 @@ - (NSInteger)startEncodeWithSettings:(RTC_OBJC_TYPE(RTCVideoEncoderSettings) *)s _width = settings.width; _height = settings.height; - _mode = settings.mode; + _codecMode = settings.mode; + _maxQP = settings.qpMax; + + _encodeMode = Variable; // Always variable mode for now + _minBitrate = settings.minBitrate * 1000; // minBitrate is in kbps. + _maxBitrate = settings.maxBitrate * 1000; // maxBitrate is in kbps. 
uint32_t aligned_width = (((_width + 15) >> 4) << 4); uint32_t aligned_height = (((_height + 15) >> 4) << 4); @@ -376,9 +421,15 @@ - (NSInteger)startEncodeWithSettings:(RTC_OBJC_TYPE(RTCVideoEncoderSettings) *)s (aligned_width * aligned_height)); // We can only set average bitrate on the HW encoder. - _targetBitrateBps = settings.startBitrate * 1000; // startBitrate is in kbps. - _bitrateAdjuster->SetTargetBitrateBps(_targetBitrateBps); - _encoderFrameRate = MIN(settings.maxFramerate, _maxAllowedFrameRate); + if (_encodeMode == Constant) { + _targetBitrateBps = _maxBitrate; + } else { + _targetBitrateBps = settings.startBitrate * 1000; // startBitrate is in kbps. + } + + _targetFrameRate = MIN(settings.maxFramerate, _maxAllowedFrameRate); + _encoderBitrateBps = 0; + _encoderFrameRate = 0; if (settings.maxFramerate > _maxAllowedFrameRate && _maxAllowedFrameRate > 0) { RTC_LOG(LS_WARNING) << "Initial encoder frame rate setting " << settings.maxFramerate << " is larger than the " @@ -397,8 +448,15 @@ - (NSInteger)encode:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame if (!_callback || !_compressionSession) { return WEBRTC_VIDEO_CODEC_UNINITIALIZED; } - BOOL isKeyframeRequired = NO; + CMTime presentationTimeStamp = CMTimeMake(frame.timeStampNs / rtc::kNumNanosecsPerMillisec, 1000); + if (CMTimeCompare(presentationTimeStamp, _previousPresentationTimeStamp) == 0) { + // Same PTS + return WEBRTC_VIDEO_CODEC_NO_OUTPUT; + } + _previousPresentationTimeStamp = presentationTimeStamp; + + BOOL isKeyframeRequired = NO; // Get a pixel buffer from the pool and copy frame data over. if ([self resetCompressionSessionIfNeededWithFrame:frame]) { isKeyframeRequired = YES; @@ -425,8 +483,8 @@ - (NSInteger)encode:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame int dstWidth = CVPixelBufferGetWidth(pixelBuffer); int dstHeight = CVPixelBufferGetHeight(pixelBuffer); if ([rtcPixelBuffer requiresScalingToWidth:dstWidth height:dstHeight]) { - int size = - [rtcPixelBuffer bufferSizeForCroppingAndScalingToWidth:dstWidth height:dstHeight]; + int size = [rtcPixelBuffer bufferSizeForCroppingAndScalingToWidth:dstWidth + height:dstHeight]; _frameScaleBuffer.resize(size); } else { _frameScaleBuffer.clear(); @@ -466,7 +524,6 @@ - (NSInteger)encode:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame } } - CMTime presentationTimeStamp = CMTimeMake(frame.timeStampNs / rtc::kNumNanosecsPerMillisec, 1000); CFDictionaryRef frameProperties = nullptr; if (isKeyframeRequired) { CFTypeRef keys[] = {kVTEncodeFrameOptionKey_ForceKeyFrame}; @@ -484,8 +541,8 @@ - (NSInteger)encode:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame frame.rotation)); encodeParams->codecSpecificInfo.packetizationMode = _packetizationMode; - // Update the bitrate if needed. - [self setBitrateBps:_bitrateAdjuster->GetAdjustedBitrateBps() frameRate:_encoderFrameRate]; + // Update encoder bitrate or frameRate if needed. 
+ [self updateEncoderBitrateAndFrameRate]; OSStatus status = VTCompressionSessionEncodeFrame(_compressionSession, pixelBuffer, @@ -526,14 +583,19 @@ - (void)setCallback:(RTCVideoEncoderCallback)callback { } - (int)setBitrate:(uint32_t)bitrateKbit framerate:(uint32_t)framerate { - _targetBitrateBps = 1000 * bitrateKbit; - _bitrateAdjuster->SetTargetBitrateBps(_targetBitrateBps); + // set target bitrate bps + _targetBitrateBps = bitrateKbit * 1000; + + RTC_LOG(LS_INFO) << "setBitrateKBit: " << bitrateKbit << " targetBps: " << _targetBitrateBps + << " frameRate: " << framerate; + if (framerate > _maxAllowedFrameRate && _maxAllowedFrameRate > 0) { RTC_LOG(LS_WARNING) << "Encoder frame rate setting " << framerate << " is larger than the " << "maximal allowed frame rate " << _maxAllowedFrameRate << "."; } - framerate = MIN(framerate, _maxAllowedFrameRate); - [self setBitrateBps:_bitrateAdjuster->GetAdjustedBitrateBps() frameRate:framerate]; + + _targetFrameRate = MIN(framerate, _maxAllowedFrameRate); + return WEBRTC_VIDEO_CODEC_OK; } @@ -585,7 +647,8 @@ - (BOOL)resetCompressionSessionIfNeededWithFrame:(RTC_OBJC_TYPE(RTCVideoFrame) * CVPixelBufferPoolRef pixelBufferPool = VTCompressionSessionGetPixelBufferPool(_compressionSession); if (!pixelBufferPool) { - return NO; + [self resetCompressionSessionWithPixelFormat:framePixelFormat]; + return YES; } NSDictionary *poolAttributes = @@ -631,14 +694,19 @@ - (int)resetCompressionSessionWithPixelFormat:(OSType)framePixelFormat { (NSString *)kCVPixelBufferPixelFormatTypeKey : @(framePixelFormat), }; - NSDictionary *encoder_specs; + NSMutableDictionary *encoder_specs; #if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS) // Currently hw accl is supported above 360p on mac, below 360p // the compression session will be created with hw accl disabled. - encoder_specs = @{ + encoder_specs = [@{ (NSString *)kVTVideoEncoderSpecification_EnableHardwareAcceleratedVideoEncoder : @(YES), - }; - + } mutableCopy]; + // Enable low-latency video encoding + if (@available(iOS 14.5, macOS 11.3, *)) { + [encoder_specs addEntriesFromDictionary:@{ + (NSString *)kVTVideoEncoderSpecification_EnableLowLatencyRateControl : @(YES), + }]; + } #endif OSStatus status = VTCompressionSessionCreate( nullptr, // use default allocator @@ -675,11 +743,30 @@ - (int)resetCompressionSessionWithPixelFormat:(OSType)framePixelFormat { - (void)configureCompressionSession { RTC_DCHECK(_compressionSession); SetVTSessionProperty(_compressionSession, kVTCompressionPropertyKey_RealTime, true); - SetVTSessionProperty(_compressionSession, - kVTCompressionPropertyKey_ProfileLevel, - ExtractProfile(*_profile_level_id)); + // Sacrifice encoding speed over quality when necessary + if (@available(iOS 14.0, macOS 11.0, *)) { + SetVTSessionProperty( + _compressionSession, kVTCompressionPropertyKey_PrioritizeEncodingSpeedOverQuality, true); + } + // Set maximum QP for screen sharing mode, range must be within 1 to 51 + // https://developer.apple.com/documentation/videotoolbox/kvtcompressionpropertykey_maxallowedframeqp + if (@available(iOS 15.0, macOS 12.0, *)) { + // Only enable for screen sharing and let VideoToolbox do the optimizing as much as possible. 
+ if (_codecMode == RTCVideoCodecModeScreensharing) { + RTC_LOG(LS_INFO) << "Configuring VideoToolbox to use maxQP: " << kHighH264QpThreshold + << " mode: " << _codecMode; + SetVTSessionProperty( + _compressionSession, kVTCompressionPropertyKey_MaxAllowedFrameQP, kHighH264QpThreshold); + } + } + SetVTSessionProperty( + _compressionSession, + kVTCompressionPropertyKey_ProfileLevel, + ExtractProfile(*_profile_level_id, _codecMode == RTCVideoCodecModeScreensharing)); SetVTSessionProperty(_compressionSession, kVTCompressionPropertyKey_AllowFrameReordering, false); - [self setEncoderBitrateBps:_targetBitrateBps frameRate:_encoderFrameRate]; + + // [self updateEncoderBitrateAndFrameRate]; + // TODO(tkchin): Look at entropy mode and colorspace matrices. // TODO(tkchin): Investigate to see if there's any way to make this work. // May need it to interop with Android. Currently this call just fails. @@ -706,49 +793,59 @@ - (NSString *)implementationName { return @"VideoToolbox"; } -- (void)setBitrateBps:(uint32_t)bitrateBps frameRate:(uint32_t)frameRate { - if (_encoderBitrateBps != bitrateBps || _encoderFrameRate != frameRate) { - [self setEncoderBitrateBps:bitrateBps frameRate:frameRate]; +- (void)updateEncoderBitrateAndFrameRate { + // If no compression session simply return + if (!_compressionSession) { + return; } -} + // Initial status + OSStatus status = noErr; -- (void)setEncoderBitrateBps:(uint32_t)bitrateBps frameRate:(uint32_t)frameRate { - if (_compressionSession) { - SetVTSessionProperty(_compressionSession, kVTCompressionPropertyKey_AverageBitRate, bitrateBps); + uint32_t computedBitrateBps = _targetBitrateBps; - // With zero `_maxAllowedFrameRate`, we fall back to automatic frame rate detection. - if (_maxAllowedFrameRate > 0) { - SetVTSessionProperty( - _compressionSession, kVTCompressionPropertyKey_ExpectedFrameRate, frameRate); - } + // With zero `_maxAllowedFrameRate`, we fall back to automatic frame rate detection. + uint32_t computedFrameRate = _maxAllowedFrameRate > 0 ? _targetFrameRate : 0; - // TODO(tkchin): Add a helper method to set array value. 
- int64_t dataLimitBytesPerSecondValue = - static_cast(bitrateBps * kLimitToAverageBitRateFactor / 8); - CFNumberRef bytesPerSecond = - CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt64Type, &dataLimitBytesPerSecondValue); - int64_t oneSecondValue = 1; - CFNumberRef oneSecond = - CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt64Type, &oneSecondValue); - const void *nums[2] = {bytesPerSecond, oneSecond}; - CFArrayRef dataRateLimits = CFArrayCreate(nullptr, nums, 2, &kCFTypeArrayCallBacks); - OSStatus status = VTSessionSetProperty( - _compressionSession, kVTCompressionPropertyKey_DataRateLimits, dataRateLimits); - if (bytesPerSecond) { - CFRelease(bytesPerSecond); - } - if (oneSecond) { - CFRelease(oneSecond); + // Set frame rate + if (computedFrameRate != _encoderFrameRate) { + status = VTSessionSetProperty(_compressionSession, + kVTCompressionPropertyKey_ExpectedFrameRate, + (__bridge CFTypeRef) @(computedFrameRate)); + // Ensure the bitrate was set successfully + if (status != noErr) { + RTC_LOG(LS_ERROR) << "Failed to set frame rate: " << computedFrameRate + << " error: " << status; + } else { + RTC_LOG(LS_INFO) << "Did update encoder frame rate: " << computedFrameRate; } - if (dataRateLimits) { - CFRelease(dataRateLimits); + _encoderFrameRate = computedFrameRate; + } + + // Set bitrate + if (computedBitrateBps != _encoderBitrateBps) { + status = VTSessionSetProperty(_compressionSession, + kVTCompressionPropertyKey_AverageBitRate, + (__bridge CFTypeRef) @(computedBitrateBps)); + + // Ensure the bitrate was set successfully + if (status != noErr) { + RTC_LOG(LS_ERROR) << "Failed to update encoder bitrate: " << computedBitrateBps + << "error: " << status; + } else { + RTC_LOG(LS_INFO) << "Did update encoder bitrate: " << computedBitrateBps; } + + status = VTSessionSetProperty( + _compressionSession, + kVTCompressionPropertyKey_DataRateLimits, + (__bridge CFArrayRef)CreateRateLimitArray(computedBitrateBps, _encodeMode)); if (status != noErr) { - RTC_LOG(LS_ERROR) << "Failed to set data rate limit with code: " << status; + RTC_LOG(LS_ERROR) << "Failed to update encoder data rate limits"; + } else { + RTC_LOG(LS_INFO) << "Did update encoder data rate limits"; } - _encoderBitrateBps = bitrateBps; - _encoderFrameRate = frameRate; + _encoderBitrateBps = computedBitrateBps; } } @@ -804,8 +901,9 @@ - (void)frameWasEncoded:(OSStatus)status frame.captureTimeMs = renderTimeMs; frame.timeStamp = timestamp; frame.rotation = rotation; - frame.contentType = (_mode == RTCVideoCodecModeScreensharing) ? RTCVideoContentTypeScreenshare : - RTCVideoContentTypeUnspecified; + frame.contentType = (_codecMode == RTCVideoCodecModeScreensharing) ? + RTCVideoContentTypeScreenshare : + RTCVideoContentTypeUnspecified; frame.flags = webrtc::VideoSendTiming::kInvalid; _h264BitstreamParser.ParseBitstream(*buffer); @@ -816,7 +914,6 @@ - (void)frameWasEncoded:(OSStatus)status RTC_LOG(LS_ERROR) << "Encode callback failed"; return; } - _bitrateAdjuster->Update(frame.buffer.length); } - (nullable RTC_OBJC_TYPE(RTCVideoEncoderQpThresholds) *)scalingSettings { @@ -826,3 +923,4 @@ - (void)frameWasEncoded:(OSStatus)status } @end + diff --git a/sdk/objc/helpers/RTCYUVHelper.h b/sdk/objc/helpers/RTCYUVHelper.h new file mode 100644 index 0000000000..2e6309c034 --- /dev/null +++ b/sdk/objc/helpers/RTCYUVHelper.h @@ -0,0 +1,118 @@ +/* + * Copyright 2016 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#import + +#import "RTCMacros.h" +#import "RTCVideoFrame.h" + +RTC_OBJC_EXPORT +@interface RTC_OBJC_TYPE (RTCYUVHelper) : NSObject + +- (instancetype)init NS_UNAVAILABLE; + ++ (void)I420Rotate:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstY:(uint8_t*)dstY + dstStrideY:(int)dstStrideY + dstU:(uint8_t*)dstU + dstStrideU:(int)dstStrideU + dstV:(uint8_t*)dstV + dstStrideV:(int)dstStrideV + width:(int)width + width:(int)height + mode:(RTCVideoRotation)mode; + ++ (int)I420ToNV12:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstY:(uint8_t*)dstY + dstStrideY:(int)dstStrideY + dstUV:(uint8_t*)dstUV + dstStrideUV:(int)dstStrideUV + width:(int)width + width:(int)height; + ++ (int)I420ToNV21:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstY:(uint8_t*)dstY + dstStrideY:(int)dstStrideY + dstUV:(uint8_t*)dstUV + dstStrideUV:(int)dstStrideUV + width:(int)width + width:(int)height; + ++ (int)I420ToARGB:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstARGB:(uint8_t*)dstARGB + dstStrideARGB:(int)dstStrideARGB + width:(int)width + height:(int)height; + ++ (int)I420ToBGRA:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstBGRA:(uint8_t*)dstBGRA + dstStrideBGRA:(int)dstStrideBGRA + width:(int)width + height:(int)height; + ++ (int)I420ToABGR:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstABGR:(uint8_t*)dstABGR + dstStrideABGR:(int)dstStrideABGR + width:(int)width + height:(int)height; + ++ (int)I420ToRGBA:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstRGBA:(uint8_t*)dstRGBA + dstStrideRGBA:(int)dstStrideRGBA + width:(int)width + height:(int)height; + ++ (int)I420ToRGB24:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstRGB24:(uint8_t*)dstRGB24 + dstStrideRGB24:(int)dstStrideRGB24 + width:(int)width + height:(int)height; + +@end diff --git a/sdk/objc/helpers/RTCYUVHelper.mm b/sdk/objc/helpers/RTCYUVHelper.mm new file mode 100644 index 0000000000..3f610ff990 --- /dev/null +++ b/sdk/objc/helpers/RTCYUVHelper.mm @@ -0,0 +1,179 @@ +/* + * Copyright 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#import "RTCYUVHelper.h" + +#include "third_party/libyuv/include/libyuv.h" + +@implementation RTC_OBJC_TYPE (RTCYUVHelper) + ++ (void)I420Rotate:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstY:(uint8_t*)dstY + dstStrideY:(int)dstStrideY + dstU:(uint8_t*)dstU + dstStrideU:(int)dstStrideU + dstV:(uint8_t*)dstV + dstStrideV:(int)dstStrideV + width:(int)width + width:(int)height + mode:(RTCVideoRotation)mode { + libyuv::I420Rotate(srcY, + srcStrideY, + srcU, + srcStrideU, + srcV, + srcStrideV, + dstY, + dstStrideY, + dstU, + dstStrideU, + dstV, + dstStrideV, + width, + height, + (libyuv::RotationMode)mode); +} + ++ (int)I420ToNV12:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstY:(uint8_t*)dstY + dstStrideY:(int)dstStrideY + dstUV:(uint8_t*)dstUV + dstStrideUV:(int)dstStrideUV + width:(int)width + width:(int)height { + return libyuv::I420ToNV12(srcY, + srcStrideY, + srcU, + srcStrideU, + srcV, + srcStrideV, + dstY, + dstStrideY, + dstUV, + dstStrideUV, + width, + height); +} + ++ (int)I420ToNV21:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstY:(uint8_t*)dstY + dstStrideY:(int)dstStrideY + dstUV:(uint8_t*)dstUV + dstStrideUV:(int)dstStrideUV + width:(int)width + width:(int)height { + return libyuv::I420ToNV21(srcY, + srcStrideY, + srcU, + srcStrideU, + srcV, + srcStrideV, + dstY, + dstStrideY, + dstUV, + dstStrideUV, + width, + height); +} + ++ (int)I420ToARGB:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstARGB:(uint8_t*)dstARGB + dstStrideARGB:(int)dstStrideARGB + width:(int)width + height:(int)height { + return libyuv::I420ToARGB( + srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dstARGB, dstStrideARGB, width, height); +} + ++ (int)I420ToBGRA:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstBGRA:(uint8_t*)dstBGRA + dstStrideBGRA:(int)dstStrideBGRA + width:(int)width + height:(int)height { + return libyuv::I420ToBGRA( + srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dstBGRA, dstStrideBGRA, width, height); +} + ++ (int)I420ToABGR:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstABGR:(uint8_t*)dstABGR + dstStrideABGR:(int)dstStrideABGR + width:(int)width + height:(int)height { + return libyuv::I420ToABGR( + srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dstABGR, dstStrideABGR, width, height); +} + ++ (int)I420ToRGBA:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstRGBA:(uint8_t*)dstRGBA + dstStrideRGBA:(int)dstStrideRGBA + width:(int)width + height:(int)height { + return libyuv::I420ToRGBA( + srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dstRGBA, dstStrideRGBA, width, height); +} + ++ (int)I420ToRGB24:(const uint8_t*)srcY + 
srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstRGB24:(uint8_t*)dstRGB24 + dstStrideRGB24:(int)dstStrideRGB24 + width:(int)width + height:(int)height { + return libyuv::I420ToRGB24(srcY, + srcStrideY, + srcU, + srcStrideU, + srcV, + srcStrideV, + dstRGB24, + dstStrideRGB24, + width, + height); +} + +@end diff --git a/video/video_stream_encoder.cc b/video/video_stream_encoder.cc index e556ff9b8c..5fb3dd1b78 100644 --- a/video/video_stream_encoder.cc +++ b/video/video_stream_encoder.cc @@ -2042,9 +2042,15 @@ void VideoStreamEncoder::EncodeVideoFrame(const VideoFrame& video_frame, was_encode_called_since_last_initialization_ = true; if (encode_status < 0) { - RTC_LOG(LS_ERROR) << "Encoder failed, failing encoder format: " - << encoder_config_.video_format.ToString(); - RequestEncoderSwitch(); + if (encode_status == WEBRTC_VIDEO_CODEC_ENCODER_FAILURE) { + RTC_LOG(LS_ERROR) << "Encoder failed, failing encoder format: " + << encoder_config_.video_format.ToString(); + RequestEncoderSwitch(); + } else { + RTC_LOG(LS_ERROR) << "Failed to encode frame. Error code: " + << encode_status; + } + return; } From 8e832d1163644ab504412c9b8f3ba8510d9890d6 Mon Sep 17 00:00:00 2001 From: cloudwebrtc Date: Tue, 13 Jun 2023 00:32:04 +0800 Subject: [PATCH 10/42] Desktop Capture for macOS. [Mac] feat: Support screen capture for macOS. (#24) (#36) fix: Get thumbnails asynchronously. (#37) fix: Use CVPixelBuffer to build DesktopCapture Frame, fix the crash caused by non-CVPixelBuffer frame in RTCVideoEncoderH264 that cannot be cropped. (#63) Fix the crash when setting the fps of the virtual camera. (#62) --- .../mac/screen_capturer_mac.mm | 11 +- sdk/BUILD.gn | 41 +++ .../capturer/RTCCameraVideoCapturer.m | 4 +- .../capturer/RTCDesktopCapturer+Private.h | 49 ++++ .../components/capturer/RTCDesktopCapturer.h | 61 +++++ .../components/capturer/RTCDesktopCapturer.mm | 104 ++++++++ .../capturer/RTCDesktopMediaList+Private.h | 40 +++ .../components/capturer/RTCDesktopMediaList.h | 51 ++++ .../capturer/RTCDesktopMediaList.mm | 99 +++++++ .../capturer/RTCDesktopSource+Private.h | 37 +++ .../components/capturer/RTCDesktopSource.h | 40 +++ .../components/capturer/RTCDesktopSource.mm | 68 +++++ sdk/objc/native/src/objc_desktop_capture.h | 70 +++++ sdk/objc/native/src/objc_desktop_capture.mm | 205 ++++++++++++++ sdk/objc/native/src/objc_desktop_media_list.h | 111 ++++++++ .../native/src/objc_desktop_media_list.mm | 252 ++++++++++++++++++ .../native/src/objc_video_track_source.mm | 2 +- 17 files changed, 1233 insertions(+), 12 deletions(-) create mode 100644 sdk/objc/components/capturer/RTCDesktopCapturer+Private.h create mode 100644 sdk/objc/components/capturer/RTCDesktopCapturer.h create mode 100644 sdk/objc/components/capturer/RTCDesktopCapturer.mm create mode 100644 sdk/objc/components/capturer/RTCDesktopMediaList+Private.h create mode 100644 sdk/objc/components/capturer/RTCDesktopMediaList.h create mode 100644 sdk/objc/components/capturer/RTCDesktopMediaList.mm create mode 100644 sdk/objc/components/capturer/RTCDesktopSource+Private.h create mode 100644 sdk/objc/components/capturer/RTCDesktopSource.h create mode 100644 sdk/objc/components/capturer/RTCDesktopSource.mm create mode 100644 sdk/objc/native/src/objc_desktop_capture.h create mode 100644 sdk/objc/native/src/objc_desktop_capture.mm create mode 100644 sdk/objc/native/src/objc_desktop_media_list.h create mode 100644 sdk/objc/native/src/objc_desktop_media_list.mm diff 
--git a/modules/desktop_capture/mac/screen_capturer_mac.mm b/modules/desktop_capture/mac/screen_capturer_mac.mm index 8f0c68d48b..1f3889f250 100644 --- a/modules/desktop_capture/mac/screen_capturer_mac.mm +++ b/modules/desktop_capture/mac/screen_capturer_mac.mm @@ -297,16 +297,7 @@ DesktopRect GetExcludedWindowPixelBounds(CGWindowID window, float dip_to_pixel_s ScreenConfigurationChanged(); } - // When screen is zoomed in/out, OSX only updates the part of Rects currently - // displayed on screen, with relative location to current top-left on screen. - // This will cause problems when we copy the dirty regions to the captured - // image. So we invalidate the whole screen to copy all the screen contents. - // With CGI method, the zooming will be ignored and the whole screen contents - // will be captured as before. - // With IOSurface method, the zoomed screen contents will be captured. - if (UAZoomEnabled()) { - helper_.InvalidateScreen(screen_pixel_bounds_.size()); - } + helper_.InvalidateScreen(screen_pixel_bounds_.size()); DesktopRegion region; helper_.TakeInvalidRegion(®ion); diff --git a/sdk/BUILD.gn b/sdk/BUILD.gn index 41de1bb2af..bfbfc33eaf 100644 --- a/sdk/BUILD.gn +++ b/sdk/BUILD.gn @@ -700,6 +700,43 @@ if (is_ios || is_mac) { "../rtc_base/system:gcd_helpers", ] } + + rtc_library("desktopcapture_objc") { + visibility = [ "*" ] + sources = [ + "objc/components/capturer/RTCDesktopCapturer+Private.h", + "objc/components/capturer/RTCDesktopCapturer.h", + "objc/components/capturer/RTCDesktopCapturer.mm", + "objc/components/capturer/RTCDesktopSource+Private.h", + "objc/components/capturer/RTCDesktopSource.h", + "objc/components/capturer/RTCDesktopSource.mm", + "objc/components/capturer/RTCDesktopMediaList+Private.h", + "objc/components/capturer/RTCDesktopMediaList.h", + "objc/components/capturer/RTCDesktopMediaList.mm", + "objc/native/src/objc_desktop_capture.h", + "objc/native/src/objc_desktop_capture.mm", + "objc/native/src/objc_desktop_media_list.h", + "objc/native/src/objc_desktop_media_list.mm", + ] + frameworks = [ + "AppKit.framework", + ] + + configs += [ "..:common_objc" ] + + public_configs = [ ":common_config_objc" ] + + deps = [ + ":base_objc", + ":helpers_objc", + ":videoframebuffer_objc", + "../rtc_base/system:gcd_helpers", + "../modules/desktop_capture", + ] + if(is_mac) { + deps += [ "//third_party:jpeg", ] + } + } rtc_library("videocodec_objc") { visibility = [ "*" ] @@ -1527,6 +1564,9 @@ if (is_ios || is_mac) { "objc/base/RTCYUVPlanarBuffer.h", "objc/components/capturer/RTCCameraVideoCapturer.h", "objc/components/capturer/RTCFileVideoCapturer.h", + "objc/components/capturer/RTCDesktopCapturer.h", + "objc/components/capturer/RTCDesktopSource.h", + "objc/components/capturer/RTCDesktopMediaList.h", "objc/components/renderer/metal/RTCMTLVideoView.h", "objc/components/renderer/metal/RTCMTLNSVideoView.h", "objc/components/renderer/opengl/RTCNSGLVideoView.h", @@ -1561,6 +1601,7 @@ if (is_ios || is_mac) { ":opengl_ui_objc", ":peerconnectionfactory_base_objc", ":videocapture_objc", + ":desktopcapture_objc", ":videocodec_objc", ":videotoolbox_objc", ] diff --git a/sdk/objc/components/capturer/RTCCameraVideoCapturer.m b/sdk/objc/components/capturer/RTCCameraVideoCapturer.m index 98d3cf9f45..f7ae04e814 100644 --- a/sdk/objc/components/capturer/RTCCameraVideoCapturer.m +++ b/sdk/objc/components/capturer/RTCCameraVideoCapturer.m @@ -495,7 +495,9 @@ - (void)updateDeviceCaptureFormat:(AVCaptureDeviceFormat *)format fps:(NSInteger @"updateDeviceCaptureFormat must be called on the 
capture queue."); @try { _currentDevice.activeFormat = format; - _currentDevice.activeVideoMinFrameDuration = CMTimeMake(1, fps); + if(![NSStringFromClass([_currentDevice class]) isEqualToString:@"AVCaptureDALDevice"]) { + _currentDevice.activeVideoMinFrameDuration = CMTimeMake(1, fps); + } } @catch (NSException *exception) { RTCLogError(@"Failed to set active format!\n User info:%@", exception.userInfo); return; diff --git a/sdk/objc/components/capturer/RTCDesktopCapturer+Private.h b/sdk/objc/components/capturer/RTCDesktopCapturer+Private.h new file mode 100644 index 0000000000..7e293a1a56 --- /dev/null +++ b/sdk/objc/components/capturer/RTCDesktopCapturer+Private.h @@ -0,0 +1,49 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import "RTCDesktopCapturer.h" + +#include "sdk/objc/native/src/objc_desktop_capture.h" + +NS_ASSUME_NONNULL_BEGIN + +RTC_OBJC_EXPORT +@protocol RTC_OBJC_TYPE +(DesktopCapturerDelegate) +-(void)didCaptureVideoFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *) frame; +-(void)didSourceCaptureStart; +-(void)didSourceCapturePaused; +-(void)didSourceCaptureStop; +-(void)didSourceCaptureError; +@end + +@interface RTCDesktopCapturer () + +@property(nonatomic, readonly)std::shared_ptr nativeCapturer; + +- (void)didCaptureVideoFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame; + +-(void)didSourceCaptureStart; + +-(void)didSourceCapturePaused; + +-(void)didSourceCaptureStop; + +-(void)didSourceCaptureError; + +@end + +NS_ASSUME_NONNULL_END \ No newline at end of file diff --git a/sdk/objc/components/capturer/RTCDesktopCapturer.h b/sdk/objc/components/capturer/RTCDesktopCapturer.h new file mode 100644 index 0000000000..160c00d208 --- /dev/null +++ b/sdk/objc/components/capturer/RTCDesktopCapturer.h @@ -0,0 +1,61 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import +#import + +#import "RTCMacros.h" +#import "RTCVideoCapturer.h" +#import "RTCDesktopSource.h" + +NS_ASSUME_NONNULL_BEGIN + +@class RTCDesktopCapturer; + +RTC_OBJC_EXPORT +@protocol RTC_OBJC_TYPE +(RTCDesktopCapturerDelegate) +-(void)didSourceCaptureStart:(RTCDesktopCapturer *) capturer; + +-(void)didSourceCapturePaused:(RTCDesktopCapturer *) capturer; + +-(void)didSourceCaptureStop:(RTCDesktopCapturer *) capturer; + +-(void)didSourceCaptureError:(RTCDesktopCapturer *) capturer; +@end + +RTC_OBJC_EXPORT +// Screen capture that implements RTCVideoCapturer. Delivers frames to a +// RTCVideoCapturerDelegate (usually RTCVideoSource). 
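+// Capture lifecycle changes (start/pause/stop/error) are reported separately
+// to the RTCDesktopCapturerDelegate passed as |delegate|; captured frames go
+// to the RTCVideoCapturerDelegate passed as |captureDelegate|.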
+@interface RTC_OBJC_TYPE (RTCDesktopCapturer) : RTC_OBJC_TYPE(RTCVideoCapturer) + +@property(nonatomic, readonly) RTCDesktopSource *source; + +- (instancetype)initWithSource:(RTCDesktopSource*)source delegate:(__weak id)delegate captureDelegate:(__weak id)captureDelegate; + +- (instancetype)initWithDefaultScreen:(__weak id)delegate captureDelegate:(__weak id)captureDelegate; + +- (void)startCapture; + +- (void)startCaptureWithFPS:(NSInteger)fps; + +- (void)stopCapture; + +- (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHandler; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/components/capturer/RTCDesktopCapturer.mm b/sdk/objc/components/capturer/RTCDesktopCapturer.mm new file mode 100644 index 0000000000..a7d5c60eb2 --- /dev/null +++ b/sdk/objc/components/capturer/RTCDesktopCapturer.mm @@ -0,0 +1,104 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import + +#import "base/RTCLogging.h" +#import "base/RTCVideoFrameBuffer.h" + +#import "components/video_frame_buffer/RTCCVPixelBuffer.h" + +#import "RTCDesktopCapturer.h" +#import "RTCDesktopCapturer+Private.h" +#import "RTCDesktopSource+Private.h" + +@implementation RTC_OBJC_TYPE (RTCDesktopCapturer) { + __weak id _delegate; +} + +@synthesize nativeCapturer = _nativeCapturer; +@synthesize source = _source; + +- (instancetype)initWithSource:(RTCDesktopSource*)source delegate:(__weak id)delegate captureDelegate:(__weak id)captureDelegate { + if (self = [super initWithDelegate:captureDelegate]) { + webrtc::DesktopType captureType = webrtc::kScreen; + if(source.sourceType == RTCDesktopSourceTypeWindow) { + captureType = webrtc::kWindow; + } + _nativeCapturer = std::make_shared(captureType, source.nativeMediaSource->id(), self); + _source = source; + _delegate = delegate; + } + return self; +} + +- (instancetype)initWithDefaultScreen:(__weak id)delegate captureDelegate:(__weak id)captureDelegate { + if (self = [super initWithDelegate:captureDelegate]) { + _nativeCapturer = std::make_unique(webrtc::kScreen, -1, self); + _source = nil; + _delegate = delegate; + } + return self; +} + + +-(void)dealloc { + _nativeCapturer->Stop(); + _nativeCapturer = nullptr; +} + +- (void)startCapture { + [self didSourceCaptureStart]; + _nativeCapturer->Start(30); +} + +- (void)startCaptureWithFPS:(NSInteger)fps { + _nativeCapturer->Start(fps); +} + +- (void)didCaptureVideoFrame + : (RTC_OBJC_TYPE(RTCVideoFrame) *)frame { + [self.delegate capturer:self didCaptureVideoFrame:frame]; +} + +- (void)stopCapture { + _nativeCapturer->Stop(); +} + +- (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHandler { + [self stopCapture]; + if(completionHandler != nil) { + completionHandler(); + } +} + +-(void)didSourceCaptureStart { + [_delegate didSourceCaptureStart:self]; +} + +-(void)didSourceCapturePaused { + [_delegate didSourceCapturePaused:self]; +} + +-(void)didSourceCaptureStop { + [_delegate didSourceCaptureStop:self]; +} + +-(void)didSourceCaptureError { + 
[_delegate didSourceCaptureError:self]; +} + +@end diff --git a/sdk/objc/components/capturer/RTCDesktopMediaList+Private.h b/sdk/objc/components/capturer/RTCDesktopMediaList+Private.h new file mode 100644 index 0000000000..fc3b080ad2 --- /dev/null +++ b/sdk/objc/components/capturer/RTCDesktopMediaList+Private.h @@ -0,0 +1,40 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import "RTCDesktopMediaList.h" + +namespace webrtc { + class ObjCDesktopMediaList; + class MediaSource; +} + +NS_ASSUME_NONNULL_BEGIN + +@interface RTCDesktopMediaList () + +@property(nonatomic, readonly)std::shared_ptr nativeMediaList; + +-(void)mediaSourceAdded:(webrtc::MediaSource *) source; + +-(void)mediaSourceRemoved:(webrtc::MediaSource *) source; + +-(void)mediaSourceNameChanged:(webrtc::MediaSource *) source; + +-(void)mediaSourceThumbnailChanged:(webrtc::MediaSource *) source; + +@end + +NS_ASSUME_NONNULL_END \ No newline at end of file diff --git a/sdk/objc/components/capturer/RTCDesktopMediaList.h b/sdk/objc/components/capturer/RTCDesktopMediaList.h new file mode 100644 index 0000000000..fafeaf5e0d --- /dev/null +++ b/sdk/objc/components/capturer/RTCDesktopMediaList.h @@ -0,0 +1,51 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#import +#import + +#import "RTCMacros.h" +#import "RTCDesktopSource.h" + +NS_ASSUME_NONNULL_BEGIN + +RTC_OBJC_EXPORT +@protocol RTC_OBJC_TYPE +(RTCDesktopMediaListDelegate) + +- (void)didDesktopSourceAdded:(RTC_OBJC_TYPE(RTCDesktopSource) *) source; + +- (void)didDesktopSourceRemoved:(RTC_OBJC_TYPE(RTCDesktopSource) *) source; + +- (void)didDesktopSourceNameChanged:(RTC_OBJC_TYPE(RTCDesktopSource) *) source; + +- (void)didDesktopSourceThumbnailChanged:(RTC_OBJC_TYPE(RTCDesktopSource) *) source; +@end + +RTC_OBJC_EXPORT +@interface RTC_OBJC_TYPE (RTCDesktopMediaList) : NSObject + +-(instancetype)initWithType:(RTCDesktopSourceType)type delegate:(__weak id)delegate; + +@property(nonatomic, readonly) RTCDesktopSourceType sourceType; + +- (int32_t)UpdateSourceList:(BOOL)forceReload updateAllThumbnails:(BOOL)updateThumbnail; + +- (NSArray*) getSources; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/components/capturer/RTCDesktopMediaList.mm b/sdk/objc/components/capturer/RTCDesktopMediaList.mm new file mode 100644 index 0000000000..7aa7dca1be --- /dev/null +++ b/sdk/objc/components/capturer/RTCDesktopMediaList.mm @@ -0,0 +1,99 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import "RTCDesktopMediaList.h" + +#import "RTCDesktopSource+Private.h" +#import "RTCDesktopMediaList+Private.h" + +@implementation RTCDesktopMediaList { + RTCDesktopSourceType _sourceType; + NSMutableArray* _sources; + __weak id _delegate; +} + +@synthesize sourceType = _sourceType; +@synthesize nativeMediaList = _nativeMediaList; + +- (instancetype)initWithType:(RTCDesktopSourceType)type delegate:(__weak id)delegate{ + if (self = [super init]) { + webrtc::DesktopType captureType = webrtc::kScreen; + if(type == RTCDesktopSourceTypeWindow) { + captureType = webrtc::kWindow; + } + _nativeMediaList = std::make_shared(captureType, self); + _sourceType = type; + _delegate = delegate; + } + return self; +} + +- (int32_t)UpdateSourceList:(BOOL)forceReload updateAllThumbnails:(BOOL)updateThumbnail { + return _nativeMediaList->UpdateSourceList(forceReload, updateThumbnail); +} + +-(NSArray*) getSources { + _sources = [NSMutableArray array]; + int sourceCount = _nativeMediaList->GetSourceCount(); + for (int i = 0; i < sourceCount; i++) { + webrtc::MediaSource *mediaSource = _nativeMediaList->GetSource(i); + [_sources addObject:[[RTCDesktopSource alloc] initWithNativeSource:mediaSource sourceType:_sourceType]]; + } + return _sources; +} + +-(void)mediaSourceAdded:(webrtc::MediaSource *) source { + RTCDesktopSource *desktopSource = [[RTCDesktopSource alloc] initWithNativeSource:source sourceType:_sourceType]; + [_sources addObject:desktopSource]; + [_delegate didDesktopSourceAdded:desktopSource]; +} + +-(void)mediaSourceRemoved:(webrtc::MediaSource *) source { + RTCDesktopSource *desktopSource = [self getSourceById:source]; + if(desktopSource != nil) { + [_sources removeObject:desktopSource]; + [_delegate didDesktopSourceRemoved:desktopSource]; + } +} + 
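+// A minimal usage sketch (not part of this patch): `self` is assumed to
+// implement RTCDesktopMediaListDelegate and RTCDesktopCapturerDelegate, and
+// `videoSource` is assumed to be an RTCVideoSource acting as the
+// RTCVideoCapturerDelegate.
+//
+//   RTCDesktopMediaList *list =
+//       [[RTCDesktopMediaList alloc] initWithType:RTCDesktopSourceTypeScreen
+//                                        delegate:self];
+//   [list UpdateSourceList:YES updateAllThumbnails:YES];
+//   RTCDesktopSource *source = [list getSources].firstObject;
+//   RTCDesktopCapturer *capturer =
+//       [[RTCDesktopCapturer alloc] initWithSource:source
+//                                         delegate:self
+//                                  captureDelegate:videoSource];
+//   [capturer startCaptureWithFPS:30];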
+-(void)mediaSourceNameChanged:(webrtc::MediaSource *) source { + RTCDesktopSource *desktopSource = [self getSourceById:source]; + if(desktopSource != nil) { + [desktopSource setName:source->name().c_str()]; + [_delegate didDesktopSourceNameChanged:desktopSource]; + } +} + +-(void)mediaSourceThumbnailChanged:(webrtc::MediaSource *) source { + RTCDesktopSource *desktopSource = [self getSourceById:source]; + if(desktopSource != nil) { + [desktopSource setThumbnail:source->thumbnail()]; + [_delegate didDesktopSourceThumbnailChanged:desktopSource]; + } +} + +-(RTCDesktopSource *)getSourceById:(webrtc::MediaSource *) source { + NSEnumerator *enumerator = [_sources objectEnumerator]; + RTCDesktopSource *object; + while ((object = enumerator.nextObject) != nil) { + if(object.nativeMediaSource == source) { + return object; + } + } + return nil; +} + +@end \ No newline at end of file diff --git a/sdk/objc/components/capturer/RTCDesktopSource+Private.h b/sdk/objc/components/capturer/RTCDesktopSource+Private.h new file mode 100644 index 0000000000..f5a0d14e0f --- /dev/null +++ b/sdk/objc/components/capturer/RTCDesktopSource+Private.h @@ -0,0 +1,37 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#import + +#import "RTCDesktopSource.h" + +#include "sdk/objc/native/src/objc_desktop_media_list.h" + +NS_ASSUME_NONNULL_BEGIN + +@interface RTCDesktopSource () + +- (instancetype)initWithNativeSource:(webrtc::MediaSource*) nativeSource + sourceType:(RTCDesktopSourceType) sourceType; + +@property(nonatomic, readonly)webrtc::MediaSource* nativeMediaSource; + +-(void) setName:(const char *) name; + +-(void) setThumbnail:(std::vector) thumbnail; + +@end + +NS_ASSUME_NONNULL_END \ No newline at end of file diff --git a/sdk/objc/components/capturer/RTCDesktopSource.h b/sdk/objc/components/capturer/RTCDesktopSource.h new file mode 100644 index 0000000000..82da458ce6 --- /dev/null +++ b/sdk/objc/components/capturer/RTCDesktopSource.h @@ -0,0 +1,40 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#import +#import +#import + +#import "RTCMacros.h" + +typedef NS_ENUM(NSInteger, RTCDesktopSourceType) { + RTCDesktopSourceTypeScreen, + RTCDesktopSourceTypeWindow, +}; + +RTC_OBJC_EXPORT +@interface RTC_OBJC_TYPE (RTCDesktopSource) : NSObject + +@property(nonatomic, readonly) NSString *sourceId; + +@property(nonatomic, readonly) NSString *name; + +@property(nonatomic, readonly) NSImage *thumbnail; + +@property(nonatomic, readonly) RTCDesktopSourceType sourceType; + +-( NSImage *)UpdateThumbnail; + +@end \ No newline at end of file diff --git a/sdk/objc/components/capturer/RTCDesktopSource.mm b/sdk/objc/components/capturer/RTCDesktopSource.mm new file mode 100644 index 0000000000..3f5f23894b --- /dev/null +++ b/sdk/objc/components/capturer/RTCDesktopSource.mm @@ -0,0 +1,68 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import + +#import "RTCDesktopSource.h" +#import "RTCDesktopSource+Private.h" + +@implementation RTCDesktopSource { + NSString *_sourceId; + NSString *_name; + NSImage *_thumbnail; + RTCDesktopSourceType _sourceType; +} + +@synthesize sourceId = _sourceId; +@synthesize name = _name; +@synthesize thumbnail = _thumbnail; +@synthesize sourceType = _sourceType; +@synthesize nativeMediaSource = _nativeMediaSource; + +- (instancetype)initWithNativeSource:(webrtc::MediaSource*)nativeSource + sourceType:(RTCDesktopSourceType) sourceType { + if (self = [super init]) { + _nativeMediaSource = nativeSource; + _sourceId = [NSString stringWithUTF8String:std::to_string(nativeSource->id()).c_str()]; + _name = [NSString stringWithUTF8String:nativeSource->name().c_str()]; + _thumbnail = [self createThumbnailFromNativeSource:nativeSource->thumbnail()]; + _sourceType = sourceType; + } + return self; +} + +-(NSImage*)createThumbnailFromNativeSource:(std::vector)thumbnail { + NSData* data = [[NSData alloc] initWithBytes:thumbnail.data() length:thumbnail.size()]; + NSImage *image = [[NSImage alloc] initWithData:data]; + return image; +} + +-( NSImage *)UpdateThumbnail { + if(_nativeMediaSource->UpdateThumbnail()) { + _thumbnail = [self createThumbnailFromNativeSource:_nativeMediaSource->thumbnail()]; + } + return _thumbnail; +} + +-(void)setName:(const char *) name { + _name = [NSString stringWithUTF8String:name]; +} + +-(void)setThumbnail:(std::vector) thumbnail { + _thumbnail = [self createThumbnailFromNativeSource:thumbnail]; +} + +@end diff --git a/sdk/objc/native/src/objc_desktop_capture.h b/sdk/objc/native/src/objc_desktop_capture.h new file mode 100644 index 0000000000..a781457220 --- /dev/null +++ b/sdk/objc/native/src/objc_desktop_capture.h @@ -0,0 +1,70 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef SDK_OBJC_NATIVE_SRC_OBJC_DESKTOP_CAPTURE_H_ +#define SDK_OBJC_NATIVE_SRC_OBJC_DESKTOP_CAPTURE_H_ + +#import "base/RTCMacros.h" + +#include "api/video/i420_buffer.h" +#include "api/video/video_frame.h" +#include "modules/desktop_capture/desktop_capture_options.h" +#include "modules/desktop_capture/desktop_and_cursor_composer.h" +#include "modules/desktop_capture/desktop_frame.h" +#include "rtc_base/thread.h" + +@protocol RTC_OBJC_TYPE +(DesktopCapturerDelegate); + +namespace webrtc { + +enum DesktopType { kScreen, kWindow }; + +class ObjCDesktopCapturer : public DesktopCapturer::Callback { + public: + enum CaptureState { CS_RUNNING, CS_STOPPED, CS_FAILED}; + + public: + ObjCDesktopCapturer(DesktopType type, + webrtc::DesktopCapturer::SourceId source_id, + id delegate); + virtual ~ObjCDesktopCapturer(); + + virtual CaptureState Start(uint32_t fps); + + virtual void Stop(); + + virtual bool IsRunning(); + + protected: + virtual void OnCaptureResult(webrtc::DesktopCapturer::Result result, + std::unique_ptr frame) override; + private: + void CaptureFrame(); + webrtc::DesktopCaptureOptions options_; + std::unique_ptr capturer_; + std::unique_ptr thread_; + CaptureState capture_state_ = CS_STOPPED; + DesktopType type_; + webrtc::DesktopCapturer::SourceId source_id_; + id delegate_; + uint32_t capture_delay_ = 1000; // 1s + webrtc::DesktopCapturer::Result result_ = webrtc::DesktopCapturer::Result::SUCCESS; +}; + +} // namespace webrtc + +#endif // SDK_OBJC_NATIVE_SRC_OBJC_DESKTOP_CAPTURE_H_ diff --git a/sdk/objc/native/src/objc_desktop_capture.mm b/sdk/objc/native/src/objc_desktop_capture.mm new file mode 100644 index 0000000000..7aba3e5612 --- /dev/null +++ b/sdk/objc/native/src/objc_desktop_capture.mm @@ -0,0 +1,205 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "api/sequence_checker.h" +#include "rtc_base/checks.h" +#include "sdk/objc/native/src/objc_desktop_capture.h" +#include "sdk/objc/native/src/objc_video_frame.h" +#include "third_party/libyuv/include/libyuv.h" + +#import "components/capturer/RTCDesktopCapturer+Private.h" +#import "components/video_frame_buffer/RTCCVPixelBuffer.h" + +namespace webrtc { + +enum { kCaptureDelay = 33, kCaptureMessageId = 1000 }; + +ObjCDesktopCapturer::ObjCDesktopCapturer(DesktopType type, + webrtc::DesktopCapturer::SourceId source_id, + id delegate) + : thread_(rtc::Thread::Create()), source_id_(source_id), delegate_(delegate) { + RTC_DCHECK(thread_); + type_ = type; + thread_->Start(); + options_ = webrtc::DesktopCaptureOptions::CreateDefault(); + options_.set_detect_updated_region(true); + options_.set_allow_iosurface(true); + thread_->BlockingCall([this, type] { + if (type == kScreen) { + capturer_ = std::make_unique( + webrtc::DesktopCapturer::CreateScreenCapturer(options_), options_); + } else { + capturer_ = std::make_unique( + webrtc::DesktopCapturer::CreateWindowCapturer(options_), options_); + } + }); +} + +ObjCDesktopCapturer::~ObjCDesktopCapturer() { + thread_->BlockingCall([this] { + capturer_.reset(); + }); +} + +ObjCDesktopCapturer::CaptureState ObjCDesktopCapturer::Start(uint32_t fps) { + if(capture_state_ == CS_RUNNING) { + return capture_state_; + } + + if(fps == 0) { + capture_state_ = CS_FAILED; + return capture_state_; + } + + if (fps >= 60) { + capture_delay_ = uint32_t(1000.0 / 60.0); + } else { + capture_delay_ = uint32_t(1000.0 / fps); + } + + if (source_id_ != -1) { + if (!capturer_->SelectSource(source_id_)) { + capture_state_ = CS_FAILED; + return capture_state_; + } + if (type_ == kWindow) { + if (!capturer_->FocusOnSelectedSource()) { + capture_state_ = CS_FAILED; + return capture_state_; + } + } + } + + thread_->BlockingCall([this] { + capturer_->Start(this); + }); + capture_state_ = CS_RUNNING; + + thread_->PostTask([this] { + CaptureFrame(); + }); + + [delegate_ didSourceCaptureStart]; + return capture_state_; +} + +void ObjCDesktopCapturer::Stop() { + [delegate_ didSourceCaptureStop]; + capture_state_ = CS_STOPPED; +} + +bool ObjCDesktopCapturer::IsRunning() { + return capture_state_ == CS_RUNNING; +} + +void ObjCDesktopCapturer::OnCaptureResult(webrtc::DesktopCapturer::Result result, + std::unique_ptr frame) { + if (result != result_) { + if (result == webrtc::DesktopCapturer::Result::ERROR_PERMANENT) { + [delegate_ didSourceCaptureError]; + capture_state_ = CS_FAILED; + return; + } + + if (result == webrtc::DesktopCapturer::Result::ERROR_TEMPORARY) { + result_ = result; + [delegate_ didSourceCapturePaused]; + return; + } + + if (result == webrtc::DesktopCapturer::Result::SUCCESS) { + result_ = result; + [delegate_ didSourceCaptureStart]; + } + } + + if (result == webrtc::DesktopCapturer::Result::ERROR_TEMPORARY) { + return; + } + + int width = frame->size().width(); + int height = frame->size().height(); + int real_width = width; + + if (type_ == kWindow) { + int multiple = 0; +#if defined(WEBRTC_ARCH_X86_FAMILY) + multiple = 16; +#elif defined(WEBRTC_ARCH_ARM64) + multiple = 32; +#endif + // A multiple of $multiple must be used as the width of the src frame, + // and the right black border needs to be cropped during conversion. 
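+    // For example, a 1283-pixel-wide window on arm64 (multiple == 32) is
+    // padded below to (1283 / 32 + 1) * 32 == 1312 pixels, with the extra
+    // right-hand columns cropped away in the libyuv conversion that follows.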
+ if (multiple != 0 && (width % multiple) != 0) { + width = (width / multiple + 1) * multiple; + } + } + + CVPixelBufferRef pixelBuffer = NULL; + + NSDictionary *pixelAttributes = @{(NSString *)kCVPixelBufferIOSurfacePropertiesKey : @{}}; + CVReturn res = CVPixelBufferCreate(kCFAllocatorDefault, + width, + height, + kCVPixelFormatType_32BGRA, + (__bridge CFDictionaryRef)(pixelAttributes), + &pixelBuffer); + CVPixelBufferLockBaseAddress(pixelBuffer, 0); + uint8_t *pxdata = (uint8_t *)CVPixelBufferGetBaseAddress(pixelBuffer); + libyuv::ConvertToARGB(reinterpret_cast(frame->data()), + real_width * height * 4, + reinterpret_cast(pxdata), + width * 4, + 0, + 0, + width, + height, + real_width, + height, + libyuv::kRotate0, + libyuv::FOURCC_ARGB); + CVPixelBufferUnlockBaseAddress(pixelBuffer, 0); + + if (res != kCVReturnSuccess) { + NSLog(@"Unable to create cvpixelbuffer %d", res); + return; + } + + RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer = + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBuffer]; + NSTimeInterval timeStampSeconds = CACurrentMediaTime(); + int64_t timeStampNs = lroundf(timeStampSeconds * NSEC_PER_SEC); + RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame = + [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:rtcPixelBuffer + rotation:RTCVideoRotation_0 + timeStampNs:timeStampNs]; + CVPixelBufferRelease(pixelBuffer); + [delegate_ didCaptureVideoFrame:videoFrame]; +} + +void ObjCDesktopCapturer::CaptureFrame() { + RTC_DCHECK_RUN_ON(thread_.get()); + if (capture_state_ == CS_RUNNING) { + capturer_->CaptureFrame(); + thread_->PostDelayedHighPrecisionTask( + [this]() { + CaptureFrame(); + }, + TimeDelta::Millis(capture_delay_)); + } +} + +} // namespace webrtc diff --git a/sdk/objc/native/src/objc_desktop_media_list.h b/sdk/objc/native/src/objc_desktop_media_list.h new file mode 100644 index 0000000000..ecb2d27221 --- /dev/null +++ b/sdk/objc/native/src/objc_desktop_media_list.h @@ -0,0 +1,111 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef SDK_OBJC_NATIVE_SRC_OBJC_DESKTOP_MEDIA_LIST_H_ +#define SDK_OBJC_NATIVE_SRC_OBJC_DESKTOP_MEDIA_LIST_H_ + +#import "base/RTCMacros.h" + +#include "api/video/i420_buffer.h" +#include "api/video/video_frame.h" +#include "modules/desktop_capture/desktop_capture_options.h" +#include "modules/desktop_capture/desktop_capturer.h" +#include "modules/desktop_capture/desktop_frame.h" +#include "rtc_base/thread.h" + +#include "objc_desktop_capture.h" + +#import "components/capturer/RTCDesktopMediaList+Private.h" + +namespace webrtc { + +class MediaSource { + public: + MediaSource( ObjCDesktopMediaList *mediaList, DesktopCapturer::Source src, DesktopType type) + : source(src), mediaList_(mediaList), type_(type) {} + virtual ~MediaSource() {} + + DesktopCapturer::Source source; + + // source id + DesktopCapturer::SourceId id() const { return source.id; } + + // source name + std::string name() const { return source.title; } + + // Returns the thumbnail of the source, jpeg format. 
+ std::vector thumbnail() const { return thumbnail_; } + + + + DesktopType type() const { return type_; } + + bool UpdateThumbnail(); + + void SaveCaptureResult(webrtc::DesktopCapturer::Result result, + std::unique_ptr frame); + + private: + std::vector thumbnail_; + ObjCDesktopMediaList *mediaList_; + DesktopType type_; +}; + +class ObjCDesktopMediaList { + public: + enum CaptureState { CS_RUNNING, CS_STOPPED, CS_FAILED}; + public: + ObjCDesktopMediaList(DesktopType type, RTC_OBJC_TYPE(RTCDesktopMediaList)* objcMediaList); + + virtual ~ObjCDesktopMediaList(); + + virtual int32_t UpdateSourceList(bool force_reload = false, bool get_thumbnail = true); + + virtual int GetSourceCount() const; + + virtual MediaSource* GetSource(int index); + + virtual bool GetThumbnail(MediaSource *source, bool notify); + + private: + class CallbackProxy : public DesktopCapturer::Callback { + public: + CallbackProxy(){} + void SetCallback(std::function frame)> on_capture_result) { + on_capture_result_ = on_capture_result; + } + private: + void OnCaptureResult(webrtc::DesktopCapturer::Result result, + std::unique_ptr frame) override { + if(on_capture_result_) on_capture_result_(result, std::move(frame)); + } + std::function frame)> on_capture_result_ = nullptr; + }; + private: + std::unique_ptr callback_; + webrtc::DesktopCaptureOptions options_; + std::unique_ptr capturer_; + std::unique_ptr thread_; + std::vector> sources_; + RTC_OBJC_TYPE(RTCDesktopMediaList)* objcMediaList_; + DesktopType type_; +}; + +} // namespace webrtc + +#endif // SDK_OBJC_NATIVE_SRC_OBJC_DESKTOP_MEDIA_LIST_H_ diff --git a/sdk/objc/native/src/objc_desktop_media_list.mm b/sdk/objc/native/src/objc_desktop_media_list.mm new file mode 100644 index 0000000000..574e47b6f3 --- /dev/null +++ b/sdk/objc/native/src/objc_desktop_media_list.mm @@ -0,0 +1,252 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "sdk/objc/native/src/objc_desktop_media_list.h" +#include "rtc_base/checks.h" +#include "sdk/objc/native/src/objc_video_frame.h" +#include "third_party/libyuv/include/libyuv.h" + +extern "C" { +#if defined(USE_SYSTEM_LIBJPEG) +#include +#else +// Include directory supplied by gn +#include "jpeglib.h" // NOLINT +#endif +} + +#include +#include + +#import + +namespace webrtc { + +ObjCDesktopMediaList::ObjCDesktopMediaList(DesktopType type, + RTC_OBJC_TYPE(RTCDesktopMediaList) * objcMediaList) + : thread_(rtc::Thread::Create()), objcMediaList_(objcMediaList), type_(type) { + RTC_DCHECK(thread_); + thread_->Start(); + options_ = webrtc::DesktopCaptureOptions::CreateDefault(); + options_.set_detect_updated_region(true); + options_.set_allow_iosurface(true); + + callback_ = std::make_unique(); + + thread_->BlockingCall([this, type] { + if (type == kScreen) { + capturer_ = webrtc::DesktopCapturer::CreateScreenCapturer(options_); + } else { + capturer_ = webrtc::DesktopCapturer::CreateWindowCapturer(options_); + } + capturer_->Start(callback_.get()); + }); +} + +ObjCDesktopMediaList::~ObjCDesktopMediaList() { + thread_->BlockingCall([this] { + capturer_.reset(); + }); +} + +int32_t ObjCDesktopMediaList::UpdateSourceList(bool force_reload, bool get_thumbnail) { + if (force_reload) { + for (auto source : sources_) { + [objcMediaList_ mediaSourceRemoved:source.get()]; + } + sources_.clear(); + } + + webrtc::DesktopCapturer::SourceList new_sources; + + thread_->BlockingCall([this, &new_sources] { + capturer_->GetSourceList(&new_sources); + }); + + typedef std::set SourceSet; + SourceSet new_source_set; + for (size_t i = 0; i < new_sources.size(); ++i) { + if (type_ == kScreen && new_sources[i].title.length() == 0) { + new_sources[i].title = std::string("Screen " + std::to_string(i + 1)); + } + new_source_set.insert(new_sources[i].id); + } + // Iterate through the old sources to find the removed sources. + for (size_t i = 0; i < sources_.size(); ++i) { + if (new_source_set.find(sources_[i]->id()) == new_source_set.end()) { + [objcMediaList_ mediaSourceRemoved:(*(sources_.begin() + i)).get()]; + sources_.erase(sources_.begin() + i); + --i; + } + } + // Iterate through the new sources to find the added sources. + if (new_sources.size() > sources_.size()) { + SourceSet old_source_set; + for (size_t i = 0; i < sources_.size(); ++i) { + old_source_set.insert(sources_[i]->id()); + } + for (size_t i = 0; i < new_sources.size(); ++i) { + if (old_source_set.find(new_sources[i].id) == old_source_set.end()) { + MediaSource *source = new MediaSource(this, new_sources[i], type_); + sources_.insert(sources_.begin() + i, std::shared_ptr(source)); + [objcMediaList_ mediaSourceAdded:source]; + GetThumbnail(source, true); + } + } + } + + RTC_DCHECK_EQ(new_sources.size(), sources_.size()); + + // Find the moved/changed sources. + size_t pos = 0; + while (pos < sources_.size()) { + if (!(sources_[pos]->id() == new_sources[pos].id)) { + // Find the source that should be moved to |pos|, starting from |pos + 1| + // of |sources_|, because entries before |pos| should have been sorted. + size_t old_pos = pos + 1; + for (; old_pos < sources_.size(); ++old_pos) { + if (sources_[old_pos]->id() == new_sources[pos].id) break; + } + RTC_DCHECK(sources_[old_pos]->id() == new_sources[pos].id); + + // Move the source from |old_pos| to |pos|. 
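+      // For example, with sources_ == {A, C, B} and new_sources == {A, B, C},
+      // at pos == 1 the entry for B is found at old_pos == 2 and reinserted
+      // at position 1, yielding {A, B, C}.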
+ auto temp = sources_[old_pos]; + sources_.erase(sources_.begin() + old_pos); + sources_.insert(sources_.begin() + pos, temp); + //[objcMediaList_ mediaSourceMoved:old_pos newIndex:pos]; + } + + if (sources_[pos]->source.title != new_sources[pos].title) { + sources_[pos]->source.title = new_sources[pos].title; + [objcMediaList_ mediaSourceNameChanged:sources_[pos].get()]; + } + ++pos; + } + + if (get_thumbnail) { + for (auto source : sources_) { + GetThumbnail(source.get(), true); + } + } + return sources_.size(); +} + +bool ObjCDesktopMediaList::GetThumbnail(MediaSource *source, bool notify) { + thread_->PostTask([this, source, notify] { + if(capturer_->SelectSource(source->id())){ + callback_->SetCallback([&](webrtc::DesktopCapturer::Result result, + std::unique_ptr frame) { + auto old_thumbnail = source->thumbnail(); + source->SaveCaptureResult(result, std::move(frame)); + if(old_thumbnail.size() != source->thumbnail().size() && notify) { + [objcMediaList_ mediaSourceThumbnailChanged:source]; + } + }); + capturer_->CaptureFrame(); + } + }); + + return true; +} + +int ObjCDesktopMediaList::GetSourceCount() const { + return sources_.size(); +} + +MediaSource *ObjCDesktopMediaList::GetSource(int index) { + return sources_[index].get(); +} + +bool MediaSource::UpdateThumbnail() { + return mediaList_->GetThumbnail(this, true); +} + +void MediaSource::SaveCaptureResult(webrtc::DesktopCapturer::Result result, + std::unique_ptr frame) { + if (result != webrtc::DesktopCapturer::Result::SUCCESS) { + return; + } + int width = frame->size().width(); + int height = frame->size().height(); + int real_width = width; + + if (type_ == kWindow) { + int multiple = 0; +#if defined(WEBRTC_ARCH_X86_FAMILY) + multiple = 16; +#elif defined(WEBRTC_ARCH_ARM64) + multiple = 32; +#endif + // A multiple of $multiple must be used as the width of the src frame, + // and the right black border needs to be cropped during conversion. 
+ if (multiple != 0 && (width % multiple) != 0) { + width = (width / multiple + 1) * multiple; + } + } + + CVPixelBufferRef pixelBuffer = NULL; + + NSDictionary *pixelAttributes = @{(NSString *)kCVPixelBufferIOSurfacePropertiesKey : @{}}; + CVReturn res = CVPixelBufferCreate(kCFAllocatorDefault, + width, + height, + kCVPixelFormatType_32BGRA, + (__bridge CFDictionaryRef)(pixelAttributes), + &pixelBuffer); + CVPixelBufferLockBaseAddress(pixelBuffer, 0); + uint8_t *pxdata = (uint8_t *)CVPixelBufferGetBaseAddress(pixelBuffer); + libyuv::ConvertToARGB(reinterpret_cast(frame->data()), + real_width * height * 4, + reinterpret_cast(pxdata), + width * 4, + 0, + 0, + width, + height, + real_width, + height, + libyuv::kRotate0, + libyuv::FOURCC_ARGB); + CVPixelBufferUnlockBaseAddress(pixelBuffer, 0); + + if (res != kCVReturnSuccess) { + NSLog(@"Unable to create cvpixelbuffer %d", res); + return; + } + + CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pixelBuffer]; + CGRect outputSize = CGRectMake(0, 0, width, height); + + CIContext *tempContext = [CIContext contextWithOptions:nil]; + CGImageRef cgImage = [tempContext createCGImage:ciImage fromRect:outputSize]; + NSData *imageData; + NSBitmapImageRep *newRep = [[NSBitmapImageRep alloc] initWithCGImage:cgImage]; + [newRep setSize:NSSizeToCGSize(outputSize.size)]; + imageData = [newRep representationUsingType:NSJPEGFileType + properties:@{ + NSImageCompressionFactor : @1.0f + }]; + + thumbnail_.resize(imageData.length); + const void *_Nullable rawData = [imageData bytes]; + char *src = (char *)rawData; + std::copy(src, src + imageData.length, thumbnail_.begin()); + + CGImageRelease(cgImage); + CVPixelBufferRelease(pixelBuffer); +} + +} // namespace webrtc diff --git a/sdk/objc/native/src/objc_video_track_source.mm b/sdk/objc/native/src/objc_video_track_source.mm index 7937e90505..51b4d4e6b4 100644 --- a/sdk/objc/native/src/objc_video_track_source.mm +++ b/sdk/objc/native/src/objc_video_track_source.mm @@ -37,7 +37,7 @@ - (void)capturer:(RTC_OBJC_TYPE(RTCVideoCapturer) *)capturer ObjCVideoTrackSource::ObjCVideoTrackSource() : ObjCVideoTrackSource(false) {} ObjCVideoTrackSource::ObjCVideoTrackSource(bool is_screencast) - : AdaptedVideoTrackSource(/* required resolution alignment */ 2), + : AdaptedVideoTrackSource(/* required resolution alignment */ is_screencast? 16 : 2), is_screencast_(is_screencast) {} ObjCVideoTrackSource::ObjCVideoTrackSource(RTCObjCVideoSourceAdapter *adapter) : adapter_(adapter) { From 3a2c008529a15fecde5f979a6ebb75c05463d45e Mon Sep 17 00:00:00 2001 From: CloudWebRTC Date: Mon, 20 Feb 2023 14:16:07 +0800 Subject: [PATCH 11/42] Frame Cryptor Support. MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit feat: Frame Cryptor (aes gcm/cbc). (#54) feat: key ratchet/derive. (#66) fix: skip invalid key when decryption failed. 
(#81) Co-authored-by: Théo Monnom --- .gitignore | 6 + api/crypto/BUILD.gn | 18 + api/crypto/frame_crypto_transformer.cc | 675 ++++++++++++++++++ api/crypto/frame_crypto_transformer.h | 329 +++++++++ api/frame_transformer_interface.h | 2 + audio/channel_receive.cc | 9 +- ...sender_video_frame_transformer_delegate.cc | 2 + ...eam_receiver_frame_transformer_delegate.cc | 4 + pc/audio_rtp_receiver.cc | 2 +- sdk/BUILD.gn | 11 + sdk/android/BUILD.gn | 12 + sdk/android/api/org/webrtc/FrameCryptor.java | 108 +++ .../api/org/webrtc/FrameCryptorAlgorithm.java | 22 + .../api/org/webrtc/FrameCryptorFactory.java | 44 ++ .../org/webrtc/FrameCryptorKeyProvider.java | 66 ++ sdk/android/src/jni/pc/frame_cryptor.cc | 190 +++++ sdk/android/src/jni/pc/frame_cryptor.h | 50 ++ .../src/jni/pc/frame_cryptor_key_provider.cc | 79 ++ .../src/jni/pc/frame_cryptor_key_provider.h | 35 + .../peerconnection/RTCFrameCryptor+Private.h | 45 ++ sdk/objc/api/peerconnection/RTCFrameCryptor.h | 75 ++ .../api/peerconnection/RTCFrameCryptor.mm | 182 +++++ .../RTCFrameCryptorKeyProvider+Private.h | 31 + .../RTCFrameCryptorKeyProvider.h | 39 + .../RTCFrameCryptorKeyProvider.mm | 69 ++ .../native/src/objc_video_track_source.mm | 2 +- 26 files changed, 2099 insertions(+), 8 deletions(-) create mode 100644 api/crypto/frame_crypto_transformer.cc create mode 100644 api/crypto/frame_crypto_transformer.h create mode 100644 sdk/android/api/org/webrtc/FrameCryptor.java create mode 100644 sdk/android/api/org/webrtc/FrameCryptorAlgorithm.java create mode 100644 sdk/android/api/org/webrtc/FrameCryptorFactory.java create mode 100644 sdk/android/api/org/webrtc/FrameCryptorKeyProvider.java create mode 100644 sdk/android/src/jni/pc/frame_cryptor.cc create mode 100644 sdk/android/src/jni/pc/frame_cryptor.h create mode 100644 sdk/android/src/jni/pc/frame_cryptor_key_provider.cc create mode 100644 sdk/android/src/jni/pc/frame_cryptor_key_provider.h create mode 100644 sdk/objc/api/peerconnection/RTCFrameCryptor+Private.h create mode 100644 sdk/objc/api/peerconnection/RTCFrameCryptor.h create mode 100644 sdk/objc/api/peerconnection/RTCFrameCryptor.mm create mode 100644 sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider+Private.h create mode 100644 sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.h create mode 100644 sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.mm diff --git a/.gitignore b/.gitignore index 8202b82014..2a4acfebb7 100644 --- a/.gitignore +++ b/.gitignore @@ -72,3 +72,9 @@ /xcodebuild /.vscode !webrtc/* +/tmp.patch +/out-release +/out-debug +/node_modules +/libwebrtc +/args.txt diff --git a/api/crypto/BUILD.gn b/api/crypto/BUILD.gn index 8d041ea059..9249b7716a 100644 --- a/api/crypto/BUILD.gn +++ b/api/crypto/BUILD.gn @@ -16,6 +16,24 @@ group("crypto") { ] } +rtc_library("frame_crypto_transformer") { + visibility = [ "*" ] + sources = [ + "frame_crypto_transformer.cc", + "frame_crypto_transformer.h", + ] + + deps = [ + "//api:frame_transformer_interface", + ] + + if (rtc_build_ssl) { + deps += [ "//third_party/boringssl" ] + } else { + configs += [ ":external_ssl_library" ] + } +} + rtc_library("options") { visibility = [ "*" ] sources = [ diff --git a/api/crypto/frame_crypto_transformer.cc b/api/crypto/frame_crypto_transformer.cc new file mode 100644 index 0000000000..ed5a17c957 --- /dev/null +++ b/api/crypto/frame_crypto_transformer.cc @@ -0,0 +1,675 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the 
License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "frame_crypto_transformer.h" + +#include +#include +#include +#include +#include + +#include +#include +#include +#include + +#include "absl/container/inlined_vector.h" +#include "absl/types/optional.h" +#include "absl/types/variant.h" +#include "api/array_view.h" +#include "common_video/h264/h264_common.h" +#include "modules/rtp_rtcp/source/rtp_format_h264.h" +#include "rtc_base/byte_buffer.h" +#include "rtc_base/logging.h" + +enum class EncryptOrDecrypt { kEncrypt = 0, kDecrypt }; + +#define Success 0 +#define ErrorUnexpected -1 +#define OperationError -2 +#define ErrorDataTooSmall -3 +#define ErrorInvalidAesGcmTagLength -4 + +webrtc::VideoCodecType get_video_codec_type( + webrtc::TransformableFrameInterface* frame) { + auto videoFrame = + static_cast(frame); + return videoFrame->header().codec; +} + +webrtc::H264PacketizationMode get_h264_packetization_mode( + webrtc::TransformableFrameInterface* frame) { + auto video_frame = + static_cast(frame); + const auto& h264_header = absl::get( + video_frame->header().video_type_header); + return h264_header.packetization_mode; +} + +const EVP_AEAD* GetAesGcmAlgorithmFromKeySize(size_t key_size_bytes) { + switch (key_size_bytes) { + case 16: + return EVP_aead_aes_128_gcm(); + case 32: + return EVP_aead_aes_256_gcm(); + default: + return nullptr; + } +} + +const EVP_CIPHER* GetAesCbcAlgorithmFromKeySize(size_t key_size_bytes) { + switch (key_size_bytes) { + case 16: + return EVP_aes_128_cbc(); + case 32: + return EVP_aes_256_cbc(); + default: + return nullptr; + } +} + +std::string to_uint8_list(const uint8_t* data, int len) { + std::stringstream ss; + ss << "["; + for (int i = 0; i < len; i++) { + ss << static_cast(data[i]) << ","; + } + ss << "]"; + return ss.str(); +} + +std::string to_hex(const uint8_t* data, int len) { + std::stringstream ss; + ss << std::uppercase << std::hex << std::setfill('0'); + for (int i = 0; i < len; i++) { + ss << std::setw(2) << static_cast(data[i]); + } + return ss.str(); +} + +uint8_t get_unencrypted_bytes(webrtc::TransformableFrameInterface* frame, + webrtc::FrameCryptorTransformer::MediaType type) { + uint8_t unencrypted_bytes = 0; + switch (type) { + case webrtc::FrameCryptorTransformer::MediaType::kAudioFrame: + unencrypted_bytes = 1; + break; + case webrtc::FrameCryptorTransformer::MediaType::kVideoFrame: { + auto videoFrame = + static_cast(frame); + if (videoFrame->header().codec == + webrtc::VideoCodecType::kVideoCodecAV1) { + unencrypted_bytes = 0; + } else if (videoFrame->header().codec == + webrtc::VideoCodecType::kVideoCodecVP8) { + unencrypted_bytes = videoFrame->IsKeyFrame() ? 
10 : 3; + } else if (videoFrame->header().codec == + webrtc::VideoCodecType::kVideoCodecH264) { + rtc::ArrayView date_in = frame->GetData(); + std::vector nalu_indices = + webrtc::H264::FindNaluIndices(date_in.data(), date_in.size()); + + int idx = 0; + for (const auto& index : nalu_indices) { + const uint8_t* slice = date_in.data() + index.payload_start_offset; + webrtc::H264::NaluType nalu_type = + webrtc::H264::ParseNaluType(slice[0]); + switch (nalu_type) { + case webrtc::H264::NaluType::kIdr: + case webrtc::H264::NaluType::kSlice: + unencrypted_bytes = index.payload_start_offset + 2; + RTC_LOG(LS_INFO) + << "NonParameterSetNalu::payload_size: " << index.payload_size + << ", nalu_type " << nalu_type << ", NaluIndex [" << idx++ + << "] offset: " << index.payload_start_offset; + break; + default: + break; + } + } + } + break; + } + default: + break; + } + return unencrypted_bytes; +} + +int DerivePBKDF2KeyFromRawKey(const std::vector raw_key, + const std::vector& salt, + unsigned int optional_length_bits, + std::vector* derived_key) { + size_t key_size_bytes = optional_length_bits / 8; + derived_key->resize(key_size_bytes); + + if (PKCS5_PBKDF2_HMAC((const char*)raw_key.data(), raw_key.size(), + salt.data(), salt.size(), 100000, EVP_sha256(), + key_size_bytes, derived_key->data()) != 1) { + RTC_LOG(LS_ERROR) << "Failed to derive AES key from password."; + return ErrorUnexpected; + } + + RTC_LOG(LS_INFO) << "raw_key " + << to_uint8_list(raw_key.data(), raw_key.size()) << " len " + << raw_key.size() << " slat << " + << to_uint8_list(salt.data(), salt.size()) << " len " + << salt.size() << "\n derived_key " + << to_uint8_list(derived_key->data(), derived_key->size()) + << " len " << derived_key->size(); + + return Success; +} + +int AesGcmEncryptDecrypt(EncryptOrDecrypt mode, + const std::vector raw_key, + const rtc::ArrayView data, + unsigned int tag_length_bytes, + rtc::ArrayView iv, + rtc::ArrayView additional_data, + const EVP_AEAD* aead_alg, + std::vector* buffer) { + bssl::ScopedEVP_AEAD_CTX ctx; + + if (!aead_alg) { + RTC_LOG(LS_ERROR) << "Invalid AES-GCM key size."; + return ErrorUnexpected; + } + + if (!EVP_AEAD_CTX_init(ctx.get(), aead_alg, raw_key.data(), raw_key.size(), + tag_length_bytes, nullptr)) { + RTC_LOG(LS_ERROR) << "Failed to initialize AES-GCM context."; + return OperationError; + } + + size_t len; + int ok; + + if (mode == EncryptOrDecrypt::kDecrypt) { + if (data.size() < tag_length_bytes) { + RTC_LOG(LS_ERROR) << "Data too small for AES-GCM tag."; + return ErrorDataTooSmall; + } + + buffer->resize(data.size() - tag_length_bytes); + + ok = EVP_AEAD_CTX_open(ctx.get(), buffer->data(), &len, buffer->size(), + iv.data(), iv.size(), data.data(), data.size(), + additional_data.data(), additional_data.size()); + } else { + buffer->resize(data.size() + EVP_AEAD_max_overhead(aead_alg)); + + ok = EVP_AEAD_CTX_seal(ctx.get(), buffer->data(), &len, buffer->size(), + iv.data(), iv.size(), data.data(), data.size(), + additional_data.data(), additional_data.size()); + } + + if (!ok) { + RTC_LOG(LS_ERROR) << "Failed to perform AES-GCM operation."; + return OperationError; + } + + buffer->resize(len); + + return Success; +} + +int AesCbcEncryptDecrypt(EncryptOrDecrypt mode, + const std::vector& raw_key, + rtc::ArrayView iv, + const rtc::ArrayView input, + std::vector* output) { + const EVP_CIPHER* cipher = GetAesCbcAlgorithmFromKeySize(raw_key.size()); + RTC_DCHECK(cipher); // Already handled in Init(); + RTC_DCHECK_EQ(EVP_CIPHER_iv_length(cipher), iv.size()); + 
RTC_DCHECK_EQ(EVP_CIPHER_key_length(cipher), raw_key.size()); + + bssl::ScopedEVP_CIPHER_CTX ctx; + if (!EVP_CipherInit_ex(ctx.get(), cipher, nullptr, + reinterpret_cast(raw_key.data()), + iv.data(), + mode == EncryptOrDecrypt::kEncrypt ? 1 : 0)) { + return OperationError; + } + + // Encrypting needs a block size of space to allow for any padding. + output->resize(input.size() + + (mode == EncryptOrDecrypt::kEncrypt ? iv.size() : 0)); + int out_len; + if (!EVP_CipherUpdate(ctx.get(), output->data(), &out_len, input.data(), + input.size())) + return OperationError; + + // Write out the final block plus padding (if any) to the end of the data + // just written. + int tail_len; + if (!EVP_CipherFinal_ex(ctx.get(), output->data() + out_len, &tail_len)) + return OperationError; + + out_len += tail_len; + RTC_CHECK_LE(out_len, static_cast(output->size())); + return Success; +} + +int AesEncryptDecrypt(EncryptOrDecrypt mode, + webrtc::FrameCryptorTransformer::Algorithm algorithm, + const std::vector& raw_key, + rtc::ArrayView iv, + rtc::ArrayView additional_data, + const rtc::ArrayView data, + std::vector* buffer) { + switch (algorithm) { + case webrtc::FrameCryptorTransformer::Algorithm::kAesGcm: { + unsigned int tag_length_bits = 128; + return AesGcmEncryptDecrypt( + mode, raw_key, data, tag_length_bits / 8, iv, additional_data, + GetAesGcmAlgorithmFromKeySize(raw_key.size()), buffer); + } + case webrtc::FrameCryptorTransformer::Algorithm::kAesCbc: + return AesCbcEncryptDecrypt(mode, raw_key, iv, data, buffer); + } +} + +namespace webrtc { + +FrameCryptorTransformer::FrameCryptorTransformer( + const std::string participant_id, + MediaType type, + Algorithm algorithm, + rtc::scoped_refptr key_provider) + : participant_id_(participant_id), + type_(type), + algorithm_(algorithm), + key_provider_(key_provider) { + RTC_DCHECK(key_provider_ != nullptr); +} + +void FrameCryptorTransformer::Transform( + std::unique_ptr frame) { + webrtc::MutexLock lock(&sink_mutex_); + if (sink_callback_ == nullptr && sink_callbacks_.size() == 0) { + RTC_LOG(LS_WARNING) + << "FrameCryptorTransformer::Transform sink_callback_ is NULL"; + return; + } + + // do encrypt or decrypt here... 
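+  // Frames produced by encryptFrame() (and expected by decryptFrame()) use
+  // the layout
+  //   | unencrypted header | encrypted payload | IV | IV length | key index |
+  // where the header length is codec/media dependent (get_unencrypted_bytes),
+  // the IV is 12 bytes for AES-GCM or 16 bytes for AES-CBC, and the trailer
+  // is two bytes: the IV length and the key index.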
+ switch (frame->GetDirection()) { + case webrtc::TransformableFrameInterface::Direction::kSender: + encryptFrame(std::move(frame)); + break; + case webrtc::TransformableFrameInterface::Direction::kReceiver: + decryptFrame(std::move(frame)); + break; + case webrtc::TransformableFrameInterface::Direction::kUnknown: + // do nothing + RTC_LOG(LS_INFO) << "FrameCryptorTransformer::Transform() kUnknown"; + break; + } +} + +void FrameCryptorTransformer::encryptFrame( + std::unique_ptr frame) { + bool enabled_cryption = false; + rtc::scoped_refptr sink_callback = nullptr; + { + webrtc::MutexLock lock(&mutex_); + enabled_cryption = enabled_cryption_; + if (type_ == webrtc::FrameCryptorTransformer::MediaType::kAudioFrame) { + sink_callback = sink_callback_; + } else { + sink_callback = sink_callbacks_[frame->GetSsrc()]; + } + } + + if (sink_callback == nullptr) { + RTC_LOG(LS_WARNING) + << "FrameCryptorTransformer::encryptFrame() sink_callback is NULL"; + if (last_enc_error_ != FrameCryptionState::kInternalError) { + last_enc_error_ = FrameCryptionState::kInternalError; + if (observer_) + observer_->OnFrameCryptionStateChanged(participant_id_, + last_enc_error_); + } + return; + } + + rtc::ArrayView date_in = frame->GetData(); + if (date_in.size() == 0 || !enabled_cryption) { + sink_callback->OnTransformedFrame(std::move(frame)); + return; + } + + auto key_handler = key_provider_->GetKey(participant_id_); + if (key_handler == nullptr || key_handler->GetKeySet(key_index_) == nullptr) { + RTC_LOG(LS_INFO) << "FrameCryptorTransformer::encryptFrame() no keys, or " + "key_index[" + << key_index_ << "] out of range for participant " + << participant_id_; + if (last_enc_error_ != FrameCryptionState::kMissingKey) { + last_enc_error_ = FrameCryptionState::kMissingKey; + if (observer_) + observer_->OnFrameCryptionStateChanged(participant_id_, + last_enc_error_); + } + return; + } + + auto key_set = key_handler->GetKeySet(key_index_); + uint8_t unencrypted_bytes = get_unencrypted_bytes(frame.get(), type_); + + rtc::Buffer frame_header(unencrypted_bytes); + for (size_t i = 0; i < unencrypted_bytes; i++) { + frame_header[i] = date_in[i]; + } + + rtc::Buffer frame_trailer(2); + frame_trailer[0] = getIvSize(); + frame_trailer[1] = key_index_; + rtc::Buffer iv = makeIv(frame->GetSsrc(), frame->GetTimestamp()); + + rtc::Buffer payload(date_in.size() - unencrypted_bytes); + for (size_t i = unencrypted_bytes; i < date_in.size(); i++) { + payload[i - unencrypted_bytes] = date_in[i]; + } + + std::vector buffer; + if (AesEncryptDecrypt(EncryptOrDecrypt::kEncrypt, algorithm_, + key_set->encryption_key, iv, frame_header, payload, + &buffer) == Success) { + rtc::Buffer encrypted_payload(buffer.data(), buffer.size()); + rtc::Buffer data_out; + data_out.AppendData(frame_header); + data_out.AppendData(encrypted_payload); + data_out.AppendData(iv); + data_out.AppendData(frame_trailer); + + RTC_CHECK_EQ(data_out.size(), frame_header.size() + + encrypted_payload.size() + iv.size() + + frame_trailer.size()); + + frame->SetData(data_out); + + RTC_LOG(LS_INFO) << "FrameCryptorTransformer::encryptFrame() ivLength=" + << static_cast(iv.size()) << " unencrypted_bytes=" + << static_cast(unencrypted_bytes) + << " key_index=" << static_cast(key_index_) + << " aesKey=" + << to_hex(key_set->encryption_key.data(), + key_set->encryption_key.size()) + << " iv=" << to_hex(iv.data(), iv.size()); + if (last_enc_error_ != FrameCryptionState::kOk) { + last_enc_error_ = FrameCryptionState::kOk; + if (observer_) + 
observer_->OnFrameCryptionStateChanged(participant_id_, + last_enc_error_); + } + sink_callback->OnTransformedFrame(std::move(frame)); + } else { + if (last_enc_error_ != FrameCryptionState::kEncryptionFailed) { + last_enc_error_ = FrameCryptionState::kEncryptionFailed; + if (observer_) + observer_->OnFrameCryptionStateChanged(participant_id_, + last_enc_error_); + } + RTC_LOG(LS_ERROR) << "FrameCryptorTransformer::encryptFrame() failed"; + } +} + +void FrameCryptorTransformer::decryptFrame( + std::unique_ptr frame) { + bool enabled_cryption = false; + rtc::scoped_refptr sink_callback = nullptr; + { + webrtc::MutexLock lock(&mutex_); + enabled_cryption = enabled_cryption_; + if (type_ == webrtc::FrameCryptorTransformer::MediaType::kAudioFrame) { + sink_callback = sink_callback_; + } else { + sink_callback = sink_callbacks_[frame->GetSsrc()]; + } + } + + if (sink_callback == nullptr) { + RTC_LOG(LS_WARNING) + << "FrameCryptorTransformer::decryptFrame() sink_callback is NULL"; + if (last_dec_error_ != FrameCryptionState::kInternalError) { + last_dec_error_ = FrameCryptionState::kInternalError; + if (observer_) + observer_->OnFrameCryptionStateChanged(participant_id_, + last_dec_error_); + } + return; + } + + rtc::ArrayView date_in = frame->GetData(); + + if (date_in.size() == 0 || !enabled_cryption) { + sink_callback->OnTransformedFrame(std::move(frame)); + return; + } + + auto uncrypted_magic_bytes = key_provider_->options().uncrypted_magic_bytes; + if (uncrypted_magic_bytes.size() > 0 && + date_in.size() >= uncrypted_magic_bytes.size() + 1) { + auto tmp = date_in.subview(date_in.size() - (uncrypted_magic_bytes.size() + 1), + uncrypted_magic_bytes.size()); + + if (uncrypted_magic_bytes == std::vector(tmp.begin(), tmp.end())) { + + RTC_CHECK_EQ(tmp.size(), uncrypted_magic_bytes.size()); + auto frame_type = date_in.subview(date_in.size() - 1, 1); + RTC_CHECK_EQ(frame_type.size(), 1); + + RTC_LOG(LS_INFO) << "FrameCryptorTransformer::uncrypted_magic_bytes( type " + << frame_type[0] << ", tmp " + << to_hex(tmp.data(), tmp.size()) << ", magic bytes " + << to_hex(uncrypted_magic_bytes.data(), + uncrypted_magic_bytes.size()) + << ")"; + + // magic bytes detected, this is a non-encrypted frame, skip frame decryption. 
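+      // Unencrypted pass-through frames have the layout
+      //   | plaintext payload | uncrypted_magic_bytes | frame type (1 byte) |
+      // so the magic bytes and the trailing frame-type byte are stripped
+      // below and the remaining payload is forwarded unchanged.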
+      rtc::Buffer data_out;
+      data_out.AppendData(
+          date_in.subview(0, date_in.size() - uncrypted_magic_bytes.size() - 1));
+      frame->SetData(data_out);
+      sink_callback->OnTransformedFrame(std::move(frame));
+      return;
+    }
+  }
+
+  uint8_t unencrypted_bytes = get_unencrypted_bytes(frame.get(), type_);
+
+  rtc::Buffer frame_header(unencrypted_bytes);
+  for (size_t i = 0; i < unencrypted_bytes; i++) {
+    frame_header[i] = date_in[i];
+  }
+
+  // Encrypted frames are laid out as
+  //   [unencrypted header | ciphertext | IV | ivLength | key_index],
+  // so the last two bytes form the trailer read here.
+  rtc::Buffer frame_trailer(2);
+  frame_trailer[0] = date_in[date_in.size() - 2];
+  frame_trailer[1] = date_in[date_in.size() - 1];
+  uint8_t ivLength = frame_trailer[0];
+  uint8_t key_index = frame_trailer[1];
+
+  if (ivLength != getIvSize()) {
+    RTC_LOG(LS_ERROR) << "FrameCryptorTransformer::decryptFrame() ivLength["
+                      << static_cast<int>(ivLength) << "] != getIvSize()["
+                      << static_cast<int>(getIvSize()) << "]";
+    if (last_dec_error_ != FrameCryptionState::kDecryptionFailed) {
+      last_dec_error_ = FrameCryptionState::kDecryptionFailed;
+      if (observer_)
+        observer_->OnFrameCryptionStateChanged(participant_id_,
+                                               last_dec_error_);
+    }
+    return;
+  }
+
+  auto key_handler = key_provider_->GetKey(participant_id_);
+  if (key_index >= KEYRING_SIZE || key_handler == nullptr ||
+      key_handler->GetKeySet(key_index) == nullptr) {
+    RTC_LOG(LS_INFO) << "FrameCryptorTransformer::decryptFrame() no keys, or "
+                        "key_index["
+                     << static_cast<int>(key_index)
+                     << "] out of range for participant " << participant_id_;
+    if (last_dec_error_ != FrameCryptionState::kMissingKey) {
+      last_dec_error_ = FrameCryptionState::kMissingKey;
+      if (observer_)
+        observer_->OnFrameCryptionStateChanged(participant_id_,
+                                               last_dec_error_);
+    }
+    return;
+  }
+
+  if (last_dec_error_ == kDecryptionFailed && !key_handler->have_valid_key) {
+    // The last decryption attempt failed and the key has been marked invalid;
+    // skip this frame and wait for a new key before trying again.
+    return;
+  }
+
+  auto key_set = key_handler->GetKeySet(key_index);
+
+  rtc::Buffer iv = rtc::Buffer(ivLength);
+  for (size_t i = 0; i < ivLength; i++) {
+    iv[i] = date_in[date_in.size() - 2 - ivLength + i];
+  }
+
+  rtc::Buffer encrypted_payload(date_in.size() - unencrypted_bytes - ivLength -
+                                2);
+  for (size_t i = unencrypted_bytes; i < date_in.size() - ivLength - 2; i++) {
+    encrypted_payload[i - unencrypted_bytes] = date_in[i];
+  }
+  std::vector<uint8_t> buffer;
+
+  int ratchet_count = 0;
+  auto initialKeyMaterial = key_set->material;
+  bool decryption_success = false;
+  if (AesEncryptDecrypt(EncryptOrDecrypt::kDecrypt, algorithm_,
+                        key_set->encryption_key, iv, frame_header,
+                        encrypted_payload, &buffer) == Success) {
+    decryption_success = true;
+  } else {
+    RTC_LOG(LS_ERROR) << "FrameCryptorTransformer::decryptFrame() failed";
+    std::shared_ptr<ParticipantKeyHandler::KeySet> ratcheted_key_set;
+    auto currentKeyMaterial = key_set->material;
+    if (key_handler->options().ratchet_window_size > 0) {
+      while (ratchet_count < key_handler->options().ratchet_window_size) {
+        ratchet_count++;
+
+        RTC_LOG(LS_INFO) << "ratcheting key attempt " << ratchet_count << " of "
+                         << key_handler->options().ratchet_window_size;
+
+        auto new_material = key_handler->RatchetKeyMaterial(currentKeyMaterial);
+        ratcheted_key_set = key_handler->DeriveKeys(
+            new_material, key_handler->options().ratchet_salt, 128);
+
+        if (AesEncryptDecrypt(EncryptOrDecrypt::kDecrypt, algorithm_,
+                              ratcheted_key_set->encryption_key, iv,
+                              frame_header, encrypted_payload,
+                              &buffer) == Success) {
+          RTC_LOG(LS_INFO) << "FrameCryptorTransformer::decryptFrame() "
+                              "ratcheted to key_index="
+                           << static_cast<int>(key_index);
+          decryption_success = true;
+          // success, so we set the new
key + key_handler->SetKeyFromMaterial(new_material, key_index); + if (last_dec_error_ != FrameCryptionState::kKeyRatcheted) { + last_dec_error_ = FrameCryptionState::kKeyRatcheted; + if (observer_) + observer_->OnFrameCryptionStateChanged(participant_id_, + last_dec_error_); + } + break; + } + // for the next ratchet attempt + currentKeyMaterial = new_material; + } + + /* Since the key it is first send and only afterwards actually used for + encrypting, there were situations when the decrypting failed due to the + fact that the received frame was not encrypted yet and ratcheting, of + course, did not solve the problem. So if we fail RATCHET_WINDOW_SIZE + times, we come back to the initial key. + */ + if (!decryption_success || + ratchet_count >= key_handler->options().ratchet_window_size) { + key_handler->SetKeyFromMaterial(initialKeyMaterial, key_index); + } + } + } + + if (!decryption_success) { + if (last_dec_error_ != FrameCryptionState::kDecryptionFailed) { + last_dec_error_ = FrameCryptionState::kDecryptionFailed; + key_handler->have_valid_key = false; + if (observer_) + observer_->OnFrameCryptionStateChanged(participant_id_, + last_dec_error_); + } + return; + } + + rtc::Buffer payload(buffer.data(), buffer.size()); + rtc::Buffer data_out; + data_out.AppendData(frame_header); + data_out.AppendData(payload); + frame->SetData(data_out); + + RTC_LOG(LS_INFO) << "FrameCryptorTransformer::decryptFrame() ivLength=" + << static_cast(ivLength) << " unencrypted_bytes=" + << static_cast(unencrypted_bytes) + << " key_index=" << static_cast(key_index_) << " aesKey=" + << to_hex(key_set->encryption_key.data(), + key_set->encryption_key.size()) + << " iv=" << to_hex(iv.data(), iv.size()); + + if (last_dec_error_ != FrameCryptionState::kOk) { + last_dec_error_ = FrameCryptionState::kOk; + if (observer_) + observer_->OnFrameCryptionStateChanged(participant_id_, last_dec_error_); + } + sink_callback->OnTransformedFrame(std::move(frame)); +} + +rtc::Buffer FrameCryptorTransformer::makeIv(uint32_t ssrc, uint32_t timestamp) { + uint32_t send_count = 0; + if (send_counts_.find(ssrc) == send_counts_.end()) { + srand((unsigned)time(NULL)); + send_counts_[ssrc] = floor(rand() * 0xFFFF); + } else { + send_count = send_counts_[ssrc]; + } + rtc::ByteBufferWriter buf; + buf.WriteUInt32(ssrc); + buf.WriteUInt32(timestamp); + buf.WriteUInt32(timestamp - (send_count % 0xFFFF)); + send_counts_[ssrc] = send_count + 1; + + RTC_CHECK_EQ(buf.Length(), getIvSize()); + + return rtc::Buffer(buf.Data(), buf.Length()); +} + +uint8_t FrameCryptorTransformer::getIvSize() { + switch (algorithm_) { + case Algorithm::kAesGcm: + return 12; + case Algorithm::kAesCbc: + return 16; + default: + return 0; + } +} + +} // namespace webrtc diff --git a/api/crypto/frame_crypto_transformer.h b/api/crypto/frame_crypto_transformer.h new file mode 100644 index 0000000000..7f98deb11d --- /dev/null +++ b/api/crypto/frame_crypto_transformer.h @@ -0,0 +1,329 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef WEBRTC_FRAME_CRYPTOR_TRANSFORMER_H_ +#define WEBRTC_FRAME_CRYPTOR_TRANSFORMER_H_ + +#include + +#include "api/frame_transformer_interface.h" +#include "rtc_base/buffer.h" +#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/system/rtc_export.h" +#include "rtc_base/thread.h" + +int DerivePBKDF2KeyFromRawKey(const std::vector raw_key, + const std::vector& salt, + unsigned int optional_length_bits, + std::vector* derived_key); + +namespace webrtc { + +const size_t KEYRING_SIZE = 16; + +struct KeyProviderOptions { + bool shared_key; + std::vector ratchet_salt; + std::vector uncrypted_magic_bytes; + int ratchet_window_size; + KeyProviderOptions() : shared_key(false), ratchet_window_size(0) {} + KeyProviderOptions(KeyProviderOptions& copy) + : shared_key(copy.shared_key), + ratchet_salt(copy.ratchet_salt), + uncrypted_magic_bytes(copy.uncrypted_magic_bytes), + ratchet_window_size(copy.ratchet_window_size) {} +}; + +class ParticipantKeyHandler { + friend class FrameCryptorTransformer; + public: + struct KeySet { + std::vector material; + std::vector encryption_key; + KeySet(std::vector material, std::vector encryptionKey) + : material(material), encryption_key(encryptionKey) {} + }; + + public: + ParticipantKeyHandler(KeyProviderOptions options) : options_(options) { + crypto_key_ring_.resize(KEYRING_SIZE); + } + + virtual ~ParticipantKeyHandler() = default; + + virtual std::vector RatchetKey(int key_index) { + auto current_material = GetKeySet(key_index)->material; + std::vector new_material; + if (DerivePBKDF2KeyFromRawKey(current_material, options_.ratchet_salt, 256, + &new_material) != 0) { + return std::vector(); + } + SetKeyFromMaterial(new_material, + key_index != -1 ? key_index : current_key_index_); + return new_material; + } + + virtual std::shared_ptr GetKeySet(int key_index) { + return crypto_key_ring_[key_index != -1 ? 
key_index : current_key_index_]; + } + + virtual void SetKey(std::vector password, int key_index) { + SetKeyFromMaterial(password, key_index); + have_valid_key = true; + } + + virtual void SetKeyFromMaterial(std::vector password, int key_index) { + if (key_index >= 0) { + current_key_index_ = key_index % crypto_key_ring_.size(); + } + crypto_key_ring_[current_key_index_] = + DeriveKeys(password, options_.ratchet_salt, 128); + } + + virtual KeyProviderOptions& options() { return options_; } + + std::shared_ptr DeriveKeys(std::vector password, + std::vector ratchet_salt, + unsigned int optional_length_bits) { + std::vector derived_key; + if (DerivePBKDF2KeyFromRawKey(password, ratchet_salt, optional_length_bits, + &derived_key) == 0) { + return std::make_shared(password, derived_key); + } + return nullptr; + } + + std::vector RatchetKeyMaterial( + std::vector current_material) { + std::vector new_material; + if (DerivePBKDF2KeyFromRawKey(current_material, options_.ratchet_salt, 256, + &new_material) != 0) { + return std::vector(); + } + return new_material; + } + protected: + bool have_valid_key = false; + private: + int current_key_index_ = 0; + KeyProviderOptions options_; + std::vector> crypto_key_ring_; +}; + +class KeyProvider : public rtc::RefCountInterface { + public: + enum { kRawKeySize = 32 }; + + public: + virtual const std::shared_ptr GetKey( + const std::string participant_id) const = 0; + + virtual bool SetKey(const std::string participant_id, + int index, + std::vector key) = 0; + + virtual const std::vector RatchetKey( + const std::string participant_id, + int key_index) = 0; + + virtual const std::vector ExportKey(const std::string participant_id, + int key_index) const = 0; + + virtual KeyProviderOptions& options() = 0; + + protected: + virtual ~KeyProvider() {} +}; + +class DefaultKeyProviderImpl : public KeyProvider { + public: + DefaultKeyProviderImpl(KeyProviderOptions options) : options_(options) {} + ~DefaultKeyProviderImpl() override = default; + + /// Set the key at the given index. 
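+  /// Usage sketch (participant id and key bytes are illustrative): share a
+  /// raw key out of band, then
+  ///   key_provider->SetKey("participant-1", /*index=*/0, raw_key);
+  /// Keys are tracked per participant id and per key index.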
+ bool SetKey(const std::string participant_id, + int index, + std::vector key) override { + webrtc::MutexLock lock(&mutex_); + + if (keys_.find(participant_id) == keys_.end()) { + keys_[participant_id] = std::make_shared(options_); + } + + auto key_handler = keys_[participant_id]; + key_handler->SetKey(key, index); + return true; + } + + const std::shared_ptr GetKey( + const std::string participant_id) const override { + webrtc::MutexLock lock(&mutex_); + if (keys_.find(participant_id) == keys_.end()) { + return nullptr; + } + + return keys_.find(participant_id)->second; + } + + const std::vector RatchetKey(const std::string participant_id, + int key_index) override { + webrtc::MutexLock lock(&mutex_); + if (keys_.find(participant_id) == keys_.end()) { + return std::vector(); + } + + return keys_[participant_id]->RatchetKey(key_index); + } + + const std::vector ExportKey(const std::string participant_id, + int key_index) const override { + webrtc::MutexLock lock(&mutex_); + if (keys_.find(participant_id) == keys_.end()) { + return std::vector(); + } + + auto key_set = GetKey(participant_id); + + if (!key_set) { + return std::vector(); + } + + return key_set->GetKeySet(key_index)->material; + } + + KeyProviderOptions& options() override { return options_; } + + private: + mutable webrtc::Mutex mutex_; + KeyProviderOptions options_; + std::unordered_map> keys_; +}; + +enum FrameCryptionState { + kNew = 0, + kOk, + kEncryptionFailed, + kDecryptionFailed, + kMissingKey, + kKeyRatcheted, + kInternalError, +}; + +class FrameCryptorTransformerObserver { + public: + virtual void OnFrameCryptionStateChanged(const std::string participant_id, + FrameCryptionState error) = 0; + + protected: + virtual ~FrameCryptorTransformerObserver() {} +}; + +class RTC_EXPORT FrameCryptorTransformer + : public rtc::RefCountedObject { + public: + enum class MediaType { + kAudioFrame = 0, + kVideoFrame, + }; + + enum class Algorithm { + kAesGcm = 0, + kAesCbc, + }; + + explicit FrameCryptorTransformer(const std::string participant_id, + MediaType type, + Algorithm algorithm, + rtc::scoped_refptr key_provider); + + virtual void SetFrameCryptorTransformerObserver( + FrameCryptorTransformerObserver* observer) { + webrtc::MutexLock lock(&mutex_); + observer_ = observer; + } + + virtual void SetKeyIndex(int index) { + webrtc::MutexLock lock(&mutex_); + key_index_ = index; + } + + virtual int key_index() const { return key_index_; } + + virtual void SetEnabled(bool enabled) { + webrtc::MutexLock lock(&mutex_); + enabled_cryption_ = enabled; + } + virtual bool enabled() const { + webrtc::MutexLock lock(&mutex_); + return enabled_cryption_; + } + virtual const std::string participant_id() const { return participant_id_; } + + protected: + virtual void RegisterTransformedFrameCallback( + rtc::scoped_refptr callback) override { + webrtc::MutexLock lock(&sink_mutex_); + sink_callback_ = callback; + } + virtual void UnregisterTransformedFrameCallback() override { + webrtc::MutexLock lock(&sink_mutex_); + sink_callback_ = nullptr; + } + virtual void RegisterTransformedFrameSinkCallback( + rtc::scoped_refptr callback, + uint32_t ssrc) override { + webrtc::MutexLock lock(&sink_mutex_); + sink_callbacks_[ssrc] = callback; + } + virtual void UnregisterTransformedFrameSinkCallback(uint32_t ssrc) override { + webrtc::MutexLock lock(&sink_mutex_); + auto it = sink_callbacks_.find(ssrc); + if (it != sink_callbacks_.end()) { + sink_callbacks_.erase(it); + } + } + + virtual void Transform( + std::unique_ptr frame) override; + + private: + 
void encryptFrame(std::unique_ptr frame); + void decryptFrame(std::unique_ptr frame); + rtc::Buffer makeIv(uint32_t ssrc, uint32_t timestamp); + uint8_t getIvSize(); + + private: + std::string participant_id_; + mutable webrtc::Mutex mutex_; + mutable webrtc::Mutex sink_mutex_; + bool enabled_cryption_ RTC_GUARDED_BY(mutex_) = false; + MediaType type_; + Algorithm algorithm_; + rtc::scoped_refptr sink_callback_; + std::map> + sink_callbacks_; + int key_index_ = 0; + std::map send_counts_; + rtc::scoped_refptr key_provider_; + FrameCryptorTransformerObserver* observer_ = nullptr; + std::unique_ptr thread_; + FrameCryptionState last_enc_error_ = FrameCryptionState::kNew; + FrameCryptionState last_dec_error_ = FrameCryptionState::kNew; +}; + +} // namespace webrtc + +#endif // WEBRTC_FRAME_CRYPTOR_TRANSFORMER_H_ diff --git a/api/frame_transformer_interface.h b/api/frame_transformer_interface.h index 2f7b139f9f..b7c03da468 100644 --- a/api/frame_transformer_interface.h +++ b/api/frame_transformer_interface.h @@ -61,6 +61,8 @@ class TransformableVideoFrameInterface : public TransformableFrameInterface { virtual VideoFrameMetadata Metadata() const = 0; virtual void SetMetadata(const VideoFrameMetadata&) = 0; + + virtual const RTPVideoHeader& header () const = 0; }; // Extends the TransformableFrameInterface to expose audio-specific information. diff --git a/audio/channel_receive.cc b/audio/channel_receive.cc index e6435ad1b1..99405b1c78 100644 --- a/audio/channel_receive.cc +++ b/audio/channel_receive.cc @@ -357,7 +357,6 @@ void ChannelReceive::OnReceivedPayloadData( void ChannelReceive::InitFrameTransformerDelegate( rtc::scoped_refptr frame_transformer) { RTC_DCHECK(frame_transformer); - RTC_DCHECK(!frame_transformer_delegate_); RTC_DCHECK(worker_thread_->IsCurrent()); // Pass a callback to ChannelReceive::OnReceivedPayloadData, to be called by @@ -920,11 +919,9 @@ void ChannelReceive::SetAssociatedSendChannel( void ChannelReceive::SetDepacketizerToDecoderFrameTransformer( rtc::scoped_refptr frame_transformer) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); - // Depending on when the channel is created, the transformer might be set - // twice. Don't replace the delegate if it was already initialized. 
- if (!frame_transformer || frame_transformer_delegate_) { - RTC_DCHECK_NOTREACHED() << "Not setting the transformer?"; - return; + + if(frame_transformer_delegate_ && frame_transformer) { + frame_transformer_delegate_->Reset(); } InitFrameTransformerDelegate(std::move(frame_transformer)); diff --git a/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc b/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc index 34b4af0ec9..6203ff57ff 100644 --- a/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc +++ b/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc @@ -78,6 +78,8 @@ class TransformableVideoSenderFrame : public TransformableVideoFrameInterface { csrcs_ = metadata.GetCsrcs(); } + const RTPVideoHeader& header() const override { return header_; } + const RTPVideoHeader& GetHeader() const { return header_; } uint8_t GetPayloadType() const override { return payload_type_; } absl::optional GetCodecType() const { return codec_type_; } diff --git a/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.cc b/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.cc index 6e915bf758..394ff5f7a9 100644 --- a/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.cc +++ b/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.cc @@ -58,6 +58,10 @@ class TransformableVideoReceiverFrame << "TransformableVideoReceiverFrame::SetMetadata is not implemented"; } + const RTPVideoHeader& header () const override { + return frame_->GetRtpVideoHeader(); + } + std::unique_ptr ExtractFrame() && { return std::move(frame_); } diff --git a/pc/audio_rtp_receiver.cc b/pc/audio_rtp_receiver.cc index 804d31352d..1f78574b9d 100644 --- a/pc/audio_rtp_receiver.cc +++ b/pc/audio_rtp_receiver.cc @@ -298,7 +298,7 @@ void AudioRtpReceiver::Reconfigure(bool track_enabled) { media_channel_->SetFrameDecryptor(*signaled_ssrc_, frame_decryptor_); } - if (frame_transformer_) { + if (frame_transformer_ && track_enabled) { media_channel_->SetDepacketizerToDecoderFrameTransformer( signaled_ssrc_.value_or(0), frame_transformer_); } diff --git a/sdk/BUILD.gn b/sdk/BUILD.gn index bfbfc33eaf..5fe0f67b63 100644 --- a/sdk/BUILD.gn +++ b/sdk/BUILD.gn @@ -1026,6 +1026,12 @@ if (is_ios || is_mac) { "objc/api/peerconnection/RTCDtmfSender.mm", "objc/api/peerconnection/RTCFieldTrials.h", "objc/api/peerconnection/RTCFieldTrials.mm", + "objc/api/peerconnection/RTCFrameCryptor+Private.h", + "objc/api/peerconnection/RTCFrameCryptor.h", + "objc/api/peerconnection/RTCFrameCryptor.mm", + "objc/api/peerconnection/RTCFrameCryptorKeyProvider+Private.h", + "objc/api/peerconnection/RTCFrameCryptorKeyProvider.h", + "objc/api/peerconnection/RTCFrameCryptorKeyProvider.mm", "objc/api/peerconnection/RTCIceCandidate+Private.h", "objc/api/peerconnection/RTCIceCandidate.h", "objc/api/peerconnection/RTCIceCandidate.mm", @@ -1133,6 +1139,7 @@ if (is_ios || is_mac) { ":videorendereradapter_objc", ":videosource_objc", ":videotoolbox_objc", + "../api/crypto:frame_crypto_transformer", "../api:dtmf_sender_interface", "../api:libjingle_peerconnection_api", "../api:media_stream_interface", @@ -1380,6 +1387,8 @@ if (is_ios || is_mac) { "objc/api/peerconnection/RTCConfiguration.h", "objc/api/peerconnection/RTCDataChannel.h", "objc/api/peerconnection/RTCDataChannelConfiguration.h", + "objc/api/peerconnection/RTCFrameCryptor.h", + "objc/api/peerconnection/RTCFrameCryptorKeyProvider.h", 
"objc/api/peerconnection/RTCFieldTrials.h", "objc/api/peerconnection/RTCIceCandidate.h", "objc/api/peerconnection/RTCIceCandidateErrorEvent.h", @@ -1504,6 +1513,8 @@ if (is_ios || is_mac) { "objc/api/peerconnection/RTCDataChannel.h", "objc/api/peerconnection/RTCDataChannelConfiguration.h", "objc/api/peerconnection/RTCDtmfSender.h", + "objc/api/peerconnection/RTCFrameCryptor.h", + "objc/api/peerconnection/RTCFrameCryptorKeyProvider.h", "objc/api/peerconnection/RTCFieldTrials.h", "objc/api/peerconnection/RTCIceCandidate.h", "objc/api/peerconnection/RTCIceCandidateErrorEvent.h", diff --git a/sdk/android/BUILD.gn b/sdk/android/BUILD.gn index 73cfe51730..596dbc9693 100644 --- a/sdk/android/BUILD.gn +++ b/sdk/android/BUILD.gn @@ -261,6 +261,10 @@ if (is_android) { "api/org/webrtc/DataChannel.java", "api/org/webrtc/DtmfSender.java", "api/org/webrtc/FecControllerFactoryFactoryInterface.java", + "api/org/webrtc/FrameCryptor.java", + "api/org/webrtc/FrameCryptorAlgorithm.java", + "api/org/webrtc/FrameCryptorFactory.java", + "api/org/webrtc/FrameCryptorKeyProvider.java", "api/org/webrtc/FrameDecryptor.java", "api/org/webrtc/FrameEncryptor.java", "api/org/webrtc/IceCandidate.java", @@ -719,6 +723,10 @@ if (current_os == "linux" || is_android) { "src/jni/pc/data_channel.cc", "src/jni/pc/data_channel.h", "src/jni/pc/dtmf_sender.cc", + "src/jni/pc/frame_cryptor.cc", + "src/jni/pc/frame_cryptor.h", + "src/jni/pc/frame_cryptor_key_provider.cc", + "src/jni/pc/frame_cryptor_key_provider.h", "src/jni/pc/ice_candidate.cc", "src/jni/pc/ice_candidate.h", "src/jni/pc/media_constraints.cc", @@ -776,6 +784,7 @@ if (current_os == "linux" || is_android) { "../../api:rtp_parameters", "../../api:rtp_sender_interface", "../../api:turn_customizer", + "../../api/crypto:frame_crypto_transformer", "../../api/crypto:options", "../../api/rtc_event_log:rtc_event_log_factory", "../../api/task_queue:default_task_queue_factory", @@ -1401,6 +1410,9 @@ if (current_os == "linux" || is_android) { "api/org/webrtc/CryptoOptions.java", "api/org/webrtc/DataChannel.java", "api/org/webrtc/DtmfSender.java", + "api/org/webrtc/FrameCryptor.java", + "api/org/webrtc/FrameCryptorFactory.java", + "api/org/webrtc/FrameCryptorKeyProvider.java", "api/org/webrtc/IceCandidate.java", "api/org/webrtc/IceCandidateErrorEvent.java", "api/org/webrtc/MediaConstraints.java", diff --git a/sdk/android/api/org/webrtc/FrameCryptor.java b/sdk/android/api/org/webrtc/FrameCryptor.java new file mode 100644 index 0000000000..d633e05005 --- /dev/null +++ b/sdk/android/api/org/webrtc/FrameCryptor.java @@ -0,0 +1,108 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+
+public class FrameCryptor {
+  public enum FrameCryptionState {
+    NEW,
+    OK,
+    ENCRYPTIONFAILED,
+    DECRYPTIONFAILED,
+    MISSINGKEY,
+    KEYRATCHETED,
+    INTERNALERROR;
+
+    @CalledByNative("FrameCryptionState")
+    static FrameCryptionState fromNativeIndex(int nativeIndex) {
+      return values()[nativeIndex];
+    }
+  }
+
+  public static interface Observer {
+    @CalledByNative("Observer")
+    void onFrameCryptionStateChanged(String participantId, FrameCryptionState newState);
+  }
+
+  private long nativeFrameCryptor;
+  private long observerPtr;
+
+  public long getNativeFrameCryptor() {
+    return nativeFrameCryptor;
+  }
+
+  @CalledByNative
+  public FrameCryptor(long nativeFrameCryptor) {
+    this.nativeFrameCryptor = nativeFrameCryptor;
+    this.observerPtr = 0;
+  }
+
+  public void setEnabled(boolean enabled) {
+    checkFrameCryptorExists();
+    nativeSetEnabled(nativeFrameCryptor, enabled);
+  }
+
+  public boolean isEnabled() {
+    checkFrameCryptorExists();
+    return nativeIsEnabled(nativeFrameCryptor);
+  }
+
+  public int getKeyIndex() {
+    checkFrameCryptorExists();
+    return nativeGetKeyIndex(nativeFrameCryptor);
+  }
+
+  public void setKeyIndex(int index) {
+    checkFrameCryptorExists();
+    nativeSetKeyIndex(nativeFrameCryptor, index);
+  }
+
+  public void dispose() {
+    checkFrameCryptorExists();
+    nativeUnSetObserver(nativeFrameCryptor);
+    JniCommon.nativeReleaseRef(nativeFrameCryptor);
+    nativeFrameCryptor = 0;
+    if (observerPtr != 0) {
+      JniCommon.nativeReleaseRef(observerPtr);
+      observerPtr = 0;
+    }
+  }
+
+  public void setObserver(@Nullable Observer observer) {
+    checkFrameCryptorExists();
+    long newPtr = nativeSetObserver(nativeFrameCryptor, observer);
+    if (observerPtr != 0) {
+      JniCommon.nativeReleaseRef(observerPtr);
+      observerPtr = 0;
+    }
+    // Keep the new native observer reference so it can be released by the
+    // next setObserver() call or by dispose().
+    observerPtr = newPtr;
+  }
+
+  private void checkFrameCryptorExists() {
+    if (nativeFrameCryptor == 0) {
+      throw new IllegalStateException("FrameCryptor has been disposed.");
+    }
+  }
+
+  private static native void nativeSetEnabled(long frameCryptorPointer, boolean enabled);
+  private static native boolean nativeIsEnabled(long frameCryptorPointer);
+  private static native void nativeSetKeyIndex(long frameCryptorPointer, int index);
+  private static native int nativeGetKeyIndex(long frameCryptorPointer);
+  private static native long nativeSetObserver(long frameCryptorPointer, Observer observer);
+  private static native void nativeUnSetObserver(long frameCryptorPointer);
+}
diff --git a/sdk/android/api/org/webrtc/FrameCryptorAlgorithm.java b/sdk/android/api/org/webrtc/FrameCryptorAlgorithm.java
new file mode 100644
index 0000000000..d0d4dc8374
--- /dev/null
+++ b/sdk/android/api/org/webrtc/FrameCryptorAlgorithm.java
@@ -0,0 +1,22 @@
+/*
+ * Copyright 2022 LiveKit
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +package org.webrtc; + +public enum FrameCryptorAlgorithm { + AES_GCM, + AES_CBC, +} \ No newline at end of file diff --git a/sdk/android/api/org/webrtc/FrameCryptorFactory.java b/sdk/android/api/org/webrtc/FrameCryptorFactory.java new file mode 100644 index 0000000000..74df6a5b29 --- /dev/null +++ b/sdk/android/api/org/webrtc/FrameCryptorFactory.java @@ -0,0 +1,44 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.webrtc; + +public class FrameCryptorFactory { + public static FrameCryptorKeyProvider createFrameCryptorKeyProvider( + boolean sharedKey, byte[] ratchetSalt, int ratchetWindowSize, byte[] uncryptedMagicBytes) { + return nativeCreateFrameCryptorKeyProvider(sharedKey, ratchetSalt, ratchetWindowSize, uncryptedMagicBytes); + } + + public static FrameCryptor createFrameCryptorForRtpSender(RtpSender rtpSender, + String participantId, FrameCryptorAlgorithm algorithm, FrameCryptorKeyProvider keyProvider) { + return nativeCreateFrameCryptorForRtpSender(rtpSender.getNativeRtpSender(), participantId, + algorithm.ordinal(), keyProvider.getNativeKeyProvider()); + } + + public static FrameCryptor createFrameCryptorForRtpReceiver(RtpReceiver rtpReceiver, + String participantId, FrameCryptorAlgorithm algorithm, FrameCryptorKeyProvider keyProvider) { + return nativeCreateFrameCryptorForRtpReceiver(rtpReceiver.getNativeRtpReceiver(), participantId, + algorithm.ordinal(), keyProvider.getNativeKeyProvider()); + } + + private static native FrameCryptor nativeCreateFrameCryptorForRtpSender( + long rtpSender, String participantId, int algorithm, long nativeFrameCryptorKeyProvider); + private static native FrameCryptor nativeCreateFrameCryptorForRtpReceiver( + long rtpReceiver, String participantId, int algorithm, long nativeFrameCryptorKeyProvider); + + private static native FrameCryptorKeyProvider nativeCreateFrameCryptorKeyProvider( + boolean sharedKey, byte[] ratchetSalt, int ratchetWindowSize, byte[] uncryptedMagicBytes); +} diff --git a/sdk/android/api/org/webrtc/FrameCryptorKeyProvider.java b/sdk/android/api/org/webrtc/FrameCryptorKeyProvider.java new file mode 100644 index 0000000000..1c89eac55b --- /dev/null +++ b/sdk/android/api/org/webrtc/FrameCryptorKeyProvider.java @@ -0,0 +1,66 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.webrtc; + +import java.util.ArrayList; + +public class FrameCryptorKeyProvider { + private long nativeKeyProvider; + + @CalledByNative + public FrameCryptorKeyProvider(long nativeKeyProvider) { + this.nativeKeyProvider = nativeKeyProvider; + } + + public long getNativeKeyProvider() { + return nativeKeyProvider; + } + + public boolean setKey(String participantId, int index, byte[] key) { + checkKeyProviderExists(); + return nativeSetKey(nativeKeyProvider, participantId, index, key); + } + + public byte[] ratchetKey(String participantId, int index) { + checkKeyProviderExists(); + return nativeRatchetKey(nativeKeyProvider, participantId, index); + } + + public byte[] exportKey(String participantId, int index) { + checkKeyProviderExists(); + return nativeExportKey(nativeKeyProvider, participantId, index); + } + + public void dispose() { + checkKeyProviderExists(); + JniCommon.nativeReleaseRef(nativeKeyProvider); + nativeKeyProvider = 0; + } + + private void checkKeyProviderExists() { + if (nativeKeyProvider == 0) { + throw new IllegalStateException("FrameCryptorKeyProvider has been disposed."); + } + } + + private static native boolean nativeSetKey( + long keyProviderPointer, String participantId, int index, byte[] key); + private static native byte[] nativeRatchetKey( + long keyProviderPointer, String participantId, int index); + private static native byte[] nativeExportKey( + long keyProviderPointer, String participantId, int index); +} \ No newline at end of file diff --git a/sdk/android/src/jni/pc/frame_cryptor.cc b/sdk/android/src/jni/pc/frame_cryptor.cc new file mode 100644 index 0000000000..d02f0c62da --- /dev/null +++ b/sdk/android/src/jni/pc/frame_cryptor.cc @@ -0,0 +1,190 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#include "sdk/android/src/jni/pc/frame_cryptor.h" + +#include "api/rtp_receiver_interface.h" +#include "api/rtp_sender_interface.h" +#include "rtc_base/ref_counted_object.h" +#include "sdk/android/generated_peerconnection_jni/FrameCryptorFactory_jni.h" +#include "sdk/android/generated_peerconnection_jni/FrameCryptor_jni.h" +#include "sdk/android/native_api/jni/java_types.h" +#include "sdk/android/src/jni/jni_helpers.h" +#include "sdk/android/src/jni/pc/frame_cryptor_key_provider.h" + +namespace webrtc { +namespace jni { + +FrameCryptorObserverJni::FrameCryptorObserverJni( + JNIEnv* jni, + const JavaRef& j_observer) + : j_observer_global_(jni, j_observer) {} + +FrameCryptorObserverJni::~FrameCryptorObserverJni() {} + +void FrameCryptorObserverJni::OnFrameCryptionStateChanged( + const std::string participant_id, + FrameCryptionState new_state) { + JNIEnv* env = AttachCurrentThreadIfNeeded(); + Java_Observer_onFrameCryptionStateChanged( + env, j_observer_global_, NativeToJavaString(env, participant_id), + Java_FrameCryptionState_fromNativeIndex(env, new_state)); +} + +ScopedJavaLocalRef NativeToJavaFrameCryptor( + JNIEnv* env, + rtc::scoped_refptr cryptor) { + if (!cryptor) + return nullptr; + // Sender is now owned by the Java object, and will be freed from + // FrameCryptor.dispose(). + return Java_FrameCryptor_Constructor(env, + jlongFromPointer(cryptor.release())); +} + +static void JNI_FrameCryptor_SetEnabled(JNIEnv* jni, + jlong j_frame_cryptor_pointer, + jboolean j_enabled) { + reinterpret_cast(j_frame_cryptor_pointer) + ->SetEnabled(j_enabled); +} + +static jboolean JNI_FrameCryptor_IsEnabled(JNIEnv* jni, + jlong j_frame_cryptor_pointer) { + return reinterpret_cast(j_frame_cryptor_pointer) + ->enabled(); +} + +static void JNI_FrameCryptor_SetKeyIndex(JNIEnv* jni, + jlong j_frame_cryptor_pointer, + jint j_index) { + reinterpret_cast(j_frame_cryptor_pointer) + ->SetKeyIndex(j_index); +} + +static jint JNI_FrameCryptor_GetKeyIndex(JNIEnv* jni, + jlong j_frame_cryptor_pointer) { + return reinterpret_cast(j_frame_cryptor_pointer) + ->key_index(); +} + +static jlong JNI_FrameCryptor_SetObserver( + JNIEnv* jni, + jlong j_frame_cryptor_pointer, + const JavaParamRef& j_observer) { + auto observer = + rtc::make_ref_counted(jni, j_observer); + observer->AddRef(); + reinterpret_cast(j_frame_cryptor_pointer) + ->SetFrameCryptorTransformerObserver(observer.get()); + return jlongFromPointer(observer.get()); +} + +static void JNI_FrameCryptor_UnSetObserver(JNIEnv* jni, + jlong j_frame_cryptor_pointer) { + reinterpret_cast(j_frame_cryptor_pointer) + ->SetFrameCryptorTransformerObserver(nullptr); +} + +webrtc::FrameCryptorTransformer::Algorithm AlgorithmFromIndex(int index) { + switch (index) { + case 0: + return webrtc::FrameCryptorTransformer::Algorithm::kAesGcm; + case 1: + return webrtc::FrameCryptorTransformer::Algorithm::kAesCbc; + default: + return webrtc::FrameCryptorTransformer::Algorithm::kAesGcm; + } +} + +static base::android::ScopedJavaLocalRef +JNI_FrameCryptorFactory_CreateFrameCryptorForRtpReceiver( + JNIEnv* env, + jlong j_rtp_receiver_pointer, + const base::android::JavaParamRef& participantId, + jint j_algorithm_index, + jlong j_key_provider) { + auto keyProvider = + reinterpret_cast(j_key_provider); + auto participant_id = JavaToStdString(env, participantId); + auto rtpReceiver = + reinterpret_cast(j_rtp_receiver_pointer); + auto mediaType = + rtpReceiver->track()->kind() == "audio" + ? 
webrtc::FrameCryptorTransformer::MediaType::kAudioFrame + : webrtc::FrameCryptorTransformer::MediaType::kVideoFrame; + auto frame_crypto_transformer = + rtc::scoped_refptr( + new webrtc::FrameCryptorTransformer( + participant_id, mediaType, AlgorithmFromIndex(j_algorithm_index), + rtc::scoped_refptr(keyProvider))); + + rtpReceiver->SetDepacketizerToDecoderFrameTransformer( + frame_crypto_transformer); + frame_crypto_transformer->SetEnabled(false); + + return NativeToJavaFrameCryptor(env, frame_crypto_transformer); +} + +static base::android::ScopedJavaLocalRef +JNI_FrameCryptorFactory_CreateFrameCryptorForRtpSender( + JNIEnv* env, + jlong j_rtp_sender_pointer, + const base::android::JavaParamRef& participantId, + jint j_algorithm_index, + jlong j_key_provider) { + auto keyProvider = + reinterpret_cast(j_key_provider); + auto rtpSender = reinterpret_cast(j_rtp_sender_pointer); + auto participant_id = JavaToStdString(env, participantId); + auto mediaType = + rtpSender->track()->kind() == "audio" + ? webrtc::FrameCryptorTransformer::MediaType::kAudioFrame + : webrtc::FrameCryptorTransformer::MediaType::kVideoFrame; + auto frame_crypto_transformer = + rtc::scoped_refptr( + new webrtc::FrameCryptorTransformer( + participant_id, mediaType, AlgorithmFromIndex(j_algorithm_index), + rtc::scoped_refptr(keyProvider))); + + rtpSender->SetEncoderToPacketizerFrameTransformer(frame_crypto_transformer); + frame_crypto_transformer->SetEnabled(false); + + return NativeToJavaFrameCryptor(env, frame_crypto_transformer); +} + +static base::android::ScopedJavaLocalRef +JNI_FrameCryptorFactory_CreateFrameCryptorKeyProvider( + JNIEnv* env, + jboolean j_shared, + const base::android::JavaParamRef& j_ratchetSalt, + jint j_ratchetWindowSize, + const base::android::JavaParamRef& j_uncryptedMagicBytes) { + auto ratchetSalt = JavaToNativeByteArray(env, j_ratchetSalt); + KeyProviderOptions options; + options.ratchet_salt = + std::vector(ratchetSalt.begin(), ratchetSalt.end()); + options.ratchet_window_size = j_ratchetWindowSize; + + auto uncryptedMagicBytes = JavaToNativeByteArray(env, j_uncryptedMagicBytes); + options.uncrypted_magic_bytes = + std::vector(uncryptedMagicBytes.begin(), uncryptedMagicBytes.end()); + options.shared_key = j_shared; + return NativeToJavaFrameCryptorKeyProvider( + env, rtc::make_ref_counted(options)); +} + +} // namespace jni +} // namespace webrtc diff --git a/sdk/android/src/jni/pc/frame_cryptor.h b/sdk/android/src/jni/pc/frame_cryptor.h new file mode 100644 index 0000000000..66645bb33c --- /dev/null +++ b/sdk/android/src/jni/pc/frame_cryptor.h @@ -0,0 +1,50 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef SDK_ANDROID_SRC_JNI_PC_FRAME_CRYPTOR_H_ +#define SDK_ANDROID_SRC_JNI_PC_FRAME_CRYPTOR_H_ + +#include + +#include "api/crypto/frame_crypto_transformer.h" +#include "sdk/android/native_api/jni/scoped_java_ref.h" + +namespace webrtc { +namespace jni { + +ScopedJavaLocalRef NativeToJavaFrameCryptor( + JNIEnv* env, + rtc::scoped_refptr cryptor); + +class FrameCryptorObserverJni : public FrameCryptorTransformerObserver, + public rtc::RefCountInterface { + public: + FrameCryptorObserverJni(JNIEnv* jni, const JavaRef& j_observer); + ~FrameCryptorObserverJni() override; + + protected: + void OnFrameCryptionStateChanged(const std::string participant_id, + FrameCryptionState state) override; + + private: + const ScopedJavaGlobalRef j_observer_global_; + const ScopedJavaGlobalRef j_observer_; +}; + +} // namespace jni +} // namespace webrtc + +#endif // SDK_ANDROID_SRC_JNI_PC_FRAME_CRYPTOR_H_ diff --git a/sdk/android/src/jni/pc/frame_cryptor_key_provider.cc b/sdk/android/src/jni/pc/frame_cryptor_key_provider.cc new file mode 100644 index 0000000000..2732693c0f --- /dev/null +++ b/sdk/android/src/jni/pc/frame_cryptor_key_provider.cc @@ -0,0 +1,79 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#include "sdk/android/src/jni/pc/frame_cryptor_key_provider.h" + +#include "sdk/android/generated_peerconnection_jni/FrameCryptorKeyProvider_jni.h" +#include "sdk/android/native_api/jni/java_types.h" +#include "sdk/android/src/jni/jni_helpers.h" + +namespace webrtc { +namespace jni { + +ScopedJavaLocalRef NativeToJavaFrameCryptorKeyProvider( + JNIEnv* env, + rtc::scoped_refptr key_provider) { + if (!key_provider) + return nullptr; + // Sender is now owned by the Java object, and will be freed from + // FrameCryptorKeyProvider.dispose(). 
+ return Java_FrameCryptorKeyProvider_Constructor( + env, jlongFromPointer(key_provider.release())); +} + +static jboolean JNI_FrameCryptorKeyProvider_SetKey( + JNIEnv* jni, + jlong j_key_provider, + const base::android::JavaParamRef& participantId, + jint j_index, + const base::android::JavaParamRef& j_key) { + auto key = JavaToNativeByteArray(jni, j_key); + auto participant_id = JavaToStdString(jni, participantId); + return reinterpret_cast(j_key_provider) + ->SetKey(participant_id, j_index, + std::vector(key.begin(), key.end())); +} + +static base::android::ScopedJavaLocalRef +JNI_FrameCryptorKeyProvider_RatchetKey( + JNIEnv* env, + jlong keyProviderPointer, + const base::android::JavaParamRef& participantId, + jint j_index) { + auto participant_id = JavaToStdString(env, participantId); + auto key_provider = + reinterpret_cast(keyProviderPointer); + auto newKey = key_provider->RatchetKey(participant_id, j_index); + std::vector int8tKey = + std::vector(newKey.begin(), newKey.end()); + return NativeToJavaByteArray(env, rtc::ArrayView(int8tKey)); +} + +static base::android::ScopedJavaLocalRef +JNI_FrameCryptorKeyProvider_ExportKey( + JNIEnv* env, + jlong keyProviderPointer, + const base::android::JavaParamRef& participantId, + jint j_index) { + auto participant_id = JavaToStdString(env, participantId); + auto key_provider = + reinterpret_cast(keyProviderPointer); + auto key = key_provider->ExportKey(participant_id, j_index); + std::vector int8tKey = std::vector(key.begin(), key.end()); + return NativeToJavaByteArray(env, rtc::ArrayView(int8tKey)); +} + +} // namespace jni +} // namespace webrtc diff --git a/sdk/android/src/jni/pc/frame_cryptor_key_provider.h b/sdk/android/src/jni/pc/frame_cryptor_key_provider.h new file mode 100644 index 0000000000..8832a83035 --- /dev/null +++ b/sdk/android/src/jni/pc/frame_cryptor_key_provider.h @@ -0,0 +1,35 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef SDK_ANDROID_SRC_JNI_PC_FRAME_CRYPTOR_KEY_PROVIDER_H_ +#define SDK_ANDROID_SRC_JNI_PC_FRAME_CRYPTOR_KEY_PROVIDER_H_ + +#include + +#include "api/crypto/frame_crypto_transformer.h" +#include "sdk/android/native_api/jni/scoped_java_ref.h" + +namespace webrtc { +namespace jni { + +ScopedJavaLocalRef NativeToJavaFrameCryptorKeyProvider( + JNIEnv* env, + rtc::scoped_refptr cryptor); + +} // namespace jni +} // namespace webrtc + +#endif // SDK_ANDROID_SRC_JNI_PC_FRAME_CRYPTOR_KEY_PROVIDER_H_ diff --git a/sdk/objc/api/peerconnection/RTCFrameCryptor+Private.h b/sdk/objc/api/peerconnection/RTCFrameCryptor+Private.h new file mode 100644 index 0000000000..86e6fdff8c --- /dev/null +++ b/sdk/objc/api/peerconnection/RTCFrameCryptor+Private.h @@ -0,0 +1,45 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import "RTCFrameCryptor.h" + +#include +#include "api/crypto/frame_crypto_transformer.h" + +NS_ASSUME_NONNULL_BEGIN + +@interface RTC_OBJC_TYPE (RTCFrameCryptor) +() + + @end + +namespace webrtc { + +class RTCFrameCryptorDelegateAdapter : public FrameCryptorTransformerObserver { + public: + RTCFrameCryptorDelegateAdapter(RTC_OBJC_TYPE(RTCFrameCryptor) * frameCryptor); + ~RTCFrameCryptorDelegateAdapter() override; + + void OnFrameCryptionStateChanged(const std::string participant_id, + FrameCryptionState state) override; + + private: + __weak RTC_OBJC_TYPE(RTCFrameCryptor) * frame_cryptor_; +}; + +} // namespace webrtc + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/api/peerconnection/RTCFrameCryptor.h b/sdk/objc/api/peerconnection/RTCFrameCryptor.h new file mode 100644 index 0000000000..6712ca3688 --- /dev/null +++ b/sdk/objc/api/peerconnection/RTCFrameCryptor.h @@ -0,0 +1,75 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import + +#import "RTCMacros.h" + +NS_ASSUME_NONNULL_BEGIN + +@class RTC_OBJC_TYPE(RTCRtpSender); +@class RTC_OBJC_TYPE(RTCRtpReceiver); +@class RTC_OBJC_TYPE(RTCFrameCryptorKeyProvider); +@class RTC_OBJC_TYPE(RTCFrameCryptor); + +typedef NS_ENUM(NSUInteger, RTCCyrptorAlgorithm) { + RTCCyrptorAlgorithmAesGcm = 0, + RTCCyrptorAlgorithmAesCbc, +}; + +typedef NS_ENUM(NSInteger, FrameCryptionState) { + FrameCryptionStateNew = 0, + FrameCryptionStateOk, + FrameCryptionStateEncryptionFailed, + FrameCryptionStateDecryptionFailed, + FrameCryptionStateMissingKey, + FrameCryptionStateKeyRatcheted, + FrameCryptionStateInternalError, +}; + +RTC_OBJC_EXPORT +@protocol RTC_OBJC_TYPE +(RTCFrameCryptorDelegate) + /** Called when the RTCFrameCryptor got errors. 
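In practice this is invoked for every FrameCryptionState transition, not only error states.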
*/ + - (void)frameCryptor + : (RTC_OBJC_TYPE(RTCFrameCryptor) *)frameCryptor didStateChangeWithParticipantId + : (NSString *)participantId withState : (FrameCryptionState)stateChanged; +@end + +RTC_OBJC_EXPORT +@interface RTC_OBJC_TYPE (RTCFrameCryptor) : NSObject + +@property(nonatomic, assign) BOOL enabled; + +@property(nonatomic, assign) int keyIndex; + +@property(nonatomic, readonly) NSString *participantId; + +@property(nonatomic, weak, nullable) id delegate; + +- (instancetype)initWithRtpSender:(RTC_OBJC_TYPE(RTCRtpSender) *)sender + participantId:(NSString *)participantId + algorithm:(RTCCyrptorAlgorithm)algorithm + keyProvider:(RTC_OBJC_TYPE(RTCFrameCryptorKeyProvider) *)keyProvider; + +- (instancetype)initWithRtpReceiver:(RTC_OBJC_TYPE(RTCRtpReceiver) *)receiver + participantId:(NSString *)participantId + algorithm:(RTCCyrptorAlgorithm)algorithm + keyProvider:(RTC_OBJC_TYPE(RTCFrameCryptorKeyProvider) *)keyProvider; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/api/peerconnection/RTCFrameCryptor.mm b/sdk/objc/api/peerconnection/RTCFrameCryptor.mm new file mode 100644 index 0000000000..312331f72d --- /dev/null +++ b/sdk/objc/api/peerconnection/RTCFrameCryptor.mm @@ -0,0 +1,182 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#import "RTCFrameCryptor+Private.h" +#import "RTCFrameCryptorKeyProvider+Private.h" +#import "RTCRtpReceiver+Private.h" +#import "RTCRtpSender+Private.h" + +#include + +#import "base/RTCLogging.h" +#import "helpers/NSString+StdString.h" + +#include "api/crypto/frame_crypto_transformer.h" +#include "api/rtp_receiver_interface.h" +#include "api/rtp_sender_interface.h" + +namespace webrtc { + +RTCFrameCryptorDelegateAdapter::RTCFrameCryptorDelegateAdapter(RTC_OBJC_TYPE(RTCFrameCryptor) * + frameCryptor) + : frame_cryptor_(frameCryptor) {} + +RTCFrameCryptorDelegateAdapter::~RTCFrameCryptorDelegateAdapter() {} + +/* + kNew = 0, + kOk, + kEncryptionFailed, + kDecryptionFailed, + kMissingKey, + kInternalError, +*/ +void RTCFrameCryptorDelegateAdapter::OnFrameCryptionStateChanged(const std::string participant_id, + FrameCryptionState state) { + RTC_OBJC_TYPE(RTCFrameCryptor) *frameCryptor = frame_cryptor_; + if (frameCryptor.delegate) { + switch (state) { + case FrameCryptionState::kNew: + [frameCryptor.delegate frameCryptor:frameCryptor + didStateChangeWithParticipantId:[NSString stringForStdString:participant_id] + withState:FrameCryptionStateNew]; + break; + case FrameCryptionState::kOk: + [frameCryptor.delegate frameCryptor:frameCryptor + didStateChangeWithParticipantId:[NSString stringForStdString:participant_id] + withState:FrameCryptionStateOk]; + break; + case FrameCryptionState::kEncryptionFailed: + [frameCryptor.delegate frameCryptor:frameCryptor + didStateChangeWithParticipantId:[NSString stringForStdString:participant_id] + withState:FrameCryptionStateEncryptionFailed]; + break; + case FrameCryptionState::kDecryptionFailed: + [frameCryptor.delegate frameCryptor:frameCryptor + didStateChangeWithParticipantId:[NSString stringForStdString:participant_id] + withState:FrameCryptionStateDecryptionFailed]; + break; + case FrameCryptionState::kMissingKey: + [frameCryptor.delegate frameCryptor:frameCryptor + didStateChangeWithParticipantId:[NSString stringForStdString:participant_id] + withState:FrameCryptionStateMissingKey]; + break; + case FrameCryptionState::kKeyRatcheted: + [frameCryptor.delegate frameCryptor:frameCryptor + didStateChangeWithParticipantId:[NSString stringForStdString:participant_id] + withState:FrameCryptionStateKeyRatcheted]; + break; + case FrameCryptionState::kInternalError: + [frameCryptor.delegate frameCryptor:frameCryptor + didStateChangeWithParticipantId:[NSString stringForStdString:participant_id] + withState:FrameCryptionStateInternalError]; + break; + } + } +} +} // namespace webrtc + +@implementation RTC_OBJC_TYPE (RTCFrameCryptor) { + const webrtc::RtpSenderInterface *_sender; + const webrtc::RtpReceiverInterface *_receiver; + NSString *_participantId; + rtc::scoped_refptr frame_crypto_transformer_; + std::unique_ptr _observer; +} + +@synthesize participantId = _participantId; +@synthesize delegate = _delegate; + +- (webrtc::FrameCryptorTransformer::Algorithm)algorithmFromEnum:(RTCCyrptorAlgorithm)algorithm { + switch (algorithm) { + case RTCCyrptorAlgorithmAesGcm: + return webrtc::FrameCryptorTransformer::Algorithm::kAesGcm; + case RTCCyrptorAlgorithmAesCbc: + return webrtc::FrameCryptorTransformer::Algorithm::kAesCbc; + default: + return webrtc::FrameCryptorTransformer::Algorithm::kAesGcm; + } +} + +- (instancetype)initWithRtpSender:(RTC_OBJC_TYPE(RTCRtpSender) *)sender + participantId:(NSString *)participantId + algorithm:(RTCCyrptorAlgorithm)algorithm + keyProvider:(RTC_OBJC_TYPE(RTCFrameCryptorKeyProvider) *)keyProvider { + if (self = [super init]) 
{ + _observer.reset(new webrtc::RTCFrameCryptorDelegateAdapter(self)); + _participantId = participantId; + auto rtpSender = sender.nativeRtpSender; + auto mediaType = rtpSender->track()->kind() == "audio" ? + webrtc::FrameCryptorTransformer::MediaType::kAudioFrame : + webrtc::FrameCryptorTransformer::MediaType::kVideoFrame; + frame_crypto_transformer_ = rtc::scoped_refptr( + new webrtc::FrameCryptorTransformer([participantId stdString], + mediaType, + [self algorithmFromEnum:algorithm], + keyProvider.nativeKeyProvider)); + + rtpSender->SetEncoderToPacketizerFrameTransformer(frame_crypto_transformer_); + frame_crypto_transformer_->SetEnabled(false); + frame_crypto_transformer_->SetFrameCryptorTransformerObserver(_observer.get()); + } + return self; +} + +- (instancetype)initWithRtpReceiver:(RTC_OBJC_TYPE(RTCRtpReceiver) *)receiver + participantId:(NSString *)participantId + algorithm:(RTCCyrptorAlgorithm)algorithm + keyProvider:(RTC_OBJC_TYPE(RTCFrameCryptorKeyProvider) *)keyProvider { + if (self = [super init]) { + _observer.reset(new webrtc::RTCFrameCryptorDelegateAdapter(self)); + _participantId = participantId; + auto rtpReceiver = receiver.nativeRtpReceiver; + auto mediaType = rtpReceiver->track()->kind() == "audio" ? + webrtc::FrameCryptorTransformer::MediaType::kAudioFrame : + webrtc::FrameCryptorTransformer::MediaType::kVideoFrame; + frame_crypto_transformer_ = rtc::scoped_refptr( + new webrtc::FrameCryptorTransformer([participantId stdString], + mediaType, + [self algorithmFromEnum:algorithm], + keyProvider.nativeKeyProvider)); + + rtpReceiver->SetDepacketizerToDecoderFrameTransformer(frame_crypto_transformer_); + frame_crypto_transformer_->SetEnabled(false); + frame_crypto_transformer_->SetFrameCryptorTransformerObserver(_observer.get()); + } + return self; +} + +- (BOOL)enabled { + return frame_crypto_transformer_->enabled(); +} + +- (void)setEnabled:(BOOL)enabled { + frame_crypto_transformer_->SetEnabled(enabled); +} + +- (int)keyIndex { + return frame_crypto_transformer_->key_index(); +} + +- (void)setKeyIndex:(int)keyIndex { + frame_crypto_transformer_->SetKeyIndex(keyIndex); +} + +- (void)dealloc { + frame_crypto_transformer_->SetFrameCryptorTransformerObserver(nullptr); +} + +@end diff --git a/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider+Private.h b/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider+Private.h new file mode 100644 index 0000000000..eb7c83e2e7 --- /dev/null +++ b/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider+Private.h @@ -0,0 +1,31 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#import "RTCFrameCryptorKeyProvider.h" + +#include "api/crypto/frame_crypto_transformer.h" +#include "rtc_base/ref_count.h" + +NS_ASSUME_NONNULL_BEGIN + +@interface RTC_OBJC_TYPE (RTCFrameCryptorKeyProvider) +() + + @property(nonatomic, readonly) rtc::scoped_refptr nativeKeyProvider; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.h b/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.h new file mode 100644 index 0000000000..276b2e730c --- /dev/null +++ b/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.h @@ -0,0 +1,39 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import + +#import "RTCMacros.h" + +NS_ASSUME_NONNULL_BEGIN + +RTC_OBJC_EXPORT +@interface RTC_OBJC_TYPE (RTCFrameCryptorKeyProvider) : NSObject + +- (void)setKey:(NSData *)key withIndex:(int)index forParticipant:(NSString *)participantId; + +- (NSData *)ratchetKey:(NSString *)participantId withIndex:(int)index; + +- (NSData *)exportKey:(NSString *)participantId withIndex:(int)index; + +- (instancetype)initWithRatchetSalt:(NSData *)salt + ratchetWindowSize:(int)windowSize + sharedKeyMode:(BOOL)sharedKey + uncryptedMagicBytes:(nullable NSData *)uncryptedMagicBytes; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.mm b/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.mm new file mode 100644 index 0000000000..c27e18975b --- /dev/null +++ b/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.mm @@ -0,0 +1,69 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#import "RTCFrameCryptorKeyProvider+Private.h" + +#include +#include "api/crypto/frame_crypto_transformer.h" + +#import "base/RTCLogging.h" +#import "helpers/NSString+StdString.h" + +@implementation RTC_OBJC_TYPE (RTCFrameCryptorKeyProvider) { + rtc::scoped_refptr _nativeKeyProvider; +} + +- (rtc::scoped_refptr)nativeKeyProvider { + return _nativeKeyProvider; +} + +- (instancetype)initWithRatchetSalt:(NSData *)salt + ratchetWindowSize:(int)windowSize + sharedKeyMode:(BOOL)sharedKey + uncryptedMagicBytes:(NSData *)uncryptedMagicBytes { + if (self = [super init]) { + webrtc::KeyProviderOptions options; + options.ratchet_salt = std::vector((const uint8_t *)salt.bytes, + ((const uint8_t *)salt.bytes) + salt.length); + options.ratchet_window_size = windowSize; + options.shared_key = sharedKey; + if(uncryptedMagicBytes != nil) { + options.uncrypted_magic_bytes = std::vector((const uint8_t *)uncryptedMagicBytes.bytes, + ((const uint8_t *)uncryptedMagicBytes.bytes) + uncryptedMagicBytes.length); + } + _nativeKeyProvider = rtc::make_ref_counted(options); + } + return self; +} + +- (void)setKey:(NSData *)key withIndex:(int)index forParticipant:(NSString *)participantId { + _nativeKeyProvider->SetKey( + [participantId stdString], + index, + std::vector((const uint8_t *)key.bytes, ((const uint8_t *)key.bytes) + key.length)); +} + +- (NSData *)ratchetKey:(NSString *)participantId withIndex:(int)index { + std::vector nativeKey = _nativeKeyProvider->RatchetKey([participantId stdString], index); + return [NSData dataWithBytes:nativeKey.data() length:nativeKey.size()]; +} + +- (NSData *)exportKey:(NSString *)participantId withIndex:(int)index { + std::vector nativeKey = _nativeKeyProvider->ExportKey([participantId stdString], index); + return [NSData dataWithBytes:nativeKey.data() length:nativeKey.size()]; +} + +@end diff --git a/sdk/objc/native/src/objc_video_track_source.mm b/sdk/objc/native/src/objc_video_track_source.mm index 51b4d4e6b4..7937e90505 100644 --- a/sdk/objc/native/src/objc_video_track_source.mm +++ b/sdk/objc/native/src/objc_video_track_source.mm @@ -37,7 +37,7 @@ - (void)capturer:(RTC_OBJC_TYPE(RTCVideoCapturer) *)capturer ObjCVideoTrackSource::ObjCVideoTrackSource() : ObjCVideoTrackSource(false) {} ObjCVideoTrackSource::ObjCVideoTrackSource(bool is_screencast) - : AdaptedVideoTrackSource(/* required resolution alignment */ is_screencast? 16 : 2), + : AdaptedVideoTrackSource(/* required resolution alignment */ 2), is_screencast_(is_screencast) {} ObjCVideoTrackSource::ObjCVideoTrackSource(RTCObjCVideoSourceAdapter *adapter) : adapter_(adapter) { From d5ec6fa78b7e8c0813b40f10bdac6c0ce21bc241 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Th=C3=A9o=20Monnom?= Date: Fri, 2 Dec 2022 19:24:19 +0100 Subject: [PATCH 12/42] Other improvements. 
MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Added yuv_helper (#57) ABGRToI420, ARGBToI420 & ARGBToRGB24 (#65) Co-authored-by: Théo Monnom Co-authored-by: Hiroshi Horie <548776+hiroshihorie@users.noreply.github.com> --- api/BUILD.gn | 1 + api/video/BUILD.gn | 13 +++ api/video/yuv_helper.cc | 218 ++++++++++++++++++++++++++++++++++++++++ api/video/yuv_helper.h | 169 +++++++++++++++++++++++++++++++ 4 files changed, 401 insertions(+) create mode 100644 api/video/yuv_helper.cc create mode 100644 api/video/yuv_helper.h diff --git a/api/BUILD.gn b/api/BUILD.gn index a0a9563ce0..7afad0bf29 100644 --- a/api/BUILD.gn +++ b/api/BUILD.gn @@ -330,6 +330,7 @@ rtc_library("libjingle_peerconnection_api") { "video:encoded_image", "video:video_bitrate_allocator_factory", "video:video_frame", + "video:yuv_helper", "video:video_rtp_headers", "video_codecs:video_codecs_api", diff --git a/api/video/BUILD.gn b/api/video/BUILD.gn index c8deac37e9..3e53f041c2 100644 --- a/api/video/BUILD.gn +++ b/api/video/BUILD.gn @@ -79,6 +79,19 @@ rtc_library("video_frame") { absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } +rtc_library("yuv_helper") { + visibility = [ "*" ] + sources = [ + "yuv_helper.cc", + "yuv_helper.h", + ] + + deps = [ + "../../rtc_base/system:rtc_export", + "//third_party/libyuv", + ] +} + if (is_android) { java_cpp_enum("video_frame_enums") { sources = [ "video_frame_buffer.h" ] diff --git a/api/video/yuv_helper.cc b/api/video/yuv_helper.cc new file mode 100644 index 0000000000..3ed5dd4104 --- /dev/null +++ b/api/video/yuv_helper.cc @@ -0,0 +1,218 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "yuv_helper.h" + +#include "libyuv/convert.h" +#include "third_party/libyuv/include/libyuv.h" +#include "video_rotation.h" + +namespace webrtc { + +int I420Rotate(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height, + VideoRotation mode) { + return libyuv::I420Rotate(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_y, dst_stride_y, dst_u, + dst_stride_u, dst_v, dst_stride_v, width, height, + static_cast(mode)); +} + +int I420ToNV12(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_uv, + int dst_stride_uv, + int width, + int height) { + return libyuv::I420ToNV12(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_y, dst_stride_y, dst_uv, + dst_stride_uv, width, height); +} + +int NV12ToI420(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height) { + return libyuv::NV12ToI420(src_y, src_stride_y, src_uv, src_stride_uv, dst_y, + dst_stride_y, dst_u, dst_stride_u, dst_v, + dst_stride_v, width, height); +} + +int I420ToARGB(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height) { + return libyuv::I420ToARGB(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_argb, dst_stride_argb, width, + height); +} + +int I420ToBGRA(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_bgra, + int dst_stride_bgra, + int width, + int height) { + return libyuv::I420ToBGRA(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_bgra, dst_stride_bgra, width, + height); +} + +int I420ToABGR(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int height) { + return libyuv::I420ToABGR(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_abgr, dst_stride_abgr, width, + height); +} + +int I420ToRGBA(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgba, + int dst_stride_rgba, + int width, + int height) { + return libyuv::I420ToRGBA(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_rgba, dst_stride_rgba, width, + height); +} + +int I420ToRGB24(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgb24, + int dst_stride_rgb24, + int width, + int height) { + return libyuv::I420ToRGB24(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_rgb24, dst_stride_rgb24, width, + height); +} + +int I420Scale(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + int src_width, + int src_height, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int 
dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int dst_width, + int dst_height, + libyuv::FilterMode filtering) { + return libyuv::I420Scale(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, src_width, src_height, dst_y, + dst_stride_y, dst_u, dst_stride_u, dst_v, + dst_stride_v, dst_width, dst_height, filtering); +} + +int ARGBToI420(const uint8_t* src_argb, + int src_stride_argb, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height) { + return libyuv::ARGBToI420(src_argb, src_stride_argb, dst_y, dst_stride_y, + dst_u, dst_stride_u, dst_v, dst_stride_v, width, + height); +} + +int ABGRToI420(const uint8_t* src_abgr, + int src_stride_abgr, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height) { + return libyuv::ABGRToI420(src_abgr, src_stride_abgr, dst_y, dst_stride_y, + dst_u, dst_stride_u, dst_v, dst_stride_v, width, + height); +} + +int ARGBToRGB24(const uint8_t* src_argb, + int src_stride_argb, + uint8_t* dst_rgb24, + int dst_stride_rgb24, + int width, + int height) { + return libyuv::ARGBToRGB24(src_argb, src_stride_argb, dst_rgb24, + dst_stride_rgb24, width, height); +} + +} // namespace webrtc diff --git a/api/video/yuv_helper.h b/api/video/yuv_helper.h new file mode 100644 index 0000000000..0ab007fc57 --- /dev/null +++ b/api/video/yuv_helper.h @@ -0,0 +1,169 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "libyuv/convert.h" +#include "rtc_base/system/rtc_export.h" +#include "stdint.h" +#include "third_party/libyuv/include/libyuv.h" +#include "video_rotation.h" + +namespace webrtc { + +RTC_EXPORT int I420Rotate(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height, + VideoRotation mode); + +RTC_EXPORT int I420ToNV12(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_uv, + int dst_stride_uv, + int width, + int height); + +RTC_EXPORT int NV12ToI420(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height); + +RTC_EXPORT int I420ToARGB(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height); + +RTC_EXPORT int I420ToBGRA(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_bgra, + int dst_stride_bgra, + int width, + int height); + +RTC_EXPORT int I420ToABGR(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int height); + +RTC_EXPORT int I420ToRGBA(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgba, + int dst_stride_rgba, + int width, + int height); + +RTC_EXPORT int I420ToRGB24(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgb24, + int dst_stride_rgb24, + int width, + int height); + +RTC_EXPORT int I420Scale(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + int src_width, + int src_height, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int dst_width, + int dst_height, + libyuv::FilterMode filtering); + +RTC_EXPORT int ARGBToI420(const uint8_t* src_argb, + int src_stride_argb, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height); + +RTC_EXPORT int ABGRToI420(const uint8_t* src_abgr, + int src_stride_abgr, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height); + +RTC_EXPORT int ARGBToRGB24(const uint8_t* src_argb, + int src_stride_argb, + uint8_t* dst_rgb24, + int dst_stride_rgb24, + int width, + int height); + +} // namespace webrtc From b0f3927ad1472a3411fed7f5b8cd972907b63feb Mon Sep 17 00:00:00 2001 From: David Zhao Date: Sat, 29 Jul 2023 10:14:23 -0700 Subject: [PATCH 13/42] Updated readme (#83) --- README.md | 32 +++++++++++++++----------------- 1 file changed, 15 insertions(+), 17 deletions(-) diff --git a/README.md b/README.md index de413d3bdb..1c862c4ca4 100644 --- a/README.md +++ 
b/README.md
@@ -2,34 +2,29 @@
 This repository contains a fork of WebRTC from Google with various improvements.
-## Changes
+## Main changes
 ### All
-- Dynamically acquire decoder to mitigate decoder limitations #25 #26
+- Dynamically acquire decoder to mitigate decoder limitations [#25](https://github.com/webrtc-sdk/webrtc/pull/25)
+- Support for video simulcast with hardware & software encoders [patch](https://github.com/webrtc-sdk/webrtc/commit/ee030264e2274a2c90548a99b448782049e48fb4)
+- Frame cryptor support (for end-to-end encryption) [patch](https://github.com/webrtc-sdk/webrtc/commit/3a2c008529a15fecde5f979a6ebb75c05463d45e)
 ### Android
-- Support for video simulcast #3
+- WrappedVideoDecoderFactory [#74](https://github.com/webrtc-sdk/webrtc/pull/74)
-### iOS
+### iOS / Mac
-- Do not request microphone permissions for playback-only #2 #5
-- Improvements to AVAudioSession interactions #7 #8
-- Support for video simulcast #4
-- Support for voice processing bypass #15
-
-### Mac
-
-- Support for video simulcast #10
-- Remove hardcoded limitation of outputting to only right speaker on MBP #22
-- Screen capture support #24 #36 #37
-- Support for audio output device selection #35
-- Cross-platform RTCMTLVideoView #40
+- Sane audio handling [patch](https://github.com/webrtc-sdk/webrtc/commit/272127d457ab48e36241e82549870405864851f6)
+  - Do not acquire microphone/permissions unless actively publishing audio
+  - Ability to bypass voice processing on iOS
+  - Remove hardcoded limitation of outputting only to right speaker on MacBook Pro
+- Desktop capture for Mac [patch](https://github.com/webrtc-sdk/webrtc/commit/8e832d1163644ab504412c9b8f3ba8510d9890d6)
 ### Windows
-- Fixed unable to acquire Mic when built-in AEC is enabled #29
+- Fixed unable to acquire Mic when built-in AEC is enabled [#29](https://github.com/webrtc-sdk/webrtc/pull/29)
 ## LICENSE
@@ -47,3 +42,6 @@ This repository contains a fork of WebRTC from Google with various improvements.
 - [Membrane Framework](https://github.com/membraneframework/membrane_rtc_engine)
+- [Louper](https://louper.io)
+
+Are you using WebRTC SDK in your framework or app? Feel free to open a PR and add yourself!
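
For reference, the frame cryptor API added earlier in this series (RTCFrameCryptorKeyProvider plus RTCFrameCryptor) is wired up roughly as follows. This is a minimal Objective-C sketch, not taken from any of the patches: the RTCRtpSender `audioSender`, the participant id, the toy key material, and the enum value `RTCCyrptorAlgorithmAesGcm` are assumptions (the RTCCyrptorAlgorithm values are declared in RTCFrameCryptor.h, outside this excerpt), and the sender-side initializer is assumed to mirror the receiver-side initializer shown in RTCFrameCryptor.mm above. The key-provider selectors match the headers added by patch 11.

// Assumes the SDK headers are importable, e.g.:
// #import "RTCFrameCryptor.h"
// #import "RTCFrameCryptorKeyProvider.h"

NSData *salt = [@"demo-ratchet-salt" dataUsingEncoding:NSUTF8StringEncoding];
NSData *key = [@"demo-key-material" dataUsingEncoding:NSUTF8StringEncoding];  // illustration only; real keys should be random bytes

RTC_OBJC_TYPE(RTCFrameCryptorKeyProvider) *keyProvider =
    [[RTC_OBJC_TYPE(RTCFrameCryptorKeyProvider) alloc] initWithRatchetSalt:salt
                                                          ratchetWindowSize:16
                                                              sharedKeyMode:NO
                                                        uncryptedMagicBytes:nil];
// Register key material at index 0 for this participant.
[keyProvider setKey:key withIndex:0 forParticipant:@"alice"];

// Attach a cryptor to the outgoing RTP sender; the transformer starts disabled,
// so enable it once the key has been set.
RTC_OBJC_TYPE(RTCFrameCryptor) *frameCryptor =
    [[RTC_OBJC_TYPE(RTCFrameCryptor) alloc] initWithRtpSender:audioSender
                                                 participantId:@"alice"
                                                     algorithm:RTCCyrptorAlgorithmAesGcm
                                                   keyProvider:keyProvider];
[frameCryptor setKeyIndex:0];
[frameCryptor setEnabled:YES];

The receiving side follows the same pattern with initWithRtpReceiver:participantId:algorithm:keyProvider:, using a key provider holding the same key material.
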
From ebaa79bbbb578df725f32ad4f1b96242e3ac6e6a Mon Sep 17 00:00:00 2001 From: Hiroshi Horie <548776+hiroshihorie@users.noreply.github.com> Date: Thu, 24 Aug 2023 19:25:15 +0900 Subject: [PATCH 14/42] Expose remote audio sample buffers on RTCAudioTrack (#84) * audio renderer protocol * basic set up * progress * update header year * impl * stereo * fail gracefully * minor fix * doc * optimize AudioStreamBasicDescription * logging * minor refactoring * weak reference to delegates * fix timestamp computation * change swift delegate signature * use os_unfair_lock instead * avoid deadlock --- sdk/BUILD.gn | 3 + .../peerconnection/RTCAudioTrack+Private.h | 10 +- sdk/objc/api/peerconnection/RTCAudioTrack.h | 8 + sdk/objc/api/peerconnection/RTCAudioTrack.mm | 215 +++++++++++++++++- sdk/objc/base/RTCAudioRenderer.h | 34 +++ 5 files changed, 264 insertions(+), 6 deletions(-) create mode 100644 sdk/objc/base/RTCAudioRenderer.h diff --git a/sdk/BUILD.gn b/sdk/BUILD.gn index 5fe0f67b63..7c74df1c87 100644 --- a/sdk/BUILD.gn +++ b/sdk/BUILD.gn @@ -119,6 +119,7 @@ if (is_ios || is_mac) { "objc/base/RTCVideoFrame.mm", "objc/base/RTCVideoFrameBuffer.h", "objc/base/RTCVideoRenderer.h", + "objc/base/RTCAudioRenderer.h", "objc/base/RTCYUVPlanarBuffer.h", ] @@ -1357,6 +1358,7 @@ if (is_ios || is_mac) { "objc/base/RTCVideoFrame.h", "objc/base/RTCVideoFrameBuffer.h", "objc/base/RTCVideoRenderer.h", + "objc/base/RTCAudioRenderer.h", "objc/base/RTCYUVPlanarBuffer.h", "objc/components/audio/RTCAudioDevice.h", "objc/components/audio/RTCAudioSession.h", @@ -1572,6 +1574,7 @@ if (is_ios || is_mac) { "objc/base/RTCVideoFrame.h", "objc/base/RTCVideoFrameBuffer.h", "objc/base/RTCVideoRenderer.h", + "objc/base/RTCAudioRenderer.h", "objc/base/RTCYUVPlanarBuffer.h", "objc/components/capturer/RTCCameraVideoCapturer.h", "objc/components/capturer/RTCFileVideoCapturer.h", diff --git a/sdk/objc/api/peerconnection/RTCAudioTrack+Private.h b/sdk/objc/api/peerconnection/RTCAudioTrack+Private.h index 6495500484..38c0bd3b1b 100644 --- a/sdk/objc/api/peerconnection/RTCAudioTrack+Private.h +++ b/sdk/objc/api/peerconnection/RTCAudioTrack+Private.h @@ -8,6 +8,7 @@ * be found in the AUTHORS file in the root of the source tree. */ +#import #import "RTCAudioTrack.h" #include "api/media_stream_interface.h" @@ -15,17 +16,18 @@ NS_ASSUME_NONNULL_BEGIN @class RTC_OBJC_TYPE(RTCPeerConnectionFactory); -@interface RTC_OBJC_TYPE (RTCAudioTrack) -() +@interface RTC_OBJC_TYPE (RTCAudioTrack) () - /** AudioTrackInterface created or passed in at construction. */ - @property(nonatomic, readonly) rtc::scoped_refptr nativeAudioTrack; +/** AudioTrackInterface created or passed in at construction. */ +@property(nonatomic, readonly) rtc::scoped_refptr nativeAudioTrack; /** Initialize an RTCAudioTrack with an id. */ - (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory source:(RTC_OBJC_TYPE(RTCAudioSource) *)source trackId:(NSString *)trackId; +- (void)didCaptureSampleBuffer:(CMSampleBufferRef)sampleBuffer; + @end NS_ASSUME_NONNULL_END diff --git a/sdk/objc/api/peerconnection/RTCAudioTrack.h b/sdk/objc/api/peerconnection/RTCAudioTrack.h index 95eb5d3d48..c8218ad926 100644 --- a/sdk/objc/api/peerconnection/RTCAudioTrack.h +++ b/sdk/objc/api/peerconnection/RTCAudioTrack.h @@ -13,6 +13,7 @@ NS_ASSUME_NONNULL_BEGIN +@protocol RTC_OBJC_TYPE (RTCAudioRenderer); @class RTC_OBJC_TYPE(RTCAudioSource); RTC_OBJC_EXPORT @@ -23,6 +24,13 @@ RTC_OBJC_EXPORT /** The audio source for this audio track. 
*/ @property(nonatomic, readonly) RTC_OBJC_TYPE(RTCAudioSource) * source; +/** Register a renderer that will receive all audio CMSampleBuffers on this track. + * Does not retain. */ +- (void)addRenderer:(id)renderer; + +/** Deregister a renderer */ +- (void)removeRenderer:(id)renderer; + @end NS_ASSUME_NONNULL_END diff --git a/sdk/objc/api/peerconnection/RTCAudioTrack.mm b/sdk/objc/api/peerconnection/RTCAudioTrack.mm index 5c1736f436..065064a1fe 100644 --- a/sdk/objc/api/peerconnection/RTCAudioTrack.mm +++ b/sdk/objc/api/peerconnection/RTCAudioTrack.mm @@ -8,8 +8,12 @@ * be found in the AUTHORS file in the root of the source tree. */ +#import +#import + #import "RTCAudioTrack+Private.h" +#import "RTCAudioRenderer.h" #import "RTCAudioSource+Private.h" #import "RTCMediaStreamTrack+Private.h" #import "RTCPeerConnectionFactory+Private.h" @@ -17,7 +21,167 @@ #include "rtc_base/checks.h" -@implementation RTC_OBJC_TYPE (RTCAudioTrack) +namespace webrtc { +/** + * Captures audio data and converts to CMSampleBuffers + */ +class AudioSinkConverter : public rtc::RefCountInterface, public webrtc::AudioTrackSinkInterface { + private: + os_unfair_lock *lock_; + __weak RTCAudioTrack *audio_track_; + int64_t total_frames_ = 0; + bool attached_ = false; + + public: + AudioSinkConverter(RTCAudioTrack *audioTrack, os_unfair_lock *lock) { + RTC_LOG(LS_INFO) << "RTCAudioTrack.AudioSinkConverter init"; + audio_track_ = audioTrack; + lock_ = lock; + } + + ~AudioSinkConverter() { + // + RTC_LOG(LS_INFO) << "RTCAudioTrack.AudioSinkConverter dealloc"; + } + + // Must be called while locked + void TryAttach() { + if (attached_) { + // Already attached + return; + } + RTC_LOG(LS_INFO) << "RTCAudioTrack attaching sink..."; + // Reset for creating CMSampleTimingInfo correctly + audio_track_.nativeAudioTrack->AddSink(this); + total_frames_ = 0; + attached_ = true; + } + + // Must be called while locked + void TryDetach() { + if (!attached_) { + // Already detached + return; + } + RTC_LOG(LS_INFO) << "RTCAudioTrack detaching sink..."; + audio_track_.nativeAudioTrack->RemoveSink(this); + attached_ = false; + } + + void OnData(const void *audio_data, + int bits_per_sample, + int sample_rate, + size_t number_of_channels, + size_t number_of_frames, + absl::optional absolute_capture_timestamp_ms) override { + RTC_LOG(LS_INFO) << "RTCAudioTrack.AudioSinkConverter OnData bits_per_sample: " + << bits_per_sample << " sample_rate: " << sample_rate + << " number_of_channels: " << number_of_channels + << " number_of_frames: " << number_of_frames + << " absolute_capture_timestamp_ms: " + << (absolute_capture_timestamp_ms ? absolute_capture_timestamp_ms.value() : 0); + + bool is_locked = os_unfair_lock_trylock(lock_); + if (!is_locked) { + RTC_LOG(LS_INFO) << "RTCAudioTrack.AudioSinkConverter OnData already locked, skipping..."; + return; + } + bool is_attached = attached_; + os_unfair_lock_unlock(lock_); + + if (!is_attached) { + RTC_LOG(LS_INFO) << "RTCAudioTrack.AudioSinkConverter OnData already detached, skipping..."; + return; + } + + /* + * Convert to CMSampleBuffer + */ + + if (!(number_of_channels == 1 || number_of_channels == 2)) { + NSLog(@"RTCAudioTrack: Only mono or stereo is supported currently. numberOfChannels: %zu", + number_of_channels); + return; + } + + OSStatus status; + + AudioChannelLayout acl; + bzero(&acl, sizeof(acl)); + acl.mChannelLayoutTag = + number_of_channels == 2 ? 
kAudioChannelLayoutTag_Stereo : kAudioChannelLayoutTag_Mono; + + AudioStreamBasicDescription sd; + sd.mSampleRate = sample_rate; + sd.mFormatID = kAudioFormatLinearPCM; + sd.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked; + sd.mFramesPerPacket = 1; + sd.mChannelsPerFrame = number_of_channels; + sd.mBitsPerChannel = bits_per_sample; /* 16 */ + sd.mBytesPerFrame = sd.mChannelsPerFrame * (sd.mBitsPerChannel / 8); + sd.mBytesPerPacket = sd.mBytesPerFrame; + + CMSampleTimingInfo timing = { + CMTimeMake(1, sample_rate), + CMTimeMake(total_frames_, sample_rate), + kCMTimeInvalid, + }; + + total_frames_ += number_of_frames; // update the total + + CMFormatDescriptionRef format = NULL; + status = CMAudioFormatDescriptionCreate( + kCFAllocatorDefault, &sd, sizeof(acl), &acl, 0, NULL, NULL, &format); + + if (status != 0) { + NSLog(@"RTCAudioTrack: Failed to create audio format description"); + return; + } + + CMSampleBufferRef buffer; + status = CMSampleBufferCreate(kCFAllocatorDefault, + NULL, + false, + NULL, + NULL, + format, + (CMItemCount)number_of_frames, + 1, + &timing, + 0, + NULL, + &buffer); + if (status != 0) { + NSLog(@"RTCAudioTrack: Failed to allocate sample buffer"); + return; + } + + AudioBufferList bufferList; + bufferList.mNumberBuffers = 1; + bufferList.mBuffers[0].mNumberChannels = sd.mChannelsPerFrame; + bufferList.mBuffers[0].mDataByteSize = (UInt32)(number_of_frames * sd.mBytesPerFrame); + bufferList.mBuffers[0].mData = (void *)audio_data; + status = CMSampleBufferSetDataBufferFromAudioBufferList( + buffer, kCFAllocatorDefault, kCFAllocatorDefault, 0, &bufferList); + if (status != 0) { + NSLog(@"RTCAudioTrack: Failed to convert audio buffer list into sample buffer"); + return; + } + + // Report back to RTCAudioTrack + [audio_track_ didCaptureSampleBuffer:buffer]; + + CFRelease(buffer); + } +}; +} // namespace webrtc + +@implementation RTC_OBJC_TYPE (RTCAudioTrack) { + rtc::scoped_refptr _audioConverter; + // Stores weak references to renderers + NSHashTable *_renderers; + os_unfair_lock _lock; +} @synthesize source = _source; @@ -43,7 +207,21 @@ - (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)facto NSParameterAssert(factory); NSParameterAssert(nativeTrack); NSParameterAssert(type == RTCMediaStreamTrackTypeAudio); - return [super initWithFactory:factory nativeTrack:nativeTrack type:type]; + if (self = [super initWithFactory:factory nativeTrack:nativeTrack type:type]) { + RTC_LOG(LS_INFO) << "RTCAudioTrack init"; + _renderers = [NSHashTable weakObjectsHashTable]; + _audioConverter = new rtc::RefCountedObject(self, &_lock); + } + + return self; +} + +- (void)dealloc { + os_unfair_lock_lock(&_lock); + _audioConverter->TryDetach(); + os_unfair_lock_unlock(&_lock); + + RTC_LOG(LS_INFO) << "RTCAudioTrack dealloc"; } - (RTC_OBJC_TYPE(RTCAudioSource) *)source { @@ -57,6 +235,25 @@ - (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)facto return _source; } +- (void)addRenderer:(id)renderer { + os_unfair_lock_lock(&_lock); + [_renderers addObject:renderer]; + _audioConverter->TryAttach(); + os_unfair_lock_unlock(&_lock); +} + +- (void)removeRenderer:(id)renderer { + os_unfair_lock_lock(&_lock); + [_renderers removeObject:renderer]; + NSUInteger renderersCount = _renderers.allObjects.count; + + if (renderersCount == 0) { + // Detach if no more renderers... 
+ _audioConverter->TryDetach(); + } + os_unfair_lock_unlock(&_lock); +} + #pragma mark - Private - (rtc::scoped_refptr)nativeAudioTrack { @@ -64,4 +261,18 @@ - (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)facto static_cast(self.nativeTrack.get())); } +- (void)didCaptureSampleBuffer:(CMSampleBufferRef)sampleBuffer { + bool is_locked = os_unfair_lock_trylock(&_lock); + if (!is_locked) { + RTC_LOG(LS_INFO) << "RTCAudioTrack didCaptureSampleBuffer already locked, skipping..."; + return; + } + NSArray *renderers = [_renderers allObjects]; + os_unfair_lock_unlock(&_lock); + + for (id renderer in renderers) { + [renderer renderSampleBuffer:sampleBuffer]; + } +} + @end diff --git a/sdk/objc/base/RTCAudioRenderer.h b/sdk/objc/base/RTCAudioRenderer.h new file mode 100644 index 0000000000..def20eac3c --- /dev/null +++ b/sdk/objc/base/RTCAudioRenderer.h @@ -0,0 +1,34 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import +#if TARGET_OS_IPHONE +#import +#endif + +#import "RTCMacros.h" + +NS_ASSUME_NONNULL_BEGIN + +RTC_OBJC_EXPORT @protocol RTC_OBJC_TYPE +(RTCAudioRenderer) + + - (void)renderSampleBuffer : (CMSampleBufferRef)sampleBuffer + NS_SWIFT_NAME(render(sampleBuffer:)); + +@end + +NS_ASSUME_NONNULL_END From 68167af61298fb2e89d29a2ee3a17d60afb852db Mon Sep 17 00:00:00 2001 From: Hiroshi Horie <548776+hiroshihorie@users.noreply.github.com> Date: Fri, 25 Aug 2023 01:56:40 +0900 Subject: [PATCH 15/42] Allow custom audio processing by exposing AudioProcessingModule (#85) * apm * progress * expose raw buffer * config * runtime delegate update * always create audio processing adapter * delegate nullable * dynamic delegate update thread safety * fix delegate life cycle when swapped * refactor * avoid crash when no apm is passed in * format * make delegate weak & docs * fix memory issue --- sdk/BUILD.gn | 281 ++++++++++-------- sdk/objc/api/peerconnection/RTCAudioTrack.mm | 1 + .../RTCPeerConnectionFactory+Native.h | 6 - .../peerconnection/RTCPeerConnectionFactory.h | 7 +- .../RTCPeerConnectionFactory.mm | 20 +- .../components/audio/RTCAudioBuffer+Private.h | 29 ++ sdk/objc/components/audio/RTCAudioBuffer.h | 38 +++ sdk/objc/components/audio/RTCAudioBuffer.mm | 55 ++++ .../RTCAudioCustomProcessingAdapter+Private.h | 43 +++ .../audio/RTCAudioCustomProcessingAdapter.h | 28 ++ .../audio/RTCAudioCustomProcessingAdapter.mm | 139 +++++++++ .../audio/RTCAudioCustomProcessingDelegate.h | 52 ++++ .../audio/RTCAudioProcessingConfig+Private.h | 29 ++ .../audio/RTCAudioProcessingConfig.h | 31 ++ .../audio/RTCAudioProcessingConfig.mm | 51 ++++ .../audio/RTCAudioProcessingModule.h | 33 ++ .../RTCDefaultAudioProcessingModule+Private.h | 29 ++ .../audio/RTCDefaultAudioProcessingModule.h | 47 +++ .../audio/RTCDefaultAudioProcessingModule.mm | 96 ++++++ 19 files changed, 875 insertions(+), 140 deletions(-) create mode 100644 sdk/objc/components/audio/RTCAudioBuffer+Private.h create mode 100644 sdk/objc/components/audio/RTCAudioBuffer.h create 
mode 100644 sdk/objc/components/audio/RTCAudioBuffer.mm create mode 100644 sdk/objc/components/audio/RTCAudioCustomProcessingAdapter+Private.h create mode 100644 sdk/objc/components/audio/RTCAudioCustomProcessingAdapter.h create mode 100644 sdk/objc/components/audio/RTCAudioCustomProcessingAdapter.mm create mode 100644 sdk/objc/components/audio/RTCAudioCustomProcessingDelegate.h create mode 100644 sdk/objc/components/audio/RTCAudioProcessingConfig+Private.h create mode 100644 sdk/objc/components/audio/RTCAudioProcessingConfig.h create mode 100644 sdk/objc/components/audio/RTCAudioProcessingConfig.mm create mode 100644 sdk/objc/components/audio/RTCAudioProcessingModule.h create mode 100644 sdk/objc/components/audio/RTCDefaultAudioProcessingModule+Private.h create mode 100644 sdk/objc/components/audio/RTCDefaultAudioProcessingModule.h create mode 100644 sdk/objc/components/audio/RTCDefaultAudioProcessingModule.mm diff --git a/sdk/BUILD.gn b/sdk/BUILD.gn index 7c74df1c87..a0cf5b55d3 100644 --- a/sdk/BUILD.gn +++ b/sdk/BUILD.gn @@ -119,7 +119,6 @@ if (is_ios || is_mac) { "objc/base/RTCVideoFrame.mm", "objc/base/RTCVideoFrameBuffer.h", "objc/base/RTCVideoRenderer.h", - "objc/base/RTCAudioRenderer.h", "objc/base/RTCYUVPlanarBuffer.h", ] @@ -141,9 +140,9 @@ if (is_ios || is_mac) { "objc/helpers/AVCaptureSession+DevicePosition.mm", "objc/helpers/NSString+StdString.h", "objc/helpers/NSString+StdString.mm", - "objc/helpers/RTCDispatcher+Private.h", "objc/helpers/RTCDispatcher.h", "objc/helpers/RTCDispatcher.m", + "objc/helpers/RTCDispatcher+Private.h", "objc/helpers/RTCYUVHelper.h", "objc/helpers/RTCYUVHelper.mm", "objc/helpers/scoped_cftyperef.h", @@ -705,15 +704,15 @@ if (is_ios || is_mac) { rtc_library("desktopcapture_objc") { visibility = [ "*" ] sources = [ - "objc/components/capturer/RTCDesktopCapturer+Private.h", "objc/components/capturer/RTCDesktopCapturer.h", "objc/components/capturer/RTCDesktopCapturer.mm", - "objc/components/capturer/RTCDesktopSource+Private.h", - "objc/components/capturer/RTCDesktopSource.h", - "objc/components/capturer/RTCDesktopSource.mm", - "objc/components/capturer/RTCDesktopMediaList+Private.h", + "objc/components/capturer/RTCDesktopCapturer+Private.h", "objc/components/capturer/RTCDesktopMediaList.h", "objc/components/capturer/RTCDesktopMediaList.mm", + "objc/components/capturer/RTCDesktopMediaList+Private.h", + "objc/components/capturer/RTCDesktopSource.h", + "objc/components/capturer/RTCDesktopSource.mm", + "objc/components/capturer/RTCDesktopSource+Private.h", "objc/native/src/objc_desktop_capture.h", "objc/native/src/objc_desktop_capture.mm", "objc/native/src/objc_desktop_media_list.h", @@ -743,9 +742,9 @@ if (is_ios || is_mac) { visibility = [ "*" ] configs += [ "..:no_global_constructors" ] sources = [ - "objc/components/video_codec/RTCCodecSpecificInfoH264+Private.h", "objc/components/video_codec/RTCCodecSpecificInfoH264.h", "objc/components/video_codec/RTCCodecSpecificInfoH264.mm", + "objc/components/video_codec/RTCCodecSpecificInfoH264+Private.h", "objc/components/video_codec/RTCH264ProfileLevelId.h", "objc/components/video_codec/RTCH264ProfileLevelId.mm", ] @@ -882,10 +881,10 @@ if (is_ios || is_mac) { rtc_library("simulcast") { sources = [ - "objc/components/video_codec/RTCVideoEncoderFactorySimulcast.h", - "objc/components/video_codec/RTCVideoEncoderFactorySimulcast.mm", "objc/api/video_codec/RTCVideoEncoderSimulcast.h", "objc/api/video_codec/RTCVideoEncoderSimulcast.mm", + "objc/components/video_codec/RTCVideoEncoderFactorySimulcast.h", + 
"objc/components/video_codec/RTCVideoEncoderFactorySimulcast.mm", ] deps = [ @@ -899,9 +898,9 @@ if (is_ios || is_mac) { rtc_library("mediaconstraints_objc") { configs += [ "..:no_global_constructors" ] sources = [ - "objc/api/peerconnection/RTCMediaConstraints+Private.h", "objc/api/peerconnection/RTCMediaConstraints.h", "objc/api/peerconnection/RTCMediaConstraints.mm", + "objc/api/peerconnection/RTCMediaConstraints+Private.h", ] public_configs = [ ":common_config_objc" ] @@ -927,9 +926,9 @@ if (is_ios || is_mac) { visibility = [ "*" ] allow_poison = [ "audio_codecs" ] # TODO(bugs.webrtc.org/8396): Remove. sources = [ - "objc/api/RTCVideoRendererAdapter+Private.h", "objc/api/RTCVideoRendererAdapter.h", "objc/api/RTCVideoRendererAdapter.mm", + "objc/api/RTCVideoRendererAdapter+Private.h", ] configs += [ "..:common_objc" ] @@ -946,9 +945,9 @@ if (is_ios || is_mac) { rtc_library("mediasource_objc") { sources = [ - "objc/api/peerconnection/RTCMediaSource+Private.h", "objc/api/peerconnection/RTCMediaSource.h", "objc/api/peerconnection/RTCMediaSource.mm", + "objc/api/peerconnection/RTCMediaSource+Private.h", ] configs += [ @@ -998,125 +997,141 @@ if (is_ios || is_mac) { configs += [ "..:no_global_constructors" ] sources = [ "objc/api/peerconnection/RTCAudioDeviceModule.h", - "objc/api/peerconnection/RTCAudioDeviceModule+Private.h", "objc/api/peerconnection/RTCAudioDeviceModule.mm", - "objc/api/peerconnection/RTCIODevice.h", - "objc/api/peerconnection/RTCIODevice.mm", - "objc/api/peerconnection/RTCAudioSource+Private.h", + "objc/api/peerconnection/RTCAudioDeviceModule+Private.h", + "objc/api/peerconnection/RTCAudioSource.h", "objc/api/peerconnection/RTCAudioSource.h", "objc/api/peerconnection/RTCAudioSource.mm", - "objc/api/peerconnection/RTCAudioTrack+Private.h", + "objc/api/peerconnection/RTCAudioSource+Private.h", + "objc/api/peerconnection/RTCAudioSource+Private.h", "objc/api/peerconnection/RTCAudioTrack.h", "objc/api/peerconnection/RTCAudioTrack.mm", + "objc/api/peerconnection/RTCAudioTrack+Private.h", "objc/api/peerconnection/RTCCertificate.h", "objc/api/peerconnection/RTCCertificate.mm", - "objc/api/peerconnection/RTCConfiguration+Native.h", - "objc/api/peerconnection/RTCConfiguration+Private.h", "objc/api/peerconnection/RTCConfiguration.h", "objc/api/peerconnection/RTCConfiguration.mm", + "objc/api/peerconnection/RTCConfiguration+Native.h", + "objc/api/peerconnection/RTCConfiguration+Private.h", "objc/api/peerconnection/RTCCryptoOptions.h", "objc/api/peerconnection/RTCCryptoOptions.mm", - "objc/api/peerconnection/RTCDataChannel+Private.h", "objc/api/peerconnection/RTCDataChannel.h", "objc/api/peerconnection/RTCDataChannel.mm", - "objc/api/peerconnection/RTCDataChannelConfiguration+Private.h", + "objc/api/peerconnection/RTCDataChannel+Private.h", "objc/api/peerconnection/RTCDataChannelConfiguration.h", "objc/api/peerconnection/RTCDataChannelConfiguration.mm", - "objc/api/peerconnection/RTCDtmfSender+Private.h", + "objc/api/peerconnection/RTCDataChannelConfiguration+Private.h", "objc/api/peerconnection/RTCDtmfSender.h", "objc/api/peerconnection/RTCDtmfSender.mm", + "objc/api/peerconnection/RTCDtmfSender+Private.h", "objc/api/peerconnection/RTCFieldTrials.h", "objc/api/peerconnection/RTCFieldTrials.mm", - "objc/api/peerconnection/RTCFrameCryptor+Private.h", "objc/api/peerconnection/RTCFrameCryptor.h", "objc/api/peerconnection/RTCFrameCryptor.mm", - "objc/api/peerconnection/RTCFrameCryptorKeyProvider+Private.h", + "objc/api/peerconnection/RTCFrameCryptor+Private.h", 
"objc/api/peerconnection/RTCFrameCryptorKeyProvider.h", "objc/api/peerconnection/RTCFrameCryptorKeyProvider.mm", - "objc/api/peerconnection/RTCIceCandidate+Private.h", + "objc/api/peerconnection/RTCFrameCryptorKeyProvider+Private.h", "objc/api/peerconnection/RTCIceCandidate.h", "objc/api/peerconnection/RTCIceCandidate.mm", - "objc/api/peerconnection/RTCIceCandidateErrorEvent+Private.h", + "objc/api/peerconnection/RTCIceCandidate+Private.h", "objc/api/peerconnection/RTCIceCandidateErrorEvent.h", "objc/api/peerconnection/RTCIceCandidateErrorEvent.mm", - "objc/api/peerconnection/RTCIceServer+Private.h", + "objc/api/peerconnection/RTCIceCandidateErrorEvent+Private.h", "objc/api/peerconnection/RTCIceServer.h", "objc/api/peerconnection/RTCIceServer.mm", - "objc/api/peerconnection/RTCLegacyStatsReport+Private.h", + "objc/api/peerconnection/RTCIceServer+Private.h", + "objc/api/peerconnection/RTCIODevice.h", + "objc/api/peerconnection/RTCIODevice.mm", "objc/api/peerconnection/RTCLegacyStatsReport.h", "objc/api/peerconnection/RTCLegacyStatsReport.mm", - "objc/api/peerconnection/RTCMediaStream+Private.h", + "objc/api/peerconnection/RTCLegacyStatsReport+Private.h", "objc/api/peerconnection/RTCMediaStream.h", "objc/api/peerconnection/RTCMediaStream.mm", - "objc/api/peerconnection/RTCMediaStreamTrack+Private.h", + "objc/api/peerconnection/RTCMediaStream+Private.h", "objc/api/peerconnection/RTCMediaStreamTrack.h", "objc/api/peerconnection/RTCMediaStreamTrack.mm", + "objc/api/peerconnection/RTCMediaStreamTrack+Private.h", "objc/api/peerconnection/RTCMetrics.h", "objc/api/peerconnection/RTCMetrics.mm", - "objc/api/peerconnection/RTCMetricsSampleInfo+Private.h", "objc/api/peerconnection/RTCMetricsSampleInfo.h", "objc/api/peerconnection/RTCMetricsSampleInfo.mm", + "objc/api/peerconnection/RTCMetricsSampleInfo+Private.h", + "objc/api/peerconnection/RTCPeerConnection.h", + "objc/api/peerconnection/RTCPeerConnection.mm", "objc/api/peerconnection/RTCPeerConnection+DataChannel.mm", "objc/api/peerconnection/RTCPeerConnection+Private.h", "objc/api/peerconnection/RTCPeerConnection+Stats.mm", - "objc/api/peerconnection/RTCPeerConnection.h", - "objc/api/peerconnection/RTCPeerConnection.mm", - "objc/api/peerconnection/RTCPeerConnectionFactory+Native.h", - "objc/api/peerconnection/RTCPeerConnectionFactory+Private.h", "objc/api/peerconnection/RTCPeerConnectionFactory.h", "objc/api/peerconnection/RTCPeerConnectionFactory.mm", - "objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.h", - "objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.mm", + "objc/api/peerconnection/RTCPeerConnectionFactory+Native.h", + "objc/api/peerconnection/RTCPeerConnectionFactory+Private.h", "objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.h", "objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.mm", - "objc/api/peerconnection/RTCPeerConnectionFactoryOptions+Private.h", + "objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.h", + "objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.mm", "objc/api/peerconnection/RTCPeerConnectionFactoryOptions.h", "objc/api/peerconnection/RTCPeerConnectionFactoryOptions.mm", - "objc/api/peerconnection/RTCRtcpParameters+Private.h", + "objc/api/peerconnection/RTCPeerConnectionFactoryOptions+Private.h", "objc/api/peerconnection/RTCRtcpParameters.h", "objc/api/peerconnection/RTCRtcpParameters.mm", + "objc/api/peerconnection/RTCRtcpParameters+Private.h", "objc/api/peerconnection/RTCRtpCapabilities.h", 
"objc/api/peerconnection/RTCRtpCapabilities.mm", "objc/api/peerconnection/RTCRtpCapabilities+Private.h", "objc/api/peerconnection/RTCRtpCodecCapability.h", "objc/api/peerconnection/RTCRtpCodecCapability.mm", "objc/api/peerconnection/RTCRtpCodecCapability+Private.h", - "objc/api/peerconnection/RTCRtpCodecParameters+Private.h", "objc/api/peerconnection/RTCRtpCodecParameters.h", "objc/api/peerconnection/RTCRtpCodecParameters.mm", - "objc/api/peerconnection/RTCRtpEncodingParameters+Private.h", + "objc/api/peerconnection/RTCRtpCodecParameters+Private.h", "objc/api/peerconnection/RTCRtpEncodingParameters.h", "objc/api/peerconnection/RTCRtpEncodingParameters.mm", - "objc/api/peerconnection/RTCRtpHeaderExtension+Private.h", + "objc/api/peerconnection/RTCRtpEncodingParameters+Private.h", "objc/api/peerconnection/RTCRtpHeaderExtension.h", "objc/api/peerconnection/RTCRtpHeaderExtension.mm", - "objc/api/peerconnection/RTCRtpParameters+Private.h", + "objc/api/peerconnection/RTCRtpHeaderExtension+Private.h", "objc/api/peerconnection/RTCRtpParameters.h", "objc/api/peerconnection/RTCRtpParameters.mm", - "objc/api/peerconnection/RTCRtpReceiver+Native.h", - "objc/api/peerconnection/RTCRtpReceiver+Private.h", + "objc/api/peerconnection/RTCRtpParameters+Private.h", "objc/api/peerconnection/RTCRtpReceiver.h", "objc/api/peerconnection/RTCRtpReceiver.mm", - "objc/api/peerconnection/RTCRtpSender+Native.h", - "objc/api/peerconnection/RTCRtpSender+Private.h", + "objc/api/peerconnection/RTCRtpReceiver+Native.h", + "objc/api/peerconnection/RTCRtpReceiver+Private.h", "objc/api/peerconnection/RTCRtpSender.h", "objc/api/peerconnection/RTCRtpSender.mm", - "objc/api/peerconnection/RTCRtpTransceiver+Private.h", + "objc/api/peerconnection/RTCRtpSender+Native.h", + "objc/api/peerconnection/RTCRtpSender+Private.h", "objc/api/peerconnection/RTCRtpTransceiver.h", "objc/api/peerconnection/RTCRtpTransceiver.mm", - "objc/api/peerconnection/RTCSSLAdapter.h", - "objc/api/peerconnection/RTCSSLAdapter.mm", - "objc/api/peerconnection/RTCSessionDescription+Private.h", + "objc/api/peerconnection/RTCRtpTransceiver+Private.h", "objc/api/peerconnection/RTCSessionDescription.h", "objc/api/peerconnection/RTCSessionDescription.mm", - "objc/api/peerconnection/RTCStatisticsReport+Private.h", + "objc/api/peerconnection/RTCSessionDescription+Private.h", + "objc/api/peerconnection/RTCSSLAdapter.h", + "objc/api/peerconnection/RTCSSLAdapter.mm", "objc/api/peerconnection/RTCStatisticsReport.h", "objc/api/peerconnection/RTCStatisticsReport.mm", + "objc/api/peerconnection/RTCStatisticsReport+Private.h", "objc/api/peerconnection/RTCTracing.h", "objc/api/peerconnection/RTCTracing.mm", - "objc/api/peerconnection/RTCVideoTrack+Private.h", "objc/api/peerconnection/RTCVideoTrack.h", "objc/api/peerconnection/RTCVideoTrack.mm", + "objc/api/peerconnection/RTCVideoTrack+Private.h", + "objc/components/audio/RTCAudioBuffer.h", + "objc/components/audio/RTCAudioBuffer.mm", + "objc/components/audio/RTCAudioBuffer+Private.h", + "objc/components/audio/RTCAudioCustomProcessingAdapter.h", + "objc/components/audio/RTCAudioCustomProcessingAdapter.mm", + "objc/components/audio/RTCAudioCustomProcessingAdapter+Private.h", + "objc/components/audio/RTCAudioCustomProcessingDelegate.h", + "objc/components/audio/RTCAudioProcessingConfig.h", + "objc/components/audio/RTCAudioProcessingConfig.mm", + "objc/components/audio/RTCAudioProcessingConfig+Private.h", + "objc/components/audio/RTCAudioProcessingModule.h", + "objc/components/audio/RTCDefaultAudioProcessingModule.h", + 
"objc/components/audio/RTCDefaultAudioProcessingModule.mm", + "objc/components/audio/RTCDefaultAudioProcessingModule+Private.h", ] configs += [ @@ -1197,34 +1212,34 @@ if (is_ios || is_mac) { include_dirs = [ "objc/" ] sources = [ + "objc/unittests/frame_buffer_helpers.h", + "objc/unittests/frame_buffer_helpers.mm", + "objc/unittests/nalu_rewriter_xctest.mm", + "objc/unittests/objc_video_decoder_factory_tests.mm", + "objc/unittests/objc_video_encoder_factory_tests.mm", "objc/unittests/ObjCVideoTrackSource_xctest.mm", - "objc/unittests/RTCAudioDeviceModule_xctest.mm", "objc/unittests/RTCAudioDevice_xctest.mm", + "objc/unittests/RTCAudioDeviceModule_xctest.mm", "objc/unittests/RTCAudioSessionTest.mm", - "objc/unittests/RTCCVPixelBuffer_xctest.mm", "objc/unittests/RTCCallbackLogger_xctest.m", "objc/unittests/RTCCameraVideoCapturerTests.mm", "objc/unittests/RTCCertificateTest.mm", "objc/unittests/RTCConfigurationTest.mm", + "objc/unittests/RTCCVPixelBuffer_xctest.mm", "objc/unittests/RTCDataChannelConfigurationTest.mm", "objc/unittests/RTCEncodedImage_xctest.mm", "objc/unittests/RTCFileVideoCapturer_xctest.mm", "objc/unittests/RTCH264ProfileLevelId_xctest.m", "objc/unittests/RTCIceCandidateTest.mm", "objc/unittests/RTCIceServerTest.mm", - "objc/unittests/RTCMTLVideoView_xctest.m", "objc/unittests/RTCMediaConstraintsTest.mm", + "objc/unittests/RTCMTLVideoView_xctest.m", "objc/unittests/RTCNV12TextureCache_xctest.m", - "objc/unittests/RTCPeerConnectionFactoryBuilderTest.mm", "objc/unittests/RTCPeerConnectionFactory_xctest.m", + "objc/unittests/RTCPeerConnectionFactoryBuilderTest.mm", "objc/unittests/RTCPeerConnectionTest.mm", "objc/unittests/RTCSessionDescriptionTest.mm", "objc/unittests/RTCTracingTest.mm", - "objc/unittests/frame_buffer_helpers.h", - "objc/unittests/frame_buffer_helpers.mm", - "objc/unittests/nalu_rewriter_xctest.mm", - "objc/unittests/objc_video_decoder_factory_tests.mm", - "objc/unittests/objc_video_encoder_factory_tests.mm", "objc/unittests/scoped_cftyperef_tests.mm", ] @@ -1318,8 +1333,8 @@ if (is_ios || is_mac) { is_xctest = true info_plist = "//test/ios/Info.plist" sources = [ - "objc/unittests/RTCDoNotPutCPlusPlusInFrameworkHeaders_xctest.m", "objc/unittests/main.mm", + "objc/unittests/RTCDoNotPutCPlusPlusInFrameworkHeaders_xctest.m", ] extra_substitutions = [ "GTEST_BUNDLE_ID_SUFFIX=generic-unit-test" ] @@ -1339,62 +1354,22 @@ if (is_ios || is_mac) { output_name = "WebRTC" common_objc_headers = [ - "objc/base/RTCCodecSpecificInfo.h", - "objc/base/RTCEncodedImage.h", - "objc/base/RTCI420Buffer.h", - "objc/base/RTCLogging.h", - "objc/base/RTCMacros.h", - "objc/base/RTCMutableI420Buffer.h", - "objc/base/RTCMutableYUVPlanarBuffer.h", - "objc/base/RTCSSLCertificateVerifier.h", - "objc/base/RTCVideoCapturer.h", - "objc/base/RTCVideoCodecInfo.h", - "objc/base/RTCVideoDecoder.h", - "objc/base/RTCVideoDecoderFactory.h", - "objc/base/RTCVideoEncoder.h", - "objc/base/RTCVideoEncoderFactory.h", - "objc/base/RTCVideoEncoderQpThresholds.h", - "objc/base/RTCVideoEncoderSettings.h", - "objc/base/RTCVideoFrame.h", - "objc/base/RTCVideoFrameBuffer.h", - "objc/base/RTCVideoRenderer.h", - "objc/base/RTCAudioRenderer.h", - "objc/base/RTCYUVPlanarBuffer.h", - "objc/components/audio/RTCAudioDevice.h", - "objc/components/audio/RTCAudioSession.h", - "objc/components/audio/RTCAudioSessionConfiguration.h", - "objc/components/capturer/RTCCameraVideoCapturer.h", - "objc/components/capturer/RTCFileVideoCapturer.h", - "objc/components/network/RTCNetworkMonitor.h", - 
"objc/components/renderer/metal/RTCMTLVideoView.h", - "objc/components/renderer/opengl/RTCEAGLVideoView.h", - "objc/components/renderer/opengl/RTCVideoViewShading.h", - "objc/components/video_codec/RTCCodecSpecificInfoH264.h", - "objc/components/video_codec/RTCDefaultVideoDecoderFactory.h", - "objc/components/video_codec/RTCDefaultVideoEncoderFactory.h", - "objc/components/video_codec/RTCH264ProfileLevelId.h", - "objc/components/video_codec/RTCVideoDecoderFactoryH264.h", - "objc/components/video_codec/RTCVideoDecoderH264.h", - "objc/components/video_codec/RTCVideoEncoderFactoryH264.h", - "objc/components/video_codec/RTCVideoEncoderH264.h", - "objc/components/video_frame_buffer/RTCCVPixelBuffer.h", - "objc/helpers/RTCCameraPreviewView.h", - "objc/helpers/RTCDispatcher.h", - "objc/helpers/RTCYUVHelper.h", - "objc/helpers/UIDevice+RTCDevice.h", "objc/api/peerconnection/RTCAudioDeviceModule.h", - "objc/api/peerconnection/RTCIODevice.h", "objc/api/peerconnection/RTCAudioSource.h", "objc/api/peerconnection/RTCAudioTrack.h", + "objc/api/peerconnection/RTCCertificate.h", "objc/api/peerconnection/RTCConfiguration.h", + "objc/api/peerconnection/RTCCryptoOptions.h", "objc/api/peerconnection/RTCDataChannel.h", "objc/api/peerconnection/RTCDataChannelConfiguration.h", + "objc/api/peerconnection/RTCDtmfSender.h", + "objc/api/peerconnection/RTCFieldTrials.h", "objc/api/peerconnection/RTCFrameCryptor.h", "objc/api/peerconnection/RTCFrameCryptorKeyProvider.h", - "objc/api/peerconnection/RTCFieldTrials.h", "objc/api/peerconnection/RTCIceCandidate.h", "objc/api/peerconnection/RTCIceCandidateErrorEvent.h", "objc/api/peerconnection/RTCIceServer.h", + "objc/api/peerconnection/RTCIODevice.h", "objc/api/peerconnection/RTCLegacyStatsReport.h", "objc/api/peerconnection/RTCMediaConstraints.h", "objc/api/peerconnection/RTCMediaSource.h", @@ -1415,27 +1390,71 @@ if (is_ios || is_mac) { "objc/api/peerconnection/RTCRtpReceiver.h", "objc/api/peerconnection/RTCRtpSender.h", "objc/api/peerconnection/RTCRtpTransceiver.h", - "objc/api/peerconnection/RTCDtmfSender.h", - "objc/api/peerconnection/RTCSSLAdapter.h", "objc/api/peerconnection/RTCSessionDescription.h", + "objc/api/peerconnection/RTCSSLAdapter.h", "objc/api/peerconnection/RTCStatisticsReport.h", "objc/api/peerconnection/RTCTracing.h", - "objc/api/peerconnection/RTCCertificate.h", - "objc/api/peerconnection/RTCCryptoOptions.h", "objc/api/peerconnection/RTCVideoSource.h", "objc/api/peerconnection/RTCVideoTrack.h", "objc/api/video_codec/RTCVideoCodecConstants.h", + "objc/api/video_codec/RTCVideoDecoderAV1.h", "objc/api/video_codec/RTCVideoDecoderVP8.h", "objc/api/video_codec/RTCVideoDecoderVP9.h", - "objc/api/video_codec/RTCVideoDecoderAV1.h", + "objc/api/video_codec/RTCVideoEncoderAV1.h", + "objc/api/video_codec/RTCVideoEncoderSimulcast.h", "objc/api/video_codec/RTCVideoEncoderVP8.h", "objc/api/video_codec/RTCVideoEncoderVP9.h", - "objc/api/video_codec/RTCVideoEncoderAV1.h", "objc/api/video_frame_buffer/RTCNativeI420Buffer.h", "objc/api/video_frame_buffer/RTCNativeMutableI420Buffer.h", - # Added for Simulcast support + "objc/base/RTCAudioRenderer.h", + "objc/base/RTCCodecSpecificInfo.h", + "objc/base/RTCEncodedImage.h", + "objc/base/RTCI420Buffer.h", + "objc/base/RTCLogging.h", + "objc/base/RTCMacros.h", + "objc/base/RTCMutableI420Buffer.h", + "objc/base/RTCMutableYUVPlanarBuffer.h", + "objc/base/RTCSSLCertificateVerifier.h", + "objc/base/RTCVideoCapturer.h", + "objc/base/RTCVideoCodecInfo.h", + "objc/base/RTCVideoDecoder.h", + "objc/base/RTCVideoDecoderFactory.h", + 
"objc/base/RTCVideoEncoder.h", + "objc/base/RTCVideoEncoderFactory.h", + "objc/base/RTCVideoEncoderQpThresholds.h", + "objc/base/RTCVideoEncoderSettings.h", + "objc/base/RTCVideoFrame.h", + "objc/base/RTCVideoFrameBuffer.h", + "objc/base/RTCVideoRenderer.h", + "objc/base/RTCYUVPlanarBuffer.h", + "objc/components/audio/RTCAudioBuffer.h", + "objc/components/audio/RTCAudioCustomProcessingDelegate.h", + "objc/components/audio/RTCAudioDevice.h", + "objc/components/audio/RTCAudioProcessingConfig.h", + "objc/components/audio/RTCAudioProcessingModule.h", + "objc/components/audio/RTCAudioSession.h", + "objc/components/audio/RTCAudioSessionConfiguration.h", + "objc/components/audio/RTCDefaultAudioProcessingModule.h", + "objc/components/capturer/RTCCameraVideoCapturer.h", + "objc/components/capturer/RTCFileVideoCapturer.h", + "objc/components/network/RTCNetworkMonitor.h", + "objc/components/renderer/metal/RTCMTLVideoView.h", + "objc/components/renderer/opengl/RTCEAGLVideoView.h", + "objc/components/renderer/opengl/RTCVideoViewShading.h", + "objc/components/video_codec/RTCCodecSpecificInfoH264.h", + "objc/components/video_codec/RTCDefaultVideoDecoderFactory.h", + "objc/components/video_codec/RTCDefaultVideoEncoderFactory.h", + "objc/components/video_codec/RTCH264ProfileLevelId.h", + "objc/components/video_codec/RTCVideoDecoderFactoryH264.h", + "objc/components/video_codec/RTCVideoDecoderH264.h", + "objc/components/video_codec/RTCVideoEncoderFactoryH264.h", "objc/components/video_codec/RTCVideoEncoderFactorySimulcast.h", - "objc/api/video_codec/RTCVideoEncoderSimulcast.h", + "objc/components/video_codec/RTCVideoEncoderH264.h", + "objc/components/video_frame_buffer/RTCCVPixelBuffer.h", + "objc/helpers/RTCCameraPreviewView.h", + "objc/helpers/RTCDispatcher.h", + "objc/helpers/RTCYUVHelper.h", + "objc/helpers/UIDevice+RTCDevice.h", ] if (!build_with_chromium) { @@ -1506,7 +1525,6 @@ if (is_ios || is_mac) { sources = [ "objc/api/peerconnection/RTCAudioDeviceModule.h", - "objc/api/peerconnection/RTCIODevice.h", "objc/api/peerconnection/RTCAudioSource.h", "objc/api/peerconnection/RTCAudioTrack.h", "objc/api/peerconnection/RTCCertificate.h", @@ -1515,12 +1533,13 @@ if (is_ios || is_mac) { "objc/api/peerconnection/RTCDataChannel.h", "objc/api/peerconnection/RTCDataChannelConfiguration.h", "objc/api/peerconnection/RTCDtmfSender.h", + "objc/api/peerconnection/RTCFieldTrials.h", "objc/api/peerconnection/RTCFrameCryptor.h", "objc/api/peerconnection/RTCFrameCryptorKeyProvider.h", - "objc/api/peerconnection/RTCFieldTrials.h", "objc/api/peerconnection/RTCIceCandidate.h", "objc/api/peerconnection/RTCIceCandidateErrorEvent.h", "objc/api/peerconnection/RTCIceServer.h", + "objc/api/peerconnection/RTCIODevice.h", "objc/api/peerconnection/RTCLegacyStatsReport.h", "objc/api/peerconnection/RTCMediaConstraints.h", "objc/api/peerconnection/RTCMediaSource.h", @@ -1541,8 +1560,8 @@ if (is_ios || is_mac) { "objc/api/peerconnection/RTCRtpReceiver.h", "objc/api/peerconnection/RTCRtpSender.h", "objc/api/peerconnection/RTCRtpTransceiver.h", - "objc/api/peerconnection/RTCSSLAdapter.h", "objc/api/peerconnection/RTCSessionDescription.h", + "objc/api/peerconnection/RTCSSLAdapter.h", "objc/api/peerconnection/RTCStatisticsReport.h", "objc/api/peerconnection/RTCTracing.h", "objc/api/peerconnection/RTCVideoSource.h", @@ -1551,10 +1570,12 @@ if (is_ios || is_mac) { "objc/api/video_codec/RTCVideoDecoderVP8.h", "objc/api/video_codec/RTCVideoDecoderVP9.h", "objc/api/video_codec/RTCVideoEncoderAV1.h", + 
"objc/api/video_codec/RTCVideoEncoderSimulcast.h", "objc/api/video_codec/RTCVideoEncoderVP8.h", "objc/api/video_codec/RTCVideoEncoderVP9.h", "objc/api/video_frame_buffer/RTCNativeI420Buffer.h", "objc/api/video_frame_buffer/RTCNativeMutableI420Buffer.h", + "objc/base/RTCAudioRenderer.h", "objc/base/RTCCodecSpecificInfo.h", "objc/base/RTCEncodedImage.h", "objc/base/RTCI420Buffer.h", @@ -1574,15 +1595,19 @@ if (is_ios || is_mac) { "objc/base/RTCVideoFrame.h", "objc/base/RTCVideoFrameBuffer.h", "objc/base/RTCVideoRenderer.h", - "objc/base/RTCAudioRenderer.h", "objc/base/RTCYUVPlanarBuffer.h", + "objc/components/audio/RTCAudioBuffer.h", + "objc/components/audio/RTCAudioCustomProcessingDelegate.h", + "objc/components/audio/RTCAudioProcessingConfig.h", + "objc/components/audio/RTCAudioProcessingModule.h", + "objc/components/audio/RTCDefaultAudioProcessingModule.h", "objc/components/capturer/RTCCameraVideoCapturer.h", - "objc/components/capturer/RTCFileVideoCapturer.h", "objc/components/capturer/RTCDesktopCapturer.h", - "objc/components/capturer/RTCDesktopSource.h", "objc/components/capturer/RTCDesktopMediaList.h", - "objc/components/renderer/metal/RTCMTLVideoView.h", + "objc/components/capturer/RTCDesktopSource.h", + "objc/components/capturer/RTCFileVideoCapturer.h", "objc/components/renderer/metal/RTCMTLNSVideoView.h", + "objc/components/renderer/metal/RTCMTLVideoView.h", "objc/components/renderer/opengl/RTCNSGLVideoView.h", "objc/components/renderer/opengl/RTCVideoViewShading.h", "objc/components/video_codec/RTCCodecSpecificInfoH264.h", @@ -1592,13 +1617,11 @@ if (is_ios || is_mac) { "objc/components/video_codec/RTCVideoDecoderFactoryH264.h", "objc/components/video_codec/RTCVideoDecoderH264.h", "objc/components/video_codec/RTCVideoEncoderFactoryH264.h", + "objc/components/video_codec/RTCVideoEncoderFactorySimulcast.h", "objc/components/video_codec/RTCVideoEncoderH264.h", "objc/components/video_frame_buffer/RTCCVPixelBuffer.h", "objc/helpers/RTCDispatcher.h", "objc/helpers/RTCYUVHelper.h", - # Added for Simulcast support - "objc/components/video_codec/RTCVideoEncoderFactorySimulcast.h", - "objc/api/video_codec/RTCVideoEncoderSimulcast.h", ] if (!build_with_chromium) { sources += [ @@ -1680,10 +1703,10 @@ if (is_ios || is_mac) { "objc/native/api/video_decoder_factory.mm", "objc/native/api/video_encoder_factory.h", "objc/native/api/video_encoder_factory.mm", - "objc/native/api/video_frame.h", - "objc/native/api/video_frame.mm", "objc/native/api/video_frame_buffer.h", "objc/native/api/video_frame_buffer.mm", + "objc/native/api/video_frame.h", + "objc/native/api/video_frame.mm", "objc/native/api/video_renderer.h", "objc/native/api/video_renderer.mm", ] diff --git a/sdk/objc/api/peerconnection/RTCAudioTrack.mm b/sdk/objc/api/peerconnection/RTCAudioTrack.mm index 065064a1fe..3d3af34893 100644 --- a/sdk/objc/api/peerconnection/RTCAudioTrack.mm +++ b/sdk/objc/api/peerconnection/RTCAudioTrack.mm @@ -209,6 +209,7 @@ - (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)facto NSParameterAssert(type == RTCMediaStreamTrackTypeAudio); if (self = [super initWithFactory:factory nativeTrack:nativeTrack type:type]) { RTC_LOG(LS_INFO) << "RTCAudioTrack init"; + _lock = OS_UNFAIR_LOCK_INIT; _renderers = [NSHashTable weakObjectsHashTable]; _audioConverter = new rtc::RefCountedObject(self, &_lock); } diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Native.h b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Native.h index 902925936b..cc45aba1ec 100644 --- 
a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Native.h +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Native.h @@ -73,12 +73,6 @@ NS_ASSUME_NONNULL_BEGIN initWithEncoderFactory:(nullable id)encoderFactory decoderFactory:(nullable id)decoderFactory; -- (instancetype) - initWithBypassVoiceProcessing:(BOOL)bypassVoiceProcessing - encoderFactory:(nullable id)encoderFactory - decoderFactory: - (nullable id)decoderFactory; - /** Initialize an RTCPeerConnection with a configuration, constraints, and * dependencies. */ diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h index d74b99bd95..439490d370 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h @@ -38,6 +38,8 @@ typedef NS_ENUM(NSInteger, RTCRtpMediaType); (RTCSSLCertificateVerifier); @protocol RTC_OBJC_TYPE (RTCAudioDevice); +@protocol RTC_OBJC_TYPE +(RTCAudioProcessingModule); RTC_OBJC_EXPORT @interface RTC_OBJC_TYPE (RTCPeerConnectionFactory) : NSObject @@ -60,8 +62,9 @@ RTC_OBJC_EXPORT - (instancetype) initWithBypassVoiceProcessing:(BOOL)bypassVoiceProcessing encoderFactory:(nullable id)encoderFactory - decoderFactory: - (nullable id)decoderFactory; + decoderFactory:(nullable id)decoderFactory + audioProcessingModule: + (nullable id)audioProcessingModule; @property(nonatomic, readonly) RTCAudioDeviceModule *audioDeviceModule; diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm index 49ff38161f..5cb4f38c05 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm @@ -53,6 +53,9 @@ #include "sdk/objc/native/src/objc_video_decoder_factory.h" #include "sdk/objc/native/src/objc_video_encoder_factory.h" +#import "components/audio/RTCAudioProcessingModule.h" +#import "components/audio/RTCDefaultAudioProcessingModule+Private.h" + #if defined(WEBRTC_IOS) #import "sdk/objc/native/api/audio_device_module.h" #endif @@ -62,6 +65,7 @@ @implementation RTC_OBJC_TYPE (RTCPeerConnectionFactory) { std::unique_ptr _workerThread; std::unique_ptr _signalingThread; rtc::scoped_refptr _nativeAudioDeviceModule; + RTCDefaultAudioProcessingModule *_defaultAudioProcessingModule; BOOL _hasStartedAecDump; } @@ -144,8 +148,9 @@ - (instancetype)init { - (instancetype) initWithBypassVoiceProcessing:(BOOL)bypassVoiceProcessing encoderFactory:(nullable id)encoderFactory - decoderFactory: - (nullable id)decoderFactory { + decoderFactory:(nullable id)decoderFactory + audioProcessingModule: + (nullable id)audioProcessingModule { #ifdef HAVE_NO_MEDIA return [self initWithNoMedia]; #else @@ -158,12 +163,21 @@ - (instancetype)init { native_decoder_factory = webrtc::ObjCToNativeVideoDecoderFactory(decoderFactory); } rtc::scoped_refptr audio_device_module = [self createAudioDeviceModule:bypassVoiceProcessing]; + + if ([audioProcessingModule isKindOfClass:[RTCDefaultAudioProcessingModule class]]) { + _defaultAudioProcessingModule = (RTCDefaultAudioProcessingModule *)audioProcessingModule; + } else { + _defaultAudioProcessingModule = [[RTCDefaultAudioProcessingModule alloc] init]; + } + + NSLog(@"AudioProcessingModule: %@", _defaultAudioProcessingModule); + return [self initWithNativeAudioEncoderFactory:webrtc::CreateBuiltinAudioEncoderFactory() nativeAudioDecoderFactory:webrtc::CreateBuiltinAudioDecoderFactory() 
nativeVideoEncoderFactory:std::move(native_encoder_factory) nativeVideoDecoderFactory:std::move(native_decoder_factory) audioDeviceModule:audio_device_module.get() - audioProcessingModule:nullptr + audioProcessingModule:_defaultAudioProcessingModule.nativeAudioProcessingModule bypassVoiceProcessing:bypassVoiceProcessing]; #endif } diff --git a/sdk/objc/components/audio/RTCAudioBuffer+Private.h b/sdk/objc/components/audio/RTCAudioBuffer+Private.h new file mode 100644 index 0000000000..effd8bb429 --- /dev/null +++ b/sdk/objc/components/audio/RTCAudioBuffer+Private.h @@ -0,0 +1,29 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import "RTCAudioBuffer.h" + +#include "modules/audio_processing/audio_buffer.h" + +NS_ASSUME_NONNULL_BEGIN + +@interface RTC_OBJC_TYPE (RTCAudioBuffer)() + +- (instancetype)initWithNativeType: (webrtc::AudioBuffer *) audioBuffer; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/components/audio/RTCAudioBuffer.h b/sdk/objc/components/audio/RTCAudioBuffer.h new file mode 100644 index 0000000000..8bbd068657 --- /dev/null +++ b/sdk/objc/components/audio/RTCAudioBuffer.h @@ -0,0 +1,38 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import + +#import "RTCMacros.h" + +NS_ASSUME_NONNULL_BEGIN + +RTC_OBJC_EXPORT +@interface RTC_OBJC_TYPE (RTCAudioBuffer) : NSObject + +@property(nonatomic, readonly) size_t channels; +@property(nonatomic, readonly) size_t frames; +@property(nonatomic, readonly) size_t framesPerBand; +@property(nonatomic, readonly) size_t bands; + +// Returns pointer arrays. Index range from 0 to `frames`. +- (float* _Nonnull)rawBufferForChannel:(size_t)channel; + +// TODO: More convenience methods... + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/components/audio/RTCAudioBuffer.mm b/sdk/objc/components/audio/RTCAudioBuffer.mm new file mode 100644 index 0000000000..e37ea344dd --- /dev/null +++ b/sdk/objc/components/audio/RTCAudioBuffer.mm @@ -0,0 +1,55 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import "RTCAudioBuffer.h" + +#include "modules/audio_processing/audio_buffer.h" + +@implementation RTC_OBJC_TYPE (RTCAudioBuffer) { + // Raw + webrtc::AudioBuffer *_audioBuffer; +} + +- (size_t)channels { + return _audioBuffer->num_channels(); +} + +- (size_t)frames { + return _audioBuffer->num_frames(); +} + +- (size_t)framesPerBand { + return _audioBuffer->num_frames_per_band(); +} + +- (size_t)bands { + return _audioBuffer->num_bands(); +} + +- (float *)rawBufferForChannel:(size_t)channel { + return _audioBuffer->channels()[channel]; +} + +#pragma mark - Private + +- (instancetype)initWithNativeType:(webrtc::AudioBuffer *)audioBuffer { + if (self = [super init]) { + _audioBuffer = audioBuffer; + } + return self; +} + +@end diff --git a/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter+Private.h b/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter+Private.h new file mode 100644 index 0000000000..a9dc3d8400 --- /dev/null +++ b/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter+Private.h @@ -0,0 +1,43 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import "RTCAudioCustomProcessingAdapter.h" +#import "RTCAudioCustomProcessingDelegate.h" +#import "RTCMacros.h" + +#include "modules/audio_processing/include/audio_processing.h" + +NS_ASSUME_NONNULL_BEGIN + +@interface RTCAudioCustomProcessingAdapter () + +// Thread safe set/get with os_unfair_lock. +@property(nonatomic, weak, nullable) id + audioCustomProcessingDelegate; + +// Direct read access without lock. +@property(nonatomic, readonly, weak, nullable) id + rawAudioCustomProcessingDelegate; + +@property(nonatomic, readonly) std::unique_ptr + nativeAudioCustomProcessingModule; + +- (instancetype)initWithDelegate: + (nullable id)audioCustomProcessingDelegate; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter.h b/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter.h new file mode 100644 index 0000000000..24239eac2d --- /dev/null +++ b/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter.h @@ -0,0 +1,28 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#import +#import "RTCMacros.h" + +NS_ASSUME_NONNULL_BEGIN + +@interface RTCAudioCustomProcessingAdapter : NSObject + +- (instancetype)init NS_UNAVAILABLE; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter.mm b/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter.mm new file mode 100644 index 0000000000..c8d1dfe4f6 --- /dev/null +++ b/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter.mm @@ -0,0 +1,139 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import +#import + +#import "RTCAudioBuffer+Private.h" +#import "RTCAudioCustomProcessingAdapter+Private.h" +#import "RTCAudioCustomProcessingAdapter.h" + +#include "rtc_base/logging.h" + +namespace webrtc { + +class AudioCustomProcessingAdapter : public webrtc::CustomProcessing { + public: + bool is_initialized_; + int sample_rate_hz_; + int num_channels_; + + AudioCustomProcessingAdapter(RTCAudioCustomProcessingAdapter *adapter, os_unfair_lock *lock) { + RTC_LOG(LS_INFO) << "RTCAudioCustomProcessingAdapter.AudioCustomProcessingAdapter init"; + + adapter_ = adapter; + lock_ = lock; + is_initialized_ = false; + sample_rate_hz_ = 0; + num_channels_ = 0; + } + + ~AudioCustomProcessingAdapter() { + RTC_LOG(LS_INFO) << "RTCAudioCustomProcessingAdapter.AudioCustomProcessingAdapter dealloc"; + + os_unfair_lock_lock(lock_); + id delegate = adapter_.rawAudioCustomProcessingDelegate; + [delegate audioProcessingRelease]; + os_unfair_lock_unlock(lock_); + } + + void Initialize(int sample_rate_hz, int num_channels) override { + os_unfair_lock_lock(lock_); + id delegate = adapter_.rawAudioCustomProcessingDelegate; + [delegate audioProcessingInitializeWithSampleRate:sample_rate_hz channels:num_channels]; + is_initialized_ = true; + sample_rate_hz_ = sample_rate_hz; + num_channels_ = num_channels; + os_unfair_lock_unlock(lock_); + } + + void Process(AudioBuffer *audio_buffer) override { + bool is_locked = os_unfair_lock_trylock(lock_); + if (!is_locked) { + RTC_LOG(LS_INFO) << "RTCAudioCustomProcessingAdapter.AudioCustomProcessingAdapter Process " + "already locked, skipping..."; + + return; + } + id delegate = adapter_.rawAudioCustomProcessingDelegate; + if (delegate != nil) { + RTCAudioBuffer *audioBuffer = [[RTCAudioBuffer alloc] initWithNativeType:audio_buffer]; + [delegate audioProcessingProcess:audioBuffer]; + } + os_unfair_lock_unlock(lock_); + } + + std::string ToString() const override { return "AudioCustomProcessingAdapter"; } + + private: + __weak RTCAudioCustomProcessingAdapter *adapter_; + os_unfair_lock *lock_; +}; +} // namespace webrtc + +@implementation RTCAudioCustomProcessingAdapter { + webrtc::AudioCustomProcessingAdapter *_adapter; + os_unfair_lock _lock; +} + +@synthesize rawAudioCustomProcessingDelegate = _rawAudioCustomProcessingDelegate; + +- (instancetype)initWithDelegate: + (nullable id)audioCustomProcessingDelegate { + if (self = [super init]) { + _lock = OS_UNFAIR_LOCK_INIT; + _rawAudioCustomProcessingDelegate 
= audioCustomProcessingDelegate; + _adapter = new webrtc::AudioCustomProcessingAdapter(self, &_lock); + RTC_LOG(LS_INFO) << "RTCAudioCustomProcessingAdapter init"; + } + + return self; +} + +- (void)dealloc { + RTC_LOG(LS_INFO) << "RTCAudioCustomProcessingAdapter dealloc"; +} + +#pragma mark - Getter & Setter for audioCustomProcessingDelegate + +- (nullable id)audioCustomProcessingDelegate { + os_unfair_lock_lock(&_lock); + id delegate = _rawAudioCustomProcessingDelegate; + os_unfair_lock_unlock(&_lock); + return delegate; +} + +- (void)setAudioCustomProcessingDelegate:(nullable id)delegate { + os_unfair_lock_lock(&_lock); + if (_rawAudioCustomProcessingDelegate != nil && _adapter->is_initialized_) { + [_rawAudioCustomProcessingDelegate audioProcessingRelease]; + } + _rawAudioCustomProcessingDelegate = delegate; + if (_adapter->is_initialized_) { + [_rawAudioCustomProcessingDelegate + audioProcessingInitializeWithSampleRate:_adapter->sample_rate_hz_ + channels:_adapter->num_channels_]; + } + os_unfair_lock_unlock(&_lock); +} + +#pragma mark - Private + +- (std::unique_ptr)nativeAudioCustomProcessingModule { + return std::unique_ptr(_adapter); +} + +@end diff --git a/sdk/objc/components/audio/RTCAudioCustomProcessingDelegate.h b/sdk/objc/components/audio/RTCAudioCustomProcessingDelegate.h new file mode 100644 index 0000000000..6a2fec9433 --- /dev/null +++ b/sdk/objc/components/audio/RTCAudioCustomProcessingDelegate.h @@ -0,0 +1,52 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import + +#import "RTCMacros.h" + +NS_ASSUME_NONNULL_BEGIN + +@class RTC_OBJC_TYPE(RTCAudioBuffer); + +RTC_OBJC_EXPORT @protocol RTC_OBJC_TYPE (RTCAudioCustomProcessingDelegate) + +/** +* (Re-)initialize the audio processor. +* This method can be invoked multiple times. +*/ +- (void)audioProcessingInitializeWithSampleRate : (size_t)sampleRateHz channels +: (size_t)channels NS_SWIFT_NAME(audioProcessingInitialize(sampleRate:channels:)); + +/** + * Process (read or write) the audio buffer. + * RTCAudioBuffer is a simple wrapper for webrtc::AudioBuffer and the valid scope is only inside + * this method. Do not retain it. + */ +- (void)audioProcessingProcess:(RTCAudioBuffer *)audioBuffer + NS_SWIFT_NAME(audioProcessingProcess(audioBuffer:)); + +// TOOD: +// virtual void SetRuntimeSetting(AudioProcessing::RuntimeSetting setting); + +/** + * Suggests releasing resources allocated by the audio processor. + */ +- (void)audioProcessingRelease; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/components/audio/RTCAudioProcessingConfig+Private.h b/sdk/objc/components/audio/RTCAudioProcessingConfig+Private.h new file mode 100644 index 0000000000..ed565ee0aa --- /dev/null +++ b/sdk/objc/components/audio/RTCAudioProcessingConfig+Private.h @@ -0,0 +1,29 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import "RTCAudioProcessingConfig.h" + +#include "modules/audio_processing/include/audio_processing.h" + +NS_ASSUME_NONNULL_BEGIN + +@interface RTC_OBJC_TYPE (RTCAudioProcessingConfig)() + +@property(nonatomic, readonly) webrtc::AudioProcessing::Config nativeAudioProcessingConfig; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/components/audio/RTCAudioProcessingConfig.h b/sdk/objc/components/audio/RTCAudioProcessingConfig.h new file mode 100644 index 0000000000..3c7dce45f1 --- /dev/null +++ b/sdk/objc/components/audio/RTCAudioProcessingConfig.h @@ -0,0 +1,31 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import +#import "RTCMacros.h" + +NS_ASSUME_NONNULL_BEGIN + +RTC_OBJC_EXPORT +@interface RTC_OBJC_TYPE (RTCAudioProcessingConfig) : NSObject + +@property(nonatomic, assign) BOOL echoCancellerEnabled; +@property(nonatomic, assign) BOOL echoCancellerMobileMode; + + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/components/audio/RTCAudioProcessingConfig.mm b/sdk/objc/components/audio/RTCAudioProcessingConfig.mm new file mode 100644 index 0000000000..ca40f16e17 --- /dev/null +++ b/sdk/objc/components/audio/RTCAudioProcessingConfig.mm @@ -0,0 +1,51 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#import "RTCAudioProcessingConfig.h" + +#include "modules/audio_processing/include/audio_processing.h" + +@implementation RTC_OBJC_TYPE (RTCAudioProcessingConfig) { + webrtc::AudioProcessing::Config _config; +} + +// config.echo_canceller.enabled + +- (BOOL)echoCancellerEnabled { + return _config.echo_canceller.enabled; +} + +- (void)setEchoCancellerEnabled:(BOOL)value { + _config.echo_canceller.enabled = value; +} + +// config.echo_canceller.mobile_mode + +- (BOOL)echoCancellerMobileMode { + return _config.echo_canceller.mobile_mode; +} + +- (void)setEchoCancellerMobileMode:(BOOL)value { + _config.echo_canceller.mobile_mode = value; +} + +#pragma mark - Private + +- (webrtc::AudioProcessing::Config)nativeAudioProcessingConfig { + return _config; +} + +@end diff --git a/sdk/objc/components/audio/RTCAudioProcessingModule.h b/sdk/objc/components/audio/RTCAudioProcessingModule.h new file mode 100644 index 0000000000..7bf0402427 --- /dev/null +++ b/sdk/objc/components/audio/RTCAudioProcessingModule.h @@ -0,0 +1,33 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import + +#import "RTCMacros.h" + +NS_ASSUME_NONNULL_BEGIN + +@class RTC_OBJC_TYPE(RTCAudioProcessingConfig); + +RTC_OBJC_EXPORT @protocol RTC_OBJC_TYPE (RTCAudioProcessingModule) + +- (void)applyConfig: (RTCAudioProcessingConfig *)config; + +// TODO: Implement... + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/components/audio/RTCDefaultAudioProcessingModule+Private.h b/sdk/objc/components/audio/RTCDefaultAudioProcessingModule+Private.h new file mode 100644 index 0000000000..4f8551e372 --- /dev/null +++ b/sdk/objc/components/audio/RTCDefaultAudioProcessingModule+Private.h @@ -0,0 +1,29 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#import "RTCDefaultAudioProcessingModule.h" + +#include "modules/audio_processing/include/audio_processing.h" + +NS_ASSUME_NONNULL_BEGIN + +@interface RTC_OBJC_TYPE (RTCDefaultAudioProcessingModule)() + +@property(nonatomic, readonly) rtc::scoped_refptr nativeAudioProcessingModule; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/components/audio/RTCDefaultAudioProcessingModule.h b/sdk/objc/components/audio/RTCDefaultAudioProcessingModule.h new file mode 100644 index 0000000000..917d584d48 --- /dev/null +++ b/sdk/objc/components/audio/RTCDefaultAudioProcessingModule.h @@ -0,0 +1,47 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import + +#import "RTCAudioProcessingModule.h" +#import "RTCMacros.h" + +NS_ASSUME_NONNULL_BEGIN + +@class RTC_OBJC_TYPE(RTCAudioProcessingConfig); +@protocol RTC_OBJC_TYPE +(RTCAudioCustomProcessingDelegate); + +RTC_OBJC_EXPORT +@interface RTC_OBJC_TYPE (RTCDefaultAudioProcessingModule) : NSObject + +- (instancetype)initWithConfig: (nullable RTCAudioProcessingConfig *)config + capturePostProcessingDelegate: (nullable id)capturePostProcessingDelegate + renderPreProcessingDelegate: (nullable id)renderPreProcessingDelegate + NS_SWIFT_NAME(init(config:capturePostProcessingDelegate:renderPreProcessingDelegate:)) NS_DESIGNATED_INITIALIZER; + +- (void)applyConfig:(RTCAudioProcessingConfig *)config; + +// Dynamically update delegates at runtime + +@property(nonatomic, weak, nullable) id + capturePostProcessingDelegate; +@property(nonatomic, weak, nullable) id + renderPreProcessingDelegate; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/components/audio/RTCDefaultAudioProcessingModule.mm b/sdk/objc/components/audio/RTCDefaultAudioProcessingModule.mm new file mode 100644 index 0000000000..3875dec533 --- /dev/null +++ b/sdk/objc/components/audio/RTCDefaultAudioProcessingModule.mm @@ -0,0 +1,96 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import "RTCDefaultAudioProcessingModule.h" +#import "RTCAudioCustomProcessingAdapter+Private.h" +#import "RTCAudioProcessingConfig+Private.h" + +#include "modules/audio_processing/include/audio_processing.h" + +@implementation RTC_OBJC_TYPE (RTCDefaultAudioProcessingModule) { + rtc::scoped_refptr _nativeAudioProcessingModule; + // Custom processing adapters... 
+ RTCAudioCustomProcessingAdapter *_capturePostProcessingAdapter; + RTCAudioCustomProcessingAdapter *_renderPreProcessingAdapter; +} + +- (instancetype)init { + return [self initWithConfig:nil + capturePostProcessingDelegate:nil + renderPreProcessingDelegate:nil]; +} + +- (instancetype)initWithConfig:(nullable RTCAudioProcessingConfig *)config + capturePostProcessingDelegate: + (nullable id)capturePostProcessingDelegate + renderPreProcessingDelegate:(nullable id) + renderPreProcessingDelegate { + if (self = [super init]) { + webrtc::AudioProcessingBuilder builder = webrtc::AudioProcessingBuilder(); + + // TODO: Custom Config... + + if (config != nil) { + builder.SetConfig(config.nativeAudioProcessingConfig); + } + + _capturePostProcessingAdapter = + [[RTCAudioCustomProcessingAdapter alloc] initWithDelegate:capturePostProcessingDelegate]; + builder.SetCapturePostProcessing( + _capturePostProcessingAdapter.nativeAudioCustomProcessingModule); + + _renderPreProcessingAdapter = + [[RTCAudioCustomProcessingAdapter alloc] initWithDelegate:renderPreProcessingDelegate]; + builder.SetRenderPreProcessing(_renderPreProcessingAdapter.nativeAudioCustomProcessingModule); + + _nativeAudioProcessingModule = builder.Create(); + } + return self; +} + +#pragma mark - Getter & Setters for delegates + +- (nullable id)capturePostProcessingDelegate { + return _capturePostProcessingAdapter.audioCustomProcessingDelegate; +} + +- (void)setCapturePostProcessingDelegate: + (nullable id)delegate { + _capturePostProcessingAdapter.audioCustomProcessingDelegate = delegate; +} + +- (nullable id)renderPreProcessingDelegate { + return _renderPreProcessingAdapter.audioCustomProcessingDelegate; +} + +- (void)setRenderPreProcessingDelegate: + (nullable id)delegate { + _renderPreProcessingAdapter.audioCustomProcessingDelegate = delegate; +} + +#pragma mark - RTCAudioProcessingModule protocol + +- (void)applyConfig:(RTCAudioProcessingConfig *)config { + _nativeAudioProcessingModule->ApplyConfig(config.nativeAudioProcessingConfig); +} + +#pragma mark - Private + +- (rtc::scoped_refptr)nativeAudioProcessingModule { + return _nativeAudioProcessingModule; +} + +@end From a59e8571ee8b708b89d9ce66d1322d12bf33d815 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Th=C3=A9o=20Monnom?= Date: Tue, 29 Aug 2023 10:13:56 -0700 Subject: [PATCH 16/42] more yuv wrappers (#87) --- api/video/yuv_helper.cc | 232 +++++++++++++++++++++++++++++++++++++--- api/video/yuv_helper.h | 173 +++++++++++++++++++++++++++--- 2 files changed, 375 insertions(+), 30 deletions(-) diff --git a/api/video/yuv_helper.cc b/api/video/yuv_helper.cc index 3ed5dd4104..eab9126183 100644 --- a/api/video/yuv_helper.cc +++ b/api/video/yuv_helper.cc @@ -17,6 +17,9 @@ #include "yuv_helper.h" #include "libyuv/convert.h" +#include "libyuv/convert_argb.h" +#include "libyuv/convert_from_argb.h" +#include "libyuv/row.h" #include "third_party/libyuv/include/libyuv.h" #include "video_rotation.h" @@ -60,23 +63,6 @@ int I420ToNV12(const uint8_t* src_y, dst_stride_uv, width, height); } -int NV12ToI420(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_uv, - int src_stride_uv, - uint8_t* dst_y, - int dst_stride_y, - uint8_t* dst_u, - int dst_stride_u, - uint8_t* dst_v, - int dst_stride_v, - int width, - int height) { - return libyuv::NV12ToI420(src_y, src_stride_y, src_uv, src_stride_uv, dst_y, - dst_stride_y, dst_u, dst_stride_u, dst_v, - dst_stride_v, width, height); -} - int I420ToARGB(const uint8_t* src_y, int src_stride_y, const uint8_t* src_u, @@ -215,4 +201,216 @@ int 
ARGBToRGB24(const uint8_t* src_argb, dst_stride_rgb24, width, height); } +int NV12ToI420(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height) { + return libyuv::NV12ToI420(src_y, src_stride_y, src_uv, src_stride_uv, dst_y, + dst_stride_y, dst_u, dst_stride_u, dst_v, + dst_stride_v, width, height); +} + +int I444ToI420(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height) { + return libyuv::I444ToI420(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_y, dst_stride_y, dst_u, + dst_stride_u, dst_v, dst_stride_v, width, height); +} + +int I422ToI420(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height) { + return libyuv::I422ToI420(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_y, dst_stride_y, dst_u, + dst_stride_u, dst_v, dst_stride_v, width, height); +} + +int I010ToI420(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height) { + return libyuv::I010ToI420(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_y, dst_stride_y, dst_u, + dst_stride_u, dst_v, dst_stride_v, width, height); +} + +int NV12ToARGB(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height) { + return libyuv::NV12ToARGB(src_y, src_stride_y, src_uv, src_stride_uv, + dst_argb, dst_stride_argb, width, height); +} + +int NV12ToABGR(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int height) { + return libyuv::NV12ToABGR(src_y, src_stride_y, src_uv, src_stride_uv, + dst_abgr, dst_stride_abgr, width, height); +} + +int I444ToARGB(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int height) { + return libyuv::I444ToARGB(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_abgr, dst_stride_abgr, width, + height); +} + +int I444ToABGR(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int height) { + return libyuv::I444ToABGR(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_abgr, dst_stride_abgr, width, + height); +} + +int I422ToARGB(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height) { + return libyuv::I422ToARGB(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_argb, dst_stride_argb, width, 
+ height); +} + +int I422ToABGR(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int height) { + return libyuv::I422ToABGR(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_abgr, dst_stride_abgr, width, + height); +} + +int I010ToARGB(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height) { + return libyuv::I010ToARGB(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_argb, dst_stride_argb, width, + height); +} + +int I010ToABGR(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int height) { + return libyuv::I010ToABGR(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_abgr, dst_stride_abgr, width, + height); +} + +int ABGRToNV12(const uint8_t* src_abgr, + int src_stride_abgr, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_uv, + int dst_stride_uv, + int width, + int height) { + return libyuv::ABGRToNV12(src_abgr, src_stride_abgr, dst_y, dst_stride_y, + dst_uv, dst_stride_uv, width, height); +} + +int ARGBToNV12(const uint8_t* src_argb, + int src_stride_argb, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_uv, + int dst_stride_uv, + int width, + int height) { + return libyuv::ARGBToNV12(src_argb, src_stride_argb, dst_y, dst_stride_y, + dst_uv, dst_stride_uv, width, height); +} + } // namespace webrtc diff --git a/api/video/yuv_helper.h b/api/video/yuv_helper.h index 0ab007fc57..5e86fb378b 100644 --- a/api/video/yuv_helper.h +++ b/api/video/yuv_helper.h @@ -51,19 +51,6 @@ RTC_EXPORT int I420ToNV12(const uint8_t* src_y, int width, int height); -RTC_EXPORT int NV12ToI420(const uint8_t* src_y, - int src_stride_y, - const uint8_t* src_uv, - int src_stride_uv, - uint8_t* dst_y, - int dst_stride_y, - uint8_t* dst_u, - int dst_stride_u, - uint8_t* dst_v, - int dst_stride_v, - int width, - int height); - RTC_EXPORT int I420ToARGB(const uint8_t* src_y, int src_stride_y, const uint8_t* src_u, @@ -166,4 +153,164 @@ RTC_EXPORT int ARGBToRGB24(const uint8_t* src_argb, int width, int height); +RTC_EXPORT int NV12ToI420(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height); + +RTC_EXPORT int I444ToI420(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height); + +RTC_EXPORT int I422ToI420(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height); + +RTC_EXPORT int I010ToI420(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, 
+ int height); + +RTC_EXPORT int NV12ToARGB(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height); + +RTC_EXPORT int NV12ToABGR(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int height); + +RTC_EXPORT int I444ToARGB(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int height); + +RTC_EXPORT int I444ToABGR(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int height); + +RTC_EXPORT int I422ToARGB(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height); + +RTC_EXPORT int I422ToABGR(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int height); + +RTC_EXPORT int I010ToARGB(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height); + +RTC_EXPORT int I010ToABGR(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int height); + +RTC_EXPORT int ABGRToNV12(const uint8_t* src_abgr, + int src_stride_abgr, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_uv, + int dst_stride_uv, + int width, + int height); + +RTC_EXPORT int ARGBToNV12(const uint8_t* src_argb, + int src_stride_argb, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_uv, + int dst_stride_uv, + int width, + int height); + } // namespace webrtc From d0c4e2b935792bf4e14734683e8e1931d76a1335 Mon Sep 17 00:00:00 2001 From: cloudwebrtc Date: Thu, 31 Aug 2023 15:21:47 +0800 Subject: [PATCH 17/42] Improve e2ee, add setSharedKey to KeyProvider. (#88) * Improve e2ee, add setSharedKey to KeyProvider. * update. * reset has_valid_key after RatchetKey. * update. * clone key_handler from share_key for each participant. * add RatchetSharedKey and ExportSharedKey. * make KeyProvider::SetSharedKey only valid when KeyProviderOptions::shared_key == true. * remove unused enum. --- api/crypto/frame_crypto_transformer.cc | 67 +++--- api/crypto/frame_crypto_transformer.h | 220 +++++++++++++----- .../org/webrtc/FrameCryptorKeyProvider.java | 22 +- .../src/jni/pc/frame_cryptor_key_provider.cc | 35 +++ .../RTCFrameCryptorKeyProvider.h | 6 + .../RTCFrameCryptorKeyProvider.mm | 16 ++ 6 files changed, 273 insertions(+), 93 deletions(-) diff --git a/api/crypto/frame_crypto_transformer.cc b/api/crypto/frame_crypto_transformer.cc index ed5a17c957..9d8ee44c9e 100644 --- a/api/crypto/frame_crypto_transformer.cc +++ b/api/crypto/frame_crypto_transformer.cc @@ -354,7 +354,10 @@ void FrameCryptorTransformer::encryptFrame( return; } - auto key_handler = key_provider_->GetKey(participant_id_); + auto key_handler = key_provider_->options().shared_key + ? 
key_provider_->GetSharedKey(participant_id_) + : key_provider_->GetKey(participant_id_); + if (key_handler == nullptr || key_handler->GetKeySet(key_index_) == nullptr) { RTC_LOG(LS_INFO) << "FrameCryptorTransformer::encryptFrame() no keys, or " "key_index[" @@ -462,30 +465,31 @@ void FrameCryptorTransformer::decryptFrame( sink_callback->OnTransformedFrame(std::move(frame)); return; } - + auto uncrypted_magic_bytes = key_provider_->options().uncrypted_magic_bytes; if (uncrypted_magic_bytes.size() > 0 && date_in.size() >= uncrypted_magic_bytes.size() + 1) { - auto tmp = date_in.subview(date_in.size() - (uncrypted_magic_bytes.size() + 1), - uncrypted_magic_bytes.size()); + auto tmp = + date_in.subview(date_in.size() - (uncrypted_magic_bytes.size() + 1), + uncrypted_magic_bytes.size()); if (uncrypted_magic_bytes == std::vector(tmp.begin(), tmp.end())) { - RTC_CHECK_EQ(tmp.size(), uncrypted_magic_bytes.size()); auto frame_type = date_in.subview(date_in.size() - 1, 1); RTC_CHECK_EQ(frame_type.size(), 1); - RTC_LOG(LS_INFO) << "FrameCryptorTransformer::uncrypted_magic_bytes( type " - << frame_type[0] << ", tmp " - << to_hex(tmp.data(), tmp.size()) << ", magic bytes " - << to_hex(uncrypted_magic_bytes.data(), - uncrypted_magic_bytes.size()) - << ")"; + RTC_LOG(LS_INFO) + << "FrameCryptorTransformer::uncrypted_magic_bytes( type " + << frame_type[0] << ", tmp " << to_hex(tmp.data(), tmp.size()) + << ", magic bytes " + << to_hex(uncrypted_magic_bytes.data(), uncrypted_magic_bytes.size()) + << ")"; - // magic bytes detected, this is a non-encrypted frame, skip frame decryption. + // magic bytes detected, this is a non-encrypted frame, skip frame + // decryption. rtc::Buffer data_out; - data_out.AppendData( - date_in.subview(0, date_in.size() - uncrypted_magic_bytes.size() - 1)); + data_out.AppendData(date_in.subview( + 0, date_in.size() - uncrypted_magic_bytes.size() - 1)); frame->SetData(data_out); sink_callback->OnTransformedFrame(std::move(frame)); return; @@ -518,7 +522,10 @@ void FrameCryptorTransformer::decryptFrame( return; } - auto key_handler = key_provider_->GetKey(participant_id_); + auto key_handler = key_provider_->options().shared_key + ? 
key_provider_->GetSharedKey(participant_id_) + : key_provider_->GetKey(participant_id_); + if (key_index >= KEYRING_SIZE || key_handler == nullptr || key_handler->GetKeySet(key_index) == nullptr) { RTC_LOG(LS_INFO) << "FrameCryptorTransformer::decryptFrame() no keys, or " @@ -534,10 +541,10 @@ void FrameCryptorTransformer::decryptFrame( return; } - if(last_dec_error_ == kDecryptionFailed && !key_handler->have_valid_key) { - // if decryption failed and we have an invalid key, - // please try to decrypt with the next new key - return; + if (last_dec_error_ == kDecryptionFailed && !key_handler->HasValidKey()) { + // if decryption failed and we have an invalid key, + // please try to decrypt with the next new key + return; } auto key_set = key_handler->GetKeySet(key_index); @@ -565,25 +572,28 @@ void FrameCryptorTransformer::decryptFrame( RTC_LOG(LS_ERROR) << "FrameCryptorTransformer::decryptFrame() failed"; std::shared_ptr ratcheted_key_set; auto currentKeyMaterial = key_set->material; - if (key_handler->options().ratchet_window_size > 0) { - while (ratchet_count < key_handler->options().ratchet_window_size) { + if (key_provider_->options().ratchet_window_size > 0) { + while (ratchet_count < key_provider_->options().ratchet_window_size) { ratchet_count++; RTC_LOG(LS_INFO) << "ratcheting key attempt " << ratchet_count << " of " - << key_handler->options().ratchet_window_size; + << key_provider_->options().ratchet_window_size; auto new_material = key_handler->RatchetKeyMaterial(currentKeyMaterial); - ratcheted_key_set = key_handler->DeriveKeys(new_material, key_handler->options().ratchet_salt, 128); + ratcheted_key_set = key_handler->DeriveKeys( + new_material, key_provider_->options().ratchet_salt, 128); if (AesEncryptDecrypt(EncryptOrDecrypt::kDecrypt, algorithm_, - ratcheted_key_set->encryption_key, iv, frame_header, - encrypted_payload, &buffer) == Success) { + ratcheted_key_set->encryption_key, iv, + frame_header, encrypted_payload, + &buffer) == Success) { RTC_LOG(LS_INFO) << "FrameCryptorTransformer::decryptFrame() " "ratcheted to key_index=" << static_cast(key_index); decryption_success = true; // success, so we set the new key key_handler->SetKeyFromMaterial(new_material, key_index); + key_handler->SetHasValidKey(true); if (last_dec_error_ != FrameCryptionState::kKeyRatcheted) { last_dec_error_ = FrameCryptionState::kKeyRatcheted; if (observer_) @@ -603,7 +613,7 @@ void FrameCryptorTransformer::decryptFrame( times, we come back to the initial key. 
*/ if (!decryption_success || - ratchet_count >= key_handler->options().ratchet_window_size) { + ratchet_count >= key_provider_->options().ratchet_window_size) { key_handler->SetKeyFromMaterial(initialKeyMaterial, key_index); } } @@ -612,7 +622,7 @@ void FrameCryptorTransformer::decryptFrame( if (!decryption_success) { if (last_dec_error_ != FrameCryptionState::kDecryptionFailed) { last_dec_error_ = FrameCryptionState::kDecryptionFailed; - key_handler->have_valid_key = false; + key_handler->SetHasValidKey(false); if (observer_) observer_->OnFrameCryptionStateChanged(participant_id_, last_dec_error_); @@ -629,7 +639,8 @@ void FrameCryptorTransformer::decryptFrame( RTC_LOG(LS_INFO) << "FrameCryptorTransformer::decryptFrame() ivLength=" << static_cast(ivLength) << " unencrypted_bytes=" << static_cast(unencrypted_bytes) - << " key_index=" << static_cast(key_index_) << " aesKey=" + << " key_index=" << static_cast(key_index_) + << " aesKey=" << to_hex(key_set->encryption_key.data(), key_set->encryption_key.size()) << " iv=" << to_hex(iv.data(), iv.size()); diff --git a/api/crypto/frame_crypto_transformer.h b/api/crypto/frame_crypto_transformer.h index 7f98deb11d..bd245729cf 100644 --- a/api/crypto/frame_crypto_transformer.h +++ b/api/crypto/frame_crypto_transformer.h @@ -34,6 +34,8 @@ namespace webrtc { const size_t KEYRING_SIZE = 16; +class ParticipantKeyHandler; + struct KeyProviderOptions { bool shared_key; std::vector ratchet_salt; @@ -47,8 +49,38 @@ struct KeyProviderOptions { ratchet_window_size(copy.ratchet_window_size) {} }; +class KeyProvider : public rtc::RefCountInterface { + public: + + virtual bool SetSharedKey(int key_index, std::vector key) = 0; + + virtual const std::shared_ptr GetSharedKey(const std::string participant_id) = 0; + + virtual const std::vector RatchetSharedKey(int key_index) = 0; + + virtual const std::vector ExportSharedKey(int key_index) const = 0; + + virtual bool SetKey(const std::string participant_id, + int key_index, + std::vector key) = 0; + + virtual const std::shared_ptr GetKey( + const std::string participant_id) const = 0; + + virtual const std::vector RatchetKey( + const std::string participant_id, + int key_index) = 0; + + virtual const std::vector ExportKey(const std::string participant_id, + int key_index) const = 0; + + virtual KeyProviderOptions& options() = 0; + + protected: + virtual ~KeyProvider() {} +}; + class ParticipantKeyHandler { - friend class FrameCryptorTransformer; public: struct KeySet { std::vector material; @@ -56,45 +88,58 @@ class ParticipantKeyHandler { KeySet(std::vector material, std::vector encryptionKey) : material(material), encryption_key(encryptionKey) {} }; - public: - ParticipantKeyHandler(KeyProviderOptions options) : options_(options) { + ParticipantKeyHandler(KeyProvider* key_provider) : key_provider_(key_provider) { crypto_key_ring_.resize(KEYRING_SIZE); } virtual ~ParticipantKeyHandler() = default; + std::shared_ptr Clone() { + auto clone = std::make_shared(key_provider_); + clone->crypto_key_ring_ = crypto_key_ring_; + clone->current_key_index_ = current_key_index_; + clone->has_valid_key_ = has_valid_key_; + return clone; + } + virtual std::vector RatchetKey(int key_index) { - auto current_material = GetKeySet(key_index)->material; + auto key_set = GetKeySet(key_index); + if (!key_set) { + return std::vector(); + } + auto current_material = key_set->material; std::vector new_material; - if (DerivePBKDF2KeyFromRawKey(current_material, options_.ratchet_salt, 256, + if (DerivePBKDF2KeyFromRawKey(current_material, 
key_provider_->options().ratchet_salt, 256, &new_material) != 0) { return std::vector(); } SetKeyFromMaterial(new_material, key_index != -1 ? key_index : current_key_index_); + SetHasValidKey(true); return new_material; } virtual std::shared_ptr GetKeySet(int key_index) { + webrtc::MutexLock lock(&mutex_); return crypto_key_ring_[key_index != -1 ? key_index : current_key_index_]; } virtual void SetKey(std::vector password, int key_index) { SetKeyFromMaterial(password, key_index); - have_valid_key = true; + SetHasValidKey(true); } - virtual void SetKeyFromMaterial(std::vector password, int key_index) { - if (key_index >= 0) { - current_key_index_ = key_index % crypto_key_ring_.size(); + std::vector RatchetKeyMaterial( + std::vector current_material) { + std::vector new_material; + if (DerivePBKDF2KeyFromRawKey(current_material, key_provider_->options().ratchet_salt, 256, + &new_material) != 0) { + return std::vector(); } - crypto_key_ring_[current_key_index_] = - DeriveKeys(password, options_.ratchet_salt, 128); + return new_material; } - virtual KeyProviderOptions& options() { return options_; } - std::shared_ptr DeriveKeys(std::vector password, std::vector ratchet_salt, unsigned int optional_length_bits) { @@ -106,52 +151,103 @@ class ParticipantKeyHandler { return nullptr; } - std::vector RatchetKeyMaterial( - std::vector current_material) { - std::vector new_material; - if (DerivePBKDF2KeyFromRawKey(current_material, options_.ratchet_salt, 256, - &new_material) != 0) { - return std::vector(); + bool HasValidKey() { + webrtc::MutexLock lock(&mutex_); + return has_valid_key_; + } + + void SetHasValidKey(bool has_valid_key) { + webrtc::MutexLock lock(&mutex_); + has_valid_key_ = has_valid_key; + } + + void SetKeyFromMaterial(std::vector password, int key_index) { + webrtc::MutexLock lock(&mutex_); + if (key_index >= 0) { + current_key_index_ = key_index % crypto_key_ring_.size(); } - return new_material; + crypto_key_ring_[current_key_index_] = + DeriveKeys(password, key_provider_->options().ratchet_salt, 128); } - protected: - bool have_valid_key = false; + private: + bool has_valid_key_ = false; + mutable webrtc::Mutex mutex_; int current_key_index_ = 0; - KeyProviderOptions options_; + KeyProvider* key_provider_; std::vector> crypto_key_ring_; }; -class KeyProvider : public rtc::RefCountInterface { - public: - enum { kRawKeySize = 32 }; - +class DefaultKeyProviderImpl : public KeyProvider { public: - virtual const std::shared_ptr GetKey( - const std::string participant_id) const = 0; - - virtual bool SetKey(const std::string participant_id, - int index, - std::vector key) = 0; - - virtual const std::vector RatchetKey( - const std::string participant_id, - int key_index) = 0; + DefaultKeyProviderImpl(KeyProviderOptions options) : options_(options) {} + ~DefaultKeyProviderImpl() override = default; - virtual const std::vector ExportKey(const std::string participant_id, - int key_index) const = 0; + /// Set the shared key. 
+ bool SetSharedKey(int key_index, std::vector key) override { + webrtc::MutexLock lock(&mutex_); + if(options_.shared_key) { + if (keys_.find("shared") == keys_.end()) { + keys_["shared"] = std::make_shared(this); + } + + auto key_handler = keys_["shared"]; + key_handler->SetKey(key, key_index); + + for(auto& key_pair : keys_) { + if(key_pair.first != "shared") { + key_pair.second->SetKey(key, key_index); + } + } + return true; + } + return false; + } - virtual KeyProviderOptions& options() = 0; + const std::vector RatchetSharedKey(int key_index) override { + webrtc::MutexLock lock(&mutex_); + auto it = keys_.find("shared"); + if(it == keys_.end()) { + return std::vector(); + } + auto new_key = it->second->RatchetKey(key_index); + if(options_.shared_key) { + for(auto& key_pair : keys_) { + if(key_pair.first != "shared") { + key_pair.second->SetKey(new_key, key_index); + } + } + } + return new_key; + } - protected: - virtual ~KeyProvider() {} -}; + const std::vector ExportSharedKey(int key_index) const override { + webrtc::MutexLock lock(&mutex_); + auto it = keys_.find("shared"); + if(it == keys_.end()) { + return std::vector(); + } + auto key_set = it->second->GetKeySet(key_index); + if(key_set) { + return key_set->material; + } + return std::vector(); + } -class DefaultKeyProviderImpl : public KeyProvider { - public: - DefaultKeyProviderImpl(KeyProviderOptions options) : options_(options) {} - ~DefaultKeyProviderImpl() override = default; + const std::shared_ptr GetSharedKey(const std::string participant_id) override { + webrtc::MutexLock lock(&mutex_); + if(options_.shared_key && keys_.find("shared") != keys_.end()) { + auto shared_key_handler = keys_["shared"]; + if (keys_.find(participant_id) != keys_.end()) { + return keys_[participant_id]; + } else { + auto key_handler_clone = shared_key_handler->Clone(); + keys_[participant_id] = key_handler_clone; + return key_handler_clone; + } + } + return nullptr; + } /// Set the key at the given index. 
bool SetKey(const std::string participant_id, @@ -160,7 +256,7 @@ class DefaultKeyProviderImpl : public KeyProvider { webrtc::MutexLock lock(&mutex_); if (keys_.find(participant_id) == keys_.end()) { - keys_[participant_id] = std::make_shared(options_); + keys_[participant_id] = std::make_shared(this); } auto key_handler = keys_[participant_id]; @@ -171,6 +267,7 @@ class DefaultKeyProviderImpl : public KeyProvider { const std::shared_ptr GetKey( const std::string participant_id) const override { webrtc::MutexLock lock(&mutex_); + if (keys_.find(participant_id) == keys_.end()) { return nullptr; } @@ -180,28 +277,23 @@ class DefaultKeyProviderImpl : public KeyProvider { const std::vector RatchetKey(const std::string participant_id, int key_index) override { - webrtc::MutexLock lock(&mutex_); - if (keys_.find(participant_id) == keys_.end()) { - return std::vector(); + auto key_handler = GetKey(participant_id); + if (key_handler) { + return key_handler->RatchetKey(key_index); } - - return keys_[participant_id]->RatchetKey(key_index); + return std::vector(); } const std::vector ExportKey(const std::string participant_id, int key_index) const override { - webrtc::MutexLock lock(&mutex_); - if (keys_.find(participant_id) == keys_.end()) { - return std::vector(); + auto key_handler = GetKey(participant_id); + if (key_handler) { + auto key_set = key_handler->GetKeySet(key_index); + if (key_set) { + return key_set->material; + } } - - auto key_set = GetKey(participant_id); - - if (!key_set) { - return std::vector(); - } - - return key_set->GetKeySet(key_index)->material; + return std::vector(); } KeyProviderOptions& options() override { return options_; } diff --git a/sdk/android/api/org/webrtc/FrameCryptorKeyProvider.java b/sdk/android/api/org/webrtc/FrameCryptorKeyProvider.java index 1c89eac55b..af2e3ff651 100644 --- a/sdk/android/api/org/webrtc/FrameCryptorKeyProvider.java +++ b/sdk/android/api/org/webrtc/FrameCryptorKeyProvider.java @@ -30,6 +30,21 @@ public long getNativeKeyProvider() { return nativeKeyProvider; } + public boolean setSharedKey(int index, byte[] key) { + checkKeyProviderExists(); + return nativeSetSharedKey(nativeKeyProvider,index, key); + } + + public byte[] ratchetSharedKey(int index) { + checkKeyProviderExists(); + return nativeRatchetSharedKey(nativeKeyProvider, index); + } + + public byte[] exportSharedKey(int index) { + checkKeyProviderExists(); + return nativeExportSharedKey(nativeKeyProvider, index); + } + public boolean setKey(String participantId, int index, byte[] key) { checkKeyProviderExists(); return nativeSetKey(nativeKeyProvider, participantId, index, key); @@ -56,7 +71,12 @@ private void checkKeyProviderExists() { throw new IllegalStateException("FrameCryptorKeyProvider has been disposed."); } } - + private static native boolean nativeSetSharedKey( + long keyProviderPointer, int index, byte[] key); + private static native byte[] nativeRatchetSharedKey( + long keyProviderPointer, int index); + private static native byte[] nativeExportSharedKey( + long keyProviderPointer, int index); private static native boolean nativeSetKey( long keyProviderPointer, String participantId, int index, byte[] key); private static native byte[] nativeRatchetKey( diff --git a/sdk/android/src/jni/pc/frame_cryptor_key_provider.cc b/sdk/android/src/jni/pc/frame_cryptor_key_provider.cc index 2732693c0f..2665bdc9ab 100644 --- a/sdk/android/src/jni/pc/frame_cryptor_key_provider.cc +++ b/sdk/android/src/jni/pc/frame_cryptor_key_provider.cc @@ -33,6 +33,41 @@ ScopedJavaLocalRef 
NativeToJavaFrameCryptorKeyProvider( env, jlongFromPointer(key_provider.release())); } +static jboolean JNI_FrameCryptorKeyProvider_SetSharedKey( + JNIEnv* jni, + jlong j_key_provider, + jint j_index, + const base::android::JavaParamRef& j_key) { + auto key = JavaToNativeByteArray(jni, j_key); + return reinterpret_cast(j_key_provider) + ->SetSharedKey(j_index,std::vector(key.begin(), key.end())); +} + +static base::android::ScopedJavaLocalRef +JNI_FrameCryptorKeyProvider_RatchetSharedKey( + JNIEnv* env, + jlong keyProviderPointer, + jint j_index) { + auto key_provider = + reinterpret_cast(keyProviderPointer); + auto newKey = key_provider->RatchetSharedKey(j_index); + std::vector int8tKey = + std::vector(newKey.begin(), newKey.end()); + return NativeToJavaByteArray(env, rtc::ArrayView(int8tKey)); +} + +static base::android::ScopedJavaLocalRef +JNI_FrameCryptorKeyProvider_ExportSharedKey( + JNIEnv* env, + jlong keyProviderPointer, + jint j_index) { + auto key_provider = + reinterpret_cast(keyProviderPointer); + auto key = key_provider->ExportSharedKey(j_index); + std::vector int8tKey = std::vector(key.begin(), key.end()); + return NativeToJavaByteArray(env, rtc::ArrayView(int8tKey)); +} + static jboolean JNI_FrameCryptorKeyProvider_SetKey( JNIEnv* jni, jlong j_key_provider, diff --git a/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.h b/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.h index 276b2e730c..0122a31444 100644 --- a/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.h +++ b/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.h @@ -23,6 +23,12 @@ NS_ASSUME_NONNULL_BEGIN RTC_OBJC_EXPORT @interface RTC_OBJC_TYPE (RTCFrameCryptorKeyProvider) : NSObject +- (void)setSharedKey:(NSData *)key withIndex:(int)index; + +- (NSData *)ratchetSharedKey:(int)index; + +- (NSData *)exportSharedKey:(int)index; + - (void)setKey:(NSData *)key withIndex:(int)index forParticipant:(NSString *)participantId; - (NSData *)ratchetKey:(NSString *)participantId withIndex:(int)index; diff --git a/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.mm b/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.mm index c27e18975b..218a3fafb0 100644 --- a/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.mm +++ b/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.mm @@ -56,6 +56,22 @@ - (void)setKey:(NSData *)key withIndex:(int)index forParticipant:(NSString *)par std::vector((const uint8_t *)key.bytes, ((const uint8_t *)key.bytes) + key.length)); } +- (void)setSharedKey:(NSData *)key withIndex:(int)index { + _nativeKeyProvider->SetSharedKey( + index, + std::vector((const uint8_t *)key.bytes, ((const uint8_t *)key.bytes) + key.length)); +} + +- (NSData *)ratchetSharedKey:(int)index { + std::vector nativeKey = _nativeKeyProvider->RatchetSharedKey(index); + return [NSData dataWithBytes:nativeKey.data() length:nativeKey.size()]; +} + +- (NSData *)exportSharedKey:(int)index { + std::vector nativeKey = _nativeKeyProvider->ExportSharedKey(index); + return [NSData dataWithBytes:nativeKey.data() length:nativeKey.size()]; +} + - (NSData *)ratchetKey:(NSString *)participantId withIndex:(int)index { std::vector nativeKey = _nativeKeyProvider->RatchetKey([participantId stdString], index); return [NSData dataWithBytes:nativeKey.data() length:nativeKey.size()]; From f12f8d9637c0ceaac51db2fcb0919422fb40ddaa Mon Sep 17 00:00:00 2001 From: Hiroshi Horie <548776+hiroshihorie@users.noreply.github.com> Date: Fri, 1 Sep 2023 23:31:16 +0900 Subject: [PATCH 18/42] Fix memory leak when creating audio 
CMSampleBuffer #86 --- sdk/objc/api/peerconnection/RTCAudioTrack.mm | 3 +++ 1 file changed, 3 insertions(+) diff --git a/sdk/objc/api/peerconnection/RTCAudioTrack.mm b/sdk/objc/api/peerconnection/RTCAudioTrack.mm index 3d3af34893..87deaf8aa8 100644 --- a/sdk/objc/api/peerconnection/RTCAudioTrack.mm +++ b/sdk/objc/api/peerconnection/RTCAudioTrack.mm @@ -151,6 +151,9 @@ void OnData(const void *audio_data, 0, NULL, &buffer); + // format is no longer required + CFRelease(format); + if (status != 0) { NSLog(@"RTCAudioTrack: Failed to allocate sample buffer"); return; From 28e2021de9ddab1fa9f5c55e0fea080a271b7cac Mon Sep 17 00:00:00 2001 From: Hiroshi Horie <548776+hiroshihorie@users.noreply.github.com> Date: Wed, 13 Sep 2023 12:25:06 +0900 Subject: [PATCH 19/42] Fix camera rotation (#92) Use UIInterfaceOrientation instead of UIDeviceOrientation for RTCVideoFrame's rotation --- .../capturer/RTCCameraVideoCapturer.m | 55 ++++++++++--------- 1 file changed, 29 insertions(+), 26 deletions(-) diff --git a/sdk/objc/components/capturer/RTCCameraVideoCapturer.m b/sdk/objc/components/capturer/RTCCameraVideoCapturer.m index f7ae04e814..018c1d41cf 100644 --- a/sdk/objc/components/capturer/RTCCameraVideoCapturer.m +++ b/sdk/objc/components/capturer/RTCCameraVideoCapturer.m @@ -42,7 +42,7 @@ @implementation RTC_OBJC_TYPE (RTCCameraVideoCapturer) { FourCharCode _outputPixelFormat; RTCVideoRotation _rotation; #if TARGET_OS_IPHONE - UIDeviceOrientation _orientation; + UIInterfaceOrientation _orientation; BOOL _generatingOrientationNotifications; #endif } @@ -75,7 +75,7 @@ - (instancetype)initWithDelegate:(__weak id_generatingOrientationNotifications = YES; } + // Must be called on main + [self updateOrientation]; }); #endif @@ -183,7 +185,6 @@ - (void)startCaptureWithDevice:(AVCaptureDevice *)device return; } [self reconfigureCaptureSessionInput]; - [self updateOrientation]; [self updateDeviceCaptureFormat:format fps:fps]; [self updateVideoDataOutputPixelFormat:format]; [self.captureSession startRunning]; @@ -226,10 +227,7 @@ - (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHand #if TARGET_OS_IPHONE - (void)deviceOrientationDidChange:(NSNotification *)notification { - [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeCaptureSession - block:^{ - [self updateOrientation]; - }]; + [self updateOrientation]; } #endif @@ -265,22 +263,20 @@ - (void)captureOutput:(AVCaptureOutput *)captureOutput usingFrontCamera = AVCaptureDevicePositionFront == deviceInput.device.position; } switch (_orientation) { - case UIDeviceOrientationPortrait: + case UIInterfaceOrientationPortrait: _rotation = RTCVideoRotation_90; break; - case UIDeviceOrientationPortraitUpsideDown: + case UIInterfaceOrientationPortraitUpsideDown: _rotation = RTCVideoRotation_270; break; - case UIDeviceOrientationLandscapeLeft: - _rotation = usingFrontCamera ? RTCVideoRotation_180 : RTCVideoRotation_0; - break; - case UIDeviceOrientationLandscapeRight: + case UIInterfaceOrientationLandscapeLeft: _rotation = usingFrontCamera ? RTCVideoRotation_0 : RTCVideoRotation_180; break; - case UIDeviceOrientationFaceUp: - case UIDeviceOrientationFaceDown: - case UIDeviceOrientationUnknown: - // Ignore. + case UIInterfaceOrientationLandscapeRight: + _rotation = usingFrontCamera ? 
RTCVideoRotation_180 : RTCVideoRotation_0; + break; + case UIInterfaceOrientationUnknown: + _rotation = RTCVideoRotation_0; break; } #else @@ -495,8 +491,8 @@ - (void)updateDeviceCaptureFormat:(AVCaptureDeviceFormat *)format fps:(NSInteger @"updateDeviceCaptureFormat must be called on the capture queue."); @try { _currentDevice.activeFormat = format; - if(![NSStringFromClass([_currentDevice class]) isEqualToString:@"AVCaptureDALDevice"]) { - _currentDevice.activeVideoMinFrameDuration = CMTimeMake(1, fps); + if (![NSStringFromClass([_currentDevice class]) isEqualToString:@"AVCaptureDALDevice"]) { + _currentDevice.activeVideoMinFrameDuration = CMTimeMake(1, fps); } } @catch (NSException *exception) { RTCLogError(@"Failed to set active format!\n User info:%@", exception.userInfo); @@ -508,8 +504,8 @@ - (void)reconfigureCaptureSessionInput { NSAssert([RTC_OBJC_TYPE(RTCDispatcher) isOnQueueForType:RTCDispatcherTypeCaptureSession], @"reconfigureCaptureSessionInput must be called on the capture queue."); NSError *error = nil; - AVCaptureDeviceInput *input = - [AVCaptureDeviceInput deviceInputWithDevice:_currentDevice error:&error]; + AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:_currentDevice + error:&error]; if (!input) { RTCLogError(@"Failed to create front camera input: %@", error.localizedDescription); return; @@ -526,12 +522,19 @@ - (void)reconfigureCaptureSessionInput { [_captureSession commitConfiguration]; } -- (void)updateOrientation { - NSAssert([RTC_OBJC_TYPE(RTCDispatcher) isOnQueueForType:RTCDispatcherTypeCaptureSession], - @"updateOrientation must be called on the capture queue."); #if TARGET_OS_IPHONE - _orientation = [UIDevice currentDevice].orientation; -#endif +- (void)updateOrientation { + NSAssert([RTC_OBJC_TYPE(RTCDispatcher) isOnQueueForType:RTCDispatcherTypeMain], + @"statusBarOrientation must be called on the main queue."); + // statusBarOrientation must be called on the main queue + UIInterfaceOrientation newOrientation = [UIApplication sharedApplication].statusBarOrientation; + + [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeCaptureSession + block:^{ + // Must be called on the capture queue + self->_orientation = newOrientation; + }]; } +#endif @end From 98fe34e1f43515dc348dc3f441953a86f6aaa3a5 Mon Sep 17 00:00:00 2001 From: davidliu Date: Wed, 13 Sep 2023 16:41:03 +0900 Subject: [PATCH 20/42] Expose audio sample buffers for Android (#89) * Initial draft * Working impl * doc and cleanup * doc update --- sdk/android/BUILD.gn | 4 ++ sdk/android/api/org/webrtc/AudioTrack.java | 48 +++++++++++++++++++ .../api/org/webrtc/AudioTrackSink.java | 27 +++++++++++ .../webrtc/audio/JavaAudioDeviceModule.java | 16 ++++++- .../org/webrtc/audio/WebRtcAudioTrack.java | 21 ++++++-- sdk/android/src/jni/pc/audio_sink.cc | 39 +++++++++++++++ sdk/android/src/jni/pc/audio_sink.h | 41 ++++++++++++++++ sdk/android/src/jni/pc/audio_track.cc | 26 ++++++++++ 8 files changed, 218 insertions(+), 4 deletions(-) create mode 100644 sdk/android/api/org/webrtc/AudioTrackSink.java create mode 100644 sdk/android/src/jni/pc/audio_sink.cc create mode 100644 sdk/android/src/jni/pc/audio_sink.h diff --git a/sdk/android/BUILD.gn b/sdk/android/BUILD.gn index 596dbc9693..494b43f6d3 100644 --- a/sdk/android/BUILD.gn +++ b/sdk/android/BUILD.gn @@ -255,6 +255,7 @@ if (is_android) { "api/org/webrtc/AudioProcessingFactory.java", "api/org/webrtc/AudioSource.java", "api/org/webrtc/AudioTrack.java", + "api/org/webrtc/AudioTrackSink.java", 
"api/org/webrtc/CallSessionFileRotatingLogSink.java", "api/org/webrtc/CandidatePairChangeEvent.java", "api/org/webrtc/CryptoOptions.java", @@ -716,6 +717,8 @@ if (current_os == "linux" || is_android) { "src/jni/pc/add_ice_candidate_observer.cc", "src/jni/pc/add_ice_candidate_observer.h", "src/jni/pc/android_network_monitor.h", + "src/jni/pc/audio_sink.cc", + "src/jni/pc/audio_sink.h", "src/jni/pc/audio_track.cc", "src/jni/pc/call_session_file_rotating_log_sink.cc", "src/jni/pc/crypto_options.cc", @@ -1405,6 +1408,7 @@ if (current_os == "linux" || is_android) { sources = [ "api/org/webrtc/AddIceObserver.java", "api/org/webrtc/AudioTrack.java", + "api/org/webrtc/AudioTrackSink.java", "api/org/webrtc/CallSessionFileRotatingLogSink.java", "api/org/webrtc/CandidatePairChangeEvent.java", "api/org/webrtc/CryptoOptions.java", diff --git a/sdk/android/api/org/webrtc/AudioTrack.java b/sdk/android/api/org/webrtc/AudioTrack.java index ca745db634..b30e46cebc 100644 --- a/sdk/android/api/org/webrtc/AudioTrack.java +++ b/sdk/android/api/org/webrtc/AudioTrack.java @@ -10,8 +10,12 @@ package org.webrtc; +import java.util.IdentityHashMap; + /** Java wrapper for a C++ AudioTrackInterface */ public class AudioTrack extends MediaStreamTrack { + private final IdentityHashMap sinks = new IdentityHashMap(); + public AudioTrack(long nativeTrack) { super(nativeTrack); } @@ -23,10 +27,54 @@ public void setVolume(double volume) { nativeSetVolume(getNativeAudioTrack(), volume); } + /** + * Adds an AudioTrackSink to the track. This callback is only + * called for remote audio tracks. + * + * Repeated addSink calls will not add the sink multiple times. + */ + public void addSink(AudioTrackSink sink) { + if (sink == null) { + throw new IllegalArgumentException("The AudioTrackSink is not allowed to be null"); + } + if (!sinks.containsKey(sink)) { + final long nativeSink = nativeWrapSink(sink); + sinks.put(sink, nativeSink); + nativeAddSink(getNativeMediaStreamTrack(), nativeSink); + } + } + + /** + * Removes an AudioTrackSink from the track. + * + * If the AudioTrackSink was not attached to the track, this is a no-op. + */ + public void removeSink(AudioTrackSink sink) { + final Long nativeSink = sinks.remove(sink); + if (nativeSink != null) { + nativeRemoveSink(getNativeMediaStreamTrack(), nativeSink); + nativeFreeSink(nativeSink); + } + } + + @Override + public void dispose() { + for (long nativeSink : sinks.values()) { + nativeRemoveSink(getNativeMediaStreamTrack(), nativeSink); + nativeFreeSink(nativeSink); + } + sinks.clear(); + super.dispose(); + } + /** Returns a pointer to webrtc::AudioTrackInterface. */ long getNativeAudioTrack() { return getNativeMediaStreamTrack(); } private static native void nativeSetVolume(long track, double volume); + private static native void nativeAddSink(long track, long nativeSink); + private static native void nativeRemoveSink(long track, long nativeSink); + private static native long nativeWrapSink(AudioTrackSink sink); + private static native void nativeFreeSink(long sink); } diff --git a/sdk/android/api/org/webrtc/AudioTrackSink.java b/sdk/android/api/org/webrtc/AudioTrackSink.java new file mode 100644 index 0000000000..eca390f82c --- /dev/null +++ b/sdk/android/api/org/webrtc/AudioTrackSink.java @@ -0,0 +1,27 @@ +/* + * Copyright 2023 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import java.nio.ByteBuffer; + +/** + * Java version of rtc::AudioTrackSinkInterface. + */ +public interface AudioTrackSink { + /** + * Implementations should copy the audio data into a local copy if they wish + * to use the data after this function returns. + */ + @CalledByNative + void onData(ByteBuffer audioData, int bitsPerSample, int sampleRate, + int numberOfChannels, int numberOfFrames, + long absoluteCaptureTimestampMs); +} diff --git a/sdk/android/api/org/webrtc/audio/JavaAudioDeviceModule.java b/sdk/android/api/org/webrtc/audio/JavaAudioDeviceModule.java index d3d57602a8..fc3dd6836c 100644 --- a/sdk/android/api/org/webrtc/audio/JavaAudioDeviceModule.java +++ b/sdk/android/api/org/webrtc/audio/JavaAudioDeviceModule.java @@ -42,6 +42,7 @@ public static class Builder { private AudioTrackErrorCallback audioTrackErrorCallback; private AudioRecordErrorCallback audioRecordErrorCallback; private SamplesReadyCallback samplesReadyCallback; + private PlaybackSamplesReadyCallback playbackSamplesReadyCallback; private AudioTrackStateCallback audioTrackStateCallback; private AudioRecordStateCallback audioRecordStateCallback; private boolean useHardwareAcousticEchoCanceler = isBuiltInAcousticEchoCancelerSupported(); @@ -140,6 +141,14 @@ public Builder setSamplesReadyCallback(SamplesReadyCallback samplesReadyCallback return this; } + /** + * Set a callback to listen to the audio output passed to the AudioTrack. + */ + public Builder setPlaybackSamplesReadyCallback(PlaybackSamplesReadyCallback playbackSamplesReadyCallback) { + this.playbackSamplesReadyCallback = playbackSamplesReadyCallback; + return this; + } + /** * Set a callback to retrieve information from the AudioTrack on when audio starts and stop. */ @@ -258,7 +267,7 @@ public JavaAudioDeviceModule createAudioDeviceModule() { samplesReadyCallback, useHardwareAcousticEchoCanceler, useHardwareNoiseSuppressor); final WebRtcAudioTrack audioOutput = new WebRtcAudioTrack(context, audioManager, audioAttributes, audioTrackErrorCallback, - audioTrackStateCallback, useLowLatency, enableVolumeLogger); + audioTrackStateCallback, playbackSamplesReadyCallback, useLowLatency, enableVolumeLogger); return new JavaAudioDeviceModule(context, audioManager, audioInput, audioOutput, inputSampleRate, outputSampleRate, useStereoInput, useStereoOutput); } @@ -325,6 +334,11 @@ public static interface SamplesReadyCallback { void onWebRtcAudioRecordSamplesReady(AudioSamples samples); } + /** Called when new audio samples are ready. This should only be set for debug purposes */ + public static interface PlaybackSamplesReadyCallback { + void onWebRtcAudioTrackSamplesReady(AudioSamples samples); + } + /* AudioTrack */ // Audio playout/track error handler functions. 
public enum AudioTrackStartErrorCode { diff --git a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java index 2b34e34013..2f745cc62e 100644 --- a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java +++ b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java @@ -20,12 +20,14 @@ import android.os.Process; import androidx.annotation.Nullable; import java.nio.ByteBuffer; +import java.util.Arrays; import org.webrtc.CalledByNative; import org.webrtc.Logging; import org.webrtc.ThreadUtils; import org.webrtc.audio.JavaAudioDeviceModule.AudioTrackErrorCallback; import org.webrtc.audio.JavaAudioDeviceModule.AudioTrackStartErrorCode; import org.webrtc.audio.JavaAudioDeviceModule.AudioTrackStateCallback; +import org.webrtc.audio.JavaAudioDeviceModule.PlaybackSamplesReadyCallback; import org.webrtc.audio.LowLatencyAudioBufferManager; class WebRtcAudioTrack { @@ -76,6 +78,7 @@ class WebRtcAudioTrack { private final @Nullable AudioTrackErrorCallback errorCallback; private final @Nullable AudioTrackStateCallback stateCallback; + private final @Nullable PlaybackSamplesReadyCallback audioSamplesReadyCallback; /** * Audio thread which keeps calling AudioTrack.write() to stream audio. @@ -129,6 +132,17 @@ public void run() { reportWebRtcAudioTrackError("AudioTrack.write failed: " + bytesWritten); } } + + if (audioSamplesReadyCallback != null && keepAlive) { + // Copy the entire byte buffer array. The start of the byteBuffer is not necessarily + // at index 0. + byte[] data = Arrays.copyOfRange(byteBuffer.array(), byteBuffer.arrayOffset(), + sizeInBytes + byteBuffer.arrayOffset()); + audioSamplesReadyCallback.onWebRtcAudioTrackSamplesReady( + new JavaAudioDeviceModule.AudioSamples(audioTrack.getAudioFormat(), + audioTrack.getChannelCount(), audioTrack.getSampleRate(), data)); + } + if (useLowLatency) { bufferManager.maybeAdjustBufferSize(audioTrack); } @@ -154,13 +168,13 @@ public void stopThread() { @CalledByNative WebRtcAudioTrack(Context context, AudioManager audioManager) { this(context, audioManager, null /* audioAttributes */, null /* errorCallback */, - null /* stateCallback */, false /* useLowLatency */, true /* enableVolumeLogger */); + null /* stateCallback */, null /* audioSamplesReadyCallback */, false /* useLowLatency */, true /* enableVolumeLogger */); } WebRtcAudioTrack(Context context, AudioManager audioManager, @Nullable AudioAttributes audioAttributes, @Nullable AudioTrackErrorCallback errorCallback, - @Nullable AudioTrackStateCallback stateCallback, boolean useLowLatency, - boolean enableVolumeLogger) { + @Nullable AudioTrackStateCallback stateCallback, @Nullable PlaybackSamplesReadyCallback audioSamplesReadyCallback, + boolean useLowLatency, boolean enableVolumeLogger) { threadChecker.detachThread(); this.context = context; this.audioManager = audioManager; @@ -168,6 +182,7 @@ public void stopThread() { this.errorCallback = errorCallback; this.stateCallback = stateCallback; this.volumeLogger = enableVolumeLogger ? new VolumeLogger(audioManager) : null; + this.audioSamplesReadyCallback = audioSamplesReadyCallback; this.useLowLatency = useLowLatency; Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo()); } diff --git a/sdk/android/src/jni/pc/audio_sink.cc b/sdk/android/src/jni/pc/audio_sink.cc new file mode 100644 index 0000000000..5bd88c75f6 --- /dev/null +++ b/sdk/android/src/jni/pc/audio_sink.cc @@ -0,0 +1,39 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "sdk/android/src/jni/pc/audio_sink.h" + +#include "sdk/android/generated_peerconnection_jni/AudioTrackSink_jni.h" + +namespace webrtc { +namespace jni { + +AudioTrackSinkWrapper::AudioTrackSinkWrapper(JNIEnv* jni, const JavaRef& j_sink) + : j_sink_(jni, j_sink) {} + +AudioTrackSinkWrapper::~AudioTrackSinkWrapper() {} + +void AudioTrackSinkWrapper::OnData( + const void* audio_data, + int bits_per_sample, + int sample_rate, + size_t number_of_channels, + size_t number_of_frames, + absl::optional absolute_capture_timestamp_ms) { + JNIEnv* jni = AttachCurrentThreadIfNeeded(); + int length = (bits_per_sample / 8) * number_of_channels * number_of_frames; + ScopedJavaLocalRef audio_buffer = + NewDirectByteBuffer(jni, (void *) audio_data, length); + Java_AudioTrackSink_onData(jni, j_sink_, + audio_buffer, bits_per_sample, sample_rate, (int) number_of_channels, (int) number_of_frames, (absolute_capture_timestamp_ms ? absolute_capture_timestamp_ms.value() : 0)); +} + +} // namespace jni +} // namespace webrtc diff --git a/sdk/android/src/jni/pc/audio_sink.h b/sdk/android/src/jni/pc/audio_sink.h new file mode 100644 index 0000000000..809f460e0c --- /dev/null +++ b/sdk/android/src/jni/pc/audio_sink.h @@ -0,0 +1,41 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef SDK_ANDROID_SRC_JNI_AUDIO_TRACK_SINK_H_ +#define SDK_ANDROID_SRC_JNI_AUDIO_TRACK_SINK_H_ + +#include + +#include "api/media_stream_interface.h" +#include "sdk/android/src/jni/jni_helpers.h" + +namespace webrtc { +namespace jni { + +class AudioTrackSinkWrapper : public webrtc::AudioTrackSinkInterface { + public: + AudioTrackSinkWrapper(JNIEnv* jni, const JavaRef& j_sink); + ~AudioTrackSinkWrapper() override; + + private: + void OnData(const void* audio_data, + int bits_per_sample, + int sample_rate, + size_t number_of_channels, + size_t number_of_frames, + absl::optional absolute_capture_timestamp_ms) override; + + const ScopedJavaGlobalRef j_sink_; +}; + +} // namespace jni +} // namespace webrtc + +#endif // SDK_ANDROID_SRC_JNI_AUDIO_TRACK_SINK_H_ diff --git a/sdk/android/src/jni/pc/audio_track.cc b/sdk/android/src/jni/pc/audio_track.cc index 36ed43f1d4..df2d605893 100644 --- a/sdk/android/src/jni/pc/audio_track.cc +++ b/sdk/android/src/jni/pc/audio_track.cc @@ -9,6 +9,8 @@ */ #include "api/media_stream_interface.h" +#include "sdk/android/src/jni/pc/audio_sink.h" + #include "sdk/android/generated_peerconnection_jni/AudioTrack_jni.h" namespace webrtc { @@ -20,5 +22,29 @@ static void JNI_AudioTrack_SetVolume(JNIEnv*, jlong j_p, jdouble volume) { source->SetVolume(volume); } +static void JNI_AudioTrack_AddSink(JNIEnv* jni, + jlong j_native_track, + jlong j_native_sink) { + reinterpret_cast(j_native_track) + ->AddSink(reinterpret_cast(j_native_sink)); +} + +static void JNI_AudioTrack_RemoveSink(JNIEnv* jni, + jlong j_native_track, + jlong j_native_sink) { + reinterpret_cast(j_native_track) + ->RemoveSink(reinterpret_cast(j_native_sink)); +} + +static jlong JNI_AudioTrack_WrapSink(JNIEnv* jni, + const JavaParamRef& sink) { + return jlongFromPointer(new AudioTrackSinkWrapper(jni, sink)); +} + +static void JNI_AudioTrack_FreeSink(JNIEnv* jni, jlong j_native_sink) { + delete reinterpret_cast(j_native_sink); +} + + } // namespace jni } // namespace webrtc From 87977caac70dfc3ff5fc0e94c8ab76e20940767c Mon Sep 17 00:00:00 2001 From: cloudwebrtc Date: Sat, 9 Sep 2023 08:56:07 +0800 Subject: [PATCH 21/42] add failure tolerance for framecryptor. (#91) * add failure tolerance for framecryptor. * add failureTolerance for android/objc. * fix: make H264's unencrypted_bytes consistent with js-sdk. * add SetSifTrailer. 
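For illustration, a minimal Java sketch of how the failure tolerance and SIF trailer added by this patch could be used from the Android SDK; the class name, salt, window size, and trailer bytes are placeholder values, while createFrameCryptorKeyProvider() and setSifTrailer() follow the signatures in the diff below:

    import java.nio.charset.StandardCharsets;
    import org.webrtc.FrameCryptorFactory;
    import org.webrtc.FrameCryptorKeyProvider;

    class E2eeKeyProviderSketch {
      static FrameCryptorKeyProvider createProvider() {
        // Placeholder salt; real deployments should use an application-specific value.
        byte[] ratchetSalt = "demo-salt".getBytes(StandardCharsets.UTF_8);
        // failureTolerance = 10: the key is only marked invalid once the
        // decryption-failure counter exceeds ten; -1 keeps the key valid
        // regardless of how many frames fail to decrypt.
        FrameCryptorKeyProvider keyProvider =
            FrameCryptorFactory.createFrameCryptorKeyProvider(
                /* sharedKey= */ true, ratchetSalt, /* ratchetWindowSize= */ 16,
                /* uncryptedMagicBytes= */ new byte[0], /* failureTolerance= */ 10);
        // Frames carrying this trailer are treated as server-injected frames
        // and are passed through without decryption.
        keyProvider.setSifTrailer(new byte[] {(byte) 0xFA, (byte) 0xCE});
        return keyProvider;
      }
    }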
--- api/crypto/frame_crypto_transformer.cc | 12 +++---- api/crypto/frame_crypto_transformer.h | 35 +++++++++++++++---- .../api/org/webrtc/FrameCryptorFactory.java | 6 ++-- .../org/webrtc/FrameCryptorKeyProvider.java | 7 ++++ sdk/android/src/jni/pc/frame_cryptor.cc | 4 ++- .../src/jni/pc/frame_cryptor_key_provider.cc | 9 +++++ .../RTCFrameCryptorKeyProvider.h | 8 +++++ .../RTCFrameCryptorKeyProvider.mm | 19 ++++++++++ 8 files changed, 84 insertions(+), 16 deletions(-) diff --git a/api/crypto/frame_crypto_transformer.cc b/api/crypto/frame_crypto_transformer.cc index 9d8ee44c9e..9ef356ce0f 100644 --- a/api/crypto/frame_crypto_transformer.cc +++ b/api/crypto/frame_crypto_transformer.cc @@ -136,7 +136,7 @@ uint8_t get_unencrypted_bytes(webrtc::TransformableFrameInterface* frame, << "NonParameterSetNalu::payload_size: " << index.payload_size << ", nalu_type " << nalu_type << ", NaluIndex [" << idx++ << "] offset: " << index.payload_start_offset; - break; + return unencrypted_bytes; default: break; } @@ -219,7 +219,7 @@ int AesGcmEncryptDecrypt(EncryptOrDecrypt mode, } if (!ok) { - RTC_LOG(LS_ERROR) << "Failed to perform AES-GCM operation."; + RTC_LOG(LS_WARNING) << "Failed to perform AES-GCM operation."; return OperationError; } @@ -510,7 +510,7 @@ void FrameCryptorTransformer::decryptFrame( uint8_t key_index = frame_trailer[1]; if (ivLength != getIvSize()) { - RTC_LOG(LS_ERROR) << "FrameCryptorTransformer::decryptFrame() ivLength[" + RTC_LOG(LS_WARNING) << "FrameCryptorTransformer::decryptFrame() ivLength[" << static_cast(ivLength) << "] != getIvSize()[" << static_cast(getIvSize()) << "]"; if (last_dec_error_ != FrameCryptionState::kDecryptionFailed) { @@ -569,7 +569,7 @@ void FrameCryptorTransformer::decryptFrame( encrypted_payload, &buffer) == Success) { decryption_success = true; } else { - RTC_LOG(LS_ERROR) << "FrameCryptorTransformer::decryptFrame() failed"; + RTC_LOG(LS_WARNING) << "FrameCryptorTransformer::decryptFrame() failed"; std::shared_ptr ratcheted_key_set; auto currentKeyMaterial = key_set->material; if (key_provider_->options().ratchet_window_size > 0) { @@ -593,7 +593,7 @@ void FrameCryptorTransformer::decryptFrame( decryption_success = true; // success, so we set the new key key_handler->SetKeyFromMaterial(new_material, key_index); - key_handler->SetHasValidKey(true); + key_handler->SetHasValidKey(); if (last_dec_error_ != FrameCryptionState::kKeyRatcheted) { last_dec_error_ = FrameCryptionState::kKeyRatcheted; if (observer_) @@ -622,7 +622,7 @@ void FrameCryptorTransformer::decryptFrame( if (!decryption_success) { if (last_dec_error_ != FrameCryptionState::kDecryptionFailed) { last_dec_error_ = FrameCryptionState::kDecryptionFailed; - key_handler->SetHasValidKey(false); + key_handler->DecryptionFailure(); if (observer_) observer_->OnFrameCryptionStateChanged(participant_id_, last_dec_error_); diff --git a/api/crypto/frame_crypto_transformer.h b/api/crypto/frame_crypto_transformer.h index bd245729cf..c3042b2021 100644 --- a/api/crypto/frame_crypto_transformer.h +++ b/api/crypto/frame_crypto_transformer.h @@ -41,12 +41,14 @@ struct KeyProviderOptions { std::vector ratchet_salt; std::vector uncrypted_magic_bytes; int ratchet_window_size; - KeyProviderOptions() : shared_key(false), ratchet_window_size(0) {} + int failure_tolerance; + KeyProviderOptions() : shared_key(false), ratchet_window_size(0), failure_tolerance(-1) {} KeyProviderOptions(KeyProviderOptions& copy) : shared_key(copy.shared_key), ratchet_salt(copy.ratchet_salt), uncrypted_magic_bytes(copy.uncrypted_magic_bytes), 
- ratchet_window_size(copy.ratchet_window_size) {} + ratchet_window_size(copy.ratchet_window_size), + failure_tolerance(copy.failure_tolerance) {} }; class KeyProvider : public rtc::RefCountInterface { @@ -74,6 +76,8 @@ class KeyProvider : public rtc::RefCountInterface { virtual const std::vector ExportKey(const std::string participant_id, int key_index) const = 0; + virtual void SetSifTrailer(const std::vector trailer) = 0; + virtual KeyProviderOptions& options() = 0; protected: @@ -116,7 +120,7 @@ class ParticipantKeyHandler { } SetKeyFromMaterial(new_material, key_index != -1 ? key_index : current_key_index_); - SetHasValidKey(true); + SetHasValidKey(); return new_material; } @@ -127,7 +131,7 @@ class ParticipantKeyHandler { virtual void SetKey(std::vector password, int key_index) { SetKeyFromMaterial(password, key_index); - SetHasValidKey(true); + SetHasValidKey(); } std::vector RatchetKeyMaterial( @@ -156,9 +160,10 @@ class ParticipantKeyHandler { return has_valid_key_; } - void SetHasValidKey(bool has_valid_key) { + void SetHasValidKey() { webrtc::MutexLock lock(&mutex_); - has_valid_key_ = has_valid_key; + decryption_failure_count_ = 0; + has_valid_key_ = true; } void SetKeyFromMaterial(std::vector password, int key_index) { @@ -170,8 +175,21 @@ class ParticipantKeyHandler { DeriveKeys(password, key_provider_->options().ratchet_salt, 128); } + void DecryptionFailure() { + webrtc::MutexLock lock(&mutex_); + if (key_provider_->options().failure_tolerance < 0) { + return; + } + decryption_failure_count_ += 1; + + if (decryption_failure_count_ > key_provider_->options().failure_tolerance) { + has_valid_key_ = false; + } + } + private: bool has_valid_key_ = false; + int decryption_failure_count_ = 0; mutable webrtc::Mutex mutex_; int current_key_index_ = 0; KeyProvider* key_provider_; @@ -296,6 +314,11 @@ class DefaultKeyProviderImpl : public KeyProvider { return std::vector(); } + void SetSifTrailer(const std::vector trailer) override { + webrtc::MutexLock lock(&mutex_); + options_.uncrypted_magic_bytes = trailer; + } + KeyProviderOptions& options() override { return options_; } private: diff --git a/sdk/android/api/org/webrtc/FrameCryptorFactory.java b/sdk/android/api/org/webrtc/FrameCryptorFactory.java index 74df6a5b29..b70f61cc6b 100644 --- a/sdk/android/api/org/webrtc/FrameCryptorFactory.java +++ b/sdk/android/api/org/webrtc/FrameCryptorFactory.java @@ -18,8 +18,8 @@ public class FrameCryptorFactory { public static FrameCryptorKeyProvider createFrameCryptorKeyProvider( - boolean sharedKey, byte[] ratchetSalt, int ratchetWindowSize, byte[] uncryptedMagicBytes) { - return nativeCreateFrameCryptorKeyProvider(sharedKey, ratchetSalt, ratchetWindowSize, uncryptedMagicBytes); + boolean sharedKey, byte[] ratchetSalt, int ratchetWindowSize, byte[] uncryptedMagicBytes, int failureTolerance) { + return nativeCreateFrameCryptorKeyProvider(sharedKey, ratchetSalt, ratchetWindowSize, uncryptedMagicBytes, failureTolerance); } public static FrameCryptor createFrameCryptorForRtpSender(RtpSender rtpSender, @@ -40,5 +40,5 @@ private static native FrameCryptor nativeCreateFrameCryptorForRtpReceiver( long rtpReceiver, String participantId, int algorithm, long nativeFrameCryptorKeyProvider); private static native FrameCryptorKeyProvider nativeCreateFrameCryptorKeyProvider( - boolean sharedKey, byte[] ratchetSalt, int ratchetWindowSize, byte[] uncryptedMagicBytes); + boolean sharedKey, byte[] ratchetSalt, int ratchetWindowSize, byte[] uncryptedMagicBytes, int failureTolerance); } diff --git 
a/sdk/android/api/org/webrtc/FrameCryptorKeyProvider.java b/sdk/android/api/org/webrtc/FrameCryptorKeyProvider.java index af2e3ff651..6ab0cdddf5 100644 --- a/sdk/android/api/org/webrtc/FrameCryptorKeyProvider.java +++ b/sdk/android/api/org/webrtc/FrameCryptorKeyProvider.java @@ -60,6 +60,11 @@ public byte[] exportKey(String participantId, int index) { return nativeExportKey(nativeKeyProvider, participantId, index); } + public void setSifTrailer(byte[] sifTrailer) { + checkKeyProviderExists(); + nativeSetSifTrailer(nativeKeyProvider, sifTrailer); + } + public void dispose() { checkKeyProviderExists(); JniCommon.nativeReleaseRef(nativeKeyProvider); @@ -83,4 +88,6 @@ private static native byte[] nativeRatchetKey( long keyProviderPointer, String participantId, int index); private static native byte[] nativeExportKey( long keyProviderPointer, String participantId, int index); + private static native void nativeSetSifTrailer( + long keyProviderPointer, byte[] sifTrailer); } \ No newline at end of file diff --git a/sdk/android/src/jni/pc/frame_cryptor.cc b/sdk/android/src/jni/pc/frame_cryptor.cc index d02f0c62da..6c36ece9ee 100644 --- a/sdk/android/src/jni/pc/frame_cryptor.cc +++ b/sdk/android/src/jni/pc/frame_cryptor.cc @@ -171,7 +171,8 @@ JNI_FrameCryptorFactory_CreateFrameCryptorKeyProvider( jboolean j_shared, const base::android::JavaParamRef& j_ratchetSalt, jint j_ratchetWindowSize, - const base::android::JavaParamRef& j_uncryptedMagicBytes) { + const base::android::JavaParamRef& j_uncryptedMagicBytes, + jint j_failureTolerance) { auto ratchetSalt = JavaToNativeByteArray(env, j_ratchetSalt); KeyProviderOptions options; options.ratchet_salt = @@ -182,6 +183,7 @@ JNI_FrameCryptorFactory_CreateFrameCryptorKeyProvider( options.uncrypted_magic_bytes = std::vector(uncryptedMagicBytes.begin(), uncryptedMagicBytes.end()); options.shared_key = j_shared; + options.failure_tolerance = j_failureTolerance; return NativeToJavaFrameCryptorKeyProvider( env, rtc::make_ref_counted(options)); } diff --git a/sdk/android/src/jni/pc/frame_cryptor_key_provider.cc b/sdk/android/src/jni/pc/frame_cryptor_key_provider.cc index 2665bdc9ab..e41d16ed91 100644 --- a/sdk/android/src/jni/pc/frame_cryptor_key_provider.cc +++ b/sdk/android/src/jni/pc/frame_cryptor_key_provider.cc @@ -110,5 +110,14 @@ JNI_FrameCryptorKeyProvider_ExportKey( return NativeToJavaByteArray(env, rtc::ArrayView(int8tKey)); } +static void JNI_FrameCryptorKeyProvider_SetSifTrailer( + JNIEnv* jni, + jlong j_key_provider, + const base::android::JavaParamRef& j_trailer) { + auto trailer = JavaToNativeByteArray(jni, j_trailer); + reinterpret_cast(j_key_provider) + ->SetSifTrailer(std::vector(trailer.begin(), trailer.end())); +} + } // namespace jni } // namespace webrtc diff --git a/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.h b/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.h index 0122a31444..b10cf8c4b2 100644 --- a/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.h +++ b/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.h @@ -35,11 +35,19 @@ RTC_OBJC_EXPORT - (NSData *)exportKey:(NSString *)participantId withIndex:(int)index; +- (void)setSifTrailer:(NSData *)trailer; + - (instancetype)initWithRatchetSalt:(NSData *)salt ratchetWindowSize:(int)windowSize sharedKeyMode:(BOOL)sharedKey uncryptedMagicBytes:(nullable NSData *)uncryptedMagicBytes; +- (instancetype)initWithRatchetSalt:(NSData *)salt + ratchetWindowSize:(int)windowSize + sharedKeyMode:(BOOL)sharedKey + uncryptedMagicBytes:(nullable NSData *)uncryptedMagicBytes + 
failureTolerance:(int)failureTolerance; + @end NS_ASSUME_NONNULL_END diff --git a/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.mm b/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.mm index 218a3fafb0..1596b98016 100644 --- a/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.mm +++ b/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.mm @@ -34,12 +34,25 @@ - (instancetype)initWithRatchetSalt:(NSData *)salt ratchetWindowSize:(int)windowSize sharedKeyMode:(BOOL)sharedKey uncryptedMagicBytes:(NSData *)uncryptedMagicBytes { + return [self initWithRatchetSalt:salt + ratchetWindowSize:windowSize + sharedKeyMode:sharedKey + uncryptedMagicBytes:uncryptedMagicBytes + failureTolerance:-1]; +} + +- (instancetype)initWithRatchetSalt:(NSData *)salt + ratchetWindowSize:(int)windowSize + sharedKeyMode:(BOOL)sharedKey + uncryptedMagicBytes:(nullable NSData *)uncryptedMagicBytes + failureTolerance:(int)failureTolerance { if (self = [super init]) { webrtc::KeyProviderOptions options; options.ratchet_salt = std::vector((const uint8_t *)salt.bytes, ((const uint8_t *)salt.bytes) + salt.length); options.ratchet_window_size = windowSize; options.shared_key = sharedKey; + options.failure_tolerance = failureTolerance; if(uncryptedMagicBytes != nil) { options.uncrypted_magic_bytes = std::vector((const uint8_t *)uncryptedMagicBytes.bytes, ((const uint8_t *)uncryptedMagicBytes.bytes) + uncryptedMagicBytes.length); @@ -82,4 +95,10 @@ - (NSData *)exportKey:(NSString *)participantId withIndex:(int)index { return [NSData dataWithBytes:nativeKey.data() length:nativeKey.size()]; } +- (void)setSifTrailer:(NSData *)trailer { + _nativeKeyProvider->SetSifTrailer( + std::vector((const uint8_t *)trailer.bytes, + ((const uint8_t *)trailer.bytes) + trailer.length)); +} + @end From 7159977e07d39e3b277797b65839b80b989e1f6d Mon Sep 17 00:00:00 2001 From: cloudwebrtc Date: Fri, 8 Sep 2023 11:35:51 +0800 Subject: [PATCH 22/42] Add scalabilityMode support for AV1/VP9. (#90) * add scalabilityMode for AV1. * fix bug for scalability-mode. * add scalability-mode support for VP9. * wip: ScalabilityModes for android. * update. * wip. * wip. * wip. * wip. * done. * fix * update. 
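For illustration, a minimal Java sketch of requesting an SVC mode through the scalabilityMode field added to RtpParameters.Encoding below; the class and method names and the "L1T3" mode string are placeholders chosen for the example:

    import org.webrtc.RtpParameters;
    import org.webrtc.RtpSender;

    class ScalabilityModeSketch {
      // `sender` is assumed to be the RtpSender of a video track negotiated
      // with an AV1 or VP9 codec.
      static void requestSvc(RtpSender sender) {
        RtpParameters parameters = sender.getParameters();
        for (RtpParameters.Encoding encoding : parameters.encodings) {
          // One spatial layer, three temporal layers; other modes such as
          // "L3T3" use the same string format.
          encoding.scalabilityMode = "L1T3";
        }
        sender.setParameters(parameters);
      }
    }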
--- api/video_codecs/video_encoder_factory.h | 7 +---- .../webrtc/HardwareVideoEncoderFactory.java | 4 +-- .../api/org/webrtc/LibaomAv1Encoder.java | 7 +++++ .../api/org/webrtc/LibvpxVp9Encoder.java | 7 +++++ sdk/android/api/org/webrtc/RtpParameters.java | 13 ++++++++-- .../api/org/webrtc/VideoCodecInfo.java | 13 +++++++++- ...ndroidVideoDecoderInstrumentationTest.java | 2 +- .../DefaultVideoEncoderFactoryTest.java | 4 +-- .../SoftwareVideoDecoderFactoryTest.java | 3 ++- .../SoftwareVideoEncoderFactoryTest.java | 3 ++- .../org/webrtc/CodecsWrapperTestHelper.java | 3 ++- .../src/java/org/webrtc/H264Utils.java | 5 ++-- .../webrtc/MediaCodecVideoDecoderFactory.java | 4 +-- sdk/android/src/jni/libaom_av1_encoder.cc | 12 +++++++++ sdk/android/src/jni/pc/rtp_parameters.cc | 6 +++++ sdk/android/src/jni/video_codec_info.cc | 26 +++++++++++++++++-- sdk/android/src/jni/vp9_codec.cc | 16 ++++++++++++ .../RTCVideoCodecInfo+Private.mm | 17 ++++++++++-- sdk/objc/api/video_codec/RTCVideoEncoderAV1.h | 2 ++ .../api/video_codec/RTCVideoEncoderAV1.mm | 16 ++++++++++++ sdk/objc/api/video_codec/RTCVideoEncoderVP9.h | 2 ++ .../api/video_codec/RTCVideoEncoderVP9.mm | 15 +++++++++++ sdk/objc/base/RTCVideoCodecInfo.h | 6 +++++ sdk/objc/base/RTCVideoCodecInfo.m | 17 +++++++++++- .../RTCDefaultVideoEncoderFactory.m | 6 +++-- 25 files changed, 188 insertions(+), 28 deletions(-) diff --git a/api/video_codecs/video_encoder_factory.h b/api/video_codecs/video_encoder_factory.h index d28a2a4035..bf19540914 100644 --- a/api/video_codecs/video_encoder_factory.h +++ b/api/video_codecs/video_encoder_factory.h @@ -85,13 +85,8 @@ class VideoEncoderFactory { virtual CodecSupport QueryCodecSupport( const SdpVideoFormat& format, absl::optional scalability_mode) const { - // Default implementation, query for supported formats and check if the - // specified format is supported. Returns false if scalability_mode is - // specified. CodecSupport codec_support; - if (!scalability_mode) { - codec_support.is_supported = format.IsCodecInList(GetSupportedFormats()); - } + codec_support.is_supported = format.IsCodecInList(GetSupportedFormats()); return codec_support; } diff --git a/sdk/android/api/org/webrtc/HardwareVideoEncoderFactory.java b/sdk/android/api/org/webrtc/HardwareVideoEncoderFactory.java index d43fc27fa0..05c39a4b38 100644 --- a/sdk/android/api/org/webrtc/HardwareVideoEncoderFactory.java +++ b/sdk/android/api/org/webrtc/HardwareVideoEncoderFactory.java @@ -143,11 +143,11 @@ public VideoCodecInfo[] getSupportedCodecs() { // supported by the decoder. 
if (type == VideoCodecMimeType.H264 && isH264HighProfileSupported(codec)) { supportedCodecInfos.add(new VideoCodecInfo( - name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ true))); + name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ true), new ArrayList<>())); } supportedCodecInfos.add(new VideoCodecInfo( - name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ false))); + name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ false), new ArrayList<>())); } } diff --git a/sdk/android/api/org/webrtc/LibaomAv1Encoder.java b/sdk/android/api/org/webrtc/LibaomAv1Encoder.java index 569a719f44..1bb5ff9c5e 100644 --- a/sdk/android/api/org/webrtc/LibaomAv1Encoder.java +++ b/sdk/android/api/org/webrtc/LibaomAv1Encoder.java @@ -9,6 +9,7 @@ */ package org.webrtc; +import java.util.List; public class LibaomAv1Encoder extends WrappedNativeVideoEncoder { @Override @@ -22,4 +23,10 @@ public long createNativeVideoEncoder() { public boolean isHardwareEncoder() { return false; } + + static List scalabilityModes() { + return nativeGetSupportedScalabilityModes(); + } + + static native List nativeGetSupportedScalabilityModes(); } diff --git a/sdk/android/api/org/webrtc/LibvpxVp9Encoder.java b/sdk/android/api/org/webrtc/LibvpxVp9Encoder.java index 1211ae93fb..c1cc86a2f2 100644 --- a/sdk/android/api/org/webrtc/LibvpxVp9Encoder.java +++ b/sdk/android/api/org/webrtc/LibvpxVp9Encoder.java @@ -9,6 +9,7 @@ */ package org.webrtc; +import java.util.List; public class LibvpxVp9Encoder extends WrappedNativeVideoEncoder { @Override @@ -24,4 +25,10 @@ public boolean isHardwareEncoder() { } static native boolean nativeIsSupported(); + + static List scalabilityModes() { + return nativeGetSupportedScalabilityModes(); + } + + static native List nativeGetSupportedScalabilityModes(); } diff --git a/sdk/android/api/org/webrtc/RtpParameters.java b/sdk/android/api/org/webrtc/RtpParameters.java index 9ca8311610..4e3f106785 100644 --- a/sdk/android/api/org/webrtc/RtpParameters.java +++ b/sdk/android/api/org/webrtc/RtpParameters.java @@ -76,6 +76,8 @@ public static class Encoding { // If non-null, scale the width and height down by this factor for video. If null, // implementation default scaling factor will be used. @Nullable public Double scaleResolutionDownBy; + // Scalability modes are used to represent simulcast and SVC layers. + @Nullable public String scalabilityMode; // SSRC to be used by this encoding. // Can't be changed between getParameters/setParameters. 
public Long ssrc; @@ -93,8 +95,8 @@ public Encoding(String rid, boolean active, Double scaleResolutionDownBy) { @CalledByNative("Encoding") Encoding(String rid, boolean active, double bitratePriority, @Priority int networkPriority, Integer maxBitrateBps, Integer minBitrateBps, Integer maxFramerate, - Integer numTemporalLayers, Double scaleResolutionDownBy, Long ssrc, - boolean adaptiveAudioPacketTime) { + Integer numTemporalLayers, Double scaleResolutionDownBy, String scalabilityMode, + Long ssrc, boolean adaptiveAudioPacketTime) { this.rid = rid; this.active = active; this.bitratePriority = bitratePriority; @@ -104,6 +106,7 @@ public Encoding(String rid, boolean active, Double scaleResolutionDownBy) { this.maxFramerate = maxFramerate; this.numTemporalLayers = numTemporalLayers; this.scaleResolutionDownBy = scaleResolutionDownBy; + this.scalabilityMode = scalabilityMode; this.ssrc = ssrc; this.adaptiveAudioPacketTime = adaptiveAudioPacketTime; } @@ -160,6 +163,12 @@ Double getScaleResolutionDownBy() { return scaleResolutionDownBy; } + @Nullable + @CalledByNative("Encoding") + String getScalabilityMode() { + return scalabilityMode; + } + @CalledByNative("Encoding") Long getSsrc() { return ssrc; diff --git a/sdk/android/api/org/webrtc/VideoCodecInfo.java b/sdk/android/api/org/webrtc/VideoCodecInfo.java index 4f97cf74cf..e0f5153d47 100644 --- a/sdk/android/api/org/webrtc/VideoCodecInfo.java +++ b/sdk/android/api/org/webrtc/VideoCodecInfo.java @@ -14,6 +14,8 @@ import java.util.Arrays; import java.util.Locale; import java.util.Map; +import java.util.List; +import java.util.ArrayList; /** * Represent a video codec as encoded in SDP. @@ -34,13 +36,16 @@ public class VideoCodecInfo { public final String name; public final Map params; + public final List scalabilityModes; + @Deprecated public final int payload; @CalledByNative - public VideoCodecInfo(String name, Map params) { + public VideoCodecInfo(String name, Map params, List scalabilityModes) { this.payload = 0; this.name = name; this.params = params; + this.scalabilityModes = scalabilityModes; } @Deprecated @@ -48,6 +53,7 @@ public VideoCodecInfo(int payload, String name, Map params) { this.payload = payload; this.name = name; this.params = params; + this.scalabilityModes = new ArrayList<>(); } @Override @@ -83,4 +89,9 @@ String getName() { Map getParams() { return params; } + + @CalledByNative + List getScalabilityModes() { + return scalabilityModes; + } } diff --git a/sdk/android/instrumentationtests/src/org/webrtc/AndroidVideoDecoderInstrumentationTest.java b/sdk/android/instrumentationtests/src/org/webrtc/AndroidVideoDecoderInstrumentationTest.java index 6f448124e8..d9fadabfd9 100644 --- a/sdk/android/instrumentationtests/src/org/webrtc/AndroidVideoDecoderInstrumentationTest.java +++ b/sdk/android/instrumentationtests/src/org/webrtc/AndroidVideoDecoderInstrumentationTest.java @@ -48,7 +48,7 @@ public AndroidVideoDecoderInstrumentationTest(String codecName, boolean useEglCo if (codecName.equals("H264")) { this.codecType = H264Utils.DEFAULT_H264_BASELINE_PROFILE_CODEC; } else { - this.codecType = new VideoCodecInfo(codecName, new HashMap<>()); + this.codecType = new VideoCodecInfo(codecName, new HashMap<>(), new ArrayList<>()); } this.useEglContext = useEglContext; } diff --git a/sdk/android/instrumentationtests/src/org/webrtc/DefaultVideoEncoderFactoryTest.java b/sdk/android/instrumentationtests/src/org/webrtc/DefaultVideoEncoderFactoryTest.java index fe608c794e..1a9dd5fc38 100644 --- 
a/sdk/android/instrumentationtests/src/org/webrtc/DefaultVideoEncoderFactoryTest.java +++ b/sdk/android/instrumentationtests/src/org/webrtc/DefaultVideoEncoderFactoryTest.java @@ -47,7 +47,7 @@ public void setUp() { @SmallTest @Test public void getSupportedCodecs_hwVp8SameParamsAsSwVp8_oneVp8() { - VideoCodecInfo hwVp8Encoder = new VideoCodecInfo("VP8", new HashMap<>()); + VideoCodecInfo hwVp8Encoder = new VideoCodecInfo("VP8", new HashMap<>(), new ArrayList<>()); VideoEncoderFactory hwFactory = new CustomHardwareVideoEncoderFactory(hwVp8Encoder); DefaultVideoEncoderFactory defFactory = new DefaultVideoEncoderFactory(hwFactory); VideoCodecInfo[] supportedCodecs = defFactory.getSupportedCodecs(); @@ -62,7 +62,7 @@ public void getSupportedCodecs_hwVp8SameParamsAsSwVp8_oneVp8() { public void getSupportedCodecs_hwVp8WithDifferentParams_twoVp8() { VideoCodecInfo hwVp8Encoder = new VideoCodecInfo("VP8", new HashMap() { { put("param", "value"); } - }); + }, new ArrayList<>()); VideoEncoderFactory hwFactory = new CustomHardwareVideoEncoderFactory(hwVp8Encoder); DefaultVideoEncoderFactory defFactory = new DefaultVideoEncoderFactory(hwFactory); VideoCodecInfo[] supportedCodecs = defFactory.getSupportedCodecs(); diff --git a/sdk/android/instrumentationtests/src/org/webrtc/SoftwareVideoDecoderFactoryTest.java b/sdk/android/instrumentationtests/src/org/webrtc/SoftwareVideoDecoderFactoryTest.java index 8a5d9788ee..8be15624da 100644 --- a/sdk/android/instrumentationtests/src/org/webrtc/SoftwareVideoDecoderFactoryTest.java +++ b/sdk/android/instrumentationtests/src/org/webrtc/SoftwareVideoDecoderFactoryTest.java @@ -14,6 +14,7 @@ import androidx.annotation.Nullable; import androidx.test.filters.SmallTest; +import java.util.ArrayList; import java.util.HashMap; import org.junit.Before; import org.junit.Test; @@ -55,7 +56,7 @@ public void createDecoder_supportedCodec_returnsNotNull() { @Test public void createDecoder_unsupportedCodec_returnsNull() { VideoDecoderFactory factory = new SoftwareVideoDecoderFactory(); - VideoCodecInfo codec = new VideoCodecInfo("unsupported", new HashMap()); + VideoCodecInfo codec = new VideoCodecInfo("unsupported", new HashMap(), new ArrayList<>()); VideoDecoder decoder = factory.createDecoder(codec); assertThat(decoder).isNull(); } diff --git a/sdk/android/instrumentationtests/src/org/webrtc/SoftwareVideoEncoderFactoryTest.java b/sdk/android/instrumentationtests/src/org/webrtc/SoftwareVideoEncoderFactoryTest.java index 696b423cde..0fa4c4cc17 100644 --- a/sdk/android/instrumentationtests/src/org/webrtc/SoftwareVideoEncoderFactoryTest.java +++ b/sdk/android/instrumentationtests/src/org/webrtc/SoftwareVideoEncoderFactoryTest.java @@ -14,6 +14,7 @@ import androidx.annotation.Nullable; import androidx.test.filters.SmallTest; +import java.util.ArrayList; import java.util.HashMap; import org.junit.Before; import org.junit.Test; @@ -52,7 +53,7 @@ public void createEncoder_supportedCodec_returnsNotNull() { @Test public void createEncoder_unsupportedCodec_returnsNull() { VideoEncoderFactory factory = new SoftwareVideoEncoderFactory(); - VideoCodecInfo codec = new VideoCodecInfo("unsupported", new HashMap()); + VideoCodecInfo codec = new VideoCodecInfo("unsupported", new HashMap(), new ArrayList<>()); VideoEncoder encoder = factory.createEncoder(codec); assertThat(encoder).isNull(); } diff --git a/sdk/android/native_unittests/org/webrtc/CodecsWrapperTestHelper.java b/sdk/android/native_unittests/org/webrtc/CodecsWrapperTestHelper.java index 70151d3b78..72c5c64191 100644 --- 
a/sdk/android/native_unittests/org/webrtc/CodecsWrapperTestHelper.java +++ b/sdk/android/native_unittests/org/webrtc/CodecsWrapperTestHelper.java @@ -12,6 +12,7 @@ import java.util.HashMap; import java.util.Map; +import java.util.ArrayList; public class CodecsWrapperTestHelper { @CalledByNative @@ -20,7 +21,7 @@ public static VideoCodecInfo createTestVideoCodecInfo() { params.put( VideoCodecInfo.H264_FMTP_PROFILE_LEVEL_ID, VideoCodecInfo.H264_CONSTRAINED_BASELINE_3_1); - VideoCodecInfo codec_info = new VideoCodecInfo("H264", params); + VideoCodecInfo codec_info = new VideoCodecInfo("H264", params, new ArrayList<>()); return codec_info; } diff --git a/sdk/android/src/java/org/webrtc/H264Utils.java b/sdk/android/src/java/org/webrtc/H264Utils.java index abb79c6582..4bf292ee12 100644 --- a/sdk/android/src/java/org/webrtc/H264Utils.java +++ b/sdk/android/src/java/org/webrtc/H264Utils.java @@ -12,6 +12,7 @@ import java.util.Map; import java.util.HashMap; +import java.util.ArrayList; /** Container for static helper functions related to dealing with H264 codecs. */ class H264Utils { @@ -38,9 +39,9 @@ public static Map getDefaultH264Params(boolean isHighProfile) { } public static VideoCodecInfo DEFAULT_H264_BASELINE_PROFILE_CODEC = - new VideoCodecInfo("H264", getDefaultH264Params(/* isHighProfile= */ false)); + new VideoCodecInfo("H264", getDefaultH264Params(/* isHighProfile= */ false), new ArrayList<>()); public static VideoCodecInfo DEFAULT_H264_HIGH_PROFILE_CODEC = - new VideoCodecInfo("H264", getDefaultH264Params(/* isHighProfile= */ true)); + new VideoCodecInfo("H264", getDefaultH264Params(/* isHighProfile= */ true), new ArrayList<>()); public static boolean isSameH264Profile( Map params1, Map params2) { diff --git a/sdk/android/src/java/org/webrtc/MediaCodecVideoDecoderFactory.java b/sdk/android/src/java/org/webrtc/MediaCodecVideoDecoderFactory.java index 9a73bc49ff..d5b892279c 100644 --- a/sdk/android/src/java/org/webrtc/MediaCodecVideoDecoderFactory.java +++ b/sdk/android/src/java/org/webrtc/MediaCodecVideoDecoderFactory.java @@ -72,11 +72,11 @@ public VideoCodecInfo[] getSupportedCodecs() { String name = type.name(); if (type == VideoCodecMimeType.H264 && isH264HighProfileSupported(codec)) { supportedCodecInfos.add(new VideoCodecInfo( - name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ true))); + name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ true), new ArrayList<>())); } supportedCodecInfos.add(new VideoCodecInfo( - name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ false))); + name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ false), new ArrayList<>())); } } diff --git a/sdk/android/src/jni/libaom_av1_encoder.cc b/sdk/android/src/jni/libaom_av1_encoder.cc index 96e0dda169..32c1fc7993 100644 --- a/sdk/android/src/jni/libaom_av1_encoder.cc +++ b/sdk/android/src/jni/libaom_av1_encoder.cc @@ -15,6 +15,9 @@ #include "sdk/android/generated_libaom_av1_encoder_jni/LibaomAv1Encoder_jni.h" #include "sdk/android/src/jni/jni_helpers.h" +#include +#include + namespace webrtc { namespace jni { @@ -22,5 +25,14 @@ static jlong JNI_LibaomAv1Encoder_CreateEncoder(JNIEnv* jni) { return jlongFromPointer(webrtc::CreateLibaomAv1Encoder().release()); } +static webrtc::ScopedJavaLocalRef JNI_LibaomAv1Encoder_GetSupportedScalabilityModes(JNIEnv* jni) { + std::vector modes; + for (const auto scalability_mode : webrtc::kAllScalabilityModes) { + if (webrtc::ScalabilityStructureConfig(scalability_mode).has_value()) { + 
modes.push_back(std::string(webrtc::ScalabilityModeToString(scalability_mode))); + } + } + return NativeToJavaStringArray(jni, modes); +} } // namespace jni } // namespace webrtc diff --git a/sdk/android/src/jni/pc/rtp_parameters.cc b/sdk/android/src/jni/pc/rtp_parameters.cc index 4bd9ee0e1d..6feb6a631b 100644 --- a/sdk/android/src/jni/pc/rtp_parameters.cc +++ b/sdk/android/src/jni/pc/rtp_parameters.cc @@ -53,6 +53,7 @@ ScopedJavaLocalRef NativeToJavaRtpEncodingParameter( NativeToJavaInteger(env, encoding.max_framerate), NativeToJavaInteger(env, encoding.num_temporal_layers), NativeToJavaDouble(env, encoding.scale_resolution_down_by), + NativeToJavaString(env, encoding.scalability_mode), encoding.ssrc ? NativeToJavaLong(env, *encoding.ssrc) : nullptr, encoding.adaptive_ptime); } @@ -116,6 +117,11 @@ RtpEncodingParameters JavaToNativeRtpEncodingParameters( Java_Encoding_getScaleResolutionDownBy(jni, j_encoding_parameters); encoding.scale_resolution_down_by = JavaToNativeOptionalDouble(jni, j_scale_resolution_down_by); + ScopedJavaLocalRef j_scalability_mode = + Java_Encoding_getScalabilityMode(jni, j_encoding_parameters); + if (!IsNull(jni, j_scalability_mode)) { + encoding.scalability_mode = JavaToNativeString(jni,j_scalability_mode); + } encoding.adaptive_ptime = Java_Encoding_getAdaptivePTime(jni, j_encoding_parameters); ScopedJavaLocalRef j_ssrc = diff --git a/sdk/android/src/jni/video_codec_info.cc b/sdk/android/src/jni/video_codec_info.cc index a218a1d23f..a85dde67dc 100644 --- a/sdk/android/src/jni/video_codec_info.cc +++ b/sdk/android/src/jni/video_codec_info.cc @@ -13,15 +13,28 @@ #include "sdk/android/generated_video_jni/VideoCodecInfo_jni.h" #include "sdk/android/native_api/jni/java_types.h" #include "sdk/android/src/jni/jni_helpers.h" +#include "api/video_codecs/scalability_mode.h" +#include "modules/video_coding/svc/scalability_mode_util.h" namespace webrtc { namespace jni { SdpVideoFormat VideoCodecInfoToSdpVideoFormat(JNIEnv* jni, const JavaRef& j_info) { + std::vector params = + JavaToStdVectorStrings(jni, Java_VideoCodecInfo_getScalabilityModes(jni, j_info)); + absl::InlinedVector + scalability_modes; + for (auto mode : params) { + auto scalability_mode = ScalabilityModeFromString(mode); + if (scalability_mode != absl::nullopt) { + scalability_modes.push_back(*scalability_mode); + } + } return SdpVideoFormat( JavaToNativeString(jni, Java_VideoCodecInfo_getName(jni, j_info)), - JavaToNativeStringMap(jni, Java_VideoCodecInfo_getParams(jni, j_info))); + JavaToNativeStringMap(jni, Java_VideoCodecInfo_getParams(jni, j_info)), + scalability_modes); } ScopedJavaLocalRef SdpVideoFormatToVideoCodecInfo( @@ -29,8 +42,17 @@ ScopedJavaLocalRef SdpVideoFormatToVideoCodecInfo( const SdpVideoFormat& format) { ScopedJavaLocalRef j_params = NativeToJavaStringMap(jni, format.parameters); + webrtc::ScopedJavaLocalRef j_scalability_modes; + if (!format.scalability_modes.empty()) { + JavaListBuilder builder(jni); + for (auto mode : format.scalability_modes) { + std::string scalability_mode(ScalabilityModeToString(mode)); + builder.add(NativeToJavaString(jni, scalability_mode)); + } + j_scalability_modes = builder.java_list(); + } return Java_VideoCodecInfo_Constructor( - jni, NativeToJavaString(jni, format.name), j_params); + jni, NativeToJavaString(jni, format.name), j_params, j_scalability_modes); } } // namespace jni diff --git a/sdk/android/src/jni/vp9_codec.cc b/sdk/android/src/jni/vp9_codec.cc index ad9ca793ce..e5929ad2f4 100644 --- a/sdk/android/src/jni/vp9_codec.cc +++ 
b/sdk/android/src/jni/vp9_codec.cc @@ -10,11 +10,18 @@ #include +#include "absl/container/inlined_vector.h" +#include "api/video_codecs/sdp_video_format.h" #include "modules/video_coding/codecs/vp9/include/vp9.h" +#include "modules/video_coding/svc/create_scalability_structure.h" + #include "sdk/android/generated_libvpx_vp9_jni/LibvpxVp9Decoder_jni.h" #include "sdk/android/generated_libvpx_vp9_jni/LibvpxVp9Encoder_jni.h" #include "sdk/android/src/jni/jni_helpers.h" +#include +#include + namespace webrtc { namespace jni { @@ -34,5 +41,14 @@ static jboolean JNI_LibvpxVp9Decoder_IsSupported(JNIEnv* jni) { return !SupportedVP9Codecs().empty(); } +static webrtc::ScopedJavaLocalRef JNI_LibvpxVp9Encoder_GetSupportedScalabilityModes(JNIEnv* jni) { + std::vector modes; + for (const auto scalability_mode : webrtc::kAllScalabilityModes) { + if (webrtc::ScalabilityStructureConfig(scalability_mode).has_value()) { + modes.push_back(std::string(webrtc::ScalabilityModeToString(scalability_mode))); + } + } + return NativeToJavaStringArray(jni, modes); +} } // namespace jni } // namespace webrtc diff --git a/sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.mm b/sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.mm index 2eb8d366d2..88c2add1df 100644 --- a/sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.mm +++ b/sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.mm @@ -12,6 +12,11 @@ #import "helpers/NSString+StdString.h" +#include "absl/container/inlined_vector.h" +#include "api/video_codecs/sdp_video_format.h" +#include "modules/video_coding/svc/scalability_mode_util.h" +#include "modules/video_coding/svc/create_scalability_structure.h" + @implementation RTC_OBJC_TYPE (RTCVideoCodecInfo) (Private) @@ -31,8 +36,16 @@ - (instancetype)initWithNativeSdpVideoFormat : (webrtc::SdpVideoFormat)format { std::string value = [NSString stdStringForString:self.parameters[paramKey]]; parameters[key] = value; } - - return webrtc::SdpVideoFormat([NSString stdStringForString:self.name], parameters); + + absl::InlinedVector + scalability_modes; + for (NSString *scalabilityMode in self.scalabilityModes) { + auto scalability_mode = webrtc::ScalabilityModeFromString([NSString stdStringForString:scalabilityMode]); + if (scalability_mode != absl::nullopt) { + scalability_modes.push_back(*scalability_mode); + } + } + return webrtc::SdpVideoFormat([NSString stdStringForString:self.name], parameters, scalability_modes); } @end diff --git a/sdk/objc/api/video_codec/RTCVideoEncoderAV1.h b/sdk/objc/api/video_codec/RTCVideoEncoderAV1.h index 8aa55e4bfa..a900728049 100644 --- a/sdk/objc/api/video_codec/RTCVideoEncoderAV1.h +++ b/sdk/objc/api/video_codec/RTCVideoEncoderAV1.h @@ -24,4 +24,6 @@ RTC_OBJC_EXPORT + (bool)isSupported; ++ (NSArray *)scalabilityModes; + @end diff --git a/sdk/objc/api/video_codec/RTCVideoEncoderAV1.mm b/sdk/objc/api/video_codec/RTCVideoEncoderAV1.mm index d2fe65293b..f9256387dc 100644 --- a/sdk/objc/api/video_codec/RTCVideoEncoderAV1.mm +++ b/sdk/objc/api/video_codec/RTCVideoEncoderAV1.mm @@ -14,7 +14,13 @@ #import "RTCMacros.h" #import "RTCVideoEncoderAV1.h" #import "RTCWrappedNativeVideoEncoder.h" + +#import "helpers/NSString+StdString.h" + +#include "absl/container/inlined_vector.h" +#include "api/video_codecs/sdp_video_format.h" #include "modules/video_coding/codecs/av1/libaom_av1_encoder.h" +#include "modules/video_coding/svc/create_scalability_structure.h" @implementation RTC_OBJC_TYPE (RTCVideoEncoderAV1) @@ -28,4 +34,14 @@ + (bool)isSupported { return true; } ++ (NSArray 
*)scalabilityModes { + NSMutableArray *scalabilityModes = [NSMutableArray array]; + for (const auto scalability_mode : webrtc::kAllScalabilityModes) { + if (webrtc::ScalabilityStructureConfig(scalability_mode).has_value()) { + [scalabilityModes addObject:[NSString stringForAbslStringView:webrtc::ScalabilityModeToString(scalability_mode)]]; + } + } + return scalabilityModes; +} + @end diff --git a/sdk/objc/api/video_codec/RTCVideoEncoderVP9.h b/sdk/objc/api/video_codec/RTCVideoEncoderVP9.h index f7dac6117d..adfca0f9a4 100644 --- a/sdk/objc/api/video_codec/RTCVideoEncoderVP9.h +++ b/sdk/objc/api/video_codec/RTCVideoEncoderVP9.h @@ -24,4 +24,6 @@ RTC_OBJC_EXPORT + (bool)isSupported; ++ (NSArray *)scalabilityModes; + @end diff --git a/sdk/objc/api/video_codec/RTCVideoEncoderVP9.mm b/sdk/objc/api/video_codec/RTCVideoEncoderVP9.mm index 18a9353f7e..3db0629074 100644 --- a/sdk/objc/api/video_codec/RTCVideoEncoderVP9.mm +++ b/sdk/objc/api/video_codec/RTCVideoEncoderVP9.mm @@ -15,7 +15,12 @@ #import "RTCVideoEncoderVP9.h" #import "RTCWrappedNativeVideoEncoder.h" +#import "helpers/NSString+StdString.h" + +#include "absl/container/inlined_vector.h" +#include "api/video_codecs/sdp_video_format.h" #include "modules/video_coding/codecs/vp9/include/vp9.h" +#include "modules/video_coding/svc/create_scalability_structure.h" @implementation RTC_OBJC_TYPE (RTCVideoEncoderVP9) @@ -36,4 +41,14 @@ + (bool)isSupported { #endif } ++ (NSArray *)scalabilityModes { + NSMutableArray *scalabilityModes = [NSMutableArray array]; + for (const auto scalability_mode : webrtc::kAllScalabilityModes) { + if (webrtc::ScalabilityStructureConfig(scalability_mode).has_value()) { + [scalabilityModes addObject:[NSString stringForAbslStringView:webrtc::ScalabilityModeToString(scalability_mode)]]; + } + } + return scalabilityModes; +} + @end diff --git a/sdk/objc/base/RTCVideoCodecInfo.h b/sdk/objc/base/RTCVideoCodecInfo.h index fa28958f25..9da0c7aa81 100644 --- a/sdk/objc/base/RTCVideoCodecInfo.h +++ b/sdk/objc/base/RTCVideoCodecInfo.h @@ -26,10 +26,16 @@ RTC_OBJC_EXPORT parameters:(nullable NSDictionary *)parameters NS_DESIGNATED_INITIALIZER; +- (instancetype)initWithName:(NSString *)name + parameters:(nullable NSDictionary *)parameters + scalabilityModes:(nullable NSArray *)scalabilityModes + NS_DESIGNATED_INITIALIZER; + - (BOOL)isEqualToCodecInfo:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info; @property(nonatomic, readonly) NSString *name; @property(nonatomic, readonly) NSDictionary *parameters; +@property(nonatomic, readonly) NSArray *scalabilityModes; @end diff --git a/sdk/objc/base/RTCVideoCodecInfo.m b/sdk/objc/base/RTCVideoCodecInfo.m index ce26ae1de3..441ecde7c5 100644 --- a/sdk/objc/base/RTCVideoCodecInfo.m +++ b/sdk/objc/base/RTCVideoCodecInfo.m @@ -14,6 +14,7 @@ @implementation RTC_OBJC_TYPE (RTCVideoCodecInfo) @synthesize name = _name; @synthesize parameters = _parameters; +@synthesize scalabilityModes = _scalabilityModes; - (instancetype)initWithName:(NSString *)name { return [self initWithName:name parameters:nil]; @@ -24,15 +25,29 @@ - (instancetype)initWithName:(NSString *)name if (self = [super init]) { _name = name; _parameters = (parameters ? parameters : @{}); + _scalabilityModes = @[]; } return self; } +- (instancetype)initWithName:(NSString *)name + parameters:(nullable NSDictionary *)parameters + scalabilityModes:(nullable NSArray *)scalabilityModes { + if (self = [super init]) { + _name = name; + _parameters = (parameters ? parameters : @{}); + _scalabilityModes = (scalabilityModes ? 
scalabilityModes : @[]); + } + + return self; + } + - (BOOL)isEqualToCodecInfo:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info { if (!info || ![self.name isEqualToString:info.name] || - ![self.parameters isEqualToDictionary:info.parameters]) { + ![self.parameters isEqualToDictionary:info.parameters] || + ![self.scalabilityModes isEqualToArray:info.scalabilityModes]) { return NO; } return YES; diff --git a/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.m b/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.m index 8de55bde4a..61da5f4514 100644 --- a/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.m +++ b/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.m @@ -55,11 +55,13 @@ @implementation RTC_OBJC_TYPE (RTCDefaultVideoEncoderFactory) if ([RTC_OBJC_TYPE(RTCVideoEncoderVP9) isSupported]) { [result - addObject:[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecVp9Name]]; + addObject:[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecVp9Name parameters:nil scalabilityModes:[RTCVideoEncoderVP9 scalabilityModes]]]; } #if defined(RTC_USE_LIBAOM_AV1_ENCODER) - [result addObject:[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecAv1Name]]; + RTC_OBJC_TYPE(RTCVideoCodecInfo) *av1Info = + [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecAv1Name parameters:nil scalabilityModes:[RTCVideoEncoderAV1 scalabilityModes]]; + [result addObject:av1Info]; #endif return result; From 13fe8b2907a0db48c331c75a7631c7f5ef488238 Mon Sep 17 00:00:00 2001 From: CloudWebRTC Date: Wed, 20 Sep 2023 13:33:48 +0800 Subject: [PATCH 23/42] fix h264 freeze. (#93) * Improve e2ee, add setSharedKey to KeyProvider. * update. * reset has_valid_key after RatchetKey. * update. * clone key_handler from share_key for each participant. * add RatchetSharedKey and ExportSharedKey. * make KeyProvider::SetSharedKey only valid when KeyProviderOptions::shared_key == true. * remove unused enum. * Fix memory leak when creating audio CMSampleBuffer #86 * add scalabilityMode for AV1. * fix bug for scalability-mode. * add scalability-mode support for VP9. * add failure tolerance for framecryptor. * add failureTolerance for android/objc. * fix: make H264's unencrypted_bytes consistent with js-sdk. * wip: ScalabilityModes for android. * update. * wip. * wip. * wip. * wip. * Fix camera rotation (#92) Use UIInterfaceOrientation instead of UIDeviceOrientation for RTCVideoFrame's rotation * done. * fix * update. * Expose audio sample buffers for Android (#89) * Initial draft * Working impl * doc and cleanup * doc update * add SetSifTrailer. * fix h264 freeze. 
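The H.264 freeze fix below works by treating the ciphertext as RBSP data: after AES encryption, the payload, IV and trailer can contain byte runs such as 0x00 0x00 0x01 that an H.264 depacketizer would misread as start codes, so the encrypted body is emulation-prevention escaped (H264::WriteRbsp) before being appended to the unencrypted header, and unescaped again (H264::ParseRbsp) on the receive side when NeedsRbspUnescaping() finds a 0x00 0x00 0x03 sequence. The following is a minimal, self-contained sketch of that escaping; the helper names are made up for illustration and the actual patch relies on the existing H264 RBSP utilities.

#include <cstddef>
#include <cstdint>
#include <vector>

// Sketch only: insert an emulation-prevention byte (0x03) whenever two zero
// bytes are followed by a byte <= 0x03, so the encrypted payload cannot be
// mistaken for an H.264 start code. The patch does this with H264::WriteRbsp
// over (encrypted payload | IV | trailer).
std::vector<uint8_t> EscapeRbsp(const std::vector<uint8_t>& in) {
  std::vector<uint8_t> out;
  out.reserve(in.size());
  size_t zeros = 0;
  for (uint8_t byte : in) {
    if (zeros >= 2 && byte <= 0x03) {
      out.push_back(0x03);  // emulation-prevention byte
      zeros = 0;
    }
    out.push_back(byte);
    zeros = (byte == 0x00) ? zeros + 1 : 0;
  }
  return out;
}

// Mirror of the decrypt-side check: only unescape when a 0x00 0x00 0x03
// sequence (an emulation-prevention byte) is actually present.
bool NeedsUnescaping(const std::vector<uint8_t>& data) {
  for (size_t i = 0; i + 2 < data.size(); ++i) {
    if (data[i] == 0x00 && data[i + 1] == 0x00 && data[i + 2] == 0x03)
      return true;
  }
  return false;
}
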
--------- Co-authored-by: Hiroshi Horie <548776+hiroshihorie@users.noreply.github.com> Co-authored-by: davidliu --- api/crypto/frame_crypto_transformer.cc | 93 ++++++++++++++++++++------ 1 file changed, 72 insertions(+), 21 deletions(-) diff --git a/api/crypto/frame_crypto_transformer.cc b/api/crypto/frame_crypto_transformer.cc index 9ef356ce0f..c957220757 100644 --- a/api/crypto/frame_crypto_transformer.cc +++ b/api/crypto/frame_crypto_transformer.cc @@ -82,6 +82,28 @@ const EVP_CIPHER* GetAesCbcAlgorithmFromKeySize(size_t key_size_bytes) { } } +inline bool FrameIsH264(webrtc::TransformableFrameInterface* frame, + webrtc::FrameCryptorTransformer::MediaType type) { + switch (type) { + case webrtc::FrameCryptorTransformer::MediaType::kVideoFrame: { + auto videoFrame = + static_cast(frame); + return videoFrame->header().codec == + webrtc::VideoCodecType::kVideoCodecH264; + } + default: + return false; + } +} + +inline bool NeedsRbspUnescaping(const uint8_t* frameData, size_t frameSize) { + for (size_t i = 0; i < frameSize - 3; ++i) { + if (frameData[i] == 0 && frameData[i + 1] == 0 && frameData[i + 2] == 3) + return true; + } + return false; +} + std::string to_uint8_list(const uint8_t* data, int len) { std::stringstream ss; ss << "["; @@ -395,26 +417,41 @@ void FrameCryptorTransformer::encryptFrame( key_set->encryption_key, iv, frame_header, payload, &buffer) == Success) { rtc::Buffer encrypted_payload(buffer.data(), buffer.size()); + rtc::Buffer tag(encrypted_payload.data() + encrypted_payload.size() - 16, + 16); + rtc::Buffer data_without_header; + data_without_header.AppendData(encrypted_payload); + data_without_header.AppendData(iv); + data_without_header.AppendData(frame_trailer); + rtc::Buffer data_out; data_out.AppendData(frame_header); - data_out.AppendData(encrypted_payload); - data_out.AppendData(iv); - data_out.AppendData(frame_trailer); - RTC_CHECK_EQ(data_out.size(), frame_header.size() + - encrypted_payload.size() + iv.size() + - frame_trailer.size()); + if (FrameIsH264(frame.get(), type_)) { + H264::WriteRbsp(data_without_header.data(),data_without_header.size(), &data_out); + } else { + data_out.AppendData(data_without_header); + RTC_CHECK_EQ(data_out.size(), frame_header.size() + + encrypted_payload.size() + iv.size() + + frame_trailer.size()); + } frame->SetData(data_out); - RTC_LOG(LS_INFO) << "FrameCryptorTransformer::encryptFrame() ivLength=" - << static_cast(iv.size()) << " unencrypted_bytes=" + RTC_LOG(LS_INFO) << "FrameCryptorTransformer::encryptFrame() " + << "frame length = " << static_cast(date_in.size()) + << " encrypted_length = " + << static_cast(data_out.size()) + << " ivLength=" << static_cast(iv.size()) + << " unencrypted_bytes=" << static_cast(unencrypted_bytes) + << " tag=" << to_hex(tag.data(), tag.size()) << " key_index=" << static_cast(key_index_) << " aesKey=" << to_hex(key_set->encryption_key.data(), key_set->encryption_key.size()) << " iv=" << to_hex(iv.data(), iv.size()); + if (last_enc_error_ != FrameCryptionState::kOk) { last_enc_error_ = FrameCryptionState::kOk; if (observer_) @@ -554,11 +591,34 @@ void FrameCryptorTransformer::decryptFrame( iv[i] = date_in[date_in.size() - 2 - ivLength + i]; } - rtc::Buffer encrypted_payload(date_in.size() - unencrypted_bytes - ivLength - - 2); - for (size_t i = unencrypted_bytes; i < date_in.size() - ivLength - 2; i++) { - encrypted_payload[i - unencrypted_bytes] = date_in[i]; + rtc::Buffer encrypted_buffer(date_in.size() - unencrypted_bytes); + for (size_t i = unencrypted_bytes; i < date_in.size(); i++) { + 
encrypted_buffer[i - unencrypted_bytes] = date_in[i]; + } + + if (FrameIsH264(frame.get(), type_) && + NeedsRbspUnescaping(encrypted_buffer.data(), encrypted_buffer.size())) { + encrypted_buffer.SetData(H264::ParseRbsp(encrypted_buffer.data(), encrypted_buffer.size())); + } + + rtc::Buffer encrypted_payload(encrypted_buffer.size() - ivLength - 2); + for (size_t i = 0; i < encrypted_payload.size(); i++) { + encrypted_payload[i] = encrypted_buffer[i]; } + + rtc::Buffer tag(encrypted_payload.data() + encrypted_payload.size() - 16, 16); + RTC_LOG(LS_INFO) << "FrameCryptorTransformer::decryptFrame() " + << " frame length = " << static_cast(date_in.size()) + << " ivLength=" << static_cast(iv.size()) + << " unencrypted_bytes=" + << static_cast(unencrypted_bytes) + << " tag=" << to_hex(tag.data(), tag.size()) + << " key_index=" << static_cast(key_index_) + << " aesKey=" + << to_hex(key_set->encryption_key.data(), + key_set->encryption_key.size()) + << " iv=" << to_hex(iv.data(), iv.size()); + std::vector buffer; int ratchet_count = 0; @@ -636,15 +696,6 @@ void FrameCryptorTransformer::decryptFrame( data_out.AppendData(payload); frame->SetData(data_out); - RTC_LOG(LS_INFO) << "FrameCryptorTransformer::decryptFrame() ivLength=" - << static_cast(ivLength) << " unencrypted_bytes=" - << static_cast(unencrypted_bytes) - << " key_index=" << static_cast(key_index_) - << " aesKey=" - << to_hex(key_set->encryption_key.data(), - key_set->encryption_key.size()) - << " iv=" << to_hex(iv.data(), iv.size()); - if (last_dec_error_ != FrameCryptionState::kOk) { last_dec_error_ = FrameCryptionState::kOk; if (observer_) From 6af5bfdf36dae99bee428d761b307f463858e8ef Mon Sep 17 00:00:00 2001 From: CloudWebRTC Date: Wed, 20 Sep 2023 13:34:14 +0800 Subject: [PATCH 24/42] Fix/send frame cryptor events from signaling thread (#95) * fix: send framecyrotor events from signaling thread. (WIP) * replace std::shared_ptr to rtc::scoped_refptr. * create framecryptor with signaling_thread. * null check. * fix. * fix. 
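The core of this change is that FrameCryptionState transitions are no longer reported inline from the encrypt/decrypt path: every call site funnels into onFrameCryptionStateChanged(), which grabs the ref-counted observer under the mutex and posts the callback to the signaling thread. The sketch below shows the shape of that pattern using std::mutex and std::shared_ptr, with an illustrative TaskRunner interface standing in for rtc::Thread / TaskQueueBase; it is an outline of the idea under those assumptions, not the library's API.

#include <functional>
#include <memory>
#include <mutex>
#include <string>

// Minimal stand-in for the thread the callbacks are posted to.
struct TaskRunner {
  virtual ~TaskRunner() = default;
  virtual void PostTask(std::function<void()> task) = 0;
};

enum class CryptionState { kOk, kMissingKey, kDecryptionFailed, kInternalError };

class Observer {
 public:
  virtual ~Observer() = default;
  virtual void OnStateChanged(const std::string& participant_id,
                              CryptionState state) = 0;
};

class StateNotifier {
 public:
  StateNotifier(TaskRunner* signaling_thread, std::string participant_id)
      : signaling_thread_(signaling_thread),
        participant_id_(std::move(participant_id)) {}

  void SetObserver(std::shared_ptr<Observer> observer) {
    std::lock_guard<std::mutex> lock(mutex_);
    observer_ = std::move(observer);
  }

  // Called from the encrypt/decrypt path; the observer itself runs later on
  // the signaling thread, so app callbacks never fire on the media path.
  void Notify(CryptionState state) {
    std::lock_guard<std::mutex> lock(mutex_);
    if (!observer_) return;
    signaling_thread_->PostTask(
        [observer = observer_, id = participant_id_, state] {
          observer->OnStateChanged(id, state);
        });
  }

 private:
  TaskRunner* const signaling_thread_;
  const std::string participant_id_;
  std::mutex mutex_;
  std::shared_ptr<Observer> observer_;
};
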
--- api/crypto/frame_crypto_transformer.cc | 58 +++++++++---------- api/crypto/frame_crypto_transformer.h | 53 ++++++++++------- .../api/org/webrtc/FrameCryptorFactory.java | 12 ++-- sdk/android/src/jni/pc/frame_cryptor.cc | 15 +++-- sdk/android/src/jni/pc/frame_cryptor.h | 3 +- sdk/objc/api/peerconnection/RTCFrameCryptor.h | 21 ++++--- .../api/peerconnection/RTCFrameCryptor.mm | 37 +++++++----- 7 files changed, 108 insertions(+), 91 deletions(-) diff --git a/api/crypto/frame_crypto_transformer.cc b/api/crypto/frame_crypto_transformer.cc index c957220757..d82edfb465 100644 --- a/api/crypto/frame_crypto_transformer.cc +++ b/api/crypto/frame_crypto_transformer.cc @@ -305,15 +305,16 @@ int AesEncryptDecrypt(EncryptOrDecrypt mode, return AesCbcEncryptDecrypt(mode, raw_key, iv, data, buffer); } } - namespace webrtc { FrameCryptorTransformer::FrameCryptorTransformer( + rtc::Thread* signaling_thread, const std::string participant_id, MediaType type, Algorithm algorithm, rtc::scoped_refptr key_provider) - : participant_id_(participant_id), + : signaling_thread_(signaling_thread), + participant_id_(participant_id), type_(type), algorithm_(algorithm), key_provider_(key_provider) { @@ -363,9 +364,7 @@ void FrameCryptorTransformer::encryptFrame( << "FrameCryptorTransformer::encryptFrame() sink_callback is NULL"; if (last_enc_error_ != FrameCryptionState::kInternalError) { last_enc_error_ = FrameCryptionState::kInternalError; - if (observer_) - observer_->OnFrameCryptionStateChanged(participant_id_, - last_enc_error_); + onFrameCryptionStateChanged(last_enc_error_); } return; } @@ -387,9 +386,7 @@ void FrameCryptorTransformer::encryptFrame( << participant_id_; if (last_enc_error_ != FrameCryptionState::kMissingKey) { last_enc_error_ = FrameCryptionState::kMissingKey; - if (observer_) - observer_->OnFrameCryptionStateChanged(participant_id_, - last_enc_error_); + onFrameCryptionStateChanged(last_enc_error_); } return; } @@ -454,17 +451,13 @@ void FrameCryptorTransformer::encryptFrame( if (last_enc_error_ != FrameCryptionState::kOk) { last_enc_error_ = FrameCryptionState::kOk; - if (observer_) - observer_->OnFrameCryptionStateChanged(participant_id_, - last_enc_error_); + onFrameCryptionStateChanged(last_enc_error_); } sink_callback->OnTransformedFrame(std::move(frame)); } else { if (last_enc_error_ != FrameCryptionState::kEncryptionFailed) { last_enc_error_ = FrameCryptionState::kEncryptionFailed; - if (observer_) - observer_->OnFrameCryptionStateChanged(participant_id_, - last_enc_error_); + onFrameCryptionStateChanged(last_enc_error_); } RTC_LOG(LS_ERROR) << "FrameCryptorTransformer::encryptFrame() failed"; } @@ -489,9 +482,7 @@ void FrameCryptorTransformer::decryptFrame( << "FrameCryptorTransformer::decryptFrame() sink_callback is NULL"; if (last_dec_error_ != FrameCryptionState::kInternalError) { last_dec_error_ = FrameCryptionState::kInternalError; - if (observer_) - observer_->OnFrameCryptionStateChanged(participant_id_, - last_dec_error_); + onFrameCryptionStateChanged(last_dec_error_); } return; } @@ -552,9 +543,7 @@ void FrameCryptorTransformer::decryptFrame( << static_cast(getIvSize()) << "]"; if (last_dec_error_ != FrameCryptionState::kDecryptionFailed) { last_dec_error_ = FrameCryptionState::kDecryptionFailed; - if (observer_) - observer_->OnFrameCryptionStateChanged(participant_id_, - last_dec_error_); + onFrameCryptionStateChanged(last_dec_error_); } return; } @@ -571,9 +560,7 @@ void FrameCryptorTransformer::decryptFrame( << participant_id_; if (last_dec_error_ != 
FrameCryptionState::kMissingKey) { last_dec_error_ = FrameCryptionState::kMissingKey; - if (observer_) - observer_->OnFrameCryptionStateChanged(participant_id_, - last_dec_error_); + onFrameCryptionStateChanged(last_dec_error_); } return; } @@ -630,7 +617,7 @@ void FrameCryptorTransformer::decryptFrame( decryption_success = true; } else { RTC_LOG(LS_WARNING) << "FrameCryptorTransformer::decryptFrame() failed"; - std::shared_ptr ratcheted_key_set; + rtc::scoped_refptr ratcheted_key_set; auto currentKeyMaterial = key_set->material; if (key_provider_->options().ratchet_window_size > 0) { while (ratchet_count < key_provider_->options().ratchet_window_size) { @@ -656,9 +643,7 @@ void FrameCryptorTransformer::decryptFrame( key_handler->SetHasValidKey(); if (last_dec_error_ != FrameCryptionState::kKeyRatcheted) { last_dec_error_ = FrameCryptionState::kKeyRatcheted; - if (observer_) - observer_->OnFrameCryptionStateChanged(participant_id_, - last_dec_error_); + onFrameCryptionStateChanged(last_dec_error_); } break; } @@ -683,9 +668,7 @@ void FrameCryptorTransformer::decryptFrame( if (last_dec_error_ != FrameCryptionState::kDecryptionFailed) { last_dec_error_ = FrameCryptionState::kDecryptionFailed; key_handler->DecryptionFailure(); - if (observer_) - observer_->OnFrameCryptionStateChanged(participant_id_, - last_dec_error_); + onFrameCryptionStateChanged(last_dec_error_); } return; } @@ -698,12 +681,23 @@ void FrameCryptorTransformer::decryptFrame( if (last_dec_error_ != FrameCryptionState::kOk) { last_dec_error_ = FrameCryptionState::kOk; - if (observer_) - observer_->OnFrameCryptionStateChanged(participant_id_, last_dec_error_); + onFrameCryptionStateChanged(last_dec_error_); } sink_callback->OnTransformedFrame(std::move(frame)); } +void FrameCryptorTransformer::onFrameCryptionStateChanged(FrameCryptionState state) { + webrtc::MutexLock lock(&mutex_); + if(observer_) { + RTC_DCHECK(signaling_thread_ != nullptr); + signaling_thread_->PostTask( + [observer = observer_, state = state, participant_id = participant_id_]() mutable { + observer->OnFrameCryptionStateChanged(participant_id, state); + } + ); + } +} + rtc::Buffer FrameCryptorTransformer::makeIv(uint32_t ssrc, uint32_t timestamp) { uint32_t send_count = 0; if (send_counts_.find(ssrc) == send_counts_.end()) { diff --git a/api/crypto/frame_crypto_transformer.h b/api/crypto/frame_crypto_transformer.h index c3042b2021..3be78567c2 100644 --- a/api/crypto/frame_crypto_transformer.h +++ b/api/crypto/frame_crypto_transformer.h @@ -20,6 +20,8 @@ #include #include "api/frame_transformer_interface.h" +#include "api/task_queue/pending_task_safety_flag.h" +#include "api/task_queue/task_queue_base.h" #include "rtc_base/buffer.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/system/rtc_export.h" @@ -56,7 +58,7 @@ class KeyProvider : public rtc::RefCountInterface { virtual bool SetSharedKey(int key_index, std::vector key) = 0; - virtual const std::shared_ptr GetSharedKey(const std::string participant_id) = 0; + virtual const rtc::scoped_refptr GetSharedKey(const std::string participant_id) = 0; virtual const std::vector RatchetSharedKey(int key_index) = 0; @@ -66,7 +68,7 @@ class KeyProvider : public rtc::RefCountInterface { int key_index, std::vector key) = 0; - virtual const std::shared_ptr GetKey( + virtual const rtc::scoped_refptr GetKey( const std::string participant_id) const = 0; virtual const std::vector RatchetKey( @@ -84,9 +86,9 @@ class KeyProvider : public rtc::RefCountInterface { virtual ~KeyProvider() {} }; -class 
ParticipantKeyHandler { +class ParticipantKeyHandler : public rtc::RefCountInterface { public: - struct KeySet { + struct KeySet : public rtc::RefCountInterface { std::vector material; std::vector encryption_key; KeySet(std::vector material, std::vector encryptionKey) @@ -99,8 +101,8 @@ class ParticipantKeyHandler { virtual ~ParticipantKeyHandler() = default; - std::shared_ptr Clone() { - auto clone = std::make_shared(key_provider_); + rtc::scoped_refptr Clone() { + auto clone = rtc::make_ref_counted(key_provider_); clone->crypto_key_ring_ = crypto_key_ring_; clone->current_key_index_ = current_key_index_; clone->has_valid_key_ = has_valid_key_; @@ -124,7 +126,7 @@ class ParticipantKeyHandler { return new_material; } - virtual std::shared_ptr GetKeySet(int key_index) { + virtual rtc::scoped_refptr GetKeySet(int key_index) { webrtc::MutexLock lock(&mutex_); return crypto_key_ring_[key_index != -1 ? key_index : current_key_index_]; } @@ -144,13 +146,13 @@ class ParticipantKeyHandler { return new_material; } - std::shared_ptr DeriveKeys(std::vector password, + rtc::scoped_refptr DeriveKeys(std::vector password, std::vector ratchet_salt, unsigned int optional_length_bits) { std::vector derived_key; if (DerivePBKDF2KeyFromRawKey(password, ratchet_salt, optional_length_bits, &derived_key) == 0) { - return std::make_shared(password, derived_key); + return rtc::make_ref_counted(password, derived_key); } return nullptr; } @@ -193,7 +195,7 @@ class ParticipantKeyHandler { mutable webrtc::Mutex mutex_; int current_key_index_ = 0; KeyProvider* key_provider_; - std::vector> crypto_key_ring_; + std::vector> crypto_key_ring_; }; class DefaultKeyProviderImpl : public KeyProvider { @@ -206,7 +208,7 @@ class DefaultKeyProviderImpl : public KeyProvider { webrtc::MutexLock lock(&mutex_); if(options_.shared_key) { if (keys_.find("shared") == keys_.end()) { - keys_["shared"] = std::make_shared(this); + keys_["shared"] = rtc::make_ref_counted(this); } auto key_handler = keys_["shared"]; @@ -252,7 +254,7 @@ class DefaultKeyProviderImpl : public KeyProvider { return std::vector(); } - const std::shared_ptr GetSharedKey(const std::string participant_id) override { + const rtc::scoped_refptr GetSharedKey(const std::string participant_id) override { webrtc::MutexLock lock(&mutex_); if(options_.shared_key && keys_.find("shared") != keys_.end()) { auto shared_key_handler = keys_["shared"]; @@ -274,7 +276,7 @@ class DefaultKeyProviderImpl : public KeyProvider { webrtc::MutexLock lock(&mutex_); if (keys_.find(participant_id) == keys_.end()) { - keys_[participant_id] = std::make_shared(this); + keys_[participant_id] = rtc::make_ref_counted(this); } auto key_handler = keys_[participant_id]; @@ -282,7 +284,7 @@ class DefaultKeyProviderImpl : public KeyProvider { return true; } - const std::shared_ptr GetKey( + const rtc::scoped_refptr GetKey( const std::string participant_id) const override { webrtc::MutexLock lock(&mutex_); @@ -324,7 +326,7 @@ class DefaultKeyProviderImpl : public KeyProvider { private: mutable webrtc::Mutex mutex_; KeyProviderOptions options_; - std::unordered_map> keys_; + std::unordered_map> keys_; }; enum FrameCryptionState { @@ -337,7 +339,7 @@ enum FrameCryptionState { kInternalError, }; -class FrameCryptorTransformerObserver { +class FrameCryptorTransformerObserver : public rtc::RefCountInterface { public: virtual void OnFrameCryptionStateChanged(const std::string participant_id, FrameCryptionState error) = 0; @@ -359,17 +361,23 @@ class RTC_EXPORT FrameCryptorTransformer kAesCbc, }; - explicit 
FrameCryptorTransformer(const std::string participant_id, + explicit FrameCryptorTransformer(rtc::Thread* signaling_thread, + const std::string participant_id, MediaType type, Algorithm algorithm, rtc::scoped_refptr key_provider); - virtual void SetFrameCryptorTransformerObserver( - FrameCryptorTransformerObserver* observer) { + virtual void RegisterFrameCryptorTransformerObserver( + rtc::scoped_refptr observer) { webrtc::MutexLock lock(&mutex_); observer_ = observer; } + virtual void UnRegisterFrameCryptorTransformerObserver() { + webrtc::MutexLock lock(&mutex_); + observer_ = nullptr; + } + virtual void SetKeyIndex(int index) { webrtc::MutexLock lock(&mutex_); key_index_ = index; @@ -417,10 +425,12 @@ class RTC_EXPORT FrameCryptorTransformer private: void encryptFrame(std::unique_ptr frame); void decryptFrame(std::unique_ptr frame); + void onFrameCryptionStateChanged(FrameCryptionState error); rtc::Buffer makeIv(uint32_t ssrc, uint32_t timestamp); uint8_t getIvSize(); private: + TaskQueueBase* const signaling_thread_; std::string participant_id_; mutable webrtc::Mutex mutex_; mutable webrtc::Mutex sink_mutex_; @@ -433,10 +443,9 @@ class RTC_EXPORT FrameCryptorTransformer int key_index_ = 0; std::map send_counts_; rtc::scoped_refptr key_provider_; - FrameCryptorTransformerObserver* observer_ = nullptr; - std::unique_ptr thread_; + rtc::scoped_refptr observer_; FrameCryptionState last_enc_error_ = FrameCryptionState::kNew; - FrameCryptionState last_dec_error_ = FrameCryptionState::kNew; + FrameCryptionState last_dec_error_ = FrameCryptionState::kNew; }; } // namespace webrtc diff --git a/sdk/android/api/org/webrtc/FrameCryptorFactory.java b/sdk/android/api/org/webrtc/FrameCryptorFactory.java index b70f61cc6b..1ade0b6c9e 100644 --- a/sdk/android/api/org/webrtc/FrameCryptorFactory.java +++ b/sdk/android/api/org/webrtc/FrameCryptorFactory.java @@ -22,21 +22,21 @@ public static FrameCryptorKeyProvider createFrameCryptorKeyProvider( return nativeCreateFrameCryptorKeyProvider(sharedKey, ratchetSalt, ratchetWindowSize, uncryptedMagicBytes, failureTolerance); } - public static FrameCryptor createFrameCryptorForRtpSender(RtpSender rtpSender, + public static FrameCryptor createFrameCryptorForRtpSender(PeerConnectionFactory factory, RtpSender rtpSender, String participantId, FrameCryptorAlgorithm algorithm, FrameCryptorKeyProvider keyProvider) { - return nativeCreateFrameCryptorForRtpSender(rtpSender.getNativeRtpSender(), participantId, + return nativeCreateFrameCryptorForRtpSender(factory.getNativeOwnedFactoryAndThreads(),rtpSender.getNativeRtpSender(), participantId, algorithm.ordinal(), keyProvider.getNativeKeyProvider()); } - public static FrameCryptor createFrameCryptorForRtpReceiver(RtpReceiver rtpReceiver, + public static FrameCryptor createFrameCryptorForRtpReceiver(PeerConnectionFactory factory, RtpReceiver rtpReceiver, String participantId, FrameCryptorAlgorithm algorithm, FrameCryptorKeyProvider keyProvider) { - return nativeCreateFrameCryptorForRtpReceiver(rtpReceiver.getNativeRtpReceiver(), participantId, + return nativeCreateFrameCryptorForRtpReceiver(factory.getNativeOwnedFactoryAndThreads(), rtpReceiver.getNativeRtpReceiver(), participantId, algorithm.ordinal(), keyProvider.getNativeKeyProvider()); } - private static native FrameCryptor nativeCreateFrameCryptorForRtpSender( + private static native FrameCryptor nativeCreateFrameCryptorForRtpSender(long factory, long rtpSender, String participantId, int algorithm, long nativeFrameCryptorKeyProvider); - private static native FrameCryptor 
nativeCreateFrameCryptorForRtpReceiver( + private static native FrameCryptor nativeCreateFrameCryptorForRtpReceiver(long factory, long rtpReceiver, String participantId, int algorithm, long nativeFrameCryptorKeyProvider); private static native FrameCryptorKeyProvider nativeCreateFrameCryptorKeyProvider( diff --git a/sdk/android/src/jni/pc/frame_cryptor.cc b/sdk/android/src/jni/pc/frame_cryptor.cc index 6c36ece9ee..59710cd6d3 100644 --- a/sdk/android/src/jni/pc/frame_cryptor.cc +++ b/sdk/android/src/jni/pc/frame_cryptor.cc @@ -23,6 +23,7 @@ #include "sdk/android/native_api/jni/java_types.h" #include "sdk/android/src/jni/jni_helpers.h" #include "sdk/android/src/jni/pc/frame_cryptor_key_provider.h" +#include "sdk/android/src/jni/pc/owned_factory_and_threads.h" namespace webrtc { namespace jni { @@ -88,14 +89,14 @@ static jlong JNI_FrameCryptor_SetObserver( rtc::make_ref_counted(jni, j_observer); observer->AddRef(); reinterpret_cast(j_frame_cryptor_pointer) - ->SetFrameCryptorTransformerObserver(observer.get()); + ->RegisterFrameCryptorTransformerObserver(observer); return jlongFromPointer(observer.get()); } static void JNI_FrameCryptor_UnSetObserver(JNIEnv* jni, jlong j_frame_cryptor_pointer) { reinterpret_cast(j_frame_cryptor_pointer) - ->SetFrameCryptorTransformerObserver(nullptr); + ->UnRegisterFrameCryptorTransformerObserver(); } webrtc::FrameCryptorTransformer::Algorithm AlgorithmFromIndex(int index) { @@ -112,10 +113,13 @@ webrtc::FrameCryptorTransformer::Algorithm AlgorithmFromIndex(int index) { static base::android::ScopedJavaLocalRef JNI_FrameCryptorFactory_CreateFrameCryptorForRtpReceiver( JNIEnv* env, + jlong native_factory, jlong j_rtp_receiver_pointer, const base::android::JavaParamRef& participantId, jint j_algorithm_index, jlong j_key_provider) { + OwnedFactoryAndThreads* factory = + reinterpret_cast(native_factory); auto keyProvider = reinterpret_cast(j_key_provider); auto participant_id = JavaToStdString(env, participantId); @@ -127,7 +131,7 @@ JNI_FrameCryptorFactory_CreateFrameCryptorForRtpReceiver( : webrtc::FrameCryptorTransformer::MediaType::kVideoFrame; auto frame_crypto_transformer = rtc::scoped_refptr( - new webrtc::FrameCryptorTransformer( + new webrtc::FrameCryptorTransformer(factory->signaling_thread(), participant_id, mediaType, AlgorithmFromIndex(j_algorithm_index), rtc::scoped_refptr(keyProvider))); @@ -141,10 +145,13 @@ JNI_FrameCryptorFactory_CreateFrameCryptorForRtpReceiver( static base::android::ScopedJavaLocalRef JNI_FrameCryptorFactory_CreateFrameCryptorForRtpSender( JNIEnv* env, + jlong native_factory, jlong j_rtp_sender_pointer, const base::android::JavaParamRef& participantId, jint j_algorithm_index, jlong j_key_provider) { + OwnedFactoryAndThreads* factory = + reinterpret_cast(native_factory); auto keyProvider = reinterpret_cast(j_key_provider); auto rtpSender = reinterpret_cast(j_rtp_sender_pointer); @@ -155,7 +162,7 @@ JNI_FrameCryptorFactory_CreateFrameCryptorForRtpSender( : webrtc::FrameCryptorTransformer::MediaType::kVideoFrame; auto frame_crypto_transformer = rtc::scoped_refptr( - new webrtc::FrameCryptorTransformer( + new webrtc::FrameCryptorTransformer(factory->signaling_thread(), participant_id, mediaType, AlgorithmFromIndex(j_algorithm_index), rtc::scoped_refptr(keyProvider))); diff --git a/sdk/android/src/jni/pc/frame_cryptor.h b/sdk/android/src/jni/pc/frame_cryptor.h index 66645bb33c..dd0788d212 100644 --- a/sdk/android/src/jni/pc/frame_cryptor.h +++ b/sdk/android/src/jni/pc/frame_cryptor.h @@ -29,8 +29,7 @@ ScopedJavaLocalRef 
NativeToJavaFrameCryptor( JNIEnv* env, rtc::scoped_refptr cryptor); -class FrameCryptorObserverJni : public FrameCryptorTransformerObserver, - public rtc::RefCountInterface { +class FrameCryptorObserverJni : public FrameCryptorTransformerObserver { public: FrameCryptorObserverJni(JNIEnv* jni, const JavaRef& j_observer); ~FrameCryptorObserverJni() override; diff --git a/sdk/objc/api/peerconnection/RTCFrameCryptor.h b/sdk/objc/api/peerconnection/RTCFrameCryptor.h index 6712ca3688..5b58bf04d5 100644 --- a/sdk/objc/api/peerconnection/RTCFrameCryptor.h +++ b/sdk/objc/api/peerconnection/RTCFrameCryptor.h @@ -24,6 +24,7 @@ NS_ASSUME_NONNULL_BEGIN @class RTC_OBJC_TYPE(RTCRtpReceiver); @class RTC_OBJC_TYPE(RTCFrameCryptorKeyProvider); @class RTC_OBJC_TYPE(RTCFrameCryptor); +@class RTC_OBJC_TYPE(RTCPeerConnectionFactory); typedef NS_ENUM(NSUInteger, RTCCyrptorAlgorithm) { RTCCyrptorAlgorithmAesGcm = 0, @@ -60,15 +61,17 @@ RTC_OBJC_EXPORT @property(nonatomic, weak, nullable) id delegate; -- (instancetype)initWithRtpSender:(RTC_OBJC_TYPE(RTCRtpSender) *)sender - participantId:(NSString *)participantId - algorithm:(RTCCyrptorAlgorithm)algorithm - keyProvider:(RTC_OBJC_TYPE(RTCFrameCryptorKeyProvider) *)keyProvider; - -- (instancetype)initWithRtpReceiver:(RTC_OBJC_TYPE(RTCRtpReceiver) *)receiver - participantId:(NSString *)participantId - algorithm:(RTCCyrptorAlgorithm)algorithm - keyProvider:(RTC_OBJC_TYPE(RTCFrameCryptorKeyProvider) *)keyProvider; +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory + rtpSender:(RTC_OBJC_TYPE(RTCRtpSender) *)sender + participantId:(NSString *)participantId + algorithm:(RTCCyrptorAlgorithm)algorithm + keyProvider:(RTC_OBJC_TYPE(RTCFrameCryptorKeyProvider) *)keyProvider; + +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory + rtpReceiver:(RTC_OBJC_TYPE(RTCRtpReceiver) *)receiver + participantId:(NSString *)participantId + algorithm:(RTCCyrptorAlgorithm)algorithm + keyProvider:(RTC_OBJC_TYPE(RTCFrameCryptorKeyProvider) *)keyProvider; @end diff --git a/sdk/objc/api/peerconnection/RTCFrameCryptor.mm b/sdk/objc/api/peerconnection/RTCFrameCryptor.mm index 312331f72d..e0b93c62bb 100644 --- a/sdk/objc/api/peerconnection/RTCFrameCryptor.mm +++ b/sdk/objc/api/peerconnection/RTCFrameCryptor.mm @@ -18,6 +18,7 @@ #import "RTCFrameCryptorKeyProvider+Private.h" #import "RTCRtpReceiver+Private.h" #import "RTCRtpSender+Private.h" +#import "RTCPeerConnectionFactory+Private.h" #include @@ -94,7 +95,7 @@ @implementation RTC_OBJC_TYPE (RTCFrameCryptor) { const webrtc::RtpReceiverInterface *_receiver; NSString *_participantId; rtc::scoped_refptr frame_crypto_transformer_; - std::unique_ptr _observer; + rtc::scoped_refptr _observer; } @synthesize participantId = _participantId; @@ -111,50 +112,54 @@ @implementation RTC_OBJC_TYPE (RTCFrameCryptor) { } } -- (instancetype)initWithRtpSender:(RTC_OBJC_TYPE(RTCRtpSender) *)sender - participantId:(NSString *)participantId - algorithm:(RTCCyrptorAlgorithm)algorithm - keyProvider:(RTC_OBJC_TYPE(RTCFrameCryptorKeyProvider) *)keyProvider { +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory + rtpSender:(RTC_OBJC_TYPE(RTCRtpSender) *)sender + participantId:(NSString *)participantId + algorithm:(RTCCyrptorAlgorithm)algorithm + keyProvider:(RTC_OBJC_TYPE(RTCFrameCryptorKeyProvider) *)keyProvider { if (self = [super init]) { - _observer.reset(new webrtc::RTCFrameCryptorDelegateAdapter(self)); + _observer = rtc::make_ref_counted(self); _participantId = 
participantId; auto rtpSender = sender.nativeRtpSender; auto mediaType = rtpSender->track()->kind() == "audio" ? webrtc::FrameCryptorTransformer::MediaType::kAudioFrame : webrtc::FrameCryptorTransformer::MediaType::kVideoFrame; frame_crypto_transformer_ = rtc::scoped_refptr( - new webrtc::FrameCryptorTransformer([participantId stdString], + new webrtc::FrameCryptorTransformer(factory.signalingThread, + [participantId stdString], mediaType, [self algorithmFromEnum:algorithm], keyProvider.nativeKeyProvider)); rtpSender->SetEncoderToPacketizerFrameTransformer(frame_crypto_transformer_); frame_crypto_transformer_->SetEnabled(false); - frame_crypto_transformer_->SetFrameCryptorTransformerObserver(_observer.get()); + frame_crypto_transformer_->RegisterFrameCryptorTransformerObserver(_observer); } return self; } -- (instancetype)initWithRtpReceiver:(RTC_OBJC_TYPE(RTCRtpReceiver) *)receiver - participantId:(NSString *)participantId - algorithm:(RTCCyrptorAlgorithm)algorithm - keyProvider:(RTC_OBJC_TYPE(RTCFrameCryptorKeyProvider) *)keyProvider { +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory + rtpReceiver:(RTC_OBJC_TYPE(RTCRtpReceiver) *)receiver + participantId:(NSString *)participantId + algorithm:(RTCCyrptorAlgorithm)algorithm + keyProvider:(RTC_OBJC_TYPE(RTCFrameCryptorKeyProvider) *)keyProvider { if (self = [super init]) { - _observer.reset(new webrtc::RTCFrameCryptorDelegateAdapter(self)); + _observer = rtc::make_ref_counted(self); _participantId = participantId; auto rtpReceiver = receiver.nativeRtpReceiver; auto mediaType = rtpReceiver->track()->kind() == "audio" ? webrtc::FrameCryptorTransformer::MediaType::kAudioFrame : webrtc::FrameCryptorTransformer::MediaType::kVideoFrame; frame_crypto_transformer_ = rtc::scoped_refptr( - new webrtc::FrameCryptorTransformer([participantId stdString], + new webrtc::FrameCryptorTransformer(factory.signalingThread, + [participantId stdString], mediaType, [self algorithmFromEnum:algorithm], keyProvider.nativeKeyProvider)); rtpReceiver->SetDepacketizerToDecoderFrameTransformer(frame_crypto_transformer_); frame_crypto_transformer_->SetEnabled(false); - frame_crypto_transformer_->SetFrameCryptorTransformerObserver(_observer.get()); + frame_crypto_transformer_->RegisterFrameCryptorTransformerObserver(_observer); } return self; } @@ -176,7 +181,7 @@ - (void)setKeyIndex:(int)keyIndex { } - (void)dealloc { - frame_crypto_transformer_->SetFrameCryptorTransformerObserver(nullptr); + frame_crypto_transformer_->UnRegisterFrameCryptorTransformerObserver(); } @end From 06492146d42362cf9e40793bf518bd8dfc238f9d Mon Sep 17 00:00:00 2001 From: CloudWebRTC Date: Thu, 21 Sep 2023 13:03:18 +0800 Subject: [PATCH 25/42] more improvements for E2EE. (#96) * other improvements for E2EE. * fix log. * update. * fix. * revert changes. * clang-format. 
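This patch moves the actual encryption and decryption onto a dedicated worker thread owned by the transformer (Transform() now just posts the frame to it) and tightens failure handling: ParticipantKeyHandler::DecryptionFailure() now returns whether the failure budget is exhausted, so kDecryptionFailed is only surfaced after more than failure_tolerance consecutive failures. A minimal sketch of that gating logic follows, using std::mutex in place of webrtc::Mutex and with the success-side reset included for completeness; names are illustrative.

#include <mutex>

// Sketch of the failure-tolerance logic: the key is only invalidated (and
// kDecryptionFailed reported) once more than `failure_tolerance` consecutive
// decryption failures have occurred; a negative tolerance disables the gate.
class FailureGate {
 public:
  explicit FailureGate(int failure_tolerance)
      : failure_tolerance_(failure_tolerance) {}

  // Returns true when the caller should report kDecryptionFailed and stop
  // trusting the current key.
  bool OnDecryptionFailure() {
    std::lock_guard<std::mutex> lock(mutex_);
    if (failure_tolerance_ < 0) return false;  // tolerance disabled
    if (++failure_count_ > failure_tolerance_) {
      has_valid_key_ = false;
      return true;
    }
    return false;
  }

  void OnDecryptionSuccess() {
    std::lock_guard<std::mutex> lock(mutex_);
    failure_count_ = 0;
    has_valid_key_ = true;
  }

  bool has_valid_key() const {
    std::lock_guard<std::mutex> lock(mutex_);
    return has_valid_key_;
  }

 private:
  const int failure_tolerance_;
  mutable std::mutex mutex_;
  int failure_count_ = 0;
  bool has_valid_key_ = true;
};
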
--- api/crypto/frame_crypto_transformer.cc | 85 +++++++++++++++----------- api/crypto/frame_crypto_transformer.h | 79 ++++++++++++++---------- 2 files changed, 96 insertions(+), 68 deletions(-) diff --git a/api/crypto/frame_crypto_transformer.cc b/api/crypto/frame_crypto_transformer.cc index d82edfb465..3e888a2175 100644 --- a/api/crypto/frame_crypto_transformer.cc +++ b/api/crypto/frame_crypto_transformer.cc @@ -83,7 +83,7 @@ const EVP_CIPHER* GetAesCbcAlgorithmFromKeySize(size_t key_size_bytes) { } inline bool FrameIsH264(webrtc::TransformableFrameInterface* frame, - webrtc::FrameCryptorTransformer::MediaType type) { + webrtc::FrameCryptorTransformer::MediaType type) { switch (type) { case webrtc::FrameCryptorTransformer::MediaType::kVideoFrame: { auto videoFrame = @@ -314,11 +314,18 @@ FrameCryptorTransformer::FrameCryptorTransformer( Algorithm algorithm, rtc::scoped_refptr key_provider) : signaling_thread_(signaling_thread), + thread_(rtc::Thread::Create()), participant_id_(participant_id), type_(type), algorithm_(algorithm), key_provider_(key_provider) { RTC_DCHECK(key_provider_ != nullptr); + thread_->SetName("FrameCryptorTransformer", this); + thread_->Start(); +} + +FrameCryptorTransformer::~FrameCryptorTransformer() { + thread_->Stop(); } void FrameCryptorTransformer::Transform( @@ -333,10 +340,16 @@ void FrameCryptorTransformer::Transform( // do encrypt or decrypt here... switch (frame->GetDirection()) { case webrtc::TransformableFrameInterface::Direction::kSender: - encryptFrame(std::move(frame)); + RTC_DCHECK(thread_ != nullptr); + thread_->PostTask([frame = std::move(frame), this]() mutable { + encryptFrame(std::move(frame)); + }); break; case webrtc::TransformableFrameInterface::Direction::kReceiver: - decryptFrame(std::move(frame)); + RTC_DCHECK(thread_ != nullptr); + thread_->PostTask([frame = std::move(frame), this]() mutable { + decryptFrame(std::move(frame)); + }); break; case webrtc::TransformableFrameInterface::Direction::kUnknown: // do nothing @@ -371,6 +384,8 @@ void FrameCryptorTransformer::encryptFrame( rtc::ArrayView date_in = frame->GetData(); if (date_in.size() == 0 || !enabled_cryption) { + RTC_LOG(LS_WARNING) << "FrameCryptorTransformer::encryptFrame() " + "date_in.size() == 0 || enabled_cryption == false"; sink_callback->OnTransformedFrame(std::move(frame)); return; } @@ -425,7 +440,8 @@ void FrameCryptorTransformer::encryptFrame( data_out.AppendData(frame_header); if (FrameIsH264(frame.get(), type_)) { - H264::WriteRbsp(data_without_header.data(),data_without_header.size(), &data_out); + H264::WriteRbsp(data_without_header.data(), data_without_header.size(), + &data_out); } else { data_out.AppendData(data_without_header); RTC_CHECK_EQ(data_out.size(), frame_header.size() + @@ -490,34 +506,31 @@ void FrameCryptorTransformer::decryptFrame( rtc::ArrayView date_in = frame->GetData(); if (date_in.size() == 0 || !enabled_cryption) { + RTC_LOG(LS_WARNING) << "FrameCryptorTransformer::decryptFrame() " + "date_in.size() == 0 || enabled_cryption == false"; sink_callback->OnTransformedFrame(std::move(frame)); return; } auto uncrypted_magic_bytes = key_provider_->options().uncrypted_magic_bytes; if (uncrypted_magic_bytes.size() > 0 && - date_in.size() >= uncrypted_magic_bytes.size() + 1) { - auto tmp = - date_in.subview(date_in.size() - (uncrypted_magic_bytes.size() + 1), - uncrypted_magic_bytes.size()); - - if (uncrypted_magic_bytes == std::vector(tmp.begin(), tmp.end())) { + date_in.size() >= uncrypted_magic_bytes.size()) { + auto tmp = date_in.subview(date_in.size() 
- (uncrypted_magic_bytes.size()), + uncrypted_magic_bytes.size()); + auto data = std::vector(tmp.begin(), tmp.end()); + if (uncrypted_magic_bytes == data) { RTC_CHECK_EQ(tmp.size(), uncrypted_magic_bytes.size()); - auto frame_type = date_in.subview(date_in.size() - 1, 1); - RTC_CHECK_EQ(frame_type.size(), 1); - - RTC_LOG(LS_INFO) - << "FrameCryptorTransformer::uncrypted_magic_bytes( type " - << frame_type[0] << ", tmp " << to_hex(tmp.data(), tmp.size()) - << ", magic bytes " - << to_hex(uncrypted_magic_bytes.data(), uncrypted_magic_bytes.size()) - << ")"; + RTC_LOG(LS_INFO) << "FrameCryptorTransformer::uncrypted_magic_bytes( tmp " + << to_hex(tmp.data(), tmp.size()) << ", magic bytes " + << to_hex(uncrypted_magic_bytes.data(), + uncrypted_magic_bytes.size()) + << ")"; // magic bytes detected, this is a non-encrypted frame, skip frame // decryption. rtc::Buffer data_out; - data_out.AppendData(date_in.subview( - 0, date_in.size() - uncrypted_magic_bytes.size() - 1)); + data_out.AppendData( + date_in.subview(0, date_in.size() - uncrypted_magic_bytes.size())); frame->SetData(data_out); sink_callback->OnTransformedFrame(std::move(frame)); return; @@ -539,8 +552,8 @@ void FrameCryptorTransformer::decryptFrame( if (ivLength != getIvSize()) { RTC_LOG(LS_WARNING) << "FrameCryptorTransformer::decryptFrame() ivLength[" - << static_cast(ivLength) << "] != getIvSize()[" - << static_cast(getIvSize()) << "]"; + << static_cast(ivLength) << "] != getIvSize()[" + << static_cast(getIvSize()) << "]"; if (last_dec_error_ != FrameCryptionState::kDecryptionFailed) { last_dec_error_ = FrameCryptionState::kDecryptionFailed; onFrameCryptionStateChanged(last_dec_error_); @@ -585,7 +598,8 @@ void FrameCryptorTransformer::decryptFrame( if (FrameIsH264(frame.get(), type_) && NeedsRbspUnescaping(encrypted_buffer.data(), encrypted_buffer.size())) { - encrypted_buffer.SetData(H264::ParseRbsp(encrypted_buffer.data(), encrypted_buffer.size())); + encrypted_buffer.SetData( + H264::ParseRbsp(encrypted_buffer.data(), encrypted_buffer.size())); } rtc::Buffer encrypted_payload(encrypted_buffer.size() - ivLength - 2); @@ -665,10 +679,11 @@ void FrameCryptorTransformer::decryptFrame( } if (!decryption_success) { - if (last_dec_error_ != FrameCryptionState::kDecryptionFailed) { - last_dec_error_ = FrameCryptionState::kDecryptionFailed; - key_handler->DecryptionFailure(); - onFrameCryptionStateChanged(last_dec_error_); + if (key_handler->DecryptionFailure()) { + if (last_dec_error_ != FrameCryptionState::kDecryptionFailed) { + last_dec_error_ = FrameCryptionState::kDecryptionFailed; + onFrameCryptionStateChanged(last_dec_error_); + } } return; } @@ -686,15 +701,15 @@ void FrameCryptorTransformer::decryptFrame( sink_callback->OnTransformedFrame(std::move(frame)); } -void FrameCryptorTransformer::onFrameCryptionStateChanged(FrameCryptionState state) { +void FrameCryptorTransformer::onFrameCryptionStateChanged( + FrameCryptionState state) { webrtc::MutexLock lock(&mutex_); - if(observer_) { + if (observer_) { RTC_DCHECK(signaling_thread_ != nullptr); - signaling_thread_->PostTask( - [observer = observer_, state = state, participant_id = participant_id_]() mutable { - observer->OnFrameCryptionStateChanged(participant_id, state); - } - ); + signaling_thread_->PostTask([observer = observer_, state = state, + participant_id = participant_id_]() mutable { + observer->OnFrameCryptionStateChanged(participant_id, state); + }); } } diff --git a/api/crypto/frame_crypto_transformer.h b/api/crypto/frame_crypto_transformer.h index 
3be78567c2..f9027d08f7 100644 --- a/api/crypto/frame_crypto_transformer.h +++ b/api/crypto/frame_crypto_transformer.h @@ -44,7 +44,8 @@ struct KeyProviderOptions { std::vector uncrypted_magic_bytes; int ratchet_window_size; int failure_tolerance; - KeyProviderOptions() : shared_key(false), ratchet_window_size(0), failure_tolerance(-1) {} + KeyProviderOptions() + : shared_key(false), ratchet_window_size(0), failure_tolerance(-1) {} KeyProviderOptions(KeyProviderOptions& copy) : shared_key(copy.shared_key), ratchet_salt(copy.ratchet_salt), @@ -55,10 +56,10 @@ struct KeyProviderOptions { class KeyProvider : public rtc::RefCountInterface { public: - virtual bool SetSharedKey(int key_index, std::vector key) = 0; - virtual const rtc::scoped_refptr GetSharedKey(const std::string participant_id) = 0; + virtual const rtc::scoped_refptr GetSharedKey( + const std::string participant_id) = 0; virtual const std::vector RatchetSharedKey(int key_index) = 0; @@ -94,8 +95,10 @@ class ParticipantKeyHandler : public rtc::RefCountInterface { KeySet(std::vector material, std::vector encryptionKey) : material(material), encryption_key(encryptionKey) {} }; + public: - ParticipantKeyHandler(KeyProvider* key_provider) : key_provider_(key_provider) { + ParticipantKeyHandler(KeyProvider* key_provider) + : key_provider_(key_provider) { crypto_key_ring_.resize(KEYRING_SIZE); } @@ -116,7 +119,8 @@ class ParticipantKeyHandler : public rtc::RefCountInterface { } auto current_material = key_set->material; std::vector new_material; - if (DerivePBKDF2KeyFromRawKey(current_material, key_provider_->options().ratchet_salt, 256, + if (DerivePBKDF2KeyFromRawKey(current_material, + key_provider_->options().ratchet_salt, 256, &new_material) != 0) { return std::vector(); } @@ -139,7 +143,8 @@ class ParticipantKeyHandler : public rtc::RefCountInterface { std::vector RatchetKeyMaterial( std::vector current_material) { std::vector new_material; - if (DerivePBKDF2KeyFromRawKey(current_material, key_provider_->options().ratchet_salt, 256, + if (DerivePBKDF2KeyFromRawKey(current_material, + key_provider_->options().ratchet_salt, 256, &new_material) != 0) { return std::vector(); } @@ -147,8 +152,8 @@ class ParticipantKeyHandler : public rtc::RefCountInterface { } rtc::scoped_refptr DeriveKeys(std::vector password, - std::vector ratchet_salt, - unsigned int optional_length_bits) { + std::vector ratchet_salt, + unsigned int optional_length_bits) { std::vector derived_key; if (DerivePBKDF2KeyFromRawKey(password, ratchet_salt, optional_length_bits, &derived_key) == 0) { @@ -177,16 +182,19 @@ class ParticipantKeyHandler : public rtc::RefCountInterface { DeriveKeys(password, key_provider_->options().ratchet_salt, 128); } - void DecryptionFailure() { + bool DecryptionFailure() { webrtc::MutexLock lock(&mutex_); if (key_provider_->options().failure_tolerance < 0) { - return; + return false; } decryption_failure_count_ += 1; - if (decryption_failure_count_ > key_provider_->options().failure_tolerance) { + if (decryption_failure_count_ > + key_provider_->options().failure_tolerance) { has_valid_key_ = false; + return true; } + return false; } private: @@ -206,7 +214,7 @@ class DefaultKeyProviderImpl : public KeyProvider { /// Set the shared key. 
bool SetSharedKey(int key_index, std::vector key) override { webrtc::MutexLock lock(&mutex_); - if(options_.shared_key) { + if (options_.shared_key) { if (keys_.find("shared") == keys_.end()) { keys_["shared"] = rtc::make_ref_counted(this); } @@ -214,8 +222,8 @@ class DefaultKeyProviderImpl : public KeyProvider { auto key_handler = keys_["shared"]; key_handler->SetKey(key, key_index); - for(auto& key_pair : keys_) { - if(key_pair.first != "shared") { + for (auto& key_pair : keys_) { + if (key_pair.first != "shared") { key_pair.second->SetKey(key, key_index); } } @@ -227,13 +235,13 @@ class DefaultKeyProviderImpl : public KeyProvider { const std::vector RatchetSharedKey(int key_index) override { webrtc::MutexLock lock(&mutex_); auto it = keys_.find("shared"); - if(it == keys_.end()) { + if (it == keys_.end()) { return std::vector(); } auto new_key = it->second->RatchetKey(key_index); - if(options_.shared_key) { - for(auto& key_pair : keys_) { - if(key_pair.first != "shared") { + if (options_.shared_key) { + for (auto& key_pair : keys_) { + if (key_pair.first != "shared") { key_pair.second->SetKey(new_key, key_index); } } @@ -244,19 +252,20 @@ class DefaultKeyProviderImpl : public KeyProvider { const std::vector ExportSharedKey(int key_index) const override { webrtc::MutexLock lock(&mutex_); auto it = keys_.find("shared"); - if(it == keys_.end()) { + if (it == keys_.end()) { return std::vector(); } auto key_set = it->second->GetKeySet(key_index); - if(key_set) { + if (key_set) { return key_set->material; } return std::vector(); } - const rtc::scoped_refptr GetSharedKey(const std::string participant_id) override { + const rtc::scoped_refptr GetSharedKey( + const std::string participant_id) override { webrtc::MutexLock lock(&mutex_); - if(options_.shared_key && keys_.find("shared") != keys_.end()) { + if (options_.shared_key && keys_.find("shared") != keys_.end()) { auto shared_key_handler = keys_["shared"]; if (keys_.find(participant_id) != keys_.end()) { return keys_[participant_id]; @@ -276,7 +285,8 @@ class DefaultKeyProviderImpl : public KeyProvider { webrtc::MutexLock lock(&mutex_); if (keys_.find(participant_id) == keys_.end()) { - keys_[participant_id] = rtc::make_ref_counted(this); + keys_[participant_id] = + rtc::make_ref_counted(this); } auto key_handler = keys_[participant_id]; @@ -326,7 +336,8 @@ class DefaultKeyProviderImpl : public KeyProvider { private: mutable webrtc::Mutex mutex_; KeyProviderOptions options_; - std::unordered_map> keys_; + std::unordered_map> + keys_; }; enum FrameCryptionState { @@ -361,19 +372,20 @@ class RTC_EXPORT FrameCryptorTransformer kAesCbc, }; - explicit FrameCryptorTransformer(rtc::Thread* signaling_thread, - const std::string participant_id, - MediaType type, - Algorithm algorithm, - rtc::scoped_refptr key_provider); - + explicit FrameCryptorTransformer( + rtc::Thread* signaling_thread, + const std::string participant_id, + MediaType type, + Algorithm algorithm, + rtc::scoped_refptr key_provider); + ~FrameCryptorTransformer(); virtual void RegisterFrameCryptorTransformerObserver( - rtc::scoped_refptr observer) { + rtc::scoped_refptr observer) { webrtc::MutexLock lock(&mutex_); observer_ = observer; } - virtual void UnRegisterFrameCryptorTransformerObserver() { + virtual void UnRegisterFrameCryptorTransformerObserver() { webrtc::MutexLock lock(&mutex_); observer_ = nullptr; } @@ -431,6 +443,7 @@ class RTC_EXPORT FrameCryptorTransformer private: TaskQueueBase* const signaling_thread_; + std::unique_ptr thread_; std::string participant_id_; mutable 
webrtc::Mutex mutex_; mutable webrtc::Mutex sink_mutex_; @@ -445,7 +458,7 @@ class RTC_EXPORT FrameCryptorTransformer rtc::scoped_refptr key_provider_; rtc::scoped_refptr observer_; FrameCryptionState last_enc_error_ = FrameCryptionState::kNew; - FrameCryptionState last_dec_error_ = FrameCryptionState::kNew; + FrameCryptionState last_dec_error_ = FrameCryptionState::kNew; }; } // namespace webrtc From fcab26b4e9abd49a715886b6a82d0187dbde94dc Mon Sep 17 00:00:00 2001 From: "lauren n. liberda" Date: Thu, 28 Sep 2023 05:35:29 +0200 Subject: [PATCH 26/42] roll libvpx to include fix for CVE-2023-5217 (#98) --- DEPS | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/DEPS b/DEPS index 68bf5180d9..ed8a0b7053 100644 --- a/DEPS +++ b/DEPS @@ -289,7 +289,7 @@ deps = { 'src/third_party/perfetto': 'https://android.googlesource.com/platform/external/perfetto.git@20b114cd063623e63ef1b0a31167d60081567e51', 'src/third_party/libvpx/source/libvpx': - 'https://chromium.googlesource.com/webm/libvpx.git@27171320f5e36f7b18071bfa1d9616863ca1b4e8', + 'https://chromium.googlesource.com/webm/libvpx.git@7aaffe2df4c9426ab204a272ca5ca52286ca86d4', 'src/third_party/libyuv': 'https://chromium.googlesource.com/libyuv/libyuv.git@77c2121f7e6b8e694d6e908bbbe9be24214097da', 'src/third_party/lss': { From d5afc4b7be7caffbcc213c2a6486dbf3b9492763 Mon Sep 17 00:00:00 2001 From: Hiroshi Horie <548776+hiroshihorie@users.noreply.github.com> Date: Wed, 11 Oct 2023 17:30:30 +0800 Subject: [PATCH 27/42] Fix missing `RTC_OBJC_TYPE` macros (#100) * RTCVideoRendererAdapter * RTCDisplayLinkTimer * RTCNetworkMonitor * RTCMTLVideoView * RTCMTLRGBRenderer * progress * fix * ObjCAudioDeviceDelegate * remaining types * fix macos * revert prefix * prefix string references * fix RTCMTLVideoView symbols --- sdk/BUILD.gn | 7 +++- .../api/RTCVideoRendererAdapter+Private.h | 2 +- sdk/objc/api/RTCVideoRendererAdapter.h | 4 ++- sdk/objc/api/RTCVideoRendererAdapter.mm | 19 +++++----- .../RTCAudioDeviceModule+Private.h | 2 +- .../api/peerconnection/RTCAudioDeviceModule.h | 4 +-- .../peerconnection/RTCAudioDeviceModule.mm | 20 +++++------ sdk/objc/api/peerconnection/RTCAudioTrack.mm | 6 ++-- .../peerconnection/RTCEncodedImage+Private.mm | 10 +++--- .../api/peerconnection/RTCIODevice+Private.h | 2 +- sdk/objc/api/peerconnection/RTCIODevice.mm | 2 +- .../peerconnection/RTCPeerConnectionFactory.h | 2 +- .../RTCPeerConnectionFactory.mm | 21 +++++------ ...nnectionFactoryBuilder+DefaultComponents.h | 4 +-- ...nectionFactoryBuilder+DefaultComponents.mm | 6 ++-- .../RTCPeerConnectionFactoryBuilder.h | 4 +-- .../RTCPeerConnectionFactoryBuilder.mm | 6 ++-- .../api/peerconnection/RTCRtpCapabilities.mm | 4 +-- .../peerconnection/RTCRtpCodecCapability.mm | 4 +-- .../api/peerconnection/RTCRtpTransceiver.mm | 4 +-- sdk/objc/api/peerconnection/RTCVideoTrack.mm | 19 +++++----- sdk/objc/base/RTCAudioRenderer.h | 6 ++-- .../RTCAudioCustomProcessingAdapter+Private.h | 8 ++--- .../audio/RTCAudioCustomProcessingAdapter.h | 2 +- .../audio/RTCAudioCustomProcessingAdapter.mm | 20 +++++------ .../audio/RTCAudioCustomProcessingDelegate.h | 2 +- .../audio/RTCAudioProcessingModule.h | 2 +- .../audio/RTCDefaultAudioProcessingModule.h | 7 ++-- .../audio/RTCDefaultAudioProcessingModule.mm | 12 +++---- .../RTCNativeAudioSessionDelegateAdapter.h | 2 +- .../RTCNativeAudioSessionDelegateAdapter.mm | 2 +- .../capturer/RTCDesktopCapturer+Private.h | 2 +- .../components/capturer/RTCDesktopCapturer.h | 14 ++++---- .../components/capturer/RTCDesktopCapturer.mm | 2 +- 
.../capturer/RTCDesktopMediaList+Private.h | 2 +- .../capturer/RTCDesktopMediaList.mm | 20 +++++------ .../capturer/RTCDesktopSource+Private.h | 2 +- .../components/capturer/RTCDesktopSource.mm | 2 +- .../network/RTCNetworkMonitor+Private.h | 14 ++++---- .../components/network/RTCNetworkMonitor.h | 4 ++- .../components/network/RTCNetworkMonitor.mm | 6 ++-- .../renderer/metal/RTCMTLI420Renderer.h | 2 +- .../renderer/metal/RTCMTLI420Renderer.mm | 2 +- .../renderer/metal/RTCMTLNSVideoView.h | 4 ++- .../renderer/metal/RTCMTLNV12Renderer.h | 2 +- .../renderer/metal/RTCMTLNV12Renderer.mm | 2 +- .../renderer/metal/RTCMTLRGBRenderer.h | 3 +- .../renderer/metal/RTCMTLRGBRenderer.mm | 30 +++++++++------- .../renderer/metal/RTCMTLRenderer+Private.h | 2 +- .../renderer/metal/RTCMTLRenderer.h | 4 +-- .../renderer/metal/RTCMTLRenderer.mm | 2 +- .../renderer/metal/RTCMTLVideoView.m | 33 ++++++++--------- .../renderer/opengl/RTCDefaultShader.h | 2 +- .../renderer/opengl/RTCDefaultShader.mm | 2 +- .../renderer/opengl/RTCDisplayLinkTimer.h | 4 ++- .../renderer/opengl/RTCDisplayLinkTimer.m | 10 +++--- .../renderer/opengl/RTCEAGLVideoView.m | 36 +++++++++---------- .../renderer/opengl/RTCI420TextureCache.h | 2 +- .../renderer/opengl/RTCI420TextureCache.mm | 2 +- .../renderer/opengl/RTCNSGLVideoView.m | 8 ++--- .../renderer/opengl/RTCNV12TextureCache.h | 2 +- .../renderer/opengl/RTCNV12TextureCache.m | 2 +- .../RTCDefaultVideoEncoderFactory.m | 4 +-- .../RTCVideoEncoderFactorySimulcast.mm | 2 +- sdk/objc/native/api/video_capturer.mm | 2 +- sdk/objc/native/src/audio/audio_device_ios.h | 4 +-- sdk/objc/native/src/audio/audio_device_ios.mm | 2 +- sdk/objc/native/src/objc_audio_device.h | 4 +-- sdk/objc/native/src/objc_audio_device.mm | 2 +- .../native/src/objc_audio_device_delegate.h | 2 +- .../native/src/objc_audio_device_delegate.mm | 2 +- sdk/objc/native/src/objc_network_monitor.h | 2 +- sdk/objc/native/src/objc_network_monitor.mm | 2 +- sdk/objc/native/src/objc_video_track_source.h | 6 ++-- .../native/src/objc_video_track_source.mm | 6 ++-- sdk/objc/unittests/RTCMTLVideoView_xctest.m | 6 ++-- .../RTCPeerConnectionFactoryBuilderTest.mm | 4 +-- 77 files changed, 247 insertions(+), 242 deletions(-) diff --git a/sdk/BUILD.gn b/sdk/BUILD.gn index a0cf5b55d3..2ad8b84f85 100644 --- a/sdk/BUILD.gn +++ b/sdk/BUILD.gn @@ -385,7 +385,12 @@ if (is_ios || is_mac) { "objc/components/network/RTCNetworkMonitor.mm", ] - configs += [ ":used_from_extension" ] + configs += [ + "..:common_objc", + ":used_from_extension", + ] + + public_configs = [ ":common_config_objc" ] frameworks = [ "Network.framework" ] diff --git a/sdk/objc/api/RTCVideoRendererAdapter+Private.h b/sdk/objc/api/RTCVideoRendererAdapter+Private.h index 9b123d2d05..cac9ab665c 100644 --- a/sdk/objc/api/RTCVideoRendererAdapter+Private.h +++ b/sdk/objc/api/RTCVideoRendererAdapter+Private.h @@ -16,7 +16,7 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCVideoRendererAdapter () +@interface RTC_OBJC_TYPE(RTCVideoRendererAdapter) () /** * The Objective-C video renderer passed to this adapter during construction. diff --git a/sdk/objc/api/RTCVideoRendererAdapter.h b/sdk/objc/api/RTCVideoRendererAdapter.h index b0b6f04488..bbb8c6e71c 100644 --- a/sdk/objc/api/RTCVideoRendererAdapter.h +++ b/sdk/objc/api/RTCVideoRendererAdapter.h @@ -10,6 +10,8 @@ #import +#import "RTCMacros.h" + NS_ASSUME_NONNULL_BEGIN /* @@ -18,7 +20,7 @@ NS_ASSUME_NONNULL_BEGIN * adapter adapts calls made to that interface to the RTCVideoRenderer supplied * during construction. 
*/ -@interface RTCVideoRendererAdapter : NSObject +@interface RTC_OBJC_TYPE (RTCVideoRendererAdapter): NSObject - (instancetype)init NS_UNAVAILABLE; diff --git a/sdk/objc/api/RTCVideoRendererAdapter.mm b/sdk/objc/api/RTCVideoRendererAdapter.mm index ef02f72f60..d992c64108 100644 --- a/sdk/objc/api/RTCVideoRendererAdapter.mm +++ b/sdk/objc/api/RTCVideoRendererAdapter.mm @@ -17,10 +17,9 @@ namespace webrtc { -class VideoRendererAdapter - : public rtc::VideoSinkInterface { +class VideoRendererAdapter : public rtc::VideoSinkInterface { public: - VideoRendererAdapter(RTCVideoRendererAdapter* adapter) { + VideoRendererAdapter(RTC_OBJC_TYPE(RTCVideoRendererAdapter) * adapter) { adapter_ = adapter; size_ = CGSizeZero; } @@ -28,9 +27,9 @@ void OnFrame(const webrtc::VideoFrame& nativeVideoFrame) override { RTC_OBJC_TYPE(RTCVideoFrame)* videoFrame = NativeToObjCVideoFrame(nativeVideoFrame); - CGSize current_size = (videoFrame.rotation % 180 == 0) - ? CGSizeMake(videoFrame.width, videoFrame.height) - : CGSizeMake(videoFrame.height, videoFrame.width); + CGSize current_size = (videoFrame.rotation % 180 == 0) ? + CGSizeMake(videoFrame.width, videoFrame.height) : + CGSizeMake(videoFrame.height, videoFrame.width); if (!CGSizeEqualToSize(size_, current_size)) { size_ = current_size; @@ -40,12 +39,12 @@ void OnFrame(const webrtc::VideoFrame& nativeVideoFrame) override { } private: - __weak RTCVideoRendererAdapter *adapter_; + __weak RTC_OBJC_TYPE(RTCVideoRendererAdapter) * adapter_; CGSize size_; }; -} +} // namespace webrtc -@implementation RTCVideoRendererAdapter { +@implementation RTC_OBJC_TYPE (RTCVideoRendererAdapter) { std::unique_ptr _adapter; } @@ -60,7 +59,7 @@ - (instancetype)initWithNativeRenderer:(id)vide return self; } -- (rtc::VideoSinkInterface *)nativeVideoRenderer { +- (rtc::VideoSinkInterface*)nativeVideoRenderer { return _adapter.get(); } diff --git a/sdk/objc/api/peerconnection/RTCAudioDeviceModule+Private.h b/sdk/objc/api/peerconnection/RTCAudioDeviceModule+Private.h index 4eb91b93c7..73c1a4e26a 100644 --- a/sdk/objc/api/peerconnection/RTCAudioDeviceModule+Private.h +++ b/sdk/objc/api/peerconnection/RTCAudioDeviceModule+Private.h @@ -21,7 +21,7 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCAudioDeviceModule () +@interface RTC_OBJC_TYPE(RTCAudioDeviceModule) () - (instancetype)initWithNativeModule:(rtc::scoped_refptr )module workerThread:(rtc::Thread *)workerThread; diff --git a/sdk/objc/api/peerconnection/RTCAudioDeviceModule.h b/sdk/objc/api/peerconnection/RTCAudioDeviceModule.h index 1a9e339bd6..b02cecfd0b 100644 --- a/sdk/objc/api/peerconnection/RTCAudioDeviceModule.h +++ b/sdk/objc/api/peerconnection/RTCAudioDeviceModule.h @@ -39,8 +39,8 @@ RTC_OBJC_EXPORT // Executes low-level API's in sequence to switch the device // Use outputDevice / inputDevice property unless you need to know if setting the device is // successful. 
-- (BOOL)trySetOutputDevice:(nullable RTCIODevice *)device; -- (BOOL)trySetInputDevice:(nullable RTCIODevice *)device; +- (BOOL)trySetOutputDevice:(nullable RTC_OBJC_TYPE(RTCIODevice) *)device; +- (BOOL)trySetInputDevice:(nullable RTC_OBJC_TYPE(RTCIODevice) *)device; - (BOOL)setDevicesUpdatedHandler: (nullable RTCOnAudioDevicesDidUpdate) handler; diff --git a/sdk/objc/api/peerconnection/RTCAudioDeviceModule.mm b/sdk/objc/api/peerconnection/RTCAudioDeviceModule.mm index 5c116fae53..c88de392d7 100644 --- a/sdk/objc/api/peerconnection/RTCAudioDeviceModule.mm +++ b/sdk/objc/api/peerconnection/RTCAudioDeviceModule.mm @@ -77,7 +77,7 @@ - (instancetype)initWithNativeModule:(rtc::scoped_refptrBlockingCall([self] { NSArray *devices = [self _outputDevices]; @@ -92,11 +92,11 @@ - (RTCIODevice *)outputDevice { }); } -- (void)setOutputDevice: (RTCIODevice *)device { +- (void)setOutputDevice: (RTC_OBJC_TYPE(RTCIODevice) *)device { [self trySetOutputDevice: device]; } -- (BOOL)trySetOutputDevice: (RTCIODevice *)device { +- (BOOL)trySetOutputDevice: (RTC_OBJC_TYPE(RTCIODevice) *)device { return _workerThread->BlockingCall([self, device] { @@ -108,7 +108,7 @@ - (BOOL)trySetOutputDevice: (RTCIODevice *)device { } if (device != nil) { - index = [devices indexOfObjectPassingTest:^BOOL(RTCIODevice *e, NSUInteger i, BOOL *stop) { + index = [devices indexOfObjectPassingTest:^BOOL(RTC_OBJC_TYPE(RTCIODevice) *e, NSUInteger i, BOOL *stop) { return (*stop = [e.deviceId isEqualToString:device.deviceId]); }]; if (index == NSNotFound) { @@ -129,7 +129,7 @@ - (BOOL)trySetOutputDevice: (RTCIODevice *)device { }); } -- (RTCIODevice *)inputDevice { +- (RTC_OBJC_TYPE(RTCIODevice) *)inputDevice { return _workerThread->BlockingCall([self] { @@ -145,11 +145,11 @@ - (RTCIODevice *)inputDevice { }); } -- (void)setInputDevice: (RTCIODevice *)device { +- (void)setInputDevice: (RTC_OBJC_TYPE(RTCIODevice) *)device { [self trySetInputDevice: device]; } -- (BOOL)trySetInputDevice: (RTCIODevice *)device { +- (BOOL)trySetInputDevice: (RTC_OBJC_TYPE(RTCIODevice) *)device { return _workerThread->BlockingCall([self, device] { @@ -161,7 +161,7 @@ - (BOOL)trySetInputDevice: (RTCIODevice *)device { } if (device != nil) { - index = [devices indexOfObjectPassingTest:^BOOL(RTCIODevice *e, NSUInteger i, BOOL *stop) { + index = [devices indexOfObjectPassingTest:^BOOL(RTC_OBJC_TYPE(RTCIODevice) *e, NSUInteger i, BOOL *stop) { return (*stop = [e.deviceId isEqualToString:device.deviceId]); }]; if (index == NSNotFound) { @@ -261,7 +261,7 @@ - (BOOL)setDevicesUpdatedHandler: (nullable RTCOnAudioDevicesDidUpdate) handler _native->PlayoutDeviceName(i, name, guid); NSString *strGUID = [[NSString alloc] initWithCString:guid encoding:NSUTF8StringEncoding]; NSString *strName = [[NSString alloc] initWithCString:name encoding:NSUTF8StringEncoding]; - RTCIODevice *device = [[RTCIODevice alloc] initWithType:RTCIODeviceTypeOutput deviceId:strGUID name:strName]; + RTC_OBJC_TYPE(RTCIODevice) *device = [[RTC_OBJC_TYPE(RTCIODevice) alloc] initWithType:RTCIODeviceTypeOutput deviceId:strGUID name:strName]; [result addObject: device]; } } @@ -283,7 +283,7 @@ - (BOOL)setDevicesUpdatedHandler: (nullable RTCOnAudioDevicesDidUpdate) handler _native->RecordingDeviceName(i, name, guid); NSString *strGUID = [[NSString alloc] initWithCString:guid encoding:NSUTF8StringEncoding]; NSString *strName = [[NSString alloc] initWithCString:name encoding:NSUTF8StringEncoding]; - RTCIODevice *device = [[RTCIODevice alloc] initWithType:RTCIODeviceTypeInput deviceId:strGUID name:strName]; + 
RTC_OBJC_TYPE(RTCIODevice) *device = [[RTC_OBJC_TYPE(RTCIODevice) alloc] initWithType:RTCIODeviceTypeInput deviceId:strGUID name:strName]; [result addObject: device]; } } diff --git a/sdk/objc/api/peerconnection/RTCAudioTrack.mm b/sdk/objc/api/peerconnection/RTCAudioTrack.mm index 87deaf8aa8..40a6f5362c 100644 --- a/sdk/objc/api/peerconnection/RTCAudioTrack.mm +++ b/sdk/objc/api/peerconnection/RTCAudioTrack.mm @@ -28,12 +28,12 @@ class AudioSinkConverter : public rtc::RefCountInterface, public webrtc::AudioTrackSinkInterface { private: os_unfair_lock *lock_; - __weak RTCAudioTrack *audio_track_; + __weak RTC_OBJC_TYPE(RTCAudioTrack) *audio_track_; int64_t total_frames_ = 0; bool attached_ = false; public: - AudioSinkConverter(RTCAudioTrack *audioTrack, os_unfair_lock *lock) { + AudioSinkConverter(RTC_OBJC_TYPE(RTCAudioTrack) *audioTrack, os_unfair_lock *lock) { RTC_LOG(LS_INFO) << "RTCAudioTrack.AudioSinkConverter init"; audio_track_ = audioTrack; lock_ = lock; @@ -274,7 +274,7 @@ - (void)didCaptureSampleBuffer:(CMSampleBufferRef)sampleBuffer { NSArray *renderers = [_renderers allObjects]; os_unfair_lock_unlock(&_lock); - for (id renderer in renderers) { + for (id renderer in renderers) { [renderer renderSampleBuffer:sampleBuffer]; } } diff --git a/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm b/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm index 7f8ae739e0..3e9c768a6f 100644 --- a/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm +++ b/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm @@ -34,16 +34,16 @@ explicit ObjCEncodedImageBuffer(NSData *data) : data_(data) {} NSData *data_; }; -} +} // namespace // A simple wrapper around webrtc::EncodedImageBufferInterface to make it usable with associated // objects. -@interface RTCWrappedEncodedImageBuffer : NSObject +@interface RTC_OBJC_TYPE (RTCWrappedEncodedImageBuffer): NSObject @property(nonatomic) rtc::scoped_refptr buffer; - (instancetype)initWithEncodedImageBuffer: (rtc::scoped_refptr)buffer; @end -@implementation RTCWrappedEncodedImageBuffer +@implementation RTC_OBJC_TYPE (RTCWrappedEncodedImageBuffer) @synthesize buffer = _buffer; - (instancetype)initWithEncodedImageBuffer: (rtc::scoped_refptr)buffer { @@ -59,7 +59,7 @@ @implementation RTC_OBJC_TYPE (RTCEncodedImage) (Private) - (rtc::scoped_refptr)encodedData { - RTCWrappedEncodedImageBuffer *wrappedBuffer = + RTC_OBJC_TYPE(RTCWrappedEncodedImageBuffer) *wrappedBuffer = objc_getAssociatedObject(self, @selector(encodedData)); return wrappedBuffer.buffer; } @@ -68,7 +68,7 @@ - (void)setEncodedData:(rtc::scoped_refptr) return objc_setAssociatedObject( self, @selector(encodedData), - [[RTCWrappedEncodedImageBuffer alloc] initWithEncodedImageBuffer:buffer], + [[RTC_OBJC_TYPE(RTCWrappedEncodedImageBuffer) alloc] initWithEncodedImageBuffer:buffer], OBJC_ASSOCIATION_RETAIN_NONATOMIC); } diff --git a/sdk/objc/api/peerconnection/RTCIODevice+Private.h b/sdk/objc/api/peerconnection/RTCIODevice+Private.h index 0eb09b83a4..e736c993e1 100644 --- a/sdk/objc/api/peerconnection/RTCIODevice+Private.h +++ b/sdk/objc/api/peerconnection/RTCIODevice+Private.h @@ -17,7 +17,7 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCIODevice () +@interface RTC_OBJC_TYPE(RTCIODevice) () - (instancetype)initWithType:(RTCIODeviceType)type deviceId:(NSString *)deviceId diff --git a/sdk/objc/api/peerconnection/RTCIODevice.mm b/sdk/objc/api/peerconnection/RTCIODevice.mm index 27e1255e8e..b3738f71fe 100644 --- a/sdk/objc/api/peerconnection/RTCIODevice.mm +++ b/sdk/objc/api/peerconnection/RTCIODevice.mm 
@@ -19,7 +19,7 @@ NSString *const kDefaultDeviceId = @"default"; -@implementation RTCIODevice +@implementation RTC_OBJC_TYPE(RTCIODevice) @synthesize type = _type; @synthesize deviceId = _deviceId; diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h index 439490d370..5c82750d20 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h @@ -66,7 +66,7 @@ RTC_OBJC_EXPORT audioProcessingModule: (nullable id)audioProcessingModule; -@property(nonatomic, readonly) RTCAudioDeviceModule *audioDeviceModule; +@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCAudioDeviceModule) *audioDeviceModule; - (RTC_OBJC_TYPE(RTCRtpCapabilities) *)rtpSenderCapabilitiesFor:(RTCRtpMediaType)mediaType; diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm index 5cb4f38c05..7b0557ba25 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm @@ -65,7 +65,7 @@ @implementation RTC_OBJC_TYPE (RTCPeerConnectionFactory) { std::unique_ptr _workerThread; std::unique_ptr _signalingThread; rtc::scoped_refptr _nativeAudioDeviceModule; - RTCDefaultAudioProcessingModule *_defaultAudioProcessingModule; + RTC_OBJC_TYPE(RTCDefaultAudioProcessingModule) *_defaultAudioProcessingModule; BOOL _hasStartedAecDump; } @@ -133,16 +133,16 @@ - (instancetype)init { - (RTC_OBJC_TYPE(RTCRtpCapabilities) *)rtpSenderCapabilitiesFor:(RTCRtpMediaType)mediaType { - webrtc::RtpCapabilities capabilities = _nativeFactory->GetRtpSenderCapabilities([RTCRtpReceiver nativeMediaTypeForMediaType: mediaType]); + webrtc::RtpCapabilities capabilities = _nativeFactory->GetRtpSenderCapabilities([RTC_OBJC_TYPE(RTCRtpReceiver) nativeMediaTypeForMediaType: mediaType]); - return [[RTCRtpCapabilities alloc] initWithNativeCapabilities: capabilities]; + return [[RTC_OBJC_TYPE(RTCRtpCapabilities) alloc] initWithNativeCapabilities: capabilities]; } - (RTC_OBJC_TYPE(RTCRtpCapabilities) *)rtpReceiverCapabilitiesFor:(RTCRtpMediaType)mediaType { - webrtc::RtpCapabilities capabilities = _nativeFactory->GetRtpReceiverCapabilities([RTCRtpReceiver nativeMediaTypeForMediaType: mediaType]); + webrtc::RtpCapabilities capabilities = _nativeFactory->GetRtpReceiverCapabilities([RTC_OBJC_TYPE(RTCRtpReceiver) nativeMediaTypeForMediaType: mediaType]); - return [[RTCRtpCapabilities alloc] initWithNativeCapabilities: capabilities]; + return [[RTC_OBJC_TYPE(RTCRtpCapabilities) alloc] initWithNativeCapabilities: capabilities]; } - (instancetype) @@ -164,10 +164,10 @@ - (instancetype)init { } rtc::scoped_refptr audio_device_module = [self createAudioDeviceModule:bypassVoiceProcessing]; - if ([audioProcessingModule isKindOfClass:[RTCDefaultAudioProcessingModule class]]) { - _defaultAudioProcessingModule = (RTCDefaultAudioProcessingModule *)audioProcessingModule; + if ([audioProcessingModule isKindOfClass:[RTC_OBJC_TYPE(RTCDefaultAudioProcessingModule) class]]) { + _defaultAudioProcessingModule = (RTC_OBJC_TYPE(RTCDefaultAudioProcessingModule) *)audioProcessingModule; } else { - _defaultAudioProcessingModule = [[RTCDefaultAudioProcessingModule alloc] init]; + _defaultAudioProcessingModule = [[RTC_OBJC_TYPE(RTCDefaultAudioProcessingModule) alloc] init]; } NSLog(@"AudioProcessingModule: %@", _defaultAudioProcessingModule); @@ -273,8 +273,9 @@ - (instancetype)initWithNativeAudioEncoderFactory: bypassVoiceProcessing 
== YES); }); - _audioDeviceModule = [[RTCAudioDeviceModule alloc] initWithNativeModule: _nativeAudioDeviceModule - workerThread: _workerThread.get()]; + _audioDeviceModule = + [[RTC_OBJC_TYPE(RTCAudioDeviceModule) alloc] initWithNativeModule:_nativeAudioDeviceModule + workerThread:_workerThread.get()]; media_deps.adm = _nativeAudioDeviceModule; media_deps.task_queue_factory = dependencies.task_queue_factory.get(); diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.h b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.h index 070a0e74a5..4d7025bf93 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.h +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.h @@ -12,9 +12,9 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCPeerConnectionFactoryBuilder (DefaultComponents) +@interface RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) (DefaultComponents) -+ (RTCPeerConnectionFactoryBuilder *)defaultBuilder; ++ (RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) *)defaultBuilder; @end diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.mm b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.mm index 522e520e12..a2f633e1a4 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.mm +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.mm @@ -22,10 +22,10 @@ #import "sdk/objc/native/api/audio_device_module.h" #endif -@implementation RTCPeerConnectionFactoryBuilder (DefaultComponents) +@implementation RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) (DefaultComponents) -+ (RTCPeerConnectionFactoryBuilder *)defaultBuilder { - RTCPeerConnectionFactoryBuilder *builder = [[RTCPeerConnectionFactoryBuilder alloc] init]; ++ (RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) *)defaultBuilder { + RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) *builder = [[RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) alloc] init]; auto audioEncoderFactory = webrtc::CreateBuiltinAudioEncoderFactory(); [builder setAudioEncoderFactory:audioEncoderFactory]; diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.h b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.h index f0b0de156a..a46839b6b3 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.h +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.h @@ -25,9 +25,9 @@ class AudioProcessing; NS_ASSUME_NONNULL_BEGIN -@interface RTCPeerConnectionFactoryBuilder : NSObject +@interface RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) : NSObject -+ (RTCPeerConnectionFactoryBuilder *)builder; ++ (RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) *)builder; - (RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)createPeerConnectionFactory; diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.mm b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.mm index 0981fb3879..4cb12b0a59 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.mm +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.mm @@ -18,7 +18,7 @@ #include "modules/audio_device/include/audio_device.h" #include "modules/audio_processing/include/audio_processing.h" -@implementation RTCPeerConnectionFactoryBuilder { +@implementation RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) { std::unique_ptr _videoEncoderFactory; std::unique_ptr _videoDecoderFactory; rtc::scoped_refptr 
_audioEncoderFactory; @@ -27,8 +27,8 @@ @implementation RTCPeerConnectionFactoryBuilder { rtc::scoped_refptr _audioProcessingModule; } -+ (RTCPeerConnectionFactoryBuilder *)builder { - return [[RTCPeerConnectionFactoryBuilder alloc] init]; ++ (RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) *)builder { + return [[RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) alloc] init]; } - (RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)createPeerConnectionFactory { diff --git a/sdk/objc/api/peerconnection/RTCRtpCapabilities.mm b/sdk/objc/api/peerconnection/RTCRtpCapabilities.mm index 32664e7c9f..5d5abc1511 100644 --- a/sdk/objc/api/peerconnection/RTCRtpCapabilities.mm +++ b/sdk/objc/api/peerconnection/RTCRtpCapabilities.mm @@ -39,8 +39,8 @@ - (instancetype)initWithNativeCapabilities:(const webrtc::RtpCapabilities &)nati NSMutableArray *result = [NSMutableArray array]; for (auto &element : _nativeCapabilities.codecs) { - RTCRtpCodecCapability *object = - [[RTCRtpCodecCapability alloc] initWithNativeCodecCapability:element]; + RTC_OBJC_TYPE(RTCRtpCodecCapability) *object = + [[RTC_OBJC_TYPE(RTCRtpCodecCapability) alloc] initWithNativeCodecCapability:element]; [result addObject:object]; } diff --git a/sdk/objc/api/peerconnection/RTCRtpCodecCapability.mm b/sdk/objc/api/peerconnection/RTCRtpCodecCapability.mm index f310bf6829..35d21054b0 100644 --- a/sdk/objc/api/peerconnection/RTCRtpCodecCapability.mm +++ b/sdk/objc/api/peerconnection/RTCRtpCodecCapability.mm @@ -54,11 +54,11 @@ - (void)setName:(NSString *)name { } - (RTCRtpMediaType)kind { - return [RTCRtpReceiver mediaTypeForNativeMediaType:_nativeCodecCapability.kind]; + return [RTC_OBJC_TYPE(RTCRtpReceiver) mediaTypeForNativeMediaType:_nativeCodecCapability.kind]; } - (void)setKind:(RTCRtpMediaType)kind { - _nativeCodecCapability.kind = [RTCRtpReceiver nativeMediaTypeForMediaType:kind]; + _nativeCodecCapability.kind = [RTC_OBJC_TYPE(RTCRtpReceiver) nativeMediaTypeForMediaType:kind]; } - (NSNumber *)clockRate { diff --git a/sdk/objc/api/peerconnection/RTCRtpTransceiver.mm b/sdk/objc/api/peerconnection/RTCRtpTransceiver.mm index acb1b8032a..b7cc37c2f8 100644 --- a/sdk/objc/api/peerconnection/RTCRtpTransceiver.mm +++ b/sdk/objc/api/peerconnection/RTCRtpTransceiver.mm @@ -71,7 +71,7 @@ - (void)setCodecPreferences:(NSArray *)c std::vector objects; - for (RTCRtpCodecCapability *object in codecPreferences) { + for (RTC_OBJC_TYPE(RTCRtpCodecCapability) *object in codecPreferences) { objects.push_back(object.nativeCodecCapability); } @@ -90,7 +90,7 @@ - (void)setCodecPreferences:(NSArray *)c std::vector capabilities = _nativeRtpTransceiver->codec_preferences(); for (auto & element : capabilities) { - RTCRtpCodecCapability *object = [[RTCRtpCodecCapability alloc] initWithNativeCodecCapability: element]; + RTC_OBJC_TYPE(RTCRtpCodecCapability) *object = [[RTC_OBJC_TYPE(RTCRtpCodecCapability) alloc] initWithNativeCodecCapability: element]; [result addObject: object]; } diff --git a/sdk/objc/api/peerconnection/RTCVideoTrack.mm b/sdk/objc/api/peerconnection/RTCVideoTrack.mm index df294d2f3e..546ec80a61 100644 --- a/sdk/objc/api/peerconnection/RTCVideoTrack.mm +++ b/sdk/objc/api/peerconnection/RTCVideoTrack.mm @@ -53,7 +53,7 @@ - (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)facto } - (void)dealloc { - for (RTCVideoRendererAdapter *adapter in _adapters) { + for (RTC_OBJC_TYPE(RTCVideoRendererAdapter) * adapter in _adapters) { self.nativeVideoTrack->RemoveSink(adapter.nativeVideoRenderer); } } @@ -85,18 +85,17 @@ - (void)addRenderer:(id)renderer { 
} // Make sure we don't have this renderer yet. - for (RTCVideoRendererAdapter *adapter in _adapters) { + for (RTC_OBJC_TYPE(RTCVideoRendererAdapter) * adapter in _adapters) { if (adapter.videoRenderer == renderer) { RTC_LOG(LS_INFO) << "|renderer| is already attached to this track"; return; } } // Create a wrapper that provides a native pointer for us. - RTCVideoRendererAdapter* adapter = - [[RTCVideoRendererAdapter alloc] initWithNativeRenderer:renderer]; + RTC_OBJC_TYPE(RTCVideoRendererAdapter) *adapter = + [[RTC_OBJC_TYPE(RTCVideoRendererAdapter) alloc] initWithNativeRenderer:renderer]; [_adapters addObject:adapter]; - self.nativeVideoTrack->AddOrUpdateSink(adapter.nativeVideoRenderer, - rtc::VideoSinkWants()); + self.nativeVideoTrack->AddOrUpdateSink(adapter.nativeVideoRenderer, rtc::VideoSinkWants()); } - (void)removeRenderer:(id)renderer { @@ -105,9 +104,8 @@ - (void)removeRenderer:(id)renderer { return; } __block NSUInteger indexToRemove = NSNotFound; - [_adapters enumerateObjectsUsingBlock:^(RTCVideoRendererAdapter *adapter, - NSUInteger idx, - BOOL *stop) { + [_adapters enumerateObjectsUsingBlock:^( + RTC_OBJC_TYPE(RTCVideoRendererAdapter) * adapter, NSUInteger idx, BOOL * stop) { if (adapter.videoRenderer == renderer) { indexToRemove = idx; *stop = YES; @@ -117,8 +115,7 @@ - (void)removeRenderer:(id)renderer { RTC_LOG(LS_INFO) << "removeRenderer called with a renderer that has not been previously added"; return; } - RTCVideoRendererAdapter *adapterToRemove = - [_adapters objectAtIndex:indexToRemove]; + RTC_OBJC_TYPE(RTCVideoRendererAdapter) *adapterToRemove = [_adapters objectAtIndex:indexToRemove]; self.nativeVideoTrack->RemoveSink(adapterToRemove.nativeVideoRenderer); [_adapters removeObjectAtIndex:indexToRemove]; } diff --git a/sdk/objc/base/RTCAudioRenderer.h b/sdk/objc/base/RTCAudioRenderer.h index def20eac3c..3669831fca 100644 --- a/sdk/objc/base/RTCAudioRenderer.h +++ b/sdk/objc/base/RTCAudioRenderer.h @@ -23,11 +23,9 @@ NS_ASSUME_NONNULL_BEGIN -RTC_OBJC_EXPORT @protocol RTC_OBJC_TYPE -(RTCAudioRenderer) +RTC_OBJC_EXPORT @protocol RTC_OBJC_TYPE(RTCAudioRenderer) - - (void)renderSampleBuffer : (CMSampleBufferRef)sampleBuffer - NS_SWIFT_NAME(render(sampleBuffer:)); +- (void)renderSampleBuffer: (CMSampleBufferRef)sampleBuffer NS_SWIFT_NAME(render(sampleBuffer:)); @end diff --git a/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter+Private.h b/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter+Private.h index a9dc3d8400..9995b58abb 100644 --- a/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter+Private.h +++ b/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter+Private.h @@ -22,21 +22,21 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCAudioCustomProcessingAdapter () +@interface RTC_OBJC_TYPE(RTCAudioCustomProcessingAdapter) () // Thread safe set/get with os_unfair_lock. -@property(nonatomic, weak, nullable) id +@property(nonatomic, weak, nullable) id audioCustomProcessingDelegate; // Direct read access without lock. 
-@property(nonatomic, readonly, weak, nullable) id +@property(nonatomic, readonly, weak, nullable) id rawAudioCustomProcessingDelegate; @property(nonatomic, readonly) std::unique_ptr nativeAudioCustomProcessingModule; - (instancetype)initWithDelegate: - (nullable id)audioCustomProcessingDelegate; + (nullable id)audioCustomProcessingDelegate; @end diff --git a/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter.h b/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter.h index 24239eac2d..3230c19323 100644 --- a/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter.h +++ b/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter.h @@ -19,7 +19,7 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCAudioCustomProcessingAdapter : NSObject +@interface RTC_OBJC_TYPE(RTCAudioCustomProcessingAdapter) : NSObject - (instancetype)init NS_UNAVAILABLE; diff --git a/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter.mm b/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter.mm index c8d1dfe4f6..c0f297c786 100644 --- a/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter.mm +++ b/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter.mm @@ -31,7 +31,7 @@ int sample_rate_hz_; int num_channels_; - AudioCustomProcessingAdapter(RTCAudioCustomProcessingAdapter *adapter, os_unfair_lock *lock) { + AudioCustomProcessingAdapter(RTC_OBJC_TYPE(RTCAudioCustomProcessingAdapter) *adapter, os_unfair_lock *lock) { RTC_LOG(LS_INFO) << "RTCAudioCustomProcessingAdapter.AudioCustomProcessingAdapter init"; adapter_ = adapter; @@ -45,14 +45,14 @@ RTC_LOG(LS_INFO) << "RTCAudioCustomProcessingAdapter.AudioCustomProcessingAdapter dealloc"; os_unfair_lock_lock(lock_); - id delegate = adapter_.rawAudioCustomProcessingDelegate; + id delegate = adapter_.rawAudioCustomProcessingDelegate; [delegate audioProcessingRelease]; os_unfair_lock_unlock(lock_); } void Initialize(int sample_rate_hz, int num_channels) override { os_unfair_lock_lock(lock_); - id delegate = adapter_.rawAudioCustomProcessingDelegate; + id delegate = adapter_.rawAudioCustomProcessingDelegate; [delegate audioProcessingInitializeWithSampleRate:sample_rate_hz channels:num_channels]; is_initialized_ = true; sample_rate_hz_ = sample_rate_hz; @@ -68,9 +68,9 @@ void Process(AudioBuffer *audio_buffer) override { return; } - id delegate = adapter_.rawAudioCustomProcessingDelegate; + id delegate = adapter_.rawAudioCustomProcessingDelegate; if (delegate != nil) { - RTCAudioBuffer *audioBuffer = [[RTCAudioBuffer alloc] initWithNativeType:audio_buffer]; + RTC_OBJC_TYPE(RTCAudioBuffer) *audioBuffer = [[RTC_OBJC_TYPE(RTCAudioBuffer) alloc] initWithNativeType:audio_buffer]; [delegate audioProcessingProcess:audioBuffer]; } os_unfair_lock_unlock(lock_); @@ -79,12 +79,12 @@ void Process(AudioBuffer *audio_buffer) override { std::string ToString() const override { return "AudioCustomProcessingAdapter"; } private: - __weak RTCAudioCustomProcessingAdapter *adapter_; + __weak RTC_OBJC_TYPE(RTCAudioCustomProcessingAdapter) *adapter_; os_unfair_lock *lock_; }; } // namespace webrtc -@implementation RTCAudioCustomProcessingAdapter { +@implementation RTC_OBJC_TYPE(RTCAudioCustomProcessingAdapter) { webrtc::AudioCustomProcessingAdapter *_adapter; os_unfair_lock _lock; } @@ -109,14 +109,14 @@ - (void)dealloc { #pragma mark - Getter & Setter for audioCustomProcessingDelegate -- (nullable id)audioCustomProcessingDelegate { +- (nullable id)audioCustomProcessingDelegate { os_unfair_lock_lock(&_lock); - id delegate = _rawAudioCustomProcessingDelegate; + id delegate = 
_rawAudioCustomProcessingDelegate; os_unfair_lock_unlock(&_lock); return delegate; } -- (void)setAudioCustomProcessingDelegate:(nullable id)delegate { +- (void)setAudioCustomProcessingDelegate:(nullable id)delegate { os_unfair_lock_lock(&_lock); if (_rawAudioCustomProcessingDelegate != nil && _adapter->is_initialized_) { [_rawAudioCustomProcessingDelegate audioProcessingRelease]; diff --git a/sdk/objc/components/audio/RTCAudioCustomProcessingDelegate.h b/sdk/objc/components/audio/RTCAudioCustomProcessingDelegate.h index 6a2fec9433..a8e4981fbc 100644 --- a/sdk/objc/components/audio/RTCAudioCustomProcessingDelegate.h +++ b/sdk/objc/components/audio/RTCAudioCustomProcessingDelegate.h @@ -36,7 +36,7 @@ RTC_OBJC_EXPORT @protocol RTC_OBJC_TYPE (RTCAudioCustomProcessingDelegate) -- (void)applyConfig: (RTCAudioProcessingConfig *)config; +- (void)applyConfig: (RTC_OBJC_TYPE(RTCAudioProcessingConfig) *)config; // TODO: Implement... diff --git a/sdk/objc/components/audio/RTCDefaultAudioProcessingModule.h b/sdk/objc/components/audio/RTCDefaultAudioProcessingModule.h index 917d584d48..2047b3f797 100644 --- a/sdk/objc/components/audio/RTCDefaultAudioProcessingModule.h +++ b/sdk/objc/components/audio/RTCDefaultAudioProcessingModule.h @@ -22,18 +22,17 @@ NS_ASSUME_NONNULL_BEGIN @class RTC_OBJC_TYPE(RTCAudioProcessingConfig); -@protocol RTC_OBJC_TYPE -(RTCAudioCustomProcessingDelegate); +@protocol RTC_OBJC_TYPE (RTCAudioCustomProcessingDelegate); RTC_OBJC_EXPORT @interface RTC_OBJC_TYPE (RTCDefaultAudioProcessingModule) : NSObject -- (instancetype)initWithConfig: (nullable RTCAudioProcessingConfig *)config +- (instancetype)initWithConfig: (nullable RTC_OBJC_TYPE(RTCAudioProcessingConfig) *)config capturePostProcessingDelegate: (nullable id)capturePostProcessingDelegate renderPreProcessingDelegate: (nullable id)renderPreProcessingDelegate NS_SWIFT_NAME(init(config:capturePostProcessingDelegate:renderPreProcessingDelegate:)) NS_DESIGNATED_INITIALIZER; -- (void)applyConfig:(RTCAudioProcessingConfig *)config; +- (void)applyConfig:(RTC_OBJC_TYPE(RTCAudioProcessingConfig) *)config; // Dynamically update delegates at runtime diff --git a/sdk/objc/components/audio/RTCDefaultAudioProcessingModule.mm b/sdk/objc/components/audio/RTCDefaultAudioProcessingModule.mm index 3875dec533..2f592cefa4 100644 --- a/sdk/objc/components/audio/RTCDefaultAudioProcessingModule.mm +++ b/sdk/objc/components/audio/RTCDefaultAudioProcessingModule.mm @@ -23,8 +23,8 @@ @implementation RTC_OBJC_TYPE (RTCDefaultAudioProcessingModule) { rtc::scoped_refptr _nativeAudioProcessingModule; // Custom processing adapters... 
- RTCAudioCustomProcessingAdapter *_capturePostProcessingAdapter; - RTCAudioCustomProcessingAdapter *_renderPreProcessingAdapter; + RTC_OBJC_TYPE(RTCAudioCustomProcessingAdapter) *_capturePostProcessingAdapter; + RTC_OBJC_TYPE(RTCAudioCustomProcessingAdapter) *_renderPreProcessingAdapter; } - (instancetype)init { @@ -33,7 +33,7 @@ - (instancetype)init { renderPreProcessingDelegate:nil]; } -- (instancetype)initWithConfig:(nullable RTCAudioProcessingConfig *)config +- (instancetype)initWithConfig:(nullable RTC_OBJC_TYPE(RTCAudioProcessingConfig) *)config capturePostProcessingDelegate: (nullable id)capturePostProcessingDelegate renderPreProcessingDelegate:(nullable id) @@ -48,12 +48,12 @@ - (instancetype)initWithConfig:(nullable RTCAudioProcessingConfig *)config } _capturePostProcessingAdapter = - [[RTCAudioCustomProcessingAdapter alloc] initWithDelegate:capturePostProcessingDelegate]; + [[RTC_OBJC_TYPE(RTCAudioCustomProcessingAdapter) alloc] initWithDelegate:capturePostProcessingDelegate]; builder.SetCapturePostProcessing( _capturePostProcessingAdapter.nativeAudioCustomProcessingModule); _renderPreProcessingAdapter = - [[RTCAudioCustomProcessingAdapter alloc] initWithDelegate:renderPreProcessingDelegate]; + [[RTC_OBJC_TYPE(RTCAudioCustomProcessingAdapter) alloc] initWithDelegate:renderPreProcessingDelegate]; builder.SetRenderPreProcessing(_renderPreProcessingAdapter.nativeAudioCustomProcessingModule); _nativeAudioProcessingModule = builder.Create(); @@ -83,7 +83,7 @@ - (void)setRenderPreProcessingDelegate: #pragma mark - RTCAudioProcessingModule protocol -- (void)applyConfig:(RTCAudioProcessingConfig *)config { +- (void)applyConfig:(RTC_OBJC_TYPE(RTCAudioProcessingConfig) *)config { _nativeAudioProcessingModule->ApplyConfig(config.nativeAudioProcessingConfig); } diff --git a/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.h b/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.h index 6a75f01479..1799c11415 100644 --- a/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.h +++ b/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.h @@ -19,7 +19,7 @@ class AudioSessionObserver; /** Adapter that forwards RTCAudioSessionDelegate calls to the appropriate * methods on the AudioSessionObserver. 
*/ -@interface RTCNativeAudioSessionDelegateAdapter : NSObject +@interface RTC_OBJC_TYPE(RTCNativeAudioSessionDelegateAdapter) : NSObject - (instancetype)init NS_UNAVAILABLE; diff --git a/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.mm b/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.mm index daddf314a4..f652ad1e5f 100644 --- a/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.mm +++ b/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.mm @@ -14,7 +14,7 @@ #import "base/RTCLogging.h" -@implementation RTCNativeAudioSessionDelegateAdapter { +@implementation RTC_OBJC_TYPE(RTCNativeAudioSessionDelegateAdapter) { webrtc::AudioSessionObserver *_observer; } diff --git a/sdk/objc/components/capturer/RTCDesktopCapturer+Private.h b/sdk/objc/components/capturer/RTCDesktopCapturer+Private.h index 7e293a1a56..30aed69d29 100644 --- a/sdk/objc/components/capturer/RTCDesktopCapturer+Private.h +++ b/sdk/objc/components/capturer/RTCDesktopCapturer+Private.h @@ -30,7 +30,7 @@ RTC_OBJC_EXPORT -(void)didSourceCaptureError; @end -@interface RTCDesktopCapturer () +@interface RTC_OBJC_TYPE(RTCDesktopCapturer) () @property(nonatomic, readonly)std::shared_ptr nativeCapturer; diff --git a/sdk/objc/components/capturer/RTCDesktopCapturer.h b/sdk/objc/components/capturer/RTCDesktopCapturer.h index 160c00d208..b63912acf0 100644 --- a/sdk/objc/components/capturer/RTCDesktopCapturer.h +++ b/sdk/objc/components/capturer/RTCDesktopCapturer.h @@ -23,18 +23,18 @@ NS_ASSUME_NONNULL_BEGIN -@class RTCDesktopCapturer; +@class RTC_OBJC_TYPE(RTCDesktopCapturer); RTC_OBJC_EXPORT @protocol RTC_OBJC_TYPE (RTCDesktopCapturerDelegate) --(void)didSourceCaptureStart:(RTCDesktopCapturer *) capturer; +-(void)didSourceCaptureStart:(RTC_OBJC_TYPE(RTCDesktopCapturer) *) capturer; --(void)didSourceCapturePaused:(RTCDesktopCapturer *) capturer; +-(void)didSourceCapturePaused:(RTC_OBJC_TYPE(RTCDesktopCapturer) *) capturer; --(void)didSourceCaptureStop:(RTCDesktopCapturer *) capturer; +-(void)didSourceCaptureStop:(RTC_OBJC_TYPE(RTCDesktopCapturer) *) capturer; --(void)didSourceCaptureError:(RTCDesktopCapturer *) capturer; +-(void)didSourceCaptureError:(RTC_OBJC_TYPE(RTCDesktopCapturer) *) capturer; @end RTC_OBJC_EXPORT @@ -42,9 +42,9 @@ RTC_OBJC_EXPORT // RTCVideoCapturerDelegate (usually RTCVideoSource). 
@interface RTC_OBJC_TYPE (RTCDesktopCapturer) : RTC_OBJC_TYPE(RTCVideoCapturer) -@property(nonatomic, readonly) RTCDesktopSource *source; +@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCDesktopSource) *source; -- (instancetype)initWithSource:(RTCDesktopSource*)source delegate:(__weak id)delegate captureDelegate:(__weak id)captureDelegate; +- (instancetype)initWithSource:(RTC_OBJC_TYPE(RTCDesktopSource) *)source delegate:(__weak id)delegate captureDelegate:(__weak id)captureDelegate; - (instancetype)initWithDefaultScreen:(__weak id)delegate captureDelegate:(__weak id)captureDelegate; diff --git a/sdk/objc/components/capturer/RTCDesktopCapturer.mm b/sdk/objc/components/capturer/RTCDesktopCapturer.mm index a7d5c60eb2..a1948684d3 100644 --- a/sdk/objc/components/capturer/RTCDesktopCapturer.mm +++ b/sdk/objc/components/capturer/RTCDesktopCapturer.mm @@ -32,7 +32,7 @@ @implementation RTC_OBJC_TYPE (RTCDesktopCapturer) { @synthesize nativeCapturer = _nativeCapturer; @synthesize source = _source; -- (instancetype)initWithSource:(RTCDesktopSource*)source delegate:(__weak id)delegate captureDelegate:(__weak id)captureDelegate { +- (instancetype)initWithSource:(RTC_OBJC_TYPE(RTCDesktopSource) *)source delegate:(__weak id)delegate captureDelegate:(__weak id)captureDelegate { if (self = [super initWithDelegate:captureDelegate]) { webrtc::DesktopType captureType = webrtc::kScreen; if(source.sourceType == RTCDesktopSourceTypeWindow) { diff --git a/sdk/objc/components/capturer/RTCDesktopMediaList+Private.h b/sdk/objc/components/capturer/RTCDesktopMediaList+Private.h index fc3b080ad2..eb1e76ddbb 100644 --- a/sdk/objc/components/capturer/RTCDesktopMediaList+Private.h +++ b/sdk/objc/components/capturer/RTCDesktopMediaList+Private.h @@ -23,7 +23,7 @@ namespace webrtc { NS_ASSUME_NONNULL_BEGIN -@interface RTCDesktopMediaList () +@interface RTC_OBJC_TYPE(RTCDesktopMediaList) () @property(nonatomic, readonly)std::shared_ptr nativeMediaList; diff --git a/sdk/objc/components/capturer/RTCDesktopMediaList.mm b/sdk/objc/components/capturer/RTCDesktopMediaList.mm index 7aa7dca1be..2bd6c1da0e 100644 --- a/sdk/objc/components/capturer/RTCDesktopMediaList.mm +++ b/sdk/objc/components/capturer/RTCDesktopMediaList.mm @@ -19,9 +19,9 @@ #import "RTCDesktopSource+Private.h" #import "RTCDesktopMediaList+Private.h" -@implementation RTCDesktopMediaList { +@implementation RTC_OBJC_TYPE(RTCDesktopMediaList) { RTCDesktopSourceType _sourceType; - NSMutableArray* _sources; + NSMutableArray* _sources; __weak id _delegate; } @@ -45,24 +45,24 @@ - (int32_t)UpdateSourceList:(BOOL)forceReload updateAllThumbnails:(BOOL)updateT return _nativeMediaList->UpdateSourceList(forceReload, updateThumbnail); } --(NSArray*) getSources { +-(NSArray*) getSources { _sources = [NSMutableArray array]; int sourceCount = _nativeMediaList->GetSourceCount(); for (int i = 0; i < sourceCount; i++) { webrtc::MediaSource *mediaSource = _nativeMediaList->GetSource(i); - [_sources addObject:[[RTCDesktopSource alloc] initWithNativeSource:mediaSource sourceType:_sourceType]]; + [_sources addObject:[[RTC_OBJC_TYPE(RTCDesktopSource) alloc] initWithNativeSource:mediaSource sourceType:_sourceType]]; } return _sources; } -(void)mediaSourceAdded:(webrtc::MediaSource *) source { - RTCDesktopSource *desktopSource = [[RTCDesktopSource alloc] initWithNativeSource:source sourceType:_sourceType]; + RTC_OBJC_TYPE(RTCDesktopSource) *desktopSource = [[RTC_OBJC_TYPE(RTCDesktopSource) alloc] initWithNativeSource:source sourceType:_sourceType]; [_sources addObject:desktopSource]; 
[_delegate didDesktopSourceAdded:desktopSource]; } -(void)mediaSourceRemoved:(webrtc::MediaSource *) source { - RTCDesktopSource *desktopSource = [self getSourceById:source]; + RTC_OBJC_TYPE(RTCDesktopSource) *desktopSource = [self getSourceById:source]; if(desktopSource != nil) { [_sources removeObject:desktopSource]; [_delegate didDesktopSourceRemoved:desktopSource]; @@ -70,7 +70,7 @@ -(void)mediaSourceRemoved:(webrtc::MediaSource *) source { } -(void)mediaSourceNameChanged:(webrtc::MediaSource *) source { - RTCDesktopSource *desktopSource = [self getSourceById:source]; + RTC_OBJC_TYPE(RTCDesktopSource) *desktopSource = [self getSourceById:source]; if(desktopSource != nil) { [desktopSource setName:source->name().c_str()]; [_delegate didDesktopSourceNameChanged:desktopSource]; @@ -78,16 +78,16 @@ -(void)mediaSourceNameChanged:(webrtc::MediaSource *) source { } -(void)mediaSourceThumbnailChanged:(webrtc::MediaSource *) source { - RTCDesktopSource *desktopSource = [self getSourceById:source]; + RTC_OBJC_TYPE(RTCDesktopSource) *desktopSource = [self getSourceById:source]; if(desktopSource != nil) { [desktopSource setThumbnail:source->thumbnail()]; [_delegate didDesktopSourceThumbnailChanged:desktopSource]; } } --(RTCDesktopSource *)getSourceById:(webrtc::MediaSource *) source { +-(RTC_OBJC_TYPE(RTCDesktopSource) *)getSourceById:(webrtc::MediaSource *) source { NSEnumerator *enumerator = [_sources objectEnumerator]; - RTCDesktopSource *object; + RTC_OBJC_TYPE(RTCDesktopSource) *object; while ((object = enumerator.nextObject) != nil) { if(object.nativeMediaSource == source) { return object; diff --git a/sdk/objc/components/capturer/RTCDesktopSource+Private.h b/sdk/objc/components/capturer/RTCDesktopSource+Private.h index f5a0d14e0f..3f4c4ef25f 100644 --- a/sdk/objc/components/capturer/RTCDesktopSource+Private.h +++ b/sdk/objc/components/capturer/RTCDesktopSource+Private.h @@ -21,7 +21,7 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCDesktopSource () +@interface RTC_OBJC_TYPE(RTCDesktopSource) () - (instancetype)initWithNativeSource:(webrtc::MediaSource*) nativeSource sourceType:(RTCDesktopSourceType) sourceType; diff --git a/sdk/objc/components/capturer/RTCDesktopSource.mm b/sdk/objc/components/capturer/RTCDesktopSource.mm index 3f5f23894b..e1bdc6893a 100644 --- a/sdk/objc/components/capturer/RTCDesktopSource.mm +++ b/sdk/objc/components/capturer/RTCDesktopSource.mm @@ -19,7 +19,7 @@ #import "RTCDesktopSource.h" #import "RTCDesktopSource+Private.h" -@implementation RTCDesktopSource { +@implementation RTC_OBJC_TYPE(RTCDesktopSource) { NSString *_sourceId; NSString *_name; NSImage *_thumbnail; diff --git a/sdk/objc/components/network/RTCNetworkMonitor+Private.h b/sdk/objc/components/network/RTCNetworkMonitor+Private.h index b5c786be18..f3761f7ba3 100644 --- a/sdk/objc/components/network/RTCNetworkMonitor+Private.h +++ b/sdk/objc/components/network/RTCNetworkMonitor+Private.h @@ -9,16 +9,18 @@ */ #import "RTCNetworkMonitor.h" +#import "RTCMacros.h" #include "sdk/objc/native/src/network_monitor_observer.h" -@interface RTCNetworkMonitor () +@interface RTC_OBJC_TYPE (RTCNetworkMonitor) +() -/** `observer` is a raw pointer and should be kept alive - * for this object's lifetime. - */ -- (instancetype)initWithObserver:(webrtc::NetworkMonitorObserver *)observer - NS_DESIGNATED_INITIALIZER; + /** `observer` is a raw pointer and should be kept alive + * for this object's lifetime. 
+ */ + - (instancetype)initWithObserver + : (webrtc::NetworkMonitorObserver *)observer NS_DESIGNATED_INITIALIZER; /** Stops the receiver from posting updates to `observer`. */ - (void)stop; diff --git a/sdk/objc/components/network/RTCNetworkMonitor.h b/sdk/objc/components/network/RTCNetworkMonitor.h index 21d22f5463..4b0cb4baf0 100644 --- a/sdk/objc/components/network/RTCNetworkMonitor.h +++ b/sdk/objc/components/network/RTCNetworkMonitor.h @@ -10,12 +10,14 @@ #import +#import "RTCMacros.h" + NS_ASSUME_NONNULL_BEGIN /** Listens for NWPathMonitor updates and forwards the results to a C++ * observer. */ -@interface RTCNetworkMonitor : NSObject +@interface RTC_OBJC_TYPE (RTCNetworkMonitor): NSObject - (instancetype)init NS_UNAVAILABLE; diff --git a/sdk/objc/components/network/RTCNetworkMonitor.mm b/sdk/objc/components/network/RTCNetworkMonitor.mm index 7e75b2b4c0..2e42ab5290 100644 --- a/sdk/objc/components/network/RTCNetworkMonitor.mm +++ b/sdk/objc/components/network/RTCNetworkMonitor.mm @@ -46,7 +46,7 @@ } // namespace -@implementation RTCNetworkMonitor { +@implementation RTC_OBJC_TYPE (RTCNetworkMonitor) { webrtc::NetworkMonitorObserver *_observer; nw_path_monitor_t _pathMonitor; dispatch_queue_t _monitorQueue; @@ -63,12 +63,12 @@ - (instancetype)initWithObserver:(webrtc::NetworkMonitorObserver *)observer { return nil; } RTCLog(@"NW path monitor created."); - __weak RTCNetworkMonitor *weakSelf = self; + __weak RTC_OBJC_TYPE(RTCNetworkMonitor) *weakSelf = self; nw_path_monitor_set_update_handler(_pathMonitor, ^(nw_path_t path) { if (weakSelf == nil) { return; } - RTCNetworkMonitor *strongSelf = weakSelf; + RTC_OBJC_TYPE(RTCNetworkMonitor) *strongSelf = weakSelf; RTCLog(@"NW path monitor: updated."); nw_path_status_t status = nw_path_get_status(path); if (status == nw_path_status_invalid) { diff --git a/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.h b/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.h index e5987fe22a..c4e2724042 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.h +++ b/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.h @@ -13,5 +13,5 @@ #import "RTCMTLRenderer.h" NS_AVAILABLE(10_11, 9_0) -@interface RTCMTLI420Renderer : RTCMTLRenderer +@interface RTC_OBJC_TYPE(RTCMTLI420Renderer): RTC_OBJC_TYPE(RTCMTLRenderer) @end diff --git a/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.mm b/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.mm index f4c76fa313..963f36c62a 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.mm +++ b/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.mm @@ -70,7 +70,7 @@ fragment half4 fragmentColorConversion( return half4(out); }); -@implementation RTCMTLI420Renderer { +@implementation RTC_OBJC_TYPE(RTCMTLI420Renderer) { // Textures. id _yTexture; id _uTexture; diff --git a/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.h b/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.h index d30b83037f..330533bef0 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.h +++ b/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.h @@ -8,5 +8,7 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ +#import "RTCMacros.h" + // Deprecated: Use RTCMTLVideoView instead -@compatibility_alias RTCMTLNSVideoView RTCMTLVideoView; +@compatibility_alias RTC_OBJC_TYPE(RTCMTLNSVideoView) RTC_OBJC_TYPE(RTCMTLVideoView); diff --git a/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.h b/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.h index 866b7ea17e..125612a269 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.h +++ b/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.h @@ -13,6 +13,6 @@ #import "RTCMTLRenderer.h" NS_AVAILABLE(10_11, 9_0) -@interface RTCMTLNV12Renderer : RTCMTLRenderer +@interface RTC_OBJC_TYPE(RTCMTLNV12Renderer): RTC_OBJC_TYPE(RTCMTLRenderer) @end diff --git a/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.mm b/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.mm index 7b037c6dbc..c4000b1b1d 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.mm +++ b/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.mm @@ -60,7 +60,7 @@ fragment half4 fragmentColorConversion( return half4(out); }); -@implementation RTCMTLNV12Renderer { +@implementation RTC_OBJC_TYPE(RTCMTLNV12Renderer) { // Textures. CVMetalTextureCacheRef _textureCache; id _yTexture; diff --git a/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.h b/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.h index 9db422cd22..5e355a8504 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.h +++ b/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.h @@ -11,12 +11,13 @@ #import #import "RTCMTLRenderer.h" +#import "RTCMacros.h" /** @abstract RGB/BGR renderer. * @discussion This renderer handles both kCVPixelFormatType_32BGRA and * kCVPixelFormatType_32ARGB. */ NS_AVAILABLE(10_11, 9_0) -@interface RTCMTLRGBRenderer : RTCMTLRenderer +@interface RTC_OBJC_TYPE (RTCMTLRGBRenderer): RTC_OBJC_TYPE(RTCMTLRenderer) @end diff --git a/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.mm b/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.mm index e5dc4ef80a..6ca4a4000d 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.mm +++ b/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.mm @@ -30,12 +30,12 @@ } Vertex; typedef struct { - float4 position[[position]]; + float4 position [[position]]; float2 texcoord; } VertexIO; - vertex VertexIO vertexPassthrough(constant Vertex *verticies[[buffer(0)]], - uint vid[[vertex_id]]) { + vertex VertexIO vertexPassthrough(constant Vertex * verticies [[buffer(0)]], + uint vid [[vertex_id]]) { VertexIO out; constant Vertex &v = verticies[vid]; out.position = float4(float2(v.position), 0.0, 1.0); @@ -43,9 +43,9 @@ vertex VertexIO vertexPassthrough(constant Vertex *verticies[[buffer(0)]], return out; } - fragment half4 fragmentColorConversion(VertexIO in[[stage_in]], - texture2d texture[[texture(0)]], - constant bool &isARGB[[buffer(0)]]) { + fragment half4 fragmentColorConversion(VertexIO in [[stage_in]], + texture2d texture [[texture(0)]], + constant bool &isARGB [[buffer(0)]]) { constexpr sampler s(address::clamp_to_edge, filter::linear); half4 out = texture.sample(s, in.texcoord); @@ -56,7 +56,7 @@ fragment half4 fragmentColorConversion(VertexIO in[[stage_in]], return out; }); -@implementation RTCMTLRGBRenderer { +@implementation RTC_OBJC_TYPE (RTCMTLRGBRenderer) { // Textures. 
CVMetalTextureCacheRef _textureCache; id _texture; @@ -73,8 +73,8 @@ - (BOOL)addRenderingDestination:(__kindof MTKView *)view { } - (BOOL)initializeTextureCache { - CVReturn status = CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, [self currentMetalDevice], - nil, &_textureCache); + CVReturn status = CVMetalTextureCacheCreate( + kCFAllocatorDefault, nil, [self currentMetalDevice], nil, &_textureCache); if (status != kCVReturnSuccess) { RTCLogError(@"Metal: Failed to initialize metal texture cache. Return status is %d", status); return NO; @@ -130,9 +130,15 @@ - (BOOL)setupTexturesForFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame { return NO; } - CVReturn result = CVMetalTextureCacheCreateTextureFromImage( - kCFAllocatorDefault, _textureCache, pixelBuffer, nil, mtlPixelFormat, - width, height, 0, &textureOut); + CVReturn result = CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, + _textureCache, + pixelBuffer, + nil, + mtlPixelFormat, + width, + height, + 0, + &textureOut); if (result == kCVReturnSuccess) { gpuTexture = CVMetalTextureGetTexture(textureOut); } diff --git a/sdk/objc/components/renderer/metal/RTCMTLRenderer+Private.h b/sdk/objc/components/renderer/metal/RTCMTLRenderer+Private.h index 916d4d4430..f6a82db56a 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLRenderer+Private.h +++ b/sdk/objc/components/renderer/metal/RTCMTLRenderer+Private.h @@ -16,7 +16,7 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCMTLRenderer (Private) +@interface RTC_OBJC_TYPE(RTCMTLRenderer) (Private) - (nullable id)currentMetalDevice; - (NSString *)shaderSource; - (BOOL)setupTexturesForFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame; diff --git a/sdk/objc/components/renderer/metal/RTCMTLRenderer.h b/sdk/objc/components/renderer/metal/RTCMTLRenderer.h index aa31545973..6bbca3d985 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLRenderer.h +++ b/sdk/objc/components/renderer/metal/RTCMTLRenderer.h @@ -21,7 +21,7 @@ NS_ASSUME_NONNULL_BEGIN /** * Protocol defining ability to render RTCVideoFrame in Metal enabled views. */ -@protocol RTCMTLRenderer +@protocol RTC_OBJC_TYPE(RTCMTLRenderer) /** * Method to be implemented to perform actual rendering of the provided frame. @@ -49,7 +49,7 @@ NS_ASSUME_NONNULL_BEGIN * Implementation of RTCMTLRenderer protocol. */ NS_AVAILABLE(10_11, 9_0) -@interface RTCMTLRenderer : NSObject +@interface RTC_OBJC_TYPE(RTCMTLRenderer) : NSObject /** @abstract A wrapped RTCVideoRotation, or nil. @discussion When not nil, the rotation of the actual frame is ignored when rendering. diff --git a/sdk/objc/components/renderer/metal/RTCMTLRenderer.mm b/sdk/objc/components/renderer/metal/RTCMTLRenderer.mm index 410590a7b1..ca3fcc3e51 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLRenderer.mm +++ b/sdk/objc/components/renderer/metal/RTCMTLRenderer.mm @@ -87,7 +87,7 @@ static inline void getCubeVertexData(int cropX, // In future we might use triple buffering method if it improves performance. static const NSInteger kMaxInflightBuffers = 1; -@implementation RTCMTLRenderer { +@implementation RTC_OBJC_TYPE(RTCMTLRenderer) { __kindof MTKView *_view; // Controller. 
diff --git a/sdk/objc/components/renderer/metal/RTCMTLVideoView.m b/sdk/objc/components/renderer/metal/RTCMTLVideoView.m index 8b2ec1aaa3..d4d98a0bf4 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLVideoView.m +++ b/sdk/objc/components/renderer/metal/RTCMTLVideoView.m @@ -22,17 +22,12 @@ #import "RTCMTLNV12Renderer.h" #import "RTCMTLRGBRenderer.h" -// To avoid unreconized symbol linker errors, we're taking advantage of the objc runtime. -// Linking errors occur when compiling for architectures that don't support Metal. -#define MTKViewClass NSClassFromString(@"MTKView") -#define RTCMTLNV12RendererClass NSClassFromString(@"RTCMTLNV12Renderer") -#define RTCMTLI420RendererClass NSClassFromString(@"RTCMTLI420Renderer") -#define RTCMTLRGBRendererClass NSClassFromString(@"RTCMTLRGBRenderer") - -@interface RTC_OBJC_TYPE (RTCMTLVideoView) -() @property(nonatomic) RTCMTLI420Renderer *rendererI420; -@property(nonatomic) RTCMTLNV12Renderer *rendererNV12; -@property(nonatomic) RTCMTLRGBRenderer *rendererRGB; +#import "RTCMTLRenderer+Private.h" + +@interface RTC_OBJC_TYPE (RTCMTLVideoView) () +@property(nonatomic) RTC_OBJC_TYPE(RTCMTLI420Renderer) *rendererI420; +@property(nonatomic) RTC_OBJC_TYPE(RTCMTLNV12Renderer) * rendererNV12; +@property(nonatomic) RTC_OBJC_TYPE(RTCMTLRGBRenderer) * rendererRGB; @property(nonatomic) MTKView *metalView; @property(atomic) RTC_OBJC_TYPE(RTCVideoFrame) * videoFrame; @property(nonatomic) CGSize videoFrameSize; @@ -96,19 +91,19 @@ - (void)setVideoContentMode:(UIViewContentMode)mode { #pragma mark - Private + (MTKView *)createMetalView:(CGRect)frame { - return [[MTKViewClass alloc] initWithFrame:frame]; + return [[MTKView alloc] initWithFrame:frame]; } -+ (RTCMTLNV12Renderer *)createNV12Renderer { - return [[RTCMTLNV12RendererClass alloc] init]; ++ (RTC_OBJC_TYPE(RTCMTLNV12Renderer) *)createNV12Renderer { + return [[RTC_OBJC_TYPE(RTCMTLNV12Renderer) alloc] init]; } -+ (RTCMTLI420Renderer *)createI420Renderer { - return [[RTCMTLI420RendererClass alloc] init]; ++ (RTC_OBJC_TYPE(RTCMTLI420Renderer) *)createI420Renderer { + return [[RTC_OBJC_TYPE(RTCMTLI420Renderer) alloc] init]; } -+ (RTCMTLRGBRenderer *)createRGBRenderer { - return [[RTCMTLRGBRendererClass alloc] init]; ++ (RTC_OBJC_TYPE(RTCMTLRGBRenderer) *)createRGBRenderer { + return [[RTC_OBJC_TYPE(RTCMTLRGBRenderer) alloc] init]; } - (void)configure { @@ -159,7 +154,7 @@ - (void)drawInMTKView:(nonnull MTKView *)view { return; } - RTCMTLRenderer *renderer; + RTC_OBJC_TYPE(RTCMTLRenderer) * renderer; if ([videoFrame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) { RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)videoFrame.buffer; const OSType pixelFormat = CVPixelBufferGetPixelFormatType(buffer.pixelBuffer); diff --git a/sdk/objc/components/renderer/opengl/RTCDefaultShader.h b/sdk/objc/components/renderer/opengl/RTCDefaultShader.h index 71a073ab21..b00cf8047d 100644 --- a/sdk/objc/components/renderer/opengl/RTCDefaultShader.h +++ b/sdk/objc/components/renderer/opengl/RTCDefaultShader.h @@ -16,7 +16,7 @@ NS_ASSUME_NONNULL_BEGIN * and RTCEAGLVideoView if no external shader is specified. This shader will render * the video in a rectangle without any color or geometric transformations. 
*/ -@interface RTCDefaultShader : NSObject +@interface RTC_OBJC_TYPE(RTCDefaultShader) : NSObject @end diff --git a/sdk/objc/components/renderer/opengl/RTCDefaultShader.mm b/sdk/objc/components/renderer/opengl/RTCDefaultShader.mm index 51dca3223d..b0c111293f 100644 --- a/sdk/objc/components/renderer/opengl/RTCDefaultShader.mm +++ b/sdk/objc/components/renderer/opengl/RTCDefaultShader.mm @@ -69,7 +69,7 @@ " 1.0);\n" " }\n"; -@implementation RTCDefaultShader { +@implementation RTC_OBJC_TYPE(RTCDefaultShader) { GLuint _vertexBuffer; GLuint _vertexArray; // Store current rotation and only upload new vertex data when rotation changes. diff --git a/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.h b/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.h index b78501e9e6..1c5b64fdfc 100644 --- a/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.h +++ b/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.h @@ -10,11 +10,13 @@ #import +#import "RTCMacros.h" + // RTCDisplayLinkTimer wraps a CADisplayLink and is set to fire every two screen // refreshes, which should be 30fps. We wrap the display link in order to avoid // a retain cycle since CADisplayLink takes a strong reference onto its target. // The timer is paused by default. -@interface RTCDisplayLinkTimer : NSObject +@interface RTC_OBJC_TYPE (RTCDisplayLinkTimer): NSObject @property(nonatomic) BOOL isPaused; diff --git a/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.m b/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.m index 906bb898d6..f4cf03304d 100644 --- a/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.m +++ b/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.m @@ -12,7 +12,7 @@ #import -@implementation RTCDisplayLinkTimer { +@implementation RTC_OBJC_TYPE (RTCDisplayLinkTimer) { CADisplayLink *_displayLink; void (^_timerHandler)(void); } @@ -21,17 +21,15 @@ - (instancetype)initWithTimerHandler:(void (^)(void))timerHandler { NSParameterAssert(timerHandler); if (self = [super init]) { _timerHandler = timerHandler; - _displayLink = - [CADisplayLink displayLinkWithTarget:self - selector:@selector(displayLinkDidFire:)]; + _displayLink = [CADisplayLink displayLinkWithTarget:self + selector:@selector(displayLinkDidFire:)]; _displayLink.paused = YES; #if __IPHONE_OS_VERSION_MIN_REQUIRED >= __IPHONE_10_0 _displayLink.preferredFramesPerSecond = 30; #else [_displayLink setFrameInterval:2]; #endif - [_displayLink addToRunLoop:[NSRunLoop currentRunLoop] - forMode:NSRunLoopCommonModes]; + [_displayLink addToRunLoop:[NSRunLoop currentRunLoop] forMode:NSRunLoopCommonModes]; } return self; } diff --git a/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.m b/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.m index 89e62d2ce7..0a00494d2d 100644 --- a/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.m +++ b/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.m @@ -42,14 +42,14 @@ @interface RTC_OBJC_TYPE (RTCEAGLVideoView) @end @implementation RTC_OBJC_TYPE (RTCEAGLVideoView) { - RTCDisplayLinkTimer *_timer; + RTC_OBJC_TYPE(RTCDisplayLinkTimer) * _timer; EAGLContext *_glContext; // This flag should only be set and read on the main thread (e.g. 
by // setNeedsDisplay) BOOL _isDirty; id _shader; - RTCNV12TextureCache *_nv12TextureCache; - RTCI420TextureCache *_i420TextureCache; + RTC_OBJC_TYPE(RTCNV12TextureCache) *_nv12TextureCache; + RTC_OBJC_TYPE(RTCI420TextureCache) *_i420TextureCache; // As timestamps should be unique between frames, will store last // drawn frame timestamp instead of the whole frame to reduce memory usage. int64_t _lastDrawnFrameTimeStampNs; @@ -61,11 +61,11 @@ @implementation RTC_OBJC_TYPE (RTCEAGLVideoView) { @synthesize rotationOverride = _rotationOverride; - (instancetype)initWithFrame:(CGRect)frame { - return [self initWithFrame:frame shader:[[RTCDefaultShader alloc] init]]; + return [self initWithFrame:frame shader:[[RTC_OBJC_TYPE(RTCDefaultShader) alloc] init]]; } - (instancetype)initWithCoder:(NSCoder *)aDecoder { - return [self initWithCoder:aDecoder shader:[[RTCDefaultShader alloc] init]]; + return [self initWithCoder:aDecoder shader:[[RTC_OBJC_TYPE(RTCDefaultShader) alloc] init]]; } - (instancetype)initWithFrame:(CGRect)frame shader:(id)shader { @@ -90,8 +90,7 @@ - (instancetype)initWithCoder:(NSCoder *)aDecoder } - (BOOL)configure { - EAGLContext *glContext = - [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES3]; + EAGLContext *glContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES3]; if (!glContext) { glContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2]; } @@ -102,8 +101,7 @@ - (BOOL)configure { _glContext = glContext; // GLKView manages a framebuffer for us. - _glkView = [[GLKView alloc] initWithFrame:CGRectZero - context:_glContext]; + _glkView = [[GLKView alloc] initWithFrame:CGRectZero context:_glContext]; _glkView.drawableColorFormat = GLKViewDrawableColorFormatRGBA8888; _glkView.drawableDepthFormat = GLKViewDrawableDepthFormatNone; _glkView.drawableStencilFormat = GLKViewDrawableStencilFormatNone; @@ -115,8 +113,7 @@ - (BOOL)configure { // Listen to application state in order to clean up OpenGL before app goes // away. - NSNotificationCenter *notificationCenter = - [NSNotificationCenter defaultCenter]; + NSNotificationCenter *notificationCenter = [NSNotificationCenter defaultCenter]; [notificationCenter addObserver:self selector:@selector(willResignActive) name:UIApplicationWillResignActiveNotification @@ -130,7 +127,7 @@ - (BOOL)configure { // using a refresh rate proportional to screen refresh frequency. This // occurs on the main thread. 
__weak RTC_OBJC_TYPE(RTCEAGLVideoView) *weakSelf = self; - _timer = [[RTCDisplayLinkTimer alloc] initWithTimerHandler:^{ + _timer = [[RTC_OBJC_TYPE(RTCDisplayLinkTimer) alloc] initWithTimerHandler:^{ RTC_OBJC_TYPE(RTCEAGLVideoView) *strongSelf = weakSelf; [strongSelf displayLinkTimerDidFire]; }]; @@ -141,14 +138,13 @@ - (BOOL)configure { } - (void)setMultipleTouchEnabled:(BOOL)multipleTouchEnabled { - [super setMultipleTouchEnabled:multipleTouchEnabled]; - _glkView.multipleTouchEnabled = multipleTouchEnabled; + [super setMultipleTouchEnabled:multipleTouchEnabled]; + _glkView.multipleTouchEnabled = multipleTouchEnabled; } - (void)dealloc { [[NSNotificationCenter defaultCenter] removeObserver:self]; - UIApplicationState appState = - [UIApplication sharedApplication].applicationState; + UIApplicationState appState = [UIApplication sharedApplication].applicationState; if (appState == UIApplicationStateActive) { [self teardownGL]; } @@ -189,14 +185,14 @@ - (void)glkView:(GLKView *)view drawInRect:(CGRect)rect { return; } RTCVideoRotation rotation = frame.rotation; - if(_rotationOverride != nil) { - [_rotationOverride getValue: &rotation]; + if (_rotationOverride != nil) { + [_rotationOverride getValue:&rotation]; } [self ensureGLContext]; glClear(GL_COLOR_BUFFER_BIT); if ([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) { if (!_nv12TextureCache) { - _nv12TextureCache = [[RTCNV12TextureCache alloc] initWithContext:_glContext]; + _nv12TextureCache = [[RTC_OBJC_TYPE(RTCNV12TextureCache) alloc] initWithContext:_glContext]; } if (_nv12TextureCache) { [_nv12TextureCache uploadFrameToTextures:frame]; @@ -211,7 +207,7 @@ - (void)glkView:(GLKView *)view drawInRect:(CGRect)rect { } } else { if (!_i420TextureCache) { - _i420TextureCache = [[RTCI420TextureCache alloc] initWithContext:_glContext]; + _i420TextureCache = [[RTC_OBJC_TYPE(RTCI420TextureCache) alloc] initWithContext:_glContext]; } [_i420TextureCache uploadFrameToTextures:frame]; [_shader applyShadingForFrameWithWidth:frame.width diff --git a/sdk/objc/components/renderer/opengl/RTCI420TextureCache.h b/sdk/objc/components/renderer/opengl/RTCI420TextureCache.h index 9fdcc5a695..2c2319d043 100644 --- a/sdk/objc/components/renderer/opengl/RTCI420TextureCache.h +++ b/sdk/objc/components/renderer/opengl/RTCI420TextureCache.h @@ -11,7 +11,7 @@ #import "RTCOpenGLDefines.h" #import "base/RTCVideoFrame.h" -@interface RTCI420TextureCache : NSObject +@interface RTC_OBJC_TYPE(RTCI420TextureCache) : NSObject @property(nonatomic, readonly) GLuint yTexture; @property(nonatomic, readonly) GLuint uTexture; diff --git a/sdk/objc/components/renderer/opengl/RTCI420TextureCache.mm b/sdk/objc/components/renderer/opengl/RTCI420TextureCache.mm index 5dccd4bf6a..0ed19a842c 100644 --- a/sdk/objc/components/renderer/opengl/RTCI420TextureCache.mm +++ b/sdk/objc/components/renderer/opengl/RTCI420TextureCache.mm @@ -28,7 +28,7 @@ static const GLsizei kNumTexturesPerSet = 3; static const GLsizei kNumTextures = kNumTexturesPerSet * kNumTextureSets; -@implementation RTCI420TextureCache { +@implementation RTC_OBJC_TYPE(RTCI420TextureCache) { BOOL _hasUnpackRowLength; GLint _currentTextureSet; // Handles for OpenGL constructs. 
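[Editor's note] The hunks above (and the rest of this patch) only wrap previously unprefixed renderer classes such as RTCDefaultShader, RTCDisplayLinkTimer and the texture caches in the RTC_OBJC_TYPE() macro; no behavior changes. As background, the macro lives in sdk/objc/base/RTCMacros.h and expands the class name with an optional build-time symbol prefix. The sketch below is a paraphrase for illustration, not part of the patch, and the exact wording in this branch may differ slightly; the LK prefix used in the example is hypothetical.

    // sdk/objc/base/RTCMacros.h (simplified paraphrase)
    #define RTC_SYMBOL_CONCAT_HELPER(a, b) a##b
    #define RTC_SYMBOL_CONCAT(a, b) RTC_SYMBOL_CONCAT_HELPER(a, b)

    // Empty by default; builds that need prefixed symbols define it.
    #ifndef RTC_OBJC_TYPE_PREFIX
    #define RTC_OBJC_TYPE_PREFIX
    #endif

    // Prepends the (possibly empty) prefix to the type name.
    #define RTC_OBJC_TYPE(type_name) RTC_SYMBOL_CONCAT(RTC_OBJC_TYPE_PREFIX, type_name)

    // With the default empty prefix:
    //   @interface RTC_OBJC_TYPE(RTCDefaultShader) : NSObject   ->   @interface RTCDefaultShader : NSObject
    // With a hypothetical -DRTC_OBJC_TYPE_PREFIX=LK the same declaration becomes LKRTCDefaultShader.

Without the wrapper, these internal classes would keep their unprefixed names even when a prefix is configured, which defeats the point of symbol prefixing when more than one WebRTC build is linked into the same app.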
diff --git a/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.m b/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.m index 168c73126f..97957faf24 100644 --- a/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.m +++ b/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.m @@ -29,7 +29,7 @@ @interface RTC_OBJC_TYPE (RTCNSGLVideoView) // from the display link callback so atomicity is required. @property(atomic, strong) RTC_OBJC_TYPE(RTCVideoFrame) * videoFrame; -@property(atomic, strong) RTCI420TextureCache *i420TextureCache; +@property(atomic, strong) RTC_OBJC_TYPE(RTCI420TextureCache) *i420TextureCache; - (void)drawFrame; @end @@ -57,7 +57,7 @@ @implementation RTC_OBJC_TYPE (RTCNSGLVideoView) { @synthesize i420TextureCache = _i420TextureCache; - (instancetype)initWithFrame:(NSRect)frame pixelFormat:(NSOpenGLPixelFormat *)format { - return [self initWithFrame:frame pixelFormat:format shader:[[RTCDefaultShader alloc] init]]; + return [self initWithFrame:frame pixelFormat:format shader:[[RTC_OBJC_TYPE(RTCDefaultShader) alloc] init]]; } - (instancetype)initWithFrame:(NSRect)frame @@ -140,9 +140,9 @@ - (void)drawFrame { // TODO(magjed): Add support for NV12 texture cache on OS X. frame = [frame newI420VideoFrame]; if (!self.i420TextureCache) { - self.i420TextureCache = [[RTCI420TextureCache alloc] initWithContext:context]; + self.i420TextureCache = [[RTC_OBJC_TYPE(RTCI420TextureCache) alloc] initWithContext:context]; } - RTCI420TextureCache *i420TextureCache = self.i420TextureCache; + RTC_OBJC_TYPE(RTCI420TextureCache) *i420TextureCache = self.i420TextureCache; if (i420TextureCache) { [i420TextureCache uploadFrameToTextures:frame]; [_shader applyShadingForFrameWithWidth:frame.width diff --git a/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.h b/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.h index f202b836b5..420490b1ab 100644 --- a/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.h +++ b/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.h @@ -16,7 +16,7 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCNV12TextureCache : NSObject +@interface RTC_OBJC_TYPE(RTCNV12TextureCache) : NSObject @property(nonatomic, readonly) GLuint yTexture; @property(nonatomic, readonly) GLuint uvTexture; diff --git a/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.m b/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.m index a520ac45b4..096767be55 100644 --- a/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.m +++ b/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.m @@ -14,7 +14,7 @@ #import "base/RTCVideoFrameBuffer.h" #import "components/video_frame_buffer/RTCCVPixelBuffer.h" -@implementation RTCNV12TextureCache { +@implementation RTC_OBJC_TYPE(RTCNV12TextureCache) { CVOpenGLESTextureCacheRef _textureCache; CVOpenGLESTextureRef _yTextureRef; CVOpenGLESTextureRef _uvTextureRef; diff --git a/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.m b/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.m index 61da5f4514..3cc92382e6 100644 --- a/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.m +++ b/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.m @@ -55,12 +55,12 @@ @implementation RTC_OBJC_TYPE (RTCDefaultVideoEncoderFactory) if ([RTC_OBJC_TYPE(RTCVideoEncoderVP9) isSupported]) { [result - addObject:[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecVp9Name parameters:nil scalabilityModes:[RTCVideoEncoderVP9 scalabilityModes]]]; + addObject:[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] 
initWithName:kRTCVideoCodecVp9Name parameters:nil scalabilityModes:[RTC_OBJC_TYPE(RTCVideoEncoderVP9) scalabilityModes]]]; } #if defined(RTC_USE_LIBAOM_AV1_ENCODER) RTC_OBJC_TYPE(RTCVideoCodecInfo) *av1Info = - [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecAv1Name parameters:nil scalabilityModes:[RTCVideoEncoderAV1 scalabilityModes]]; + [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecAv1Name parameters:nil scalabilityModes:[RTC_OBJC_TYPE(RTCVideoEncoderAV1) scalabilityModes]]; [result addObject:av1Info]; #endif diff --git a/sdk/objc/components/video_codec/RTCVideoEncoderFactorySimulcast.mm b/sdk/objc/components/video_codec/RTCVideoEncoderFactorySimulcast.mm index e2ca5867c0..55916435a1 100644 --- a/sdk/objc/components/video_codec/RTCVideoEncoderFactorySimulcast.mm +++ b/sdk/objc/components/video_codec/RTCVideoEncoderFactorySimulcast.mm @@ -28,7 +28,7 @@ - (instancetype)initWithPrimary:(id)prima } - (nullable id)createEncoder: (RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info { - return [RTCVideoEncoderSimulcast simulcastEncoderWithPrimary: _primary fallback: _fallback videoCodecInfo: info]; + return [RTC_OBJC_TYPE(RTCVideoEncoderSimulcast) simulcastEncoderWithPrimary: _primary fallback: _fallback videoCodecInfo: info]; } - (NSArray *)supportedCodecs { diff --git a/sdk/objc/native/api/video_capturer.mm b/sdk/objc/native/api/video_capturer.mm index a7260ab802..d5cf6fd563 100644 --- a/sdk/objc/native/api/video_capturer.mm +++ b/sdk/objc/native/api/video_capturer.mm @@ -20,7 +20,7 @@ RTC_OBJC_TYPE(RTCVideoCapturer) * objc_video_capturer, rtc::Thread *signaling_thread, rtc::Thread *worker_thread) { - RTCObjCVideoSourceAdapter *adapter = [[RTCObjCVideoSourceAdapter alloc] init]; + RTC_OBJC_TYPE(RTCObjCVideoSourceAdapter) *adapter = [[RTC_OBJC_TYPE(RTCObjCVideoSourceAdapter) alloc] init]; rtc::scoped_refptr objc_video_track_source = rtc::make_ref_counted(adapter); rtc::scoped_refptr video_source = diff --git a/sdk/objc/native/src/audio/audio_device_ios.h b/sdk/objc/native/src/audio/audio_device_ios.h index 605cf402d4..69f04164e7 100644 --- a/sdk/objc/native/src/audio/audio_device_ios.h +++ b/sdk/objc/native/src/audio/audio_device_ios.h @@ -25,7 +25,7 @@ #include "sdk/objc/base/RTCMacros.h" #include "voice_processing_audio_unit.h" -RTC_FWD_DECL_OBJC_CLASS(RTCNativeAudioSessionDelegateAdapter); +RTC_FWD_DECL_OBJC_CLASS(RTC_OBJC_TYPE(RTCNativeAudioSessionDelegateAdapter)); namespace webrtc { @@ -279,7 +279,7 @@ class AudioDeviceIOS : public AudioDeviceGeneric, bool is_interrupted_; // Audio interruption observer instance. - RTCNativeAudioSessionDelegateAdapter* audio_session_observer_ + RTC_OBJC_TYPE(RTCNativeAudioSessionDelegateAdapter)* audio_session_observer_ RTC_GUARDED_BY(thread_); // Set to true if we've activated the audio session. 
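[Editor's note] The audio_device_ios.h hunk just above also wraps the argument of RTC_FWD_DECL_OBJC_CLASS() in RTC_OBJC_TYPE(). That is needed because the forward-declaration macro takes the already-expanded class name, so the C++ header must forward declare the same prefixed symbol that the Objective-C implementation defines. A paraphrased sketch of the macro, again for illustration only and possibly differing in detail from this branch:

    // sdk/objc/base/RTCMacros.h (simplified paraphrase)
    #if defined(__OBJC__)
    // In Objective-C(++) translation units a normal @class forward declaration works.
    #define RTC_FWD_DECL_OBJC_CLASS(classname) @class classname
    #else
    // In plain C++ translation units the class is modeled as an opaque objc_object.
    #define RTC_FWD_DECL_OBJC_CLASS(classname) typedef struct objc_object classname
    #endif

So RTC_FWD_DECL_OBJC_CLASS(RTC_OBJC_TYPE(RTCNativeAudioSessionDelegateAdapter)) keeps the declaration in audio_device_ios.h consistent with the prefixed class that the .mm file instantiates.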
diff --git a/sdk/objc/native/src/audio/audio_device_ios.mm b/sdk/objc/native/src/audio/audio_device_ios.mm index 1689aa39e1..3211ce69d5 100644 --- a/sdk/objc/native/src/audio/audio_device_ios.mm +++ b/sdk/objc/native/src/audio/audio_device_ios.mm @@ -110,7 +110,7 @@ static void LogDeviceInfo() { io_thread_checker_.Detach(); thread_ = rtc::Thread::Current(); - audio_session_observer_ = [[RTCNativeAudioSessionDelegateAdapter alloc] initWithObserver:this]; + audio_session_observer_ = [[RTC_OBJC_TYPE(RTCNativeAudioSessionDelegateAdapter) alloc] initWithObserver:this]; } AudioDeviceIOS::~AudioDeviceIOS() { diff --git a/sdk/objc/native/src/objc_audio_device.h b/sdk/objc/native/src/objc_audio_device.h index fcfe7a6e8b..88f6f19f99 100644 --- a/sdk/objc/native/src/objc_audio_device.h +++ b/sdk/objc/native/src/objc_audio_device.h @@ -19,7 +19,7 @@ #include "modules/audio_device/include/audio_device.h" #include "rtc_base/thread.h" -@class ObjCAudioDeviceDelegate; +@class RTC_OBJC_TYPE(ObjCAudioDeviceDelegate); namespace webrtc { @@ -267,7 +267,7 @@ class ObjCAudioDeviceModule : public AudioDeviceModule { rtc::BufferT record_audio_buffer_; // Delegate object provided to RTCAudioDevice during initialization - ObjCAudioDeviceDelegate* audio_device_delegate_; + RTC_OBJC_TYPE(ObjCAudioDeviceDelegate)* audio_device_delegate_; }; } // namespace objc_adm diff --git a/sdk/objc/native/src/objc_audio_device.mm b/sdk/objc/native/src/objc_audio_device.mm index d629fae20f..5fb72d8a5c 100644 --- a/sdk/objc/native/src/objc_audio_device.mm +++ b/sdk/objc/native/src/objc_audio_device.mm @@ -77,7 +77,7 @@ if (![audio_device_ isInitialized]) { if (audio_device_delegate_ == nil) { - audio_device_delegate_ = [[ObjCAudioDeviceDelegate alloc] + audio_device_delegate_ = [[RTC_OBJC_TYPE(ObjCAudioDeviceDelegate) alloc] initWithAudioDeviceModule:rtc::scoped_refptr(this) audioDeviceThread:thread_]; } diff --git a/sdk/objc/native/src/objc_audio_device_delegate.h b/sdk/objc/native/src/objc_audio_device_delegate.h index 3af079dad9..0b546f269c 100644 --- a/sdk/objc/native/src/objc_audio_device_delegate.h +++ b/sdk/objc/native/src/objc_audio_device_delegate.h @@ -22,7 +22,7 @@ class ObjCAudioDeviceModule; } // namespace objc_adm } // namespace webrtc -@interface ObjCAudioDeviceDelegate : NSObject +@interface RTC_OBJC_TYPE(ObjCAudioDeviceDelegate) : NSObject - (instancetype)initWithAudioDeviceModule: (rtc::scoped_refptr)audioDeviceModule diff --git a/sdk/objc/native/src/objc_audio_device_delegate.mm b/sdk/objc/native/src/objc_audio_device_delegate.mm index 156d6326a4..f4c8cfb71a 100644 --- a/sdk/objc/native/src/objc_audio_device_delegate.mm +++ b/sdk/objc/native/src/objc_audio_device_delegate.mm @@ -55,7 +55,7 @@ } // namespace -@implementation ObjCAudioDeviceDelegate { +@implementation RTC_OBJC_TYPE(ObjCAudioDeviceDelegate) { rtc::scoped_refptr impl_; } diff --git a/sdk/objc/native/src/objc_network_monitor.h b/sdk/objc/native/src/objc_network_monitor.h index 709e9dfbe5..c5440d587b 100644 --- a/sdk/objc/native/src/objc_network_monitor.h +++ b/sdk/objc/native/src/objc_network_monitor.h @@ -59,7 +59,7 @@ class ObjCNetworkMonitor : public rtc::NetworkMonitorInterface, std::map adapter_type_by_name_ RTC_GUARDED_BY(thread_); rtc::scoped_refptr safety_flag_; - RTCNetworkMonitor* network_monitor_ = nil; + RTC_OBJC_TYPE(RTCNetworkMonitor) * network_monitor_ = nil; }; } // namespace webrtc diff --git a/sdk/objc/native/src/objc_network_monitor.mm b/sdk/objc/native/src/objc_network_monitor.mm index 535548c64c..e0785e6d0b 100644 --- 
a/sdk/objc/native/src/objc_network_monitor.mm +++ b/sdk/objc/native/src/objc_network_monitor.mm @@ -39,7 +39,7 @@ thread_ = rtc::Thread::Current(); RTC_DCHECK_RUN_ON(thread_); safety_flag_->SetAlive(); - network_monitor_ = [[RTCNetworkMonitor alloc] initWithObserver:this]; + network_monitor_ = [[RTC_OBJC_TYPE(RTCNetworkMonitor) alloc] initWithObserver:this]; if (network_monitor_ == nil) { RTC_LOG(LS_WARNING) << "Failed to create RTCNetworkMonitor; not available on this OS?"; } diff --git a/sdk/objc/native/src/objc_video_track_source.h b/sdk/objc/native/src/objc_video_track_source.h index 19a3d6db43..5fe39baade 100644 --- a/sdk/objc/native/src/objc_video_track_source.h +++ b/sdk/objc/native/src/objc_video_track_source.h @@ -19,7 +19,7 @@ RTC_FWD_DECL_OBJC_CLASS(RTC_OBJC_TYPE(RTCVideoFrame)); -@interface RTCObjCVideoSourceAdapter : NSObject +@interface RTC_OBJC_TYPE(RTCObjCVideoSourceAdapter) : NSObject @end namespace webrtc { @@ -28,7 +28,7 @@ class ObjCVideoTrackSource : public rtc::AdaptedVideoTrackSource { public: ObjCVideoTrackSource(); explicit ObjCVideoTrackSource(bool is_screencast); - explicit ObjCVideoTrackSource(RTCObjCVideoSourceAdapter* adapter); + explicit ObjCVideoTrackSource(RTC_OBJC_TYPE(RTCObjCVideoSourceAdapter)* adapter); bool is_screencast() const override; @@ -50,7 +50,7 @@ class ObjCVideoTrackSource : public rtc::AdaptedVideoTrackSource { rtc::VideoBroadcaster broadcaster_; rtc::TimestampAligner timestamp_aligner_; - RTCObjCVideoSourceAdapter* adapter_; + RTC_OBJC_TYPE(RTCObjCVideoSourceAdapter)* adapter_; bool is_screencast_; }; diff --git a/sdk/objc/native/src/objc_video_track_source.mm b/sdk/objc/native/src/objc_video_track_source.mm index 7937e90505..401db1d111 100644 --- a/sdk/objc/native/src/objc_video_track_source.mm +++ b/sdk/objc/native/src/objc_video_track_source.mm @@ -17,11 +17,11 @@ #include "api/video/i420_buffer.h" #include "sdk/objc/native/src/objc_frame_buffer.h" -@interface RTCObjCVideoSourceAdapter () +@interface RTC_OBJC_TYPE(RTCObjCVideoSourceAdapter) () @property(nonatomic) webrtc::ObjCVideoTrackSource *objCVideoTrackSource; @end -@implementation RTCObjCVideoSourceAdapter +@implementation RTC_OBJC_TYPE(RTCObjCVideoSourceAdapter) @synthesize objCVideoTrackSource = _objCVideoTrackSource; @@ -40,7 +40,7 @@ - (void)capturer:(RTC_OBJC_TYPE(RTCVideoCapturer) *)capturer : AdaptedVideoTrackSource(/* required resolution alignment */ 2), is_screencast_(is_screencast) {} -ObjCVideoTrackSource::ObjCVideoTrackSource(RTCObjCVideoSourceAdapter *adapter) : adapter_(adapter) { +ObjCVideoTrackSource::ObjCVideoTrackSource(RTC_OBJC_TYPE(RTCObjCVideoSourceAdapter) *adapter) : adapter_(adapter) { adapter_.objCVideoTrackSource = this; } diff --git a/sdk/objc/unittests/RTCMTLVideoView_xctest.m b/sdk/objc/unittests/RTCMTLVideoView_xctest.m index f152eeec91..159025803e 100644 --- a/sdk/objc/unittests/RTCMTLVideoView_xctest.m +++ b/sdk/objc/unittests/RTCMTLVideoView_xctest.m @@ -32,8 +32,8 @@ @interface RTC_OBJC_TYPE (RTCMTLVideoView) + (BOOL)isMetalAvailable; + (UIView *)createMetalView:(CGRect)frame; -+ (id)createNV12Renderer; -+ (id)createI420Renderer; ++ (id)createNV12Renderer; ++ (id)createI420Renderer; - (void)drawInMTKView:(id)view; @end @@ -91,7 +91,7 @@ - (id)frameMockWithCVPixelBuffer:(BOOL)hasCVPixelBuffer { } - (id)rendererMockWithSuccessfulSetup:(BOOL)success { - id rendererMock = OCMClassMock([RTCMTLRenderer class]); + id rendererMock = OCMClassMock([RTC_OBJC_TYPE(RTCMTLRenderer) class]); OCMStub([rendererMock addRenderingDestination:[OCMArg 
any]]).andReturn(success); return rendererMock; } diff --git a/sdk/objc/unittests/RTCPeerConnectionFactoryBuilderTest.mm b/sdk/objc/unittests/RTCPeerConnectionFactoryBuilderTest.mm index 5ba5a52a53..c4dda5aef1 100644 --- a/sdk/objc/unittests/RTCPeerConnectionFactoryBuilderTest.mm +++ b/sdk/objc/unittests/RTCPeerConnectionFactoryBuilderTest.mm @@ -46,7 +46,7 @@ - (void)testBuilder { nativeVideoDecoderFactory:nullptr audioDeviceModule:nullptr audioProcessingModule:nullptr]); - RTCPeerConnectionFactoryBuilder* builder = [[RTCPeerConnectionFactoryBuilder alloc] init]; + RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder)* builder = [[RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) alloc] init]; RTC_OBJC_TYPE(RTCPeerConnectionFactory)* peerConnectionFactory = [builder createPeerConnectionFactory]; EXPECT_TRUE(peerConnectionFactory != nil); @@ -63,7 +63,7 @@ - (void)testDefaultComponentsBuilder { nativeVideoDecoderFactory:nullptr audioDeviceModule:nullptr audioProcessingModule:nullptr]); - RTCPeerConnectionFactoryBuilder* builder = [RTCPeerConnectionFactoryBuilder defaultBuilder]; + RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder)* builder = [RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) defaultBuilder]; RTC_OBJC_TYPE(RTCPeerConnectionFactory)* peerConnectionFactory = [builder createPeerConnectionFactory]; EXPECT_TRUE(peerConnectionFactory != nil); From 8c9aa75abf1aaa4bd79d5aaa70fc000565b9b564 Mon Sep 17 00:00:00 2001 From: CloudWebRTC Date: Wed, 3 Jan 2024 11:07:20 +0800 Subject: [PATCH 28/42] feat: add external audio processor for android. (#103) * feat: add external audio processor for android. * update. * update. * update. * add null ptr check. * rename files. * fix typo. * some comment. * update. --- sdk/android/BUILD.gn | 7 + .../ExternalAudioProcessingFactory.java | 144 ++++++++++++++++++ .../pc/external_audio_processing_factory.cc | 143 +++++++++++++++++ .../pc/external_audio_processing_factory.h | 68 +++++++++ .../pc/external_audio_processing_interface.h | 33 ++++ .../src/jni/pc/external_audio_processor.cc | 71 +++++++++ .../src/jni/pc/external_audio_processor.h | 57 +++++++ 7 files changed, 523 insertions(+) create mode 100644 sdk/android/api/org/webrtc/ExternalAudioProcessingFactory.java create mode 100644 sdk/android/src/jni/pc/external_audio_processing_factory.cc create mode 100644 sdk/android/src/jni/pc/external_audio_processing_factory.h create mode 100644 sdk/android/src/jni/pc/external_audio_processing_interface.h create mode 100644 sdk/android/src/jni/pc/external_audio_processor.cc create mode 100644 sdk/android/src/jni/pc/external_audio_processor.h diff --git a/sdk/android/BUILD.gn b/sdk/android/BUILD.gn index 494b43f6d3..63a3f477d3 100644 --- a/sdk/android/BUILD.gn +++ b/sdk/android/BUILD.gn @@ -261,6 +261,7 @@ if (is_android) { "api/org/webrtc/CryptoOptions.java", "api/org/webrtc/DataChannel.java", "api/org/webrtc/DtmfSender.java", + "api/org/webrtc/ExternalAudioProcessingFactory.java", "api/org/webrtc/FecControllerFactoryFactoryInterface.java", "api/org/webrtc/FrameCryptor.java", "api/org/webrtc/FrameCryptorAlgorithm.java", @@ -726,6 +727,11 @@ if (current_os == "linux" || is_android) { "src/jni/pc/data_channel.cc", "src/jni/pc/data_channel.h", "src/jni/pc/dtmf_sender.cc", + "src/jni/pc/external_audio_processing_factory.cc", + "src/jni/pc/external_audio_processing_factory.h", + "src/jni/pc/external_audio_processing_interface.h", + "src/jni/pc/external_audio_processor.cc", + "src/jni/pc/external_audio_processor.h", "src/jni/pc/frame_cryptor.cc", "src/jni/pc/frame_cryptor.h", 
"src/jni/pc/frame_cryptor_key_provider.cc", @@ -1414,6 +1420,7 @@ if (current_os == "linux" || is_android) { "api/org/webrtc/CryptoOptions.java", "api/org/webrtc/DataChannel.java", "api/org/webrtc/DtmfSender.java", + "api/org/webrtc/ExternalAudioProcessingFactory.java", "api/org/webrtc/FrameCryptor.java", "api/org/webrtc/FrameCryptorFactory.java", "api/org/webrtc/FrameCryptorKeyProvider.java", diff --git a/sdk/android/api/org/webrtc/ExternalAudioProcessingFactory.java b/sdk/android/api/org/webrtc/ExternalAudioProcessingFactory.java new file mode 100644 index 0000000000..7425d2af57 --- /dev/null +++ b/sdk/android/api/org/webrtc/ExternalAudioProcessingFactory.java @@ -0,0 +1,144 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.webrtc; + +import java.nio.ByteBuffer; + +import androidx.annotation.Nullable; +import org.webrtc.AudioProcessingFactory; + + +public class ExternalAudioProcessingFactory implements AudioProcessingFactory { + + /** + * Interface for external audio processing. + */ + public static interface AudioProcessing { + /** + * Called when the processor should be initialized with a new sample rate and + * number of channels. + */ + @CalledByNative("AudioProcessing") + void initialize(int sampleRateHz, int numChannels); + /** Called when the processor should be reset with a new sample rate. */ + @CalledByNative("AudioProcessing") + void reset(int newRate); + /** + * Processes the given capture or render signal. NOTE: `buffer.data` will be + * freed once this function returns so callers who want to use the data + * asynchronously must make sure to copy it first. + */ + @CalledByNative("AudioProcessing") + void process(int numBands, int numFrames, ByteBuffer buffer); + } + + private long apmPtr; + private long capturePostProcessingPtr; + private long renderPreProcessingPtr; + + public ExternalAudioProcessingFactory() { + apmPtr = nativeGetDefaultApm(); + capturePostProcessingPtr = 0; + renderPreProcessingPtr = 0; + } + + @Override + public long createNative() { + if(apmPtr == 0) { + apmPtr = nativeGetDefaultApm(); + } + return apmPtr; + } + + /** + * Sets the capture post processing module. + * This module is applied to the audio signal after capture and before sending + * to the audio encoder. + */ + public void setCapturePostProcessing(@Nullable AudioProcessing processing) { + checkExternalAudioProcessorExists(); + long newPtr = nativeSetCapturePostProcessing(processing); + if (capturePostProcessingPtr != 0) { + JniCommon.nativeReleaseRef(capturePostProcessingPtr); + capturePostProcessingPtr = 0; + } + capturePostProcessingPtr = newPtr; + } + + /** + * Sets the render pre processing module. + * This module is applied to the audio signal after receiving from the audio + * decoder and before rendering. 
+ */ + public void setRenderPreProcessing(@Nullable AudioProcessing processing) { + checkExternalAudioProcessorExists(); + long newPtr = nativeSetRenderPreProcessing(processing); + if (renderPreProcessingPtr != 0) { + JniCommon.nativeReleaseRef(renderPreProcessingPtr); + renderPreProcessingPtr = 0; + } + renderPreProcessingPtr = newPtr; + } + + /** + * Sets the bypass flag for the capture post processing module. + * If true, the registered audio processing will be bypassed. + */ + public void setBypassFlagForCapturePost( boolean bypass) { + checkExternalAudioProcessorExists(); + nativeSetBypassFlagForCapturePost(bypass); + } + + /** + * Sets the bypass flag for the render pre processing module. + * If true, the registered audio processing will be bypassed. + */ + public void setBypassFlagForRenderPre( boolean bypass) { + checkExternalAudioProcessorExists(); + nativeSetBypassFlagForRenderPre(bypass); + } + + /** + * Destroys the ExternalAudioProcessor. + */ + public void destroy() { + checkExternalAudioProcessorExists(); + if (renderPreProcessingPtr != 0) { + JniCommon.nativeReleaseRef(renderPreProcessingPtr); + renderPreProcessingPtr = 0; + } + if (capturePostProcessingPtr != 0) { + JniCommon.nativeReleaseRef(capturePostProcessingPtr); + capturePostProcessingPtr = 0; + } + nativeDestroy(); + apmPtr = 0; + } + + private void checkExternalAudioProcessorExists() { + if (apmPtr == 0) { + throw new IllegalStateException("ExternalAudioProcessor has been disposed."); + } + } + + private static native long nativeGetDefaultApm(); + private static native long nativeSetCapturePostProcessing(AudioProcessing processing); + private static native long nativeSetRenderPreProcessing(AudioProcessing processing); + private static native void nativeSetBypassFlagForCapturePost(boolean bypass); + private static native void nativeSetBypassFlagForRenderPre(boolean bypass); + private static native void nativeDestroy(); +} diff --git a/sdk/android/src/jni/pc/external_audio_processing_factory.cc b/sdk/android/src/jni/pc/external_audio_processing_factory.cc new file mode 100644 index 0000000000..3d7ee7a4d9 --- /dev/null +++ b/sdk/android/src/jni/pc/external_audio_processing_factory.cc @@ -0,0 +1,143 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "sdk/android/src/jni/pc/external_audio_processing_factory.h" + +#include +#include + +#include "api/make_ref_counted.h" +#include "rtc_base/ref_counted_object.h" +#include "sdk/android/generated_peerconnection_jni/ExternalAudioProcessingFactory_jni.h" +#include "sdk/android/native_api/jni/java_types.h" +#include "sdk/android/native_api/jni/scoped_java_ref.h" +#include "sdk/android/src/jni/jni_helpers.h" +#include "sdk/android/src/jni/pc/external_audio_processor.h" + +namespace webrtc { +namespace jni { + +ExternalAudioProcessingJni::ExternalAudioProcessingJni( + JNIEnv* jni, + const JavaRef& j_processing) + : j_processing_global_(jni, j_processing) {} +ExternalAudioProcessingJni::~ExternalAudioProcessingJni() {} +void ExternalAudioProcessingJni::Initialize(int sample_rate_hz, + int num_channels) { + JNIEnv* env = AttachCurrentThreadIfNeeded(); + Java_AudioProcessing_initialize(env, j_processing_global_, sample_rate_hz, + num_channels); +} + +void ExternalAudioProcessingJni::Reset(int new_rate) { + JNIEnv* env = AttachCurrentThreadIfNeeded(); + Java_AudioProcessing_reset(env, j_processing_global_, new_rate); +} + +void ExternalAudioProcessingJni::Process(int num_bands, int num_frames, int buffer_size, float* buffer) { + JNIEnv* env = AttachCurrentThreadIfNeeded(); + ScopedJavaLocalRef audio_buffer = + NewDirectByteBuffer(env, (void*)buffer, buffer_size * sizeof(float)); + Java_AudioProcessing_process(env, j_processing_global_, num_bands, num_frames, audio_buffer); +} + +ExternalAudioProcessingFactory::ExternalAudioProcessingFactory() { + capture_post_processor_ = new ExternalAudioProcessor(); + std::unique_ptr capture_post_processor( + capture_post_processor_); + + render_pre_processor_ = new ExternalAudioProcessor(); + std::unique_ptr render_pre_processor( + render_pre_processor_); + + apm_ = webrtc::AudioProcessingBuilder() + .SetCapturePostProcessing(std::move(capture_post_processor)) + .SetRenderPreProcessing(std::move(render_pre_processor)) + .Create(); + + webrtc::AudioProcessing::Config config; + apm_->ApplyConfig(config); +} + +static ExternalAudioProcessingFactory* default_processor_ptr; + +static jlong JNI_ExternalAudioProcessingFactory_GetDefaultApm(JNIEnv* env) { + if (!default_processor_ptr) { + auto default_processor = rtc::make_ref_counted(); + default_processor_ptr = default_processor.release(); + } + return webrtc::jni::jlongFromPointer(default_processor_ptr->apm().get()); +} + +static jlong JNI_ExternalAudioProcessingFactory_SetCapturePostProcessing( + JNIEnv* env, + const JavaParamRef& j_processing) { + if (!default_processor_ptr) { + return 0; + } + auto processing = + rtc::make_ref_counted(env, j_processing); + processing->AddRef(); + default_processor_ptr->capture_post_processor()->SetExternalAudioProcessing( + processing.get()); + return jlongFromPointer(processing.get()); +} + +static jlong JNI_ExternalAudioProcessingFactory_SetRenderPreProcessing( + JNIEnv* env, + const JavaParamRef& j_processing) { + if (!default_processor_ptr) { + return 0; + } + auto processing = + rtc::make_ref_counted(env, j_processing); + processing->AddRef(); + default_processor_ptr->render_pre_processor()->SetExternalAudioProcessing( + processing.get()); + return jlongFromPointer(processing.get()); +} + +static void JNI_ExternalAudioProcessingFactory_SetBypassFlagForCapturePost( + JNIEnv* env, + jboolean bypass) { + if (!default_processor_ptr) { + return; + } + default_processor_ptr->capture_post_processor()->SetBypassFlag(bypass); +} + +static void 
JNI_ExternalAudioProcessingFactory_SetBypassFlagForRenderPre( + JNIEnv* env, + jboolean bypass) { + if (!default_processor_ptr) { + return; + } + default_processor_ptr->render_pre_processor()->SetBypassFlag(bypass); +} + +static void JNI_ExternalAudioProcessingFactory_Destroy(JNIEnv* env) { + if (!default_processor_ptr) { + return; + } + default_processor_ptr->render_pre_processor()->SetExternalAudioProcessing( + nullptr); + default_processor_ptr->capture_post_processor()->SetExternalAudioProcessing( + nullptr); + delete default_processor_ptr; +} + +} // namespace jni +} // namespace webrtc diff --git a/sdk/android/src/jni/pc/external_audio_processing_factory.h b/sdk/android/src/jni/pc/external_audio_processing_factory.h new file mode 100644 index 0000000000..5dfebe81fc --- /dev/null +++ b/sdk/android/src/jni/pc/external_audio_processing_factory.h @@ -0,0 +1,68 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#define WEBRTC_APM_DEBUG_DUMP 0 + +#include "rtc_base/ref_counted_object.h" +#include "sdk/android/native_api/jni/scoped_java_ref.h" +#include "sdk/android/src/jni/pc/external_audio_processor.h" +#include "sdk/android/src/jni/pc/external_audio_processing_interface.h" + +namespace webrtc { +namespace jni { + +class ExternalAudioProcessingJni + : public webrtc::ExternalAudioProcessingInterface, + public rtc::RefCountInterface { + public: + ExternalAudioProcessingJni(JNIEnv* jni, const JavaRef& j_processing); + ~ExternalAudioProcessingJni(); + + protected: + virtual void Initialize(int sample_rate_hz, int num_channels) override; + virtual void Reset(int new_rate) override; + virtual void Process(int num_bans, int num_frames, int buffer_size, float* buffer) override; + + private: + const ScopedJavaGlobalRef j_processing_global_; + const ScopedJavaGlobalRef j_processing_; +}; + +class ExternalAudioProcessingFactory : public rtc::RefCountInterface { + public: + ExternalAudioProcessingFactory(); + virtual ~ExternalAudioProcessingFactory() = default; + + ExternalAudioProcessor* capture_post_processor() { + return capture_post_processor_; + } + + ExternalAudioProcessor* render_pre_processor() { + return render_pre_processor_; + } + + rtc::scoped_refptr apm() { return apm_; } + + private: + rtc::scoped_refptr apm_; + ExternalAudioProcessor* capture_post_processor_; + ExternalAudioProcessor* render_pre_processor_; +}; + +} // namespace jni +} // namespace webrtc diff --git a/sdk/android/src/jni/pc/external_audio_processing_interface.h b/sdk/android/src/jni/pc/external_audio_processing_interface.h new file mode 100644 index 0000000000..1202be106b --- /dev/null +++ b/sdk/android/src/jni/pc/external_audio_processing_interface.h @@ -0,0 +1,33 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef API_ANDROID_JNI_EXTERNALAUDIOPROCESSORINTERFACE_H_ +#define API_ANDROID_JNI_EXTERNALAUDIOPROCESSORINTERFACE_H_ + +namespace webrtc { + +class ExternalAudioProcessingInterface { + public: + virtual void Initialize(int sample_rate_hz, int num_channels) = 0; + virtual void Reset(int new_rate) = 0; + virtual void Process(int num_bands, int num_frames, int buffer_size, float* buffer) = 0; + + protected: + virtual ~ExternalAudioProcessingInterface() = default; +}; + +} // namespace webrtc + +#endif // API_ANDROID_JNI_EXTERNALAUDIOPROCESSORINTERFACE_H_ diff --git a/sdk/android/src/jni/pc/external_audio_processor.cc b/sdk/android/src/jni/pc/external_audio_processor.cc new file mode 100644 index 0000000000..2d8747427e --- /dev/null +++ b/sdk/android/src/jni/pc/external_audio_processor.cc @@ -0,0 +1,71 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "sdk/android/src/jni/pc/external_audio_processor.h" + +namespace webrtc { + +void ExternalAudioProcessor::SetExternalAudioProcessing( + ExternalAudioProcessingInterface* processor) { + webrtc::MutexLock lock(&mutex_); + external_processor_ = processor; + if (initialized_) { + external_processor_->Initialize(sample_rate_hz_, num_channels_); + } +} + +void ExternalAudioProcessor::SetBypassFlag(bool bypass) { + webrtc::MutexLock lock(&mutex_); + bypass_flag_ = bypass; +} + +void ExternalAudioProcessor::Initialize(int sample_rate_hz, int num_channels) { + webrtc::MutexLock lock(&mutex_); + sample_rate_hz_ = sample_rate_hz; + num_channels_ = num_channels; + if (external_processor_) { + external_processor_->Initialize(sample_rate_hz, num_channels); + } + initialized_ = true; +} + +void ExternalAudioProcessor::Process(webrtc::AudioBuffer* audio) { + webrtc::MutexLock lock(&mutex_); + if (!external_processor_ || bypass_flag_ || !initialized_) { + return; + } + + size_t num_frames = audio->num_frames(); + size_t num_bands =audio->num_bands(); + + int rate = num_frames * 1000; + + if (rate != sample_rate_hz_) { + external_processor_->Reset(rate); + sample_rate_hz_ = rate; + } + + external_processor_->Process(num_bands, num_frames, kNsFrameSize * num_bands, audio->channels()[0]); +} + +std::string ExternalAudioProcessor::ToString() const { + return "ExternalAudioProcessor"; +} + +void ExternalAudioProcessor::SetRuntimeSetting( + webrtc::AudioProcessing::RuntimeSetting setting) {} + +} // namespace webrtc diff --git a/sdk/android/src/jni/pc/external_audio_processor.h b/sdk/android/src/jni/pc/external_audio_processor.h new file mode 100644 index 0000000000..1dc31809fc --- /dev/null +++ b/sdk/android/src/jni/pc/external_audio_processor.h @@ -0,0 +1,57 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef SDK_ANDROID_SRC_JNI_PC_EXTERNAL_AUDIO_PROCESSOR_H_ +#define SDK_ANDROID_SRC_JNI_PC_EXTERNAL_AUDIO_PROCESSOR_H_ + +#define WEBRTC_APM_DEBUG_DUMP 0 + +#include "modules/audio_processing/audio_buffer.h" +#include "modules/audio_processing/audio_processing_impl.h" +#include "modules/audio_processing/include/audio_processing.h" +#include "sdk/android/src/jni/pc/external_audio_processing_interface.h" + +namespace webrtc { + +class ExternalAudioProcessor : public webrtc::CustomProcessing { + public: + ExternalAudioProcessor() = default; + ~ExternalAudioProcessor() override = default; + + void SetExternalAudioProcessing( + ExternalAudioProcessingInterface* processor); + + void SetBypassFlag(bool bypass); + + private: + void Initialize(int sample_rate_hz, int num_channels) override; + void Process(webrtc::AudioBuffer* audio) override; + std::string ToString() const override; + void SetRuntimeSetting( + webrtc::AudioProcessing::RuntimeSetting setting) override; + + private: + mutable webrtc::Mutex mutex_; + ExternalAudioProcessingInterface* external_processor_; + bool bypass_flag_ = false; + bool initialized_ = false; + int sample_rate_hz_ = 0; + int num_channels_ = 0; +}; + +} // namespace webrtc + +#endif // SDK_ANDROID_SRC_JNI_PC_EXTERNAL_AUDIO_PROCESSOR_H_ From b951613befee9e718798dad97c98a299e1518328 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Th=C3=A9o=20Monnom?= Date: Tue, 13 Feb 2024 19:17:58 +0100 Subject: [PATCH 29/42] remove too verbose logs (#107) --- api/crypto/frame_crypto_transformer.cc | 26 -------------------------- 1 file changed, 26 deletions(-) diff --git a/api/crypto/frame_crypto_transformer.cc b/api/crypto/frame_crypto_transformer.cc index 3e888a2175..c5b823409e 100644 --- a/api/crypto/frame_crypto_transformer.cc +++ b/api/crypto/frame_crypto_transformer.cc @@ -451,20 +451,6 @@ void FrameCryptorTransformer::encryptFrame( frame->SetData(data_out); - RTC_LOG(LS_INFO) << "FrameCryptorTransformer::encryptFrame() " - << "frame length = " << static_cast(date_in.size()) - << " encrypted_length = " - << static_cast(data_out.size()) - << " ivLength=" << static_cast(iv.size()) - << " unencrypted_bytes=" - << static_cast(unencrypted_bytes) - << " tag=" << to_hex(tag.data(), tag.size()) - << " key_index=" << static_cast(key_index_) - << " aesKey=" - << to_hex(key_set->encryption_key.data(), - key_set->encryption_key.size()) - << " iv=" << to_hex(iv.data(), iv.size()); - if (last_enc_error_ != FrameCryptionState::kOk) { last_enc_error_ = FrameCryptionState::kOk; onFrameCryptionStateChanged(last_enc_error_); @@ -608,18 +594,6 @@ void FrameCryptorTransformer::decryptFrame( } rtc::Buffer tag(encrypted_payload.data() + encrypted_payload.size() - 16, 16); - RTC_LOG(LS_INFO) << "FrameCryptorTransformer::decryptFrame() " - << " frame length = " << static_cast(date_in.size()) - << " ivLength=" << static_cast(iv.size()) - << " unencrypted_bytes=" - << static_cast(unencrypted_bytes) - << " tag=" << to_hex(tag.data(), tag.size()) - << " key_index=" << static_cast(key_index_) - << " aesKey=" - << to_hex(key_set->encryption_key.data(), - key_set->encryption_key.size()) - << " iv=" << to_hex(iv.data(), iv.size()); - std::vector buffer; int ratchet_count = 0; From 5065016cd0200d0f067dafdcd984436745750338 Mon Sep 17 00:00:00 2001 From: davidliu Date: Thu, 14 Mar 2024 21:24:39 +0900 Subject: [PATCH 30/42] Fix external audio processor sample rate calculation (#108) --- sdk/android/src/jni/pc/external_audio_processor.cc | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git 
a/sdk/android/src/jni/pc/external_audio_processor.cc b/sdk/android/src/jni/pc/external_audio_processor.cc index 2d8747427e..274982d6d4 100644 --- a/sdk/android/src/jni/pc/external_audio_processor.cc +++ b/sdk/android/src/jni/pc/external_audio_processor.cc @@ -51,7 +51,8 @@ void ExternalAudioProcessor::Process(webrtc::AudioBuffer* audio) { size_t num_frames = audio->num_frames(); size_t num_bands =audio->num_bands(); - int rate = num_frames * 1000; + // 1 buffer = 10ms of frames + int rate = num_frames * 100; if (rate != sample_rate_hz_) { external_processor_->Reset(rate); From 3cdeeb0e8a48d1b7fcf605ef4bce06296a061cff Mon Sep 17 00:00:00 2001 From: davidliu Date: Wed, 3 Apr 2024 00:22:37 +0900 Subject: [PATCH 31/42] Allow ice gathering on any address ports (#110) * Allow ice gathering on any address ports Some VPNs use 0.0.0.0 mask which will be ignored by the default gathering. This flag allows those to be picked up as a last resort. * fixes * set explicit default in RTCConfiguration --- api/peer_connection_interface.h | 9 +++++++++ pc/peer_connection.cc | 9 ++++++++- sdk/android/api/org/webrtc/PeerConnection.java | 17 +++++++++++++++++ sdk/android/src/jni/pc/peer_connection.cc | 3 +++ sdk/objc/api/peerconnection/RTCConfiguration.h | 11 +++++++++++ sdk/objc/api/peerconnection/RTCConfiguration.mm | 3 +++ 6 files changed, 51 insertions(+), 1 deletion(-) diff --git a/api/peer_connection_interface.h b/api/peer_connection_interface.h index 6ce4650e5f..06476ef86b 100644 --- a/api/peer_connection_interface.h +++ b/api/peer_connection_interface.h @@ -691,6 +691,15 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { // The burst interval of the pacer, see TaskQueuePacedSender constructor. absl::optional pacer_burst_interval; + // When this flag is set, ports not bound to any specific network interface + // will be used, in addition to normal ports bound to the enumerated + // interfaces. Without this flag, these "any address" ports would only be + // used when network enumeration fails or is disabled. But under certain + // conditions, these ports may succeed where others fail, so they may allow + // the application to work in a wider variety of environments, at the expense + // of having to allocate additional candidates. + bool enable_any_address_ports = false; + // // Don't forget to update operator== if adding something. 
// diff --git a/pc/peer_connection.cc b/pc/peer_connection.cc index f7aec9ebcc..562101be5b 100644 --- a/pc/peer_connection.cc +++ b/pc/peer_connection.cc @@ -345,6 +345,7 @@ bool PeerConnectionInterface::RTCConfiguration::operator==( std::vector vpn_list; PortAllocatorConfig port_allocator_config; absl::optional pacer_burst_interval; + bool enable_any_address_ports; }; static_assert(sizeof(stuff_being_tested_for_equality) == sizeof(*this), "Did you add something to RTCConfiguration and forget to " @@ -411,7 +412,8 @@ bool PeerConnectionInterface::RTCConfiguration::operator==( port_allocator_config.min_port == o.port_allocator_config.min_port && port_allocator_config.max_port == o.port_allocator_config.max_port && port_allocator_config.flags == o.port_allocator_config.flags && - pacer_burst_interval == o.pacer_burst_interval; + pacer_burst_interval == o.pacer_burst_interval && + enable_any_address_ports == o.enable_any_address_ports; } bool PeerConnectionInterface::RTCConfiguration::operator!=( @@ -2174,6 +2176,11 @@ PeerConnection::InitializePortAllocator_n( RTC_LOG(LS_INFO) << "Disable candidates on link-local network interfaces."; } + if (configuration.enable_any_address_ports) { + port_allocator_flags |= cricket::PORTALLOCATOR_ENABLE_ANY_ADDRESS_PORTS; + RTC_LOG(LS_INFO) << "Enable gathering on any address ports."; + } + port_allocator_->set_flags(port_allocator_flags); // No step delay is used while allocating ports. port_allocator_->set_step_delay(cricket::kMinimumStepDelay); diff --git a/sdk/android/api/org/webrtc/PeerConnection.java b/sdk/android/api/org/webrtc/PeerConnection.java index e334a3fd9d..e87c9a2e27 100644 --- a/sdk/android/api/org/webrtc/PeerConnection.java +++ b/sdk/android/api/org/webrtc/PeerConnection.java @@ -572,6 +572,17 @@ public static class RTCConfiguration { * See: https://www.chromestatus.com/feature/6269234631933952 */ public boolean offerExtmapAllowMixed; + + /** + * When this flag is set, ports not bound to any specific network interface + * will be used, in addition to normal ports bound to the enumerated + * interfaces. Without this flag, these "any address" ports would only be + * used when network enumeration fails or is disabled. But under certain + * conditions, these ports may succeed where others fail, so they may allow + * the application to work in a wider variety of environments, at the expense + * of having to allocate additional candidates. + */ + public boolean enableIceGatheringOnAnyAddressPorts; // TODO(deadbeef): Instead of duplicating the defaults here, we should do // something to pick up the defaults from C++. 
The Objective-C equivalent @@ -616,6 +627,7 @@ public RTCConfiguration(List iceServers) { allowCodecSwitching = null; enableImplicitRollback = false; offerExtmapAllowMixed = true; + enableIceGatheringOnAnyAddressPorts = false; } @CalledByNative("RTCConfiguration") @@ -836,6 +848,11 @@ boolean getEnableImplicitRollback() { boolean getOfferExtmapAllowMixed() { return offerExtmapAllowMixed; } + + @CalledByNative("RTCConfiguration") + boolean getEnableIceGatheringOnAnyAddressPorts() { + return enableIceGatheringOnAnyAddressPorts; + } }; private final List localStreams = new ArrayList<>(); diff --git a/sdk/android/src/jni/pc/peer_connection.cc b/sdk/android/src/jni/pc/peer_connection.cc index 9983ae7df2..b9fea85c4b 100644 --- a/sdk/android/src/jni/pc/peer_connection.cc +++ b/sdk/android/src/jni/pc/peer_connection.cc @@ -278,6 +278,9 @@ void JavaToNativeRTCConfiguration( rtc_config->enable_implicit_rollback = Java_RTCConfiguration_getEnableImplicitRollback(jni, j_rtc_config); + rtc_config->enable_any_address_ports = + Java_RTCConfiguration_getEnableIceGatheringOnAnyAddressPorts(jni, j_rtc_config); + ScopedJavaLocalRef j_turn_logging_id = Java_RTCConfiguration_getTurnLoggingId(jni, j_rtc_config); if (!IsNull(jni, j_turn_logging_id)) { diff --git a/sdk/objc/api/peerconnection/RTCConfiguration.h b/sdk/objc/api/peerconnection/RTCConfiguration.h index 1b0d14baf1..dc7fd4b990 100644 --- a/sdk/objc/api/peerconnection/RTCConfiguration.h +++ b/sdk/objc/api/peerconnection/RTCConfiguration.h @@ -261,6 +261,17 @@ RTC_OBJC_EXPORT */ @property(nonatomic, copy, nullable) NSNumber *iceInactiveTimeout; +/** + * When this flag is set, ports not bound to any specific network interface + * will be used, in addition to normal ports bound to the enumerated + * interfaces. Without this flag, these "any address" ports would only be + * used when network enumeration fails or is disabled. But under certain + * conditions, these ports may succeed where others fail, so they may allow + * the application to work in a wider variety of environments, at the expense + * of having to allocate additional candidates. + */ +@property(nonatomic, assign) BOOL enableIceGatheringOnAnyAddressPorts; + - (instancetype)init; @end diff --git a/sdk/objc/api/peerconnection/RTCConfiguration.mm b/sdk/objc/api/peerconnection/RTCConfiguration.mm index 8e42cb2a82..994bb93f0b 100644 --- a/sdk/objc/api/peerconnection/RTCConfiguration.mm +++ b/sdk/objc/api/peerconnection/RTCConfiguration.mm @@ -63,6 +63,7 @@ @implementation RTC_OBJC_TYPE (RTCConfiguration) @synthesize iceUnwritableTimeout = _iceUnwritableTimeout; @synthesize iceUnwritableMinChecks = _iceUnwritableMinChecks; @synthesize iceInactiveTimeout = _iceInactiveTimeout; +@synthesize enableIceGatheringOnAnyAddressPorts = _enableIceGatheringOnAnyAddressPorts; - (instancetype)init { // Copy defaults. @@ -158,6 +159,7 @@ - (instancetype)initWithNativeConfiguration: _iceInactiveTimeout = config.ice_inactive_timeout.has_value() ? 
[NSNumber numberWithInt:*config.ice_inactive_timeout] : nil; + _enableIceGatheringOnAnyAddressPorts = config.enable_any_address_ports; } return self; } @@ -306,6 +308,7 @@ - (NSString *)description { if (_iceInactiveTimeout != nil) { nativeConfig->ice_inactive_timeout = absl::optional(_iceInactiveTimeout.intValue); } + nativeConfig->enable_any_address_ports = _enableIceGatheringOnAnyAddressPorts; return nativeConfig.release(); } From 50b64369864aab4a2931d9c5812795ae8699a47b Mon Sep 17 00:00:00 2001 From: CloudWebRTC Date: Mon, 8 Apr 2024 19:16:26 +0800 Subject: [PATCH 32/42] Add key ring size to keyProviderOptions. (#109) * Add key ring size to keyProviderOptions. * add discard_frame_when_cryptor_not_ready to KeyProviderOptions. * update. --- api/crypto/frame_crypto_transformer.cc | 11 +++++++-- api/crypto/frame_crypto_transformer.h | 24 +++++++++++++++---- .../api/org/webrtc/FrameCryptorFactory.java | 6 ++--- sdk/android/src/jni/pc/frame_cryptor.cc | 9 ++++--- .../RTCFrameCryptorKeyProvider.h | 11 ++++++++- .../RTCFrameCryptorKeyProvider.mm | 24 +++++++++++++++++-- 6 files changed, 70 insertions(+), 15 deletions(-) diff --git a/api/crypto/frame_crypto_transformer.cc b/api/crypto/frame_crypto_transformer.cc index c5b823409e..06ec888170 100644 --- a/api/crypto/frame_crypto_transformer.cc +++ b/api/crypto/frame_crypto_transformer.cc @@ -386,6 +386,9 @@ void FrameCryptorTransformer::encryptFrame( if (date_in.size() == 0 || !enabled_cryption) { RTC_LOG(LS_WARNING) << "FrameCryptorTransformer::encryptFrame() " "date_in.size() == 0 || enabled_cryption == false"; + if(key_provider_->options().discard_frame_when_cryptor_not_ready) { + return; + } sink_callback->OnTransformedFrame(std::move(frame)); return; } @@ -494,6 +497,10 @@ void FrameCryptorTransformer::decryptFrame( if (date_in.size() == 0 || !enabled_cryption) { RTC_LOG(LS_WARNING) << "FrameCryptorTransformer::decryptFrame() " "date_in.size() == 0 || enabled_cryption == false"; + if(key_provider_->options().discard_frame_when_cryptor_not_ready) { + return; + } + sink_callback->OnTransformedFrame(std::move(frame)); return; } @@ -551,11 +558,11 @@ void FrameCryptorTransformer::decryptFrame( ? 
key_provider_->GetSharedKey(participant_id_) : key_provider_->GetKey(participant_id_); - if (key_index >= KEYRING_SIZE || key_handler == nullptr || + if (0 > key_index || key_index >= key_provider_->options().key_ring_size || key_handler == nullptr || key_handler->GetKeySet(key_index) == nullptr) { RTC_LOG(LS_INFO) << "FrameCryptorTransformer::decryptFrame() no keys, or " "key_index[" - << key_index_ << "] out of range for participant " + << key_index << "] out of range for participant " << participant_id_; if (last_dec_error_ != FrameCryptionState::kMissingKey) { last_dec_error_ = FrameCryptionState::kMissingKey; diff --git a/api/crypto/frame_crypto_transformer.h b/api/crypto/frame_crypto_transformer.h index f9027d08f7..9689ec1593 100644 --- a/api/crypto/frame_crypto_transformer.h +++ b/api/crypto/frame_crypto_transformer.h @@ -34,7 +34,8 @@ int DerivePBKDF2KeyFromRawKey(const std::vector raw_key, namespace webrtc { -const size_t KEYRING_SIZE = 16; +const size_t DEFAULT_KEYRING_SIZE = 16; +const size_t MAX_KEYRING_SIZE = 255; class ParticipantKeyHandler; @@ -44,14 +45,22 @@ struct KeyProviderOptions { std::vector uncrypted_magic_bytes; int ratchet_window_size; int failure_tolerance; + // key ring size should be between 1 and 255 + int key_ring_size; + bool discard_frame_when_cryptor_not_ready; KeyProviderOptions() - : shared_key(false), ratchet_window_size(0), failure_tolerance(-1) {} + : shared_key(false), + ratchet_window_size(0), + failure_tolerance(-1), + key_ring_size(DEFAULT_KEYRING_SIZE), + discard_frame_when_cryptor_not_ready(false) {} KeyProviderOptions(KeyProviderOptions& copy) : shared_key(copy.shared_key), ratchet_salt(copy.ratchet_salt), uncrypted_magic_bytes(copy.uncrypted_magic_bytes), ratchet_window_size(copy.ratchet_window_size), - failure_tolerance(copy.failure_tolerance) {} + failure_tolerance(copy.failure_tolerance), + key_ring_size(copy.key_ring_size) {} }; class KeyProvider : public rtc::RefCountInterface { @@ -99,7 +108,14 @@ class ParticipantKeyHandler : public rtc::RefCountInterface { public: ParticipantKeyHandler(KeyProvider* key_provider) : key_provider_(key_provider) { - crypto_key_ring_.resize(KEYRING_SIZE); + int key_ring_size = key_provider_->options().key_ring_size; + if(key_ring_size <= 0) { + key_ring_size = DEFAULT_KEYRING_SIZE; + } else if (key_ring_size > (int)MAX_KEYRING_SIZE) { + // Keyring size needs to be between 1 and 256 + key_ring_size = MAX_KEYRING_SIZE; + } + crypto_key_ring_.resize(key_ring_size); } virtual ~ParticipantKeyHandler() = default; diff --git a/sdk/android/api/org/webrtc/FrameCryptorFactory.java b/sdk/android/api/org/webrtc/FrameCryptorFactory.java index 1ade0b6c9e..865a4b78bb 100644 --- a/sdk/android/api/org/webrtc/FrameCryptorFactory.java +++ b/sdk/android/api/org/webrtc/FrameCryptorFactory.java @@ -18,8 +18,8 @@ public class FrameCryptorFactory { public static FrameCryptorKeyProvider createFrameCryptorKeyProvider( - boolean sharedKey, byte[] ratchetSalt, int ratchetWindowSize, byte[] uncryptedMagicBytes, int failureTolerance) { - return nativeCreateFrameCryptorKeyProvider(sharedKey, ratchetSalt, ratchetWindowSize, uncryptedMagicBytes, failureTolerance); + boolean sharedKey, byte[] ratchetSalt, int ratchetWindowSize, byte[] uncryptedMagicBytes, int failureTolerance, int keyRingSize, boolean discardFrameWhenCryptorNotReady) { + return nativeCreateFrameCryptorKeyProvider(sharedKey, ratchetSalt, ratchetWindowSize, uncryptedMagicBytes, failureTolerance, keyRingSize, discardFrameWhenCryptorNotReady); } public static FrameCryptor 
createFrameCryptorForRtpSender(PeerConnectionFactory factory, RtpSender rtpSender, @@ -40,5 +40,5 @@ private static native FrameCryptor nativeCreateFrameCryptorForRtpReceiver(long f long rtpReceiver, String participantId, int algorithm, long nativeFrameCryptorKeyProvider); private static native FrameCryptorKeyProvider nativeCreateFrameCryptorKeyProvider( - boolean sharedKey, byte[] ratchetSalt, int ratchetWindowSize, byte[] uncryptedMagicBytes, int failureTolerance); + boolean sharedKey, byte[] ratchetSalt, int ratchetWindowSize, byte[] uncryptedMagicBytes, int failureTolerance, int keyRingSize, boolean discardFrameWhenCryptorNotReady); } diff --git a/sdk/android/src/jni/pc/frame_cryptor.cc b/sdk/android/src/jni/pc/frame_cryptor.cc index 59710cd6d3..af2fd8f2b0 100644 --- a/sdk/android/src/jni/pc/frame_cryptor.cc +++ b/sdk/android/src/jni/pc/frame_cryptor.cc @@ -179,18 +179,21 @@ JNI_FrameCryptorFactory_CreateFrameCryptorKeyProvider( const base::android::JavaParamRef& j_ratchetSalt, jint j_ratchetWindowSize, const base::android::JavaParamRef& j_uncryptedMagicBytes, - jint j_failureTolerance) { + jint j_failureTolerance, + jint j_keyRingSize, + jboolean j_discardFrameWhenCryptorNotReady) { auto ratchetSalt = JavaToNativeByteArray(env, j_ratchetSalt); KeyProviderOptions options; options.ratchet_salt = std::vector(ratchetSalt.begin(), ratchetSalt.end()); options.ratchet_window_size = j_ratchetWindowSize; - auto uncryptedMagicBytes = JavaToNativeByteArray(env, j_uncryptedMagicBytes); options.uncrypted_magic_bytes = std::vector(uncryptedMagicBytes.begin(), uncryptedMagicBytes.end()); options.shared_key = j_shared; - options.failure_tolerance = j_failureTolerance; + options.failure_tolerance = j_failureTolerance; + options.key_ring_size = j_keyRingSize; + options.discard_frame_when_cryptor_not_ready = j_discardFrameWhenCryptorNotReady; return NativeToJavaFrameCryptorKeyProvider( env, rtc::make_ref_counted(options)); } diff --git a/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.h b/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.h index b10cf8c4b2..6443b23349 100644 --- a/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.h +++ b/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.h @@ -46,7 +46,16 @@ RTC_OBJC_EXPORT ratchetWindowSize:(int)windowSize sharedKeyMode:(BOOL)sharedKey uncryptedMagicBytes:(nullable NSData *)uncryptedMagicBytes - failureTolerance:(int)failureTolerance; + failureTolerance:(int)failureTolerance + keyRingSize:(int)keyRingSize; + +- (instancetype)initWithRatchetSalt:(NSData *)salt + ratchetWindowSize:(int)windowSize + sharedKeyMode:(BOOL)sharedKey + uncryptedMagicBytes:(nullable NSData *)uncryptedMagicBytes + failureTolerance:(int)failureTolerance + keyRingSize:(int)keyRingSize + discardFrameWhenCryptorNotReady:(BOOL)discardFrameWhenCryptorNotReady; @end diff --git a/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.mm b/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.mm index 1596b98016..88bebfcd9d 100644 --- a/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.mm +++ b/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.mm @@ -38,14 +38,32 @@ - (instancetype)initWithRatchetSalt:(NSData *)salt ratchetWindowSize:windowSize sharedKeyMode:sharedKey uncryptedMagicBytes:uncryptedMagicBytes - failureTolerance:-1]; + failureTolerance:-1 + keyRingSize:webrtc::DEFAULT_KEYRING_SIZE]; } - (instancetype)initWithRatchetSalt:(NSData *)salt ratchetWindowSize:(int)windowSize sharedKeyMode:(BOOL)sharedKey uncryptedMagicBytes:(nullable NSData 
*)uncryptedMagicBytes - failureTolerance:(int)failureTolerance { + failureTolerance:(int)failureTolerance + keyRingSize:(int)keyRingSize { + return [self initWithRatchetSalt:salt + ratchetWindowSize:windowSize + sharedKeyMode:sharedKey + uncryptedMagicBytes:uncryptedMagicBytes + failureTolerance:-1 + keyRingSize:keyRingSize + discardFrameWhenCryptorNotReady:false]; +} + +- (instancetype)initWithRatchetSalt:(NSData *)salt + ratchetWindowSize:(int)windowSize + sharedKeyMode:(BOOL)sharedKey + uncryptedMagicBytes:(nullable NSData *)uncryptedMagicBytes + failureTolerance:(int)failureTolerance + keyRingSize:(int)keyRingSize + discardFrameWhenCryptorNotReady:(BOOL)discardFrameWhenCryptorNotReady { if (self = [super init]) { webrtc::KeyProviderOptions options; options.ratchet_salt = std::vector((const uint8_t *)salt.bytes, @@ -53,6 +71,8 @@ - (instancetype)initWithRatchetSalt:(NSData *)salt options.ratchet_window_size = windowSize; options.shared_key = sharedKey; options.failure_tolerance = failureTolerance; + options.key_ring_size = keyRingSize; + options.discard_frame_when_cryptor_not_ready = discardFrameWhenCryptorNotReady; if(uncryptedMagicBytes != nil) { options.uncrypted_magic_bytes = std::vector((const uint8_t *)uncryptedMagicBytes.bytes, ((const uint8_t *)uncryptedMagicBytes.bytes) + uncryptedMagicBytes.length); From 9f39c3dac63ed0a7a36dda68e77a2b010d75cc76 Mon Sep 17 00:00:00 2001 From: CloudWebRTC Date: Thu, 25 Apr 2024 14:53:39 +0800 Subject: [PATCH 33/42] Fix naming for yuv helper (#113) * add PrivacyInfo.xcprivacy to darwin frameworks. * Fix some function naming of yuv helper. * revert changes. --- sdk/objc/helpers/RTCYUVHelper.h | 6 +++--- sdk/objc/helpers/RTCYUVHelper.mm | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/sdk/objc/helpers/RTCYUVHelper.h b/sdk/objc/helpers/RTCYUVHelper.h index 2e6309c034..ec8ce48355 100644 --- a/sdk/objc/helpers/RTCYUVHelper.h +++ b/sdk/objc/helpers/RTCYUVHelper.h @@ -31,7 +31,7 @@ RTC_OBJC_EXPORT dstV:(uint8_t*)dstV dstStrideV:(int)dstStrideV width:(int)width - width:(int)height + height:(int)height mode:(RTCVideoRotation)mode; + (int)I420ToNV12:(const uint8_t*)srcY @@ -45,7 +45,7 @@ RTC_OBJC_EXPORT dstUV:(uint8_t*)dstUV dstStrideUV:(int)dstStrideUV width:(int)width - width:(int)height; + height:(int)height; + (int)I420ToNV21:(const uint8_t*)srcY srcStrideY:(int)srcStrideY @@ -58,7 +58,7 @@ RTC_OBJC_EXPORT dstUV:(uint8_t*)dstUV dstStrideUV:(int)dstStrideUV width:(int)width - width:(int)height; + height:(int)height; + (int)I420ToARGB:(const uint8_t*)srcY srcStrideY:(int)srcStrideY diff --git a/sdk/objc/helpers/RTCYUVHelper.mm b/sdk/objc/helpers/RTCYUVHelper.mm index 3f610ff990..4a39d469da 100644 --- a/sdk/objc/helpers/RTCYUVHelper.mm +++ b/sdk/objc/helpers/RTCYUVHelper.mm @@ -27,7 +27,7 @@ + (void)I420Rotate:(const uint8_t*)srcY dstV:(uint8_t*)dstV dstStrideV:(int)dstStrideV width:(int)width - width:(int)height + height:(int)height mode:(RTCVideoRotation)mode { libyuv::I420Rotate(srcY, srcStrideY, @@ -57,7 +57,7 @@ + (int)I420ToNV12:(const uint8_t*)srcY dstUV:(uint8_t*)dstUV dstStrideUV:(int)dstStrideUV width:(int)width - width:(int)height { + height:(int)height { return libyuv::I420ToNV12(srcY, srcStrideY, srcU, @@ -83,7 +83,7 @@ + (int)I420ToNV21:(const uint8_t*)srcY dstUV:(uint8_t*)dstUV dstStrideUV:(int)dstStrideUV width:(int)width - width:(int)height { + height:(int)height { return libyuv::I420ToNV21(srcY, srcStrideY, srcU, From c1896bb2e60ad09ae0aa4b103e9c97ac11c2a26e Mon Sep 17 00:00:00 2001 From: CloudWebRTC Date: Thu, 25 
Apr 2024 16:34:10 +0800 Subject: [PATCH 34/42] add PrivacyInfo.xcprivacy to darwin frameworks. (#112) * add PrivacyInfo.xcprivacy to darwin frameworks. * update. --- sdk/BUILD.gn | 9 +++++++++ sdk/objc/PrivacyInfo.xcprivacy | 26 ++++++++++++++++++++++++++ 2 files changed, 35 insertions(+) create mode 100644 sdk/objc/PrivacyInfo.xcprivacy diff --git a/sdk/BUILD.gn b/sdk/BUILD.gn index 2ad8b84f85..877481a642 100644 --- a/sdk/BUILD.gn +++ b/sdk/BUILD.gn @@ -1353,6 +1353,13 @@ if (is_ios || is_mac) { } } + bundle_data("darwin_privacy_info") { + sources = [ + "objc/PrivacyInfo.xcprivacy", + ] + outputs = [ "{{bundle_resources_dir}}/{{source_file_part}}" ] + } + if (is_ios) { apple_framework_bundle_with_umbrella_header("framework_objc") { info_plist = "objc/Info.plist" @@ -1489,6 +1496,7 @@ if (is_ios || is_mac) { ":videocapture_objc", ":videocodec_objc", ":videotoolbox_objc", + ":darwin_privacy_info", ] if (rtc_ios_macos_use_opengl_rendering) { deps += [ ":opengl_ui_objc" ] @@ -1646,6 +1654,7 @@ if (is_ios || is_mac) { ":desktopcapture_objc", ":videocodec_objc", ":videotoolbox_objc", + ":darwin_privacy_info", ] if (!build_with_chromium) { deps += [ diff --git a/sdk/objc/PrivacyInfo.xcprivacy b/sdk/objc/PrivacyInfo.xcprivacy new file mode 100644 index 0000000000..367894c223 --- /dev/null +++ b/sdk/objc/PrivacyInfo.xcprivacy @@ -0,0 +1,26 @@ + + + + + NSPrivacyAccessedAPITypes + + + NSPrivacyAccessedAPIType + NSPrivacyAccessedAPICategoryFileTimestamp + NSPrivacyAccessedAPITypeReasons + + C617.1 + + + + NSPrivacyAccessedAPIType + NSPrivacyAccessedAPICategorySystemBootTime + NSPrivacyAccessedAPITypeReasons + + 35F9.1 + 8FFB.1 + + + + + \ No newline at end of file From 5cc0748d0978506855a4179e1e8c0378d08e82a1 Mon Sep 17 00:00:00 2001 From: Hiroshi Horie <548776+hiroshihorie@users.noreply.github.com> Date: Thu, 25 Apr 2024 20:30:02 +0700 Subject: [PATCH 35/42] Add NSPrivacyCollectedDataTypes key to xcprivacy file (#114) Looks like Xcode requires `NSPrivacyCollectedDataTypes` key. 
![LiveKitExample (macOS)-PrivacyReport 2024-04-25 18-38-31](https://github.com/webrtc-sdk/webrtc/assets/548776/03b26039-893f-4e69-8358-73863d704ebd) --- sdk/objc/PrivacyInfo.xcprivacy | 2 ++ 1 file changed, 2 insertions(+) diff --git a/sdk/objc/PrivacyInfo.xcprivacy b/sdk/objc/PrivacyInfo.xcprivacy index 367894c223..7204a67c33 100644 --- a/sdk/objc/PrivacyInfo.xcprivacy +++ b/sdk/objc/PrivacyInfo.xcprivacy @@ -2,6 +2,8 @@ + NSPrivacyCollectedDataTypes + NSPrivacyAccessedAPITypes From 7cf1e433027c52bd8f0e66254f716b56eeeb9fb5 Mon Sep 17 00:00:00 2001 From: Hiroshi Horie <548776+hiroshihorie@users.noreply.github.com> Date: Fri, 10 May 2024 00:44:57 +0700 Subject: [PATCH 36/42] Thread-safe `RTCInitFieldTrialDictionary` (#116) --- sdk/objc/api/peerconnection/RTCFieldTrials.mm | 31 +++++++++++++------ 1 file changed, 21 insertions(+), 10 deletions(-) diff --git a/sdk/objc/api/peerconnection/RTCFieldTrials.mm b/sdk/objc/api/peerconnection/RTCFieldTrials.mm index 193da9e4f7..b5a2eca8f0 100644 --- a/sdk/objc/api/peerconnection/RTCFieldTrials.mm +++ b/sdk/objc/api/peerconnection/RTCFieldTrials.mm @@ -10,47 +10,58 @@ #import "RTCFieldTrials.h" +#import #include - #import "base/RTCLogging.h" #include "system_wrappers/include/field_trial.h" NSString *const kRTCFieldTrialAudioForceABWENoTWCCKey = @"WebRTC-Audio-ABWENoTWCC"; -NSString * const kRTCFieldTrialFlexFec03AdvertisedKey = @"WebRTC-FlexFEC-03-Advertised"; -NSString * const kRTCFieldTrialFlexFec03Key = @"WebRTC-FlexFEC-03"; -NSString * const kRTCFieldTrialH264HighProfileKey = @"WebRTC-H264HighProfile"; -NSString * const kRTCFieldTrialMinimizeResamplingOnMobileKey = +NSString *const kRTCFieldTrialFlexFec03AdvertisedKey = @"WebRTC-FlexFEC-03-Advertised"; +NSString *const kRTCFieldTrialFlexFec03Key = @"WebRTC-FlexFEC-03"; +NSString *const kRTCFieldTrialH264HighProfileKey = @"WebRTC-H264HighProfile"; +NSString *const kRTCFieldTrialMinimizeResamplingOnMobileKey = @"WebRTC-Audio-MinimizeResamplingOnMobile"; NSString *const kRTCFieldTrialUseNWPathMonitor = @"WebRTC-Network-UseNWPathMonitor"; -NSString * const kRTCFieldTrialEnabledValue = @"Enabled"; +NSString *const kRTCFieldTrialEnabledValue = @"Enabled"; // InitFieldTrialsFromString stores the char*, so the char array must outlive // the application. static char *gFieldTrialInitString = nullptr; +static os_unfair_lock fieldTrialLock = OS_UNFAIR_LOCK_INIT; void RTCInitFieldTrialDictionary(NSDictionary *fieldTrials) { if (!fieldTrials) { RTCLogWarning(@"No fieldTrials provided."); return; } + // Assemble the keys and values into the field trial string. - // We don't perform any extra format checking. That should be done by the underlying WebRTC calls. 
NSMutableString *fieldTrialInitString = [NSMutableString string]; for (NSString *key in fieldTrials) { NSString *fieldTrialEntry = [NSString stringWithFormat:@"%@/%@/", key, fieldTrials[key]]; [fieldTrialInitString appendString:fieldTrialEntry]; } + size_t len = fieldTrialInitString.length + 1; + + // Locking before modifying global variable + os_unfair_lock_lock(&fieldTrialLock); if (gFieldTrialInitString != nullptr) { delete[] gFieldTrialInitString; + gFieldTrialInitString = nullptr; } + gFieldTrialInitString = new char[len]; - if (![fieldTrialInitString getCString:gFieldTrialInitString - maxLength:len - encoding:NSUTF8StringEncoding]) { + bool success = [fieldTrialInitString getCString:gFieldTrialInitString + maxLength:len + encoding:NSUTF8StringEncoding]; + if (!success) { RTCLogError(@"Failed to convert field trial string."); + os_unfair_lock_unlock(&fieldTrialLock); return; } + webrtc::field_trial::InitFieldTrialsFromString(gFieldTrialInitString); + os_unfair_lock_unlock(&fieldTrialLock); } From 9316c0301071f7727fe35ee42cf824a575916f51 Mon Sep 17 00:00:00 2001 From: davidliu Date: Mon, 20 May 2024 07:53:35 +0700 Subject: [PATCH 37/42] android: make audio output attributes modifiable (#118) Needed to do audio mode changes on react native --- sdk/android/api/org/webrtc/audio/JavaAudioDeviceModule.java | 4 ++-- sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/sdk/android/api/org/webrtc/audio/JavaAudioDeviceModule.java b/sdk/android/api/org/webrtc/audio/JavaAudioDeviceModule.java index fc3dd6836c..f01baa5197 100644 --- a/sdk/android/api/org/webrtc/audio/JavaAudioDeviceModule.java +++ b/sdk/android/api/org/webrtc/audio/JavaAudioDeviceModule.java @@ -376,8 +376,8 @@ public static boolean isBuiltInNoiseSuppressorSupported() { private final Context context; private final AudioManager audioManager; - private final WebRtcAudioRecord audioInput; - private final WebRtcAudioTrack audioOutput; + public final WebRtcAudioRecord audioInput; + public final WebRtcAudioTrack audioOutput; private final int inputSampleRate; private final int outputSampleRate; private final boolean useStereoInput; diff --git a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java index 2f745cc62e..25d10e4f61 100644 --- a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java +++ b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java @@ -64,7 +64,7 @@ class WebRtcAudioTrack { private ByteBuffer byteBuffer; - private @Nullable final AudioAttributes audioAttributes; + public @Nullable AudioAttributes audioAttributes; private @Nullable AudioTrack audioTrack; private @Nullable AudioTrackThread audioThread; private final VolumeLogger volumeLogger; From 8f6e7aa54cd2e50d8bdca29f3e7c61a162ec8d68 Mon Sep 17 00:00:00 2001 From: Hiroshi Horie <548776+hiroshihorie@users.noreply.github.com> Date: Wed, 29 May 2024 02:53:01 +0900 Subject: [PATCH 38/42] Set RTCCameraVideoCapturer initial zoom factor (#121) Some devices, such as `.builtInTripleCamera`, have a zoom factor of 2.0 for the normal zoom. This adjustment changes it to 2.0 instead of 1.0. Without this adjustment, when using `.builtInTripleCamera`, it will appear zoomed out. 
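For reference, a minimal sketch of the idea behind the change (illustrative only; `device` stands in for the capturer's current `AVCaptureDevice`, assumed to already be locked for configuration on the capture queue):

```objc
// Use the virtual device's first switch-over factor (e.g. 2.0 on
// .builtInTripleCamera, iOS 13+) as the initial zoom, so the regular
// wide-angle camera is preferred over the ultra-wide one.
CGFloat initialZoom = 1.0;
NSNumber *firstSwitchOver = device.virtualDeviceSwitchOverVideoZoomFactors.firstObject;
if (firstSwitchOver != nil) {
  initialZoom = firstSwitchOver.doubleValue;
}
device.videoZoomFactor = initialZoom;
```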
--- .../capturer/RTCCameraVideoCapturer.m | 21 ++++++++++++++----- 1 file changed, 16 insertions(+), 5 deletions(-) diff --git a/sdk/objc/components/capturer/RTCCameraVideoCapturer.m b/sdk/objc/components/capturer/RTCCameraVideoCapturer.m index 018c1d41cf..f743fb8e0d 100644 --- a/sdk/objc/components/capturer/RTCCameraVideoCapturer.m +++ b/sdk/objc/components/capturer/RTCCameraVideoCapturer.m @@ -175,8 +175,7 @@ - (void)startCaptureWithDevice:(AVCaptureDevice *)device NSError *error = nil; if (![self.currentDevice lockForConfiguration:&error]) { - RTCLogError(@"Failed to lock device %@. Error: %@", - self.currentDevice, + RTCLogError(@"Failed to lock device %@. Error: %@", self.currentDevice, error.userInfo); if (completionHandler) { completionHandler(error); @@ -187,6 +186,7 @@ - (void)startCaptureWithDevice:(AVCaptureDevice *)device [self reconfigureCaptureSessionInput]; [self updateDeviceCaptureFormat:format fps:fps]; [self updateVideoDataOutputPixelFormat:format]; + [self updateZoomFactor]; [self.captureSession startRunning]; [self.currentDevice unlockForConfiguration]; self.isRunning = YES; @@ -287,7 +287,7 @@ - (void)captureOutput:(AVCaptureOutput *)captureOutput RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer = [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBuffer]; int64_t timeStampNs = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) * - kNanosecondsPerSecond; + kNanosecondsPerSecond; RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame = [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:rtcPixelBuffer rotation:_rotation @@ -417,8 +417,7 @@ - (void)handleApplicationDidBecomeActive:(NSNotification *)notification { - (dispatch_queue_t)frameQueue { if (!_frameQueue) { _frameQueue = RTCDispatchQueueCreateWithTarget( - "org.webrtc.cameravideocapturer.video", - DISPATCH_QUEUE_SERIAL, + "org.webrtc.cameravideocapturer.video", DISPATCH_QUEUE_SERIAL, dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0)); } return _frameQueue; @@ -500,6 +499,18 @@ - (void)updateDeviceCaptureFormat:(AVCaptureDeviceFormat *)format fps:(NSInteger } } +- (void)updateZoomFactor { + NSAssert([RTC_OBJC_TYPE(RTCDispatcher) isOnQueueForType:RTCDispatcherTypeCaptureSession], + @"updateZoomFactor must be called on the capture queue."); + +#if TARGET_OS_IOS || TARGET_OS_TV + CGFloat firstSwitchOverZoomFactor = 1.0; + NSNumber *first = _currentDevice.virtualDeviceSwitchOverVideoZoomFactors.firstObject; + if (first != nil) firstSwitchOverZoomFactor = first.doubleValue; + _currentDevice.videoZoomFactor = firstSwitchOverZoomFactor; +#endif +} + - (void)reconfigureCaptureSessionInput { NSAssert([RTC_OBJC_TYPE(RTCDispatcher) isOnQueueForType:RTCDispatcherTypeCaptureSession], @"reconfigureCaptureSessionInput must be called on the capture queue."); From 66fd81b1d8449352cb2287176f34b07c23114e74 Mon Sep 17 00:00:00 2001 From: Hiroshi Horie <548776+hiroshihorie@users.noreply.github.com> Date: Mon, 3 Jun 2024 15:01:48 +0900 Subject: [PATCH 39/42] Unlock configuration before starting capture session (#122) `unlockForConfiguration` before `[captureSession startRunning]` to reduce start glitch. Expose `+defaultZoomFactorForDeviceType:` method. Initial zoom factor was incorrect for devices such as AVCaptureDeviceTypeBuiltInDualCamera. 
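A hypothetical call site for the newly exposed class method (the device lookup below is only an example, not part of this patch):

```objc
AVCaptureDevice *device =
    [AVCaptureDevice defaultDeviceWithDeviceType:AVCaptureDeviceTypeBuiltInTripleCamera
                                       mediaType:AVMediaTypeVideo
                                        position:AVCaptureDevicePositionBack];
// Returns 2.0 for the triple / dual-wide virtual cameras and 1.0 otherwise,
// per the table in the implementation below.
CGFloat zoom =
    [RTC_OBJC_TYPE(RTCCameraVideoCapturer) defaultZoomFactorForDeviceType:device.deviceType];
```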
--- .../capturer/RTCCameraVideoCapturer.h | 2 ++ .../capturer/RTCCameraVideoCapturer.m | 29 +++++++++++++++---- 2 files changed, 26 insertions(+), 5 deletions(-) diff --git a/sdk/objc/components/capturer/RTCCameraVideoCapturer.h b/sdk/objc/components/capturer/RTCCameraVideoCapturer.h index 370bfa70f0..74f0b39925 100644 --- a/sdk/objc/components/capturer/RTCCameraVideoCapturer.h +++ b/sdk/objc/components/capturer/RTCCameraVideoCapturer.h @@ -30,6 +30,8 @@ NS_EXTENSION_UNAVAILABLE_IOS("Camera not available in app extensions.") // Returns list of formats that are supported by this class for this device. + (NSArray *)supportedFormatsForDevice:(AVCaptureDevice *)device; ++ (CGFloat)defaultZoomFactorForDeviceType:(AVCaptureDeviceType)deviceType; + // Returns the most efficient supported output pixel format for this capturer. - (FourCharCode)preferredOutputPixelFormat; diff --git a/sdk/objc/components/capturer/RTCCameraVideoCapturer.m b/sdk/objc/components/capturer/RTCCameraVideoCapturer.m index f743fb8e0d..626c83f855 100644 --- a/sdk/objc/components/capturer/RTCCameraVideoCapturer.m +++ b/sdk/objc/components/capturer/RTCCameraVideoCapturer.m @@ -136,6 +136,26 @@ - (void)dealloc { return device.formats; } ++ (CGFloat)defaultZoomFactorForDeviceType:(AVCaptureDeviceType)deviceType { + // AVCaptureDeviceTypeBuiltInTripleCamera, Virtual, switchOver: [2, 6], default: 2 + // AVCaptureDeviceTypeBuiltInDualCamera, Virtual, switchOver: [3], default: 1 + // AVCaptureDeviceTypeBuiltInDualWideCamera, Virtual, switchOver: [2], default: 2 + // AVCaptureDeviceTypeBuiltInWideAngleCamera, Physical, General purpose use + // AVCaptureDeviceTypeBuiltInTelephotoCamera, Physical + // AVCaptureDeviceTypeBuiltInUltraWideCamera, Physical +#if TARGET_OS_IOS || TARGET_OS_TV + if (@available(iOS 13.0, tvOS 17.0, *)) { + if ([deviceType isEqualToString:AVCaptureDeviceTypeBuiltInTripleCamera] || + [deviceType isEqualToString:AVCaptureDeviceTypeBuiltInDualWideCamera]) + // For AVCaptureDeviceTypeBuiltInTripleCamera and AVCaptureDeviceTypeBuiltInDualWideCamera, + // it will switch over from ultra-wide to wide on 2.0, so to prefer wide by default. 
+ return 2.0; + } +#endif + + return 1.0; +} + - (FourCharCode)preferredOutputPixelFormat { return _preferredOutputPixelFormat; } @@ -187,8 +207,9 @@ - (void)startCaptureWithDevice:(AVCaptureDevice *)device [self updateDeviceCaptureFormat:format fps:fps]; [self updateVideoDataOutputPixelFormat:format]; [self updateZoomFactor]; - [self.captureSession startRunning]; [self.currentDevice unlockForConfiguration]; + + [self.captureSession startRunning]; self.isRunning = YES; if (completionHandler) { completionHandler(nil); @@ -504,10 +525,8 @@ - (void)updateZoomFactor { @"updateZoomFactor must be called on the capture queue."); #if TARGET_OS_IOS || TARGET_OS_TV - CGFloat firstSwitchOverZoomFactor = 1.0; - NSNumber *first = _currentDevice.virtualDeviceSwitchOverVideoZoomFactors.firstObject; - if (first != nil) firstSwitchOverZoomFactor = first.doubleValue; - _currentDevice.videoZoomFactor = firstSwitchOverZoomFactor; + CGFloat videoZoomFactor = [[self class] defaultZoomFactorForDeviceType:_currentDevice.deviceType]; + [_currentDevice setVideoZoomFactor:videoZoomFactor]; #endif } From dac8015ce039c8658706b222746808f01968256b Mon Sep 17 00:00:00 2001 From: Hiroshi Horie <548776+hiroshihorie@users.noreply.github.com> Date: Wed, 5 Jun 2024 14:55:27 +0900 Subject: [PATCH 40/42] Improvements to RTCFrameCryptor (#123) ### Improvements * nil checks * Thread safety * Fix typo for `RTCCryptorAlgorithm` ### Breaking changes * `RTCCyrptorAlgorithm` renamed to `RTCCryptorAlgorithm` * Initialization of RTCFrameCryptor could return nil. --- sdk/objc/api/peerconnection/RTCFrameCryptor.h | 28 ++-- .../api/peerconnection/RTCFrameCryptor.mm | 141 +++++++++++------- 2 files changed, 103 insertions(+), 66 deletions(-) diff --git a/sdk/objc/api/peerconnection/RTCFrameCryptor.h b/sdk/objc/api/peerconnection/RTCFrameCryptor.h index 5b58bf04d5..864e55be95 100644 --- a/sdk/objc/api/peerconnection/RTCFrameCryptor.h +++ b/sdk/objc/api/peerconnection/RTCFrameCryptor.h @@ -26,9 +26,9 @@ NS_ASSUME_NONNULL_BEGIN @class RTC_OBJC_TYPE(RTCFrameCryptor); @class RTC_OBJC_TYPE(RTCPeerConnectionFactory); -typedef NS_ENUM(NSUInteger, RTCCyrptorAlgorithm) { - RTCCyrptorAlgorithmAesGcm = 0, - RTCCyrptorAlgorithmAesCbc, +typedef NS_ENUM(NSUInteger, RTCCryptorAlgorithm) { + RTCCryptorAlgorithmAesGcm = 0, + RTCCryptorAlgorithmAesCbc, }; typedef NS_ENUM(NSInteger, FrameCryptionState) { @@ -61,17 +61,17 @@ RTC_OBJC_EXPORT @property(nonatomic, weak, nullable) id delegate; -- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory - rtpSender:(RTC_OBJC_TYPE(RTCRtpSender) *)sender - participantId:(NSString *)participantId - algorithm:(RTCCyrptorAlgorithm)algorithm - keyProvider:(RTC_OBJC_TYPE(RTCFrameCryptorKeyProvider) *)keyProvider; - -- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory - rtpReceiver:(RTC_OBJC_TYPE(RTCRtpReceiver) *)receiver - participantId:(NSString *)participantId - algorithm:(RTCCyrptorAlgorithm)algorithm - keyProvider:(RTC_OBJC_TYPE(RTCFrameCryptorKeyProvider) *)keyProvider; +- (nullable instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory + rtpSender:(RTC_OBJC_TYPE(RTCRtpSender) *)sender + participantId:(NSString *)participantId + algorithm:(RTCCryptorAlgorithm)algorithm + keyProvider:(RTC_OBJC_TYPE(RTCFrameCryptorKeyProvider) *)keyProvider; + +- (nullable instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory + rtpReceiver:(RTC_OBJC_TYPE(RTCRtpReceiver) *)receiver + participantId:(NSString *)participantId + 
algorithm:(RTCCryptorAlgorithm)algorithm + keyProvider:(RTC_OBJC_TYPE(RTCFrameCryptorKeyProvider) *)keyProvider; @end diff --git a/sdk/objc/api/peerconnection/RTCFrameCryptor.mm b/sdk/objc/api/peerconnection/RTCFrameCryptor.mm index e0b93c62bb..c51b77e9c5 100644 --- a/sdk/objc/api/peerconnection/RTCFrameCryptor.mm +++ b/sdk/objc/api/peerconnection/RTCFrameCryptor.mm @@ -16,10 +16,11 @@ #import "RTCFrameCryptor+Private.h" #import "RTCFrameCryptorKeyProvider+Private.h" +#import "RTCPeerConnectionFactory+Private.h" #import "RTCRtpReceiver+Private.h" #import "RTCRtpSender+Private.h" -#import "RTCPeerConnectionFactory+Private.h" +#import #include #import "base/RTCLogging.h" @@ -93,95 +94,131 @@ @implementation RTC_OBJC_TYPE (RTCFrameCryptor) { const webrtc::RtpSenderInterface *_sender; const webrtc::RtpReceiverInterface *_receiver; - NSString *_participantId; - rtc::scoped_refptr frame_crypto_transformer_; + rtc::scoped_refptr _frame_crypto_transformer; rtc::scoped_refptr _observer; + os_unfair_lock _lock; } @synthesize participantId = _participantId; @synthesize delegate = _delegate; -- (webrtc::FrameCryptorTransformer::Algorithm)algorithmFromEnum:(RTCCyrptorAlgorithm)algorithm { +- (webrtc::FrameCryptorTransformer::Algorithm)algorithmFromEnum:(RTCCryptorAlgorithm)algorithm { switch (algorithm) { - case RTCCyrptorAlgorithmAesGcm: + case RTCCryptorAlgorithmAesGcm: return webrtc::FrameCryptorTransformer::Algorithm::kAesGcm; - case RTCCyrptorAlgorithmAesCbc: + case RTCCryptorAlgorithmAesCbc: return webrtc::FrameCryptorTransformer::Algorithm::kAesCbc; default: return webrtc::FrameCryptorTransformer::Algorithm::kAesGcm; } } -- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory - rtpSender:(RTC_OBJC_TYPE(RTCRtpSender) *)sender - participantId:(NSString *)participantId - algorithm:(RTCCyrptorAlgorithm)algorithm - keyProvider:(RTC_OBJC_TYPE(RTCFrameCryptorKeyProvider) *)keyProvider { +- (nullable instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory + rtpSender:(RTC_OBJC_TYPE(RTCRtpSender) *)sender + participantId:(NSString *)participantId + algorithm:(RTCCryptorAlgorithm)algorithm + keyProvider:(RTC_OBJC_TYPE(RTCFrameCryptorKeyProvider) *)keyProvider { if (self = [super init]) { + _lock = OS_UNFAIR_LOCK_INIT; + + rtc::scoped_refptr nativeRtpSender = sender.nativeRtpSender; + if (nativeRtpSender == nullptr) return nil; + + rtc::scoped_refptr nativeTrack = nativeRtpSender->track(); + if (nativeTrack == nullptr) return nil; + _observer = rtc::make_ref_counted(self); _participantId = participantId; - auto rtpSender = sender.nativeRtpSender; - auto mediaType = rtpSender->track()->kind() == "audio" ? - webrtc::FrameCryptorTransformer::MediaType::kAudioFrame : - webrtc::FrameCryptorTransformer::MediaType::kVideoFrame; - frame_crypto_transformer_ = rtc::scoped_refptr( - new webrtc::FrameCryptorTransformer(factory.signalingThread, - [participantId stdString], - mediaType, - [self algorithmFromEnum:algorithm], - keyProvider.nativeKeyProvider)); - - rtpSender->SetEncoderToPacketizerFrameTransformer(frame_crypto_transformer_); - frame_crypto_transformer_->SetEnabled(false); - frame_crypto_transformer_->RegisterFrameCryptorTransformerObserver(_observer); + + webrtc::FrameCryptorTransformer::MediaType mediaType = + nativeTrack->kind() == "audio" ? 
webrtc::FrameCryptorTransformer::MediaType::kAudioFrame + : webrtc::FrameCryptorTransformer::MediaType::kVideoFrame; + + _frame_crypto_transformer = + rtc::scoped_refptr(new webrtc::FrameCryptorTransformer( + factory.signalingThread, [participantId stdString], mediaType, + [self algorithmFromEnum:algorithm], keyProvider.nativeKeyProvider)); + + nativeRtpSender->SetEncoderToPacketizerFrameTransformer(_frame_crypto_transformer); + _frame_crypto_transformer->SetEnabled(false); + _frame_crypto_transformer->RegisterFrameCryptorTransformerObserver(_observer); } + return self; } -- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory - rtpReceiver:(RTC_OBJC_TYPE(RTCRtpReceiver) *)receiver - participantId:(NSString *)participantId - algorithm:(RTCCyrptorAlgorithm)algorithm - keyProvider:(RTC_OBJC_TYPE(RTCFrameCryptorKeyProvider) *)keyProvider { +- (nullable instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory + rtpReceiver:(RTC_OBJC_TYPE(RTCRtpReceiver) *)receiver + participantId:(NSString *)participantId + algorithm:(RTCCryptorAlgorithm)algorithm + keyProvider:(RTC_OBJC_TYPE(RTCFrameCryptorKeyProvider) *)keyProvider { if (self = [super init]) { + _lock = OS_UNFAIR_LOCK_INIT; + + rtc::scoped_refptr nativeRtpReceiver = receiver.nativeRtpReceiver; + if (nativeRtpReceiver == nullptr) return nil; + + rtc::scoped_refptr nativeTrack = nativeRtpReceiver->track(); + if (nativeTrack == nullptr) return nil; + _observer = rtc::make_ref_counted(self); _participantId = participantId; - auto rtpReceiver = receiver.nativeRtpReceiver; - auto mediaType = rtpReceiver->track()->kind() == "audio" ? - webrtc::FrameCryptorTransformer::MediaType::kAudioFrame : - webrtc::FrameCryptorTransformer::MediaType::kVideoFrame; - frame_crypto_transformer_ = rtc::scoped_refptr( - new webrtc::FrameCryptorTransformer(factory.signalingThread, - [participantId stdString], - mediaType, - [self algorithmFromEnum:algorithm], - keyProvider.nativeKeyProvider)); - - rtpReceiver->SetDepacketizerToDecoderFrameTransformer(frame_crypto_transformer_); - frame_crypto_transformer_->SetEnabled(false); - frame_crypto_transformer_->RegisterFrameCryptorTransformerObserver(_observer); + + webrtc::FrameCryptorTransformer::MediaType mediaType = + nativeTrack->kind() == "audio" ? webrtc::FrameCryptorTransformer::MediaType::kAudioFrame + : webrtc::FrameCryptorTransformer::MediaType::kVideoFrame; + + _frame_crypto_transformer = + rtc::scoped_refptr(new webrtc::FrameCryptorTransformer( + factory.signalingThread, [participantId stdString], mediaType, + [self algorithmFromEnum:algorithm], keyProvider.nativeKeyProvider)); + + nativeRtpReceiver->SetDepacketizerToDecoderFrameTransformer(_frame_crypto_transformer); + _frame_crypto_transformer->SetEnabled(false); + _frame_crypto_transformer->RegisterFrameCryptorTransformerObserver(_observer); } + return self; } +- (void)dealloc { + os_unfair_lock_lock(&_lock); + if (_frame_crypto_transformer != nullptr) { + _frame_crypto_transformer->UnRegisterFrameCryptorTransformerObserver(); + _frame_crypto_transformer = nullptr; + } + _observer = nullptr; + os_unfair_lock_unlock(&_lock); +} + - (BOOL)enabled { - return frame_crypto_transformer_->enabled(); + os_unfair_lock_lock(&_lock); + BOOL result = _frame_crypto_transformer != nullptr ? 
_frame_crypto_transformer->enabled() : NO; + os_unfair_lock_unlock(&_lock); + return result; } - (void)setEnabled:(BOOL)enabled { - frame_crypto_transformer_->SetEnabled(enabled); + os_unfair_lock_lock(&_lock); + if (_frame_crypto_transformer != nullptr) { + _frame_crypto_transformer->SetEnabled(enabled); + } + os_unfair_lock_unlock(&_lock); } - (int)keyIndex { - return frame_crypto_transformer_->key_index(); + os_unfair_lock_lock(&_lock); + int result = _frame_crypto_transformer != nullptr ? _frame_crypto_transformer->key_index() : 0; + os_unfair_lock_unlock(&_lock); + return result; } - (void)setKeyIndex:(int)keyIndex { - frame_crypto_transformer_->SetKeyIndex(keyIndex); -} - -- (void)dealloc { - frame_crypto_transformer_->UnRegisterFrameCryptorTransformerObserver(); + os_unfair_lock_lock(&_lock); + if (_frame_crypto_transformer != nullptr) { + _frame_crypto_transformer->SetKeyIndex(keyIndex); + } + os_unfair_lock_unlock(&_lock); } @end From 653f6f0dad71f70aaecef460dea86e6e4f78d764 Mon Sep 17 00:00:00 2001 From: basharalbashier Date: Fri, 14 Jun 2024 16:17:59 +0300 Subject: [PATCH 41/42] bashar-dev --- BUILD.gn | 2 +- modules/video_coding/include/video_codec_interface.h | 8 ++++---- test/fuzzers/rtp_frame_reference_finder_fuzzer.cc | 2 +- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/BUILD.gn b/BUILD.gn index d5289b85d7..51de20591c 100644 --- a/BUILD.gn +++ b/BUILD.gn @@ -37,7 +37,7 @@ if (!build_with_chromium) { # 'ninja default' and then 'ninja all', the second build should do no work. group("default") { testonly = true - deps = [ ":webrtc" ] + deps = [ ":webrtc","//libwebrtc", ] if (rtc_build_examples) { deps += [ "examples" ] } diff --git a/modules/video_coding/include/video_codec_interface.h b/modules/video_coding/include/video_codec_interface.h index 46ae0d29e1..4faa309b13 100644 --- a/modules/video_coding/include/video_codec_interface.h +++ b/modules/video_coding/include/video_codec_interface.h @@ -51,7 +51,7 @@ struct CodecSpecificInfoVP8 { size_t updatedBuffers[kBuffersCount]; size_t updatedBuffersCount; }; -static_assert(std::is_pod::value, ""); +static_assert(std::is_standard_layout::value, ""); // Hack alert - the code assumes that thisstruct is memset when constructed. struct CodecSpecificInfoVP9 { @@ -82,7 +82,7 @@ struct CodecSpecificInfoVP9 { ABSL_DEPRECATED("") bool end_of_picture; }; -static_assert(std::is_pod::value, ""); +static_assert(std::is_standard_layout::value, ""); // Hack alert - the code assumes that thisstruct is memset when constructed. struct CodecSpecificInfoH264 { @@ -91,14 +91,14 @@ struct CodecSpecificInfoH264 { bool base_layer_sync; bool idr_frame; }; -static_assert(std::is_pod::value, ""); +static_assert(std::is_standard_layout::value, ""); union CodecSpecificInfoUnion { CodecSpecificInfoVP8 VP8; CodecSpecificInfoVP9 VP9; CodecSpecificInfoH264 H264; }; -static_assert(std::is_pod::value, ""); +static_assert(std::is_standard_layout::value, ""); // Note: if any pointers are added to this struct or its sub-structs, it // must be fitted with a copy-constructor. 
This is because it is copied diff --git a/test/fuzzers/rtp_frame_reference_finder_fuzzer.cc b/test/fuzzers/rtp_frame_reference_finder_fuzzer.cc index b1cabc31ac..f3e0d1ad6c 100644 --- a/test/fuzzers/rtp_frame_reference_finder_fuzzer.cc +++ b/test/fuzzers/rtp_frame_reference_finder_fuzzer.cc @@ -23,7 +23,7 @@ class DataReader { template void CopyTo(T* object) { - static_assert(std::is_pod(), ""); + static_assert(std::is_trivial(), ""); uint8_t* destination = reinterpret_cast(object); size_t object_size = sizeof(T); size_t num_bytes = std::min(size_ - offset_, object_size); From 89de95fe4a779c4585272302a85510199c9f4896 Mon Sep 17 00:00:00 2001 From: basharalbashier Date: Sat, 22 Jun 2024 07:21:45 +0300 Subject: [PATCH 42/42] preper gn file to opencv --- .gitignore | 1 + BUILD.gn | 10 +++++++++- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 2a4acfebb7..96d8d1396a 100644 --- a/.gitignore +++ b/.gitignore @@ -78,3 +78,4 @@ /node_modules /libwebrtc /args.txt +libwebrtc \ No newline at end of file diff --git a/BUILD.gn b/BUILD.gn index 51de20591c..e1da40c650 100644 --- a/BUILD.gn +++ b/BUILD.gn @@ -38,6 +38,7 @@ if (!build_with_chromium) { group("default") { testonly = true deps = [ ":webrtc","//libwebrtc", ] + if (rtc_build_examples) { deps += [ "examples" ] } @@ -344,7 +345,14 @@ config("common_config") { # See https://reviews.llvm.org/D56731 for details about this # warning. "-Wctad-maybe-unsupported", + # "-I./opencv4" , + "-fexceptions", ] + cflags_cc += [ + "-fexceptions" + ] + # include_dirs += [ "opencv4" ] + # cflags += [ "-I./opencv4" ] } if (build_with_chromium) { @@ -372,7 +380,7 @@ config("common_config") { # "-Wnested-externs", (C/Obj-C only) ] cflags_objc += [ "-Wstrict-prototypes" ] - cflags_cc = [ + cflags_cc += [ "-Wnon-virtual-dtor", # This is enabled for clang; enable for gcc as well.