From 982acc83fc5c2ec3d296a11df28991d32f3936f4 Mon Sep 17 00:00:00 2001 From: Hiroshi Horie <548776+hiroshihorie@users.noreply.github.com> Date: Fri, 3 Oct 2025 21:42:54 +0800 Subject: [PATCH 1/6] Implementation 1 --- ios/AudioUtils.swift | 6 +++- ios/LiveKitReactNativeModule.swift | 45 +++++++++++++++++++----------- ios/LivekitReactNativeModule.m | 4 ++- 3 files changed, 37 insertions(+), 18 deletions(-) diff --git a/ios/AudioUtils.swift b/ios/AudioUtils.swift index e06dbe67..43a0675b 100644 --- a/ios/AudioUtils.swift +++ b/ios/AudioUtils.swift @@ -6,7 +6,11 @@ public class AudioUtils { case "default_": .default case "voicePrompt": - .voicePrompt + if #available(iOS 12.0, *) { + .voicePrompt + } else { + .default + } case "videoRecording": .videoRecording case "videoChat": diff --git a/ios/LiveKitReactNativeModule.swift b/ios/LiveKitReactNativeModule.swift index 2f116992..53b4b20b 100644 --- a/ios/LiveKitReactNativeModule.swift +++ b/ios/LiveKitReactNativeModule.swift @@ -117,23 +117,26 @@ public class LivekitReactNativeModule: RCTEventEmitter { resolve(nil) } - - @objc(setAppleAudioConfiguration:) - public func setAppleAudioConfiguration(_ configuration: NSDictionary) { + + @objc(setAppleAudioConfiguration:withResolver:withRejecter:) + public func setAppleAudioConfiguration(_ configuration: NSDictionary, resolve: RCTPromiseResolveBlock, reject: RCTPromiseRejectBlock) { let session = RTCAudioSession.sharedInstance() let config = RTCAudioSessionConfiguration.webRTC() - + let appleAudioCategory = configuration["audioCategory"] as? String let appleAudioCategoryOptions = configuration["audioCategoryOptions"] as? [String] let appleAudioMode = configuration["audioMode"] as? String - + session.lockForConfiguration() - + defer { + session.unlockForConfiguration() + } + var categoryChanged = false - + if let appleAudioCategoryOptions = appleAudioCategoryOptions { categoryChanged = true - + var newOptions: AVAudioSession.CategoryOptions = [] for option in appleAudioCategoryOptions { if option == "mixWithOthers" { @@ -152,33 +155,43 @@ public class LivekitReactNativeModule: RCTEventEmitter { } config.categoryOptions = newOptions } - + if let appleAudioCategory = appleAudioCategory { categoryChanged = true config.category = AudioUtils.audioSessionCategoryFromString(appleAudioCategory).rawValue } - + if categoryChanged { do { try session.setCategory(AVAudioSession.Category(rawValue: config.category), with: config.categoryOptions) } catch { - NSLog("Error setting category: %@", error.localizedDescription) + reject("setAppleAudioConfiguration", "Error setting category: \(error.localizedDescription)", error) + return } } - + if let appleAudioMode = appleAudioMode { let mode = AudioUtils.audioSessionModeFromString(appleAudioMode) config.mode = mode.rawValue do { try session.setMode(mode) } catch { - NSLog("Error setting mode: %@", error.localizedDescription) + reject("setAppleAudioConfiguration", "Error setting mode: \(error.localizedDescription)", error) + return } } - - session.unlockForConfiguration() + + // Activate the audio session + do { + try session.setActive(true) + } catch { + reject("setAppleAudioConfiguration", "Error activating audio session: \(error.localizedDescription)", error) + return + } + + resolve(nil) } - + @objc(createAudioSinkListener:trackId:) public func createAudioSinkListener(_ pcId: NSNumber, trackId: String) -> String { let renderer = AudioSinkRenderer(eventEmitter: self) diff --git a/ios/LivekitReactNativeModule.m b/ios/LivekitReactNativeModule.m index 27a86bec..38375e4c 
100644 --- a/ios/LivekitReactNativeModule.m +++ b/ios/LivekitReactNativeModule.m @@ -19,7 +19,9 @@ @interface RCT_EXTERN_MODULE(LivekitReactNativeModule, RCTEventEmitter) /// Configure audio config for WebRTC -RCT_EXTERN_METHOD(setAppleAudioConfiguration:(NSDictionary *) configuration) +RCT_EXTERN_METHOD(setAppleAudioConfiguration:(NSDictionary *)configuration + withResolver:(RCTPromiseResolveBlock)resolve + withRejecter:(RCTPromiseRejectBlock)reject) RCT_EXTERN__BLOCKING_SYNCHRONOUS_METHOD(createAudioSinkListener:(nonnull NSNumber *)pcId trackId:(nonnull NSString *)trackId) From 6edb055cb78f5f6bf18afc0b93233d2310519300 Mon Sep 17 00:00:00 2001 From: Hiroshi Horie <548776+hiroshihorie@users.noreply.github.com> Date: Fri, 3 Oct 2025 23:07:13 +0800 Subject: [PATCH 2/6] Make session config optional --- src/index.tsx | 25 +++++++++++++++++++++++-- 1 file changed, 23 insertions(+), 2 deletions(-) diff --git a/src/index.tsx b/src/index.tsx index 44a34f80..b6101095 100644 --- a/src/index.tsx +++ b/src/index.tsx @@ -24,14 +24,34 @@ import RNKeyProvider, { type RNKeyProviderOptions } from './e2ee/RNKeyProvider'; import { setupNativeEvents } from './events/EventEmitter'; import { ReadableStream, WritableStream } from 'web-streams-polyfill'; +export interface RegisterGlobalsOptions { + /** + * Automatically configure audio session before accessing microphone. + * When enabled, sets the iOS audio category to 'playAndRecord' before getUserMedia. + * + * @default true + * @platform ios + */ + autoConfigureAudioSession?: boolean; +} + /** * Registers the required globals needed for LiveKit to work. * * Must be called before using LiveKit. + * + * @param options Optional configuration for global registration */ -export function registerGlobals() { +export function registerGlobals(options?: RegisterGlobalsOptions) { + const opts = { + autoConfigureAudioSession: true, + ...options, + }; + webrtcRegisterGlobals(); - iosCategoryEnforce(); + if (opts.autoConfigureAudioSession) { + iosCategoryEnforce(); + } livekitRegisterGlobals(); setupURLPolyfill(); fixWebrtcAdapter(); @@ -161,4 +181,5 @@ export type { LogLevel, SetLogLevelOptions, RNKeyProviderOptions, + RegisterGlobalsOptions, }; From 70f52c3b7135e0228cbbd309c77412f222d0f3e8 Mon Sep 17 00:00:00 2001 From: Hiroshi Horie <548776+hiroshihorie@users.noreply.github.com> Date: Sat, 4 Oct 2025 00:39:49 +0800 Subject: [PATCH 3/6] Fix --- src/index.tsx | 1 - 1 file changed, 1 deletion(-) diff --git a/src/index.tsx b/src/index.tsx index b6101095..76cf2ffa 100644 --- a/src/index.tsx +++ b/src/index.tsx @@ -181,5 +181,4 @@ export type { LogLevel, SetLogLevelOptions, RNKeyProviderOptions, - RegisterGlobalsOptions, }; From 928fc11870c6eb848652262518e55541bf836bf7 Mon Sep 17 00:00:00 2001 From: Hiroshi Horie <548776+hiroshihorie@users.noreply.github.com> Date: Sat, 4 Oct 2025 11:07:01 +0800 Subject: [PATCH 4/6] Updates --- ios/AudioUtils.swift | 35 +++++++- ios/LiveKitReactNativeModule.swift | 134 +++++++++++++---------------- ios/LivekitReactNativeModule.m | 6 +- 3 files changed, 96 insertions(+), 79 deletions(-) diff --git a/ios/AudioUtils.swift b/ios/AudioUtils.swift index 43a0675b..a84fcb2c 100644 --- a/ios/AudioUtils.swift +++ b/ios/AudioUtils.swift @@ -30,7 +30,7 @@ public class AudioUtils { } return retMode } - + public static func audioSessionCategoryFromString(_ category: String) -> AVAudioSession.Category { let retCategory: AVAudioSession.Category = switch category { case "ambient": @@ -46,8 +46,39 @@ public class AudioUtils { case "multiRoute": .multiRoute 
default: - .ambient + .soloAmbient } return retCategory } + + public static func audioSessionCategoryOptionsFromStrings(_ options: [String]) -> AVAudioSession.CategoryOptions { + var categoryOptions: AVAudioSession.CategoryOptions = [] + for option in options { + switch option { + case "mixWithOthers": + categoryOptions.insert(.mixWithOthers) + case "duckOthers": + categoryOptions.insert(.duckOthers) + case "allowBluetooth": + categoryOptions.insert(.allowBluetooth) + case "allowBluetoothA2DP": + categoryOptions.insert(.allowBluetoothA2DP) + case "allowAirPlay": + categoryOptions.insert(.allowAirPlay) + case "defaultToSpeaker": + categoryOptions.insert(.defaultToSpeaker) + case "interruptSpokenAudioAndMixWithOthers": + if #available(iOS 13.0, *) { + categoryOptions.insert(.interruptSpokenAudioAndMixWithOthers) + } + case "overrideMutedMicrophoneInterruption": + if #available(iOS 14.5, *) { + categoryOptions.insert(.overrideMutedMicrophoneInterruption) + } + default: + break + } + } + return categoryOptions + } } diff --git a/ios/LiveKitReactNativeModule.swift b/ios/LiveKitReactNativeModule.swift index 53b4b20b..b3c3ef47 100644 --- a/ios/LiveKitReactNativeModule.swift +++ b/ios/LiveKitReactNativeModule.swift @@ -11,7 +11,7 @@ struct LKEvents { @objc(LivekitReactNativeModule) public class LivekitReactNativeModule: RCTEventEmitter { - + // This cannot be initialized in init as self.bridge is given afterwards. private var _audioRendererManager: AudioRendererManager? = nil public var audioRendererManager: AudioRendererManager { @@ -19,11 +19,11 @@ public class LivekitReactNativeModule: RCTEventEmitter { if _audioRendererManager == nil { _audioRendererManager = AudioRendererManager(bridge: self.bridge) } - + return _audioRendererManager! } } - + @objc public override init() { super.init() @@ -31,10 +31,10 @@ public class LivekitReactNativeModule: RCTEventEmitter { config.category = AVAudioSession.Category.playAndRecord.rawValue config.categoryOptions = [.allowAirPlay, .allowBluetooth, .allowBluetoothA2DP, .defaultToSpeaker] config.mode = AVAudioSession.Mode.videoChat.rawValue - + RTCAudioSessionConfiguration.setWebRTC(config) } - + @objc override public static func requiresMainQueueSetup() -> Bool { return false @@ -48,19 +48,19 @@ public class LivekitReactNativeModule: RCTEventEmitter { options.videoEncoderFactory = simulcastVideoEncoderFactory options.audioProcessingModule = LKAudioProcessingManager.sharedInstance().audioProcessingModule } - + @objc(configureAudio:) public func configureAudio(_ config: NSDictionary) { guard let iOSConfig = config["ios"] as? NSDictionary else { return } - + let defaultOutput = iOSConfig["defaultOutput"] as? String ?? 
"speaker" - + let rtcConfig = RTCAudioSessionConfiguration() rtcConfig.category = AVAudioSession.Category.playAndRecord.rawValue - + if (defaultOutput == "earpiece") { rtcConfig.categoryOptions = [.allowAirPlay, .allowBluetooth, .allowBluetoothA2DP]; rtcConfig.mode = AVAudioSession.Mode.voiceChat.rawValue @@ -70,17 +70,39 @@ public class LivekitReactNativeModule: RCTEventEmitter { } RTCAudioSessionConfiguration.setWebRTC(rtcConfig) } - - @objc(startAudioSession) - public func startAudioSession() { - // intentionally left empty + + @objc(startAudioSession:withRejecter:) + public func startAudioSession(resolve: RCTPromiseResolveBlock, reject: RCTPromiseRejectBlock) { + let session = RTCAudioSession.sharedInstance() + session.lockForConfiguration() + defer { + session.unlockForConfiguration() + } + + do { + try session.setActive(true) + resolve(nil) + } catch { + reject("startAudioSession", "Error activating audio session: \(error.localizedDescription)", error) + } } - - @objc(stopAudioSession) - public func stopAudioSession() { - // intentionally left empty + + @objc(stopAudioSession:withRejecter:) + public func stopAudioSession(resolve: RCTPromiseResolveBlock, reject: RCTPromiseRejectBlock) { + let session = RTCAudioSession.sharedInstance() + session.lockForConfiguration() + defer { + session.unlockForConfiguration() + } + + do { + try session.setActive(false) + resolve(nil) + } catch { + reject("stopAudioSession", "Error deactivating audio session: \(error.localizedDescription)", error) + } } - + @objc(showAudioRoutePicker) public func showAudioRoutePicker() { if #available(iOS 11.0, *) { @@ -95,12 +117,12 @@ public class LivekitReactNativeModule: RCTEventEmitter { } } } - + @objc(getAudioOutputsWithResolver:withRejecter:) public func getAudioOutputs(resolve: RCTPromiseResolveBlock, reject: RCTPromiseRejectBlock){ resolve(["default", "force_speaker"]) } - + @objc(selectAudioOutput:withResolver:withRejecter:) public func selectAudioOutput(_ deviceId: String, resolve: RCTPromiseResolveBlock, reject: RCTPromiseRejectBlock) { let session = AVAudioSession.sharedInstance() @@ -114,7 +136,7 @@ public class LivekitReactNativeModule: RCTEventEmitter { reject("selectAudioOutput error", error.localizedDescription, error) return } - + resolve(nil) } @@ -132,64 +154,26 @@ public class LivekitReactNativeModule: RCTEventEmitter { session.unlockForConfiguration() } - var categoryChanged = false - - if let appleAudioCategoryOptions = appleAudioCategoryOptions { - categoryChanged = true - - var newOptions: AVAudioSession.CategoryOptions = [] - for option in appleAudioCategoryOptions { - if option == "mixWithOthers" { - newOptions.insert(.mixWithOthers) - } else if option == "duckOthers" { - newOptions.insert(.duckOthers) - } else if option == "allowBluetooth" { - newOptions.insert(.allowBluetooth) - } else if option == "allowBluetoothA2DP" { - newOptions.insert(.allowBluetoothA2DP) - } else if option == "allowAirPlay" { - newOptions.insert(.allowAirPlay) - } else if option == "defaultToSpeaker" { - newOptions.insert(.defaultToSpeaker) - } - } - config.categoryOptions = newOptions - } - if let appleAudioCategory = appleAudioCategory { - categoryChanged = true config.category = AudioUtils.audioSessionCategoryFromString(appleAudioCategory).rawValue } - if categoryChanged { - do { - try session.setCategory(AVAudioSession.Category(rawValue: config.category), with: config.categoryOptions) - } catch { - reject("setAppleAudioConfiguration", "Error setting category: \(error.localizedDescription)", error) - return - 
} + if let appleAudioCategoryOptions = appleAudioCategoryOptions { + config.categoryOptions = AudioUtils.audioSessionCategoryOptionsFromStrings(appleAudioCategoryOptions) } if let appleAudioMode = appleAudioMode { - let mode = AudioUtils.audioSessionModeFromString(appleAudioMode) - config.mode = mode.rawValue - do { - try session.setMode(mode) - } catch { - reject("setAppleAudioConfiguration", "Error setting mode: \(error.localizedDescription)", error) - return - } + config.mode = AudioUtils.audioSessionModeFromString(appleAudioMode).rawValue } - // Activate the audio session do { - try session.setActive(true) + try session.setConfiguration(config) + resolve(nil) } catch { - reject("setAppleAudioConfiguration", "Error activating audio session: \(error.localizedDescription)", error) + reject("setAppleAudioConfiguration", "Error setting category: \(error.localizedDescription)", error) return } - resolve(nil) } @objc(createAudioSinkListener:trackId:) @@ -198,7 +182,7 @@ public class LivekitReactNativeModule: RCTEventEmitter { let reactTag = self.audioRendererManager.registerRenderer(renderer) renderer.reactTag = reactTag self.audioRendererManager.attach(renderer: renderer, pcId: pcId, trackId: trackId) - + return reactTag } @@ -206,7 +190,7 @@ public class LivekitReactNativeModule: RCTEventEmitter { public func deleteAudioSinkListener(_ reactTag: String, pcId: NSNumber, trackId: String) -> Any? { self.audioRendererManager.detach(rendererByTag: reactTag, pcId: pcId, trackId: trackId) self.audioRendererManager.unregisterRenderer(forReactTag: reactTag) - + return nil } @@ -216,7 +200,7 @@ public class LivekitReactNativeModule: RCTEventEmitter { let reactTag = self.audioRendererManager.registerRenderer(renderer) renderer.reactTag = reactTag self.audioRendererManager.attach(renderer: renderer, pcId: pcId, trackId: trackId) - + return reactTag } @@ -224,7 +208,7 @@ public class LivekitReactNativeModule: RCTEventEmitter { public func deleteVolumeProcessor(_ reactTag: String, pcId: NSNumber, trackId: String) -> Any? { self.audioRendererManager.detach(rendererByTag: reactTag, pcId: pcId, trackId: trackId) self.audioRendererManager.unregisterRenderer(forReactTag: reactTag) - + return nil } @@ -234,7 +218,7 @@ public class LivekitReactNativeModule: RCTEventEmitter { let minFrequency = (options["minFrequency"] as? NSNumber)?.floatValue ?? 1000 let maxFrequency = (options["maxFrequency"] as? NSNumber)?.floatValue ?? 8000 let intervalMs = (options["updateInterval"] as? NSNumber)?.floatValue ?? 40 - + let renderer = MultibandVolumeAudioRenderer( bands: bands, minFrequency: minFrequency, @@ -245,18 +229,18 @@ public class LivekitReactNativeModule: RCTEventEmitter { let reactTag = self.audioRendererManager.registerRenderer(renderer) renderer.reactTag = reactTag self.audioRendererManager.attach(renderer: renderer, pcId: pcId, trackId: trackId) - + return reactTag } - + @objc(deleteMultibandVolumeProcessor:pcId:trackId:) public func deleteMultibandVolumeProcessor(_ reactTag: String, pcId: NSNumber, trackId: String) -> Any? { self.audioRendererManager.detach(rendererByTag: reactTag, pcId: pcId, trackId: trackId) self.audioRendererManager.unregisterRenderer(forReactTag: reactTag) - + return nil } - + @objc(setDefaultAudioTrackVolume:) public func setDefaultAudioTrackVolume(_ volume: NSNumber) -> Any? { let options = WebRTCModuleOptions.sharedInstance() @@ -264,7 +248,7 @@ public class LivekitReactNativeModule: RCTEventEmitter { return nil } - + override public func supportedEvents() -> [String]! 
{ return [ LKEvents.kEventVolumeProcessed, diff --git a/ios/LivekitReactNativeModule.m b/ios/LivekitReactNativeModule.m index 38375e4c..dfe83d6c 100644 --- a/ios/LivekitReactNativeModule.m +++ b/ios/LivekitReactNativeModule.m @@ -5,8 +5,10 @@ @interface RCT_EXTERN_MODULE(LivekitReactNativeModule, RCTEventEmitter) RCT_EXTERN_METHOD(configureAudio:(NSDictionary *) config) -RCT_EXTERN_METHOD(startAudioSession) -RCT_EXTERN_METHOD(stopAudioSession) +RCT_EXTERN_METHOD(startAudioSession:(RCTPromiseResolveBlock)resolve + withRejecter:(RCTPromiseRejectBlock)reject) +RCT_EXTERN_METHOD(stopAudioSession:(RCTPromiseResolveBlock)resolve + withRejecter:(RCTPromiseRejectBlock)reject) RCT_EXTERN_METHOD(setDefaultAudioTrackVolume:(nonnull NSNumber *) volume) From 7b609819352fcb2bc8fc4ff8fd711bc830aa9b6c Mon Sep 17 00:00:00 2001 From: Hiroshi Horie <548776+hiroshihorie@users.noreply.github.com> Date: Mon, 6 Oct 2025 21:46:57 +0800 Subject: [PATCH 5/6] Update iOS audio management --- example/index.tsx | 4 +- example/src/RoomPage.tsx | 1 - ios/LiveKitReactNativeModule.swift | 2 +- src/audio/AudioManager.ts | 177 ++++++++++------------------- src/audio/AudioSession.ts | 31 ----- 5 files changed, 65 insertions(+), 150 deletions(-) diff --git a/example/index.tsx b/example/index.tsx index 231dc300..f6d69392 100644 --- a/example/index.tsx +++ b/example/index.tsx @@ -1,7 +1,7 @@ import { AppRegistry } from 'react-native'; import App from './src/App'; import { name as appName } from './app.json'; -import { registerGlobals, setLogLevel } from '@livekit/react-native'; +import { registerGlobals, setLogLevel, useIOSAudioManagement } from '@livekit/react-native'; import { LogLevel } from 'livekit-client'; import { setupErrorLogHandler } from './src/utils/ErrorLogHandler'; import { setupCallService } from './src/callservice/CallService'; @@ -16,3 +16,5 @@ setupCallService(); // Required React-Native setup for app registerGlobals(); AppRegistry.registerComponent(appName, () => App); + +useIOSAudioManagement(); diff --git a/example/src/RoomPage.tsx b/example/src/RoomPage.tsx index a4b14b0e..baaf66b9 100644 --- a/example/src/RoomPage.tsx +++ b/example/src/RoomPage.tsx @@ -106,7 +106,6 @@ const RoomView = ({ navigation, e2ee }: RoomViewProps) => { return () => {}; }, [room, e2ee]); - useIOSAudioManagement(room, true); // Setup room listeners const { send } = useDataChannel( (dataMessage: ReceivedDataMessage) => { diff --git a/ios/LiveKitReactNativeModule.swift b/ios/LiveKitReactNativeModule.swift index b3c3ef47..feb6d415 100644 --- a/ios/LiveKitReactNativeModule.swift +++ b/ios/LiveKitReactNativeModule.swift @@ -29,7 +29,7 @@ public class LivekitReactNativeModule: RCTEventEmitter { super.init() let config = RTCAudioSessionConfiguration() config.category = AVAudioSession.Category.playAndRecord.rawValue - config.categoryOptions = [.allowAirPlay, .allowBluetooth, .allowBluetoothA2DP, .defaultToSpeaker] + config.categoryOptions = [.allowAirPlay, .allowBluetoothHFP, .allowBluetoothA2DP, .defaultToSpeaker] config.mode = AVAudioSession.Mode.videoChat.rawValue RTCAudioSessionConfiguration.setWebRTC(config) diff --git a/src/audio/AudioManager.ts b/src/audio/AudioManager.ts index f227bd7c..708341c6 100644 --- a/src/audio/AudioManager.ts +++ b/src/audio/AudioManager.ts @@ -1,141 +1,86 @@ -import { useState, useEffect, useMemo } from 'react'; -import { Platform } from 'react-native'; -import { - RoomEvent, - Room, - type LocalTrackPublication, - type RemoteTrackPublication, -} from 'livekit-client'; import AudioSession, { - 
getDefaultAppleAudioConfigurationForMode, type AppleAudioConfiguration, - type AudioTrackState, } from './AudioSession'; import { log } from '..'; +import { audioDeviceModuleEvents } from '@livekit/react-native-webrtc'; + +export type AudioEngineConfigurationState = { + isPlayoutEnabled: boolean; + isRecordingEnabled: boolean; + preferSpeakerOutput: boolean; +}; /** * Handles setting the appropriate AVAudioSession options automatically * depending on the audio track states of the Room. * - * @param room * @param preferSpeakerOutput * @param onConfigureNativeAudio A custom method for determining options used. */ export function useIOSAudioManagement( - room: Room, - preferSpeakerOutput: boolean = true, - onConfigureNativeAudio?: ( - trackState: AudioTrackState, - preferSpeakerOutput: boolean - ) => AppleAudioConfiguration + preferSpeakerOutput = true, + onConfigureNativeAudio?: (configurationState: AudioEngineConfigurationState) => AppleAudioConfiguration ) { - const [localTrackCount, setLocalTrackCount] = useState(0); - const [remoteTrackCount, setRemoteTrackCount] = useState(0); - const trackState = useMemo( - () => computeAudioTrackState(localTrackCount, remoteTrackCount), - [localTrackCount, remoteTrackCount] - ); - - useEffect(() => { - let recalculateTrackCounts = () => { - setLocalTrackCount(getLocalAudioTrackCount(room)); - setRemoteTrackCount(getRemoteAudioTrackCount(room)); - }; - - recalculateTrackCounts(); - - room.on(RoomEvent.Connected, recalculateTrackCounts); - - return () => { - room.off(RoomEvent.Connected, recalculateTrackCounts); - }; - }, [room]); - useEffect(() => { - if (Platform.OS !== 'ios') { - return () => {}; - } + let audioEngineState: AudioEngineConfigurationState = { + isPlayoutEnabled: false, + isRecordingEnabled: false, + preferSpeakerOutput: preferSpeakerOutput, + }; - let onLocalPublished = (publication: LocalTrackPublication) => { - if (publication.kind === 'audio') { - setLocalTrackCount(localTrackCount + 1); - } - }; - let onLocalUnpublished = (publication: LocalTrackPublication) => { - if (publication.kind === 'audio') { - if (localTrackCount - 1 < 0) { - log.warn( - 'mismatched local audio track count! attempted to reduce track count below zero.' - ); - } - setLocalTrackCount(Math.max(localTrackCount - 1, 0)); - } - }; - let onRemotePublished = (publication: RemoteTrackPublication) => { - if (publication.kind === 'audio') { - setRemoteTrackCount(remoteTrackCount + 1); + const tryConfigure = async (newState: AudioEngineConfigurationState, oldState: AudioEngineConfigurationState) => { + if ((!newState.isPlayoutEnabled && !newState.isRecordingEnabled) && (oldState.isPlayoutEnabled || oldState.isRecordingEnabled)) { + log.info("AudioSession deactivating...") + await AudioSession.stopAudioSession() + } else if (newState.isRecordingEnabled || newState.isPlayoutEnabled) { + const config = onConfigureNativeAudio ? onConfigureNativeAudio(newState) : getDefaultAppleAudioConfigurationForAudioState(newState); + log.info("AudioSession configuring category:", config.audioCategory) + await AudioSession.setAppleAudioConfiguration(config) + if (!oldState.isPlayoutEnabled && !oldState.isRecordingEnabled) { + log.info("AudioSession activating...") + await AudioSession.startAudioSession() } - }; - let onRemoteUnpublished = (publication: RemoteTrackPublication) => { - if (publication.kind === 'audio') { - if (remoteTrackCount - 1 < 0) { - log.warn( - 'mismatched remote audio track count! attempted to reduce track count below zero.' 
- ); - } - setRemoteTrackCount(Math.max(remoteTrackCount - 1, 0)); - } - }; - - room - .on(RoomEvent.LocalTrackPublished, onLocalPublished) - .on(RoomEvent.LocalTrackUnpublished, onLocalUnpublished) - .on(RoomEvent.TrackPublished, onRemotePublished) - .on(RoomEvent.TrackUnpublished, onRemoteUnpublished); + } + }; - return () => { - room - .off(RoomEvent.LocalTrackPublished, onLocalPublished) - .off(RoomEvent.LocalTrackUnpublished, onLocalUnpublished) - .off(RoomEvent.TrackPublished, onRemotePublished) - .off(RoomEvent.TrackUnpublished, onRemoteUnpublished); + const handleEngineStateUpdate = async ({ isPlayoutEnabled, isRecordingEnabled }: { isPlayoutEnabled: boolean, isRecordingEnabled: boolean }) => { + const oldState = audioEngineState; + const newState = { + isPlayoutEnabled, + isRecordingEnabled, + preferSpeakerOutput: audioEngineState.preferSpeakerOutput, }; - }, [room, localTrackCount, remoteTrackCount]); - useEffect(() => { - if (Platform.OS !== 'ios') { - return; - } + // If this throws, the audio engine will not continue it's operation + await tryConfigure(newState, oldState); + // Update the audio state only if configure succeeds + audioEngineState = newState; + }; - let configFunc = - onConfigureNativeAudio ?? getDefaultAppleAudioConfigurationForMode; - let audioConfig = configFunc(trackState, preferSpeakerOutput); - AudioSession.setAppleAudioConfiguration(audioConfig); - }, [trackState, onConfigureNativeAudio, preferSpeakerOutput]); + // Attach audio engine events + audioDeviceModuleEvents.setWillEnableEngineHandler(handleEngineStateUpdate); + audioDeviceModuleEvents.setDidDisableEngineHandler(handleEngineStateUpdate); } -function computeAudioTrackState( - localTracks: number, - remoteTracks: number -): AudioTrackState { - if (localTracks > 0 && remoteTracks > 0) { - return 'localAndRemote'; - } else if (localTracks > 0 && remoteTracks === 0) { - return 'localOnly'; - } else if (localTracks === 0 && remoteTracks > 0) { - return 'remoteOnly'; - } else { - return 'none'; +function getDefaultAppleAudioConfigurationForAudioState( + configurationState: AudioEngineConfigurationState, +): AppleAudioConfiguration { + if (configurationState.isRecordingEnabled) { + return { + audioCategory: 'playAndRecord', + audioCategoryOptions: ['allowBluetooth', 'mixWithOthers'], + audioMode: configurationState.preferSpeakerOutput ? 
'videoChat' : 'voiceChat', + }; + } else if (configurationState.isPlayoutEnabled) { + return { + audioCategory: 'playback', + audioCategoryOptions: ['mixWithOthers'], + audioMode: 'spokenAudio', + }; } -} - -function getLocalAudioTrackCount(room: Room): number { - return room.localParticipant.audioTrackPublications.size; -} -function getRemoteAudioTrackCount(room: Room): number { - var audioTracks = 0; - room.remoteParticipants.forEach((participant) => { - audioTracks += participant.audioTrackPublications.size; - }); - return audioTracks; + return { + audioCategory: 'soloAmbient', + audioCategoryOptions: [], + audioMode: 'default', + }; } diff --git a/src/audio/AudioSession.ts b/src/audio/AudioSession.ts index ed3b9a9c..83bba8cc 100644 --- a/src/audio/AudioSession.ts +++ b/src/audio/AudioSession.ts @@ -197,37 +197,6 @@ export type AppleAudioConfiguration = { audioMode?: AppleAudioMode; }; -export type AudioTrackState = - | 'none' - | 'remoteOnly' - | 'localOnly' - | 'localAndRemote'; - -export function getDefaultAppleAudioConfigurationForMode( - mode: AudioTrackState, - preferSpeakerOutput: boolean = true -): AppleAudioConfiguration { - if (mode === 'remoteOnly') { - return { - audioCategory: 'playback', - audioCategoryOptions: ['mixWithOthers'], - audioMode: 'spokenAudio', - }; - } else if (mode === 'localAndRemote' || mode === 'localOnly') { - return { - audioCategory: 'playAndRecord', - audioCategoryOptions: ['allowBluetooth', 'mixWithOthers'], - audioMode: preferSpeakerOutput ? 'videoChat' : 'voiceChat', - }; - } - - return { - audioCategory: 'soloAmbient', - audioCategoryOptions: [], - audioMode: 'default', - }; -} - export default class AudioSession { /** * Applies the provided audio configuration to the underlying AudioSession. From f467a48bf53f5e9b93c98db75a5d698a5f92c073 Mon Sep 17 00:00:00 2001 From: Hiroshi Horie <548776+hiroshihorie@users.noreply.github.com> Date: Mon, 6 Oct 2025 23:29:13 +0800 Subject: [PATCH 6/6] Revert "Update iOS audio management" This reverts commit 7b609819352fcb2bc8fc4ff8fd711bc830aa9b6c. 
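For reference, the Room-based hook restored by this revert keeps the signature useIOSAudioManagement(room, preferSpeakerOutput, onConfigureNativeAudio), where the optional callback receives the computed AudioTrackState and returns an AppleAudioConfiguration. A minimal component-level sketch follows; the RoomAudio component, its room prop, and the chosen category/mode values are illustrative only and not part of this series:

    import { useIOSAudioManagement } from '@livekit/react-native';
    import type { Room } from 'livekit-client';

    // Illustrative helper component; `room` is assumed to be supplied by the app.
    function RoomAudio({ room }: { room: Room }) {
      // Custom policy: earpiece-oriented playAndRecord while any local audio track
      // is published, plain playback otherwise.
      useIOSAudioManagement(room, false, (trackState) =>
        trackState === 'localOnly' || trackState === 'localAndRemote'
          ? {
              audioCategory: 'playAndRecord',
              audioCategoryOptions: ['allowBluetooth', 'mixWithOthers'],
              audioMode: 'voiceChat',
            }
          : {
              audioCategory: 'playback',
              audioCategoryOptions: ['mixWithOthers'],
              audioMode: 'spokenAudio',
            }
      );
      return null;
    }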
--- example/index.tsx | 4 +- example/src/RoomPage.tsx | 1 + ios/LiveKitReactNativeModule.swift | 2 +- src/audio/AudioManager.ts | 177 +++++++++++++++++++---------- src/audio/AudioSession.ts | 31 +++++ 5 files changed, 150 insertions(+), 65 deletions(-) diff --git a/example/index.tsx b/example/index.tsx index f6d69392..231dc300 100644 --- a/example/index.tsx +++ b/example/index.tsx @@ -1,7 +1,7 @@ import { AppRegistry } from 'react-native'; import App from './src/App'; import { name as appName } from './app.json'; -import { registerGlobals, setLogLevel, useIOSAudioManagement } from '@livekit/react-native'; +import { registerGlobals, setLogLevel } from '@livekit/react-native'; import { LogLevel } from 'livekit-client'; import { setupErrorLogHandler } from './src/utils/ErrorLogHandler'; import { setupCallService } from './src/callservice/CallService'; @@ -16,5 +16,3 @@ setupCallService(); // Required React-Native setup for app registerGlobals(); AppRegistry.registerComponent(appName, () => App); - -useIOSAudioManagement(); diff --git a/example/src/RoomPage.tsx b/example/src/RoomPage.tsx index baaf66b9..a4b14b0e 100644 --- a/example/src/RoomPage.tsx +++ b/example/src/RoomPage.tsx @@ -106,6 +106,7 @@ const RoomView = ({ navigation, e2ee }: RoomViewProps) => { return () => {}; }, [room, e2ee]); + useIOSAudioManagement(room, true); // Setup room listeners const { send } = useDataChannel( (dataMessage: ReceivedDataMessage) => { diff --git a/ios/LiveKitReactNativeModule.swift b/ios/LiveKitReactNativeModule.swift index feb6d415..b3c3ef47 100644 --- a/ios/LiveKitReactNativeModule.swift +++ b/ios/LiveKitReactNativeModule.swift @@ -29,7 +29,7 @@ public class LivekitReactNativeModule: RCTEventEmitter { super.init() let config = RTCAudioSessionConfiguration() config.category = AVAudioSession.Category.playAndRecord.rawValue - config.categoryOptions = [.allowAirPlay, .allowBluetoothHFP, .allowBluetoothA2DP, .defaultToSpeaker] + config.categoryOptions = [.allowAirPlay, .allowBluetooth, .allowBluetoothA2DP, .defaultToSpeaker] config.mode = AVAudioSession.Mode.videoChat.rawValue RTCAudioSessionConfiguration.setWebRTC(config) diff --git a/src/audio/AudioManager.ts b/src/audio/AudioManager.ts index 708341c6..f227bd7c 100644 --- a/src/audio/AudioManager.ts +++ b/src/audio/AudioManager.ts @@ -1,86 +1,141 @@ +import { useState, useEffect, useMemo } from 'react'; +import { Platform } from 'react-native'; +import { + RoomEvent, + Room, + type LocalTrackPublication, + type RemoteTrackPublication, +} from 'livekit-client'; import AudioSession, { + getDefaultAppleAudioConfigurationForMode, type AppleAudioConfiguration, + type AudioTrackState, } from './AudioSession'; import { log } from '..'; -import { audioDeviceModuleEvents } from '@livekit/react-native-webrtc'; - -export type AudioEngineConfigurationState = { - isPlayoutEnabled: boolean; - isRecordingEnabled: boolean; - preferSpeakerOutput: boolean; -}; /** * Handles setting the appropriate AVAudioSession options automatically * depending on the audio track states of the Room. * + * @param room * @param preferSpeakerOutput * @param onConfigureNativeAudio A custom method for determining options used. 
*/ export function useIOSAudioManagement( - preferSpeakerOutput = true, - onConfigureNativeAudio?: (configurationState: AudioEngineConfigurationState) => AppleAudioConfiguration + room: Room, + preferSpeakerOutput: boolean = true, + onConfigureNativeAudio?: ( + trackState: AudioTrackState, + preferSpeakerOutput: boolean + ) => AppleAudioConfiguration ) { - let audioEngineState: AudioEngineConfigurationState = { - isPlayoutEnabled: false, - isRecordingEnabled: false, - preferSpeakerOutput: preferSpeakerOutput, - }; + const [localTrackCount, setLocalTrackCount] = useState(0); + const [remoteTrackCount, setRemoteTrackCount] = useState(0); + const trackState = useMemo( + () => computeAudioTrackState(localTrackCount, remoteTrackCount), + [localTrackCount, remoteTrackCount] + ); - const tryConfigure = async (newState: AudioEngineConfigurationState, oldState: AudioEngineConfigurationState) => { - if ((!newState.isPlayoutEnabled && !newState.isRecordingEnabled) && (oldState.isPlayoutEnabled || oldState.isRecordingEnabled)) { - log.info("AudioSession deactivating...") - await AudioSession.stopAudioSession() - } else if (newState.isRecordingEnabled || newState.isPlayoutEnabled) { - const config = onConfigureNativeAudio ? onConfigureNativeAudio(newState) : getDefaultAppleAudioConfigurationForAudioState(newState); - log.info("AudioSession configuring category:", config.audioCategory) - await AudioSession.setAppleAudioConfiguration(config) - if (!oldState.isPlayoutEnabled && !oldState.isRecordingEnabled) { - log.info("AudioSession activating...") - await AudioSession.startAudioSession() - } + useEffect(() => { + let recalculateTrackCounts = () => { + setLocalTrackCount(getLocalAudioTrackCount(room)); + setRemoteTrackCount(getRemoteAudioTrackCount(room)); + }; + + recalculateTrackCounts(); + + room.on(RoomEvent.Connected, recalculateTrackCounts); + + return () => { + room.off(RoomEvent.Connected, recalculateTrackCounts); + }; + }, [room]); + useEffect(() => { + if (Platform.OS !== 'ios') { + return () => {}; } - }; - const handleEngineStateUpdate = async ({ isPlayoutEnabled, isRecordingEnabled }: { isPlayoutEnabled: boolean, isRecordingEnabled: boolean }) => { - const oldState = audioEngineState; - const newState = { - isPlayoutEnabled, - isRecordingEnabled, - preferSpeakerOutput: audioEngineState.preferSpeakerOutput, + let onLocalPublished = (publication: LocalTrackPublication) => { + if (publication.kind === 'audio') { + setLocalTrackCount(localTrackCount + 1); + } + }; + let onLocalUnpublished = (publication: LocalTrackPublication) => { + if (publication.kind === 'audio') { + if (localTrackCount - 1 < 0) { + log.warn( + 'mismatched local audio track count! attempted to reduce track count below zero.' + ); + } + setLocalTrackCount(Math.max(localTrackCount - 1, 0)); + } + }; + let onRemotePublished = (publication: RemoteTrackPublication) => { + if (publication.kind === 'audio') { + setRemoteTrackCount(remoteTrackCount + 1); + } }; + let onRemoteUnpublished = (publication: RemoteTrackPublication) => { + if (publication.kind === 'audio') { + if (remoteTrackCount - 1 < 0) { + log.warn( + 'mismatched remote audio track count! attempted to reduce track count below zero.' 
+ ); + } + setRemoteTrackCount(Math.max(remoteTrackCount - 1, 0)); + } + }; + + room + .on(RoomEvent.LocalTrackPublished, onLocalPublished) + .on(RoomEvent.LocalTrackUnpublished, onLocalUnpublished) + .on(RoomEvent.TrackPublished, onRemotePublished) + .on(RoomEvent.TrackUnpublished, onRemoteUnpublished); - // If this throws, the audio engine will not continue it's operation - await tryConfigure(newState, oldState); - // Update the audio state only if configure succeeds - audioEngineState = newState; - }; + return () => { + room + .off(RoomEvent.LocalTrackPublished, onLocalPublished) + .off(RoomEvent.LocalTrackUnpublished, onLocalUnpublished) + .off(RoomEvent.TrackPublished, onRemotePublished) + .off(RoomEvent.TrackUnpublished, onRemoteUnpublished); + }; + }, [room, localTrackCount, remoteTrackCount]); + + useEffect(() => { + if (Platform.OS !== 'ios') { + return; + } - // Attach audio engine events - audioDeviceModuleEvents.setWillEnableEngineHandler(handleEngineStateUpdate); - audioDeviceModuleEvents.setDidDisableEngineHandler(handleEngineStateUpdate); + let configFunc = + onConfigureNativeAudio ?? getDefaultAppleAudioConfigurationForMode; + let audioConfig = configFunc(trackState, preferSpeakerOutput); + AudioSession.setAppleAudioConfiguration(audioConfig); + }, [trackState, onConfigureNativeAudio, preferSpeakerOutput]); } -function getDefaultAppleAudioConfigurationForAudioState( - configurationState: AudioEngineConfigurationState, -): AppleAudioConfiguration { - if (configurationState.isRecordingEnabled) { - return { - audioCategory: 'playAndRecord', - audioCategoryOptions: ['allowBluetooth', 'mixWithOthers'], - audioMode: configurationState.preferSpeakerOutput ? 'videoChat' : 'voiceChat', - }; - } else if (configurationState.isPlayoutEnabled) { - return { - audioCategory: 'playback', - audioCategoryOptions: ['mixWithOthers'], - audioMode: 'spokenAudio', - }; +function computeAudioTrackState( + localTracks: number, + remoteTracks: number +): AudioTrackState { + if (localTracks > 0 && remoteTracks > 0) { + return 'localAndRemote'; + } else if (localTracks > 0 && remoteTracks === 0) { + return 'localOnly'; + } else if (localTracks === 0 && remoteTracks > 0) { + return 'remoteOnly'; + } else { + return 'none'; } +} + +function getLocalAudioTrackCount(room: Room): number { + return room.localParticipant.audioTrackPublications.size; +} - return { - audioCategory: 'soloAmbient', - audioCategoryOptions: [], - audioMode: 'default', - }; +function getRemoteAudioTrackCount(room: Room): number { + var audioTracks = 0; + room.remoteParticipants.forEach((participant) => { + audioTracks += participant.audioTrackPublications.size; + }); + return audioTracks; } diff --git a/src/audio/AudioSession.ts b/src/audio/AudioSession.ts index 83bba8cc..ed3b9a9c 100644 --- a/src/audio/AudioSession.ts +++ b/src/audio/AudioSession.ts @@ -197,6 +197,37 @@ export type AppleAudioConfiguration = { audioMode?: AppleAudioMode; }; +export type AudioTrackState = + | 'none' + | 'remoteOnly' + | 'localOnly' + | 'localAndRemote'; + +export function getDefaultAppleAudioConfigurationForMode( + mode: AudioTrackState, + preferSpeakerOutput: boolean = true +): AppleAudioConfiguration { + if (mode === 'remoteOnly') { + return { + audioCategory: 'playback', + audioCategoryOptions: ['mixWithOthers'], + audioMode: 'spokenAudio', + }; + } else if (mode === 'localAndRemote' || mode === 'localOnly') { + return { + audioCategory: 'playAndRecord', + audioCategoryOptions: ['allowBluetooth', 'mixWithOthers'], + audioMode: preferSpeakerOutput 
? 'videoChat' : 'voiceChat', + }; + } + + return { + audioCategory: 'soloAmbient', + audioCategoryOptions: [], + audioMode: 'default', + }; +} + export default class AudioSession { /** * Applies the provided audio configuration to the underlying AudioSession.
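
The native calls made promise-based in PATCH 1 and PATCH 4 (setAppleAudioConfiguration, startAudioSession, stopAudioSession) now report AVAudioSession failures to JavaScript: setAppleAudioConfiguration previously only logged them with NSLog, and startAudioSession/stopAudioSession were intentionally empty. A usage sketch follows, assuming AudioSession is exported from the package entry point and that its static wrappers forward the new resolver/rejecter signatures as async methods (as the awaits in the since-reverted PATCH 5 AudioManager did); the category, options, and mode values simply mirror ones already used in this series:

    import { registerGlobals, AudioSession } from '@livekit/react-native';

    // Skip the automatic category enforcement gated by PATCH 2's
    // autoConfigureAudioSession option (it defaults to true) so the app can
    // configure and activate the session explicitly.
    registerGlobals({ autoConfigureAudioSession: false });

    async function startCallAudio(): Promise<void> {
      await AudioSession.setAppleAudioConfiguration({
        audioCategory: 'playAndRecord',
        audioCategoryOptions: ['allowBluetooth', 'mixWithOthers'],
        audioMode: 'voiceChat',
      });
      // Rejects with code "startAudioSession" if AVAudioSession activation fails.
      await AudioSession.startAudioSession();
    }

    async function stopCallAudio(): Promise<void> {
      await AudioSession.stopAudioSession();
    }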