Skip to content

Commit 4a491ce

Browse files
committed
Fix concurrency warnings
1 parent d830f65 commit 4a491ce

File tree

1 file changed

+32
-28
lines changed

1 file changed

+32
-28
lines changed

firebaseai/FirebaseAIExample/Shared/Audio/AudioController.swift

Lines changed: 32 additions & 28 deletions
Original file line numberDiff line numberDiff line change
@@ -96,39 +96,41 @@ actor AudioController {
9696
print("Failed to disable voice processing: \(error.localizedDescription)")
9797
}
9898
}
99-
microphone?.stop()
100-
audioPlayer?.stop()
99+
Task { @MainActor [audioPlayer, microphone] in
100+
microphone?.stop()
101+
audioPlayer?.stop()
102+
}
101103
microphoneDataQueue.finish()
102104
routeTask?.cancel()
103105
}
104106

105107
/// Kicks off audio processing, and returns a stream of recorded microphone audio data.
106-
public func listenToMic() throws -> AsyncStream<AVAudioPCMBuffer> {
107-
try spawnAudioProcessingThread()
108+
public func listenToMic() async throws -> AsyncStream<AVAudioPCMBuffer> {
109+
try await spawnAudioProcessingThread()
108110
return microphoneData
109111
}
110112

111113
/// Permanently stop all audio processing.
112114
///
113115
/// To start again, create a new instance of ``AudioController``.
114-
public func stop() {
116+
public func stop() async {
115117
stopped = true
116-
stopListeningAndPlayback()
118+
await stopListeningAndPlayback()
117119
microphoneDataQueue.finish()
118120
routeTask?.cancel()
119121
}
120122

121123
/// Queues audio for playback.
122-
public func playAudio(audio: Data) throws {
123-
try audioPlayer?.play(audio)
124+
public func playAudio(audio: Data) async throws {
125+
try await audioPlayer?.play(audio)
124126
}
125127

126128
/// Interrupts and clears the currently pending audio playback queue.
127-
public func interrupt() {
128-
audioPlayer?.interrupt()
129+
public func interrupt() async {
130+
await audioPlayer?.interrupt()
129131
}
130132

131-
private func stopListeningAndPlayback() {
133+
private func stopListeningAndPlayback() async {
132134
listenTask?.cancel()
133135
// audio engine needs to be stopped before disconnecting nodes
134136
audioEngine?.pause()
@@ -143,8 +145,8 @@ actor AudioController {
143145
print("Failed to disable voice processing: \(error.localizedDescription)")
144146
}
145147
}
146-
microphone?.stop()
147-
audioPlayer?.stop()
148+
await microphone?.stop()
149+
await audioPlayer?.stop()
148150
}
149151

150152
/// Start audio processing functionality.
@@ -154,16 +156,16 @@ actor AudioController {
154156
/// This function is also called whenever the input or output device change,
155157
/// so it needs to be able to setup the audio processing without disrupting
156158
/// the consumer of the microphone data.
157-
private func spawnAudioProcessingThread() throws {
159+
private func spawnAudioProcessingThread() async throws {
158160
if stopped { return }
159161

160-
stopListeningAndPlayback()
162+
await stopListeningAndPlayback()
161163

162164
// we need to start a new audio engine if the output device changed, so we might as well do it regardless
163165
let audioEngine = AVAudioEngine()
164166
self.audioEngine = audioEngine
165167

166-
try setupAudioPlayback(audioEngine)
168+
try await setupAudioPlayback(audioEngine)
167169
try setupVoiceProcessing(audioEngine)
168170

169171
do {
@@ -172,30 +174,30 @@ actor AudioController {
172174
throw ApplicationError("Failed to start audio engine: \(error.localizedDescription)")
173175
}
174176

175-
try setupMicrophone(audioEngine)
177+
try await setupMicrophone(audioEngine)
176178
}
177179

178-
private func setupMicrophone(_ engine: AVAudioEngine) throws {
179-
let microphone = Microphone(engine: engine)
180+
private func setupMicrophone(_ engine: AVAudioEngine) async throws {
181+
let microphone = await Microphone(engine: engine)
180182
self.microphone = microphone
181183

182-
microphone.start()
184+
await microphone.start()
183185

184186
let micFormat = engine.inputNode.outputFormat(forBus: 0)
185187
guard let converter = AVAudioConverter(from: micFormat, to: modelInputFormat) else {
186188
throw ApplicationError("Failed to create audio converter")
187189
}
188190

189191
listenTask = Task {
190-
for await audio in microphone.audio {
191-
try microphoneDataQueue.yield(converter.convertBuffer(audio))
192+
for await audio in await microphone.audio {
193+
try microphoneDataQueue.yield(await converter.convertBuffer(audio))
192194
}
193195
}
194196
}
195197

196-
private func setupAudioPlayback(_ engine: AVAudioEngine) throws {
198+
private func setupAudioPlayback(_ engine: AVAudioEngine) async throws {
197199
let playbackFormat = engine.outputNode.outputFormat(forBus: 0)
198-
audioPlayer = try AudioPlayer(
200+
audioPlayer = try await AudioPlayer(
199201
engine: engine,
200202
inputFormat: modelOutputFormat,
201203
outputFormat: playbackFormat
@@ -240,10 +242,12 @@ actor AudioController {
240242

241243
switch reason {
242244
case .newDeviceAvailable, .oldDeviceUnavailable:
243-
do {
244-
try spawnAudioProcessingThread()
245-
} catch {
246-
print("Failed to spawn audio processing thread: \(String(describing: error))")
245+
Task { @MainActor in
246+
do {
247+
try await spawnAudioProcessingThread()
248+
} catch {
249+
print("Failed to spawn audio processing thread: \(String(describing: error))")
250+
}
247251
}
248252
default: ()
249253
}

0 commit comments

Comments (0)