Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
26 changes: 25 additions & 1 deletion OpenOats/Sources/OpenOats/App/AppCoordinator.swift
Original file line number Diff line number Diff line change
Expand Up @@ -254,20 +254,44 @@ final class AppCoordinator {
let title = transcriptStore.conversationState.currentTopic.isEmpty
? nil : transcriptStore.conversationState.currentTopic

// Extract meeting app name from state machine metadata (available in .ending state)
let meetingAppName: String?
if case .ending(let metadata) = state {
meetingAppName = metadata.detectionContext?.meetingApp?.name
} else {
meetingAppName = nil
}

// Capture the ASR engine name from current settings
let engineName = settings?.transcriptionModel.rawValue

let index = SessionIndex(
id: sessionID,
startedAt: transcriptStore.utterances.first?.timestamp ?? Date(),
endedAt: Date(),
templateSnapshot: sessionTemplateSnapshot,
title: title,
utteranceCount: utteranceCount,
hasNotes: false
hasNotes: false,
meetingApp: meetingAppName,
engine: engineName
)
let sidecar = SessionSidecar(index: index, notes: nil)

// 4. Write sidecar
await sessionStore.writeSidecar(sidecar)

// 4b. Generate structured Markdown file from JSONL (has refined text after backfill)
let jsonlRecords = await sessionStore.loadTranscript(sessionID: sessionID)
if !jsonlRecords.isEmpty, let settings {
let outputDir = URL(fileURLWithPath: settings.notesFolderPath)
MarkdownMeetingWriter.write(
metadata: .init(from: index),
records: jsonlRecords,
outputDirectory: outputDir
)
}

// 5. Close JSONL file
await sessionStore.endSession()

Expand Down
12 changes: 12 additions & 0 deletions OpenOats/Sources/OpenOats/App/AppRuntime.swift
Original file line number Diff line number Diff line change
Expand Up @@ -39,6 +39,7 @@ final class AppRuntime {
let notesDirectory: URL

private var didSeedInitialData = false
private var didInitializeServices = false

init(
mode: AppRuntimeMode,
Expand Down Expand Up @@ -168,6 +169,17 @@ final class AppRuntime {
)
}

/// Wires the runtime's long-lived services into the coordinator exactly once.
/// Safe to call repeatedly: every call after the first is a no-op, guarded by
/// `didInitializeServices` (flipped before wiring so re-entrancy cannot double-build).
func ensureServicesInitialized(settings: AppSettings, coordinator: AppCoordinator) {
    guard didInitializeServices == false else { return }
    didInitializeServices = true

    // Build the service bundle once, then hand each piece to the coordinator.
    let bundle = makeServices(settings: settings, coordinator: coordinator)
    coordinator.transcriptionEngine = bundle.transcriptionEngine
    coordinator.transcriptLogger = bundle.transcriptLogger
    coordinator.refinementEngine = bundle.refinementEngine
    coordinator.audioRecorder = bundle.audioRecorder
}

func seedIfNeeded(coordinator: AppCoordinator) async {
guard !didSeedInitialData else { return }
didSeedInitialData = true
Expand Down
12 changes: 11 additions & 1 deletion OpenOats/Sources/OpenOats/Audio/MicCapture.swift
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ final class MicCapture: @unchecked Sendable {
)
}

func bufferStream(deviceID: AudioDeviceID? = nil) -> AsyncStream<AVAudioPCMBuffer> {
func bufferStream(deviceID: AudioDeviceID? = nil, echoCancellation: Bool = false) -> AsyncStream<AVAudioPCMBuffer> {
// Defensive cleanup of any prior state
_streamContinuation.withLock { $0?.finish(); $0 = nil }
engine.inputNode.removeTap(onBus: 0)
Expand All @@ -55,6 +55,16 @@ final class MicCapture: @unchecked Sendable {
let inputNode = engine.inputNode
diagLog("[MIC-1b] input node ready")

// Enable voice processing (AEC + noise suppression) if requested
if echoCancellation {
do {
try inputNode.setVoiceProcessingEnabled(true)
diagLog("[MIC-1c] voice processing (AEC) enabled")
} catch {
diagLog("[MIC-1c] failed to enable voice processing: \(error.localizedDescription)")
}
}

// Set input device before accessing inputNode format
var resolvedDeviceID: AudioDeviceID?
if let id = deviceID {
Expand Down
Loading
Loading