Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
26 changes: 25 additions & 1 deletion OpenOats/Sources/OpenOats/App/AppCoordinator.swift
Original file line number Diff line number Diff line change
Expand Up @@ -254,20 +254,44 @@ final class AppCoordinator {
let title = transcriptStore.conversationState.currentTopic.isEmpty
? nil : transcriptStore.conversationState.currentTopic

// Extract meeting app name from state machine metadata (available in .ending state)
let meetingAppName: String?
if case .ending(let metadata) = state {
meetingAppName = metadata.detectionContext?.meetingApp?.name
} else {
meetingAppName = nil
}

// Capture the ASR engine name from current settings
let engineName = settings?.transcriptionModel.rawValue

let index = SessionIndex(
id: sessionID,
startedAt: transcriptStore.utterances.first?.timestamp ?? Date(),
endedAt: Date(),
templateSnapshot: sessionTemplateSnapshot,
title: title,
utteranceCount: utteranceCount,
hasNotes: false
hasNotes: false,
meetingApp: meetingAppName,
engine: engineName
)
let sidecar = SessionSidecar(index: index, notes: nil)

// 4. Write sidecar
await sessionStore.writeSidecar(sidecar)

// 4b. Generate structured Markdown file from JSONL (has refined text after backfill)
let jsonlRecords = await sessionStore.loadTranscript(sessionID: sessionID)
if !jsonlRecords.isEmpty, let settings {
let outputDir = URL(fileURLWithPath: settings.notesFolderPath)
MarkdownMeetingWriter.write(
metadata: .init(from: index),
records: jsonlRecords,
outputDirectory: outputDir
)
}

// 5. Close JSONL file
await sessionStore.endSession()

Expand Down
157 changes: 135 additions & 22 deletions OpenOats/Sources/OpenOats/App/OpenOatsApp.swift
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import SwiftUI
import AppKit
import Sparkle
import UserNotifications

public struct OpenOatsRootApp: App {
@NSApplicationDelegateAdaptor(AppDelegate.self) var appDelegate
Expand Down Expand Up @@ -28,7 +29,14 @@ public struct OpenOatsRootApp: App {
.defaultAppStorage(defaults)
.onAppear {
appDelegate.coordinator = coordinator
appDelegate.settings = settings
appDelegate.defaults = defaults
appDelegate.runtime = runtime
appDelegate.setupMenuBarIfNeeded(
coordinator: coordinator,
settings: settings,
showMainWindow: { [self] in showMainWindow() }
)
settings.applyScreenShareVisibility()
}
.onOpenURL { url in
Expand Down Expand Up @@ -84,52 +92,66 @@ public struct OpenOatsRootApp: App {
}

extension OpenOatsRootApp {
    /// Identifier shared by the main window scene and the lookup in `showMainWindow()`.
    static let mainWindowID = "main"

    /// Opens (or re-opens) the auxiliary notes window scene.
    private func openNotesWindow() {
        openWindow(id: "notes")
    }

    /// Brings the main window to the front, re-creating it if it was closed.
    ///
    /// Restores the regular activation policy first (so the app regains its
    /// Dock presence), activates the app, then either raises the existing
    /// main window or asks SwiftUI to open a fresh one.
    private func showMainWindow() {
        NSApp.setActivationPolicy(.regular)
        NSApp.activate(ignoringOtherApps: true)

        let existing = NSApp.windows.first { window in
            window.identifier?.rawValue == Self.mainWindowID
        }
        guard let existing else {
            openWindow(id: Self.mainWindowID)
            return
        }
        existing.makeKeyAndOrderFront(nil)
    }
}

/// Observes new window creation and applies screen-share visibility setting.
@MainActor
final class AppDelegate: NSObject, NSApplicationDelegate {
final class AppDelegate: NSObject, NSApplicationDelegate, NSWindowDelegate {
private var windowObserver: Any?
private var menuBarController: MenuBarController?
private var isTerminating = false
var coordinator: AppCoordinator?
var settings: AppSettings?
var runtime: AppRuntime?
var defaults: UserDefaults = .standard

/// Builds the Dock context menu: a single item that toggles recording,
/// mirroring the coordinator's current state. Returns nil when no
/// coordinator has been attached yet.
func applicationDockMenu(_ sender: NSApplication) -> NSMenu? {
    guard let coordinator else { return nil }

    let recording = coordinator.isRecording
    let item = NSMenuItem(
        title: recording ? "Stop Recording" : "Start Recording",
        action: recording ? #selector(stopRecording) : #selector(startRecording),
        keyEquivalent: ""
    )
    item.target = self

    let menu = NSMenu()
    menu.addItem(item)
    return menu
}
func setupMenuBarIfNeeded(
coordinator: AppCoordinator,
settings: AppSettings,
showMainWindow: @escaping () -> Void
) {
guard menuBarController == nil else { return }

@objc private func startRecording() {
coordinator?.queueExternalCommand(.startSession)
}
runtime?.ensureServicesInitialized(settings: settings, coordinator: coordinator)

@objc private func stopRecording() {
coordinator?.queueExternalCommand(.stopSession)
let controller = MenuBarController(
coordinator: coordinator,
settings: settings
)
controller.onShowMainWindow = showMainWindow
controller.onQuitApp = { [weak self] in
self?.handleQuit()
}
menuBarController = controller
}

func applicationDidFinishLaunching(_ notification: Notification) {
NSApp.setActivationPolicy(.regular)

let hidden = defaults.object(forKey: "hideFromScreenShare") == nil
? true
: defaults.bool(forKey: "hideFromScreenShare")
let sharingType: NSWindow.SharingType = hidden ? .none : .readOnly

for window in NSApp.windows {
window.sharingType = sharingType
window.delegate = self
}

// Watch for new windows being created (e.g. Settings window)
windowObserver = NotificationCenter.default.addObserver(
forName: NSWindow.didBecomeKeyNotification,
object: nil,
Expand All @@ -142,8 +164,99 @@ final class AppDelegate: NSObject, NSApplicationDelegate {
let type: NSWindow.SharingType = hide ? .none : .readOnly
for window in NSApp.windows {
window.sharingType = type
if window.delegate == nil || window.delegate === self {
window.delegate = self
}
}
}
}
}

/// Intercepts app termination while a recording session is active.
///
/// Flow: if no coordinator is attached, nothing is recording, or our own
/// async shutdown is already in flight, terminate immediately. Otherwise
/// ask the user; on confirmation, stop the session and defer termination
/// until the coordinator reaches `.idle` (or a 30 s deadline passes),
/// then resume the quit.
///
/// Fix: removed the redundant `self?.isTerminating = true` inside the
/// Task — the flag is already set synchronously before the Task starts,
/// so the duplicate assignment (and the `[weak self]` capture that
/// existed only to serve it) was dead code.
func applicationShouldTerminate(_ sender: NSApplication) -> NSApplication.TerminateReply {
    guard let coordinator else { return .terminateNow }

    // Re-entrant terminate request while the async shutdown below runs.
    if isTerminating {
        return .terminateNow
    }

    guard coordinator.isRecording else {
        return .terminateNow
    }

    let alert = NSAlert()
    alert.messageText = "Recording in Progress"
    alert.informativeText = "Stop recording and quit?"
    alert.alertStyle = .warning
    alert.addButton(withTitle: "Stop & Quit")
    alert.addButton(withTitle: "Cancel")

    let response = alert.runModal()
    guard response == .alertFirstButtonReturn else {
        return .terminateCancel
    }

    // Set the flag *before* kicking off the async wait so any re-entrant
    // terminate request falls through to `.terminateNow` above.
    isTerminating = true
    coordinator.handle(.userStopped, settings: settings)

    Task { @MainActor in
        // Poll until the coordinator winds down; give up after 30 s so a
        // stuck session can never block quitting forever.
        let deadline = Date().addingTimeInterval(30)
        while Date() < deadline {
            if case .idle = coordinator.state { break }
            try? await Task.sleep(for: .milliseconds(100))
        }
        NSApp.reply(toApplicationShouldTerminate: true)
    }
    return .terminateLater
}

/// Keep the app alive as a menu-bar/background app after the last window closes.
func applicationShouldTerminateAfterLastWindowClosed(_ sender: NSApplication) -> Bool {
    return false
}

// MARK: - NSWindowDelegate

/// Closing the main window hides it (and demotes the app to accessory /
/// menu-bar mode) instead of destroying it; all other windows close normally.
func windowShouldClose(_ sender: NSWindow) -> Bool {
    guard sender.identifier?.rawValue == OpenOatsRootApp.mainWindowID else {
        return true
    }
    sender.orderOut(nil)
    NSApp.setActivationPolicy(.accessory)
    showBackgroundModeHintIfNeeded()
    return false
}

// MARK: - One-Shot Background Notification

/// Posts a one-time user notification explaining that the app keeps running
/// in the background after its main window is closed.
///
/// Fires at most once (tracked via the "hasShownBackgroundModeHint" default)
/// and only while meeting auto-detection is enabled; silently does nothing
/// if the user denies notification authorization.
private func showBackgroundModeHintIfNeeded() {
    let alreadyShown = defaults.bool(forKey: "hasShownBackgroundModeHint")
    guard !alreadyShown, settings?.meetingAutoDetectEnabled == true else { return }

    // Marked as shown before requesting authorization, so a denial never
    // re-triggers the permission prompt on a later window close.
    defaults.set(true, forKey: "hasShownBackgroundModeHint")

    Task {
        let center = UNUserNotificationCenter.current()
        guard (try? await center.requestAuthorization(options: [.alert])) == true else {
            return
        }

        let content = UNMutableNotificationContent()
        content.title = "OpenOats is still running"
        content.body = "Meeting detection is active. Click the menu bar icon to access controls."

        try? await center.add(
            UNNotificationRequest(
                identifier: "background-mode-hint",
                content: content,
                trigger: nil
            )
        )
    }
}

// MARK: - Quit

/// Menu-bar "Quit" action: routes through `NSApp.terminate` so the delegate's
/// `applicationShouldTerminate(_:)` confirmation/shutdown path runs rather
/// than exiting abruptly.
private func handleQuit() {
NSApp.terminate(nil)
}
}
77 changes: 66 additions & 11 deletions OpenOats/Sources/OpenOats/Audio/AudioRecorder.swift
Original file line number Diff line number Diff line change
Expand Up @@ -40,11 +40,11 @@ final class AudioRecorder: @unchecked Sendable {

func writeMicBuffer(_ buffer: AVAudioPCMBuffer) {
lock.withLock {
guard buffer.frameLength > 0, let src = buffer.floatChannelData else { return }
guard buffer.frameLength > 0 else { return }
let frames = Int(buffer.frameLength)
let channels = Int(buffer.format.channelCount)

// Lazily create file as mono 48kHz (avoids deinterleaved format issues)
// Lazily create file as mono at the source sample rate
if micFile == nil, let url = micTempURL {
let monoFormat = AVAudioFormat(
standardFormatWithSampleRate: buffer.format.sampleRate, channels: 1
Expand All @@ -53,23 +53,78 @@ final class AudioRecorder: @unchecked Sendable {
diagLog("[RECORDER] mic file created: \(url.lastPathComponent) mono at \(buffer.format.sampleRate)Hz")
}

// Downmix to mono inline
// Downmix to mono inline — handle float32, int16, and int32 formats
guard let monoFormat = AVAudioFormat(
standardFormatWithSampleRate: buffer.format.sampleRate, channels: 1
),
let monoBuf = AVAudioPCMBuffer(pcmFormat: monoFormat, frameCapacity: buffer.frameCapacity),
let dst = monoBuf.floatChannelData?[0] else { return }
monoBuf.frameLength = buffer.frameLength

if channels == 1 {
memcpy(dst, src[0], frames * MemoryLayout<Float>.size)
} else {
let scale = 1.0 / Float(channels)
for i in 0..<frames {
var sum: Float = 0
for ch in 0..<channels { sum += src[ch][i] }
dst[i] = sum * scale
if let src = buffer.floatChannelData {
if channels == 1 {
if buffer.format.isInterleaved {
memcpy(dst, src[0], frames * MemoryLayout<Float>.size)
} else {
memcpy(dst, src[0], frames * MemoryLayout<Float>.size)
}
} else {
let scale = 1.0 / Float(channels)
if buffer.format.isInterleaved {
for i in 0..<frames {
var sum: Float = 0
for ch in 0..<channels { sum += src[0][(i * channels) + ch] }
dst[i] = sum * scale
}
} else {
for i in 0..<frames {
var sum: Float = 0
for ch in 0..<channels { sum += src[ch][i] }
dst[i] = sum * scale
}
}
}
} else if let src = buffer.int16ChannelData {
let scale = 1.0 / Float(Int16.max)
if channels == 1 {
for i in 0..<frames { dst[i] = Float(src[0][i]) * scale }
} else if buffer.format.isInterleaved {
let invCh = 1.0 / Float(channels)
for i in 0..<frames {
var sum: Float = 0
for ch in 0..<channels { sum += Float(src[0][(i * channels) + ch]) * scale }
dst[i] = sum * invCh
}
} else {
let invCh = 1.0 / Float(channels)
for i in 0..<frames {
var sum: Float = 0
for ch in 0..<channels { sum += Float(src[ch][i]) * scale }
dst[i] = sum * invCh
}
}
} else if let src = buffer.int32ChannelData {
let scale = 1.0 / Float(Int32.max)
if channels == 1 {
for i in 0..<frames { dst[i] = Float(src[0][i]) * scale }
} else if buffer.format.isInterleaved {
let invCh = 1.0 / Float(channels)
for i in 0..<frames {
var sum: Float = 0
for ch in 0..<channels { sum += Float(src[0][(i * channels) + ch]) * scale }
dst[i] = sum * invCh
}
} else {
let invCh = 1.0 / Float(channels)
for i in 0..<frames {
var sum: Float = 0
for ch in 0..<channels { sum += Float(src[ch][i]) * scale }
dst[i] = sum * invCh
}
}
} else {
diagLog("[RECORDER] mic write SKIP: unsupported buffer format \(buffer.format.commonFormat.rawValue)")
return
}

micWriteCount += 1
Expand Down
Loading
Loading