diff --git a/Sources/AppState.swift b/Sources/AppState.swift
index f4200ce..18a6d93 100644
--- a/Sources/AppState.swift
+++ b/Sources/AppState.swift
@@ -49,8 +49,11 @@ final class AppState: ObservableObject, @unchecked Sendable {
     private let customContextPromptLastModifiedStorageKey = "custom_context_prompt_last_modified"
     private let shortcutStartDelayStorageKey = "shortcut_start_delay"
     private let forceHTTP2TranscriptionStorageKey = "force_http2_transcription"
+    private let maxRecordingSecondsStorageKey = "max_recording_seconds"
     private let transcribingIndicatorDelay: TimeInterval = 1.0
     let maxPipelineHistoryCount = 20
+    static let defaultMaxRecordingSeconds: Int = 120
+    private var recordingTimer: Timer?
 
     @Published var hasCompletedSetup: Bool {
         didSet {
@@ -141,6 +144,12 @@ final class AppState: ObservableObject, @unchecked Sendable {
         }
     }
 
+    @Published var maxRecordingSeconds: Int {
+        didSet {
+            UserDefaults.standard.set(maxRecordingSeconds, forKey: maxRecordingSecondsStorageKey)
+        }
+    }
+
     @Published var isRecording = false
     @Published var isTranscribing = false
     @Published var lastTranscript: String = ""
@@ -209,6 +218,8 @@ final class AppState: ObservableObject, @unchecked Sendable {
         let customContextPromptLastModified = UserDefaults.standard.string(forKey: customContextPromptLastModifiedStorageKey) ?? ""
         let shortcutStartDelay = max(0, UserDefaults.standard.double(forKey: shortcutStartDelayStorageKey))
         let forceHTTP2Transcription = UserDefaults.standard.bool(forKey: forceHTTP2TranscriptionStorageKey)
+        let storedMaxRecording = UserDefaults.standard.integer(forKey: maxRecordingSecondsStorageKey)
+        let maxRecordingSeconds = storedMaxRecording > 0 ? storedMaxRecording : Self.defaultMaxRecordingSeconds
         let initialAccessibility = AXIsProcessTrusted()
         let initialScreenCapturePermission = CGPreflightScreenCaptureAccess()
         var removedAudioFileNames: [String] = []
@@ -239,6 +250,7 @@ final class AppState: ObservableObject, @unchecked Sendable {
         self.customContextPromptLastModified = customContextPromptLastModified
         self.shortcutStartDelay = shortcutStartDelay
         self.forceHTTP2Transcription = forceHTTP2Transcription
+        self.maxRecordingSeconds = maxRecordingSeconds
         self.pipelineHistory = savedHistory
         self.hasAccessibility = initialAccessibility
         self.hasScreenRecordingPermission = initialScreenCapturePermission
@@ -785,6 +797,7 @@ final class AppState: ObservableObject, @unchecked Sendable {
                     .sink { [weak self] level in
                         self?.overlayManager.updateAudioLevel(level)
                     }
+                self.startRecordingTimer()
             }
         } catch {
             DispatchQueue.main.async {
@@ -852,6 +865,7 @@ final class AppState: ObservableObject, @unchecked Sendable {
     }
 
     private func stopAndTranscribe() {
+        stopRecordingTimer()
         cancelPendingShortcutStart()
         shortcutSessionController.reset()
         activeRecordingTriggerMode = nil
@@ -1062,6 +1076,27 @@ final class AppState: ObservableObject, @unchecked Sendable {
         }
     }
 
+    private func startRecordingTimer() {
+        recordingTimer?.invalidate()
+        overlayManager.overlayState.elapsedSeconds = 0
+        overlayManager.overlayState.maxRecordingSeconds = maxRecordingSeconds
+        recordingTimer = Timer.scheduledTimer(withTimeInterval: 1.0, repeats: true) { [weak self] _ in
+            guard let self else { return }
+            DispatchQueue.main.async {
+                self.overlayManager.overlayState.elapsedSeconds += 1
+                if self.overlayManager.overlayState.elapsedSeconds >= self.maxRecordingSeconds {
+                    self.stopAndTranscribe()
+                }
+            }
+        }
+    }
+
+    private func stopRecordingTimer() {
+        recordingTimer?.invalidate()
+        recordingTimer = nil
+        overlayManager.overlayState.elapsedSeconds = 0
+    }
+
     private func startContextCapture() {
         contextCaptureTask?.cancel()
         capturedContext = nil
diff --git a/Sources/RecordingOverlay.swift b/Sources/RecordingOverlay.swift
index 487945f..7173f6b 100644
--- a/Sources/RecordingOverlay.swift
+++ b/Sources/RecordingOverlay.swift
@@ -7,6 +7,9 @@ final class RecordingOverlayState: ObservableObject {
     @Published var phase: OverlayPhase = .recording
     @Published var audioLevel: Float = 0.0
     @Published var recordingTriggerMode: RecordingTriggerMode = .hold
+    @Published var elapsedSeconds: Int = 0
+    /// Maximum recording duration in seconds (used for timer color warnings)
+    var maxRecordingSeconds: Int = 120
 }
 
 enum OverlayPhase {
@@ -58,7 +61,7 @@ private func makeNotchContent(
 final class RecordingOverlayManager {
     private var overlayWindow: NSPanel?
     private var transcribingPanel: NSPanel?
-    private let overlayState = RecordingOverlayState()
+    let overlayState = RecordingOverlayState()
 
     var onStopButtonPressed: (() -> Void)?
 
@@ -226,7 +229,7 @@ final class RecordingOverlayManager {
     }
 
     private var overlayWidth: CGFloat {
-        let baseWidth: CGFloat = overlayState.phase == .recording && overlayState.recordingTriggerMode == .toggle ? 150 : 92
+        let baseWidth: CGFloat = overlayState.phase == .recording && overlayState.recordingTriggerMode == .toggle ? 190 : 130
         guard screenHasNotch else { return baseWidth }
         return max(notchWidth, baseWidth)
     }
@@ -388,6 +391,34 @@ struct InitializingDotsView: View {
     }
 }
 
+struct ElapsedTimerView: View {
+    let elapsedSeconds: Int
+    let maxSeconds: Int
+
+    private var timerColor: Color {
+        let remaining = maxSeconds - elapsedSeconds
+        if remaining <= 10 {
+            return .red
+        } else if remaining <= 30 {
+            return .yellow
+        }
+        return .white
+    }
+
+    private var formattedTime: String {
+        let minutes = elapsedSeconds / 60
+        let seconds = elapsedSeconds % 60
+        return String(format: "%d:%02d", minutes, seconds)
+    }
+
+    var body: some View {
+        Text(formattedTime)
+            .font(.system(size: 11, weight: .medium, design: .monospaced))
+            .foregroundColor(timerColor)
+            .animation(.easeInOut(duration: 0.3), value: timerColor)
+    }
+}
+
 struct RecordingOverlayView: View {
     @ObservedObject var state: RecordingOverlayState
     let onStopButtonPressed: () -> Void
@@ -399,8 +430,14 @@ struct RecordingOverlayView: View {
             InitializingDotsView()
                 .transition(.opacity)
         } else {
-            WaveformView(audioLevel: state.audioLevel)
-                .transition(.opacity)
+            HStack(spacing: 6) {
+                WaveformView(audioLevel: state.audioLevel)
+                ElapsedTimerView(
+                    elapsedSeconds: state.elapsedSeconds,
+                    maxSeconds: state.maxRecordingSeconds
+                )
+            }
+            .transition(.opacity)
         }
     }
 
diff --git a/Sources/SettingsView.swift b/Sources/SettingsView.swift
index dcce169..a4b9f85 100644
--- a/Sources/SettingsView.swift
+++ b/Sources/SettingsView.swift
@@ -98,6 +98,8 @@ struct GeneralSettingsView: View {
     @State private var keyValidationSuccess = false
     @State private var customVocabularyInput: String = ""
    @State private var micPermissionGranted = false
+    @State private var useCustomRecordingLimit = false
+    @State private var customRecordingLimitInput: String = ""
     @StateObject private var githubCache = GitHubMetadataCache.shared
     @ObservedObject private var updateManager = UpdateManager.shared
     private let freeflowRepoURL = URL(string: "https://github.com/zachlatta/freeflow")!
@@ -240,6 +242,9 @@ struct GeneralSettingsView: View {
             SettingsCard("Custom Vocabulary", icon: "text.book.closed.fill") {
                 vocabularySection
             }
+            SettingsCard("Recording Limit", icon: "timer") {
+                recordingLimitSection
+            }
             SettingsCard("Permissions", icon: "lock.shield.fill") {
                 permissionsSection
             }
@@ -250,6 +255,9 @@ struct GeneralSettingsView: View {
             apiKeyInput = appState.apiKey
             apiBaseURLInput = appState.apiBaseURL
             customVocabularyInput = appState.customVocabulary
+            let isCustom = appState.maxRecordingSeconds != AppState.defaultMaxRecordingSeconds
+            useCustomRecordingLimit = isCustom
+            customRecordingLimitInput = isCustom ? "\(appState.maxRecordingSeconds)" : ""
             checkMicPermission()
             appState.refreshLaunchAtLoginStatus()
             Task { await githubCache.fetchIfNeeded() }
@@ -581,6 +589,81 @@ struct GeneralSettingsView: View {
         }
     }
 
+    // MARK: Recording Limit
+
+    private var recordingLimitSection: some View {
+        VStack(alignment: .leading, spacing: 10) {
+            Text("Maximum recording duration before auto-stop.")
+                .font(.caption)
+                .foregroundStyle(.secondary)
+
+            VStack(alignment: .leading, spacing: 8) {
+                // Default option
+                HStack {
+                    Image(systemName: useCustomRecordingLimit ? "circle" : "checkmark.circle.fill")
+                        .foregroundColor(useCustomRecordingLimit ? .secondary : .accentColor)
+                    Text("Default (2 minutes)")
+                        .font(.body)
+                    Spacer()
+                }
+                .contentShape(Rectangle())
+                .onTapGesture {
+                    useCustomRecordingLimit = false
+                    customRecordingLimitInput = ""
+                    appState.maxRecordingSeconds = AppState.defaultMaxRecordingSeconds
+                }
+
+                // Custom option
+                HStack {
+                    Image(systemName: useCustomRecordingLimit ? "checkmark.circle.fill" : "circle")
+                        .foregroundColor(useCustomRecordingLimit ? .accentColor : .secondary)
+                    Text("Custom")
+                        .font(.body)
+                    if useCustomRecordingLimit {
+                        TextField("seconds", text: $customRecordingLimitInput)
+                            .textFieldStyle(.roundedBorder)
+                            .frame(width: 70)
+                            .onChange(of: customRecordingLimitInput) { newValue in
+                                let filtered = newValue.filter { $0.isNumber }
+                                if filtered != newValue {
+                                    customRecordingLimitInput = filtered
+                                }
+                                if let value = Int(filtered), value >= 15, value <= 600 {
+                                    appState.maxRecordingSeconds = value
+                                }
+                            }
+                        Text("seconds")
+                            .font(.caption)
+                            .foregroundStyle(.secondary)
+                    }
+                    Spacer()
+                }
+                .contentShape(Rectangle())
+                .onTapGesture {
+                    useCustomRecordingLimit = true
+                    if customRecordingLimitInput.isEmpty {
+                        customRecordingLimitInput = "\(appState.maxRecordingSeconds)"
+                    }
+                }
+            }
+
+            if useCustomRecordingLimit {
+                Text("Enter a value between 15 and 600 seconds.")
+                    .font(.caption)
+                    .foregroundStyle(.secondary)
+            }
+
+            Label {
+                Text("Longer recordings take more time to transcribe and may time out.")
+                    .font(.caption)
+            } icon: {
+                Image(systemName: "info.circle")
+                    .font(.caption)
+            }
+            .foregroundStyle(.secondary)
+        }
+    }
+
     // MARK: Permissions
 
     private var permissionsSection: some View {