Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
35 changes: 35 additions & 0 deletions Sources/AppState.swift
Original file line number Diff line number Diff line change
Expand Up @@ -49,8 +49,11 @@ final class AppState: ObservableObject, @unchecked Sendable {
private let customContextPromptLastModifiedStorageKey = "custom_context_prompt_last_modified"
private let shortcutStartDelayStorageKey = "shortcut_start_delay"
private let forceHTTP2TranscriptionStorageKey = "force_http2_transcription"
private let maxRecordingSecondsStorageKey = "max_recording_seconds"
private let transcribingIndicatorDelay: TimeInterval = 1.0
let maxPipelineHistoryCount = 20
static let defaultMaxRecordingSeconds: Int = 120
private var recordingTimer: Timer?

@Published var hasCompletedSetup: Bool {
didSet {
Expand Down Expand Up @@ -141,6 +144,12 @@ final class AppState: ObservableObject, @unchecked Sendable {
}
}

// Auto-stop recording limit in seconds. Persisted on every change; init
// reads it back and treats stored values <= 0 as "use the default"
// (see defaultMaxRecordingSeconds).
@Published var maxRecordingSeconds: Int {
didSet {
UserDefaults.standard.set(maxRecordingSeconds, forKey: maxRecordingSecondsStorageKey)
}
}

@Published var isRecording = false
@Published var isTranscribing = false
@Published var lastTranscript: String = ""
Expand Down Expand Up @@ -209,6 +218,8 @@ final class AppState: ObservableObject, @unchecked Sendable {
let customContextPromptLastModified = UserDefaults.standard.string(forKey: customContextPromptLastModifiedStorageKey) ?? ""
let shortcutStartDelay = max(0, UserDefaults.standard.double(forKey: shortcutStartDelayStorageKey))
let forceHTTP2Transcription = UserDefaults.standard.bool(forKey: forceHTTP2TranscriptionStorageKey)
let storedMaxRecording = UserDefaults.standard.integer(forKey: maxRecordingSecondsStorageKey)
let maxRecordingSeconds = storedMaxRecording > 0 ? storedMaxRecording : Self.defaultMaxRecordingSeconds
let initialAccessibility = AXIsProcessTrusted()
let initialScreenCapturePermission = CGPreflightScreenCaptureAccess()
var removedAudioFileNames: [String] = []
Expand Down Expand Up @@ -239,6 +250,7 @@ final class AppState: ObservableObject, @unchecked Sendable {
self.customContextPromptLastModified = customContextPromptLastModified
self.shortcutStartDelay = shortcutStartDelay
self.forceHTTP2Transcription = forceHTTP2Transcription
self.maxRecordingSeconds = maxRecordingSeconds
self.pipelineHistory = savedHistory
self.hasAccessibility = initialAccessibility
self.hasScreenRecordingPermission = initialScreenCapturePermission
Expand Down Expand Up @@ -785,6 +797,7 @@ final class AppState: ObservableObject, @unchecked Sendable {
.sink { [weak self] level in
self?.overlayManager.updateAudioLevel(level)
}
self.startRecordingTimer()
}
} catch {
DispatchQueue.main.async {
Expand Down Expand Up @@ -852,6 +865,7 @@ final class AppState: ObservableObject, @unchecked Sendable {
}

private func stopAndTranscribe() {
stopRecordingTimer()
cancelPendingShortcutStart()
shortcutSessionController.reset()
activeRecordingTriggerMode = nil
Expand Down Expand Up @@ -1062,6 +1076,27 @@ final class AppState: ObservableObject, @unchecked Sendable {
}
}

/// Starts the 1-second countdown that drives the overlay's elapsed-time
/// display and force-stops the recording once `maxRecordingSeconds` is hit.
/// Any previous timer is invalidated first, and the overlay counters are
/// reset so the new session starts from 0.
private func startRecordingTimer() {
    recordingTimer?.invalidate()
    overlayManager.overlayState.elapsedSeconds = 0
    overlayManager.overlayState.maxRecordingSeconds = maxRecordingSeconds

    // Build the timer manually and attach it to the main run loop in .common
    // mode. `Timer.scheduledTimer` would register on the *calling* thread's
    // run loop in .default mode — if this method is reached from a background
    // context (e.g. the audio pipeline) the timer would never fire, and even
    // on main it would pause during event tracking (menus, window drags).
    let timer = Timer(timeInterval: 1.0, repeats: true) { [weak self] _ in
        guard let self else { return }
        // UI state (overlayState) must be mutated on the main thread.
        DispatchQueue.main.async {
            self.overlayManager.overlayState.elapsedSeconds += 1
            if self.overlayManager.overlayState.elapsedSeconds >= self.maxRecordingSeconds {
                // stopAndTranscribe() invalidates this timer, so this fires once.
                self.stopAndTranscribe()
            }
        }
    }
    RunLoop.main.add(timer, forMode: .common)
    recordingTimer = timer
}

/// Tears down the recording countdown: cancels the timer (a no-op when none
/// is running) and zeroes the overlay's elapsed counter for the next session.
private func stopRecordingTimer() {
    if let activeTimer = recordingTimer {
        activeTimer.invalidate()
    }
    recordingTimer = nil
    overlayManager.overlayState.elapsedSeconds = 0
}

private func startContextCapture() {
contextCaptureTask?.cancel()
capturedContext = nil
Expand Down
45 changes: 41 additions & 4 deletions Sources/RecordingOverlay.swift
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,9 @@ final class RecordingOverlayState: ObservableObject {
@Published var phase: OverlayPhase = .recording
@Published var audioLevel: Float = 0.0
@Published var recordingTriggerMode: RecordingTriggerMode = .hold
@Published var elapsedSeconds: Int = 0
/// Maximum recording duration in seconds (used for timer color warnings)
var maxRecordingSeconds: Int = 120
}

enum OverlayPhase {
Expand Down Expand Up @@ -58,7 +61,7 @@ private func makeNotchContent<V: View>(
final class RecordingOverlayManager {
private var overlayWindow: NSPanel?
private var transcribingPanel: NSPanel?
private let overlayState = RecordingOverlayState()
let overlayState = RecordingOverlayState()

var onStopButtonPressed: (() -> Void)?

Expand Down Expand Up @@ -226,7 +229,7 @@ final class RecordingOverlayManager {
}

private var overlayWidth: CGFloat {
let baseWidth: CGFloat = overlayState.phase == .recording && overlayState.recordingTriggerMode == .toggle ? 150 : 92
let baseWidth: CGFloat = overlayState.phase == .recording && overlayState.recordingTriggerMode == .toggle ? 190 : 130
guard screenHasNotch else { return baseWidth }
return max(notchWidth, baseWidth)
}
Expand Down Expand Up @@ -388,6 +391,34 @@ struct InitializingDotsView: View {
}
}

/// Monospaced "m:ss" elapsed-time readout shown next to the waveform while
/// recording. The text color escalates as the auto-stop limit approaches:
/// white normally, yellow with 30 seconds or less remaining, red with 10 or
/// less (including past the limit).
struct ElapsedTimerView: View {
    let elapsedSeconds: Int
    let maxSeconds: Int

    // Warning color derived from the remaining time, not the elapsed time,
    // so it tracks whatever limit the user configured.
    private var timerColor: Color {
        switch maxSeconds - elapsedSeconds {
        case ...10:
            return .red
        case ...30:
            return .yellow
        default:
            return .white
        }
    }

    // "m:ss" — minutes are not capped, so 10 minutes renders as "10:00".
    private var formattedTime: String {
        String(format: "%d:%02d", elapsedSeconds / 60, elapsedSeconds % 60)
    }

    var body: some View {
        Text(formattedTime)
            .font(.system(size: 11, weight: .medium, design: .monospaced))
            .foregroundColor(timerColor)
            .animation(.easeInOut(duration: 0.3), value: timerColor)
    }
}

struct RecordingOverlayView: View {
@ObservedObject var state: RecordingOverlayState
let onStopButtonPressed: () -> Void
Expand All @@ -399,8 +430,14 @@ struct RecordingOverlayView: View {
InitializingDotsView()
.transition(.opacity)
} else {
WaveformView(audioLevel: state.audioLevel)
.transition(.opacity)
HStack(spacing: 6) {
WaveformView(audioLevel: state.audioLevel)
ElapsedTimerView(
elapsedSeconds: state.elapsedSeconds,
maxSeconds: state.maxRecordingSeconds
)
}
.transition(.opacity)
}
}

Expand Down
83 changes: 83 additions & 0 deletions Sources/SettingsView.swift
Original file line number Diff line number Diff line change
Expand Up @@ -98,6 +98,8 @@ struct GeneralSettingsView: View {
@State private var keyValidationSuccess = false
@State private var customVocabularyInput: String = ""
@State private var micPermissionGranted = false
@State private var useCustomRecordingLimit = false
@State private var customRecordingLimitInput: String = ""
@StateObject private var githubCache = GitHubMetadataCache.shared
@ObservedObject private var updateManager = UpdateManager.shared
private let freeflowRepoURL = URL(string: "https://github.com/zachlatta/freeflow")!
Expand Down Expand Up @@ -240,6 +242,9 @@ struct GeneralSettingsView: View {
SettingsCard("Custom Vocabulary", icon: "text.book.closed.fill") {
vocabularySection
}
SettingsCard("Recording Limit", icon: "timer") {
recordingLimitSection
}
SettingsCard("Permissions", icon: "lock.shield.fill") {
permissionsSection
}
Expand All @@ -250,6 +255,9 @@ struct GeneralSettingsView: View {
apiKeyInput = appState.apiKey
apiBaseURLInput = appState.apiBaseURL
customVocabularyInput = appState.customVocabulary
let isCustom = appState.maxRecordingSeconds != AppState.defaultMaxRecordingSeconds
useCustomRecordingLimit = isCustom
customRecordingLimitInput = isCustom ? "\(appState.maxRecordingSeconds)" : ""
checkMicPermission()
appState.refreshLaunchAtLoginStatus()
Task { await githubCache.fetchIfNeeded() }
Expand Down Expand Up @@ -581,6 +589,81 @@ struct GeneralSettingsView: View {
}
}

// MARK: Recording Limit

/// Settings card content for the auto-stop recording limit: a two-option
/// pseudo radio group (default vs. custom seconds) backed directly by
/// `appState.maxRecordingSeconds`.
private var recordingLimitSection: some View {
    VStack(alignment: .leading, spacing: 10) {
        Text("Maximum recording duration before auto-stop.")
            .font(.caption)
            .foregroundStyle(.secondary)

        VStack(alignment: .leading, spacing: 8) {
            // Default option. Derive the label from the shared constant so the
            // UI cannot drift if defaultMaxRecordingSeconds ever changes
            // (previously hard-coded as "2 minutes").
            HStack {
                Image(systemName: useCustomRecordingLimit ? "circle" : "checkmark.circle.fill")
                    .foregroundColor(useCustomRecordingLimit ? .secondary : .accentColor)
                Text("Default (\(AppState.defaultMaxRecordingSeconds / 60) minutes)")
                    .font(.body)
                Spacer()
            }
            .contentShape(Rectangle())
            .onTapGesture {
                useCustomRecordingLimit = false
                customRecordingLimitInput = ""
                appState.maxRecordingSeconds = AppState.defaultMaxRecordingSeconds
            }

            // Custom option: free-form seconds entry, digits only.
            HStack {
                Image(systemName: useCustomRecordingLimit ? "checkmark.circle.fill" : "circle")
                    .foregroundColor(useCustomRecordingLimit ? .accentColor : .secondary)
                Text("Custom")
                    .font(.body)
                if useCustomRecordingLimit {
                    TextField("seconds", text: $customRecordingLimitInput)
                        .textFieldStyle(.roundedBorder)
                        .frame(width: 70)
                        .onChange(of: customRecordingLimitInput) { newValue in
                            // Strip non-digits as the user types.
                            let filtered = newValue.filter { $0.isNumber }
                            if filtered != newValue {
                                customRecordingLimitInput = filtered
                            }
                            // Only commit in-range values; out-of-range input is
                            // silently ignored, so the field can temporarily show
                            // a value that is not the active limit.
                            // NOTE(review): consider clamping on focus loss so the
                            // field and the persisted setting cannot stay divergent.
                            if let value = Int(filtered), value >= 15, value <= 600 {
                                appState.maxRecordingSeconds = value
                            }
                        }
                    Text("seconds")
                        .font(.caption)
                        .foregroundStyle(.secondary)
                }
                Spacer()
            }
            .contentShape(Rectangle())
            .onTapGesture {
                useCustomRecordingLimit = true
                // Seed the field with the current limit so switching modes
                // does not blank out the active value.
                if customRecordingLimitInput.isEmpty {
                    customRecordingLimitInput = "\(appState.maxRecordingSeconds)"
                }
            }
        }

        if useCustomRecordingLimit {
            Text("Enter a value between 15 and 600 seconds.")
                .font(.caption)
                .foregroundStyle(.secondary)
        }

        Label {
            Text("Longer recordings take more time to transcribe and may time out.")
                .font(.caption)
        } icon: {
            Image(systemName: "info.circle")
                .font(.caption)
        }
        .foregroundStyle(.secondary)
    }
}

// MARK: Permissions

private var permissionsSection: some View {
Expand Down