diff --git a/Fluid.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved b/Fluid.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved
index 9c4cd8b0..5ea70f85 100644
--- a/Fluid.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved
+++ b/Fluid.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved
@@ -16,7 +16,7 @@
"location" : "https://github.com/altic-dev/DynamicNotchKit.git",
"state" : {
"branch" : "main",
- "revision" : "cd0b3e52d537db115ad3a9d89601f20e0bee8d27"
+ "revision" : "708f31da5319436c64059ee7ae566953407063d7"
}
},
{
diff --git a/Info.plist b/Info.plist
index f001db0b..3a9e730f 100644
--- a/Info.plist
+++ b/Info.plist
@@ -15,7 +15,7 @@
CFBundleVersion
10
CFBundleShortVersionString
- 1.5.12
+ 1.5.13-beta.1
LSMinimumSystemVersion
$(MACOSX_DEPLOYMENT_TARGET)
LSApplicationCategoryType
diff --git a/Package.resolved b/Package.resolved
index 1e2aa041..1f3d17e9 100644
--- a/Package.resolved
+++ b/Package.resolved
@@ -15,7 +15,7 @@
"location" : "https://github.com/altic-dev/DynamicNotchKit.git",
"state" : {
"branch" : "main",
- "revision" : "cd0b3e52d537db115ad3a9d89601f20e0bee8d27"
+ "revision" : "708f31da5319436c64059ee7ae566953407063d7"
}
},
{
diff --git a/Sources/Fluid/ContentView.swift b/Sources/Fluid/ContentView.swift
index b6ff8c17..06f5b783 100644
--- a/Sources/Fluid/ContentView.swift
+++ b/Sources/Fluid/ContentView.swift
@@ -296,27 +296,34 @@ struct ContentView: View {
// Set up notch click callback for expanding command conversation
NotchOverlayManager.shared.onNotchClicked = {
+ guard NotchOverlayManager.shared.canHandleNotchCommandTap else { return }
// When notch is clicked in command mode, show expanded conversation
- if !NotchContentState.shared.commandConversationHistory.isEmpty {
+ if NotchOverlayManager.shared.canShowExpandedCommandOutput,
+ !NotchContentState.shared.commandConversationHistory.isEmpty
+ {
NotchOverlayManager.shared.showExpandedCommandOutput()
}
}
// Set up command mode callbacks for notch
NotchOverlayManager.shared.onCommandFollowUp = { [weak commandModeService] text in
+ guard NotchOverlayManager.shared.allowsCommandNotchActions else { return }
await commandModeService?.processFollowUpCommand(text)
}
// Chat management callbacks
NotchOverlayManager.shared.onNewChat = { [weak commandModeService] in
+ guard NotchOverlayManager.shared.allowsCommandNotchActions else { return }
commandModeService?.createNewChat()
}
NotchOverlayManager.shared.onSwitchChat = { [weak commandModeService] chatID in
+ guard NotchOverlayManager.shared.allowsCommandNotchActions else { return }
commandModeService?.switchToChat(id: chatID)
}
NotchOverlayManager.shared.onClearChat = { [weak commandModeService] in
+ guard NotchOverlayManager.shared.allowsCommandNotchActions else { return }
commandModeService?.deleteCurrentChat()
}
@@ -1712,12 +1719,12 @@ struct ContentView: View {
self.clearActiveRecordingMode()
- // Show "Transcribing..." state before calling stop() to keep overlay visible.
+ // Show "Transcribing" state before calling stop() to keep overlay visible.
// The asr.stop() call performs the final transcription which can take a moment
// (especially for slower models like Whisper Medium/Large).
DebugLogger.shared.debug("Showing transcription processing state", source: "ContentView")
self.menuBarManager.setProcessing(true)
- NotchOverlayManager.shared.updateTranscriptionText("Transcribing...")
+ NotchOverlayManager.shared.updateTranscriptionText("Transcribing")
// Give SwiftUI a chance to render the processing state before we do heavier work
// (ASR finalization + optional AI post-processing).
@@ -1734,6 +1741,7 @@ struct ContentView: View {
DebugLogger.shared.debug("Transcription returned empty text", source: "ContentView")
// Hide processing state when returning early
self.menuBarManager.setProcessing(false)
+ NotchOverlayManager.shared.hide()
return
}
@@ -1807,7 +1815,7 @@ struct ContentView: View {
let postProcessingStart = Date()
// Update overlay text to show we're now refining (processing already true)
- NotchOverlayManager.shared.updateTranscriptionText("Refining...")
+ NotchOverlayManager.shared.updateTranscriptionText("Refining")
// Ensure the status label becomes visible immediately.
await Task.yield()
@@ -1957,6 +1965,10 @@ struct ContentView: View {
]
)
}
+
+ if !didTypeExternally {
+ NotchOverlayManager.shared.hide()
+ }
}
private func currentDictationOutputRouteForHotkeyStop() -> DictationOutputRoute {
diff --git a/Sources/Fluid/Persistence/SettingsStore.swift b/Sources/Fluid/Persistence/SettingsStore.swift
index f4deb986..c9e36727 100644
--- a/Sources/Fluid/Persistence/SettingsStore.swift
+++ b/Sources/Fluid/Persistence/SettingsStore.swift
@@ -1332,6 +1332,22 @@ final class SettingsStore: ObservableObject {
}
}
+ /// Internal presentation modes for the top notch overlay.
+ /// This is intentionally separate from bottom overlay sizing.
+ enum NotchPresentationMode: String, CaseIterable, Codable {
+ case standard
+ case minimal
+
+ var displayName: String {
+ switch self {
+ case .standard:
+ return "Standard Notch"
+ case .minimal:
+ return "Compact"
+ }
+ }
+ }
+
/// Where the recording overlay appears (default: bottom)
var overlayPosition: OverlayPosition {
get {
@@ -1348,6 +1364,22 @@ final class SettingsStore: ObservableObject {
}
}
+ /// Internal-only top notch presentation mode. No public settings UI yet.
+ var notchPresentationMode: NotchPresentationMode {
+ get {
+ guard let raw = self.defaults.string(forKey: Keys.notchPresentationMode),
+ let mode = NotchPresentationMode(rawValue: raw)
+ else {
+ return .standard
+ }
+ return mode
+ }
+ set {
+ objectWillChange.send()
+ self.defaults.set(newValue.rawValue, forKey: Keys.notchPresentationMode)
+ }
+ }
+
/// Vertical offset for the bottom overlay (distance from bottom of screen/dock)
var overlayBottomOffset: Double {
get {
@@ -3581,6 +3613,7 @@ private extension SettingsStore {
// Overlay Position
static let overlayPosition = "OverlayPosition"
+ static let notchPresentationMode = "NotchPresentationMode"
static let overlayBottomOffset = "OverlayBottomOffset"
static let overlayBottomOffsetMigratedTo50 = "OverlayBottomOffsetMigratedTo50"
static let overlaySize = "OverlaySize"
diff --git a/Sources/Fluid/Services/CommandModeService.swift b/Sources/Fluid/Services/CommandModeService.swift
index be5d5dcb..785656e5 100644
--- a/Sources/Fluid/Services/CommandModeService.swift
+++ b/Sources/Fluid/Services/CommandModeService.swift
@@ -33,6 +33,10 @@ final class CommandModeService: ObservableObject {
self.loadCurrentChatFromStore()
}
+ private var shouldSyncCommandNotchState: Bool {
+ self.enableNotchOutput && NotchOverlayManager.shared.shouldSyncCommandConversationToNotch
+ }
+
private func loadCurrentChatFromStore() {
if let session = chatStore.currentSession {
self.currentChatID = session.id
@@ -278,6 +282,11 @@ final class CommandModeService: ObservableObject {
/// Sync conversation history to NotchContentState
private func syncToNotchState() {
+ guard self.shouldSyncCommandNotchState else {
+ NotchContentState.shared.clearCommandOutput()
+ return
+ }
+
NotchContentState.shared.clearCommandOutput()
for msg in self.conversationHistory {
@@ -308,7 +317,7 @@ final class CommandModeService: ObservableObject {
self.saveCurrentChat()
// Push to notch
- if self.enableNotchOutput {
+ if self.shouldSyncCommandNotchState {
NotchContentState.shared.addCommandMessage(role: .user, content: text)
NotchContentState.shared.setCommandProcessing(true)
}
@@ -322,14 +331,18 @@ final class CommandModeService: ObservableObject {
// Add to both histories
self.conversationHistory.append(Message(role: .user, content: text))
- NotchContentState.shared.addCommandMessage(role: .user, content: text)
+ if self.shouldSyncCommandNotchState {
+ NotchContentState.shared.addCommandMessage(role: .user, content: text)
+ }
// Auto-save after adding user message
self.saveCurrentChat()
self.isProcessing = true
self.didRequireConfirmationThisRun = false
- NotchContentState.shared.setCommandProcessing(true)
+ if self.shouldSyncCommandNotchState {
+ NotchContentState.shared.setCommandProcessing(true)
+ }
await self.processNextTurn()
}
@@ -374,7 +387,7 @@ final class CommandModeService: ObservableObject {
self.captureCommandRunCompleted(success: false)
// Push to notch
- if self.enableNotchOutput {
+ if self.shouldSyncCommandNotchState {
NotchContentState.shared.addCommandMessage(role: .assistant, content: errorMsg)
NotchContentState.shared.setCommandProcessing(false)
self.showExpandedNotchIfNeeded()
@@ -386,7 +399,7 @@ final class CommandModeService: ObservableObject {
self.currentStep = .thinking("Analyzing...")
// Push status to notch
- if self.enableNotchOutput {
+ if self.shouldSyncCommandNotchState {
NotchContentState.shared.addCommandMessage(role: .status, content: "Thinking...")
}
@@ -414,7 +427,7 @@ final class CommandModeService: ObservableObject {
))
// Push step to notch
- if self.enableNotchOutput {
+ if self.shouldSyncCommandNotchState {
let statusText = tc.purpose ?? self.stepDescription(for: stepType)
NotchContentState.shared.addCommandMessage(role: .status, content: statusText)
}
@@ -432,7 +445,7 @@ final class CommandModeService: ObservableObject {
self.currentStep = nil
// Push confirmation needed to notch
- if self.enableNotchOutput {
+ if self.shouldSyncCommandNotchState {
NotchContentState.shared.addCommandMessage(role: .status, content: "⚠️ Confirmation needed in Command Mode window")
NotchContentState.shared.setCommandProcessing(false)
}
@@ -464,7 +477,7 @@ final class CommandModeService: ObservableObject {
self.captureCommandRunCompleted(success: isFinal)
// Push final response to notch and show expanded view
- if self.enableNotchOutput {
+ if self.shouldSyncCommandNotchState {
NotchContentState.shared.updateCommandStreamingText("") // Clear streaming
NotchContentState.shared.addCommandMessage(role: .assistant, content: response.content)
NotchContentState.shared.setCommandProcessing(false)
@@ -488,7 +501,7 @@ final class CommandModeService: ObservableObject {
self.captureCommandRunCompleted(success: false)
// Push error to notch
- if self.enableNotchOutput {
+ if self.shouldSyncCommandNotchState {
NotchContentState.shared.addCommandMessage(role: .assistant, content: errorMsg)
NotchContentState.shared.setCommandProcessing(false)
self.showExpandedNotchIfNeeded()
@@ -530,7 +543,8 @@ final class CommandModeService: ObservableObject {
/// Show expanded notch output if there's content to display
private func showExpandedNotchIfNeeded() {
- guard self.enableNotchOutput else { return }
+ guard self.shouldSyncCommandNotchState else { return }
+ guard NotchOverlayManager.shared.canShowExpandedCommandOutput else { return }
guard !NotchContentState.shared.commandConversationHistory.isEmpty else { return }
// Show the expanded notch
@@ -912,7 +926,7 @@ final class CommandModeService: ObservableObject {
self.streamingText = fullContent
// Push to notch for real-time display
- if self.enableNotchOutput {
+ if self.shouldSyncCommandNotchState {
NotchContentState.shared.updateCommandStreamingText(fullContent)
}
}
@@ -928,7 +942,7 @@ final class CommandModeService: ObservableObject {
let fullContent = self.streamingBuffer.joined()
if !fullContent.isEmpty {
self.streamingText = fullContent
- if self.enableNotchOutput {
+ if self.shouldSyncCommandNotchState {
NotchContentState.shared.updateCommandStreamingText(fullContent)
}
}
@@ -945,7 +959,7 @@ final class CommandModeService: ObservableObject {
self.thinkingBuffer = [] // Clear thinking buffer
// Clear notch streaming text as well
- if self.enableNotchOutput {
+ if self.shouldSyncCommandNotchState {
NotchContentState.shared.updateCommandStreamingText("")
}
diff --git a/Sources/Fluid/Services/MenuBarManager.swift b/Sources/Fluid/Services/MenuBarManager.swift
index f1771905..c438fc64 100644
--- a/Sources/Fluid/Services/MenuBarManager.swift
+++ b/Sources/Fluid/Services/MenuBarManager.swift
@@ -44,6 +44,8 @@ final class MenuBarManager: NSObject, ObservableObject, NSMenuDelegate {
// Track pending overlay operations to prevent spam
private var pendingShowOperation: DispatchWorkItem?
private var pendingHideOperation: DispatchWorkItem?
+ private var pendingProcessingShowOperation: DispatchWorkItem?
+ private let processingVisualDelay: DispatchTimeInterval = .milliseconds(100)
// Subscription for forwarding audio levels to expanded command notch
private var expandedModeAudioSubscription: AnyCancellable?
@@ -87,9 +89,7 @@ final class MenuBarManager: NSObject, ObservableObject, NSMenuDelegate {
.receive(on: DispatchQueue.main)
.sink { [weak self] newText in
guard self != nil else { return }
- // CRITICAL FIX: Check if streaming preview is enabled before updating notch
- // The "Show Live Preview" toggle in Preferences should control this behavior
- if SettingsStore.shared.enableStreamingPreview {
+ if NotchOverlayManager.shared.shouldShowOrTrackLivePreviewText {
NotchOverlayManager.shared.updateTranscriptionText(newText)
}
}
@@ -107,7 +107,6 @@ final class MenuBarManager: NSObject, ObservableObject, NSMenuDelegate {
// Prevent rapid state changes that could cause cycles
guard self.overlayVisible != isRunning else { return }
- let delay: DispatchTimeInterval = .milliseconds(30)
if isRunning {
// Cancel any pending hide operation
self.pendingHideOperation?.cancel()
@@ -119,7 +118,7 @@ final class MenuBarManager: NSObject, ObservableObject, NSMenuDelegate {
if NotchOverlayManager.shared.isCommandOutputExpanded {
// Only keep expanded notch if this is a command mode recording (follow-up)
// For other modes (dictation, rewrite), close it and show regular notch
- if self.currentOverlayMode == .command {
+ if self.currentOverlayMode == .command, NotchOverlayManager.shared.supportsCommandNotchUI {
// Enable recording visualization in the expanded notch
NotchContentState.shared.setRecordingInExpandedMode(true)
@@ -143,7 +142,10 @@ final class MenuBarManager: NSObject, ObservableObject, NSMenuDelegate {
// Double-check expanded notch isn't showing (could have changed during delay)
// But only block if we're in command mode
- if NotchOverlayManager.shared.isCommandOutputExpanded && self.currentOverlayMode == .command {
+ if NotchOverlayManager.shared.isCommandOutputExpanded,
+ self.currentOverlayMode == .command,
+ NotchOverlayManager.shared.supportsCommandNotchUI
+ {
self.pendingShowOperation = nil
return
}
@@ -157,7 +159,7 @@ final class MenuBarManager: NSObject, ObservableObject, NSMenuDelegate {
self.pendingShowOperation = nil
}
self.pendingShowOperation = showItem
- DispatchQueue.main.asyncAfter(deadline: .now() + delay, execute: showItem)
+ DispatchQueue.main.async(execute: showItem)
} else {
// Cancel any pending show operation
self.pendingShowOperation?.cancel()
@@ -191,7 +193,7 @@ final class MenuBarManager: NSObject, ObservableObject, NSMenuDelegate {
self.pendingHideOperation = nil
}
self.pendingHideOperation = hideItem
- DispatchQueue.main.asyncAfter(deadline: .now() + delay, execute: hideItem)
+ DispatchQueue.main.asyncAfter(deadline: .now() + .milliseconds(30), execute: hideItem)
}
}
@@ -211,11 +213,22 @@ final class MenuBarManager: NSObject, ObservableObject, NSMenuDelegate {
self.isProcessingActive = processing
if processing {
+ self.pendingProcessingShowOperation?.cancel()
// Cancel any pending hide - we want to keep the overlay visible for AI processing
self.pendingHideOperation?.cancel()
self.pendingHideOperation = nil
self.overlayVisible = true
+
+ let showItem = DispatchWorkItem { [weak self] in
+ guard let self = self, self.isProcessingActive else { return }
+ NotchOverlayManager.shared.setProcessing(true)
+ self.pendingProcessingShowOperation = nil
+ }
+ self.pendingProcessingShowOperation = showItem
+ DispatchQueue.main.asyncAfter(deadline: .now() + self.processingVisualDelay, execute: showItem)
} else {
+ self.pendingProcessingShowOperation?.cancel()
+ self.pendingProcessingShowOperation = nil
// When processing ends, schedule the hide (unless expanded output is showing)
self.overlayVisible = false
@@ -240,8 +253,9 @@ final class MenuBarManager: NSObject, ObservableObject, NSMenuDelegate {
}
self.pendingHideOperation = hideItem
DispatchQueue.main.asyncAfter(deadline: .now() + .milliseconds(100), execute: hideItem)
+ NotchOverlayManager.shared.setProcessing(false)
+ return
}
- NotchOverlayManager.shared.setProcessing(processing)
}
private func setupMenuBarSafely() {
diff --git a/Sources/Fluid/Services/NotchOverlayManager.swift b/Sources/Fluid/Services/NotchOverlayManager.swift
index 939cf5f1..464f0c43 100644
--- a/Sources/Fluid/Services/NotchOverlayManager.swift
+++ b/Sources/Fluid/Services/NotchOverlayManager.swift
@@ -24,11 +24,22 @@ enum OverlayMode: String {
final class NotchOverlayManager {
static let shared = NotchOverlayManager()
- private var notch: DynamicNotch?
+ struct NotchPresentationPolicy: Equatable {
+ let usesCompactPresentation: Bool
+ let showsPromptSelector: Bool
+ let showsStreamingPreview: Bool
+ let showsModeLabel: Bool
+ let allowsCommandExpansion: Bool
+ let allowsCommandActions: Bool
+ let allowsExpandedCommandOutput: Bool
+ }
+
+ private var notch: DynamicNotch?
private var commandOutputNotch: DynamicNotch<
NotchCommandOutputExpandedView,
NotchCompactLeadingView,
- NotchCompactTrailingView
+ NotchCompactTrailingView,
+ EmptyView
>?
private var currentMode: OverlayMode = .dictation
@@ -78,7 +89,13 @@ final class NotchOverlayManager {
private var globalEscapeMonitor: Any?
private var localEscapeMonitor: Any?
+ private(set) var currentNotchPresentationMode: SettingsStore.NotchPresentationMode = .standard
+ private(set) var currentNotchPresentationPolicy = NotchPresentationPolicy.standard
+ private(set) var currentScreenSupportsCompactPresentation = false
+ private var presentationPolicyScreen: NSScreen?
+
private init() {
+ self.refreshNotchPresentationPolicy()
self.setupEscapeKeyMonitors()
}
@@ -113,6 +130,8 @@ final class NotchOverlayManager {
}
func show(audioLevelPublisher: AnyPublisher, mode: OverlayMode) {
+ self.refreshNotchPresentationPolicy()
+
// Don't show regular notch if expanded command output is visible
if self.isCommandOutputExpanded {
// Just store the publisher for later use
@@ -187,6 +206,11 @@ final class NotchOverlayManager {
/// Show notch overlay (original behavior)
private func showNotchOverlay(audioLevelPublisher: AnyPublisher, mode: OverlayMode) {
+ let targetScreen = self.preferredPresentationScreen()
+ self.presentationPolicyScreen = targetScreen
+ self.refreshNotchPresentationPolicy(for: targetScreen)
+ self.currentAudioPublisher = audioLevelPublisher
+
// Hide bottom overlay if it was visible
if self.isBottomOverlayVisible {
BottomOverlayWindowController.shared.hide()
@@ -207,21 +231,28 @@ final class NotchOverlayManager {
// Create notch with SwiftUI views
let newNotch = DynamicNotch(
- hoverBehavior: [.keepVisible, .hapticFeedback],
+ hoverBehavior: [.keepVisible],
style: .auto
) {
NotchExpandedView(audioPublisher: audioLevelPublisher)
} compactLeading: {
NotchCompactLeadingView()
} compactTrailing: {
- NotchCompactTrailingView()
+ NotchCompactTrailingView(audioPublisher: audioLevelPublisher)
+ } compactBottom: {
+ NotchCompactBottomView()
}
self.notch = newNotch
+ let shouldUseCompactPresentation = self.currentNotchPresentationPolicy.usesCompactPresentation
- // Show in expanded state
+ // Resolve presentation from policy so future notch modes don't require call-site changes.
Task { [weak self] in
- await newNotch.expand()
+ if shouldUseCompactPresentation {
+ await newNotch.compact(on: targetScreen)
+ } else {
+ await newNotch.expand(on: targetScreen)
+ }
// Only update state if we're still the active generation
guard let self = self, self.generation == currentGeneration else { return }
self.state = .visible
@@ -281,6 +312,8 @@ final class NotchOverlayManager {
}
func setMode(_ mode: OverlayMode) {
+ self.refreshNotchPresentationPolicy()
+
// Always update NotchContentState to ensure UI stays in sync
// (can get out of sync during show/hide transitions)
let normalized = self.normalizedOverlayMode(mode)
@@ -302,6 +335,12 @@ final class NotchOverlayManager {
}
func updateTranscriptionText(_ text: String) {
+ guard self.shouldShowOrTrackLivePreviewText else {
+ if !NotchContentState.shared.transcriptionText.isEmpty {
+ NotchContentState.shared.updateTranscription("")
+ }
+ return
+ }
NotchContentState.shared.updateTranscription(text)
}
@@ -333,6 +372,8 @@ final class NotchOverlayManager {
/// Show expanded command output notch
func showExpandedCommandOutput() {
+ guard self.canShowExpandedCommandOutput else { return }
+
// Hide regular notch first if visible
if self.notch != nil {
self.hide()
@@ -346,6 +387,7 @@ final class NotchOverlayManager {
}
private func showExpandedCommandOutputInternal() async {
+ guard self.canShowExpandedCommandOutput else { return }
guard self.commandOutputState == .idle else { return }
self.commandOutputGeneration &+= 1
@@ -373,32 +415,38 @@ final class NotchOverlayManager {
}
},
onSubmit: { [weak self] text in
- await self?.onCommandFollowUp?(text)
+ guard let self, self.allowsCommandNotchActions else { return }
+ await self.onCommandFollowUp?(text)
},
onNewChat: { [weak self] in
Task { @MainActor in
- self?.onNewChat?()
+ guard let self, self.allowsCommandNotchActions else { return }
+ self.onNewChat?()
// Refresh recent chats in notch state
NotchContentState.shared.refreshRecentChats()
}
},
onSwitchChat: { [weak self] chatID in
Task { @MainActor in
- self?.onSwitchChat?(chatID)
+ guard let self, self.allowsCommandNotchActions else { return }
+ self.onSwitchChat?(chatID)
// Refresh recent chats in notch state
NotchContentState.shared.refreshRecentChats()
}
},
onClearChat: { [weak self] in
Task { @MainActor in
- self?.onClearChat?()
+ guard let self, self.allowsCommandNotchActions else { return }
+ self.onClearChat?()
}
}
)
} compactLeading: {
NotchCompactLeadingView()
} compactTrailing: {
- NotchCompactTrailingView()
+ NotchCompactTrailingView(audioPublisher: publisher)
+ } compactBottom: {
+ EmptyView()
}
self.commandOutputNotch = newNotch
@@ -463,12 +511,59 @@ final class NotchOverlayManager {
func toggleExpandedCommandOutput() {
if self.isCommandOutputExpanded {
self.hideExpandedCommandOutput()
- } else if NotchContentState.shared.commandConversationHistory.isEmpty == false {
+ } else if self.canShowExpandedCommandOutput,
+ NotchContentState.shared.commandConversationHistory.isEmpty == false
+ {
// Only show if there's history to show
self.showExpandedCommandOutput()
}
}
+ var canShowExpandedCommandOutput: Bool {
+ self.refreshNotchPresentationPolicy()
+ return self.currentNotchPresentationPolicy.allowsExpandedCommandOutput
+ }
+
+ var canHandleNotchCommandTap: Bool {
+ self.refreshNotchPresentationPolicy()
+ return self.currentNotchPresentationPolicy.allowsCommandExpansion &&
+ self.currentNotchPresentationPolicy.allowsCommandActions
+ }
+
+ var allowsCommandNotchActions: Bool {
+ self.refreshNotchPresentationPolicy()
+ return self.currentNotchPresentationPolicy.allowsCommandActions
+ }
+
+ var supportsCommandNotchUI: Bool {
+ self.refreshNotchPresentationPolicy()
+ return self.currentNotchPresentationPolicy.allowsCommandExpansion ||
+ self.currentNotchPresentationPolicy.allowsExpandedCommandOutput ||
+ self.currentNotchPresentationPolicy.allowsCommandActions
+ }
+
+ var shouldShowOrTrackLivePreviewText: Bool {
+ guard SettingsStore.shared.enableStreamingPreview else { return false }
+ if SettingsStore.shared.overlayPosition == .bottom {
+ return true
+ }
+
+ self.refreshNotchPresentationPolicy()
+ return self.currentNotchPresentationPolicy.showsStreamingPreview
+ }
+
+ var shouldSyncCommandConversationToNotch: Bool {
+ guard self.enableNotchFeatures else { return false }
+
+ self.refreshNotchPresentationPolicy()
+ return self.currentNotchPresentationPolicy.allowsExpandedCommandOutput ||
+ self.currentNotchPresentationPolicy.allowsCommandActions
+ }
+
+ private var enableNotchFeatures: Bool {
+ SettingsStore.shared.overlayPosition == .top || self.supportsCommandNotchUI
+ }
+
/// Check if any notch (regular or expanded) is visible
var isAnyNotchVisible: Bool {
return self.state == .visible || self.state == .showing || self.isCommandOutputExpanded
@@ -479,4 +574,58 @@ final class NotchOverlayManager {
self.lastAudioPublisher = publisher
self.currentAudioPublisher = publisher
}
+
+ private func preferredPresentationScreen() -> NSScreen {
+ let mouseLocation = NSEvent.mouseLocation
+ if let screenUnderMouse = NSScreen.screens.first(where: { NSMouseInRect(mouseLocation, $0.frame, false) }) {
+ return screenUnderMouse
+ }
+ return NSScreen.main ?? NSScreen.screens[0]
+ }
+
+ private func supportsCompactPresentation(on screen: NSScreen) -> Bool {
+ screen.auxiliaryTopLeftArea?.width != nil && screen.auxiliaryTopRightArea?.width != nil
+ }
+
+ private func refreshNotchPresentationPolicy(for screen: NSScreen? = nil) {
+ let mode = SettingsStore.shared.notchPresentationMode
+ self.currentNotchPresentationMode = mode
+ let resolvedScreen = screen ?? self.presentationPolicyScreen ?? self.preferredPresentationScreen()
+ self.currentScreenSupportsCompactPresentation = self.supportsCompactPresentation(on: resolvedScreen)
+ self.currentNotchPresentationPolicy = .forMode(
+ mode,
+ supportsCompactPresentation: self.currentScreenSupportsCompactPresentation
+ )
+ }
+}
+
+private extension NotchOverlayManager.NotchPresentationPolicy {
+ static let standard = Self(
+ usesCompactPresentation: false,
+ showsPromptSelector: true,
+ showsStreamingPreview: true,
+ showsModeLabel: true,
+ allowsCommandExpansion: true,
+ allowsCommandActions: true,
+ allowsExpandedCommandOutput: true
+ )
+
+ static let minimal = Self(
+ usesCompactPresentation: true,
+ showsPromptSelector: false,
+ showsStreamingPreview: true,
+ showsModeLabel: true,
+ allowsCommandExpansion: false,
+ allowsCommandActions: false,
+ allowsExpandedCommandOutput: false
+ )
+
+ static func forMode(_ mode: SettingsStore.NotchPresentationMode, supportsCompactPresentation: Bool) -> Self {
+ switch mode {
+ case .standard:
+ return .standard
+ case .minimal:
+ return supportsCompactPresentation ? .minimal : .standard
+ }
+ }
}
diff --git a/Sources/Fluid/UI/SettingsView.swift b/Sources/Fluid/UI/SettingsView.swift
index 22d09f88..7837e470 100644
--- a/Sources/Fluid/UI/SettingsView.swift
+++ b/Sources/Fluid/UI/SettingsView.swift
@@ -1209,22 +1209,24 @@ struct SettingsView: View {
}
}
- // Bottom overlay specific settings (only show when bottom is selected)
- if self.settings.overlayPosition == .bottom {
- Divider().padding(.vertical, 4)
+ Divider().padding(.vertical, 4)
- // Overlay Size
- HStack {
- VStack(alignment: .leading, spacing: 2) {
- Text("Overlay Size")
- .font(.body)
- Text("How large the recording indicator appears")
- .font(.subheadline)
- .foregroundStyle(.secondary)
- }
+ HStack {
+ VStack(alignment: .leading, spacing: 2) {
+ Text(self.settings.overlayPosition == .bottom ? "Overlay Size" : "Notch Style")
+ .font(.body)
+ Text(
+ self.settings.overlayPosition == .bottom
+ ? "How large the recording indicator appears"
+ : "Choose the regular notch or the compact layout"
+ )
+ .font(.subheadline)
+ .foregroundStyle(.secondary)
+ }
- Spacer()
+ Spacer()
+ if self.settings.overlayPosition == .bottom {
Picker("", selection: self.$settings.overlaySize) {
ForEach(SettingsStore.OverlaySize.allCases, id: \.self) { size in
Text(size.displayName).tag(size)
@@ -1232,7 +1234,38 @@ struct SettingsView: View {
}
.pickerStyle(.menu)
.frame(width: 170, alignment: .trailing)
+ } else {
+ Picker("", selection: self.$settings.notchPresentationMode) {
+ ForEach(SettingsStore.NotchPresentationMode.allCases, id: \.self) { mode in
+ Text(mode.displayName).tag(mode)
+ }
+ }
+ .pickerStyle(.menu)
+ .frame(width: 170, alignment: .trailing)
}
+ }
+
+ HStack {
+ VStack(alignment: .leading, spacing: 2) {
+ Text("Live Preview")
+ .font(.body)
+ Text("Show transcription text in the overlay while you speak")
+ .font(.subheadline)
+ .foregroundStyle(.secondary)
+ }
+
+ Spacer()
+
+ Toggle("", isOn: self.$enableStreamingPreview)
+ .labelsHidden()
+ .onChange(of: self.enableStreamingPreview) { _, newValue in
+ SettingsStore.shared.enableStreamingPreview = newValue
+ }
+ }
+
+ // Bottom overlay specific settings (only show when bottom is selected)
+ if self.settings.overlayPosition == .bottom {
+ Divider().padding(.vertical, 4)
// Bottom Offset
HStack {
diff --git a/Sources/Fluid/Views/BottomOverlayView.swift b/Sources/Fluid/Views/BottomOverlayView.swift
index f61dd05f..0b64593d 100644
--- a/Sources/Fluid/Views/BottomOverlayView.swift
+++ b/Sources/Fluid/Views/BottomOverlayView.swift
@@ -1770,7 +1770,8 @@ struct BottomOverlayView: View {
/// (e.g. "Transcribing...", "Refining..."). Prefer that when present.
private var processingStatusText: String {
let t = self.contentState.transcriptionText.trimmingCharacters(in: .whitespacesAndNewlines)
- return t.isEmpty ? self.processingLabel : t
+ guard Self.transientOverlayStatusTexts.contains(t) else { return self.processingLabel }
+ return t
}
private var hasTranscription: Bool {
diff --git a/Sources/Fluid/Views/NotchContentViews.swift b/Sources/Fluid/Views/NotchContentViews.swift
index a5a8e76d..2a9ffa18 100644
--- a/Sources/Fluid/Views/NotchContentViews.swift
+++ b/Sources/Fluid/Views/NotchContentViews.swift
@@ -279,43 +279,64 @@ struct NotchExpandedView: View {
@ObservedObject private var settings = SettingsStore.shared
@ObservedObject private var activeAppMonitor = ActiveAppMonitor.shared
@Environment(\.theme) private var theme
+ @State private var isHoveringPromptChip = false
+ @State private var isHoveringPromptMenu = false
+ @State private var hoveredPromptMenuRowID: String?
@State private var showPromptHoverMenu = false
- @State private var promptHoverWorkItem: DispatchWorkItem?
+ @State private var promptHoverGeneration: UInt64 = 0
+ @State private var promptSelectorLeading: CGFloat = 0
private var modeColor: Color {
self.contentState.mode.notchColor
}
- private var modeLabel: String {
- switch self.contentState.mode {
- case .dictation: return "Dictate"
- case .edit, .rewrite, .write: return "Edit"
- case .command: return "Command"
- }
+ private var presentationPolicy: NotchOverlayManager.NotchPresentationPolicy {
+ NotchOverlayManager.shared.currentNotchPresentationPolicy
}
private var processingLabel: String {
switch self.contentState.mode {
- case .dictation: return "Refining..."
- case .edit, .rewrite, .write: return "Thinking..."
- case .command: return "Working..."
+ case .dictation: return "Transcribing"
+ case .edit, .rewrite, .write: return "Thinking"
+ case .command: return "Working"
}
}
+ private static let transientOverlayStatusTexts: Set = [
+ "Transcribing",
+ "Refining",
+ "Thinking",
+ "Working",
+ "Transcribing...",
+ "Refining...",
+ "Thinking...",
+ "Working...",
+ ]
+
/// ContentView writes transient status strings into transcriptionText while processing
/// (e.g. "Transcribing...", "Refining..."). Prefer that when present.
private var processingStatusText: String {
let t = self.contentState.transcriptionText.trimmingCharacters(in: .whitespacesAndNewlines)
- return t.isEmpty ? self.processingLabel : t
+ guard Self.transientOverlayStatusTexts.contains(t) else { return self.processingLabel }
+ return t
}
private var hasTranscription: Bool {
- !self.contentState.transcriptionText.isEmpty
+ !self.visiblePreviewText.isEmpty
+ }
+
+ private var visiblePreviewText: String {
+ let previewText = self.contentState.cachedPreviewText.trimmingCharacters(in: .whitespacesAndNewlines)
+ guard !Self.transientOverlayStatusTexts.contains(previewText) else { return "" }
+ return previewText
}
/// Check if there's command history that can be expanded
private var canExpandCommandHistory: Bool {
- self.contentState.mode == .command && !self.contentState.commandConversationHistory.isEmpty
+ self.presentationPolicy.allowsCommandExpansion &&
+ self.presentationPolicy.allowsCommandActions &&
+ self.contentState.mode == .command &&
+ !self.contentState.commandConversationHistory.isEmpty
}
private var normalizedOverlayMode: OverlayMode {
@@ -384,6 +405,12 @@ struct NotchExpandedView: View {
return "Default"
}
+ private var compactPromptLabel: String {
+ let label = self.selectedPromptLabel.trimmingCharacters(in: .whitespacesAndNewlines)
+ guard label.count > 7 else { return label }
+ return String(label.prefix(7))
+ }
+
private var previewMaxHeight: CGFloat {
60
}
@@ -392,209 +419,347 @@ struct NotchExpandedView: View {
180
}
- private func handlePromptHover(_ hovering: Bool) {
+ private var promptSelectorFixedWidth: CGFloat {
+ 52
+ }
+
+ private var promptMenuWidth: CGFloat {
+ self.promptSelectorFixedWidth
+ }
+
+ private var promptMenuRowVerticalPadding: CGFloat {
+ 4
+ }
+
+ private var promptMenuMaxVisibleRows: CGFloat {
+ 3
+ }
+
+ private var promptMenuRowHeight: CGFloat {
+ 21
+ }
+
+ private var promptMenuListMaxHeight: CGFloat {
+ self.promptMenuRowHeight * self.promptMenuMaxVisibleRows
+ }
+
+ private static let notchContentCoordinateSpace = "NotchExpandedContent"
+
+ private var notchContentWidth: CGFloat {
+ 176
+ }
+
+ @ViewBuilder
+ private var appIconView: some View {
+ if let appIcon = self.contentState.targetAppIcon ?? self.activeAppMonitor.activeAppIcon {
+ Image(nsImage: appIcon)
+ .resizable()
+ .aspectRatio(contentMode: .fit)
+ .frame(width: 18, height: 18)
+ .clipShape(RoundedRectangle(cornerRadius: 3))
+ }
+ }
+
+ private func updatePromptMenuVisibility() {
guard self.isPromptSelectableMode, !self.contentState.isProcessing else {
- self.showPromptHoverMenu = false
+ self.dismissPromptHoverMenu()
return
}
- self.promptHoverWorkItem?.cancel()
- let task = DispatchWorkItem {
- self.showPromptHoverMenu = hovering
+
+ let shouldShow = self.isHoveringPromptChip || self.isHoveringPromptMenu
+ self.promptHoverGeneration &+= 1
+ let generation = self.promptHoverGeneration
+ let delay = shouldShow ? 0.03 : 0.28
+ DispatchQueue.main.asyncAfter(deadline: .now() + delay) {
+ guard generation == self.promptHoverGeneration else { return }
+ self.showPromptHoverMenu = shouldShow
+ }
+ }
+
+ private func dismissPromptHoverMenu() {
+ self.promptHoverGeneration &+= 1
+ self.isHoveringPromptChip = false
+ self.isHoveringPromptMenu = false
+ self.hoveredPromptMenuRowID = nil
+ self.showPromptHoverMenu = false
+ }
+
+ private func handlePromptChipHover(_ hovering: Bool) {
+ self.isHoveringPromptChip = hovering
+ self.updatePromptMenuVisibility()
+ }
+
+ private func handlePromptMenuHover(_ hovering: Bool) {
+ self.isHoveringPromptMenu = hovering
+ self.updatePromptMenuVisibility()
+ }
+
+ private func restoreRecordingTargetFocus() {
+ let pid = NotchContentState.shared.recordingTargetPID
+ DispatchQueue.main.asyncAfter(deadline: .now() + 0.05) {
+ if let pid { _ = TypingService.activateApp(pid: pid) }
+ }
+ }
+
+ private func promptMenuRowBackground(isSelected: Bool, rowID: String) -> some View {
+ let isHovered = self.hoveredPromptMenuRowID == rowID
+ let fillColor: Color
+ if isSelected {
+ fillColor = Color.white.opacity(0.18)
+ } else if isHovered {
+ fillColor = Color.white.opacity(0.10)
+ } else {
+ fillColor = .clear
+ }
+
+ let strokeColor: Color
+ if isSelected {
+ strokeColor = Color.white.opacity(0.24)
+ } else if isHovered {
+ strokeColor = Color.white.opacity(0.14)
+ } else {
+ strokeColor = .clear
+ }
+
+ return RoundedRectangle(cornerRadius: 7)
+ .fill(fillColor)
+ .overlay(
+ RoundedRectangle(cornerRadius: 7)
+ .stroke(strokeColor, lineWidth: 1)
+ )
+ }
+
+ @ViewBuilder
+ private func promptMenuRow(
+ _ title: String,
+ rowID: String,
+ isSelected: Bool,
+ action: @escaping () -> Void
+ ) -> some View {
+ Button(action: action) {
+ Text(title)
+ .font(.system(size: 9, weight: isSelected ? .semibold : .medium))
+ .foregroundStyle(.white.opacity(isSelected ? 0.96 : 0.84))
+ .lineLimit(1)
+ .truncationMode(.tail)
+ .frame(maxWidth: .infinity, alignment: .leading)
+ .padding(.horizontal, 6)
+ .padding(.vertical, self.promptMenuRowVerticalPadding)
+ .background(self.promptMenuRowBackground(isSelected: isSelected, rowID: rowID))
+ }
+ .buttonStyle(.plain)
+ .onHover { hovering in
+ self.hoveredPromptMenuRowID = hovering ? rowID : nil
}
- self.promptHoverWorkItem = task
- DispatchQueue.main.asyncAfter(deadline: .now() + (hovering ? 0.05 : 0.15), execute: task)
}
private func promptMenuContent() -> some View {
let promptMode = self.activePromptMode ?? .dictate
let activeDictationSlot = self.activeDictationShortcutSlot
+ return VStack(alignment: .leading, spacing: 2) {
+ Text("AI Prompt")
+ .font(.system(size: 8, weight: .semibold))
+ .foregroundStyle(Color.white.opacity(0.42))
+ .padding(.horizontal, 6)
+ .padding(.top, 2)
+ .padding(.bottom, 3)
- return VStack(alignment: .leading, spacing: 0) {
- if promptMode.normalized == .dictate {
- Button(action: {
- self.contentState.onDictationPromptSelectionRequested?(.off)
- let pid = NotchContentState.shared.recordingTargetPID
- DispatchQueue.main.asyncAfter(deadline: .now() + 0.05) {
- if let pid { _ = TypingService.activateApp(pid: pid) }
- }
- self.showPromptHoverMenu = false
- }) {
- HStack {
- Text("Off")
- Spacer()
- let isSelected = self.settings.dictationPromptSelection(for: activeDictationSlot) == .off
- if isSelected {
- Image(systemName: "checkmark")
- .font(.system(size: 10, weight: .semibold))
- }
- }
- }
- .buttonStyle(.plain)
- .padding(.vertical, 4)
-
- Divider()
- .padding(.vertical, 4)
- }
-
- Button(action: {
- if promptMode.normalized == .dictate {
- self.contentState.onDictationPromptSelectionRequested?(.default)
- } else {
- self.settings.setSelectedPromptID(nil, for: promptMode)
- }
- let pid = NotchContentState.shared.recordingTargetPID
- DispatchQueue.main.asyncAfter(deadline: .now() + 0.05) {
- if let pid { _ = TypingService.activateApp(pid: pid) }
- }
- self.showPromptHoverMenu = false
- }) {
- HStack {
- Text("Default")
- Spacer()
- let isSelected = promptMode.normalized == .dictate
+ ScrollView(.vertical, showsIndicators: true) {
+ VStack(alignment: .leading, spacing: 2) {
+ let defaultSelected = promptMode.normalized == .dictate
? (self.settings.dictationPromptSelection(for: activeDictationSlot) == .default)
: (self.settings.selectedPromptID(for: promptMode) == nil)
- if isSelected {
- Image(systemName: "checkmark")
- .font(.system(size: 10, weight: .semibold))
- }
- }
- }
- .buttonStyle(.plain)
- .padding(.vertical, 4)
- if !self.settings.promptProfiles(for: promptMode).isEmpty {
- Divider()
- .padding(.vertical, 4)
+ if promptMode.normalized == .dictate {
+ self.promptMenuRow(
+ "Off",
+ rowID: "off",
+ isSelected: self.settings.dictationPromptSelection(for: activeDictationSlot) == .off
+ ) {
+ self.contentState.onDictationPromptSelectionRequested?(.off)
+ self.restoreRecordingTargetFocus()
+ self.dismissPromptHoverMenu()
+ }
+ }
- ForEach(self.settings.promptProfiles(for: promptMode)) { profile in
- Button(action: {
+ self.promptMenuRow("Default", rowID: "default", isSelected: defaultSelected) {
if promptMode.normalized == .dictate {
- self.contentState.onDictationPromptSelectionRequested?(.profile(profile.id))
+ self.contentState.onDictationPromptSelectionRequested?(.default)
} else {
- self.settings.setSelectedPromptID(profile.id, for: promptMode)
- }
- let pid = NotchContentState.shared.recordingTargetPID
- DispatchQueue.main.asyncAfter(deadline: .now() + 0.05) {
- if let pid { _ = TypingService.activateApp(pid: pid) }
+ self.settings.setSelectedPromptID(nil, for: promptMode)
}
- self.showPromptHoverMenu = false
- }) {
- HStack {
- Text(profile.name.isEmpty ? "Untitled" : profile.name)
- Spacer()
+ self.restoreRecordingTargetFocus()
+ self.dismissPromptHoverMenu()
+ }
+
+ let profiles = self.settings.promptProfiles(for: promptMode)
+ if !profiles.isEmpty {
+ ForEach(profiles) { profile in
let isSelected = promptMode.normalized == .dictate
? (self.settings.dictationPromptSelection(for: activeDictationSlot) == .profile(profile.id))
: (self.settings.selectedPromptID(for: promptMode) == profile.id)
- if isSelected {
- Image(systemName: "checkmark")
- .font(.system(size: 10, weight: .semibold))
+ self.promptMenuRow(
+ profile.name.isEmpty ? "Untitled" : profile.name,
+ rowID: profile.id,
+ isSelected: isSelected
+ ) {
+ if promptMode.normalized == .dictate {
+ self.contentState.onDictationPromptSelectionRequested?(.profile(profile.id))
+ } else {
+ self.settings.setSelectedPromptID(profile.id, for: promptMode)
+ }
+ self.restoreRecordingTargetFocus()
+ self.dismissPromptHoverMenu()
}
}
}
- .buttonStyle(.plain)
- .padding(.vertical, 4)
}
}
+ .frame(maxHeight: self.promptMenuListMaxHeight)
}
- .font(.system(size: 9, weight: .medium))
- .padding(.horizontal, 8)
- .padding(.vertical, 6)
- .foregroundStyle(.white)
+ .padding(3)
.background(Color.black)
- .cornerRadius(8)
+ .clipShape(RoundedRectangle(cornerRadius: 9))
.overlay(
- RoundedRectangle(cornerRadius: 8)
+ RoundedRectangle(cornerRadius: 9)
.stroke(Color.white.opacity(0.12), lineWidth: 1)
)
+ .shadow(color: .black.opacity(0.24), radius: 8, x: 0, y: 5)
.onHover { hovering in
- self.handlePromptHover(hovering)
+ self.handlePromptMenuHover(hovering)
}
}
- var body: some View {
- VStack(spacing: 4) {
- // Visualization + Mode label row
- HStack(spacing: 6) {
- // Target app icon (the app where text will be typed)
- if let appIcon = self.contentState.targetAppIcon {
- Image(nsImage: appIcon)
- .resizable()
- .aspectRatio(contentMode: .fit)
- .frame(width: 16, height: 16)
- .clipShape(RoundedRectangle(cornerRadius: 3))
+ @ViewBuilder
+ private var promptSelectorControl: some View {
+ if self.presentationPolicy.showsPromptSelector {
+ HStack(spacing: 3) {
+ Text(self.compactPromptLabel)
+ .font(.system(size: 9, weight: .medium))
+ .foregroundStyle(self.isHoveringPromptChip ? .white.opacity(0.94) : .white.opacity(0.86))
+ .lineLimit(1)
+ .truncationMode(.tail)
+ .fixedSize(horizontal: true, vertical: false)
+ Image(systemName: "chevron.down")
+ .font(.system(size: 8, weight: .bold))
+ .foregroundStyle(self.isHoveringPromptChip ? .white.opacity(0.78) : .white.opacity(0.62))
+ if self.isAppPromptOverrideActive {
+ Text("App")
+ .font(.system(size: 8, weight: .semibold))
+ .foregroundStyle(.white.opacity(0.82))
+ .padding(.horizontal, 3)
+ .padding(.vertical, 1)
+ .background(
+ Capsule()
+ .fill(Color.white.opacity(0.12))
+ )
}
-
- NotchWaveformView(
- audioPublisher: self.audioPublisher,
- color: self.modeColor
- )
- .frame(width: 80, height: 22)
-
- // Mode label - shimmer effect when processing
- if self.contentState.isProcessing {
- ShimmerText(text: self.processingStatusText, color: self.modeColor)
- } else {
- Text(self.modeLabel)
- .font(.system(size: 9, weight: .medium))
- .foregroundStyle(self.modeColor)
- .opacity(0.9)
- .onHover { hovering in
- self.handlePromptHover(hovering)
+ }
+ .padding(.horizontal, 5)
+ .padding(.vertical, 3)
+ .frame(width: self.promptSelectorFixedWidth, alignment: .leading)
+ .background(
+ Capsule()
+ .fill(
+ LinearGradient(
+ colors: [
+ Color.black.opacity(self.isHoveringPromptChip ? 0.96 : 0.92),
+ Color(white: self.isHoveringPromptChip ? 0.10 : 0.06),
+ ],
+ startPoint: .top,
+ endPoint: .bottom
+ )
+ )
+ )
+ .shadow(color: .black.opacity(0.28), radius: 8, x: 0, y: 4)
+ .shadow(color: .white.opacity(self.isHoveringPromptChip ? 0.06 : 0.03), radius: 0, x: 0, y: 1)
+ .opacity(self.isPromptSelectableMode ? (self.contentState.isProcessing ? 0.7 : 1.0) : 0.6)
+ .allowsHitTesting(self.isPromptSelectableMode && !self.contentState.isProcessing)
+ .onHover { hovering in
+ self.handlePromptChipHover(hovering)
+ }
+ .background(
+ GeometryReader { geometry in
+ Color.clear
+ .onAppear {
+ self.promptSelectorLeading = geometry.frame(in: .named(Self.notchContentCoordinateSpace)).minX
+ }
+ .onChange(of: geometry.frame(in: .named(Self.notchContentCoordinateSpace)).minX) { _, newLeading in
+ self.promptSelectorLeading = newLeading
}
}
+ )
+ .transition(.opacity)
+ }
+ }
+
+ @ViewBuilder
+ private var promptHoverMenuRow: some View {
+ if self.showPromptHoverMenu {
+ HStack(spacing: 0) {
+ Color.clear
+ .frame(width: self.promptSelectorLeading)
+
+ self.promptMenuContent()
+ .frame(width: self.promptMenuWidth, alignment: .leading)
+
+ Spacer(minLength: 0)
}
+ .frame(maxWidth: .infinity, alignment: .leading)
+ .padding(.top, -2)
+ .transition(.opacity)
+ }
+ }
- // Prompt selector
- if !self.contentState.isProcessing {
- ZStack(alignment: .top) {
- HStack(spacing: 6) {
- Text("AI Prompt:")
- .font(.system(size: 9, weight: .medium))
- .foregroundStyle(.white.opacity(0.5))
- Text(self.selectedPromptLabel)
- .font(.system(size: 9, weight: .medium))
- .foregroundStyle(.white.opacity(0.75))
- .lineLimit(1)
- if self.isAppPromptOverrideActive {
- Text("App")
- .font(.system(size: 8, weight: .semibold))
- .foregroundStyle(.white.opacity(0.9))
- .padding(.horizontal, 4)
- .padding(.vertical, 1)
- .background(
- Capsule()
- .fill(Color.white.opacity(0.15))
- )
- }
- Image(systemName: "chevron.down")
- .font(.system(size: 8, weight: .semibold))
- .foregroundStyle(.white.opacity(0.45))
- }
- .padding(.horizontal, 6)
- .padding(.vertical, 4)
- .background(Color.white.opacity(0.00))
- .cornerRadius(6)
- .opacity(self.isPromptSelectableMode ? 1.0 : 0.6)
+ var body: some View {
+ Group {
+ if self.canExpandCommandHistory {
+ self.notchBodyContent
+ .contentShape(Rectangle())
.onTapGesture {
- guard self.isPromptSelectableMode, !self.contentState.isProcessing else { return }
- self.showPromptHoverMenu.toggle()
+ if NotchOverlayManager.shared.canHandleNotchCommandTap {
+ NotchOverlayManager.shared.onNotchClicked?()
+ }
}
+ } else {
+ self.notchBodyContent
+ }
+ }
+ .onChange(of: self.contentState.mode) { _, _ in
+ if !self.isPromptSelectableMode {
+ self.dismissPromptHoverMenu()
+ }
+ switch self.contentState.mode {
+ case .dictation: self.contentState.promptPickerMode = .dictate
+ case .edit, .write, .rewrite: self.contentState.promptPickerMode = .edit
+ case .command: break
+ }
+ }
+ .animation(.spring(response: 0.25, dampingFraction: 0.8), value: self.hasTranscription)
+ .animation(.easeInOut(duration: 0.2), value: self.contentState.mode)
+ .animation(.easeInOut(duration: 0.25), value: self.contentState.isProcessing)
+ }
- if self.showPromptHoverMenu {
- self.promptMenuContent()
- .padding(.top, 26)
- .transition(.opacity)
- .zIndex(10)
- }
- }
- .frame(maxWidth: 180, alignment: .top)
- .transition(.opacity)
+ private var notchBodyContent: some View {
+ VStack(alignment: .center, spacing: 6) {
+ HStack(spacing: 4) {
+ self.appIconView
+
+ CompactNotchWaveformView(
+ audioPublisher: self.audioPublisher,
+ color: self.modeColor
+ )
+ .frame(width: 48, height: 18)
+
+ self.promptSelectorControl
}
+ .frame(maxWidth: .infinity, alignment: .center)
+ .offset(x: 4, y: 0)
- // Transcription preview (wrapped, fixed width)
- if self.hasTranscription && !self.contentState.isProcessing {
- let previewText = self.contentState.cachedPreviewText
+ self.promptHoverMenuRow
+
+ if self.presentationPolicy.showsStreamingPreview && self.hasTranscription && !self.contentState.isProcessing {
+ let previewText = self.visiblePreviewText
if !previewText.isEmpty {
ScrollViewReader { proxy in
ScrollView(.vertical, showsIndicators: false) {
@@ -621,34 +786,17 @@ struct NotchExpandedView: View {
}
}
}
+ .frame(maxWidth: .infinity, alignment: .center)
.transition(.opacity.combined(with: .scale(scale: 0.95)))
}
}
}
- .frame(width: 216) // Fixed width prevents notch from resizing and causing edge artifacts
- .padding(.horizontal, 8)
- .padding(.vertical, 6)
- .background(Color.black) // Must be pure black to blend with macOS notch
- .contentShape(Rectangle()) // Make entire area tappable
- .onTapGesture {
- // If in command mode with history, clicking expands the conversation
- if self.canExpandCommandHistory {
- NotchOverlayManager.shared.onNotchClicked?()
- }
- }
- .onChange(of: self.contentState.mode) { _, _ in
- if !self.isPromptSelectableMode {
- self.showPromptHoverMenu = false
- }
- switch self.contentState.mode {
- case .dictation: self.contentState.promptPickerMode = .dictate
- case .edit, .write, .rewrite: self.contentState.promptPickerMode = .edit
- case .command: break
- }
- }
- .animation(.spring(response: 0.25, dampingFraction: 0.8), value: self.hasTranscription)
- .animation(.easeInOut(duration: 0.2), value: self.contentState.mode)
- .animation(.easeInOut(duration: 0.25), value: self.contentState.isProcessing)
+ .coordinateSpace(name: Self.notchContentCoordinateSpace)
+ .frame(width: self.notchContentWidth)
+ .padding(.horizontal, 6)
+ .padding(.top, 0)
+ .padding(.bottom, 4)
+ .background(Color.black)
}
}
@@ -660,14 +808,17 @@ struct NotchWaveformView: View {
@StateObject private var data: AudioVisualizationData
@ObservedObject private var contentState = NotchContentState.shared
- @State private var barHeights: [CGFloat] = Array(repeating: 3, count: 7)
+ @State private var barHeights: [CGFloat] = Array(repeating: 3, count: 5)
@State private var noiseThreshold: CGFloat = .init(SettingsStore.shared.visualizerNoiseThreshold)
- private let barCount = 7
- private let barWidth: CGFloat = 3
- private let barSpacing: CGFloat = 4
+ private let barCount = 5
+ private let barWidth: CGFloat = 2.5
+ private let barSpacing: CGFloat = 2
private let minHeight: CGFloat = 3
- private let maxHeight: CGFloat = 20
+ private let maxHeight: CGFloat = 12
+ private let processingSweepSeconds: Double = 2.15
+ private let processingBandHalfWidth: CGFloat = 0.42
+ private let processingFlatHeight: CGFloat = 3
private var currentGlowIntensity: CGFloat {
self.contentState.isProcessing ? 0.0 : 0.35
@@ -688,13 +839,22 @@ struct NotchWaveformView: View {
}
var body: some View {
- HStack(spacing: self.barSpacing) {
- ForEach(0.. CGFloat) -> some View {
+ HStack(spacing: self.barSpacing) {
+ ForEach(0.. some View {
+ let progress = date.timeIntervalSinceReferenceDate.truncatingRemainder(dividingBy: self.processingSweepSeconds) / self.processingSweepSeconds
+ let centerX = CGFloat(-0.25 + progress * 1.5)
+
+ return Rectangle()
+ .foregroundStyle(
+ LinearGradient(
+ colors: [
+ self.color.opacity(0.12),
+ self.color.opacity(0.28),
+ .white.opacity(0.88),
+ self.color.opacity(1.0),
+ .white.opacity(0.88),
+ self.color.opacity(0.28),
+ self.color.opacity(0.12),
+ ],
+ startPoint: UnitPoint(x: centerX - self.processingBandHalfWidth, y: 0.5),
+ endPoint: UnitPoint(x: centerX + self.processingBandHalfWidth, y: 0.5)
+ )
+ )
+ }
+
+ private func displayHeight(for index: Int) -> CGFloat {
+ guard self.contentState.isProcessing else {
+ return self.barHeights[index]
+ }
+ return self.processingFlatHeight
+ }
+
+ private func resetBarsToBaseline(animated: Bool) {
+ let update = {
+ for index in 0.. self.noiseThreshold // Use user's sensitivity setting
+ let adjustedLevel = normalizedLevel > self.noiseThreshold
+ ? (normalizedLevel - self.noiseThreshold) / (1.0 - self.noiseThreshold)
+ : 0
- withAnimation(.spring(response: 0.15, dampingFraction: 0.6)) {
- for i in 0.. 0 else {
+ self.resetBarsToBaseline(animated: false)
+ return
+ }
- if isActive {
- // Scale audio level relative to threshold for smoother response
- let adjustedLevel = (normalizedLevel - self.noiseThreshold) / (1.0 - self.noiseThreshold)
- let randomVariation = CGFloat.random(in: 0.7...1.0)
- self.barHeights[i] = self.minHeight + (self.maxHeight - self.minHeight) * adjustedLevel * centerFactor * randomVariation
- } else {
- // Complete stillness when below threshold
- self.barHeights[i] = self.minHeight
- }
+ withAnimation(.easeOut(duration: 0.1)) {
+ for index in 0..
@ObservedObject private var contentState = NotchContentState.shared
- @State private var isPulsing = false
var body: some View {
- Circle()
- .fill(self.contentState.mode.notchColor)
- .frame(width: 5, height: 5)
- .opacity(self.isPulsing ? 0.5 : 1.0)
- .scaleEffect(self.isPulsing ? 0.85 : 1.0)
- .animation(.easeInOut(duration: 1.0).repeatForever(autoreverses: true), value: self.isPulsing)
- .onAppear { self.isPulsing = true }
- .onDisappear { self.isPulsing = false }
+ CompactNotchWaveformView(
+ audioPublisher: self.audioPublisher,
+ color: self.contentState.mode.notchColor
+ )
+ .frame(width: 34, height: 16)
+ }
+}
+
+struct NotchCompactBottomView: View {
+ @ObservedObject private var contentState = NotchContentState.shared
+
+ private let previewWidth: CGFloat = 250
+ private let previewHeight: CGFloat = 20
+    private static let transientOverlayStatusTexts: Set<String> = [
+ "Transcribing",
+ "Refining",
+ "Thinking",
+ "Working",
+ "Transcribing...",
+ "Refining...",
+ "Thinking...",
+ "Working...",
+ ]
+
+ private var compactPreviewText: String {
+ let source = self.contentState.isProcessing
+ ? self.contentState.transcriptionText
+ : self.contentState.cachedPreviewText
+ let trimmed = source.trimmingCharacters(in: .whitespacesAndNewlines)
+ guard !Self.transientOverlayStatusTexts.contains(trimmed) else { return "" }
+ return trimmed
+ }
+
+ private var shouldShowPreview: Bool {
+ SettingsStore.shared.enableStreamingPreview && !self.compactPreviewText.isEmpty
+ }
+
+ var body: some View {
+ ZStack(alignment: .leading) {
+ Text(self.compactPreviewText)
+ .font(.system(size: 9, weight: .medium))
+ .foregroundStyle(.white.opacity(0.82))
+ .lineLimit(1)
+ .truncationMode(.head)
+ .offset(y: self.shouldShowPreview ? 0 : -4)
+ .opacity(self.shouldShowPreview ? 1 : 0)
+ }
+ .frame(width: self.previewWidth, height: SettingsStore.shared.enableStreamingPreview ? self.previewHeight : 0, alignment: .leading)
+ .padding(.horizontal, SettingsStore.shared.enableStreamingPreview ? 10 : 0)
+ .padding(.bottom, SettingsStore.shared.enableStreamingPreview ? 8 : 0)
+ .clipped()
+ .animation(.easeOut(duration: 0.2), value: self.shouldShowPreview)
}
}
@@ -1296,3 +1551,146 @@ struct ExpandedModeWaveformView: View {
}
}
}
+
+struct CompactNotchWaveformView: View {
+    let audioPublisher: AnyPublisher<CGFloat, Never>
+ let color: Color
+
+ @StateObject private var data: AudioVisualizationData
+ @ObservedObject private var contentState = NotchContentState.shared
+ @State private var barHeights: [CGFloat] = Array(repeating: 3, count: 8)
+
+ private let barCount = 8
+ private let barWidth: CGFloat = 2.5
+ private let barSpacing: CGFloat = 2
+ private let minHeight: CGFloat = 3
+ private let maxHeight: CGFloat = 15
+ private let noiseThreshold: CGFloat = 0.05
+ private let processingSweepSeconds: Double = 2.15
+ private let processingBandHalfWidth: CGFloat = 0.42
+ private let processingFlatHeight: CGFloat = 3
+
+    init(audioPublisher: AnyPublisher<CGFloat, Never>, color: Color) {
+ self.audioPublisher = audioPublisher
+ self.color = color
+ _data = StateObject(wrappedValue: AudioVisualizationData(audioLevelPublisher: audioPublisher))
+ }
+
+ var body: some View {
+ TimelineView(.animation(minimumInterval: 1.0 / 30.0)) { timeline in
+ ZStack {
+ self.barsView(using: { index in
+ self.displayHeight(for: index)
+ })
+ .foregroundStyle(self.color.opacity(self.contentState.isProcessing ? 0.16 : 1.0))
+
+ if self.contentState.isProcessing {
+ self.processingSweep(at: timeline.date)
+ .mask {
+ self.barsView(using: { index in
+ self.displayHeight(for: index)
+ })
+ }
+ .shadow(color: .white.opacity(0.28), radius: 2.5, x: 0, y: 0)
+ }
+ }
+ }
+ .onChange(of: self.data.audioLevel) { _, level in
+ if !self.contentState.isProcessing {
+ self.updateBars(level: level)
+ }
+ }
+ .onChange(of: self.contentState.isProcessing) { _, processing in
+ if processing {
+ self.resetBarsToBaseline(animated: false)
+ } else {
+ self.updateBars(level: self.data.audioLevel)
+ }
+ }
+ .onAppear {
+ if self.contentState.isProcessing {
+ self.resetBarsToBaseline(animated: false)
+ } else {
+ self.updateBars(level: self.data.audioLevel)
+ }
+ }
+ }
+
+ @ViewBuilder
+ private func barsView(using height: @escaping (Int) -> CGFloat) -> some View {
+ HStack(spacing: self.barSpacing) {
+ ForEach(0.. some View {
+ let progress = self.processingProgress(at: date)
+ let centerX = CGFloat(-0.25 + progress * 1.5)
+
+ return Rectangle()
+ .foregroundStyle(
+ LinearGradient(
+ colors: [
+ self.color.opacity(0.12),
+ self.color.opacity(0.28),
+ .white.opacity(0.88),
+ self.color.opacity(1.0),
+ .white.opacity(0.88),
+ self.color.opacity(0.28),
+ self.color.opacity(0.12),
+ ],
+ startPoint: UnitPoint(x: centerX - self.processingBandHalfWidth, y: 0.5),
+ endPoint: UnitPoint(x: centerX + self.processingBandHalfWidth, y: 0.5)
+ )
+ )
+ }
+
+ private func processingProgress(at date: Date) -> Double {
+ date.timeIntervalSinceReferenceDate.truncatingRemainder(dividingBy: self.processingSweepSeconds) / self.processingSweepSeconds
+ }
+
+ private func displayHeight(for index: Int) -> CGFloat {
+ guard self.contentState.isProcessing else {
+ return self.barHeights[index]
+ }
+
+ return self.processingFlatHeight
+ }
+
+ private func updateBars(level: CGFloat) {
+ let normalizedLevel = min(max(level, 0), 1)
+ let adjustedLevel = normalizedLevel > self.noiseThreshold
+ ? (normalizedLevel - self.noiseThreshold) / (1.0 - self.noiseThreshold)
+ : 0
+
+ guard adjustedLevel > 0 else {
+ self.resetBarsToBaseline(animated: false)
+ return
+ }
+
+ withAnimation(.easeOut(duration: 0.1)) {
+ for index in 0..