diff --git a/src/components/PromptInput/PromptInput.tsx b/src/components/PromptInput/PromptInput.tsx
index cf390f11..a00d6100 100644
--- a/src/components/PromptInput/PromptInput.tsx
+++ b/src/components/PromptInput/PromptInput.tsx
@@ -120,6 +120,8 @@
 import { useMaybeTruncateInput } from './useMaybeTruncateInput.js';
 import { usePromptInputPlaceholder } from './usePromptInputPlaceholder.js';
 import { useShowFastIconHint } from './useShowFastIconHint.js';
 import { useSwarmBanner } from './useSwarmBanner.js';
+import { expandPastedTextRefs, parseReferences } from '../../history.js';
+import { roughTokenCountEstimation } from '../../services/tokenEstimation.js';
 import { isNonSpacePrintable, isVimModeEnabled } from './utils.js';
@@ -250,6 +252,70 @@ function PromptInput({
     show: false
   });
   const [cursorOffset, setCursorOffset] = useState(input.length);
+
+  // Estimate the total token cost of the prompt: the rough token count of
+  // the text with pasted-text references expanded, plus a flat per-image
+  // estimate for every referenced image attachment (images are not run
+  // through the text estimator).
+  // NOTE(review): the value type of `contents` was garbled in the pasted
+  // diff; `Record<string, PastedContent>` is a reconstruction — confirm
+  // against the real declaration of `pastedContents`.
+  const calculateTotalTokens = useCallback(
+    (text: string, contents: Record<string, PastedContent>): number => {
+      const expandedText = expandPastedTextRefs(text, contents);
+      let tokens = roughTokenCountEstimation(expandedText);
+
+      const refs = parseReferences(text);
+      for (const ref of refs) {
+        const content = contents[ref.id];
+        if (content?.type === 'image') {
+          // Flat 2000-token estimate per referenced image.
+          tokens += 2000;
+        }
+      }
+
+      return tokens;
+    },
+    []
+  );
+
+  // Lazy initializer: run the (potentially expensive) estimation once on
+  // mount instead of re-evaluating the argument expression on every render.
+  const [tokenCount, setTokenCount] = useState<number>(() =>
+    input.length === 0 ? 0 : calculateTotalTokens(input, pastedContents)
+  );
+  const [isCalculatingTokens, setIsCalculatingTokens] = useState(false);
+  const tokenDebounceTimerRef = useRef<ReturnType<typeof setTimeout> | null>(null);
+
+  // Debounce recalculation so a typing burst triggers a single estimation
+  // 300 ms after the last keystroke. This effect's cleanup also runs on
+  // unmount, so no separate unmount-only cleanup effect is needed.
+  useEffect(() => {
+    if (tokenDebounceTimerRef.current) {
+      clearTimeout(tokenDebounceTimerRef.current);
+      tokenDebounceTimerRef.current = null;
+    }
+
+    if (input.length === 0) {
+      setTokenCount(0);
+      setIsCalculatingTokens(false);
+      return;
+    }
+
+    setIsCalculatingTokens(true);
+
+    tokenDebounceTimerRef.current = setTimeout(() => {
+      setTokenCount(calculateTotalTokens(input, pastedContents));
+      setIsCalculatingTokens(false);
+      tokenDebounceTimerRef.current = null;
+    }, 300);
+
+    return () => {
+      if (tokenDebounceTimerRef.current) {
+        clearTimeout(tokenDebounceTimerRef.current);
+      }
+    };
+  }, [input, pastedContents, calculateTotalTokens]);
   // Track the last input value set via internal handlers so we can detect
   // external input changes (e.g. speech-to-text injection) and move cursor to end.
   const lastInternalInputRef = React.useRef(input);
@@ -2271,7 +2337,7 @@ function PromptInput({
         {textInputElement}
       }
-      <PromptInputFooter {/* NOTE(review): leading props lost in paste up to `…={….length > 0}` — restore from the real file */} isLoading={isLoading} tasksSelected={tasksSelected} teamsSelected={teamsSelected} bridgeSelected={bridgeSelected} tmuxSelected={tmuxSelected} teammateFooterIndex={teammateFooterIndex} ideSelection={ideSelection} mcpClients={mcpClients} isPasting={isPasting} isInputWrapped={isInputWrapped} messages={messages} isSearching={isSearchingHistory} historyQuery={historyQuery} setHistoryQuery={setHistoryQuery} historyFailedMatch={historyFailedMatch} onOpenTasksDialog={isFullscreenEnvEnabled() ? handleOpenTasksDialog : undefined} />
+      <PromptInputFooter {/* NOTE(review): leading props lost in paste up to `…={….length > 0}` — restore from the real file */} isLoading={isLoading} tasksSelected={tasksSelected} teamsSelected={teamsSelected} bridgeSelected={bridgeSelected} tmuxSelected={tmuxSelected} teammateFooterIndex={teammateFooterIndex} ideSelection={ideSelection} mcpClients={mcpClients} isPasting={isPasting} isInputWrapped={isInputWrapped} messages={messages} isSearching={isSearchingHistory} historyQuery={historyQuery} setHistoryQuery={setHistoryQuery} historyFailedMatch={historyFailedMatch} onOpenTasksDialog={isFullscreenEnvEnabled() ? handleOpenTasksDialog : undefined} tokenCount={tokenCount} isCalculatingTokens={isCalculatingTokens} />
       {isFullscreenEnvEnabled() ? null : autoModeOptInDialog}
       {isFullscreenEnvEnabled() ?
         // position=absolute takes zero layout height so the spinner
diff --git a/src/components/PromptInput/PromptInputFooter.tsx b/src/components/PromptInput/PromptInputFooter.tsx
index e881ddb7..a0523467 100644
--- a/src/components/PromptInput/PromptInputFooter.tsx
+++ b/src/components/PromptInput/PromptInputFooter.tsx
@@ -59,6 +59,10 @@ type Props = {
   setHistoryQuery: (query: string) => void;
   historyFailedMatch: boolean;
   onOpenTasksDialog?: (taskId?: string) => void;
+  // Debounced estimate of the prompt's token cost, rendered in the footer.
+  tokenCount: number;
+  // True while a recalculation is pending (debounce timer in flight).
+  isCalculatingTokens: boolean;
 };
 function PromptInputFooter({
   apiKeyStatus,
@@ -92,7 +96,9 @@
   historyQuery,
   setHistoryQuery,
   historyFailedMatch,
-  onOpenTasksDialog
+  onOpenTasksDialog,
+  tokenCount,
+  isCalculatingTokens
 }: Props): ReactNode {
   const settings = useSettings();
   const {
@@ -143,2 +151,6 @@
+      {/* NOTE(review): the wrapper JSX element was stripped from the pasted
+          diff; `<Text dimColor>` is a reconstruction — confirm the actual
+          element and styling against the real file. */}
+      <Text dimColor>{isCalculatingTokens ? '...' : `${tokenCount} tokens`}</Text>
       {isFullscreen ? null : }
       {"external" === 'ant' && isUndercover() && undercover}