From 3c4427764815a8dc624d466461093e06c6da8095 Mon Sep 17 00:00:00 2001 From: bk-ty Date: Thu, 30 Apr 2026 10:44:35 -0500 Subject: [PATCH 1/3] feat: custom system prompt overrides for chat, briefing, and tagging Adds three new per-database settings that let users override the default system prompts used by AI features: - chat_prompt: prepended to the chat agent system prompt - briefing_prompt: replaces the briefing generation system prompt - tagging_prompt: replaces the tag extraction system prompt The wiki generation prompt override already existed; this moves it to a dedicated Prompts tab in Settings alongside the three new prompt fields. Backend changes: - settings.rs: register chat_prompt, briefing_prompt, tagging_prompt as default (empty) settings - agent.rs: read chat_prompt and prepend to base system prompt when set - briefing/agentic.rs: read briefing_prompt and use as system prompt when set (falls back to built-in SYSTEM_PROMPT) - extraction.rs: accept optional custom_system_prompt param and apply when non-empty - embedding.rs: read tagging_prompt from settings and pass through to run_tagging_strategy -> extract_tags_from_content Frontend changes: - SettingsModal.tsx: add Prompts tab; move wiki generation prompt from AI tab to Prompts tab; add briefing, chat, and tagging prompt fields --- crates/atomic-core/src/agent.rs | 21 ++- crates/atomic-core/src/briefing/agentic.rs | 13 +- crates/atomic-core/src/embedding.rs | 9 +- crates/atomic-core/src/extraction.rs | 7 +- crates/atomic-core/src/settings.rs | 3 + package-lock.json | 11 -- src/components/settings/SettingsModal.tsx | 205 +++++++++++++++------ 7 files changed, 194 insertions(+), 75 deletions(-) diff --git a/crates/atomic-core/src/agent.rs b/crates/atomic-core/src/agent.rs index 7d2508b3..551d8b8d 100644 --- a/crates/atomic-core/src/agent.rs +++ b/crates/atomic-core/src/agent.rs @@ -1530,7 +1530,16 @@ where }; // Build message history for API - let mut api_messages = 
vec![Message::system(get_system_prompt(&scope_description))]; + let custom_chat_prefix = settings_map + .get("chat_prompt") + .filter(|s| !s.is_empty()) + .map(|s| s.as_str()); + let base_system = get_system_prompt(&scope_description); + let system_prompt = match custom_chat_prefix { + Some(prefix) => format!("{prefix}\n\n{base_system}"), + None => base_system, + }; + let mut api_messages = vec![Message::system(system_prompt)]; api_messages.extend(messages); // Truncate to fit context window for providers with limited context @@ -1734,7 +1743,15 @@ where }; // Build message history for API, with canvas context appended to system prompt - let mut system_prompt = get_system_prompt(&scope_description); + let custom_chat_prefix = settings_map + .get("chat_prompt") + .filter(|s| !s.is_empty()) + .map(|s| s.as_str()); + let base_system = get_system_prompt(&scope_description); + let mut system_prompt = match custom_chat_prefix { + Some(prefix) => format!("{prefix}\n\n{base_system}"), + None => base_system, + }; if page_context.is_some() { system_prompt.push_str(get_page_context_system_prompt()); } diff --git a/crates/atomic-core/src/briefing/agentic.rs b/crates/atomic-core/src/briefing/agentic.rs index 424c6122..e2b5fc69 100644 --- a/crates/atomic-core/src/briefing/agentic.rs +++ b/crates/atomic-core/src/briefing/agentic.rs @@ -320,7 +320,7 @@ struct AgentState { done_called: bool, } -async fn resolve_model(core: &AtomicCore) -> Result<(ProviderConfig, String), String> { +async fn resolve_model(core: &AtomicCore) -> Result<(ProviderConfig, String, Option<String>), String> { let settings = core .get_settings() .await @@ -336,7 +336,11 @@ async fn resolve_model(core: &AtomicCore) -> Result<(ProviderConfig, String), St .cloned() .unwrap_or_else(|| "anthropic/claude-sonnet-4.6".to_string()), }; - Ok((config, model)) + let custom_prompt = settings + .get("briefing_prompt") + .filter(|s| !s.is_empty()) + .cloned(); + Ok((config, model, custom_prompt)) } async fn run_research( @@ 
-501,14 +505,15 @@ pub(crate) async fn generate( new_atoms: &[AtomWithTags], total_new: i32, ) -> Result<(String, Vec<(i32, String, String)>), String> { - let (provider_config, model) = resolve_model(core).await?; + let (provider_config, model, custom_system_prompt) = resolve_model(core).await?; tracing::info!(model = %model, atoms = new_atoms.len(), "[briefing/agentic] Running agent"); let user_prompt = build_user_prompt(since, new_atoms, total_new); + let system = custom_system_prompt.as_deref().unwrap_or(SYSTEM_PROMPT); let mut state = AgentState { messages: vec![ - Message::system(SYSTEM_PROMPT.to_string()), + Message::system(system.to_string()), Message::user(user_prompt), ], done_called: false, diff --git a/crates/atomic-core/src/embedding.rs b/crates/atomic-core/src/embedding.rs index b45d79af..71ddc019 100644 --- a/crates/atomic-core/src/embedding.rs +++ b/crates/atomic-core/src/embedding.rs @@ -792,6 +792,10 @@ async fn process_tagging_only_inner( return Ok(TaggingOutcome::Skipped); } + let custom_tagging_prompt = settings_map + .get("tagging_prompt") + .filter(|s| !s.is_empty()) + .map(|s| s.as_str()); let tags = run_tagging_strategy( tagging_strategy, &provider_config, @@ -799,6 +803,7 @@ async fn process_tagging_only_inner( &tag_tree_json, &tagging_model, supported_params, + custom_tagging_prompt, ) .await?; @@ -852,7 +857,6 @@ async fn process_tagging_only_inner( new_tags_created: all_new_tag_ids, }) } - async fn run_tagging_strategy( strategy: TaggingStrategy, provider_config: &ProviderConfig, @@ -860,6 +864,7 @@ async fn run_tagging_strategy( tag_tree_json: &str, model: &str, supported_params: Option<Vec<String>>, + custom_system_prompt: Option<&str>, ) -> Result<Vec<String>, String> { match strategy { TaggingStrategy::TruncatedFullContent => { @@ -869,6 +874,7 @@ async fn run_tagging_strategy( tag_tree_json, model, supported_params, + custom_system_prompt, ) .await } @@ -882,6 +888,7 @@ async fn run_tagging_strategy( tag_tree_json, model, supported_params, + 
custom_system_prompt, ) .await } diff --git a/crates/atomic-core/src/extraction.rs b/crates/atomic-core/src/extraction.rs index a316e9d1..be6f8c23 100644 --- a/crates/atomic-core/src/extraction.rs +++ b/crates/atomic-core/src/extraction.rs @@ -261,8 +261,8 @@ pub async fn extract_tags_from_content( tag_tree_json: &str, model: &str, supported_params: Option<Vec<String>>, + custom_system_prompt: Option<&str>, ) -> Result<Vec<String>, String> { - // Truncate based on provider's context length let max_chars = max_tagging_chars(provider_config, tag_tree_json, model); let text = if content.len() > max_chars { // Find the nearest char boundary at or before max_chars @@ -280,7 +280,10 @@ pub async fn extract_tags_from_content( tag_tree_json, text ); - let messages = vec![Message::system(SYSTEM_PROMPT), Message::user(user_content)]; + let system = custom_system_prompt + .filter(|s| !s.is_empty()) + .unwrap_or(SYSTEM_PROMPT); + let messages = vec![Message::system(system), Message::user(user_content)]; let call = StructuredCall::::new( provider_config, diff --git a/crates/atomic-core/src/settings.rs b/crates/atomic-core/src/settings.rs index 6d00b00d..b9ba5709 100644 --- a/crates/atomic-core/src/settings.rs +++ b/crates/atomic-core/src/settings.rs @@ -73,6 +73,9 @@ pub const DEFAULT_SETTINGS: &[(&str, &str)] = &[ ("openai_compat_timeout_secs", "300"), // 5 minutes default for OpenAI-compatible servers ("wiki_generation_prompt", ""), ("wiki_update_prompt", ""), + ("briefing_prompt", ""), + ("chat_prompt", ""), + ("tagging_prompt", ""), // Scheduled tasks — see crate::scheduler::state for key format ("task.daily_briefing.enabled", "true"), ("task.daily_briefing.interval_hours", "24"), diff --git a/package-lock.json b/package-lock.json index c0216e34..65760a2a 100644 --- a/package-lock.json +++ b/package-lock.json @@ -13424,17 +13424,6 @@ "browserslist": ">= 4.21.0" } }, - "node_modules/use-sync-external-store": { - "version": "1.6.0", - "resolved": 
"https://registry.npmjs.org/use-sync-external-store/-/use-sync-external-store-1.6.0.tgz", - "integrity": "sha512-Pp6GSwGP/NrPIrxVFAIkOQeyw8lFenOHijQWkUTrDvrF4ALqylP2C/KCkeS9dpUM3KvYRQhna5vt7IL95+ZQ9w==", - "license": "MIT", - "optional": true, - "peer": true, - "peerDependencies": { - "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" - } - }, "node_modules/util-deprecate": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", diff --git a/src/components/settings/SettingsModal.tsx b/src/components/settings/SettingsModal.tsx index 0f3038e0..342e0c9b 100644 --- a/src/components/settings/SettingsModal.tsx +++ b/src/components/settings/SettingsModal.tsx @@ -76,11 +76,12 @@ import { formatRelativeDate } from '../../lib/date'; import { useDatabasesStore, type DatabaseInfo, type DatabaseStats } from '../../stores/databases'; import { OverrideControls } from './OverrideControls'; -export type SettingsTab = 'general' | 'ai' | 'tag-categories' | 'connection' | 'integrations' | 'databases'; +export type SettingsTab = 'general' | 'ai' | 'tag-categories' | 'connection' | 'integrations' | 'databases' | 'prompts'; const SETTINGS_TABS: { id: SettingsTab; label: string }[] = [ { id: 'general', label: 'General' }, { id: 'ai', label: 'AI Models' }, + { id: 'prompts', label: 'Prompts' }, { id: 'tag-categories', label: 'Tags' }, { id: 'connection', label: 'Connection' }, { id: 'integrations', label: 'Integrations' }, @@ -800,6 +801,9 @@ export function SettingsModal({ isOpen, onClose, initialTab }: SettingsModalProp const [wikiStrategy, setWikiStrategy] = useState('centroid'); const [wikiGenerationPrompt, setWikiGenerationPrompt] = useState(''); const [wikiUpdatePrompt, setWikiUpdatePrompt] = useState(''); + const [briefingPrompt, setBriefingPrompt] = useState(''); + const [chatPrompt, setChatPrompt] = useState(''); + const [taggingPrompt, setTaggingPrompt] = useState(''); const [chatModel, setChatModel] = 
useState('anthropic/claude-sonnet-4.6'); const [saveError, setSaveError] = useState(null); @@ -1193,6 +1197,9 @@ export function SettingsModal({ isOpen, onClose, initialTab }: SettingsModalProp setWikiStrategy(settings.wiki_strategy || 'centroid'); setWikiGenerationPrompt(settings.wiki_generation_prompt || ''); setWikiUpdatePrompt(settings.wiki_update_prompt || ''); + setBriefingPrompt(settings.briefing_prompt || ''); + setChatPrompt(settings.chat_prompt || ''); + setTaggingPrompt(settings.tagging_prompt || ''); setChatModel(settings.chat_model || 'anthropic/claude-sonnet-4.6'); setOllamaHost(settings.ollama_host || 'http://127.0.0.1:11434'); setOllamaEmbeddingModel(settings.ollama_embedding_model || 'nomic-embed-text'); @@ -1756,60 +1763,6 @@ export function SettingsModal({ isOpen, onClose, initialTab }: SettingsModalProp - {/* Wiki Generation Prompt */} -
- -

- System prompt for generating new wiki articles. Leave empty to use the default. -

-