diff --git a/agents/analyst/agent.toml b/agents/analyst/agent.toml index 7d3766fd3..3850c6cb8 100644 --- a/agents/analyst/agent.toml +++ b/agents/analyst/agent.toml @@ -42,7 +42,7 @@ api_key_env = "GROQ_API_KEY" max_llm_tokens_per_hour = 150000 [capabilities] -tools = ["file_read", "file_write", "file_list", "shell_exec", "web_search", "web_fetch", "memory_store", "memory_recall"] +tools = ["file_read", "file_write", "file_list", "shell_exec", "web_search", "list_searxng_categories", "web_fetch", "memory_store", "memory_recall"] network = ["*"] memory_read = ["*"] memory_write = ["self.*", "shared.*"] diff --git a/agents/coder/agent.toml b/agents/coder/agent.toml index 0974a7b96..804cd8f05 100644 --- a/agents/coder/agent.toml +++ b/agents/coder/agent.toml @@ -40,7 +40,7 @@ max_llm_tokens_per_hour = 200000 max_concurrent_tools = 10 [capabilities] -tools = ["file_read", "file_write", "file_list", "shell_exec", "web_search", "web_fetch", "memory_store", "memory_recall"] +tools = ["file_read", "file_write", "file_list", "shell_exec", "web_search", "list_searxng_categories", "web_fetch", "memory_store", "memory_recall"] network = ["*"] memory_read = ["*"] memory_write = ["self.*"] diff --git a/agents/data-scientist/agent.toml b/agents/data-scientist/agent.toml index cb69103cf..03b1d7242 100644 --- a/agents/data-scientist/agent.toml +++ b/agents/data-scientist/agent.toml @@ -44,7 +44,7 @@ api_key_env = "GROQ_API_KEY" max_llm_tokens_per_hour = 150000 [capabilities] -tools = ["file_read", "file_write", "file_list", "shell_exec", "web_search", "web_fetch", "memory_store", "memory_recall"] +tools = ["file_read", "file_write", "file_list", "shell_exec", "web_search", "list_searxng_categories", "web_fetch", "memory_store", "memory_recall"] network = ["*"] memory_read = ["*"] memory_write = ["self.*", "shared.*"] diff --git a/agents/debugger/agent.toml b/agents/debugger/agent.toml index 41887d239..08df0fc39 100644 --- a/agents/debugger/agent.toml +++ 
b/agents/debugger/agent.toml @@ -45,7 +45,7 @@ api_key_env = "GROQ_API_KEY" max_llm_tokens_per_hour = 150000 [capabilities] -tools = ["file_read", "file_write", "file_list", "shell_exec", "web_search", "web_fetch", "memory_store", "memory_recall"] +tools = ["file_read", "file_write", "file_list", "shell_exec", "web_search", "list_searxng_categories", "web_fetch", "memory_store", "memory_recall"] network = ["*"] memory_read = ["*"] memory_write = ["self.*", "shared.*"] diff --git a/agents/hello-world/agent.toml b/agents/hello-world/agent.toml index c6b007f2c..2644be9f7 100644 --- a/agents/hello-world/agent.toml +++ b/agents/hello-world/agent.toml @@ -22,7 +22,7 @@ Keep responses brief (2-4 paragraphs max) unless the user asks for detail.""" max_llm_tokens_per_hour = 100000 [capabilities] -tools = ["file_read", "file_list", "web_fetch", "web_search", "memory_store", "memory_recall"] +tools = ["file_read", "file_list", "web_fetch", "web_search", "list_searxng_categories", "memory_store", "memory_recall"] network = ["*"] memory_read = ["*"] memory_write = ["self.*"] diff --git a/agents/researcher/agent.toml b/agents/researcher/agent.toml index d53afa0e1..52ebfcdea 100644 --- a/agents/researcher/agent.toml +++ b/agents/researcher/agent.toml @@ -44,7 +44,7 @@ api_key_env = "GROQ_API_KEY" max_llm_tokens_per_hour = 150000 [capabilities] -tools = ["web_search", "web_fetch", "file_read", "file_write", "file_list", "memory_store", "memory_recall"] +tools = ["web_search", "list_searxng_categories", "web_fetch", "file_read", "file_write", "file_list", "memory_store", "memory_recall"] network = ["*"] memory_read = ["*"] memory_write = ["self.*", "shared.*"] diff --git a/agents/travel-planner/agent.toml b/agents/travel-planner/agent.toml index 189ed8b9f..f34f428e2 100644 --- a/agents/travel-planner/agent.toml +++ b/agents/travel-planner/agent.toml @@ -59,7 +59,7 @@ max_llm_tokens_per_hour = 150000 max_concurrent_tools = 5 [capabilities] -tools = ["file_read", "file_write", 
"file_list", "memory_store", "memory_recall", "web_search", "web_fetch", "browser_navigate", "browser_click", "browser_type", "browser_read_page", "browser_screenshot", "browser_close"] +tools = ["file_read", "file_write", "file_list", "memory_store", "memory_recall", "web_search", "list_searxng_categories", "web_fetch", "browser_navigate", "browser_click", "browser_type", "browser_read_page", "browser_screenshot", "browser_close"] network = ["*"] memory_read = ["*"] memory_write = ["self.*", "shared.*"] diff --git a/agents/writer/agent.toml b/agents/writer/agent.toml index 5c5ada560..99582e542 100644 --- a/agents/writer/agent.toml +++ b/agents/writer/agent.toml @@ -38,7 +38,7 @@ api_key_env = "GEMINI_API_KEY" max_llm_tokens_per_hour = 100000 [capabilities] -tools = ["file_read", "file_write", "file_list", "web_search", "web_fetch", "memory_store", "memory_recall"] +tools = ["file_read", "file_write", "file_list", "web_search", "list_searxng_categories", "web_fetch", "memory_store", "memory_recall"] network = ["*"] memory_read = ["*"] memory_write = ["self.*"] diff --git a/crates/openfang-kernel/src/kernel.rs b/crates/openfang-kernel/src/kernel.rs index d9fe60f97..baabb5d15 100644 --- a/crates/openfang-kernel/src/kernel.rs +++ b/crates/openfang-kernel/src/kernel.rs @@ -881,12 +881,12 @@ impl OpenFangKernel { // Auto-detect embedding provider by checking API key env vars in // priority order. First match wins. 
const API_KEY_PROVIDERS: &[(&str, &str)] = &[ - ("OPENAI_API_KEY", "openai"), - ("GROQ_API_KEY", "groq"), - ("MISTRAL_API_KEY", "mistral"), - ("TOGETHER_API_KEY", "together"), + ("OPENAI_API_KEY", "openai"), + ("GROQ_API_KEY", "groq"), + ("MISTRAL_API_KEY", "mistral"), + ("TOGETHER_API_KEY", "together"), ("FIREWORKS_API_KEY", "fireworks"), - ("COHERE_API_KEY", "cohere"), + ("COHERE_API_KEY", "cohere"), ]; let detected_from_key = API_KEY_PROVIDERS @@ -1127,8 +1127,7 @@ impl OpenFangKernel { != entry.manifest.tool_allowlist || disk_manifest.tool_blocklist != entry.manifest.tool_blocklist - || disk_manifest.skills - != entry.manifest.skills + || disk_manifest.skills != entry.manifest.skills || disk_manifest.mcp_servers != entry.manifest.mcp_servers; if changed { diff --git a/crates/openfang-runtime/src/tool_runner.rs b/crates/openfang-runtime/src/tool_runner.rs index b2913ff1d..8e5afb965 100644 --- a/crates/openfang-runtime/src/tool_runner.rs +++ b/crates/openfang-runtime/src/tool_runner.rs @@ -235,6 +235,18 @@ pub async fn execute_tool( tool_web_search_legacy(input).await } } + "list_searxng_categories" => { + if let Some(ctx) = web_ctx { + match ctx.search.list_searxng_categories().await { + Ok(categories) => { + Ok(serde_json::to_string(&categories).unwrap_or_else(|_| "[]".to_string())) + } + Err(e) => Err(e), + } + } else { + Err("SearXNG is not configured".to_string()) + } + } // Shell tool — metacharacter check + exec policy + taint check "shell_exec" => { @@ -620,7 +632,7 @@ pub fn builtin_tool_definitions() -> Vec { }, ToolDefinition { name: "web_search".to_string(), - description: "Search the web using multiple providers (Tavily, Brave, Perplexity, DuckDuckGo) with automatic fallback. Returns structured results with titles, URLs, and snippets.".to_string(), + description: "Search the web using multiple providers (Tavily, Brave, Perplexity, Searxng, DuckDuckGo) with automatic fallback. 
Returns structured results with titles, URLs, and snippets.".to_string(), input_schema: serde_json::json!({ "type": "object", "properties": { @@ -630,6 +642,15 @@ pub fn builtin_tool_definitions() -> Vec { "required": ["query"] }), }, + ToolDefinition { + name: "list_searxng_categories".to_string(), + description: "List available search categories from the SearXNG instance. Returns the list of categories the instance supports (e.g., 'general', 'images', 'news', 'videos'). Only works when SearXNG is configured as the search provider.".to_string(), + input_schema: serde_json::json!({ + "type": "object", + "properties": {}, + "required": [] + }), + }, // --- Shell tool --- ToolDefinition { name: "shell_exec".to_string(), diff --git a/crates/openfang-runtime/src/web_fetch.rs b/crates/openfang-runtime/src/web_fetch.rs index 81021aefc..85e70b79a 100644 --- a/crates/openfang-runtime/src/web_fetch.rs +++ b/crates/openfang-runtime/src/web_fetch.rs @@ -506,7 +506,11 @@ mod tests { assert!(check_ssrf("http://169.254.169.254/latest/meta-data/", &allow).is_err()); // Also verify hostname-based metadata blocks let allow2 = vec!["metadata.google.internal".to_string()]; - assert!(check_ssrf("http://metadata.google.internal/computeMetadata/v1/", &allow2).is_err()); + assert!(check_ssrf( + "http://metadata.google.internal/computeMetadata/v1/", + &allow2 + ) + .is_err()); } #[test] diff --git a/crates/openfang-runtime/src/web_search.rs b/crates/openfang-runtime/src/web_search.rs index 28e92259e..738f2fb09 100644 --- a/crates/openfang-runtime/src/web_search.rs +++ b/crates/openfang-runtime/src/web_search.rs @@ -1,7 +1,7 @@ //! Multi-provider web search engine with auto-fallback. //! -//! Supports 4 providers: Tavily (AI-agent-native), Brave, Perplexity, and -//! DuckDuckGo (zero-config fallback). Auto mode cascades through available +//! Supports 5 providers: Tavily (AI-agent-native), Brave, Perplexity, +//! Searxng (self-hosted), and DuckDuckGo (zero-config fallback). 
Auto mode cascades through available //! providers based on configured API keys. //! //! All API keys use `Zeroizing` via `resolve_api_key()` to auto-wipe @@ -55,7 +55,7 @@ impl WebSearchEngine { SearchProvider::Tavily => self.search_tavily(query, max_results).await, SearchProvider::Perplexity => self.search_perplexity(query).await, SearchProvider::DuckDuckGo => self.search_duckduckgo(query, max_results).await, - SearchProvider::Searxng => self.search_searxng(query, max_results, None, 1).await, + SearchProvider::Searxng => self.search_searxng(query, max_results).await, SearchProvider::Auto => self.search_auto(query, max_results).await, }; @@ -100,7 +100,7 @@ impl WebSearchEngine { // Searxng fourth (self-hosted, no API key needed) if !self.config.searxng.url.is_empty() { debug!("Auto: trying Searxng"); - match self.search_searxng(query, max_results, None, 1).await { + match self.search_searxng(query, max_results).await { Ok(result) => return Ok(result), Err(e) => warn!("Searxng failed, falling back: {e}"), } @@ -325,46 +325,25 @@ impl WebSearchEngine { } /// Search via SearXNG self-hosted instance. - async fn search_searxng( - &self, - query: &str, - max_results: usize, - category: Option<&str>, - page: u32, - ) -> Result { + /// + /// Uses the `!category` syntax embedded in the query string (e.g., `!news rust latest`). + /// Without a category prefix, SearXNG defaults to `general` search. + async fn search_searxng(&self, query: &str, max_results: usize) -> Result { if self.config.searxng.url.is_empty() { return Err("SearXNG URL is not configured".to_string()); } - let category = category.unwrap_or("general"); - - // Validate category against SearXNG instance - match self.list_searxng_categories().await { - Ok(cats) => { - if !cats.iter().any(|c| c == category) { - return Err(format!( - "Invalid SearXNG category '{}'. 
Available: {}", - category, - cats.join(", ") - )); - } - } - Err(e) => warn!("Could not validate SearXNG category: {e}"), - } - let limit = max_results; debug!(query, "Searching via SearXNG"); let resp = self .client - .get(format!("{}/search", self.config.searxng.url.trim_end_matches('/'))) - .query(&[ - ("q", query), - ("format", "json"), - ("categories", category), - ("page", &page.to_string()), - ]) + .get(format!( + "{}/search", + self.config.searxng.url.trim_end_matches('/') + )) + .query(&[("q", query), ("format", "json")]) .header("User-Agent", "Mozilla/5.0 (compatible; OpenFangAgent/0.1)") .send() .await @@ -451,7 +430,10 @@ impl WebSearchEngine { let resp = self .client - .get(format!("{}/config", self.config.searxng.url.trim_end_matches('/'))) + .get(format!( + "{}/config", + self.config.searxng.url.trim_end_matches('/') + )) .header("User-Agent", "Mozilla/5.0 (compatible; OpenFangAgent/0.1)") .send() .await diff --git a/crates/openfang-skills/bundled/searxng/SKILL.md b/crates/openfang-skills/bundled/searxng/SKILL.md deleted file mode 100644 index 0f5d06026..000000000 --- a/crates/openfang-skills/bundled/searxng/SKILL.md +++ /dev/null @@ -1,70 +0,0 @@ ---- -name: searxng -description: Privacy-respecting metasearch specialist using SearXNG instances ---- -# SearXNG Search Specialist - -You are a privacy-respecting web search specialist using SearXNG, a self-hosted metasearch engine that aggregates results from multiple search engines without tracking. - -## Key Principles - -- Prefer SearXNG for privacy-sensitive searches — no API keys, no tracking, no user profiling. -- Always cite sources with URLs so the user can verify information. -- Prefer primary sources (official docs, research papers) over secondary ones (blog posts, forums). -- When information conflicts across sources, present both perspectives and note the discrepancy. -- State the date of information when recency matters. - -## SearXNG Capabilities - -SearXNG supports 30+ search categories. 
Use the right category for the task: - -| Category | Use Case | -|----------|----------| -| `general` | Default web search | -| `images` | Image search | -| `news` | News articles | -| `videos` | Video results | -| `music` | Music and audio | -| `files` | File search | -| `it` | IT and programming | -| `science` | Scientific content | -| `books` | Book search | -| `maps` | Map and location | -| `q&a` | Q&A sites (Stack Overflow, etc.) | -| `social media` | Social media posts | -| `wikimedia` | Wikipedia and Wikimedia | -| `dictionaries` | Dictionary definitions | -| `currency` | Currency conversion | -| `weather` | Weather information | -| `translate` | Translation results | - -## Search Techniques - -- **Category selection**: Always specify a category when the topic is clear. Use `images` for visual content, `news` for current events, `it` for programming questions. -- **Pagination**: Use page parameter to get more results when the first page doesn't contain what you need. -- **Engine syntax**: SearXNG supports `!engine` syntax to target specific engines (e.g., `!wikipedia rust programming`). -- **Site search**: Use `site:example.com` in queries to search within a specific domain. -- **Exact phrases**: Use quotes for exact phrase matching (e.g., `"rust borrow checker"`). -- **Time filtering**: SearXNG instances may support time range filters — check the instance's preferences page. - -## Query Formulation - -- Start with specific, targeted queries. Use exact phrases for precise matches. -- Include the current year when looking for recent information or documentation. -- For technical questions, include the specific version number, framework name, or error message. -- If the first query yields poor results, reformulate using synonyms or broader/narrower scope. - -## Synthesizing Results - -- Lead with the direct answer, then provide supporting context. -- Organize findings by relevance, not by the order you found them. 
-- Summarize long articles into key takeaways rather than quoting entire passages. -- When comparing options, use structured comparisons with pros and cons. -- Flag information that may be outdated or from unreliable sources. - -## Pitfalls to Avoid - -- Never present information from a single source as definitive without corroboration. -- Do not include URLs you have not verified — broken links erode trust. -- Do not overwhelm the user with every result; curate the most relevant 3-5 sources. -- Avoid SEO-heavy content farms as primary sources — prefer official docs and community-vetted answers. diff --git a/crates/openfang-skills/bundled/web-search/SKILL.md b/crates/openfang-skills/bundled/web-search/SKILL.md index d632c276f..96916465d 100644 --- a/crates/openfang-skills/bundled/web-search/SKILL.md +++ b/crates/openfang-skills/bundled/web-search/SKILL.md @@ -8,31 +8,44 @@ You are a research specialist. You help users find accurate, up-to-date informat ## Key Principles -- Always cite your sources with URLs so the user can verify the information. -- Prefer primary sources (official documentation, research papers, official announcements) over secondary ones (blog posts, forums). -- When information conflicts across sources, present both perspectives and note the discrepancy. -- Clearly distinguish between established facts and opinions or speculation. -- State the date of information when recency matters (e.g., pricing, API versions, compatibility). +- Cite sources with URLs so users can verify. +- Prefer primary sources (official docs, research papers) over secondary (blogs, forums). +- When sources conflict, present both perspectives and note the discrepancy. +- Distinguish facts from opinions. +- State the date when recency matters (pricing, API versions). ## Search Techniques -- Start with specific, targeted queries. Use exact phrases in quotes for precise matches. 
-- Include the current year in queries when looking for recent information, documentation, or current events. -- Use site-specific searches (e.g., `site:docs.python.org`) when you know the authoritative source. -- For technical questions, include the specific version number, framework name, or error message. -- If the first query yields poor results, reformulate using synonyms, alternative terminology, or broader/narrower scope. +- Start with specific queries. Use exact phrases in quotes. +- Include the current year for recent info or docs. +- Use site-specific searches (e.g., `site:docs.python.org`) when you know the source. +- For technical questions, include version numbers or error messages. +- If results are poor, reformulate using synonyms or broader/narrower scope. + +## SearXNG Search + +When SearXNG is configured, `web_search` uses it automatically. Verify with `list_searxng_categories()`. + +### Search Syntax + +SearXNG supports prefixes: + +- **`!` prefix** — Select engine or category: `!wp paris`, `!images Wau Holland` +- **`:` prefix** — Select language: `:fr !wp Wau Holland` + +Modifiers are chainable. See SearXNG preferences for full list. ## Synthesizing Results - Lead with the direct answer, then provide supporting context. -- Organize findings by relevance, not by the order you found them. -- Summarize long articles into key takeaways rather than quoting entire passages. -- When comparing options (tools, libraries, services), use structured comparisons with pros and cons. -- Flag information that may be outdated or from unreliable sources. +- Organize findings by relevance, not by discovery order. +- Summarize long articles into key takeaways. +- Use pros/cons when comparing options. +- Flag outdated or unreliable information. ## Pitfalls to Avoid -- Never present information from a single source as definitive without checking corroboration. -- Do not include URLs you have not verified — broken links erode trust. 
-- Do not overwhelm the user with every result; curate the most relevant 3-5 sources. -- Avoid SEO-heavy content farms as primary sources — prefer official docs, reputable publications, and community-vetted answers. +- Never present single-source info as definitive without corroboration. +- Do not include unverified URLs — broken links erode trust. +- Do not overwhelm users; curate the most relevant 3-5 sources. +- Avoid SEO-heavy content farms — prefer official docs and community answers. \ No newline at end of file diff --git a/crates/openfang-skills/src/bundled.rs b/crates/openfang-skills/src/bundled.rs index 203c5a413..3b29f6e68 100644 --- a/crates/openfang-skills/src/bundled.rs +++ b/crates/openfang-skills/src/bundled.rs @@ -13,7 +13,6 @@ pub fn bundled_skills() -> Vec<(&'static str, &'static str)> { ("github", include_str!("../bundled/github/SKILL.md")), ("docker", include_str!("../bundled/docker/SKILL.md")), ("web-search", include_str!("../bundled/web-search/SKILL.md")), - ("searxng", include_str!("../bundled/searxng/SKILL.md")), ( "code-reviewer", include_str!("../bundled/code-reviewer/SKILL.md"), @@ -196,7 +195,7 @@ mod tests { #[test] fn test_bundled_skills_count() { let skills = bundled_skills(); - assert_eq!(skills.len(), 61, "Expected 61 bundled skills"); + assert_eq!(skills.len(), 60, "Expected 60 bundled skills"); } #[test] diff --git a/docs/architecture.md b/docs/architecture.md index 5aff03f65..71b85e932 100644 --- a/docs/architecture.md +++ b/docs/architecture.md @@ -53,7 +53,7 @@ openfang-types Shared types: Agent, Capability, Event, Memory, Message, |-------|-------------| | **openfang-types** | Core type definitions used across all crates. 
Defines `AgentManifest`, `AgentId`, `Capability`, `Event`, `ToolDefinition`, `KernelConfig`, `OpenFangError`, taint tracking (`TaintLabel`, `TaintSet`), Ed25519 manifest signing, model catalog types (`ModelCatalogEntry`, `ProviderInfo`, `ModelTier`), tool compatibility mappings (21 OpenClaw-to-OpenFang), MCP/A2A config types, and web config types. All config structs use `#[serde(default)]` for forward-compatible TOML parsing. | | **openfang-memory** | SQLite-backed memory substrate (schema v5). Uses `Arc>` with `spawn_blocking` for async bridge. Provides structured KV storage, semantic search with vector embeddings, knowledge graph (entities and relations), session management, task board, usage event persistence (`usage_events` table, `UsageStore`), and canonical sessions for cross-channel memory. Five schema versions: V1 core, V2 collab, V3 embeddings, V4 usage, V5 canonical_sessions. | -| **openfang-runtime** | Agent execution engine. Contains the agent loop (`run_agent_loop`, `run_agent_loop_streaming`), 3 native LLM drivers (Anthropic, Gemini, OpenAI-compatible covering 20 providers), 23 built-in tools, WASM sandbox (Wasmtime with dual fuel+epoch metering), MCP client/server (JSON-RPC 2.0 over stdio/SSE), A2A protocol (AgentCard, task management), web search engine (4 providers: Tavily/Brave/Perplexity/DuckDuckGo), web fetch with SSRF protection, loop guard (SHA256-based tool loop detection), session repair (history validation), LLM session compactor (block-aware), Merkle hash chain audit trail, and embedding driver. Defines the `KernelHandle` trait that enables inter-agent tools without circular crate dependencies. | +| **openfang-runtime** | Agent execution engine. 
Contains the agent loop (`run_agent_loop`, `run_agent_loop_streaming`), 3 native LLM drivers (Anthropic, Gemini, OpenAI-compatible covering 20 providers), 24 built-in tools, WASM sandbox (Wasmtime with dual fuel+epoch metering), MCP client/server (JSON-RPC 2.0 over stdio/SSE), A2A protocol (AgentCard, task management), web search engine (5 providers: Tavily/Brave/Perplexity/Searxng/DuckDuckGo), web fetch with SSRF protection, loop guard (SHA256-based tool loop detection), session repair (history validation), LLM session compactor (block-aware), Merkle hash chain audit trail, and embedding driver. Defines the `KernelHandle` trait that enables inter-agent tools without circular crate dependencies. | | **openfang-kernel** | The central coordinator. `OpenFangKernel` assembles all subsystems: `AgentRegistry`, `AgentScheduler`, `CapabilityManager`, `EventBus`, `Supervisor`, `WorkflowEngine`, `TriggerEngine`, `BackgroundExecutor`, `WasmSandbox`, `ModelCatalog`, `MeteringEngine`, `ModelRouter`, `AuthManager` (RBAC), `HeartbeatMonitor`, `SetupWizard`, `SkillRegistry`, MCP connections, and `WebToolsContext`. Implements `KernelHandle` for inter-agent operations. Handles agent spawn/kill, message dispatch, workflow execution, trigger evaluation, capability inheritance validation, and graceful shutdown with state persistence. | | **openfang-api** | HTTP API server built on Axum 0.8 with 76 endpoints. Routes for agents, workflows, triggers, memory, channels, templates, models, providers, skills, ClawHub, MCP, health, status, version, and shutdown. WebSocket handler for real-time agent chat with streaming. SSE endpoint for streaming responses. OpenAI-compatible endpoints (`POST /v1/chat/completions`, `GET /v1/models`). A2A endpoints (`/.well-known/agent.json`, `/a2a/*`). Middleware: Bearer token auth, request ID injection, structured request logging, GCRA rate limiter (cost-aware), security headers (CSP, X-Frame-Options, etc.), health endpoint redaction. 
| | **openfang-channels** | Channel bridge layer with 40 adapters. Each adapter implements the `ChannelAdapter` trait. Includes: Telegram, Discord, Slack, WhatsApp, Signal, Matrix, Email, SMS, Webhook, Teams, Mattermost, IRC, Google Chat, Twitch, Rocket.Chat, Zulip, XMPP, LINE, Viber, Messenger, Reddit, Mastodon, Bluesky, Feishu, Revolt, Nextcloud, Guilded, Keybase, Threema, Nostr, Webex, Pumble, Flock, Twist, Mumble, DingTalk, Discourse, Gitter, Ntfy, Gotify, LinkedIn. Features: `AgentRouter` for message routing, `BridgeManager` for lifecycle coordination, `ChannelRateLimiter` (per-user DashMap tracking), `formatter.rs` (Markdown to TelegramHTML/SlackMrkdwn/PlainText), `ChannelOverrides` (model/system_prompt/dm_policy/group_policy/rate_limit/threading/output_format), DM/group policy enforcement. | @@ -124,7 +124,7 @@ When `OpenFangKernel::boot_with_config()` is called (either by the daemon or in- - Inject PromptOnly skill context into system prompts 11. Initialize web tools context - - Create WebSearchEngine (4-provider cascading: Tavily->Brave->Perplexity->DDG) + - Create WebSearchEngine (5-provider cascading: Tavily->Brave->Perplexity->Searxng->DDG) - Create WebFetchEngine (SSRF-protected) - Bundle as WebToolsContext @@ -868,7 +868,7 @@ The desktop app (`openfang-desktop`) wraps the full OpenFang stack in a native T | | A2A Proto | | | +------------+ | | +------------+ | -| | Web Search | | 4 engines: Tavily/Brave/Perplexity/DDG +| | Web Search | | 5 engines: Tavily/Brave/Perplexity/Searxng/DDG | | Web Fetch | | SSRF protection + TTL cache | +------------+ | | +------------+ |