Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
28 changes: 28 additions & 0 deletions crates/openfang-kernel/src/kernel.rs
Original file line number Diff line number Diff line change
Expand Up @@ -851,6 +851,13 @@ impl OpenFangKernel {
Arc<dyn openfang_runtime::embedding::EmbeddingDriver + Send + Sync>,
> = {
use openfang_runtime::embedding::create_embedding_driver;
if !config.memory.enabled {
info!("Memory disabled in config; runtime recall/remember and embeddings are off");
None
} else if !config.memory.embeddings_enabled {
info!("Memory embeddings disabled in config; using text-only memory path");
None
} else {
let configured_model = &config.memory.embedding_model;
if let Some(ref provider) = config.memory.embedding_provider {
// Explicit config takes priority — use the configured embedding model.
Expand Down Expand Up @@ -913,6 +920,7 @@ impl OpenFangKernel {
}
}
}
}
};

let browser_ctx = openfang_runtime::browser::BrowserManager::new(config.browser.clone());
Expand Down Expand Up @@ -1876,6 +1884,16 @@ impl OpenFangKernel {
};
manifest.model.system_prompt =
openfang_runtime::prompt_builder::build_system_prompt(&prompt_ctx);
manifest.metadata.insert(
"runtime_memory_enabled".to_string(),
serde_json::Value::Bool(self.config.memory.enabled),
);
manifest.metadata.insert(
"runtime_embeddings_enabled".to_string(),
serde_json::Value::Bool(
self.config.memory.enabled && self.config.memory.embeddings_enabled,
),
);
// Store canonical context separately for injection as user message
// (keeps system prompt stable across turns for provider prompt caching)
if let Some(cc_msg) =
Expand Down Expand Up @@ -2420,6 +2438,16 @@ impl OpenFangKernel {
};
manifest.model.system_prompt =
openfang_runtime::prompt_builder::build_system_prompt(&prompt_ctx);
manifest.metadata.insert(
"runtime_memory_enabled".to_string(),
serde_json::Value::Bool(self.config.memory.enabled),
);
manifest.metadata.insert(
"runtime_embeddings_enabled".to_string(),
serde_json::Value::Bool(
self.config.memory.enabled && self.config.memory.embeddings_enabled,
),
);
// Store canonical context separately for injection as user message
// (keeps system prompt stable across turns for provider prompt caching)
if let Some(cc_msg) =
Expand Down
203 changes: 120 additions & 83 deletions crates/openfang-runtime/src/agent_loop.rs
Original file line number Diff line number Diff line change
Expand Up @@ -173,9 +173,26 @@ pub async fn run_agent_loop(
.get("hand_allowed_env")
.and_then(|v| serde_json::from_value(v.clone()).ok())
.unwrap_or_default();
let memory_enabled = manifest
.metadata
.get("runtime_memory_enabled")
.and_then(|v| v.as_bool())
.unwrap_or(true);
let embeddings_enabled = manifest
.metadata
.get("runtime_embeddings_enabled")
.and_then(|v| v.as_bool())
.unwrap_or(true);
let embedding_driver = if memory_enabled && embeddings_enabled {
embedding_driver
} else {
None
};

// Recall relevant memories — prefer vector similarity search when embedding driver is available
let memories = if let Some(emb) = embedding_driver {
let memories = if !memory_enabled {
Vec::new()
} else if let Some(emb) = embedding_driver {
match emb.embed_one(user_message).await {
Ok(query_vec) => {
debug!("Using vector recall (dims={})", query_vec.len());
Expand Down Expand Up @@ -524,48 +541,50 @@ pub async fn run_agent_loop(
.await
.map_err(|e| OpenFangError::Memory(e.to_string()))?;

// Remember this interaction (with embedding if available)
let interaction_text = format!(
"User asked: {}\nI responded: {}",
user_message, final_response
);
if let Some(emb) = embedding_driver {
match emb.embed_one(&interaction_text).await {
Ok(vec) => {
let _ = memory
.remember_with_embedding_async(
session.agent_id,
&interaction_text,
MemorySource::Conversation,
"episodic",
HashMap::new(),
Some(&vec),
)
.await;
}
Err(e) => {
warn!("Embedding for remember failed: {e}");
let _ = memory
.remember(
session.agent_id,
&interaction_text,
MemorySource::Conversation,
"episodic",
HashMap::new(),
)
.await;
if memory_enabled {
// Remember this interaction (with embedding if available)
let interaction_text = format!(
"User asked: {}\nI responded: {}",
user_message, final_response
);
if let Some(emb) = embedding_driver {
match emb.embed_one(&interaction_text).await {
Ok(vec) => {
let _ = memory
.remember_with_embedding_async(
session.agent_id,
&interaction_text,
MemorySource::Conversation,
"episodic",
HashMap::new(),
Some(&vec),
)
.await;
}
Err(e) => {
warn!("Embedding for remember failed: {e}");
let _ = memory
.remember(
session.agent_id,
&interaction_text,
MemorySource::Conversation,
"episodic",
HashMap::new(),
)
.await;
}
}
} else {
let _ = memory
.remember(
session.agent_id,
&interaction_text,
MemorySource::Conversation,
"episodic",
HashMap::new(),
)
.await;
}
} else {
let _ = memory
.remember(
session.agent_id,
&interaction_text,
MemorySource::Conversation,
"episodic",
HashMap::new(),
)
.await;
}

// Notify phase: Done
Expand Down Expand Up @@ -1180,9 +1199,25 @@ pub async fn run_agent_loop_streaming(
.get("hand_allowed_env")
.and_then(|v| serde_json::from_value(v.clone()).ok())
.unwrap_or_default();

let memory_enabled = manifest
.metadata
.get("runtime_memory_enabled")
.and_then(|v| v.as_bool())
.unwrap_or(true);
let embeddings_enabled = manifest
.metadata
.get("runtime_embeddings_enabled")
.and_then(|v| v.as_bool())
.unwrap_or(true);
let embedding_driver = if memory_enabled && embeddings_enabled {
embedding_driver
} else {
None
};
// Recall relevant memories — prefer vector similarity search when embedding driver is available
let memories = if let Some(emb) = embedding_driver {
let memories = if !memory_enabled {
Vec::new()
} else if let Some(emb) = embedding_driver {
match emb.embed_one(user_message).await {
Ok(query_vec) => {
debug!("Using vector recall (streaming, dims={})", query_vec.len());
Expand Down Expand Up @@ -1527,48 +1562,50 @@ pub async fn run_agent_loop_streaming(
.await
.map_err(|e| OpenFangError::Memory(e.to_string()))?;

// Remember this interaction (with embedding if available)
let interaction_text = format!(
"User asked: {}\nI responded: {}",
user_message, final_response
);
if let Some(emb) = embedding_driver {
match emb.embed_one(&interaction_text).await {
Ok(vec) => {
let _ = memory
.remember_with_embedding_async(
session.agent_id,
&interaction_text,
MemorySource::Conversation,
"episodic",
HashMap::new(),
Some(&vec),
)
.await;
}
Err(e) => {
warn!("Embedding for remember failed (streaming): {e}");
let _ = memory
.remember(
session.agent_id,
&interaction_text,
MemorySource::Conversation,
"episodic",
HashMap::new(),
)
.await;
if memory_enabled {
// Remember this interaction (with embedding if available)
let interaction_text = format!(
"User asked: {}\nI responded: {}",
user_message, final_response
);
if let Some(emb) = embedding_driver {
match emb.embed_one(&interaction_text).await {
Ok(vec) => {
let _ = memory
.remember_with_embedding_async(
session.agent_id,
&interaction_text,
MemorySource::Conversation,
"episodic",
HashMap::new(),
Some(&vec),
)
.await;
}
Err(e) => {
warn!("Embedding for remember failed (streaming): {e}");
let _ = memory
.remember(
session.agent_id,
&interaction_text,
MemorySource::Conversation,
"episodic",
HashMap::new(),
)
.await;
}
}
} else {
let _ = memory
.remember(
session.agent_id,
&interaction_text,
MemorySource::Conversation,
"episodic",
HashMap::new(),
)
.await;
}
} else {
let _ = memory
.remember(
session.agent_id,
&interaction_text,
MemorySource::Conversation,
"episodic",
HashMap::new(),
)
.await;
}

// Notify phase: Done
Expand Down
16 changes: 16 additions & 0 deletions crates/openfang-types/src/config.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1471,6 +1471,9 @@ impl Default for DefaultModelConfig {
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(default)]
pub struct MemoryConfig {
/// Whether runtime memory recall/remember is enabled at all.
#[serde(default = "default_memory_enabled")]
pub enabled: bool,
/// Path to SQLite database file.
pub sqlite_path: Option<PathBuf>,
/// Embedding model for semantic search.
Expand All @@ -1485,24 +1488,37 @@ pub struct MemoryConfig {
/// Environment variable name for the embedding API key.
#[serde(default)]
pub embedding_api_key_env: Option<String>,
/// Whether embedding-backed semantic recall is enabled.
#[serde(default = "default_memory_embeddings_enabled")]
pub embeddings_enabled: bool,
/// How often to run memory consolidation (hours). 0 = disabled.
#[serde(default = "default_consolidation_interval")]
pub consolidation_interval_hours: u64,
}

/// Serde default for `MemoryConfig::enabled`: runtime memory
/// recall/remember stays on unless explicitly disabled in config.
fn default_memory_enabled() -> bool {
    true
}

/// Serde default for `MemoryConfig::embeddings_enabled`:
/// embedding-backed semantic recall is on by default.
fn default_memory_embeddings_enabled() -> bool {
    true
}

/// Serde default for `MemoryConfig::consolidation_interval_hours`:
/// run memory consolidation once per day (24 hours).
fn default_consolidation_interval() -> u64 {
    24
}

impl Default for MemoryConfig {
fn default() -> Self {
Self {
enabled: default_memory_enabled(),
sqlite_path: None,
embedding_model: "all-MiniLM-L6-v2".to_string(),
consolidation_threshold: 10_000,
decay_rate: 0.1,
embedding_provider: None,
embedding_api_key_env: None,
embeddings_enabled: default_memory_embeddings_enabled(),
consolidation_interval_hours: default_consolidation_interval(),
}
}
Expand Down
Loading