Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions .env.example
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,10 @@ PORT=8084
# AGENT_KEY_n8n=your-key-here
# AGENT_KEY_morpheus=your-key-here

# Optional compatibility toggle: allow ?key=... query auth for browser-only tools (e.g. graph HTML pages)
# Strongly recommended to keep disabled in production.
# ALLOW_QUERY_API_KEY=false

# --- Rate Limiting ---
# RATE_LIMIT_WRITES=60 # Max write requests per minute per key (default: 60)
# RATE_LIMIT_READS=120 # Max read requests per minute per key (default: 120)
Expand Down
1 change: 1 addition & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -629,6 +629,7 @@ All configuration is via environment variables. Copy `.env.example` to `.env` an
| `QDRANT_API_KEY` | — | Qdrant API key |
| `PORT` | `8084` | API server port |
| `HOST` | `127.0.0.1` | Bind address. Set to `0.0.0.0` for LAN/Docker access. |
| `ALLOW_QUERY_API_KEY` | `false` | Compatibility flag to accept `?key=...` query auth (not recommended; prefer `x-api-key`/Bearer headers). |

### Embedding Provider

Expand Down
28 changes: 27 additions & 1 deletion api/src/middleware/auth.js
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import crypto from 'crypto';

const ADMIN_KEY = process.env.BRAIN_API_KEY;
const ALLOW_QUERY_API_KEY = process.env.ALLOW_QUERY_API_KEY === 'true';

// Build agent registry from env vars: AGENT_KEY_<name>=<key>
// e.g. AGENT_KEY_claude_code=abc123 → { key: 'abc123', agent: 'claude-code' }
Expand Down Expand Up @@ -52,29 +53,54 @@ function safeEqual(a, b) {
return crypto.timingSafeEqual(Buffer.from(a), Buffer.from(b));
}

/**
 * Extract the API key from an incoming request.
 *
 * Resolution order: `x-api-key` header, then `Authorization: Bearer <token>`,
 * then (only when ALLOW_QUERY_API_KEY is enabled) the `?key=` query parameter.
 *
 * @param {object} req - Express request.
 * @returns {{key: (string|null), source: ('header'|'bearer'|'query'|null)}}
 *   The extracted key and where it came from, or `{ key: null, source: null }`.
 */
function extractKey(req) {
  const headerKey = req.headers['x-api-key'];
  if (headerKey) return { key: headerKey, source: 'header' };

  const authHeader = req.headers.authorization || req.headers.Authorization;
  // RFC 7235: auth scheme names are case-insensitive, so accept "Bearer",
  // "bearer", "BEARER", etc. — some clients/proxies lowercase the scheme.
  if (typeof authHeader === 'string' && /^bearer\s+/i.test(authHeader)) {
    const bearerToken = authHeader.replace(/^bearer\s+/i, '').trim();
    if (bearerToken) return { key: bearerToken, source: 'bearer' };
  }

  // Opt-in compatibility path for browser-only tools (e.g. graph HTML pages).
  if (ALLOW_QUERY_API_KEY && req.query?.key) {
    return { key: req.query.key, source: 'query' };
  }

  return { key: null, source: null };
}

export function authMiddleware(req, res, next) {
const ip = req.ip || req.socket.remoteAddress;

if (isRateLimited(ip)) {
return res.status(429).json({ error: 'Too many failed attempts. Try again later.' });
}

const key = req.headers['x-api-key'] || req.query.key;
const { key, source } = extractKey(req);
if (!key) {
recordFailure(ip);
const queryKeyProvided = req.query?.key && !ALLOW_QUERY_API_KEY;
if (queryKeyProvided) {
return res.status(401).json({ error: 'Query-string API keys are disabled. Use x-api-key header.' });
}
return res.status(401).json({ error: 'Missing API key' });
}

// Check agent-specific keys first (binds identity)
const agentName = agentRegistry.get(key);
if (agentName) {
req.authenticatedAgent = agentName;
req.authSource = source;
req.rateLimitKey = key;
return next();
}

// Fall back to admin key (no agent binding — full access)
if (safeEqual(key, ADMIN_KEY)) {
req.authenticatedAgent = null; // admin — no agent identity enforced
req.authSource = source;
req.rateLimitKey = key;
return next();
}

Expand Down
2 changes: 1 addition & 1 deletion api/src/middleware/ratelimit.js
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ function classifyRequest(method, path) {
}

export function rateLimitMiddleware(req, res, next) {
const apiKey = req.headers['x-api-key'] || 'unknown';
const apiKey = req.rateLimitKey || req.headers['x-api-key'] || req.headers.authorization || req.query?.key || 'unknown';
const type = classifyRequest(req.method, req.path);
const { limited, retryAfter } = checkLimit(apiKey, type);

Expand Down
64 changes: 31 additions & 33 deletions api/src/routes/export.js
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@ import crypto from 'crypto';
import { scrollPoints, upsertPoint, findByPayload } from '../services/qdrant.js';
import { embed } from '../services/embedders/interface.js';
import { isStoreAvailable, createEvent, upsertFact, upsertStatus } from '../services/stores/interface.js';
import { buildDedupExtraFilter, normalizeImportRecord } from '../services/memory-write-utils.js';

export const exportRouter = Router();

Expand Down Expand Up @@ -96,49 +97,46 @@ exportRouter.post('/import', async (req, res) => {
// Process each record in the batch sequentially
for (const record of batch) {
try {
const content = record.content || record.text || '';
if (!content) {
const { normalized, contentHash, error } = normalizeImportRecord(record);
if (error) {
errors++;
continue;
}

// Compute content hash (SHA-256, first 16 hex chars — matches memory.js pattern)
const contentHash = crypto.createHash('sha256').update(content).digest('hex').slice(0, 16);

// Check for existing memory with same content hash
const existing = await findByPayload('content_hash', contentHash);
// Check for existing memory with same hash in the same tenant/type scope
const existing = await findByPayload('content_hash', contentHash, buildDedupExtraFilter(normalized.client_id, normalized.type));
if (existing.length > 0) {
skipped++;
continue;
}

// Embed and generate ID
const vector = await embed(content);
const pointId = record.id || crypto.randomUUID();
const vector = await embed(normalized.content, 'store');
const pointId = normalized.id || crypto.randomUUID();
const now = new Date().toISOString();

// Build full payload
const payload = {
text: content,
type: record.type || 'event',
key: record.key || null,
subject: record.subject || null,
client_id: record.client_id || 'global',
knowledge_category: record.knowledge_category || null,
category: record.category || 'episodic',
source_agent: record.source_agent || 'import',
importance: record.importance || 'medium',
confidence: record.confidence !== undefined ? record.confidence : 1.0,
access_count: record.access_count || 0,
active: record.active !== undefined ? record.active : true,
superseded_by: record.superseded_by || null,
entities: record.entities || [],
text: normalized.content,
type: normalized.type,
key: normalized.key || null,
subject: normalized.subject || null,
client_id: normalized.client_id,
knowledge_category: normalized.knowledge_category,
category: normalized.category,
source_agent: normalized.source_agent,
importance: normalized.importance,
confidence: normalized.confidence !== undefined ? normalized.confidence : 1.0,
access_count: normalized.access_count || 0,
active: normalized.active !== undefined ? normalized.active : true,
superseded_by: normalized.superseded_by || null,
entities: normalized.entities || [],
content_hash: contentHash,
created_at: record.created_at || now,
last_accessed_at: record.last_accessed_at || now,
observed_by: record.observed_by || [record.source_agent || 'import'],
observation_count: record.observation_count || 1,
consolidated: record.consolidated || false,
created_at: normalized.created_at || now,
last_accessed_at: normalized.last_accessed_at || now,
observed_by: normalized.observed_by || [normalized.source_agent],
observation_count: normalized.observation_count || 1,
consolidated: normalized.consolidated || false,
};

// Upsert to Qdrant
Expand All @@ -148,7 +146,7 @@ exportRouter.post('/import', async (req, res) => {
if (isStoreAvailable()) {
try {
const storeData = {
content,
content: normalized.content,
source_agent: payload.source_agent,
client_id: payload.client_id,
category: payload.category,
Expand All @@ -162,12 +160,12 @@ exportRouter.post('/import', async (req, res) => {
storeData.type = type;
await createEvent(storeData);
} else if (type === 'fact') {
storeData.key = record.key || contentHash;
storeData.value = content;
storeData.key = normalized.key || contentHash;
storeData.value = normalized.content;
await upsertFact(storeData);
} else if (type === 'status') {
storeData.subject = record.subject || 'unknown';
storeData.status = record.status_value || content;
storeData.subject = normalized.subject || 'unknown';
storeData.status = normalized.status_value || normalized.content;
await upsertStatus(storeData);
}
} catch (storeErr) {
Expand Down
36 changes: 22 additions & 14 deletions api/src/routes/memory.js
Original file line number Diff line number Diff line change
Expand Up @@ -9,16 +9,17 @@ import {
createEvent, upsertFact, upsertStatus, listEvents, listFacts, listStatuses, isStoreAvailable,
isEntityStoreAvailable, createEntity, findEntity, linkEntityToMemory, createRelationship,
} from '../services/stores/interface.js';
import { scrubCredentials, scrubObject } from '../services/scrub.js';
import { scrubObject } from '../services/scrub.js';
import { extractEntities, linkExtractedEntities } from '../services/entities.js';
import { validateMemoryInput, MAX_OBSERVED_BY } from '../middleware/validate.js';
import { MAX_OBSERVED_BY } from '../middleware/validate.js';
import { dispatchNotification } from '../services/notifications.js';
import { isKeywordSearchAvailable, indexMemory, deactivateMemory, keywordSearch } from '../services/keyword-search.js';
import { isGraphSearchAvailable, graphSearch } from '../services/graph-search.js';
import { reciprocalRankFusion } from '../services/rrf.js';
import { buildDedupExtraFilter, normalizeMemoryRecord } from '../services/memory-write-utils.js';
import { getClientResolver } from '../services/client-resolver.js';

const MULTI_PATH_SEARCH = process.env.MULTI_PATH_SEARCH !== 'false'; // default: true
import { getClientResolver } from '../services/client-resolver.js';

export const memoryRouter = Router();

Expand All @@ -27,12 +28,6 @@ memoryRouter.post('/', async (req, res) => {
try {
let { type, content, source_agent, client_id, category, importance, knowledge_category, metadata } = req.body;

// Validate all input fields
const validationError = validateMemoryInput(req.body);
if (validationError) {
return res.status(400).json({ error: validationError });
}

// Enforce agent identity: if authenticated with an agent key, source_agent must match
if (req.authenticatedAgent && source_agent !== req.authenticatedAgent) {
return res.status(403).json({
Expand All @@ -49,14 +44,27 @@ memoryRouter.post('/', async (req, res) => {
}
}

// Scrub credentials
const cleanContent = scrubCredentials(content);
const { normalized, contentHash, error: normalizationError } = normalizeMemoryRecord({
...req.body,
type,
content,
source_agent,
client_id,
category,
importance,
knowledge_category,
metadata,
});

if (normalizationError) {
return res.status(400).json({ error: normalizationError });
}

// Generate content hash for dedup
const contentHash = crypto.createHash('sha256').update(cleanContent).digest('hex').slice(0, 16);
({ type, content, source_agent, client_id, category, importance, knowledge_category, metadata } = normalized);
const cleanContent = normalized.content;

// --- Deduplication check ---
const duplicates = await findByPayload('content_hash', contentHash, { active: true });
const duplicates = await findByPayload('content_hash', contentHash, buildDedupExtraFilter(client_id, type));
if (duplicates.length > 0) {
const existing = duplicates[0];
const existingObservedBy = existing.payload.observed_by || [existing.payload.source_agent];
Expand Down
57 changes: 57 additions & 0 deletions api/src/services/memory-write-utils.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,57 @@
import crypto from 'crypto';
import { scrubCredentials } from './scrub.js';
import { validateMemoryInput } from '../middleware/validate.js';

/**
 * Build the extra Qdrant payload filter used during content-hash
 * deduplication: only active memories in the same tenant/type scope
 * count as duplicates.
 *
 * @param {string|null|undefined} clientId - Tenant id; falsy falls back to 'global'.
 * @param {string|undefined} type - Memory type to scope the dedup check to.
 * @returns {{active: boolean, client_id: string, type: (string|undefined)}}
 */
export function buildDedupExtraFilter(clientId, type) {
  const scopedClientId = clientId || 'global';
  const filter = { active: true, client_id: scopedClientId };
  filter.type = type;
  return filter;
}

/**
 * Normalize a raw memory record (from POST /memory or bulk import) and
 * validate it.
 *
 * Applies defaults, scrubs credentials from the content, runs the shared
 * input validation, and computes the content hash used for deduplication
 * (SHA-256, first 16 hex chars — matches the memory.js pattern).
 *
 * @param {object} [record] - Raw record; content may be in `content` or `text`.
 * @param {object} [options]
 * @param {string} [options.defaultType] - Type to use when the record has none.
 * @param {string} [options.defaultSourceAgent] - Fallback source agent.
 * @returns {{normalized?: object, contentHash?: string, error?: string}}
 *   On validation failure only `error` is set; otherwise `normalized` and
 *   `contentHash` are set and `error` is absent.
 */
export function normalizeMemoryRecord(record = {}, options = {}) {
  const {
    defaultType,
    defaultSourceAgent,
  } = options;

  const rawContent = record.content || record.text || '';
  const normalized = {
    ...record,
    type: record.type || defaultType,
    content: scrubCredentials(rawContent),
    source_agent: record.source_agent || defaultSourceAgent,
    client_id: record.client_id || 'global',
    category: record.category || 'episodic',
    importance: record.importance || 'medium',
    knowledge_category: record.knowledge_category || 'general',
  };

  const validationError = validateMemoryInput({
    type: normalized.type,
    content: normalized.content,
    source_agent: normalized.source_agent,
    importance: normalized.importance,
    client_id: normalized.client_id,
    key: normalized.key,
    subject: normalized.subject,
    status_value: normalized.status_value,
    // Forward metadata so its type/size/depth checks are enforced here,
    // matching the validation POST /memory previously applied to req.body.
    metadata: normalized.metadata,
  });

  if (validationError) {
    return { error: validationError };
  }

  return {
    normalized,
    contentHash: crypto.createHash('sha256').update(normalized.content).digest('hex').slice(0, 16),
  };
}

/**
 * Normalize a record from the bulk-import endpoint.
 * Import rows default to type 'event' and source agent 'import'.
 *
 * @param {object} [record] - Raw import record (content in `content` or `text`).
 * @returns {{normalized?: object, contentHash?: string, error?: string}}
 */
export function normalizeImportRecord(record = {}) {
  const importDefaults = {
    defaultType: 'event',
    defaultSourceAgent: 'import',
  };
  return normalizeMemoryRecord(record, importDefaults);
}
1 change: 1 addition & 0 deletions api/src/services/stores/postgres.js
Original file line number Diff line number Diff line change
Expand Up @@ -241,6 +241,7 @@ export class PostgresStore {

if (filters.source_agent) { sql += ` AND source_agent = $${i++}`; params.push(filters.source_agent); }
if (filters.category) { sql += ` AND category = $${i++}`; params.push(filters.category); }
if (filters.client_id) { sql += ` AND client_id = $${i++}`; params.push(filters.client_id); }
if (filters.subject) { sql += ` AND subject ILIKE $${i++}`; params.push(`%${filters.subject}%`); }

sql += ' ORDER BY updated_at DESC LIMIT 50';
Expand Down
19 changes: 1 addition & 18 deletions api/src/services/stores/sqlite.js
Original file line number Diff line number Diff line change
Expand Up @@ -125,24 +125,6 @@ export class SQLiteStore {
console.warn('[sqlite] idx_eml_unique creation failed:', e.message);
}
}

// Entity relationships table
this.db.exec(`
CREATE TABLE IF NOT EXISTS entity_relationships (
id INTEGER PRIMARY KEY AUTOINCREMENT,
source_entity_id INTEGER REFERENCES entities(id),
target_entity_id INTEGER REFERENCES entities(id),
relationship_type TEXT NOT NULL DEFAULT 'co_occurrence',
strength INTEGER DEFAULT 1,
created_at TEXT DEFAULT (datetime('now')),
updated_at TEXT DEFAULT (datetime('now')),
UNIQUE(source_entity_id, target_entity_id, relationship_type)
);

CREATE INDEX IF NOT EXISTS idx_er_source ON entity_relationships(source_entity_id);
CREATE INDEX IF NOT EXISTS idx_er_target ON entity_relationships(target_entity_id);
`);

// FTS5 virtual table for keyword search (BM25)
try {
this.db.exec(`
Expand Down Expand Up @@ -294,6 +276,7 @@ export class SQLiteStore {

if (filters.source_agent) { sql += ' AND source_agent = @source_agent'; params.source_agent = filters.source_agent; }
if (filters.category) { sql += ' AND category = @category'; params.category = filters.category; }
if (filters.client_id) { sql += ' AND client_id = @client_id'; params.client_id = filters.client_id; }
if (filters.subject) { sql += ' AND subject LIKE @subject'; params.subject = `%${filters.subject}%`; }

sql += ' ORDER BY updated_at DESC LIMIT 50';
Expand Down
Loading
Loading