Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 16 additions & 0 deletions .github/workflows/deploy-api-production.yml
Original file line number Diff line number Diff line change
Expand Up @@ -96,6 +96,22 @@ jobs:
echo "API healthy (${IMAGE_TAG})"
REMOTE

      # Run schema migrations inside the already-deployed API container so the
      # migration executes with the exact image and environment production serves.
      - name: Run database migration
        env:
          SSH_HOST: ${{ secrets.PRODUCTION_DEPLOY_HOST }}
          SSH_USER: ${{ secrets.PRODUCTION_DEPLOY_USER }}
          # Default application directory when the secret is not configured.
          APP_DIR: ${{ secrets.PRODUCTION_APP_DIR || '/opt/buywhere' }}
        run: |
          # The heredoc delimiter is quoted ('REMOTE'), so nothing expands on the
          # runner; APP_DIR is forwarded explicitly via `env` on the remote
          # command line because SSH does not propagate the step's env vars.
          ssh -i ~/.ssh/id_ed25519 "${SSH_USER}@${SSH_HOST}" \
            env APP_DIR="${APP_DIR}" \
            bash -s <<'REMOTE'
          set -euo pipefail
          echo "Running database migration..."
          cd "${APP_DIR}"
          # -T: disable TTY allocation — required for non-interactive CI sessions.
          docker compose exec -T api node dist/migrate.js
          echo "Migration complete."
          REMOTE

- name: Smoke test agent-readiness headers
run: |
sleep 3
Expand Down
101 changes: 101 additions & 0 deletions .github/workflows/inject-posthog-vm.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,101 @@
name: Inject PostHog Key to Production VM

on:
  workflow_dispatch:

permissions:
  contents: read

jobs:
  inject-posthog-vm:
    name: Inject POSTHOG_API_KEY to Production VM
    runs-on: ubuntu-latest
    environment: production
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Set up SSH agent
        uses: webfactory/ssh-agent@v0.9.0
        with:
          ssh-private-key: ${{ secrets.PRODUCTION_DEPLOY_SSH_KEY }}

      - name: Trust production host
        run: |
          mkdir -p ~/.ssh
          ssh-keyscan -p "${{ secrets.PRODUCTION_DEPLOY_PORT || 22 }}" -H "${{ secrets.PRODUCTION_DEPLOY_HOST }}" >> ~/.ssh/known_hosts

      - name: Detect service and inject POSTHOG_API_KEY
        env:
          DEPLOY_HOST: ${{ secrets.PRODUCTION_DEPLOY_HOST }}
          DEPLOY_PORT: ${{ secrets.PRODUCTION_DEPLOY_PORT || 22 }}
          DEPLOY_USER: ${{ secrets.PRODUCTION_DEPLOY_USER }}
          POSTHOG_KEY: ${{ secrets.POSTHOG_API_KEY_PRODUCTION }}
        run: |
          # BUG FIX: the heredoc delimiter is quoted ('EOF'), so $POSTHOG_KEY was
          # never expanded on the runner, and SSH does not forward the step's env
          # vars to the remote shell — under `set -u` the remote script aborted on
          # an unbound variable. Forward the key explicitly with `env` on the
          # remote command line (same pattern as deploy-api-production.yml).
          ssh -p "$DEPLOY_PORT" "$DEPLOY_USER@$DEPLOY_HOST" \
            env POSTHOG_KEY="$POSTHOG_KEY" \
            bash -s << 'EOF'
          set -euo pipefail

          echo "=== Detecting service management ==="

          # Check for systemd service
          if systemctl list-unit-files 2>/dev/null | grep -qiE 'buywhere|fastapi|uvicorn'; then
            echo "Detected: systemd-managed"
            SVC=$(systemctl list-units --type=service --all 2>/dev/null | grep -iE 'buywhere|fastapi|uvicorn' | awk '{print $1}' | head -1)
            # Fallback must carry the .service suffix so the drop-in dir is valid.
            SVC="${SVC:-buywhere-api.service}"

            # Write a proper systemd drop-in. A bare KEY=value line is not valid
            # unit-file syntax; systemd requires an Environment= directive inside
            # a [Service] section (repeated [Service] sections are merged).
            DROP_DIR="/etc/systemd/system/${SVC}.d"
            OVERRIDE="${DROP_DIR}/override.conf"
            mkdir -p "$DROP_DIR"
            if ! grep -q "POSTHOG_API_KEY=" "$OVERRIDE" 2>/dev/null; then
              printf '[Service]\nEnvironment="POSTHOG_API_KEY=%s"\n' "$POSTHOG_KEY" >> "$OVERRIDE"
            fi
            systemctl daemon-reload
            systemctl restart "$SVC" || true
            echo "Done: systemd updated and service restarted"

          # Check for PM2
          elif command -v pm2 &>/dev/null && pm2 list 2>/dev/null | grep -qiE 'buywhere|api|fastapi'; then
            echo "Detected: PM2-managed"
            PM2_NAME=$(pm2 list 2>/dev/null | grep -iE 'buywhere|api|fastapi' | awk '{print $2}' | head -1)
            if [ -n "$PM2_NAME" ]; then
              export POSTHOG_API_KEY="$POSTHOG_KEY"
              # --update-env makes PM2 re-capture the current (exported) env; a
              # plain restart reuses the environment recorded at `pm2 start`.
              pm2 restart "$PM2_NAME" --update-env || true
              echo "Done: PM2 process restarted"
              echo "NOTE: persist POSTHOG_API_KEY in the ecosystem file so it survives pm2 resurrect"
            fi

          # Check for Docker
          elif command -v docker &>/dev/null && docker ps 2>/dev/null | grep -qiE 'buywhere|api'; then
            echo "Detected: Docker-managed"
            CONTAINER=$(docker ps 2>/dev/null | grep -iE 'buywhere|api' | awk '{print $1}' | head -1)
            if [ -n "$CONTAINER" ]; then
              # `docker restart` restores the container's ORIGINAL environment —
              # the key must be present in the compose env file for the restarted
              # container to see it. (A `docker exec env VAR=...` only affects the
              # exec'd process and was a no-op here.)
              docker restart "$CONTAINER" 2>/dev/null || true
              echo "Done: Docker container restarted"
              echo "WARNING: ensure POSTHOG_API_KEY is defined in the compose/.env file; restart keeps the original container env"
            fi

          # Check for raw process
          elif pgrep -f "uvicorn\|gunicorn" &>/dev/null; then
            echo "Detected: Raw process (uvicorn/gunicorn)"
            # /etc/environment is only read at login, so the value applies to the
            # next session that launches the service, not the killed process.
            if ! grep -q "POSTHOG_API_KEY=" /etc/environment 2>/dev/null; then
              echo "POSTHOG_API_KEY=$POSTHOG_KEY" >> /etc/environment
            fi
            PID=$(pgrep -f "uvicorn\|gunicorn" | head -1)
            echo "Process PID: $PID — sending SIGTERM"
            kill -TERM "$PID" 2>/dev/null || true
            sleep 2
            echo "WARNING: Manual restart required for raw process"
          else
            echo "No known service found — adding to /etc/environment"
            if ! grep -q "POSTHOG_API_KEY=" /etc/environment 2>/dev/null; then
              echo "POSTHOG_API_KEY=$POSTHOG_KEY" >> /etc/environment
            fi
            echo "Added to /etc/environment — verify service picks it up"
          fi

          echo "=== PostHog key injection complete ==="
          EOF
32 changes: 32 additions & 0 deletions api/dist/analytics/posthog.js
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,8 @@ Object.defineProperty(exports, "__esModule", { value: true });
exports.trackApiQuery = trackApiQuery;
exports.trackAffiliateClick = trackAffiliateClick;
exports.trackRegistration = trackRegistration;
exports.trackProductSearch = trackProductSearch;
exports.trackProductView = trackProductView;
exports.trackComparePageView = trackComparePageView;
exports.trackCompareRetailerClick = trackCompareRetailerClick;
exports.shutdownPostHog = shutdownPostHog;
Expand Down Expand Up @@ -77,6 +79,36 @@ function trackRegistration(apiKey, agentName, signupChannel, utmSource) {
},
});
}
/**
 * Capture a `product_search` analytics event in PostHog.
 *
 * @param {{apiKey?: string, queryText: string, resultCount: number,
 *          responseTimeMs: number, sourcePage?: string}} event
 *     Search metadata; `apiKey` is the (hashed) caller identity used as the
 *     PostHog distinct id.
 * No-op when analytics is disabled (getClient() returns a falsy client).
 */
function trackProductSearch(event) {
    const ph = getClient();
    if (!ph)
        return;
    ph.capture({
        // Fall back to 'anonymous' when no API key is attached, matching the
        // behavior of the sibling trackProductView (consistency fix — the
        // original passed undefined as distinctId for key-less events).
        distinctId: event.apiKey || 'anonymous',
        event: 'product_search',
        properties: {
            query_text: event.queryText,
            result_count: event.resultCount,
            response_time_ms: event.responseTimeMs,
            source_page: event.sourcePage,
        },
    });
}
/**
 * Capture a `product_view` analytics event in PostHog.
 * Silently does nothing when no PostHog client is configured.
 */
function trackProductView(event) {
    const client = getClient();
    if (!client) {
        return;
    }
    // Assemble the payload first, then hand it to PostHog in one call.
    const properties = {
        product_id: event.productId,
        retailer: event.retailer,
        category: event.category,
        source: event.source,
    };
    client.capture({
        distinctId: event.apiKey || 'anonymous',
        event: 'product_view',
        properties,
    });
}
function trackComparePageView(event) {
const ph = getClient();
if (!ph)
Expand Down
3 changes: 3 additions & 0 deletions api/dist/mcp-server.js
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,9 @@ app.get('/health', async (_req, res) => {
res.status(500).json({ status: 'error', error: String(err) });
}
});
// Lightweight liveness probe for the MCP server — always 200, no dependency checks.
app.get('/healthz', (_req, res) => res.json({ status: 'ok', server: 'mcp' }));
app.use('/mcp', mcp_1.default);
// JSON-RPC root alias — allow POST / as shorthand for POST /mcp
app.use('/', mcp_1.default);
Expand Down
113 changes: 95 additions & 18 deletions api/dist/middleware/apiKey.js
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,8 @@ exports.requireApiKey = requireApiKey;
exports.checkRateLimit = checkRateLimit;
const crypto_1 = require("crypto");
const config_1 = require("../config");
const errors_1 = require("./errors");
const PAPERCLIP_API_URL = process.env.PAPERCLIP_API_URL || 'https://api.paperclip.ai';
const TIER_LIMITS = {
free: config_1.FREE_TIER,
pro: { rpm: 300, daily: 10000 },
Expand All @@ -13,6 +15,65 @@ const TIER_LIMITS = {
// SHA-256 an API key and return the lowercase hex digest; only hashes are
// stored/compared, never the raw key.
function hashKey(rawKey) {
    const hasher = crypto_1.createHash('sha256');
    hasher.update(rawKey);
    return hasher.digest('hex');
}
// Decode a base64url string (RFC 4648 §5, '-'/'_' alphabet, padding optional)
// into a UTF-8 string.
function base64UrlDecode(s) {
    // Translate the URL-safe alphabet back to standard base64 before decoding.
    const standard = s.replaceAll('-', '+').replaceAll('_', '/');
    return Buffer.from(standard, 'base64').toString('utf8');
}
// True iff a decoded JWT payload carries the Paperclip issuer/audience pair.
// Caller guarantees `payload` is a non-null object.
function isPaperclipJwtPayload(payload) {
    const { iss, aud } = payload;
    return iss === 'paperclip' && aud === 'paperclip-api';
}
/**
 * Validate a Paperclip bearer token against the Paperclip API.
 * Calls GET /api/agents/me with a 10s timeout; returns the agent record when
 * the API answers 200 with a truthy `id`, otherwise null (including on any
 * network/timeout/parse error — verification failures are never thrown).
 */
async function verifyPaperclipTokenWithApi(token) {
    try {
        const response = await fetch(`${PAPERCLIP_API_URL}/api/agents/me`, {
            headers: { Authorization: `Bearer ${token}` },
            signal: AbortSignal.timeout(10000),
        });
        if (response.status !== 200) {
            return null;
        }
        const body = await response.json();
        return body.id ? body : null;
    }
    catch {
        // Treat any failure (abort, DNS, non-JSON body) as "not verified".
        return null;
    }
}
/**
 * Look up an active api_keys row previously provisioned for a Paperclip agent.
 * Returns the row, or null when none exists.
 * NOTE(review): this matches on name = agentId, while upsertPaperclipAgentKey
 * inserts rows with name = agentName — unless those two values coincide, this
 * lookup never hits; confirm which value `name` is meant to hold.
 */
async function resolvePaperclipAgentKey(agentId) {
    const result = await config_1.db.query(`SELECT id, key_hash, name, tier, signup_channel, attribution_source
     FROM api_keys
     WHERE signup_channel = 'paperclip_agent'
       AND name = $1
       AND is_active = true`, [agentId]);
    if (result.rows.length > 0) {
        const row = result.rows[0];
        // Best-effort last_used_at touch: fire-and-forget, errors deliberately
        // swallowed so a failed UPDATE can never break authentication.
        config_1.db.query('UPDATE api_keys SET last_used_at = NOW() WHERE key_hash = $1', [row.key_hash]).catch(() => { });
        return row;
    }
    return null;
}
/**
 * Find-or-create the api_keys row backing a verified Paperclip agent.
 * The row is keyed by sha256(agentId) (deterministic, so repeated calls hit the
 * same row via ON CONFLICT) and created at enterprise tier with the
 * 'paperclip_agent' signup channel.
 * NOTE(review): companyId is written into the developer_id column — confirm
 * that column mapping is intentional.
 */
async function upsertPaperclipAgentKey(agentId, agentName, companyId) {
    // Fast path: reuse an existing active row. NOTE(review): the resolver
    // searches name = agentId but the INSERT below stores name = agentName, so
    // this lookup only hits when the two values are equal; otherwise every call
    // falls through to the idempotent upsert.
    const existing = await resolvePaperclipAgentKey(agentId);
    if (existing)
        return existing;
    const keyHash = hashKey(agentId);
    const result = await config_1.db.query(`INSERT INTO api_keys (key_hash, name, tier, signup_channel, developer_id, rpm_limit, daily_limit)
     VALUES ($1, $2, 'enterprise', 'paperclip_agent', $3, 1000, 100000)
     ON CONFLICT (key_hash) DO UPDATE SET last_used_at = NOW()
     RETURNING id, key_hash, name, tier, signup_channel, attribution_source`, [keyHash, agentName, companyId || null]);
    return result.rows[0];
}
/**
 * Best-effort decode of a JWT's payload segment WITHOUT verifying the
 * signature. Returns the parsed payload object, or null when the token is not
 * three dot-separated segments or the payload is not valid JSON.
 */
function decodeJwtPayload(token) {
    const segments = token.split('.');
    if (segments.length !== 3) {
        return null;
    }
    try {
        // Segment 1 (header.PAYLOAD.signature) is base64url-encoded JSON.
        return JSON.parse(base64UrlDecode(segments[1]));
    }
    catch {
        return null;
    }
}
async function requireApiKey(req, res, next) {
const authHeader = req.headers['authorization'] || '';
const queryKey = req.query['api_key'];
Expand All @@ -27,17 +88,43 @@ async function requireApiKey(req, res, next) {
key = queryKey;
}
if (!key) {
res.status(401).json({ error: 'API key required. Pass as Authorization: Bearer <key>' });
(0, errors_1.sendError)(res, errors_1.ErrorCode.MISSING_API_KEY);
return;
}
// Detect Paperclip JWT — decode payload without signature verification
const jwtPayload = decodeJwtPayload(key);
if (jwtPayload && isPaperclipJwtPayload(jwtPayload)) {
const agentInfo = await verifyPaperclipTokenWithApi(key);
if (agentInfo) {
const row = await upsertPaperclipAgentKey(agentInfo.id, agentInfo.name, agentInfo.companyId);
req.apiKeyRecord = {
id: row.id,
key,
agentName: row.name,
tier: row.tier,
rpmLimit: TIER_LIMITS.enterprise.rpm,
dailyLimit: TIER_LIMITS.enterprise.daily,
signupChannel: row.signup_channel,
attributionSource: row.attribution_source,
};
next();
return;
}
(0, errors_1.sendError)(res, errors_1.ErrorCode.INVALID_API_KEY, 'Invalid Paperclip token');
return;
}
const keyHash = hashKey(key);
const result = await config_1.db.query(`SELECT id, key_hash, name, tier, signup_channel, attribution_source
FROM api_keys WHERE key_hash = $1 AND is_active = true`, [keyHash]);
const result = await config_1.db.query(`SELECT id, key_hash, name, tier, signup_channel, attribution_source, is_active
FROM api_keys WHERE key_hash = $1`, [keyHash]);
if (result.rows.length === 0) {
res.status(401).json({ error: 'Invalid API key' });
(0, errors_1.sendError)(res, errors_1.ErrorCode.INVALID_API_KEY);
return;
}
const row = result.rows[0];
if (!row.is_active) {
(0, errors_1.sendError)(res, errors_1.ErrorCode.REVOKED_API_KEY);
return;
}
const tierLimits = TIER_LIMITS[row.tier] ?? config_1.FREE_TIER;
req.apiKeyRecord = {
id: row.id,
Expand Down Expand Up @@ -71,34 +158,24 @@ async function checkRateLimit(req, res, next) {
config_1.redis.incr(rpmKey),
config_1.redis.incr(dailyKey),
]);
// Set TTL on first increment
if (rpmCount === 1)
config_1.redis.expire(rpmKey, 120).catch(() => { });
if (dailyCount === 1)
config_1.redis.expire(dailyKey, 172800).catch(() => { });
}
catch (_err) {
// Redis unavailable — fail open and allow the request through.
// This is preferable to hanging requests when Redis is down.
console.warn('[rate-limit] Redis unavailable, skipping rate limit check');
next();
return;
}
if (rpmCount > req.apiKeyRecord.rpmLimit) {
res.status(429).json({
error: 'Rate limit exceeded',
limit: req.apiKeyRecord.rpmLimit,
window: 'per_minute',
retry_after: 60 - (now % 60000) / 1000,
});
const retryAfter = Math.ceil(60 - (now % 60000) / 1000);
(0, errors_1.sendRateLimitError)(res, retryAfter, req.apiKeyRecord.rpmLimit, 0, 'Per-minute rate limit exceeded.');
return;
}
if (dailyCount > req.apiKeyRecord.dailyLimit) {
res.status(429).json({
error: 'Daily limit exceeded',
limit: req.apiKeyRecord.dailyLimit,
window: 'per_day',
});
const retryAfter = Math.ceil(86400 - (now % 86400000) / 1000);
(0, errors_1.sendRateLimitError)(res, retryAfter, req.apiKeyRecord.dailyLimit, 0, 'Daily rate limit exceeded.');
return;
}
next();
Expand Down
18 changes: 15 additions & 3 deletions api/dist/routes/products.js
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ const router = (0, express_1.Router)();
// GET /v1/products/search
// Query params: q, domain, region, country, min_price, max_price, currency, limit, offset, source_page
router.get('/search', agentDetect_1.agentDetectMiddleware, apiKey_1.requireApiKey, apiKey_1.checkRateLimit, (0, queryLog_1.queryLogMiddleware)('products.search'), async (req, res) => {
const start = Date.now();
const requestStart = Date.now();
const q = req.query.q || '';
const domain = req.query.domain;
const region = req.query.region;
Expand All @@ -37,7 +37,7 @@ router.get('/search', agentDetect_1.agentDetectMiddleware, apiKey_1.requireApiKe
const cached = await config_1.redis.get(cacheKey);
if (cached) {
const parsed = JSON.parse(cached);
const elapsed = Date.now() - start;
const elapsed = Date.now() - requestStart;
// compact envelope uses flat keys; legacy uses nested meta
if (parsed.meta) {
parsed.meta.cached = true;
Expand Down Expand Up @@ -152,7 +152,7 @@ router.get('/search', agentDetect_1.agentDetectMiddleware, apiKey_1.requireApiKe
params.push(limit, offset);
const dataResult = await config_1.db.query(dataQuery, params);
const total = parseInt(countResult.rows[0].count, 10);
const responseTimeMs = Date.now() - start;
const responseTimeMs = Date.now() - requestStart;
const products = dataResult.rows.map((row) => {
if (compact) {
// Compact format for AI agents (BUY-2073): Phase 2 shape.
Expand Down Expand Up @@ -250,6 +250,12 @@ router.get('/search', agentDetect_1.agentDetectMiddleware, apiKey_1.requireApiKe
sourcePage: sourcePage || null,
endpoint: 'products.search',
});
(0, posthog_1.trackProductSearch)({
apiKey: (0, apiKey_1.hashKey)(req.apiKeyRecord.key),
queryText: q,
resultCount: products.length,
responseTimeMs,
});
}
res.json(responseBody);
});
Expand Down Expand Up @@ -553,6 +559,12 @@ router.get('/:id', agentDetect_1.agentDetectMiddleware, apiKey_1.requireApiKey,
sourcePage: null,
endpoint: 'products.get',
});
(0, posthog_1.trackProductView)({
apiKey: (0, apiKey_1.hashKey)(req.apiKeyRecord.key),
productId: row.id,
retailer: row.domain,
category: (row.category_path ? row.category_path.split(' > ')[0] : null),
});
}
res.json({ data: product });
});
Expand Down
Loading