diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 00000000..0e19a1d1 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,41 @@ +# Git +.git +.gitignore + +# Docs (except README) +docs/ +notebooks/ +assets/ +CHANGELOG.md +CONTRIBUTING.md +SECURITY.md +*.md +!README.md + +# Dev files +.vscode/ +.github/ +.env +.env.* +!.env.example + +# Python cache +__pycache__/ +*.py[cod] +.pytest_cache/ +.ruff_cache/ +.coverage +*.egg-info/ +dist/ +.venv/ + +# Frontend (built separately) +src/frontend/ + +# Tests +tests/ + +# Logs/data +*.log +logs/ +.var/ diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml new file mode 100644 index 00000000..df2e2800 --- /dev/null +++ b/.github/workflows/docker.yml @@ -0,0 +1,55 @@ +# Docker Build & Push Workflow +name: Docker + +on: + push: + branches: [main] + tags: ["v*"] + pull_request: + branches: [main] + +permissions: + contents: read + packages: write + +env: + REGISTRY: ghcr.io + +jobs: + build: + name: Build & Push + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Log in to GHCR + if: github.event_name != 'pull_request' + uses: docker/login-action@v3 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Set lowercase image name + run: echo "IMAGE_NAME=$(echo ${{ github.repository }} | tr '[:upper:]' '[:lower:]')" >> $GITHUB_ENV + + - name: Build backend + uses: docker/build-push-action@v6 + with: + context: . 
+ push: ${{ github.event_name != 'pull_request' }} + tags: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ github.sha }},${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest + cache-from: type=gha + cache-to: type=gha,mode=max + + - name: Build frontend + uses: docker/build-push-action@v6 + with: + context: ./src/frontend + push: ${{ github.event_name != 'pull_request' }} + tags: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}-ui:${{ github.sha }},${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}-ui:latest + cache-from: type=gha + cache-to: type=gha,mode=max diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 00000000..c0c04afb --- /dev/null +++ b/Dockerfile @@ -0,0 +1,52 @@ +# syntax=docker/dockerfile:1 +# ============================================================================ +# AgenticFleet Backend Dockerfile +# Optimized multi-stage build for production +# ============================================================================ + +FROM python:3.12-slim AS builder + +# Install uv (pinned version for reproducibility) +COPY --from=ghcr.io/astral-sh/uv:0.8.0 /uv /usr/local/bin/uv + +WORKDIR /app + +# Copy only dependency files first (better caching) +COPY pyproject.toml uv.lock ./ + +# Sync dependencies WITHOUT installing the project itself +ENV UV_COMPILE_BYTECODE=1 UV_LINK_MODE=copy +RUN uv sync --frozen --no-install-project --no-dev + +# Copy source and install project +COPY src/agentic_fleet ./src/agentic_fleet +COPY README.md LICENSE ./ +RUN uv sync --frozen --no-dev + +# ----------------------------------------------------------------------------- +# Runtime stage - minimal image +# ----------------------------------------------------------------------------- +FROM python:3.12-slim + +RUN apt-get update && apt-get install -y --no-install-recommends curl \ + && rm -rf /var/lib/apt/lists/* \ + && useradd -m -s /bin/bash app + +WORKDIR /app + +# Copy only the virtual environment (source is installed in venv) +COPY --from=builder --chown=app:app /app/.venv 
./.venv + +# Create data directories with correct permissions +RUN mkdir -p .var/logs .var/cache .var/data && chown -R app:app .var + +ENV PATH="/app/.venv/bin:$PATH" \ + PYTHONUNBUFFERED=1 + +USER app +EXPOSE 8000 + +HEALTHCHECK --interval=30s --timeout=10s --start-period=15s --retries=3 \ + CMD curl -f http://localhost:8000/health || exit 1 + +CMD ["python", "-m", "uvicorn", "agentic_fleet.main:app", "--host", "0.0.0.0", "--port", "8000"] diff --git a/README.md b/README.md index 644393f7..d2e8bae5 100644 --- a/README.md +++ b/README.md @@ -88,6 +88,21 @@ agentic-fleet run -m "Research the latest advances in AI agents" --verbose agentic-fleet dev ``` +### 🐳 Docker + +```bash +# Quick start with Docker Compose +# Clone the repo (skip if you're already in the project directory) +git clone https://github.com/Qredence/agentic-fleet.git +cd agentic-fleet +cp .env.example .env # Add your OPENAI_API_KEY +docker compose up -d + +# Access: Frontend http://localhost:3000, API http://localhost:8000 +``` + +See [Docker Deployment Guide](docs/guides/docker-deployment.md) for production setup. 
+ ## 📖 Usage ### CLI diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 00000000..2b7c8e39 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,84 @@ +# ============================================================================ +# AgenticFleet Docker Compose +# Production-ready setup matching the project architecture +# ============================================================================ +# +# The project uses: +# - Backend (FastAPI) on port 8000 for API endpoints +# - Frontend (Vite/React) proxied through nginx on port 3000 +# +# Usage: +# docker compose up -d # Start full stack +# docker compose up -d backend # Backend only +# docker compose --profile tracing up -d # With Jaeger +# +# ============================================================================ + +services: + # -------------------------------------------------------------------------- + # Backend API (FastAPI + uvicorn) + # -------------------------------------------------------------------------- + backend: + build: + context: . 
+ dockerfile: Dockerfile + image: ghcr.io/qredence/agentic-fleet:latest + container_name: agentic-fleet-backend + restart: unless-stopped + ports: + - "8000:8000" + environment: + # Required + - OPENAI_API_KEY=${OPENAI_API_KEY:-} + # Optional API keys + - TAVILY_API_KEY=${TAVILY_API_KEY:-} + - AZURE_OPENAI_ENDPOINT=${AZURE_OPENAI_ENDPOINT:-} + - AZURE_OPENAI_API_KEY=${AZURE_OPENAI_API_KEY:-} + # Settings + - LOG_LEVEL=${LOG_LEVEL:-INFO} + # Tracing + - ENABLE_OTEL=${ENABLE_OTEL:-false} + - OTLP_ENDPOINT=http://jaeger:4317 + volumes: + - backend-data:/app/.var + networks: + - agentic-net + # Healthcheck defined in Dockerfile + + # -------------------------------------------------------------------------- + # Frontend (Vite production build served by nginx) + # -------------------------------------------------------------------------- + frontend: + build: + context: ./src/frontend + dockerfile: Dockerfile + image: ghcr.io/qredence/agentic-fleet-ui:latest + container_name: agentic-fleet-frontend + restart: unless-stopped + ports: + - "3000:80" + depends_on: + backend: + condition: service_healthy + networks: + - agentic-net + + # -------------------------------------------------------------------------- + # Jaeger Tracing + # -------------------------------------------------------------------------- + jaeger: + image: jaegertracing/all-in-one:1.59 + container_name: agentic-fleet-jaeger + profiles: [tracing] + ports: + - "4317:4317" + - "16686:16686" + networks: + - agentic-net + +networks: + agentic-net: + driver: bridge + +volumes: + backend-data: diff --git a/docs/guides/docker-deployment.md b/docs/guides/docker-deployment.md new file mode 100644 index 00000000..30f883f3 --- /dev/null +++ b/docs/guides/docker-deployment.md @@ -0,0 +1,49 @@ +# Docker Deployment + +## Quick Start + +```bash +# 1. Configure environment +cp .env.example .env +# Edit .env and add OPENAI_API_KEY + +# 2. Start services +docker compose up -d + +# 3. 
Access +# Frontend: http://localhost:3000 +# API Docs: http://localhost:8000/docs +``` + +## Commands + +```bash +docker compose up -d # Start all +docker compose up -d backend # Backend only +docker compose --profile tracing up -d # With Jaeger tracing +docker compose logs -f # View logs +docker compose down # Stop +``` + +## Environment Variables + +| Variable | Required | Description | +|----------|----------|-------------| +| `OPENAI_API_KEY` | Yes | OpenAI API key | +| `TAVILY_API_KEY` | No | Web search | +| `AZURE_OPENAI_*` | No | Azure OpenAI config | +| `ENABLE_OTEL` | No | Enable tracing | + +## Production + +For production, use pre-built images: + +```bash +docker pull ghcr.io/qredence/agentic-fleet:latest +docker pull ghcr.io/qredence/agentic-fleet-ui:latest +``` + +Or build locally: +```bash +docker compose build +``` diff --git a/src/frontend/.dockerignore b/src/frontend/.dockerignore new file mode 100644 index 00000000..e9184df4 --- /dev/null +++ b/src/frontend/.dockerignore @@ -0,0 +1,4 @@ +node_modules/ +dist/ +*.log +.env* diff --git a/src/frontend/Dockerfile b/src/frontend/Dockerfile new file mode 100644 index 00000000..f7b12293 --- /dev/null +++ b/src/frontend/Dockerfile @@ -0,0 +1,31 @@ +# ============================================================================ +# AgenticFleet Frontend Dockerfile +# Lightweight nginx-based production image +# ============================================================================ + +FROM node:20-alpine AS builder + +WORKDIR /app +COPY package*.json ./ +RUN npm ci --legacy-peer-deps +COPY . . 
+RUN npm run build
+
+# -----------------------------------------------------------------------------
+# Production - nginx alpine (~25MB)
+# -----------------------------------------------------------------------------
+FROM nginx:alpine
+
+# Install wget for healthcheck and remove default config
+RUN apk add --no-cache wget && rm /etc/nginx/conf.d/default.conf
+COPY nginx.conf /etc/nginx/conf.d/default.conf
+
+# Copy built assets
+COPY --from=builder /app/dist /usr/share/nginx/html
+
+EXPOSE 80
+
+HEALTHCHECK --interval=30s --timeout=5s --retries=3 \
+  CMD wget -q --spider http://localhost/ || exit 1
+
+CMD ["nginx", "-g", "daemon off;"]
diff --git a/src/frontend/nginx.conf b/src/frontend/nginx.conf
new file mode 100644
index 00000000..51a64b66
--- /dev/null
+++ b/src/frontend/nginx.conf
@@ -0,0 +1,50 @@
+# Nginx config for AgenticFleet frontend
+# Serves SPA with API proxy to backend
+
+upstream api {
+    server backend:8000;
+}
+
+server {
+    listen 80;
+    root /usr/share/nginx/html;
+    index index.html;
+
+    # Gzip
+    gzip on;
+    gzip_types text/plain text/css application/json application/javascript text/xml image/svg+xml;
+    gzip_vary on;
+    gzip_proxied any;
+    gzip_comp_level 5;
+    gzip_min_length 1024;
+
+    # Proxy API to backend
+    location /api {
+        proxy_pass http://api;
+        proxy_http_version 1.1;
+        proxy_set_header Upgrade $http_upgrade;
+        proxy_set_header Connection "upgrade";
+        proxy_set_header Host $host;
+        proxy_set_header X-Real-IP $remote_addr;
+        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+        proxy_read_timeout 300s;
+        proxy_buffering off;
+    }
+
+    # Health/docs endpoints (dot escaped so the regex matches only "openapi.json")
+    location ~ ^/(health|ready|docs|redoc|openapi\.json) {
+        proxy_pass http://api;
+        proxy_set_header Host $host;
+    }
+
+    # Static assets with cache
+    location ~* \.(js|css|png|jpg|jpeg|gif|ico|svg|woff2?)$ {
+        expires 1y;
+        add_header Cache-Control "public, immutable";
+    }
+
+    # SPA fallback
+    location / {
+        try_files $uri $uri/ /index.html;
+    }
+}
diff --git 
a/src/frontend/src/components/chat/chat-messages.tsx b/src/frontend/src/components/chat/chat-messages.tsx index b1bbd475..37829e11 100644 --- a/src/frontend/src/components/chat/chat-messages.tsx +++ b/src/frontend/src/components/chat/chat-messages.tsx @@ -12,18 +12,6 @@ import { MessageContent, } from "@/components/message"; -/** - * Prepare message content for display. - * - * @param content - Message content to display; non-string values will be serialized - * @param isStreaming - Whether to append a trailing streaming cursor - * @returns The formatted message string; includes a trailing ` ▍` when `isStreaming` is true - */ -function formatMessageContent(content: unknown, isStreaming: boolean): string { - const baseContent = typeof content === "string" ? content : JSON.stringify(content); - return isStreaming ? baseContent + " ▍" : baseContent; -} - export type ChatMessagesProps = { messages: ChatMessage[]; isLoading?: boolean; @@ -141,4 +129,4 @@ export function ChatMessages({ ); -} \ No newline at end of file +}