Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions .github/workflows/test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,11 @@ jobs:
name: Run Tests & Coverage
runs-on: ubuntu-latest

env:
CLERK_SECRET_KEY: ${{ secrets.CLERK_SECRET_KEY }}
CLERK_PUBLISHABLE_KEY: ${{ secrets.CLERK_PUBLISHABLE_KEY }}
DATABASE_URL: ${{ secrets.DATABASE_URL }}

steps:
- name: Checkout code
uses: actions/checkout@v4
Expand Down
Binary file modified README.md
Binary file not shown.
120 changes: 120 additions & 0 deletions backend/alembic/env.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,120 @@
"""
Alembic Environment Configuration for CodeGuard AI.

Este archivo configura Alembic para detectar todos los modelos ORM
y generar migraciones automáticamente contra Supabase PostgreSQL.
"""

import os
import sys
from logging.config import fileConfig

from sqlalchemy import engine_from_config, pool

from alembic import context

# ------------------------------------------------------------------------
# 1. Configuración de Rutas (Path)
# ------------------------------------------------------------------------
# Agregamos el directorio padre (backend/) al path de Python
# para que Alembic pueda encontrar la carpeta 'src'
sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))

# ------------------------------------------------------------------------
# 2. Importación de Modelos y Configuración
# ------------------------------------------------------------------------
# Importamos la Base declarativa y TODOS los modelos
# IMPORTANTE: Cada modelo debe ser importado para que Alembic lo detecte
from src.models import (
Base,
UserEntity,
CodeReviewEntity,
AgentFindingEntity,
ReviewStatus,
SeverityEnum,
UserRole,
)

# Configuración de la base de datos
# Intentamos cargar desde settings, con fallback a variables de entorno
try:
from src.core.config.settings import settings
db_url = settings.DATABASE_URL
except ImportError:
from dotenv import load_dotenv
load_dotenv()
db_url = os.getenv("DATABASE_URL")

if not db_url:
raise ValueError(
"DATABASE_URL no está configurada. "
"Configúrala en .env o en src/core/config/settings.py"
)

# ------------------------------------------------------------------------
# 3. Configuración de Alembic
# ------------------------------------------------------------------------
config = context.config

# Interpretar el archivo de configuración para el logging
if config.config_file_name is not None:
fileConfig(config.config_file_name)

# Asignar la metadata de los modelos para que Alembic pueda "ver" las tablas
target_metadata = Base.metadata


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    Configures the context with only a URL instead of creating an
    Engine — useful for emitting SQL scripts without a live database
    connection.
    """
    offline_options = {
        "url": db_url,
        "target_metadata": target_metadata,
        "literal_binds": True,
        "dialect_opts": {"paramstyle": "named"},
        # Compare column types so autogenerate detects type changes.
        "compare_type": True,
    }
    context.configure(**offline_options)

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    Creates an Engine, connects to the Supabase PostgreSQL database
    and runs the migration scripts inside a transaction.
    """
    # Read the [alembic] section of alembic.ini. Default to an empty
    # dict: get_section() returns None when the section is missing,
    # which would crash on the subscript assignment below.
    configuration = config.get_section(config.config_ini_section, {})

    # Inject the database URL resolved from settings / environment.
    configuration["sqlalchemy.url"] = db_url

    connectable = engine_from_config(
        configuration,
        prefix="sqlalchemy.",
        # Migrations are one-shot; no connection pooling needed.
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
            # Compare column types so autogenerate detects type changes.
            compare_type=True,
        )

        with context.begin_transaction():
            context.run_migrations()


# Entry point: Alembic selects offline mode when invoked with --sql.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
33 changes: 33 additions & 0 deletions backend/alembic/script.py.mako
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, Sequence[str], None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}


def upgrade() -> None:
"""Upgrade schema."""
${upgrades if upgrades else "pass"}


def downgrade() -> None:
"""Downgrade schema."""





${downgrades if downgrades else "pass"}
Original file line number Diff line number Diff line change
@@ -0,0 +1,97 @@
"""create_initial_tables_users_code_reviews_findings

Revision ID: ba48c1bb8e18
Revises:
Create Date: 2025-11-27 02:43:54.598631

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision: str = 'ba48c1bb8e18'
down_revision: Union[str, Sequence[str], None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Create the initial schema: users, code_reviews and agent_findings.

    Creation order matters: code_reviews holds a FK to users and
    agent_findings holds a FK to code_reviews, so parents come first.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('users',
        # String primary key, not a UUID — presumably the external auth
        # provider's (Clerk) user id; confirm against UserEntity.
        sa.Column('id', sa.String(length=255), nullable=False),
        sa.Column('email', sa.String(length=255), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=True),
        sa.Column('avatar_url', sa.String(length=500), nullable=True),
        sa.Column('role', sa.Enum('DEVELOPER', 'ADMIN', name='userrole'), nullable=False),
        sa.Column('daily_analysis_count', sa.Integer(), nullable=False),
        sa.Column('last_analysis_date', sa.Date(), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_users_email'), 'users', ['email'], unique=True)
    op.create_index(op.f('ix_users_role'), 'users', ['role'], unique=False)
    op.create_table('code_reviews',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('user_id', sa.String(length=255), nullable=False),
        sa.Column('filename', sa.String(length=500), nullable=False),
        # Submitted source stored as raw bytes.
        sa.Column('code_content', sa.LargeBinary(), nullable=False),
        sa.Column('quality_score', sa.Integer(), nullable=True),
        sa.Column('status', sa.Enum('PENDING', 'PROCESSING', 'COMPLETED', 'FAILED', name='reviewstatus'), nullable=False),
        sa.Column('total_findings', sa.Integer(), nullable=True),
        sa.Column('error_message', sa.Text(), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('completed_at', sa.DateTime(), nullable=True),
        # Deleting a user removes their reviews.
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_code_reviews_created_at'), 'code_reviews', ['created_at'], unique=False)
    op.create_index(op.f('ix_code_reviews_status'), 'code_reviews', ['status'], unique=False)
    op.create_index(op.f('ix_code_reviews_user_id'), 'code_reviews', ['user_id'], unique=False)
    op.create_table('agent_findings',
        sa.Column('id', sa.UUID(), nullable=False),
        sa.Column('review_id', sa.UUID(), nullable=False),
        sa.Column('agent_type', sa.String(length=100), nullable=False),
        sa.Column('severity', sa.Enum('CRITICAL', 'HIGH', 'MEDIUM', 'LOW', name='severityenum'), nullable=False),
        sa.Column('issue_type', sa.String(length=200), nullable=False),
        sa.Column('line_number', sa.Integer(), nullable=False),
        sa.Column('code_snippet', sa.Text(), nullable=True),
        sa.Column('message', sa.Text(), nullable=False),
        sa.Column('suggestion', sa.Text(), nullable=True),
        # Structured payloads stored as JSONB for indexable querying.
        sa.Column('metrics', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('ai_explanation', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column('mcp_references', postgresql.ARRAY(sa.Text()), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        # Deleting a review removes its findings.
        sa.ForeignKeyConstraint(['review_id'], ['code_reviews.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_agent_findings_agent_type'), 'agent_findings', ['agent_type'], unique=False)
    op.create_index(op.f('ix_agent_findings_review_id'), 'agent_findings', ['review_id'], unique=False)
    op.create_index(op.f('ix_agent_findings_severity'), 'agent_findings', ['severity'], unique=False)
    # ### end Alembic commands ###


def downgrade() -> None:
    """Drop the initial schema, reversing upgrade().

    Tables are dropped child-first (agent_findings -> code_reviews ->
    users) to satisfy foreign-key dependencies. The PostgreSQL ENUM
    types implicitly created by upgrade() are then dropped explicitly:
    drop_table does not remove them, and leaving them behind makes a
    later re-upgrade fail with "type already exists".
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_agent_findings_severity'), table_name='agent_findings')
    op.drop_index(op.f('ix_agent_findings_review_id'), table_name='agent_findings')
    op.drop_index(op.f('ix_agent_findings_agent_type'), table_name='agent_findings')
    op.drop_table('agent_findings')
    op.drop_index(op.f('ix_code_reviews_user_id'), table_name='code_reviews')
    op.drop_index(op.f('ix_code_reviews_status'), table_name='code_reviews')
    op.drop_index(op.f('ix_code_reviews_created_at'), table_name='code_reviews')
    op.drop_table('code_reviews')
    op.drop_index(op.f('ix_users_role'), table_name='users')
    op.drop_index(op.f('ix_users_email'), table_name='users')
    op.drop_table('users')
    # Drop the ENUM types created by sa.Enum in upgrade().
    bind = op.get_bind()
    sa.Enum(name='severityenum').drop(bind, checkfirst=True)
    sa.Enum(name='reviewstatus').drop(bind, checkfirst=True)
    sa.Enum(name='userrole').drop(bind, checkfirst=True)
    # ### end Alembic commands ###
61 changes: 61 additions & 0 deletions backend/src/core/config/settings.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,61 @@
"""
Configuración centralizada para CodeGuard AI.

Carga variables de entorno usando pydantic-settings.
"""

from typing import Optional

from pydantic_settings import BaseSettings, SettingsConfigDict


class Settings(BaseSettings):
    """Application configuration loaded from environment variables.

    Attributes:
        CLERK_SECRET_KEY: Clerk secret key used to validate JWTs.
        CLERK_PUBLISHABLE_KEY: Clerk publishable (public) key.
        DATABASE_URL: PostgreSQL/Supabase connection URL.
        ENVIRONMENT: Runtime environment (development/production).
        DEBUG: Debug mode flag.
    """

    # Clerk Authentication (required — no defaults on purpose so a
    # missing key fails fast at startup)
    CLERK_SECRET_KEY: str
    CLERK_PUBLISHABLE_KEY: str

    # Database (required)
    DATABASE_URL: str

    # Application
    ENVIRONMENT: str = "development"
    DEBUG: bool = True
    APP_NAME: str = "CodeGuard AI"
    APP_VERSION: str = "1.0.0"

    # API
    API_HOST: str = "0.0.0.0"
    API_PORT: int = 8000

    # CORS — comma-separated list of allowed origins
    ALLOWED_ORIGINS: str = "http://localhost:3000,http://localhost:5173"

    # Redis (optional, for Sprint 2)
    REDIS_URL: Optional[str] = None
    REDIS_PASSWORD: Optional[str] = None

    model_config = SettingsConfigDict(
        env_file=".env",
        env_file_encoding="utf-8",
        extra="ignore",
    )

    @property
    def allowed_origins_list(self) -> list[str]:
        """Return the list of origins allowed for CORS.

        Empty entries (e.g. produced by a trailing or doubled comma in
        ALLOWED_ORIGINS) are filtered out so an invalid "" origin is
        never registered.
        """
        return [
            origin.strip()
            for origin in self.ALLOWED_ORIGINS.split(",")
            if origin.strip()
        ]


# Configuration singleton, imported throughout the application.
settings = Settings()
22 changes: 21 additions & 1 deletion backend/src/core/database.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,10 +3,11 @@
"""

import os
from typing import Generator

from dotenv import load_dotenv
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm import Session, sessionmaker

load_dotenv()

Expand All @@ -17,3 +18,22 @@

engine = create_engine(DATABASE_URL, pool_pre_ping=True)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)


def get_db() -> Generator[Session, None, None]:
    """FastAPI dependency that yields a SQLAlchemy session.

    The session is always closed after the request finishes, even if
    the request handler raises.

    Yields:
        Session: an open SQLAlchemy session.

    Example:
        @app.get("/users")
        def get_users(db: Session = Depends(get_db)):
            return db.query(User).all()
    """
    session = SessionLocal()
    try:
        yield session
    finally:
        session.close()
Loading