-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathMakefile
More file actions
162 lines (133 loc) · 4.51 KB
/
Makefile
File metadata and controls
162 lines (133 loc) · 4.51 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
# Declare command targets as phony so a same-named file never shadows them.
# Fixed: the old list named targets that do not exist in this file
# (install-backend/install-frontend/run-backend/run-frontend/run/dev) and
# omitted several real ones (scrape-scheduled, scrape-once, worker, beat,
# celery-monitor, db-shell). Kept in sync with the rules below.
.PHONY: help install install-app install-front sync clean \
        migrate migrate-auto migrate-down \
        app front \
        docker-build docker-up docker-down docker-logs \
        scrape-reddit scrape-scheduled scrape-once train-models backtest \
        worker beat celery-monitor \
        test test-cov lint format shell db-shell
# Default target: self-documenting command index.
# Fixed: dropped the advertised `make dev` (no such rule exists in this file)
# and added the existing `db-shell` target to the Database section.
help:
	@echo "Available commands:"
	@echo ""
	@echo "Setup:"
	@echo "  make install         - Install all dependencies (backend + frontend)"
	@echo "  make sync            - Sync uv dependencies"
	@echo "  make clean           - Remove cache and temporary files"
	@echo ""
	@echo "Database:"
	@echo "  make migrate         - Run database migrations"
	@echo "  make migrate-auto    - Create new migration (msg='description')"
	@echo "  make migrate-down    - Rollback last migration"
	@echo "  make db-shell        - Open psql against DATABASE_URL"
	@echo ""
	@echo "Run:"
	@echo "  make app             - Start FastAPI backend server"
	@echo "  make front           - Start Next.js frontend"
	@echo ""
	@echo "Docker:"
	@echo "  make docker-build    - Build docker images"
	@echo "  make docker-up       - Start docker containers"
	@echo "  make docker-down     - Stop docker containers"
	@echo "  make docker-logs     - View docker logs"
	@echo ""
	@echo "Data & ML:"
	@echo "  make scrape-reddit   - Run Reddit scraper (one-time)"
	@echo "  make scrape-scheduled - Run automated scheduled scraper"
	@echo "  make scrape-once     - Test scraper (single run)"
	@echo "  make train-models    - Train ML models"
	@echo "  make backtest        - Run backtesting"
	@echo ""
	@echo "Celery (Background Tasks):"
	@echo "  make worker          - Start Celery worker"
	@echo "  make beat            - Start Celery beat scheduler"
	@echo "  make celery-monitor  - Monitor Celery tasks (Flower)"
	@echo ""
	@echo "Code Quality:"
	@echo "  make test            - Run tests"
	@echo "  make test-cov        - Run tests with coverage"
	@echo "  make lint            - Run linting"
	@echo "  make format          - Format code"
	@echo "  make shell           - Open Python shell with models loaded"
# Installation
# Aggregate: installs both halves; sub-targets can also be run individually.
install: install-app install-front

# Backend Python dependencies via uv's pip-compatible interface.
install-app:
	uv pip install -r requirements.txt

# Frontend Node dependencies (package.json lives in frontend/).
install-front:
	cd frontend && npm install

# Resolve and sync the uv-managed environment with the lockfile.
sync:
	uv sync
# Clean
# Removes Python bytecode, tool caches, log files, and coverage artifacts.
# NOTE(review): `find -exec rm -rf {} +` on directories may print a harmless
# "No such file or directory" when a matched dir was already removed with its
# parent — expected behavior of find, not an error in this recipe.
clean:
	find . -type d -name "__pycache__" -exec rm -rf {} +
	find . -type f -name "*.pyc" -delete
	find . -type f -name "*.pyo" -delete
	find . -type f -name "*.log" -delete
	find . -type d -name ".pytest_cache" -exec rm -rf {} +
	find . -type d -name ".ruff_cache" -exec rm -rf {} +
	rm -rf .coverage htmlcov/
# Database Migrations (Alembic)
# Apply all pending migrations up to the latest revision.
migrate:
	uv run alembic upgrade head

# Autogenerate a new revision from model diffs.
# Usage: make migrate-auto msg='add users table'
# NOTE(review): if msg is omitted, $(msg) expands to empty and Alembic creates
# a revision with an empty message — consider guarding with $(error ...) if
# that matters.
migrate-auto:
	uv run alembic revision --autogenerate -m "$(msg)"

# Roll back exactly one revision.
migrate-down:
	uv run alembic downgrade -1
# Run Application
# Start the FastAPI backend with auto-reload, listening on 0.0.0.0:8000.
# Guard: refuse to start if the entrypoint module is empty (`-s` tests for a
# non-empty file), e.g. on a fresh scaffold before the app is written.
app:
	@if [ ! -s backend/api/main.py ]; then \
		echo "❌ Error: backend/api/main.py is empty. Create FastAPI app first."; \
		exit 1; \
	fi
	uv run uvicorn backend.api.main:app --reload --host 0.0.0.0 --port 8000

# Start the Next.js dev server from frontend/.
front:
	cd frontend && npm run dev
# Docker
# NOTE(review): these use the legacy `docker-compose` v1 binary; environments
# with Compose v2 only would need `docker compose` — confirm the deployment
# environment before changing.
docker-build:
	docker-compose build

# Start all services detached.
docker-up:
	docker-compose up -d

# Stop and remove the compose services.
docker-down:
	docker-compose down

# Follow combined service logs.
docker-logs:
	docker-compose logs -f
# Data Collection & ML
# One-off Reddit scrape.
scrape-reddit:
	uv run python scripts/scrape_reddit.py

# Long-running scheduled scraper.
scrape-scheduled:
	uv run python scripts/scheduled_scraper.py

# Single pass of the scheduled scraper (smoke test).
scrape-once:
	uv run python scripts/scheduled_scraper.py --once

# Train the ML models.
train-models:
	uv run python scripts/train_models.py

# Run the backtesting script.
backtest:
	uv run python scripts/backtest.py
# Celery Background Tasks
# Worker consuming the scraping and ml queues (2 concurrent processes).
worker:
	@echo "🚀 Starting Celery worker..."
	@uv run celery -A backend.celery_app worker --loglevel=info --concurrency=2 --queues=scraping,ml

# Periodic-task scheduler.
beat:
	@echo "⏰ Starting Celery beat scheduler..."
	@uv run celery -A backend.celery_app beat --loglevel=info

# Flower monitoring UI on port 5555.
# Fixed: header was `celery monitor:` (a space), which defined two unrelated
# targets `celery` and `monitor` and made the advertised `make celery-monitor`
# fail with "No rule to make target".
celery-monitor:
	@echo "🌸 Starting Flower monitoring UI (http://localhost:5555)..."
	@uv run celery -A backend.celery_app flower --port=5555
# Testing
# Run the pytest suite verbosely.
test:
	uv run pytest tests/ -v

# Run tests with coverage; HTML report lands in htmlcov/, summary in terminal.
test-cov:
	uv run pytest tests/ --cov=backend --cov-report=html --cov-report=term

# Code Quality
# Ruff lint over both the backend package and the scripts directory.
lint:
	@echo "Linting backend..."
	@uv run ruff check backend/
	@echo "Linting scripts..."
	@uv run ruff check scripts/

# Ruff auto-format over the same two trees.
format:
	@echo "Formatting backend..."
	@uv run ruff format backend/
	@echo "Formatting scripts..."
	@uv run ruff format scripts/
# Database Shell (requires psql installed)
# $$DATABASE_URL is double-dollared so the shell, not make, expands the
# environment variable; fails fast with a clear message when it is unset.
db-shell:
	@if [ -z "$$DATABASE_URL" ]; then \
		echo "❌ Error: DATABASE_URL not set. Check your .env file."; \
		exit 1; \
	fi
	psql "$$DATABASE_URL"

# Python Shell
# Interactive REPL (-i) with the DB engine and Reddit model pre-imported.
shell:
	uv run python -i -c "from backend.database.config import Base, engine; from backend.models.reddit import RedditPost; print('✓ Models loaded')"