diff --git a/.gitignore b/.gitignore index 852216e6..61b854f3 100644 --- a/.gitignore +++ b/.gitignore @@ -132,3 +132,6 @@ dmypy.json # macOS .DS_Store + +shop.db +test.db \ No newline at end of file diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 00000000..ef88d16a --- /dev/null +++ b/Dockerfile @@ -0,0 +1,22 @@ +# syntax=docker/dockerfile:1 +FROM python:3.11-slim AS base + +ENV PYTHONDONTWRITEBYTECODE=1 \ +PYTHONUNBUFFERED=1 + + +WORKDIR /app + + +COPY hw2/hw/requirements.txt ./requirements.txt +RUN pip install --no-cache-dir -r requirements.txt + + +COPY hw2/hw ./hw + +ENV PYTHONPATH=/app/hw + + +EXPOSE 8000 + +CMD ["uvicorn", "shop_api.main:app", "--host", "0.0.0.0", "--port", "8000"] \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 00000000..b28abea7 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,76 @@ +version: "3.9" + +services: + app: + build: . + container_name: shop_api + ports: + - "8000:8000" + environment: + - PYTHONPATH=/app/hw + - DATABASE_URL=postgresql+psycopg://shop:shop@db:5432/shopdb + depends_on: + - db + networks: + - monitor-net + + db: + image: postgres:16 + container_name: postgres + environment: + - POSTGRES_DB=shopdb + - POSTGRES_USER=shop + - POSTGRES_PASSWORD=shop + ports: + - "5432:5432" + volumes: + - pgdata:/var/lib/postgresql/data + networks: + - monitor-net + + adminer: + image: adminer + container_name: adminer + ports: + - "8080:8080" + depends_on: + - db + networks: + - monitor-net + + prometheus: + image: prom/prometheus:latest + container_name: prometheus + volumes: + - ./prometheus.yml:/etc/prometheus/prometheus.yml:ro + - prom-data:/prometheus + ports: + - "9090:9090" + networks: + - monitor-net + depends_on: + - app + + grafana: + image: grafana/grafana:latest + container_name: grafana + ports: + - "3000:3000" + volumes: + - grafana-data:/var/lib/grafana + environment: + - GF_SECURITY_ADMIN_USER=admin + - GF_SECURITY_ADMIN_PASSWORD=admin + 
networks: + - monitor-net + depends_on: + - prometheus + +volumes: + prom-data: + grafana-data: + pgdata: + +networks: + monitor-net: + driver: bridge diff --git a/hw1/app.py b/hw1/app.py index 6107b870..f63a08bf 100644 --- a/hw1/app.py +++ b/hw1/app.py @@ -1,4 +1,7 @@ from typing import Any, Awaitable, Callable +import json +from math import prod +from urllib.parse import parse_qs async def application( @@ -12,7 +15,116 @@ async def application( receive: Корутина для получения сообщений от клиента send: Корутина для отправки сообщений клиенту """ - # TODO: Ваша реализация здесь + + if scope.get("type") != "http": + await _send_json(send, {"detail": "unsupported scope type"}, status=422) + return + + method = scope.get("method", "GET").upper() + path: str = scope.get("path", "/") + query = parse_qs((scope.get("query_string") or b"").decode( + "utf-8"), keep_blank_values=True) + + if path.startswith("/fibonacci/"): + n_str = path[len("/fibonacci/"):].strip("/") + if n_str == "": + await _send_json(send, {"detail": "n is required"}, status=422) + return + try: + n = int(n_str) + except ValueError: + await _send_json(send, {"detail": "n must be an integer"}, status=422) + return + if n < 0: + await _send_json(send, {"detail": "n must be non-negative"}, status=400) + return + + a, b = 0, 1 + for _ in range(n): + a, b = b, a + b + await _send_json(send, {"result": a}, status=200) + return + + if path == "/factorial": + if method != "GET": + await _send_json(send, {"detail": "not found"}, status=404) + return + + raw = query.get("n", [None])[0] + if raw is None or raw == "": + await _send_json(send, {"detail": "n is required"}, status=422) + return + try: + n = int(raw) + except ValueError: + await _send_json(send, {"detail": "n must be an integer"}, status=422) + return + if n < 0: + await _send_json(send, {"detail": "n must be non-negative"}, status=400) + return + + result = 1 if n == 0 else prod(range(1, n + 1)) + await _send_json(send, {"result": result}, 
status=200) + return + + if path == "/mean": + if method != "GET": + await _send_json(send, {"detail": "not found"}, status=404) + return + + body_bytes = await _read_body(receive) + if not body_bytes: + await _send_json(send, {"detail": "json body required (array of numbers)"}, status=422) + return + + try: + data = json.loads(body_bytes.decode("utf-8")) + except json.JSONDecodeError: + await _send_json(send, {"detail": "invalid JSON"}, status=422) + return + + if not isinstance(data, list): + await _send_json(send, {"detail": "body must be a JSON array"}, status=422) + return + + if len(data) == 0: + await _send_json(send, {"detail": "array must not be empty"}, status=400) + return + + try: + nums = [float(x) for x in data] + except (TypeError, ValueError): + await _send_json(send, {"detail": "array must contain only numbers"}, status=422) + return + + mean_value = sum(nums) / len(nums) + await _send_json(send, {"result": mean_value}, status=200) + return + + await _send_json(send, {"detail": "not found"}, status=404) + + +async def _read_body(receive: Callable[[], Awaitable[dict[str, Any]]]) -> bytes: + body = b"" + more = True + while more: + message = await receive() + if message["type"] != "http.request": + break + body += message.get("body", b"") + more = message.get("more_body", False) + return body + + +async def _send_json(send: Callable[[dict[str, Any]], Awaitable[None]], payload: dict, status: int = 200): + body = json.dumps(payload).encode("utf-8") + headers = [ + (b"content-type", b"application/json; charset=utf-8"), + (b"content-length", str(len(body)).encode("ascii")), + ] + await send({"type": "http.response.start", "status": status, "headers": headers}) + await send({"type": "http.response.body", "body": body}) + if __name__ == "__main__": import uvicorn diff --git a/hw2/hw/requirements.txt b/hw2/hw/requirements.txt index 207dcf5c..0e2ce500 100644 --- a/hw2/hw/requirements.txt +++ b/hw2/hw/requirements.txt @@ -1,9 +1,15 @@ # Основные зависимости 
для ASGI приложения fastapi>=0.117.1 uvicorn>=0.24.0 +prometheus-fastapi-instrumentator>=6.1.0 # Зависимости для тестирования pytest>=7.4.0 pytest-asyncio>=0.21.0 httpx>=0.27.2 Faker>=37.8.0 + +SQLAlchemy>=2.0 +psycopg[binary]>=3.2 + +alembic>=1.13 \ No newline at end of file diff --git a/hw2/hw/shop_api/HW_results/Dashboard.png b/hw2/hw/shop_api/HW_results/Dashboard.png new file mode 100644 index 00000000..7bafd25f Binary files /dev/null and b/hw2/hw/shop_api/HW_results/Dashboard.png differ diff --git a/hw2/hw/shop_api/HW_results/Dashboard_2.png b/hw2/hw/shop_api/HW_results/Dashboard_2.png new file mode 100644 index 00000000..fd9d466c Binary files /dev/null and b/hw2/hw/shop_api/HW_results/Dashboard_2.png differ diff --git a/hw2/hw/shop_api/HW_results/Prometheus.png b/hw2/hw/shop_api/HW_results/Prometheus.png new file mode 100644 index 00000000..379517c4 Binary files /dev/null and b/hw2/hw/shop_api/HW_results/Prometheus.png differ diff --git a/hw2/hw/shop_api/HW_results/README.md b/hw2/hw/shop_api/HW_results/README.md new file mode 100644 index 00000000..e951e0ee --- /dev/null +++ b/hw2/hw/shop_api/HW_results/README.md @@ -0,0 +1,52 @@ +# сценарии: + +docker compose exec app python -m shop_api.HW_results.tx.txn_demos nrr_rc # non-repeatable read на READ COMMITTED + +docker compose exec app python -m shop_api.HW_results.tx.txn_demos nrr_rr # отсутствие non-repeatable read на REPEATABLE READ + +docker compose exec app python -m shop_api.HW_results.tx.txn_demos phantom_rc # phantom на READ COMMITTED + +docker compose exec app python -m shop_api.HW_results.tx.txn_demos phantom_rr # отсутствие phantom на REPEATABLE READ + +docker compose exec app python -m shop_api.HW_results.tx.txn_demos serializable # строгая изоляция + + +nrr_rc + +Running scenario: nrr_rc +[T1] first read val=100 +[T2] committed UPDATE +[T1] second read val=101 + +второе чтение видит новое значение —non‑repeatable read + +Running scenario: nrr_rr +[T1] first read val=100 +[T2] committed UPDATE 
+[T1] second read val=100 + + в REPEATABLE READ snapshot фиксирован, значение не меняется + +phantom_rc + +Running scenario: phantom_rc +[T1] first count=2 +[T2] committed INSERT +[T1] second count=3 + +количество строк изменилось — phantom. + +Running scenario: phantom_rr +[T1] first count=2 +[T2] committed INSERT +[T1] second count=2 + +snapshot предотвращает появление «призрачных» строк в той же транзакции. + +serializable — строгая изоляция + +Running scenario: serializable +[T1] bump +10 committed +[T2] bump +20 committed + +за счёт блокировок операции выполняются последовательно, конфликтов нет \ No newline at end of file diff --git a/hw2/hw/shop_api/HW_results/tx/dirty_read.py b/hw2/hw/shop_api/HW_results/tx/dirty_read.py new file mode 100644 index 00000000..f04fe797 --- /dev/null +++ b/hw2/hw/shop_api/HW_results/tx/dirty_read.py @@ -0,0 +1,4 @@ +print(""" +PostgreSQL не поддерживает уровень READ UNCOMMITTED. Любая попытка +установить его сводится к READ COMMITTED, поэтому dirty read невозможен. 
+""") diff --git a/hw2/hw/shop_api/HW_results/tx/txn_demos.py b/hw2/hw/shop_api/HW_results/tx/txn_demos.py new file mode 100644 index 00000000..b59b11da --- /dev/null +++ b/hw2/hw/shop_api/HW_results/tx/txn_demos.py @@ -0,0 +1,180 @@ +from __future__ import annotations +import threading +import time +from contextlib import contextmanager +from sqlalchemy import create_engine, text +from sqlalchemy.engine import Connection +from sqlalchemy.orm import sessionmaker + +import os + +DB_URL = os.getenv( + "DATABASE_URL", + "postgresql+psycopg2://postgres:postgres@postgres:5432/postgres", +) + +engine = create_engine(DB_URL, isolation_level="READ COMMITTED", future=True) +Session = sessionmaker(engine, future=True) + + +@contextmanager +def tx(isolation: str = "READ COMMITTED"): + eng = create_engine(DB_URL, isolation_level=isolation, future=True) + with eng.connect() as conn: + trans = conn.begin() + try: + yield conn + trans.commit() + except: + trans.rollback() + raise + finally: + conn.close() + + +def prepare(): + with engine.begin() as c: + c.execute(text(""" + CREATE TABLE IF NOT EXISTS demo_kv( + id INT PRIMARY KEY, + val INT NOT NULL + ); + """)) + c.execute(text("DELETE FROM demo_kv;")) + c.execute(text("INSERT INTO demo_kv(id,val) VALUES (1, 100);")) + + +def non_repeatable_read_read_committed(): + prepare() + + def t1(): + with tx("READ COMMITTED") as c: + r1 = c.execute( + text("SELECT val FROM demo_kv WHERE id=1")).scalar_one() + print(f"[T1] first read val={r1}") + time.sleep(2) + r2 = c.execute( + text("SELECT val FROM demo_kv WHERE id=1")).scalar_one() + print(f"[T1] second read val={r2}") + + def t2(): + time.sleep(0.5) + with tx("READ COMMITTED") as c: + c.execute(text("UPDATE demo_kv SET val = val + 1 WHERE id=1")) + print("[T2] committed UPDATE") + threading.Thread(target=t1).start() + threading.Thread(target=t2).start() + + +def non_repeatable_read_repeatable_read(): + prepare() + + def t1(): + with tx("REPEATABLE READ") as c: + r1 = c.execute( + 
text("SELECT val FROM demo_kv WHERE id=1")).scalar_one() + print(f"[T1] first read val={r1}") + time.sleep(2) + r2 = c.execute( + text("SELECT val FROM demo_kv WHERE id=1")).scalar_one() + print(f"[T1] second read val={r2}") + + def t2(): + time.sleep(0.5) + with tx("READ COMMITTED") as c: + c.execute(text("UPDATE demo_kv SET val = val + 1 WHERE id=1")) + print("[T2] committed UPDATE") + threading.Thread(target=t1).start() + threading.Thread(target=t2).start() + + +def phantom_read_read_committed(): + prepare() + with engine.begin() as c: + c.execute(text("DELETE FROM demo_kv;")) + c.execute(text("INSERT INTO demo_kv(id,val) VALUES (1,100), (2,100);")) + + def t1(): + with tx("READ COMMITTED") as c: + r1 = c.execute( + text("SELECT COUNT(*) FROM demo_kv WHERE val=100")).scalar_one() + print(f"[T1] first count={r1}") + time.sleep(2) + r2 = c.execute( + text("SELECT COUNT(*) FROM demo_kv WHERE val=100")).scalar_one() + print(f"[T1] second count={r2}") + + def t2(): + time.sleep(0.5) + with tx("READ COMMITTED") as c: + c.execute(text("INSERT INTO demo_kv(id,val) VALUES (3,100)")) + print("[T2] committed INSERT") + threading.Thread(target=t1).start() + threading.Thread(target=t2).start() + + +def phantom_read_repeatable_read(): + prepare() + with engine.begin() as c: + c.execute(text("DELETE FROM demo_kv;")) + c.execute(text("INSERT INTO demo_kv(id,val) VALUES (1,100), (2,100);")) + + def t1(): + with tx("REPEATABLE READ") as c: + r1 = c.execute( + text("SELECT COUNT(*) FROM demo_kv WHERE val=100")).scalar_one() + print(f"[T1] first count={r1}") + time.sleep(2) + r2 = c.execute( + text("SELECT COUNT(*) FROM demo_kv WHERE val=100")).scalar_one() + print(f"[T1] second count={r2}") + + def t2(): + time.sleep(0.5) + with tx("READ COMMITTED") as c: + c.execute(text("INSERT INTO demo_kv(id,val) VALUES (3,100)")) + print("[T2] committed INSERT") + threading.Thread(target=t1).start() + threading.Thread(target=t2).start() + + +def serializable_example_no_anomalies(): + 
prepare() + + def t1(): + with tx("SERIALIZABLE") as c: + r1 = c.execute( + text("SELECT val FROM demo_kv WHERE id=1 FOR UPDATE")).scalar_one() + c.execute(text("UPDATE demo_kv SET val=:v WHERE id=1"), + {"v": r1 + 10}) + print("[T1] bump +10 committed") + + def t2(): + time.sleep(0.2) + try: + with tx("SERIALIZABLE") as c: + r1 = c.execute( + text("SELECT val FROM demo_kv WHERE id=1 FOR UPDATE")).scalar_one() + c.execute(text("UPDATE demo_kv SET val=:v WHERE id=1"), { + "v": r1 + 20}) + print("[T2] bump +20 committed") + except Exception as e: + print(f"[T2] serialization failure -> {e}") + threading.Thread(target=t1).start() + threading.Thread(target=t2).start() + + +SCENARIOS = { + "nrr_rc": non_repeatable_read_read_committed, + "nrr_rr": non_repeatable_read_repeatable_read, + "phantom_rc": phantom_read_read_committed, + "phantom_rr": phantom_read_repeatable_read, + "serializable": serializable_example_no_anomalies, +} + +if __name__ == "__main__": + import sys + name = sys.argv[1] if len(sys.argv) > 1 else "nrr_rc" + print(f"Running scenario: {name}") + SCENARIOS[name]() + time.sleep(4) diff --git a/hw2/hw/shop_api/app.py b/hw2/hw/shop_api/app.py new file mode 100644 index 00000000..2be190b4 --- /dev/null +++ b/hw2/hw/shop_api/app.py @@ -0,0 +1,23 @@ +from fastapi import FastAPI +from prometheus_fastapi_instrumentator import Instrumentator + +from .routers.items import router as items_router +from .routers.carts import router as carts_router +from .routers.chat import register_chat + +from .db import init_db + +app = FastAPI(title="Shop API") +init_db() +instrumentator = Instrumentator() +instrumentator.instrument(app) + + +@app.on_event("startup") +async def _startup(): + instrumentator.expose(app, endpoint="/metrics", include_in_schema=False) + init_db() + +app.include_router(items_router) +app.include_router(carts_router) +register_chat(app) diff --git a/hw2/hw/shop_api/db.py b/hw2/hw/shop_api/db.py new file mode 100644 index 00000000..807c4d8d --- /dev/null 
+++ b/hw2/hw/shop_api/db.py @@ -0,0 +1,51 @@ +from __future__ import annotations +import os +from contextlib import contextmanager + +from sqlalchemy import create_engine +from sqlalchemy.orm import sessionmaker, DeclarativeBase + +DATABASE_URL = os.getenv( + "DATABASE_URL", + "sqlite+pysqlite:///./shop.db", +) + + +class Base(DeclarativeBase): + pass + + +connect_args = {"check_same_thread": False} if DATABASE_URL.startswith( + "sqlite") else {} + + +engine = create_engine( + DATABASE_URL, + pool_pre_ping=True, + connect_args=connect_args, + future=True, +) +SessionLocal = sessionmaker( + bind=engine, + expire_on_commit=False, + autoflush=False, + future=True, +) + + +def init_db() -> None: + from . import db_models + Base.metadata.create_all(bind=engine) + + +@contextmanager +def session_scope(): + session = SessionLocal() + try: + yield session + session.commit() + except: + session.rollback() + raise + finally: + session.close() diff --git a/hw2/hw/shop_api/db_models.py b/hw2/hw/shop_api/db_models.py new file mode 100644 index 00000000..eccbc39c --- /dev/null +++ b/hw2/hw/shop_api/db_models.py @@ -0,0 +1,43 @@ +from __future__ import annotations +from sqlalchemy import Boolean, ForeignKey, Integer, Float, String, UniqueConstraint +from sqlalchemy.orm import Mapped, mapped_column, relationship + +from .db import Base + + +class ItemORM(Base): + __tablename__ = "items" + + id: Mapped[int] = mapped_column( + Integer, primary_key=True, autoincrement=True) + name: Mapped[str] = mapped_column(String(255), nullable=False) + price: Mapped[float] = mapped_column(Float, nullable=False) + deleted: Mapped[bool] = mapped_column( + Boolean, default=False, nullable=False) + + +class CartORM(Base): + __tablename__ = "carts" + + id: Mapped[int] = mapped_column( + Integer, primary_key=True, autoincrement=True) + items: Mapped[list["CartItemORM"]] = relationship( + back_populates="cart", cascade="all, delete-orphan" + ) + + +class CartItemORM(Base): + __tablename__ = "cart_items" 
+ __table_args__ = ( + UniqueConstraint("cart_id", "item_id", name="uq_cart_item"), + ) + + id: Mapped[int] = mapped_column( + Integer, primary_key=True, autoincrement=True) + cart_id: Mapped[int] = mapped_column( + ForeignKey("carts.id"), nullable=False) + item_id: Mapped[int] = mapped_column( + ForeignKey("items.id"), nullable=False) + quantity: Mapped[int] = mapped_column(Integer, default=0, nullable=False) + + cart: Mapped["CartORM"] = relationship(back_populates="items") diff --git a/hw2/hw/shop_api/main.py b/hw2/hw/shop_api/main.py index f60a8c60..c07c4599 100644 --- a/hw2/hw/shop_api/main.py +++ b/hw2/hw/shop_api/main.py @@ -1,3 +1 @@ -from fastapi import FastAPI - -app = FastAPI(title="Shop API") +from .app import app diff --git a/hw2/hw/shop_api/models.py b/hw2/hw/shop_api/models.py new file mode 100644 index 00000000..3fb976d9 --- /dev/null +++ b/hw2/hw/shop_api/models.py @@ -0,0 +1,46 @@ +from __future__ import annotations +from typing import List, Optional +from pydantic import BaseModel, Field +from pydantic import BaseModel, Field, ConfigDict + + +class ItemCreate(BaseModel): + name: str = Field(..., min_length=1) + price: float = Field(..., ge=0) + + +class ItemPut(BaseModel): + name: str = Field(..., min_length=1) + price: float = Field(..., ge=0) + deleted: bool = False + + +class ItemPatch(BaseModel): + name: Optional[str] = Field(None, min_length=1) + price: Optional[float] = Field(None, ge=0) + model_config = ConfigDict(extra='forbid') + + +model_config = { + "extra": "forbid", +} + + +class Item(BaseModel): + id: int + name: str + price: float + deleted: bool = False + + +class CartItem(BaseModel): + id: int + name: str + quantity: int + available: bool + + +class Cart(BaseModel): + id: int + items: List[CartItem] + price: float diff --git a/hw2/hw/shop_api/routers/__init__.py b/hw2/hw/shop_api/routers/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/hw2/hw/shop_api/routers/carts.py b/hw2/hw/shop_api/routers/carts.py new 
file mode 100644 index 00000000..f547d78f --- /dev/null +++ b/hw2/hw/shop_api/routers/carts.py @@ -0,0 +1,52 @@ +from fastapi import APIRouter, Query, Response +from typing import Dict, List, Optional + +from ..models import Cart +from ..storage import create_cart, cart_view, add_to_cart, all_carts + + +router = APIRouter(prefix="/cart", tags=["cart"]) + + +@router.post("", status_code=201) +def post_cart(response: Response) -> Dict[str, int]: + cid = create_cart() + response.headers["Location"] = f"/cart/{cid}" + return {"id": cid} + + +@router.get("/{cart_id}", response_model=Cart) +def get_cart(cart_id: int) -> Cart: + return cart_view(cart_id) + + +@router.get("", response_model=List[Cart]) +def list_carts( + offset: int = Query(0, ge=0), + limit: int = Query(10, gt=0), + min_price: Optional[float] = Query(None, ge=0), + max_price: Optional[float] = Query(None, ge=0), + min_quantity: Optional[int] = Query(None, ge=0), + max_quantity: Optional[int] = Query(None, ge=0), +) -> List[Cart]: + + carts: List[Cart] = [] + for cid in all_carts(): + cart = cart_view(cid) + total_qty = sum(ci.quantity for ci in cart.items) + if min_price is not None and cart.price < float(min_price): + continue + if max_price is not None and cart.price > float(max_price): + continue + if min_quantity is not None and total_qty < int(min_quantity): + continue + if max_quantity is not None and total_qty > int(max_quantity): + continue + carts.append(cart) + return carts[offset: offset + limit] + + +@router.post("/{cart_id}/add/{item_id}", response_model=Cart) +def add(cart_id: int, item_id: int) -> Cart: + add_to_cart(cart_id, item_id) + return cart_view(cart_id) diff --git a/hw2/hw/shop_api/routers/chat.py b/hw2/hw/shop_api/routers/chat.py new file mode 100644 index 00000000..04663742 --- /dev/null +++ b/hw2/hw/shop_api/routers/chat.py @@ -0,0 +1,43 @@ +from typing import Dict, Optional +from fastapi import WebSocket, WebSocketDisconnect, FastAPI +import secrets +import string + +_rooms: 
Dict[str, Dict[str, WebSocket]] = {} + + +def _gen_username(length: int = 8) -> str: + alphabet = string.ascii_letters + string.digits + return "user_" + "".join(secrets.choice(alphabet) for _ in range(length)) + + +async def _broadcast(room: str, message: str, skip_ws: Optional[WebSocket] = None) -> None: + conns = _rooms.get(room, {}) + for _, ws in list(conns.items()): + if skip_ws is not None and ws is skip_ws: + continue + try: + await ws.send_text(message) + except Exception: + pass + + +def register_chat(app: FastAPI) -> None: + @app.websocket("/chat/{chat_name}") + async def chat_websocket(websocket: WebSocket, chat_name: str): + await websocket.accept() + username = _gen_username() + conns = _rooms.setdefault(chat_name, {}) + conns[username] = websocket + try: + while True: + text = await websocket.receive_text() + await _broadcast(chat_name, f"{username} :: {text}", skip_ws=websocket) + except WebSocketDisconnect: + pass + finally: + room = _rooms.get(chat_name) + if room and username in room: + room.pop(username, None) + if not room: + _rooms.pop(chat_name, None) diff --git a/hw2/hw/shop_api/routers/items.py b/hw2/hw/shop_api/routers/items.py new file mode 100644 index 00000000..6abf339c --- /dev/null +++ b/hw2/hw/shop_api/routers/items.py @@ -0,0 +1,90 @@ +from __future__ import annotations + +from typing import List, Optional +from http import HTTPStatus + +from fastapi import APIRouter, HTTPException, Query, Response + +from ..models import Item, ItemCreate, ItemPut, ItemPatch +from ..storage import ( + create_item, + save_item, + get_item_or_404, + get_item_raw, + all_items, +) + +router = APIRouter(prefix="/item", tags=["item"]) + + +@router.post("", response_model=Item, status_code=HTTPStatus.CREATED) +def post_item(body: ItemCreate) -> Item: + return create_item(name=body.name, price=float(body.price)) + + +@router.get("/{item_id}", response_model=Item) +def get_item(item_id: int) -> Item: + return get_item_or_404(item_id) + + +@router.get("", 
response_model=List[Item]) +def list_items( + offset: int = Query(0, ge=0), + limit: int = Query(10, gt=0), + min_price: Optional[float] = Query(None, ge=0), + max_price: Optional[float] = Query(None, ge=0), + show_deleted: bool = False, +) -> List[Item]: + items = all_items(show_deleted) + if min_price is not None: + items = [i for i in items if i.price >= float(min_price)] + if max_price is not None: + items = [i for i in items if i.price <= float(max_price)] + return items[offset: offset + limit] + + +@router.put("/{item_id}", response_model=Item) +def replace_item(item_id: int, body: ItemPut) -> Item: + item = get_item_raw(item_id) + if not item: + raise HTTPException(status_code=HTTPStatus.NOT_FOUND, + detail="item not found") + + new_item = Item( + id=item_id, + name=body.name, + price=float(body.price), + deleted=body.deleted, + ) + save_item(new_item) + return new_item + + +@router.patch("/{item_id}", response_model=Item) +def patch_item(item_id: int, body: ItemPatch): + item = get_item_raw(item_id) + if not item: + raise HTTPException(status_code=HTTPStatus.NOT_FOUND, + detail="item not found") + if item.deleted: + return Response(status_code=HTTPStatus.NOT_MODIFIED) + + if body.name is not None: + item.name = body.name + if body.price is not None: + item.price = float(body.price) + + save_item(item) + return item + + +@router.delete("/{item_id}", status_code=HTTPStatus.OK) +def delete_item(item_id: int): + item = get_item_raw(item_id) + if not item: + raise HTTPException(status_code=HTTPStatus.NOT_FOUND, + detail="item not found") + + item.deleted = True + save_item(item) + return None diff --git a/hw2/hw/shop_api/storage.py b/hw2/hw/shop_api/storage.py new file mode 100644 index 00000000..2593708f --- /dev/null +++ b/hw2/hw/shop_api/storage.py @@ -0,0 +1,148 @@ + +from __future__ import annotations +from typing import List, Iterable +from fastapi import HTTPException +from sqlalchemy import select +from sqlalchemy.orm import Session + +from .models 
import Item, Cart, CartItem +from .db import SessionLocal +from .db_models import ItemORM, CartORM, CartItemORM + + +def with_session(func): + def wrapper(*args, **kwargs): + session: Session | None = kwargs.get("session") + created = False + if session is None: + session = SessionLocal() + kwargs["session"] = session + created = True + try: + result = func(*args, **kwargs) + session.commit() + return result + except Exception: + session.rollback() + raise + finally: + if created: + session.close() + return wrapper + + +@with_session +def get_item_or_404(item_id: int, *, session: Session | None = None) -> Item: + orm = session.get(ItemORM, item_id) + if not orm or orm.deleted: + raise HTTPException(status_code=404, detail="item not found") + return Item(id=orm.id, name=orm.name, price=float(orm.price), deleted=orm.deleted) + + +@with_session +def get_item_raw(item_id: int, *, session: Session | None = None) -> ItemORM | None: + return session.get(ItemORM, item_id) + + +@with_session +def save_item(item: Item, *, session: Session | None = None) -> None: + if item.id: + orm = session.get(ItemORM, item.id) + if not orm: + orm = ItemORM(id=item.id, name=item.name, price=item.price, + deleted=getattr(item, "deleted", False)) + session.add(orm) + else: + orm.name = item.name + orm.price = item.price + orm.deleted = getattr(item, "deleted", orm.deleted) + else: + orm = ItemORM(name=item.name, price=item.price, + deleted=getattr(item, "deleted", False)) + session.add(orm) + + +@with_session +def all_items(show_deleted: bool, *, session: Session | None = None) -> List[Item]: + stmt = select(ItemORM) + if not show_deleted: + stmt = stmt.where(ItemORM.deleted.is_(False)) + rows: Iterable[ItemORM] = session.scalars(stmt) + return [Item(id=r.id, name=r.name, price=float(r.price), deleted=r.deleted) for r in rows] + + +@with_session +def create_cart(*, session: Session | None = None) -> int: + cart = CartORM() + session.add(cart) + session.flush() + return cart.id + + +def 
_ensure_cart_orm(cart_id: int, session: Session) -> CartORM: + cart = session.get(CartORM, cart_id) + if not cart: + raise HTTPException(status_code=404, detail="cart not found") + return cart + + +@with_session +def ensure_cart(cart_id: int, *, session: Session | None = None) -> None: + _ensure_cart_orm(cart_id, session) + + +@with_session +def add_to_cart(cart_id: int, item_id: int, count: int = 1, *, session: Session | None = None) -> None: + cart = _ensure_cart_orm(cart_id, session) + item = session.get(ItemORM, item_id) + if not item: + raise HTTPException(status_code=404, detail="item not found") + qty = max(1, int(count)) + + stmt = select(CartItemORM).where( + CartItemORM.cart_id == cart.id, + CartItemORM.item_id == item.id, + ) + rel = session.scalars(stmt).first() + if rel: + rel.quantity += qty + else: + rel = CartItemORM(cart_id=cart.id, item_id=item.id, quantity=qty) + session.add(rel) + + +@with_session +def cart_view(cart_id: int, *, session: Session | None = None) -> Cart: + cart = _ensure_cart_orm(cart_id, session) + + stmt = select(CartItemORM).where(CartItemORM.cart_id == cart.id) + rels: list[CartItemORM] = list(session.scalars(stmt)) + + items: list[CartItem] = [] + total = 0.0 + + for rel in rels: + item = session.get(ItemORM, rel.item_id) + available = bool(item and not item.deleted) + name = item.name if item else f"item#{rel.item_id}" + price = float(item.price) if item else 0.0 + if available: + total += price * rel.quantity + items.append(CartItem(id=rel.item_id, name=name, + quantity=rel.quantity, available=available)) + + return Cart(id=cart.id, items=items, price=total) + + +@with_session +def all_carts(*, session: Session | None = None) -> List[int]: + rows = session.scalars(select(CartORM.id).order_by(CartORM.id)) + return list(rows) + + +@with_session +def create_item(name: str, price: float, *, session=None) -> Item: + orm = ItemORM(name=name, price=price, deleted=False) + session.add(orm) + session.flush() + return Item(id=orm.id, 
name=orm.name, price=float(orm.price), deleted=orm.deleted) diff --git a/lecture3/Dockerfile b/lecture3/Dockerfile new file mode 100644 index 00000000..1eaf1db1 --- /dev/null +++ b/lecture3/Dockerfile @@ -0,0 +1,23 @@ +FROM python:3.12 AS base + +ARG PYTHONFAULTHANDLER=1 \ + PYTHONUNBUFFERED=1 \ + PYTHONHASHSEED=random \ + PIP_NO_CACHE_DIR=on \ + PIP_DISABLE_PIP_VERSION_CHECK=on \ + PIP_DEFAULT_TIMEOUT=500 + +RUN apt-get update && apt-get install -y gcc +RUN python -m pip install --upgrade pip + +WORKDIR $APP_ROOT/src +COPY . ./ + +ENV VIRTUAL_ENV=$APP_ROOT/src/.venv \ + PATH=$APP_ROOT/src/.venv/bin:$PATH + +RUN pip install -r requirements.txt + +FROM base as local + +CMD ["uvicorn", "demo_service.api:app", "--port", "8080", "--host", "0.0.0.0"] diff --git a/lecture3/README.md b/lecture3/README.md new file mode 100644 index 00000000..aad28c54 --- /dev/null +++ b/lecture3/README.md @@ -0,0 +1,13 @@ +# ДЗ + +## Настроить сборку образов Docker и мониторинг с помощью Prometheus и Grafana + +Интегрировать Docker с Prometheus и Grafana в любой уже написанный в ДЗ сервис (по аналогии с тем, как в репе) + +По сути, если вы выполнили вторую домашку, то теперь для неё надо написать Dockerfile и настроить мониторинг. 
Если вторую домашку вы не делали, то можно взять сервис из [rest_example](../hw2/rest_example/main.py) + +Сдача через PR, так же нужно: + +1) Dockerfile для сборки сервиса +2) docker-compose.yml для локального разворачивания в Docker +3) Приложить скрин с парой Дашбордов в Grafana diff --git a/lecture3/ddoser.py b/lecture3/ddoser.py new file mode 100644 index 00000000..fdc10f76 --- /dev/null +++ b/lecture3/ddoser.py @@ -0,0 +1,44 @@ +from concurrent.futures import ThreadPoolExecutor, as_completed + +import requests +from faker import Faker + +faker = Faker() + + +def create_users(): + for _ in range(500): + user = faker.profile() + response = requests.post( + "http://localhost:8080/create-user", + json={ + "username": user["username"], + "first_name": user["name"], + "last_name": "", + }, + ) + + print(response) + + +def get_users(): + for _ in range(500): + + response = requests.post( + "http://localhost:8080/get-user", + params={"id": faker.random_number(digits=2)}, + ) + print(response) + + +with ThreadPoolExecutor() as executor: + futures = {} + + for i in range(15): + futures[executor.submit(create_users)] = f"create-user-{i}" + + for _ in range(15): + futures[executor.submit(get_users)] = f"get-users-{i}" + + for future in as_completed(futures): + print(f"completed {futures[future]}") diff --git a/lecture3/demo_service/__init__.py b/lecture3/demo_service/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/lecture3/demo_service/api.py b/lecture3/demo_service/api.py new file mode 100644 index 00000000..7c6bce40 --- /dev/null +++ b/lecture3/demo_service/api.py @@ -0,0 +1,42 @@ +from http import HTTPStatus +from typing import Annotated +import random + +from fastapi import FastAPI, HTTPException, Query +from prometheus_fastapi_instrumentator import Instrumentator + +from demo_service import store +from demo_service.contracts import UserRequest, UserResource + +app = FastAPI(title="Demo User API") +Instrumentator().instrument(app).expose(app) + 
+ +def maybe_raise_random_error(): + if random.random() < 0.1: + raise HTTPException( + status_code=HTTPStatus.INTERNAL_SERVER_ERROR, + detail="Random error occurred" + ) + + +@app.post( + "/create-user", + response_model=UserResource, + status_code=HTTPStatus.CREATED, +) +async def create_user(body: UserRequest) -> UserResource: + maybe_raise_random_error() + return store.insert(body) + + +@app.post("/get-user") +async def get_user(id: Annotated[int, Query()]) -> UserResource: + maybe_raise_random_error() + + resource = store.select(id) + + if not resource: + raise HTTPException(HTTPStatus.NOT_FOUND) + + return resource diff --git a/lecture3/demo_service/contracts.py b/lecture3/demo_service/contracts.py new file mode 100644 index 00000000..72b2ce89 --- /dev/null +++ b/lecture3/demo_service/contracts.py @@ -0,0 +1,18 @@ +from datetime import datetime + +from pydantic import BaseModel + + +class UserResource(BaseModel): + uid: int + username: str + first_name: str + last_name: str + birthdate: datetime | None = None + + +class UserRequest(BaseModel): + username: str + first_name: str + last_name: str + birthdate: datetime | None = None diff --git a/lecture3/demo_service/store.py b/lecture3/demo_service/store.py new file mode 100644 index 00000000..a88a7cfb --- /dev/null +++ b/lecture3/demo_service/store.py @@ -0,0 +1,27 @@ +from typing import Iterable + +from demo_service.contracts import UserRequest, UserResource + + +def _generate_int_id() -> Iterable[int]: + i = 0 + while True: + yield i + i += 1 + + +_users = dict[int, UserResource]() +_id_generator = _generate_int_id() + + +def insert(user: UserRequest) -> UserResource: + id = next(_id_generator) + resource = UserResource(uid=id, **user.model_dump()) + + _users[id] = resource + + return resource + + +def select(id: int) -> UserResource | None: + return _users.get(id, None) diff --git a/lecture3/docker-compose.yml b/lecture3/docker-compose.yml new file mode 100644 index 00000000..91b5555c --- /dev/null +++ 
b/lecture3/docker-compose.yml @@ -0,0 +1,31 @@ +version: "3" + +services: + + local: + build: + context: . + dockerfile: ./Dockerfile + target: local + restart: always + ports: + - 8080:8080 + + grafana: + image: grafana/grafana:latest + ports: + - 3000:3000 + restart: always + + prometheus: + image: prom/prometheus + volumes: + - ./settings/prometheus/:/etc/prometheus/ + command: + - "--config.file=/etc/prometheus/prometheus.yml" + - "--storage.tsdb.path=/prometheus" + - "--web.console.libraries=/usr/share/prometheus/console_libraries" + - "--web.console.templates=/usr/share/prometheus/consoles" + ports: + - 9090:9090 + restart: always diff --git a/lecture3/requirements.txt b/lecture3/requirements.txt new file mode 100644 index 00000000..57084e17 --- /dev/null +++ b/lecture3/requirements.txt @@ -0,0 +1,3 @@ +fastapi +uvicorn +prometheus-fastapi-instrumentator>=6.1.0 \ No newline at end of file diff --git a/lecture3/settings/prometheus/prometheus.yml b/lecture3/settings/prometheus/prometheus.yml new file mode 100644 index 00000000..6bdf88e7 --- /dev/null +++ b/lecture3/settings/prometheus/prometheus.yml @@ -0,0 +1,10 @@ +global: + scrape_interval: 10s + evaluation_interval: 10s + +scrape_configs: + - job_name: demo-service-local + metrics_path: /metrics + static_configs: + - targets: + - local:8080 diff --git a/lecture4/1_raw_asyncpg/main.py b/lecture4/1_raw_asyncpg/main.py new file mode 100644 index 00000000..35439b2d --- /dev/null +++ b/lecture4/1_raw_asyncpg/main.py @@ -0,0 +1,61 @@ +import asyncpg +from typing import Optional, List + + +class UserRepository: + """Простой репозиторий для работы с пользователями через asyncpg""" + + def __init__(self, connection_string: str): + self.connection_string = connection_string + self.pool: Optional[asyncpg.Pool] = None + + async def initialize(self): + """Инициализация пула соединений""" + self.pool = await asyncpg.create_pool(self.connection_string, min_size=2, max_size=10) + + async def close(self): + """Закрытие 
пула""" + if self.pool: + await self.pool.close() + + async def create_user(self, email: str, name: str, age: int) -> int: + """Создание нового пользователя""" + async with self.pool.acquire() as connection: + row = await connection.fetchrow( + "INSERT INTO users (email, name, age) VALUES ($1, $2, $3) RETURNING id", + email, name, age + ) + return row['id'] + + async def get_user_by_id(self, user_id: int) -> Optional[dict]: + """Получение пользователя по ID""" + async with self.pool.acquire() as connection: + row = await connection.fetchrow( + "SELECT id, email, name, age, created_at FROM users WHERE id = $1", + user_id + ) + return dict(row) if row else None + + async def update_user_age(self, user_id: int, new_age: int) -> bool: + """Обновление возраста пользователя""" + async with self.pool.acquire() as connection: + result = await connection.execute( + "UPDATE users SET age = $1 WHERE id = $2", + new_age, user_id + ) + return result.split()[-1] == '1' + + async def get_users_with_orders(self) -> List[dict]: + """Получение пользователей с количеством их заказов (JOIN запрос)""" + async with self.pool.acquire() as connection: + rows = await connection.fetch(""" + SELECT + u.id, u.name, u.email, + COUNT(o.id) as order_count, + COALESCE(SUM(o.total_price), 0) as total_spent + FROM users u + LEFT JOIN orders o ON u.id = o.user_id + GROUP BY u.id, u.name, u.email + ORDER BY total_spent DESC + """) + return [dict(row) for row in rows] diff --git a/lecture4/1_raw_asyncpg/requirements.txt b/lecture4/1_raw_asyncpg/requirements.txt new file mode 100644 index 00000000..9d47fee1 --- /dev/null +++ b/lecture4/1_raw_asyncpg/requirements.txt @@ -0,0 +1,2 @@ +asyncpg==0.29.0 +python-dotenv==1.0.0 diff --git a/lecture4/2_active_record/main.py b/lecture4/2_active_record/main.py new file mode 100644 index 00000000..6619be91 --- /dev/null +++ b/lecture4/2_active_record/main.py @@ -0,0 +1,74 @@ +from typing import List, Optional +from datetime import datetime +from sqlmodel import 
SQLModel, Field, Session, select + + +# === ActiveRecord модели === + +class User(SQLModel, table=True): + __tablename__ = "users" + + id: Optional[int] = Field(default=None, primary_key=True) + email: str = Field(unique=True, index=True, max_length=255) + name: str = Field(max_length=255) + age: int = Field(ge=0) + created_at: Optional[datetime] = Field(default_factory=datetime.utcnow) + updated_at: Optional[datetime] = Field(default_factory=datetime.utcnow) + + # === ActiveRecord методы === + + @classmethod + def create(cls, session: Session, email: str, name: str, age: int) -> "User": + """Создание нового пользователя""" + user = cls(email=email, name=name, age=age) + session.add(user) + session.commit() + session.refresh(user) + return user + + @classmethod + def find_by_id(cls, session: Session, user_id: int) -> Optional["User"]: + """Поиск пользователя по ID""" + return session.get(cls, user_id) + + @classmethod + def find_by_email(cls, session: Session, email: str) -> Optional["User"]: + """Поиск пользователя по email""" + statement = select(cls).where(cls.email == email) + return session.exec(statement).first() + + @classmethod + def get_all_with_stats(cls, session: Session) -> List[dict]: + """Получение всех пользователей со статистикой заказов""" + statement = select(cls).order_by(cls.created_at) + users = session.exec(statement).all() + + result = [] + for user in users: + result.append({ + "id": user.id, + "name": user.name, + "email": user.email, + "age": user.age, + "order_count": 0 + }) + return result + + def update_age(self, session: Session, new_age: int) -> "User": + """Обновление возраста пользователя""" + self.age = new_age + self.updated_at = datetime.utcnow() + session.add(self) + session.commit() + session.refresh(self) + return self + + def to_dict(self) -> dict: + """Преобразование в словарь для вывода""" + return { + "id": self.id, + "email": self.email, + "name": self.name, + "age": self.age, + "created_at": self.created_at + } diff 
--git a/lecture4/2_active_record/requirements.txt b/lecture4/2_active_record/requirements.txt new file mode 100644 index 00000000..0265b8a7 --- /dev/null +++ b/lecture4/2_active_record/requirements.txt @@ -0,0 +1,3 @@ +sqlmodel==0.0.14 +psycopg2-binary==2.9.9 +python-dotenv==1.0.0 diff --git a/lecture4/3_data_mapper_sqlalchemy/main.py b/lecture4/3_data_mapper_sqlalchemy/main.py new file mode 100644 index 00000000..f3b99486 --- /dev/null +++ b/lecture4/3_data_mapper_sqlalchemy/main.py @@ -0,0 +1,156 @@ +from typing import List, Optional +from dataclasses import dataclass +from abc import ABC, abstractmethod + +from sqlalchemy import Column, Integer, String, DateTime +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import Session +from sqlalchemy.sql import func + + +# === Доменные модели (без привязки к БД) === + +@dataclass +class User: + """Доменная модель пользователя""" + id: Optional[int] = None + email: str = "" + name: str = "" + age: int = 0 + + +# === SQLAlchemy модели (для мапинга с БД) === + +Base = declarative_base() + + +class UserOrm(Base): + __tablename__ = 'users' + + id = Column(Integer, primary_key=True) + email = Column(String(255), unique=True, nullable=False) + name = Column(String(255), nullable=False) + age = Column(Integer, nullable=False) + created_at = Column(DateTime, default=func.now()) + updated_at = Column(DateTime, default=func.now(), onupdate=func.now()) + + +# === Мапперы (преобразование между доменными моделями и ORM) === + +class UserMapper: + """Маппер для преобразования между User и UserOrm""" + + @staticmethod + def to_domain(orm_user: UserOrm) -> User: + """Преобразование ORM модели в доменную""" + return User( + id=orm_user.id, + email=orm_user.email, + name=orm_user.name, + age=orm_user.age + ) + + @staticmethod + def to_orm( + domain_user: User, + orm_user: Optional[UserOrm] = None, + ) -> UserOrm: + """Преобразование доменной модели в ORM""" + if orm_user is None: + orm_user = UserOrm() + + 
orm_user.email = domain_user.email + orm_user.name = domain_user.name + orm_user.age = domain_user.age + + return orm_user + + +# === Абстрактные интерфейсы репозиториев === + +class UserRepositoryInterface(ABC): + """Интерфейс репозитория пользователей""" + + @abstractmethod + def create(self, user: User) -> User: + pass + + @abstractmethod + def find_by_id(self, user_id: int) -> Optional[User]: + pass + + @abstractmethod + def find_by_email(self, email: str) -> Optional[User]: + pass + + @abstractmethod + def get_all(self) -> List[User]: + pass + + @abstractmethod + def update(self, user: User) -> User: + pass + + +# === Конкретные реализации репозиториев === + +class SqlAlchemyUserRepository(UserRepositoryInterface): + """SQLAlchemy реализация репозитория пользователей""" + + def __init__(self, session: Session): + self.session = session + + def create(self, user: User) -> User: + orm_user = UserMapper.to_orm(user) + self.session.add(orm_user) + self.session.flush() # Получаем ID без коммита + return UserMapper.to_domain(orm_user) + + def find_by_id(self, user_id: int) -> Optional[User]: + orm_user = self.session.query(UserOrm).filter_by(id=user_id).first() + return UserMapper.to_domain(orm_user) if orm_user else None + + def find_by_email(self, email: str) -> Optional[User]: + orm_user = self.session.query(UserOrm).filter_by(email=email).first() + return UserMapper.to_domain(orm_user) if orm_user else None + + def get_all(self) -> List[User]: + orm_users = self.session.query(UserOrm).order_by(UserOrm.created_at).all() + return [UserMapper.to_domain(orm_user) for orm_user in orm_users] + + def update(self, user: User) -> User: + orm_user = self.session.query(UserOrm).filter_by(id=user.id).first() + if not orm_user: + raise ValueError(f"User with id {user.id} not found") + + UserMapper.to_orm(user, orm_user) + self.session.flush() + return UserMapper.to_domain(orm_user) + + +# === Сервисы для бизнес-логики === + +class UserService: + """Сервис для работы с 
пользователями""" + + def __init__(self, user_repo: UserRepositoryInterface): + self.user_repo = user_repo + + def create_user(self, email: str, name: str, age: int) -> User: + """Создание нового пользователя с валидацией""" + existing_user = self.user_repo.find_by_email(email) + if existing_user: + raise ValueError(f"User with email {email} already exists") + + if age < 0: + raise ValueError("Age cannot be negative") + + user = User(email=email, name=name, age=age) + return self.user_repo.create(user) + + def get_user_with_validation(self, user_id: int) -> User: + """Получение пользователя с проверкой существования""" + user = self.user_repo.find_by_id(user_id) + if not user: + raise ValueError(f"User with id {user_id} not found") + return user diff --git a/lecture4/3_data_mapper_sqlalchemy/requirements.txt b/lecture4/3_data_mapper_sqlalchemy/requirements.txt new file mode 100644 index 00000000..e00142d2 --- /dev/null +++ b/lecture4/3_data_mapper_sqlalchemy/requirements.txt @@ -0,0 +1,3 @@ +sqlalchemy==2.0.25 +psycopg2-binary==2.9.9 +python-dotenv==1.0.0 diff --git a/lecture4/4_edgedb/.gitignore b/lecture4/4_edgedb/.gitignore new file mode 100644 index 00000000..438992eb --- /dev/null +++ b/lecture4/4_edgedb/.gitignore @@ -0,0 +1,112 @@ +# Сгенерированные EdgeDB файлы +generated/ +*.egg-info/ + +# Python +__pycache__/ +*.py[cod] +*$py.class +*.so +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# 
PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# celery beat schedule file +celerybeat-schedule + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ diff --git a/lecture4/4_edgedb/README.md b/lecture4/4_edgedb/README.md new file mode 100644 index 00000000..06796455 --- /dev/null +++ b/lecture4/4_edgedb/README.md @@ -0,0 +1,76 @@ +# Пример работы с EdgeDB + +Этот пример демонстрирует работу с **EdgeDB** (Gel) + +## Что такое EdgeDB? + +EdgeDB (Теперь называется Gel) - это база данных, построенная поверх PostgreSQL, которая предоставляет: + +- **EdgeQL** - мощный язык запросов, похожий на GraphQL +- **Строгую типизацию** - схема определяется в `.gel` файлах +- **Автоматическую генерацию кода** - Python типы из схемы +- **Встроенные миграции** - автоматическое управление схемой +- **Объектно-ориентированные запросы** - работа с объектами, а не таблицами + +## Установка и настройка: + +### 1. Установка EdgeDB CLI +```bash +# macOS +curl --proto '=https' --tlsv1.2 -sSf https://sh.edgedb.com | sh + +# Ubuntu/Debian +curl https://packages.edgedb.com/keys/edgedb.asc | sudo apt-key add - +echo "deb https://packages.edgedb.com/apt $(lsb_release -cs) main" | sudo tee /etc/apt/sources.list.d/edgedb.list +sudo apt update && sudo apt install edgedb-cli +``` + +### 2. Инициализация проекта +```bash +# Переход в папку с EdgeDB +cd 4_edgedb + +# Инициализация проекта EdgeDB +edgedb project init + +# Создание и применение миграций +edgedb migration create +edgedb migrate + +# Установка Python зависимостей +pip install -r requirements.txt +``` + +### 3. 
Генерация типизированного Python кода +```bash +# Генерация асинхронных функций из queries/*.edgeql файлов +edgedb-py --target async --dir queries --out-dir generated + +# Альтернативно - синхронные функции +edgedb-py --target sync --dir queries --out-dir generated + +# Генерация в конкретный файл (все запросы в одном файле) +edgedb-py --target async --dir queries --file generated_queries.py +``` + +Вам сгенерируются Python функции, которые вы сможете вызывать из вашего кода. Они выполнят ваш запрос и вернут сразу Dataclass с результатом запроса, что довольно удобно. + + +### Основные команды: + +```bash +# Генерация асинхронных функций (рекомендуется) +edgedb-py --target async --dir queries --out-dir generated + +# Генерация синхронных функций +edgedb-py --target sync --dir queries --out-dir generated + +# Генерация в один файл +edgedb-py --target async --dir queries --file all_queries.py + +# Генерация с опциями +edgedb-py --target async \ + --dir queries \ + --out-dir generated \ + --no-skip-pyi-files # Создавать .pyi файлы для type hints +``` diff --git a/lecture4/4_edgedb/dbschema/default.gel b/lecture4/4_edgedb/dbschema/default.gel new file mode 100644 index 00000000..7a76af78 --- /dev/null +++ b/lecture4/4_edgedb/dbschema/default.gel @@ -0,0 +1,55 @@ +# Схема базы данных EdgeDB + +# Тип для пользователей +type default::User { + required email: str { + constraint exclusive; + }; + required name: str; + required age: int32 { + constraint min_value(0); + }; + created_at: datetime { + default := datetime_current(); + }; + + # Обратная связь с заказами + multi orders := .$product_id diff --git a/lecture4/4_edgedb/queries/create_order.edgeql b/lecture4/4_edgedb/queries/create_order.edgeql new file mode 100644 index 00000000..6a42ae02 --- /dev/null +++ b/lecture4/4_edgedb/queries/create_order.edgeql @@ -0,0 +1,11 @@ +# Создание заказа с автоматическим вычислением стоимости +WITH + user := (SELECT User FILTER .id = $user_id), + product := (SELECT Product FILTER .id 
= $product_id) +INSERT Order { + user := user, + product := product, + quantity := $quantity, + total_price := product.price * $quantity, + status := 'pending' +} diff --git a/lecture4/4_edgedb/queries/create_product.edgeql b/lecture4/4_edgedb/queries/create_product.edgeql new file mode 100644 index 00000000..f1304e04 --- /dev/null +++ b/lecture4/4_edgedb/queries/create_product.edgeql @@ -0,0 +1,7 @@ +# Создание продукта +INSERT Product { + name := $name, + price := $price, + description := $description, + in_stock := $in_stock +} diff --git a/lecture4/4_edgedb/queries/create_user.edgeql b/lecture4/4_edgedb/queries/create_user.edgeql new file mode 100644 index 00000000..50b23aeb --- /dev/null +++ b/lecture4/4_edgedb/queries/create_user.edgeql @@ -0,0 +1,6 @@ +# Создание пользователя +INSERT User { + email := $email, + name := $name, + age := $age +} diff --git a/lecture4/4_edgedb/queries/delete_user.edgeql b/lecture4/4_edgedb/queries/delete_user.edgeql new file mode 100644 index 00000000..884e4d98 --- /dev/null +++ b/lecture4/4_edgedb/queries/delete_user.edgeql @@ -0,0 +1,3 @@ +# Удаление пользователя +DELETE User +FILTER .id = $user_id diff --git a/lecture4/4_edgedb/requirements.txt b/lecture4/4_edgedb/requirements.txt new file mode 100644 index 00000000..41b198cd --- /dev/null +++ b/lecture4/4_edgedb/requirements.txt @@ -0,0 +1,2 @@ +edgedb==2.1.0 +python-dotenv==1.0.0 diff --git a/lecture4/README.md b/lecture4/README.md new file mode 100644 index 00000000..26822ef7 --- /dev/null +++ b/lecture4/README.md @@ -0,0 +1,15 @@ +## ДЗ + +За каждый пункт - 1 балл + +Внедрить во вторую домашку хранение данных в БД, для этого надо: +1) Добавить БД в docker-compose.yml (если БД - это отдельный сервис, если хотите использовать sqlite, то можно скипнуть этот шаг) +2) Переписать код на взаимодействие с вашей БД (если вы еще этого не сделали, если вы уже написали код с БД, поздравляю, вам остался только 3 пункт) +3) В свободной форме, напишите скрипты, которые просимулируют
разные "проблемы" которые могут возникнуть в транзакциях (dirty read, non-repeatable read, phantom read) и настраивая уровни изоляции покажите, что они действительно решаются (через SQLAlchemy например), то есть: +показать dirty read при read uncommitted +показать что нет dirty read при read committed +показать non-repeatable read при read committed +показать что нет non-repeatable read при repeatable read +показать phantom reads при repeatable read +показать что нет phantom reads при serializable +*Тут зависит от того какую БД вы выбрали, разные БД могут поддерживать разные уровни изоляции \ No newline at end of file diff --git a/lecture4/docker-compose.yml b/lecture4/docker-compose.yml new file mode 100644 index 00000000..24a07eee --- /dev/null +++ b/lecture4/docker-compose.yml @@ -0,0 +1,34 @@ +version: '3.8' + +services: + postgres: + image: postgres:15 + container_name: hw4_postgres + environment: + POSTGRES_DB: hw4_db + POSTGRES_USER: postgres + POSTGRES_PASSWORD: password + ports: + - "5432:5432" + volumes: + - postgres_data:/var/lib/postgresql/data + - ./migrations/init.sql:/docker-entrypoint-initdb.d/init.sql + healthcheck: + test: ["CMD-SHELL", "pg_isready -U postgres"] + interval: 5s + timeout: 5s + retries: 5 + + edgedb: + image: edgedb/edgedb:5 + container_name: hw4_edgedb + environment: + EDGEDB_SERVER_SECURITY: insecure_dev_mode + ports: + - "5656:5656" + volumes: + - edgedb_data:/var/lib/edgedb/data + +volumes: + postgres_data: + edgedb_data: diff --git a/lecture4/migrations/init.sql b/lecture4/migrations/init.sql new file mode 100644 index 00000000..88d07db8 --- /dev/null +++ b/lecture4/migrations/init.sql @@ -0,0 +1,79 @@ +-- Создание схемы базы данных для примеров +DROP TABLE IF EXISTS orders CASCADE; +DROP TABLE IF EXISTS products CASCADE; +DROP TABLE IF EXISTS users CASCADE; + +-- Таблица пользователей +CREATE TABLE users ( + id SERIAL PRIMARY KEY, + email VARCHAR(255) UNIQUE NOT NULL, + name VARCHAR(255) NOT NULL, + age INTEGER CHECK (age >= 0),
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP +); + +-- Таблица продуктов +CREATE TABLE products ( + id SERIAL PRIMARY KEY, + name VARCHAR(255) NOT NULL, + price DECIMAL(10, 2) NOT NULL CHECK (price >= 0), + description TEXT, + in_stock BOOLEAN DEFAULT TRUE, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP +); + +-- Таблица заказов +CREATE TABLE orders ( + id SERIAL PRIMARY KEY, + user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE, + product_id INTEGER NOT NULL REFERENCES products(id) ON DELETE CASCADE, + quantity INTEGER NOT NULL CHECK (quantity > 0), + total_price DECIMAL(10, 2) NOT NULL CHECK (total_price >= 0), + status VARCHAR(50) DEFAULT 'pending' CHECK (status IN ('pending', 'processing', 'shipped', 'delivered', 'cancelled')), + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP +); + +-- Индексы для оптимизации запросов +CREATE INDEX idx_users_email ON users(email); +CREATE INDEX idx_orders_user_id ON orders(user_id); +CREATE INDEX idx_orders_product_id ON orders(product_id); +CREATE INDEX idx_orders_status ON orders(status); + +-- Триггер для автоматического обновления updated_at +CREATE OR REPLACE FUNCTION update_updated_at_column() +RETURNS TRIGGER AS $$ +BEGIN + NEW.updated_at = CURRENT_TIMESTAMP; + RETURN NEW; +END; +$$ LANGUAGE plpgsql; + +CREATE TRIGGER update_users_updated_at BEFORE UPDATE ON users + FOR EACH ROW EXECUTE FUNCTION update_updated_at_column(); + +CREATE TRIGGER update_products_updated_at BEFORE UPDATE ON products + FOR EACH ROW EXECUTE FUNCTION update_updated_at_column(); + +CREATE TRIGGER update_orders_updated_at BEFORE UPDATE ON orders + FOR EACH ROW EXECUTE FUNCTION update_updated_at_column(); + +-- Вставка тестовых данных +INSERT INTO users (email, name, age) VALUES + ('alice@example.com', 'Alice Johnson', 28), + ('bob@example.com', 'Bob Smith', 35), + 
('charlie@example.com', 'Charlie Brown', 42); + +INSERT INTO products (name, price, description, in_stock) VALUES + ('Laptop', 999.99, 'High-performance laptop', TRUE), + ('Mouse', 29.99, 'Wireless optical mouse', TRUE), + ('Keyboard', 79.99, 'Mechanical gaming keyboard', FALSE), + ('Monitor', 299.99, '24-inch LCD monitor', TRUE); + +INSERT INTO orders (user_id, product_id, quantity, total_price, status) VALUES + (1, 1, 1, 999.99, 'delivered'), + (1, 2, 2, 59.98, 'shipped'), + (2, 3, 1, 79.99, 'processing'), + (3, 4, 1, 299.99, 'pending'); diff --git a/prometheus.yml b/prometheus.yml new file mode 100644 index 00000000..f3dbc6ef --- /dev/null +++ b/prometheus.yml @@ -0,0 +1,8 @@ +global: + scrape_interval: 5s + +scrape_configs: + - job_name: "shop_api" + metrics_path: /metrics + static_configs: + - targets: ["app:8000"]