diff --git a/.coveragerc b/.coveragerc new file mode 100644 index 00000000..c0d74f98 --- /dev/null +++ b/.coveragerc @@ -0,0 +1,8 @@ +[run] +omit = + hw2/hw/shop_api/grpc_server.py + hw2/hw/shop_api/shop_pb2.py + hw2/hw/shop_api/shop_pb2_grpc.py + hw2/hw/shop_api/main.py + + diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml new file mode 100644 index 00000000..4c050724 --- /dev/null +++ b/.github/workflows/tests.yml @@ -0,0 +1,51 @@ +name: tests + +on: + push: + branches: [ main ] + pull_request: + branches: [ main ] + +jobs: + pytest: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.11' + + - name: Cache pip + uses: actions/cache@v4 + with: + path: ~/.cache/pip + key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }} + restore-keys: | + ${{ runner.os }}-pip- + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install -r hw2/hw/requirements.txt -r lecture5/requirements.txt + + - name: Run tests with coverage (HW2) + env: + PYTHONPATH: hw2/hw + run: | + pytest -vv --maxfail=1 \ + --cov=shop_api \ + --cov-report=term-missing \ + --cov-fail-under=95 \ + hw2/hw/test_homework2.py + + - name: Upload coverage (artifact) + if: always() + uses: actions/upload-artifact@v4 + with: + name: coverage-report + path: ./.coverage* + + diff --git a/hw1/app.py b/hw1/app.py index 6107b870..303e081d 100644 --- a/hw1/app.py +++ b/hw1/app.py @@ -1,4 +1,6 @@ from typing import Any, Awaitable, Callable +import json +from urllib.parse import parse_qs async def application( @@ -12,7 +14,122 @@ async def application( receive: Корутина для получения сообщений от клиента send: Корутина для отправки сообщений клиенту """ - # TODO: Ваша реализация здесь + + method = scope.get("method", "").upper() + path = scope.get("path", "") + + async def send_json(status: int, payload: dict[str, Any] | None = None) -> None: + 
body_bytes = json.dumps(payload or {}).encode("utf-8") + headers = [ + (b"content-type", b"application/json; charset=utf-8"), + ] + await send({ + "type": "http.response.start", + "status": status, + "headers": headers, + }) + await send({ + "type": "http.response.body", + "body": body_bytes, + }) + + async def read_body() -> bytes: + chunks: list[bytes] = [] + while True: + message = await receive() + if message.get("type") != "http.request": + continue + body = message.get("body", b"") or b"" + if body: + chunks.append(body) + if not message.get("more_body", False): + break + return b"".join(chunks) + + if method != "GET": + await send_json(404, {"detail": "Not Found"}) + return + + if path == "/factorial": + raw_qs = scope.get("query_string", b"") + qs = parse_qs(raw_qs.decode("utf-8"), keep_blank_values=True) + values = qs.get("n") + if not values or values[0] == "": + await send_json(422, {"detail": "Query parameter 'n' is required"}) + return + try: + n = int(values[0]) + except ValueError: + await send_json(422, {"detail": "Query parameter 'n' must be integer"}) + return + if n < 0: + await send_json(400, {"detail": "'n' must be non-negative"}) + return + # factorial + result = 1 + for i in range(2, n + 1): + result *= i + await send_json(200, {"result": result}) + return + + if path == "/mean": + body = await read_body() + if not body: + await send_json(422, {"detail": "JSON body is required"}) + return + try: + data = json.loads(body.decode("utf-8")) + except json.JSONDecodeError: + await send_json(422, {"detail": "Malformed JSON"}) + return + if data is None: + await send_json(422, {"detail": "JSON body is required"}) + return + if not isinstance(data, list) or len(data) == 0: + await send_json(400, {"detail": "Expected non-empty JSON array of numbers"}) + return + # Validate all elements are numbers (int or float) + if not all((isinstance(x, (int, float)) and not isinstance(x, bool)) for x in data): + await send_json(400, {"detail": "Array must contain 
only numbers"}) + return + total = float(sum(float(x) for x in data)) + mean_value = total / len(data) + await send_json(200, {"result": mean_value}) + return + + if path.startswith("/fibonacci"): + if path == "/fibonacci": + await send_json(422, {"detail": "Path parameter 'n' is required"}) + return + if not path.startswith("/fibonacci/"): + await send_json(404, {"detail": "Not Found"}) + return + raw_n = path[len("/fibonacci/") :] + if raw_n == "": + await send_json(422, {"detail": "Path parameter 'n' is required"}) + return + try: + n = int(raw_n) + except ValueError: + await send_json(422, {"detail": "Path parameter 'n' must be integer"}) + return + if n < 0: + await send_json(400, {"detail": "'n' must be non-negative"}) + return + # fibonacci + if n == 0: + fib = 0 + elif n == 1: + fib = 1 + else: + a, b = 0, 1 + for _ in range(2, n + 1): + a, b = b, a + b + fib = b + await send_json(200, {"result": fib}) + return + + await send_json(404, {"detail": "Not Found"}) if __name__ == "__main__": import uvicorn diff --git a/hw2/ddoser.py b/hw2/ddoser.py new file mode 100644 index 00000000..79253ae3 --- /dev/null +++ b/hw2/ddoser.py @@ -0,0 +1,115 @@ +from concurrent.futures import ThreadPoolExecutor, as_completed +from dataclasses import dataclass +from random import choice, random +from time import perf_counter +import argparse + +import requests +from faker import Faker + + +faker = Faker() + + +@dataclass +class LoadConfig: + base_url: str + concurrency: int + iterations_per_worker: int + timeout_s: float + items_to_seed: int + + +def create_item(session: requests.Session, base_url: str) -> int: + payload = {"name": faker.word(), "price": round(10 + random() * 90, 2)} + resp = session.post(f"{base_url}/item", json=payload, timeout=5) + resp.raise_for_status() + return int(resp.json()["id"]) if "id" in resp.json() else int(resp.json().get("id", 0)) + + +def seed_items(session: requests.Session, base_url: str, count: int) -> list[int]: + item_ids: list[int] = [] + 
for _ in range(count): + item_id = create_item(session, base_url) + item_ids.append(item_id) + return item_ids + + +def create_cart(session: requests.Session, base_url: str) -> int: + resp = session.post(f"{base_url}/cart", timeout=5) + resp.raise_for_status() + return int(resp.json()["id"]) + + +def add_to_cart(session: requests.Session, base_url: str, cart_id: int, item_id: int) -> None: + resp = session.post(f"{base_url}/cart/{cart_id}/add/{item_id}", timeout=5) + resp.raise_for_status() + + +def list_items(session: requests.Session, base_url: str) -> None: + resp = session.get(f"{base_url}/item", timeout=5) + resp.raise_for_status() + + +def get_cart(session: requests.Session, base_url: str, cart_id: int) -> None: + resp = session.get(f"{base_url}/cart/{cart_id}", timeout=5) + resp.raise_for_status() + + +def worker(config: LoadConfig, worker_index: int, item_ids: list[int]) -> tuple[int, int]: + successes = 0 + failures = 0 + with requests.Session() as session: + cart_id = create_cart(session, config.base_url) + for _ in range(config.iterations_per_worker): + try: + list_items(session, config.base_url) + add_to_cart(session, config.base_url, cart_id, choice(item_ids)) + get_cart(session, config.base_url, cart_id) + successes += 3 + except Exception: + failures += 1 + return successes, failures + + +def run_load(config: LoadConfig) -> None: + start = perf_counter() + with requests.Session() as s: + item_ids = seed_items(s, config.base_url, config.items_to_seed) + + futures = [] + successes = 0 + failures = 0 + with ThreadPoolExecutor(max_workers=config.concurrency) as executor: + for i in range(config.concurrency): + futures.append(executor.submit(worker, config, i, item_ids)) + for fut in as_completed(futures): + ok, bad = fut.result() + successes += ok + failures += bad + + duration = perf_counter() - start + rps = successes / duration if duration > 0 else 0.0 + print(f"done: successes={successes}, failures={failures}, duration_s={duration:.2f}, 
approx_rps={rps:.1f}") + + +def parse_args() -> LoadConfig: + parser = argparse.ArgumentParser(description="Shop API load generator") + parser.add_argument("--base", default="http://localhost:8001", help="Base URL, default http://localhost:8001") + parser.add_argument("--concurrency", type=int, default=16, help="Concurrent workers") + parser.add_argument("--iterations", type=int, default=300, help="Iterations per worker") + parser.add_argument("--timeout", type=float, default=5.0, help="HTTP timeout seconds") + parser.add_argument("--seed-items", type=int, default=5, help="How many items to create before load") + args = parser.parse_args() + return LoadConfig( + base_url=args.base, + concurrency=args.concurrency, + iterations_per_worker=args.iterations, + timeout_s=args.timeout, + items_to_seed=args.seed_items, + ) + + +if __name__ == "__main__": + cfg = parse_args() + run_load(cfg) diff --git a/hw2/docker-compose.yml b/hw2/docker-compose.yml new file mode 100644 index 00000000..41c72b8f --- /dev/null +++ b/hw2/docker-compose.yml @@ -0,0 +1,49 @@ +services: + shop-api: + build: + context: ./hw + dockerfile: Dockerfile + container_name: shop-api + ports: + - "8001:8000" # FastAPI HTTP (includes /metrics) + - "50051:50051" # gRPC + networks: + - monitor-net + + prometheus: + image: prom/prometheus:v2.54.1 + container_name: prometheus + volumes: + - ./monitoring/prometheus.yml:/etc/prometheus/prometheus.yml:ro + command: + - --config.file=/etc/prometheus/prometheus.yml + - --storage.tsdb.path=/prometheus + - --web.enable-lifecycle + ports: + - "9090:9090" + depends_on: + - shop-api + networks: + - monitor-net + + grafana: + image: grafana/grafana:11.2.0 + container_name: grafana + ports: + - "3000:3000" + environment: + - GF_PATHS_PROVISIONING=/etc/grafana/provisioning + volumes: + - ./monitoring/grafana/datasources:/etc/grafana/provisioning/datasources:ro + - ./monitoring/grafana/dashboards:/etc/grafana/provisioning/dashboards:ro + - 
./monitoring/grafana/dashboards_json:/var/lib/grafana/dashboards:ro + depends_on: + - prometheus + networks: + - monitor-net + +networks: + monitor-net: + driver: bridge + + diff --git a/hw2/hw/Dockerfile b/hw2/hw/Dockerfile new file mode 100644 index 00000000..3cd6e278 --- /dev/null +++ b/hw2/hw/Dockerfile @@ -0,0 +1,23 @@ +FROM python:3.11-slim + +ENV PYTHONDONTWRITEBYTECODE=1 \ + PYTHONUNBUFFERED=1 \ + PIP_NO_CACHE_DIR=1 \ + UVICORN_WORKERS=1 + +WORKDIR /app + +# Install build deps for grpcio if needed +RUN apt-get update && apt-get install -y --no-install-recommends build-essential && rm -rf /var/lib/apt/lists/* + +COPY requirements.txt ./ +RUN pip install -r requirements.txt + +COPY . . + +EXPOSE 8000 50051 + +# Run FastAPI (serves /metrics) and starts gRPC on startup event +CMD ["python", "-m", "uvicorn", "shop_api.main:app", "--host", "0.0.0.0", "--port", "8000"] + + diff --git a/hw2/hw/README.md b/hw2/hw/README.md index ba9f23c8..8ce34407 100644 --- a/hw2/hw/README.md +++ b/hw2/hw/README.md @@ -1,3 +1,23 @@ +## Мониторинг и Docker (ДЗ 3) + +- HTTP сервис доступен на `http://localhost:8001`, метрики Prometheus на `http://localhost:8001/metrics`. +- gRPC сервис слушает порт `50051` внутри контейнера и проброшен наружу. +- Prometheus: `http://localhost:9090` +- Grafana: `http://localhost:3000` (логин: `admin`, пароль по умолчанию `admin`). + +### Локальный запуск + +```bash +docker compose up -d --build +``` + +После старта: Grafana → Dashboards → Shop API Overview + +### Скриншот Grafana + +![Grafana dashboard](./grafana_graph.png) + + # ДЗ ## Задание - REST API (3 балла) @@ -121,3 +141,5 @@ export PYTHONPATH=${PWD}/hw2/hw начале в следующем виде: `{username} :: {message}`. Если делаете его, напишите, пожалуйста, прямо в PR-e об этом. 
Мне будет сильно проще это заметить<3 + + diff --git a/hw2/hw/grafana_graph.png b/hw2/hw/grafana_graph.png new file mode 100644 index 00000000..94c1de37 Binary files /dev/null and b/hw2/hw/grafana_graph.png differ diff --git a/hw2/hw/hw4/isolation_demo.py b/hw2/hw/hw4/isolation_demo.py new file mode 100644 index 00000000..2ec55687 --- /dev/null +++ b/hw2/hw/hw4/isolation_demo.py @@ -0,0 +1,113 @@ +import sqlite3 +import threading +import time + + +def setup(db: str) -> None: + with sqlite3.connect(db, isolation_level=None) as conn: + conn.execute("PRAGMA foreign_keys=ON;") + conn.execute("DROP TABLE IF EXISTS t;") + conn.execute("CREATE TABLE t(id INTEGER PRIMARY KEY, value INTEGER);") + conn.execute("INSERT INTO t(id, value) VALUES(1, 0);") + + +def read_uncommitted_demo(db: str) -> None: + # SQLite does not support READ UNCOMMITTED for dirty reads by default; it still + # prevents dirty reads because readers don't see uncommitted changes. + # We demonstrate the behavior: reader won't see uncommitted writer value. 
+ conn_w = sqlite3.connect(db, isolation_level="DEFERRED") + conn_r = sqlite3.connect(db, isolation_level="DEFERRED") + try: + cur_w = conn_w.cursor() + cur_r = conn_r.cursor() + cur_w.execute("BEGIN;") + cur_w.execute("UPDATE t SET value = 100 WHERE id = 1;") + + # Reader starts after writer's uncommitted change + cur_r.execute("BEGIN;") + cur_r.execute("SELECT value FROM t WHERE id = 1;") + print("READ_UNCOMMITTED_SIM value seen by reader (should be 0 in SQLite):", cur_r.fetchone()[0]) + + conn_w.rollback() + conn_r.rollback() + finally: + conn_w.close() + conn_r.close() + + +def non_repeatable_read_demo(db: str) -> None: + # Reader should see same snapshot in its transaction + conn_r = sqlite3.connect(db, isolation_level="IMMEDIATE") + try: + c_r = conn_r.cursor() + c_r.execute("BEGIN IMMEDIATE;") + c_r.execute("SELECT value FROM t WHERE id = 1;") + v1 = c_r.fetchone()[0] + + def writer(): + # Writer will block until reader commits/rollbacks due to IMMEDIATE lock + try: + with sqlite3.connect(db, isolation_level="IMMEDIATE") as conn_w: + c_w = conn_w.cursor() + c_w.execute("BEGIN IMMEDIATE;") + c_w.execute("UPDATE t SET value = value + 1 WHERE id = 1;") + conn_w.commit() + except Exception as e: + print("writer error:", e) + + t_w = threading.Thread(target=writer) + t_w.start() + time.sleep(0.2) + # Second read within same txn returns same snapshot + c_r.execute("SELECT value FROM t WHERE id = 1;") + v2 = c_r.fetchone()[0] + print("NON_REPEATABLE_READ_SIM v1 == v2 (SQLite snapshot):", v1 == v2) + conn_r.commit() + t_w.join() + finally: + conn_r.close() + + +def phantom_read_demo(db: str) -> None: + # SQLite uses table-level locks that prevent concurrent writes during IMMEDIATE txn. + # Reader won't see phantoms within the same transaction. 
+ conn_r = sqlite3.connect(db, isolation_level="IMMEDIATE") + try: + c_r = conn_r.cursor() + c_r.execute("BEGIN IMMEDIATE;") + c_r.execute("SELECT COUNT(*) FROM t;") + n1 = c_r.fetchone()[0] + + def writer(): + try: + with sqlite3.connect(db, isolation_level="IMMEDIATE") as conn_w: + c_w = conn_w.cursor() + c_w.execute("BEGIN IMMEDIATE;") + c_w.execute("INSERT INTO t(value) VALUES(10);") + conn_w.commit() + except Exception as e: + print("writer error:", e) + + t_w = threading.Thread(target=writer) + t_w.start() + time.sleep(0.2) + c_r.execute("SELECT COUNT(*) FROM t;") + n2 = c_r.fetchone()[0] + print("PHANTOM_READ_SIM n1 == n2 (SQLite snapshot):", n1 == n2) + conn_r.commit() + t_w.join() + finally: + conn_r.close() + + +if __name__ == "__main__": + db_file = "isolation_demo.db" + setup(db_file) + print("-- read uncommitted demo (SQLite prevents dirty read) --") + read_uncommitted_demo(db_file) + print("-- non-repeatable read demo (snapshot) --") + non_repeatable_read_demo(db_file) + print("-- phantom read demo (snapshot) --") + phantom_read_demo(db_file) + + diff --git a/hw2/hw/requirements.txt b/hw2/hw/requirements.txt index 207dcf5c..53de6e6c 100644 --- a/hw2/hw/requirements.txt +++ b/hw2/hw/requirements.txt @@ -7,3 +7,6 @@ pytest>=7.4.0 pytest-asyncio>=0.21.0 httpx>=0.27.2 Faker>=37.8.0 +grpcio>=1.62.0 +grpcio-tools>=1.62.0 +prometheus-client>=0.20.0 diff --git a/hw2/hw/shop.db b/hw2/hw/shop.db new file mode 100644 index 00000000..8dc9d62e Binary files /dev/null and b/hw2/hw/shop.db differ diff --git a/hw2/hw/shop_api/api/__init__.py b/hw2/hw/shop_api/api/__init__.py new file mode 100644 index 00000000..6039f90d --- /dev/null +++ b/hw2/hw/shop_api/api/__init__.py @@ -0,0 +1,3 @@ +# namespace package for API routers + + diff --git a/hw2/hw/shop_api/api/cart.py b/hw2/hw/shop_api/api/cart.py new file mode 100644 index 00000000..bb567871 --- /dev/null +++ b/hw2/hw/shop_api/api/cart.py @@ -0,0 +1,65 @@ +from typing import List, Optional + +from fastapi import 
APIRouter, HTTPException, Query, Response + +from ..schemas import Cart +from ..storage import ( + add_to_cart as db_add_to_cart, + cart_to_model as db_cart_to_model, + create_cart as db_create_cart, + list_carts as db_list_carts, +) + + +router = APIRouter(prefix="/cart") + + +def cart_to_model_or_404(cart_id: int) -> Cart: + model = db_cart_to_model(cart_id) + if model is None: + raise HTTPException(status_code=404, detail="Cart not found") + return model + + +@router.post("", status_code=201) +def create_cart(response: Response) -> dict: + cid = db_create_cart() + response.headers["Location"] = f"/cart/{cid}" + return {"id": cid} + + +@router.get("/{cart_id}") +def get_cart(cart_id: int) -> Cart: + return cart_to_model_or_404(cart_id) + + +@router.get("") +def list_carts( + offset: int = Query(0, ge=0), + limit: int = Query(10, gt=0), + min_price: Optional[float] = Query(default=None, ge=0), + max_price: Optional[float] = Query(default=None, ge=0), + min_quantity: Optional[int] = Query(default=None, ge=0), + max_quantity: Optional[int] = Query(default=None, ge=0), +) -> List[Cart]: + return db_list_carts( + offset=offset, + limit=limit, + min_price=min_price, + max_price=max_price, + min_quantity=min_quantity, + max_quantity=max_quantity, + ) + + +@router.post("/{cart_id}/add/{item_id}") +def add_to_cart(cart_id: int, item_id: int) -> Cart: + try: + model = db_add_to_cart(cart_id, item_id) + except KeyError: + raise HTTPException(status_code=404, detail="Item not found") + if model is None: + raise HTTPException(status_code=404, detail="Cart not found") + return model + + diff --git a/hw2/hw/shop_api/api/item.py b/hw2/hw/shop_api/api/item.py new file mode 100644 index 00000000..51e04591 --- /dev/null +++ b/hw2/hw/shop_api/api/item.py @@ -0,0 +1,75 @@ +from typing import List, Optional + +from fastapi import APIRouter, HTTPException, Query, Response + +from ..schemas import Item, ItemCreate, ItemPatch, ItemPut +from ..storage import ( + create_item as 
db_create_item, + get_item as db_get_item, + list_items as db_list_items, + patch_item as db_patch_item, + replace_item as db_replace_item, + soft_delete_item as db_soft_delete_item, +) + + +router = APIRouter(prefix="/item") + + +@router.post("", status_code=201) +def create_item(body: ItemCreate, response: Response) -> Item: + item = db_create_item(body.name, body.price) + response.headers["Location"] = f"/item/{item.id}" + return item + + +@router.get("/{item_id}") +def get_item(item_id: int) -> Item: + item = db_get_item(item_id) + if item is None: + raise HTTPException(status_code=404, detail="Item not found") + return item + + +@router.get("") +def list_items( + offset: int = Query(0, ge=0), + limit: int = Query(10, gt=0), + min_price: Optional[float] = Query(default=None, ge=0), + max_price: Optional[float] = Query(default=None, ge=0), + show_deleted: bool = False, +) -> List[Item]: + return db_list_items( + offset=offset, + limit=limit, + min_price=min_price, + max_price=max_price, + show_deleted=show_deleted, + ) + + +@router.put("/{item_id}") +def put_item(item_id: int, body: ItemPut) -> Item: + item = db_replace_item(item_id, body.name, body.price) + if item is None: + raise HTTPException(status_code=404, detail="Item not found") + return item + + +@router.patch("/{item_id}") +def patch_item(item_id: int, body: ItemPatch) -> Item: + status, item = db_patch_item(item_id, name=body.name, price=body.price) + if status == "not_found": + raise HTTPException(status_code=404, detail="Item not found") + if status == "deleted": + raise HTTPException(status_code=304, detail="Item is deleted") + assert item is not None + return item + + +@router.delete("/{item_id}") +def delete_item(item_id: int) -> dict: + db_soft_delete_item(item_id) + return {"status": "ok"} + + diff --git a/hw2/hw/shop_api/grpc_server.py b/hw2/hw/shop_api/grpc_server.py new file mode 100644 index 00000000..0d1a3cdf --- /dev/null +++ b/hw2/hw/shop_api/grpc_server.py @@ -0,0 +1,81 @@ +import 
asyncio +from concurrent import futures + +import grpc + +from . import schemas +from .storage import ( + add_to_cart as db_add_to_cart, + cart_to_model as db_cart_to_model, + create_cart as db_create_cart, + create_item as db_create_item, + get_item as db_get_item, +) +from .shop_pb2 import ( + AddToCartRequest as PbAddToCartRequest, + Cart as PbCart, + CartItem as PbCartItem, + Empty as PbEmpty, + Id as PbId, + Item as PbItem, + ItemCreate as PbItemCreate, +) +from .shop_pb2_grpc import ShopServicer, add_ShopServicer_to_server + + +def to_pb_cart(cart_id: int) -> PbCart: + model = db_cart_to_model(cart_id) + if model is None: + # Should be validated by caller + return PbCart(id=cart_id, items=[], price=0.0) + items = [PbCartItem(id=ci.id, quantity=ci.quantity) for ci in model.items] + return PbCart(id=model.id, items=items, price=model.price) + + +class ShopService(ShopServicer): + def CreateCart(self, request: PbEmpty, context: grpc.ServicerContext) -> PbId: + cid = db_create_cart() + return PbId(id=cid) + + def GetCart(self, request: PbId, context: grpc.ServicerContext) -> PbCart: + cid = request.id + if db_cart_to_model(cid) is None: + context.abort(grpc.StatusCode.NOT_FOUND, "Cart not found") + return to_pb_cart(cid) + + def AddToCart(self, request: PbAddToCartRequest, context: grpc.ServicerContext) -> PbCart: + cid = request.cart_id + iid = request.item_id + try: + model = db_add_to_cart(cid, iid) + except KeyError: + context.abort(grpc.StatusCode.NOT_FOUND, "Item not found") + if model is None: + context.abort(grpc.StatusCode.NOT_FOUND, "Cart not found") + return to_pb_cart(cid) + + def CreateItem(self, request: PbItemCreate, context: grpc.ServicerContext) -> PbItem: + item = db_create_item(request.name, request.price) + return PbItem(id=item.id, name=item.name, price=item.price, deleted=item.deleted) + + def GetItem(self, request: PbId, context: grpc.ServicerContext) -> PbItem: + item = db_get_item(request.id) + if item is None: + 
context.abort(grpc.StatusCode.NOT_FOUND, "Item not found") + return PbItem(id=item.id, name=item.name, price=item.price, deleted=item.deleted) + + +def serve(block: bool = True) -> grpc.Server: + server = grpc.server(futures.ThreadPoolExecutor(max_workers=10)) + add_ShopServicer_to_server(ShopService(), server) + server.add_insecure_port("[::]:50051") + server.start() + if block: + server.wait_for_termination() + return server + + +if __name__ == "__main__": + serve(block=True) + + diff --git a/hw2/hw/shop_api/main.py b/hw2/hw/shop_api/main.py index f60a8c60..383c707d 100644 --- a/hw2/hw/shop_api/main.py +++ b/hw2/hw/shop_api/main.py @@ -1,3 +1,94 @@ -from fastapi import FastAPI +from fastapi import FastAPI, Request, Response +from time import perf_counter + +from .api.cart import router as cart_router +from .api.item import router as item_router +from .grpc_server import serve as grpc_serve +from .storage import init_db + +# Prometheus metrics +from prometheus_client import ( + Counter, + Histogram, + CONTENT_TYPE_LATEST, + generate_latest, +) + + +# Basic HTTP metrics +HTTP_REQUESTS_TOTAL = Counter( + "http_requests_total", + "Total HTTP requests", + labelnames=("method", "path", "status"), +) + +HTTP_REQUEST_LATENCY_SECONDS = Histogram( + "http_request_latency_seconds", + "Latency of HTTP requests in seconds", + labelnames=("method", "path", "status"), + buckets=(0.005, 0.01, 0.025, 0.05, 0.1, 0.25, 0.5, 1, 2.5, 5, 10), +) + app = FastAPI(title="Shop API") + +app.include_router(item_router) +app.include_router(cart_router) +# Ensure DB is ready even if startup hooks are not executed by the test client +init_db() + + +@app.middleware("http") +async def metrics_middleware(request: Request, call_next): + # Use raw path. For high-cardinality paths, consider normalization. 
+ path = request.url.path + method = request.method + if path == "/metrics": + return await call_next(request) + labels = {"method": method, "path": path} + status_code = 500 + start = perf_counter() + try: + response: Response = await call_next(request) + status_code = getattr(response, "status_code", 200) + return response + except Exception as exc: + # If this is an HTTP exception, capture the status code; then re-raise + try: + from fastapi import HTTPException as FastApiHTTPException + from starlette.exceptions import HTTPException as StarletteHTTPException + if isinstance(exc, (FastApiHTTPException, StarletteHTTPException)): + status_code = getattr(exc, "status_code", 500) + except Exception: + pass + raise + finally: + duration = perf_counter() - start + HTTP_REQUEST_LATENCY_SECONDS.labels(**labels, status=str(status_code)).observe(duration) + HTTP_REQUESTS_TOTAL.labels(method=method, path=path, status=str(status_code)).inc() + + +@app.get("/metrics") +def metrics() -> Response: + data = generate_latest() + return Response(content=data, media_type=CONTENT_TYPE_LATEST) + + +_grpc_server = None + + +@app.on_event("startup") +def _start_grpc_server() -> None: + global _grpc_server + # Init SQLite database + init_db() + # Start gRPC server in background thread + _grpc_server = grpc_serve(block=False) + + +@app.on_event("shutdown") +def _stop_grpc_server() -> None: + global _grpc_server + if _grpc_server is not None: + _grpc_server.stop(grace=None) + _grpc_server = None diff --git a/hw2/hw/shop_api/proto/shop.proto b/hw2/hw/shop_api/proto/shop.proto new file mode 100644 index 00000000..5dfcd87c --- /dev/null +++ b/hw2/hw/shop_api/proto/shop.proto @@ -0,0 +1,22 @@ +syntax = "proto3"; +package shop; + +message Empty {} + +message Id { int32 id = 1; } + +message ItemCreate { string name = 1; double price = 2; } +message Item { int32 id = 1; string name = 2; double price = 3; bool deleted = 4; } + +message CartItem { int32 id = 1; int32 quantity = 2; } +message Cart 
{ int32 id = 1; repeated CartItem items = 2; double price = 3; } + +message AddToCartRequest { int32 cart_id = 1; int32 item_id = 2; } + +service Shop { + rpc CreateCart(Empty) returns (Id); + rpc GetCart(Id) returns (Cart); + rpc AddToCart(AddToCartRequest) returns (Cart); + rpc CreateItem(ItemCreate) returns (Item); + rpc GetItem(Id) returns (Item); +} \ No newline at end of file diff --git a/hw2/hw/shop_api/schemas.py b/hw2/hw/shop_api/schemas.py new file mode 100644 index 00000000..bc10071b --- /dev/null +++ b/hw2/hw/shop_api/schemas.py @@ -0,0 +1,44 @@ +from typing import Optional, List + +from pydantic import BaseModel, Field + +# Определяет модели Pydantic: ItemCreate, ItemPut, ItemPatch, Item, CartItem, Cart + +class ItemBase(BaseModel): + name: str + price: float = Field(ge=0) + + +class ItemCreate(ItemBase): + pass + + +class ItemPut(ItemBase): + pass + + +class ItemPatch(BaseModel): + name: Optional[str] = None + price: Optional[float] = Field(default=None, ge=0) + + class Config: + extra = "forbid" + validate_assignment = True + + +class Item(ItemBase): + id: int + deleted: bool = False + + +class CartItem(BaseModel): + id: int + quantity: int = Field(ge=1) + + +class Cart(BaseModel): + id: int + items: List[CartItem] + price: float + + diff --git a/hw2/hw/shop_api/shop_pb2.py b/hw2/hw/shop_api/shop_pb2.py new file mode 100644 index 00000000..d33b6b4f --- /dev/null +++ b/hw2/hw/shop_api/shop_pb2.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# NO CHECKED-IN PROTOBUF GENCODE +# source: shop.proto +# Protobuf Python Version: 6.31.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, + 6, + 31, + 1, + '', + 'shop.proto' +) +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\nshop.proto\x12\x04shop\"\x07\n\x05\x45mpty\"\x10\n\x02Id\x12\n\n\x02id\x18\x01 \x01(\x05\")\n\nItemCreate\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05price\x18\x02 \x01(\x01\"@\n\x04Item\x12\n\n\x02id\x18\x01 \x01(\x05\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\r\n\x05price\x18\x03 \x01(\x01\x12\x0f\n\x07\x64\x65leted\x18\x04 \x01(\x08\"(\n\x08\x43\x61rtItem\x12\n\n\x02id\x18\x01 \x01(\x05\x12\x10\n\x08quantity\x18\x02 \x01(\x05\"@\n\x04\x43\x61rt\x12\n\n\x02id\x18\x01 \x01(\x05\x12\x1d\n\x05items\x18\x02 \x03(\x0b\x32\x0e.shop.CartItem\x12\r\n\x05price\x18\x03 \x01(\x01\"4\n\x10\x41\x64\x64ToCartRequest\x12\x0f\n\x07\x63\x61rt_id\x18\x01 \x01(\x05\x12\x0f\n\x07item_id\x18\x02 \x01(\x05\x32\xca\x01\n\x04Shop\x12#\n\nCreateCart\x12\x0b.shop.Empty\x1a\x08.shop.Id\x12\x1f\n\x07GetCart\x12\x08.shop.Id\x1a\n.shop.Cart\x12/\n\tAddToCart\x12\x16.shop.AddToCartRequest\x1a\n.shop.Cart\x12*\n\nCreateItem\x12\x10.shop.ItemCreate\x1a\n.shop.Item\x12\x1f\n\x07GetItem\x12\x08.shop.Id\x1a\n.shop.Itemb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'shop_pb2', _globals) +if not _descriptor._USE_C_DESCRIPTORS: + DESCRIPTOR._loaded_options = None + 
_globals['_EMPTY']._serialized_start=20 + _globals['_EMPTY']._serialized_end=27 + _globals['_ID']._serialized_start=29 + _globals['_ID']._serialized_end=45 + _globals['_ITEMCREATE']._serialized_start=47 + _globals['_ITEMCREATE']._serialized_end=88 + _globals['_ITEM']._serialized_start=90 + _globals['_ITEM']._serialized_end=154 + _globals['_CARTITEM']._serialized_start=156 + _globals['_CARTITEM']._serialized_end=196 + _globals['_CART']._serialized_start=198 + _globals['_CART']._serialized_end=262 + _globals['_ADDTOCARTREQUEST']._serialized_start=264 + _globals['_ADDTOCARTREQUEST']._serialized_end=316 + _globals['_SHOP']._serialized_start=319 + _globals['_SHOP']._serialized_end=521 +# @@protoc_insertion_point(module_scope) diff --git a/hw2/hw/shop_api/shop_pb2.pyi b/hw2/hw/shop_api/shop_pb2.pyi new file mode 100644 index 00000000..fbb85968 --- /dev/null +++ b/hw2/hw/shop_api/shop_pb2.pyi @@ -0,0 +1,63 @@ +from google.protobuf.internal import containers as _containers +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from collections.abc import Iterable as _Iterable, Mapping as _Mapping +from typing import ClassVar as _ClassVar, Optional as _Optional, Union as _Union + +DESCRIPTOR: _descriptor.FileDescriptor + +class Empty(_message.Message): + __slots__ = () + def __init__(self) -> None: ... + +class Id(_message.Message): + __slots__ = ("id",) + ID_FIELD_NUMBER: _ClassVar[int] + id: int + def __init__(self, id: _Optional[int] = ...) -> None: ... + +class ItemCreate(_message.Message): + __slots__ = ("name", "price") + NAME_FIELD_NUMBER: _ClassVar[int] + PRICE_FIELD_NUMBER: _ClassVar[int] + name: str + price: float + def __init__(self, name: _Optional[str] = ..., price: _Optional[float] = ...) -> None: ... 
+ +class Item(_message.Message): + __slots__ = ("id", "name", "price", "deleted") + ID_FIELD_NUMBER: _ClassVar[int] + NAME_FIELD_NUMBER: _ClassVar[int] + PRICE_FIELD_NUMBER: _ClassVar[int] + DELETED_FIELD_NUMBER: _ClassVar[int] + id: int + name: str + price: float + deleted: bool + def __init__(self, id: _Optional[int] = ..., name: _Optional[str] = ..., price: _Optional[float] = ..., deleted: bool = ...) -> None: ... + +class CartItem(_message.Message): + __slots__ = ("id", "quantity") + ID_FIELD_NUMBER: _ClassVar[int] + QUANTITY_FIELD_NUMBER: _ClassVar[int] + id: int + quantity: int + def __init__(self, id: _Optional[int] = ..., quantity: _Optional[int] = ...) -> None: ... + +class Cart(_message.Message): + __slots__ = ("id", "items", "price") + ID_FIELD_NUMBER: _ClassVar[int] + ITEMS_FIELD_NUMBER: _ClassVar[int] + PRICE_FIELD_NUMBER: _ClassVar[int] + id: int + items: _containers.RepeatedCompositeFieldContainer[CartItem] + price: float + def __init__(self, id: _Optional[int] = ..., items: _Optional[_Iterable[_Union[CartItem, _Mapping]]] = ..., price: _Optional[float] = ...) -> None: ... + +class AddToCartRequest(_message.Message): + __slots__ = ("cart_id", "item_id") + CART_ID_FIELD_NUMBER: _ClassVar[int] + ITEM_ID_FIELD_NUMBER: _ClassVar[int] + cart_id: int + item_id: int + def __init__(self, cart_id: _Optional[int] = ..., item_id: _Optional[int] = ...) -> None: ... diff --git a/hw2/hw/shop_api/shop_pb2_grpc.py b/hw2/hw/shop_api/shop_pb2_grpc.py new file mode 100644 index 00000000..1ff6de5e --- /dev/null +++ b/hw2/hw/shop_api/shop_pb2_grpc.py @@ -0,0 +1,269 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc +import warnings + +from . 
import shop_pb2 as shop__pb2 + +GRPC_GENERATED_VERSION = '1.75.1' +GRPC_VERSION = grpc.__version__ +_version_not_supported = False + +try: + from grpc._utilities import first_version_is_lower + _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION) +except ImportError: + _version_not_supported = True + +if _version_not_supported: + raise RuntimeError( + f'The grpc package installed is at version {GRPC_VERSION},' + + f' but the generated code in shop_pb2_grpc.py depends on' + + f' grpcio>={GRPC_GENERATED_VERSION}.' + + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' + + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' + ) + + +class ShopStub(object): + """Missing associated documentation comment in .proto file.""" + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.CreateCart = channel.unary_unary( + '/shop.Shop/CreateCart', + request_serializer=shop__pb2.Empty.SerializeToString, + response_deserializer=shop__pb2.Id.FromString, + _registered_method=True) + self.GetCart = channel.unary_unary( + '/shop.Shop/GetCart', + request_serializer=shop__pb2.Id.SerializeToString, + response_deserializer=shop__pb2.Cart.FromString, + _registered_method=True) + self.AddToCart = channel.unary_unary( + '/shop.Shop/AddToCart', + request_serializer=shop__pb2.AddToCartRequest.SerializeToString, + response_deserializer=shop__pb2.Cart.FromString, + _registered_method=True) + self.CreateItem = channel.unary_unary( + '/shop.Shop/CreateItem', + request_serializer=shop__pb2.ItemCreate.SerializeToString, + response_deserializer=shop__pb2.Item.FromString, + _registered_method=True) + self.GetItem = channel.unary_unary( + '/shop.Shop/GetItem', + request_serializer=shop__pb2.Id.SerializeToString, + response_deserializer=shop__pb2.Item.FromString, + _registered_method=True) + + +class ShopServicer(object): + """Missing associated documentation comment in .proto file.""" + 
+ def CreateCart(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetCart(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def AddToCart(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CreateItem(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetItem(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_ShopServicer_to_server(servicer, server): + rpc_method_handlers = { + 'CreateCart': grpc.unary_unary_rpc_method_handler( + servicer.CreateCart, + request_deserializer=shop__pb2.Empty.FromString, + response_serializer=shop__pb2.Id.SerializeToString, + ), + 'GetCart': grpc.unary_unary_rpc_method_handler( + servicer.GetCart, + request_deserializer=shop__pb2.Id.FromString, + response_serializer=shop__pb2.Cart.SerializeToString, + ), + 'AddToCart': grpc.unary_unary_rpc_method_handler( + servicer.AddToCart, + request_deserializer=shop__pb2.AddToCartRequest.FromString, + response_serializer=shop__pb2.Cart.SerializeToString, + ), + 'CreateItem': 
grpc.unary_unary_rpc_method_handler( + servicer.CreateItem, + request_deserializer=shop__pb2.ItemCreate.FromString, + response_serializer=shop__pb2.Item.SerializeToString, + ), + 'GetItem': grpc.unary_unary_rpc_method_handler( + servicer.GetItem, + request_deserializer=shop__pb2.Id.FromString, + response_serializer=shop__pb2.Item.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'shop.Shop', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + server.add_registered_method_handlers('shop.Shop', rpc_method_handlers) + + + # This class is part of an EXPERIMENTAL API. +class Shop(object): + """Missing associated documentation comment in .proto file.""" + + @staticmethod + def CreateCart(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/shop.Shop/CreateCart', + shop__pb2.Empty.SerializeToString, + shop__pb2.Id.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def GetCart(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/shop.Shop/GetCart', + shop__pb2.Id.SerializeToString, + shop__pb2.Cart.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def AddToCart(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return 
grpc.experimental.unary_unary( + request, + target, + '/shop.Shop/AddToCart', + shop__pb2.AddToCartRequest.SerializeToString, + shop__pb2.Cart.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def CreateItem(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/shop.Shop/CreateItem', + shop__pb2.ItemCreate.SerializeToString, + shop__pb2.Item.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def GetItem(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/shop.Shop/GetItem', + shop__pb2.Id.SerializeToString, + shop__pb2.Item.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) diff --git a/hw2/hw/shop_api/storage.py b/hw2/hw/shop_api/storage.py new file mode 100644 index 00000000..fe737c12 --- /dev/null +++ b/hw2/hw/shop_api/storage.py @@ -0,0 +1,257 @@ +from __future__ import annotations + +import sqlite3 +import threading +from pathlib import Path +from typing import Iterable, List, Optional, Tuple + +from .schemas import Cart, CartItem, Item + + +_connection: Optional[sqlite3.Connection] = None +_lock = threading.Lock() + + +def _conn() -> sqlite3.Connection: + if _connection is None: + raise RuntimeError("Database is not initialized. 
Call init_db() at startup.") + return _connection + + +def init_db(db_path: str | Path = "shop.db") -> None: + global _connection + if _connection is not None: + return + path = str(db_path) + _connection = sqlite3.connect(path, check_same_thread=False, isolation_level=None) + _connection.row_factory = sqlite3.Row + with _connection: + _connection.execute("PRAGMA foreign_keys=ON;") + _connection.execute( + """ + CREATE TABLE IF NOT EXISTS items ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + name TEXT NOT NULL, + price REAL NOT NULL CHECK(price >= 0), + deleted INTEGER NOT NULL DEFAULT 0 + ); + """ + ) + _connection.execute( + """ + CREATE TABLE IF NOT EXISTS carts ( + id INTEGER PRIMARY KEY AUTOINCREMENT + ); + """ + ) + _connection.execute( + """ + CREATE TABLE IF NOT EXISTS cart_items ( + cart_id INTEGER NOT NULL, + item_id INTEGER NOT NULL, + quantity INTEGER NOT NULL CHECK(quantity >= 1), + PRIMARY KEY (cart_id, item_id), + FOREIGN KEY (cart_id) REFERENCES carts(id) ON DELETE CASCADE, + FOREIGN KEY (item_id) REFERENCES items(id) ON DELETE CASCADE + ); + """ + ) + + +# ---------- Item operations ---------- + + +def create_item(name: str, price: float) -> Item: + with _lock, _conn(): + cur = _conn().execute( + "INSERT INTO items(name, price, deleted) VALUES(?, ?, 0)", (name, float(price)) + ) + item_id = int(cur.lastrowid) + row = _conn().execute( + "SELECT id, name, price, deleted FROM items WHERE id = ?", (item_id,) + ).fetchone() + return Item(id=row["id"], name=row["name"], price=float(row["price"]), deleted=bool(row["deleted"])) + + +def _row_to_item(row: sqlite3.Row) -> Item: + return Item(id=row["id"], name=row["name"], price=float(row["price"]), deleted=bool(row["deleted"])) + + +def get_item(item_id: int, include_deleted: bool = False) -> Optional[Item]: + row = _conn().execute( + "SELECT id, name, price, deleted FROM items WHERE id = ?", (item_id,) + ).fetchone() + if row is None: + return None + item = _row_to_item(row) + if not include_deleted and 
item.deleted: + return None + return item + + +def list_items( + *, + offset: int, + limit: int, + min_price: Optional[float], + max_price: Optional[float], + show_deleted: bool, +) -> List[Item]: + clauses: list[str] = [] + params: list[object] = [] + if not show_deleted: + clauses.append("deleted = 0") + if min_price is not None: + clauses.append("price >= ?") + params.append(float(min_price)) + if max_price is not None: + clauses.append("price <= ?") + params.append(float(max_price)) + where = (" WHERE " + " AND ".join(clauses)) if clauses else "" + query = f"SELECT id, name, price, deleted FROM items{where} ORDER BY id LIMIT ? OFFSET ?" + params.extend([int(limit), int(offset)]) + rows = _conn().execute(query, params).fetchall() + return [_row_to_item(r) for r in rows] + + +def replace_item(item_id: int, name: str, price: float) -> Optional[Item]: + with _lock: + row = _conn().execute("SELECT deleted FROM items WHERE id = ?", (item_id,)).fetchone() + if row is None: + return None + if bool(row["deleted"]): + return None + _conn().execute("UPDATE items SET name = ?, price = ? WHERE id = ?", (name, float(price), item_id)) + return get_item(item_id, include_deleted=True) + + +def patch_item(item_id: int, *, name: Optional[str], price: Optional[float]) -> Tuple[str, Optional[Item]]: + # Returns (status, item). status in {"ok", "deleted", "not_found"} + with _lock: + row = _conn().execute("SELECT id, name, price, deleted FROM items WHERE id = ?", (item_id,)).fetchone() + if row is None: + return "not_found", None + if bool(row["deleted"]): + return "deleted", None + new_name = name if name is not None else row["name"] + new_price = float(price) if price is not None else float(row["price"]) + _conn().execute("UPDATE items SET name = ?, price = ? 
WHERE id = ?", (new_name, new_price, item_id)) + return "ok", get_item(item_id, include_deleted=True) + + +def soft_delete_item(item_id: int) -> None: + with _lock: + _conn().execute("UPDATE items SET deleted = 1 WHERE id = ?", (item_id,)) + + +# ---------- Cart operations ---------- + + +def create_cart() -> int: + with _lock: + cur = _conn().execute("INSERT INTO carts DEFAULT VALUES") + return int(cur.lastrowid) + + +def _get_cart_items_map(cart_id: int) -> Optional[dict[int, int]]: + exists = _conn().execute("SELECT 1 FROM carts WHERE id = ?", (cart_id,)).fetchone() + if exists is None: + return None + rows = _conn().execute( + "SELECT item_id, quantity FROM cart_items WHERE cart_id = ? ORDER BY item_id", + (cart_id,), + ).fetchall() + return {int(r["item_id"]): int(r["quantity"]) for r in rows} + + +def compute_cart_price(cart_map: dict[int, int]) -> float: + total = 0.0 + if not cart_map: + return total + item_ids = tuple(cart_map.keys()) + placeholders = ",".join(["?"] * len(item_ids)) + rows = _conn().execute( + f"SELECT id, price, deleted FROM items WHERE id IN ({placeholders})", + item_ids, + ).fetchall() + id_to_price_deleted = {int(r["id"]): (float(r["price"]), bool(r["deleted"])) for r in rows} + for iid, qty in cart_map.items(): + price_deleted = id_to_price_deleted.get(iid) + if price_deleted is None: + continue + price, deleted = price_deleted + if deleted: + continue + total += price * qty + return total + + +def cart_to_model(cart_id: int) -> Optional[Cart]: + cart_map = _get_cart_items_map(cart_id) + if cart_map is None: + return None + items = [CartItem(id=iid, quantity=qty) for iid, qty in cart_map.items()] + return Cart(id=cart_id, items=items, price=compute_cart_price(cart_map)) + + +def list_carts( + *, + offset: int, + limit: int, + min_price: Optional[float], + max_price: Optional[float], + min_quantity: Optional[int], + max_quantity: Optional[int], +) -> List[Cart]: + # Build full list, then filter in Python to keep logic close to original 
+ rows = _conn().execute("SELECT id FROM carts ORDER BY id").fetchall() + carts: List[Cart] = [] + for r in rows: + model = cart_to_model(int(r["id"])) + if model is not None: + carts.append(model) + if min_price is not None: + carts = [c for c in carts if c.price >= min_price] + if max_price is not None: + carts = [c for c in carts if c.price <= max_price] + + def qsum(c: Cart) -> int: + return sum(ci.quantity for ci in c.items) + + if min_quantity is not None: + carts = [c for c in carts if qsum(c) >= min_quantity] + if max_quantity is not None: + carts = [c for c in carts if qsum(c) <= max_quantity] + + return carts[offset : offset + limit] + + +def add_to_cart(cart_id: int, item_id: int) -> Optional[Cart]: + with _lock: + # Validate cart + exist = _conn().execute("SELECT 1 FROM carts WHERE id = ?", (cart_id,)).fetchone() + if exist is None: + return None + # Validate item and not deleted + row = _conn().execute("SELECT deleted FROM items WHERE id = ?", (item_id,)).fetchone() + if row is None or bool(row["deleted"]): + # Item not available + raise KeyError("item_not_found") + # Upsert quantity + cur = _conn().execute( + "SELECT quantity FROM cart_items WHERE cart_id = ? AND item_id = ?", + (cart_id, item_id), + ) + r = cur.fetchone() + if r is None: + _conn().execute( + "INSERT INTO cart_items(cart_id, item_id, quantity) VALUES(?, ?, 1)", + (cart_id, item_id), + ) + else: + _conn().execute( + "UPDATE cart_items SET quantity = ? WHERE cart_id = ? 
AND item_id = ?", + (int(r["quantity"]) + 1, cart_id, item_id), + ) + return cart_to_model(cart_id) + diff --git a/hw2/hw/test_coverage_extra.py b/hw2/hw/test_coverage_extra.py new file mode 100644 index 00000000..f46dc116 --- /dev/null +++ b/hw2/hw/test_coverage_extra.py @@ -0,0 +1,47 @@ +from http import HTTPStatus + +from fastapi.testclient import TestClient + +from shop_api.main import app + + +client = TestClient(app) + + +def test_get_nonexistent_cart() -> None: + response = client.get("/cart/999999") + assert response.status_code == HTTPStatus.NOT_FOUND + + +def test_add_to_nonexistent_cart() -> None: + # create an item + item = client.post("/item", json={"name": "x", "price": 1.0}).json() + response = client.post(f"/cart/999999/add/{item['id']}") + assert response.status_code == HTTPStatus.NOT_FOUND + + +def test_add_nonexistent_item_to_cart() -> None: + # create a cart + cart_id = client.post("/cart").json()["id"] + response = client.post(f"/cart/{cart_id}/add/999999") + assert response.status_code == HTTPStatus.NOT_FOUND + + +def test_put_deleted_item_returns_404() -> None: + item = client.post("/item", json={"name": "y", "price": 2.0}).json() + item_id = item["id"] + client.delete(f"/item/{item_id}") + response = client.put(f"/item/{item_id}", json={"name": "z", "price": 3.0}) + assert response.status_code == HTTPStatus.NOT_FOUND + + +def test_patch_nonexistent_item_returns_404() -> None: + response = client.patch("/item/999999", json={}) + assert response.status_code == HTTPStatus.NOT_FOUND + + +def test_get_nonexistent_item_returns_404() -> None: + response = client.get("/item/999999") + assert response.status_code == HTTPStatus.NOT_FOUND + + diff --git a/hw2/monitoring/grafana/dashboards/dashboards.yml b/hw2/monitoring/grafana/dashboards/dashboards.yml new file mode 100644 index 00000000..c26d0d0e --- /dev/null +++ b/hw2/monitoring/grafana/dashboards/dashboards.yml @@ -0,0 +1,14 @@ +apiVersion: 1 + +providers: + - name: 'default' + orgId: 1 + folder: 
'' + type: file + disableDeletion: false + editable: true + allowUiUpdates: false + updateIntervalSeconds: 10 + options: + path: /var/lib/grafana/dashboards + diff --git a/hw2/monitoring/grafana/dashboards_json/shop_api_overview.json b/hw2/monitoring/grafana/dashboards_json/shop_api_overview.json new file mode 100644 index 00000000..4abea5f2 --- /dev/null +++ b/hw2/monitoring/grafana/dashboards_json/shop_api_overview.json @@ -0,0 +1,55 @@ +{ + "id": null, + "uid": "shop-api-overview", + "title": "Shop API Overview", + "timezone": "browser", + "schemaVersion": 39, + "version": 1, + "refresh": "5s", + "panels": [ + { + "type": "stat", + "title": "HTTP RPS", + "gridPos": { "x": 0, "y": 0, "w": 8, "h": 6 }, + "options": { "reduceOptions": { "calcs": ["sum"], "fields": "", "values": false } }, + "targets": [ + { + "expr": "sum(rate(http_requests_total[1m]))", + "legendFormat": "RPS" + } + ] + }, + { + "type": "timeseries", + "title": "HTTP Requests by status", + "gridPos": { "x": 8, "y": 0, "w": 16, "h": 6 }, + "targets": [ + { + "expr": "sum by (status) (rate(http_requests_total[1m]))", + "legendFormat": "{{status}}" + } + ] + }, + { + "type": "timeseries", + "title": "Latency p50/p90/p99 (s)", + "gridPos": { "x": 0, "y": 6, "w": 24, "h": 8 }, + "targets": [ + { + "expr": "histogram_quantile(0.5, sum by (le) (rate(http_request_latency_seconds_bucket[5m])))", + "legendFormat": "p50" + }, + { + "expr": "histogram_quantile(0.9, sum by (le) (rate(http_request_latency_seconds_bucket[5m])))", + "legendFormat": "p90" + }, + { + "expr": "histogram_quantile(0.99, sum by (le) (rate(http_request_latency_seconds_bucket[5m])))", + "legendFormat": "p99" + } + ] + } + ] +} + + diff --git a/hw2/monitoring/grafana/datasources/datasource.yml b/hw2/monitoring/grafana/datasources/datasource.yml new file mode 100644 index 00000000..96faeb79 --- /dev/null +++ b/hw2/monitoring/grafana/datasources/datasource.yml @@ -0,0 +1,10 @@ +apiVersion: 1 + +datasources: + - name: Prometheus + type: 
prometheus + access: proxy + url: http://prometheus:9090 + isDefault: true + editable: true + diff --git a/hw2/monitoring/prometheus.yml b/hw2/monitoring/prometheus.yml new file mode 100644 index 00000000..4932745d --- /dev/null +++ b/hw2/monitoring/prometheus.yml @@ -0,0 +1,11 @@ +global: + scrape_interval: 5s + evaluation_interval: 5s + +scrape_configs: + - job_name: "shop-api" + metrics_path: /metrics + static_configs: + - targets: ["shop-api:8000"] + + diff --git a/isolation_demo.db b/isolation_demo.db new file mode 100644 index 00000000..637f63bc Binary files /dev/null and b/isolation_demo.db differ diff --git a/lecture4/README.md b/lecture4/README.md index 26822ef7..3636e341 100644 --- a/lecture4/README.md +++ b/lecture4/README.md @@ -2,7 +2,7 @@ За каждый пункт - 1 балл -Внедрить во вторую домашку хранение данных в БД, для этого надо: +Внедрить во второе домашнее задание хранение данных в БД, для этого надо: 1) Добавить БД в docket-compose.yml (если БД - это отдельный сервис, если хотите использовать sqlite, то можно скипнуть этот шаг) 2) Переписать код на взаимодействие с вашей БД (если вы еще этого не сделали, если вы уже написали код с БД, подзравляю, вам остался только 3 пункт) 3) В свободной форме, напишите скрипты, которые просимулируют разные "проблемы" которые могут возникнуть в транзакциях (dirty read, not-repeatable read, serialize) и настраивая уровне изоляции покажите, что они действительно решаются (через SQLAlchemy например), то есть: @@ -12,4 +12,38 @@ показать что нет non-repeatable read при repeatable read показать phantom reads при repeatable read показать что нет phantom reads при serializable -*Тут зависит от того какую БД вы выбрали, разные БД могут поддерживать разные уровни изоляции \ No newline at end of file +*Тут зависит от того какую БД вы выбрали, разные БД могут поддерживать разные уровни изоляции + +## Key edits in HW2: +1) irrelevant for sqlite +2) Rewrote code: +- shop_api/storage.py: implemented SQLite connection, schema 
(items, carts, cart_items), and functions: init_db, create_item, get_item, list_items, replace_item, patch_item, soft_delete_item, create_cart, cart_to_model, list_carts, add_to_cart, compute_cart_price. +- shop_api/api/item.py and shop_api/api/cart.py: switched to call the DB functions. +- shop_api/main.py: call init_db() at module import and again in startup; kept metrics and gRPC startup. +- shop_api/grpc_server.py: refactored service methods to use the DB functions. +3) added simulation scripts: +- hw2/hw/hw4/isolation_demo.py: a script demonstrating dirty/non-repeatable/phantom read scenarios in SQLite (SQLite already prevents dirty reads and uses snapshot semantics). + +Demo logs: + +`PS C:\Users\NUC\Documents\ITMO\python-backend-hw> python hw2\hw\hw4\isolation_demo.py` + +`-- read uncommitted demo (SQLite prevents dirty read) --` + +`READ_UNCOMMITTED_SIM value seen by reader (should be 0 in SQLite): 0` + +`-- non-repeatable read demo (snapshot) --` +`NON_REPEATABLE_READ_SIM v1 == v2 (SQLite snapshot): True` + +`-- phantom read demo (snapshot) --` +`PHANTOM_READ_SIM n1 == n2 (SQLite snapshot): True` + +Результаты: +- dirty read при read uncommitted: не наблюдается (читатель видит 0) +- нет dirty read при read committed: подтверждается тем же результатом +- non-repeatable read при read committed: не наблюдается (snapshot, v1 == v2) +- нет non-repeatable read при repeatable read: подтверждается snapshot +- phantom reads при repeatable read: не наблюдается (n1 == n2) +- нет phantom reads при serializable: подтверждается snapshot + +Примечание: SQLite использует snapshot-изоляцию и предотвращает dirty read даже при `PRAGMA read_uncommitted=ON`, поэтому классические аномалии воспроизвести нельзя; демонстрация выше подтверждает их отсутствие. 
\ No newline at end of file diff --git a/lecture5/hw/README.md b/lecture5/hw/README.md index 33e79328..1f34d6b1 100644 --- a/lecture5/hw/README.md +++ b/lecture5/hw/README.md @@ -1,5 +1,62 @@ -# ДЗ +# ДЗ 5 — покрытие и CI -1) Добиться 95% покрытия тестами вашей второй домашки - 1 балл +1. Добиться 95% покрытия тестами вашей второй домашки - 1 балл +2. Настроить автозапуск этих тестов в CI, если вы подключали стороннюю БД, то можно посмотреть вот [сюда](https://dev.to/kashifsoofi/integration-test-postgres-using-github-actions-3lln), чтобы поддержать тесты с ней в CI. По итогу у вас должен получиться зеленый пайплайн - оценивается в еще 2 балла. -2) Настроить автозапуск этих тестов в CI, если вы подключали сторонюю БД, то можно посмотреть вот [сюда](https://dev.to/kashifsoofi/integration-test-postgres-using-github-actions-3lln), чтобы поддержать тесты с ней в CI. По итогу у вас должен получится зеленый пайплайн - оценивается в еще 2 балла. + +## 1. Дополнительные тесты (для покрытия ≥95%) + +Файл `hw2/hw/test_coverage_extra.py` добавлен для проверки неуспешных веток и 404-сценариев, которые сложно покрыть в базовых позитивных тестах: +- `GET /cart/{id}` для несуществующей корзины → 404. +- `POST /cart/{cart_id}/add/{item_id}` для несуществующей корзины → 404. +- `POST /cart/{cart_id}/add/{item_id}` для несуществующего товара → 404. +- `PUT /item/{id}` по удалённому товару → 404. +- `PATCH /item/{id}` по несуществующему товару → 404. +- `GET /item/{id}` по несуществующему товару → 404. + +Эти тесты добирают ветки ошибок в ручках `item` и `cart` и поднимают итоговое покрытие до ~98%. + + +## Пример запуска и результат (Bash) + +```bash +export PYTHONPATH="$(pwd)/hw2/hw" +pytest -vv --maxfail=1 \ + --cov=shop_api \ + --cov-report=term-missing \ + --cov-fail-under=95 \ + hw2/hw/test_homework2.py hw2/hw/test_coverage_extra.py +``` + +Вывод (сокращённо): + +```text +============================= test session starts ============================= +...
(вывод тестов опущен) +=============================== tests coverage ================================ +_______________ coverage: platform win32, python 3.11.9-final-0 _______________ + +Name Stmts Miss Cover +----------------------------------------------------- +hw2\hw\shop_api\api\cart.py 30 0 100% +hw2\hw\shop_api\api\item.py 38 0 100% +hw2\hw\shop_api\schemas.py 25 0 100% +hw2\hw\shop_api\storage.py 143 5 97% +----------------------------------------------------- +TOTAL 236 5 98% +Required test coverage of 95% reached. Total coverage: 97.88% +45 passed, 5 warnings in 4.17s +``` + +## 2. Автозапуск тестов в CI + +Тесты запускаются автоматически через GitHub Actions — workflow находится в файле: +- `.github/workflows/tests.yml` + +Что делает workflow: +- Триггеры: `push` и `pull_request` в ветку `main`. +- Устанавливает зависимости из `hw2/hw/requirements.txt` и `lecture5/requirements.txt`. +- Запускает тесты по HW2 с покрытием и порогом `95%`: + - `PYTHONPATH=hw2/hw pytest -vv --maxfail=1 --cov=shop_api --cov-report=term-missing --cov-fail-under=95 hw2/hw/test_homework2.py` + - (Локально дополнительно можно запускать `hw2/hw/test_coverage_extra.py`.) +- Загружает артефакт покрытия (`.coverage*`).