From 7d4e274c629cd38ac1d73026375cbfb3006c85f3 Mon Sep 17 00:00:00 2001 From: tttonyalpha Date: Sat, 18 Oct 2025 19:43:56 +0300 Subject: [PATCH 1/4] added hm1 --- hw1/app.py | 159 ++++++++++++++++++++++++++++++++++++++++++++++++++- hw1/env.yaml | 12 ++++ 2 files changed, 169 insertions(+), 2 deletions(-) create mode 100644 hw1/env.yaml diff --git a/hw1/app.py b/hw1/app.py index 6107b870..91eef443 100644 --- a/hw1/app.py +++ b/hw1/app.py @@ -1,4 +1,6 @@ from typing import Any, Awaitable, Callable +from urllib.parse import parse_qs +import json async def application( @@ -12,8 +14,161 @@ async def application( receive: Корутина для получения сообщений от клиента send: Корутина для отправки сообщений клиенту """ - # TODO: Ваша реализация здесь + async def j(status: int, data: dict[str, Any]): + body = json.dumps(data, ensure_ascii=False).encode("utf-8") + hdrs = [ + (b"content-type", b"application/json; charset=utf-8"), + (b"content-length", str(len(body)).encode("ascii")), + ] + await send({"type": "http.response.start", "status": status, "headers": hdrs}) + await send({"type": "http.response.body", "body": body}) + + async def grab_body() -> bytes: + buf = b"" + more = True + while more: + m = await receive() + if m.get("type") != "http.request": + continue + buf += m.get("body", b"") + more = m.get("more_body", False) + return buf + + if scope["type"] == "lifespan": + while True: + msg = await receive() + if msg["type"] == "lifespan.startup": + print("boot") + await send({"type": "lifespan.startup.complete"}) + elif msg["type"] == "lifespan.shutdown": + print("bye") + await send({"type": "lifespan.shutdown.complete"}) + return + return + + if scope["type"] != "http": + return + + verb = scope.get("method", "GET").upper() + pth = scope.get("path", "") + qs = parse_qs(scope.get("query_string", b"").decode("latin-1")) + + known = (pth == "/factorial") or (pth == "/mean") or pth.startswith("/fibonacci") + if verb != "GET": + if known: + print("405") + await j(405, 
{"detail": "method not allowed. use GET."}) + else: + print("404") + await j(404, {"detail": "not found."}) + return + + if pth == "/factorial": + print("hit /factorial") + raw_n_list = qs.get("n") or qs.get("N") + + if not raw_n_list or raw_n_list[0] == "": + print("bad n") + await j(422, {"detail": "query param 'n' is required and must be integer."}) + return + + try: + n = int(raw_n_list[0]) + except Exception: + print("not int") + await j(422, {"detail": "'n' must be an integer."}) + return + + if n < 0: + print("neg n") + await j(400, {"detail": "factorial is for non-negative integers only."}) + return + + # inplace kringe but sorry + out = 1 + k = 2 + while k <= n: + out *= k + k += 1 + + print("ok") + await j(200, {"result": out}) + return + + if pth.startswith("/fibonacci"): + print("hit /fibonacci") + parts = pth.split("/", 2) + raw_n = parts[2] if len(parts) > 2 and parts[1] == "fibonacci" else "" + + try: + n = int(raw_n) + except Exception: + print("bad path n") + await j(422, {"detail": "path param must be an integer."}) + return + + if n < 0: + print("neg n") + await j(400, {"detail": "fibonacci is for non-negative integers only."}) + return + + a, b = 0, 1 + i = 0 + while i < n: + a, b = b, a + b + i += 1 + + print("ok") + await j(200, {"result": a}) + return + + if pth == "/mean": + print("hit /mean") + raw = await grab_body() + + if not raw: + print("no json") + await j(422, {"detail": "json body is required."}) + return + + try: + data = json.loads(raw.decode("utf-8")) + except json.JSONDecodeError: + print("bad json") + await j(422, {"detail": "body must be valid json."}) + return + + if isinstance(data, list): + arr = data + elif isinstance(data, dict) and ("numbers" in data): + arr = data["numbers"] + else: + print("missing numbers") + await j(422, {"detail": "expected array or object with numbers"}) + return + + try: + nums = [float(x) for x in arr] + except Exception: + print("not floats") + await j(422, {"detail": "numbers must be an array 
of numbers"}) + return + + if not nums: + print("empty") + await j(400, {"detail": "numbers list is empty"}) + return + + mean_val = sum(nums) / len(nums) + print("ok") + await j(200, {"result": mean_val}) + return + + print("404") + await j(404, {"detail": "not found."}) + if __name__ == "__main__": import uvicorn - uvicorn.run("app:application", host="0.0.0.0", port=8000, reload=True) + print("test app") + uvicorn.run("app:application", host="0.0.0.0", port=8000, reload=True) \ No newline at end of file diff --git a/hw1/env.yaml b/hw1/env.yaml new file mode 100644 index 00000000..0895e468 --- /dev/null +++ b/hw1/env.yaml @@ -0,0 +1,12 @@ +name: itmo_python_backend_hm1 +channels: + - conda-forge +dependencies: + - python=3.10 + - uvicorn>=0.24.0 + - pytest>=7.4.0 + - pytest-asyncio>=0.21.0 + - httpx>=0.27.2 + - pip + - pip: + - async-asgi-testclient>=1.4.11 \ No newline at end of file From b07fc64ff47ac95484eb2b3c42138ebdd58b4fec Mon Sep 17 00:00:00 2001 From: tttonyalpha Date: Sat, 18 Oct 2025 20:29:54 +0300 Subject: [PATCH 2/4] added hm2 --- hw2/hw/shop_api/main.py | 201 ++++++++++++++++++++++++++++++++++++++- hw2/hw/test_homework2.py | 27 +++++- 2 files changed, 226 insertions(+), 2 deletions(-) diff --git a/hw2/hw/shop_api/main.py b/hw2/hw/shop_api/main.py index f60a8c60..1442e243 100644 --- a/hw2/hw/shop_api/main.py +++ b/hw2/hw/shop_api/main.py @@ -1,3 +1,202 @@ -from fastapi import FastAPI +from fastapi import FastAPI, HTTPException, Query, Body, WebSocket + +from typing import Dict, Any, List, Optional +from fastapi.responses import JSONResponse +from fastapi import WebSocketDisconnect + +from uuid import uuid4 app = FastAPI(title="Shop API") + +class S: + def __init__(self): + self.items: Dict[int, Dict[str, Any]] = {} + self.carts: Dict[int, Dict[int, int]] = {} + self.iid = 1 + self.cid = 1 + + def mk_item(self, name: str, price: float) -> Dict[str, Any]: + i = self.iid + self.iid += 1 + self.items[i] = {"id": i, "name": name, "price": float(price), 
"deleted": False} + return self.items[i] + + def one_item(self, i: int) -> Dict[str, Any]: + x = self.items.get(i) + if not x or x["deleted"]: + raise HTTPException(status_code=404) + return x + + def set_item(self, i: int, name: str, price: float) -> Dict[str, Any]: + if i not in self.items or self.items[i]["deleted"]: + raise HTTPException(status_code=404) + self.items[i]["name"] = name + self.items[i]["price"] = float(price) + return self.items[i] + + def patch_item(self, i: int, data: Dict[str, Any]) -> Optional[Dict[str, Any]]: + if i not in self.items: + raise HTTPException(status_code=404) + if self.items[i]["deleted"]: + return None + if set(data.keys()) - {"name", "price"}: + raise HTTPException(status_code=422) + if "name" in data: + self.items[i]["name"] = data["name"] + if "price" in data: + self.items[i]["price"] = float(data["price"]) + return self.items[i] + + def del_item(self, i: int) -> None: + if i in self.items: + self.items[i]["deleted"] = True + + def mk_cart(self) -> int: + c = self.cid + self.cid += 1 + self.carts[c] = {} + return c + + def cart_view(self, c: int) -> Dict[str, Any]: + if c not in self.carts: + raise HTTPException(status_code=404) + arr = [] + total = 0.0 + for iid, q in self.carts[c].items(): + it = self.items.get(iid) + ok = bool(it and not it["deleted"]) + nm = it["name"] if it else "unknown" + arr.append({"id": iid, "name": nm, "quantity": q, "available": ok}) + if ok: + total += float(it["price"]) * q + return {"id": c, "items": arr, "price": float(total)} + + def add(self, c: int, i: int) -> None: + if c not in self.carts: + raise HTTPException(status_code=404) + if i not in self.items: + raise HTTPException(status_code=404) + d = self.carts[c] + d[i] = d.get(i, 0) + 1 + +st = S() + +def _slice(x: List[Any], o: int, l: int) -> List[Any]: + return x[o:o+l] + +@app.post("/item", status_code=201) +def create_item(body: Dict[str, Any] = Body(...)) -> Dict[str, Any]: + if "name" not in body or "price" not in body: + raise 
HTTPException(status_code=422) + return st.mk_item(body["name"], body["price"]) + +@app.get("/item/{item_id}") +def read_item(item_id: int) -> Dict[str, Any]: + return st.one_item(item_id) + +@app.get("/item") +def read_items( + offset: int = Query(0, ge=0), + limit: int = Query(10, gt=0), + min_price: Optional[float] = Query(None, ge=0), + max_price: Optional[float] = Query(None, ge=0), + show_deleted: bool = Query(False), +) -> List[Dict[str, Any]]: + xs = list(st.items.values()) + if not show_deleted: + xs = [x for x in xs if not x["deleted"]] + if min_price is not None: + xs = [x for x in xs if float(x["price"]) >= float(min_price)] + if max_price is not None: + xs = [x for x in xs if float(x["price"]) <= float(max_price)] + xs.sort(key=lambda x: x["id"]) + return _slice(xs, offset, limit) + +@app.put("/item/{item_id}") +def replace_item(item_id: int, body: Dict[str, Any] = Body(...)) -> Dict[str, Any]: + if set(body.keys()) - {"name", "price"}: + raise HTTPException(status_code=422) + if "name" not in body or "price" not in body: + raise HTTPException(status_code=422) + return st.set_item(item_id, body["name"], body["price"]) + +@app.patch("/item/{item_id}") +def update_item(item_id: int, body: Dict[str, Any] = Body(...)): + y = st.patch_item(item_id, body) + if y is None: + return JSONResponse(status_code=304, content=None) + return y + +@app.delete("/item/{item_id}") +def remove_item(item_id: int): + st.del_item(item_id) + return {"ok": True} + +@app.post("/cart", status_code=201) +def create_cart(): + cid = st.mk_cart() + return JSONResponse(status_code=201, content={"id": cid}, headers={"location": f"/cart/{cid}"}) + +@app.get("/cart/{cart_id}") +def read_cart(cart_id: int): + return st.cart_view(cart_id) + +@app.get("/cart") +def read_carts( + skip: int = Query(0, ge=0, alias="offset"), + take: int = Query(10, gt=0, alias="limit"), + pmin: Optional[float] = Query(None, ge=0, alias="min_price"), + pmax: Optional[float] = Query(None, ge=0, 
alias="max_price"), + qmin: Optional[int] = Query(None, ge=0, alias="min_quantity"), + qmax: Optional[int] = Query(None, ge=0, alias="max_quantity"), +) -> List[Dict[str, Any]]: + bag = [] + for k in sorted(st.carts): + v = st.cart_view(k) + tot = 0 + for it in v["items"]: + tot += it["quantity"] + if pmin is not None and v["price"] < float(pmin): + continue + if pmax is not None and v["price"] > float(pmax): + continue + if qmin is not None and tot < int(qmin): + continue + if qmax is not None and tot > int(qmax): + continue + bag.append(v) + if skip >= len(bag): + return [] + return bag[skip:skip + take] + +@app.post("/cart/{cart_id}/add/{item_id}") +def add_item(cart_id: int, item_id: int): + st.add(cart_id, item_id) + return {"ok": True} + +_rooms: Dict[str, List[WebSocket]] = {} +_names: Dict[WebSocket, str] = {} + +@app.websocket("/chat/{room}") +async def ws_chat(ws: WebSocket, room: str): + await ws.accept() + u = "user-" + uuid4().hex[:6] + _names[ws] = u + _rooms.setdefault(room, []).append(ws) + try: + while True: + t = await ws.receive_text() + for w in list(_rooms.get(room, [])): + if w is not ws: + try: + await w.send_text(f"{u} :: {t}") + except RuntimeError: + pass + except WebSocketDisconnect: + pass + finally: + if ws in _rooms.get(room, []): + _rooms[room].remove(ws) + _names.pop(ws, None) + if _rooms.get(room) == []: + _rooms.pop(room, None) \ No newline at end of file diff --git a/hw2/hw/test_homework2.py b/hw2/hw/test_homework2.py index 60a1f36a..2b68a467 100644 --- a/hw2/hw/test_homework2.py +++ b/hw2/hw/test_homework2.py @@ -5,7 +5,7 @@ import pytest from faker import Faker from fastapi.testclient import TestClient - +import re from shop_api.main import app client = TestClient(app) @@ -282,3 +282,28 @@ def test_delete_item(existing_item: dict[str, Any]) -> None: response = client.delete(f"/item/{item_id}") assert response.status_code == HTTPStatus.OK + + +def _ok(msg, text): + assert re.match(rf"^user-[0-9a-f]{{6}} :: {re.escape(text)}$", 
msg) + +def test_ws_broadcast_two(): + with client.websocket_connect("/chat/room1") as a, client.websocket_connect("/chat/room1") as b: + a.send_text("suuuuup brooo") + m = b.receive_text() + _ok(m, "suuuuup brooo") + b.send_text("yo") + m2 = a.receive_text() + _ok(m2, "yo") + +def test_ws_rooms_isolated_and_names_unique(): + with client.websocket_connect("/chat/x") as x1, client.websocket_connect("/chat/x") as x2, client.websocket_connect("/chat/y") as y1, client.websocket_connect("/chat/y") as y2: + x1.send_text("hi my name is lolik") + mx = x2.receive_text() + _ok(mx, "hi my name is lolik") + y1.send_text("how are you") + my = y2.receive_text() + _ok(my, "how are you") + ux = mx.split(" :: ", 1)[0] + uy = my.split(" :: ", 1)[0] + assert ux != uy \ No newline at end of file From 041f635b2d6dbac3df5ad0642b85158332451555 Mon Sep 17 00:00:00 2001 From: tttonyalpha Date: Sat, 18 Oct 2025 21:35:31 +0300 Subject: [PATCH 3/4] added hm3 --- lecture3/hw3/Dockerfile | 20 ++ lecture3/hw3/docker-compose.yml | 43 +++ .../grafana/dashboards/dashboard.yml | 10 + .../grafana/dashboards_json/fastapi.json | 249 ++++++++++++++++++ .../grafana/datasources/datasource.yml | 12 + lecture3/hw3/monitoring/prometheus.yml | 13 + lecture3/hw3/requirements.txt | 4 + lecture3/hw3/shop_api/__init__.py | 0 lecture3/hw3/shop_api/main.py | 205 ++++++++++++++ 9 files changed, 556 insertions(+) create mode 100644 lecture3/hw3/Dockerfile create mode 100644 lecture3/hw3/docker-compose.yml create mode 100644 lecture3/hw3/monitoring/grafana/dashboards/dashboard.yml create mode 100644 lecture3/hw3/monitoring/grafana/dashboards_json/fastapi.json create mode 100644 lecture3/hw3/monitoring/grafana/datasources/datasource.yml create mode 100644 lecture3/hw3/monitoring/prometheus.yml create mode 100644 lecture3/hw3/requirements.txt create mode 100644 lecture3/hw3/shop_api/__init__.py create mode 100644 lecture3/hw3/shop_api/main.py diff --git a/lecture3/hw3/Dockerfile b/lecture3/hw3/Dockerfile new file mode 
100644 index 00000000..cd6bec42 --- /dev/null +++ b/lecture3/hw3/Dockerfile @@ -0,0 +1,20 @@ +FROM python:3.10 + +ENV PYTHONDONTWRITEBYTECODE=1 \ + PYTHONUNBUFFERED=1 \ + PIP_DISABLE_PIP_VERSION_CHECK=1 + +WORKDIR /app + +RUN apt-get update && apt-get install -y --no-install-recommends \ + curl ca-certificates && \ + rm -rf /var/lib/apt/lists/* + +COPY requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +COPY . . + +EXPOSE 8000 + +CMD ["uvicorn", "shop_api.main:app", "--host", "0.0.0.0", "--port", "8000", "--proxy-headers"] \ No newline at end of file diff --git a/lecture3/hw3/docker-compose.yml b/lecture3/hw3/docker-compose.yml new file mode 100644 index 00000000..6289d93c --- /dev/null +++ b/lecture3/hw3/docker-compose.yml @@ -0,0 +1,43 @@ +version: "3.9" + +services: + api: + build: . + container_name: shop_api + ports: + - "8000:8000" + networks: [monitoring] + environment: + - PYTHONUNBUFFERED=1 + + prometheus: + image: prom/prometheus:latest + container_name: prometheus + command: + - "--config.file=/etc/prometheus/prometheus.yml" + - "--storage.tsdb.retention.time=7d" + volumes: + - ./monitoring/prometheus.yml:/etc/prometheus/prometheus.yml:ro + ports: + - "9090:9090" + depends_on: [api] + networks: [monitoring] + + grafana: + image: grafana/grafana:latest + container_name: grafana + environment: + - GF_SECURITY_ADMIN_USER=admin + - GF_SECURITY_ADMIN_PASSWORD=admin + volumes: + - ./monitoring/grafana/datasources:/etc/grafana/provisioning/datasources:ro + - ./monitoring/grafana/dashboards:/etc/grafana/provisioning/dashboards:ro + - ./monitoring/grafana/dashboards_json:/var/lib/grafana/dashboards:ro + ports: + - "3000:3000" + depends_on: [prometheus] + networks: [monitoring] + +networks: + monitoring: + driver: bridge \ No newline at end of file diff --git a/lecture3/hw3/monitoring/grafana/dashboards/dashboard.yml b/lecture3/hw3/monitoring/grafana/dashboards/dashboard.yml new file mode 100644 index 00000000..d02fb6d8 --- /dev/null +++ 
b/lecture3/hw3/monitoring/grafana/dashboards/dashboard.yml @@ -0,0 +1,10 @@ +apiVersion: 1 +providers: + - name: "local-dashboards" + orgId: 1 + folder: "" + type: file + disableDeletion: true + editable: false + options: + path: /var/lib/grafana/dashboards \ No newline at end of file diff --git a/lecture3/hw3/monitoring/grafana/dashboards_json/fastapi.json b/lecture3/hw3/monitoring/grafana/dashboards_json/fastapi.json new file mode 100644 index 00000000..0964355f --- /dev/null +++ b/lecture3/hw3/monitoring/grafana/dashboards_json/fastapi.json @@ -0,0 +1,249 @@ +{ + "title": "FastAPI / Prometheus (shop_api) — Pro", + "timezone": "browser", + "schemaVersion": 39, + "version": 3, + "editable": false, + "refresh": "10s", + "time": { "from": "now-30m", "to": "now" }, + "templating": { + "list": [ + { + "name": "job", + "label": "Job", + "type": "query", + "datasource": "Prometheus", + "query": "label_values(http_requests_total, job)", + "includeAll": false, + "multi": false, + "current": { "text": "shop_api", "value": "shop_api" } + }, + { + "name": "handler", + "label": "Handler", + "type": "query", + "datasource": "Prometheus", + "query": "label_values(http_requests_total{job=\"$job\",handler!=\"none\"}, handler)", + "includeAll": true, + "multi": true, + "allValue": ".*", + "current": { "text": "All", "value": ".*" } + } + ] + }, + "panels": [ + { + "type": "timeseries", + "title": "RPS by paths", + "targets": [ + { + "expr": "sum by (handler) (rate(http_requests_total{job=\"$job\",handler!=\"none\",handler=~\"$handler\"}[1m]))", + "legendFormat": "{{handler}}" + } + ], + "fieldConfig": { + "defaults": { "unit": "req/s", "decimals": 2 }, + "overrides": [] + }, + "options": { + "legend": { "displayMode": "table", "placement": "right", "calcs": ["lastNotNull","max","mean"] }, + "tooltip": { "mode": "multi" } + }, + "gridPos": { "h": 8, "w": 12, "x": 0, "y": 0 } + }, + { + "type": "timeseries", + "title": "p95 latency", + "targets": [ + { + "expr": 
"histogram_quantile(0.95, sum by (le, handler) (rate(http_request_duration_seconds_bucket{job=\"$job\",handler!=\"none\",handler=~\"$handler\"}[5m]))) * 1000", + "legendFormat": "{{handler}}" + } + ], + "fieldConfig": { + "defaults": { "unit": "ms", "decimals": 0, "min": 0 }, + "overrides": [] + }, + "options": { + "legend": { "displayMode": "table", "placement": "right", "calcs": ["lastNotNull","max","mean"] }, + "tooltip": { "mode": "multi" } + }, + "gridPos": { "h": 8, "w": 12, "x": 12, "y": 0 } + }, + { + "type": "heatmap", + "title": "heatmap ", + "targets": [ + { + "expr": "sum by (le) (rate(http_request_duration_seconds_bucket{job=\"$job\",handler!=\"none\",handler=~\"$handler\"}[5m]))" + } + ], + "dataFormat": "tsbuckets", + "fieldConfig": { "defaults": { "unit": "ops" } }, + "options": { + "yAxis": { "unit": "s" }, + "legend": { "show": true }, + "tooltip": { "show": true } + }, + "gridPos": { "h": 8, "w": 12, "x": 0, "y": 8 } + }, + { + "type": "timeseries", + "title": "Errors 4xx / 5xx (RPS)", + "targets": [ + { + "expr": "sum by (status) (rate(http_requests_total{job=\"$job\",handler!=\"none\",handler=~\"$handler\",status=~\"4..\"}[5m]))", + "legendFormat": "4xx {{status}}" + }, + { + "expr": "sum by (status) (rate(http_requests_total{job=\"$job\",handler!=\"none\",handler=~\"$handler\",status=~\"5..\"}[5m]))", + "legendFormat": "5xx {{status}}" + } + ], + "fieldConfig": { "defaults": { "unit": "req/s", "decimals": 3, "min": 0 } }, + "options": { + "legend": { "displayMode": "table", "placement": "right", "calcs": ["lastNotNull","sum"] } + }, + "gridPos": { "h": 8, "w": 12, "x": 12, "y": 8 } + }, + { + "type": "timeseries", + "title": "aqw qwery len", + "targets": [ + { + "expr": "sum by (handler) (rate(http_request_size_bytes_sum{job=\"$job\",handler!=\"none\",handler=~\"$handler\"}[5m])) / sum by (handler) (rate(http_request_size_bytes_count{job=\"$job\",handler!=\"none\",handler=~\"$handler\"}[5m]))", + "legendFormat": "{{handler}}" + } + ], + 
"fieldConfig": { "defaults": { "unit": "bytes", "decimals": 0, "min": 0 } }, + "gridPos": { "h": 8, "w": 12, "x": 0, "y": 16 } + }, + { + "type": "timeseries", + "title": "aqw ans len", + "targets": [ + { + "expr": "sum by (handler) (rate(http_response_size_bytes_sum{job=\"$job\",handler!=\"none\",handler=~\"$handler\"}[5m])) / sum by (handler) (rate(http_response_size_bytes_count{job=\"$job\",handler!=\"none\",handler=~\"$handler\"}[5m]))", + "legendFormat": "{{handler}}" + } + ], + "fieldConfig": { "defaults": { "unit": "bytes", "decimals": 0, "min": 0 } }, + "gridPos": { "h": 8, "w": 12, "x": 12, "y": 16 } + }, + { + "type": "stat", + "title": "carts (rate/5m)", + "targets": [ + { "expr": "rate(app_cart_created_total{job=\"$job\"}[5m])" } + ], + "fieldConfig": { "defaults": { "unit": "ops", "decimals": 3, "min": 0 } }, + "options": { "reduceOptions": { "calcs": ["lastNotNull"] }, "colorMode": "value" }, + "gridPos": { "h": 4, "w": 6, "x": 0, "y": 24 } + }, + { + "type": "stat", + "title": "added items (rate/5m)", + "targets": [ + { "expr": "rate(app_item_added_total{job=\"$job\"}[5m])" } + ], + "fieldConfig": { "defaults": { "unit": "ops", "decimals": 3, "min": 0 } }, + "options": { "reduceOptions": { "calcs": ["lastNotNull"] }, "colorMode": "value" }, + "gridPos": { "h": 4, "w": 6, "x": 6, "y": 24 } + }, + { + "type": "stat", + "title": "CPU 5m", + "targets": [ + { "expr": "avg(rate(process_cpu_seconds_total{job=\"$job\"}[5m]))" } + ], + "fieldConfig": { "defaults": { "unit": "cores", "decimals": 2, "min": 0 } }, + "options": { "reduceOptions": { "calcs": ["lastNotNull"] }, "colorMode": "value" }, + "gridPos": { "h": 4, "w": 4, "x": 12, "y": 24 } + }, + { + "type": "stat", + "title": "mem RSS", + "targets": [ + { "expr": "process_resident_memory_bytes{job=\"$job\"}" } + ], + "fieldConfig": { "defaults": { "unit": "bytes", "decimals": 0, "min": 0 } }, + "options": { "reduceOptions": { "calcs": ["lastNotNull"] }, "colorMode": "value" }, + "gridPos": { "h": 4, 
"w": 4, "x": 16, "y": 24 } + }, + { + "type": "stat", + "title": "file desc", + "targets": [ + { "expr": "process_open_fds{job=\"$job\"}" } + ], + "fieldConfig": { "defaults": { "unit": "none", "decimals": 0, "min": 0 } }, + "options": { "reduceOptions": { "calcs": ["lastNotNull"] }, "colorMode": "value" }, + "gridPos": { "h": 4, "w": 4, "x": 20, "y": 24 } + }, + { + "type": "stat", + "title": "uptime", + "targets": [ + { "expr": "time() - process_start_time_seconds{job=\"$job\"}" } + ], + "fieldConfig": { "defaults": { "unit": "s", "decimals": 0, "min": 0 } }, + "options": { "reduceOptions": { "calcs": ["lastNotNull"] }, "colorMode": "value" }, + "gridPos": { "h": 4, "w": 6, "x": 0, "y": 28 } + }, + { + "type": "timeseries", + "title": "GC collections/sec", + "targets": [ + { + "expr": "rate(python_gc_collections_total{job=\"$job\",generation=\"0\"}[5m])", + "legendFormat": "gen0" + }, + { + "expr": "rate(python_gc_collections_total{job=\"$job\",generation=\"1\"}[5m])", + "legendFormat": "gen1" + }, + { + "expr": "rate(python_gc_collections_total{job=\"$job\",generation=\"2\"}[5m])", + "legendFormat": "gen2" + } + ], + "fieldConfig": { "defaults": { "unit": "ops", "decimals": 3, "min": 0 } }, + "options": { + "legend": { "displayMode": "table", "placement": "right", "calcs": ["lastNotNull","max","mean"] } + }, + "gridPos": { "h": 8, "w": 18, "x": 6, "y": 28 } + }, + { + "type": "stat", + "title": "Prometheus scrape", + "targets": [ + { "expr": "max(up{job=\"$job\"})" } + ], + "fieldConfig": { + "defaults": { + "unit": "none", + "thresholds": { + "mode": "absolute", + "steps": [ + { "color": "red", "value": 0 }, + { "color": "green", "value": 1 } + ] + } + } + }, + "options": { "reduceOptions": { "calcs": ["lastNotNull"] }, "colorMode": "value" }, + "gridPos": { "h": 8, "w": 6, "x": 0, "y": 32 } + }, + { + "type": "timeseries", + "title": "Prometheus scrape duration / samples", + "targets": [ + { "expr": "avg(scrape_duration_seconds{job=\"$job\"})", "legendFormat": 
"scrape duration (s)" }, + { "expr": "sum(scrape_samples_scraped{job=\"$job\"})", "legendFormat": "samples scraped" } + ], + "fieldConfig": { "defaults": { "decimals": 3, "min": 0 } }, + "options": { "legend": { "displayMode": "table", "placement": "right" } }, + "gridPos": { "h": 8, "w": 18, "x": 6, "y": 36 } + } + ] +} \ No newline at end of file diff --git a/lecture3/hw3/monitoring/grafana/datasources/datasource.yml b/lecture3/hw3/monitoring/grafana/datasources/datasource.yml new file mode 100644 index 00000000..415d3568 --- /dev/null +++ b/lecture3/hw3/monitoring/grafana/datasources/datasource.yml @@ -0,0 +1,12 @@ +apiVersion: 1 +deleteDatasources: + - name: Prometheus + type: prometheus + +datasources: + - name: Prometheus + type: prometheus + access: proxy + url: http://prometheus:9090 + isDefault: true + editable: false \ No newline at end of file diff --git a/lecture3/hw3/monitoring/prometheus.yml b/lecture3/hw3/monitoring/prometheus.yml new file mode 100644 index 00000000..63dfb61f --- /dev/null +++ b/lecture3/hw3/monitoring/prometheus.yml @@ -0,0 +1,13 @@ +global: + scrape_interval: 15s + evaluation_interval: 15s + +scrape_configs: + - job_name: "shop_api" + metrics_path: /metrics + static_configs: + - targets: ["host.docker.internal:8000"] + + - job_name: "prometheus" + static_configs: + - targets: ["prometheus:9090"] \ No newline at end of file diff --git a/lecture3/hw3/requirements.txt b/lecture3/hw3/requirements.txt new file mode 100644 index 00000000..260438e4 --- /dev/null +++ b/lecture3/hw3/requirements.txt @@ -0,0 +1,4 @@ +fastapi>=0.117.1 +uvicorn>=0.24.0 +prometheus-fastapi-instrumentator>=7.0.0 +prometheus-client>=0.20.0 \ No newline at end of file diff --git a/lecture3/hw3/shop_api/__init__.py b/lecture3/hw3/shop_api/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/lecture3/hw3/shop_api/main.py b/lecture3/hw3/shop_api/main.py new file mode 100644 index 00000000..4eee33f2 --- /dev/null +++ b/lecture3/hw3/shop_api/main.py 
@@ -0,0 +1,205 @@ +from collections import defaultdict +import secrets +from fastapi import FastAPI, HTTPException, Body, WebSocket, WebSocketDisconnect +from fastapi.responses import JSONResponse +from prometheus_fastapi_instrumentator import Instrumentator +from prometheus_client import Counter as PcCounter, Counter + +app=FastAPI() +Instrumentator().instrument(app).expose(app,endpoint="/metrics",include_in_schema=False) +CART_CREATED=Counter("app_cart_created_total","total carts") +ITEM_ADDED=Counter("app_item_added_total","total items") +REQS_BY_PATH=PcCounter("app_requests_total","total q per path",["path"]) + +@app.middleware("http") +async def m(rq, nxt): + if rq.url.path!="/metrics": REQS_BY_PATH.labels(path=rq.url.path).inc() + return await nxt(rq) + +class S: + def __init__(s): + s._i=0; s._c=0; s.items={}; s.carts={} + def ni(s): s._i+=1; return s._i + def nc(s): s._c+=1; return s._c +st=S() + +def gi(x,n): + if x is None: return None + try: return int(x) + except: raise HTTPException(status_code=422,detail=f"Invalid {n}") +def gf(x,n): + if x is None: return None + try: return float(x) + except: raise HTTPException(status_code=422,detail=f"Invalid {n}") +def gb(x,d=False): + if isinstance(x,bool): return x + if x is None: return d + s=str(x).lower() + if s in ("1","true","yes","on"): return True + if s in ("0","false","no","off"): return False + return d + +def need_item(i): + it=st.items.get(i) + if it is None: raise HTTPException(status_code=404,detail="Item not found") + return it +def need_cart(c): + ct=st.carts.get(c) # do we need it to check??? 
+ if ct is None: raise HTTPException(status_code=404,detail="Cart not found") + return ct + +def cv(c): + xs=[]; tot=0.0 + for iid,q in sorted(c["items"].items()): + it=st.items.get(iid) + if not it or it["deleted"]: continue + xs.append({"id":iid,"quantity":int(q)}) + tot+=float(it["price"])*int(q) + return {"id":c["id"],"items":xs,"price":float(tot)} + +def sl(a,o,l): return a[o:o+l] + +@app.post("/item",status_code=201) +def create_item(p=Body(...)): + if not isinstance(p,dict): raise HTTPException(status_code=422,detail="Invalid body") + if "name" not in p or "price" not in p: raise HTTPException(status_code=422,detail="name and price required") + n=p["name"]; pr=gf(p["price"],"price") + if pr is None or pr<0.0 or not isinstance(n,str) or not n: raise HTTPException(status_code=422,detail="Invalid fields") + i=st.ni(); o={"id":i,"name":n,"price":float(pr),"deleted":False}; st.items[i]=o; return o + +@app.get("/item/{item_id}") +def get_item(item_id): + try: i=int(item_id) + except: raise HTTPException(status_code=404,detail="Item not found") + it=need_item(i) + if it["deleted"]: raise HTTPException(status_code=404,detail="Item deleted") + return it + +@app.get("/item") +def list_items(offset=0,limit=10,min_price=None,max_price=None,show_deleted=False): + o=gi(offset,"offset"); l=gi(limit,"limit"); mn=gf(min_price,"min_price"); mx=gf(max_price,"max_price"); sd=gb(show_deleted,False) + if o is None or o<0: raise HTTPException(status_code=422,detail="offset") + if l is None or l<=0: raise HTTPException(status_code=422,detail="limit") + if mn is not None and mn<0: raise HTTPException(status_code=422,detail="min_price") + if mx is not None and mx<0: raise HTTPException(status_code=422,detail="max_price") + out=[] + for iid in sorted(st.items.keys()): + it=st.items[iid] + if not sd and it["deleted"]: continue + if mn is not None and it["price"]float(mx): continue + out.append(it) + return sl(out,o,l) + +@app.put("/item/{item_id}") +def put_item(item_id,p=Body(...)): + 
try: i=int(item_id) + except: raise HTTPException(status_code=404,detail="Item not found") + it=need_item(i) + if it["deleted"]: raise HTTPException(status_code=404,detail="Item deleted") + if not isinstance(p,dict): raise HTTPException(status_code=422,detail="Invalid body") + if "name" not in p or "price" not in p: raise HTTPException(status_code=422,detail="name and price required") + n=p["name"]; pr=gf(p["price"],"price") + if pr is None or pr<0.0 or not isinstance(n,str) or not n: raise HTTPException(status_code=422,detail="Invalid fields") + it["name"]=n; it["price"]=float(pr); return it + +@app.patch("/item/{item_id}") +def patch_item(item_id,body=Body(default={})): + try: i=int(item_id) + except: raise HTTPException(status_code=404,detail="Item not found") + it=need_item(i) + if it["deleted"]: return JSONResponse(status_code=304,content=None) + if body is None: body={} + if not isinstance(body,dict): raise HTTPException(status_code=422,detail="Invalid body") + ok={"name","price"} + if not set(body.keys()).issubset(ok): raise HTTPException(status_code=422,detail="Unexpected fields") + if "name" in body: + if not isinstance(body["name"],str) or not body["name"]: raise HTTPException(status_code=422,detail="Invalid name") + if "price" in body: + p=gf(body["price"],"price") + if p is None or p<0.0: raise HTTPException(status_code=422,detail="Invalid price") + if "name" in body: it["name"]=body["name"] + if "price" in body: it["price"]=float(body["price"]) + return it + +@app.delete("/item/{item_id}") +def delete_item(item_id): + try: i=int(item_id) + except: raise HTTPException(status_code=404,detail="Item not found") + it=st.items.get(i) + if it is None: raise HTTPException(status_code=404,detail="Item not found") + it["deleted"]=True + return {"status":"ok"} + +@app.post("/cart",status_code=201) +def create_cart(): + c=st.nc(); st.carts[c]={"id":c,"items":defaultdict(int)}; CART_CREATED.inc() + return 
JSONResponse(status_code=201,content={"id":c},headers={"location":f"/cart/{c}"}) + +@app.get("/cart/{cart_id}") +def get_cart(cart_id): + try: c=int(cart_id) + except: raise HTTPException(status_code=404,detail="Cart not found") + return cv(need_cart(c)) + +@app.post("/cart/{cart_id}/add/{item_id}") +def add_item_to_cart(cart_id,item_id): + try: c=int(cart_id); i=int(item_id) + except: raise HTTPException(status_code=404,detail="Not found") + cart=need_cart(c); it=need_item(i) + if it["deleted"]: raise HTTPException(status_code=404,detail="Item deleted") + cart["items"][i]=int(cart["items"].get(i,0))+1; ITEM_ADDED.inc() + return cv(cart) + +@app.get("/cart") +def list_carts(offset=0,limit=10,min_price=None,max_price=None,min_quantity=None,max_quantity=None): + o=gi(offset,"offset"); l=gi(limit,"limit"); mn=gf(min_price,"min_price"); mx=gf(max_price,"max_price"); miq=gi(min_quantity,"min_quantity"); maq=gi(max_quantity,"max_quantity") + if o is None or o<0: raise HTTPException(status_code=422,detail="offset") + if l is None or l<=0: raise HTTPException(status_code=422,detail="limit") + if mn is not None and mn<0: raise HTTPException(status_code=422,detail="min_price") + if mx is not None and mx<0: raise HTTPException(status_code=422,detail="max_price") + if miq is not None and miq<0: raise HTTPException(status_code=422,detail="min_quantity") + if maq is not None and maq<0: raise HTTPException(status_code=422,detail="max_quantity") + vs=[] + for cid in sorted(st.carts.keys()): + v=cv(st.carts[cid]) + if mn is not None and v["price"]float(mx): continue + q=sum(i["quantity"] for i in v["items"]) + if miq is not None and qint(maq): continue + vs.append(v) + return sl(vs,o,l) + +_rooms=defaultdict(set) +_usernames={} +_alloc=set() + +def _uname(): + for _ in range(5): + n=f"user-{secrets.token_hex(3)}" + if n not in _alloc: + _alloc.add(n); return n + return f"user-{secrets.token_hex(3)}" + +async def _bcast(room,sender,text): + nm=_usernames.get(sender,"user-unknown"); 
pl=f"{nm} :: {text}" + dead=[] + for ws in list(_rooms[room]): + if ws is sender: continue + try: await ws.send_text(pl) + except: dead.append(ws) + for ws in dead: + _rooms[room].discard(ws); _usernames.pop(ws,None) + +@app.websocket("/chat/{room}") +async def ws_chat(ws,room): + await ws.accept(); u=_uname(); _rooms[room].add(ws); _usernames[ws]=u + try: + while True: + m=await ws.receive_text() + await _bcast(room,ws,m) + except WebSocketDisconnect: + pass + finally: + _rooms[room].discard(ws); _usernames.pop(ws,None) \ No newline at end of file From fdadaf7e7d88629bf4de62d4fe7f5706f42780be Mon Sep 17 00:00:00 2001 From: tttonyalpha Date: Sun, 26 Oct 2025 19:12:45 +0300 Subject: [PATCH 4/4] added hw4 --- lecture4/hw4/Dockerfile | 20 ++ lecture4/hw4/__init__.py | 0 lecture4/hw4/docker-compose.yml | 64 +++++ .../grafana/dashboards/dashboard.yml | 10 + .../grafana/dashboards_json/fastapi.json | 249 ++++++++++++++++++ .../grafana/datasources/datasource.yml | 12 + lecture4/hw4/monitoring/prometheus.yml | 13 + lecture4/hw4/requirements.txt | 6 + lecture4/hw4/scripts/__init__.py | 0 lecture4/hw4/scripts/log.txt | 28 ++ lecture4/hw4/scripts/run_verification.sh | 5 + lecture4/hw4/scripts/tx_demo.py | 148 +++++++++++ lecture4/hw4/scripts/verify_isolation.py | 162 ++++++++++++ lecture4/hw4/shop_api/__init__.py | 0 lecture4/hw4/shop_api/db.py | 13 + lecture4/hw4/shop_api/main.py | 223 ++++++++++++++++ lecture4/hw4/shop_api/models.py | 22 ++ 17 files changed, 975 insertions(+) create mode 100644 lecture4/hw4/Dockerfile create mode 100644 lecture4/hw4/__init__.py create mode 100644 lecture4/hw4/docker-compose.yml create mode 100644 lecture4/hw4/monitoring/grafana/dashboards/dashboard.yml create mode 100644 lecture4/hw4/monitoring/grafana/dashboards_json/fastapi.json create mode 100644 lecture4/hw4/monitoring/grafana/datasources/datasource.yml create mode 100644 lecture4/hw4/monitoring/prometheus.yml create mode 100644 lecture4/hw4/requirements.txt create mode 100644 
lecture4/hw4/scripts/__init__.py create mode 100644 lecture4/hw4/scripts/log.txt create mode 100755 lecture4/hw4/scripts/run_verification.sh create mode 100644 lecture4/hw4/scripts/tx_demo.py create mode 100644 lecture4/hw4/scripts/verify_isolation.py create mode 100644 lecture4/hw4/shop_api/__init__.py create mode 100644 lecture4/hw4/shop_api/db.py create mode 100644 lecture4/hw4/shop_api/main.py create mode 100644 lecture4/hw4/shop_api/models.py diff --git a/lecture4/hw4/Dockerfile b/lecture4/hw4/Dockerfile new file mode 100644 index 00000000..cd6bec42 --- /dev/null +++ b/lecture4/hw4/Dockerfile @@ -0,0 +1,20 @@ +FROM python:3.10 + +ENV PYTHONDONTWRITEBYTECODE=1 \ + PYTHONUNBUFFERED=1 \ + PIP_DISABLE_PIP_VERSION_CHECK=1 + +WORKDIR /app + +RUN apt-get update && apt-get install -y --no-install-recommends \ + curl ca-certificates && \ + rm -rf /var/lib/apt/lists/* + +COPY requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +COPY . . + +EXPOSE 8000 + +CMD ["uvicorn", "shop_api.main:app", "--host", "0.0.0.0", "--port", "8000", "--proxy-headers"] \ No newline at end of file diff --git a/lecture4/hw4/__init__.py b/lecture4/hw4/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/lecture4/hw4/docker-compose.yml b/lecture4/hw4/docker-compose.yml new file mode 100644 index 00000000..a2787aa7 --- /dev/null +++ b/lecture4/hw4/docker-compose.yml @@ -0,0 +1,64 @@ +version: "3.9" + +services: + db: + image: postgres:16 + container_name: shop_db + environment: + - POSTGRES_DB=shop + - POSTGRES_USER=shop + - POSTGRES_PASSWORD=shop + volumes: + - pgdata:/var/lib/postgresql/data + healthcheck: + test: ["CMD-SHELL", "pg_isready -U $$POSTGRES_USER -d $$POSTGRES_DB"] + interval: 5s + timeout: 5s + retries: 10 + networks: [monitoring] + + api: + build: . 
+ container_name: shop_api + ports: + - "8000:8000" + networks: [monitoring] + environment: + - PYTHONUNBUFFERED=1 + - DATABASE_URL=postgresql+psycopg2://shop:shop@db:5432/shop + depends_on: + db: + condition: service_healthy + + prometheus: + image: prom/prometheus:latest + container_name: prometheus + command: + - "--config.file=/etc/prometheus/prometheus.yml" + volumes: + - ./monitoring/prometheus.yml:/etc/prometheus/prometheus.yml:ro + ports: + - "9090:9090" + networks: [monitoring] + + grafana: + image: grafana/grafana:latest + container_name: grafana + environment: + - GF_SECURITY_ADMIN_USER=admin + - GF_SECURITY_ADMIN_PASSWORD=admin + volumes: + - ./monitoring/grafana/datasources:/etc/grafana/provisioning/datasources:ro + - ./monitoring/grafana/dashboards:/etc/grafana/provisioning/dashboards:ro + - ./monitoring/grafana/dashboards_json:/var/lib/grafana/dashboards:ro + ports: + - "3000:3000" + depends_on: [prometheus] + networks: [monitoring] + +networks: + monitoring: + driver: bridge + +volumes: + pgdata: \ No newline at end of file diff --git a/lecture4/hw4/monitoring/grafana/dashboards/dashboard.yml b/lecture4/hw4/monitoring/grafana/dashboards/dashboard.yml new file mode 100644 index 00000000..d02fb6d8 --- /dev/null +++ b/lecture4/hw4/monitoring/grafana/dashboards/dashboard.yml @@ -0,0 +1,10 @@ +apiVersion: 1 +providers: + - name: "local-dashboards" + orgId: 1 + folder: "" + type: file + disableDeletion: true + editable: false + options: + path: /var/lib/grafana/dashboards \ No newline at end of file diff --git a/lecture4/hw4/monitoring/grafana/dashboards_json/fastapi.json b/lecture4/hw4/monitoring/grafana/dashboards_json/fastapi.json new file mode 100644 index 00000000..0964355f --- /dev/null +++ b/lecture4/hw4/monitoring/grafana/dashboards_json/fastapi.json @@ -0,0 +1,249 @@ +{ + "title": "FastAPI / Prometheus (shop_api) — Pro", + "timezone": "browser", + "schemaVersion": 39, + "version": 3, + "editable": false, + "refresh": "10s", + "time": { "from": 
"now-30m", "to": "now" }, + "templating": { + "list": [ + { + "name": "job", + "label": "Job", + "type": "query", + "datasource": "Prometheus", + "query": "label_values(http_requests_total, job)", + "includeAll": false, + "multi": false, + "current": { "text": "shop_api", "value": "shop_api" } + }, + { + "name": "handler", + "label": "Handler", + "type": "query", + "datasource": "Prometheus", + "query": "label_values(http_requests_total{job=\"$job\",handler!=\"none\"}, handler)", + "includeAll": true, + "multi": true, + "allValue": ".*", + "current": { "text": "All", "value": ".*" } + } + ] + }, + "panels": [ + { + "type": "timeseries", + "title": "RPS by paths", + "targets": [ + { + "expr": "sum by (handler) (rate(http_requests_total{job=\"$job\",handler!=\"none\",handler=~\"$handler\"}[1m]))", + "legendFormat": "{{handler}}" + } + ], + "fieldConfig": { + "defaults": { "unit": "req/s", "decimals": 2 }, + "overrides": [] + }, + "options": { + "legend": { "displayMode": "table", "placement": "right", "calcs": ["lastNotNull","max","mean"] }, + "tooltip": { "mode": "multi" } + }, + "gridPos": { "h": 8, "w": 12, "x": 0, "y": 0 } + }, + { + "type": "timeseries", + "title": "p95 latency", + "targets": [ + { + "expr": "histogram_quantile(0.95, sum by (le, handler) (rate(http_request_duration_seconds_bucket{job=\"$job\",handler!=\"none\",handler=~\"$handler\"}[5m]))) * 1000", + "legendFormat": "{{handler}}" + } + ], + "fieldConfig": { + "defaults": { "unit": "ms", "decimals": 0, "min": 0 }, + "overrides": [] + }, + "options": { + "legend": { "displayMode": "table", "placement": "right", "calcs": ["lastNotNull","max","mean"] }, + "tooltip": { "mode": "multi" } + }, + "gridPos": { "h": 8, "w": 12, "x": 12, "y": 0 } + }, + { + "type": "heatmap", + "title": "heatmap ", + "targets": [ + { + "expr": "sum by (le) (rate(http_request_duration_seconds_bucket{job=\"$job\",handler!=\"none\",handler=~\"$handler\"}[5m]))" + } + ], + "dataFormat": "tsbuckets", + "fieldConfig": { 
"defaults": { "unit": "ops" } }, + "options": { + "yAxis": { "unit": "s" }, + "legend": { "show": true }, + "tooltip": { "show": true } + }, + "gridPos": { "h": 8, "w": 12, "x": 0, "y": 8 } + }, + { + "type": "timeseries", + "title": "Errors 4xx / 5xx (RPS)", + "targets": [ + { + "expr": "sum by (status) (rate(http_requests_total{job=\"$job\",handler!=\"none\",handler=~\"$handler\",status=~\"4..\"}[5m]))", + "legendFormat": "4xx {{status}}" + }, + { + "expr": "sum by (status) (rate(http_requests_total{job=\"$job\",handler!=\"none\",handler=~\"$handler\",status=~\"5..\"}[5m]))", + "legendFormat": "5xx {{status}}" + } + ], + "fieldConfig": { "defaults": { "unit": "req/s", "decimals": 3, "min": 0 } }, + "options": { + "legend": { "displayMode": "table", "placement": "right", "calcs": ["lastNotNull","sum"] } + }, + "gridPos": { "h": 8, "w": 12, "x": 12, "y": 8 } + }, + { + "type": "timeseries", + "title": "aqw qwery len", + "targets": [ + { + "expr": "sum by (handler) (rate(http_request_size_bytes_sum{job=\"$job\",handler!=\"none\",handler=~\"$handler\"}[5m])) / sum by (handler) (rate(http_request_size_bytes_count{job=\"$job\",handler!=\"none\",handler=~\"$handler\"}[5m]))", + "legendFormat": "{{handler}}" + } + ], + "fieldConfig": { "defaults": { "unit": "bytes", "decimals": 0, "min": 0 } }, + "gridPos": { "h": 8, "w": 12, "x": 0, "y": 16 } + }, + { + "type": "timeseries", + "title": "aqw ans len", + "targets": [ + { + "expr": "sum by (handler) (rate(http_response_size_bytes_sum{job=\"$job\",handler!=\"none\",handler=~\"$handler\"}[5m])) / sum by (handler) (rate(http_response_size_bytes_count{job=\"$job\",handler!=\"none\",handler=~\"$handler\"}[5m]))", + "legendFormat": "{{handler}}" + } + ], + "fieldConfig": { "defaults": { "unit": "bytes", "decimals": 0, "min": 0 } }, + "gridPos": { "h": 8, "w": 12, "x": 12, "y": 16 } + }, + { + "type": "stat", + "title": "carts (rate/5m)", + "targets": [ + { "expr": "rate(app_cart_created_total{job=\"$job\"}[5m])" } + ], + 
"fieldConfig": { "defaults": { "unit": "ops", "decimals": 3, "min": 0 } }, + "options": { "reduceOptions": { "calcs": ["lastNotNull"] }, "colorMode": "value" }, + "gridPos": { "h": 4, "w": 6, "x": 0, "y": 24 } + }, + { + "type": "stat", + "title": "added items (rate/5m)", + "targets": [ + { "expr": "rate(app_item_added_total{job=\"$job\"}[5m])" } + ], + "fieldConfig": { "defaults": { "unit": "ops", "decimals": 3, "min": 0 } }, + "options": { "reduceOptions": { "calcs": ["lastNotNull"] }, "colorMode": "value" }, + "gridPos": { "h": 4, "w": 6, "x": 6, "y": 24 } + }, + { + "type": "stat", + "title": "CPU 5m", + "targets": [ + { "expr": "avg(rate(process_cpu_seconds_total{job=\"$job\"}[5m]))" } + ], + "fieldConfig": { "defaults": { "unit": "cores", "decimals": 2, "min": 0 } }, + "options": { "reduceOptions": { "calcs": ["lastNotNull"] }, "colorMode": "value" }, + "gridPos": { "h": 4, "w": 4, "x": 12, "y": 24 } + }, + { + "type": "stat", + "title": "mem RSS", + "targets": [ + { "expr": "process_resident_memory_bytes{job=\"$job\"}" } + ], + "fieldConfig": { "defaults": { "unit": "bytes", "decimals": 0, "min": 0 } }, + "options": { "reduceOptions": { "calcs": ["lastNotNull"] }, "colorMode": "value" }, + "gridPos": { "h": 4, "w": 4, "x": 16, "y": 24 } + }, + { + "type": "stat", + "title": "file desc", + "targets": [ + { "expr": "process_open_fds{job=\"$job\"}" } + ], + "fieldConfig": { "defaults": { "unit": "none", "decimals": 0, "min": 0 } }, + "options": { "reduceOptions": { "calcs": ["lastNotNull"] }, "colorMode": "value" }, + "gridPos": { "h": 4, "w": 4, "x": 20, "y": 24 } + }, + { + "type": "stat", + "title": "uptime", + "targets": [ + { "expr": "time() - process_start_time_seconds{job=\"$job\"}" } + ], + "fieldConfig": { "defaults": { "unit": "s", "decimals": 0, "min": 0 } }, + "options": { "reduceOptions": { "calcs": ["lastNotNull"] }, "colorMode": "value" }, + "gridPos": { "h": 4, "w": 6, "x": 0, "y": 28 } + }, + { + "type": "timeseries", + "title": "GC 
collections/sec", + "targets": [ + { + "expr": "rate(python_gc_collections_total{job=\"$job\",generation=\"0\"}[5m])", + "legendFormat": "gen0" + }, + { + "expr": "rate(python_gc_collections_total{job=\"$job\",generation=\"1\"}[5m])", + "legendFormat": "gen1" + }, + { + "expr": "rate(python_gc_collections_total{job=\"$job\",generation=\"2\"}[5m])", + "legendFormat": "gen2" + } + ], + "fieldConfig": { "defaults": { "unit": "ops", "decimals": 3, "min": 0 } }, + "options": { + "legend": { "displayMode": "table", "placement": "right", "calcs": ["lastNotNull","max","mean"] } + }, + "gridPos": { "h": 8, "w": 18, "x": 6, "y": 28 } + }, + { + "type": "stat", + "title": "Prometheus scrape", + "targets": [ + { "expr": "max(up{job=\"$job\"})" } + ], + "fieldConfig": { + "defaults": { + "unit": "none", + "thresholds": { + "mode": "absolute", + "steps": [ + { "color": "red", "value": 0 }, + { "color": "green", "value": 1 } + ] + } + } + }, + "options": { "reduceOptions": { "calcs": ["lastNotNull"] }, "colorMode": "value" }, + "gridPos": { "h": 8, "w": 6, "x": 0, "y": 32 } + }, + { + "type": "timeseries", + "title": "Prometheus scrape duration / samples", + "targets": [ + { "expr": "avg(scrape_duration_seconds{job=\"$job\"})", "legendFormat": "scrape duration (s)" }, + { "expr": "sum(scrape_samples_scraped{job=\"$job\"})", "legendFormat": "samples scraped" } + ], + "fieldConfig": { "defaults": { "decimals": 3, "min": 0 } }, + "options": { "legend": { "displayMode": "table", "placement": "right" } }, + "gridPos": { "h": 8, "w": 18, "x": 6, "y": 36 } + } + ] +} \ No newline at end of file diff --git a/lecture4/hw4/monitoring/grafana/datasources/datasource.yml b/lecture4/hw4/monitoring/grafana/datasources/datasource.yml new file mode 100644 index 00000000..415d3568 --- /dev/null +++ b/lecture4/hw4/monitoring/grafana/datasources/datasource.yml @@ -0,0 +1,12 @@ +apiVersion: 1 +deleteDatasources: + - name: Prometheus + type: prometheus + +datasources: + - name: Prometheus + type: 
prometheus + access: proxy + url: http://prometheus:9090 + isDefault: true + editable: false \ No newline at end of file diff --git a/lecture4/hw4/monitoring/prometheus.yml b/lecture4/hw4/monitoring/prometheus.yml new file mode 100644 index 00000000..63dfb61f --- /dev/null +++ b/lecture4/hw4/monitoring/prometheus.yml @@ -0,0 +1,13 @@ +global: + scrape_interval: 15s + evaluation_interval: 15s + +scrape_configs: + - job_name: "shop_api" + metrics_path: /metrics + static_configs: + - targets: ["host.docker.internal:8000"] + + - job_name: "prometheus" + static_configs: + - targets: ["prometheus:9090"] \ No newline at end of file diff --git a/lecture4/hw4/requirements.txt b/lecture4/hw4/requirements.txt new file mode 100644 index 00000000..f85b1929 --- /dev/null +++ b/lecture4/hw4/requirements.txt @@ -0,0 +1,6 @@ +fastapi>=0.117.1 +uvicorn>=0.24.0 +prometheus-fastapi-instrumentator>=7.0.0 +prometheus-client>=0.20.0 +SQLAlchemy>=2.0.31 +psycopg2-binary>=2.9.9 \ No newline at end of file diff --git a/lecture4/hw4/scripts/__init__.py b/lecture4/hw4/scripts/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/lecture4/hw4/scripts/log.txt b/lecture4/hw4/scripts/log.txt new file mode 100644 index 00000000..c51a793b --- /dev/null +++ b/lecture4/hw4/scripts/log.txt @@ -0,0 +1,28 @@ +START 2025-10-26T15:31:19.058475Z +== DIRTY READ == +T2@RC read=150 +after_rollback=150 +PASS +== NON-REPEATABLE READ @ READ COMMITTED == +first=150 second=250 +PASS +== NO NON-REPEATABLE READ @ REPEATABLE READ == +first=150 second=150 +PASS +== PHANTOM @ READ COMMITTED == +first=1 second=2 +PASS +== NO PHANTOM @ REPEATABLE READ == +first=1 second=1 +PASS +== WRITE SKEW: RR vs SERIALIZABLE == +RR reads=2,2 commits=commit,commit final_working=0 +SER reads=2,2 commits=commit,rollback final_working=1 +PASS +SUMMARY pass=6 skip=0 fail=0 +dirty_read: PASS +nonrepeat_rc: PASS +nonrepeat_rr: PASS +phantom_rc: PASS +phantom_rr: PASS +serializable_ws: PASS \ No newline at end of file diff 
--git a/lecture4/hw4/scripts/run_verification.sh b/lecture4/hw4/scripts/run_verification.sh new file mode 100755 index 00000000..72c420b8 --- /dev/null +++ b/lecture4/hw4/scripts/run_verification.sh @@ -0,0 +1,5 @@ +#!/usr/bin/env bash +set -e +python -m scripts.verify_isolation +echo "----- log.txt -----" +cat log.txt \ No newline at end of file diff --git a/lecture4/hw4/scripts/tx_demo.py b/lecture4/hw4/scripts/tx_demo.py new file mode 100644 index 00000000..7412fda6 --- /dev/null +++ b/lecture4/hw4/scripts/tx_demo.py @@ -0,0 +1,148 @@ +import os, threading, time +from sqlalchemy import create_engine, text +from sqlalchemy.exc import OperationalError, DatabaseError + +URL=os.getenv("DATABASE_URL","sqlite:///./app.db") +E=create_engine(URL, future=True) + +def _prep(): + with E.begin() as c: + c.execute(text("drop table if exists t_nr")) + c.execute(text("create table t_nr(id int primary key, v int)")) + c.execute(text("insert into t_nr(id,v) values (1,0) on conflict (id) do update set v=excluded.v" if URL.startswith("sqlite") else "insert into t_nr(id,v) values (1,0) on conflict (id) do nothing")) + c.execute(text("drop table if exists t_ph")) + c.execute(text("create table t_ph(id serial primary key, tag int)")) if not URL.startswith("sqlite") else c.execute(text("create table t_ph(id integer primary key autoincrement, tag int)")) + c.execute(text("delete from t_ph")) + c.execute(text("insert into t_ph(tag) values (1),(1),(2)")) + c.execute(text("drop table if exists oncall")) + c.execute(text("create table oncall(id int primary key, working bool)")) + # should works + c.execute(text("insert into oncall(id,working) values (1,true) on conflict (id) do update set working=excluded.working" if URL.startswith("sqlite") else "insert into oncall(id,working) values (1,true) on conflict (id) do nothing")) + c.execute(text("insert into oncall(id,working) values (2,true) on conflict (id) do update set working=excluded.working" if URL.startswith("sqlite") else "insert into 
oncall(id,working) values (2,true) on conflict (id) do nothing")) + +def _engine(level): + if URL.startswith("sqlite"): return create_engine(URL, future=True) + return create_engine(URL, future=True, isolation_level=level) + +def dirty_read(): + _prep() + if not URL.startswith("postgres"): + print("skip: dirty read demo depends on vendor"); return + A=_engine("READ UNCOMMITTED") + B=_engine("READ UNCOMMITTED") + with A.begin() as a: + a.execute(text("update t_nr set v=10 where id=1")) + with B.begin() as b: + r=b.execute(text("select v from t_nr where id=1")).scalar() + print("dirty_read_read=",r) + a.rollback() + with E.begin() as c: + r=c.execute(text("select v from t_nr where id=1")).scalar() + print("after_rollback=",r) + +def non_repeatable_read_read_committed(): + _prep() + A=_engine("READ COMMITTED") + B=_engine("READ COMMITTED") + with A.connect() as a: + a.exec_driver_sql("begin") + r1=a.execute(text("select v from t_nr where id=1")).scalar() + def w(): + with B.begin() as b: + b.execute(text("update t_nr set v=20 where id=1")) + t=threading.Thread(target=w); t.start(); t.join() + r2=a.execute(text("select v from t_nr where id=1")).scalar() + a.exec_driver_sql("commit") + print("rc_v1=",r1,"rc_v2=",r2) + +def repeatable_read_blocks_nonrepeatable(): + if not URL.startswith("postgres"): + print("skip: rr demo depends on postgres"); return + _prep() + A=_engine("REPEATABLE READ") + B=_engine("READ COMMITTED") + with A.connect() as a: + a.exec_driver_sql("begin") + r1=a.execute(text("select v from t_nr where id=1")).scalar() + def w(): + time.sleep(0.2) + with B.begin() as b: + b.execute(text("update t_nr set v=30 where id=1")) + t=threading.Thread(target=w); t.start(); t.join() + r2=a.execute(text("select v from t_nr where id=1")).scalar() + a.exec_driver_sql("commit") + print("rr_v1=",r1,"rr_v2=",r2) + +def phantom_read_read_committed(): + _prep() + A=_engine("READ COMMITTED") + B=_engine("READ COMMITTED") + with A.connect() as a: + 
a.exec_driver_sql("begin") + r1=a.execute(text("select count(*) from t_ph where tag=1")).scalar() + def w(): + with B.begin() as b: + b.execute(text("insert into t_ph(tag) values (1)")) + t=threading.Thread(target=w); t.start(); t.join() + r2=a.execute(text("select count(*) from t_ph where tag=1")).scalar() + a.exec_driver_sql("commit") + print("rc_ph1=",r1,"rc_ph2=",r2) + +def repeatable_read_blocks_phantoms(): + if not URL.startswith("postgres"): + print("skip: rr phantom demo depends on postgres"); return + _prep() + A=_engine("REPEATABLE READ") + B=_engine("READ COMMITTED") + with A.connect() as a: + a.exec_driver_sql("begin") + r1=a.execute(text("select count(*) from t_ph where tag=1")).scalar() + def w(): + time.sleep(0.2) + with B.begin() as b: + b.execute(text("insert into t_ph(tag) values (1)")) + t=threading.Thread(target=w); t.start(); t.join() + r2=a.execute(text("select count(*) from t_ph where tag=1")).scalar() + a.exec_driver_sql("commit") + print("rr_ph1=",r1,"rr_ph2=",r2) + +def write_skew_rr_vs_serializable(): + if not URL.startswith("postgres"): + print("skip: serializable demo depends on postgres"); return + _prep() + def txn(level, val): + eng=_engine(level) + with eng.connect() as c: + c.exec_driver_sql("begin") + n=c.execute(text("select count(*) from oncall where working=true")).scalar() + if n>=2: + c.exec_driver_sql("commit"); print(level,"skip"); return + c.execute(text("update oncall set working=false where id=:i"),{"i":val}) + try: + c.exec_driver_sql("commit"); print(level,"commit") + except DatabaseError as e: + print(level,"rollback") + with E.begin() as c: + c.execute(text("update oncall set working=true where id in (1,2)")) + t1=threading.Thread(target=txn,args=("REPEATABLE READ",1)) + t2=threading.Thread(target=txn,args=("REPEATABLE READ",2)) + t1.start(); t2.start(); t1.join(); t2.join() + with E.begin() as c: + r=c.execute(text("select count(*) from oncall where working=true")).scalar() + print("rr_after=",r) + with E.begin() as 
c: + c.execute(text("update oncall set working=true where id in (1,2)")) + t1=threading.Thread(target=txn,args=("SERIALIZABLE",1)) + t2=threading.Thread(target=txn,args=("SERIALIZABLE",2)) + t1.start(); t2.start(); t1.join(); t2.join() + with E.begin() as c: + r=c.execute(text("select count(*) from oncall where working=true")).scalar() + print("ser_after=",r) + +if __name__=="__main__": + dirty_read() + non_repeatable_read_read_committed() + repeatable_read_blocks_nonrepeatable() + phantom_read_read_committed() + repeatable_read_blocks_phantoms() + write_skew_rr_vs_serializable() \ No newline at end of file diff --git a/lecture4/hw4/scripts/verify_isolation.py b/lecture4/hw4/scripts/verify_isolation.py new file mode 100644 index 00000000..a14fd3b7 --- /dev/null +++ b/lecture4/hw4/scripts/verify_isolation.py @@ -0,0 +1,162 @@ +import os, threading, time, datetime +from sqlalchemy import create_engine, text + +URL=os.getenv("DATABASE_URL","sqlite:///./app.db") +PG=URL.startswith("postgres") +E=create_engine(URL, future=True) + +def _w(f,s): print(s); f.write(s+"\n") + +def _prep_nr(c): + c.execute(text("drop table if exists t_nr")) + c.execute(text("create table t_nr(id int primary key, v int)")) + c.execute(text("insert into t_nr(id,v) values (1,150)")) + +def _prep_ph(c): + c.execute(text("drop table if exists t_ph")) + if PG: c.execute(text("create table t_ph(id serial primary key, tag int)")) + else: c.execute(text("create table t_ph(id integer primary key autoincrement, tag int)")) + c.execute(text("insert into t_ph(tag) values (1)")) + +def _prep_ws(c): + c.execute(text("drop table if exists oncall")) + c.execute(text("create table oncall(id int primary key, working bool)")) + c.execute(text("insert into oncall(id,working) values (1,true)")) + c.execute(text("insert into oncall(id,working) values (2,true)")) + +def _engine(level=None): + if level and PG: return create_engine(URL, future=True, isolation_level=level) + return create_engine(URL, future=True) + 
+def dirty_read(f): + _w(f,"== DIRTY READ ==") + if not PG: + _w(f,"SKIP vendor"); return "SKIP" + with E.begin() as c: _prep_nr(c) + A=_engine("READ COMMITTED") + with A.connect() as t1, A.connect() as t2: + x=t1.begin(); t1.execute(text("update t_nr set v=200 where id=1")) + y=t2.begin(); r=t2.execute(text("select v from t_nr where id=1")).scalar(); y.commit() + x.rollback() + _w(f,f"T2@RC read={r}") + with E.begin() as c: cur=c.execute(text("select v from t_nr where id=1")).scalar() + _w(f,f"after_rollback={cur}") + ok=(r==150 and cur==150) + _w(f,"PASS" if ok else "FAIL") + return "PASS" if ok else "FAIL" + +def nonrepeat_rc(f): + _w(f,"== NON-REPEATABLE READ @ READ COMMITTED ==") + with E.begin() as c: _prep_nr(c) + A=_engine("READ COMMITTED") + with A.connect() as a, A.connect() as b: + ta=a.begin(); r1=a.execute(text("select v from t_nr where id=1")).scalar() + def w(): + with b.begin() as tb: b.execute(text("update t_nr set v=250 where id=1")) + t=threading.Thread(target=w); t.start(); t.join() + r2=a.execute(text("select v from t_nr where id=1")).scalar(); ta.commit() + _w(f,f"first={r1} second={r2}") + ok=(r1!=r2 and r1==150 and r2==250) + _w(f,"PASS" if ok else "FAIL") + return "PASS" if ok else "FAIL" + +def nonrepeat_rr(f): + _w(f,"== NO NON-REPEATABLE READ @ REPEATABLE READ ==") + if not PG: + _w(f,"SKIP vendor"); return "SKIP" + with E.begin() as c: _prep_nr(c) + A=_engine("REPEATABLE READ"); B=_engine("READ COMMITTED") + with A.connect() as a, B.connect() as b: + ta=a.begin(); r1=a.execute(text("select v from t_nr where id=1")).scalar() + def w(): + time.sleep(0.2) + + with b.begin() as tb: b.execute(text("update t_nr set v=300 where id=1")) + t=threading.Thread(target=w); t.start(); t.join() + r2=a.execute(text("select v from t_nr where id=1")).scalar(); ta.commit() + _w(f,f"first={r1} second={r2}") + ok=(r1==150 and r2==150) + _w(f,"PASS" if ok else "FAIL") + return "PASS" if ok else "FAIL" + +def phantom_rc(f): + _w(f,"== PHANTOM @ READ COMMITTED 
==") + with E.begin() as c: _prep_ph(c) + A=_engine("READ COMMITTED") + with A.connect() as a, A.connect() as b: + ta=a.begin(); c1=a.execute(text("select count(*) from t_ph where tag=1")).scalar() + def w(): + with b.begin() as tb: b.execute(text("insert into t_ph(tag) values (1)")) + t=threading.Thread(target=w); t.start(); t.join() + c2=a.execute(text("select count(*) from t_ph where tag=1")).scalar(); ta.commit() + _w(f,f"first={c1} second={c2}") + ok=(c1==1 and c2==2) + _w(f,"PASS" if ok else "FAIL") + return "PASS" if ok else "FAIL" + +def phantom_rr(f): + _w(f,"== NO PHANTOM @ REPEATABLE READ ==") + if not PG: + _w(f,"SKIP vendor"); return "SKIP" + with E.begin() as c: _prep_ph(c) + A=_engine("REPEATABLE READ"); B=_engine("READ COMMITTED") + with A.connect() as a, B.connect() as b: + ta=a.begin(); c1=a.execute(text("select count(*) from t_ph where tag=1")).scalar() + def w(): + time.sleep(0.2) + with b.begin() as tb: b.execute(text("insert into t_ph(tag) values (1)")) + t=threading.Thread(target=w); t.start(); t.join() + c2=a.execute(text("select count(*) from t_ph where tag=1")).scalar(); ta.commit() + _w(f,f"first={c1} second={c2}") + ok=(c1==1 and c2==1) + _w(f,"PASS" if ok else "FAIL") + return "PASS" if ok else "FAIL" + +def serializable_ws(f): + _w(f,"== WRITE SKEW: RR vs SERIALIZABLE ==") + if not PG: + _w(f,"SKIP vendor"); return "SKIP" + with E.begin() as c: _prep_ws(c) + def run(level): + with E.begin() as c: c.execute(text("update oncall set working=true where id in (1,2)")) + bar=threading.Barrier(2); res={} + def tx(i,tag): + eng=_engine(level); + with eng.connect() as x: + t=x.begin(); n=x.execute(text("select count(*) from oncall where working=true")).scalar(); res[f"read_{tag}"]=n; bar.wait() + x.execute(text("update oncall set working=false where id=:i"),{"i":i}) + try: t.commit(); res[f"commit_{tag}"]="commit" + except Exception: res[f"commit_{tag}"]="rollback" + t1=threading.Thread(target=tx,args=(1,"A")) + 
t2=threading.Thread(target=tx,args=(2,"B")) + t1.start(); t2.start(); t1.join(); t2.join() + with E.begin() as c: res["final"]=c.execute(text("select count(*) from oncall where working=true")).scalar() + return res + r1=run("REPEATABLE READ") + _w(f,f"RR reads={r1['read_A']},{r1['read_B']} commits={r1['commit_A']},{r1['commit_B']} final_working={r1['final']}") + r2=run("SERIALIZABLE") + _w(f,f"SER reads={r2['read_A']},{r2['read_B']} commits={r2['commit_A']},{r2['commit_B']} final_working={r2['final']}") + ok_rr=(r1["final"] in (0,1,2)) and (r1["commit_A"]=="commit" and r1["commit_B"]=="commit") + ok_ser=(r2["final"]>=1) and ({"commit","rollback"}=={r2["commit_A"],r2["commit_B"]}) + ok=ok_rr and ok_ser + _w(f,"PASS" if ok else "FAIL") + return "PASS" if ok else "FAIL" + +def main(): + with open("log.txt","w",encoding="utf-8") as f: + _w(f,"START "+datetime.datetime.utcnow().isoformat()+"Z") + r=[] + r.append(("dirty_read",dirty_read(f))) + r.append(("nonrepeat_rc",nonrepeat_rc(f))) + r.append(("nonrepeat_rr",nonrepeat_rr(f))) + r.append(("phantom_rc",phantom_rc(f))) + r.append(("phantom_rr",phantom_rr(f))) + r.append(("serializable_ws",serializable_ws(f))) + ok=sum(1 for _,x in r if x=="PASS") + sk=sum(1 for _,x in r if x=="SKIP") + fl=sum(1 for _,x in r if x=="FAIL") + _w(f,f"SUMMARY pass={ok} skip={sk} fail={fl}") + for k,v in r: _w(f,f"{k}: {v}") + +if __name__=="__main__": + main() \ No newline at end of file diff --git a/lecture4/hw4/shop_api/__init__.py b/lecture4/hw4/shop_api/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/lecture4/hw4/shop_api/db.py b/lecture4/hw4/shop_api/db.py new file mode 100644 index 00000000..99fcc88d --- /dev/null +++ b/lecture4/hw4/shop_api/db.py @@ -0,0 +1,13 @@ +import os +from sqlalchemy import create_engine +from sqlalchemy.orm import DeclarativeBase +from sqlalchemy.orm import sessionmaker + +DATABASE_URL=os.getenv("DATABASE_URL","sqlite:///./app.db") +engine=create_engine(DATABASE_URL, future=True) +class 
Base(DeclarativeBase): pass +SessionLocal=sessionmaker(bind=engine, autoflush=False, autocommit=False, future=True) + +def init_db(): + from . import models + Base.metadata.create_all(bind=engine) \ No newline at end of file diff --git a/lecture4/hw4/shop_api/main.py b/lecture4/hw4/shop_api/main.py new file mode 100644 index 00000000..4e913eeb --- /dev/null +++ b/lecture4/hw4/shop_api/main.py @@ -0,0 +1,223 @@ +from collections import defaultdict +import secrets + + +from fastapi import FastAPI, HTTPException, Body, WebSocket, WebSocketDisconnect +from fastapi.responses import JSONResponse + + + +from prometheus_fastapi_instrumentator import Instrumentator +from prometheus_client import Counter as PcCounter, Counter + +from sqlalchemy import select, update, func + + + +from .db import SessionLocal, init_db +from .models import Item, Cart, CartItem + +app=FastAPI() +Instrumentator().instrument(app).expose(app,endpoint="/metrics",include_in_schema=False) +CART_CREATED=Counter("app_cart_created_total","total carts") +ITEM_ADDED=Counter("app_item_added_total","total items") +REQS_BY_PATH=PcCounter("app_requests_total","total q per path",["path"]) + +@app.on_event("startup") +def _s(): init_db() + +@app.middleware("http") +async def m(rq, nxt): + if rq.url.path!="/metrics": REQS_BY_PATH.labels(path=rq.url.path).inc() + return await nxt(rq) + +def gi(x,n): + if x is None: return None + try: return int(x) + except: raise HTTPException(status_code=422,detail=f"Invalid {n}") +def gf(x,n): + if x is None: return None + try: return float(x) + except: raise HTTPException(status_code=422,detail=f"Invalid {n}") +def gb(x,d=False): + if isinstance(x,bool): return x + if x is None: return d + s=str(x).lower() + if s in ("1","true","yes","on"): return True + if s in ("0","false","no","off"): return False + return d + +def sl(a,o,l): return a[o:o+l] + +def need_item(db,i): + it=db.get(Item,i) + if not it or it.deleted: raise HTTPException(status_code=404,detail="Item not found") + 
return it +def need_cart(db,c): + ct=db.get(Cart,c) + if not ct: raise HTTPException(status_code=404,detail="Cart not found") + return ct + +def cv(db,c): + rows=db.execute(select(CartItem,Item).join(Item,CartItem.item_id==Item.id).where(CartItem.cart_id==c.id,Item.deleted==False).order_by(Item.id)).all() + xs=[]; tot=0.0 + for ci,it in rows: + xs.append({"id":it.id,"quantity":int(ci.quantity)}) + tot+=float(it.price)*int(ci.quantity) + return {"id":c.id,"items":xs,"price":float(tot)} + +@app.post("/item",status_code=201) +def create_item(p=Body(...)): + if not isinstance(p,dict): raise HTTPException(status_code=422,detail="Invalid body") + if "name" not in p or "price" not in p: raise HTTPException(status_code=422,detail="name and price required") + n=p["name"]; pr=gf(p["price"],"price") + if pr is None or pr<0.0 or not isinstance(n,str) or not n: raise HTTPException(status_code=422,detail="Invalid fields") + with SessionLocal.begin() as db: + it=Item(name=n,price=float(pr),deleted=False); db.add(it); db.flush() + return {"id":it.id,"name":it.name,"price":float(it.price),"deleted":it.deleted} + +@app.get("/item/{item_id}") +def get_item(item_id): + try: i=int(item_id) + except: raise HTTPException(status_code=404,detail="Item not found") + with SessionLocal() as db: + it=need_item(db,i) + return {"id":it.id,"name":it.name,"price":float(it.price),"deleted":it.deleted} + +@app.get("/item") +def list_items(offset=0,limit=10,min_price=None,max_price=None,show_deleted=False): + o=gi(offset,"offset"); l=gi(limit,"limit"); mn=gf(min_price,"min_price"); mx=gf(max_price,"max_price"); sd=gb(show_deleted,False) + if o is None or o<0: raise HTTPException(status_code=422,detail="offset") + if l is None or l<=0: raise HTTPException(status_code=422,detail="limit") + if mn is not None and mn<0: raise HTTPException(status_code=422,detail="min_price") + if mx is not None and mx<0: raise HTTPException(status_code=422,detail="max_price") + with SessionLocal() as db: + 
@app.put("/item/{item_id}")
def put_item(item_id, p=Body(...)):
    """Full replace of an item's name and price; both fields are required."""
    try:
        i = int(item_id)
    except (TypeError, ValueError):  # BUG FIX: was a bare `except:`
        raise HTTPException(status_code=404, detail="Item not found")
    if not isinstance(p, dict):
        raise HTTPException(status_code=422, detail="Invalid body")
    if "name" not in p or "price" not in p:
        raise HTTPException(status_code=422, detail="name and price required")
    n = p["name"]
    pr = gf(p["price"], "price")
    if pr is None or pr < 0.0 or not isinstance(n, str) or not n:
        raise HTTPException(status_code=422, detail="Invalid fields")
    with SessionLocal.begin() as db:
        it = need_item(db, i)
        it.name = n
        it.price = float(pr)
        db.flush()
        # Read inside the session: attributes expire on commit.
        return {"id": it.id, "name": it.name, "price": float(it.price), "deleted": it.deleted}


@app.patch("/item/{item_id}")
def patch_item(item_id, body=Body(default={})):
    """Partial update of name/price; soft-deleted items answer 304 Not Modified."""
    # Local import so this fix needs no change to the file's import block.
    from fastapi import Response

    try:
        i = int(item_id)
    except (TypeError, ValueError):  # BUG FIX: was a bare `except:`
        raise HTTPException(status_code=404, detail="Item not found")
    if body is None:
        body = {}
    if not isinstance(body, dict):
        raise HTTPException(status_code=422, detail="Invalid body")
    ok = {"name", "price"}
    if not set(body.keys()).issubset(ok):
        raise HTTPException(status_code=422, detail="Unexpected fields")
    with SessionLocal.begin() as db:
        it = need_item(db, i)
        if it.deleted:
            # BUG FIX: a 304 response must not carry a body; the original
            # JSONResponse(status_code=304, content=None) serializes b"null",
            # which violates the HTTP spec and trips ASGI servers.
            return Response(status_code=304)
        if "name" in body:
            if not isinstance(body["name"], str) or not body["name"]:
                raise HTTPException(status_code=422, detail="Invalid name")
            it.name = body["name"]
        if "price" in body:
            p = gf(body["price"], "price")
            if p is None or p < 0.0:
                raise HTTPException(status_code=422, detail="Invalid price")
            it.price = float(p)  # use the validated value, not the raw body field
        db.flush()
        return {"id": it.id, "name": it.name, "price": float(it.price), "deleted": it.deleted}
@app.delete("/item/{item_id}")
def delete_item(item_id):
    """Soft-delete: flags the item rather than removing the row (idempotent)."""
    try:
        i = int(item_id)
    except (TypeError, ValueError):  # BUG FIX: was a bare `except:`
        raise HTTPException(status_code=404, detail="Item not found")
    with SessionLocal.begin() as db:
        it = db.get(Item, i)
        if not it:
            raise HTTPException(status_code=404, detail="Item not found")
        it.deleted = True
        db.flush()
    return {"status": "ok"}


@app.post("/cart", status_code=201)
def create_cart():
    """Create an empty cart; the Location header points at the new resource."""
    with SessionLocal.begin() as db:
        ct = Cart()
        db.add(ct)
        db.flush()  # assigns ct.id before commit
        CART_CREATED.inc()
        cid = ct.id  # capture inside the session: attributes expire on commit
    return JSONResponse(status_code=201, content={"id": cid}, headers={"location": f"/cart/{cid}"})


@app.get("/cart/{cart_id}")
def get_cart(cart_id):
    """Return the serialized cart view; non-numeric ids are treated as 404."""
    try:
        c = int(cart_id)
    except (TypeError, ValueError):  # BUG FIX: was a bare `except:`
        raise HTTPException(status_code=404, detail="Cart not found")
    with SessionLocal() as db:
        return cv(db, need_cart(db, c))


@app.post("/cart/{cart_id}/add/{item_id}")
def add_item_to_cart(cart_id, item_id):
    """Add one unit of the item to the cart (upsert on the quantity row)."""
    try:
        c = int(cart_id)
        i = int(item_id)
    except (TypeError, ValueError):  # BUG FIX: was a bare `except:`
        raise HTTPException(status_code=404, detail="Not found")
    with SessionLocal.begin() as db:
        cart = need_cart(db, c)
        it = need_item(db, i)
        ci = db.get(CartItem, {"cart_id": cart.id, "item_id": it.id})
        if ci:
            ci.quantity = int(ci.quantity) + 1
        else:
            ci = CartItem(cart_id=cart.id, item_id=it.id, quantity=1)
            db.add(ci)
        ITEM_ADDED.inc()
        db.flush()
        return cv(db, cart)


@app.get("/cart")
def list_carts(offset=0, limit=10, min_price=None, max_price=None, min_quantity=None, max_quantity=None):
    """List carts filtered by total price and total quantity, then paginate.

    NOTE(review): the source of this handler was garbled (text between `<`
    and `>` was stripped, e.g. ``v["price"]float(mx)``); the four filter
    comparisons below are reconstructed from the parameter names and the
    mirrored min/max filters in list_items -- confirm against the tests.
    """
    o = gi(offset, "offset")
    l = gi(limit, "limit")
    mn = gf(min_price, "min_price")
    mx = gf(max_price, "max_price")
    miq = gi(min_quantity, "min_quantity")
    maq = gi(max_quantity, "max_quantity")
    if o is None or o < 0:
        raise HTTPException(status_code=422, detail="offset")
    if l is None or l <= 0:
        raise HTTPException(status_code=422, detail="limit")
    if mn is not None and mn < 0:
        raise HTTPException(status_code=422, detail="min_price")
    if mx is not None and mx < 0:
        raise HTTPException(status_code=422, detail="max_price")
    if miq is not None and miq < 0:
        raise HTTPException(status_code=422, detail="min_quantity")
    if maq is not None and maq < 0:
        raise HTTPException(status_code=422, detail="max_quantity")
    with SessionLocal() as db:
        vs = []
        # PERF FIX: iterate the carts directly instead of collecting ids and
        # re-fetching each cart by id (avoidable N+1 round-trips).
        for cart in db.execute(select(Cart).order_by(Cart.id)).scalars():
            v = cv(db, cart)
            if mn is not None and v["price"] < float(mn):
                continue
            if mx is not None and v["price"] > float(mx):
                continue
            q = sum(i["quantity"] for i in v["items"])
            if miq is not None and q < int(miq):
                continue
            if maq is not None and q > int(maq):
                continue
            vs.append(v)
        return sl(vs, o, l)
# ---- in-memory chat state (process-local; resets on restart) ----
_rooms = defaultdict(set)   # room name -> set of connected websockets
_usernames = {}             # websocket -> assigned display name
_alloc = set()              # names ever handed out; never recycled, so it grows for process lifetime


def _uname():
    """Return a short random username unique for the lifetime of the process."""
    while True:
        u = "u" + secrets.token_hex(2)
        if u not in _alloc:
            _alloc.add(u)
            return u


async def _bcast(room, sender, msg):
    """Send "<user>: <msg>" to every member of *room* except the sender.

    Sockets that fail to accept the send are pruned from the room state.
    """
    u = _usernames.get(sender, "anon")
    pl = f"{u}: {msg}"
    dead = []
    for ws in list(_rooms[room]):
        if ws is sender:
            continue
        try:
            await ws.send_text(pl)
        except Exception:  # BUG FIX: was a bare `except:`; any send failure marks the socket dead
            dead.append(ws)
    for ws in dead:
        _rooms[room].discard(ws)
        _usernames.pop(ws, None)


@app.websocket("/chat/{room}")
async def ws_chat(ws: WebSocket, room: str):
    """Join chat *room* and relay every received message to the other members.

    BUG FIX: the parameter was unannotated (``async def ws_chat(ws, room)``);
    FastAPI injects the connection object only into a parameter annotated as
    ``WebSocket``, so the endpoint could not work as originally written.
    """
    await ws.accept()
    _usernames[ws] = _uname()
    _rooms[room].add(ws)
    try:
        while True:
            m = await ws.receive_text()
            await _bcast(room, ws, m)
    except WebSocketDisconnect:
        pass
    finally:
        # Always drop the connection from the shared state, even on errors.
        _rooms[room].discard(ws)
        _usernames.pop(ws, None)
# --- shop_api/models.py ---
# NOTE(review): the Item class header and its first columns sit on the previous
# source line; the class is reproduced here in full for coherence.
class Item(Base):
    """A catalog item; rows are soft-deleted via the `deleted` flag."""

    __tablename__ = "items"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    name: Mapped[str] = mapped_column(String(255), nullable=False)
    price: Mapped[float] = mapped_column(Numeric(12, 2), nullable=False)
    deleted: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False)


class Cart(Base):
    """A shopping cart; its contents live in CartItem association rows."""

    __tablename__ = "carts"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)


class CartItem(Base):
    """Association row: the quantity of one item inside one cart."""

    __tablename__ = "cart_items"

    cart_id: Mapped[int] = mapped_column(ForeignKey("carts.id", ondelete="CASCADE"), primary_key=True)
    item_id: Mapped[int] = mapped_column(ForeignKey("items.id", ondelete="RESTRICT"), primary_key=True)
    quantity: Mapped[int] = mapped_column(Integer, nullable=False, default=0)

    # NOTE(review): this unique constraint duplicates the composite primary key
    # (cart_id, item_id); kept as-is to preserve the emitted DDL exactly.
    __table_args__ = (UniqueConstraint("cart_id", "item_id", name="uq_cart_item"),)