Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
42 commits
Select commit Hold shift + click to select a range
df6d505
init private_api
RusticPotatoes Jun 7, 2025
c5c962b
removing redundant files
RusticPotatoes Jun 7, 2025
8ba0ef8
referance fixes
RusticPotatoes Jun 7, 2025
bd48cd5
proj for api private
RusticPotatoes Jun 7, 2025
57ece58
fixing pytest for local run
RusticPotatoes Jun 6, 2025
156782a
remove aiosqlite
RusticPotatoes Jun 6, 2025
c6da2d0
removing, uneeded
RusticPotatoes Jun 6, 2025
7966f8e
reverting back
RusticPotatoes Jun 6, 2025
94f0d33
remove dependency not used
RusticPotatoes Jun 6, 2025
0eccc2b
move env loads to init
RusticPotatoes Jun 6, 2025
1b7b98b
init docker file. pivioting back to public api
RusticPotatoes Jun 7, 2025
b202404
Merge branch 'develop' into private_api_init
RusticPotatoes Jun 7, 2025
9e5207d
removing wait adding private
RusticPotatoes Jun 7, 2025
d407d1a
correcting paths, bases is not right for context
RusticPotatoes Jun 7, 2025
49a18fb
use 6000
RusticPotatoes Jun 7, 2025
6aa9c95
cleanup
RusticPotatoes Jun 7, 2025
a9d3d1a
refactor: update docker-compose and Dockerfiles to use common healthc…
Jun 7, 2025
9feec18
refactor: remove deprecated highscore endpoint implementation
Jun 7, 2025
432300e
refactor: remove unused AioKafkaEngine implementation
Jun 7, 2025
307e7ba
still runs after removal, cleanup unused dependencies
RusticPotatoes Jun 7, 2025
84f604f
move tests
RusticPotatoes Jun 7, 2025
dfc6582
kafka setting is not being used anywhere?, removing
RusticPotatoes Jun 7, 2025
3b86991
load env for tests
RusticPotatoes Jun 7, 2025
5d280a3
spaces, and private api needs dev
RusticPotatoes Jun 7, 2025
4678fa8
update uvlock
RusticPotatoes Jun 7, 2025
0ee4759
poly upgrade for tests
RusticPotatoes Jun 8, 2025
0e3534d
poly complaining, removing unused
RusticPotatoes Jun 8, 2025
3ffecc3
useful poly commands, run make checks
RusticPotatoes Jun 8, 2025
6d115bb
update cli to support tests
RusticPotatoes Jun 8, 2025
cfe4b47
move old tests to projects for now as they are mostly integration tests
RusticPotatoes Jun 8, 2025
76c4bfa
init api private
RusticPotatoes Jun 8, 2025
e741a50
uv uses hatch, removing poetry, adding dev group
RusticPotatoes Jun 8, 2025
139af32
sync
RusticPotatoes Jun 8, 2025
09aeb59
not sure what changed
RusticPotatoes Jun 8, 2025
a62fc74
prob should consider removing this line
RusticPotatoes Jun 8, 2025
bbca8ff
Merge pull request #1 from Bot-detector/develop
RusticPotatoes Jun 8, 2025
2d1004f
remove tests
RusticPotatoes Jun 8, 2025
190a4c0
Merge branch 'Bot-detector:develop' into private_api_init
RusticPotatoes Jun 10, 2025
7af47f1
Add abstract methods for selecting highscores in HighscoreDataLatestI…
extreme4all Jun 14, 2025
051b598
bf/select list must return list
extreme4all Jun 14, 2025
24fc1ff
hiscore select and list
RusticPotatoes Jun 15, 2025
7c388d8
v4?
RusticPotatoes Jun 15, 2025
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 3 additions & 2 deletions .env.example
Original file line number Diff line number Diff line change
@@ -1,8 +1,9 @@
# To get an api key please use our referral code (they also have free tier)
# https://www.webshare.io/?referral_code=qvpjdwxqsblt
PROXY_API_KEY = ""
KAFKA_BOOTSTRAP_SERVERS = "kafka:9092"
KAFKA_BOOTSTRAP_SERVERS = "kafka:9092" # public api, scraper and worker
SESSION_TIMEOUT = "60"
DATABASE_URL = "mysql+asyncmy://root:root_bot_buster@mysql:3306/playerdata"
DEBUG = "False"
PYTHONDONTWRITEBYTECODE=1
PYTHONDONTWRITEBYTECODE = 1
ENV = "DEV" # private api
9 changes: 9 additions & 0 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,15 @@ restart-%: ## Restart a docker service by name, eg make restart-api_public
docker compose build $*
docker compose up -d $*

info:
uv run poly info

libs:
uv run poly libs

checks: info libs
uv run poly check

setup:
uv sync

Expand Down
Empty file.
8 changes: 8 additions & 0 deletions bases/bot_detector/api_private/src/api/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
"""Top-level API router: mounts each versioned sub-router under its prefix."""

from fastapi import APIRouter

from . import v2, v3, v4

router = APIRouter()
router.include_router(v2.router, prefix="/v2")
router.include_router(v3.router, prefix="/v3")
router.include_router(v4.router, prefix="/v4")
1 change: 1 addition & 0 deletions bases/bot_detector/api_private/src/api/readme.md
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
the api folder can be considered the controller in the MVC approach
Empty file.
6 changes: 6 additions & 0 deletions bases/bot_detector/api_private/src/api/v2/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
"""v2 router: aggregates the legacy player and highscore endpoints."""

from fastapi import APIRouter
from . import player, highscore

router = APIRouter()
router.include_router(player.router)
router.include_router(highscore.router)
64 changes: 64 additions & 0 deletions bases/bot_detector/api_private/src/api/v2/highscore.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,64 @@
import logging

from bot_detector.api_private.src.app.repositories import (
PlayerActivityRepo,
PlayerSkillsRepo,
ScraperDataRepo,
)
from bot_detector.api_private.src.app.views.response.highscore import (
PlayerHiscoreData,
)

# from src.app.repositories.highscore import HighscoreRepo
from bot_detector.api_private.src.core.fastapi.dependencies.session import (
get_session,
)
from fastapi import APIRouter, Depends, Query

logger = logging.getLogger(__name__)


router = APIRouter()


@router.get("/highscore/latest")
async def get_highscore_latest_v2(
    player_id: int,
    player_name: str = None,
    label_id: int = None,
    many: bool = False,
    limit: int = Query(default=10, ge=0, le=10_000),
    session=Depends(get_session),
):
    """Return the latest hiscore snapshot(s) for a player in the legacy v2 shape.

    Selects the latest scraper rows, then flattens each row's skills and
    activities into top-level keys and renames columns to the historical
    v2 field names before validating through PlayerHiscoreData.
    """
    repo = ScraperDataRepo(session=session)
    repo_skills = PlayerSkillsRepo(session=session)
    repo_activities = PlayerActivityRepo(session=session)

    data = await repo.select(
        player_name=player_name,
        player_id=player_id,
        label_id=label_id,
        many=many,
        limit=limit,
        history=False,
    )

    # Bug fix: the original logged data[0] unconditionally, which raised
    # IndexError whenever the query matched no rows. Short-circuit instead.
    if not data:
        return []
    logger.info(data[0])

    for d in data:
        # Rename columns to the legacy v2 field names.
        scraper_id = d.pop("scraper_id")
        d["Player_id"] = d.pop("player_id")
        d["id"] = scraper_id
        d["timestamp"] = d.pop("created_at")
        d["ts_date"] = d.pop("record_date")

        # Flatten the per-scrape skill/activity rows into top-level keys.
        skills = await repo_skills.select(scraper_id=scraper_id)
        activities = await repo_activities.select(scraper_id=scraper_id)

        for skill in skills:
            d[skill.get("skill_name")] = skill.get("skill_value")

        for activity in activities:
            d[activity.get("activity_name")] = activity.get("activity_value")

    # NOTE(review): `if v` drops every falsy value (None but also 0), which
    # reproduces the old sparse response — confirm consumers do not expect
    # explicit zero scores, otherwise use `if v is not None`.
    data = [{k: v for k, v in d.items() if v} for d in data]
    return [PlayerHiscoreData(**d).model_dump(mode="json") for d in data]
29 changes: 29 additions & 0 deletions bases/bot_detector/api_private/src/api/v2/player.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
from bot_detector.api_private.src.app.repositories.player import PlayerRepo
from bot_detector.api_private.src.core.fastapi.dependencies.session import (
get_session,
)
from fastapi import APIRouter, Depends, Query

router = APIRouter()


@router.get("/player")
async def get_player(
    player_id: str = None,
    player_name: str = None,
    label_id: int = None,
    greater_than: bool = False,
    limit: int = Query(default=1_000, ge=0, le=100_000),
    session=Depends(get_session),
):
    """Look up players by id, name and/or label, capped at `limit` rows.

    With `greater_than` the repository matches ids above `player_id`
    instead of an exact match (pagination-style scanning).
    """
    # TODO: make use of abstract base class
    player_repo = PlayerRepo(session=session)
    return await player_repo.select(
        player_id=player_id,
        player_name=player_name,
        greater_than=greater_than,
        label_id=label_id,
        limit=limit,
    )
6 changes: 6 additions & 0 deletions bases/bot_detector/api_private/src/api/v3/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
"""v3 router: exposes the grouped highscore endpoints."""

from fastapi import APIRouter

from . import highscore

router = APIRouter()
router.include_router(highscore.router)
78 changes: 78 additions & 0 deletions bases/bot_detector/api_private/src/api/v3/highscore.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,78 @@
import logging
from collections import defaultdict

from bot_detector.api_private.src.app.repositories import ScraperDataRepo
from bot_detector.api_private.src.app.views.response import (
ActivityView,
ScraperDataView,
SkillView,
)
from bot_detector.api_private.src.core.fastapi.dependencies.session import (
get_session,
)
from fastapi import APIRouter, Depends, Query

logger = logging.getLogger(__name__)

router = APIRouter()


def convert_to_scraper_data_view(result_list: list[dict]) -> list[ScraperDataView]:
    """Group flat hiscore rows by scrape_id into ScraperDataView objects.

    Each input row carries the scrape metadata plus exactly one skill or
    activity entry (`hs_type`/`hs_name`/`hs_value`); rows sharing a
    scrape_id are merged into a single view.
    """
    # scrape_id -> partially built view data (skills/activities accumulate).
    grouped = defaultdict(lambda: {"skills": [], "activities": []})

    for row in result_list:
        entry = grouped[row["scrape_id"]]

        # The first row seen for a scrape_id populates the shared metadata.
        if "created_at" not in entry:
            entry["created_at"] = row["scrape_ts"]
            entry["record_date"] = row["scrape_date"]
            entry["scraper_id"] = row["scrape_id"]
            entry["player_id"] = row["player_id"]
            entry["player_name"] = row["player_name"]

        hs_type = row["hs_type"]
        if hs_type == "skill":
            entry["skills"].append(
                SkillView(skill_name=row["hs_name"], skill_value=row["hs_value"])
            )
        elif hs_type == "activity":
            entry["activities"].append(
                ActivityView(
                    activity_name=row["hs_name"], activity_value=row["hs_value"]
                )
            )

    # Materialize one ScraperDataView per scrape_id group.
    return [
        ScraperDataView(
            created_at=entry["created_at"],
            record_date=entry["record_date"],
            scraper_id=entry["scraper_id"],
            player_id=entry["player_id"],
            player_name=entry["player_name"],
            skills=entry["skills"],
            activities=entry["activities"],
        )
        for entry in grouped.values()
    ]


@router.get("/highscore/latest", response_model=list[ScraperDataView])
async def get_highscore_latest(
    player_id: int,
    label_id: int = None,
    many: bool = False,
    limit: int = Query(default=10, ge=0, le=10_000),
    session=Depends(get_session),
):
    """Latest scraper data for a player, grouped per scrape (v3 shape)."""
    scraper_repo = ScraperDataRepo(session=session)
    rows = await scraper_repo.select_latest_scraper_data_v3(
        player_id=player_id,
        label_id=label_id,
        many=many,
        limit=limit,
    )
    return convert_to_scraper_data_view(result_list=rows)
6 changes: 6 additions & 0 deletions bases/bot_detector/api_private/src/api/v4/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
"""v4 router: exposes the latest-table-backed highscore endpoints."""

from fastapi import APIRouter

from . import highscore

router = APIRouter()
router.include_router(highscore.router)
61 changes: 61 additions & 0 deletions bases/bot_detector/api_private/src/api/v4/highscore.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,61 @@
import logging
from collections import defaultdict
from datetime import datetime

from bot_detector.api_private.src.app.repositories import ScraperDataRepo
from bot_detector.api_private.src.app.views.response import (
ActivityView,
ScraperDataView,
SkillView,
)
from bot_detector.api_private.src.core.fastapi.dependencies.session import (
get_session,
)
from bot_detector.database.repositories.hiscore import HighscoreDataLatestRepo
from bot_detector.structs import HighscoreDataLatestStruct
from fastapi import APIRouter, Depends, Query

logger = logging.getLogger(__name__)

router = APIRouter()


def convert_latest_struct_to_scraper_data_view(
    records: list[HighscoreDataLatestStruct],
) -> list[ScraperDataView]:
    """Map HighscoreDataLatestStruct records onto the v3 ScraperDataView shape.

    Kept for backwards compatibility with /highscore/latest consumers
    (per review, the ML component uses this route); the struct's
    skills/activities dicts are expanded into view lists.

    NOTE(review): scraper_id is filled with player_id — the struct has no
    scraper id field in view here; confirm downstream consumers tolerate it.
    """
    views = []
    for record in records:
        # Fall back to midnight of the scrape date when created_at is absent.
        created = record.created_at or datetime.combine(
            record.scrape_date, datetime.min.time()
        )
        views.append(
            ScraperDataView(
                created_at=created,
                record_date=record.scrape_date,
                scraper_id=record.player_id,
                player_id=record.player_id,
                player_name=record.player_name or "Unknown",
                skills=[
                    SkillView(skill_name=name, skill_value=value)
                    for name, value in (record.skills or {}).items()
                ],
                activities=[
                    ActivityView(activity_name=name, activity_value=value)
                    for name, value in (record.activities or {}).items()
                ],
            )
        )
    return views


@router.get("/highscore/latest", response_model=list[ScraperDataView])
async def get_highscore_latest(
    player_id: int,
    label_id: int = None,
    many: bool = False,
    limit: int = Query(default=10, ge=0, le=10_000),
    session=Depends(get_session),
):
    """Latest hiscore data (v4): reads the pre-aggregated latest-data repo."""
    repo = HighscoreDataLatestRepo()
    if not many:
        # Single-player lookup; wrap the hit (if any) so the converter
        # always receives a list.
        record = await repo.select_highscore(session, player_id, label_id)
        records = [record] if record else []
    else:
        records = await repo.select_highscore_list(
            session, player_id, label_id, limit
        )
    return convert_latest_struct_to_scraper_data_view(records)
Empty file.
6 changes: 6 additions & 0 deletions bases/bot_detector/api_private/src/app/readme.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
the model is responsible for all the data handling
- getting data from the database
- handles data logic

the view is responsible for the data representation
- return format etc
Empty file.
15 changes: 15 additions & 0 deletions bases/bot_detector/api_private/src/app/repositories/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
"""Repository package: re-exports the concrete repos and the abstract base."""

from .abstract_repo import AbstractAPI
from .highscore import HighscoreRepo
from .player import PlayerRepo
from .player_activities import PlayerActivityRepo
from .player_skills import PlayerSkillsRepo
from .scraper_data import ScraperDataRepo

# Explicit public API of the package.
__all__ = [
    "HighscoreRepo",
    "PlayerRepo",
    "PlayerSkillsRepo",
    "ScraperDataRepo",
    "AbstractAPI",
    "PlayerActivityRepo",
]
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
from abc import ABC, abstractmethod


class AbstractAPI(ABC):
    """Abstract CRUD interface that every repository must implement.

    Subclasses override each method; the explicit ``raise NotImplementedError``
    also guards against accidental ``super()`` calls from overrides.
    """

    @abstractmethod
    def insert(self):
        # Create one or more records.
        raise NotImplementedError

    @abstractmethod
    def select(self):
        # Read records matching the subclass's filter parameters.
        raise NotImplementedError

    @abstractmethod
    def update(self):
        # Modify existing records.
        raise NotImplementedError

    @abstractmethod
    def delete(self):
        # Remove records.
        raise NotImplementedError
56 changes: 56 additions & 0 deletions bases/bot_detector/api_private/src/app/repositories/highscore.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,56 @@
import logging

from bot_detector.api_private.src.app.repositories.abstract_repo import (
AbstractAPI,
)
from bot_detector.api_private.src.core.database.models import ( # playerHiscoreData,; PlayerHiscoreDataXPChange,
PlayerHiscoreDataLatest,
)
from bot_detector.api_private.src.core.database.models.player import Player
from fastapi.encoders import jsonable_encoder
from sqlalchemy.ext.asyncio import AsyncResult, AsyncSession
from sqlalchemy.orm import aliased
from sqlalchemy.sql.expression import Select

logger = logging.getLogger(__name__)


class HighscoreRepo(AbstractAPI):
    """Read-only repository over PlayerHiscoreDataLatest joined with Player."""

    def __init__(self, session) -> None:
        super().__init__()
        # An SQLAlchemy async session supplied by the caller (FastAPI dependency).
        self.session: AsyncSession = session

    def insert(self):
        raise NotImplementedError

    async def select(
        self, player_id: int, label_id: int, limit: int, many: bool
    ) -> dict:
        # NOTE(review): this actually returns a list of dicts, not a dict —
        # the `-> dict` annotation looks wrong; confirm and fix separately.
        table = aliased(PlayerHiscoreDataLatest, name="phd")
        player = aliased(Player, name="pl")

        # Join latest hiscore rows to their player to expose the name.
        sql = Select(player.name, table)
        sql = sql.join(target=player, onclause=table.Player_id == player.id)

        # NOTE(review): truthiness check means player_id=0 / label_id=0 skip
        # the filter entirely — presumably ids start at 1; confirm.
        if player_id:
            if many:
                # `many` switches to a range scan from player_id upward.
                sql = sql.where(table.Player_id >= player_id)
            else:
                sql = sql.where(table.Player_id == player_id)

        if label_id:
            sql = sql.where(player.label_id == label_id)

        sql = sql.limit(limit)

        # `async with self.session` closes the session when the block exits,
        # so all row materialization happens inside it.
        async with self.session:
            result: AsyncResult = await self.session.execute(sql)
            result = result.fetchall()
            # Each row is (player_name, hiscore ORM object); flatten to a dict.
            data = [{"name": name, **jsonable_encoder(hs)} for name, hs in result]
            return data

    async def update(self):
        raise NotImplementedError

    async def delete(self):
        raise NotImplementedError
Loading