diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..e15106e --- /dev/null +++ b/.gitignore @@ -0,0 +1,216 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[codz] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py.cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +# Pipfile.lock + +# UV +# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. 
+# uv.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +# poetry.lock +# poetry.toml + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +# pdm recommends including project-wide configuration in pdm.toml, but excluding .pdm-python. +# https://pdm-project.org/en/latest/usage/project/#working-with-version-control +# pdm.lock +# pdm.toml +.pdm-python +.pdm-build/ + +# pixi +# Similar to Pipfile.lock, it is generally recommended to include pixi.lock in version control. +# pixi.lock +# Pixi creates a virtual environment in the .pixi directory, just like venv module creates one +# in the .venv directory. It is recommended not to include this directory in version control. +.pixi + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# Redis +*.rdb +*.aof +*.pid + +# RabbitMQ +mnesia/ +rabbitmq/ +rabbitmq-data/ + +# ActiveMQ +activemq-data/ + +# SageMath parsed files +*.sage.py + +# Environments +.env +.envrc +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. 
For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. +# .idea/ + +# Abstra +# Abstra is an AI-powered process automation framework. +# Ignore directories containing user credentials, local state, and settings. +# Learn more at https://abstra.io/docs +.abstra/ + +# Visual Studio Code +# Visual Studio Code specific template is maintained in a separate VisualStudioCode.gitignore +# that can be found at https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore +# and can be added to the global gitignore or merged into this file. However, if you prefer, +# you could uncomment the following to ignore the entire vscode folder +# .vscode/ + +# Ruff stuff: +.ruff_cache/ + +# PyPI configuration file +.pypirc + +# Marimo +marimo/_static/ +marimo/_lsp/ +__marimo__/ + +# Streamlit +.streamlit/secrets.toml diff --git a/odoo-kopia-snapshot/.gitignore b/odoo-kopia-snapshot/.gitignore new file mode 100644 index 0000000..c0d2085 --- /dev/null +++ b/odoo-kopia-snapshot/.gitignore @@ -0,0 +1,5 @@ +__pycache__/ +*.pyc +.venv/ +dist/ +*.egg-info/ diff --git a/odoo-kopia-snapshot/Dockerfile b/odoo-kopia-snapshot/Dockerfile index 9d9a43f..ebd2126 100644 --- a/odoo-kopia-snapshot/Dockerfile +++ b/odoo-kopia-snapshot/Dockerfile @@ -21,6 +21,9 @@ ENV KOPIA_CHECK_FOR_UPDATES=false # Copy the correct postgresql-client version from pg_builder COPY --from=pg_builder /extract/* / +# Copy uv from official image +COPY --from=ghcr.io/astral-sh/uv:latest /uv /uvx /bin/ + # Install Python, Kopia, etc. 
RUN apk add --no-cache \ ca-certificates \ @@ -37,13 +40,19 @@ RUN apk add --no-cache \ # Create app directory WORKDIR /app -# Copy scripts -COPY backup.py /app/backup.py -COPY restore.py /app/restore.py -COPY entrypoint.sh /app/entrypoint.sh +# Install dependencies (cached layer) +COPY pyproject.toml uv.lock ./ +RUN --mount=type=cache,target=/root/.cache/uv \ + uv sync --frozen --no-dev --no-editable --no-install-project -# Make scripts executable -RUN chmod +x /app/entrypoint.sh /app/backup.py /app/restore.py +# Install project +COPY src/ src/ +RUN --mount=type=cache,target=/root/.cache/uv \ + uv sync --frozen --no-dev --no-editable + +# Copy entrypoint +COPY entrypoint.sh /app/entrypoint.sh +RUN chmod +x /app/entrypoint.sh # Set entrypoint ENTRYPOINT ["/app/entrypoint.sh"] diff --git a/odoo-kopia-snapshot/README.md b/odoo-kopia-snapshot/README.md index 3ec161a..279a519 100644 --- a/odoo-kopia-snapshot/README.md +++ b/odoo-kopia-snapshot/README.md @@ -1,32 +1,264 @@ # odoo-kopia-snapshot -The intended use for this container is under a Kubernetes CronJob. +Kopia-based backup and restore for Odoo (PostgreSQL + filestore), designed for Kubernetes. -This should not be your soul backup process - you should also ensure that you have WAL -backups and pg basebackups to over the gaps between the snapshots. +## Features -Snapshots are very useful for long term storage. 
+- **Full Odoo backup** — `pg_dump` + filestore captured in a single Kopia snapshot +- **Restore** — database, filestore, or both, with download-only mode for offline inspection +- **List snapshots** — filter by tag, limit results, optional JSON output +- **Kubernetes manifest generators** — CronJob for scheduled backups, Job for one-shot restores +- **Multi-Postgres-version images** — built against PostgreSQL 13–17 +- **SHA-256 checksum verification** — database dumps are checksummed at backup and verified at restore +- **Configurable retention** — latest, hourly, daily, weekly, monthly, and annual policies +- **Read-only repository connection** — restores never mutate the repository -Monitoring is assumed to be handled as part of your Kubernetes monitoring. +## Quick start -## Backup +```bash +# 1. Generate a backup CronJob and apply it +generate-backup-cronjob \ + --namespace odoo-prod \ + --image ghcr.io/example/odoo-kopia-snapshot:2.0.1-pg17 \ + --filestore-pvc odoo-data \ + --secret-ref odoo-kopia-secrets \ + --backup-args '--kopia-repo-connect-params "azure --container=kopia --prefix=prod/"' \ + | kubectl apply -f - -- Kopia is very touchy with it's temp space. You _must_ ensure that the space that Kopia - can see matches the limits. If it can see more, but it's restricted, it won't be smart. -- The temp/cache space must be appropriately sized for your filestore and pg_dump or the - snapshot will fail. This needs to be actively monitored. -- The backup script assumes that the backup location is side of the backup source - folder and that it is emphermal i.e. it does no cleanup. +# 2. List available snapshots +list-snapshots \ + --kopia-repo-connect-params 'azure --container=kopia --prefix=prod/' -### Restore +# 3. 
Generate a restore Job for a specific snapshot +generate-restore-job \ + --namespace odoo-prod \ + --image ghcr.io/example/odoo-kopia-snapshot:2.0.1-pg17 \ + --filestore-pvc odoo-data \ + --secret-ref odoo-kopia-secrets \ + --snapshot abc123def \ + --restore-args '--postgres-restore --filestore-restore --kopia-repo-connect-params "azure --container=kopia --prefix=prod/"' \ + | kubectl apply -f - +``` -Not yet implemented. +## Commands -### Releasing +### `backup` -- Remember to bump the `versions.yaml` dummy `v` tag +Dumps the PostgreSQL database, writes a SHA-256 checksum, and creates a Kopia snapshot of the Odoo data directory. -## Building +```bash +backup \ + --kopia-repo-connect-params 'azure --container=kopia --prefix=prod/' +``` -To build and run (shell) the Dockerfile locally: -`docker run --rm -it "$(docker build -q . --build-arg POSTGRES_VERSION=17 --build-arg ALPINE_VERSION=3.22 --build-arg KOPIA_VERSION=0.17.0)" shell` +| Flag | Default | Description | +| ------------------------------ | ------------------------------- | -------------------------------------------------------------- | +| `--kopia-repo-connect-params` | _(required)_ | Kopia repository connection string | +| `--no-postgres-backup` | — | Skip the database dump | +| `--no-postgres-backup-cleanup` | — | Keep the dump file after snapshot | +| `--postgres-backup-dir` | `/var/lib/odoo/database-backup` | Ephemeral directory for the dump (must be inside `--odoo-dir`) | +| `--odoo-dir` | `/var/lib/odoo` | Root Odoo data directory to snapshot | +| `--kopia-hostname` | `odoo` | Hostname recorded in Kopia (must be stable) | +| `--kopia-username` | `odoo` | Username recorded in Kopia (must be stable) | +| `--kopia-compression` | `s2-default` | Compression algorithm | +| `--kopia-log-level` | `info` | `error`, `warning`, `info`, or `debug` | +| `--no-kopia-maintenance` | — | Skip the post-snapshot maintenance run | + +**Retention flags** (applied as global policy): + +| Flag | Default | +| 
---------------- | ------- | +| `--keep-latest` | 42 | +| `--keep-hourly` | 0 | +| `--keep-daily` | 14 | +| `--keep-weekly` | 8 | +| `--keep-monthly` | 6 | +| `--keep-annual` | 2 | + +### `restore` + +Restores a database dump, filestore, or both from a Kopia snapshot. Connects to the repository in **read-only** mode. + +```bash +restore abc123def \ + --kopia-repo-connect-params 'azure --container=kopia --prefix=prod/' \ + --postgres-restore \ + --filestore-restore +``` + +| Flag | Default | Description | +| ------------------------------ | ----------------- | -------------------------------------------------------------------- | +| `snapshot` (positional) | _(required)_ | Kopia snapshot ID (find with `list-snapshots`) | +| `--kopia-repo-connect-params` | _(required)_ | Kopia repository connection string | +| `--postgres-restore` | off | Opt-in: restore the database | +| `--filestore-restore` | off | Opt-in: restore the filestore | +| `--target-database` | `$PGDATABASE` | Restore the dump into this database name | +| `--source-database` | _(auto-detected)_ | Original database name in the snapshot | +| `--pg-restore-args` | `""` | Extra flags passed to `pg_restore` (e.g. `--clean --if-exists`) | +| `--download-only` | off | Download snapshot artifacts to disk without restoring | +| `--download-path` | — | Directory for downloaded artifacts (required with `--download-only`) | +| `--no-postgres-backup-cleanup` | — | Keep the dump file after restore | + +### `list-snapshots` + +Lists available Kopia snapshots. 
+ +```bash +list-snapshots \ + --kopia-repo-connect-params 'azure --container=kopia --prefix=prod/' \ + --json +``` + +| Flag | Default | Description | +| ----------------------------- | ------------ | ------------------------------------------------ | +| `--kopia-repo-connect-params` | _(required)_ | Kopia repository connection string | +| `--tags` | — | Filter by tag (`key:value`, repeatable) | +| `--all` | off | Show snapshots from all users/hosts | +| `--max-results` | unlimited | Limit results per source | +| `--json` | off | Machine-readable JSON output (logs go to stderr) | + +### `generate-backup-cronjob` + +Emits a Kubernetes CronJob manifest to stdout. + +```bash +generate-backup-cronjob \ + --namespace odoo-prod \ + --image ghcr.io/example/odoo-kopia-snapshot:2.0.1-pg17 \ + --filestore-pvc odoo-data \ + --secret-ref odoo-kopia-secrets \ + --schedule '30 2 * * *' \ + --backup-args '--kopia-repo-connect-params "azure --container=kopia --prefix=prod/"' +``` + +| Flag | Default | Description | +| --------------- | -------------- | ---------------------------------- | +| `--name` | `kopia-backup` | CronJob resource name | +| `--schedule` | `0 0 * * *` | Cron schedule expression | +| `--backup-args` | `""` | Extra arguments passed to `backup` | + +### `generate-restore-job` + +Emits a Kubernetes Job manifest to stdout. 
+ +```bash +generate-restore-job \ + --namespace odoo-prod \ + --image ghcr.io/example/odoo-kopia-snapshot:2.0.1-pg17 \ + --filestore-pvc odoo-data \ + --secret-ref odoo-kopia-secrets \ + --snapshot abc123def \ + --restore-args '--postgres-restore --filestore-restore --kopia-repo-connect-params "azure --container=kopia --prefix=prod/"' +``` + +| Flag | Default | Description | +| ---------------- | --------------- | ----------------------------------- | +| `--name` | `kopia-restore` | Job resource name | +| `--snapshot` | _(required)_ | Kopia snapshot ID | +| `--restore-args` | `""` | Extra arguments passed to `restore` | + +### Common Kubernetes generator flags + +Both generators share these flags via `--help`: + +| Flag | Default | Description | +| ------------------------------------------------- | ------------ | ----------------------------------------------------- | +| `--namespace` | _(required)_ | Kubernetes namespace | +| `--image` | _(required)_ | Container image reference | +| `--filestore-pvc` | _(required)_ | PVC name for Odoo data | +| `--secret-ref` | — | Inject all keys from a Secret (repeatable) | +| `--configmap-ref` | — | Inject all keys from a ConfigMap (repeatable) | +| `--env` | — | Literal env var `NAME=VALUE` (repeatable) | +| `--env-from-secret` | — | Single env from Secret `NAME=SECRET:KEY` (repeatable) | +| `--env-from-configmap` | — | Single env from ConfigMap `NAME=CM:KEY` (repeatable) | +| `--memory-request` / `--memory-limit` | `4Gi` | Memory resources | +| `--cpu-request` / `--cpu-limit` | `250m` / `1` | CPU resources | +| `--run-as-user` / `--run-as-group` / `--fs-group` | `1000` | Security context UIDs | +| `--kopia-cache-size` | `25Gi` | Ephemeral volume for Kopia cache | +| `--postgres-dump-size` | `100Gi` | Ephemeral volume for dump files | + +## Environment variables + +| Variable | Used by | Required | Description | +| ---------------- | ------------------------------- | --------------------------------- | 
----------------------------------------------------------------- | +| `KOPIA_PASSWORD` | backup, restore, list-snapshots | Yes | Kopia repository password | +| `PGHOST` | backup, restore | Yes | PostgreSQL host | +| `PGPORT` | backup, restore | Yes | PostgreSQL port | +| `PGUSER` | backup, restore | Yes | PostgreSQL user | +| `PGPASSWORD` | backup, restore | Yes | PostgreSQL password | +| `PGDATABASE` | backup, restore | Yes (backup) / Optional (restore) | Database name; restore uses it as the default `--target-database` | + +## Docker + +### Build arguments + +| Arg | Default | Description | +| ------------------ | -------- | ----------------------------------- | +| `ALPINE_VERSION` | `3.22` | Alpine base image version | +| `POSTGRES_VERSION` | `16.9` | PostgreSQL client version to bundle | +| `KOPIA_VERSION` | `0.22.3` | Kopia binary version | + +### Local build and shell + +```bash +docker run --rm -it \ + "$(docker build -q . \ + --build-arg POSTGRES_VERSION=17 \ + --build-arg ALPINE_VERSION=3.22 \ + --build-arg KOPIA_VERSION=0.22.3)" \ + shell +``` + +### Running commands + +The entrypoint accepts a command name as the first argument followed by any flags. Available commands: `backup`, `restore`, `list`, `generate-backup-cronjob`, `generate-restore-job`, and `shell`. 
+ +```bash +IMAGE=ghcr.io/example/odoo-kopia-snapshot:2.0.1-pg17 + +# List snapshots +docker run --rm \ + -e KOPIA_PASSWORD \ + "$IMAGE" list \ + --kopia-repo-connect-params 'azure --container=kopia --prefix=prod/' + +# Run a backup +docker run --rm \ + -e KOPIA_PASSWORD -e PGHOST -e PGPORT -e PGUSER -e PGPASSWORD -e PGDATABASE \ + -v odoo-data:/var/lib/odoo \ + "$IMAGE" backup \ + --kopia-repo-connect-params 'azure --container=kopia --prefix=prod/' + +# Restore (download-only to a local directory) +docker run --rm \ + -e KOPIA_PASSWORD \ + -v "$PWD/restore-output:/restore" \ + "$IMAGE" restore abc123def \ + --kopia-repo-connect-params 'azure --container=kopia --prefix=prod/' \ + --download-only --download-path /restore + +# Generate a manifest (no volumes or env needed) +docker run --rm "$IMAGE" generate-backup-cronjob \ + --namespace odoo-prod \ + --image "$IMAGE" \ + --filestore-pvc odoo-data \ + --secret-ref odoo-kopia-secrets \ + --backup-args '--kopia-repo-connect-params "azure --container=kopia --prefix=prod/"' +``` + +> **Note:** The entrypoint command for listing snapshots is `list` (not `list-snapshots`). + +## Architecture note + +This is a **snapshot-level** tool — each backup is a point-in-time `pg_dump` plus a filestore copy. It is not a WAL/PITR solution. For full coverage, pair it with continuous WAL archiving and periodic `pg_basebackup`. + +Kopia temp/cache space must be sized to accommodate your filestore and dump, or snapshots will fail. Monitor ephemeral volume usage in your Kubernetes cluster. 
+ +## Development + +```bash +uv run pytest # run the test suite +``` + +- `versions.yaml` tracks the release version and the PostgreSQL/Kopia version matrix (managed by Renovate) +- To release: bump the version in both `versions.yaml` and `pyproject.toml` diff --git a/odoo-kopia-snapshot/entrypoint.sh b/odoo-kopia-snapshot/entrypoint.sh index 32bf00c..1259e2f 100644 --- a/odoo-kopia-snapshot/entrypoint.sh +++ b/odoo-kopia-snapshot/entrypoint.sh @@ -10,13 +10,29 @@ case "$COMMAND" in backup) echo "Running backup command..." shift # Remove 'backup' from args and pass through - exec python3 /app/backup.py "$@" + exec /app/.venv/bin/backup "$@" ;; restore) echo "Running restore command..." - shift # Remove 'backup' from args and pass through - exec python3 /app/restore.py "$@" + shift # Remove 'restore' from args and pass through + exec /app/.venv/bin/restore "$@" + ;; + + list) + echo "Running list command..." + shift # Remove 'list' from args and pass through + exec /app/.venv/bin/list-snapshots "$@" + ;; + + generate-backup-cronjob) + shift + exec /app/.venv/bin/generate-backup-cronjob "$@" + ;; + + generate-restore-job) + shift + exec /app/.venv/bin/generate-restore-job "$@" ;; shell|sh|bash) @@ -26,9 +42,12 @@ case "$COMMAND" in "") echo "No command specified. 
Available commands:" - echo " backup - Run backup operation" - echo " restore - Run restore operation" - echo " shell - Start interactive shell" + echo " backup - Run backup operation" + echo " restore - Run restore operation" + echo " list - List available snapshots" + echo " generate-backup-cronjob - Generate a Kubernetes CronJob YAML for backups" + echo " generate-restore-job - Generate a Kubernetes Job YAML for restore" + echo " shell - Start interactive shell" echo "" echo "Example: docker run odoo-kopia-backup backup --pghost=db" exit 0 diff --git a/odoo-kopia-snapshot/pyproject.toml b/odoo-kopia-snapshot/pyproject.toml new file mode 100644 index 0000000..93cef87 --- /dev/null +++ b/odoo-kopia-snapshot/pyproject.toml @@ -0,0 +1,23 @@ +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[project] +name = "odoo-kopia-snapshot" +version = "2.0.1" +requires-python = ">=3.12" +dependencies = [ + "pyyaml", +] + +[project.scripts] +backup = "odoo_kopia_snapshot.backup:main" +restore = "odoo_kopia_snapshot.restore:main" +list-snapshots = "odoo_kopia_snapshot.list:main" +generate-backup-cronjob = "odoo_kopia_snapshot.generate_backup_cronjob:main" +generate-restore-job = "odoo_kopia_snapshot.generate_restore_job:main" + +[dependency-groups] +dev = [ + "pytest", +] diff --git a/odoo-kopia-snapshot/restore.py b/odoo-kopia-snapshot/restore.py deleted file mode 100644 index 51c3c55..0000000 --- a/odoo-kopia-snapshot/restore.py +++ /dev/null @@ -1,20 +0,0 @@ -#!/usr/bin/env python3 -import logging -import sys - - -logging.basicConfig( - level=logging.INFO, - format="%(asctime)s - %(levelname)s - %(message)s", - datefmt="%Y-%m-%d %H:%M:%S", -) -_logger = logging.getLogger(__name__) - - -def main(): - _logger.critical("Not implemented") - sys.exit(1) - - -if __name__ == "__main__": - main() diff --git a/odoo-kopia-snapshot/src/odoo_kopia_snapshot/__init__.py b/odoo-kopia-snapshot/src/odoo_kopia_snapshot/__init__.py new file mode 100644 index 
0000000..e69de29 diff --git a/odoo-kopia-snapshot/backup.py b/odoo-kopia-snapshot/src/odoo_kopia_snapshot/backup.py similarity index 87% rename from odoo-kopia-snapshot/backup.py rename to odoo-kopia-snapshot/src/odoo_kopia_snapshot/backup.py index 929835f..85146a0 100644 --- a/odoo-kopia-snapshot/backup.py +++ b/odoo-kopia-snapshot/src/odoo_kopia_snapshot/backup.py @@ -8,52 +8,12 @@ import sys from pathlib import Path +from .utils import setup_logging, run_command, create_sha256_file -logging.basicConfig( - level=logging.INFO, - format="%(asctime)s - %(levelname)s - %(message)s", - datefmt="%Y-%m-%d %H:%M:%S", -) +setup_logging() _logger = logging.getLogger(__name__) -def create_sha256_file(target_file): - checksum_file = f"{target_file}.sha256" - try: - result = subprocess.run( - ["sha256sum", target_file], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - text=True, - check=True, - ) - with open(checksum_file, "w") as f: - f.write(result.stdout) - except subprocess.CalledProcessError as e: - _logger.critical(f"Error calculating checksum: {e.stderr}") - except FileNotFoundError: - _logger.critical("The 'sha256sum' command was not found...") - - -def run_command(cmd, check=True, capture_output=False, text=True): - """Run a shell command and return the result.""" - try: - result = subprocess.run( - cmd, check=check, capture_output=capture_output, text=text - ) - return result.returncode == 0 - except subprocess.CalledProcessError as e: - if check: - _logger.error(f"Command failed: {' '.join(cmd)}") - _logger.error(f"Error: {e.stderr if capture_output else str(e)}") - raise - return False - except Exception as e: - _logger.error(f"An error occurred while running command {' '.join(cmd)}: {e}") - # Raising an exception here will stop the loop unless it's handled externally - raise - - def run_postgres_backup(args) -> Path: postgres_backup_directory = args.postgres_backup_dir.resolve() postgres_backup_directory.mkdir(parents=True, exist_ok=True) @@ -86,8 +46,9 @@ 
def run_postgres_backup(args) -> Path: _logger.info("Checking PostgreSQL is ready...") for i in range(1, 10): try: - pg_isready = run_command(["pg_isready"]) - if pg_isready: + result = run_command(["pg_isready"]) + if result.returncode == 0: + pg_isready = True break except subprocess.CalledProcessError: _logger.info( @@ -296,7 +257,7 @@ def main(): *args.kopia_repo_connect_params.split(), *overrides, ] - if not run_command(connect_cmd, check=False): + if run_command(connect_cmd, check=False).returncode != 0: _logger.info( "Failed to connect to Kopia repository or repository not initialized. Attempting to create..." ) diff --git a/odoo-kopia-snapshot/src/odoo_kopia_snapshot/generate_backup_cronjob.py b/odoo-kopia-snapshot/src/odoo_kopia_snapshot/generate_backup_cronjob.py new file mode 100644 index 0000000..390df8a --- /dev/null +++ b/odoo-kopia-snapshot/src/odoo_kopia_snapshot/generate_backup_cronjob.py @@ -0,0 +1,57 @@ +#!/usr/bin/env python3 +"""Generate a Kubernetes CronJob manifest for odoo-kopia-snapshot backup.""" + +import argparse +import shlex + +from .kube import add_common_args, build_pod_spec, dump_manifest + + +def main(): + parser = argparse.ArgumentParser( + description="Generate a Kubernetes CronJob YAML for scheduled backups", + formatter_class=argparse.ArgumentDefaultsHelpFormatter, + ) + parser.add_argument( + "--name", default="kopia-backup", help="CronJob resource name" + ) + parser.add_argument( + "--schedule", default="0 0 * * *", help="Cron schedule expression" + ) + parser.add_argument( + "--backup-args", + default="", + help="Extra arguments passed to the backup command (shell-quoted string)", + ) + add_common_args(parser) + + args = parser.parse_args() + + extra_args = shlex.split(args.backup_args) if args.backup_args else [] + + manifest = { + "apiVersion": "batch/v1", + "kind": "CronJob", + "metadata": { + "name": args.name, + "namespace": args.namespace, + }, + "spec": { + "schedule": args.schedule, + "concurrencyPolicy": "Forbid", + 
"jobTemplate": { + "spec": { + "backoffLimit": 0, + "template": { + "spec": build_pod_spec(args, "backup", extra_args), + }, + }, + }, + }, + } + + dump_manifest(manifest) + + +if __name__ == "__main__": + main() diff --git a/odoo-kopia-snapshot/src/odoo_kopia_snapshot/generate_restore_job.py b/odoo-kopia-snapshot/src/odoo_kopia_snapshot/generate_restore_job.py new file mode 100644 index 0000000..1cd4444 --- /dev/null +++ b/odoo-kopia-snapshot/src/odoo_kopia_snapshot/generate_restore_job.py @@ -0,0 +1,56 @@ +#!/usr/bin/env python3 +"""Generate a Kubernetes Job manifest for odoo-kopia-snapshot restore.""" + +import argparse +import shlex + +from .kube import add_common_args, build_pod_spec, dump_manifest + + +def main(): + parser = argparse.ArgumentParser( + description="Generate a Kubernetes Job YAML for a one-shot restore", + formatter_class=argparse.ArgumentDefaultsHelpFormatter, + ) + parser.add_argument( + "--name", default="kopia-restore", help="Job resource name" + ) + parser.add_argument( + "--snapshot", + required=True, + help="Kopia snapshot ID to restore", + ) + parser.add_argument( + "--restore-args", + default="", + help="Extra arguments passed to the restore command (shell-quoted string)", + ) + add_common_args(parser) + + args = parser.parse_args() + + extra_args = [args.snapshot] + if args.restore_args: + extra_args.extend(shlex.split(args.restore_args)) + + manifest = { + "apiVersion": "batch/v1", + "kind": "Job", + "metadata": { + "name": args.name, + "namespace": args.namespace, + }, + "spec": { + "backoffLimit": 0, + "ttlSecondsAfterFinished": 600, + "template": { + "spec": build_pod_spec(args, "restore", extra_args), + }, + }, + } + + dump_manifest(manifest) + + +if __name__ == "__main__": + main() diff --git a/odoo-kopia-snapshot/src/odoo_kopia_snapshot/kube.py b/odoo-kopia-snapshot/src/odoo_kopia_snapshot/kube.py new file mode 100644 index 0000000..2682cc7 --- /dev/null +++ b/odoo-kopia-snapshot/src/odoo_kopia_snapshot/kube.py @@ -0,0 +1,227 
"""Shared helpers for generating Kubernetes YAML manifests."""

import argparse
import shlex


def build_env_from(secret_refs, configmap_refs):
    """Build the ``envFrom`` list for bulk Secret/ConfigMap injection.

    Secrets first, then ConfigMaps, each preserving input order.
    """
    items = [{"secretRef": {"name": ref}} for ref in secret_refs]
    items.extend({"configMapRef": {"name": ref}} for ref in configmap_refs)
    return items


def build_env_vars(env_literals, env_from_secret, env_from_configmap):
    """Build the ``env`` list for selective env var injection.

    env_literals: list of "NAME=VALUE" strings
    env_from_secret: list of "NAME=SECRET:KEY" strings
    env_from_configmap: list of "NAME=CONFIGMAP:KEY" strings
    """
    items = []
    for entry in env_literals:
        name, _, value = entry.partition("=")
        items.append({"name": name, "value": value})
    for entry in env_from_secret:
        name, _, ref = entry.partition("=")
        secret_name, _, key = ref.partition(":")
        items.append({
            "name": name,
            "valueFrom": {"secretKeyRef": {"name": secret_name, "key": key}},
        })
    for entry in env_from_configmap:
        name, _, ref = entry.partition("=")
        cm_name, _, key = ref.partition(":")
        items.append({
            "name": name,
            "valueFrom": {"configMapKeyRef": {"name": cm_name, "key": key}},
        })
    return items


def build_volume_mounts():
    """Build the fixed volumeMounts list (Odoo data, Kopia cache, pg dumps)."""
    return [
        {"name": "odoo-data", "mountPath": "/var/lib/odoo"},
        {"name": "kopia-cache", "mountPath": "/tmp/kopia"},
        {"name": "postgres-dump", "mountPath": "/var/lib/odoo/database-backup"},
    ]


def build_volumes(filestore_pvc, kopia_cache_size, postgres_dump_size):
    """Build the volumes list: the filestore PVC plus two ephemeral scratch
    volumes (Kopia cache and PostgreSQL dump area)."""

    def _ephemeral(size):
        # Both scratch volumes share the same ephemeral-claim shape.
        return {
            "ephemeral": {
                "volumeClaimTemplate": {
                    "spec": {
                        "accessModes": ["ReadWriteOnce"],
                        "resources": {"requests": {"storage": size}},
                    }
                }
            }
        }

    return [
        {
            "name": "odoo-data",
            "persistentVolumeClaim": {"claimName": filestore_pvc},
        },
        {"name": "kopia-cache", **_ephemeral(kopia_cache_size)},
        {"name": "postgres-dump", **_ephemeral(postgres_dump_size)},
    ]


def build_resources(memory_request, memory_limit, cpu_request, cpu_limit):
    """Build the container ``resources`` dict."""
    return {
        "requests": {"memory": memory_request, "cpu": cpu_request},
        "limits": {"memory": memory_limit, "cpu": cpu_limit},
    }


def build_security_context(run_as_user, run_as_group, fs_group):
    """Build the pod-level ``securityContext`` dict."""
    return {
        "runAsUser": run_as_user,
        "runAsGroup": run_as_group,
        "fsGroup": fs_group,
    }


def add_common_args(parser):
    """Add the Kubernetes manifest arguments shared by the backup and
    restore generators to *parser*; returns the parser for chaining."""
    k8s_group = parser.add_argument_group("Kubernetes options")
    k8s_group.add_argument(
        "--namespace",
        required=True,
        help="Kubernetes namespace",
    )
    k8s_group.add_argument(
        "--image",
        required=True,
        help="Container image (e.g. ghcr.io/.../odoo-kopia-snapshot:2.0.1)",
    )

    env_group = parser.add_argument_group("Environment injection")
    env_group.add_argument(
        "--secret-ref",
        action="append",
        default=[],
        help="Inject all keys from a Secret via envFrom (repeatable)",
    )
    env_group.add_argument(
        "--configmap-ref",
        action="append",
        default=[],
        help="Inject all keys from a ConfigMap via envFrom (repeatable)",
    )
    env_group.add_argument(
        "--env",
        action="append",
        default=[],
        dest="env_literals",
        help="Literal env var, NAME=VALUE (repeatable)",
    )
    env_group.add_argument(
        "--env-from-secret",
        action="append",
        default=[],
        help="Single env from Secret, NAME=SECRET:KEY (repeatable)",
    )
    env_group.add_argument(
        "--env-from-configmap",
        action="append",
        default=[],
        help="Single env from ConfigMap, NAME=CONFIGMAP:KEY (repeatable)",
    )

    res_group = parser.add_argument_group("Resource limits")
    res_group.add_argument("--memory-request", default="4Gi", help="Memory request")
    res_group.add_argument("--memory-limit", default="4Gi", help="Memory limit")
    res_group.add_argument("--cpu-request", default="250m", help="CPU request")
    res_group.add_argument("--cpu-limit", default="1", help="CPU limit")

    sec_group = parser.add_argument_group("Security context")
    sec_group.add_argument(
        "--run-as-user", type=int, default=1000, help="runAsUser"
    )
    sec_group.add_argument(
        "--run-as-group", type=int, default=1000, help="runAsGroup"
    )
    sec_group.add_argument(
        "--fs-group", type=int, default=1000, help="fsGroup"
    )

    vol_group = parser.add_argument_group("Volumes")
    vol_group.add_argument(
        "--filestore-pvc",
        required=True,
        help="PVC name for the Odoo data volume",
    )
    vol_group.add_argument(
        "--kopia-cache-size",
        default="25Gi",
        help="Ephemeral volume size for Kopia cache",
    )
    vol_group.add_argument(
        "--postgres-dump-size",
        default="100Gi",
        help="Ephemeral volume size for PostgreSQL dumps",
    )

    return parser


def build_pod_spec(args, command, extra_args):
    """Build the pod-template ``spec`` dict shared by the CronJob and Job
    generators.

    *args* is the namespace produced by a parser that went through
    add_common_args; *command* is the container entrypoint argument
    ("backup"/"restore"); *extra_args* are appended after it.
    """
    container = {"name": command, "image": args.image}

    # Only emit envFrom/env when there is something to inject, so the
    # generated YAML stays minimal.
    env_from = build_env_from(args.secret_ref, args.configmap_ref)
    if env_from:
        container["envFrom"] = env_from

    env_vars = build_env_vars(
        args.env_literals, args.env_from_secret, args.env_from_configmap
    )
    if env_vars:
        container["env"] = env_vars

    container["args"] = [command] + extra_args
    container["volumeMounts"] = build_volume_mounts()
    container["resources"] = build_resources(
        args.memory_request, args.memory_limit,
        args.cpu_request, args.cpu_limit,
    )

    return {
        "restartPolicy": "Never",
        "securityContext": build_security_context(
            args.run_as_user, args.run_as_group, args.fs_group,
        ),
        "containers": [container],
        "volumes": build_volumes(
            args.filestore_pvc, args.kopia_cache_size, args.postgres_dump_size,
        ),
    }


def dump_manifest(manifest):
    """Serialize *manifest* to YAML and print it to stdout."""
    # Imported lazily so the pure dict-building helpers above remain
    # importable (and unit-testable) without PyYAML installed.
    import yaml

    print(yaml.dump(manifest, default_flow_style=False, sort_keys=False), end="")
+ help="Show snapshots from all users/hosts", + ) + list_group.add_argument( + "--max-results", + type=int, + default=None, + help="Limit number of results per source", + ) + list_group.add_argument( + "--json", + action="store_true", + default=False, + help="Output raw JSON from kopia (parseable, kopia logs go to stderr)", + ) + + # Kopia arguments + kopia_group = parser.add_argument_group("Kopia options") + kopia_group.add_argument( + "--kopia-repo-connect-params", + required=True, + help='Kopia repository connection parameters (e.g., "azure --container=kopia --prefix=ns/")', + ) + kopia_group.add_argument( + "--kopia-cache-dir", + default="/tmp/kopia/cache", + help="Kopia cache directory", + type=Path, + ) + kopia_group.add_argument( + "--kopia-config-file", + default="/tmp/kopia/repository.config", + help="Kopia configuration file (ephemeral)", + type=Path, + ) + kopia_group.add_argument( + "--kopia-log-level", + default="info", + choices=["error", "warning", "info", "debug"], + help="Kopia log level", + ) + kopia_group.add_argument( + "--kopia-log-dir", + default="/tmp/kopia/logs", + help="Kopia log directory", + type=Path, + ) + kopia_group.add_argument( + "--kopia-hostname", + default="odoo", + help="Kopia hostname override", + ) + kopia_group.add_argument( + "--kopia-username", + default="odoo", + help="Kopia username override", + ) + kopia_group.add_argument( + "--kopia-bin", + type=Path, + default="/usr/local/bin/kopia", + help="Kopia binary path", + ) + + args = parser.parse_args() + + # Ensure kopia directories exist + Path(args.kopia_config_file).parent.mkdir(parents=True, exist_ok=True) + Path(args.kopia_cache_dir).mkdir(parents=True, exist_ok=True) + Path(args.kopia_log_dir).mkdir(parents=True, exist_ok=True) + + kopia_bin = f"{args.kopia_bin}" + + # Check KOPIA_PASSWORD from environment + if not os.environ.get("KOPIA_PASSWORD"): + os.environ["KOPIA_PASSWORD"] = "static-passw0rd" + _logger.warning( + "KOPIA_PASSWORD environment variable not set." 
+ " Using insecure default matching velero's old defaults..." + ) + + common_flags = [ + f"--config-file={args.kopia_config_file}", + f"--log-level={args.kopia_log_level}", + f"--log-dir={args.kopia_log_dir}", + f"--file-log-level={args.kopia_log_level}", + "--no-progress", + ] + + overrides = [ + f"--override-hostname={args.kopia_hostname}", + f"--override-username={args.kopia_username}", + ] + + # Connect to kopia repository + _logger.info( + "Attempting to connect to Kopia repository (using ephemeral config)..." + ) + connect_cmd = [ + kopia_bin, + *common_flags, + "repository", + "connect", + f"--cache-directory={args.kopia_cache_dir}", + *args.kopia_repo_connect_params.split(), + *overrides, + ] + result = run_command(connect_cmd, check=False) + if result.returncode != 0: + _logger.critical("Failed to connect to Kopia repository. Aborting.") + sys.exit(1) + + # Build snapshot list command + list_cmd = [kopia_bin, *common_flags, "snapshot", "list"] + + if args.all: + list_cmd.append("--all") + + for tag in args.tags: + list_cmd.append(f"--tags={tag}") + + if args.max_results is not None: + list_cmd.append(f"--max-results={args.max_results}") + + if args.json: + list_cmd.append("--json") + + # Run snapshot list + _logger.info("Listing snapshots...") + if args.json: + result = run_command(list_cmd, capture_output=True) + if isinstance(result, subprocess.CompletedProcess): + print(result.stdout, end="") + else: + run_command(list_cmd) + + # Disconnect from kopia + _logger.info("Disconnecting from Kopia repository...") + disconnect_cmd = [kopia_bin, *common_flags, "repository", "disconnect"] + run_command(disconnect_cmd, check=False) + + sys.exit(0) + + +if __name__ == "__main__": + main() diff --git a/odoo-kopia-snapshot/src/odoo_kopia_snapshot/restore.py b/odoo-kopia-snapshot/src/odoo_kopia_snapshot/restore.py new file mode 100644 index 0000000..9aba611 --- /dev/null +++ b/odoo-kopia-snapshot/src/odoo_kopia_snapshot/restore.py @@ -0,0 +1,371 @@ +#!/usr/bin/env 
# Reconstructed from the mangled patch text: restore.py's source-database
# detection helper (the module preamble and main() continue in the
# surrounding hunk).
import logging
import sys
from pathlib import Path

_logger = logging.getLogger(__name__)


def detect_source_database(backup_dir: Path) -> str:
    """Auto-detect source database name from .dump filename in backup dir."""
    dump_files = list(backup_dir.glob("*.dump"))
    # Exactly one dump file must be present; anything else is ambiguous
    # and the restore cannot proceed.
    if not dump_files:
        _logger.critical(f"No .dump files found in {backup_dir}")
        sys.exit(1)
    if len(dump_files) > 1:
        _logger.critical(
            f"Multiple .dump files found in {backup_dir}: {dump_files}."
            " Use --source-database to specify which one."
        )
        sys.exit(1)
    return dump_files[0].stem
"--clean --if-exists")', + ) + pg_group.add_argument( + "--target-database", + default=None, + help="Restore database as this name (defaults to PGDATABASE env var)", + ) + pg_group.add_argument( + "--source-database", + default=None, + help="Original database name in the snapshot (auto-detected from dump filename if not provided)", + ) + pg_group.add_argument( + "--postgres-backup-cleanup", + action="store_true", + default=True, + help="Clean up dump file after restore", + ) + pg_group.add_argument( + "--no-postgres-backup-cleanup", + action="store_true", + help="Keep dump file after restore", + ) + + # Download-only mode + parser.add_argument( + "--download-only", + action="store_true", + default=False, + help="Download snapshot data to disk without restoring (skips pg_restore and cleanup)", + ) + parser.add_argument( + "--download-path", + type=Path, + default=None, + help="Directory to download snapshot artifacts into (required with --download-only)", + ) + + # Filestore arguments + fs_group = parser.add_argument_group("Filestore options") + fs_group.add_argument( + "--filestore-restore", + action="store_true", + default=False, + help="Enable filestore restore (opt-in)", + ) + fs_group.add_argument( + "--no-filestore-restore", + action="store_true", + help="Disable filestore restore", + ) + fs_group.add_argument( + "--odoo-dir", + default=Path("/var/lib/odoo"), + help="Base path for Odoo data (filestore lives under this)", + type=Path, + ) + + # Kopia arguments + kopia_group = parser.add_argument_group("Kopia options") + kopia_group.add_argument( + "--kopia-repo-connect-params", + required=True, + help='Kopia repository connection parameters (e.g., "azure --container=kopia --prefix=ns/")', + ) + kopia_group.add_argument( + "--kopia-cache-dir", + default="/tmp/kopia/cache", + help="Kopia cache directory", + type=Path, + ) + kopia_group.add_argument( + "--kopia-config-file", + default="/tmp/kopia/repository.config", + help="Kopia configuration file (ephemeral)", + 
type=Path, + ) + kopia_group.add_argument( + "--kopia-log-level", + default="info", + choices=["error", "warning", "info", "debug"], + help="Kopia log level", + ) + kopia_group.add_argument( + "--kopia-log-dir", + default="/tmp/kopia/logs", + help="Kopia log directory", + type=Path, + ) + kopia_group.add_argument( + "--kopia-hostname", + default="odoo", + help="Kopia hostname override", + ) + kopia_group.add_argument( + "--kopia-username", + default="odoo", + help="Kopia username override", + ) + kopia_group.add_argument( + "--kopia-bin", + type=Path, + default="/usr/local/bin/kopia", + help="Kopia binary path", + ) + + args = parser.parse_args() + + # Handle --no-* flags + if args.no_postgres_restore: + args.postgres_restore = False + if args.no_postgres_backup_cleanup: + args.postgres_backup_cleanup = False + if args.no_filestore_restore: + args.filestore_restore = False + if args.download_only: + args.postgres_backup_cleanup = False + if not args.download_path: + parser.error("--download-path is required when using --download-only") + # Override restore paths to point at the download directory + args.postgres_backup_dir = args.download_path + args.odoo_dir = args.download_path + + # Resolve target database + target_database = args.target_database or os.environ.get("PGDATABASE") + + if args.postgres_restore and not args.download_only and not target_database: + _logger.critical( + "No target database specified. Use --target-database or set PGDATABASE." + ) + sys.exit(1) + + if args.postgres_restore and not args.download_only: + REQUIRED_PG_ENVIRON = ["PGHOST", "PGPORT", "PGUSER", "PGPASSWORD"] + if not all(os.environ.get(key) for key in REQUIRED_PG_ENVIRON): + _logger.critical( + "Not all PostgreSQL environment variables are set" + " (PGHOST, PGPORT, PGUSER, PGPASSWORD). Aborting." 
+ ) + sys.exit(1) + + # Ensure kopia directories exist + Path(args.kopia_config_file).parent.mkdir(parents=True, exist_ok=True) + Path(args.kopia_cache_dir).mkdir(parents=True, exist_ok=True) + Path(args.kopia_log_dir).mkdir(parents=True, exist_ok=True) + + kopia_bin = f"{args.kopia_bin}" + + # Check KOPIA_PASSWORD from environment + if not os.environ.get("KOPIA_PASSWORD"): + os.environ["KOPIA_PASSWORD"] = "static-passw0rd" + _logger.warning( + "KOPIA_PASSWORD environment variable not set." + " Using insecure default matching velero's old defaults..." + ) + + common_flags = [ + f"--config-file={args.kopia_config_file}", + f"--log-level={args.kopia_log_level}", + f"--log-dir={args.kopia_log_dir}", + f"--file-log-level={args.kopia_log_level}", + "--no-progress", + ] + + overrides = [ + f"--override-hostname={args.kopia_hostname}", + f"--override-username={args.kopia_username}", + ] + + # Connect to kopia repository + _logger.info( + "Attempting to connect to Kopia repository (using ephemeral config)..." + ) + connect_cmd = [ + kopia_bin, + *common_flags, + "repository", + "connect", + f"--cache-directory={args.kopia_cache_dir}", + *args.kopia_repo_connect_params.split(), + *overrides, + "--readonly", + ] + if run_command(connect_cmd, check=False).returncode != 0: + _logger.critical("Failed to connect to Kopia repository. 
Aborting.") + sys.exit(1) + + snapshot = args.snapshot + + # Restore database dump from snapshot + if args.postgres_restore: + postgres_backup_dir = args.postgres_backup_dir.resolve() + postgres_backup_dir.mkdir(parents=True, exist_ok=True) + + _logger.info(f"Restoring database dump from snapshot {snapshot}...") + restore_db_cmd = [ + kopia_bin, + *common_flags, + "snapshot", + "restore", + f"{snapshot}/database-backup", + str(postgres_backup_dir), + ] + run_command(restore_db_cmd) + + # Auto-detect or use provided source database + source_database = args.source_database or detect_source_database( + postgres_backup_dir + ) + _logger.info(f"Source database: {source_database}") + + dump_file = postgres_backup_dir / f"{source_database}.dump" + if not dump_file.exists(): + _logger.critical(f"Expected dump file not found: {dump_file}") + sys.exit(1) + + # Verify checksum + verify_checksum(dump_file) + + if not args.download_only: + # Wait for PostgreSQL + _logger.info("Checking PostgreSQL is ready...") + pg_isready = False + for i in range(1, 10): + try: + result = run_command(["pg_isready"]) + if result.returncode == 0: + pg_isready = True + break + except subprocess.CalledProcessError: + _logger.info( + f"PostgreSQL is not ready.. attempt {i}. Retrying in {i} seconds..." 
+ ) + time.sleep(i) + except Exception: + raise + + if not pg_isready: + _logger.critical("Could not contact PostgreSQL") + sys.exit(1) + + # Run pg_restore + _logger.info(f"Restoring database dump to {target_database}...") + pg_restore_cmd = [ + "pg_restore", + f"--dbname={target_database}", + "--verbose", + "--no-owner", + ] + if args.pg_restore_args: + pg_restore_cmd.extend(shlex.split(args.pg_restore_args)) + pg_restore_cmd.append(str(dump_file)) + run_command(pg_restore_cmd) + + # Clean up dump files + if args.postgres_backup_cleanup: + _logger.info(f"Cleaning up dump file {dump_file}") + dump_file.unlink(missing_ok=True) + checksum_file = Path(f"{dump_file}.sha256") + checksum_file.unlink(missing_ok=True) + else: + _logger.info(f"Download-only mode: dump file available at {dump_file}") + else: + # Even without postgres restore, we may need source_database for filestore + source_database = args.source_database + + # Restore filestore + if args.filestore_restore: + if not source_database: + _logger.critical( + "Cannot restore filestore without knowing the source database name." + " Use --source-database or enable --postgres-restore for auto-detection." + ) + sys.exit(1) + + filestore_target = target_database or source_database + odoo_dir = args.odoo_dir.resolve() + target_path = odoo_dir / "filestore" / filestore_target + + _logger.info( + f"Restoring filestore from snapshot {snapshot}" + f" (source: {source_database}, target: {filestore_target})..." 
"""Shared utilities for odoo-kopia-snapshot."""

import logging
import subprocess
import sys
from pathlib import Path

_logger = logging.getLogger(__name__)


def setup_logging():
    """Configure root logger with a consistent format."""
    logging.basicConfig(
        level=logging.INFO,
        format="%(asctime)s - %(levelname)s - %(message)s",
        datefmt="%Y-%m-%d %H:%M:%S",
    )


def run_command(cmd, check=True, capture_output=False, text=True):
    """Run a shell command and return the CompletedProcess result.

    Returns CompletedProcess on success or when check=False (the caller
    inspects ``returncode``).  Raises CalledProcessError when check=True
    and the command fails, after logging the failure.
    """
    try:
        return subprocess.run(
            cmd, check=check, capture_output=capture_output, text=text
        )
    except subprocess.CalledProcessError as e:
        # subprocess.run only raises CalledProcessError when check=True,
        # so this branch always logs and re-raises.  (The original had a
        # dead `if check:` guard and an unreachable `return e` here.)
        _logger.error(f"Command failed: {' '.join(cmd)}")
        _logger.error(f"Error: {e.stderr if capture_output else str(e)}")
        raise
    except Exception as e:
        _logger.error(f"An error occurred while running command {' '.join(cmd)}: {e}")
        raise


def create_sha256_file(target_file):
    """Create a .sha256 checksum sidecar for *target_file*.

    Best-effort: failures are logged as critical but not raised, so a
    missing ``sha256sum`` binary does not abort the calling run.
    """
    checksum_file = f"{target_file}.sha256"
    try:
        result = subprocess.run(
            ["sha256sum", target_file],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            text=True,
            check=True,
        )
        # sha256sum output is "<hex>  <path>\n", the format `sha256sum -c`
        # expects back in verify_checksum().
        with open(checksum_file, "w") as f:
            f.write(result.stdout)
    except subprocess.CalledProcessError as e:
        _logger.critical(f"Error calculating checksum: {e.stderr}")
    except FileNotFoundError:
        _logger.critical("The 'sha256sum' command was not found...")


def verify_checksum(dump_file: Path):
    """Verify SHA256 checksum of the dump file.

    A missing sidecar only produces a warning (older snapshots may lack
    one); a failing verification aborts the process with exit code 1.
    """
    checksum_file = Path(f"{dump_file}.sha256")
    if not checksum_file.exists():
        _logger.warning(
            f"No checksum file found at {checksum_file}, skipping verification"
        )
        return
    _logger.info(f"Verifying checksum of {dump_file}...")
    try:
        # cwd matters when the sidecar records a relative path.
        subprocess.run(
            ["sha256sum", "-c", str(checksum_file)],
            check=True,
            cwd=dump_file.parent,
        )
        _logger.info("Checksum verification passed")
    except subprocess.CalledProcessError:
        _logger.critical("Checksum verification FAILED — dump file may be corrupt")
        sys.exit(1)
fixtures.""" + +from types import SimpleNamespace + +import pytest + + +@pytest.fixture() +def make_args(): + """Return a factory that builds a SimpleNamespace with sane defaults. + + Override any attribute by passing keyword arguments. + """ + + def _make_args(**overrides): + defaults = { + "namespace": "test-ns", + "image": "ghcr.io/example/odoo-kopia-snapshot:latest", + "secret_ref": [], + "configmap_ref": [], + "env_literals": [], + "env_from_secret": [], + "env_from_configmap": [], + "memory_request": "4Gi", + "memory_limit": "4Gi", + "cpu_request": "250m", + "cpu_limit": "1", + "run_as_user": 1000, + "run_as_group": 1000, + "fs_group": 1000, + "filestore_pvc": "odoo-data", + "kopia_cache_size": "25Gi", + "postgres_dump_size": "100Gi", + } + defaults.update(overrides) + return SimpleNamespace(**defaults) + + return _make_args diff --git a/odoo-kopia-snapshot/tests/test_generate_backup.py b/odoo-kopia-snapshot/tests/test_generate_backup.py new file mode 100644 index 0000000..70a0436 --- /dev/null +++ b/odoo-kopia-snapshot/tests/test_generate_backup.py @@ -0,0 +1,42 @@ +"""Tests for generate_backup_cronjob manifest structure.""" + +from odoo_kopia_snapshot.kube import build_pod_spec + + +def test_cronjob_structure(make_args): + """The pod spec used in a CronJob has the right shape.""" + args = make_args() + spec = build_pod_spec(args, "backup", []) + + # Top-level pod spec keys + assert spec["restartPolicy"] == "Never" + assert "securityContext" in spec + assert "volumes" in spec + + container = spec["containers"][0] + assert container["name"] == "backup" + assert container["args"] == ["backup"] + assert "volumeMounts" in container + assert "resources" in container + + +def test_cronjob_extra_args(make_args): + """Extra backup args are appended to container args.""" + args = make_args() + spec = build_pod_spec(args, "backup", ["--no-kopia-maintenance"]) + container = spec["containers"][0] + assert container["args"] == ["backup", "--no-kopia-maintenance"] + + 
+def test_cronjob_env_injection(make_args): + """Secret refs and env literals appear in the container.""" + args = make_args( + secret_ref=["kopia-secret", "pg-secret"], + env_literals=["TZ=UTC"], + ) + spec = build_pod_spec(args, "backup", []) + container = spec["containers"][0] + + assert len(container["envFrom"]) == 2 + assert container["envFrom"][0]["secretRef"]["name"] == "kopia-secret" + assert container["env"][0] == {"name": "TZ", "value": "UTC"} diff --git a/odoo-kopia-snapshot/tests/test_generate_restore.py b/odoo-kopia-snapshot/tests/test_generate_restore.py new file mode 100644 index 0000000..7e0e7f6 --- /dev/null +++ b/odoo-kopia-snapshot/tests/test_generate_restore.py @@ -0,0 +1,29 @@ +"""Tests for generate_restore_job manifest structure.""" + +from odoo_kopia_snapshot.kube import build_pod_spec + + +def test_restore_job_includes_snapshot_id(make_args): + """The snapshot ID appears as the first extra arg.""" + args = make_args() + spec = build_pod_spec(args, "restore", ["abc123"]) + container = spec["containers"][0] + assert container["name"] == "restore" + assert container["args"] == ["restore", "abc123"] + + +def test_restore_job_extra_args(make_args): + """Additional restore args follow the snapshot ID.""" + args = make_args() + spec = build_pod_spec(args, "restore", ["abc123", "--verbose"]) + container = spec["containers"][0] + assert container["args"] == ["restore", "abc123", "--verbose"] + + +def test_restore_job_resources(make_args): + """Custom resource values propagate to the container.""" + args = make_args(memory_request="8Gi", memory_limit="8Gi") + spec = build_pod_spec(args, "restore", ["snap1"]) + resources = spec["containers"][0]["resources"] + assert resources["requests"]["memory"] == "8Gi" + assert resources["limits"]["memory"] == "8Gi" diff --git a/odoo-kopia-snapshot/tests/test_kube.py b/odoo-kopia-snapshot/tests/test_kube.py new file mode 100644 index 0000000..c49d992 --- /dev/null +++ b/odoo-kopia-snapshot/tests/test_kube.py @@ 
-0,0 +1,149 @@ +"""Tests for odoo_kopia_snapshot.kube helpers.""" + +from odoo_kopia_snapshot.kube import ( + build_env_from, + build_env_vars, + build_pod_spec, + build_resources, + build_security_context, + build_volume_mounts, + build_volumes, +) + + +# -- build_env_from ---------------------------------------------------------- + +def test_build_env_from_empty(): + assert build_env_from([], []) == [] + + +def test_build_env_from_secrets_only(): + result = build_env_from(["my-secret"], []) + assert result == [{"secretRef": {"name": "my-secret"}}] + + +def test_build_env_from_mixed(): + result = build_env_from(["s1"], ["cm1", "cm2"]) + assert len(result) == 3 + assert result[0] == {"secretRef": {"name": "s1"}} + assert result[1] == {"configMapRef": {"name": "cm1"}} + assert result[2] == {"configMapRef": {"name": "cm2"}} + + +# -- build_env_vars ----------------------------------------------------------- + +def test_build_env_vars_empty(): + assert build_env_vars([], [], []) == [] + + +def test_build_env_vars_literals(): + result = build_env_vars(["FOO=bar", "BAZ=qux"], [], []) + assert result == [ + {"name": "FOO", "value": "bar"}, + {"name": "BAZ", "value": "qux"}, + ] + + +def test_build_env_vars_from_secret(): + result = build_env_vars([], ["DB_PASS=pg-secret:password"], []) + assert result == [ + { + "name": "DB_PASS", + "valueFrom": { + "secretKeyRef": {"name": "pg-secret", "key": "password"}, + }, + }, + ] + + +def test_build_env_vars_from_configmap(): + result = build_env_vars([], [], ["LOG_LEVEL=app-config:log-level"]) + assert result == [ + { + "name": "LOG_LEVEL", + "valueFrom": { + "configMapKeyRef": {"name": "app-config", "key": "log-level"}, + }, + }, + ] + + +# -- build_volume_mounts ------------------------------------------------------ + +def test_build_volume_mounts_returns_three(): + mounts = build_volume_mounts() + assert len(mounts) == 3 + names = {m["name"] for m in mounts} + assert names == {"odoo-data", "kopia-cache", "postgres-dump"} + + 
+# -- build_volumes ------------------------------------------------------------- + +def test_build_volumes_pvc_claim(): + vols = build_volumes("my-pvc", "10Gi", "50Gi") + pvc_vol = vols[0] + assert pvc_vol["persistentVolumeClaim"]["claimName"] == "my-pvc" + + +def test_build_volumes_ephemeral_sizes(): + vols = build_volumes("pvc", "10Gi", "50Gi") + kopia = vols[1]["ephemeral"]["volumeClaimTemplate"]["spec"] + pg = vols[2]["ephemeral"]["volumeClaimTemplate"]["spec"] + assert kopia["resources"]["requests"]["storage"] == "10Gi" + assert pg["resources"]["requests"]["storage"] == "50Gi" + + +# -- build_resources ----------------------------------------------------------- + +def test_build_resources(): + res = build_resources("2Gi", "4Gi", "100m", "500m") + assert res == { + "requests": {"memory": "2Gi", "cpu": "100m"}, + "limits": {"memory": "4Gi", "cpu": "500m"}, + } + + +# -- build_security_context ---------------------------------------------------- + +def test_build_security_context(): + ctx = build_security_context(1000, 1000, 1000) + assert ctx == {"runAsUser": 1000, "runAsGroup": 1000, "fsGroup": 1000} + + +# -- build_pod_spec ------------------------------------------------------------ + +def test_build_pod_spec_basic(make_args): + args = make_args() + spec = build_pod_spec(args, "backup", []) + assert spec["restartPolicy"] == "Never" + assert len(spec["containers"]) == 1 + container = spec["containers"][0] + assert container["name"] == "backup" + assert container["image"] == args.image + assert container["args"] == ["backup"] + + +def test_build_pod_spec_extra_args(make_args): + args = make_args() + spec = build_pod_spec(args, "restore", ["snap123", "--verbose"]) + container = spec["containers"][0] + assert container["args"] == ["restore", "snap123", "--verbose"] + + +def test_build_pod_spec_with_env(make_args): + args = make_args( + secret_ref=["s1"], + env_literals=["FOO=bar"], + ) + spec = build_pod_spec(args, "backup", []) + container = 
spec["containers"][0] + assert "envFrom" in container + assert "env" in container + + +def test_build_pod_spec_no_env_when_empty(make_args): + args = make_args() + spec = build_pod_spec(args, "backup", []) + container = spec["containers"][0] + assert "envFrom" not in container + assert "env" not in container diff --git a/odoo-kopia-snapshot/tests/test_utils.py b/odoo-kopia-snapshot/tests/test_utils.py new file mode 100644 index 0000000..0864483 --- /dev/null +++ b/odoo-kopia-snapshot/tests/test_utils.py @@ -0,0 +1,25 @@ +"""Tests for the shared utils module.""" + +import subprocess + +import pytest + +from odoo_kopia_snapshot.utils import run_command + + +def test_run_command_success(): + result = run_command(["echo", "hello"], capture_output=True) + assert isinstance(result, subprocess.CompletedProcess) + assert result.returncode == 0 + assert result.stdout.strip() == "hello" + + +def test_run_command_check_true_raises(): + with pytest.raises(subprocess.CalledProcessError): + run_command(["false"], check=True) + + +def test_run_command_check_false_returns_completed_process(): + result = run_command(["false"], check=False) + assert isinstance(result, subprocess.CompletedProcess) + assert result.returncode != 0 diff --git a/odoo-kopia-snapshot/uv.lock b/odoo-kopia-snapshot/uv.lock new file mode 100644 index 0000000..4257875 --- /dev/null +++ b/odoo-kopia-snapshot/uv.lock @@ -0,0 +1,129 @@ +version = 1 +revision = 1 +requires-python = ">=3.12" + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = 
"sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 }, +] + +[[package]] +name = "iniconfig" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484 }, +] + +[[package]] +name = "odoo-kopia-snapshot" +version = "2.0.1" +source = { editable = "." } +dependencies = [ + { name = "pyyaml" }, +] + +[package.dev-dependencies] +dev = [ + { name = "pytest" }, +] + +[package.metadata] +requires-dist = [{ name = "pyyaml" }] + +[package.metadata.requires-dev] +dev = [{ name = "pytest" }] + +[[package]] +name = "packaging" +version = "26.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/65/ee/299d360cdc32edc7d2cf530f3accf79c4fca01e96ffc950d8a52213bd8e4/packaging-26.0.tar.gz", hash = "sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4", size = 143416 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl", hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529", size = 74366 }, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412 } 
+wheels = [ + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538 }, +] + +[[package]] +name = "pygments" +version = "2.19.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217 }, +] + +[[package]] +name = "pytest" +version = "9.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801 }, +] + +[[package]] +name = "pyyaml" +version = "6.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = 
"sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063 }, + { url = "https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973 }, + { url = "https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116 }, + { url = "https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011 }, + { url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870 }, + { url = "https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089 }, + { url = 
"https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181 }, + { url = "https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658 }, + { url = "https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003 }, + { url = "https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344 }, + { url = "https://files.pythonhosted.org/packages/d1/11/0fd08f8192109f7169db964b5707a2f1e8b745d4e239b784a5a1dd80d1db/pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8", size = 181669 }, + { url = "https://files.pythonhosted.org/packages/b1/16/95309993f1d3748cd644e02e38b75d50cbc0d9561d21f390a76242ce073f/pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1", size = 173252 }, + { url = "https://files.pythonhosted.org/packages/50/31/b20f376d3f810b9b2371e72ef5adb33879b25edb7a6d072cb7ca0c486398/pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c", size = 767081 }, + { url = 
"https://files.pythonhosted.org/packages/49/1e/a55ca81e949270d5d4432fbbd19dfea5321eda7c41a849d443dc92fd1ff7/pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5", size = 841159 }, + { url = "https://files.pythonhosted.org/packages/74/27/e5b8f34d02d9995b80abcef563ea1f8b56d20134d8f4e5e81733b1feceb2/pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6", size = 801626 }, + { url = "https://files.pythonhosted.org/packages/f9/11/ba845c23988798f40e52ba45f34849aa8a1f2d4af4b798588010792ebad6/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6", size = 753613 }, + { url = "https://files.pythonhosted.org/packages/3d/e0/7966e1a7bfc0a45bf0a7fb6b98ea03fc9b8d84fa7f2229e9659680b69ee3/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be", size = 794115 }, + { url = "https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26", size = 137427 }, + { url = "https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c", size = 154090 }, + { url = "https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb", size = 140246 }, + { url = 
"https://files.pythonhosted.org/packages/9d/8c/f4bd7f6465179953d3ac9bc44ac1a8a3e6122cf8ada906b4f96c60172d43/pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac", size = 181814 }, + { url = "https://files.pythonhosted.org/packages/bd/9c/4d95bb87eb2063d20db7b60faa3840c1b18025517ae857371c4dd55a6b3a/pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310", size = 173809 }, + { url = "https://files.pythonhosted.org/packages/92/b5/47e807c2623074914e29dabd16cbbdd4bf5e9b2db9f8090fa64411fc5382/pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7", size = 766454 }, + { url = "https://files.pythonhosted.org/packages/02/9e/e5e9b168be58564121efb3de6859c452fccde0ab093d8438905899a3a483/pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788", size = 836355 }, + { url = "https://files.pythonhosted.org/packages/88/f9/16491d7ed2a919954993e48aa941b200f38040928474c9e85ea9e64222c3/pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5", size = 794175 }, + { url = "https://files.pythonhosted.org/packages/dd/3f/5989debef34dc6397317802b527dbbafb2b4760878a53d4166579111411e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764", size = 755228 }, + { url = "https://files.pythonhosted.org/packages/d7/ce/af88a49043cd2e265be63d083fc75b27b6ed062f5f9fd6cdc223ad62f03e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35", size = 789194 
}, + { url = "https://files.pythonhosted.org/packages/23/20/bb6982b26a40bb43951265ba29d4c246ef0ff59c9fdcdf0ed04e0687de4d/pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac", size = 156429 }, + { url = "https://files.pythonhosted.org/packages/f4/f4/a4541072bb9422c8a883ab55255f918fa378ecf083f5b85e87fc2b4eda1b/pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3", size = 143912 }, + { url = "https://files.pythonhosted.org/packages/7c/f9/07dd09ae774e4616edf6cda684ee78f97777bdd15847253637a6f052a62f/pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3", size = 189108 }, + { url = "https://files.pythonhosted.org/packages/4e/78/8d08c9fb7ce09ad8c38ad533c1191cf27f7ae1effe5bb9400a46d9437fcf/pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba", size = 183641 }, + { url = "https://files.pythonhosted.org/packages/7b/5b/3babb19104a46945cf816d047db2788bcaf8c94527a805610b0289a01c6b/pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c", size = 831901 }, + { url = "https://files.pythonhosted.org/packages/8b/cc/dff0684d8dc44da4d22a13f35f073d558c268780ce3c6ba1b87055bb0b87/pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702", size = 861132 }, + { url = "https://files.pythonhosted.org/packages/b1/5e/f77dc6b9036943e285ba76b49e118d9ea929885becb0a29ba8a7c75e29fe/pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c", size = 839261 }, + 
{ url = "https://files.pythonhosted.org/packages/ce/88/a9db1376aa2a228197c58b37302f284b5617f56a5d959fd1763fb1675ce6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065", size = 805272 }, + { url = "https://files.pythonhosted.org/packages/da/92/1446574745d74df0c92e6aa4a7b0b3130706a4142b2d1a5869f2eaa423c6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65", size = 829923 }, + { url = "https://files.pythonhosted.org/packages/f0/7a/1c7270340330e575b92f397352af856a8c06f230aa3e76f86b39d01b416a/pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9", size = 174062 }, + { url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341 }, +]