diff --git a/.gitignore b/.gitignore index fb17e8ea0..6719b8e2c 100644 --- a/.gitignore +++ b/.gitignore @@ -161,3 +161,4 @@ scratch/ # Surrogate models surrogatemodels/ utopia_forest.json +deploy/secrets.yaml diff --git a/.s2i/bin/assemble b/.s2i/bin/assemble index d6d56e973..7acaa3c6c 100644 --- a/.s2i/bin/assemble +++ b/.s2i/bin/assemble @@ -1,98 +1,36 @@ #!/bin/bash - -function is_django_installed() { - python -c "import django" &>/dev/null -} - -function should_collectstatic() { - is_django_installed && [[ -z "$DISABLE_COLLECTSTATIC" ]] -} - -function virtualenv_bin() { - # New versions of Python (>3.6) should use venv module - # from stdlib instead of virtualenv package - python3.12 -m venv $1 -} - -# Install pipenv or micropipenv to the separate virtualenv to isolate it -# from system Python packages and packages in the main -# virtualenv. Executable is simlinked into ~/.local/bin -# to be accessible. This approach is inspired by pipsi -# (pip script installer). -function install_tool() { - echo "---> Installing $1 packaging tool ..." - VENV_DIR=$HOME/.local/venvs/$1 - virtualenv_bin "$VENV_DIR" - # First, try to install the tool without --isolated which means that if you - # have your own PyPI mirror, it will take it from there. If this try fails, try it - # again with --isolated which ignores external pip settings (env vars, config file) - # and installs the tool from PyPI (needs internet connetion). - # $1$2 combines package name with [extras] or version specifier if is defined as $2``` - if ! $VENV_DIR/bin/pip install -U $1$2; then - echo "WARNING: Installation of $1 failed, trying again from official PyPI with pip --isolated install" - $VENV_DIR/bin/pip install --isolated -U $1$2 # Combines package name with [extras] or version specifier if is defined as $2``` - fi - mkdir -p $HOME/.local/bin - ln -s $VENV_DIR/bin/$1 $HOME/.local/bin/$1 -} - +# S2I assemble script for DESDEO API (FastAPI / gunicorn + uvicorn). 
+# +# Key differences from the original script: +# - Uses uv instead of pip for dependency installation. +# - UV_PROJECT_ENVIRONMENT points uv at the S2I-managed virtualenv +# (/opt/app-root) so it does NOT create a separate .venv directory. +# - --frozen → reproduces exactly what is pinned in uv.lock. +# - --no-dev → skips dev-only deps (pytest, ruff, etc.). +# - --group web --group server → pulls in FastAPI, gunicorn, uvicorn, etc. +# - Django collectstatic block removed (not applicable here). +# set -e - -# First of all, check that we don't have disallowed combination of ENVs -if [[ ! -z "$ENABLE_PIPENV" && ! -z "$ENABLE_MICROPIPENV" ]]; then - echo "ERROR: Pipenv and micropipenv cannot be enabled at the same time!" - # podman/buildah does not relay this exit code but it will be fixed hopefuly - # https://github.com/containers/buildah/issues/2305 - exit 3 -fi - shopt -s dotglob + echo "---> Installing application source ..." mv /tmp/src/* "$HOME" -# set permissions for any installed artifacts +# Restore permissions after source injection. fix-permissions /opt/app-root -P +echo "---> Installing uv ..." +pip install -q --upgrade pip +pip install -q uv -if [[ ! -z "$UPGRADE_PIP_TO_LATEST" ]]; then - echo "---> Upgrading pip, setuptools and wheel to latest version ..." - if ! pip install -U pip setuptools wheel; then - echo "WARNING: Installation of the latest pip, setuptools and wheel failed, trying again from official PyPI with pip --isolated install" - pip install --isolated -U pip setuptools wheel - fi -fi - -pip install $DESDEO_INSTALL - - -if should_collectstatic; then - ( - echo "---> Collecting Django static files ..." - - APP_HOME=$(readlink -f "${APP_HOME:-.}") - # Change the working directory to APP_HOME - PYTHONPATH="$(pwd)${PYTHONPATH:+:$PYTHONPATH}" - cd "$APP_HOME" - - # Look for 'manage.py' in the current directory - manage_file=./manage.py - - if [[ ! 
-f "$manage_file" ]]; then - echo "WARNING: seems that you're using Django, but we could not find a 'manage.py' file." - echo "'manage.py collectstatic' ignored." - exit - fi - - if ! python $manage_file collectstatic --dry-run --noinput &> /dev/null; then - echo "WARNING: could not run 'manage.py collectstatic'. To debug, run:" - echo " $ python $manage_file collectstatic --noinput" - echo "Ignore this warning if you're not serving static files with Django." - exit - fi +echo "---> Syncing Python dependencies via uv ..." +# UV_PROJECT_ENVIRONMENT: use the existing S2I venv instead of creating .venv. +# UV_PYTHON_PREFERENCE: do not let uv download its own Python interpreter. +UV_PROJECT_ENVIRONMENT="${VIRTUAL_ENV:-/opt/app-root}" \ +UV_PYTHON_PREFERENCE=only-system \ + uv sync --frozen --no-dev --group web --group server - python $manage_file collectstatic --noinput - ) -fi +echo "---> Dependencies installed." -# set permissions for any installed artifacts +# Restore permissions for any artifacts written during install. fix-permissions /opt/app-root -P diff --git a/.s2i/environment b/.s2i/environment index 30e018bca..96b178108 100644 --- a/.s2i/environment +++ b/.s2i/environment @@ -1,5 +1,27 @@ +# S2I environment variables for the DESDEO API build and runtime. +# These are read both during the S2I *build* (assemble) and at *runtime*. +# +# UPGRADE_PIP_TO_LATEST is no longer needed because the assemble script +# upgrades pip explicitly before installing uv. Kept here as a no-op for +# compatibility with any base-image hooks that check for it. UPGRADE_PIP_TO_LATEST=1 + +# Entry point: Gunicorn loads the FastAPI app from this module path. APP_MODULE=desdeo.api.app:app + +# Gunicorn flags. +# --workers=1 Single worker is safe for the current single-pod setup. +# Increase to 2-4 if the pod gets >1 CPU allocated. +# --worker-class uvicorn.workers.UvicornWorker gives async support. +# --bind Must be 0.0.0.0:8080 to match the Service targetPort. 
+# --access-logfile - Log to stdout so OpenShift can capture it. GUNICORN_CMD_ARGS=--bind=0.0.0.0:8080 --workers=1 --access-logfile=- --worker-class uvicorn.workers.UvicornWorker -DESDEO_INSTALL=. --group web --group server + +# Passed to `uv sync` in the assemble script. +# Format: flags passed after the implicit project root (.). +# --group web --group server: include the FastAPI/gunicorn/uvicorn dependency groups. +# NOTE: This variable is used in assemble, not by pip directly. +DESDEO_INSTALL=--group web --group server + +# Runtime default; override via Deployment env or Secret. DEBUG=false diff --git a/deploy/api-buildconfig.yaml b/deploy/api-buildconfig.yaml new file mode 100644 index 000000000..23abe9b9a --- /dev/null +++ b/deploy/api-buildconfig.yaml @@ -0,0 +1,75 @@ +# deploy/api-buildconfig.yaml +# +# BuildConfig for the DESDEO API using OpenShift S2I strategy. +# +# Builder image: desdeo-builder ImageStream (custom image built from +# desdeo-s2i-buildimage.Dockerfile). This includes Python 3.12 on UBI8 +# plus COIN-OR solvers (bonmin, ipopt, cbc) and ca-certificates. +# +# To use the base Python image without solvers instead, +# replace the sourceStrategy.from block with: +# from: +# kind: ImageStreamTag +# name: python:3.12-ubi9 +# namespace: openshift +# +# Triggers: +# - ImageChange on desdeo-builder -> rebuilds API when builder is updated +# - GitHub webhook (push to DEPLOY_BRANCH) <- main CI/CD trigger +# - ConfigChange +--- +apiVersion: build.openshift.io/v1 +kind: BuildConfig +metadata: + name: desdeo-api + labels: + app: desdeo-api +spec: + source: + type: Git + git: + # Github repo and deploy branch + uri: https://github.com/gialmisi/DESDEO.git + ref: rahti-deploy + + strategy: + type: Source + sourceStrategy: + # Reference the custom builder ImageStream produced by builder-buildconfig.yaml. + # This image has Python 3.12 + COIN-OR solvers pre-installed. 
+ from: + kind: ImageStreamTag + name: desdeo-builder:latest + env: + - name: UPGRADE_PIP_TO_LATEST + value: "1" + - name: DESDEO_INSTALL + value: "--group web --group server" + - name: DEBUG + value: "false" + + output: + to: + kind: ImageStreamTag + name: desdeo-api:latest + + triggers: + # Rebuild API when the builder image is updated (solver or base OS updates). + - type: ImageChange + imageChange: {} + # GitHub webhook, push to DEPLOY_BRANCH triggers a new API build. + - type: GitHub + github: + secretReference: + name: desdeo-webhook-api + - type: ConfigChange + + runPolicy: Serial + + resources: + requests: + memory: "1Gi" + cpu: "500m" + limits: + memory: "2Gi" + cpu: "1" diff --git a/deploy/api-deployment.yaml b/deploy/api-deployment.yaml new file mode 100644 index 000000000..6ab48e36e --- /dev/null +++ b/deploy/api-deployment.yaml @@ -0,0 +1,146 @@ +# deploy/api-deployment.yaml +# +# Deployment, Service, and Route for the DESDEO FastAPI backend. +# +# The Deployment has an ImageChange trigger: whenever the BuildConfig pushes a +# new image to the desdeo-api ImageStream, the pod is replaced with a rolling +# update automatically. +# +# All secrets are consumed as environment variables from the desdeo-secrets +# Secret; no secret values live in this file. +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: desdeo-api + labels: + app: desdeo-api + annotations: + # This annotation tells the OpenShift image change controller to update the + # Deployment when a new image is pushed to the ImageStream. 
+ image.openshift.io/triggers: > + [{"from":{"kind":"ImageStreamTag","name":"desdeo-api:latest"}, + "fieldPath":"spec.template.spec.containers[0].image"}] +spec: + replicas: 1 + selector: + matchLabels: + app: desdeo-api + strategy: + type: RollingUpdate + rollingUpdate: + maxUnavailable: 0 + maxSurge: 1 + template: + metadata: + labels: + app: desdeo-api + spec: + containers: + - name: api + # Placeholder: the image change annotation above overwrites this on deploy. + image: desdeo-api:latest + imagePullPolicy: Always + ports: + - containerPort: 8080 + name: http + protocol: TCP + env: + - name: DESDEO_PRODUCTION + value: "true" + - name: AUTHJWT_SECRET + valueFrom: + secretKeyRef: + name: desdeo-secrets + key: AUTHJWT_SECRET + - name: DB_HOST + valueFrom: + secretKeyRef: + name: desdeo-secrets + key: DB_HOST + - name: DB_PORT + valueFrom: + secretKeyRef: + name: desdeo-secrets + key: DB_PORT + - name: DB_NAME + valueFrom: + secretKeyRef: + name: desdeo-secrets + key: DB_NAME + - name: DB_USER + valueFrom: + secretKeyRef: + name: desdeo-secrets + key: DB_USER + - name: DB_PASSWORD + valueFrom: + secretKeyRef: + name: desdeo-secrets + key: DB_PASSWORD + # CORS: allow requests from the webui Route. + # Update this if the webui hostname changes. + - name: CORS_ORIGINS + value: '["https://gialmisi-desdeo-webui.rahtiapp.fi"]' + # COOKIE_DOMAIN is intentionally not set. + # With the SvelteKit proxy architecture, cookies are owned by the + # webui host and forwarded server-side. Setting a shared domain here + # is unnecessary and can cause authentication issues. 
+ - name: APP_MODULE + value: "desdeo.api.app:app" + - name: GUNICORN_CMD_ARGS + value: "--bind=0.0.0.0:8080 --workers=1 --access-logfile=- --worker-class uvicorn.workers.UvicornWorker" + resources: + requests: + memory: "512Mi" + cpu: "250m" + limits: + memory: "1Gi" + cpu: "1" + readinessProbe: + httpGet: + path: /health + port: 8080 + initialDelaySeconds: 15 + periodSeconds: 10 + failureThreshold: 3 + livenessProbe: + httpGet: + path: /health + port: 8080 + initialDelaySeconds: 30 + periodSeconds: 20 + failureThreshold: 3 +--- +apiVersion: v1 +kind: Service +metadata: + name: desdeo-api + labels: + app: desdeo-api +spec: + selector: + app: desdeo-api + ports: + - name: http + port: 8080 + targetPort: 8080 +--- +apiVersion: route.openshift.io/v1 +kind: Route +metadata: + name: desdeo-api + labels: + app: desdeo-api +spec: + host: gialmisi-desdeo-api.rahtiapp.fi + to: + kind: Service + name: desdeo-api + port: + targetPort: http + tls: + # edge: TLS terminated at the Rahti HAProxy; traffic to the pod is plain HTTP. + termination: edge + # Redirect any accidental HTTP requests to HTTPS. + insecureEdgeTerminationPolicy: Redirect diff --git a/deploy/api-imagestream.yaml b/deploy/api-imagestream.yaml new file mode 100644 index 000000000..69fdfb647 --- /dev/null +++ b/deploy/api-imagestream.yaml @@ -0,0 +1,18 @@ +# deploy/api-imagestream.yaml +# +# ImageStream for the DESDEO API container image. +# The BuildConfig writes new image revisions here; the Deployment reads from here. +# Separating build output from deployment allows rolling updates to trigger +# automatically when a new image is pushed. +--- +apiVersion: image.openshift.io/v1 +kind: ImageStream +metadata: + name: desdeo-api + labels: + app: desdeo-api +spec: + lookupPolicy: + # Allow Deployments in this namespace to reference the image by its + # ImageStreamTag name without needing the full registry URL. 
+ local: true diff --git a/deploy/builder-buildconfig.yaml b/deploy/builder-buildconfig.yaml new file mode 100644 index 000000000..fe58d7727 --- /dev/null +++ b/deploy/builder-buildconfig.yaml @@ -0,0 +1,61 @@ +# deploy/builder-buildconfig.yaml +# +# BuildConfig that produces the custom DESDEO S2I builder image. +# Uses Docker strategy to build desdeo-s2i-buildimage.Dockerfile +# from the repository root. +# +# This only needs to be (re)built when: +# - The Dockerfile changes +# - The solver binaries release is updated +# - The base UBI8 image receives a security update (ImageChange trigger) +# +# To trigger a manual rebuild: +# oc start-build desdeo-builder --follow +# +# The output image is stored in the desdeo-builder ImageStream and +# referenced by the api-buildconfig.yaml as the S2I builder. +--- +apiVersion: build.openshift.io/v1 +kind: BuildConfig +metadata: + name: desdeo-builder + labels: + app: desdeo-api + component: builder-image +spec: + source: + type: Git + git: + uri: https://github.com/gialmisi/DESDEO.git + ref: rahti-deploy + + strategy: + type: Docker + dockerStrategy: + dockerfilePath: desdeo-s2i-buildimage.Dockerfile + # The Dockerfile temporarily switches to root to install packages, + # then switches back to UID 1001. This is expected and required. + noCache: false + + output: + to: + kind: ImageStreamTag + name: desdeo-builder:latest + + triggers: + # Rebuild when the base UBI8 image receives updates (security patches). + - type: ImageChange + imageChange: {} + # Manual / API trigger. + - type: ConfigChange + + # The builder image download includes solver binaries (~100MB). 
+ resources: + requests: + memory: "512Mi" + cpu: "250m" + limits: + memory: "1Gi" + cpu: "1" + + runPolicy: Serial diff --git a/deploy/builder-imagestream.yaml b/deploy/builder-imagestream.yaml new file mode 100644 index 000000000..6fb5c7ffc --- /dev/null +++ b/deploy/builder-imagestream.yaml @@ -0,0 +1,19 @@ +# deploy/builder-imagestream.yaml +# +# ImageStream for the custom DESDEO S2I builder image. +# This image extends the base Python 3.12 UBI8 image with: +# - COIN-OR solvers (bonmin, ipopt, cbc) from a GitHub release +# - ca-certificates for managing Gurobi licences +# +# The API BuildConfig references this stream as its S2I builder, +# so whenever this image is updated the API is automatically rebuilt. +--- +apiVersion: image.openshift.io/v1 +kind: ImageStream +metadata: + name: desdeo-builder + labels: + app: desdeo-api +spec: + lookupPolicy: + local: true diff --git a/deploy/db-init-job.yaml b/deploy/db-init-job.yaml new file mode 100644 index 000000000..ec6b063b7 --- /dev/null +++ b/deploy/db-init-job.yaml @@ -0,0 +1,60 @@ +# deploy/db-init-job.yaml +# +# One-shot Kubernetes Job that runs desdeo/api/db_init_prod.py using the API +# image. Run this: +# - After the very first deployment (creates tables + seeds admin user). +# - After a deliberate database wipe (the script is idempotent on re-run). +# +# The Job is NOT re-triggered automatically on each new image build. +# If you add new tables in a later release, use a proper migration tool +# (Alembic) instead of re-running this Job. +# +# Usage: +# # Replace with your Rahti project name before applying. 
+# oc apply -f deploy/db-init-job.yaml +# oc logs -f job/desdeo-db-init +# oc delete job desdeo-db-init +# +# To re-run after a database wipe, delete the old Job first: +# oc delete job desdeo-db-init +# oc apply -f deploy/db-init-job.yaml +--- +apiVersion: batch/v1 +kind: Job +metadata: + name: desdeo-db-init + labels: + app: desdeo-api + component: db-init +spec: + backoffLimit: 3 + # Auto-clean completed Job pods after 1 hour. + ttlSecondsAfterFinished: 3600 + template: + metadata: + labels: + app: desdeo-api + component: db-init + spec: + restartPolicy: Never + containers: + - name: db-init + # Replace with your Rahti project name. + image: image-registry.openshift-image-registry.svc:5000//desdeo-api:latest + command: ["python", "desdeo/api/db_init_prod.py"] + env: + - name: DESDEO_PRODUCTION + value: "true" + envFrom: + # Injects DB_HOST, DB_PORT, DB_NAME, DB_USER, DB_PASSWORD, + # AUTHJWT_SECRET, DESDEO_ADMIN_USERNAME, DESDEO_ADMIN_PASSWORD + # from the shared Secret. + - secretRef: + name: desdeo-secrets + resources: + requests: + memory: "256Mi" + cpu: "100m" + limits: + memory: "512Mi" + cpu: "500m" diff --git a/deploy/postgres.yaml b/deploy/postgres.yaml new file mode 100644 index 000000000..690690b4f --- /dev/null +++ b/deploy/postgres.yaml @@ -0,0 +1,108 @@ +# deploy/postgres.yaml +# +# In-cluster PostgreSQL using the OpenShift built-in image. +# +# Apply with: +# oc apply -f deploy/postgres.yaml +# +# The OpenShift PostgreSQL image uses these env var names: +# POSTGRESQL_USER — creates a non-superuser application account +# POSTGRESQL_PASSWORD — password for POSTGRESQL_USER +# POSTGRESQL_DATABASE — database to create on first start +# +# These are sourced from desdeo-secrets (keys POSTGRES_USER and POSTGRES_PASSWORD). +# The database name is hardcoded to 'desdeo' here; change it if needed and update +# DB_NAME in the secret accordingly. +# +# Data is persisted in the PVC mounted at /var/lib/pgsql/data. 
+# +# To check available image tags on your cluster: +# oc get is postgresql -n openshift -o jsonpath='{.spec.tags[*].name}' +--- +apiVersion: v1 +kind: Service +metadata: + name: desdeo-postgres + labels: + app: desdeo-postgres +spec: + clusterIP: None + selector: + app: desdeo-postgres + ports: + - name: postgres + port: 5432 + targetPort: 5432 +--- +apiVersion: apps/v1 +kind: StatefulSet +metadata: + name: desdeo-postgres + labels: + app: desdeo-postgres +spec: + selector: + matchLabels: + app: desdeo-postgres + serviceName: desdeo-postgres + replicas: 1 + template: + metadata: + labels: + app: desdeo-postgres + spec: + containers: + - name: postgres + image: image-registry.openshift-image-registry.svc:5000/openshift/postgresql:16-el10 + ports: + - containerPort: 5432 + name: postgres + env: + - name: POSTGRESQL_USER + valueFrom: + secretKeyRef: + name: desdeo-secrets + key: POSTGRES_USER + - name: POSTGRESQL_PASSWORD + valueFrom: + secretKeyRef: + name: desdeo-secrets + key: POSTGRES_PASSWORD + - name: POSTGRESQL_DATABASE + value: desdeo + volumeMounts: + - name: postgres-data + mountPath: /var/lib/pgsql/data + resources: + requests: + memory: "256Mi" + cpu: "100m" + limits: + memory: "512Mi" + cpu: "500m" + readinessProbe: + exec: + command: + - /bin/sh + - -c + - psql -U $POSTGRESQL_USER -d desdeo -c "SELECT 1" + initialDelaySeconds: 15 + periodSeconds: 10 + failureThreshold: 6 + livenessProbe: + exec: + command: + - /bin/sh + - -c + - psql -U $POSTGRESQL_USER -d desdeo -c "SELECT 1" + initialDelaySeconds: 30 + periodSeconds: 20 + failureThreshold: 3 + volumeClaimTemplates: + - metadata: + name: postgres-data + spec: + accessModes: ["ReadWriteOnce"] + resources: + requests: + storage: 2Gi diff --git a/deploy/secrets-template.yaml b/deploy/secrets-template.yaml new file mode 100644 index 000000000..9759df4a9 --- /dev/null +++ b/deploy/secrets-template.yaml @@ -0,0 +1,88 @@ +# deploy/secrets-template.yaml +# +# DO NOT commit real values to git. 
+# +# Usage: +# 1. Copy this file: cp deploy/secrets-template.yaml deploy/secrets.yaml +# 2. Fill in all placeholders in deploy/secrets.yaml +# 3. Apply: oc apply -f deploy/secrets.yaml +# 4. Keep deploy/secrets.yaml out of version control. +# +# Generate passwords and keys: +# python -c "import secrets; print(secrets.token_hex(32))" # password +# python -c "import secrets; print(secrets.token_hex(64))" # JWT key +# python -c "import secrets; print(secrets.token_hex(24))" # webhook secret +# +# Or create the main secret directly without a file: +# oc create secret generic desdeo-secrets \ +# --from-literal=POSTGRES_USER=desdeo \ +# --from-literal=POSTGRES_PASSWORD= \ +# --from-literal=DB_HOST=desdeo-postgres \ +# --from-literal=DB_PORT=5432 \ +# --from-literal=DB_NAME=desdeo \ +# --from-literal=DB_USER=desdeo \ +# --from-literal=DB_PASSWORD= \ +# --from-literal=AUTHJWT_SECRET=<64-char-hex> \ +# --from-literal=DESDEO_ADMIN_USERNAME=admin \ +# --from-literal=DESDEO_ADMIN_PASSWORD= \ +# --from-literal=WEBHOOK_SECRET_API=<24-char-hex> \ +# --from-literal=WEBHOOK_SECRET_WEBUI=<24-char-hex> +# +# Then create the webhook secrets separately: +# oc create secret generic desdeo-webhook-api \ +# --from-literal=WebHookSecretKey= +# oc create secret generic desdeo-webhook-webui \ +# --from-literal=WebHookSecretKey= +--- +apiVersion: v1 +kind: Secret +metadata: + name: desdeo-secrets +type: Opaque +stringData: + # Application user created by the OpenShift PostgreSQL image on first start. + POSTGRES_USER: desdeo + POSTGRES_PASSWORD: + + # These are read individually by desdeo/api/config.py in production mode. + # DB_HOST must match the Kubernetes Service name in postgres.yaml. + DB_HOST: desdeo-postgres + DB_PORT: "5432" + DB_NAME: desdeo + DB_USER: desdeo + DB_PASSWORD: # same value as POSTGRES_PASSWORD above + + # Generate with: python -c "import secrets; print(secrets.token_hex(64))" + # Never reuse across deployments. + AUTHJWT_SECRET: + + # Seeded by the db-init Job. 
Can be changed after first login. + DESDEO_ADMIN_USERNAME: admin + DESDEO_ADMIN_PASSWORD: + + # Generate with: python -c "import secrets; print(secrets.token_hex(24))" + # Use different values for API and webui. + # IMPORTANT: copy the same values into desdeo-webhook-api and + # desdeo-webhook-webui below — YAML does not support variable references. + WEBHOOK_SECRET_API: + WEBHOOK_SECRET_WEBUI: +--- +apiVersion: v1 +kind: Secret +metadata: + name: desdeo-webhook-api +type: Opaque +stringData: + # OpenShift's secretReference looks for exactly this key name. + # Must be the same value as WEBHOOK_SECRET_API above. + WebHookSecretKey: +--- +apiVersion: v1 +kind: Secret +metadata: + name: desdeo-webhook-webui +type: Opaque +stringData: + # OpenShift's secretReference looks for exactly this key name. + # Must be the same value as WEBHOOK_SECRET_WEBUI above. + WebHookSecretKey: diff --git a/deploy/webui-buildconfig.yaml b/deploy/webui-buildconfig.yaml new file mode 100644 index 000000000..55497d61e --- /dev/null +++ b/deploy/webui-buildconfig.yaml @@ -0,0 +1,64 @@ +# deploy/webui-buildconfig.yaml +# +# BuildConfig for the DESDEO web UI using Docker build strategy. +# +# The Dockerfile lives at webui/Dockerfile in the repository. +# contextDir: webui → all COPY paths in the Dockerfile are relative to webui/. +# +# VITE_API_URL is baked into the client-side bundle at build time. +# It is set to '/api' so that browser requests go to the SvelteKit +# proxy route at /api/[...path], which forwards them to the API +# over the internal cluster network. Do NOT set this to the API's +# public Route URL, the proxy architecture means the browser never +# talks directly to the API. +--- +apiVersion: build.openshift.io/v1 +kind: BuildConfig +metadata: + name: desdeo-webui + labels: + app: desdeo-webui +spec: + source: + type: Git + git: + uri: https://github.com/gialmisi/DESDEO.git + ref: rahti-deploy + # Only the webui/ subtree is needed as the Docker build context. 
+ contextDir: webui + + strategy: + type: Docker + dockerStrategy: + dockerfilePath: Dockerfile + buildArgs: + # '/api' routes browser requests through the SvelteKit server-side proxy. + # See webui/src/routes/api/[...path]/+server.ts. + - name: VITE_API_URL + value: "/api" + env: + - name: NODE_OPTIONS + value: "--max-old-space-size=3072" + + output: + to: + kind: ImageStreamTag + name: desdeo-webui:latest + + triggers: + - type: GitHub + github: + secretReference: + name: desdeo-webhook-webui + - type: ConfigChange + + # npm install + vite build is memory-hungry; 4Gi is usually sufficient. + resources: + requests: + memory: "2Gi" + cpu: "500m" + limits: + memory: "4Gi" + cpu: "1" + + runPolicy: Serial diff --git a/deploy/webui-deployment.yaml b/deploy/webui-deployment.yaml new file mode 100644 index 000000000..ed2f4bf54 --- /dev/null +++ b/deploy/webui-deployment.yaml @@ -0,0 +1,98 @@ +# deploy/webui-deployment.yaml +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: desdeo-webui + labels: + app: desdeo-webui + annotations: + image.openshift.io/triggers: > + [{"from":{"kind":"ImageStreamTag","name":"desdeo-webui:latest"}, + "fieldPath":"spec.template.spec.containers[0].image"}] +spec: + replicas: 1 + selector: + matchLabels: + app: desdeo-webui + strategy: + type: RollingUpdate + rollingUpdate: + maxUnavailable: 0 + maxSurge: 1 + template: + metadata: + labels: + app: desdeo-webui + spec: + containers: + - name: webui + image: desdeo-webui:latest + imagePullPolicy: Always + ports: + - containerPort: 3000 + name: http + protocol: TCP + env: + # adapter-node reads PORT to know which port to listen on. + - name: PORT + value: "3000" + # Server-side +server.ts proxy routes use this to reach the API + # over the internal cluster network (avoids the public ingress). + - name: API_BASE_URL + value: "http://desdeo-api:8080" + # ORIGIN is required by SvelteKit for CSRF protection when behind a proxy. 
+ - name: ORIGIN + value: "https://gialmisi-desdeo-webui.rahtiapp.fi" + resources: + requests: + memory: "128Mi" + cpu: "100m" + limits: + memory: "256Mi" + cpu: "500m" + readinessProbe: + httpGet: + path: / + port: 3000 + initialDelaySeconds: 10 + periodSeconds: 10 + failureThreshold: 3 + livenessProbe: + httpGet: + path: / + port: 3000 + initialDelaySeconds: 20 + periodSeconds: 20 + failureThreshold: 3 +--- +apiVersion: v1 +kind: Service +metadata: + name: desdeo-webui + labels: + app: desdeo-webui +spec: + selector: + app: desdeo-webui + ports: + - name: http + port: 3000 + targetPort: 3000 +--- +apiVersion: route.openshift.io/v1 +kind: Route +metadata: + name: desdeo-webui + labels: + app: desdeo-webui +spec: + host: gialmisi-desdeo-webui.rahtiapp.fi + to: + kind: Service + name: desdeo-webui + port: + targetPort: http + tls: + termination: edge + insecureEdgeTerminationPolicy: Redirect diff --git a/deploy/webui-imagestream.yaml b/deploy/webui-imagestream.yaml new file mode 100644 index 000000000..0348a28bc --- /dev/null +++ b/deploy/webui-imagestream.yaml @@ -0,0 +1,11 @@ +# deploy/webui-imagestream.yaml +--- +apiVersion: image.openshift.io/v1 +kind: ImageStream +metadata: + name: desdeo-webui + labels: + app: desdeo-webui +spec: + lookupPolicy: + local: true diff --git a/desdeo/api/app.py b/desdeo/api/app.py index 7b71e1032..e3709a962 100644 --- a/desdeo/api/app.py +++ b/desdeo/api/app.py @@ -11,6 +11,7 @@ problem, reference_point_method, session, + site_selection, user_authentication, utopia, xnimbus, @@ -38,8 +39,15 @@ app.include_router(gdm_aggregate.router) app.include_router(gnimbus_routers.router) app.include_router(enautilus.router) +app.include_router(site_selection.router) app.include_router(gdm_score_bands_routers.router) + +@app.get("/health") +def health(): + return {"status": "ok"} + + origins = AuthConfig.cors_origins app.add_middleware( diff --git a/desdeo/api/db_init_prod.py b/desdeo/api/db_init_prod.py new file mode 100644 index 
000000000..d86126427 --- /dev/null +++ b/desdeo/api/db_init_prod.py @@ -0,0 +1,84 @@ +"""Production database initialisation script. + +Run once as a Kubernetes Job after the first deployment (or after a full +database wipe). It is intentionally idempotent: running it multiple times +against the same database is safe. + +What it does +------------ +1. Creates all SQLModel tables if they do not already exist. + (Uses create_all which is a no-op for tables that are present.) +2. Seeds an initial analyst user whose credentials come from env vars. + If the user already exists the step is skipped. + +Environment variables required +------------------------------- +DATABASE_URL PostgreSQL DSN, e.g. + postgresql://desdeo:@desdeo-postgres:5432/desdeo +DESDEO_ADMIN_USERNAME Username for the seeded analyst account. +DESDEO_ADMIN_PASSWORD Password for the seeded analyst account. + +Optional +-------- +DESDEO_ADMIN_GROUP Group name for the seeded user (default: "admin"). +""" + +import os +import sys + +from sqlmodel import Session, SQLModel, select + +# Import the engine after DATABASE_URL is in the environment so the config +# module picks it up correctly. 
+from desdeo.api.db import engine +from desdeo.api.models import User, UserRole +from desdeo.api.routers.user_authentication import get_password_hash + + +def create_tables() -> None: + print("[db-init] Creating database tables (create_all is a no-op for existing tables)...") + SQLModel.metadata.create_all(engine) + print("[db-init] Tables ready.") + + +def seed_admin_user() -> None: + username = os.environ.get("DESDEO_ADMIN_USERNAME") + password = os.environ.get("DESDEO_ADMIN_PASSWORD") + group = os.environ.get("DESDEO_ADMIN_GROUP", "admin") + + if not username or not password: + print("[db-init] WARNING: DESDEO_ADMIN_USERNAME or DESDEO_ADMIN_PASSWORD not set — skipping user seed.") + return + + with Session(engine) as session: + existing = session.exec(select(User).where(User.username == username)).first() + + if existing: + print(f"[db-init] User '{username}' already exists — skipping.") + return + + user = User( + username=username, + password_hash=get_password_hash(password), + role=UserRole.analyst, + group=group, + ) + session.add(user) + session.commit() + print(f"[db-init] Created user '{username}' (role=analyst, group={group}).") + + +def main() -> None: + database_url = os.environ.get("DATABASE_URL") + if not database_url: + print("[db-init] ERROR: DATABASE_URL is not set.", file=sys.stderr) + sys.exit(1) + + print(f"[db-init] Using database: {database_url.split('@')[-1]}") # hide credentials + create_tables() + seed_admin_user() + print("[db-init] Done.") + + +if __name__ == "__main__": + main() diff --git a/desdeo/api/models/__init__.py b/desdeo/api/models/__init__.py index ba1726330..977013037 100644 --- a/desdeo/api/models/__init__.py +++ b/desdeo/api/models/__init__.py @@ -2,6 +2,8 @@ __all__ = [ # noqa: RUF022 "Bounds", + "ConstrainedVariantRequest", + "ConstrainedVariantResponse", "ConstantDB", "ConstraintDB", "CreateSessionRequest", @@ -15,6 +17,9 @@ "ENautilusFinalState", "ENautilusRepresentativeSolutionsResponse", 
"ENautilusSessionTreeResponse", + "ENautilusSimulateRequest", + "ENautilusSimulateResponse", + "ENautilusSimulateStepResult", "ENautilusState", "ENautilusStateResponse", "ENautilusStepRequest", @@ -79,6 +84,7 @@ "UserPublic", "UserRole", "VariableDB", + "VariableFixing", "ProblemMetaDataDB", "BaseProblemMetaData", "ForestProblemMetaData", @@ -107,6 +113,7 @@ "SolutionReference", "SolutionReferenceLite", "SolutionReferenceResponse", + "SiteSelectionMetaData", "SolverSelectionMetadata", "UserSavedEMOResults", "UserSavedSolutionDB", @@ -151,6 +158,9 @@ ENautilusFinalizeResponse, ENautilusRepresentativeSolutionsResponse, ENautilusSessionTreeResponse, + ENautilusSimulateRequest, + ENautilusSimulateResponse, + ENautilusSimulateStepResult, ENautilusStateResponse, ENautilusStepRequest, ENautilusStepResponse, @@ -232,6 +242,8 @@ ) from .problem import ( ConstantDB, + ConstrainedVariantRequest, + ConstrainedVariantResponse, ConstraintDB, DiscreteRepresentationDB, ExtraFunctionDB, @@ -247,10 +259,12 @@ RepresentativeNonDominatedSolutions, ScalarizationFunctionDB, SimulatorDB, + SiteSelectionMetaData, SolverSelectionMetadata, TensorConstantDB, TensorVariableDB, VariableDB, + VariableFixing, ) from .reference_point_method import RPMSolveRequest from .session import ( diff --git a/desdeo/api/models/enautilus.py b/desdeo/api/models/enautilus.py index 7485bb788..d9c964a68 100644 --- a/desdeo/api/models/enautilus.py +++ b/desdeo/api/models/enautilus.py @@ -131,6 +131,40 @@ class ENautilusTreeNodeResponse(SQLModel): ) +class ENautilusSimulateRequest(SQLModel): + """Run E-NAUTILUS greedily from a state to completion.""" + + state_id: int = Field(description="Starting ENautilusState to branch from.") + preferred_objective: str = Field(description="Objective symbol to favor (e.g., 'f_1').") + deprioritize: bool = Field( + default=False, + description="If True, always pick the WORST value for the objective instead of the best.", + ) + number_of_intermediate_points: int = Field( + 
default=3, description="Number of intermediate points per simulated step." + ) + + +class ENautilusSimulateStepResult(SQLModel): + """One step in the simulated path.""" + + iteration: int + iterations_left: int + selected_point: dict[str, float] = Field(sa_column=Column(JSON), description="The auto-picked intermediate point.") + selected_point_index: int + intermediate_points: list[dict[str, float]] = Field(sa_column=Column(JSON)) + closeness_measures: list[float] + + +class ENautilusSimulateResponse(SQLModel): + """Result of greedy E-NAUTILUS simulation.""" + + preferred_objective: str + steps: list[ENautilusSimulateStepResult] + final_solution: SolverResults = Field(description="Projected Pareto-optimal solution.") + final_intermediate_point: dict[str, float] = Field(sa_column=Column(JSON)) + + class ENautilusDecisionEventResponse(SQLModel): """A decision event capturing a transition from parent to child node.""" diff --git a/desdeo/api/models/problem.py b/desdeo/api/models/problem.py index fefd6c1a1..d7bff1fb8 100644 --- a/desdeo/api/models/problem.py +++ b/desdeo/api/models/problem.py @@ -67,6 +67,32 @@ class ProblemAddFromJSONRequest(SQLModel): json_file: UploadFile +class VariableFixing(BaseModel): + """Fix a single variable to a specific value via an EQ constraint.""" + + variable_symbol: str + fixed_value: float + constraint_name: str | None = None + + +class ConstrainedVariantRequest(BaseModel): + """Request to create a derived problem with additional EQ constraints fixing variables.""" + + variable_fixings: list[VariableFixing] + name: str | None = None + is_temporary: bool = True + + +class ConstrainedVariantResponse(BaseModel): + """Response after creating a constrained variant.""" + + problem_id: int + parent_problem_id: int + name: str + is_temporary: bool + n_constraints_added: int + + class ProblemInfo(ProblemBase): """Problem info request return data.""" @@ -130,6 +156,10 @@ class ProblemDB(ProblemBase, table=True): scenario_keys: list[str] | None 
= Field(sa_column=Column(JSON, nullable=True), default=None) variable_domain: VariableDomainTypeEnum = Field() + # Variant tracking + is_temporary: bool = Field(default=False) + parent_problem_id: int | None = Field(default=None, foreign_key="problemdb.id") + # Back populates user: "User" = Relationship(back_populates="problems") solutions: list["UserSavedSolutionDB"] = Relationship(back_populates="problem", cascade_delete=True) @@ -283,6 +313,39 @@ class SolverSelectionMetadata(SQLModel, table=True): metadata_instance: "ProblemMetaDataDB" = Relationship(back_populates="solver_selection_metadata") +class SiteSelectionMetaData(SQLModel, table=True): + """A problem metadata class to hold site selection problem specific information. + + Stores geographic data and variable mappings needed to visualize binary + site-selection solutions on a map (e.g., clinic placement, facility location). + """ + + id: int | None = Field(primary_key=True, default=None) + metadata_id: int | None = Field(foreign_key="problemmetadatadb.id", default=None) + + metadata_type: str = "site_selection_metadata" + + # Geographic data (embedded JSON, not file paths) + sites_json: str = Field(description="JSON array: [{name, node, lat, lon}, ...] one per site variable") + nodes_json: str = Field(description="JSON array: [{name, lat, lon, size}, ...] 
one per map node") + travel_time_matrix_json: str = Field(description="JSON: 2D list[list[float]], shape [n_nodes, n_nodes]") + + # Variable mapping + site_variable_symbols: list[str] = Field( + sa_column=Column(JSON), description="Ordered list of site variable symbols matching sites_json positions" + ) + coverage_variable_symbols: list[str] | None = Field( + sa_column=Column(JSON), + default=None, + description="Ordered list of coverage variable symbols matching nodes_json positions, or None", + ) + + # Display config + coverage_threshold: float = Field(default=15.0, description="Threshold for coverage edges (e.g., minutes, km)") + + metadata_instance: "ProblemMetaDataDB" = Relationship(back_populates="site_selection_metadata") + + class ProblemMetaDataDB(SQLModel, table=True): """Store Problem MetaData to DB with this class.""" @@ -296,17 +359,23 @@ class ProblemMetaDataDB(SQLModel, table=True): solver_selection_metadata: list[SolverSelectionMetadata] = Relationship( back_populates="metadata_instance", cascade_delete=True ) + site_selection_metadata: list[SiteSelectionMetaData] = Relationship( + back_populates="metadata_instance", cascade_delete=True + ) problem: ProblemDB = Relationship(back_populates="problem_metadata") @property def all_metadata( self, - ) -> list[ForestProblemMetaData | RepresentativeNonDominatedSolutions | SolverSelectionMetadata]: + ) -> list[ + ForestProblemMetaData | RepresentativeNonDominatedSolutions | SolverSelectionMetadata | SiteSelectionMetaData + ]: """Return all metadata in one list.""" return ( (self.forest_metadata or []) + (self.representative_nd_metadata or []) + (self.solver_selection_metadata or []) + + (self.site_selection_metadata or []) ) @@ -317,6 +386,7 @@ class ProblemMetaDataPublic(SQLModel): forest_metadata: list[ForestProblemMetaData] | None representative_nd_metadata: list[RepresentativeNonDominatedSolutions] | None + site_selection_metadata: list[SiteSelectionMetaData] | None class 
ProblemMetaDataGetRequest(SQLModel): diff --git a/desdeo/api/models/session.py b/desdeo/api/models/session.py index f8386baad..a999e80d4 100644 --- a/desdeo/api/models/session.py +++ b/desdeo/api/models/session.py @@ -2,6 +2,7 @@ from typing import TYPE_CHECKING +from pydantic import ConfigDict from sqlmodel import Field, Relationship, SQLModel if TYPE_CHECKING: @@ -18,6 +19,8 @@ class CreateSessionRequest(SQLModel): class InteractiveSessionBase(SQLModel): """The base model for representing interactive sessions.""" + model_config = ConfigDict(from_attributes=True) + id: int | None user_id: int | None diff --git a/desdeo/api/routers/enautilus.py b/desdeo/api/routers/enautilus.py index 4c577f96a..28f513f76 100644 --- a/desdeo/api/routers/enautilus.py +++ b/desdeo/api/routers/enautilus.py @@ -15,6 +15,9 @@ ENautilusFinalState, ENautilusRepresentativeSolutionsResponse, ENautilusSessionTreeResponse, + ENautilusSimulateRequest, + ENautilusSimulateResponse, + ENautilusSimulateStepResult, ENautilusState, ENautilusStateResponse, ENautilusStepRequest, @@ -509,6 +512,169 @@ def get_session_tree( ) +@router.post("/simulate") +def simulate( + request: ENautilusSimulateRequest, + db_session: Annotated[Session, Depends(get_session)], +) -> ENautilusSimulateResponse: + """Run E-NAUTILUS greedily from a state to completion. + + Given a starting state, this endpoint greedily selects the best intermediate + point for the preferred objective at each iteration until iterations_left == 0, + then projects to the Pareto front. No database writes are performed. + """ + # Load starting state + state_db: StateDB | None = db_session.exec(select(StateDB).where(StateDB.id == request.state_id)).first() + if state_db is None: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, detail=f"StateDB with id={request.state_id} not found." 
+ ) + + if not isinstance(state_db.state, ENautilusState): + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, detail="The referenced state is not an ENautilusState." + ) + + enautilus_state: ENautilusState = state_db.state + result: ENautilusResult = enautilus_state.enautilus_results + + # Load problem + problem_db = db_session.exec(select(ProblemDB).where(ProblemDB.id == state_db.problem_id)).first() + if problem_db is None: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, detail=f"ProblemDB with id={state_db.problem_id} not found." + ) + + problem = Problem.from_problemdb(problem_db) + + # Validate preferred_objective + obj_symbols = [obj.symbol for obj in problem.objectives] + if request.preferred_objective not in obj_symbols: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=f"'{request.preferred_objective}' is not a valid objective. Valid: {obj_symbols}", + ) + + # Determine if the preferred objective is maximized. + # When deprioritize is True, invert the selection logic (pick worst instead of best). 
+ pref_obj = next(obj for obj in problem.objectives if obj.symbol == request.preferred_objective) + pref_maximize = pref_obj.maximize if not request.deprioritize else (not pref_obj.maximize) + + # Load non-dominated solutions + non_dom_db = db_session.exec( + select(RepresentativeNonDominatedSolutions).where( + RepresentativeNonDominatedSolutions.id == enautilus_state.non_dominated_solutions_id + ) + ).first() + if non_dom_db is None: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail=f"RepresentativeNonDominatedSolutions with id={enautilus_state.non_dominated_solutions_id} not found.", + ) + + non_dom_solutions = non_dom_db.solution_data + + # Start from the existing result's intermediate points + current_intermediate_points = result.intermediate_points + current_reachable_indices = result.reachable_point_indices + current_closeness = result.closeness_measures + current_iteration = result.current_iteration + iterations_left = result.iterations_left + + steps: list[ENautilusSimulateStepResult] = [] + + while iterations_left > 0: + # Greedy selection: pick the intermediate point best for preferred_objective + best_idx = _pick_best_for_objective(current_intermediate_points, request.preferred_objective, pref_maximize) + + selected_point = current_intermediate_points[best_idx] + reachable_for_selected = current_reachable_indices[best_idx] + + steps.append( + ENautilusSimulateStepResult( + iteration=current_iteration, + iterations_left=iterations_left, + selected_point=selected_point, + selected_point_index=best_idx, + intermediate_points=current_intermediate_points, + closeness_measures=current_closeness, + ) + ) + + if iterations_left == 0: + break + + # Run E-NAUTILUS step (core computation, no DB writes) + next_result: ENautilusResult = enautilus_step( + problem=problem, + non_dominated_points=non_dom_solutions, + current_iteration=current_iteration, + iterations_left=iterations_left, + selected_point=selected_point, + 
number_of_intermediate_points=request.number_of_intermediate_points, + reachable_point_indices=reachable_for_selected, + ) + + current_intermediate_points = next_result.intermediate_points + current_reachable_indices = next_result.reachable_point_indices + current_closeness = next_result.closeness_measures + current_iteration = next_result.current_iteration + iterations_left = next_result.iterations_left + + # Final selection at iterations_left == 0 + final_best_idx = _pick_best_for_objective(current_intermediate_points, request.preferred_objective, pref_maximize) + final_intermediate_point = current_intermediate_points[final_best_idx] + + steps.append( + ENautilusSimulateStepResult( + iteration=current_iteration, + iterations_left=iterations_left, + selected_point=final_intermediate_point, + selected_point_index=final_best_idx, + intermediate_points=current_intermediate_points, + closeness_measures=current_closeness, + ) + ) + + # Build a result object for projection + final_result = ENautilusResult( + current_iteration=current_iteration, + iterations_left=iterations_left, + intermediate_points=current_intermediate_points, + reachable_best_bounds=[], # not needed for projection + reachable_worst_bounds=[], + closeness_measures=current_closeness, + reachable_point_indices=current_reachable_indices, + ) + + non_dom_df = pl.DataFrame(non_dom_solutions) + representative_solutions = enautilus_get_representative_solutions(problem, final_result, non_dom_df) + final_solution = representative_solutions[final_best_idx] + + return ENautilusSimulateResponse( + preferred_objective=request.preferred_objective, + steps=steps, + final_solution=final_solution, + final_intermediate_point=final_intermediate_point, + ) + + +def _pick_best_for_objective( + intermediate_points: list[dict[str, float]], + objective: str, + maximize: bool, +) -> int: + """Pick the index of the intermediate point with the best value for an objective.""" + best_idx = 0 + best_val = 
intermediate_points[0][objective] + for i in range(1, len(intermediate_points)): + val = intermediate_points[i][objective] + if (maximize and val > best_val) or (not maximize and val < best_val): + best_val = val + best_idx = i + return best_idx + + def _match_chosen_point( chosen: dict[str, float] | None, options: list[dict[str, float]], diff --git a/desdeo/api/routers/problem.py b/desdeo/api/routers/problem.py index b1924fe61..cc1f3b5a0 100644 --- a/desdeo/api/routers/problem.py +++ b/desdeo/api/routers/problem.py @@ -5,9 +5,12 @@ from fastapi import APIRouter, Depends, HTTPException, Request, UploadFile, status from fastapi.responses import JSONResponse -from sqlmodel import Session +from sqlmodel import Session, select +from desdeo.api.db import get_session from desdeo.api.models import ( + ConstrainedVariantRequest, + ConstrainedVariantResponse, ForestProblemMetaData, ProblemDB, ProblemInfo, @@ -16,6 +19,7 @@ ProblemMetaDataGetRequest, ProblemSelectSolverRequest, RepresentativeNonDominatedSolutions, + SiteSelectionMetaData, SolverSelectionMetadata, User, UserRole, @@ -27,6 +31,7 @@ ) from desdeo.api.routers.user_authentication import get_current_user from desdeo.problem import Problem +from desdeo.problem.schema import Constraint, ConstraintTypeEnum, TensorVariable from desdeo.tools.utils import available_solvers from .utils import ContextField, SessionContext, SessionContextGuard @@ -64,28 +69,40 @@ async def parse_problem_json(request: Request) -> Problem: @router.get("/all") -def get_problems(user: Annotated[User, Depends(get_current_user)]) -> list[ProblemInfoSmall]: - """Get information on all the current user's problems. +def get_problems( + user: Annotated[User, Depends(get_current_user)], + db_session: Annotated[Session, Depends(get_session)], +) -> list[ProblemInfoSmall]: + """Get information on problems. Analysts and admins see all users' problems. Args: user (Annotated[User, Depends): the current user. 
+ db_session (Annotated[Session, Depends]): the database session. Returns: - list[ProblemInfoSmall]: a list of information on all the problems. + list[ProblemInfoSmall]: a list of information on the problems. """ + if user.role in (UserRole.analyst, UserRole.admin): + return list(db_session.exec(select(ProblemDB)).all()) return user.problems @router.get("/all_info") -def get_problems_info(user: Annotated[User, Depends(get_current_user)]) -> list[ProblemInfo]: - """Get detailed information on all the current user's problems. +def get_problems_info( + user: Annotated[User, Depends(get_current_user)], + db_session: Annotated[Session, Depends(get_session)], +) -> list[ProblemInfo]: + """Get detailed information on problems. Analysts and admins see all users' problems. Args: user (Annotated[User, Depends): the current user. + db_session (Annotated[Session, Depends]): the database session. Returns: - list[ProblemInfo]: a list of the detailed information on all the problems. + list[ProblemInfo]: a list of the detailed information on the problems. """ + if user.role in (UserRole.analyst, UserRole.admin): + return list(db_session.exec(select(ProblemDB)).all()) return user.problems @@ -116,12 +133,15 @@ def get_problem( def add_problem( request: Annotated[Problem, Depends(parse_problem_json)], context: Annotated[SessionContext, Depends(SessionContextGuard().post)], + target_user_id: int | None = None, ) -> ProblemInfo: """Add a newly defined problem to the database. Args: request (Problem): the JSON representation of the problem. context (Annotated[SessionContext, Depends): the session context. + target_user_id (int | None): if provided, assign the problem to this user instead of + the caller. Only analysts and admins may use this parameter. Note: Users with the role 'guest' may not add new problems. 
@@ -141,8 +161,22 @@ def add_problem( detail="Guest users are not allowed to add new problems.", ) + effective_user = user + if target_user_id is not None: + if user.role not in (UserRole.analyst, UserRole.admin): + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="Only analysts and admins can add problems on behalf of other users.", + ) + effective_user = db_session.get(User, target_user_id) + if effective_user is None: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail=f"User with id={target_user_id} not found.", + ) + try: - problem_db = ProblemDB.from_problem(request, user=user) + problem_db = ProblemDB.from_problem(request, user=effective_user) except Exception as e: raise HTTPException( status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, @@ -160,12 +194,15 @@ def add_problem( def add_problem_json( json_file: UploadFile, context: Annotated[SessionContext, Depends(SessionContextGuard().post)], + target_user_id: int | None = None, ) -> ProblemInfo: """Adds a problem to the database based on its JSON definition. Args: json_file (UploadFile): a file in JSON format describing the problem. context (Annotated[SessionContext, Depends): the session context. + target_user_id (int | None): if provided, assign the problem to this user instead of + the caller. Only analysts and admins may use this parameter. Raises: HTTPException: if the provided `json_file` is empty. 
@@ -177,6 +214,20 @@ def add_problem_json( user = context.user db_session = context.db_session + effective_user = user + if target_user_id is not None: + if user.role not in (UserRole.analyst, UserRole.admin): + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="Only analysts and admins can add problems on behalf of other users.", + ) + effective_user = db_session.get(User, target_user_id) + if effective_user is None: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail=f"User with id={target_user_id} not found.", + ) + raw = json_file.file.read() if not raw: @@ -188,7 +239,7 @@ def add_problem_json( raise HTTPException(status_code=400, detail="Invalid JSON.") from e problem = Problem.model_validate_json(raw, by_name=True) - problem_db = ProblemDB.from_problem(problem, user=user) + problem_db = ProblemDB.from_problem(problem, user=effective_user) db_session.add(problem_db) db_session.commit() @@ -201,7 +252,9 @@ def add_problem_json( def get_metadata( request: ProblemMetaDataGetRequest, context: Annotated[SessionContext, Depends(SessionContextGuard(require=[]).post)], -) -> list[ForestProblemMetaData | RepresentativeNonDominatedSolutions | SolverSelectionMetadata]: +) -> list[ + ForestProblemMetaData | RepresentativeNonDominatedSolutions | SolverSelectionMetadata | SiteSelectionMetaData +]: """Fetch specific metadata for a specific problem. Fetch specific metadata for a specific problem. See all the possible @@ -265,10 +318,10 @@ def select_solver( ) # Auth the user - if user.id != problem_db.user_id: + if user.role not in (UserRole.analyst, UserRole.admin) and user.id != problem_db.user_id: raise HTTPException( detail="Unauthorized user!", - status_code=status.HTTP_401_UNAUTHORIZED, + status_code=status.HTTP_403_FORBIDDEN, ) # All good, get on with it. 
@@ -372,8 +425,8 @@ def get_all_representative_solution_sets( raise HTTPException(status_code=404, detail=f"Problem with ID {problem_id} not found.") # Check the user - if problem_db.user_id != user.id: - raise HTTPException(status_code=401, detail="Unauthorized user.") + if user.role not in (UserRole.analyst, UserRole.admin) and problem_db.user_id != user.id: + raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Unauthorized user.") # Fetch metadata problem_metadata = problem_db.problem_metadata @@ -408,8 +461,11 @@ def get_representative_solution_set( raise HTTPException(status_code=404, detail=f"Representative set with ID {set_id} not found.") # Check the user - if repr_set.metadata_instance.problem.user_id != context.user.id: - raise HTTPException(status_code=401, detail="Unauthorized user.") + if ( + context.user.role not in (UserRole.analyst, UserRole.admin) + and repr_set.metadata_instance.problem.user_id != context.user.id + ): + raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Unauthorized user.") # Return all fields as a dict return RepresentativeSolutionSetFull( @@ -437,10 +493,10 @@ def delete_representative_solution_set( if repr_metadata is None: raise HTTPException(status_code=404, detail=f"Representative solution set with ID {set_id} not found.") - # Ensure the user owns the problem this set belongs to + # Ensure the user owns the problem this set belongs to (analysts/admins are exempt) problem_metadata = repr_metadata.metadata_instance - if problem_metadata.problem.user_id != user.id: - raise HTTPException(status_code=401, detail="Unauthorized user.") + if user.role not in (UserRole.analyst, UserRole.admin) and problem_metadata.problem.user_id != user.id: + raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Unauthorized user.") # Delete the set db_session.delete(repr_metadata) @@ -452,7 +508,11 @@ def delete_problem( problem_id: int, context: Annotated[SessionContext, 
Depends(SessionContextGuard().delete)], ): - """Delete a problem by its ID.""" + """Delete a problem by its ID. + + Temporary problems (is_temporary=True) can be deleted by their owner. + Non-temporary problems can only be deleted by admin users. + """ db_session: Session = context.db_session user = context.user @@ -460,8 +520,13 @@ def delete_problem( if problem_db is None: raise HTTPException(status_code=404, detail=f"Problem with ID {problem_id} not found.") - if problem_db.user_id != user.id: - raise HTTPException(status_code=401, detail="Unauthorized user.") + # Role hierarchy for deletion: admin > analyst > dm > guest. + # Users can delete their own problems or problems owned by lower-role users. + _role_rank = {UserRole.guest: 0, UserRole.dm: 1, UserRole.analyst: 2, UserRole.admin: 3} + is_own = problem_db.user_id == user.id + outranks_owner = _role_rank.get(user.role, 0) > _role_rank.get(problem_db.user.role, 0) + if not (is_own or outranks_owner): + raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Unauthorized user.") db_session.delete(problem_db) db_session.commit() @@ -480,8 +545,110 @@ def get_problem_json( if problem_db is None: raise HTTPException(status_code=404, detail=f"Problem with ID {problem_id} not found.") - if problem_db.user_id != user.id: - raise HTTPException(status_code=401, detail="Unauthorized user.") + if user.role not in (UserRole.analyst, UserRole.admin) and problem_db.user_id != user.id: + raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Unauthorized user.") problem = Problem.from_problemdb(problem_db) return JSONResponse(content=json.loads(problem.model_dump_json()), status_code=status.HTTP_200_OK) + + +@router.post("/{problem_id}/constrained_variant") +def create_constrained_variant( + problem_id: int, + request: ConstrainedVariantRequest, + context: Annotated[SessionContext, Depends(SessionContextGuard(require=[]).post)], +) -> ConstrainedVariantResponse: + """Create a derived problem with 
additional EQ constraints fixing variables to specific values. + + The original problem is not modified. The variant is stored as a new ProblemDB row + with parent_problem_id set to the original. + """ + db_session: Session = context.db_session + user = context.user + problem_db = context.problem_db + + if problem_db is None: + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"Problem with ID {problem_id} not found.") + + if problem_db.user_id != user.id: + raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Unauthorized user.") + + # Reconstruct in-memory Problem + problem = Problem.from_problemdb(problem_db) + + # Build a mapping from valid variable symbols to their MathJSON reference expression. + # Scalar variables: "x" → "x" (plain symbol in MathJSON). + # Tensor variables: unrolled names like "sv_5" → ["At", "sv", 5, 1] (indexed access). + # Pyomo creates tensor variables with index sets for each dimension of the shape, + # so shape [60, 1] needs a 2D index (i, 1). Shape [3, 4] would enumerate all 12 + # elements as sv_1..sv_12 in row-major order with 1-based Pyomo indices. + import itertools + + var_symbol_to_expr: dict[str, str | list] = {} + for v in problem.variables: + if isinstance(v, TensorVariable): + # Enumerate all elements in row-major order (matching solver unrolling convention) + ranges = [range(1, dim + 1) for dim in v.shape] + for flat_idx, indices in enumerate(itertools.product(*ranges), start=1): + var_symbol_to_expr[f"{v.symbol}_{flat_idx}"] = ["At", v.symbol, *indices] + else: + var_symbol_to_expr[v.symbol] = v.symbol + + new_constraints = [] + for i, fixing in enumerate(request.variable_fixings): + if fixing.variable_symbol not in var_symbol_to_expr: + raise HTTPException( + status_code=422, + detail=f"Variable symbol '{fixing.variable_symbol}' not found in problem. 
" + f"Available: {sorted(var_symbol_to_expr.keys())}", + ) + var_expr = var_symbol_to_expr[fixing.variable_symbol] + new_constraints.append( + Constraint( + name=fixing.constraint_name or f"fix_{fixing.variable_symbol}_to_{fixing.fixed_value}", + symbol=f"_fix_{i}", + cons_type=ConstraintTypeEnum.EQ, + func=["Add", var_expr, -fixing.fixed_value], + is_linear=True, + ) + ) + + # Create the variant (immutable copy with added constraints) + variant = problem.add_constraints(new_constraints) + variant_name = request.name or f"{problem_db.name} [variant]" + # Update the name on the frozen model + variant = variant.model_copy(update={"name": variant_name}) + + # Persist + variant_db = ProblemDB.from_problem(variant, user=user) + variant_db.is_temporary = request.is_temporary + variant_db.parent_problem_id = problem_id + db_session.add(variant_db) + db_session.commit() + db_session.refresh(variant_db) + + # Copy solver selection metadata from parent so the variant uses the same solver + if problem_db.problem_metadata is not None: + parent_solver_meta = [ + m for m in problem_db.problem_metadata.all_metadata if m.metadata_type == "solver_selection_metadata" + ] + if parent_solver_meta: + variant_metadata = ProblemMetaDataDB(problem_id=variant_db.id) + db_session.add(variant_metadata) + db_session.commit() + db_session.refresh(variant_metadata) + + variant_solver = SolverSelectionMetadata( + metadata_id=variant_metadata.id, + solver_string_representation=parent_solver_meta[-1].solver_string_representation, + ) + db_session.add(variant_solver) + db_session.commit() + + return ConstrainedVariantResponse( + problem_id=variant_db.id, + parent_problem_id=problem_id, + name=variant_name, + is_temporary=variant_db.is_temporary, + n_constraints_added=len(new_constraints), + ) diff --git a/desdeo/api/routers/session.py b/desdeo/api/routers/session.py index 5b4a5e757..baf3d7b0f 100644 --- a/desdeo/api/routers/session.py +++ b/desdeo/api/routers/session.py @@ -11,9 +11,14 @@ 
InteractiveSessionDB, InteractiveSessionInfo, User, + UserRole, ) from desdeo.api.routers.user_authentication import get_current_user -from desdeo.api.routers.utils import SessionContext, SessionContextGuard, fetch_interactive_session +from desdeo.api.routers.utils import ( + SessionContext, + SessionContextGuard, + fetch_interactive_session_with_role_check, +) router = APIRouter(prefix="/session") @@ -22,13 +27,34 @@ def create_new_session( request: CreateSessionRequest, context: Annotated[SessionContext, Depends(SessionContextGuard().post)], + target_user_id: int | None = None, ) -> InteractiveSessionInfo: - """Creates a new interactive session.""" + """Creates a new interactive session. + + If ``target_user_id`` is provided, the session is created on behalf of that user. + Only analysts and admins may use this parameter. + """ user = context.user db_session = context.db_session + if target_user_id is not None: + if user.role not in (UserRole.analyst, UserRole.admin): + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="Only analysts and admins may create sessions for other users.", + ) + target_user = db_session.get(User, target_user_id) + if target_user is None: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail=f"User with id={target_user_id} not found.", + ) + owner = target_user + else: + owner = user + interactive_session = InteractiveSessionDB( - user_id=user.id, + user_id=owner.id, info=request.info, ) @@ -36,12 +62,12 @@ def create_new_session( db_session.commit() db_session.refresh(interactive_session) - user.active_session_id = interactive_session.id + owner.active_session_id = interactive_session.id - db_session.add(user) + db_session.add(owner) db_session.commit() - return interactive_session + return InteractiveSessionInfo.model_validate(interactive_session) @router.get("/get/{session_id}") @@ -50,8 +76,8 @@ def get_session( user: Annotated[User, Depends(get_current_user)], session: Annotated[Session, 
Depends(get_db_session)], ) -> InteractiveSessionInfo: - """Return an interactive session with a current user.""" - return fetch_interactive_session( + """Return an interactive session. Analysts and admins may access any session.""" + return fetch_interactive_session_with_role_check( user=user, session_id=session_id, session=session, @@ -63,17 +89,13 @@ def get_all_sessions( user: Annotated[User, Depends(get_current_user)], session: Annotated[Session, Depends(get_db_session)], ) -> list[InteractiveSessionInfo]: - """Return all interactive sessions of the current user.""" - statement = select(InteractiveSessionDB).where(InteractiveSessionDB.user_id == user.id) - result = session.exec(statement).all() - - if not result: - raise HTTPException( - status_code=status.HTTP_404_NOT_FOUND, - detail="No interactive sessions found for the user.", - ) + """Return interactive sessions. Analysts and admins see all users' sessions; others see only their own.""" + if user.role in (UserRole.analyst, UserRole.admin): + statement = select(InteractiveSessionDB) + else: + statement = select(InteractiveSessionDB).where(InteractiveSessionDB.user_id == user.id) - return result + return list(session.exec(statement).all()) @router.delete("/{session_id}", status_code=status.HTTP_204_NO_CONTENT) @@ -82,8 +104,8 @@ def delete_session( user: Annotated[User, Depends(get_current_user)], session: Annotated[Session, Depends(get_db_session)], ) -> None: - """Delete an interactive session and all its related states.""" - interactive_session = fetch_interactive_session( + """Delete an interactive session and all its related states. 
Analysts and admins may delete any session.""" + interactive_session = fetch_interactive_session_with_role_check( user=user, session_id=session_id, session=session, diff --git a/desdeo/api/routers/site_selection.py b/desdeo/api/routers/site_selection.py new file mode 100644 index 000000000..e22beb0bc --- /dev/null +++ b/desdeo/api/routers/site_selection.py @@ -0,0 +1,277 @@ +"""Endpoints for site selection map visualization and metadata management.""" + +import json +from typing import Annotated + +import numpy as np +from fastapi import APIRouter, Depends, HTTPException +from pydantic import BaseModel +from sqlmodel import Session, select + +from desdeo.api.db import get_session +from desdeo.api.models import ProblemDB, ProblemMetaDataDB, SiteSelectionMetaData, User, UserRole +from desdeo.api.routers.user_authentication import get_current_user + +router = APIRouter(prefix="/site-selection", tags=["Site Selection"]) + +# Color constants for map nodes +COLOR_ACTIVE = "#FFA500" # node has at least one visited site +COLOR_COVERED = "#FFFF00" # node is covered by a nearby visited site +COLOR_INACTIVE = "#808080" # node is not covered + + +# --- Request / Response models --- + + +class SiteSelectionMetaDataRequest(BaseModel): + """Request body for loading site selection metadata.""" + + problem_id: int + sites: list[dict] # [{name, node, lat, lon}] + nodes: list[dict] # [{name, lat, lon, size}] + travel_time_matrix: list[list[float]] + site_variable_symbols: list[str] + coverage_variable_symbols: list[str] | None = None + coverage_threshold: float = 15.0 + + +class SiteSelectionMapRequest(BaseModel): + """Request body for building the site selection map.""" + + problem_id: int + optimal_variables: dict # from SolverResults.optimal_variables + + +class SiteSelectionMapNode(BaseModel): + """A node marker on the map.""" + + name: str + lat: float + lon: float + size: float + color: str + tooltip: str + + +class SiteSelectionMapEdge(BaseModel): + """A coverage connection 
edge between two nodes.""" + + from_lat: float + from_lon: float + to_lat: float + to_lon: float + + +class SiteSelectionMapResponse(BaseModel): + """Response body for the site selection map endpoint.""" + + nodes: list[SiteSelectionMapNode] + edges: list[SiteSelectionMapEdge] + center: list[float] + site_variable_symbols: list[str] + site_node_names: list[str] + + +# --- Endpoints --- + + +@router.post("/load_metadata") +def load_metadata( + req: SiteSelectionMetaDataRequest, + user: Annotated[User, Depends(get_current_user)], + session: Annotated[Session, Depends(get_session)], +) -> SiteSelectionMetaData: + """Store site selection metadata for a problem. + + The authenticated user must own the problem. + """ + problem = session.get(ProblemDB, req.problem_id) + if problem is None: + raise HTTPException(status_code=404, detail=f"Problem with ID {req.problem_id} not found.") + + # Role hierarchy: admin > analyst > dm > guest. + # Users can manage metadata for their own problems or problems owned by lower-role users. 
+ _role_rank = {UserRole.guest: 0, UserRole.dm: 1, UserRole.analyst: 2, UserRole.admin: 3} + is_own = problem.user_id == user.id + outranks_owner = _role_rank.get(user.role, 0) > _role_rank.get(problem.user.role, 0) + if not (is_own or outranks_owner): + raise HTTPException(status_code=403, detail="You do not have permission to modify this problem.") + + # Ensure ProblemMetaDataDB exists for this problem + metadata_db = session.exec(select(ProblemMetaDataDB).where(ProblemMetaDataDB.problem_id == req.problem_id)).first() + if metadata_db is None: + metadata_db = ProblemMetaDataDB(problem_id=req.problem_id) + session.add(metadata_db) + session.commit() + session.refresh(metadata_db) + + site_sel = SiteSelectionMetaData( + metadata_id=metadata_db.id, + sites_json=json.dumps(req.sites), + nodes_json=json.dumps(req.nodes), + travel_time_matrix_json=json.dumps(req.travel_time_matrix), + site_variable_symbols=req.site_variable_symbols, + coverage_variable_symbols=req.coverage_variable_symbols, + coverage_threshold=req.coverage_threshold, + ) + session.add(site_sel) + session.commit() + session.refresh(site_sel) + + return site_sel + + +@router.post("/map", response_model=SiteSelectionMapResponse) +def build_map( + req: SiteSelectionMapRequest, + user: Annotated[User, Depends(get_current_user)], + session: Annotated[Session, Depends(get_session)], +) -> SiteSelectionMapResponse: + """Build Leaflet-compatible map data from a site selection solution. + + Reads site selection metadata from the DB and extracts variable values + from the provided solution to determine node colors and coverage edges. 
+ """ + # Load metadata + metadata_db = session.exec(select(ProblemMetaDataDB).where(ProblemMetaDataDB.problem_id == req.problem_id)).first() + + if metadata_db is None: + raise HTTPException(status_code=404, detail="No metadata found for this problem.") + + site_sel_list = [m for m in metadata_db.all_metadata if m.metadata_type == "site_selection_metadata"] + if not site_sel_list: + raise HTTPException(status_code=404, detail="No site selection metadata found for this problem.") + + meta: SiteSelectionMetaData = site_sel_list[-1] + + # Parse embedded JSON + sites: list[dict] = json.loads(meta.sites_json) + nodes: list[dict] = json.loads(meta.nodes_json) + ttime_matrix: list[list[float]] = json.loads(meta.travel_time_matrix_json) + + n_nodes = len(nodes) + n_sites = len(sites) + + # Extract site visit values from solution variables + sv = [] + for sym in meta.site_variable_symbols: + val = req.optimal_variables.get(sym) + if isinstance(val, list): + sv.append(bool(round(val[0]))) + elif val is not None: + sv.append(bool(round(val))) + else: + sv.append(False) + + # Extract coverage values + cover = [] + if meta.coverage_variable_symbols: + for sym in meta.coverage_variable_symbols: + val = req.optimal_variables.get(sym) + if isinstance(val, list): + cover.append(bool(round(val[0]))) + elif val is not None: + cover.append(bool(round(val))) + else: + cover.append(False) + else: + cover = [False] * n_nodes + + visited_site_indices = [i for i, v in enumerate(sv) if v] + covered_node_indices = [i for i, v in enumerate(cover) if v] + + # Build node name lookup + node_names = [n["name"] for n in nodes] + node_name_to_idx = {name: i for i, name in enumerate(node_names)} + + # Build travel-time close-nodes mask: shape (n_nodes, n_nodes) + ttime_np = np.array(ttime_matrix) + close_nodes = (ttime_np < meta.coverage_threshold).astype(np.int8) + + # Each site maps to its host node -> build (n_sites, n_nodes) adjacency + site_node_names = [s["node"] for s in sites] + 
site_node_idxs = [node_name_to_idx[n] for n in site_node_names] + sites_adj2nodes = close_nodes[site_node_idxs, :] # (n_sites, n_nodes) + + # Group visited sites by node (for tooltips) + sites_in_nodes: dict[str, list[str]] = {} + for i in visited_site_indices: + node_name = site_node_names[i] + site_name = sites[i]["name"] + sites_in_nodes.setdefault(node_name, []).append(site_name) + + active_node_names = set(sites_in_nodes.keys()) + covered_node_names = {node_names[i] for i in covered_node_indices} + + # Build coverage connections: site_node -> set of nearby nodes it covers + site2covered: dict[str, set[str]] = {} + for site_idx in visited_site_indices: + site_node = site_node_names[site_idx] + nearby: set[str] = set() + for n_idx in range(n_nodes): + if sites_adj2nodes[site_idx, n_idx]: + nearby.add(node_names[n_idx]) + nearby.discard(site_node) + if nearby: + existing = site2covered.get(site_node, set()) + site2covered[site_node] = existing | nearby + + # Invert: covered_node -> set of site-nodes that cover it + adjacent_sites: dict[str, set[str]] = {} + for site_node, covered_set in site2covered.items(): + for cov_node in covered_set: + if cov_node in covered_node_names or cov_node in active_node_names: + adjacent_sites.setdefault(cov_node, set()).add(site_node) + + # Coordinate lookup + node_coords: dict[str, tuple[float, float]] = {} + for n in nodes: + node_coords[n["name"]] = (n["lat"], n["lon"]) + + # Build edges + edges: list[SiteSelectionMapEdge] = [] + for cov_node, src_nodes in adjacent_sites.items(): + to_lat, to_lon = node_coords[cov_node] + for src_node in src_nodes: + from_lat, from_lon = node_coords[src_node] + edges.append(SiteSelectionMapEdge(from_lat=from_lat, from_lon=from_lon, to_lat=to_lat, to_lon=to_lon)) + + # Build node markers + nodes_out: list[SiteSelectionMapNode] = [] + for n in nodes: + name = n["name"] + lat = n["lat"] + lon = n["lon"] + size = n.get("size", 5.0) + + # Color assignment + if name in active_node_names: + color = 
COLOR_ACTIVE + elif name in covered_node_names: + color = COLOR_COVERED + else: + color = COLOR_INACTIVE + + # Tooltip HTML + if name in sites_in_nodes: + site_list_html = "
<br>".join(sites_in_nodes[name]) + tooltip = f"{name}<br>Sites:<br>{site_list_html}" + elif name in adjacent_sites: + covered_by = "<br>".join(sorted(adjacent_sites[name])) + tooltip = f"{name}<br>Covered by sites in:<br>
{covered_by}" + else: + tooltip = f"{name}" + + nodes_out.append(SiteSelectionMapNode(name=name, lat=lat, lon=lon, size=size, color=color, tooltip=tooltip)) + + # Center of the map + avg_lat = sum(n.lat for n in nodes_out) / len(nodes_out) + avg_lon = sum(n.lon for n in nodes_out) / len(nodes_out) + + return SiteSelectionMapResponse( + nodes=nodes_out, + edges=edges, + center=[avg_lat, avg_lon], + site_variable_symbols=meta.site_variable_symbols, + site_node_names=[s["node"] for s in sites], + ) diff --git a/desdeo/api/routers/user_authentication.py b/desdeo/api/routers/user_authentication.py index f2a0402f4..dcaa7941b 100644 --- a/desdeo/api/routers/user_authentication.py +++ b/desdeo/api/routers/user_authentication.py @@ -335,6 +335,32 @@ def add_user_to_database( ) +@router.get("/users/dms") +def get_dm_users( + user: Annotated[User, Depends(get_current_user)], + session: Annotated[Session, Depends(get_session)], +) -> list[UserPublic]: + """Return all users with the decision maker role. Requires analyst or admin. + + Args: + user (Annotated[User, Depends]): the current user. + session (Annotated[Session, Depends]): the database session. + + Returns: + list[UserPublic]: public information for all DM users. + + Raises: + HTTPException: if the current user is not an analyst or admin. + """ + if user.role not in (UserRole.analyst, UserRole.admin): + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="Only analysts and admins can list users.", + ) + statement = select(User).where(User.role == UserRole.dm) + return list(session.exec(statement).all()) + + @router.get("/user_info") def get_current_user_info(user: Annotated[User, Depends(get_current_user)]) -> UserPublic: """Return information about the current user. 
@@ -475,12 +501,14 @@ def refresh_access_token( @router.post("/add_new_dm") def add_new_dm( + user: Annotated[User, Depends(get_current_user)], form_data: Annotated[OAuth2PasswordRequestForm, Depends()], session: Annotated[Session, Depends(get_session)], ) -> JSONResponse: - """Add a new user of the role Decision Maker to the database. Requires no login. + """Add a new user of the role Decision Maker to the database. Requires a logged in analyst or an admin. Args: + user: Annotated[User, Depends(get_current_user)]: Logged in user with the role "analyst" or "admin". form_data (Annotated[OAuth2PasswordRequestForm, Depends()]): The user credentials to add to the database. session (Annotated[Session, Depends(get_session)]): the database session. @@ -488,8 +516,15 @@ def add_new_dm( JSONResponse: A JSON response Raises: - HTTPException: if username is already in use or if saving to the database fails for some reason. + HTTPException: if the logged in user is not an analyst or an admin or if + username is already in use or if saving to the database fails for some reason. """ + if user.role not in (UserRole.analyst, UserRole.admin): + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Logged in user has insufficient rights.", + ) + add_user_to_database( form_data=form_data, role=UserRole.dm, diff --git a/desdeo/api/routers/utils.py b/desdeo/api/routers/utils.py index 7f2caba46..868e44c89 100644 --- a/desdeo/api/routers/utils.py +++ b/desdeo/api/routers/utils.py @@ -19,6 +19,7 @@ RPMSolveRequest, StateDB, User, + UserRole, ) from desdeo.api.models.session import CreateSessionRequest from desdeo.api.routers.user_authentication import get_current_user @@ -26,6 +27,57 @@ RequestType = RPMSolveRequest | ENautilusStepRequest | CreateSessionRequest +def fetch_problem_with_role_check(user: User, problem_id: int, session: Session) -> ProblemDB | None: + """Fetch a ProblemDB by id, bypassing ownership for analysts and admins. 
+ + Args: + user (User): the requesting user. + problem_id (int): id of the problem to fetch. + session (Session): the database session. + + Returns: + ProblemDB | None: the matching problem, or None if not found. + """ + if user.role in (UserRole.analyst, UserRole.admin): + statement = select(ProblemDB).where(ProblemDB.id == problem_id) + else: + statement = select(ProblemDB).where( + ProblemDB.user_id == user.id, + ProblemDB.id == problem_id, + ) + return session.exec(statement).first() + + +def fetch_interactive_session_with_role_check(user: User, session_id: int, session: Session) -> InteractiveSessionDB: + """Fetch an InteractiveSessionDB by id, bypassing ownership for analysts and admins. + + Args: + user (User): the requesting user. + session_id (int): id of the interactive session to fetch. + session (Session): the database session. + + Raises: + HTTPException: when the session is not found (or not owned by the user for non-analysts). + + Returns: + InteractiveSessionDB: the matching session. 
+ """ + if user.role in (UserRole.analyst, UserRole.admin): + statement = select(InteractiveSessionDB).where(InteractiveSessionDB.id == session_id) + else: + statement = select(InteractiveSessionDB).where( + InteractiveSessionDB.id == session_id, + InteractiveSessionDB.user_id == user.id, + ) + result = session.exec(statement).first() + if result is None: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail=f"Could not find interactive session with id={session_id}.", + ) + return result + + def fetch_interactive_session( user: User, session: Session, @@ -218,18 +270,11 @@ def post( parent_state = None if request is not None: - if hasattr(request, "problem_id"): - problem_db = fetch_user_problem(user, request, db_session) + if hasattr(request, "problem_id") and request.problem_id is not None: + problem_db = fetch_problem_with_role_check(user, request.problem_id, db_session) if problem_db is None and problem_id is not None: - - class _ProblemOnly: - def __init__(self, problem_id: int): - self.problem_id = problem_id - self.session_id = None - self.parent_state_id = None - - problem_db = fetch_user_problem(user, _ProblemOnly(problem_id), db_session) + problem_db = fetch_problem_with_role_check(user, problem_id, db_session) if hasattr(request, "interactive_session_id") or hasattr(request, "problem_id"): interactive_session = fetch_interactive_session(user, db_session, request) @@ -242,15 +287,7 @@ def __init__(self, problem_id: int): interactive_session=interactive_session, ) elif problem_id is not None: - - class _ProblemOnly: - def __init__(self, problem_id: int): - self.problem_id = problem_id - self.session_id = None - self.parent_state_id = None - - pseudo_request = _ProblemOnly(problem_id) - problem_db = fetch_user_problem(user, pseudo_request, db_session) + problem_db = fetch_problem_with_role_check(user, problem_id, db_session) context = SessionContext( user=user, @@ -275,14 +312,7 @@ def get( interactive_session = None if problem_id is not 
None: - - class _ProblemOnly: - def __init__(self, problem_id: int): - self.problem_id = problem_id - self.session_id = None - self.parent_state_id = None - - problem_db = fetch_user_problem(user, _ProblemOnly(problem_id), db_session) + problem_db = fetch_problem_with_role_check(user, problem_id, db_session) if session_id is not None or (problem_id is not None): interactive_session = fetch_interactive_session( diff --git a/desdeo/api/tests/test_constraint_variant.py b/desdeo/api/tests/test_constraint_variant.py new file mode 100644 index 000000000..c29727e35 --- /dev/null +++ b/desdeo/api/tests/test_constraint_variant.py @@ -0,0 +1,257 @@ +"""Tests for the constrained variant endpoint and DELETE problem behavior.""" + +from fastapi.testclient import TestClient +from sqlmodel import Session, select + +from desdeo.api.models import ( + ProblemDB, + User, + UserRole, +) +from desdeo.api.models.problem import ( + ConstrainedVariantRequest, + VariableFixing, +) +from desdeo.api.routers.user_authentication import get_password_hash +from desdeo.problem.testproblems import river_pollution_problem + +from .conftest import login, post_json + + +def test_variable_fixing_valid(): + """Create a VariableFixing and verify fields.""" + fixing = VariableFixing(variable_symbol="x_1", fixed_value=3.14, constraint_name="lock_x1") + assert fixing.variable_symbol == "x_1" + assert fixing.fixed_value == 3.14 + assert fixing.constraint_name == "lock_x1" + + +def test_constrained_variant_request_defaults(): + """Verify is_temporary defaults to True and name defaults to None.""" + req = ConstrainedVariantRequest(variable_fixings=[VariableFixing(variable_symbol="x_1", fixed_value=1.0)]) + assert req.is_temporary is True + assert req.name is None + + +def test_constrained_variant_success(client: TestClient, session_and_user: dict): + """POST valid fixings for an owned problem returns 200 with correct response.""" + session: Session = session_and_user["session"] + access_token = login(client) 
+ + # Use the river pollution problem (has variables x_1, x_2) + problem = session.exec(select(ProblemDB).where(ProblemDB.name == "The river pollution problem")).first() + + payload = { + "variable_fixings": [ + {"variable_symbol": "x_1", "fixed_value": 0.5}, + {"variable_symbol": "x_2", "fixed_value": 0.3}, + ], + } + + response = post_json(client, f"/problem/{problem.id}/constrained_variant", payload, access_token) + assert response.status_code == 200 + + data = response.json() + assert data["n_constraints_added"] == 2 + assert data["problem_id"] != problem.id + assert data["parent_problem_id"] == problem.id + assert data["is_temporary"] is True + assert "[variant]" in data["name"] + + +def test_constrained_variant_creates_db_record(client: TestClient, session_and_user: dict): + """After a successful POST, the new ProblemDB row exists with correct fields.""" + session: Session = session_and_user["session"] + access_token = login(client) + + problem = session.exec(select(ProblemDB).where(ProblemDB.name == "The river pollution problem")).first() + + payload = { + "variable_fixings": [{"variable_symbol": "x_1", "fixed_value": 1.0}], + "name": "Test Variant", + } + + response = post_json(client, f"/problem/{problem.id}/constrained_variant", payload, access_token) + assert response.status_code == 200 + + variant_id = response.json()["problem_id"] + variant_db = session.get(ProblemDB, variant_id) + assert variant_db is not None + assert variant_db.is_temporary is True + assert variant_db.parent_problem_id == problem.id + assert variant_db.name == "Test Variant" + + +def test_constrained_variant_unknown_symbol(client: TestClient, session_and_user: dict): + """POST a fixing with a nonexistent variable symbol returns 422.""" + session: Session = session_and_user["session"] + access_token = login(client) + + problem = session.exec(select(ProblemDB).where(ProblemDB.name == "The river pollution problem")).first() + + payload = { + "variable_fixings": [{"variable_symbol": 
"nonexistent_var", "fixed_value": 1.0}], + } + + response = post_json(client, f"/problem/{problem.id}/constrained_variant", payload, access_token) + assert response.status_code == 422 + assert "nonexistent_var" in response.json()["detail"] + + +def test_constrained_variant_wrong_owner(client: TestClient, session_and_user: dict): + """POST for a problem owned by a different user returns 403.""" + session: Session = session_and_user["session"] + + # Create another user and their problem + other_user = User( + username="other_variant", + password_hash=get_password_hash("other_variant"), + role=UserRole.analyst, + group="test", + ) + session.add(other_user) + session.commit() + session.refresh(other_user) + + other_problem = ProblemDB.from_problem(river_pollution_problem(), user=other_user) + session.add(other_problem) + session.commit() + session.refresh(other_problem) + + # Login as the original analyst user + access_token = login(client) + + payload = { + "variable_fixings": [{"variable_symbol": "x_1", "fixed_value": 1.0}], + } + + response = post_json(client, f"/problem/{other_problem.id}/constrained_variant", payload, access_token) + assert response.status_code == 403 + + +def test_constrained_variant_not_found(client: TestClient, session_and_user: dict): + """POST with a nonexistent problem_id returns 404.""" + access_token = login(client) + + payload = { + "variable_fixings": [{"variable_symbol": "x_1", "fixed_value": 1.0}], + } + + response = post_json(client, "/problem/99999/constrained_variant", payload, access_token) + assert response.status_code == 404 + + +def test_constrained_variant_preserves_original(client: TestClient, session_and_user: dict): + """Creating a variant does not modify the original problem.""" + session: Session = session_and_user["session"] + access_token = login(client) + + problem = session.exec(select(ProblemDB).where(ProblemDB.name == "The river pollution problem")).first() + original_name = problem.name + original_constraint_count = 
len(problem.constraints) + + payload = { + "variable_fixings": [ + {"variable_symbol": "x_1", "fixed_value": 0.5}, + {"variable_symbol": "x_2", "fixed_value": 0.3}, + ], + } + + response = post_json(client, f"/problem/{problem.id}/constrained_variant", payload, access_token) + assert response.status_code == 200 + + # Refresh and verify original is unchanged + session.refresh(problem) + assert problem.name == original_name + assert len(problem.constraints) == original_constraint_count + + +# --- Endpoint tests: DELETE /problem/{problem_id} --- + + +def test_delete_temporary_problem_success(client: TestClient, session_and_user: dict): + """DELETE a temporary variant returns 204 and removes the DB row.""" + session: Session = session_and_user["session"] + access_token = login(client) + + problem = session.exec(select(ProblemDB).where(ProblemDB.name == "The river pollution problem")).first() + + # Create a temporary variant + payload = { + "variable_fixings": [{"variable_symbol": "x_1", "fixed_value": 1.0}], + } + create_resp = post_json(client, f"/problem/{problem.id}/constrained_variant", payload, access_token) + assert create_resp.status_code == 200 + variant_id = create_resp.json()["problem_id"] + + # Delete it + response = client.delete( + f"/problem/{variant_id}", + headers={"Authorization": f"Bearer {access_token}"}, + ) + assert response.status_code == 204 + + # Verify gone + assert session.get(ProblemDB, variant_id) is None + + +def test_delete_non_temporary_problem_success(client: TestClient, session_and_user: dict): + """DELETE a non-temporary problem owned by the user returns 204.""" + session: Session = session_and_user["session"] + user = session_and_user["user"] + access_token = login(client) + + # Create a fresh non-temporary problem to delete (don't destroy shared fixtures) + problem = ProblemDB.from_problem(river_pollution_problem(), user=user) + session.add(problem) + session.commit() + session.refresh(problem) + assert not problem.is_temporary + + 
response = client.delete( + f"/problem/{problem.id}", + headers={"Authorization": f"Bearer {access_token}"}, + ) + assert response.status_code == 204 + assert session.get(ProblemDB, problem.id) is None + + +def test_delete_wrong_owner(client: TestClient, session_and_user: dict): + """DELETE a problem owned by a different user returns 403.""" + session: Session = session_and_user["session"] + + other_user = User( + username="other_delete", + password_hash=get_password_hash("other_delete"), + role=UserRole.analyst, + group="test", + ) + session.add(other_user) + session.commit() + session.refresh(other_user) + + other_problem = ProblemDB.from_problem(river_pollution_problem(), user=other_user) + other_problem.is_temporary = True + session.add(other_problem) + session.commit() + session.refresh(other_problem) + + # Login as the original analyst + access_token = login(client) + + response = client.delete( + f"/problem/{other_problem.id}", + headers={"Authorization": f"Bearer {access_token}"}, + ) + assert response.status_code == 403 + + +def test_delete_not_found(client: TestClient, session_and_user: dict): + """DELETE a nonexistent problem returns 404.""" + access_token = login(client) + + response = client.delete( + "/problem/99999", + headers={"Authorization": f"Bearer {access_token}"}, + ) + assert response.status_code == 404 diff --git a/desdeo/api/tests/test_delete_problem.py b/desdeo/api/tests/test_delete_problem.py index 20ddda0d0..99c723e60 100644 --- a/desdeo/api/tests/test_delete_problem.py +++ b/desdeo/api/tests/test_delete_problem.py @@ -100,11 +100,11 @@ def test_delete_problem_unauthorized(client: TestClient, session_and_user: dict) session: Session = session_and_user["session"] user: User = session_and_user["user"] - # Create a second user + # Create a second user (DM — cannot delete other users' problems) other_user = User( username="other", password_hash=get_password_hash("other"), - role=UserRole.analyst, + role=UserRole.dm, group="test", ) 
session.add(other_user) @@ -126,7 +126,7 @@ def test_delete_problem_unauthorized(client: TestClient, session_and_user: dict) headers={"Authorization": f"Bearer {other_token}"}, ) - assert response.status_code == status.HTTP_401_UNAUTHORIZED + assert response.status_code == status.HTTP_403_FORBIDDEN # Problem should still exist assert session.get(ProblemDB, problem_id) is not None diff --git a/desdeo/api/tests/test_problem_on_behalf.py b/desdeo/api/tests/test_problem_on_behalf.py new file mode 100644 index 000000000..cc65753b3 --- /dev/null +++ b/desdeo/api/tests/test_problem_on_behalf.py @@ -0,0 +1,277 @@ +"""Tests for analyst adding problems on behalf of decision makers.""" + +from fastapi import status +from fastapi.testclient import TestClient + +from desdeo.api.models import ProblemInfo, UserPublic, UserRole +from desdeo.problem.testproblems import simple_knapsack_vectors + +from .conftest import get_json, login, post_file_multipart, post_json + + +def _add_dm(client: TestClient, analyst_token: str, username: str, password: str) -> None: + """Helper: create a DM user via the API.""" + response = client.post( + "/add_new_dm", + data={"username": username, "password": password, "grant_type": "password"}, + headers={"Authorization": f"Bearer {analyst_token}", "content-type": "application/x-www-form-urlencoded"}, + ) + assert response.status_code == status.HTTP_201_CREATED + + +def test_list_dms_as_analyst(client: TestClient): + """Analyst can retrieve the list of DM users.""" + analyst_token = login(client) + + # No DMs yet — list should be empty + response = get_json(client, "/users/dms", analyst_token) + assert response.status_code == status.HTTP_200_OK + assert response.json() == [] + + # Create two DM users + _add_dm(client, analyst_token, "dm_one", "dm_one") + _add_dm(client, analyst_token, "dm_two", "dm_two") + + response = get_json(client, "/users/dms", analyst_token) + assert response.status_code == status.HTTP_200_OK + + dms = 
[UserPublic.model_validate(u) for u in response.json()] + usernames = {dm.username for dm in dms} + assert usernames == {"dm_one", "dm_two"} + assert all(dm.role == UserRole.dm for dm in dms) + + +def test_list_dms_as_dm_forbidden(client: TestClient): + """DM users cannot list other DM users.""" + analyst_token = login(client) + _add_dm(client, analyst_token, "dm_user", "dm_user") + + dm_token = login(client, username="dm_user", password="dm_user") # noqa: S106 + response = get_json(client, "/users/dms", dm_token) + assert response.status_code == status.HTTP_403_FORBIDDEN + + +def test_list_dms_unauthenticated(client: TestClient): + """Unauthenticated requests to /users/dms are rejected.""" + response = client.get("/users/dms") + assert response.status_code == status.HTTP_401_UNAUTHORIZED + + +def test_add_problem_for_dm_as_analyst(client: TestClient, session_and_user: dict): + """Analyst can add a problem that is owned by a DM.""" + analyst_token = login(client) + + # Create a DM + _add_dm(client, analyst_token, "target_dm", "target_dm") + dm_token = login(client, username="target_dm", password="target_dm") # noqa: S106 + + # Fetch DM id from /users/dms + dms = get_json(client, "/users/dms", analyst_token).json() + dm_id = next(u["id"] for u in dms if u["username"] == "target_dm") + + # Analyst submits a problem on behalf of the DM + problem = simple_knapsack_vectors() + response = post_json(client, f"/problem/add?target_user_id={dm_id}", problem.model_dump(), analyst_token) + assert response.status_code == status.HTTP_200_OK + + info = ProblemInfo.model_validate(response.json()) + assert info.name == "Simple two-objective Knapsack problem" + + # DM should now own the problem + dm_problems = get_json(client, "/problem/all", dm_token).json() + assert any(p["name"] == "Simple two-objective Knapsack problem" for p in dm_problems) + + # Analyst sees all problems (including the DM's), but user_id must belong to the DM + all_problems = get_json(client, 
"/problem/all_info", analyst_token).json() + created = next(p for p in all_problems if p["name"] == "Simple two-objective Knapsack problem") + assert created["user_id"] == dm_id + + +def test_add_problem_for_dm_as_dm_forbidden(client: TestClient): + """A DM cannot add a problem on behalf of another user.""" + analyst_token = login(client) + + # Create two DMs + _add_dm(client, analyst_token, "dm_a", "dm_a") + _add_dm(client, analyst_token, "dm_b", "dm_b") + + # Fetch dm_b's id + dms = get_json(client, "/users/dms", analyst_token).json() + dm_b_id = next(u["id"] for u in dms if u["username"] == "dm_b") + + # dm_a tries to add a problem for dm_b — should be forbidden + dm_a_token = login(client, username="dm_a", password="dm_a") # noqa: S106 + problem = simple_knapsack_vectors() + response = post_json(client, f"/problem/add?target_user_id={dm_b_id}", problem.model_dump(), dm_a_token) + assert response.status_code == status.HTTP_403_FORBIDDEN + + +def test_add_problem_for_nonexistent_user(client: TestClient): + """Adding a problem for a non-existent target_user_id returns 404.""" + analyst_token = login(client) + problem = simple_knapsack_vectors() + response = post_json(client, "/problem/add?target_user_id=99999", problem.model_dump(), analyst_token) + assert response.status_code == status.HTTP_404_NOT_FOUND + + +def test_add_problem_json_for_dm_as_analyst(client: TestClient): + """Analyst can upload a JSON problem file on behalf of a DM.""" + analyst_token = login(client) + _add_dm(client, analyst_token, "json_dm", "json_dm") + dm_token = login(client, username="json_dm", password="json_dm") # noqa: S106 + + dms = get_json(client, "/users/dms", analyst_token).json() + dm_id = next(u["id"] for u in dms if u["username"] == "json_dm") + + # Serialize a problem to JSON bytes + problem = simple_knapsack_vectors() + problem_bytes = problem.model_dump_json(by_alias=True).encode() + + response = post_file_multipart( + client, + f"/problem/add_json?target_user_id={dm_id}", + 
problem_bytes, + analyst_token, + ) + assert response.status_code == status.HTTP_200_OK + + info = ProblemInfo.model_validate(response.json()) + assert info.name == "Simple two-objective Knapsack problem" + + # DM should own the problem + dm_problems = get_json(client, "/problem/all", dm_token).json() + assert any(p["name"] == "Simple two-objective Knapsack problem" for p in dm_problems) + + +def test_add_problem_json_for_dm_as_dm_forbidden(client: TestClient): + """A DM cannot upload a JSON problem on behalf of another user.""" + analyst_token = login(client) + _add_dm(client, analyst_token, "jdm_a", "jdm_a") + _add_dm(client, analyst_token, "jdm_b", "jdm_b") + + dms = get_json(client, "/users/dms", analyst_token).json() + jdm_b_id = next(u["id"] for u in dms if u["username"] == "jdm_b") + + jdm_a_token = login(client, username="jdm_a", password="jdm_a") # noqa: S106 + problem = simple_knapsack_vectors() + problem_bytes = problem.model_dump_json(by_alias=True).encode() + + response = post_file_multipart( + client, + f"/problem/add_json?target_user_id={jdm_b_id}", + problem_bytes, + jdm_a_token, + ) + assert response.status_code == status.HTTP_403_FORBIDDEN + + +def _add_problem_as_dm(client: TestClient, dm_token: str) -> int: + """Helper: DM adds a problem and returns its id.""" + problem = simple_knapsack_vectors() + response = post_json(client, "/problem/add", problem.model_dump(), dm_token) + assert response.status_code == status.HTTP_200_OK + return response.json()["id"] + + +def test_analyst_sees_all_problems(client: TestClient): + """Analyst sees problems from all users in GET /problem/all_info.""" + analyst_token = login(client) + _add_dm(client, analyst_token, "vis_dm", "vis_dm") + dm_token = login(client, username="vis_dm", password="vis_dm") # noqa: S106 + + dm_problem_id = _add_problem_as_dm(client, dm_token) + + response = get_json(client, "/problem/all_info", analyst_token) + assert response.status_code == status.HTTP_200_OK + ids = [p["id"] for p in 
response.json()] + assert dm_problem_id in ids + + +def test_dm_only_sees_own_problems(client: TestClient): + """A DM cannot see another DM's problems in GET /problem/all_info.""" + analyst_token = login(client) + _add_dm(client, analyst_token, "dm_x", "dm_x") + _add_dm(client, analyst_token, "dm_y", "dm_y") + + dm_x_token = login(client, username="dm_x", password="dm_x") # noqa: S106 + dm_y_token = login(client, username="dm_y", password="dm_y") # noqa: S106 + + dm_x_problem_id = _add_problem_as_dm(client, dm_x_token) + + # dm_y lists problems — must not see dm_x's problem + response = get_json(client, "/problem/all_info", dm_y_token) + assert response.status_code == status.HTTP_200_OK + ids = [p["id"] for p in response.json()] + assert dm_x_problem_id not in ids + + +def test_analyst_can_get_problem_json_for_dm(client: TestClient): + """Analyst can download a DM's problem as JSON.""" + analyst_token = login(client) + _add_dm(client, analyst_token, "json_owner", "json_owner") + dm_token = login(client, username="json_owner", password="json_owner") # noqa: S106 + + dm_problem_id = _add_problem_as_dm(client, dm_token) + + response = get_json(client, f"/problem/{dm_problem_id}/json", analyst_token) + assert response.status_code == status.HTTP_200_OK + + +def test_dm_cannot_get_problem_json_of_other_dm(client: TestClient): + """A DM cannot download another DM's problem JSON.""" + analyst_token = login(client) + _add_dm(client, analyst_token, "owner_dm", "owner_dm") + _add_dm(client, analyst_token, "thief_dm", "thief_dm") + + owner_token = login(client, username="owner_dm", password="owner_dm") # noqa: S106 + thief_token = login(client, username="thief_dm", password="thief_dm") # noqa: S106 + + owner_problem_id = _add_problem_as_dm(client, owner_token) + + response = get_json(client, f"/problem/{owner_problem_id}/json", thief_token) + assert response.status_code == status.HTTP_403_FORBIDDEN + + +def test_analyst_can_delete_dm_problem(client: TestClient): + """Analyst 
can delete a DM's problem.""" + analyst_token = login(client) + _add_dm(client, analyst_token, "del_dm", "del_dm") + dm_token = login(client, username="del_dm", password="del_dm") # noqa: S106 + + dm_problem_id = _add_problem_as_dm(client, dm_token) + + response = client.delete( + f"/problem/{dm_problem_id}", + headers={"Authorization": f"Bearer {analyst_token}"}, + ) + assert response.status_code == status.HTTP_204_NO_CONTENT + + +def test_analyst_can_assign_solver_for_dm_problem(client: TestClient): + """Analyst can assign a solver to a DM's problem.""" + analyst_token = login(client) + _add_dm(client, analyst_token, "solver_dm", "solver_dm") + dm_token = login(client, username="solver_dm", password="solver_dm") # noqa: S106 + + dm_problem_id = _add_problem_as_dm(client, dm_token) + + response = post_json( + client, + "/problem/assign_solver", + {"problem_id": dm_problem_id, "solver_string_representation": "scipy_minimize"}, + analyst_token, + ) + assert response.status_code == status.HTTP_200_OK + + +def test_analyst_can_get_repr_solution_sets_for_dm(client: TestClient): + """Analyst can list representative solution sets for a DM's problem.""" + analyst_token = login(client) + _add_dm(client, analyst_token, "repr_dm", "repr_dm") + dm_token = login(client, username="repr_dm", password="repr_dm") # noqa: S106 + + dm_problem_id = _add_problem_as_dm(client, dm_token) + + response = get_json(client, f"/problem/{dm_problem_id}/all_representative_solution_sets", analyst_token) + assert response.status_code == status.HTTP_200_OK + assert response.json() == [] diff --git a/desdeo/api/tests/test_routes.py b/desdeo/api/tests/test_routes.py index 74df63135..a60168ff2 100644 --- a/desdeo/api/tests/test_routes.py +++ b/desdeo/api/tests/test_routes.py @@ -264,8 +264,8 @@ def test_get_all_sessions_success(client: TestClient, session_and_user: dict): assert len(data) == 2 -def test_get_all_sessions_not_found(client: TestClient, session_and_user: dict): - """Test get_all returns 
404 if user has no sessions.""" +def test_get_all_sessions_empty(client: TestClient, session_and_user: dict): + """Test get_all returns 200 + empty list if user has no sessions.""" access_token = login(client) response = client.get( @@ -273,7 +273,8 @@ def test_get_all_sessions_not_found(client: TestClient, session_and_user: dict): headers={"Authorization": f"Bearer {access_token}"}, ) - assert response.status_code == status.HTTP_404_NOT_FOUND + assert response.status_code == status.HTTP_200_OK + assert response.json() == [] def test_delete_session_success(client: TestClient, session_and_user: dict): @@ -683,11 +684,13 @@ def test_nimbus_save_and_delete_save(client: TestClient): def test_add_new_dm(client: TestClient): """Test that adding a decision maker works.""" + access_token = login(client) + # Create a new user to the database good_response = client.post( "/add_new_dm", data={"username": "new_dm", "password": "new_dm", "grant_type": "password"}, - headers={"content-type": "application/x-www-form-urlencoded"}, + headers={"Authorization": f"Bearer {access_token}", "content-type": "application/x-www-form-urlencoded"}, ) assert good_response.status_code == status.HTTP_201_CREATED @@ -695,7 +698,7 @@ def test_add_new_dm(client: TestClient): bad_response = client.post( "/add_new_dm", data={"username": "new_dm", "password": "new_dm", "grant_type": "password"}, - headers={"content-type": "application/x-www-form-urlencoded"}, + headers={"Authorization": f"Bearer {access_token}", "content-type": "application/x-www-form-urlencoded"}, ) assert bad_response.status_code == status.HTTP_409_CONFLICT @@ -713,10 +716,14 @@ def test_add_new_analyst(client: TestClient): assert nologin_response.status_code == status.HTTP_401_UNAUTHORIZED # Try to create an analyst using a dm account. 
+ analyst_token_for_setup = login(client) response = client.post( "/add_new_dm", data={"username": "new_dm", "password": "new_dm", "grant_type": "password"}, - headers={"content-type": "application/x-www-form-urlencoded"}, + headers={ + "Authorization": f"Bearer {analyst_token_for_setup}", + "content-type": "application/x-www-form-urlencoded", + }, ) assert response.status_code == status.HTTP_201_CREATED @@ -836,7 +843,7 @@ def get_user_info(token: str): response = client.post( "/add_new_dm", data={"username": "new_dm", "password": "new_dm", "grant_type": "password"}, - headers={"content-type": "application/x-www-form-urlencoded"}, + headers={"Authorization": f"Bearer {access_token}", "content-type": "application/x-www-form-urlencoded"}, ) assert response.status_code == status.HTTP_201_CREATED @@ -1047,7 +1054,7 @@ def test_gdm_score_bands(client: TestClient): response = client.post( "/add_new_dm", data={"username": "dm", "password": "dm", "grant_type": "password"}, - headers={"content-type": "application/x-www-form-urlencoded"}, + headers={"Authorization": f"Bearer {access_token}", "content-type": "application/x-www-form-urlencoded"}, ) assert response.status_code == 201 diff --git a/desdeo/api/tests/test_session_management.py b/desdeo/api/tests/test_session_management.py new file mode 100644 index 000000000..82450a7cb --- /dev/null +++ b/desdeo/api/tests/test_session_management.py @@ -0,0 +1,184 @@ +"""Tests for analyst/admin management of other users' interactive sessions.""" + +from fastapi import status +from fastapi.testclient import TestClient + +from .conftest import get_json, login, post_json + + +def _add_dm(client: TestClient, analyst_token: str, username: str, password: str) -> None: + """Helper: create a DM user via the API.""" + response = client.post( + "/add_new_dm", + data={"username": username, "password": password, "grant_type": "password"}, + headers={"Authorization": f"Bearer {analyst_token}", "content-type": 
"application/x-www-form-urlencoded"}, + ) + assert response.status_code == status.HTTP_201_CREATED + + +def _create_session(client: TestClient, token: str, info: str | None = None) -> int: + """Helper: create a session and return its id.""" + response = post_json(client, "/session/new", {"info": info}, token) + assert response.status_code == status.HTTP_200_OK + return response.json()["id"] + + +def test_analyst_sees_all_sessions(client: TestClient): + """Analyst's GET /session/get_all includes sessions from DM users.""" + analyst_token = login(client) + _add_dm(client, analyst_token, "dm_list", "dm_list") + dm_token = login(client, username="dm_list", password="dm_list") # noqa: S106 + + dm_session_id = _create_session(client, dm_token, "DM's session") + + response = get_json(client, "/session/get_all", analyst_token) + assert response.status_code == status.HTTP_200_OK + ids = [s["id"] for s in response.json()] + assert dm_session_id in ids + + +def test_dm_sees_only_own_sessions(client: TestClient): + """DM cannot see another DM's sessions in GET /session/get_all.""" + analyst_token = login(client) + _add_dm(client, analyst_token, "dm_x", "dm_x") + _add_dm(client, analyst_token, "dm_y", "dm_y") + + dm_x_token = login(client, username="dm_x", password="dm_x") # noqa: S106 + dm_y_token = login(client, username="dm_y", password="dm_y") # noqa: S106 + + dm_x_session_id = _create_session(client, dm_x_token) + + response = get_json(client, "/session/get_all", dm_y_token) + assert response.status_code == status.HTTP_200_OK + ids = [s["id"] for s in response.json()] + assert dm_x_session_id not in ids + + +def test_empty_session_list_returns_ok(client: TestClient): + """GET /session/get_all returns 200 + empty list when user has no sessions.""" + analyst_token = login(client) + _add_dm(client, analyst_token, "dm_empty", "dm_empty") + dm_token = login(client, username="dm_empty", password="dm_empty") # noqa: S106 + + response = get_json(client, "/session/get_all", 
dm_token) + assert response.status_code == status.HTTP_200_OK + assert response.json() == [] + + +def test_analyst_creates_session_for_dm(client: TestClient): + """Analyst can create a session owned by a DM via ?target_user_id=.""" + analyst_token = login(client) + _add_dm(client, analyst_token, "dm_target", "dm_target") + dm_token = login(client, username="dm_target", password="dm_target") # noqa: S106 + + dms = get_json(client, "/users/dms", analyst_token).json() + dm_id = next(u["id"] for u in dms if u["username"] == "dm_target") + + response = post_json( + client, + f"/session/new?target_user_id={dm_id}", + {"info": "created by analyst"}, + analyst_token, + ) + assert response.status_code == status.HTTP_200_OK + session_data = response.json() + assert session_data["user_id"] == dm_id + + # DM should see it in their own listing + dm_sessions = get_json(client, "/session/get_all", dm_token).json() + assert any(s["id"] == session_data["id"] for s in dm_sessions) + + +def test_dm_cannot_create_for_other_dm(client: TestClient): + """DM cannot create a session for another user via ?target_user_id=.""" + analyst_token = login(client) + _add_dm(client, analyst_token, "dm_a", "dm_a") + _add_dm(client, analyst_token, "dm_b", "dm_b") + + dms = get_json(client, "/users/dms", analyst_token).json() + dm_b_id = next(u["id"] for u in dms if u["username"] == "dm_b") + + dm_a_token = login(client, username="dm_a", password="dm_a") # noqa: S106 + response = post_json( + client, + f"/session/new?target_user_id={dm_b_id}", + {"info": "should fail"}, + dm_a_token, + ) + assert response.status_code == status.HTTP_403_FORBIDDEN + + +def test_create_session_for_nonexistent_user(client: TestClient): + """Creating a session with a nonexistent target_user_id returns 404.""" + analyst_token = login(client) + response = post_json(client, "/session/new?target_user_id=99999", {"info": None}, analyst_token) + assert response.status_code == status.HTTP_404_NOT_FOUND + + +def 
test_analyst_can_get_dm_session_by_id(client: TestClient): + """Analyst can GET a DM's session by its ID.""" + analyst_token = login(client) + _add_dm(client, analyst_token, "dm_get", "dm_get") + dm_token = login(client, username="dm_get", password="dm_get") # noqa: S106 + + dm_session_id = _create_session(client, dm_token) + + response = get_json(client, f"/session/get/{dm_session_id}", analyst_token) + assert response.status_code == status.HTTP_200_OK + assert response.json()["id"] == dm_session_id + + +def test_dm_cannot_get_other_dm_session_by_id(client: TestClient): + """DM cannot GET another DM's session by ID.""" + analyst_token = login(client) + _add_dm(client, analyst_token, "dm_owner", "dm_owner") + _add_dm(client, analyst_token, "dm_thief", "dm_thief") + + owner_token = login(client, username="dm_owner", password="dm_owner") # noqa: S106 + thief_token = login(client, username="dm_thief", password="dm_thief") # noqa: S106 + + owner_session_id = _create_session(client, owner_token) + + response = get_json(client, f"/session/get/{owner_session_id}", thief_token) + assert response.status_code == status.HTTP_404_NOT_FOUND + + +def test_analyst_can_delete_dm_session(client: TestClient): + """Analyst can delete a DM's session.""" + analyst_token = login(client) + _add_dm(client, analyst_token, "dm_del", "dm_del") + dm_token = login(client, username="dm_del", password="dm_del") # noqa: S106 + + dm_session_id = _create_session(client, dm_token) + + response = client.delete( + f"/session/{dm_session_id}", + headers={"Authorization": f"Bearer {analyst_token}"}, + ) + assert response.status_code == status.HTTP_204_NO_CONTENT + + # Confirm it's gone + response = get_json(client, f"/session/get/{dm_session_id}", analyst_token) + assert response.status_code == status.HTTP_404_NOT_FOUND + + +def test_dm_cannot_delete_other_dm_session(client: TestClient): + """DM cannot delete another DM's session (regression: latent ownership bug).""" + analyst_token = login(client) + 
_add_dm(client, analyst_token, "dm_keep", "dm_keep") + _add_dm(client, analyst_token, "dm_attacker", "dm_attacker") + + keep_token = login(client, username="dm_keep", password="dm_keep") # noqa: S106 + attacker_token = login(client, username="dm_attacker", password="dm_attacker") # noqa: S106 + + target_session_id = _create_session(client, keep_token) + + response = client.delete( + f"/session/{target_session_id}", + headers={"Authorization": f"Bearer {attacker_token}"}, + ) + assert response.status_code == status.HTTP_404_NOT_FOUND + + # Session should still exist + response = get_json(client, f"/session/get/{target_session_id}", analyst_token) + assert response.status_code == status.HTTP_200_OK diff --git a/desdeo/api/tests/test_site_selection.py b/desdeo/api/tests/test_site_selection.py new file mode 100644 index 000000000..752c85c71 --- /dev/null +++ b/desdeo/api/tests/test_site_selection.py @@ -0,0 +1,316 @@ +"""Tests for the site selection metadata and map endpoints.""" + +import json + +from fastapi.testclient import TestClient +from sqlmodel import Session, select + +from desdeo.api.models import ( + ProblemDB, + ProblemMetaDataDB, + SiteSelectionMetaData, + User, + UserRole, +) +from desdeo.api.routers.user_authentication import get_password_hash +from desdeo.problem.testproblems import river_pollution_problem + +from .conftest import login, post_json + +# --- Minimal synthetic data for tests --- + +SITES = [ + {"name": "Library A", "node": "CityA", "lat": 40.0, "lon": -83.0}, + {"name": "Center B", "node": "CityA", "lat": 40.01, "lon": -83.01}, + {"name": "Clinic C", "node": "CityB", "lat": 40.1, "lon": -83.1}, +] +NODES = [ + {"name": "CityA", "lat": 40.0, "lon": -83.0, "size": 10.0}, + {"name": "CityB", "lat": 40.1, "lon": -83.1, "size": 5.0}, +] +TRAVEL_TIME_MATRIX = [ + [0.0, 10.0], + [10.0, 0.0], +] +SITE_VARS = ["x_1", "x_2", "x_3"] +COVER_VARS = ["cover_1", "cover_2"] + + +# --- Model tests --- + + +def 
test_site_selection_metadata_create(session_and_user: dict): + """Create a SiteSelectionMetaData instance and verify round-trip through DB.""" + session: Session = session_and_user["session"] + + problem = session.exec(select(ProblemDB)).first() + metadata_db = ProblemMetaDataDB(problem_id=problem.id) + session.add(metadata_db) + session.commit() + session.refresh(metadata_db) + + site_sel = SiteSelectionMetaData( + metadata_id=metadata_db.id, + sites_json=json.dumps(SITES), + nodes_json=json.dumps(NODES), + travel_time_matrix_json=json.dumps(TRAVEL_TIME_MATRIX), + site_variable_symbols=SITE_VARS, + coverage_variable_symbols=COVER_VARS, + coverage_threshold=15.0, + ) + session.add(site_sel) + session.commit() + session.refresh(site_sel) + + loaded = session.get(SiteSelectionMetaData, site_sel.id) + assert loaded is not None + assert loaded.site_variable_symbols == SITE_VARS + assert loaded.coverage_variable_symbols == COVER_VARS + assert json.loads(loaded.sites_json) == SITES + assert json.loads(loaded.nodes_json) == NODES + assert json.loads(loaded.travel_time_matrix_json) == TRAVEL_TIME_MATRIX + assert loaded.coverage_threshold == 15.0 + + +def test_site_selection_metadata_coverage_optional(session_and_user: dict): + """Verify that coverage_variable_symbols=None is accepted.""" + session: Session = session_and_user["session"] + + problem = session.exec(select(ProblemDB)).first() + metadata_db = ProblemMetaDataDB(problem_id=problem.id) + session.add(metadata_db) + session.commit() + session.refresh(metadata_db) + + site_sel = SiteSelectionMetaData( + metadata_id=metadata_db.id, + sites_json=json.dumps(SITES), + nodes_json=json.dumps(NODES), + travel_time_matrix_json=json.dumps(TRAVEL_TIME_MATRIX), + site_variable_symbols=SITE_VARS, + coverage_variable_symbols=None, + coverage_threshold=20.0, + ) + session.add(site_sel) + session.commit() + session.refresh(site_sel) + + loaded = session.get(SiteSelectionMetaData, site_sel.id) + assert loaded is not None + assert 
loaded.coverage_variable_symbols is None + + +# --- Endpoint tests: /site-selection/load_metadata --- + + +def test_load_metadata_success(client: TestClient, session_and_user: dict): + """POST with valid data for an owned problem returns 200.""" + access_token = login(client) + + session: Session = session_and_user["session"] + problem = session.exec(select(ProblemDB)).first() + + payload = { + "problem_id": problem.id, + "sites": SITES, + "nodes": NODES, + "travel_time_matrix": TRAVEL_TIME_MATRIX, + "site_variable_symbols": SITE_VARS, + "coverage_variable_symbols": COVER_VARS, + "coverage_threshold": 15.0, + } + + response = post_json(client, "/site-selection/load_metadata", payload, access_token) + assert response.status_code == 200 + + data = response.json() + assert data["site_variable_symbols"] == SITE_VARS + assert data["coverage_variable_symbols"] == COVER_VARS + + # Verify in DB + loaded = session.exec(select(SiteSelectionMetaData)).first() + assert loaded is not None + + +def test_load_metadata_wrong_owner(client: TestClient, session_and_user: dict): + """POST with a problem_id owned by a different user returns 403.""" + session: Session = session_and_user["session"] + + # Create a second user and a problem owned by them + other_user = User( + username="other", + password_hash=get_password_hash("other"), + role=UserRole.analyst, + group="test", + ) + session.add(other_user) + session.commit() + session.refresh(other_user) + + other_problem = ProblemDB.from_problem(river_pollution_problem(), user=other_user) + session.add(other_problem) + session.commit() + session.refresh(other_problem) + + # Login as the original "analyst" user + access_token = login(client) + + payload = { + "problem_id": other_problem.id, + "sites": SITES, + "nodes": NODES, + "travel_time_matrix": TRAVEL_TIME_MATRIX, + "site_variable_symbols": SITE_VARS, + } + + response = post_json(client, "/site-selection/load_metadata", payload, access_token) + assert response.status_code == 403 + + 
+def test_load_metadata_problem_not_found(client: TestClient, session_and_user: dict): + """POST with a nonexistent problem_id returns 404.""" + access_token = login(client) + + payload = { + "problem_id": 99999, + "sites": SITES, + "nodes": NODES, + "travel_time_matrix": TRAVEL_TIME_MATRIX, + "site_variable_symbols": SITE_VARS, + } + + response = post_json(client, "/site-selection/load_metadata", payload, access_token) + assert response.status_code == 404 + + +# --- Endpoint tests: /site-selection/map --- + + +def _setup_metadata(session: Session, problem_id: int, coverage: bool = True) -> SiteSelectionMetaData: + """Helper: create ProblemMetaDataDB + SiteSelectionMetaData for a problem.""" + metadata_db = session.exec(select(ProblemMetaDataDB).where(ProblemMetaDataDB.problem_id == problem_id)).first() + if metadata_db is None: + metadata_db = ProblemMetaDataDB(problem_id=problem_id) + session.add(metadata_db) + session.commit() + session.refresh(metadata_db) + + site_sel = SiteSelectionMetaData( + metadata_id=metadata_db.id, + sites_json=json.dumps(SITES), + nodes_json=json.dumps(NODES), + travel_time_matrix_json=json.dumps(TRAVEL_TIME_MATRIX), + site_variable_symbols=SITE_VARS, + coverage_variable_symbols=COVER_VARS if coverage else None, + coverage_threshold=15.0, + ) + session.add(site_sel) + session.commit() + session.refresh(site_sel) + return site_sel + + +def test_map_success(client: TestClient, session_and_user: dict): + """POST with valid problem_id and solution returns nodes, edges, center.""" + session: Session = session_and_user["session"] + access_token = login(client) + + problem = session.exec(select(ProblemDB)).first() + _setup_metadata(session, problem.id) + + payload = { + "problem_id": problem.id, + "optimal_variables": {"x_1": 1.0, "x_2": 0.0, "x_3": 1.0, "cover_1": 1.0, "cover_2": 0.0}, + } + + response = post_json(client, "/site-selection/map", payload, access_token) + assert response.status_code == 200 + + data = response.json() + assert 
"nodes" in data + assert "edges" in data + assert "center" in data + assert len(data["nodes"]) == 2 + assert len(data["center"]) == 2 + + +def test_map_no_metadata(client: TestClient, session_and_user: dict): + """POST with a problem_id that has no SiteSelectionMetaData returns 404.""" + session: Session = session_and_user["session"] + access_token = login(client) + + # Use a problem that has no site selection metadata (river pollution from conftest) + problem = session.exec(select(ProblemDB).where(ProblemDB.name == "The river pollution problem")).first() + + payload = { + "problem_id": problem.id, + "optimal_variables": {}, + } + + response = post_json(client, "/site-selection/map", payload, access_token) + # Could be 404 — river pollution has ProblemMetaDataDB from conftest but no site_selection_metadata + assert response.status_code == 404 + + +def test_map_color_active(client: TestClient, session_and_user: dict): + """A node hosting a visited site receives COLOR_ACTIVE (#FFA500).""" + session: Session = session_and_user["session"] + access_token = login(client) + + problem = session.exec(select(ProblemDB)).first() + _setup_metadata(session, problem.id) + + # x_1 and x_2 are in CityA, x_3 is in CityB — visit only x_1 + payload = { + "problem_id": problem.id, + "optimal_variables": {"x_1": 1.0, "x_2": 0.0, "x_3": 0.0, "cover_1": 0.0, "cover_2": 0.0}, + } + + response = post_json(client, "/site-selection/map", payload, access_token) + assert response.status_code == 200 + + data = response.json() + city_a = next(n for n in data["nodes"] if n["name"] == "CityA") + assert city_a["color"] == "#FFA500" + + +def test_map_color_inactive(client: TestClient, session_and_user: dict): + """A node with no visited sites and no coverage receives COLOR_INACTIVE (#808080).""" + session: Session = session_and_user["session"] + access_token = login(client) + + problem = session.exec(select(ProblemDB)).first() + _setup_metadata(session, problem.id) + + # Visit x_1 (CityA only), no 
coverage + payload = { + "problem_id": problem.id, + "optimal_variables": {"x_1": 1.0, "x_2": 0.0, "x_3": 0.0, "cover_1": 0.0, "cover_2": 0.0}, + } + + response = post_json(client, "/site-selection/map", payload, access_token) + assert response.status_code == 200 + + data = response.json() + city_b = next(n for n in data["nodes"] if n["name"] == "CityB") + assert city_b["color"] == "#808080" + + +def test_map_no_coverage_variables(client: TestClient, session_and_user: dict): + """POST with metadata where coverage_variable_symbols=None returns 200 with no edges.""" + session: Session = session_and_user["session"] + access_token = login(client) + + problem = session.exec(select(ProblemDB)).first() + _setup_metadata(session, problem.id, coverage=False) + + payload = { + "problem_id": problem.id, + "optimal_variables": {"x_1": 1.0, "x_2": 0.0, "x_3": 0.0}, + } + + response = post_json(client, "/site-selection/map", payload, access_token) + assert response.status_code == 200 + + data = response.json() + assert data["edges"] == [] diff --git a/desdeo/mcdm/enautilus.py b/desdeo/mcdm/enautilus.py index 6d8ec8497..dd30fcd6d 100644 --- a/desdeo/mcdm/enautilus.py +++ b/desdeo/mcdm/enautilus.py @@ -19,6 +19,7 @@ numpy_array_to_objective_dict, objective_dict_to_numpy_array, ) +from desdeo.problem.schema import TensorVariable from desdeo.tools import SolverResults, flip_maximized_objective_values @@ -61,11 +62,30 @@ def enautilus_get_representative_solutions( supplied `non_dominated_points` should contain this information. 
""" obj_syms = [obj.symbol for obj in problem.objectives] - var_syms = [var.symbol for var in problem.variables] const_syms = [con.symbol for con in problem.constraints] if problem.constraints else None extra_syms = [extra.symbol for extra in problem.extra_funcs] if problem.extra_funcs else None scal_syms = [scal.symbol for scal in problem.scalarization_funcs] if problem.scalarization_funcs else None + # Build the list of variable column names present in the DataFrame. + # Scalar variables use their symbol directly; tensor variables are unrolled + # into columns like "sv_1", "sv_2", ... matching the convention used by + # solve_epsilon.py and the representative solution set upload. + available_cols = set(non_dominated_points.columns) + var_col_names: list[str] = [] + for var in problem.variables: + if isinstance(var, TensorVariable): + # Collect unrolled columns: symbol_1, symbol_2, ... + n_elements = 1 + for d in var.shape: + n_elements *= d + for i in range(1, n_elements + 1): + col = f"{var.symbol}_{i}" + if col in available_cols: + var_col_names.append(col) + else: + if var.symbol in available_cols: + var_col_names.append(var.symbol) + # Objective matrix (rows = ND points, cols = objectives, original senses) obj_matrix = non_dominated_points.select(obj_syms).to_numpy() @@ -79,7 +99,7 @@ def enautilus_get_representative_solutions( row = non_dominated_points[idx] - var_dict = {sym: row[sym] for sym in var_syms if sym in row} + var_dict = {col: row[col] for col in var_col_names} obj_dict = {sym: row[sym] for sym in obj_syms} const_dict = {sym: row[sym] for sym in const_syms if sym in row} if const_syms is not None else None extra_dict = {sym: row[sym] for sym in extra_syms if sym in row} if extra_syms is not None else None diff --git a/desdeo/problem/testproblems/dmitry_forest_problem_discrete.py b/desdeo/problem/testproblems/dmitry_forest_problem_discrete.py index 4383d0cd6..8e7bfe139 100644 --- a/desdeo/problem/testproblems/dmitry_forest_problem_discrete.py +++ 
b/desdeo/problem/testproblems/dmitry_forest_problem_discrete.py @@ -20,9 +20,12 @@ def dmitry_forest_problem_disc() -> Problem: Returns: Problem: A problem instance representing the forest problem. """ - # Navigate from this file (desdeo/problem/testproblems/) up to the repo root - path = Path(__file__).resolve().parent.parent.parent.parent - path = path / "tests/data/dmitry_discrete_repr/dmitry_forest_problem_non_dom_solns.csv" + # __file__ is desdeo/problem/testproblems/dmitry_forest_problem_discrete.py + # CSV is at /tests/data/dmitry_discrete_repr/... + path = ( + Path(__file__).resolve().parent.parent.parent.parent + / "tests/data/dmitry_discrete_repr/dmitry_forest_problem_non_dom_solns.csv" + ) obj_names = ["Rev", "HA", "Carb", "DW"] @@ -65,10 +68,7 @@ def dmitry_forest_problem_disc() -> Problem: return Problem( name="Dmitry Forest Problem (Discrete)", - description=( - "Defines a forest problem with four objectives: " - "revenue, habitat availability, carbon storage, and deadwood." - ), + description="Defines a forest problem with four objectives: revenue, habitat availability, carbon storage, and deadwood.", variables=variables, objectives=objectives, discrete_representation=discrete_def, diff --git a/docs/howtoguides/deploying_on_openshift.md b/docs/howtoguides/deploying_on_openshift.md new file mode 100644 index 000000000..4924984b7 --- /dev/null +++ b/docs/howtoguides/deploying_on_openshift.md @@ -0,0 +1,602 @@ +# How to deploy DESDEO on OpenShift (Kubernetes) + +## Overview + +This guide walks through deploying the full DESDEO stack, FastAPI backend, +SvelteKit web UI, and PostgreSQL database, on an OpenShift/OKD cluster. [CSC +Rahti](https://rahti.csc.fi/) is used as the concrete example throughout; values +specific to Rahti (hostnames, API endpoint, image registry URL) are marked so +readers on other OpenShift clusters can substitute their own. 
+ +Two approaches are documented: + +- **CLI approach**: uses YAML manifests and the `oc` CLI (command-line interface) + exclusively. Every step is reproducible and version-controlled. The bulk of + this guide follows this approach. +- **Web console approach**: uses the Rahti web interface. Described under + [Alternative web console approach](#web-console-approach) for + users who prefer a graphical interface. + +OpenShift is a Kubernetes distribution with extra features layered on top. This +guide uses OpenShift-specific objects (BuildConfig, ImageStream, Route) that do +not exist in vanilla Kubernetes. If you are deploying on plain Kubernetes, +consult your platform's documentation instead. + +The files you will work with live in two places in the DESDEO repository: + +- `deploy/`: all OpenShift manifests (ImageStreams, BuildConfigs, Deployments, + StatefulSet, Routes, Job). +- Several application-level files added or modified to support production + deployment, described under [Repository preparation](#repository-preparation). + +## Prerequisites + +- A CSC account with an active computing project. +- Rahti access enabled for that project. Apply via + [MyCSC](https://my.csc.fi) -> your project -> Services -> Rahti -> Apply for + access. See [Rahti access](https://docs.csc.fi/cloud/rahti/access/) for + details. +- A Rahti project created in the [Rahti web console](https://console-openshift-console.apps.2.rahti.csc.fi/). + When creating the project, include your CSC computing project number in the + description field using the format `csc_project:#######`. +- `oc` CLI installed (see [Using the Rahti CLI](https://docs.csc.fi/cloud/rahti/usage/cli/)). +- Logged in to the cluster: + ```bash + oc login https://api.2.rahti.csc.fi:6443 --token= + ``` +- Switched to your project: + ```bash + oc project + ``` +- A fork or branch of the [DESDEO repository](https://github.com/industrial-optimization-group/DESDEO) + with the `deploy/` files committed and pushed. + +!!! 
note + Newly created CSC computing projects can take some time to become visible + to Rahti. If project creation fails with an error, wait a few minutes and + try again. + +## Architecture + +Four components are deployed and wired together: + +1. **`desdeo-api` Deployment**: FastAPI served by gunicorn+uvicorn, listening on + port 8080. Built in-cluster using OpenShift's Source-to-Image (S2I) strategy + from a custom Python builder image that includes COIN-OR solvers. + +2. **`desdeo-webui` Deployment**: SvelteKit with adapter-node, listening on port + 3000. Built using the Docker strategy from `webui/Dockerfile`. All browser API + calls are routed through a `/api/[...path]` proxy route baked into the + SvelteKit app. This keeps cookies same-origin and avoids CORS complications. + +3. **`desdeo-postgres` StatefulSet**: PostgreSQL running on the built-in OpenShift + image, backed by a PersistentVolumeClaim. Alternatively, [CSC Pukki DBaaS](#option-b-pukki-dbaas) + can be used instead. + +4. **OpenShift Routes**: TLS-terminated at Rahti's HAProxy ingress. Certificates + for `*.rahtiapp.fi` are provisioned automatically. + +### URL environment variables + +Two env vars control how the API is reached, and they intentionally point to +different targets: + +| Variable | Value | Used by | +|---|---|---| +| `VITE_API_URL` | `/api` | Baked into the client-side Javascript bundle at build time. Browser requests go to `/api/...`, which the SvelteKit proxy handles. | +| `API_BASE_URL` | `http://desdeo-api:8080` | Set at runtime on the webui pod. SvelteKit's server-side proxy uses the internal cluster DNS name to reach the API, never exposed to the browser. | + +!!! warning + Do not set `VITE_API_URL` to the API's external Route URL. The proxy + architecture means the browser never talks directly to the API. Doing so + causes cross-origin cookie issues that prevent authentication from working. 
+ +## Repository preparation + +The following files must be present in the repository before deploying. All +manifests live under `deploy/`. + +| File | Purpose | +|---|---| +| `deploy/secrets-template.yaml` | Template for creating credentials (never commit real values) | +| `deploy/postgres.yaml` | StatefulSet, Service, and PVC for PostgreSQL | +| `deploy/builder-imagestream.yaml` | ImageStream that tracks the custom S2I builder image | +| `deploy/builder-buildconfig.yaml` | BuildConfig: Docker strategy, builds the solver-enabled S2I builder image | +| `deploy/api-imagestream.yaml` | ImageStream that tracks built API images | +| `deploy/webui-imagestream.yaml` | ImageStream that tracks built webui images | +| `deploy/api-buildconfig.yaml` | BuildConfig: S2I using `desdeo-builder:latest`, GitHub webhook trigger | +| `deploy/webui-buildconfig.yaml` | BuildConfig: Docker strategy, GitHub webhook trigger | +| `deploy/api-deployment.yaml` | Deployment, Service, and Route for the API | +| `deploy/webui-deployment.yaml` | Deployment, Service, and Route for the web UI | +| `deploy/db-init-job.yaml` | One-shot Job that creates tables and seeds the initial user | + +In addition, several application-level files are required: + +- `.s2i/bin/assemble`: Custom S2I assemble script that uses `uv sync --frozen` + to install Python dependencies. The default assemble script uses pip, which + does not understand uv's `--group` flag. + +- `.s2i/environment`: Sets S2I environment variables such as `APP_MODULE`, + `GUNICORN_CMD_ARGS`, and the port. + +- `desdeo/api/db_init_prod.py`: Production database initialisation script. The + `db_init.py` debug branch does nothing in production mode; this separate script + creates all SQLModel tables and seeds the initial analyst user. + +- `webui/Dockerfile`: Multi-stage Node 24 build. The `NPM_RUN=start:production` + env var selects the adapter-node start script via `svelte.config.js`. 
+ +- `webui/src/routes/api/[...path]/+server.ts`: The SvelteKit proxy route. It + forwards all `/api/*` requests to the API using `event.fetch`, so the + `handleFetch` hook in `hooks.server.ts` can intercept 401 responses and handle + token refresh transparently. + +- `desdeo-s2i-buildimage.Dockerfile`: Builds the custom S2I builder image that + extends the Python 3.12 UBI8 base with COIN-OR solvers (`bonmin`, `ipopt`, + `cbc`). + +## Step 1: Prepare secrets + +All credentials are stored in a Secret named `desdeo-secrets`. +Two options are available, choose one and skip the other. + +Key reference of the stored secrets: + +| Key | Description | +|---|---| +| `POSTGRES_USER` / `DB_USER` | PostgreSQL application user name | +| `POSTGRES_PASSWORD` / `DB_PASSWORD` | Password for the above (same value) | +| `DB_HOST` | Kubernetes Service name: `desdeo-postgres` (or Pukki hostname) | +| `DB_PORT` | `5432` | +| `DB_NAME` | Database name | +| `AUTHJWT_SECRET` | JWT signing key, generate fresh, never reuse between deployments| +| `DESDEO_ADMIN_USERNAME` | Initial analyst account username | +| `DESDEO_ADMIN_PASSWORD` | Initial analyst account password | +| `WEBHOOK_SECRET_API` | GitHub webhook secret for the API BuildConfig | +| `WEBHOOK_SECRET_WEBUI` | GitHub webhook secret for the webui BuildConfig | + +!!! note + `DESDEO_PRODUCTION=true` is set directly in the Deployment manifest, not + as a Secret, because it is not sensitive. + +### Option A: From `secrets.yaml` + +Copy the template, fill in the values, then apply it: + +```bash +cp deploy/secrets-template.yaml deploy/secrets.yaml +# Edit deploy/secrets.yaml, replace every placeholder accordingly +oc apply -f deploy/secrets.yaml +``` + +!!! warning + Make __absolutely sure__ that the file `secrets.yaml` is __never__ committed to git! 
+ +### Option B: From literals +Create the secret and the two dedicated webhook secrets using `oc create secret +generic`: + +```bash +# Main application secret +oc create secret generic desdeo-secrets \ + --from-literal=POSTGRES_USER=desdeo \ + --from-literal=POSTGRES_PASSWORD= \ + --from-literal=DB_HOST=desdeo-postgres \ + --from-literal=DB_PORT=5432 \ + --from-literal=DB_NAME=desdeo \ + --from-literal=DB_USER=desdeo \ + --from-literal=DB_PASSWORD= \ + --from-literal=AUTHJWT_SECRET=$(python3 -c "import secrets; print(secrets.token_hex(64))") \ + --from-literal=DESDEO_ADMIN_USERNAME=admin \ + --from-literal=DESDEO_ADMIN_PASSWORD= \ + --from-literal=WEBHOOK_SECRET_API=$(python3 -c "import secrets; print(secrets.token_hex(24))") \ + --from-literal=WEBHOOK_SECRET_WEBUI=$(python3 -c "import secrets; print(secrets.token_hex(24))") + +# Dedicated webhook secrets. OpenShift's secretReference requires the key +# to be named exactly 'WebHookSecretKey'. Use the same values as above. +oc create secret generic desdeo-webhook-api \ + --from-literal=WebHookSecretKey= +oc create secret generic desdeo-webhook-webui \ + --from-literal=WebHookSecretKey= +``` + +## Step 2: Deploy PostgreSQL + +Two options are available. Choose one and skip the other. + +### Option A: In-cluster PostgreSQL (default) + +```bash +oc apply -f deploy/postgres.yaml +oc rollout status statefulset/desdeo-postgres +``` + +The StatefulSet uses the built-in Rahti PostgreSQL image. To check available +tags on your cluster: + +```bash +oc get is postgresql -n openshift -o jsonpath='{.spec.tags[*].name}' +``` + +Data is stored at `/var/lib/pgsql/data` in the PVC. + +!!! note + The env vars that initialize the database are `POSTGRESQL_USER`, + `POSTGRESQL_PASSWORD`, and `POSTGRESQL_DATABASE` (note the `POSTGRESQL_` + prefix). The manifests map these from the Secret keys `POSTGRES_USER`, + `POSTGRES_PASSWORD`, and the hardcoded value `desdeo`. 
+ +### Option B: Pukki DBaaS + +[Pukki](https://pukki.dbaas.csc.fi) is CSC's managed PostgreSQL service. It +removes the need to deploy `deploy/postgres.yaml` entirely — skip that step if +using Pukki. + +**Prerequisites**: add the Pukki service to your CSC computing project via +MyCSC → your project → Services → Pukki → Apply for access. + +**Setup:** + +1. Log in to [pukki.dbaas.csc.fi](https://pukki.dbaas.csc.fi). +2. Click **Launch Instance**. Give it a name. Default Volume Size, Datastore, + and Flavor settings are fine for most deployments. +3. Under **Database Access**, add the Rahti egress IP: `86.50.229.150/32`. +4. Under **Initialize Databases**, create a database (e.g. `desdeo`) and set + an admin username and password. These become `DB_USER`, `DB_PASSWORD`, and + `DB_NAME` in the Secret. +5. Once the instance is running, copy the hostname from the Pukki dashboard. + This becomes `DB_HOST` in the Secret instead of `desdeo-postgres`. + +Update the secret with the Pukki hostname: + +```bash +oc create secret generic desdeo-secrets \ + ... \ + --from-literal=DB_HOST= \ + ... +``` + +Skip `oc apply -f deploy/postgres.yaml`. All subsequent steps are identical +regardless of which option you chose. + +!!! warning + The Rahti egress IP `86.50.229.150/32` must be added to the Pukki access + list before deploying. Without it the API pod cannot reach the database and + will crash on startup. + +## Step 3: Create ImageStreams and BuildConfigs + +An ImageStream is an OpenShift object that tracks versions of a container image. +When a BuildConfig pushes a new image to an ImageStream, any Deployment watching +that stream automatically triggers a rolling update, no external registry or CI +system required. 
+ +Apply the ImageStreams first: + +```bash +oc apply -f deploy/builder-imagestream.yaml +oc apply -f deploy/api-imagestream.yaml +oc apply -f deploy/webui-imagestream.yaml +``` + +Before applying the BuildConfigs, open each file and substitute `<branch>` +with the branch you want to build from (e.g. `master`). Ensure the git URI uses +HTTPS, not SSH, because build pods do not have SSH credentials. + +The API BuildConfig uses the S2I strategy with `desdeo-builder:latest` as its +builder image, the custom image built from `desdeo-s2i-buildimage.Dockerfile` +that includes COIN-OR solvers. The webui BuildConfig uses the Docker strategy +with `webui/Dockerfile`. The build arg `VITE_API_URL=/api` is passed explicitly. +This is intentional, as browser requests go through the SvelteKit proxy rather +than directly to the API. + +```bash +oc apply -f deploy/builder-buildconfig.yaml +oc apply -f deploy/api-buildconfig.yaml +oc apply -f deploy/webui-buildconfig.yaml +``` + +## Step 4: Trigger first builds + +The builder image must be ready before the API build can start, as +`api-buildconfig.yaml` references `desdeo-builder:latest` as its S2I base. + +```bash +# Build the solver-enabled builder image first (takes a few minutes). +# --follow does not always work, Rahti's web-based interface can also be +# used for monitoring progress. +oc start-build desdeo-builder --follow + +# Then build the API and webui (can run in parallel once the builder is done) +oc start-build desdeo-api --follow +oc start-build desdeo-webui --follow +``` + +The first build takes longer than subsequent ones because there is no layer +cache. Expect roughly a few minutes for the builder, and another few minutes for +both the API and the webui. + +Once the API pod is running, verify the solvers are present: + +```bash +oc exec deployment/desdeo-api -- which bonmin ipopt cbc +``` + +All three should return paths under `/opt/solver_binaries/`. + +!!! 
warning + If the webui build fails with `exit status 137`, the build pod ran out of + memory. Increase the build pod memory limit in `webui-buildconfig.yaml`: + ```yaml + spec: + resources: + limits: + memory: 4Gi + ``` + Also ensure `NODE_OPTIONS=--max-old-space-size=3072` is set in + `dockerStrategy.env`, then re-apply and re-trigger the build. + +## Step 5: Deploy API and web UI + +```bash +oc apply -f deploy/api-deployment.yaml +oc apply -f deploy/webui-deployment.yaml +oc rollout status deployment/desdeo-api +oc rollout status deployment/desdeo-webui +``` + +!!! warning + Rahti enforces a maximum CPU limit-to-request ratio of 5:1. If + `resources.limits.cpu` divided by `resources.requests.cpu` exceeds this, + the ReplicaSet will silently fail to create pods. The error does not appear + in pod logs, look in the ReplicaSet events: + ```bash + oc describe replicaset + ``` + The manifests in `deploy/` are set within the allowed ratio. If you + customize resource settings, check the ratio before applying. + +The following env vars must be present on the API pod at runtime: + +| Variable | Source | +|---|---| +| `DESDEO_PRODUCTION` | Set to `true` directly in the Deployment manifest | +| `DB_HOST`, `DB_PORT`, `DB_NAME`, `DB_USER`, `DB_PASSWORD` | From `desdeo-secrets` | +| `AUTHJWT_SECRET` | From `desdeo-secrets` | +| `CORS_ORIGINS` | Set in the Deployment to `["https://your-webui.rahtiapp.fi"]` | + +!!! note + `COOKIE_DOMAIN` is intentionally not set. With the SvelteKit proxy + architecture, cookies are owned by the webui host and forwarded + server-side — the API does not need to set a shared cookie domain. + +## Step 6: Initialize the database + +`db_init_prod.py` creates all SQLModel tables and seeds the initial analyst +user defined by `DESDEO_ADMIN_USERNAME` and `DESDEO_ADMIN_PASSWORD`. It is +safe to re-run — tables that already exist are not touched. + +Before applying, open `deploy/db-init-job.yaml` and replace `<namespace>` with +your Rahti project name. 
+ +```bash +oc apply -f deploy/db-init-job.yaml +oc logs -f job/desdeo-db-init +``` + +Expected output: + +``` +[db-init] Tables ready. +[db-init] Created user 'admin' (role=analyst, group=admin). +[db-init] Done. +``` + +Once the job completes successfully, delete it: + +```bash +oc delete job desdeo-db-init +``` + +!!! note + Warnings about missing solvers (`bonmin`, `cbc`, `ipopt`) in the init job + logs are harmless if the solver builder image has not been used. Once the + API is rebuilt using `desdeo-builder:latest`, the warnings will disappear. + +### Resetting the database + +To re-run the init job on an existing database (e.g. after adding new tables +in a release), simply apply the job again. Existing data is not affected. + +To wipe the database entirely and start fresh, **all users, problems, and +session data will be permanently deleted**: + +```bash +# Drop and recreate the public schema +oc exec -it statefulset/desdeo-postgres -- \ + psql -U desdeo -d desdeo -c "DROP SCHEMA public CASCADE; CREATE SCHEMA public;" + +# Re-run the init job +oc apply -f deploy/db-init-job.yaml +oc logs -f job/desdeo-db-init +oc delete job desdeo-db-init +``` + +!!! warning + The schema drop is irreversible. All data will be permanently lost. + +## Step 7: Verify + +```bash +curl https://your-api.rahtiapp.fi/health +# -> {"status":"ok"} + +curl -I https://your-webui.rahtiapp.fi/ +# -> HTTP/2 200 +# (a 307 redirect to /home is also normal) +``` + +Routes are TLS-terminated at Rahti's HAProxy ingress. Certificates for +`*.rahtiapp.fi` are provisioned automatically, no manual certificate work is +required. + +## Step 8: Set up GitHub webhooks + +BuildConfigs include GitHub webhook triggers. Once configured, every push to the +deploy branch triggers a rebuild of the affected component, which then rolls out +automatically via the ImageStream trigger on the Deployment. 
+ +Retrieve the webhook secret values from the dedicated webhook secrets: + +```bash +oc get secret desdeo-webhook-api -o jsonpath='{.data.WebHookSecretKey}' | base64 -d +oc get secret desdeo-webhook-webui -o jsonpath='{.data.WebHookSecretKey}' | base64 -d +``` + +Construct the webhook URLs: + +``` +https://api.2.rahti.csc.fi:6443/apis/build.openshift.io/v1/namespaces/<namespace>/buildconfigs/desdeo-api/webhooks/<secret>/github +https://api.2.rahti.csc.fi:6443/apis/build.openshift.io/v1/namespaces/<namespace>/buildconfigs/desdeo-webui/webhooks/<secret>/github +``` + +In GitHub, go to your repository: **Settings -> Webhooks -> Add webhook** + +- Payload URL: the URL constructed above +- Content type: `application/json`: required; `x-www-form-urlencoded` will be rejected +- Secret: leave blank (the secret is embedded in the URL) +- Events: Just the push event + +Add one webhook per BuildConfig. + +!!! note + `oc describe bc/desdeo-api` always shows `<secret>` as a placeholder in + the webhook URL, this is a display-only mask. Always retrieve the actual + secret value from the Secret object as shown above. + +At this point, the DESDEO web-API and webui should be running on Rahti, and they should automatically +update when new commits are pushed to the deployment branch. 
+ +## Troubleshooting + +| Symptom | Cause | Fix | +|---|---|---| +| API pod crashes with `ValidationError: authjwt_secret_key` | `AUTHJWT_SECRET` env var missing or key name wrong | Verify key names in the Secret match the Deployment's `secretKeyRef` fields | +| API pod crashes with DB connection error | `DB_HOST`, `DB_PORT`, `DB_NAME`, `DB_USER`, or `DB_PASSWORD` missing or incorrect | Run `oc describe secret desdeo-secrets` and compare key names | +| API pod crashes with connection timeout to Pukki | Rahti egress IP not whitelisted in Pukki access list | Add `86.50.229.150/32` to the Pukki instance's Database Access settings | +| Webui pod never starts; `FailedCreate` in ReplicaSet events | CPU limit-to-request ratio exceeds 5:1 | Adjust `resources.requests.cpu` so that `limits.cpu / requests.cpu ≤ 5` | +| Login returns 500; logs show `TypeError: Invalid URL` | `API_BASE_URL` env var not set on the webui pod | Set `API_BASE_URL=http://desdeo-api:8080` in the webui Deployment | +| Build fails with `exit status 137` | Build pod out of memory | Set `spec.resources.limits.memory: 4Gi` in the BuildConfig | +| Build fails with `pip install --group` error | Default S2I assemble script used instead of the custom one | Ensure `.s2i/bin/assemble` is present in the repo and uses `uv sync --frozen` | +| `uv sync` fails with lockfile conflict | `uv.lock` is out of sync with `pyproject.toml` | Run `uv lock` locally and commit the updated lockfile | +| Database init job fails with import errors | `DESDEO_PRODUCTION` not set; API falls back to SQLite mode | Ensure `DESDEO_PRODUCTION=true` is set in the Job env | +| GitHub webhook returns 401 | Wrong content type or secret mismatch | Set content type to `application/json`; verify webhook secret matches `WebHookSecretKey` in the dedicated Secret | + +## Known limitations + +- **Schema migrations**: `db_init_prod.py` uses `SQLModel.metadata.create_all`, + which creates missing tables but does not ALTER existing ones. 
If the data + model changes in a later release, tables must be migrated manually or via, + e.g., Alembic, before redeploying. + +- **WebSocket connections**: The GDM-SCORE-bands and GNIMBUS features use + `VITE_API_URL` directly for `ws://` connections and are not proxied through + the SvelteKit `/api` route. These require separate handling not covered in + this guide. + +--- + +## Web console approach + +The steps above use the `oc` CLI and YAML manifests. Rahti also provides a web +console at [console-openshift-console.apps.2.rahti.csc.fi](https://console-openshift-console.apps.2.rahti.csc.fi/) +that lets you accomplish the same tasks through a graphical interface. This +section documents the web console approach as an alternative. + +!!! note + The web console approach is less reproducible than the CLI approach and + requires more manual steps on each redeployment. It is recommended for + one-shot deployments or first-time exploration, not for ongoing deployments. + +### Getting started + +Log in to the Rahti web console. Look for the **Create Project** button (you +may need to switch to the Administrator perspective to see it). Fill in the +project name and description. Include your CSC computing project number in the +description in the format `csc_project:#######`. + +### Deploying the API + +Navigate to **+Add -> Import from Git**. Enter the repository URL and branch +under **Show advanced Git options -> Git reference**. + +Under **Build**, add the following environment variables: + +``` +DESDEO_PRODUCTION = true +DB_HOST = +DB_PORT = 5432 +DB_NAME = desdeo +DB_USER = +DB_PASSWORD = +AUTHJWT_SECRET = <64-char hex> +CORS_ORIGINS = ["https://your-webui.rahtiapp.fi"] +``` + +The builder image should be set to `python:3.12-ubi9`. The S2I assemble script +(`.s2i/bin/assemble`) uses `uv sync --frozen` to install dependencies. + +Store sensitive values in an OpenShift Secret and reference them in the Build +configuration rather than entering them as plain text. 
+ +### Deploying the web UI + +Add another resource via **+Add -> Import from Git** using the same repository +and branch. Under **Advanced Git Options**, set the **Context Dir** to `/webui`. + +Select **Docker build** as the build strategy (not S2I). The `webui/Dockerfile` +handles the Node 24 build internally. + +Set these build arguments and environment variables: + +``` +VITE_API_URL = /api +API_BASE_URL = http://<api-service-name>:8080 +``` + +!!! warning + Do not set `VITE_API_URL` to the API's public Route URL. Browser requests + must go through the SvelteKit `/api` proxy, not directly to the API. + +If the build fails with `exit status 137`, increase the build memory limit in +the BuildConfig YAML: + +```yaml +spec: + resources: + limits: + memory: 4000Mi +``` + +### PostgreSQL + +Use either Pukki DBaaS or a PostgreSQL image from the Rahti developer catalog. + +For Pukki, see [Option B: Pukki DBaaS](#option-b-pukki-dbaas) above, +the setup steps are the same regardless of whether you use the CLI or web console. + +For in-cluster PostgreSQL, navigate to **+Add -> Developer Catalog** and find +the PostgreSQL template. The correct env var names for the OpenShift image are +`POSTGRESQL_USER`, `POSTGRESQL_PASSWORD`, and `POSTGRESQL_DATABASE`. + +### Database initialization + +Once the API is running, use `db_init_prod.py` to create tables and seed the +initial user. The recommended approach is to run it as a Kubernetes Job using +the manifest in `deploy/db-init-job.yaml` (see [Step 6](#step-6-initialize-the-database)). 
+ +Alternatively, you can exec into the API pod directly: + +```bash +oc exec -it deployment/desdeo-api -- python desdeo/api/db_init_prod.py +``` diff --git a/docs/howtoguides/index.md b/docs/howtoguides/index.md index f366caa71..9112bf050 100644 --- a/docs/howtoguides/index.md +++ b/docs/howtoguides/index.md @@ -32,5 +32,6 @@ Guides are goal-oriented and are meant to direct users towards specific goals wh ## Web-API and Web-GUI - **[Running the web-API and web-GUI](api_and_gui.md):** How to run the web-API and web-GUI -- **[Hosting on Kubernetes](kubernetes.md):** How to host a DESDEO web application on Kubernetes +- **[Hosting on Kubernetes (old)](kubernetes.md):** How to host a DESDEO web application on Kubernetes +- **[Deploying on OpenShift](deploying_on_openshift.md):** How to deploy DESDEO on OpenShift/Kubernetes (Rahti example) - **[Implementing method interfaces](implementing_method_interfaces.md):** How to implement new interactive method interfaces in the Web-GUI diff --git a/mkdocs.yml b/mkdocs.yml index ff05c53c8..4b2483e4d 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -36,6 +36,7 @@ nav: - "How to define multiobjective optimization problems": "howtoguides/problem.md" - "How to define a multiobjective optimization problem": "howtoguides/how_to_define_a_problem.ipynb" - "How to host a DESDEO web application on Kubernetes": "howtoguides/kubernetes.md" + - "How to deploy DESDEO on OpenShift": "howtoguides/deploying_on_openshift.md" - "How to run DESDEO's web-API and web-GUI": "howtoguides/api_and_gui.md" - "How to implement method interfaces in the Web-GUI": "howtoguides/implementing_method_interfaces.md" - "How to utilize 'MCDM' methods": "howtoguides/how_to_utilize_mcdm_methods.ipynb" diff --git a/uv.lock b/uv.lock index ed589fa76..c7b8be916 100644 --- a/uv.lock +++ b/uv.lock @@ -694,7 +694,7 @@ wheels = [ [[package]] name = "desdeo" -version = "2.2.2" +version = "2.3.0" source = { editable = "." 
} dependencies = [ { name = "bayesian-optimization" }, diff --git a/webui/Dockerfile b/webui/Dockerfile new file mode 100644 index 000000000..a8e757c63 --- /dev/null +++ b/webui/Dockerfile @@ -0,0 +1,71 @@ +# Multi-stage Dockerfile for the DESDEO web UI. +# Build context: webui/ (set via BuildConfig contextDir: webui) +# +# Two URL variables control how the frontend talks to the backend API: +# +# VITE_API_URL (build-time ARG / ENV) +# Baked into the client-side JavaScript bundle by Vite. +# Used by browser code: import.meta.env.VITE_API_URL +# Must be the *public* HTTPS route of the API. +# Default: https://gialmisi-desdeo-api.rahtiapp.fi +# +# API_URL (runtime ENV in the container / Deployment env) +# Used by SvelteKit server-side route handlers (+server.ts) that +# proxy requests to the backend. +# Should point to the *internal cluster* service so traffic stays +# inside the cluster and avoids the public ingress. +# Default here: http://desdeo-api:8080 +# Override in the Deployment manifest if the service name differs. + +# --------------------------------------------------------------------------- +# Stage 1 – build +# --------------------------------------------------------------------------- +FROM node:24-alpine AS build + +WORKDIR /app + +# Install dependencies first for layer-cache efficiency. +COPY package*.json ./ +RUN npm install + +# Copy the rest of the source. +COPY . . + +# VITE_API_URL is baked into the bundle at build time. +ARG VITE_API_URL=https://gialmisi-desdeo-api.rahtiapp.fi +ENV VITE_API_URL=$VITE_API_URL + +# NPM_RUN=start:production makes svelte.config.js select adapter-node. +RUN NPM_RUN=start:production npm run build + +# Prune devDependencies so we can copy node_modules into the runtime stage. 
+RUN npm prune --omit=dev + +# --------------------------------------------------------------------------- +# Stage 2 – runtime +# --------------------------------------------------------------------------- +FROM node:24-alpine + +WORKDIR /app + +# Copy the compiled adapter-node output and the pruned dependencies. +COPY --from=build /app/build ./build +COPY --from=build /app/node_modules ./node_modules +COPY --from=build /app/package.json ./package.json + +# OpenShift runs containers with an arbitrary UID in GID 0 (root group). +# Setting group ownership to 0 with group-write allows the process to run +# under any UID without permission errors. +RUN chgrp -R 0 /app && chmod -R g=u /app + +# adapter-node default port; can be overridden at runtime via PORT env. +EXPOSE 3000 +ENV PORT=3000 + +# Server-side proxy routes use this to reach the backend inside the cluster. +ENV API_URL=http://desdeo-api:8080 + +# Run as a non-root UID (required by Rahti / OpenShift SCC). +USER 1001 + +CMD ["node", "build"] diff --git a/webui/README.md b/webui/README.md index 37e96b0c7..5a69afda1 100644 --- a/webui/README.md +++ b/webui/README.md @@ -2,37 +2,34 @@ ## Environment variables -For the frontend to work correctly, there are some environmental variables -that should be set in an`.env` file at the root level. These variables are: +For the frontend to work correctly, there are some environment variables that +should be set in a `.env` file at the root of the `webui/` directory. These +variables are: -- `VITE_API_URL` which should be defined to be '/api' for the proxy to work correctly. 
I.e.: +- `VITE_API_URL` — set to `"/api"` so that client-side code routes requests + through the SvelteKit catch-all proxy at `src/routes/api/[...path]/+server.ts`: ```bash VITE_API_URL="/api" ``` -- `API_URL` which should be defined to be 'http://localhost:8000 or the path of the server' +- `API_BASE_URL` — the URL of the running DESDEO web-API, used by server-side + route handlers and by `orval.config.mjs` when generating the OpenAPI client: ```bash -API_URL=http://localhost:8000 +API_BASE_URL=http://localhost:8000 ``` -Check also the file `vite.config.ts`, where in the server setting - -```toml - server: { - proxy: { - '/api': { - target: 'http://127.0.0.1:8000', - changeOrigin: true, - secure: false, - rewrite: (path) => path.replace(/^\/api/, '') - } - } - } +A minimal `.env` for local development therefore looks like: + +```bash +API_BASE_URL="http://localhost:8000" +VITE_API_URL="/api" ``` -the `target` should point to the local URL that can be used to access the DESDEO web-API. +> **Note:** `VITE_API_URL` is baked into the client bundle at build time by +> Vite, so changing it after a build has no effect. `API_BASE_URL` is read at +> runtime by the Node.js server process. ## Installing @@ -92,8 +89,10 @@ npm run dev -- --open When the web-API is updated, it is important to update the OpenAPI clients, which automatically use the schemas defined in the web-API on the GUI side. 
To -generate them, make sure the web-API is running on the URL defined in `OPENAPI_URL` in the file -`orval.config.mjs`, and issue the command: +generate them, make sure the web-API is running at the URL defined in +`OPENAPI_URL` inside `orval.config.mjs` (defaults to +`http://localhost:8000/openapi.json`), and that `API_BASE_URL` is set in your +`.env` file, then run: ```bash npm run generate:client diff --git a/webui/orval.config.mjs b/webui/orval.config.mjs index 2b9466487..09533549d 100644 --- a/webui/orval.config.mjs +++ b/webui/orval.config.mjs @@ -36,6 +36,17 @@ export default defineConfig({ target: 'src/lib/gen/endpoints', namingConvention: "PascalCase", fileExtension: 'zod.ts', + override: { + zod: { + generate: { + body: true, + param: true, + query: true, + header: true, + response: true, + }, + }, + }, }, hooks: { afterAllFilesWrite: 'prettier --write', diff --git a/webui/package-lock.json b/webui/package-lock.json index 2cd89fba0..7ab0334c9 100644 --- a/webui/package-lock.json +++ b/webui/package-lock.json @@ -20,6 +20,7 @@ "formsnap": "^2.0.1", "katex": "^0.16.23", "layerchart": "^2.0.0-next.10", + "leaflet": "^1.9.4", "mathlive": "^0.107.1", "mode-watcher": "^1.1.0", "msw": "^2.11.5", @@ -56,6 +57,7 @@ "@testing-library/jest-dom": "^6.9.1", "@testing-library/svelte": "^5.2.8", "@types/d3": "^7.4.3", + "@types/leaflet": "^1.9.21", "@types/node": "^24.7.1", "@vitest/browser": "^3.2.4", "@vitest/coverage-v8": "^3.2.4", @@ -437,9 +439,9 @@ } }, "node_modules/@esbuild/aix-ppc64": { - "version": "0.27.4", - "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.27.4.tgz", - "integrity": "sha512-cQPwL2mp2nSmHHJlCyoXgHGhbEPMrEEU5xhkcy3Hs/O7nGZqEpZ2sUtLaL9MORLtDfRvVl2/3PAuEkYZH0Ty8Q==", + "version": "0.27.5", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.27.5.tgz", + "integrity": "sha512-nGsF/4C7uzUj+Nj/4J+Zt0bYQ6bz33Phz8Lb2N80Mti1HjGclTJdXZ+9APC4kLvONbjxN1zfvYNd8FEcbBK/MQ==", "cpu": [ "ppc64" ], @@ -453,9 +455,9 
@@ } }, "node_modules/@esbuild/android-arm": { - "version": "0.27.4", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.27.4.tgz", - "integrity": "sha512-X9bUgvxiC8CHAGKYufLIHGXPJWnr0OCdR0anD2e21vdvgCI8lIfqFbnoeOz7lBjdrAGUhqLZLcQo6MLhTO2DKQ==", + "version": "0.27.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.27.5.tgz", + "integrity": "sha512-Cv781jd0Rfj/paoNrul1/r4G0HLvuFKYh7C9uHZ2Pl8YXstzvCyyeWENTFR9qFnRzNMCjXmsulZuvosDg10Mog==", "cpu": [ "arm" ], @@ -469,9 +471,9 @@ } }, "node_modules/@esbuild/android-arm64": { - "version": "0.27.4", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.27.4.tgz", - "integrity": "sha512-gdLscB7v75wRfu7QSm/zg6Rx29VLdy9eTr2t44sfTW7CxwAtQghZ4ZnqHk3/ogz7xao0QAgrkradbBzcqFPasw==", + "version": "0.27.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.27.5.tgz", + "integrity": "sha512-Oeghq+XFgh1pUGd1YKs4DDoxzxkoUkvko+T/IVKwlghKLvvjbGFB3ek8VEDBmNvqhwuL0CQS3cExdzpmUyIrgA==", "cpu": [ "arm64" ], @@ -485,9 +487,9 @@ } }, "node_modules/@esbuild/android-x64": { - "version": "0.27.4", - "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.27.4.tgz", - "integrity": "sha512-PzPFnBNVF292sfpfhiyiXCGSn9HZg5BcAz+ivBuSsl6Rk4ga1oEXAamhOXRFyMcjwr2DVtm40G65N3GLeH1Lvw==", + "version": "0.27.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.27.5.tgz", + "integrity": "sha512-nQD7lspbzerlmtNOxYMFAGmhxgzn8Z7m9jgFkh6kpkjsAhZee1w8tJW3ZlW+N9iRePz0oPUDrYrXidCPSImD0Q==", "cpu": [ "x64" ], @@ -501,9 +503,9 @@ } }, "node_modules/@esbuild/darwin-arm64": { - "version": "0.27.4", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.27.4.tgz", - "integrity": "sha512-b7xaGIwdJlht8ZFCvMkpDN6uiSmnxxK56N2GDTMYPr2/gzvfdQN8rTfBsvVKmIVY/X7EM+/hJKEIbbHs9oA4tQ==", + "version": "0.27.5", + "resolved": 
"https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.27.5.tgz", + "integrity": "sha512-I+Ya/MgC6rr8oRWGRDF3BXDfP8K1BVUggHqN6VI2lUZLdDi1IM1v2cy0e3lCPbP+pVcK3Tv8cgUhHse1kaNZZw==", "cpu": [ "arm64" ], @@ -517,9 +519,9 @@ } }, "node_modules/@esbuild/darwin-x64": { - "version": "0.27.4", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.27.4.tgz", - "integrity": "sha512-sR+OiKLwd15nmCdqpXMnuJ9W2kpy0KigzqScqHI3Hqwr7IXxBp3Yva+yJwoqh7rE8V77tdoheRYataNKL4QrPw==", + "version": "0.27.5", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.27.5.tgz", + "integrity": "sha512-MCjQUtC8wWJn/pIPM7vQaO69BFgwPD1jriEdqwTCKzWjGgkMbcg+M5HzrOhPhuYe1AJjXlHmD142KQf+jnYj8A==", "cpu": [ "x64" ], @@ -533,9 +535,9 @@ } }, "node_modules/@esbuild/freebsd-arm64": { - "version": "0.27.4", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.27.4.tgz", - "integrity": "sha512-jnfpKe+p79tCnm4GVav68A7tUFeKQwQyLgESwEAUzyxk/TJr4QdGog9sqWNcUbr/bZt/O/HXouspuQDd9JxFSw==", + "version": "0.27.5", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.27.5.tgz", + "integrity": "sha512-X6xVS+goSH0UelYXnuf4GHLwpOdc8rgK/zai+dKzBMnncw7BTQIwquOodE7EKvY2UVUetSqyAfyZC1D+oqLQtg==", "cpu": [ "arm64" ], @@ -549,9 +551,9 @@ } }, "node_modules/@esbuild/freebsd-x64": { - "version": "0.27.4", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.27.4.tgz", - "integrity": "sha512-2kb4ceA/CpfUrIcTUl1wrP/9ad9Atrp5J94Lq69w7UwOMolPIGrfLSvAKJp0RTvkPPyn6CIWrNy13kyLikZRZQ==", + "version": "0.27.5", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.27.5.tgz", + "integrity": "sha512-233X1FGo3a8x1ekLB6XT69LfZ83vqz+9z3TSEQCTYfMNY880A97nr81KbPcAMl9rmOFp11wO0dP+eB18KU/Ucg==", "cpu": [ "x64" ], @@ -565,9 +567,9 @@ } }, "node_modules/@esbuild/linux-arm": { - "version": "0.27.4", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.27.4.tgz", - 
"integrity": "sha512-aBYgcIxX/wd5n2ys0yESGeYMGF+pv6g0DhZr3G1ZG4jMfruU9Tl1i2Z+Wnj9/KjGz1lTLCcorqE2viePZqj4Eg==", + "version": "0.27.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.27.5.tgz", + "integrity": "sha512-0wkVrYHG4sdCCN/bcwQ7yYMXACkaHc3UFeaEOwSVW6e5RycMageYAFv+JS2bKLwHyeKVUvtoVH+5/RHq0fgeFw==", "cpu": [ "arm" ], @@ -581,9 +583,9 @@ } }, "node_modules/@esbuild/linux-arm64": { - "version": "0.27.4", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.27.4.tgz", - "integrity": "sha512-7nQOttdzVGth1iz57kxg9uCz57dxQLHWxopL6mYuYthohPKEK0vU0C3O21CcBK6KDlkYVcnDXY099HcCDXd9dA==", + "version": "0.27.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.27.5.tgz", + "integrity": "sha512-euKkilsNOv7x/M1NKsx5znyprbpsRFIzTV6lWziqJch7yWYayfLtZzDxDTl+LSQDJYAjd9TVb/Kt5UKIrj2e4A==", "cpu": [ "arm64" ], @@ -597,9 +599,9 @@ } }, "node_modules/@esbuild/linux-ia32": { - "version": "0.27.4", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.27.4.tgz", - "integrity": "sha512-oPtixtAIzgvzYcKBQM/qZ3R+9TEUd1aNJQu0HhGyqtx6oS7qTpvjheIWBbes4+qu1bNlo2V4cbkISr8q6gRBFA==", + "version": "0.27.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.27.5.tgz", + "integrity": "sha512-hVRQX4+P3MS36NxOy24v/Cdsimy/5HYePw+tmPqnNN1fxV0bPrFWR6TMqwXPwoTM2VzbkA+4lbHWUKDd5ZDA/w==", "cpu": [ "ia32" ], @@ -613,9 +615,9 @@ } }, "node_modules/@esbuild/linux-loong64": { - "version": "0.27.4", - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.27.4.tgz", - "integrity": "sha512-8mL/vh8qeCoRcFH2nM8wm5uJP+ZcVYGGayMavi8GmRJjuI3g1v6Z7Ni0JJKAJW+m0EtUuARb6Lmp4hMjzCBWzA==", + "version": "0.27.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.27.5.tgz", + "integrity": "sha512-mKqqRuOPALI8nDzhOBmIS0INvZOOFGGg5n1osGIXAx8oersceEbKd4t1ACNTHM3sJBXGFAlEgqM+svzjPot+ZQ==", "cpu": [ "loong64" ], @@ -629,9 +631,9 @@ } }, 
"node_modules/@esbuild/linux-mips64el": { - "version": "0.27.4", - "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.27.4.tgz", - "integrity": "sha512-1RdrWFFiiLIW7LQq9Q2NES+HiD4NyT8Itj9AUeCl0IVCA459WnPhREKgwrpaIfTOe+/2rdntisegiPWn/r/aAw==", + "version": "0.27.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.27.5.tgz", + "integrity": "sha512-EE/QXH9IyaAj1qeuIV5+/GZkBTipgGO782Ff7Um3vPS9cvLhJJeATy4Ggxikz2inZ46KByamMn6GqtqyVjhenA==", "cpu": [ "mips64el" ], @@ -645,9 +647,9 @@ } }, "node_modules/@esbuild/linux-ppc64": { - "version": "0.27.4", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.27.4.tgz", - "integrity": "sha512-tLCwNG47l3sd9lpfyx9LAGEGItCUeRCWeAx6x2Jmbav65nAwoPXfewtAdtbtit/pJFLUWOhpv0FpS6GQAmPrHA==", + "version": "0.27.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.27.5.tgz", + "integrity": "sha512-0V2iF1RGxBf1b7/BjurA5jfkl7PtySjom1r6xOK2q9KWw/XCpAdtB6KNMO+9xx69yYfSCRR9FE0TyKfHA2eQMw==", "cpu": [ "ppc64" ], @@ -661,9 +663,9 @@ } }, "node_modules/@esbuild/linux-riscv64": { - "version": "0.27.4", - "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.27.4.tgz", - "integrity": "sha512-BnASypppbUWyqjd1KIpU4AUBiIhVr6YlHx/cnPgqEkNoVOhHg+YiSVxM1RLfiy4t9cAulbRGTNCKOcqHrEQLIw==", + "version": "0.27.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.27.5.tgz", + "integrity": "sha512-rYxThBx6G9HN6tFNuvB/vykeLi4VDsm5hE5pVwzqbAjZEARQrWu3noZSfbEnPZ/CRXP3271GyFk/49up2W190g==", "cpu": [ "riscv64" ], @@ -677,9 +679,9 @@ } }, "node_modules/@esbuild/linux-s390x": { - "version": "0.27.4", - "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.27.4.tgz", - "integrity": "sha512-+eUqgb/Z7vxVLezG8bVB9SfBie89gMueS+I0xYh2tJdw3vqA/0ImZJ2ROeWwVJN59ihBeZ7Tu92dF/5dy5FttA==", + "version": "0.27.5", + "resolved": 
"https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.27.5.tgz", + "integrity": "sha512-uEP2q/4qgd8goEUc4QIdU/1P2NmEtZ/zX5u3OpLlCGhJIuBIv0s0wr7TB2nBrd3/A5XIdEkkS5ZLF0ULuvaaYQ==", "cpu": [ "s390x" ], @@ -693,9 +695,9 @@ } }, "node_modules/@esbuild/linux-x64": { - "version": "0.27.4", - "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.27.4.tgz", - "integrity": "sha512-S5qOXrKV8BQEzJPVxAwnryi2+Iq5pB40gTEIT69BQONqR7JH1EPIcQ/Uiv9mCnn05jff9umq/5nqzxlqTOg9NA==", + "version": "0.27.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.27.5.tgz", + "integrity": "sha512-+Gq47Wqq6PLOOZuBzVSII2//9yyHNKZLuwfzCemqexqOQCSz0zy0O26kIzyp9EMNMK+nZ0tFHBZrCeVUuMs/ew==", "cpu": [ "x64" ], @@ -709,9 +711,9 @@ } }, "node_modules/@esbuild/netbsd-arm64": { - "version": "0.27.4", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.27.4.tgz", - "integrity": "sha512-xHT8X4sb0GS8qTqiwzHqpY00C95DPAq7nAwX35Ie/s+LO9830hrMd3oX0ZMKLvy7vsonee73x0lmcdOVXFzd6Q==", + "version": "0.27.5", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.27.5.tgz", + "integrity": "sha512-3F/5EG8VHfN/I+W5cO1/SV2H9Q/5r7vcHabMnBqhHK2lTWOh3F8vixNzo8lqxrlmBtZVFpW8pmITHnq54+Tq4g==", "cpu": [ "arm64" ], @@ -725,9 +727,9 @@ } }, "node_modules/@esbuild/netbsd-x64": { - "version": "0.27.4", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.27.4.tgz", - "integrity": "sha512-RugOvOdXfdyi5Tyv40kgQnI0byv66BFgAqjdgtAKqHoZTbTF2QqfQrFwa7cHEORJf6X2ht+l9ABLMP0dnKYsgg==", + "version": "0.27.5", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.27.5.tgz", + "integrity": "sha512-28t+Sj3CPN8vkMOlZotOmDgilQwVvxWZl7b8rxpn73Tt/gCnvrHxQUMng4uu3itdFvrtba/1nHejvxqz8xgEMA==", "cpu": [ "x64" ], @@ -741,9 +743,9 @@ } }, "node_modules/@esbuild/openbsd-arm64": { - "version": "0.27.4", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.27.4.tgz", - 
"integrity": "sha512-2MyL3IAaTX+1/qP0O1SwskwcwCoOI4kV2IBX1xYnDDqthmq5ArrW94qSIKCAuRraMgPOmG0RDTA74mzYNQA9ow==", + "version": "0.27.5", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.27.5.tgz", + "integrity": "sha512-Doz/hKtiuVAi9hMsBMpwBANhIZc8l238U2Onko3t2xUp8xtM0ZKdDYHMnm/qPFVthY8KtxkXaocwmMh6VolzMA==", "cpu": [ "arm64" ], @@ -757,9 +759,9 @@ } }, "node_modules/@esbuild/openbsd-x64": { - "version": "0.27.4", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.27.4.tgz", - "integrity": "sha512-u8fg/jQ5aQDfsnIV6+KwLOf1CmJnfu1ShpwqdwC0uA7ZPwFws55Ngc12vBdeUdnuWoQYx/SOQLGDcdlfXhYmXQ==", + "version": "0.27.5", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.27.5.tgz", + "integrity": "sha512-WfGVaa1oz5A7+ZFPkERIbIhKT4olvGl1tyzTRaB5yoZRLqC0KwaO95FeZtOdQj/oKkjW57KcVF944m62/0GYtA==", "cpu": [ "x64" ], @@ -773,9 +775,9 @@ } }, "node_modules/@esbuild/openharmony-arm64": { - "version": "0.27.4", - "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.27.4.tgz", - "integrity": "sha512-JkTZrl6VbyO8lDQO3yv26nNr2RM2yZzNrNHEsj9bm6dOwwu9OYN28CjzZkH57bh4w0I2F7IodpQvUAEd1mbWXg==", + "version": "0.27.5", + "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.27.5.tgz", + "integrity": "sha512-Xh+VRuh6OMh3uJ0JkCjI57l+DVe7VRGBYymen8rFPnTVgATBwA6nmToxM2OwTlSvrnWpPKkrQUj93+K9huYC6A==", "cpu": [ "arm64" ], @@ -789,9 +791,9 @@ } }, "node_modules/@esbuild/sunos-x64": { - "version": "0.27.4", - "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.27.4.tgz", - "integrity": "sha512-/gOzgaewZJfeJTlsWhvUEmUG4tWEY2Spp5M20INYRg2ZKl9QPO3QEEgPeRtLjEWSW8FilRNacPOg8R1uaYkA6g==", + "version": "0.27.5", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.27.5.tgz", + "integrity": "sha512-aC1gpJkkaUADHuAdQfuVTnqVUTLqqUNhAvEwHwVWcnVVZvNlDPGA0UveZsfXJJ9T6k9Po4eHi3c02gbdwO3g6w==", "cpu": [ "x64" ], @@ 
-805,9 +807,9 @@ } }, "node_modules/@esbuild/win32-arm64": { - "version": "0.27.4", - "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.27.4.tgz", - "integrity": "sha512-Z9SExBg2y32smoDQdf1HRwHRt6vAHLXcxD2uGgO/v2jK7Y718Ix4ndsbNMU/+1Qiem9OiOdaqitioZwxivhXYg==", + "version": "0.27.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.27.5.tgz", + "integrity": "sha512-0UNx2aavV0fk6UpZcwXFLztA2r/k9jTUa7OW7SAea1VYUhkug99MW1uZeXEnPn5+cHOd0n8myQay6TlFnBR07w==", "cpu": [ "arm64" ], @@ -821,9 +823,9 @@ } }, "node_modules/@esbuild/win32-ia32": { - "version": "0.27.4", - "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.27.4.tgz", - "integrity": "sha512-DAyGLS0Jz5G5iixEbMHi5KdiApqHBWMGzTtMiJ72ZOLhbu/bzxgAe8Ue8CTS3n3HbIUHQz/L51yMdGMeoxXNJw==", + "version": "0.27.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.27.5.tgz", + "integrity": "sha512-5nlJ3AeJWCTSzR7AEqVjT/faWyqKU86kCi1lLmxVqmNR+j4HrYdns+eTGjS/vmrzCIe8inGQckUadvS0+JkKdQ==", "cpu": [ "ia32" ], @@ -837,9 +839,9 @@ } }, "node_modules/@esbuild/win32-x64": { - "version": "0.27.4", - "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.27.4.tgz", - "integrity": "sha512-+knoa0BDoeXgkNvvV1vvbZX4+hizelrkwmGJBdT17t8FNPwG2lKemmuMZlmaNQ3ws3DKKCxpb4zRZEIp3UxFCg==", + "version": "0.27.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.27.5.tgz", + "integrity": "sha512-PWypQR+d4FLfkhBIV+/kHsUELAnMpx1bRvvsn3p+/sAERbnCzFrtDRG2Xw5n+2zPxBK2+iaP+vetsRl4Ti7WgA==", "cpu": [ "x64" ], @@ -3554,6 +3556,16 @@ "license": "MIT", "peer": true }, + "node_modules/@types/leaflet": { + "version": "1.9.21", + "resolved": "https://registry.npmjs.org/@types/leaflet/-/leaflet-1.9.21.tgz", + "integrity": "sha512-TbAd9DaPGSnzp6QvtYngntMZgcRk+igFELwR2N99XZn7RXUdKgsXMR+28bUO0rPsWp8MIu/f47luLIQuSLYv/w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/geojson": "*" + } + }, 
"node_modules/@types/mdast": { "version": "4.0.4", "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz", @@ -5617,9 +5629,9 @@ ] }, "node_modules/esbuild": { - "version": "0.27.4", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.27.4.tgz", - "integrity": "sha512-Rq4vbHnYkK5fws5NF7MYTU68FPRE1ajX7heQ/8QXXWqNgqqJ/GkmmyxIzUnf2Sr/bakf8l54716CcMGHYhMrrQ==", + "version": "0.27.5", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.27.5.tgz", + "integrity": "sha512-zdQoHBjuDqKsvV5OPaWansOwfSQ0Js+Uj9J85TBvj3bFW1JjWTSULMRwdQAc8qMeIScbClxeMK0jlrtB9linhA==", "hasInstallScript": true, "license": "MIT", "bin": { @@ -5629,32 +5641,32 @@ "node": ">=18" }, "optionalDependencies": { - "@esbuild/aix-ppc64": "0.27.4", - "@esbuild/android-arm": "0.27.4", - "@esbuild/android-arm64": "0.27.4", - "@esbuild/android-x64": "0.27.4", - "@esbuild/darwin-arm64": "0.27.4", - "@esbuild/darwin-x64": "0.27.4", - "@esbuild/freebsd-arm64": "0.27.4", - "@esbuild/freebsd-x64": "0.27.4", - "@esbuild/linux-arm": "0.27.4", - "@esbuild/linux-arm64": "0.27.4", - "@esbuild/linux-ia32": "0.27.4", - "@esbuild/linux-loong64": "0.27.4", - "@esbuild/linux-mips64el": "0.27.4", - "@esbuild/linux-ppc64": "0.27.4", - "@esbuild/linux-riscv64": "0.27.4", - "@esbuild/linux-s390x": "0.27.4", - "@esbuild/linux-x64": "0.27.4", - "@esbuild/netbsd-arm64": "0.27.4", - "@esbuild/netbsd-x64": "0.27.4", - "@esbuild/openbsd-arm64": "0.27.4", - "@esbuild/openbsd-x64": "0.27.4", - "@esbuild/openharmony-arm64": "0.27.4", - "@esbuild/sunos-x64": "0.27.4", - "@esbuild/win32-arm64": "0.27.4", - "@esbuild/win32-ia32": "0.27.4", - "@esbuild/win32-x64": "0.27.4" + "@esbuild/aix-ppc64": "0.27.5", + "@esbuild/android-arm": "0.27.5", + "@esbuild/android-arm64": "0.27.5", + "@esbuild/android-x64": "0.27.5", + "@esbuild/darwin-arm64": "0.27.5", + "@esbuild/darwin-x64": "0.27.5", + "@esbuild/freebsd-arm64": "0.27.5", + "@esbuild/freebsd-x64": "0.27.5", + "@esbuild/linux-arm": "0.27.5", + 
"@esbuild/linux-arm64": "0.27.5", + "@esbuild/linux-ia32": "0.27.5", + "@esbuild/linux-loong64": "0.27.5", + "@esbuild/linux-mips64el": "0.27.5", + "@esbuild/linux-ppc64": "0.27.5", + "@esbuild/linux-riscv64": "0.27.5", + "@esbuild/linux-s390x": "0.27.5", + "@esbuild/linux-x64": "0.27.5", + "@esbuild/netbsd-arm64": "0.27.5", + "@esbuild/netbsd-x64": "0.27.5", + "@esbuild/openbsd-arm64": "0.27.5", + "@esbuild/openbsd-x64": "0.27.5", + "@esbuild/openharmony-arm64": "0.27.5", + "@esbuild/sunos-x64": "0.27.5", + "@esbuild/win32-arm64": "0.27.5", + "@esbuild/win32-ia32": "0.27.5", + "@esbuild/win32-x64": "0.27.5" } }, "node_modules/escalade": { @@ -7088,6 +7100,12 @@ "svelte": "^5.7.0" } }, + "node_modules/leaflet": { + "version": "1.9.4", + "resolved": "https://registry.npmjs.org/leaflet/-/leaflet-1.9.4.tgz", + "integrity": "sha512-nxS1ynzJOmOlHp+iL3FyWqK89GtNL8U8rvlMOsQdTTssxZwCXh8N2NB3GDQOL+YR3XnWyZAxwQixURb+FA74PA==", + "license": "BSD-2-Clause" + }, "node_modules/leven": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/leven/-/leven-4.1.0.tgz", @@ -8941,9 +8959,9 @@ } }, "node_modules/remeda": { - "version": "2.33.6", - "resolved": "https://registry.npmjs.org/remeda/-/remeda-2.33.6.tgz", - "integrity": "sha512-tazDGH7s75kUPGBKLvhgBEHMgW+TdDFhjUAMdQj57IoWz6HsGa5D2RX5yDUz6IIqiRRvZiaEHzCzWdTeixc/Kg==", + "version": "2.33.7", + "resolved": "https://registry.npmjs.org/remeda/-/remeda-2.33.7.tgz", + "integrity": "sha512-cXlyjevWx5AcslOUEETG4o8XYi9UkoCXcJmj7XhPFVbla+ITuOBxv6ijBrmbeg+ZhzmDThkNdO+iXKUfrJep1w==", "license": "MIT", "funding": { "url": "https://github.com/sponsors/remeda" @@ -10338,12 +10356,12 @@ } }, "node_modules/typedoc/node_modules/minimatch": { - "version": "10.2.4", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.2.4.tgz", - "integrity": "sha512-oRjTw/97aTBN0RHbYCdtF1MQfvusSIBQM0IZEgzl6426+8jSC0nF1a/GmnVLpfB9yyr6g6FTqWqiZVbxrtaCIg==", + "version": "10.2.5", + "resolved": 
"https://registry.npmjs.org/minimatch/-/minimatch-10.2.5.tgz", + "integrity": "sha512-MULkVLfKGYDFYejP07QOurDLLQpcjk7Fw+7jXS2R2czRQzR56yHRveU5NDJEOviH+hETZKSkIk5c+T23GjFUMg==", "license": "BlueOak-1.0.0", "dependencies": { - "brace-expansion": "^5.0.2" + "brace-expansion": "^5.0.5" }, "engines": { "node": "18 || 20 || >=22" diff --git a/webui/package.json b/webui/package.json index c0f550e46..bf648d909 100644 --- a/webui/package.json +++ b/webui/package.json @@ -33,6 +33,7 @@ "formsnap": "^2.0.1", "katex": "^0.16.23", "layerchart": "^2.0.0-next.10", + "leaflet": "^1.9.4", "mathlive": "^0.107.1", "mode-watcher": "^1.1.0", "msw": "^2.11.5", @@ -69,6 +70,7 @@ "@testing-library/jest-dom": "^6.9.1", "@testing-library/svelte": "^5.2.8", "@types/d3": "^7.4.3", + "@types/leaflet": "^1.9.21", "@types/node": "^24.7.1", "@vitest/browser": "^3.2.4", "@vitest/coverage-v8": "^3.2.4", diff --git a/webui/src/hooks.server.ts b/webui/src/hooks.server.ts index a214db051..1f5b957b8 100644 --- a/webui/src/hooks.server.ts +++ b/webui/src/hooks.server.ts @@ -5,7 +5,17 @@ import { dev } from '$app/environment'; // const API = process.env.API_BASE_URL ?? 
'/'; export const handleFetch: HandleFetch = async ({ event, request, fetch }) => { - // TODO: check that the request originates from our app, instead of being a third party + // Forward access_token cookie to all API requests + const accessToken = event.cookies.get("access_token"); + if (accessToken) { + request = new Request(request, { + headers: new Headers({ + ...Object.fromEntries(request.headers.entries()), + cookie: `access_token=${accessToken}`, + }), + }); + } + const originalRequest = request.clone(); let res = await fetch(request); diff --git a/webui/src/lib/api/new-client.ts b/webui/src/lib/api/new-client.ts index 3e8f365c0..54b4b3ba1 100644 --- a/webui/src/lib/api/new-client.ts +++ b/webui/src/lib/api/new-client.ts @@ -21,16 +21,20 @@ const getBody = async (c: Response | Request): Promise => { return (c as Response).text() as Promise; }; -// NOTE: Update just base url const getUrl = (contextUrl: string): string => { const url = new URL(contextUrl); - const origin = url.origin; const pathname = url.pathname; const search = url.search; - const requestUrl = new URL(`${origin}${pathname}${search}`); + // Server-side (Node.js): call API directly. Falls back to localhost if + // API_BASE_URL is not in process.env (Vite dev doesn't populate it automatically). + if (typeof window === 'undefined') { + const base = process.env.API_BASE_URL ?? 
'http://localhost:8000'; + return new URL(`${base}${pathname}${search}`).toString(); + } - return requestUrl.toString(); + // Browser-side: route through the SvelteKit proxy so cookies stay on one domain + return `/api${pathname}${search}`; }; const getHeaders = (headers?: HeadersInit): HeadersInit => { @@ -56,22 +60,8 @@ export const customFetch = async ( }; const request = new Request(requestUrl, requestInit); - const retryRequest = request.clone(); - - let response = await f(request); - - if (response.status === 401) { - const refreshUrl = new URL("/refresh", requestUrl).toString(); - const refreshResponse = await f(refreshUrl, { - method: "POST", - credentials: "include", - }); - - if (refreshResponse.ok) { - response = await f(retryRequest); - } - } + const response = await f(request); const data = await getBody(response); return { status: response.status, data, headers: response.headers } as T; diff --git a/webui/src/lib/components/custom/decision-journey/decision-journey.svelte b/webui/src/lib/components/custom/decision-journey/decision-journey.svelte index 900405093..9a05193c4 100644 --- a/webui/src/lib/components/custom/decision-journey/decision-journey.svelte +++ b/webui/src/lib/components/custom/decision-journey/decision-journey.svelte @@ -1,8 +1,9 @@ + + + + + +
+ {#if loading} +
+ Loading map... +
+ {/if} + {#if error} +
+ {error} +
+ {/if} +
+
diff --git a/webui/src/lib/components/ui/topbar/topbar.svelte b/webui/src/lib/components/ui/topbar/topbar.svelte index 498291397..9a6c9a432 100644 --- a/webui/src/lib/components/ui/topbar/topbar.svelte +++ b/webui/src/lib/components/ui/topbar/topbar.svelte @@ -21,11 +21,15 @@ import Problem from '@lucide/svelte/icons/puzzle'; import Archive from '@lucide/svelte/icons/archive'; import HelpCircle from '@lucide/svelte/icons/circle-help'; - import { Button } from '$lib/components/ui/button/index.js'; + import UserPlus from '@lucide/svelte/icons/user-plus'; + import * as DropdownMenu from '$lib/components/ui/dropdown-menu/index.js'; import { goto } from '$app/navigation'; + import { onMount } from 'svelte'; + import { get } from 'svelte/store'; import { auth } from '../../../../stores/auth'; import { derived } from 'svelte/store'; + import { getCurrentUserInfoUserInfoGet } from '$lib/gen/endpoints/DESDEOFastAPI'; import desdeo_logo from '$lib/assets/desdeo_logo.svg'; async function logout() { @@ -43,12 +47,25 @@ goto('/home'); } + onMount(async () => { + if (!get(auth).user) { + const response = await getCurrentUserInfoUserInfoGet(); + if (response.status === 200) { + auth.setAuth('authenticated', response.data); + } + } + }); + const userDisplay = derived(auth, ($auth) => { if ($auth.user) { return `${$auth.user.username} (${$auth.user.role})`; } return ''; }); + + const canManageUsers = derived(auth, ($auth) => + $auth.user?.role === 'analyst' || $auth.user?.role === 'admin' + );
@@ -89,6 +106,16 @@ + {#if $canManageUsers} + + + Users + + {/if} + { + return `http://localhost:8000/users/dms`; +}; + +export const getDmUsersUsersDmsGet = async ( + options?: RequestInit +): Promise => { + return customFetch(getGetDmUsersUsersDmsGetUrl(), { + ...options, + method: 'GET' + }); +}; + /** * Return information about the current user. @@ -2038,9 +2239,10 @@ export const refreshAccessTokenRefreshPost = async ( }; /** - * Add a new user of the role Decision Maker to the database. Requires no login. + * Add a new user of the role Decision Maker to the database. Requires a logged in analyst or an admin. Args: + user: Annotated[User, Depends(get_current_user)]: Logged in user with the role "analyst" or "admin". form_data (Annotated[OAuth2PasswordRequestForm, Depends()]): The user credentials to add to the database. session (Annotated[Session, Depends(get_session)]): the database session. @@ -2048,7 +2250,8 @@ Returns: JSONResponse: A JSON response Raises: - HTTPException: if username is already in use or if saving to the database fails for some reason. + HTTPException: if the logged in user is not an analyst or an admin or if + username is already in use or if saving to the database fails for some reason. * @summary Add New Dm */ export type addNewDmAddNewDmPostResponse200 = { @@ -2197,13 +2400,14 @@ export const addNewAnalystAddNewAnalystPost = async ( }; /** - * Get information on all the current user's problems. + * Get information on problems. Analysts and admins see all users' problems. Args: user (Annotated[User, Depends): the current user. + db_session (Annotated[Session, Depends]): the database session. Returns: - list[ProblemInfoSmall]: a list of information on all the problems. + list[ProblemInfoSmall]: a list of information on the problems. 
* @summary Get Problems */ export type getProblemsProblemAllGetResponse200 = { @@ -2230,13 +2434,14 @@ export const getProblemsProblemAllGet = async ( }; /** - * Get detailed information on all the current user's problems. + * Get detailed information on problems. Analysts and admins see all users' problems. Args: user (Annotated[User, Depends): the current user. + db_session (Annotated[Session, Depends]): the database session. Returns: - list[ProblemInfo]: a list of the detailed information on all the problems. + list[ProblemInfo]: a list of the detailed information on the problems. * @summary Get Problems Info */ export type getProblemsInfoProblemAllInfoGetResponse200 = { @@ -2339,6 +2544,9 @@ export const getProblemProblemProblemIdGet = async ( /** * Delete a problem by its ID. + +Temporary problems (is_temporary=True) can be deleted by their owner. +Non-temporary problems can only be deleted by admin users. * @summary Delete Problem */ export type deleteProblemProblemProblemIdDeleteResponse204 = { @@ -2403,6 +2611,8 @@ export const deleteProblemProblemProblemIdDelete = async ( Args: request (Problem): the JSON representation of the problem. context (Annotated[SessionContext, Depends): the session context. + target_user_id (int | None): if provided, assign the problem to this user instead of + the caller. Only analysts and admins may use this parameter. Note: Users with the role 'guest' may not add new problems. @@ -2474,6 +2684,8 @@ export const addProblemProblemAddPost = async ( Args: json_file (UploadFile): a file in JSON format describing the problem. context (Annotated[SessionContext, Depends): the session context. + target_user_id (int | None): if provided, assign the problem to this user instead of + the caller. Only analysts and admins may use this parameter. Raises: HTTPException: if the provided `json_file` is empty. 
@@ -2566,7 +2778,12 @@ Returns: * @summary Get Metadata */ export type getMetadataProblemGetMetadataPostResponse200 = { - data: (ForestProblemMetaData | RepresentativeNonDominatedSolutions | SolverSelectionMetadata)[]; + data: ( + | ForestProblemMetaData + | RepresentativeNonDominatedSolutions + | SolverSelectionMetadata + | SiteSelectionMetaData + )[]; status: 200; }; @@ -3040,8 +3257,63 @@ export const getProblemJsonProblemProblemIdJsonGet = async ( ); }; +/** + * Create a derived problem with additional EQ constraints fixing variables to specific values. + +The original problem is not modified. The variant is stored as a new ProblemDB row +with parent_problem_id set to the original. + * @summary Create Constrained Variant + */ +export type createConstrainedVariantProblemProblemIdConstrainedVariantPostResponse200 = { + data: ConstrainedVariantResponse; + status: 200; +}; + +export type createConstrainedVariantProblemProblemIdConstrainedVariantPostResponse422 = { + data: HTTPValidationError; + status: 422; +}; + +export type createConstrainedVariantProblemProblemIdConstrainedVariantPostResponseSuccess = + createConstrainedVariantProblemProblemIdConstrainedVariantPostResponse200 & { + headers: Headers; + }; +export type createConstrainedVariantProblemProblemIdConstrainedVariantPostResponseError = + createConstrainedVariantProblemProblemIdConstrainedVariantPostResponse422 & { + headers: Headers; + }; + +export type createConstrainedVariantProblemProblemIdConstrainedVariantPostResponse = + | createConstrainedVariantProblemProblemIdConstrainedVariantPostResponseSuccess + | createConstrainedVariantProblemProblemIdConstrainedVariantPostResponseError; + +export const getCreateConstrainedVariantProblemProblemIdConstrainedVariantPostUrl = ( + problemId: number | null +) => { + return `http://localhost:8000/problem/${problemId}/constrained_variant`; +}; + +export const createConstrainedVariantProblemProblemIdConstrainedVariantPost = async ( + problemId: number | null, + 
constrainedVariantRequest: ConstrainedVariantRequest, + options?: RequestInit +): Promise => { + return customFetch( + getCreateConstrainedVariantProblemProblemIdConstrainedVariantPostUrl(problemId), + { + ...options, + method: 'POST', + headers: { 'Content-Type': 'application/json', ...options?.headers }, + body: JSON.stringify(constrainedVariantRequest) + } + ); +}; + /** * Creates a new interactive session. + +If ``target_user_id`` is provided, the session is created on behalf of that user. +Only analysts and admins may use this parameter. * @summary Create New Session */ export type createNewSessionSessionNewPostResponse200 = { @@ -3102,7 +3374,7 @@ export const createNewSessionSessionNewPost = async ( }; /** - * Return an interactive session with a current user. + * Return an interactive session. Analysts and admins may access any session. * @summary Get Session */ export type getSessionSessionGetSessionIdGetResponse200 = { @@ -3146,7 +3418,7 @@ export const getSessionSessionGetSessionIdGet = async ( }; /** - * Return all interactive sessions of the current user. + * Return interactive sessions. Analysts and admins see all users' sessions; others see only their own. * @summary Get All Sessions */ export type getAllSessionsSessionGetAllGetResponse200 = { @@ -3177,7 +3449,7 @@ export const getAllSessionsSessionGetAllGet = async ( }; /** - * Delete an interactive session and all its related states. + * Delete an interactive session and all its related states. Analysts and admins may delete any session. * @summary Delete Session */ export type deleteSessionSessionSessionIdDeleteResponse204 = { @@ -5352,6 +5624,149 @@ export const getSessionTreeMethodEnautilusSessionTreeSessionIdGet = async ( ); }; +/** + * Run E-NAUTILUS greedily from a state to completion. + +Given a starting state, this endpoint greedily selects the best intermediate +point for the preferred objective at each iteration until iterations_left == 0, +then projects to the Pareto front. 
No database writes are performed. + * @summary Simulate + */ +export type simulateMethodEnautilusSimulatePostResponse200 = { + data: ENautilusSimulateResponse; + status: 200; +}; + +export type simulateMethodEnautilusSimulatePostResponse422 = { + data: HTTPValidationError; + status: 422; +}; + +export type simulateMethodEnautilusSimulatePostResponseSuccess = + simulateMethodEnautilusSimulatePostResponse200 & { + headers: Headers; + }; +export type simulateMethodEnautilusSimulatePostResponseError = + simulateMethodEnautilusSimulatePostResponse422 & { + headers: Headers; + }; + +export type simulateMethodEnautilusSimulatePostResponse = + | simulateMethodEnautilusSimulatePostResponseSuccess + | simulateMethodEnautilusSimulatePostResponseError; + +export const getSimulateMethodEnautilusSimulatePostUrl = () => { + return `http://localhost:8000/method/enautilus/simulate`; +}; + +export const simulateMethodEnautilusSimulatePost = async ( + eNautilusSimulateRequest: ENautilusSimulateRequest, + options?: RequestInit +): Promise => { + return customFetch( + getSimulateMethodEnautilusSimulatePostUrl(), + { + ...options, + method: 'POST', + headers: { 'Content-Type': 'application/json', ...options?.headers }, + body: JSON.stringify(eNautilusSimulateRequest) + } + ); +}; + +/** + * Store site selection metadata for a problem. + +The authenticated user must own the problem. 
+ * @summary Load Metadata + */ +export type loadMetadataSiteSelectionLoadMetadataPostResponse200 = { + data: SiteSelectionMetaData; + status: 200; +}; + +export type loadMetadataSiteSelectionLoadMetadataPostResponse422 = { + data: HTTPValidationError; + status: 422; +}; + +export type loadMetadataSiteSelectionLoadMetadataPostResponseSuccess = + loadMetadataSiteSelectionLoadMetadataPostResponse200 & { + headers: Headers; + }; +export type loadMetadataSiteSelectionLoadMetadataPostResponseError = + loadMetadataSiteSelectionLoadMetadataPostResponse422 & { + headers: Headers; + }; + +export type loadMetadataSiteSelectionLoadMetadataPostResponse = + | loadMetadataSiteSelectionLoadMetadataPostResponseSuccess + | loadMetadataSiteSelectionLoadMetadataPostResponseError; + +export const getLoadMetadataSiteSelectionLoadMetadataPostUrl = () => { + return `http://localhost:8000/site-selection/load_metadata`; +}; + +export const loadMetadataSiteSelectionLoadMetadataPost = async ( + siteSelectionMetaDataRequest: SiteSelectionMetaDataRequest, + options?: RequestInit +): Promise => { + return customFetch( + getLoadMetadataSiteSelectionLoadMetadataPostUrl(), + { + ...options, + method: 'POST', + headers: { 'Content-Type': 'application/json', ...options?.headers }, + body: JSON.stringify(siteSelectionMetaDataRequest) + } + ); +}; + +/** + * Build Leaflet-compatible map data from a site selection solution. + +Reads site selection metadata from the DB and extracts variable values +from the provided solution to determine node colors and coverage edges. 
+ * @summary Build Map + */ +export type buildMapSiteSelectionMapPostResponse200 = { + data: SiteSelectionMapResponse; + status: 200; +}; + +export type buildMapSiteSelectionMapPostResponse422 = { + data: HTTPValidationError; + status: 422; +}; + +export type buildMapSiteSelectionMapPostResponseSuccess = + buildMapSiteSelectionMapPostResponse200 & { + headers: Headers; + }; +export type buildMapSiteSelectionMapPostResponseError = buildMapSiteSelectionMapPostResponse422 & { + headers: Headers; +}; + +export type buildMapSiteSelectionMapPostResponse = + | buildMapSiteSelectionMapPostResponseSuccess + | buildMapSiteSelectionMapPostResponseError; + +export const getBuildMapSiteSelectionMapPostUrl = () => { + return `http://localhost:8000/site-selection/map`; +}; + +export const buildMapSiteSelectionMapPost = async ( + siteSelectionMapRequest: SiteSelectionMapRequest, + options?: RequestInit +): Promise => { + return customFetch(getBuildMapSiteSelectionMapPostUrl(), { + ...options, + method: 'POST', + headers: { 'Content-Type': 'application/json', ...options?.headers }, + body: JSON.stringify(siteSelectionMapRequest) + }); +}; + /** * Vote for a band using this endpoint. 
@@ -5705,3 +6120,27 @@ export const configureGdmGdmScoreBandsConfigurePost = async ( } ); }; + +/** + * @summary Health + */ +export type healthHealthGetResponse200 = { + data: unknown; + status: 200; +}; + +export type healthHealthGetResponseSuccess = healthHealthGetResponse200 & { + headers: Headers; +}; +export type healthHealthGetResponse = healthHealthGetResponseSuccess; + +export const getHealthHealthGetUrl = () => { + return `http://localhost:8000/health`; +}; + +export const healthHealthGet = async (options?: RequestInit): Promise => { + return customFetch(getHealthHealthGetUrl(), { + ...options, + method: 'GET' + }); +}; diff --git a/webui/src/lib/gen/endpoints/DESDEOFastAPIzod.ts b/webui/src/lib/gen/endpoints/DESDEOFastAPIzod.ts index 94d5381c0..9a2e1eed8 100644 --- a/webui/src/lib/gen/endpoints/DESDEOFastAPIzod.ts +++ b/webui/src/lib/gen/endpoints/DESDEOFastAPIzod.ts @@ -7,6 +7,30 @@ */ import * as zod from 'zod'; +/** + * Return all users with the decision maker role. Requires analyst or admin. + +Args: + user (Annotated[User, Depends]): the current user. + session (Annotated[Session, Depends]): the database session. + +Returns: + list[UserPublic]: public information for all DM users. + +Raises: + HTTPException: if the current user is not an analyst or admin. + * @summary Get Dm Users + */ +export const GetDmUsersUsersDmsGetResponseItem = zod + .object({ + username: zod.string(), + id: zod.number(), + role: zod.enum(['guest', 'dm', 'analyst', 'admin']).describe('Possible user roles.'), + group_ids: zod.union([zod.array(zod.number()), zod.null()]) + }) + .describe('The object to handle public user information.'); +export const GetDmUsersUsersDmsGetResponse = zod.array(GetDmUsersUsersDmsGetResponseItem); + /** * Return information about the current user. @@ -80,9 +104,10 @@ Returns: export const RefreshAccessTokenRefreshPostResponse = zod.unknown(); /** - * Add a new user of the role Decision Maker to the database. Requires no login. 
+ * Add a new user of the role Decision Maker to the database. Requires a logged in analyst or an admin. Args: + user: Annotated[User, Depends(get_current_user)]: Logged in user with the role "analyst" or "admin". form_data (Annotated[OAuth2PasswordRequestForm, Depends()]): The user credentials to add to the database. session (Annotated[Session, Depends(get_session)]): the database session. @@ -90,7 +115,8 @@ Returns: JSONResponse: A JSON response Raises: - HTTPException: if username is already in use or if saving to the database fails for some reason. + HTTPException: if the logged in user is not an analyst or an admin or if + username is already in use or if saving to the database fails for some reason. * @summary Add New Dm */ export const AddNewDmAddNewDmPostResponse = zod.unknown(); @@ -114,17 +140,20 @@ Raises: export const AddNewAnalystAddNewAnalystPostResponse = zod.unknown(); /** - * Get information on all the current user's problems. + * Get information on problems. Analysts and admins see all users' problems. Args: user (Annotated[User, Depends): the current user. + db_session (Annotated[Session, Depends]): the database session. Returns: - list[ProblemInfoSmall]: a list of information on all the problems. + list[ProblemInfoSmall]: a list of information on the problems. 
* @summary Get Problems */ export const getProblemsProblemAllGetResponseProblemMetadataOneForestMetadataOneItemMetadataTypeDefault = `forest_problem_metadata`; export const getProblemsProblemAllGetResponseProblemMetadataOneRepresentativeNdMetadataOneItemMetadataTypeDefault = `representative_non_dominated_solutions`; +export const getProblemsProblemAllGetResponseProblemMetadataOneSiteSelectionMetadataOneItemMetadataTypeDefault = `site_selection_metadata`; +export const getProblemsProblemAllGetResponseProblemMetadataOneSiteSelectionMetadataOneItemCoverageThresholdDefault = 15; export const GetProblemsProblemAllGetResponseItem = zod .object({ @@ -199,6 +228,50 @@ export const GetProblemsProblemAllGetResponseItem = zod ) ), zod.null() + ]), + site_selection_metadata: zod.union([ + zod.array( + zod + .object({ + id: zod.union([zod.number(), zod.null()]).optional(), + metadata_id: zod.union([zod.number(), zod.null()]).optional(), + metadata_type: zod + .string() + .default( + getProblemsProblemAllGetResponseProblemMetadataOneSiteSelectionMetadataOneItemMetadataTypeDefault + ), + sites_json: zod + .string() + .describe('JSON array: [{name, node, lat, lon}, ...] one per site variable'), + nodes_json: zod + .string() + .describe('JSON array: [{name, lat, lon, size}, ...] 
one per map node'), + travel_time_matrix_json: zod + .string() + .describe('JSON: 2D list[list[float]], shape [n_nodes, n_nodes]'), + site_variable_symbols: zod + .array(zod.string()) + .describe( + 'Ordered list of site variable symbols matching sites_json positions' + ), + coverage_variable_symbols: zod + .union([zod.array(zod.string()), zod.null()]) + .optional() + .describe( + 'Ordered list of coverage variable symbols matching nodes_json positions, or None' + ), + coverage_threshold: zod + .number() + .default( + getProblemsProblemAllGetResponseProblemMetadataOneSiteSelectionMetadataOneItemCoverageThresholdDefault + ) + .describe('Threshold for coverage edges (e.g., minutes, km)') + }) + .describe( + 'A problem metadata class to hold site selection problem specific information.\n\nStores geographic data and variable mappings needed to visualize binary\nsite-selection solutions on a map (e.g., clinic placement, facility location).' + ) + ), + zod.null() ]) }) .describe('Response model for ProblemMetaData.'), @@ -209,13 +282,14 @@ export const GetProblemsProblemAllGetResponseItem = zod export const GetProblemsProblemAllGetResponse = zod.array(GetProblemsProblemAllGetResponseItem); /** - * Get detailed information on all the current user's problems. + * Get detailed information on problems. Analysts and admins see all users' problems. Args: user (Annotated[User, Depends): the current user. + db_session (Annotated[Session, Depends]): the database session. Returns: - list[ProblemInfo]: a list of the detailed information on all the problems. + list[ProblemInfo]: a list of the detailed information on the problems. 
* @summary Get Problems Info */ export const getProblemsInfoProblemAllInfoGetResponseObjectivesItemMaximizeDefault = false; @@ -235,6 +309,8 @@ export const getProblemsInfoProblemAllInfoGetResponseExtraFuncsOneItemIsTwiceDif export const getProblemsInfoProblemAllInfoGetResponseDiscreteRepresentationOneNonDominatedDefault = false; export const getProblemsInfoProblemAllInfoGetResponseProblemMetadataOneForestMetadataOneItemMetadataTypeDefault = `forest_problem_metadata`; export const getProblemsInfoProblemAllInfoGetResponseProblemMetadataOneRepresentativeNdMetadataOneItemMetadataTypeDefault = `representative_non_dominated_solutions`; +export const getProblemsInfoProblemAllInfoGetResponseProblemMetadataOneSiteSelectionMetadataOneItemMetadataTypeDefault = `site_selection_metadata`; +export const getProblemsInfoProblemAllInfoGetResponseProblemMetadataOneSiteSelectionMetadataOneItemCoverageThresholdDefault = 15; export const GetProblemsInfoProblemAllInfoGetResponseItem = zod .object({ @@ -788,6 +864,50 @@ export const GetProblemsInfoProblemAllInfoGetResponseItem = zod ) ), zod.null() + ]), + site_selection_metadata: zod.union([ + zod.array( + zod + .object({ + id: zod.union([zod.number(), zod.null()]).optional(), + metadata_id: zod.union([zod.number(), zod.null()]).optional(), + metadata_type: zod + .string() + .default( + getProblemsInfoProblemAllInfoGetResponseProblemMetadataOneSiteSelectionMetadataOneItemMetadataTypeDefault + ), + sites_json: zod + .string() + .describe('JSON array: [{name, node, lat, lon}, ...] one per site variable'), + nodes_json: zod + .string() + .describe('JSON array: [{name, lat, lon, size}, ...] 
one per map node'), + travel_time_matrix_json: zod + .string() + .describe('JSON: 2D list[list[float]], shape [n_nodes, n_nodes]'), + site_variable_symbols: zod + .array(zod.string()) + .describe( + 'Ordered list of site variable symbols matching sites_json positions' + ), + coverage_variable_symbols: zod + .union([zod.array(zod.string()), zod.null()]) + .optional() + .describe( + 'Ordered list of coverage variable symbols matching nodes_json positions, or None' + ), + coverage_threshold: zod + .number() + .default( + getProblemsInfoProblemAllInfoGetResponseProblemMetadataOneSiteSelectionMetadataOneItemCoverageThresholdDefault + ) + .describe('Threshold for coverage edges (e.g., minutes, km)') + }) + .describe( + 'A problem metadata class to hold site selection problem specific information.\n\nStores geographic data and variable mappings needed to visualize binary\nsite-selection solutions on a map (e.g., clinic placement, facility location).' + ) + ), + zod.null() ]) }) .describe('Response model for ProblemMetaData.'), @@ -838,6 +958,8 @@ export const getProblemProblemProblemIdGetResponseExtraFuncsOneItemIsTwiceDiffer export const getProblemProblemProblemIdGetResponseDiscreteRepresentationOneNonDominatedDefault = false; export const getProblemProblemProblemIdGetResponseProblemMetadataOneForestMetadataOneItemMetadataTypeDefault = `forest_problem_metadata`; export const getProblemProblemProblemIdGetResponseProblemMetadataOneRepresentativeNdMetadataOneItemMetadataTypeDefault = `representative_non_dominated_solutions`; +export const getProblemProblemProblemIdGetResponseProblemMetadataOneSiteSelectionMetadataOneItemMetadataTypeDefault = `site_selection_metadata`; +export const getProblemProblemProblemIdGetResponseProblemMetadataOneSiteSelectionMetadataOneItemCoverageThresholdDefault = 15; export const GetProblemProblemProblemIdGetResponse = zod .object({ @@ -1391,6 +1513,50 @@ export const GetProblemProblemProblemIdGetResponse = zod ) ), zod.null() + ]), + 
site_selection_metadata: zod.union([ + zod.array( + zod + .object({ + id: zod.union([zod.number(), zod.null()]).optional(), + metadata_id: zod.union([zod.number(), zod.null()]).optional(), + metadata_type: zod + .string() + .default( + getProblemProblemProblemIdGetResponseProblemMetadataOneSiteSelectionMetadataOneItemMetadataTypeDefault + ), + sites_json: zod + .string() + .describe('JSON array: [{name, node, lat, lon}, ...] one per site variable'), + nodes_json: zod + .string() + .describe('JSON array: [{name, lat, lon, size}, ...] one per map node'), + travel_time_matrix_json: zod + .string() + .describe('JSON: 2D list[list[float]], shape [n_nodes, n_nodes]'), + site_variable_symbols: zod + .array(zod.string()) + .describe( + 'Ordered list of site variable symbols matching sites_json positions' + ), + coverage_variable_symbols: zod + .union([zod.array(zod.string()), zod.null()]) + .optional() + .describe( + 'Ordered list of coverage variable symbols matching nodes_json positions, or None' + ), + coverage_threshold: zod + .number() + .default( + getProblemProblemProblemIdGetResponseProblemMetadataOneSiteSelectionMetadataOneItemCoverageThresholdDefault + ) + .describe('Threshold for coverage edges (e.g., minutes, km)') + }) + .describe( + 'A problem metadata class to hold site selection problem specific information.\n\nStores geographic data and variable mappings needed to visualize binary\nsite-selection solutions on a map (e.g., clinic placement, facility location).' + ) + ), + zod.null() ]) }) .describe('Response model for ProblemMetaData.'), @@ -1401,6 +1567,9 @@ export const GetProblemProblemProblemIdGetResponse = zod /** * Delete a problem by its ID. + +Temporary problems (is_temporary=True) can be deleted by their owner. +Non-temporary problems can only be deleted by admin users. 
* @summary Delete Problem */ export const DeleteProblemProblemProblemIdDeleteParams = zod.object({ @@ -1417,6 +1586,8 @@ export const DeleteProblemProblemProblemIdDeleteQueryParams = zod.object({ Args: request (Problem): the JSON representation of the problem. context (Annotated[SessionContext, Depends): the session context. + target_user_id (int | None): if provided, assign the problem to this user instead of + the caller. Only analysts and admins may use this parameter. Note: Users with the role 'guest' may not add new problems. @@ -1429,6 +1600,7 @@ Returns: * @summary Add Problem */ export const AddProblemProblemAddPostQueryParams = zod.object({ + target_user_id: zod.union([zod.number(), zod.null()]).optional(), problem_id: zod.union([zod.number(), zod.null()]).optional() }); @@ -1514,6 +1686,8 @@ export const addProblemProblemAddPostResponseExtraFuncsOneItemIsTwiceDifferentia export const addProblemProblemAddPostResponseDiscreteRepresentationOneNonDominatedDefault = false; export const addProblemProblemAddPostResponseProblemMetadataOneForestMetadataOneItemMetadataTypeDefault = `forest_problem_metadata`; export const addProblemProblemAddPostResponseProblemMetadataOneRepresentativeNdMetadataOneItemMetadataTypeDefault = `representative_non_dominated_solutions`; +export const addProblemProblemAddPostResponseProblemMetadataOneSiteSelectionMetadataOneItemMetadataTypeDefault = `site_selection_metadata`; +export const addProblemProblemAddPostResponseProblemMetadataOneSiteSelectionMetadataOneItemCoverageThresholdDefault = 15; export const AddProblemProblemAddPostResponse = zod .object({ @@ -2059,6 +2233,50 @@ export const AddProblemProblemAddPostResponse = zod ) ), zod.null() + ]), + site_selection_metadata: zod.union([ + zod.array( + zod + .object({ + id: zod.union([zod.number(), zod.null()]).optional(), + metadata_id: zod.union([zod.number(), zod.null()]).optional(), + metadata_type: zod + .string() + .default( + 
addProblemProblemAddPostResponseProblemMetadataOneSiteSelectionMetadataOneItemMetadataTypeDefault + ), + sites_json: zod + .string() + .describe('JSON array: [{name, node, lat, lon}, ...] one per site variable'), + nodes_json: zod + .string() + .describe('JSON array: [{name, lat, lon, size}, ...] one per map node'), + travel_time_matrix_json: zod + .string() + .describe('JSON: 2D list[list[float]], shape [n_nodes, n_nodes]'), + site_variable_symbols: zod + .array(zod.string()) + .describe( + 'Ordered list of site variable symbols matching sites_json positions' + ), + coverage_variable_symbols: zod + .union([zod.array(zod.string()), zod.null()]) + .optional() + .describe( + 'Ordered list of coverage variable symbols matching nodes_json positions, or None' + ), + coverage_threshold: zod + .number() + .default( + addProblemProblemAddPostResponseProblemMetadataOneSiteSelectionMetadataOneItemCoverageThresholdDefault + ) + .describe('Threshold for coverage edges (e.g., minutes, km)') + }) + .describe( + 'A problem metadata class to hold site selection problem specific information.\n\nStores geographic data and variable mappings needed to visualize binary\nsite-selection solutions on a map (e.g., clinic placement, facility location).' + ) + ), + zod.null() ]) }) .describe('Response model for ProblemMetaData.'), @@ -2073,6 +2291,8 @@ export const AddProblemProblemAddPostResponse = zod Args: json_file (UploadFile): a file in JSON format describing the problem. context (Annotated[SessionContext, Depends): the session context. + target_user_id (int | None): if provided, assign the problem to this user instead of + the caller. Only analysts and admins may use this parameter. Raises: HTTPException: if the provided `json_file` is empty. 
@@ -2083,6 +2303,7 @@ Returns: * @summary Add Problem Json */ export const AddProblemJsonProblemAddJsonPostQueryParams = zod.object({ + target_user_id: zod.union([zod.number(), zod.null()]).optional(), problem_id: zod.union([zod.number(), zod.null()]).optional() }); @@ -2175,6 +2396,8 @@ export const addProblemJsonProblemAddJsonPostResponseExtraFuncsOneItemIsTwiceDif export const addProblemJsonProblemAddJsonPostResponseDiscreteRepresentationOneNonDominatedDefault = false; export const addProblemJsonProblemAddJsonPostResponseProblemMetadataOneForestMetadataOneItemMetadataTypeDefault = `forest_problem_metadata`; export const addProblemJsonProblemAddJsonPostResponseProblemMetadataOneRepresentativeNdMetadataOneItemMetadataTypeDefault = `representative_non_dominated_solutions`; +export const addProblemJsonProblemAddJsonPostResponseProblemMetadataOneSiteSelectionMetadataOneItemMetadataTypeDefault = `site_selection_metadata`; +export const addProblemJsonProblemAddJsonPostResponseProblemMetadataOneSiteSelectionMetadataOneItemCoverageThresholdDefault = 15; export const AddProblemJsonProblemAddJsonPostResponse = zod .object({ @@ -2728,6 +2951,50 @@ export const AddProblemJsonProblemAddJsonPostResponse = zod ) ), zod.null() + ]), + site_selection_metadata: zod.union([ + zod.array( + zod + .object({ + id: zod.union([zod.number(), zod.null()]).optional(), + metadata_id: zod.union([zod.number(), zod.null()]).optional(), + metadata_type: zod + .string() + .default( + addProblemJsonProblemAddJsonPostResponseProblemMetadataOneSiteSelectionMetadataOneItemMetadataTypeDefault + ), + sites_json: zod + .string() + .describe('JSON array: [{name, node, lat, lon}, ...] one per site variable'), + nodes_json: zod + .string() + .describe('JSON array: [{name, lat, lon, size}, ...] 
one per map node'), + travel_time_matrix_json: zod + .string() + .describe('JSON: 2D list[list[float]], shape [n_nodes, n_nodes]'), + site_variable_symbols: zod + .array(zod.string()) + .describe( + 'Ordered list of site variable symbols matching sites_json positions' + ), + coverage_variable_symbols: zod + .union([zod.array(zod.string()), zod.null()]) + .optional() + .describe( + 'Ordered list of coverage variable symbols matching nodes_json positions, or None' + ), + coverage_threshold: zod + .number() + .default( + addProblemJsonProblemAddJsonPostResponseProblemMetadataOneSiteSelectionMetadataOneItemCoverageThresholdDefault + ) + .describe('Threshold for coverage edges (e.g., minutes, km)') + }) + .describe( + 'A problem metadata class to hold site selection problem specific information.\n\nStores geographic data and variable mappings needed to visualize binary\nsite-selection solutions on a map (e.g., clinic placement, facility location).' + ) + ), + zod.null() ]) }) .describe('Response model for ProblemMetaData.'), @@ -2767,6 +3034,8 @@ export const GetMetadataProblemGetMetadataPostBody = zod export const getMetadataProblemGetMetadataPostResponseOneMetadataTypeDefault = `forest_problem_metadata`; export const getMetadataProblemGetMetadataPostResponseTwoMetadataTypeDefault = `representative_non_dominated_solutions`; export const getMetadataProblemGetMetadataPostResponseThreeMetadataTypeDefault = `solver_selection_metadata`; +export const getMetadataProblemGetMetadataPostResponseFourMetadataTypeDefault = `site_selection_metadata`; +export const getMetadataProblemGetMetadataPostResponseFourCoverageThresholdDefault = 15; export const GetMetadataProblemGetMetadataPostResponseItem = zod.union([ zod @@ -2821,6 +3090,39 @@ export const GetMetadataProblemGetMetadataPostResponseItem = zod.union([ }) .describe( 'A problem metadata class to store the preferred solver of a problem.\n\nA problem metadata class to store the preferred solver of a problem.\nSee 
desdeo\/tools\/utils.py -> available_solvers for available solvers.' + ), + zod + .object({ + id: zod.union([zod.number(), zod.null()]).optional(), + metadata_id: zod.union([zod.number(), zod.null()]).optional(), + metadata_type: zod + .string() + .default(getMetadataProblemGetMetadataPostResponseFourMetadataTypeDefault), + sites_json: zod + .string() + .describe('JSON array: [{name, node, lat, lon}, ...] one per site variable'), + nodes_json: zod + .string() + .describe('JSON array: [{name, lat, lon, size}, ...] one per map node'), + travel_time_matrix_json: zod + .string() + .describe('JSON: 2D list[list[float]], shape [n_nodes, n_nodes]'), + site_variable_symbols: zod + .array(zod.string()) + .describe('Ordered list of site variable symbols matching sites_json positions'), + coverage_variable_symbols: zod + .union([zod.array(zod.string()), zod.null()]) + .optional() + .describe( + 'Ordered list of coverage variable symbols matching nodes_json positions, or None' + ), + coverage_threshold: zod + .number() + .default(getMetadataProblemGetMetadataPostResponseFourCoverageThresholdDefault) + .describe('Threshold for coverage edges (e.g., minutes, km)') + }) + .describe( + 'A problem metadata class to hold site selection problem specific information.\n\nStores geographic data and variable mappings needed to visualize binary\nsite-selection solutions on a map (e.g., clinic placement, facility location).' ) ]); export const GetMetadataProblemGetMetadataPostResponse = zod.array( @@ -2967,11 +3269,56 @@ export const GetProblemJsonProblemProblemIdJsonGetQueryParams = zod.object({ export const GetProblemJsonProblemProblemIdJsonGetResponse = zod.unknown(); +/** + * Create a derived problem with additional EQ constraints fixing variables to specific values. + +The original problem is not modified. The variant is stored as a new ProblemDB row +with parent_problem_id set to the original. 
+ * @summary Create Constrained Variant + */ +export const CreateConstrainedVariantProblemProblemIdConstrainedVariantPostParams = zod.object({ + problem_id: zod.union([zod.number(), zod.null()]) +}); + +export const createConstrainedVariantProblemProblemIdConstrainedVariantPostBodyIsTemporaryDefault = true; + +export const CreateConstrainedVariantProblemProblemIdConstrainedVariantPostBody = zod + .object({ + variable_fixings: zod.array( + zod + .object({ + variable_symbol: zod.string(), + fixed_value: zod.number(), + constraint_name: zod.union([zod.string(), zod.null()]).optional() + }) + .describe('Fix a single variable to a specific value via an EQ constraint.') + ), + name: zod.union([zod.string(), zod.null()]).optional(), + is_temporary: zod + .boolean() + .default(createConstrainedVariantProblemProblemIdConstrainedVariantPostBodyIsTemporaryDefault) + }) + .describe('Request to create a derived problem with additional EQ constraints fixing variables.'); + +export const CreateConstrainedVariantProblemProblemIdConstrainedVariantPostResponse = zod + .object({ + problem_id: zod.number(), + parent_problem_id: zod.number(), + name: zod.string(), + is_temporary: zod.boolean(), + n_constraints_added: zod.number() + }) + .describe('Response after creating a constrained variant.'); + /** * Creates a new interactive session. + +If ``target_user_id`` is provided, the session is created on behalf of that user. +Only analysts and admins may use this parameter. * @summary Create New Session */ export const CreateNewSessionSessionNewPostQueryParams = zod.object({ + target_user_id: zod.union([zod.number(), zod.null()]).optional(), problem_id: zod.union([zod.number(), zod.null()]).optional() }); @@ -2990,7 +3337,7 @@ export const CreateNewSessionSessionNewPostResponse = zod .describe('The base model for representing interactive sessions.'); /** - * Return an interactive session with a current user. + * Return an interactive session. Analysts and admins may access any session. 
* @summary Get Session */ export const GetSessionSessionGetSessionIdGetParams = zod.object({ @@ -3006,7 +3353,7 @@ export const GetSessionSessionGetSessionIdGetResponse = zod .describe('The base model for representing interactive sessions.'); /** - * Return all interactive sessions of the current user. + * Return interactive sessions. Analysts and admins see all users' sessions; others see only their own. * @summary Get All Sessions */ export const GetAllSessionsSessionGetAllGetResponseItem = zod @@ -3021,7 +3368,7 @@ export const GetAllSessionsSessionGetAllGetResponse = zod.array( ); /** - * Delete an interactive session and all its related states. + * Delete an interactive session and all its related states. Analysts and admins may delete any session. * @summary Delete Session */ export const DeleteSessionSessionSessionIdDeleteParams = zod.object({ @@ -7158,6 +7505,207 @@ export const GetSessionTreeMethodEnautilusSessionTreeSessionIdGetResponse = zod }) .describe('The complete E-NAUTILUS session tree.'); +/** + * Run E-NAUTILUS greedily from a state to completion. + +Given a starting state, this endpoint greedily selects the best intermediate +point for the preferred objective at each iteration until iterations_left == 0, +then projects to the Pareto front. No database writes are performed. 
+ * @summary Simulate + */ +export const simulateMethodEnautilusSimulatePostBodyDeprioritizeDefault = false; +export const simulateMethodEnautilusSimulatePostBodyNumberOfIntermediatePointsDefault = 3; + +export const SimulateMethodEnautilusSimulatePostBody = zod + .object({ + state_id: zod.number().describe('Starting ENautilusState to branch from.'), + preferred_objective: zod.string().describe("Objective symbol to favor (e.g., 'f_1')."), + deprioritize: zod + .boolean() + .default(simulateMethodEnautilusSimulatePostBodyDeprioritizeDefault) + .describe('If True, always pick the WORST value for the objective instead of the best.'), + number_of_intermediate_points: zod + .number() + .default(simulateMethodEnautilusSimulatePostBodyNumberOfIntermediatePointsDefault) + .describe('Number of intermediate points per simulated step.') + }) + .describe('Run E-NAUTILUS greedily from a state to completion.'); + +export const SimulateMethodEnautilusSimulatePostResponse = zod + .object({ + preferred_objective: zod.string(), + steps: zod.array( + zod + .object({ + iteration: zod.number(), + iterations_left: zod.number(), + selected_point: zod + .record(zod.string(), zod.number()) + .describe('The auto-picked intermediate point.'), + selected_point_index: zod.number(), + intermediate_points: zod.array(zod.record(zod.string(), zod.number())), + closeness_measures: zod.array(zod.number()) + }) + .describe('One step in the simulated path.') + ), + final_solution: zod + .object({ + optimal_variables: zod + .record(zod.string(), zod.union([zod.number(), zod.number(), zod.array(zod.unknown())])) + .describe('The optimal decision variables found.'), + optimal_objectives: zod + .record(zod.string(), zod.union([zod.number(), zod.array(zod.number())])) + .describe( + 'The objective function values corresponding to the optimal decision variables found.' 
+ ), + constraint_values: zod + .union([ + zod.record( + zod.string(), + zod.union([ + zod.number(), + zod.number(), + zod.array(zod.number()), + zod.array(zod.unknown()) + ]) + ), + zod.unknown(), + zod.null() + ]) + .optional() + .describe( + 'The constraint values of the problem. A negative value means the constraint is respected, a positive one means it has been breached.' + ), + extra_func_values: zod + .union([ + zod.record(zod.string(), zod.union([zod.number(), zod.array(zod.number())])), + zod.null() + ]) + .optional() + .describe('The extra function values of the problem.'), + scalarization_values: zod + .union([ + zod.record(zod.string(), zod.union([zod.number(), zod.array(zod.number())])), + zod.null() + ]) + .optional() + .describe('The scalarization function values of the problem.'), + lagrange_multipliers: zod + .union([ + zod.record(zod.string(), zod.union([zod.number(), zod.array(zod.number())])), + zod.null() + ]) + .optional() + .describe('The Lagrange multipliers of the problem.'), + success: zod + .boolean() + .describe('A boolean flag indicating whether the optimization was successful or not.'), + message: zod.string().describe('Description of the cause of termination.') + }) + .describe('Projected Pareto-optimal solution.'), + final_intermediate_point: zod.record(zod.string(), zod.number()) + }) + .describe('Result of greedy E-NAUTILUS simulation.'); + +/** + * Store site selection metadata for a problem. + +The authenticated user must own the problem. 
+ * @summary Load Metadata + */ +export const loadMetadataSiteSelectionLoadMetadataPostBodyCoverageThresholdDefault = 15; + +export const LoadMetadataSiteSelectionLoadMetadataPostBody = zod + .object({ + problem_id: zod.number(), + sites: zod.array(zod.record(zod.string(), zod.unknown())), + nodes: zod.array(zod.record(zod.string(), zod.unknown())), + travel_time_matrix: zod.array(zod.array(zod.number())), + site_variable_symbols: zod.array(zod.string()), + coverage_variable_symbols: zod.union([zod.array(zod.string()), zod.null()]).optional(), + coverage_threshold: zod + .number() + .default(loadMetadataSiteSelectionLoadMetadataPostBodyCoverageThresholdDefault) + }) + .describe('Request body for loading site selection metadata.'); + +export const loadMetadataSiteSelectionLoadMetadataPostResponseMetadataTypeDefault = `site_selection_metadata`; +export const loadMetadataSiteSelectionLoadMetadataPostResponseCoverageThresholdDefault = 15; + +export const LoadMetadataSiteSelectionLoadMetadataPostResponse = zod + .object({ + id: zod.union([zod.number(), zod.null()]).optional(), + metadata_id: zod.union([zod.number(), zod.null()]).optional(), + metadata_type: zod + .string() + .default(loadMetadataSiteSelectionLoadMetadataPostResponseMetadataTypeDefault), + sites_json: zod + .string() + .describe('JSON array: [{name, node, lat, lon}, ...] one per site variable'), + nodes_json: zod.string().describe('JSON array: [{name, lat, lon, size}, ...] 
one per map node'), + travel_time_matrix_json: zod + .string() + .describe('JSON: 2D list[list[float]], shape [n_nodes, n_nodes]'), + site_variable_symbols: zod + .array(zod.string()) + .describe('Ordered list of site variable symbols matching sites_json positions'), + coverage_variable_symbols: zod + .union([zod.array(zod.string()), zod.null()]) + .optional() + .describe('Ordered list of coverage variable symbols matching nodes_json positions, or None'), + coverage_threshold: zod + .number() + .default(loadMetadataSiteSelectionLoadMetadataPostResponseCoverageThresholdDefault) + .describe('Threshold for coverage edges (e.g., minutes, km)') + }) + .describe( + 'A problem metadata class to hold site selection problem specific information.\n\nStores geographic data and variable mappings needed to visualize binary\nsite-selection solutions on a map (e.g., clinic placement, facility location).' + ); + +/** + * Build Leaflet-compatible map data from a site selection solution. + +Reads site selection metadata from the DB and extracts variable values +from the provided solution to determine node colors and coverage edges. 
+ * @summary Build Map + */ +export const BuildMapSiteSelectionMapPostBody = zod + .object({ + problem_id: zod.number(), + optimal_variables: zod.record(zod.string(), zod.unknown()) + }) + .describe('Request body for building the site selection map.'); + +export const BuildMapSiteSelectionMapPostResponse = zod + .object({ + nodes: zod.array( + zod + .object({ + name: zod.string(), + lat: zod.number(), + lon: zod.number(), + size: zod.number(), + color: zod.string(), + tooltip: zod.string() + }) + .describe('A node marker on the map.') + ), + edges: zod.array( + zod + .object({ + from_lat: zod.number(), + from_lon: zod.number(), + to_lat: zod.number(), + to_lon: zod.number() + }) + .describe('A coverage connection edge between two nodes.') + ), + center: zod.array(zod.number()), + site_variable_symbols: zod.array(zod.string()), + site_node_names: zod.array(zod.string()) + }) + .describe('Response body for the site selection map endpoint.'); + /** * Vote for a band using this endpoint. @@ -8006,3 +8554,8 @@ export const ConfigureGdmGdmScoreBandsConfigurePostBody = zod .describe('Configuration for the SCORE bands based GDM.'); export const ConfigureGdmGdmScoreBandsConfigurePostResponse = zod.unknown(); + +/** + * @summary Health + */ +export const HealthHealthGetResponse = zod.unknown(); diff --git a/webui/src/routes/api/[...path]/+server.ts b/webui/src/routes/api/[...path]/+server.ts new file mode 100644 index 000000000..b1554751d --- /dev/null +++ b/webui/src/routes/api/[...path]/+server.ts @@ -0,0 +1,38 @@ +import type { RequestHandler } from './$types'; + +const API_BASE_URL = process.env.API_BASE_URL ?? 
'http://localhost:8000'; + +const handler: RequestHandler = async ({ request, params, fetch }) => { + const path = params.path; + const search = new URL(request.url).search; + const upstreamUrl = `${API_BASE_URL}/${path}${search}`; + + const headers = new Headers(request.headers); + headers.delete('host'); + + const upstreamRequest = new Request(upstreamUrl, { + method: request.method, + headers, + body: ['GET', 'HEAD'].includes(request.method) ? undefined : request.body, + // @ts-expect-error — duplex is required for streaming bodies in Node 18+ + duplex: 'half', + }); + + // Use event.fetch (not global fetch) so handleFetch intercepts for 401/refresh + const response = await fetch(upstreamRequest); + + // Forward response headers so getBody can detect content-type correctly + const responseHeaders = new Headers(response.headers); + responseHeaders.delete('set-cookie'); // SvelteKit manages cookies separately + + return new Response(response.body, { + status: response.status, + headers: responseHeaders, + }); +}; + +export const GET = handler; +export const POST = handler; +export const PUT = handler; +export const PATCH = handler; +export const DELETE = handler; diff --git a/webui/src/routes/interactive_methods/E-NAUTILUS/+page.svelte b/webui/src/routes/interactive_methods/E-NAUTILUS/+page.svelte index 928240dbb..0ea7b9306 100644 --- a/webui/src/routes/interactive_methods/E-NAUTILUS/+page.svelte +++ b/webui/src/routes/interactive_methods/E-NAUTILUS/+page.svelte @@ -1,8 +1,9 @@ - - E-NAUTILUS | DESDEO - - + onDestroy(() => { + if (constrainedProblemId != null) { + cleanupConstrainedVariant(constrainedProblemId); + } + }); + {#snippet ColumnHeader({ column, title, colorIdx }: { column: Column; title: string; colorIdx?: number })}
0}
+ {#if adoptedSolution} +
+ Viewing what-if solution + +
+ {/if} {#if finalView === 'map'} -
- Map visualization coming soon. -
+ {#if finalSolution} +
+
+ +
+ {#if constraintSummary || rpmResult || resolveError} +
+
+ {#if constraintSummary} + Site constraints: {constraintSummary} + {/if} + {#if siteFixings.length > 0} + + {/if} + {#if rpmResult} + + {/if} + {#if rpmResult || siteFixings.length > 0} + + {/if} +
+ {#if resolveError} +
{resolveError}
+ {/if} +
+ {/if} +
+ {:else} +
+ No solution available for map. +
+ {/if} {:else if finalView === 'journey'} {#if sessionTree && previous_response?.state_id != null} {:else}
@@ -721,8 +886,8 @@ currentPreferenceValues={[]} previousPreferenceType={''} currentPreferenceType={''} - solutionsObjectiveValues={representativeObjectiveValues.length > 0 ? [representativeObjectiveValues[final_selected_index]] : []} - previousObjectiveValues={[]} + solutionsObjectiveValues={finalSolution ? [objective_keys.map(k => { const v = finalSolution.optimal_objectives[k]; return Array.isArray(v) ? v[0] : v as number; })] : []} + previousObjectiveValues={adoptedSolution && originalFinalSolution ? [objective_keys.map(k => { const v = originalFinalSolution.optimal_objectives[k]; return Array.isArray(v) ? v[0] : v as number; })] : []} externalSelectedIndexes={[0]} /> {/if} @@ -742,6 +907,67 @@ showVariables={true} title="Representative solution" /> + {:else if finalView === 'map' && rpmResult && rpmResult.solver_results.length > 0 && problem_info && originalFinalSolution} +
+
+ + +
+
+ {#if comparisonTab === 'chart'} +
+ { const v = rpmResult.solver_results[0].optimal_objectives[k]; return Array.isArray(v) ? v[0] : v as number; })]} + previousObjectiveValues={[objective_keys.map(k => { const v = originalFinalSolution.optimal_objectives[k]; return Array.isArray(v) ? v[0] : v as number; })]} + externalSelectedIndexes={[0]} + referenceDataLabels={{ previousSolutionLabels: ['Original E-NAUTILUS'] }} + lineLabels={{ '0': 'Constrained' }} + /> +
+ {:else} +
+ + + + + + + + + + + {#each problem_info.objectives as obj, i} + {@const origVal = Array.isArray(originalFinalSolution.optimal_objectives[obj.symbol]) ? originalFinalSolution.optimal_objectives[obj.symbol][0] : originalFinalSolution.optimal_objectives[obj.symbol]} + {@const newVal = Array.isArray(rpmResult.solver_results[0].optimal_objectives[obj.symbol]) ? rpmResult.solver_results[0].optimal_objectives[obj.symbol][0] : rpmResult.solver_results[0].optimal_objectives[obj.symbol]} + {@const delta = (newVal as number) - (origVal as number)} + {@const improved = obj.maximize ? delta > 0 : delta < 0} + + + + + + + {/each} + +
ObjectiveOriginalConstrainedΔ
{obj.name}{formatNumber(origVal as number, 2)}{formatNumber(newVal as number, 2)} + {delta > 0 ? '+' : ''}{formatNumber(delta, 2)} + {improved ? '↑' : delta === 0 ? '' : '↓'} +
+
+ {/if} +
+
{/if} {/snippet} diff --git a/webui/src/routes/interactive_methods/E-NAUTILUS/handler.ts b/webui/src/routes/interactive_methods/E-NAUTILUS/handler.ts index d4d2a74f9..e8483c2cd 100644 --- a/webui/src/routes/interactive_methods/E-NAUTILUS/handler.ts +++ b/webui/src/routes/interactive_methods/E-NAUTILUS/handler.ts @@ -1,6 +1,6 @@ -import type { ENautilusRepresentativeSolutionsResponse, ENautilusSessionTreeResponse, ENautilusStateResponse, ENautilusStepRequest, ENautilusStepResponse, ProblemGetRequest, ProblemInfo } from "$lib/gen/endpoints/DESDEOFastAPI"; -import type { getRepresentativeMethodEnautilusGetRepresentativeStateIdGetResponse, getSessionTreeMethodEnautilusSessionTreeSessionIdGetResponse, getStateMethodEnautilusGetStateStateIdGetResponse, stepMethodEnautilusStepPostResponse } from "$lib/gen/endpoints/DESDEOFastAPI"; -import { stepMethodEnautilusStepPost, getProblemProblemProblemIdGet, getStateMethodEnautilusGetStateStateIdGet, getRepresentativeMethodEnautilusGetRepresentativeStateIdGet, getSessionTreeMethodEnautilusSessionTreeSessionIdGet } from "$lib/gen/endpoints/DESDEOFastAPI"; +import type { ENautilusRepresentativeSolutionsResponse, ENautilusSessionTreeResponse, ENautilusSimulateResponse, ENautilusStateResponse, ENautilusStepRequest, ENautilusStepResponse, ProblemInfo, VariableFixing, RPMState } from "$lib/gen/models"; +import type { getRepresentativeMethodEnautilusGetRepresentativeStateIdGetResponse, getSessionTreeMethodEnautilusSessionTreeSessionIdGetResponse, getStateMethodEnautilusGetStateStateIdGetResponse, simulateMethodEnautilusSimulatePostResponse, stepMethodEnautilusStepPostResponse } from "$lib/gen/endpoints/DESDEOFastAPI"; +import { stepMethodEnautilusStepPost, getProblemProblemProblemIdGet, getStateMethodEnautilusGetStateStateIdGet, getRepresentativeMethodEnautilusGetRepresentativeStateIdGet, getSessionTreeMethodEnautilusSessionTreeSessionIdGet, simulateMethodEnautilusSimulatePost, 
createConstrainedVariantProblemProblemIdConstrainedVariantPost, solveSolutionsMethodRpmSolvePost, deleteProblemProblemProblemIdDelete } from "$lib/gen/endpoints/DESDEOFastAPI"; import type { getProblemProblemProblemIdGetResponse } from "$lib/gen/endpoints/DESDEOFastAPI"; import { fetch_sessions, create_session } from '../../methods/sessions/handler'; export { fetch_sessions, create_session }; @@ -61,8 +61,8 @@ export async function step_enautilus( return {request: request, response: response.data}; } -export async function fetch_problem_info(request: ProblemGetRequest): Promise { - const response: getProblemProblemProblemIdGetResponse = await getProblemProblemProblemIdGet(request.problem_id, undefined); +export async function fetch_problem_info(problem_id: number): Promise { + const response: getProblemProblemProblemIdGetResponse = await getProblemProblemProblemIdGet(problem_id); if (response.status != 200) { console.log("Could not fetch problem info.", response.status); @@ -115,3 +115,112 @@ export async function fetch_session_tree( return response.data; } + +export async function simulate_enautilus( + state_id: number, + preferred_objective: string, + number_of_intermediate_points?: number, +): Promise { + const response: simulateMethodEnautilusSimulatePostResponse = await simulateMethodEnautilusSimulatePost({ + state_id, + preferred_objective, + ...(number_of_intermediate_points != null ? 
{ number_of_intermediate_points } : {}), + }); + + if (response.status !== 200) { + console.error("E-NAUTILUS simulation failed:", response.status); + return null; + } + + return response.data; +} + +export async function resolveWithSiteConstraints( + problem_id: number, + fixings: VariableFixing[], + reference_point: Record, + solver?: string, +): Promise<{ constrained_problem_id: number; rpm_result: RPMState } | null> { + // Step 1: Create constrained variant + const variantResp = await createConstrainedVariantProblemProblemIdConstrainedVariantPost( + problem_id, + { variable_fixings: fixings } + ); + + if (variantResp.status !== 200) { + console.error("Failed to create constrained variant:", variantResp.status); + return null; + } + + const constrained_problem_id = variantResp.data.problem_id; + + // Step 2: Solve with RPM using E-NAUTILUS final objectives as reference point + try { + const rpmResp = await solveSolutionsMethodRpmSolvePost({ + problem_id: constrained_problem_id, + preference: { + preference_type: "reference_point", + aspiration_levels: reference_point, + }, + solver: solver ?? undefined, + }); + + if (rpmResp.status !== 200) { + console.error("RPM solve failed:", rpmResp.status); + // Cleanup on failure + await cleanupConstrainedVariant(constrained_problem_id); + return null; + } + + return { + constrained_problem_id, + rpm_result: rpmResp.data, + }; + } catch (e) { + // Cleanup on error + await cleanupConstrainedVariant(constrained_problem_id); + throw e; + } +} + +/** + * Unroll tensor variables in a SolverResults object. + * RPM returns tensor variables as e.g. {"sv": [[v1], [v2], ...]} for shape [N, 1]. + * The map and other components expect unrolled names: {"sv_1": v1, "sv_2": v2, ...}. + * Scalar values and already-unrolled variables are passed through unchanged. 
+ */ +export function unrollTensorVariables( + variables: Record +): Record { + const result: Record = {}; + for (const [key, value] of Object.entries(variables)) { + if (Array.isArray(value) && value.length > 0 && Array.isArray(value[0])) { + // Nested list (tensor variable) — flatten with 1-based indexing + let flatIdx = 1; + const flatten = (arr: unknown[]): void => { + for (const el of arr) { + if (Array.isArray(el)) { + flatten(el); + } else { + result[`${key}_${flatIdx}`] = el; + flatIdx++; + } + } + }; + flatten(value); + } else { + result[key] = value; + } + } + return result; +} + +export async function cleanupConstrainedVariant(constrained_problem_id: number): Promise { + try { + await deleteProblemProblemProblemIdDelete(constrained_problem_id); + } catch (e) { + console.warn("Failed to clean up constrained variant:", e); + } +} + +export type { VariableFixing }; diff --git a/webui/src/routes/manage-users/+layout.svelte b/webui/src/routes/manage-users/+layout.svelte new file mode 100644 index 000000000..6fd05e623 --- /dev/null +++ b/webui/src/routes/manage-users/+layout.svelte @@ -0,0 +1,10 @@ + + +
+ + +
+ diff --git a/webui/src/routes/manage-users/+page.server.ts b/webui/src/routes/manage-users/+page.server.ts new file mode 100644 index 000000000..a974e54d3 --- /dev/null +++ b/webui/src/routes/manage-users/+page.server.ts @@ -0,0 +1,26 @@ +import { redirect } from '@sveltejs/kit'; +import type { PageServerLoad } from './$types'; +import { getCurrentUserInfoUserInfoGet } from '$lib/gen/endpoints/DESDEOFastAPI'; + +export const load: PageServerLoad = async ({ cookies }) => { + const refreshToken = cookies.get('refresh_token'); + if (!refreshToken) { + return redirect(307, '/home'); + } + + const accessToken = cookies.get('access_token'); + const response = await getCurrentUserInfoUserInfoGet({ + headers: { Authorization: `Bearer ${accessToken}` }, + }); + + if (response.status !== 200) { + return redirect(307, '/home'); + } + + const role = response.data.role; + if (role !== 'analyst' && role !== 'admin') { + return redirect(307, '/dashboard'); + } + + return {}; +}; diff --git a/webui/src/routes/manage-users/+page.svelte b/webui/src/routes/manage-users/+page.svelte new file mode 100644 index 000000000..d5b5ece5b --- /dev/null +++ b/webui/src/routes/manage-users/+page.svelte @@ -0,0 +1,159 @@ + + + + Manage Users | DESDEO + + +
+

Manage Users

+

Create new user accounts.

+ +
+ + + + Add Decision Maker + Create a new decision maker account. + + +
+
+ + +
+
+ + +
+ + {#if dmResult} +

+ {dmResult.message} +

+ {/if} +
+
+
+ + + {#if isAnalystOrAdmin} + + + Add Analyst + Create a new analyst account. + + +
+
+ + +
+
+ + +
+ + {#if analystResult} +

+ {analystResult.message} +

+ {/if} +
+
+
+ {/if} +
+
diff --git a/webui/src/routes/manage-users/handler.ts b/webui/src/routes/manage-users/handler.ts new file mode 100644 index 000000000..b706ad9bb --- /dev/null +++ b/webui/src/routes/manage-users/handler.ts @@ -0,0 +1,53 @@ +import { + addNewDmAddNewDmPost, + addNewAnalystAddNewAnalystPost, +} from '$lib/gen/endpoints/DESDEOFastAPI'; +import type { + BodyAddNewDmAddNewDmPost, + BodyAddNewAnalystAddNewAnalystPost, +} from '$lib/gen/endpoints/DESDEOFastAPI'; + +export type UserCreateResult = { success: boolean; message: string }; + +// Note: /add_new_dm and /add_new_analyst use FastAPI's OAuth2PasswordRequestForm +// (application/x-www-form-urlencoded). Authentication for the analyst endpoint is +// handled transparently by the SvelteKit proxy and hooks.server.ts, which attach +// the access_token cookie to outgoing requests — no manual token passing needed here. + +export async function addDm(username: string, password: string): Promise { + const body: BodyAddNewDmAddNewDmPost = { username, password, scope: '' }; + const response = await addNewDmAddNewDmPost(body); + const status = response.status as number; + + if (status === 401) { + return { success: false, message: 'Unauthorized: analyst or admin role required.' }; + } + if (status === 409) { + return { success: false, message: 'A user with that username already exists.' }; + } + if (status !== 201) { + console.error('addDm failed.', status); + return { success: false, message: 'Failed to create user. Please try again.' }; + } + + return { success: true, message: 'Decision maker created successfully.' }; +} + +export async function addAnalyst(username: string, password: string): Promise { + const body: BodyAddNewAnalystAddNewAnalystPost = { username, password, scope: '' }; + const response = await addNewAnalystAddNewAnalystPost(body); + const status = response.status as number; + + if (status === 401) { + return { success: false, message: 'Unauthorized: analyst or admin role required.' 
}; + } + if (status === 409) { + return { success: false, message: 'A user with that username already exists.' }; + } + if (status !== 201) { + console.error('addAnalyst failed.', status); + return { success: false, message: 'Failed to create analyst. Please try again.' }; + } + + return { success: true, message: 'Analyst created successfully.' }; +} diff --git a/webui/src/routes/manage-users/userSchema.ts b/webui/src/routes/manage-users/userSchema.ts new file mode 100644 index 000000000..7ca19df06 --- /dev/null +++ b/webui/src/routes/manage-users/userSchema.ts @@ -0,0 +1,13 @@ +import { z } from "zod"; + +// The /add_new_dm and /add_new_analyst endpoints use FastAPI's OAuth2PasswordRequestForm, +// which sends data as application/x-www-form-urlencoded — not a JSON body. Orval does not +// generate Zod schemas for form-encoded request bodies, so this schema is defined manually. +// The generated TypeScript interfaces (BodyAddNewDmAddNewDmPost, BodyAddNewAnalystAddNewAnalystPost) +// are still used when constructing the actual API call body. 
+export const newUserSchema = z.object({ + username: z.string().min(1, "Username is required"), + password: z.string().min(1, "Password is required"), +}); + +export type FormMessage = { success: boolean; text: string }; diff --git a/webui/src/routes/methods/initialize/+page.svelte b/webui/src/routes/methods/initialize/+page.svelte index a0595ae18..775a9bf97 100644 --- a/webui/src/routes/methods/initialize/+page.svelte +++ b/webui/src/routes/methods/initialize/+page.svelte @@ -80,8 +80,8 @@ { name: 'E-NAUTILUS', path: '/interactive_methods/E-NAUTILUS', - description: 'Evolutionary NAUTILUS method for MOO.', - preferencesType: ['reference point'] + description: 'E-NAUTILUS method for MOO.', + preferencesType: ['preferred solutions'] }, { name: 'Evolutionary method', diff --git a/webui/src/routes/methods/sessions/+page.svelte b/webui/src/routes/methods/sessions/+page.svelte index cdcd18582..caf9d623b 100644 --- a/webui/src/routes/methods/sessions/+page.svelte +++ b/webui/src/routes/methods/sessions/+page.svelte @@ -1,16 +1,20 @@ @@ -125,11 +169,29 @@ Create a new session - - Optional info/label. - + Optional info/label. + {#if isAnalystOrAdmin && dms.length > 0} +
+ + (selectedTargetDmId = v)} + > + + {selectedTargetDmId ? getOwnerLabel(Number(selectedTargetDmId)) : ($auth.user?.username ?? 'Myself')} + + + {$auth.user?.username ?? 'Myself'} + {#each dms as dm} + {dm.username} + {/each} + + +
+ {/if}
Existing sessions - These are your current interactive sessions. Deleting a session deletes the session and its related states. + {isAnalystOrAdmin + ? 'All interactive sessions. Deleting a session removes it and its related states.' + : 'Your interactive sessions. Deleting a session removes it and its related states.'} - {#if sessions.length === 0} + {#if isAnalystOrAdmin && usersWithSessions.length > 1} +
+ + (selectedFilter = v || 'me')} + > + + {selectedFilter === 'me' + ? ($auth.user?.username ?? 'Myself') + : selectedFilter === 'all' + ? 'All users' + : getOwnerLabel(Number(selectedFilter))} + + + {$auth.user?.username ?? 'Myself'} + All users + {#each usersWithSessions.filter((u) => u.id !== $auth.user?.id) as u} + {u.label} + {/each} + + +
+ {/if} + {#if filteredSessions.length === 0}
No sessions found. Create one above.
{:else} @@ -157,14 +246,20 @@ ID Info + {#if isAnalystOrAdmin} + Owner + {/if} Actions - {#each sessions as s (s.id)} + {#each filteredSessions as s (s.id)} {s.id} - {s.info ?? '—'} + {s.info ?? '—'} + {#if isAnalystOrAdmin} + {getOwnerLabel(s.user_id)} + {/if}
diff --git a/webui/src/routes/methods/sessions/handler.ts b/webui/src/routes/methods/sessions/handler.ts index 28f3a657b..7b24dea6b 100644 --- a/webui/src/routes/methods/sessions/handler.ts +++ b/webui/src/routes/methods/sessions/handler.ts @@ -22,9 +22,15 @@ export async function fetch_sessions(): Promise return response.data; } -export async function create_session(info: string | null): Promise { +export async function create_session( + info: string | null, + targetUserId?: number | null +): Promise { const payload: CreateSessionRequest = { info: info ?? null }; - const response: createNewSessionSessionNewPostResponse = await createNewSessionSessionNewPost(payload); + const response: createNewSessionSessionNewPostResponse = await createNewSessionSessionNewPost( + payload, + targetUserId != null ? { target_user_id: targetUserId } : undefined + ); if (response.status !== 200) { console.error('create_session failed.', response.status); diff --git a/webui/src/routes/problems/+page.svelte b/webui/src/routes/problems/+page.svelte index 9afbb683e..b2bb2f60a 100644 --- a/webui/src/routes/problems/+page.svelte +++ b/webui/src/routes/problems/+page.svelte @@ -43,10 +43,11 @@ import * as Tabs from '$lib/components/ui/tabs'; import * as Table from '$lib/components/ui/table/index.js'; import { Button } from '$lib/components/ui/button'; - import type { ProblemInfo } from '$lib/gen/endpoints/DESDEOFastAPI'; + import type { ProblemInfo, UserPublic } from '$lib/gen/endpoints/DESDEOFastAPI'; + import { auth } from '../../stores/auth'; import { methodSelection } from '../../stores/methodSelection'; import { invalidateAll } from '$app/navigation'; - import { deleteProblem, downloadProblemJson, getAssignedSolver, getAvailableSolvers, assignSolver, addRepresentativeSolutionSet } from './handler'; + import { deleteProblem, downloadProblemJson, getAssignedSolver, getAvailableSolvers, assignSolver, addRepresentativeSolutionSet, uploadSiteSelectionMetadata } from './handler'; import * as 
Select from '$lib/components/ui/select/index.js'; import { Input } from '$lib/components/ui/input/index.js'; import { Label } from '$lib/components/ui/label/index.js'; @@ -58,6 +59,41 @@ let { data }: PageProps = $props(); let problemList = $derived(data.problemList); + let dmUsers = $derived(data.dmUsers as UserPublic[]); + + const isAnalystOrAdmin = $derived( + $auth.user?.role === 'analyst' || $auth.user?.role === 'admin' + ); + + const ownerMap = $derived( + Object.fromEntries(dmUsers.map((u: UserPublic) => [u.id, u.username])) + ); + + function getOwnerLabel(userId: number): string { + if (userId === $auth.user?.id) return $auth.user?.username ?? String(userId); + return ownerMap[userId] ?? `User #${userId}`; + } + + // Unique users that have at least one problem in the list (for the filter dropdown) + const usersWithProblems = $derived( + isAnalystOrAdmin + ? [...new Map(problemList.map((p: ProblemInfo) => [p.user_id, p.user_id])).keys()].map((id) => ({ + id, + label: getOwnerLabel(id) + })) + : [] + ); + + // 'me' = current user, 'all' = everyone, '' = specific user id + let selectedFilter = $state('me'); + + const filteredProblemList = $derived( + selectedFilter === 'all' + ? problemList + : selectedFilter === 'me' + ? 
problemList.filter((p: ProblemInfo) => p.user_id === $auth.user?.id) + : problemList.filter((p: ProblemInfo) => p.user_id === Number(selectedFilter)) + ); let selectedProblem = $state(undefined); let expandedObjectives = $state(new Set()); let expandedConstraints = $state(new Set()); @@ -78,6 +114,76 @@ let importSubmitting = $state(false); let fileInputEl = $state(); + // Site selection map metadata upload state + let mapMetaDialogOpen = $state(false); + let mapMetaError = $state(''); + let mapMetaSubmitting = $state(false); + let mapMetaParsed = $state<{ + sites: { name: string; node: string; lat: number; lon: number }[]; + nodes: { name: string; lat: number; lon: number; size: number }[]; + travel_time_matrix: number[][]; + site_variable_symbols: string[]; + coverage_variable_symbols: string[] | null; + coverage_threshold: number; + } | null>(null); + let mapMetaFileInputEl = $state(); + + function handleMapMetaFileSelected(event: Event) { + const input = event.target as HTMLInputElement; + const file = input.files?.[0]; + if (!file) return; + input.value = ''; + mapMetaError = ''; + + file.text().then((text) => { + try { + const parsed = JSON.parse(text); + if (!parsed.sites || !parsed.nodes || !parsed.travel_time_matrix || !parsed.site_variable_symbols) { + mapMetaError = 'JSON must contain: sites, nodes, travel_time_matrix, site_variable_symbols'; + mapMetaDialogOpen = true; + return; + } + mapMetaParsed = { + sites: parsed.sites, + nodes: parsed.nodes, + travel_time_matrix: parsed.travel_time_matrix, + site_variable_symbols: parsed.site_variable_symbols, + coverage_variable_symbols: parsed.coverage_variable_symbols ?? null, + coverage_threshold: parsed.coverage_threshold ?? 15.0, + }; + mapMetaDialogOpen = true; + } catch (e) { + mapMetaError = `Failed to parse JSON: ${e instanceof Error ? 
e.message : String(e)}`; + mapMetaDialogOpen = true; + } + }); + } + + async function handleMapMetaSubmit() { + if (!selectedProblem || !mapMetaParsed) return; + mapMetaSubmitting = true; + mapMetaError = ''; + try { + const success = await uploadSiteSelectionMetadata({ + problem_id: selectedProblem.id, + ...mapMetaParsed, + }); + if (success) { + mapMetaDialogOpen = false; + mapMetaParsed = null; + const problemId = selectedProblem.id; + await invalidateAll(); + selectedProblem = problemList.find((p) => p.id === problemId); + } else { + mapMetaError = 'Failed to upload metadata. Check the server logs.'; + } + } catch (e) { + mapMetaError = `Error: ${e instanceof Error ? e.message : String(e)}`; + } finally { + mapMetaSubmitting = false; + } + } + function computeIdealNadir(solutionData: { [key: string]: number[] }) { const ideal: { [key: string]: number } = {}; const nadir: { [key: string]: number } = {}; @@ -179,13 +285,12 @@ importSubmitting = true; importError = ''; try { - const success = await addRepresentativeSolutionSet({ + const success = await addRepresentativeSolutionSet(selectedProblem.id, { name: importName, description: importDescription || undefined, solution_data: importSolutionData, ideal: importIdeal, nadir: importNadir, - problem_id: selectedProblem.id }); if (success) { importDialogOpen = false; @@ -263,12 +368,39 @@ of preferences you want to utilize.

{#if problemList.length === 0} -

You have not defined any problems yet.

+

+ {isAnalystOrAdmin ? 'No problems have been defined yet.' : 'You have not defined any problems yet.'} +

{:else} + {#if isAnalystOrAdmin && usersWithProblems.length > 1} +
+ + { + selectedFilter = v || 'me'; + selectedProblem = undefined; + }} + > + + {selectedFilter === 'me' ? ($auth.user?.username ?? 'Myself') : selectedFilter === 'all' ? 'All users' : getOwnerLabel(Number(selectedFilter))} + + + {$auth.user?.username ?? 'Myself'} + All users + {#each usersWithProblems.filter((u) => u.id !== $auth.user?.id) as u} + {u.label} + {/each} + + +
+ {/if} +
{ selectedProblem = e; console.log('Selected problem:', selectedProblem.id); @@ -337,6 +469,13 @@ {/if}
+ {#if isAnalystOrAdmin} +
+
+
Owner
+
{getOwnerLabel(selectedProblem.user_id)}
+
+ {/if}
@@ -769,6 +908,54 @@ {/each}
{/if} + + + {#if selectedProblem.problem_metadata?.site_selection_metadata?.length} +
+

Site Selection Map Metadata

+ {#each selectedProblem.problem_metadata.site_selection_metadata as meta} +
+
+
Sites
+
{meta.site_variable_symbols.length} sites configured
+
+
+
+
Nodes
+
{JSON.parse(meta.nodes_json).length} map nodes
+
+
+
+
Coverage
+
+ {meta.coverage_variable_symbols + ? `${meta.coverage_variable_symbols.length} coverage variables` + : 'No coverage variables'} +
+
+
+
+
Threshold
+
{meta.coverage_threshold}
+
+
+ {/each} +
+ {/if} + + {:else} Select a problem to see details. {/if} @@ -847,4 +1034,66 @@
{/if} + +{#if mapMetaDialogOpen} + + +
+
(mapMetaDialogOpen = false)}>
+
+
+

Upload Site Selection Map Metadata

+

Review the parsed metadata before uploading.

+
+
+ {#if mapMetaError} +

{mapMetaError}

+ {/if} + {#if mapMetaParsed} +
+
+ Sites + {mapMetaParsed.sites.length} +
+
+ Map nodes + {mapMetaParsed.nodes.length} +
+
+ Site variables + {mapMetaParsed.site_variable_symbols.length} +
+
+ Coverage variables + {mapMetaParsed.coverage_variable_symbols?.length ?? 'None'} +
+
+ Coverage threshold + {mapMetaParsed.coverage_threshold} +
+
+ Travel time matrix + {mapMetaParsed.travel_time_matrix.length} x {mapMetaParsed.travel_time_matrix[0]?.length ?? 0} +
+
+
+ Site variable symbols +
{mapMetaParsed.site_variable_symbols.join('\n')}
+
+
+ Node names +
{mapMetaParsed.nodes.map(n => n.name).join('\n')}
+
+ {/if} +
+
+ + +
+
+
+{/if} + diff --git a/webui/src/routes/problems/+page.ts b/webui/src/routes/problems/+page.ts index c788907c3..e22e3dd21 100644 --- a/webui/src/routes/problems/+page.ts +++ b/webui/src/routes/problems/+page.ts @@ -1,14 +1,21 @@ import type { PageLoad } from './$types'; -import { getProblemsInfoProblemAllInfoGet } from '$lib/gen/endpoints/DESDEOFastAPI'; +import { getDmUsersUsersDmsGet, getProblemsInfoProblemAllInfoGet } from '$lib/gen/endpoints/DESDEOFastAPI'; +import type { UserPublic } from '$lib/gen/endpoints/DESDEOFastAPI'; export const load: PageLoad = async () => { - const res = await getProblemsInfoProblemAllInfoGet(); + const [problemsRes, dmsRes] = await Promise.all([ + getProblemsInfoProblemAllInfoGet(), + getDmUsersUsersDmsGet().catch(() => null) + ]); - if (res.status !== 200) { + if (problemsRes.status !== 200) { throw new Error('Failed to fetch problems'); } + const dmUsers: UserPublic[] = dmsRes?.status === 200 ? (dmsRes.data as UserPublic[]) : []; + return { - problemList: res.data + problemList: problemsRes.data, + dmUsers }; }; diff --git a/webui/src/routes/problems/define/+page.svelte b/webui/src/routes/problems/define/+page.svelte index 4b33d2b0f..c88ba3587 100644 --- a/webui/src/routes/problems/define/+page.svelte +++ b/webui/src/routes/problems/define/+page.svelte @@ -17,6 +17,9 @@ VariableTypeEnum } from '$lib/gen/endpoints/DESDEOFastAPI'; import { createProblem, fetchProblem, type ProblemPayload, uploadProblemJson } from './handler'; + import { getDmUsersUsersDmsGet } from '$lib/gen/endpoints/DESDEOFastAPI'; + import type { UserPublic } from '$lib/gen/endpoints/DESDEOFastAPI'; + import { auth } from '../../../stores/auth'; type VariableForm = { name: string; @@ -135,6 +138,13 @@ let jsonFile = $state(null); + let dms = $state([]); + let selectedDmId = $state(''); + const isAnalystOrAdmin = $derived( + $auth.user?.role === 'analyst' || $auth.user?.role === 'admin' + ); + const targetUserId = $derived(selectedDmId ? 
Number(selectedDmId) : null); + const parseNumber = (value: string): number | null => { if (value.trim() === '') return null; const parsed = Number(value); @@ -354,7 +364,7 @@ } isSubmitting = true; - const response = await createProblem(buildPayload()); + const response = await createProblem(buildPayload(), targetUserId); isSubmitting = false; if (!response.ok) { @@ -378,7 +388,7 @@ } isSubmitting = true; - const response = await uploadProblemJson({ json_file: jsonFile }); + const response = await uploadProblemJson({ json_file: jsonFile }, targetUserId); isSubmitting = false; if (!response.ok) { @@ -513,6 +523,13 @@ }; onMount(async () => { + if ($auth.user?.role === 'analyst' || $auth.user?.role === 'admin') { + const dmResponse = await getDmUsersUsersDmsGet(); + if (dmResponse.status === 200) { + dms = dmResponse.data; + } + } + const editId = page.url.searchParams.get('edit'); if (!editId) return; @@ -538,6 +555,25 @@

Problem Definition

+ {#if isAnalystOrAdmin && dms.length > 0} +
+ + + + {selectedDmId + ? (dms.find((dm) => String(dm.id) === selectedDmId)?.username ?? 'Unknown') + : 'Myself (default)'} + + + Myself (default) + {#each dms as dm} + {dm.username} + {/each} + + +
+ {/if} + (mode = value)}> Define via Form diff --git a/webui/src/routes/problems/define/handler.ts b/webui/src/routes/problems/define/handler.ts index 6ba39d8e2..ec61e6e62 100644 --- a/webui/src/routes/problems/define/handler.ts +++ b/webui/src/routes/problems/define/handler.ts @@ -5,7 +5,7 @@ import type { ObjectiveDB, ProblemInfo, VariableDB -} from '$lib/gen/endpoints/DESDEOFastAPI'; +} from '$lib/gen/models'; import { addProblemJsonProblemAddJsonPost, addProblemProblemAddPost, getProblemProblemProblemIdGet } from '$lib/gen/endpoints/DESDEOFastAPI'; export type ObjectivePayload = Omit & { @@ -33,9 +33,13 @@ export type ProblemResponse = | { ok: true; data: ProblemInfo } | { ok: false; error: string; status?: number }; -export async function createProblem(payload: ProblemPayload): Promise { +export async function createProblem(payload: ProblemPayload, targetUserId?: number | null): Promise { try { - const response = await addProblemProblemAddPost(payload as any); + const params = targetUserId != null ? { target_user_id: targetUserId } : undefined; + const response = await addProblemProblemAddPost({ + body: JSON.stringify(payload), + headers: { 'Content-Type': 'application/json' } + }, params); if (response.status !== 200) { return { ok: false, error: 'Failed to create problem.', status: response.status }; @@ -49,10 +53,12 @@ export async function createProblem(payload: ProblemPayload): Promise { try { - const response = await addProblemJsonProblemAddJsonPost(body); + const params = targetUserId != null ? 
{ target_user_id: targetUserId } : undefined; + const response = await addProblemJsonProblemAddJsonPost(body, params); if (response.status !== 200) { return { ok: false, error: 'Failed to upload problem JSON.', status: response.status }; @@ -67,7 +73,7 @@ export async function uploadProblemJson( export async function fetchProblem(problemId: number): Promise { try { - const response = await getProblemProblemProblemIdGet(problemId, undefined); + const response = await getProblemProblemProblemIdGet(problemId); if (response.status !== 200) { return { ok: false, error: 'Failed to fetch problem.', status: response.status }; diff --git a/webui/src/routes/problems/handler.ts b/webui/src/routes/problems/handler.ts index 38209a35a..dedea5d92 100644 --- a/webui/src/routes/problems/handler.ts +++ b/webui/src/routes/problems/handler.ts @@ -6,7 +6,8 @@ import { getMetadataProblemGetMetadataPost } from '$lib/gen/endpoints/DESDEOFast import { getAvailableSolversProblemAssignSolverGet } from '$lib/gen/endpoints/DESDEOFastAPI'; import { selectSolverProblemAssignSolverPost } from '$lib/gen/endpoints/DESDEOFastAPI'; import { addRepresentativeSolutionSetProblemProblemIdAddRepresentativeSolutionSetPost } from '$lib/gen/endpoints/DESDEOFastAPI'; -import type { RepresentativeSolutionSetBase } from '$lib/gen/endpoints/DESDEOFastAPI'; +import { loadMetadataSiteSelectionLoadMetadataPost } from '$lib/gen/endpoints/DESDEOFastAPI'; +import type { RepresentativeSolutionSetBase, SiteSelectionMetaDataRequest } from '$lib/gen/models'; export async function deleteProblem(problemId: number): Promise { const response: deleteProblemProblemProblemIdDeleteResponse = @@ -76,12 +77,25 @@ export async function assignSolver(problemId: number, solver: string): Promise { + const response = await loadMetadataSiteSelectionLoadMetadataPost(payload); + + if (response.status !== 200) { + console.error('Failed to upload site selection metadata:', response.status); + return false; + } + + return true; +} + export 
async function addRepresentativeSolutionSet( - payload: RepresentativeSolutionSetBase & { problem_id: number } + problemId: number, + payload: RepresentativeSolutionSetBase ): Promise { - const { problem_id, ...body } = payload; const response = - await addRepresentativeSolutionSetProblemProblemIdAddRepresentativeSolutionSetPost(problem_id, body); + await addRepresentativeSolutionSetProblemProblemIdAddRepresentativeSolutionSetPost(problemId, payload); if (response.status !== 200) { console.error('Failed to add representative solution set:', response.status); diff --git a/webui/vite.config.ts b/webui/vite.config.ts index c800a69cd..3356e81f0 100644 --- a/webui/vite.config.ts +++ b/webui/vite.config.ts @@ -13,15 +13,5 @@ export default defineConfig({ }, optimizeDeps: { exclude: ['mathlive'] - }, - server: { - proxy: { - '/api': { - target: 'http://127.0.0.1:8000', - changeOrigin: true, - secure: false, - rewrite: (path) => path.replace(/^\/api/, '') - } - } } -}); \ No newline at end of file +});