diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 0000000..4ba1992 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,7 @@ +{ + "protoc": { + "options": [ + "--proto_path=${workspaceRoot}/packages/grpc/proto" + ] + } +} \ No newline at end of file diff --git a/README.md b/README.md index 7732230..eda3d52 100644 --- a/README.md +++ b/README.md @@ -38,7 +38,7 @@ The objective of this project is to provide nothing more than a basic interface - `DATABASE_URL` Postgres connection URL. Example: `postgresql://user:pass@host/db` - `JOBBER_NAME` The name of your jobber instance, should be unique per host. - `MANAGER_PORT` Port that runner-manager server operates on. Default: 5211 -- `MANAGER_HOST` Host that runner-manager server operates on. Default: hostname() +- `MANAGER_GRPC_HOST` Host that runner-manager server operates on. Default: hostname() - `STARTUP_USERNAME` The administrator account username. Created at every startup. Has full permissions. If you change this after a previous start, it will create a NEW account, not update the previous account. - `STARTUP_PASSWORD` The administrator account password.
diff --git a/packages/server/bruno/DELETE Job -> (name) -> Environment -> (name).bru b/bruno/DELETE Job -> (name) -> Environment -> (name).bru similarity index 100% rename from packages/server/bruno/DELETE Job -> (name) -> Environment -> (name).bru rename to bruno/DELETE Job -> (name) -> Environment -> (name).bru diff --git a/packages/server/bruno/DELETE Job -> (name).bru b/bruno/DELETE Job -> (name).bru similarity index 100% rename from packages/server/bruno/DELETE Job -> (name).bru rename to bruno/DELETE Job -> (name).bru diff --git a/packages/server/bruno/GET Job -> (name) -> Actions -> (id) -> Runners.bru b/bruno/GET Job -> (name) -> Actions -> (id) -> Runners.bru similarity index 100% rename from packages/server/bruno/GET Job -> (name) -> Actions -> (id) -> Runners.bru rename to bruno/GET Job -> (name) -> Actions -> (id) -> Runners.bru diff --git a/packages/server/bruno/GET Job -> (name) -> Actions.bru b/bruno/GET Job -> (name) -> Actions.bru similarity index 100% rename from packages/server/bruno/GET Job -> (name) -> Actions.bru rename to bruno/GET Job -> (name) -> Actions.bru diff --git a/packages/server/bruno/GET Job -> (name) -> Actions:latest.bru b/bruno/GET Job -> (name) -> Actions:latest.bru similarity index 100% rename from packages/server/bruno/GET Job -> (name) -> Actions:latest.bru rename to bruno/GET Job -> (name) -> Actions:latest.bru diff --git a/packages/server/bruno/GET Job -> (name) -> Debug.bru b/bruno/GET Job -> (name) -> Debug.bru similarity index 100% rename from packages/server/bruno/GET Job -> (name) -> Debug.bru rename to bruno/GET Job -> (name) -> Debug.bru diff --git a/packages/server/bruno/GET Job -> (name) -> Environment.bru b/bruno/GET Job -> (name) -> Environment.bru similarity index 100% rename from packages/server/bruno/GET Job -> (name) -> Environment.bru rename to bruno/GET Job -> (name) -> Environment.bru diff --git a/packages/server/bruno/GET Job -> (name) -> Logs.bru b/bruno/GET Job -> (name) -> Logs.bru similarity index 
100% rename from packages/server/bruno/GET Job -> (name) -> Logs.bru rename to bruno/GET Job -> (name) -> Logs.bru diff --git a/packages/server/bruno/GET Job -> (name) -> Runners.bru b/bruno/GET Job -> (name) -> Runners.bru similarity index 100% rename from packages/server/bruno/GET Job -> (name) -> Runners.bru rename to bruno/GET Job -> (name) -> Runners.bru diff --git a/packages/server/bruno/GET Job -> (name) -> Triggers.bru b/bruno/GET Job -> (name) -> Triggers.bru similarity index 100% rename from packages/server/bruno/GET Job -> (name) -> Triggers.bru rename to bruno/GET Job -> (name) -> Triggers.bru diff --git a/packages/server/bruno/GET Job -> (name) -> Triggers:latest.bru b/bruno/GET Job -> (name) -> Triggers:latest.bru similarity index 100% rename from packages/server/bruno/GET Job -> (name) -> Triggers:latest.bru rename to bruno/GET Job -> (name) -> Triggers:latest.bru diff --git a/packages/server/bruno/GET Job > (name).bru b/bruno/GET Job > (name).bru similarity index 100% rename from packages/server/bruno/GET Job > (name).bru rename to bruno/GET Job > (name).bru diff --git a/packages/server/bruno/GET Jobs.bru b/bruno/GET Jobs.bru similarity index 100% rename from packages/server/bruno/GET Jobs.bru rename to bruno/GET Jobs.bru diff --git a/packages/server/bruno/POST Job -> (name) -> Environment -> (name).bru b/bruno/POST Job -> (name) -> Environment -> (name).bru similarity index 100% rename from packages/server/bruno/POST Job -> (name) -> Environment -> (name).bru rename to bruno/POST Job -> (name) -> Environment -> (name).bru diff --git a/packages/server/bruno/POST Jobs -> Publish.bru b/bruno/POST Jobs -> Publish.bru similarity index 100% rename from packages/server/bruno/POST Jobs -> Publish.bru rename to bruno/POST Jobs -> Publish.bru diff --git a/packages/server/bruno/PUT Job -> (name).bru b/bruno/PUT Job -> (name).bru similarity index 100% rename from packages/server/bruno/PUT Job -> (name).bru rename to bruno/PUT Job -> (name).bru diff --git 
a/packages/server/bruno/bruno.json b/bruno/bruno.json similarity index 100% rename from packages/server/bruno/bruno.json rename to bruno/bruno.json diff --git a/packages/server/bruno/environments/local.bru b/bruno/environments/local.bru similarity index 100% rename from packages/server/bruno/environments/local.bru rename to bruno/environments/local.bru diff --git a/docker-compose.yaml b/docker-compose.yaml index b3b04b3..5abf25a 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -20,6 +20,7 @@ services: - "jobber-db" volumes: - /var/run/docker.sock:/var/run/docker.sock + - /tmp/jobber-env:/tmp/jobber-env - jobber-data:/app/config environment: DATABASE_URL: postgresql://pg-user:pg-pass@jobber-db/database diff --git a/docker/gateway.Dockerfile b/docker/gateway.Dockerfile new file mode 100644 index 0000000..0e325de --- /dev/null +++ b/docker/gateway.Dockerfile @@ -0,0 +1,22 @@ +FROM node:24-slim AS base +ENV PNPM_HOME="/pnpm" +ENV PATH="$PNPM_HOME:$PATH" +WORKDIR /app +RUN corepack enable && corepack prepare pnpm@10.15.1 --activate + + +FROM base AS build +COPY . /repo +WORKDIR /repo +RUN apt update \ + && apt install protobuf-compiler --no-install-recommends -y \ + && pnpm install --frozen-lockfile \ + && pnpm run -r build \ + && pnpm --prod --filter=@jobber/gateway --node-linker hoisted deploy /app + + + +FROM base +WORKDIR /app +COPY --from=build /app /app +ENTRYPOINT ["node", "./dist/index.js"] \ No newline at end of file diff --git a/docker/node-20.Dockerfile b/docker/node-20.Dockerfile index 7b57a84..3b5db6a 100644 --- a/docker/node-20.Dockerfile +++ b/docker/node-20.Dockerfile @@ -13,7 +13,9 @@ RUN apt update \ FROM base AS build COPY . 
/repo WORKDIR /repo -RUN pnpm install --frozen-lockfile \ +RUN apt update \ + && apt install protobuf-compiler --no-install-recommends -y \ + && pnpm install --frozen-lockfile \ && pnpm run -r build \ && pnpm --prod --filter=@jobber/runner-node-entrypoint --node-linker hoisted deploy /app @@ -21,4 +23,4 @@ RUN pnpm install --frozen-lockfile \ FROM base WORKDIR /app -COPY --from=build /app/dist/index.js /app/jobber-entrypoint.js +COPY --from=build /app/dist/esm/ /app/ diff --git a/docker/node-22.Dockerfile b/docker/node-22.Dockerfile index 8344a12..c411fe5 100644 --- a/docker/node-22.Dockerfile +++ b/docker/node-22.Dockerfile @@ -13,7 +13,9 @@ RUN apt update \ FROM base AS build COPY . /repo WORKDIR /repo -RUN pnpm install --frozen-lockfile \ +RUN apt update \ + && apt install protobuf-compiler --no-install-recommends -y \ + && pnpm install --frozen-lockfile \ && pnpm run -r build \ && pnpm --prod --filter=@jobber/runner-node-entrypoint --node-linker hoisted deploy /app @@ -21,4 +23,4 @@ RUN pnpm install --frozen-lockfile \ FROM base WORKDIR /app -COPY --from=build /app/dist/index.js /app/jobber-entrypoint.js +COPY --from=build /app/dist/esm/ /app/ diff --git a/docker/node-24.Dockerfile b/docker/node-24.Dockerfile index 16ac7f0..c65ea09 100644 --- a/docker/node-24.Dockerfile +++ b/docker/node-24.Dockerfile @@ -13,7 +13,9 @@ RUN apt update \ FROM base AS build COPY . 
/repo WORKDIR /repo -RUN pnpm install --frozen-lockfile \ +RUN apt update \ + && apt install protobuf-compiler --no-install-recommends -y \ + && pnpm install --frozen-lockfile \ && pnpm run -r build \ && pnpm --prod --filter=@jobber/runner-node-entrypoint --node-linker hoisted deploy /app @@ -21,4 +23,4 @@ RUN pnpm install --frozen-lockfile \ FROM base WORKDIR /app -COPY --from=build /app/dist/index.js /app/jobber-entrypoint.js +COPY --from=build /app/dist/esm/ /app/ diff --git a/docker/server.Dockerfile b/docker/server.Dockerfile index 8afd6da..8ef83df 100644 --- a/docker/server.Dockerfile +++ b/docker/server.Dockerfile @@ -27,7 +27,9 @@ FROM base AS build COPY . /repo WORKDIR /repo -RUN pnpm install --frozen-lockfile \ +RUN apt update \ + && apt install protobuf-compiler --no-install-recommends -y \ + && pnpm install --frozen-lockfile \ && pnpm run -r build \ && pnpm --prod --filter=@jobber/server --node-linker hoisted deploy /app \ && mkdir /app/public/ \ diff --git a/docs/environment-variables.md b/docs/environment-variables.md index b8372b1..43b3d5d 100644 --- a/docs/environment-variables.md +++ b/docs/environment-variables.md @@ -28,7 +28,7 @@ This document describes all environment variables used to configure Jobber. - `MANAGER_PORT` - Port for the runner-manager server **Default:** `5211` -- `MANAGER_HOST` - Host address for the runner-manager server +- `MANAGER_GRPC_HOST` - Host address for the runner-manager server **Default:** `hostname()` ## Authentication @@ -58,7 +58,7 @@ This document describes all environment variables used to configure Jobber. 
**Default:** eithan1231/runner-node-20:latest - `RUNNER_CONTAINER_DOCKER_NETWORK` - Docker network for runner containers - **Note:** Must have access to `MANAGER_HOST` + **Note:** Must have access to `MANAGER_GRPC_HOST` - `RUNNER_ALLOW_DOCKER_ARGUMENT_TYPES` - Permitted Docker argument types for projects **Values:** `volumes`, `networks`, `labels`, `memoryLimit`, `directPassthroughArguments` diff --git a/docs/permissions.md b/docs/permissions.md index ad24df2..20a1e16 100644 --- a/docs/permissions.md +++ b/docs/permissions.md @@ -28,7 +28,7 @@ Example resource pattern: `job/*/actions` #### Job -> Store -- `job/:jobId/store/:storeId` READ/DELETE +- `job/:jobId/store` READ/WRITE/DELETE #### Job -> Triggers diff --git a/e2e/config/nanomq.conf b/e2e/config/nanomq.conf new file mode 100644 index 0000000..48291c0 --- /dev/null +++ b/e2e/config/nanomq.conf @@ -0,0 +1,3 @@ +listeners.tcp { + bind = "0.0.0.0:1883" +} diff --git a/e2e/docker-compose.yaml b/e2e/docker-compose.yaml new file mode 100644 index 0000000..6140039 --- /dev/null +++ b/e2e/docker-compose.yaml @@ -0,0 +1,106 @@ +networks: + internal: + driver: bridge + runner: + driver: bridge + mqtt-network: + driver: bridge + +name: "jobber-test" + +services: + postgres: + image: postgres:14 + restart: unless-stopped + networks: + - internal + environment: + - POSTGRES_USER=jobber-username + - POSTGRES_PASSWORD=jobber-password + - POSTGRES_DB=jobber-database + + mqtt: + image: emqx/nanomq:0.24.6-slim + restart: unless-stopped + networks: + - mqtt-network + volumes: + - ./config/nanomq.conf:/etc/nanomq.conf:ro + + server: + depends_on: + - postgres + - mqtt + labels: + - "jobber-discovery=server" + image: jobber-e2e-server + build: + context: ../ + dockerfile: docker/server.Dockerfile + restart: unless-stopped + ports: + - 5000:5000 + networks: + - internal + - runner + - mqtt-network + volumes: + - /tmp/jobber-env:/tmp/jobber-env + - /var/run/docker.sock:/var/run/docker.sock + environment: + SECRET_PASSPHRASE: 
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" + + JOBBER_NAME: "JobberE2E" + + ALLOWED_HOSTS: "localhost,localhost:5000,127.0.0.1:5000,server:5000" + + DATABASE_URL: "postgresql://jobber-username:jobber-password@postgres:5432/jobber-database" + + ALLOW_PUBLIC_REGISTRATION: "true" + AUTH_PUBLIC_LOGIN_ENABLED: "true" + + OAUTH_ISSUER: "http://localhost:5000" + + DEBUG_HTTP: "true" + DEBUG_RUNNER: "true" + + API_PORT: "5000" + + MANAGER_GRPC_PORT: "5001" + MANAGER_GRPC_HOST: "server" + + # "{compose_name}_{network_name}" is the convention for compose networks + RUNNER_CONTAINER_DOCKER_NETWORK: "jobber-test_runner" + RUNNER_ALLOW_DOCKER_ARGUMENT_TYPES: "volumes,networks,labels,memoryLimit,directPassthroughArguments" + RUNNER_ALLOW_ARGUMENT_DIRECT_PASSTHROUGH: "true" + + RUNNER_IMAGE_NODE20_URL: "jobber-e2e-runner:20-latest" + RUNNER_IMAGE_NODE22_URL: "jobber-e2e-runner:22-latest" + RUNNER_IMAGE_NODE24_URL: "jobber-e2e-runner:24-latest" + + # Seed data required for e2e tests + # NOTE: This will be INSECURE! Do not use this format for production! + SEED: '{"oauth-clients": {"clientId": "e2e-client", "clientSecret": "secret-secret-secret-secret-secret"}, "api-tokens": [{"token": "super-power-anonymous-token", "permissions": { "type": "all" }}]}' + + gateway: + depends_on: + - server + image: jobber-e2e-gateway + build: + context: ../ + dockerfile: docker/gateway.Dockerfile + restart: unless-stopped + networks: + - runner + environment: + PORT: "5002" + + GRPC_ENDPOINT: "https://server:5001" + + OIDC_ISSUER_URL: "http://localhost:5000" + OIDC_DISCOVERY_URL: "http://server:5000/.well-known/openid-configuration" + + OAUTH_CLIENT_ID: "e2e-client" + OAUTH_CLIENT_SECRET: "secret-secret-secret-secret-secret" + ports: + - 5002:5002 diff --git a/e2e/test.sh b/e2e/test.sh new file mode 100755 index 0000000..ad21b77 --- /dev/null +++ b/e2e/test.sh @@ -0,0 +1,18 @@ +#!/bin/bash + + +sudo docker build -f docker/node-20.Dockerfile -t jobber-e2e-runner:20-latest . 
+sudo docker build -f docker/node-22.Dockerfile -t jobber-e2e-runner:22-latest . +sudo docker build -f docker/node-24.Dockerfile -t jobber-e2e-runner:24-latest . + +docker compose -f e2e/docker-compose.yaml up -d --build + +# sleep a few seconds to allow processes to startup +sleep 5 + +# Run tests +bash e2e/tests/test-runner-basics.sh +bash e2e/tests/test-common-js.sh +bash e2e/tests/test-run-once.sh + +docker compose -f e2e/docker-compose.yaml down \ No newline at end of file diff --git a/e2e/tests/test-common-js.sh b/e2e/tests/test-common-js.sh new file mode 100644 index 0000000..dec439e --- /dev/null +++ b/e2e/tests/test-common-js.sh @@ -0,0 +1,16 @@ +#!/bin/bash + +cd examples/http-javascript-cjs +./publish.sh super-power-anonymous-token http://localhost:5000 +cd ../../ + +sleep 3 + +RESPONSE=$(curl -s "http://localhost:5002/http-javascript-cjs") +if echo "$RESPONSE" | grep -q "path.join example from commonjs"; then + echo "PASS: Successfully received expected response from http-javascript-cjs job" +else + echo "FAIL: Failed to receive expected response from http-javascript-cjs job" + echo "Actual response: $RESPONSE" + exit 1 +fi diff --git a/e2e/tests/test-run-once.sh b/e2e/tests/test-run-once.sh new file mode 100644 index 0000000..e074921 --- /dev/null +++ b/e2e/tests/test-run-once.sh @@ -0,0 +1,16 @@ +#!/bin/bash + +cd examples/http-javascript-run-once +./publish.sh super-power-anonymous-token http://localhost:5000 +cd ../../ + +sleep 3 + +RESPONSE=$(curl -s "http://localhost:5002/http-javascript-run-once") +if echo "$RESPONSE" | grep -q "run-once-response"; then + echo "PASS: Successfully received expected response from http-javascript-run-once job" +else + echo "FAIL: Failed to receive expected response from http-javascript-run-once job" + echo "Actual response: $RESPONSE" + exit 1 +fi diff --git a/e2e/tests/test-runner-basics.sh b/e2e/tests/test-runner-basics.sh new file mode 100644 index 0000000..599d629 --- /dev/null +++ 
b/e2e/tests/test-runner-basics.sh @@ -0,0 +1,65 @@ +#!/bin/bash + +cd examples/e2e +./publish.sh super-power-anonymous-token http://localhost:5000 +cd ../../ + +sleep 3 + +curl -s "http://localhost:5002/e2e?action=set-state&value=my-test-value" > /dev/null +GET_STORE_RESPONSE=$(curl -s "http://localhost:5002/e2e?action=get-state") +if echo "$GET_STORE_RESPONSE" | grep -q "my-test-value"; then + echo "PASS: Successfully set and got state value" +else + echo "FAIL: Failed to set and get state value" + echo "Actual response: $GET_STORE_RESPONSE" + exit 1 +fi + + +sleep 1 + +curl -s "http://localhost:5002/e2e?action=mqtt" > /dev/null + +sleep 1 + + +RESPONSE=$(curl -s "http://localhost:5002/e2e") + +# Check that bootstrap is true + +if echo "$RESPONSE" | grep -q '"bootstrap":true'; then + echo "PASS: Bootstrap is true" +else + echo "FAIL: Bootstrap is not true" + echo "Actual response: $RESPONSE" + exit 1 +fi + +if echo "$RESPONSE" | grep -q '"lastScheduleRecent":true'; then + echo "PASS: Schedule Recent is true" +else + echo "FAIL: Schedule Recent is not true" + echo "Actual response: $RESPONSE" + exit 1 +fi + + +if echo "$RESPONSE" | grep -q '"lastMqttRecent":true'; then + echo "PASS: MQTT Recent is true" +else + echo "FAIL: MQTT Recent is not true" + echo "Actual response: $RESPONSE" + exit 1 +fi + + +# hang response status code should be 204 +HANG_RESPONSE=$(curl -s "http://localhost:5002/e2e?action=hang") +if [ -z "$HANG_RESPONSE" ]; then + echo "PASS: Hang response is empty as expected" +else + echo "FAIL: Hang response is not empty" + echo "Actual response: $HANG_RESPONSE" + exit 1 +fi diff --git a/examples/e2e/package-lock.json b/examples/e2e/package-lock.json new file mode 100644 index 0000000..9579ea4 --- /dev/null +++ b/examples/e2e/package-lock.json @@ -0,0 +1,13 @@ +{ + "name": "e2e", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "e2e", + "version": "1.0.0", + "license": "SEE LICENSE IN ../../" + } + } 
+} diff --git a/examples/e2e/package.json b/examples/e2e/package.json new file mode 100644 index 0000000..dcb06db --- /dev/null +++ b/examples/e2e/package.json @@ -0,0 +1,43 @@ +{ + "name": "e2e", + "version": "1.0.0", + "description": "HTTP Javascript Example", + "license": "SEE LICENSE IN ../../", + "author": "Eithan", + "type": "module", + "main": "./src/index.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "action": { + "runnerAsynchronous": true, + "runnerMinCount": 1, + "runnerMaxCount": 1, + "runnerMaxAge": 0, + "runnerMaxIdleAge": 0, + "runnerMaxAgeHard": 0, + "runnerMode": "standard" + }, + "triggers": [ + { + "type": "http", + "name": "http-javascript-trigger", + "path": "/e2e", + "method": "GET" + }, + { + "type": "mqtt", + "topics": [ + "#" + ], + "connection": { + "protocol": "mqtt", + "host": "mqtt" + } + }, + { + "type": "schedule", + "cron": "* * * * * *" + } + ] +} diff --git a/examples/e2e/publish.sh b/examples/e2e/publish.sh new file mode 100755 index 0000000..86ceeb5 --- /dev/null +++ b/examples/e2e/publish.sh @@ -0,0 +1,22 @@ +#/bin/bash + +# Install dependencies using NPM (issues with pnpm and mono repos, had to use npm) +npm install > /dev/null + +# Archive essential files +zip -rvq archive.zip ./package.json ./src > /dev/null + +# Get base url argument from argument, defaults to localhost:3000 +TOKEN=${1} +BASE_URL=${2:-http://localhost:3000} + +# Upload to Jobber +curl \ + --silent \ + --request POST \ + --url "$BASE_URL/api/job/publish/" \ + --header 'content-type: multipart/form-data' \ + --header "Authorization: Bearer $TOKEN" \ + --form 'archive=@archive.zip;type=application/zip' > /dev/null + +rm archive.zip \ No newline at end of file diff --git a/examples/e2e/src/index.js b/examples/e2e/src/index.js new file mode 100644 index 0000000..b808cb4 --- /dev/null +++ b/examples/e2e/src/index.js @@ -0,0 +1,65 @@ +const unixTimestamp = () => Math.floor(Date.now() / 1000); + +const myState = { + bootstrap: 
false, + lastSchedule: 0, + lastMqtt: 0, +}; + +export const handlerHttp = async (context) => { + const action = context.request.query("action"); + + if (action === "hang") { + // It gives no response, it just hangs. + return; + } + + if (action === "mqtt") { + await context.publish("ping", "this is pretty cool"); + + return context.response.text("published to mqtt!"); + } + + if (action === "set-state") { + const key = "test-key"; + + const value = context.request.query("value"); + + if (!value) { + return context.response.text("Missing 'value' query parameter"); + } + + await globalThis.jobber.setStore(key, value); + + return await context.response.text("set!"); + } + + if (action === "get-state") { + const key = "test-key"; + + return await context.response.text(await globalThis.jobber.getStore(key)); + } + + context.response.json({ + bootstrap: myState.bootstrap, + lastScheduleRecent: myState.lastSchedule + 60 > unixTimestamp(), + lastMqttRecent: myState.lastMqtt + 60 > unixTimestamp(), + }); +}; + +export const handlerMqtt = async (context) => { + myState.lastMqtt = unixTimestamp(); + if (context.topic === "ping") { + await context.publish("pong", "Hello from Jobber MQTT JavaScript Example!"); + } +}; + +export const handlerSchedule = async (context) => { + myState.lastSchedule = unixTimestamp(); +}; + +export const bootstrap = async (context) => { + console.log("Bootstrap function called with context:", context); + + myState.bootstrap = true; +}; diff --git a/examples/http-javascript-cjs/package-lock.json b/examples/http-javascript-cjs/package-lock.json new file mode 100644 index 0000000..eebad91 --- /dev/null +++ b/examples/http-javascript-cjs/package-lock.json @@ -0,0 +1,13 @@ +{ + "name": "http-javascript-cjs", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "http-javascript-cjs", + "version": "1.0.0", + "license": "SEE LICENSE IN ../../" + } + } +} diff --git a/examples/http-javascript-cjs/package.json 
b/examples/http-javascript-cjs/package.json new file mode 100644 index 0000000..6af499e --- /dev/null +++ b/examples/http-javascript-cjs/package.json @@ -0,0 +1,29 @@ +{ + "name": "http-javascript-cjs", + "version": "1.0.0", + "description": "HTTP CommonJS Javascript Example", + "license": "SEE LICENSE IN ../../", + "author": "Eithan", + "type": "commonjs", + "main": "./src/index.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "action": { + "runnerAsynchronous": true, + "runnerMinCount": 1, + "runnerMaxCount": 1, + "runnerMaxAge": 0, + "runnerMaxIdleAge": 0, + "runnerMaxAgeHard": 0, + "runnerMode": "standard" + }, + "triggers": [ + { + "type": "http", + "name": "http-javascript-trigger", + "path": "/http-javascript-cjs", + "method": "GET" + } + ] +} diff --git a/examples/http-javascript-cjs/publish.sh b/examples/http-javascript-cjs/publish.sh new file mode 100755 index 0000000..86ceeb5 --- /dev/null +++ b/examples/http-javascript-cjs/publish.sh @@ -0,0 +1,22 @@ +#/bin/bash + +# Install dependencies using NPM (issues with pnpm and mono repos, had to use npm) +npm install > /dev/null + +# Archive essential files +zip -rvq archive.zip ./package.json ./src > /dev/null + +# Get base url argument from argument, defaults to localhost:3000 +TOKEN=${1} +BASE_URL=${2:-http://localhost:3000} + +# Upload to Jobber +curl \ + --silent \ + --request POST \ + --url "$BASE_URL/api/job/publish/" \ + --header 'content-type: multipart/form-data' \ + --header "Authorization: Bearer $TOKEN" \ + --form 'archive=@archive.zip;type=application/zip' > /dev/null + +rm archive.zip \ No newline at end of file diff --git a/examples/http-javascript-cjs/src/index.js b/examples/http-javascript-cjs/src/index.js new file mode 100644 index 0000000..0ccd48b --- /dev/null +++ b/examples/http-javascript-cjs/src/index.js @@ -0,0 +1,11 @@ +// ew 20015 called and asked for its javascript back + +const path = require("path"); + +exports.handlerHttp = async (context) => { + 
const host = context.request.header("host"); + + return context.response.text( + `path.join example from commonjs: ${path.join("foo", "bar")}`, + ); +}; diff --git a/examples/http-javascript-run-once/package-lock.json b/examples/http-javascript-run-once/package-lock.json new file mode 100644 index 0000000..1347e8c --- /dev/null +++ b/examples/http-javascript-run-once/package-lock.json @@ -0,0 +1,13 @@ +{ + "name": "http-javascript-run-once", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "http-javascript-run-once", + "version": "1.0.0", + "license": "SEE LICENSE IN ../../" + } + } +} diff --git a/examples/http-javascript-run-once/package.json b/examples/http-javascript-run-once/package.json new file mode 100644 index 0000000..fc7e679 --- /dev/null +++ b/examples/http-javascript-run-once/package.json @@ -0,0 +1,29 @@ +{ + "name": "http-javascript-run-once", + "version": "1.0.0", + "description": "HTTP Javascript Run Once Example", + "license": "SEE LICENSE IN ../../", + "author": "Eithan", + "type": "module", + "main": "./src/index.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "action": { + "runnerAsynchronous": false, + "runnerMinCount": 0, + "runnerMaxCount": 1, + "runnerMaxAge": 0, + "runnerMaxIdleAge": 0, + "runnerMaxAgeHard": 0, + "runnerMode": "run-once" + }, + "triggers": [ + { + "type": "http", + "name": "http-javascript-trigger", + "path": "/http-javascript-run-once", + "method": "GET" + } + ] +} diff --git a/examples/http-javascript-run-once/publish.sh b/examples/http-javascript-run-once/publish.sh new file mode 100755 index 0000000..86ceeb5 --- /dev/null +++ b/examples/http-javascript-run-once/publish.sh @@ -0,0 +1,22 @@ +#/bin/bash + +# Install dependencies using NPM (issues with pnpm and mono repos, had to use npm) +npm install > /dev/null + +# Archive essential files +zip -rvq archive.zip ./package.json ./src > /dev/null + +# Get base url argument from argument, 
defaults to localhost:3000 +TOKEN=${1} +BASE_URL=${2:-http://localhost:3000} + +# Upload to Jobber +curl \ + --silent \ + --request POST \ + --url "$BASE_URL/api/job/publish/" \ + --header 'content-type: multipart/form-data' \ + --header "Authorization: Bearer $TOKEN" \ + --form 'archive=@archive.zip;type=application/zip' > /dev/null + +rm archive.zip \ No newline at end of file diff --git a/examples/http-javascript-run-once/src/index.js b/examples/http-javascript-run-once/src/index.js new file mode 100644 index 0000000..d00a47a --- /dev/null +++ b/examples/http-javascript-run-once/src/index.js @@ -0,0 +1,3 @@ +export const handlerHttp = async (context) => { + return context.response.text(`run-once-response`); +}; diff --git a/examples/http-javascript/package-lock.json b/examples/http-javascript/package-lock.json new file mode 100644 index 0000000..9f1f8a4 --- /dev/null +++ b/examples/http-javascript/package-lock.json @@ -0,0 +1,13 @@ +{ + "name": "http-javascript", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "http-javascript", + "version": "1.0.0", + "license": "SEE LICENSE IN ../../" + } + } +} diff --git a/examples/http-javascript/package.json b/examples/http-javascript/package.json new file mode 100644 index 0000000..caaebea --- /dev/null +++ b/examples/http-javascript/package.json @@ -0,0 +1,29 @@ +{ + "name": "http-javascript", + "version": "1.0.0", + "description": "HTTP Javascript Example", + "license": "SEE LICENSE IN ../../", + "author": "Eithan", + "type": "module", + "main": "./src/index.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "action": { + "runnerAsynchronous": true, + "runnerMinCount": 1, + "runnerMaxCount": 1, + "runnerMaxAge": 30, + "runnerMaxIdleAge": 30, + "runnerMaxAgeHard": 60, + "runnerMode": "standard" + }, + "triggers": [ + { + "type": "http", + "name": "http-javascript-trigger", + "path": "/http-javascript", + "method": "GET" + } + ] +} diff 
--git a/examples/http-javascript/publish.sh b/examples/http-javascript/publish.sh new file mode 100755 index 0000000..86ceeb5 --- /dev/null +++ b/examples/http-javascript/publish.sh @@ -0,0 +1,22 @@ +#/bin/bash + +# Install dependencies using NPM (issues with pnpm and mono repos, had to use npm) +npm install > /dev/null + +# Archive essential files +zip -rvq archive.zip ./package.json ./src > /dev/null + +# Get base url argument from argument, defaults to localhost:3000 +TOKEN=${1} +BASE_URL=${2:-http://localhost:3000} + +# Upload to Jobber +curl \ + --silent \ + --request POST \ + --url "$BASE_URL/api/job/publish/" \ + --header 'content-type: multipart/form-data' \ + --header "Authorization: Bearer $TOKEN" \ + --form 'archive=@archive.zip;type=application/zip' > /dev/null + +rm archive.zip \ No newline at end of file diff --git a/examples/http-javascript/src/index.js b/examples/http-javascript/src/index.js new file mode 100644 index 0000000..7c0ad8c --- /dev/null +++ b/examples/http-javascript/src/index.js @@ -0,0 +1,17 @@ +export const handlerHttp = async (context) => { + const host = context.request.header("host"); + + context.response.html(` + + + Jobber HTTP JavaScript Example + + +

Jobber HTTP JavaScript Example

+

This is an example of a simple HTTP server built with Jobber and JavaScript.

+

To test this out, send a request to this endpoint using curl or your browser:

+
curl http://${host}/
+ + + `); +}; diff --git a/examples/http-typescript/package.json b/examples/http-typescript-legacy/package.json similarity index 94% rename from examples/http-typescript/package.json rename to examples/http-typescript-legacy/package.json index 6ca8742..bb27084 100644 --- a/examples/http-typescript/package.json +++ b/examples/http-typescript-legacy/package.json @@ -1,6 +1,6 @@ { - "name": "http-typescript", - "version": "0.0.9", + "name": "http-typescript-legacy", + "version": "0.0.10", "description": "Jobber Example, Typescript HTTP Demo", "main": "./dist/index.js", "type": "module", diff --git a/examples/http-typescript/publish.sh b/examples/http-typescript-legacy/publish.sh similarity index 80% rename from examples/http-typescript/publish.sh rename to examples/http-typescript-legacy/publish.sh index 722a8a2..bfabdcd 100755 --- a/examples/http-typescript/publish.sh +++ b/examples/http-typescript-legacy/publish.sh @@ -18,12 +18,15 @@ npm run build # Archive essential files zip -rv archive.zip ./package.json ./dist ./src ./node_modules +# Get base url argument from argument, defaults to localhost:3000 +BASE_URL=${1:-http://localhost:3000} + # Upload to Jobber curl \ --silent \ --show-error \ --request POST \ - --url 'http://localhost:3000/api/job/publish/' \ + --url "$BASE_URL/api/job/publish/" \ --header 'content-type: multipart/form-data' \ --form 'archive=@archive.zip;type=application/zip' diff --git a/examples/http-typescript/src/declaration.d.ts b/examples/http-typescript-legacy/src/declaration.d.ts similarity index 100% rename from examples/http-typescript/src/declaration.d.ts rename to examples/http-typescript-legacy/src/declaration.d.ts diff --git a/examples/http-typescript/src/index.ts b/examples/http-typescript-legacy/src/index.ts similarity index 85% rename from examples/http-typescript/src/index.ts rename to examples/http-typescript-legacy/src/index.ts index c2a219c..2ead3a2 100644 --- a/examples/http-typescript/src/index.ts +++ 
b/examples/http-typescript-legacy/src/index.ts @@ -5,21 +5,25 @@ type StoreCounter = number; export const handler = async ( request: JobberHandlerRequest, response: JobberHandlerResponse, - context: JobberHandlerContext + context: JobberHandlerContext, ) => { if (request.type() !== "http") { throw new Error("Expecting HTTP request"); } + if (request.query("test")) { + return response.json({ hello: "speedy" }, 200); + } + // console.log("name:", request.name()); await context.setStoreJson( "medium-length", - "SDGFDHGUDGGUSDGFDHGUDGGUSDGFDHGUDGGU" + "SDGFDHGUDGGUSDGFDHGUDGGUSDGFDHGUDGGU", ); await context.setStoreJson( "long-length-sdfhkfgasufygasiuyfgweuofygweoyfvewifyvewrifygverygifvegerg", - "SDGFDHGUDGGUSDGFDHGUDGGUSDGFDHGUDGGUSDGFDHGUDGGUSDGFDHGUDGGUSDGFDHGUDGGUSDGFDHGUDGGUSDGFDHGUDGGUSDGFDHGUDGGUSDGFDHGUDGGUSDGFDHGUDGGUSDGFDHGUDGGU" + "SDGFDHGUDGGUSDGFDHGUDGGUSDGFDHGUDGGUSDGFDHGUDGGUSDGFDHGUDGGUSDGFDHGUDGGUSDGFDHGUDGGUSDGFDHGUDGGUSDGFDHGUDGGUSDGFDHGUDGGUSDGFDHGUDGGUSDGFDHGUDGGU", ); await context.setStoreJson("1d expiry", "", { @@ -46,6 +50,6 @@ export const handler = async ( { count, }, - 200 + 200, ); }; diff --git a/examples/mqtt-typescript/tsconfig.json b/examples/http-typescript-legacy/tsconfig.json similarity index 88% rename from examples/mqtt-typescript/tsconfig.json rename to examples/http-typescript-legacy/tsconfig.json index 5c356fc..82a0a34 100644 --- a/examples/mqtt-typescript/tsconfig.json +++ b/examples/http-typescript-legacy/tsconfig.json @@ -8,7 +8,8 @@ "skipLibCheck": true, "forceConsistentCasingInFileNames": true, "declaration": false, - "outDir": "./dist", + "rootDir": "src", + "outDir": "./dist" }, "$schema": "https://json.schemastore.org/tsconfig", "display": "Recommended" diff --git a/examples/http-typescript/package-lock.json b/examples/http-typescript/package-lock.json deleted file mode 100644 index 6dd6db3..0000000 --- a/examples/http-typescript/package-lock.json +++ /dev/null @@ -1,990 +0,0 @@ -{ - "name": "http-typescript", - 
"version": "0.0.9", - "lockfileVersion": 3, - "requires": true, - "packages": { - "": { - "name": "http-typescript", - "version": "0.0.9", - "license": "MIT", - "devDependencies": { - "@tsconfig/node20": "^20.1.4", - "@types/node": "^20.16.12", - "rimraf": "^5.0.10", - "tsc-alias": "^1.8.10", - "typescript": "^5.6.3" - } - }, - "node_modules/@isaacs/cliui": { - "version": "8.0.2", - "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", - "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", - "dev": true, - "dependencies": { - "string-width": "^5.1.2", - "string-width-cjs": "npm:string-width@^4.2.0", - "strip-ansi": "^7.0.1", - "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", - "wrap-ansi": "^8.1.0", - "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/@nodelib/fs.scandir": { - "version": "2.1.5", - "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", - "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", - "dev": true, - "dependencies": { - "@nodelib/fs.stat": "2.0.5", - "run-parallel": "^1.1.9" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/@nodelib/fs.stat": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", - "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", - "dev": true, - "engines": { - "node": ">= 8" - } - }, - "node_modules/@nodelib/fs.walk": { - "version": "1.2.8", - "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", - "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", - "dev": true, - "dependencies": { - "@nodelib/fs.scandir": "2.1.5", - "fastq": "^1.6.0" - }, - "engines": { - "node": ">= 8" - } - }, - 
"node_modules/@pkgjs/parseargs": { - "version": "0.11.0", - "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz", - "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==", - "dev": true, - "optional": true, - "engines": { - "node": ">=14" - } - }, - "node_modules/@tsconfig/node20": { - "version": "20.1.4", - "resolved": "https://registry.npmjs.org/@tsconfig/node20/-/node20-20.1.4.tgz", - "integrity": "sha512-sqgsT69YFeLWf5NtJ4Xq/xAF8p4ZQHlmGW74Nu2tD4+g5fAsposc4ZfaaPixVu4y01BEiDCWLRDCvDM5JOsRxg==", - "dev": true - }, - "node_modules/@types/node": { - "version": "20.17.11", - "resolved": "https://registry.npmjs.org/@types/node/-/node-20.17.11.tgz", - "integrity": "sha512-Ept5glCK35R8yeyIeYlRIZtX6SLRyqMhOFTgj5SOkMpLTdw3SEHI9fHx60xaUZ+V1aJxQJODE+7/j5ocZydYTg==", - "dev": true, - "dependencies": { - "undici-types": "~6.19.2" - } - }, - "node_modules/ansi-regex": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.1.0.tgz", - "integrity": "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==", - "dev": true, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/ansi-regex?sponsor=1" - } - }, - "node_modules/ansi-styles": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", - "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", - "dev": true, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/anymatch": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", - "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", - "dev": true, - "dependencies": { - "normalize-path": "^3.0.0", - 
"picomatch": "^2.0.4" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/array-union": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", - "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/balanced-match": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", - "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", - "dev": true - }, - "node_modules/binary-extensions": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz", - "integrity": "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==", - "dev": true, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", - "dev": true, - "dependencies": { - "balanced-match": "^1.0.0" - } - }, - "node_modules/braces": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", - "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", - "dev": true, - "dependencies": { - "fill-range": "^7.1.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/chokidar": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.6.0.tgz", - "integrity": "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==", - "dev": true, - "dependencies": { - "anymatch": 
"~3.1.2", - "braces": "~3.0.2", - "glob-parent": "~5.1.2", - "is-binary-path": "~2.1.0", - "is-glob": "~4.0.1", - "normalize-path": "~3.0.0", - "readdirp": "~3.6.0" - }, - "engines": { - "node": ">= 8.10.0" - }, - "funding": { - "url": "https://paulmillr.com/funding/" - }, - "optionalDependencies": { - "fsevents": "~2.3.2" - } - }, - "node_modules/color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "dev": true, - "dependencies": { - "color-name": "~1.1.4" - }, - "engines": { - "node": ">=7.0.0" - } - }, - "node_modules/color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "dev": true - }, - "node_modules/commander": { - "version": "9.5.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-9.5.0.tgz", - "integrity": "sha512-KRs7WVDKg86PWiuAqhDrAQnTXZKraVcCc6vFdL14qrZ/DcWwuRo7VoiYXalXO7S5GKpqYiVEwCbgFDfxNHKJBQ==", - "dev": true, - "engines": { - "node": "^12.20.0 || >=14" - } - }, - "node_modules/cross-spawn": { - "version": "7.0.6", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", - "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", - "dev": true, - "dependencies": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/dir-glob": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", - "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", - "dev": true, - "dependencies": { - "path-type": "^4.0.0" - }, - "engines": { - 
"node": ">=8" - } - }, - "node_modules/eastasianwidth": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", - "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==", - "dev": true - }, - "node_modules/emoji-regex": { - "version": "9.2.2", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", - "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", - "dev": true - }, - "node_modules/fast-glob": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.2.tgz", - "integrity": "sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==", - "dev": true, - "dependencies": { - "@nodelib/fs.stat": "^2.0.2", - "@nodelib/fs.walk": "^1.2.3", - "glob-parent": "^5.1.2", - "merge2": "^1.3.0", - "micromatch": "^4.0.4" - }, - "engines": { - "node": ">=8.6.0" - } - }, - "node_modules/fastq": { - "version": "1.18.0", - "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.18.0.tgz", - "integrity": "sha512-QKHXPW0hD8g4UET03SdOdunzSouc9N4AuHdsX8XNcTsuz+yYFILVNIX4l9yHABMhiEI9Db0JTTIpu0wB+Y1QQw==", - "dev": true, - "dependencies": { - "reusify": "^1.0.4" - } - }, - "node_modules/fill-range": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", - "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", - "dev": true, - "dependencies": { - "to-regex-range": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/foreground-child": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.0.tgz", - "integrity": "sha512-Ld2g8rrAyMYFXBhEqMz8ZAHBi4J4uS1i/CxGMDnjyFWddMXLVcDp051DZfu+t7+ab7Wv6SMqpWmyFIj5UbfFvg==", - "dev": true, - "dependencies": { - "cross-spawn": 
"^7.0.0", - "signal-exit": "^4.0.1" - }, - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/fsevents": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", - "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", - "dev": true, - "hasInstallScript": true, - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": "^8.16.0 || ^10.6.0 || >=11.0.0" - } - }, - "node_modules/glob": { - "version": "10.4.5", - "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.5.tgz", - "integrity": "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==", - "dev": true, - "dependencies": { - "foreground-child": "^3.1.0", - "jackspeak": "^3.1.2", - "minimatch": "^9.0.4", - "minipass": "^7.1.2", - "package-json-from-dist": "^1.0.0", - "path-scurry": "^1.11.1" - }, - "bin": { - "glob": "dist/esm/bin.mjs" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/glob-parent": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", - "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", - "dev": true, - "dependencies": { - "is-glob": "^4.0.1" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/globby": { - "version": "11.1.0", - "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", - "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==", - "dev": true, - "dependencies": { - "array-union": "^2.1.0", - "dir-glob": "^3.0.1", - "fast-glob": "^3.2.9", - "ignore": "^5.2.0", - "merge2": "^1.4.1", - "slash": "^3.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - 
"node_modules/ignore": { - "version": "5.3.2", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", - "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==", - "dev": true, - "engines": { - "node": ">= 4" - } - }, - "node_modules/is-binary-path": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", - "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", - "dev": true, - "dependencies": { - "binary-extensions": "^2.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/is-extglob": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", - "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-fullwidth-code-point": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/is-glob": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", - "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", - "dev": true, - "dependencies": { - "is-extglob": "^2.1.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "dev": true, - "engines": { - "node": ">=0.12.0" - } - }, - "node_modules/isexe": { - "version": "2.0.0", - 
"resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", - "dev": true - }, - "node_modules/jackspeak": { - "version": "3.4.3", - "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-3.4.3.tgz", - "integrity": "sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==", - "dev": true, - "dependencies": { - "@isaacs/cliui": "^8.0.2" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - }, - "optionalDependencies": { - "@pkgjs/parseargs": "^0.11.0" - } - }, - "node_modules/lru-cache": { - "version": "10.4.3", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", - "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", - "dev": true - }, - "node_modules/merge2": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", - "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", - "dev": true, - "engines": { - "node": ">= 8" - } - }, - "node_modules/micromatch": { - "version": "4.0.8", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", - "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", - "dev": true, - "dependencies": { - "braces": "^3.0.3", - "picomatch": "^2.3.1" - }, - "engines": { - "node": ">=8.6" - } - }, - "node_modules/minimatch": { - "version": "9.0.5", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", - "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", - "dev": true, - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - 
}, - "node_modules/minipass": { - "version": "7.1.2", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", - "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==", - "dev": true, - "engines": { - "node": ">=16 || 14 >=14.17" - } - }, - "node_modules/mylas": { - "version": "2.1.13", - "resolved": "https://registry.npmjs.org/mylas/-/mylas-2.1.13.tgz", - "integrity": "sha512-+MrqnJRtxdF+xngFfUUkIMQrUUL0KsxbADUkn23Z/4ibGg192Q+z+CQyiYwvWTsYjJygmMR8+w3ZDa98Zh6ESg==", - "dev": true, - "engines": { - "node": ">=12.0.0" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/raouldeheer" - } - }, - "node_modules/normalize-path": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", - "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/package-json-from-dist": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/package-json-from-dist/-/package-json-from-dist-1.0.1.tgz", - "integrity": "sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==", - "dev": true - }, - "node_modules/path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/path-scurry": { - "version": "1.11.1", - "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz", - "integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==", - "dev": true, - "dependencies": { - "lru-cache": "^10.2.0", - "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" - }, - "engines": { - "node": ">=16 || 
14 >=14.18" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/path-type": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", - "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/picomatch": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", - "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", - "dev": true, - "engines": { - "node": ">=8.6" - }, - "funding": { - "url": "https://github.com/sponsors/jonschlinkert" - } - }, - "node_modules/plimit-lit": { - "version": "1.6.1", - "resolved": "https://registry.npmjs.org/plimit-lit/-/plimit-lit-1.6.1.tgz", - "integrity": "sha512-B7+VDyb8Tl6oMJT9oSO2CW8XC/T4UcJGrwOVoNGwOQsQYhlpfajmrMj5xeejqaASq3V/EqThyOeATEOMuSEXiA==", - "dev": true, - "dependencies": { - "queue-lit": "^1.5.1" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/queue-lit": { - "version": "1.5.2", - "resolved": "https://registry.npmjs.org/queue-lit/-/queue-lit-1.5.2.tgz", - "integrity": "sha512-tLc36IOPeMAubu8BkW8YDBV+WyIgKlYU7zUNs0J5Vk9skSZ4JfGlPOqplP0aHdfv7HL0B2Pg6nwiq60Qc6M2Hw==", - "dev": true, - "engines": { - "node": ">=12" - } - }, - "node_modules/queue-microtask": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", - "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ] - }, - "node_modules/readdirp": { - "version": "3.6.0", - "resolved": 
"https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", - "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", - "dev": true, - "dependencies": { - "picomatch": "^2.2.1" - }, - "engines": { - "node": ">=8.10.0" - } - }, - "node_modules/reusify": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", - "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", - "dev": true, - "engines": { - "iojs": ">=1.0.0", - "node": ">=0.10.0" - } - }, - "node_modules/rimraf": { - "version": "5.0.10", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-5.0.10.tgz", - "integrity": "sha512-l0OE8wL34P4nJH/H2ffoaniAokM2qSmrtXHmlpvYr5AVVX8msAyW0l8NVJFDxlSK4u3Uh/f41cQheDVdnYijwQ==", - "dev": true, - "dependencies": { - "glob": "^10.3.7" - }, - "bin": { - "rimraf": "dist/esm/bin.mjs" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/run-parallel": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", - "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "dependencies": { - "queue-microtask": "^1.2.2" - } - }, - "node_modules/shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "dev": true, - "dependencies": { - "shebang-regex": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/shebang-regex": { - "version": "3.0.0", - 
"resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/signal-exit": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", - "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", - "dev": true, - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/slash": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", - "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/string-width": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", - "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", - "dev": true, - "dependencies": { - "eastasianwidth": "^0.2.0", - "emoji-regex": "^9.2.2", - "strip-ansi": "^7.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/string-width-cjs": { - "name": "string-width", - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "dev": true, - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/string-width-cjs/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": 
"https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/string-width-cjs/node_modules/emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "dev": true - }, - "node_modules/string-width-cjs/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/strip-ansi": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", - "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", - "dev": true, - "dependencies": { - "ansi-regex": "^6.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/strip-ansi?sponsor=1" - } - }, - "node_modules/strip-ansi-cjs": { - "name": "strip-ansi", - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/strip-ansi-cjs/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": 
true, - "engines": { - "node": ">=8" - } - }, - "node_modules/to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dev": true, - "dependencies": { - "is-number": "^7.0.0" - }, - "engines": { - "node": ">=8.0" - } - }, - "node_modules/tsc-alias": { - "version": "1.8.10", - "resolved": "https://registry.npmjs.org/tsc-alias/-/tsc-alias-1.8.10.tgz", - "integrity": "sha512-Ibv4KAWfFkFdKJxnWfVtdOmB0Zi1RJVxcbPGiCDsFpCQSsmpWyuzHG3rQyI5YkobWwxFPEyQfu1hdo4qLG2zPw==", - "dev": true, - "dependencies": { - "chokidar": "^3.5.3", - "commander": "^9.0.0", - "globby": "^11.0.4", - "mylas": "^2.1.9", - "normalize-path": "^3.0.0", - "plimit-lit": "^1.2.6" - }, - "bin": { - "tsc-alias": "dist/bin/index.js" - } - }, - "node_modules/typescript": { - "version": "5.7.2", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.7.2.tgz", - "integrity": "sha512-i5t66RHxDvVN40HfDd1PsEThGNnlMCMT3jMUuoh9/0TaqWevNontacunWyN02LA9/fIbEWlcHZcgTKb9QoaLfg==", - "dev": true, - "bin": { - "tsc": "bin/tsc", - "tsserver": "bin/tsserver" - }, - "engines": { - "node": ">=14.17" - } - }, - "node_modules/undici-types": { - "version": "6.19.8", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz", - "integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==", - "dev": true - }, - "node_modules/which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "dev": true, - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/node-which" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/wrap-ansi": { - "version": "8.1.0", - "resolved": 
"https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", - "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", - "dev": true, - "dependencies": { - "ansi-styles": "^6.1.0", - "string-width": "^5.0.1", - "strip-ansi": "^7.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, - "node_modules/wrap-ansi-cjs": { - "name": "wrap-ansi", - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", - "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", - "dev": true, - "dependencies": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, - "node_modules/wrap-ansi-cjs/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/wrap-ansi-cjs/node_modules/ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "dev": true, - "dependencies": { - "color-convert": "^2.0.1" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/wrap-ansi-cjs/node_modules/emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "dev": true - 
}, - "node_modules/wrap-ansi-cjs/node_modules/string-width": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "dev": true, - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/wrap-ansi-cjs/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - } - } -} diff --git a/examples/mqtt-typescript/package.json b/examples/mqtt-typescript-legacy/package.json similarity index 96% rename from examples/mqtt-typescript/package.json rename to examples/mqtt-typescript-legacy/package.json index a6afbe8..a1d2416 100644 --- a/examples/mqtt-typescript/package.json +++ b/examples/mqtt-typescript-legacy/package.json @@ -1,5 +1,5 @@ { - "name": "mqtt-typescript", + "name": "mqtt-typescript-legacy", "version": "0.0.9", "description": "Jobber Example for MQTT in TypeScript", "main": "./dist/index.js", diff --git a/examples/mqtt-typescript/publish.sh b/examples/mqtt-typescript-legacy/publish.sh similarity index 78% rename from examples/mqtt-typescript/publish.sh rename to examples/mqtt-typescript-legacy/publish.sh index f6bcaca..bfabdcd 100755 --- a/examples/mqtt-typescript/publish.sh +++ b/examples/mqtt-typescript-legacy/publish.sh @@ -18,14 +18,16 @@ npm run build # Archive essential files zip -rv archive.zip ./package.json ./dist ./src ./node_modules +# Get base url argument from argument, defaults to localhost:3000 +BASE_URL=${1:-http://localhost:3000} + # Upload to Jobber curl \ --silent \ --show-error \ --request 
POST \ - --url 'http://localhost:3000/api/job/publish/' \ + --url "$BASE_URL/api/job/publish/" \ --header 'content-type: multipart/form-data' \ - --header 'Authorization: Bearer 2abb54173128350bdc916054f320a300c8b76bc873dd3c80301a02946f39c9e24bc824' \ --form 'archive=@archive.zip;type=application/zip' rm archive.zip \ No newline at end of file diff --git a/examples/mqtt-typescript/src/declaration.d.ts b/examples/mqtt-typescript-legacy/src/declaration.d.ts similarity index 100% rename from examples/mqtt-typescript/src/declaration.d.ts rename to examples/mqtt-typescript-legacy/src/declaration.d.ts diff --git a/examples/mqtt-typescript/src/index.ts b/examples/mqtt-typescript-legacy/src/index.ts similarity index 100% rename from examples/mqtt-typescript/src/index.ts rename to examples/mqtt-typescript-legacy/src/index.ts diff --git a/examples/http-typescript/tsconfig.json b/examples/mqtt-typescript-legacy/tsconfig.json similarity index 94% rename from examples/http-typescript/tsconfig.json rename to examples/mqtt-typescript-legacy/tsconfig.json index 5c356fc..b3eedcd 100644 --- a/examples/http-typescript/tsconfig.json +++ b/examples/mqtt-typescript-legacy/tsconfig.json @@ -8,6 +8,7 @@ "skipLibCheck": true, "forceConsistentCasingInFileNames": true, "declaration": false, + "rootDir": "src", "outDir": "./dist", }, "$schema": "https://json.schemastore.org/tsconfig", diff --git a/examples/mqtt-typescript/package-lock.json b/examples/mqtt-typescript/package-lock.json deleted file mode 100644 index cebb374..0000000 --- a/examples/mqtt-typescript/package-lock.json +++ /dev/null @@ -1,895 +0,0 @@ -{ - "name": "mqtt-typescript", - "version": "0.0.9", - "lockfileVersion": 3, - "requires": true, - "packages": { - "": { - "name": "mqtt-typescript", - "version": "0.0.9", - "license": "MIT", - "devDependencies": { - "@tsconfig/node20": "^20.1.4", - "@types/node": "^20.16.12", - "rimraf": "^5.0.10", - "tsc-alias": "^1.8.10", - "typescript": "^5.6.3" - } - }, - 
"node_modules/@isaacs/cliui": { - "version": "8.0.2", - "dev": true, - "license": "ISC", - "dependencies": { - "string-width": "^5.1.2", - "string-width-cjs": "npm:string-width@^4.2.0", - "strip-ansi": "^7.0.1", - "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", - "wrap-ansi": "^8.1.0", - "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/@nodelib/fs.scandir": { - "version": "2.1.5", - "dev": true, - "license": "MIT", - "dependencies": { - "@nodelib/fs.stat": "2.0.5", - "run-parallel": "^1.1.9" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/@nodelib/fs.stat": { - "version": "2.0.5", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 8" - } - }, - "node_modules/@nodelib/fs.walk": { - "version": "1.2.8", - "dev": true, - "license": "MIT", - "dependencies": { - "@nodelib/fs.scandir": "2.1.5", - "fastq": "^1.6.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/@pkgjs/parseargs": { - "version": "0.11.0", - "dev": true, - "license": "MIT", - "optional": true, - "engines": { - "node": ">=14" - } - }, - "node_modules/@tsconfig/node20": { - "version": "20.1.4", - "dev": true, - "license": "MIT" - }, - "node_modules/@types/node": { - "version": "20.17.11", - "dev": true, - "license": "MIT", - "dependencies": { - "undici-types": "~6.19.2" - } - }, - "node_modules/ansi-regex": { - "version": "6.1.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/ansi-regex?sponsor=1" - } - }, - "node_modules/ansi-styles": { - "version": "6.2.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/anymatch": { - "version": "3.1.3", - "dev": true, - "license": "ISC", - "dependencies": { - "normalize-path": "^3.0.0", - "picomatch": "^2.0.4" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/array-union": { - 
"version": "2.1.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/balanced-match": { - "version": "1.0.2", - "dev": true, - "license": "MIT" - }, - "node_modules/binary-extensions": { - "version": "2.3.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/brace-expansion": { - "version": "2.0.1", - "dev": true, - "license": "MIT", - "dependencies": { - "balanced-match": "^1.0.0" - } - }, - "node_modules/braces": { - "version": "3.0.3", - "dev": true, - "license": "MIT", - "dependencies": { - "fill-range": "^7.1.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/chokidar": { - "version": "3.6.0", - "dev": true, - "license": "MIT", - "dependencies": { - "anymatch": "~3.1.2", - "braces": "~3.0.2", - "glob-parent": "~5.1.2", - "is-binary-path": "~2.1.0", - "is-glob": "~4.0.1", - "normalize-path": "~3.0.0", - "readdirp": "~3.6.0" - }, - "engines": { - "node": ">= 8.10.0" - }, - "funding": { - "url": "https://paulmillr.com/funding/" - }, - "optionalDependencies": { - "fsevents": "~2.3.2" - } - }, - "node_modules/color-convert": { - "version": "2.0.1", - "dev": true, - "license": "MIT", - "dependencies": { - "color-name": "~1.1.4" - }, - "engines": { - "node": ">=7.0.0" - } - }, - "node_modules/color-name": { - "version": "1.1.4", - "dev": true, - "license": "MIT" - }, - "node_modules/commander": { - "version": "9.5.0", - "dev": true, - "license": "MIT", - "engines": { - "node": "^12.20.0 || >=14" - } - }, - "node_modules/cross-spawn": { - "version": "7.0.6", - "dev": true, - "license": "MIT", - "dependencies": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/dir-glob": { - "version": "3.0.1", - "dev": true, - "license": "MIT", - "dependencies": { - "path-type": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, 
- "node_modules/eastasianwidth": { - "version": "0.2.0", - "dev": true, - "license": "MIT" - }, - "node_modules/emoji-regex": { - "version": "9.2.2", - "dev": true, - "license": "MIT" - }, - "node_modules/fast-glob": { - "version": "3.3.2", - "dev": true, - "license": "MIT", - "dependencies": { - "@nodelib/fs.stat": "^2.0.2", - "@nodelib/fs.walk": "^1.2.3", - "glob-parent": "^5.1.2", - "merge2": "^1.3.0", - "micromatch": "^4.0.4" - }, - "engines": { - "node": ">=8.6.0" - } - }, - "node_modules/fastq": { - "version": "1.18.0", - "dev": true, - "license": "ISC", - "dependencies": { - "reusify": "^1.0.4" - } - }, - "node_modules/fill-range": { - "version": "7.1.1", - "dev": true, - "license": "MIT", - "dependencies": { - "to-regex-range": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/foreground-child": { - "version": "3.3.0", - "dev": true, - "license": "ISC", - "dependencies": { - "cross-spawn": "^7.0.0", - "signal-exit": "^4.0.1" - }, - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/glob": { - "version": "10.4.5", - "dev": true, - "license": "ISC", - "dependencies": { - "foreground-child": "^3.1.0", - "jackspeak": "^3.1.2", - "minimatch": "^9.0.4", - "minipass": "^7.1.2", - "package-json-from-dist": "^1.0.0", - "path-scurry": "^1.11.1" - }, - "bin": { - "glob": "dist/esm/bin.mjs" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/glob-parent": { - "version": "5.1.2", - "dev": true, - "license": "ISC", - "dependencies": { - "is-glob": "^4.0.1" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/globby": { - "version": "11.1.0", - "dev": true, - "license": "MIT", - "dependencies": { - "array-union": "^2.1.0", - "dir-glob": "^3.0.1", - "fast-glob": "^3.2.9", - "ignore": "^5.2.0", - "merge2": "^1.4.1", - "slash": "^3.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": 
"https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/ignore": { - "version": "5.3.2", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 4" - } - }, - "node_modules/is-binary-path": { - "version": "2.1.0", - "dev": true, - "license": "MIT", - "dependencies": { - "binary-extensions": "^2.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/is-extglob": { - "version": "2.1.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-fullwidth-code-point": { - "version": "3.0.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/is-glob": { - "version": "4.0.3", - "dev": true, - "license": "MIT", - "dependencies": { - "is-extglob": "^2.1.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-number": { - "version": "7.0.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.12.0" - } - }, - "node_modules/isexe": { - "version": "2.0.0", - "dev": true, - "license": "ISC" - }, - "node_modules/jackspeak": { - "version": "3.4.3", - "dev": true, - "license": "BlueOak-1.0.0", - "dependencies": { - "@isaacs/cliui": "^8.0.2" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - }, - "optionalDependencies": { - "@pkgjs/parseargs": "^0.11.0" - } - }, - "node_modules/lru-cache": { - "version": "10.4.3", - "dev": true, - "license": "ISC" - }, - "node_modules/merge2": { - "version": "1.4.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 8" - } - }, - "node_modules/micromatch": { - "version": "4.0.8", - "dev": true, - "license": "MIT", - "dependencies": { - "braces": "^3.0.3", - "picomatch": "^2.3.1" - }, - "engines": { - "node": ">=8.6" - } - }, - "node_modules/minimatch": { - "version": "9.0.5", - "dev": true, - "license": "ISC", - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } 
- }, - "node_modules/minipass": { - "version": "7.1.2", - "dev": true, - "license": "ISC", - "engines": { - "node": ">=16 || 14 >=14.17" - } - }, - "node_modules/mylas": { - "version": "2.1.13", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=12.0.0" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/raouldeheer" - } - }, - "node_modules/normalize-path": { - "version": "3.0.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/package-json-from-dist": { - "version": "1.0.1", - "dev": true, - "license": "BlueOak-1.0.0" - }, - "node_modules/path-key": { - "version": "3.1.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/path-scurry": { - "version": "1.11.1", - "dev": true, - "license": "BlueOak-1.0.0", - "dependencies": { - "lru-cache": "^10.2.0", - "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" - }, - "engines": { - "node": ">=16 || 14 >=14.18" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/path-type": { - "version": "4.0.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/picomatch": { - "version": "2.3.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8.6" - }, - "funding": { - "url": "https://github.com/sponsors/jonschlinkert" - } - }, - "node_modules/plimit-lit": { - "version": "1.6.1", - "dev": true, - "license": "MIT", - "dependencies": { - "queue-lit": "^1.5.1" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/queue-lit": { - "version": "1.5.2", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=12" - } - }, - "node_modules/queue-microtask": { - "version": "1.2.3", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - 
], - "license": "MIT" - }, - "node_modules/readdirp": { - "version": "3.6.0", - "dev": true, - "license": "MIT", - "dependencies": { - "picomatch": "^2.2.1" - }, - "engines": { - "node": ">=8.10.0" - } - }, - "node_modules/reusify": { - "version": "1.0.4", - "dev": true, - "license": "MIT", - "engines": { - "iojs": ">=1.0.0", - "node": ">=0.10.0" - } - }, - "node_modules/rimraf": { - "version": "5.0.10", - "dev": true, - "license": "ISC", - "dependencies": { - "glob": "^10.3.7" - }, - "bin": { - "rimraf": "dist/esm/bin.mjs" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/run-parallel": { - "version": "1.2.0", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT", - "dependencies": { - "queue-microtask": "^1.2.2" - } - }, - "node_modules/shebang-command": { - "version": "2.0.0", - "dev": true, - "license": "MIT", - "dependencies": { - "shebang-regex": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/shebang-regex": { - "version": "3.0.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/signal-exit": { - "version": "4.1.0", - "dev": true, - "license": "ISC", - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/slash": { - "version": "3.0.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/string-width": { - "version": "5.1.2", - "dev": true, - "license": "MIT", - "dependencies": { - "eastasianwidth": "^0.2.0", - "emoji-regex": "^9.2.2", - "strip-ansi": "^7.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/string-width-cjs": { - "name": "string-width", 
- "version": "4.2.3", - "dev": true, - "license": "MIT", - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/string-width-cjs/node_modules/ansi-regex": { - "version": "5.0.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/string-width-cjs/node_modules/emoji-regex": { - "version": "8.0.0", - "dev": true, - "license": "MIT" - }, - "node_modules/string-width-cjs/node_modules/strip-ansi": { - "version": "6.0.1", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/strip-ansi": { - "version": "7.1.0", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-regex": "^6.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/strip-ansi?sponsor=1" - } - }, - "node_modules/strip-ansi-cjs": { - "name": "strip-ansi", - "version": "6.0.1", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/strip-ansi-cjs/node_modules/ansi-regex": { - "version": "5.0.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/to-regex-range": { - "version": "5.0.1", - "dev": true, - "license": "MIT", - "dependencies": { - "is-number": "^7.0.0" - }, - "engines": { - "node": ">=8.0" - } - }, - "node_modules/tsc-alias": { - "version": "1.8.10", - "dev": true, - "license": "MIT", - "dependencies": { - "chokidar": "^3.5.3", - "commander": "^9.0.0", - "globby": "^11.0.4", - "mylas": "^2.1.9", - "normalize-path": "^3.0.0", - "plimit-lit": "^1.2.6" - }, - "bin": { - "tsc-alias": "dist/bin/index.js" - } - }, - "node_modules/typescript": { - "version": "5.7.2", - "dev": true, - "license": "Apache-2.0", - "bin": { - "tsc": "bin/tsc", - "tsserver": "bin/tsserver" - }, - "engines": { - 
"node": ">=14.17" - } - }, - "node_modules/undici-types": { - "version": "6.19.8", - "dev": true, - "license": "MIT" - }, - "node_modules/which": { - "version": "2.0.2", - "dev": true, - "license": "ISC", - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/node-which" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/wrap-ansi": { - "version": "8.1.0", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-styles": "^6.1.0", - "string-width": "^5.0.1", - "strip-ansi": "^7.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, - "node_modules/wrap-ansi-cjs": { - "name": "wrap-ansi", - "version": "7.0.0", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, - "node_modules/wrap-ansi-cjs/node_modules/ansi-regex": { - "version": "5.0.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/wrap-ansi-cjs/node_modules/ansi-styles": { - "version": "4.3.0", - "dev": true, - "license": "MIT", - "dependencies": { - "color-convert": "^2.0.1" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/wrap-ansi-cjs/node_modules/emoji-regex": { - "version": "8.0.0", - "dev": true, - "license": "MIT" - }, - "node_modules/wrap-ansi-cjs/node_modules/string-width": { - "version": "4.2.3", - "dev": true, - "license": "MIT", - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/wrap-ansi-cjs/node_modules/strip-ansi": { - "version": "6.0.1", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { 
- "node": ">=8" - } - } - } -} diff --git a/examples/schedule-javascript/README.md b/examples/schedule-javascript-legacy/README.md similarity index 100% rename from examples/schedule-javascript/README.md rename to examples/schedule-javascript-legacy/README.md diff --git a/examples/schedule-javascript/package.json b/examples/schedule-javascript-legacy/package.json similarity index 94% rename from examples/schedule-javascript/package.json rename to examples/schedule-javascript-legacy/package.json index cdc9792..8925566 100644 --- a/examples/schedule-javascript/package.json +++ b/examples/schedule-javascript-legacy/package.json @@ -1,5 +1,5 @@ { - "name": "schedule-javascript", + "name": "schedule-javascript-legacy", "version": "0.0.1", "description": "Jobber Example, Javascript Schedule", "main": "./src/index.js", diff --git a/examples/schedule-javascript/publish.sh b/examples/schedule-javascript-legacy/publish.sh similarity index 76% rename from examples/schedule-javascript/publish.sh rename to examples/schedule-javascript-legacy/publish.sh index 4f7d8e3..3c05060 100644 --- a/examples/schedule-javascript/publish.sh +++ b/examples/schedule-javascript-legacy/publish.sh @@ -9,12 +9,15 @@ npm install # Archive essential files zip -rv archive.zip ./package.json ./src ./node_modules +# Get base url argument from argument, defaults to localhost:3000 +BASE_URL=${1:-http://localhost:3000} + # Upload to Jobber curl \ --silent \ --show-error \ --request POST \ - --url 'http://localhost:3000/api/job/publish/' \ + --url "$BASE_URL/api/job/publish/" \ --header 'content-type: multipart/form-data' \ --form 'archive=@archive.zip;type=application/zip' diff --git a/examples/schedule-javascript/src/index.js b/examples/schedule-javascript-legacy/src/index.js similarity index 100% rename from examples/schedule-javascript/src/index.js rename to examples/schedule-javascript-legacy/src/index.js diff --git a/examples/schedule-javascript/package-lock.json 
b/examples/schedule-javascript/package-lock.json deleted file mode 100644 index db4f3f8..0000000 --- a/examples/schedule-javascript/package-lock.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "name": "schedule-javascript", - "version": "0.0.1", - "lockfileVersion": 3, - "requires": true, - "packages": { - "": { - "name": "schedule-javascript", - "version": "0.0.1", - "license": "MIT", - "devDependencies": { - "@types/node": "^20.16.12" - } - }, - "node_modules/@types/node": { - "version": "20.19.0", - "resolved": "https://registry.npmjs.org/@types/node/-/node-20.19.0.tgz", - "integrity": "sha512-hfrc+1tud1xcdVTABC2JiomZJEklMcXYNTVtZLAeqTVWD+qL5jkHKT+1lOtqDdGxt+mB53DTtiz673vfjU8D1Q==", - "dev": true, - "license": "MIT", - "dependencies": { - "undici-types": "~6.21.0" - } - }, - "node_modules/undici-types": { - "version": "6.21.0", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz", - "integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==", - "dev": true, - "license": "MIT" - } - } -} diff --git a/packages/common/package.json b/packages/common/package.json new file mode 100644 index 0000000..92c6b16 --- /dev/null +++ b/packages/common/package.json @@ -0,0 +1,37 @@ +{ + "name": "@jobber/common", + "version": "1.0.0", + "description": "Common utilities for Jobber Services", + "main": "./dist/index.js", + "types": "./dist/index.d.ts", + "exports": { + "./*": { + "types": "./dist/*", + "import": "./dist/*", + "require": "./dist/*" + }, + ".": { + "types": "./dist/index.d.ts", + "import": "./dist/index.js", + "require": "./dist/index.js" + } + }, + "type": "module", + "scripts": { + "build": "rimraf ./dist/* && tsc && tsc-alias -p tsconfig.json" + }, + "keywords": [], + "author": "Eithan Hersey-Tuit", + "license": "MIT", + "dependencies": { + "@jobber/tcp-frame-socket": "workspace:*", + "zod": "3.23.8" + }, + "devDependencies": { + "@tsconfig/node20": "^20.1.4", + "@types/node": "^20.16.12", + 
"rimraf": "^5.0.10", + "tsc-alias": "^1.8.10", + "typescript": "^5.6.3" + } +} diff --git a/packages/common/src/await-truthy.ts b/packages/common/src/await-truthy.ts new file mode 100644 index 0000000..d8cd352 --- /dev/null +++ b/packages/common/src/await-truthy.ts @@ -0,0 +1,36 @@ +import { timeout } from "./timeout.js"; + +/** + * Awaits until the callback yields true + */ +export const awaitTruthy = async ( + callback: () => Promise, + timeoutMs: number = 30_000 +) => { + let startTime = Date.now(); + + let index = 0; + while (true) { + if (Date.now() - startTime > timeoutMs) { + return false; + } + + if (await callback()) { + return true; + } + + index++; + + if (index <= 10) { + await timeout(10); + } + + if (index > 10 && index <= 20) { + await timeout(20); + } + + if (index > 20) { + await timeout(100); + } + } +}; diff --git a/packages/common/src/bouncer-base.ts b/packages/common/src/bouncer-base.ts new file mode 100644 index 0000000..b4b2444 --- /dev/null +++ b/packages/common/src/bouncer-base.ts @@ -0,0 +1,291 @@ +import { + canPerformAction, + type JobberPermissionAction, + type JobberPermissions, +} from "./permissions.js"; + +export class BouncerBase { + private _permissions; + + constructor(permissions: JobberPermissions) { + this._permissions = permissions; + } + + public can(resource: string, action: JobberPermissionAction): boolean { + return canPerformAction(this._permissions, resource, action); + } + + public canRead(resource: string): boolean { + return this.can(resource, "read"); + } + + public canWrite(resource: string): boolean { + return this.can(resource, "write"); + } + + public canDelete(resource: string): boolean { + return this.can(resource, "delete"); + } + + public canReadJob(job: { id: string }): boolean { + return this.can(`job/${job.id}`, "read"); + } + + public canWriteJob(job: { id: string }): boolean { + return this.can(`job/${job.id}`, "write"); + } + + public canDeleteJob(job: { id: string }): boolean { + return 
this.can(`job/${job.id}`, "delete"); + } + + public canReadJobEnvironment( + environment: { jobId: string }, + name: string, + ): boolean { + return this.can(`job/${environment.jobId}/environment/${name}`, "read"); + } + + public canWriteJobEnvironment( + environment: { jobId: string }, + name: string, + ): boolean { + return this.can(`job/${environment.jobId}/environment/${name}`, "write"); + } + + public canDeleteJobEnvironment( + environment: { jobId: string }, + name: string, + ): boolean { + return this.can(`job/${environment.jobId}/environment/${name}`, "delete"); + } + + public canReadJobAction(action: { jobId: string; id: string }): boolean { + return this.can(`job/${action.jobId}/actions/${action.id}`, "read"); + } + + public canWriteJobAction(action: { jobId: string; id: string }): boolean { + return this.can(`job/${action.jobId}/actions/${action.id}`, "write"); + } + + public canDeleteJobAction(action: { jobId: string; id: string }): boolean { + return this.can(`job/${action.jobId}/actions/${action.id}`, "delete"); + } + + public canReadJobRunners(job: { id: string }): boolean { + return this.can(`job/${job.id}/runners`, "read"); + } + + public canWriteJobRunners(job: { id: string }): boolean { + return this.can(`job/${job.id}/runners`, "write"); + } + + public canDeleteJobRunners(job: { id: string }): boolean { + return this.can(`job/${job.id}/runners`, "delete"); + } + + public canReadJobStore(item: { jobId: string }): boolean { + return this.can(`job/${item.jobId}/store`, "read"); + } + + public canWriteJobStore(item: { jobId: string }): boolean { + return this.can(`job/${item.jobId}/store`, "write"); + } + + public canDeleteJobStore(item: { jobId: string }): boolean { + return this.can(`job/${item.jobId}/store`, "delete"); + } + + public canReadJobTriggers(trigger: { jobId: string; id: string }): boolean { + return this.can(`job/${trigger.jobId}/triggers/${trigger.id}`, "read"); + } + + public canWriteJobTriggers(trigger: { jobId: string; id: string 
}): boolean { + return this.can(`job/${trigger.jobId}/triggers/${trigger.id}`, "write"); + } + + public canDeleteJobTriggers(trigger: { jobId: string; id: string }): boolean { + return this.can(`job/${trigger.jobId}/triggers/${trigger.id}`, "delete"); + } + + public canReadJobVersion(version: { jobId: string; id: string }): boolean { + return this.can(`job/${version.jobId}/versions/${version.id}`, "read"); + } + + public canReadJobVersionArchive(version: { + jobId: string; + id: string; + }): boolean { + return this.can( + `job/${version.jobId}/versions/${version.id}/archive`, + "read", + ); + } + + public canJobPublish(): boolean { + return this.can(`job/-/publish`, "write"); + } + + public canReadApiTokenGenerally(): boolean { + return this.can(`api-tokens`, "read"); + } + + public canWriteApiTokenGenerally(): boolean { + return this.can(`api-tokens`, "write"); + } + + public canDeleteApiTokenGenerally(): boolean { + return this.can(`api-tokens`, "delete"); + } + + public canReadApiToken(token: { id: string }): boolean { + return this.can(`api-tokens/${token.id}`, "read"); + } + + public canWriteApiToken(token: { id: string }): boolean { + return this.can(`api-tokens/${token.id}`, "write"); + } + + public canDeleteApiToken(token: { id: string }): boolean { + return this.can(`api-tokens/${token.id}`, "delete"); + } + + public canReadSystemMetricsPrometheus(): boolean { + return this.can(`system/metrics/prometheus`, "read"); + } + + public canReadSystemMetricsOverview(): boolean { + return this.can(`system/metrics/overview`, "read"); + } + + public canReadUserGenerally(): boolean { + return this.can(`users`, "read"); + } + + public canWriteUserGenerally(): boolean { + return this.can(`users`, "write"); + } + + public canDeleteUserGenerally(): boolean { + return this.can(`users`, "delete"); + } + + public canReadUser(user: { id: string }): boolean { + return this.can(`users/${user.id}`, "read"); + } + + public canWriteUser(user: { id: string }): boolean { + return 
this.can(`users/${user.id}`, "write"); + } + + public canDeleteUser(user: { id: string }): boolean { + return this.can(`users/${user.id}`, "delete"); + } + + public canWriteUserUsername(user: { id: string }): boolean { + return this.can(`users/${user.id}/username`, "write"); + } + + public canWriteUserPassword(user: { id: string }): boolean { + return this.can(`users/${user.id}/password`, "write"); + } + + public canWriteUserPermissions(user: { id: string }): boolean { + return this.can(`users/${user.id}/permissions`, "write"); + } + + public canReadOauthServiceClientGenerally(): boolean { + return this.can(`oauth/service-client`, "read"); + } + + public canWriteOauthServiceClientGenerally(): boolean { + return this.can(`oauth/service-client`, "read"); + } + + public canReadOauthServiceClient(serviceClient: { id: string }): boolean { + return this.can(`oauth/service-client/${serviceClient.id}`, "read"); + } + + public canWriteOauthServiceClient(serviceClient: { id: string }): boolean { + return this.can(`oauth/service-client/${serviceClient.id}`, "write"); + } + + public canDeleteOauthServiceClient(serviceClient: { id: string }): boolean { + return this.can(`oauth/service-client/${serviceClient.id}`, "delete"); + } + + public canReadOauthSigningKeyGenerally(): boolean { + return this.can(`oauth/signing-key`, "read"); + } + + public canWriteOauthSigningKeyGenerally(): boolean { + return this.can(`oauth/signing-key`, "write"); + } + + public canReadOauthSigningKey(signingKey: { id: string }): boolean { + return this.can(`oauth/signing-key/${signingKey.id}`, "read"); + } + + public canWriteOauthSigningKey(signingKey: { id: string }): boolean { + return this.can(`oauth/signing-key/${signingKey.id}`, "write"); + } + + public canDeleteOauthSigningKey(signingKey: { id: string }): boolean { + return this.can(`oauth/signing-key/${signingKey.id}`, "delete"); + } + + public canReadTemplatesGenerally() { + return this.can(`templates`, "read"); + } + + public 
canReadAuditLogGenerally() { + return this.can(`audit-log`, "read"); + } + + /** + * SPECIAL: This is a special case to allow runners to publish MQTT messages + */ + public canPublishMqttMessage(job: { id: string }): boolean { + return this.can(`special/job/${job.id}/publish-mqtt`, "write"); + } + + /** + * Used within runner + * SPECIAL: This is a special case to allow other services to read runner status + */ + public canReadRunnerStatus(job: { id: string }): boolean { + return this.can(`special/job/${job.id}/runner-status`, "read"); + } + + /** + * Used within runner + * SPECIAL: This is a special case to allow runners to invoke HTTP events + */ + public canInvokeRunnerHttpEvent(job: { id: string }): boolean { + return this.can(`special/job/${job.id}/invoke-http-event`, "write"); + } + + /** + * Used within runner + * SPECIAL: This is a special case to allow runners to invoke MQTT events + */ + public canInvokeRunnerMqttEvent(job: { id: string }): boolean { + return this.can(`special/job/${job.id}/invoke-mqtt-event`, "write"); + } + + /** + * Used within runner + * SPECIAL: This is a special case to allow runners to invoke schedule events + */ + public canInvokeRunnerScheduleEvent(job: { id: string }): boolean { + return this.can(`special/job/${job.id}/invoke-schedule-event`, "write"); + } + + /** + * Used by gateway + * SPECIAL: This enables services such as the gateway, to spawn runners for run-once jobs. 
+ */ + public canCreateSoftRunner(job: { id: string }): boolean { + return this.can(`special/job/${job.id}/create-soft-runner`, "write"); + } +} diff --git a/packages/common/src/deferred.ts b/packages/common/src/deferred.ts new file mode 100644 index 0000000..4a8fd44 --- /dev/null +++ b/packages/common/src/deferred.ts @@ -0,0 +1,13 @@ +export function deferred() { + let resolve!: (value: T | PromiseLike) => void; + let reject!: (reason?: unknown) => void; + + const promise = new Promise((res, rej) => { + resolve = res; + reject = rej; + }); + + return { promise, resolve, reject }; +} + +export type Deferred = ReturnType>; diff --git a/packages/common/src/index.ts b/packages/common/src/index.ts new file mode 100644 index 0000000..14fdaae --- /dev/null +++ b/packages/common/src/index.ts @@ -0,0 +1,3 @@ +export * from "./loop-base.js"; +export * from "./await-truthy.js"; +export * from "./timeout.js"; diff --git a/packages/common/src/loop-base.ts b/packages/common/src/loop-base.ts new file mode 100644 index 0000000..878225d --- /dev/null +++ b/packages/common/src/loop-base.ts @@ -0,0 +1,108 @@ +import assert from "node:assert"; +import { awaitTruthy } from "./await-truthy.js"; +import { timeout } from "./timeout.js"; +import EventEmitter from "node:events"; + +/** + * Lifecycle: + * 1) neutral = = default state (pre-start or stopped) + * 2) starting = in process of starting + * 3) started = active and running + * 4) stopping = in process of stopping + * 5) One stopped, goes to neutral. 
+ */ +export type StatusLifecycle = "neutral" | "starting" | "started" | "stopping"; + +type EventEmitterEvents = { + neutral: []; + starting: []; + started: []; + stopping: []; +}; + +export abstract class LoopBase { + protected status: StatusLifecycle = "neutral"; + + private signal: AbortController | null = null; + + protected abstract loopDuration: number; + + private events = new EventEmitter(); + + public start() { + return new Promise(async (resolve) => { + assert(this.status === "neutral"); + + this.signal = new AbortController(); + + this.events.once("started", () => { + resolve(); + }); + + this.status = "starting"; + + if (this.loopStarting) { + await this.loopStarting(); + } + + this.events.emit("starting"); + + this.loop(); + }); + } + + public stop() { + return new Promise(async (resolve) => { + assert(this.status === "started"); + + this.events.once("neutral", () => { + resolve(); + }); + + this.status = "stopping"; + + this.signal?.abort(); + + if (this.loopClosing) { + await this.loopClosing(); + } + + this.events.emit("stopping"); + }); + } + + private async loop() { + this.status = "started"; + + if (this.loopStarted) { + await this.loopStarted(); + } + + this.events.emit("started"); + + while (this.status === "started") { + try { + await this.loopIteration(); + } catch (err) { + console.error(err); + } + + await timeout(this.loopDuration, this.signal?.signal); + } + + this.status = "neutral"; + this.signal = null; + + if (this.loopClosed) { + await this.loopClosed(); + } + + this.events.emit("neutral"); + } + + protected abstract loopIteration(): Promise; + protected abstract loopClosing?(): Promise; + protected abstract loopClosed?(): Promise; + protected abstract loopStarting?(): Promise; + protected abstract loopStarted?(): Promise; +} diff --git a/packages/common/src/oauth.ts b/packages/common/src/oauth.ts new file mode 100644 index 0000000..b20c7ff --- /dev/null +++ b/packages/common/src/oauth.ts @@ -0,0 +1,48 @@ +export const 
getOAuthAudienceGeneralApi = () => { + return "jobber-api"; +}; + +export const getOAuthAudienceRunnerApi = (runnerId: string) => { + return `jobber-runner:${runnerId}`; +}; + +export const getOAuthAudienceGatewayApi = () => { + return "jobber-gateway"; +}; + +/** + * Checks whether a given audience matches any of the allowed audiences, supporting + * wildcard segments using the `*` character, split by `:`. + */ +export const canOAuthAccessAudience = ( + audience: string, + allowedAudiences: string[], +) => { + const audienceChunks = audience.split(":"); + + for (const allowedAudience of allowedAudiences) { + const allowedAudienceChunks = allowedAudience.split(":"); + + if (allowedAudienceChunks.length !== audienceChunks.length) { + continue; + } + + let matches = true; + + for (let i = 0; i < allowedAudienceChunks.length; i++) { + if ( + allowedAudienceChunks[i] !== "*" && + allowedAudienceChunks[i] !== audienceChunks[i] + ) { + matches = false; + break; + } + } + + if (matches) { + return true; + } + } + + return false; +}; diff --git a/packages/server/src/permissions.ts b/packages/common/src/permissions.ts similarity index 62% rename from packages/server/src/permissions.ts rename to packages/common/src/permissions.ts index cf33434..756186b 100644 --- a/packages/server/src/permissions.ts +++ b/packages/common/src/permissions.ts @@ -1,4 +1,5 @@ import { z } from "zod"; + export const JobberPermissionEffectSchema = z.enum(["allow", "deny"]); export const JobberPermissionActionSchema = z.enum(["read", "write", "delete"]); @@ -44,10 +45,83 @@ export const PERMISSION_READ_ONLY: JobberPermissions = [ }, ] as const; +export const PERMISSION_GATEWAY: JobberPermissions = [ + { + effect: "allow", + resource: "job/*", + actions: ["read"], + }, + { + effect: "allow", + resource: "special/job/*/runner-status", + actions: ["read"], + }, + { + effect: "allow", + resource: "templates", + actions: ["read"], + }, + { + effect: "allow", + resource: 
"special/job/*/invoke-http-event", + actions: ["write"], + }, + { + effect: "allow", + resource: "special/job/*/create-soft-runner", + actions: ["write"], + }, + { + effect: "allow", + resource: "job/*/runners", + actions: ["read", "delete"], + }, + { + effect: "deny", + resource: "job/*/environment/*", + actions: ["read", "write", "delete"], + }, + { + effect: "deny", + resource: "job/*/runners", + actions: ["write", "delete"], + }, + { + effect: "deny", + resource: "job/*/store", + actions: ["read", "write", "delete"], + }, + { + effect: "deny", + resource: "job/*/versions/*/archive", + actions: ["read", "write", "delete"], + }, + { + effect: "deny", + resource: "job/*/publish", + actions: ["read", "write", "delete"], + }, + { + effect: "deny", + resource: "api-tokens", + actions: ["read", "write", "delete"], + }, + { + effect: "deny", + resource: "system", + actions: ["read", "write", "delete"], + }, + { + effect: "deny", + resource: "users", + actions: ["read", "write", "delete"], + }, +] as const; + export const canPerformAction = ( permissions: JobberPermissions, resource: string, - action: JobberPermissionAction + action: JobberPermissionAction, ): boolean => { // Check for deny permissions first for (const permission of permissions) { diff --git a/packages/common/src/timeout.ts b/packages/common/src/timeout.ts new file mode 100644 index 0000000..8afd2de --- /dev/null +++ b/packages/common/src/timeout.ts @@ -0,0 +1,27 @@ +/** + * Creates a promise that resolves after a timeout + * @param ms Time to wait in milliseconds + * @returns + */ +export const timeout = (ms: number, signal?: AbortSignal) => { + return new Promise((resolve, reject) => { + if (signal?.aborted) { + return resolve(); + } + + const resolver = () => { + clearTimeout(timeoutId); + signal?.removeEventListener("abort", resolver); + + resolve(); + }; + + const timeoutId = setTimeout(() => { + resolver(); + }, ms); + + signal?.addEventListener("abort", () => { + resolver(); + }); + }); +}; diff 
--git a/packages/common/tsconfig.json b/packages/common/tsconfig.json new file mode 100644 index 0000000..cd126f1 --- /dev/null +++ b/packages/common/tsconfig.json @@ -0,0 +1,22 @@ +{ + "compilerOptions": { + "experimentalDecorators": true, + "inlineSourceMap": true, + "target": "ES2022", + "module": "NodeNext", + "moduleResolution": "NodeNext", + "strict": true, + "esModuleInterop": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, + "declaration": true, + "types": ["node"], + "rootDir": "./src", + "outDir": "./dist", + "paths":{ + "~/*": ["./src/*"] + } + }, + "$schema": "https://json.schemastore.org/tsconfig", + "display": "Recommended" +} \ No newline at end of file diff --git a/packages/gateway/package.json b/packages/gateway/package.json new file mode 100644 index 0000000..67963b6 --- /dev/null +++ b/packages/gateway/package.json @@ -0,0 +1,45 @@ +{ + "name": "@jobber/gateway", + "version": "1.0.0", + "description": "Jobber API Gateway", + "main": "./dist/index.js", + "type": "module", + "scripts": { + "build:image": "docker build -t jobber-gateway ./", + "build": "rimraf ./dist/* && tsc && tsc-alias -p tsconfig.json", + "start": "node ./dist/index.js", + "dev": "pnpm build && node --env-file-if-exists=.env ./dist/index.js" + }, + "keywords": [], + "author": "Eithan Hersey-Tuit", + "license": "MIT", + "dependencies": { + "@grpc/grpc-js": "^1.14.3", + "@grpc/proto-loader": "^0.8.0", + "@hono/node-server": "^1.13.7", + "@jobber/common": "workspace:*", + "@jobber/grpc": "workspace:*", + "@jobber/tcp-frame-socket": "workspace:*", + "hono": "^4.6.11", + "jose": "^6.1.3", + "long": "^5.3.2", + "nice-grpc": "^2.1.14", + "prom-client": "^15.1.3", + "protobufjs": "^8.0.0", + "reflect-metadata": "^0.2.2", + "semver": "^7.6.3", + "tsyringe": "^4.10.0", + "zod": "^3.23.8" + }, + "devDependencies": { + "@tsconfig/node20": "^20.1.4", + "@types/node": "^20.16.12", + "@types/semver": "^7.5.8", + "grpc-tools": "^1.13.1", + "rimraf": "^5.0.10", + 
"ts-proto": "^2.10.1", + "tsc-alias": "^1.8.10", + "typescript": "^5.6.3", + "vitest": "^3.2.4" + } +} diff --git a/packages/gateway/src/config.ts b/packages/gateway/src/config.ts new file mode 100644 index 0000000..cec6a96 --- /dev/null +++ b/packages/gateway/src/config.ts @@ -0,0 +1,34 @@ +import { z } from "zod"; + +export const ConfigurationOptionsSchema = z.object({ + // The port the gateway will listen to traffic on + PORT: z.coerce.number().default(3000), + + // Upstream gRPC service (for gateway -> backend) + GRPC_ENDPOINT: z.string().url(), + + // OIDC Issuer URL + OIDC_ISSUER_URL: z.string().url(), + + // OIDC Discovery URL (if not provided, will be derived from issuer url) + OIDC_DISCOVERY_URL: z.string().url().optional(), + + OAUTH_CLIENT_ID: z.string().min(1), + OAUTH_CLIENT_SECRET: z.string().min(1), +}); + +export type ConfigurationOptionsSchemaType = z.infer< + typeof ConfigurationOptionsSchema +>; + +export type ConfigurationOptions = keyof ConfigurationOptionsSchemaType; + +export const getConfigOption = ( + option: T, +): ConfigurationOptionsSchemaType[T] => { + const schema = ConfigurationOptionsSchema.shape[option]; + + return schema.parse(process.env[option], { + path: ["config", option], + }) as ConfigurationOptionsSchemaType[T]; +}; diff --git a/packages/gateway/src/gateway.ts b/packages/gateway/src/gateway.ts new file mode 100644 index 0000000..9c2df56 --- /dev/null +++ b/packages/gateway/src/gateway.ts @@ -0,0 +1,568 @@ +import { awaitTruthy, LoopBase } from "@jobber/common"; +import { + Channel, + ChannelCredentials, + ClientError, + createChannel, + createClientFactory, + Metadata, + RawClient, + ServerError, + Status, +} from "nice-grpc"; +import { Item as JobItem } from "@jobber/grpc/basics/job.js"; +import { Item as ActionItem } from "@jobber/grpc/basics/action.js"; +import { Item as TriggerItem } from "@jobber/grpc/basics/trigger.js"; +import { Item as RunnerItem } from "@jobber/grpc/basics/runner.js"; +import { + EventHttpRequest, + 
EventHttpRequest_Head, + RunnerAPIDefinition, +} from "@jobber/grpc/runner.js"; +import { FromTsProtoServiceDefinition } from "nice-grpc/lib/service-definitions/ts-proto.js"; +import { IncomingMessage, Server, ServerResponse } from "node:http"; +import { + getOAuthAudienceGeneralApi, + getOAuthAudienceRunnerApi, +} from "@jobber/common/oauth.js"; +import { randomUUID } from "node:crypto"; +import { getConfigOption } from "./config.js"; +import { GeneralAPIDefinition } from "@jobber/grpc/general.js"; +import assert from "node:assert"; +import { createOauth2Token } from "./oauth-client.js"; +import { ConnectivityState } from "@grpc/grpc-js/build/src/connectivity-state.js"; + +type RunnerClient = RawClient< + FromTsProtoServiceDefinition +>; + +type GeneralClient = RawClient< + FromTsProtoServiceDefinition +>; + +type GrpcAuth = { + audience: string; + jwt: string; + expiresAt: number; + refreshAt: number; + metadata: Metadata; +}; + +type JobEntry = { + job: JobItem; + action: ActionItem; + triggers: TriggerItem[]; + runners: RunnerItem[]; +}; + +type RunnerConnection = { + jobId: string; + auth: GrpcAuth; + channel: Channel; + client: RunnerClient; +}; + +export class GatewayClient extends LoopBase { + protected loopDuration = 500; + + protected loopStarted = undefined; + protected loopClosed = undefined; + + private server: Server | null = null; + + private grpcAuth: GrpcAuth | null = null; + private grpcChannel: Channel | null = null; + private grpcClient: GeneralClient | null = null; + + /** Key: job.id */ + private jobs = new Map(); + + /** Key: runner.id */ + private runnerGrpc = new Map(); + + /** Key: trigger.id */ + private triggers = new Map(); + + /** Key: template name */ + private templates = new Map<"bad-gateway", string>(); + + constructor() { + super(); + } + + private static createAuth( + audience: string, + tokenResult: { token: string; expiresAt: number; refreshAt: number }, + ): GrpcAuth { + return { + audience, + jwt: tokenResult.token, + 
expiresAt: tokenResult.expiresAt, + refreshAt: tokenResult.refreshAt, + metadata: Metadata({ + Authorization: `Bearer ${tokenResult.token}`, + }), + }; + } + + private async refreshAuthIfNeeded(auth: GrpcAuth): Promise { + if (Date.now() / 1000 < auth.refreshAt) { + return false; + } + + console.log( + `[Gateway] Refreshing OAuth2 token for audience: ${auth.audience}`, + ); + const tokenResult = await createOauth2Token(auth.audience); + + auth.jwt = tokenResult.token; + auth.expiresAt = tokenResult.expiresAt; + auth.refreshAt = tokenResult.refreshAt; + auth.metadata.set("Authorization", `Bearer ${tokenResult.token}`); + + return true; + } + + private async refreshRunnerTokens() { + for (const connection of this.runnerGrpc.values()) { + await this.refreshAuthIfNeeded(connection.auth); + } + } + + protected async loopStarting() { + const audience = getOAuthAudienceGeneralApi(); + const tokenResult = await createOauth2Token(audience); + + this.grpcAuth = GatewayClient.createAuth(audience, tokenResult); + + this.grpcChannel = createChannel( + getConfigOption("GRPC_ENDPOINT"), + ChannelCredentials.createInsecure(), + ); + this.grpcClient = createClientFactory().create( + GeneralAPIDefinition, + this.grpcChannel, + { "*": { metadata: this.grpcAuth.metadata } }, + ); + + // Force a loop iteration to ensure routes are ready + await this.loopIteration(); + + this.server = new Server(); + this.server.listen(getConfigOption("PORT")); + this.server.on("request", (req, res) => this.handleHttpRequest(req, res)); + } + + protected async loopClosing() { + await new Promise((resolve, reject) => + this.server?.close((err) => (err ? 
reject(err) : resolve(true))), + ); + + for (const connection of this.runnerGrpc.values()) { + connection.channel.close(); + } + this.runnerGrpc.clear(); + this.triggers.clear(); + this.jobs.clear(); + + this.grpcChannel?.close(); + this.grpcChannel = null; + this.grpcClient = null; + this.grpcAuth = null; + } + + protected async loopIteration() { + assert(this.grpcClient); + assert(this.grpcAuth); + + // Refresh tokens if they are approaching expiry + await this.refreshAuthIfNeeded(this.grpcAuth); + await this.refreshRunnerTokens(); + + // Fetch enabled jobs + const jobs = (await this.grpcClient.getJobs({})).jobs.filter( + (job) => job.status === "ENABLED" && job.versionId, + ); + + // Remove jobs that no longer exist + const activeJobIds = new Set(jobs.map((job) => job.id)); + + for (const [id, data] of this.jobs) { + if (!activeJobIds.has(id)) { + this.handleJobRemoval(data.job); + } + } + + // Add or update existing jobs + await Promise.all(jobs.map((job) => this.handleJobUpdate(job))); + + // fetch templates + await this.handleFetchTemplates(); + } + + private async handleJobUpdate(job: JobItem) { + assert(this.grpcClient); + + // Fetch triggers, action, and runners in parallel + const [{ triggers }, { action }, { runners }] = await Promise.all([ + this.grpcClient.getJobTriggersLatest({ jobId: job.id }), + this.grpcClient.getJobActionLatest({ jobId: job.id }), + this.grpcClient.getRunners({ + jobId: job.id, + status: "READY", + versionId: job.versionId, + }), + ]); + + if (!action) { + console.log(`[Gateway] Job ${job.id} has no action, skipping`); + return; + } + + const readyRunners = runners.filter((runner) => runner.readyAt !== null); + const previous = this.jobs.get(job.id); + + // Clean up gRPC connections for runners that are no longer active + const activeRunnerIds = new Set(readyRunners.map((r) => r.id)); + + for (const [runnerId, connection] of this.runnerGrpc) { + if (connection.jobId === job.id && !activeRunnerIds.has(runnerId)) { + 
connection.channel.close(); + this.runnerGrpc.delete(runnerId); + } + } + + // Create gRPC connections for new runners + for (const runner of readyRunners) { + if (this.runnerGrpc.has(runner.id)) { + continue; + } + + const audience = getOAuthAudienceRunnerApi(runner.id); + const tokenResult = await createOauth2Token(audience); + const auth = GatewayClient.createAuth(audience, tokenResult); + + const channel = createChannel( + `http://${runner.properties?.runnerContainerName}:${runner.properties?.runnerApiPort}`, + ChannelCredentials.createInsecure(), + { + "grpc.keepalive_permit_without_calls": 1, + "grpc.keepalive_timeout_ms": 30_000, + }, + ); + + const client = createClientFactory().create( + RunnerAPIDefinition, + channel, + { "*": { metadata: auth.metadata } }, + ); + + this.runnerGrpc.set(runner.id, { jobId: job.id, auth, channel, client }); + } + + // Remove triggers that no longer exist, then upsert current ones + if (previous) { + const currentTriggerIds = new Set(triggers.map((t) => t.id)); + + for (const old of previous.triggers) { + if (!currentTriggerIds.has(old.id)) { + this.triggers.delete(old.id); + } + } + } + + for (const trigger of triggers) { + this.triggers.set(trigger.id, trigger); + } + + this.jobs.set(job.id, { job, action, triggers, runners: readyRunners }); + } + + private handleJobRemoval(job: JobItem) { + for (const [triggerId, trigger] of this.triggers) { + if (trigger.jobId === job.id) { + this.triggers.delete(triggerId); + } + } + + for (const [runnerId, connection] of this.runnerGrpc) { + if (connection.jobId === job.id) { + connection.channel.close(); + this.runnerGrpc.delete(runnerId); + } + } + + this.jobs.delete(job.id); + } + + private async handleFetchTemplates() { + if (!this.grpcClient) { + return; + } + + const templates = await this.grpcClient.getTemplates({}); + + this.templates.set("bad-gateway", templates.templateBadGateway); + } + + private async getRunner(entry: JobEntry) { + assert(this.grpcClient); + + if 
(entry.action.runnerMode === "STANDARD" && entry.runners.length >= 1) { + const runner = + entry.runners[Math.floor(Math.random() * entry.runners.length)]; + + const grpc = this.runnerGrpc.get(runner.id); + if (grpc) { + const state = grpc.channel.getConnectivityState(false); + + if ( + state === ConnectivityState.READY || + state === ConnectivityState.IDLE || + state === ConnectivityState.CONNECTING + ) { + return runner; + } + } + } + + try { + const { runner } = await this.grpcClient.createSoftRunner({ + jobId: entry.job.id, + actionId: entry.action.id, + versionId: entry.job.versionId, + }); + + if (!runner) { + return null; + } + + await awaitTruthy(async () => { + const grpc = this.runnerGrpc.get(runner.id); + if (!grpc) { + return false; + } + + const state = grpc.channel.getConnectivityState(false); + return ( + state === ConnectivityState.READY || + state === ConnectivityState.IDLE || + state === ConnectivityState.CONNECTING + ); + }, 30_000); + + return runner; + } catch (err) { + if (err instanceof ClientError) { + console.warn( + `Failed to create soft-create runner for job ${entry.job.id}: ${err.message}`, + ); + + return null; + } + + throw err; + } + } + + private async handleHttpRequest(req: IncomingMessage, res: ServerResponse) { + if (this.status !== "started") { + res.statusCode = 503; + res.setHeader("Content-Type", "text/html"); + res.end(this.templates.get("bad-gateway")); + return; + } + + const trigger = this.matchTrigger(req); + + if (!trigger?.http || !this.jobs.has(trigger.jobId)) { + res.statusCode = 502; + res.setHeader("Content-Type", "text/html"); + res.end(this.templates.get("bad-gateway")); + return; + } + + const entry = this.jobs.get(trigger.jobId)!; + + const runner = await this.getRunner(entry); + + if (!runner) { + res.statusCode = 502; + res.setHeader("Content-Type", "text/html"); + res.end(this.templates.get("bad-gateway")); + return; + } + + const connection = this.runnerGrpc.get(runner.id); + if (!connection) { + 
res.statusCode = 502; + res.setHeader("Content-Type", "text/html"); + res.end(this.templates.get("bad-gateway")); + return; + } + + try { + let hasStartedResponding = false; + let hasTimeout = false; + + const timeoutHandle = setTimeout(() => { + if (hasStartedResponding) { + return; + } + + // Only timeout when the runner hasn't started responding + hasTimeout = true; + console.warn( + `[Gateway] HTTP request to runner ${runner.id} timed out after 30 seconds without response. Closing connection.`, + ); + }, entry.action.runnerTimeout * 1000); + + const response = connection.client.eventHttp( + this.buildHttpRequestStream(req, trigger), + ); + + for await (const event of response) { + if (hasTimeout) { + break; + } + + hasStartedResponding = true; + + if (event.head) { + res.statusCode = event.head.status; + for (const header of event.head.headers) { + res.setHeader(header.name, header.value); + } + } + + if (event.body) { + res.write(event.body.data); + if (event.body.end) { + res.end(); + } + } + } + + clearTimeout(timeoutHandle); + } catch (err) { + console.error( + `[Gateway] Error proxying request to runner ${runner.id}:`, + err, + ); + + if (!res.headersSent) { + res.statusCode = 502; + res.setHeader("Content-Type", "text/html"); + res.end(this.templates.get("bad-gateway")); + } else if (!res.writableEnded) { + res.end(); + } + } finally { + if (entry.action.runnerMode === "RUN_ONCE") { + await this.grpcClient?.deleteRunner({ runnerId: runner.id }); + } + } + } + + private async *buildHttpRequestStream( + req: IncomingMessage, + trigger: TriggerItem, + ): AsyncIterable { + const headers: EventHttpRequest_Head["headers"] = []; + + for (const [key, value] of Object.entries(req.headers)) { + if (Array.isArray(value)) { + for (const v of value) { + headers.push({ name: key, value: v }); + } + } else if (value !== undefined) { + headers.push({ name: key, value }); + } + } + + let path = ""; + let query = ""; + + if (req.url) { + const qPos = req.url.indexOf("?"); 
+ if (qPos >= 0) { + path = req.url.substring(0, qPos); + query = req.url.substring(qPos + 1); + } else { + path = req.url; + } + } + + yield { info: { triggerName: trigger.http!.name ?? "" } }; + + yield { + head: { + id: randomUUID(), + scheme: "http", + method: req.method || "GET", + hostname: req.headers["host"] || "", + headers, + query, + path, + }, + }; + + let seq = 0; + + for await (const chunk of req) { + yield { + body: { id: randomUUID(), seq: seq++, data: chunk, end: false }, + }; + } + + yield { + body: { + id: randomUUID(), + seq: seq++, + data: new Uint8Array(0), + end: true, + }, + }; + } + + private matchTrigger(req: IncomingMessage): TriggerItem | null { + const host = req.headers["host"]; + const method = req.method; + + if (!host || !method || !req.url) { + return null; + } + + const qPos = req.url.indexOf("?"); + const path = qPos >= 0 ? req.url.substring(0, qPos) : req.url; + + for (const trigger of this.triggers.values()) { + if (!trigger.http) { + continue; + } + + if (trigger.http.hostname && trigger.http.hostname !== host) { + continue; + } + + if (trigger.http.method && trigger.http.method !== method) { + continue; + } + + if (trigger.http.path) { + if (trigger.http.path.startsWith("^")) { + const regex = new RegExp(trigger.http.path); + if (!regex.test(path)) { + continue; + } + } else if (trigger.http.path !== path) { + continue; + } + } + + return trigger; + } + + return null; + } +} diff --git a/packages/gateway/src/index.ts b/packages/gateway/src/index.ts new file mode 100644 index 0000000..04ab40a --- /dev/null +++ b/packages/gateway/src/index.ts @@ -0,0 +1,28 @@ +import "reflect-metadata"; +import { GatewayClient } from "./gateway.js"; + +async function main() { + console.log("Starting Gateway Management Client..."); + + console.log("Starting Gateway..."); + const gateway = new GatewayClient(); + await gateway.start(); + console.log("Gateway Management Client started"); + + const shutdown = async () => { + console.log("Stopping 
Gateway..."); + await gateway.stop(); + console.log("stopped"); + + process.exit(0); + }; + + process.once("SIGTERM", async () => { + await shutdown(); + }); + process.once("SIGINT", async () => { + await shutdown(); + }); +} + +main(); diff --git a/packages/gateway/src/oauth-client.ts b/packages/gateway/src/oauth-client.ts new file mode 100644 index 0000000..6faab88 --- /dev/null +++ b/packages/gateway/src/oauth-client.ts @@ -0,0 +1,97 @@ +import { getConfigOption } from "./config.js"; +import { createRemoteJWKSet, decodeJwt } from "jose"; + +function getDiscoveryUrl(): string { + if (getConfigOption("OIDC_DISCOVERY_URL")) { + return getConfigOption("OIDC_DISCOVERY_URL")!; + } + + const issuerUrl = getConfigOption("OIDC_ISSUER_URL"); + + return `${issuerUrl.replace(/\/+$/, "")}/.well-known/openid-configuration`; +} + +async function getOAuthDiscovery() { + const response = await fetch(getDiscoveryUrl()); + + if (!response.ok) { + throw new Error( + `Failed to fetch OIDC discovery document: ${response.status} ${response.statusText}`, + ); + } + + return (await response.json()) as { + issuer: string; + token_endpoint: string; + jwks_uri: string; + token_endpoint_auth_methods_supported: string[]; + }; +} + +async function getRemoteJwks() { + const discovery = await getOAuthDiscovery(); + + return createRemoteJWKSet(new URL(discovery.jwks_uri), { + cacheMaxAge: 5 * 60 * 1000, + }); +} + +export async function createOauth2Token(audience: string) { + const discovery = await getOAuthDiscovery(); + + if ( + !discovery.token_endpoint_auth_methods_supported.includes( + "client_secret_basic", + ) + ) { + throw new Error( + "OIDC provider does not support client_secret_basic authentication", + ); + } + + const params = new URLSearchParams(); + params.append("grant_type", "client_credentials"); + params.append("client_id", getConfigOption("OAUTH_CLIENT_ID")); + params.append("client_secret", getConfigOption("OAUTH_CLIENT_SECRET")); + params.append("audience", audience); + + 
const response = await fetch(discovery.token_endpoint, { + method: "POST", + headers: { + "Content-Type": "application/x-www-form-urlencoded", + }, + body: params.toString(), + }); + + if (!response.ok) { + console.error( + `Client credentials request failed ${response.status}: ${await response.text()}`, + ); + + throw new Error( + `Failed to fetch OIDC token: ${response.status} ${response.statusText}`, + ); + } + + const data = await response.json(); + + if (typeof data.access_token !== "string") { + throw new Error("OIDC token response does not contain access_token"); + } + + if (!data.token_type || data.token_type.toLowerCase() !== "bearer") { + throw new Error("OIDC token response does not contain bearer token"); + } + + const decoded = decodeJwt(data.access_token); + + if (!decoded.exp) { + throw new Error("OIDC token does not contain exp claim"); + } + + return { + token: data.access_token, + expiresAt: decoded.exp, + refreshAt: decoded.exp - 60, + }; +} diff --git a/packages/gateway/tsconfig.json b/packages/gateway/tsconfig.json new file mode 100644 index 0000000..6ffe955 --- /dev/null +++ b/packages/gateway/tsconfig.json @@ -0,0 +1,22 @@ +{ + "compilerOptions": { + "experimentalDecorators": true, + "inlineSourceMap": true, + "target": "ES2022", + "module": "NodeNext", + "moduleResolution": "NodeNext", + "strict": true, + "esModuleInterop": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, + "declaration": false, + "types": ["node"], + "rootDir": "./src", + "outDir": "./dist", + "paths":{ + "~/*": ["./src/*"] + } + }, + "$schema": "https://json.schemastore.org/tsconfig", + "display": "Recommended" +} \ No newline at end of file diff --git a/packages/grpc/package.json b/packages/grpc/package.json new file mode 100644 index 0000000..42b35cd --- /dev/null +++ b/packages/grpc/package.json @@ -0,0 +1,37 @@ +{ + "name": "@jobber/grpc", + "version": "1.0.0", + "description": "gRPC Definitions for Jobber", + "main": "./dist/index.js", + 
"types": "./dist/index.d.ts", + "exports": { + ".": "./dist/index.js", + "./*": "./dist/*" + }, + "type": "module", + "scripts": { + "grpc": "rm -rf dist 2>/dev/null || : && protoc --plugin=protoc-gen-ts_proto=./node_modules/.bin/protoc-gen-ts_proto --ts_proto_opt=importSuffix=.js --ts_proto_out=./src --ts_proto_opt=outputServices=nice-grpc,outputServices=generic-definitions,useExactTypes=false,stringEnums=true,esModuleInterop=true,enumsAsLiterals=true,outputDefaultValues=true --descriptor_set_out=src/protoset.bin --include_imports --proto_path=./proto/ ./proto/*.proto ./proto/**/*.proto", + "build": "pnpm grpc && tsc" + }, + "keywords": [], + "author": "Eithan Hersey-Tuit", + "license": "MIT", + "dependencies": { + "@bufbuild/protobuf": "^2.10.2", + "@grpc/grpc-js": "^1.14.3", + "@grpc/proto-loader": "^0.8.0", + "long": "^5.3.2", + "nice-grpc": "^2.1.14", + "nice-grpc-common": "^2.0.2", + "prom-client": "^15.1.3", + "protobufjs": "^8.0.0" + }, + "devDependencies": { + "@tsconfig/node20": "^20.1.4", + "@types/node": "^20.16.12", + "grpc-tools": "^1.13.1", + "ts-proto": "^2.10.1", + "tsc-alias": "^1.8.10", + "typescript": "^5.6.3" + } +} diff --git a/packages/grpc/proto/base.proto b/packages/grpc/proto/base.proto new file mode 100644 index 0000000..afc4441 --- /dev/null +++ b/packages/grpc/proto/base.proto @@ -0,0 +1,11 @@ +syntax = "proto3"; + +package Base; + +message Empty {} + +message ExportChunk { + string id = 1; + int64 sequence = 2; + bytes data = 3; +} diff --git a/packages/grpc/proto/basics/action.proto b/packages/grpc/proto/basics/action.proto new file mode 100644 index 0000000..105a1f5 --- /dev/null +++ b/packages/grpc/proto/basics/action.proto @@ -0,0 +1,49 @@ +syntax = "proto3"; + +package Action; + +message Item { + message DockerArguments { + message Volume { + enum VolumeMode { + READ_ONLY = 0; + READ_WRITE = 1; + } + + string source = 1; + string target = 2; + VolumeMode mode = 3; + } + + message Label { + string key = 1; + string value = 2; + } + 
+ repeated string networks = 1; + repeated Volume volumes = 2; + repeated Label labels = 3; + optional string memoryLimit = 4; + repeated string directPassthroughArguments = 5; + } + + enum RunnerMode { + STANDARD = 0; + RUN_ONCE = 1; + } + + string id = 1; + string jobId = 2; + string versionId = 3; + + string runnerImage = 4; + bool runnerAsynchronous = 5; + uint32 runnerMinCount = 6; + uint32 runnerMaxCount = 7; + uint32 runnerTimeout = 8; + uint32 runnerMaxIdleAge = 9; + uint32 runnerMaxAge = 10; + uint32 runnerMaxAgeHard = 11; + DockerArguments dockerArguments = 12; + RunnerMode runnerMode = 13; +} diff --git a/packages/grpc/proto/basics/api-token.proto b/packages/grpc/proto/basics/api-token.proto new file mode 100644 index 0000000..ae3659b --- /dev/null +++ b/packages/grpc/proto/basics/api-token.proto @@ -0,0 +1,23 @@ +syntax = "proto3"; + +import "basics/common.proto"; + +package ApiToken; + +message Item { + enum Status { + ENABLED = 0; + DISABLED = 1; + } + + string id = 1; + string token = 2; + string userId = 3; + string description = 4; + Status status = 5; + + repeated Common.Permission permissions = 6; + + string expires = 7; + string created = 8; +} diff --git a/packages/grpc/proto/basics/common.proto b/packages/grpc/proto/basics/common.proto new file mode 100644 index 0000000..53d691a --- /dev/null +++ b/packages/grpc/proto/basics/common.proto @@ -0,0 +1,20 @@ +syntax = "proto3"; + +package Common; + +message Permission { + enum Effect { + ALLOW = 0; + DENY = 1; + } + + enum Action { + READ = 0; + WRITE = 1; + DELETE = 2; + } + + Effect effect = 1; + string resource = 2; + repeated Action actions = 3; +} \ No newline at end of file diff --git a/packages/grpc/proto/basics/environment.proto b/packages/grpc/proto/basics/environment.proto new file mode 100644 index 0000000..73a0fbc --- /dev/null +++ b/packages/grpc/proto/basics/environment.proto @@ -0,0 +1,24 @@ +syntax = "proto3"; + +import "basics/common.proto"; + +package Environment; + +message Item 
{ + message ContextValue { + enum Type { + TEXT = 0; + SECRET = 1; + } + + Type type = 1; + optional string value = 2; + } + + string id = 1; + string jobId = 2; + + map context = 3; + + string modified = 4; +} diff --git a/packages/grpc/proto/basics/job-version.proto b/packages/grpc/proto/basics/job-version.proto new file mode 100644 index 0000000..26c74e3 --- /dev/null +++ b/packages/grpc/proto/basics/job-version.proto @@ -0,0 +1,13 @@ +syntax = "proto3"; + +import "basics/common.proto"; + +package JobVersion; + +message Item { + string id = 1; + string jobId = 2; + string version = 3; + string modified = 4; + string created = 5; +} diff --git a/packages/grpc/proto/basics/job.proto b/packages/grpc/proto/basics/job.proto new file mode 100644 index 0000000..2ad3de3 --- /dev/null +++ b/packages/grpc/proto/basics/job.proto @@ -0,0 +1,22 @@ +syntax = "proto3"; + +package Job; + +message Item { + message Link { + string name = 1; + string url = 2; + } + + enum Status { + ENABLED = 0; + DISABLED = 1; + } + + string id = 1; + string jobName = 2; + Status status = 3; + optional string description = 4; + optional string versionId = 5; + repeated Link links = 6; +} diff --git a/packages/grpc/proto/basics/runner.proto b/packages/grpc/proto/basics/runner.proto new file mode 100644 index 0000000..b50d3ed --- /dev/null +++ b/packages/grpc/proto/basics/runner.proto @@ -0,0 +1,34 @@ +syntax = "proto3"; + +package Runner; + +message Item { + enum Status { + STARTING = 0; + READY = 1; + CLOSING = 2; + CLOSED = 3; + } + + message Properties { + string runnerPid = 1; + reserved 2; + string runnerContainerName = 3; + repeated string runnerContainerNetworks = 4; + uint32 runnerApiPort = 5; + bool runnerDebug = 6; + } + + string id = 1; + string jobId = 2; + string actionId = 3; + string versionId = 4; + Properties properties = 5; + + reserved 6 to 10; + + string createdAt = 11; + optional string readyAt = 12; + optional string closingAt = 13; + optional string closedAt = 14; +} diff 
--git a/packages/grpc/proto/basics/trigger.proto b/packages/grpc/proto/basics/trigger.proto new file mode 100644 index 0000000..d22ba40 --- /dev/null +++ b/packages/grpc/proto/basics/trigger.proto @@ -0,0 +1,56 @@ +syntax = "proto3"; + +import "basics/common.proto"; + +package Trigger; + +message Item { + message TriggerSchedule { + optional string name = 1; + string cron = 2; + optional string timezone = 3; + } + + message TriggerHttp { + optional string name = 1; + optional string hostname = 2; + optional string method = 3; + optional string path = 4; + } + + message TriggerMqtt { + message Connection { + optional string protocol = 1; + optional string protocolVariable = 2; + + optional string port = 3; + optional string portVariable = 4; + + optional string host = 5; + optional string hostVariable = 6; + + optional string username = 7; + optional string usernameVariable = 8; + + optional string password = 9; + optional string passwordVariable = 10; + + optional string clientId = 11; + optional string clientIdVariable = 12; + } + + optional string name = 1; + repeated string topics = 2; + Connection connection = 3; + } + + string id = 1; + string jobId = 2; + string versionId = 3; + + oneof context { + TriggerSchedule schedule = 4; + TriggerHttp http = 5; + TriggerMqtt mqtt = 6; + } +} diff --git a/packages/grpc/proto/gateway.proto b/packages/grpc/proto/gateway.proto new file mode 100644 index 0000000..80d31f8 --- /dev/null +++ b/packages/grpc/proto/gateway.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +import "base.proto"; + +package GatewayAPI; + +service GatewayAPI { +// +} diff --git a/packages/grpc/proto/general.proto b/packages/grpc/proto/general.proto new file mode 100644 index 0000000..bb6a25b --- /dev/null +++ b/packages/grpc/proto/general.proto @@ -0,0 +1,248 @@ +syntax = "proto3"; + +import "base.proto"; +import "basics/action.proto"; +import "basics/api-token.proto"; +import "basics/environment.proto"; +import "basics/job-version.proto"; +import 
"basics/job.proto"; +import "basics/runner.proto"; +import "basics/trigger.proto"; + +package GeneralAPI; + +service GeneralAPI { + rpc getJob(JobRequest) returns (JobResponse); + rpc getJobs(JobsRequest) returns (JobsResponse); + + rpc getJobVersion(JobVersionRequest) returns (JobVersionResponse); + rpc getJobVersionLatest(JobVersionLatestRequest) returns (JobVersionLatestResponse); + rpc getJobVersions(JobVersionsRequest) returns (JobVersionsResponse); + rpc getJobVersionArchive(JobVersionArchiveRequest) returns (stream JobVersionArchiveResponse); + + rpc getJobAction(JobActionRequest) returns (JobActionResponse); + rpc getJobActionLatest(JobActionLatestRequest) returns (JobActionLatestResponse); + rpc getJobActions(JobActionsRequest) returns (JobActionsResponse); + + rpc getJobTrigger(JobTriggerRequest) returns (JobTriggerResponse); + rpc getJobTriggers(JobTriggersRequest) returns (JobTriggersResponse); + rpc getJobTriggersLatest(JobTriggersLatestRequest) returns (JobTriggersLatestResponse); + + rpc getRunner(RunnerRequest) returns (RunnerResponse); + rpc getRunners(RunnersRequest) returns (RunnersResponse); + rpc deleteRunner(DeleteRunnerRequest) returns (Base.Empty); + + rpc getStoreItem(StoreItemRequest) returns (StoreItemResponse); + rpc setStoreItem(SetStoreItemRequest) returns (SetStoreItemResponse); + rpc deleteStoreItem(DeleteStoreItemRequest) returns (DeleteStoreItemResponse); + + + // This will likely be migrated to its own service similar to the GatewayAPI + rpc publishMqttMessage(PublishMqttMessageRequest) returns (PublishMqttMessageResponse); + + rpc createSoftRunner(CreateSoftRunnerRequest) returns (CreateSoftRunnerResponse); + + rpc getTemplates (TemplatesRequest) returns (TemplatesResponse); +} + +/** getJob **/ +message JobRequest { + string jobId = 1; +} +message JobResponse { + Job.Item job = 1; +} + + +/** getJobs **/ +message JobsRequest {} +message JobsResponse { + repeated Job.Item jobs = 1; +} + + +/** getJobVersion **/ +message 
JobVersionRequest { + string jobVersionId = 1; +} +message JobVersionResponse { + JobVersion.Item jobVersion = 1; +} + + +/** getJobVersionLatest **/ +message JobVersionLatestRequest { + string jobId = 1; +} +message JobVersionLatestResponse { + JobVersion.Item jobVersion = 1; +} + + +/** getJobVersions **/ +message JobVersionsRequest { + string jobId = 1; +} +message JobVersionsResponse { + repeated JobVersion.Item jobVersions = 1; +} + + +/** getJobVersionArchive **/ +message JobVersionArchiveRequest { + string jobId = 1; + string jobVersionId = 2; +} +message JobVersionArchiveResponse { + uint64 seq = 1; + bytes data = 2; + bool end = 3; +} + + +/** getJobAction **/ +message JobActionRequest { + string jobId = 1; + string actionId = 2; +} +message JobActionResponse { + Action.Item action = 1; +} + + +/** getJobActionLatest **/ +message JobActionLatestRequest { + string jobId = 1; +} +message JobActionLatestResponse { + Action.Item action = 1; +} + + +/** getJobActions **/ +message JobActionsRequest { + string jobId = 1; + optional string versionId = 2; +} +message JobActionsResponse { + repeated Action.Item actions = 1; +} + + +/** getJobTrigger **/ +message JobTriggerRequest { + string jobId = 1; + string triggerId = 2; +} +message JobTriggerResponse { + Trigger.Item trigger = 1; +} + + +/** getJobTriggers **/ +message JobTriggersRequest { + string jobId = 1; + optional string versionId = 2; +} +message JobTriggersResponse { + repeated Trigger.Item triggers = 1; +} + + +/** getJobTriggersLatest **/ +message JobTriggersLatestRequest { + string jobId = 1; +} +message JobTriggersLatestResponse { + repeated Trigger.Item triggers = 1; +} + + +/** getRunner **/ +message RunnerRequest { + string runnerId = 1; +} +message RunnerResponse { + Runner.Item runner = 1; +} + + +/** getRunners **/ +message RunnersRequest { + optional string jobId = 1; + optional string versionId = 2; + optional string actionId = 3; + optional Runner.Item.Status status = 4; +} +message 
RunnersResponse { + repeated Runner.Item runners = 1; +} + + +/** deleteRunner **/ +message DeleteRunnerRequest { + string runnerId = 1; +} + + +/** getStoreItem **/ +message StoreItemRequest { + string jobId = 1; + string key = 2; +} +message StoreItemResponse { + string key = 1; + string value = 2; +} + + +/** setStoreItem **/ +message SetStoreItemRequest { + string jobId = 1; + string key = 2; + string value = 3; + optional uint32 ttl = 4; +} +message SetStoreItemResponse { + string key = 1; + string value = 2; +} + + +/** deleteStoreItem **/ +message DeleteStoreItemRequest { + string jobId = 1; + string key = 2; +} +message DeleteStoreItemResponse { + string key = 1; +} + + +/** publishMqttMessage **/ +message PublishMqttMessageRequest { + string jobId = 1; + string topic = 2; + string payload = 3; +} +message PublishMqttMessageResponse {} + + +/** createRunner **/ +message CreateSoftRunnerRequest { + string jobId = 1; + string versionId = 2; + string actionId = 3; +} +message CreateSoftRunnerResponse { + Runner.Item runner = 1; +} + + +/** getTemplates **/ +message TemplatesRequest {} +message TemplatesResponse { + string templateBadGateway = 1; + + reserved 2 to 20; +} \ No newline at end of file diff --git a/packages/grpc/proto/runner.proto b/packages/grpc/proto/runner.proto new file mode 100644 index 0000000..11c1175 --- /dev/null +++ b/packages/grpc/proto/runner.proto @@ -0,0 +1,116 @@ +syntax = "proto3"; + +import "base.proto"; + +package RunnerAPI; + +service RunnerAPI { + rpc eventHttp(stream EventHttpRequest) returns (stream EventHttpResponse); + rpc eventMqtt(EventMqttRequest) returns (EventMqttResponse); + rpc eventSchedule(EventScheduleRequest) returns (EventScheduleResponse); + + rpc status(Base.Empty) returns (StatusResponse); +} + +message BasicContext { + string triggerName = 1; + reserved 2 to 10; +} + +message HttpHeader { + string name = 1; + string value = 2; +} + +message EventHttpRequest { + message Head { + string id = 1; + string method 
= 2; + string scheme = 3; + string hostname = 4; + string path = 5; + string query = 6; + reserved 7 to 10; + repeated HttpHeader headers = 11; + } + + message Body { + string id = 1; + uint64 seq = 2; + bytes data = 3; + bool end = 4; + reserved 5 to 8; + } + + oneof event { + BasicContext info = 1; + Head head = 2; + Body body = 3; + } +} + + +message EventHttpResponse { + message Head { + string id = 1; + int32 status = 2; + repeated HttpHeader headers = 3; + } + + message Body { + string id = 1; + uint64 seq = 2; + bytes data = 3; + bool end = 4; + } + + oneof event { + Head head = 1; + Body body = 2; + } +} + +message EventMqttRequest { + BasicContext context = 1; + string topic = 2; + bytes payload = 3; +} + +message EventMqttResponse { + enum Status { + ACCEPTED = 0; + REJECTED = 1; + } + + Status status = 1; +} + +message EventScheduleRequest { + BasicContext context = 1; + string scheduledAt = 2; +} + +message EventScheduleResponse { + enum Status { + ACCEPTED = 0; + REJECTED = 1; + } + + Status status = 1; +} + + +message StatusResponse { + enum Status { + STARTING = 0; + READY = 1; + CLOSING = 2; + CLOSED = 3; + FATAL = 4; + } + + Status status = 1; + uint32 lastRequestAt = 2; + uint32 loadAverage5Seconds = 3; + uint32 loadAverage1Minute = 4; +} diff --git a/packages/grpc/src/base.ts b/packages/grpc/src/base.ts new file mode 100644 index 0000000..f8576ed --- /dev/null +++ b/packages/grpc/src/base.ts @@ -0,0 +1,211 @@ +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. 
+// versions: +// protoc-gen-ts_proto v2.10.1 +// protoc v3.21.12 +// source: base.proto + +/* eslint-disable */ +import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire"; + +export const protobufPackage = "Base"; + +export interface Empty { +} + +export interface ExportChunk { + id: string; + sequence: number; + data: Uint8Array; +} + +function createBaseEmpty(): Empty { + return {}; +} + +export const Empty: MessageFns = { + encode(_: Empty, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): Empty { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEmpty(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(_: any): Empty { + return {}; + }, + + toJSON(_: Empty): unknown { + const obj: any = {}; + return obj; + }, + + create(base?: DeepPartial): Empty { + return Empty.fromPartial(base ?? {}); + }, + fromPartial(_: DeepPartial): Empty { + const message = createBaseEmpty(); + return message; + }, +}; + +function createBaseExportChunk(): ExportChunk { + return { id: "", sequence: 0, data: new Uint8Array(0) }; +} + +export const ExportChunk: MessageFns = { + encode(message: ExportChunk, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.id !== "") { + writer.uint32(10).string(message.id); + } + if (message.sequence !== 0) { + writer.uint32(16).int64(message.sequence); + } + if (message.data.length !== 0) { + writer.uint32(26).bytes(message.data); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): ExportChunk { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseExportChunk(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.id = reader.string(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.sequence = longToNumber(reader.int64()); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.data = reader.bytes(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): ExportChunk { + return { + id: isSet(object.id) ? globalThis.String(object.id) : "", + sequence: isSet(object.sequence) ? globalThis.Number(object.sequence) : 0, + data: isSet(object.data) ? bytesFromBase64(object.data) : new Uint8Array(0), + }; + }, + + toJSON(message: ExportChunk): unknown { + const obj: any = {}; + if (message.id !== "") { + obj.id = message.id; + } + if (message.sequence !== 0) { + obj.sequence = Math.round(message.sequence); + } + if (message.data.length !== 0) { + obj.data = base64FromBytes(message.data); + } + return obj; + }, + + create(base?: DeepPartial): ExportChunk { + return ExportChunk.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): ExportChunk { + const message = createBaseExportChunk(); + message.id = object.id ?? ""; + message.sequence = object.sequence ?? 0; + message.data = object.data ?? 
new Uint8Array(0); + return message; + }, +}; + +function bytesFromBase64(b64: string): Uint8Array { + if ((globalThis as any).Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if ((globalThis as any).Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(globalThis.String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends globalThis.Array ? globalThis.Array> + : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +function longToNumber(int64: { toString(): string }): number { + const num = globalThis.Number(int64.toString()); + if (num > globalThis.Number.MAX_SAFE_INTEGER) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + if (num < globalThis.Number.MIN_SAFE_INTEGER) { + throw new globalThis.Error("Value is smaller than Number.MIN_SAFE_INTEGER"); + } + return num; +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} + +export interface MessageFns { + encode(message: T, writer?: BinaryWriter): BinaryWriter; + decode(input: BinaryReader | Uint8Array, length?: number): T; + fromJSON(object: any): T; + toJSON(message: T): unknown; + create(base?: DeepPartial): T; + fromPartial(object: DeepPartial): T; +} diff --git a/packages/grpc/src/basics/action.ts b/packages/grpc/src/basics/action.ts new file mode 100644 index 0000000..ce6acea --- /dev/null +++ b/packages/grpc/src/basics/action.ts @@ -0,0 +1,739 @@ +// Code 
generated by protoc-gen-ts_proto. DO NOT EDIT. +// versions: +// protoc-gen-ts_proto v2.10.1 +// protoc v3.21.12 +// source: basics/action.proto + +/* eslint-disable */ +import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire"; + +export const protobufPackage = "Action"; + +export interface Item { + id: string; + jobId: string; + versionId: string; + runnerImage: string; + runnerAsynchronous: boolean; + runnerMinCount: number; + runnerMaxCount: number; + runnerTimeout: number; + runnerMaxIdleAge: number; + runnerMaxAge: number; + runnerMaxAgeHard: number; + dockerArguments: Item_DockerArguments | undefined; + runnerMode: Item_RunnerMode; +} + +export const Item_RunnerMode = { STANDARD: "STANDARD", RUN_ONCE: "RUN_ONCE", UNRECOGNIZED: "UNRECOGNIZED" } as const; + +export type Item_RunnerMode = typeof Item_RunnerMode[keyof typeof Item_RunnerMode]; + +export namespace Item_RunnerMode { + export type STANDARD = typeof Item_RunnerMode.STANDARD; + export type RUN_ONCE = typeof Item_RunnerMode.RUN_ONCE; + export type UNRECOGNIZED = typeof Item_RunnerMode.UNRECOGNIZED; +} + +export function item_RunnerModeFromJSON(object: any): Item_RunnerMode { + switch (object) { + case 0: + case "STANDARD": + return Item_RunnerMode.STANDARD; + case 1: + case "RUN_ONCE": + return Item_RunnerMode.RUN_ONCE; + case -1: + case "UNRECOGNIZED": + default: + return Item_RunnerMode.UNRECOGNIZED; + } +} + +export function item_RunnerModeToJSON(object: Item_RunnerMode): string { + switch (object) { + case Item_RunnerMode.STANDARD: + return "STANDARD"; + case Item_RunnerMode.RUN_ONCE: + return "RUN_ONCE"; + case Item_RunnerMode.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export function item_RunnerModeToNumber(object: Item_RunnerMode): number { + switch (object) { + case Item_RunnerMode.STANDARD: + return 0; + case Item_RunnerMode.RUN_ONCE: + return 1; + case Item_RunnerMode.UNRECOGNIZED: + default: + return -1; + } +} + +export interface Item_DockerArguments { + 
networks: string[]; + volumes: Item_DockerArguments_Volume[]; + labels: Item_DockerArguments_Label[]; + memoryLimit?: string | undefined; + directPassthroughArguments: string[]; +} + +export interface Item_DockerArguments_Volume { + source: string; + target: string; + mode: Item_DockerArguments_Volume_VolumeMode; +} + +export const Item_DockerArguments_Volume_VolumeMode = { + READ_ONLY: "READ_ONLY", + READ_WRITE: "READ_WRITE", + UNRECOGNIZED: "UNRECOGNIZED", +} as const; + +export type Item_DockerArguments_Volume_VolumeMode = + typeof Item_DockerArguments_Volume_VolumeMode[keyof typeof Item_DockerArguments_Volume_VolumeMode]; + +export namespace Item_DockerArguments_Volume_VolumeMode { + export type READ_ONLY = typeof Item_DockerArguments_Volume_VolumeMode.READ_ONLY; + export type READ_WRITE = typeof Item_DockerArguments_Volume_VolumeMode.READ_WRITE; + export type UNRECOGNIZED = typeof Item_DockerArguments_Volume_VolumeMode.UNRECOGNIZED; +} + +export function item_DockerArguments_Volume_VolumeModeFromJSON(object: any): Item_DockerArguments_Volume_VolumeMode { + switch (object) { + case 0: + case "READ_ONLY": + return Item_DockerArguments_Volume_VolumeMode.READ_ONLY; + case 1: + case "READ_WRITE": + return Item_DockerArguments_Volume_VolumeMode.READ_WRITE; + case -1: + case "UNRECOGNIZED": + default: + return Item_DockerArguments_Volume_VolumeMode.UNRECOGNIZED; + } +} + +export function item_DockerArguments_Volume_VolumeModeToJSON(object: Item_DockerArguments_Volume_VolumeMode): string { + switch (object) { + case Item_DockerArguments_Volume_VolumeMode.READ_ONLY: + return "READ_ONLY"; + case Item_DockerArguments_Volume_VolumeMode.READ_WRITE: + return "READ_WRITE"; + case Item_DockerArguments_Volume_VolumeMode.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export function item_DockerArguments_Volume_VolumeModeToNumber(object: Item_DockerArguments_Volume_VolumeMode): number { + switch (object) { + case Item_DockerArguments_Volume_VolumeMode.READ_ONLY: + 
return 0; + case Item_DockerArguments_Volume_VolumeMode.READ_WRITE: + return 1; + case Item_DockerArguments_Volume_VolumeMode.UNRECOGNIZED: + default: + return -1; + } +} + +export interface Item_DockerArguments_Label { + key: string; + value: string; +} + +function createBaseItem(): Item { + return { + id: "", + jobId: "", + versionId: "", + runnerImage: "", + runnerAsynchronous: false, + runnerMinCount: 0, + runnerMaxCount: 0, + runnerTimeout: 0, + runnerMaxIdleAge: 0, + runnerMaxAge: 0, + runnerMaxAgeHard: 0, + dockerArguments: undefined, + runnerMode: Item_RunnerMode.STANDARD, + }; +} + +export const Item: MessageFns = { + encode(message: Item, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.id !== "") { + writer.uint32(10).string(message.id); + } + if (message.jobId !== "") { + writer.uint32(18).string(message.jobId); + } + if (message.versionId !== "") { + writer.uint32(26).string(message.versionId); + } + if (message.runnerImage !== "") { + writer.uint32(34).string(message.runnerImage); + } + if (message.runnerAsynchronous !== false) { + writer.uint32(40).bool(message.runnerAsynchronous); + } + if (message.runnerMinCount !== 0) { + writer.uint32(48).uint32(message.runnerMinCount); + } + if (message.runnerMaxCount !== 0) { + writer.uint32(56).uint32(message.runnerMaxCount); + } + if (message.runnerTimeout !== 0) { + writer.uint32(64).uint32(message.runnerTimeout); + } + if (message.runnerMaxIdleAge !== 0) { + writer.uint32(72).uint32(message.runnerMaxIdleAge); + } + if (message.runnerMaxAge !== 0) { + writer.uint32(80).uint32(message.runnerMaxAge); + } + if (message.runnerMaxAgeHard !== 0) { + writer.uint32(88).uint32(message.runnerMaxAgeHard); + } + if (message.dockerArguments !== undefined) { + Item_DockerArguments.encode(message.dockerArguments, writer.uint32(98).fork()).join(); + } + if (message.runnerMode !== Item_RunnerMode.STANDARD) { + writer.uint32(104).int32(item_RunnerModeToNumber(message.runnerMode)); + } + return writer; 
+ }, + + decode(input: BinaryReader | Uint8Array, length?: number): Item { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseItem(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.id = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.jobId = reader.string(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.versionId = reader.string(); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + + message.runnerImage = reader.string(); + continue; + } + case 5: { + if (tag !== 40) { + break; + } + + message.runnerAsynchronous = reader.bool(); + continue; + } + case 6: { + if (tag !== 48) { + break; + } + + message.runnerMinCount = reader.uint32(); + continue; + } + case 7: { + if (tag !== 56) { + break; + } + + message.runnerMaxCount = reader.uint32(); + continue; + } + case 8: { + if (tag !== 64) { + break; + } + + message.runnerTimeout = reader.uint32(); + continue; + } + case 9: { + if (tag !== 72) { + break; + } + + message.runnerMaxIdleAge = reader.uint32(); + continue; + } + case 10: { + if (tag !== 80) { + break; + } + + message.runnerMaxAge = reader.uint32(); + continue; + } + case 11: { + if (tag !== 88) { + break; + } + + message.runnerMaxAgeHard = reader.uint32(); + continue; + } + case 12: { + if (tag !== 98) { + break; + } + + message.dockerArguments = Item_DockerArguments.decode(reader, reader.uint32()); + continue; + } + case 13: { + if (tag !== 104) { + break; + } + + message.runnerMode = item_RunnerModeFromJSON(reader.int32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): Item { + return { + id: isSet(object.id) ? 
globalThis.String(object.id) : "", + jobId: isSet(object.jobId) ? globalThis.String(object.jobId) : "", + versionId: isSet(object.versionId) ? globalThis.String(object.versionId) : "", + runnerImage: isSet(object.runnerImage) ? globalThis.String(object.runnerImage) : "", + runnerAsynchronous: isSet(object.runnerAsynchronous) ? globalThis.Boolean(object.runnerAsynchronous) : false, + runnerMinCount: isSet(object.runnerMinCount) ? globalThis.Number(object.runnerMinCount) : 0, + runnerMaxCount: isSet(object.runnerMaxCount) ? globalThis.Number(object.runnerMaxCount) : 0, + runnerTimeout: isSet(object.runnerTimeout) ? globalThis.Number(object.runnerTimeout) : 0, + runnerMaxIdleAge: isSet(object.runnerMaxIdleAge) ? globalThis.Number(object.runnerMaxIdleAge) : 0, + runnerMaxAge: isSet(object.runnerMaxAge) ? globalThis.Number(object.runnerMaxAge) : 0, + runnerMaxAgeHard: isSet(object.runnerMaxAgeHard) ? globalThis.Number(object.runnerMaxAgeHard) : 0, + dockerArguments: isSet(object.dockerArguments) + ? Item_DockerArguments.fromJSON(object.dockerArguments) + : undefined, + runnerMode: isSet(object.runnerMode) ? 
item_RunnerModeFromJSON(object.runnerMode) : Item_RunnerMode.STANDARD, + }; + }, + + toJSON(message: Item): unknown { + const obj: any = {}; + if (message.id !== "") { + obj.id = message.id; + } + if (message.jobId !== "") { + obj.jobId = message.jobId; + } + if (message.versionId !== "") { + obj.versionId = message.versionId; + } + if (message.runnerImage !== "") { + obj.runnerImage = message.runnerImage; + } + if (message.runnerAsynchronous !== false) { + obj.runnerAsynchronous = message.runnerAsynchronous; + } + if (message.runnerMinCount !== 0) { + obj.runnerMinCount = Math.round(message.runnerMinCount); + } + if (message.runnerMaxCount !== 0) { + obj.runnerMaxCount = Math.round(message.runnerMaxCount); + } + if (message.runnerTimeout !== 0) { + obj.runnerTimeout = Math.round(message.runnerTimeout); + } + if (message.runnerMaxIdleAge !== 0) { + obj.runnerMaxIdleAge = Math.round(message.runnerMaxIdleAge); + } + if (message.runnerMaxAge !== 0) { + obj.runnerMaxAge = Math.round(message.runnerMaxAge); + } + if (message.runnerMaxAgeHard !== 0) { + obj.runnerMaxAgeHard = Math.round(message.runnerMaxAgeHard); + } + if (message.dockerArguments !== undefined) { + obj.dockerArguments = Item_DockerArguments.toJSON(message.dockerArguments); + } + if (message.runnerMode !== Item_RunnerMode.STANDARD) { + obj.runnerMode = item_RunnerModeToJSON(message.runnerMode); + } + return obj; + }, + + create(base?: DeepPartial): Item { + return Item.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): Item { + const message = createBaseItem(); + message.id = object.id ?? ""; + message.jobId = object.jobId ?? ""; + message.versionId = object.versionId ?? ""; + message.runnerImage = object.runnerImage ?? ""; + message.runnerAsynchronous = object.runnerAsynchronous ?? false; + message.runnerMinCount = object.runnerMinCount ?? 0; + message.runnerMaxCount = object.runnerMaxCount ?? 0; + message.runnerTimeout = object.runnerTimeout ?? 
0; + message.runnerMaxIdleAge = object.runnerMaxIdleAge ?? 0; + message.runnerMaxAge = object.runnerMaxAge ?? 0; + message.runnerMaxAgeHard = object.runnerMaxAgeHard ?? 0; + message.dockerArguments = (object.dockerArguments !== undefined && object.dockerArguments !== null) + ? Item_DockerArguments.fromPartial(object.dockerArguments) + : undefined; + message.runnerMode = object.runnerMode ?? Item_RunnerMode.STANDARD; + return message; + }, +}; + +function createBaseItem_DockerArguments(): Item_DockerArguments { + return { networks: [], volumes: [], labels: [], memoryLimit: undefined, directPassthroughArguments: [] }; +} + +export const Item_DockerArguments: MessageFns = { + encode(message: Item_DockerArguments, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + for (const v of message.networks) { + writer.uint32(10).string(v!); + } + for (const v of message.volumes) { + Item_DockerArguments_Volume.encode(v!, writer.uint32(18).fork()).join(); + } + for (const v of message.labels) { + Item_DockerArguments_Label.encode(v!, writer.uint32(26).fork()).join(); + } + if (message.memoryLimit !== undefined) { + writer.uint32(34).string(message.memoryLimit); + } + for (const v of message.directPassthroughArguments) { + writer.uint32(42).string(v!); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): Item_DockerArguments { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseItem_DockerArguments(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.networks.push(reader.string()); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.volumes.push(Item_DockerArguments_Volume.decode(reader, reader.uint32())); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.labels.push(Item_DockerArguments_Label.decode(reader, reader.uint32())); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + + message.memoryLimit = reader.string(); + continue; + } + case 5: { + if (tag !== 42) { + break; + } + + message.directPassthroughArguments.push(reader.string()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): Item_DockerArguments { + return { + networks: globalThis.Array.isArray(object?.networks) ? object.networks.map((e: any) => globalThis.String(e)) : [], + volumes: globalThis.Array.isArray(object?.volumes) + ? object.volumes.map((e: any) => Item_DockerArguments_Volume.fromJSON(e)) + : [], + labels: globalThis.Array.isArray(object?.labels) + ? object.labels.map((e: any) => Item_DockerArguments_Label.fromJSON(e)) + : [], + memoryLimit: isSet(object.memoryLimit) ? globalThis.String(object.memoryLimit) : undefined, + directPassthroughArguments: globalThis.Array.isArray(object?.directPassthroughArguments) + ? 
object.directPassthroughArguments.map((e: any) => globalThis.String(e)) + : [], + }; + }, + + toJSON(message: Item_DockerArguments): unknown { + const obj: any = {}; + if (message.networks?.length) { + obj.networks = message.networks; + } + if (message.volumes?.length) { + obj.volumes = message.volumes.map((e) => Item_DockerArguments_Volume.toJSON(e)); + } + if (message.labels?.length) { + obj.labels = message.labels.map((e) => Item_DockerArguments_Label.toJSON(e)); + } + if (message.memoryLimit !== undefined) { + obj.memoryLimit = message.memoryLimit; + } + if (message.directPassthroughArguments?.length) { + obj.directPassthroughArguments = message.directPassthroughArguments; + } + return obj; + }, + + create(base?: DeepPartial): Item_DockerArguments { + return Item_DockerArguments.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): Item_DockerArguments { + const message = createBaseItem_DockerArguments(); + message.networks = object.networks?.map((e) => e) || []; + message.volumes = object.volumes?.map((e) => Item_DockerArguments_Volume.fromPartial(e)) || []; + message.labels = object.labels?.map((e) => Item_DockerArguments_Label.fromPartial(e)) || []; + message.memoryLimit = object.memoryLimit ?? 
undefined; + message.directPassthroughArguments = object.directPassthroughArguments?.map((e) => e) || []; + return message; + }, +}; + +function createBaseItem_DockerArguments_Volume(): Item_DockerArguments_Volume { + return { source: "", target: "", mode: Item_DockerArguments_Volume_VolumeMode.READ_ONLY }; +} + +export const Item_DockerArguments_Volume: MessageFns = { + encode(message: Item_DockerArguments_Volume, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.source !== "") { + writer.uint32(10).string(message.source); + } + if (message.target !== "") { + writer.uint32(18).string(message.target); + } + if (message.mode !== Item_DockerArguments_Volume_VolumeMode.READ_ONLY) { + writer.uint32(24).int32(item_DockerArguments_Volume_VolumeModeToNumber(message.mode)); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): Item_DockerArguments_Volume { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseItem_DockerArguments_Volume(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.source = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.target = reader.string(); + continue; + } + case 3: { + if (tag !== 24) { + break; + } + + message.mode = item_DockerArguments_Volume_VolumeModeFromJSON(reader.int32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): Item_DockerArguments_Volume { + return { + source: isSet(object.source) ? globalThis.String(object.source) : "", + target: isSet(object.target) ? globalThis.String(object.target) : "", + mode: isSet(object.mode) + ? 
item_DockerArguments_Volume_VolumeModeFromJSON(object.mode) + : Item_DockerArguments_Volume_VolumeMode.READ_ONLY, + }; + }, + + toJSON(message: Item_DockerArguments_Volume): unknown { + const obj: any = {}; + if (message.source !== "") { + obj.source = message.source; + } + if (message.target !== "") { + obj.target = message.target; + } + if (message.mode !== Item_DockerArguments_Volume_VolumeMode.READ_ONLY) { + obj.mode = item_DockerArguments_Volume_VolumeModeToJSON(message.mode); + } + return obj; + }, + + create(base?: DeepPartial): Item_DockerArguments_Volume { + return Item_DockerArguments_Volume.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): Item_DockerArguments_Volume { + const message = createBaseItem_DockerArguments_Volume(); + message.source = object.source ?? ""; + message.target = object.target ?? ""; + message.mode = object.mode ?? Item_DockerArguments_Volume_VolumeMode.READ_ONLY; + return message; + }, +}; + +function createBaseItem_DockerArguments_Label(): Item_DockerArguments_Label { + return { key: "", value: "" }; +} + +export const Item_DockerArguments_Label: MessageFns = { + encode(message: Item_DockerArguments_Label, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.key !== "") { + writer.uint32(10).string(message.key); + } + if (message.value !== "") { + writer.uint32(18).string(message.value); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): Item_DockerArguments_Label { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseItem_DockerArguments_Label(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.key = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.value = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): Item_DockerArguments_Label { + return { + key: isSet(object.key) ? globalThis.String(object.key) : "", + value: isSet(object.value) ? globalThis.String(object.value) : "", + }; + }, + + toJSON(message: Item_DockerArguments_Label): unknown { + const obj: any = {}; + if (message.key !== "") { + obj.key = message.key; + } + if (message.value !== "") { + obj.value = message.value; + } + return obj; + }, + + create(base?: DeepPartial): Item_DockerArguments_Label { + return Item_DockerArguments_Label.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): Item_DockerArguments_Label { + const message = createBaseItem_DockerArguments_Label(); + message.key = object.key ?? ""; + message.value = object.value ?? ""; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends globalThis.Array ? globalThis.Array> + : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? 
{ [K in keyof T]?: DeepPartial } + : Partial; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} + +export interface MessageFns { + encode(message: T, writer?: BinaryWriter): BinaryWriter; + decode(input: BinaryReader | Uint8Array, length?: number): T; + fromJSON(object: any): T; + toJSON(message: T): unknown; + create(base?: DeepPartial): T; + fromPartial(object: DeepPartial): T; +} diff --git a/packages/grpc/src/basics/api-token.ts b/packages/grpc/src/basics/api-token.ts new file mode 100644 index 0000000..f36c04f --- /dev/null +++ b/packages/grpc/src/basics/api-token.ts @@ -0,0 +1,275 @@ +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. +// versions: +// protoc-gen-ts_proto v2.10.1 +// protoc v3.21.12 +// source: basics/api-token.proto + +/* eslint-disable */ +import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire"; +import { Permission } from "./common.js"; + +export const protobufPackage = "ApiToken"; + +export interface Item { + id: string; + token: string; + userId: string; + description: string; + status: Item_Status; + permissions: Permission[]; + expires: string; + created: string; +} + +export const Item_Status = { ENABLED: "ENABLED", DISABLED: "DISABLED", UNRECOGNIZED: "UNRECOGNIZED" } as const; + +export type Item_Status = typeof Item_Status[keyof typeof Item_Status]; + +export namespace Item_Status { + export type ENABLED = typeof Item_Status.ENABLED; + export type DISABLED = typeof Item_Status.DISABLED; + export type UNRECOGNIZED = typeof Item_Status.UNRECOGNIZED; +} + +export function item_StatusFromJSON(object: any): Item_Status { + switch (object) { + case 0: + case "ENABLED": + return Item_Status.ENABLED; + case 1: + case "DISABLED": + return Item_Status.DISABLED; + case -1: + case "UNRECOGNIZED": + default: + return Item_Status.UNRECOGNIZED; + } +} + +export function item_StatusToJSON(object: Item_Status): string { + switch (object) { + case Item_Status.ENABLED: + return "ENABLED"; + 
case Item_Status.DISABLED: + return "DISABLED"; + case Item_Status.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export function item_StatusToNumber(object: Item_Status): number { + switch (object) { + case Item_Status.ENABLED: + return 0; + case Item_Status.DISABLED: + return 1; + case Item_Status.UNRECOGNIZED: + default: + return -1; + } +} + +function createBaseItem(): Item { + return { + id: "", + token: "", + userId: "", + description: "", + status: Item_Status.ENABLED, + permissions: [], + expires: "", + created: "", + }; +} + +export const Item: MessageFns = { + encode(message: Item, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.id !== "") { + writer.uint32(10).string(message.id); + } + if (message.token !== "") { + writer.uint32(18).string(message.token); + } + if (message.userId !== "") { + writer.uint32(26).string(message.userId); + } + if (message.description !== "") { + writer.uint32(34).string(message.description); + } + if (message.status !== Item_Status.ENABLED) { + writer.uint32(40).int32(item_StatusToNumber(message.status)); + } + for (const v of message.permissions) { + Permission.encode(v!, writer.uint32(50).fork()).join(); + } + if (message.expires !== "") { + writer.uint32(58).string(message.expires); + } + if (message.created !== "") { + writer.uint32(66).string(message.created); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): Item { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseItem(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.id = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.token = reader.string(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.userId = reader.string(); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + + message.description = reader.string(); + continue; + } + case 5: { + if (tag !== 40) { + break; + } + + message.status = item_StatusFromJSON(reader.int32()); + continue; + } + case 6: { + if (tag !== 50) { + break; + } + + message.permissions.push(Permission.decode(reader, reader.uint32())); + continue; + } + case 7: { + if (tag !== 58) { + break; + } + + message.expires = reader.string(); + continue; + } + case 8: { + if (tag !== 66) { + break; + } + + message.created = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): Item { + return { + id: isSet(object.id) ? globalThis.String(object.id) : "", + token: isSet(object.token) ? globalThis.String(object.token) : "", + userId: isSet(object.userId) ? globalThis.String(object.userId) : "", + description: isSet(object.description) ? globalThis.String(object.description) : "", + status: isSet(object.status) ? item_StatusFromJSON(object.status) : Item_Status.ENABLED, + permissions: globalThis.Array.isArray(object?.permissions) + ? object.permissions.map((e: any) => Permission.fromJSON(e)) + : [], + expires: isSet(object.expires) ? globalThis.String(object.expires) : "", + created: isSet(object.created) ? 
globalThis.String(object.created) : "", + }; + }, + + toJSON(message: Item): unknown { + const obj: any = {}; + if (message.id !== "") { + obj.id = message.id; + } + if (message.token !== "") { + obj.token = message.token; + } + if (message.userId !== "") { + obj.userId = message.userId; + } + if (message.description !== "") { + obj.description = message.description; + } + if (message.status !== Item_Status.ENABLED) { + obj.status = item_StatusToJSON(message.status); + } + if (message.permissions?.length) { + obj.permissions = message.permissions.map((e) => Permission.toJSON(e)); + } + if (message.expires !== "") { + obj.expires = message.expires; + } + if (message.created !== "") { + obj.created = message.created; + } + return obj; + }, + + create(base?: DeepPartial): Item { + return Item.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): Item { + const message = createBaseItem(); + message.id = object.id ?? ""; + message.token = object.token ?? ""; + message.userId = object.userId ?? ""; + message.description = object.description ?? ""; + message.status = object.status ?? Item_Status.ENABLED; + message.permissions = object.permissions?.map((e) => Permission.fromPartial(e)) || []; + message.expires = object.expires ?? ""; + message.created = object.created ?? ""; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends globalThis.Array ? globalThis.Array> + : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? 
{ [K in keyof T]?: DeepPartial } + : Partial; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} + +export interface MessageFns { + encode(message: T, writer?: BinaryWriter): BinaryWriter; + decode(input: BinaryReader | Uint8Array, length?: number): T; + fromJSON(object: any): T; + toJSON(message: T): unknown; + create(base?: DeepPartial): T; + fromPartial(object: DeepPartial): T; +} diff --git a/packages/grpc/src/basics/common.ts b/packages/grpc/src/basics/common.ts new file mode 100644 index 0000000..bd77824 --- /dev/null +++ b/packages/grpc/src/basics/common.ts @@ -0,0 +1,254 @@ +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. +// versions: +// protoc-gen-ts_proto v2.10.1 +// protoc v3.21.12 +// source: basics/common.proto + +/* eslint-disable */ +import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire"; + +export const protobufPackage = "Common"; + +export interface Permission { + effect: Permission_Effect; + resource: string; + actions: Permission_Action[]; +} + +export const Permission_Effect = { ALLOW: "ALLOW", DENY: "DENY", UNRECOGNIZED: "UNRECOGNIZED" } as const; + +export type Permission_Effect = typeof Permission_Effect[keyof typeof Permission_Effect]; + +export namespace Permission_Effect { + export type ALLOW = typeof Permission_Effect.ALLOW; + export type DENY = typeof Permission_Effect.DENY; + export type UNRECOGNIZED = typeof Permission_Effect.UNRECOGNIZED; +} + +export function permission_EffectFromJSON(object: any): Permission_Effect { + switch (object) { + case 0: + case "ALLOW": + return Permission_Effect.ALLOW; + case 1: + case "DENY": + return Permission_Effect.DENY; + case -1: + case "UNRECOGNIZED": + default: + return Permission_Effect.UNRECOGNIZED; + } +} + +export function permission_EffectToJSON(object: Permission_Effect): string { + switch (object) { + case Permission_Effect.ALLOW: + return "ALLOW"; + case Permission_Effect.DENY: + return "DENY"; + case 
Permission_Effect.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export function permission_EffectToNumber(object: Permission_Effect): number { + switch (object) { + case Permission_Effect.ALLOW: + return 0; + case Permission_Effect.DENY: + return 1; + case Permission_Effect.UNRECOGNIZED: + default: + return -1; + } +} + +export const Permission_Action = { + READ: "READ", + WRITE: "WRITE", + DELETE: "DELETE", + UNRECOGNIZED: "UNRECOGNIZED", +} as const; + +export type Permission_Action = typeof Permission_Action[keyof typeof Permission_Action]; + +export namespace Permission_Action { + export type READ = typeof Permission_Action.READ; + export type WRITE = typeof Permission_Action.WRITE; + export type DELETE = typeof Permission_Action.DELETE; + export type UNRECOGNIZED = typeof Permission_Action.UNRECOGNIZED; +} + +export function permission_ActionFromJSON(object: any): Permission_Action { + switch (object) { + case 0: + case "READ": + return Permission_Action.READ; + case 1: + case "WRITE": + return Permission_Action.WRITE; + case 2: + case "DELETE": + return Permission_Action.DELETE; + case -1: + case "UNRECOGNIZED": + default: + return Permission_Action.UNRECOGNIZED; + } +} + +export function permission_ActionToJSON(object: Permission_Action): string { + switch (object) { + case Permission_Action.READ: + return "READ"; + case Permission_Action.WRITE: + return "WRITE"; + case Permission_Action.DELETE: + return "DELETE"; + case Permission_Action.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export function permission_ActionToNumber(object: Permission_Action): number { + switch (object) { + case Permission_Action.READ: + return 0; + case Permission_Action.WRITE: + return 1; + case Permission_Action.DELETE: + return 2; + case Permission_Action.UNRECOGNIZED: + default: + return -1; + } +} + +function createBasePermission(): Permission { + return { effect: Permission_Effect.ALLOW, resource: "", actions: [] }; +} + +export const Permission: 
MessageFns = { + encode(message: Permission, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.effect !== Permission_Effect.ALLOW) { + writer.uint32(8).int32(permission_EffectToNumber(message.effect)); + } + if (message.resource !== "") { + writer.uint32(18).string(message.resource); + } + writer.uint32(26).fork(); + for (const v of message.actions) { + writer.int32(permission_ActionToNumber(v)); + } + writer.join(); + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): Permission { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePermission(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + + message.effect = permission_EffectFromJSON(reader.int32()); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.resource = reader.string(); + continue; + } + case 3: { + if (tag === 24) { + message.actions.push(permission_ActionFromJSON(reader.int32())); + + continue; + } + + if (tag === 26) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.actions.push(permission_ActionFromJSON(reader.int32())); + } + + continue; + } + + break; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): Permission { + return { + effect: isSet(object.effect) ? permission_EffectFromJSON(object.effect) : Permission_Effect.ALLOW, + resource: isSet(object.resource) ? globalThis.String(object.resource) : "", + actions: globalThis.Array.isArray(object?.actions) + ? 
object.actions.map((e: any) => permission_ActionFromJSON(e)) + : [], + }; + }, + + toJSON(message: Permission): unknown { + const obj: any = {}; + if (message.effect !== Permission_Effect.ALLOW) { + obj.effect = permission_EffectToJSON(message.effect); + } + if (message.resource !== "") { + obj.resource = message.resource; + } + if (message.actions?.length) { + obj.actions = message.actions.map((e) => permission_ActionToJSON(e)); + } + return obj; + }, + + create(base?: DeepPartial): Permission { + return Permission.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): Permission { + const message = createBasePermission(); + message.effect = object.effect ?? Permission_Effect.ALLOW; + message.resource = object.resource ?? ""; + message.actions = object.actions?.map((e) => e) || []; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends globalThis.Array ? globalThis.Array> + : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} + +export interface MessageFns { + encode(message: T, writer?: BinaryWriter): BinaryWriter; + decode(input: BinaryReader | Uint8Array, length?: number): T; + fromJSON(object: any): T; + toJSON(message: T): unknown; + create(base?: DeepPartial): T; + fromPartial(object: DeepPartial): T; +} diff --git a/packages/grpc/src/basics/environment.ts b/packages/grpc/src/basics/environment.ts new file mode 100644 index 0000000..fa533fe --- /dev/null +++ b/packages/grpc/src/basics/environment.ts @@ -0,0 +1,388 @@ +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. 
+// versions: +// protoc-gen-ts_proto v2.10.1 +// protoc v3.21.12 +// source: basics/environment.proto + +/* eslint-disable */ +import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire"; + +export const protobufPackage = "Environment"; + +export interface Item { + id: string; + jobId: string; + context: { [key: string]: Item_ContextValue }; + modified: string; +} + +export interface Item_ContextValue { + type: Item_ContextValue_Type; + value?: string | undefined; +} + +export const Item_ContextValue_Type = { TEXT: "TEXT", SECRET: "SECRET", UNRECOGNIZED: "UNRECOGNIZED" } as const; + +export type Item_ContextValue_Type = typeof Item_ContextValue_Type[keyof typeof Item_ContextValue_Type]; + +export namespace Item_ContextValue_Type { + export type TEXT = typeof Item_ContextValue_Type.TEXT; + export type SECRET = typeof Item_ContextValue_Type.SECRET; + export type UNRECOGNIZED = typeof Item_ContextValue_Type.UNRECOGNIZED; +} + +export function item_ContextValue_TypeFromJSON(object: any): Item_ContextValue_Type { + switch (object) { + case 0: + case "TEXT": + return Item_ContextValue_Type.TEXT; + case 1: + case "SECRET": + return Item_ContextValue_Type.SECRET; + case -1: + case "UNRECOGNIZED": + default: + return Item_ContextValue_Type.UNRECOGNIZED; + } +} + +export function item_ContextValue_TypeToJSON(object: Item_ContextValue_Type): string { + switch (object) { + case Item_ContextValue_Type.TEXT: + return "TEXT"; + case Item_ContextValue_Type.SECRET: + return "SECRET"; + case Item_ContextValue_Type.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export function item_ContextValue_TypeToNumber(object: Item_ContextValue_Type): number { + switch (object) { + case Item_ContextValue_Type.TEXT: + return 0; + case Item_ContextValue_Type.SECRET: + return 1; + case Item_ContextValue_Type.UNRECOGNIZED: + default: + return -1; + } +} + +export interface Item_ContextEntry { + key: string; + value: Item_ContextValue | undefined; +} + +function 
createBaseItem(): Item { + return { id: "", jobId: "", context: {}, modified: "" }; +} + +export const Item: MessageFns = { + encode(message: Item, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.id !== "") { + writer.uint32(10).string(message.id); + } + if (message.jobId !== "") { + writer.uint32(18).string(message.jobId); + } + globalThis.Object.entries(message.context).forEach(([key, value]: [string, Item_ContextValue]) => { + Item_ContextEntry.encode({ key: key as any, value }, writer.uint32(26).fork()).join(); + }); + if (message.modified !== "") { + writer.uint32(34).string(message.modified); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): Item { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseItem(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.id = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.jobId = reader.string(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + const entry3 = Item_ContextEntry.decode(reader, reader.uint32()); + if (entry3.value !== undefined) { + message.context[entry3.key] = entry3.value; + } + continue; + } + case 4: { + if (tag !== 34) { + break; + } + + message.modified = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): Item { + return { + id: isSet(object.id) ? globalThis.String(object.id) : "", + jobId: isSet(object.jobId) ? globalThis.String(object.jobId) : "", + context: isObject(object.context) + ? 
(globalThis.Object.entries(object.context) as [string, any][]).reduce( + (acc: { [key: string]: Item_ContextValue }, [key, value]: [string, any]) => { + acc[key] = Item_ContextValue.fromJSON(value); + return acc; + }, + {}, + ) + : {}, + modified: isSet(object.modified) ? globalThis.String(object.modified) : "", + }; + }, + + toJSON(message: Item): unknown { + const obj: any = {}; + if (message.id !== "") { + obj.id = message.id; + } + if (message.jobId !== "") { + obj.jobId = message.jobId; + } + if (message.context) { + const entries = globalThis.Object.entries(message.context) as [string, Item_ContextValue][]; + if (entries.length > 0) { + obj.context = {}; + entries.forEach(([k, v]) => { + obj.context[k] = Item_ContextValue.toJSON(v); + }); + } + } + if (message.modified !== "") { + obj.modified = message.modified; + } + return obj; + }, + + create(base?: DeepPartial): Item { + return Item.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): Item { + const message = createBaseItem(); + message.id = object.id ?? ""; + message.jobId = object.jobId ?? ""; + message.context = (globalThis.Object.entries(object.context ?? {}) as [string, Item_ContextValue][]).reduce( + (acc: { [key: string]: Item_ContextValue }, [key, value]: [string, Item_ContextValue]) => { + if (value !== undefined) { + acc[key] = Item_ContextValue.fromPartial(value); + } + return acc; + }, + {}, + ); + message.modified = object.modified ?? 
""; + return message; + }, +}; + +function createBaseItem_ContextValue(): Item_ContextValue { + return { type: Item_ContextValue_Type.TEXT, value: undefined }; +} + +export const Item_ContextValue: MessageFns = { + encode(message: Item_ContextValue, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.type !== Item_ContextValue_Type.TEXT) { + writer.uint32(8).int32(item_ContextValue_TypeToNumber(message.type)); + } + if (message.value !== undefined) { + writer.uint32(18).string(message.value); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): Item_ContextValue { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseItem_ContextValue(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + + message.type = item_ContextValue_TypeFromJSON(reader.int32()); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.value = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): Item_ContextValue { + return { + type: isSet(object.type) ? item_ContextValue_TypeFromJSON(object.type) : Item_ContextValue_Type.TEXT, + value: isSet(object.value) ? globalThis.String(object.value) : undefined, + }; + }, + + toJSON(message: Item_ContextValue): unknown { + const obj: any = {}; + if (message.type !== Item_ContextValue_Type.TEXT) { + obj.type = item_ContextValue_TypeToJSON(message.type); + } + if (message.value !== undefined) { + obj.value = message.value; + } + return obj; + }, + + create(base?: DeepPartial): Item_ContextValue { + return Item_ContextValue.fromPartial(base ?? 
{}); + }, + fromPartial(object: DeepPartial): Item_ContextValue { + const message = createBaseItem_ContextValue(); + message.type = object.type ?? Item_ContextValue_Type.TEXT; + message.value = object.value ?? undefined; + return message; + }, +}; + +function createBaseItem_ContextEntry(): Item_ContextEntry { + return { key: "", value: undefined }; +} + +export const Item_ContextEntry: MessageFns = { + encode(message: Item_ContextEntry, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.key !== "") { + writer.uint32(10).string(message.key); + } + if (message.value !== undefined) { + Item_ContextValue.encode(message.value, writer.uint32(18).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): Item_ContextEntry { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseItem_ContextEntry(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.key = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.value = Item_ContextValue.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): Item_ContextEntry { + return { + key: isSet(object.key) ? globalThis.String(object.key) : "", + value: isSet(object.value) ? Item_ContextValue.fromJSON(object.value) : undefined, + }; + }, + + toJSON(message: Item_ContextEntry): unknown { + const obj: any = {}; + if (message.key !== "") { + obj.key = message.key; + } + if (message.value !== undefined) { + obj.value = Item_ContextValue.toJSON(message.value); + } + return obj; + }, + + create(base?: DeepPartial): Item_ContextEntry { + return Item_ContextEntry.fromPartial(base ?? 
{}); + }, + fromPartial(object: DeepPartial): Item_ContextEntry { + const message = createBaseItem_ContextEntry(); + message.key = object.key ?? ""; + message.value = (object.value !== undefined && object.value !== null) + ? Item_ContextValue.fromPartial(object.value) + : undefined; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends globalThis.Array ? globalThis.Array> + : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +function isObject(value: any): boolean { + return typeof value === "object" && value !== null; +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} + +export interface MessageFns { + encode(message: T, writer?: BinaryWriter): BinaryWriter; + decode(input: BinaryReader | Uint8Array, length?: number): T; + fromJSON(object: any): T; + toJSON(message: T): unknown; + create(base?: DeepPartial): T; + fromPartial(object: DeepPartial): T; +} diff --git a/packages/grpc/src/basics/job-version.ts b/packages/grpc/src/basics/job-version.ts new file mode 100644 index 0000000..94adfea --- /dev/null +++ b/packages/grpc/src/basics/job-version.ts @@ -0,0 +1,163 @@ +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. 
+// versions: +// protoc-gen-ts_proto v2.10.1 +// protoc v3.21.12 +// source: basics/job-version.proto + +/* eslint-disable */ +import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire"; + +export const protobufPackage = "JobVersion"; + +export interface Item { + id: string; + jobId: string; + version: string; + modified: string; + created: string; +} + +function createBaseItem(): Item { + return { id: "", jobId: "", version: "", modified: "", created: "" }; +} + +export const Item: MessageFns = { + encode(message: Item, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.id !== "") { + writer.uint32(10).string(message.id); + } + if (message.jobId !== "") { + writer.uint32(18).string(message.jobId); + } + if (message.version !== "") { + writer.uint32(26).string(message.version); + } + if (message.modified !== "") { + writer.uint32(34).string(message.modified); + } + if (message.created !== "") { + writer.uint32(42).string(message.created); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): Item { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseItem(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.id = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.jobId = reader.string(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.version = reader.string(); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + + message.modified = reader.string(); + continue; + } + case 5: { + if (tag !== 42) { + break; + } + + message.created = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): Item { + return { + id: isSet(object.id) ? globalThis.String(object.id) : "", + jobId: isSet(object.jobId) ? globalThis.String(object.jobId) : "", + version: isSet(object.version) ? globalThis.String(object.version) : "", + modified: isSet(object.modified) ? globalThis.String(object.modified) : "", + created: isSet(object.created) ? globalThis.String(object.created) : "", + }; + }, + + toJSON(message: Item): unknown { + const obj: any = {}; + if (message.id !== "") { + obj.id = message.id; + } + if (message.jobId !== "") { + obj.jobId = message.jobId; + } + if (message.version !== "") { + obj.version = message.version; + } + if (message.modified !== "") { + obj.modified = message.modified; + } + if (message.created !== "") { + obj.created = message.created; + } + return obj; + }, + + create(base?: DeepPartial): Item { + return Item.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): Item { + const message = createBaseItem(); + message.id = object.id ?? ""; + message.jobId = object.jobId ?? ""; + message.version = object.version ?? ""; + message.modified = object.modified ?? ""; + message.created = object.created ?? 
""; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends globalThis.Array ? globalThis.Array> + : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} + +export interface MessageFns { + encode(message: T, writer?: BinaryWriter): BinaryWriter; + decode(input: BinaryReader | Uint8Array, length?: number): T; + fromJSON(object: any): T; + toJSON(message: T): unknown; + create(base?: DeepPartial): T; + fromPartial(object: DeepPartial): T; +} diff --git a/packages/grpc/src/basics/job.ts b/packages/grpc/src/basics/job.ts new file mode 100644 index 0000000..fe88d04 --- /dev/null +++ b/packages/grpc/src/basics/job.ts @@ -0,0 +1,310 @@ +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. +// versions: +// protoc-gen-ts_proto v2.10.1 +// protoc v3.21.12 +// source: basics/job.proto + +/* eslint-disable */ +import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire"; + +export const protobufPackage = "Job"; + +export interface Item { + id: string; + jobName: string; + status: Item_Status; + description?: string | undefined; + versionId?: string | undefined; + links: Item_Link[]; +} + +export const Item_Status = { ENABLED: "ENABLED", DISABLED: "DISABLED", UNRECOGNIZED: "UNRECOGNIZED" } as const; + +export type Item_Status = typeof Item_Status[keyof typeof Item_Status]; + +export namespace Item_Status { + export type ENABLED = typeof Item_Status.ENABLED; + export type DISABLED = typeof Item_Status.DISABLED; + export type UNRECOGNIZED = typeof Item_Status.UNRECOGNIZED; +} + +export function item_StatusFromJSON(object: any): Item_Status { + switch (object) { + case 0: + case "ENABLED": + return Item_Status.ENABLED; + case 1: + case "DISABLED": + return Item_Status.DISABLED; + case -1: + case 
"UNRECOGNIZED": + default: + return Item_Status.UNRECOGNIZED; + } +} + +export function item_StatusToJSON(object: Item_Status): string { + switch (object) { + case Item_Status.ENABLED: + return "ENABLED"; + case Item_Status.DISABLED: + return "DISABLED"; + case Item_Status.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export function item_StatusToNumber(object: Item_Status): number { + switch (object) { + case Item_Status.ENABLED: + return 0; + case Item_Status.DISABLED: + return 1; + case Item_Status.UNRECOGNIZED: + default: + return -1; + } +} + +export interface Item_Link { + name: string; + url: string; +} + +function createBaseItem(): Item { + return { id: "", jobName: "", status: Item_Status.ENABLED, description: undefined, versionId: undefined, links: [] }; +} + +export const Item: MessageFns = { + encode(message: Item, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.id !== "") { + writer.uint32(10).string(message.id); + } + if (message.jobName !== "") { + writer.uint32(18).string(message.jobName); + } + if (message.status !== Item_Status.ENABLED) { + writer.uint32(24).int32(item_StatusToNumber(message.status)); + } + if (message.description !== undefined) { + writer.uint32(34).string(message.description); + } + if (message.versionId !== undefined) { + writer.uint32(42).string(message.versionId); + } + for (const v of message.links) { + Item_Link.encode(v!, writer.uint32(50).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): Item { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseItem(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.id = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.jobName = reader.string(); + continue; + } + case 3: { + if (tag !== 24) { + break; + } + + message.status = item_StatusFromJSON(reader.int32()); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + + message.description = reader.string(); + continue; + } + case 5: { + if (tag !== 42) { + break; + } + + message.versionId = reader.string(); + continue; + } + case 6: { + if (tag !== 50) { + break; + } + + message.links.push(Item_Link.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): Item { + return { + id: isSet(object.id) ? globalThis.String(object.id) : "", + jobName: isSet(object.jobName) ? globalThis.String(object.jobName) : "", + status: isSet(object.status) ? item_StatusFromJSON(object.status) : Item_Status.ENABLED, + description: isSet(object.description) ? globalThis.String(object.description) : undefined, + versionId: isSet(object.versionId) ? globalThis.String(object.versionId) : undefined, + links: globalThis.Array.isArray(object?.links) ? 
object.links.map((e: any) => Item_Link.fromJSON(e)) : [], + }; + }, + + toJSON(message: Item): unknown { + const obj: any = {}; + if (message.id !== "") { + obj.id = message.id; + } + if (message.jobName !== "") { + obj.jobName = message.jobName; + } + if (message.status !== Item_Status.ENABLED) { + obj.status = item_StatusToJSON(message.status); + } + if (message.description !== undefined) { + obj.description = message.description; + } + if (message.versionId !== undefined) { + obj.versionId = message.versionId; + } + if (message.links?.length) { + obj.links = message.links.map((e) => Item_Link.toJSON(e)); + } + return obj; + }, + + create(base?: DeepPartial): Item { + return Item.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): Item { + const message = createBaseItem(); + message.id = object.id ?? ""; + message.jobName = object.jobName ?? ""; + message.status = object.status ?? Item_Status.ENABLED; + message.description = object.description ?? undefined; + message.versionId = object.versionId ?? undefined; + message.links = object.links?.map((e) => Item_Link.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseItem_Link(): Item_Link { + return { name: "", url: "" }; +} + +export const Item_Link: MessageFns = { + encode(message: Item_Link, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.url !== "") { + writer.uint32(18).string(message.url); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): Item_Link { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseItem_Link(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.url = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): Item_Link { + return { + name: isSet(object.name) ? globalThis.String(object.name) : "", + url: isSet(object.url) ? globalThis.String(object.url) : "", + }; + }, + + toJSON(message: Item_Link): unknown { + const obj: any = {}; + if (message.name !== "") { + obj.name = message.name; + } + if (message.url !== "") { + obj.url = message.url; + } + return obj; + }, + + create(base?: DeepPartial): Item_Link { + return Item_Link.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): Item_Link { + const message = createBaseItem_Link(); + message.name = object.name ?? ""; + message.url = object.url ?? ""; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends globalThis.Array ? globalThis.Array> + : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? 
{ [K in keyof T]?: DeepPartial } + : Partial; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} + +export interface MessageFns { + encode(message: T, writer?: BinaryWriter): BinaryWriter; + decode(input: BinaryReader | Uint8Array, length?: number): T; + fromJSON(object: any): T; + toJSON(message: T): unknown; + create(base?: DeepPartial): T; + fromPartial(object: DeepPartial): T; +} diff --git a/packages/grpc/src/basics/jwt-key.ts b/packages/grpc/src/basics/jwt-key.ts new file mode 100644 index 0000000..813b617 --- /dev/null +++ b/packages/grpc/src/basics/jwt-key.ts @@ -0,0 +1,229 @@ +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. +// versions: +// protoc-gen-ts_proto v2.10.1 +// protoc v3.21.12 +// source: basics/jwt-key.proto + +/* eslint-disable */ +import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire"; + +export const protobufPackage = "JwtKey"; + +export interface Item { + id: string; + privateKey?: string | undefined; + publicKey: string; + status: Item_Status; + expires: string; + created: string; +} + +export const Item_Status = { ENABLED: "ENABLED", DISABLED: "DISABLED", UNRECOGNIZED: "UNRECOGNIZED" } as const; + +export type Item_Status = typeof Item_Status[keyof typeof Item_Status]; + +export namespace Item_Status { + export type ENABLED = typeof Item_Status.ENABLED; + export type DISABLED = typeof Item_Status.DISABLED; + export type UNRECOGNIZED = typeof Item_Status.UNRECOGNIZED; +} + +export function item_StatusFromJSON(object: any): Item_Status { + switch (object) { + case 0: + case "ENABLED": + return Item_Status.ENABLED; + case 1: + case "DISABLED": + return Item_Status.DISABLED; + case -1: + case "UNRECOGNIZED": + default: + return Item_Status.UNRECOGNIZED; + } +} + +export function item_StatusToJSON(object: Item_Status): string { + switch (object) { + case Item_Status.ENABLED: + return "ENABLED"; + case Item_Status.DISABLED: + return "DISABLED"; + case Item_Status.UNRECOGNIZED: + 
default: + return "UNRECOGNIZED"; + } +} + +export function item_StatusToNumber(object: Item_Status): number { + switch (object) { + case Item_Status.ENABLED: + return 0; + case Item_Status.DISABLED: + return 1; + case Item_Status.UNRECOGNIZED: + default: + return -1; + } +} + +function createBaseItem(): Item { + return { id: "", privateKey: undefined, publicKey: "", status: Item_Status.ENABLED, expires: "", created: "" }; +} + +export const Item: MessageFns = { + encode(message: Item, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.id !== "") { + writer.uint32(10).string(message.id); + } + if (message.privateKey !== undefined) { + writer.uint32(18).string(message.privateKey); + } + if (message.publicKey !== "") { + writer.uint32(26).string(message.publicKey); + } + if (message.status !== Item_Status.ENABLED) { + writer.uint32(32).int32(item_StatusToNumber(message.status)); + } + if (message.expires !== "") { + writer.uint32(42).string(message.expires); + } + if (message.created !== "") { + writer.uint32(50).string(message.created); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): Item { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseItem(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.id = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.privateKey = reader.string(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.publicKey = reader.string(); + continue; + } + case 4: { + if (tag !== 32) { + break; + } + + message.status = item_StatusFromJSON(reader.int32()); + continue; + } + case 5: { + if (tag !== 42) { + break; + } + + message.expires = reader.string(); + continue; + } + case 6: { + if (tag !== 50) { + break; + } + + message.created = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): Item { + return { + id: isSet(object.id) ? globalThis.String(object.id) : "", + privateKey: isSet(object.privateKey) ? globalThis.String(object.privateKey) : undefined, + publicKey: isSet(object.publicKey) ? globalThis.String(object.publicKey) : "", + status: isSet(object.status) ? item_StatusFromJSON(object.status) : Item_Status.ENABLED, + expires: isSet(object.expires) ? globalThis.String(object.expires) : "", + created: isSet(object.created) ? 
globalThis.String(object.created) : "", + }; + }, + + toJSON(message: Item): unknown { + const obj: any = {}; + if (message.id !== "") { + obj.id = message.id; + } + if (message.privateKey !== undefined) { + obj.privateKey = message.privateKey; + } + if (message.publicKey !== "") { + obj.publicKey = message.publicKey; + } + if (message.status !== Item_Status.ENABLED) { + obj.status = item_StatusToJSON(message.status); + } + if (message.expires !== "") { + obj.expires = message.expires; + } + if (message.created !== "") { + obj.created = message.created; + } + return obj; + }, + + create(base?: DeepPartial): Item { + return Item.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): Item { + const message = createBaseItem(); + message.id = object.id ?? ""; + message.privateKey = object.privateKey ?? undefined; + message.publicKey = object.publicKey ?? ""; + message.status = object.status ?? Item_Status.ENABLED; + message.expires = object.expires ?? ""; + message.created = object.created ?? ""; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends globalThis.Array ? globalThis.Array> + : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} + +export interface MessageFns { + encode(message: T, writer?: BinaryWriter): BinaryWriter; + decode(input: BinaryReader | Uint8Array, length?: number): T; + fromJSON(object: any): T; + toJSON(message: T): unknown; + create(base?: DeepPartial): T; + fromPartial(object: DeepPartial): T; +} diff --git a/packages/grpc/src/basics/jwt-keys.ts b/packages/grpc/src/basics/jwt-keys.ts new file mode 100644 index 0000000..8d465b0 --- /dev/null +++ b/packages/grpc/src/basics/jwt-keys.ts @@ -0,0 +1,229 @@ +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. 
+// versions: +// protoc-gen-ts_proto v2.10.1 +// protoc v3.21.12 +// source: basics/jwt-keys.proto + +/* eslint-disable */ +import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire"; + +export const protobufPackage = "JwtKey"; + +export interface Item { + id: string; + privateKey?: string | undefined; + publicKey: string; + status: Item_Status; + expires: string; + created: string; +} + +export const Item_Status = { ENABLED: "ENABLED", DISABLED: "DISABLED", UNRECOGNIZED: "UNRECOGNIZED" } as const; + +export type Item_Status = typeof Item_Status[keyof typeof Item_Status]; + +export namespace Item_Status { + export type ENABLED = typeof Item_Status.ENABLED; + export type DISABLED = typeof Item_Status.DISABLED; + export type UNRECOGNIZED = typeof Item_Status.UNRECOGNIZED; +} + +export function item_StatusFromJSON(object: any): Item_Status { + switch (object) { + case 0: + case "ENABLED": + return Item_Status.ENABLED; + case 1: + case "DISABLED": + return Item_Status.DISABLED; + case -1: + case "UNRECOGNIZED": + default: + return Item_Status.UNRECOGNIZED; + } +} + +export function item_StatusToJSON(object: Item_Status): string { + switch (object) { + case Item_Status.ENABLED: + return "ENABLED"; + case Item_Status.DISABLED: + return "DISABLED"; + case Item_Status.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export function item_StatusToNumber(object: Item_Status): number { + switch (object) { + case Item_Status.ENABLED: + return 0; + case Item_Status.DISABLED: + return 1; + case Item_Status.UNRECOGNIZED: + default: + return -1; + } +} + +function createBaseItem(): Item { + return { id: "", privateKey: undefined, publicKey: "", status: Item_Status.ENABLED, expires: "", created: "" }; +} + +export const Item: MessageFns = { + encode(message: Item, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.id !== "") { + writer.uint32(10).string(message.id); + } + if (message.privateKey !== undefined) { + 
writer.uint32(18).string(message.privateKey); + } + if (message.publicKey !== "") { + writer.uint32(26).string(message.publicKey); + } + if (message.status !== Item_Status.ENABLED) { + writer.uint32(32).int32(item_StatusToNumber(message.status)); + } + if (message.expires !== "") { + writer.uint32(42).string(message.expires); + } + if (message.created !== "") { + writer.uint32(50).string(message.created); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): Item { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseItem(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.id = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.privateKey = reader.string(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.publicKey = reader.string(); + continue; + } + case 4: { + if (tag !== 32) { + break; + } + + message.status = item_StatusFromJSON(reader.int32()); + continue; + } + case 5: { + if (tag !== 42) { + break; + } + + message.expires = reader.string(); + continue; + } + case 6: { + if (tag !== 50) { + break; + } + + message.created = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): Item { + return { + id: isSet(object.id) ? globalThis.String(object.id) : "", + privateKey: isSet(object.privateKey) ? globalThis.String(object.privateKey) : undefined, + publicKey: isSet(object.publicKey) ? globalThis.String(object.publicKey) : "", + status: isSet(object.status) ? item_StatusFromJSON(object.status) : Item_Status.ENABLED, + expires: isSet(object.expires) ? globalThis.String(object.expires) : "", + created: isSet(object.created) ? 
globalThis.String(object.created) : "", + }; + }, + + toJSON(message: Item): unknown { + const obj: any = {}; + if (message.id !== "") { + obj.id = message.id; + } + if (message.privateKey !== undefined) { + obj.privateKey = message.privateKey; + } + if (message.publicKey !== "") { + obj.publicKey = message.publicKey; + } + if (message.status !== Item_Status.ENABLED) { + obj.status = item_StatusToJSON(message.status); + } + if (message.expires !== "") { + obj.expires = message.expires; + } + if (message.created !== "") { + obj.created = message.created; + } + return obj; + }, + + create(base?: DeepPartial): Item { + return Item.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): Item { + const message = createBaseItem(); + message.id = object.id ?? ""; + message.privateKey = object.privateKey ?? undefined; + message.publicKey = object.publicKey ?? ""; + message.status = object.status ?? Item_Status.ENABLED; + message.expires = object.expires ?? ""; + message.created = object.created ?? ""; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends globalThis.Array ? globalThis.Array> + : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} + +export interface MessageFns { + encode(message: T, writer?: BinaryWriter): BinaryWriter; + decode(input: BinaryReader | Uint8Array, length?: number): T; + fromJSON(object: any): T; + toJSON(message: T): unknown; + create(base?: DeepPartial): T; + fromPartial(object: DeepPartial): T; +} diff --git a/packages/grpc/src/basics/runner.ts b/packages/grpc/src/basics/runner.ts new file mode 100644 index 0000000..e772522 --- /dev/null +++ b/packages/grpc/src/basics/runner.ts @@ -0,0 +1,448 @@ +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. 
+// versions: +// protoc-gen-ts_proto v2.10.1 +// protoc v3.21.12 +// source: basics/runner.proto + +/* eslint-disable */ +import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire"; + +export const protobufPackage = "Runner"; + +export interface Item { + id: string; + jobId: string; + actionId: string; + versionId: string; + properties: Item_Properties | undefined; + createdAt: string; + readyAt?: string | undefined; + closingAt?: string | undefined; + closedAt?: string | undefined; +} + +export const Item_Status = { + STARTING: "STARTING", + READY: "READY", + CLOSING: "CLOSING", + CLOSED: "CLOSED", + UNRECOGNIZED: "UNRECOGNIZED", +} as const; + +export type Item_Status = typeof Item_Status[keyof typeof Item_Status]; + +export namespace Item_Status { + export type STARTING = typeof Item_Status.STARTING; + export type READY = typeof Item_Status.READY; + export type CLOSING = typeof Item_Status.CLOSING; + export type CLOSED = typeof Item_Status.CLOSED; + export type UNRECOGNIZED = typeof Item_Status.UNRECOGNIZED; +} + +export function item_StatusFromJSON(object: any): Item_Status { + switch (object) { + case 0: + case "STARTING": + return Item_Status.STARTING; + case 1: + case "READY": + return Item_Status.READY; + case 2: + case "CLOSING": + return Item_Status.CLOSING; + case 3: + case "CLOSED": + return Item_Status.CLOSED; + case -1: + case "UNRECOGNIZED": + default: + return Item_Status.UNRECOGNIZED; + } +} + +export function item_StatusToJSON(object: Item_Status): string { + switch (object) { + case Item_Status.STARTING: + return "STARTING"; + case Item_Status.READY: + return "READY"; + case Item_Status.CLOSING: + return "CLOSING"; + case Item_Status.CLOSED: + return "CLOSED"; + case Item_Status.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export function item_StatusToNumber(object: Item_Status): number { + switch (object) { + case Item_Status.STARTING: + return 0; + case Item_Status.READY: + return 1; + case Item_Status.CLOSING: + 
return 2; + case Item_Status.CLOSED: + return 3; + case Item_Status.UNRECOGNIZED: + default: + return -1; + } +} + +export interface Item_Properties { + runnerPid: string; + runnerContainerName: string; + runnerContainerNetworks: string[]; + runnerApiPort: number; + runnerDebug: boolean; +} + +function createBaseItem(): Item { + return { + id: "", + jobId: "", + actionId: "", + versionId: "", + properties: undefined, + createdAt: "", + readyAt: undefined, + closingAt: undefined, + closedAt: undefined, + }; +} + +export const Item: MessageFns = { + encode(message: Item, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.id !== "") { + writer.uint32(10).string(message.id); + } + if (message.jobId !== "") { + writer.uint32(18).string(message.jobId); + } + if (message.actionId !== "") { + writer.uint32(26).string(message.actionId); + } + if (message.versionId !== "") { + writer.uint32(34).string(message.versionId); + } + if (message.properties !== undefined) { + Item_Properties.encode(message.properties, writer.uint32(42).fork()).join(); + } + if (message.createdAt !== "") { + writer.uint32(90).string(message.createdAt); + } + if (message.readyAt !== undefined) { + writer.uint32(98).string(message.readyAt); + } + if (message.closingAt !== undefined) { + writer.uint32(106).string(message.closingAt); + } + if (message.closedAt !== undefined) { + writer.uint32(114).string(message.closedAt); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): Item { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseItem(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.id = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.jobId = reader.string(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.actionId = reader.string(); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + + message.versionId = reader.string(); + continue; + } + case 5: { + if (tag !== 42) { + break; + } + + message.properties = Item_Properties.decode(reader, reader.uint32()); + continue; + } + case 11: { + if (tag !== 90) { + break; + } + + message.createdAt = reader.string(); + continue; + } + case 12: { + if (tag !== 98) { + break; + } + + message.readyAt = reader.string(); + continue; + } + case 13: { + if (tag !== 106) { + break; + } + + message.closingAt = reader.string(); + continue; + } + case 14: { + if (tag !== 114) { + break; + } + + message.closedAt = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): Item { + return { + id: isSet(object.id) ? globalThis.String(object.id) : "", + jobId: isSet(object.jobId) ? globalThis.String(object.jobId) : "", + actionId: isSet(object.actionId) ? globalThis.String(object.actionId) : "", + versionId: isSet(object.versionId) ? globalThis.String(object.versionId) : "", + properties: isSet(object.properties) ? Item_Properties.fromJSON(object.properties) : undefined, + createdAt: isSet(object.createdAt) ? globalThis.String(object.createdAt) : "", + readyAt: isSet(object.readyAt) ? globalThis.String(object.readyAt) : undefined, + closingAt: isSet(object.closingAt) ? globalThis.String(object.closingAt) : undefined, + closedAt: isSet(object.closedAt) ? 
globalThis.String(object.closedAt) : undefined, + }; + }, + + toJSON(message: Item): unknown { + const obj: any = {}; + if (message.id !== "") { + obj.id = message.id; + } + if (message.jobId !== "") { + obj.jobId = message.jobId; + } + if (message.actionId !== "") { + obj.actionId = message.actionId; + } + if (message.versionId !== "") { + obj.versionId = message.versionId; + } + if (message.properties !== undefined) { + obj.properties = Item_Properties.toJSON(message.properties); + } + if (message.createdAt !== "") { + obj.createdAt = message.createdAt; + } + if (message.readyAt !== undefined) { + obj.readyAt = message.readyAt; + } + if (message.closingAt !== undefined) { + obj.closingAt = message.closingAt; + } + if (message.closedAt !== undefined) { + obj.closedAt = message.closedAt; + } + return obj; + }, + + create(base?: DeepPartial): Item { + return Item.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): Item { + const message = createBaseItem(); + message.id = object.id ?? ""; + message.jobId = object.jobId ?? ""; + message.actionId = object.actionId ?? ""; + message.versionId = object.versionId ?? ""; + message.properties = (object.properties !== undefined && object.properties !== null) + ? Item_Properties.fromPartial(object.properties) + : undefined; + message.createdAt = object.createdAt ?? ""; + message.readyAt = object.readyAt ?? undefined; + message.closingAt = object.closingAt ?? undefined; + message.closedAt = object.closedAt ?? 
undefined; + return message; + }, +}; + +function createBaseItem_Properties(): Item_Properties { + return { runnerPid: "", runnerContainerName: "", runnerContainerNetworks: [], runnerApiPort: 0, runnerDebug: false }; +} + +export const Item_Properties: MessageFns = { + encode(message: Item_Properties, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.runnerPid !== "") { + writer.uint32(10).string(message.runnerPid); + } + if (message.runnerContainerName !== "") { + writer.uint32(26).string(message.runnerContainerName); + } + for (const v of message.runnerContainerNetworks) { + writer.uint32(34).string(v!); + } + if (message.runnerApiPort !== 0) { + writer.uint32(40).uint32(message.runnerApiPort); + } + if (message.runnerDebug !== false) { + writer.uint32(48).bool(message.runnerDebug); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): Item_Properties { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseItem_Properties(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.runnerPid = reader.string(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.runnerContainerName = reader.string(); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + + message.runnerContainerNetworks.push(reader.string()); + continue; + } + case 5: { + if (tag !== 40) { + break; + } + + message.runnerApiPort = reader.uint32(); + continue; + } + case 6: { + if (tag !== 48) { + break; + } + + message.runnerDebug = reader.bool(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): Item_Properties { + return { + runnerPid: isSet(object.runnerPid) ? 
globalThis.String(object.runnerPid) : "", + runnerContainerName: isSet(object.runnerContainerName) ? globalThis.String(object.runnerContainerName) : "", + runnerContainerNetworks: globalThis.Array.isArray(object?.runnerContainerNetworks) + ? object.runnerContainerNetworks.map((e: any) => globalThis.String(e)) + : [], + runnerApiPort: isSet(object.runnerApiPort) ? globalThis.Number(object.runnerApiPort) : 0, + runnerDebug: isSet(object.runnerDebug) ? globalThis.Boolean(object.runnerDebug) : false, + }; + }, + + toJSON(message: Item_Properties): unknown { + const obj: any = {}; + if (message.runnerPid !== "") { + obj.runnerPid = message.runnerPid; + } + if (message.runnerContainerName !== "") { + obj.runnerContainerName = message.runnerContainerName; + } + if (message.runnerContainerNetworks?.length) { + obj.runnerContainerNetworks = message.runnerContainerNetworks; + } + if (message.runnerApiPort !== 0) { + obj.runnerApiPort = Math.round(message.runnerApiPort); + } + if (message.runnerDebug !== false) { + obj.runnerDebug = message.runnerDebug; + } + return obj; + }, + + create(base?: DeepPartial): Item_Properties { + return Item_Properties.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): Item_Properties { + const message = createBaseItem_Properties(); + message.runnerPid = object.runnerPid ?? ""; + message.runnerContainerName = object.runnerContainerName ?? ""; + message.runnerContainerNetworks = object.runnerContainerNetworks?.map((e) => e) || []; + message.runnerApiPort = object.runnerApiPort ?? 0; + message.runnerDebug = object.runnerDebug ?? false; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends globalThis.Array ? globalThis.Array> + : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? 
{ [K in keyof T]?: DeepPartial } + : Partial; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} + +export interface MessageFns { + encode(message: T, writer?: BinaryWriter): BinaryWriter; + decode(input: BinaryReader | Uint8Array, length?: number): T; + fromJSON(object: any): T; + toJSON(message: T): unknown; + create(base?: DeepPartial): T; + fromPartial(object: DeepPartial): T; +} diff --git a/packages/grpc/src/basics/trigger.ts b/packages/grpc/src/basics/trigger.ts new file mode 100644 index 0000000..d6ebaf4 --- /dev/null +++ b/packages/grpc/src/basics/trigger.ts @@ -0,0 +1,763 @@ +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. +// versions: +// protoc-gen-ts_proto v2.10.1 +// protoc v3.21.12 +// source: basics/trigger.proto + +/* eslint-disable */ +import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire"; + +export const protobufPackage = "Trigger"; + +export interface Item { + id: string; + jobId: string; + versionId: string; + schedule?: Item_TriggerSchedule | undefined; + http?: Item_TriggerHttp | undefined; + mqtt?: Item_TriggerMqtt | undefined; +} + +export interface Item_TriggerSchedule { + name?: string | undefined; + cron: string; + timezone?: string | undefined; +} + +export interface Item_TriggerHttp { + name?: string | undefined; + hostname?: string | undefined; + method?: string | undefined; + path?: string | undefined; +} + +export interface Item_TriggerMqtt { + name?: string | undefined; + topics: string[]; + connection: Item_TriggerMqtt_Connection | undefined; +} + +export interface Item_TriggerMqtt_Connection { + protocol?: string | undefined; + protocolVariable?: string | undefined; + port?: string | undefined; + portVariable?: string | undefined; + host?: string | undefined; + hostVariable?: string | undefined; + username?: string | undefined; + usernameVariable?: string | undefined; + password?: string | undefined; + passwordVariable?: string | undefined; + clientId?: string | 
undefined; + clientIdVariable?: string | undefined; +} + +function createBaseItem(): Item { + return { id: "", jobId: "", versionId: "", schedule: undefined, http: undefined, mqtt: undefined }; +} + +export const Item: MessageFns = { + encode(message: Item, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.id !== "") { + writer.uint32(10).string(message.id); + } + if (message.jobId !== "") { + writer.uint32(18).string(message.jobId); + } + if (message.versionId !== "") { + writer.uint32(26).string(message.versionId); + } + if (message.schedule !== undefined) { + Item_TriggerSchedule.encode(message.schedule, writer.uint32(34).fork()).join(); + } + if (message.http !== undefined) { + Item_TriggerHttp.encode(message.http, writer.uint32(42).fork()).join(); + } + if (message.mqtt !== undefined) { + Item_TriggerMqtt.encode(message.mqtt, writer.uint32(50).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): Item { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseItem(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.id = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.jobId = reader.string(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.versionId = reader.string(); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + + message.schedule = Item_TriggerSchedule.decode(reader, reader.uint32()); + continue; + } + case 5: { + if (tag !== 42) { + break; + } + + message.http = Item_TriggerHttp.decode(reader, reader.uint32()); + continue; + } + case 6: { + if (tag !== 50) { + break; + } + + message.mqtt = Item_TriggerMqtt.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): Item { + return { + id: isSet(object.id) ? globalThis.String(object.id) : "", + jobId: isSet(object.jobId) ? globalThis.String(object.jobId) : "", + versionId: isSet(object.versionId) ? globalThis.String(object.versionId) : "", + schedule: isSet(object.schedule) ? Item_TriggerSchedule.fromJSON(object.schedule) : undefined, + http: isSet(object.http) ? Item_TriggerHttp.fromJSON(object.http) : undefined, + mqtt: isSet(object.mqtt) ? 
Item_TriggerMqtt.fromJSON(object.mqtt) : undefined, + }; + }, + + toJSON(message: Item): unknown { + const obj: any = {}; + if (message.id !== "") { + obj.id = message.id; + } + if (message.jobId !== "") { + obj.jobId = message.jobId; + } + if (message.versionId !== "") { + obj.versionId = message.versionId; + } + if (message.schedule !== undefined) { + obj.schedule = Item_TriggerSchedule.toJSON(message.schedule); + } + if (message.http !== undefined) { + obj.http = Item_TriggerHttp.toJSON(message.http); + } + if (message.mqtt !== undefined) { + obj.mqtt = Item_TriggerMqtt.toJSON(message.mqtt); + } + return obj; + }, + + create(base?: DeepPartial): Item { + return Item.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): Item { + const message = createBaseItem(); + message.id = object.id ?? ""; + message.jobId = object.jobId ?? ""; + message.versionId = object.versionId ?? ""; + message.schedule = (object.schedule !== undefined && object.schedule !== null) + ? Item_TriggerSchedule.fromPartial(object.schedule) + : undefined; + message.http = (object.http !== undefined && object.http !== null) + ? Item_TriggerHttp.fromPartial(object.http) + : undefined; + message.mqtt = (object.mqtt !== undefined && object.mqtt !== null) + ? 
Item_TriggerMqtt.fromPartial(object.mqtt) + : undefined; + return message; + }, +}; + +function createBaseItem_TriggerSchedule(): Item_TriggerSchedule { + return { name: undefined, cron: "", timezone: undefined }; +} + +export const Item_TriggerSchedule: MessageFns = { + encode(message: Item_TriggerSchedule, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.name !== undefined) { + writer.uint32(10).string(message.name); + } + if (message.cron !== "") { + writer.uint32(18).string(message.cron); + } + if (message.timezone !== undefined) { + writer.uint32(26).string(message.timezone); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): Item_TriggerSchedule { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseItem_TriggerSchedule(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.cron = reader.string(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.timezone = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): Item_TriggerSchedule { + return { + name: isSet(object.name) ? globalThis.String(object.name) : undefined, + cron: isSet(object.cron) ? globalThis.String(object.cron) : "", + timezone: isSet(object.timezone) ? 
globalThis.String(object.timezone) : undefined, + }; + }, + + toJSON(message: Item_TriggerSchedule): unknown { + const obj: any = {}; + if (message.name !== undefined) { + obj.name = message.name; + } + if (message.cron !== "") { + obj.cron = message.cron; + } + if (message.timezone !== undefined) { + obj.timezone = message.timezone; + } + return obj; + }, + + create(base?: DeepPartial): Item_TriggerSchedule { + return Item_TriggerSchedule.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): Item_TriggerSchedule { + const message = createBaseItem_TriggerSchedule(); + message.name = object.name ?? undefined; + message.cron = object.cron ?? ""; + message.timezone = object.timezone ?? undefined; + return message; + }, +}; + +function createBaseItem_TriggerHttp(): Item_TriggerHttp { + return { name: undefined, hostname: undefined, method: undefined, path: undefined }; +} + +export const Item_TriggerHttp: MessageFns = { + encode(message: Item_TriggerHttp, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.name !== undefined) { + writer.uint32(10).string(message.name); + } + if (message.hostname !== undefined) { + writer.uint32(18).string(message.hostname); + } + if (message.method !== undefined) { + writer.uint32(26).string(message.method); + } + if (message.path !== undefined) { + writer.uint32(34).string(message.path); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): Item_TriggerHttp { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseItem_TriggerHttp(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.hostname = reader.string(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.method = reader.string(); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + + message.path = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): Item_TriggerHttp { + return { + name: isSet(object.name) ? globalThis.String(object.name) : undefined, + hostname: isSet(object.hostname) ? globalThis.String(object.hostname) : undefined, + method: isSet(object.method) ? globalThis.String(object.method) : undefined, + path: isSet(object.path) ? globalThis.String(object.path) : undefined, + }; + }, + + toJSON(message: Item_TriggerHttp): unknown { + const obj: any = {}; + if (message.name !== undefined) { + obj.name = message.name; + } + if (message.hostname !== undefined) { + obj.hostname = message.hostname; + } + if (message.method !== undefined) { + obj.method = message.method; + } + if (message.path !== undefined) { + obj.path = message.path; + } + return obj; + }, + + create(base?: DeepPartial): Item_TriggerHttp { + return Item_TriggerHttp.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): Item_TriggerHttp { + const message = createBaseItem_TriggerHttp(); + message.name = object.name ?? undefined; + message.hostname = object.hostname ?? undefined; + message.method = object.method ?? undefined; + message.path = object.path ?? 
undefined; + return message; + }, +}; + +function createBaseItem_TriggerMqtt(): Item_TriggerMqtt { + return { name: undefined, topics: [], connection: undefined }; +} + +export const Item_TriggerMqtt: MessageFns = { + encode(message: Item_TriggerMqtt, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.name !== undefined) { + writer.uint32(10).string(message.name); + } + for (const v of message.topics) { + writer.uint32(18).string(v!); + } + if (message.connection !== undefined) { + Item_TriggerMqtt_Connection.encode(message.connection, writer.uint32(26).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): Item_TriggerMqtt { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseItem_TriggerMqtt(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.topics.push(reader.string()); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.connection = Item_TriggerMqtt_Connection.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): Item_TriggerMqtt { + return { + name: isSet(object.name) ? globalThis.String(object.name) : undefined, + topics: globalThis.Array.isArray(object?.topics) ? object.topics.map((e: any) => globalThis.String(e)) : [], + connection: isSet(object.connection) ? 
Item_TriggerMqtt_Connection.fromJSON(object.connection) : undefined, + }; + }, + + toJSON(message: Item_TriggerMqtt): unknown { + const obj: any = {}; + if (message.name !== undefined) { + obj.name = message.name; + } + if (message.topics?.length) { + obj.topics = message.topics; + } + if (message.connection !== undefined) { + obj.connection = Item_TriggerMqtt_Connection.toJSON(message.connection); + } + return obj; + }, + + create(base?: DeepPartial): Item_TriggerMqtt { + return Item_TriggerMqtt.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): Item_TriggerMqtt { + const message = createBaseItem_TriggerMqtt(); + message.name = object.name ?? undefined; + message.topics = object.topics?.map((e) => e) || []; + message.connection = (object.connection !== undefined && object.connection !== null) + ? Item_TriggerMqtt_Connection.fromPartial(object.connection) + : undefined; + return message; + }, +}; + +function createBaseItem_TriggerMqtt_Connection(): Item_TriggerMqtt_Connection { + return { + protocol: undefined, + protocolVariable: undefined, + port: undefined, + portVariable: undefined, + host: undefined, + hostVariable: undefined, + username: undefined, + usernameVariable: undefined, + password: undefined, + passwordVariable: undefined, + clientId: undefined, + clientIdVariable: undefined, + }; +} + +export const Item_TriggerMqtt_Connection: MessageFns = { + encode(message: Item_TriggerMqtt_Connection, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.protocol !== undefined) { + writer.uint32(10).string(message.protocol); + } + if (message.protocolVariable !== undefined) { + writer.uint32(18).string(message.protocolVariable); + } + if (message.port !== undefined) { + writer.uint32(26).string(message.port); + } + if (message.portVariable !== undefined) { + writer.uint32(34).string(message.portVariable); + } + if (message.host !== undefined) { + writer.uint32(42).string(message.host); + } + if (message.hostVariable !== 
undefined) { + writer.uint32(50).string(message.hostVariable); + } + if (message.username !== undefined) { + writer.uint32(58).string(message.username); + } + if (message.usernameVariable !== undefined) { + writer.uint32(66).string(message.usernameVariable); + } + if (message.password !== undefined) { + writer.uint32(74).string(message.password); + } + if (message.passwordVariable !== undefined) { + writer.uint32(82).string(message.passwordVariable); + } + if (message.clientId !== undefined) { + writer.uint32(90).string(message.clientId); + } + if (message.clientIdVariable !== undefined) { + writer.uint32(98).string(message.clientIdVariable); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): Item_TriggerMqtt_Connection { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseItem_TriggerMqtt_Connection(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.protocol = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.protocolVariable = reader.string(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.port = reader.string(); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + + message.portVariable = reader.string(); + continue; + } + case 5: { + if (tag !== 42) { + break; + } + + message.host = reader.string(); + continue; + } + case 6: { + if (tag !== 50) { + break; + } + + message.hostVariable = reader.string(); + continue; + } + case 7: { + if (tag !== 58) { + break; + } + + message.username = reader.string(); + continue; + } + case 8: { + if (tag !== 66) { + break; + } + + message.usernameVariable = reader.string(); + continue; + } + case 9: { + if (tag !== 74) { + break; + } + + message.password = reader.string(); + continue; + } + case 
10: { + if (tag !== 82) { + break; + } + + message.passwordVariable = reader.string(); + continue; + } + case 11: { + if (tag !== 90) { + break; + } + + message.clientId = reader.string(); + continue; + } + case 12: { + if (tag !== 98) { + break; + } + + message.clientIdVariable = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): Item_TriggerMqtt_Connection { + return { + protocol: isSet(object.protocol) ? globalThis.String(object.protocol) : undefined, + protocolVariable: isSet(object.protocolVariable) ? globalThis.String(object.protocolVariable) : undefined, + port: isSet(object.port) ? globalThis.String(object.port) : undefined, + portVariable: isSet(object.portVariable) ? globalThis.String(object.portVariable) : undefined, + host: isSet(object.host) ? globalThis.String(object.host) : undefined, + hostVariable: isSet(object.hostVariable) ? globalThis.String(object.hostVariable) : undefined, + username: isSet(object.username) ? globalThis.String(object.username) : undefined, + usernameVariable: isSet(object.usernameVariable) ? globalThis.String(object.usernameVariable) : undefined, + password: isSet(object.password) ? globalThis.String(object.password) : undefined, + passwordVariable: isSet(object.passwordVariable) ? globalThis.String(object.passwordVariable) : undefined, + clientId: isSet(object.clientId) ? globalThis.String(object.clientId) : undefined, + clientIdVariable: isSet(object.clientIdVariable) ? 
globalThis.String(object.clientIdVariable) : undefined, + }; + }, + + toJSON(message: Item_TriggerMqtt_Connection): unknown { + const obj: any = {}; + if (message.protocol !== undefined) { + obj.protocol = message.protocol; + } + if (message.protocolVariable !== undefined) { + obj.protocolVariable = message.protocolVariable; + } + if (message.port !== undefined) { + obj.port = message.port; + } + if (message.portVariable !== undefined) { + obj.portVariable = message.portVariable; + } + if (message.host !== undefined) { + obj.host = message.host; + } + if (message.hostVariable !== undefined) { + obj.hostVariable = message.hostVariable; + } + if (message.username !== undefined) { + obj.username = message.username; + } + if (message.usernameVariable !== undefined) { + obj.usernameVariable = message.usernameVariable; + } + if (message.password !== undefined) { + obj.password = message.password; + } + if (message.passwordVariable !== undefined) { + obj.passwordVariable = message.passwordVariable; + } + if (message.clientId !== undefined) { + obj.clientId = message.clientId; + } + if (message.clientIdVariable !== undefined) { + obj.clientIdVariable = message.clientIdVariable; + } + return obj; + }, + + create(base?: DeepPartial): Item_TriggerMqtt_Connection { + return Item_TriggerMqtt_Connection.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): Item_TriggerMqtt_Connection { + const message = createBaseItem_TriggerMqtt_Connection(); + message.protocol = object.protocol ?? undefined; + message.protocolVariable = object.protocolVariable ?? undefined; + message.port = object.port ?? undefined; + message.portVariable = object.portVariable ?? undefined; + message.host = object.host ?? undefined; + message.hostVariable = object.hostVariable ?? undefined; + message.username = object.username ?? undefined; + message.usernameVariable = object.usernameVariable ?? undefined; + message.password = object.password ?? 
undefined; + message.passwordVariable = object.passwordVariable ?? undefined; + message.clientId = object.clientId ?? undefined; + message.clientIdVariable = object.clientIdVariable ?? undefined; + return message; + }, +}; + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends globalThis.Array ? globalThis.Array> + : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} + +export interface MessageFns { + encode(message: T, writer?: BinaryWriter): BinaryWriter; + decode(input: BinaryReader | Uint8Array, length?: number): T; + fromJSON(object: any): T; + toJSON(message: T): unknown; + create(base?: DeepPartial): T; + fromPartial(object: DeepPartial): T; +} diff --git a/packages/grpc/src/gateway.ts b/packages/grpc/src/gateway.ts new file mode 100644 index 0000000..460b52e --- /dev/null +++ b/packages/grpc/src/gateway.ts @@ -0,0 +1,19 @@ +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. +// versions: +// protoc-gen-ts_proto v2.10.1 +// protoc v3.21.12 +// source: gateway.proto + +/* eslint-disable */ + +export const protobufPackage = "GatewayAPI"; + +/** */ +export type GatewayAPIDefinition = typeof GatewayAPIDefinition; +export const GatewayAPIDefinition = { name: "GatewayAPI", fullName: "GatewayAPI.GatewayAPI", methods: {} } as const; + +export interface GatewayAPIServiceImplementation { +} + +export interface GatewayAPIClient { +} diff --git a/packages/grpc/src/general.ts b/packages/grpc/src/general.ts new file mode 100644 index 0000000..17ce433 --- /dev/null +++ b/packages/grpc/src/general.ts @@ -0,0 +1,3331 @@ +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. 
+// versions: +// protoc-gen-ts_proto v2.10.1 +// protoc v3.21.12 +// source: general.proto + +/* eslint-disable */ +import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire"; +import type { CallContext, CallOptions } from "nice-grpc-common"; +import { Empty } from "./base.js"; +import { Item as Item2 } from "./basics/action.js"; +import { Item as Item1 } from "./basics/job-version.js"; +import { Item } from "./basics/job.js"; +import { + Item as Item4, + Item_Status, + item_StatusFromJSON, + item_StatusToJSON, + item_StatusToNumber, +} from "./basics/runner.js"; +import { Item as Item3 } from "./basics/trigger.js"; + +export const protobufPackage = "GeneralAPI"; + +/** getJob * */ +export interface JobRequest { + jobId: string; +} + +export interface JobResponse { + job: Item | undefined; +} + +/** getJobs * */ +export interface JobsRequest { +} + +export interface JobsResponse { + jobs: Item[]; +} + +/** getJobVersion * */ +export interface JobVersionRequest { + jobVersionId: string; +} + +export interface JobVersionResponse { + jobVersion: Item1 | undefined; +} + +/** getJobVersionLatest * */ +export interface JobVersionLatestRequest { + jobId: string; +} + +export interface JobVersionLatestResponse { + jobVersion: Item1 | undefined; +} + +/** getJobVersions * */ +export interface JobVersionsRequest { + jobId: string; +} + +export interface JobVersionsResponse { + jobVersions: Item1[]; +} + +/** getJobVersionArchive * */ +export interface JobVersionArchiveRequest { + jobId: string; + jobVersionId: string; +} + +export interface JobVersionArchiveResponse { + seq: number; + data: Uint8Array; + end: boolean; +} + +/** getJobAction * */ +export interface JobActionRequest { + jobId: string; + actionId: string; +} + +export interface JobActionResponse { + action: Item2 | undefined; +} + +/** getJobActionLatest * */ +export interface JobActionLatestRequest { + jobId: string; +} + +export interface JobActionLatestResponse { + action: Item2 | undefined; +} + 
+/** getJobActions * */ +export interface JobActionsRequest { + jobId: string; + versionId?: string | undefined; +} + +export interface JobActionsResponse { + actions: Item2[]; +} + +/** getJobTrigger * */ +export interface JobTriggerRequest { + jobId: string; + triggerId: string; +} + +export interface JobTriggerResponse { + trigger: Item3 | undefined; +} + +/** getJobTriggers * */ +export interface JobTriggersRequest { + jobId: string; + versionId?: string | undefined; +} + +export interface JobTriggersResponse { + triggers: Item3[]; +} + +/** getJobTriggersLatest * */ +export interface JobTriggersLatestRequest { + jobId: string; +} + +export interface JobTriggersLatestResponse { + triggers: Item3[]; +} + +/** getRunner * */ +export interface RunnerRequest { + runnerId: string; +} + +export interface RunnerResponse { + runner: Item4 | undefined; +} + +/** getRunners * */ +export interface RunnersRequest { + jobId?: string | undefined; + versionId?: string | undefined; + actionId?: string | undefined; + status?: Item_Status | undefined; +} + +export interface RunnersResponse { + runners: Item4[]; +} + +/** deleteRunner * */ +export interface DeleteRunnerRequest { + runnerId: string; +} + +/** getStoreItem * */ +export interface StoreItemRequest { + jobId: string; + key: string; +} + +export interface StoreItemResponse { + key: string; + value: string; +} + +/** setStoreItem * */ +export interface SetStoreItemRequest { + jobId: string; + key: string; + value: string; + ttl?: number | undefined; +} + +export interface SetStoreItemResponse { + key: string; + value: string; +} + +/** deleteStoreItem * */ +export interface DeleteStoreItemRequest { + jobId: string; + key: string; +} + +export interface DeleteStoreItemResponse { + key: string; +} + +/** publishMqttMessage * */ +export interface PublishMqttMessageRequest { + jobId: string; + topic: string; + payload: string; +} + +export interface PublishMqttMessageResponse { +} + +/** createRunner * */ +export interface 
CreateSoftRunnerRequest { + jobId: string; + versionId: string; + actionId: string; +} + +export interface CreateSoftRunnerResponse { + runner: Item4 | undefined; +} + +/** getTemplates * */ +export interface TemplatesRequest { +} + +export interface TemplatesResponse { + templateBadGateway: string; +} + +function createBaseJobRequest(): JobRequest { + return { jobId: "" }; +} + +export const JobRequest: MessageFns = { + encode(message: JobRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.jobId !== "") { + writer.uint32(10).string(message.jobId); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): JobRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseJobRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.jobId = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): JobRequest { + return { jobId: isSet(object.jobId) ? globalThis.String(object.jobId) : "" }; + }, + + toJSON(message: JobRequest): unknown { + const obj: any = {}; + if (message.jobId !== "") { + obj.jobId = message.jobId; + } + return obj; + }, + + create(base?: DeepPartial): JobRequest { + return JobRequest.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): JobRequest { + const message = createBaseJobRequest(); + message.jobId = object.jobId ?? 
""; + return message; + }, +}; + +function createBaseJobResponse(): JobResponse { + return { job: undefined }; +} + +export const JobResponse: MessageFns = { + encode(message: JobResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.job !== undefined) { + Item.encode(message.job, writer.uint32(10).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): JobResponse { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseJobResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.job = Item.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): JobResponse { + return { job: isSet(object.job) ? Item.fromJSON(object.job) : undefined }; + }, + + toJSON(message: JobResponse): unknown { + const obj: any = {}; + if (message.job !== undefined) { + obj.job = Item.toJSON(message.job); + } + return obj; + }, + + create(base?: DeepPartial): JobResponse { + return JobResponse.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): JobResponse { + const message = createBaseJobResponse(); + message.job = (object.job !== undefined && object.job !== null) ? Item.fromPartial(object.job) : undefined; + return message; + }, +}; + +function createBaseJobsRequest(): JobsRequest { + return {}; +} + +export const JobsRequest: MessageFns = { + encode(_: JobsRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): JobsRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseJobsRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(_: any): JobsRequest { + return {}; + }, + + toJSON(_: JobsRequest): unknown { + const obj: any = {}; + return obj; + }, + + create(base?: DeepPartial): JobsRequest { + return JobsRequest.fromPartial(base ?? {}); + }, + fromPartial(_: DeepPartial): JobsRequest { + const message = createBaseJobsRequest(); + return message; + }, +}; + +function createBaseJobsResponse(): JobsResponse { + return { jobs: [] }; +} + +export const JobsResponse: MessageFns = { + encode(message: JobsResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + for (const v of message.jobs) { + Item.encode(v!, writer.uint32(10).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): JobsResponse { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseJobsResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.jobs.push(Item.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): JobsResponse { + return { jobs: globalThis.Array.isArray(object?.jobs) ? object.jobs.map((e: any) => Item.fromJSON(e)) : [] }; + }, + + toJSON(message: JobsResponse): unknown { + const obj: any = {}; + if (message.jobs?.length) { + obj.jobs = message.jobs.map((e) => Item.toJSON(e)); + } + return obj; + }, + + create(base?: DeepPartial): JobsResponse { + return JobsResponse.fromPartial(base ?? 
{}); + }, + fromPartial(object: DeepPartial): JobsResponse { + const message = createBaseJobsResponse(); + message.jobs = object.jobs?.map((e) => Item.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseJobVersionRequest(): JobVersionRequest { + return { jobVersionId: "" }; +} + +export const JobVersionRequest: MessageFns = { + encode(message: JobVersionRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.jobVersionId !== "") { + writer.uint32(10).string(message.jobVersionId); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): JobVersionRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseJobVersionRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.jobVersionId = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): JobVersionRequest { + return { jobVersionId: isSet(object.jobVersionId) ? globalThis.String(object.jobVersionId) : "" }; + }, + + toJSON(message: JobVersionRequest): unknown { + const obj: any = {}; + if (message.jobVersionId !== "") { + obj.jobVersionId = message.jobVersionId; + } + return obj; + }, + + create(base?: DeepPartial): JobVersionRequest { + return JobVersionRequest.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): JobVersionRequest { + const message = createBaseJobVersionRequest(); + message.jobVersionId = object.jobVersionId ?? 
""; + return message; + }, +}; + +function createBaseJobVersionResponse(): JobVersionResponse { + return { jobVersion: undefined }; +} + +export const JobVersionResponse: MessageFns = { + encode(message: JobVersionResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.jobVersion !== undefined) { + Item1.encode(message.jobVersion, writer.uint32(10).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): JobVersionResponse { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseJobVersionResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.jobVersion = Item1.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): JobVersionResponse { + return { jobVersion: isSet(object.jobVersion) ? Item1.fromJSON(object.jobVersion) : undefined }; + }, + + toJSON(message: JobVersionResponse): unknown { + const obj: any = {}; + if (message.jobVersion !== undefined) { + obj.jobVersion = Item1.toJSON(message.jobVersion); + } + return obj; + }, + + create(base?: DeepPartial): JobVersionResponse { + return JobVersionResponse.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): JobVersionResponse { + const message = createBaseJobVersionResponse(); + message.jobVersion = (object.jobVersion !== undefined && object.jobVersion !== null) + ? 
Item1.fromPartial(object.jobVersion) + : undefined; + return message; + }, +}; + +function createBaseJobVersionLatestRequest(): JobVersionLatestRequest { + return { jobId: "" }; +} + +export const JobVersionLatestRequest: MessageFns = { + encode(message: JobVersionLatestRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.jobId !== "") { + writer.uint32(10).string(message.jobId); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): JobVersionLatestRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseJobVersionLatestRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.jobId = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): JobVersionLatestRequest { + return { jobId: isSet(object.jobId) ? globalThis.String(object.jobId) : "" }; + }, + + toJSON(message: JobVersionLatestRequest): unknown { + const obj: any = {}; + if (message.jobId !== "") { + obj.jobId = message.jobId; + } + return obj; + }, + + create(base?: DeepPartial): JobVersionLatestRequest { + return JobVersionLatestRequest.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): JobVersionLatestRequest { + const message = createBaseJobVersionLatestRequest(); + message.jobId = object.jobId ?? 
""; + return message; + }, +}; + +function createBaseJobVersionLatestResponse(): JobVersionLatestResponse { + return { jobVersion: undefined }; +} + +export const JobVersionLatestResponse: MessageFns = { + encode(message: JobVersionLatestResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.jobVersion !== undefined) { + Item1.encode(message.jobVersion, writer.uint32(10).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): JobVersionLatestResponse { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseJobVersionLatestResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.jobVersion = Item1.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): JobVersionLatestResponse { + return { jobVersion: isSet(object.jobVersion) ? Item1.fromJSON(object.jobVersion) : undefined }; + }, + + toJSON(message: JobVersionLatestResponse): unknown { + const obj: any = {}; + if (message.jobVersion !== undefined) { + obj.jobVersion = Item1.toJSON(message.jobVersion); + } + return obj; + }, + + create(base?: DeepPartial): JobVersionLatestResponse { + return JobVersionLatestResponse.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): JobVersionLatestResponse { + const message = createBaseJobVersionLatestResponse(); + message.jobVersion = (object.jobVersion !== undefined && object.jobVersion !== null) + ? 
Item1.fromPartial(object.jobVersion) + : undefined; + return message; + }, +}; + +function createBaseJobVersionsRequest(): JobVersionsRequest { + return { jobId: "" }; +} + +export const JobVersionsRequest: MessageFns = { + encode(message: JobVersionsRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.jobId !== "") { + writer.uint32(10).string(message.jobId); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): JobVersionsRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseJobVersionsRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.jobId = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): JobVersionsRequest { + return { jobId: isSet(object.jobId) ? globalThis.String(object.jobId) : "" }; + }, + + toJSON(message: JobVersionsRequest): unknown { + const obj: any = {}; + if (message.jobId !== "") { + obj.jobId = message.jobId; + } + return obj; + }, + + create(base?: DeepPartial): JobVersionsRequest { + return JobVersionsRequest.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): JobVersionsRequest { + const message = createBaseJobVersionsRequest(); + message.jobId = object.jobId ?? 
""; + return message; + }, +}; + +function createBaseJobVersionsResponse(): JobVersionsResponse { + return { jobVersions: [] }; +} + +export const JobVersionsResponse: MessageFns = { + encode(message: JobVersionsResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + for (const v of message.jobVersions) { + Item1.encode(v!, writer.uint32(10).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): JobVersionsResponse { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseJobVersionsResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.jobVersions.push(Item1.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): JobVersionsResponse { + return { + jobVersions: globalThis.Array.isArray(object?.jobVersions) + ? object.jobVersions.map((e: any) => Item1.fromJSON(e)) + : [], + }; + }, + + toJSON(message: JobVersionsResponse): unknown { + const obj: any = {}; + if (message.jobVersions?.length) { + obj.jobVersions = message.jobVersions.map((e) => Item1.toJSON(e)); + } + return obj; + }, + + create(base?: DeepPartial): JobVersionsResponse { + return JobVersionsResponse.fromPartial(base ?? 
{}); + }, + fromPartial(object: DeepPartial): JobVersionsResponse { + const message = createBaseJobVersionsResponse(); + message.jobVersions = object.jobVersions?.map((e) => Item1.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseJobVersionArchiveRequest(): JobVersionArchiveRequest { + return { jobId: "", jobVersionId: "" }; +} + +export const JobVersionArchiveRequest: MessageFns = { + encode(message: JobVersionArchiveRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.jobId !== "") { + writer.uint32(10).string(message.jobId); + } + if (message.jobVersionId !== "") { + writer.uint32(18).string(message.jobVersionId); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): JobVersionArchiveRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseJobVersionArchiveRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.jobId = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.jobVersionId = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): JobVersionArchiveRequest { + return { + jobId: isSet(object.jobId) ? globalThis.String(object.jobId) : "", + jobVersionId: isSet(object.jobVersionId) ? globalThis.String(object.jobVersionId) : "", + }; + }, + + toJSON(message: JobVersionArchiveRequest): unknown { + const obj: any = {}; + if (message.jobId !== "") { + obj.jobId = message.jobId; + } + if (message.jobVersionId !== "") { + obj.jobVersionId = message.jobVersionId; + } + return obj; + }, + + create(base?: DeepPartial): JobVersionArchiveRequest { + return JobVersionArchiveRequest.fromPartial(base ?? 
{}); + }, + fromPartial(object: DeepPartial): JobVersionArchiveRequest { + const message = createBaseJobVersionArchiveRequest(); + message.jobId = object.jobId ?? ""; + message.jobVersionId = object.jobVersionId ?? ""; + return message; + }, +}; + +function createBaseJobVersionArchiveResponse(): JobVersionArchiveResponse { + return { seq: 0, data: new Uint8Array(0), end: false }; +} + +export const JobVersionArchiveResponse: MessageFns = { + encode(message: JobVersionArchiveResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.seq !== 0) { + writer.uint32(8).uint64(message.seq); + } + if (message.data.length !== 0) { + writer.uint32(18).bytes(message.data); + } + if (message.end !== false) { + writer.uint32(24).bool(message.end); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): JobVersionArchiveResponse { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseJobVersionArchiveResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + + message.seq = longToNumber(reader.uint64()); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.data = reader.bytes(); + continue; + } + case 3: { + if (tag !== 24) { + break; + } + + message.end = reader.bool(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): JobVersionArchiveResponse { + return { + seq: isSet(object.seq) ? globalThis.Number(object.seq) : 0, + data: isSet(object.data) ? bytesFromBase64(object.data) : new Uint8Array(0), + end: isSet(object.end) ? 
globalThis.Boolean(object.end) : false, + }; + }, + + toJSON(message: JobVersionArchiveResponse): unknown { + const obj: any = {}; + if (message.seq !== 0) { + obj.seq = Math.round(message.seq); + } + if (message.data.length !== 0) { + obj.data = base64FromBytes(message.data); + } + if (message.end !== false) { + obj.end = message.end; + } + return obj; + }, + + create(base?: DeepPartial): JobVersionArchiveResponse { + return JobVersionArchiveResponse.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): JobVersionArchiveResponse { + const message = createBaseJobVersionArchiveResponse(); + message.seq = object.seq ?? 0; + message.data = object.data ?? new Uint8Array(0); + message.end = object.end ?? false; + return message; + }, +}; + +function createBaseJobActionRequest(): JobActionRequest { + return { jobId: "", actionId: "" }; +} + +export const JobActionRequest: MessageFns = { + encode(message: JobActionRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.jobId !== "") { + writer.uint32(10).string(message.jobId); + } + if (message.actionId !== "") { + writer.uint32(18).string(message.actionId); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): JobActionRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseJobActionRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.jobId = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.actionId = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): JobActionRequest { + return { + jobId: isSet(object.jobId) ? 
globalThis.String(object.jobId) : "", + actionId: isSet(object.actionId) ? globalThis.String(object.actionId) : "", + }; + }, + + toJSON(message: JobActionRequest): unknown { + const obj: any = {}; + if (message.jobId !== "") { + obj.jobId = message.jobId; + } + if (message.actionId !== "") { + obj.actionId = message.actionId; + } + return obj; + }, + + create(base?: DeepPartial): JobActionRequest { + return JobActionRequest.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): JobActionRequest { + const message = createBaseJobActionRequest(); + message.jobId = object.jobId ?? ""; + message.actionId = object.actionId ?? ""; + return message; + }, +}; + +function createBaseJobActionResponse(): JobActionResponse { + return { action: undefined }; +} + +export const JobActionResponse: MessageFns = { + encode(message: JobActionResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.action !== undefined) { + Item2.encode(message.action, writer.uint32(10).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): JobActionResponse { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseJobActionResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.action = Item2.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): JobActionResponse { + return { action: isSet(object.action) ? 
Item2.fromJSON(object.action) : undefined }; + }, + + toJSON(message: JobActionResponse): unknown { + const obj: any = {}; + if (message.action !== undefined) { + obj.action = Item2.toJSON(message.action); + } + return obj; + }, + + create(base?: DeepPartial): JobActionResponse { + return JobActionResponse.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): JobActionResponse { + const message = createBaseJobActionResponse(); + message.action = (object.action !== undefined && object.action !== null) + ? Item2.fromPartial(object.action) + : undefined; + return message; + }, +}; + +function createBaseJobActionLatestRequest(): JobActionLatestRequest { + return { jobId: "" }; +} + +export const JobActionLatestRequest: MessageFns = { + encode(message: JobActionLatestRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.jobId !== "") { + writer.uint32(10).string(message.jobId); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): JobActionLatestRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseJobActionLatestRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.jobId = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): JobActionLatestRequest { + return { jobId: isSet(object.jobId) ? globalThis.String(object.jobId) : "" }; + }, + + toJSON(message: JobActionLatestRequest): unknown { + const obj: any = {}; + if (message.jobId !== "") { + obj.jobId = message.jobId; + } + return obj; + }, + + create(base?: DeepPartial): JobActionLatestRequest { + return JobActionLatestRequest.fromPartial(base ?? 
{}); + }, + fromPartial(object: DeepPartial): JobActionLatestRequest { + const message = createBaseJobActionLatestRequest(); + message.jobId = object.jobId ?? ""; + return message; + }, +}; + +function createBaseJobActionLatestResponse(): JobActionLatestResponse { + return { action: undefined }; +} + +export const JobActionLatestResponse: MessageFns = { + encode(message: JobActionLatestResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.action !== undefined) { + Item2.encode(message.action, writer.uint32(10).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): JobActionLatestResponse { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseJobActionLatestResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.action = Item2.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): JobActionLatestResponse { + return { action: isSet(object.action) ? Item2.fromJSON(object.action) : undefined }; + }, + + toJSON(message: JobActionLatestResponse): unknown { + const obj: any = {}; + if (message.action !== undefined) { + obj.action = Item2.toJSON(message.action); + } + return obj; + }, + + create(base?: DeepPartial): JobActionLatestResponse { + return JobActionLatestResponse.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): JobActionLatestResponse { + const message = createBaseJobActionLatestResponse(); + message.action = (object.action !== undefined && object.action !== null) + ? 
Item2.fromPartial(object.action) + : undefined; + return message; + }, +}; + +function createBaseJobActionsRequest(): JobActionsRequest { + return { jobId: "", versionId: undefined }; +} + +export const JobActionsRequest: MessageFns = { + encode(message: JobActionsRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.jobId !== "") { + writer.uint32(10).string(message.jobId); + } + if (message.versionId !== undefined) { + writer.uint32(18).string(message.versionId); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): JobActionsRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseJobActionsRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.jobId = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.versionId = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): JobActionsRequest { + return { + jobId: isSet(object.jobId) ? globalThis.String(object.jobId) : "", + versionId: isSet(object.versionId) ? globalThis.String(object.versionId) : undefined, + }; + }, + + toJSON(message: JobActionsRequest): unknown { + const obj: any = {}; + if (message.jobId !== "") { + obj.jobId = message.jobId; + } + if (message.versionId !== undefined) { + obj.versionId = message.versionId; + } + return obj; + }, + + create(base?: DeepPartial): JobActionsRequest { + return JobActionsRequest.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): JobActionsRequest { + const message = createBaseJobActionsRequest(); + message.jobId = object.jobId ?? ""; + message.versionId = object.versionId ?? 
undefined; + return message; + }, +}; + +function createBaseJobActionsResponse(): JobActionsResponse { + return { actions: [] }; +} + +export const JobActionsResponse: MessageFns = { + encode(message: JobActionsResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + for (const v of message.actions) { + Item2.encode(v!, writer.uint32(10).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): JobActionsResponse { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseJobActionsResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.actions.push(Item2.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): JobActionsResponse { + return { + actions: globalThis.Array.isArray(object?.actions) ? object.actions.map((e: any) => Item2.fromJSON(e)) : [], + }; + }, + + toJSON(message: JobActionsResponse): unknown { + const obj: any = {}; + if (message.actions?.length) { + obj.actions = message.actions.map((e) => Item2.toJSON(e)); + } + return obj; + }, + + create(base?: DeepPartial): JobActionsResponse { + return JobActionsResponse.fromPartial(base ?? 
{}); + }, + fromPartial(object: DeepPartial): JobActionsResponse { + const message = createBaseJobActionsResponse(); + message.actions = object.actions?.map((e) => Item2.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseJobTriggerRequest(): JobTriggerRequest { + return { jobId: "", triggerId: "" }; +} + +export const JobTriggerRequest: MessageFns = { + encode(message: JobTriggerRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.jobId !== "") { + writer.uint32(10).string(message.jobId); + } + if (message.triggerId !== "") { + writer.uint32(18).string(message.triggerId); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): JobTriggerRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseJobTriggerRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.jobId = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.triggerId = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): JobTriggerRequest { + return { + jobId: isSet(object.jobId) ? globalThis.String(object.jobId) : "", + triggerId: isSet(object.triggerId) ? globalThis.String(object.triggerId) : "", + }; + }, + + toJSON(message: JobTriggerRequest): unknown { + const obj: any = {}; + if (message.jobId !== "") { + obj.jobId = message.jobId; + } + if (message.triggerId !== "") { + obj.triggerId = message.triggerId; + } + return obj; + }, + + create(base?: DeepPartial): JobTriggerRequest { + return JobTriggerRequest.fromPartial(base ?? 
{}); + }, + fromPartial(object: DeepPartial): JobTriggerRequest { + const message = createBaseJobTriggerRequest(); + message.jobId = object.jobId ?? ""; + message.triggerId = object.triggerId ?? ""; + return message; + }, +}; + +function createBaseJobTriggerResponse(): JobTriggerResponse { + return { trigger: undefined }; +} + +export const JobTriggerResponse: MessageFns = { + encode(message: JobTriggerResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.trigger !== undefined) { + Item3.encode(message.trigger, writer.uint32(10).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): JobTriggerResponse { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseJobTriggerResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.trigger = Item3.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): JobTriggerResponse { + return { trigger: isSet(object.trigger) ? Item3.fromJSON(object.trigger) : undefined }; + }, + + toJSON(message: JobTriggerResponse): unknown { + const obj: any = {}; + if (message.trigger !== undefined) { + obj.trigger = Item3.toJSON(message.trigger); + } + return obj; + }, + + create(base?: DeepPartial): JobTriggerResponse { + return JobTriggerResponse.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): JobTriggerResponse { + const message = createBaseJobTriggerResponse(); + message.trigger = (object.trigger !== undefined && object.trigger !== null) + ? 
Item3.fromPartial(object.trigger) + : undefined; + return message; + }, +}; + +function createBaseJobTriggersRequest(): JobTriggersRequest { + return { jobId: "", versionId: undefined }; +} + +export const JobTriggersRequest: MessageFns = { + encode(message: JobTriggersRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.jobId !== "") { + writer.uint32(10).string(message.jobId); + } + if (message.versionId !== undefined) { + writer.uint32(18).string(message.versionId); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): JobTriggersRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseJobTriggersRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.jobId = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.versionId = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): JobTriggersRequest { + return { + jobId: isSet(object.jobId) ? globalThis.String(object.jobId) : "", + versionId: isSet(object.versionId) ? globalThis.String(object.versionId) : undefined, + }; + }, + + toJSON(message: JobTriggersRequest): unknown { + const obj: any = {}; + if (message.jobId !== "") { + obj.jobId = message.jobId; + } + if (message.versionId !== undefined) { + obj.versionId = message.versionId; + } + return obj; + }, + + create(base?: DeepPartial): JobTriggersRequest { + return JobTriggersRequest.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): JobTriggersRequest { + const message = createBaseJobTriggersRequest(); + message.jobId = object.jobId ?? ""; + message.versionId = object.versionId ?? 
undefined; + return message; + }, +}; + +function createBaseJobTriggersResponse(): JobTriggersResponse { + return { triggers: [] }; +} + +export const JobTriggersResponse: MessageFns = { + encode(message: JobTriggersResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + for (const v of message.triggers) { + Item3.encode(v!, writer.uint32(10).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): JobTriggersResponse { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseJobTriggersResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.triggers.push(Item3.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): JobTriggersResponse { + return { + triggers: globalThis.Array.isArray(object?.triggers) ? object.triggers.map((e: any) => Item3.fromJSON(e)) : [], + }; + }, + + toJSON(message: JobTriggersResponse): unknown { + const obj: any = {}; + if (message.triggers?.length) { + obj.triggers = message.triggers.map((e) => Item3.toJSON(e)); + } + return obj; + }, + + create(base?: DeepPartial): JobTriggersResponse { + return JobTriggersResponse.fromPartial(base ?? 
{}); + }, + fromPartial(object: DeepPartial): JobTriggersResponse { + const message = createBaseJobTriggersResponse(); + message.triggers = object.triggers?.map((e) => Item3.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseJobTriggersLatestRequest(): JobTriggersLatestRequest { + return { jobId: "" }; +} + +export const JobTriggersLatestRequest: MessageFns = { + encode(message: JobTriggersLatestRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.jobId !== "") { + writer.uint32(10).string(message.jobId); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): JobTriggersLatestRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseJobTriggersLatestRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.jobId = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): JobTriggersLatestRequest { + return { jobId: isSet(object.jobId) ? globalThis.String(object.jobId) : "" }; + }, + + toJSON(message: JobTriggersLatestRequest): unknown { + const obj: any = {}; + if (message.jobId !== "") { + obj.jobId = message.jobId; + } + return obj; + }, + + create(base?: DeepPartial): JobTriggersLatestRequest { + return JobTriggersLatestRequest.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): JobTriggersLatestRequest { + const message = createBaseJobTriggersLatestRequest(); + message.jobId = object.jobId ?? 
""; + return message; + }, +}; + +function createBaseJobTriggersLatestResponse(): JobTriggersLatestResponse { + return { triggers: [] }; +} + +export const JobTriggersLatestResponse: MessageFns = { + encode(message: JobTriggersLatestResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + for (const v of message.triggers) { + Item3.encode(v!, writer.uint32(10).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): JobTriggersLatestResponse { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseJobTriggersLatestResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.triggers.push(Item3.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): JobTriggersLatestResponse { + return { + triggers: globalThis.Array.isArray(object?.triggers) ? object.triggers.map((e: any) => Item3.fromJSON(e)) : [], + }; + }, + + toJSON(message: JobTriggersLatestResponse): unknown { + const obj: any = {}; + if (message.triggers?.length) { + obj.triggers = message.triggers.map((e) => Item3.toJSON(e)); + } + return obj; + }, + + create(base?: DeepPartial): JobTriggersLatestResponse { + return JobTriggersLatestResponse.fromPartial(base ?? 
{}); + }, + fromPartial(object: DeepPartial): JobTriggersLatestResponse { + const message = createBaseJobTriggersLatestResponse(); + message.triggers = object.triggers?.map((e) => Item3.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseRunnerRequest(): RunnerRequest { + return { runnerId: "" }; +} + +export const RunnerRequest: MessageFns = { + encode(message: RunnerRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.runnerId !== "") { + writer.uint32(10).string(message.runnerId); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): RunnerRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseRunnerRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.runnerId = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): RunnerRequest { + return { runnerId: isSet(object.runnerId) ? globalThis.String(object.runnerId) : "" }; + }, + + toJSON(message: RunnerRequest): unknown { + const obj: any = {}; + if (message.runnerId !== "") { + obj.runnerId = message.runnerId; + } + return obj; + }, + + create(base?: DeepPartial): RunnerRequest { + return RunnerRequest.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): RunnerRequest { + const message = createBaseRunnerRequest(); + message.runnerId = object.runnerId ?? 
""; + return message; + }, +}; + +function createBaseRunnerResponse(): RunnerResponse { + return { runner: undefined }; +} + +export const RunnerResponse: MessageFns = { + encode(message: RunnerResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.runner !== undefined) { + Item4.encode(message.runner, writer.uint32(10).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): RunnerResponse { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseRunnerResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.runner = Item4.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): RunnerResponse { + return { runner: isSet(object.runner) ? Item4.fromJSON(object.runner) : undefined }; + }, + + toJSON(message: RunnerResponse): unknown { + const obj: any = {}; + if (message.runner !== undefined) { + obj.runner = Item4.toJSON(message.runner); + } + return obj; + }, + + create(base?: DeepPartial): RunnerResponse { + return RunnerResponse.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): RunnerResponse { + const message = createBaseRunnerResponse(); + message.runner = (object.runner !== undefined && object.runner !== null) + ? 
Item4.fromPartial(object.runner) + : undefined; + return message; + }, +}; + +function createBaseRunnersRequest(): RunnersRequest { + return { jobId: undefined, versionId: undefined, actionId: undefined, status: undefined }; +} + +export const RunnersRequest: MessageFns = { + encode(message: RunnersRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.jobId !== undefined) { + writer.uint32(10).string(message.jobId); + } + if (message.versionId !== undefined) { + writer.uint32(18).string(message.versionId); + } + if (message.actionId !== undefined) { + writer.uint32(26).string(message.actionId); + } + if (message.status !== undefined) { + writer.uint32(32).int32(item_StatusToNumber(message.status)); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): RunnersRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseRunnersRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.jobId = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.versionId = reader.string(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.actionId = reader.string(); + continue; + } + case 4: { + if (tag !== 32) { + break; + } + + message.status = item_StatusFromJSON(reader.int32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): RunnersRequest { + return { + jobId: isSet(object.jobId) ? globalThis.String(object.jobId) : undefined, + versionId: isSet(object.versionId) ? globalThis.String(object.versionId) : undefined, + actionId: isSet(object.actionId) ? globalThis.String(object.actionId) : undefined, + status: isSet(object.status) ? 
item_StatusFromJSON(object.status) : undefined, + }; + }, + + toJSON(message: RunnersRequest): unknown { + const obj: any = {}; + if (message.jobId !== undefined) { + obj.jobId = message.jobId; + } + if (message.versionId !== undefined) { + obj.versionId = message.versionId; + } + if (message.actionId !== undefined) { + obj.actionId = message.actionId; + } + if (message.status !== undefined) { + obj.status = item_StatusToJSON(message.status); + } + return obj; + }, + + create(base?: DeepPartial): RunnersRequest { + return RunnersRequest.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): RunnersRequest { + const message = createBaseRunnersRequest(); + message.jobId = object.jobId ?? undefined; + message.versionId = object.versionId ?? undefined; + message.actionId = object.actionId ?? undefined; + message.status = object.status ?? undefined; + return message; + }, +}; + +function createBaseRunnersResponse(): RunnersResponse { + return { runners: [] }; +} + +export const RunnersResponse: MessageFns = { + encode(message: RunnersResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + for (const v of message.runners) { + Item4.encode(v!, writer.uint32(10).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): RunnersResponse { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseRunnersResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.runners.push(Item4.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): RunnersResponse { + return { + runners: globalThis.Array.isArray(object?.runners) ? 
object.runners.map((e: any) => Item4.fromJSON(e)) : [], + }; + }, + + toJSON(message: RunnersResponse): unknown { + const obj: any = {}; + if (message.runners?.length) { + obj.runners = message.runners.map((e) => Item4.toJSON(e)); + } + return obj; + }, + + create(base?: DeepPartial): RunnersResponse { + return RunnersResponse.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): RunnersResponse { + const message = createBaseRunnersResponse(); + message.runners = object.runners?.map((e) => Item4.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseDeleteRunnerRequest(): DeleteRunnerRequest { + return { runnerId: "" }; +} + +export const DeleteRunnerRequest: MessageFns = { + encode(message: DeleteRunnerRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.runnerId !== "") { + writer.uint32(10).string(message.runnerId); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): DeleteRunnerRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDeleteRunnerRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.runnerId = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): DeleteRunnerRequest { + return { runnerId: isSet(object.runnerId) ? globalThis.String(object.runnerId) : "" }; + }, + + toJSON(message: DeleteRunnerRequest): unknown { + const obj: any = {}; + if (message.runnerId !== "") { + obj.runnerId = message.runnerId; + } + return obj; + }, + + create(base?: DeepPartial): DeleteRunnerRequest { + return DeleteRunnerRequest.fromPartial(base ?? 
{}); + }, + fromPartial(object: DeepPartial): DeleteRunnerRequest { + const message = createBaseDeleteRunnerRequest(); + message.runnerId = object.runnerId ?? ""; + return message; + }, +}; + +function createBaseStoreItemRequest(): StoreItemRequest { + return { jobId: "", key: "" }; +} + +export const StoreItemRequest: MessageFns = { + encode(message: StoreItemRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.jobId !== "") { + writer.uint32(10).string(message.jobId); + } + if (message.key !== "") { + writer.uint32(18).string(message.key); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): StoreItemRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseStoreItemRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.jobId = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.key = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): StoreItemRequest { + return { + jobId: isSet(object.jobId) ? globalThis.String(object.jobId) : "", + key: isSet(object.key) ? globalThis.String(object.key) : "", + }; + }, + + toJSON(message: StoreItemRequest): unknown { + const obj: any = {}; + if (message.jobId !== "") { + obj.jobId = message.jobId; + } + if (message.key !== "") { + obj.key = message.key; + } + return obj; + }, + + create(base?: DeepPartial): StoreItemRequest { + return StoreItemRequest.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): StoreItemRequest { + const message = createBaseStoreItemRequest(); + message.jobId = object.jobId ?? ""; + message.key = object.key ?? 
""; + return message; + }, +}; + +function createBaseStoreItemResponse(): StoreItemResponse { + return { key: "", value: "" }; +} + +export const StoreItemResponse: MessageFns = { + encode(message: StoreItemResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.key !== "") { + writer.uint32(10).string(message.key); + } + if (message.value !== "") { + writer.uint32(18).string(message.value); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): StoreItemResponse { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseStoreItemResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.key = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.value = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): StoreItemResponse { + return { + key: isSet(object.key) ? globalThis.String(object.key) : "", + value: isSet(object.value) ? globalThis.String(object.value) : "", + }; + }, + + toJSON(message: StoreItemResponse): unknown { + const obj: any = {}; + if (message.key !== "") { + obj.key = message.key; + } + if (message.value !== "") { + obj.value = message.value; + } + return obj; + }, + + create(base?: DeepPartial): StoreItemResponse { + return StoreItemResponse.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): StoreItemResponse { + const message = createBaseStoreItemResponse(); + message.key = object.key ?? ""; + message.value = object.value ?? 
""; + return message; + }, +}; + +function createBaseSetStoreItemRequest(): SetStoreItemRequest { + return { jobId: "", key: "", value: "", ttl: undefined }; +} + +export const SetStoreItemRequest: MessageFns = { + encode(message: SetStoreItemRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.jobId !== "") { + writer.uint32(10).string(message.jobId); + } + if (message.key !== "") { + writer.uint32(18).string(message.key); + } + if (message.value !== "") { + writer.uint32(26).string(message.value); + } + if (message.ttl !== undefined) { + writer.uint32(32).uint32(message.ttl); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): SetStoreItemRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSetStoreItemRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.jobId = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.key = reader.string(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.value = reader.string(); + continue; + } + case 4: { + if (tag !== 32) { + break; + } + + message.ttl = reader.uint32(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): SetStoreItemRequest { + return { + jobId: isSet(object.jobId) ? globalThis.String(object.jobId) : "", + key: isSet(object.key) ? globalThis.String(object.key) : "", + value: isSet(object.value) ? globalThis.String(object.value) : "", + ttl: isSet(object.ttl) ? 
globalThis.Number(object.ttl) : undefined, + }; + }, + + toJSON(message: SetStoreItemRequest): unknown { + const obj: any = {}; + if (message.jobId !== "") { + obj.jobId = message.jobId; + } + if (message.key !== "") { + obj.key = message.key; + } + if (message.value !== "") { + obj.value = message.value; + } + if (message.ttl !== undefined) { + obj.ttl = Math.round(message.ttl); + } + return obj; + }, + + create(base?: DeepPartial): SetStoreItemRequest { + return SetStoreItemRequest.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): SetStoreItemRequest { + const message = createBaseSetStoreItemRequest(); + message.jobId = object.jobId ?? ""; + message.key = object.key ?? ""; + message.value = object.value ?? ""; + message.ttl = object.ttl ?? undefined; + return message; + }, +}; + +function createBaseSetStoreItemResponse(): SetStoreItemResponse { + return { key: "", value: "" }; +} + +export const SetStoreItemResponse: MessageFns = { + encode(message: SetStoreItemResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.key !== "") { + writer.uint32(10).string(message.key); + } + if (message.value !== "") { + writer.uint32(18).string(message.value); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): SetStoreItemResponse { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSetStoreItemResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.key = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.value = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): SetStoreItemResponse { + return { + key: isSet(object.key) ? 
globalThis.String(object.key) : "", + value: isSet(object.value) ? globalThis.String(object.value) : "", + }; + }, + + toJSON(message: SetStoreItemResponse): unknown { + const obj: any = {}; + if (message.key !== "") { + obj.key = message.key; + } + if (message.value !== "") { + obj.value = message.value; + } + return obj; + }, + + create(base?: DeepPartial): SetStoreItemResponse { + return SetStoreItemResponse.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): SetStoreItemResponse { + const message = createBaseSetStoreItemResponse(); + message.key = object.key ?? ""; + message.value = object.value ?? ""; + return message; + }, +}; + +function createBaseDeleteStoreItemRequest(): DeleteStoreItemRequest { + return { jobId: "", key: "" }; +} + +export const DeleteStoreItemRequest: MessageFns = { + encode(message: DeleteStoreItemRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.jobId !== "") { + writer.uint32(10).string(message.jobId); + } + if (message.key !== "") { + writer.uint32(18).string(message.key); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): DeleteStoreItemRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDeleteStoreItemRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.jobId = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.key = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): DeleteStoreItemRequest { + return { + jobId: isSet(object.jobId) ? globalThis.String(object.jobId) : "", + key: isSet(object.key) ? 
globalThis.String(object.key) : "", + }; + }, + + toJSON(message: DeleteStoreItemRequest): unknown { + const obj: any = {}; + if (message.jobId !== "") { + obj.jobId = message.jobId; + } + if (message.key !== "") { + obj.key = message.key; + } + return obj; + }, + + create(base?: DeepPartial): DeleteStoreItemRequest { + return DeleteStoreItemRequest.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): DeleteStoreItemRequest { + const message = createBaseDeleteStoreItemRequest(); + message.jobId = object.jobId ?? ""; + message.key = object.key ?? ""; + return message; + }, +}; + +function createBaseDeleteStoreItemResponse(): DeleteStoreItemResponse { + return { key: "" }; +} + +export const DeleteStoreItemResponse: MessageFns = { + encode(message: DeleteStoreItemResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.key !== "") { + writer.uint32(10).string(message.key); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): DeleteStoreItemResponse { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDeleteStoreItemResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.key = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): DeleteStoreItemResponse { + return { key: isSet(object.key) ? globalThis.String(object.key) : "" }; + }, + + toJSON(message: DeleteStoreItemResponse): unknown { + const obj: any = {}; + if (message.key !== "") { + obj.key = message.key; + } + return obj; + }, + + create(base?: DeepPartial): DeleteStoreItemResponse { + return DeleteStoreItemResponse.fromPartial(base ?? 
{}); + }, + fromPartial(object: DeepPartial): DeleteStoreItemResponse { + const message = createBaseDeleteStoreItemResponse(); + message.key = object.key ?? ""; + return message; + }, +}; + +function createBasePublishMqttMessageRequest(): PublishMqttMessageRequest { + return { jobId: "", topic: "", payload: "" }; +} + +export const PublishMqttMessageRequest: MessageFns = { + encode(message: PublishMqttMessageRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.jobId !== "") { + writer.uint32(10).string(message.jobId); + } + if (message.topic !== "") { + writer.uint32(18).string(message.topic); + } + if (message.payload !== "") { + writer.uint32(26).string(message.payload); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): PublishMqttMessageRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePublishMqttMessageRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.jobId = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.topic = reader.string(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.payload = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): PublishMqttMessageRequest { + return { + jobId: isSet(object.jobId) ? globalThis.String(object.jobId) : "", + topic: isSet(object.topic) ? globalThis.String(object.topic) : "", + payload: isSet(object.payload) ? 
globalThis.String(object.payload) : "", + }; + }, + + toJSON(message: PublishMqttMessageRequest): unknown { + const obj: any = {}; + if (message.jobId !== "") { + obj.jobId = message.jobId; + } + if (message.topic !== "") { + obj.topic = message.topic; + } + if (message.payload !== "") { + obj.payload = message.payload; + } + return obj; + }, + + create(base?: DeepPartial): PublishMqttMessageRequest { + return PublishMqttMessageRequest.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): PublishMqttMessageRequest { + const message = createBasePublishMqttMessageRequest(); + message.jobId = object.jobId ?? ""; + message.topic = object.topic ?? ""; + message.payload = object.payload ?? ""; + return message; + }, +}; + +function createBasePublishMqttMessageResponse(): PublishMqttMessageResponse { + return {}; +} + +export const PublishMqttMessageResponse: MessageFns = { + encode(_: PublishMqttMessageResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): PublishMqttMessageResponse { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePublishMqttMessageResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(_: any): PublishMqttMessageResponse { + return {}; + }, + + toJSON(_: PublishMqttMessageResponse): unknown { + const obj: any = {}; + return obj; + }, + + create(base?: DeepPartial): PublishMqttMessageResponse { + return PublishMqttMessageResponse.fromPartial(base ?? 
{}); + }, + fromPartial(_: DeepPartial): PublishMqttMessageResponse { + const message = createBasePublishMqttMessageResponse(); + return message; + }, +}; + +function createBaseCreateSoftRunnerRequest(): CreateSoftRunnerRequest { + return { jobId: "", versionId: "", actionId: "" }; +} + +export const CreateSoftRunnerRequest: MessageFns = { + encode(message: CreateSoftRunnerRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.jobId !== "") { + writer.uint32(10).string(message.jobId); + } + if (message.versionId !== "") { + writer.uint32(18).string(message.versionId); + } + if (message.actionId !== "") { + writer.uint32(26).string(message.actionId); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): CreateSoftRunnerRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCreateSoftRunnerRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.jobId = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.versionId = reader.string(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.actionId = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): CreateSoftRunnerRequest { + return { + jobId: isSet(object.jobId) ? globalThis.String(object.jobId) : "", + versionId: isSet(object.versionId) ? globalThis.String(object.versionId) : "", + actionId: isSet(object.actionId) ? 
globalThis.String(object.actionId) : "", + }; + }, + + toJSON(message: CreateSoftRunnerRequest): unknown { + const obj: any = {}; + if (message.jobId !== "") { + obj.jobId = message.jobId; + } + if (message.versionId !== "") { + obj.versionId = message.versionId; + } + if (message.actionId !== "") { + obj.actionId = message.actionId; + } + return obj; + }, + + create(base?: DeepPartial): CreateSoftRunnerRequest { + return CreateSoftRunnerRequest.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): CreateSoftRunnerRequest { + const message = createBaseCreateSoftRunnerRequest(); + message.jobId = object.jobId ?? ""; + message.versionId = object.versionId ?? ""; + message.actionId = object.actionId ?? ""; + return message; + }, +}; + +function createBaseCreateSoftRunnerResponse(): CreateSoftRunnerResponse { + return { runner: undefined }; +} + +export const CreateSoftRunnerResponse: MessageFns = { + encode(message: CreateSoftRunnerResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.runner !== undefined) { + Item4.encode(message.runner, writer.uint32(10).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): CreateSoftRunnerResponse { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCreateSoftRunnerResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.runner = Item4.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): CreateSoftRunnerResponse { + return { runner: isSet(object.runner) ? 
Item4.fromJSON(object.runner) : undefined }; + }, + + toJSON(message: CreateSoftRunnerResponse): unknown { + const obj: any = {}; + if (message.runner !== undefined) { + obj.runner = Item4.toJSON(message.runner); + } + return obj; + }, + + create(base?: DeepPartial): CreateSoftRunnerResponse { + return CreateSoftRunnerResponse.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): CreateSoftRunnerResponse { + const message = createBaseCreateSoftRunnerResponse(); + message.runner = (object.runner !== undefined && object.runner !== null) + ? Item4.fromPartial(object.runner) + : undefined; + return message; + }, +}; + +function createBaseTemplatesRequest(): TemplatesRequest { + return {}; +} + +export const TemplatesRequest: MessageFns = { + encode(_: TemplatesRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): TemplatesRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseTemplatesRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(_: any): TemplatesRequest { + return {}; + }, + + toJSON(_: TemplatesRequest): unknown { + const obj: any = {}; + return obj; + }, + + create(base?: DeepPartial): TemplatesRequest { + return TemplatesRequest.fromPartial(base ?? 
{}); + }, + fromPartial(_: DeepPartial): TemplatesRequest { + const message = createBaseTemplatesRequest(); + return message; + }, +}; + +function createBaseTemplatesResponse(): TemplatesResponse { + return { templateBadGateway: "" }; +} + +export const TemplatesResponse: MessageFns = { + encode(message: TemplatesResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.templateBadGateway !== "") { + writer.uint32(10).string(message.templateBadGateway); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): TemplatesResponse { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseTemplatesResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.templateBadGateway = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): TemplatesResponse { + return { templateBadGateway: isSet(object.templateBadGateway) ? globalThis.String(object.templateBadGateway) : "" }; + }, + + toJSON(message: TemplatesResponse): unknown { + const obj: any = {}; + if (message.templateBadGateway !== "") { + obj.templateBadGateway = message.templateBadGateway; + } + return obj; + }, + + create(base?: DeepPartial): TemplatesResponse { + return TemplatesResponse.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): TemplatesResponse { + const message = createBaseTemplatesResponse(); + message.templateBadGateway = object.templateBadGateway ?? 
""; + return message; + }, +}; + +export type GeneralAPIDefinition = typeof GeneralAPIDefinition; +export const GeneralAPIDefinition = { + name: "GeneralAPI", + fullName: "GeneralAPI.GeneralAPI", + methods: { + getJob: { + name: "getJob", + requestType: JobRequest, + requestStream: false, + responseType: JobResponse, + responseStream: false, + options: {}, + }, + getJobs: { + name: "getJobs", + requestType: JobsRequest, + requestStream: false, + responseType: JobsResponse, + responseStream: false, + options: {}, + }, + getJobVersion: { + name: "getJobVersion", + requestType: JobVersionRequest, + requestStream: false, + responseType: JobVersionResponse, + responseStream: false, + options: {}, + }, + getJobVersionLatest: { + name: "getJobVersionLatest", + requestType: JobVersionLatestRequest, + requestStream: false, + responseType: JobVersionLatestResponse, + responseStream: false, + options: {}, + }, + getJobVersions: { + name: "getJobVersions", + requestType: JobVersionsRequest, + requestStream: false, + responseType: JobVersionsResponse, + responseStream: false, + options: {}, + }, + getJobVersionArchive: { + name: "getJobVersionArchive", + requestType: JobVersionArchiveRequest, + requestStream: false, + responseType: JobVersionArchiveResponse, + responseStream: true, + options: {}, + }, + getJobAction: { + name: "getJobAction", + requestType: JobActionRequest, + requestStream: false, + responseType: JobActionResponse, + responseStream: false, + options: {}, + }, + getJobActionLatest: { + name: "getJobActionLatest", + requestType: JobActionLatestRequest, + requestStream: false, + responseType: JobActionLatestResponse, + responseStream: false, + options: {}, + }, + getJobActions: { + name: "getJobActions", + requestType: JobActionsRequest, + requestStream: false, + responseType: JobActionsResponse, + responseStream: false, + options: {}, + }, + getJobTrigger: { + name: "getJobTrigger", + requestType: JobTriggerRequest, + requestStream: false, + responseType: 
JobTriggerResponse, + responseStream: false, + options: {}, + }, + getJobTriggers: { + name: "getJobTriggers", + requestType: JobTriggersRequest, + requestStream: false, + responseType: JobTriggersResponse, + responseStream: false, + options: {}, + }, + getJobTriggersLatest: { + name: "getJobTriggersLatest", + requestType: JobTriggersLatestRequest, + requestStream: false, + responseType: JobTriggersLatestResponse, + responseStream: false, + options: {}, + }, + getRunner: { + name: "getRunner", + requestType: RunnerRequest, + requestStream: false, + responseType: RunnerResponse, + responseStream: false, + options: {}, + }, + getRunners: { + name: "getRunners", + requestType: RunnersRequest, + requestStream: false, + responseType: RunnersResponse, + responseStream: false, + options: {}, + }, + deleteRunner: { + name: "deleteRunner", + requestType: DeleteRunnerRequest, + requestStream: false, + responseType: Empty, + responseStream: false, + options: {}, + }, + getStoreItem: { + name: "getStoreItem", + requestType: StoreItemRequest, + requestStream: false, + responseType: StoreItemResponse, + responseStream: false, + options: {}, + }, + setStoreItem: { + name: "setStoreItem", + requestType: SetStoreItemRequest, + requestStream: false, + responseType: SetStoreItemResponse, + responseStream: false, + options: {}, + }, + deleteStoreItem: { + name: "deleteStoreItem", + requestType: DeleteStoreItemRequest, + requestStream: false, + responseType: DeleteStoreItemResponse, + responseStream: false, + options: {}, + }, + /** This will likely be migrated to its own service similar to the GatewayAPI */ + publishMqttMessage: { + name: "publishMqttMessage", + requestType: PublishMqttMessageRequest, + requestStream: false, + responseType: PublishMqttMessageResponse, + responseStream: false, + options: {}, + }, + createSoftRunner: { + name: "createSoftRunner", + requestType: CreateSoftRunnerRequest, + requestStream: false, + responseType: CreateSoftRunnerResponse, + responseStream: 
false, + options: {}, + }, + getTemplates: { + name: "getTemplates", + requestType: TemplatesRequest, + requestStream: false, + responseType: TemplatesResponse, + responseStream: false, + options: {}, + }, + }, +} as const; + +export interface GeneralAPIServiceImplementation { + getJob(request: JobRequest, context: CallContext & CallContextExt): Promise>; + getJobs(request: JobsRequest, context: CallContext & CallContextExt): Promise>; + getJobVersion( + request: JobVersionRequest, + context: CallContext & CallContextExt, + ): Promise>; + getJobVersionLatest( + request: JobVersionLatestRequest, + context: CallContext & CallContextExt, + ): Promise>; + getJobVersions( + request: JobVersionsRequest, + context: CallContext & CallContextExt, + ): Promise>; + getJobVersionArchive( + request: JobVersionArchiveRequest, + context: CallContext & CallContextExt, + ): ServerStreamingMethodResult>; + getJobAction( + request: JobActionRequest, + context: CallContext & CallContextExt, + ): Promise>; + getJobActionLatest( + request: JobActionLatestRequest, + context: CallContext & CallContextExt, + ): Promise>; + getJobActions( + request: JobActionsRequest, + context: CallContext & CallContextExt, + ): Promise>; + getJobTrigger( + request: JobTriggerRequest, + context: CallContext & CallContextExt, + ): Promise>; + getJobTriggers( + request: JobTriggersRequest, + context: CallContext & CallContextExt, + ): Promise>; + getJobTriggersLatest( + request: JobTriggersLatestRequest, + context: CallContext & CallContextExt, + ): Promise>; + getRunner(request: RunnerRequest, context: CallContext & CallContextExt): Promise>; + getRunners(request: RunnersRequest, context: CallContext & CallContextExt): Promise>; + deleteRunner(request: DeleteRunnerRequest, context: CallContext & CallContextExt): Promise>; + getStoreItem( + request: StoreItemRequest, + context: CallContext & CallContextExt, + ): Promise>; + setStoreItem( + request: SetStoreItemRequest, + context: CallContext & 
CallContextExt, + ): Promise>; + deleteStoreItem( + request: DeleteStoreItemRequest, + context: CallContext & CallContextExt, + ): Promise>; + /** This will likely be migrated to its own service similar to the GatewayAPI */ + publishMqttMessage( + request: PublishMqttMessageRequest, + context: CallContext & CallContextExt, + ): Promise>; + createSoftRunner( + request: CreateSoftRunnerRequest, + context: CallContext & CallContextExt, + ): Promise>; + getTemplates( + request: TemplatesRequest, + context: CallContext & CallContextExt, + ): Promise>; +} + +export interface GeneralAPIClient { + getJob(request: DeepPartial, options?: CallOptions & CallOptionsExt): Promise; + getJobs(request: DeepPartial, options?: CallOptions & CallOptionsExt): Promise; + getJobVersion( + request: DeepPartial, + options?: CallOptions & CallOptionsExt, + ): Promise; + getJobVersionLatest( + request: DeepPartial, + options?: CallOptions & CallOptionsExt, + ): Promise; + getJobVersions( + request: DeepPartial, + options?: CallOptions & CallOptionsExt, + ): Promise; + getJobVersionArchive( + request: DeepPartial, + options?: CallOptions & CallOptionsExt, + ): AsyncIterable; + getJobAction( + request: DeepPartial, + options?: CallOptions & CallOptionsExt, + ): Promise; + getJobActionLatest( + request: DeepPartial, + options?: CallOptions & CallOptionsExt, + ): Promise; + getJobActions( + request: DeepPartial, + options?: CallOptions & CallOptionsExt, + ): Promise; + getJobTrigger( + request: DeepPartial, + options?: CallOptions & CallOptionsExt, + ): Promise; + getJobTriggers( + request: DeepPartial, + options?: CallOptions & CallOptionsExt, + ): Promise; + getJobTriggersLatest( + request: DeepPartial, + options?: CallOptions & CallOptionsExt, + ): Promise; + getRunner(request: DeepPartial, options?: CallOptions & CallOptionsExt): Promise; + getRunners(request: DeepPartial, options?: CallOptions & CallOptionsExt): Promise; + deleteRunner(request: DeepPartial, options?: CallOptions & 
CallOptionsExt): Promise; + getStoreItem( + request: DeepPartial, + options?: CallOptions & CallOptionsExt, + ): Promise; + setStoreItem( + request: DeepPartial, + options?: CallOptions & CallOptionsExt, + ): Promise; + deleteStoreItem( + request: DeepPartial, + options?: CallOptions & CallOptionsExt, + ): Promise; + /** This will likely be migrated to its own service similar to the GatewayAPI */ + publishMqttMessage( + request: DeepPartial, + options?: CallOptions & CallOptionsExt, + ): Promise; + createSoftRunner( + request: DeepPartial, + options?: CallOptions & CallOptionsExt, + ): Promise; + getTemplates( + request: DeepPartial, + options?: CallOptions & CallOptionsExt, + ): Promise; +} + +function bytesFromBase64(b64: string): Uint8Array { + if ((globalThis as any).Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if ((globalThis as any).Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(globalThis.String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends globalThis.Array ? globalThis.Array> + : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? 
{ [K in keyof T]?: DeepPartial } + : Partial; + +function longToNumber(int64: { toString(): string }): number { + const num = globalThis.Number(int64.toString()); + if (num > globalThis.Number.MAX_SAFE_INTEGER) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + if (num < globalThis.Number.MIN_SAFE_INTEGER) { + throw new globalThis.Error("Value is smaller than Number.MIN_SAFE_INTEGER"); + } + return num; +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} + +export type ServerStreamingMethodResult = { [Symbol.asyncIterator](): AsyncIterator }; + +export interface MessageFns { + encode(message: T, writer?: BinaryWriter): BinaryWriter; + decode(input: BinaryReader | Uint8Array, length?: number): T; + fromJSON(object: any): T; + toJSON(message: T): unknown; + create(base?: DeepPartial): T; + fromPartial(object: DeepPartial): T; +} diff --git a/packages/grpc/src/protoset.bin b/packages/grpc/src/protoset.bin new file mode 100644 index 0000000..66c0493 Binary files /dev/null and b/packages/grpc/src/protoset.bin differ diff --git a/packages/grpc/src/runner.ts b/packages/grpc/src/runner.ts new file mode 100644 index 0000000..fcd6e74 --- /dev/null +++ b/packages/grpc/src/runner.ts @@ -0,0 +1,1580 @@ +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. 
+// versions: +// protoc-gen-ts_proto v2.10.1 +// protoc v3.21.12 +// source: runner.proto + +/* eslint-disable */ +import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire"; +import type { CallContext, CallOptions } from "nice-grpc-common"; +import { Empty } from "./base.js"; + +export const protobufPackage = "RunnerAPI"; + +export interface BasicContext { + triggerName: string; +} + +export interface HttpHeader { + name: string; + value: string; +} + +export interface EventHttpRequest { + info?: BasicContext | undefined; + head?: EventHttpRequest_Head | undefined; + body?: EventHttpRequest_Body | undefined; +} + +export interface EventHttpRequest_Head { + id: string; + method: string; + scheme: string; + hostname: string; + path: string; + query: string; + headers: HttpHeader[]; +} + +export interface EventHttpRequest_Body { + id: string; + seq: number; + data: Uint8Array; + end: boolean; +} + +export interface EventHttpResponse { + head?: EventHttpResponse_Head | undefined; + body?: EventHttpResponse_Body | undefined; +} + +export interface EventHttpResponse_Head { + id: string; + status: number; + headers: HttpHeader[]; +} + +export interface EventHttpResponse_Body { + id: string; + seq: number; + data: Uint8Array; + end: boolean; +} + +export interface EventMqttRequest { + context: BasicContext | undefined; + topic: string; + payload: Uint8Array; +} + +export interface EventMqttResponse { + status: EventMqttResponse_Status; +} + +export const EventMqttResponse_Status = { + ACCEPTED: "ACCEPTED", + REJECTED: "REJECTED", + UNRECOGNIZED: "UNRECOGNIZED", +} as const; + +export type EventMqttResponse_Status = typeof EventMqttResponse_Status[keyof typeof EventMqttResponse_Status]; + +export namespace EventMqttResponse_Status { + export type ACCEPTED = typeof EventMqttResponse_Status.ACCEPTED; + export type REJECTED = typeof EventMqttResponse_Status.REJECTED; + export type UNRECOGNIZED = typeof EventMqttResponse_Status.UNRECOGNIZED; +} + +export function 
eventMqttResponse_StatusFromJSON(object: any): EventMqttResponse_Status { + switch (object) { + case 0: + case "ACCEPTED": + return EventMqttResponse_Status.ACCEPTED; + case 1: + case "REJECTED": + return EventMqttResponse_Status.REJECTED; + case -1: + case "UNRECOGNIZED": + default: + return EventMqttResponse_Status.UNRECOGNIZED; + } +} + +export function eventMqttResponse_StatusToJSON(object: EventMqttResponse_Status): string { + switch (object) { + case EventMqttResponse_Status.ACCEPTED: + return "ACCEPTED"; + case EventMqttResponse_Status.REJECTED: + return "REJECTED"; + case EventMqttResponse_Status.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export function eventMqttResponse_StatusToNumber(object: EventMqttResponse_Status): number { + switch (object) { + case EventMqttResponse_Status.ACCEPTED: + return 0; + case EventMqttResponse_Status.REJECTED: + return 1; + case EventMqttResponse_Status.UNRECOGNIZED: + default: + return -1; + } +} + +export interface EventScheduleRequest { + context: BasicContext | undefined; + scheduledAt: string; +} + +export interface EventScheduleResponse { + status: EventScheduleResponse_Status; +} + +export const EventScheduleResponse_Status = { + ACCEPTED: "ACCEPTED", + REJECTED: "REJECTED", + UNRECOGNIZED: "UNRECOGNIZED", +} as const; + +export type EventScheduleResponse_Status = + typeof EventScheduleResponse_Status[keyof typeof EventScheduleResponse_Status]; + +export namespace EventScheduleResponse_Status { + export type ACCEPTED = typeof EventScheduleResponse_Status.ACCEPTED; + export type REJECTED = typeof EventScheduleResponse_Status.REJECTED; + export type UNRECOGNIZED = typeof EventScheduleResponse_Status.UNRECOGNIZED; +} + +export function eventScheduleResponse_StatusFromJSON(object: any): EventScheduleResponse_Status { + switch (object) { + case 0: + case "ACCEPTED": + return EventScheduleResponse_Status.ACCEPTED; + case 1: + case "REJECTED": + return EventScheduleResponse_Status.REJECTED; + case -1: + 
case "UNRECOGNIZED": + default: + return EventScheduleResponse_Status.UNRECOGNIZED; + } +} + +export function eventScheduleResponse_StatusToJSON(object: EventScheduleResponse_Status): string { + switch (object) { + case EventScheduleResponse_Status.ACCEPTED: + return "ACCEPTED"; + case EventScheduleResponse_Status.REJECTED: + return "REJECTED"; + case EventScheduleResponse_Status.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export function eventScheduleResponse_StatusToNumber(object: EventScheduleResponse_Status): number { + switch (object) { + case EventScheduleResponse_Status.ACCEPTED: + return 0; + case EventScheduleResponse_Status.REJECTED: + return 1; + case EventScheduleResponse_Status.UNRECOGNIZED: + default: + return -1; + } +} + +export interface StatusResponse { + status: StatusResponse_Status; + lastRequestAt: number; + loadAverage5Seconds: number; + loadAverage1Minute: number; +} + +export const StatusResponse_Status = { + STARTING: "STARTING", + READY: "READY", + CLOSING: "CLOSING", + CLOSED: "CLOSED", + FATAL: "FATAL", + UNRECOGNIZED: "UNRECOGNIZED", +} as const; + +export type StatusResponse_Status = typeof StatusResponse_Status[keyof typeof StatusResponse_Status]; + +export namespace StatusResponse_Status { + export type STARTING = typeof StatusResponse_Status.STARTING; + export type READY = typeof StatusResponse_Status.READY; + export type CLOSING = typeof StatusResponse_Status.CLOSING; + export type CLOSED = typeof StatusResponse_Status.CLOSED; + export type FATAL = typeof StatusResponse_Status.FATAL; + export type UNRECOGNIZED = typeof StatusResponse_Status.UNRECOGNIZED; +} + +export function statusResponse_StatusFromJSON(object: any): StatusResponse_Status { + switch (object) { + case 0: + case "STARTING": + return StatusResponse_Status.STARTING; + case 1: + case "READY": + return StatusResponse_Status.READY; + case 2: + case "CLOSING": + return StatusResponse_Status.CLOSING; + case 3: + case "CLOSED": + return 
StatusResponse_Status.CLOSED; + case 4: + case "FATAL": + return StatusResponse_Status.FATAL; + case -1: + case "UNRECOGNIZED": + default: + return StatusResponse_Status.UNRECOGNIZED; + } +} + +export function statusResponse_StatusToJSON(object: StatusResponse_Status): string { + switch (object) { + case StatusResponse_Status.STARTING: + return "STARTING"; + case StatusResponse_Status.READY: + return "READY"; + case StatusResponse_Status.CLOSING: + return "CLOSING"; + case StatusResponse_Status.CLOSED: + return "CLOSED"; + case StatusResponse_Status.FATAL: + return "FATAL"; + case StatusResponse_Status.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export function statusResponse_StatusToNumber(object: StatusResponse_Status): number { + switch (object) { + case StatusResponse_Status.STARTING: + return 0; + case StatusResponse_Status.READY: + return 1; + case StatusResponse_Status.CLOSING: + return 2; + case StatusResponse_Status.CLOSED: + return 3; + case StatusResponse_Status.FATAL: + return 4; + case StatusResponse_Status.UNRECOGNIZED: + default: + return -1; + } +} + +function createBaseBasicContext(): BasicContext { + return { triggerName: "" }; +} + +export const BasicContext: MessageFns = { + encode(message: BasicContext, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.triggerName !== "") { + writer.uint32(10).string(message.triggerName); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): BasicContext { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseBasicContext(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.triggerName = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): BasicContext { + return { triggerName: isSet(object.triggerName) ? globalThis.String(object.triggerName) : "" }; + }, + + toJSON(message: BasicContext): unknown { + const obj: any = {}; + if (message.triggerName !== "") { + obj.triggerName = message.triggerName; + } + return obj; + }, + + create(base?: DeepPartial): BasicContext { + return BasicContext.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): BasicContext { + const message = createBaseBasicContext(); + message.triggerName = object.triggerName ?? ""; + return message; + }, +}; + +function createBaseHttpHeader(): HttpHeader { + return { name: "", value: "" }; +} + +export const HttpHeader: MessageFns = { + encode(message: HttpHeader, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.name !== "") { + writer.uint32(10).string(message.name); + } + if (message.value !== "") { + writer.uint32(18).string(message.value); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): HttpHeader { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseHttpHeader(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.name = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.value = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): HttpHeader { + return { + name: isSet(object.name) ? globalThis.String(object.name) : "", + value: isSet(object.value) ? globalThis.String(object.value) : "", + }; + }, + + toJSON(message: HttpHeader): unknown { + const obj: any = {}; + if (message.name !== "") { + obj.name = message.name; + } + if (message.value !== "") { + obj.value = message.value; + } + return obj; + }, + + create(base?: DeepPartial): HttpHeader { + return HttpHeader.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): HttpHeader { + const message = createBaseHttpHeader(); + message.name = object.name ?? ""; + message.value = object.value ?? ""; + return message; + }, +}; + +function createBaseEventHttpRequest(): EventHttpRequest { + return { info: undefined, head: undefined, body: undefined }; +} + +export const EventHttpRequest: MessageFns = { + encode(message: EventHttpRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.info !== undefined) { + BasicContext.encode(message.info, writer.uint32(10).fork()).join(); + } + if (message.head !== undefined) { + EventHttpRequest_Head.encode(message.head, writer.uint32(18).fork()).join(); + } + if (message.body !== undefined) { + EventHttpRequest_Body.encode(message.body, writer.uint32(26).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): EventHttpRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseEventHttpRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.info = BasicContext.decode(reader, reader.uint32()); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.head = EventHttpRequest_Head.decode(reader, reader.uint32()); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.body = EventHttpRequest_Body.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): EventHttpRequest { + return { + info: isSet(object.info) ? BasicContext.fromJSON(object.info) : undefined, + head: isSet(object.head) ? EventHttpRequest_Head.fromJSON(object.head) : undefined, + body: isSet(object.body) ? EventHttpRequest_Body.fromJSON(object.body) : undefined, + }; + }, + + toJSON(message: EventHttpRequest): unknown { + const obj: any = {}; + if (message.info !== undefined) { + obj.info = BasicContext.toJSON(message.info); + } + if (message.head !== undefined) { + obj.head = EventHttpRequest_Head.toJSON(message.head); + } + if (message.body !== undefined) { + obj.body = EventHttpRequest_Body.toJSON(message.body); + } + return obj; + }, + + create(base?: DeepPartial): EventHttpRequest { + return EventHttpRequest.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): EventHttpRequest { + const message = createBaseEventHttpRequest(); + message.info = (object.info !== undefined && object.info !== null) + ? BasicContext.fromPartial(object.info) + : undefined; + message.head = (object.head !== undefined && object.head !== null) + ? EventHttpRequest_Head.fromPartial(object.head) + : undefined; + message.body = (object.body !== undefined && object.body !== null) + ? 
EventHttpRequest_Body.fromPartial(object.body) + : undefined; + return message; + }, +}; + +function createBaseEventHttpRequest_Head(): EventHttpRequest_Head { + return { id: "", method: "", scheme: "", hostname: "", path: "", query: "", headers: [] }; +} + +export const EventHttpRequest_Head: MessageFns = { + encode(message: EventHttpRequest_Head, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.id !== "") { + writer.uint32(10).string(message.id); + } + if (message.method !== "") { + writer.uint32(18).string(message.method); + } + if (message.scheme !== "") { + writer.uint32(26).string(message.scheme); + } + if (message.hostname !== "") { + writer.uint32(34).string(message.hostname); + } + if (message.path !== "") { + writer.uint32(42).string(message.path); + } + if (message.query !== "") { + writer.uint32(50).string(message.query); + } + for (const v of message.headers) { + HttpHeader.encode(v!, writer.uint32(90).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): EventHttpRequest_Head { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseEventHttpRequest_Head(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.id = reader.string(); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.method = reader.string(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.scheme = reader.string(); + continue; + } + case 4: { + if (tag !== 34) { + break; + } + + message.hostname = reader.string(); + continue; + } + case 5: { + if (tag !== 42) { + break; + } + + message.path = reader.string(); + continue; + } + case 6: { + if (tag !== 50) { + break; + } + + message.query = reader.string(); + continue; + } + case 11: { + if (tag !== 90) { + break; + } + + message.headers.push(HttpHeader.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): EventHttpRequest_Head { + return { + id: isSet(object.id) ? globalThis.String(object.id) : "", + method: isSet(object.method) ? globalThis.String(object.method) : "", + scheme: isSet(object.scheme) ? globalThis.String(object.scheme) : "", + hostname: isSet(object.hostname) ? globalThis.String(object.hostname) : "", + path: isSet(object.path) ? globalThis.String(object.path) : "", + query: isSet(object.query) ? globalThis.String(object.query) : "", + headers: globalThis.Array.isArray(object?.headers) ? 
object.headers.map((e: any) => HttpHeader.fromJSON(e)) : [], + }; + }, + + toJSON(message: EventHttpRequest_Head): unknown { + const obj: any = {}; + if (message.id !== "") { + obj.id = message.id; + } + if (message.method !== "") { + obj.method = message.method; + } + if (message.scheme !== "") { + obj.scheme = message.scheme; + } + if (message.hostname !== "") { + obj.hostname = message.hostname; + } + if (message.path !== "") { + obj.path = message.path; + } + if (message.query !== "") { + obj.query = message.query; + } + if (message.headers?.length) { + obj.headers = message.headers.map((e) => HttpHeader.toJSON(e)); + } + return obj; + }, + + create(base?: DeepPartial): EventHttpRequest_Head { + return EventHttpRequest_Head.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): EventHttpRequest_Head { + const message = createBaseEventHttpRequest_Head(); + message.id = object.id ?? ""; + message.method = object.method ?? ""; + message.scheme = object.scheme ?? ""; + message.hostname = object.hostname ?? ""; + message.path = object.path ?? ""; + message.query = object.query ?? ""; + message.headers = object.headers?.map((e) => HttpHeader.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseEventHttpRequest_Body(): EventHttpRequest_Body { + return { id: "", seq: 0, data: new Uint8Array(0), end: false }; +} + +export const EventHttpRequest_Body: MessageFns = { + encode(message: EventHttpRequest_Body, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.id !== "") { + writer.uint32(10).string(message.id); + } + if (message.seq !== 0) { + writer.uint32(16).uint64(message.seq); + } + if (message.data.length !== 0) { + writer.uint32(26).bytes(message.data); + } + if (message.end !== false) { + writer.uint32(32).bool(message.end); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): EventHttpRequest_Body { + const reader = input instanceof BinaryReader ? 
input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEventHttpRequest_Body(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.id = reader.string(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.seq = longToNumber(reader.uint64()); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.data = reader.bytes(); + continue; + } + case 4: { + if (tag !== 32) { + break; + } + + message.end = reader.bool(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): EventHttpRequest_Body { + return { + id: isSet(object.id) ? globalThis.String(object.id) : "", + seq: isSet(object.seq) ? globalThis.Number(object.seq) : 0, + data: isSet(object.data) ? bytesFromBase64(object.data) : new Uint8Array(0), + end: isSet(object.end) ? globalThis.Boolean(object.end) : false, + }; + }, + + toJSON(message: EventHttpRequest_Body): unknown { + const obj: any = {}; + if (message.id !== "") { + obj.id = message.id; + } + if (message.seq !== 0) { + obj.seq = Math.round(message.seq); + } + if (message.data.length !== 0) { + obj.data = base64FromBytes(message.data); + } + if (message.end !== false) { + obj.end = message.end; + } + return obj; + }, + + create(base?: DeepPartial): EventHttpRequest_Body { + return EventHttpRequest_Body.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): EventHttpRequest_Body { + const message = createBaseEventHttpRequest_Body(); + message.id = object.id ?? ""; + message.seq = object.seq ?? 0; + message.data = object.data ?? new Uint8Array(0); + message.end = object.end ?? 
false; + return message; + }, +}; + +function createBaseEventHttpResponse(): EventHttpResponse { + return { head: undefined, body: undefined }; +} + +export const EventHttpResponse: MessageFns = { + encode(message: EventHttpResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.head !== undefined) { + EventHttpResponse_Head.encode(message.head, writer.uint32(10).fork()).join(); + } + if (message.body !== undefined) { + EventHttpResponse_Body.encode(message.body, writer.uint32(18).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): EventHttpResponse { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEventHttpResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.head = EventHttpResponse_Head.decode(reader, reader.uint32()); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.body = EventHttpResponse_Body.decode(reader, reader.uint32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): EventHttpResponse { + return { + head: isSet(object.head) ? EventHttpResponse_Head.fromJSON(object.head) : undefined, + body: isSet(object.body) ? EventHttpResponse_Body.fromJSON(object.body) : undefined, + }; + }, + + toJSON(message: EventHttpResponse): unknown { + const obj: any = {}; + if (message.head !== undefined) { + obj.head = EventHttpResponse_Head.toJSON(message.head); + } + if (message.body !== undefined) { + obj.body = EventHttpResponse_Body.toJSON(message.body); + } + return obj; + }, + + create(base?: DeepPartial): EventHttpResponse { + return EventHttpResponse.fromPartial(base ?? 
{}); + }, + fromPartial(object: DeepPartial): EventHttpResponse { + const message = createBaseEventHttpResponse(); + message.head = (object.head !== undefined && object.head !== null) + ? EventHttpResponse_Head.fromPartial(object.head) + : undefined; + message.body = (object.body !== undefined && object.body !== null) + ? EventHttpResponse_Body.fromPartial(object.body) + : undefined; + return message; + }, +}; + +function createBaseEventHttpResponse_Head(): EventHttpResponse_Head { + return { id: "", status: 0, headers: [] }; +} + +export const EventHttpResponse_Head: MessageFns = { + encode(message: EventHttpResponse_Head, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.id !== "") { + writer.uint32(10).string(message.id); + } + if (message.status !== 0) { + writer.uint32(16).int32(message.status); + } + for (const v of message.headers) { + HttpHeader.encode(v!, writer.uint32(26).fork()).join(); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): EventHttpResponse_Head { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEventHttpResponse_Head(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.id = reader.string(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.status = reader.int32(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.headers.push(HttpHeader.decode(reader, reader.uint32())); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): EventHttpResponse_Head { + return { + id: isSet(object.id) ? globalThis.String(object.id) : "", + status: isSet(object.status) ? 
globalThis.Number(object.status) : 0, + headers: globalThis.Array.isArray(object?.headers) ? object.headers.map((e: any) => HttpHeader.fromJSON(e)) : [], + }; + }, + + toJSON(message: EventHttpResponse_Head): unknown { + const obj: any = {}; + if (message.id !== "") { + obj.id = message.id; + } + if (message.status !== 0) { + obj.status = Math.round(message.status); + } + if (message.headers?.length) { + obj.headers = message.headers.map((e) => HttpHeader.toJSON(e)); + } + return obj; + }, + + create(base?: DeepPartial): EventHttpResponse_Head { + return EventHttpResponse_Head.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): EventHttpResponse_Head { + const message = createBaseEventHttpResponse_Head(); + message.id = object.id ?? ""; + message.status = object.status ?? 0; + message.headers = object.headers?.map((e) => HttpHeader.fromPartial(e)) || []; + return message; + }, +}; + +function createBaseEventHttpResponse_Body(): EventHttpResponse_Body { + return { id: "", seq: 0, data: new Uint8Array(0), end: false }; +} + +export const EventHttpResponse_Body: MessageFns = { + encode(message: EventHttpResponse_Body, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.id !== "") { + writer.uint32(10).string(message.id); + } + if (message.seq !== 0) { + writer.uint32(16).uint64(message.seq); + } + if (message.data.length !== 0) { + writer.uint32(26).bytes(message.data); + } + if (message.end !== false) { + writer.uint32(32).bool(message.end); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): EventHttpResponse_Body { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseEventHttpResponse_Body(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.id = reader.string(); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.seq = longToNumber(reader.uint64()); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.data = reader.bytes(); + continue; + } + case 4: { + if (tag !== 32) { + break; + } + + message.end = reader.bool(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): EventHttpResponse_Body { + return { + id: isSet(object.id) ? globalThis.String(object.id) : "", + seq: isSet(object.seq) ? globalThis.Number(object.seq) : 0, + data: isSet(object.data) ? bytesFromBase64(object.data) : new Uint8Array(0), + end: isSet(object.end) ? globalThis.Boolean(object.end) : false, + }; + }, + + toJSON(message: EventHttpResponse_Body): unknown { + const obj: any = {}; + if (message.id !== "") { + obj.id = message.id; + } + if (message.seq !== 0) { + obj.seq = Math.round(message.seq); + } + if (message.data.length !== 0) { + obj.data = base64FromBytes(message.data); + } + if (message.end !== false) { + obj.end = message.end; + } + return obj; + }, + + create(base?: DeepPartial): EventHttpResponse_Body { + return EventHttpResponse_Body.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): EventHttpResponse_Body { + const message = createBaseEventHttpResponse_Body(); + message.id = object.id ?? ""; + message.seq = object.seq ?? 0; + message.data = object.data ?? new Uint8Array(0); + message.end = object.end ?? 
false; + return message; + }, +}; + +function createBaseEventMqttRequest(): EventMqttRequest { + return { context: undefined, topic: "", payload: new Uint8Array(0) }; +} + +export const EventMqttRequest: MessageFns = { + encode(message: EventMqttRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.context !== undefined) { + BasicContext.encode(message.context, writer.uint32(10).fork()).join(); + } + if (message.topic !== "") { + writer.uint32(18).string(message.topic); + } + if (message.payload.length !== 0) { + writer.uint32(26).bytes(message.payload); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): EventMqttRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEventMqttRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.context = BasicContext.decode(reader, reader.uint32()); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.topic = reader.string(); + continue; + } + case 3: { + if (tag !== 26) { + break; + } + + message.payload = reader.bytes(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): EventMqttRequest { + return { + context: isSet(object.context) ? BasicContext.fromJSON(object.context) : undefined, + topic: isSet(object.topic) ? globalThis.String(object.topic) : "", + payload: isSet(object.payload) ? 
bytesFromBase64(object.payload) : new Uint8Array(0), + }; + }, + + toJSON(message: EventMqttRequest): unknown { + const obj: any = {}; + if (message.context !== undefined) { + obj.context = BasicContext.toJSON(message.context); + } + if (message.topic !== "") { + obj.topic = message.topic; + } + if (message.payload.length !== 0) { + obj.payload = base64FromBytes(message.payload); + } + return obj; + }, + + create(base?: DeepPartial): EventMqttRequest { + return EventMqttRequest.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): EventMqttRequest { + const message = createBaseEventMqttRequest(); + message.context = (object.context !== undefined && object.context !== null) + ? BasicContext.fromPartial(object.context) + : undefined; + message.topic = object.topic ?? ""; + message.payload = object.payload ?? new Uint8Array(0); + return message; + }, +}; + +function createBaseEventMqttResponse(): EventMqttResponse { + return { status: EventMqttResponse_Status.ACCEPTED }; +} + +export const EventMqttResponse: MessageFns = { + encode(message: EventMqttResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.status !== EventMqttResponse_Status.ACCEPTED) { + writer.uint32(8).int32(eventMqttResponse_StatusToNumber(message.status)); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): EventMqttResponse { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseEventMqttResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + + message.status = eventMqttResponse_StatusFromJSON(reader.int32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): EventMqttResponse { + return { + status: isSet(object.status) + ? eventMqttResponse_StatusFromJSON(object.status) + : EventMqttResponse_Status.ACCEPTED, + }; + }, + + toJSON(message: EventMqttResponse): unknown { + const obj: any = {}; + if (message.status !== EventMqttResponse_Status.ACCEPTED) { + obj.status = eventMqttResponse_StatusToJSON(message.status); + } + return obj; + }, + + create(base?: DeepPartial): EventMqttResponse { + return EventMqttResponse.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): EventMqttResponse { + const message = createBaseEventMqttResponse(); + message.status = object.status ?? EventMqttResponse_Status.ACCEPTED; + return message; + }, +}; + +function createBaseEventScheduleRequest(): EventScheduleRequest { + return { context: undefined, scheduledAt: "" }; +} + +export const EventScheduleRequest: MessageFns = { + encode(message: EventScheduleRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.context !== undefined) { + BasicContext.encode(message.context, writer.uint32(10).fork()).join(); + } + if (message.scheduledAt !== "") { + writer.uint32(18).string(message.scheduledAt); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): EventScheduleRequest { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseEventScheduleRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 10) { + break; + } + + message.context = BasicContext.decode(reader, reader.uint32()); + continue; + } + case 2: { + if (tag !== 18) { + break; + } + + message.scheduledAt = reader.string(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): EventScheduleRequest { + return { + context: isSet(object.context) ? BasicContext.fromJSON(object.context) : undefined, + scheduledAt: isSet(object.scheduledAt) ? globalThis.String(object.scheduledAt) : "", + }; + }, + + toJSON(message: EventScheduleRequest): unknown { + const obj: any = {}; + if (message.context !== undefined) { + obj.context = BasicContext.toJSON(message.context); + } + if (message.scheduledAt !== "") { + obj.scheduledAt = message.scheduledAt; + } + return obj; + }, + + create(base?: DeepPartial): EventScheduleRequest { + return EventScheduleRequest.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): EventScheduleRequest { + const message = createBaseEventScheduleRequest(); + message.context = (object.context !== undefined && object.context !== null) + ? BasicContext.fromPartial(object.context) + : undefined; + message.scheduledAt = object.scheduledAt ?? 
""; + return message; + }, +}; + +function createBaseEventScheduleResponse(): EventScheduleResponse { + return { status: EventScheduleResponse_Status.ACCEPTED }; +} + +export const EventScheduleResponse: MessageFns = { + encode(message: EventScheduleResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.status !== EventScheduleResponse_Status.ACCEPTED) { + writer.uint32(8).int32(eventScheduleResponse_StatusToNumber(message.status)); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): EventScheduleResponse { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEventScheduleResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + + message.status = eventScheduleResponse_StatusFromJSON(reader.int32()); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): EventScheduleResponse { + return { + status: isSet(object.status) + ? eventScheduleResponse_StatusFromJSON(object.status) + : EventScheduleResponse_Status.ACCEPTED, + }; + }, + + toJSON(message: EventScheduleResponse): unknown { + const obj: any = {}; + if (message.status !== EventScheduleResponse_Status.ACCEPTED) { + obj.status = eventScheduleResponse_StatusToJSON(message.status); + } + return obj; + }, + + create(base?: DeepPartial): EventScheduleResponse { + return EventScheduleResponse.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): EventScheduleResponse { + const message = createBaseEventScheduleResponse(); + message.status = object.status ?? 
EventScheduleResponse_Status.ACCEPTED; + return message; + }, +}; + +function createBaseStatusResponse(): StatusResponse { + return { status: StatusResponse_Status.STARTING, lastRequestAt: 0, loadAverage5Seconds: 0, loadAverage1Minute: 0 }; +} + +export const StatusResponse: MessageFns = { + encode(message: StatusResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter { + if (message.status !== StatusResponse_Status.STARTING) { + writer.uint32(8).int32(statusResponse_StatusToNumber(message.status)); + } + if (message.lastRequestAt !== 0) { + writer.uint32(16).uint32(message.lastRequestAt); + } + if (message.loadAverage5Seconds !== 0) { + writer.uint32(24).uint32(message.loadAverage5Seconds); + } + if (message.loadAverage1Minute !== 0) { + writer.uint32(32).uint32(message.loadAverage1Minute); + } + return writer; + }, + + decode(input: BinaryReader | Uint8Array, length?: number): StatusResponse { + const reader = input instanceof BinaryReader ? input : new BinaryReader(input); + const end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseStatusResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + if (tag !== 8) { + break; + } + + message.status = statusResponse_StatusFromJSON(reader.int32()); + continue; + } + case 2: { + if (tag !== 16) { + break; + } + + message.lastRequestAt = reader.uint32(); + continue; + } + case 3: { + if (tag !== 24) { + break; + } + + message.loadAverage5Seconds = reader.uint32(); + continue; + } + case 4: { + if (tag !== 32) { + break; + } + + message.loadAverage1Minute = reader.uint32(); + continue; + } + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skip(tag & 7); + } + return message; + }, + + fromJSON(object: any): StatusResponse { + return { + status: isSet(object.status) ? statusResponse_StatusFromJSON(object.status) : StatusResponse_Status.STARTING, + lastRequestAt: isSet(object.lastRequestAt) ? 
globalThis.Number(object.lastRequestAt) : 0, + loadAverage5Seconds: isSet(object.loadAverage5Seconds) ? globalThis.Number(object.loadAverage5Seconds) : 0, + loadAverage1Minute: isSet(object.loadAverage1Minute) ? globalThis.Number(object.loadAverage1Minute) : 0, + }; + }, + + toJSON(message: StatusResponse): unknown { + const obj: any = {}; + if (message.status !== StatusResponse_Status.STARTING) { + obj.status = statusResponse_StatusToJSON(message.status); + } + if (message.lastRequestAt !== 0) { + obj.lastRequestAt = Math.round(message.lastRequestAt); + } + if (message.loadAverage5Seconds !== 0) { + obj.loadAverage5Seconds = Math.round(message.loadAverage5Seconds); + } + if (message.loadAverage1Minute !== 0) { + obj.loadAverage1Minute = Math.round(message.loadAverage1Minute); + } + return obj; + }, + + create(base?: DeepPartial): StatusResponse { + return StatusResponse.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): StatusResponse { + const message = createBaseStatusResponse(); + message.status = object.status ?? StatusResponse_Status.STARTING; + message.lastRequestAt = object.lastRequestAt ?? 0; + message.loadAverage5Seconds = object.loadAverage5Seconds ?? 0; + message.loadAverage1Minute = object.loadAverage1Minute ?? 
0; + return message; + }, +}; + +export type RunnerAPIDefinition = typeof RunnerAPIDefinition; +export const RunnerAPIDefinition = { + name: "RunnerAPI", + fullName: "RunnerAPI.RunnerAPI", + methods: { + eventHttp: { + name: "eventHttp", + requestType: EventHttpRequest, + requestStream: true, + responseType: EventHttpResponse, + responseStream: true, + options: {}, + }, + eventMqtt: { + name: "eventMqtt", + requestType: EventMqttRequest, + requestStream: false, + responseType: EventMqttResponse, + responseStream: false, + options: {}, + }, + eventSchedule: { + name: "eventSchedule", + requestType: EventScheduleRequest, + requestStream: false, + responseType: EventScheduleResponse, + responseStream: false, + options: {}, + }, + status: { + name: "status", + requestType: Empty, + requestStream: false, + responseType: StatusResponse, + responseStream: false, + options: {}, + }, + }, +} as const; + +export interface RunnerAPIServiceImplementation { + eventHttp( + request: AsyncIterable, + context: CallContext & CallContextExt, + ): ServerStreamingMethodResult>; + eventMqtt(request: EventMqttRequest, context: CallContext & CallContextExt): Promise>; + eventSchedule( + request: EventScheduleRequest, + context: CallContext & CallContextExt, + ): Promise>; + status(request: Empty, context: CallContext & CallContextExt): Promise>; +} + +export interface RunnerAPIClient { + eventHttp( + request: AsyncIterable>, + options?: CallOptions & CallOptionsExt, + ): AsyncIterable; + eventMqtt(request: DeepPartial, options?: CallOptions & CallOptionsExt): Promise; + eventSchedule( + request: DeepPartial, + options?: CallOptions & CallOptionsExt, + ): Promise; + status(request: DeepPartial, options?: CallOptions & CallOptionsExt): Promise; +} + +function bytesFromBase64(b64: string): Uint8Array { + if ((globalThis as any).Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + 
for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if ((globalThis as any).Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(globalThis.String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends globalThis.Array ? globalThis.Array> + : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? { [K in keyof T]?: DeepPartial } + : Partial; + +function longToNumber(int64: { toString(): string }): number { + const num = globalThis.Number(int64.toString()); + if (num > globalThis.Number.MAX_SAFE_INTEGER) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + if (num < globalThis.Number.MIN_SAFE_INTEGER) { + throw new globalThis.Error("Value is smaller than Number.MIN_SAFE_INTEGER"); + } + return num; +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} + +export type ServerStreamingMethodResult = { [Symbol.asyncIterator](): AsyncIterator }; + +export interface MessageFns { + encode(message: T, writer?: BinaryWriter): BinaryWriter; + decode(input: BinaryReader | Uint8Array, length?: number): T; + fromJSON(object: any): T; + toJSON(message: T): unknown; + create(base?: DeepPartial): T; + fromPartial(object: DeepPartial): T; +} diff --git a/packages/grpc/tsconfig.json b/packages/grpc/tsconfig.json new file mode 100644 index 0000000..cd126f1 --- /dev/null +++ b/packages/grpc/tsconfig.json @@ -0,0 +1,22 @@ +{ + "compilerOptions": { + "experimentalDecorators": true, + "inlineSourceMap": true, + "target": "ES2022", + "module": "NodeNext", + "moduleResolution": "NodeNext", + "strict": true, + "esModuleInterop": true, + "skipLibCheck": true, + 
"forceConsistentCasingInFileNames": true, + "declaration": true, + "types": ["node"], + "rootDir": "./src", + "outDir": "./dist", + "paths":{ + "~/*": ["./src/*"] + } + }, + "$schema": "https://json.schemastore.org/tsconfig", + "display": "Recommended" +} \ No newline at end of file diff --git a/packages/runner-node-entrypoint/package.json b/packages/runner-node-entrypoint/package.json index a0f2992..e20cf87 100644 --- a/packages/runner-node-entrypoint/package.json +++ b/packages/runner-node-entrypoint/package.json @@ -6,20 +6,29 @@ "types": "./dist/index.d.ts", "type": "module", "scripts": { - "build": "pnpm tsup" + "build": "pnpm tsdown" }, "keywords": [], "author": "Eithan Hersey-Tuit", "license": "MIT", "dependencies": { - "@jobber/tcp-frame-socket": "workspace:*" + "@grpc/grpc-js": "^1.14.3", + "@grpc/proto-loader": "^0.8.0", + "@jobber/common": "workspace:*", + "@jobber/grpc": "workspace:*", + "@jobber/tcp-frame-socket": "workspace:*", + "jose": "^6.1.3", + "nice-grpc": "^2.1.14", + "nice-grpc-server-reflection": "^3.0.3" }, "devDependencies": { "@tsconfig/node20": "^20.1.4", "@types/node": "^20.16.12", + "grpc-tools": "^1.13.1", "rimraf": "^5.0.10", + "ts-proto": "^2.10.1", "tsc-alias": "^1.8.10", - "tsup": "^8.3.5", + "tsdown": "^0.20.3", "typescript": "^5.6.3" } } diff --git a/packages/runner-node-entrypoint/src/context/global-context.ts b/packages/runner-node-entrypoint/src/context/global-context.ts new file mode 100644 index 0000000..9435f87 --- /dev/null +++ b/packages/runner-node-entrypoint/src/context/global-context.ts @@ -0,0 +1,85 @@ +import { ServerError, Status } from "nice-grpc"; +import { Runner } from "~/runner.js"; + +export class GlobalContext { + constructor(private runner: Runner) {} + + public async setStore(key: string, value: string, option?: { ttl?: number }) { + if (typeof key !== "string" || typeof value !== "string") { + throw new Error("Key and value must be strings"); + } + + await this.runner.client.methods.setStoreItem({ + jobId: 
this.runner.jobId, + key: key, + value: value, + ttl: option?.ttl, + }); + } + + public async setStoreJson( + key: string, + value: T, + option?: { ttl?: number }, + ) { + if (typeof key !== "string") { + throw new Error("Key must be a string"); + } + + await this.setStore(key, JSON.stringify(value), option); + } + + public async getStore(key: string) { + try { + const item = await this.runner.client.methods.getStoreItem({ + jobId: this.runner.jobId, + key: key, + }); + + return item.value; + } catch (err) { + if (err instanceof ServerError) { + if (err.code === Status.NOT_FOUND) { + return null; + } + } + + throw err; + } + } + + public async getStoreJson(key: string): Promise { + const data = await this.getStore(key); + + if (!data) { + return null; + } + + try { + return JSON.parse(data) as T; + } catch (err) { + console.error(err); + + return null; + } + } + + public async deleteStore(key: string) { + return await this.runner.client.methods.deleteStoreItem({ + jobId: this.runner.jobId, + key: key, + }); + } + + public async deleteStoreJson(key: string) { + await this.deleteStore(key); + } + + public async publish(topic: string, body: string) { + return this.runner.client.methods.publishMqttMessage({ + jobId: this.runner.jobId, + topic: topic, + payload: body, + }); + } +} diff --git a/packages/runner-node-entrypoint/src/context/http.ts b/packages/runner-node-entrypoint/src/context/http.ts new file mode 100644 index 0000000..b90c3d5 --- /dev/null +++ b/packages/runner-node-entrypoint/src/context/http.ts @@ -0,0 +1,413 @@ +import { deferred } from "@jobber/common/deferred.js"; +import { EventHttpRequest, EventHttpResponse } from "@jobber/grpc/runner.js"; +import assert from "node:assert"; +import { randomUUID } from "node:crypto"; +import { once } from "node:events"; +import { PassThrough, Readable, Writable } from "node:stream"; +import { Runner } from "~/runner.js"; + +class HttpContextRequest { + private eventBasicContext: EventHttpRequest["info"]; + private 
eventHttpHead: EventHttpRequest["head"]; + private stream = new PassThrough(); + private startStreamData = deferred(); + + private streamEventsStarted = false; + + private _receivedHeadersPromise = deferred(); + + constructor( + private runner: Runner, + private requestEvents: AsyncIterable, + ) {} + + public async _startStreamingEvents() { + if (this.streamEventsStarted) { + throw new Error( + `[HttpContextRequest/streamEvents] streamEvents can only be called once`, + ); + } + this.streamEventsStarted = true; + + let index = 0; // 0 = info, 1 = head, 2+ = body + + for await (const event of this.requestEvents) { + if (index === 0) { + if (!event.info) { + throw new Error( + `[HttpContextRequest/streamEvents] First event must be an info event`, + ); + } + + this.eventBasicContext = event.info; + } + + if (index === 1) { + if (!event.head) { + throw new Error( + `[HttpContextRequest/streamEvents] Second event must be an head event`, + ); + } + + this.eventHttpHead = event.head; + + this._receivedHeadersPromise.resolve(); + } + + if (index >= 2) { + // Do not start streaming body until it has been requested + await this.startStreamData.promise; + + if (!event.body) { + throw new Error( + `[HttpContextRequest/streamEvents] Body events must have a body`, + ); + } + + if (event.body.seq !== index - 2) { + throw new Error( + `[HttpContextRequest/streamEvents] Body event sequence mismatch. 
Expected ${index - 2} but got ${event.body.seq}`, + ); + } + + const writeResult = this.stream.write(event.body.data); + + if (!writeResult) { + await once(this.stream, "drain"); + } + + if (event.body.end) { + const finishedPromise = once(this.stream, "finish"); + + this.stream.end(); + + await finishedPromise; + + break; + } + } + + index++; + } + } + + public get receivedHeadersPromise() { + return this._receivedHeadersPromise.promise; + } + + public get name() { + if (!this.eventBasicContext) { + throw new Error("[HttpContextRequest/name] No info event received"); + } + + return this.eventBasicContext.triggerName; + } + + public header(name: string) { + if (!this.eventHttpHead) { + throw new Error("[HttpContextRequest/header] No head event received"); + } + + const headers = this.eventHttpHead.headers.filter( + (h) => h.name.toLowerCase() === name.toLowerCase(), + ); + + if (headers.length === 0) { + return undefined; + } + + if (headers.length === 1) { + return headers[0].value; + } + + if (headers.length >= 2) { + return headers.map((header) => header.value); + } + } + + public getHeaders(): { name: string; value: string }[] { + if (!this.eventHttpHead) { + throw new Error("[HttpContextRequest/getHeaders] No head event received"); + } + + return this.eventHttpHead.headers; + } + + public query(name: string) { + const values = this.getSearchParams().getAll(name); + + if (values.length === 0) { + return null; + } + + return values[0]; + } + + public queries(name: string) { + const values = this.getSearchParams().getAll(name); + + if (values.length === 0) { + return null; + } + + return values; + } + + public getSearchParams() { + if (!this.eventHttpHead) { + throw new Error( + "[HttpContextRequest/getSearchParams] No head event received", + ); + } + + return new URLSearchParams(this.eventHttpHead.query); + } + + public get path() { + if (!this.eventHttpHead) { + throw new Error("[HttpContextRequest/path] No head event received"); + } + + return 
this.eventHttpHead.path; + } + + public get method() { + if (!this.eventHttpHead) { + throw new Error("[HttpContextRequest/method] No head event received"); + } + + return this.eventHttpHead.method; + } + + public getReadStream(): Readable { + this.startStreamData.resolve(); + + return this.stream; + } + + public async json() { + this.startStreamData.resolve(); + + let body = ""; + + for await (const chunk of this.stream) { + body += chunk.toString(); + } + + return JSON.parse(body); + } + + public async text() { + this.startStreamData.resolve(); + + let body = ""; + + for await (const chunk of this.stream) { + body += chunk.toString(); + } + + return body; + } + + /** + * @deprecated use getReadStream, this is solely for backwards compatibility. + */ + public async raw() { + this.startStreamData.resolve(); + + const buffers = []; + + for await (const chunk of this.stream) { + buffers.push(chunk); + } + + return Buffer.concat(buffers); + } +} + +class HttpContextResponse { + private headers = new Headers(); + private statusCode = 200; + private stream = new PassThrough(); + private headersFlushed = false; + private flushHeadersPromise = deferred(); + + constructor(private runner: Runner) {} + + public header(name: string, value: string) { + if (this.headersFlushed) { + throw new Error( + "[HttpContextResponse/header] Cannot set header after headers have been flushed", + ); + } + + this.headers.set(name, value); + } + + public status(statusCode: number) { + if (this.headersFlushed) { + throw new Error( + "[HttpContextResponse/status] Cannot set status after headers have been flushed", + ); + } + + this.statusCode = statusCode; + } + + public getWriteStream(): Writable { + this.headersFlushed = true; + this.flushHeadersPromise.resolve(); + + return this.stream; + } + + public json(data: T) { + if (this.headersFlushed) { + throw new Error( + "[HttpContextResponse/json] Cannot call json() after headers have been flushed", + ); + } + + 
this.headers.set("Content-Type", "application/json"); + + this.headersFlushed = true; + this.flushHeadersPromise.resolve(); + + this.stream.write(JSON.stringify(data)); + this.stream.end(); + } + + public text(data: string) { + assert( + typeof data === "string", + 'Argument "data" must be type of string on HttpContextResponse.text()', + ); + + if (this.headersFlushed) { + throw new Error( + "[HttpContextResponse/text] Cannot call text() after headers have been flushed", + ); + } + + this.headers.set("Content-Type", "text/plain"); + + this.headersFlushed = true; + this.flushHeadersPromise.resolve(); + + this.stream.write(data); + this.stream.end(); + } + + public html(data: string) { + assert( + typeof data === "string", + 'Argument "data" must be type of string on HttpContextResponse.html()', + ); + + if (this.headersFlushed) { + throw new Error( + "[HttpContextResponse/html] Cannot call html() after headers have been flushed", + ); + } + + this.headers.set("Content-Type", "text/html"); + + this.headersFlushed = true; + this.flushHeadersPromise.resolve(); + + this.stream.write(data); + this.stream.end(); + } + + /** + * Invoked once the clients-execution code has finished. This is used + * to avoid timeouts and hanging. 
+ */ + public async _finished() { + if (!this.headersFlushed) { + this.status(204); + this.headersFlushed = true; + this.flushHeadersPromise.resolve(); + } + + await this.flushHeadersPromise.promise; + + if (!this.stream.writableEnded) { + this.stream.end(); + } + } + + public async *_createResponse(): AsyncGenerator { + await this.flushHeadersPromise.promise; + + const baseId = randomUUID(); + + yield { + head: { + headers: Array.from(this.headers.entries()).map(([name, value]) => ({ + name, + value, + })), + id: `${baseId}`, + status: this.statusCode, + }, + }; + + let seq = 0; + for await (const chunk of this.stream) { + yield { + body: { + id: `${baseId}-${seq}`, + data: chunk, + seq: seq, + end: false, + }, + }; + + seq++; + } + + yield { + body: { + id: `${baseId}-${seq}`, + data: Buffer.alloc(0), + seq: seq, + end: true, + }, + }; + } +} + +export class HttpContext { + private contextRequest: HttpContextRequest; + + private contextResponse: HttpContextResponse; + + constructor( + private runner: Runner, + requestEvents: AsyncIterable, + ) { + this.contextRequest = new HttpContextRequest(runner, requestEvents); + this.contextResponse = new HttpContextResponse(runner); + } + + public get name() { + return this.contextRequest.name; + } + + public get request() { + return this.contextRequest; + } + + public get response() { + return this.contextResponse; + } + + public async publish(topic: string, payload: string) { + await this.runner.client.methods.publishMqttMessage({ + jobId: this.runner.jobId, + topic, + payload, + }); + } +} diff --git a/packages/runner-node-entrypoint/src/context.ts b/packages/runner-node-entrypoint/src/context/legacy-context.ts similarity index 52% rename from packages/runner-node-entrypoint/src/context.ts rename to packages/runner-node-entrypoint/src/context/legacy-context.ts index 926f510..23e7e83 100644 --- a/packages/runner-node-entrypoint/src/context.ts +++ b/packages/runner-node-entrypoint/src/context/legacy-context.ts @@ -1,36 
+1,30 @@ -import { JobberHandlerRequest } from "./request.js"; -import { JobberHandlerResponse } from "./response.js"; -import { Runner } from "./runner.js"; +import { Runner } from "~/runner.js"; -export class JobberHandlerContext { - private runner: Runner; - private request: JobberHandlerRequest; - private response: JobberHandlerResponse; - - constructor( - runner: Runner, - request: JobberHandlerRequest, - response: JobberHandlerResponse - ) { - this.runner = runner; - this.request = request; - this.response = response; - } +export class LegacyContext { + constructor(private runner: Runner) {} public async setStore(key: string, value: string, option?: { ttl?: number }) { - return this.runner.sendStoreSet(key, value, option); + await this.runner.client.methods.setStoreItem({ + jobId: this.runner.jobId, + key: key, + value: value, + ttl: option?.ttl, + }); } public async setStoreJson( key: string, value: T, - option?: { ttl?: number } + option?: { ttl?: number }, ) { await this.setStore(key, JSON.stringify(value), option); } public async getStore(key: string) { - return this.runner.sendStoreGet(key); + return this.runner.client.methods.getStoreItem({ + jobId: this.runner.jobId, + key: key, + }); } public async getStoreJson(key: string): Promise { @@ -50,26 +44,33 @@ export class JobberHandlerContext { } public async deleteStore(key: string) { - return await this.runner.sendStoreDelete(key); + return await this.runner.client.methods.deleteStoreItem({ + jobId: this.runner.jobId, + key: key, + }); } public async deleteStoreJson(key: string) { - await this.runner.sendStoreDelete(key); + await this.deleteStore(key); } public async publish(topic: string, body: Buffer | string | unknown) { - let payload: Buffer; + let payload: string; if (typeof body === "object" && !Buffer.isBuffer(body)) { - payload = Buffer.from(JSON.stringify(body)); + payload = JSON.stringify(body); } else if (typeof body === "string") { - payload = Buffer.from(body); - } else if 
(Buffer.isBuffer(body)) { payload = body; + } else if (Buffer.isBuffer(body)) { + payload = body.toString("utf8"); } else { throw new Error("Invalid body type for MQTT publish"); } - return this.runner.sendMqttPublish(topic, payload); + return this.runner.client.methods.publishMqttMessage({ + jobId: this.runner.jobId, + topic: topic, + payload: payload, + }); } } diff --git a/packages/runner-node-entrypoint/src/context/legacy-request.ts b/packages/runner-node-entrypoint/src/context/legacy-request.ts new file mode 100644 index 0000000..66d4102 --- /dev/null +++ b/packages/runner-node-entrypoint/src/context/legacy-request.ts @@ -0,0 +1,159 @@ +import { HttpContext } from "./http.js"; +import { MqttContext } from "./mqtt.js"; +import { ScheduleContext } from "./schedule.js"; + +export class LegacyContextRequest { + constructor(private _request: MqttContext | HttpContext | ScheduleContext) {} + + private body?: Buffer; + + public async _externalProcess() { + if (this._request instanceof HttpContext) { + this.body = await this._request.request.raw(); + } else if (this._request instanceof MqttContext) { + this.body = Buffer.from(this._request.payload); + } + } + + type() { + if (this._request instanceof MqttContext) { + return "mqtt"; + } else if (this._request instanceof HttpContext) { + return "http"; + } else if (this._request instanceof ScheduleContext) { + return "schedule"; + } else { + throw new Error("Unknown request type"); + } + } + + name() { + if (this._request instanceof MqttContext) { + return this._request.name; + } else if (this._request instanceof HttpContext) { + return this._request.name; + } else if (this._request instanceof ScheduleContext) { + return this._request.name; + } else { + throw new Error("Unknown request type"); + } + } + + header(name: string) { + if (this._request instanceof HttpContext) { + return this._request.request.header(name); + } else { + throw new Error("Headers are only available for HTTP requests"); + } + } + + query(name: 
string) { + if (this._request instanceof HttpContext) { + return this._request.request.query(name); + } else { + throw new Error("Query parameters are only available for HTTP requests"); + } + } + + queries(name: string) { + if (this._request instanceof HttpContext) { + return this._request.request.queries(name); + } else { + throw new Error("Query parameters are only available for HTTP requests"); + } + } + + method() { + if (this._request instanceof HttpContext) { + return this._request.request.method; + } else { + throw new Error("Method is only available for HTTP requests"); + } + } + + path() { + if (this._request instanceof HttpContext) { + return this._request.request.path; + } else { + throw new Error("Path is only available for HTTP requests"); + } + } + + topic() { + if (this._request instanceof MqttContext) { + return this._request.topic; + } else { + throw new Error("Topic is only available for MQTT requests"); + } + } + + json(): T { + return JSON.parse(this.text()); + } + + text() { + if ( + this._request instanceof HttpContext || + this._request instanceof MqttContext + ) { + if (!this.body) { + throw new Error( + "Body is not available. Ensure to call _streamBody() before accessing the body.", + ); + } + + return this.body.toString(); + } else { + throw new Error("Text body is only available for HTTP and MQTT requests"); + } + } + + data() { + if ( + this._request instanceof HttpContext || + this._request instanceof MqttContext + ) { + if (!this.body) { + throw new Error( + "Body is not available. Ensure to call _streamBody() before accessing the body.", + ); + } + + return this.body; + } else { + throw new Error("Data body is only available for HTTP and MQTT requests"); + } + } + + getHttpRequest(): Request { + if (!(this._request instanceof HttpContext)) { + throw new Error( + "Only HTTP requests can be converted to a Request object", + ); + } + + const urlScheme = "https"; + const urlHost = this.header("host") ?? 
"localhost"; + const urlPath = this._request.request.path; + const urlQuery = this._request.request.getSearchParams(); + + // This is cursed, but is what it is. + const body = + this._request.request.method === "GET" || + this._request.request.method === "HEAD" + ? undefined + : new Uint8Array(this.body ?? Buffer.alloc(0)); + + const headers = new Headers(); + for (const { name, value } of this._request.request.getHeaders()) { + headers.append(name, value); + } + + return new Request(`${urlScheme}://${urlHost}${urlPath}?${urlQuery}`, { + headers: headers, + method: this.method(), + body: body, + redirect: "manual", + }); + } +} diff --git a/packages/runner-node-entrypoint/src/context/legacy-response.ts b/packages/runner-node-entrypoint/src/context/legacy-response.ts new file mode 100644 index 0000000..0f9ead6 --- /dev/null +++ b/packages/runner-node-entrypoint/src/context/legacy-response.ts @@ -0,0 +1,142 @@ +import { once } from "events"; +import { HttpContext } from "./http.js"; +import { MqttContext } from "./mqtt.js"; +import { ScheduleContext } from "./schedule.js"; + +export class LegacyContextResponse { + constructor(private _context: MqttContext | HttpContext | ScheduleContext) {} + + private _status?: number; + private _headers?: Record; + private chunks = [] as Buffer[]; + private publishQueue = [] as Array<{ topic: string; body: Buffer }>; + + public async _externalProcess() { + if (this._context instanceof HttpContext) { + if (this._status) { + this._context.response.status(this._status); + } + + if (this._headers) { + for (const [name, value] of Object.entries(this._headers)) { + this._context.response.header(name, value); + } + } + + const writeStream = this._context.response.getWriteStream(); + for (const chunk of this.chunks) { + writeStream.write(chunk); + } + writeStream.end(); + + await once(writeStream, "finish"); + } + + if (this._context instanceof MqttContext) { + for (const { topic, body } of this.publishQueue) { + await 
this._context.publish(topic, body.toString()); + } + } + + if (this._context instanceof ScheduleContext) { + // No-op, Schedule responses are handled immediately in the json/text methods. + } + } + + header(name: string, value: string) { + if (this._context instanceof HttpContext) { + if (!this._headers) { + this._headers = {}; + } + + this._headers[name.toLowerCase()] = value; + + return this; + } else { + throw new Error("Headers are only available for HTTP responses"); + } + } + + status(status: number) { + if (this._context instanceof HttpContext) { + this._status = status; + + return this; + } else { + throw new Error("Status is only available for HTTP responses"); + } + } + + redirect(path: string, status = 303) { + if (this._context instanceof HttpContext) { + this._status = status; + this.header("Location", path); + + return this; + } else { + throw new Error("Redirect is only available for HTTP responses"); + } + } + + json(data: any, status = 200) { + if (this._context instanceof HttpContext) { + if (this.chunks.length > 0) { + this.chunks.splice(0, this.chunks.length); + } + + this._status = status; + this.header("Content-Type", "application/json"); + this.chunks.push(Buffer.from(JSON.stringify(data))); + + return this; + } else { + throw new Error("JSON responses are only available for HTTP responses"); + } + } + + text(data: string, status = 200) { + if (this._context instanceof HttpContext) { + if (this.chunks.length > 0) { + this.chunks.splice(0, this.chunks.length); + } + + this._status = status; + this.chunks.push(Buffer.from(data)); + + return this; + } else { + throw new Error("Text responses are only available for HTTP responses"); + } + } + + chunk(data: Buffer) { + if (this._context instanceof HttpContext) { + this.chunks.push(data); + + return this; + } else { + throw new Error( + "Chunked responses are only available for HTTP responses", + ); + } + } + + // TODO: Remove this in a later revision, deprecated way of publishing MQTT events. 
+ publish(topic: string, body: string | Buffer | any) { + if (this._context instanceof MqttContext) { + const bodyBuffer = + body instanceof Buffer + ? body + : typeof body === "string" + ? Buffer.from(body) + : Buffer.from(JSON.stringify(body)); + + this.publishQueue.push({ + topic, + body: bodyBuffer, + }); + } else { + throw new Error("Publish is only available for MQTT responses"); + } + } +} diff --git a/packages/runner-node-entrypoint/src/context/mqtt.ts b/packages/runner-node-entrypoint/src/context/mqtt.ts new file mode 100644 index 0000000..f5b5747 --- /dev/null +++ b/packages/runner-node-entrypoint/src/context/mqtt.ts @@ -0,0 +1,75 @@ +import { + EventMqttRequest, + EventMqttResponse, + EventMqttResponse_Status, +} from "@jobber/grpc/runner.js"; +import { Runner } from "~/runner.js"; + +export class MqttContext { + constructor( + private runner: Runner, + private request: EventMqttRequest, + ) {} + + public get name() { + if (!this.request.context) { + throw new Error("MqttContext is missing context"); + } + + return this.request.context.triggerName; + } + + public get topic() { + if (!this.request.context) { + throw new Error("MqttContext is missing context"); + } + + return this.request.topic; + } + + public get payload() { + if (!this.request.context) { + throw new Error("MqttContext is missing context"); + } + + return this.request.payload; + } + + public get text() { + if (!this.request.context) { + throw new Error("MqttContext is missing context"); + } + + return this.request.payload.toString(); + } + + public get json() { + if (!this.request.context) { + throw new Error("MqttContext is missing context"); + } + + try { + return JSON.parse(this.request.payload.toString()); + } catch (err) { + throw new Error("Failed to parse MQTT payload as JSON"); + } + } + + public async publish(topic: string, payload: string) { + if (!this.request.context) { + throw new Error("MqttContext is missing context"); + } + + await 
this.runner.client.methods.publishMqttMessage({ + jobId: this.runner.jobId, + topic, + payload, + }); + } + + public _createResponse(): EventMqttResponse { + return { + status: EventMqttResponse_Status.ACCEPTED, + }; + } +} diff --git a/packages/runner-node-entrypoint/src/context/schedule.ts b/packages/runner-node-entrypoint/src/context/schedule.ts new file mode 100644 index 0000000..8cf10a3 --- /dev/null +++ b/packages/runner-node-entrypoint/src/context/schedule.ts @@ -0,0 +1,39 @@ +import { + EventMqttResponse, + EventMqttResponse_Status, + EventScheduleRequest, +} from "@jobber/grpc/runner.js"; +import { Runner } from "~/runner.js"; + +export class ScheduleContext { + constructor( + private runner: Runner, + private request: EventScheduleRequest, + ) {} + + public get name() { + if (!this.request.context) { + throw new Error("MqttContext is missing context"); + } + + return this.request.context.triggerName; + } + + public get scheduledAt() { + return this.request.scheduledAt; + } + + public async publish(topic: string, payload: string) { + await this.runner.client.methods.publishMqttMessage({ + jobId: this.runner.jobId, + topic, + payload, + }); + } + + public _createResponse(): EventMqttResponse { + return { + status: EventMqttResponse_Status.ACCEPTED, + }; + } +} diff --git a/packages/runner-node-entrypoint/src/index.ts b/packages/runner-node-entrypoint/src/index.ts index 3ceee2b..ca2e245 100644 --- a/packages/runner-node-entrypoint/src/index.ts +++ b/packages/runner-node-entrypoint/src/index.ts @@ -1,49 +1,45 @@ -import { randomBytes } from "crypto"; -import { getArgument } from "./util.js"; +import { getOptions } from "./options.js"; import { Runner } from "./runner.js"; -import assert from "assert"; const main = async () => { - const jobRunnerIdentifier = getArgument("job-runner-identifier"); - const jobControllerHost = getArgument("job-controller-host"); - const jobControllerPort = Number(getArgument("job-controller-port")); - const jobDebug = 
getArgument("job-debug") === "true"; - - assert(jobRunnerIdentifier); - assert(jobControllerHost); - assert(jobControllerPort); - - if (jobDebug) { - console.log("[main] Starting job runner with the following configuration:"); - console.log(` Job Runner Identifier: ${jobRunnerIdentifier}`); - console.log(` Job Controller Host: ${jobControllerHost}`); - console.log(` Job Controller Port: ${jobControllerPort}`); - console.log(` Job Debug Mode: ${jobDebug ? "Enabled" : "Disabled"}`); + const options = getOptions(); + + if (options.runnerDebug) { + console.log("[main] Starting runner with the following configuration:"); + console.log(` Runner Identifier: ${options.runnerId}`); + console.log(` Runner Client ID: ${options.runnerClientId}`); + console.log( + ` Runner Client Secret: ${"*".repeat(options.runnerClientSecret.length)}`, + ); + console.log(` Runner General API: ${options.runnerGeneralApiEndpoint}`); + console.log( + ` Runner Debug Mode: ${options.runnerDebug ? "Enabled" : "Disabled"}`, + ); } - const jobber = new Runner( - jobControllerHost, - jobControllerPort, - jobRunnerIdentifier, - jobDebug - ); + const runner = new Runner(options); - await jobber.connect(); + await runner.start(); - const shutdownRoutine = async () => { - if (jobDebug) { - console.log("[main/shutdownRoutine] Shutdown signal received"); + const shutdown = async () => { + if (options.runnerDebug) { + console.info("Shutdown procedure started..."); } - await jobber.onFrameShutdown(randomBytes(16).toString("hex")); + await runner.stop(); + + if (options.runnerDebug) { + console.info("Shutdown procedure completed"); + } + + process.exit(0); }; process.once("SIGTERM", async () => { - await shutdownRoutine(); + await shutdown(); }); - process.once("SIGINT", async () => { - await shutdownRoutine(); + await shutdown(); }); }; diff --git a/packages/runner-node-entrypoint/src/options.ts b/packages/runner-node-entrypoint/src/options.ts new file mode 100644 index 0000000..9cd7397 --- /dev/null +++ 
b/packages/runner-node-entrypoint/src/options.ts @@ -0,0 +1,124 @@ +export type RunnerOptions = { + runnerId: string; + runnerClientId: string; + runnerClientSecret: string; + runnerGeneralApiEndpoint: string; + + runnerOAuthTokenEndpoint: string; + runnerOAuthJwksEndpoint: string; + runnerOAuthIssuer: string; + + runnerApiPort: number; + + runnerDebug: boolean; +}; + +export function getArgument(name: string) { + const arg = process.argv.find((arg) => arg.startsWith(`--${name}=`)); + + if (!arg) { + return null; + } + + return arg.split("=", 2)[1]; +} + +function getOptionsFromArgs(): RunnerOptions | null { + const runnerId = getArgument("runner-id"); + const runnerClientId = getArgument("client-id"); + const runnerClientSecret = getArgument("client-secret"); + const runnerGeneralApiEndpoint = getArgument("general-api-endpoint"); + const runnerOAuthTokenEndpoint = getArgument("oauth-token-endpoint"); + const runnerOAuthJwksEndpoint = getArgument("oauth-jwks-endpoint"); + const runnerOAuthIssuer = getArgument("oauth-issuer"); + const runnerApiPort = Number(getArgument("port")); + + const runnerDebug = getArgument("debug"); + + if ( + runnerId === null || + runnerClientId === null || + runnerClientSecret === null || + runnerGeneralApiEndpoint === null || + runnerOAuthTokenEndpoint === null || + runnerOAuthJwksEndpoint === null || + runnerOAuthIssuer === null || + isNaN(runnerApiPort) || + runnerDebug === null + ) { + return null; + } + + const runnerDebugParsed = runnerDebug + ? 
["true", "yes", "ok", "y"].includes(runnerDebug.toLowerCase()) + : false; + + return { + runnerId, + runnerClientId, + runnerClientSecret, + runnerGeneralApiEndpoint, + runnerOAuthTokenEndpoint, + runnerOAuthJwksEndpoint, + runnerOAuthIssuer, + runnerApiPort, + runnerDebug: runnerDebugParsed, + }; +} + +function getOptionsFromEnv(): RunnerOptions | null { + const runnerId = process.env.RUNNER_ID; + const runnerClientId = process.env.RUNNER_CLIENT_ID; + const runnerClientSecret = process.env.RUNNER_CLIENT_SECRET; + const runnerGeneralApiEndpoint = process.env.RUNNER_GENERAL_API_ENDPOINT; + const runnerOAuthTokenEndpoint = process.env.RUNNER_OAUTH_TOKEN_ENDPOINT; + const runnerOAuthJwksEndpoint = process.env.RUNNER_OAUTH_JWKS_ENDPOINT; + const runnerOAuthIssuer = process.env.RUNNER_OAUTH_ISSUER; + const runnerApiPort = Number(process.env.RUNNER_API_PORT); + const runnerDebug = process.env.RUNNER_DEBUG; + + if ( + !runnerId || + !runnerClientId || + !runnerClientSecret || + !runnerGeneralApiEndpoint || + !runnerOAuthTokenEndpoint || + !runnerOAuthJwksEndpoint || + !runnerOAuthIssuer || + isNaN(runnerApiPort) + ) { + return null; + } + + const runnerDebugParsed = runnerDebug + ? ["true", "yes", "ok", "y"].includes(runnerDebug.toLowerCase()) + : false; + + return { + runnerId, + runnerClientId, + runnerClientSecret, + runnerGeneralApiEndpoint, + runnerOAuthTokenEndpoint, + runnerOAuthJwksEndpoint, + runnerOAuthIssuer, + runnerApiPort, + runnerDebug: runnerDebugParsed, + }; +} + +export function getOptions(): RunnerOptions { + const fromArgs = getOptionsFromArgs(); + if (fromArgs) { + return fromArgs; + } + + const fromEnv = getOptionsFromEnv(); + if (fromEnv) { + return fromEnv; + } + + throw new Error( + "Failed to get options from arguments or environment variables. 
Please provide the necessary configuration.", + ); +} diff --git a/packages/runner-node-entrypoint/src/request.ts b/packages/runner-node-entrypoint/src/request.ts deleted file mode 100644 index 5d8e96e..0000000 --- a/packages/runner-node-entrypoint/src/request.ts +++ /dev/null @@ -1,192 +0,0 @@ -import assert from "assert"; - -type JobberHandlerRequestData = { - type: "http" | "schedule" | "mqtt"; - name?: string; - headers: Record; - query: Record; - queries: Record; - path: string; - method: string; - topic: string; - body: string; - bodyLength: number; -}; - -export class JobberHandlerRequest { - private _type: JobberHandlerRequestData["type"]; - private _name: JobberHandlerRequestData["name"]; - private _headers: JobberHandlerRequestData["headers"] = {}; - private _query: JobberHandlerRequestData["query"] = {}; - private _queries: JobberHandlerRequestData["queries"] = {}; - private _path: JobberHandlerRequestData["path"] = ""; - private _method: JobberHandlerRequestData["method"] = ""; - private _topic: JobberHandlerRequestData["topic"] = ""; - private _body?: Buffer; - private _bodyLength?: number; - - constructor(data: JobberHandlerRequestData) { - this._type = data.type; - this._name = data.name; - - if (this._type === "http") { - this._headers = data.headers; - } - - if (this._type === "http") { - this._query = data.query; - } - - if (this._type === "http") { - this._queries = data.queries; - } - - if (this._type === "http") { - this._path = data.path; - } - - if (this._type === "http") { - this._method = data.method; - } - - if (this._type === "mqtt") { - this._topic = data.topic; - } - - if (this._type === "http" || this._type == "mqtt") { - this._body = data.body - ? Buffer.from(data.body, "base64") - : Buffer.alloc(0); - } - - if (this._type === "http" || this._type == "mqtt") { - this._bodyLength = data.bodyLength; - } - } - - type() { - return this._type; - } - - name() { - return this._name ?? 
null; - } - - header(name: string) { - if (this._type !== "http") { - throw new Error("[JobberHandlerRequest/header] Expecting type of http"); - } - - const key = name.toLowerCase(); - - if (this._headers[key]) { - return this._headers[key]; - } - - return null; - } - - query(name: string) { - if (this._type !== "http") { - throw new Error("[JobberHandlerRequest/header] Expecting type of http"); - } - - const key = name.toLowerCase(); - - if (this._query[key]) { - return this._query[key]; - } - - return null; - } - - queries(name: string) { - if (this._type !== "http") { - throw new Error("[JobberHandlerRequest/header] Expecting type of http"); - } - - const key = name.toLowerCase(); - - if (this._queries[key]) { - return this._queries[key]; - } - - return null; - } - - method() { - if (this._type !== "http") { - throw new Error("[JobberHandlerRequest/header] Expecting type of http"); - } - - return this._method; - } - - path() { - if (this._type !== "http") { - throw new Error("[JobberHandlerRequest/header] Expecting type of http"); - } - - return this._path; - } - - topic() { - if (this._type !== "mqtt") { - throw new Error("[JobberHandlerRequest/header] Expecting type of mqtt"); - } - - return this._topic; - } - - json(): T { - if (this._type !== "http" && this._type !== "mqtt") { - throw new Error("[JobberHandlerRequest/header] Expecting type of http"); - } - - assert(this._body); - - return JSON.parse(this._body.toString()) as T; - } - - text() { - if (this._type !== "http" && this._type !== "mqtt") { - throw new Error("[JobberHandlerRequest/header] Expecting type of http"); - } - - assert(this._body); - - return this._body.toString(); - } - - data() { - if (this._type !== "http" && this._type !== "mqtt") { - throw new Error("[JobberHandlerRequest/header] Expecting type of http"); - } - - return this._body; - } - - getHttpRequest(): Request { - if (this._type !== "http") { - throw new Error( - "[JobberHandlerRequest/getHttpRequest] Expecting type of http" - ); 
- } - - const urlScheme = "https"; - const urlHost = this.header("host") ?? "localhost"; - const urlPath = this._path; - const urlQuery = new URLSearchParams(this._query); - const body = - this._method === "GET" || this._method === "HEAD" - ? undefined - : this._body; - - return new Request(`${urlScheme}://${urlHost}${urlPath}?${urlQuery}`, { - headers: this._headers, - method: this._method, - body: body, - redirect: "manual", - }); - } -} diff --git a/packages/runner-node-entrypoint/src/response.ts b/packages/runner-node-entrypoint/src/response.ts deleted file mode 100644 index 8b81f73..0000000 --- a/packages/runner-node-entrypoint/src/response.ts +++ /dev/null @@ -1,190 +0,0 @@ -import assert from "assert"; -import { JobberHandlerRequest } from "./request.js"; - -export class JobberHandlerResponse { - private _request: JobberHandlerRequest; - public _status?: number; - public _headers?: Record; - public _body?: Buffer[]; - - // TODO: Remove this in a later revision, deprecated way of publishing MQTT events. 
- public _publish?: Array<{ topic: string; body: Buffer }>; - - constructor(request: JobberHandlerRequest) { - this._request = request; - - if (this._request.type() === "http") { - /** - * @type {number} - */ - this._status = 200; - } - - if (this._request.type() === "http") { - /** - * @private - * @type {Record} - */ - this._headers = {}; - } - - if (this._request.type() === "http") { - /** - * @private - * @type {Buffer[]} - */ - this._body = []; - } - - if (this._request.type() === "mqtt") { - /** - * @private - * @type {Array<{topic: string, body: Buffer}>} - */ - this._publish = []; - } - } - - header(name: string, value: string) { - if (this._request.type() !== "http") { - throw new Error("Expecting request type of http"); - } - - assert(typeof name === "string"); - assert(typeof value === "string"); - assert(this._headers); - - this._headers[name.toLowerCase()] = value; - - return this; - } - - status(status: number) { - if (this._request.type() !== "http") { - throw new Error("Expecting request type of http"); - } - - assert(typeof status === "number"); - - this._status = status; - - return this; - } - - redirect(path: string, status = 303) { - if (this._request.type() !== "http") { - throw new Error("Expecting request type of http"); - } - - assert(typeof path === "string"); - assert(typeof status === "number"); - assert(this._headers); - - this._headers["Location"] = path; - - this._status = status; - - return this; - } - - json(data: any, status = 200) { - if (this._request.type() !== "http") { - throw new Error("Expecting request type of http"); - } - - assert(typeof status === "number"); - assert(this._body); - - this.header("Content-Type", "application/json"); - - const removed = this._body.splice(0, this._body.length).length; - if (removed > 0) { - console.warn( - `[JobberHandlerResponse] json() called, but body was not empty. 
Cleared ${removed} buffers.` - ); - } - - this._body.push(Buffer.from(JSON.stringify(data))); - - this._status = status; - - return this; - } - - text(data: string, status = 200) { - if (this._request.type() !== "http") { - throw new Error("Expecting request type of http"); - } - - assert(typeof data === "string"); - assert(typeof status === "number"); - assert(this._body); - - this.header("Content-Type", "text/plain"); - - const removed = this._body.splice(0, this._body.length).length; - if (removed > 0) { - console.warn( - `[JobberHandlerResponse] text() called, but body was not empty. Cleared ${removed} buffers.` - ); - } - - this._body.push(Buffer.from(data)); - - this._status = status; - - return this; - } - - chunk(data: Buffer) { - if (this._request.type() !== "http") { - throw new Error("Expecting request type of http"); - } - - assert(data instanceof Buffer); - assert(this._body); - - this._body.push(data); - - return this; - } - - // TODO: Remove this in a later revision, deprecated way of publishing MQTT events. 
- publish(topic: string, body: string | Buffer | any) { - if (this._request.type() !== "mqtt") { - throw new Error("Unable to publish to non-mqtt request"); - } - - assert(typeof topic === "string"); - assert(this._publish); - - if (typeof body === "string") { - this._publish.push({ - topic, - body: Buffer.from(body), - }); - - return; - } - - if (body instanceof Buffer) { - this._publish.push({ - topic, - body: Buffer.from(body), - }); - - return; - } - - if (typeof body === "object") { - this._publish.push({ - topic, - body: Buffer.from(JSON.stringify(body)), - }); - - return; - } - - throw new Error("unexpected type of body"); - } -} diff --git a/packages/runner-node-entrypoint/src/runner-client.ts b/packages/runner-node-entrypoint/src/runner-client.ts new file mode 100644 index 0000000..4dfe24e --- /dev/null +++ b/packages/runner-node-entrypoint/src/runner-client.ts @@ -0,0 +1,165 @@ +import { ChannelImplementation } from "@grpc/grpc-js/build/src/channel.js"; +import { LoopBase } from "@jobber/common/loop-base.js"; +import { GeneralAPIDefinition } from "@jobber/grpc/general.js"; +import { + ChannelCredentials, + Client, + createChannel, + createClient, + Metadata, +} from "nice-grpc"; +import { Runner } from "./runner.js"; +import { getUnixTimestamp } from "./util.js"; +import { RunnerOptions } from "./options.js"; + +type CachedToken = { + accessToken: string; + tokenType: string; + expiresAt: number; + renewsAt: number; + originalTtl: number; +}; + +export class RunnerClient extends LoopBase { + protected loopDuration = 10_000; + + protected loopStarted = undefined; + protected loopClosing = undefined; + + private cachedToken: CachedToken | null = null; + private cachedMetadata = Metadata(); + + private channel: ChannelImplementation | null = null; + private client: Client | null = null; + + protected async loopIteration() { + await this.checkClient(); + } + + constructor( + private runner: Runner, + private options: RunnerOptions, + ) { + super(); + } + + 
private async checkClient() { + if (this.cachedToken && getUnixTimestamp() < this.cachedToken.renewsAt) { + return; + } + + try { + if (this.options.runnerDebug) { + console.log(`[RunnerClient/loopIteration] Fetching new OAuth token...`); + } + + const token = await this.createAuth(); + + this.cachedToken = { + accessToken: token.accessToken, + tokenType: token.tokenType, + expiresAt: getUnixTimestamp() + token.expiresIn, + renewsAt: getUnixTimestamp() + Math.floor(token.expiresIn * 0.6), + originalTtl: token.expiresIn, + }; + + this.cachedMetadata.set( + "Authorization", + `${token.tokenType} ${token.accessToken}`, + ); + + if (this.options.runnerDebug) { + console.log( + `[RunnerClient/loopIteration] Obtained new OAuth token, expires in ${token.expiresIn} seconds.`, + ); + } + } catch (err) { + console.error( + `[RunnerClient/loopIteration] Failed to fetch OAuth token:`, + err, + ); + } + } + + private async createAuth() { + const response = await fetch(this.options.runnerOAuthTokenEndpoint, { + method: "POST", + headers: { + "Content-Type": "application/x-www-form-urlencoded", + }, + body: new URLSearchParams({ + grant_type: "client_credentials", + client_id: this.options.runnerClientId, + client_secret: this.options.runnerClientSecret, + }), + }); + + if (!response.ok) { + throw new Error( + `Failed to fetch OAuth token: ${response.status} ${response.statusText}`, + ); + } + + const data = await response.json(); + + const accessToken = data.access_token as string; + const tokenType = data.token_type as string; + const expiresIn = data.expires_in as number; + + if ( + !accessToken || + !tokenType || + !expiresIn || + typeof accessToken !== "string" || + typeof tokenType !== "string" || + typeof expiresIn !== "number" + ) { + throw new Error(`Invalid OAuth token response: ${JSON.stringify(data)}`); + } + + return { + accessToken, + tokenType, + expiresIn, + }; + } + + protected async loopStarting() { + await this.checkClient(); + + this.channel = createChannel( + 
this.options.runnerGeneralApiEndpoint, + ChannelCredentials.createInsecure(), + { + "grpc.keepalive_permit_without_calls": 1, + }, + ); + + this.client = createClient(GeneralAPIDefinition, this.channel, { + "*": { + metadata: this.cachedMetadata, + }, + }); + } + + protected async loopClosed() { + if (this.channel) { + this.channel.close(); + this.channel = null; + } + + this.client = null; + + this.cachedToken = null; + + this.cachedMetadata = Metadata(); + } + + get methods() { + if (!this.client) { + throw new Error("[RunnerClient/methods] Client not initialized yet"); + } + + return this.client; + } +} diff --git a/packages/runner-node-entrypoint/src/runner-server.ts b/packages/runner-node-entrypoint/src/runner-server.ts new file mode 100644 index 0000000..97c7a92 --- /dev/null +++ b/packages/runner-node-entrypoint/src/runner-server.ts @@ -0,0 +1,318 @@ +import { BouncerBase } from "@jobber/common/bouncer-base.js"; +import { deferred } from "@jobber/common/deferred.js"; +import { JobberPermissionsSchema } from "@jobber/common/permissions.js"; +import { RunnerAPIDefinition } from "@jobber/grpc/runner.js"; +import { createRemoteJWKSet, jwtVerify } from "jose"; +import { JOSEError } from "jose/errors"; +import { + CallContext, + createServer, + ServerError, + ServiceImplementation, + Status, +} from "nice-grpc"; +import assert from "node:assert"; +import { HttpContext } from "./context/http.js"; +import { LegacyContext } from "./context/legacy-context.js"; +import { LegacyContextRequest } from "./context/legacy-request.js"; +import { LegacyContextResponse } from "./context/legacy-response.js"; +import { MqttContext } from "./context/mqtt.js"; +import { ScheduleContext } from "./context/schedule.js"; +import { Runner } from "./runner.js"; +import { getOAuthAudienceRunnerApi } from "@jobber/common/oauth.js"; +import { RunnerOptions } from "./options.js"; + +export class RunnerServer { + private jwks: ReturnType; + + private server: ReturnType | null = null; + + 
constructor( + private runner: Runner, + private options: RunnerOptions, + ) { + this.jwks = createRemoteJWKSet( + new URL(this.options.runnerOAuthJwksEndpoint), + { + cacheMaxAge: 5 * 60 * 1000, + }, + ); + } + + private async getBouncer(context: CallContext) { + try { + let token = context.metadata.get("Authorization"); + + if (!token) { + throw new ServerError(Status.UNAUTHENTICATED, "Unauthenticated"); + } + + if (token.startsWith("Bearer ")) { + token = token.slice("Bearer ".length); + } + + const { payload } = await jwtVerify(token, this.jwks, { + issuer: this.options.runnerOAuthIssuer, + audience: getOAuthAudienceRunnerApi(this.options.runnerId), + }); + + const permissions = await JobberPermissionsSchema.parseAsync( + payload.permissions, + ); + + return new BouncerBase(permissions); + } catch (err) { + if (err instanceof ServerError) { + throw err; + } + + if (err instanceof JOSEError) { + console.log("gRPC Unauthorized error:", err); + throw new ServerError(Status.UNAUTHENTICATED, "Unauthenticated"); + } + + console.log("gRPC Internal server error:", err); + throw new ServerError(Status.INTERNAL, "Internal server error"); + } + } + + private getServiceImplementation( + thisWas: this, + ): ServiceImplementation { + return { + status: async (request, context) => { + const bouncer = await thisWas.getBouncer(context); + + if (!bouncer.canReadRunnerStatus({ id: thisWas.runner.jobId })) { + throw new ServerError( + Status.PERMISSION_DENIED, + "Permission denied to read job status", + ); + } + + if (thisWas.runner.status === "starting") { + return { + status: "STARTING", + lastRequestAt: thisWas.runner.telemetry.lastRequestAt, + loadAverage5Second: thisWas.runner.telemetry.loadAverage5Second, + loadAverage60Second: thisWas.runner.telemetry.loadAverage60Second, + }; + } + + if (thisWas.runner.status === "running") { + return { + status: "READY", + lastRequestAt: thisWas.runner.telemetry.lastRequestAt, + loadAverage5Second: 
thisWas.runner.telemetry.loadAverage5Second, + loadAverage60Second: thisWas.runner.telemetry.loadAverage60Second, + }; + } + + if (thisWas.runner.status === "closing") { + return { + status: "CLOSING", + lastRequestAt: thisWas.runner.telemetry.lastRequestAt, + loadAverage5Second: thisWas.runner.telemetry.loadAverage5Second, + loadAverage60Second: thisWas.runner.telemetry.loadAverage60Second, + }; + } + + if (thisWas.runner.status === "pending") { + return { + status: "CLOSED", + lastRequestAt: thisWas.runner.telemetry.lastRequestAt, + loadAverage5Second: thisWas.runner.telemetry.loadAverage5Second, + loadAverage60Second: thisWas.runner.telemetry.loadAverage60Second, + }; + } + + return { + status: "UNRECOGNIZED", + lastRequestAt: 0, + loadAverage5Second: 0, + loadAverage60Second: 0, + }; + }, + + eventHttp: async function* (request, context) { + const bouncer = await thisWas.getBouncer(context); + + if (!bouncer.canInvokeRunnerHttpEvent({ id: thisWas.runner.jobId })) { + throw new ServerError( + Status.PERMISSION_DENIED, + "Permission denied to invoke HTTP event", + ); + } + + const httpContext = new HttpContext(thisWas.runner, request); + const executionComplete = deferred(); + + // Ensure headers are received before processing event + httpContext.request._startStreamingEvents(); // Do not await + await httpContext.request.receivedHeadersPromise; // Resolves when headers are received. 
+ + thisWas.runner.telemetry.notifyRequest(); + + if (thisWas.runner.module.handlerHttp) { + setImmediate(async () => { + assert( + thisWas?.runner?.module?.handlerHttp, + "HandlerHttp should exist", + ); + + const result = thisWas.runner.module.handlerHttp(httpContext); + + if (result instanceof Promise) { + await result; + } + + await httpContext.response._finished(); + + executionComplete.resolve(); + }); + } else if (thisWas.runner.module.handler) { + const legacyRequest = new LegacyContextRequest(httpContext); + await legacyRequest._externalProcess(); // Legacy method streamed entire body into memory (yuck) + + const legacyResponse = new LegacyContextResponse(httpContext); + const legacyContext = new LegacyContext(thisWas.runner); + + setImmediate(async () => { + assert(thisWas?.runner?.module?.handler, "Handler should exist"); + + const result = thisWas.runner.module.handler( + legacyRequest, + legacyResponse, + legacyContext, + ); + + if (result instanceof Promise) { + await result; + } + + legacyResponse._externalProcess(); + + await httpContext.response._finished(); + + executionComplete.resolve(); + }); + } else { + throw new ServerError( + Status.NOT_FOUND, + "No http handler implemented in runner", + ); + } + + yield* httpContext.response._createResponse(); + + await executionComplete.promise; + + if (thisWas.options.runnerDebug) { + console.log("HTTP event processing complete"); + } + }, + + eventMqtt: async (request, context) => { + const bouncer = await thisWas.getBouncer(context); + + if (!bouncer.canInvokeRunnerMqttEvent({ id: thisWas.runner.jobId })) { + throw new ServerError( + Status.PERMISSION_DENIED, + "Permission denied to invoke MQTT event", + ); + } + + const mqttContext = new MqttContext(thisWas.runner, request); + + thisWas.runner.telemetry.notifyRequest(); + + if (thisWas.runner.module.handlerMqtt) { + const result = thisWas.runner.module.handlerMqtt(mqttContext); + + if (result instanceof Promise) { + await result; + } + } else if 
(thisWas.runner.module.handler) { + const legacyRequest = new LegacyContextRequest(mqttContext); + const legacyResponse = new LegacyContextResponse(mqttContext); + const legacyContext = new LegacyContext(thisWas.runner); + + const result = thisWas.runner.module.handler( + legacyRequest, + legacyResponse, + legacyContext, + ); + + if (result instanceof Promise) { + await result; + } + } else { + throw new ServerError( + Status.NOT_FOUND, + "No mqtt handler implemented in runner", + ); + } + + return mqttContext._createResponse(); + }, + + eventSchedule: async (request, context) => { + const bouncer = await thisWas.getBouncer(context); + + if ( + !bouncer.canInvokeRunnerScheduleEvent({ id: thisWas.runner.jobId }) + ) { + throw new ServerError( + Status.PERMISSION_DENIED, + "Permission denied to invoke Schedule event", + ); + } + + const scheduleContext = new ScheduleContext(this.runner, request); + + thisWas.runner.telemetry.notifyRequest(); + + if (this.runner.module.handlerSchedule) { + const result = this.runner.module.handlerSchedule(scheduleContext); + + if (result instanceof Promise) { + await result; + } + } else if (this.runner.module.handler) { + const legacyRequest = new LegacyContextRequest(scheduleContext); + const legacyResponse = new LegacyContextResponse(scheduleContext); + const legacyContext = new LegacyContext(this.runner); + + const result = this.runner.module.handler( + legacyRequest, + legacyResponse, + legacyContext, + ); + + if (result instanceof Promise) { + await result; + } + } else { + throw new ServerError( + Status.NOT_FOUND, + "No schedule handler implemented in runner", + ); + } + + return scheduleContext._createResponse(); + }, + }; + } + + public async start() { + this.server = createServer(); + + this.server.add(RunnerAPIDefinition, this.getServiceImplementation(this)); + + await this.server.listen(`0.0.0.0:${this.options.runnerApiPort}`); + } + + public async stop() { + await this.server?.shutdown(); + } +} diff --git 
a/packages/runner-node-entrypoint/src/runner.ts b/packages/runner-node-entrypoint/src/runner.ts index 9e178a3..80abe82 100644 --- a/packages/runner-node-entrypoint/src/runner.ts +++ b/packages/runner-node-entrypoint/src/runner.ts @@ -1,503 +1,289 @@ -import assert from "assert"; -import { randomBytes } from "crypto"; -import { readFile, writeFile } from "fs/promises"; -import { TcpFrameSocket } from "@jobber/tcp-frame-socket"; -import { getTmpFile, shortenString, timeout, unzip } from "./util.js"; -import { JobberHandlerRequest } from "./request.js"; -import { JobberHandlerResponse } from "./response.js"; -import { JobberHandlerContext } from "./context.js"; - -type FrameJson = { - runnerId: string; - name: string; - traceId: string; - dataType: "buffer" | "json"; -}; - -type StoreItem = { - key: string; - value: string; - expiry: number | null; - created: number; - modified: number; +import * as grpcRunner from "@jobber/grpc/basics/runner.js"; +import * as grpcAction from "@jobber/grpc/basics/action.js"; +import assert from "node:assert"; +import { open, readFile } from "node:fs/promises"; +import path from "node:path"; +import { HttpContext } from "./context/http.js"; +import { MqttContext } from "./context/mqtt.js"; +import { ScheduleContext } from "./context/schedule.js"; +import { RunnerClient } from "./runner-client.js"; +import { RunnerServer } from "./runner-server.js"; +import { fileExists, getTempFilePath, unzip } from "./util.js"; +import { validatePackageJson } from "./validator.js"; +import { Telemetry } from "./telemetry.js"; +import { deferred, Deferred } from "@jobber/common/deferred.js"; +import { RunnerOptions } from "./options.js"; +import { GlobalContext } from "./context/global-context.js"; + +type Status = "pending" | "starting" | "running" | "closing"; + +type RunnerExpectedModule = { + handler?: ( + request: unknown, + response: unknown, + context: unknown, + ) => Promise | unknown; + + bootstrap?: () => Promise | void; + + handlerHttp?: 
(context: HttpContext) => Promise | unknown; + + handlerSchedule?: (context: ScheduleContext) => Promise | unknown; + + handlerMqtt?: (context: MqttContext) => Promise | unknown; }; export class Runner { - private hostname: string; - private port: number; - private runnerId: string; - private debug: boolean; - - private isShuttingDown: boolean = false; - private handleRequestsProcessing: number = 0; - - private socket: TcpFrameSocket; - - private traceResponses = new Map< - string, - (frame: FrameJson, data: Buffer) => void - >(); - - constructor( - hostname: string, - port: number, - runnerId: string, - debug: boolean - ) { - this.hostname = hostname; - this.port = port; - this.runnerId = runnerId; - this.debug = debug; - - this.socket = new TcpFrameSocket(); - - this.socket.on("frame", (frame) => { - this.onFrame(frame); - }); + private _status: Status = "pending"; - this.socket.on("close", () => { - this.debugLog("[Runner] Received close events!"); - }); - } + private _statusPromise: { + starting: Deferred; + running: Deferred; + closing: Deferred; + pending: Deferred; + }; - async connect() { - await this.socket.connect({ - host: this.hostname, - port: this.port, - }); + protected _server: RunnerServer; - const traceId = `InitTraceId-${randomBytes(16).toString("hex")}`; + protected _client: RunnerClient; - this.traceResponses.set(traceId, async (frame, data) => { - assert(frame.dataType === "buffer"); + protected _telemetry: Telemetry; - const zipFile = getTmpFile({ extension: "zip" }); + private _runnerInfo: grpcRunner.Item | null = null; - await writeFile(zipFile, data); + private _actionInfo: grpcAction.Item | null = null; - await unzip(zipFile, process.cwd()); + private _module: RunnerExpectedModule | null = null; - await this.writeFrame( - { - name: "ready", - runnerId: frame.runnerId, - traceId: `ready-${randomBytes(16).toString("hex")}`, - dataType: "buffer", - }, - Buffer.alloc(0) - ); + constructor(private options: RunnerOptions) { + 
this._statusPromise = { + starting: deferred(), + running: deferred(), + closing: deferred(), + pending: deferred(), + }; - return; - }); + this._telemetry = new Telemetry(); - await this.writeFrame( - { - name: "init", - traceId: traceId, - runnerId: this.runnerId, - dataType: "buffer", - }, - Buffer.alloc(0) - ); - } - - sendStoreSet( - key: string, - value: string, - option?: { ttl?: number } - ): Promise { - const ttl = option?.ttl ?? null; - - const traceId = `StoreSetTraceId-${randomBytes(24).toString("hex")}`; - - return new Promise((resolve, reject) => { - let finished = false; - - const timeoutHandle = setTimeout(() => { - if (finished) { - return; - } + this._client = new RunnerClient(this, options); - finished = true; - - this.traceResponses.delete(traceId); - - return reject(new Error("Store set request timed out")); - }, 10000); - - this.traceResponses.set(traceId, (frame, data) => { - assert(frame.dataType === "json"); - - if (finished) { - return; - } - - finished = true; - - clearTimeout(timeoutHandle); - - this.traceResponses.delete(traceId); - - const body = JSON.parse(data.toString()) as StoreItem; - - return resolve(body); - }); - - this.writeFrame( - { - name: "store-set", - runnerId: this.runnerId, - dataType: "json", - traceId: traceId, - }, - Buffer.from( - JSON.stringify({ - key, - value, - ttl, - }) - ) - ); - }); + this._server = new RunnerServer(this, options); } - sendStoreGet(key: string): Promise { - const traceId = `StoreGetTraceId-${randomBytes(24).toString("hex")}`; + async start() { + this._status = "starting"; + this._statusPromise.starting.resolve(); - return new Promise((resolve, reject) => { - let finished = false; + await this._client.start(); - const timeoutHandle = setTimeout(() => { - if (finished) { - return; - } + await this._server.start(); - finished = true; + (globalThis as any).jobber = new GlobalContext(this); - this.traceResponses.delete(traceId); + await this.bootstrap(); - return reject(new Error("Store get 
request timed out")); - }, 10000); - - this.traceResponses.set(traceId, (frame, data) => { - assert(frame.dataType === "json"); - - if (finished) { - return; - } - - finished = true; - - clearTimeout(timeoutHandle); - - this.traceResponses.delete(traceId); - - const body = JSON.parse(data.toString()) as StoreItem; - - return resolve(body); - }); - - this.writeFrame( - { - name: "store-get", - runnerId: this.runnerId, - dataType: "json", - traceId: traceId, - }, - Buffer.from( - JSON.stringify({ - key, - }) - ) - ); - }); + this._status = "running"; + this._statusPromise.running.resolve(); } - sendStoreDelete(key: string): Promise { - const traceId = `StoreDeleteTraceId-${randomBytes(24).toString("hex")}`; - - return new Promise((resolve, reject) => { - let finished = false; - - const timeoutHandle = setTimeout(() => { - if (finished) { - return; - } - - finished = true; - - this.traceResponses.delete(traceId); + async stop() { + this._status = "closing"; + this._statusPromise.closing.resolve(); - return reject(new Error("Store delete request timed out")); - }, 10000); + if (this.options.runnerDebug) { + console.info("Shutting down gRPC server..."); + } - this.traceResponses.set(traceId, (frame, data) => { - assert(frame.dataType === "json"); + await this._server.stop(); - if (finished) { - return; - } - - finished = true; + if (this.options.runnerDebug) { + console.info("gRPC server shut down successfully."); + console.info("Shutting down gRPC client..."); + } - clearTimeout(timeoutHandle); + await this._client.stop(); - this.traceResponses.delete(traceId); + if (this.options.runnerDebug) { + console.info("gRPC client shut down successfully."); + } - const body = JSON.parse(data.toString()) as StoreItem; + delete (globalThis as any).jobber; - return resolve(body); - }); + this._status = "pending"; + this._statusPromise.pending.resolve(); - this.writeFrame( - { - name: "store-delete", - runnerId: this.runnerId, - dataType: "json", - traceId: traceId, - }, - 
Buffer.from( - JSON.stringify({ - key, - }) - ) - ); - }); + this._statusPromise = { + starting: deferred(), + running: deferred(), + closing: deferred(), + pending: deferred(), + }; } - sendMqttPublish(topic: string, body: Buffer): Promise { - const traceId = `MqttPublishTraceId-${randomBytes(24).toString("hex")}`; - - return new Promise((resolve, reject) => { - let finished = false; - - const timeoutHandle = setTimeout(() => { - if (finished) { - return; - } - - finished = true; - - this.traceResponses.delete(traceId); - - return reject(new Error("MQTT publish request timed out")); - }, 10000); - - this.traceResponses.set(traceId, (frame, data) => { - assert(frame.dataType === "json"); - - if (finished) { - return; - } - - finished = true; - - clearTimeout(timeoutHandle); - - this.traceResponses.delete(traceId); - - const result = JSON.parse(data.toString()) as boolean; + async populateRunnerInfo() { + const runnerResponse = await this._client.methods.getRunner({ + runnerId: this.options.runnerId, + }); - return resolve(result); - }); + if (!runnerResponse || !runnerResponse.runner) { + throw new Error(`Runner with ID ${this.options.runnerId} not found`); + } - this.writeFrame( - { - name: "mqtt-publish", - runnerId: this.runnerId, - dataType: "json", - traceId: traceId, - }, - Buffer.from( - JSON.stringify({ - topic, - body: body.toString("base64"), - }) - ) - ); + const actionResponse = await this._client.methods.getJobAction({ + actionId: runnerResponse.runner.actionId, + jobId: runnerResponse.runner.jobId, }); - } - async writeFrame(frame: FrameJson, data: Buffer) { - const buffer = Buffer.concat([ - Buffer.from(JSON.stringify(frame)), - Buffer.from("\n"), - data, - ]); + if (!actionResponse || !actionResponse.action) { + throw new Error( + `Action with ID ${runnerResponse.runner.actionId} not found`, + ); + } - await this.socket.writeFrame(buffer); + this._runnerInfo = runnerResponse.runner; + this._actionInfo = actionResponse.action; } - async 
onFrame(buffer: Buffer) { - const separator = buffer.indexOf("\n"); + async downloadArchive() { + assert( + this._runnerInfo, + "Runner info must be populated before bootstrapping", + ); - assert(separator > 0); + const archiveStream = this._client.methods.getJobVersionArchive({ + jobVersionId: this._runnerInfo.versionId, + jobId: this._runnerInfo.jobId, + }); - const chunkJson = buffer.subarray(0, separator); - const bodyBuffer = buffer.subarray(separator + 1); + const archiveFilename = getTempFilePath({ + extension: "zip", + }); - const frame = JSON.parse(chunkJson.toString("utf8")) as FrameJson; + const archiveHandle = await open(archiveFilename, "w"); - if (frame.name === "response") { - const traceResponseCallback = this.traceResponses.get(frame.traceId); + try { + for await (const chunk of archiveStream) { + await archiveHandle.write(chunk.data); - if (!traceResponseCallback) { - return; + if (chunk.end) { + await archiveHandle.close(); + } } - traceResponseCallback(frame, bodyBuffer); + return archiveFilename; + } catch (err) { + await archiveHandle.close(); - return; + throw err; } + } - if (frame.name === "handle") { - if (this.isShuttingDown) { - console.warn( - `[Runner/onFrame] ${frame.name} event received while shutting down` - ); - - return; - } + async bootstrap() { + await this.populateRunnerInfo(); - assert(frame.dataType === "json"); + const archiveFilename = await this.downloadArchive(); - const data = JSON.parse(bodyBuffer.toString()); + await unzip(archiveFilename, process.cwd()); - await this.onFrameHandle(frame, data); + const pathPackageJson = path.join(process.cwd(), "package.json"); - return; + if (!(await fileExists(pathPackageJson))) { + throw new Error("package.json not found in job archive"); } - if (frame.name === "shutdown") { - await this.onFrameShutdown(frame.traceId); + const contentPackageJson = await readFile(pathPackageJson, "utf8"); + const contentPackageJsonParsed = JSON.parse(contentPackageJson); + const 
contentPackageJsonValidated = validatePackageJson( + contentPackageJsonParsed, + ); - return; + if (!contentPackageJsonValidated.success) { + throw new Error( + `package.json validation failed: ${contentPackageJsonValidated.errors.join( + ", ", + )}`, + ); } - throw new Error(`Unexpected transaction name ${frame.name}`); - } - - async onFrameHandle(frame: FrameJson, data: any) { - const start = performance.now(); - - this.handleRequestsProcessing++; - - this.debugLog( - `[Runner/onFrameHandle] Starting, traceId ${shortenString(frame.traceId)}` + const pathMain = path.join( + process.cwd(), + contentPackageJsonValidated.data.main || "index.js", ); - try { - const packageJson = JSON.parse(await readFile("./package.json", "utf8")); - - if (typeof packageJson.main !== "string") { - throw new Error( - "Failed to load package.json, property 'main' is not present or not a string" - ); - } - - const clientModule = await import(packageJson.main); - - const jobberRequest = new JobberHandlerRequest(data); - const jobberResponse = new JobberHandlerResponse(jobberRequest); - const jobberContext = new JobberHandlerContext( - this, - jobberRequest, - jobberResponse + const module = (await import(pathMain)) as RunnerExpectedModule; + + // Validate it has at least one handler + if ( + typeof module.handler !== "function" && + typeof module.handlerHttp !== "function" && + typeof module.handlerSchedule !== "function" && + typeof module.handlerMqtt !== "function" + ) { + throw new Error( + "No handler function found. 
Please export a handler, handlerHttp, handlerSchedule, or handlerMqtt function.", ); + } - await clientModule.handler(jobberRequest, jobberResponse, jobberContext); - - const responseData: any = { - success: true, - duration: performance.now() - start, - }; - - if (jobberRequest.type() === "http") { - assert(jobberResponse._body); + if (typeof module.bootstrap === "function") { + const bootstrapResult = module.bootstrap(); - responseData.http = { - status: jobberResponse._status, - headers: jobberResponse._headers, - body: Buffer.concat(jobberResponse._body).toString("base64"), - }; + if (bootstrapResult instanceof Promise) { + await bootstrapResult; } + } - if (jobberRequest.type() === "mqtt") { - assert(jobberResponse._publish); + this._module = module; + } - // TODO: Remove this in a later revision, deprecated way of publishing MQTT events. - await Promise.all( - jobberResponse._publish.map(async (pub) => { - console.warn(`@deprecated publish ${pub.topic}`); - await this.sendMqttPublish(pub.topic, pub.body); - }) - ); - } + public get status() { + return this._status; + } - await this.writeFrame( - { - name: "response", - runnerId: this.runnerId, - traceId: frame.traceId, - dataType: "json", - }, - Buffer.from(JSON.stringify(responseData)) - ); + public get server() { + return this._server; + } - this.debugLog( - "[Runner/onFrameHandle] Delivered response, traceId", - shortenString(frame.traceId) - ); - } catch (err) { - if (!(err instanceof Error)) { - console.error(err); - return; - } + public get client() { + return this._client; + } - this.debugLog( - "[Runner/onFrameHandle] Failed due to error, traceId", - shortenString(frame.traceId) - ); + public get telemetry() { + return this._telemetry; + } - console.error(err); - - await this.writeFrame( - { - name: "response", - runnerId: this.runnerId, - traceId: frame.traceId, - dataType: "json", - }, - Buffer.from( - JSON.stringify({ - success: false, - duration: performance.now() - start, - error: err.toString(), 
- }) - ) - ); - } finally { - this.handleRequestsProcessing--; + public get module() { + if (!this._module) { + throw new Error("Module not loaded yet"); } + + return this._module; } - async onFrameShutdown(traceId: string) { - this.debugLog("[Runner/onFrameShutdown] Starting shutdown routine"); + public get jobId() { + if (!this._runnerInfo) { + throw new Error("Runner info not loaded yet"); + } - this.isShuttingDown = true; + return this._runnerInfo.jobId; + } - while (this.handleRequestsProcessing > 0) { - await timeout(100); + public get runnerInfo() { + if (!this._runnerInfo) { + throw new Error("Runner info not loaded yet"); } - this.socket.end(() => { - process.exit(); - }); + return this._runnerInfo; } - private debugLog(...args: any[]) { - if (this.debug) { - console.log("[Runner]", ...args); + public get actionInfo() { + if (!this._actionInfo) { + throw new Error("Action info not loaded yet"); } + + return this._actionInfo; + } + + public get statusPromises() { + return this._statusPromise; } } diff --git a/packages/runner-node-entrypoint/src/telemetry.ts b/packages/runner-node-entrypoint/src/telemetry.ts new file mode 100644 index 0000000..9072ffe --- /dev/null +++ b/packages/runner-node-entrypoint/src/telemetry.ts @@ -0,0 +1,50 @@ +import { getUnixTimestamp } from "./util.js"; + +export class Telemetry { + private _lastRequestAt = 0; + + private _loadAverageBuckets = new Map(); + + public notifyRequest() { + this._lastRequestAt = getUnixTimestamp(); + + const now = getUnixTimestamp(); + + const bucket5Second = (Math.floor(now / 1) * 1).toString(); + const bucket60Second = (Math.floor(now / 60) * 60).toString(); + + this._loadAverageBuckets.set( + bucket5Second, + (this._loadAverageBuckets.get(bucket5Second) || 0) + 1, + ); + + this._loadAverageBuckets.set( + bucket60Second, + (this._loadAverageBuckets.get(bucket60Second) || 0) + 1, + ); + + for (const [key] of this._loadAverageBuckets) { + if (parseInt(key) < now - 300) { + 
this._loadAverageBuckets.delete(key); + } + } + } + + public get loadAverage5Second() { + const now = getUnixTimestamp(); + const bucket5Second = (Math.floor(now / 1) * 1).toString(); + + return this._loadAverageBuckets.get(bucket5Second) || 0; + } + + public get loadAverage60Second() { + const now = getUnixTimestamp(); + const bucket60Second = (Math.floor(now / 60) * 60).toString(); + + return this._loadAverageBuckets.get(bucket60Second) || 0; + } + + public get lastRequestAt() { + return this._lastRequestAt; + } +} diff --git a/packages/runner-node-entrypoint/src/util.ts b/packages/runner-node-entrypoint/src/util.ts index df86a0f..7174965 100644 --- a/packages/runner-node-entrypoint/src/util.ts +++ b/packages/runner-node-entrypoint/src/util.ts @@ -1,42 +1,63 @@ -import { spawn } from "child_process"; -import { randomBytes } from "crypto"; -import { tmpdir } from "os"; -import path from "path"; - -export const getArgument = (name: string) => { - const index = process.argv.indexOf(`--${name}`); - - if (index < 0) { - return null; +import { spawn } from "node:child_process"; +import { randomBytes } from "node:crypto"; +import { stat } from "node:fs/promises"; +import { tmpdir } from "node:os"; +import path from "node:path"; + +export function sleep(ms: number) { + return new Promise((resolve) => setTimeout(resolve, ms)); +} + +export function getUnixTimestamp() { + return Math.floor(Date.now() / 1000); +} + +export function getTempFilePath({ + prefix = "jobber", + extension = ".tmp", + length = 16, +} = {}) { + let filename = ""; + + if (prefix) { + filename += prefix; + filename += "-"; } - if (typeof process.argv[index + 1] === "undefined") { - return null; - } + filename += randomBytes(length).toString("hex"); - return process.argv[index + 1]; -}; - -export const timeout = (ms: number) => - new Promise((resolve) => setTimeout(resolve, ms)); + if (extension) { + filename += "."; + filename += extension; + } -export const getUnixTimestamp = () => 
Math.round(Date.now() / 1000); + return path.join(tmpdir(), filename); +} + +export async function fileExists(path: string) { + try { + await stat(path); + return true; + } catch (err) { + if (err instanceof Error) { + if ( + "code" in err && + typeof err.code === "string" && + err.code === "ENOENT" + ) { + return false; + } + } -export const shortenString = (input: string, maxLength = 20) => { - if (input.length > maxLength) { - return `${input.substring(0, maxLength - 5)}...${input.substring( - input.length - 5 - )}`; + throw err; } +} - return input; -}; - -export const unzip = ( +export function unzip( source: string, destination: string, - timeout: number = 60 -) => { + timeout: number = 60, +) { return new Promise((resolve, reject) => { const logs: string[] = []; @@ -61,7 +82,7 @@ export const unzip = ( ], { stdio: "pipe", - } + }, ); proc.stderr.on("data", (data) => logs.push(data.toString())); @@ -99,14 +120,4 @@ export const unzip = ( throw new Error(`[unzip] Failed with exit code ${code}`); }); }); -}; - -export const getTmpFile = ({ extension = "", length = 16 }) => { - let filename = randomBytes(length).toString("hex"); - - if (extension) { - filename += `.${extension}`; - } - - return path.join(tmpdir(), filename); -}; +} diff --git a/packages/runner-node-entrypoint/src/validator.ts b/packages/runner-node-entrypoint/src/validator.ts new file mode 100644 index 0000000..70c3645 --- /dev/null +++ b/packages/runner-node-entrypoint/src/validator.ts @@ -0,0 +1,61 @@ +// Would be great to use zod, but its another dependency that should be avoided. 
+ +type PackageJson = { + name: string; + version: string; + main: string; +}; + +type ValidationResult = + | { + success: true; + data: T; + } + | { + success: false; + errors: string[]; + }; + +export function validatePackageJson( + data: unknown, +): ValidationResult { + const errors: string[] = []; + const partial = {} as Partial; + + if (typeof data !== "object" || data === null) { + return { + success: false, + errors: ["package.json must be an object"], + }; + } + + if (!("name" in data) || typeof data.name !== "string") { + errors.push('property "name" must be a string'); + } else { + partial.name = data.name; + } + + if (!("version" in data) || typeof data.version !== "string") { + errors.push('property "version" must be a string'); + } else { + partial.version = data.version; + } + + if (!("main" in data) || typeof data.main !== "string") { + errors.push('property "main" must be a string if it exists'); + } else { + partial.main = data.main; + } + + if (errors.length > 0) { + return { + success: false, + errors, + }; + } + + return { + success: true, + data: partial as PackageJson, + }; +} diff --git a/packages/runner-node-entrypoint/tsconfig.json b/packages/runner-node-entrypoint/tsconfig.json index 5cfa7ab..830ba05 100644 --- a/packages/runner-node-entrypoint/tsconfig.json +++ b/packages/runner-node-entrypoint/tsconfig.json @@ -1,5 +1,6 @@ { "compilerOptions": { + "experimentalDecorators": true, "inlineSourceMap": true, "target": "ES2022", "module": "NodeNext", @@ -10,13 +11,14 @@ "forceConsistentCasingInFileNames": true, "declaration": true, "types": ["node"], + "rootDir": "./src", "outDir": "./dist", "paths":{ "~/*": ["./src/*"] } }, "include": [ - "./src" + "./src/**/*" ], "$schema": "https://json.schemastore.org/tsconfig", "display": "Recommended" diff --git a/packages/runner-node-entrypoint/tsdown.config.ts b/packages/runner-node-entrypoint/tsdown.config.ts new file mode 100644 index 0000000..7300e5d --- /dev/null +++ 
b/packages/runner-node-entrypoint/tsdown.config.ts @@ -0,0 +1,60 @@ +import { defineConfig } from "tsdown"; + +const parseName = (path: string) => { + const parts = path.split("/"); + + const lastNodeModule = parts.lastIndexOf("node_modules"); + + parts.splice(0, lastNodeModule + 1); + + return parts; +}; + +export default defineConfig({ + entry: "src/index.ts", + tsconfig: "tsconfig.json", + noExternal: /.*/, + treeshake: true, + format: { + cjs: { + target: ["node16"], + dts: false, + outDir: "dist/cjs", + }, + esm: { + target: ["node16"], + dts: false, + outDir: "dist/esm", + }, + }, + outputOptions: { + legalComments: "none", + sourcemap: "hidden", + entryFileNames: "jobber-start.js", + + chunkFileNames: (chunk) => { + return "jobber-modules/[name].js"; + }, + + codeSplitting: { + includeDependenciesRecursively: true, + groups: [ + { + name: (item) => { + const parts = parseName(item); + const firstPart = parts.at(0); + + if (firstPart?.startsWith("@jobber")) { + return `${firstPart}-${parts.at(1)}`; + } + + return `${firstPart}`; + }, + test: /node_modules/, + priority: 100, + }, + ], + }, + exports: "named", + }, +}); diff --git a/packages/runner-node-entrypoint/tsup.config.ts b/packages/runner-node-entrypoint/tsup.config.ts deleted file mode 100644 index c0c4ae1..0000000 --- a/packages/runner-node-entrypoint/tsup.config.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/index.ts"], - splitting: false, - sourcemap: false, - clean: true, - noExternal: ["@jobber/tcp-frame-socket"], - target: "es2020", - platform: "node", - format: "esm", -}); diff --git a/packages/server/.env.example b/packages/server/.env.example index 3159cf2..b2394f7 100644 --- a/packages/server/.env.example +++ b/packages/server/.env.example @@ -1,6 +1,6 @@ # Postgres Config DATABASE_URL="postgresql://jobber-username:jobber-password@127.0.0.1:8640/jobber-database" -DATABASE_BACKUP_SCHEDULE="0 * * * *" 
+DATABASE_BACKUP_SCHEDULE="0 0 0 0 0" DATABASE_BACKUP_RETENTION_COUNT="12" # Name of your Jobber instance. Must be unique per deployment @@ -15,7 +15,7 @@ DEBUG_RUNNER="true" # Specifies how the runners should connect to the manager MANAGER_PORT="5211" -MANAGER_HOST="" # Change to your local network IP address so the docker images can reach-out. e.g. 192.168.1.100 +MANAGER_GRPC_HOST="192.168.10.200" # Change to your local network IP address so the docker images can reach-out. e.g. 192.168.1.100 RUNNER_IMAGE_NODE24_URL="jobber-runner:24-latest" RUNNER_IMAGE_NODE22_URL="jobber-runner:22-latest" diff --git a/packages/server/drizzle.config.ts b/packages/server/drizzle.config.ts index 6e44611..73811b2 100644 --- a/packages/server/drizzle.config.ts +++ b/packages/server/drizzle.config.ts @@ -2,7 +2,7 @@ import { defineConfig } from "drizzle-kit"; export default defineConfig({ out: "./drizzle", - schema: "./dist/db/schema", + schema: "./dist/db/schema.js", dialect: "postgresql", dbCredentials: { url: process.env.DATABASE_URL!, diff --git a/packages/server/drizzle/0010_milky_microbe.sql b/packages/server/drizzle/0010_milky_microbe.sql new file mode 100644 index 0000000..5f9947e --- /dev/null +++ b/packages/server/drizzle/0010_milky_microbe.sql @@ -0,0 +1,40 @@ +CREATE TABLE "auditLog" ( + "id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL, + "subject" jsonb NOT NULL, + "entry" jsonb NOT NULL, + "created" timestamp DEFAULT now() NOT NULL +); +--> statement-breakpoint +CREATE TABLE "oauthServiceClient" ( + "id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL, + "is_system_managed" boolean DEFAULT false NOT NULL, + "name" varchar(255) NOT NULL, + "description" text, + "clientId" varchar(255) NOT NULL, + "metadata" jsonb NOT NULL, + "allowedAudiences" jsonb NOT NULL, + "allowedScopes" jsonb NOT NULL, + "enabled" boolean DEFAULT true NOT NULL, + "expiresAt" timestamp, + "createdAt" timestamp DEFAULT now() NOT NULL +); +--> statement-breakpoint +CREATE TABLE 
"oauthSigningKey" ( + "id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL, + "parent_id" uuid, + "child_id" uuid, + "created_by_user_id" uuid, + "status" varchar(255) NOT NULL, + "alg" varchar(255) NOT NULL, + "use" varchar(255) NOT NULL, + "private_key_encrypted" text NOT NULL, + "public_key" text NOT NULL, + "expiresAt" timestamp, + "renewsAt" timestamp, + "createdAt" timestamp DEFAULT now() NOT NULL +); +--> statement-breakpoint +ALTER TABLE "actions" ALTER COLUMN "runnerImage" SET DEFAULT 'node24';--> statement-breakpoint +ALTER TABLE "oauthSigningKey" ADD CONSTRAINT "oauthSigningKey_parent_id_oauthSigningKey_id_fk" FOREIGN KEY ("parent_id") REFERENCES "public"."oauthSigningKey"("id") ON DELETE set null ON UPDATE no action;--> statement-breakpoint +ALTER TABLE "oauthSigningKey" ADD CONSTRAINT "oauthSigningKey_child_id_oauthSigningKey_id_fk" FOREIGN KEY ("child_id") REFERENCES "public"."oauthSigningKey"("id") ON DELETE set null ON UPDATE no action;--> statement-breakpoint +ALTER TABLE "oauthSigningKey" ADD CONSTRAINT "oauthSigningKey_created_by_user_id_users_id_fk" FOREIGN KEY ("created_by_user_id") REFERENCES "public"."users"("id") ON DELETE set null ON UPDATE no action; \ No newline at end of file diff --git a/packages/server/drizzle/0011_military_malice.sql b/packages/server/drizzle/0011_military_malice.sql new file mode 100644 index 0000000..b486b50 --- /dev/null +++ b/packages/server/drizzle/0011_military_malice.sql @@ -0,0 +1 @@ +ALTER TABLE "oauthServiceClient" ADD COLUMN "permissions" jsonb NOT NULL DEFAULT '[]'::jsonb; \ No newline at end of file diff --git a/packages/server/drizzle/0012_nifty_plazm.sql b/packages/server/drizzle/0012_nifty_plazm.sql new file mode 100644 index 0000000..1d834e2 --- /dev/null +++ b/packages/server/drizzle/0012_nifty_plazm.sql @@ -0,0 +1,20 @@ +CREATE TABLE "runners" ( + "id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL, + "jobId" uuid NOT NULL, + "jobVersionId" uuid NOT NULL, + "actionId" uuid NOT NULL, + 
"environmentId" uuid, + "oauthServiceClientId" uuid, + "properties" jsonb, + "createdAt" timestamp DEFAULT now() NOT NULL, + "readyAt" timestamp, + "closingAt" timestamp, + "closedAt" timestamp +); +--> statement-breakpoint +ALTER TABLE "runners" ADD CONSTRAINT "runners_jobId_jobs_id_fk" FOREIGN KEY ("jobId") REFERENCES "public"."jobs"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint +ALTER TABLE "runners" ADD CONSTRAINT "runners_jobVersionId_job-versions_id_fk" FOREIGN KEY ("jobVersionId") REFERENCES "public"."job-versions"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint +ALTER TABLE "runners" ADD CONSTRAINT "runners_actionId_actions_id_fk" FOREIGN KEY ("actionId") REFERENCES "public"."actions"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint +ALTER TABLE "runners" ADD CONSTRAINT "runners_environmentId_environments_id_fk" FOREIGN KEY ("environmentId") REFERENCES "public"."environments"("id") ON DELETE set null ON UPDATE no action;--> statement-breakpoint +ALTER TABLE "runners" ADD CONSTRAINT "runners_oauthServiceClientId_oauthServiceClient_id_fk" FOREIGN KEY ("oauthServiceClientId") REFERENCES "public"."oauthServiceClient"("id") ON DELETE set null ON UPDATE no action;--> statement-breakpoint +ALTER TABLE "oauthServiceClient" ADD CONSTRAINT "oauthServiceClient_clientId_unique" UNIQUE("clientId"); \ No newline at end of file diff --git a/packages/server/drizzle/0013_thankful_thor.sql b/packages/server/drizzle/0013_thankful_thor.sql new file mode 100644 index 0000000..195d2d9 --- /dev/null +++ b/packages/server/drizzle/0013_thankful_thor.sql @@ -0,0 +1,3 @@ +ALTER TABLE "runners" ADD COLUMN "status" varchar(50); +UPDATE "runners" SET "status" = 'closed' WHERE "status" IS NULL; +ALTER TABLE "runners" ALTER COLUMN "status" SET NOT NULL; \ No newline at end of file diff --git a/packages/server/drizzle/0014_demonic_ronan.sql b/packages/server/drizzle/0014_demonic_ronan.sql new file mode 100644 index 
0000000..3977012 --- /dev/null +++ b/packages/server/drizzle/0014_demonic_ronan.sql @@ -0,0 +1 @@ +-- Custom SQL migration file, put your code below! -- \ No newline at end of file diff --git a/packages/server/drizzle/meta/0010_snapshot.json b/packages/server/drizzle/meta/0010_snapshot.json new file mode 100644 index 0000000..5003cfb --- /dev/null +++ b/packages/server/drizzle/meta/0010_snapshot.json @@ -0,0 +1,1114 @@ +{ + "id": "27149207-986a-408b-9532-d90a800afdcc", + "prevId": "367b35f4-8ff6-43e1-a52e-51e1a76978b1", + "version": "7", + "dialect": "postgresql", + "tables": { + "public.actions": { + "name": "actions", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "jobId": { + "name": "jobId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "jobVersionId": { + "name": "jobVersionId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "runnerImage": { + "name": "runnerImage", + "type": "text", + "primaryKey": false, + "notNull": true, + "default": "'node24'" + }, + "runnerAsynchronous": { + "name": "runnerAsynchronous", + "type": "boolean", + "primaryKey": false, + "notNull": true, + "default": true + }, + "runnerMinCount": { + "name": "runnerMinCount", + "type": "integer", + "primaryKey": false, + "notNull": true, + "default": 1 + }, + "runnerMaxCount": { + "name": "runnerMaxCount", + "type": "integer", + "primaryKey": false, + "notNull": true, + "default": 16 + }, + "runnerTimeout": { + "name": "runnerTimeout", + "type": "integer", + "primaryKey": false, + "notNull": true, + "default": 60 + }, + "runnerMaxIdleAge": { + "name": "runnerMaxIdleAge", + "type": "integer", + "primaryKey": false, + "notNull": true, + "default": 0 + }, + "runnerMaxAge": { + "name": "runnerMaxAge", + "type": "integer", + "primaryKey": false, + "notNull": true, + "default": 900 + }, + "runnerMaxAgeHard": { + "name": "runnerMaxAgeHard", + "type": 
"integer", + "primaryKey": false, + "notNull": true, + "default": 960 + }, + "runnerDockerArguments": { + "name": "runnerDockerArguments", + "type": "jsonb", + "primaryKey": false, + "notNull": true, + "default": "'{}'::jsonb" + }, + "runnerMode": { + "name": "runnerMode", + "type": "text", + "primaryKey": false, + "notNull": false, + "default": "'standard'" + } + }, + "indexes": {}, + "foreignKeys": { + "actions_jobId_jobs_id_fk": { + "name": "actions_jobId_jobs_id_fk", + "tableFrom": "actions", + "tableTo": "jobs", + "columnsFrom": [ + "jobId" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "actions_jobVersionId_job-versions_id_fk": { + "name": "actions_jobVersionId_job-versions_id_fk", + "tableFrom": "actions", + "tableTo": "job-versions", + "columnsFrom": [ + "jobVersionId" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.apiTokens": { + "name": "apiTokens", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "token": { + "name": "token", + "type": "varchar(70)", + "primaryKey": false, + "notNull": true + }, + "userId": { + "name": "userId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "description": { + "name": "description", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "permissions": { + "name": "permissions", + "type": "jsonb", + "primaryKey": false, + "notNull": true + }, + "status": { + "name": "status", + "type": "varchar(16)", + "primaryKey": false, + "notNull": true, + "default": "'enabled'" + }, + "expires": { + "name": "expires", + "type": "timestamp", + "primaryKey": false, + "notNull": true + }, + "created": { + "name": "created", + "type": "timestamp", + "primaryKey": false, + 
"notNull": true, + "default": "now()" + } + }, + "indexes": {}, + "foreignKeys": { + "apiTokens_userId_users_id_fk": { + "name": "apiTokens_userId_users_id_fk", + "tableFrom": "apiTokens", + "tableTo": "users", + "columnsFrom": [ + "userId" + ], + "columnsTo": [ + "id" + ], + "onDelete": "no action", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "apiTokens_token_unique": { + "name": "apiTokens_token_unique", + "nullsNotDistinct": false, + "columns": [ + "token" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.auditLog": { + "name": "auditLog", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "subject": { + "name": "subject", + "type": "jsonb", + "primaryKey": false, + "notNull": true + }, + "entry": { + "name": "entry", + "type": "jsonb", + "primaryKey": false, + "notNull": true + }, + "created": { + "name": "created", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.environments": { + "name": "environments", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "jobId": { + "name": "jobId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "context": { + "name": "context", + "type": "jsonb", + "primaryKey": false, + "notNull": true, + "default": "'{}'::jsonb" + }, + "modified": { + "name": "modified", + "type": "integer", + "primaryKey": false, + "notNull": true + } + }, + "indexes": {}, + "foreignKeys": { + "environments_jobId_jobs_id_fk": { + "name": "environments_jobId_jobs_id_fk", + "tableFrom": "environments", + "tableTo": 
"jobs", + "columnsFrom": [ + "jobId" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "environments_jobId_unique": { + "name": "environments_jobId_unique", + "nullsNotDistinct": false, + "columns": [ + "jobId" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.job-versions": { + "name": "job-versions", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "jobId": { + "name": "jobId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "version": { + "name": "version", + "type": "varchar(32)", + "primaryKey": false, + "notNull": true + }, + "modified": { + "name": "modified", + "type": "integer", + "primaryKey": false, + "notNull": true + }, + "created": { + "name": "created", + "type": "integer", + "primaryKey": false, + "notNull": true + } + }, + "indexes": {}, + "foreignKeys": { + "job-versions_jobId_jobs_id_fk": { + "name": "job-versions_jobId_jobs_id_fk", + "tableFrom": "job-versions", + "tableTo": "jobs", + "columnsFrom": [ + "jobId" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "job-versions_jobId_version_unique": { + "name": "job-versions_jobId_version_unique", + "nullsNotDistinct": false, + "columns": [ + "jobId", + "version" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.jobs": { + "name": "jobs", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "jobName": { + "name": "jobName", + "type": "varchar(128)", + "primaryKey": false, + "notNull": true + }, + "description": { + "name": "description", + "type": "text", + "primaryKey": false, + 
"notNull": false + }, + "jobVersionId": { + "name": "jobVersionId", + "type": "uuid", + "primaryKey": false, + "notNull": false + }, + "status": { + "name": "status", + "type": "varchar(16)", + "primaryKey": false, + "notNull": false, + "default": "'enabled'" + }, + "links": { + "name": "links", + "type": "jsonb", + "primaryKey": false, + "notNull": true, + "default": "'[]'::jsonb" + } + }, + "indexes": {}, + "foreignKeys": { + "jobs_jobVersionId_job-versions_id_fk": { + "name": "jobs_jobVersionId_job-versions_id_fk", + "tableFrom": "jobs", + "tableTo": "job-versions", + "columnsFrom": [ + "jobVersionId" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "jobs_jobName_unique": { + "name": "jobs_jobName_unique", + "nullsNotDistinct": false, + "columns": [ + "jobName" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.lock": { + "name": "lock", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "lockKey": { + "name": "lockKey", + "type": "varchar(256)", + "primaryKey": false, + "notNull": true + }, + "expires": { + "name": "expires", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "NOW() + INTERVAL '5 minutes'" + }, + "created": { + "name": "created", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "NOW()" + }, + "modified": { + "name": "modified", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "NOW()" + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "lock_lockKey_unique": { + "name": "lock_lockKey_unique", + "nullsNotDistinct": false, + "columns": [ + "lockKey" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.logs": { + "name": 
"logs", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "jobId": { + "name": "jobId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "actionId": { + "name": "actionId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "source": { + "name": "source", + "type": "varchar", + "primaryKey": false, + "notNull": true + }, + "sort": { + "name": "sort", + "type": "varchar(32)", + "primaryKey": false, + "notNull": true, + "default": "''" + }, + "created": { + "name": "created", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + }, + "message": { + "name": "message", + "type": "text", + "primaryKey": false, + "notNull": true + } + }, + "indexes": { + "jobId_created_idx": { + "name": "jobId_created_idx", + "columns": [ + { + "expression": "jobId", + "isExpression": false, + "asc": true, + "nulls": "last" + }, + { + "expression": "created", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + } + }, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.oauthServiceClient": { + "name": "oauthServiceClient", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "is_system_managed": { + "name": "is_system_managed", + "type": "boolean", + "primaryKey": false, + "notNull": true, + "default": false + }, + "name": { + "name": "name", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "description": { + "name": "description", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "clientId": { + "name": "clientId", + "type": "varchar(255)", + "primaryKey": false, + 
"notNull": true + }, + "metadata": { + "name": "metadata", + "type": "jsonb", + "primaryKey": false, + "notNull": true + }, + "allowedAudiences": { + "name": "allowedAudiences", + "type": "jsonb", + "primaryKey": false, + "notNull": true + }, + "allowedScopes": { + "name": "allowedScopes", + "type": "jsonb", + "primaryKey": false, + "notNull": true + }, + "enabled": { + "name": "enabled", + "type": "boolean", + "primaryKey": false, + "notNull": true, + "default": true + }, + "expiresAt": { + "name": "expiresAt", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "createdAt": { + "name": "createdAt", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.oauthSigningKey": { + "name": "oauthSigningKey", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "parent_id": { + "name": "parent_id", + "type": "uuid", + "primaryKey": false, + "notNull": false + }, + "child_id": { + "name": "child_id", + "type": "uuid", + "primaryKey": false, + "notNull": false + }, + "created_by_user_id": { + "name": "created_by_user_id", + "type": "uuid", + "primaryKey": false, + "notNull": false + }, + "status": { + "name": "status", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "alg": { + "name": "alg", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "use": { + "name": "use", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "private_key_encrypted": { + "name": "private_key_encrypted", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "public_key": { + "name": "public_key", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "expiresAt": { + 
"name": "expiresAt", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "renewsAt": { + "name": "renewsAt", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "createdAt": { + "name": "createdAt", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": {}, + "foreignKeys": { + "oauthSigningKey_parent_id_oauthSigningKey_id_fk": { + "name": "oauthSigningKey_parent_id_oauthSigningKey_id_fk", + "tableFrom": "oauthSigningKey", + "tableTo": "oauthSigningKey", + "columnsFrom": [ + "parent_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + }, + "oauthSigningKey_child_id_oauthSigningKey_id_fk": { + "name": "oauthSigningKey_child_id_oauthSigningKey_id_fk", + "tableFrom": "oauthSigningKey", + "tableTo": "oauthSigningKey", + "columnsFrom": [ + "child_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + }, + "oauthSigningKey_created_by_user_id_users_id_fk": { + "name": "oauthSigningKey_created_by_user_id_users_id_fk", + "tableFrom": "oauthSigningKey", + "tableTo": "users", + "columnsFrom": [ + "created_by_user_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.sessions": { + "name": "sessions", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "token": { + "name": "token", + "type": "varchar(50)", + "primaryKey": false, + "notNull": true + }, + "userId": { + "name": "userId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "status": { + "name": "status", + "type": "varchar", + "primaryKey": false, + "notNull": true, + "default": "'active'" + }, + "expires": { + "name": "expires", + "type": 
"timestamp", + "primaryKey": false, + "notNull": true + }, + "created": { + "name": "created", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": {}, + "foreignKeys": { + "sessions_userId_users_id_fk": { + "name": "sessions_userId_users_id_fk", + "tableFrom": "sessions", + "tableTo": "users", + "columnsFrom": [ + "userId" + ], + "columnsTo": [ + "id" + ], + "onDelete": "no action", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "sessions_token_unique": { + "name": "sessions_token_unique", + "nullsNotDistinct": false, + "columns": [ + "token" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.store": { + "name": "store", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "jobId": { + "name": "jobId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "storeKey": { + "name": "storeKey", + "type": "varchar(128)", + "primaryKey": false, + "notNull": true + }, + "storeValue": { + "name": "storeValue", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "expiry": { + "name": "expiry", + "type": "integer", + "primaryKey": false, + "notNull": false + }, + "modified": { + "name": "modified", + "type": "integer", + "primaryKey": false, + "notNull": true + }, + "created": { + "name": "created", + "type": "integer", + "primaryKey": false, + "notNull": true + } + }, + "indexes": {}, + "foreignKeys": { + "store_jobId_jobs_id_fk": { + "name": "store_jobId_jobs_id_fk", + "tableFrom": "store", + "tableTo": "jobs", + "columnsFrom": [ + "jobId" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "store_jobId_storeKey_unique": { + "name": "store_jobId_storeKey_unique", + "nullsNotDistinct": false, + "columns": [ 
+ "jobId", + "storeKey" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.triggers": { + "name": "triggers", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "jobId": { + "name": "jobId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "jobVersionId": { + "name": "jobVersionId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "context": { + "name": "context", + "type": "jsonb", + "primaryKey": false, + "notNull": true + } + }, + "indexes": {}, + "foreignKeys": { + "triggers_jobId_jobs_id_fk": { + "name": "triggers_jobId_jobs_id_fk", + "tableFrom": "triggers", + "tableTo": "jobs", + "columnsFrom": [ + "jobId" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "triggers_jobVersionId_job-versions_id_fk": { + "name": "triggers_jobVersionId_job-versions_id_fk", + "tableFrom": "triggers", + "tableTo": "job-versions", + "columnsFrom": [ + "jobVersionId" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.users": { + "name": "users", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "username": { + "name": "username", + "type": "varchar", + "primaryKey": false, + "notNull": true + }, + "password": { + "name": "password", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "enabled": { + "name": "enabled", + "type": "boolean", + "primaryKey": false, + "notNull": true, + "default": true + }, + "permissions": { + "name": "permissions", + "type": "jsonb", + "primaryKey": false, + "notNull": true + }, + "created": { + "name": "created", + "type": 
"timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": { + "usernameUniqueIndex": { + "name": "usernameUniqueIndex", + "columns": [ + { + "expression": "lower(\"username\")", + "asc": true, + "isExpression": true, + "nulls": "last" + } + ], + "isUnique": true, + "concurrently": false, + "method": "btree", + "with": {} + } + }, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "users_username_unique": { + "name": "users_username_unique", + "nullsNotDistinct": false, + "columns": [ + "username" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + } + }, + "enums": {}, + "schemas": {}, + "sequences": {}, + "roles": {}, + "policies": {}, + "views": {}, + "_meta": { + "columns": {}, + "schemas": {}, + "tables": {} + } +} \ No newline at end of file diff --git a/packages/server/drizzle/meta/0011_snapshot.json b/packages/server/drizzle/meta/0011_snapshot.json new file mode 100644 index 0000000..b68a7de --- /dev/null +++ b/packages/server/drizzle/meta/0011_snapshot.json @@ -0,0 +1,1120 @@ +{ + "id": "c7c2422b-1d4d-4cc3-9a33-e3373cfe67db", + "prevId": "27149207-986a-408b-9532-d90a800afdcc", + "version": "7", + "dialect": "postgresql", + "tables": { + "public.actions": { + "name": "actions", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "jobId": { + "name": "jobId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "jobVersionId": { + "name": "jobVersionId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "runnerImage": { + "name": "runnerImage", + "type": "text", + "primaryKey": false, + "notNull": true, + "default": "'node24'" + }, + "runnerAsynchronous": { + "name": "runnerAsynchronous", + "type": "boolean", + "primaryKey": false, + "notNull": true, + "default": true + }, + "runnerMinCount": { + "name": "runnerMinCount", + "type": 
"integer", + "primaryKey": false, + "notNull": true, + "default": 1 + }, + "runnerMaxCount": { + "name": "runnerMaxCount", + "type": "integer", + "primaryKey": false, + "notNull": true, + "default": 16 + }, + "runnerTimeout": { + "name": "runnerTimeout", + "type": "integer", + "primaryKey": false, + "notNull": true, + "default": 60 + }, + "runnerMaxIdleAge": { + "name": "runnerMaxIdleAge", + "type": "integer", + "primaryKey": false, + "notNull": true, + "default": 0 + }, + "runnerMaxAge": { + "name": "runnerMaxAge", + "type": "integer", + "primaryKey": false, + "notNull": true, + "default": 900 + }, + "runnerMaxAgeHard": { + "name": "runnerMaxAgeHard", + "type": "integer", + "primaryKey": false, + "notNull": true, + "default": 960 + }, + "runnerDockerArguments": { + "name": "runnerDockerArguments", + "type": "jsonb", + "primaryKey": false, + "notNull": true, + "default": "'{}'::jsonb" + }, + "runnerMode": { + "name": "runnerMode", + "type": "text", + "primaryKey": false, + "notNull": false, + "default": "'standard'" + } + }, + "indexes": {}, + "foreignKeys": { + "actions_jobId_jobs_id_fk": { + "name": "actions_jobId_jobs_id_fk", + "tableFrom": "actions", + "tableTo": "jobs", + "columnsFrom": [ + "jobId" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "actions_jobVersionId_job-versions_id_fk": { + "name": "actions_jobVersionId_job-versions_id_fk", + "tableFrom": "actions", + "tableTo": "job-versions", + "columnsFrom": [ + "jobVersionId" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.apiTokens": { + "name": "apiTokens", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "token": { + "name": "token", + "type": "varchar(70)", + 
"primaryKey": false, + "notNull": true + }, + "userId": { + "name": "userId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "description": { + "name": "description", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "permissions": { + "name": "permissions", + "type": "jsonb", + "primaryKey": false, + "notNull": true + }, + "status": { + "name": "status", + "type": "varchar(16)", + "primaryKey": false, + "notNull": true, + "default": "'enabled'" + }, + "expires": { + "name": "expires", + "type": "timestamp", + "primaryKey": false, + "notNull": true + }, + "created": { + "name": "created", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": {}, + "foreignKeys": { + "apiTokens_userId_users_id_fk": { + "name": "apiTokens_userId_users_id_fk", + "tableFrom": "apiTokens", + "tableTo": "users", + "columnsFrom": [ + "userId" + ], + "columnsTo": [ + "id" + ], + "onDelete": "no action", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "apiTokens_token_unique": { + "name": "apiTokens_token_unique", + "nullsNotDistinct": false, + "columns": [ + "token" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.auditLog": { + "name": "auditLog", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "subject": { + "name": "subject", + "type": "jsonb", + "primaryKey": false, + "notNull": true + }, + "entry": { + "name": "entry", + "type": "jsonb", + "primaryKey": false, + "notNull": true + }, + "created": { + "name": "created", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.environments": 
{ + "name": "environments", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "jobId": { + "name": "jobId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "context": { + "name": "context", + "type": "jsonb", + "primaryKey": false, + "notNull": true, + "default": "'{}'::jsonb" + }, + "modified": { + "name": "modified", + "type": "integer", + "primaryKey": false, + "notNull": true + } + }, + "indexes": {}, + "foreignKeys": { + "environments_jobId_jobs_id_fk": { + "name": "environments_jobId_jobs_id_fk", + "tableFrom": "environments", + "tableTo": "jobs", + "columnsFrom": [ + "jobId" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "environments_jobId_unique": { + "name": "environments_jobId_unique", + "nullsNotDistinct": false, + "columns": [ + "jobId" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.job-versions": { + "name": "job-versions", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "jobId": { + "name": "jobId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "version": { + "name": "version", + "type": "varchar(32)", + "primaryKey": false, + "notNull": true + }, + "modified": { + "name": "modified", + "type": "integer", + "primaryKey": false, + "notNull": true + }, + "created": { + "name": "created", + "type": "integer", + "primaryKey": false, + "notNull": true + } + }, + "indexes": {}, + "foreignKeys": { + "job-versions_jobId_jobs_id_fk": { + "name": "job-versions_jobId_jobs_id_fk", + "tableFrom": "job-versions", + "tableTo": "jobs", + "columnsFrom": [ + "jobId" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + 
"compositePrimaryKeys": {}, + "uniqueConstraints": { + "job-versions_jobId_version_unique": { + "name": "job-versions_jobId_version_unique", + "nullsNotDistinct": false, + "columns": [ + "jobId", + "version" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.jobs": { + "name": "jobs", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "jobName": { + "name": "jobName", + "type": "varchar(128)", + "primaryKey": false, + "notNull": true + }, + "description": { + "name": "description", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "jobVersionId": { + "name": "jobVersionId", + "type": "uuid", + "primaryKey": false, + "notNull": false + }, + "status": { + "name": "status", + "type": "varchar(16)", + "primaryKey": false, + "notNull": false, + "default": "'enabled'" + }, + "links": { + "name": "links", + "type": "jsonb", + "primaryKey": false, + "notNull": true, + "default": "'[]'::jsonb" + } + }, + "indexes": {}, + "foreignKeys": { + "jobs_jobVersionId_job-versions_id_fk": { + "name": "jobs_jobVersionId_job-versions_id_fk", + "tableFrom": "jobs", + "tableTo": "job-versions", + "columnsFrom": [ + "jobVersionId" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "jobs_jobName_unique": { + "name": "jobs_jobName_unique", + "nullsNotDistinct": false, + "columns": [ + "jobName" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.lock": { + "name": "lock", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "lockKey": { + "name": "lockKey", + "type": "varchar(256)", + "primaryKey": false, + "notNull": true + }, + "expires": { + "name": "expires", + "type": 
"timestamp", + "primaryKey": false, + "notNull": true, + "default": "NOW() + INTERVAL '5 minutes'" + }, + "created": { + "name": "created", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "NOW()" + }, + "modified": { + "name": "modified", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "NOW()" + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "lock_lockKey_unique": { + "name": "lock_lockKey_unique", + "nullsNotDistinct": false, + "columns": [ + "lockKey" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.logs": { + "name": "logs", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "jobId": { + "name": "jobId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "actionId": { + "name": "actionId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "source": { + "name": "source", + "type": "varchar", + "primaryKey": false, + "notNull": true + }, + "sort": { + "name": "sort", + "type": "varchar(32)", + "primaryKey": false, + "notNull": true, + "default": "''" + }, + "created": { + "name": "created", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + }, + "message": { + "name": "message", + "type": "text", + "primaryKey": false, + "notNull": true + } + }, + "indexes": { + "jobId_created_idx": { + "name": "jobId_created_idx", + "columns": [ + { + "expression": "jobId", + "isExpression": false, + "asc": true, + "nulls": "last" + }, + { + "expression": "created", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + } + }, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + 
"isRLSEnabled": false + }, + "public.oauthServiceClient": { + "name": "oauthServiceClient", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "is_system_managed": { + "name": "is_system_managed", + "type": "boolean", + "primaryKey": false, + "notNull": true, + "default": false + }, + "name": { + "name": "name", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "description": { + "name": "description", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "clientId": { + "name": "clientId", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "metadata": { + "name": "metadata", + "type": "jsonb", + "primaryKey": false, + "notNull": true + }, + "allowedAudiences": { + "name": "allowedAudiences", + "type": "jsonb", + "primaryKey": false, + "notNull": true + }, + "allowedScopes": { + "name": "allowedScopes", + "type": "jsonb", + "primaryKey": false, + "notNull": true + }, + "permissions": { + "name": "permissions", + "type": "jsonb", + "primaryKey": false, + "notNull": true + }, + "enabled": { + "name": "enabled", + "type": "boolean", + "primaryKey": false, + "notNull": true, + "default": true + }, + "expiresAt": { + "name": "expiresAt", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "createdAt": { + "name": "createdAt", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.oauthSigningKey": { + "name": "oauthSigningKey", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "parent_id": { + "name": "parent_id", + "type": "uuid", + "primaryKey": false, + "notNull": 
false + }, + "child_id": { + "name": "child_id", + "type": "uuid", + "primaryKey": false, + "notNull": false + }, + "created_by_user_id": { + "name": "created_by_user_id", + "type": "uuid", + "primaryKey": false, + "notNull": false + }, + "status": { + "name": "status", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "alg": { + "name": "alg", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "use": { + "name": "use", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "private_key_encrypted": { + "name": "private_key_encrypted", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "public_key": { + "name": "public_key", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "expiresAt": { + "name": "expiresAt", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "renewsAt": { + "name": "renewsAt", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "createdAt": { + "name": "createdAt", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": {}, + "foreignKeys": { + "oauthSigningKey_parent_id_oauthSigningKey_id_fk": { + "name": "oauthSigningKey_parent_id_oauthSigningKey_id_fk", + "tableFrom": "oauthSigningKey", + "tableTo": "oauthSigningKey", + "columnsFrom": [ + "parent_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + }, + "oauthSigningKey_child_id_oauthSigningKey_id_fk": { + "name": "oauthSigningKey_child_id_oauthSigningKey_id_fk", + "tableFrom": "oauthSigningKey", + "tableTo": "oauthSigningKey", + "columnsFrom": [ + "child_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + }, + "oauthSigningKey_created_by_user_id_users_id_fk": { + "name": "oauthSigningKey_created_by_user_id_users_id_fk", + "tableFrom": "oauthSigningKey", + "tableTo": "users", + "columnsFrom": [ + "created_by_user_id" + ], + 
"columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.sessions": { + "name": "sessions", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "token": { + "name": "token", + "type": "varchar(50)", + "primaryKey": false, + "notNull": true + }, + "userId": { + "name": "userId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "status": { + "name": "status", + "type": "varchar", + "primaryKey": false, + "notNull": true, + "default": "'active'" + }, + "expires": { + "name": "expires", + "type": "timestamp", + "primaryKey": false, + "notNull": true + }, + "created": { + "name": "created", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": {}, + "foreignKeys": { + "sessions_userId_users_id_fk": { + "name": "sessions_userId_users_id_fk", + "tableFrom": "sessions", + "tableTo": "users", + "columnsFrom": [ + "userId" + ], + "columnsTo": [ + "id" + ], + "onDelete": "no action", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "sessions_token_unique": { + "name": "sessions_token_unique", + "nullsNotDistinct": false, + "columns": [ + "token" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.store": { + "name": "store", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "jobId": { + "name": "jobId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "storeKey": { + "name": "storeKey", + "type": "varchar(128)", + "primaryKey": false, + "notNull": true + }, + "storeValue": { + "name": "storeValue", + "type": "text", + 
"primaryKey": false, + "notNull": true + }, + "expiry": { + "name": "expiry", + "type": "integer", + "primaryKey": false, + "notNull": false + }, + "modified": { + "name": "modified", + "type": "integer", + "primaryKey": false, + "notNull": true + }, + "created": { + "name": "created", + "type": "integer", + "primaryKey": false, + "notNull": true + } + }, + "indexes": {}, + "foreignKeys": { + "store_jobId_jobs_id_fk": { + "name": "store_jobId_jobs_id_fk", + "tableFrom": "store", + "tableTo": "jobs", + "columnsFrom": [ + "jobId" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "store_jobId_storeKey_unique": { + "name": "store_jobId_storeKey_unique", + "nullsNotDistinct": false, + "columns": [ + "jobId", + "storeKey" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.triggers": { + "name": "triggers", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "jobId": { + "name": "jobId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "jobVersionId": { + "name": "jobVersionId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "context": { + "name": "context", + "type": "jsonb", + "primaryKey": false, + "notNull": true + } + }, + "indexes": {}, + "foreignKeys": { + "triggers_jobId_jobs_id_fk": { + "name": "triggers_jobId_jobs_id_fk", + "tableFrom": "triggers", + "tableTo": "jobs", + "columnsFrom": [ + "jobId" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "triggers_jobVersionId_job-versions_id_fk": { + "name": "triggers_jobVersionId_job-versions_id_fk", + "tableFrom": "triggers", + "tableTo": "job-versions", + "columnsFrom": [ + "jobVersionId" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + 
"compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.users": { + "name": "users", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "username": { + "name": "username", + "type": "varchar", + "primaryKey": false, + "notNull": true + }, + "password": { + "name": "password", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "enabled": { + "name": "enabled", + "type": "boolean", + "primaryKey": false, + "notNull": true, + "default": true + }, + "permissions": { + "name": "permissions", + "type": "jsonb", + "primaryKey": false, + "notNull": true + }, + "created": { + "name": "created", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": { + "usernameUniqueIndex": { + "name": "usernameUniqueIndex", + "columns": [ + { + "expression": "lower(\"username\")", + "asc": true, + "isExpression": true, + "nulls": "last" + } + ], + "isUnique": true, + "concurrently": false, + "method": "btree", + "with": {} + } + }, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "users_username_unique": { + "name": "users_username_unique", + "nullsNotDistinct": false, + "columns": [ + "username" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + } + }, + "enums": {}, + "schemas": {}, + "sequences": {}, + "roles": {}, + "policies": {}, + "views": {}, + "_meta": { + "columns": {}, + "schemas": {}, + "tables": {} + } +} \ No newline at end of file diff --git a/packages/server/drizzle/meta/0012_snapshot.json b/packages/server/drizzle/meta/0012_snapshot.json new file mode 100644 index 0000000..5aacfc1 --- /dev/null +++ b/packages/server/drizzle/meta/0012_snapshot.json @@ -0,0 +1,1275 @@ +{ + "id": "e7d0b658-d417-4c3d-bcec-7418c7678082", + "prevId": 
"c7c2422b-1d4d-4cc3-9a33-e3373cfe67db", + "version": "7", + "dialect": "postgresql", + "tables": { + "public.actions": { + "name": "actions", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "jobId": { + "name": "jobId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "jobVersionId": { + "name": "jobVersionId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "runnerImage": { + "name": "runnerImage", + "type": "text", + "primaryKey": false, + "notNull": true, + "default": "'node24'" + }, + "runnerAsynchronous": { + "name": "runnerAsynchronous", + "type": "boolean", + "primaryKey": false, + "notNull": true, + "default": true + }, + "runnerMinCount": { + "name": "runnerMinCount", + "type": "integer", + "primaryKey": false, + "notNull": true, + "default": 1 + }, + "runnerMaxCount": { + "name": "runnerMaxCount", + "type": "integer", + "primaryKey": false, + "notNull": true, + "default": 16 + }, + "runnerTimeout": { + "name": "runnerTimeout", + "type": "integer", + "primaryKey": false, + "notNull": true, + "default": 60 + }, + "runnerMaxIdleAge": { + "name": "runnerMaxIdleAge", + "type": "integer", + "primaryKey": false, + "notNull": true, + "default": 0 + }, + "runnerMaxAge": { + "name": "runnerMaxAge", + "type": "integer", + "primaryKey": false, + "notNull": true, + "default": 900 + }, + "runnerMaxAgeHard": { + "name": "runnerMaxAgeHard", + "type": "integer", + "primaryKey": false, + "notNull": true, + "default": 960 + }, + "runnerDockerArguments": { + "name": "runnerDockerArguments", + "type": "jsonb", + "primaryKey": false, + "notNull": true, + "default": "'{}'::jsonb" + }, + "runnerMode": { + "name": "runnerMode", + "type": "text", + "primaryKey": false, + "notNull": false, + "default": "'standard'" + } + }, + "indexes": {}, + "foreignKeys": { + "actions_jobId_jobs_id_fk": { + "name": "actions_jobId_jobs_id_fk", + 
"tableFrom": "actions", + "tableTo": "jobs", + "columnsFrom": [ + "jobId" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "actions_jobVersionId_job-versions_id_fk": { + "name": "actions_jobVersionId_job-versions_id_fk", + "tableFrom": "actions", + "tableTo": "job-versions", + "columnsFrom": [ + "jobVersionId" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.apiTokens": { + "name": "apiTokens", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "token": { + "name": "token", + "type": "varchar(70)", + "primaryKey": false, + "notNull": true + }, + "userId": { + "name": "userId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "description": { + "name": "description", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "permissions": { + "name": "permissions", + "type": "jsonb", + "primaryKey": false, + "notNull": true + }, + "status": { + "name": "status", + "type": "varchar(16)", + "primaryKey": false, + "notNull": true, + "default": "'enabled'" + }, + "expires": { + "name": "expires", + "type": "timestamp", + "primaryKey": false, + "notNull": true + }, + "created": { + "name": "created", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": {}, + "foreignKeys": { + "apiTokens_userId_users_id_fk": { + "name": "apiTokens_userId_users_id_fk", + "tableFrom": "apiTokens", + "tableTo": "users", + "columnsFrom": [ + "userId" + ], + "columnsTo": [ + "id" + ], + "onDelete": "no action", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "apiTokens_token_unique": { + "name": "apiTokens_token_unique", + "nullsNotDistinct": 
false, + "columns": [ + "token" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.auditLog": { + "name": "auditLog", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "subject": { + "name": "subject", + "type": "jsonb", + "primaryKey": false, + "notNull": true + }, + "entry": { + "name": "entry", + "type": "jsonb", + "primaryKey": false, + "notNull": true + }, + "created": { + "name": "created", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.environments": { + "name": "environments", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "jobId": { + "name": "jobId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "context": { + "name": "context", + "type": "jsonb", + "primaryKey": false, + "notNull": true, + "default": "'{}'::jsonb" + }, + "modified": { + "name": "modified", + "type": "integer", + "primaryKey": false, + "notNull": true + } + }, + "indexes": {}, + "foreignKeys": { + "environments_jobId_jobs_id_fk": { + "name": "environments_jobId_jobs_id_fk", + "tableFrom": "environments", + "tableTo": "jobs", + "columnsFrom": [ + "jobId" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "environments_jobId_unique": { + "name": "environments_jobId_unique", + "nullsNotDistinct": false, + "columns": [ + "jobId" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.job-versions": { + "name": "job-versions", + "schema": "", + "columns": { + 
"id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "jobId": { + "name": "jobId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "version": { + "name": "version", + "type": "varchar(32)", + "primaryKey": false, + "notNull": true + }, + "modified": { + "name": "modified", + "type": "integer", + "primaryKey": false, + "notNull": true + }, + "created": { + "name": "created", + "type": "integer", + "primaryKey": false, + "notNull": true + } + }, + "indexes": {}, + "foreignKeys": { + "job-versions_jobId_jobs_id_fk": { + "name": "job-versions_jobId_jobs_id_fk", + "tableFrom": "job-versions", + "tableTo": "jobs", + "columnsFrom": [ + "jobId" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "job-versions_jobId_version_unique": { + "name": "job-versions_jobId_version_unique", + "nullsNotDistinct": false, + "columns": [ + "jobId", + "version" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.jobs": { + "name": "jobs", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "jobName": { + "name": "jobName", + "type": "varchar(128)", + "primaryKey": false, + "notNull": true + }, + "description": { + "name": "description", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "jobVersionId": { + "name": "jobVersionId", + "type": "uuid", + "primaryKey": false, + "notNull": false + }, + "status": { + "name": "status", + "type": "varchar(16)", + "primaryKey": false, + "notNull": false, + "default": "'enabled'" + }, + "links": { + "name": "links", + "type": "jsonb", + "primaryKey": false, + "notNull": true, + "default": "'[]'::jsonb" + } + }, + "indexes": {}, + "foreignKeys": { + "jobs_jobVersionId_job-versions_id_fk": { + "name": 
"jobs_jobVersionId_job-versions_id_fk", + "tableFrom": "jobs", + "tableTo": "job-versions", + "columnsFrom": [ + "jobVersionId" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "jobs_jobName_unique": { + "name": "jobs_jobName_unique", + "nullsNotDistinct": false, + "columns": [ + "jobName" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.lock": { + "name": "lock", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "lockKey": { + "name": "lockKey", + "type": "varchar(256)", + "primaryKey": false, + "notNull": true + }, + "expires": { + "name": "expires", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "NOW() + INTERVAL '5 minutes'" + }, + "created": { + "name": "created", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "NOW()" + }, + "modified": { + "name": "modified", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "NOW()" + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "lock_lockKey_unique": { + "name": "lock_lockKey_unique", + "nullsNotDistinct": false, + "columns": [ + "lockKey" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.logs": { + "name": "logs", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "jobId": { + "name": "jobId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "actionId": { + "name": "actionId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "source": { + "name": "source", + "type": "varchar", + "primaryKey": false, + "notNull": true + }, + "sort": { + "name": 
"sort", + "type": "varchar(32)", + "primaryKey": false, + "notNull": true, + "default": "''" + }, + "created": { + "name": "created", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + }, + "message": { + "name": "message", + "type": "text", + "primaryKey": false, + "notNull": true + } + }, + "indexes": { + "jobId_created_idx": { + "name": "jobId_created_idx", + "columns": [ + { + "expression": "jobId", + "isExpression": false, + "asc": true, + "nulls": "last" + }, + { + "expression": "created", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + } + }, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.oauthServiceClient": { + "name": "oauthServiceClient", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "is_system_managed": { + "name": "is_system_managed", + "type": "boolean", + "primaryKey": false, + "notNull": true, + "default": false + }, + "name": { + "name": "name", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "description": { + "name": "description", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "clientId": { + "name": "clientId", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "metadata": { + "name": "metadata", + "type": "jsonb", + "primaryKey": false, + "notNull": true + }, + "allowedAudiences": { + "name": "allowedAudiences", + "type": "jsonb", + "primaryKey": false, + "notNull": true + }, + "allowedScopes": { + "name": "allowedScopes", + "type": "jsonb", + "primaryKey": false, + "notNull": true + }, + "permissions": { + "name": "permissions", + "type": "jsonb", + "primaryKey": false, + "notNull": true + }, + "enabled": { + "name": 
"enabled", + "type": "boolean", + "primaryKey": false, + "notNull": true, + "default": true + }, + "expiresAt": { + "name": "expiresAt", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "createdAt": { + "name": "createdAt", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "oauthServiceClient_clientId_unique": { + "name": "oauthServiceClient_clientId_unique", + "nullsNotDistinct": false, + "columns": [ + "clientId" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.oauthSigningKey": { + "name": "oauthSigningKey", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "parent_id": { + "name": "parent_id", + "type": "uuid", + "primaryKey": false, + "notNull": false + }, + "child_id": { + "name": "child_id", + "type": "uuid", + "primaryKey": false, + "notNull": false + }, + "created_by_user_id": { + "name": "created_by_user_id", + "type": "uuid", + "primaryKey": false, + "notNull": false + }, + "status": { + "name": "status", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "alg": { + "name": "alg", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "use": { + "name": "use", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "private_key_encrypted": { + "name": "private_key_encrypted", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "public_key": { + "name": "public_key", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "expiresAt": { + "name": "expiresAt", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "renewsAt": { + "name": "renewsAt", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "createdAt": { + "name": 
"createdAt", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": {}, + "foreignKeys": { + "oauthSigningKey_parent_id_oauthSigningKey_id_fk": { + "name": "oauthSigningKey_parent_id_oauthSigningKey_id_fk", + "tableFrom": "oauthSigningKey", + "tableTo": "oauthSigningKey", + "columnsFrom": [ + "parent_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + }, + "oauthSigningKey_child_id_oauthSigningKey_id_fk": { + "name": "oauthSigningKey_child_id_oauthSigningKey_id_fk", + "tableFrom": "oauthSigningKey", + "tableTo": "oauthSigningKey", + "columnsFrom": [ + "child_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + }, + "oauthSigningKey_created_by_user_id_users_id_fk": { + "name": "oauthSigningKey_created_by_user_id_users_id_fk", + "tableFrom": "oauthSigningKey", + "tableTo": "users", + "columnsFrom": [ + "created_by_user_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.runners": { + "name": "runners", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "jobId": { + "name": "jobId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "jobVersionId": { + "name": "jobVersionId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "actionId": { + "name": "actionId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "environmentId": { + "name": "environmentId", + "type": "uuid", + "primaryKey": false, + "notNull": false + }, + "oauthServiceClientId": { + "name": "oauthServiceClientId", + "type": "uuid", + "primaryKey": false, + "notNull": false + }, + "properties": { + "name": "properties", + "type": "jsonb", + 
"primaryKey": false, + "notNull": false + }, + "createdAt": { + "name": "createdAt", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + }, + "readyAt": { + "name": "readyAt", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "closingAt": { + "name": "closingAt", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "closedAt": { + "name": "closedAt", + "type": "timestamp", + "primaryKey": false, + "notNull": false + } + }, + "indexes": {}, + "foreignKeys": { + "runners_jobId_jobs_id_fk": { + "name": "runners_jobId_jobs_id_fk", + "tableFrom": "runners", + "tableTo": "jobs", + "columnsFrom": [ + "jobId" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "runners_jobVersionId_job-versions_id_fk": { + "name": "runners_jobVersionId_job-versions_id_fk", + "tableFrom": "runners", + "tableTo": "job-versions", + "columnsFrom": [ + "jobVersionId" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "runners_actionId_actions_id_fk": { + "name": "runners_actionId_actions_id_fk", + "tableFrom": "runners", + "tableTo": "actions", + "columnsFrom": [ + "actionId" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "runners_environmentId_environments_id_fk": { + "name": "runners_environmentId_environments_id_fk", + "tableFrom": "runners", + "tableTo": "environments", + "columnsFrom": [ + "environmentId" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + }, + "runners_oauthServiceClientId_oauthServiceClient_id_fk": { + "name": "runners_oauthServiceClientId_oauthServiceClient_id_fk", + "tableFrom": "runners", + "tableTo": "oauthServiceClient", + "columnsFrom": [ + "oauthServiceClientId" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, 
+ "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.sessions": { + "name": "sessions", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "token": { + "name": "token", + "type": "varchar(50)", + "primaryKey": false, + "notNull": true + }, + "userId": { + "name": "userId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "status": { + "name": "status", + "type": "varchar", + "primaryKey": false, + "notNull": true, + "default": "'active'" + }, + "expires": { + "name": "expires", + "type": "timestamp", + "primaryKey": false, + "notNull": true + }, + "created": { + "name": "created", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": {}, + "foreignKeys": { + "sessions_userId_users_id_fk": { + "name": "sessions_userId_users_id_fk", + "tableFrom": "sessions", + "tableTo": "users", + "columnsFrom": [ + "userId" + ], + "columnsTo": [ + "id" + ], + "onDelete": "no action", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "sessions_token_unique": { + "name": "sessions_token_unique", + "nullsNotDistinct": false, + "columns": [ + "token" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.store": { + "name": "store", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "jobId": { + "name": "jobId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "storeKey": { + "name": "storeKey", + "type": "varchar(128)", + "primaryKey": false, + "notNull": true + }, + "storeValue": { + "name": "storeValue", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "expiry": { + "name": "expiry", + "type": "integer", + "primaryKey": false, + "notNull": false + }, + "modified": { + "name": 
"modified", + "type": "integer", + "primaryKey": false, + "notNull": true + }, + "created": { + "name": "created", + "type": "integer", + "primaryKey": false, + "notNull": true + } + }, + "indexes": {}, + "foreignKeys": { + "store_jobId_jobs_id_fk": { + "name": "store_jobId_jobs_id_fk", + "tableFrom": "store", + "tableTo": "jobs", + "columnsFrom": [ + "jobId" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "store_jobId_storeKey_unique": { + "name": "store_jobId_storeKey_unique", + "nullsNotDistinct": false, + "columns": [ + "jobId", + "storeKey" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.triggers": { + "name": "triggers", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "jobId": { + "name": "jobId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "jobVersionId": { + "name": "jobVersionId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "context": { + "name": "context", + "type": "jsonb", + "primaryKey": false, + "notNull": true + } + }, + "indexes": {}, + "foreignKeys": { + "triggers_jobId_jobs_id_fk": { + "name": "triggers_jobId_jobs_id_fk", + "tableFrom": "triggers", + "tableTo": "jobs", + "columnsFrom": [ + "jobId" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "triggers_jobVersionId_job-versions_id_fk": { + "name": "triggers_jobVersionId_job-versions_id_fk", + "tableFrom": "triggers", + "tableTo": "job-versions", + "columnsFrom": [ + "jobVersionId" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.users": { + "name": "users", + "schema": "", 
+ "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "username": { + "name": "username", + "type": "varchar", + "primaryKey": false, + "notNull": true + }, + "password": { + "name": "password", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "enabled": { + "name": "enabled", + "type": "boolean", + "primaryKey": false, + "notNull": true, + "default": true + }, + "permissions": { + "name": "permissions", + "type": "jsonb", + "primaryKey": false, + "notNull": true + }, + "created": { + "name": "created", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": { + "usernameUniqueIndex": { + "name": "usernameUniqueIndex", + "columns": [ + { + "expression": "lower(\"username\")", + "asc": true, + "isExpression": true, + "nulls": "last" + } + ], + "isUnique": true, + "concurrently": false, + "method": "btree", + "with": {} + } + }, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "users_username_unique": { + "name": "users_username_unique", + "nullsNotDistinct": false, + "columns": [ + "username" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + } + }, + "enums": {}, + "schemas": {}, + "sequences": {}, + "roles": {}, + "policies": {}, + "views": {}, + "_meta": { + "columns": {}, + "schemas": {}, + "tables": {} + } +} \ No newline at end of file diff --git a/packages/server/drizzle/meta/0013_snapshot.json b/packages/server/drizzle/meta/0013_snapshot.json new file mode 100644 index 0000000..6030be1 --- /dev/null +++ b/packages/server/drizzle/meta/0013_snapshot.json @@ -0,0 +1,1281 @@ +{ + "id": "1d0bee54-f2c4-4e9e-888d-3d34fca8085c", + "prevId": "e7d0b658-d417-4c3d-bcec-7418c7678082", + "version": "7", + "dialect": "postgresql", + "tables": { + "public.actions": { + "name": "actions", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", 
+ "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "jobId": { + "name": "jobId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "jobVersionId": { + "name": "jobVersionId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "runnerImage": { + "name": "runnerImage", + "type": "text", + "primaryKey": false, + "notNull": true, + "default": "'node24'" + }, + "runnerAsynchronous": { + "name": "runnerAsynchronous", + "type": "boolean", + "primaryKey": false, + "notNull": true, + "default": true + }, + "runnerMinCount": { + "name": "runnerMinCount", + "type": "integer", + "primaryKey": false, + "notNull": true, + "default": 1 + }, + "runnerMaxCount": { + "name": "runnerMaxCount", + "type": "integer", + "primaryKey": false, + "notNull": true, + "default": 16 + }, + "runnerTimeout": { + "name": "runnerTimeout", + "type": "integer", + "primaryKey": false, + "notNull": true, + "default": 60 + }, + "runnerMaxIdleAge": { + "name": "runnerMaxIdleAge", + "type": "integer", + "primaryKey": false, + "notNull": true, + "default": 0 + }, + "runnerMaxAge": { + "name": "runnerMaxAge", + "type": "integer", + "primaryKey": false, + "notNull": true, + "default": 900 + }, + "runnerMaxAgeHard": { + "name": "runnerMaxAgeHard", + "type": "integer", + "primaryKey": false, + "notNull": true, + "default": 960 + }, + "runnerDockerArguments": { + "name": "runnerDockerArguments", + "type": "jsonb", + "primaryKey": false, + "notNull": true, + "default": "'{}'::jsonb" + }, + "runnerMode": { + "name": "runnerMode", + "type": "text", + "primaryKey": false, + "notNull": false, + "default": "'standard'" + } + }, + "indexes": {}, + "foreignKeys": { + "actions_jobId_jobs_id_fk": { + "name": "actions_jobId_jobs_id_fk", + "tableFrom": "actions", + "tableTo": "jobs", + "columnsFrom": [ + "jobId" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "actions_jobVersionId_job-versions_id_fk": { + "name": 
"actions_jobVersionId_job-versions_id_fk", + "tableFrom": "actions", + "tableTo": "job-versions", + "columnsFrom": [ + "jobVersionId" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.apiTokens": { + "name": "apiTokens", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "token": { + "name": "token", + "type": "varchar(70)", + "primaryKey": false, + "notNull": true + }, + "userId": { + "name": "userId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "description": { + "name": "description", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "permissions": { + "name": "permissions", + "type": "jsonb", + "primaryKey": false, + "notNull": true + }, + "status": { + "name": "status", + "type": "varchar(16)", + "primaryKey": false, + "notNull": true, + "default": "'enabled'" + }, + "expires": { + "name": "expires", + "type": "timestamp", + "primaryKey": false, + "notNull": true + }, + "created": { + "name": "created", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": {}, + "foreignKeys": { + "apiTokens_userId_users_id_fk": { + "name": "apiTokens_userId_users_id_fk", + "tableFrom": "apiTokens", + "tableTo": "users", + "columnsFrom": [ + "userId" + ], + "columnsTo": [ + "id" + ], + "onDelete": "no action", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "apiTokens_token_unique": { + "name": "apiTokens_token_unique", + "nullsNotDistinct": false, + "columns": [ + "token" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.auditLog": { + "name": "auditLog", + "schema": "", + "columns": { + "id": { + "name": "id", + 
"type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "subject": { + "name": "subject", + "type": "jsonb", + "primaryKey": false, + "notNull": true + }, + "entry": { + "name": "entry", + "type": "jsonb", + "primaryKey": false, + "notNull": true + }, + "created": { + "name": "created", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.environments": { + "name": "environments", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "jobId": { + "name": "jobId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "context": { + "name": "context", + "type": "jsonb", + "primaryKey": false, + "notNull": true, + "default": "'{}'::jsonb" + }, + "modified": { + "name": "modified", + "type": "integer", + "primaryKey": false, + "notNull": true + } + }, + "indexes": {}, + "foreignKeys": { + "environments_jobId_jobs_id_fk": { + "name": "environments_jobId_jobs_id_fk", + "tableFrom": "environments", + "tableTo": "jobs", + "columnsFrom": [ + "jobId" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "environments_jobId_unique": { + "name": "environments_jobId_unique", + "nullsNotDistinct": false, + "columns": [ + "jobId" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.job-versions": { + "name": "job-versions", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "jobId": { + "name": "jobId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + 
"version": { + "name": "version", + "type": "varchar(32)", + "primaryKey": false, + "notNull": true + }, + "modified": { + "name": "modified", + "type": "integer", + "primaryKey": false, + "notNull": true + }, + "created": { + "name": "created", + "type": "integer", + "primaryKey": false, + "notNull": true + } + }, + "indexes": {}, + "foreignKeys": { + "job-versions_jobId_jobs_id_fk": { + "name": "job-versions_jobId_jobs_id_fk", + "tableFrom": "job-versions", + "tableTo": "jobs", + "columnsFrom": [ + "jobId" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "job-versions_jobId_version_unique": { + "name": "job-versions_jobId_version_unique", + "nullsNotDistinct": false, + "columns": [ + "jobId", + "version" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.jobs": { + "name": "jobs", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "jobName": { + "name": "jobName", + "type": "varchar(128)", + "primaryKey": false, + "notNull": true + }, + "description": { + "name": "description", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "jobVersionId": { + "name": "jobVersionId", + "type": "uuid", + "primaryKey": false, + "notNull": false + }, + "status": { + "name": "status", + "type": "varchar(16)", + "primaryKey": false, + "notNull": false, + "default": "'enabled'" + }, + "links": { + "name": "links", + "type": "jsonb", + "primaryKey": false, + "notNull": true, + "default": "'[]'::jsonb" + } + }, + "indexes": {}, + "foreignKeys": { + "jobs_jobVersionId_job-versions_id_fk": { + "name": "jobs_jobVersionId_job-versions_id_fk", + "tableFrom": "jobs", + "tableTo": "job-versions", + "columnsFrom": [ + "jobVersionId" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + } + }, + 
"compositePrimaryKeys": {}, + "uniqueConstraints": { + "jobs_jobName_unique": { + "name": "jobs_jobName_unique", + "nullsNotDistinct": false, + "columns": [ + "jobName" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.lock": { + "name": "lock", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "lockKey": { + "name": "lockKey", + "type": "varchar(256)", + "primaryKey": false, + "notNull": true + }, + "expires": { + "name": "expires", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "NOW() + INTERVAL '5 minutes'" + }, + "created": { + "name": "created", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "NOW()" + }, + "modified": { + "name": "modified", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "NOW()" + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "lock_lockKey_unique": { + "name": "lock_lockKey_unique", + "nullsNotDistinct": false, + "columns": [ + "lockKey" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.logs": { + "name": "logs", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "jobId": { + "name": "jobId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "actionId": { + "name": "actionId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "source": { + "name": "source", + "type": "varchar", + "primaryKey": false, + "notNull": true + }, + "sort": { + "name": "sort", + "type": "varchar(32)", + "primaryKey": false, + "notNull": true, + "default": "''" + }, + "created": { + "name": "created", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + }, + 
"message": { + "name": "message", + "type": "text", + "primaryKey": false, + "notNull": true + } + }, + "indexes": { + "jobId_created_idx": { + "name": "jobId_created_idx", + "columns": [ + { + "expression": "jobId", + "isExpression": false, + "asc": true, + "nulls": "last" + }, + { + "expression": "created", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "concurrently": false, + "method": "btree", + "with": {} + } + }, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.oauthServiceClient": { + "name": "oauthServiceClient", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "is_system_managed": { + "name": "is_system_managed", + "type": "boolean", + "primaryKey": false, + "notNull": true, + "default": false + }, + "name": { + "name": "name", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "description": { + "name": "description", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "clientId": { + "name": "clientId", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "metadata": { + "name": "metadata", + "type": "jsonb", + "primaryKey": false, + "notNull": true + }, + "allowedAudiences": { + "name": "allowedAudiences", + "type": "jsonb", + "primaryKey": false, + "notNull": true + }, + "allowedScopes": { + "name": "allowedScopes", + "type": "jsonb", + "primaryKey": false, + "notNull": true + }, + "permissions": { + "name": "permissions", + "type": "jsonb", + "primaryKey": false, + "notNull": true + }, + "enabled": { + "name": "enabled", + "type": "boolean", + "primaryKey": false, + "notNull": true, + "default": true + }, + "expiresAt": { + "name": "expiresAt", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "createdAt": { + 
"name": "createdAt", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "oauthServiceClient_clientId_unique": { + "name": "oauthServiceClient_clientId_unique", + "nullsNotDistinct": false, + "columns": [ + "clientId" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.oauthSigningKey": { + "name": "oauthSigningKey", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "parent_id": { + "name": "parent_id", + "type": "uuid", + "primaryKey": false, + "notNull": false + }, + "child_id": { + "name": "child_id", + "type": "uuid", + "primaryKey": false, + "notNull": false + }, + "created_by_user_id": { + "name": "created_by_user_id", + "type": "uuid", + "primaryKey": false, + "notNull": false + }, + "status": { + "name": "status", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "alg": { + "name": "alg", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "use": { + "name": "use", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "private_key_encrypted": { + "name": "private_key_encrypted", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "public_key": { + "name": "public_key", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "expiresAt": { + "name": "expiresAt", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "renewsAt": { + "name": "renewsAt", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "createdAt": { + "name": "createdAt", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": {}, + "foreignKeys": { + "oauthSigningKey_parent_id_oauthSigningKey_id_fk": { + "name": 
"oauthSigningKey_parent_id_oauthSigningKey_id_fk", + "tableFrom": "oauthSigningKey", + "tableTo": "oauthSigningKey", + "columnsFrom": [ + "parent_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + }, + "oauthSigningKey_child_id_oauthSigningKey_id_fk": { + "name": "oauthSigningKey_child_id_oauthSigningKey_id_fk", + "tableFrom": "oauthSigningKey", + "tableTo": "oauthSigningKey", + "columnsFrom": [ + "child_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + }, + "oauthSigningKey_created_by_user_id_users_id_fk": { + "name": "oauthSigningKey_created_by_user_id_users_id_fk", + "tableFrom": "oauthSigningKey", + "tableTo": "users", + "columnsFrom": [ + "created_by_user_id" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.runners": { + "name": "runners", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "status": { + "name": "status", + "type": "varchar(50)", + "primaryKey": false, + "notNull": true + }, + "jobId": { + "name": "jobId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "jobVersionId": { + "name": "jobVersionId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "actionId": { + "name": "actionId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "environmentId": { + "name": "environmentId", + "type": "uuid", + "primaryKey": false, + "notNull": false + }, + "oauthServiceClientId": { + "name": "oauthServiceClientId", + "type": "uuid", + "primaryKey": false, + "notNull": false + }, + "properties": { + "name": "properties", + "type": "jsonb", + "primaryKey": false, + "notNull": false + }, + "createdAt": { + "name": "createdAt", + "type": 
"timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + }, + "readyAt": { + "name": "readyAt", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "closingAt": { + "name": "closingAt", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "closedAt": { + "name": "closedAt", + "type": "timestamp", + "primaryKey": false, + "notNull": false + } + }, + "indexes": {}, + "foreignKeys": { + "runners_jobId_jobs_id_fk": { + "name": "runners_jobId_jobs_id_fk", + "tableFrom": "runners", + "tableTo": "jobs", + "columnsFrom": [ + "jobId" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "runners_jobVersionId_job-versions_id_fk": { + "name": "runners_jobVersionId_job-versions_id_fk", + "tableFrom": "runners", + "tableTo": "job-versions", + "columnsFrom": [ + "jobVersionId" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "runners_actionId_actions_id_fk": { + "name": "runners_actionId_actions_id_fk", + "tableFrom": "runners", + "tableTo": "actions", + "columnsFrom": [ + "actionId" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "runners_environmentId_environments_id_fk": { + "name": "runners_environmentId_environments_id_fk", + "tableFrom": "runners", + "tableTo": "environments", + "columnsFrom": [ + "environmentId" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + }, + "runners_oauthServiceClientId_oauthServiceClient_id_fk": { + "name": "runners_oauthServiceClientId_oauthServiceClient_id_fk", + "tableFrom": "runners", + "tableTo": "oauthServiceClient", + "columnsFrom": [ + "oauthServiceClientId" + ], + "columnsTo": [ + "id" + ], + "onDelete": "set null", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.sessions": { + "name": 
"sessions", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "token": { + "name": "token", + "type": "varchar(50)", + "primaryKey": false, + "notNull": true + }, + "userId": { + "name": "userId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "status": { + "name": "status", + "type": "varchar", + "primaryKey": false, + "notNull": true, + "default": "'active'" + }, + "expires": { + "name": "expires", + "type": "timestamp", + "primaryKey": false, + "notNull": true + }, + "created": { + "name": "created", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": {}, + "foreignKeys": { + "sessions_userId_users_id_fk": { + "name": "sessions_userId_users_id_fk", + "tableFrom": "sessions", + "tableTo": "users", + "columnsFrom": [ + "userId" + ], + "columnsTo": [ + "id" + ], + "onDelete": "no action", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "sessions_token_unique": { + "name": "sessions_token_unique", + "nullsNotDistinct": false, + "columns": [ + "token" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.store": { + "name": "store", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "jobId": { + "name": "jobId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "storeKey": { + "name": "storeKey", + "type": "varchar(128)", + "primaryKey": false, + "notNull": true + }, + "storeValue": { + "name": "storeValue", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "expiry": { + "name": "expiry", + "type": "integer", + "primaryKey": false, + "notNull": false + }, + "modified": { + "name": "modified", + "type": "integer", + "primaryKey": false, + "notNull": true + }, + "created": 
{ + "name": "created", + "type": "integer", + "primaryKey": false, + "notNull": true + } + }, + "indexes": {}, + "foreignKeys": { + "store_jobId_jobs_id_fk": { + "name": "store_jobId_jobs_id_fk", + "tableFrom": "store", + "tableTo": "jobs", + "columnsFrom": [ + "jobId" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "store_jobId_storeKey_unique": { + "name": "store_jobId_storeKey_unique", + "nullsNotDistinct": false, + "columns": [ + "jobId", + "storeKey" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.triggers": { + "name": "triggers", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "jobId": { + "name": "jobId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "jobVersionId": { + "name": "jobVersionId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "context": { + "name": "context", + "type": "jsonb", + "primaryKey": false, + "notNull": true + } + }, + "indexes": {}, + "foreignKeys": { + "triggers_jobId_jobs_id_fk": { + "name": "triggers_jobId_jobs_id_fk", + "tableFrom": "triggers", + "tableTo": "jobs", + "columnsFrom": [ + "jobId" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + }, + "triggers_jobVersionId_job-versions_id_fk": { + "name": "triggers_jobVersionId_job-versions_id_fk", + "tableFrom": "triggers", + "tableTo": "job-versions", + "columnsFrom": [ + "jobVersionId" + ], + "columnsTo": [ + "id" + ], + "onDelete": "cascade", + "onUpdate": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.users": { + "name": "users", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + 
"notNull": true, + "default": "gen_random_uuid()" + }, + "username": { + "name": "username", + "type": "varchar", + "primaryKey": false, + "notNull": true + }, + "password": { + "name": "password", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "enabled": { + "name": "enabled", + "type": "boolean", + "primaryKey": false, + "notNull": true, + "default": true + }, + "permissions": { + "name": "permissions", + "type": "jsonb", + "primaryKey": false, + "notNull": true + }, + "created": { + "name": "created", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": { + "usernameUniqueIndex": { + "name": "usernameUniqueIndex", + "columns": [ + { + "expression": "lower(\"username\")", + "asc": true, + "isExpression": true, + "nulls": "last" + } + ], + "isUnique": true, + "concurrently": false, + "method": "btree", + "with": {} + } + }, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "users_username_unique": { + "name": "users_username_unique", + "nullsNotDistinct": false, + "columns": [ + "username" + ] + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + } + }, + "enums": {}, + "schemas": {}, + "sequences": {}, + "roles": {}, + "policies": {}, + "views": {}, + "_meta": { + "columns": {}, + "schemas": {}, + "tables": {} + } +} \ No newline at end of file diff --git a/packages/server/drizzle/meta/0014_snapshot.json b/packages/server/drizzle/meta/0014_snapshot.json new file mode 100644 index 0000000..e94d149 --- /dev/null +++ b/packages/server/drizzle/meta/0014_snapshot.json @@ -0,0 +1,1281 @@ +{ + "id": "c8881913-3700-4715-ae27-d480080f2323", + "prevId": "1d0bee54-f2c4-4e9e-888d-3d34fca8085c", + "version": "7", + "dialect": "postgresql", + "tables": { + "public.actions": { + "name": "actions", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + 
"jobId": { + "name": "jobId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "jobVersionId": { + "name": "jobVersionId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "runnerImage": { + "name": "runnerImage", + "type": "text", + "primaryKey": false, + "notNull": true, + "default": "'node24'" + }, + "runnerAsynchronous": { + "name": "runnerAsynchronous", + "type": "boolean", + "primaryKey": false, + "notNull": true, + "default": true + }, + "runnerMinCount": { + "name": "runnerMinCount", + "type": "integer", + "primaryKey": false, + "notNull": true, + "default": 1 + }, + "runnerMaxCount": { + "name": "runnerMaxCount", + "type": "integer", + "primaryKey": false, + "notNull": true, + "default": 16 + }, + "runnerTimeout": { + "name": "runnerTimeout", + "type": "integer", + "primaryKey": false, + "notNull": true, + "default": 60 + }, + "runnerMaxIdleAge": { + "name": "runnerMaxIdleAge", + "type": "integer", + "primaryKey": false, + "notNull": true, + "default": 0 + }, + "runnerMaxAge": { + "name": "runnerMaxAge", + "type": "integer", + "primaryKey": false, + "notNull": true, + "default": 900 + }, + "runnerMaxAgeHard": { + "name": "runnerMaxAgeHard", + "type": "integer", + "primaryKey": false, + "notNull": true, + "default": 960 + }, + "runnerDockerArguments": { + "name": "runnerDockerArguments", + "type": "jsonb", + "primaryKey": false, + "notNull": true, + "default": "'{}'::jsonb" + }, + "runnerMode": { + "name": "runnerMode", + "type": "text", + "primaryKey": false, + "notNull": false, + "default": "'standard'" + } + }, + "indexes": {}, + "foreignKeys": { + "actions_jobId_jobs_id_fk": { + "name": "actions_jobId_jobs_id_fk", + "tableFrom": "actions", + "columnsFrom": [ + "jobId" + ], + "tableTo": "jobs", + "columnsTo": [ + "id" + ], + "onUpdate": "no action", + "onDelete": "cascade" + }, + "actions_jobVersionId_job-versions_id_fk": { + "name": "actions_jobVersionId_job-versions_id_fk", + "tableFrom": "actions", + "columnsFrom": [ 
+ "jobVersionId" + ], + "tableTo": "job-versions", + "columnsTo": [ + "id" + ], + "onUpdate": "no action", + "onDelete": "cascade" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.apiTokens": { + "name": "apiTokens", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "token": { + "name": "token", + "type": "varchar(70)", + "primaryKey": false, + "notNull": true + }, + "userId": { + "name": "userId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "description": { + "name": "description", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "permissions": { + "name": "permissions", + "type": "jsonb", + "primaryKey": false, + "notNull": true + }, + "status": { + "name": "status", + "type": "varchar(16)", + "primaryKey": false, + "notNull": true, + "default": "'enabled'" + }, + "expires": { + "name": "expires", + "type": "timestamp", + "primaryKey": false, + "notNull": true + }, + "created": { + "name": "created", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": {}, + "foreignKeys": { + "apiTokens_userId_users_id_fk": { + "name": "apiTokens_userId_users_id_fk", + "tableFrom": "apiTokens", + "columnsFrom": [ + "userId" + ], + "tableTo": "users", + "columnsTo": [ + "id" + ], + "onUpdate": "no action", + "onDelete": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "apiTokens_token_unique": { + "name": "apiTokens_token_unique", + "columns": [ + "token" + ], + "nullsNotDistinct": false + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.auditLog": { + "name": "auditLog", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": 
"gen_random_uuid()" + }, + "subject": { + "name": "subject", + "type": "jsonb", + "primaryKey": false, + "notNull": true + }, + "entry": { + "name": "entry", + "type": "jsonb", + "primaryKey": false, + "notNull": true + }, + "created": { + "name": "created", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.environments": { + "name": "environments", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "jobId": { + "name": "jobId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "context": { + "name": "context", + "type": "jsonb", + "primaryKey": false, + "notNull": true, + "default": "'{}'::jsonb" + }, + "modified": { + "name": "modified", + "type": "integer", + "primaryKey": false, + "notNull": true + } + }, + "indexes": {}, + "foreignKeys": { + "environments_jobId_jobs_id_fk": { + "name": "environments_jobId_jobs_id_fk", + "tableFrom": "environments", + "columnsFrom": [ + "jobId" + ], + "tableTo": "jobs", + "columnsTo": [ + "id" + ], + "onUpdate": "no action", + "onDelete": "cascade" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "environments_jobId_unique": { + "name": "environments_jobId_unique", + "columns": [ + "jobId" + ], + "nullsNotDistinct": false + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.job-versions": { + "name": "job-versions", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "jobId": { + "name": "jobId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "version": { + "name": "version", + "type": "varchar(32)", + 
"primaryKey": false, + "notNull": true + }, + "modified": { + "name": "modified", + "type": "integer", + "primaryKey": false, + "notNull": true + }, + "created": { + "name": "created", + "type": "integer", + "primaryKey": false, + "notNull": true + } + }, + "indexes": {}, + "foreignKeys": { + "job-versions_jobId_jobs_id_fk": { + "name": "job-versions_jobId_jobs_id_fk", + "tableFrom": "job-versions", + "columnsFrom": [ + "jobId" + ], + "tableTo": "jobs", + "columnsTo": [ + "id" + ], + "onUpdate": "no action", + "onDelete": "cascade" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "job-versions_jobId_version_unique": { + "name": "job-versions_jobId_version_unique", + "columns": [ + "jobId", + "version" + ], + "nullsNotDistinct": false + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.jobs": { + "name": "jobs", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "jobName": { + "name": "jobName", + "type": "varchar(128)", + "primaryKey": false, + "notNull": true + }, + "description": { + "name": "description", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "jobVersionId": { + "name": "jobVersionId", + "type": "uuid", + "primaryKey": false, + "notNull": false + }, + "status": { + "name": "status", + "type": "varchar(16)", + "primaryKey": false, + "notNull": false, + "default": "'enabled'" + }, + "links": { + "name": "links", + "type": "jsonb", + "primaryKey": false, + "notNull": true, + "default": "'[]'::jsonb" + } + }, + "indexes": {}, + "foreignKeys": { + "jobs_jobVersionId_job-versions_id_fk": { + "name": "jobs_jobVersionId_job-versions_id_fk", + "tableFrom": "jobs", + "columnsFrom": [ + "jobVersionId" + ], + "tableTo": "job-versions", + "columnsTo": [ + "id" + ], + "onUpdate": "no action", + "onDelete": "set null" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + 
"jobs_jobName_unique": { + "name": "jobs_jobName_unique", + "columns": [ + "jobName" + ], + "nullsNotDistinct": false + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.lock": { + "name": "lock", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "lockKey": { + "name": "lockKey", + "type": "varchar(256)", + "primaryKey": false, + "notNull": true + }, + "expires": { + "name": "expires", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "NOW() + INTERVAL '5 minutes'" + }, + "created": { + "name": "created", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "NOW()" + }, + "modified": { + "name": "modified", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "NOW()" + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "lock_lockKey_unique": { + "name": "lock_lockKey_unique", + "columns": [ + "lockKey" + ], + "nullsNotDistinct": false + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.logs": { + "name": "logs", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "jobId": { + "name": "jobId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "actionId": { + "name": "actionId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "source": { + "name": "source", + "type": "varchar", + "primaryKey": false, + "notNull": true + }, + "sort": { + "name": "sort", + "type": "varchar(32)", + "primaryKey": false, + "notNull": true, + "default": "''" + }, + "created": { + "name": "created", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + }, + "message": { + "name": "message", + "type": "text", + 
"primaryKey": false, + "notNull": true + } + }, + "indexes": { + "jobId_created_idx": { + "name": "jobId_created_idx", + "columns": [ + { + "expression": "jobId", + "isExpression": false, + "asc": true, + "nulls": "last" + }, + { + "expression": "created", + "isExpression": false, + "asc": true, + "nulls": "last" + } + ], + "isUnique": false, + "with": {}, + "method": "btree", + "concurrently": false + } + }, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.oauthServiceClient": { + "name": "oauthServiceClient", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "is_system_managed": { + "name": "is_system_managed", + "type": "boolean", + "primaryKey": false, + "notNull": true, + "default": false + }, + "name": { + "name": "name", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "description": { + "name": "description", + "type": "text", + "primaryKey": false, + "notNull": false + }, + "clientId": { + "name": "clientId", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "metadata": { + "name": "metadata", + "type": "jsonb", + "primaryKey": false, + "notNull": true + }, + "allowedAudiences": { + "name": "allowedAudiences", + "type": "jsonb", + "primaryKey": false, + "notNull": true + }, + "allowedScopes": { + "name": "allowedScopes", + "type": "jsonb", + "primaryKey": false, + "notNull": true + }, + "permissions": { + "name": "permissions", + "type": "jsonb", + "primaryKey": false, + "notNull": true + }, + "enabled": { + "name": "enabled", + "type": "boolean", + "primaryKey": false, + "notNull": true, + "default": true + }, + "expiresAt": { + "name": "expiresAt", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "createdAt": { + "name": "createdAt", + "type": "timestamp", + 
"primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "oauthServiceClient_clientId_unique": { + "name": "oauthServiceClient_clientId_unique", + "columns": [ + "clientId" + ], + "nullsNotDistinct": false + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.oauthSigningKey": { + "name": "oauthSigningKey", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "parent_id": { + "name": "parent_id", + "type": "uuid", + "primaryKey": false, + "notNull": false + }, + "child_id": { + "name": "child_id", + "type": "uuid", + "primaryKey": false, + "notNull": false + }, + "created_by_user_id": { + "name": "created_by_user_id", + "type": "uuid", + "primaryKey": false, + "notNull": false + }, + "status": { + "name": "status", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "alg": { + "name": "alg", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "use": { + "name": "use", + "type": "varchar(255)", + "primaryKey": false, + "notNull": true + }, + "private_key_encrypted": { + "name": "private_key_encrypted", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "public_key": { + "name": "public_key", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "expiresAt": { + "name": "expiresAt", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "renewsAt": { + "name": "renewsAt", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "createdAt": { + "name": "createdAt", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": {}, + "foreignKeys": { + "oauthSigningKey_parent_id_oauthSigningKey_id_fk": { + "name": "oauthSigningKey_parent_id_oauthSigningKey_id_fk", + "tableFrom": 
"oauthSigningKey", + "columnsFrom": [ + "parent_id" + ], + "tableTo": "oauthSigningKey", + "columnsTo": [ + "id" + ], + "onUpdate": "no action", + "onDelete": "set null" + }, + "oauthSigningKey_child_id_oauthSigningKey_id_fk": { + "name": "oauthSigningKey_child_id_oauthSigningKey_id_fk", + "tableFrom": "oauthSigningKey", + "columnsFrom": [ + "child_id" + ], + "tableTo": "oauthSigningKey", + "columnsTo": [ + "id" + ], + "onUpdate": "no action", + "onDelete": "set null" + }, + "oauthSigningKey_created_by_user_id_users_id_fk": { + "name": "oauthSigningKey_created_by_user_id_users_id_fk", + "tableFrom": "oauthSigningKey", + "columnsFrom": [ + "created_by_user_id" + ], + "tableTo": "users", + "columnsTo": [ + "id" + ], + "onUpdate": "no action", + "onDelete": "set null" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.runners": { + "name": "runners", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "status": { + "name": "status", + "type": "varchar(50)", + "primaryKey": false, + "notNull": true + }, + "jobId": { + "name": "jobId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "jobVersionId": { + "name": "jobVersionId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "actionId": { + "name": "actionId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "environmentId": { + "name": "environmentId", + "type": "uuid", + "primaryKey": false, + "notNull": false + }, + "oauthServiceClientId": { + "name": "oauthServiceClientId", + "type": "uuid", + "primaryKey": false, + "notNull": false + }, + "properties": { + "name": "properties", + "type": "jsonb", + "primaryKey": false, + "notNull": false + }, + "createdAt": { + "name": "createdAt", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" 
+ }, + "readyAt": { + "name": "readyAt", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "closingAt": { + "name": "closingAt", + "type": "timestamp", + "primaryKey": false, + "notNull": false + }, + "closedAt": { + "name": "closedAt", + "type": "timestamp", + "primaryKey": false, + "notNull": false + } + }, + "indexes": {}, + "foreignKeys": { + "runners_jobId_jobs_id_fk": { + "name": "runners_jobId_jobs_id_fk", + "tableFrom": "runners", + "columnsFrom": [ + "jobId" + ], + "tableTo": "jobs", + "columnsTo": [ + "id" + ], + "onUpdate": "no action", + "onDelete": "cascade" + }, + "runners_jobVersionId_job-versions_id_fk": { + "name": "runners_jobVersionId_job-versions_id_fk", + "tableFrom": "runners", + "columnsFrom": [ + "jobVersionId" + ], + "tableTo": "job-versions", + "columnsTo": [ + "id" + ], + "onUpdate": "no action", + "onDelete": "cascade" + }, + "runners_actionId_actions_id_fk": { + "name": "runners_actionId_actions_id_fk", + "tableFrom": "runners", + "columnsFrom": [ + "actionId" + ], + "tableTo": "actions", + "columnsTo": [ + "id" + ], + "onUpdate": "no action", + "onDelete": "cascade" + }, + "runners_environmentId_environments_id_fk": { + "name": "runners_environmentId_environments_id_fk", + "tableFrom": "runners", + "columnsFrom": [ + "environmentId" + ], + "tableTo": "environments", + "columnsTo": [ + "id" + ], + "onUpdate": "no action", + "onDelete": "set null" + }, + "runners_oauthServiceClientId_oauthServiceClient_id_fk": { + "name": "runners_oauthServiceClientId_oauthServiceClient_id_fk", + "tableFrom": "runners", + "columnsFrom": [ + "oauthServiceClientId" + ], + "tableTo": "oauthServiceClient", + "columnsTo": [ + "id" + ], + "onUpdate": "no action", + "onDelete": "set null" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.sessions": { + "name": "sessions", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": 
"uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "token": { + "name": "token", + "type": "varchar(50)", + "primaryKey": false, + "notNull": true + }, + "userId": { + "name": "userId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "status": { + "name": "status", + "type": "varchar", + "primaryKey": false, + "notNull": true, + "default": "'active'" + }, + "expires": { + "name": "expires", + "type": "timestamp", + "primaryKey": false, + "notNull": true + }, + "created": { + "name": "created", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": {}, + "foreignKeys": { + "sessions_userId_users_id_fk": { + "name": "sessions_userId_users_id_fk", + "tableFrom": "sessions", + "columnsFrom": [ + "userId" + ], + "tableTo": "users", + "columnsTo": [ + "id" + ], + "onUpdate": "no action", + "onDelete": "no action" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "sessions_token_unique": { + "name": "sessions_token_unique", + "columns": [ + "token" + ], + "nullsNotDistinct": false + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.store": { + "name": "store", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "jobId": { + "name": "jobId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "storeKey": { + "name": "storeKey", + "type": "varchar(128)", + "primaryKey": false, + "notNull": true + }, + "storeValue": { + "name": "storeValue", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "expiry": { + "name": "expiry", + "type": "integer", + "primaryKey": false, + "notNull": false + }, + "modified": { + "name": "modified", + "type": "integer", + "primaryKey": false, + "notNull": true + }, + "created": { + "name": "created", + "type": "integer", + "primaryKey": false, + 
"notNull": true + } + }, + "indexes": {}, + "foreignKeys": { + "store_jobId_jobs_id_fk": { + "name": "store_jobId_jobs_id_fk", + "tableFrom": "store", + "columnsFrom": [ + "jobId" + ], + "tableTo": "jobs", + "columnsTo": [ + "id" + ], + "onUpdate": "no action", + "onDelete": "cascade" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "store_jobId_storeKey_unique": { + "name": "store_jobId_storeKey_unique", + "columns": [ + "jobId", + "storeKey" + ], + "nullsNotDistinct": false + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.triggers": { + "name": "triggers", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "jobId": { + "name": "jobId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "jobVersionId": { + "name": "jobVersionId", + "type": "uuid", + "primaryKey": false, + "notNull": true + }, + "context": { + "name": "context", + "type": "jsonb", + "primaryKey": false, + "notNull": true + } + }, + "indexes": {}, + "foreignKeys": { + "triggers_jobId_jobs_id_fk": { + "name": "triggers_jobId_jobs_id_fk", + "tableFrom": "triggers", + "columnsFrom": [ + "jobId" + ], + "tableTo": "jobs", + "columnsTo": [ + "id" + ], + "onUpdate": "no action", + "onDelete": "cascade" + }, + "triggers_jobVersionId_job-versions_id_fk": { + "name": "triggers_jobVersionId_job-versions_id_fk", + "tableFrom": "triggers", + "columnsFrom": [ + "jobVersionId" + ], + "tableTo": "job-versions", + "columnsTo": [ + "id" + ], + "onUpdate": "no action", + "onDelete": "cascade" + } + }, + "compositePrimaryKeys": {}, + "uniqueConstraints": {}, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + }, + "public.users": { + "name": "users", + "schema": "", + "columns": { + "id": { + "name": "id", + "type": "uuid", + "primaryKey": true, + "notNull": true, + "default": "gen_random_uuid()" + }, + "username": { + 
"name": "username", + "type": "varchar", + "primaryKey": false, + "notNull": true + }, + "password": { + "name": "password", + "type": "text", + "primaryKey": false, + "notNull": true + }, + "enabled": { + "name": "enabled", + "type": "boolean", + "primaryKey": false, + "notNull": true, + "default": true + }, + "permissions": { + "name": "permissions", + "type": "jsonb", + "primaryKey": false, + "notNull": true + }, + "created": { + "name": "created", + "type": "timestamp", + "primaryKey": false, + "notNull": true, + "default": "now()" + } + }, + "indexes": { + "usernameUniqueIndex": { + "name": "usernameUniqueIndex", + "columns": [ + { + "expression": "lower(\"username\")", + "isExpression": true, + "asc": true, + "nulls": "last" + } + ], + "isUnique": true, + "with": {}, + "method": "btree", + "concurrently": false + } + }, + "foreignKeys": {}, + "compositePrimaryKeys": {}, + "uniqueConstraints": { + "users_username_unique": { + "name": "users_username_unique", + "columns": [ + "username" + ], + "nullsNotDistinct": false + } + }, + "policies": {}, + "checkConstraints": {}, + "isRLSEnabled": false + } + }, + "enums": {}, + "schemas": {}, + "views": {}, + "sequences": {}, + "roles": {}, + "policies": {}, + "_meta": { + "columns": {}, + "schemas": {}, + "tables": {} + } +} \ No newline at end of file diff --git a/packages/server/drizzle/meta/_journal.json b/packages/server/drizzle/meta/_journal.json index 6e78cff..83977b9 100644 --- a/packages/server/drizzle/meta/_journal.json +++ b/packages/server/drizzle/meta/_journal.json @@ -71,6 +71,41 @@ "when": 1756893257221, "tag": "0009_short_zuras", "breakpoints": true + }, + { + "idx": 10, + "version": "7", + "when": 1770445177561, + "tag": "0010_milky_microbe", + "breakpoints": true + }, + { + "idx": 11, + "version": "7", + "when": 1770528413315, + "tag": "0011_military_malice", + "breakpoints": true + }, + { + "idx": 12, + "version": "7", + "when": 1772514717318, + "tag": "0012_nifty_plazm", + "breakpoints": true + }, + 
{ + "idx": 13, + "version": "7", + "when": 1772615277681, + "tag": "0013_thankful_thor", + "breakpoints": true + }, + { + "idx": 14, + "version": "7", + "when": 1772615344788, + "tag": "0014_demonic_ronan", + "breakpoints": true } ] } \ No newline at end of file diff --git a/packages/server/package.json b/packages/server/package.json index 05d3314..e5154d8 100644 --- a/packages/server/package.json +++ b/packages/server/package.json @@ -10,21 +10,31 @@ "start": "node ./dist/index.js", "dev": "pnpm build && node --env-file-if-exists=.env ./dist/index.js", "drizzle:migrate": "pnpm build && pnpm drizzle-kit migrate", - "drizzle:generate": "pnpm build && pnpm drizzle-kit generate" + "drizzle:generate": "pnpm build && pnpm drizzle-kit generate", + "drizzle:generate:custom": "pnpm build && pnpm drizzle-kit generate --custom" }, "keywords": [], "author": "Eithan Hersey-Tuit", "license": "MIT", "dependencies": { + "@grpc/grpc-js": "^1.14.3", + "@grpc/proto-loader": "^0.8.0", "@hono/node-server": "^1.13.7", + "@jobber/common": "workspace:*", + "@jobber/grpc": "workspace:*", "@jobber/tcp-frame-socket": "workspace:*", "bcryptjs": "^3.0.2", "cron": "^3.2.1", "drizzle-orm": "^0.38.2", "hono": "^4.6.11", + "jose": "^6.1.3", + "long": "^5.3.2", "mqtt": "^5.10.3", + "nice-grpc": "^2.1.14", + "nice-grpc-server-reflection": "^3.0.3", "pg": "^8.13.1", "prom-client": "^15.1.3", + "protobufjs": "^8.0.0", "reflect-metadata": "^0.2.2", "semver": "^7.6.3", "tsyringe": "^4.10.0", @@ -36,7 +46,9 @@ "@types/pg": "^8.11.10", "@types/semver": "^7.5.8", "drizzle-kit": "^0.30.1", + "grpc-tools": "^1.13.1", "rimraf": "^5.0.10", + "ts-proto": "^2.10.1", "tsc-alias": "^1.8.10", "typescript": "^5.6.3", "vitest": "^3.2.4" diff --git a/packages/server/src/bouncer.ts b/packages/server/src/bouncer.ts index f8fa5a1..211329b 100644 --- a/packages/server/src/bouncer.ts +++ b/packages/server/src/bouncer.ts @@ -1,14 +1,10 @@ -import assert from "node:assert"; -import { ApiTokensTableType } from 
"./db/schema/api-tokens.js"; -import { SessionsTableType } from "./db/schema/sessions.js"; -import { UsersTableType } from "./db/schema/users.js"; import { - canPerformAction, - JobberPermissionAction, - JobberPermissions, -} from "./permissions.js"; -import { HTTPException } from "hono/http-exception"; -import { JobsTableType } from "./db/schema/jobs.js"; + ApiTokensTableType, + SessionsTableType, + UsersTableType, +} from "./db/types.js"; +import { JobberPermissions } from "@jobber/common/permissions.js"; +import { BouncerBase } from "@jobber/common/bouncer-base.js"; type BouncerOptions = | { @@ -32,194 +28,14 @@ type BouncerOptions = * Bouncer class is a abstraction for nicely handling permission checks and access * control. It will be loaded into the Hono app context on all requests. */ -export class Bouncer { +export class Bouncer extends BouncerBase { private options: BouncerOptions; constructor(options: BouncerOptions) { + super(options.permissions); this.options = options; } - public can(resource: string, action: JobberPermissionAction): boolean { - return canPerformAction(this.options.permissions, resource, action); - } - - public canOrFail(resource: string, action: JobberPermissionAction): void { - if (!this.can(resource, action)) { - throw new HTTPException(403, { - message: "Insufficient Permissions", - }); - } - } - - public canRead(resource: string): boolean { - return this.can(resource, "read"); - } - - public canWrite(resource: string): boolean { - return this.can(resource, "write"); - } - - public canDelete(resource: string): boolean { - return this.can(resource, "delete"); - } - - public canReadJob(job: { id: string }): boolean { - return this.can(`job/${job.id}`, "read"); - } - - public canWriteJob(job: { id: string }): boolean { - return this.can(`job/${job.id}`, "write"); - } - - public canDeleteJob(job: { id: string }): boolean { - return this.can(`job/${job.id}`, "delete"); - } - - public canReadJobEnvironment( - environment: { jobId: string 
}, - name: string - ): boolean { - return this.can(`job/${environment.jobId}/environment/${name}`, "read"); - } - - public canWriteJobEnvironment( - environment: { jobId: string }, - name: string - ): boolean { - return this.can(`job/${environment.jobId}/environment/${name}`, "write"); - } - - public canDeleteJobEnvironment( - environment: { jobId: string }, - name: string - ): boolean { - return this.can(`job/${environment.jobId}/environment/${name}`, "delete"); - } - - public canReadJobAction(action: { jobId: string; id: string }): boolean { - return this.can(`job/${action.jobId}/actions/${action.id}`, "read"); - } - - public canWriteJobAction(action: { jobId: string; id: string }): boolean { - return this.can(`job/${action.jobId}/actions/${action.id}`, "write"); - } - - public canDeleteJobAction(action: { jobId: string; id: string }): boolean { - return this.can(`job/${action.jobId}/actions/${action.id}`, "delete"); - } - - public canReadJobRunners(job: { id: string }): boolean { - return this.can(`job/${job.id}/runners`, "read"); - } - - public canWriteJobRunners(job: { id: string }): boolean { - return this.can(`job/${job.id}/runners`, "write"); - } - - public canDeleteJobRunners(job: { id: string }): boolean { - return this.can(`job/${job.id}/runners`, "delete"); - } - - public canReadJobStoreItem(item: { jobId: string; id: string }): boolean { - return this.can(`job/${item.jobId}/store/${item.id}`, "read"); - } - - public canWriteJobStoreItem(item: { jobId: string; id: string }): boolean { - return this.can(`job/${item.jobId}/store/${item.id}`, "write"); - } - - public canDeleteJobStoreItem(item: { jobId: string; id: string }): boolean { - return this.can(`job/${item.jobId}/store/${item.id}`, "delete"); - } - - public canReadJobTriggers(trigger: { jobId: string; id: string }): boolean { - return this.can(`job/${trigger.jobId}/triggers/${trigger.id}`, "read"); - } - - public canWriteJobTriggers(trigger: { jobId: string; id: string }): boolean { - return 
this.can(`job/${trigger.jobId}/triggers/${trigger.id}`, "write"); - } - - public canDeleteJobTriggers(trigger: { jobId: string; id: string }): boolean { - return this.can(`job/${trigger.jobId}/triggers/${trigger.id}`, "delete"); - } - - public canReadJobVersion(version: { jobId: string; id: string }): boolean { - return this.can(`job/${version.jobId}/versions/${version.id}`, "read"); - } - - public canJobPublish(): boolean { - return this.can(`job/-/publish`, "write"); - } - - public canReadApiTokenGenerally(): boolean { - return this.can(`api-tokens`, "read"); - } - - public canWriteApiTokenGenerally(): boolean { - return this.can(`api-tokens`, "write"); - } - - public canDeleteApiTokenGenerally(): boolean { - return this.can(`api-tokens`, "delete"); - } - - public canReadApiToken(token: { id: string }): boolean { - return this.can(`api-tokens/${token.id}`, "read"); - } - - public canWriteApiToken(token: { id: string }): boolean { - return this.can(`api-tokens/${token.id}`, "write"); - } - - public canDeleteApiToken(token: { id: string }): boolean { - return this.can(`api-tokens/${token.id}`, "delete"); - } - - public canReadSystemMetricsPrometheus(): boolean { - return this.can(`system/metrics/prometheus`, "read"); - } - - public canReadSystemMetricsOverview(): boolean { - return this.can(`system/metrics/overview`, "read"); - } - - public canReadUserGenerally(): boolean { - return this.can(`users`, "read"); - } - - public canWriteUserGenerally(): boolean { - return this.can(`users`, "write"); - } - - public canDeleteUserGenerally(): boolean { - return this.can(`users`, "delete"); - } - - public canReadUser(user: { id: string }): boolean { - return this.can(`users/${user.id}`, "read"); - } - - public canWriteUser(user: { id: string }): boolean { - return this.can(`users/${user.id}`, "write"); - } - - public canDeleteUser(user: { id: string }): boolean { - return this.can(`users/${user.id}`, "delete"); - } - - public canWriteUserUsername(user: { id: string }): 
boolean { - return this.can(`users/${user.id}/username`, "write"); - } - - public canWriteUserPassword(user: { id: string }): boolean { - return this.can(`users/${user.id}/password`, "write"); - } - - public canWriteUserPermissions(user: { id: string }): boolean { - return this.can(`users/${user.id}/permissions`, "write"); - } - public get type() { return this.options.type; } diff --git a/packages/server/src/config.ts b/packages/server/src/config.ts index e0be2ce..069a907 100644 --- a/packages/server/src/config.ts +++ b/packages/server/src/config.ts @@ -1,7 +1,10 @@ import { hostname } from "os"; import { z } from "zod"; +import { getSeedSchema } from "./seeding/index.js"; export const ConfigurationOptionsSchema = z.object({ + SECRET_PASSPHRASE: z.string().min(32).max(512), + DATABASE_URL: z.string(), DATABASE_BACKUP_SCHEDULE: z.string().default("0 0 * * *"), DATABASE_BACKUP_SCHEDULE_TIMEZONE: z.string().default("UTC"), @@ -12,6 +15,14 @@ export const ConfigurationOptionsSchema = z.object({ STARTUP_USERNAME: z.string().optional().default("admin"), STARTUP_PASSWORD: z.string().optional().default("Password1!"), + SEED: z + .string() + .default("{}") + .transform((val) => { + return JSON.parse(val); + }) + .pipe(getSeedSchema()), + AUTH_PUBLIC_REGISTRATION_ENABLED: z .string() .transform((val) => val.toLowerCase() === "true") @@ -23,6 +34,25 @@ export const ConfigurationOptionsSchema = z.object({ .pipe(z.boolean()) .default("true"), + ALLOWED_HOSTS: z + .string() + .default("") + .transform((val) => + val.split(",").map((host) => host.trim().toLowerCase()), + ), + + OAUTH_ISSUER: z.string().default("http://localhost:5211"), + OAUTH_SIGNING_KEY_ROTATE_IN_DAYS: z.coerce.number().default(5), // Rotate X days after creation + OAUTH_SIGNING_KEY_EXPIRE_IN_DAYS: z.coerce.number().default(30), // Expire X days after creation + OAUTH_MANAGEMENT_ALLOW_MANUAL_UPLOAD: z + .string() + .transform((val) => val.toLowerCase() === "true") + .pipe(z.boolean()) + .default("false") + 
.describe( + "Determines whether or not you can manually upload signing keys through the API. This includes frontend and backend.", + ), + DEBUG_HTTP: z .string() .transform((val) => val.toLowerCase() === "true") @@ -35,8 +65,12 @@ export const ConfigurationOptionsSchema = z.object({ .pipe(z.boolean()) .default("false"), - MANAGER_PORT: z.coerce.number().default(5211), - MANAGER_HOST: z.string().default(hostname()), + API_PORT: z.coerce.number().default(3000), + + // GRPC specific config + MANAGER_GRPC_PORT: z.coerce.number().default(5212), + MANAGER_GRPC_HOST: z.string().default(hostname()), // For the runners + MANAGER_GRPC_BIND_ADDRESS: z.string().default("0.0.0.0"), RUNNER_IMAGE_NODE24_URL: z .string() @@ -54,7 +88,7 @@ export const ConfigurationOptionsSchema = z.object({ RUNNER_ALLOW_DOCKER_ARGUMENT_TYPES: z .string() - .transform((val) => val.split(",").map((type) => type.trim().toLowerCase())) + .transform((val) => val.split(",").map((type) => type.trim())) .pipe( z.array( z.enum([ @@ -64,8 +98,8 @@ export const ConfigurationOptionsSchema = z.object({ "labels", "memoryLimit", "directPassthroughArguments", - ]) - ) + ]), + ), ) .default(""), @@ -108,7 +142,7 @@ export const ConfigurationOptionsSchema = z.object({ .min(1) .default(15) .describe( - "The step in seconds for the Prometheus query. Default is 15 seconds." + "The step in seconds for the Prometheus query. 
Default is 15 seconds.", ), }); @@ -119,7 +153,7 @@ export type ConfigurationOptionsSchemaType = z.infer< export type ConfigurationOptions = keyof ConfigurationOptionsSchemaType; export const getConfigOption = ( - option: T + option: T, ): ConfigurationOptionsSchemaType[T] => { const schema = ConfigurationOptionsSchema.shape[option]; diff --git a/packages/server/src/constants.ts b/packages/server/src/constants.ts index 2d96dd9..4931b85 100644 --- a/packages/server/src/constants.ts +++ b/packages/server/src/constants.ts @@ -2,6 +2,6 @@ import path from "path"; export const PATH_CONFIG = path.join(process.cwd(), "./config/"); -export const ENTRYPOINT_NODE = "jobber-entrypoint.js"; +export const ENTRYPOINT_NODE = "jobber-start.js"; export const USERNAME_ANONYMOUS = "anonymous"; diff --git a/packages/server/src/container/abstract.ts b/packages/server/src/container/abstract.ts new file mode 100644 index 0000000..4347e67 --- /dev/null +++ b/packages/server/src/container/abstract.ts @@ -0,0 +1,25 @@ +export type ContainerStart = { + image: string; +}; + +export type ContainerInfoBasic = { + id: string; + image: string; + status: string; +}; + +export type ContainerInfo = ContainerInfoBasic & {}; + +export abstract class Container { + public abstract getContainers(): unknown; + + public abstract startContainer(): Promise; + + public abstract stopContainer(): Promise; + + public abstract removeContainer(): Promise; + + public abstract pauseContainer(id: string): Promise; + + public abstract unpauseContainer(id: string): Promise; +} diff --git a/packages/server/src/cursor.ts b/packages/server/src/cursor.ts new file mode 100644 index 0000000..725e8c7 --- /dev/null +++ b/packages/server/src/cursor.ts @@ -0,0 +1,48 @@ +import { z } from "zod"; + +// jobber-cursor-v1 +const magic = "jcv1"; + +const cursorSchema = z.object({ + size: z.number().int().positive().min(10).max(100).default(20), + + created: z.date(), + id: z.string(), + + createdPrevious: 
z.date().nullable().default(null), + idPrevious: z.string().nullable().default(null), +}); + +type Cursor = z.infer; + +export const parseDatabaseCursor = (input?: string): Cursor | null => { + try { + if (!input) { + return null; + } + + const decoded = Buffer.from(input, "hex").toString("utf-8"); + + if (!decoded.startsWith(magic)) { + return null; + } + + const { success, data } = cursorSchema.safeParse( + JSON.parse(decoded.substring(magic.length)), + ); + + if (!success) { + return null; + } + + return data; + } catch { + return null; + } +}; + +export const createDatabaseCursor = (cursor: Cursor) => { + const json = JSON.stringify(cursor); + + return Buffer.from(`${magic}${json}`, "utf-8").toString("hex"); +}; diff --git a/packages/server/src/db/actions.ts b/packages/server/src/db/actions.ts new file mode 100644 index 0000000..24dca22 --- /dev/null +++ b/packages/server/src/db/actions.ts @@ -0,0 +1,52 @@ +import { eq } from "drizzle-orm"; +import { getDrizzle } from "./index.js"; +import { actionsTable, jobsTable, jobVersionsTable } from "./schema.js"; + +async function byId(id: string) { + const action = await getDrizzle() + .select() + .from(actionsTable) + .where(eq(actionsTable.id, id)) + .limit(1) + .then((res) => res.at(0)); + + return action; +} + +async function byVersionId(versionId: string) { + const actions = await getDrizzle() + .select() + .from(actionsTable) + .where(eq(actionsTable.jobVersionId, versionId)) + .then((res) => res.at(0)); + + return actions; +} + +async function byJobIdLatest(jobId: string) { + const action = await getDrizzle() + .select() + .from(actionsTable) + .innerJoin(jobsTable, eq(actionsTable.jobId, jobsTable.id)) + .innerJoin( + jobVersionsTable, + eq(jobsTable.jobVersionId, jobVersionsTable.id), + ) + .where(eq(jobsTable.id, jobId)) + .limit(1) + .then((res) => res.at(0)?.actions); + + return action; +} + +async function all() { + const actions = await getDrizzle().select().from(actionsTable); + return actions; +} + 
+export const actionsModel = { + byId, + byVersionId, + byJobIdLatest, + all, +}; diff --git a/packages/server/src/db/api-tokens.ts b/packages/server/src/db/api-tokens.ts new file mode 100644 index 0000000..7ba8c19 --- /dev/null +++ b/packages/server/src/db/api-tokens.ts @@ -0,0 +1,55 @@ +import { and, eq, lte } from "drizzle-orm"; +import { getDrizzle } from "./index.js"; +import { apiTokensTable } from "./schema.js"; +import { ApiTokensTableInsertType } from "./types.js"; + +async function byValidToken(token: string) { + return await getDrizzle() + .select() + .from(apiTokensTable) + .where( + and( + eq(apiTokensTable.token, token), + eq(apiTokensTable.status, "enabled"), + lte(apiTokensTable.expires, new Date()), + ), + ) + .limit(1) + .then((res) => res.at(0)); +} + +async function byToken(token: string) { + return await getDrizzle() + .select() + .from(apiTokensTable) + .where(eq(apiTokensTable.token, token)) + .limit(1) + .then((res) => res.at(0)); +} + +async function create(tokenData: ApiTokensTableInsertType) { + return await getDrizzle() + .insert(apiTokensTable) + .values(tokenData) + .returning() + .then((res) => res.at(0)); +} + +async function update( + tokenId: string, + updates: Partial, +) { + return await getDrizzle() + .update(apiTokensTable) + .set(updates) + .where(eq(apiTokensTable.id, tokenId)) + .returning() + .then((res) => res.at(0)); +} + +export const apiTokensModel = { + byToken, + byValidToken, + create, + update, +}; diff --git a/packages/server/src/db/audit-log.ts b/packages/server/src/db/audit-log.ts new file mode 100644 index 0000000..706e04c --- /dev/null +++ b/packages/server/src/db/audit-log.ts @@ -0,0 +1,133 @@ +import { and, desc, eq, gt, gte, or } from "drizzle-orm"; +import { getDrizzle } from "./index.js"; +import { AuditEntry } from "./types.js"; +import { auditLogTable } from "./schema.js"; +import { createDatabaseCursor, parseDatabaseCursor } from "~/cursor.js"; + +async function byId(id: string) { + const auditLog = 
await getDrizzle() + .select() + .from(auditLogTable) + .where(eq(auditLogTable.id, id)) + .limit(1) + .then((res) => res.at(0)); + + return auditLog; +} + +async function createUserLog(userId: string, entry: AuditEntry) { + const auditLog = await getDrizzle() + .insert(auditLogTable) + .values({ + subject: { + type: "user", + userId, + }, + entry, + }) + .returning() + .then((res) => res.at(0)); + + return auditLog; +} + +async function createServiceClientLog( + serviceClientId: string, + entry: AuditEntry, +) { + const auditLog = await getDrizzle() + .insert(auditLogTable) + .values({ + subject: { + type: "service-client", + serviceClientId, + }, + entry, + }) + .returning() + .then((res) => res.at(0)); + + return auditLog; +} + +async function createSystemLog(entry: AuditEntry) { + const auditLog = await getDrizzle() + .insert(auditLogTable) + .values({ + subject: { + type: "system", + }, + entry, + }) + .returning() + .then((res) => res.at(0)); + + return auditLog; +} + +async function query(cursor?: string) { + const decodedCursor = parseDatabaseCursor(cursor); + + const size = decodedCursor?.size ?? 20; + + const auditLogs = await getDrizzle() + .select() + .from(auditLogTable) + .where( + decodedCursor + ? 
or( + gt(auditLogTable.created, decodedCursor.created), + and( + eq(auditLogTable.created, decodedCursor.created), + gt(auditLogTable.id, decodedCursor.id), + ), + ) + : undefined, + ) + .orderBy(desc(auditLogTable.created)) + .limit(size + 1) + .then((res) => res); + + let nextCursor = null as string | null; + let previousCursor = null as string | null; + + if (auditLogs.length > size) { + const next = auditLogs[size]; + + nextCursor = createDatabaseCursor({ + size, + created: next.created, + id: next.id, + + createdPrevious: auditLogs[0].created, + idPrevious: auditLogs[0].id, + }); + + auditLogs.pop(); + } + + if (decodedCursor) { + previousCursor = createDatabaseCursor({ + size, + created: auditLogs[0].created, + id: auditLogs[0].id, + + createdPrevious: decodedCursor.createdPrevious ?? null, + idPrevious: decodedCursor.idPrevious ?? null, + }); + } + + return { + data: auditLogs, + nextCursor: nextCursor, + prevCursor: previousCursor, + }; +} + +export const auditLogsModel = { + query, + byId, + createUserLog, + createServiceClientLog, + createSystemLog, +}; diff --git a/packages/server/src/db/environment.ts b/packages/server/src/db/environment.ts index b7ed87a..9bf05d7 100644 --- a/packages/server/src/db/environment.ts +++ b/packages/server/src/db/environment.ts @@ -1,6 +1,6 @@ import { eq } from "drizzle-orm"; import { getDrizzle } from "./index.js"; -import { environmentsTable } from "./schema/environments.js"; +import { environmentsTable } from "./schema.js"; async function byJobId(id: string) { const result = await getDrizzle() diff --git a/packages/server/src/db/job-versions.ts b/packages/server/src/db/job-versions.ts new file mode 100644 index 0000000..10e5d94 --- /dev/null +++ b/packages/server/src/db/job-versions.ts @@ -0,0 +1,32 @@ +import { and, eq } from "drizzle-orm"; +import { getDrizzle } from "./index.js"; +import { jobVersionsTable } from "./schema.js"; + +async function byId(id: string) { + const jobVersion = await getDrizzle() + .select() + 
.from(jobVersionsTable) + .where(eq(jobVersionsTable.id, id)) + .limit(1) + .then((res) => res.at(0)); + + return jobVersion; +} + +async function all(constraints: { jobId?: string } = {}) { + const conditions = Object.entries(constraints).map(([key, value]) => + eq((jobVersionsTable as any)[key], value), + ); + + const jobVersions = await getDrizzle() + .select() + .from(jobVersionsTable) + .where(and(...conditions)); + + return jobVersions; +} + +export const jobVersionsModel = { + byId, + all, +}; diff --git a/packages/server/src/db/job.ts b/packages/server/src/db/job.ts index b5a4781..b7f2d01 100644 --- a/packages/server/src/db/job.ts +++ b/packages/server/src/db/job.ts @@ -1,6 +1,6 @@ import { eq } from "drizzle-orm"; import { getDrizzle } from "./index.js"; -import { jobsTable } from "./schema/jobs.js"; +import { jobsTable } from "./schema.js"; async function byId(id: string) { const job = await getDrizzle() @@ -13,6 +13,12 @@ async function byId(id: string) { return job; } +async function all() { + const jobs = await getDrizzle().select().from(jobsTable); + return jobs; +} + export const jobModel = { byId, + all, }; diff --git a/packages/server/src/db/oauth-service-client.ts b/packages/server/src/db/oauth-service-client.ts new file mode 100644 index 0000000..ba70c1e --- /dev/null +++ b/packages/server/src/db/oauth-service-client.ts @@ -0,0 +1,76 @@ +import { eq } from "drizzle-orm"; +import { getDrizzle } from "./index.js"; +import { + OauthServiceClientTableInsertType, + OauthServiceClientTableType, +} from "./types.js"; +import { oauthServiceClientTable } from "./schema.js"; + +async function byId(id: string) { + const serviceClient = await getDrizzle() + .select() + .from(oauthServiceClientTable) + .where(eq(oauthServiceClientTable.id, id)) + .limit(1) + .then((res) => res.at(0)); + + return serviceClient; +} + +async function byEnabled() { + const serviceClients = await getDrizzle() + .select() + .from(oauthServiceClientTable) + 
.where(eq(oauthServiceClientTable.enabled, true)); + return serviceClients; +} + +async function byClientId(clientId: string) { + const serviceClient = await getDrizzle() + .select() + .from(oauthServiceClientTable) + .where(eq(oauthServiceClientTable.clientId, clientId)) + .limit(1) + .then((res) => res.at(0)); + + return serviceClient; +} + +async function upsert(serviceClient: OauthServiceClientTableInsertType) { + const createdServiceClient = await getDrizzle() + .insert(oauthServiceClientTable) + .values(serviceClient) + .onConflictDoUpdate({ + target: oauthServiceClientTable.clientId, + set: { + name: serviceClient.name, + description: serviceClient.description, + allowedAudiences: serviceClient.allowedAudiences, + allowedScopes: serviceClient.allowedScopes, + enabled: serviceClient.enabled, + expiresAt: serviceClient.expiresAt, + isSystemManaged: serviceClient.isSystemManaged, + permissions: serviceClient.permissions, + metadata: serviceClient.metadata, + }, + }) + .returning() + .then((res) => res.at(0)); + + return createdServiceClient; +} + +async function all() { + const serviceClients = await getDrizzle() + .select() + .from(oauthServiceClientTable); + return serviceClients; +} + +export const oauthServiceClientModel = { + byId, + byEnabled, + byClientId, + all, + upsert, +}; diff --git a/packages/server/src/db/oauth-signing-key.ts b/packages/server/src/db/oauth-signing-key.ts new file mode 100644 index 0000000..16b6d54 --- /dev/null +++ b/packages/server/src/db/oauth-signing-key.ts @@ -0,0 +1,133 @@ +import { + and, + count, + desc, + eq, + gte, + inArray, + isNull, + lte, + or, +} from "drizzle-orm"; +import { getDrizzle } from "./index.js"; +import { + OauthSigningKeyTableInsertType, + OauthSigningKeyTableType, +} from "./types.js"; +import { oauthSigningKeyTable } from "./schema.js"; + +async function byId(id: string) { + const item = await getDrizzle() + .select() + .from(oauthSigningKeyTable) + .where(eq(oauthSigningKeyTable.id, id)) + .limit(1) + 
.then((res) => res.at(0)); + + return item; +} + +async function create(item: OauthSigningKeyTableInsertType) { + const createdItem = await getDrizzle() + .insert(oauthSigningKeyTable) + .values(item) + .returning() + .then((res) => res.at(0)); + + return createdItem; +} + +async function all() { + const items = await getDrizzle().select().from(oauthSigningKeyTable); + return items; +} + +async function getValidKeys() { + const items = await getDrizzle() + .select() + .from(oauthSigningKeyTable) + .where( + and( + inArray(oauthSigningKeyTable.status, ["active", "retiring"]), + or( + gte(oauthSigningKeyTable.expiresAt, new Date()), + isNull(oauthSigningKeyTable.expiresAt), + ), + ), + ); + + return items; +} + +async function getValidKey() { + const item = await getDrizzle() + .select() + .from(oauthSigningKeyTable) + .where( + and( + inArray(oauthSigningKeyTable.status, ["active", "retiring"]), + or( + gte(oauthSigningKeyTable.expiresAt, new Date()), + isNull(oauthSigningKeyTable.expiresAt), + ), + ), + ) + .orderBy(desc(oauthSigningKeyTable.createdAt)) + .limit(1) + .then((res) => res.at(0)); + + return item; +} + +async function update( + id: string, + data: Partial, +) { + await getDrizzle() + .update(oauthSigningKeyTable) + .set(data) + .where(eq(oauthSigningKeyTable.id, id)); +} + +async function paginate( + page: number, + pageSize: number, + filters?: Partial>, +) { + const whereClauses = []; + + if (filters?.status) { + whereClauses.push(eq(oauthSigningKeyTable.status, filters.status)); + } + + const items = await getDrizzle() + .select() + .from(oauthSigningKeyTable) + .where(and(...whereClauses)) + .orderBy(desc(oauthSigningKeyTable.createdAt)) + .limit(pageSize) + .offset((page - 1) * pageSize); + + const totalItems = await getDrizzle() + .select({ count: count(oauthSigningKeyTable.id) }) + .from(oauthSigningKeyTable) + .where(and(...whereClauses)) + .then((res) => res.at(0)?.count ?? 
0); + + return { + items, + totalItems, + totalPages: Math.ceil(totalItems / pageSize), + currentPage: page, + }; +} + +export const oauthSigningKeyModel = { + all, + byId, + create, + getValidKey, + getValidKeys, + paginate, + update, +}; diff --git a/packages/server/src/db/runners.ts b/packages/server/src/db/runners.ts new file mode 100644 index 0000000..9af7bc8 --- /dev/null +++ b/packages/server/src/db/runners.ts @@ -0,0 +1,138 @@ +import { and, eq, gt, inArray, or, SQL, sql, SQLWrapper } from "drizzle-orm"; +import { getDrizzle } from "./index.js"; +import { RunnersTableInsertType } from "./types.js"; +import { runnersTable } from "./schema.js"; +import assert from "node:assert"; + +async function byId(id: string) { + const runner = await getDrizzle() + .select() + .from(runnersTable) + .where(eq(runnersTable.id, id)) + .limit(1) + .then((res) => res.at(0)); + + return runner; +} + +async function byStatus(status: RunnersTableInsertType["status"]) { + const runners = await getDrizzle() + .select() + .from(runnersTable) + .where(eq(runnersTable.status, status)); + + return runners; +} + +async function byStatuses(statuses: RunnersTableInsertType["status"][]) { + const runners = await getDrizzle() + .select() + .from(runnersTable) + .where(inArray(runnersTable.status, statuses)); + + return runners; +} + +async function byJobId( + jobId: string, + filter: { + /** + * If true, only returns statuses that are ready or starting. 
+ */ + specialActiveIshOnly?: boolean; + } = {}, +) { + const conditions: SQL[] = [eq(runnersTable.jobId, jobId)]; + + if (filter.specialActiveIshOnly) { + const condition = inArray(runnersTable.status, ["ready", "starting"]); + + assert(condition); + + conditions.push(condition); + } + + const runners = await getDrizzle() + .select() + .from(runnersTable) + .where(and(...conditions)); + + return runners; +} + +async function byJobIdSpecial(jobId: string, specialFilter: boolean = false) { + const conditions: SQL[] = [eq(runnersTable.jobId, jobId)]; + + if (specialFilter) { + const condition = or( + inArray(runnersTable.status, ["closing", "ready", "starting"]), + and( + eq(runnersTable.status, "closed"), + gt(runnersTable.closedAt, sql`now() - interval '5 minutes'`), + ), + ); + + assert(condition); + + conditions.push(condition); + } + + const runners = await getDrizzle() + .select() + .from(runnersTable) + .where(and(...conditions)); + + return runners; +} + +async function byContainerName(containerName: string) { + const runners = await getDrizzle() + .select() + .from(runnersTable) + .where( + eq( + sql`${runnersTable.properties} ->> 'runnerContainerName'`, + containerName, + ), + ); + + return runners; +} + +async function all() { + const runners = await getDrizzle().select().from(runnersTable); + return runners; +} + +async function create(input: RunnersTableInsertType) { + const runner = await getDrizzle() + .insert(runnersTable) + .values(input) + .returning() + .then((res) => res.at(0)); + + return runner; +} + +async function update(id: string, input: Partial) { + const runner = await getDrizzle() + .update(runnersTable) + .set(input) + .where(eq(runnersTable.id, id)) + .returning() + .then((res) => res.at(0)); + + return runner; +} + +export const runnersModel = { + byId, + byStatus, + byStatuses, + byJobId, + byJobIdSpecial, + byContainerName, + all, + create, + update, +}; diff --git a/packages/server/src/db/schema.ts b/packages/server/src/db/schema.ts 
new file mode 100644 index 0000000..b2778d2 --- /dev/null +++ b/packages/server/src/db/schema.ts @@ -0,0 +1,389 @@ +import { JobberPermissions } from "@jobber/common/permissions.js"; +import { sql } from "drizzle-orm"; +import { + boolean, + index, + integer, + jsonb, + PgColumn, + pgTable, + text, + timestamp, + unique, + uniqueIndex, + uuid, + varchar, +} from "drizzle-orm/pg-core"; +import { z } from "zod"; +import { getDefaultRuntimeImages } from "~/jobber/images.js"; +import { createToken } from "~/util.js"; +import { + ActionsDockerArgumentsSchemaType, + AuditEntry, + AuditSubject, + EnvironmentsContextSchemaType, + ServiceClientMetadata, + TriggersContextSchemaType, +} from "./types.js"; + +/** + * Runners + */ +export const runnersTable = pgTable("runners", { + id: uuid("id").primaryKey().defaultRandom().notNull(), + + status: varchar("status", { + length: 50, + enum: ["starting", "ready", "closing", "closed"], + }).notNull(), + + jobId: uuid() + .notNull() + .references(() => jobsTable.id, { onDelete: "cascade" }), + jobVersionId: uuid() + .notNull() + .references(() => jobVersionsTable.id, { onDelete: "cascade" }), + actionId: uuid() + .notNull() + .references(() => actionsTable.id, { onDelete: "cascade" }), + environmentId: uuid().references(() => environmentsTable.id, { + onDelete: "set null", + }), + + oauthServiceClientId: uuid().references(() => oauthServiceClientTable.id, { + onDelete: "set null", + }), + + properties: jsonb().$type<{ + runnerPid: string; + runnerContainerName: string; + runnerContainerNetworks: string[]; + runnerApiPort: number; + runnerDebug: boolean; + }>(), + + createdAt: timestamp().notNull().defaultNow(), + readyAt: timestamp(), + closingAt: timestamp(), + closedAt: timestamp(), +}); + +/** + * OAuth Signing Keys + */ +export const oauthSigningKeyTable = pgTable("oauthSigningKey", { + id: uuid("id").primaryKey().defaultRandom().notNull(), + parentId: uuid("parent_id").references( + (): PgColumn => oauthSigningKeyTable.id, + { 
+ onDelete: "set null", + }, + ), + childId: uuid("child_id").references( + (): PgColumn => oauthSigningKeyTable.id, + { + onDelete: "set null", + }, + ), + + createdByUserId: uuid("created_by_user_id").references(() => usersTable.id, { + onDelete: "set null", + }), + + status: varchar("status", { + length: 255, + enum: ["active", "retiring", "inactive"], + }).notNull(), + + alg: varchar("alg", { length: 255, enum: ["RS256"] }).notNull(), + use: varchar("use", { length: 255, enum: ["sig", "enc"] }).notNull(), + + privateKeyEncrypted: text("private_key_encrypted").notNull(), + publicKey: text("public_key").notNull(), + + expiresAt: timestamp(), + renewsAt: timestamp(), + createdAt: timestamp().defaultNow().notNull(), +}); + +/** + * OAuth Service Clients + */ +export const oauthServiceClientTable = pgTable("oauthServiceClient", { + id: uuid("id").primaryKey().defaultRandom().notNull(), + + isSystemManaged: boolean("is_system_managed").notNull().default(false), + + name: varchar("name", { length: 255 }).notNull(), + description: text("description"), + + clientId: varchar("clientId", { length: 255 }).unique().notNull(), + + metadata: jsonb("metadata").$type().notNull(), + + allowedAudiences: jsonb("allowedAudiences").$type().notNull(), + allowedScopes: jsonb("allowedScopes").$type().notNull(), + + permissions: jsonb("permissions").$type().notNull(), + + enabled: boolean("enabled").default(true).notNull(), + + expiresAt: timestamp(), + createdAt: timestamp().defaultNow().notNull(), +}); + +/** + * Logs + */ +export const logsTable = pgTable( + "logs", + { + id: uuid("id").primaryKey().defaultRandom().notNull(), + + jobId: uuid().notNull(), + + actionId: uuid().notNull(), + + source: varchar({ + enum: ["system", "runner"], + }).notNull(), + + sort: varchar({ length: 32 }).notNull().default(""), + + created: timestamp().defaultNow().notNull(), + message: text().notNull(), + }, + (table) => [index("jobId_created_idx").on(table.jobId, table.created)], +); + +/** + * Locks 
+ */ +export const lockTable = pgTable("lock", { + id: uuid("id").primaryKey().defaultRandom().notNull(), + + lockKey: varchar({ length: 256 }).unique().notNull(), + + expires: timestamp() + .notNull() + .default(sql`NOW() + INTERVAL '5 minutes'`), + created: timestamp() + .notNull() + .default(sql`NOW()`), + modified: timestamp() + .notNull() + .default(sql`NOW()`), +}); + +/** + * Jobs + */ +export const jobsTable = pgTable("jobs", { + id: uuid("id").primaryKey().defaultRandom().notNull(), + jobName: varchar({ length: 128 }).unique().notNull(), + description: text(), + + jobVersionId: uuid().references((): PgColumn => jobVersionsTable.id, { + onDelete: "set null", + }), + + status: varchar({ + enum: ["enabled", "disabled"], + length: 16, + }).default("enabled"), + + links: jsonb() + .$type< + Array<{ + name: string; + url: string; + }> + >() + .notNull() + .default([]), +}); + +/** + * Job Versions + */ +export const jobVersionsTable = pgTable( + "job-versions", + { + id: uuid("id").primaryKey().defaultRandom().notNull(), + + jobId: uuid() + .notNull() + .references((): PgColumn => jobsTable.id, { onDelete: "cascade" }), + + version: varchar({ length: 32 }).notNull(), + + modified: integer().notNull(), + created: integer().notNull(), + }, + (table) => [unique().on(table.jobId, table.version)], +); + +/** + * Environments + */ +export const environmentsTable = pgTable("environments", { + id: uuid("id").primaryKey().defaultRandom().notNull(), + jobId: uuid() + .unique() + .notNull() + .references(() => jobsTable.id, { onDelete: "cascade" }), + + context: jsonb().$type().notNull().default({}), + + modified: integer().notNull(), +}); + +/** + * Audit Logs + */ +export const auditLogTable = pgTable("auditLog", { + id: uuid("id").primaryKey().defaultRandom().notNull(), + + subject: jsonb("subject").$type().notNull(), + entry: jsonb("entry").$type().notNull(), + + created: timestamp().defaultNow().notNull(), +}); + +/** + * API Tokens + */ +export const apiTokensTable = 
pgTable("apiTokens", { + id: uuid("id").primaryKey().defaultRandom().notNull(), + token: varchar({ length: 70 }) + .notNull() + .unique() + .$defaultFn(() => createToken({ length: 70 }).substring(0, 70)), + userId: uuid() + .notNull() + .references(() => usersTable.id), + + description: text(), + + permissions: jsonb().notNull().$type(), + + status: varchar({ + enum: ["enabled", "disabled"], + length: 16, + }) + .notNull() + .default("enabled"), + + expires: timestamp().notNull(), + created: timestamp().defaultNow().notNull(), +}); + +/** + * Actions + */ +export const actionsTable = pgTable("actions", { + id: uuid("id").primaryKey().defaultRandom().notNull(), + jobId: uuid() + .notNull() + .references(() => jobsTable.id, { onDelete: "cascade" }), + jobVersionId: uuid() + .notNull() + .references(() => jobVersionsTable.id, { onDelete: "cascade" }), + runnerImage: text().notNull().default(getDefaultRuntimeImages().node), + runnerAsynchronous: boolean().default(true).notNull(), + runnerMinCount: integer().default(1).notNull(), + runnerMaxCount: integer().default(16).notNull(), + runnerTimeout: integer().default(60).notNull(), + runnerMaxIdleAge: integer().default(0).notNull(), + runnerMaxAge: integer().default(900).notNull(), + runnerMaxAgeHard: integer().default(960).notNull(), + runnerDockerArguments: jsonb() + .$type() + .notNull() + .default({}), + runnerMode: text({ + enum: ["standard", "run-once"], + }).default("standard"), +}); + +/** + * Users + */ +export const usersTable = pgTable( + "users", + { + id: uuid("id").primaryKey().defaultRandom().notNull(), + + username: varchar().notNull().unique(), + password: text().notNull(), + + enabled: boolean().default(true).notNull(), + + permissions: jsonb().notNull().$type(), + + created: timestamp().defaultNow().notNull(), + }, + (table) => [ + uniqueIndex("usernameUniqueIndex").on(sql`lower(${table.username})`), + ], +); + +/** + * Triggers + */ +export const triggersTable = pgTable("triggers", { + id: 
uuid("id").primaryKey().defaultRandom().notNull(), + jobId: uuid() + .notNull() + .references(() => jobsTable.id, { onDelete: "cascade" }), + jobVersionId: uuid() + .notNull() + .references(() => jobVersionsTable.id, { onDelete: "cascade" }), + context: jsonb().$type().notNull(), +}); + +/** + * Store + */ +export const storeTable = pgTable( + "store", + { + id: uuid("id").primaryKey().defaultRandom().notNull(), + jobId: uuid() + .notNull() + .references(() => jobsTable.id, { onDelete: "cascade" }), + + storeKey: varchar({ + length: 128, + }).notNull(), + + storeValue: text().notNull(), + + expiry: integer(), + modified: integer().notNull(), + created: integer().notNull(), + }, + (table) => [unique().on(table.jobId, table.storeKey)], +); + +/** + * Sessions + */ +export const sessionsTable = pgTable("sessions", { + id: uuid("id").primaryKey().defaultRandom().notNull(), + token: varchar({ length: 50 }) + .notNull() + .unique() + .$defaultFn(() => createToken({ length: 50 }).substring(0, 50)), + + userId: uuid() + .notNull() + .references(() => usersTable.id), + + status: varchar({ enum: ["active", "disabled"] }) + .notNull() + .default("active"), + + expires: timestamp().notNull(), + created: timestamp().defaultNow().notNull(), +}); diff --git a/packages/server/src/db/schema/actions.ts b/packages/server/src/db/schema/actions.ts deleted file mode 100644 index bb0a215..0000000 --- a/packages/server/src/db/schema/actions.ts +++ /dev/null @@ -1,71 +0,0 @@ -import { - boolean, - integer, - jsonb, - pgTable, - text, - uuid, -} from "drizzle-orm/pg-core"; -import { getDefaultRuntimeImages } from "~/jobber/images.js"; -import { jobVersionsTable } from "./job-versions.js"; -import { jobsTable } from "./jobs.js"; -import { z } from "zod"; - -export const ActionsDockerArgumentsSchema = z.object({ - networks: z.string().array().optional(), - - volumes: z - .object({ - source: z.string(), - target: z.string(), - mode: z.enum(["rw", "ro"]).default("rw"), - }) - .array() - 
.optional(), - - labels: z - .object({ - key: z.string().regex(/^[a-zA-Z0-9._-]+$/), - value: z.string().regex(/^[a-zA-Z0-9._-]+$/), - }) - .array() - .optional(), - - memoryLimit: z - .string() - .regex(/^\d+[bkmg]$/) - .optional(), - - directPassthroughArguments: z.string().array().optional(), -}); - -export type ActionsDockerArgumentsSchemaType = z.infer< - typeof ActionsDockerArgumentsSchema ->; - -export const actionsTable = pgTable("actions", { - id: uuid("id").primaryKey().defaultRandom().notNull(), - jobId: uuid() - .notNull() - .references(() => jobsTable.id, { onDelete: "cascade" }), - jobVersionId: uuid() - .notNull() - .references(() => jobVersionsTable.id, { onDelete: "cascade" }), - runnerImage: text().notNull().default(getDefaultRuntimeImages().node), - runnerAsynchronous: boolean().default(true).notNull(), - runnerMinCount: integer().default(1).notNull(), - runnerMaxCount: integer().default(16).notNull(), - runnerTimeout: integer().default(60).notNull(), - runnerMaxIdleAge: integer().default(0).notNull(), - runnerMaxAge: integer().default(900).notNull(), - runnerMaxAgeHard: integer().default(960).notNull(), - runnerDockerArguments: jsonb() - .$type() - .notNull() - .default({}), - runnerMode: text({ - enum: ["standard", "run-once"], - }).default("standard"), -}); - -export type ActionsTableType = typeof actionsTable.$inferSelect; diff --git a/packages/server/src/db/schema/api-tokens.ts b/packages/server/src/db/schema/api-tokens.ts deleted file mode 100644 index dec94c2..0000000 --- a/packages/server/src/db/schema/api-tokens.ts +++ /dev/null @@ -1,39 +0,0 @@ -import { - jsonb, - pgTable, - text, - timestamp, - uuid, - varchar, -} from "drizzle-orm/pg-core"; -import { JobberPermissions } from "~/permissions.js"; -import { usersTable } from "./users.js"; -import { createToken } from "~/util.js"; - -export const apiTokensTable = pgTable("apiTokens", { - id: uuid("id").primaryKey().defaultRandom().notNull(), - token: varchar({ length: 70 }) - .notNull() 
- .unique() - .$defaultFn(() => createToken({ length: 70 }).substring(0, 70)), - userId: uuid() - .notNull() - .references(() => usersTable.id), - - description: text(), - - permissions: jsonb().notNull().$type(), - - status: varchar({ - enum: ["enabled", "disabled"], - length: 16, - }) - .notNull() - .default("enabled"), - - expires: timestamp().notNull(), - created: timestamp().defaultNow().notNull(), -}); - -export type ApiTokensTableType = typeof apiTokensTable.$inferSelect; -export type ApiTokensTableInsertType = typeof apiTokensTable.$inferInsert; diff --git a/packages/server/src/db/schema/environments.ts b/packages/server/src/db/schema/environments.ts deleted file mode 100644 index 5100e4b..0000000 --- a/packages/server/src/db/schema/environments.ts +++ /dev/null @@ -1,29 +0,0 @@ -import { integer, jsonb, pgTable, uuid } from "drizzle-orm/pg-core"; -import { z } from "zod"; -import { jobsTable } from "./jobs.js"; - -export const EnvironmentsContextSchema = z.record( - z.string(), - z.object({ - value: z.string(), - type: z.enum(["secret", "text"]), - }) -); - -export type EnvironmentsContextSchemaType = z.infer< - typeof EnvironmentsContextSchema ->; - -export const environmentsTable = pgTable("environments", { - id: uuid("id").primaryKey().defaultRandom().notNull(), - jobId: uuid() - .unique() - .notNull() - .references(() => jobsTable.id, { onDelete: "cascade" }), - - context: jsonb().$type().notNull().default({}), - - modified: integer().notNull(), -}); - -export type EnvironmentsTableType = typeof environmentsTable.$inferSelect; diff --git a/packages/server/src/db/schema/job-versions.ts b/packages/server/src/db/schema/job-versions.ts deleted file mode 100644 index 62c4e38..0000000 --- a/packages/server/src/db/schema/job-versions.ts +++ /dev/null @@ -1,28 +0,0 @@ -import { - integer, - PgColumn, - pgTable, - unique, - uuid, - varchar, -} from "drizzle-orm/pg-core"; -import { jobsTable } from "./jobs.js"; - -export const jobVersionsTable = pgTable( - 
"job-versions", - { - id: uuid("id").primaryKey().defaultRandom().notNull(), - - jobId: uuid() - .notNull() - .references((): PgColumn => jobsTable.id, { onDelete: "cascade" }), - - version: varchar({ length: 32 }).notNull(), - - modified: integer().notNull(), - created: integer().notNull(), - }, - (table) => [unique().on(table.jobId, table.version)] -); - -export type JobVersionsTableType = typeof jobVersionsTable.$inferSelect; diff --git a/packages/server/src/db/schema/jobs.ts b/packages/server/src/db/schema/jobs.ts deleted file mode 100644 index 090ad01..0000000 --- a/packages/server/src/db/schema/jobs.ts +++ /dev/null @@ -1,36 +0,0 @@ -import { - jsonb, - PgColumn, - pgTable, - text, - uuid, - varchar, -} from "drizzle-orm/pg-core"; -import { jobVersionsTable } from "./job-versions.js"; - -export const jobsTable = pgTable("jobs", { - id: uuid("id").primaryKey().defaultRandom().notNull(), - jobName: varchar({ length: 128 }).unique().notNull(), - description: text(), - - jobVersionId: uuid().references((): PgColumn => jobVersionsTable.id, { - onDelete: "set null", - }), - - status: varchar({ - enum: ["enabled", "disabled"], - length: 16, - }).default("enabled"), - - links: jsonb() - .$type< - Array<{ - name: string; - url: string; - }> - >() - .notNull() - .default([]), -}); - -export type JobsTableType = typeof jobsTable.$inferSelect; diff --git a/packages/server/src/db/schema/lock.ts b/packages/server/src/db/schema/lock.ts deleted file mode 100644 index 84e26bd..0000000 --- a/packages/server/src/db/schema/lock.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { pgTable, uuid, timestamp, varchar } from "drizzle-orm/pg-core"; -import { sql } from "drizzle-orm"; - -export const lockTable = pgTable("lock", { - id: uuid("id").primaryKey().defaultRandom().notNull(), - - lockKey: varchar({ length: 256 }).unique().notNull(), - - expires: timestamp() - .notNull() - .default(sql`NOW() + INTERVAL '5 minutes'`), - created: timestamp() - .notNull() - .default(sql`NOW()`), - 
modified: timestamp() - .notNull() - .default(sql`NOW()`), -}); - -export type LockTableType = typeof lockTable.$inferSelect; diff --git a/packages/server/src/db/schema/logs.ts b/packages/server/src/db/schema/logs.ts deleted file mode 100644 index 7b11698..0000000 --- a/packages/server/src/db/schema/logs.ts +++ /dev/null @@ -1,33 +0,0 @@ -import { - index, - integer, - pgTable, - text, - timestamp, - uuid, - varchar, -} from "drizzle-orm/pg-core"; - -export const logsTable = pgTable( - "logs", - { - id: uuid("id").primaryKey().defaultRandom().notNull(), - - jobId: uuid().notNull(), - - actionId: uuid().notNull(), - - source: varchar({ - enum: ["system", "runner"], - }).notNull(), - - sort: varchar({ length: 32 }).notNull().default(""), - - created: timestamp().defaultNow().notNull(), - message: text().notNull(), - }, - (table) => [index("jobId_created_idx").on(table.jobId, table.created)] -); - -export type LogsTableType = typeof logsTable.$inferSelect; -export type LogsTableInsertType = typeof logsTable.$inferInsert; diff --git a/packages/server/src/db/schema/sessions.ts b/packages/server/src/db/schema/sessions.ts deleted file mode 100644 index 4a8286a..0000000 --- a/packages/server/src/db/schema/sessions.ts +++ /dev/null @@ -1,31 +0,0 @@ -import { - boolean, - pgTable, - timestamp, - uuid, - varchar, -} from "drizzle-orm/pg-core"; -import { usersTable } from "./users.js"; -import { createToken } from "~/util.js"; - -export const sessionsTable = pgTable("sessions", { - id: uuid("id").primaryKey().defaultRandom().notNull(), - token: varchar({ length: 50 }) - .notNull() - .unique() - .$defaultFn(() => createToken({ length: 50 }).substring(0, 50)), - - userId: uuid() - .notNull() - .references(() => usersTable.id), - - status: varchar({ enum: ["active", "disabled"] }) - .notNull() - .default("active"), - - expires: timestamp().notNull(), - created: timestamp().defaultNow().notNull(), -}); - -export type SessionsTableType = typeof sessionsTable.$inferSelect; -export 
type SessionsTableInsertType = typeof sessionsTable.$inferInsert; diff --git a/packages/server/src/db/schema/store.ts b/packages/server/src/db/schema/store.ts deleted file mode 100644 index f18c214..0000000 --- a/packages/server/src/db/schema/store.ts +++ /dev/null @@ -1,32 +0,0 @@ -import { - integer, - pgTable, - text, - unique, - uuid, - varchar, -} from "drizzle-orm/pg-core"; -import { jobsTable } from "./jobs.js"; - -export const storeTable = pgTable( - "store", - { - id: uuid("id").primaryKey().defaultRandom().notNull(), - jobId: uuid() - .notNull() - .references(() => jobsTable.id, { onDelete: "cascade" }), - - storeKey: varchar({ - length: 128, - }).notNull(), - - storeValue: text().notNull(), - - expiry: integer(), - modified: integer().notNull(), - created: integer().notNull(), - }, - (table) => [unique().on(table.jobId, table.storeKey)] -); - -export type StoreTableType = typeof storeTable.$inferSelect; diff --git a/packages/server/src/db/schema/triggers.ts b/packages/server/src/db/schema/triggers.ts deleted file mode 100644 index 3f3a79a..0000000 --- a/packages/server/src/db/schema/triggers.ts +++ /dev/null @@ -1,59 +0,0 @@ -import { jsonb, pgTable, uuid } from "drizzle-orm/pg-core"; -import { z } from "zod"; -import { jobVersionsTable } from "./job-versions.js"; -import { jobsTable } from "./jobs.js"; - -export const TriggersContextSchema = z.union([ - z.object({ - type: z.literal("schedule"), - name: z.string().optional(), - cron: z.string(), - timezone: z.string().optional(), - }), - z.object({ - type: z.literal("http"), - name: z.string().optional(), - hostname: z.string().nullable().default(null), - method: z.string().nullable().default(null), - path: z.string().nullable().default(null), - }), - z.object({ - type: z.literal("mqtt"), - name: z.string().optional(), - topics: z.array(z.string()), - connection: z.object({ - protocol: z.enum(["wss", "ws", "mqtt", "mqtts"]).optional(), - protocolVariable: z.string().optional(), - - port: 
z.string().optional(), - portVariable: z.string().optional(), - - host: z.string().optional(), - hostVariable: z.string().optional(), - - username: z.string().optional(), - usernameVariable: z.string().optional(), - - password: z.string().optional(), - passwordVariable: z.string().optional(), - - clientId: z.string().optional(), - clientIdVariable: z.string().optional(), - }), - }), -]); - -export type TriggersContextSchemaType = z.infer; - -export const triggersTable = pgTable("triggers", { - id: uuid("id").primaryKey().defaultRandom().notNull(), - jobId: uuid() - .notNull() - .references(() => jobsTable.id, { onDelete: "cascade" }), - jobVersionId: uuid() - .notNull() - .references(() => jobVersionsTable.id, { onDelete: "cascade" }), - context: jsonb().$type().notNull(), -}); - -export type TriggersTableType = typeof triggersTable.$inferSelect; diff --git a/packages/server/src/db/schema/users.ts b/packages/server/src/db/schema/users.ts deleted file mode 100644 index 8e6643c..0000000 --- a/packages/server/src/db/schema/users.ts +++ /dev/null @@ -1,38 +0,0 @@ -import { sql } from "drizzle-orm"; -import { - boolean, - jsonb, - pgTable, - text, - timestamp, - uniqueIndex, - uuid, - varchar, -} from "drizzle-orm/pg-core"; -import { z } from "zod"; -import { JobberPermissions } from "~/permissions.js"; - -export const UserUsernameSchema = z.string().min(3).max(32); -export const UserPasswordSchema = z.string().min(7); - -export const usersTable = pgTable( - "users", - { - id: uuid("id").primaryKey().defaultRandom().notNull(), - - username: varchar().notNull().unique(), - password: text().notNull(), - - enabled: boolean().default(true).notNull(), - - permissions: jsonb().notNull().$type(), - - created: timestamp().defaultNow().notNull(), - }, - (table) => [ - uniqueIndex("usernameUniqueIndex").on(sql`lower(${table.username})`), - ] -); - -export type UsersTableType = typeof usersTable.$inferSelect; -export type UsersTableInsertType = typeof usersTable.$inferInsert; diff 
--git a/packages/server/src/db/store.ts b/packages/server/src/db/store.ts new file mode 100644 index 0000000..b962c5d --- /dev/null +++ b/packages/server/src/db/store.ts @@ -0,0 +1,78 @@ +import { and, eq } from "drizzle-orm"; +import { getDrizzle } from "./index.js"; +import { getUnixTimestamp } from "~/util.js"; +import { storeTable } from "./schema.js"; +import { StoreTableInsertType } from "./types.js"; + +async function byId(id: string) { + const store = await getDrizzle() + .select() + .from(storeTable) + .where(eq(storeTable.id, id)) + .limit(1) + .then((res) => res.at(0)); + + return store; +} + +async function byKey(jobId: string, storeKey: string) { + const store = await getDrizzle() + .select() + .from(storeTable) + .where(and(eq(storeTable.jobId, jobId), eq(storeTable.storeKey, storeKey))) + .limit(1) + .then((res) => res.at(0)); + + return store; +} + +async function upsert( + data: Pick< + StoreTableInsertType, + "jobId" | "storeKey" | "storeValue" | "expiry" + >, +) { + const now = getUnixTimestamp(); + + const insertData = { + ...data, + modified: now, + created: now, + } satisfies StoreTableInsertType; + + const store = await getDrizzle() + .insert(storeTable) + .values(insertData) + .onConflictDoUpdate({ + target: [storeTable.jobId, storeTable.storeKey], + set: { + storeValue: insertData.storeValue, + expiry: insertData.expiry, + modified: insertData.modified, + }, + }) + .returning() + .then((res) => res.at(0)); + + return store; +} + +async function deleteById(id: string) { + await getDrizzle().delete(storeTable).where(eq(storeTable.id, id)); +} + +async function deleteByKey(jobId: string, storeKey: string) { + return await getDrizzle() + .delete(storeTable) + .where(and(eq(storeTable.jobId, jobId), eq(storeTable.storeKey, storeKey))) + .returning() + .then((items) => items.at(0) ?? 
null); +} + +export const storeModel = { + byId, + byKey, + upsert, + deleteById, + deleteByKey, +}; diff --git a/packages/server/src/db/triggers.ts b/packages/server/src/db/triggers.ts new file mode 100644 index 0000000..83bc451 --- /dev/null +++ b/packages/server/src/db/triggers.ts @@ -0,0 +1,34 @@ +import { and, eq } from "drizzle-orm"; +import { getDrizzle } from "./index.js"; +import { triggersTable } from "./schema.js"; + +async function byId(id: string) { + const trigger = await getDrizzle() + .select() + .from(triggersTable) + .where(eq(triggersTable.id, id)) + .limit(1) + .then((res) => res.at(0)); + + return trigger; +} + +async function all( + constraints: Partial<{ jobId: string; jobVersionId: string }> = {}, +) { + const conditions = Object.entries(constraints).map(([key, value]) => + eq((triggersTable as any)[key], value), + ); + + const triggers = await getDrizzle() + .select() + .from(triggersTable) + .where(and(...conditions)); + + return triggers; +} + +export const triggersModel = { + byId, + all, +}; diff --git a/packages/server/src/db/types.ts b/packages/server/src/db/types.ts new file mode 100644 index 0000000..e702739 --- /dev/null +++ b/packages/server/src/db/types.ts @@ -0,0 +1,237 @@ +import { z } from "zod"; +import { + actionsTable, + apiTokensTable, + auditLogTable, + environmentsTable, + jobsTable, + jobVersionsTable, + lockTable, + logsTable, + oauthServiceClientTable, + oauthSigningKeyTable, + runnersTable, + sessionsTable, + storeTable, + triggersTable, + usersTable, +} from "./schema.js"; + +type ServiceClientMetadataClientSecretBasic = { + type: "client_secret_basic"; + clientSecretHashed: string; +}; + +type ServiceClientMetadataClientSecretBasicInsecure = { + type: "client_secret_basic_insecure"; + clientSecret: string; +}; + +type ServiceClientMetadataPrivateKeyJwt = { + type: "private_key_jwt"; + publicKey: string; +}; + +export type ServiceClientMetadata = + | ServiceClientMetadataClientSecretBasic + | 
ServiceClientMetadataPrivateKeyJwt + | ServiceClientMetadataClientSecretBasicInsecure; + +export const EnvironmentsContextSchema = z.record( + z.string(), + z.object({ + value: z.string(), + type: z.enum(["secret", "text"]), + }), +); + +export type EnvironmentsContextSchemaType = z.infer< + typeof EnvironmentsContextSchema +>; + +export type AuditSubject = + | { + type: "user"; + userId: string; + } + | { + type: "service-client"; + serviceClientId: string; + } + | { + type: "system"; + }; + +export type AuditEntry = + | { + type: "generic"; + message: string; + } + | { + type: "oauth-invalid-client-id"; + clientId: string; + } + | { + type: "oauth-invalid-client-secret"; + clientId: string; + } + | { + type: "oauth-disabled-client"; + clientId: string; + } + | { + type: "oauth-expired-client"; + clientId: string; + } + | { + type: "oauth-unsupported-grant-type"; + clientId: string; + grantType: string; + } + | { + type: "oauth-rate-limited"; + clientId: string; + reason: "global" | "client-id" | "ip"; + } + | { + type: "oauth-valid-client"; + clientId: string; + } + | { + type: "oauth-invalid-audience"; + clientId: string; + audience: string; + }; + +export const ActionsDockerArgumentsSchema = z.object({ + networks: z.string().array().optional(), + + volumes: z + .object({ + source: z.string(), + target: z.string(), + mode: z.enum(["rw", "ro"]).default("rw"), + }) + .array() + .optional(), + + labels: z + .object({ + key: z.string().regex(/^[a-zA-Z0-9._-]+$/), + value: z.string().regex(/^[a-zA-Z0-9._-]+$/), + }) + .array() + .optional(), + + memoryLimit: z + .string() + .regex(/^\d+[bkmg]$/) + .optional(), + + directPassthroughArguments: z.string().array().optional(), +}); + +export type ActionsDockerArgumentsSchemaType = z.infer< + typeof ActionsDockerArgumentsSchema +>; + +export const UserUsernameSchema = z.string().min(3).max(32); +export const UserPasswordSchema = z.string().min(7); + +export const TriggersContextSchema = z.union([ + z.object({ + type: 
z.literal("schedule"), + name: z.string().optional(), + cron: z.string(), + timezone: z.string().optional(), + }), + z.object({ + type: z.literal("http"), + name: z.string().optional(), + hostname: z.string().nullable().default(null), + method: z.string().nullable().default(null), + path: z.string().nullable().default(null), + }), + z.object({ + type: z.literal("mqtt"), + name: z.string().optional(), + topics: z.array(z.string()), + connection: z.object({ + protocol: z.enum(["wss", "ws", "mqtt", "mqtts"]).optional(), + protocolVariable: z.string().optional(), + + port: z.string().optional(), + portVariable: z.string().optional(), + + host: z.string().optional(), + hostVariable: z.string().optional(), + + username: z.string().optional(), + usernameVariable: z.string().optional(), + + password: z.string().optional(), + passwordVariable: z.string().optional(), + + clientId: z.string().optional(), + clientIdVariable: z.string().optional(), + }), + }), +]); +export type TriggersContextSchemaType = z.infer; + +// Runners +export type RunnersTableType = typeof runnersTable.$inferSelect; +export type RunnersTableInsertType = typeof runnersTable.$inferInsert; + +// OAuth Signing Keys +export type OauthSigningKeyTableType = typeof oauthSigningKeyTable.$inferSelect; +export type OauthSigningKeyTableInsertType = + typeof oauthSigningKeyTable.$inferInsert; + +// OAuth Service Client +export type OauthServiceClientTableType = + typeof oauthServiceClientTable.$inferSelect; +export type OauthServiceClientTableInsertType = + typeof oauthServiceClientTable.$inferInsert; + +// Logs +export type LogsTableType = typeof logsTable.$inferSelect; +export type LogsTableInsertType = typeof logsTable.$inferInsert; + +// Lock +export type LockTableType = typeof lockTable.$inferSelect; + +// Jobs +export type JobsTableType = typeof jobsTable.$inferSelect; + +// Versions +export type JobVersionsTableType = typeof jobVersionsTable.$inferSelect; + +// Env +export type EnvironmentsTableType = 
typeof environmentsTable.$inferSelect; + +// Audit Log +export type AuditLogTableType = typeof auditLogTable.$inferSelect; +export type AuditLogTableInsertType = typeof auditLogTable.$inferInsert; + +// API Tokens +export type ApiTokensTableType = typeof apiTokensTable.$inferSelect; +export type ApiTokensTableInsertType = typeof apiTokensTable.$inferInsert; + +// Actions +export type ActionsTableType = typeof actionsTable.$inferSelect; + +// Users +export type UsersTableType = typeof usersTable.$inferSelect; +export type UsersTableInsertType = typeof usersTable.$inferInsert; + +// Triggers +export type TriggersTableType = typeof triggersTable.$inferSelect; + +// Store +export type StoreTableType = typeof storeTable.$inferSelect; +export type StoreTableInsertType = typeof storeTable.$inferInsert; + +// Sessions +export type SessionsTableType = typeof sessionsTable.$inferSelect; +export type SessionsTableInsertType = typeof sessionsTable.$inferInsert; diff --git a/packages/server/src/db/user.ts b/packages/server/src/db/user.ts new file mode 100644 index 0000000..c316888 --- /dev/null +++ b/packages/server/src/db/user.ts @@ -0,0 +1,36 @@ +import { eq } from "drizzle-orm"; +import { getDrizzle } from "./index.js"; +import { usersTable } from "./schema.js"; +import { UsersTableInsertType } from "./types.js"; + +async function byId(id: string) { + const user = await getDrizzle() + .select() + .from(usersTable) + .where(eq(usersTable.id, id)) + .limit(1) + .then((res) => res.at(0)); + + return user; +} + +async function byUsername(username: string) { + const user = await getDrizzle() + .select() + .from(usersTable) + .where(eq(usersTable.username, username)) + .limit(1) + .then((res) => res.at(0)); + + return user; +} + +async function update(id: string, data: Partial) { + await getDrizzle().update(usersTable).set(data).where(eq(usersTable.id, id)); +} + +export const userModel = { + byId, + byUsername, + update, +}; diff --git a/packages/server/src/docker.ts 
b/packages/server/src/docker.ts index ccb6cca..23da547 100644 --- a/packages/server/src/docker.ts +++ b/packages/server/src/docker.ts @@ -21,7 +21,13 @@ export const getDockerContainers = (): Promise => { return new Promise((resolve, reject) => { const lines: string[] = []; - const process = spawn("docker", ["container", "ls", "--format", "json"]); + const process = spawn("docker", [ + "container", + "ls", + "--no-trunc", + "--format", + "json", + ]); process.stdout.on("data", (chunk: Buffer) => { lines.push(chunk.toString()); @@ -34,14 +40,14 @@ export const getDockerContainers = (): Promise => { return reject( new Error("Failed to get Docker Containers!", { cause: output, - }) + }), ); } resolve( output .filter((index) => !!index) - .map((outputLine) => JSON.parse(outputLine)) + .map((outputLine) => JSON.parse(outputLine)), ); }); }); @@ -61,8 +67,32 @@ export const stopDockerContainer = (id: string): Promise => { if (code !== 0) { console.error( `[stopDockerContainer] Failed to stop container ${id}: ${lines.join( - "" - )}` + "", + )}`, + ); + } + + return resolve(code === 0); + }); + }); +}; + +export const killDockerContainer = (id: string): Promise => { + return new Promise((resolve, reject) => { + const lines: string[] = []; + + const process = spawn("docker", ["container", "kill", id]); + + process.stdout.on("data", (chunk: Buffer) => { + lines.push(chunk.toString()); + }); + + process.once("exit", (code) => { + if (code !== 0) { + console.error( + `[killDockerContainer] Failed to kill container ${id}: ${lines.join( + "", + )}`, ); } @@ -84,7 +114,7 @@ export const pullDockerImage = (image: string): Promise => { process.once("exit", (code) => { if (code !== 0) { console.error( - `[pullDockerImage] Failed to pull image ${image}: ${lines.join("")}` + `[pullDockerImage] Failed to pull image ${image}: ${lines.join("")}`, ); } @@ -107,8 +137,8 @@ export const pauseDockerContainer = (id: string): Promise => { if (code !== 0) { console.error( 
`[pauseDockerContainer] Failed to pause container ${id}: ${lines.join( - "" - )}` + "", + )}`, ); } @@ -131,8 +161,8 @@ export const unpauseDockerContainer = (id: string): Promise => { if (code !== 0) { console.error( `[unpauseDockerContainer] Failed to unpause container ${id}: ${lines.join( - "" - )}` + "", + )}`, ); } diff --git a/packages/server/src/grpc/grpc-maps.ts b/packages/server/src/grpc/grpc-maps.ts new file mode 100644 index 0000000..02a76df --- /dev/null +++ b/packages/server/src/grpc/grpc-maps.ts @@ -0,0 +1,189 @@ +import * as grpcAction from "@jobber/grpc/basics/action.js"; +import * as grpcJobVersion from "@jobber/grpc/basics/job-version.js"; +import * as grpcJob from "@jobber/grpc/basics/job.js"; +import * as grpcRunner from "@jobber/grpc/basics/runner.js"; +import * as grpcTrigger from "@jobber/grpc/basics/trigger.js"; + +import { ServerError, Status } from "nice-grpc"; + +import { + ActionsTableType, + JobsTableType, + JobVersionsTableType, + RunnersTableType, + TriggersTableType, +} from "~/db/types.js"; + +export const mapGrpcJob = (job: JobsTableType): grpcJob.Item => { + let status: grpcJob.Item_Status; + if (job.status === "enabled") { + status = grpcJob.Item_Status.ENABLED; + } else if (job.status === "disabled") { + status = grpcJob.Item_Status.DISABLED; + } else { + throw new ServerError(Status.INTERNAL, "Unknown job status"); + } + + return { + id: job.id, + jobName: job.jobName, + status: status, + description: job.description ?? 
undefined, + versionId: job.jobVersionId || undefined, + links: job.links.map((link) => ({ + name: link.name, + url: link.url, + })), + }; +}; + +export const mapGrpcAction = (action: ActionsTableType): grpcAction.Item => { + let runnerMode: grpcAction.Item_RunnerMode; + if (action.runnerMode === "run-once") { + runnerMode = grpcAction.Item_RunnerMode.RUN_ONCE; + } else if (action.runnerMode === "standard") { + runnerMode = grpcAction.Item_RunnerMode.STANDARD; + } else { + throw new ServerError(Status.INTERNAL, "Unknown job status"); + } + + return { + id: action.id, + jobId: action.jobId, + versionId: action.jobVersionId, + + runnerImage: action.runnerImage, + runnerAsynchronous: action.runnerAsynchronous, + runnerMinCount: action.runnerMinCount, + runnerMaxCount: action.runnerMaxCount, + runnerTimeout: action.runnerTimeout, + runnerMaxIdleAge: action.runnerMaxIdleAge, + runnerMaxAge: action.runnerMaxAge, + runnerMaxAgeHard: action.runnerMaxAgeHard, + + dockerArguments: { + networks: action.runnerDockerArguments.networks ?? [], + + volumes: + action.runnerDockerArguments.volumes?.map((volume) => { + let volumeMode: grpcAction.Item_DockerArguments_Volume_VolumeMode; + if (volume.mode === "ro") { + volumeMode = + grpcAction.Item_DockerArguments_Volume_VolumeMode.READ_ONLY; + } else if (volume.mode === "rw") { + volumeMode = + grpcAction.Item_DockerArguments_Volume_VolumeMode.READ_WRITE; + } else { + volumeMode = + grpcAction.Item_DockerArguments_Volume_VolumeMode.READ_WRITE; + } + + return { + source: volume.source, + target: volume.target, + mode: volumeMode, + }; + }) ?? 
[], + + labels: action.runnerDockerArguments.labels || [], + + memoryLimit: action.runnerDockerArguments.memoryLimit || undefined, + + directPassthroughArguments: + action.runnerDockerArguments.directPassthroughArguments || [], + }, + + runnerMode: runnerMode, + }; +}; + +export const mapGrpcTrigger = ( + trigger: TriggersTableType, +): grpcTrigger.Item => { + return { + id: trigger.id, + jobId: trigger.jobId, + versionId: trigger.jobVersionId, + + schedule: + trigger.context.type === "schedule" + ? { + name: trigger.context.name ?? undefined, + cron: trigger.context.cron, + timezone: trigger.context.timezone ?? undefined, + } + : undefined, + + http: + trigger.context.type === "http" + ? { + name: trigger.context.name ?? undefined, + hostname: trigger.context.hostname ?? undefined, + method: trigger.context.method ?? undefined, + path: trigger.context.path ?? undefined, + } + : undefined, + + mqtt: + trigger.context.type === "mqtt" + ? { + name: trigger.context.name ?? undefined, + topics: trigger.context.topics, + connection: { + protocol: trigger.context.connection.protocol ?? undefined, + protocolVariable: + trigger.context.connection.protocolVariable ?? undefined, + port: trigger.context.connection.port ?? undefined, + portVariable: + trigger.context.connection.portVariable ?? undefined, + host: trigger.context.connection.host ?? undefined, + hostVariable: + trigger.context.connection.hostVariable ?? undefined, + username: trigger.context.connection.username ?? undefined, + usernameVariable: + trigger.context.connection.usernameVariable ?? undefined, + password: trigger.context.connection.password ?? undefined, + passwordVariable: + trigger.context.connection.passwordVariable ?? undefined, + clientId: trigger.context.connection.clientId ?? undefined, + clientIdVariable: + trigger.context.connection.clientIdVariable ?? 
undefined, + }, + } + : undefined, + }; +}; + +export const mapGrpcJobVersion = ( + jobVersion: JobVersionsTableType, +): grpcJobVersion.Item => { + return { + id: jobVersion.id, + jobId: jobVersion.jobId, + version: jobVersion.version, + created: new Date(jobVersion.created * 1000).toISOString(), + modified: new Date(jobVersion.modified * 1000).toISOString(), + }; +}; + +export const mapGrpcJobRunner = (runner: RunnersTableType): grpcRunner.Item => { + return { + id: runner.id, + jobId: runner.jobId, + actionId: runner.actionId, + versionId: runner.jobVersionId, + properties: runner.properties + ? { + runnerPid: runner.properties.runnerPid, + runnerContainerName: runner.properties.runnerContainerName, + runnerContainerNetworks: runner.properties.runnerContainerNetworks, + runnerApiPort: runner.properties.runnerApiPort, + runnerDebug: runner.properties.runnerDebug, + } + : undefined, + createdAt: runner.createdAt.toISOString(), + readyAt: runner.readyAt?.toISOString() ?? undefined, + closingAt: runner.closingAt?.toISOString() ?? undefined, + closedAt: runner.closedAt?.toISOString() ?? 
undefined, + }; +}; diff --git a/packages/server/src/grpc/grpc-server.ts b/packages/server/src/grpc/grpc-server.ts new file mode 100644 index 0000000..89393b8 --- /dev/null +++ b/packages/server/src/grpc/grpc-server.ts @@ -0,0 +1,124 @@ +import { ServerCredentials } from "@grpc/grpc-js"; +import { createServer, Server, ServiceImplementation } from "nice-grpc"; +import { + ServerReflection, + ServerReflectionService, +} from "nice-grpc-server-reflection"; +import { readFile } from "node:fs/promises"; +import { singleton } from "tsyringe"; + +import { LoopBase } from "@jobber/common"; +import { GeneralAPIDefinition } from "@jobber/grpc/general.js"; + +import { getConfigOption } from "~/config.js"; + +import path from "node:path"; +import { fileExists } from "~/util.js"; +import { createSoftRunner } from "./methods/create-soft-runner.js"; +import { deleteRunner } from "./methods/delete-runner.js"; +import { deleteStoreItem } from "./methods/delete-store-item.js"; +import { getJobActionLatest } from "./methods/get-job-action-latest.js"; +import { getJobAction } from "./methods/get-job-action.js"; +import { getJobActions } from "./methods/get-job-actions.js"; +import { getJobTriggers } from "./methods/get-job-trigger-.js"; +import { getJobTriggersLatest } from "./methods/get-job-trigger-latest.js"; +import { getJobTrigger } from "./methods/get-job-trigger.js"; +import { getJobVersionArchive } from "./methods/get-job-version-archive.js"; +import { getJobVersionLatest } from "./methods/get-job-version-latest.js"; +import { getJobVersion } from "./methods/get-job-version.js"; +import { getJobVersions } from "./methods/get-job-versions.js"; +import { getJob } from "./methods/get-job.js"; +import { getJobs } from "./methods/get-jobs.js"; +import { getRunner } from "./methods/get-runner.js"; +import { getRunners } from "./methods/get-runners.js"; +import { getStoreItem } from "./methods/get-store-item.js"; +import { getTemplates } from "./methods/get-templates.js"; +import { 
publishMqttMessage } from "./methods/publish-mqtt-message.js"; +import { setStoreItem } from "./methods/set-store-item.js"; + +const generalApiDefinition: ServiceImplementation = { + getJob: getJob, + + getJobs: getJobs, + + getJobAction: getJobAction, + + getJobActionLatest: getJobActionLatest, + + getJobActions: getJobActions, + + getJobTrigger: getJobTrigger, + + getJobTriggers: getJobTriggers, + + getJobTriggersLatest: getJobTriggersLatest, + + getJobVersion: getJobVersion, + + getJobVersionLatest: getJobVersionLatest, + + getJobVersions: getJobVersions, + + getJobVersionArchive: getJobVersionArchive, + + getRunner: getRunner, + + getRunners: getRunners, + + deleteRunner: deleteRunner, + + createSoftRunner: createSoftRunner, + + getStoreItem: getStoreItem, + + setStoreItem: setStoreItem, + + deleteStoreItem: deleteStoreItem, + + publishMqttMessage: publishMqttMessage, + + getTemplates: getTemplates, +}; + +@singleton() +export class GrpcServer extends LoopBase { + protected loopDuration = 1000; + + protected loopStarted = undefined; + protected loopClosed = undefined; + + private server: Server | null = null; + + protected async loopStarting() { + this.server = createServer({}); + + this.server.add(GeneralAPIDefinition, generalApiDefinition); + + const filenameProtoset = path.join( + process.cwd(), + "../grpc/src/protoset.bin", + ); + + if (await fileExists(filenameProtoset)) { + this.server.add( + ServerReflectionService, + ServerReflection(await readFile(filenameProtoset), [ + GeneralAPIDefinition.fullName, + ]), + ); + } + + await this.server.listen( + `${getConfigOption("MANAGER_GRPC_BIND_ADDRESS")}:${getConfigOption( + "MANAGER_GRPC_PORT", + )}`, + ServerCredentials.createInsecure(), + ); + } + + protected async loopClosing() { + await this.server?.shutdown(); + } + + protected async loopIteration() {} +} diff --git a/packages/server/src/grpc/methods/create-soft-runner.ts b/packages/server/src/grpc/methods/create-soft-runner.ts new file mode 100644 index 
0000000..d865819 --- /dev/null +++ b/packages/server/src/grpc/methods/create-soft-runner.ts @@ -0,0 +1,39 @@ +import { GeneralAPIDefinition } from "@jobber/grpc/general.js"; +import { ServerError, ServiceImplementation, Status } from "nice-grpc"; +import { container } from "tsyringe"; +import { jobModel } from "~/db/job.js"; +import { runnersModel } from "~/db/runners.js"; +import { RunnerManager } from "~/jobber/runners/manager.js"; +import { mapGrpcJobRunner } from "../grpc-maps.js"; +import { authorizedCall } from "../util.js"; + +export const createSoftRunner: ServiceImplementation["createSoftRunner"] = + authorizedCall(async (request, _context, bouncer) => { + const job = await jobModel.byId(request.jobId); + + if (!job) { + throw new ServerError(Status.NOT_FOUND, "Job not found"); + } + + if (!bouncer.canCreateSoftRunner(job)) { + throw new ServerError(Status.PERMISSION_DENIED, "Permission denied"); + } + + const manager = container.resolve(RunnerManager); + + const runnerId = await manager.createSoftRunner(job.id); + + if (!runnerId) { + throw new ServerError(Status.INTERNAL, "Failed to create runner"); + } + + const runner = await runnersModel.byId(runnerId); + + if (!runner) { + throw new ServerError(Status.INTERNAL, "Failed to find created runner"); + } + + return { + runner: mapGrpcJobRunner(runner), + }; + }); diff --git a/packages/server/src/grpc/methods/delete-runner.ts b/packages/server/src/grpc/methods/delete-runner.ts new file mode 100644 index 0000000..e780ca5 --- /dev/null +++ b/packages/server/src/grpc/methods/delete-runner.ts @@ -0,0 +1,28 @@ +import { GeneralAPIDefinition } from "@jobber/grpc/general.js"; +import { ServerError, ServiceImplementation, Status } from "nice-grpc"; +import { runnersModel } from "~/db/runners.js"; +import { mapGrpcJobRunner } from "../grpc-maps.js"; +import { authorizedCall } from "../util.js"; +import { container } from "tsyringe"; +import { RunnerManager } from "~/jobber/runners/manager.js"; + +export const 
deleteRunner: ServiceImplementation["deleteRunner"] = + authorizedCall(async (request, _context, bouncer) => { + const runner = await runnersModel.byId(request.runnerId); + + if (!runner) { + throw new ServerError(Status.NOT_FOUND, "Runner not found"); + } + + if (!bouncer.canDeleteJobRunners({ id: runner.jobId })) { + throw new ServerError(Status.PERMISSION_DENIED, "Permission denied"); + } + + const manager = container.resolve(RunnerManager); + + manager.shutdownQueueAdd(runner.id, false); + + return { + runner: mapGrpcJobRunner(runner), + }; + }); diff --git a/packages/server/src/grpc/methods/delete-store-item.ts b/packages/server/src/grpc/methods/delete-store-item.ts new file mode 100644 index 0000000..ac47251 --- /dev/null +++ b/packages/server/src/grpc/methods/delete-store-item.ts @@ -0,0 +1,29 @@ +import { GeneralAPIDefinition } from "@jobber/grpc/general.js"; +import { ServerError, ServiceImplementation, Status } from "nice-grpc"; +import { jobModel } from "~/db/job.js"; +import { storeModel } from "~/db/store.js"; +import { authorizedCall } from "../util.js"; + +export const deleteStoreItem: ServiceImplementation["deleteStoreItem"] = + authorizedCall(async (request, _context, bouncer) => { + const job = await jobModel.byId(request.jobId); + + if (!job) { + throw new ServerError(Status.NOT_FOUND, "Job not found"); + } + + if (!bouncer.canDeleteJobStore({ jobId: request.jobId })) { + throw new ServerError(Status.PERMISSION_DENIED, "Permission denied"); + } + + const storeItem = await storeModel.deleteByKey(request.jobId, request.key); + + if (!storeItem) { + throw new ServerError(Status.NOT_FOUND, "Store item not found"); + } + + return { + key: storeItem.storeKey, + value: storeItem.storeValue, + }; + }); diff --git a/packages/server/src/grpc/methods/get-job-action-latest.ts b/packages/server/src/grpc/methods/get-job-action-latest.ts new file mode 100644 index 0000000..fd722d2 --- /dev/null +++ b/packages/server/src/grpc/methods/get-job-action-latest.ts @@ 
-0,0 +1,33 @@ +import { GeneralAPIDefinition } from "@jobber/grpc/general.js"; +import { ServerError, ServiceImplementation, Status } from "nice-grpc"; +import { actionsModel } from "~/db/actions.js"; +import { jobModel } from "~/db/job.js"; +import { mapGrpcAction } from "../grpc-maps.js"; +import { authorizedCall } from "../util.js"; + +export const getJobActionLatest: ServiceImplementation["getJobActionLatest"] = + authorizedCall(async (request, _context, bouncer) => { + const job = await jobModel.byId(request.jobId); + + if (!job || !job.jobVersionId) { + throw new ServerError(Status.NOT_FOUND, "Job not found"); + } + + const action = await actionsModel.byVersionId(job.jobVersionId); + + if (!action) { + throw new ServerError(Status.NOT_FOUND, "Action not found"); + } + + if (action.jobId !== request.jobId) { + throw new ServerError(Status.NOT_FOUND, "Action not found"); + } + + if (!bouncer.canReadJobAction(action)) { + throw new ServerError(Status.PERMISSION_DENIED, "Permission denied"); + } + + return { + action: mapGrpcAction(action), + }; + }); diff --git a/packages/server/src/grpc/methods/get-job-action.ts b/packages/server/src/grpc/methods/get-job-action.ts new file mode 100644 index 0000000..52a3c38 --- /dev/null +++ b/packages/server/src/grpc/methods/get-job-action.ts @@ -0,0 +1,26 @@ +import { GeneralAPIDefinition } from "@jobber/grpc/general.js"; +import { ServerError, ServiceImplementation, Status } from "nice-grpc"; +import { actionsModel } from "~/db/actions.js"; +import { mapGrpcAction } from "../grpc-maps.js"; +import { authorizedCall } from "../util.js"; + +export const getJobAction: ServiceImplementation["getJobAction"] = + authorizedCall(async (request, _context, bouncer) => { + const action = await actionsModel.byId(request.actionId); + + if (!action) { + throw new ServerError(Status.NOT_FOUND, "Action not found"); + } + + if (action.jobId !== request.jobId) { + throw new ServerError(Status.NOT_FOUND, "Action not found"); + } + + if 
(!bouncer.canReadJobAction(action)) { + throw new ServerError(Status.PERMISSION_DENIED, "Permission denied"); + } + + return { + action: mapGrpcAction(action), + }; + }); diff --git a/packages/server/src/grpc/methods/get-job-actions.ts b/packages/server/src/grpc/methods/get-job-actions.ts new file mode 100644 index 0000000..d01dfc1 --- /dev/null +++ b/packages/server/src/grpc/methods/get-job-actions.ts @@ -0,0 +1,26 @@ +import { GeneralAPIDefinition } from "@jobber/grpc/general.js"; +import { ServiceImplementation } from "nice-grpc"; +import { actionsModel } from "~/db/actions.js"; +import { mapGrpcAction } from "../grpc-maps.js"; +import { authorizedCall } from "../util.js"; + +export const getJobActions: ServiceImplementation["getJobActions"] = + authorizedCall(async (request, _context, bouncer) => { + const actions = (await actionsModel.all()) + .filter((action) => { + if (action.jobId !== request.jobId) { + return false; + } + + if (request.versionId && action.jobVersionId !== request.versionId) { + return false; + } + + return bouncer.canReadJobAction(action); + }) + .map(mapGrpcAction); + + return { + actions, + }; + }); diff --git a/packages/server/src/grpc/methods/get-job-trigger-.ts b/packages/server/src/grpc/methods/get-job-trigger-.ts new file mode 100644 index 0000000..f8be321 --- /dev/null +++ b/packages/server/src/grpc/methods/get-job-trigger-.ts @@ -0,0 +1,31 @@ +import { GeneralAPIDefinition } from "@jobber/grpc/general.js"; +import { ServiceImplementation } from "nice-grpc"; +import { triggersModel } from "~/db/triggers.js"; +import { mapGrpcTrigger } from "../grpc-maps.js"; +import { authorizedCall } from "../util.js"; + +export const getJobTriggers: ServiceImplementation["getJobTriggers"] = + authorizedCall(async (request, _context, bouncer) => { + const triggers = ( + await triggersModel.all({ + jobId: request.jobId, + jobVersionId: request.versionId || undefined, + }) + ) + .filter((trigger) => { + if (trigger.jobId !== request.jobId) { + 
return false; + } + + if (request.versionId && trigger.jobVersionId !== request.versionId) { + return false; + } + + return bouncer.canReadJobTriggers(trigger); + }) + .map(mapGrpcTrigger); + + return { + triggers, + }; + }); diff --git a/packages/server/src/grpc/methods/get-job-trigger-latest.ts b/packages/server/src/grpc/methods/get-job-trigger-latest.ts new file mode 100644 index 0000000..352166b --- /dev/null +++ b/packages/server/src/grpc/methods/get-job-trigger-latest.ts @@ -0,0 +1,34 @@ +import { GeneralAPIDefinition } from "@jobber/grpc/general.js"; +import { ServerError, ServiceImplementation, Status } from "nice-grpc"; +import { jobModel } from "~/db/job.js"; +import { triggersModel } from "~/db/triggers.js"; +import { mapGrpcTrigger } from "../grpc-maps.js"; +import { authorizedCall } from "../util.js"; + +export const getJobTriggersLatest: ServiceImplementation["getJobTriggersLatest"] = + authorizedCall(async (request, _context, bouncer) => { + const job = await jobModel.byId(request.jobId); + + if (!job || !job.jobVersionId) { + throw new ServerError(Status.NOT_FOUND, "Job not found"); + } + + const triggers = ( + await triggersModel.all({ + jobId: request.jobId, + jobVersionId: job.jobVersionId, + }) + ) + .filter((trigger) => { + if (trigger.jobId !== request.jobId) { + return false; + } + + return bouncer.canReadJobTriggers(trigger); + }) + .map(mapGrpcTrigger); + + return { + triggers, + }; + }); diff --git a/packages/server/src/grpc/methods/get-job-trigger.ts b/packages/server/src/grpc/methods/get-job-trigger.ts new file mode 100644 index 0000000..2dbbe35 --- /dev/null +++ b/packages/server/src/grpc/methods/get-job-trigger.ts @@ -0,0 +1,26 @@ +import { GeneralAPIDefinition } from "@jobber/grpc/general.js"; +import { ServerError, ServiceImplementation, Status } from "nice-grpc"; +import { triggersModel } from "~/db/triggers.js"; +import { mapGrpcTrigger } from "../grpc-maps.js"; +import { authorizedCall } from "../util.js"; + +export const 
getJobTrigger: ServiceImplementation["getJobTrigger"] = + authorizedCall(async (request, _context, bouncer) => { + const trigger = await triggersModel.byId(request.triggerId); + + if (!trigger) { + throw new ServerError(Status.NOT_FOUND, "Trigger not found"); + } + + if (trigger.jobId !== request.jobId) { + throw new ServerError(Status.NOT_FOUND, "Trigger not found"); + } + + if (!bouncer.canReadJobTriggers(trigger)) { + throw new ServerError(Status.PERMISSION_DENIED, "Permission denied"); + } + + return { + trigger: mapGrpcTrigger(trigger), + }; + }); diff --git a/packages/server/src/grpc/methods/get-job-version-archive.ts b/packages/server/src/grpc/methods/get-job-version-archive.ts new file mode 100644 index 0000000..7fb0a6b --- /dev/null +++ b/packages/server/src/grpc/methods/get-job-version-archive.ts @@ -0,0 +1,60 @@ +import { GeneralAPIDefinition } from "@jobber/grpc/general.js"; +import { ServerError, ServiceImplementation, Status } from "nice-grpc"; +import { open } from "node:fs/promises"; +import { actionsModel } from "~/db/actions.js"; +import { jobVersionsModel } from "~/db/job-versions.js"; +import { getJobActionArchiveFile } from "~/paths.js"; +import { getBouncer } from "../util.js"; + +export const getJobVersionArchive: ServiceImplementation["getJobVersionArchive"] = + async function* (request, context) { + const bouncer = await getBouncer(context); + + const [jobVersion, jobAction] = await Promise.all([ + jobVersionsModel.byId(request.jobVersionId), + actionsModel.byVersionId(request.jobVersionId), + ]); + + if (!jobVersion || !jobAction) { + throw new ServerError(Status.NOT_FOUND, "Job version not found"); + } + + if (!bouncer.canReadJobVersionArchive(jobVersion)) { + throw new ServerError(Status.PERMISSION_DENIED, "Permission denied"); + } + + const archiveFileName = getJobActionArchiveFile(jobVersion, jobAction); + + const handle = await open(archiveFileName, "r"); + const chunkSize = 1024 * 10; + + try { + for (let seq = 0; ; seq++) { + const 
position = seq * chunkSize; + + const buffer = Buffer.alloc(chunkSize); + + const { bytesRead } = await handle.read({ + buffer, + length: chunkSize, + position, + }); + + if (bytesRead === 0) { + break; + } + + yield { + seq, + data: buffer.subarray(0, bytesRead), + end: bytesRead < chunkSize, + }; + + if (bytesRead < chunkSize) { + break; + } + } + } finally { + await handle.close(); + } + }; diff --git a/packages/server/src/grpc/methods/get-job-version-latest.ts b/packages/server/src/grpc/methods/get-job-version-latest.ts new file mode 100644 index 0000000..e2df62a --- /dev/null +++ b/packages/server/src/grpc/methods/get-job-version-latest.ts @@ -0,0 +1,29 @@ +import { GeneralAPIDefinition } from "@jobber/grpc/general.js"; +import { ServerError, ServiceImplementation, Status } from "nice-grpc"; +import { jobVersionsModel } from "~/db/job-versions.js"; +import { jobModel } from "~/db/job.js"; +import { mapGrpcJobVersion } from "../grpc-maps.js"; +import { authorizedCall } from "../util.js"; + +export const getJobVersionLatest: ServiceImplementation["getJobVersionLatest"] = + authorizedCall(async (request, _context, bouncer) => { + const job = await jobModel.byId(request.jobId); + + if (!job || !job.jobVersionId) { + throw new ServerError(Status.NOT_FOUND, "Job not found"); + } + + const jobVersion = await jobVersionsModel.byId(job.jobVersionId); + + if (!jobVersion) { + throw new ServerError(Status.NOT_FOUND, "Job version not found"); + } + + if (!bouncer.canReadJobVersion(jobVersion)) { + throw new ServerError(Status.PERMISSION_DENIED, "Permission denied"); + } + + return { + jobVersion: mapGrpcJobVersion(jobVersion), + }; + }); diff --git a/packages/server/src/grpc/methods/get-job-version.ts b/packages/server/src/grpc/methods/get-job-version.ts new file mode 100644 index 0000000..1eb36df --- /dev/null +++ b/packages/server/src/grpc/methods/get-job-version.ts @@ -0,0 +1,22 @@ +import { GeneralAPIDefinition } from "@jobber/grpc/general.js"; +import { ServerError, 
ServiceImplementation, Status } from "nice-grpc"; +import { jobVersionsModel } from "~/db/job-versions.js"; +import { mapGrpcJobVersion } from "../grpc-maps.js"; +import { authorizedCall } from "../util.js"; + +export const getJobVersion: ServiceImplementation["getJobVersion"] = + authorizedCall(async (request, _context, bouncer) => { + const jobVersion = await jobVersionsModel.byId(request.jobVersionId); + + if (!jobVersion) { + throw new ServerError(Status.NOT_FOUND, "Job version not found"); + } + + if (!bouncer.canReadJobVersion(jobVersion)) { + throw new ServerError(Status.PERMISSION_DENIED, "Permission denied"); + } + + return { + jobVersion: mapGrpcJobVersion(jobVersion), + }; + }); diff --git a/packages/server/src/grpc/methods/get-job-versions.ts b/packages/server/src/grpc/methods/get-job-versions.ts new file mode 100644 index 0000000..46d1b58 --- /dev/null +++ b/packages/server/src/grpc/methods/get-job-versions.ts @@ -0,0 +1,22 @@ +import { GeneralAPIDefinition } from "@jobber/grpc/general.js"; +import { ServiceImplementation } from "nice-grpc"; +import { jobVersionsModel } from "~/db/job-versions.js"; +import { mapGrpcJobVersion } from "../grpc-maps.js"; +import { authorizedCall } from "../util.js"; + +export const getJobVersions: ServiceImplementation["getJobVersions"] = + authorizedCall(async (request, _context, bouncer) => { + const jobVersions = (await jobVersionsModel.all({ jobId: request.jobId })) + .filter((jobVersion) => { + if (jobVersion.jobId !== request.jobId) { + return false; + } + + return bouncer.canReadJobVersion(jobVersion); + }) + .map(mapGrpcJobVersion); + + return { + jobVersions, + }; + }); diff --git a/packages/server/src/grpc/methods/get-job.ts b/packages/server/src/grpc/methods/get-job.ts new file mode 100644 index 0000000..7122532 --- /dev/null +++ b/packages/server/src/grpc/methods/get-job.ts @@ -0,0 +1,22 @@ +import { GeneralAPIDefinition } from "@jobber/grpc/general.js"; +import { ServerError, ServiceImplementation, Status } 
from "nice-grpc"; +import { jobModel } from "~/db/job.js"; +import { mapGrpcJob } from "../grpc-maps.js"; +import { authorizedCall } from "../util.js"; + +export const getJob: ServiceImplementation["getJob"] = + authorizedCall(async (request, _context, bouncer) => { + const job = await jobModel.byId(request.jobId); + + if (!job) { + throw new ServerError(Status.NOT_FOUND, "Job not found"); + } + + if (bouncer.canReadJob(job)) { + throw new ServerError(Status.PERMISSION_DENIED, "Permission denied"); + } + + return { + job: mapGrpcJob(job), + }; + }); diff --git a/packages/server/src/grpc/methods/get-jobs.ts b/packages/server/src/grpc/methods/get-jobs.ts new file mode 100644 index 0000000..04ab6b2 --- /dev/null +++ b/packages/server/src/grpc/methods/get-jobs.ts @@ -0,0 +1,16 @@ +import { GeneralAPIDefinition } from "@jobber/grpc/general.js"; +import { ServiceImplementation } from "nice-grpc"; +import { jobModel } from "~/db/job.js"; +import { mapGrpcJob } from "../grpc-maps.js"; +import { authorizedCall } from "../util.js"; + +export const getJobs: ServiceImplementation["getJobs"] = + authorizedCall(async (request, _context, bouncer) => { + const jobs = (await jobModel.all()) + .map(mapGrpcJob) + .filter((job) => bouncer.canReadJob(job)); + + return { + jobs, + }; + }); diff --git a/packages/server/src/grpc/methods/get-runner.ts b/packages/server/src/grpc/methods/get-runner.ts new file mode 100644 index 0000000..0e0bb62 --- /dev/null +++ b/packages/server/src/grpc/methods/get-runner.ts @@ -0,0 +1,22 @@ +import { GeneralAPIDefinition } from "@jobber/grpc/general.js"; +import { ServerError, ServiceImplementation, Status } from "nice-grpc"; +import { runnersModel } from "~/db/runners.js"; +import { mapGrpcJobRunner } from "../grpc-maps.js"; +import { authorizedCall } from "../util.js"; + +export const getRunner: ServiceImplementation["getRunner"] = + authorizedCall(async (request, _context, bouncer) => { + const runner = await runnersModel.byId(request.runnerId); + + if 
(!runner) { + throw new ServerError(Status.NOT_FOUND, "Runner not found"); + } + + if (!bouncer.canReadJobRunners({ id: runner.jobId })) { + throw new ServerError(Status.PERMISSION_DENIED, "Permission denied"); + } + + return { + runner: mapGrpcJobRunner(runner), + }; + }); diff --git a/packages/server/src/grpc/methods/get-runners.ts b/packages/server/src/grpc/methods/get-runners.ts new file mode 100644 index 0000000..a4936aa --- /dev/null +++ b/packages/server/src/grpc/methods/get-runners.ts @@ -0,0 +1,41 @@ +import { GeneralAPIDefinition } from "@jobber/grpc/general.js"; +import { ServerError, ServiceImplementation, Status } from "nice-grpc"; +import { runnersModel } from "~/db/runners.js"; +import { mapGrpcJobRunner } from "../grpc-maps.js"; +import { authorizedCall } from "../util.js"; + +export const getRunners: ServiceImplementation["getRunners"] = + authorizedCall(async (request, _context, bouncer) => { + const runners = await runnersModel.all(); + + if (!runners) { + throw new ServerError(Status.NOT_FOUND, "Runner not found"); + } + + const filteredRunners = runners.filter((runner) => { + if (request.jobId && runner.jobId !== request.jobId) { + return false; + } + + if (request.versionId && runner.jobVersionId !== request.versionId) { + return false; + } + + if (request.actionId && runner.actionId !== request.actionId) { + return false; + } + + if ( + request.status && + runner.status.toLowerCase() !== request.status.toLowerCase() + ) { + return false; + } + + return bouncer.canReadJobRunners({ id: runner.jobId }); + }); + + return { + runners: filteredRunners.map((runner) => mapGrpcJobRunner(runner)), + }; + }); diff --git a/packages/server/src/grpc/methods/get-store-item.ts b/packages/server/src/grpc/methods/get-store-item.ts new file mode 100644 index 0000000..75c659b --- /dev/null +++ b/packages/server/src/grpc/methods/get-store-item.ts @@ -0,0 +1,22 @@ +import { GeneralAPIDefinition } from "@jobber/grpc/general.js"; +import { ServerError, 
ServiceImplementation, Status } from "nice-grpc"; +import { storeModel } from "~/db/store.js"; +import { authorizedCall } from "../util.js"; + +export const getStoreItem: ServiceImplementation["getStoreItem"] = + authorizedCall(async (request, _context, bouncer) => { + const storeItem = await storeModel.byKey(request.jobId, request.key); + + if (!storeItem) { + throw new ServerError(Status.NOT_FOUND, "Store item not found"); + } + + if (!bouncer.canReadJobStore(storeItem)) { + throw new ServerError(Status.PERMISSION_DENIED, "Permission denied"); + } + + return { + key: storeItem.storeKey, + value: storeItem.storeValue, + }; + }); diff --git a/packages/server/src/grpc/methods/get-templates.ts b/packages/server/src/grpc/methods/get-templates.ts new file mode 100644 index 0000000..36d92b8 --- /dev/null +++ b/packages/server/src/grpc/methods/get-templates.ts @@ -0,0 +1,21 @@ +import { GeneralAPIDefinition } from "@jobber/grpc/general.js"; +import { ServerError, ServiceImplementation, Status } from "nice-grpc"; +import { readFile } from "node:fs/promises"; +import path from "node:path"; +import { authorizedCall } from "../util.js"; + +export const getTemplates: ServiceImplementation["getTemplates"] = + authorizedCall(async (request, _context, bouncer) => { + if (!bouncer.canReadTemplatesGenerally()) { + throw new ServerError(Status.PERMISSION_DENIED, "Permission denied"); + } + + const badGatewayTemplate = await readFile( + path.join(process.cwd(), "src/static-templates/bad-gateway.html"), + "utf8", + ); + + return { + templateBadGateway: badGatewayTemplate, + }; + }); diff --git a/packages/server/src/grpc/methods/publish-mqtt-message.ts b/packages/server/src/grpc/methods/publish-mqtt-message.ts new file mode 100644 index 0000000..5831f99 --- /dev/null +++ b/packages/server/src/grpc/methods/publish-mqtt-message.ts @@ -0,0 +1,29 @@ +import { GeneralAPIDefinition } from "@jobber/grpc/general.js"; +import { ServerError, ServiceImplementation, Status } from "nice-grpc"; 
+import { container } from "tsyringe"; +import { jobModel } from "~/db/job.js"; +import { TriggerMqtt } from "~/jobber/triggers/mqtt.js"; +import { authorizedCall } from "../util.js"; + +export const publishMqttMessage: ServiceImplementation["publishMqttMessage"] = + authorizedCall(async (request, _context, bouncer) => { + const job = await jobModel.byId(request.jobId); + + if (!job) { + throw new ServerError(Status.NOT_FOUND, "Job not found"); + } + + if (!bouncer.canPublishMqttMessage(job)) { + throw new ServerError(Status.PERMISSION_DENIED, "Permission denied"); + } + + const triggerMqtt = container.resolve(TriggerMqtt); + + await triggerMqtt.publishMqttMessage( + job.id, + request.topic, + Buffer.from(request.payload), + ); + + return {}; + }); diff --git a/packages/server/src/grpc/methods/set-store-item.ts b/packages/server/src/grpc/methods/set-store-item.ts new file mode 100644 index 0000000..776f0f3 --- /dev/null +++ b/packages/server/src/grpc/methods/set-store-item.ts @@ -0,0 +1,37 @@ +import { GeneralAPIDefinition } from "@jobber/grpc/general.js"; +import { ServerError, ServiceImplementation, Status } from "nice-grpc"; +import { jobModel } from "~/db/job.js"; +import { storeModel } from "~/db/store.js"; +import { getUnixTimestamp } from "~/util.js"; +import { authorizedCall } from "../util.js"; + +export const setStoreItem: ServiceImplementation["setStoreItem"] = + authorizedCall(async (request, _context, bouncer) => { + const job = await jobModel.byId(request.jobId); + + if (!job) { + throw new ServerError(Status.NOT_FOUND, "Job not found"); + } + + if (!bouncer.canWriteJobStore({ jobId: request.jobId })) { + throw new ServerError(Status.PERMISSION_DENIED, "Permission denied"); + } + + const expiry = request.ttl ? 
getUnixTimestamp() + request.ttl : undefined; + + const storeItem = await storeModel.upsert({ + jobId: request.jobId, + storeKey: request.key, + storeValue: request.value, + expiry: expiry, + }); + + if (!storeItem) { + throw new ServerError(Status.INTERNAL, "Failed to set store item"); + } + + return { + key: storeItem.storeKey, + value: storeItem.storeValue, + }; + }); diff --git a/packages/server/src/grpc/util.ts b/packages/server/src/grpc/util.ts new file mode 100644 index 0000000..349df16 --- /dev/null +++ b/packages/server/src/grpc/util.ts @@ -0,0 +1,68 @@ +import { BouncerBase } from "@jobber/common/bouncer-base.js"; +import { getOAuthAudienceGeneralApi } from "@jobber/common/oauth.js"; +import { JobberPermissionsSchema } from "@jobber/common/permissions.js"; +import { createLocalJWKSet, errors as joseErrors, jwtVerify } from "jose"; +import { CallContext, ServerError, Status } from "nice-grpc"; +import { container } from "tsyringe"; +import { getConfigOption } from "~/config.js"; +import { OAuthServiceClients } from "~/service-clients.js"; +import { OAuthSigningKeys } from "~/signing-keys.js"; + +export const authorizedCall = ( + callback: ( + request: TRequest, + context: CallContext, + bouncer: BouncerBase, + ) => Promise, +) => { + return async ( + request: TRequest, + context: CallContext, + ): Promise => { + return callback(request, context, await getBouncer(context)); + }; +}; + +export const getBouncer = async (context: CallContext) => { + try { + const oauthSigningKeys = container.resolve(OAuthSigningKeys); + + let token = context.metadata.get("Authorization"); + + if (!token) { + console.log("gRPC Unauthorized error: No token provided"); + throw new ServerError(Status.UNAUTHENTICATED, "Unauthenticated"); + } + + if (token.startsWith("Bearer ")) { + token = token.slice("Bearer ".length); + } + + const jwks = createLocalJWKSet(await oauthSigningKeys.createJwksSet()); + + const { payload } = await jwtVerify(token, jwks, { + issuer: 
getConfigOption("OAUTH_ISSUER"), + audience: getOAuthAudienceGeneralApi(), + }); + + const permissions = await JobberPermissionsSchema.parseAsync( + payload.permissions, + ); + + const bouncer = new BouncerBase(permissions); + + return bouncer; + } catch (err) { + if (err instanceof ServerError) { + throw err; + } + + if (err instanceof joseErrors.JOSEError) { + console.log("gRPC Unauthorized error:", err); + throw new ServerError(Status.UNAUTHENTICATED, "Unauthenticated"); + } + + console.log("gRPC Internal server error:", err); + throw new ServerError(Status.INTERNAL, "Internal server error"); + } +}; diff --git a/packages/server/src/index.ts b/packages/server/src/index.ts index 03b885a..372739d 100644 --- a/packages/server/src/index.ts +++ b/packages/server/src/index.ts @@ -1,12 +1,11 @@ import "reflect-metadata"; -import "./jobber/log-drivers/index.js"; import { LogDriverBase } from "./jobber/log-drivers/abstract.js"; +import "./jobber/log-drivers/index.js"; import { RunnerManager } from "./jobber/runners/manager.js"; import { Store } from "./jobber/store.js"; import { Telemetry } from "./jobber/telemetry.js"; import { TriggerCron } from "./jobber/triggers/cron.js"; -import { TriggerHttp } from "./jobber/triggers/http.js"; import { TriggerMqtt } from "./jobber/triggers/mqtt.js"; import { serve } from "@hono/node-server"; @@ -14,27 +13,29 @@ import { serveStatic } from "@hono/node-server/serve-static"; import { genSalt as bcryptGenSalt, hash as bcryptHash } from "bcryptjs"; import { eq } from "drizzle-orm"; import { Hono } from "hono"; -import { StatusCode } from "hono/utils/http-status"; -import { mkdir, readFile } from "node:fs/promises"; import { container } from "tsyringe"; import { ZodError } from "zod"; import { getDrizzle, getPool, runDrizzleMigration } from "./db/index.js"; -import { ApiTokensTableType } from "./db/schema/api-tokens.js"; -import { SessionsTableType } from "./db/schema/sessions.js"; -import { - UserPasswordSchema, - usersTable, - 
UsersTableType, - UserUsernameSchema, -} from "./db/schema/users.js"; +import { PERMISSION_SUPER } from "@jobber/common/permissions.js"; import { getConfigOption } from "./config.js"; import { cleanupLocks } from "./lock.js"; -import { getJobActionArchiveDirectory, getPgDumpDirectory } from "./paths.js"; -import { JobberPermissions, PERMISSION_SUPER } from "./permissions.js"; +import { ensureDirectoriesExist } from "./paths.js"; +import { Bouncer } from "./bouncer.js"; +import { USERNAME_ANONYMOUS } from "./constants.js"; +import { usersTable } from "./db/schema.js"; +import { + JobsTableType, + UserPasswordSchema, + UserUsernameSchema, +} from "./db/types.js"; +import { GrpcServer } from "./grpc/grpc-server.js"; +import { PgBackup } from "./pg-backup.js"; +import { RateLimit } from "./rate-limit.js"; import { createRouteApiTokens } from "./routes/api-tokens.js"; +import { createRouteAuditLog } from "./routes/audit-log.js"; import { createRouteAuth } from "./routes/auth.js"; import { createRouteConfig } from "./routes/config.js"; import { createRouteJobActions } from "./routes/job/actions.js"; @@ -48,11 +49,12 @@ import { createRouteJobStore } from "./routes/job/store.js"; import { createRouteJobTriggers } from "./routes/job/triggers.js"; import { createRouteVersions } from "./routes/job/versions.js"; import { createRouteMetrics } from "./routes/metrics.js"; +import { createRouteOAuthAdmin } from "./routes/oauth-admin.js"; +import { createRouteOAuth } from "./routes/oauth.js"; import { createRouteUser } from "./routes/user.js"; -import { USERNAME_ANONYMOUS } from "./constants.js"; -import { PgBackup } from "./pg-backup.js"; -import { Bouncer } from "./bouncer.js"; -import { JobsTableType } from "./db/schema/jobs.js"; +import { seedsRun } from "./seeding/index.js"; +import { OAuthServiceClients } from "./service-clients.js"; +import { OAuthSigningKeys } from "./signing-keys.js"; export type InternalHonoApp = { Variables: { @@ -75,14 +77,14 @@ async function 
createInternalHono() { issue.path.at(0) === "request" && (issue.path.at(1) === "body" || issue.path.at(1) === "query" || - issue.path.at(1) === "param") + issue.path.at(1) === "param"), ) ) { return c.json({ success: false, message: "Malformed request body", errors: err.errors.map( - (issue) => `${issue.path.join(".")} - ${issue.message}` + (issue) => `${issue.path.join(".")} - ${issue.message}`, ), }); } @@ -95,7 +97,7 @@ async function createInternalHono() { success: false, message: "Internal Server Error", }, - 500 + 500, ); }); @@ -105,33 +107,38 @@ async function createInternalHono() { success: false, message: "Not Found", }, - 404 + 404, ); }); app.route("/api/", await createRouteApiTokens()); + app.route("/api/", await createRouteAuditLog()); app.route("/api/", await createRouteAuth()); - app.route("/api/", await createRouteUser()); + app.route("/api/", await createRouteConfig()); + app.route("/api/", await createRouteJob()); app.route("/api/", await createRouteJobActions()); app.route("/api/", await createRouteJobEnvironment()); - app.route("/api/", await createRouteJob()); - app.route("/api/", await createRouteJobRunners()); - app.route("/api/", await createRouteJobMetrics()); app.route("/api/", await createRouteJobLogs()); + app.route("/api/", await createRouteJobMetrics()); app.route("/api/", await createRouteJobPublish()); + app.route("/api/", await createRouteJobRunners()); app.route("/api/", await createRouteJobStore()); app.route("/api/", await createRouteJobTriggers()); - app.route("/api/", await createRouteConfig()); - app.route("/api/", await createRouteVersions()); app.route("/api/", await createRouteMetrics()); + app.route("/api/", await createRouteOAuthAdmin()); + app.route("/api/", await createRouteUser()); + app.route("/api/", await createRouteVersions()); + + // Not within /api/ for compliance with OAuth 2.0 best practices. 
+ app.route("/", await createRouteOAuth()); - app.get("/", async (c) => c.redirect("/jobber/")); + app.get("/", async (c) => c.redirect("/home/")); app.use( "/*", serveStatic({ root: "./public", - }) + }), ); app.use( @@ -139,101 +146,19 @@ async function createInternalHono() { serveStatic({ path: "index.html", root: "./public/", - }) + }), ); return app; } -async function createGatewayHono() { - const triggerHttp = container.resolve(TriggerHttp); - - const app = new Hono(); - - app.all("*", async (c, next) => { - const bodyDirect = await c.req.arrayBuffer(); - - const headers = c.req.header(); - const query = c.req.query(); - const queries = c.req.queries(); - const path = c.req.path; - const method = c.req.method; - const body = Buffer.from(bodyDirect); - const bodyLength = body.length; - - const response = await triggerHttp.sendHandleRequest({ - body: body.toString("base64"), - bodyLength, - method, - path, - queries, - query, - headers, - }); - - if (!response || !response.success || !response.http) { - const acceptHeader = c.req.header("accept") || ""; - - if (acceptHeader.includes("text/html")) { - const badGatewayPage = await readFile( - "./src/static-templates/bad-gateway.html" - ); - - return c.html(badGatewayPage.toString(), 502); - } - - if (acceptHeader.includes("application/json")) { - return c.json( - { - success: false, - message: `Jobber: Gateway error!`, - }, - 502 - ); - } - - if (acceptHeader.includes("application/xml")) { - return c.body( - `falseJobber: Gateway error!`, - 502, - { - "Content-Type": "application/xml", - } - ); - } - - return c.text(`Jobber: Gateway error!`, 502); - } - - if (!response.http) { - return c.json( - { - success: false, - message: `Jobber: Gateway Error! No HTTP response received.`, - }, - 502 - ); - } - - // TODO: In the future we should migrate to a streaming response for larger bodies. Previously - // it was implicitly converted to a string, which is nonideal. 
- return c.body( - Uint8Array.from(response.http.body).buffer, - response.http.status as StatusCode, - response.http.headers - ); - }); - - return app; -} - async function createStartupAccount() { const configUsername = getConfigOption("STARTUP_USERNAME"); const configPassword = getConfigOption("STARTUP_PASSWORD"); if (!configUsername || !configPassword) { console.log( - "[createStartupAccount] No startup username or password configured. Skipping account creation." + "[createStartupAccount] No startup username or password configured. Skipping account creation.", ); return; @@ -244,7 +169,7 @@ async function createStartupAccount() { if (!parsedUsername.success || !parsedPassword.success) { console.error( - "[createStartupAccount] Invalid startup username or password. Please check your configuration." + "[createStartupAccount] Invalid startup username or password. Please check your configuration.", ); return; @@ -268,14 +193,14 @@ async function createStartupAccount() { if (!user) { console.log( - "[createStartupAccount] User already exists or could not be created." + "[createStartupAccount] User already exists or could not be created.", ); return; } console.log( - `[createStartupAccount] Startup account created successfully: ${user.username}` + `[createStartupAccount] Startup account created successfully: ${user.username}`, ); } @@ -299,9 +224,102 @@ async function createAnonymousAccount() { }); } +// async function createApiTokenInternal() { +// // API token created for internal services, used for easy setup. Can be omitted for manual setup. 
+ +// const tokenValue = getConfigOption("API_TOKEN_INTERNAL"); +// const tokenFlag = getConfigOption("API_TOKEN_INTERNAL_FLAG"); + +// if (tokenValue === null) { +// return; +// } + +// const permissions: JobberPermissions = []; + +// if (tokenFlag === "gateway-permissions") { +// // Allow gateway access to create JWTs for runners +// permissions.push({ +// effect: "allow", +// resource: "grpc/runner-jwt", +// actions: ["read", "write", "delete"], +// }); + +// // Allow full job access +// permissions.push({ +// effect: "allow", +// resource: "job", +// actions: ["read", "write", "delete"], +// }); + +// // Prevent from accessing job environment variables +// permissions.push({ +// effect: "deny", +// resource: "job/*/environment", +// actions: ["read", "write", "delete"], +// }); + +// // Prevent from accessing job store +// permissions.push({ +// effect: "deny", +// resource: "job/*/store", +// actions: ["read", "write", "delete"], +// }); + +// // Prevent publishing jobs +// permissions.push({ +// effect: "deny", +// resource: "job/-/publish", +// actions: ["read", "write", "delete"], +// }); + +// // Prevent all API tokens management +// permissions.push({ +// effect: "deny", +// resource: "api-tokens", +// actions: ["read", "write", "delete"], +// }); + +// // Prevent all user management +// permissions.push({ +// effect: "deny", +// resource: "users", +// actions: ["read", "write", "delete"], +// }); + +// // +// } + +// const anonymousUser = await userModel.byUsername(USERNAME_ANONYMOUS); + +// if (!anonymousUser) { +// throw new Error("Anonymous user does not exist."); +// } + +// await getDrizzle() +// .insert(apiTokensTable) +// .values({ +// token: tokenValue, +// description: "Internal API Token", +// permissions, +// status: "enabled", +// expires: new Date("2099-12-31T23:59:59Z"), +// userId: anonymousUser.id, +// }) +// .onConflictDoUpdate({ +// target: apiTokensTable.token, +// set: { +// description: "Internal API Token", +// permissions, +// 
status: "enabled", +// expires: new Date("2099-12-31T23:59:59Z"), +// userId: anonymousUser.id, +// }, +// }); +// } + async function main() { console.log( - "WARNING: This is an experimental runtime, and issues ARE expected! Report any issue, or raise a PR with a fix. Issues WILL be investigated and fixed." + "WARNING: This is an experimental runtime, and issues ARE expected! Report any issue, or raise a PR with a fix. Issues WILL be investigated and fixed.", ); console.log("[main] Initialising Database connection..."); @@ -318,22 +336,20 @@ async function main() { } console.log(`[main] Creating directories...`); - await mkdir(getJobActionArchiveDirectory(), { - recursive: true, - }); - await mkdir(getPgDumpDirectory(), { - recursive: true, - }); + ensureDirectoriesExist(); console.log(`[main] done.`); console.log(`[main] Starting db lock cleanup...`); await cleanupLocks(); - const lockCleanupInterval = setInterval(async () => { - try { - await cleanupLocks(); - } catch (err) { - console.error("[main] Error during lock cleanup:", err); - } - }, 1000 * 60 * 5); // Every 5 minutes + const lockCleanupInterval = setInterval( + async () => { + try { + await cleanupLocks(); + } catch (err) { + console.error("[main] Error during lock cleanup:", err); + } + }, + 1000 * 60 * 5, + ); // Every 5 minutes console.log(`[main] done.`); console.log(`[main] Creating startup account...`); @@ -344,6 +360,20 @@ async function main() { await createAnonymousAccount(); console.log(`[main] done.`); + console.log("[main] Initialising OAuth Signing Keys..."); + const oauthSigningKeys = container.resolve(OAuthSigningKeys); + await oauthSigningKeys.start(); + console.log(`[main] done.`); + + console.log("[main] Initialising OAuth Service Clients..."); + const oauthServiceClients = container.resolve(OAuthServiceClients); + oauthServiceClients.start(); + console.log(`[main] done.`); + + // console.log(`[main] Creating internal API token...`); + // await createApiTokenInternal(); + // 
console.log(`[main] done.`); + console.log("[main] Starting pg backup service..."); const pgBackup = container.resolve(PgBackup); await pgBackup.start(); @@ -367,19 +397,19 @@ async function main() { console.log(`[main] Initialising triggers (Cron, MQTT, HTTP)...`); const triggerCron = container.resolve(TriggerCron); const triggerMqtt = container.resolve(TriggerMqtt); - const triggerHttp = container.resolve(TriggerHttp); - await Promise.all([ - triggerCron.start(), - triggerMqtt.start(), - triggerHttp.start(), - ]); + await Promise.all([triggerCron.start(), triggerMqtt.start()]); console.log(`[main] done.`); - console.log(`[main] Registering MQTT Publish Handler...`); - runnerManager.registerMqttPublishHandler((...args) => - triggerMqtt.publishMqttMessage(...args) - ); + // console.log(`[main] Registering MQTT Publish Handler...`); + // runnerManager.registerMqttPublishHandler((...args) => + // triggerMqtt.publishMqttMessage(...args), + // ); + // console.log(`[main] done.`); + + console.log(`[main] Initialising rate limiter...`); + const rateLimit = container.resolve(RateLimit); + await rateLimit.start(); console.log(`[main] done.`); console.log(`[main] Initialising telemetry...`); @@ -387,28 +417,29 @@ async function main() { await telemetry.start(); console.log(`[main] done.`); + console.log("[main] Initialising gRPC server..."); + const grpcServer = container.resolve(GrpcServer); + await grpcServer.start(); + console.log("[main] done."); + console.log(`[main] Initialising APIs (API Internal, API Gateway)...`); const appInternal = await createInternalHono(); - const appGateway = await createGatewayHono(); const serverInternal = serve({ - port: 3000, + port: getConfigOption("API_PORT"), fetch: appInternal.fetch, }); - const serverGateway = serve({ - port: 3001, - fetch: appGateway.fetch, - }); + console.log(`[main] done.`); + + console.log(`[main] Running seeds...`); + await seedsRun(); + console.log(`[main] done.`); serverInternal.once("listening", () => { 
console.log("[main] API Internal now listening"); }); - serverGateway.once("listening", () => { - console.log("[main] API Gateway now listening"); - }); - console.log(`[main] Application startup routine has completed.`); const signalRoutine = async () => { @@ -419,11 +450,7 @@ async function main() { console.log(`[signalRoutine] done.`); console.log(`[signalRoutine] Stopping all triggers.`); - await Promise.all([ - triggerCron.stop(), - triggerMqtt.stop(), - triggerHttp.stop(), - ]); + await Promise.all([triggerCron.stop(), triggerMqtt.stop()]); console.log(`[signalRoutine] done.`); console.log(`[signalRoutine] Stopping telemetry.`); @@ -438,6 +465,10 @@ async function main() { await runnerManager.stop(); console.log(`[signalRoutine] done.`); + console.log("[signalRoutine] Stopping gRPC server."); + await grpcServer.stop(); + console.log("[signalRoutine] done."); + console.log(`[signalRoutine] Stopping pg backup service.`); await pgBackup.stop(); console.log(`[signalRoutine] done.`); @@ -450,8 +481,16 @@ async function main() { await store.stop(); console.log(`[signalRoutine] done.`); - console.log(`[signalRoutine] Closing API Gateway...`); - serverGateway.close(); + console.log(`[signalRoutine] Stopping rate limiter...`); + await rateLimit.stop(); + console.log(`[signalRoutine] done.`); + + console.log(`[signalRoutine] Stopping OAuth Service Clients...`); + await oauthServiceClients.stop(); + console.log(`[signalRoutine] done.`); + + console.log(`[signalRoutine] Stopping OAuth Signing Keys...`); + await oauthSigningKeys.stop(); console.log(`[signalRoutine] done.`); console.log(`[signalRoutine] Ending Database connection...`); diff --git a/packages/server/src/jobber/images.ts b/packages/server/src/jobber/images.ts index 43f3f79..4e80cd1 100644 --- a/packages/server/src/jobber/images.ts +++ b/packages/server/src/jobber/images.ts @@ -5,7 +5,7 @@ import path from "path"; import * as semver from "semver"; import { z } from "zod"; import { getConfigOption } from 
"~/config.js"; -import { ActionsDockerArgumentsSchema } from "~/db/schema/actions.js"; +import { ActionsDockerArgumentsSchema } from "~/db/types.js"; import { createToken, fileExists, unzip } from "~/util.js"; export type ImagesEntry = { @@ -15,43 +15,45 @@ export type ImagesEntry = { imageUrl: string; } & ({ runtime: "node" } | { runtime: "python" }); -const images: Array = [ - { - name: "node24", - status: "active", - runtime: "node", - version: "v24", - imageUrl: getConfigOption("RUNNER_IMAGE_NODE24_URL"), - }, - { - name: "node22", - status: "active", - runtime: "node", - version: "v22", - imageUrl: getConfigOption("RUNNER_IMAGE_NODE22_URL"), - }, - { - name: "node20", - status: "active", - runtime: "node", - version: "v20", - imageUrl: getConfigOption("RUNNER_IMAGE_NODE20_URL"), - }, - { - name: "python3", - status: "disabled", - runtime: "python", - version: "v3", - imageUrl: "", - }, - { - name: "python2", - status: "disabled", - runtime: "python", - version: "v2", - imageUrl: "", - }, -]; +const getInternalImages = (): ImagesEntry[] => { + return [ + { + name: "node24", + status: "active", + runtime: "node", + version: "v24", + imageUrl: getConfigOption("RUNNER_IMAGE_NODE24_URL"), + }, + { + name: "node22", + status: "active", + runtime: "node", + version: "v22", + imageUrl: getConfigOption("RUNNER_IMAGE_NODE22_URL"), + }, + { + name: "node20", + status: "active", + runtime: "node", + version: "v20", + imageUrl: getConfigOption("RUNNER_IMAGE_NODE20_URL"), + }, + { + name: "python3", + status: "disabled", + runtime: "python", + version: "v3", + imageUrl: "", + }, + { + name: "python2", + status: "disabled", + runtime: "python", + version: "v2", + imageUrl: "", + }, + ]; +}; const defaultRuntimeImages = { node: "node24", @@ -61,21 +63,21 @@ const defaultRuntimeImages = { export const getDefaultRuntimeImages = () => defaultRuntimeImages; export const getImage = async (name: string): Promise => { - return images.find((image) => image.name === name) ?? 
null; + return getInternalImages().find((image) => image.name === name) ?? null; }; export const getImages = async (): Promise => { - return images; + return getInternalImages(); }; const getImageFromArchivePackageJson = ( - packageJson: ArchivePackageJsonSchemaType + packageJson: ArchivePackageJsonSchemaType, ): string => { if (!packageJson.engines?.node) { return getDefaultRuntimeImages().node; } - for (const image of images) { + for (const image of getInternalImages()) { if (image.runtime !== "node") { continue; } @@ -155,11 +157,11 @@ const archivePackageJsonSchema = z.object({ clientIdVariable: z.string().optional(), }), }), - ]) + ]), ) .superRefine((triggers, ctx) => { const mqttTriggers = triggers.filter( - (trigger) => trigger.type === "mqtt" + (trigger) => trigger.type === "mqtt", ); if (mqttTriggers.length >= 2) { @@ -167,7 +169,7 @@ const archivePackageJsonSchema = z.object({ } for (const [triggerIndex, trigger] of Object.entries( - mqttTriggers.slice(1) + mqttTriggers.slice(1), )) { if (trigger.type === "mqtt") { ctx.addIssue({ @@ -183,7 +185,7 @@ const archivePackageJsonSchema = z.object({ z.object({ name: z.string(), url: z.string().url(), - }) + }), ) .default([]), }); @@ -193,7 +195,7 @@ export type ArchivePackageJsonSchemaType = z.infer< >; export const classifyArchiveFile = async ( - filename: string + filename: string, ): Promise< | null | { @@ -211,7 +213,7 @@ export const classifyArchiveFile = async ( try { const directory = path.join( tmpdir(), - createToken({ length: 12, prefix: "ArchiveValidation" }) + createToken({ length: 12, prefix: "ArchiveValidation" }), ); cleanupFiles.push(directory); @@ -230,7 +232,7 @@ export const classifyArchiveFile = async ( if (hasPackageJson) { const packageJson = await archivePackageJsonSchema.parseAsync( - JSON.parse(await readFile(packageFile, "utf8")) + JSON.parse(await readFile(packageFile, "utf8")), ); const imageName = getImageFromArchivePackageJson(packageJson); @@ -241,7 +243,9 @@ export const 
classifyArchiveFile = async ( return null; } - const image = images.find((index) => index.name === imageName); + const image = getInternalImages().find( + (index) => index.name === imageName, + ); assert(image); assert(image.runtime === "node"); diff --git a/packages/server/src/jobber/log-drivers/abstract.ts b/packages/server/src/jobber/log-drivers/abstract.ts index d43eae8..6a61b7d 100644 --- a/packages/server/src/jobber/log-drivers/abstract.ts +++ b/packages/server/src/jobber/log-drivers/abstract.ts @@ -1,4 +1,4 @@ -import { LoopBase } from "~/loop-base.js"; +import { LoopBase } from "@jobber/common"; export type LogDriverBaseItem = { actionId: string; diff --git a/packages/server/src/jobber/log-drivers/database.ts b/packages/server/src/jobber/log-drivers/database.ts index cb7f1e0..a9b67f8 100644 --- a/packages/server/src/jobber/log-drivers/database.ts +++ b/packages/server/src/jobber/log-drivers/database.ts @@ -1,6 +1,7 @@ import { desc, eq, gt, lt, sql } from "drizzle-orm"; import { getDrizzle } from "~/db/index.js"; -import { logsTable, LogsTableInsertType } from "~/db/schema/logs.js"; +import { logsTable } from "~/db/schema.js"; +import { LogsTableInsertType } from "~/db/types.js"; import { LogDriverBase, LogDriverBaseItem, @@ -73,7 +74,7 @@ export class LogDriverDatabase extends LogDriverBase { } public async query( - query: LogDriverBaseQuery + query: LogDriverBaseQuery, ): Promise { const page = 1; // TODO: this diff --git a/packages/server/src/jobber/runners/manager.ts b/packages/server/src/jobber/runners/manager.ts index 150e9f8..fe5f54a 100644 --- a/packages/server/src/jobber/runners/manager.ts +++ b/packages/server/src/jobber/runners/manager.ts @@ -1,191 +1,151 @@ import assert from "assert"; import { ChildProcessWithoutNullStreams, spawn } from "child_process"; import { and, eq, isNotNull } from "drizzle-orm"; +import { + Channel, + Client, + ClientError, + createChannel, + createClientFactory, + Metadata, + Status, +} from "nice-grpc"; import { 
inject, singleton } from "tsyringe"; +import { LoopBase, timeout } from "@jobber/common"; +import { Deferred, deferred } from "@jobber/common/deferred.js"; +import { getOAuthAudienceRunnerApi } from "@jobber/common/oauth.js"; +import { + EventMqttRequest, + EventMqttResponse, + EventScheduleRequest, + EventScheduleResponse, + RunnerAPIDefinition, + StatusResponse, +} from "@jobber/grpc/runner.js"; +import { unlink, writeFile } from "fs/promises"; import { getConfigOption } from "~/config.js"; import { ENTRYPOINT_NODE } from "~/constants.js"; +import { actionsModel } from "~/db/actions.js"; +import { environmentModel } from "~/db/environment.js"; import { getDrizzle } from "~/db/index.js"; -import { actionsTable, ActionsTableType } from "~/db/schema/actions.js"; +import { jobVersionsModel } from "~/db/job-versions.js"; +import { jobModel } from "~/db/job.js"; +import { runnersModel } from "~/db/runners.js"; import { + actionsTable, environmentsTable, - EnvironmentsTableType, -} from "~/db/schema/environments.js"; -import { + jobsTable, jobVersionsTable, +} from "~/db/schema.js"; +import { + ActionsTableType, + EnvironmentsTableType, + JobsTableType, JobVersionsTableType, -} from "~/db/schema/job-versions.js"; -import { jobsTable, JobsTableType } from "~/db/schema/jobs.js"; + RunnersTableType, +} from "~/db/types.js"; import { getDockerContainers, - pullDockerImage, + killDockerContainer, stopDockerContainer, } from "~/docker.js"; -import { LoopBase } from "~/loop-base.js"; +import { getRunnerEnvFile } from "~/paths.js"; +import { OAuthServiceClients } from "~/service-clients.js"; import { - counterRunnerRequests, - gaugeActiveRunners, - histogramJobManagerLoopDuration, - histogramRunnerRequestDuration, - histogramRunnerShutdownDuration, - histogramRunnerStartupDuration, -} from "~/metrics.js"; -import { - awaitTruthy, - createBenchmark, createToken, getUnixTimestamp, sanitiseSafeCharacters, shortenString, - timeout, } from "~/util.js"; -import { getImage, getImages 
} from "../images.js"; +import { getImage } from "../images.js"; import { LogDriverBase } from "../log-drivers/abstract.js"; -import { Store } from "../store.js"; -import { HandleRequest, HandleResponse, RunnerServer } from "./server.js"; +import { ChannelCredentials, connectivityState } from "@grpc/grpc-js"; -type RunnerManagerItem = { - status: "starting" | "ready" | "closing" | "closed"; - - id: string; +type CurrentVersionResult = { version: JobVersionsTableType; - action: ActionsTableType; job: JobsTableType; + action: ActionsTableType; environment: EnvironmentsTableType | null; - - process: ChildProcessWithoutNullStreams; - - requestsProcessing: number; - - lastRequestAt?: number; - createdAt: number; - readyAt?: number; - closingAt?: number; - closedAt?: number; }; -@singleton() -export class RunnerManager extends LoopBase { - protected loopDuration = 500; - protected loopClosing = undefined; - protected loopStarting = undefined; - - private server: RunnerServer; - - private runners: Record = {}; - - private requestedVersionIds = new Set(); - - private danglingLastRun = 0; - - private imagePullLastRun = 0; - - constructor( - @inject("LogDriverBase") private logger: LogDriverBase, - @inject(Store) private store: Store - ) { - super(); +type RunnerManagerItem = { + runnerId: string; - this.server = new RunnerServer(this.store); + job: JobsTableType; + jobVersion: JobVersionsTableType; + jobAction: ActionsTableType; + environment: EnvironmentsTableType | null; - this.server.on("runner-starting", (runnerId) => { - const runner = this.runners[runnerId]; + process: ChildProcessWithoutNullStreams; - if (!runner) { - console.warn( - `[RunnerManager/runner-starting] Runner not found for id ${runnerId}` - ); + properties: RunnersTableType["properties"]; - return; - } + // Arguments passed through to the runner + arguments: { + runnerId: string; - runner.status = "starting"; - }); + runnerClientId: string; + runnerClientSecret: string; + runnerGeneralApiEndpoint: 
string; - this.server.on("runner-ready", (runnerId) => { - const runner = this.runners[runnerId]; + runnerOAuthTokenEndpoint: string; + runnerOAuthJwksEndpoint: string; + runnerOAuthIssuer: string; - if (!runner) { - console.warn( - `[RunnerManager/runner-ready] Runner not found for id ${runnerId}` - ); + runnerApiPort: number; - return; - } + runnerDebug: boolean; + }; - runner.readyAt = getUnixTimestamp(); - runner.status = "ready"; + lastStatus?: StatusResponse; - console.log( - `[RunnerManager/runner-ready] Runner is ready at ${new Date( - runner.readyAt * 1000 - ).toISOString()}` - ); - - histogramRunnerStartupDuration - .labels({ - job_id: runner.job.id, - job_name: runner.job.jobName, - version: runner.version.version, - }) - .observe(runner.readyAt - runner.createdAt); - }); + grpcToken: string | null; + grpcTokenExpiry: number | null; + grpcMetadata: Metadata; + grpcChannel: Channel | null; + grpc: Client | null; - this.server.on("runner-closing", (runnerId) => { - const runner = this.runners[runnerId]; + promiseEvents: { + ready: Deferred; + closing: Deferred; + closed: Deferred; + }; - if (!runner) { - console.warn( - `[RunnerManager/runner-closing] Runner not found for id ${runnerId}` - ); + createdAt: number; +}; - return; - } +type RunnerManagerStartupItem = { + jobId: string; - runner.closingAt = getUnixTimestamp(); - runner.status = "closing"; - }); + startupPromise: Deferred; +}; - this.server.on("runner-close", (runnerId) => { - const runner = this.runners[runnerId]; +type RunnerManagerShutdownItem = { + runnerId: string; + method: "graceful" | "forceful"; +}; - if (!runner) { - console.warn( - `[RunnerManager/runner-close] Runner not found for id ${runnerId}. 
Was process forcefully killed?` - ); +@singleton() +export class RunnerManager extends LoopBase { + protected loopDuration = 250; + protected loopClosing = undefined; + protected loopStarting = undefined; - return; - } + private runners = new Map(); - runner.closedAt = getUnixTimestamp(); - runner.status = "closed"; - runner.process.kill("SIGKILL"); - - if (runner.closingAt) { - histogramRunnerShutdownDuration - .labels({ - job_id: runner.job.id, - job_name: runner.job.jobName, - version: runner.version.version, - }) - .observe(runner.closingAt - runner.closedAt); - } - }); - } + private queueShutdown = Array(); - protected async loopStarted() { - await this.server.start(); - } + private queueStartup = Array(); - protected async loopClosed() { - await this.loopClose(); - await this.server.stop(); + constructor( + @inject("LogDriverBase") private logger: LogDriverBase, + @inject(OAuthServiceClients) private serviceClients: OAuthServiceClients, + ) { + super(); } protected async loopIteration() { - const benchmark = createBenchmark(); - const end = histogramJobManagerLoopDuration.startTimer(); - const currentVersions = await getDrizzle() .select({ version: jobVersionsTable, @@ -198,1125 +158,995 @@ export class RunnerManager extends LoopBase { jobVersionsTable, and( eq(jobsTable.id, jobVersionsTable.jobId), - eq(jobsTable.jobVersionId, jobVersionsTable.id) - ) + eq(jobsTable.jobVersionId, jobVersionsTable.id), + ), ) .innerJoin( actionsTable, and( eq(jobsTable.id, actionsTable.jobId), - eq(jobsTable.jobVersionId, actionsTable.jobVersionId) - ) + eq(jobsTable.jobVersionId, actionsTable.jobVersionId), + ), ) .leftJoin(environmentsTable, eq(environmentsTable.jobId, jobsTable.id)) .where( - and(isNotNull(jobsTable.jobVersionId), eq(jobsTable.status, "enabled")) + and(isNotNull(jobsTable.jobVersionId), eq(jobsTable.status, "enabled")), ); + // TODO: RUN LESS FREQUENTLY!!! + await this.checkDanglingRunners(); + + // TODO: query the runners less frequently.. 
every few seconds feels more appropriate. await Promise.all( - currentVersions.map(async (item) => this.loopRunnerSpawner([item])) + Array.from(this.runners.keys()).map((runnerId) => + this.updateRunnerStatus(runnerId), + ), ); - await this.loopCheckEnvironmentChanges(currentVersions); - await this.loopCheckVersion(currentVersions); - await this.loopCheckMaxAge(currentVersions); - await this.loopCheckHardMaxAge(currentVersions); - await this.loopCheckMaxIdleAge(currentVersions); + await Promise.all( + Array.from(this.runners.keys()).map((runnerId) => + this.checkRunner(runnerId, currentVersions), + ), + ); - if (getUnixTimestamp() - this.danglingLastRun > 60) { - await this.loopCheckDanglingContainers(currentVersions); + await Promise.all( + currentVersions.map((currentVersion) => + this.checkScaling(currentVersion), + ), + ); - this.danglingLastRun = getUnixTimestamp(); - } + await this.processStartupQueue(); + await this.processShutdownQueue(); + } - if (getUnixTimestamp() - this.imagePullLastRun > 300) { - await this.loopImagePull(); + protected async loopStarted(): Promise { + // + } - this.imagePullLastRun = getUnixTimestamp(); + protected async loopClosed(): Promise { + for (const runner of this.runners.values()) { + await this.shutdownQueueAdd(runner.runnerId, false); } - const benchmarkResult = benchmark(); - if (benchmarkResult >= 10_000) { - console.log( - `[RunnerManager/loop] loop iteration exceeded 10,000ms (10s), took ${benchmarkResult.toFixed( - 2 - )}ms to complete!` - ); - } - - end(); + await this.processShutdownQueue(); } - public async sendHandleRequest( - version: JobVersionsTableType, - job: JobsTableType, - action: ActionsTableType, - handleRequest: HandleRequest - ): Promise { - assert(action.jobVersionId === version.id); - - const activeRunners = Object.values(this.runners).filter( - (index) => index.version.id === version.id - ); - - if (action.runnerMode === "run-once") { - const canCreateRunner = - action.runnerMaxCount === 0 || - 
activeRunners.length < action.runnerMaxCount; + private async updateRunnerStatus(runnerId: string) { + try { + const runner = this.runners.get(runnerId); - if (!canCreateRunner) { + if (!runner) { console.warn( - `[RunnerManager/sendHandleRequest] Failed to start runner, allocation of runners exhausted. actionRunners.length ${activeRunners.length}` + `[RunnerManager/updateRunnerStatus] Runner ${runnerId} not found in manager's runner list.`, ); - - return { - success: false, - duration: -1, - error: "Jobber: Failed to start runner.", - }; + return; } - const runnerId = await this.createRunner(version, action, job, { - dockerNamePrefix: job.jobName, - }); - - await this.server.awaitConnectionStatus(runnerId, "ready"); - - const runner = this.runners[runnerId]; - - if (!runner) { + if (!runner.grpc) { console.warn( - `[RunnerManager/sendHandleRequest] Failed to start runner, unable to find started runner` + `[RunnerManager/updateRunnerStatus] Runner ${runnerId} does not have gRPC client initialized yet.`, ); - - return { - success: false, - duration: -1, - error: "Jobber: Failed to start runner.", - }; + return; } - if (runner.status !== "ready") { + if (runner.process.killed) { console.warn( - `[Runners/sendHandleRequest] Failed to start runner, sending termination. status ${runner.status}` + `[RunnerManager/updateRunnerStatus] Runner ${runnerId} process is killed but still in runner list. Removing...`, ); + // Cleanup is handled in process 'exit' event. 
+ return; + } - runner.process.kill("SIGTERM"); + if ( + runner.grpcTokenExpiry && + getUnixTimestamp() > runner.grpcTokenExpiry - 60 + ) { + // Token is expired or about to expire in the next 60 seconds, generate a new one - return { - success: false, - duration: 0, - error: "Jobber: Failed to start runner.", - }; - } + const tokenResult = await this.serviceClients.generateTokenForServer( + getOAuthAudienceRunnerApi(runnerId), + ); - let result: HandleResponse; + runner.grpcMetadata.set("Authorization", `Bearer ${tokenResult.jwt}`); + runner.grpcToken = tokenResult.jwt; + runner.grpcTokenExpiry = Math.floor( + tokenResult.expiration.getTime() / 1000, + ); + } try { - runner.lastRequestAt = getUnixTimestamp(); - runner.requestsProcessing++; + const previousStatus = runner.lastStatus?.status; - result = await this.server.sendHandleRequest(runner.id, handleRequest); + const status = await runner.grpc.status({}); - // Timeout is only here for run-once mode - await timeout(100); - } finally { - runner.requestsProcessing--; + runner.lastStatus = status; - await this.server.sendShutdownRequest(runner.id); - } + if (status.status === "READY" && previousStatus !== "READY") { + await runnersModel.update(runner.runnerId, { + status: "ready", + readyAt: new Date(), + }); + + runner.promiseEvents.ready.resolve(); + } + } catch (err) { + // When lastStatus is undefined, runner has not started yet. Ignore unavailable errors + // TODO: Possibly add other checks here? + if ( + err instanceof ClientError && + err.code === Status.UNAVAILABLE && + runner.lastStatus === undefined + ) { + return; + } - histogramRunnerRequestDuration - .labels({ - job_name: runner.job.jobName, - job_id: runner.job.id, - version: version.version, - trigger_type: handleRequest.type, - }) - .observe(result.duration); - - counterRunnerRequests - .labels({ - job_name: runner.job.jobName, - job_id: runner.job.id, - version: version.version, - trigger_type: handleRequest.type, - success: result.success ? 
1 : 0, - }) - .inc(); - - return result; + throw err; + } + } catch (err) { + console.error(err); } + } - if (action.runnerMode === "standard") { - const runnersPool = activeRunners - .filter((index) => index.status === "ready") - .sort((a, b) => a.requestsProcessing - b.requestsProcessing); - - // Start new runner - if (runnersPool.length <= 0) { - this.requestedVersionIds.add(version.id); - - await awaitTruthy(async () => - Object.values(this.runners).some( - (index) => - index.version.id === version.id && index.status === "ready" - ) - ); + private async checkRunner( + runnerId: string, + currentVersions: CurrentVersionResult[], + ) { + const runner = this.runners.get(runnerId); - const runners = Object.values(this.runners).filter( - (index) => index.version.id === version.id && index.status === "ready" - ); + if (!runner) { + return; + } - if (runners.length <= 0) { - console.warn( - `[RunnerManager/sendHandleRequest Failed to start runner, refer to other logs for more details.` - ); + const currentVersion = currentVersions.find( + (item) => item.version.id === runner.jobVersion.id, + ); - return { - success: false, - duration: -1, - error: "Jobber: Runner failed to start!", - }; - } + if (!currentVersion) { + // Send shutdown - job no longer has a version attached to it. 
- runnersPool.push(...runners); - } + this.shutdownQueueAdd(runner.runnerId, false); - const runner = runnersPool.at(0); + return; + } - if (!runner || runner.status !== "ready") { - console.warn( - `[RunnerManager/sendHandleRequest] Cannot find runner for actionId ${action.id}` - ); + // Check if its running the expected version + if (runner.jobVersion.id !== currentVersion.version.id) { + // Send shutdown - return { - success: false, - error: "Jobber: Cannot find runner!", - duration: -1, - }; - } + this.shutdownQueueAdd(runner.runnerId, false); - let result: HandleResponse; + return; + } - try { - runner.lastRequestAt = getUnixTimestamp(); - runner.requestsProcessing++; + // Check max age + if ( + runner.jobAction.runnerMaxAge && + getUnixTimestamp() > runner.createdAt + runner.jobAction.runnerMaxAge + ) { + // Send shutdown - result = await this.server.sendHandleRequest(runner.id, handleRequest); - } finally { - runner.requestsProcessing--; - } + this.shutdownQueueAdd(runner.runnerId, false); - histogramRunnerRequestDuration - .labels({ - job_name: runner.job.jobName, - job_id: runner.job.id, - version: version.version, - trigger_type: handleRequest.type, - }) - .observe(result.duration); - - counterRunnerRequests - .labels({ - job_name: runner.job.jobName, - job_id: runner.job.id, - version: version.version, - trigger_type: handleRequest.type, - success: result.success ? 
1 : 0, - }) - .inc(); - - return result; + return; } - throw new Error( - `[RunnerManager/sendHandleRequest] Unexpected runner mode.` - ); - } + // Check hard max age + if ( + runner.jobAction.runnerMaxAgeHard && + getUnixTimestamp() > runner.createdAt + runner.jobAction.runnerMaxAgeHard + ) { + // Send shutdown - public async sendShutdownGraceful(jobId: string, runnerId: string) { - const runner = this.runners[runnerId]; + this.shutdownQueueAdd(runnerId, true); - console.log( - `[RunnerManager/sendShutdownGraceful] Shutting down runner ${shortenString( - runnerId - )} for job ${shortenString(jobId)}` - ); + return; + } - if (!runner || runner.job.id !== jobId) { - console.warn( - `[RunnerManager/sendShutdownGraceful] Runner not found for jobId ${shortenString( - jobId - )} and runnerId ${shortenString(runnerId)}` - ); + // Check max idle age + if ( + runner.jobAction.runnerMaxIdleAge && + runner.lastStatus?.lastRequestAt && + getUnixTimestamp() > + runner.lastStatus?.lastRequestAt + runner.jobAction.runnerMaxIdleAge + ) { + // Send shutdown - return { - success: false, - message: "Runner not found", - } as const; + this.shutdownQueueAdd(runner.runnerId, false); + + return; } - await awaitTruthy( - async () => - runner.status === "ready" || - runner.status === "closing" || - runner.status === "closed", - 10_000 - ); + // Environment Changes - runner started without environment, but now has one + if (!runner.environment && currentVersion.environment) { + // Send shutdown - environment added - if (runner.status === "starting") { - console.warn( - `[RunnerManager/sendShutdownGraceful] Runner is still starting, cannot shutdown gracefully. 
runnerId ${shortenString( - runnerId - )}, jobId ${shortenString(jobId)}` - ); + this.shutdownQueueAdd(runner.runnerId, false); - return { - success: false, - message: "Runner is still starting, cannot shutdown gracefully", - } as const; + return; } - if (runner.status === "closed") { - console.warn( - `[RunnerManager/sendShutdownGraceful] Runner has already closed. runnerId ${shortenString( - runnerId - )}, jobId ${shortenString(jobId)}` - ); + // Environment Changes - runner started with environment, but now doesn't have one + if (runner.environment && !currentVersion.environment) { + // Send shutdown - environment removed - return { - success: false, - message: "Runner has already closed", - } as const; + this.shutdownQueueAdd(runner.runnerId, false); + + return; } - if (runner.status === "closing") { - console.warn( - `[RunnerManager/sendShutdownGraceful] Runner is already closing. runnerId ${shortenString( - runnerId - )}, jobId ${shortenString(jobId)}` - ); + // Environment Changes - runner started with environment, but it has been modified + if ( + runner.environment && + currentVersion.environment && + runner.environment.modified !== currentVersion.environment.modified + ) { + // Send shutdown - environment modified - return { - success: true, - message: "Runner is already closing", - } as const; - } + this.shutdownQueueAdd(runner.runnerId, false); - const response = await this.server.sendShutdownRequest(runner.id); + return; + } + } - if (!response) { - console.warn( - `[RunnerManager/sendShutdownGraceful] Failed to send shutdown request to runner. 
runnerId ${shortenString( - runnerId - )}, jobId ${shortenString(jobId)}` - ); + private async checkScaling({ + action, + environment, + job, + version, + }: CurrentVersionResult) { + // - return { - success: false, - message: "Failed to send shutdown request to runner", - } as const; + if (action.runnerMode !== "standard") { + // Will startup adhoc + return; } - console.log( - `[RunnerManager/sendShutdownGraceful] Runner shutdown request sent successfully. runnerId ${shortenString( - runnerId - )}, jobId ${shortenString(jobId)}` + const runnerCurrent = Array.from(this.runners.values()).filter( + (runner) => runner.jobVersion.id === version.id, ); - return { - success: true, - } as const; - } + // Average load for past 5 seconds + const averageLoad = runnerCurrent.reduce( + (acc, runner) => + ((runner.lastStatus?.loadAverage5Seconds ?? 0) + acc) / 2, + 0, + ); - public async sendShutdownForceful(jobId: string, runnerId: string) { - const runner = this.runners[runnerId]; + const targetLoadPerRunner = action.runnerAsynchronous ? 60 : 1; - console.log( - `[RunnerManager/sendShutdownForceful] Forcefully shutting down runner ${shortenString( - runnerId - )} for job ${shortenString(jobId)}` + let targetRunnerCount = Math.floor( + (averageLoad / targetLoadPerRunner) * 1.2, ); - if (!runner || runner.job.id !== jobId) { - console.warn( - `[RunnerManager/sendShutdownForceful] Runner not found for jobId ${shortenString( - jobId - )} and runnerId ${shortenString(runnerId)}` - ); + if (isNaN(targetRunnerCount)) { + targetRunnerCount = 0; + } - return { - success: false, - message: "Runner not found", - } as const; + if (targetRunnerCount > action.runnerMaxCount) { + targetRunnerCount = action.runnerMaxCount; } - if (runner.status === "closed") { - console.warn( - `[RunnerManager/sendShutdownForceful] Runner has already closed. 
runnerId ${shortenString( - runnerId - )}, jobId ${shortenString(jobId)}` - ); + if (targetRunnerCount < action.runnerMinCount) { + targetRunnerCount = action.runnerMinCount; + } + + const spawnQuantity = targetRunnerCount - runnerCurrent.length; - return { - success: false, - message: "Runner is already closed", - } as const; + if (spawnQuantity > 0) { + for (let i = 0; i < spawnQuantity; i++) { + this.startupQueueAdd(job.id); + } } + } - runner.process.kill("SIGKILL"); + private async checkDanglingRunners() { + // TODO: Cleanup this + const runnerRecordsNotClosed = await runnersModel.byStatuses([ + "starting", + "ready", + "closing", + ]); - console.log( - `[RunnerManager/sendShutdownForceful] Runner process killed. runnerId ${shortenString( - runnerId - )}, jobId ${shortenString(jobId)}` - ); + const containers = await getDockerContainers(); - return { - success: true, - message: "Runner process has been killed forcefully.", - } as const; - } + await Promise.all( + runnerRecordsNotClosed.map(async (runnerRecord) => { + const isKnown = this.runners.has(runnerRecord.id); - public async getRunners() { - return Object.values(this.runners).map((index) => ({ - status: index.status, - jobId: index.action.jobId, - actionId: index.action.id, - id: index.id, - requestsProcessing: index.requestsProcessing, - lastRequestAt: index.lastRequestAt, - createdAt: index.createdAt, - readyAt: index.readyAt, - closingAt: index.closingAt, - closedAt: index.closedAt, - })); - } + if (isKnown) { + return; + } - public async findRunnersByJobId(jobId: string) { - return Object.values(this.runners) - .filter((index) => index.action.jobId === jobId) - .map((index) => ({ - status: index.status, - jobId: index.action.jobId, - actionId: index.action.id, - id: index.id, - requestsProcessing: index.requestsProcessing, - lastRequestAt: index.lastRequestAt, - createdAt: index.createdAt, - readyAt: index.readyAt, - closingAt: index.closingAt, - closedAt: index.closedAt, - })); - } + console.warn( 
+ `[RunnerManager/checkDanglingRunners] Found dangling runner record ${runnerRecord.id} for job ${runnerRecord.jobId}. Marking as closed...`, + ); - public async findRunnersByActionId(actionId: string) { - return Object.values(this.runners) - .filter((index) => index.action.id === actionId) - .map((index) => ({ - status: index.status, - jobId: index.action.jobId, - actionId: index.action.id, - id: index.id, - requestsProcessing: index.requestsProcessing, - lastRequestAt: index.lastRequestAt, - createdAt: index.createdAt, - readyAt: index.readyAt, - closingAt: index.closingAt, - closedAt: index.closedAt, - })); - } + await runnersModel.update(runnerRecord.id, { + status: "closed", + closedAt: new Date(), + }); - public registerMqttPublishHandler( - callback: (jobId: string, topic: string, body: Buffer) => Promise - ) { - this.server.addListener("mqtt-publish-request", callback); - } + const container = containers.find( + (container) => + container.Names === runnerRecord.properties?.runnerContainerName, + ); - public deregisterMqttPublishHandler( - callback: (jobId: string, topic: string, body: Buffer) => Promise - ) { - this.server.removeListener("mqtt-publish-request", callback); + if (container) { + await stopDockerContainer(container.ID).catch((err) => {}); + } + }), + ); } - private async createRunner( - version: JobVersionsTableType, - action: ActionsTableType, - job: JobsTableType, - options?: { - dockerNamePrefix?: string; + private async processStartupQueue() { + const queue = this.queueStartup.splice(0, this.queueStartup.length); + + // Group queue by jobId to avoid race conditions + const queueByJobId: Record = {}; + + for (const item of queue) { + if (queueByJobId[item.jobId]) { + queueByJobId[item.jobId].push(item); + } else { + queueByJobId[item.jobId] = [item]; + } } - ) { - assert(action.jobVersionId === version.id); - console.log( - `[RunnerManager/createRunner] Creating runner from action ${shortenString( - action.id - )}` + await Promise.all( + 
Object.entries(queueByJobId).map(async ([jobId, jobQueue]) => { + const [runnersActive, action] = await Promise.all([ + runnersModel.byJobId(jobId, { + specialActiveIshOnly: true, + }), + actionsModel.byJobIdLatest(jobId), + ]); + + const runnerMaxCount = action?.runnerMaxCount ?? Infinity; + const runnerCurrentCount = runnersActive.length; + + let spawnAmount = jobQueue.length; + + if (spawnAmount + runnerCurrentCount >= runnerMaxCount) { + spawnAmount = runnerMaxCount - runnerCurrentCount; + } + + const itemsForStartup = jobQueue.slice(0, spawnAmount); + const itemsForRequeue = jobQueue.slice(spawnAmount); + + for (const item of itemsForRequeue) { + this.queueStartup.push(item); + } + + await Promise.all( + itemsForStartup.map(async (item) => { + const result = await this.createRunner(item.jobId); + + item.startupPromise.resolve(result); + }), + ); + }), ); + } - const prefix = sanitiseSafeCharacters( - `JobberRunner-${options?.dockerNamePrefix?.substring(0, 16)}` - ).substring(0, 32); + private async processShutdownQueue() { + const queue = this.queueShutdown.splice(0, this.queueShutdown.length); - const id = createToken({ - length: 32, - prefix, - }); + await Promise.all( + queue.map(async (item) => { + try { + const runner = this.runners.get(item.runnerId); - this.server.registerConnection(id, action, version); + if (!runner) { + return; + } - const image = await getImage(action.runnerImage); + await runnersModel.update(runner.runnerId, { + status: "closing", + closingAt: new Date(), + }); - if (!image) { - throw new Error( - `[RunnerManager/createRunner] Failed to find the image associated with action. 
actionId ${shortenString( - action.id - )}, actionRunnerImage ${action.runnerImage}` - ); - } + runner.promiseEvents.closing.resolve(); + + if (item.method === "forceful") { + if (runner.properties?.runnerContainerName) { + await killDockerContainer( + runner.properties?.runnerContainerName, + ).catch((err) => {}); + } + } else if (item.method === "graceful") { + if (runner.properties?.runnerContainerName) { + await stopDockerContainer( + runner.properties?.runnerContainerName, + ).catch((err) => {}); + } + } else { + console.warn( + `[RunnerManager/processShutdownQueue] Unknown shutdown method ${item.method} for runner ${item.runnerId}. Defaulting to graceful.`, + ); + } - if (image.status === "disabled") { - throw new Error( - `[RunnerManager/createRunner] Action is using a disabled image! Unable to start runner. actionId ${shortenString( - action.id - )}, actionRunnerImage ${action.runnerImage}` - ); - } + await runner.promiseEvents.closed.promise; + } catch (err) { + console.error(err); + } + }), + ); + } - if (image.status === "deprecated") { - console.log( - `[RunnerManager/createRunner] Action is using a deprecated image! actionId ${shortenString( - action.id - )}, actionRunnerImage ${action.runnerImage}` - ); - } + private async createRunner(jobId: string): Promise { + let cleanupFiles: string[] = []; - const environment = await getDrizzle() - .select() - .from(environmentsTable) - .where(eq(environmentsTable.jobId, action.jobId)) - .limit(1) - .then((res) => res.at(0) ?? 
null); - // + try { + const job = await jobModel.byId(jobId); - const args: string[] = []; + if (!job) { + console.warn( + `[RunnerManager/createRunner] Failed to create runner for job ${jobId} - job not found`, + ); - args.push("run", "--rm", "--name", id); + return null; + } - args.push("--label", "jobber=true"); - args.push("--label", `jobber-manager=${getConfigOption("JOBBER_NAME")}`); + if (!job.jobVersionId) { + console.warn( + `[RunnerManager/createRunner] Failed to create runner for job ${jobId} - job version not found`, + ); - const dockerNetwork = getConfigOption("RUNNER_CONTAINER_DOCKER_NETWORK"); - if (dockerNetwork) { - args.push("--network", dockerNetwork); - } + return null; + } + + const jobVersion = await jobVersionsModel.byId(job.jobVersionId); + assert(jobVersion, "Job version not found"); + + const action = await actionsModel.byVersionId(jobVersion.id); + assert(action, "Action not found"); + + const environment = await environmentModel.byJobId(job.id); + + const image = await getImage(action.runnerImage); + + if (!image) { + console.warn( + `[RunnerManager/createRunner] Failed to create runner for job ${jobId} - image ${action.runnerImage} not found`, + ); - if (environment) { - for (const [name, { value }] of Object.entries(environment.context)) { - args.push("--env", `${name}=${value}`); + return null; } - } - const actionArgumentsEnabled = getConfigOption( - "RUNNER_ALLOW_DOCKER_ARGUMENT_TYPES" - ); + if (image.status === "disabled") { + console.warn( + `[RunnerManager/createRunner] Failed to create runner for job ${jobId} - image ${action.runnerImage} is disabled`, + ); - if ( - actionArgumentsEnabled.includes("networks") && - action.runnerDockerArguments.networks - ) { - for (const network of action.runnerDockerArguments.networks) { - args.push("--network", network); + return null; } - } else if (action.runnerDockerArguments.networks) { - this.logger.write({ - actionId: action.id, - jobId: job.id, - jobName: job.jobName, - created: new 
Date(), - source: "system", - message: `[RunnerManager/createRunner] Action is using docker networks, but RUNNER_ALLOW_DOCKER_ARGUMENT_TYPES does not include "networks". Skipping networks.`, - }); - } - if ( - actionArgumentsEnabled.includes("volumes") && - action.runnerDockerArguments.volumes - ) { - for (const volume of action.runnerDockerArguments.volumes) { - args.push( - "--volume", - `${volume.source}:${volume.target}:${volume.mode}` + if (image.status === "deprecated") { + console.warn( + `[RunnerManager/createRunner] Warning: creating runner for job ${jobId} with deprecated image ${action.runnerImage}`, ); } - } else if (action.runnerDockerArguments.volumes) { - this.logger.write({ + + const serviceClientRunner = + await this.serviceClients.getSystemClientForRunner(job); + + const runnerRecord = await runnersModel.create({ + status: "starting", actionId: action.id, + environmentId: environment?.id, jobId: job.id, - jobName: job.jobName, - created: new Date(), - source: "system", - message: `[RunnerManager/createRunner] Action is using docker volumes, but RUNNER_ALLOW_DOCKER_ARGUMENT_TYPES does not include "volumes". 
Skipping volumes.`, + jobVersionId: jobVersion.id, + oauthServiceClientId: serviceClientRunner.client?.id, }); - } - if ( - actionArgumentsEnabled.includes("labels") && - action.runnerDockerArguments.labels - ) { - for (const label of action.runnerDockerArguments.labels) { - if (["jobber-manager", "jobber"].includes(label.key.toLowerCase())) { - continue; - } + if (!runnerRecord) { + console.warn( + `[RunnerManager/createRunner] Failed to create runner for job ${jobId} - failed to create runner record in database`, + ); - args.push("--label", `${label.key}=${label.value}`); + return null; } - } else if (action.runnerDockerArguments.labels) { - this.logger.write({ - actionId: action.id, - jobId: job.id, - jobName: job.jobName, - created: new Date(), - source: "system", - message: `[RunnerManager/createRunner] Action is using docker labels, but RUNNER_ALLOW_DOCKER_ARGUMENT_TYPES does not include "labels". Skipping labels.`, - }); - } - if ( - actionArgumentsEnabled.includes("memoryLimit") && - action.runnerDockerArguments.memoryLimit - ) { - args.push("--memory", action.runnerDockerArguments.memoryLimit); - } else if (action.runnerDockerArguments.memoryLimit) { - this.logger.write({ - actionId: action.id, - jobId: job.id, - jobName: job.jobName, - created: new Date(), - source: "system", - message: `[RunnerManager/createRunner] Action is using docker memory limit, but RUNNER_ALLOW_DOCKER_ARGUMENT_TYPES does not include "memoryLimit". 
Skipping memory limit.`, + const containerName = createToken({ + length: 16, + prefix: sanitiseSafeCharacters(`runner-${job.jobName}`).substring( + 0, + 20, + ), }); - } - if ( - getConfigOption("RUNNER_ALLOW_ARGUMENT_DIRECT_PASSTHROUGH") && - actionArgumentsEnabled.includes("directPassthroughArguments") && - action.runnerDockerArguments.directPassthroughArguments - ) { - args.push(...action.runnerDockerArguments.directPassthroughArguments); - } else if (action.runnerDockerArguments.directPassthroughArguments) { - this.logger.write({ - actionId: action.id, - jobId: job.id, - jobName: job.jobName, - created: new Date(), - source: "system", - message: `[RunnerManager/createRunner] Action is using docker direct passthrough arguments, but RUNNER_ALLOW_ARGUMENT_DIRECT_PASSTHROUGH is not enabled, or RUNNER_ALLOW_DOCKER_ARGUMENT_TYPES does not include "directPassthroughArguments". Skipping direct passthrough arguments.`, - }); - } + const portRandomised = Math.floor(Math.random() * 10000) + 2000; - args.push( - image.imageUrl, - "node", - ENTRYPOINT_NODE, - "--job-runner-identifier", - id, - "--job-controller-host", - getConfigOption("MANAGER_HOST"), - "--job-controller-port", - getConfigOption("MANAGER_PORT").toString(), - "--job-debug", - getConfigOption("DEBUG_RUNNER") ? "true" : "false" - ); + const runnerParameters: RunnerManagerItem["arguments"] = { + runnerId: runnerRecord.id, - if (getConfigOption("DEBUG_RUNNER")) { - const secureArgs: string[] = []; + runnerClientId: serviceClientRunner.client?.clientId ?? 
"", + runnerClientSecret: serviceClientRunner.secret, + runnerGeneralApiEndpoint: `http://${getConfigOption("MANAGER_GRPC_HOST")}:${getConfigOption("MANAGER_GRPC_PORT")}`, - const secureValues: string[] = []; + runnerOAuthTokenEndpoint: `http://${getConfigOption("MANAGER_GRPC_HOST")}:${getConfigOption("API_PORT")}/oauth/token`, + runnerOAuthJwksEndpoint: `http://${getConfigOption("MANAGER_GRPC_HOST")}:${getConfigOption("API_PORT")}/.well-known/jwks.json`, + runnerOAuthIssuer: getConfigOption("OAUTH_ISSUER"), - if (environment) { - for (const [name, value] of Object.entries(environment.context)) { - if (value.type === "secret") { - secureValues.push(value.value); - } - } - } + runnerApiPort: portRandomised, - for (const argItem of args) { - let argItemClean = argItem; + runnerDebug: getConfigOption("DEBUG_RUNNER"), + }; - for (const secretValue of secureValues) { - argItemClean = argItemClean.replace(secretValue, ""); - } + const args: string[] = []; + + args.push("run", "--rm", "--name", containerName); - secureArgs.push(argItemClean); + args.push("--label", "jobber=true"); + args.push("--label", `jobber-manager=${getConfigOption("JOBBER_NAME")}`); + args.push("--label", `jobber-version=${jobVersion.version}`); + + const dockerNetwork = getConfigOption("RUNNER_CONTAINER_DOCKER_NETWORK"); + if (dockerNetwork) { + args.push("--network", dockerNetwork); } - this.logger.write({ - actionId: action.id, - jobId: job.id, - jobName: job.jobName, - created: new Date(), - source: "system", - message: `[RunnerManager/createRunner] Starting runner with arguments: ${JSON.stringify( - secureArgs - )}`, - }); - } + const environmentFileLines: string[] = [ + `# Job ${JSON.stringify(job.jobName)}`, + "", + "# System Defined Environment Variables - DO NOT MODIFY", + `RUNNER_ID=${runnerParameters.runnerId}`, + `RUNNER_CLIENT_ID=${runnerParameters.runnerClientId}`, + `RUNNER_CLIENT_SECRET=${runnerParameters.runnerClientSecret}`, + 
`RUNNER_GENERAL_API_ENDPOINT=${runnerParameters.runnerGeneralApiEndpoint}`, + `RUNNER_OAUTH_TOKEN_ENDPOINT=${runnerParameters.runnerOAuthTokenEndpoint}`, + `RUNNER_OAUTH_JWKS_ENDPOINT=${runnerParameters.runnerOAuthJwksEndpoint}`, + `RUNNER_OAUTH_ISSUER=${runnerParameters.runnerOAuthIssuer}`, + `RUNNER_API_PORT=${runnerParameters.runnerApiPort}`, + `RUNNER_DEBUG=${runnerParameters.runnerDebug ? "true" : "false"}`, + ]; - const process = spawn("docker", args, { - windowsHide: true, - stdio: "pipe", - }); + if (environment) { + environmentFileLines.push(""); + environmentFileLines.push(""); + environmentFileLines.push(`# User defined environment variables`); + for (const [name, { value }] of Object.entries(environment.context)) { + environmentFileLines.push( + `${name.toUpperCase()}=${JSON.stringify(value)}`, + ); + } + } - process.once("exit", () => { - delete this.runners[id]; + // TODO: if fails, fallback to the old insecure strategy + const environmentFilePath = getRunnerEnvFile(runnerRecord); + await writeFile(environmentFilePath, environmentFileLines.join("\n")); + cleanupFiles.push(environmentFilePath); - gaugeActiveRunners - .labels({ - job_name: job.jobName, - job_id: job.id, - version: version.version, - }) - .dec(); - }); + args.push("--env-file", environmentFilePath); - process.stderr.on("data", (buffer: Buffer) => { - const chunks = buffer.toString().split("\n"); - for (const chunk of chunks) { + const actionArgumentsEnabled = getConfigOption( + "RUNNER_ALLOW_DOCKER_ARGUMENT_TYPES", + ); + + if ( + actionArgumentsEnabled.includes("networks") && + action.runnerDockerArguments.networks + ) { + for (const network of action.runnerDockerArguments.networks) { + args.push("--network", network); + } + } else if (action.runnerDockerArguments.networks) { this.logger.write({ actionId: action.id, jobId: job.id, jobName: job.jobName, created: new Date(), - source: "runner", - message: chunk.toString(), + source: "system", + message: `[RunnerManager/createRunner] 
Action is using docker networks, but RUNNER_ALLOW_DOCKER_ARGUMENT_TYPES does not include "networks". Skipping networks.`, }); } - }); - process.stdout.on("data", (buffer: Buffer) => { - const chunks = buffer.toString().split("\n"); - for (const chunk of chunks) { + if ( + actionArgumentsEnabled.includes("volumes") && + action.runnerDockerArguments.volumes + ) { + for (const volume of action.runnerDockerArguments.volumes) { + args.push( + "--volume", + `${volume.source}:${volume.target}:${volume.mode}`, + ); + } + } else if (action.runnerDockerArguments.volumes) { this.logger.write({ actionId: action.id, jobId: job.id, jobName: job.jobName, created: new Date(), - source: "runner", - message: chunk.toString(), + source: "system", + message: `[RunnerManager/createRunner] Action is using docker volumes, but RUNNER_ALLOW_DOCKER_ARGUMENT_TYPES does not include "volumes". Skipping volumes.`, }); } - }); - - this.runners[id] = { - action, - version, - job, - createdAt: getUnixTimestamp(), - environment, - id, - process, - requestsProcessing: 0, - status: "starting", - }; - - gaugeActiveRunners - .labels({ - job_name: job.jobName, - job_id: job.id, - version: version.version, - }) - .inc(); - - return id; - } - - private async loopClose() { - // Graceful shutdown - await Promise.all( - Object.values(this.runners).map((runner) => - this.server.sendShutdownRequest(runner.id) - ) - ); - - if ( - await awaitTruthy(async () => { - return Object.values(this.runners).length === 0; - }, 60_000) - ) { - return; - } - - // Forceful shutdown any lingering runners - for (const runner of Object.values(this.runners)) { - runner.process.kill("SIGTERM"); - } - if ( - await awaitTruthy(async () => { - return Object.values(this.runners).length === 0; - }, 60_000) - ) { - return; - } - } - - private async loopCheckDanglingContainers( - currentVersions: { - action: ActionsTableType; - version: JobVersionsTableType; - job: JobsTableType; - }[] - ) { - const containers = await 
getDockerContainers(); - - for (const container of containers) { - const labels = container.Labels.split(",").map((label) => - label.split("=", 2) - ); - - const isJobberRunner = labels.some( - ([labelName, labelValue]) => - labelName === "jobber" && labelValue.toLowerCase() === "true" - ); - - if (!isJobberRunner) { - continue; - } - - const jobberManager = labels - .find(([labelName]) => labelName === "jobber-manager") - ?.at(1); + if ( + actionArgumentsEnabled.includes("labels") && + action.runnerDockerArguments.labels + ) { + for (const label of action.runnerDockerArguments.labels) { + if (["jobber-manager", "jobber"].includes(label.key.toLowerCase())) { + continue; + } - if (jobberManager !== getConfigOption("JOBBER_NAME")) { - if (getConfigOption("DEBUG_RUNNER")) { - console.log( - `[RunnerManager/loopCheckDanglingContainers] Found dangling container that is not owned by this Jobber instance. container: ${container.ID}, jobber-owner: ${jobberManager}` - ); + args.push("--label", `${label.key}=${label.value}`); } + } else if (action.runnerDockerArguments.labels) { + this.logger.write({ + actionId: action.id, + jobId: job.id, + jobName: job.jobName, + created: new Date(), + source: "system", + message: `[RunnerManager/createRunner] Action is using docker labels, but RUNNER_ALLOW_DOCKER_ARGUMENT_TYPES does not include "labels". Skipping labels.`, + }); + } - continue; + if ( + actionArgumentsEnabled.includes("memoryLimit") && + action.runnerDockerArguments.memoryLimit + ) { + args.push("--memory", action.runnerDockerArguments.memoryLimit); + } else if (action.runnerDockerArguments.memoryLimit) { + this.logger.write({ + actionId: action.id, + jobId: job.id, + jobName: job.jobName, + created: new Date(), + source: "system", + message: `[RunnerManager/createRunner] Action is using docker memory limit, but RUNNER_ALLOW_DOCKER_ARGUMENT_TYPES does not include "memoryLimit". 
Skipping memory limit.`, + }); } - const hasRunner = !!this.runners[container.Names]; - if (hasRunner) { - continue; + if ( + getConfigOption("RUNNER_ALLOW_ARGUMENT_DIRECT_PASSTHROUGH") && + actionArgumentsEnabled.includes("directPassthroughArguments") && + action.runnerDockerArguments.directPassthroughArguments + ) { + args.push(...action.runnerDockerArguments.directPassthroughArguments); + } else if (action.runnerDockerArguments.directPassthroughArguments) { + this.logger.write({ + actionId: action.id, + jobId: job.id, + jobName: job.jobName, + created: new Date(), + source: "system", + message: `[RunnerManager/createRunner] Action is using docker direct passthrough arguments, but RUNNER_ALLOW_ARGUMENT_DIRECT_PASSTHROUGH is not enabled, or RUNNER_ALLOW_DOCKER_ARGUMENT_TYPES does not include "directPassthroughArguments". Skipping direct passthrough arguments.`, + }); } - console.log( - `[RunnerManager/loopCheckDockerContainers] Found dangling container! This should NOT happen. Are you running multiple Jobber instances on the same host? Did Jobber previously crash? containerId: ${shortenString( - container.ID - )}, containerNames: ${container.Names}` + args.push( + image.imageUrl, + "node", + ENTRYPOINT_NODE, + // TODO: if environment file fails, fallback to this. + // `--runner-id=${runnerParameters.runnerId}`, + // `--client-id=${runnerParameters.runnerClientId}`, + // `--client-secret=${runnerParameters.runnerClientSecret}`, + // `--general-api-endpoint=${runnerParameters.runnerGeneralApiEndpoint}`, + // `--oauth-token-endpoint=${runnerParameters.runnerOAuthTokenEndpoint}`, + // `--oauth-jwks-endpoint=${runnerParameters.runnerOAuthJwksEndpoint}`, + // `--oauth-issuer=${runnerParameters.runnerOAuthIssuer}`, + // `--port=${runnerParameters.runnerApiPort}`, + // `--debug=${runnerParameters.runnerDebug ? "true" : "false"}`, ); - const result = await stopDockerContainer(container.ID); + // NOTE: !!!! NEVER ENABLE SHELL=TRUE !!!! 
+ const process = spawn("docker", args, { + windowsHide: true, + stdio: "pipe", + }); + // NOTE: !!!! NEVER ENABLE SHELL=TRUE !!!! - if (result) { - console.log( - "[RunnerManager/loopCheckDockerContainers] Killed dangling container successfully." - ); - } else { - console.log( - "[RunnerManager/loopCheckDockerContainers] Failed to kill dangling container!" - ); - } - } - } + process.once("exit", async (code) => { + await runnersModel.update(runnerRecord.id, { + status: "closed", + closedAt: new Date(), + }); - private async loopImagePull() { - const images = await getImages(); + const runner = this.runners.get(runnerRecord.id); - await Promise.all( - images.map(async (image) => { - if (image.status !== "active") { + if (!runner) { return; } - const result = await pullDockerImage(image.imageUrl); + runner.promiseEvents.closed.resolve(); - if (!result) { - console.log( - `[RunnerManager/loopImagePull] Failed to pull image ${image.imageUrl}` - ); + this.runners.delete(runnerRecord.id); + }); + + process.stderr.on("data", (buffer: Buffer) => { + const chunks = buffer.toString().split("\n"); + for (const chunk of chunks) { + this.logger.write({ + actionId: action.id, + jobId: job.id, + jobName: job.jobName, + created: new Date(), + source: "runner", + message: chunk.toString(), + }); } - }) - ); - } + }); - private async loopRunnerSpawner( - currentVersions: { - action: ActionsTableType; - version: JobVersionsTableType; - job: JobsTableType; - }[] - ) { - for (const currentVersion of currentVersions) { - const action = currentVersion.action; - const job = currentVersion.job; - const version = currentVersion.version; + process.stdout.on("data", (buffer: Buffer) => { + const chunks = buffer.toString().split("\n"); + for (const chunk of chunks) { + this.logger.write({ + actionId: action.id, + jobId: job.id, + jobName: job.jobName, + created: new Date(), + source: "runner", + message: chunk.toString(), + }); + } + }); - const runnersCurrent = 
Object.values(this.runners).filter( - (runner) => runner.version.id === version.id + const tokenResult = await this.serviceClients.generateTokenForServer( + getOAuthAudienceRunnerApi(runnerRecord.id), ); - if (action.runnerMode !== "standard") { - continue; - } + const metadata = Metadata({ + Authorization: `Bearer ${tokenResult.jwt}`, + }); - const runnerLoad = runnersCurrent.reduce( - (prev, runner) => (runner.requestsProcessing + prev) / 2, - 0 + const channel = createChannel( + `${containerName}:${runnerParameters.runnerApiPort}`, + ChannelCredentials.createInsecure(), ); + // const channel = createChannel( + // `192.168.10.200:${runnerParameters.runnerApiPort}`, + // ); + const grpc = createClientFactory().create(RunnerAPIDefinition, channel, { + "*": { + metadata, + }, + }) as Client; + + const properties = { + runnerApiPort: runnerParameters.runnerApiPort, + runnerContainerName: containerName, + runnerContainerNetworks: action.runnerDockerArguments.networks ?? [], + runnerDebug: runnerParameters.runnerDebug, + runnerPid: process.pid?.toString() ?? "", + }; + + const promiseEvents = { + ready: deferred(), + closing: deferred(), + closed: deferred(), + }; + + this.runners.set(runnerRecord.id, { + runnerId: runnerRecord.id, + + job, + jobVersion, + jobAction: action, + environment: environment ?? null, + + process, + properties, + arguments: runnerParameters, + + grpcToken: tokenResult.jwt, + grpcTokenExpiry: Math.floor(tokenResult.expiration.getTime() / 1000), + grpcMetadata: metadata, + grpcChannel: channel, + grpc: grpc, + + promiseEvents, + + createdAt: getUnixTimestamp(), + }); - const targetLoadPerRunner = action.runnerAsynchronous ? 
10 : 1; + await runnersModel.update(runnerRecord.id, { + properties, + }); - let targetRunnerCount = Math.floor( - (runnerLoad / targetLoadPerRunner) * 1.2 - ); + // A simple hack to wait for runner to be ready, main loop is blocked + setImmediate(async () => { + for (let i = 0; i < 250; i++) { + const runner = this.runners.get(runnerRecord.id); - if (Number.isNaN(targetRunnerCount)) { - targetRunnerCount = 0; - } + if (!runner) { + break; + } - if (this.requestedVersionIds.has(version.id)) { - this.requestedVersionIds.delete(version.id); + if (runner.lastStatus?.status === "READY") { + break; + } - if (targetRunnerCount <= 0) { - targetRunnerCount++; - } - } + await this.updateRunnerStatus(runnerRecord.id); - if (targetRunnerCount > action.runnerMaxCount) { - targetRunnerCount = action.runnerMaxCount; - } + // gradually sleep longer, try not to thrash the EventLoop + if (i > 200) { + await timeout(250); + } else if (i > 100) { + await timeout(100); + } else { + await timeout(50); + } + } + }); - if (targetRunnerCount < action.runnerMinCount) { - targetRunnerCount = action.runnerMinCount; - } + // Await until process opens... or fails to open + await Promise.any([ + timeout(120_000), // Timeout after 2 minutes + promiseEvents.ready.promise, + promiseEvents.closing.promise, + promiseEvents.closed.promise, + ]); - const count = targetRunnerCount - runnersCurrent.length; - - if (count > 0) { - console.log( - `[Runners/loopRunnerSpawner] Spawning ${count} new runners. 
jobName ${ - job.jobName - }, jobId ${shortenString(job.id)}, actionId ${shortenString( - action.id - )}, version ${version.version}, versionId ${shortenString( - version.id - )}` - ); + console.log( + `[RunnerManager/createRunner] Runner for job ${shortenString(job.jobName, 16)} (${shortenString(job.id, 5)}) version ${jobVersion.version} created with runner id ${shortenString(runnerRecord.id, 4)}`, + ); - for (let i = 0; i < count; i++) { - const runnerId = await this.createRunner(version, action, job, { - dockerNamePrefix: job.jobName, - }); + return runnerRecord.id; + } catch (err) { + console.error(err); - await this.server.awaitConnectionStatus(runnerId, "ready"); - } + return null; + } finally { + for (const filePath of cleanupFiles) { + await unlink(filePath).catch(() => {}); } } } - private async loopCheckEnvironmentChanges( - currentVersions: { - version: JobVersionsTableType; - action: ActionsTableType; - job: JobsTableType; - environment: EnvironmentsTableType | null; - }[] - ) { - for (const [_runnerId, runner] of Object.entries(this.runners)) { - if (runner.status !== "starting" && runner.status !== "ready") { - continue; - } + public startupQueueAdd(jobId: string) { + const startupPromise = deferred(); - const currentVersion = currentVersions.find( - (index) => index.version.id === runner.version.id - ); + this.queueStartup.push({ + jobId, + startupPromise, + }); - if (!currentVersion) { - continue; - } + return startupPromise.promise; + } - if (!runner.environment && !currentVersion.environment) { - continue; - } + /** + * Gets or creates a runner depending on runner mode + * - RUN_ONCE: creates a new runner + * - STANDARD: Attempts to fetch runner from runner pool. If runners are starting, waits for runner to start. If no runners exist, creates one. 
+ */ + public async createSoftRunner(jobId: string): Promise { + const action = await actionsModel.byJobIdLatest(jobId); - // Runner started with no environment, and environment has since been configured. - if (!runner.environment && currentVersion.environment) { - console.log( - `[RunnerManager/loopCheckEnvironmentChanges] Shutting down ${shortenString( - runner.id - )} due to environment change. Runner started without environment, but environment has now been configured.` - ); + if (!action) { + return null; + } - await this.server.sendShutdownRequest(runner.id); + if (action.runnerMode === "run-once") { + return await this.startupQueueAdd(jobId); + } - continue; - } + if (action.runnerMode === "standard") { + const runners = await runnersModel.byJobId(jobId, { + specialActiveIshOnly: true, + }); - // Runner started with an environment, and its since been deleted. - if (runner.environment && !currentVersion.environment) { - console.log( - `[RunnerManager/loopCheckEnvironmentChanges] Shutting down ${shortenString( - runner.id - )} due to environment change. Runner started with an environment, but environment has now been deleted.` - ); + // attempt to use an existing runner... starting or otherwise + for (const runner of runners) { + const managerRunner = this.runners.get(runner.id); - await this.server.sendShutdownRequest(runner.id); + if (!managerRunner || !managerRunner.grpcChannel) { + continue; + } - continue; - } + const status = managerRunner.grpcChannel.getConnectivityState(false); - // Runners environment updated while it was running - if ( - runner.environment?.modified !== currentVersion.environment?.modified - ) { - console.log( - `[RunnerManager/loopCheckEnvironmentChanges] Shutting down ${shortenString( - runner.id - )} due to environment change. 
Environment has been updated while runner was running.` - ); + if ( + status === connectivityState.READY || + status === connectivityState.IDLE || + status === connectivityState.CONNECTING + ) { + return runner.id; + } + } - await this.server.sendShutdownRequest(runner.id); + const existingQueueItem = this.queueStartup.find( + (item) => item.jobId === action.jobId, + ); - continue; + if (existingQueueItem) { + // avoid requeueing a standard request, this will remove the risk of a huge backlog of + // runners.. in theory. Who knows, race conditions can be painful. Worst case this becomes + // a DOS vector. + // TODO: When e2e tests are figured out, add a test for this. + return await existingQueueItem.startupPromise.promise; } + + // no runner found, start new one. + return await this.startupQueueAdd(jobId); } - } - private async loopCheckVersion( - currentVersions: { - action: ActionsTableType; - version: JobVersionsTableType; - job: JobsTableType; - }[] - ) { - for (const runner of Object.values(this.runners)) { - if ( - currentVersions.some((index) => index.version.id === runner.version.id) - ) { - continue; - } + return null; + } - console.log( - `[RunnerManager/loopCheckVersion] Shutting down ${shortenString( - runner.id - )} due to action version change.` - ); + public shutdownQueueAdd(runnerId: string, forceful: boolean = false) { + const queueItem = this.queueShutdown.find( + (item) => item.runnerId === runnerId, + ); - await this.server.sendShutdownRequest(runner.id); + if (queueItem) { + queueItem.method = forceful ? "forceful" : "graceful"; + } else { + this.queueShutdown.push({ + runnerId, + method: forceful ? 
"forceful" : "graceful", + }); } } - private async loopCheckMaxAge( - currentVersions: { - action: ActionsTableType; - version: JobVersionsTableType; - job: JobsTableType; - }[] - ) { - for (const runner of Object.values(this.runners)) { - if (!runner.readyAt) { - continue; - } - - if (runner.action.runnerMaxAge === 0) { - continue; - } + public async eventSchedule( + jobId: string, + trigger: EventScheduleRequest, + ): Promise { + const runnerId = await this.createSoftRunner(jobId); - const duration = getUnixTimestamp() - runner.readyAt; + if (!runnerId) { + throw new Error(`No runner available for job ${jobId}`); + } - if (duration < runner.action.runnerMaxAge) { - continue; - } + const runner = this.runners.get(runnerId); - console.log( - `[RunnerManager/loopCheckMaxAge] Shutting down ${shortenString( - runner.id - )} due to max age exceeded. duration ${duration}s, maxAge ${ - runner.action.runnerMaxAge - }s` - ); + if (!runner) { + throw new Error(`Runner ${runnerId} not found`); + } - await this.server.sendShutdownRequest(runner.id); + if (!runner.grpc) { + throw new Error(`Runner ${runnerId} gRPC client not initialized`); } - } - private async loopCheckHardMaxAge( - currentVersions: { - action: ActionsTableType; - version: JobVersionsTableType; - job: JobsTableType; - }[] - ) { - for (const runner of Object.values(this.runners)) { - if (!runner.readyAt) { - continue; - } + try { + const response = await runner.grpc.eventSchedule(trigger); - if (runner.action.runnerMaxAgeHard === 0) { - continue; + if (runner.jobAction.runnerMode === "run-once") { + this.shutdownQueueAdd(runnerId, false); } - const duration = getUnixTimestamp() - runner.readyAt; - - if (duration < runner.action.runnerMaxAgeHard) { - continue; - } + return response; + } catch (err) { + console.error(err); + throw err; + } + } - console.log( - `[RunnerManager/loopCheckHardMaxAge] Shutting down ${shortenString( - runner.id - )} due to hard max age exceeded. 
duration ${duration}s, hardMaxAge ${ - runner.action.runnerMaxAgeHard - }s` - ); + public async eventMqtt( + jobId: string, + trigger: EventMqttRequest, + ): Promise { + const runnerId = await this.createSoftRunner(jobId); - runner.process.kill("SIGTERM"); + if (!runnerId) { + throw new Error(`No runner available for job ${jobId}`); } - } - private async loopCheckMaxIdleAge( - currentVersions: { - action: ActionsTableType; - version: JobVersionsTableType; - job: JobsTableType; - }[] - ) { - for (const runner of Object.values(this.runners)) { - const lastRequestAtEffective = - typeof runner.lastRequestAt === "number" - ? runner.lastRequestAt - : runner.readyAt; - - if (!runner.readyAt || !lastRequestAtEffective) { - continue; - } + const runner = this.runners.get(runnerId); - if (runner.action.runnerMaxIdleAge === 0) { - continue; - } + if (!runner) { + throw new Error(`Runner ${runnerId} not found`); + } - const duration = getUnixTimestamp() - lastRequestAtEffective; + if (!runner.grpc) { + throw new Error(`Runner ${runnerId} gRPC client not initialized`); + } - if (duration < runner.action.runnerMaxIdleAge) { - continue; - } + try { + const response = await runner.grpc.eventMqtt(trigger); - console.log( - `[RunnerManager/loopCheckMaxIdleAge] Shutting down ${shortenString( - runner.id - )} due to max idle age exceeded. 
duration ${duration}s, maxIdleAge ${ - runner.action.runnerMaxIdleAge - }s` - ); + if (runner.jobAction.runnerMode === "run-once") { + this.shutdownQueueAdd(runnerId, false); + } - await this.server.sendShutdownRequest(runner.id); + return response; + } catch (err) { + console.error(err); + throw err; } } } diff --git a/packages/server/src/jobber/runners/server.ts b/packages/server/src/jobber/runners/server.ts deleted file mode 100644 index 91c6b6e..0000000 --- a/packages/server/src/jobber/runners/server.ts +++ /dev/null @@ -1,710 +0,0 @@ -import { TcpFrameSocket } from "@jobber/tcp-frame-socket"; -import EventEmitter from "events"; -import { readFile } from "fs/promises"; -import { Server } from "net"; -import { getConfigOption } from "~/config.js"; -import { ActionsTableType } from "~/db/schema/actions.js"; -import { JobVersionsTableType } from "~/db/schema/job-versions.js"; -import { getJobActionArchiveFile } from "~/paths.js"; -import { awaitTruthy, createToken, shortenString } from "~/util.js"; -import { Store } from "../store.js"; - -export type HandleRequestSchedule = { - type: "schedule"; - cron: string; - timezone?: string; -}; - -export type HandleRequestHttp = { - type: "http"; - headers: Record; - query: Record; - queries: Record; - path: string; - method: string; - - body: string; - bodyLength: number; -}; - -export type HandleRequestMqtt = { - type: "mqtt"; - topic: string; - - body: string; - bodyLength: number; -}; - -export type HandleRequest = { name?: string } & ( - | HandleRequestSchedule - | HandleRequestHttp - | HandleRequestMqtt -); - -export type HandleResponse = ( - | { - success: true; - http?: { - status: number; - headers: Record; - body: Buffer; - }; - mqtt?: { - publish: Array<{ - topic: string; - body: Buffer; - }>; - }; - } - | { success: false; error: string } -) & { - duration: number; -}; - -type RunnerServerItem = { - runnerId: string; - action: ActionsTableType; - version: JobVersionsTableType; -} & ( - | { - status: "pending"; 
- } - | { - status: "starting" | "ready" | "closing"; - socket: TcpFrameSocket; - } -); - -type FrameJson = { - runnerId: string; - name: string; - traceId: string; - dataType: "buffer" | "json"; -}; - -export class RunnerServer extends EventEmitter<{ - "runner-close": [runnerId: string]; - "runner-closing": [runnerId: string]; - "runner-starting": [runnerId: string]; - "runner-ready": [runnerId: string]; - "mqtt-publish-request": [jobId: string, topic: string, body: Buffer]; -}> { - private connections = new Map(); - - private traceResponses = new Map< - string, - (frame: FrameJson, data: Buffer) => void - >(); - - private server: Server; - - constructor(private store: Store) { - super(); - - this.server = new Server({ - noDelay: true, - }); - - this.server.on("connection", (socket) => { - const socketFrame = new TcpFrameSocket(socket); - - socketFrame.on("frame", (buffer: Buffer) => - this.onFrame(socketFrame, buffer) - ); - }); - - this.server.on("error", (err) => console.error(err)); - } - - public async start() { - if (this.server.listening) { - return; - } - - this.server.listen(getConfigOption("MANAGER_PORT")); - } - - public stop() { - return new Promise((resolve) => { - this.server.once("close", () => resolve(null)); - - this.server.close(); - }); - } - - public registerConnection( - runnerId: string, - action: ActionsTableType, - version: JobVersionsTableType - ) { - this.connections.set(runnerId, { - status: "pending", - action, - version, - runnerId, - }); - } - - public getConnectionStatus(runnerId: string) { - const connection = this.connections.get(runnerId); - - return connection?.status ?? 
null; - } - - public async awaitConnectionStatus( - runnerId: string, - status: RunnerServerItem["status"] = "ready" - ) { - return await awaitTruthy(async () => { - return this.getConnectionStatus(runnerId) === status; - }, 60_000); - } - - public sendHandleRequest( - runnerId: string, - handleRequest: HandleRequest - ): Promise { - return new Promise((resolve, _reject) => { - const traceId = createToken({ - length: 256, - prefix: "HandleRequestTraceId", - }); - - const connection = this.connections.get(runnerId); - - if (!connection) { - return resolve({ - success: false, - duration: -1, - error: "Jobber: Runner connection not found", - }); - } - - if (connection.status !== "ready") { - return resolve({ - success: false, - duration: -1, - error: "Jobber: Runner connection not ready", - }); - } - - const timeoutInterval = setTimeout(() => { - if (connection.action.runnerTimeout === 0) { - return; - } - - this.traceResponses.delete(traceId); - - return resolve({ - success: false, - duration: -1, - error: "Jobber: Timeout Error", - }); - }, connection.action.runnerTimeout * 1000); - - this.traceResponses.set(traceId, (frame, data) => { - clearTimeout(timeoutInterval); - - this.traceResponses.delete(traceId); - - if (typeof frame.dataType !== "string" || frame.dataType !== "json") { - return resolve({ - success: false, - duration: -1, - error: `Jobber: Runner sent back "${frame.dataType}", expected "json"`, - }); - } - - const handleResponse = JSON.parse(data.toString()); - - if (typeof handleResponse.success !== "boolean") { - console.warn( - `[RunnerServer/sendHandleRequest] Malformed response object, handleResponse.success expected to be boolean` - ); - - return resolve({ - success: false, - duration: -1, - error: `Jobber: Malformed response object`, - }); - } - - if (!handleResponse.success) { - return resolve({ - success: false, - error: handleResponse.error ?? "An unknown error occurred", - duration: handleResponse.duration ?? 
-1, - }); - } - - if (handleRequest.type === "http" && handleResponse.http) { - const httpBody = Buffer.from(handleResponse.http.body, "base64"); - - return resolve({ - success: true, - duration: handleResponse.duration ?? -1, - http: { - body: httpBody, - headers: handleResponse.http.headers, - status: handleResponse.http.status, - }, - }); - } - - if (handleRequest.type === "mqtt" && handleResponse.mqtt) { - const publish: NonNullable< - Extract["mqtt"] - >["publish"] = []; - - if (handleResponse.mqtt?.publish) { - for (const item of handleResponse.mqtt?.publish) { - publish.push({ - topic: item.topic as string, - body: Buffer.from(item.body, "base64"), - }); - } - } - - return resolve({ - success: true, - duration: handleResponse.duration ?? -1, - mqtt: { - publish, - }, - }); - } - - return resolve({ - success: true, - duration: handleResponse.duration ?? -1, - }); - }); - - this.writeFrame( - { - name: "handle", - traceId, - runnerId, - dataType: "json", - }, - Buffer.from(JSON.stringify(handleRequest)) - ); - }); - } - - public async sendShutdownRequest(runnerId: string) { - const connection = this.connections.get(runnerId); - - if (!connection || connection.status !== "ready") { - return false; - } - - const traceId = createToken({ - length: 128, - prefix: "ShutdownRequestTraceId", - }); - - await this.writeFrame( - { - traceId, - name: "shutdown", - runnerId: runnerId, - dataType: "buffer", - }, - Buffer.alloc(0) - ); - - this.emit("runner-closing", runnerId); - - this.connections.set(runnerId, { - ...connection, - status: "closing", - }); - - return true; - } - - private async onFrame(socket: TcpFrameSocket, buffer: Buffer) { - const separator = buffer.indexOf("\n"); - - if (separator <= 0) { - console.warn(`[RunnerServer/onFrame] Received malformed frame!`); - - return; - } - - const chunkJson = buffer.subarray(0, separator); - const bodyBuffer = buffer.subarray(separator + 1); - - const frame = JSON.parse(chunkJson.toString("utf8")) as FrameJson; - - if 
(frame.name === "response") { - return await this.onFrameResponse(socket, frame, bodyBuffer); - } - - if (frame.name === "init") { - return await this.onFrameInit(socket, frame, bodyBuffer); - } - - if (frame.name === "ready") { - return await this.onFrameReady(socket, frame, bodyBuffer); - } - - if (frame.name.startsWith("store")) { - return await this.onFrameStore(socket, frame, bodyBuffer); - } - - if (frame.name.startsWith("mqtt")) { - return await this.onFrameMqtt(socket, frame, bodyBuffer); - } - - console.warn( - `[RunnerServer/onFrame] Received unknown frame name "${frame.name}"!` - ); - } - - private async onFrameResponse( - _socket: TcpFrameSocket, - frame: FrameJson, - bodyBuffer: Buffer - ) { - const traceResponseCallback = this.traceResponses.get(frame.traceId); - - if (!traceResponseCallback) { - return; - } - - traceResponseCallback(frame, bodyBuffer); - } - - private async onFrameInit( - socket: TcpFrameSocket, - frame: FrameJson, - _bodyBuffer: Buffer - ) { - const connection = this.connections.get(frame.runnerId); - - if (!connection) { - console.warn( - `[RunnerServer/onFrameInit] handle frame name "${ - frame.name - }", cannot find connection for runner ${shortenString(frame.runnerId)}!` - ); - - return; - } - - if (connection.status !== "pending") { - console.warn( - `[RunnerServer/onFrameInit] handle frame name "${ - frame.name - }", connection already initialised, with status of ${ - connection.status - }, for runner ${shortenString(frame.runnerId)}!` - ); - - return; - } - - this.connections.set(frame.runnerId, { - ...connection, - status: "starting", - socket, - }); - - this.emit("runner-starting", frame.runnerId); - - socket.once("close", () => { - this.emit("runner-close", frame.runnerId); - this.connections.delete(frame.runnerId); - }); - - this.writeFrame( - { - name: "response", - runnerId: frame.runnerId, - traceId: frame.traceId, - dataType: "buffer", - }, - await readFile( - getJobActionArchiveFile(connection.version, 
connection.action) - ) - ); - - return; - } - - private async onFrameReady( - socket: TcpFrameSocket, - frame: FrameJson, - _bodyBuffer: Buffer - ) { - const connection = this.connections.get(frame.runnerId); - - if (!connection) { - console.warn( - `[RunnerServer/onFrameReady] handle frame name "${ - frame.name - }", cannot find connection for runner ${shortenString(frame.runnerId)}!` - ); - - return; - } - - if (connection.status !== "starting") { - console.warn( - `[RunnerServer/onFrameReady] handle frame name "${ - frame.name - }", connection already initialised, with status of ${ - connection.status - }, for runner ${shortenString(frame.runnerId)}!` - ); - - return; - } - - this.connections.set(frame.runnerId, { - ...connection, - status: "ready", - socket, - }); - - this.emit("runner-ready", frame.runnerId); - } - - private async onFrameStore( - _socket: TcpFrameSocket, - frame: FrameJson, - bodyBuffer: Buffer - ) { - const connection = this.connections.get(frame.runnerId); - - if (!connection) { - console.warn( - `[RunnerServer/onFrameStore] handle frame name "${ - frame.name - }", cannot find connection for runner ${shortenString(frame.runnerId)}!` - ); - - return; - } - - if (connection.status === "pending") { - console.warn( - `[RunnerServer/onFrameStore] handle frame name "${ - frame.name - }", connection has not started! runner ${shortenString( - frame.runnerId - )}!` - ); - - return; - } - - if (frame.dataType !== "json") { - console.warn( - `[RunnerServer/onFrameStore] handle frame name "${ - frame.name - }", received unexpected dataType! 
runner ${shortenString( - frame.runnerId - )}!` - ); - - return; - } - - switch (frame.name) { - case "store-get": { - const bodyParsed = JSON.parse(bodyBuffer.toString()) as { key: string }; - - const item = await this.store.getItem( - connection.action.jobId, - bodyParsed.key - ); - - await this.writeFrame( - { - dataType: "json", - name: "response", - runnerId: frame.runnerId, - traceId: frame.traceId, - }, - Buffer.from(JSON.stringify(item)) - ); - - break; - } - - case "store-set": { - const bodyParsed = JSON.parse(bodyBuffer.toString()) as { - key: string; - value: string; - ttl?: number; - }; - - const item = await this.store.setItem( - connection.action.jobId, - bodyParsed.key, - { - value: bodyParsed.value, - ttl: bodyParsed.ttl, - } - ); - - await this.writeFrame( - { - dataType: "json", - name: "response", - runnerId: frame.runnerId, - traceId: frame.traceId, - }, - Buffer.from(JSON.stringify(item)) - ); - - break; - } - - case "store-delete": { - const bodyParsed = JSON.parse(bodyBuffer.toString()) as { - key: string; - }; - - const item = await this.store.deleteItem( - connection.action.jobId, - bodyParsed.key - ); - - await this.writeFrame( - { - dataType: "json", - name: "response", - runnerId: frame.runnerId, - traceId: frame.traceId, - }, - Buffer.from(JSON.stringify(item)) - ); - - break; - } - - default: { - console.warn( - `[RunnerServer/onFrameStore] handle frame name "${ - frame.name - }", received unknown store action! 
runner ${shortenString( - frame.runnerId - )}!` - ); - return; - } - } - } - - private async onFrameMqtt( - _socket: TcpFrameSocket, - frame: FrameJson, - bodyBuffer: Buffer - ) { - const connection = this.connections.get(frame.runnerId); - - if (!connection) { - console.warn( - `[RunnerServer/onFrameMqtt] handle frame name "${ - frame.name - }", cannot find connection for runner ${shortenString(frame.runnerId)}!` - ); - - return; - } - - if (connection.status === "pending") { - console.warn( - `[RunnerServer/onFrameMqtt] handle frame name "${ - frame.name - }", connection has not started! runner ${shortenString( - frame.runnerId - )}!` - ); - - return; - } - - if (frame.dataType !== "json") { - console.warn( - `[RunnerServer/onFrameMqtt] handle frame name "${ - frame.name - }", received unexpected dataType! runner ${shortenString( - frame.runnerId - )}!` - ); - - return; - } - - if (frame.name === "mqtt-publish") { - try { - const bodyParsed = JSON.parse(bodyBuffer.toString()) as { - topic: string; - body: string; - }; - - // TODO: Introduce some zod validation here, introduce zod validation for all frame bodies. - if ( - typeof bodyParsed.topic !== "string" || - typeof bodyParsed.body !== "string" - ) { - throw new Error(`Invalid mqtt-publish frame body structure`); - } - - const bodyDecoded = Buffer.from(bodyParsed.body, "base64"); - - this.emit( - "mqtt-publish-request", - connection.action.jobId, - bodyParsed.topic, - bodyDecoded - ); - - // presume successful. Fix this later if possible. 
- const success = true; - - await this.writeFrame( - { - dataType: "json", - name: "response", - runnerId: frame.runnerId, - traceId: frame.traceId, - }, - Buffer.from(JSON.stringify(success)) - ); - } catch (err) { - console.error( - `[RunnerServer/onFrameMqtt] Error handling mqtt-publish frame:`, - err - ); - } - } - } - - private async writeFrame(frame: FrameJson, buffer: Buffer) { - const connection = this.connections.get(frame.runnerId); - - if (!connection) { - return false; - } - - if (connection.status !== "ready" && connection.status !== "starting") { - return false; - } - - const data = Buffer.concat([ - Buffer.from(JSON.stringify(frame)), - Buffer.from("\n"), - buffer, - ]); - - await connection.socket.writeFrame(data); - - return true; - } -} diff --git a/packages/server/src/jobber/store.ts b/packages/server/src/jobber/store.ts index 42d14aa..dda6927 100644 --- a/packages/server/src/jobber/store.ts +++ b/packages/server/src/jobber/store.ts @@ -1,8 +1,8 @@ import { and, eq, lt } from "drizzle-orm"; import { singleton } from "tsyringe"; import { getDrizzle } from "~/db/index.js"; -import { storeTable } from "~/db/schema/store.js"; -import { LoopBase } from "~/loop-base.js"; +import { storeTable } from "~/db/schema.js"; +import { LoopBase } from "@jobber/common"; import { getUnixTimestamp } from "~/util.js"; type StoreItem = { @@ -95,7 +95,7 @@ export class Store extends LoopBase { public async getItemById( jobId: string, - id: string + id: string, ): Promise { const result = ( await getDrizzle() @@ -126,7 +126,7 @@ export class Store extends LoopBase { options: { value: string; ttl?: number; - } + }, ): Promise { const expiry = options.ttl ? 
getUnixTimestamp() + options.ttl : null; @@ -169,7 +169,7 @@ export class Store extends LoopBase { public async deleteItem( jobId: string, - key: string + key: string, ): Promise { const result = ( await getDrizzle() @@ -195,7 +195,7 @@ export class Store extends LoopBase { public async deleteItemById( jobId: string, - id: string + id: string, ): Promise { const result = ( await getDrizzle() diff --git a/packages/server/src/jobber/telemetry.ts b/packages/server/src/jobber/telemetry.ts index 8d3ba0e..cfeb938 100644 --- a/packages/server/src/jobber/telemetry.ts +++ b/packages/server/src/jobber/telemetry.ts @@ -1,9 +1,9 @@ import { eq, sql } from "drizzle-orm"; import { singleton } from "tsyringe"; import { getDrizzle } from "~/db/index.js"; -import { jobsTable } from "~/db/schema/jobs.js"; -import { storeTable } from "~/db/schema/store.js"; -import { LoopBase } from "~/loop-base.js"; +import { jobsTable } from "~/db/schema.js"; +import { storeTable } from "~/db/schema.js"; +import { LoopBase } from "@jobber/common"; import { gaugeAppInfo, gaugeJobsInfo, gaugeJobStoreCount } from "~/metrics.js"; import { getUnixTimestamp } from "~/util.js"; @@ -39,7 +39,7 @@ export class Telemetry extends LoopBase { arch: process.arch, start_time: this.startTime.toString(), }, - 1 + 1, ); } diff --git a/packages/server/src/jobber/triggers/cron.ts b/packages/server/src/jobber/triggers/cron.ts index f47557c..c59a7dd 100644 --- a/packages/server/src/jobber/triggers/cron.ts +++ b/packages/server/src/jobber/triggers/cron.ts @@ -1,20 +1,23 @@ +import { LoopBase } from "@jobber/common"; import assert from "assert"; import { CronTime } from "cron"; import { CronError } from "cron/dist/errors.js"; import { and, eq, isNotNull, sql } from "drizzle-orm"; -import { autoInjectable, inject, singleton } from "tsyringe"; +import { inject, singleton } from "tsyringe"; import { getDrizzle } from "~/db/index.js"; -import { actionsTable, ActionsTableType } from "~/db/schema/actions.js"; -import { - 
jobVersionsTable, - JobVersionsTableType, -} from "~/db/schema/job-versions.js"; -import { jobsTable, JobsTableType } from "~/db/schema/jobs.js"; -import { triggersTable, TriggersTableType } from "~/db/schema/triggers.js"; -import { LoopBase } from "~/loop-base.js"; -import { counterTriggerCron } from "~/metrics.js"; +import { actionsTable } from "~/db/schema.js"; +import { jobVersionsTable } from "~/db/schema.js"; +import { jobsTable } from "~/db/schema.js"; +import { triggersTable } from "~/db/schema.js"; import { LogDriverBase } from "../log-drivers/abstract.js"; import { RunnerManager } from "../runners/manager.js"; +import { + ActionsTableType, + JobsTableType, + JobVersionsTableType, + TriggersTableType, +} from "~/db/types.js"; +import { EventScheduleResponse_Status } from "@jobber/grpc/runner.js"; type TriggerCronItem = { trigger: TriggersTableType; @@ -45,7 +48,7 @@ export class TriggerCron extends LoopBase { constructor( @inject(RunnerManager) private runnerManager: RunnerManager, - @inject("LogDriverBase") private logger: LogDriverBase + @inject("LogDriverBase") private logger: LogDriverBase, ) { super(); } @@ -93,29 +96,29 @@ export class TriggerCron extends LoopBase { jobVersionsTable, and( eq(triggersTable.jobId, jobVersionsTable.jobId), - eq(triggersTable.jobVersionId, jobVersionsTable.id) - ) + eq(triggersTable.jobVersionId, jobVersionsTable.id), + ), ) .innerJoin( jobsTable, and( eq(triggersTable.jobId, jobsTable.id), - eq(triggersTable.jobVersionId, jobsTable.jobVersionId) - ) + eq(triggersTable.jobVersionId, jobsTable.jobVersionId), + ), ) .innerJoin( actionsTable, and( eq(triggersTable.jobId, actionsTable.jobId), - eq(triggersTable.jobVersionId, actionsTable.jobVersionId) - ) + eq(triggersTable.jobVersionId, actionsTable.jobVersionId), + ), ) .where( and( isNotNull(jobsTable.jobVersionId), sql`${triggersTable.context} ->> 'type' = 'schedule'`, - eq(jobsTable.status, "enabled") - ) + eq(jobsTable.status, "enabled"), + ), ); await 
this.loopCheckNewTriggers(triggers); @@ -132,7 +135,7 @@ export class TriggerCron extends LoopBase { trigger: TriggersTableType; action: ActionsTableType; job: JobsTableType; - }[] + }[], ) { for (const triggerSource of triggersSource) { if (this.triggers[triggerSource.trigger.id]) { @@ -144,7 +147,7 @@ export class TriggerCron extends LoopBase { try { const cron = new CronTime( triggerSource.trigger.context.cron, - triggerSource.trigger.context.timezone + triggerSource.trigger.context.timezone, ); this.triggers[triggerSource.trigger.id] = { @@ -168,7 +171,7 @@ export class TriggerCron extends LoopBase { }; console.log( - `[TriggerCron/loopCheckNewTriggers] Invalid cron syntax for trigger ${triggerSource.trigger.id} on job ${triggerSource.job.id}: ${err.message}` + `[TriggerCron/loopCheckNewTriggers] Invalid cron syntax for trigger ${triggerSource.trigger.id} on job ${triggerSource.job.id}: ${err.message}`, ); this.logger.write({ @@ -196,7 +199,7 @@ export class TriggerCron extends LoopBase { trigger: TriggersTableType; action: ActionsTableType; job: JobsTableType; - }[] + }[], ) { for (const [triggerId, trigger] of Object.entries(this.triggers)) { if (triggersSource.some((index) => index.trigger.id === triggerId)) { @@ -216,7 +219,7 @@ export class TriggerCron extends LoopBase { trigger: TriggersTableType; action: ActionsTableType; job: JobsTableType; - }[] + }[], ) { const time = Date.now(); @@ -229,38 +232,58 @@ export class TriggerCron extends LoopBase { continue; } + const scheduledAt = new Date(trigger.scheduledAt).toISOString(); + trigger.scheduledAt = trigger.cron.sendAt().toMillis(); assert(trigger.trigger.context.type === "schedule"); this.runnerManager - .sendHandleRequest(trigger.version, trigger.job, trigger.action, { - type: "schedule", - name: trigger.trigger.context.name, - cron: trigger.trigger.context.cron, - timezone: trigger.trigger.context.timezone, + .eventSchedule(trigger.job.id, { + context: { + triggerName: trigger.trigger.context.name ?? 
"unnamed", + }, + scheduledAt: scheduledAt, }) - .then((handleResponse) => { - counterTriggerCron - .labels({ - job_id: trigger.job.id, - job_name: trigger.job.jobName, - version: trigger.version.version, - success: handleResponse.success ? 1 : 0, - }) - .inc(); - - if (!handleResponse.success) { + .then((response) => { + if (response.status !== EventScheduleResponse_Status.ACCEPTED) { console.log( - `[TriggerCron/loopCheckTriggers] Sending schedule handle event failed! ${handleResponse.error}` + `[TriggerCron/loopCheckTriggers] Failed to send schedule event for trigger ${trigger.trigger.id} on job ${trigger.job.id}: ${response.status}`, ); - - return; } }) .catch((err) => { console.error(err); }); + + // this.runnerManager + // .sendHandleRequest(trigger.version, trigger.job, trigger.action, { + // type: "schedule", + // name: trigger.trigger.context.name, + // cron: trigger.trigger.context.cron, + // timezone: trigger.trigger.context.timezone, + // }) + // .then((handleResponse) => { + // counterTriggerCron + // .labels({ + // job_id: trigger.job.id, + // job_name: trigger.job.jobName, + // version: trigger.version.version, + // success: handleResponse.success ? 1 : 0, + // }) + // .inc(); + + // if (!handleResponse.success) { + // console.log( + // `[TriggerCron/loopCheckTriggers] Sending schedule handle event failed! 
${handleResponse.error}`, + // ); + + // return; + // } + // }) + // .catch((err) => { + // console.error(err); + // }); } } } diff --git a/packages/server/src/jobber/triggers/http.ts b/packages/server/src/jobber/triggers/http.ts deleted file mode 100644 index 19deec8..0000000 --- a/packages/server/src/jobber/triggers/http.ts +++ /dev/null @@ -1,274 +0,0 @@ -import assert from "assert"; -import { and, eq, isNotNull, sql } from "drizzle-orm"; -import { getDrizzle } from "~/db/index.js"; -import { actionsTable, ActionsTableType } from "~/db/schema/actions.js"; -import { - jobVersionsTable, - JobVersionsTableType, -} from "~/db/schema/job-versions.js"; -import { jobsTable, JobsTableType } from "~/db/schema/jobs.js"; -import { triggersTable, TriggersTableType } from "~/db/schema/triggers.js"; -import { LoopBase } from "~/loop-base.js"; -import { counterTriggerHttp } from "~/metrics.js"; -import { getUnixTimestamp } from "~/util.js"; -import { LogDriverBase } from "../log-drivers/abstract.js"; -import { RunnerManager } from "../runners/manager.js"; -import { HandleRequest, HandleRequestHttp } from "../runners/server.js"; -import { autoInjectable, inject, singleton } from "tsyringe"; - -type TriggerHttpItem = { - trigger: Omit & { - context: Extract; - }; - triggerPathRegex?: RegExp; - triggerPathString?: string; - action: ActionsTableType; - version: JobVersionsTableType; - job: JobsTableType; -}; - -@singleton() -export class TriggerHttp extends LoopBase { - protected loopDuration = 1000; - protected loopStarting = undefined; - protected loopStarted = undefined; - protected loopClosing = undefined; - protected loopClosed = undefined; - - private triggers: Record = {}; - - constructor( - @inject(RunnerManager) private runnerManager: RunnerManager, - @inject("LogDriverBase") private logger: LogDriverBase - ) { - super(); - } - - public getTriggerStatus(jobId: string, triggerId: string) { - const trigger = this.triggers[triggerId]; - - if (!trigger || trigger.job.id !== 
jobId) { - return { - status: "unknown", - message: "unknown", - } as const; - } - - if (this.status !== "started") { - return { - status: "unhealthy", - message: "Cron not running", - } as const; - } - - return { - status: "healthy", - message: `HTTP Trigger registered for version ${trigger.version.version}`, - } as const; - } - - protected async loopIteration() { - const triggers = await getDrizzle() - .select({ - trigger: triggersTable, - version: jobVersionsTable, - action: actionsTable, - job: jobsTable, - }) - .from(triggersTable) - .innerJoin( - jobVersionsTable, - and( - eq(triggersTable.jobId, jobVersionsTable.jobId), - eq(triggersTable.jobVersionId, jobVersionsTable.id) - ) - ) - .innerJoin( - jobsTable, - and( - eq(triggersTable.jobId, jobsTable.id), - eq(triggersTable.jobVersionId, jobsTable.jobVersionId) - ) - ) - .innerJoin( - actionsTable, - and( - eq(triggersTable.jobId, actionsTable.jobId), - eq(triggersTable.jobVersionId, actionsTable.jobVersionId) - ) - ) - .where( - and( - isNotNull(jobsTable.jobVersionId), - sql`${triggersTable.context} ->> 'type' = 'http'`, - eq(jobsTable.status, "enabled") - ) - ); - - await this.loopCheckNewTriggers(triggers); - await this.loopCheckOldTriggers(triggers); - } - - public async sendHandleRequest( - request: Pick< - HandleRequestHttp, - | "body" - | "bodyLength" - | "headers" - | "method" - | "path" - | "queries" - | "query" - > - ) { - for (const [triggerId, trigger] of Object.entries(this.triggers)) { - const headerHost = request.headers["host"]; - - if ( - trigger.trigger.context.hostname && - trigger.trigger.context.hostname !== headerHost - ) { - continue; - } - - if ( - trigger.trigger.context.method && - trigger.trigger.context.method !== request.method - ) { - continue; - } - - if ( - trigger.triggerPathRegex && - !trigger.triggerPathRegex.test(request.path) - ) { - continue; - } - - if ( - trigger.triggerPathString && - trigger.triggerPathString !== request.path - ) { - continue; - } - - const 
handleRequest: HandleRequest = { - ...request, - type: "http", - name: trigger.trigger.context.name ?? "", - }; - - const result = await this.runnerManager.sendHandleRequest( - trigger.version, - trigger.job, - trigger.action, - handleRequest - ); - - if (result.success && result.http?.status) { - counterTriggerHttp - .labels({ - host: trigger.trigger.context.hostname ?? "", - method: trigger.trigger.context.method ?? "", - path: trigger.trigger.context.path ?? "", - request_host: headerHost ?? "", - request_method: handleRequest.method, - request_path: handleRequest.path, - - job_id: trigger.job.id, - job_name: trigger.job.jobName, - version: trigger.version.version, - - status_code: result.http.status, - }) - .inc(); - } - - return result; - } - - return null; - } - - /** - * Attempts to start any newly created triggers. - */ - private async loopCheckNewTriggers( - triggersSource: { - version: JobVersionsTableType; - trigger: TriggersTableType; - action: ActionsTableType; - job: JobsTableType; - }[] - ) { - for (const triggerSource of triggersSource) { - if (this.triggers[triggerSource.trigger.id]) { - continue; - } - - assert(triggerSource.trigger.context.type === "http"); - - let triggerPathRegex: RegExp | undefined; - let triggerPathString: string | undefined; - - if (triggerSource.trigger.context?.path) { - const pathString = triggerSource.trigger.context.path; - - if (pathString.startsWith("^")) { - triggerPathRegex = new RegExp(pathString); - } else { - triggerPathString = pathString; - } - } - - this.triggers[triggerSource.trigger.id] = { - trigger: structuredClone( - triggerSource.trigger - ) as TriggerHttpItem["trigger"], - triggerPathRegex, - triggerPathString, - action: structuredClone(triggerSource.action), - version: structuredClone(triggerSource.version), - job: structuredClone(triggerSource.job), - }; - - this.logger.write({ - source: "system", - jobId: triggerSource.job.id, - jobName: triggerSource.job.jobName, - actionId: triggerSource.action.id, 
- message: `[SYSTEM] HTTP trigger (version: ${triggerSource.version.version}) ${triggerSource.trigger.id} registered`, - created: new Date(), - }); - } - } - - /** - * Attempts to remove any old triggers. - */ - private async loopCheckOldTriggers( - triggersSource: { - version: JobVersionsTableType; - trigger: TriggersTableType; - action: ActionsTableType; - job: JobsTableType; - }[] - ) { - for (const [triggerId, trigger] of Object.entries(this.triggers)) { - if (triggersSource.some((index) => index.trigger.id === triggerId)) { - continue; - } - - delete this.triggers[triggerId]; - - this.logger.write({ - source: "system", - jobId: trigger.job.id, - jobName: trigger.job.jobName, - actionId: trigger.action.id, - message: `[SYSTEM] HTTP trigger (version: ${trigger.version.version}) ${triggerId} removed`, - created: new Date(), - }); - } - } -} diff --git a/packages/server/src/jobber/triggers/mqtt.ts b/packages/server/src/jobber/triggers/mqtt.ts index 7d25d94..86241e9 100644 --- a/packages/server/src/jobber/triggers/mqtt.ts +++ b/packages/server/src/jobber/triggers/mqtt.ts @@ -2,23 +2,24 @@ import assert from "assert"; import { and, eq, isNotNull, sql } from "drizzle-orm"; import { connectAsync, IClientOptions, MqttClient } from "mqtt"; import { getDrizzle } from "~/db/index.js"; -import { actionsTable, ActionsTableType } from "~/db/schema/actions.js"; -import { - environmentsTable, - EnvironmentsTableType, -} from "~/db/schema/environments.js"; -import { - jobVersionsTable, - JobVersionsTableType, -} from "~/db/schema/job-versions.js"; -import { jobsTable, JobsTableType } from "~/db/schema/jobs.js"; -import { triggersTable, TriggersTableType } from "~/db/schema/triggers.js"; -import { LoopBase } from "~/loop-base.js"; -import { counterTriggerMqtt, counterTriggerMqttPublish } from "~/metrics.js"; +import { actionsTable } from "~/db/schema.js"; +import { environmentsTable } from "~/db/schema.js"; +import { jobVersionsTable } from "~/db/schema.js"; +import { jobsTable 
} from "~/db/schema.js"; +import { triggersTable } from "~/db/schema.js"; +import { LoopBase } from "@jobber/common"; import { createSha1Hash, shortenString } from "~/util.js"; import { LogDriverBase } from "../log-drivers/abstract.js"; import { RunnerManager } from "../runners/manager.js"; import { inject, singleton } from "tsyringe"; +import { + ActionsTableType, + EnvironmentsTableType, + JobsTableType, + JobVersionsTableType, + TriggersTableType, +} from "~/db/types.js"; +import { EventMqttResponse_Status } from "@jobber/grpc/runner.js"; type TriggerMqttItem = { trigger: TriggersTableType; @@ -47,7 +48,7 @@ export class TriggerMqtt extends LoopBase { constructor( @inject(RunnerManager) private runnerManager: RunnerManager, - @inject("LogDriverBase") private logger: LogDriverBase + @inject("LogDriverBase") private logger: LogDriverBase, ) { super(); } @@ -109,8 +110,8 @@ export class TriggerMqtt extends LoopBase { if (!trigger) { console.warn( `[TriggerMqtt/publishMqttMessage] MQTT trigger not found for job ID "${shortenString( - jobId - )}", cannot publish message to topic "${topic}"` + jobId, + )}", cannot publish message to topic "${topic}"`, ); return false; @@ -118,7 +119,7 @@ export class TriggerMqtt extends LoopBase { if (!trigger.client.connected) { console.warn( - `[TriggerMqtt/publishMqttMessage] MQTT client is not connected, cannot publish message to topic "${topic}"` + `[TriggerMqtt/publishMqttMessage] MQTT client is not connected, cannot publish message to topic "${topic}"`, ); return false; @@ -143,33 +144,33 @@ export class TriggerMqtt extends LoopBase { jobVersionsTable, and( eq(triggersTable.jobId, jobVersionsTable.jobId), - eq(triggersTable.jobVersionId, jobVersionsTable.id) - ) + eq(triggersTable.jobVersionId, jobVersionsTable.id), + ), ) .innerJoin( jobsTable, and( eq(triggersTable.jobId, jobsTable.id), - eq(triggersTable.jobVersionId, jobsTable.jobVersionId) - ) + eq(triggersTable.jobVersionId, jobsTable.jobVersionId), + ), ) .innerJoin( 
actionsTable, and( eq(triggersTable.jobId, actionsTable.jobId), - eq(triggersTable.jobVersionId, actionsTable.jobVersionId) - ) + eq(triggersTable.jobVersionId, actionsTable.jobVersionId), + ), ) .leftJoin( environmentsTable, - eq(environmentsTable.jobId, triggersTable.jobId) + eq(environmentsTable.jobId, triggersTable.jobId), ) .where( and( isNotNull(jobsTable.jobVersionId), sql`${triggersTable.context} ->> 'type' = 'mqtt'`, - eq(jobsTable.status, "enabled") - ) + eq(jobsTable.status, "enabled"), + ), ); await this.loopCheckOldTriggers(triggers); @@ -201,7 +202,7 @@ export class TriggerMqtt extends LoopBase { action: ActionsTableType; job: JobsTableType; environment: EnvironmentsTableType | null; - }[] + }[], ) { for (const [triggerId, trigger] of Object.entries(this.triggers)) { try { @@ -237,7 +238,7 @@ export class TriggerMqtt extends LoopBase { action: ActionsTableType; job: JobsTableType; environment: EnvironmentsTableType | null; - }[] + }[], ) { for (const triggerSource of triggersSource) { const trigger = this.triggers[triggerSource.trigger.id]; @@ -249,7 +250,7 @@ export class TriggerMqtt extends LoopBase { try { const config = this.buildMqttConfig( triggerSource.trigger, - triggerSource.environment + triggerSource.environment, ); if (!config.success) { @@ -302,7 +303,7 @@ export class TriggerMqtt extends LoopBase { action: ActionsTableType; job: JobsTableType; environment: EnvironmentsTableType | null; - }[] + }[], ) { for (const triggerSource of triggersSource) { try { @@ -314,13 +315,13 @@ export class TriggerMqtt extends LoopBase { const config = this.buildMqttConfig( triggerSource.trigger, - triggerSource.environment + triggerSource.environment, ); if (!config.success) { console.warn( `[TriggerMqtt/loopCheckNewTriggers] Failed to build MQTT config! 
Errors...`, - config.errors + config.errors, ); this.logger.write({ @@ -329,7 +330,7 @@ export class TriggerMqtt extends LoopBase { jobId: triggerSource.job.id, jobName: triggerSource.job.jobName, message: `[SYSTEM] MQTT Initialisation error! Configuration error: ${config.errorsSimple.join( - ", " + ", ", )}`, created: new Date(), }); @@ -342,7 +343,7 @@ export class TriggerMqtt extends LoopBase { await client.subscribeAsync(triggerSource.trigger.context.topics); client.on("message", async (topic, payload) => - this.onMqttMessage(triggerSource.trigger.id, topic, payload) + this.onMqttMessage(triggerSource.trigger.id, topic, payload), ); this.triggers[triggerSource.trigger.id] = { @@ -390,7 +391,7 @@ export class TriggerMqtt extends LoopBase { private async onMqttMessage( triggerId: string, topic: string, - payload: Buffer + payload: Buffer, ) { const triggerItem = this.triggers[triggerId]; @@ -408,100 +409,119 @@ export class TriggerMqtt extends LoopBase { created: new Date(), }); - const handleResponse = await this.runnerManager.sendHandleRequest( - triggerItem.version, - triggerItem.job, - triggerItem.action, - { - type: "mqtt", - topic, - body: payload.toString("base64"), - bodyLength: payload.length, - } - ); - - counterTriggerMqtt - .labels({ - job_id: triggerItem.job.id, - job_name: triggerItem.job.jobName, - version: triggerItem.version.version, - success: handleResponse.success ? 1 : 0, + this.runnerManager + .eventMqtt(triggerItem.job.id, { + context: { + triggerName: triggerItem.trigger.context.name ?? "", + }, + topic: topic, + payload: payload, }) - .inc(); - - if (!handleResponse.success) { - console.log( - `[TriggerMqtt/onMqttMessage] Sending MQTT handle event failed! topic "${topic}", error: ${handleResponse.error}` - ); - - return; - } - - if (!handleResponse.mqtt) { - return; - } - - // TODO: Remove this in a later revision, deprecated way of publishing MQTT events. 
- for (const publishItem of handleResponse.mqtt.publish) { - try { - console.warn( - `[TriggerMqtt/onMqttMessage] Received deprecated publish event for topic "${topic}".` - ); - - if (!triggerItem.client.connected) { - console.warn( - `[TriggerMqtt/onMqttMessage] MQTT client is not connected, cannot publish message to topic "${publishItem.topic}"` + .then((response) => { + if (response.status !== EventMqttResponse_Status.ACCEPTED) { + console.log( + `[TriggerMqtt/onMqttMessage] Runner rejected MQTT event for trigger ${triggerItem.trigger.id} on job ${triggerItem.job.id} with status ${EventMqttResponse_Status[response.status]}`, ); - - this.logger.write({ - source: "system", - actionId: triggerItem.action.id, - jobId: triggerItem.job.id, - jobName: triggerItem.job.jobName, - message: `[SYSTEM] MQTT client is not connected, cannot publish message to topic "${publishItem.topic}"`, - created: new Date(), - }); - - continue; } - - this.logger.write({ - source: "system", - actionId: triggerItem.action.id, - jobId: triggerItem.job.id, - jobName: triggerItem.job.jobName, - message: `[SYSTEM] MQTT message published to topic "${publishItem.topic}"`, - created: new Date(), - }); - - counterTriggerMqttPublish - .labels({ - job_id: triggerItem.job.id, - job_name: triggerItem.job.jobName, - version: triggerItem.version.version, - topic: publishItem.topic, - }) - .inc(); - - await triggerItem.client.publishAsync( - publishItem.topic, - publishItem.body - ); - } catch (err) { + }) + .catch((err) => { console.error(err); + }); - this.logger.write({ - source: "system", - actionId: triggerItem.action.id, - jobId: triggerItem.job.id, - jobName: triggerItem.job.jobName, - message: `[SYSTEM] MQTT publish error! topic: ${ - publishItem.topic - }, ${err instanceof Error ? 
err.message : String(err)}`, - created: new Date(), - }); - } - } + // const handleResponse = await this.runnerManager.sendHandleRequest( + // triggerItem.version, + // triggerItem.job, + // triggerItem.action, + // { + // type: "mqtt", + // topic, + // body: payload.toString("base64"), + // bodyLength: payload.length, + // }, + // ); + + // counterTriggerMqtt + // .labels({ + // job_id: triggerItem.job.id, + // job_name: triggerItem.job.jobName, + // version: triggerItem.version.version, + // success: handleResponse.success ? 1 : 0, + // }) + // .inc(); + + // if (!handleResponse.success) { + // console.log( + // `[TriggerMqtt/onMqttMessage] Sending MQTT handle event failed! topic "${topic}", error: ${handleResponse.error}`, + // ); + + // return; + // } + + // if (!handleResponse.mqtt) { + // return; + // } + + // // TODO: Remove this in a later revision, deprecated way of publishing MQTT events. + // for (const publishItem of handleResponse.mqtt.publish) { + // try { + // console.warn( + // `[TriggerMqtt/onMqttMessage] Received deprecated publish event for topic "${topic}".`, + // ); + + // if (!triggerItem.client.connected) { + // console.warn( + // `[TriggerMqtt/onMqttMessage] MQTT client is not connected, cannot publish message to topic "${publishItem.topic}"`, + // ); + + // this.logger.write({ + // source: "system", + // actionId: triggerItem.action.id, + // jobId: triggerItem.job.id, + // jobName: triggerItem.job.jobName, + // message: `[SYSTEM] MQTT client is not connected, cannot publish message to topic "${publishItem.topic}"`, + // created: new Date(), + // }); + + // continue; + // } + + // this.logger.write({ + // source: "system", + // actionId: triggerItem.action.id, + // jobId: triggerItem.job.id, + // jobName: triggerItem.job.jobName, + // message: `[SYSTEM] MQTT message published to topic "${publishItem.topic}"`, + // created: new Date(), + // }); + + // counterTriggerMqttPublish + // .labels({ + // job_id: triggerItem.job.id, + // job_name: 
triggerItem.job.jobName, + // version: triggerItem.version.version, + // topic: publishItem.topic, + // }) + // .inc(); + + // await triggerItem.client.publishAsync( + // publishItem.topic, + // publishItem.body, + // ); + // } catch (err) { + // console.error(err); + + // this.logger.write({ + // source: "system", + // actionId: triggerItem.action.id, + // jobId: triggerItem.job.id, + // jobName: triggerItem.job.jobName, + // message: `[SYSTEM] MQTT publish error! topic: ${ + // publishItem.topic + // }, ${err instanceof Error ? err.message : String(err)}`, + // created: new Date(), + // }); + // } + // } } catch (err) { console.error(err); @@ -520,7 +540,7 @@ export class TriggerMqtt extends LoopBase { private buildMqttConfig( trigger: TriggersTableType, - environment: EnvironmentsTableType | null + environment: EnvironmentsTableType | null, ) { assert(trigger.context.type === "mqtt"); @@ -539,11 +559,11 @@ export class TriggerMqtt extends LoopBase { env[trigger.context.connection.clientIdVariable].value; } else { errors.push( - `MQTT Config Building: clientId from environment failure, ${trigger.context.connection.clientIdVariable} missing` + `MQTT Config Building: clientId from environment failure, ${trigger.context.connection.clientIdVariable} missing`, ); errorsSimple.push( - `${trigger.context.connection.clientIdVariable} missing` + `${trigger.context.connection.clientIdVariable} missing`, ); } } @@ -555,7 +575,7 @@ export class TriggerMqtt extends LoopBase { result.host = env[trigger.context.connection.hostVariable].value; } else { errors.push( - `MQTT Config Building: host from environment failure, ${trigger.context.connection.hostVariable} missing` + `MQTT Config Building: host from environment failure, ${trigger.context.connection.hostVariable} missing`, ); errorsSimple.push(`${trigger.context.connection.hostVariable} missing`); @@ -570,11 +590,11 @@ export class TriggerMqtt extends LoopBase { env[trigger.context.connection.passwordVariable].value; } else 
{ errors.push( - `MQTT Config Building: password from environment failure, ${trigger.context.connection.passwordVariable} missing` + `MQTT Config Building: password from environment failure, ${trigger.context.connection.passwordVariable} missing`, ); errorsSimple.push( - `${trigger.context.connection.passwordVariable} missing` + `${trigger.context.connection.passwordVariable} missing`, ); } } @@ -584,11 +604,11 @@ export class TriggerMqtt extends LoopBase { } else if (trigger.context.connection.portVariable) { if (env[trigger.context.connection.portVariable]) { result.port = Number( - env[trigger.context.connection.portVariable].value + env[trigger.context.connection.portVariable].value, ); } else { errors.push( - `MQTT Config Building: port from environment failure, ${trigger.context.connection.portVariable} missing` + `MQTT Config Building: port from environment failure, ${trigger.context.connection.portVariable} missing`, ); errorsSimple.push(`${trigger.context.connection.portVariable} missing`); @@ -597,11 +617,11 @@ export class TriggerMqtt extends LoopBase { if (Number.isNaN(result.port)) { errors.push( - `MQTT Config Building: port from environment failure, ${trigger.context.connection.portVariable} expected valid number` + `MQTT Config Building: port from environment failure, ${trigger.context.connection.portVariable} expected valid number`, ); errorsSimple.push( - `${trigger.context.connection.portVariable} expected valid number` + `${trigger.context.connection.portVariable} expected valid number`, ); } @@ -613,11 +633,11 @@ export class TriggerMqtt extends LoopBase { .value as typeof result.protocol; } else { errors.push( - `MQTT Config Building: protocol from environment failure, ${trigger.context.connection.protocolVariable} missing` + `MQTT Config Building: protocol from environment failure, ${trigger.context.connection.protocolVariable} missing`, ); errorsSimple.push( - `${trigger.context.connection.protocolVariable} missing` + 
`${trigger.context.connection.protocolVariable} missing`, ); } } @@ -630,11 +650,11 @@ export class TriggerMqtt extends LoopBase { env[trigger.context.connection.usernameVariable].value; } else { errors.push( - `MQTT Config Building: username from environment failure, ${trigger.context.connection.usernameVariable} missing` + `MQTT Config Building: username from environment failure, ${trigger.context.connection.usernameVariable} missing`, ); errorsSimple.push( - `${trigger.context.connection.usernameVariable} missing` + `${trigger.context.connection.usernameVariable} missing`, ); } } @@ -656,7 +676,8 @@ export class TriggerMqtt extends LoopBase { private getMqttTriggerByJobId(jobId: string) { return Object.values(this.triggers).find( - (index) => index.job.id === jobId && index.trigger.context.type === "mqtt" + (index) => + index.job.id === jobId && index.trigger.context.type === "mqtt", ); } } diff --git a/packages/server/src/lock.ts b/packages/server/src/lock.ts index 50fa99a..0dc569d 100644 --- a/packages/server/src/lock.ts +++ b/packages/server/src/lock.ts @@ -1,11 +1,11 @@ import { eq, lt, sql } from "drizzle-orm"; import { getDrizzle } from "./db/index.js"; -import { lockTable } from "./db/schema/lock.js"; -import { timeout } from "./util.js"; +import { lockTable } from "./db/schema.js"; +import { timeout } from "@jobber/common"; export const acquireLock = async ( table: string, - primaryKey: string + primaryKey: string, ): Promise => { try { const lockKey = `${table}:${primaryKey}`; @@ -41,7 +41,7 @@ export const releaseLock = async (lockId: string): Promise => { export const withLock = async ( table: string, primaryKey: string, - callback: () => Promise + callback: () => Promise, ): Promise => { let lockId: string | null = null; diff --git a/packages/server/src/loop-base.ts b/packages/server/src/loop-base.ts deleted file mode 100644 index c502119..0000000 --- a/packages/server/src/loop-base.ts +++ /dev/null @@ -1,76 +0,0 @@ -import assert from "node:assert"; 
-import { awaitTruthy, timeout } from "./util.js"; - -/** - * Lifecycle: - * 1) neutral = = default state (pre-start or stopped) - * 2) starting = in process of starting - * 3) started = active and running - * 4) stopping = in process of stopping - * 5) One stopped, goes to neutral. - */ -export type StatusLifecycle = "neutral" | "starting" | "started" | "stopping"; - -export abstract class LoopBase { - private isLoopRunning = false; - - protected status: StatusLifecycle = "neutral"; - - protected abstract loopDuration: number; - - public async start() { - assert(this.status === "neutral"); - - this.status = "starting"; - - if (this.loopStarting) { - await this.loopStarting(); - } - - this.loop(); - - await awaitTruthy(() => Promise.resolve(this.isLoopRunning)); - - this.status = "started"; - - if (this.loopStarted) { - await this.loopStarted(); - } - } - - public async stop() { - assert(this.status === "started"); - - this.status = "stopping"; - - if (this.loopClosing) { - await this.loopClosing(); - } - - await awaitTruthy(() => Promise.resolve(!this.isLoopRunning)); - - this.status = "neutral"; - - if (this.loopClosed) { - await this.loopClosed(); - } - } - - private async loop() { - this.isLoopRunning = true; - - while (this.status === "starting" || this.status === "started") { - await this.loopIteration(); - - await timeout(this.loopDuration); - } - - this.isLoopRunning = false; - } - - protected abstract loopIteration(): Promise; - protected abstract loopClosing?(): Promise; - protected abstract loopClosed?(): Promise; - protected abstract loopStarting?(): Promise; - protected abstract loopStarted?(): Promise; -} diff --git a/packages/server/src/middleware/auth.ts b/packages/server/src/middleware/auth.ts index fcfb657..86822af 100644 --- a/packages/server/src/middleware/auth.ts +++ b/packages/server/src/middleware/auth.ts @@ -4,9 +4,9 @@ import { getCookie } from "hono/cookie"; import { Bouncer } from "~/bouncer.js"; import { USERNAME_ANONYMOUS } from 
"~/constants.js"; import { getDrizzle } from "~/db/index.js"; -import { apiTokensTable } from "~/db/schema/api-tokens.js"; -import { sessionsTable } from "~/db/schema/sessions.js"; -import { usersTable } from "~/db/schema/users.js"; +import { apiTokensTable } from "~/db/schema.js"; +import { sessionsTable } from "~/db/schema.js"; +import { usersTable } from "~/db/schema.js"; import { InternalHonoApp } from "~/index.js"; const extractApiToken = (c: Context) => { @@ -49,7 +49,7 @@ export const createMiddlewareAuth = () => { if (!result) { return c.json( { success: false, message: "Insufficient Permissions" }, - 403 + 403, ); } @@ -58,21 +58,21 @@ export const createMiddlewareAuth = () => { if (!users.enabled) { return c.json( { success: false, message: "Insufficient Permissions" }, - 403 + 403, ); } if (sessions.expires < new Date()) { return c.json( { success: false, message: "Insufficient Permissions" }, - 403 + 403, ); } if (sessions.status === "disabled") { return c.json( { success: false, message: "Insufficient Permissions" }, - 403 + 403, ); } @@ -83,7 +83,7 @@ export const createMiddlewareAuth = () => { user: users, session: sessions, permissions: users.permissions, - }) + }), ); return await next(); @@ -102,21 +102,21 @@ export const createMiddlewareAuth = () => { if (!apiToken) { return c.json( { success: false, message: "Insufficient Permissions" }, - 403 + 403, ); } if (apiToken.expires < new Date()) { return c.json( { success: false, message: "Insufficient Permissions" }, - 403 + 403, ); } if (apiToken.status !== "enabled") { return c.json( { success: false, message: "Insufficient Permissions" }, - 403 + 403, ); } @@ -126,7 +126,7 @@ export const createMiddlewareAuth = () => { type: "token", token: apiToken, permissions: apiToken.permissions, - }) + }), ); return await next(); @@ -143,27 +143,27 @@ export const createMiddlewareAuth = () => { if (!anonymousUser) { return c.json( { success: false, message: "Insufficient Permissions" }, - 403 + 403, ); } if 
(!anonymousUser.enabled) { return c.json( { success: false, message: "Insufficient Permissions" }, - 403 + 403, ); } if ( !anonymousUser.permissions.some( - (permission) => permission.effect === "allow" + (permission) => permission.effect === "allow", ) ) { // The anonymous user doesn't have any allow permissions, safe to assume they are // going to be rejected downstream return c.json( { success: false, message: "Insufficient Permissions" }, - 403 + 403, ); } @@ -173,7 +173,7 @@ export const createMiddlewareAuth = () => { type: "anonymous", user: anonymousUser, permissions: anonymousUser.permissions, - }) + }), ); return await next(); diff --git a/packages/server/src/middleware/response-time.ts b/packages/server/src/middleware/response-time.ts index 7079334..d66d628 100644 --- a/packages/server/src/middleware/response-time.ts +++ b/packages/server/src/middleware/response-time.ts @@ -1,5 +1,5 @@ import { Context, Next } from "hono"; -import { timeout } from "~/util.js"; +import { timeout } from "@jobber/common"; export function createMiddlewareResponseTime(duration: number) { return async (c: Context, next: Next) => { diff --git a/packages/server/src/paths.ts b/packages/server/src/paths.ts index ee885b0..1e811ab 100644 --- a/packages/server/src/paths.ts +++ b/packages/server/src/paths.ts @@ -1,23 +1,50 @@ import path from "path"; import { PATH_CONFIG } from "./constants.js"; -import { ActionsTableType } from "./db/schema/actions.js"; -import { JobVersionsTableType } from "./db/schema/job-versions.js"; +import { + ActionsTableType, + JobVersionsTableType, + RunnersTableType, +} from "./db/types.js"; import { sanitiseFilename } from "./util.js"; +import { mkdir } from "node:fs/promises"; +import { tmpdir } from "node:os"; export function getJobActionArchiveDirectory() { return path.join(PATH_CONFIG, "action-archives"); } export function getJobActionArchiveFile( - version: JobVersionsTableType, - action: ActionsTableType + version: Pick, + action: Pick, ) { return 
path.join( getJobActionArchiveDirectory(), - sanitiseFilename(`${version.version}_${action.id}.zip`) + sanitiseFilename(`${version.version}_${action.id}.zip`), ); } export function getPgDumpDirectory() { return path.join(PATH_CONFIG, "pg-dumps"); } + +export function getRunnerEnvDirectory() { + return path.join(tmpdir(), "jobber-env"); +} + +export function getRunnerEnvFile(runner: RunnersTableType) { + return path.join(getRunnerEnvDirectory(), `${runner.id}.env`); +} + +export async function ensureDirectoriesExist() { + await mkdir(getJobActionArchiveDirectory(), { + recursive: true, + }); + + await mkdir(getPgDumpDirectory(), { + recursive: true, + }); + + await mkdir(getRunnerEnvDirectory(), { + recursive: true, + }); +} diff --git a/packages/server/src/pg-backup.ts b/packages/server/src/pg-backup.ts index 3fe9bb1..4c5127c 100644 --- a/packages/server/src/pg-backup.ts +++ b/packages/server/src/pg-backup.ts @@ -1,7 +1,7 @@ import { CronTime } from "cron"; import { singleton } from "tsyringe"; -import { LoopBase } from "~/loop-base.js"; +import { LoopBase } from "@jobber/common"; import { getConfigOption } from "./config.js"; import { getPgDumpDirectory } from "./paths.js"; diff --git a/packages/server/src/rate-limit.ts b/packages/server/src/rate-limit.ts new file mode 100644 index 0000000..b772495 --- /dev/null +++ b/packages/server/src/rate-limit.ts @@ -0,0 +1,74 @@ +import { LoopBase } from "@jobber/common"; +import { singleton } from "tsyringe"; + +type BucketItem = { + count: number; + + created: number; + expires: number; +}; + +// Minutely +const RATE_LIMIT_PERIOD_MS = 60 * 1000; + +@singleton() +export class RateLimit extends LoopBase { + protected loopDuration = 60 * 1000; // 1 minute + + protected loopStarting = undefined; + protected loopStarted = undefined; + protected loopClosing = undefined; + protected loopClosed = undefined; + + private buckets = new Map(); + + private createBucketKey(key: string) { + const calculatedPeriod = Date.now() - 
(Date.now() % RATE_LIMIT_PERIOD_MS); + + return `${key}:${calculatedPeriod}`; + } + + public isRateLimited(key: string, limit: number) { + return false; + + // TODO: Reenable + // if (this.status !== "started") { + // throw new Error("RateLimit is not started"); + // } + + // const bucketKey = this.createBucketKey(key); + // const bucket = this.buckets.get(bucketKey); + + // return bucket && bucket.count >= limit; + } + + public increment(key: string) { + if (this.status !== "started") { + throw new Error("RateLimit is not started"); + } + + const bucketKey = this.createBucketKey(key); + const bucket = this.buckets.get(bucketKey); + + if (!bucket) { + this.buckets.set(bucketKey, { + count: 1, + created: Date.now(), + expires: Date.now() + RATE_LIMIT_PERIOD_MS, + }); + return; + } + + bucket.count += 1; + } + + protected async loopIteration() { + const now = Date.now(); + + for (const [key, bucket] of this.buckets) { + if (bucket.expires <= now) { + this.buckets.delete(key); + } + } + } +} diff --git a/packages/server/src/routes/api-tokens.ts b/packages/server/src/routes/api-tokens.ts index 7fa2f7f..6128115 100644 --- a/packages/server/src/routes/api-tokens.ts +++ b/packages/server/src/routes/api-tokens.ts @@ -3,11 +3,12 @@ import { Hono } from "hono"; import assert from "node:assert"; import { z } from "zod"; import { getDrizzle } from "~/db/index.js"; -import { apiTokensTable, ApiTokensTableType } from "~/db/schema/api-tokens.js"; +import { apiTokensTable } from "~/db/schema.js"; +import { ApiTokensTableType } from "~/db/types.js"; import { InternalHonoApp } from "~/index.js"; import { withLock } from "~/lock.js"; import { createMiddlewareAuth } from "~/middleware/auth.js"; -import { canPerformAction, JobberPermissionsSchema } from "~/permissions.js"; +import { JobberPermissionsSchema } from "@jobber/common/permissions.js"; export async function createRouteApiTokens() { const app = new Hono(); @@ -32,7 +33,7 @@ export async function createRouteApiTokens() { 
.from(apiTokensTable); const tokensFiltered = tokens.filter((token) => - bouncer.canReadApiToken(token) + bouncer.canReadApiToken(token), ); return c.json({ diff --git a/packages/server/src/routes/audit-log.ts b/packages/server/src/routes/audit-log.ts new file mode 100644 index 0000000..610d5ff --- /dev/null +++ b/packages/server/src/routes/audit-log.ts @@ -0,0 +1,30 @@ +import { Hono } from "hono"; +import { auditLogsModel } from "~/db/audit-log.js"; +import { InternalHonoApp } from "~/index.js"; +import { createMiddlewareAuth } from "~/middleware/auth.js"; + +export async function createRouteAuditLog() { + const app = new Hono(); + + app.get("/audit-log/", createMiddlewareAuth(), async (c) => { + const bouncer = c.get("bouncer")!; + + if (!bouncer.canReadAuditLogGenerally()) { + return c.json( + { success: false, message: "Insufficient Permissions" }, + 403, + ); + } + + const cursor = c.req.query("cursor") ?? undefined; + + const result = await auditLogsModel.query(cursor); + + return c.json({ + success: true, + data: result, + }); + }); + + return app; +} diff --git a/packages/server/src/routes/auth.ts b/packages/server/src/routes/auth.ts index 660f630..899c0e4 100644 --- a/packages/server/src/routes/auth.ts +++ b/packages/server/src/routes/auth.ts @@ -10,16 +10,12 @@ import assert from "node:assert"; import { z } from "zod"; import { getConfigOption } from "~/config.js"; import { getDrizzle } from "~/db/index.js"; -import { sessionsTable } from "~/db/schema/sessions.js"; -import { - UserPasswordSchema, - usersTable, - UserUsernameSchema, -} from "~/db/schema/users.js"; +import { sessionsTable, usersTable } from "~/db/schema.js"; +import { UserPasswordSchema, UserUsernameSchema } from "~/db/types.js"; import { InternalHonoApp } from "~/index.js"; import { createMiddlewareAuth } from "~/middleware/auth.js"; import { createMiddlewareResponseTime } from "~/middleware/response-time.js"; -import { PERMISSION_NONE } from "~/permissions.js"; +import { PERMISSION_NONE 
} from "@jobber/common/permissions.js"; export async function createRouteAuth() { const app = new Hono(); @@ -31,7 +27,7 @@ export async function createRouteAuth() { if (!getConfigOption("AUTH_PUBLIC_LOGIN_ENABLED")) { return c.json( { success: false, message: "Public login is disabled" }, - 403 + 403, ); } @@ -57,7 +53,7 @@ export async function createRouteAuth() { if (!user) { return c.json( { success: false, message: "Invalid username or password" }, - 401 + 401, ); } @@ -66,7 +62,7 @@ export async function createRouteAuth() { if (!isValidPassword) { return c.json( { success: false, message: "Invalid username or password" }, - 401 + 401, ); } @@ -95,7 +91,7 @@ export async function createRouteAuth() { session: {}, }, }); - } + }, ); app.post( @@ -105,7 +101,7 @@ export async function createRouteAuth() { if (!getConfigOption("AUTH_PUBLIC_REGISTRATION_ENABLED")) { return c.json( { success: false, message: "Public registration is disabled" }, - 403 + 403, ); } @@ -131,7 +127,7 @@ export async function createRouteAuth() { if (existingUser) { return c.json( { success: false, message: "Username already exists" }, - 409 + 409, ); } @@ -151,7 +147,7 @@ export async function createRouteAuth() { if (!user) { return c.json( { success: false, message: "Failed to create user" }, - 500 + 500, ); } @@ -180,7 +176,7 @@ export async function createRouteAuth() { user: {}, }, }); - } + }, ); app.get("/auth", createMiddlewareAuth(), async (c) => { diff --git a/packages/server/src/routes/job/actions.ts b/packages/server/src/routes/job/actions.ts index 8ef885e..1aa7ece 100644 --- a/packages/server/src/routes/job/actions.ts +++ b/packages/server/src/routes/job/actions.ts @@ -1,9 +1,7 @@ import { and, eq } from "drizzle-orm"; import { Hono } from "hono"; import { getDrizzle } from "~/db/index.js"; -import { actionsTable } from "~/db/schema/actions.js"; -import { jobVersionsTable } from "~/db/schema/job-versions.js"; -import { jobsTable } from "~/db/schema/jobs.js"; +import { 
actionsTable, jobVersionsTable, jobsTable } from "~/db/schema.js"; import { InternalHonoApp } from "~/index.js"; import { createMiddlewareAuth } from "~/middleware/auth.js"; @@ -37,15 +35,15 @@ export async function createRouteJobActions() { jobsTable, and( eq(jobsTable.id, actionsTable.jobId), - eq(jobsTable.jobVersionId, actionsTable.jobVersionId) - ) + eq(jobsTable.jobVersionId, actionsTable.jobVersionId), + ), ) .innerJoin( jobVersionsTable, and( eq(jobVersionsTable.jobId, actionsTable.jobId), - eq(jobVersionsTable.id, actionsTable.jobVersionId) - ) + eq(jobVersionsTable.id, actionsTable.jobVersionId), + ), ) .where(eq(actionsTable.jobId, jobId)); @@ -86,8 +84,8 @@ export async function createRouteJobActions() { jobVersionsTable, and( eq(jobVersionsTable.jobId, actionsTable.jobId), - eq(jobVersionsTable.id, actionsTable.jobVersionId) - ) + eq(jobVersionsTable.id, actionsTable.jobVersionId), + ), ) .where(eq(actionsTable.jobId, jobId)); diff --git a/packages/server/src/routes/job/environment.ts b/packages/server/src/routes/job/environment.ts index aded87a..4ebd221 100644 --- a/packages/server/src/routes/job/environment.ts +++ b/packages/server/src/routes/job/environment.ts @@ -1,10 +1,8 @@ import { Hono } from "hono"; import { z } from "zod"; import { getDrizzle } from "~/db/index.js"; -import { - EnvironmentsContextSchemaType, - environmentsTable, -} from "~/db/schema/environments.js"; +import { environmentsTable } from "~/db/schema.js"; +import { EnvironmentsContextSchemaType } from "~/db/types.js"; import { getUnixTimestamp } from "~/util.js"; import { jobEnvironmentNameSchema } from "./schemas-common.js"; import { InternalHonoApp } from "~/index.js"; @@ -61,7 +59,7 @@ export async function createRouteJobEnvironment() { success: true, data: env, }); - } + }, ); app.post( @@ -79,7 +77,7 @@ export async function createRouteJobEnvironment() { const name = await jobEnvironmentNameSchema.parseAsync( c.req.param("name"), - { path: ["request", "param"] } + { path: 
["request", "param"] }, ); const body = await schema.parseAsync(await c.req.parseBody(), { @@ -95,7 +93,7 @@ export async function createRouteJobEnvironment() { if (!bouncer.canWriteJobEnvironment({ jobId: job.id }, name)) { return c.json( { success: false, message: "Insufficient Permissions" }, - 403 + 403, ); } @@ -132,7 +130,7 @@ export async function createRouteJobEnvironment() { message: "ok", }); }); - } + }, ); app.delete( @@ -144,7 +142,7 @@ export async function createRouteJobEnvironment() { const name = await jobEnvironmentNameSchema.parseAsync( c.req.param("name"), - { path: ["request", "param"] } + { path: ["request", "param"] }, ); const job = await jobModel.byId(jobId); @@ -156,7 +154,7 @@ export async function createRouteJobEnvironment() { if (!bouncer.canDeleteJobEnvironment({ jobId: job.id }, name)) { return c.json( { success: false, message: "Insufficient Permissions" }, - 403 + 403, ); } @@ -191,7 +189,7 @@ export async function createRouteJobEnvironment() { message: "ok", }); }); - } + }, ); return app; diff --git a/packages/server/src/routes/job/job.ts b/packages/server/src/routes/job/job.ts index 5b5b54d..ba31882 100644 --- a/packages/server/src/routes/job/job.ts +++ b/packages/server/src/routes/job/job.ts @@ -4,14 +4,15 @@ import { rm } from "node:fs/promises"; import { z } from "zod"; import { getDrizzle } from "~/db/index.js"; import { jobModel } from "~/db/job.js"; -import { actionsTable } from "~/db/schema/actions.js"; -import { jobVersionsTable } from "~/db/schema/job-versions.js"; -import { jobsTable } from "~/db/schema/jobs.js"; -import { logsTable } from "~/db/schema/logs.js"; +import { + actionsTable, + jobVersionsTable, + jobsTable, + logsTable, +} from "~/db/schema.js"; import { InternalHonoApp } from "~/index.js"; import { createMiddlewareAuth } from "~/middleware/auth.js"; import { getJobActionArchiveFile } from "~/paths.js"; -import { canPerformAction } from "~/permissions.js"; export async function createRouteJob() { const app 
= new Hono(); @@ -37,8 +38,8 @@ export async function createRouteJob() { jobVersionsTable, and( eq(jobVersionsTable.jobId, jobsTable.id), - eq(jobVersionsTable.id, jobsTable.jobVersionId) - ) + eq(jobVersionsTable.id, jobsTable.jobVersionId), + ), ) .where(eq(jobsTable.id, jobId)) .limit(1) @@ -78,8 +79,8 @@ export async function createRouteJob() { jobVersionsTable, and( eq(jobVersionsTable.jobId, jobsTable.id), - eq(jobVersionsTable.id, jobsTable.jobVersionId) - ) + eq(jobVersionsTable.id, jobsTable.jobVersionId), + ), ) .orderBy(desc(jobVersionsTable.created)); @@ -152,7 +153,7 @@ export async function createRouteJob() { .where(eq(actionsTable.jobId, job.id)) .innerJoin( jobVersionsTable, - eq(actionsTable.jobVersionId, jobVersionsTable.id) + eq(actionsTable.jobVersionId, jobVersionsTable.id), ); // @@ -168,7 +169,7 @@ export async function createRouteJob() { for (const actionArchive of actionArchives) { const filename = getJobActionArchiveFile( actionArchive.version, - actionArchive.action + actionArchive.action, ); await rm(filename).catch((err) => { diff --git a/packages/server/src/routes/job/metrics.ts b/packages/server/src/routes/job/metrics.ts index 4c8ec76..3119088 100644 --- a/packages/server/src/routes/job/metrics.ts +++ b/packages/server/src/routes/job/metrics.ts @@ -4,11 +4,10 @@ import assert from "node:assert"; import { z } from "zod"; import { getConfigOption } from "~/config.js"; import { getDrizzle } from "~/db/index.js"; -import { jobVersionsTable } from "~/db/schema/job-versions.js"; -import { jobsTable } from "~/db/schema/jobs.js"; +import { jobVersionsTable, jobsTable } from "~/db/schema.js"; import { InternalHonoApp } from "~/index.js"; import { createMiddlewareAuth } from "~/middleware/auth.js"; -import { canPerformAction } from "~/permissions.js"; +import { canPerformAction } from "@jobber/common/permissions.js"; import { getUnixTimestamp } from "~/util.js"; type PrometheusQueryResponse = @@ -102,7 +101,7 @@ export async function 
createRouteJobMetrics() { success: false, message: "Metrics are not configured", }, - 500 + 500, ); } @@ -126,7 +125,7 @@ export async function createRouteJobMetrics() { success: false, message: "jobId, metric, and version are required", }, - 400 + 400, ); } @@ -146,8 +145,8 @@ export async function createRouteJobMetrics() { jobVersionsTable, and( eq(jobsTable.id, jobVersionsTable.jobId), - eq(jobsTable.jobVersionId, jobVersionsTable.id) - ) + eq(jobsTable.jobVersionId, jobVersionsTable.id), + ), ) .where(eq(jobsTable.id, jobId)) .limit(1) @@ -159,14 +158,14 @@ export async function createRouteJobMetrics() { success: false, message: "Job not found", }, - 404 + 404, ); } if (!bouncer.canReadJob(job)) { return c.json( { success: false, message: "Insufficient Permissions" }, - 403 + 403, ); } @@ -177,7 +176,7 @@ export async function createRouteJobMetrics() { success: false, message: "Job does not have a version", }, - 400 + 400, ); } @@ -290,7 +289,7 @@ export async function createRouteJobMetrics() { success: false, message: `Metric "${metric}" is not supported`, }, - 400 + 400, ); } @@ -306,7 +305,7 @@ export async function createRouteJobMetrics() { if (result.status === "error") { console.error( - `Error querying Prometheus: ${result.errorType} - ${result.error}` + `Error querying Prometheus: ${result.errorType} - ${result.error}`, ); return c.json( @@ -315,7 +314,7 @@ export async function createRouteJobMetrics() { message: "Error querying Prometheus", error: result.error, }, - 500 + 500, ); } @@ -341,9 +340,9 @@ export async function createRouteJobMetrics() { }; }), }, - 200 + 200, ); - } + }, ); return app; diff --git a/packages/server/src/routes/job/publish.ts b/packages/server/src/routes/job/publish.ts index a16644c..e181444 100644 --- a/packages/server/src/routes/job/publish.ts +++ b/packages/server/src/routes/job/publish.ts @@ -6,15 +6,17 @@ import { Hono } from "hono"; import { ReadableStream } from "node:stream/web"; import { z } from "zod"; import { 
getDrizzle } from "~/db/index.js"; -import { actionsTable } from "~/db/schema/actions.js"; -import { jobVersionsTable } from "~/db/schema/job-versions.js"; -import { jobsTable } from "~/db/schema/jobs.js"; -import { triggersTable } from "~/db/schema/triggers.js"; +import { + actionsTable, + jobVersionsTable, + jobsTable, + triggersTable, +} from "~/db/schema.js"; import { InternalHonoApp } from "~/index.js"; import { classifyArchiveFile } from "~/jobber/images.js"; import { createMiddlewareAuth } from "~/middleware/auth.js"; import { getJobActionArchiveFile } from "~/paths.js"; -import { canPerformAction } from "~/permissions.js"; +import { canPerformAction } from "@jobber/common/permissions.js"; import { createBenchmark, getTmpFile, @@ -43,7 +45,7 @@ export async function createRouteJobPublish() { console.log("[/publish/] User does not have permission to publish jobs"); return c.json( { success: false, message: "Insufficient Permissions" }, - 403 + 403, ); } @@ -65,7 +67,7 @@ export async function createRouteJobPublish() { success: false, message: "Expected file", }, - 400 + 400, ); } @@ -77,7 +79,7 @@ export async function createRouteJobPublish() { success: false, message: "Unexpected file type", }, - 400 + 400, ); } @@ -86,7 +88,7 @@ export async function createRouteJobPublish() { await handleReadableStreamPipe( archiveFile.stream() as ReadableStream, - writeStream + writeStream, ); console.log(`[/publish/] ${benchmark()}ms - File streamed to disk`); @@ -99,7 +101,7 @@ export async function createRouteJobPublish() { success: false, message: "Malformed archive file!", }, - 400 + 400, ); } @@ -158,7 +160,7 @@ export async function createRouteJobPublish() { if (!version) { console.log( - `[/publish/] Failed to create job version for ${job.jobName} v${packageJson.version}` + `[/publish/] Failed to create job version for ${job.jobName} v${packageJson.version}`, ); return c.json({ @@ -169,7 +171,7 @@ export async function createRouteJobPublish() { if (version.created 
!== timestamp) { console.log( - `[/publish/] Job version ${job.jobName} v${packageJson.version} already exists, skipping action creation` + `[/publish/] Job version ${job.jobName} v${packageJson.version} already exists, skipping action creation`, ); return c.json({ @@ -241,7 +243,7 @@ export async function createRouteJobPublish() { }, }); } - }) + }), ); if (action) { @@ -250,7 +252,7 @@ export async function createRouteJobPublish() { if (query.allowAutomaticRollout) { console.log( - "[/publish/] Automatic rollout enabled, updating job version" + "[/publish/] Automatic rollout enabled, updating job version", ); await tx @@ -261,7 +263,7 @@ export async function createRouteJobPublish() { .where(eq(jobsTable.id, job.id)); } else { console.log( - "[/publish/] Automatic rollout disabled, not updating job version" + "[/publish/] Automatic rollout disabled, not updating job version", ); } diff --git a/packages/server/src/routes/job/runners.ts b/packages/server/src/routes/job/runners.ts index 192304b..c314c07 100644 --- a/packages/server/src/routes/job/runners.ts +++ b/packages/server/src/routes/job/runners.ts @@ -3,11 +3,13 @@ import { Hono } from "hono"; import { container } from "tsyringe"; import { getDrizzle } from "~/db/index.js"; import { jobModel } from "~/db/job.js"; -import { jobsTable } from "~/db/schema/jobs.js"; +import { jobsTable } from "~/db/schema.js"; import { InternalHonoApp } from "~/index.js"; import { RunnerManager } from "~/jobber/runners/manager.js"; import { createMiddlewareAuth } from "~/middleware/auth.js"; -import { canPerformAction } from "~/permissions.js"; +import { canPerformAction } from "@jobber/common/permissions.js"; +import { runnersModel } from "~/db/runners.js"; +import { create } from "domain"; export async function createRouteJobRunners() { const runnerManager = container.resolve(RunnerManager); @@ -17,7 +19,6 @@ export async function createRouteJobRunners() { app.get("/job/:jobId/runners", createMiddlewareAuth(), async (c, next) 
=> { const jobId = c.req.param("jobId"); const bouncer = c.get("bouncer")!; - const job = await jobModel.byId(jobId); if (!job) { @@ -27,15 +28,25 @@ export async function createRouteJobRunners() { if (!bouncer.canReadJobRunners(job)) { return c.json( { success: false, message: "Insufficient Permissions" }, - 403 + 403, ); } - const runners = await runnerManager.findRunnersByJobId(jobId); + const runners = await runnersModel.byJobIdSpecial(jobId, true); return c.json({ success: true, - data: runners, + data: runners.map((runner) => ({ + id: runner.id, + status: runner.status, + actionId: runner.actionId, + jobId: runner.jobId, + requestsProcessing: 0, + createdAt: Math.floor(runner.createdAt.getTime() / 1000), + readyAt: Math.floor(runner.readyAt?.getTime() ?? 0 / 1000), + closingAt: Math.floor(runner.closingAt?.getTime() ?? 0 / 1000), + closedAt: Math.floor(runner.closedAt?.getTime() ?? 0 / 1000), + })), }); }); @@ -44,11 +55,9 @@ export async function createRouteJobRunners() { createMiddlewareAuth(), async (c, next) => { const bouncer = c.get("bouncer")!; - const jobId = c.req.param("jobId"); const runnerId = c.req.param("runnerId"); const queryShutdownForcefully = c.req.query("forceful") === "true"; - const job = await jobModel.byId(jobId); if (!job) { @@ -58,26 +67,20 @@ export async function createRouteJobRunners() { if (!bouncer.canWriteJobRunners(job)) { return c.json( { success: false, message: "Insufficient Permissions" }, - 403 + 403, ); } - let result; - - if (queryShutdownForcefully) { - result = await runnerManager.sendShutdownForceful(job.id, runnerId); - } else { - result = await runnerManager.sendShutdownGraceful(job.id, runnerId); - } + runnerManager.shutdownQueueAdd(runnerId, !!queryShutdownForcefully); return c.json( { - success: result.success, - message: result.message, + success: true, + message: "Scheduled for shutdown", }, - result.success ? 
200 : 400 + 200, ); - } + }, ); return app; diff --git a/packages/server/src/routes/job/store.ts b/packages/server/src/routes/job/store.ts index 6958039..beabed2 100644 --- a/packages/server/src/routes/job/store.ts +++ b/packages/server/src/routes/job/store.ts @@ -3,7 +3,7 @@ import { container } from "tsyringe"; import { InternalHonoApp } from "~/index.js"; import { Store } from "~/jobber/store.js"; import { createMiddlewareAuth } from "~/middleware/auth.js"; -import { canPerformAction } from "~/permissions.js"; +import { canPerformAction } from "@jobber/common/permissions.js"; export async function createRouteJobStore() { const store = container.resolve(Store); @@ -23,10 +23,10 @@ export async function createRouteJobStore() { return next(); } - if (!bouncer.canReadJobStoreItem(item)) { + if (!bouncer.canReadJobStore(item)) { return c.json( { success: false, message: "Insufficient Permissions" }, - 403 + 403, ); } @@ -34,7 +34,7 @@ export async function createRouteJobStore() { success: true, data: item, }); - } + }, ); app.get("/job/:jobId/store/", createMiddlewareAuth(), async (c, next) => { @@ -44,7 +44,7 @@ export async function createRouteJobStore() { const items = await store.getItemsNoValue(jobId); const itemsFiltered = items.filter((item) => { - return bouncer.canReadJobStoreItem(item); + return bouncer.canReadJobStore(item); }); return c.json({ @@ -67,10 +67,10 @@ export async function createRouteJobStore() { return next(); } - if (!bouncer.canDeleteJobStoreItem(item)) { + if (!bouncer.canDeleteJobStore(item)) { return c.json( { success: false, message: "Insufficient Permissions" }, - 403 + 403, ); } @@ -80,7 +80,7 @@ export async function createRouteJobStore() { success: true, data: item, }); - } + }, ); return app; diff --git a/packages/server/src/routes/job/triggers.ts b/packages/server/src/routes/job/triggers.ts index 2644708..38dad90 100644 --- a/packages/server/src/routes/job/triggers.ts +++ b/packages/server/src/routes/job/triggers.ts @@ -2,19 +2,15 
@@ import { and, eq } from "drizzle-orm"; import { Hono } from "hono"; import { container } from "tsyringe"; import { getDrizzle } from "~/db/index.js"; -import { jobVersionsTable } from "~/db/schema/job-versions.js"; -import { jobsTable } from "~/db/schema/jobs.js"; -import { triggersTable } from "~/db/schema/triggers.js"; +import { jobVersionsTable, jobsTable, triggersTable } from "~/db/schema.js"; import { InternalHonoApp } from "~/index.js"; import { TriggerCron } from "~/jobber/triggers/cron.js"; -import { TriggerHttp } from "~/jobber/triggers/http.js"; import { TriggerMqtt } from "~/jobber/triggers/mqtt.js"; import { createMiddlewareAuth } from "~/middleware/auth.js"; -import { canPerformAction } from "~/permissions.js"; +import { canPerformAction } from "@jobber/common/permissions.js"; export async function createRouteJobTriggers() { const triggerCron = container.resolve(TriggerCron); - const triggerHttp = container.resolve(TriggerHttp); const triggerMqtt = container.resolve(TriggerMqtt); const app = new Hono(); @@ -41,15 +37,15 @@ export async function createRouteJobTriggers() { triggersTable, and( eq(jobsTable.id, triggersTable.jobId), - eq(jobsTable.jobVersionId, triggersTable.jobVersionId) - ) + eq(jobsTable.jobVersionId, triggersTable.jobVersionId), + ), ) .innerJoin( jobVersionsTable, and( eq(jobVersionsTable.jobId, triggersTable.jobId), - eq(jobVersionsTable.id, triggersTable.jobVersionId) - ) + eq(jobVersionsTable.id, triggersTable.jobVersionId), + ), ) .where(eq(triggersTable.jobId, jobId)); @@ -63,15 +59,6 @@ export async function createRouteJobTriggers() { }; } - if (trigger.context.type === "http") { - const status = triggerHttp.getTriggerStatus(jobId, trigger.id); - - return { - ...trigger, - status, - }; - } - if (trigger.context.type === "mqtt") { const status = triggerMqtt.getTriggerStatus(jobId, trigger.id); @@ -98,7 +85,7 @@ export async function createRouteJobTriggers() { success: true, data: triggersFiltered, }); - } + }, ); 
app.get("/job/:jobId/triggers", createMiddlewareAuth(), async (c, next) => { @@ -120,8 +107,8 @@ export async function createRouteJobTriggers() { jobVersionsTable, and( eq(triggersTable.jobId, jobVersionsTable.jobId), - eq(triggersTable.jobVersionId, jobVersionsTable.id) - ) + eq(triggersTable.jobVersionId, jobVersionsTable.id), + ), ) .where(eq(triggersTable.jobId, jobId)); @@ -129,13 +116,6 @@ export async function createRouteJobTriggers() { if (trigger.context.type === "schedule") { const status = triggerCron.getTriggerStatus(jobId, trigger.id); - return { - ...trigger, - status, - }; - } else if (trigger.context.type === "http") { - const status = triggerHttp.getTriggerStatus(jobId, trigger.id); - return { ...trigger, status, @@ -185,7 +165,7 @@ export async function createRouteJobTriggers() { }) .from(triggersTable) .where( - and(eq(triggersTable.id, triggerId), eq(triggersTable.jobId, jobId)) + and(eq(triggersTable.id, triggerId), eq(triggersTable.jobId, jobId)), ) .limit(1) .then((row) => row.at(0) ?? 
null); @@ -197,20 +177,13 @@ export async function createRouteJobTriggers() { if (!bouncer.canReadJobTriggers(trigger)) { return c.json( { success: false, message: "Insufficient Permissions" }, - 403 + 403, ); } if (trigger.context.type === "schedule") { const status = await triggerCron.getTriggerStatus(jobId, triggerId); - return c.json({ - success: true, - data: status, - }); - } else if (trigger.context.type === "http") { - const status = await triggerHttp.getTriggerStatus(jobId, triggerId); - return c.json({ success: true, data: status, @@ -225,7 +198,7 @@ export async function createRouteJobTriggers() { } throw new Error("Unsupported trigger type: " + trigger.context["type"]); - } + }, ); return app; diff --git a/packages/server/src/routes/job/versions.ts b/packages/server/src/routes/job/versions.ts index aab2cd9..42741ee 100644 --- a/packages/server/src/routes/job/versions.ts +++ b/packages/server/src/routes/job/versions.ts @@ -1,10 +1,10 @@ import { eq } from "drizzle-orm"; import { Hono } from "hono"; import { getDrizzle } from "~/db/index.js"; -import { jobVersionsTable } from "~/db/schema/job-versions.js"; +import { jobVersionsTable } from "~/db/schema.js"; import { InternalHonoApp } from "~/index.js"; import { createMiddlewareAuth } from "~/middleware/auth.js"; -import { canPerformAction } from "~/permissions.js"; +import { canPerformAction } from "@jobber/common/permissions.js"; export async function createRouteVersions() { const app = new Hono(); diff --git a/packages/server/src/routes/metrics.ts b/packages/server/src/routes/metrics.ts index c50e57d..3604b55 100644 --- a/packages/server/src/routes/metrics.ts +++ b/packages/server/src/routes/metrics.ts @@ -3,12 +3,11 @@ import { Hono } from "hono"; import { register } from "prom-client"; import { container } from "tsyringe"; import { getDrizzle } from "~/db/index.js"; -import { jobsTable } from "~/db/schema/jobs.js"; import { InternalHonoApp } from "~/index.js"; import { RunnerManager } from 
"~/jobber/runners/manager.js"; import { Telemetry } from "~/jobber/telemetry.js"; import { createMiddlewareAuth } from "~/middleware/auth.js"; -import { canPerformAction } from "~/permissions.js"; +import { canPerformAction } from "@jobber/common/permissions.js"; import { getUnixTimestamp } from "~/util.js"; export async function createRouteMetrics() { @@ -27,116 +26,94 @@ export async function createRouteMetrics() { }); app.get("/metrics/overview", createMiddlewareAuth(), async (c) => { - const bouncer = c.get("bouncer")!; - - if (!bouncer.canReadSystemMetricsOverview()) { - return c.text("Insufficient Permissions", 403); - } - - const telemetry = container.resolve(Telemetry); - - const runners = await runnerManager.getRunners(); - - const runnersTotal = runners.length; - - const runnersStarting = runners.reduce((count, runner) => { - if (runner.status === "starting") { - return count + 1; - } - - return count; - }, 0); - - const runnersReady = runners.reduce((count, runner) => { - if (runner.status === "ready") { - return count + 1; - } - - return count; - }, 0); - - const runnersClosing = runners.reduce((count, runner) => { - if (runner.status === "closing") { - return count + 1; - } - - return count; - }, 0); - - const runnersClosed = runners.reduce((count, runner) => { - if (runner.status === "closed") { - return count + 1; - } - - return count; - }, 0); - - const runnersLoadTotal = runners.reduce((total, runner) => { - return total + runner.requestsProcessing; - }, 0); - - const runnersLoadAverage = - runnersLoadTotal === 0 ? 
0 : runnersLoadTotal / runnersTotal; - - const lastRequestAt = runners.reduce((latest, runner) => { - if (runner.lastRequestAt && runner.lastRequestAt > latest) { - return runner.lastRequestAt; - } - - return latest; - }, 0); - - const jobsTotal = await getDrizzle() - .select({ - count: sql`COUNT(${jobsTable.id})`, - }) - .from(jobsTable) - .then((res) => res[0].count as number); - // - - const jobsDisabled = await getDrizzle() - .select({ - count: sql`COUNT(${jobsTable.id})`, - }) - .from(jobsTable) - .where(eq(jobsTable.status, "disabled")) - .then((res) => res[0].count as number); - // - - const jobsEnabled = await getDrizzle() - .select({ - count: sql`COUNT(${jobsTable.id})`, - }) - .from(jobsTable) - .where(eq(jobsTable.status, "enabled")) - .then((res) => res[0].count as number); - // - - return c.json({ - success: true, - data: { - runnerMetrics: { - runnersTotal, - runnersStarting, - runnersReady, - runnersClosing, - runnersClosed, - - runnersLoadTotal, - runnersLoadAverage, - - lastRequestAt, - }, - - jobsMetrics: { - jobsTotal, - jobsDisabled, - jobsEnabled, - }, - - uptime: getUnixTimestamp() - telemetry.getStartTime(), - }, - }); + return c.json({ success: false, message: "Not Implemented" }, 501); + + // const bouncer = c.get("bouncer")!; + // if (!bouncer.canReadSystemMetricsOverview()) { + // return c.text("Insufficient Permissions", 403); + // } + // const telemetry = container.resolve(Telemetry); + // const runners = await runnerManager.getRunners(); + // const runnersTotal = runners.length; + // const runnersStarting = runners.reduce((count, runner) => { + // if (runner.status === "starting") { + // return count + 1; + // } + // return count; + // }, 0); + // const runnersReady = runners.reduce((count, runner) => { + // if (runner.status === "ready") { + // return count + 1; + // } + // return count; + // }, 0); + // const runnersClosing = runners.reduce((count, runner) => { + // if (runner.status === "closing") { + // return count + 1; + // } + 
// return count; + // }, 0); + // const runnersClosed = runners.reduce((count, runner) => { + // if (runner.status === "closed") { + // return count + 1; + // } + // return count; + // }, 0); + // const runnersLoadTotal = runners.reduce((total, runner) => { + // return total + runner.requestsProcessing; + // }, 0); + // const runnersLoadAverage = + // runnersLoadTotal === 0 ? 0 : runnersLoadTotal / runnersTotal; + // const lastRequestAt = runners.reduce((latest, runner) => { + // if (runner.lastRequestAt && runner.lastRequestAt > latest) { + // return runner.lastRequestAt; + // } + // return latest; + // }, 0); + // const jobsTotal = await getDrizzle() + // .select({ + // count: sql`COUNT(${jobsTable.id})`, + // }) + // .from(jobsTable) + // .then((res) => res[0].count as number); + // // + // const jobsDisabled = await getDrizzle() + // .select({ + // count: sql`COUNT(${jobsTable.id})`, + // }) + // .from(jobsTable) + // .where(eq(jobsTable.status, "disabled")) + // .then((res) => res[0].count as number); + // // + // const jobsEnabled = await getDrizzle() + // .select({ + // count: sql`COUNT(${jobsTable.id})`, + // }) + // .from(jobsTable) + // .where(eq(jobsTable.status, "enabled")) + // .then((res) => res[0].count as number); + // // + // return c.json({ + // success: true, + // data: { + // runnerMetrics: { + // runnersTotal, + // runnersStarting, + // runnersReady, + // runnersClosing, + // runnersClosed, + // runnersLoadTotal, + // runnersLoadAverage, + // lastRequestAt, + // }, + // jobsMetrics: { + // jobsTotal, + // jobsDisabled, + // jobsEnabled, + // }, + // uptime: getUnixTimestamp() - telemetry.getStartTime(), + // }, + // }); }); return app; diff --git a/packages/server/src/routes/oauth-admin.ts b/packages/server/src/routes/oauth-admin.ts new file mode 100644 index 0000000..67885f4 --- /dev/null +++ b/packages/server/src/routes/oauth-admin.ts @@ -0,0 +1,302 @@ +import { JobberPermissionsSchema } from "@jobber/common/permissions.js"; +import { Hono } 
from "hono"; +import { container } from "tsyringe"; +import { z } from "zod"; +import { oauthServiceClientModel } from "~/db/oauth-service-client.js"; +import { oauthSigningKeyModel } from "~/db/oauth-signing-key.js"; +import { + OauthServiceClientTableType, + OauthSigningKeyTableType, +} from "~/db/types.js"; +import { InternalHonoApp } from "~/index.js"; +import { createMiddlewareAuth } from "~/middleware/auth.js"; +import { OAuthServiceClients } from "~/service-clients.js"; +import { OAuthSigningKeys } from "~/signing-keys.js"; + +function censorKey(key: OauthSigningKeyTableType) { + return { + id: key.id, + parentId: key.parentId, + childId: key.childId, + + createdByUserId: key.createdByUserId, + + status: key.status, + + alg: key.alg, + use: key.use, + + publicKey: key.publicKey, + + expiresAt: key.expiresAt, + renewsAt: key.renewsAt, + createdAt: key.createdAt, + } as const; +} + +function censorServiceClient(client: OauthServiceClientTableType) { + return { + id: client.id, + clientId: client.clientId, + + name: client.name, + description: client.description, + + isSystemManaged: client.isSystemManaged, + + allowedAudiences: client.allowedAudiences, + allowedScopes: client.allowedScopes, + + permissions: client.permissions, + + enabled: client.enabled, + + expiresAt: client.expiresAt, + createdAt: client.createdAt, + } as const; +} + +export async function createRouteOAuthAdmin() { + const serviceClients = container.resolve(OAuthServiceClients); + const signingKeys = container.resolve(OAuthSigningKeys); + + const app = new Hono(); + + app.get("/oauth/signing-keys/", createMiddlewareAuth(), async (c) => { + const bouncer = c.get("bouncer")!; + + if (!bouncer.canReadOauthSigningKeyGenerally()) { + return c.json( + { success: false, message: "Insufficient Permissions" }, + 403, + ); + } + + const keys = await oauthSigningKeyModel.all(); + + const result = keys + .filter((key) => bouncer.canReadOauthSigningKey(key)) + .map(censorKey); + + return c.json({ + 
success: true, + data: result, + }); + }); + + app.get("/oauth/signing-keys/:id", createMiddlewareAuth(), async (c) => { + const bouncer = c.get("bouncer")!; + + const { id } = c.req.param(); + + const key = await oauthSigningKeyModel.byId(id); + + if (!key) { + return c.json({ success: false, message: "Key not found" }, 404); + } + + if (!bouncer.canReadOauthSigningKey(key)) { + return c.json( + { success: false, message: "Insufficient Permissions" }, + 403, + ); + } + + return c.json({ + success: true, + data: censorKey(key), + }); + }); + + app.put("/oauth/signing-keys/:id", createMiddlewareAuth(), async (c) => { + const bouncer = c.get("bouncer")!; + + const { id } = c.req.param(); + + const key = await oauthSigningKeyModel.byId(id); + + if (!key) { + return c.json({ success: false, message: "Key not found" }, 404); + } + + if (!bouncer.canWriteOauthSigningKey(key)) { + return c.json( + { success: false, message: "Insufficient Permissions" }, + 403, + ); + } + + const schema = z.object({ + status: z.enum(["active", "retiring", "inactive"]).optional(), + expiresAt: z.string().datetime().nullable().optional(), + }); + + const body = await schema.parseAsync(await c.req.json(), { + path: ["request", "body"], + }); + + await oauthSigningKeyModel.update(id, { + status: body.status, + expiresAt: body.expiresAt ? 
new Date(body.expiresAt) : undefined, + }); + + return c.json({ + success: true, + }); + }); + + app.post("/oauth/signing-keys/", createMiddlewareAuth(), async (c) => { + const bouncer = c.get("bouncer")!; + + if (!bouncer.canWriteOauthSigningKeyGenerally()) { + return c.json( + { success: false, message: "Insufficient Permissions" }, + 403, + ); + } + + const schema = z.object({ + alg: z.enum(["RS256"]), + use: z.enum(["sig", "enc"]), + + expiresAt: z.string().datetime().optional(), + renewsAt: z.string().datetime().optional(), + + parentId: z.string().optional(), + }); + + const body = await schema.parseAsync(await c.req.json(), { + path: ["request", "body"], + }); + + const key = await signingKeys.createSigningKey({ + alg: body.alg, + use: body.use, + + expiresAt: body.expiresAt ? new Date(body.expiresAt) : undefined, + renewsAt: body.renewsAt ? new Date(body.renewsAt) : undefined, + + parentId: body.parentId, + }); + + if (!key) { + return c.json( + { success: false, message: "Failed to create signing key" }, + 500, + ); + } + + return c.json({ + success: true, + data: censorKey(key), + }); + }); + + app.get("/oauth/service-client/", createMiddlewareAuth(), async (c) => { + const bouncer = c.get("bouncer")!; + const hideDisabled = (c.req.query("hide-disabled") ?? "true") === "true"; + + if (!bouncer.canReadOauthServiceClientGenerally()) { + return c.json( + { success: false, message: "Insufficient Permissions" }, + 403, + ); + } + + const serviceClients = hideDisabled + ? 
await oauthServiceClientModel.byEnabled() + : await oauthServiceClientModel.all(); + + const result = serviceClients + .filter((client) => bouncer.canReadOauthServiceClient(client)) + .map(censorServiceClient); + + return c.json({ + success: true, + data: result, + }); + }); + + app.get("/oauth/service-client/:id", createMiddlewareAuth(), async (c) => { + const bouncer = c.get("bouncer")!; + + const { id } = c.req.param(); + + const client = await oauthServiceClientModel.byId(id); + + if (!client) { + return c.json( + { success: false, message: "Service Client not found" }, + 404, + ); + } + + if (!bouncer.canReadOauthServiceClient(client)) { + return c.json( + { success: false, message: "Insufficient Permissions" }, + 403, + ); + } + + return c.json({ + success: true, + data: censorServiceClient(client), + }); + }); + + app.post("/oauth/service-client/", createMiddlewareAuth(), async (c) => { + const bouncer = c.get("bouncer")!; + + if (!bouncer.canWriteOauthServiceClientGenerally()) { + return c.json( + { success: false, message: "Insufficient Permissions" }, + 403, + ); + } + + const schema = z.object({ + name: z.string(), + description: z.string().optional(), + + allowedAudiences: z.array(z.string()).default([]), + allowedScopes: z.array(z.string()).default([]), + + permissions: z.lazy(() => JobberPermissionsSchema), + + expiresAt: z.string().datetime().optional(), + }); + + const body = await schema.parseAsync(await c.req.json(), { + path: ["request", "body"], + }); + + const { client, secret } = await serviceClients.upsertServiceClient({ + name: body.name, + description: body.description, + + allowedAudiences: body.allowedAudiences, + allowedScopes: body.allowedScopes, + + permissions: body.permissions, + + expiresAt: body.expiresAt ? 
new Date(body.expiresAt) : undefined, + }); + + if (!client) { + return c.json( + { success: false, message: "Failed to create service client" }, + 500, + ); + } + + return c.json({ + success: true, + data: { + client: censorServiceClient(client), + secret, + }, + }); + }); + + return app; +} diff --git a/packages/server/src/routes/oauth.ts b/packages/server/src/routes/oauth.ts new file mode 100644 index 0000000..1550559 --- /dev/null +++ b/packages/server/src/routes/oauth.ts @@ -0,0 +1,185 @@ +import bcrypt from "bcryptjs"; +import { Hono } from "hono"; +import { container } from "tsyringe"; +import { z } from "zod"; +import { getConfigOption } from "~/config.js"; +import { auditLogsModel } from "~/db/audit-log.js"; +import { oauthServiceClientModel } from "~/db/oauth-service-client.js"; +import { InternalHonoApp } from "~/index.js"; +import { createMiddlewareResponseTime } from "~/middleware/response-time.js"; +import { RateLimit } from "~/rate-limit.js"; +import { OAuthServiceClients } from "~/service-clients.js"; +import { OAuthSigningKeys } from "~/signing-keys.js"; +import { getAbsoluteUrl } from "~/util.js"; + +export async function createRouteOAuth() { + const rateLimit = container.resolve(RateLimit); + const oauthSigningKeys = container.resolve(OAuthSigningKeys); + const serviceClients = container.resolve(OAuthServiceClients); + + const app = new Hono(); + + app.get("/.well-known/openid-configuration", async (c) => { + return c.json({ + issuer: getConfigOption("OAUTH_ISSUER"), + token_endpoint: getAbsoluteUrl(c, "/oauth/token"), + jwks_uri: getAbsoluteUrl(c, "/.well-known/jwks.json"), + token_endpoint_auth_methods_supported: ["client_secret_basic"], + }); + }); + + app.get("/.well-known/jwks.json", async (c) => { + return c.json(await oauthSigningKeys.createJwksSet()); + }); + + app.post("/oauth/token", createMiddlewareResponseTime(2000), async (c) => { + const schema = z.object({ + grant_type: z.literal("client_credentials"), + client_id: z.string(), + 
client_secret: z.string(), + audience: z.string().optional(), + }); + + const body = await schema.parseAsync(await c.req.parseBody(), { + path: ["request", "body"], + }); + + const rateLimitKeys = { + global: () => `oauth-token-global`, + clientIdOk: (clientId: string) => `oauth-token-client-id-${clientId}`, + clientIdFail: (clientId: string) => `oauth-token-client-id-${clientId}`, + ip: (ip: string) => `oauth-token-ip-${ip.replace(/:/g, "-")}`, + } as const; + + rateLimit.increment(rateLimitKeys.global()); + + if (rateLimit.isRateLimited(rateLimitKeys.global(), 120)) { + await auditLogsModel.createServiceClientLog(body.client_id, { + type: "oauth-rate-limited", + clientId: body.client_id, + reason: "global", + }); + + return c.json({ error: "rate_limited" }, 429); + } + + if (rateLimit.isRateLimited(rateLimitKeys.clientIdOk(body.client_id), 20)) { + await auditLogsModel.createServiceClientLog(body.client_id, { + type: "oauth-rate-limited", + clientId: body.client_id, + reason: "client-id", + }); + + return c.json({ error: "rate_limited" }, 429); + } + + if ( + rateLimit.isRateLimited(rateLimitKeys.clientIdFail(body.client_id), 5) + ) { + await auditLogsModel.createServiceClientLog(body.client_id, { + type: "oauth-rate-limited", + clientId: body.client_id, + reason: "client-id", + }); + + return c.json({ error: "rate_limited" }, 429); + } + + const serviceClient = await oauthServiceClientModel.byClientId( + body.client_id, + ); + + if (!serviceClient) { + rateLimit.increment(rateLimitKeys.clientIdFail(body.client_id)); + + await auditLogsModel.createServiceClientLog(body.client_id, { + type: "oauth-invalid-client-id", + clientId: body.client_id, + }); + + return c.json({ error: "invalid_client" }, 401); + } + + if (!serviceClient.enabled) { + rateLimit.increment(rateLimitKeys.clientIdFail(body.client_id)); + + await auditLogsModel.createServiceClientLog(serviceClient.id, { + type: "oauth-disabled-client", + clientId: body.client_id, + }); + + return c.json({ error: 
"invalid_client" }, 401); + } + + if (serviceClient.expiresAt && serviceClient.expiresAt < new Date()) { + rateLimit.increment(rateLimitKeys.clientIdFail(body.client_id)); + + await auditLogsModel.createServiceClientLog(serviceClient.id, { + type: "oauth-expired-client", + clientId: body.client_id, + }); + + return c.json({ error: "invalid_client" }, 401); + } + + let isSecretValid = false; + + if (serviceClient.metadata.type === "client_secret_basic") { + const clientSecretDecoded = Buffer.from( + body.client_secret, + "base64", + ).toString("ascii"); + + isSecretValid = await bcrypt.compare( + clientSecretDecoded, + serviceClient.metadata.clientSecretHashed, + ); + } else if (serviceClient.metadata.type === "client_secret_basic_insecure") { + isSecretValid = + body.client_secret === serviceClient.metadata.clientSecret; + } else { + rateLimit.increment(rateLimitKeys.clientIdFail(body.client_id)); + + await auditLogsModel.createServiceClientLog(serviceClient.id, { + type: "oauth-unsupported-grant-type", + clientId: body.client_id, + grantType: serviceClient.metadata.type, + }); + + return c.json({ error: "invalid_client" }, 401); + } + + if (!isSecretValid) { + rateLimit.increment(rateLimitKeys.clientIdFail(body.client_id)); + + await auditLogsModel.createServiceClientLog(serviceClient.id, { + type: "oauth-invalid-client-secret", + clientId: body.client_id, + }); + + return c.json({ error: "invalid_client" }, 401); + } + + rateLimit.increment(rateLimitKeys.clientIdOk(body.client_id)); + + await auditLogsModel.createServiceClientLog(serviceClient.id, { + type: "oauth-valid-client", + clientId: body.client_id, + }); + + const tokenResult = await serviceClients.generateToken( + serviceClient, + body.audience, + ); + + return c.json({ + access_token: tokenResult.jwt, + token_type: "Bearer", + expires_in: Math.floor( + (tokenResult.expiration.getTime() - Date.now()) / 1000, + ), + }); + }); + + return app; +} diff --git a/packages/server/src/routes/user.ts 
b/packages/server/src/routes/user.ts index 31e2e13..64a0cba 100644 --- a/packages/server/src/routes/user.ts +++ b/packages/server/src/routes/user.ts @@ -4,11 +4,8 @@ import { Hono } from "hono"; import assert from "node:assert"; import { z } from "zod"; import { getDrizzle } from "~/db/index.js"; -import { - UserPasswordSchema, - usersTable, - UserUsernameSchema, -} from "~/db/schema/users.js"; +import { usersTable } from "~/db/schema.js"; +import { UserPasswordSchema, UserUsernameSchema } from "~/db/types.js"; import { InternalHonoApp } from "~/index.js"; import { withLock } from "~/lock.js"; import { createMiddlewareAuth } from "~/middleware/auth.js"; @@ -16,7 +13,7 @@ import { canPerformAction, JobberPermissions, JobberPermissionsSchema, -} from "~/permissions.js"; +} from "@jobber/common/permissions.js"; export async function createRouteUser() { const app = new Hono(); @@ -108,7 +105,7 @@ export async function createRouteUser() { if (existingUser) { return c.json( { success: false, message: "Username already exists" }, - 409 + 409, ); } @@ -183,7 +180,7 @@ export async function createRouteUser() { if (!bouncer.canWriteUserUsername(user)) { return c.json( { success: false, message: "Unauthorized to change username" }, - 403 + 403, ); } @@ -194,7 +191,7 @@ export async function createRouteUser() { if (!bouncer.canWriteUserPassword(user)) { return c.json( { success: false, message: "Unauthorized to change password" }, - 403 + 403, ); } @@ -206,7 +203,7 @@ export async function createRouteUser() { if (!bouncer.canWriteUserPermissions(user)) { return c.json( { success: false, message: "Unauthorized to change permissions" }, - 403 + 403, ); } diff --git a/packages/server/src/seeding/api-tokens.ts b/packages/server/src/seeding/api-tokens.ts new file mode 100644 index 0000000..d7bd431 --- /dev/null +++ b/packages/server/src/seeding/api-tokens.ts @@ -0,0 +1,72 @@ +import { JobberPermissions } from "@jobber/common/permissions.js"; +import assert from "node:assert"; 
+import { z } from "zod"; +import { userModel } from "~/db/user.js"; +import { defineSeed } from "./types.js"; +import { + extractPermissionFromSeedPermissionSchema, + SeedPermissionSchema, +} from "./utils.js"; +import { apiTokensModel } from "~/db/api-tokens.js"; + +export const seedApiTokens = defineSeed({ + name: "api-tokens", + payload: z + .array( + z.object({ + token: z.string().min(8), + permissions: SeedPermissionSchema, + ttl: z.number().optional(), + }), + ) + .optional(), + handler: async (payload) => { + assert(payload); + + for (const item of Object.values(payload)) { + const existing = await apiTokensModel.byToken(item.token); + + let expires: Date; + if (item.ttl) { + expires = new Date(Date.now() + item.ttl * 1000); + } else { + // Set expiration to 100 years in the future if ttl is not provided + expires = new Date(); + expires.setFullYear(expires.getFullYear() + 100); + } + + if (existing) { + apiTokensModel.update(existing.id, { + permissions: extractPermissionFromSeedPermissionSchema( + item.permissions, + ), + expires, + }); + + continue; + } else { + const anonymousUser = await userModel.byUsername("anonymous"); + + if (!anonymousUser) { + console.log( + // This should never happen, as the anonymous user is seeded by default + `Anonymous user not found, skipping seeding for API token with token ${item.token}.`, + ); + + continue; + } + + apiTokensModel.create({ + token: item.token, + permissions: extractPermissionFromSeedPermissionSchema( + item.permissions, + ), + expires, + userId: anonymousUser.id, + description: "Seeded API token", + status: "enabled", + }); + } + } + }, +}); diff --git a/packages/server/src/seeding/index.ts b/packages/server/src/seeding/index.ts new file mode 100644 index 0000000..a4365a9 --- /dev/null +++ b/packages/server/src/seeding/index.ts @@ -0,0 +1,54 @@ +import { z } from "zod"; +import { seedOauthClients } from "./oauth-clients.js"; +import { SeedSchemaMap } from "./types.js"; +import { getConfigOption } from 
"~/config.js"; +import { seedUsers } from "./users.js"; +import { seedApiTokens } from "./api-tokens.js"; + +const seeds = [seedOauthClients, seedUsers, seedApiTokens] as const; + +type SeedNames = (typeof seeds)[number]["name"]; + +type SchemaMap = SeedSchemaMap; + +export function getSeedNames(): SeedNames[] { + return seeds.map((seed) => seed.name); +} + +export function getSeedSchema() { + const shape = {} as { [K in keyof SchemaMap]: SchemaMap[K] }; + + for (const seed of seeds) { + (shape as any)[seed.name] = seed.payload; + } + + return z.object(shape); +} + +export async function seedsRun() { + const option = getConfigOption("SEED"); + + let runCount = 0; + + for (const [key, value] of Object.entries(option)) { + const seed = seeds.find((s) => s.name === key); + + if (!seed) { + console.warn(`No seed found for key: ${key}`); + continue; + } + + try { + runCount++; + + await seed.handler(value); + console.log(`Seed "${key}" executed successfully.`); + } catch (error) { + console.error(`Error executing seed "${key}":`, error); + } + } + + if (!runCount) { + console.log("No seeds to run."); + } +} diff --git a/packages/server/src/seeding/oauth-clients.ts b/packages/server/src/seeding/oauth-clients.ts new file mode 100644 index 0000000..fc81547 --- /dev/null +++ b/packages/server/src/seeding/oauth-clients.ts @@ -0,0 +1,52 @@ +import { + getOAuthAudienceGatewayApi, + getOAuthAudienceGeneralApi, + getOAuthAudienceRunnerApi, +} from "@jobber/common/oauth.js"; +import assert from "node:assert"; +import { z } from "zod"; +import { oauthServiceClientModel } from "~/db/oauth-service-client.js"; +import { defineSeed } from "./types.js"; +import { + extractPermissionFromSeedPermissionSchema, + SeedPermissionSchema, +} from "./utils.js"; + +export const seedOauthClients = defineSeed({ + name: "oauth-clients", + payload: z + .object({ + clientId: z.string().min(8), + clientSecret: z.string().min(16), + allowedAudiences: z + .string() + .array() + .default([ + 
getOAuthAudienceGatewayApi(), + getOAuthAudienceGeneralApi(), + getOAuthAudienceRunnerApi("*"), + ]), + permissions: SeedPermissionSchema.default({ type: "all" }), + }) + .optional(), + handler: async (payload) => { + assert(payload); + + await oauthServiceClientModel.upsert({ + clientId: payload.clientId, + metadata: { + type: "client_secret_basic_insecure", + clientSecret: payload.clientSecret, + }, + allowedAudiences: payload.allowedAudiences ?? [], + allowedScopes: [], + name: "Seeded Client", + description: "A client created from the seed script.", + enabled: true, + isSystemManaged: false, + permissions: extractPermissionFromSeedPermissionSchema( + payload.permissions, + ), + }); + }, +}); diff --git a/packages/server/src/seeding/types.ts b/packages/server/src/seeding/types.ts new file mode 100644 index 0000000..4e81b34 --- /dev/null +++ b/packages/server/src/seeding/types.ts @@ -0,0 +1,25 @@ +import { z, ZodOptional, ZodType } from "zod"; + +export type Seed< + TName extends string = string, + TPayload extends ZodType = ZodType, +> = { + name: TName; + payload: TPayload; + handler: (payload: unknown) => Promise; +}; + +export function defineSeed< + TName extends string, + TPayload extends ZodType, +>(seed: { + name: TName; + payload: TPayload; + handler: (payload: z.infer) => Promise; +}): Seed { + return seed; +} + +export type SeedSchemaMap = { + [K in T[number] as K["name"]]: K["payload"]; +}; diff --git a/packages/server/src/seeding/users.ts b/packages/server/src/seeding/users.ts new file mode 100644 index 0000000..b381e03 --- /dev/null +++ b/packages/server/src/seeding/users.ts @@ -0,0 +1,42 @@ +import { JobberPermissions } from "@jobber/common/permissions.js"; +import assert from "node:assert"; +import { z } from "zod"; +import { userModel } from "~/db/user.js"; +import { defineSeed } from "./types.js"; +import { + extractPermissionFromSeedPermissionSchema, + SeedPermissionSchema, +} from "./utils.js"; + +export const seedUsers = defineSeed({ + name: 
"users", + payload: z + .object({ + userPermissions: z.record(z.string(), SeedPermissionSchema), + }) + .optional(), + handler: async (payload) => { + assert(payload); + + for (const [username, seedPermission] of Object.entries( + payload.userPermissions, + )) { + const user = await userModel.byUsername(username); + + if (!user) { + console.log( + `User with username ${username} not found, skipping seeding for this user.`, + ); + + continue; + } + + const permissions: JobberPermissions = + extractPermissionFromSeedPermissionSchema(seedPermission); + + await userModel.update(user.id, { + permissions, + }); + } + }, +}); diff --git a/packages/server/src/seeding/utils.ts b/packages/server/src/seeding/utils.ts new file mode 100644 index 0000000..41e1633 --- /dev/null +++ b/packages/server/src/seeding/utils.ts @@ -0,0 +1,30 @@ +import { + JobberPermissions, + JobberPermissionsSchema, + PERMISSION_SUPER, +} from "@jobber/common/permissions.js"; +import { z } from "zod"; + +export const SeedPermissionSchema = z.union([ + z.object({ + type: z.literal("custom"), + permissions: JobberPermissionsSchema, + }), + z.object({ + type: z.literal("all"), + }), +]); + +export const extractPermissionFromSeedPermissionSchema = ( + input: z.infer, +): JobberPermissions => { + const permissions: JobberPermissions = []; + + if (input.type === "all") { + permissions.push(...PERMISSION_SUPER); + } else if (input.type === "custom") { + permissions.push(...input.permissions); + } + + return permissions; +}; diff --git a/packages/server/src/service-clients.ts b/packages/server/src/service-clients.ts new file mode 100644 index 0000000..a05834b --- /dev/null +++ b/packages/server/src/service-clients.ts @@ -0,0 +1,313 @@ +import { LoopBase } from "@jobber/common"; +import { singleton } from "tsyringe"; +import { secureRandomBytes } from "./util.js"; +import { genSalt as bcryptGenSalt, hash as bcryptHash } from "bcryptjs"; +import { oauthServiceClientModel } from "./db/oauth-service-client.js"; 
+import { + canOAuthAccessAudience, + getOAuthAudienceGeneralApi, + getOAuthAudienceRunnerApi, +} from "@jobber/common/oauth.js"; +import { oauthSigningKeyModel } from "./db/oauth-signing-key.js"; +import { createPrivateKey } from "node:crypto"; +import { getConfigOption } from "./config.js"; +import { SignJWT } from "jose"; +import assert from "node:assert"; +import { + JobsTableType, + OauthServiceClientTableInsertType, + OauthServiceClientTableType, +} from "./db/types.js"; +import { getDrizzle } from "./db/index.js"; +import { oauthServiceClientTable, runnersTable } from "./db/schema.js"; +import { and, eq, exists } from "drizzle-orm"; + +const CLIENT_ID_SYSTEM_CODE = `system-client-core`; +const SYSTEM_RESERVED_CLIENT_IDS = [CLIENT_ID_SYSTEM_CODE]; + +@singleton() +export class OAuthServiceClients extends LoopBase { + protected loopDuration = 60 * 1000; // 1 minute + + protected loopStarting = undefined; + protected loopStarted = undefined; + protected loopClosing = undefined; + protected loopClosed = undefined; + + private cachedSystemClientForServer: { + client: OauthServiceClientTableType; + secret: string; + } | null = null; + + protected async loopIteration() { + // await this.validateSigningKeys(); + await this.disableClosedRunnerServiceClients(); + } + + private async disableClosedRunnerServiceClients() { + // Disable service clients associated with closed runners. 
+ const updatedServiceClients = await getDrizzle() + .update(oauthServiceClientTable) + .set({ + enabled: false, + }) + .where( + and( + eq(oauthServiceClientTable.enabled, true), + eq(oauthServiceClientTable.isSystemManaged, true), + exists( + getDrizzle() + .select() + .from(runnersTable) + .where( + and( + eq( + runnersTable.oauthServiceClientId, + oauthServiceClientTable.id, + ), + eq(runnersTable.status, "closed"), + ), + ), + ), + ), + ) + .returning(); + + for (const item of updatedServiceClients) { + console.debug( + `[OAuthServiceClients/disableClosedRunnerServiceClients] Disabled ServiceClient for closed runner ${item.name}`, + ); + } + } + + public async upsertServiceClient( + data: Pick< + OauthServiceClientTableInsertType, + | "name" + | "description" + | "allowedAudiences" + | "allowedScopes" + | "enabled" + | "expiresAt" + | "isSystemManaged" + | "permissions" + > & { + clientId?: string; + }, + ) { + if ( + !data.isSystemManaged && + data.clientId && + SYSTEM_RESERVED_CLIENT_IDS.includes(data.clientId) + ) { + throw new Error( + `Client ID ${data.clientId} is reserved for system use. Please choose a different client ID.`, + ); + } + + const secretKey = secureRandomBytes(56); + const secretKeyAscii = secretKey.toString("ascii"); + const secretKeyEncoded = secretKey.toString("base64"); + const secretKeyHashed = await bcryptHash( + secretKeyAscii, + await bcryptGenSalt(9), + ); + + const clientId = + data.clientId ?? 
+ secureRandomBytes(36) + .toString("base64") + .replace(/\+/g, "-") + .replace(/\//g, "_") + .replace(/=+$/, ""); + + const client = await oauthServiceClientModel.upsert({ + ...data, + + clientId, + metadata: { + type: "client_secret_basic", + clientSecretHashed: secretKeyHashed, + }, + }); + + assert(client); + + return { + client, + secret: secretKeyEncoded, + }; + } + + /** + * Generates a system managed oauth token for runners to authenticate with dependencies + */ + public async getSystemClientForRunner(job: JobsTableType) { + const serviceClientRunner = await this.upsertServiceClient({ + name: `System Client for Runner ${job.jobName} (Runner -> Core)`, + description: `OAuth Service Client managed by the system for job ${job.jobName}`, + isSystemManaged: true, + allowedAudiences: [getOAuthAudienceGeneralApi()], + allowedScopes: [], + permissions: [ + { + // Allow runner to read job info for itself and other runners of the same job + effect: "allow", + resource: `job/${job.id}/runners`, + actions: ["read"], + }, + { + // Allow runner to read its own action info + effect: "allow", + resource: `job/${job.id}/actions`, + actions: ["read"], + }, + { + // Allow runner to read/write to its own store + effect: "allow", + resource: `job/${job.id}/store`, + actions: ["read", "write", "delete"], + }, + { + // Allow runner to read the current version (inclusive of archive file!) + effect: "allow", + resource: `job/${job.id}/versions/${job.jobVersionId}`, + actions: ["read"], + }, + { + // Allow runner to publish to MQTT topics for its own job + effect: "allow", + resource: `special/job/${job.id}/publish-mqtt`, + actions: ["write"], + }, + ], + }); + + return serviceClientRunner; + } + + /** + * Generates a system managed oauth token which allows the main server to communicate with its dependencies (runners, gateway, etc.) 
+ */ + public async getSystemClientForServer() { + if (!this.cachedSystemClientForServer) { + // TODO: Not sure I like this at all, breaks a lot of compatiblity if we want to move to the server + // running in parallel + this.cachedSystemClientForServer = await this.upsertServiceClient({ + clientId: CLIENT_ID_SYSTEM_CODE, + + name: `System Client for Core (Core -> Runners, Gateway, etc.)`, + description: `OAuth Service Client managed by the system for the core server to communicate with its dependencies`, + isSystemManaged: true, + allowedAudiences: [getOAuthAudienceRunnerApi("*")], + allowedScopes: [], + permissions: [ + { + // Allow core to invoke HTTP events + effect: "allow", + resource: `special/job/*/invoke-http-event`, + actions: ["write"], + }, + { + // Allow core to invoke MQTT events + effect: "allow", + resource: `special/job/*/invoke-mqtt-event`, + actions: ["write"], + }, + { + // Allow core to invoke CRON events + effect: "allow", + resource: `special/job/*/invoke-schedule-event`, + actions: ["write"], + }, + { + // Allow core to check runner status + effect: "allow", + resource: `special/job/*/runner-status`, + actions: ["read"], + }, + ], + }); + } + + return this.cachedSystemClientForServer; + } + + public async generateTokenForServer(audience: string) { + const serviceClientCore = await this.getSystemClientForServer(); + + assert(serviceClientCore.client); + + const tokenResult = await this.generateToken( + serviceClientCore.client, + audience, + ); + + return tokenResult; + } + + /** + * Generates a token + */ + public async generateToken( + serviceClient: OauthServiceClientTableType, + audience?: string, + ) { + // Set expiration to 10 minutes from now, or if the client is expiring within 10 minutes, set it to that expiration. 
+ let expiration = new Date(Date.now() + 10 * 60 * 1000); + if (serviceClient.expiresAt && serviceClient.expiresAt < expiration) { + expiration = serviceClient.expiresAt; + } + + let jti = `${serviceClient.id}-${Date.now()}`; + + const validKey = await oauthSigningKeyModel.getValidKey(); + + if (!validKey) { + console.error( + `[OAuthTokenRoute] No valid signing key found when trying to issue token for client ${serviceClient.id}`, + ); + + throw new Error("No valid signing key found"); + } + + const key = createPrivateKey({ + key: validKey.privateKeyEncrypted, + format: "pem", + passphrase: getConfigOption("SECRET_PASSPHRASE"), + }); + + const audiences: string[] = []; + + if (audience) { + if (canOAuthAccessAudience(audience, serviceClient.allowedAudiences)) { + audiences.push(audience); + } else { + throw new Error("Invalid audience"); + } + } else { + audiences.push(...serviceClient.allowedAudiences); + } + + const jwt = await new SignJWT({ + sub: serviceClient.id, + kid: validKey.id, + permissions: serviceClient.permissions, + typ: "JWT", + }) + .setProtectedHeader({ + alg: validKey.alg, + kid: validKey.id, + }) + .setIssuer(getConfigOption("OAUTH_ISSUER")) + .setAudience(audiences) + .setExpirationTime(expiration) + .setJti(jti) + .sign(key); + + return { + jwt, + expiration, + }; + } +} diff --git a/packages/server/src/signing-keys.ts b/packages/server/src/signing-keys.ts new file mode 100644 index 0000000..c226faa --- /dev/null +++ b/packages/server/src/signing-keys.ts @@ -0,0 +1,160 @@ +import { LoopBase } from "@jobber/common"; +import { singleton } from "tsyringe"; +import { oauthSigningKeyModel } from "./db/oauth-signing-key.js"; +import { generateKeyPair } from "node:crypto"; +import { promisify } from "node:util"; +import { getConfigOption } from "./config.js"; +import { exportJWK, importSPKI } from "jose"; +import { OauthSigningKeyTableInsertType } from "./db/types.js"; + +const generateKeyPairPromised = promisify(generateKeyPair); + +@singleton() 
+export class OAuthSigningKeys extends LoopBase { + protected loopDuration = 60 * 1000; // 1 minute + + protected loopStarting = undefined; + protected loopStarted = undefined; + protected loopClosing = undefined; + protected loopClosed = undefined; + + protected async loopIteration() { + await this.validateSigningKeys(); + } + + public async validateSigningKeys() { + const validKeys = await oauthSigningKeyModel.getValidKeys(); + + if (validKeys.length === 0) { + return await this.createSigningKey(); + } + + for (const key of validKeys) { + // If the key has expired, mark it as inactive + if ( + key.expiresAt && + new Date() > key.expiresAt && + key.status !== "inactive" + ) { + await oauthSigningKeyModel.update(key.id, { status: "inactive" }); + } + + // If the key needs to be rotated, create a new key and mark the old key as retiring + if ( + key.renewsAt && + new Date() > key.renewsAt && + key.status === "active" + ) { + // create a new key with the current key as the parent + const replacementKey = await this.createSigningKey({ + parentId: key.id, + createdByUserId: key.createdByUserId, + }); + + if (!replacementKey) { + console.warn( + `[OAuthSigningKeys/validateSigningKeys] Failed to create replacement key for key ${key.id}`, + ); + continue; + } + + await oauthSigningKeyModel.update(key.id, { + status: "retiring", + childId: replacementKey.id, + }); + } + } + } + + public async createSigningKey( + data?: Partial< + Pick< + OauthSigningKeyTableInsertType, + | "createdByUserId" + | "parentId" + | "childId" + | "use" + | "alg" + | "expiresAt" + | "renewsAt" + > + >, + ) { + console.log( + `[OAuthSigningKeys/createSigningKey] Creating new signing key with data: ${JSON.stringify( + data, + )}`, + ); + + const { privateKey, publicKey } = await generateKeyPairPromised("rsa", { + modulusLength: 2048, + publicKeyEncoding: { + type: "spki", + format: "pem", + }, + privateKeyEncoding: { + type: "pkcs8", + format: "pem", + cipher: "aes-256-cbc", + passphrase: 
getConfigOption("SECRET_PASSPHRASE"), + }, + }); + + return await oauthSigningKeyModel.create({ + parentId: data?.parentId, + createdByUserId: data?.createdByUserId, + + status: "active", + + alg: data?.alg ?? "RS256", + use: data?.use ?? "sig", + + privateKeyEncrypted: privateKey, + publicKey: publicKey, + + expiresAt: + data?.expiresAt ?? + new Date( + Date.now() + + getConfigOption("OAUTH_SIGNING_KEY_EXPIRE_IN_DAYS") * + 60 * + 60 * + 24 * + 1000, + ), + + renewsAt: + data?.renewsAt ?? + new Date( + Date.now() + + getConfigOption("OAUTH_SIGNING_KEY_ROTATE_IN_DAYS") * + 60 * + 60 * + 24 * + 1000, + ), + }); + } + + public async createJwksSet() { + const signedKeys = await oauthSigningKeyModel.getValidKeys(); + + const keys = await Promise.all( + signedKeys.map(async (key) => { + const publicKeyObject = await importSPKI(key.publicKey, key.alg); + + const jwk = await exportJWK(publicKeyObject); + + jwk.kid = key.id; + jwk.use = key.use; + jwk.alg = key.alg; + + return jwk; + }), + ); + + return { + keys: keys, + }; + } +} diff --git a/packages/server/src/util.ts b/packages/server/src/util.ts index 7ab2ebf..a99bd57 100644 --- a/packages/server/src/util.ts +++ b/packages/server/src/util.ts @@ -2,51 +2,15 @@ import { spawn } from "child_process"; import { hash, randomBytes } from "crypto"; import { createReadStream } from "fs"; import { stat } from "fs/promises"; +import { Context } from "hono"; import { tmpdir } from "os"; import path from "path"; import { Readable, Writable } from "stream"; import { ReadableStream } from "stream/web"; +import { getConfigOption } from "./config.js"; export const getUnixTimestamp = () => Math.round(Date.now() / 1000); -export const timeout = (ms: number) => - new Promise((resolve) => setTimeout(resolve, ms)); - -/** - * Awaits until the callback yields true - */ -export const awaitTruthy = async ( - callback: () => Promise, - timeoutMs: number = 30_000 -) => { - let startTime = Date.now(); - - let index = 0; - while (true) { - if 
(Date.now() - startTime > timeoutMs) { - return false; - } - - if (await callback()) { - return true; - } - - index++; - - if (index <= 10) { - await timeout(10); - } - - if (index > 10 && index <= 20) { - await timeout(20); - } - - if (index > 20) { - await timeout(100); - } - } -}; - export const sanitiseFilename = (filename: string) => { return filename.replaceAll(/[^0-9a-z-_ .]/gi, "").substring(0, 255); }; @@ -61,13 +25,13 @@ export const sanitiseSafeCharacters = (name: string) => { export const unzip = ( source: string, destination: string, - timeout: number = 60 + timeout: number = 60, ) => { return new Promise((resolve, reject) => { console.log( `[unzip] Extracting, source ${presentablePath( - source - )} destination ${presentablePath(destination)}` + source, + )} destination ${presentablePath(destination)}`, ); const logs: string[] = []; @@ -92,7 +56,7 @@ export const unzip = ( ], { stdio: "pipe", - } + }, ); proc.stderr.on("data", (data) => logs.push(data.toString())); @@ -146,7 +110,7 @@ export const getTmpFile = ({ extension = "", length = 16 }) => { export const handleReadableStreamPipe = ( source: ReadableStream, - destination: Writable + destination: Writable, ) => { return new Promise((resolve, reject) => { let resolved = false; @@ -215,7 +179,7 @@ export const fileExists = async (filename: string) => { export const createToken = (options: { prefix?: string; length?: number }) => { if (options.prefix) { return `${options.prefix}-${secureRandomBytes( - options.length ?? 16 + options.length ?? 
16, ).toString("hex")}`; } @@ -225,7 +189,7 @@ export const createToken = (options: { prefix?: string; length?: number }) => { export const shortenString = (input: string, maxLength = 20) => { if (input.length > maxLength) { return `${input.substring(0, maxLength - 5)}...${input.substring( - input.length - 5 + input.length - 5, )}`; } @@ -251,7 +215,7 @@ export const presentablePath = (path: string) => { export const readFileLines = ( filename: string, - callbackLine: (line: string) => void + callbackLine: (line: string) => void, ) => { return new Promise((resolve, reject) => { const stream = createReadStream(filename); @@ -297,3 +261,22 @@ export const secureRandomBytes = (length: number) => { crypto.getRandomValues(result); return Buffer.from(result); }; + +export const getAbsoluteUrl = (c: Context, path: string) => { + const proto = c.req.header("x-forwarded-proto") || "http"; + const host = c.req.header("x-forwarded-host") || c.req.header("host"); + + if (!host) { + throw new Error("Unable to determine host for absolute url"); + } + + if (getConfigOption("ALLOWED_HOSTS").length >= 1) { + const allowedHosts = getConfigOption("ALLOWED_HOSTS"); + + if (!allowedHosts.includes(host.toLowerCase())) { + throw new Error(`Host ${host} is not in allowed hosts list`); + } + } + + return `${proto}://${host}${path}`; +}; diff --git a/packages/server/tests/permissions.test.ts b/packages/server/tests/permissions.test.ts index 6d4454b..b61b557 100644 --- a/packages/server/tests/permissions.test.ts +++ b/packages/server/tests/permissions.test.ts @@ -1,4 +1,4 @@ -import { resourceMatches } from "../src/permissions"; +import { resourceMatches } from "../src/permissions.js"; import { describe, expect, it } from "vitest"; diff --git a/packages/server/tsconfig.json b/packages/server/tsconfig.json index 0b57a00..721d76a 100644 --- a/packages/server/tsconfig.json +++ b/packages/server/tsconfig.json @@ -12,12 +12,13 @@ "declaration": false, "types": ["node"], "outDir": "./dist", + 
"rootDir": "./src", "paths":{ "~/*": ["./src/*"] } }, "include": [ - "./src" + "src/**/*" ], "$schema": "https://json.schemastore.org/tsconfig", "display": "Recommended" diff --git a/packages/tcp-frame-socket/tsconfig.json b/packages/tcp-frame-socket/tsconfig.json index 5cfa7ab..6ffe955 100644 --- a/packages/tcp-frame-socket/tsconfig.json +++ b/packages/tcp-frame-socket/tsconfig.json @@ -1,5 +1,6 @@ { "compilerOptions": { + "experimentalDecorators": true, "inlineSourceMap": true, "target": "ES2022", "module": "NodeNext", @@ -8,16 +9,14 @@ "esModuleInterop": true, "skipLibCheck": true, "forceConsistentCasingInFileNames": true, - "declaration": true, + "declaration": false, "types": ["node"], + "rootDir": "./src", "outDir": "./dist", "paths":{ "~/*": ["./src/*"] } }, - "include": [ - "./src" - ], "$schema": "https://json.schemastore.org/tsconfig", "display": "Recommended" } \ No newline at end of file diff --git a/packages/web/package.json b/packages/web/package.json index 758bfda..ca3d84f 100644 --- a/packages/web/package.json +++ b/packages/web/package.json @@ -29,6 +29,7 @@ "vite": "^6.0.3" }, "dependencies": { + "@jobber/common": "workspace:*", "chart.js": "^4.4.9", "react-chartjs-2": "^5.3.0" } diff --git a/packages/web/src/api/api-tokens.ts b/packages/web/src/api/api-tokens.ts index f1e84de..948096f 100644 --- a/packages/web/src/api/api-tokens.ts +++ b/packages/web/src/api/api-tokens.ts @@ -1,4 +1,5 @@ -import { JobberGenericResponse, JobberPermissions } from "./common"; +import { JobberPermissions } from "@jobber/common/permissions.js"; +import { JobberGenericResponse } from "./common"; export type JobberApiToken = { id: string; @@ -23,7 +24,7 @@ export const getApiTokens = async (): Promise< }; export const getApiToken = async ( - tokenId: string + tokenId: string, ): Promise> => { const result = await fetch(`/api/api-tokens/${tokenId}`); @@ -33,7 +34,7 @@ export const getApiToken = async ( export const createApiToken = async ( permissions: JobberPermissions, 
description: string, - ttl: number + ttl: number, ): Promise> => { const result = await fetch(`/api/api-tokens/`, { method: "POST", @@ -52,7 +53,7 @@ export const updateApiToken = async ( permissions?: JobberPermissions; status?: "enabled" | "disabled"; description?: string; - } + }, ): Promise> => { const result = await fetch(`/api/api-tokens/${tokenId}`, { method: "PUT", @@ -66,7 +67,7 @@ export const updateApiToken = async ( }; export const deleteApiToken = async ( - tokenId: string + tokenId: string, ): Promise> => { const result = await fetch(`/api/api-tokens/${tokenId}`, { method: "DELETE", diff --git a/packages/web/src/api/audit-log.ts b/packages/web/src/api/audit-log.ts new file mode 100644 index 0000000..edcfc4a --- /dev/null +++ b/packages/web/src/api/audit-log.ts @@ -0,0 +1,46 @@ +import { JobberGenericResponse } from "./common"; + +export type JobberAuditLogSubject = + | { + type: "user"; + userId: string; + } + | { + type: "service-client"; + serviceClientId: string; + } + | { + type: "system"; + }; + +export type JobberAuditLogEntry = { + id: string; + + subject: JobberAuditLogSubject; + entry: + | { + type: "generic"; + message: string; + } + | { + // there is more + type: `oauth-${string}`; + clientId: string; + }; + + created: string; +}; + +export type JobberAuditLogData = { + data: []; + nextCursor: string | null; + prevCursor: string | null; +}; + +export const getAuditLogs = async ( + cursor?: string, +): Promise> => { + const result = await fetch(`/api/audit-log/?cursor=${cursor}`); + + return await result.json(); +}; diff --git a/packages/web/src/api/auth.ts b/packages/web/src/api/auth.ts index c5533f5..0b4e305 100644 --- a/packages/web/src/api/auth.ts +++ b/packages/web/src/api/auth.ts @@ -1,4 +1,5 @@ -import { JobberGenericResponse, JobberPermissions } from "./common"; +import { JobberPermissions } from "@jobber/common/permissions.js"; +import { JobberGenericResponse } from "./common"; export type JobberAuth = { permissions: 
JobberPermissions; @@ -16,7 +17,7 @@ export type JobberAuth = { export const createAuthLogin = async ( username: string, - password: string + password: string, ): Promise => { const result = await fetch("/api/auth/login", { method: "POST", @@ -34,7 +35,7 @@ export const createAuthLogin = async ( export const createAuthRegister = async ( username: string, - password: string + password: string, ): Promise => { const result = await fetch("/api/auth/register", { method: "POST", diff --git a/packages/web/src/api/common.ts b/packages/web/src/api/common.ts index 6e29ba8..a2a0440 100644 --- a/packages/web/src/api/common.ts +++ b/packages/web/src/api/common.ts @@ -8,9 +8,3 @@ export type JobberGenericResponse = success: false; message: string; }; - -export type JobberPermissions = Array<{ - effect: "allow" | "deny"; - resource: string; - actions: Array<"read" | "write" | "delete">; -}>; diff --git a/packages/web/src/api/oauth-admin.ts b/packages/web/src/api/oauth-admin.ts new file mode 100644 index 0000000..e1d9dc3 --- /dev/null +++ b/packages/web/src/api/oauth-admin.ts @@ -0,0 +1,137 @@ +import { JobberPermissions } from "@jobber/common/permissions.js"; +import { JobberGenericResponse } from "./common"; + +export type JobberOAuthSigningKey = { + id: string; + parentId: string | null; + childId: string | null; + + createdByUserId: string; + + status: "active" | "inactive" | "revoked"; + + alg: string; + use: string; + + publicKey: string; + + expiresAt: string | null; + renewsAt: string | null; + createdAt: string; +}; + +export type JobberOAuthServiceClient = { + id: string; + clientId: string; + + name: string; + description: string; + + isSystemManaged: boolean; + + allowedAudiences: string[]; + allowedScopes: string[]; + + permissions: JobberPermissions; + + enabled: boolean; + + expiresAt: string | null; + createdAt: string; +}; + +export const getOAuthSigningKeys = async (): Promise< + JobberGenericResponse +> => { + const result = await 
fetch(`/api/oauth/signing-keys/`); + + return result.json(); +}; + +export const getOAuthSigningKey = async ( + keyId: string, +): Promise> => { + const result = await fetch(`/api/oauth/signing-keys/${keyId}`); + + return result.json(); +}; + +export const updateOAuthSigningKey = async ( + keyId: string, + payload: { + status?: "active" | "inactive" | "revoked"; + expiresAt?: string | null; + }, +): Promise> => { + const result = await fetch(`/api/oauth/signing-keys/${keyId}`, { + method: "PUT", + headers: { + "Content-Type": "application/json", + }, + body: JSON.stringify(payload), + }); + + return result.json(); +}; + +export const createOAuthSigningKey = async (payload: { + alg: "RS256"; + use: "sig" | "enc"; + + expiresAt?: string | null; + renewsAt?: string | null; + + parentId?: string | null; +}): Promise> => { + const result = await fetch(`/api/oauth/signing-keys/`, { + method: "POST", + headers: { + "Content-Type": "application/json", + }, + body: JSON.stringify(payload), + }); + + return result.json(); +}; + +export const getOAuthServiceClients = async ( + hideDisabled = true, +): Promise> => { + const result = await fetch( + `/api/oauth/service-client/?hide-disabled=${hideDisabled ? 
"true" : "false"}`, + ); + + return result.json(); +}; + +export const getOAuthServiceClient = async ( + clientId: string, +): Promise> => { + const result = await fetch(`/api/oauth/service-client/${clientId}`); + + return result.json(); +}; + +export const createOAuthServiceClient = async (payload: { + name: string; + description?: string; + + allowedAudiences: string[]; + allowedScopes: string[]; + + permissions: JobberPermissions; + + expiresAt?: string | null; +}): Promise< + JobberGenericResponse<{ client: JobberOAuthServiceClient; secret: string }> +> => { + const result = await fetch(`/api/oauth/service-client/`, { + method: "POST", + headers: { + "Content-Type": "application/json", + }, + body: JSON.stringify(payload), + }); + + return result.json(); +}; diff --git a/packages/web/src/api/users.ts b/packages/web/src/api/users.ts index bebe360..ba57713 100644 --- a/packages/web/src/api/users.ts +++ b/packages/web/src/api/users.ts @@ -1,4 +1,5 @@ -import { JobberGenericResponse, JobberPermissions } from "./common"; +import { JobberPermissions } from "@jobber/common/permissions.js"; +import { JobberGenericResponse } from "./common"; export type JobberUser = { id: string; @@ -16,7 +17,7 @@ export const getUsers = async (): Promise< }; export const getUser = async ( - userId: string + userId: string, ): Promise> => { const result = await fetch(`/api/users/${userId}`); @@ -26,7 +27,7 @@ export const getUser = async ( export const createUser = async ( username: string, password: string, - permissions: JobberPermissions + permissions: JobberPermissions, ): Promise> => { const result = await fetch(`/api/users/`, { method: "POST", @@ -45,7 +46,7 @@ export const updateUser = async ( username?: string; password?: string; permissions?: JobberPermissions; - } + }, ): Promise> => { const result = await fetch(`/api/users/${userId}`, { method: "PUT", diff --git a/packages/web/src/components/permission-guard.tsx b/packages/web/src/components/permission-guard.tsx index 
0459181..520f3de 100644 --- a/packages/web/src/components/permission-guard.tsx +++ b/packages/web/src/components/permission-guard.tsx @@ -1,5 +1,6 @@ import { useContext } from "react"; import { AuthContext } from "../contexts/auth-context"; +import { JobberPermissionAction } from "@jobber/common/permissions.js"; export const PermissionGuardComponent = ({ children, @@ -8,7 +9,7 @@ export const PermissionGuardComponent = ({ }: { children: React.ReactNode; resource: string; - action: "read" | "write" | "delete"; + action: JobberPermissionAction; }) => { const { auth, canPerformAction } = useContext(AuthContext); diff --git a/packages/web/src/components/permissions-list-component.tsx b/packages/web/src/components/permissions-list-component.tsx index f4b009e..cf88eb8 100644 --- a/packages/web/src/components/permissions-list-component.tsx +++ b/packages/web/src/components/permissions-list-component.tsx @@ -1,11 +1,7 @@ -type Permission = { - effect: string; - resource: string; - actions: string[]; -}; +import { JobberPermissions } from "@jobber/common/permissions.js"; type PermissionsListComponentProps = { - permissions: Permission[]; + permissions: JobberPermissions; }; export const PermissionsListComponent = ({ diff --git a/packages/web/src/contexts/auth-context.tsx b/packages/web/src/contexts/auth-context.tsx index 7f63179..8be4f1e 100644 --- a/packages/web/src/contexts/auth-context.tsx +++ b/packages/web/src/contexts/auth-context.tsx @@ -8,6 +8,10 @@ import { import { createAuthLogin, getAuth, JobberAuth } from "../api/auth"; import { JobberGenericResponse } from "../api/common"; import { getConfig, JobberConfig } from "../api/config"; +import { + canPerformAction as canPerformActionOriginal, + JobberPermissionAction, +} from "@jobber/common/permissions.js"; export type AuthContextType = { initialised: boolean; @@ -15,12 +19,12 @@ export type AuthContextType = { auth: JobberAuth | null; canPerformAction: ( resource: string, - action: "read" | "write" | "delete" + 
action: "read" | "write" | "delete", ) => boolean; login: (username: string, password: string) => Promise; register: ( username: string, - password: string + password: string, ) => Promise; }; @@ -70,46 +74,18 @@ export const AuthProvider = ({ children }: { children: React.ReactNode }) => { }, []); const canPerformAction = useCallback( - (resource: string, action: "read" | "write" | "delete") => { + (resource: string, action: JobberPermissionAction) => { if (context.auth === null) { return false; } - for (const permission of context.auth.permissions) { - if (permission.effect !== "deny") { - continue; - } - - if (!permission.actions.includes(action)) { - continue; - } - - if (!resourceMatches(resource, permission.resource)) { - continue; - } - - return false; - } - - for (const permission of context.auth.permissions) { - if (permission.effect !== "allow") { - continue; - } - - if (!permission.actions.includes(action)) { - continue; - } - - if (!resourceMatches(resource, permission.resource)) { - continue; - } - - return true; - } - - return false; + return canPerformActionOriginal( + context.auth.permissions, + resource, + action, + ); }, - [context] + [context], ); const login = useCallback(async (username: string, password: string) => { diff --git a/packages/web/src/hooks/use-audit-logs.ts b/packages/web/src/hooks/use-audit-logs.ts new file mode 100644 index 0000000..ba62701 --- /dev/null +++ b/packages/web/src/hooks/use-audit-logs.ts @@ -0,0 +1,32 @@ +import { useEffect, useState } from "react"; +import { getAuditLogs, JobberAuditLogData } from "../api/audit-log"; + +export const useAuditLogs = (cursor: string) => { + const [auditLogs, setAuditLogs] = useState(null); + const [error, setError] = useState(null); + const [reloadFlag, setReloadFlag] = useState(0); + + const handleUpdate = () => { + getAuditLogs(cursor).then((res) => { + if (!res.success) { + setError("Failed to fetch audit logs"); + + console.error("Failed to fetch audit logs", res.message); + + 
return; + } + + setAuditLogs(res.data); + }); + }; + + const reload = () => { + setReloadFlag((prev) => prev + 1); + }; + + useEffect(() => { + handleUpdate(); + }, [cursor, reloadFlag]); + + return { auditLogs, auditLogError: error, reloadAuditLogs: reload }; +}; diff --git a/packages/web/src/hooks/use-service-client.ts b/packages/web/src/hooks/use-service-client.ts new file mode 100644 index 0000000..746fa21 --- /dev/null +++ b/packages/web/src/hooks/use-service-client.ts @@ -0,0 +1,40 @@ +import { useEffect, useState } from "react"; +import { + getOAuthServiceClient, + JobberOAuthServiceClient, +} from "../api/oauth-admin"; + +export const useServiceClient = (clientId: string) => { + const [serviceClient, setServiceClient] = + useState(null); + const [error, setError] = useState(null); + const [reloadFlag, setReloadFlag] = useState(0); + + const handleUpdate = () => { + getOAuthServiceClient(clientId).then((res) => { + if (!res.success) { + setError("Failed to fetch service client"); + + console.error("Failed to fetch service client", res.message); + + return; + } + + setServiceClient(res.data); + }); + }; + + const reload = () => { + setReloadFlag((prev) => prev + 1); + }; + + useEffect(() => { + handleUpdate(); + }, [reloadFlag, clientId]); + + return { + serviceClient, + serviceClientError: error, + reloadServiceClient: reload, + }; +}; diff --git a/packages/web/src/hooks/use-service-clients.ts b/packages/web/src/hooks/use-service-clients.ts new file mode 100644 index 0000000..08106ac --- /dev/null +++ b/packages/web/src/hooks/use-service-clients.ts @@ -0,0 +1,41 @@ +import { useEffect, useState } from "react"; +import { + getOAuthServiceClients, + JobberOAuthServiceClient, +} from "../api/oauth-admin"; + +export const useServiceClients = () => { + const [serviceClients, setServiceClients] = useState< + JobberOAuthServiceClient[] | null + >(null); + const [error, setError] = useState(null); + const [reloadFlag, setReloadFlag] = useState(0); + + const 
handleUpdate = () => { + getOAuthServiceClients().then((res) => { + if (!res.success) { + setError("Failed to fetch service clients"); + + console.error("Failed to fetch service clients", res.message); + + return; + } + + setServiceClients(res.data); + }); + }; + + const reload = () => { + setReloadFlag((prev) => prev + 1); + }; + + useEffect(() => { + handleUpdate(); + }, [reloadFlag]); + + return { + serviceClients, + serviceClientsError: error, + reloadServiceClients: reload, + }; +}; diff --git a/packages/web/src/hooks/use-signing-key.ts b/packages/web/src/hooks/use-signing-key.ts new file mode 100644 index 0000000..0ee5495 --- /dev/null +++ b/packages/web/src/hooks/use-signing-key.ts @@ -0,0 +1,34 @@ +import { useEffect, useState } from "react"; +import { getOAuthSigningKey, JobberOAuthSigningKey } from "../api/oauth-admin"; + +export const useSigningKey = (keyId: string) => { + const [signingKey, setSigningKey] = useState( + null, + ); + const [error, setError] = useState(null); + const [reloadFlag, setReloadFlag] = useState(0); + + const handleUpdate = () => { + getOAuthSigningKey(keyId).then((res) => { + if (!res.success) { + setError("Failed to fetch signing key"); + + console.error("Failed to fetch signing key", res.message); + + return; + } + + setSigningKey(res.data); + }); + }; + + const reload = () => { + setReloadFlag((prev) => prev + 1); + }; + + useEffect(() => { + handleUpdate(); + }, [reloadFlag, keyId]); + + return { signingKey, signingKeyError: error, reloadSigningKey: reload }; +}; diff --git a/packages/web/src/hooks/use-signing-keys.ts b/packages/web/src/hooks/use-signing-keys.ts new file mode 100644 index 0000000..6d1071b --- /dev/null +++ b/packages/web/src/hooks/use-signing-keys.ts @@ -0,0 +1,34 @@ +import { useEffect, useState } from "react"; +import { getOAuthSigningKeys, JobberOAuthSigningKey } from "../api/oauth-admin"; + +export const useSigningKeys = () => { + const [signingKeys, setSigningKeys] = useState< + JobberOAuthSigningKey[] 
| null + >(null); + const [error, setError] = useState(null); + const [reloadFlag, setReloadFlag] = useState(0); + + const handleUpdate = () => { + getOAuthSigningKeys().then((res) => { + if (!res.success) { + setError("Failed to fetch signing keys"); + + console.error("Failed to fetch signing keys", res.message); + + return; + } + + setSigningKeys(res.data); + }); + }; + + const reload = () => { + setReloadFlag((prev) => prev + 1); + }; + + useEffect(() => { + handleUpdate(); + }, [reloadFlag]); + + return { signingKeys, signingKeysError: error, reloadSigningKeys: reload }; +}; diff --git a/packages/web/src/pages/home/api-tokens/[tokenId]/edit.tsx b/packages/web/src/pages/home/api-tokens/[tokenId]/edit.tsx index 552db63..53fba12 100644 --- a/packages/web/src/pages/home/api-tokens/[tokenId]/edit.tsx +++ b/packages/web/src/pages/home/api-tokens/[tokenId]/edit.tsx @@ -1,11 +1,11 @@ import { MouseEvent, useEffect, useState } from "react"; import { Link, useParams } from "react-router-dom"; import { updateApiToken } from "../../../../api/api-tokens"; -import { JobberPermissions } from "../../../../api/common"; import { HomePageComponent } from "../../../../components/home-page-component"; import { PermissionGuardComponent } from "../../../../components/permission-guard"; import { TimeSinceComponent } from "../../../../components/time-since-component"; import { useApiToken } from "../../../../hooks/use-api-token"; +import { JobberPermissions } from "@jobber/common/permissions.js"; const STATUS_OPTIONS = [ { value: "enabled", label: "Enabled" }, @@ -19,10 +19,10 @@ const Component = () => { "enabled" | "disabled" | null >(null); const [payloadPermissions, setPayloadPermissions] = useState( - null + null, ); const [payloadDescription, setPayloadDescription] = useState( - null + null, ); const { apiToken } = useApiToken(tokenId); @@ -138,7 +138,7 @@ const Component = () => {
@@ -148,7 +148,7 @@ const Component = () => {
diff --git a/packages/web/src/pages/home/api-tokens/new.tsx b/packages/web/src/pages/home/api-tokens/new.tsx index 96f27b6..cf1a7f6 100644 --- a/packages/web/src/pages/home/api-tokens/new.tsx +++ b/packages/web/src/pages/home/api-tokens/new.tsx @@ -1,9 +1,9 @@ import { MouseEvent, useState } from "react"; import { Link } from "react-router-dom"; import { createApiToken } from "../../../api/api-tokens"; -import { JobberPermissions } from "../../../api/common"; import { HomePageComponent } from "../../../components/home-page-component"; import { PermissionGuardComponent } from "../../../components/permission-guard"; +import { JobberPermissions } from "@jobber/common/permissions.js"; const TTL_OPTIONS = [ { value: 300, label: "5 minutes" }, @@ -27,7 +27,7 @@ const DEFAULT_PERMISSIONS: JobberPermissions = [ const Component = () => { const [payloadTtl, setPayloadTtl] = useState(TTL_OPTIONS[5].value); const [payloadPermissions, setPayloadPermissions] = useState( - JSON.stringify(DEFAULT_PERMISSIONS, null, 2) + JSON.stringify(DEFAULT_PERMISSIONS, null, 2), ); const [payloadDescription, setPayloadDescription] = useState(""); @@ -69,7 +69,7 @@ const Component = () => { const result = await createApiToken( parsedPermissions, payloadDescription, - payloadTtl + payloadTtl, ); if (!result.success) { diff --git a/packages/web/src/pages/home/audit-log/landing.tsx b/packages/web/src/pages/home/audit-log/landing.tsx new file mode 100644 index 0000000..e70ce49 --- /dev/null +++ b/packages/web/src/pages/home/audit-log/landing.tsx @@ -0,0 +1,203 @@ +import { useState } from "react"; +import { HomePageComponent } from "../../../components/home-page-component"; +import { PermissionGuardComponent } from "../../../components/permission-guard"; +import { TimeSinceComponent } from "../../../components/time-since-component"; +import { useAuditLogs } from "../../../hooks/use-audit-logs"; +import { + JobberAuditLogEntry, + JobberAuditLogSubject, +} from "../../../api/audit-log"; + +const 
formatSubject = (subject: JobberAuditLogSubject): string => { + switch (subject.type) { + case "user": + return `User ${subject.userId.slice(0, 8)}…`; + case "service-client": + return `Service Client ${subject.serviceClientId.slice(0, 8)}…`; + case "system": + return "System"; + } +}; + +const formatEntry = (entry: JobberAuditLogEntry["entry"]): string => { + if (entry.type === "generic") { + return entry.message; + } + + return `${entry.type} (client: ${entry.clientId})`; +}; + +const Component = () => { + const [cursor, setCursor] = useState(""); + const { auditLogs, auditLogError } = useAuditLogs(cursor); + + return ( + + +
+
+
+

Audit Log

+

+ View system activity and changes +

+
+
+ +
+
+ + + + + + + + + + {auditLogs ? ( + auditLogs.data.length > 0 ? ( + auditLogs.data.map((log: JobberAuditLogEntry) => ( + + + + + + )) + ) : ( + + + + ) + ) : ( + + + + )} + +
+ Subject + + Event + + Time +
+ {formatSubject(log.subject)} + + {formatEntry(log.entry)} + + +
+ No audit log entries found. +
+ {auditLogError ? ( +
+ + + +

+ Error loading audit logs +

+

+ {auditLogError} +

+
+ ) : ( +
+ + + + +

Loading audit logs...

+
+ )} +
+
+ + {auditLogs && ( +
+ + +
+ )} +
+
+
+
+ ); +}; + +export default Component; diff --git a/packages/web/src/pages/home/index.tsx b/packages/web/src/pages/home/index.tsx index d47b875..4054ede 100644 --- a/packages/web/src/pages/home/index.tsx +++ b/packages/web/src/pages/home/index.tsx @@ -16,6 +16,11 @@ import TokensTokenIdEditComponent from "./api-tokens/[tokenId]/edit"; import TokensTokenIdLandingComponent from "./api-tokens/[tokenId]/landing"; import TokensComponent from "./api-tokens/landing"; import TokensNewComponent from "./api-tokens/new"; +import SigningKeysComponent from "./oauth/signing-keys/landing"; +import SigningKeyViewComponent from "./oauth/signing-keys/[signingKeyId]/view"; +import ServiceClientsComponent from "./oauth/service-clients/landing"; +import ServiceClientViewComponent from "./oauth/service-clients/[serviceClientId]/view"; +import ServiceClientNewComponent from "./oauth/service-clients/new"; import JobIdEnvironmentComponent from "./jobs/[jobId]/environment"; import JobIdLandingComponent from "./jobs/[jobId]/landing"; import JobIdLogsComponent from "./jobs/[jobId]/logs"; @@ -23,6 +28,7 @@ import JobIdMetricsComponent from "./jobs/[jobId]/metrics"; import JobIdStoreComponent from "./jobs/[jobId]/store"; import JobIdVersionsComponent from "./jobs/[jobId]/versions"; import LandingComponent from "./landing"; +import AuditLogComponent from "./audit-log/landing"; import UsersUserIdEditComponent from "./users/[userId]/edit"; import UsersUserIdLandingComponent from "./users/[userId]/landing"; import UsersComponent from "./users/landing"; @@ -205,6 +211,90 @@ const Component = () => { API Tokens + + + + + + + Signing Keys + + + + + + + + + Service Clients + + + + + + + + + Audit Log + + {sortedJobs && sortedJobs.length > 0 && ( @@ -273,6 +363,12 @@ export default { Component: LandingComponent, }, + // AUDIT LOG + { + path: "audit-log/", + Component: AuditLogComponent, + }, + // USERS { path: "users/", @@ -305,6 +401,30 @@ export default { Component: TokensTokenIdLandingComponent, }, + 
// OAUTH SIGNING KEYS + { + path: "oauth/signing-keys/", + Component: SigningKeysComponent, + }, + { + path: "oauth/signing-keys/:signingKeyId/", + Component: SigningKeyViewComponent, + }, + + // OAUTH SERVICE CLIENTS + { + path: "oauth/service-clients/", + Component: ServiceClientsComponent, + }, + { + path: "oauth/service-clients/new", + Component: ServiceClientNewComponent, + }, + { + path: "oauth/service-clients/:serviceClientId/", + Component: ServiceClientViewComponent, + }, + // JOBS { path: "job/:jobId/", diff --git a/packages/web/src/pages/home/jobs/[jobId]/store.tsx b/packages/web/src/pages/home/jobs/[jobId]/store.tsx index dbe068a..c724c15 100644 --- a/packages/web/src/pages/home/jobs/[jobId]/store.tsx +++ b/packages/web/src/pages/home/jobs/[jobId]/store.tsx @@ -167,10 +167,7 @@ export const Component = () => { - {canPerformAction( - `job/${job.id}/store/${item.id}`, - "read" - ) ? ( + {canPerformAction(`job/${job.id}/store`, "read") ? ( + + + + {/* Success Message */} + {result && result.success && ( +
+
+ + + +
+

+ Service Client Created Successfully! +

+

+ Make sure to copy the client secret now as you won't be + able to see it again. +

+ + {/* Client ID */} +
+
+ + Client ID + + +
+ + {result.clientId} + +
+ + {/* Client Secret */} +
+
+ + Client Secret + + +
+ + {result.clientSecret} + +
+ +
+ + View all service clients + + + + +
+
+
+
+ )} + + {/* Form Section */} +
+
+
+ + setPayloadName(e.target.value)} + placeholder="e.g., My Application" + className="w-full px-3 py-2 border border-gray-300 rounded-lg focus:ring-2 focus:ring-blue-500 focus:border-transparent bg-white text-gray-900" + /> +
+ +
+ + setPayloadDescription(e.target.value)} + placeholder="e.g., Backend service for processing jobs" + className="w-full px-3 py-2 border border-gray-300 rounded-lg focus:ring-2 focus:ring-blue-500 focus:border-transparent bg-white text-gray-900" + /> +
+ +
+ + +
+ +
+ +