diff --git a/.vscode/settings.json b/.vscode/settings.json
new file mode 100644
index 0000000..4ba1992
--- /dev/null
+++ b/.vscode/settings.json
@@ -0,0 +1,7 @@
+{
+ "protoc": {
+ "options": [
+ "--proto_path=${workspaceRoot}/packages/grpc/proto"
+ ]
+ }
+}
\ No newline at end of file
diff --git a/README.md b/README.md
index 7732230..eda3d52 100644
--- a/README.md
+++ b/README.md
@@ -38,7 +38,7 @@ The objective of this project is to provide nothing more than a basic interface
- `DATABASE_URL` Postgres connection URL. Example: `postgresql://user:pass@host/db`
- `JOBBER_NAME` The name of your jobber instance, should be unique per host.
- `MANAGER_PORT` Port that runner-manager server operates on. Default: 5211
-- `MANAGER_HOST` Host that runner-manager server operates on. Default: hostname()
+- `MANAGER_GRPC_HOST` Host that runner-manager server operates on. Default: hostname()
- `STARTUP_USERNAME` The administrator account username. Created at every startup. Has full permissions. If you change this after a previous start, it will create a NEW account, not update the previous account.
- `STARTUP_PASSWORD` The administrator account password.
diff --git a/packages/server/bruno/DELETE Job -> (name) -> Environment -> (name).bru b/bruno/DELETE Job -> (name) -> Environment -> (name).bru
similarity index 100%
rename from packages/server/bruno/DELETE Job -> (name) -> Environment -> (name).bru
rename to bruno/DELETE Job -> (name) -> Environment -> (name).bru
diff --git a/packages/server/bruno/DELETE Job -> (name).bru b/bruno/DELETE Job -> (name).bru
similarity index 100%
rename from packages/server/bruno/DELETE Job -> (name).bru
rename to bruno/DELETE Job -> (name).bru
diff --git a/packages/server/bruno/GET Job -> (name) -> Actions -> (id) -> Runners.bru b/bruno/GET Job -> (name) -> Actions -> (id) -> Runners.bru
similarity index 100%
rename from packages/server/bruno/GET Job -> (name) -> Actions -> (id) -> Runners.bru
rename to bruno/GET Job -> (name) -> Actions -> (id) -> Runners.bru
diff --git a/packages/server/bruno/GET Job -> (name) -> Actions.bru b/bruno/GET Job -> (name) -> Actions.bru
similarity index 100%
rename from packages/server/bruno/GET Job -> (name) -> Actions.bru
rename to bruno/GET Job -> (name) -> Actions.bru
diff --git a/packages/server/bruno/GET Job -> (name) -> Actions:latest.bru b/bruno/GET Job -> (name) -> Actions:latest.bru
similarity index 100%
rename from packages/server/bruno/GET Job -> (name) -> Actions:latest.bru
rename to bruno/GET Job -> (name) -> Actions:latest.bru
diff --git a/packages/server/bruno/GET Job -> (name) -> Debug.bru b/bruno/GET Job -> (name) -> Debug.bru
similarity index 100%
rename from packages/server/bruno/GET Job -> (name) -> Debug.bru
rename to bruno/GET Job -> (name) -> Debug.bru
diff --git a/packages/server/bruno/GET Job -> (name) -> Environment.bru b/bruno/GET Job -> (name) -> Environment.bru
similarity index 100%
rename from packages/server/bruno/GET Job -> (name) -> Environment.bru
rename to bruno/GET Job -> (name) -> Environment.bru
diff --git a/packages/server/bruno/GET Job -> (name) -> Logs.bru b/bruno/GET Job -> (name) -> Logs.bru
similarity index 100%
rename from packages/server/bruno/GET Job -> (name) -> Logs.bru
rename to bruno/GET Job -> (name) -> Logs.bru
diff --git a/packages/server/bruno/GET Job -> (name) -> Runners.bru b/bruno/GET Job -> (name) -> Runners.bru
similarity index 100%
rename from packages/server/bruno/GET Job -> (name) -> Runners.bru
rename to bruno/GET Job -> (name) -> Runners.bru
diff --git a/packages/server/bruno/GET Job -> (name) -> Triggers.bru b/bruno/GET Job -> (name) -> Triggers.bru
similarity index 100%
rename from packages/server/bruno/GET Job -> (name) -> Triggers.bru
rename to bruno/GET Job -> (name) -> Triggers.bru
diff --git a/packages/server/bruno/GET Job -> (name) -> Triggers:latest.bru b/bruno/GET Job -> (name) -> Triggers:latest.bru
similarity index 100%
rename from packages/server/bruno/GET Job -> (name) -> Triggers:latest.bru
rename to bruno/GET Job -> (name) -> Triggers:latest.bru
diff --git a/packages/server/bruno/GET Job > (name).bru b/bruno/GET Job > (name).bru
similarity index 100%
rename from packages/server/bruno/GET Job > (name).bru
rename to bruno/GET Job > (name).bru
diff --git a/packages/server/bruno/GET Jobs.bru b/bruno/GET Jobs.bru
similarity index 100%
rename from packages/server/bruno/GET Jobs.bru
rename to bruno/GET Jobs.bru
diff --git a/packages/server/bruno/POST Job -> (name) -> Environment -> (name).bru b/bruno/POST Job -> (name) -> Environment -> (name).bru
similarity index 100%
rename from packages/server/bruno/POST Job -> (name) -> Environment -> (name).bru
rename to bruno/POST Job -> (name) -> Environment -> (name).bru
diff --git a/packages/server/bruno/POST Jobs -> Publish.bru b/bruno/POST Jobs -> Publish.bru
similarity index 100%
rename from packages/server/bruno/POST Jobs -> Publish.bru
rename to bruno/POST Jobs -> Publish.bru
diff --git a/packages/server/bruno/PUT Job -> (name).bru b/bruno/PUT Job -> (name).bru
similarity index 100%
rename from packages/server/bruno/PUT Job -> (name).bru
rename to bruno/PUT Job -> (name).bru
diff --git a/packages/server/bruno/bruno.json b/bruno/bruno.json
similarity index 100%
rename from packages/server/bruno/bruno.json
rename to bruno/bruno.json
diff --git a/packages/server/bruno/environments/local.bru b/bruno/environments/local.bru
similarity index 100%
rename from packages/server/bruno/environments/local.bru
rename to bruno/environments/local.bru
diff --git a/docker-compose.yaml b/docker-compose.yaml
index b3b04b3..5abf25a 100644
--- a/docker-compose.yaml
+++ b/docker-compose.yaml
@@ -20,6 +20,7 @@ services:
- "jobber-db"
volumes:
- /var/run/docker.sock:/var/run/docker.sock
+ - /tmp/jobber-env:/tmp/jobber-env
- jobber-data:/app/config
environment:
DATABASE_URL: postgresql://pg-user:pg-pass@jobber-db/database
diff --git a/docker/gateway.Dockerfile b/docker/gateway.Dockerfile
new file mode 100644
index 0000000..0e325de
--- /dev/null
+++ b/docker/gateway.Dockerfile
@@ -0,0 +1,22 @@
+FROM node:24-slim AS base
+ENV PNPM_HOME="/pnpm"
+ENV PATH="$PNPM_HOME:$PATH"
+WORKDIR /app
+RUN corepack enable && corepack prepare pnpm@10.15.1 --activate
+
+
+FROM base AS build
+COPY . /repo
+WORKDIR /repo
+RUN apt update \
+ && apt install protobuf-compiler --no-install-recommends -y \
+ && pnpm install --frozen-lockfile \
+ && pnpm run -r build \
+ && pnpm --prod --filter=@jobber/gateway --node-linker hoisted deploy /app
+
+
+
+FROM base
+WORKDIR /app
+COPY --from=build /app /app
+ENTRYPOINT ["node", "./dist/index.js"]
\ No newline at end of file
diff --git a/docker/node-20.Dockerfile b/docker/node-20.Dockerfile
index 7b57a84..3b5db6a 100644
--- a/docker/node-20.Dockerfile
+++ b/docker/node-20.Dockerfile
@@ -13,7 +13,9 @@ RUN apt update \
FROM base AS build
COPY . /repo
WORKDIR /repo
-RUN pnpm install --frozen-lockfile \
+RUN apt update \
+ && apt install protobuf-compiler --no-install-recommends -y \
+ && pnpm install --frozen-lockfile \
&& pnpm run -r build \
&& pnpm --prod --filter=@jobber/runner-node-entrypoint --node-linker hoisted deploy /app
@@ -21,4 +23,4 @@ RUN pnpm install --frozen-lockfile \
FROM base
WORKDIR /app
-COPY --from=build /app/dist/index.js /app/jobber-entrypoint.js
+COPY --from=build /app/dist/esm/ /app/
diff --git a/docker/node-22.Dockerfile b/docker/node-22.Dockerfile
index 8344a12..c411fe5 100644
--- a/docker/node-22.Dockerfile
+++ b/docker/node-22.Dockerfile
@@ -13,7 +13,9 @@ RUN apt update \
FROM base AS build
COPY . /repo
WORKDIR /repo
-RUN pnpm install --frozen-lockfile \
+RUN apt update \
+ && apt install protobuf-compiler --no-install-recommends -y \
+ && pnpm install --frozen-lockfile \
&& pnpm run -r build \
&& pnpm --prod --filter=@jobber/runner-node-entrypoint --node-linker hoisted deploy /app
@@ -21,4 +23,4 @@ RUN pnpm install --frozen-lockfile \
FROM base
WORKDIR /app
-COPY --from=build /app/dist/index.js /app/jobber-entrypoint.js
+COPY --from=build /app/dist/esm/ /app/
diff --git a/docker/node-24.Dockerfile b/docker/node-24.Dockerfile
index 16ac7f0..c65ea09 100644
--- a/docker/node-24.Dockerfile
+++ b/docker/node-24.Dockerfile
@@ -13,7 +13,9 @@ RUN apt update \
FROM base AS build
COPY . /repo
WORKDIR /repo
-RUN pnpm install --frozen-lockfile \
+RUN apt update \
+ && apt install protobuf-compiler --no-install-recommends -y \
+ && pnpm install --frozen-lockfile \
&& pnpm run -r build \
&& pnpm --prod --filter=@jobber/runner-node-entrypoint --node-linker hoisted deploy /app
@@ -21,4 +23,4 @@ RUN pnpm install --frozen-lockfile \
FROM base
WORKDIR /app
-COPY --from=build /app/dist/index.js /app/jobber-entrypoint.js
+COPY --from=build /app/dist/esm/ /app/
diff --git a/docker/server.Dockerfile b/docker/server.Dockerfile
index 8afd6da..8ef83df 100644
--- a/docker/server.Dockerfile
+++ b/docker/server.Dockerfile
@@ -27,7 +27,9 @@ FROM base AS build
COPY . /repo
WORKDIR /repo
-RUN pnpm install --frozen-lockfile \
+RUN apt update \
+ && apt install protobuf-compiler --no-install-recommends -y \
+ && pnpm install --frozen-lockfile \
&& pnpm run -r build \
&& pnpm --prod --filter=@jobber/server --node-linker hoisted deploy /app \
&& mkdir /app/public/ \
diff --git a/docs/environment-variables.md b/docs/environment-variables.md
index b8372b1..43b3d5d 100644
--- a/docs/environment-variables.md
+++ b/docs/environment-variables.md
@@ -28,7 +28,7 @@ This document describes all environment variables used to configure Jobber.
- `MANAGER_PORT` - Port for the runner-manager server
**Default:** `5211`
-- `MANAGER_HOST` - Host address for the runner-manager server
+- `MANAGER_GRPC_HOST` - Host address for the runner-manager server
**Default:** `hostname()`
## Authentication
@@ -58,7 +58,7 @@ This document describes all environment variables used to configure Jobber.
**Default:** eithan1231/runner-node-20:latest
- `RUNNER_CONTAINER_DOCKER_NETWORK` - Docker network for runner containers
- **Note:** Must have access to `MANAGER_HOST`
+ **Note:** Must have access to `MANAGER_GRPC_HOST`
- `RUNNER_ALLOW_DOCKER_ARGUMENT_TYPES` - Permitted Docker argument types for projects
**Values:** `volumes`, `networks`, `labels`, `memoryLimit`, `directPassthroughArguments`
diff --git a/docs/permissions.md b/docs/permissions.md
index ad24df2..20a1e16 100644
--- a/docs/permissions.md
+++ b/docs/permissions.md
@@ -28,7 +28,7 @@ Example resource pattern: `job/*/actions`
#### Job -> Store
-- `job/:jobId/store/:storeId` READ/DELETE
+- `job/:jobId/store` READ/WRITE/DELETE
#### Job -> Triggers
diff --git a/e2e/config/nanomq.conf b/e2e/config/nanomq.conf
new file mode 100644
index 0000000..48291c0
--- /dev/null
+++ b/e2e/config/nanomq.conf
@@ -0,0 +1,3 @@
+listeners.tcp {
+ bind = "0.0.0.0:1883"
+}
diff --git a/e2e/docker-compose.yaml b/e2e/docker-compose.yaml
new file mode 100644
index 0000000..6140039
--- /dev/null
+++ b/e2e/docker-compose.yaml
@@ -0,0 +1,106 @@
+networks:
+ internal:
+ driver: bridge
+ runner:
+ driver: bridge
+ mqtt-network:
+ driver: bridge
+
+name: "jobber-test"
+
+services:
+ postgres:
+ image: postgres:14
+ restart: unless-stopped
+ networks:
+ - internal
+ environment:
+ - POSTGRES_USER=jobber-username
+ - POSTGRES_PASSWORD=jobber-password
+ - POSTGRES_DB=jobber-database
+
+ mqtt:
+ image: emqx/nanomq:0.24.6-slim
+ restart: unless-stopped
+ networks:
+ - mqtt-network
+ volumes:
+ - ./config/nanomq.conf:/etc/nanomq.conf:ro
+
+ server:
+ depends_on:
+ - postgres
+ - mqtt
+ labels:
+ - "jobber-discovery=server"
+ image: jobber-e2e-server
+ build:
+ context: ../
+ dockerfile: docker/server.Dockerfile
+ restart: unless-stopped
+ ports:
+ - 5000:5000
+ networks:
+ - internal
+ - runner
+ - mqtt-network
+ volumes:
+ - /tmp/jobber-env:/tmp/jobber-env
+ - /var/run/docker.sock:/var/run/docker.sock
+ environment:
+ SECRET_PASSPHRASE: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
+
+ JOBBER_NAME: "JobberE2E"
+
+ ALLOWED_HOSTS: "localhost,localhost:5000,127.0.0.1:5000,server:5000"
+
+ DATABASE_URL: "postgresql://jobber-username:jobber-password@postgres:5432/jobber-database"
+
+ ALLOW_PUBLIC_REGISTRATION: "true"
+ AUTH_PUBLIC_LOGIN_ENABLED: "true"
+
+ OAUTH_ISSUER: "http://localhost:5000"
+
+ DEBUG_HTTP: "true"
+ DEBUG_RUNNER: "true"
+
+ API_PORT: "5000"
+
+ MANAGER_GRPC_PORT: "5001"
+ MANAGER_GRPC_HOST: "server"
+
+ # "{compose_name}_{network_name}" is the convention for compose networks
+ RUNNER_CONTAINER_DOCKER_NETWORK: "jobber-test_runner"
+ RUNNER_ALLOW_DOCKER_ARGUMENT_TYPES: "volumes,networks,labels,memoryLimit,directPassthroughArguments"
+ RUNNER_ALLOW_ARGUMENT_DIRECT_PASSTHROUGH: "true"
+
+ RUNNER_IMAGE_NODE20_URL: "jobber-e2e-runner:20-latest"
+ RUNNER_IMAGE_NODE22_URL: "jobber-e2e-runner:22-latest"
+ RUNNER_IMAGE_NODE24_URL: "jobber-e2e-runner:24-latest"
+
+ # Seed data required for e2e tests
+ # NOTE: This will be INSECURE! Do not use this format for production!
+ SEED: '{"oauth-clients": {"clientId": "e2e-client", "clientSecret": "secret-secret-secret-secret-secret"}, "api-tokens": [{"token": "super-power-anonymous-token", "permissions": { "type": "all" }}]}'
+
+ gateway:
+ depends_on:
+ - server
+ image: jobber-e2e-gateway
+ build:
+ context: ../
+ dockerfile: docker/gateway.Dockerfile
+ restart: unless-stopped
+ networks:
+ - runner
+ environment:
+ PORT: "5002"
+
+ GRPC_ENDPOINT: "https://server:5001"
+
+ OIDC_ISSUER_URL: "http://localhost:5000"
+ OIDC_DISCOVERY_URL: "http://server:5000/.well-known/openid-configuration"
+
+ OAUTH_CLIENT_ID: "e2e-client"
+ OAUTH_CLIENT_SECRET: "secret-secret-secret-secret-secret"
+ ports:
+ - 5002:5002
diff --git a/e2e/test.sh b/e2e/test.sh
new file mode 100755
index 0000000..ad21b77
--- /dev/null
+++ b/e2e/test.sh
@@ -0,0 +1,18 @@
+#!/bin/bash
+
+
+sudo docker build -f docker/node-20.Dockerfile -t jobber-e2e-runner:20-latest .
+sudo docker build -f docker/node-22.Dockerfile -t jobber-e2e-runner:22-latest .
+sudo docker build -f docker/node-24.Dockerfile -t jobber-e2e-runner:24-latest .
+
+docker compose -f e2e/docker-compose.yaml up -d --build
+
+# sleep a few seconds to allow processes to startup
+sleep 5
+
+# Run tests
+bash e2e/tests/test-runner-basics.sh
+bash e2e/tests/test-common-js.sh
+bash e2e/tests/test-run-once.sh
+
+docker compose -f e2e/docker-compose.yaml down
\ No newline at end of file
diff --git a/e2e/tests/test-common-js.sh b/e2e/tests/test-common-js.sh
new file mode 100644
index 0000000..dec439e
--- /dev/null
+++ b/e2e/tests/test-common-js.sh
@@ -0,0 +1,16 @@
+#!/bin/bash
+
+cd examples/http-javascript-cjs
+./publish.sh super-power-anonymous-token http://localhost:5000
+cd ../../
+
+sleep 3
+
+RESPONSE=$(curl -s "http://localhost:5002/http-javascript-cjs")
+if echo "$RESPONSE" | grep -q "path.join example from commonjs"; then
+ echo "PASS: Successfully received expected response from http-javascript-cjs job"
+else
+ echo "FAIL: Failed to receive expected response from http-javascript-cjs job"
+ echo "Actual response: $RESPONSE"
+ exit 1
+fi
diff --git a/e2e/tests/test-run-once.sh b/e2e/tests/test-run-once.sh
new file mode 100644
index 0000000..e074921
--- /dev/null
+++ b/e2e/tests/test-run-once.sh
@@ -0,0 +1,16 @@
+#!/bin/bash
+
+cd examples/http-javascript-run-once
+./publish.sh super-power-anonymous-token http://localhost:5000
+cd ../../
+
+sleep 3
+
+RESPONSE=$(curl -s "http://localhost:5002/http-javascript-run-once")
+if echo "$RESPONSE" | grep -q "run-once-response"; then
+ echo "PASS: Successfully received expected response from http-javascript-run-once job"
+else
+ echo "FAIL: Failed to receive expected response from http-javascript-run-once job"
+ echo "Actual response: $RESPONSE"
+ exit 1
+fi
diff --git a/e2e/tests/test-runner-basics.sh b/e2e/tests/test-runner-basics.sh
new file mode 100644
index 0000000..599d629
--- /dev/null
+++ b/e2e/tests/test-runner-basics.sh
@@ -0,0 +1,65 @@
+#!/bin/bash
+
+cd examples/e2e
+./publish.sh super-power-anonymous-token http://localhost:5000
+cd ../../
+
+sleep 3
+
+curl -s "http://localhost:5002/e2e?action=set-state&value=my-test-value" > /dev/null
+GET_STORE_RESPONSE=$(curl -s "http://localhost:5002/e2e?action=get-state")
+if echo "$GET_STORE_RESPONSE" | grep -q "my-test-value"; then
+ echo "PASS: Successfully set and got state value"
+else
+ echo "FAIL: Failed to set and get state value"
+ echo "Actual response: $GET_STORE_RESPONSE"
+ exit 1
+fi
+
+
+sleep 1
+
+curl -s "http://localhost:5002/e2e?action=mqtt" > /dev/null
+
+sleep 1
+
+
+RESPONSE=$(curl -s "http://localhost:5002/e2e")
+
+# Check that bootstrap is true
+
+if echo "$RESPONSE" | grep -q '"bootstrap":true'; then
+ echo "PASS: Bootstrap is true"
+else
+ echo "FAIL: Bootstrap is not true"
+ echo "Actual response: $RESPONSE"
+ exit 1
+fi
+
+if echo "$RESPONSE" | grep -q '"lastScheduleRecent":true'; then
+ echo "PASS: Schedule Recent is true"
+else
+ echo "FAIL: Schedule Recent is not true"
+ echo "Actual response: $RESPONSE"
+ exit 1
+fi
+
+
+if echo "$RESPONSE" | grep -q '"lastMqttRecent":true'; then
+ echo "PASS: MQTT Recent is true"
+else
+ echo "FAIL: MQTT Recent is not true"
+ echo "Actual response: $RESPONSE"
+ exit 1
+fi
+
+
+# hang response status code should be 204
+HANG_RESPONSE=$(curl -s "http://localhost:5002/e2e?action=hang")
+if [ -z "$HANG_RESPONSE" ]; then
+ echo "PASS: Hang response is empty as expected"
+else
+ echo "FAIL: Hang response is not empty"
+ echo "Actual response: $HANG_RESPONSE"
+ exit 1
+fi
diff --git a/examples/e2e/package-lock.json b/examples/e2e/package-lock.json
new file mode 100644
index 0000000..9579ea4
--- /dev/null
+++ b/examples/e2e/package-lock.json
@@ -0,0 +1,13 @@
+{
+ "name": "e2e",
+ "version": "1.0.0",
+ "lockfileVersion": 3,
+ "requires": true,
+ "packages": {
+ "": {
+ "name": "e2e",
+ "version": "1.0.0",
+ "license": "SEE LICENSE IN ../../"
+ }
+ }
+}
diff --git a/examples/e2e/package.json b/examples/e2e/package.json
new file mode 100644
index 0000000..dcb06db
--- /dev/null
+++ b/examples/e2e/package.json
@@ -0,0 +1,43 @@
+{
+ "name": "e2e",
+ "version": "1.0.0",
+ "description": "HTTP Javascript Example",
+ "license": "SEE LICENSE IN ../../",
+ "author": "Eithan",
+ "type": "module",
+ "main": "./src/index.js",
+ "scripts": {
+ "test": "echo \"Error: no test specified\" && exit 1"
+ },
+ "action": {
+ "runnerAsynchronous": true,
+ "runnerMinCount": 1,
+ "runnerMaxCount": 1,
+ "runnerMaxAge": 0,
+ "runnerMaxIdleAge": 0,
+ "runnerMaxAgeHard": 0,
+ "runnerMode": "standard"
+ },
+ "triggers": [
+ {
+ "type": "http",
+ "name": "http-javascript-trigger",
+ "path": "/e2e",
+ "method": "GET"
+ },
+ {
+ "type": "mqtt",
+ "topics": [
+ "#"
+ ],
+ "connection": {
+ "protocol": "mqtt",
+ "host": "mqtt"
+ }
+ },
+ {
+ "type": "schedule",
+ "cron": "* * * * * *"
+ }
+ ]
+}
diff --git a/examples/e2e/publish.sh b/examples/e2e/publish.sh
new file mode 100755
index 0000000..86ceeb5
--- /dev/null
+++ b/examples/e2e/publish.sh
@@ -0,0 +1,22 @@
+#!/bin/bash
+
+# Install dependencies using NPM (issues with pnpm and mono repos, had to use npm)
+npm install > /dev/null
+
+# Archive essential files
+zip -rvq archive.zip ./package.json ./src > /dev/null
+
+# Get token (first argument) and base url (second argument, defaults to localhost:3000)
+TOKEN=${1}
+BASE_URL=${2:-http://localhost:3000}
+
+# Upload to Jobber
+curl \
+ --silent \
+ --request POST \
+ --url "$BASE_URL/api/job/publish/" \
+ --header 'content-type: multipart/form-data' \
+ --header "Authorization: Bearer $TOKEN" \
+ --form 'archive=@archive.zip;type=application/zip' > /dev/null
+
+rm archive.zip
\ No newline at end of file
diff --git a/examples/e2e/src/index.js b/examples/e2e/src/index.js
new file mode 100644
index 0000000..b808cb4
--- /dev/null
+++ b/examples/e2e/src/index.js
@@ -0,0 +1,65 @@
+const unixTimestamp = () => Math.floor(Date.now() / 1000);
+
+const myState = {
+ bootstrap: false,
+ lastSchedule: 0,
+ lastMqtt: 0,
+};
+
+export const handlerHttp = async (context) => {
+ const action = context.request.query("action");
+
+ if (action === "hang") {
+ // It gives no response, it just hangs.
+ return;
+ }
+
+ if (action === "mqtt") {
+ await context.publish("ping", "this is pretty cool");
+
+ return context.response.text("published to mqtt!");
+ }
+
+ if (action === "set-state") {
+ const key = "test-key";
+
+ const value = context.request.query("value");
+
+ if (!value) {
+ return context.response.text("Missing 'value' query parameter");
+ }
+
+ await globalThis.jobber.setStore(key, value);
+
+ return await context.response.text("set!");
+ }
+
+ if (action === "get-state") {
+ const key = "test-key";
+
+ return await context.response.text(await globalThis.jobber.getStore(key));
+ }
+
+ context.response.json({
+ bootstrap: myState.bootstrap,
+ lastScheduleRecent: myState.lastSchedule + 60 > unixTimestamp(),
+ lastMqttRecent: myState.lastMqtt + 60 > unixTimestamp(),
+ });
+};
+
+export const handlerMqtt = async (context) => {
+ myState.lastMqtt = unixTimestamp();
+ if (context.topic === "ping") {
+ await context.publish("pong", "Hello from Jobber MQTT JavaScript Example!");
+ }
+};
+
+export const handlerSchedule = async (context) => {
+ myState.lastSchedule = unixTimestamp();
+};
+
+export const bootstrap = async (context) => {
+ console.log("Bootstrap function called with context:", context);
+
+ myState.bootstrap = true;
+};
diff --git a/examples/http-javascript-cjs/package-lock.json b/examples/http-javascript-cjs/package-lock.json
new file mode 100644
index 0000000..eebad91
--- /dev/null
+++ b/examples/http-javascript-cjs/package-lock.json
@@ -0,0 +1,13 @@
+{
+ "name": "http-javascript-cjs",
+ "version": "1.0.0",
+ "lockfileVersion": 3,
+ "requires": true,
+ "packages": {
+ "": {
+ "name": "http-javascript-cjs",
+ "version": "1.0.0",
+ "license": "SEE LICENSE IN ../../"
+ }
+ }
+}
diff --git a/examples/http-javascript-cjs/package.json b/examples/http-javascript-cjs/package.json
new file mode 100644
index 0000000..6af499e
--- /dev/null
+++ b/examples/http-javascript-cjs/package.json
@@ -0,0 +1,29 @@
+{
+ "name": "http-javascript-cjs",
+ "version": "1.0.0",
+ "description": "HTTP CommonJS Javascript Example",
+ "license": "SEE LICENSE IN ../../",
+ "author": "Eithan",
+ "type": "commonjs",
+ "main": "./src/index.js",
+ "scripts": {
+ "test": "echo \"Error: no test specified\" && exit 1"
+ },
+ "action": {
+ "runnerAsynchronous": true,
+ "runnerMinCount": 1,
+ "runnerMaxCount": 1,
+ "runnerMaxAge": 0,
+ "runnerMaxIdleAge": 0,
+ "runnerMaxAgeHard": 0,
+ "runnerMode": "standard"
+ },
+ "triggers": [
+ {
+ "type": "http",
+ "name": "http-javascript-trigger",
+ "path": "/http-javascript-cjs",
+ "method": "GET"
+ }
+ ]
+}
diff --git a/examples/http-javascript-cjs/publish.sh b/examples/http-javascript-cjs/publish.sh
new file mode 100755
index 0000000..86ceeb5
--- /dev/null
+++ b/examples/http-javascript-cjs/publish.sh
@@ -0,0 +1,22 @@
+#!/bin/bash
+
+# Install dependencies using NPM (issues with pnpm and mono repos, had to use npm)
+npm install > /dev/null
+
+# Archive essential files
+zip -rvq archive.zip ./package.json ./src > /dev/null
+
+# Get token (first argument) and base url (second argument, defaults to localhost:3000)
+TOKEN=${1}
+BASE_URL=${2:-http://localhost:3000}
+
+# Upload to Jobber
+curl \
+ --silent \
+ --request POST \
+ --url "$BASE_URL/api/job/publish/" \
+ --header 'content-type: multipart/form-data' \
+ --header "Authorization: Bearer $TOKEN" \
+ --form 'archive=@archive.zip;type=application/zip' > /dev/null
+
+rm archive.zip
\ No newline at end of file
diff --git a/examples/http-javascript-cjs/src/index.js b/examples/http-javascript-cjs/src/index.js
new file mode 100644
index 0000000..0ccd48b
--- /dev/null
+++ b/examples/http-javascript-cjs/src/index.js
@@ -0,0 +1,11 @@
+// ew 2005 called and asked for its javascript back
+
+const path = require("path");
+
+exports.handlerHttp = async (context) => {
+ const host = context.request.header("host");
+
+ return context.response.text(
+ `path.join example from commonjs: ${path.join("foo", "bar")}`,
+ );
+};
diff --git a/examples/http-javascript-run-once/package-lock.json b/examples/http-javascript-run-once/package-lock.json
new file mode 100644
index 0000000..1347e8c
--- /dev/null
+++ b/examples/http-javascript-run-once/package-lock.json
@@ -0,0 +1,13 @@
+{
+ "name": "http-javascript-run-once",
+ "version": "1.0.0",
+ "lockfileVersion": 3,
+ "requires": true,
+ "packages": {
+ "": {
+ "name": "http-javascript-run-once",
+ "version": "1.0.0",
+ "license": "SEE LICENSE IN ../../"
+ }
+ }
+}
diff --git a/examples/http-javascript-run-once/package.json b/examples/http-javascript-run-once/package.json
new file mode 100644
index 0000000..fc7e679
--- /dev/null
+++ b/examples/http-javascript-run-once/package.json
@@ -0,0 +1,29 @@
+{
+ "name": "http-javascript-run-once",
+ "version": "1.0.0",
+ "description": "HTTP Javascript Run Once Example",
+ "license": "SEE LICENSE IN ../../",
+ "author": "Eithan",
+ "type": "module",
+ "main": "./src/index.js",
+ "scripts": {
+ "test": "echo \"Error: no test specified\" && exit 1"
+ },
+ "action": {
+ "runnerAsynchronous": false,
+ "runnerMinCount": 0,
+ "runnerMaxCount": 1,
+ "runnerMaxAge": 0,
+ "runnerMaxIdleAge": 0,
+ "runnerMaxAgeHard": 0,
+ "runnerMode": "run-once"
+ },
+ "triggers": [
+ {
+ "type": "http",
+ "name": "http-javascript-trigger",
+ "path": "/http-javascript-run-once",
+ "method": "GET"
+ }
+ ]
+}
diff --git a/examples/http-javascript-run-once/publish.sh b/examples/http-javascript-run-once/publish.sh
new file mode 100755
index 0000000..86ceeb5
--- /dev/null
+++ b/examples/http-javascript-run-once/publish.sh
@@ -0,0 +1,22 @@
+#!/bin/bash
+
+# Install dependencies using NPM (issues with pnpm and mono repos, had to use npm)
+npm install > /dev/null
+
+# Archive essential files
+zip -rvq archive.zip ./package.json ./src > /dev/null
+
+# Get token (first argument) and base url (second argument, defaults to localhost:3000)
+TOKEN=${1}
+BASE_URL=${2:-http://localhost:3000}
+
+# Upload to Jobber
+curl \
+ --silent \
+ --request POST \
+ --url "$BASE_URL/api/job/publish/" \
+ --header 'content-type: multipart/form-data' \
+ --header "Authorization: Bearer $TOKEN" \
+ --form 'archive=@archive.zip;type=application/zip' > /dev/null
+
+rm archive.zip
\ No newline at end of file
diff --git a/examples/http-javascript-run-once/src/index.js b/examples/http-javascript-run-once/src/index.js
new file mode 100644
index 0000000..d00a47a
--- /dev/null
+++ b/examples/http-javascript-run-once/src/index.js
@@ -0,0 +1,3 @@
+export const handlerHttp = async (context) => {
+ return context.response.text(`run-once-response`);
+};
diff --git a/examples/http-javascript/package-lock.json b/examples/http-javascript/package-lock.json
new file mode 100644
index 0000000..9f1f8a4
--- /dev/null
+++ b/examples/http-javascript/package-lock.json
@@ -0,0 +1,13 @@
+{
+ "name": "http-javascript",
+ "version": "1.0.0",
+ "lockfileVersion": 3,
+ "requires": true,
+ "packages": {
+ "": {
+ "name": "http-javascript",
+ "version": "1.0.0",
+ "license": "SEE LICENSE IN ../../"
+ }
+ }
+}
diff --git a/examples/http-javascript/package.json b/examples/http-javascript/package.json
new file mode 100644
index 0000000..caaebea
--- /dev/null
+++ b/examples/http-javascript/package.json
@@ -0,0 +1,29 @@
+{
+ "name": "http-javascript",
+ "version": "1.0.0",
+ "description": "HTTP Javascript Example",
+ "license": "SEE LICENSE IN ../../",
+ "author": "Eithan",
+ "type": "module",
+ "main": "./src/index.js",
+ "scripts": {
+ "test": "echo \"Error: no test specified\" && exit 1"
+ },
+ "action": {
+ "runnerAsynchronous": true,
+ "runnerMinCount": 1,
+ "runnerMaxCount": 1,
+ "runnerMaxAge": 30,
+ "runnerMaxIdleAge": 30,
+ "runnerMaxAgeHard": 60,
+ "runnerMode": "standard"
+ },
+ "triggers": [
+ {
+ "type": "http",
+ "name": "http-javascript-trigger",
+ "path": "/http-javascript",
+ "method": "GET"
+ }
+ ]
+}
diff --git a/examples/http-javascript/publish.sh b/examples/http-javascript/publish.sh
new file mode 100755
index 0000000..86ceeb5
--- /dev/null
+++ b/examples/http-javascript/publish.sh
@@ -0,0 +1,22 @@
+#!/bin/bash
+
+# Install dependencies using NPM (issues with pnpm and mono repos, had to use npm)
+npm install > /dev/null
+
+# Archive essential files
+zip -rvq archive.zip ./package.json ./src > /dev/null
+
+# Get token (first argument) and base url (second argument, defaults to localhost:3000)
+TOKEN=${1}
+BASE_URL=${2:-http://localhost:3000}
+
+# Upload to Jobber
+curl \
+ --silent \
+ --request POST \
+ --url "$BASE_URL/api/job/publish/" \
+ --header 'content-type: multipart/form-data' \
+ --header "Authorization: Bearer $TOKEN" \
+ --form 'archive=@archive.zip;type=application/zip' > /dev/null
+
+rm archive.zip
\ No newline at end of file
diff --git a/examples/http-javascript/src/index.js b/examples/http-javascript/src/index.js
new file mode 100644
index 0000000..7c0ad8c
--- /dev/null
+++ b/examples/http-javascript/src/index.js
@@ -0,0 +1,17 @@
+export const handlerHttp = async (context) => {
+ const host = context.request.header("host");
+
+ context.response.html(`
+
+
+ Jobber HTTP JavaScript Example
+
+
+ Jobber HTTP JavaScript Example
+ This is an example of a simple HTTP server built with Jobber and JavaScript.
+ To test this out, send a request to this endpoint using curl or your browser:
+ curl http://${host}/
+
+
+ `);
+};
diff --git a/examples/http-typescript/package.json b/examples/http-typescript-legacy/package.json
similarity index 94%
rename from examples/http-typescript/package.json
rename to examples/http-typescript-legacy/package.json
index 6ca8742..bb27084 100644
--- a/examples/http-typescript/package.json
+++ b/examples/http-typescript-legacy/package.json
@@ -1,6 +1,6 @@
{
- "name": "http-typescript",
- "version": "0.0.9",
+ "name": "http-typescript-legacy",
+ "version": "0.0.10",
"description": "Jobber Example, Typescript HTTP Demo",
"main": "./dist/index.js",
"type": "module",
diff --git a/examples/http-typescript/publish.sh b/examples/http-typescript-legacy/publish.sh
similarity index 80%
rename from examples/http-typescript/publish.sh
rename to examples/http-typescript-legacy/publish.sh
index 722a8a2..bfabdcd 100755
--- a/examples/http-typescript/publish.sh
+++ b/examples/http-typescript-legacy/publish.sh
@@ -18,12 +18,15 @@ npm run build
# Archive essential files
zip -rv archive.zip ./package.json ./dist ./src ./node_modules
+# Get base url from first argument, defaults to localhost:3000
+BASE_URL=${1:-http://localhost:3000}
+
# Upload to Jobber
curl \
--silent \
--show-error \
--request POST \
- --url 'http://localhost:3000/api/job/publish/' \
+ --url "$BASE_URL/api/job/publish/" \
--header 'content-type: multipart/form-data' \
--form 'archive=@archive.zip;type=application/zip'
diff --git a/examples/http-typescript/src/declaration.d.ts b/examples/http-typescript-legacy/src/declaration.d.ts
similarity index 100%
rename from examples/http-typescript/src/declaration.d.ts
rename to examples/http-typescript-legacy/src/declaration.d.ts
diff --git a/examples/http-typescript/src/index.ts b/examples/http-typescript-legacy/src/index.ts
similarity index 85%
rename from examples/http-typescript/src/index.ts
rename to examples/http-typescript-legacy/src/index.ts
index c2a219c..2ead3a2 100644
--- a/examples/http-typescript/src/index.ts
+++ b/examples/http-typescript-legacy/src/index.ts
@@ -5,21 +5,25 @@ type StoreCounter = number;
export const handler = async (
request: JobberHandlerRequest,
response: JobberHandlerResponse,
- context: JobberHandlerContext
+ context: JobberHandlerContext,
) => {
if (request.type() !== "http") {
throw new Error("Expecting HTTP request");
}
+ if (request.query("test")) {
+ return response.json({ hello: "speedy" }, 200);
+ }
+
// console.log("name:", request.name());
await context.setStoreJson(
"medium-length",
- "SDGFDHGUDGGUSDGFDHGUDGGUSDGFDHGUDGGU"
+ "SDGFDHGUDGGUSDGFDHGUDGGUSDGFDHGUDGGU",
);
await context.setStoreJson(
"long-length-sdfhkfgasufygasiuyfgweuofygweoyfvewifyvewrifygverygifvegerg",
- "SDGFDHGUDGGUSDGFDHGUDGGUSDGFDHGUDGGUSDGFDHGUDGGUSDGFDHGUDGGUSDGFDHGUDGGUSDGFDHGUDGGUSDGFDHGUDGGUSDGFDHGUDGGUSDGFDHGUDGGUSDGFDHGUDGGUSDGFDHGUDGGU"
+ "SDGFDHGUDGGUSDGFDHGUDGGUSDGFDHGUDGGUSDGFDHGUDGGUSDGFDHGUDGGUSDGFDHGUDGGUSDGFDHGUDGGUSDGFDHGUDGGUSDGFDHGUDGGUSDGFDHGUDGGUSDGFDHGUDGGUSDGFDHGUDGGU",
);
await context.setStoreJson("1d expiry", "", {
@@ -46,6 +50,6 @@ export const handler = async (
{
count,
},
- 200
+ 200,
);
};
diff --git a/examples/mqtt-typescript/tsconfig.json b/examples/http-typescript-legacy/tsconfig.json
similarity index 88%
rename from examples/mqtt-typescript/tsconfig.json
rename to examples/http-typescript-legacy/tsconfig.json
index 5c356fc..82a0a34 100644
--- a/examples/mqtt-typescript/tsconfig.json
+++ b/examples/http-typescript-legacy/tsconfig.json
@@ -8,7 +8,8 @@
"skipLibCheck": true,
"forceConsistentCasingInFileNames": true,
"declaration": false,
- "outDir": "./dist",
+ "rootDir": "src",
+ "outDir": "./dist"
},
"$schema": "https://json.schemastore.org/tsconfig",
"display": "Recommended"
diff --git a/examples/http-typescript/package-lock.json b/examples/http-typescript/package-lock.json
deleted file mode 100644
index 6dd6db3..0000000
--- a/examples/http-typescript/package-lock.json
+++ /dev/null
@@ -1,990 +0,0 @@
-{
- "name": "http-typescript",
- "version": "0.0.9",
- "lockfileVersion": 3,
- "requires": true,
- "packages": {
- "": {
- "name": "http-typescript",
- "version": "0.0.9",
- "license": "MIT",
- "devDependencies": {
- "@tsconfig/node20": "^20.1.4",
- "@types/node": "^20.16.12",
- "rimraf": "^5.0.10",
- "tsc-alias": "^1.8.10",
- "typescript": "^5.6.3"
- }
- },
- "node_modules/@isaacs/cliui": {
- "version": "8.0.2",
- "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz",
- "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==",
- "dev": true,
- "dependencies": {
- "string-width": "^5.1.2",
- "string-width-cjs": "npm:string-width@^4.2.0",
- "strip-ansi": "^7.0.1",
- "strip-ansi-cjs": "npm:strip-ansi@^6.0.1",
- "wrap-ansi": "^8.1.0",
- "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0"
- },
- "engines": {
- "node": ">=12"
- }
- },
- "node_modules/@nodelib/fs.scandir": {
- "version": "2.1.5",
- "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz",
- "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==",
- "dev": true,
- "dependencies": {
- "@nodelib/fs.stat": "2.0.5",
- "run-parallel": "^1.1.9"
- },
- "engines": {
- "node": ">= 8"
- }
- },
- "node_modules/@nodelib/fs.stat": {
- "version": "2.0.5",
- "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz",
- "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==",
- "dev": true,
- "engines": {
- "node": ">= 8"
- }
- },
- "node_modules/@nodelib/fs.walk": {
- "version": "1.2.8",
- "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz",
- "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==",
- "dev": true,
- "dependencies": {
- "@nodelib/fs.scandir": "2.1.5",
- "fastq": "^1.6.0"
- },
- "engines": {
- "node": ">= 8"
- }
- },
- "node_modules/@pkgjs/parseargs": {
- "version": "0.11.0",
- "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz",
- "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==",
- "dev": true,
- "optional": true,
- "engines": {
- "node": ">=14"
- }
- },
- "node_modules/@tsconfig/node20": {
- "version": "20.1.4",
- "resolved": "https://registry.npmjs.org/@tsconfig/node20/-/node20-20.1.4.tgz",
- "integrity": "sha512-sqgsT69YFeLWf5NtJ4Xq/xAF8p4ZQHlmGW74Nu2tD4+g5fAsposc4ZfaaPixVu4y01BEiDCWLRDCvDM5JOsRxg==",
- "dev": true
- },
- "node_modules/@types/node": {
- "version": "20.17.11",
- "resolved": "https://registry.npmjs.org/@types/node/-/node-20.17.11.tgz",
- "integrity": "sha512-Ept5glCK35R8yeyIeYlRIZtX6SLRyqMhOFTgj5SOkMpLTdw3SEHI9fHx60xaUZ+V1aJxQJODE+7/j5ocZydYTg==",
- "dev": true,
- "dependencies": {
- "undici-types": "~6.19.2"
- }
- },
- "node_modules/ansi-regex": {
- "version": "6.1.0",
- "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.1.0.tgz",
- "integrity": "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==",
- "dev": true,
- "engines": {
- "node": ">=12"
- },
- "funding": {
- "url": "https://github.com/chalk/ansi-regex?sponsor=1"
- }
- },
- "node_modules/ansi-styles": {
- "version": "6.2.1",
- "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz",
- "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==",
- "dev": true,
- "engines": {
- "node": ">=12"
- },
- "funding": {
- "url": "https://github.com/chalk/ansi-styles?sponsor=1"
- }
- },
- "node_modules/anymatch": {
- "version": "3.1.3",
- "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz",
- "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==",
- "dev": true,
- "dependencies": {
- "normalize-path": "^3.0.0",
- "picomatch": "^2.0.4"
- },
- "engines": {
- "node": ">= 8"
- }
- },
- "node_modules/array-union": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz",
- "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==",
- "dev": true,
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/balanced-match": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
- "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==",
- "dev": true
- },
- "node_modules/binary-extensions": {
- "version": "2.3.0",
- "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz",
- "integrity": "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==",
- "dev": true,
- "engines": {
- "node": ">=8"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/brace-expansion": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz",
- "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==",
- "dev": true,
- "dependencies": {
- "balanced-match": "^1.0.0"
- }
- },
- "node_modules/braces": {
- "version": "3.0.3",
- "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz",
- "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==",
- "dev": true,
- "dependencies": {
- "fill-range": "^7.1.1"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/chokidar": {
- "version": "3.6.0",
- "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.6.0.tgz",
- "integrity": "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==",
- "dev": true,
- "dependencies": {
- "anymatch": "~3.1.2",
- "braces": "~3.0.2",
- "glob-parent": "~5.1.2",
- "is-binary-path": "~2.1.0",
- "is-glob": "~4.0.1",
- "normalize-path": "~3.0.0",
- "readdirp": "~3.6.0"
- },
- "engines": {
- "node": ">= 8.10.0"
- },
- "funding": {
- "url": "https://paulmillr.com/funding/"
- },
- "optionalDependencies": {
- "fsevents": "~2.3.2"
- }
- },
- "node_modules/color-convert": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
- "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
- "dev": true,
- "dependencies": {
- "color-name": "~1.1.4"
- },
- "engines": {
- "node": ">=7.0.0"
- }
- },
- "node_modules/color-name": {
- "version": "1.1.4",
- "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
- "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==",
- "dev": true
- },
- "node_modules/commander": {
- "version": "9.5.0",
- "resolved": "https://registry.npmjs.org/commander/-/commander-9.5.0.tgz",
- "integrity": "sha512-KRs7WVDKg86PWiuAqhDrAQnTXZKraVcCc6vFdL14qrZ/DcWwuRo7VoiYXalXO7S5GKpqYiVEwCbgFDfxNHKJBQ==",
- "dev": true,
- "engines": {
- "node": "^12.20.0 || >=14"
- }
- },
- "node_modules/cross-spawn": {
- "version": "7.0.6",
- "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz",
- "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==",
- "dev": true,
- "dependencies": {
- "path-key": "^3.1.0",
- "shebang-command": "^2.0.0",
- "which": "^2.0.1"
- },
- "engines": {
- "node": ">= 8"
- }
- },
- "node_modules/dir-glob": {
- "version": "3.0.1",
- "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz",
- "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==",
- "dev": true,
- "dependencies": {
- "path-type": "^4.0.0"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/eastasianwidth": {
- "version": "0.2.0",
- "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz",
- "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==",
- "dev": true
- },
- "node_modules/emoji-regex": {
- "version": "9.2.2",
- "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz",
- "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==",
- "dev": true
- },
- "node_modules/fast-glob": {
- "version": "3.3.2",
- "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.2.tgz",
- "integrity": "sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==",
- "dev": true,
- "dependencies": {
- "@nodelib/fs.stat": "^2.0.2",
- "@nodelib/fs.walk": "^1.2.3",
- "glob-parent": "^5.1.2",
- "merge2": "^1.3.0",
- "micromatch": "^4.0.4"
- },
- "engines": {
- "node": ">=8.6.0"
- }
- },
- "node_modules/fastq": {
- "version": "1.18.0",
- "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.18.0.tgz",
- "integrity": "sha512-QKHXPW0hD8g4UET03SdOdunzSouc9N4AuHdsX8XNcTsuz+yYFILVNIX4l9yHABMhiEI9Db0JTTIpu0wB+Y1QQw==",
- "dev": true,
- "dependencies": {
- "reusify": "^1.0.4"
- }
- },
- "node_modules/fill-range": {
- "version": "7.1.1",
- "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz",
- "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==",
- "dev": true,
- "dependencies": {
- "to-regex-range": "^5.0.1"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/foreground-child": {
- "version": "3.3.0",
- "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.0.tgz",
- "integrity": "sha512-Ld2g8rrAyMYFXBhEqMz8ZAHBi4J4uS1i/CxGMDnjyFWddMXLVcDp051DZfu+t7+ab7Wv6SMqpWmyFIj5UbfFvg==",
- "dev": true,
- "dependencies": {
- "cross-spawn": "^7.0.0",
- "signal-exit": "^4.0.1"
- },
- "engines": {
- "node": ">=14"
- },
- "funding": {
- "url": "https://github.com/sponsors/isaacs"
- }
- },
- "node_modules/fsevents": {
- "version": "2.3.3",
- "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz",
- "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==",
- "dev": true,
- "hasInstallScript": true,
- "optional": true,
- "os": [
- "darwin"
- ],
- "engines": {
- "node": "^8.16.0 || ^10.6.0 || >=11.0.0"
- }
- },
- "node_modules/glob": {
- "version": "10.4.5",
- "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.5.tgz",
- "integrity": "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==",
- "dev": true,
- "dependencies": {
- "foreground-child": "^3.1.0",
- "jackspeak": "^3.1.2",
- "minimatch": "^9.0.4",
- "minipass": "^7.1.2",
- "package-json-from-dist": "^1.0.0",
- "path-scurry": "^1.11.1"
- },
- "bin": {
- "glob": "dist/esm/bin.mjs"
- },
- "funding": {
- "url": "https://github.com/sponsors/isaacs"
- }
- },
- "node_modules/glob-parent": {
- "version": "5.1.2",
- "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz",
- "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==",
- "dev": true,
- "dependencies": {
- "is-glob": "^4.0.1"
- },
- "engines": {
- "node": ">= 6"
- }
- },
- "node_modules/globby": {
- "version": "11.1.0",
- "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz",
- "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==",
- "dev": true,
- "dependencies": {
- "array-union": "^2.1.0",
- "dir-glob": "^3.0.1",
- "fast-glob": "^3.2.9",
- "ignore": "^5.2.0",
- "merge2": "^1.4.1",
- "slash": "^3.0.0"
- },
- "engines": {
- "node": ">=10"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/ignore": {
- "version": "5.3.2",
- "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz",
- "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==",
- "dev": true,
- "engines": {
- "node": ">= 4"
- }
- },
- "node_modules/is-binary-path": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz",
- "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==",
- "dev": true,
- "dependencies": {
- "binary-extensions": "^2.0.0"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/is-extglob": {
- "version": "2.1.1",
- "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz",
- "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==",
- "dev": true,
- "engines": {
- "node": ">=0.10.0"
- }
- },
- "node_modules/is-fullwidth-code-point": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz",
- "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==",
- "dev": true,
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/is-glob": {
- "version": "4.0.3",
- "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz",
- "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==",
- "dev": true,
- "dependencies": {
- "is-extglob": "^2.1.1"
- },
- "engines": {
- "node": ">=0.10.0"
- }
- },
- "node_modules/is-number": {
- "version": "7.0.0",
- "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz",
- "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==",
- "dev": true,
- "engines": {
- "node": ">=0.12.0"
- }
- },
- "node_modules/isexe": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz",
- "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==",
- "dev": true
- },
- "node_modules/jackspeak": {
- "version": "3.4.3",
- "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-3.4.3.tgz",
- "integrity": "sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==",
- "dev": true,
- "dependencies": {
- "@isaacs/cliui": "^8.0.2"
- },
- "funding": {
- "url": "https://github.com/sponsors/isaacs"
- },
- "optionalDependencies": {
- "@pkgjs/parseargs": "^0.11.0"
- }
- },
- "node_modules/lru-cache": {
- "version": "10.4.3",
- "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz",
- "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==",
- "dev": true
- },
- "node_modules/merge2": {
- "version": "1.4.1",
- "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz",
- "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==",
- "dev": true,
- "engines": {
- "node": ">= 8"
- }
- },
- "node_modules/micromatch": {
- "version": "4.0.8",
- "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz",
- "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==",
- "dev": true,
- "dependencies": {
- "braces": "^3.0.3",
- "picomatch": "^2.3.1"
- },
- "engines": {
- "node": ">=8.6"
- }
- },
- "node_modules/minimatch": {
- "version": "9.0.5",
- "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz",
- "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==",
- "dev": true,
- "dependencies": {
- "brace-expansion": "^2.0.1"
- },
- "engines": {
- "node": ">=16 || 14 >=14.17"
- },
- "funding": {
- "url": "https://github.com/sponsors/isaacs"
- }
- },
- "node_modules/minipass": {
- "version": "7.1.2",
- "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz",
- "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==",
- "dev": true,
- "engines": {
- "node": ">=16 || 14 >=14.17"
- }
- },
- "node_modules/mylas": {
- "version": "2.1.13",
- "resolved": "https://registry.npmjs.org/mylas/-/mylas-2.1.13.tgz",
- "integrity": "sha512-+MrqnJRtxdF+xngFfUUkIMQrUUL0KsxbADUkn23Z/4ibGg192Q+z+CQyiYwvWTsYjJygmMR8+w3ZDa98Zh6ESg==",
- "dev": true,
- "engines": {
- "node": ">=12.0.0"
- },
- "funding": {
- "type": "github",
- "url": "https://github.com/sponsors/raouldeheer"
- }
- },
- "node_modules/normalize-path": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz",
- "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==",
- "dev": true,
- "engines": {
- "node": ">=0.10.0"
- }
- },
- "node_modules/package-json-from-dist": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/package-json-from-dist/-/package-json-from-dist-1.0.1.tgz",
- "integrity": "sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==",
- "dev": true
- },
- "node_modules/path-key": {
- "version": "3.1.1",
- "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz",
- "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==",
- "dev": true,
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/path-scurry": {
- "version": "1.11.1",
- "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz",
- "integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==",
- "dev": true,
- "dependencies": {
- "lru-cache": "^10.2.0",
- "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0"
- },
- "engines": {
- "node": ">=16 || 14 >=14.18"
- },
- "funding": {
- "url": "https://github.com/sponsors/isaacs"
- }
- },
- "node_modules/path-type": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz",
- "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==",
- "dev": true,
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/picomatch": {
- "version": "2.3.1",
- "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz",
- "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==",
- "dev": true,
- "engines": {
- "node": ">=8.6"
- },
- "funding": {
- "url": "https://github.com/sponsors/jonschlinkert"
- }
- },
- "node_modules/plimit-lit": {
- "version": "1.6.1",
- "resolved": "https://registry.npmjs.org/plimit-lit/-/plimit-lit-1.6.1.tgz",
- "integrity": "sha512-B7+VDyb8Tl6oMJT9oSO2CW8XC/T4UcJGrwOVoNGwOQsQYhlpfajmrMj5xeejqaASq3V/EqThyOeATEOMuSEXiA==",
- "dev": true,
- "dependencies": {
- "queue-lit": "^1.5.1"
- },
- "engines": {
- "node": ">=12"
- }
- },
- "node_modules/queue-lit": {
- "version": "1.5.2",
- "resolved": "https://registry.npmjs.org/queue-lit/-/queue-lit-1.5.2.tgz",
- "integrity": "sha512-tLc36IOPeMAubu8BkW8YDBV+WyIgKlYU7zUNs0J5Vk9skSZ4JfGlPOqplP0aHdfv7HL0B2Pg6nwiq60Qc6M2Hw==",
- "dev": true,
- "engines": {
- "node": ">=12"
- }
- },
- "node_modules/queue-microtask": {
- "version": "1.2.3",
- "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz",
- "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==",
- "dev": true,
- "funding": [
- {
- "type": "github",
- "url": "https://github.com/sponsors/feross"
- },
- {
- "type": "patreon",
- "url": "https://www.patreon.com/feross"
- },
- {
- "type": "consulting",
- "url": "https://feross.org/support"
- }
- ]
- },
- "node_modules/readdirp": {
- "version": "3.6.0",
- "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz",
- "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==",
- "dev": true,
- "dependencies": {
- "picomatch": "^2.2.1"
- },
- "engines": {
- "node": ">=8.10.0"
- }
- },
- "node_modules/reusify": {
- "version": "1.0.4",
- "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz",
- "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==",
- "dev": true,
- "engines": {
- "iojs": ">=1.0.0",
- "node": ">=0.10.0"
- }
- },
- "node_modules/rimraf": {
- "version": "5.0.10",
- "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-5.0.10.tgz",
- "integrity": "sha512-l0OE8wL34P4nJH/H2ffoaniAokM2qSmrtXHmlpvYr5AVVX8msAyW0l8NVJFDxlSK4u3Uh/f41cQheDVdnYijwQ==",
- "dev": true,
- "dependencies": {
- "glob": "^10.3.7"
- },
- "bin": {
- "rimraf": "dist/esm/bin.mjs"
- },
- "funding": {
- "url": "https://github.com/sponsors/isaacs"
- }
- },
- "node_modules/run-parallel": {
- "version": "1.2.0",
- "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz",
- "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==",
- "dev": true,
- "funding": [
- {
- "type": "github",
- "url": "https://github.com/sponsors/feross"
- },
- {
- "type": "patreon",
- "url": "https://www.patreon.com/feross"
- },
- {
- "type": "consulting",
- "url": "https://feross.org/support"
- }
- ],
- "dependencies": {
- "queue-microtask": "^1.2.2"
- }
- },
- "node_modules/shebang-command": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz",
- "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==",
- "dev": true,
- "dependencies": {
- "shebang-regex": "^3.0.0"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/shebang-regex": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz",
- "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==",
- "dev": true,
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/signal-exit": {
- "version": "4.1.0",
- "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz",
- "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==",
- "dev": true,
- "engines": {
- "node": ">=14"
- },
- "funding": {
- "url": "https://github.com/sponsors/isaacs"
- }
- },
- "node_modules/slash": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz",
- "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==",
- "dev": true,
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/string-width": {
- "version": "5.1.2",
- "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz",
- "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==",
- "dev": true,
- "dependencies": {
- "eastasianwidth": "^0.2.0",
- "emoji-regex": "^9.2.2",
- "strip-ansi": "^7.0.1"
- },
- "engines": {
- "node": ">=12"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/string-width-cjs": {
- "name": "string-width",
- "version": "4.2.3",
- "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
- "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
- "dev": true,
- "dependencies": {
- "emoji-regex": "^8.0.0",
- "is-fullwidth-code-point": "^3.0.0",
- "strip-ansi": "^6.0.1"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/string-width-cjs/node_modules/ansi-regex": {
- "version": "5.0.1",
- "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
- "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
- "dev": true,
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/string-width-cjs/node_modules/emoji-regex": {
- "version": "8.0.0",
- "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
- "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==",
- "dev": true
- },
- "node_modules/string-width-cjs/node_modules/strip-ansi": {
- "version": "6.0.1",
- "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
- "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
- "dev": true,
- "dependencies": {
- "ansi-regex": "^5.0.1"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/strip-ansi": {
- "version": "7.1.0",
- "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz",
- "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==",
- "dev": true,
- "dependencies": {
- "ansi-regex": "^6.0.1"
- },
- "engines": {
- "node": ">=12"
- },
- "funding": {
- "url": "https://github.com/chalk/strip-ansi?sponsor=1"
- }
- },
- "node_modules/strip-ansi-cjs": {
- "name": "strip-ansi",
- "version": "6.0.1",
- "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
- "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
- "dev": true,
- "dependencies": {
- "ansi-regex": "^5.0.1"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/strip-ansi-cjs/node_modules/ansi-regex": {
- "version": "5.0.1",
- "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
- "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
- "dev": true,
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/to-regex-range": {
- "version": "5.0.1",
- "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz",
- "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==",
- "dev": true,
- "dependencies": {
- "is-number": "^7.0.0"
- },
- "engines": {
- "node": ">=8.0"
- }
- },
- "node_modules/tsc-alias": {
- "version": "1.8.10",
- "resolved": "https://registry.npmjs.org/tsc-alias/-/tsc-alias-1.8.10.tgz",
- "integrity": "sha512-Ibv4KAWfFkFdKJxnWfVtdOmB0Zi1RJVxcbPGiCDsFpCQSsmpWyuzHG3rQyI5YkobWwxFPEyQfu1hdo4qLG2zPw==",
- "dev": true,
- "dependencies": {
- "chokidar": "^3.5.3",
- "commander": "^9.0.0",
- "globby": "^11.0.4",
- "mylas": "^2.1.9",
- "normalize-path": "^3.0.0",
- "plimit-lit": "^1.2.6"
- },
- "bin": {
- "tsc-alias": "dist/bin/index.js"
- }
- },
- "node_modules/typescript": {
- "version": "5.7.2",
- "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.7.2.tgz",
- "integrity": "sha512-i5t66RHxDvVN40HfDd1PsEThGNnlMCMT3jMUuoh9/0TaqWevNontacunWyN02LA9/fIbEWlcHZcgTKb9QoaLfg==",
- "dev": true,
- "bin": {
- "tsc": "bin/tsc",
- "tsserver": "bin/tsserver"
- },
- "engines": {
- "node": ">=14.17"
- }
- },
- "node_modules/undici-types": {
- "version": "6.19.8",
- "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz",
- "integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==",
- "dev": true
- },
- "node_modules/which": {
- "version": "2.0.2",
- "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
- "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==",
- "dev": true,
- "dependencies": {
- "isexe": "^2.0.0"
- },
- "bin": {
- "node-which": "bin/node-which"
- },
- "engines": {
- "node": ">= 8"
- }
- },
- "node_modules/wrap-ansi": {
- "version": "8.1.0",
- "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz",
- "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==",
- "dev": true,
- "dependencies": {
- "ansi-styles": "^6.1.0",
- "string-width": "^5.0.1",
- "strip-ansi": "^7.0.1"
- },
- "engines": {
- "node": ">=12"
- },
- "funding": {
- "url": "https://github.com/chalk/wrap-ansi?sponsor=1"
- }
- },
- "node_modules/wrap-ansi-cjs": {
- "name": "wrap-ansi",
- "version": "7.0.0",
- "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
- "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==",
- "dev": true,
- "dependencies": {
- "ansi-styles": "^4.0.0",
- "string-width": "^4.1.0",
- "strip-ansi": "^6.0.0"
- },
- "engines": {
- "node": ">=10"
- },
- "funding": {
- "url": "https://github.com/chalk/wrap-ansi?sponsor=1"
- }
- },
- "node_modules/wrap-ansi-cjs/node_modules/ansi-regex": {
- "version": "5.0.1",
- "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
- "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
- "dev": true,
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/wrap-ansi-cjs/node_modules/ansi-styles": {
- "version": "4.3.0",
- "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
- "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
- "dev": true,
- "dependencies": {
- "color-convert": "^2.0.1"
- },
- "engines": {
- "node": ">=8"
- },
- "funding": {
- "url": "https://github.com/chalk/ansi-styles?sponsor=1"
- }
- },
- "node_modules/wrap-ansi-cjs/node_modules/emoji-regex": {
- "version": "8.0.0",
- "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
- "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==",
- "dev": true
- },
- "node_modules/wrap-ansi-cjs/node_modules/string-width": {
- "version": "4.2.3",
- "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
- "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
- "dev": true,
- "dependencies": {
- "emoji-regex": "^8.0.0",
- "is-fullwidth-code-point": "^3.0.0",
- "strip-ansi": "^6.0.1"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/wrap-ansi-cjs/node_modules/strip-ansi": {
- "version": "6.0.1",
- "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
- "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
- "dev": true,
- "dependencies": {
- "ansi-regex": "^5.0.1"
- },
- "engines": {
- "node": ">=8"
- }
- }
- }
-}
diff --git a/examples/mqtt-typescript/package.json b/examples/mqtt-typescript-legacy/package.json
similarity index 96%
rename from examples/mqtt-typescript/package.json
rename to examples/mqtt-typescript-legacy/package.json
index a6afbe8..a1d2416 100644
--- a/examples/mqtt-typescript/package.json
+++ b/examples/mqtt-typescript-legacy/package.json
@@ -1,5 +1,5 @@
{
- "name": "mqtt-typescript",
+ "name": "mqtt-typescript-legacy",
"version": "0.0.9",
"description": "Jobber Example for MQTT in TypeScript",
"main": "./dist/index.js",
diff --git a/examples/mqtt-typescript/publish.sh b/examples/mqtt-typescript-legacy/publish.sh
similarity index 78%
rename from examples/mqtt-typescript/publish.sh
rename to examples/mqtt-typescript-legacy/publish.sh
index f6bcaca..bfabdcd 100755
--- a/examples/mqtt-typescript/publish.sh
+++ b/examples/mqtt-typescript-legacy/publish.sh
@@ -18,14 +18,16 @@ npm run build
# Archive essential files
zip -rv archive.zip ./package.json ./dist ./src ./node_modules
+# Get base url argument from argument, defaults to localhost:3000
+BASE_URL=${1:-http://localhost:3000}
+
# Upload to Jobber
curl \
--silent \
--show-error \
--request POST \
- --url 'http://localhost:3000/api/job/publish/' \
+ --url "$BASE_URL/api/job/publish/" \
--header 'content-type: multipart/form-data' \
- --header 'Authorization: Bearer 2abb54173128350bdc916054f320a300c8b76bc873dd3c80301a02946f39c9e24bc824' \
--form 'archive=@archive.zip;type=application/zip'
rm archive.zip
\ No newline at end of file
diff --git a/examples/mqtt-typescript/src/declaration.d.ts b/examples/mqtt-typescript-legacy/src/declaration.d.ts
similarity index 100%
rename from examples/mqtt-typescript/src/declaration.d.ts
rename to examples/mqtt-typescript-legacy/src/declaration.d.ts
diff --git a/examples/mqtt-typescript/src/index.ts b/examples/mqtt-typescript-legacy/src/index.ts
similarity index 100%
rename from examples/mqtt-typescript/src/index.ts
rename to examples/mqtt-typescript-legacy/src/index.ts
diff --git a/examples/http-typescript/tsconfig.json b/examples/mqtt-typescript-legacy/tsconfig.json
similarity index 94%
rename from examples/http-typescript/tsconfig.json
rename to examples/mqtt-typescript-legacy/tsconfig.json
index 5c356fc..b3eedcd 100644
--- a/examples/http-typescript/tsconfig.json
+++ b/examples/mqtt-typescript-legacy/tsconfig.json
@@ -8,6 +8,7 @@
"skipLibCheck": true,
"forceConsistentCasingInFileNames": true,
"declaration": false,
+ "rootDir": "src",
"outDir": "./dist",
},
"$schema": "https://json.schemastore.org/tsconfig",
diff --git a/examples/mqtt-typescript/package-lock.json b/examples/mqtt-typescript/package-lock.json
deleted file mode 100644
index cebb374..0000000
--- a/examples/mqtt-typescript/package-lock.json
+++ /dev/null
@@ -1,895 +0,0 @@
-{
- "name": "mqtt-typescript",
- "version": "0.0.9",
- "lockfileVersion": 3,
- "requires": true,
- "packages": {
- "": {
- "name": "mqtt-typescript",
- "version": "0.0.9",
- "license": "MIT",
- "devDependencies": {
- "@tsconfig/node20": "^20.1.4",
- "@types/node": "^20.16.12",
- "rimraf": "^5.0.10",
- "tsc-alias": "^1.8.10",
- "typescript": "^5.6.3"
- }
- },
- "node_modules/@isaacs/cliui": {
- "version": "8.0.2",
- "dev": true,
- "license": "ISC",
- "dependencies": {
- "string-width": "^5.1.2",
- "string-width-cjs": "npm:string-width@^4.2.0",
- "strip-ansi": "^7.0.1",
- "strip-ansi-cjs": "npm:strip-ansi@^6.0.1",
- "wrap-ansi": "^8.1.0",
- "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0"
- },
- "engines": {
- "node": ">=12"
- }
- },
- "node_modules/@nodelib/fs.scandir": {
- "version": "2.1.5",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@nodelib/fs.stat": "2.0.5",
- "run-parallel": "^1.1.9"
- },
- "engines": {
- "node": ">= 8"
- }
- },
- "node_modules/@nodelib/fs.stat": {
- "version": "2.0.5",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">= 8"
- }
- },
- "node_modules/@nodelib/fs.walk": {
- "version": "1.2.8",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@nodelib/fs.scandir": "2.1.5",
- "fastq": "^1.6.0"
- },
- "engines": {
- "node": ">= 8"
- }
- },
- "node_modules/@pkgjs/parseargs": {
- "version": "0.11.0",
- "dev": true,
- "license": "MIT",
- "optional": true,
- "engines": {
- "node": ">=14"
- }
- },
- "node_modules/@tsconfig/node20": {
- "version": "20.1.4",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/@types/node": {
- "version": "20.17.11",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "undici-types": "~6.19.2"
- }
- },
- "node_modules/ansi-regex": {
- "version": "6.1.0",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=12"
- },
- "funding": {
- "url": "https://github.com/chalk/ansi-regex?sponsor=1"
- }
- },
- "node_modules/ansi-styles": {
- "version": "6.2.1",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=12"
- },
- "funding": {
- "url": "https://github.com/chalk/ansi-styles?sponsor=1"
- }
- },
- "node_modules/anymatch": {
- "version": "3.1.3",
- "dev": true,
- "license": "ISC",
- "dependencies": {
- "normalize-path": "^3.0.0",
- "picomatch": "^2.0.4"
- },
- "engines": {
- "node": ">= 8"
- }
- },
- "node_modules/array-union": {
- "version": "2.1.0",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/balanced-match": {
- "version": "1.0.2",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/binary-extensions": {
- "version": "2.3.0",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=8"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/brace-expansion": {
- "version": "2.0.1",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "balanced-match": "^1.0.0"
- }
- },
- "node_modules/braces": {
- "version": "3.0.3",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "fill-range": "^7.1.1"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/chokidar": {
- "version": "3.6.0",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "anymatch": "~3.1.2",
- "braces": "~3.0.2",
- "glob-parent": "~5.1.2",
- "is-binary-path": "~2.1.0",
- "is-glob": "~4.0.1",
- "normalize-path": "~3.0.0",
- "readdirp": "~3.6.0"
- },
- "engines": {
- "node": ">= 8.10.0"
- },
- "funding": {
- "url": "https://paulmillr.com/funding/"
- },
- "optionalDependencies": {
- "fsevents": "~2.3.2"
- }
- },
- "node_modules/color-convert": {
- "version": "2.0.1",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "color-name": "~1.1.4"
- },
- "engines": {
- "node": ">=7.0.0"
- }
- },
- "node_modules/color-name": {
- "version": "1.1.4",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/commander": {
- "version": "9.5.0",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": "^12.20.0 || >=14"
- }
- },
- "node_modules/cross-spawn": {
- "version": "7.0.6",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "path-key": "^3.1.0",
- "shebang-command": "^2.0.0",
- "which": "^2.0.1"
- },
- "engines": {
- "node": ">= 8"
- }
- },
- "node_modules/dir-glob": {
- "version": "3.0.1",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "path-type": "^4.0.0"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/eastasianwidth": {
- "version": "0.2.0",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/emoji-regex": {
- "version": "9.2.2",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/fast-glob": {
- "version": "3.3.2",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@nodelib/fs.stat": "^2.0.2",
- "@nodelib/fs.walk": "^1.2.3",
- "glob-parent": "^5.1.2",
- "merge2": "^1.3.0",
- "micromatch": "^4.0.4"
- },
- "engines": {
- "node": ">=8.6.0"
- }
- },
- "node_modules/fastq": {
- "version": "1.18.0",
- "dev": true,
- "license": "ISC",
- "dependencies": {
- "reusify": "^1.0.4"
- }
- },
- "node_modules/fill-range": {
- "version": "7.1.1",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "to-regex-range": "^5.0.1"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/foreground-child": {
- "version": "3.3.0",
- "dev": true,
- "license": "ISC",
- "dependencies": {
- "cross-spawn": "^7.0.0",
- "signal-exit": "^4.0.1"
- },
- "engines": {
- "node": ">=14"
- },
- "funding": {
- "url": "https://github.com/sponsors/isaacs"
- }
- },
- "node_modules/glob": {
- "version": "10.4.5",
- "dev": true,
- "license": "ISC",
- "dependencies": {
- "foreground-child": "^3.1.0",
- "jackspeak": "^3.1.2",
- "minimatch": "^9.0.4",
- "minipass": "^7.1.2",
- "package-json-from-dist": "^1.0.0",
- "path-scurry": "^1.11.1"
- },
- "bin": {
- "glob": "dist/esm/bin.mjs"
- },
- "funding": {
- "url": "https://github.com/sponsors/isaacs"
- }
- },
- "node_modules/glob-parent": {
- "version": "5.1.2",
- "dev": true,
- "license": "ISC",
- "dependencies": {
- "is-glob": "^4.0.1"
- },
- "engines": {
- "node": ">= 6"
- }
- },
- "node_modules/globby": {
- "version": "11.1.0",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "array-union": "^2.1.0",
- "dir-glob": "^3.0.1",
- "fast-glob": "^3.2.9",
- "ignore": "^5.2.0",
- "merge2": "^1.4.1",
- "slash": "^3.0.0"
- },
- "engines": {
- "node": ">=10"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/ignore": {
- "version": "5.3.2",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">= 4"
- }
- },
- "node_modules/is-binary-path": {
- "version": "2.1.0",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "binary-extensions": "^2.0.0"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/is-extglob": {
- "version": "2.1.1",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=0.10.0"
- }
- },
- "node_modules/is-fullwidth-code-point": {
- "version": "3.0.0",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/is-glob": {
- "version": "4.0.3",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "is-extglob": "^2.1.1"
- },
- "engines": {
- "node": ">=0.10.0"
- }
- },
- "node_modules/is-number": {
- "version": "7.0.0",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=0.12.0"
- }
- },
- "node_modules/isexe": {
- "version": "2.0.0",
- "dev": true,
- "license": "ISC"
- },
- "node_modules/jackspeak": {
- "version": "3.4.3",
- "dev": true,
- "license": "BlueOak-1.0.0",
- "dependencies": {
- "@isaacs/cliui": "^8.0.2"
- },
- "funding": {
- "url": "https://github.com/sponsors/isaacs"
- },
- "optionalDependencies": {
- "@pkgjs/parseargs": "^0.11.0"
- }
- },
- "node_modules/lru-cache": {
- "version": "10.4.3",
- "dev": true,
- "license": "ISC"
- },
- "node_modules/merge2": {
- "version": "1.4.1",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">= 8"
- }
- },
- "node_modules/micromatch": {
- "version": "4.0.8",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "braces": "^3.0.3",
- "picomatch": "^2.3.1"
- },
- "engines": {
- "node": ">=8.6"
- }
- },
- "node_modules/minimatch": {
- "version": "9.0.5",
- "dev": true,
- "license": "ISC",
- "dependencies": {
- "brace-expansion": "^2.0.1"
- },
- "engines": {
- "node": ">=16 || 14 >=14.17"
- },
- "funding": {
- "url": "https://github.com/sponsors/isaacs"
- }
- },
- "node_modules/minipass": {
- "version": "7.1.2",
- "dev": true,
- "license": "ISC",
- "engines": {
- "node": ">=16 || 14 >=14.17"
- }
- },
- "node_modules/mylas": {
- "version": "2.1.13",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=12.0.0"
- },
- "funding": {
- "type": "github",
- "url": "https://github.com/sponsors/raouldeheer"
- }
- },
- "node_modules/normalize-path": {
- "version": "3.0.0",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=0.10.0"
- }
- },
- "node_modules/package-json-from-dist": {
- "version": "1.0.1",
- "dev": true,
- "license": "BlueOak-1.0.0"
- },
- "node_modules/path-key": {
- "version": "3.1.1",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/path-scurry": {
- "version": "1.11.1",
- "dev": true,
- "license": "BlueOak-1.0.0",
- "dependencies": {
- "lru-cache": "^10.2.0",
- "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0"
- },
- "engines": {
- "node": ">=16 || 14 >=14.18"
- },
- "funding": {
- "url": "https://github.com/sponsors/isaacs"
- }
- },
- "node_modules/path-type": {
- "version": "4.0.0",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/picomatch": {
- "version": "2.3.1",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=8.6"
- },
- "funding": {
- "url": "https://github.com/sponsors/jonschlinkert"
- }
- },
- "node_modules/plimit-lit": {
- "version": "1.6.1",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "queue-lit": "^1.5.1"
- },
- "engines": {
- "node": ">=12"
- }
- },
- "node_modules/queue-lit": {
- "version": "1.5.2",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=12"
- }
- },
- "node_modules/queue-microtask": {
- "version": "1.2.3",
- "dev": true,
- "funding": [
- {
- "type": "github",
- "url": "https://github.com/sponsors/feross"
- },
- {
- "type": "patreon",
- "url": "https://www.patreon.com/feross"
- },
- {
- "type": "consulting",
- "url": "https://feross.org/support"
- }
- ],
- "license": "MIT"
- },
- "node_modules/readdirp": {
- "version": "3.6.0",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "picomatch": "^2.2.1"
- },
- "engines": {
- "node": ">=8.10.0"
- }
- },
- "node_modules/reusify": {
- "version": "1.0.4",
- "dev": true,
- "license": "MIT",
- "engines": {
- "iojs": ">=1.0.0",
- "node": ">=0.10.0"
- }
- },
- "node_modules/rimraf": {
- "version": "5.0.10",
- "dev": true,
- "license": "ISC",
- "dependencies": {
- "glob": "^10.3.7"
- },
- "bin": {
- "rimraf": "dist/esm/bin.mjs"
- },
- "funding": {
- "url": "https://github.com/sponsors/isaacs"
- }
- },
- "node_modules/run-parallel": {
- "version": "1.2.0",
- "dev": true,
- "funding": [
- {
- "type": "github",
- "url": "https://github.com/sponsors/feross"
- },
- {
- "type": "patreon",
- "url": "https://www.patreon.com/feross"
- },
- {
- "type": "consulting",
- "url": "https://feross.org/support"
- }
- ],
- "license": "MIT",
- "dependencies": {
- "queue-microtask": "^1.2.2"
- }
- },
- "node_modules/shebang-command": {
- "version": "2.0.0",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "shebang-regex": "^3.0.0"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/shebang-regex": {
- "version": "3.0.0",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/signal-exit": {
- "version": "4.1.0",
- "dev": true,
- "license": "ISC",
- "engines": {
- "node": ">=14"
- },
- "funding": {
- "url": "https://github.com/sponsors/isaacs"
- }
- },
- "node_modules/slash": {
- "version": "3.0.0",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/string-width": {
- "version": "5.1.2",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "eastasianwidth": "^0.2.0",
- "emoji-regex": "^9.2.2",
- "strip-ansi": "^7.0.1"
- },
- "engines": {
- "node": ">=12"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/string-width-cjs": {
- "name": "string-width",
- "version": "4.2.3",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "emoji-regex": "^8.0.0",
- "is-fullwidth-code-point": "^3.0.0",
- "strip-ansi": "^6.0.1"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/string-width-cjs/node_modules/ansi-regex": {
- "version": "5.0.1",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/string-width-cjs/node_modules/emoji-regex": {
- "version": "8.0.0",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/string-width-cjs/node_modules/strip-ansi": {
- "version": "6.0.1",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "ansi-regex": "^5.0.1"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/strip-ansi": {
- "version": "7.1.0",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "ansi-regex": "^6.0.1"
- },
- "engines": {
- "node": ">=12"
- },
- "funding": {
- "url": "https://github.com/chalk/strip-ansi?sponsor=1"
- }
- },
- "node_modules/strip-ansi-cjs": {
- "name": "strip-ansi",
- "version": "6.0.1",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "ansi-regex": "^5.0.1"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/strip-ansi-cjs/node_modules/ansi-regex": {
- "version": "5.0.1",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/to-regex-range": {
- "version": "5.0.1",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "is-number": "^7.0.0"
- },
- "engines": {
- "node": ">=8.0"
- }
- },
- "node_modules/tsc-alias": {
- "version": "1.8.10",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "chokidar": "^3.5.3",
- "commander": "^9.0.0",
- "globby": "^11.0.4",
- "mylas": "^2.1.9",
- "normalize-path": "^3.0.0",
- "plimit-lit": "^1.2.6"
- },
- "bin": {
- "tsc-alias": "dist/bin/index.js"
- }
- },
- "node_modules/typescript": {
- "version": "5.7.2",
- "dev": true,
- "license": "Apache-2.0",
- "bin": {
- "tsc": "bin/tsc",
- "tsserver": "bin/tsserver"
- },
- "engines": {
- "node": ">=14.17"
- }
- },
- "node_modules/undici-types": {
- "version": "6.19.8",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/which": {
- "version": "2.0.2",
- "dev": true,
- "license": "ISC",
- "dependencies": {
- "isexe": "^2.0.0"
- },
- "bin": {
- "node-which": "bin/node-which"
- },
- "engines": {
- "node": ">= 8"
- }
- },
- "node_modules/wrap-ansi": {
- "version": "8.1.0",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "ansi-styles": "^6.1.0",
- "string-width": "^5.0.1",
- "strip-ansi": "^7.0.1"
- },
- "engines": {
- "node": ">=12"
- },
- "funding": {
- "url": "https://github.com/chalk/wrap-ansi?sponsor=1"
- }
- },
- "node_modules/wrap-ansi-cjs": {
- "name": "wrap-ansi",
- "version": "7.0.0",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "ansi-styles": "^4.0.0",
- "string-width": "^4.1.0",
- "strip-ansi": "^6.0.0"
- },
- "engines": {
- "node": ">=10"
- },
- "funding": {
- "url": "https://github.com/chalk/wrap-ansi?sponsor=1"
- }
- },
- "node_modules/wrap-ansi-cjs/node_modules/ansi-regex": {
- "version": "5.0.1",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/wrap-ansi-cjs/node_modules/ansi-styles": {
- "version": "4.3.0",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "color-convert": "^2.0.1"
- },
- "engines": {
- "node": ">=8"
- },
- "funding": {
- "url": "https://github.com/chalk/ansi-styles?sponsor=1"
- }
- },
- "node_modules/wrap-ansi-cjs/node_modules/emoji-regex": {
- "version": "8.0.0",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/wrap-ansi-cjs/node_modules/string-width": {
- "version": "4.2.3",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "emoji-regex": "^8.0.0",
- "is-fullwidth-code-point": "^3.0.0",
- "strip-ansi": "^6.0.1"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/wrap-ansi-cjs/node_modules/strip-ansi": {
- "version": "6.0.1",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "ansi-regex": "^5.0.1"
- },
- "engines": {
- "node": ">=8"
- }
- }
- }
-}
diff --git a/examples/schedule-javascript/README.md b/examples/schedule-javascript-legacy/README.md
similarity index 100%
rename from examples/schedule-javascript/README.md
rename to examples/schedule-javascript-legacy/README.md
diff --git a/examples/schedule-javascript/package.json b/examples/schedule-javascript-legacy/package.json
similarity index 94%
rename from examples/schedule-javascript/package.json
rename to examples/schedule-javascript-legacy/package.json
index cdc9792..8925566 100644
--- a/examples/schedule-javascript/package.json
+++ b/examples/schedule-javascript-legacy/package.json
@@ -1,5 +1,5 @@
{
- "name": "schedule-javascript",
+ "name": "schedule-javascript-legacy",
"version": "0.0.1",
"description": "Jobber Example, Javascript Schedule",
"main": "./src/index.js",
diff --git a/examples/schedule-javascript/publish.sh b/examples/schedule-javascript-legacy/publish.sh
similarity index 76%
rename from examples/schedule-javascript/publish.sh
rename to examples/schedule-javascript-legacy/publish.sh
index 4f7d8e3..3c05060 100644
--- a/examples/schedule-javascript/publish.sh
+++ b/examples/schedule-javascript-legacy/publish.sh
@@ -9,12 +9,15 @@ npm install
# Archive essential files
zip -rv archive.zip ./package.json ./src ./node_modules
+# Get base url argument from argument, defaults to localhost:3000
+BASE_URL=${1:-http://localhost:3000}
+
# Upload to Jobber
curl \
--silent \
--show-error \
--request POST \
- --url 'http://localhost:3000/api/job/publish/' \
+ --url "$BASE_URL/api/job/publish/" \
--header 'content-type: multipart/form-data' \
--form 'archive=@archive.zip;type=application/zip'
diff --git a/examples/schedule-javascript/src/index.js b/examples/schedule-javascript-legacy/src/index.js
similarity index 100%
rename from examples/schedule-javascript/src/index.js
rename to examples/schedule-javascript-legacy/src/index.js
diff --git a/examples/schedule-javascript/package-lock.json b/examples/schedule-javascript/package-lock.json
deleted file mode 100644
index db4f3f8..0000000
--- a/examples/schedule-javascript/package-lock.json
+++ /dev/null
@@ -1,33 +0,0 @@
-{
- "name": "schedule-javascript",
- "version": "0.0.1",
- "lockfileVersion": 3,
- "requires": true,
- "packages": {
- "": {
- "name": "schedule-javascript",
- "version": "0.0.1",
- "license": "MIT",
- "devDependencies": {
- "@types/node": "^20.16.12"
- }
- },
- "node_modules/@types/node": {
- "version": "20.19.0",
- "resolved": "https://registry.npmjs.org/@types/node/-/node-20.19.0.tgz",
- "integrity": "sha512-hfrc+1tud1xcdVTABC2JiomZJEklMcXYNTVtZLAeqTVWD+qL5jkHKT+1lOtqDdGxt+mB53DTtiz673vfjU8D1Q==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "undici-types": "~6.21.0"
- }
- },
- "node_modules/undici-types": {
- "version": "6.21.0",
- "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz",
- "integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==",
- "dev": true,
- "license": "MIT"
- }
- }
-}
diff --git a/packages/common/package.json b/packages/common/package.json
new file mode 100644
index 0000000..92c6b16
--- /dev/null
+++ b/packages/common/package.json
@@ -0,0 +1,37 @@
+{
+ "name": "@jobber/common",
+ "version": "1.0.0",
+ "description": "Common utilities for Jobber Services",
+ "main": "./dist/index.js",
+ "types": "./dist/index.d.ts",
+ "exports": {
+ "./*": {
+ "types": "./dist/*",
+ "import": "./dist/*",
+ "require": "./dist/*"
+ },
+ ".": {
+ "types": "./dist/index.d.ts",
+ "import": "./dist/index.js",
+ "require": "./dist/index.js"
+ }
+ },
+ "type": "module",
+ "scripts": {
+ "build": "rimraf ./dist/* && tsc && tsc-alias -p tsconfig.json"
+ },
+ "keywords": [],
+ "author": "Eithan Hersey-Tuit",
+ "license": "MIT",
+ "dependencies": {
+ "@jobber/tcp-frame-socket": "workspace:*",
+ "zod": "3.23.8"
+ },
+ "devDependencies": {
+ "@tsconfig/node20": "^20.1.4",
+ "@types/node": "^20.16.12",
+ "rimraf": "^5.0.10",
+ "tsc-alias": "^1.8.10",
+ "typescript": "^5.6.3"
+ }
+}
diff --git a/packages/common/src/await-truthy.ts b/packages/common/src/await-truthy.ts
new file mode 100644
index 0000000..d8cd352
--- /dev/null
+++ b/packages/common/src/await-truthy.ts
@@ -0,0 +1,36 @@
+import { timeout } from "./timeout.js";
+
+/**
+ * Awaits until the callback yields true
+ */
+export const awaitTruthy = async (
+  callback: () => Promise<boolean>,
+ timeoutMs: number = 30_000
+) => {
+ let startTime = Date.now();
+
+ let index = 0;
+ while (true) {
+ if (Date.now() - startTime > timeoutMs) {
+ return false;
+ }
+
+ if (await callback()) {
+ return true;
+ }
+
+ index++;
+
+ if (index <= 10) {
+ await timeout(10);
+ }
+
+ if (index > 10 && index <= 20) {
+ await timeout(20);
+ }
+
+ if (index > 20) {
+ await timeout(100);
+ }
+ }
+};
diff --git a/packages/common/src/bouncer-base.ts b/packages/common/src/bouncer-base.ts
new file mode 100644
index 0000000..b4b2444
--- /dev/null
+++ b/packages/common/src/bouncer-base.ts
@@ -0,0 +1,291 @@
+import {
+ canPerformAction,
+ type JobberPermissionAction,
+ type JobberPermissions,
+} from "./permissions.js";
+
+export class BouncerBase {
+ private _permissions;
+
+ constructor(permissions: JobberPermissions) {
+ this._permissions = permissions;
+ }
+
+ public can(resource: string, action: JobberPermissionAction): boolean {
+ return canPerformAction(this._permissions, resource, action);
+ }
+
+ public canRead(resource: string): boolean {
+ return this.can(resource, "read");
+ }
+
+ public canWrite(resource: string): boolean {
+ return this.can(resource, "write");
+ }
+
+ public canDelete(resource: string): boolean {
+ return this.can(resource, "delete");
+ }
+
+ public canReadJob(job: { id: string }): boolean {
+ return this.can(`job/${job.id}`, "read");
+ }
+
+ public canWriteJob(job: { id: string }): boolean {
+ return this.can(`job/${job.id}`, "write");
+ }
+
+ public canDeleteJob(job: { id: string }): boolean {
+ return this.can(`job/${job.id}`, "delete");
+ }
+
+ public canReadJobEnvironment(
+ environment: { jobId: string },
+ name: string,
+ ): boolean {
+ return this.can(`job/${environment.jobId}/environment/${name}`, "read");
+ }
+
+ public canWriteJobEnvironment(
+ environment: { jobId: string },
+ name: string,
+ ): boolean {
+ return this.can(`job/${environment.jobId}/environment/${name}`, "write");
+ }
+
+ public canDeleteJobEnvironment(
+ environment: { jobId: string },
+ name: string,
+ ): boolean {
+ return this.can(`job/${environment.jobId}/environment/${name}`, "delete");
+ }
+
+ public canReadJobAction(action: { jobId: string; id: string }): boolean {
+ return this.can(`job/${action.jobId}/actions/${action.id}`, "read");
+ }
+
+ public canWriteJobAction(action: { jobId: string; id: string }): boolean {
+ return this.can(`job/${action.jobId}/actions/${action.id}`, "write");
+ }
+
+ public canDeleteJobAction(action: { jobId: string; id: string }): boolean {
+ return this.can(`job/${action.jobId}/actions/${action.id}`, "delete");
+ }
+
+ public canReadJobRunners(job: { id: string }): boolean {
+ return this.can(`job/${job.id}/runners`, "read");
+ }
+
+ public canWriteJobRunners(job: { id: string }): boolean {
+ return this.can(`job/${job.id}/runners`, "write");
+ }
+
+ public canDeleteJobRunners(job: { id: string }): boolean {
+ return this.can(`job/${job.id}/runners`, "delete");
+ }
+
+ public canReadJobStore(item: { jobId: string }): boolean {
+ return this.can(`job/${item.jobId}/store`, "read");
+ }
+
+ public canWriteJobStore(item: { jobId: string }): boolean {
+ return this.can(`job/${item.jobId}/store`, "write");
+ }
+
+ public canDeleteJobStore(item: { jobId: string }): boolean {
+ return this.can(`job/${item.jobId}/store`, "delete");
+ }
+
+ public canReadJobTriggers(trigger: { jobId: string; id: string }): boolean {
+ return this.can(`job/${trigger.jobId}/triggers/${trigger.id}`, "read");
+ }
+
+ public canWriteJobTriggers(trigger: { jobId: string; id: string }): boolean {
+ return this.can(`job/${trigger.jobId}/triggers/${trigger.id}`, "write");
+ }
+
+ public canDeleteJobTriggers(trigger: { jobId: string; id: string }): boolean {
+ return this.can(`job/${trigger.jobId}/triggers/${trigger.id}`, "delete");
+ }
+
+ public canReadJobVersion(version: { jobId: string; id: string }): boolean {
+ return this.can(`job/${version.jobId}/versions/${version.id}`, "read");
+ }
+
+ public canReadJobVersionArchive(version: {
+ jobId: string;
+ id: string;
+ }): boolean {
+ return this.can(
+ `job/${version.jobId}/versions/${version.id}/archive`,
+ "read",
+ );
+ }
+
+ public canJobPublish(): boolean {
+ return this.can(`job/-/publish`, "write");
+ }
+
+ public canReadApiTokenGenerally(): boolean {
+ return this.can(`api-tokens`, "read");
+ }
+
+ public canWriteApiTokenGenerally(): boolean {
+ return this.can(`api-tokens`, "write");
+ }
+
+ public canDeleteApiTokenGenerally(): boolean {
+ return this.can(`api-tokens`, "delete");
+ }
+
+ public canReadApiToken(token: { id: string }): boolean {
+ return this.can(`api-tokens/${token.id}`, "read");
+ }
+
+ public canWriteApiToken(token: { id: string }): boolean {
+ return this.can(`api-tokens/${token.id}`, "write");
+ }
+
+ public canDeleteApiToken(token: { id: string }): boolean {
+ return this.can(`api-tokens/${token.id}`, "delete");
+ }
+
+ public canReadSystemMetricsPrometheus(): boolean {
+ return this.can(`system/metrics/prometheus`, "read");
+ }
+
+ public canReadSystemMetricsOverview(): boolean {
+ return this.can(`system/metrics/overview`, "read");
+ }
+
+ public canReadUserGenerally(): boolean {
+ return this.can(`users`, "read");
+ }
+
+ public canWriteUserGenerally(): boolean {
+ return this.can(`users`, "write");
+ }
+
+ public canDeleteUserGenerally(): boolean {
+ return this.can(`users`, "delete");
+ }
+
+ public canReadUser(user: { id: string }): boolean {
+ return this.can(`users/${user.id}`, "read");
+ }
+
+ public canWriteUser(user: { id: string }): boolean {
+ return this.can(`users/${user.id}`, "write");
+ }
+
+ public canDeleteUser(user: { id: string }): boolean {
+ return this.can(`users/${user.id}`, "delete");
+ }
+
+ public canWriteUserUsername(user: { id: string }): boolean {
+ return this.can(`users/${user.id}/username`, "write");
+ }
+
+ public canWriteUserPassword(user: { id: string }): boolean {
+ return this.can(`users/${user.id}/password`, "write");
+ }
+
+ public canWriteUserPermissions(user: { id: string }): boolean {
+ return this.can(`users/${user.id}/permissions`, "write");
+ }
+
+ public canReadOauthServiceClientGenerally(): boolean {
+ return this.can(`oauth/service-client`, "read");
+ }
+
+ public canWriteOauthServiceClientGenerally(): boolean {
+    return this.can(`oauth/service-client`, "write");
+ }
+
+ public canReadOauthServiceClient(serviceClient: { id: string }): boolean {
+ return this.can(`oauth/service-client/${serviceClient.id}`, "read");
+ }
+
+ public canWriteOauthServiceClient(serviceClient: { id: string }): boolean {
+ return this.can(`oauth/service-client/${serviceClient.id}`, "write");
+ }
+
+ public canDeleteOauthServiceClient(serviceClient: { id: string }): boolean {
+ return this.can(`oauth/service-client/${serviceClient.id}`, "delete");
+ }
+
+ public canReadOauthSigningKeyGenerally(): boolean {
+ return this.can(`oauth/signing-key`, "read");
+ }
+
+ public canWriteOauthSigningKeyGenerally(): boolean {
+ return this.can(`oauth/signing-key`, "write");
+ }
+
+ public canReadOauthSigningKey(signingKey: { id: string }): boolean {
+ return this.can(`oauth/signing-key/${signingKey.id}`, "read");
+ }
+
+ public canWriteOauthSigningKey(signingKey: { id: string }): boolean {
+ return this.can(`oauth/signing-key/${signingKey.id}`, "write");
+ }
+
+ public canDeleteOauthSigningKey(signingKey: { id: string }): boolean {
+ return this.can(`oauth/signing-key/${signingKey.id}`, "delete");
+ }
+
+ public canReadTemplatesGenerally() {
+ return this.can(`templates`, "read");
+ }
+
+ public canReadAuditLogGenerally() {
+ return this.can(`audit-log`, "read");
+ }
+
+ /**
+ * SPECIAL: This is a special case to allow runners to publish MQTT messages
+ */
+ public canPublishMqttMessage(job: { id: string }): boolean {
+ return this.can(`special/job/${job.id}/publish-mqtt`, "write");
+ }
+
+ /**
+ * Used within runner
+ * SPECIAL: This is a special case to allow other services to read runner status
+ */
+ public canReadRunnerStatus(job: { id: string }): boolean {
+ return this.can(`special/job/${job.id}/runner-status`, "read");
+ }
+
+ /**
+ * Used within runner
+ * SPECIAL: This is a special case to allow runners to invoke HTTP events
+ */
+ public canInvokeRunnerHttpEvent(job: { id: string }): boolean {
+ return this.can(`special/job/${job.id}/invoke-http-event`, "write");
+ }
+
+ /**
+ * Used within runner
+ * SPECIAL: This is a special case to allow runners to invoke MQTT events
+ */
+ public canInvokeRunnerMqttEvent(job: { id: string }): boolean {
+ return this.can(`special/job/${job.id}/invoke-mqtt-event`, "write");
+ }
+
+ /**
+ * Used within runner
+ * SPECIAL: This is a special case to allow runners to invoke schedule events
+ */
+ public canInvokeRunnerScheduleEvent(job: { id: string }): boolean {
+ return this.can(`special/job/${job.id}/invoke-schedule-event`, "write");
+ }
+
+ /**
+ * Used by gateway
+ * SPECIAL: This enables services such as the gateway, to spawn runners for run-once jobs.
+ */
+ public canCreateSoftRunner(job: { id: string }): boolean {
+ return this.can(`special/job/${job.id}/create-soft-runner`, "write");
+ }
+}
diff --git a/packages/common/src/deferred.ts b/packages/common/src/deferred.ts
new file mode 100644
index 0000000..4a8fd44
--- /dev/null
+++ b/packages/common/src/deferred.ts
@@ -0,0 +1,13 @@
+export function deferred<T = void>() {
+ let resolve!: (value: T | PromiseLike<T>) => void;
+ let reject!: (reason?: unknown) => void;
+
+ const promise = new Promise<T>((res, rej) => {
+ resolve = res;
+ reject = rej;
+ });
+
+ return { promise, resolve, reject };
+}
+
+export type Deferred<T = void> = ReturnType<typeof deferred<T>>;
diff --git a/packages/common/src/index.ts b/packages/common/src/index.ts
new file mode 100644
index 0000000..14fdaae
--- /dev/null
+++ b/packages/common/src/index.ts
@@ -0,0 +1,3 @@
+export * from "./loop-base.js";
+export * from "./await-truthy.js";
+export * from "./timeout.js";
diff --git a/packages/common/src/loop-base.ts b/packages/common/src/loop-base.ts
new file mode 100644
index 0000000..878225d
--- /dev/null
+++ b/packages/common/src/loop-base.ts
@@ -0,0 +1,108 @@
+import assert from "node:assert";
+import { awaitTruthy } from "./await-truthy.js";
+import { timeout } from "./timeout.js";
+import EventEmitter from "node:events";
+
+/**
+ * Lifecycle:
+ * 1) neutral = default state (pre-start or stopped)
+ * 2) starting = in process of starting
+ * 3) started = active and running
+ * 4) stopping = in process of stopping
+ * 5) Once stopped, goes to neutral.
+ */
+export type StatusLifecycle = "neutral" | "starting" | "started" | "stopping";
+
+type EventEmitterEvents = {
+ neutral: [];
+ starting: [];
+ started: [];
+ stopping: [];
+};
+
+export abstract class LoopBase {
+ protected status: StatusLifecycle = "neutral";
+
+ private signal: AbortController | null = null;
+
+ protected abstract loopDuration: number;
+
+ private events = new EventEmitter<EventEmitterEvents>();
+
+ public start() {
+ return new Promise<void>(async (resolve) => {
+ assert(this.status === "neutral");
+
+ this.signal = new AbortController();
+
+ this.events.once("started", () => {
+ resolve();
+ });
+
+ this.status = "starting";
+
+ if (this.loopStarting) {
+ await this.loopStarting();
+ }
+
+ this.events.emit("starting");
+
+ this.loop();
+ });
+ }
+
+ public stop() {
+ return new Promise<void>(async (resolve) => {
+ assert(this.status === "started");
+
+ this.events.once("neutral", () => {
+ resolve();
+ });
+
+ this.status = "stopping";
+
+ this.signal?.abort();
+
+ if (this.loopClosing) {
+ await this.loopClosing();
+ }
+
+ this.events.emit("stopping");
+ });
+ }
+
+ private async loop() {
+ this.status = "started";
+
+ if (this.loopStarted) {
+ await this.loopStarted();
+ }
+
+ this.events.emit("started");
+
+ while (this.status === "started") {
+ try {
+ await this.loopIteration();
+ } catch (err) {
+ console.error(err);
+ }
+
+ await timeout(this.loopDuration, this.signal?.signal);
+ }
+
+ this.status = "neutral";
+ this.signal = null;
+
+ if (this.loopClosed) {
+ await this.loopClosed();
+ }
+
+ this.events.emit("neutral");
+ }
+
+ protected abstract loopIteration(): Promise<void>;
+ protected abstract loopClosing?(): Promise<void>;
+ protected abstract loopClosed?(): Promise<void>;
+ protected abstract loopStarting?(): Promise<void>;
+ protected abstract loopStarted?(): Promise<void>;
+}
diff --git a/packages/common/src/oauth.ts b/packages/common/src/oauth.ts
new file mode 100644
index 0000000..b20c7ff
--- /dev/null
+++ b/packages/common/src/oauth.ts
@@ -0,0 +1,48 @@
+export const getOAuthAudienceGeneralApi = () => {
+ return "jobber-api";
+};
+
+export const getOAuthAudienceRunnerApi = (runnerId: string) => {
+ return `jobber-runner:${runnerId}`;
+};
+
+export const getOAuthAudienceGatewayApi = () => {
+ return "jobber-gateway";
+};
+
+/**
+ * Checks whether a given audience matches any of the allowed audiences, supporting
+ * wildcard segments using the `*` character, split by `:`.
+ */
+export const canOAuthAccessAudience = (
+ audience: string,
+ allowedAudiences: string[],
+) => {
+ const audienceChunks = audience.split(":");
+
+ for (const allowedAudience of allowedAudiences) {
+ const allowedAudienceChunks = allowedAudience.split(":");
+
+ if (allowedAudienceChunks.length !== audienceChunks.length) {
+ continue;
+ }
+
+ let matches = true;
+
+ for (let i = 0; i < allowedAudienceChunks.length; i++) {
+ if (
+ allowedAudienceChunks[i] !== "*" &&
+ allowedAudienceChunks[i] !== audienceChunks[i]
+ ) {
+ matches = false;
+ break;
+ }
+ }
+
+ if (matches) {
+ return true;
+ }
+ }
+
+ return false;
+};
diff --git a/packages/server/src/permissions.ts b/packages/common/src/permissions.ts
similarity index 62%
rename from packages/server/src/permissions.ts
rename to packages/common/src/permissions.ts
index cf33434..756186b 100644
--- a/packages/server/src/permissions.ts
+++ b/packages/common/src/permissions.ts
@@ -1,4 +1,5 @@
import { z } from "zod";
+
export const JobberPermissionEffectSchema = z.enum(["allow", "deny"]);
export const JobberPermissionActionSchema = z.enum(["read", "write", "delete"]);
@@ -44,10 +45,83 @@ export const PERMISSION_READ_ONLY: JobberPermissions = [
},
] as const;
+export const PERMISSION_GATEWAY: JobberPermissions = [
+ {
+ effect: "allow",
+ resource: "job/*",
+ actions: ["read"],
+ },
+ {
+ effect: "allow",
+ resource: "special/job/*/runner-status",
+ actions: ["read"],
+ },
+ {
+ effect: "allow",
+ resource: "templates",
+ actions: ["read"],
+ },
+ {
+ effect: "allow",
+ resource: "special/job/*/invoke-http-event",
+ actions: ["write"],
+ },
+ {
+ effect: "allow",
+ resource: "special/job/*/create-soft-runner",
+ actions: ["write"],
+ },
+ {
+ effect: "allow",
+ resource: "job/*/runners",
+ actions: ["read", "delete"],
+ },
+ {
+ effect: "deny",
+ resource: "job/*/environment/*",
+ actions: ["read", "write", "delete"],
+ },
+ {
+ effect: "deny",
+ resource: "job/*/runners",
+ actions: ["write", "delete"],
+ },
+ {
+ effect: "deny",
+ resource: "job/*/store",
+ actions: ["read", "write", "delete"],
+ },
+ {
+ effect: "deny",
+ resource: "job/*/versions/*/archive",
+ actions: ["read", "write", "delete"],
+ },
+ {
+ effect: "deny",
+ resource: "job/*/publish",
+ actions: ["read", "write", "delete"],
+ },
+ {
+ effect: "deny",
+ resource: "api-tokens",
+ actions: ["read", "write", "delete"],
+ },
+ {
+ effect: "deny",
+ resource: "system",
+ actions: ["read", "write", "delete"],
+ },
+ {
+ effect: "deny",
+ resource: "users",
+ actions: ["read", "write", "delete"],
+ },
+] as const;
+
export const canPerformAction = (
permissions: JobberPermissions,
resource: string,
- action: JobberPermissionAction
+ action: JobberPermissionAction,
): boolean => {
// Check for deny permissions first
for (const permission of permissions) {
diff --git a/packages/common/src/timeout.ts b/packages/common/src/timeout.ts
new file mode 100644
index 0000000..8afd2de
--- /dev/null
+++ b/packages/common/src/timeout.ts
@@ -0,0 +1,27 @@
+/**
+ * Creates a promise that resolves after a timeout
+ * @param ms Time to wait in milliseconds
+ * @returns
+ */
+export const timeout = (ms: number, signal?: AbortSignal) => {
+ return new Promise<void>((resolve, reject) => {
+ if (signal?.aborted) {
+ return resolve();
+ }
+
+ const resolver = () => {
+ clearTimeout(timeoutId);
+ signal?.removeEventListener("abort", resolver);
+
+ resolve();
+ };
+
+ const timeoutId = setTimeout(() => {
+ resolver();
+ }, ms);
+
+ signal?.addEventListener("abort", () => {
+ resolver();
+ });
+ });
+};
diff --git a/packages/common/tsconfig.json b/packages/common/tsconfig.json
new file mode 100644
index 0000000..cd126f1
--- /dev/null
+++ b/packages/common/tsconfig.json
@@ -0,0 +1,22 @@
+{
+ "compilerOptions": {
+ "experimentalDecorators": true,
+ "inlineSourceMap": true,
+ "target": "ES2022",
+ "module": "NodeNext",
+ "moduleResolution": "NodeNext",
+ "strict": true,
+ "esModuleInterop": true,
+ "skipLibCheck": true,
+ "forceConsistentCasingInFileNames": true,
+ "declaration": true,
+ "types": ["node"],
+ "rootDir": "./src",
+ "outDir": "./dist",
+ "paths":{
+ "~/*": ["./src/*"]
+ }
+ },
+ "$schema": "https://json.schemastore.org/tsconfig",
+ "display": "Recommended"
+}
\ No newline at end of file
diff --git a/packages/gateway/package.json b/packages/gateway/package.json
new file mode 100644
index 0000000..67963b6
--- /dev/null
+++ b/packages/gateway/package.json
@@ -0,0 +1,45 @@
+{
+ "name": "@jobber/gateway",
+ "version": "1.0.0",
+ "description": "Jobber API Gateway",
+ "main": "./dist/index.js",
+ "type": "module",
+ "scripts": {
+ "build:image": "docker build -t jobber-gateway ./",
+ "build": "rimraf ./dist/* && tsc && tsc-alias -p tsconfig.json",
+ "start": "node ./dist/index.js",
+ "dev": "pnpm build && node --env-file-if-exists=.env ./dist/index.js"
+ },
+ "keywords": [],
+ "author": "Eithan Hersey-Tuit",
+ "license": "MIT",
+ "dependencies": {
+ "@grpc/grpc-js": "^1.14.3",
+ "@grpc/proto-loader": "^0.8.0",
+ "@hono/node-server": "^1.13.7",
+ "@jobber/common": "workspace:*",
+ "@jobber/grpc": "workspace:*",
+ "@jobber/tcp-frame-socket": "workspace:*",
+ "hono": "^4.6.11",
+ "jose": "^6.1.3",
+ "long": "^5.3.2",
+ "nice-grpc": "^2.1.14",
+ "prom-client": "^15.1.3",
+ "protobufjs": "^8.0.0",
+ "reflect-metadata": "^0.2.2",
+ "semver": "^7.6.3",
+ "tsyringe": "^4.10.0",
+ "zod": "^3.23.8"
+ },
+ "devDependencies": {
+ "@tsconfig/node20": "^20.1.4",
+ "@types/node": "^20.16.12",
+ "@types/semver": "^7.5.8",
+ "grpc-tools": "^1.13.1",
+ "rimraf": "^5.0.10",
+ "ts-proto": "^2.10.1",
+ "tsc-alias": "^1.8.10",
+ "typescript": "^5.6.3",
+ "vitest": "^3.2.4"
+ }
+}
diff --git a/packages/gateway/src/config.ts b/packages/gateway/src/config.ts
new file mode 100644
index 0000000..cec6a96
--- /dev/null
+++ b/packages/gateway/src/config.ts
@@ -0,0 +1,34 @@
+import { z } from "zod";
+
+export const ConfigurationOptionsSchema = z.object({
+ // The port the gateway will listen to traffic on
+ PORT: z.coerce.number().default(3000),
+
+ // Upstream gRPC service (for gateway -> backend)
+ GRPC_ENDPOINT: z.string().url(),
+
+ // OIDC Issuer URL
+ OIDC_ISSUER_URL: z.string().url(),
+
+ // OIDC Discovery URL (if not provided, will be derived from issuer url)
+ OIDC_DISCOVERY_URL: z.string().url().optional(),
+
+ OAUTH_CLIENT_ID: z.string().min(1),
+ OAUTH_CLIENT_SECRET: z.string().min(1),
+});
+
+export type ConfigurationOptionsSchemaType = z.infer<
+ typeof ConfigurationOptionsSchema
+>;
+
+export type ConfigurationOptions = keyof ConfigurationOptionsSchemaType;
+
+export const getConfigOption = <T extends ConfigurationOptions>(
+ option: T,
+): ConfigurationOptionsSchemaType[T] => {
+ const schema = ConfigurationOptionsSchema.shape[option];
+
+ return schema.parse(process.env[option], {
+ path: ["config", option],
+ }) as ConfigurationOptionsSchemaType[T];
+};
diff --git a/packages/gateway/src/gateway.ts b/packages/gateway/src/gateway.ts
new file mode 100644
index 0000000..9c2df56
--- /dev/null
+++ b/packages/gateway/src/gateway.ts
@@ -0,0 +1,568 @@
+import { awaitTruthy, LoopBase } from "@jobber/common";
+import {
+ Channel,
+ ChannelCredentials,
+ ClientError,
+ createChannel,
+ createClientFactory,
+ Metadata,
+ RawClient,
+ ServerError,
+ Status,
+} from "nice-grpc";
+import { Item as JobItem } from "@jobber/grpc/basics/job.js";
+import { Item as ActionItem } from "@jobber/grpc/basics/action.js";
+import { Item as TriggerItem } from "@jobber/grpc/basics/trigger.js";
+import { Item as RunnerItem } from "@jobber/grpc/basics/runner.js";
+import {
+ EventHttpRequest,
+ EventHttpRequest_Head,
+ RunnerAPIDefinition,
+} from "@jobber/grpc/runner.js";
+import { FromTsProtoServiceDefinition } from "nice-grpc/lib/service-definitions/ts-proto.js";
+import { IncomingMessage, Server, ServerResponse } from "node:http";
+import {
+ getOAuthAudienceGeneralApi,
+ getOAuthAudienceRunnerApi,
+} from "@jobber/common/oauth.js";
+import { randomUUID } from "node:crypto";
+import { getConfigOption } from "./config.js";
+import { GeneralAPIDefinition } from "@jobber/grpc/general.js";
+import assert from "node:assert";
+import { createOauth2Token } from "./oauth-client.js";
+import { ConnectivityState } from "@grpc/grpc-js/build/src/connectivity-state.js";
+
+type RunnerClient = RawClient<
+ FromTsProtoServiceDefinition<typeof RunnerAPIDefinition>
+>;
+
+type GeneralClient = RawClient<
+ FromTsProtoServiceDefinition<typeof GeneralAPIDefinition>
+>;
+
+type GrpcAuth = {
+ audience: string;
+ jwt: string;
+ expiresAt: number;
+ refreshAt: number;
+ metadata: Metadata;
+};
+
+type JobEntry = {
+ job: JobItem;
+ action: ActionItem;
+ triggers: TriggerItem[];
+ runners: RunnerItem[];
+};
+
+type RunnerConnection = {
+ jobId: string;
+ auth: GrpcAuth;
+ channel: Channel;
+ client: RunnerClient;
+};
+
+export class GatewayClient extends LoopBase {
+ protected loopDuration = 500;
+
+ protected loopStarted = undefined;
+ protected loopClosed = undefined;
+
+ private server: Server | null = null;
+
+ private grpcAuth: GrpcAuth | null = null;
+ private grpcChannel: Channel | null = null;
+ private grpcClient: GeneralClient | null = null;
+
+ /** Key: job.id */
+ private jobs = new Map<string, JobEntry>();
+
+ /** Key: runner.id */
+ private runnerGrpc = new Map<string, RunnerConnection>();
+
+ /** Key: trigger.id */
+ private triggers = new Map<string, TriggerItem>();
+
+ /** Key: template name */
+ private templates = new Map<"bad-gateway", string>();
+
+ constructor() {
+ super();
+ }
+
+ private static createAuth(
+ audience: string,
+ tokenResult: { token: string; expiresAt: number; refreshAt: number },
+ ): GrpcAuth {
+ return {
+ audience,
+ jwt: tokenResult.token,
+ expiresAt: tokenResult.expiresAt,
+ refreshAt: tokenResult.refreshAt,
+ metadata: Metadata({
+ Authorization: `Bearer ${tokenResult.token}`,
+ }),
+ };
+ }
+
+ private async refreshAuthIfNeeded(auth: GrpcAuth): Promise<boolean> {
+ if (Date.now() / 1000 < auth.refreshAt) {
+ return false;
+ }
+
+ console.log(
+ `[Gateway] Refreshing OAuth2 token for audience: ${auth.audience}`,
+ );
+ const tokenResult = await createOauth2Token(auth.audience);
+
+ auth.jwt = tokenResult.token;
+ auth.expiresAt = tokenResult.expiresAt;
+ auth.refreshAt = tokenResult.refreshAt;
+ auth.metadata.set("Authorization", `Bearer ${tokenResult.token}`);
+
+ return true;
+ }
+
+ private async refreshRunnerTokens() {
+ for (const connection of this.runnerGrpc.values()) {
+ await this.refreshAuthIfNeeded(connection.auth);
+ }
+ }
+
+ protected async loopStarting() {
+ const audience = getOAuthAudienceGeneralApi();
+ const tokenResult = await createOauth2Token(audience);
+
+ this.grpcAuth = GatewayClient.createAuth(audience, tokenResult);
+
+ this.grpcChannel = createChannel(
+ getConfigOption("GRPC_ENDPOINT"),
+ ChannelCredentials.createInsecure(),
+ );
+ this.grpcClient = createClientFactory().create(
+ GeneralAPIDefinition,
+ this.grpcChannel,
+ { "*": { metadata: this.grpcAuth.metadata } },
+ );
+
+ // Force a loop iteration to ensure routes are ready
+ await this.loopIteration();
+
+ this.server = new Server();
+ this.server.listen(getConfigOption("PORT"));
+ this.server.on("request", (req, res) => this.handleHttpRequest(req, res));
+ }
+
+ protected async loopClosing() {
+ await new Promise((resolve, reject) =>
+ this.server?.close((err) => (err ? reject(err) : resolve(true))),
+ );
+
+ for (const connection of this.runnerGrpc.values()) {
+ connection.channel.close();
+ }
+ this.runnerGrpc.clear();
+ this.triggers.clear();
+ this.jobs.clear();
+
+ this.grpcChannel?.close();
+ this.grpcChannel = null;
+ this.grpcClient = null;
+ this.grpcAuth = null;
+ }
+
+ protected async loopIteration() {
+ assert(this.grpcClient);
+ assert(this.grpcAuth);
+
+ // Refresh tokens if they are approaching expiry
+ await this.refreshAuthIfNeeded(this.grpcAuth);
+ await this.refreshRunnerTokens();
+
+ // Fetch enabled jobs
+ const jobs = (await this.grpcClient.getJobs({})).jobs.filter(
+ (job) => job.status === "ENABLED" && job.versionId,
+ );
+
+ // Remove jobs that no longer exist
+ const activeJobIds = new Set(jobs.map((job) => job.id));
+
+ for (const [id, data] of this.jobs) {
+ if (!activeJobIds.has(id)) {
+ this.handleJobRemoval(data.job);
+ }
+ }
+
+ // Add or update existing jobs
+ await Promise.all(jobs.map((job) => this.handleJobUpdate(job)));
+
+ // fetch templates
+ await this.handleFetchTemplates();
+ }
+
+ private async handleJobUpdate(job: JobItem) {
+ assert(this.grpcClient);
+
+ // Fetch triggers, action, and runners in parallel
+ const [{ triggers }, { action }, { runners }] = await Promise.all([
+ this.grpcClient.getJobTriggersLatest({ jobId: job.id }),
+ this.grpcClient.getJobActionLatest({ jobId: job.id }),
+ this.grpcClient.getRunners({
+ jobId: job.id,
+ status: "READY",
+ versionId: job.versionId,
+ }),
+ ]);
+
+ if (!action) {
+ console.log(`[Gateway] Job ${job.id} has no action, skipping`);
+ return;
+ }
+
+ const readyRunners = runners.filter((runner) => runner.readyAt !== null);
+ const previous = this.jobs.get(job.id);
+
+ // Clean up gRPC connections for runners that are no longer active
+ const activeRunnerIds = new Set(readyRunners.map((r) => r.id));
+
+ for (const [runnerId, connection] of this.runnerGrpc) {
+ if (connection.jobId === job.id && !activeRunnerIds.has(runnerId)) {
+ connection.channel.close();
+ this.runnerGrpc.delete(runnerId);
+ }
+ }
+
+ // Create gRPC connections for new runners
+ for (const runner of readyRunners) {
+ if (this.runnerGrpc.has(runner.id)) {
+ continue;
+ }
+
+ const audience = getOAuthAudienceRunnerApi(runner.id);
+ const tokenResult = await createOauth2Token(audience);
+ const auth = GatewayClient.createAuth(audience, tokenResult);
+
+ const channel = createChannel(
+ `http://${runner.properties?.runnerContainerName}:${runner.properties?.runnerApiPort}`,
+ ChannelCredentials.createInsecure(),
+ {
+ "grpc.keepalive_permit_without_calls": 1,
+ "grpc.keepalive_timeout_ms": 30_000,
+ },
+ );
+
+ const client = createClientFactory().create(
+ RunnerAPIDefinition,
+ channel,
+ { "*": { metadata: auth.metadata } },
+ );
+
+ this.runnerGrpc.set(runner.id, { jobId: job.id, auth, channel, client });
+ }
+
+ // Remove triggers that no longer exist, then upsert current ones
+ if (previous) {
+ const currentTriggerIds = new Set(triggers.map((t) => t.id));
+
+ for (const old of previous.triggers) {
+ if (!currentTriggerIds.has(old.id)) {
+ this.triggers.delete(old.id);
+ }
+ }
+ }
+
+ for (const trigger of triggers) {
+ this.triggers.set(trigger.id, trigger);
+ }
+
+ this.jobs.set(job.id, { job, action, triggers, runners: readyRunners });
+ }
+
+ private handleJobRemoval(job: JobItem) {
+ for (const [triggerId, trigger] of this.triggers) {
+ if (trigger.jobId === job.id) {
+ this.triggers.delete(triggerId);
+ }
+ }
+
+ for (const [runnerId, connection] of this.runnerGrpc) {
+ if (connection.jobId === job.id) {
+ connection.channel.close();
+ this.runnerGrpc.delete(runnerId);
+ }
+ }
+
+ this.jobs.delete(job.id);
+ }
+
+ private async handleFetchTemplates() {
+ if (!this.grpcClient) {
+ return;
+ }
+
+ const templates = await this.grpcClient.getTemplates({});
+
+ this.templates.set("bad-gateway", templates.templateBadGateway);
+ }
+
+ private async getRunner(entry: JobEntry) {
+ assert(this.grpcClient);
+
+ if (entry.action.runnerMode === "STANDARD" && entry.runners.length >= 1) {
+ const runner =
+ entry.runners[Math.floor(Math.random() * entry.runners.length)];
+
+ const grpc = this.runnerGrpc.get(runner.id);
+ if (grpc) {
+ const state = grpc.channel.getConnectivityState(false);
+
+ if (
+ state === ConnectivityState.READY ||
+ state === ConnectivityState.IDLE ||
+ state === ConnectivityState.CONNECTING
+ ) {
+ return runner;
+ }
+ }
+ }
+
+ try {
+ const { runner } = await this.grpcClient.createSoftRunner({
+ jobId: entry.job.id,
+ actionId: entry.action.id,
+ versionId: entry.job.versionId,
+ });
+
+ if (!runner) {
+ return null;
+ }
+
+ await awaitTruthy(async () => {
+ const grpc = this.runnerGrpc.get(runner.id);
+ if (!grpc) {
+ return false;
+ }
+
+ const state = grpc.channel.getConnectivityState(false);
+ return (
+ state === ConnectivityState.READY ||
+ state === ConnectivityState.IDLE ||
+ state === ConnectivityState.CONNECTING
+ );
+ }, 30_000);
+
+ return runner;
+ } catch (err) {
+ if (err instanceof ClientError) {
+ console.warn(
+ `Failed to create soft-create runner for job ${entry.job.id}: ${err.message}`,
+ );
+
+ return null;
+ }
+
+ throw err;
+ }
+ }
+
+ private async handleHttpRequest(req: IncomingMessage, res: ServerResponse) {
+ if (this.status !== "started") {
+ res.statusCode = 503;
+ res.setHeader("Content-Type", "text/html");
+ res.end(this.templates.get("bad-gateway"));
+ return;
+ }
+
+ const trigger = this.matchTrigger(req);
+
+ if (!trigger?.http || !this.jobs.has(trigger.jobId)) {
+ res.statusCode = 502;
+ res.setHeader("Content-Type", "text/html");
+ res.end(this.templates.get("bad-gateway"));
+ return;
+ }
+
+ const entry = this.jobs.get(trigger.jobId)!;
+
+ const runner = await this.getRunner(entry);
+
+ if (!runner) {
+ res.statusCode = 502;
+ res.setHeader("Content-Type", "text/html");
+ res.end(this.templates.get("bad-gateway"));
+ return;
+ }
+
+ const connection = this.runnerGrpc.get(runner.id);
+ if (!connection) {
+ res.statusCode = 502;
+ res.setHeader("Content-Type", "text/html");
+ res.end(this.templates.get("bad-gateway"));
+ return;
+ }
+
+ try {
+ let hasStartedResponding = false;
+ let hasTimeout = false;
+
+ const timeoutHandle = setTimeout(() => {
+ if (hasStartedResponding) {
+ return;
+ }
+
+ // Only timeout when the runner hasn't started responding
+ hasTimeout = true;
+ console.warn(
+ `[Gateway] HTTP request to runner ${runner.id} timed out after 30 seconds without response. Closing connection.`,
+ );
+ }, entry.action.runnerTimeout * 1000);
+
+ const response = connection.client.eventHttp(
+ this.buildHttpRequestStream(req, trigger),
+ );
+
+ for await (const event of response) {
+ if (hasTimeout) {
+ break;
+ }
+
+ hasStartedResponding = true;
+
+ if (event.head) {
+ res.statusCode = event.head.status;
+ for (const header of event.head.headers) {
+ res.setHeader(header.name, header.value);
+ }
+ }
+
+ if (event.body) {
+ res.write(event.body.data);
+ if (event.body.end) {
+ res.end();
+ }
+ }
+ }
+
+ clearTimeout(timeoutHandle);
+ } catch (err) {
+ console.error(
+ `[Gateway] Error proxying request to runner ${runner.id}:`,
+ err,
+ );
+
+ if (!res.headersSent) {
+ res.statusCode = 502;
+ res.setHeader("Content-Type", "text/html");
+ res.end(this.templates.get("bad-gateway"));
+ } else if (!res.writableEnded) {
+ res.end();
+ }
+ } finally {
+ if (entry.action.runnerMode === "RUN_ONCE") {
+ await this.grpcClient?.deleteRunner({ runnerId: runner.id });
+ }
+ }
+ }
+
+ private async *buildHttpRequestStream(
+ req: IncomingMessage,
+ trigger: TriggerItem,
+ ): AsyncIterable<EventHttpRequest> {
+ const headers: EventHttpRequest_Head["headers"] = [];
+
+ for (const [key, value] of Object.entries(req.headers)) {
+ if (Array.isArray(value)) {
+ for (const v of value) {
+ headers.push({ name: key, value: v });
+ }
+ } else if (value !== undefined) {
+ headers.push({ name: key, value });
+ }
+ }
+
+ let path = "";
+ let query = "";
+
+ if (req.url) {
+ const qPos = req.url.indexOf("?");
+ if (qPos >= 0) {
+ path = req.url.substring(0, qPos);
+ query = req.url.substring(qPos + 1);
+ } else {
+ path = req.url;
+ }
+ }
+
+ yield { info: { triggerName: trigger.http!.name ?? "" } };
+
+ yield {
+ head: {
+ id: randomUUID(),
+ scheme: "http",
+ method: req.method || "GET",
+ hostname: req.headers["host"] || "",
+ headers,
+ query,
+ path,
+ },
+ };
+
+ let seq = 0;
+
+ for await (const chunk of req) {
+ yield {
+ body: { id: randomUUID(), seq: seq++, data: chunk, end: false },
+ };
+ }
+
+ yield {
+ body: {
+ id: randomUUID(),
+ seq: seq++,
+ data: new Uint8Array(0),
+ end: true,
+ },
+ };
+ }
+
+ private matchTrigger(req: IncomingMessage): TriggerItem | null {
+ const host = req.headers["host"];
+ const method = req.method;
+
+ if (!host || !method || !req.url) {
+ return null;
+ }
+
+ const qPos = req.url.indexOf("?");
+ const path = qPos >= 0 ? req.url.substring(0, qPos) : req.url;
+
+ for (const trigger of this.triggers.values()) {
+ if (!trigger.http) {
+ continue;
+ }
+
+ if (trigger.http.hostname && trigger.http.hostname !== host) {
+ continue;
+ }
+
+ if (trigger.http.method && trigger.http.method !== method) {
+ continue;
+ }
+
+ if (trigger.http.path) {
+ if (trigger.http.path.startsWith("^")) {
+ const regex = new RegExp(trigger.http.path);
+ if (!regex.test(path)) {
+ continue;
+ }
+ } else if (trigger.http.path !== path) {
+ continue;
+ }
+ }
+
+ return trigger;
+ }
+
+ return null;
+ }
+}
diff --git a/packages/gateway/src/index.ts b/packages/gateway/src/index.ts
new file mode 100644
index 0000000..04ab40a
--- /dev/null
+++ b/packages/gateway/src/index.ts
@@ -0,0 +1,28 @@
+import "reflect-metadata";
+import { GatewayClient } from "./gateway.js";
+
+async function main() {
+ console.log("Starting Gateway Management Client...");
+
+ console.log("Starting Gateway...");
+ const gateway = new GatewayClient();
+ await gateway.start();
+ console.log("Gateway Management Client started");
+
+ const shutdown = async () => {
+ console.log("Stopping Gateway...");
+ await gateway.stop();
+ console.log("stopped");
+
+ process.exit(0);
+ };
+
+ process.once("SIGTERM", async () => {
+ await shutdown();
+ });
+ process.once("SIGINT", async () => {
+ await shutdown();
+ });
+}
+
+main();
diff --git a/packages/gateway/src/oauth-client.ts b/packages/gateway/src/oauth-client.ts
new file mode 100644
index 0000000..6faab88
--- /dev/null
+++ b/packages/gateway/src/oauth-client.ts
@@ -0,0 +1,97 @@
+import { getConfigOption } from "./config.js";
+import { createRemoteJWKSet, decodeJwt } from "jose";
+
+function getDiscoveryUrl(): string {
+ if (getConfigOption("OIDC_DISCOVERY_URL")) {
+ return getConfigOption("OIDC_DISCOVERY_URL")!;
+ }
+
+ const issuerUrl = getConfigOption("OIDC_ISSUER_URL");
+
+ return `${issuerUrl.replace(/\/+$/, "")}/.well-known/openid-configuration`;
+}
+
+async function getOAuthDiscovery() {
+ const response = await fetch(getDiscoveryUrl());
+
+ if (!response.ok) {
+ throw new Error(
+ `Failed to fetch OIDC discovery document: ${response.status} ${response.statusText}`,
+ );
+ }
+
+ return (await response.json()) as {
+ issuer: string;
+ token_endpoint: string;
+ jwks_uri: string;
+ token_endpoint_auth_methods_supported: string[];
+ };
+}
+
+async function getRemoteJwks() {
+ const discovery = await getOAuthDiscovery();
+
+ return createRemoteJWKSet(new URL(discovery.jwks_uri), {
+ cacheMaxAge: 5 * 60 * 1000,
+ });
+}
+
+export async function createOauth2Token(audience: string) {
+ const discovery = await getOAuthDiscovery();
+
+ if (
+ !discovery.token_endpoint_auth_methods_supported.includes(
+ "client_secret_basic",
+ )
+ ) {
+ throw new Error(
+ "OIDC provider does not support client_secret_basic authentication",
+ );
+ }
+
+ const params = new URLSearchParams();
+ params.append("grant_type", "client_credentials");
+ params.append("client_id", getConfigOption("OAUTH_CLIENT_ID"));
+ params.append("client_secret", getConfigOption("OAUTH_CLIENT_SECRET"));
+ params.append("audience", audience);
+
+ const response = await fetch(discovery.token_endpoint, {
+ method: "POST",
+ headers: {
+ "Content-Type": "application/x-www-form-urlencoded",
+ },
+ body: params.toString(),
+ });
+
+ if (!response.ok) {
+ console.error(
+ `Client credentials request failed ${response.status}: ${await response.text()}`,
+ );
+
+ throw new Error(
+ `Failed to fetch OIDC token: ${response.status} ${response.statusText}`,
+ );
+ }
+
+ const data = await response.json();
+
+ if (typeof data.access_token !== "string") {
+ throw new Error("OIDC token response does not contain access_token");
+ }
+
+ if (!data.token_type || data.token_type.toLowerCase() !== "bearer") {
+ throw new Error("OIDC token response does not contain bearer token");
+ }
+
+ const decoded = decodeJwt(data.access_token);
+
+ if (!decoded.exp) {
+ throw new Error("OIDC token does not contain exp claim");
+ }
+
+ return {
+ token: data.access_token,
+ expiresAt: decoded.exp,
+ refreshAt: decoded.exp - 60,
+ };
+}
diff --git a/packages/gateway/tsconfig.json b/packages/gateway/tsconfig.json
new file mode 100644
index 0000000..6ffe955
--- /dev/null
+++ b/packages/gateway/tsconfig.json
@@ -0,0 +1,22 @@
+{
+ "compilerOptions": {
+ "experimentalDecorators": true,
+ "inlineSourceMap": true,
+ "target": "ES2022",
+ "module": "NodeNext",
+ "moduleResolution": "NodeNext",
+ "strict": true,
+ "esModuleInterop": true,
+ "skipLibCheck": true,
+ "forceConsistentCasingInFileNames": true,
+ "declaration": false,
+ "types": ["node"],
+ "rootDir": "./src",
+ "outDir": "./dist",
+ "paths":{
+ "~/*": ["./src/*"]
+ }
+ },
+ "$schema": "https://json.schemastore.org/tsconfig",
+ "display": "Recommended"
+}
\ No newline at end of file
diff --git a/packages/grpc/package.json b/packages/grpc/package.json
new file mode 100644
index 0000000..42b35cd
--- /dev/null
+++ b/packages/grpc/package.json
@@ -0,0 +1,37 @@
+{
+ "name": "@jobber/grpc",
+ "version": "1.0.0",
+ "description": "gRPC Definitions for Jobber",
+ "main": "./dist/index.js",
+ "types": "./dist/index.d.ts",
+ "exports": {
+ ".": "./dist/index.js",
+ "./*": "./dist/*"
+ },
+ "type": "module",
+ "scripts": {
+ "grpc": "rm -rf dist 2>/dev/null || : && protoc --plugin=protoc-gen-ts_proto=./node_modules/.bin/protoc-gen-ts_proto --ts_proto_opt=importSuffix=.js --ts_proto_out=./src --ts_proto_opt=outputServices=nice-grpc,outputServices=generic-definitions,useExactTypes=false,stringEnums=true,esModuleInterop=true,enumsAsLiterals=true,outputDefaultValues=true --descriptor_set_out=src/protoset.bin --include_imports --proto_path=./proto/ ./proto/*.proto ./proto/**/*.proto",
+ "build": "pnpm grpc && tsc"
+ },
+ "keywords": [],
+ "author": "Eithan Hersey-Tuit",
+ "license": "MIT",
+ "dependencies": {
+ "@bufbuild/protobuf": "^2.10.2",
+ "@grpc/grpc-js": "^1.14.3",
+ "@grpc/proto-loader": "^0.8.0",
+ "long": "^5.3.2",
+ "nice-grpc": "^2.1.14",
+ "nice-grpc-common": "^2.0.2",
+ "prom-client": "^15.1.3",
+ "protobufjs": "^8.0.0"
+ },
+ "devDependencies": {
+ "@tsconfig/node20": "^20.1.4",
+ "@types/node": "^20.16.12",
+ "grpc-tools": "^1.13.1",
+ "ts-proto": "^2.10.1",
+ "tsc-alias": "^1.8.10",
+ "typescript": "^5.6.3"
+ }
+}
diff --git a/packages/grpc/proto/base.proto b/packages/grpc/proto/base.proto
new file mode 100644
index 0000000..afc4441
--- /dev/null
+++ b/packages/grpc/proto/base.proto
@@ -0,0 +1,11 @@
+syntax = "proto3";
+
+package Base;
+
+message Empty {}
+
+message ExportChunk {
+ string id = 1;
+ int64 sequence = 2;
+ bytes data = 3;
+}
diff --git a/packages/grpc/proto/basics/action.proto b/packages/grpc/proto/basics/action.proto
new file mode 100644
index 0000000..105a1f5
--- /dev/null
+++ b/packages/grpc/proto/basics/action.proto
@@ -0,0 +1,49 @@
+syntax = "proto3";
+
+package Action;
+
+message Item {
+ message DockerArguments {
+ message Volume {
+ enum VolumeMode {
+ READ_ONLY = 0;
+ READ_WRITE = 1;
+ }
+
+ string source = 1;
+ string target = 2;
+ VolumeMode mode = 3;
+ }
+
+ message Label {
+ string key = 1;
+ string value = 2;
+ }
+
+ repeated string networks = 1;
+ repeated Volume volumes = 2;
+ repeated Label labels = 3;
+ optional string memoryLimit = 4;
+ repeated string directPassthroughArguments = 5;
+ }
+
+ enum RunnerMode {
+ STANDARD = 0;
+ RUN_ONCE = 1;
+ }
+
+ string id = 1;
+ string jobId = 2;
+ string versionId = 3;
+
+ string runnerImage = 4;
+ bool runnerAsynchronous = 5;
+ uint32 runnerMinCount = 6;
+ uint32 runnerMaxCount = 7;
+ uint32 runnerTimeout = 8;
+ uint32 runnerMaxIdleAge = 9;
+ uint32 runnerMaxAge = 10;
+ uint32 runnerMaxAgeHard = 11;
+ DockerArguments dockerArguments = 12;
+ RunnerMode runnerMode = 13;
+}
diff --git a/packages/grpc/proto/basics/api-token.proto b/packages/grpc/proto/basics/api-token.proto
new file mode 100644
index 0000000..ae3659b
--- /dev/null
+++ b/packages/grpc/proto/basics/api-token.proto
@@ -0,0 +1,23 @@
+syntax = "proto3";
+
+import "basics/common.proto";
+
+package ApiToken;
+
+message Item {
+ enum Status {
+ ENABLED = 0;
+ DISABLED = 1;
+ }
+
+ string id = 1;
+ string token = 2;
+ string userId = 3;
+ string description = 4;
+ Status status = 5;
+
+ repeated Common.Permission permissions = 6;
+
+ string expires = 7;
+ string created = 8;
+}
diff --git a/packages/grpc/proto/basics/common.proto b/packages/grpc/proto/basics/common.proto
new file mode 100644
index 0000000..53d691a
--- /dev/null
+++ b/packages/grpc/proto/basics/common.proto
@@ -0,0 +1,20 @@
+syntax = "proto3";
+
+package Common;
+
+message Permission {
+ enum Effect {
+ ALLOW = 0;
+ DENY = 1;
+ }
+
+ enum Action {
+ READ = 0;
+ WRITE = 1;
+ DELETE = 2;
+ }
+
+ Effect effect = 1;
+ string resource = 2;
+ repeated Action actions = 3;
+}
\ No newline at end of file
diff --git a/packages/grpc/proto/basics/environment.proto b/packages/grpc/proto/basics/environment.proto
new file mode 100644
index 0000000..73a0fbc
--- /dev/null
+++ b/packages/grpc/proto/basics/environment.proto
@@ -0,0 +1,24 @@
+syntax = "proto3";
+
+import "basics/common.proto";
+
+package Environment;
+
+message Item {
+ message ContextValue {
+ enum Type {
+ TEXT = 0;
+ SECRET = 1;
+ }
+
+ Type type = 1;
+ optional string value = 2;
+ }
+
+ string id = 1;
+ string jobId = 2;
+
+ map<string, ContextValue> context = 3;
+
+ string modified = 4;
+}
diff --git a/packages/grpc/proto/basics/job-version.proto b/packages/grpc/proto/basics/job-version.proto
new file mode 100644
index 0000000..26c74e3
--- /dev/null
+++ b/packages/grpc/proto/basics/job-version.proto
@@ -0,0 +1,13 @@
+syntax = "proto3";
+
+import "basics/common.proto";
+
+package JobVersion;
+
+message Item {
+ string id = 1;
+ string jobId = 2;
+ string version = 3;
+ string modified = 4;
+ string created = 5;
+}
diff --git a/packages/grpc/proto/basics/job.proto b/packages/grpc/proto/basics/job.proto
new file mode 100644
index 0000000..2ad3de3
--- /dev/null
+++ b/packages/grpc/proto/basics/job.proto
@@ -0,0 +1,22 @@
+syntax = "proto3";
+
+package Job;
+
+message Item {
+ message Link {
+ string name = 1;
+ string url = 2;
+ }
+
+ enum Status {
+ ENABLED = 0;
+ DISABLED = 1;
+ }
+
+ string id = 1;
+ string jobName = 2;
+ Status status = 3;
+ optional string description = 4;
+ optional string versionId = 5;
+ repeated Link links = 6;
+}
diff --git a/packages/grpc/proto/basics/runner.proto b/packages/grpc/proto/basics/runner.proto
new file mode 100644
index 0000000..b50d3ed
--- /dev/null
+++ b/packages/grpc/proto/basics/runner.proto
@@ -0,0 +1,34 @@
+syntax = "proto3";
+
+package Runner;
+
+message Item {
+ enum Status {
+ STARTING = 0;
+ READY = 1;
+ CLOSING = 2;
+ CLOSED = 3;
+ }
+
+ message Properties {
+ string runnerPid = 1;
+ reserved 2;
+ string runnerContainerName = 3;
+ repeated string runnerContainerNetworks = 4;
+ uint32 runnerApiPort = 5;
+ bool runnerDebug = 6;
+ }
+
+ string id = 1;
+ string jobId = 2;
+ string actionId = 3;
+ string versionId = 4;
+ Properties properties = 5;
+
+ reserved 6 to 10;
+
+ string createdAt = 11;
+ optional string readyAt = 12;
+ optional string closingAt = 13;
+ optional string closedAt = 14;
+}
diff --git a/packages/grpc/proto/basics/trigger.proto b/packages/grpc/proto/basics/trigger.proto
new file mode 100644
index 0000000..d22ba40
--- /dev/null
+++ b/packages/grpc/proto/basics/trigger.proto
@@ -0,0 +1,56 @@
+syntax = "proto3";
+
+import "basics/common.proto";
+
+package Trigger;
+
+message Item {
+ message TriggerSchedule {
+ optional string name = 1;
+ string cron = 2;
+ optional string timezone = 3;
+ }
+
+ message TriggerHttp {
+ optional string name = 1;
+ optional string hostname = 2;
+ optional string method = 3;
+ optional string path = 4;
+ }
+
+ message TriggerMqtt {
+ message Connection {
+ optional string protocol = 1;
+ optional string protocolVariable = 2;
+
+ optional string port = 3;
+ optional string portVariable = 4;
+
+ optional string host = 5;
+ optional string hostVariable = 6;
+
+ optional string username = 7;
+ optional string usernameVariable = 8;
+
+ optional string password = 9;
+ optional string passwordVariable = 10;
+
+ optional string clientId = 11;
+ optional string clientIdVariable = 12;
+ }
+
+ optional string name = 1;
+ repeated string topics = 2;
+ Connection connection = 3;
+ }
+
+ string id = 1;
+ string jobId = 2;
+ string versionId = 3;
+
+ oneof context {
+ TriggerSchedule schedule = 4;
+ TriggerHttp http = 5;
+ TriggerMqtt mqtt = 6;
+ }
+}
diff --git a/packages/grpc/proto/gateway.proto b/packages/grpc/proto/gateway.proto
new file mode 100644
index 0000000..80d31f8
--- /dev/null
+++ b/packages/grpc/proto/gateway.proto
@@ -0,0 +1,9 @@
+syntax = "proto3";
+
+import "base.proto";
+
+package GatewayAPI;
+
+service GatewayAPI {
+//
+}
diff --git a/packages/grpc/proto/general.proto b/packages/grpc/proto/general.proto
new file mode 100644
index 0000000..bb6a25b
--- /dev/null
+++ b/packages/grpc/proto/general.proto
@@ -0,0 +1,248 @@
+syntax = "proto3";
+
+import "base.proto";
+import "basics/action.proto";
+import "basics/api-token.proto";
+import "basics/environment.proto";
+import "basics/job-version.proto";
+import "basics/job.proto";
+import "basics/runner.proto";
+import "basics/trigger.proto";
+
+package GeneralAPI;
+
+service GeneralAPI {
+ rpc getJob(JobRequest) returns (JobResponse);
+ rpc getJobs(JobsRequest) returns (JobsResponse);
+
+ rpc getJobVersion(JobVersionRequest) returns (JobVersionResponse);
+ rpc getJobVersionLatest(JobVersionLatestRequest) returns (JobVersionLatestResponse);
+ rpc getJobVersions(JobVersionsRequest) returns (JobVersionsResponse);
+ rpc getJobVersionArchive(JobVersionArchiveRequest) returns (stream JobVersionArchiveResponse);
+
+ rpc getJobAction(JobActionRequest) returns (JobActionResponse);
+ rpc getJobActionLatest(JobActionLatestRequest) returns (JobActionLatestResponse);
+ rpc getJobActions(JobActionsRequest) returns (JobActionsResponse);
+
+ rpc getJobTrigger(JobTriggerRequest) returns (JobTriggerResponse);
+ rpc getJobTriggers(JobTriggersRequest) returns (JobTriggersResponse);
+ rpc getJobTriggersLatest(JobTriggersLatestRequest) returns (JobTriggersLatestResponse);
+
+ rpc getRunner(RunnerRequest) returns (RunnerResponse);
+ rpc getRunners(RunnersRequest) returns (RunnersResponse);
+ rpc deleteRunner(DeleteRunnerRequest) returns (Base.Empty);
+
+ rpc getStoreItem(StoreItemRequest) returns (StoreItemResponse);
+ rpc setStoreItem(SetStoreItemRequest) returns (SetStoreItemResponse);
+ rpc deleteStoreItem(DeleteStoreItemRequest) returns (DeleteStoreItemResponse);
+
+
+ // This will likely be migrated to its own service similar to the GatewayAPI
+ rpc publishMqttMessage(PublishMqttMessageRequest) returns (PublishMqttMessageResponse);
+
+ rpc createSoftRunner(CreateSoftRunnerRequest) returns (CreateSoftRunnerResponse);
+
+ rpc getTemplates (TemplatesRequest) returns (TemplatesResponse);
+}
+
+/** getJob **/
+message JobRequest {
+ string jobId = 1;
+}
+message JobResponse {
+ Job.Item job = 1;
+}
+
+
+/** getJobs **/
+message JobsRequest {}
+message JobsResponse {
+ repeated Job.Item jobs = 1;
+}
+
+
+/** getJobVersion **/
+message JobVersionRequest {
+ string jobVersionId = 1;
+}
+message JobVersionResponse {
+ JobVersion.Item jobVersion = 1;
+}
+
+
+/** getJobVersionLatest **/
+message JobVersionLatestRequest {
+ string jobId = 1;
+}
+message JobVersionLatestResponse {
+ JobVersion.Item jobVersion = 1;
+}
+
+
+/** getJobVersions **/
+message JobVersionsRequest {
+ string jobId = 1;
+}
+message JobVersionsResponse {
+ repeated JobVersion.Item jobVersions = 1;
+}
+
+
+/** getJobVersionArchive **/
+message JobVersionArchiveRequest {
+ string jobId = 1;
+ string jobVersionId = 2;
+}
+message JobVersionArchiveResponse {
+ uint64 seq = 1;
+ bytes data = 2;
+ bool end = 3;
+}
+
+
+/** getJobAction **/
+message JobActionRequest {
+ string jobId = 1;
+ string actionId = 2;
+}
+message JobActionResponse {
+ Action.Item action = 1;
+}
+
+
+/** getJobActionLatest **/
+message JobActionLatestRequest {
+ string jobId = 1;
+}
+message JobActionLatestResponse {
+ Action.Item action = 1;
+}
+
+
+/** getJobActions **/
+message JobActionsRequest {
+ string jobId = 1;
+ optional string versionId = 2;
+}
+message JobActionsResponse {
+ repeated Action.Item actions = 1;
+}
+
+
+/** getJobTrigger **/
+message JobTriggerRequest {
+ string jobId = 1;
+ string triggerId = 2;
+}
+message JobTriggerResponse {
+ Trigger.Item trigger = 1;
+}
+
+
+/** getJobTriggers **/
+message JobTriggersRequest {
+ string jobId = 1;
+ optional string versionId = 2;
+}
+message JobTriggersResponse {
+ repeated Trigger.Item triggers = 1;
+}
+
+
+/** getJobTriggersLatest **/
+message JobTriggersLatestRequest {
+ string jobId = 1;
+}
+message JobTriggersLatestResponse {
+ repeated Trigger.Item triggers = 1;
+}
+
+
+/** getRunner **/
+message RunnerRequest {
+ string runnerId = 1;
+}
+message RunnerResponse {
+ Runner.Item runner = 1;
+}
+
+
+/** getRunners **/
+message RunnersRequest {
+ optional string jobId = 1;
+ optional string versionId = 2;
+ optional string actionId = 3;
+ optional Runner.Item.Status status = 4;
+}
+message RunnersResponse {
+ repeated Runner.Item runners = 1;
+}
+
+
+/** deleteRunner **/
+message DeleteRunnerRequest {
+ string runnerId = 1;
+}
+
+
+/** getStoreItem **/
+message StoreItemRequest {
+ string jobId = 1;
+ string key = 2;
+}
+message StoreItemResponse {
+ string key = 1;
+ string value = 2;
+}
+
+
+/** setStoreItem **/
+message SetStoreItemRequest {
+ string jobId = 1;
+ string key = 2;
+ string value = 3;
+ optional uint32 ttl = 4;
+}
+message SetStoreItemResponse {
+ string key = 1;
+ string value = 2;
+}
+
+
+/** deleteStoreItem **/
+message DeleteStoreItemRequest {
+ string jobId = 1;
+ string key = 2;
+}
+message DeleteStoreItemResponse {
+ string key = 1;
+}
+
+
+/** publishMqttMessage **/
+message PublishMqttMessageRequest {
+ string jobId = 1;
+ string topic = 2;
+ string payload = 3;
+}
+message PublishMqttMessageResponse {}
+
+
+/** createRunner **/
+message CreateSoftRunnerRequest {
+ string jobId = 1;
+ string versionId = 2;
+ string actionId = 3;
+}
+message CreateSoftRunnerResponse {
+ Runner.Item runner = 1;
+}
+
+
+/** getTemplates **/
+message TemplatesRequest {}
+message TemplatesResponse {
+ string templateBadGateway = 1;
+
+ reserved 2 to 20;
+}
\ No newline at end of file
diff --git a/packages/grpc/proto/runner.proto b/packages/grpc/proto/runner.proto
new file mode 100644
index 0000000..11c1175
--- /dev/null
+++ b/packages/grpc/proto/runner.proto
@@ -0,0 +1,116 @@
+syntax = "proto3";
+
+import "base.proto";
+
+package RunnerAPI;
+
+service RunnerAPI {
+ rpc eventHttp(stream EventHttpRequest) returns (stream EventHttpResponse);
+ rpc eventMqtt(EventMqttRequest) returns (EventMqttResponse);
+ rpc eventSchedule(EventScheduleRequest) returns (EventScheduleResponse);
+
+ rpc status(Base.Empty) returns (StatusResponse);
+}
+
+message BasicContext {
+ string triggerName = 1;
+ reserved 2 to 10;
+}
+
+message HttpHeader {
+ string name = 1;
+ string value = 2;
+}
+
+message EventHttpRequest {
+ message Head {
+ string id = 1;
+ string method = 2;
+ string scheme = 3;
+ string hostname = 4;
+ string path = 5;
+ string query = 6;
+ reserved 7 to 10;
+ repeated HttpHeader headers = 11;
+ }
+
+ message Body {
+ string id = 1;
+ uint64 seq = 2;
+ bytes data = 3;
+ bool end = 4;
+ reserved 5 to 8;
+ }
+
+ oneof event {
+ BasicContext info = 1;
+ Head head = 2;
+ Body body = 3;
+ }
+}
+
+
+message EventHttpResponse {
+ message Head {
+ string id = 1;
+ int32 status = 2;
+ repeated HttpHeader headers = 3;
+ }
+
+ message Body {
+ string id = 1;
+ uint64 seq = 2;
+ bytes data = 3;
+ bool end = 4;
+ }
+
+ oneof event {
+ Head head = 1;
+ Body body = 2;
+ }
+}
+
+message EventMqttRequest {
+ BasicContext context = 1;
+ string topic = 2;
+ bytes payload = 3;
+}
+
+message EventMqttResponse {
+ enum Status {
+ ACCEPTED = 0;
+ REJECTED = 1;
+ }
+
+ Status status = 1;
+}
+
+message EventScheduleRequest {
+ BasicContext context = 1;
+ string scheduledAt = 2;
+}
+
+message EventScheduleResponse {
+ enum Status {
+ ACCEPTED = 0;
+ REJECTED = 1;
+ }
+
+ Status status = 1;
+}
+
+
+message StatusResponse {
+ enum Status {
+ STARTING = 0;
+ READY = 1;
+ CLOSING = 2;
+ CLOSED = 3;
+ FATAL = 4;
+ }
+
+ Status status = 1;
+ uint32 lastRequestAt = 2;
+ uint32 loadAverage5Seconds = 3;
+ uint32 loadAverage1Minute = 4;
+}
diff --git a/packages/grpc/src/base.ts b/packages/grpc/src/base.ts
new file mode 100644
index 0000000..f8576ed
--- /dev/null
+++ b/packages/grpc/src/base.ts
@@ -0,0 +1,211 @@
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+// protoc-gen-ts_proto v2.10.1
+// protoc v3.21.12
+// source: base.proto
+
+/* eslint-disable */
+import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire";
+
+export const protobufPackage = "Base";
+
+export interface Empty {
+}
+
+export interface ExportChunk {
+ id: string;
+ sequence: number;
+ data: Uint8Array;
+}
+
+function createBaseEmpty(): Empty {
+ return {};
+}
+
+export const Empty: MessageFns<Empty> = {
+ encode(_: Empty, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
+ return writer;
+ },
+
+ decode(input: BinaryReader | Uint8Array, length?: number): Empty {
+ const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
+ const end = length === undefined ? reader.len : reader.pos + length;
+ const message = createBaseEmpty();
+ while (reader.pos < end) {
+ const tag = reader.uint32();
+ switch (tag >>> 3) {
+ }
+ if ((tag & 7) === 4 || tag === 0) {
+ break;
+ }
+ reader.skip(tag & 7);
+ }
+ return message;
+ },
+
+ fromJSON(_: any): Empty {
+ return {};
+ },
+
+ toJSON(_: Empty): unknown {
+ const obj: any = {};
+ return obj;
+ },
+
+ create(base?: DeepPartial<Empty>): Empty {
+ return Empty.fromPartial(base ?? {});
+ },
+ fromPartial(_: DeepPartial<Empty>): Empty {
+ const message = createBaseEmpty();
+ return message;
+ },
+};
+
+function createBaseExportChunk(): ExportChunk {
+ return { id: "", sequence: 0, data: new Uint8Array(0) };
+}
+
+export const ExportChunk: MessageFns<ExportChunk> = {
+ encode(message: ExportChunk, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
+ if (message.id !== "") {
+ writer.uint32(10).string(message.id);
+ }
+ if (message.sequence !== 0) {
+ writer.uint32(16).int64(message.sequence);
+ }
+ if (message.data.length !== 0) {
+ writer.uint32(26).bytes(message.data);
+ }
+ return writer;
+ },
+
+ decode(input: BinaryReader | Uint8Array, length?: number): ExportChunk {
+ const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
+ const end = length === undefined ? reader.len : reader.pos + length;
+ const message = createBaseExportChunk();
+ while (reader.pos < end) {
+ const tag = reader.uint32();
+ switch (tag >>> 3) {
+ case 1: {
+ if (tag !== 10) {
+ break;
+ }
+
+ message.id = reader.string();
+ continue;
+ }
+ case 2: {
+ if (tag !== 16) {
+ break;
+ }
+
+ message.sequence = longToNumber(reader.int64());
+ continue;
+ }
+ case 3: {
+ if (tag !== 26) {
+ break;
+ }
+
+ message.data = reader.bytes();
+ continue;
+ }
+ }
+ if ((tag & 7) === 4 || tag === 0) {
+ break;
+ }
+ reader.skip(tag & 7);
+ }
+ return message;
+ },
+
+ fromJSON(object: any): ExportChunk {
+ return {
+ id: isSet(object.id) ? globalThis.String(object.id) : "",
+ sequence: isSet(object.sequence) ? globalThis.Number(object.sequence) : 0,
+ data: isSet(object.data) ? bytesFromBase64(object.data) : new Uint8Array(0),
+ };
+ },
+
+ toJSON(message: ExportChunk): unknown {
+ const obj: any = {};
+ if (message.id !== "") {
+ obj.id = message.id;
+ }
+ if (message.sequence !== 0) {
+ obj.sequence = Math.round(message.sequence);
+ }
+ if (message.data.length !== 0) {
+ obj.data = base64FromBytes(message.data);
+ }
+ return obj;
+ },
+
+ create(base?: DeepPartial<ExportChunk>): ExportChunk {
+ return ExportChunk.fromPartial(base ?? {});
+ },
+ fromPartial(object: DeepPartial<ExportChunk>): ExportChunk {
+ const message = createBaseExportChunk();
+ message.id = object.id ?? "";
+ message.sequence = object.sequence ?? 0;
+ message.data = object.data ?? new Uint8Array(0);
+ return message;
+ },
+};
+
+function bytesFromBase64(b64: string): Uint8Array {
+ if ((globalThis as any).Buffer) {
+ return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+ } else {
+ const bin = globalThis.atob(b64);
+ const arr = new Uint8Array(bin.length);
+ for (let i = 0; i < bin.length; ++i) {
+ arr[i] = bin.charCodeAt(i);
+ }
+ return arr;
+ }
+}
+
+function base64FromBytes(arr: Uint8Array): string {
+ if ((globalThis as any).Buffer) {
+ return globalThis.Buffer.from(arr).toString("base64");
+ } else {
+ const bin: string[] = [];
+ arr.forEach((byte) => {
+ bin.push(globalThis.String.fromCharCode(byte));
+ });
+ return globalThis.btoa(bin.join(""));
+ }
+}
+
+type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined;
+
+export type DeepPartial<T> = T extends Builtin ? T
+ : T extends globalThis.Array<infer U> ? globalThis.Array<DeepPartial<U>>
+ : T extends ReadonlyArray<infer U> ? ReadonlyArray<DeepPartial<U>>
+ : T extends {} ? { [K in keyof T]?: DeepPartial<T[K]> }
+ : Partial<T>;
+
+function longToNumber(int64: { toString(): string }): number {
+ const num = globalThis.Number(int64.toString());
+ if (num > globalThis.Number.MAX_SAFE_INTEGER) {
+ throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER");
+ }
+ if (num < globalThis.Number.MIN_SAFE_INTEGER) {
+ throw new globalThis.Error("Value is smaller than Number.MIN_SAFE_INTEGER");
+ }
+ return num;
+}
+
+function isSet(value: any): boolean {
+ return value !== null && value !== undefined;
+}
+
+export interface MessageFns<T> {
+ encode(message: T, writer?: BinaryWriter): BinaryWriter;
+ decode(input: BinaryReader | Uint8Array, length?: number): T;
+ fromJSON(object: any): T;
+ toJSON(message: T): unknown;
+ create(base?: DeepPartial): T;
+ fromPartial(object: DeepPartial): T;
+}
diff --git a/packages/grpc/src/basics/action.ts b/packages/grpc/src/basics/action.ts
new file mode 100644
index 0000000..ce6acea
--- /dev/null
+++ b/packages/grpc/src/basics/action.ts
@@ -0,0 +1,739 @@
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+// protoc-gen-ts_proto v2.10.1
+// protoc v3.21.12
+// source: basics/action.proto
+
+/* eslint-disable */
+import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire";
+
+export const protobufPackage = "Action";
+
+export interface Item {
+ id: string;
+ jobId: string;
+ versionId: string;
+ runnerImage: string;
+ runnerAsynchronous: boolean;
+ runnerMinCount: number;
+ runnerMaxCount: number;
+ runnerTimeout: number;
+ runnerMaxIdleAge: number;
+ runnerMaxAge: number;
+ runnerMaxAgeHard: number;
+ dockerArguments: Item_DockerArguments | undefined;
+ runnerMode: Item_RunnerMode;
+}
+
+export const Item_RunnerMode = { STANDARD: "STANDARD", RUN_ONCE: "RUN_ONCE", UNRECOGNIZED: "UNRECOGNIZED" } as const;
+
+export type Item_RunnerMode = typeof Item_RunnerMode[keyof typeof Item_RunnerMode];
+
+export namespace Item_RunnerMode {
+ export type STANDARD = typeof Item_RunnerMode.STANDARD;
+ export type RUN_ONCE = typeof Item_RunnerMode.RUN_ONCE;
+ export type UNRECOGNIZED = typeof Item_RunnerMode.UNRECOGNIZED;
+}
+
+export function item_RunnerModeFromJSON(object: any): Item_RunnerMode {
+ switch (object) {
+ case 0:
+ case "STANDARD":
+ return Item_RunnerMode.STANDARD;
+ case 1:
+ case "RUN_ONCE":
+ return Item_RunnerMode.RUN_ONCE;
+ case -1:
+ case "UNRECOGNIZED":
+ default:
+ return Item_RunnerMode.UNRECOGNIZED;
+ }
+}
+
+export function item_RunnerModeToJSON(object: Item_RunnerMode): string {
+ switch (object) {
+ case Item_RunnerMode.STANDARD:
+ return "STANDARD";
+ case Item_RunnerMode.RUN_ONCE:
+ return "RUN_ONCE";
+ case Item_RunnerMode.UNRECOGNIZED:
+ default:
+ return "UNRECOGNIZED";
+ }
+}
+
+export function item_RunnerModeToNumber(object: Item_RunnerMode): number {
+ switch (object) {
+ case Item_RunnerMode.STANDARD:
+ return 0;
+ case Item_RunnerMode.RUN_ONCE:
+ return 1;
+ case Item_RunnerMode.UNRECOGNIZED:
+ default:
+ return -1;
+ }
+}
+
+export interface Item_DockerArguments {
+ networks: string[];
+ volumes: Item_DockerArguments_Volume[];
+ labels: Item_DockerArguments_Label[];
+ memoryLimit?: string | undefined;
+ directPassthroughArguments: string[];
+}
+
+export interface Item_DockerArguments_Volume {
+ source: string;
+ target: string;
+ mode: Item_DockerArguments_Volume_VolumeMode;
+}
+
+export const Item_DockerArguments_Volume_VolumeMode = {
+ READ_ONLY: "READ_ONLY",
+ READ_WRITE: "READ_WRITE",
+ UNRECOGNIZED: "UNRECOGNIZED",
+} as const;
+
+export type Item_DockerArguments_Volume_VolumeMode =
+ typeof Item_DockerArguments_Volume_VolumeMode[keyof typeof Item_DockerArguments_Volume_VolumeMode];
+
+export namespace Item_DockerArguments_Volume_VolumeMode {
+ export type READ_ONLY = typeof Item_DockerArguments_Volume_VolumeMode.READ_ONLY;
+ export type READ_WRITE = typeof Item_DockerArguments_Volume_VolumeMode.READ_WRITE;
+ export type UNRECOGNIZED = typeof Item_DockerArguments_Volume_VolumeMode.UNRECOGNIZED;
+}
+
+export function item_DockerArguments_Volume_VolumeModeFromJSON(object: any): Item_DockerArguments_Volume_VolumeMode {
+ switch (object) {
+ case 0:
+ case "READ_ONLY":
+ return Item_DockerArguments_Volume_VolumeMode.READ_ONLY;
+ case 1:
+ case "READ_WRITE":
+ return Item_DockerArguments_Volume_VolumeMode.READ_WRITE;
+ case -1:
+ case "UNRECOGNIZED":
+ default:
+ return Item_DockerArguments_Volume_VolumeMode.UNRECOGNIZED;
+ }
+}
+
+export function item_DockerArguments_Volume_VolumeModeToJSON(object: Item_DockerArguments_Volume_VolumeMode): string {
+ switch (object) {
+ case Item_DockerArguments_Volume_VolumeMode.READ_ONLY:
+ return "READ_ONLY";
+ case Item_DockerArguments_Volume_VolumeMode.READ_WRITE:
+ return "READ_WRITE";
+ case Item_DockerArguments_Volume_VolumeMode.UNRECOGNIZED:
+ default:
+ return "UNRECOGNIZED";
+ }
+}
+
+export function item_DockerArguments_Volume_VolumeModeToNumber(object: Item_DockerArguments_Volume_VolumeMode): number {
+ switch (object) {
+ case Item_DockerArguments_Volume_VolumeMode.READ_ONLY:
+ return 0;
+ case Item_DockerArguments_Volume_VolumeMode.READ_WRITE:
+ return 1;
+ case Item_DockerArguments_Volume_VolumeMode.UNRECOGNIZED:
+ default:
+ return -1;
+ }
+}
+
+export interface Item_DockerArguments_Label {
+ key: string;
+ value: string;
+}
+
+function createBaseItem(): Item {
+ return {
+ id: "",
+ jobId: "",
+ versionId: "",
+ runnerImage: "",
+ runnerAsynchronous: false,
+ runnerMinCount: 0,
+ runnerMaxCount: 0,
+ runnerTimeout: 0,
+ runnerMaxIdleAge: 0,
+ runnerMaxAge: 0,
+ runnerMaxAgeHard: 0,
+ dockerArguments: undefined,
+ runnerMode: Item_RunnerMode.STANDARD,
+ };
+}
+
+export const Item: MessageFns<Item> = {
+ encode(message: Item, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
+ if (message.id !== "") {
+ writer.uint32(10).string(message.id);
+ }
+ if (message.jobId !== "") {
+ writer.uint32(18).string(message.jobId);
+ }
+ if (message.versionId !== "") {
+ writer.uint32(26).string(message.versionId);
+ }
+ if (message.runnerImage !== "") {
+ writer.uint32(34).string(message.runnerImage);
+ }
+ if (message.runnerAsynchronous !== false) {
+ writer.uint32(40).bool(message.runnerAsynchronous);
+ }
+ if (message.runnerMinCount !== 0) {
+ writer.uint32(48).uint32(message.runnerMinCount);
+ }
+ if (message.runnerMaxCount !== 0) {
+ writer.uint32(56).uint32(message.runnerMaxCount);
+ }
+ if (message.runnerTimeout !== 0) {
+ writer.uint32(64).uint32(message.runnerTimeout);
+ }
+ if (message.runnerMaxIdleAge !== 0) {
+ writer.uint32(72).uint32(message.runnerMaxIdleAge);
+ }
+ if (message.runnerMaxAge !== 0) {
+ writer.uint32(80).uint32(message.runnerMaxAge);
+ }
+ if (message.runnerMaxAgeHard !== 0) {
+ writer.uint32(88).uint32(message.runnerMaxAgeHard);
+ }
+ if (message.dockerArguments !== undefined) {
+ Item_DockerArguments.encode(message.dockerArguments, writer.uint32(98).fork()).join();
+ }
+ if (message.runnerMode !== Item_RunnerMode.STANDARD) {
+ writer.uint32(104).int32(item_RunnerModeToNumber(message.runnerMode));
+ }
+ return writer;
+ },
+
+ decode(input: BinaryReader | Uint8Array, length?: number): Item {
+ const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
+ const end = length === undefined ? reader.len : reader.pos + length;
+ const message = createBaseItem();
+ while (reader.pos < end) {
+ const tag = reader.uint32();
+ switch (tag >>> 3) {
+ case 1: {
+ if (tag !== 10) {
+ break;
+ }
+
+ message.id = reader.string();
+ continue;
+ }
+ case 2: {
+ if (tag !== 18) {
+ break;
+ }
+
+ message.jobId = reader.string();
+ continue;
+ }
+ case 3: {
+ if (tag !== 26) {
+ break;
+ }
+
+ message.versionId = reader.string();
+ continue;
+ }
+ case 4: {
+ if (tag !== 34) {
+ break;
+ }
+
+ message.runnerImage = reader.string();
+ continue;
+ }
+ case 5: {
+ if (tag !== 40) {
+ break;
+ }
+
+ message.runnerAsynchronous = reader.bool();
+ continue;
+ }
+ case 6: {
+ if (tag !== 48) {
+ break;
+ }
+
+ message.runnerMinCount = reader.uint32();
+ continue;
+ }
+ case 7: {
+ if (tag !== 56) {
+ break;
+ }
+
+ message.runnerMaxCount = reader.uint32();
+ continue;
+ }
+ case 8: {
+ if (tag !== 64) {
+ break;
+ }
+
+ message.runnerTimeout = reader.uint32();
+ continue;
+ }
+ case 9: {
+ if (tag !== 72) {
+ break;
+ }
+
+ message.runnerMaxIdleAge = reader.uint32();
+ continue;
+ }
+ case 10: {
+ if (tag !== 80) {
+ break;
+ }
+
+ message.runnerMaxAge = reader.uint32();
+ continue;
+ }
+ case 11: {
+ if (tag !== 88) {
+ break;
+ }
+
+ message.runnerMaxAgeHard = reader.uint32();
+ continue;
+ }
+ case 12: {
+ if (tag !== 98) {
+ break;
+ }
+
+ message.dockerArguments = Item_DockerArguments.decode(reader, reader.uint32());
+ continue;
+ }
+ case 13: {
+ if (tag !== 104) {
+ break;
+ }
+
+ message.runnerMode = item_RunnerModeFromJSON(reader.int32());
+ continue;
+ }
+ }
+ if ((tag & 7) === 4 || tag === 0) {
+ break;
+ }
+ reader.skip(tag & 7);
+ }
+ return message;
+ },
+
+ fromJSON(object: any): Item {
+ return {
+ id: isSet(object.id) ? globalThis.String(object.id) : "",
+ jobId: isSet(object.jobId) ? globalThis.String(object.jobId) : "",
+ versionId: isSet(object.versionId) ? globalThis.String(object.versionId) : "",
+ runnerImage: isSet(object.runnerImage) ? globalThis.String(object.runnerImage) : "",
+ runnerAsynchronous: isSet(object.runnerAsynchronous) ? globalThis.Boolean(object.runnerAsynchronous) : false,
+ runnerMinCount: isSet(object.runnerMinCount) ? globalThis.Number(object.runnerMinCount) : 0,
+ runnerMaxCount: isSet(object.runnerMaxCount) ? globalThis.Number(object.runnerMaxCount) : 0,
+ runnerTimeout: isSet(object.runnerTimeout) ? globalThis.Number(object.runnerTimeout) : 0,
+ runnerMaxIdleAge: isSet(object.runnerMaxIdleAge) ? globalThis.Number(object.runnerMaxIdleAge) : 0,
+ runnerMaxAge: isSet(object.runnerMaxAge) ? globalThis.Number(object.runnerMaxAge) : 0,
+ runnerMaxAgeHard: isSet(object.runnerMaxAgeHard) ? globalThis.Number(object.runnerMaxAgeHard) : 0,
+ dockerArguments: isSet(object.dockerArguments)
+ ? Item_DockerArguments.fromJSON(object.dockerArguments)
+ : undefined,
+ runnerMode: isSet(object.runnerMode) ? item_RunnerModeFromJSON(object.runnerMode) : Item_RunnerMode.STANDARD,
+ };
+ },
+
+ toJSON(message: Item): unknown {
+ const obj: any = {};
+ if (message.id !== "") {
+ obj.id = message.id;
+ }
+ if (message.jobId !== "") {
+ obj.jobId = message.jobId;
+ }
+ if (message.versionId !== "") {
+ obj.versionId = message.versionId;
+ }
+ if (message.runnerImage !== "") {
+ obj.runnerImage = message.runnerImage;
+ }
+ if (message.runnerAsynchronous !== false) {
+ obj.runnerAsynchronous = message.runnerAsynchronous;
+ }
+ if (message.runnerMinCount !== 0) {
+ obj.runnerMinCount = Math.round(message.runnerMinCount);
+ }
+ if (message.runnerMaxCount !== 0) {
+ obj.runnerMaxCount = Math.round(message.runnerMaxCount);
+ }
+ if (message.runnerTimeout !== 0) {
+ obj.runnerTimeout = Math.round(message.runnerTimeout);
+ }
+ if (message.runnerMaxIdleAge !== 0) {
+ obj.runnerMaxIdleAge = Math.round(message.runnerMaxIdleAge);
+ }
+ if (message.runnerMaxAge !== 0) {
+ obj.runnerMaxAge = Math.round(message.runnerMaxAge);
+ }
+ if (message.runnerMaxAgeHard !== 0) {
+ obj.runnerMaxAgeHard = Math.round(message.runnerMaxAgeHard);
+ }
+ if (message.dockerArguments !== undefined) {
+ obj.dockerArguments = Item_DockerArguments.toJSON(message.dockerArguments);
+ }
+ if (message.runnerMode !== Item_RunnerMode.STANDARD) {
+ obj.runnerMode = item_RunnerModeToJSON(message.runnerMode);
+ }
+ return obj;
+ },
+
+ create(base?: DeepPartial<Item>): Item {
+ return Item.fromPartial(base ?? {});
+ },
+ fromPartial(object: DeepPartial<Item>): Item {
+ const message = createBaseItem();
+ message.id = object.id ?? "";
+ message.jobId = object.jobId ?? "";
+ message.versionId = object.versionId ?? "";
+ message.runnerImage = object.runnerImage ?? "";
+ message.runnerAsynchronous = object.runnerAsynchronous ?? false;
+ message.runnerMinCount = object.runnerMinCount ?? 0;
+ message.runnerMaxCount = object.runnerMaxCount ?? 0;
+ message.runnerTimeout = object.runnerTimeout ?? 0;
+ message.runnerMaxIdleAge = object.runnerMaxIdleAge ?? 0;
+ message.runnerMaxAge = object.runnerMaxAge ?? 0;
+ message.runnerMaxAgeHard = object.runnerMaxAgeHard ?? 0;
+ message.dockerArguments = (object.dockerArguments !== undefined && object.dockerArguments !== null)
+ ? Item_DockerArguments.fromPartial(object.dockerArguments)
+ : undefined;
+ message.runnerMode = object.runnerMode ?? Item_RunnerMode.STANDARD;
+ return message;
+ },
+};
+
+function createBaseItem_DockerArguments(): Item_DockerArguments {
+ return { networks: [], volumes: [], labels: [], memoryLimit: undefined, directPassthroughArguments: [] };
+}
+
+export const Item_DockerArguments: MessageFns<Item_DockerArguments> = {
+ encode(message: Item_DockerArguments, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
+ for (const v of message.networks) {
+ writer.uint32(10).string(v!);
+ }
+ for (const v of message.volumes) {
+ Item_DockerArguments_Volume.encode(v!, writer.uint32(18).fork()).join();
+ }
+ for (const v of message.labels) {
+ Item_DockerArguments_Label.encode(v!, writer.uint32(26).fork()).join();
+ }
+ if (message.memoryLimit !== undefined) {
+ writer.uint32(34).string(message.memoryLimit);
+ }
+ for (const v of message.directPassthroughArguments) {
+ writer.uint32(42).string(v!);
+ }
+ return writer;
+ },
+
+ decode(input: BinaryReader | Uint8Array, length?: number): Item_DockerArguments {
+ const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
+ const end = length === undefined ? reader.len : reader.pos + length;
+ const message = createBaseItem_DockerArguments();
+ while (reader.pos < end) {
+ const tag = reader.uint32();
+ switch (tag >>> 3) {
+ case 1: {
+ if (tag !== 10) {
+ break;
+ }
+
+ message.networks.push(reader.string());
+ continue;
+ }
+ case 2: {
+ if (tag !== 18) {
+ break;
+ }
+
+ message.volumes.push(Item_DockerArguments_Volume.decode(reader, reader.uint32()));
+ continue;
+ }
+ case 3: {
+ if (tag !== 26) {
+ break;
+ }
+
+ message.labels.push(Item_DockerArguments_Label.decode(reader, reader.uint32()));
+ continue;
+ }
+ case 4: {
+ if (tag !== 34) {
+ break;
+ }
+
+ message.memoryLimit = reader.string();
+ continue;
+ }
+ case 5: {
+ if (tag !== 42) {
+ break;
+ }
+
+ message.directPassthroughArguments.push(reader.string());
+ continue;
+ }
+ }
+ if ((tag & 7) === 4 || tag === 0) {
+ break;
+ }
+ reader.skip(tag & 7);
+ }
+ return message;
+ },
+
+ fromJSON(object: any): Item_DockerArguments {
+ return {
+ networks: globalThis.Array.isArray(object?.networks) ? object.networks.map((e: any) => globalThis.String(e)) : [],
+ volumes: globalThis.Array.isArray(object?.volumes)
+ ? object.volumes.map((e: any) => Item_DockerArguments_Volume.fromJSON(e))
+ : [],
+ labels: globalThis.Array.isArray(object?.labels)
+ ? object.labels.map((e: any) => Item_DockerArguments_Label.fromJSON(e))
+ : [],
+ memoryLimit: isSet(object.memoryLimit) ? globalThis.String(object.memoryLimit) : undefined,
+ directPassthroughArguments: globalThis.Array.isArray(object?.directPassthroughArguments)
+ ? object.directPassthroughArguments.map((e: any) => globalThis.String(e))
+ : [],
+ };
+ },
+
+ toJSON(message: Item_DockerArguments): unknown {
+ const obj: any = {};
+ if (message.networks?.length) {
+ obj.networks = message.networks;
+ }
+ if (message.volumes?.length) {
+ obj.volumes = message.volumes.map((e) => Item_DockerArguments_Volume.toJSON(e));
+ }
+ if (message.labels?.length) {
+ obj.labels = message.labels.map((e) => Item_DockerArguments_Label.toJSON(e));
+ }
+ if (message.memoryLimit !== undefined) {
+ obj.memoryLimit = message.memoryLimit;
+ }
+ if (message.directPassthroughArguments?.length) {
+ obj.directPassthroughArguments = message.directPassthroughArguments;
+ }
+ return obj;
+ },
+
+  create(base?: DeepPartial<Item_DockerArguments>): Item_DockerArguments {
+ return Item_DockerArguments.fromPartial(base ?? {});
+ },
+  fromPartial(object: DeepPartial<Item_DockerArguments>): Item_DockerArguments {
+ const message = createBaseItem_DockerArguments();
+ message.networks = object.networks?.map((e) => e) || [];
+ message.volumes = object.volumes?.map((e) => Item_DockerArguments_Volume.fromPartial(e)) || [];
+ message.labels = object.labels?.map((e) => Item_DockerArguments_Label.fromPartial(e)) || [];
+ message.memoryLimit = object.memoryLimit ?? undefined;
+ message.directPassthroughArguments = object.directPassthroughArguments?.map((e) => e) || [];
+ return message;
+ },
+};
+
+function createBaseItem_DockerArguments_Volume(): Item_DockerArguments_Volume {
+ return { source: "", target: "", mode: Item_DockerArguments_Volume_VolumeMode.READ_ONLY };
+}
+
+export const Item_DockerArguments_Volume: MessageFns<Item_DockerArguments_Volume> = {
+ encode(message: Item_DockerArguments_Volume, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
+ if (message.source !== "") {
+ writer.uint32(10).string(message.source);
+ }
+ if (message.target !== "") {
+ writer.uint32(18).string(message.target);
+ }
+ if (message.mode !== Item_DockerArguments_Volume_VolumeMode.READ_ONLY) {
+ writer.uint32(24).int32(item_DockerArguments_Volume_VolumeModeToNumber(message.mode));
+ }
+ return writer;
+ },
+
+ decode(input: BinaryReader | Uint8Array, length?: number): Item_DockerArguments_Volume {
+ const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
+ const end = length === undefined ? reader.len : reader.pos + length;
+ const message = createBaseItem_DockerArguments_Volume();
+ while (reader.pos < end) {
+ const tag = reader.uint32();
+ switch (tag >>> 3) {
+ case 1: {
+ if (tag !== 10) {
+ break;
+ }
+
+ message.source = reader.string();
+ continue;
+ }
+ case 2: {
+ if (tag !== 18) {
+ break;
+ }
+
+ message.target = reader.string();
+ continue;
+ }
+ case 3: {
+ if (tag !== 24) {
+ break;
+ }
+
+ message.mode = item_DockerArguments_Volume_VolumeModeFromJSON(reader.int32());
+ continue;
+ }
+ }
+ if ((tag & 7) === 4 || tag === 0) {
+ break;
+ }
+ reader.skip(tag & 7);
+ }
+ return message;
+ },
+
+ fromJSON(object: any): Item_DockerArguments_Volume {
+ return {
+ source: isSet(object.source) ? globalThis.String(object.source) : "",
+ target: isSet(object.target) ? globalThis.String(object.target) : "",
+ mode: isSet(object.mode)
+ ? item_DockerArguments_Volume_VolumeModeFromJSON(object.mode)
+ : Item_DockerArguments_Volume_VolumeMode.READ_ONLY,
+ };
+ },
+
+ toJSON(message: Item_DockerArguments_Volume): unknown {
+ const obj: any = {};
+ if (message.source !== "") {
+ obj.source = message.source;
+ }
+ if (message.target !== "") {
+ obj.target = message.target;
+ }
+ if (message.mode !== Item_DockerArguments_Volume_VolumeMode.READ_ONLY) {
+ obj.mode = item_DockerArguments_Volume_VolumeModeToJSON(message.mode);
+ }
+ return obj;
+ },
+
+  create(base?: DeepPartial<Item_DockerArguments_Volume>): Item_DockerArguments_Volume {
+ return Item_DockerArguments_Volume.fromPartial(base ?? {});
+ },
+  fromPartial(object: DeepPartial<Item_DockerArguments_Volume>): Item_DockerArguments_Volume {
+ const message = createBaseItem_DockerArguments_Volume();
+ message.source = object.source ?? "";
+ message.target = object.target ?? "";
+ message.mode = object.mode ?? Item_DockerArguments_Volume_VolumeMode.READ_ONLY;
+ return message;
+ },
+};
+
+function createBaseItem_DockerArguments_Label(): Item_DockerArguments_Label {
+ return { key: "", value: "" };
+}
+
+export const Item_DockerArguments_Label: MessageFns<Item_DockerArguments_Label> = {
+ encode(message: Item_DockerArguments_Label, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
+ if (message.key !== "") {
+ writer.uint32(10).string(message.key);
+ }
+ if (message.value !== "") {
+ writer.uint32(18).string(message.value);
+ }
+ return writer;
+ },
+
+ decode(input: BinaryReader | Uint8Array, length?: number): Item_DockerArguments_Label {
+ const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
+ const end = length === undefined ? reader.len : reader.pos + length;
+ const message = createBaseItem_DockerArguments_Label();
+ while (reader.pos < end) {
+ const tag = reader.uint32();
+ switch (tag >>> 3) {
+ case 1: {
+ if (tag !== 10) {
+ break;
+ }
+
+ message.key = reader.string();
+ continue;
+ }
+ case 2: {
+ if (tag !== 18) {
+ break;
+ }
+
+ message.value = reader.string();
+ continue;
+ }
+ }
+ if ((tag & 7) === 4 || tag === 0) {
+ break;
+ }
+ reader.skip(tag & 7);
+ }
+ return message;
+ },
+
+ fromJSON(object: any): Item_DockerArguments_Label {
+ return {
+ key: isSet(object.key) ? globalThis.String(object.key) : "",
+ value: isSet(object.value) ? globalThis.String(object.value) : "",
+ };
+ },
+
+ toJSON(message: Item_DockerArguments_Label): unknown {
+ const obj: any = {};
+ if (message.key !== "") {
+ obj.key = message.key;
+ }
+ if (message.value !== "") {
+ obj.value = message.value;
+ }
+ return obj;
+ },
+
+  create(base?: DeepPartial<Item_DockerArguments_Label>): Item_DockerArguments_Label {
+ return Item_DockerArguments_Label.fromPartial(base ?? {});
+ },
+  fromPartial(object: DeepPartial<Item_DockerArguments_Label>): Item_DockerArguments_Label {
+ const message = createBaseItem_DockerArguments_Label();
+ message.key = object.key ?? "";
+ message.value = object.value ?? "";
+ return message;
+ },
+};
+
+type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined;
+
+export type DeepPartial<T> = T extends Builtin ? T
+  : T extends globalThis.Array<infer U> ? globalThis.Array<DeepPartial<U>>
+  : T extends ReadonlyArray<infer U> ? ReadonlyArray<DeepPartial<U>>
+  : T extends {} ? { [K in keyof T]?: DeepPartial<T[K]> }
+  : Partial<T>;
+
+function isSet(value: any): boolean {
+ return value !== null && value !== undefined;
+}
+
+export interface MessageFns<T> {
+  encode(message: T, writer?: BinaryWriter): BinaryWriter;
+  decode(input: BinaryReader | Uint8Array, length?: number): T;
+  fromJSON(object: any): T;
+  toJSON(message: T): unknown;
+  create(base?: DeepPartial<T>): T;
+  fromPartial(object: DeepPartial<T>): T;
+}
diff --git a/packages/grpc/src/basics/api-token.ts b/packages/grpc/src/basics/api-token.ts
new file mode 100644
index 0000000..f36c04f
--- /dev/null
+++ b/packages/grpc/src/basics/api-token.ts
@@ -0,0 +1,275 @@
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+// protoc-gen-ts_proto v2.10.1
+// protoc v3.21.12
+// source: basics/api-token.proto
+
+/* eslint-disable */
+import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire";
+import { Permission } from "./common.js";
+
+export const protobufPackage = "ApiToken";
+
+export interface Item {
+ id: string;
+ token: string;
+ userId: string;
+ description: string;
+ status: Item_Status;
+ permissions: Permission[];
+ expires: string;
+ created: string;
+}
+
+export const Item_Status = { ENABLED: "ENABLED", DISABLED: "DISABLED", UNRECOGNIZED: "UNRECOGNIZED" } as const;
+
+export type Item_Status = typeof Item_Status[keyof typeof Item_Status];
+
+export namespace Item_Status {
+ export type ENABLED = typeof Item_Status.ENABLED;
+ export type DISABLED = typeof Item_Status.DISABLED;
+ export type UNRECOGNIZED = typeof Item_Status.UNRECOGNIZED;
+}
+
+export function item_StatusFromJSON(object: any): Item_Status {
+ switch (object) {
+ case 0:
+ case "ENABLED":
+ return Item_Status.ENABLED;
+ case 1:
+ case "DISABLED":
+ return Item_Status.DISABLED;
+ case -1:
+ case "UNRECOGNIZED":
+ default:
+ return Item_Status.UNRECOGNIZED;
+ }
+}
+
+export function item_StatusToJSON(object: Item_Status): string {
+ switch (object) {
+ case Item_Status.ENABLED:
+ return "ENABLED";
+ case Item_Status.DISABLED:
+ return "DISABLED";
+ case Item_Status.UNRECOGNIZED:
+ default:
+ return "UNRECOGNIZED";
+ }
+}
+
+export function item_StatusToNumber(object: Item_Status): number {
+ switch (object) {
+ case Item_Status.ENABLED:
+ return 0;
+ case Item_Status.DISABLED:
+ return 1;
+ case Item_Status.UNRECOGNIZED:
+ default:
+ return -1;
+ }
+}
+
+function createBaseItem(): Item {
+ return {
+ id: "",
+ token: "",
+ userId: "",
+ description: "",
+ status: Item_Status.ENABLED,
+ permissions: [],
+ expires: "",
+ created: "",
+ };
+}
+
+export const Item: MessageFns<Item> = {
+ encode(message: Item, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
+ if (message.id !== "") {
+ writer.uint32(10).string(message.id);
+ }
+ if (message.token !== "") {
+ writer.uint32(18).string(message.token);
+ }
+ if (message.userId !== "") {
+ writer.uint32(26).string(message.userId);
+ }
+ if (message.description !== "") {
+ writer.uint32(34).string(message.description);
+ }
+ if (message.status !== Item_Status.ENABLED) {
+ writer.uint32(40).int32(item_StatusToNumber(message.status));
+ }
+ for (const v of message.permissions) {
+ Permission.encode(v!, writer.uint32(50).fork()).join();
+ }
+ if (message.expires !== "") {
+ writer.uint32(58).string(message.expires);
+ }
+ if (message.created !== "") {
+ writer.uint32(66).string(message.created);
+ }
+ return writer;
+ },
+
+ decode(input: BinaryReader | Uint8Array, length?: number): Item {
+ const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
+ const end = length === undefined ? reader.len : reader.pos + length;
+ const message = createBaseItem();
+ while (reader.pos < end) {
+ const tag = reader.uint32();
+ switch (tag >>> 3) {
+ case 1: {
+ if (tag !== 10) {
+ break;
+ }
+
+ message.id = reader.string();
+ continue;
+ }
+ case 2: {
+ if (tag !== 18) {
+ break;
+ }
+
+ message.token = reader.string();
+ continue;
+ }
+ case 3: {
+ if (tag !== 26) {
+ break;
+ }
+
+ message.userId = reader.string();
+ continue;
+ }
+ case 4: {
+ if (tag !== 34) {
+ break;
+ }
+
+ message.description = reader.string();
+ continue;
+ }
+ case 5: {
+ if (tag !== 40) {
+ break;
+ }
+
+ message.status = item_StatusFromJSON(reader.int32());
+ continue;
+ }
+ case 6: {
+ if (tag !== 50) {
+ break;
+ }
+
+ message.permissions.push(Permission.decode(reader, reader.uint32()));
+ continue;
+ }
+ case 7: {
+ if (tag !== 58) {
+ break;
+ }
+
+ message.expires = reader.string();
+ continue;
+ }
+ case 8: {
+ if (tag !== 66) {
+ break;
+ }
+
+ message.created = reader.string();
+ continue;
+ }
+ }
+ if ((tag & 7) === 4 || tag === 0) {
+ break;
+ }
+ reader.skip(tag & 7);
+ }
+ return message;
+ },
+
+ fromJSON(object: any): Item {
+ return {
+ id: isSet(object.id) ? globalThis.String(object.id) : "",
+ token: isSet(object.token) ? globalThis.String(object.token) : "",
+ userId: isSet(object.userId) ? globalThis.String(object.userId) : "",
+ description: isSet(object.description) ? globalThis.String(object.description) : "",
+ status: isSet(object.status) ? item_StatusFromJSON(object.status) : Item_Status.ENABLED,
+ permissions: globalThis.Array.isArray(object?.permissions)
+ ? object.permissions.map((e: any) => Permission.fromJSON(e))
+ : [],
+ expires: isSet(object.expires) ? globalThis.String(object.expires) : "",
+ created: isSet(object.created) ? globalThis.String(object.created) : "",
+ };
+ },
+
+ toJSON(message: Item): unknown {
+ const obj: any = {};
+ if (message.id !== "") {
+ obj.id = message.id;
+ }
+ if (message.token !== "") {
+ obj.token = message.token;
+ }
+ if (message.userId !== "") {
+ obj.userId = message.userId;
+ }
+ if (message.description !== "") {
+ obj.description = message.description;
+ }
+ if (message.status !== Item_Status.ENABLED) {
+ obj.status = item_StatusToJSON(message.status);
+ }
+ if (message.permissions?.length) {
+ obj.permissions = message.permissions.map((e) => Permission.toJSON(e));
+ }
+ if (message.expires !== "") {
+ obj.expires = message.expires;
+ }
+ if (message.created !== "") {
+ obj.created = message.created;
+ }
+ return obj;
+ },
+
+  create(base?: DeepPartial<Item>): Item {
+ return Item.fromPartial(base ?? {});
+ },
+  fromPartial(object: DeepPartial<Item>): Item {
+ const message = createBaseItem();
+ message.id = object.id ?? "";
+ message.token = object.token ?? "";
+ message.userId = object.userId ?? "";
+ message.description = object.description ?? "";
+ message.status = object.status ?? Item_Status.ENABLED;
+ message.permissions = object.permissions?.map((e) => Permission.fromPartial(e)) || [];
+ message.expires = object.expires ?? "";
+ message.created = object.created ?? "";
+ return message;
+ },
+};
+
+type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined;
+
+export type DeepPartial<T> = T extends Builtin ? T
+  : T extends globalThis.Array<infer U> ? globalThis.Array<DeepPartial<U>>
+  : T extends ReadonlyArray<infer U> ? ReadonlyArray<DeepPartial<U>>
+  : T extends {} ? { [K in keyof T]?: DeepPartial<T[K]> }
+  : Partial<T>;
+
+function isSet(value: any): boolean {
+ return value !== null && value !== undefined;
+}
+
+export interface MessageFns<T> {
+  encode(message: T, writer?: BinaryWriter): BinaryWriter;
+  decode(input: BinaryReader | Uint8Array, length?: number): T;
+  fromJSON(object: any): T;
+  toJSON(message: T): unknown;
+  create(base?: DeepPartial<T>): T;
+  fromPartial(object: DeepPartial<T>): T;
+}
diff --git a/packages/grpc/src/basics/common.ts b/packages/grpc/src/basics/common.ts
new file mode 100644
index 0000000..bd77824
--- /dev/null
+++ b/packages/grpc/src/basics/common.ts
@@ -0,0 +1,254 @@
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+// protoc-gen-ts_proto v2.10.1
+// protoc v3.21.12
+// source: basics/common.proto
+
+/* eslint-disable */
+import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire";
+
+export const protobufPackage = "Common";
+
+export interface Permission {
+ effect: Permission_Effect;
+ resource: string;
+ actions: Permission_Action[];
+}
+
+export const Permission_Effect = { ALLOW: "ALLOW", DENY: "DENY", UNRECOGNIZED: "UNRECOGNIZED" } as const;
+
+export type Permission_Effect = typeof Permission_Effect[keyof typeof Permission_Effect];
+
+export namespace Permission_Effect {
+ export type ALLOW = typeof Permission_Effect.ALLOW;
+ export type DENY = typeof Permission_Effect.DENY;
+ export type UNRECOGNIZED = typeof Permission_Effect.UNRECOGNIZED;
+}
+
+export function permission_EffectFromJSON(object: any): Permission_Effect {
+ switch (object) {
+ case 0:
+ case "ALLOW":
+ return Permission_Effect.ALLOW;
+ case 1:
+ case "DENY":
+ return Permission_Effect.DENY;
+ case -1:
+ case "UNRECOGNIZED":
+ default:
+ return Permission_Effect.UNRECOGNIZED;
+ }
+}
+
+export function permission_EffectToJSON(object: Permission_Effect): string {
+ switch (object) {
+ case Permission_Effect.ALLOW:
+ return "ALLOW";
+ case Permission_Effect.DENY:
+ return "DENY";
+ case Permission_Effect.UNRECOGNIZED:
+ default:
+ return "UNRECOGNIZED";
+ }
+}
+
+export function permission_EffectToNumber(object: Permission_Effect): number {
+ switch (object) {
+ case Permission_Effect.ALLOW:
+ return 0;
+ case Permission_Effect.DENY:
+ return 1;
+ case Permission_Effect.UNRECOGNIZED:
+ default:
+ return -1;
+ }
+}
+
+export const Permission_Action = {
+ READ: "READ",
+ WRITE: "WRITE",
+ DELETE: "DELETE",
+ UNRECOGNIZED: "UNRECOGNIZED",
+} as const;
+
+export type Permission_Action = typeof Permission_Action[keyof typeof Permission_Action];
+
+export namespace Permission_Action {
+ export type READ = typeof Permission_Action.READ;
+ export type WRITE = typeof Permission_Action.WRITE;
+ export type DELETE = typeof Permission_Action.DELETE;
+ export type UNRECOGNIZED = typeof Permission_Action.UNRECOGNIZED;
+}
+
+export function permission_ActionFromJSON(object: any): Permission_Action {
+ switch (object) {
+ case 0:
+ case "READ":
+ return Permission_Action.READ;
+ case 1:
+ case "WRITE":
+ return Permission_Action.WRITE;
+ case 2:
+ case "DELETE":
+ return Permission_Action.DELETE;
+ case -1:
+ case "UNRECOGNIZED":
+ default:
+ return Permission_Action.UNRECOGNIZED;
+ }
+}
+
+export function permission_ActionToJSON(object: Permission_Action): string {
+ switch (object) {
+ case Permission_Action.READ:
+ return "READ";
+ case Permission_Action.WRITE:
+ return "WRITE";
+ case Permission_Action.DELETE:
+ return "DELETE";
+ case Permission_Action.UNRECOGNIZED:
+ default:
+ return "UNRECOGNIZED";
+ }
+}
+
+export function permission_ActionToNumber(object: Permission_Action): number {
+ switch (object) {
+ case Permission_Action.READ:
+ return 0;
+ case Permission_Action.WRITE:
+ return 1;
+ case Permission_Action.DELETE:
+ return 2;
+ case Permission_Action.UNRECOGNIZED:
+ default:
+ return -1;
+ }
+}
+
+function createBasePermission(): Permission {
+ return { effect: Permission_Effect.ALLOW, resource: "", actions: [] };
+}
+
+export const Permission: MessageFns<Permission> = {
+ encode(message: Permission, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
+ if (message.effect !== Permission_Effect.ALLOW) {
+ writer.uint32(8).int32(permission_EffectToNumber(message.effect));
+ }
+ if (message.resource !== "") {
+ writer.uint32(18).string(message.resource);
+ }
+ writer.uint32(26).fork();
+ for (const v of message.actions) {
+ writer.int32(permission_ActionToNumber(v));
+ }
+ writer.join();
+ return writer;
+ },
+
+ decode(input: BinaryReader | Uint8Array, length?: number): Permission {
+ const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
+ const end = length === undefined ? reader.len : reader.pos + length;
+ const message = createBasePermission();
+ while (reader.pos < end) {
+ const tag = reader.uint32();
+ switch (tag >>> 3) {
+ case 1: {
+ if (tag !== 8) {
+ break;
+ }
+
+ message.effect = permission_EffectFromJSON(reader.int32());
+ continue;
+ }
+ case 2: {
+ if (tag !== 18) {
+ break;
+ }
+
+ message.resource = reader.string();
+ continue;
+ }
+ case 3: {
+ if (tag === 24) {
+ message.actions.push(permission_ActionFromJSON(reader.int32()));
+
+ continue;
+ }
+
+ if (tag === 26) {
+ const end2 = reader.uint32() + reader.pos;
+ while (reader.pos < end2) {
+ message.actions.push(permission_ActionFromJSON(reader.int32()));
+ }
+
+ continue;
+ }
+
+ break;
+ }
+ }
+ if ((tag & 7) === 4 || tag === 0) {
+ break;
+ }
+ reader.skip(tag & 7);
+ }
+ return message;
+ },
+
+ fromJSON(object: any): Permission {
+ return {
+ effect: isSet(object.effect) ? permission_EffectFromJSON(object.effect) : Permission_Effect.ALLOW,
+ resource: isSet(object.resource) ? globalThis.String(object.resource) : "",
+ actions: globalThis.Array.isArray(object?.actions)
+ ? object.actions.map((e: any) => permission_ActionFromJSON(e))
+ : [],
+ };
+ },
+
+ toJSON(message: Permission): unknown {
+ const obj: any = {};
+ if (message.effect !== Permission_Effect.ALLOW) {
+ obj.effect = permission_EffectToJSON(message.effect);
+ }
+ if (message.resource !== "") {
+ obj.resource = message.resource;
+ }
+ if (message.actions?.length) {
+ obj.actions = message.actions.map((e) => permission_ActionToJSON(e));
+ }
+ return obj;
+ },
+
+  create(base?: DeepPartial<Permission>): Permission {
+ return Permission.fromPartial(base ?? {});
+ },
+  fromPartial(object: DeepPartial<Permission>): Permission {
+ const message = createBasePermission();
+ message.effect = object.effect ?? Permission_Effect.ALLOW;
+ message.resource = object.resource ?? "";
+ message.actions = object.actions?.map((e) => e) || [];
+ return message;
+ },
+};
+
+type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined;
+
+export type DeepPartial<T> = T extends Builtin ? T
+  : T extends globalThis.Array<infer U> ? globalThis.Array<DeepPartial<U>>
+  : T extends ReadonlyArray<infer U> ? ReadonlyArray<DeepPartial<U>>
+  : T extends {} ? { [K in keyof T]?: DeepPartial<T[K]> }
+  : Partial<T>;
+
+function isSet(value: any): boolean {
+ return value !== null && value !== undefined;
+}
+
+export interface MessageFns<T> {
+  encode(message: T, writer?: BinaryWriter): BinaryWriter;
+  decode(input: BinaryReader | Uint8Array, length?: number): T;
+  fromJSON(object: any): T;
+  toJSON(message: T): unknown;
+  create(base?: DeepPartial<T>): T;
+  fromPartial(object: DeepPartial<T>): T;
+}
diff --git a/packages/grpc/src/basics/environment.ts b/packages/grpc/src/basics/environment.ts
new file mode 100644
index 0000000..fa533fe
--- /dev/null
+++ b/packages/grpc/src/basics/environment.ts
@@ -0,0 +1,388 @@
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+// protoc-gen-ts_proto v2.10.1
+// protoc v3.21.12
+// source: basics/environment.proto
+
+/* eslint-disable */
+import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire";
+
+export const protobufPackage = "Environment";
+
+export interface Item {
+ id: string;
+ jobId: string;
+ context: { [key: string]: Item_ContextValue };
+ modified: string;
+}
+
+export interface Item_ContextValue {
+ type: Item_ContextValue_Type;
+ value?: string | undefined;
+}
+
+export const Item_ContextValue_Type = { TEXT: "TEXT", SECRET: "SECRET", UNRECOGNIZED: "UNRECOGNIZED" } as const;
+
+export type Item_ContextValue_Type = typeof Item_ContextValue_Type[keyof typeof Item_ContextValue_Type];
+
+export namespace Item_ContextValue_Type {
+ export type TEXT = typeof Item_ContextValue_Type.TEXT;
+ export type SECRET = typeof Item_ContextValue_Type.SECRET;
+ export type UNRECOGNIZED = typeof Item_ContextValue_Type.UNRECOGNIZED;
+}
+
+export function item_ContextValue_TypeFromJSON(object: any): Item_ContextValue_Type {
+ switch (object) {
+ case 0:
+ case "TEXT":
+ return Item_ContextValue_Type.TEXT;
+ case 1:
+ case "SECRET":
+ return Item_ContextValue_Type.SECRET;
+ case -1:
+ case "UNRECOGNIZED":
+ default:
+ return Item_ContextValue_Type.UNRECOGNIZED;
+ }
+}
+
+export function item_ContextValue_TypeToJSON(object: Item_ContextValue_Type): string {
+ switch (object) {
+ case Item_ContextValue_Type.TEXT:
+ return "TEXT";
+ case Item_ContextValue_Type.SECRET:
+ return "SECRET";
+ case Item_ContextValue_Type.UNRECOGNIZED:
+ default:
+ return "UNRECOGNIZED";
+ }
+}
+
+export function item_ContextValue_TypeToNumber(object: Item_ContextValue_Type): number {
+ switch (object) {
+ case Item_ContextValue_Type.TEXT:
+ return 0;
+ case Item_ContextValue_Type.SECRET:
+ return 1;
+ case Item_ContextValue_Type.UNRECOGNIZED:
+ default:
+ return -1;
+ }
+}
+
+export interface Item_ContextEntry {
+ key: string;
+ value: Item_ContextValue | undefined;
+}
+
+function createBaseItem(): Item {
+ return { id: "", jobId: "", context: {}, modified: "" };
+}
+
+export const Item: MessageFns<Item> = {
+ encode(message: Item, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
+ if (message.id !== "") {
+ writer.uint32(10).string(message.id);
+ }
+ if (message.jobId !== "") {
+ writer.uint32(18).string(message.jobId);
+ }
+ globalThis.Object.entries(message.context).forEach(([key, value]: [string, Item_ContextValue]) => {
+ Item_ContextEntry.encode({ key: key as any, value }, writer.uint32(26).fork()).join();
+ });
+ if (message.modified !== "") {
+ writer.uint32(34).string(message.modified);
+ }
+ return writer;
+ },
+
+ decode(input: BinaryReader | Uint8Array, length?: number): Item {
+ const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
+ const end = length === undefined ? reader.len : reader.pos + length;
+ const message = createBaseItem();
+ while (reader.pos < end) {
+ const tag = reader.uint32();
+ switch (tag >>> 3) {
+ case 1: {
+ if (tag !== 10) {
+ break;
+ }
+
+ message.id = reader.string();
+ continue;
+ }
+ case 2: {
+ if (tag !== 18) {
+ break;
+ }
+
+ message.jobId = reader.string();
+ continue;
+ }
+ case 3: {
+ if (tag !== 26) {
+ break;
+ }
+
+ const entry3 = Item_ContextEntry.decode(reader, reader.uint32());
+ if (entry3.value !== undefined) {
+ message.context[entry3.key] = entry3.value;
+ }
+ continue;
+ }
+ case 4: {
+ if (tag !== 34) {
+ break;
+ }
+
+ message.modified = reader.string();
+ continue;
+ }
+ }
+ if ((tag & 7) === 4 || tag === 0) {
+ break;
+ }
+ reader.skip(tag & 7);
+ }
+ return message;
+ },
+
+ fromJSON(object: any): Item {
+ return {
+ id: isSet(object.id) ? globalThis.String(object.id) : "",
+ jobId: isSet(object.jobId) ? globalThis.String(object.jobId) : "",
+ context: isObject(object.context)
+ ? (globalThis.Object.entries(object.context) as [string, any][]).reduce(
+ (acc: { [key: string]: Item_ContextValue }, [key, value]: [string, any]) => {
+ acc[key] = Item_ContextValue.fromJSON(value);
+ return acc;
+ },
+ {},
+ )
+ : {},
+ modified: isSet(object.modified) ? globalThis.String(object.modified) : "",
+ };
+ },
+
+ toJSON(message: Item): unknown {
+ const obj: any = {};
+ if (message.id !== "") {
+ obj.id = message.id;
+ }
+ if (message.jobId !== "") {
+ obj.jobId = message.jobId;
+ }
+ if (message.context) {
+ const entries = globalThis.Object.entries(message.context) as [string, Item_ContextValue][];
+ if (entries.length > 0) {
+ obj.context = {};
+ entries.forEach(([k, v]) => {
+ obj.context[k] = Item_ContextValue.toJSON(v);
+ });
+ }
+ }
+ if (message.modified !== "") {
+ obj.modified = message.modified;
+ }
+ return obj;
+ },
+
+  create(base?: DeepPartial<Item>): Item {
+ return Item.fromPartial(base ?? {});
+ },
+  fromPartial(object: DeepPartial<Item>): Item {
+ const message = createBaseItem();
+ message.id = object.id ?? "";
+ message.jobId = object.jobId ?? "";
+ message.context = (globalThis.Object.entries(object.context ?? {}) as [string, Item_ContextValue][]).reduce(
+ (acc: { [key: string]: Item_ContextValue }, [key, value]: [string, Item_ContextValue]) => {
+ if (value !== undefined) {
+ acc[key] = Item_ContextValue.fromPartial(value);
+ }
+ return acc;
+ },
+ {},
+ );
+ message.modified = object.modified ?? "";
+ return message;
+ },
+};
+
+function createBaseItem_ContextValue(): Item_ContextValue {
+ return { type: Item_ContextValue_Type.TEXT, value: undefined };
+}
+
+export const Item_ContextValue: MessageFns<Item_ContextValue> = {
+ encode(message: Item_ContextValue, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
+ if (message.type !== Item_ContextValue_Type.TEXT) {
+ writer.uint32(8).int32(item_ContextValue_TypeToNumber(message.type));
+ }
+ if (message.value !== undefined) {
+ writer.uint32(18).string(message.value);
+ }
+ return writer;
+ },
+
+ decode(input: BinaryReader | Uint8Array, length?: number): Item_ContextValue {
+ const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
+ const end = length === undefined ? reader.len : reader.pos + length;
+ const message = createBaseItem_ContextValue();
+ while (reader.pos < end) {
+ const tag = reader.uint32();
+ switch (tag >>> 3) {
+ case 1: {
+ if (tag !== 8) {
+ break;
+ }
+
+ message.type = item_ContextValue_TypeFromJSON(reader.int32());
+ continue;
+ }
+ case 2: {
+ if (tag !== 18) {
+ break;
+ }
+
+ message.value = reader.string();
+ continue;
+ }
+ }
+ if ((tag & 7) === 4 || tag === 0) {
+ break;
+ }
+ reader.skip(tag & 7);
+ }
+ return message;
+ },
+
+ fromJSON(object: any): Item_ContextValue {
+ return {
+ type: isSet(object.type) ? item_ContextValue_TypeFromJSON(object.type) : Item_ContextValue_Type.TEXT,
+ value: isSet(object.value) ? globalThis.String(object.value) : undefined,
+ };
+ },
+
+ toJSON(message: Item_ContextValue): unknown {
+ const obj: any = {};
+ if (message.type !== Item_ContextValue_Type.TEXT) {
+ obj.type = item_ContextValue_TypeToJSON(message.type);
+ }
+ if (message.value !== undefined) {
+ obj.value = message.value;
+ }
+ return obj;
+ },
+
+  create(base?: DeepPartial<Item_ContextValue>): Item_ContextValue {
+ return Item_ContextValue.fromPartial(base ?? {});
+ },
+  fromPartial(object: DeepPartial<Item_ContextValue>): Item_ContextValue {
+ const message = createBaseItem_ContextValue();
+ message.type = object.type ?? Item_ContextValue_Type.TEXT;
+ message.value = object.value ?? undefined;
+ return message;
+ },
+};
+
+function createBaseItem_ContextEntry(): Item_ContextEntry {
+ return { key: "", value: undefined };
+}
+
+export const Item_ContextEntry: MessageFns<Item_ContextEntry> = {
+ encode(message: Item_ContextEntry, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
+ if (message.key !== "") {
+ writer.uint32(10).string(message.key);
+ }
+ if (message.value !== undefined) {
+ Item_ContextValue.encode(message.value, writer.uint32(18).fork()).join();
+ }
+ return writer;
+ },
+
+ decode(input: BinaryReader | Uint8Array, length?: number): Item_ContextEntry {
+ const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
+ const end = length === undefined ? reader.len : reader.pos + length;
+ const message = createBaseItem_ContextEntry();
+ while (reader.pos < end) {
+ const tag = reader.uint32();
+ switch (tag >>> 3) {
+ case 1: {
+ if (tag !== 10) {
+ break;
+ }
+
+ message.key = reader.string();
+ continue;
+ }
+ case 2: {
+ if (tag !== 18) {
+ break;
+ }
+
+ message.value = Item_ContextValue.decode(reader, reader.uint32());
+ continue;
+ }
+ }
+ if ((tag & 7) === 4 || tag === 0) {
+ break;
+ }
+ reader.skip(tag & 7);
+ }
+ return message;
+ },
+
+ fromJSON(object: any): Item_ContextEntry {
+ return {
+ key: isSet(object.key) ? globalThis.String(object.key) : "",
+ value: isSet(object.value) ? Item_ContextValue.fromJSON(object.value) : undefined,
+ };
+ },
+
+ toJSON(message: Item_ContextEntry): unknown {
+ const obj: any = {};
+ if (message.key !== "") {
+ obj.key = message.key;
+ }
+ if (message.value !== undefined) {
+ obj.value = Item_ContextValue.toJSON(message.value);
+ }
+ return obj;
+ },
+
+  create(base?: DeepPartial<Item_ContextEntry>): Item_ContextEntry {
+ return Item_ContextEntry.fromPartial(base ?? {});
+ },
+  fromPartial(object: DeepPartial<Item_ContextEntry>): Item_ContextEntry {
+ const message = createBaseItem_ContextEntry();
+ message.key = object.key ?? "";
+ message.value = (object.value !== undefined && object.value !== null)
+ ? Item_ContextValue.fromPartial(object.value)
+ : undefined;
+ return message;
+ },
+};
+
+type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined;
+
+export type DeepPartial<T> = T extends Builtin ? T
+  : T extends globalThis.Array<infer U> ? globalThis.Array<DeepPartial<U>>
+  : T extends ReadonlyArray<infer U> ? ReadonlyArray<DeepPartial<U>>
+  : T extends {} ? { [K in keyof T]?: DeepPartial<T[K]> }
+  : Partial<T>;
+
+function isObject(value: any): boolean {
+ return typeof value === "object" && value !== null;
+}
+
+function isSet(value: any): boolean {
+ return value !== null && value !== undefined;
+}
+
+export interface MessageFns<T> {
+  encode(message: T, writer?: BinaryWriter): BinaryWriter;
+  decode(input: BinaryReader | Uint8Array, length?: number): T;
+  fromJSON(object: any): T;
+  toJSON(message: T): unknown;
+  create(base?: DeepPartial<T>): T;
+  fromPartial(object: DeepPartial<T>): T;
+}
diff --git a/packages/grpc/src/basics/job-version.ts b/packages/grpc/src/basics/job-version.ts
new file mode 100644
index 0000000..94adfea
--- /dev/null
+++ b/packages/grpc/src/basics/job-version.ts
@@ -0,0 +1,163 @@
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+// protoc-gen-ts_proto v2.10.1
+// protoc v3.21.12
+// source: basics/job-version.proto
+
+/* eslint-disable */
+import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire";
+
+export const protobufPackage = "JobVersion";
+
+export interface Item {
+ id: string;
+ jobId: string;
+ version: string;
+ modified: string;
+ created: string;
+}
+
+function createBaseItem(): Item {
+ return { id: "", jobId: "", version: "", modified: "", created: "" };
+}
+
+export const Item: MessageFns
- = {
+ encode(message: Item, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
+ if (message.id !== "") {
+ writer.uint32(10).string(message.id);
+ }
+ if (message.jobId !== "") {
+ writer.uint32(18).string(message.jobId);
+ }
+ if (message.version !== "") {
+ writer.uint32(26).string(message.version);
+ }
+ if (message.modified !== "") {
+ writer.uint32(34).string(message.modified);
+ }
+ if (message.created !== "") {
+ writer.uint32(42).string(message.created);
+ }
+ return writer;
+ },
+
+ decode(input: BinaryReader | Uint8Array, length?: number): Item {
+ const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
+ const end = length === undefined ? reader.len : reader.pos + length;
+ const message = createBaseItem();
+ while (reader.pos < end) {
+ const tag = reader.uint32();
+ switch (tag >>> 3) {
+ case 1: {
+ if (tag !== 10) {
+ break;
+ }
+
+ message.id = reader.string();
+ continue;
+ }
+ case 2: {
+ if (tag !== 18) {
+ break;
+ }
+
+ message.jobId = reader.string();
+ continue;
+ }
+ case 3: {
+ if (tag !== 26) {
+ break;
+ }
+
+ message.version = reader.string();
+ continue;
+ }
+ case 4: {
+ if (tag !== 34) {
+ break;
+ }
+
+ message.modified = reader.string();
+ continue;
+ }
+ case 5: {
+ if (tag !== 42) {
+ break;
+ }
+
+ message.created = reader.string();
+ continue;
+ }
+ }
+ if ((tag & 7) === 4 || tag === 0) {
+ break;
+ }
+ reader.skip(tag & 7);
+ }
+ return message;
+ },
+
+ fromJSON(object: any): Item {
+ return {
+ id: isSet(object.id) ? globalThis.String(object.id) : "",
+ jobId: isSet(object.jobId) ? globalThis.String(object.jobId) : "",
+ version: isSet(object.version) ? globalThis.String(object.version) : "",
+ modified: isSet(object.modified) ? globalThis.String(object.modified) : "",
+ created: isSet(object.created) ? globalThis.String(object.created) : "",
+ };
+ },
+
+ toJSON(message: Item): unknown {
+ const obj: any = {};
+ if (message.id !== "") {
+ obj.id = message.id;
+ }
+ if (message.jobId !== "") {
+ obj.jobId = message.jobId;
+ }
+ if (message.version !== "") {
+ obj.version = message.version;
+ }
+ if (message.modified !== "") {
+ obj.modified = message.modified;
+ }
+ if (message.created !== "") {
+ obj.created = message.created;
+ }
+ return obj;
+ },
+
+ create(base?: DeepPartial
- ): Item {
+ return Item.fromPartial(base ?? {});
+ },
+ fromPartial(object: DeepPartial
- ): Item {
+ const message = createBaseItem();
+ message.id = object.id ?? "";
+ message.jobId = object.jobId ?? "";
+ message.version = object.version ?? "";
+ message.modified = object.modified ?? "";
+ message.created = object.created ?? "";
+ return message;
+ },
+};
+
+type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined;
+
+export type DeepPartial = T extends Builtin ? T
+ : T extends globalThis.Array ? globalThis.Array>
+ : T extends ReadonlyArray ? ReadonlyArray>
+ : T extends {} ? { [K in keyof T]?: DeepPartial }
+ : Partial;
+
+function isSet(value: any): boolean {
+ return value !== null && value !== undefined;
+}
+
+export interface MessageFns {
+ encode(message: T, writer?: BinaryWriter): BinaryWriter;
+ decode(input: BinaryReader | Uint8Array, length?: number): T;
+ fromJSON(object: any): T;
+ toJSON(message: T): unknown;
+ create(base?: DeepPartial): T;
+ fromPartial(object: DeepPartial): T;
+}
diff --git a/packages/grpc/src/basics/job.ts b/packages/grpc/src/basics/job.ts
new file mode 100644
index 0000000..fe88d04
--- /dev/null
+++ b/packages/grpc/src/basics/job.ts
@@ -0,0 +1,310 @@
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+// protoc-gen-ts_proto v2.10.1
+// protoc v3.21.12
+// source: basics/job.proto
+
+/* eslint-disable */
+import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire";
+
+export const protobufPackage = "Job";
+
+export interface Item {
+ id: string;
+ jobName: string;
+ status: Item_Status;
+ description?: string | undefined;
+ versionId?: string | undefined;
+ links: Item_Link[];
+}
+
+export const Item_Status = { ENABLED: "ENABLED", DISABLED: "DISABLED", UNRECOGNIZED: "UNRECOGNIZED" } as const;
+
+export type Item_Status = typeof Item_Status[keyof typeof Item_Status];
+
+export namespace Item_Status {
+ export type ENABLED = typeof Item_Status.ENABLED;
+ export type DISABLED = typeof Item_Status.DISABLED;
+ export type UNRECOGNIZED = typeof Item_Status.UNRECOGNIZED;
+}
+
+export function item_StatusFromJSON(object: any): Item_Status {
+ switch (object) {
+ case 0:
+ case "ENABLED":
+ return Item_Status.ENABLED;
+ case 1:
+ case "DISABLED":
+ return Item_Status.DISABLED;
+ case -1:
+ case "UNRECOGNIZED":
+ default:
+ return Item_Status.UNRECOGNIZED;
+ }
+}
+
+export function item_StatusToJSON(object: Item_Status): string {
+ switch (object) {
+ case Item_Status.ENABLED:
+ return "ENABLED";
+ case Item_Status.DISABLED:
+ return "DISABLED";
+ case Item_Status.UNRECOGNIZED:
+ default:
+ return "UNRECOGNIZED";
+ }
+}
+
+export function item_StatusToNumber(object: Item_Status): number {
+ switch (object) {
+ case Item_Status.ENABLED:
+ return 0;
+ case Item_Status.DISABLED:
+ return 1;
+ case Item_Status.UNRECOGNIZED:
+ default:
+ return -1;
+ }
+}
+
+export interface Item_Link {
+ name: string;
+ url: string;
+}
+
+function createBaseItem(): Item {
+ return { id: "", jobName: "", status: Item_Status.ENABLED, description: undefined, versionId: undefined, links: [] };
+}
+
+export const Item: MessageFns
- = {
+ encode(message: Item, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
+ if (message.id !== "") {
+ writer.uint32(10).string(message.id);
+ }
+ if (message.jobName !== "") {
+ writer.uint32(18).string(message.jobName);
+ }
+ if (message.status !== Item_Status.ENABLED) {
+ writer.uint32(24).int32(item_StatusToNumber(message.status));
+ }
+ if (message.description !== undefined) {
+ writer.uint32(34).string(message.description);
+ }
+ if (message.versionId !== undefined) {
+ writer.uint32(42).string(message.versionId);
+ }
+ for (const v of message.links) {
+ Item_Link.encode(v!, writer.uint32(50).fork()).join();
+ }
+ return writer;
+ },
+
+ decode(input: BinaryReader | Uint8Array, length?: number): Item {
+ const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
+ const end = length === undefined ? reader.len : reader.pos + length;
+ const message = createBaseItem();
+ while (reader.pos < end) {
+ const tag = reader.uint32();
+ switch (tag >>> 3) {
+ case 1: {
+ if (tag !== 10) {
+ break;
+ }
+
+ message.id = reader.string();
+ continue;
+ }
+ case 2: {
+ if (tag !== 18) {
+ break;
+ }
+
+ message.jobName = reader.string();
+ continue;
+ }
+ case 3: {
+ if (tag !== 24) {
+ break;
+ }
+
+ message.status = item_StatusFromJSON(reader.int32());
+ continue;
+ }
+ case 4: {
+ if (tag !== 34) {
+ break;
+ }
+
+ message.description = reader.string();
+ continue;
+ }
+ case 5: {
+ if (tag !== 42) {
+ break;
+ }
+
+ message.versionId = reader.string();
+ continue;
+ }
+ case 6: {
+ if (tag !== 50) {
+ break;
+ }
+
+ message.links.push(Item_Link.decode(reader, reader.uint32()));
+ continue;
+ }
+ }
+ if ((tag & 7) === 4 || tag === 0) {
+ break;
+ }
+ reader.skip(tag & 7);
+ }
+ return message;
+ },
+
+ fromJSON(object: any): Item {
+ return {
+ id: isSet(object.id) ? globalThis.String(object.id) : "",
+ jobName: isSet(object.jobName) ? globalThis.String(object.jobName) : "",
+ status: isSet(object.status) ? item_StatusFromJSON(object.status) : Item_Status.ENABLED,
+ description: isSet(object.description) ? globalThis.String(object.description) : undefined,
+ versionId: isSet(object.versionId) ? globalThis.String(object.versionId) : undefined,
+ links: globalThis.Array.isArray(object?.links) ? object.links.map((e: any) => Item_Link.fromJSON(e)) : [],
+ };
+ },
+
+ toJSON(message: Item): unknown {
+ const obj: any = {};
+ if (message.id !== "") {
+ obj.id = message.id;
+ }
+ if (message.jobName !== "") {
+ obj.jobName = message.jobName;
+ }
+ if (message.status !== Item_Status.ENABLED) {
+ obj.status = item_StatusToJSON(message.status);
+ }
+ if (message.description !== undefined) {
+ obj.description = message.description;
+ }
+ if (message.versionId !== undefined) {
+ obj.versionId = message.versionId;
+ }
+ if (message.links?.length) {
+ obj.links = message.links.map((e) => Item_Link.toJSON(e));
+ }
+ return obj;
+ },
+
+ create(base?: DeepPartial
- ): Item {
+ return Item.fromPartial(base ?? {});
+ },
+ fromPartial(object: DeepPartial
- ): Item {
+ const message = createBaseItem();
+ message.id = object.id ?? "";
+ message.jobName = object.jobName ?? "";
+ message.status = object.status ?? Item_Status.ENABLED;
+ message.description = object.description ?? undefined;
+ message.versionId = object.versionId ?? undefined;
+ message.links = object.links?.map((e) => Item_Link.fromPartial(e)) || [];
+ return message;
+ },
+};
+
+function createBaseItem_Link(): Item_Link {
+ return { name: "", url: "" };
+}
+
+export const Item_Link: MessageFns = {
+ encode(message: Item_Link, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
+ if (message.name !== "") {
+ writer.uint32(10).string(message.name);
+ }
+ if (message.url !== "") {
+ writer.uint32(18).string(message.url);
+ }
+ return writer;
+ },
+
+ decode(input: BinaryReader | Uint8Array, length?: number): Item_Link {
+ const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
+ const end = length === undefined ? reader.len : reader.pos + length;
+ const message = createBaseItem_Link();
+ while (reader.pos < end) {
+ const tag = reader.uint32();
+ switch (tag >>> 3) {
+ case 1: {
+ if (tag !== 10) {
+ break;
+ }
+
+ message.name = reader.string();
+ continue;
+ }
+ case 2: {
+ if (tag !== 18) {
+ break;
+ }
+
+ message.url = reader.string();
+ continue;
+ }
+ }
+ if ((tag & 7) === 4 || tag === 0) {
+ break;
+ }
+ reader.skip(tag & 7);
+ }
+ return message;
+ },
+
+ fromJSON(object: any): Item_Link {
+ return {
+ name: isSet(object.name) ? globalThis.String(object.name) : "",
+ url: isSet(object.url) ? globalThis.String(object.url) : "",
+ };
+ },
+
+ toJSON(message: Item_Link): unknown {
+ const obj: any = {};
+ if (message.name !== "") {
+ obj.name = message.name;
+ }
+ if (message.url !== "") {
+ obj.url = message.url;
+ }
+ return obj;
+ },
+
+ create(base?: DeepPartial): Item_Link {
+ return Item_Link.fromPartial(base ?? {});
+ },
+ fromPartial(object: DeepPartial): Item_Link {
+ const message = createBaseItem_Link();
+ message.name = object.name ?? "";
+ message.url = object.url ?? "";
+ return message;
+ },
+};
+
+type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined;
+
+export type DeepPartial = T extends Builtin ? T
+ : T extends globalThis.Array ? globalThis.Array>
+ : T extends ReadonlyArray ? ReadonlyArray>
+ : T extends {} ? { [K in keyof T]?: DeepPartial }
+ : Partial;
+
+function isSet(value: any): boolean {
+ return value !== null && value !== undefined;
+}
+
+export interface MessageFns {
+ encode(message: T, writer?: BinaryWriter): BinaryWriter;
+ decode(input: BinaryReader | Uint8Array, length?: number): T;
+ fromJSON(object: any): T;
+ toJSON(message: T): unknown;
+ create(base?: DeepPartial): T;
+ fromPartial(object: DeepPartial): T;
+}
diff --git a/packages/grpc/src/basics/jwt-key.ts b/packages/grpc/src/basics/jwt-key.ts
new file mode 100644
index 0000000..813b617
--- /dev/null
+++ b/packages/grpc/src/basics/jwt-key.ts
@@ -0,0 +1,229 @@
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+// protoc-gen-ts_proto v2.10.1
+// protoc v3.21.12
+// source: basics/jwt-key.proto
+
+/* eslint-disable */
+import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire";
+
+export const protobufPackage = "JwtKey";
+
+export interface Item {
+ id: string;
+ privateKey?: string | undefined;
+ publicKey: string;
+ status: Item_Status;
+ expires: string;
+ created: string;
+}
+
+export const Item_Status = { ENABLED: "ENABLED", DISABLED: "DISABLED", UNRECOGNIZED: "UNRECOGNIZED" } as const;
+
+export type Item_Status = typeof Item_Status[keyof typeof Item_Status];
+
+export namespace Item_Status {
+ export type ENABLED = typeof Item_Status.ENABLED;
+ export type DISABLED = typeof Item_Status.DISABLED;
+ export type UNRECOGNIZED = typeof Item_Status.UNRECOGNIZED;
+}
+
+export function item_StatusFromJSON(object: any): Item_Status {
+ switch (object) {
+ case 0:
+ case "ENABLED":
+ return Item_Status.ENABLED;
+ case 1:
+ case "DISABLED":
+ return Item_Status.DISABLED;
+ case -1:
+ case "UNRECOGNIZED":
+ default:
+ return Item_Status.UNRECOGNIZED;
+ }
+}
+
+export function item_StatusToJSON(object: Item_Status): string {
+ switch (object) {
+ case Item_Status.ENABLED:
+ return "ENABLED";
+ case Item_Status.DISABLED:
+ return "DISABLED";
+ case Item_Status.UNRECOGNIZED:
+ default:
+ return "UNRECOGNIZED";
+ }
+}
+
+export function item_StatusToNumber(object: Item_Status): number {
+ switch (object) {
+ case Item_Status.ENABLED:
+ return 0;
+ case Item_Status.DISABLED:
+ return 1;
+ case Item_Status.UNRECOGNIZED:
+ default:
+ return -1;
+ }
+}
+
+function createBaseItem(): Item {
+ return { id: "", privateKey: undefined, publicKey: "", status: Item_Status.ENABLED, expires: "", created: "" };
+}
+
+export const Item: MessageFns
- = {
+ encode(message: Item, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
+ if (message.id !== "") {
+ writer.uint32(10).string(message.id);
+ }
+ if (message.privateKey !== undefined) {
+ writer.uint32(18).string(message.privateKey);
+ }
+ if (message.publicKey !== "") {
+ writer.uint32(26).string(message.publicKey);
+ }
+ if (message.status !== Item_Status.ENABLED) {
+ writer.uint32(32).int32(item_StatusToNumber(message.status));
+ }
+ if (message.expires !== "") {
+ writer.uint32(42).string(message.expires);
+ }
+ if (message.created !== "") {
+ writer.uint32(50).string(message.created);
+ }
+ return writer;
+ },
+
+ decode(input: BinaryReader | Uint8Array, length?: number): Item {
+ const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
+ const end = length === undefined ? reader.len : reader.pos + length;
+ const message = createBaseItem();
+ while (reader.pos < end) {
+ const tag = reader.uint32();
+ switch (tag >>> 3) {
+ case 1: {
+ if (tag !== 10) {
+ break;
+ }
+
+ message.id = reader.string();
+ continue;
+ }
+ case 2: {
+ if (tag !== 18) {
+ break;
+ }
+
+ message.privateKey = reader.string();
+ continue;
+ }
+ case 3: {
+ if (tag !== 26) {
+ break;
+ }
+
+ message.publicKey = reader.string();
+ continue;
+ }
+ case 4: {
+ if (tag !== 32) {
+ break;
+ }
+
+ message.status = item_StatusFromJSON(reader.int32());
+ continue;
+ }
+ case 5: {
+ if (tag !== 42) {
+ break;
+ }
+
+ message.expires = reader.string();
+ continue;
+ }
+ case 6: {
+ if (tag !== 50) {
+ break;
+ }
+
+ message.created = reader.string();
+ continue;
+ }
+ }
+ if ((tag & 7) === 4 || tag === 0) {
+ break;
+ }
+ reader.skip(tag & 7);
+ }
+ return message;
+ },
+
+ fromJSON(object: any): Item {
+ return {
+ id: isSet(object.id) ? globalThis.String(object.id) : "",
+ privateKey: isSet(object.privateKey) ? globalThis.String(object.privateKey) : undefined,
+ publicKey: isSet(object.publicKey) ? globalThis.String(object.publicKey) : "",
+ status: isSet(object.status) ? item_StatusFromJSON(object.status) : Item_Status.ENABLED,
+ expires: isSet(object.expires) ? globalThis.String(object.expires) : "",
+ created: isSet(object.created) ? globalThis.String(object.created) : "",
+ };
+ },
+
+ toJSON(message: Item): unknown {
+ const obj: any = {};
+ if (message.id !== "") {
+ obj.id = message.id;
+ }
+ if (message.privateKey !== undefined) {
+ obj.privateKey = message.privateKey;
+ }
+ if (message.publicKey !== "") {
+ obj.publicKey = message.publicKey;
+ }
+ if (message.status !== Item_Status.ENABLED) {
+ obj.status = item_StatusToJSON(message.status);
+ }
+ if (message.expires !== "") {
+ obj.expires = message.expires;
+ }
+ if (message.created !== "") {
+ obj.created = message.created;
+ }
+ return obj;
+ },
+
+ create(base?: DeepPartial
- ): Item {
+ return Item.fromPartial(base ?? {});
+ },
+ fromPartial(object: DeepPartial
- ): Item {
+ const message = createBaseItem();
+ message.id = object.id ?? "";
+ message.privateKey = object.privateKey ?? undefined;
+ message.publicKey = object.publicKey ?? "";
+ message.status = object.status ?? Item_Status.ENABLED;
+ message.expires = object.expires ?? "";
+ message.created = object.created ?? "";
+ return message;
+ },
+};
+
+type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined;
+
+export type DeepPartial = T extends Builtin ? T
+ : T extends globalThis.Array ? globalThis.Array>
+ : T extends ReadonlyArray ? ReadonlyArray>
+ : T extends {} ? { [K in keyof T]?: DeepPartial }
+ : Partial;
+
+function isSet(value: any): boolean {
+ return value !== null && value !== undefined;
+}
+
+export interface MessageFns {
+ encode(message: T, writer?: BinaryWriter): BinaryWriter;
+ decode(input: BinaryReader | Uint8Array, length?: number): T;
+ fromJSON(object: any): T;
+ toJSON(message: T): unknown;
+ create(base?: DeepPartial): T;
+ fromPartial(object: DeepPartial): T;
+}
diff --git a/packages/grpc/src/basics/jwt-keys.ts b/packages/grpc/src/basics/jwt-keys.ts
new file mode 100644
index 0000000..8d465b0
--- /dev/null
+++ b/packages/grpc/src/basics/jwt-keys.ts
@@ -0,0 +1,229 @@
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+// protoc-gen-ts_proto v2.10.1
+// protoc v3.21.12
+// source: basics/jwt-keys.proto
+
+/* eslint-disable */
+import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire";
+
+export const protobufPackage = "JwtKey";
+
+export interface Item {
+ id: string;
+ privateKey?: string | undefined;
+ publicKey: string;
+ status: Item_Status;
+ expires: string;
+ created: string;
+}
+
+export const Item_Status = { ENABLED: "ENABLED", DISABLED: "DISABLED", UNRECOGNIZED: "UNRECOGNIZED" } as const;
+
+export type Item_Status = typeof Item_Status[keyof typeof Item_Status];
+
+export namespace Item_Status {
+ export type ENABLED = typeof Item_Status.ENABLED;
+ export type DISABLED = typeof Item_Status.DISABLED;
+ export type UNRECOGNIZED = typeof Item_Status.UNRECOGNIZED;
+}
+
+export function item_StatusFromJSON(object: any): Item_Status {
+ switch (object) {
+ case 0:
+ case "ENABLED":
+ return Item_Status.ENABLED;
+ case 1:
+ case "DISABLED":
+ return Item_Status.DISABLED;
+ case -1:
+ case "UNRECOGNIZED":
+ default:
+ return Item_Status.UNRECOGNIZED;
+ }
+}
+
+export function item_StatusToJSON(object: Item_Status): string {
+ switch (object) {
+ case Item_Status.ENABLED:
+ return "ENABLED";
+ case Item_Status.DISABLED:
+ return "DISABLED";
+ case Item_Status.UNRECOGNIZED:
+ default:
+ return "UNRECOGNIZED";
+ }
+}
+
+export function item_StatusToNumber(object: Item_Status): number {
+ switch (object) {
+ case Item_Status.ENABLED:
+ return 0;
+ case Item_Status.DISABLED:
+ return 1;
+ case Item_Status.UNRECOGNIZED:
+ default:
+ return -1;
+ }
+}
+
+function createBaseItem(): Item {
+ return { id: "", privateKey: undefined, publicKey: "", status: Item_Status.ENABLED, expires: "", created: "" };
+}
+
+export const Item: MessageFns
- = {
+ encode(message: Item, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
+ if (message.id !== "") {
+ writer.uint32(10).string(message.id);
+ }
+ if (message.privateKey !== undefined) {
+ writer.uint32(18).string(message.privateKey);
+ }
+ if (message.publicKey !== "") {
+ writer.uint32(26).string(message.publicKey);
+ }
+ if (message.status !== Item_Status.ENABLED) {
+ writer.uint32(32).int32(item_StatusToNumber(message.status));
+ }
+ if (message.expires !== "") {
+ writer.uint32(42).string(message.expires);
+ }
+ if (message.created !== "") {
+ writer.uint32(50).string(message.created);
+ }
+ return writer;
+ },
+
+ decode(input: BinaryReader | Uint8Array, length?: number): Item {
+ const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
+ const end = length === undefined ? reader.len : reader.pos + length;
+ const message = createBaseItem();
+ while (reader.pos < end) {
+ const tag = reader.uint32();
+ switch (tag >>> 3) {
+ case 1: {
+ if (tag !== 10) {
+ break;
+ }
+
+ message.id = reader.string();
+ continue;
+ }
+ case 2: {
+ if (tag !== 18) {
+ break;
+ }
+
+ message.privateKey = reader.string();
+ continue;
+ }
+ case 3: {
+ if (tag !== 26) {
+ break;
+ }
+
+ message.publicKey = reader.string();
+ continue;
+ }
+ case 4: {
+ if (tag !== 32) {
+ break;
+ }
+
+ message.status = item_StatusFromJSON(reader.int32());
+ continue;
+ }
+ case 5: {
+ if (tag !== 42) {
+ break;
+ }
+
+ message.expires = reader.string();
+ continue;
+ }
+ case 6: {
+ if (tag !== 50) {
+ break;
+ }
+
+ message.created = reader.string();
+ continue;
+ }
+ }
+ if ((tag & 7) === 4 || tag === 0) {
+ break;
+ }
+ reader.skip(tag & 7);
+ }
+ return message;
+ },
+
+ fromJSON(object: any): Item {
+ return {
+ id: isSet(object.id) ? globalThis.String(object.id) : "",
+ privateKey: isSet(object.privateKey) ? globalThis.String(object.privateKey) : undefined,
+ publicKey: isSet(object.publicKey) ? globalThis.String(object.publicKey) : "",
+ status: isSet(object.status) ? item_StatusFromJSON(object.status) : Item_Status.ENABLED,
+ expires: isSet(object.expires) ? globalThis.String(object.expires) : "",
+ created: isSet(object.created) ? globalThis.String(object.created) : "",
+ };
+ },
+
+ toJSON(message: Item): unknown {
+ const obj: any = {};
+ if (message.id !== "") {
+ obj.id = message.id;
+ }
+ if (message.privateKey !== undefined) {
+ obj.privateKey = message.privateKey;
+ }
+ if (message.publicKey !== "") {
+ obj.publicKey = message.publicKey;
+ }
+ if (message.status !== Item_Status.ENABLED) {
+ obj.status = item_StatusToJSON(message.status);
+ }
+ if (message.expires !== "") {
+ obj.expires = message.expires;
+ }
+ if (message.created !== "") {
+ obj.created = message.created;
+ }
+ return obj;
+ },
+
+ create(base?: DeepPartial
- ): Item {
+ return Item.fromPartial(base ?? {});
+ },
+ fromPartial(object: DeepPartial
- ): Item {
+ const message = createBaseItem();
+ message.id = object.id ?? "";
+ message.privateKey = object.privateKey ?? undefined;
+ message.publicKey = object.publicKey ?? "";
+ message.status = object.status ?? Item_Status.ENABLED;
+ message.expires = object.expires ?? "";
+ message.created = object.created ?? "";
+ return message;
+ },
+};
+
+type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined;
+
+export type DeepPartial = T extends Builtin ? T
+ : T extends globalThis.Array ? globalThis.Array>
+ : T extends ReadonlyArray ? ReadonlyArray>
+ : T extends {} ? { [K in keyof T]?: DeepPartial }
+ : Partial;
+
+function isSet(value: any): boolean {
+ return value !== null && value !== undefined;
+}
+
+export interface MessageFns {
+ encode(message: T, writer?: BinaryWriter): BinaryWriter;
+ decode(input: BinaryReader | Uint8Array, length?: number): T;
+ fromJSON(object: any): T;
+ toJSON(message: T): unknown;
+ create(base?: DeepPartial): T;
+ fromPartial(object: DeepPartial): T;
+}
diff --git a/packages/grpc/src/basics/runner.ts b/packages/grpc/src/basics/runner.ts
new file mode 100644
index 0000000..e772522
--- /dev/null
+++ b/packages/grpc/src/basics/runner.ts
@@ -0,0 +1,448 @@
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+// protoc-gen-ts_proto v2.10.1
+// protoc v3.21.12
+// source: basics/runner.proto
+
+/* eslint-disable */
+import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire";
+
+export const protobufPackage = "Runner";
+
+export interface Item {
+ id: string;
+ jobId: string;
+ actionId: string;
+ versionId: string;
+ properties: Item_Properties | undefined;
+ createdAt: string;
+ readyAt?: string | undefined;
+ closingAt?: string | undefined;
+ closedAt?: string | undefined;
+}
+
+export const Item_Status = {
+ STARTING: "STARTING",
+ READY: "READY",
+ CLOSING: "CLOSING",
+ CLOSED: "CLOSED",
+ UNRECOGNIZED: "UNRECOGNIZED",
+} as const;
+
+export type Item_Status = typeof Item_Status[keyof typeof Item_Status];
+
+export namespace Item_Status {
+ export type STARTING = typeof Item_Status.STARTING;
+ export type READY = typeof Item_Status.READY;
+ export type CLOSING = typeof Item_Status.CLOSING;
+ export type CLOSED = typeof Item_Status.CLOSED;
+ export type UNRECOGNIZED = typeof Item_Status.UNRECOGNIZED;
+}
+
+export function item_StatusFromJSON(object: any): Item_Status {
+ switch (object) {
+ case 0:
+ case "STARTING":
+ return Item_Status.STARTING;
+ case 1:
+ case "READY":
+ return Item_Status.READY;
+ case 2:
+ case "CLOSING":
+ return Item_Status.CLOSING;
+ case 3:
+ case "CLOSED":
+ return Item_Status.CLOSED;
+ case -1:
+ case "UNRECOGNIZED":
+ default:
+ return Item_Status.UNRECOGNIZED;
+ }
+}
+
+export function item_StatusToJSON(object: Item_Status): string {
+ switch (object) {
+ case Item_Status.STARTING:
+ return "STARTING";
+ case Item_Status.READY:
+ return "READY";
+ case Item_Status.CLOSING:
+ return "CLOSING";
+ case Item_Status.CLOSED:
+ return "CLOSED";
+ case Item_Status.UNRECOGNIZED:
+ default:
+ return "UNRECOGNIZED";
+ }
+}
+
+export function item_StatusToNumber(object: Item_Status): number {
+ switch (object) {
+ case Item_Status.STARTING:
+ return 0;
+ case Item_Status.READY:
+ return 1;
+ case Item_Status.CLOSING:
+ return 2;
+ case Item_Status.CLOSED:
+ return 3;
+ case Item_Status.UNRECOGNIZED:
+ default:
+ return -1;
+ }
+}
+
+export interface Item_Properties {
+ runnerPid: string;
+ runnerContainerName: string;
+ runnerContainerNetworks: string[];
+ runnerApiPort: number;
+ runnerDebug: boolean;
+}
+
+function createBaseItem(): Item {
+ return {
+ id: "",
+ jobId: "",
+ actionId: "",
+ versionId: "",
+ properties: undefined,
+ createdAt: "",
+ readyAt: undefined,
+ closingAt: undefined,
+ closedAt: undefined,
+ };
+}
+
+export const Item: MessageFns
- = {
+ encode(message: Item, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
+ if (message.id !== "") {
+ writer.uint32(10).string(message.id);
+ }
+ if (message.jobId !== "") {
+ writer.uint32(18).string(message.jobId);
+ }
+ if (message.actionId !== "") {
+ writer.uint32(26).string(message.actionId);
+ }
+ if (message.versionId !== "") {
+ writer.uint32(34).string(message.versionId);
+ }
+ if (message.properties !== undefined) {
+ Item_Properties.encode(message.properties, writer.uint32(42).fork()).join();
+ }
+ if (message.createdAt !== "") {
+ writer.uint32(90).string(message.createdAt);
+ }
+ if (message.readyAt !== undefined) {
+ writer.uint32(98).string(message.readyAt);
+ }
+ if (message.closingAt !== undefined) {
+ writer.uint32(106).string(message.closingAt);
+ }
+ if (message.closedAt !== undefined) {
+ writer.uint32(114).string(message.closedAt);
+ }
+ return writer;
+ },
+
+ decode(input: BinaryReader | Uint8Array, length?: number): Item {
+ const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
+ const end = length === undefined ? reader.len : reader.pos + length;
+ const message = createBaseItem();
+ while (reader.pos < end) {
+ const tag = reader.uint32();
+ switch (tag >>> 3) {
+ case 1: {
+ if (tag !== 10) {
+ break;
+ }
+
+ message.id = reader.string();
+ continue;
+ }
+ case 2: {
+ if (tag !== 18) {
+ break;
+ }
+
+ message.jobId = reader.string();
+ continue;
+ }
+ case 3: {
+ if (tag !== 26) {
+ break;
+ }
+
+ message.actionId = reader.string();
+ continue;
+ }
+ case 4: {
+ if (tag !== 34) {
+ break;
+ }
+
+ message.versionId = reader.string();
+ continue;
+ }
+ case 5: {
+ if (tag !== 42) {
+ break;
+ }
+
+ message.properties = Item_Properties.decode(reader, reader.uint32());
+ continue;
+ }
+ case 11: {
+ if (tag !== 90) {
+ break;
+ }
+
+ message.createdAt = reader.string();
+ continue;
+ }
+ case 12: {
+ if (tag !== 98) {
+ break;
+ }
+
+ message.readyAt = reader.string();
+ continue;
+ }
+ case 13: {
+ if (tag !== 106) {
+ break;
+ }
+
+ message.closingAt = reader.string();
+ continue;
+ }
+ case 14: {
+ if (tag !== 114) {
+ break;
+ }
+
+ message.closedAt = reader.string();
+ continue;
+ }
+ }
+ if ((tag & 7) === 4 || tag === 0) {
+ break;
+ }
+ reader.skip(tag & 7);
+ }
+ return message;
+ },
+
+ fromJSON(object: any): Item {
+ return {
+ id: isSet(object.id) ? globalThis.String(object.id) : "",
+ jobId: isSet(object.jobId) ? globalThis.String(object.jobId) : "",
+ actionId: isSet(object.actionId) ? globalThis.String(object.actionId) : "",
+ versionId: isSet(object.versionId) ? globalThis.String(object.versionId) : "",
+ properties: isSet(object.properties) ? Item_Properties.fromJSON(object.properties) : undefined,
+ createdAt: isSet(object.createdAt) ? globalThis.String(object.createdAt) : "",
+ readyAt: isSet(object.readyAt) ? globalThis.String(object.readyAt) : undefined,
+ closingAt: isSet(object.closingAt) ? globalThis.String(object.closingAt) : undefined,
+ closedAt: isSet(object.closedAt) ? globalThis.String(object.closedAt) : undefined,
+ };
+ },
+
+ toJSON(message: Item): unknown {
+ const obj: any = {};
+ if (message.id !== "") {
+ obj.id = message.id;
+ }
+ if (message.jobId !== "") {
+ obj.jobId = message.jobId;
+ }
+ if (message.actionId !== "") {
+ obj.actionId = message.actionId;
+ }
+ if (message.versionId !== "") {
+ obj.versionId = message.versionId;
+ }
+ if (message.properties !== undefined) {
+ obj.properties = Item_Properties.toJSON(message.properties);
+ }
+ if (message.createdAt !== "") {
+ obj.createdAt = message.createdAt;
+ }
+ if (message.readyAt !== undefined) {
+ obj.readyAt = message.readyAt;
+ }
+ if (message.closingAt !== undefined) {
+ obj.closingAt = message.closingAt;
+ }
+ if (message.closedAt !== undefined) {
+ obj.closedAt = message.closedAt;
+ }
+ return obj;
+ },
+
+ create(base?: DeepPartial
- ): Item {
+ return Item.fromPartial(base ?? {});
+ },
+ fromPartial(object: DeepPartial
- ): Item {
+ const message = createBaseItem();
+ message.id = object.id ?? "";
+ message.jobId = object.jobId ?? "";
+ message.actionId = object.actionId ?? "";
+ message.versionId = object.versionId ?? "";
+ message.properties = (object.properties !== undefined && object.properties !== null)
+ ? Item_Properties.fromPartial(object.properties)
+ : undefined;
+ message.createdAt = object.createdAt ?? "";
+ message.readyAt = object.readyAt ?? undefined;
+ message.closingAt = object.closingAt ?? undefined;
+ message.closedAt = object.closedAt ?? undefined;
+ return message;
+ },
+};
+
+function createBaseItem_Properties(): Item_Properties {
+ return { runnerPid: "", runnerContainerName: "", runnerContainerNetworks: [], runnerApiPort: 0, runnerDebug: false };
+}
+
+export const Item_Properties: MessageFns = {
+ encode(message: Item_Properties, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
+ if (message.runnerPid !== "") {
+ writer.uint32(10).string(message.runnerPid);
+ }
+ if (message.runnerContainerName !== "") {
+ writer.uint32(26).string(message.runnerContainerName);
+ }
+ for (const v of message.runnerContainerNetworks) {
+ writer.uint32(34).string(v!);
+ }
+ if (message.runnerApiPort !== 0) {
+ writer.uint32(40).uint32(message.runnerApiPort);
+ }
+ if (message.runnerDebug !== false) {
+ writer.uint32(48).bool(message.runnerDebug);
+ }
+ return writer;
+ },
+
+ decode(input: BinaryReader | Uint8Array, length?: number): Item_Properties {
+ const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
+ const end = length === undefined ? reader.len : reader.pos + length;
+ const message = createBaseItem_Properties();
+ while (reader.pos < end) {
+ const tag = reader.uint32();
+ switch (tag >>> 3) {
+ case 1: {
+ if (tag !== 10) {
+ break;
+ }
+
+ message.runnerPid = reader.string();
+ continue;
+ }
+ case 3: {
+ if (tag !== 26) {
+ break;
+ }
+
+ message.runnerContainerName = reader.string();
+ continue;
+ }
+ case 4: {
+ if (tag !== 34) {
+ break;
+ }
+
+ message.runnerContainerNetworks.push(reader.string());
+ continue;
+ }
+ case 5: {
+ if (tag !== 40) {
+ break;
+ }
+
+ message.runnerApiPort = reader.uint32();
+ continue;
+ }
+ case 6: {
+ if (tag !== 48) {
+ break;
+ }
+
+ message.runnerDebug = reader.bool();
+ continue;
+ }
+ }
+ if ((tag & 7) === 4 || tag === 0) {
+ break;
+ }
+ reader.skip(tag & 7);
+ }
+ return message;
+ },
+
+ fromJSON(object: any): Item_Properties {
+ return {
+ runnerPid: isSet(object.runnerPid) ? globalThis.String(object.runnerPid) : "",
+ runnerContainerName: isSet(object.runnerContainerName) ? globalThis.String(object.runnerContainerName) : "",
+ runnerContainerNetworks: globalThis.Array.isArray(object?.runnerContainerNetworks)
+ ? object.runnerContainerNetworks.map((e: any) => globalThis.String(e))
+ : [],
+ runnerApiPort: isSet(object.runnerApiPort) ? globalThis.Number(object.runnerApiPort) : 0,
+ runnerDebug: isSet(object.runnerDebug) ? globalThis.Boolean(object.runnerDebug) : false,
+ };
+ },
+
+ toJSON(message: Item_Properties): unknown {
+ const obj: any = {};
+ if (message.runnerPid !== "") {
+ obj.runnerPid = message.runnerPid;
+ }
+ if (message.runnerContainerName !== "") {
+ obj.runnerContainerName = message.runnerContainerName;
+ }
+ if (message.runnerContainerNetworks?.length) {
+ obj.runnerContainerNetworks = message.runnerContainerNetworks;
+ }
+ if (message.runnerApiPort !== 0) {
+ obj.runnerApiPort = Math.round(message.runnerApiPort);
+ }
+ if (message.runnerDebug !== false) {
+ obj.runnerDebug = message.runnerDebug;
+ }
+ return obj;
+ },
+
+ create(base?: DeepPartial): Item_Properties {
+ return Item_Properties.fromPartial(base ?? {});
+ },
+ fromPartial(object: DeepPartial): Item_Properties {
+ const message = createBaseItem_Properties();
+ message.runnerPid = object.runnerPid ?? "";
+ message.runnerContainerName = object.runnerContainerName ?? "";
+ message.runnerContainerNetworks = object.runnerContainerNetworks?.map((e) => e) || [];
+ message.runnerApiPort = object.runnerApiPort ?? 0;
+ message.runnerDebug = object.runnerDebug ?? false;
+ return message;
+ },
+};
+
+type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined;
+
+export type DeepPartial = T extends Builtin ? T
+ : T extends globalThis.Array ? globalThis.Array>
+ : T extends ReadonlyArray ? ReadonlyArray>
+ : T extends {} ? { [K in keyof T]?: DeepPartial }
+ : Partial;
+
+function isSet(value: any): boolean {
+ return value !== null && value !== undefined;
+}
+
+export interface MessageFns {
+ encode(message: T, writer?: BinaryWriter): BinaryWriter;
+ decode(input: BinaryReader | Uint8Array, length?: number): T;
+ fromJSON(object: any): T;
+ toJSON(message: T): unknown;
+ create(base?: DeepPartial): T;
+ fromPartial(object: DeepPartial): T;
+}
diff --git a/packages/grpc/src/basics/trigger.ts b/packages/grpc/src/basics/trigger.ts
new file mode 100644
index 0000000..d6ebaf4
--- /dev/null
+++ b/packages/grpc/src/basics/trigger.ts
@@ -0,0 +1,763 @@
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+// protoc-gen-ts_proto v2.10.1
+// protoc v3.21.12
+// source: basics/trigger.proto
+
+/* eslint-disable */
+import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire";
+
+export const protobufPackage = "Trigger";
+
+export interface Item {
+ id: string;
+ jobId: string;
+ versionId: string;
+ schedule?: Item_TriggerSchedule | undefined;
+ http?: Item_TriggerHttp | undefined;
+ mqtt?: Item_TriggerMqtt | undefined;
+}
+
+export interface Item_TriggerSchedule {
+ name?: string | undefined;
+ cron: string;
+ timezone?: string | undefined;
+}
+
+export interface Item_TriggerHttp {
+ name?: string | undefined;
+ hostname?: string | undefined;
+ method?: string | undefined;
+ path?: string | undefined;
+}
+
+export interface Item_TriggerMqtt {
+ name?: string | undefined;
+ topics: string[];
+ connection: Item_TriggerMqtt_Connection | undefined;
+}
+
+export interface Item_TriggerMqtt_Connection {
+ protocol?: string | undefined;
+ protocolVariable?: string | undefined;
+ port?: string | undefined;
+ portVariable?: string | undefined;
+ host?: string | undefined;
+ hostVariable?: string | undefined;
+ username?: string | undefined;
+ usernameVariable?: string | undefined;
+ password?: string | undefined;
+ passwordVariable?: string | undefined;
+ clientId?: string | undefined;
+ clientIdVariable?: string | undefined;
+}
+
+function createBaseItem(): Item {
+ return { id: "", jobId: "", versionId: "", schedule: undefined, http: undefined, mqtt: undefined };
+}
+
+export const Item: MessageFns<Item> = {
+ encode(message: Item, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
+ if (message.id !== "") {
+ writer.uint32(10).string(message.id);
+ }
+ if (message.jobId !== "") {
+ writer.uint32(18).string(message.jobId);
+ }
+ if (message.versionId !== "") {
+ writer.uint32(26).string(message.versionId);
+ }
+ if (message.schedule !== undefined) {
+ Item_TriggerSchedule.encode(message.schedule, writer.uint32(34).fork()).join();
+ }
+ if (message.http !== undefined) {
+ Item_TriggerHttp.encode(message.http, writer.uint32(42).fork()).join();
+ }
+ if (message.mqtt !== undefined) {
+ Item_TriggerMqtt.encode(message.mqtt, writer.uint32(50).fork()).join();
+ }
+ return writer;
+ },
+
+ decode(input: BinaryReader | Uint8Array, length?: number): Item {
+ const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
+ const end = length === undefined ? reader.len : reader.pos + length;
+ const message = createBaseItem();
+ while (reader.pos < end) {
+ const tag = reader.uint32();
+ switch (tag >>> 3) {
+ case 1: {
+ if (tag !== 10) {
+ break;
+ }
+
+ message.id = reader.string();
+ continue;
+ }
+ case 2: {
+ if (tag !== 18) {
+ break;
+ }
+
+ message.jobId = reader.string();
+ continue;
+ }
+ case 3: {
+ if (tag !== 26) {
+ break;
+ }
+
+ message.versionId = reader.string();
+ continue;
+ }
+ case 4: {
+ if (tag !== 34) {
+ break;
+ }
+
+ message.schedule = Item_TriggerSchedule.decode(reader, reader.uint32());
+ continue;
+ }
+ case 5: {
+ if (tag !== 42) {
+ break;
+ }
+
+ message.http = Item_TriggerHttp.decode(reader, reader.uint32());
+ continue;
+ }
+ case 6: {
+ if (tag !== 50) {
+ break;
+ }
+
+ message.mqtt = Item_TriggerMqtt.decode(reader, reader.uint32());
+ continue;
+ }
+ }
+ if ((tag & 7) === 4 || tag === 0) {
+ break;
+ }
+ reader.skip(tag & 7);
+ }
+ return message;
+ },
+
+ fromJSON(object: any): Item {
+ return {
+ id: isSet(object.id) ? globalThis.String(object.id) : "",
+ jobId: isSet(object.jobId) ? globalThis.String(object.jobId) : "",
+ versionId: isSet(object.versionId) ? globalThis.String(object.versionId) : "",
+ schedule: isSet(object.schedule) ? Item_TriggerSchedule.fromJSON(object.schedule) : undefined,
+ http: isSet(object.http) ? Item_TriggerHttp.fromJSON(object.http) : undefined,
+ mqtt: isSet(object.mqtt) ? Item_TriggerMqtt.fromJSON(object.mqtt) : undefined,
+ };
+ },
+
+ toJSON(message: Item): unknown {
+ const obj: any = {};
+ if (message.id !== "") {
+ obj.id = message.id;
+ }
+ if (message.jobId !== "") {
+ obj.jobId = message.jobId;
+ }
+ if (message.versionId !== "") {
+ obj.versionId = message.versionId;
+ }
+ if (message.schedule !== undefined) {
+ obj.schedule = Item_TriggerSchedule.toJSON(message.schedule);
+ }
+ if (message.http !== undefined) {
+ obj.http = Item_TriggerHttp.toJSON(message.http);
+ }
+ if (message.mqtt !== undefined) {
+ obj.mqtt = Item_TriggerMqtt.toJSON(message.mqtt);
+ }
+ return obj;
+ },
+
+ create(base?: DeepPartial<Item>): Item {
+ return Item.fromPartial(base ?? {});
+ },
+ fromPartial(object: DeepPartial<Item>): Item {
+ const message = createBaseItem();
+ message.id = object.id ?? "";
+ message.jobId = object.jobId ?? "";
+ message.versionId = object.versionId ?? "";
+ message.schedule = (object.schedule !== undefined && object.schedule !== null)
+ ? Item_TriggerSchedule.fromPartial(object.schedule)
+ : undefined;
+ message.http = (object.http !== undefined && object.http !== null)
+ ? Item_TriggerHttp.fromPartial(object.http)
+ : undefined;
+ message.mqtt = (object.mqtt !== undefined && object.mqtt !== null)
+ ? Item_TriggerMqtt.fromPartial(object.mqtt)
+ : undefined;
+ return message;
+ },
+};
+
+function createBaseItem_TriggerSchedule(): Item_TriggerSchedule {
+ return { name: undefined, cron: "", timezone: undefined };
+}
+
+export const Item_TriggerSchedule: MessageFns<Item_TriggerSchedule> = {
+ encode(message: Item_TriggerSchedule, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
+ if (message.name !== undefined) {
+ writer.uint32(10).string(message.name);
+ }
+ if (message.cron !== "") {
+ writer.uint32(18).string(message.cron);
+ }
+ if (message.timezone !== undefined) {
+ writer.uint32(26).string(message.timezone);
+ }
+ return writer;
+ },
+
+ decode(input: BinaryReader | Uint8Array, length?: number): Item_TriggerSchedule {
+ const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
+ const end = length === undefined ? reader.len : reader.pos + length;
+ const message = createBaseItem_TriggerSchedule();
+ while (reader.pos < end) {
+ const tag = reader.uint32();
+ switch (tag >>> 3) {
+ case 1: {
+ if (tag !== 10) {
+ break;
+ }
+
+ message.name = reader.string();
+ continue;
+ }
+ case 2: {
+ if (tag !== 18) {
+ break;
+ }
+
+ message.cron = reader.string();
+ continue;
+ }
+ case 3: {
+ if (tag !== 26) {
+ break;
+ }
+
+ message.timezone = reader.string();
+ continue;
+ }
+ }
+ if ((tag & 7) === 4 || tag === 0) {
+ break;
+ }
+ reader.skip(tag & 7);
+ }
+ return message;
+ },
+
+ fromJSON(object: any): Item_TriggerSchedule {
+ return {
+ name: isSet(object.name) ? globalThis.String(object.name) : undefined,
+ cron: isSet(object.cron) ? globalThis.String(object.cron) : "",
+ timezone: isSet(object.timezone) ? globalThis.String(object.timezone) : undefined,
+ };
+ },
+
+ toJSON(message: Item_TriggerSchedule): unknown {
+ const obj: any = {};
+ if (message.name !== undefined) {
+ obj.name = message.name;
+ }
+ if (message.cron !== "") {
+ obj.cron = message.cron;
+ }
+ if (message.timezone !== undefined) {
+ obj.timezone = message.timezone;
+ }
+ return obj;
+ },
+
+ create(base?: DeepPartial<Item_TriggerSchedule>): Item_TriggerSchedule {
+ return Item_TriggerSchedule.fromPartial(base ?? {});
+ },
+ fromPartial(object: DeepPartial<Item_TriggerSchedule>): Item_TriggerSchedule {
+ const message = createBaseItem_TriggerSchedule();
+ message.name = object.name ?? undefined;
+ message.cron = object.cron ?? "";
+ message.timezone = object.timezone ?? undefined;
+ return message;
+ },
+};
+
+function createBaseItem_TriggerHttp(): Item_TriggerHttp {
+ return { name: undefined, hostname: undefined, method: undefined, path: undefined };
+}
+
+export const Item_TriggerHttp: MessageFns<Item_TriggerHttp> = {
+ encode(message: Item_TriggerHttp, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
+ if (message.name !== undefined) {
+ writer.uint32(10).string(message.name);
+ }
+ if (message.hostname !== undefined) {
+ writer.uint32(18).string(message.hostname);
+ }
+ if (message.method !== undefined) {
+ writer.uint32(26).string(message.method);
+ }
+ if (message.path !== undefined) {
+ writer.uint32(34).string(message.path);
+ }
+ return writer;
+ },
+
+ decode(input: BinaryReader | Uint8Array, length?: number): Item_TriggerHttp {
+ const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
+ const end = length === undefined ? reader.len : reader.pos + length;
+ const message = createBaseItem_TriggerHttp();
+ while (reader.pos < end) {
+ const tag = reader.uint32();
+ switch (tag >>> 3) {
+ case 1: {
+ if (tag !== 10) {
+ break;
+ }
+
+ message.name = reader.string();
+ continue;
+ }
+ case 2: {
+ if (tag !== 18) {
+ break;
+ }
+
+ message.hostname = reader.string();
+ continue;
+ }
+ case 3: {
+ if (tag !== 26) {
+ break;
+ }
+
+ message.method = reader.string();
+ continue;
+ }
+ case 4: {
+ if (tag !== 34) {
+ break;
+ }
+
+ message.path = reader.string();
+ continue;
+ }
+ }
+ if ((tag & 7) === 4 || tag === 0) {
+ break;
+ }
+ reader.skip(tag & 7);
+ }
+ return message;
+ },
+
+ fromJSON(object: any): Item_TriggerHttp {
+ return {
+ name: isSet(object.name) ? globalThis.String(object.name) : undefined,
+ hostname: isSet(object.hostname) ? globalThis.String(object.hostname) : undefined,
+ method: isSet(object.method) ? globalThis.String(object.method) : undefined,
+ path: isSet(object.path) ? globalThis.String(object.path) : undefined,
+ };
+ },
+
+ toJSON(message: Item_TriggerHttp): unknown {
+ const obj: any = {};
+ if (message.name !== undefined) {
+ obj.name = message.name;
+ }
+ if (message.hostname !== undefined) {
+ obj.hostname = message.hostname;
+ }
+ if (message.method !== undefined) {
+ obj.method = message.method;
+ }
+ if (message.path !== undefined) {
+ obj.path = message.path;
+ }
+ return obj;
+ },
+
+ create(base?: DeepPartial<Item_TriggerHttp>): Item_TriggerHttp {
+ return Item_TriggerHttp.fromPartial(base ?? {});
+ },
+ fromPartial(object: DeepPartial<Item_TriggerHttp>): Item_TriggerHttp {
+ const message = createBaseItem_TriggerHttp();
+ message.name = object.name ?? undefined;
+ message.hostname = object.hostname ?? undefined;
+ message.method = object.method ?? undefined;
+ message.path = object.path ?? undefined;
+ return message;
+ },
+};
+
+function createBaseItem_TriggerMqtt(): Item_TriggerMqtt {
+ return { name: undefined, topics: [], connection: undefined };
+}
+
+export const Item_TriggerMqtt: MessageFns<Item_TriggerMqtt> = {
+ encode(message: Item_TriggerMqtt, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
+ if (message.name !== undefined) {
+ writer.uint32(10).string(message.name);
+ }
+ for (const v of message.topics) {
+ writer.uint32(18).string(v!);
+ }
+ if (message.connection !== undefined) {
+ Item_TriggerMqtt_Connection.encode(message.connection, writer.uint32(26).fork()).join();
+ }
+ return writer;
+ },
+
+ decode(input: BinaryReader | Uint8Array, length?: number): Item_TriggerMqtt {
+ const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
+ const end = length === undefined ? reader.len : reader.pos + length;
+ const message = createBaseItem_TriggerMqtt();
+ while (reader.pos < end) {
+ const tag = reader.uint32();
+ switch (tag >>> 3) {
+ case 1: {
+ if (tag !== 10) {
+ break;
+ }
+
+ message.name = reader.string();
+ continue;
+ }
+ case 2: {
+ if (tag !== 18) {
+ break;
+ }
+
+ message.topics.push(reader.string());
+ continue;
+ }
+ case 3: {
+ if (tag !== 26) {
+ break;
+ }
+
+ message.connection = Item_TriggerMqtt_Connection.decode(reader, reader.uint32());
+ continue;
+ }
+ }
+ if ((tag & 7) === 4 || tag === 0) {
+ break;
+ }
+ reader.skip(tag & 7);
+ }
+ return message;
+ },
+
+ fromJSON(object: any): Item_TriggerMqtt {
+ return {
+ name: isSet(object.name) ? globalThis.String(object.name) : undefined,
+ topics: globalThis.Array.isArray(object?.topics) ? object.topics.map((e: any) => globalThis.String(e)) : [],
+ connection: isSet(object.connection) ? Item_TriggerMqtt_Connection.fromJSON(object.connection) : undefined,
+ };
+ },
+
+ toJSON(message: Item_TriggerMqtt): unknown {
+ const obj: any = {};
+ if (message.name !== undefined) {
+ obj.name = message.name;
+ }
+ if (message.topics?.length) {
+ obj.topics = message.topics;
+ }
+ if (message.connection !== undefined) {
+ obj.connection = Item_TriggerMqtt_Connection.toJSON(message.connection);
+ }
+ return obj;
+ },
+
+ create(base?: DeepPartial<Item_TriggerMqtt>): Item_TriggerMqtt {
+ return Item_TriggerMqtt.fromPartial(base ?? {});
+ },
+ fromPartial(object: DeepPartial<Item_TriggerMqtt>): Item_TriggerMqtt {
+ const message = createBaseItem_TriggerMqtt();
+ message.name = object.name ?? undefined;
+ message.topics = object.topics?.map((e) => e) || [];
+ message.connection = (object.connection !== undefined && object.connection !== null)
+ ? Item_TriggerMqtt_Connection.fromPartial(object.connection)
+ : undefined;
+ return message;
+ },
+};
+
+function createBaseItem_TriggerMqtt_Connection(): Item_TriggerMqtt_Connection {
+ return {
+ protocol: undefined,
+ protocolVariable: undefined,
+ port: undefined,
+ portVariable: undefined,
+ host: undefined,
+ hostVariable: undefined,
+ username: undefined,
+ usernameVariable: undefined,
+ password: undefined,
+ passwordVariable: undefined,
+ clientId: undefined,
+ clientIdVariable: undefined,
+ };
+}
+
+export const Item_TriggerMqtt_Connection: MessageFns<Item_TriggerMqtt_Connection> = {
+ encode(message: Item_TriggerMqtt_Connection, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
+ if (message.protocol !== undefined) {
+ writer.uint32(10).string(message.protocol);
+ }
+ if (message.protocolVariable !== undefined) {
+ writer.uint32(18).string(message.protocolVariable);
+ }
+ if (message.port !== undefined) {
+ writer.uint32(26).string(message.port);
+ }
+ if (message.portVariable !== undefined) {
+ writer.uint32(34).string(message.portVariable);
+ }
+ if (message.host !== undefined) {
+ writer.uint32(42).string(message.host);
+ }
+ if (message.hostVariable !== undefined) {
+ writer.uint32(50).string(message.hostVariable);
+ }
+ if (message.username !== undefined) {
+ writer.uint32(58).string(message.username);
+ }
+ if (message.usernameVariable !== undefined) {
+ writer.uint32(66).string(message.usernameVariable);
+ }
+ if (message.password !== undefined) {
+ writer.uint32(74).string(message.password);
+ }
+ if (message.passwordVariable !== undefined) {
+ writer.uint32(82).string(message.passwordVariable);
+ }
+ if (message.clientId !== undefined) {
+ writer.uint32(90).string(message.clientId);
+ }
+ if (message.clientIdVariable !== undefined) {
+ writer.uint32(98).string(message.clientIdVariable);
+ }
+ return writer;
+ },
+
+ decode(input: BinaryReader | Uint8Array, length?: number): Item_TriggerMqtt_Connection {
+ const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
+ const end = length === undefined ? reader.len : reader.pos + length;
+ const message = createBaseItem_TriggerMqtt_Connection();
+ while (reader.pos < end) {
+ const tag = reader.uint32();
+ switch (tag >>> 3) {
+ case 1: {
+ if (tag !== 10) {
+ break;
+ }
+
+ message.protocol = reader.string();
+ continue;
+ }
+ case 2: {
+ if (tag !== 18) {
+ break;
+ }
+
+ message.protocolVariable = reader.string();
+ continue;
+ }
+ case 3: {
+ if (tag !== 26) {
+ break;
+ }
+
+ message.port = reader.string();
+ continue;
+ }
+ case 4: {
+ if (tag !== 34) {
+ break;
+ }
+
+ message.portVariable = reader.string();
+ continue;
+ }
+ case 5: {
+ if (tag !== 42) {
+ break;
+ }
+
+ message.host = reader.string();
+ continue;
+ }
+ case 6: {
+ if (tag !== 50) {
+ break;
+ }
+
+ message.hostVariable = reader.string();
+ continue;
+ }
+ case 7: {
+ if (tag !== 58) {
+ break;
+ }
+
+ message.username = reader.string();
+ continue;
+ }
+ case 8: {
+ if (tag !== 66) {
+ break;
+ }
+
+ message.usernameVariable = reader.string();
+ continue;
+ }
+ case 9: {
+ if (tag !== 74) {
+ break;
+ }
+
+ message.password = reader.string();
+ continue;
+ }
+ case 10: {
+ if (tag !== 82) {
+ break;
+ }
+
+ message.passwordVariable = reader.string();
+ continue;
+ }
+ case 11: {
+ if (tag !== 90) {
+ break;
+ }
+
+ message.clientId = reader.string();
+ continue;
+ }
+ case 12: {
+ if (tag !== 98) {
+ break;
+ }
+
+ message.clientIdVariable = reader.string();
+ continue;
+ }
+ }
+ if ((tag & 7) === 4 || tag === 0) {
+ break;
+ }
+ reader.skip(tag & 7);
+ }
+ return message;
+ },
+
+ fromJSON(object: any): Item_TriggerMqtt_Connection {
+ return {
+ protocol: isSet(object.protocol) ? globalThis.String(object.protocol) : undefined,
+ protocolVariable: isSet(object.protocolVariable) ? globalThis.String(object.protocolVariable) : undefined,
+ port: isSet(object.port) ? globalThis.String(object.port) : undefined,
+ portVariable: isSet(object.portVariable) ? globalThis.String(object.portVariable) : undefined,
+ host: isSet(object.host) ? globalThis.String(object.host) : undefined,
+ hostVariable: isSet(object.hostVariable) ? globalThis.String(object.hostVariable) : undefined,
+ username: isSet(object.username) ? globalThis.String(object.username) : undefined,
+ usernameVariable: isSet(object.usernameVariable) ? globalThis.String(object.usernameVariable) : undefined,
+ password: isSet(object.password) ? globalThis.String(object.password) : undefined,
+ passwordVariable: isSet(object.passwordVariable) ? globalThis.String(object.passwordVariable) : undefined,
+ clientId: isSet(object.clientId) ? globalThis.String(object.clientId) : undefined,
+ clientIdVariable: isSet(object.clientIdVariable) ? globalThis.String(object.clientIdVariable) : undefined,
+ };
+ },
+
+ toJSON(message: Item_TriggerMqtt_Connection): unknown {
+ const obj: any = {};
+ if (message.protocol !== undefined) {
+ obj.protocol = message.protocol;
+ }
+ if (message.protocolVariable !== undefined) {
+ obj.protocolVariable = message.protocolVariable;
+ }
+ if (message.port !== undefined) {
+ obj.port = message.port;
+ }
+ if (message.portVariable !== undefined) {
+ obj.portVariable = message.portVariable;
+ }
+ if (message.host !== undefined) {
+ obj.host = message.host;
+ }
+ if (message.hostVariable !== undefined) {
+ obj.hostVariable = message.hostVariable;
+ }
+ if (message.username !== undefined) {
+ obj.username = message.username;
+ }
+ if (message.usernameVariable !== undefined) {
+ obj.usernameVariable = message.usernameVariable;
+ }
+ if (message.password !== undefined) {
+ obj.password = message.password;
+ }
+ if (message.passwordVariable !== undefined) {
+ obj.passwordVariable = message.passwordVariable;
+ }
+ if (message.clientId !== undefined) {
+ obj.clientId = message.clientId;
+ }
+ if (message.clientIdVariable !== undefined) {
+ obj.clientIdVariable = message.clientIdVariable;
+ }
+ return obj;
+ },
+
+ create(base?: DeepPartial<Item_TriggerMqtt_Connection>): Item_TriggerMqtt_Connection {
+ return Item_TriggerMqtt_Connection.fromPartial(base ?? {});
+ },
+ fromPartial(object: DeepPartial<Item_TriggerMqtt_Connection>): Item_TriggerMqtt_Connection {
+ const message = createBaseItem_TriggerMqtt_Connection();
+ message.protocol = object.protocol ?? undefined;
+ message.protocolVariable = object.protocolVariable ?? undefined;
+ message.port = object.port ?? undefined;
+ message.portVariable = object.portVariable ?? undefined;
+ message.host = object.host ?? undefined;
+ message.hostVariable = object.hostVariable ?? undefined;
+ message.username = object.username ?? undefined;
+ message.usernameVariable = object.usernameVariable ?? undefined;
+ message.password = object.password ?? undefined;
+ message.passwordVariable = object.passwordVariable ?? undefined;
+ message.clientId = object.clientId ?? undefined;
+ message.clientIdVariable = object.clientIdVariable ?? undefined;
+ return message;
+ },
+};
+
+type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined;
+
+export type DeepPartial<T> = T extends Builtin ? T
+ : T extends globalThis.Array<infer U> ? globalThis.Array<DeepPartial<U>>
+ : T extends ReadonlyArray<infer U> ? ReadonlyArray<DeepPartial<U>>
+ : T extends {} ? { [K in keyof T]?: DeepPartial<T[K]> }
+ : Partial<T>;
+
+function isSet(value: any): boolean {
+ return value !== null && value !== undefined;
+}
+
+export interface MessageFns<T> {
+ encode(message: T, writer?: BinaryWriter): BinaryWriter;
+ decode(input: BinaryReader | Uint8Array, length?: number): T;
+ fromJSON(object: any): T;
+ toJSON(message: T): unknown;
+ create(base?: DeepPartial<T>): T;
+ fromPartial(object: DeepPartial<T>): T;
+}
diff --git a/packages/grpc/src/gateway.ts b/packages/grpc/src/gateway.ts
new file mode 100644
index 0000000..460b52e
--- /dev/null
+++ b/packages/grpc/src/gateway.ts
@@ -0,0 +1,19 @@
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+// protoc-gen-ts_proto v2.10.1
+// protoc v3.21.12
+// source: gateway.proto
+
+/* eslint-disable */
+
+export const protobufPackage = "GatewayAPI";
+
+/** */
+export type GatewayAPIDefinition = typeof GatewayAPIDefinition;
+export const GatewayAPIDefinition = { name: "GatewayAPI", fullName: "GatewayAPI.GatewayAPI", methods: {} } as const;
+
+export interface GatewayAPIServiceImplementation {
+}
+
+export interface GatewayAPIClient {
+}
diff --git a/packages/grpc/src/general.ts b/packages/grpc/src/general.ts
new file mode 100644
index 0000000..17ce433
--- /dev/null
+++ b/packages/grpc/src/general.ts
@@ -0,0 +1,3331 @@
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+// protoc-gen-ts_proto v2.10.1
+// protoc v3.21.12
+// source: general.proto
+
+/* eslint-disable */
+import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire";
+import type { CallContext, CallOptions } from "nice-grpc-common";
+import { Empty } from "./base.js";
+import { Item as Item2 } from "./basics/action.js";
+import { Item as Item1 } from "./basics/job-version.js";
+import { Item } from "./basics/job.js";
+import {
+ Item as Item4,
+ Item_Status,
+ item_StatusFromJSON,
+ item_StatusToJSON,
+ item_StatusToNumber,
+} from "./basics/runner.js";
+import { Item as Item3 } from "./basics/trigger.js";
+
+export const protobufPackage = "GeneralAPI";
+
+/** getJob * */
+export interface JobRequest {
+ jobId: string;
+}
+
+export interface JobResponse {
+ job: Item | undefined;
+}
+
+/** getJobs * */
+export interface JobsRequest {
+}
+
+export interface JobsResponse {
+ jobs: Item[];
+}
+
+/** getJobVersion * */
+export interface JobVersionRequest {
+ jobVersionId: string;
+}
+
+export interface JobVersionResponse {
+ jobVersion: Item1 | undefined;
+}
+
+/** getJobVersionLatest * */
+export interface JobVersionLatestRequest {
+ jobId: string;
+}
+
+export interface JobVersionLatestResponse {
+ jobVersion: Item1 | undefined;
+}
+
+/** getJobVersions * */
+export interface JobVersionsRequest {
+ jobId: string;
+}
+
+export interface JobVersionsResponse {
+ jobVersions: Item1[];
+}
+
+/** getJobVersionArchive * */
+export interface JobVersionArchiveRequest {
+ jobId: string;
+ jobVersionId: string;
+}
+
+export interface JobVersionArchiveResponse {
+ seq: number;
+ data: Uint8Array;
+ end: boolean;
+}
+
+/** getJobAction * */
+export interface JobActionRequest {
+ jobId: string;
+ actionId: string;
+}
+
+export interface JobActionResponse {
+ action: Item2 | undefined;
+}
+
+/** getJobActionLatest * */
+export interface JobActionLatestRequest {
+ jobId: string;
+}
+
+export interface JobActionLatestResponse {
+ action: Item2 | undefined;
+}
+
+/** getJobActions * */
+export interface JobActionsRequest {
+ jobId: string;
+ versionId?: string | undefined;
+}
+
+export interface JobActionsResponse {
+ actions: Item2[];
+}
+
+/** getJobTrigger * */
+export interface JobTriggerRequest {
+ jobId: string;
+ triggerId: string;
+}
+
+export interface JobTriggerResponse {
+ trigger: Item3 | undefined;
+}
+
+/** getJobTriggers * */
+export interface JobTriggersRequest {
+ jobId: string;
+ versionId?: string | undefined;
+}
+
+export interface JobTriggersResponse {
+ triggers: Item3[];
+}
+
+/** getJobTriggersLatest * */
+export interface JobTriggersLatestRequest {
+ jobId: string;
+}
+
+export interface JobTriggersLatestResponse {
+ triggers: Item3[];
+}
+
+/** getRunner * */
+export interface RunnerRequest {
+ runnerId: string;
+}
+
+export interface RunnerResponse {
+ runner: Item4 | undefined;
+}
+
+/** getRunners * */
+export interface RunnersRequest {
+ jobId?: string | undefined;
+ versionId?: string | undefined;
+ actionId?: string | undefined;
+ status?: Item_Status | undefined;
+}
+
+export interface RunnersResponse {
+ runners: Item4[];
+}
+
+/** deleteRunner * */
+export interface DeleteRunnerRequest {
+ runnerId: string;
+}
+
+/** getStoreItem * */
+export interface StoreItemRequest {
+ jobId: string;
+ key: string;
+}
+
+export interface StoreItemResponse {
+ key: string;
+ value: string;
+}
+
+/** setStoreItem * */
+export interface SetStoreItemRequest {
+ jobId: string;
+ key: string;
+ value: string;
+ ttl?: number | undefined;
+}
+
+export interface SetStoreItemResponse {
+ key: string;
+ value: string;
+}
+
+/** deleteStoreItem * */
+export interface DeleteStoreItemRequest {
+ jobId: string;
+ key: string;
+}
+
+export interface DeleteStoreItemResponse {
+ key: string;
+}
+
+/** publishMqttMessage * */
+export interface PublishMqttMessageRequest {
+ jobId: string;
+ topic: string;
+ payload: string;
+}
+
+export interface PublishMqttMessageResponse {
+}
+
+/** createRunner * */
+export interface CreateSoftRunnerRequest {
+ jobId: string;
+ versionId: string;
+ actionId: string;
+}
+
+export interface CreateSoftRunnerResponse {
+ runner: Item4 | undefined;
+}
+
+/** getTemplates * */
+export interface TemplatesRequest {
+}
+
+export interface TemplatesResponse {
+ templateBadGateway: string;
+}
+
+function createBaseJobRequest(): JobRequest {
+ return { jobId: "" };
+}
+
+export const JobRequest: MessageFns<JobRequest> = {
+ encode(message: JobRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
+ if (message.jobId !== "") {
+ writer.uint32(10).string(message.jobId);
+ }
+ return writer;
+ },
+
+ decode(input: BinaryReader | Uint8Array, length?: number): JobRequest {
+ const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
+ const end = length === undefined ? reader.len : reader.pos + length;
+ const message = createBaseJobRequest();
+ while (reader.pos < end) {
+ const tag = reader.uint32();
+ switch (tag >>> 3) {
+ case 1: {
+ if (tag !== 10) {
+ break;
+ }
+
+ message.jobId = reader.string();
+ continue;
+ }
+ }
+ if ((tag & 7) === 4 || tag === 0) {
+ break;
+ }
+ reader.skip(tag & 7);
+ }
+ return message;
+ },
+
+ fromJSON(object: any): JobRequest {
+ return { jobId: isSet(object.jobId) ? globalThis.String(object.jobId) : "" };
+ },
+
+ toJSON(message: JobRequest): unknown {
+ const obj: any = {};
+ if (message.jobId !== "") {
+ obj.jobId = message.jobId;
+ }
+ return obj;
+ },
+
+ create(base?: DeepPartial<JobRequest>): JobRequest {
+ return JobRequest.fromPartial(base ?? {});
+ },
+ fromPartial(object: DeepPartial<JobRequest>): JobRequest {
+ const message = createBaseJobRequest();
+ message.jobId = object.jobId ?? "";
+ return message;
+ },
+};
+
+function createBaseJobResponse(): JobResponse {
+ return { job: undefined };
+}
+
+export const JobResponse: MessageFns<JobResponse> = {
+ encode(message: JobResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
+ if (message.job !== undefined) {
+ Item.encode(message.job, writer.uint32(10).fork()).join();
+ }
+ return writer;
+ },
+
+ decode(input: BinaryReader | Uint8Array, length?: number): JobResponse {
+ const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
+ const end = length === undefined ? reader.len : reader.pos + length;
+ const message = createBaseJobResponse();
+ while (reader.pos < end) {
+ const tag = reader.uint32();
+ switch (tag >>> 3) {
+ case 1: {
+ if (tag !== 10) {
+ break;
+ }
+
+ message.job = Item.decode(reader, reader.uint32());
+ continue;
+ }
+ }
+ if ((tag & 7) === 4 || tag === 0) {
+ break;
+ }
+ reader.skip(tag & 7);
+ }
+ return message;
+ },
+
+ fromJSON(object: any): JobResponse {
+ return { job: isSet(object.job) ? Item.fromJSON(object.job) : undefined };
+ },
+
+ toJSON(message: JobResponse): unknown {
+ const obj: any = {};
+ if (message.job !== undefined) {
+ obj.job = Item.toJSON(message.job);
+ }
+ return obj;
+ },
+
+ create(base?: DeepPartial<JobResponse>): JobResponse {
+ return JobResponse.fromPartial(base ?? {});
+ },
+ fromPartial(object: DeepPartial<JobResponse>): JobResponse {
+ const message = createBaseJobResponse();
+ message.job = (object.job !== undefined && object.job !== null) ? Item.fromPartial(object.job) : undefined;
+ return message;
+ },
+};
+
+function createBaseJobsRequest(): JobsRequest {
+ return {};
+}
+
+export const JobsRequest: MessageFns<JobsRequest> = {
+ encode(_: JobsRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
+ return writer;
+ },
+
+ decode(input: BinaryReader | Uint8Array, length?: number): JobsRequest {
+ const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
+ const end = length === undefined ? reader.len : reader.pos + length;
+ const message = createBaseJobsRequest();
+ while (reader.pos < end) {
+ const tag = reader.uint32();
+ switch (tag >>> 3) {
+ }
+ if ((tag & 7) === 4 || tag === 0) {
+ break;
+ }
+ reader.skip(tag & 7);
+ }
+ return message;
+ },
+
+ fromJSON(_: any): JobsRequest {
+ return {};
+ },
+
+ toJSON(_: JobsRequest): unknown {
+ const obj: any = {};
+ return obj;
+ },
+
+ create(base?: DeepPartial<JobsRequest>): JobsRequest {
+ return JobsRequest.fromPartial(base ?? {});
+ },
+ fromPartial(_: DeepPartial<JobsRequest>): JobsRequest {
+ const message = createBaseJobsRequest();
+ return message;
+ },
+};
+
+function createBaseJobsResponse(): JobsResponse {
+ return { jobs: [] };
+}
+
+export const JobsResponse: MessageFns<JobsResponse> = {
+ encode(message: JobsResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
+ for (const v of message.jobs) {
+ Item.encode(v!, writer.uint32(10).fork()).join();
+ }
+ return writer;
+ },
+
+ decode(input: BinaryReader | Uint8Array, length?: number): JobsResponse {
+ const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
+ const end = length === undefined ? reader.len : reader.pos + length;
+ const message = createBaseJobsResponse();
+ while (reader.pos < end) {
+ const tag = reader.uint32();
+ switch (tag >>> 3) {
+ case 1: {
+ if (tag !== 10) {
+ break;
+ }
+
+ message.jobs.push(Item.decode(reader, reader.uint32()));
+ continue;
+ }
+ }
+ if ((tag & 7) === 4 || tag === 0) {
+ break;
+ }
+ reader.skip(tag & 7);
+ }
+ return message;
+ },
+
+ fromJSON(object: any): JobsResponse {
+ return { jobs: globalThis.Array.isArray(object?.jobs) ? object.jobs.map((e: any) => Item.fromJSON(e)) : [] };
+ },
+
+ toJSON(message: JobsResponse): unknown {
+ const obj: any = {};
+ if (message.jobs?.length) {
+ obj.jobs = message.jobs.map((e) => Item.toJSON(e));
+ }
+ return obj;
+ },
+
+ create(base?: DeepPartial<JobsResponse>): JobsResponse {
+ return JobsResponse.fromPartial(base ?? {});
+ },
+ fromPartial(object: DeepPartial<JobsResponse>): JobsResponse {
+ const message = createBaseJobsResponse();
+ message.jobs = object.jobs?.map((e) => Item.fromPartial(e)) || [];
+ return message;
+ },
+};
+
+function createBaseJobVersionRequest(): JobVersionRequest {
+ return { jobVersionId: "" };
+}
+
+export const JobVersionRequest: MessageFns<JobVersionRequest> = {
+ encode(message: JobVersionRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
+ if (message.jobVersionId !== "") {
+ writer.uint32(10).string(message.jobVersionId);
+ }
+ return writer;
+ },
+
+ decode(input: BinaryReader | Uint8Array, length?: number): JobVersionRequest {
+ const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
+ const end = length === undefined ? reader.len : reader.pos + length;
+ const message = createBaseJobVersionRequest();
+ while (reader.pos < end) {
+ const tag = reader.uint32();
+ switch (tag >>> 3) {
+ case 1: {
+ if (tag !== 10) {
+ break;
+ }
+
+ message.jobVersionId = reader.string();
+ continue;
+ }
+ }
+ if ((tag & 7) === 4 || tag === 0) {
+ break;
+ }
+ reader.skip(tag & 7);
+ }
+ return message;
+ },
+
+ fromJSON(object: any): JobVersionRequest {
+ return { jobVersionId: isSet(object.jobVersionId) ? globalThis.String(object.jobVersionId) : "" };
+ },
+
+ toJSON(message: JobVersionRequest): unknown {
+ const obj: any = {};
+ if (message.jobVersionId !== "") {
+ obj.jobVersionId = message.jobVersionId;
+ }
+ return obj;
+ },
+
+ create(base?: DeepPartial): JobVersionRequest {
+ return JobVersionRequest.fromPartial(base ?? {});
+ },
+ fromPartial(object: DeepPartial): JobVersionRequest {
+ const message = createBaseJobVersionRequest();
+ message.jobVersionId = object.jobVersionId ?? "";
+ return message;
+ },
+};
+
+function createBaseJobVersionResponse(): JobVersionResponse {
+ return { jobVersion: undefined };
+}
+
+export const JobVersionResponse: MessageFns = {
+ encode(message: JobVersionResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
+ if (message.jobVersion !== undefined) {
+ Item1.encode(message.jobVersion, writer.uint32(10).fork()).join();
+ }
+ return writer;
+ },
+
+ decode(input: BinaryReader | Uint8Array, length?: number): JobVersionResponse {
+ const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
+ const end = length === undefined ? reader.len : reader.pos + length;
+ const message = createBaseJobVersionResponse();
+ while (reader.pos < end) {
+ const tag = reader.uint32();
+ switch (tag >>> 3) {
+ case 1: {
+ if (tag !== 10) {
+ break;
+ }
+
+ message.jobVersion = Item1.decode(reader, reader.uint32());
+ continue;
+ }
+ }
+ if ((tag & 7) === 4 || tag === 0) {
+ break;
+ }
+ reader.skip(tag & 7);
+ }
+ return message;
+ },
+
+ fromJSON(object: any): JobVersionResponse {
+ return { jobVersion: isSet(object.jobVersion) ? Item1.fromJSON(object.jobVersion) : undefined };
+ },
+
+ toJSON(message: JobVersionResponse): unknown {
+ const obj: any = {};
+ if (message.jobVersion !== undefined) {
+ obj.jobVersion = Item1.toJSON(message.jobVersion);
+ }
+ return obj;
+ },
+
+ create(base?: DeepPartial): JobVersionResponse {
+ return JobVersionResponse.fromPartial(base ?? {});
+ },
+ fromPartial(object: DeepPartial): JobVersionResponse {
+ const message = createBaseJobVersionResponse();
+ message.jobVersion = (object.jobVersion !== undefined && object.jobVersion !== null)
+ ? Item1.fromPartial(object.jobVersion)
+ : undefined;
+ return message;
+ },
+};
+
+function createBaseJobVersionLatestRequest(): JobVersionLatestRequest {
+ return { jobId: "" };
+}
+
+export const JobVersionLatestRequest: MessageFns = {
+ encode(message: JobVersionLatestRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
+ if (message.jobId !== "") {
+ writer.uint32(10).string(message.jobId);
+ }
+ return writer;
+ },
+
+ decode(input: BinaryReader | Uint8Array, length?: number): JobVersionLatestRequest {
+ const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
+ const end = length === undefined ? reader.len : reader.pos + length;
+ const message = createBaseJobVersionLatestRequest();
+ while (reader.pos < end) {
+ const tag = reader.uint32();
+ switch (tag >>> 3) {
+ case 1: {
+ if (tag !== 10) {
+ break;
+ }
+
+ message.jobId = reader.string();
+ continue;
+ }
+ }
+ if ((tag & 7) === 4 || tag === 0) {
+ break;
+ }
+ reader.skip(tag & 7);
+ }
+ return message;
+ },
+
+ fromJSON(object: any): JobVersionLatestRequest {
+ return { jobId: isSet(object.jobId) ? globalThis.String(object.jobId) : "" };
+ },
+
+ toJSON(message: JobVersionLatestRequest): unknown {
+ const obj: any = {};
+ if (message.jobId !== "") {
+ obj.jobId = message.jobId;
+ }
+ return obj;
+ },
+
+ create(base?: DeepPartial): JobVersionLatestRequest {
+ return JobVersionLatestRequest.fromPartial(base ?? {});
+ },
+ fromPartial(object: DeepPartial): JobVersionLatestRequest {
+ const message = createBaseJobVersionLatestRequest();
+ message.jobId = object.jobId ?? "";
+ return message;
+ },
+};
+
+function createBaseJobVersionLatestResponse(): JobVersionLatestResponse {
+ return { jobVersion: undefined };
+}
+
+export const JobVersionLatestResponse: MessageFns = {
+ encode(message: JobVersionLatestResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
+ if (message.jobVersion !== undefined) {
+ Item1.encode(message.jobVersion, writer.uint32(10).fork()).join();
+ }
+ return writer;
+ },
+
+ decode(input: BinaryReader | Uint8Array, length?: number): JobVersionLatestResponse {
+ const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
+ const end = length === undefined ? reader.len : reader.pos + length;
+ const message = createBaseJobVersionLatestResponse();
+ while (reader.pos < end) {
+ const tag = reader.uint32();
+ switch (tag >>> 3) {
+ case 1: {
+ if (tag !== 10) {
+ break;
+ }
+
+ message.jobVersion = Item1.decode(reader, reader.uint32());
+ continue;
+ }
+ }
+ if ((tag & 7) === 4 || tag === 0) {
+ break;
+ }
+ reader.skip(tag & 7);
+ }
+ return message;
+ },
+
+ fromJSON(object: any): JobVersionLatestResponse {
+ return { jobVersion: isSet(object.jobVersion) ? Item1.fromJSON(object.jobVersion) : undefined };
+ },
+
+ toJSON(message: JobVersionLatestResponse): unknown {
+ const obj: any = {};
+ if (message.jobVersion !== undefined) {
+ obj.jobVersion = Item1.toJSON(message.jobVersion);
+ }
+ return obj;
+ },
+
+ create(base?: DeepPartial): JobVersionLatestResponse {
+ return JobVersionLatestResponse.fromPartial(base ?? {});
+ },
+ fromPartial(object: DeepPartial): JobVersionLatestResponse {
+ const message = createBaseJobVersionLatestResponse();
+ message.jobVersion = (object.jobVersion !== undefined && object.jobVersion !== null)
+ ? Item1.fromPartial(object.jobVersion)
+ : undefined;
+ return message;
+ },
+};
+
+function createBaseJobVersionsRequest(): JobVersionsRequest {
+ return { jobId: "" };
+}
+
+export const JobVersionsRequest: MessageFns = {
+ encode(message: JobVersionsRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
+ if (message.jobId !== "") {
+ writer.uint32(10).string(message.jobId);
+ }
+ return writer;
+ },
+
+ decode(input: BinaryReader | Uint8Array, length?: number): JobVersionsRequest {
+ const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
+ const end = length === undefined ? reader.len : reader.pos + length;
+ const message = createBaseJobVersionsRequest();
+ while (reader.pos < end) {
+ const tag = reader.uint32();
+ switch (tag >>> 3) {
+ case 1: {
+ if (tag !== 10) {
+ break;
+ }
+
+ message.jobId = reader.string();
+ continue;
+ }
+ }
+ if ((tag & 7) === 4 || tag === 0) {
+ break;
+ }
+ reader.skip(tag & 7);
+ }
+ return message;
+ },
+
+ fromJSON(object: any): JobVersionsRequest {
+ return { jobId: isSet(object.jobId) ? globalThis.String(object.jobId) : "" };
+ },
+
+ toJSON(message: JobVersionsRequest): unknown {
+ const obj: any = {};
+ if (message.jobId !== "") {
+ obj.jobId = message.jobId;
+ }
+ return obj;
+ },
+
+ create(base?: DeepPartial): JobVersionsRequest {
+ return JobVersionsRequest.fromPartial(base ?? {});
+ },
+ fromPartial(object: DeepPartial): JobVersionsRequest {
+ const message = createBaseJobVersionsRequest();
+ message.jobId = object.jobId ?? "";
+ return message;
+ },
+};
+
+function createBaseJobVersionsResponse(): JobVersionsResponse {
+ return { jobVersions: [] };
+}
+
+export const JobVersionsResponse: MessageFns = {
+ encode(message: JobVersionsResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
+ for (const v of message.jobVersions) {
+ Item1.encode(v!, writer.uint32(10).fork()).join();
+ }
+ return writer;
+ },
+
+ decode(input: BinaryReader | Uint8Array, length?: number): JobVersionsResponse {
+ const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
+ const end = length === undefined ? reader.len : reader.pos + length;
+ const message = createBaseJobVersionsResponse();
+ while (reader.pos < end) {
+ const tag = reader.uint32();
+ switch (tag >>> 3) {
+ case 1: {
+ if (tag !== 10) {
+ break;
+ }
+
+ message.jobVersions.push(Item1.decode(reader, reader.uint32()));
+ continue;
+ }
+ }
+ if ((tag & 7) === 4 || tag === 0) {
+ break;
+ }
+ reader.skip(tag & 7);
+ }
+ return message;
+ },
+
+ fromJSON(object: any): JobVersionsResponse {
+ return {
+ jobVersions: globalThis.Array.isArray(object?.jobVersions)
+ ? object.jobVersions.map((e: any) => Item1.fromJSON(e))
+ : [],
+ };
+ },
+
+ toJSON(message: JobVersionsResponse): unknown {
+ const obj: any = {};
+ if (message.jobVersions?.length) {
+ obj.jobVersions = message.jobVersions.map((e) => Item1.toJSON(e));
+ }
+ return obj;
+ },
+
+ create(base?: DeepPartial): JobVersionsResponse {
+ return JobVersionsResponse.fromPartial(base ?? {});
+ },
+ fromPartial(object: DeepPartial): JobVersionsResponse {
+ const message = createBaseJobVersionsResponse();
+ message.jobVersions = object.jobVersions?.map((e) => Item1.fromPartial(e)) || [];
+ return message;
+ },
+};
+
+function createBaseJobVersionArchiveRequest(): JobVersionArchiveRequest {
+ return { jobId: "", jobVersionId: "" };
+}
+
+export const JobVersionArchiveRequest: MessageFns = {
+ encode(message: JobVersionArchiveRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
+ if (message.jobId !== "") {
+ writer.uint32(10).string(message.jobId);
+ }
+ if (message.jobVersionId !== "") {
+ writer.uint32(18).string(message.jobVersionId);
+ }
+ return writer;
+ },
+
+ decode(input: BinaryReader | Uint8Array, length?: number): JobVersionArchiveRequest {
+ const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
+ const end = length === undefined ? reader.len : reader.pos + length;
+ const message = createBaseJobVersionArchiveRequest();
+ while (reader.pos < end) {
+ const tag = reader.uint32();
+ switch (tag >>> 3) {
+ case 1: {
+ if (tag !== 10) {
+ break;
+ }
+
+ message.jobId = reader.string();
+ continue;
+ }
+ case 2: {
+ if (tag !== 18) {
+ break;
+ }
+
+ message.jobVersionId = reader.string();
+ continue;
+ }
+ }
+ if ((tag & 7) === 4 || tag === 0) {
+ break;
+ }
+ reader.skip(tag & 7);
+ }
+ return message;
+ },
+
+ fromJSON(object: any): JobVersionArchiveRequest {
+ return {
+ jobId: isSet(object.jobId) ? globalThis.String(object.jobId) : "",
+ jobVersionId: isSet(object.jobVersionId) ? globalThis.String(object.jobVersionId) : "",
+ };
+ },
+
+ toJSON(message: JobVersionArchiveRequest): unknown {
+ const obj: any = {};
+ if (message.jobId !== "") {
+ obj.jobId = message.jobId;
+ }
+ if (message.jobVersionId !== "") {
+ obj.jobVersionId = message.jobVersionId;
+ }
+ return obj;
+ },
+
+ create(base?: DeepPartial): JobVersionArchiveRequest {
+ return JobVersionArchiveRequest.fromPartial(base ?? {});
+ },
+ fromPartial(object: DeepPartial): JobVersionArchiveRequest {
+ const message = createBaseJobVersionArchiveRequest();
+ message.jobId = object.jobId ?? "";
+ message.jobVersionId = object.jobVersionId ?? "";
+ return message;
+ },
+};
+
+function createBaseJobVersionArchiveResponse(): JobVersionArchiveResponse {
+ return { seq: 0, data: new Uint8Array(0), end: false };
+}
+
+export const JobVersionArchiveResponse: MessageFns = {
+ encode(message: JobVersionArchiveResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
+ if (message.seq !== 0) {
+ writer.uint32(8).uint64(message.seq);
+ }
+ if (message.data.length !== 0) {
+ writer.uint32(18).bytes(message.data);
+ }
+ if (message.end !== false) {
+ writer.uint32(24).bool(message.end);
+ }
+ return writer;
+ },
+
+ decode(input: BinaryReader | Uint8Array, length?: number): JobVersionArchiveResponse {
+ const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
+ const end = length === undefined ? reader.len : reader.pos + length;
+ const message = createBaseJobVersionArchiveResponse();
+ while (reader.pos < end) {
+ const tag = reader.uint32();
+ switch (tag >>> 3) {
+ case 1: {
+ if (tag !== 8) {
+ break;
+ }
+
+ message.seq = longToNumber(reader.uint64());
+ continue;
+ }
+ case 2: {
+ if (tag !== 18) {
+ break;
+ }
+
+ message.data = reader.bytes();
+ continue;
+ }
+ case 3: {
+ if (tag !== 24) {
+ break;
+ }
+
+ message.end = reader.bool();
+ continue;
+ }
+ }
+ if ((tag & 7) === 4 || tag === 0) {
+ break;
+ }
+ reader.skip(tag & 7);
+ }
+ return message;
+ },
+
+ fromJSON(object: any): JobVersionArchiveResponse {
+ return {
+ seq: isSet(object.seq) ? globalThis.Number(object.seq) : 0,
+ data: isSet(object.data) ? bytesFromBase64(object.data) : new Uint8Array(0),
+ end: isSet(object.end) ? globalThis.Boolean(object.end) : false,
+ };
+ },
+
+ toJSON(message: JobVersionArchiveResponse): unknown {
+ const obj: any = {};
+ if (message.seq !== 0) {
+ obj.seq = Math.round(message.seq);
+ }
+ if (message.data.length !== 0) {
+ obj.data = base64FromBytes(message.data);
+ }
+ if (message.end !== false) {
+ obj.end = message.end;
+ }
+ return obj;
+ },
+
+ create(base?: DeepPartial): JobVersionArchiveResponse {
+ return JobVersionArchiveResponse.fromPartial(base ?? {});
+ },
+ fromPartial(object: DeepPartial): JobVersionArchiveResponse {
+ const message = createBaseJobVersionArchiveResponse();
+ message.seq = object.seq ?? 0;
+ message.data = object.data ?? new Uint8Array(0);
+ message.end = object.end ?? false;
+ return message;
+ },
+};
+
+function createBaseJobActionRequest(): JobActionRequest {
+ return { jobId: "", actionId: "" };
+}
+
+export const JobActionRequest: MessageFns = {
+ encode(message: JobActionRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
+ if (message.jobId !== "") {
+ writer.uint32(10).string(message.jobId);
+ }
+ if (message.actionId !== "") {
+ writer.uint32(18).string(message.actionId);
+ }
+ return writer;
+ },
+
+ decode(input: BinaryReader | Uint8Array, length?: number): JobActionRequest {
+ const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
+ const end = length === undefined ? reader.len : reader.pos + length;
+ const message = createBaseJobActionRequest();
+ while (reader.pos < end) {
+ const tag = reader.uint32();
+ switch (tag >>> 3) {
+ case 1: {
+ if (tag !== 10) {
+ break;
+ }
+
+ message.jobId = reader.string();
+ continue;
+ }
+ case 2: {
+ if (tag !== 18) {
+ break;
+ }
+
+ message.actionId = reader.string();
+ continue;
+ }
+ }
+ if ((tag & 7) === 4 || tag === 0) {
+ break;
+ }
+ reader.skip(tag & 7);
+ }
+ return message;
+ },
+
+ fromJSON(object: any): JobActionRequest {
+ return {
+ jobId: isSet(object.jobId) ? globalThis.String(object.jobId) : "",
+ actionId: isSet(object.actionId) ? globalThis.String(object.actionId) : "",
+ };
+ },
+
+ toJSON(message: JobActionRequest): unknown {
+ const obj: any = {};
+ if (message.jobId !== "") {
+ obj.jobId = message.jobId;
+ }
+ if (message.actionId !== "") {
+ obj.actionId = message.actionId;
+ }
+ return obj;
+ },
+
+ create(base?: DeepPartial): JobActionRequest {
+ return JobActionRequest.fromPartial(base ?? {});
+ },
+ fromPartial(object: DeepPartial): JobActionRequest {
+ const message = createBaseJobActionRequest();
+ message.jobId = object.jobId ?? "";
+ message.actionId = object.actionId ?? "";
+ return message;
+ },
+};
+
+function createBaseJobActionResponse(): JobActionResponse {
+ return { action: undefined };
+}
+
+export const JobActionResponse: MessageFns = {
+ encode(message: JobActionResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
+ if (message.action !== undefined) {
+ Item2.encode(message.action, writer.uint32(10).fork()).join();
+ }
+ return writer;
+ },
+
+ decode(input: BinaryReader | Uint8Array, length?: number): JobActionResponse {
+ const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
+ const end = length === undefined ? reader.len : reader.pos + length;
+ const message = createBaseJobActionResponse();
+ while (reader.pos < end) {
+ const tag = reader.uint32();
+ switch (tag >>> 3) {
+ case 1: {
+ if (tag !== 10) {
+ break;
+ }
+
+ message.action = Item2.decode(reader, reader.uint32());
+ continue;
+ }
+ }
+ if ((tag & 7) === 4 || tag === 0) {
+ break;
+ }
+ reader.skip(tag & 7);
+ }
+ return message;
+ },
+
+ fromJSON(object: any): JobActionResponse {
+ return { action: isSet(object.action) ? Item2.fromJSON(object.action) : undefined };
+ },
+
+ toJSON(message: JobActionResponse): unknown {
+ const obj: any = {};
+ if (message.action !== undefined) {
+ obj.action = Item2.toJSON(message.action);
+ }
+ return obj;
+ },
+
+ create(base?: DeepPartial): JobActionResponse {
+ return JobActionResponse.fromPartial(base ?? {});
+ },
+ fromPartial(object: DeepPartial): JobActionResponse {
+ const message = createBaseJobActionResponse();
+ message.action = (object.action !== undefined && object.action !== null)
+ ? Item2.fromPartial(object.action)
+ : undefined;
+ return message;
+ },
+};
+
+function createBaseJobActionLatestRequest(): JobActionLatestRequest {
+ return { jobId: "" };
+}
+
+export const JobActionLatestRequest: MessageFns = {
+ encode(message: JobActionLatestRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
+ if (message.jobId !== "") {
+ writer.uint32(10).string(message.jobId);
+ }
+ return writer;
+ },
+
+ decode(input: BinaryReader | Uint8Array, length?: number): JobActionLatestRequest {
+ const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
+ const end = length === undefined ? reader.len : reader.pos + length;
+ const message = createBaseJobActionLatestRequest();
+ while (reader.pos < end) {
+ const tag = reader.uint32();
+ switch (tag >>> 3) {
+ case 1: {
+ if (tag !== 10) {
+ break;
+ }
+
+ message.jobId = reader.string();
+ continue;
+ }
+ }
+ if ((tag & 7) === 4 || tag === 0) {
+ break;
+ }
+ reader.skip(tag & 7);
+ }
+ return message;
+ },
+
+ fromJSON(object: any): JobActionLatestRequest {
+ return { jobId: isSet(object.jobId) ? globalThis.String(object.jobId) : "" };
+ },
+
+ toJSON(message: JobActionLatestRequest): unknown {
+ const obj: any = {};
+ if (message.jobId !== "") {
+ obj.jobId = message.jobId;
+ }
+ return obj;
+ },
+
+ create(base?: DeepPartial): JobActionLatestRequest {
+ return JobActionLatestRequest.fromPartial(base ?? {});
+ },
+ fromPartial(object: DeepPartial): JobActionLatestRequest {
+ const message = createBaseJobActionLatestRequest();
+ message.jobId = object.jobId ?? "";
+ return message;
+ },
+};
+
+function createBaseJobActionLatestResponse(): JobActionLatestResponse {
+ return { action: undefined };
+}
+
+export const JobActionLatestResponse: MessageFns = {
+ encode(message: JobActionLatestResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
+ if (message.action !== undefined) {
+ Item2.encode(message.action, writer.uint32(10).fork()).join();
+ }
+ return writer;
+ },
+
+ decode(input: BinaryReader | Uint8Array, length?: number): JobActionLatestResponse {
+ const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
+ const end = length === undefined ? reader.len : reader.pos + length;
+ const message = createBaseJobActionLatestResponse();
+ while (reader.pos < end) {
+ const tag = reader.uint32();
+ switch (tag >>> 3) {
+ case 1: {
+ if (tag !== 10) {
+ break;
+ }
+
+ message.action = Item2.decode(reader, reader.uint32());
+ continue;
+ }
+ }
+ if ((tag & 7) === 4 || tag === 0) {
+ break;
+ }
+ reader.skip(tag & 7);
+ }
+ return message;
+ },
+
+ fromJSON(object: any): JobActionLatestResponse {
+ return { action: isSet(object.action) ? Item2.fromJSON(object.action) : undefined };
+ },
+
+ toJSON(message: JobActionLatestResponse): unknown {
+ const obj: any = {};
+ if (message.action !== undefined) {
+ obj.action = Item2.toJSON(message.action);
+ }
+ return obj;
+ },
+
+ create(base?: DeepPartial): JobActionLatestResponse {
+ return JobActionLatestResponse.fromPartial(base ?? {});
+ },
+ fromPartial(object: DeepPartial): JobActionLatestResponse {
+ const message = createBaseJobActionLatestResponse();
+ message.action = (object.action !== undefined && object.action !== null)
+ ? Item2.fromPartial(object.action)
+ : undefined;
+ return message;
+ },
+};
+
+function createBaseJobActionsRequest(): JobActionsRequest {
+ return { jobId: "", versionId: undefined };
+}
+
+export const JobActionsRequest: MessageFns