diff --git a/integration-tests/cli/CHANGELOG.md b/integration-tests/cli/CHANGELOG.md index ab29a5f65..dd9b43eaa 100644 --- a/integration-tests/cli/CHANGELOG.md +++ b/integration-tests/cli/CHANGELOG.md @@ -1,5 +1,13 @@ # @openfn/integration-tests-cli +## 1.0.9 + +### Patch Changes + +- Updated dependencies [d1a0e7c] + - @openfn/project@0.11.0 + - @openfn/lightning-mock@2.4.2 + ## 1.0.8 ### Patch Changes diff --git a/integration-tests/cli/package.json b/integration-tests/cli/package.json index 551e1b0c6..668c1421f 100644 --- a/integration-tests/cli/package.json +++ b/integration-tests/cli/package.json @@ -1,7 +1,7 @@ { "name": "@openfn/integration-tests-cli", "private": true, - "version": "1.0.8", + "version": "1.0.9", "description": "CLI integration tests", "author": "Open Function Group ", "license": "ISC", diff --git a/integration-tests/cli/test/execute-workflow.test.ts b/integration-tests/cli/test/execute-workflow.test.ts index 5d5adacbc..9e69d8465 100644 --- a/integration-tests/cli/test/execute-workflow.test.ts +++ b/integration-tests/cli/test/execute-workflow.test.ts @@ -83,6 +83,41 @@ test.serial( } ); +test.serial( + `openfn ${jobsPath}/wf-array.yaml -S "{ \\"data\\": { \\"items\\": [\\"z\\"] } }"`, + async (t) => { + const { err } = await run(t.title); + t.falsy(err); + + const out = getJSON(); + t.is(out.data.items.length, 4); + t.deepEqual(out.data.items, ['z', 'a', 'b', 'c']); + } +); + +test.serial( + `openfn ${jobsPath}/wf-array-legacy.yaml -S "{ \\"data\\": { \\"items\\": [\\"z\\"] } }"`, + async (t) => { + const { stdout, err } = await run(t.title); + t.falsy(err); + + const out = getJSON(); + t.is(out.data.items.length, 3); + t.deepEqual(out.data.items, ['z', 'b', 'c']); + } +); + +test.serial( + `openfn ${jobsPath}/wf-array-legacy.json -S "{ \\"data\\": { \\"items\\": [\\"z\\"] } }"`, + async (t) => { + const { err } = await run(t.title); + t.falsy(err); + const out = getJSON(); + t.is(out.data.items.length, 3); + t.deepEqual(out.data.items, ['z', 
'b', 'c']); + } +); + // special start step test.serial( `openfn ${jobsPath}/wf-array.json --start b -S "{ \\"data\\": { \\"items\\": [] } }"`, @@ -96,6 +131,17 @@ test.serial( t.true(out.data.items.includes('c')); } ); +test.serial( + `openfn ${jobsPath}/wf-array-legacy.json --start c -S "{ \\"data\\": { \\"items\\": [] } }"`, + async (t) => { + const { err } = await run(t.title); + t.falsy(err); + + const out = getJSON(); + t.is(out.data.items.length, 1); + t.true(out.data.items.includes('c')); + } +); // only step test.serial( diff --git a/integration-tests/cli/test/fixtures/wf-array-legacy.json b/integration-tests/cli/test/fixtures/wf-array-legacy.json new file mode 100644 index 000000000..3a823ee8f --- /dev/null +++ b/integration-tests/cli/test/fixtures/wf-array-legacy.json @@ -0,0 +1,26 @@ +{ + "workflow": { + "steps": [ + { + "id": "a", + "adaptor": "common", + "expression": "fn((state) => { if (!state.data.items) { state.data.items = []; } state.data.items.push('a'); return state; });", + "next": { "b": true } + }, + { + "id": "b", + "adaptor": "common", + "expression": "fn((state) => { state.data.items.push('b'); return state; });", + "next": { "c": true } + }, + { + "id": "c", + "adaptor": "common", + "expression": "fn((state) => { state.data.items.push('c'); return state; });" + } + ] + }, + "options": { + "start": "b" + } +} diff --git a/integration-tests/cli/test/fixtures/wf-array-legacy.yaml b/integration-tests/cli/test/fixtures/wf-array-legacy.yaml new file mode 100644 index 000000000..c1592c8be --- /dev/null +++ b/integration-tests/cli/test/fixtures/wf-array-legacy.yaml @@ -0,0 +1,17 @@ +workflow: + steps: + - id: a + adaptor: common + expression: fn((state) => { if (!state.data.items) { state.data.items = []; } state.data.items.push('a'); return state; }); + next: + b: true + - id: b + adaptor: common + expression: fn((state) => { state.data.items.push('b'); return state; }); + next: + c: true + - id: c + adaptor: common + expression: fn((state) => 
{ state.data.items.push('c'); return state; }); +options: + start: b diff --git a/integration-tests/cli/test/fixtures/wf-array.json b/integration-tests/cli/test/fixtures/wf-array.json index 06e949844..589bf7033 100644 --- a/integration-tests/cli/test/fixtures/wf-array.json +++ b/integration-tests/cli/test/fixtures/wf-array.json @@ -1,23 +1,21 @@ { - "workflow": { - "steps": [ - { - "id": "a", - "adaptor": "common", - "expression": "fn((state) => { if (!state.data.items) { state.data.items = []; } state.data.items.push('a'); return state; });", - "next": { "b": true } - }, - { - "id": "b", - "adaptor": "common", - "expression": "fn((state) => { state.data.items.push('b'); return state; });", - "next": { "c": true } - }, - { - "id": "c", - "adaptor": "common", - "expression": "fn((state) => { state.data.items.push('c'); return state; });" - } - ] - } + "steps": [ + { + "id": "a", + "adaptor": "common", + "expression": "fn((state) => { if (!state.data.items) { state.data.items = []; } state.data.items.push('a'); return state; });", + "next": { "b": true } + }, + { + "id": "b", + "adaptor": "common", + "expression": "fn((state) => { state.data.items.push('b'); return state; });", + "next": { "c": true } + }, + { + "id": "c", + "adaptor": "common", + "expression": "fn((state) => { state.data.items.push('c'); return state; });" + } + ] } diff --git a/integration-tests/cli/test/fixtures/wf-array.yaml b/integration-tests/cli/test/fixtures/wf-array.yaml new file mode 100644 index 000000000..099c9fbb4 --- /dev/null +++ b/integration-tests/cli/test/fixtures/wf-array.yaml @@ -0,0 +1,14 @@ +steps: + - id: a + adaptor: common + expression: fn((state) => { if (!state.data.items) { state.data.items = []; } state.data.items.push('a'); return state; }); + next: + b: true + - id: b + adaptor: common + expression: fn((state) => { state.data.items.push('b'); return state; }); + next: + c: true + - id: c + adaptor: common + expression: fn((state) => { state.data.items.push('c'); return 
state; }); diff --git a/integration-tests/cli/test/project-v1.test.ts b/integration-tests/cli/test/project-v1.test.ts index 13007e429..078ca0e42 100644 --- a/integration-tests/cli/test/project-v1.test.ts +++ b/integration-tests/cli/test/project-v1.test.ts @@ -3,6 +3,8 @@ import { rm, mkdir, writeFile, readFile } from 'node:fs/promises'; import path from 'node:path'; import run from '../src/run'; +const TMP_DIR = path.resolve('tmp/project-v1'); + // These tests use the legacy v1 yaml structure const mainYaml = ` @@ -35,7 +37,7 @@ workflows: jobs: - name: Transform data body: | - // TODO + fn(() => ({ x: 1})) adaptor: "@openfn/language-common@latest" id: b8b780f3-98dd-4244-880b-e534d8f24547 project_credential_id: null @@ -95,16 +97,16 @@ workflows: source_trigger_id: 7bb476cc-0292-4573-89d0-b13417bc648e condition_type: always `; -const projectsPath = path.resolve('tmp/project'); +const projectsPath = path.resolve(TMP_DIR); test.before(async () => { - // await rm('tmp/project', { recursive: true }); - await mkdir('tmp/project/.projects', { recursive: true }); + // await rm(TMP_DIR, { recursive: true }); + await mkdir(`${TMP_DIR}/.projects`, { recursive: true }); - await writeFile('tmp/project/openfn.yaml', ''); - await writeFile('tmp/project/.projects/main@app.openfn.org.yaml', mainYaml); + await writeFile(`${TMP_DIR}/openfn.yaml`, ''); + await writeFile(`${TMP_DIR}/.projects/main@app.openfn.org.yaml`, mainYaml); await writeFile( - 'tmp/project/.projects/staging@app.openfn.org.yaml', + `${TMP_DIR}/.projects/staging@app.openfn.org.yaml`, stagingYaml ); }); @@ -131,6 +133,7 @@ test.serial('Checkout a project', async (t) => { workflowYaml, `id: my-workflow name: my workflow +start: trigger-webhook options: {} steps: - id: trigger @@ -150,7 +153,22 @@ steps: path.resolve(projectsPath, 'workflows/my-workflow/transform-data.js'), 'utf8' ); - t.is(expr.trim(), '// TODO'); + t.is(expr.trim(), 'fn(() => ({ x: 1}))'); +}); + +// note: order of tests is important here 
+test.serial('execute a workflow from the checked out project', async (t) => { + // cheeky bonus test of checkout by alias + await run(`openfn checkout main -w ${projectsPath}`); + + // execute a workflow + const { stdout } = await run( + `openfn my-workflow -o ${TMP_DIR}/output.json --log debug --workspace ${projectsPath}` + ); + + const output = await readFile(`${TMP_DIR}/output.json`, 'utf8'); + const finalState = JSON.parse(output); + t.deepEqual(finalState, { x: 1 }); }); // requires the prior test to run @@ -163,7 +181,7 @@ test.serial('merge a project', async (t) => { // assert the initial step code const initial = await readStep(); - t.is(initial, '// TODO'); + t.is(initial, 'fn(() => ({ x: 1}))'); // Run the merge await run(`openfn merge hello-world-staging -w ${projectsPath} --force`); diff --git a/integration-tests/cli/test/project-v2.test.ts b/integration-tests/cli/test/project-v2.test.ts index 295ea83d9..f3b7cab7b 100644 --- a/integration-tests/cli/test/project-v2.test.ts +++ b/integration-tests/cli/test/project-v2.test.ts @@ -2,6 +2,9 @@ import test from 'ava'; import { rm, mkdir, writeFile, readFile } from 'node:fs/promises'; import path from 'node:path'; import run from '../src/run'; +import createLightningServer from '@openfn/lightning-mock'; + +const TMP_DIR = path.resolve('tmp/project-v2'); const mainYaml = ` id: sandboxing-simple @@ -26,6 +29,7 @@ options: retention_policy: retain_all workflows: - name: Hello Workflow + start: trigger steps: - id: trigger type: webhook @@ -40,8 +44,8 @@ workflows: uuid: add150e9-8616-48ca-844e-8aaa489c7a10 - id: transform-data name: Transform data - expression: |- - // TODO + expression: | + fn(() => ({ x: 1})) adaptor: "@openfn/language-dhis2@8.0.4" openfn: uuid: a9f64216-7974-469d-8415-d6d9baf2f92e @@ -81,6 +85,7 @@ options: retention_policy: retain_all workflows: - name: Hello Workflow + start: trigger steps: - id: trigger type: webhook @@ -95,7 +100,7 @@ workflows: uuid: f34146b5-de43-4b05-ac00-3b4f327e62ec 
- id: transform-data name: Transform data - expression: |- + expression: | fn() adaptor: "@openfn/language-dhis2@8.0.4" openfn: @@ -112,22 +117,21 @@ workflows: id: hello-workflow history: [] `; -const projectsPath = path.resolve('tmp/project'); test.before(async () => { - await rm('tmp/project', { recursive: true }); - await mkdir('tmp/project/.projects', { recursive: true }); + // await rm(TMP_DIR, { recursive: true }); + await mkdir(`${TMP_DIR}/.projects`, { recursive: true }); - await writeFile('tmp/project/openfn.yaml', ''); - await writeFile('tmp/project/.projects/main@app.openfn.org.yaml', mainYaml); + await writeFile(`${TMP_DIR}/openfn.yaml`, ''); + await writeFile(`${TMP_DIR}/.projects/main@app.openfn.org.yaml`, mainYaml); await writeFile( - 'tmp/project/.projects/staging@app.openfn.org.yaml', + `${TMP_DIR}/.projects/staging@app.openfn.org.yaml`, stagingYaml ); }); test.serial('list available projects', async (t) => { - const { stdout } = await run(`openfn projects -w ${projectsPath}`); + const { stdout } = await run(`openfn projects -w ${TMP_DIR}`); t.regex(stdout, /sandboxing-simple/); t.regex(stdout, /a272a529-716a-4de7-a01c-a082916c6d23/); t.regex(stdout, /staging/); @@ -135,17 +139,18 @@ test.serial('list available projects', async (t) => { }); test.serial('Checkout a project', async (t) => { - await run(`openfn checkout staging -w ${projectsPath}`); + await run(`openfn checkout staging -w ${TMP_DIR}`); // check workflow.yaml const workflowYaml = await readFile( - path.resolve(projectsPath, 'workflows/hello-workflow/hello-workflow.yaml'), + path.resolve(TMP_DIR, 'workflows/hello-workflow/hello-workflow.yaml'), 'utf8' ); t.is( workflowYaml, `id: hello-workflow name: Hello Workflow +start: trigger options: {} steps: - id: trigger @@ -162,30 +167,167 @@ steps: ); const expr = await readFile( - path.resolve(projectsPath, 'workflows/hello-workflow/transform-data.js'), + path.resolve(TMP_DIR, 'workflows/hello-workflow/transform-data.js'), 'utf8' ); 
t.is(expr.trim(), 'fn()'); }); -// requires the prior test to run +test.serial('execute a workflow from the checked out project', async (t) => { + // cheeky bonus test of checkout by alias + await run(`openfn checkout main -w ${TMP_DIR}`); + + // execute a workflow + await run( + `openfn hello-workflow -o ${TMP_DIR}/output.json --workspace ${TMP_DIR}` + ); + + const output = await readFile(`${TMP_DIR}/output.json`, 'utf8'); + const finalState = JSON.parse(output); + t.deepEqual(finalState, { x: 1 }); +}); + +test.serial( + 'execute a workflow from the checked out project with a credential map', + async (t) => { + await run(`openfn checkout main --log debug -w ${TMP_DIR}`); + + // Modify the checked out workflow code + await writeFile( + `${TMP_DIR}/workflows/hello-workflow/transform-data.js`, + `fn(s => ({ user: s.configuration.username }))` + ); + + // Modify the checked out workflow to add a credential + await writeFile( + `${TMP_DIR}/workflows/hello-workflow/hello-workflow.yaml`, + `id: hello-workflow +name: Hello Workflow +start: trigger +options: {} +steps: + - id: trigger + type: webhook + next: + transform-data: + disabled: false + condition: true + - id: transform-data + name: Transform data + configuration: abcd + adaptor: "@openfn/language-common@3.2.1" + expression: ./transform-data.js +` + ); + + // add the credential map to the yaml + await writeFile(`${TMP_DIR}/openfn.yaml`, `credentials: creds.yaml`); + + // write the credential map + await writeFile( + `${TMP_DIR}/creds.yaml`, + `abcd: + username: pparker` + ); + + // finally execute the workflow + const { stdout } = await run( + `openfn hello-workflow -o ${TMP_DIR}/output.json --log debug --workspace ${TMP_DIR}` + ); + + const output = await readFile(`${TMP_DIR}/output.json`, 'utf8'); + const finalState = JSON.parse(output); + t.deepEqual(finalState, { user: 'pparker' }); + } +); + +test.serial( + 'execute a workflow from the checked out project with credentials and collections', + async (t) => { + 
const server = await createLightningServer({ port: 1234 }); + server.collections.createCollection('stuff'); + // Important: the collection value MUST be as string + server.collections.upsert('stuff', 'x', JSON.stringify({ id: 'x' })); + + await run(`openfn checkout main --log debug -w ${TMP_DIR}`); + + // Modify the checked out workflow code + await writeFile( + `${TMP_DIR}/workflows/hello-workflow/transform-data.js`, + ` +fn(s => ({ ...s, user: s.configuration.username })); +collections.get('stuff', 'x')` + ); + + // Modify the checked out workflow to add a credential + await writeFile( + `${TMP_DIR}/workflows/hello-workflow/hello-workflow.yaml`, + `id: hello-workflow +name: Hello Workflow +start: trigger +options: {} +steps: + - id: trigger + type: webhook + next: + transform-data: + disabled: false + condition: true + - id: transform-data + name: Transform data + configuration: 'abcd' + adaptor: "@openfn/language-common@3.2.1" + expression: ./transform-data.js +` + ); + + // add the credential map to the yaml + await writeFile( + `${TMP_DIR}/openfn.yaml`, + ` +project: + endpoint: http://localhost:1234 +workspace: + credentials: creds.yaml` + ); + + // write the credential map + await writeFile( + `${TMP_DIR}/creds.yaml`, + `abcd: + username: pparker` + ); + + const { stdout } = await run( + `openfn hello-workflow -o ${TMP_DIR}/output.json --log debug --workspace ${TMP_DIR}` + ); + + const output = await readFile(`${TMP_DIR}/output.json`, 'utf8'); + const finalState = JSON.parse(output); + + t.deepEqual(finalState, { + data: { id: 'x' }, + user: 'pparker', + }); + server.destroy(); + } +); + test.serial('merge a project', async (t) => { + await run(`openfn checkout main -w ${TMP_DIR}`); + const readStep = () => readFile( - path.resolve(projectsPath, 'workflows/hello-workflow/transform-data.js'), + path.resolve(TMP_DIR, 'workflows/hello-workflow/transform-data.js'), 'utf8' ).then((str) => str.trim()); - await run(`openfn checkout sandboxing-simple -w 
${projectsPath}`); - // assert the initial step code const initial = await readStep(); - t.is(initial, '// TODO'); + t.is(initial, 'fn(() => ({ x: 1}))'); // Run the merge - const { stdout } = await run( - `openfn merge staging -w ${projectsPath} --force` - ); + const { stdout } = await run(`openfn merge staging -w ${TMP_DIR} --force`); // Check the step is updated const merged = await readStep(); diff --git a/packages/cli/CHANGELOG.md b/packages/cli/CHANGELOG.md index b01cfaf3e..fb4a462a0 100644 --- a/packages/cli/CHANGELOG.md +++ b/packages/cli/CHANGELOG.md @@ -1,5 +1,34 @@ # @openfn/cli +## 1.23.0 + +### Minor Changes + +- projects: when pulling, include a `start` option which points to the trigger, ensuring workflow.yaml files start executing from the right place. +- projects: When running `execute` inside a Workspace (a folder with an `openfn.yaml` file), allow Workflows to be run directly. I.e. do this: + + ```bash + openfn process-patients + ``` + + Instead of: + + ``` + openfn ./workflows/process-patients/process-patients.yaml + ``` + + When running through a Workspace, credential maps and collections endpoints are automatically applied for you. 
+ +### Patch Changes + +- b262d10: projects: Support workflow.jaml/json files without a top workflow key +- d1a0e7c: When executing jobs, the CLI no longer defaults the path to job.js +- Updated dependencies [b262d10] +- Updated dependencies [147a431] +- Updated dependencies [d1a0e7c] + - @openfn/runtime@1.8.1 + - @openfn/project@0.11.0 + ## 1.22.0 ### Minor Changes diff --git a/packages/cli/package.json b/packages/cli/package.json index 84482bf4d..cdd0c892d 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -1,6 +1,6 @@ { "name": "@openfn/cli", - "version": "1.22.0", + "version": "1.23.0", "description": "CLI devtools for the OpenFn toolchain", "engines": { "node": ">=18", diff --git a/packages/cli/src/execute/apply-credential-map.ts b/packages/cli/src/execute/apply-credential-map.ts index e0c75691d..a4a0cf174 100644 --- a/packages/cli/src/execute/apply-credential-map.ts +++ b/packages/cli/src/execute/apply-credential-map.ts @@ -10,6 +10,8 @@ type JobId = string; export type CredentialMap = Record; +export const CREDENTIALS_KEY = '$CREDENTIALS$'; + const applyCredentialMap = ( plan: ExecutionPlan, map: CredentialMap = {}, @@ -17,22 +19,37 @@ const applyCredentialMap = ( ) => { const stepsWithCredentialIds = plan.workflow.steps.filter( (step: any) => - typeof step.configuration === 'string' && - !step.configuration.endsWith('.json') + (typeof step.configuration === 'string' && + !step.configuration.endsWith('.json')) || + step.configuration?.[CREDENTIALS_KEY] ) as { configuration: string; name?: string; id: string }[]; const unmapped: Record = {}; for (const step of stepsWithCredentialIds) { - if (map[step.configuration]) { - logger?.debug( - `Applying credential ${step.configuration} to "${step.name ?? 
step.id}"` - ); - step.configuration = map[step.configuration]; + if (typeof step.configuration === 'string') { + const configId = step.configuration; + if (configId in map) { + step.configuration = map[configId]; + } else { + unmapped[configId] = true; + // @ts-ignore + delete step.configuration; + } } else { - unmapped[step.configuration] = true; - // @ts-ignore - delete step.configuration; + const configId = step.configuration[CREDENTIALS_KEY]; + delete step.configuration[CREDENTIALS_KEY]; + if (configId in map) { + Object.assign(step.configuration, map[configId]); + } else { + unmapped[configId] = true; + } + + if (!(configId in unmapped)) { + logger?.debug( + `Applied credential ${configId} to "${step.name ?? step.id}"` + ); + } } } diff --git a/packages/cli/src/execute/command.ts b/packages/cli/src/execute/command.ts index 71cdc65cd..77e859203 100644 --- a/packages/cli/src/execute/command.ts +++ b/packages/cli/src/execute/command.ts @@ -1,10 +1,11 @@ import yargs from 'yargs'; import { build, ensure, override } from '../util/command-builders'; import * as o from '../options'; +import * as po from '../projects/options'; import type { Opts } from '../options'; -export type ExecuteOptions = Required< +export type ExecuteOptions = { workspace?: string } & Required< Pick< Opts, | 'apiKey' @@ -38,6 +39,7 @@ export type ExecuteOptions = Required< | 'trace' | 'useAdaptorsMonorepo' | 'workflowPath' + | 'workflowName' | 'globals' > > & @@ -75,6 +77,8 @@ const options = [ o.timeout, o.trace, o.useAdaptorsMonorepo, + + po.workspace, ]; const executeCommand: yargs.CommandModule = { diff --git a/packages/cli/src/execute/handler.ts b/packages/cli/src/execute/handler.ts index 66f0d5fcc..e31d439be 100644 --- a/packages/cli/src/execute/handler.ts +++ b/packages/cli/src/execute/handler.ts @@ -57,7 +57,7 @@ const loadAndApplyCredentialMap = async ( if (options.credentials) { try { const credsRaw = await readFile( - path.resolve(options.credentials), + 
path.resolve(options.workspace!, options.credentials), 'utf8' ); if (options.credentials.endsWith('.json')) { @@ -65,14 +65,25 @@ const loadAndApplyCredentialMap = async ( } else { creds = yamlToJson(credsRaw); } - } catch (e) { - logger.error('Error processing credential map:'); - logger.error(e); - // probably want to exist if the credential map is invalid - process.exitCode = 1; - return; + logger.info('Credential map loaded '); + } catch (e: any) { + // If we get here, the credential map failed to load + // That could mean 3 things: + // 1. The user passed --credentials to the CLI with an invalid path. + // 2. The user ran through a Project and the default credential map was not found + // 3. The user ran through a Project and an explicitly set credential map was not found + // The case of 1 is handled by opts.ensure(), which validates the path passed to the CLI + // For 2 we should continue executing but log a warning. For 3 we should probably error + // But because it's hard to recognise the case, we'll just log. 
+ if (e?.message?.match(/ENOENT/)) { + logger.debug('Credential map not found at', options.credentials); + } else { + logger.error('Error processing credential map:'); + // probably want to exit if the credential map is invalid + process.exitCode = 1; + throw e; + } } - logger.info('Credential map loaded '); } return applyCredentialMap(plan, creds, logger); }; diff --git a/packages/cli/src/options.ts b/packages/cli/src/options.ts index 0d050644e..cc3f5917a 100644 --- a/packages/cli/src/options.ts +++ b/packages/cli/src/options.ts @@ -8,6 +8,7 @@ import { ensureLogOpts, LogLevel, } from './util'; +import { existsSync } from 'node:fs'; // Central type definition for the main options // This represents the types coming out of yargs, @@ -67,6 +68,7 @@ export type Opts = { trace?: boolean; useAdaptorsMonorepo?: boolean; workflow: string; + workflowName?: string; // deprecated workflowPath?: string; @@ -270,6 +272,20 @@ export const credentials: CLIOption = { alias: ['creds'], description: 'A path which points to a credential map', }, + ensure(opts) { + if (opts.credentials) { + const mapPath = nodePath.resolve( + (opts as any).workspace ?? '', + opts.credentials + ); + // Throw if a user-provided credential map not found + if (!existsSync(mapPath)) { + const e = new Error('Credential map not found at ' + mapPath); + delete e.stack; + throw e; + } + } + }, }; export const describe: CLIOption = { @@ -366,6 +382,7 @@ export const projectId: CLIOption = { }; // Input path covers expressionPath and workflowPath +// TODO this needs unit testing! 
export const inputPath: CLIOption = { name: 'input-path', yargs: { @@ -373,13 +390,12 @@ export const inputPath: CLIOption = { }, ensure: (opts) => { const { path: basePath } = opts; - if (basePath?.endsWith('.json')) { + if (basePath?.match(/.(json|ya?ml)$/)) { opts.planPath = basePath; } else if (basePath?.endsWith('.js')) { opts.expressionPath = basePath; - } else { - const base = getBaseDir(opts); - setDefaultValue(opts, 'expressionPath', nodePath.join(base, 'job.js')); + } else if (!opts.expressionPath) { + opts.workflowName = basePath; } }, }; diff --git a/packages/cli/src/projects/options.ts b/packages/cli/src/projects/options.ts index be95eeb90..6cbc3d52b 100644 --- a/packages/cli/src/projects/options.ts +++ b/packages/cli/src/projects/options.ts @@ -23,6 +23,7 @@ export const env: CLIOption = { export const alias: CLIOption = { name: 'alias', yargs: { + alias: ['env'], description: 'Environment name (eg staging, prod, branch)', }, }; @@ -51,7 +52,7 @@ export const workflowMappings: CLIOption = { export const outputPath: CLIOption = { name: 'output-path', yargs: { - alias: ['output'], + alias: ['o', 'output'], type: 'string', description: 'Path to output the fetched project to', }, diff --git a/packages/cli/src/pull/handler.ts b/packages/cli/src/pull/handler.ts index 7309a33ba..fe96e7188 100644 --- a/packages/cli/src/pull/handler.ts +++ b/packages/cli/src/pull/handler.ts @@ -14,6 +14,7 @@ import beta from '../projects/pull'; async function pullHandler(options: PullOptions, logger: Logger) { if (options.beta) { + (options as any).project = options.projectId; return beta(options as any, logger); } diff --git a/packages/cli/src/types.ts b/packages/cli/src/types.ts index 00ccbe3f0..60e89ab0e 100644 --- a/packages/cli/src/types.ts +++ b/packages/cli/src/types.ts @@ -13,7 +13,9 @@ export type OldCLIWorkflow = { export type CLIExecutionPlan = { id?: UUID; - options?: WorkflowOptions; + options?: WorkflowOptions & { + collectionsEndpoint?: string; + }; workflow: { 
id?: string; name?: string; diff --git a/packages/cli/src/util/load-plan.ts b/packages/cli/src/util/load-plan.ts index 00dba22db..de773cc4d 100644 --- a/packages/cli/src/util/load-plan.ts +++ b/packages/cli/src/util/load-plan.ts @@ -1,7 +1,7 @@ import fs from 'node:fs/promises'; import path, { dirname } from 'node:path'; import { isPath } from '@openfn/compiler'; -import Project, { yamlToJson } from '@openfn/project'; +import { Workspace, yamlToJson } from '@openfn/project'; import abort from './abort'; import expandAdaptors from './expand-adaptors'; @@ -11,6 +11,7 @@ import type { Opts } from '../options'; import type { Logger } from './logger'; import type { CLIExecutionPlan, CLIJobNode, OldCLIWorkflow } from '../types'; import resolvePath from './resolve-path'; +import { CREDENTIALS_KEY } from '../execute/apply-credential-map'; const loadPlan = async ( options: Pick< @@ -18,92 +19,95 @@ const loadPlan = async ( | 'expressionPath' | 'planPath' | 'workflowPath' + | 'workflowName' | 'adaptors' | 'baseDir' | 'expandAdaptors' | 'path' | 'globals' + | 'credentials' + | 'collectionsEndpoint' > & { workflow?: Opts['workflow']; + workspace?: string; // from project opts }, logger: Logger ): Promise => { // TODO all these paths probably need rethinkng now that we're supporting // so many more input formats - const { workflowPath, planPath, expressionPath } = options; + const { workflowPath, planPath, expressionPath, workflowName } = options; - if (options.path && /ya?ml$/.test(options.path)) { - const content = await fs.readFile(path.resolve(options.path), 'utf-8'); - const workflow = yamlToJson(content); - options.baseDir = dirname(options.path); - return loadXPlan({ workflow }, options, logger); - } + let workflowObj; + + if (workflowName || options.workflow) { + logger.debug( + 'Loading workflow from active project in workspace at ', + options.workspace + ); + const workspace = new Workspace(options.workspace!); + const proj = await workspace.getCheckedOutProject(); + 
+ const name = workflowName || options.workflow!; + const workflow = proj?.getWorkflow(name); + if (!workflow) { + const e = new Error(`Could not find Workflow "${name}"`); + delete e.stack; + throw e; + } + + workflowObj = { + workflow: workflow.toJSON(), + }; - // Run a workflow from a project, with a path and workflow name - if (options.path && options.workflow) { - options.baseDir = options.path; - return fromProject(options.path, options.workflow, options, logger); + options.credentials ??= workspace.getConfig().credentials; + options.collectionsEndpoint ??= proj.openfn?.endpoint; } - // Run a workflow from a project in the current working dir - // (no expression or workflow path, and no file extension) - if ( - !expressionPath && - !workflowPath && - !/\.(js|json|yaml)+$/.test(options.path || '') && - !options.workflow - ) { - // If the path has no extension - // Run a workflow from a project in the working dir - const workflow = options.path; - return fromProject(path.resolve('.'), workflow!, options, logger); + if (options.path && /ya?ml$/.test(options.path)) { + const content = await fs.readFile(path.resolve(options.path), 'utf-8'); + options.baseDir = dirname(options.path); + workflowObj = yamlToJson(content); + const { options: o, ...rest } = workflowObj; + // restructure the workflow so that options are not on the workflow object, + // but part of hte execution plan options instead + if (!workflowObj.workflow && workflowObj.options) { + workflowObj = { workflow: rest, options: o }; + } } - if (expressionPath) { + if (!workflowObj && expressionPath) { return loadExpression(options, logger); } const jsonPath = planPath || workflowPath; - if (!options.baseDir) { - options.baseDir = path.dirname(jsonPath!); + if (jsonPath && !options.baseDir) { + options.baseDir = path.dirname(jsonPath); } - const json = await loadJson(jsonPath!, logger); - const defaultName = path.parse(jsonPath!).name; + workflowObj = workflowObj ?? 
(await loadJson(jsonPath!, logger)); + const defaultName = workflowObj.name || path.parse(jsonPath ?? '').name; - if (json.workflow) { - return loadXPlan(json, options, logger, defaultName); + // Support very old workflow formats + if (workflowObj.jobs) { + return loadOldWorkflow(workflowObj, options, logger, defaultName); + } + // support workflow saved like { workflow, options } + else if (workflowObj.workflow) { + return loadXPlan( + workflowObj, + Object.assign({}, workflowObj.options, options), + logger, + defaultName + ); } else { - return loadOldWorkflow(json, options, logger, defaultName); + // This is the main route now - just load the workflow from the file + return loadXPlan({ workflow: workflowObj }, options, logger, defaultName); } }; export default loadPlan; -const fromProject = async ( - rootDir: string, - workflowName: string, - options: Partial, - logger: Logger -): Promise => { - logger.debug('Loading Repo from ', path.resolve(rootDir)); - const project = await Project.from('fs', { root: rootDir }); - logger.debug('Loading workflow ', workflowName); - const workflow = project.getWorkflow(workflowName); - if (!workflow) { - throw new Error(`Workflow "${workflowName}" not found`); - } - return loadXPlan({ workflow }, options, logger); -}; - -// load a workflow from a repo -// if you do `openfn wf1` then we use this - you've asked for a workflow name, which we'll find -// const loadRepo = () => {}; - -// Load a workflow straight from yaml -// const loadYaml = () => {}; - const loadJson = async (workflowPath: string, logger: Logger): Promise => { let text: string; @@ -340,13 +344,14 @@ type ensureCollectionsOptions = { const ensureCollections = ( plan: CLIExecutionPlan, { - endpoint = 'https://app.openfn.org', + endpoint, version = 'latest', apiKey = 'null', }: ensureCollectionsOptions = {}, logger?: Logger ) => { let collectionsFound = false; + endpoint ??= plan.options?.collectionsEndpoint ?? 
'https://app.openfn.org'; Object.values(plan.workflow.steps) .filter((step) => (step as any).expression?.match(/(collections\.)/)) @@ -362,6 +367,12 @@ const ensureCollections = ( job.adaptors.push( `@openfn/language-collections@${version || 'latest'}` ); + if (typeof job.configuration === 'string') { + // If the config is a string credential ID, write it to a special value + job.configuration = { + [CREDENTIALS_KEY]: job.configuration, + }; + } job.configuration = Object.assign({}, job.configuration, { collections_endpoint: `${endpoint}/collections`, diff --git a/packages/cli/src/util/validate-adaptors.ts b/packages/cli/src/util/validate-adaptors.ts index 54dcb0f2d..ea71dc3f6 100644 --- a/packages/cli/src/util/validate-adaptors.ts +++ b/packages/cli/src/util/validate-adaptors.ts @@ -10,6 +10,7 @@ const validateAdaptors = async ( | 'repoDir' | 'workflowPath' | 'planPath' + | 'expressionPath' > & { workflow?: Opts['workflow']; }, @@ -18,11 +19,9 @@ const validateAdaptors = async ( if (options.skipAdaptorValidation) { return; } - const isPlan = options.planPath || options.workflowPath || options.workflow; - const hasDeclaredAdaptors = options.adaptors && options.adaptors.length > 0; - if (isPlan && hasDeclaredAdaptors) { + if (!options.expressionPath && hasDeclaredAdaptors) { logger.error('ERROR: adaptor and workflow provided'); logger.error( 'This is probably not what you meant to do. A workflow should declare an adaptor for each job.' @@ -30,10 +29,8 @@ const validateAdaptors = async ( throw new Error('adaptor and workflow provided'); } - // If no adaptor is specified, pass a warning - // (The runtime is happy to run without) - // This can be overriden from options - if (!isPlan && !hasDeclaredAdaptors) { + // If running a .js file directly and no adaptor is specified, send a warning + if (options.expressionPath && !hasDeclaredAdaptors) { logger.warn('WARNING: No adaptor provided!'); logger.warn( 'This job will probably fail. 
Pass an adaptor with the -a flag, eg:' diff --git a/packages/cli/test/compile/compile.test.ts b/packages/cli/test/compile/compile.test.ts index af5b5556b..158644d2d 100644 --- a/packages/cli/test/compile/compile.test.ts +++ b/packages/cli/test/compile/compile.test.ts @@ -34,7 +34,7 @@ test.serial('compile from source string', async (t) => { const opts = {} as CompileOptions; - const result = await compile(job, opts, mockLog) + const result = await compile(job, opts, mockLog); const expected = 'export default [x()];'; t.is(result.code, expected); @@ -92,53 +92,68 @@ test.serial('throw an AbortError if a job is uncompilable', async (t) => { t.assert(logger._find('error', /critical error: aborting command/i)); }); -test.serial('throw an AbortError if an xplan contains an uncompilable job', async (t) => { - const plan: ExecutionPlan = { - workflow: { - steps: [{ id: 'a', expression: 'x b' }], - }, - options: {}, - }; - - const opts = {} as CompileOptions; - - const logger = createMockLogger(); - await t.throwsAsync(() => compile(plan, opts, logger), { - message: 'Failed to compile job a', - }); +test.serial( + 'throw an AbortError if an xplan contains an uncompilable job', + async (t) => { + const plan: ExecutionPlan = { + workflow: { + steps: [{ id: 'a', expression: 'x b' }], + }, + options: {}, + }; + + const opts = {} as CompileOptions; + + const logger = createMockLogger(); + await t.throwsAsync(() => compile(plan, opts, logger), { + message: 'Failed to compile job a', + }); - t.assert(logger._find('error', /unexpected token/i)); - t.assert(logger._find('always', /check the syntax of the job expression/i)); - t.assert(logger._find('error', /critical error: aborting command/i)); -}); + t.assert(logger._find('error', /unexpected token/i)); + t.assert(logger._find('always', /check the syntax of the job expression/i)); + t.assert(logger._find('error', /critical error: aborting command/i)); + } +); -test.serial('stripVersionSpecifier: remove version specifier from 
@openfn', (t) => { - const specifier = '@openfn/language-common@3.0.0-rc2'; - const transformed = stripVersionSpecifier(specifier); - const expected = '@openfn/language-common'; - t.assert(transformed == expected); -}); +test.serial( + 'stripVersionSpecifier: remove version specifier from @openfn', + (t) => { + const specifier = '@openfn/language-common@3.0.0-rc2'; + const transformed = stripVersionSpecifier(specifier); + const expected = '@openfn/language-common'; + t.assert(transformed == expected); + } +); -test.serial('stripVersionSpecifier: remove version specifier from arbitrary package', (t) => { - const specifier = 'ava@1.0.0'; - const transformed = stripVersionSpecifier(specifier); - const expected = 'ava'; - t.assert(transformed == expected); -}); +test.serial( + 'stripVersionSpecifier: remove version specifier from arbitrary package', + (t) => { + const specifier = 'ava@1.0.0'; + const transformed = stripVersionSpecifier(specifier); + const expected = 'ava'; + t.assert(transformed == expected); + } +); -test.serial('stripVersionSpecifier: remove version specifier from arbitrary namespaced package', (t) => { - const specifier = '@ava/some-pkg@^1'; - const transformed = stripVersionSpecifier(specifier); - const expected = '@ava/some-pkg'; - t.assert(transformed == expected); -}); +test.serial( + 'stripVersionSpecifier: remove version specifier from arbitrary namespaced package', + (t) => { + const specifier = '@ava/some-pkg@^1'; + const transformed = stripVersionSpecifier(specifier); + const expected = '@ava/some-pkg'; + t.assert(transformed == expected); + } +); -test.serial("stripVersionSpecifier: do nothing if there's no specifier", (t) => { - const specifier = '@openfn/language-common'; - const transformed = stripVersionSpecifier(specifier); - const expected = '@openfn/language-common'; - t.assert(transformed == expected); -}); +test.serial( + "stripVersionSpecifier: do nothing if there's no specifier", + (t) => { + const specifier = 
'@openfn/language-common'; + const transformed = stripVersionSpecifier(specifier); + const expected = '@openfn/language-common'; + t.assert(transformed == expected); + } +); test.serial('loadTransformOptions: do nothing', async (t) => { const opts = {}; @@ -157,31 +172,40 @@ test.serial( } ); -test.serial('resolveSpecifierPath: return a relative path if passed', async (t) => { - const path = await resolveSpecifierPath('pkg=./a', '/repo', mockLog); - t.assert(path === './a'); -}); +test.serial( + 'resolveSpecifierPath: return a relative path if passed', + async (t) => { + const path = await resolveSpecifierPath('pkg=./a', '/repo', mockLog); + t.assert(path === './a'); + } +); -test.serial('resolveSpecifierPath: return an absolute path if passed', async (t) => { - const path = await resolveSpecifierPath('pkg=/a', '/repo', mockLog); - t.assert(path === '/a'); -}); +test.serial( + 'resolveSpecifierPath: return an absolute path if passed', + async (t) => { + const path = await resolveSpecifierPath('pkg=/a', '/repo', mockLog); + t.assert(path === '/a'); + } +); test.serial('resolveSpecifierPath: return a path if passed', async (t) => { const path = await resolveSpecifierPath('pkg=a/b/c', '/repo', mockLog); t.assert(path === 'a/b/c'); }); -test.serial('resolveSpecifierPath: basically return anything after the =', async (t) => { - const path = await resolveSpecifierPath('pkg=a', '/repo', mockLog); - t.assert(path === 'a'); +test.serial( + 'resolveSpecifierPath: basically return anything after the =', + async (t) => { + const path = await resolveSpecifierPath('pkg=a', '/repo', mockLog); + t.assert(path === 'a'); - const path2 = await resolveSpecifierPath('pkg=@', '/repo', mockLog); - t.assert(path2 === '@'); + const path2 = await resolveSpecifierPath('pkg=@', '/repo', mockLog); + t.assert(path2 === '@'); - const path3 = await resolveSpecifierPath('pkg=!', '/repo', mockLog); - t.assert(path3 === '!'); -}); + const path3 = await resolveSpecifierPath('pkg=!', '/repo', 
mockLog); + t.assert(path3 === '!'); + } +); test.serial( 'resolveSpecifierPath: return a path to the repo if the module is found', diff --git a/packages/cli/test/compile/options.test.ts b/packages/cli/test/compile/options.test.ts index e3d896f8d..a43b76b6e 100644 --- a/packages/cli/test/compile/options.test.ts +++ b/packages/cli/test/compile/options.test.ts @@ -49,12 +49,6 @@ test("don't expand adaptors if --no-expand-adaptors is set", (t) => { t.deepEqual(options.adaptors, ['common']); }); -test('default job path', (t) => { - const options = parse('compile /tmp/my-job/ --immutable'); - t.is(options.path, '/tmp/my-job/'); - t.is(options.expressionPath, '/tmp/my-job/job.js'); -}); - test('enable json logging', (t) => { const options = parse('compile job.js --log-json'); t.true(options.logJson); diff --git a/packages/cli/test/execute/apply-credential-map.test.ts b/packages/cli/test/execute/apply-credential-map.test.ts index e4eb72c10..0c8f8af08 100644 --- a/packages/cli/test/execute/apply-credential-map.test.ts +++ b/packages/cli/test/execute/apply-credential-map.test.ts @@ -1,5 +1,7 @@ import test from 'ava'; -import applyCredentialMap from '../../src/execute/apply-credential-map'; +import applyCredentialMap, { + CREDENTIALS_KEY, +} from '../../src/execute/apply-credential-map'; import { createMockLogger } from '@openfn/logger/dist'; const fn = `const fn = (fn) => (s) => fn(s); @@ -52,6 +54,18 @@ test('apply a credential to a single step', (t) => { t.deepEqual(wf.workflow.steps[0].configuration, map.A); }); +test('apply a credential to a single step which already has config', (t) => { + const wf = createWorkflow(); + wf.workflow.steps[0].configuration = { x: 1, [CREDENTIALS_KEY]: 'A' }; + const map = { + A: { user: 'Anne Arnold' }, + }; + + applyCredentialMap(wf, map); + + t.deepEqual(wf.workflow.steps[0].configuration, { ...map.A, x: 1 }); +}); + test('apply a credential to several steps', (t) => { const wf = createWorkflow([ { id: 'a', configuration: 'A' }, diff 
--git a/packages/cli/test/execute/execute.test.ts b/packages/cli/test/execute/execute.test.ts index 1e28d46d4..7fcbbff85 100644 --- a/packages/cli/test/execute/execute.test.ts +++ b/packages/cli/test/execute/execute.test.ts @@ -130,7 +130,7 @@ test.serial('run a workflow with a JSON credential map', async (t) => { t.is(result.b, 'b'); }); -test.serial.skip('run a workflow with a YAML credential map', async (t) => { +test.serial('run a workflow with a YAML credential map', async (t) => { const workflow = { workflow: { steps: [ @@ -167,6 +167,38 @@ B: t.is(result.b, 'b'); }); +// Note that the execute function only logs if the credential map isn't found, +// which is what will happen when auto-loading the credential map +// The CLI will throw earlier through ensure() if an explicitly provided map file +// is not found. See loadAndApplyCredentialMap +test.serial( + 'Log if the credential map is not found (through Project map)', + async (t) => { + const logger = createMockLogger(undefined, { level: 'debug' }); + const workflow = { + workflow: { + steps: [ + { + id: 'a', + }, + ], + }, + }; + mockFs({ + '/workflow.json': JSON.stringify(workflow), + }); + + const options = { + ...defaultOptions, + workflowPath: '/workflow.json', + credentials: '/creds.json', + }; + + await handler(options, logger); + t.truthy(logger._find('debug', /credential map not found/i)); + } +); + test.serial('run a workflow with state', async (t) => { const workflow = { workflow: { diff --git a/packages/cli/test/execute/options.test.ts b/packages/cli/test/execute/options.test.ts index f6d3bc56b..96a87016a 100644 --- a/packages/cli/test/execute/options.test.ts +++ b/packages/cli/test/execute/options.test.ts @@ -26,6 +26,45 @@ test('correct default options', (t) => { t.falsy(options.useAdaptorsMonorepo); }); +test('inputPath: expression -> expressionPath', (t) => { + const options = parse('execute job.js'); + t.is(options.expressionPath, 'job.js'); +}); + +test('inputPath: json workflow -> 
planPath', (t) => { + const filename = parse('execute wf.json'); + t.is(filename.planPath, 'wf.json'); + + const rel = parse('execute ./wf.json'); + t.is(rel.planPath, './wf.json'); + + const abs = parse('execute /wf.json'); + t.is(abs.planPath, '/wf.json'); +}); + +test('inputPath: yaml workflow -> planPath', (t) => { + const filename = parse('execute wf.yaml'); + t.is(filename.planPath, 'wf.yaml'); + + // .yml extension works too! + const rel = parse('execute ./wf.yml'); + t.is(rel.planPath, './wf.yml'); + + const abs = parse('execute /wf.yml'); + t.is(abs.planPath, '/wf.yml'); +}); + +test('inputPath: workflow name -> workflowName', (t) => { + const simple = parse('execute workflow'); + t.is(simple.workflowName, 'workflow'); + + const hyphenated = parse('execute my-workflow'); + t.is(hyphenated.workflowName, 'my-workflow'); + + const dotted = parse('execute my.workflow'); + t.is(dotted.workflowName, 'my.workflow'); +}); + test('pass an adaptor (longform)', (t) => { const options = parse('execute job.js --adaptor @openfn/language-common'); t.deepEqual(options.adaptors, ['@openfn/language-common']); @@ -75,12 +114,6 @@ test('enable immutability', (t) => { t.true(options.immutable); }); -test('default job path', (t) => { - const options = parse('execute /tmp/my-job/ --immutable'); - t.is(options.path, '/tmp/my-job/'); - t.is(options.expressionPath, '/tmp/my-job/job.js'); -}); - test('enable json logging', (t) => { const options = parse('execute job.js --log-json'); t.true(options.logJson); diff --git a/packages/cli/test/options/ensure/inputPath.test.ts b/packages/cli/test/options/ensure/inputPath.test.ts index e62a827b7..4083203ea 100644 --- a/packages/cli/test/options/ensure/inputPath.test.ts +++ b/packages/cli/test/options/ensure/inputPath.test.ts @@ -10,33 +10,3 @@ test('sets expressionPath using path', (t) => { t.is(opts.expressionPath, 'jam.js'); }); - -test('sets expressionPath to path/job.js if path is a folder', (t) => { - const opts = { - path: '/jam', - } 
as Opts; - - inputPath.ensure!(opts); - - t.is(opts.expressionPath, '/jam/job.js'); -}); - -test('sets expressionPath to path/job.js if path is a folder (trailing slash)', (t) => { - const opts = { - path: '/jam/', - } as Opts; - - inputPath.ensure!(opts); - - t.is(opts.expressionPath, '/jam/job.js'); -}); - -test.skip('set workflowPath if path ends in json', (t) => { - const opts = { - path: 'workflow.json', - } as Opts; - - inputPath.ensure!(opts); - - t.is(opts.workflowPath, 'workflow.json'); -}); diff --git a/packages/cli/test/projects/checkout.test.ts b/packages/cli/test/projects/checkout.test.ts index 06947f829..93ecc56f9 100644 --- a/packages/cli/test/projects/checkout.test.ts +++ b/packages/cli/test/projects/checkout.test.ts @@ -430,6 +430,7 @@ test.serial('respect openfn.yaml settings', async (t) => { uuid: id: staging workspace: + credentials: credentials.yaml dirs: projects: p workflows: w @@ -450,6 +451,7 @@ workspace: id: 'simple-workflow', name: 'Simple Workflow', options: {}, + start: 'trigger-webhook', steps: [ { id: 'trigger', diff --git a/packages/cli/test/projects/fetch.test.ts b/packages/cli/test/projects/fetch.test.ts index 51ee74351..8c8b4d8de 100644 --- a/packages/cli/test/projects/fetch.test.ts +++ b/packages/cli/test/projects/fetch.test.ts @@ -450,6 +450,7 @@ test.serial( }, }, ], + start: 'trigger-webhook', openfn: { uuid: '72ca3eb0-042c-47a0-a2a1-a545ed4a8406', inserted_at: '2025-04-23T11:19:32Z', diff --git a/packages/cli/test/projects/fixtures.ts b/packages/cli/test/projects/fixtures.ts index 02682ea53..9cb301162 100644 --- a/packages/cli/test/projects/fixtures.ts +++ b/packages/cli/test/projects/fixtures.ts @@ -100,4 +100,5 @@ workflows: inserted_at: 2025-04-23T11:19:32Z updated_at: 2025-04-23T11:19:32Z lock_version: 1 - id: my-workflow`; + id: my-workflow + start: trigger-webhook`; diff --git a/packages/cli/test/projects/list.test.ts b/packages/cli/test/projects/list.test.ts index dbc4e7e4a..6b4b2d05e 100644 --- 
a/packages/cli/test/projects/list.test.ts +++ b/packages/cli/test/projects/list.test.ts @@ -163,7 +163,7 @@ main | my-project (active) workflows: - simple-workflow -main | my-project +staging | my-project workflows: - simple-workflow diff --git a/packages/cli/test/util/load-plan.test.ts b/packages/cli/test/util/load-plan.test.ts index 6e50dde63..388495100 100644 --- a/packages/cli/test/util/load-plan.test.ts +++ b/packages/cli/test/util/load-plan.test.ts @@ -4,11 +4,7 @@ import { createMockLogger } from '@openfn/logger'; import type { Job } from '@openfn/lexicon'; import loadPlan from '../../src/util/load-plan'; -import { - collectionsEndpoint, - collectionsVersion, - Opts, -} from '../../src/options'; +import { Opts } from '../../src/options'; const logger = createMockLogger(undefined, { level: 'debug' }); @@ -38,6 +34,7 @@ test.beforeEach(() => { jobs: [{ id: 'a', expression: 'x()' }], }), 'test/wf.json': JSON.stringify(sampleXPlan), + 'test/wf-flat.json': JSON.stringify(sampleXPlan.workflow), 'test/wf-err.json': '!!!', }); }); @@ -163,7 +160,7 @@ test.serial( } ); -test.serial('xplan: load a plan from workflow path', async (t) => { +test.serial('xplan: load an old-style plan from workflow path', async (t) => { const opts = { workflowPath: 'test/wf.json', expandAdaptors: true, @@ -176,6 +173,22 @@ test.serial('xplan: load a plan from workflow path', async (t) => { t.deepEqual(plan, sampleXPlan); }); +test.serial('xplan: load a new flat plan from workflow path', async (t) => { + const opts = { + workflowPath: 'test/wf-flat.json', + expandAdaptors: true, + plan: {}, + }; + + const plan = await loadPlan(opts, logger); + + t.truthy(plan); + t.deepEqual(plan, { + options: {}, // no options here! 
+ workflow: sampleXPlan.workflow, + }); +}); + test.serial('xplan: expand adaptors', async (t) => { const opts = { workflowPath: 'test/wf.json', @@ -429,3 +442,217 @@ test.serial('xplan: append collections', async (t) => { collections_token: opts.apiKey, }); }); + +test.serial( + 'xplan: append collections to existing credential object', + async (t) => { + const opts = { + workflowPath: 'test/wf.json', + collectionsVersion: '1.1.1', + collectionsEndpoint: 'https://localhost:4000/', + apiKey: 'abc', + }; + + const plan = createPlan([ + { + id: 'a', + expression: 'collections.get()', + adaptors: ['@openfn/language-common@1.0.0'], + configuration: { + x: 1, + }, + }, + ]); + + mock({ + 'test/wf.json': JSON.stringify(plan), + }); + + const result = await loadPlan(opts, logger); + t.truthy(result); + + const step = result.workflow.steps[0] as Job; + t.deepEqual(step.adaptors, [ + '@openfn/language-common@1.0.0', + '@openfn/language-collections@1.1.1', + ]); + + t.deepEqual(step.configuration, { + collections_endpoint: `${opts.collectionsEndpoint}/collections`, + collections_token: opts.apiKey, + x: 1, + }); + } +); + +test.serial( + 'xplan: load a workflow.yaml without top workflow key', + async (t) => { + mock({ + 'test/wf.yaml': ` +name: wf +steps: + - id: a + adaptors: [] + expression: x() +`, + }); + const opts = { + path: 'test/wf.yaml', + }; + + const plan = await loadPlan(opts, logger); + + t.truthy(plan); + // Note that options are lost in this design! + t.deepEqual(plan, { workflow: sampleXPlan.workflow, options: {} }); + } +); + +test.serial( + 'xplan: load a workflow.yaml without top workflow key and options', + async (t) => { + mock({ + 'test/wf.yaml': ` +name: wf +steps: + - id: a + adaptors: [] + expression: x() +options: + start: x +`, + }); + const opts = { + path: 'test/wf.yaml', + }; + + const plan = await loadPlan(opts, logger); + + t.truthy(plan); + // Note that options are lost in this design! 
+ t.deepEqual(plan, { + workflow: sampleXPlan.workflow, + options: { start: 'x' }, + }); + } +); + +test.serial('xplan: load a workflow.yaml with top workflow key', async (t) => { + mock({ + 'test/wf.yaml': ` +workflow: + name: wf + steps: + - id: a + adaptors: [] + expression: x() +options: + start: a +`, + }); + const opts = { + path: 'test/wf.yaml', + }; + + const plan = await loadPlan(opts, logger); + + t.truthy(plan); + t.deepEqual(plan, sampleXPlan); +}); + +test.serial('xplan: load a workflow through a Workspace', async (t) => { + mock({ + '/tmp/workflows/wf.yaml': ` +id: wf +steps: + - id: a + expression: x() +`, + '/tmp/openfn.yaml': ` +dirs: + workflows: /tmp/workflows +`, + }); + + const opts = { + // TODO is worked out through yargs via the inputPath option + workflowName: 'wf', + workspace: '/tmp', + }; + + const plan = await loadPlan(opts, logger); + t.truthy(plan); + t.deepEqual(plan, { + workflow: { + id: 'wf', + steps: [{ id: 'a', expression: 'x()', adaptors: [] }], + history: [], + }, + options: {}, + }); +}); + +test.serial('xplan: throw if a named workflow does not exist', async (t) => { + mock({ + '/tmp/workflows/wf.yaml': ` +id: wf +steps: + - id: a + expression: x() +`, + '/tmp/openfn.yaml': ` +dirs: + workflows: /tmp/workflows +`, + }); + + const opts = { + workflowName: 'JAM', + workspace: '/tmp', + }; + + await t.throwsAsync(() => loadPlan(opts, logger), { + message: /could not find workflow "jam"/i, + }); +}); + +test.serial( + 'xplan: load a workflow through a project .yaml and apply the credentials map by default', + async (t) => { + mock({ + '/tmp/workflows/wf.yaml': ` +id: wf +steps: + - id: a + expression: x() +start: a +`, + '/tmp/openfn.yaml': ` +credentials: /creds.yaml +dirs: + workflows: /tmp/workflows +`, + '/creds.yaml': `x: y`, + }); + const opts = { + workflowName: 'wf', + workspace: '/tmp', + }; + + const plan = await loadPlan(opts, logger); + + t.truthy(plan); + t.deepEqual(plan, { + workflow: { + id: 'wf', + steps: [{ id: 
'a', expression: 'x()', adaptors: [] }], + history: [], + start: 'a', + }, + options: {}, + }); + + t.is(opts.credentials, '/creds.yaml'); + } +); diff --git a/packages/cli/test/util/validate-adaptors.test.ts b/packages/cli/test/util/validate-adaptors.test.ts index 38ce2bff7..2e7a209c2 100644 --- a/packages/cli/test/util/validate-adaptors.test.ts +++ b/packages/cli/test/util/validate-adaptors.test.ts @@ -10,8 +10,8 @@ test.afterEach(() => { mockfs.restore(); }); -test.serial('should warn if no adaptor is passed', async (t) => { - await validateAdaptors({ adaptors: [] }, logger); +test.serial('should warn if expression passed with no adaptor', async (t) => { + await validateAdaptors({ expressionPath: 'job.js', adaptors: [] }, logger); t.assert(logger._history.length > 1); const { message, level } = logger._parse(logger._history[0]); t.is(level, 'warn'); @@ -19,18 +19,47 @@ }); test.serial( - 'should not warn if no adaptor is passed but skip-adaptor-warning is set', + 'should NOT warn if no adaptor if skip-adaptor-warning is set', async (t) => { await validateAdaptors( - { adaptors: [], skipAdaptorValidation: true }, + { expressionPath: 'job.js', adaptors: [], skipAdaptorValidation: true }, logger ); t.is(logger._history.length, 0); } ); -test.serial('should not warn if a workflow is being used', async (t) => { - await validateAdaptors({ adaptors: [], workflowPath: 'wf.json' }, logger); +test.serial( + 'should NOT warn if a workflow json is being used (workflow path)', + async (t) => { + await validateAdaptors({ adaptors: [], workflowPath: 'wf.json' }, logger); + t.is(logger._history.length, 0); + } +); + +test.serial( + 'should NOT warn if a workflow yaml is being used (workflow path)', + async (t) => { + await validateAdaptors({ adaptors: [], workflowPath: 'wf.yaml' }, logger); + t.is(logger._history.length, 0); + } +); + +test.serial('should NOT warn if a workflow json is being used', async (t) => {
+ await validateAdaptors({ adaptors: [], planPath: 'wf.json' }, logger); + t.is(logger._history.length, 0); +}); + +test.serial('should NOT warn if a workflow yaml is being used', async (t) => { + await validateAdaptors({ adaptors: [], planPath: 'wf.yaml' }, logger); + t.is(logger._history.length, 0); +}); + +test.serial('should NOT warn if a workflow name is used', async (t) => { + await validateAdaptors( + { adaptors: [], workflowName: 'my-workflow' } as any, + logger + ); t.is(logger._history.length, 0); }); diff --git a/packages/cli/turtle-power/output.json b/packages/cli/turtle-power/output.json new file mode 100644 index 000000000..3699bcced --- /dev/null +++ b/packages/cli/turtle-power/output.json @@ -0,0 +1,3 @@ +{ + "x": 1 +} \ No newline at end of file diff --git a/packages/engine-multi/CHANGELOG.md b/packages/engine-multi/CHANGELOG.md index 55ef919fe..d927ada96 100644 --- a/packages/engine-multi/CHANGELOG.md +++ b/packages/engine-multi/CHANGELOG.md @@ -1,5 +1,13 @@ # engine-multi +## 1.10.1 + +### Patch Changes + +- Updated dependencies [b262d10] +- Updated dependencies [147a431] + - @openfn/runtime@1.8.1 + ## 1.10.0 ### Minor Changes diff --git a/packages/engine-multi/package.json b/packages/engine-multi/package.json index a831f1d70..4f32735af 100644 --- a/packages/engine-multi/package.json +++ b/packages/engine-multi/package.json @@ -1,6 +1,6 @@ { "name": "@openfn/engine-multi", - "version": "1.10.0", + "version": "1.10.1", "description": "Multi-process runtime engine", "main": "dist/index.js", "type": "module", diff --git a/packages/lexicon/core.d.ts b/packages/lexicon/core.d.ts index 15a198974..5e596d5e5 100644 --- a/packages/lexicon/core.d.ts +++ b/packages/lexicon/core.d.ts @@ -87,6 +87,7 @@ export interface WorkspaceFile { } export interface WorkspaceConfig { + credentials?: string; dirs: { workflows: string; projects: string; @@ -161,6 +162,12 @@ export type Workflow = { // holds history information of a workflow history?: string[]; + + /** The 
default start node - the one the workflow was designed for (the trigger) */ + start?: string; + + /** extra options from the app. Not really used */ + options?: any; }; export type StepId = string; diff --git a/packages/lightning-mock/CHANGELOG.md b/packages/lightning-mock/CHANGELOG.md index b1a421891..dcff0e2b9 100644 --- a/packages/lightning-mock/CHANGELOG.md +++ b/packages/lightning-mock/CHANGELOG.md @@ -1,5 +1,14 @@ # @openfn/lightning-mock +## 2.4.2 + +### Patch Changes + +- Updated dependencies [b262d10] +- Updated dependencies [147a431] + - @openfn/runtime@1.8.1 + - @openfn/engine-multi@1.10.1 + ## 2.4.1 ### Patch Changes diff --git a/packages/lightning-mock/package.json b/packages/lightning-mock/package.json index 813ad64b0..1dcabee7c 100644 --- a/packages/lightning-mock/package.json +++ b/packages/lightning-mock/package.json @@ -1,6 +1,6 @@ { "name": "@openfn/lightning-mock", - "version": "2.4.1", + "version": "2.4.2", "private": true, "description": "A mock Lightning server", "main": "dist/index.js", diff --git a/packages/project/CHANGELOG.md b/packages/project/CHANGELOG.md index f12c59f0b..8bb546557 100644 --- a/packages/project/CHANGELOG.md +++ b/packages/project/CHANGELOG.md @@ -1,5 +1,11 @@ # @openfn/project +## 0.11.0 + +### Minor Changes + +- d1a0e7c: Allow workflows to be run directly through a project + ## 0.10.1 ### Patch Changes diff --git a/packages/project/package.json b/packages/project/package.json index a0c148af5..b3185dbbc 100644 --- a/packages/project/package.json +++ b/packages/project/package.json @@ -1,6 +1,6 @@ { "name": "@openfn/project", - "version": "0.10.1", + "version": "0.11.0", "description": "Read, serialize, replicate and sync OpenFn projects", "scripts": { "test": "pnpm ava", diff --git a/packages/project/src/Workflow.ts b/packages/project/src/Workflow.ts index 2a5e3c98b..0f57dc94d 100644 --- a/packages/project/src/Workflow.ts +++ b/packages/project/src/Workflow.ts @@ -30,7 +30,16 @@ class Workflow { // history needs to be 
on workflow object. this.workflow.history = workflow.history?.length ? workflow.history : []; - const { id, name, openfn, steps, history, ...options } = workflow; + const { + id, + name, + openfn, + steps, + history, + start: _start, + options, + ...rest + } = workflow; if (!(id || name)) { throw new Error('A Workflow MUST have a name or id'); } @@ -45,7 +54,7 @@ class Workflow { } this.openfn = openfn; - this.options = options; + this.options = Object.assign({}, options, rest); this._buildIndex(); } @@ -54,6 +63,14 @@ class Workflow { return this.workflow.steps; } + get start(): string | undefined { + return this.workflow.start; + } + + set start(s: string) { + this.workflow.start = s; + } + _buildIndex() { for (const step of this.workflow.steps) { const s = step as any; diff --git a/packages/project/src/Workspace.ts b/packages/project/src/Workspace.ts index 2e06265fc..16289bc34 100644 --- a/packages/project/src/Workspace.ts +++ b/packages/project/src/Workspace.ts @@ -22,6 +22,8 @@ export class Workspace { // TODO activeProject should be the actual project activeProject?: l.ProjectMeta; + root: string; + private projects: Project[] = []; private projectPaths = new Map(); private isValid: boolean = false; @@ -30,6 +32,7 @@ export class Workspace { // Set validate to false to suppress warnings if a Workspace doesn't exist // This is appropriate if, say, fetching a project for the first time constructor(workspacePath: string, logger?: Logger, validate = true) { + this.root = workspacePath; this.logger = logger ?? 
createLogger('Workspace', { level: 'info' }); let context = { workspace: undefined, project: undefined }; @@ -112,6 +115,14 @@ export class Workspace { ); } + getCheckedOutProject() { + return Project.from('fs', { root: this.root, config: this.config }); + } + + getCredentialMap() { + return this.config.credentials; + } + // TODO this needs to return default values // We should always rely on the workspace to load these values getConfig(): Partial { diff --git a/packages/project/src/parse/from-app-state.ts b/packages/project/src/parse/from-app-state.ts index 5224d6fc3..9f08e83c2 100644 --- a/packages/project/src/parse/from-app-state.ts +++ b/packages/project/src/parse/from-app-state.ts @@ -107,6 +107,10 @@ export const mapWorkflow = (workflow: Provisioner.Workflow) => { workflow.triggers.forEach((trigger: Provisioner.Trigger) => { const { type, ...otherProps } = trigger; + if (!mapped.start) { + mapped.start = `trigger-${type}`; + } + const connectedEdges = edges.filter( (e) => e.source_trigger_id === trigger.id ); diff --git a/packages/project/src/parse/from-fs.ts b/packages/project/src/parse/from-fs.ts index 612c2b838..d1ff194d2 100644 --- a/packages/project/src/parse/from-fs.ts +++ b/packages/project/src/parse/from-fs.ts @@ -15,6 +15,7 @@ import { Logger } from '@openfn/logger'; export type FromFsConfig = { root: string; + config?: Partial; logger?: Logger; }; @@ -27,7 +28,7 @@ export const parseProject = async (options: FromFsConfig) => { const { type, content } = findWorkspaceFile(root); const context = loadWorkspaceFile(content, type as any); - const config = buildConfig(context.workspace); + const config = buildConfig(options.config ?? context.workspace); const proj: any = { id: context.project?.id, @@ -44,7 +45,7 @@ export const parseProject = async (options: FromFsConfig) => { const workflowDir = (config as any).workflowRoot ?? config.dirs?.workflows ?? 'workflows'; const fileType = config.formats?.workflow ?? 
'yaml'; - const pattern = `${root}/${workflowDir}/*/*.${fileType}`; + const pattern = path.resolve(root, workflowDir) + `/**/*.${fileType}`; const candidateWfs = await glob(pattern, { ignore: ['**node_modules/**', '**tmp**'], }); @@ -52,8 +53,24 @@ export const parseProject = async (options: FromFsConfig) => { for (const filePath of candidateWfs) { const candidate = await fs.readFile(filePath, 'utf-8'); try { - const wf = + let wf = fileType === 'yaml' ? yamlToJson(candidate) : JSON.parse(candidate); + + if (wf.workflow) { + // Support the { workflow, options } workflow format + // TODO Would like to remove this on the next major + if (wf.options) { + const { start, ...rest } = wf.options; + if (start) { + wf.workflow.start = start; + } + if (rest) { + wf.workflow.options = Object.assign({}, wf.workflow.options, rest); + } + } + wf = wf.workflow; + } + if (wf.id && Array.isArray(wf.steps)) { //logger?.log('Loading workflow at ', filePath); // TODO logger.debug for (const step of wf.steps) { @@ -71,7 +88,7 @@ export const parseProject = async (options: FromFsConfig) => { } } - // Now track UUIDs for edges against state + // convert edge conditions for (const target in step.next || {}) { if (typeof step.next[target] === 'boolean') { const bool = step.next[target]; diff --git a/packages/project/src/parse/from-project.ts b/packages/project/src/parse/from-project.ts index 68399abe1..e99eabcd1 100644 --- a/packages/project/src/parse/from-project.ts +++ b/packages/project/src/parse/from-project.ts @@ -3,7 +3,7 @@ import * as l from '@openfn/lexicon'; import Project from '../Project'; import ensureJson from '../util/ensure-json'; import { Provisioner } from '@openfn/lexicon/lightning'; -import fromAppState from './from-app-state'; +import fromAppState, { fromAppStateConfig } from './from-app-state'; import { WithMeta } from '../Workflow'; // Load a project from any JSON or yaml representation @@ -32,25 +32,23 @@ export default ( // first ensure the data is in JSON format 
let rawJson = ensureJson(data); - let json; if (rawJson.cli?.version ?? rawJson.version /*deprecated*/) { // If there's any version key at all, its at least v2 - json = from_v2(rawJson as SerializedProject); - } else { - json = from_v1(rawJson as Provisioner.Project); + return new Project(from_v2(rawJson as SerializedProject), config); } - return new Project(json, config); + return from_v1(rawJson as Provisioner.Project, config as fromAppStateConfig); }; -const from_v1 = (data: Provisioner.Project) => { - // TODO is there any way to look up the config file? - // But we have no notion of a working dir here - // Maybe there are optional options that can be provided - // by from fs or from path - return fromAppState(data); +// TODO test that config (alias) works +const from_v1 = ( + data: Provisioner.Project, + config: fromAppStateConfig = {} +) => { + return fromAppState(data, {}, config); }; +// TODO this should return a Project really! const from_v2 = (data: SerializedProject) => { // nothing to do // (When we add v3, we'll ned to migrate through this) diff --git a/packages/project/src/serialize/to-fs.ts b/packages/project/src/serialize/to-fs.ts index ca9f59956..e934ad624 100644 --- a/packages/project/src/serialize/to-fs.ts +++ b/packages/project/src/serialize/to-fs.ts @@ -48,6 +48,7 @@ export const extractWorkflow = (project: Project, workflowId: string) => { const wf = { id: workflow.id, name: workflow.name, + start: workflow.start, // Note: if no options are defined, options will serialize to an empty object // Not crazy about this - maybe we should do something better? Or do we like the consistency? 
options: workflow.options, diff --git a/packages/project/src/util/config.ts b/packages/project/src/util/config.ts index 47dd1494b..67f7e9fec 100644 --- a/packages/project/src/util/config.ts +++ b/packages/project/src/util/config.ts @@ -8,6 +8,7 @@ import Project from '../Project'; // Initialize and default Workspace (and Project) config export const buildConfig = (config: Partial = {}) => ({ + credentials: 'credentials.yaml', ...config, dirs: { projects: config.dirs?.projects ?? '.projects', diff --git a/packages/project/test/fixtures/sample-v2-project.ts b/packages/project/test/fixtures/sample-v2-project.ts index 049ccb27e..05e9be6fb 100644 --- a/packages/project/test/fixtures/sample-v2-project.ts +++ b/packages/project/test/fixtures/sample-v2-project.ts @@ -31,6 +31,7 @@ export const json: SerializedProject = { id: 'workflow', openfn: { uuid: 1 }, history: [], + start: 'trigger', }, ], }; @@ -67,4 +68,5 @@ workflows: openfn: uuid: 1 history: [] + start: trigger `; diff --git a/packages/project/test/parse/from-app-state.test.ts b/packages/project/test/parse/from-app-state.test.ts index 6f509bc04..699853cb4 100644 --- a/packages/project/test/parse/from-app-state.test.ts +++ b/packages/project/test/parse/from-app-state.test.ts @@ -86,6 +86,7 @@ test('should create a Project from prov state with a workflow', (t) => { id: 'my-workflow', name: 'My Workflow', history: [], + start: 'trigger-webhook', steps: [ { id: 'trigger', diff --git a/packages/project/test/parse/from-fs.test.ts b/packages/project/test/parse/from-fs.test.ts index 1a3d1e103..c5fa400a2 100644 --- a/packages/project/test/parse/from-fs.test.ts +++ b/packages/project/test/parse/from-fs.test.ts @@ -40,6 +40,7 @@ test.serial('should load workspace config from json', async (t) => { t.deepEqual(project.config, { x: 1, + credentials: 'credentials.yaml', dirs: { projects: '.projects', workflows: 'workflows' }, formats: { openfn: 'json', project: 'json', workflow: 'json' }, }); @@ -62,13 +63,14 @@ 
test.serial('should load workspace config from yaml', async (t) => { const project = await parseProject({ root: '/ws' }); t.deepEqual(project.config, { + credentials: 'credentials.yaml', x: 1, dirs: { projects: '.projects', workflows: 'workflows' }, formats: { openfn: 'yaml', project: 'yaml', workflow: 'yaml' }, }); }); -test.serial('should load single workflow', async (t) => { +test.serial('should load single workflow in new flat format', async (t) => { mockFile('/ws/openfn.yaml', buildConfig()); mockFile('/ws/workflows/my-workflow/my-workflow.yaml', { @@ -80,6 +82,7 @@ test.serial('should load single workflow', async (t) => { expression: 'job.js', }, ], + start: 'a', }); mockFile('/ws/workflows/my-workflow/job.js', `fn(s => s)`); @@ -92,8 +95,46 @@ test.serial('should load single workflow', async (t) => { t.truthy(wf); t.is(wf.id, 'my-workflow'); t.is(wf.name, 'My Workflow'); + t.is(wf.start, 'a'); }); +// hmm, maybe I shouldn't support this, because it puts some wierd stuff in the code +// and new CLI will just use the new format +test.serial( + 'should load single workflow in old { workflow, options } format', + async (t) => { + mockFile('/ws/openfn.yaml', buildConfig()); + + mockFile('/ws/workflows/my-workflow/my-workflow.yaml', { + workflow: { + id: 'my-workflow', + name: 'My Workflow', + steps: [ + { + id: 'a', + expression: 'job.js', + }, + ], + }, + options: { + start: 'a', + }, + }); + + mockFile('/ws/workflows/my-workflow/job.js', `fn(s => s)`); + + const project = await parseProject({ root: '/ws' }); + + t.is(project.workflows.length, 1); + + const wf = project.getWorkflow('my-workflow'); + t.truthy(wf); + t.is(wf.id, 'my-workflow'); + t.is(wf.name, 'My Workflow'); + t.is(wf.start, 'a'); + } +); + test.serial('should load single workflow from json', async (t) => { mockFile( '/ws/openfn.yaml', diff --git a/packages/project/test/parse/from-path.test.ts b/packages/project/test/parse/from-path.test.ts index 9d31f65bf..dd47a5e56 100644 --- 
a/packages/project/test/parse/from-path.test.ts +++ b/packages/project/test/parse/from-path.test.ts @@ -76,6 +76,7 @@ test.serial('should use workspace config', async (t) => { t.is(project.name, proj.name); t.deepEqual(project.config, { + credentials: 'credentials.yaml', dirs: { projects: 'p', workflows: 'w', diff --git a/packages/project/test/parse/from-project.test.ts b/packages/project/test/parse/from-project.test.ts index d6c57644d..21911f0e4 100644 --- a/packages/project/test/parse/from-project.test.ts +++ b/packages/project/test/parse/from-project.test.ts @@ -3,20 +3,7 @@ import state_v1 from '../fixtures/sample-v1-project'; import Project from '../../src/Project'; import * as v2 from '../fixtures/sample-v2-project'; -test('import from a v1 state as JSON', async (t) => { - const proj = await Project.from('project', state_v1, {}); - - // make a few basic assertions about the project - t.is(proj.id, 'my-workflow'); - t.is(proj.name, 'My Workflow'); - t.is(proj.openfn.uuid, 'e16c5f09-f0cb-4ba7-a4c2-73fcb2f29d00'); - t.is(proj.options.retention_policy, 'retain_all'); - - t.is(proj.workflows.length, 1); -}); - -test('import from a v1 state as YAML', async (t) => { - const yaml = `id: '1234' +const v1_yaml = `id: '1234' name: aaa description: a project project_credentials: [] @@ -57,7 +44,21 @@ workflows: source_trigger_id: 4a06289c-15aa-4662-8dc6-f0aaacd8a058 condition_type: always `; - const proj = await Project.from('project', yaml, {}); + +test('import from a v1 state as JSON', async (t) => { + const proj = await Project.from('project', state_v1, {}); + + // make a few basic assertions about the project + t.is(proj.id, 'my-workflow'); + t.is(proj.name, 'My Workflow'); + t.is(proj.openfn.uuid, 'e16c5f09-f0cb-4ba7-a4c2-73fcb2f29d00'); + t.is(proj.options.retention_policy, 'retain_all'); + + t.is(proj.workflows.length, 1); +}); + +test('import from a v1 state as YAML', async (t) => { + const proj = await Project.from('project', v1_yaml, {}); // make a few basic 
assertions about the project t.is(proj.id, 'aaa'); @@ -86,6 +87,7 @@ test('import from a v2 project as JSON', async (t) => { uuid: 1, }, history: [], + start: 'trigger', steps: [ { name: 'b', @@ -140,6 +142,7 @@ test('import from a v2 project as YAML', async (t) => { openfn: { uuid: 1, }, + start: 'trigger', history: [], steps: [ { @@ -170,7 +173,38 @@ test('import from a v2 project as YAML', async (t) => { }); }); -test('import with custom config', async (t) => { +test('import v1 with custom config', async (t) => { + const config = { + x: 1234, + dirs: { + projects: 'p', + workflows: 'w', + }, + alias: 'staging', + format: 'yaml', + }; + const proj = await Project.from('project', v1_yaml, config); + t.is(proj.id, 'aaa'); + + t.is(proj.cli.alias, 'staging'); + + // note that alias and format should have been removed from config + t.deepEqual(proj.config, { + credentials: 'credentials.yaml', + dirs: { + projects: 'p', + workflows: 'w', + }, + formats: { + openfn: 'yaml', + project: 'yaml', + workflow: 'yaml', + }, + x: 1234, + }); +}); + +test('import v2 with custom config', async (t) => { const config = { x: 1234, dirs: { @@ -186,6 +220,7 @@ test('import with custom config', async (t) => { // note that alias should have been removed from config t.deepEqual(proj.config, { + credentials: 'credentials.yaml', dirs: { projects: 'p', workflows: 'w', diff --git a/packages/project/test/serialize/to-app-state.test.ts b/packages/project/test/serialize/to-app-state.test.ts index 75a4d195a..68a022140 100644 --- a/packages/project/test/serialize/to-app-state.test.ts +++ b/packages/project/test/serialize/to-app-state.test.ts @@ -256,6 +256,38 @@ test('should handle credentials', (t) => { t.is(job.project_credential_id, 'p'); }); +test('should ignore workflow start keys', (t) => { + const data = { + id: 'my-project', + workflows: [ + { + id: 'wf', + start: 'step', + steps: [ + { + id: 'trigger', + type: 'webhook', + next: { + step: {}, + }, + }, + { + id: 'step', + expression: 
'.', + configuration: 'p', + openfn: { + keychain_credential_id: 'k', + }, + }, + ], + }, + ], + }; + + const state = toAppState(new Project(data), { format: 'json' }); + t.falsy(state.workflows[0].start); +}); + test.todo('handle edge labels'); test('serialize steps and trigger in alphabetical order', (t) => { diff --git a/packages/project/test/serialize/to-fs.test.ts b/packages/project/test/serialize/to-fs.test.ts index 6f558ae20..12ce41f43 100644 --- a/packages/project/test/serialize/to-fs.test.ts +++ b/packages/project/test/serialize/to-fs.test.ts @@ -20,6 +20,7 @@ test('extractWorkflow: single simple workflow (yaml by default)', (t) => { id: 'my-workflow', name: 'My Workflow', steps: [step], + start: 'step', // should be ignored because this lives in the project file openfn: { id: '72ca3eb0-042c-47a0-a2a1-a545ed4a8406', @@ -31,11 +32,11 @@ test('extractWorkflow: single simple workflow (yaml by default)', (t) => { const { path, content } = extractWorkflow(project, 'my-workflow'); t.is(path, 'workflows/my-workflow/my-workflow.yaml'); - // TODO is the empty options object correct here?? 
t.deepEqual( content, `id: my-workflow name: My Workflow +start: step options: {} steps: - id: step @@ -215,6 +216,7 @@ test('toFs: extract a project with 1 workflow and 1 step', (t) => { const config = JSON.parse(files['openfn.json']); t.deepEqual(config, { workspace: { + credentials: 'credentials.yaml', formats: { openfn: 'json', project: 'yaml', workflow: 'json' }, dirs: { projects: '.projects', workflows: 'workflows' }, }, diff --git a/packages/project/test/serialize/to-project.test.ts b/packages/project/test/serialize/to-project.test.ts index 533350059..299356cb4 100644 --- a/packages/project/test/serialize/to-project.test.ts +++ b/packages/project/test/serialize/to-project.test.ts @@ -31,6 +31,7 @@ const createProject = () => { }); // hack delete proj.workflows[0].steps[0].name; + proj.workflows[0].start = 'trigger'; return proj; }; diff --git a/packages/project/test/util/config.test.ts b/packages/project/test/util/config.test.ts index cb336ad36..82cde69a4 100644 --- a/packages/project/test/util/config.test.ts +++ b/packages/project/test/util/config.test.ts @@ -176,6 +176,7 @@ test('generate openfn.yaml', (t) => { uuid: 1234 id: my-project workspace: + credentials: credentials.yaml formats: openfn: yaml project: yaml diff --git a/packages/project/test/workspace.test.ts b/packages/project/test/workspace.test.ts index 141b3cb96..0410c0cd9 100644 --- a/packages/project/test/workspace.test.ts +++ b/packages/project/test/workspace.test.ts @@ -260,6 +260,7 @@ test('load from custom path', (t) => { test('load config', (t) => { const ws = new Workspace('/ws'); t.deepEqual(ws.config, { + credentials: 'credentials.yaml', formats: { openfn: 'yaml', project: 'yaml', diff --git a/packages/runtime/CHANGELOG.md b/packages/runtime/CHANGELOG.md index aaec325b2..2ef24d31b 100644 --- a/packages/runtime/CHANGELOG.md +++ b/packages/runtime/CHANGELOG.md @@ -1,5 +1,12 @@ # @openfn/runtime +## 1.8.1 + +### Patch Changes + +- b262d10: Support a start key on a workflow (different from 
a start option) +- 147a431: Tweak error messaging when state objects exceed size limit + ## 1.8.0 ### Minor Changes diff --git a/packages/runtime/package.json b/packages/runtime/package.json index 9e6e62aa9..27d8478e4 100644 --- a/packages/runtime/package.json +++ b/packages/runtime/package.json @@ -1,6 +1,6 @@ { "name": "@openfn/runtime", - "version": "1.8.0", + "version": "1.8.1", "description": "Job processing runtime.", "type": "module", "exports": { diff --git a/packages/runtime/src/execute/plan.ts b/packages/runtime/src/execute/plan.ts index 33914d746..e04ebed28 100644 --- a/packages/runtime/src/execute/plan.ts +++ b/packages/runtime/src/execute/plan.ts @@ -54,7 +54,7 @@ const executePlan = async ( } const queue: Array<{ stepName: string; input: any }> = [ - { stepName: options.start, input }, + { stepName: options.start ?? workflow.start, input }, ]; // count how many times each step has been called diff --git a/packages/runtime/src/execute/step.ts b/packages/runtime/src/execute/step.ts index d701ab337..819c35ade 100644 --- a/packages/runtime/src/execute/step.ts +++ b/packages/runtime/src/execute/step.ts @@ -90,8 +90,8 @@ const prepareFinalState = async ( if (state) { try { await ensureStateSize(state, stateLimit_mb); - } catch (e) { - logger.error('Critical error processing state:'); + } catch (e: any) { + logger.error('Critical error processing state: ', e.message); throw e; } diff --git a/packages/runtime/src/types.ts b/packages/runtime/src/types.ts index 4a9ac5929..957696233 100644 --- a/packages/runtime/src/types.ts +++ b/packages/runtime/src/types.ts @@ -44,8 +44,11 @@ export type CompiledExecutionPlan = { globals?: string; steps: Record; credentials?: Record; + /** The default start node - the one the workflow was designed for (the trigger) */ + start?: StepId; }; options: WorkflowOptions & { + /** User-specified start node */ start: StepId; }; }; diff --git a/packages/runtime/test/execute/plan.test.ts b/packages/runtime/test/execute/plan.test.ts 
index 4666e29c0..3efc6c3a1 100644 --- a/packages/runtime/test/execute/plan.test.ts +++ b/packages/runtime/test/execute/plan.test.ts @@ -589,7 +589,7 @@ test('only execute one job in a two-job execution plan', async (t) => { t.is(result.data.x, 1); }); -test('execute a two-job execution plan with custom start', async (t) => { +test('execute a two-job execution plan with an option start', async (t) => { const plan = createPlan( [ { @@ -609,6 +609,24 @@ test('execute a two-job execution plan with custom start', async (t) => { t.is(result.data.result, 11); }); +test('execute a two-job execution plan with a custom start', async (t) => { + const plan = createPlan([ + { + id: 'job1', + expression: 'export default [() => ({ data: { result: 11 } }) ]', + }, + { + id: 'job2', + expression: 'export default [() => ({ data: { result: 1 } }) ]', + next: { job1: true }, + }, + ]); + plan.workflow.start = 'job2'; + + const result: any = await executePlan(plan, {}, {}, mockLogger); + t.is(result.data.result, 11); +}); + test('Return when there are no more edges', async (t) => { const plan = createPlan( [ diff --git a/packages/ws-worker/CHANGELOG.md b/packages/ws-worker/CHANGELOG.md index 9deb376e3..0566877c0 100644 --- a/packages/ws-worker/CHANGELOG.md +++ b/packages/ws-worker/CHANGELOG.md @@ -1,5 +1,14 @@ # ws-worker +## 1.21.1 + +### Patch Changes + +- Updated dependencies [b262d10] +- Updated dependencies [147a431] + - @openfn/runtime@1.8.1 + - @openfn/engine-multi@1.10.1 + ## 1.21.0 ### Minor Changes diff --git a/packages/ws-worker/package.json b/packages/ws-worker/package.json index 149ab78b0..165cb788c 100644 --- a/packages/ws-worker/package.json +++ b/packages/ws-worker/package.json @@ -1,6 +1,6 @@ { "name": "@openfn/ws-worker", - "version": "1.21.0", + "version": "1.21.1", "description": "A Websocket Worker to connect Lightning to a Runtime Engine", "main": "dist/index.js", "type": "module",