diff --git a/integration-tests/cli/CHANGELOG.md b/integration-tests/cli/CHANGELOG.md index dd9b43eaa..75268c427 100644 --- a/integration-tests/cli/CHANGELOG.md +++ b/integration-tests/cli/CHANGELOG.md @@ -1,5 +1,13 @@ # @openfn/integration-tests-cli +## 1.0.10 + +### Patch Changes + +- Updated dependencies [e33e362] +- Updated dependencies [1b5e837] + - @openfn/project@0.12.0 + ## 1.0.9 ### Patch Changes diff --git a/integration-tests/cli/package.json b/integration-tests/cli/package.json index 668c1421f..553480cad 100644 --- a/integration-tests/cli/package.json +++ b/integration-tests/cli/package.json @@ -1,7 +1,7 @@ { "name": "@openfn/integration-tests-cli", "private": true, - "version": "1.0.9", + "version": "1.0.10", "description": "CLI integration tests", "author": "Open Function Group ", "license": "ISC", diff --git a/integration-tests/cli/test/project-v1.test.ts b/integration-tests/cli/test/project-v1.test.ts index 078ca0e42..17f74171e 100644 --- a/integration-tests/cli/test/project-v1.test.ts +++ b/integration-tests/cli/test/project-v1.test.ts @@ -27,7 +27,8 @@ requires_mfa: false retention_policy: retain_all version_history: [] workflows: - - name: my workflow + my-workflow: + name: my workflow id: 0afbefab-5824-4911-aaae-a19f20106dec concurrency: null inserted_at: 2025-10-07T10:00:23Z @@ -35,18 +36,21 @@ workflows: deleted_at: null lock_version: 2 jobs: - - name: Transform data + transform-data: + name: Transform data body: | fn(() => ({ x: 1})) adaptor: "@openfn/language-common@latest" id: b8b780f3-98dd-4244-880b-e534d8f24547 project_credential_id: null triggers: - - type: webhook + webhook: + type: webhook enabled: true id: 3b4a47c0-7242-4f0c-8886-838e34762654 edges: - - id: 33dce70f-047f-4508-82fd-950eb508519b + webhook->transform-data: + id: 33dce70f-047f-4508-82fd-950eb508519b target_job_id: b8b780f3-98dd-4244-880b-e534d8f24547 enabled: true source_trigger_id: 3b4a47c0-7242-4f0c-8886-838e34762654 @@ -73,7 +77,8 @@ requires_mfa: false retention_policy: retain_all version_history: [] workflows: - - name: my workflow + my-workflow: + name: my workflow id: 9e2cc86a-8896-4a5a-9467-9c4128207fd3 concurrency: null inserted_at: 2025-10-07T10:00:36Z @@ -81,17 +86,20 @@ workflows: deleted_at: null lock_version: 3 jobs: - - name: Transform data + transform-data: + name: Transform data body: log('hello world') adaptor: "@openfn/language-common@latest" id: 8d627978-ebb9-4fb2-8cda-9b31c10c963e project_credential_id: null triggers: - - type: webhook + webhook: + type: webhook enabled: true id: 7bb476cc-0292-4573-89d0-b13417bc648e edges: - - id: 4c68d22a-4ba7-4d8f-8103-6f4f15c4e7d2 + webhook->transform-data: + id: 4c68d22a-4ba7-4d8f-8103-6f4f15c4e7d2 target_job_id: 8d627978-ebb9-4fb2-8cda-9b31c10c963e enabled: true source_trigger_id: 7bb476cc-0292-4573-89d0-b13417bc648e @@ -133,10 +141,10 @@ test.serial('Checkout a project', async (t) => { workflowYaml, `id: my-workflow name: my workflow -start: trigger-webhook +start: webhook options: {} steps: - - id: trigger + - id: webhook type: webhook next: transform-data: diff --git a/packages/cli/CHANGELOG.md b/packages/cli/CHANGELOG.md index fb4a462a0..5b6e432a4 100644 --- a/packages/cli/CHANGELOG.md +++ b/packages/cli/CHANGELOG.md @@ -1,5 +1,31 @@ # @openfn/cli +## 1.24.0 + +### Minor Changes + +- Total rewrite of project deploy (aka deploy --beta) + + ``` + openfn deploy + ``` + + This will deploy your currently checked out project to the synced app. + + Recommend passing `--log debug` to get richer output of what's happening. 
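+
+  For example (the `-y` flag added in this release skips the confirmation prompt described below; adjust flags to your setup):
+
+  ```
+  openfn deploy --log debug -y
+  ```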
+ + It will prompt for confirmation before posting. + + This new function appears stable but is currently undergoing testing. Use with caution. + +### Patch Changes + +- Updated dependencies [e33e362] +- Updated dependencies [ef06f98] +- Updated dependencies [1b5e837] + - @openfn/lexicon@1.4.0 + - @openfn/project@0.12.0 + ## 1.23.0 ### Minor Changes diff --git a/packages/cli/package.json b/packages/cli/package.json index cdd0c892d..119b12f2e 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -1,6 +1,6 @@ { "name": "@openfn/cli", - "version": "1.23.0", + "version": "1.24.0", "description": "CLI devtools for the OpenFn toolchain", "engines": { "node": ">=18", diff --git a/packages/cli/src/commands.ts b/packages/cli/src/commands.ts index b0cfa0880..9f56545c5 100644 --- a/packages/cli/src/commands.ts +++ b/packages/cli/src/commands.ts @@ -9,7 +9,7 @@ import docgen from './docgen/handler'; import docs from './docs/handler'; import metadata from './metadata/handler'; import pull from './pull/handler'; -import * as projects from './projects/handler'; +import * as projects from './projects'; import * as repo from './repo/handler'; import createLogger, { CLI, Logger } from './util/logger'; @@ -23,28 +23,29 @@ import { CLIError } from './errors'; export type CommandList = | 'apollo' - | 'compile' | 'collections-get' - | 'collections-set' | 'collections-remove' + | 'collections-set' + | 'compile' | 'deploy' | 'docgen' | 'docs' | 'execute' | 'metadata' - | 'pull' - | 'projects' + | 'project-checkout' + | 'project-deploy' + | 'project-fetch' + | 'project-list' + | 'project-merge' + | 'project-pull' + | 'project-version' | 'project' + | 'projects' + | 'pull' | 'repo-clean' | 'repo-install' | 'repo-list' | 'repo-pwd' - | 'project-pull' - | 'project-list' - | 'project-version' - | 'project-merge' - | 'project-checkout' - | 'project-fetch' | 'test' | 'version'; @@ -67,6 +68,7 @@ const handlers = { ['repo-install']: repo.install, ['repo-pwd']: repo.pwd, ['repo-list']: repo.list, + ['project-deploy']: projects.deploy, ['project-pull']: projects.pull, ['project-list']: projects.list, ['project-version']: projects.version, diff --git a/packages/cli/src/deploy/beta.ts b/packages/cli/src/deploy/beta.ts deleted file mode 100644 index 834d37039..000000000 --- a/packages/cli/src/deploy/beta.ts +++ /dev/null @@ -1,42 +0,0 @@ -// beta v2 version of CLI deploy - -import Project from '@openfn/project'; -import { DeployConfig, deployProject } from '@openfn/deploy'; -import type { Logger } from '../util/logger'; -import { Opts } from '../options'; -import { loadAppAuthConfig } from '../projects/util'; - -export type DeployOptionsBeta = Required< - Pick< - Opts, - 'beta' | 'command' | 'log' | 'logJson' | 'apiKey' | 'endpoint' | 'path' - > ->; - -export async function handler(options: DeployOptionsBeta, logger: Logger) { - const config = loadAppAuthConfig(options, logger); - - // TMP use options.path to set the directory for now - // We'll need to manage this a bit better - // TODO this is fixed on another branch - const project = await Project.from('fs', { - root: (options as any).workspace || '.', - }); - // TODO: work out if there's any diff - - // generate state for the provisioner - const state = project.serialize('state', { format: 'json' }); - - logger.debug('Converted local project to app state:'); - logger.debug(JSON.stringify(state, null, 2)); - - // TODO not totally sold on endpoint handling right now - config.endpoint ??= project.openfn?.endpoint!; - - logger.info('Sending project to 
app...'); - - // TODO do I really want to use this deploy function? Is it suitable? - await deployProject(config as DeployConfig, state); - - logger.success('Updated project at', config.endpoint); -} diff --git a/packages/cli/src/deploy/handler.ts b/packages/cli/src/deploy/handler.ts index f5fe682f7..c61b70ba9 100644 --- a/packages/cli/src/deploy/handler.ts +++ b/packages/cli/src/deploy/handler.ts @@ -7,7 +7,7 @@ import { } from '@openfn/deploy'; import type { Logger } from '../util/logger'; import { DeployOptions } from './command'; -import * as beta from './beta'; +import * as beta from '../projects/deploy'; export type DeployFn = typeof deploy; diff --git a/packages/cli/src/options.ts b/packages/cli/src/options.ts index cc3f5917a..4dc0e44da 100644 --- a/packages/cli/src/options.ts +++ b/packages/cli/src/options.ts @@ -230,12 +230,16 @@ export const compile: CLIOption = { }; export const confirm: CLIOption = { - name: 'no-confirm', + name: 'confirm', yargs: { + alias: ['y'], boolean: true, description: "Skip confirmation prompts (e.g. 'Are you sure?')", }, ensure: (opts) => { + if ((opts as any).y) { + opts.confirm = false; + } setDefaultValue(opts, 'confirm', true); }, }; diff --git a/packages/cli/src/projects/command.ts b/packages/cli/src/projects/command.ts index 258ce0f21..ea2ebb92c 100644 --- a/packages/cli/src/projects/command.ts +++ b/packages/cli/src/projects/command.ts @@ -4,6 +4,7 @@ import merge from './merge'; import checkout from './checkout'; import fetch from './fetch'; import { command as pull } from './pull'; +import { command as deploy } from './deploy'; import type yargs from 'yargs'; @@ -15,6 +16,7 @@ export const projectsCommand = { builder: (yargs: yargs.Argv) => yargs .command(pull) + .command(deploy as any) .command(list) .command(version) .command(merge) diff --git a/packages/cli/src/projects/deploy.ts b/packages/cli/src/projects/deploy.ts new file mode 100644 index 000000000..abe14dbe8 --- /dev/null +++ b/packages/cli/src/projects/deploy.ts @@ -0,0 +1,250 @@ +import yargs from 'yargs'; +import Project from '@openfn/project'; +import c from 'chalk'; + +import * as o from '../options'; +import * as o2 from './options'; +import { + loadAppAuthConfig, + deployProject, + fetchProject, + serialize, + getSerializePath, +} from './util'; +import { build, ensure } from '../util/command-builders'; + +import type { Provisioner } from '@openfn/lexicon/lightning'; +import type { Logger } from '../util/logger'; +import type { Opts } from '../options'; + +export type DeployOptions = Pick< + Opts, + | 'apiKey' + | 'command' + | 'confirm' + | 'endpoint' + | 'force' + | 'log' + | 'logJson' + | 'confirm' +> & { workspace?: string; dryRun?: boolean }; + +const options = [ + // local options + o2.env, + o2.workspace, + o2.dryRun, + + // general options + o.apiKey, + o.endpoint, + o.log, + o.logJson, + o.snapshots, + o.force, + o.confirm, +]; + +const printProjectName = (project: Project) => + `${project.id} (${project.openfn?.uuid || ''})`; + +export const command: yargs.CommandModule = { + command: 'deploy', + describe: `Deploy the checked out project to a Lightning Instance`, + builder: (yargs: yargs.Argv) => + build(options, yargs) + .positional('project', { + describe: + 'The UUID, local id or local alias of the project to deploy to', + }) + .example( + 'deploy', + 'Deploy the checkout project to the connected instance' + ), + handler: ensure('project-deploy', options), +}; + +export async function handler(options: DeployOptions, logger: Logger) { + logger.warn( + 'WARNING: the 
project deploy command is in BETA and may not be stable. Use cautiously on production projects.'
+  );
+  const config = loadAppAuthConfig(options, logger);
+
+  // TODO: allow users to specify which project to deploy
+  // Should be able to take any project.yaml file via id, uuid, alias or path
+  // Note that it's a little weird to deploy a project you haven't checked out,
+  // so put good safeguards here
+  logger.info('Attempting to load checked-out project from workspace');
+  const localProject = await Project.from('fs', {
+    root: options.workspace || '.',
+  });
+
+  // TODO if there's no local metadata, the user must pass a UUID or alias to post to
+
+  logger.success(`Loaded local project ${printProjectName(localProject)}`);
+  // First step, fetch the latest version and write
+  // this may throw!
+  let remoteProject: Project;
+  try {
+    const { data } = await fetchProject(
+      config.endpoint,
+      config.apiKey,
+      localProject.uuid ?? localProject.id,
+      logger
+    );
+
+    remoteProject = await Project.from('state', data!, {
+      endpoint: config.endpoint,
+    });
+
+    logger.success('Downloaded latest version of project at ', config.endpoint);
+  } catch (e) {
+    console.log(e);
+    throw e;
+    // If fetch failed because of compatibility with the local project, what do we do?
+    // Well, actually I don't think I want this fetch to write to disk yet
+    // So if force is passed, we merge and write it anyway
+    // otherwise we throw because we've diverged
+    // this will actually happen later
+  }
+
+  // warn if the remote UUID is different to the local UUID
+  // This shouldn't happen?
+  if (!options.force && localProject.uuid !== remoteProject.uuid) {
+    logger.error(`UUID conflict!
+
+Your local project (${localProject.uuid}) has a different UUID to the remote project (${remoteProject.uuid}).
+
+Pass --force to override this error and deploy anyway.`);
+    return false;
+  }
+
+  const diffs = reportDiff(remoteProject!, localProject, logger);
+  if (!diffs.length) {
+    logger.success('Nothing to deploy');
+    return;
+  }
+
+  // Ensure there's no divergence
+  if (!localProject.canMergeInto(remoteProject!)) {
+    if (!options.force) {
+      logger.error(`Error: Projects have diverged!
+
+The remote project has been edited since the local project was branched. Changes may be lost.
+
+Pass --force to override this error and deploy anyway.`);
+      return;
+    } else {
+      logger.warn(
+        'Remote project has diverged from local project! Pushing anyway as -f passed'
+      );
+    }
+  } else {
+    logger.info(
+      'Remote project has not diverged from local project - it is safe to deploy 🎉'
+    );
+  }
+
+  logger.info('Merging changes into remote project');
+  const merged = Project.merge(localProject, remoteProject!, {
+    mode: 'replace',
+    force: true,
+  });
+  // generate state for the provisioner
+  const state = merged.serialize('state', {
+    format: 'json',
+  }) as Provisioner.Project_v1;
+
+  // TODO only do this if asked
+  // or maybe write it to output with -o? 
+ // maybe we can write state.app, state.local and state.result + // this is heavy debug stuff + logger.debug('Converted merged local project to app state:'); + logger.debug(JSON.stringify(state, null, 2)); + + // TODO not totally sold on endpoint handling right now + config.endpoint ??= localProject.openfn?.endpoint!; + + if (options.dryRun) { + logger.always('dryRun option set: skipping upload step'); + } else { + if (options.confirm) { + if ( + !(await logger.confirm( + `Ready to deploy changes to ${config.endpoint}?` + )) + ) { + logger.always('Cancelled deployment'); + return false; + } + } + + logger.info('Sending project to app...'); + + const { data: result } = await deployProject( + config.endpoint, + config.apiKey, + state, + logger + ); + + // TODO do we think this final project is right? + // We need to restore CLI stuff like alias, meta + const finalProject = await Project.from( + 'state', + result, + { + endpoint: config.endpoint, + }, + merged.config + ); + + const finalOutputPath = getSerializePath(localProject, options.workspace!); + logger.debug('Updating local project at ', finalOutputPath); + await serialize(finalProject, finalOutputPath); + } + + logger.success('Updated project at', config.endpoint); +} + +export const reportDiff = (local: Project, remote: Project, logger: Logger) => { + const diffs = remote.diff(local); + + if (diffs.length === 0) { + logger.info('No workflow changes detected'); + return diffs; + } + + const added = diffs.filter((d) => d.type === 'added'); + const changed = diffs.filter((d) => d.type === 'changed'); + const removed = diffs.filter((d) => d.type === 'removed'); + + if (added.length > 0) { + logger.break(); + logger.always(c.green('Workflows added:')); + for (const diff of added) { + logger.always(c.green(` - ${diff.id}`)); + } + logger.break(); + } + + if (changed.length > 0) { + logger.break(); + logger.always(c.yellow('Workflows modified:')); + for (const diff of changed) { + logger.always(c.yellow(` - ${diff.id}`)); + } + logger.break(); + } + + if (removed.length > 0) { + logger.break(); + logger.always(c.red('Workflows removed:')); + for (const diff of removed) { + logger.always(c.red(` - ${diff.id}`)); + } + logger.break(); + } + + return diffs; +}; diff --git a/packages/cli/src/projects/fetch.ts b/packages/cli/src/projects/fetch.ts index 6358a2602..d10c4626e 100644 --- a/packages/cli/src/projects/fetch.ts +++ b/packages/cli/src/projects/fetch.ts @@ -2,20 +2,18 @@ import yargs from 'yargs'; import path from 'node:path'; import Project, { Workspace } from '@openfn/project'; -import resolvePath from '../util/resolve-path'; import { build, ensure, override } from '../util/command-builders'; import type { Logger } from '../util/logger'; import * as o from '../options'; import * as po from './options'; import type { Opts } from './options'; -import { serialize, fetchProject, loadAppAuthConfig } from './util'; - -// TODO need to implement these -// type Config = { -// requireConfirmation?: boolean; // alias to y maybe -// dryRun?: boolean; -// }; +import { + serialize, + fetchProject, + loadAppAuthConfig, + getSerializePath, +} from './util'; export type FetchOptions = Pick< Opts, @@ -90,12 +88,13 @@ export const handler = async (options: FetchOptions, logger: Logger) => { // TODO should we use the local target project for output? // Work out where and how to serialize the project - const outputRoot = resolvePath(outputPath || workspacePath); - const projectsDir = remoteProject?.config.dirs.projects ?? 
'.projects'; - const finalOutputPath = - outputPath ?? `${outputRoot}/${projectsDir}/${remoteProject.qname}`; - let format: undefined | 'json' | 'yaml' = undefined; + const finalOutputPath = getSerializePath( + remoteProject, + workspacePath, + outputPath + ); + let format: undefined | 'json' | 'yaml' = undefined; if (outputPath) { // If the user gave us a path for output, we need to respect the format we've been given const ext = path.extname(outputPath!).substring(1) as any; @@ -176,7 +175,7 @@ async function resolveOutputProject( // This will fetch the remote project the user wants -async function fetchRemoteProject( +export async function fetchRemoteProject( workspace: Workspace, options: FetchOptions, logger: Logger diff --git a/packages/cli/src/projects/handler.ts b/packages/cli/src/projects/index.ts similarity index 85% rename from packages/cli/src/projects/handler.ts rename to packages/cli/src/projects/index.ts index 27b33100c..d9982fecf 100644 --- a/packages/cli/src/projects/handler.ts +++ b/packages/cli/src/projects/index.ts @@ -4,3 +4,4 @@ export { handler as merge } from './merge'; export { handler as checkout } from './checkout'; export { handler as fetch } from './fetch'; export { handler as pull } from './pull'; +export { handler as deploy } from './deploy'; diff --git a/packages/cli/src/projects/options.ts b/packages/cli/src/projects/options.ts index 6cbc3d52b..dc35a74b8 100644 --- a/packages/cli/src/projects/options.ts +++ b/packages/cli/src/projects/options.ts @@ -28,6 +28,14 @@ export const alias: CLIOption = { }, }; +export const dryRun: CLIOption = { + name: 'dryRun', + yargs: { + description: + 'Runs the command but does not commit any changes to disk or app', + }, +}; + export const removeUnmapped: CLIOption = { name: 'remove-unmapped', yargs: { diff --git a/packages/cli/src/projects/pull.ts b/packages/cli/src/projects/pull.ts index 12bf8e6d8..8943c0021 100644 --- a/packages/cli/src/projects/pull.ts +++ b/packages/cli/src/projects/pull.ts @@ -10,13 +10,14 @@ import type { Opts } from './options'; export type PullOptions = Pick< Opts, - | 'beta' | 'command' + | 'alias' + | 'workspace' + | 'apiKey' + | 'endpoint' | 'log' | 'logJson' | 'statePath' - | 'projectPath' - | 'configPath' | 'project' | 'confirm' | 'snapshots' @@ -37,7 +38,6 @@ const options = [ description: 'path to output the project to', }), o.logJson, - o.projectPath, o.snapshots, o.path, o.force, diff --git a/packages/cli/src/projects/util.ts b/packages/cli/src/projects/util.ts index 503d2076a..e9202ba50 100644 --- a/packages/cli/src/projects/util.ts +++ b/packages/cli/src/projects/util.ts @@ -6,6 +6,7 @@ import type { Opts } from '../options'; import type { Logger } from '@openfn/logger'; import type Project from '@openfn/project'; import { CLIError } from '../errors'; +import resolvePath from '../util/resolve-path'; type AuthOptions = Pick; @@ -42,6 +43,16 @@ const ensureExt = (filePath: string, ext: string) => { return filePath; }; +export const getSerializePath = ( + project: Project, + workspacePath: string, + outputPath?: string +) => { + const outputRoot = resolvePath(outputPath || workspacePath); + const projectsDir = project?.config.dirs.projects ?? '.projects'; + return outputPath ?? 
`${outputRoot}/${projectsDir}/${project.qname}`; +}; + export const serialize = async ( project: Project, outputPath: string, @@ -82,6 +93,7 @@ export const getLightningUrl = ( return new URL(`/api/provision/${path}?${params.toString()}`, endpoint); }; +// TODO move to client.ts export async function fetchProject( endpoint: string, apiKey: string, @@ -123,6 +135,41 @@ export async function fetchProject( } } +export async function deployProject( + endpoint: string, + apiKey: string, + state: Provisioner.Project_v1, + logger?: Logger +): Promise<{ data: Provisioner.Project_v1 }> { + try { + const url = getLightningUrl(endpoint); + const response = await fetch(url, { + method: 'POST', + headers: { + Authorization: `Bearer ${apiKey}`, + 'Content-Type': 'application/json', + }, + body: JSON.stringify(state), + }); + + if (!response.ok) { + const body = await response.json(); + + logger?.error('Failed to deploy project:'); + logger?.error(JSON.stringify(body, null, 2)); + throw new CLIError( + `Failed to deploy project ${state.name}: ${response.status}` + ); + } + + return await response.json(); + } catch (error: any) { + handleCommonErrors({ endpoint, apiKey }, error); + + throw error; + } +} + function handleCommonErrors(config: AuthOptions, error: any) { if (error.cause?.code === 'ECONNREFUSED') { throw new DeployError( diff --git a/packages/cli/test/projects/checkout.test.ts b/packages/cli/test/projects/checkout.test.ts index 93ecc56f9..55a5485bf 100644 --- a/packages/cli/test/projects/checkout.test.ts +++ b/packages/cli/test/projects/checkout.test.ts @@ -429,6 +429,7 @@ test.serial('respect openfn.yaml settings', async (t) => { `project: uuid: id: staging + name: Staging workspace: credentials: credentials.yaml dirs: @@ -451,10 +452,10 @@ workspace: id: 'simple-workflow', name: 'Simple Workflow', options: {}, - start: 'trigger-webhook', + start: 'webhook', steps: [ { - id: 'trigger', + id: 'webhook', type: 'webhook', next: { 'transform-data-to-fhir-standard': { diff --git a/packages/cli/test/projects/deploy.test.ts b/packages/cli/test/projects/deploy.test.ts new file mode 100644 index 000000000..5809fe736 --- /dev/null +++ b/packages/cli/test/projects/deploy.test.ts @@ -0,0 +1,150 @@ +import test from 'ava'; +import Project, { generateWorkflow } from '@openfn/project'; +import { createMockLogger } from '@openfn/logger'; +import { reportDiff } from '../../src/projects/deploy'; + +const logger = createMockLogger(undefined, { level: 'debug' }); + +// what will deploy tests look like? + +// deploy a project for the first time (this doesn't work though?) 
+ +// deploy a change to a project + +// deploy a change to a project but fetch latest first + +// throw when trying to deploy to a diverged remote project + +// force deploy an incompatible project + +// don't post the final version if dry-run is set + +// TODO diff + confirm + +test('reportDiff: should report no changes for identical projects', (t) => { + const wf = generateWorkflow('@id a trigger-x'); + + const local = new Project({ + name: 'local', + workflows: [wf], + }); + + const remote = new Project({ + name: 'remote', + workflows: [wf], + }); + + const diffs = reportDiff(local, remote, logger); + t.is(diffs.length, 0); + + const { message, level } = logger._parse(logger._last); + t.is(level, 'info'); + t.is(message, 'No workflow changes detected'); +}); + +test('reportDiff: should report changed workflow', (t) => { + const wfRemote = generateWorkflow('@id a trigger-x'); + const wfLocal = generateWorkflow('@id a trigger-y'); + + const local = new Project({ + name: 'local', + workflows: [wfLocal], + }); + + const remote = new Project({ + name: 'remote', + workflows: [wfRemote], + }); + + const diffs = reportDiff(local, remote, logger); + t.is(diffs.length, 1); + t.deepEqual(diffs[0], { id: 'a', type: 'changed' }); + + t.truthy(logger._find('always', /workflows modified/i)); + t.truthy(logger._find('always', /- a/i)); +}); + +test('reportDiff: should report added workflow', (t) => { + const wf1 = generateWorkflow('@id a trigger-x'); + const wf2 = generateWorkflow('@id b trigger-y'); + + const local = new Project({ + name: 'local', + workflows: [wf1, wf2], + }); + + const remote = new Project({ + name: 'remote', + workflows: [wf1], + }); + + const diffs = reportDiff(local, remote, logger); + t.is(diffs.length, 1); + t.deepEqual(diffs[0], { id: 'b', type: 'added' }); + + t.truthy(logger._find('always', /workflows added/i)); + t.truthy(logger._find('always', /- b/i)); +}); + +test('reportDiff: should report removed workflow', (t) => { + const wf1 = generateWorkflow('@id a trigger-x'); + const wf2 = generateWorkflow('@id b trigger-y'); + + const local = new Project({ + name: 'local', + workflows: [wf1], + }); + + const remote = new Project({ + name: 'remote', + workflows: [wf1, wf2], + }); + + const diffs = reportDiff(local, remote, logger); + t.is(diffs.length, 1); + t.deepEqual(diffs[0], { id: 'b', type: 'removed' }); + + t.truthy(logger._find('always', /workflows removed/i)); + t.truthy(logger._find('always', /- b/i)); +}); + +test('reportDiff: should report mix of added, changed, and removed workflows', (t) => { + const wf1 = generateWorkflow('@id a trigger-x'); + const wf2Remote = generateWorkflow('@id b trigger-y'); + const wf2Local = generateWorkflow('@id b trigger-different'); + const wf3 = generateWorkflow('@id c trigger-z'); + const wf4 = generateWorkflow('@id d trigger-w'); + + const local = new Project({ + name: 'local', + workflows: [wf1, wf2Local, wf4], // has a, b (changed), d (new) + }); + + const remote = new Project({ + name: 'remote', + workflows: [wf1, wf2Remote, wf3], // has a, b, c + }); + + const diffs = reportDiff(local, remote, logger); + t.is(diffs.length, 3); + + t.deepEqual( + diffs.find((d) => d.id === 'b'), + { id: 'b', type: 'changed' } + ); + t.deepEqual( + diffs.find((d) => d.id === 'c'), + { id: 'c', type: 'removed' } + ); + t.deepEqual( + diffs.find((d) => d.id === 'd'), + { id: 'd', type: 'added' } + ); + + t.truthy(logger._find('always', /workflows added/i)); + t.truthy(logger._find('always', /- d/i)); + t.truthy(logger._find('always', /workflows 
modified/i)); + t.truthy(logger._find('always', /- b/i)); + t.truthy(logger._find('always', /workflows removed/i)); + t.truthy(logger._find('always', /- c/i)); +}); diff --git a/packages/cli/test/projects/fetch.test.ts b/packages/cli/test/projects/fetch.test.ts index 8c8b4d8de..ff6ef9922 100644 --- a/packages/cli/test/projects/fetch.test.ts +++ b/packages/cli/test/projects/fetch.test.ts @@ -65,7 +65,7 @@ test.serial('fetch by UUID to default new alias', async (t) => { ); const fileContent = await readFile(getYamlPath('main'), 'utf-8'); - + t.log(fileContent); t.is(fileContent.trim(), yaml_v2); }); @@ -433,7 +433,7 @@ test.serial( }, }, { - id: 'trigger', + id: 'webhook', type: 'webhook', openfn: { enabled: true, @@ -450,7 +450,7 @@ test.serial( }, }, ], - start: 'trigger-webhook', + start: 'webhook', openfn: { uuid: '72ca3eb0-042c-47a0-a2a1-a545ed4a8406', inserted_at: '2025-04-23T11:19:32Z', diff --git a/packages/cli/test/projects/fixtures.ts b/packages/cli/test/projects/fixtures.ts index 9cb301162..f3d8b9eb9 100644 --- a/packages/cli/test/projects/fixtures.ts +++ b/packages/cli/test/projects/fixtures.ts @@ -7,24 +7,24 @@ export const myProject_v1: Provisioner.Project = { concurrency: null, inserted_at: '2025-04-23T11:15:59Z', collections: [], - workflows: [ - { + workflows: { + 'my-workflow': { id: '72ca3eb0-042c-47a0-a2a1-a545ed4a8406', name: 'My Workflow', - edges: [ - { + edges: { + 'trigger-webhook': { enabled: true, id: 'a9a3adef-b394-4405-814d-3ac4323f4b4b', source_trigger_id: '4a06289c-15aa-4662-8dc6-f0aaacd8a058', condition_type: 'always', target_job_id: '66add020-e6eb-4eec-836b-20008afca816', }, - ], + }, concurrency: null, inserted_at: '2025-04-23T11:19:32Z', updated_at: '2025-04-23T11:19:32Z', - jobs: [ - { + jobs: { + 'transform-data': { id: '66add020-e6eb-4eec-836b-20008afca816', name: 'Transform data', body: 'fn()', @@ -32,21 +32,21 @@ export const myProject_v1: Provisioner.Project = { project_credential_id: null, keychain_credential_id: null, }, - ], - triggers: [ - { + }, + triggers: { + webhook: { enabled: true, // TODO enabled: false is a bit interesting id: '4a06289c-15aa-4662-8dc6-f0aaacd8a058', type: 'webhook', }, - ], + }, lock_version: 1, deleted_at: null, version_history: [ 'cli:02582f3bb088', // alterstate ], }, - ], + }, updated_at: '2025-04-23T11:15:59Z', project_credentials: [], scheduled_deletion: null, @@ -82,7 +82,7 @@ workflows: adaptor: "@openfn/language-common@latest" openfn: uuid: 66add020-e6eb-4eec-836b-20008afca816 - - id: trigger + - id: webhook type: webhook openfn: enabled: true @@ -101,4 +101,4 @@ workflows: updated_at: 2025-04-23T11:19:32Z lock_version: 1 id: my-workflow - start: trigger-webhook`; + start: webhook`; diff --git a/packages/cli/turtle-power/output.json b/packages/cli/turtle-power/output.json deleted file mode 100644 index 3699bcced..000000000 --- a/packages/cli/turtle-power/output.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "x": 1 -} \ No newline at end of file diff --git a/packages/lexicon/CHANGELOG.md b/packages/lexicon/CHANGELOG.md index cb91ab008..d5584addc 100644 --- a/packages/lexicon/CHANGELOG.md +++ b/packages/lexicon/CHANGELOG.md @@ -1,5 +1,15 @@ # lexicon +## 1.4.0 + +### Minor Changes + +- e33e362: Update v1 provisioner project structure (workflows, jobs, edges and triggers as record, not array) + +### Patch Changes + +- ef06f98: Support sandboxy keys in serialized projects + ## 1.3.0 ### Minor Changes diff --git a/packages/lexicon/core.d.ts b/packages/lexicon/core.d.ts index 5e596d5e5..187da091f 100644 --- 
a/packages/lexicon/core.d.ts +++ b/packages/lexicon/core.d.ts @@ -10,6 +10,11 @@ export type SourceMapWithOperations = RawSourceMap & { operations: [{ line: number; order: number; name: string }]; }; +export type SandboxMeta = { + parentId?: string; + parentName?: string; // not supported yet +}; + // The serialised shape of of a project, as JSON // this is what is saved to project.yaml export type Project = { @@ -23,7 +28,14 @@ export type Project = { workflows: Workflow[]; - options: {}; + options?: { + env?: string; + color?: string; + + [key: string]: any; + }; + + sandbox?: SandboxMeta; credentials: any; collections: string[]; diff --git a/packages/lexicon/lightning.d.ts b/packages/lexicon/lightning.d.ts index 29439d464..695bd7662 100644 --- a/packages/lexicon/lightning.d.ts +++ b/packages/lexicon/lightning.d.ts @@ -228,7 +228,7 @@ export namespace Provisioner { name: string; description: string | null; - workflows: Workflow[]; + workflows: Record<string, Workflow>; concurrency?: any; // TODO // TODO typing isn't quite right here either @@ -251,14 +251,18 @@ export namespace Provisioner { retention_policy?: string; history_retention_period: string | null; dataclip_retention_period: string | null; + + env?: string; + color?: string; + parent_id?: string; } export interface Workflow { id: string; name: string; - jobs: Job[]; - triggers: Trigger[]; - edges: Edge[]; + jobs: Record<string, Job>; + triggers: Record<string, Trigger>; + edges: Record<string, Edge>; delete?: boolean; project_id?: string; diff --git a/packages/lexicon/package.json b/packages/lexicon/package.json index 8105c97b7..2832d2fb0 100644 --- a/packages/lexicon/package.json +++ b/packages/lexicon/package.json @@ -1,6 +1,6 @@ { "name": "@openfn/lexicon", - "version": "1.3.0", + "version": "1.4.0", "description": "Central repo of names and type definitions", "author": "Open Function Group ", "license": "ISC", diff --git a/packages/project/CHANGELOG.md b/packages/project/CHANGELOG.md index 8bb546557..a1b2d67a4 100644 --- a/packages/project/CHANGELOG.md +++ b/packages/project/CHANGELOG.md @@ -1,5 +1,18 @@ # @openfn/project +## 0.12.0 + +### Minor Changes + +- e33e362: Update v1 provisioner project structure (workflows, jobs, edges and triggers as record, not array) + +### Patch Changes + +- 1b5e837: Include project name in openfn.yaml +- Updated dependencies [e33e362] +- Updated dependencies [ef06f98] + - @openfn/lexicon@1.4.0 + ## 0.11.0 ### Minor Changes diff --git a/packages/project/README.md b/packages/project/README.md index 954a016e2..6d0d6d33e 100644 --- a/packages/project/README.md +++ b/packages/project/README.md @@ -8,6 +8,18 @@ A single Project can be Checked Out to disk at a time, meaning its source workfl A Workspace is a set of related Projects , including a Project and its associated Sandboxes, or a Project deployed to apps in multiple web domains +## Structure and Artifacts + +openfn.yaml + +project file + +sort of a mix of project.yaml, state.json and config.json + +This is strictly a representation of a server-side project; it's like the last-sync-state. CLI-only or offline projects do not have one. + +It's also a portable representation of the project. + ### Serializing and Parsing The main idea of Projects is that a Project represents a set of OpenFn workflows defined in any format and present a standard JS-friendly interface to manipulate and reason about them. 
diff --git a/packages/project/package.json b/packages/project/package.json index b3185dbbc..02035cdf6 100644 --- a/packages/project/package.json +++ b/packages/project/package.json @@ -1,6 +1,6 @@ { "name": "@openfn/project", - "version": "0.11.0", + "version": "0.12.0", "description": "Read, serialize, replicate and sync OpenFn projects", "scripts": { "test": "pnpm ava", diff --git a/packages/project/src/Project.ts b/packages/project/src/Project.ts index 6e72ee896..e1c425ed8 100644 --- a/packages/project/src/Project.ts +++ b/packages/project/src/Project.ts @@ -10,10 +10,11 @@ import fromProject, { SerializedProject } from './parse/from-project'; import slugify from './util/slugify'; import { getUuidForEdge, getUuidForStep } from './util/uuid'; import { merge, MergeProjectOptions } from './merge/merge-project'; +import { diff as projectDiff } from './util/project-diff'; import { Workspace } from './Workspace'; import { buildConfig } from './util/config'; import { Provisioner } from '@openfn/lexicon/lightning'; -import { UUID, WorkspaceConfig } from '@openfn/lexicon'; +import { SandboxMeta, UUID, WorkspaceConfig } from '@openfn/lexicon'; const maybeCreateWorkflow = (wf: any) => wf instanceof Workflow ? wf : new Workflow(wf); @@ -70,6 +71,8 @@ export class Project { credentials: string[]; + sandbox?: SandboxMeta; + // project v2. Default. // doens't take any options static async from( @@ -124,11 +127,6 @@ export class Project { return merge(source, target, options); } - // env is excluded because it's not really part of the project - // uh maybe - // maybe this second arg is config - like env, branch rules, serialisation rules - // stuff that's external to the actual project and managed by the repo - // TODO maybe the constructor is (data, Workspace) constructor( data: Partial = {}, @@ -158,6 +156,7 @@ export class Project { this.workflows = data.workflows?.map(maybeCreateWorkflow) ?? []; this.collections = data.collections; this.credentials = data.credentials; + this.sandbox = data.sandbox; } /** Local alias for the project. Comes from the file name. Not shared with Lightning. */ @@ -236,6 +235,11 @@ export class Project { return result; } + // Compare this project with another and return a list of workflow changes + diff(project: Project) { + return projectDiff(this, project); + } + canMergeInto(target: Project) { const potentialConflicts: Record = {}; for (const sourceWorkflow of this.workflows) { diff --git a/packages/project/src/index.ts b/packages/project/src/index.ts index b49203bcf..5ccb4b034 100644 --- a/packages/project/src/index.ts +++ b/packages/project/src/index.ts @@ -7,3 +7,6 @@ export default Project; export { Workspace, yamlToJson, jsonToYaml }; export { generateWorkflow, generateProject } from './gen/generator'; + +export { diff } from './util/project-diff'; +export type { WorkflowDiff, DiffType } from './util/project-diff'; diff --git a/packages/project/src/merge/merge-node.ts b/packages/project/src/merge/merge-node.ts index 542af93a2..b9c98fcac 100644 --- a/packages/project/src/merge/merge-node.ts +++ b/packages/project/src/merge/merge-node.ts @@ -15,6 +15,7 @@ type Node = Workflow['steps'][number]; const clone = (obj: any) => JSON.parse(JSON.stringify(obj)); +// TODO merge needs to include openfn props and eg lock_version export function mergeWorkflows( source: Workflow, target: Workflow, @@ -76,6 +77,15 @@ export function mergeWorkflows( return { ...target, ...newSource, - openfn: { ...target.openfn }, // preserving the target uuid. 
we might need a proper helper function for this. + openfn: { + ...target.openfn, + ...source.openfn, + // preserving the target uuid. we might need a proper helper function for this + uuid: target.openfn?.uuid, + }, + options: { + ...target.options, + ...source.options, + }, }; } diff --git a/packages/project/src/merge/merge-project.ts b/packages/project/src/merge/merge-project.ts index 5452401fd..0d2e79c38 100644 --- a/packages/project/src/merge/merge-project.ts +++ b/packages/project/src/merge/merge-project.ts @@ -7,12 +7,28 @@ import baseMerge from '../util/base-merge'; import getDuplicates from '../util/get-duplicates'; import Workflow from '../Workflow'; +export const SANDBOX_MERGE = 'sandbox'; + +export const REPLACE_MERGE = 'replace'; + export class UnsafeMergeError extends Error {} export type MergeProjectOptions = { workflowMappings: Record; // removeUnmapped: boolean; force: boolean; + mode: typeof SANDBOX_MERGE | typeof REPLACE_MERGE; +}; + +const defaultOptions: MergeProjectOptions = { + workflowMappings: {}, + removeUnmapped: false, + force: true, + /** + * If mode is sandbox, basically only content will be merged and all metadata/settings/options/config is ignored + * If mode is replace, all properties on the source will override the target (including UUIDs, name) + */ + mode: SANDBOX_MERGE, }; /** @@ -30,11 +46,6 @@ export function merge( target: Project, opts?: Partial ) { - const defaultOptions: MergeProjectOptions = { - workflowMappings: {}, - removeUnmapped: false, - force: true, - }; const options = defaultsDeep( opts, defaultOptions @@ -109,13 +120,31 @@ export function merge( } } - // TODO: clarify repo preservation strategy - // TODO: how other properties of a project are being merged. + // Work out what metadata to preserve from the target + // in the merge + const assigns = + options.mode === SANDBOX_MERGE + ? { + workflows: finalWorkflows, + } + : { + workflows: finalWorkflows, + openfn: { + ...target.openfn, + ...source.openfn, + }, + options: { + ...target.options, + ...source.options, + }, + name: source.name ?? target.name, + description: source.description ?? target.description, + credentials: source.credentials ?? target.credentials, + collections: source.collections ?? target.collections, + }; // with project level props merging, target goes into source because we want to preserve the target props. return new Project( - baseMerge(target, source, ['collections'], { - workflows: finalWorkflows, - } as any) + baseMerge(target, source, ['collections'], assigns as any) ); } diff --git a/packages/project/src/parse/from-app-state.ts b/packages/project/src/parse/from-app-state.ts index 9f08e83c2..ef3c355de 100644 --- a/packages/project/src/parse/from-app-state.ts +++ b/packages/project/src/parse/from-app-state.ts @@ -30,6 +30,7 @@ export default ( collections, inserted_at, updated_at, + parent_id, ...options } = stateJson; @@ -52,12 +53,13 @@ export default ( updated_at, }; - // TODO maybe this for local metadata, stuff that isn't synced? - // proj.meta = { - // fetched_at: config.fetchedAt, - // }; + if (parent_id) { + proj.sandbox = { + parentId: parent_id, + }; + } - proj.workflows = stateJson.workflows.map(mapWorkflow); + proj.workflows = Object.values(stateJson.workflows).map(mapWorkflow); return new Project(proj as l.Project, config); }; @@ -104,22 +106,24 @@ export const mapWorkflow = (workflow: Provisioner.Workflow) => { // TODO what do we do if the condition is disabled? // I don't think that's the same as edge condition false? 
- workflow.triggers.forEach((trigger: Provisioner.Trigger) => { + Object.values(workflow.triggers).forEach((trigger: Provisioner.Trigger) => { const { type, ...otherProps } = trigger; if (!mapped.start) { - mapped.start = `trigger-${type}`; + mapped.start = type; } - const connectedEdges = edges.filter( + const connectedEdges = Object.values(edges).filter( (e) => e.source_trigger_id === trigger.id ); mapped.steps.push({ - id: 'trigger', + id: type, type, openfn: renameKeys(otherProps, { id: 'uuid' }), next: connectedEdges.reduce((obj: any, edge) => { - const target = jobs.find((j) => j.id === edge.target_job_id); + const target = Object.values(jobs).find( + (j) => j.id === edge.target_job_id + ); if (!target) { throw new Error(`Failed to find ${edge.target_job_id}`); } @@ -130,8 +134,8 @@ export const mapWorkflow = (workflow: Provisioner.Workflow) => { } as l.Trigger); }); - workflow.jobs.forEach((step: Provisioner.Job) => { - const outboundEdges = edges.filter( + Object.values(workflow.jobs).forEach((step: Provisioner.Job) => { + const outboundEdges = Object.values(edges).filter( (e) => e.source_job_id === step.id || e.source_trigger_id === step.id ); @@ -156,7 +160,9 @@ export const mapWorkflow = (workflow: Provisioner.Workflow) => { if (outboundEdges.length) { s.next = outboundEdges.reduce((next, edge) => { - const target = jobs.find((j) => j.id === edge.target_job_id); + const target = Object.values(jobs).find( + (j) => j.id === edge.target_job_id + ); // @ts-ignore next[slugify(target.name)] = mapEdge(edge); return next; diff --git a/packages/project/src/serialize/to-app-state.ts b/packages/project/src/serialize/to-app-state.ts index a08786466..2eb4cb8f4 100644 --- a/packages/project/src/serialize/to-app-state.ts +++ b/packages/project/src/serialize/to-app-state.ts @@ -6,6 +6,7 @@ import { Project } from '../Project'; import renameKeys from '../util/rename-keys'; import { jsonToYaml } from '../util/yaml'; import Workflow from '../Workflow'; +import slugify from '../util/slugify'; type Options = { format?: 'json' | 'yaml' }; @@ -38,7 +39,12 @@ export default function ( Object.assign(state, rest, project.options); state.project_credentials = project.credentials ?? []; - state.workflows = project.workflows.map(mapWorkflow); + state.workflows = project.workflows + .map(mapWorkflow) + .reduce((obj: any, wf) => { + obj[slugify(wf.name ?? wf.id)] = wf; + return obj; + }, {}); const shouldReturnYaml = options.format === 'yaml' || @@ -61,11 +67,11 @@ const mapWorkflow = (workflow: Workflow) => { const wfState = { ...originalOpenfnProps, id: workflow.openfn?.uuid ?? randomUUID(), - jobs: [], - triggers: [], - edges: [], + jobs: {}, + triggers: {}, + edges: {}, lock_version: workflow.openfn?.lock_version ?? null, // TODO needs testing - } as unknown as Provisioner.Workflow; + } as Provisioner.Workflow; if (workflow.name) { wfState.name = workflow.name; @@ -96,7 +102,7 @@ const mapWorkflow = (workflow: Workflow) => { type: s.type, ...renameKeys(s.openfn, { uuid: 'id' }), } as Provisioner.Trigger; - wfState.triggers.push(node); + wfState.triggers[node.type] = node; } else { node = omitBy(pick(s, ['name', 'adaptor']), isNil) as Provisioner.Job; const { uuid, ...otherOpenFnProps } = s.openfn ?? {}; @@ -118,7 +124,7 @@ const mapWorkflow = (workflow: Workflow) => { Object.assign(node, defaultJobProps, otherOpenFnProps); - wfState.jobs.push(node); + wfState.jobs[s.id ?? 
slugify(s.name)] = node; } // create an edge to each linked node @@ -155,12 +161,20 @@ const mapWorkflow = (workflow: Workflow) => { e.condition_expression = rules.condition; } } - wfState.edges.push(e); + wfState.edges[`${s.id}->${next}`] = e; }); }); // Sort edges by UUID (for more predictable comparisons in test) - wfState.edges = sortBy(wfState.edges, 'id'); + wfState.edges = Object.keys(wfState.edges) + // convert edge ids to strings just in case a number creeps in (it might in test) + .sort((a, b) => + `${wfState.edges[a].id}`.localeCompare('' + wfState.edges[b].id) + ) + .reduce((obj: any, key) => { + obj[key] = wfState.edges[key]; + return obj; + }, {}); return wfState; }; diff --git a/packages/project/src/serialize/to-project.ts b/packages/project/src/serialize/to-project.ts index e6fd528cd..1b7d7b221 100644 --- a/packages/project/src/serialize/to-project.ts +++ b/packages/project/src/serialize/to-project.ts @@ -61,6 +61,13 @@ export default (project: Project, options: ToProjectOptions = {}) => { isNil ) as SerializedProject; + // only write the sandbox key if this project is itself a sandbox + if (project.sandbox?.parentId) { + proj.sandbox = { + parentId: project.sandbox.parentId, + }; + } + const format = options.format ?? proj.config?.formats.project; if (format === 'json') { diff --git a/packages/project/src/util/config.ts b/packages/project/src/util/config.ts index 67f7e9fec..c1d80c433 100644 --- a/packages/project/src/util/config.ts +++ b/packages/project/src/util/config.ts @@ -22,18 +22,21 @@ export const buildConfig = (config: Partial = {}) => ({ }); // Generate a workspace config (openfn.yaml) file for a project -export const extractConfig = (source: Project) => { - const project = { +export const extractConfig = (source: Project, format?: 'yaml' | 'json') => { + const project: any = { ...(source.openfn || {}), id: source.id, }; + if (source.name) { + project.name = source.name; + } const workspace = { ...source.config, }; const content = { project, workspace }; - const format = workspace.formats.openfn; + format = format ?? workspace.formats.openfn; if (format === 'yaml') { return { path: 'openfn.yaml', diff --git a/packages/project/src/util/project-diff.ts b/packages/project/src/util/project-diff.ts new file mode 100644 index 000000000..7e849130a --- /dev/null +++ b/packages/project/src/util/project-diff.ts @@ -0,0 +1,56 @@ +import { Project } from '../Project'; + +export type DiffType = 'added' | 'changed' | 'removed'; + +export type WorkflowDiff = { + id: string; + type: DiffType; +}; + +/** + * Compare two projects and return a list of workflow changes showing how + * project B has diverged from project A. + * + * Workflows are identified by their ID and compared using version hashes. + * + * @param a - The baseline project (e.g., main branch) + * @param b - The comparison project (e.g., staging branch) + * @returns Array of workflow diffs indicating how B differs from A: + * - 'added': workflow exists in B but not in A + * - 'removed': workflow exists in A but not in B + * - 'changed': workflow exists in both but has different version hashes + * + * @example + * ```typescript + * const main = await Project.from('fs', { root: '.' 
}); + * const staging = await Project.from('state', stagingState); + * const diffs = diff(main, staging); + * // Shows how staging has diverged from main + * ``` + */ +export function diff(a: Project, b: Project): WorkflowDiff[] { + const diffs: WorkflowDiff[] = []; + + // Check all of project A's workflows + for (const workflowA of a.workflows) { + const workflowB = b.getWorkflow(workflowA.id); + + if (!workflowB) { + // workflow exists in A but not in B = removed + diffs.push({ id: workflowA.id, type: 'removed' }); + } else if (workflowA.getVersionHash() !== workflowB.getVersionHash()) { + // workflow exists in both but with different content = changed + diffs.push({ id: workflowA.id, type: 'changed' }); + } + } + + // Check for workflows that were added in B + for (const workflowB of b.workflows) { + if (!a.getWorkflow(workflowB.id)) { + // workflow exists in B but not in A = added + diffs.push({ id: workflowB.id, type: 'added' }); + } + } + + return diffs; +} diff --git a/packages/project/test/fixtures/sample-v1-project.ts b/packages/project/test/fixtures/sample-v1-project.ts index 26ca2e099..dcf222ddc 100644 --- a/packages/project/test/fixtures/sample-v1-project.ts +++ b/packages/project/test/fixtures/sample-v1-project.ts @@ -8,24 +8,24 @@ const state: Provisioner.Project = { concurrency: null, inserted_at: '2025-04-23T11:15:59Z', collections: [], - workflows: [ - { + workflows: { + 'my-workflow': { id: '72ca3eb0-042c-47a0-a2a1-a545ed4a8406', name: 'My Workflow', - edges: [ - { + edges: { + 'trigger->transform-data': { enabled: true, id: 'a9a3adef-b394-4405-814d-3ac4323f4b4b', source_trigger_id: '4a06289c-15aa-4662-8dc6-f0aaacd8a058', condition_type: 'always', target_job_id: '66add020-e6eb-4eec-836b-20008afca816', }, - ], + }, concurrency: null, inserted_at: '2025-04-23T11:19:32Z', updated_at: '2025-04-23T11:19:32Z', - jobs: [ - { + jobs: { + 'transform-data': { id: '66add020-e6eb-4eec-836b-20008afca816', name: 'Transform data', body: 'fn(s => s)', @@ -33,18 +33,18 @@ const state: Provisioner.Project = { project_credential_id: null, keychain_credential_id: null, }, - ], - triggers: [ - { + }, + triggers: { + webhook: { enabled: true, // TODO enabled: false is a bit interesting id: '4a06289c-15aa-4662-8dc6-f0aaacd8a058', type: 'webhook', }, - ], + }, lock_version: 1, deleted_at: null, }, - ], + }, updated_at: '2025-04-23T11:15:59Z', project_credentials: [], scheduled_deletion: null, @@ -58,7 +58,7 @@ const state: Provisioner.Project = { export default state; const withCreds = cloneDeep(state); -Object.assign(withCreds.workflows[0].jobs[0], { +Object.assign(withCreds.workflows['my-workflow'].jobs['transform-data'], { project_credential_id: 'p', keychain_credential_id: 'k', }); diff --git a/packages/project/test/fixtures/sample-v2-project.ts b/packages/project/test/fixtures/sample-v2-project.ts index 05e9be6fb..b8202ecf9 100644 --- a/packages/project/test/fixtures/sample-v2-project.ts +++ b/packages/project/test/fixtures/sample-v2-project.ts @@ -9,7 +9,10 @@ export const json: SerializedProject = { description: 'my lovely project', cli: { version: 2 }, openfn: { uuid: '1234', endpoint: 'https://app.openfn.org' }, - options: { allow_support_access: false }, + options: { allow_support_access: false, env: 'dev', color: 'red' }, + sandbox: { + parentId: 'abcd', + }, workflows: [ { steps: [ @@ -46,6 +49,8 @@ openfn: endpoint: https://app.openfn.org options: allow_support_access: false + env: dev + color: red workflows: - steps: - name: b @@ -69,4 +74,6 @@ workflows: uuid: 1 history: [] start: 
trigger +sandbox: + parentId: abcd `; diff --git a/packages/project/test/merge/merge-project.test.ts b/packages/project/test/merge/merge-project.test.ts index 0a3004f51..63862fd4f 100644 --- a/packages/project/test/merge/merge-project.test.ts +++ b/packages/project/test/merge/merge-project.test.ts @@ -1,7 +1,7 @@ import test from 'ava'; import { randomUUID } from 'node:crypto'; import Project from '../../src'; -import { merge } from '../../src/merge/merge-project'; +import { merge, REPLACE_MERGE } from '../../src/merge/merge-project'; import { join } from 'node:path'; import { generateWorkflow } from '../../src/gen/generator'; import slugify from '../../src/util/slugify'; @@ -84,6 +84,34 @@ test('Preserve the name and UUID of the target project', (t) => { t.is(result.openfn.uuid, main.openfn.uuid); }); +test('replace mode: replace the name and UUID of the target project', (t) => { + const wf = { + steps: [ + { + id: 'x', + name: 'X', + adaptor: 'common', + expression: 'fn(s => s)', + }, + ], + }; + + // step up two copies with UUIDS + const wf_a = assignUUIDs(wf); + const wf_b = assignUUIDs(wf); + + const remote = createProject(wf_a, 'a'); + const local = createProject(wf_b, 'b'); + + // merge staging into main + const result = merge(local, remote, { mode: REPLACE_MERGE }); + const step = result.workflows[0].steps[0]; + + // Ensure that the result has the name and UUID of local + t.is(result.name, 'b'); + t.is(result.openfn.uuid, local.openfn.uuid); +}); + test('merge a simple change between single-step workflows with preserved uuids', (t) => { // create a base workflow const wf = { diff --git a/packages/project/test/parse/from-app-state.test.ts b/packages/project/test/parse/from-app-state.test.ts index 699853cb4..8b5f138e2 100644 --- a/packages/project/test/parse/from-app-state.test.ts +++ b/packages/project/test/parse/from-app-state.test.ts @@ -3,7 +3,7 @@ import fromAppState, { mapEdge, mapWorkflow, } from '../../src/parse/from-app-state'; -import { clone, cloneDeep } from 'lodash-es'; +import { cloneDeep } from 'lodash-es'; import state, { withCreds } from '../fixtures/sample-v1-project'; import { Job } from '@openfn/lexicon'; @@ -54,6 +54,20 @@ test('should create a Project from prov state with collections', (t) => { t.deepEqual(project.collections, []); }); +test('should create a Project from prov state with sandbox stuff', (t) => { + const stateWithSandbox = { + ...state, + color: 'red', + parent_id: 'abc', + env: 'dev', + }; + const project = fromAppState(stateWithSandbox, meta, { format: 'json' }); + + t.is(project.sandbox.parentId, 'abc'); + t.is(project.options.env, 'dev'); + t.is(project.options.color, 'red'); +}); + test('should create a Project from prov state with credentials', (t) => { const project = fromAppState(state, meta); @@ -66,7 +80,7 @@ test('should create a Project from prov state with positions', (t) => { // assign a fake positions object // the provisioner right now doesn't include positions // - but one day it will, and Project needs to be able to sync it - newState.workflows[0].positions = { + newState.workflows['my-workflow'].positions = { x: 1, y: 1, }; @@ -86,10 +100,10 @@ test('should create a Project from prov state with a workflow', (t) => { id: 'my-workflow', name: 'My Workflow', history: [], - start: 'trigger-webhook', + start: 'webhook', steps: [ { - id: 'trigger', + id: 'webhook', type: 'webhook', openfn: { enabled: true, uuid: '4a06289c-15aa-4662-8dc6-f0aaacd8a058' }, next: { @@ -125,12 +139,12 @@ test('should create a Project from prov state with a 
workflow', (t) => { }); test('mapWorkflow: map a simple trigger', (t) => { - const mapped = mapWorkflow(state.workflows[0]); + const mapped = mapWorkflow(state.workflows['my-workflow']); const [trigger] = mapped.steps; t.deepEqual(trigger, { - id: 'trigger', + id: 'webhook', type: 'webhook', next: { 'transform-data': { @@ -148,8 +162,21 @@ test('mapWorkflow: map a simple trigger', (t) => { }); }); +test('mapWorkflow: use a triggers type as its id', (t) => { + const wf = state.workflows['my-workflow']; + + // trigger id in the state is a UUID + t.is(wf.triggers.webhook.id, '4a06289c-15aa-4662-8dc6-f0aaacd8a058'); + + const mapped = mapWorkflow(wf); + const [trigger] = mapped.steps; + + // trigger ID in the Project is the type + t.is(trigger.id, 'webhook'); +}); + test('mapWorkflow: handle openfn meta (uuid, lock_version, deleted_at)', (t) => { - const mapped = mapWorkflow(state.workflows[0]); + const mapped = mapWorkflow(state.workflows['my-workflow']); t.deepEqual(mapped.openfn, { lock_version: 1, @@ -163,7 +190,7 @@ test('mapWorkflow: handle openfn meta (uuid, lock_version, deleted_at)', (t) => // TODO need to test various trigger conditions and states test('mapWorkflow: map a simple job', (t) => { - const mapped = mapWorkflow(state.workflows[0]); + const mapped = mapWorkflow(state.workflows['my-workflow']); const [_trigger, job] = mapped.steps; t.deepEqual(job, { @@ -179,7 +206,7 @@ test('mapWorkflow: map a simple job', (t) => { }); test('mapWorkflow: map a job with keychain credentials onto .openfn', (t) => { - const wf = withCreds.workflows[0]; + const wf = withCreds.workflows['my-workflow']; const mapped = mapWorkflow(wf); const [_trigger, job] = mapped.steps; @@ -202,7 +229,7 @@ test('mapWorkflow: map a job with keychain credentials onto .openfn', (t) => { }); test('mapWorkflow: map a job with projcet credentials onto job.configuration', (t) => { - const wf = withCreds.workflows[0]; + const wf = withCreds.workflows['my-workflow']; const mapped = mapWorkflow(wf); const [_trigger, job] = mapped.steps; @@ -365,8 +392,8 @@ workflows: const project = fromAppState(yaml, meta, { format: 'yaml', }); - console.log(project.workflows[0].steps); - const { next } = project.workflows[0].steps[1]; + console.log(project.workflows['my-workflow'].steps); + const { next } = project.workflows['my-workflow'].steps[1]; console.log({ next }); // make sure that the condition_types get mapped to condition // also make sure that custom conditions work (both ways) diff --git a/packages/project/test/parse/from-fs.test.ts b/packages/project/test/parse/from-fs.test.ts index c5fa400a2..6a0785f52 100644 --- a/packages/project/test/parse/from-fs.test.ts +++ b/packages/project/test/parse/from-fs.test.ts @@ -22,186 +22,6 @@ function mockFile(path: string, content: string | object) { mock(files); } -test.serial('should load workspace config from json', async (t) => { - mockFile( - '/ws/openfn.json', - buildConfig({ - formats: { - openfn: 'json', - project: 'json', - workflow: 'json', - }, - // @ts-ignore ensure we include custom properties - x: 1, - }) - ); - - const project = await parseProject({ root: '/ws' }); - - t.deepEqual(project.config, { - x: 1, - credentials: 'credentials.yaml', - dirs: { projects: '.projects', workflows: 'workflows' }, - formats: { openfn: 'json', project: 'json', workflow: 'json' }, - }); -}); - -test.serial('should load workspace config from yaml', async (t) => { - mockFile( - '/ws/openfn.yaml', - buildConfig({ - formats: { - openfn: 'yaml', - project: 'yaml', - workflow: 'yaml', - }, - // 
@ts-ignore ensure we include custom properties - x: 1, - }) - ); - - const project = await parseProject({ root: '/ws' }); - - t.deepEqual(project.config, { - credentials: 'credentials.yaml', - x: 1, - dirs: { projects: '.projects', workflows: 'workflows' }, - formats: { openfn: 'yaml', project: 'yaml', workflow: 'yaml' }, - }); -}); - -test.serial('should load single workflow in new flat format', async (t) => { - mockFile('/ws/openfn.yaml', buildConfig()); - - mockFile('/ws/workflows/my-workflow/my-workflow.yaml', { - id: 'my-workflow', - name: 'My Workflow', - steps: [ - { - id: 'a', - expression: 'job.js', - }, - ], - start: 'a', - }); - - mockFile('/ws/workflows/my-workflow/job.js', `fn(s => s)`); - - const project = await parseProject({ root: '/ws' }); - - t.is(project.workflows.length, 1); - - const wf = project.getWorkflow('my-workflow'); - t.truthy(wf); - t.is(wf.id, 'my-workflow'); - t.is(wf.name, 'My Workflow'); - t.is(wf.start, 'a'); -}); - -// hmm, maybe I shouldn't support this, because it puts some wierd stuff in the code -// and new CLI will just use the new format -test.serial( - 'should load single workflow in old { workflow, options } format', - async (t) => { - mockFile('/ws/openfn.yaml', buildConfig()); - - mockFile('/ws/workflows/my-workflow/my-workflow.yaml', { - workflow: { - id: 'my-workflow', - name: 'My Workflow', - steps: [ - { - id: 'a', - expression: 'job.js', - }, - ], - }, - options: { - start: 'a', - }, - }); - - mockFile('/ws/workflows/my-workflow/job.js', `fn(s => s)`); - - const project = await parseProject({ root: '/ws' }); - - t.is(project.workflows.length, 1); - - const wf = project.getWorkflow('my-workflow'); - t.truthy(wf); - t.is(wf.id, 'my-workflow'); - t.is(wf.name, 'My Workflow'); - t.is(wf.start, 'a'); - } -); - -test.serial('should load single workflow from json', async (t) => { - mockFile( - '/ws/openfn.yaml', - buildConfig({ - formats: { - workflow: 'json', - }, - }) - ); - - mockFile('/ws/workflows/my-workflow/my-workflow.json', { - id: 'my-workflow', - name: 'My Workflow', - steps: [ - { - id: 'a', - expression: 'job.js', - }, - ], - }); - - mockFile('/ws/workflows/my-workflow/job.js', `fn(s => s)`); - - const project = await parseProject({ root: '/ws' }); - - t.is(project.workflows.length, 1); - - const wf = project.getWorkflow('my-workflow'); - t.truthy(wf); - t.is(wf.id, 'my-workflow'); - t.is(wf.name, 'My Workflow'); -}); - -test.serial('should load single workflow from custom path', async (t) => { - mockFile( - '/ws/openfn.yaml', - buildConfig({ - dirs: { - workflows: 'custom-wfs', - projects: '.projects', - }, - }) - ); - - mockFile('/ws/custom-wfs/my-workflow/my-workflow.yaml', { - id: 'my-workflow', - name: 'My Workflow', - steps: [ - { - id: 'a', - expression: 'job.js', - }, - ], - }); - - mockFile('/ws/custom-wfs/my-workflow/job.js', `fn(s => s)`); - - const project = await parseProject({ root: '/ws' }); - - t.is(project.workflows.length, 1); - - const wf = project.getWorkflow('my-workflow'); - t.truthy(wf); - t.is(wf.id, 'my-workflow'); - t.is(wf.name, 'My Workflow'); -}); - test.serial('should include multiple workflows', async (t) => { mockFile('/ws/openfn.yaml', buildConfig()); diff --git a/packages/project/test/parse/from-project.test.ts b/packages/project/test/parse/from-project.test.ts index 21911f0e4..6a328b1bb 100644 --- a/packages/project/test/parse/from-project.test.ts +++ b/packages/project/test/parse/from-project.test.ts @@ -18,7 +18,8 @@ history_retention_period: null dataclip_retention_period: null concurrency: null 
workflows: - - name: wf1 + wf1: + name: wf1 id: 72ca3eb0-042c-47a0-a2a1-a545ed4a8406 inserted_at: 2025-04-23T11:19:32Z updated_at: 2025-04-23T11:19:32Z @@ -26,7 +27,8 @@ workflows: deleted_at: null concurrency: null jobs: - - name: Transform data + transform-data: + name: Transform data body: | // Check out the Job Writing Guide for help getting started: // https://docs.openfn.org/documentation/jobs/job-writing-guide @@ -34,11 +36,13 @@ workflows: id: 66add020-e6eb-4eec-836b-20008afca816 project_credential_id: null triggers: - - type: webhook + webhook: + type: webhook enabled: true id: 4a06289c-15aa-4662-8dc6-f0aaacd8a058 edges: - - id: a9a3adef-b394-4405-814d-3ac4323f4b4b + webhook->transform-data: + id: a9a3adef-b394-4405-814d-3ac4323f4b4b target_job_id: 66add020-e6eb-4eec-836b-20008afca816 enabled: true source_trigger_id: 4a06289c-15aa-4662-8dc6-f0aaacd8a058 @@ -75,6 +79,9 @@ test('import from a v2 project as JSON', async (t) => { t.is(proj.id, 'my-project'); t.is(proj.name, 'My Project'); t.is(proj.cli.alias, 'main'); + t.is(proj.sandbox.parentId, 'abcd'); + t.is(proj.options.env, 'dev'); + t.is(proj.options.color, 'red'); t.is(proj.openfn!.uuid, '1234'); t.is(proj.openfn!.endpoint, 'https://app.openfn.org'); @@ -132,7 +139,9 @@ test('import from a v2 project as YAML', async (t) => { t.is(proj.cli.alias, 'main'); t.is(proj.openfn!.uuid, '1234'); t.is(proj.openfn!.endpoint, 'https://app.openfn.org'); - // t.is(proj.options.retention_policy, 'retain_all'); + t.is(proj.sandbox.parentId, 'abcd'); + t.is(proj.options.env, 'dev'); + t.is(proj.options.color, 'red'); t.is(proj.workflows.length, 1); diff --git a/packages/project/test/project.test.ts b/packages/project/test/project.test.ts index c62a9e157..67d5dccc8 100644 --- a/packages/project/test/project.test.ts +++ b/packages/project/test/project.test.ts @@ -14,24 +14,24 @@ const state: Provisioner.Project = { concurrency: null, inserted_at: '2025-04-23T11:15:59Z', collections: [], - workflows: [ - { + workflows: { + wf1: { id: '72ca3eb0-042c-47a0-a2a1-a545ed4a8406', name: 'wf1', - edges: [ - { + edges: { + 'webhook->transform-data': { enabled: true, id: 'a9a3adef-b394-4405-814d-3ac4323f4b4b', source_trigger_id: '4a06289c-15aa-4662-8dc6-f0aaacd8a058', condition_type: 'always', target_job_id: '66add020-e6eb-4eec-836b-20008afca816', }, - ], + }, concurrency: null, inserted_at: '2025-04-23T11:19:32Z', updated_at: '2025-04-23T11:19:32Z', - jobs: [ - { + jobs: { + 'transform-data': { id: '66add020-e6eb-4eec-836b-20008afca816', name: 'Transform data', body: 'fn(s => s)', @@ -39,18 +39,18 @@ const state: Provisioner.Project = { project_credential_id: null, keychain_credential_id: null, }, - ], - triggers: [ - { + }, + triggers: { + webhook: { enabled: true, // TODO enabled: false is a bit interesting id: '4a06289c-15aa-4662-8dc6-f0aaacd8a058', type: 'webhook', }, - ], + }, lock_version: 1, deleted_at: null, }, - ], + }, updated_at: '2025-04-23T11:15:59Z', project_credentials: [], scheduled_deletion: null, @@ -112,7 +112,7 @@ test('should default alias to "main"', (t) => { t.is(project.alias, 'main'); }); -test('should convert a state file to a project and back again', async (t) => { +test.only('should convert a state file to a project and back again', async (t) => { const meta = { endpoint: 'app.openfn.org', env: 'test', diff --git a/packages/project/test/serialize/to-app-state.test.ts b/packages/project/test/serialize/to-app-state.test.ts index 68a022140..de6e4117e 100644 --- a/packages/project/test/serialize/to-app-state.test.ts +++ 
b/packages/project/test/serialize/to-app-state.test.ts @@ -1,9 +1,10 @@ import test from 'ava'; -import type { Provisioner } from '@openfn/lexicon/lightning'; import { Project } from '../../src/Project'; import toAppState from '../../src/serialize/to-app-state'; import { generateProject } from '../../src/gen/generator'; +import type { Provisioner } from '@openfn/lexicon/lightning'; + const state: Provisioner.Project = { id: 'e16c5f09-f0cb-4ba7-a4c2-73fcb2f29d00', name: 'aaa', @@ -11,24 +12,24 @@ const state: Provisioner.Project = { concurrency: null, inserted_at: '2025-04-23T11:15:59Z', collections: [], - workflows: [ - { + workflows: { + wf1: { id: '72ca3eb0-042c-47a0-a2a1-a545ed4a8406', name: 'wf1', - edges: [ - { + edges: { + 'trigger->transform-data': { enabled: true, id: 'a9a3adef-b394-4405-814d-3ac4323f4b4b', source_trigger_id: '4a06289c-15aa-4662-8dc6-f0aaacd8a058', condition_type: 'always', target_job_id: '66add020-e6eb-4eec-836b-20008afca816', }, - ], + }, concurrency: null, inserted_at: '2025-04-23T11:19:32Z', updated_at: '2025-04-23T11:19:32Z', - jobs: [ - { + jobs: { + 'transform-data': { id: '66add020-e6eb-4eec-836b-20008afca816', name: 'Transform data', body: 'fn(s => s)', @@ -36,18 +37,18 @@ const state: Provisioner.Project = { project_credential_id: '', keychain_credential_id: null, }, - ], - triggers: [ - { + }, + triggers: { + webhook: { enabled: true, // TODO enabled: false is a bit interesting id: '4a06289c-15aa-4662-8dc6-f0aaacd8a058', type: 'webhook', }, - ], + }, lock_version: 1, deleted_at: null, }, - ], + }, updated_at: '2025-04-23T11:15:59Z', project_credentials: [''], scheduled_deletion: null, @@ -60,7 +61,7 @@ const state: Provisioner.Project = { test('should set defaults for keys that Lightning needs', (t) => { // set up a very minimal project - const data = { + const data: any = { id: 'my-project', openfn: { uuid: '', @@ -68,6 +69,7 @@ test('should set defaults for keys that Lightning needs', (t) => { workflows: [ { id: 'wf', + name: 'my workflow', openfn: { uuid: 0, }, @@ -107,29 +109,30 @@ test('should set defaults for keys that Lightning needs', (t) => { t.deepEqual(defaultState, { id: '', project_credentials: [], - workflows: [ - { + workflows: { + 'my-workflow': { id: 0, - jobs: [ - { + name: 'my workflow', + jobs: { + step: { body: '.', id: 2, project_credential_id: null, keychain_credential_id: null, }, - ], - triggers: [{ type: 'webhook', id: 1 }], - edges: [ - { + }, + triggers: { webhook: { type: 'webhook', id: 1 } }, + edges: { + ['trigger->step']: { id: '', target_job_id: 2, enabled: true, source_trigger_id: 1, }, - ], + }, lock_version: null, }, - ], + }, }); }); @@ -139,6 +142,7 @@ test('should serialize workflow positions', (t) => { workflows: [ { id: 'wf', + name: 'wf', openfn: { positions: { step: { @@ -170,7 +174,7 @@ test('should serialize workflow positions', (t) => { }); const state = toAppState(project); - t.deepEqual(state.workflows[0].positions, { + t.deepEqual(state.workflows['wf'].positions, { step: { x: 1, y: 1, @@ -188,6 +192,7 @@ test('should write openfn keys to objects', (t) => { workflows: [ { id: 'wf', + name: 'wf', openfn, steps: [ { @@ -217,10 +222,10 @@ test('should write openfn keys to objects', (t) => { const state = toAppState(project); t.is(state.x, 1); - t.is(state.workflows[0].x, 1); - t.is(state.workflows[0].jobs[0].x, 1); - t.is(state.workflows[0].triggers[0].x, 1); - t.is(state.workflows[0].edges[0].x, 1); + t.is(state.workflows['wf'].x, 1); + t.is(state.workflows['wf'].jobs.step.x, 1); + 
t.is(state.workflows['wf'].triggers.webhook.x, 1); + t.is(state.workflows['wf'].edges['trigger->step'].x, 1); }); test('should handle credentials', (t) => { @@ -229,6 +234,7 @@ test('should handle credentials', (t) => { workflows: [ { id: 'wf', + name: 'wf', steps: [ { id: 'trigger', @@ -251,9 +257,9 @@ test('should handle credentials', (t) => { }; const state = toAppState(new Project(data), { format: 'json' }); - const [job] = state.workflows[0].jobs; - t.is(job.keychain_credential_id, 'k'); - t.is(job.project_credential_id, 'p'); + const { step } = state.workflows['wf'].jobs; + t.is(step.keychain_credential_id, 'k'); + t.is(step.project_credential_id, 'p'); }); test('should ignore workflow start keys', (t) => { @@ -262,6 +268,7 @@ test('should ignore workflow start keys', (t) => { workflows: [ { id: 'wf', + name: 'wf', start: 'step', steps: [ { @@ -285,13 +292,13 @@ test('should ignore workflow start keys', (t) => { }; const state = toAppState(new Project(data), { format: 'json' }); - t.falsy(state.workflows[0].start); + t.falsy(state.workflows['wf'].start); }); test.todo('handle edge labels'); test('serialize steps and trigger in alphabetical order', (t) => { - const wf = ` + const wf = `@name wf z-b y-x c-p @@ -300,17 +307,17 @@ c-p const state = toAppState(project, { format: 'json' }); - const jobs = state.workflows[0].jobs.map((j) => j.name); + const jobs = Object.keys(state.workflows['wf'].jobs); // short be sorted by name t.deepEqual(jobs, ['b', 'c', 'p', 'x', 'y', 'z']); - const edges = state.workflows[0].edges.map((e) => e.id); + const edges = Object.keys(state.workflows['wf'].edges); // edges are sorted by uuid - t.deepEqual(edges, [3, 6, 9]); + t.deepEqual(edges, ['z->b', 'y->x', 'c->p']); }); test('should handle edge conditions', (t) => { - const wf = ` + const wf = `@name wf a-(condition=always)-b a-(condition="on_job_success")-c a-(condition="on_job_failure")-d @@ -322,7 +329,13 @@ a-(condition=x)-f }); const state = toAppState(project, { format: 'json' }); - const [a_b, a_c, a_d, a_e, a_f] = state.workflows[0].edges; + const { + 'a->b': a_b, + 'a->c': a_c, + 'a->d': a_d, + 'a->e': a_e, + 'a->f': a_f, + } = state.workflows.wf.edges; t.is(a_b.condition_type, 'always'); t.falsy(a_b.condition_expression); diff --git a/packages/project/test/serialize/to-fs.test.ts b/packages/project/test/serialize/to-fs.test.ts index 12ce41f43..63c29c4d2 100644 --- a/packages/project/test/serialize/to-fs.test.ts +++ b/packages/project/test/serialize/to-fs.test.ts @@ -222,6 +222,7 @@ test('toFs: extract a project with 1 workflow and 1 step', (t) => { }, project: { id: 'my-project', + name: 'My Project', }, }); diff --git a/packages/project/test/serialize/to-project.test.ts b/packages/project/test/serialize/to-project.test.ts index 299356cb4..76eeced78 100644 --- a/packages/project/test/serialize/to-project.test.ts +++ b/packages/project/test/serialize/to-project.test.ts @@ -1,14 +1,16 @@ +import * as l from '@openfn/lexicon'; import test from 'ava'; import { Project } from '../../src/Project'; import generateWorkflow, { generateProject } from '../../src/gen/generator'; import * as v2 from '../fixtures/sample-v2-project'; -const createProject = () => { +const createProject = (props: Partial = {}) => { const proj = new Project({ id: 'my-project', name: 'My Project', description: 'my lovely project', + sandbox: { parentId: 'abcd' }, cli: { alias: 'main', }, @@ -18,6 +20,8 @@ const createProject = () => { }, options: { allow_support_access: false, + env: 'dev', + color: 'red', }, workflows: [ 
generateWorkflow( @@ -28,6 +32,7 @@ const createProject = () => { } ), ], + ...props, }); // hack delete proj.workflows[0].steps[0].name; @@ -61,12 +66,28 @@ test('should serialize to JSON format v2 project', (t) => { // should load a project and serialize it back test('should exclude null values in yaml', (t) => { - const proj = createProject(); + const proj = createProject({ + options: { + concurrency: null, + allow_support_access: false, + env: 'dev', + color: 'red', + }, + }); // force some null values into the workflow structure - proj.workflows[0].openfn.concurrency = null; proj.workflows[0].steps[1].openfn.keychain_credential_id = null; const yaml = proj.serialize('project', { format: 'yaml' }); t.deepEqual(yaml, v2.yaml); }); + +test('should include sandboxy metadata', (t) => { + const proj = createProject({}); + + const json = proj.serialize('project', { format: 'json' }); + + t.is(json.sandbox.parentId, 'abcd'); + t.is(json.options.env, 'dev'); + t.is(json.options.color, 'red'); +}); diff --git a/packages/project/test/util/config.test.ts b/packages/project/test/util/config.test.ts index 82cde69a4..db6837c09 100644 --- a/packages/project/test/util/config.test.ts +++ b/packages/project/test/util/config.test.ts @@ -175,6 +175,7 @@ test('generate openfn.yaml', (t) => { `project: uuid: 1234 id: my-project + name: My Project workspace: credentials: credentials.yaml formats: @@ -189,3 +190,15 @@ workspace: }); test.todo('generate openfn.json'); + +test('include project name', (t) => { + const proj = new Project({ + id: 'my-project', + name: 'My Project', + }); + const result = extractConfig(proj, 'json'); + const json = JSON.parse(result.content); + t.is(json.project.name, 'My Project'); +}); + +test.todo('include parent project name'); diff --git a/packages/project/test/util/project-diff.test.ts b/packages/project/test/util/project-diff.test.ts new file mode 100644 index 000000000..82d668113 --- /dev/null +++ b/packages/project/test/util/project-diff.test.ts @@ -0,0 +1,169 @@ +import test from 'ava'; +import { Project } from '../../src/Project'; +import { diff } from '../../src/util/project-diff'; +import generateWorkflow from '../../src/gen/generator'; + +test('diff: should return empty array for identical projects', (t) => { + const wf = generateWorkflow('trigger-x'); + + const projectA = new Project({ + name: 'project-a', + workflows: [wf], + }); + + const projectB = new Project({ + name: 'project-b', + workflows: [wf], + }); + + const diffs = diff(projectA, projectB); + + t.is(diffs.length, 0); +}); + +test('diff: should detect changed workflow', (t) => { + const wfA = generateWorkflow('trigger-x'); + const wfB = generateWorkflow('trigger-y'); + // Make sure they have the same id but different content + wfB.id = wfA.id; + + const projectA = new Project({ + name: 'project-a', + workflows: [wfA], + }); + + const projectB = new Project({ + name: 'project-b', + workflows: [wfB], + }); + + const diffs = diff(projectA, projectB); + + t.is(diffs.length, 1); + t.deepEqual(diffs[0], { id: wfA.id, type: 'changed' }); +}); + +test('diff: should detect added workflow', (t) => { + const wf1 = generateWorkflow('@id a trigger-x'); + const wf2 = generateWorkflow('@id b trigger-y'); + + const projectA = new Project({ + name: 'a', + workflows: [wf1], + }); + + const projectB = new Project({ + name: 'b', + workflows: [wf1, wf2], + }); + + const diffs = diff(projectA, projectB); + + t.is(diffs.length, 1); + t.deepEqual(diffs[0], { id: wf2.id, type: 'added' }); +}); + +test('diff: should detect removed 
workflow', (t) => { + const wf1 = generateWorkflow('@id a trigger-x'); + const wf2 = generateWorkflow('@id b trigger-y'); + + const projectA = new Project({ + name: 'a', + workflows: [wf1, wf2], + }); + + const projectB = new Project({ + name: 'b', + workflows: [wf1], + }); + + const diffs = diff(projectA, projectB); + + t.is(diffs.length, 1); + t.deepEqual(diffs[0], { id: wf2.id, type: 'removed' }); +}); + +test('diff: should detect multiple changes at once', (t) => { + const wf1 = generateWorkflow('@id a trigger-x'); + const wf2 = generateWorkflow('@id b trigger-y'); + const wf3 = generateWorkflow('@id c trigger-z'); + const wf4 = generateWorkflow('@id d trigger-w'); + + // wf2 will be changed in projectB + const wf2Changed = generateWorkflow('@id b trigger-different'); + + const projectA = new Project({ + name: 'a', + workflows: [wf1, wf2, wf3], // has a, b, c + }); + + const projectB = new Project({ + name: 'b', + workflows: [wf1, wf2Changed, wf4], // has a, b (changed), d (new) + }); + + const diffs = diff(projectA, projectB); + + t.is(diffs.length, 3); + t.deepEqual( + diffs.find((d) => d.id === 'b'), + { id: 'b', type: 'changed' } + ); + t.deepEqual( + diffs.find((d) => d.id === 'c'), + { id: 'c', type: 'removed' } + ); + t.deepEqual( + diffs.find((d) => d.id === 'd'), + { id: 'd', type: 'added' } + ); +}); + +test('diff: should detect multiple workflows with same type of change', (t) => { + const wf1 = generateWorkflow('@id a trigger-x'); + const wf2 = generateWorkflow('@id b trigger-y'); + const wf3 = generateWorkflow('@id c trigger-z'); + + const wf1Changed = generateWorkflow('@id a trigger-X'); + const wf2Changed = generateWorkflow('@id b trigger-Y'); + + const projectA = new Project({ + name: 'project-a', + workflows: [wf1, wf2, wf3], + }); + + const projectB = new Project({ + name: 'project-b', + workflows: [wf1Changed, wf2Changed, wf3], + }); + + const diffs = diff(projectA, projectB); + + t.is(diffs.length, 2); + t.deepEqual(diffs[0], { id: 'a', type: 'changed' }); + t.deepEqual(diffs[1], { id: 'b', type: 'changed' }); +}); + +test('diff: should detect change when workflow has same ID but different name', (t) => { + const wf1 = generateWorkflow('@id my-workflow trigger-x'); + const wf2 = generateWorkflow('@id my-workflow trigger-y'); + + // Ensure they have the same ID but different content + wf1.name = 'Original Name'; + wf2.name = 'Different Name'; + + const projectA = new Project({ + name: 'project-a', + workflows: [wf1], + }); + + const projectB = new Project({ + name: 'project-b', + workflows: [wf2], + }); + + const diffs = diff(projectA, projectB); + + t.is(diffs.length, 1); + t.deepEqual(diffs[0], { id: 'my-workflow', type: 'changed' }); +});
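
The new `project-diff.test.ts` suite above pins down the behaviour expected of `diff(projectA, projectB)`: workflows are matched by `id`, and the result is a flat list of `{ id, type }` entries where `type` is `'added'`, `'removed'`, or `'changed'`. The actual implementation in `src/util/project-diff.ts` is not part of this diff; the following is only a minimal sketch of the semantics the tests imply (the naive `JSON.stringify` comparison and the `WorkflowDiff` type name are assumptions for illustration, not the library's API).

```ts
import type { Project } from '../../src/Project';

// Hypothetical shape inferred from the assertions in project-diff.test.ts
type WorkflowDiff = { id: string; type: 'added' | 'removed' | 'changed' };

// Sketch only: the real src/util/project-diff.ts may compare workflows differently
export function diff(a: Project, b: Project): WorkflowDiff[] {
  const index = (p: Project) => {
    const map = new Map<string, unknown>();
    for (const wf of p.workflows as any[]) map.set(wf.id, wf);
    return map;
  };

  const aWorkflows = index(a);
  const bWorkflows = index(b);
  const result: WorkflowDiff[] = [];

  // Workflows known to A: either removed from B or changed in B
  for (const [id, wfA] of aWorkflows) {
    const wfB = bWorkflows.get(id);
    if (!wfB) {
      result.push({ id, type: 'removed' });
    } else if (JSON.stringify(wfA) !== JSON.stringify(wfB)) {
      // naive deep comparison for the sketch; name-only changes count as 'changed'
      result.push({ id, type: 'changed' });
    }
  }

  // Workflows only present in B are additions
  for (const id of bWorkflows.keys()) {
    if (!aWorkflows.has(id)) {
      result.push({ id, type: 'added' });
    }
  }

  return result;
}
```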