diff --git a/.changeset/big-moments-grin.md b/.changeset/big-moments-grin.md new file mode 100644 index 000000000..e132842d6 --- /dev/null +++ b/.changeset/big-moments-grin.md @@ -0,0 +1,6 @@ +--- +'@openfn/lexicon': minor +'@openfn/project': minor +--- + +Update v1 provisioner project structure (workflows, jobs, edges and triggers as record, not array) diff --git a/packages/cli/src/commands.ts b/packages/cli/src/commands.ts index b0cfa0880..9f56545c5 100644 --- a/packages/cli/src/commands.ts +++ b/packages/cli/src/commands.ts @@ -9,7 +9,7 @@ import docgen from './docgen/handler'; import docs from './docs/handler'; import metadata from './metadata/handler'; import pull from './pull/handler'; -import * as projects from './projects/handler'; +import * as projects from './projects'; import * as repo from './repo/handler'; import createLogger, { CLI, Logger } from './util/logger'; @@ -23,28 +23,29 @@ import { CLIError } from './errors'; export type CommandList = | 'apollo' - | 'compile' | 'collections-get' - | 'collections-set' | 'collections-remove' + | 'collections-set' + | 'compile' | 'deploy' | 'docgen' | 'docs' | 'execute' | 'metadata' - | 'pull' - | 'projects' + | 'project-checkout' + | 'project-deploy' + | 'project-fetch' + | 'project-list' + | 'project-merge' + | 'project-pull' + | 'project-version' | 'project' + | 'projects' + | 'pull' | 'repo-clean' | 'repo-install' | 'repo-list' | 'repo-pwd' - | 'project-pull' - | 'project-list' - | 'project-version' - | 'project-merge' - | 'project-checkout' - | 'project-fetch' | 'test' | 'version'; @@ -67,6 +68,7 @@ const handlers = { ['repo-install']: repo.install, ['repo-pwd']: repo.pwd, ['repo-list']: repo.list, + ['project-deploy']: projects.deploy, ['project-pull']: projects.pull, ['project-list']: projects.list, ['project-version']: projects.version, diff --git a/packages/cli/src/deploy/beta.ts b/packages/cli/src/deploy/beta.ts deleted file mode 100644 index 834d37039..000000000 --- a/packages/cli/src/deploy/beta.ts 
+++ /dev/null @@ -1,42 +0,0 @@ -// beta v2 version of CLI deploy - -import Project from '@openfn/project'; -import { DeployConfig, deployProject } from '@openfn/deploy'; -import type { Logger } from '../util/logger'; -import { Opts } from '../options'; -import { loadAppAuthConfig } from '../projects/util'; - -export type DeployOptionsBeta = Required< - Pick< - Opts, - 'beta' | 'command' | 'log' | 'logJson' | 'apiKey' | 'endpoint' | 'path' - > ->; - -export async function handler(options: DeployOptionsBeta, logger: Logger) { - const config = loadAppAuthConfig(options, logger); - - // TMP use options.path to set the directory for now - // We'll need to manage this a bit better - // TODO this is fixed on another branch - const project = await Project.from('fs', { - root: (options as any).workspace || '.', - }); - // TODO: work out if there's any diff - - // generate state for the provisioner - const state = project.serialize('state', { format: 'json' }); - - logger.debug('Converted local project to app state:'); - logger.debug(JSON.stringify(state, null, 2)); - - // TODO not totally sold on endpoint handling right now - config.endpoint ??= project.openfn?.endpoint!; - - logger.info('Sending project to app...'); - - // TODO do I really want to use this deploy function? Is it suitable? 
- await deployProject(config as DeployConfig, state); - - logger.success('Updated project at', config.endpoint); -} diff --git a/packages/cli/src/deploy/handler.ts b/packages/cli/src/deploy/handler.ts index f5fe682f7..c61b70ba9 100644 --- a/packages/cli/src/deploy/handler.ts +++ b/packages/cli/src/deploy/handler.ts @@ -7,7 +7,7 @@ import { } from '@openfn/deploy'; import type { Logger } from '../util/logger'; import { DeployOptions } from './command'; -import * as beta from './beta'; +import * as beta from '../projects/deploy'; export type DeployFn = typeof deploy; diff --git a/packages/cli/src/projects/command.ts b/packages/cli/src/projects/command.ts index 258ce0f21..8fe17a514 100644 --- a/packages/cli/src/projects/command.ts +++ b/packages/cli/src/projects/command.ts @@ -4,6 +4,7 @@ import merge from './merge'; import checkout from './checkout'; import fetch from './fetch'; import { command as pull } from './pull'; +import { command as deploy } from './deploy'; import type yargs from 'yargs'; @@ -15,6 +16,7 @@ export const projectsCommand = { builder: (yargs: yargs.Argv) => yargs .command(pull) + .command(deploy) .command(list) .command(version) .command(merge) diff --git a/packages/cli/src/projects/deploy.ts b/packages/cli/src/projects/deploy.ts new file mode 100644 index 000000000..f412d692a --- /dev/null +++ b/packages/cli/src/projects/deploy.ts @@ -0,0 +1,278 @@ +// beta v2 version of CLI deploy + +/** + * New plan for great glory + * + * - from('fs') does NOT take project file into account + * - deploy must first fetch (and ensure no conflcits) + * - deploy must then load the project from disk + * - deploy must then merge into that project + * - then call provisioner + * - finally write to disk + * + * + * PLUS: diff summary (changed workflows and steps) + * PLUS: confirm + * PLUS: dry run + * + * + * + * One possible probllem for deploy + * + * The idea is we fetch the latest server version, + * write that to disk, merge our changes, and push + * + * But 
what if our project file is ahead of the server? A fetch + * will conflict and we don't want to throw. + * + * The project may be ahead because: a), we checked out another branch + * and stashed changes, b) we can ran some kind of reconcilation/merge, + * c) we did a manual export (take my fs and write it to the project) + * + * So basically when fetching, we need to check for divergence in history. + * When fetching, for each workflow, we need to decide whether to keep or reject the + * server version based on the history. + * + * + * + * This is super complex and we're getting into merge territory + * First priority is: if there's a problem (that's a super difficult thing!) warn the user + * Second priority is: help the user resolve it + * + * + * The local project files are giving me a headache. But we should be strict and say: + * the project is ALWAYS a representation of the remote. It is invalid for that project + * to represent the local system + * + * So this idea that I can "save" the local to the project file is wrong + * The idea thatwhen I checkout, I "stash" to a project file is wrong + * + * I should be able to export a project to any arbitrary file, yes + * And when checking out and there are conflicts, I should be able to create a duplicate + * file to save my changes without git. + * I think that means checkout errors (it detects changes will be lost), but you have the option to + * stash a temporary local project to be checkedout later + * + * + * This clarify and strictness will I think really help + * + * So: the local project is NEVER ahead of the server + * (but what if the user edited it and it is? 
I think the system ignores it and that's just a force push
+ let remoteProject: Project; + try { + remoteProject = await fetch( + { + ...options, + // Prefer the UUID since it's most specific + project: localProject.uuid ?? localProject.id, + }, + logger + ); + logger.success('Downloaded latest version of project at ', config.endpoint); + } catch (e) { + console.log(e); + // If fetch failed because of compatiblity, what do we do? + // + // Basically we failed to write to the local project file + // If -f is true, do we: + // a) force-fetch the latest project + // b) or force merge into the old project, and then force push? + // + // The file system may be in a real mess if fs, project and app are all diverged! + // So I think we: + // Log an error: the server has diverged from your local copy + // Run fetch to resolve the conflict (it'll throw too!) + // Pass -f to ignore your local project and pull the latest app changes + // (changes shouldn't be lost here, because its the file system that's kind) + // + // Actually, the FS is king here. + // + // What if: + // Locally I've changed workflow A + // Remove has changed workflow B + // I basically want to keep my workflow A changes and keep the workflow B changes + // But if we force, we'll force our local workflow into the project, overriding it + // Gods its complicated + // What I think you actually want to do is: + // force pull the remote version + // merge only your changed workflows onto the remote + // but merge doesn't work like that + // So either I need merge to be able to merge the fs with a project (sort of like an expand-and-merge) + // Or deploy should accept a list of workflows (only apply these workflows) + // The problem with the deploy is that the local fs will still be out of date + // + // What about openfn project reconcile + // This will fetch the remote project + // check it out into your fs + // any changed workflows you'll be promoted to: + // - keep local + // - keep app + // - keep both + // if keep both, two folders will be created. 
The user must manually merge + // this leaves you with a file system that can either be merged, deployed or exported + } + + // TODO warn if the remote UUID is different to the local UUID + // That suggests you're doing something wrong! + // force will suppress + + const diffs = reportDiff(remoteProject, localProject, logger); + if (!diffs.length) { + logger.success('Nothing to deploy'); + return; + } + + // Ensure there's no divergence + if (!localProject.canMergeInto(remoteProject)) { + if (!options.force) { + } + } + + logger.info( + 'Remote project has not diverged from local project - it is safe to deploy 🎉' + ); + + // TODO I think we now gotta merge local into the remote, because + // when we deploy we want to keep all the remote metadata + + // TODO the only difficulty I see with this is: what if the user makes + // a project change locally? It'll a) diverge and b) get ignored + // So that needs thinking about + + logger.info('Merging changes into remote project'); + const merged = Project.merge(localProject, remoteProject, { force: true }); + // generate state for the provisioner + const state = merged.serialize('state', { + format: 'json', + }) as Provisioner.Project_v1; + + // // // hack! needs fixing + // state.workflows['turtle-power'].lock_version = + // remoteProject.workflows[0].openfn?.lock_version; + + // TODO only do this if asked + // or maybe write it to output with -o? 
+ // maybe we can write state.app, state.local and state.result + // this is heavy debug stuff + logger.debug('Converted merged local project to app state:'); + logger.debug(JSON.stringify(state, null, 2)); + + // TODO not totally sold on endpoint handling right now + config.endpoint ??= localProject.openfn?.endpoint!; + + logger.info('Sending project to app...'); + + await deployProject(config.endpoint, config.apiKey, state, logger); + + logger.success('Updated project at', config.endpoint); +} + +export const reportDiff = (local: Project, remote: Project, logger: Logger) => { + const diffs = remote.diff(local); + + if (diffs.length === 0) { + logger.info('No workflow changes detected'); + return diffs; + } + + const added = diffs.filter((d) => d.type === 'added'); + const changed = diffs.filter((d) => d.type === 'changed'); + const removed = diffs.filter((d) => d.type === 'removed'); + + if (added.length > 0) { + logger.info('Workflows added:'); + for (const diff of added) { + logger.info(` - ${diff.id}`); + } + } + + if (changed.length > 0) { + logger.info('Workflows modified:'); + for (const diff of changed) { + logger.info(` - ${diff.id}`); + } + } + + if (removed.length > 0) { + logger.info('Workflows removed:'); + for (const diff of removed) { + logger.info(` - ${diff.id}`); + } + } + + return diffs; +}; diff --git a/packages/cli/src/projects/handler.ts b/packages/cli/src/projects/index.ts similarity index 85% rename from packages/cli/src/projects/handler.ts rename to packages/cli/src/projects/index.ts index 27b33100c..d9982fecf 100644 --- a/packages/cli/src/projects/handler.ts +++ b/packages/cli/src/projects/index.ts @@ -4,3 +4,4 @@ export { handler as merge } from './merge'; export { handler as checkout } from './checkout'; export { handler as fetch } from './fetch'; export { handler as pull } from './pull'; +export { handler as deploy } from './deploy'; diff --git a/packages/cli/src/projects/pull.ts b/packages/cli/src/projects/pull.ts index 
12bf8e6d8..8943c0021 100644 --- a/packages/cli/src/projects/pull.ts +++ b/packages/cli/src/projects/pull.ts @@ -10,13 +10,14 @@ import type { Opts } from './options'; export type PullOptions = Pick< Opts, - | 'beta' | 'command' + | 'alias' + | 'workspace' + | 'apiKey' + | 'endpoint' | 'log' | 'logJson' | 'statePath' - | 'projectPath' - | 'configPath' | 'project' | 'confirm' | 'snapshots' @@ -37,7 +38,6 @@ const options = [ description: 'path to output the project to', }), o.logJson, - o.projectPath, o.snapshots, o.path, o.force, diff --git a/packages/cli/src/projects/util.ts b/packages/cli/src/projects/util.ts index 503d2076a..88d4f1139 100644 --- a/packages/cli/src/projects/util.ts +++ b/packages/cli/src/projects/util.ts @@ -82,6 +82,7 @@ export const getLightningUrl = ( return new URL(`/api/provision/${path}?${params.toString()}`, endpoint); }; +// TODO move to client.ts export async function fetchProject( endpoint: string, apiKey: string, @@ -123,6 +124,43 @@ export async function fetchProject( } } +export async function deployProject( + endpoint: string, + apiKey: string, + state: Provisioner.Project_v1, + logger?: Logger +): Promise<{ data: Provisioner.Project_v1 }> { + try { + const url = getLightningUrl(endpoint); + console.log(url); + const response = await fetch(url, { + method: 'POST', + headers: { + Authorization: `Bearer ${apiKey}`, + 'Content-Type': 'application/json', + }, + body: JSON.stringify(state), + }); + console.log(response); + + if (!response.ok) { + const body = await response.json(); + + logger?.debug('Failed to deploy project: response'); + logger?.debug(JSON.stringify(body, null, 2)); + throw new CLIError( + `Failed to deploy project ${state.name}: ${response.status}` + ); + } + + return response.json(); + } catch (error: any) { + handleCommonErrors({ endpoint, apiKey }, error); + + throw error; + } +} + function handleCommonErrors(config: AuthOptions, error: any) { if (error.cause?.code === 'ECONNREFUSED') { throw new DeployError( diff 
--git a/packages/cli/test/projects/deploy.test.ts b/packages/cli/test/projects/deploy.test.ts new file mode 100644 index 000000000..8ba1ff699 --- /dev/null +++ b/packages/cli/test/projects/deploy.test.ts @@ -0,0 +1,154 @@ +import test from 'ava'; +import Project, { generateWorkflow } from '@openfn/project'; +import { createMockLogger } from '@openfn/logger'; +import { reportDiff } from '../../src/projects/deploy'; + +const logger = createMockLogger(undefined, { level: 'debug' }); + +// what will deploy tests look like? + +// deploy a project for the first time (this doesn't work though?) + +// deploy a change to a project + +// deploy a change to a project but fetch latest first + +// throw when trying to deploy to a diverged remote project + +// force deploy an incompatible project + +// don't post the final version if dry-run is set + +// TODO diff + confirm + +test('reportDiff: should report no changes for identical projects', (t) => { + const wf = generateWorkflow('@id a trigger-x'); + + const local = new Project({ + name: 'local', + workflows: [wf], + }); + + const remote = new Project({ + name: 'remote', + workflows: [wf], + }); + + const diffs = reportDiff(local, remote, logger); + t.is(diffs.length, 0); + + const { message, level } = logger._parse(logger._last); + t.is(level, 'info'); + t.is(message, 'No workflow changes detected'); +}); + +test('reportDiff: should report changed workflow', (t) => { + const wfRemote = generateWorkflow('@id a trigger-x'); + const wfLocal = generateWorkflow('@id a trigger-y'); + + const local = new Project({ + name: 'local', + workflows: [wfLocal], + }); + + const remote = new Project({ + name: 'remote', + workflows: [wfRemote], + }); + + const diffs = reportDiff(local, remote, logger); + t.is(diffs.length, 1); + t.deepEqual(diffs[0], { id: 'a', type: 'changed' }); + + const messages = logger._history.map((h) => logger._parse(h).message); + t.true(messages.includes('Workflows modified:')); + t.true(messages.includes(' - a')); 
+}); + +test('reportDiff: should report added workflow', (t) => { + const wf1 = generateWorkflow('@id a trigger-x'); + const wf2 = generateWorkflow('@id b trigger-y'); + + const local = new Project({ + name: 'local', + workflows: [wf1, wf2], + }); + + const remote = new Project({ + name: 'remote', + workflows: [wf1], + }); + + const diffs = reportDiff(local, remote, logger); + t.is(diffs.length, 1); + t.deepEqual(diffs[0], { id: 'b', type: 'added' }); + + const messages = logger._history.map((h) => logger._parse(h).message); + t.true(messages.includes('Workflows added:')); + t.true(messages.includes(' - b')); +}); + +test('reportDiff: should report removed workflow', (t) => { + const wf1 = generateWorkflow('@id a trigger-x'); + const wf2 = generateWorkflow('@id b trigger-y'); + + const local = new Project({ + name: 'local', + workflows: [wf1], + }); + + const remote = new Project({ + name: 'remote', + workflows: [wf1, wf2], + }); + + const diffs = reportDiff(local, remote, logger); + t.is(diffs.length, 1); + t.deepEqual(diffs[0], { id: 'b', type: 'removed' }); + + const messages = logger._history.map((h) => logger._parse(h).message); + t.true(messages.includes('Workflows removed:')); + t.true(messages.includes(' - b')); +}); + +test('reportDiff: should report mix of added, changed, and removed workflows', (t) => { + const wf1 = generateWorkflow('@id a trigger-x'); + const wf2Remote = generateWorkflow('@id b trigger-y'); + const wf2Local = generateWorkflow('@id b trigger-different'); + const wf3 = generateWorkflow('@id c trigger-z'); + const wf4 = generateWorkflow('@id d trigger-w'); + + const local = new Project({ + name: 'local', + workflows: [wf1, wf2Local, wf4], // has a, b (changed), d (new) + }); + + const remote = new Project({ + name: 'remote', + workflows: [wf1, wf2Remote, wf3], // has a, b, c + }); + + const diffs = reportDiff(local, remote, logger); + t.is(diffs.length, 3); + + t.deepEqual( + diffs.find((d) => d.id === 'b'), + { id: 'b', type: 'changed' } 
+ ); + t.deepEqual( + diffs.find((d) => d.id === 'c'), + { id: 'c', type: 'removed' } + ); + t.deepEqual( + diffs.find((d) => d.id === 'd'), + { id: 'd', type: 'added' } + ); + + const messages = logger._history.map((h) => logger._parse(h).message); + t.true(messages.includes('Workflows added:')); + t.true(messages.includes(' - d')); + t.true(messages.includes('Workflows modified:')); + t.true(messages.includes(' - b')); + t.true(messages.includes('Workflows removed:')); + t.true(messages.includes(' - c')); +}); diff --git a/packages/lexicon/lightning.d.ts b/packages/lexicon/lightning.d.ts index 29439d464..4232b1883 100644 --- a/packages/lexicon/lightning.d.ts +++ b/packages/lexicon/lightning.d.ts @@ -228,7 +228,7 @@ export namespace Provisioner { name: string; description: string | null; - workflows: Workflow[]; + workflows: Record; concurrency?: any; // TODO // TODO typing isn't quite right here either @@ -256,9 +256,9 @@ export namespace Provisioner { export interface Workflow { id: string; name: string; - jobs: Job[]; - triggers: Trigger[]; - edges: Edge[]; + jobs: Record; + triggers: Record; + edges: Record; delete?: boolean; project_id?: string; diff --git a/packages/project/README.md b/packages/project/README.md index 954a016e2..6d0d6d33e 100644 --- a/packages/project/README.md +++ b/packages/project/README.md @@ -8,6 +8,18 @@ A single Project can be Checked Out to disk at a time, meaning its source workfl A Workspace is a set of related Projects , including a Project and its associated Sandboxes, or a Project deployed to apps in multiple web domains +## Structure and Artifects + +openfn.yaml + +project file + +sort of a mix of project.yaml, state.json and config.json + +This is strictly a representation of a server-side project, it's like the last-sync-state. CLI-only or offline projects do not have one. 
+ +It's also a portable representation of the project + ### Serializing and Parsing The main idea of Projects is that a Project represents a set of OpenFn workflows defined in any format and present a standard JS-friendly interface to manipulate and reason about them. diff --git a/packages/project/src/Project.ts b/packages/project/src/Project.ts index 6e72ee896..7659f5c38 100644 --- a/packages/project/src/Project.ts +++ b/packages/project/src/Project.ts @@ -10,6 +10,7 @@ import fromProject, { SerializedProject } from './parse/from-project'; import slugify from './util/slugify'; import { getUuidForEdge, getUuidForStep } from './util/uuid'; import { merge, MergeProjectOptions } from './merge/merge-project'; +import { diff as projectDiff } from './util/project-diff'; import { Workspace } from './Workspace'; import { buildConfig } from './util/config'; import { Provisioner } from '@openfn/lexicon/lightning'; @@ -236,6 +237,11 @@ export class Project { return result; } + // Compare this project with another and return a list of workflow changes + diff(project: Project) { + return projectDiff(this, project); + } + canMergeInto(target: Project) { const potentialConflicts: Record = {}; for (const sourceWorkflow of this.workflows) { diff --git a/packages/project/src/index.ts b/packages/project/src/index.ts index b49203bcf..5ccb4b034 100644 --- a/packages/project/src/index.ts +++ b/packages/project/src/index.ts @@ -7,3 +7,6 @@ export default Project; export { Workspace, yamlToJson, jsonToYaml }; export { generateWorkflow, generateProject } from './gen/generator'; + +export { diff } from './util/project-diff'; +export type { WorkflowDiff, DiffType } from './util/project-diff'; diff --git a/packages/project/src/merge/merge-node.ts b/packages/project/src/merge/merge-node.ts index 542af93a2..b9c98fcac 100644 --- a/packages/project/src/merge/merge-node.ts +++ b/packages/project/src/merge/merge-node.ts @@ -15,6 +15,7 @@ type Node = Workflow['steps'][number]; const clone = (obj: 
any) => JSON.parse(JSON.stringify(obj)); +// TODO merge needs to include openfn props and eg lock_version export function mergeWorkflows( source: Workflow, target: Workflow, @@ -76,6 +77,15 @@ export function mergeWorkflows( return { ...target, ...newSource, - openfn: { ...target.openfn }, // preserving the target uuid. we might need a proper helper function for this. + openfn: { + ...target.openfn, + ...source.openfn, + // preserving the target uuid. we might need a proper helper function for this + uuid: target.openfn?.uuid, + }, + options: { + ...target.options, + ...source.options, + }, }; } diff --git a/packages/project/src/merge/merge-project.ts b/packages/project/src/merge/merge-project.ts index 5452401fd..54b97757a 100644 --- a/packages/project/src/merge/merge-project.ts +++ b/packages/project/src/merge/merge-project.ts @@ -111,11 +111,26 @@ export function merge( // TODO: clarify repo preservation strategy // TODO: how other properties of a project are being merged. + // - handle basic metadata (name, desc) + // - handle openfn stuff - the source // with project level props merging, target goes into source because we want to preserve the target props. return new Project( baseMerge(target, source, ['collections'], { workflows: finalWorkflows, + // TODO all this needs testing + openfn: { + ...target.openfn, + ...source.openfn, + }, + options: { + ...target.options, + ...source.options, + }, + name: source.name ?? target.name, + description: source.description ?? target.description, + credentials: source.credentials ?? target.credentials, + collections: source.collections ?? 
target.collections, } as any) ); } diff --git a/packages/project/src/parse/from-app-state.ts b/packages/project/src/parse/from-app-state.ts index 9f08e83c2..2ed32e61f 100644 --- a/packages/project/src/parse/from-app-state.ts +++ b/packages/project/src/parse/from-app-state.ts @@ -57,7 +57,7 @@ export default ( // fetched_at: config.fetchedAt, // }; - proj.workflows = stateJson.workflows.map(mapWorkflow); + proj.workflows = Object.values(stateJson.workflows).map(mapWorkflow); return new Project(proj as l.Project, config); }; @@ -104,22 +104,24 @@ export const mapWorkflow = (workflow: Provisioner.Workflow) => { // TODO what do we do if the condition is disabled? // I don't think that's the same as edge condition false? - workflow.triggers.forEach((trigger: Provisioner.Trigger) => { + Object.values(workflow.triggers).forEach((trigger: Provisioner.Trigger) => { const { type, ...otherProps } = trigger; if (!mapped.start) { - mapped.start = `trigger-${type}`; + mapped.start = type; } - const connectedEdges = edges.filter( + const connectedEdges = Object.values(edges).filter( (e) => e.source_trigger_id === trigger.id ); mapped.steps.push({ - id: 'trigger', + id: type, type, openfn: renameKeys(otherProps, { id: 'uuid' }), next: connectedEdges.reduce((obj: any, edge) => { - const target = jobs.find((j) => j.id === edge.target_job_id); + const target = Object.values(jobs).find( + (j) => j.id === edge.target_job_id + ); if (!target) { throw new Error(`Failed to find ${edge.target_job_id}`); } @@ -130,8 +132,8 @@ export const mapWorkflow = (workflow: Provisioner.Workflow) => { } as l.Trigger); }); - workflow.jobs.forEach((step: Provisioner.Job) => { - const outboundEdges = edges.filter( + Object.values(workflow.jobs).forEach((step: Provisioner.Job) => { + const outboundEdges = Object.values(edges).filter( (e) => e.source_job_id === step.id || e.source_trigger_id === step.id ); @@ -156,7 +158,9 @@ export const mapWorkflow = (workflow: Provisioner.Workflow) => { if 
(outboundEdges.length) { s.next = outboundEdges.reduce((next, edge) => { - const target = jobs.find((j) => j.id === edge.target_job_id); + const target = Object.values(jobs).find( + (j) => j.id === edge.target_job_id + ); // @ts-ignore next[slugify(target.name)] = mapEdge(edge); return next; diff --git a/packages/project/src/serialize/to-app-state.ts b/packages/project/src/serialize/to-app-state.ts index a08786466..2eb4cb8f4 100644 --- a/packages/project/src/serialize/to-app-state.ts +++ b/packages/project/src/serialize/to-app-state.ts @@ -6,6 +6,7 @@ import { Project } from '../Project'; import renameKeys from '../util/rename-keys'; import { jsonToYaml } from '../util/yaml'; import Workflow from '../Workflow'; +import slugify from '../util/slugify'; type Options = { format?: 'json' | 'yaml' }; @@ -38,7 +39,12 @@ export default function ( Object.assign(state, rest, project.options); state.project_credentials = project.credentials ?? []; - state.workflows = project.workflows.map(mapWorkflow); + state.workflows = project.workflows + .map(mapWorkflow) + .reduce((obj: any, wf) => { + obj[slugify(wf.name ?? wf.id)] = wf; + return obj; + }, {}); const shouldReturnYaml = options.format === 'yaml' || @@ -61,11 +67,11 @@ const mapWorkflow = (workflow: Workflow) => { const wfState = { ...originalOpenfnProps, id: workflow.openfn?.uuid ?? randomUUID(), - jobs: [], - triggers: [], - edges: [], + jobs: {}, + triggers: {}, + edges: {}, lock_version: workflow.openfn?.lock_version ?? null, // TODO needs testing - } as unknown as Provisioner.Workflow; + } as Provisioner.Workflow; if (workflow.name) { wfState.name = workflow.name; @@ -96,7 +102,7 @@ const mapWorkflow = (workflow: Workflow) => { type: s.type, ...renameKeys(s.openfn, { uuid: 'id' }), } as Provisioner.Trigger; - wfState.triggers.push(node); + wfState.triggers[node.type] = node; } else { node = omitBy(pick(s, ['name', 'adaptor']), isNil) as Provisioner.Job; const { uuid, ...otherOpenFnProps } = s.openfn ?? 
{}; @@ -118,7 +124,7 @@ const mapWorkflow = (workflow: Workflow) => { Object.assign(node, defaultJobProps, otherOpenFnProps); - wfState.jobs.push(node); + wfState.jobs[s.id ?? slugify(s.name)] = node; } // create an edge to each linked node @@ -155,12 +161,20 @@ const mapWorkflow = (workflow: Workflow) => { e.condition_expression = rules.condition; } } - wfState.edges.push(e); + wfState.edges[`${s.id}->${next}`] = e; }); }); // Sort edges by UUID (for more predictable comparisons in test) - wfState.edges = sortBy(wfState.edges, 'id'); + wfState.edges = Object.keys(wfState.edges) + // convert edge ids to strings just in case a number creeps in (it might in test) + .sort((a, b) => + `${wfState.edges[a].id}`.localeCompare('' + wfState.edges[b].id) + ) + .reduce((obj: any, key) => { + obj[key] = wfState.edges[key]; + return obj; + }, {}); return wfState; }; diff --git a/packages/project/src/util/project-diff.ts b/packages/project/src/util/project-diff.ts new file mode 100644 index 000000000..7e849130a --- /dev/null +++ b/packages/project/src/util/project-diff.ts @@ -0,0 +1,56 @@ +import { Project } from '../Project'; + +export type DiffType = 'added' | 'changed' | 'removed'; + +export type WorkflowDiff = { + id: string; + type: DiffType; +}; + +/** + * Compare two projects and return a list of workflow changes showing how + * project B has diverged from project A. + * + * Workflows are identified by their ID and compared using version hashes. + * + * @param a - The baseline project (e.g., main branch) + * @param b - The comparison project (e.g., staging branch) + * @returns Array of workflow diffs indicating how B differs from A: + * - 'added': workflow exists in B but not in A + * - 'removed': workflow exists in A but not in B + * - 'changed': workflow exists in both but has different version hashes + * + * @example + * ```typescript + * const main = await Project.from('fs', { root: '.' 
}); + * const staging = await Project.from('state', stagingState); + * const diffs = diff(main, staging); + * // Shows how staging has diverged from main + * ``` + */ +export function diff(a: Project, b: Project): WorkflowDiff[] { + const diffs: WorkflowDiff[] = []; + + // Check all of project A's workflows + for (const workflowA of a.workflows) { + const workflowB = b.getWorkflow(workflowA.id); + + if (!workflowB) { + // workflow exists in A but not in B = removed + diffs.push({ id: workflowA.id, type: 'removed' }); + } else if (workflowA.getVersionHash() !== workflowB.getVersionHash()) { + // workflow exists in both but with different content = changed + diffs.push({ id: workflowA.id, type: 'changed' }); + } + } + + // Check for workflows that were added in B + for (const workflowB of b.workflows) { + if (!a.getWorkflow(workflowB.id)) { + // workflow exists in B but not in A = added + diffs.push({ id: workflowB.id, type: 'added' }); + } + } + + return diffs; +} diff --git a/packages/project/test/fixtures/sample-v1-project.ts b/packages/project/test/fixtures/sample-v1-project.ts index 26ca2e099..dcf222ddc 100644 --- a/packages/project/test/fixtures/sample-v1-project.ts +++ b/packages/project/test/fixtures/sample-v1-project.ts @@ -8,24 +8,24 @@ const state: Provisioner.Project = { concurrency: null, inserted_at: '2025-04-23T11:15:59Z', collections: [], - workflows: [ - { + workflows: { + 'my-workflow': { id: '72ca3eb0-042c-47a0-a2a1-a545ed4a8406', name: 'My Workflow', - edges: [ - { + edges: { + 'trigger->transform-data': { enabled: true, id: 'a9a3adef-b394-4405-814d-3ac4323f4b4b', source_trigger_id: '4a06289c-15aa-4662-8dc6-f0aaacd8a058', condition_type: 'always', target_job_id: '66add020-e6eb-4eec-836b-20008afca816', }, - ], + }, concurrency: null, inserted_at: '2025-04-23T11:19:32Z', updated_at: '2025-04-23T11:19:32Z', - jobs: [ - { + jobs: { + 'transform-data': { id: '66add020-e6eb-4eec-836b-20008afca816', name: 'Transform data', body: 'fn(s => s)', @@ -33,18 
+33,18 @@ const state: Provisioner.Project = { project_credential_id: null, keychain_credential_id: null, }, - ], - triggers: [ - { + }, + triggers: { + webhook: { enabled: true, // TODO enabled: false is a bit interesting id: '4a06289c-15aa-4662-8dc6-f0aaacd8a058', type: 'webhook', }, - ], + }, lock_version: 1, deleted_at: null, }, - ], + }, updated_at: '2025-04-23T11:15:59Z', project_credentials: [], scheduled_deletion: null, @@ -58,7 +58,7 @@ const state: Provisioner.Project = { export default state; const withCreds = cloneDeep(state); -Object.assign(withCreds.workflows[0].jobs[0], { +Object.assign(withCreds.workflows['my-workflow'].jobs['transform-data'], { project_credential_id: 'p', keychain_credential_id: 'k', }); diff --git a/packages/project/test/parse/from-app-state.test.ts b/packages/project/test/parse/from-app-state.test.ts index 699853cb4..f81f3ed87 100644 --- a/packages/project/test/parse/from-app-state.test.ts +++ b/packages/project/test/parse/from-app-state.test.ts @@ -3,7 +3,7 @@ import fromAppState, { mapEdge, mapWorkflow, } from '../../src/parse/from-app-state'; -import { clone, cloneDeep } from 'lodash-es'; +import { cloneDeep } from 'lodash-es'; import state, { withCreds } from '../fixtures/sample-v1-project'; import { Job } from '@openfn/lexicon'; @@ -66,7 +66,7 @@ test('should create a Project from prov state with positions', (t) => { // assign a fake positions object // the provisioner right now doesn't include positions // - but one day it will, and Project needs to be able to sync it - newState.workflows[0].positions = { + newState.workflows['my-workflow'].positions = { x: 1, y: 1, }; @@ -86,10 +86,10 @@ test('should create a Project from prov state with a workflow', (t) => { id: 'my-workflow', name: 'My Workflow', history: [], - start: 'trigger-webhook', + start: 'webhook', steps: [ { - id: 'trigger', + id: 'webhook', type: 'webhook', openfn: { enabled: true, uuid: '4a06289c-15aa-4662-8dc6-f0aaacd8a058' }, next: { @@ -125,12 +125,12 @@ 
test('should create a Project from prov state with a workflow', (t) => { }); test('mapWorkflow: map a simple trigger', (t) => { - const mapped = mapWorkflow(state.workflows[0]); + const mapped = mapWorkflow(state.workflows['my-workflow']); const [trigger] = mapped.steps; t.deepEqual(trigger, { - id: 'trigger', + id: 'webhook', type: 'webhook', next: { 'transform-data': { @@ -148,8 +148,21 @@ test('mapWorkflow: map a simple trigger', (t) => { }); }); +test('mapWorkflow: use a triggers type as its id', (t) => { + const wf = state.workflows['my-workflow']; + + // trigger id in the state is a UUID + t.is(wf.triggers.webhook.id, '4a06289c-15aa-4662-8dc6-f0aaacd8a058'); + + const mapped = mapWorkflow(wf); + const [trigger] = mapped.steps; + + // trigger ID in the Project is the type + t.is(trigger.id, 'webhook'); +}); + test('mapWorkflow: handle openfn meta (uuid, lock_version, deleted_at)', (t) => { - const mapped = mapWorkflow(state.workflows[0]); + const mapped = mapWorkflow(state.workflows['my-workflow']); t.deepEqual(mapped.openfn, { lock_version: 1, @@ -163,7 +176,7 @@ test('mapWorkflow: handle openfn meta (uuid, lock_version, deleted_at)', (t) => // TODO need to test various trigger conditions and states test('mapWorkflow: map a simple job', (t) => { - const mapped = mapWorkflow(state.workflows[0]); + const mapped = mapWorkflow(state.workflows['my-workflow']); const [_trigger, job] = mapped.steps; t.deepEqual(job, { @@ -179,7 +192,7 @@ test('mapWorkflow: map a simple job', (t) => { }); test('mapWorkflow: map a job with keychain credentials onto .openfn', (t) => { - const wf = withCreds.workflows[0]; + const wf = withCreds.workflows['my-workflow']; const mapped = mapWorkflow(wf); const [_trigger, job] = mapped.steps; @@ -202,7 +215,7 @@ test('mapWorkflow: map a job with keychain credentials onto .openfn', (t) => { }); test('mapWorkflow: map a job with projcet credentials onto job.configuration', (t) => { - const wf = withCreds.workflows[0]; + const wf = 
withCreds.workflows['my-workflow']; const mapped = mapWorkflow(wf); const [_trigger, job] = mapped.steps; @@ -365,8 +378,8 @@ workflows: const project = fromAppState(yaml, meta, { format: 'yaml', }); - console.log(project.workflows[0].steps); - const { next } = project.workflows[0].steps[1]; + console.log(project.workflows['my-workflow'].steps); + const { next } = project.workflows['my-workflow'].steps[1]; console.log({ next }); // make sure that the condition_types get mapped to condition // also make sure that custom conditions work (both ways) diff --git a/packages/project/test/parse/from-fs.test.ts b/packages/project/test/parse/from-fs.test.ts index c5fa400a2..6a0785f52 100644 --- a/packages/project/test/parse/from-fs.test.ts +++ b/packages/project/test/parse/from-fs.test.ts @@ -22,186 +22,6 @@ function mockFile(path: string, content: string | object) { mock(files); } -test.serial('should load workspace config from json', async (t) => { - mockFile( - '/ws/openfn.json', - buildConfig({ - formats: { - openfn: 'json', - project: 'json', - workflow: 'json', - }, - // @ts-ignore ensure we include custom properties - x: 1, - }) - ); - - const project = await parseProject({ root: '/ws' }); - - t.deepEqual(project.config, { - x: 1, - credentials: 'credentials.yaml', - dirs: { projects: '.projects', workflows: 'workflows' }, - formats: { openfn: 'json', project: 'json', workflow: 'json' }, - }); -}); - -test.serial('should load workspace config from yaml', async (t) => { - mockFile( - '/ws/openfn.yaml', - buildConfig({ - formats: { - openfn: 'yaml', - project: 'yaml', - workflow: 'yaml', - }, - // @ts-ignore ensure we include custom properties - x: 1, - }) - ); - - const project = await parseProject({ root: '/ws' }); - - t.deepEqual(project.config, { - credentials: 'credentials.yaml', - x: 1, - dirs: { projects: '.projects', workflows: 'workflows' }, - formats: { openfn: 'yaml', project: 'yaml', workflow: 'yaml' }, - }); -}); - -test.serial('should load single workflow 
in new flat format', async (t) => { - mockFile('/ws/openfn.yaml', buildConfig()); - - mockFile('/ws/workflows/my-workflow/my-workflow.yaml', { - id: 'my-workflow', - name: 'My Workflow', - steps: [ - { - id: 'a', - expression: 'job.js', - }, - ], - start: 'a', - }); - - mockFile('/ws/workflows/my-workflow/job.js', `fn(s => s)`); - - const project = await parseProject({ root: '/ws' }); - - t.is(project.workflows.length, 1); - - const wf = project.getWorkflow('my-workflow'); - t.truthy(wf); - t.is(wf.id, 'my-workflow'); - t.is(wf.name, 'My Workflow'); - t.is(wf.start, 'a'); -}); - -// hmm, maybe I shouldn't support this, because it puts some wierd stuff in the code -// and new CLI will just use the new format -test.serial( - 'should load single workflow in old { workflow, options } format', - async (t) => { - mockFile('/ws/openfn.yaml', buildConfig()); - - mockFile('/ws/workflows/my-workflow/my-workflow.yaml', { - workflow: { - id: 'my-workflow', - name: 'My Workflow', - steps: [ - { - id: 'a', - expression: 'job.js', - }, - ], - }, - options: { - start: 'a', - }, - }); - - mockFile('/ws/workflows/my-workflow/job.js', `fn(s => s)`); - - const project = await parseProject({ root: '/ws' }); - - t.is(project.workflows.length, 1); - - const wf = project.getWorkflow('my-workflow'); - t.truthy(wf); - t.is(wf.id, 'my-workflow'); - t.is(wf.name, 'My Workflow'); - t.is(wf.start, 'a'); - } -); - -test.serial('should load single workflow from json', async (t) => { - mockFile( - '/ws/openfn.yaml', - buildConfig({ - formats: { - workflow: 'json', - }, - }) - ); - - mockFile('/ws/workflows/my-workflow/my-workflow.json', { - id: 'my-workflow', - name: 'My Workflow', - steps: [ - { - id: 'a', - expression: 'job.js', - }, - ], - }); - - mockFile('/ws/workflows/my-workflow/job.js', `fn(s => s)`); - - const project = await parseProject({ root: '/ws' }); - - t.is(project.workflows.length, 1); - - const wf = project.getWorkflow('my-workflow'); - t.truthy(wf); - t.is(wf.id, 
'my-workflow'); - t.is(wf.name, 'My Workflow'); -}); - -test.serial('should load single workflow from custom path', async (t) => { - mockFile( - '/ws/openfn.yaml', - buildConfig({ - dirs: { - workflows: 'custom-wfs', - projects: '.projects', - }, - }) - ); - - mockFile('/ws/custom-wfs/my-workflow/my-workflow.yaml', { - id: 'my-workflow', - name: 'My Workflow', - steps: [ - { - id: 'a', - expression: 'job.js', - }, - ], - }); - - mockFile('/ws/custom-wfs/my-workflow/job.js', `fn(s => s)`); - - const project = await parseProject({ root: '/ws' }); - - t.is(project.workflows.length, 1); - - const wf = project.getWorkflow('my-workflow'); - t.truthy(wf); - t.is(wf.id, 'my-workflow'); - t.is(wf.name, 'My Workflow'); -}); - test.serial('should include multiple workflows', async (t) => { mockFile('/ws/openfn.yaml', buildConfig()); diff --git a/packages/project/test/project.test.ts b/packages/project/test/project.test.ts index c62a9e157..67d5dccc8 100644 --- a/packages/project/test/project.test.ts +++ b/packages/project/test/project.test.ts @@ -14,24 +14,24 @@ const state: Provisioner.Project = { concurrency: null, inserted_at: '2025-04-23T11:15:59Z', collections: [], - workflows: [ - { + workflows: { + wf1: { id: '72ca3eb0-042c-47a0-a2a1-a545ed4a8406', name: 'wf1', - edges: [ - { + edges: { + 'webhook->transform-data': { enabled: true, id: 'a9a3adef-b394-4405-814d-3ac4323f4b4b', source_trigger_id: '4a06289c-15aa-4662-8dc6-f0aaacd8a058', condition_type: 'always', target_job_id: '66add020-e6eb-4eec-836b-20008afca816', }, - ], + }, concurrency: null, inserted_at: '2025-04-23T11:19:32Z', updated_at: '2025-04-23T11:19:32Z', - jobs: [ - { + jobs: { + 'transform-data': { id: '66add020-e6eb-4eec-836b-20008afca816', name: 'Transform data', body: 'fn(s => s)', @@ -39,18 +39,18 @@ const state: Provisioner.Project = { project_credential_id: null, keychain_credential_id: null, }, - ], - triggers: [ - { + }, + triggers: { + webhook: { enabled: true, // TODO enabled: false is a bit 
interesting id: '4a06289c-15aa-4662-8dc6-f0aaacd8a058', type: 'webhook', }, - ], + }, lock_version: 1, deleted_at: null, }, - ], + }, updated_at: '2025-04-23T11:15:59Z', project_credentials: [], scheduled_deletion: null, @@ -112,7 +112,7 @@ test('should default alias to "main"', (t) => { t.is(project.alias, 'main'); }); -test('should convert a state file to a project and back again', async (t) => { +test('should convert a state file to a project and back again', async (t) => { const meta = { endpoint: 'app.openfn.org', env: 'test', diff --git a/packages/project/test/serialize/to-app-state.test.ts b/packages/project/test/serialize/to-app-state.test.ts index 68a022140..de6e4117e 100644 --- a/packages/project/test/serialize/to-app-state.test.ts +++ b/packages/project/test/serialize/to-app-state.test.ts @@ -1,9 +1,10 @@ import test from 'ava'; -import type { Provisioner } from '@openfn/lexicon/lightning'; import { Project } from '../../src/Project'; import toAppState from '../../src/serialize/to-app-state'; import { generateProject } from '../../src/gen/generator'; +import type { Provisioner } from '@openfn/lexicon/lightning'; + const state: Provisioner.Project = { id: 'e16c5f09-f0cb-4ba7-a4c2-73fcb2f29d00', name: 'aaa', @@ -11,24 +12,24 @@ concurrency: null, inserted_at: '2025-04-23T11:15:59Z', collections: [], - workflows: [ - { + workflows: { + wf1: { id: '72ca3eb0-042c-47a0-a2a1-a545ed4a8406', name: 'wf1', - edges: [ - { + edges: { + 'trigger->transform-data': { enabled: true, id: 'a9a3adef-b394-4405-814d-3ac4323f4b4b', source_trigger_id: '4a06289c-15aa-4662-8dc6-f0aaacd8a058', condition_type: 'always', target_job_id: '66add020-e6eb-4eec-836b-20008afca816', }, - ], + }, concurrency: null, inserted_at: '2025-04-23T11:19:32Z', updated_at: '2025-04-23T11:19:32Z', - jobs: [ - { + jobs: { + 'transform-data': { id: '66add020-e6eb-4eec-836b-20008afca816', name: 'Transform data', body: 'fn(s => s)', @@ -36,18 +37,18 @@ const state:
Provisioner.Project = { project_credential_id: '', keychain_credential_id: null, }, - ], - triggers: [ - { + }, + triggers: { + webhook: { enabled: true, // TODO enabled: false is a bit interesting id: '4a06289c-15aa-4662-8dc6-f0aaacd8a058', type: 'webhook', }, - ], + }, lock_version: 1, deleted_at: null, }, - ], + }, updated_at: '2025-04-23T11:15:59Z', project_credentials: [''], scheduled_deletion: null, @@ -60,7 +61,7 @@ const state: Provisioner.Project = { test('should set defaults for keys that Lightning needs', (t) => { // set up a very minimal project - const data = { + const data: any = { id: 'my-project', openfn: { uuid: '', @@ -68,6 +69,7 @@ test('should set defaults for keys that Lightning needs', (t) => { workflows: [ { id: 'wf', + name: 'my workflow', openfn: { uuid: 0, }, @@ -107,29 +109,30 @@ test('should set defaults for keys that Lightning needs', (t) => { t.deepEqual(defaultState, { id: '', project_credentials: [], - workflows: [ - { + workflows: { + 'my-workflow': { id: 0, - jobs: [ - { + name: 'my workflow', + jobs: { + step: { body: '.', id: 2, project_credential_id: null, keychain_credential_id: null, }, - ], - triggers: [{ type: 'webhook', id: 1 }], - edges: [ - { + }, + triggers: { webhook: { type: 'webhook', id: 1 } }, + edges: { + ['trigger->step']: { id: '', target_job_id: 2, enabled: true, source_trigger_id: 1, }, - ], + }, lock_version: null, }, - ], + }, }); }); @@ -139,6 +142,7 @@ test('should serialize workflow positions', (t) => { workflows: [ { id: 'wf', + name: 'wf', openfn: { positions: { step: { @@ -170,7 +174,7 @@ test('should serialize workflow positions', (t) => { }); const state = toAppState(project); - t.deepEqual(state.workflows[0].positions, { + t.deepEqual(state.workflows['wf'].positions, { step: { x: 1, y: 1, @@ -188,6 +192,7 @@ test('should write openfn keys to objects', (t) => { workflows: [ { id: 'wf', + name: 'wf', openfn, steps: [ { @@ -217,10 +222,10 @@ test('should write openfn keys to objects', (t) => { const 
state = toAppState(project); t.is(state.x, 1); - t.is(state.workflows[0].x, 1); - t.is(state.workflows[0].jobs[0].x, 1); - t.is(state.workflows[0].triggers[0].x, 1); - t.is(state.workflows[0].edges[0].x, 1); + t.is(state.workflows['wf'].x, 1); + t.is(state.workflows['wf'].jobs.step.x, 1); + t.is(state.workflows['wf'].triggers.webhook.x, 1); + t.is(state.workflows['wf'].edges['trigger->step'].x, 1); }); test('should handle credentials', (t) => { @@ -229,6 +234,7 @@ test('should handle credentials', (t) => { workflows: [ { id: 'wf', + name: 'wf', steps: [ { id: 'trigger', @@ -251,9 +257,9 @@ test('should handle credentials', (t) => { }; const state = toAppState(new Project(data), { format: 'json' }); - const [job] = state.workflows[0].jobs; - t.is(job.keychain_credential_id, 'k'); - t.is(job.project_credential_id, 'p'); + const { step } = state.workflows['wf'].jobs; + t.is(step.keychain_credential_id, 'k'); + t.is(step.project_credential_id, 'p'); }); test('should ignore workflow start keys', (t) => { @@ -262,6 +268,7 @@ test('should ignore workflow start keys', (t) => { workflows: [ { id: 'wf', + name: 'wf', start: 'step', steps: [ { @@ -285,13 +292,13 @@ test('should ignore workflow start keys', (t) => { }; const state = toAppState(new Project(data), { format: 'json' }); - t.falsy(state.workflows[0].start); + t.falsy(state.workflows['wf'].start); }); test.todo('handle edge labels'); test('serialize steps and trigger in alphabetical order', (t) => { - const wf = ` + const wf = `@name wf z-b y-x c-p @@ -300,17 +307,17 @@ c-p const state = toAppState(project, { format: 'json' }); - const jobs = state.workflows[0].jobs.map((j) => j.name); + const jobs = Object.keys(state.workflows['wf'].jobs); // short be sorted by name t.deepEqual(jobs, ['b', 'c', 'p', 'x', 'y', 'z']); - const edges = state.workflows[0].edges.map((e) => e.id); + const edges = Object.keys(state.workflows['wf'].edges); // edges are sorted by uuid - t.deepEqual(edges, [3, 6, 9]); + t.deepEqual(edges, 
['z->b', 'y->x', 'c->p']); }); test('should handle edge conditions', (t) => { - const wf = ` + const wf = `@name wf a-(condition=always)-b a-(condition="on_job_success")-c a-(condition="on_job_failure")-d @@ -322,7 +329,13 @@ a-(condition=x)-f }); const state = toAppState(project, { format: 'json' }); - const [a_b, a_c, a_d, a_e, a_f] = state.workflows[0].edges; + const { + 'a->b': a_b, + 'a->c': a_c, + 'a->d': a_d, + 'a->e': a_e, + 'a->f': a_f, + } = state.workflows.wf.edges; t.is(a_b.condition_type, 'always'); t.falsy(a_b.condition_expression); diff --git a/packages/project/test/util/project-diff.test.ts b/packages/project/test/util/project-diff.test.ts new file mode 100644 index 000000000..82d668113 --- /dev/null +++ b/packages/project/test/util/project-diff.test.ts @@ -0,0 +1,169 @@ +import test from 'ava'; +import { Project } from '../../src/Project'; +import { diff } from '../../src/util/project-diff'; +import generateWorkflow from '../../src/gen/generator'; + +test('diff: should return empty array for identical projects', (t) => { + const wf = generateWorkflow('trigger-x'); + + const projectA = new Project({ + name: 'project-a', + workflows: [wf], + }); + + const projectB = new Project({ + name: 'project-b', + workflows: [wf], + }); + + const diffs = diff(projectA, projectB); + + t.is(diffs.length, 0); +}); + +test('diff: should detect changed workflow', (t) => { + const wfA = generateWorkflow('trigger-x'); + const wfB = generateWorkflow('trigger-y'); + // Make sure they have the same id but different content + wfB.id = wfA.id; + + const projectA = new Project({ + name: 'project-a', + workflows: [wfA], + }); + + const projectB = new Project({ + name: 'project-b', + workflows: [wfB], + }); + + const diffs = diff(projectA, projectB); + + t.is(diffs.length, 1); + t.deepEqual(diffs[0], { id: wfA.id, type: 'changed' }); +}); + +test('diff: should detect added workflow', (t) => { + const wf1 = generateWorkflow('@id a trigger-x'); + const wf2 = 
generateWorkflow('@id b trigger-y'); + + const projectA = new Project({ + name: 'a', + workflows: [wf1], + }); + + const projectB = new Project({ + name: 'b', + workflows: [wf1, wf2], + }); + + const diffs = diff(projectA, projectB); + + t.is(diffs.length, 1); + t.deepEqual(diffs[0], { id: wf2.id, type: 'added' }); +}); + +test('diff: should detect removed workflow', (t) => { + const wf1 = generateWorkflow('@id a trigger-x'); + const wf2 = generateWorkflow('@id b trigger-y'); + + const projectA = new Project({ + name: 'a', + workflows: [wf1, wf2], + }); + + const projectB = new Project({ + name: 'b', + workflows: [wf1], + }); + + const diffs = diff(projectA, projectB); + + t.is(diffs.length, 1); + t.deepEqual(diffs[0], { id: wf2.id, type: 'removed' }); +}); + +test('diff: should detect multiple changes at once', (t) => { + const wf1 = generateWorkflow('@id a trigger-x'); + const wf2 = generateWorkflow('@id b trigger-y'); + const wf3 = generateWorkflow('@id c trigger-z'); + const wf4 = generateWorkflow('@id d trigger-w'); + + // wf2 will be changed in projectB + const wf2Changed = generateWorkflow('@id b trigger-different'); + + const projectA = new Project({ + name: 'a', + workflows: [wf1, wf2, wf3], // has a, b, c + }); + + const projectB = new Project({ + name: 'b', + workflows: [wf1, wf2Changed, wf4], // has a, b (changed), d (new) + }); + + const diffs = diff(projectA, projectB); + + t.is(diffs.length, 3); + t.deepEqual( + diffs.find((d) => d.id === 'b'), + { id: 'b', type: 'changed' } + ); + t.deepEqual( + diffs.find((d) => d.id === 'c'), + { id: 'c', type: 'removed' } + ); + t.deepEqual( + diffs.find((d) => d.id === 'd'), + { id: 'd', type: 'added' } + ); +}); + +test('diff: should detect multiple workflows with same type of change', (t) => { + const wf1 = generateWorkflow('@id a trigger-x'); + const wf2 = generateWorkflow('@id b trigger-y'); + const wf3 = generateWorkflow('@id c trigger-z'); + + const wf1Changed = generateWorkflow('@id a trigger-X'); + const 
wf2Changed = generateWorkflow('@id b trigger-Y'); + + const projectA = new Project({ + name: 'project-a', + workflows: [wf1, wf2, wf3], + }); + + const projectB = new Project({ + name: 'project-b', + workflows: [wf1Changed, wf2Changed, wf3], + }); + + const diffs = diff(projectA, projectB); + + t.is(diffs.length, 2); + t.deepEqual(diffs[0], { id: 'a', type: 'changed' }); + t.deepEqual(diffs[1], { id: 'b', type: 'changed' }); +}); + +test('diff: should detect change when workflow has same ID but different name', (t) => { + const wf1 = generateWorkflow('@id my-workflow trigger-x'); + const wf2 = generateWorkflow('@id my-workflow trigger-y'); + + // Ensure they have the same ID but different content + wf1.name = 'Original Name'; + wf2.name = 'Different Name'; + + const projectA = new Project({ + name: 'project-a', + workflows: [wf1], + }); + + const projectB = new Project({ + name: 'project-b', + workflows: [wf2], + }); + + const diffs = diff(projectA, projectB); + + t.is(diffs.length, 1); + t.deepEqual(diffs[0], { id: 'my-workflow', type: 'changed' }); +});