diff --git a/plugins/catalog-backend-module-rhaap/src/module.ts b/plugins/catalog-backend-module-rhaap/src/module.ts
index 19e1721e..0c37bbca 100644
--- a/plugins/catalog-backend-module-rhaap/src/module.ts
+++ b/plugins/catalog-backend-module-rhaap/src/module.ts
@@ -26,6 +26,8 @@ export const catalogModuleRhaap = createBackendModule({
         scheduler: coreServices.scheduler,
         ansibleService: ansibleServiceRef,
         httpRouter: coreServices.httpRouter,
+        discovery: coreServices.discovery,
+        auth: coreServices.auth,
       },
       async init({
         logger,
@@ -56,7 +58,6 @@ export const catalogModuleRhaap = createBackendModule({
             scheduler,
           },
         );
-
         const jobTemplateProvider = AAPJobTemplateProvider.fromConfig(
           config,
           ansibleService,
diff --git a/plugins/catalog-backend-module-rhaap/src/providers/AAPEntityProvider.test.ts b/plugins/catalog-backend-module-rhaap/src/providers/AAPEntityProvider.test.ts
index ded65dee..578394eb 100644
--- a/plugins/catalog-backend-module-rhaap/src/providers/AAPEntityProvider.test.ts
+++ b/plugins/catalog-backend-module-rhaap/src/providers/AAPEntityProvider.test.ts
@@ -850,6 +850,160 @@ describe('AAPEntityProvider', () => {
     });
   });
 
+  describe('registerExecutionEnvironment', () => {
+    let provider: AAPEntityProvider;
+    let mockConnection: EntityProviderConnection;
+    let logger: ReturnType<typeof mockServices.logger.mock>;
+    let executionEnvironmentEntity: any;
+
+    beforeEach(() => {
+      const config = new ConfigReader(MOCK_CONFIG.data);
+      logger = mockServices.logger.mock();
+      const schedule = new PersistingTaskRunner();
+      executionEnvironmentEntity = {
+        apiVersion: 'backstage.io/v1alpha1',
+        kind: 'Component',
+        metadata: {
+          name: 'test-ee',
+          title: 'test-ee',
+          description: 'test-ee',
+          tags: ['test-ee'],
+          annotations: {
+            'backstage.io/managed-by-location': `url:127.0.0.1`,
+            'backstage.io/managed-by-origin-location': `url:127.0.0.1`,
+            'ansible.io/download-experience': 'true',
+          },
+        },
+        spec: {
+          type: 'execution-environment',
+          lifecycle: 'production',
+          owner: 'team-a',
+          definition: 'sample \ntest-ee \ndefinition',
+          readme: 'sample \ntest-ee \nreadme',
+        },
+      };
+
+      provider = AAPEntityProvider.fromConfig(config, mockAnsibleService, {
+        schedule,
+        logger,
+      })[0];
+
+      mockConnection = {
+        applyMutation: jest.fn().mockResolvedValue(undefined),
+        refresh: jest.fn(),
+      };
+
+      provider.connect(mockConnection);
+    });
+
+    it('should successfully register an execution environment entity', async () => {
+      await provider.registerExecutionEnvironment(executionEnvironmentEntity);
+
+      expect(mockConnection.applyMutation).toHaveBeenCalledWith({
+        type: 'delta',
+        added: [
+          {
+            entity: executionEnvironmentEntity,
+            locationKey: 'AapEntityProvider:development',
+          },
+        ],
+        removed: [],
+      });
+    });
+
+    it('should raise error if entity has missing metadata name', async () => {
+      // remove metadata.name from executionEnvironmentEntity
+      delete executionEnvironmentEntity.metadata.name;
+      await expect(
+        provider.registerExecutionEnvironment(executionEnvironmentEntity),
+      ).rejects.toThrow(
+        'Name [metadata.name] is required for Execution Environment registration',
+      );
+    });
+
+    it('should raise error if EE entity has no type', async () => {
+      // remove spec.type from executionEnvironmentEntity
+      delete executionEnvironmentEntity.spec.type;
+      await expect(
+        provider.registerExecutionEnvironment(executionEnvironmentEntity),
+      ).rejects.toThrow(
+        'Type [spec.type] must be "execution-environment" for Execution Environment registration',
+      );
+    });
+
+    it('should raise error if EE entity has incorrect type', async () => {
+      // set spec.type to an invalid value on executionEnvironmentEntity
+      executionEnvironmentEntity.spec.type = 'not-an-execution-environment';
+      await expect(
+        provider.registerExecutionEnvironment(executionEnvironmentEntity),
+      ).rejects.toThrow(
+        'Type [spec.type] must be "execution-environment" for Execution Environment registration',
+      );
+    });
+
+    it('should throw error when connection is not initialized', async () => {
+      const uninitializedProvider = AAPEntityProvider.fromConfig(
+        new ConfigReader(MOCK_CONFIG.data),
+        mockAnsibleService,
+        {
+          schedule: new PersistingTaskRunner(),
+          logger: mockServices.logger.mock(),
+        },
+      )[0];
+      await expect(
+        uninitializedProvider.registerExecutionEnvironment(
+          executionEnvironmentEntity,
+        ),
+      ).rejects.toThrow('AAPEntityProvider is not connected yet');
+    });
+
+    it('should register entity with correct locationKey', async () => {
+      await provider.registerExecutionEnvironment(executionEnvironmentEntity);
+
+      expect(mockConnection.applyMutation).toHaveBeenCalledWith(
+        expect.objectContaining({
+          added: expect.arrayContaining([
+            expect.objectContaining({
+              locationKey: 'AapEntityProvider:development',
+            }),
+          ]),
+        }),
+      );
+    });
+
+    it('should register multiple entities independently', async () => {
+      const entity1 = JSON.parse(JSON.stringify(executionEnvironmentEntity));
+      const entity2 = JSON.parse(JSON.stringify(executionEnvironmentEntity));
+      entity1.metadata.name = 'entity1';
+      entity2.metadata.name = 'entity2';
+
+      await provider.registerExecutionEnvironment(entity1);
+      await provider.registerExecutionEnvironment(entity2);
+
+      expect(mockConnection.applyMutation).toHaveBeenCalledTimes(2);
+      expect(mockConnection.applyMutation).toHaveBeenNthCalledWith(1, {
+        type: 'delta',
+        added: [
+          {
+            entity: entity1,
+            locationKey: 'AapEntityProvider:development',
+          },
+        ],
+        removed: [],
+      });
+      expect(mockConnection.applyMutation).toHaveBeenNthCalledWith(2, {
+        type: 'delta',
+        added: [
+          {
+            entity: entity2,
+            locationKey: 'AapEntityProvider:development',
+          },
+        ],
+        removed: [],
+      });
+    });
+  });
+
   it('handles errors gracefully', async () => {
     const config = new ConfigReader(MOCK_CONFIG.data);
     const logger = mockServices.logger.mock();
diff --git a/plugins/catalog-backend-module-rhaap/src/providers/AAPEntityProvider.ts b/plugins/catalog-backend-module-rhaap/src/providers/AAPEntityProvider.ts
index 45ba9e98..1e859027 100644
--- a/plugins/catalog-backend-module-rhaap/src/providers/AAPEntityProvider.ts
+++ b/plugins/catalog-backend-module-rhaap/src/providers/AAPEntityProvider.ts
@@ -627,4 +627,35 @@ export class AAPEntityProvider implements EntityProvider {
 
   // Note: Admin access is now handled via dynamic aap-admins group membership
   // No separate API-based assignment needed
+
+  async registerExecutionEnvironment(entity: any): Promise<void> {
+    if (!this.connection) {
+      throw new Error('AAPEntityProvider is not connected yet');
+    }
+
+    if (!entity.metadata?.name) {
+      throw new Error(
+        'Name [metadata.name] is required for Execution Environment registration',
+      );
+    }
+
+    if (!entity.spec?.type || entity.spec.type !== 'execution-environment') {
+      throw new Error(
+        'Type [spec.type] must be "execution-environment" for Execution Environment registration',
+      );
+    }
+
+    this.logger.info(`Registering entity ${entity.metadata?.name}`);
+
+    await this.connection.applyMutation({
+      type: 'delta',
+      added: [
+        {
+          entity,
+          locationKey: this.getProviderName(),
+        },
+      ],
+      removed: [],
+    });
+  }
 }
diff --git a/plugins/catalog-backend-module-rhaap/src/router.test.ts
b/plugins/catalog-backend-module-rhaap/src/router.test.ts index 68d4cc9b..308194cf 100644 --- a/plugins/catalog-backend-module-rhaap/src/router.test.ts +++ b/plugins/catalog-backend-module-rhaap/src/router.test.ts @@ -428,6 +428,196 @@ describe('createRouter', () => { }); }); + describe('POST /aap/register_ee', () => { + it('should successfully register an execution environment', async () => { + const mockRegisterExecutionEnvironment = jest + .fn() + .mockResolvedValue(undefined); + const mockProvider = { + registerExecutionEnvironment: mockRegisterExecutionEnvironment, + }; + + const testApp = express(); + testApp.use(express.json()); + testApp.use( + '/', + await createRouter({ + logger: mockLogger, + aapEntityProvider: mockProvider as any, + jobTemplateProvider: {} as any, + }), + ); + + const mockEntity = { + apiVersion: 'backstage.io/v1alpha1', + kind: 'Component', + metadata: { + name: 'test-ee', + title: 'test-ee', + description: 'test-ee', + tags: ['test-ee'], + annotations: { + 'backstage.io/managed-by-location': `url:127.0.0.1`, + 'backstage.io/managed-by-origin-location': `url:127.0.0.1`, + 'ansible.io/download-experience': 'true', + }, + }, + spec: { + type: 'execution-environment', + lifecycle: 'production', + owner: 'team-a', + definition: 'sample \ntest-ee \ndefinition', + readme: 'sample \ntest-ee \nreadme', + }, + }; + + const response = await request(testApp) + .post('/aap/register_ee') + .send({ entity: mockEntity }) + .expect(200); + + expect(response.body).toEqual({ + success: true, + }); + expect(mockRegisterExecutionEnvironment).toHaveBeenCalledWith(mockEntity); + }); + + it('should return 400 when entity is missing', async () => { + const mockProvider = { + registerExecutionEnvironment: jest.fn(), + }; + + const testApp = express(); + testApp.use(express.json()); + testApp.use( + '/', + await createRouter({ + logger: mockLogger, + aapEntityProvider: mockProvider as any, + jobTemplateProvider: {} as any, + }), + ); + + const response = await request(testApp) + .post('/aap/register_ee') + .send({}) + .expect(400); + + expect(response.body).toEqual({ + error: 'Missing entity in request body.', + }); + expect(mockProvider.registerExecutionEnvironment).not.toHaveBeenCalled(); + }); + + it('should return 400 when entity is null', async () => { + const mockProvider = { + registerExecutionEnvironment: jest.fn(), + }; + + const testApp = express(); + testApp.use(express.json()); + testApp.use( + '/', + await createRouter({ + logger: mockLogger, + aapEntityProvider: mockProvider as any, + jobTemplateProvider: {} as any, + }), + ); + + const response = await request(testApp) + .post('/aap/register_ee') + .send({ entity: null }) + .expect(400); + + expect(response.body).toEqual({ + error: 'Missing entity in request body.', + }); + expect(mockProvider.registerExecutionEnvironment).not.toHaveBeenCalled(); + }); + + it('should handle registerExecutionEnvironment failure with proper error response', async () => { + const mockRegisterExecutionEnvironment = jest + .fn() + .mockRejectedValue( + new Error('Execution Environment registration failed'), + ); + const mockProvider = { + registerExecutionEnvironment: mockRegisterExecutionEnvironment, + }; + + const testApp = express(); + testApp.use(express.json()); + testApp.use( + '/', + await createRouter({ + logger: mockLogger, + aapEntityProvider: mockProvider as any, + jobTemplateProvider: {} as any, + }), + ); + + const mockEntity = { + apiVersion: 'backstage.io/v1alpha1', + kind: 'Component', + metadata: { name: 'test-ee' }, + }; + + 
const response = await request(testApp) + .post('/aap/register_ee') + .send({ entity: mockEntity }) + .expect(500); + + expect(response.body).toEqual({ + error: + 'Failed to register Execution Environment: Execution Environment registration failed', + }); + expect(mockRegisterExecutionEnvironment).toHaveBeenCalledWith(mockEntity); + expect(mockLogger.error).toHaveBeenCalledWith( + 'Failed to register Execution Environment: Execution Environment registration failed', + ); + }); + + it('should handle non-Error exceptions gracefully', async () => { + const mockRegisterExecutionEnvironment = jest + .fn() + .mockRejectedValue('String error'); + const mockProvider = { + registerExecutionEnvironment: mockRegisterExecutionEnvironment, + }; + + const testApp = express(); + testApp.use(express.json()); + testApp.use( + '/', + await createRouter({ + logger: mockLogger, + aapEntityProvider: mockProvider as any, + jobTemplateProvider: {} as any, + }), + ); + + const mockEntity = { + apiVersion: 'backstage.io/v1alpha1', + kind: 'Component', + metadata: { name: 'test-ee' }, + }; + + const response = await request(testApp) + .post('/aap/register_ee') + .send({ entity: mockEntity }) + .expect(500); + + expect(response.body).toEqual({ + error: 'Failed to register Execution Environment: String error', + }); + expect(mockRegisterExecutionEnvironment).toHaveBeenCalledWith(mockEntity); + expect(mockLogger.error).toHaveBeenCalledWith( + 'Failed to register Execution Environment: String error', + ); + }); + }); + describe('Router setup', () => { it('should use express.json() middleware', async () => { const response = await request(app) diff --git a/plugins/catalog-backend-module-rhaap/src/router.ts b/plugins/catalog-backend-module-rhaap/src/router.ts index 2854f8ae..914d4841 100644 --- a/plugins/catalog-backend-module-rhaap/src/router.ts +++ b/plugins/catalog-backend-module-rhaap/src/router.ts @@ -73,5 +73,26 @@ export async function createRouter(options: { } }); + router.post('/aap/register_ee', express.json(), async (request, response) => { + const { entity } = request.body; + + if (!entity) { + response.status(400).json({ error: 'Missing entity in request body.' }); + return; + } + + try { + await aapEntityProvider.registerExecutionEnvironment(entity); + response.status(200).json({ success: true }); + } catch (error) { + const errorMessage = + error instanceof Error ? 
error.message : String(error); + logger.error(`Failed to register Execution Environment: ${errorMessage}`); + response.status(500).json({ + error: `Failed to register Execution Environment: ${errorMessage}`, + }); + } + }); + return router; } diff --git a/plugins/scaffolder-backend-module-backstage-rhaap/package.json b/plugins/scaffolder-backend-module-backstage-rhaap/package.json index ad8bcd99..87f39b28 100644 --- a/plugins/scaffolder-backend-module-backstage-rhaap/package.json +++ b/plugins/scaffolder-backend-module-backstage-rhaap/package.json @@ -32,6 +32,7 @@ "@backstage/backend-defaults": "^0.11.1", "@backstage/backend-dynamic-feature-service": "^0.7.0", "@backstage/backend-plugin-api": "^1.3.1", + "@backstage/catalog-model": "^1.7.5", "@backstage/config": "^1.3.2", "@backstage/errors": "^1.2.7", "@backstage/integration": "^1.17.0", @@ -41,10 +42,15 @@ "@backstage/plugin-scaffolder-node": "^0.8.2", "@backstage/types": "^1.2.1", "@octokit/core": "^5.0.0", + "express": "^5.1.0", + "express-promise-router": "^4.1.1", "isomorphic-git": "^1.23.0", + "js-yaml": "^4.1.0", "node-fetch": "^2.6.7", + "semver": "^7.7.3", "undici": "6.21.2", - "yaml": "^2.0.0" + "yaml": "^2.0.0", + "zod": "^4.1.12" }, "peerDependencies": { "react": "^17.0.0 || ^18.0.0" @@ -55,10 +61,15 @@ "@backstage/plugin-scaffolder-node-test-utils": "^0.2.2", "@backstage/test-utils": "^1.7.8", "@janus-idp/cli": "^3.6.1", + "@types/express": "^5.0.3", "@types/jest": "^29.5.12", + "@types/js-yaml": "^4", "@types/node": "^22.13.4", "@types/node-fetch": "2.6.11", - "msw": "2.4.9" + "@types/semver": "^7", + "dedent": "^1.7.0", + "msw": "2.4.9", + "supertest": "^7.1.4" }, "files": [ "dist", diff --git a/plugins/scaffolder-backend-module-backstage-rhaap/src/actions/createEEDefinition.test.ts b/plugins/scaffolder-backend-module-backstage-rhaap/src/actions/createEEDefinition.test.ts new file mode 100644 index 00000000..d9d836e8 --- /dev/null +++ b/plugins/scaffolder-backend-module-backstage-rhaap/src/actions/createEEDefinition.test.ts @@ -0,0 +1,2465 @@ +/* + * Copyright 2024 The Ansible plugin Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +// Mock external dependencies first (before imports for proper hoisting) +jest.mock('fs/promises', () => ({ + mkdir: jest.fn(), + writeFile: jest.fn(), +})); + +jest.mock('js-yaml', () => ({ + load: jest.fn(), + dump: jest.fn(), + default: { + load: jest.fn(), + dump: jest.fn(), + }, +})); + +jest.mock('semver', () => ({ + gt: jest.fn(), +})); + +jest.mock('./helpers/schemas', () => ({ + CollectionRequirementsSchema: { + parse: jest.fn(), + }, + EEDefinitionSchema: { + parse: jest.fn(), + }, +})); + +jest.mock('./utils/utils', () => ({ + parseUploadedFileContent: jest.fn(), +})); + +// Mock global fetch +global.fetch = jest.fn(); + +import dedent from 'dedent'; +import * as fs from 'fs/promises'; +import * as yaml from 'js-yaml'; +import * as semver from 'semver'; +import { z } from 'zod'; +import { mockServices } from '@backstage/backend-test-utils'; +import { + CollectionRequirementsSchema, + EEDefinitionSchema, +} from './helpers/schemas'; +import { parseUploadedFileContent } from './utils/utils'; +import { createEEDefinitionAction } from './createEEDefinition'; + +const mockMkdir = fs.mkdir as jest.MockedFunction; +const mockWriteFile = fs.writeFile as jest.MockedFunction; +const mockYamlLoad = yaml.load as jest.MockedFunction; +const mockYamlDump = yaml.dump as jest.MockedFunction; +const mockSemverGt = semver.gt as jest.MockedFunction; +const mockCollectionRequirementsSchemaParse = ( + CollectionRequirementsSchema as any +).parse as jest.MockedFunction; +const mockEEDefinitionSchemaParse = (EEDefinitionSchema as any) + .parse as jest.MockedFunction; +const mockParseUploadedFileContent = + parseUploadedFileContent as jest.MockedFunction< + typeof parseUploadedFileContent + >; +const mockFetch = global.fetch as jest.MockedFunction; + +// Import internal functions for testing (we'll need to export them or test via the action) +// Since we can't easily test private functions, we'll test through the action handler +// But let's create a test file that focuses on testing the logic through the action + +describe('createEEDefinition', () => { + const logger = mockServices.logger.mock(); + const auth = mockServices.auth.mock(); + const discovery = mockServices.discovery.mock(); + const mockWorkspacePath = '/tmp/test-workspace'; + + beforeEach(() => { + jest.clearAllMocks(); + mockMkdir.mockResolvedValue(undefined); + mockWriteFile.mockResolvedValue(undefined); + mockParseUploadedFileContent.mockReturnValue(''); + // Use real yaml.load and yaml.dump implementation by default so validation works + const realYaml = jest.requireActual('js-yaml'); + mockYamlLoad.mockImplementation(realYaml.load); + mockYamlDump.mockImplementation(realYaml.dump); + // Use real EEDefinitionSchema.parse implementation by default + const realSchemas = jest.requireActual('./helpers/schemas'); + mockEEDefinitionSchemaParse.mockImplementation( + realSchemas.EEDefinitionSchema.parse, + ); + discovery.getBaseUrl.mockResolvedValue('http://localhost:7007/api/catalog'); + // Mock server manifest for MCP vars generation + const mockServerManifest = `--- +- role: common + servers: [] + vars: + common_mcp_base_path: /opt/mcp + common_golang_version: 1.25.4 + common_nodejs_min_version: 20 + common_system_bin_path: /usr/local/bin + common_uv_installer_url: https://astral.sh/uv/install.sh +- role: github_mcp + servers: + - name: github-mcp-server + type: stdio + lang: go + args: + - stdio + description: GitHub MCP Server - Access GitHub repositories, issues, and pull + requests + vars: + github_mcp_mode: local + 
github_mcp_build_repo: https://github.com/github/github-mcp-server.git + github_mcp_build_repo_branch: main + github_mcp_build_path: github/build +`; + mockFetch.mockResolvedValue({ + ok: true, + status: 200, + text: jest.fn().mockResolvedValue(mockServerManifest), + } as any); + auth.getOwnServiceCredentials.mockResolvedValue({ + token: 'service-token', + } as any); + auth.getPluginRequestToken.mockResolvedValue({ + token: 'plugin-token', + } as any); + }); + + describe('generateEEDefinition functionality', () => { + it('should generate EE definition with base image only', async () => { + const action = createEEDefinitionAction({ + frontendUrl: 'http://localhost:3000', + auth, + discovery, + }); + const ctx = { + input: { + values: { + eeFileName: 'test-ee', + baseImage: 'quay.io/ansible/ee-base:latest', + }, + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + expect(mockWriteFile).toHaveBeenCalled(); + const writeCall = mockWriteFile.mock.calls.find((call: any[]) => + call[0].toString().endsWith('test-ee.yaml'), + ); + expect(writeCall).toBeDefined(); + const content = writeCall![1] as string; + const expectedContent = dedent`--- + version: 3 + + images: + base_image: + name: 'quay.io/ansible/ee-base:latest' + + + additional_build_files: + - src: ./ansible.cfg + dest: configs + + additional_build_steps: + prepend_base: + - COPY _build/configs/ansible.cfg /etc/ansible/ansible.cfg + append_final: + - RUN rm -f /etc/ansible/ansible.cfg\n`; + expect(content).toEqual(expectedContent); + }); + + it('should generate EE definition with collections', async () => { + const action = createEEDefinitionAction({ + frontendUrl: 'http://localhost:3000', + auth, + discovery, + }); + const ctx = { + input: { + values: { + eeFileName: 'test-ee', + baseImage: 'quay.io/ansible/ee-base:latest', + collections: [ + { name: 'community.general', version: '1.0.0' }, + { name: 'ansible.netcommon' }, + ], + }, + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + const writeCall = mockWriteFile.mock.calls.find((call: any[]) => + call[0].toString().endsWith('test-ee.yaml'), + ); + const content = writeCall![1] as string; + const expectedContent = dedent`--- + version: 3 + + images: + base_image: + name: 'quay.io/ansible/ee-base:latest' + + dependencies: + galaxy: + collections: + - name: community.general + version: 1.0.0 + - name: ansible.netcommon + + additional_build_files: + - src: ./ansible.cfg + dest: configs + + additional_build_steps: + prepend_base: + - COPY _build/configs/ansible.cfg /etc/ansible/ansible.cfg + append_final: + - RUN rm -f /etc/ansible/ansible.cfg\n`; + expect(content).toEqual(expectedContent); + }); + + it('should generate EE definition with only Python requirements', async () => { + const action = createEEDefinitionAction({ + frontendUrl: 'http://localhost:3000', + auth, + discovery, + }); + const ctx = { + input: { + values: { + eeFileName: 'test-ee', + baseImage: 'quay.io/ansible/ee-base:latest', + pythonRequirements: ['requests==2.28.0', 'jinja2>=3.0.0'], + }, + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + const writeCall = mockWriteFile.mock.calls.find((call: any[]) => + call[0].toString().endsWith('test-ee.yaml'), + ); + const content = writeCall![1] as string; + const expectedContent = dedent`--- + version: 3 + + images: + base_image: + name: 'quay.io/ansible/ee-base:latest' + + 
dependencies: + python: + - requests==2.28.0 + - jinja2>=3.0.0 + + additional_build_files: + - src: ./ansible.cfg + dest: configs + + additional_build_steps: + prepend_base: + - COPY _build/configs/ansible.cfg /etc/ansible/ansible.cfg + append_final: + - RUN rm -f /etc/ansible/ansible.cfg\n`; + expect(content).toEqual(expectedContent); + }); + + it('should generate EE definition with system packages', async () => { + const action = createEEDefinitionAction({ + frontendUrl: 'http://localhost:3000', + auth, + discovery, + }); + const ctx = { + input: { + values: { + eeFileName: 'test-ee', + baseImage: 'quay.io/ansible/ee-base:latest', + systemPackages: [ + 'libssh-devel [platform:rpm]', + 'gcc-c++ [platform:dpkg]', + 'libffi-devel [platform:base-py3]', + ], + }, + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + const writeCall = mockWriteFile.mock.calls.find((call: any[]) => + call[0].toString().endsWith('test-ee.yaml'), + ); + const content = writeCall![1] as string; + const expectedContent = dedent`--- + version: 3 + + images: + base_image: + name: 'quay.io/ansible/ee-base:latest' + + dependencies: + system: + - libssh-devel [platform:rpm] + - gcc-c++ [platform:dpkg] + - libffi-devel [platform:base-py3] + + additional_build_files: + - src: ./ansible.cfg + dest: configs + + additional_build_steps: + prepend_base: + - COPY _build/configs/ansible.cfg /etc/ansible/ansible.cfg + append_final: + - RUN rm -f /etc/ansible/ansible.cfg\n`; + expect(content).toEqual(expectedContent); + }); + + it('should generate EE definition with collection signatures', async () => { + const action = createEEDefinitionAction({ + frontendUrl: 'http://localhost:3000', + auth, + discovery, + }); + const ctx = { + input: { + values: { + eeFileName: 'test-ee', + baseImage: 'quay.io/ansible/ee-base:latest', + collections: [ + { + name: 'community.general', + version: '1.0.0', + signatures: [ + 'https://examplehost.com/detached_signature.asc', + 'file:///path/to/local/detached_signature.asc', + ], + }, + ], + }, + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + const writeCall = mockWriteFile.mock.calls.find((call: any[]) => + call[0].toString().endsWith('test-ee.yaml'), + ); + const content = writeCall![1] as string; + const expectedContent = dedent`--- + version: 3 + + images: + base_image: + name: 'quay.io/ansible/ee-base:latest' + + dependencies: + galaxy: + collections: + - name: community.general + version: 1.0.0 + signatures: + - https://examplehost.com/detached_signature.asc + - file:///path/to/local/detached_signature.asc + + additional_build_files: + - src: ./ansible.cfg + dest: configs + + additional_build_steps: + prepend_base: + - COPY _build/configs/ansible.cfg /etc/ansible/ansible.cfg + append_final: + - RUN rm -f /etc/ansible/ansible.cfg\n`; + expect(content).toEqual(expectedContent); + }); + + it('should generate EE definition with additional build steps', async () => { + const action = createEEDefinitionAction({ + frontendUrl: 'http://localhost:3000', + auth, + discovery, + }); + const ctx = { + input: { + values: { + eeFileName: 'test-ee', + baseImage: 'quay.io/ansible/ee-base:latest', + additionalBuildSteps: [ + { + stepType: 'append_builder', + commands: ['RUN whoami', 'RUN pwd'], + }, + { + stepType: 'prepend_final', + commands: ['RUN ls -la'], + }, + ], + }, + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await 
action.handler(ctx); + + const writeCall = mockWriteFile.mock.calls.find((call: any[]) => + call[0].toString().endsWith('test-ee.yaml'), + ); + const content = writeCall![1] as string; + const expectedContent = dedent`--- + version: 3 + + images: + base_image: + name: 'quay.io/ansible/ee-base:latest' + + + additional_build_files: + - src: ./ansible.cfg + dest: configs + + additional_build_steps: + append_builder: + - RUN whoami + - RUN pwd + prepend_final: + - RUN ls -la + prepend_base: + - COPY _build/configs/ansible.cfg /etc/ansible/ansible.cfg + append_final: + - RUN rm -f /etc/ansible/ansible.cfg\n`; + expect(content).toEqual(expectedContent); + }); + + it('should generate EE definition with all inputs provided', async () => { + const action = createEEDefinitionAction({ + frontendUrl: 'http://localhost:3000', + auth, + discovery, + }); + const ctx = { + input: { + values: { + eeFileName: 'test-ee', + baseImage: 'quay.io/ansible/ee-base:latest', + collections: [ + { + name: 'community.general', + version: '1.0.0', + signatures: [ + 'https://examplehost.com/detached_signature.asc', + 'file:///path/to/local/detached_signature.asc', + ], + }, + ], + pythonRequirements: ['requests==2.28.0', 'jinja2>=3.0.0'], + systemPackages: [ + 'libssh-devel [platform:rpm]', + 'gcc-c++ [platform:dpkg]', + 'libffi-devel [platform:base-py3]', + ], + mcpServers: ['github', 'gitlab'], + additionalBuildSteps: [ + { + stepType: 'append_builder', + commands: ['RUN whoami', 'RUN pwd'], + }, + { + stepType: 'prepend_final', + commands: ['RUN ls -la'], + }, + ], + }, + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + const writeCall = mockWriteFile.mock.calls.find((call: any[]) => + call[0].toString().endsWith('test-ee.yaml'), + ); + const content = writeCall![1] as string; + const expectedContent = dedent`--- + version: 3 + + images: + base_image: + name: 'quay.io/ansible/ee-base:latest' + + dependencies: + python: + - requests==2.28.0 + - jinja2>=3.0.0 + system: + - libssh-devel [platform:rpm] + - gcc-c++ [platform:dpkg] + - libffi-devel [platform:base-py3] + galaxy: + collections: + - name: community.general + version: 1.0.0 + signatures: + - https://examplehost.com/detached_signature.asc + - file:///path/to/local/detached_signature.asc + - name: ansible.mcp_builder + - name: ansible.mcp + + additional_build_files: + - src: ./ansible.cfg + dest: configs + - src: ./mcp-vars.yaml + dest: configs + + additional_build_steps: + append_builder: + - RUN whoami + - RUN pwd + prepend_final: + - RUN ls -la + prepend_base: + - COPY _build/configs/ansible.cfg /etc/ansible/ansible.cfg + - COPY _build/configs/mcp-vars.yaml /tmp/mcp-vars.yaml + append_final: + - RUN ansible-playbook ansible.mcp_builder.install_mcp -e mcp_servers=github,gitlab -e @/tmp/mcp-vars.yaml + - RUN rm -f /etc/ansible/ansible.cfg /tmp/mcp-vars.yaml\n`; + expect(content).toEqual(expectedContent); + }); + + it('should group multiple commands for same step type', async () => { + const action = createEEDefinitionAction({ + frontendUrl: 'http://localhost:3000', + auth, + discovery, + }); + const ctx = { + input: { + values: { + eeFileName: 'test-ee', + baseImage: 'quay.io/ansible/ee-base:latest', + additionalBuildSteps: [ + { + stepType: 'append_builder', + commands: ['RUN echo "first"'], + }, + { + stepType: 'append_builder', + commands: ['RUN echo "second"'], + }, + ], + }, + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + 
+ const writeCall = mockWriteFile.mock.calls.find((call: any[]) => + call[0].toString().endsWith('test-ee.yaml'), + ); + const content = writeCall![1] as string; + const appendBuilderIndex = content.indexOf('append_builder:'); + const prependFinalIndex = content.indexOf('prepend_final:'); + expect(appendBuilderIndex).toBeGreaterThan(-1); + // Should only have one append_builder section + const appendBuilderSection = content.substring( + appendBuilderIndex, + prependFinalIndex > -1 ? prependFinalIndex : content.length, + ); + expect(appendBuilderSection).toContain('RUN echo "first"'); + expect(appendBuilderSection).toContain('RUN echo "second"'); + }); + }); + + describe('generateReadme functionality', () => { + it('should include build instructions in README', async () => { + const action = createEEDefinitionAction({ + frontendUrl: 'http://localhost:3000', + auth, + discovery, + }); + const ctx = { + input: { + values: { + eeFileName: 'test-ee', + baseImage: 'quay.io/ansible/ee-base:latest', + }, + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + const writeCall = mockWriteFile.mock.calls.find((call: any[]) => + call[0].toString().endsWith('README.md'), + ); + const content = writeCall![1] as string; + expect(content).toContain('ansible-builder build'); + expect(content).toContain('ansible-navigator'); + }); + }); + + describe('mergeCollections functionality', () => { + it('should merge collections from different sources', async () => { + const action = createEEDefinitionAction({ + frontendUrl: 'http://localhost:3000', + auth, + discovery, + }); + const ctx = { + input: { + values: { + eeFileName: 'test-ee', + baseImage: 'quay.io/ansible/ee-base:latest', + collections: [{ name: 'collection1' }], + popularCollections: ['collection2', 'collection3'], + }, + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + // Check the generated EE definition file content + const writeCall = mockWriteFile.mock.calls.find((call: any[]) => + call[0].toString().endsWith('test-ee.yaml'), + ); + expect(writeCall).toBeDefined(); + const content = writeCall![1] as string; + const parsed = yaml.load(content) as any; + const collections = parsed.dependencies?.galaxy?.collections || []; + expect(collections).toHaveLength(3); + expect(collections.map((c: any) => c.name)).toContain('collection1'); + expect(collections.map((c: any) => c.name)).toContain('collection2'); + expect(collections.map((c: any) => c.name)).toContain('collection3'); + }); + + it('should remove duplicate collections by name', async () => { + const action = createEEDefinitionAction({ + frontendUrl: 'http://localhost:3000', + auth, + discovery, + }); + const ctx = { + input: { + values: { + eeFileName: 'test-ee', + baseImage: 'quay.io/ansible/ee-base:latest', + collections: [{ name: 'collection1' }], + popularCollections: ['collection1'], + }, + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + // Check the generated EE definition file content + const writeCall = mockWriteFile.mock.calls.find((call: any[]) => + call[0].toString().endsWith('test-ee.yaml'), + ); + const content = writeCall![1] as string; + const parsed = yaml.load(content) as any; + const collections = parsed.dependencies?.galaxy?.collections || []; + expect(collections).toHaveLength(1); + expect(collections[0].name).toBe('collection1'); + }); + + it('should prefer collection without 
version over versioned one', async () => { + const action = createEEDefinitionAction({ + frontendUrl: 'http://localhost:3000', + auth, + discovery, + }); + const ctx = { + input: { + values: { + eeFileName: 'test-ee', + baseImage: 'quay.io/ansible/ee-base:latest', + collections: [{ name: 'collection1', version: '1.0.0' }], + popularCollections: ['collection1'], + }, + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + // Check the generated EE definition file content + const writeCall = mockWriteFile.mock.calls.find((call: any[]) => + call[0].toString().endsWith('test-ee.yaml'), + ); + const content = writeCall![1] as string; + const parsed = yaml.load(content) as any; + const collections = parsed.dependencies?.galaxy?.collections || []; + // When a versioned collection exists, it should be kept + // But if a non-versioned one comes later, it should win + // The non-versioned one from popularCollections should win + expect(collections).toHaveLength(1); + expect(collections[0].name).toBe('collection1'); + // Non-versioned collection should win (no version property) + expect(collections[0].version).toBeUndefined(); + }); + + it('should prefer higher version when both have versions', async () => { + mockSemverGt.mockImplementation((v1, v2) => { + if (v1 === '2.0.0' && v2 === '1.0.0') return true; + return false; + }); + + const action = createEEDefinitionAction({ + frontendUrl: 'http://localhost:3000', + auth, + discovery, + }); + const ctx = { + input: { + values: { + eeFileName: 'test-ee', + baseImage: 'quay.io/ansible/ee-base:latest', + collections: [ + { name: 'collection1', version: '1.0.0' }, + { name: 'collection1', version: '2.0.0' }, + ], + }, + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + // Check the generated EE definition file content + const writeCall = mockWriteFile.mock.calls.find((call: any[]) => + call[0].toString().endsWith('test-ee.yaml'), + ); + const content = writeCall![1] as string; + const parsed = yaml.load(content) as any; + const collections = parsed.dependencies?.galaxy?.collections || []; + expect(collections).toHaveLength(1); + expect(collections[0].version).toBe('2.0.0'); + }); + + it('should merge collections from uploaded file', async () => { + mockParseUploadedFileContent.mockImplementation((dataUrl: string) => { + if (dataUrl.includes('text/yaml')) { + return 'collections:\n - name: collection-from-file\n version: 1.0.0'; + } + return ''; + }); + // Use real yaml.load implementation to parse the YAML string + const realYaml = jest.requireActual('js-yaml'); + mockYamlLoad.mockImplementation(realYaml.load); + // Use real schema parse implementation + const realSchemas = jest.requireActual('./helpers/schemas'); + mockCollectionRequirementsSchemaParse.mockImplementation( + realSchemas.CollectionRequirementsSchema.parse, + ); + + const action = createEEDefinitionAction({ + frontendUrl: 'http://localhost:3000', + auth, + discovery, + }); + const ctx = { + input: { + values: { + eeFileName: 'test-ee', + baseImage: 'quay.io/ansible/ee-base:latest', + collections: [{ name: 'manual-collection' }], + collectionsFile: 'data:text/yaml;base64,test', + }, + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + // Check the generated EE definition file content + const writeCall = mockWriteFile.mock.calls.find((call: any[]) => + call[0].toString().endsWith('test-ee.yaml'), + 
); + const content = writeCall![1] as string; + const parsed = yaml.load(content) as any; + const collections = parsed.dependencies?.galaxy?.collections || []; + expect(collections.length).toBeGreaterThanOrEqual(1); + expect(collections).toEqual( + expect.arrayContaining([ + expect.objectContaining({ name: 'manual-collection' }), + expect.objectContaining({ name: 'collection-from-file' }), + ]), + ); + }); + }); + + describe('mergeRequirements functionality', () => { + it('should merge Python requirements from different sources', async () => { + mockParseUploadedFileContent.mockImplementation((dataUrl: string) => { + if (dataUrl.includes('text/plain')) { + return 'paramiko==5.0.0'; + } + return ''; + }); + + const action = createEEDefinitionAction({ + frontendUrl: 'http://localhost:3000', + auth, + discovery, + }); + const ctx = { + input: { + values: { + eeFileName: 'test-ee', + baseImage: 'quay.io/ansible/ee-base:latest', + pythonRequirements: ['requests==2.28.0'], + pythonRequirementsFile: 'data:text/plain;base64,paramiko==5.0.0', + }, + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + // Check the generated EE definition file content + const writeCall = mockWriteFile.mock.calls.find((call: any[]) => + call[0].toString().endsWith('test-ee.yaml'), + ); + expect(writeCall).toBeDefined(); + const content = writeCall![1] as string; + const parsed = yaml.load(content) as any; + const requirements = parsed.dependencies?.python || []; + expect(requirements).toContain('requests==2.28.0'); + expect(requirements).toContain('paramiko==5.0.0'); + }); + + it('should remove duplicate requirements', async () => { + const action = createEEDefinitionAction({ + frontendUrl: 'http://localhost:3000', + auth, + discovery, + }); + const ctx = { + input: { + values: { + eeFileName: 'test-ee', + baseImage: 'quay.io/ansible/ee-base:latest', + pythonRequirements: ['requests==2.28.0', 'requests==2.28.0'], + }, + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + // Check the generated EE definition file content + const writeCall = mockWriteFile.mock.calls.find((call: any[]) => + call[0].toString().endsWith('test-ee.yaml'), + ); + const content = writeCall![1] as string; + const parsed = yaml.load(content) as any; + const requirements = parsed.dependencies?.python || []; + expect(requirements).toHaveLength(1); + expect(requirements[0]).toBe('requests==2.28.0'); + }); + }); + + describe('mergePackages functionality', () => { + it('should merge system packages from different sources', async () => { + mockParseUploadedFileContent.mockImplementation((dataUrl: string) => { + if (dataUrl.includes('text/plain')) { + return 'curl'; + } + return ''; + }); + + const action = createEEDefinitionAction({ + frontendUrl: 'http://localhost:3000', + auth, + discovery, + }); + const ctx = { + input: { + values: { + eeFileName: 'test-ee', + baseImage: 'quay.io/ansible/ee-base:latest', + systemPackages: ['git'], + systemPackagesFile: 'data:text/plain;base64,curl', + }, + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + // Check the generated EE definition file content + const writeCall = mockWriteFile.mock.calls.find((call: any[]) => + call[0].toString().endsWith('test-ee.yaml'), + ); + expect(writeCall).toBeDefined(); + const content = writeCall![1] as string; + const parsed = yaml.load(content) as any; + const packages = 
parsed.dependencies?.system || []; + expect(packages).toContain('git'); + expect(packages).toContain('curl'); + }); + + it('should remove duplicate packages', async () => { + const action = createEEDefinitionAction({ + frontendUrl: 'http://localhost:3000', + auth, + discovery, + }); + const ctx = { + input: { + values: { + eeFileName: 'test-ee', + baseImage: 'quay.io/ansible/ee-base:latest', + systemPackages: ['git', 'git'], + }, + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + // Check the generated EE definition file content + const writeCall = mockWriteFile.mock.calls.find((call: any[]) => + call[0].toString().endsWith('test-ee.yaml'), + ); + const content = writeCall![1] as string; + const parsed = yaml.load(content) as any; + const packages = parsed.dependencies?.system || []; + expect(packages).toHaveLength(1); + expect(packages[0]).toBe('git'); + }); + }); + + describe('parseTextRequirementsFile functionality', () => { + it('should parse text requirements file correctly', async () => { + mockParseUploadedFileContent.mockImplementation((dataUrl: string) => { + if (dataUrl.includes('text/plain')) { + return 'requests==2.28.0\njinja2>=3.0.0\n\n# comment line'; + } + return ''; + }); + + const action = createEEDefinitionAction({ + frontendUrl: 'http://localhost:3000', + auth, + discovery, + }); + const ctx = { + input: { + values: { + eeFileName: 'test-ee', + baseImage: 'quay.io/ansible/ee-base:latest', + pythonRequirementsFile: 'data:text/plain;base64,test', + }, + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + // Check the generated EE definition file content + const writeCall = mockWriteFile.mock.calls.find((call: any[]) => + call[0].toString().endsWith('test-ee.yaml'), + ); + expect(writeCall).toBeDefined(); + const content = writeCall![1] as string; + const parsed = yaml.load(content) as any; + const requirements = parsed.dependencies?.python || []; + expect(requirements).toContain('requests==2.28.0'); + expect(requirements).toContain('jinja2>=3.0.0'); + // Empty lines and comment lines should be filtered out + expect(requirements).not.toContain(''); + expect(requirements).not.toContain('# comment line'); + }); + + it('should handle empty text requirements file', async () => { + mockParseUploadedFileContent.mockReturnValue(''); + + const action = createEEDefinitionAction({ + frontendUrl: 'http://localhost:3000', + auth, + discovery, + }); + const ctx = { + input: { + values: { + eeFileName: 'test-ee', + baseImage: 'quay.io/ansible/ee-base:latest', + pythonRequirementsFile: 'data:text/plain;base64,', + }, + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + // Check the generated EE definition file content + const writeCall = mockWriteFile.mock.calls.find((call: any[]) => + call[0].toString().endsWith('test-ee.yaml'), + ); + const content = writeCall![1] as string; + const parsed = yaml.load(content) as any; + const requirements = parsed.dependencies?.python || []; + expect(requirements).toEqual([]); + }); + }); + + describe('parseCollectionsFile functionality', () => { + it('should parse valid collections YAML file', async () => { + mockParseUploadedFileContent.mockImplementation((dataUrl: string) => { + if (dataUrl.includes('text/yaml')) { + return 'collections:\n - name: collection1\n version: 1.0.0\n - name: collection2'; + } + return ''; + }); + // Use real yaml.load 
implementation to parse the YAML string + const realYaml = jest.requireActual('js-yaml'); + mockYamlLoad.mockImplementation(realYaml.load); + // Use real schema parse implementation + const realSchemas = jest.requireActual('./helpers/schemas'); + mockCollectionRequirementsSchemaParse.mockImplementation( + realSchemas.CollectionRequirementsSchema.parse, + ); + + const action = createEEDefinitionAction({ + frontendUrl: 'http://localhost:3000', + auth, + discovery, + }); + const ctx = { + input: { + values: { + eeFileName: 'test-ee', + baseImage: 'quay.io/ansible/ee-base:latest', + collectionsFile: 'data:text/yaml;base64,test', + }, + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + expect(mockYamlLoad).toHaveBeenCalled(); + expect(mockCollectionRequirementsSchemaParse).toHaveBeenCalled(); + }); + + it('should handle empty collections file', async () => { + mockParseUploadedFileContent.mockReturnValue(''); + + const action = createEEDefinitionAction({ + frontendUrl: 'http://localhost:3000', + auth, + discovery, + }); + const ctx = { + input: { + values: { + eeFileName: 'test-ee', + baseImage: 'quay.io/ansible/ee-base:latest', + collectionsFile: '', + }, + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + // Should not throw and should complete successfully + expect(mockWriteFile).toHaveBeenCalled(); + }); + + it('should throw error for invalid YAML in collections file', async () => { + mockParseUploadedFileContent.mockReturnValue('invalid: yaml: content: ['); + mockYamlLoad.mockImplementation(() => { + throw new Error('YAML parse error'); + }); + + const action = createEEDefinitionAction({ + frontendUrl: 'http://localhost:3000', + auth, + discovery, + }); + const ctx = { + input: { + values: { + eeFileName: 'test-ee', + baseImage: 'quay.io/ansible/ee-base:latest', + collectionsFile: 'data:text/yaml;base64,invalid', + }, + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await expect(action.handler(ctx)).rejects.toThrow(); + }); + + it('should throw error for invalid schema in collections file', async () => { + mockParseUploadedFileContent.mockReturnValue('invalid: content'); + mockYamlLoad.mockReturnValue({ invalid: 'content' }); + const zodError = new z.ZodError([ + { + code: 'invalid_type', + expected: 'array', + path: ['collections'], + message: 'Expected array, received string', + } as z.ZodIssue, + ]); + mockCollectionRequirementsSchemaParse.mockImplementation(() => { + throw zodError; + }); + + const action = createEEDefinitionAction({ + frontendUrl: 'http://localhost:3000', + auth, + discovery, + }); + const ctx = { + input: { + values: { + eeFileName: 'test-ee', + baseImage: 'quay.io/ansible/ee-base:latest', + collectionsFile: 'data:text/yaml;base64,invalid', + }, + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await expect(action.handler(ctx)).rejects.toThrow( + 'Invalid collections file structure', + ); + }); + }); + + describe('generateMCPBuilderSteps functionality', () => { + it('should add MCP collections and build steps when MCP servers are specified', async () => { + const action = createEEDefinitionAction({ + frontendUrl: 'http://localhost:3000', + auth, + discovery, + }); + const ctx = { + input: { + values: { + eeFileName: 'test-ee', + baseImage: 'quay.io/ansible/ee-base:latest', + mcpServers: ['github_mcp', 'aws_mcp'], + }, + }, + logger, + workspacePath: 
mockWorkspacePath,
+        output: jest.fn(),
+      } as any;
+
+      await action.handler(ctx);
+
+      // Check the generated EE definition file content
+      const writeCall = mockWriteFile.mock.calls.find((call: any[]) =>
+        call[0].toString().endsWith('test-ee.yaml'),
+      );
+      expect(writeCall).toBeDefined();
+      const content = writeCall![1] as string;
+      const parsed = yaml.load(content) as any;
+
+      // Verify MCP collections are added
+      const collections = parsed.dependencies?.galaxy?.collections || [];
+      expect(
+        collections.some((c: any) => c.name === 'ansible.mcp_builder'),
+      ).toBeTruthy();
+      expect(
+        collections.some((c: any) => c.name === 'ansible.mcp'),
+      ).toBeTruthy();
+
+      // Verify MCP build steps are added
+      const buildSteps = parsed.additional_build_steps || {};
+      const appendFinalCommands = buildSteps.append_final || [];
+      expect(appendFinalCommands).toEqual([
+        'RUN ansible-playbook ansible.mcp_builder.install_mcp -e mcp_servers=github_mcp,aws_mcp -e @/tmp/mcp-vars.yaml',
+        'RUN rm -f /etc/ansible/ansible.cfg /tmp/mcp-vars.yaml',
+      ]);
+    });
+
+    it('should append to existing append_final step', async () => {
+      const action = createEEDefinitionAction({
+        frontendUrl: 'http://localhost:3000',
+        auth,
+        discovery,
+      });
+      const ctx = {
+        input: {
+          values: {
+            eeFileName: 'test-ee',
+            baseImage: 'quay.io/ansible/ee-base:latest',
+            mcpServers: ['github_mcp'],
+            additionalBuildSteps: [
+              {
+                stepType: 'append_final',
+                commands: ['RUN echo "existing command"'],
+              },
+            ],
+          },
+        },
+        logger,
+        workspacePath: mockWorkspacePath,
+        output: jest.fn(),
+      } as any;
+
+      await action.handler(ctx);
+
+      // Check the generated EE definition file content
+      const writeCall = mockWriteFile.mock.calls.find((call: any[]) =>
+        call[0].toString().endsWith('test-ee.yaml'),
+      );
+      const content = writeCall![1] as string;
+      const parsed = yaml.load(content) as any;
+      const buildSteps = parsed.additional_build_steps || {};
+      const appendFinalCommands = buildSteps.append_final || [];
+
+      const expectedCommands = [
+        'RUN ansible-playbook ansible.mcp_builder.install_mcp -e mcp_servers=github_mcp -e @/tmp/mcp-vars.yaml',
+        'RUN echo "existing command"',
+        'RUN rm -f /etc/ansible/ansible.cfg /tmp/mcp-vars.yaml',
+      ];
+
+      expect(appendFinalCommands).toEqual(expectedCommands);
+    });
+
+    it('should not add MCP steps when no MCP servers specified', async () => {
+      const action = createEEDefinitionAction({
+        frontendUrl: 'http://localhost:3000',
+        auth,
+        discovery,
+      });
+      const ctx = {
+        input: {
+          values: {
+            eeFileName: 'test-ee',
+            baseImage: 'quay.io/ansible/ee-base:latest',
+            mcpServers: [],
+          },
+        },
+        logger,
+        workspacePath: mockWorkspacePath,
+        output: jest.fn(),
+      } as any;
+
+      await action.handler(ctx);
+
+      // Check the generated EE definition file content
+      const writeCall = mockWriteFile.mock.calls.find((call: any[]) =>
+        call[0].toString().endsWith('test-ee.yaml'),
+      );
+      const content = writeCall![1] as string;
+      const parsed = yaml.load(content) as any;
+      const collections = parsed.dependencies?.galaxy?.collections || [];
+      expect(collections).toEqual([]);
+
+      // Verify no MCP build steps are added
+      const buildSteps = parsed.additional_build_steps || {};
+      expect(buildSteps.append_builder).toBeUndefined();
+    });
+  });
+
+  describe('generateMCPVarsContent functionality', () => {
+    it('should generate MCP vars content with only common vars when mcpServers contains only servers with empty vars', async () => {
+      const action = createEEDefinitionAction({
+        frontendUrl: 'http://localhost:3000',
+        auth,
+        discovery,
+ }); + const ctx = { + input: { + values: { + eeFileName: 'test-ee', + baseImage: 'quay.io/ansible/ee-base:latest', + mcpServers: ['aws_ccapi_mcp'], + }, + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + // Check that mcp-vars.yaml file was written + const writeCall = mockWriteFile.mock.calls.find((call: any[]) => + call[0].toString().endsWith('mcp-vars.yaml'), + ); + expect(writeCall).toBeDefined(); + const content = writeCall![1] as string; + + // Should include common vars (common is always added internally) + expect(content).toContain('# vars for common'); + expect(content).toContain('common_mcp_base_path: /opt/mcp'); + expect(content).toContain('common_golang_version: 1.25'); + expect(content).toContain('common_nodejs_min_version: 20'); + expect(content).toContain('common_system_bin_path: /usr/local/bin'); + expect(content).toContain( + 'common_uv_installer_url: https://astral.sh/uv/install.sh', + ); + + // Should not include vars for aws_ccapi_mcp (it has empty vars) + expect(content).not.toContain('# vars for aws_ccapi_mcp'); + + // Check output + const outputCall = ctx.output.mock.calls.find( + (call: any[]) => call[0] === 'mcpVarsContent', + ); + expect(outputCall).toBeDefined(); + expect(outputCall![1]).toBe(content); + }); + + it('should generate MCP vars content for azure_mcp server', async () => { + const action = createEEDefinitionAction({ + frontendUrl: 'http://localhost:3000', + auth, + discovery, + }); + const ctx = { + input: { + values: { + eeFileName: 'test-ee', + baseImage: 'quay.io/ansible/ee-base:latest', + mcpServers: ['azure_mcp'], + }, + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + const writeCall = mockWriteFile.mock.calls.find((call: any[]) => + call[0].toString().endsWith('mcp-vars.yaml'), + ); + expect(writeCall).toBeDefined(); + const content = writeCall![1] as string; + + // Should include azure_mcp vars + expect(content).toContain('# vars for azure_mcp'); + expect(content).toContain('azure_mcp_namespaces:'); + expect(content).toContain('- az'); + + // Should also include common vars + expect(content).toContain('# vars for common'); + expect(content).toContain('common_mcp_base_path: /opt/mcp'); + }); + + it('should generate MCP vars content for github_mcp server', async () => { + const action = createEEDefinitionAction({ + frontendUrl: 'http://localhost:3000', + auth, + discovery, + }); + const ctx = { + input: { + values: { + eeFileName: 'test-ee', + baseImage: 'quay.io/ansible/ee-base:latest', + mcpServers: ['github_mcp'], + }, + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + const writeCall = mockWriteFile.mock.calls.find((call: any[]) => + call[0].toString().endsWith('mcp-vars.yaml'), + ); + expect(writeCall).toBeDefined(); + const content = writeCall![1] as string; + + // Should include github_mcp vars + expect(content).toContain('# vars for github_mcp'); + expect(content).toContain('github_mcp_mode: local'); + expect(content).toContain( + 'github_mcp_build_repo: https://github.com/github/github-mcp-server.git', + ); + expect(content).toContain('github_mcp_build_repo_branch: main'); + expect(content).toContain('github_mcp_build_path: github/build'); + + // Should also include common vars + expect(content).toContain('# vars for common'); + }); + + it('should not include vars for MCP servers with empty vars', async () => { + const action = 
createEEDefinitionAction({ + frontendUrl: 'http://localhost:3000', + auth, + discovery, + }); + const ctx = { + input: { + values: { + eeFileName: 'test-ee', + baseImage: 'quay.io/ansible/ee-base:latest', + mcpServers: ['aws_ccapi_mcp', 'aws_cdk_mcp'], + }, + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + const writeCall = mockWriteFile.mock.calls.find((call: any[]) => + call[0].toString().endsWith('mcp-vars.yaml'), + ); + expect(writeCall).toBeDefined(); + const content = writeCall![1] as string; + + // Should not include vars for aws_ccapi_mcp or aws_cdk_mcp (they have empty vars) + expect(content).not.toContain('# vars for aws_ccapi_mcp'); + expect(content).not.toContain('# vars for aws_cdk_mcp'); + + // Should only include common vars + expect(content).toContain('# vars for common'); + expect(content).toContain('common_mcp_base_path: /opt/mcp'); + }); + + it('should generate MCP vars content for multiple servers with mixed vars', async () => { + const action = createEEDefinitionAction({ + frontendUrl: 'http://localhost:3000', + auth, + discovery, + }); + const ctx = { + input: { + values: { + eeFileName: 'test-ee', + baseImage: 'quay.io/ansible/ee-base:latest', + mcpServers: ['azure_mcp', 'github_mcp', 'aws_ccapi_mcp'], + }, + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + const writeCall = mockWriteFile.mock.calls.find((call: any[]) => + call[0].toString().endsWith('mcp-vars.yaml'), + ); + expect(writeCall).toBeDefined(); + const content = writeCall![1] as string; + + // Should include vars for azure_mcp + expect(content).toContain('# vars for azure_mcp'); + expect(content).toContain('azure_mcp_namespaces:'); + + // Should include vars for github_mcp + expect(content).toContain('# vars for github_mcp'); + expect(content).toContain('github_mcp_mode: local'); + + // Should not include vars for aws_ccapi_mcp (empty vars) + expect(content).not.toContain('# vars for aws_ccapi_mcp'); + + // Should include common vars + expect(content).toContain('# vars for common'); + expect(content).toContain('common_mcp_base_path: /opt/mcp'); + }); + + it('should generate valid YAML format for MCP vars content', async () => { + const action = createEEDefinitionAction({ + frontendUrl: 'http://localhost:3000', + auth, + discovery, + }); + const ctx = { + input: { + values: { + eeFileName: 'test-ee', + baseImage: 'quay.io/ansible/ee-base:latest', + mcpServers: ['azure_mcp', 'github_mcp'], + }, + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + const writeCall = mockWriteFile.mock.calls.find((call: any[]) => + call[0].toString().endsWith('mcp-vars.yaml'), + ); + expect(writeCall).toBeDefined(); + const content = writeCall![1] as string; + + // Should start with YAML document marker + expect(content).toMatch(/^---\n/); + + // Should be valid YAML (can be parsed) + const parsed = yaml.load(content); + expect(parsed).toBeDefined(); + + // Should end with exactly one newline + expect(content.endsWith('\n')).toBe(true); + expect(content.match(/\n$/g)?.length).toBe(1); + }); + + it('should not write mcp-vars.yaml file when no MCP servers are specified', async () => { + const action = createEEDefinitionAction({ + frontendUrl: 'http://localhost:3000', + auth, + discovery, + }); + const ctx = { + input: { + values: { + eeFileName: 'test-ee', + baseImage: 'quay.io/ansible/ee-base:latest', + mcpServers: [], + }, + 
}, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + // Should not write mcp-vars.yaml when mcpServers is empty + const writeCall = mockWriteFile.mock.calls.find((call: any[]) => + call[0].toString().endsWith('mcp-vars.yaml'), + ); + // Actually, looking at the code, it seems mcp-vars.yaml is only written if mcpServers.length > 0 + // But generateMCPVarsContent is only called when mcpServers.length > 0 + // So when empty, the file should not be written + expect(writeCall).toBeUndefined(); + + // mcpVarsContent should not be output + const outputCall = ctx.output.mock.calls.find( + (call: any[]) => call[0] === 'mcpVarsContent', + ); + expect(outputCall).toBeUndefined(); + }); + + it('should output mcpVarsContent when MCP servers are specified', async () => { + const action = createEEDefinitionAction({ + frontendUrl: 'http://localhost:3000', + auth, + discovery, + }); + const ctx = { + input: { + values: { + eeFileName: 'test-ee', + baseImage: 'quay.io/ansible/ee-base:latest', + mcpServers: ['azure_mcp'], + }, + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + // Check that mcpVarsContent was output + const outputCall = ctx.output.mock.calls.find( + (call: any[]) => call[0] === 'mcpVarsContent', + ); + expect(outputCall).toBeDefined(); + const mcpVarsContent = outputCall![1] as string; + + // Verify the content matches what was written to file + const writeCall = mockWriteFile.mock.calls.find((call: any[]) => + call[0].toString().endsWith('mcp-vars.yaml'), + ); + expect(writeCall).toBeDefined(); + expect(mcpVarsContent).toBe(writeCall![1]); + }); + + it('should preserve mcpServers array after generating vars (common should be removed)', async () => { + const action = createEEDefinitionAction({ + frontendUrl: 'http://localhost:3000', + auth, + discovery, + }); + const originalMcpServers = ['azure_mcp', 'github_mcp']; + const ctx = { + input: { + values: { + eeFileName: 'test-ee', + baseImage: 'quay.io/ansible/ee-base:latest', + mcpServers: [...originalMcpServers], + }, + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + const writeCall = mockWriteFile.mock.calls.find((call: any[]) => + call[0].toString().endsWith('mcp-vars.yaml'), + ); + expect(writeCall).toBeDefined(); + const content = writeCall![1] as string; + + // Should include vars for both servers and common + expect(content).toContain('# vars for azure_mcp'); + expect(content).toContain('# vars for github_mcp'); + expect(content).toContain('# vars for common'); + }); + }); + + describe('validateEEDefinition functionality', () => { + it('should validate valid EE definition', async () => { + const validEEDefinition = { + version: 3, + images: { base_image: { name: 'quay.io/ansible/ee-base:latest' } }, + }; + mockYamlLoad.mockReturnValue(validEEDefinition); + mockEEDefinitionSchemaParse.mockReturnValue(validEEDefinition); + + const action = createEEDefinitionAction({ + frontendUrl: 'http://localhost:3000', + auth, + discovery, + }); + const ctx = { + input: { + values: { + eeFileName: 'test-ee', + baseImage: 'quay.io/ansible/ee-base:latest', + }, + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + expect(mockEEDefinitionSchemaParse).toHaveBeenCalled(); + }); + + it('should throw error for schema validation failure', async () => { + /* + The invalid EE definition is just for 
reference. It is not used in the test. + It shows an example of what circumstances the schema validation will fail. + const invalidEEDefinition = { + version: 3, + // missing required images field + }; + */ + const zodError = new z.ZodError([ + { + code: 'invalid_type', + expected: 'object', + path: ['images'], + message: 'Required', + } as z.ZodIssue, + ]); + mockEEDefinitionSchemaParse.mockImplementation(() => { + throw zodError; + }); + + const action = createEEDefinitionAction({ + frontendUrl: 'http://localhost:3000', + auth, + discovery, + }); + const ctx = { + input: { + values: { + eeFileName: 'test-ee', + baseImage: 'quay.io/ansible/ee-base:latest', + }, + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await expect(action.handler(ctx)).rejects.toThrow( + 'Schema validation failed for the generated EE definition:\n- images: Required', + ); + }); + }); + + describe('contextDirName generation', () => { + it('should generate sanitized directory name from eeFileName', async () => { + const action = createEEDefinitionAction({ + frontendUrl: 'http://localhost:3000', + auth, + discovery, + }); + const ctx = { + input: { + values: { + eeFileName: 'My Test EE!', + baseImage: 'quay.io/ansible/ee-base:latest', + }, + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + const outputCall = ctx.output.mock.calls.find( + (call: any[]) => call[0] === 'contextDirName', + ); + expect(outputCall).toBeDefined(); + const dirName = outputCall![1]; + expect(dirName).toBe('my-test-ee'); + expect(dirName).toMatch(/^[a-z0-9-_]+$/); + }); + + it('should handle special characters in eeFileName', async () => { + const action = createEEDefinitionAction({ + frontendUrl: 'http://localhost:3000', + auth, + discovery, + }); + const ctx = { + input: { + values: { + eeFileName: 'EE@#$%^&*()', + baseImage: 'quay.io/ansible/ee-base:latest', + }, + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + const outputCall = ctx.output.mock.calls.find( + (call: any[]) => call[0] === 'contextDirName', + ); + const dirName = outputCall![1]; + expect(dirName).toEqual('ee'); + }); + }); + + describe('error handling', () => { + it('should handle file write errors', async () => { + mockWriteFile.mockRejectedValueOnce(new Error('Write failed')); + + const action = createEEDefinitionAction({ + frontendUrl: 'http://localhost:3000', + auth, + discovery, + }); + const ctx = { + input: { + values: { + eeFileName: 'test-ee', + baseImage: 'quay.io/ansible/ee-base:latest', + }, + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await expect(action.handler(ctx)).rejects.toThrow('Write failed'); + }); + + it('should handle directory creation errors', async () => { + mockMkdir.mockRejectedValueOnce(new Error('Mkdir failed')); + + const action = createEEDefinitionAction({ + frontendUrl: 'http://localhost:3000', + auth, + discovery, + }); + const ctx = { + input: { + values: { + eeFileName: 'test-ee', + baseImage: 'quay.io/ansible/ee-base:latest', + }, + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await expect(action.handler(ctx)).rejects.toThrow('Mkdir failed'); + }); + }); + + describe('generateEETemplate functionality', () => { + it('should generate EE template with minimal inputs', async () => { + const action = createEEDefinitionAction({ + frontendUrl: 'http://localhost:3000', + auth, + discovery, + 
}); + const ctx = { + input: { + values: { + eeFileName: 'test-ee', + eeDescription: 'Test EE Description', + baseImage: 'quay.io/ansible/ee-base:latest', + tags: [], + publishToSCM: true, + }, + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + const writeCall = mockWriteFile.mock.calls.find((call: any[]) => + call[0].toString().endsWith('template.yaml'), + ); + expect(writeCall).toBeDefined(); + const content = writeCall![1] as string; + + // Verify basic template structure + expect(content).toContain('apiVersion: scaffolder.backstage.io/v1beta3'); + expect(content).toContain('kind: Template'); + expect(content).toContain('name: test-ee'); + expect(content).toContain('title: test-ee'); + expect(content).toContain('description: Test EE Description'); + expect(content).toContain( + 'ansible.io/template-type: execution-environment', + ); + expect(content).toContain("ansible.io/saved-template: 'true'"); + expect(content).toContain('type: execution-environment'); + }); + + it('should generate EE template with default description when not provided', async () => { + const action = createEEDefinitionAction({ + frontendUrl: 'http://localhost:3000', + auth, + discovery, + }); + const ctx = { + input: { + values: { + eeFileName: 'test-ee', + baseImage: 'quay.io/ansible/ee-base:latest', + tags: [], + publishToSCM: true, + }, + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + const writeCall = mockWriteFile.mock.calls.find((call: any[]) => + call[0].toString().endsWith('template.yaml'), + ); + const content = writeCall![1] as string; + expect(content).toContain( + 'description: Saved Ansible Execution Environment Definition template', + ); + }); + + it('should generate EE template with collections', async () => { + const action = createEEDefinitionAction({ + frontendUrl: 'http://localhost:3000', + auth, + discovery, + }); + const ctx = { + input: { + values: { + eeFileName: 'test-ee', + eeDescription: 'Test EE', + baseImage: 'quay.io/ansible/ee-base:latest', + collections: [ + { name: 'community.general', version: '1.0.0' }, + { name: 'ansible.netcommon' }, + ], + tags: [], + publishToSCM: true, + }, + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + const writeCall = mockWriteFile.mock.calls.find((call: any[]) => + call[0].toString().endsWith('template.yaml'), + ); + const content = writeCall![1] as string; + + // Verify collections are included in the template + expect(content).toContain( + 'default: [{"name":"community.general","version":"1.0.0"},{"name":"ansible.netcommon"}]', + ); + }); + + it('should generate EE template with Python requirements', async () => { + const action = createEEDefinitionAction({ + frontendUrl: 'http://localhost:3000', + auth, + discovery, + }); + const ctx = { + input: { + values: { + eeFileName: 'test-ee', + eeDescription: 'Test EE', + baseImage: 'quay.io/ansible/ee-base:latest', + pythonRequirements: ['requests==2.28.0', 'jinja2>=3.0.0'], + tags: [], + publishToSCM: true, + }, + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + const writeCall = mockWriteFile.mock.calls.find((call: any[]) => + call[0].toString().endsWith('template.yaml'), + ); + const content = writeCall![1] as string; + + // Verify Python requirements are included + expect(content).toContain( + 'default: 
["requests==2.28.0","jinja2>=3.0.0"]', + ); + }); + + it('should generate EE template with system packages', async () => { + const action = createEEDefinitionAction({ + frontendUrl: 'http://localhost:3000', + auth, + discovery, + }); + const ctx = { + input: { + values: { + eeFileName: 'test-ee', + eeDescription: 'Test EE', + baseImage: 'quay.io/ansible/ee-base:latest', + systemPackages: ['git', 'curl'], + tags: [], + publishToSCM: true, + }, + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + const writeCall = mockWriteFile.mock.calls.find((call: any[]) => + call[0].toString().endsWith('template.yaml'), + ); + const content = writeCall![1] as string; + + // Verify system packages are included + expect(content).toContain('default: ["git","curl"]'); + }); + + it('should generate EE template with MCP servers', async () => { + const action = createEEDefinitionAction({ + frontendUrl: 'http://localhost:3000', + auth, + discovery, + }); + const ctx = { + input: { + values: { + eeFileName: 'test-ee', + eeDescription: 'Test EE', + baseImage: 'quay.io/ansible/ee-base:latest', + mcpServers: ['github', 'aws'], + tags: [], + publishToSCM: true, + }, + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + const writeCall = mockWriteFile.mock.calls.find((call: any[]) => + call[0].toString().endsWith('template.yaml'), + ); + const content = writeCall![1] as string; + + // Verify MCP servers are included + expect(content).toContain('default: ["github","aws"]'); + }); + + it('should generate EE template with additional build steps', async () => { + const action = createEEDefinitionAction({ + frontendUrl: 'http://localhost:3000', + auth, + discovery, + }); + const ctx = { + input: { + values: { + eeFileName: 'test-ee', + eeDescription: 'Test EE', + baseImage: 'quay.io/ansible/ee-base:latest', + additionalBuildSteps: [ + { + stepType: 'append_builder', + commands: ['RUN whoami', 'RUN pwd'], + }, + ], + tags: [], + publishToSCM: true, + }, + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + const writeCall = mockWriteFile.mock.calls.find((call: any[]) => + call[0].toString().endsWith('template.yaml'), + ); + const content = writeCall![1] as string; + + // Verify additional build steps are included + expect(content).toContain( + 'default: [{"stepType":"append_builder","commands":["RUN whoami","RUN pwd"]},{"stepType":"prepend_base","commands":["COPY _build/configs/ansible.cfg /etc/ansible/ansible.cfg"]},{"stepType":"append_final","commands":["RUN rm -f /etc/ansible/ansible.cfg"]}]', + ); + }); + + it('should generate EE template with tags', async () => { + const action = createEEDefinitionAction({ + frontendUrl: 'http://localhost:3000', + auth, + discovery, + }); + const ctx = { + input: { + values: { + eeFileName: 'test-ee', + eeDescription: 'Test EE', + baseImage: 'quay.io/ansible/ee-base:latest', + tags: ['ansible', 'automation', 'ee'], + publishToSCM: true, + }, + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + const writeCall = mockWriteFile.mock.calls.find((call: any[]) => + call[0].toString().endsWith('template.yaml'), + ); + const content = writeCall![1] as string; + + // Verify tags are included + expect(content).toContain('tags: ["ansible","automation","ee"]'); + }); + + it('should generate EE template with custom base image', async () => { 
+ const action = createEEDefinitionAction({ + frontendUrl: 'http://localhost:3000', + auth, + discovery, + }); + const ctx = { + input: { + values: { + eeFileName: 'test-ee', + eeDescription: 'Test EE', + baseImage: 'quay.io/ansible/ee-base:latest', + customBaseImage: 'quay.io/custom/ee-image:latest', + tags: [], + publishToSCM: true, + }, + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + const writeCall = mockWriteFile.mock.calls.find((call: any[]) => + call[0].toString().endsWith('template.yaml'), + ); + const content = writeCall![1] as string; + + // Verify custom base image is included in enum + expect(content).toContain("- 'quay.io/custom/ee-image:latest'"); + // Verify custom base image is in enumNames + const lines = content.split('\n'); + const enumIndex = lines.findIndex(line => line.includes('enum:')); + const enumNamesIndex = lines.findIndex(line => + line.includes('enumNames:'), + ); + expect(enumIndex).toBeGreaterThan(-1); + expect(enumNamesIndex).toBeGreaterThan(-1); + }); + + it('should generate EE template with all inputs provided', async () => { + const action = createEEDefinitionAction({ + frontendUrl: 'http://localhost:3000', + auth, + discovery, + }); + const ctx = { + input: { + values: { + eeFileName: 'test-ee', + eeDescription: 'Complete Test EE', + baseImage: 'quay.io/ansible/ee-base:latest', + customBaseImage: 'quay.io/custom/ee:latest', + collections: [{ name: 'community.general', version: '1.0.0' }], + pythonRequirements: ['requests==2.28.0'], + systemPackages: ['git'], + mcpServers: ['github'], + additionalBuildSteps: [ + { + stepType: 'append_builder', + commands: ['RUN echo "test"'], + }, + ], + tags: ['ansible', 'test'], + publishToSCM: true, + }, + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + const writeCall = mockWriteFile.mock.calls.find((call: any[]) => + call[0].toString().endsWith('template.yaml'), + ); + const content = writeCall![1] as string; + + // Verify all sections are present + expect(content).toContain('name: test-ee'); + expect(content).toContain('description: Complete Test EE'); + expect(content).toContain('tags: ["ansible","test"]'); + expect(content).toContain( + 'default: [{"name":"community.general","version":"1.0.0"},{"name":"ansible.mcp_builder"},{"name":"ansible.mcp"}]', + ); + expect(content).toContain('default: ["requests==2.28.0"]'); + expect(content).toContain('default: ["git"]'); + expect(content).toContain('default: ["github"]'); + expect(content).toContain("- 'quay.io/custom/ee:latest'"); + }); + + it('should handle empty arrays in template generation', async () => { + const action = createEEDefinitionAction({ + frontendUrl: 'http://localhost:3000', + auth, + discovery, + }); + const ctx = { + input: { + values: { + eeFileName: 'test-ee', + eeDescription: 'Test EE', + baseImage: 'quay.io/ansible/ee-base:latest', + collections: [], + pythonRequirements: [], + systemPackages: [], + mcpServers: [], + additionalBuildSteps: [], + tags: [], + publishToSCM: true, + }, + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + const writeCall = mockWriteFile.mock.calls.find((call: any[]) => + call[0].toString().endsWith('template.yaml'), + ); + const content = writeCall![1] as string; + + // Verify empty arrays are serialized correctly + expect(content).toContain('default: []'); + expect(content).toContain('tags: []'); + }); + + it('should 
include all template steps in generated template', async () => { + const action = createEEDefinitionAction({ + frontendUrl: 'http://localhost:3000', + auth, + discovery, + }); + const ctx = { + input: { + values: { + eeFileName: 'test-ee', + eeDescription: 'Test EE', + baseImage: 'quay.io/ansible/ee-base:latest', + tags: [], + publishToSCM: true, + }, + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + const writeCall = mockWriteFile.mock.calls.find((call: any[]) => + call[0].toString().endsWith('template.yaml'), + ); + const content = writeCall![1] as string; + + // Verify key steps are present + expect(content).toContain('id: create-ee-definition'); + expect(content).toContain('id: create-catalog-info-file'); + }); + + it('should include base image enum options in template', async () => { + const action = createEEDefinitionAction({ + frontendUrl: 'http://localhost:3000', + auth, + discovery, + }); + const ctx = { + input: { + values: { + eeFileName: 'test-ee', + eeDescription: 'Test EE', + baseImage: 'quay.io/ansible/ee-base:latest', + tags: [], + publishToSCM: true, + }, + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + const writeCall = mockWriteFile.mock.calls.find((call: any[]) => + call[0].toString().endsWith('template.yaml'), + ); + const content = writeCall![1] as string; + + // Verify default base image enum options are present + expect(content).toContain( + "- 'registry.access.redhat.com/ubi9/python-311:latest'", + ); + expect(content).toContain( + "- 'registry.redhat.io/ansible-automation-platform-25/ee-minimal-rhel9:latest'", + ); + }); + + it('should include popular collections enum in template', async () => { + const action = createEEDefinitionAction({ + frontendUrl: 'http://localhost:3000', + auth, + discovery, + }); + const ctx = { + input: { + values: { + eeFileName: 'test-ee', + eeDescription: 'Test EE', + baseImage: 'quay.io/ansible/ee-base:latest', + tags: [], + publishToSCM: true, + }, + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + const writeCall = mockWriteFile.mock.calls.find((call: any[]) => + call[0].toString().endsWith('template.yaml'), + ); + const content = writeCall![1] as string; + + // Verify popular collections enum includes expected collections + expect(content).toContain("- 'ansible.posix'"); + expect(content).toContain("- 'community.general'"); + expect(content).toContain("- 'community.crypto'"); + expect(content).toContain("- 'amazon.aws'"); + expect(content).toContain("- 'azure.azcollection'"); + }); + + it('should include MCP servers enum in template', async () => { + const action = createEEDefinitionAction({ + frontendUrl: 'http://localhost:3000', + auth, + discovery, + }); + const ctx = { + input: { + values: { + eeFileName: 'test-ee', + eeDescription: 'Test EE', + baseImage: 'quay.io/ansible/ee-base:latest', + tags: [], + publishToSCM: true, + }, + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + const writeCall = mockWriteFile.mock.calls.find((call: any[]) => + call[0].toString().endsWith('template.yaml'), + ); + const content = writeCall![1] as string; + + // Verify MCP servers enum includes expected options + expect(content).toContain('- Github'); + expect(content).toContain('- AWS'); + expect(content).toContain('- Azure'); + }); + + it('should include all build step types 
in enum', async () => { + const action = createEEDefinitionAction({ + frontendUrl: 'http://localhost:3000', + auth, + discovery, + }); + const ctx = { + input: { + values: { + eeFileName: 'test-ee', + eeDescription: 'Test EE', + baseImage: 'quay.io/ansible/ee-base:latest', + tags: [], + publishToSCM: true, + }, + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + const writeCall = mockWriteFile.mock.calls.find((call: any[]) => + call[0].toString().endsWith('template.yaml'), + ); + const content = writeCall![1] as string; + + // Verify all build step types are in enum + expect(content).toContain("- 'prepend_base'"); + expect(content).toContain("- 'append_base'"); + expect(content).toContain("- 'prepend_galaxy'"); + expect(content).toContain("- 'append_galaxy'"); + expect(content).toContain("- 'prepend_builder'"); + expect(content).toContain("- 'append_builder'"); + expect(content).toContain("- 'prepend_final'"); + expect(content).toContain("- 'append_final'"); + }); + }); +}); diff --git a/plugins/scaffolder-backend-module-backstage-rhaap/src/actions/createEEDefinition.ts b/plugins/scaffolder-backend-module-backstage-rhaap/src/actions/createEEDefinition.ts new file mode 100644 index 00000000..16781740 --- /dev/null +++ b/plugins/scaffolder-backend-module-backstage-rhaap/src/actions/createEEDefinition.ts @@ -0,0 +1,1695 @@ +import { createTemplateAction } from '@backstage/plugin-scaffolder-node'; +import * as fs from 'fs/promises'; +import * as path from 'path'; +import yaml from 'js-yaml'; +import semver from 'semver'; +import { z } from 'zod'; +import { + CollectionRequirementsSchema, + EEDefinitionSchema, +} from './helpers/schemas'; +import { parseUploadedFileContent } from './utils/utils'; +import { AuthService } from '@backstage/backend-plugin-api'; +import { DiscoveryService } from '@backstage/backend-plugin-api'; + +interface Collection { + name: string; + version?: string; + signatures?: string[]; + source?: string; + type?: string; +} + +const MCPSERVER_VARS = [ + { + role: 'aws_ccapi_mcp', + vars: {}, + }, + { + role: 'aws_cdk_mcp', + vars: {}, + }, + { + role: 'aws_core_mcp', + vars: {}, + }, + { + role: 'aws_iam_mcp', + vars: {}, + }, + { + role: 'azure_mcp', + vars: { + azure_mcp_namespaces: ['az'], + }, + }, + { + role: 'common', + vars: { + common_mcp_base_path: '/opt/mcp', + common_golang_version: 1.25, + common_nodejs_min_version: 20, + common_system_bin_path: '/usr/local/bin', + common_uv_installer_url: 'https://astral.sh/uv/install.sh', + }, + }, + { + role: 'github_mcp', + vars: { + github_mcp_mode: 'local', + github_mcp_build_repo: 'https://github.com/github/github-mcp-server.git', + github_mcp_build_repo_branch: 'main', + github_mcp_build_path: 'github/build', + }, + }, +]; + +const PRESET_IMAGES = { + minimal: { + name: 'registry.redhat.io/ansible-automation-platform-25/ee-minimal-rhel9:latest', + pkgMgrPath: '/usr/bin/microdnf', + }, +}; + +interface AdditionalBuildStep { + stepType: + | 'prepend_base' + | 'append_base' + | 'prepend_galaxy' + | 'append_galaxy' + | 'prepend_builder' + | 'append_builder' + | 'prepend_final' + | 'append_final'; + commands: string[]; +} + +interface EEDefinitionInput { + eeFileName: string; + eeDescription: string; + customBaseImage?: string; + tags: string[]; + publishToSCM: boolean; + baseImage: string; + collections?: Collection[]; + popularCollections?: string[]; + collectionsFile?: string; + pythonRequirements?: string[]; + pythonRequirementsFile?: string; + 
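+ // Note: the *File inputs (collectionsFile, pythonRequirementsFile, systemPackagesFile) carry data-url encoded uploads; the handler decodes them with parseUploadedFileContent and merges the parsed results with the inline lists.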
systemPackages?: string[]; + systemPackagesFile?: string; + mcpServers?: string[]; + additionalBuildSteps?: AdditionalBuildStep[]; +} + +export function createEEDefinitionAction(options: { + frontendUrl: string; + auth: AuthService; + discovery: DiscoveryService; +}) { + const { frontendUrl, auth, discovery } = options; + return createTemplateAction({ + id: 'ansible:create:ee-definition', + description: 'Creates Ansible Execution Environment definition files', + schema: { + input: { + type: 'object', + required: ['values'], + properties: { + values: { + type: 'object', + required: [ + 'baseImage', + 'eeFileName', + 'eeDescription', + 'publishToSCM', + ], + properties: { + eeFileName: { + title: 'Execution Environment File Name', + description: 'Name of the execution environment file', + type: 'string', + }, + eeDescription: { + title: 'Execution Environment Description', + description: 'Description for the saved Execution Environment', + type: 'string', + }, + tags: { + title: 'Tags', + description: + 'Tags to be included in the execution environment definition file', + type: 'array', + items: { type: 'string' }, + }, + publishToSCM: { + title: 'Publish to a SCM repository', + description: + 'Publish the Execution Environment definition and template to a SCM repository', + type: 'boolean', + }, + customBaseImage: { + title: 'Custom Base Image', + description: 'Custom base image for the execution environment', + type: 'string', + }, + collections: { + title: 'Ansible Collections', + description: 'List of Ansible collections to include', + type: 'array', + items: { + type: 'object', + properties: { + name: { + type: 'string', + description: 'Collection name (e.g., community.general)', + }, + version: { + type: 'string', + description: 'Collection version (optional)', + }, + source: { + type: 'string', + description: 'Collection source (optional)', + }, + type: { + type: 'string', + description: 'Collection type (optional)', + }, + signatures: { + type: 'array', + description: 'Collection signatures (optional)', + items: { + type: 'string', + }, + }, + }, + required: ['name'], + }, + }, + popularCollections: { + title: 'Popular Collections', + description: 'List of popular collection names to include', + type: 'array', + items: { + type: 'string', + }, + }, + collectionsFile: { + title: 'Collections File Content', + description: 'Content of uploaded requirements.yml file', + type: 'data-url', + }, + pythonRequirements: { + title: 'Python Requirements', + description: 'List of Python package requirements', + type: 'array', + items: { + type: 'string', + }, + }, + pythonRequirementsFile: { + title: 'Python Requirements File Content', + description: 'Content of uploaded requirements.txt file', + type: 'data-url', + }, + systemPackages: { + title: 'System Packages', + description: 'List of system packages to install', + type: 'array', + items: { + type: 'string', + }, + }, + systemPackagesFile: { + title: 'System Packages File Content', + description: 'Content of uploaded bindep.txt file', + type: 'data-url', + }, + mcpServers: { + title: 'MCP Servers', + description: 'List of MCP servers to install', + type: 'array', + items: { + type: 'string', + }, + }, + additionalBuildSteps: { + title: 'Additional Build Steps', + description: 'Custom build steps for the execution environment', + type: 'array', + default: [], + items: { + type: 'object', + properties: { + stepType: { + type: 'string', + enum: [ + 'prepend_base', + 'append_base', + 'prepend_galaxy', + 'append_galaxy', + 'prepend_builder', + 
'append_builder', + 'prepend_final', + 'append_final', + ], + }, + commands: { + type: 'array', + items: { + type: 'string', + }, + }, + }, + required: ['stepType', 'commands'], + }, + }, + }, + }, + }, + }, + output: { + type: 'object', + properties: { + contextDirName: { + title: 'Directory in the workspace where the files will created', + type: 'string', + }, + eeDefinitionContent: { + title: 'EE Definition Content', + type: 'string', + }, + generatedEntityRef: { + title: + 'Generated entity reference (for dynamically registered catalog entities ONLY)', + type: 'string', + }, + owner: { + title: 'Owner of the execution environment', + type: 'string', + }, + readmeContent: { + title: 'README Content', + type: 'string', + }, + mcpVarsContent: { + title: 'MCP Vars Content', + type: 'string', + }, + catalogInfoPath: { + title: + 'Relative path for the catalog-info.yaml file (for SCM publishing only)', + type: 'string', + }, + }, + }, + }, + async handler(ctx) { + const { input, logger, workspacePath } = ctx; + const values = input.values as unknown as EEDefinitionInput; + const baseImage = values.baseImage; + const collections = values.collections || []; + const popularCollections = values.popularCollections || []; + const collectionsFile = values.collectionsFile || ''; + const pythonRequirements = values.pythonRequirements || []; + const pythonRequirementsFile = values.pythonRequirementsFile || ''; + const systemPackages = values.systemPackages || []; + const systemPackagesFile = values.systemPackagesFile || ''; + const mcpServers = values.mcpServers || []; + const additionalBuildSteps = values.additionalBuildSteps || []; + const eeFileName = values.eeFileName || 'execution-environment'; + const eeDescription = values.eeDescription || 'Execution Environment'; + const tags = values.tags || []; + const owner = ctx.user?.ref || ''; + + // required for catalog component registration + ctx.output('owner', owner); + + // each EE created in a repository should be self contained in its own directory + const contextDirName = (eeFileName || 'execution-environment') + .toString() + .trim() + .toLowerCase() + .replace(/[^a-z0-9-_]/g, '-') + .replace(/-+/g, '-') // Replace multiple consecutive dashes with a single dash + .replace(/^-|-$/g, ''); // Remove leading and trailing dashes + + ctx.output('contextDirName', contextDirName); + + // create the directory path for the EE files + const eeDir = path.join(workspacePath, contextDirName); + // Ensure the directory exists (recursively) + await fs.mkdir(eeDir, { recursive: true }); + + // create the path for the EE definition file + const eeDefinitionPath = path.join(eeDir, `${eeFileName}.yaml`); + // create the path for the ansible.cfg file + const ansibleConfigPath = path.join(eeDir, 'ansible.cfg'); + // create the path for the README file + const readmePath = path.join(eeDir, 'README.md'); + // create docs directory for techdocs + const docsDir = path.join(eeDir, 'docs'); + await fs.mkdir(docsDir, { recursive: true }); + + // symlink the README file to the docs directory so that techdocs can pick it up + const docsMdPath = path.join(docsDir, 'index.md'); + + logger.info(`[ansible:create:ee-definition] EE base image: ${baseImage}`); + + const decodedCollectionsContent = + parseUploadedFileContent(collectionsFile); + const decodedPythonRequirementsContent = parseUploadedFileContent( + pythonRequirementsFile, + ); + const decodedSystemPackagesContent = + parseUploadedFileContent(systemPackagesFile); + + const parsedCollections = 
parseCollectionsFile(decodedCollectionsContent); + const parsedPythonRequirements = parseTextRequirementsFile( + decodedPythonRequirementsContent, + ); + const parsedSystemPackages = parseTextRequirementsFile( + decodedSystemPackagesContent, + ); + + // modify the additional build steps (generic) + modifyAdditionalBuildSteps(additionalBuildSteps, mcpServers); + + // generate MCP builder steps + // if any MCP servers are specified, we need to add the ansible.mcp ansible.mcp_builder collections + // for that we use the parsedCollections list + let mcpVarsContent: string = ''; + if (mcpServers.length > 0) { + generateMCPBuilderSteps( + mcpServers, + parsedCollections, + additionalBuildSteps, + ); + + // create mcp-vars.yaml content + mcpVarsContent = generateMCPVarsContent(mcpServers); + } + + try { + // Merge collections from different sources + const allCollections = mergeCollections( + collections, + popularCollections, + parsedCollections, + ); + + // Merge requirements from different sources + const allRequirements = mergeRequirements( + pythonRequirements, + parsedPythonRequirements, + ); + + // Merge packages from different sources + const allPackages = mergePackages(systemPackages, parsedSystemPackages); + logger.info( + `[ansible:create:ee-definition] collections: ${JSON.stringify(allCollections)}`, + ); + logger.info( + `[ansible:create:ee-definition] pythonRequirements: ${JSON.stringify(allRequirements)}`, + ); + logger.info( + `[ansible:create:ee-definition] systemPackages: ${JSON.stringify(allPackages)}`, + ); + logger.info( + `[ansible:create:ee-definition] additionalBuildSteps: ${JSON.stringify(additionalBuildSteps)}`, + ); + + // Create merged values object + const mergedValues = { + ...values, + // these are the merged/created/updated values from the different sources + collections: allCollections, + pythonRequirements: allRequirements, + systemPackages: allPackages, + additionalBuildSteps: additionalBuildSteps, + }; + // Generate EE definition file + const eeDefinition = generateEEDefinition(mergedValues); + // validate the generated EE definition YAML content + // this will throw an error if the generated EE definition YAML content is invalid + validateEEDefinition(eeDefinition); + + await fs.writeFile(eeDefinitionPath, eeDefinition); + logger.info( + `[ansible:create:ee-definition] created EE definition file ${eeFileName}.yaml at ${eeDefinitionPath}`, + ); + ctx.output('eeDefinitionContent', eeDefinition); + + // Generate README with instructions + const readmeContent = generateReadme( + mergedValues, + mcpServers, + values.publishToSCM, + ); + await fs.writeFile(readmePath, readmeContent); + ctx.output('readmeContent', readmeContent); + + // write MCP vars contents to mcp-vars.yaml + if (mcpVarsContent.length > 0) { + // create the path for the mcp-vars.yaml file + const mcpVarsPath = path.join(eeDir, 'mcp-vars.yaml'); + await fs.writeFile(mcpVarsPath, mcpVarsContent); + ctx.output('mcpVarsContent', mcpVarsContent); + } + + // write README contents to docs/index.md + await fs.writeFile(docsMdPath, readmeContent); + + // write ansible.cfg contents to ansible.cfg file + const ansibleConfigContent = await generateAnsibleConfigContent(); + await fs.writeFile(ansibleConfigPath, ansibleConfigContent); + + const eeTemplateContent = generateEETemplate(mergedValues); + + // perform the following only if the user has chosen to publish to a SCM repository + if (values.publishToSCM) { + const templatePath = path.join(eeDir, `${eeFileName}-template.yaml`); + await 
fs.writeFile(templatePath, eeTemplateContent); + logger.info( + `[ansible:create:ee-definition] created EE template file at ${templatePath}`, + ); + // generate catalog descriptor file path for the Execution Environment + // this is only needed if the user has chosen to publish to a SCM repository + // and we are creating a catalog-info.yaml file using the built-in `catalog:write` action + const catalogInfoPath = path.join( + contextDirName, + 'catalog-info.yaml', + ); + ctx.output('catalogInfoPath', catalogInfoPath); + } else { + // dynamically register the execution environment entity in the catalog + const baseUrl = await discovery.getBaseUrl('catalog'); + const { token } = await auth.getPluginRequestToken({ + onBehalfOf: await auth.getOwnServiceCredentials(), + targetPluginId: 'catalog', + }); + + // create the EE catalog entity object + const entity = generateEECatalogEntity( + eeFileName, + eeDescription, + tags, + owner, + eeDefinition, + readmeContent, + mcpVarsContent, + ansibleConfigContent, + eeTemplateContent, + ); + // register the EE catalog entity with the catalog + const response = await fetch(`${baseUrl}/aap/register_ee`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}`, + }, + body: JSON.stringify({ entity }), + }); + + if (response.ok) { + logger.info( + `[ansible:create:ee-definition] successfully registered EE catalog entity ${eeFileName} in the catalog`, + ); + } else { + const errorText = await response.text(); + throw new Error(`Failed to register EE definition: ${errorText}`); + } + } + + ctx.output( + 'generatedEntityRef', + `${frontendUrl}/self-service/catalog/${eeFileName}`, + ); + logger.info( + '[ansible:create:ee-definition] successfully created all Execution Environment files', + ); + } catch (error: any) { + throw new Error( + `[ansible:create:ee-definition] Failed to create EE definition files: ${error.message}`, + ); + } + }, + }); +} + +function generateEEDefinition(values: EEDefinitionInput): string { + const collections = values.collections || []; + const requirements = values.pythonRequirements || []; + const packages = values.systemPackages || []; + const additionalBuildSteps = values.additionalBuildSteps || []; + let overridePkgMgrPath = false; + + if (values.baseImage === PRESET_IMAGES.minimal.name) { + overridePkgMgrPath = true; + } + + // Build dependencies section using inline values (no separate files) + let dependenciesContent = ''; + + // Add Python requirements inline + if (requirements.length > 0) { + dependenciesContent += '\n python:'; + requirements.forEach(req => { + dependenciesContent += `\n - ${req}`; + }); + } + + // Add system packages inline + if (packages.length > 0) { + dependenciesContent += '\n system:'; + packages.forEach(pkg => { + dependenciesContent += `\n - ${pkg}`; + }); + } + + // Add galaxy collections inline + if (collections.length > 0) { + dependenciesContent += '\n galaxy:\n collections:'; + collections.forEach(collection => { + dependenciesContent += `\n - name: ${collection.name}`; + if (collection.version) { + dependenciesContent += `\n version: ${collection.version}`; + } + if (collection.type) { + dependenciesContent += `\n type: ${collection.type}`; + } + if (collection.source) { + dependenciesContent += `\n source: ${collection.source}`; + } + if (collection.signatures && collection.signatures.length > 0) { + dependenciesContent += `\n signatures:`; + collection.signatures.forEach(signature => { + dependenciesContent += `\n - 
${signature}`; + }); + } + }); + } + + // Add dependencies: prefix if any dependencies exist + if (dependenciesContent.length > 0) { + dependenciesContent = `\ndependencies:${dependenciesContent}`; + } + + let additional_build_files = `\nadditional_build_files:\n - src: ./ansible.cfg\n dest: configs`; + if (values.mcpServers && values.mcpServers.length > 0) { + additional_build_files += `\n - src: ./mcp-vars.yaml\n dest: configs`; + } + + let content = `--- +version: 3 + +images: + base_image: + name: '${values.baseImage}' +${dependenciesContent.trimEnd()} +${additional_build_files} +${overridePkgMgrPath ? `\noptions:\n package_manager_path: /usr/bin/microdnf\n` : ''}`; + + // Add additional_build_steps if any are defined + if (additionalBuildSteps.length > 0) { + const buildStepsGroups: Record<string, string[]> = {}; + additionalBuildSteps.forEach(step => { + if (!buildStepsGroups[step.stepType]) { + buildStepsGroups[step.stepType] = []; + } + buildStepsGroups[step.stepType].push(...step.commands); + }); + + content = content.trimEnd(); + content += '\nadditional_build_steps:'; + Object.entries(buildStepsGroups).forEach(([stepType, commands]) => { + content += `\n ${stepType}:`; + commands.forEach(command => { + content += `\n - ${command}`; + }); + }); + } + + return `${content.trimEnd()}\n`; +} + +function generateReadme( + values: EEDefinitionInput, + mcpServers: string[], + publishToSCM: boolean, +): string { + const eeFileName = values.eeFileName || 'execution-environment'; + + return `# Ansible Execution Environment Definition File: Getting Started Guide + +This guide explains how to build the **execution environment (EE)** you defined using **Ansible Builder** (the tool used to build EEs). An **EE** is a container image that bundles all the tools and collections your automation needs to run consistently. + +## TL;DR: Build Your Execution Environment + +**Quick Start**: Install \`ansible-builder\`, \`podman\` (or Docker), and \`ansible-navigator\`, then run: + +\`\`\`bash +ansible-builder build --file ${eeFileName}.yaml --tag ${eeFileName}:latest --container-runtime podman +\`\`\` + +**Important**: This quick start only builds the EE. Please continue reading to configure collection sources, test your EE, push it to a registry, and use it in AAP. + +## Step 1: Review What Was Generated + +First, let us review the files that were just created for you: + +- **${values.eeFileName}.yaml**: This is your EE's "blueprint." It's the main definition file that ansible-builder will use to construct your image. +- **${values.eeFileName}-template.yaml**: This is the Ansible self-service automation portal template that generated these files. You can import it and use it as a base to create new templates for your portal. +- **ansible.cfg**: This Ansible configuration file specifies the sources from which your collections will be retrieved; by default it includes **Automation Hub** and **Ansible Galaxy**. +${mcpServers && mcpServers.length > 0 ? '- **mcp-vars.yaml**: This Ansible variables file contains variables for the selected **Model Context Protocol (MCP) servers** which will be used when installing them in the Execution Environment.' : ''} +${publishToSCM ? '- **catalog-info.yaml**: This is the Ansible self-service automation portal file that registers this as a "component" in your portal\'s catalog.' 
: ''} + +## Step 2: Confirm Access to Collection Sources + +If your execution environment (EE) uses only collections that are available in Ansible Galaxy (such as \`community.general\`), you can skip this step and continue to **Step 3**. + +If your EE relies on collections from **Automation Hub**, **Private Automation Hub** or another private Galaxy server, you must update the generated **ansible.cfg** file so that \`ansible-builder\` can authenticate and download those collections. + +**Configure Automation Hub access** + +**Automation Hub** is already configured as a source in the generated **ansible.cfg** file. Open the file in your favorite text editor and update both the \`token\` fields with your **Automation Hub** token. If you already have a token, please ensure that it has not expired. + +If you do not have a token, please follow these steps: + +1. Navigate to [Ansible Automation Platform on the Red Hat Hybrid Cloud Console](https://console.redhat.com/ansible/automation-hub/token/). +2. From the navigation panel, select **Automation Hub** → **Connect to Hub**. +3. Under **Offline token**, click **Load Token**. +4. Click the [**Copy to clipboard**] icon to copy the offline token. +5. Paste the token into a file and store in a secure location. + +**Configure Private Automation Hub access** + +If you do not have a **Private Automation Hub (PAH)** or the EE does not require collection(s) to be installed from one you can skip this step and continue to **Step 3**. + +For **PAH**, an additional entry needs to be added to the generated **ansible.cfg** file in the same format as the existing Automation Hub entries with the appropriate \`url\`, \`auth_url\` and \`token\` for your **PAH**. + +To obtain your **Private Automation Hub** token: + +1. Log in to your private automation hub. +2. From the navigation panel, select **Automation Content** → **API token**. +3. Click **[Load Token]**. +4. To copy the API token, click the **[Copy to clipboard]** icon. + +For detailed instructions, refer to the official Red Hat Ansible Automation Platform 2.6 documentation for [managing automation content](https://docs.redhat.com/en/documentation/red_hat_ansible_automation_platform/2.6/html-single/managing_automation_content/index#proc-create-api-token-pah_cloud-sync). + +## Step 3: Install Required Tools + +With your configuration ready, you'll need the following tools on your local machine to build the image: + +- **ansible-builder** (The tool that builds the EE) +- **A container engine**: Podman (recommended) or Docker +- **ansible-navigator** (For testing your EE) + +### Red Hat Supported Installation (Recommended for RHEL/AAP environments) + +For Red Hat Enterprise Linux systems with Red Hat Ansible Automation Platform subscriptions: + +\`\`\`bash +# Install all tools via system package manager (Red Hat supported) +sudo dnf install -y ansible-core podman ansible-builder ansible-navigator +\`\`\` + +**Note**: \`ansible-builder\` and \`ansible-navigator\` availability via \`dnf\` depends on your RHEL version and AAP subscription. If not available via \`dnf\`, use the community method below. + +### Community-supported Installation Method + +For other systems or when Red Hat packages are not available: + +\`\`\`bash +# Install Ansible tools via pip +pip install ansible-core ansible-builder ansible-navigator +\`\`\` + +## Step 4: Build Your Execution Environment + +Now you're ready to build. 
Open your terminal in this directory and run the build command: + +\`\`\`bash +# This command uses your '${values.eeFileName}.yaml' file to build an image +# and tags it as '${values.eeFileName}:latest' + +ansible-builder build --file ${values.eeFileName}.yaml --tag ${values.eeFileName}:latest --container-runtime podman +\`\`\` + +### Command Options: +- You can change the \`tag\` (e.g., --tag my-custom-ee:1.0) +- If you're using Docker, change the runtime (\`--container-runtime docker\`) +- Add \`--verbosity 2\` for more detailed build output + +## Step 5 (Recommended): Test Your EE Locally + +This is the best way to verify your EE works before you share it. To do this, you can use \`ansible-navigator\`. + +### Create a Test Playbook + +Create a file named \`playbook.yaml\` in this directory: + +\`\`\`yaml +--- +- name: Test my new EE + hosts: localhost + connection: local + gather_facts: false + tasks: + - name: Print ansible version + ansible.builtin.command: ansible --version + register: ansible_version + + - name: Display version + ansible.builtin.debug: + var: ansible_version.stdout_lines + + - name: Test collection availability + ansible.builtin.debug: + msg: "EE is working correctly!" +\`\`\` + +### Run the Test Playbook + +\`\`\`bash +ansible-navigator run playbook.yaml --eei ${eeFileName}:latest --pull-policy missing +\`\`\` + +If it runs successfully, your EE is working! + +**Note**: The playbook provided is a generic example compatible with all correctly built EEs. You may tailor it to better match the EE you have built. + +## Step 6: Push to a Container Registry + +To use this EE in Ansible Automation Platform (AAP), it must live in a registry. Red Hat recommends using **Private Automation Hub** as your primary registry for enterprise environments. + +### Private Automation Hub (Recommended for Red Hat AAP) + +Private Automation Hub is the Red Hat supported registry for execution environments in enterprise AAP deployments. + +\`\`\`bash +# Tag the image for your Private Automation Hub +podman tag ${eeFileName}:latest your-pah-hostname/${eeFileName}:latest + +# Login to your Private Automation Hub +podman login your-pah-hostname + +# Push the image +podman push your-pah-hostname/${eeFileName}:latest +\`\`\` + +### Internal/Corporate Registry + +\`\`\`bash +# Use your organization's internal registry URL +podman tag ${eeFileName}:latest your-internal-registry.com/${eeFileName}:latest +podman login your-internal-registry.com +podman push your-internal-registry.com/${eeFileName}:latest +\`\`\` + +## Step 7: Use Your EE in Ansible Automation Platform + +Once your execution environment is built and pushed to a registry, you need to register it in AAP. + +#### Adding Your EE to AAP Controller: + +1. Log into **AAP** +2. Navigate to **Automation Execution** → **Infrastructure** → **Execution Environments** +3. Click **Create execution environment** and provide the details of your execution environment. + +#### Using Your EE in Job Templates: + +1. Navigate to **Automation Execution** → **Templates** +2. Create a new AAP Job Template or edit an existing one +3. In the **Execution Environment** field, select your newly added EE from the dropdown +4. 
Save and launch - your playbooks now run in your custom environment + +For detailed instructions, see the official Red Hat Ansible Automation Platform documentation: + +- [Creating and using execution environments](https://docs.redhat.com/en/documentation/red_hat_ansible_automation_platform/2.6/html/creating_and_using_execution_environments/index) +- [Ansible Automation Platform Job Templates](https://docs.redhat.com/en/documentation/red_hat_ansible_automation_platform/2.6/html/using_automation_execution/controller-job-templates#controller-create-job-template) + +## Step 8 (Optional): Import EE template into self-service automation portal + +If you want to reuse this execution environment template for future projects, you can import the generated **${eeFileName}.yaml** file into your self-service automation portal. + +#### Prerequisites: + +- You must be logged in to self-service automation portal as an Ansible Automation Platform administrator + +#### How to Import: + +1. **Access the portal and add template**: Navigate to your self-service automation portal, go to the **Templates** page, and click **Add template**. +2. **Import from Git repository**: Enter the Git SCM URL containing your \`${eeFileName}.yaml\` file, click **Analyze** to validate, review the details, then click **Import**. +3. **Configure RBAC**: Set up Role-Based Access Control (RBAC) to allow users to view and run your custom Execution Environment template + +Once imported and configured, other users can use your template as a starting point for their own execution environment projects, promoting consistency and best practices across your automation initiatives. + +For detailed instructions, see the [self-service automation portal documentation](https://docs.redhat.com/en/documentation/red_hat_ansible_automation_platform/2.6/html/using_self-service_automation_portal/self-service-working-templates_aap-self-service-using#self-service-add-template_self-service-working-templates). +`; +} + +function generateEETemplate(values: EEDefinitionInput): string { + const collectionsJson = JSON.stringify(values.collections); + const requirementsJson = JSON.stringify(values.pythonRequirements); + const packagesJson = JSON.stringify(values.systemPackages); + const buildStepsJson = JSON.stringify(values.additionalBuildSteps); + const tagsJson = JSON.stringify(values.tags); + const mcpServersJson = JSON.stringify(values.mcpServers); + + return `--- +apiVersion: scaffolder.backstage.io/v1beta3 +kind: Template +metadata: + name: ${values.eeFileName} + title: ${values.eeFileName} + description: ${values.eeDescription || 'Saved Ansible Execution Environment Definition template'} + annotations: + ansible.io/template-type: execution-environment + ansible.io/saved-template: 'true' + tags: ${tagsJson} +spec: + type: execution-environment + + parameters: + # Step 1: Base Image Selection + - title: Base Image + description: Configure the base image for your execution environment + properties: + baseImage: + title: Base execution environment image + type: string + default: '${values.customBaseImage || values.baseImage}' + enum: + - 'registry.access.redhat.com/ubi9/python-311:latest' + - 'registry.redhat.io/ansible-automation-platform-25/ee-minimal-rhel9:latest'${values.customBaseImage?.trim() ? `\n - '${values.customBaseImage}'` : ''} + enumNames: + - 'Red Hat Universal Base Image 9 w/ Python 3.11 (Recommended)' + - 'Red Hat Ansible Minimal EE base (RHEL 9) (Requires subscription)'${values.customBaseImage?.trim() ? 
`\n - '${values.customBaseImage}'` : ''} + ui:field: BaseImagePicker + dependencies: + baseImage: + oneOf: + # Case 1: When "Custom Image" is selected + - properties: + baseImage: + const: 'custom' + customBaseImage: + title: Custom Base Image + type: string + description: Enter a custom execution environment base image + ui: + field: EntityNamePicker + options: + allowArbitraryValues: true + help: 'Format: [registry[:port]/][namespace/]name[:tag]' + placeholder: 'e.g., quay.io/org/custom-ee:latest' + required: + - customBaseImage + + # Case 2: When any predefined base image is selected + - properties: + baseImage: + not: + const: 'custom' + + # Step 2: Ansible Collections + - title: Collections + description: Add collections to be included in your execution environment definition file (optional). + properties: + popularCollections: + title: Add Popular Collections + type: array + items: + type: string + enum: + - 'ansible.posix' + - 'community.general' + - 'community.crypto' + - 'ansible.windows' + - 'community.kubernetes' + - 'community.docker' + - 'cisco.ios' + - 'arista.eos' + - 'amazon.aws' + - 'azure.azcollection' + - 'google.cloud' + uniqueItems: true + ui:widget: checkboxes + ui:options: + layout: horizontal + collections: + title: Ansible Collections + type: array + default: ${collectionsJson} + description: Add collections manually + items: + type: object + properties: + name: + type: string + title: Collection Name + description: The name of the collection in namespace.collection format + pattern: '^[a-zA-Z0-9_]+\.[a-zA-Z0-9_]+$' + ui:placeholder: 'e.g., community.general' + version: + type: string + title: Version (Optional) + description: | + The version of the collection to install. + If not specified, the latest version will be installed. + ui:placeholder: 'e.g., 7.2.1' + source: + type: string + title: Source (Optional) + description: | + The Galaxy URL to pull the collection from. + If type is 'file', 'dir', or 'subdirs', this should be a local path to the collection. + ui:placeholder: 'e.g., https://github.com/ansible-collections/community.general' + type: + type: string + title: Type (Optional) + description: Determines the source of the collection. + enum: + - 'file' + - 'galaxy' + - 'git' + - 'url' + - 'dir' + - 'subdirs' + signatures: + type: array + title: Signatures (Optional) + description: | + A list of signature sources that are used to supplement those found on the Galaxy server during collection installation and ansible-galaxy collection verify. + Signature sources should be URIs that contain the detached signature. + items: + type: string + title: Signature + description: URI of the signature file + ui:field: CollectionsPicker + collectionsFile: + title: Upload a requirements.yml file + description: Optionally upload a requirements file with collection details + type: string + format: data-url + ui:field: FileUploadPicker + specifyRequirements: + title: Specify additional Python requirements and System packages + type: boolean + default: false + ui:help: "Check this box to define additional Python or system dependencies to include in your EE." + dependencies: + specifyRequirements: + oneOf: + - properties: + specifyRequirements: + const: true + pythonRequirements: + title: Additional Python Requirements + type: array + default: ${requirementsJson} + description: | + Specify additional python packages that are required in addition to what the selected collections already specify as dependencies. 
+ Packages already specified in the collections as a dependency should not be repeated here. + items: + type: string + title: Python package + description: Python package (with optional version specification) + ui:placeholder: 'e.g., requests>=2.28.0' + ui:field: PackagesPicker + pythonRequirementsFile: + type: string + format: data-url + title: Pick a file with Python requirements + description: Upload a requirements.txt file with python package details + ui:field: FileUploadPicker + systemPackages: + title: Additional System Packages + type: array + default: ${packagesJson} + description: | + Specify additional system-level packages that are required in addition to what the selected collections already specify as dependencies. + Packages already specified in the collections as a dependency should not be repeated here. + items: + type: string + title: System package + description: System package + ui:placeholder: 'e.g., libxml2-dev [platform:dpkg], libssh-devel [platform:rpm]' + ui:field: PackagesPicker + systemPackagesFile: + type: string + format: data-url + title: Pick a file with system packages + description: Upload a bindep.txt file with system package details + ui:field: FileUploadPicker + - properties: + specifyRequirements: + const: false + + # Step 3: MCP servers + - title: MCP servers + description: Add MCP servers to be installed in the execution environment definition file (optional). + properties: + mcpServers: + title: MCP Servers + type: array + default: ${mcpServersJson} + items: + type: string + title: MCP Server + enum: + - aws_ccapi_mcp + - aws_cdk_mcp + - aws_core_mcp + - aws_iam_mcp + - azure_mcp + - github_mcp + enumNames: + - AWS CCAPI + - AWS CDK + - AWS Core + - AWS IAM + - Azure + - GitHub + ui:field: MCPServersPicker + + # Step 4: Additional Build Steps + - title: Additional Build Steps + description: Add custom build steps that will be executed at specific points during the build process. These map to ansible-builder's additional_build_steps configuration. + properties: + additionalBuildSteps: + title: Additional Build Steps + type: array + default: ${buildStepsJson} + items: + type: object + properties: + stepType: + title: Step Type + type: string + description: When this build step should execute + enum: + - 'prepend_base' + - 'append_base' + - 'prepend_galaxy' + - 'append_galaxy' + - 'prepend_builder' + - 'append_builder' + - 'prepend_final' + - 'append_final' + enumNames: + - 'Prepend Base - Before base image dependencies' + - 'Append Base - After base image dependencies' + - 'Prepend Galaxy - Before Ansible collections' + - 'Append Galaxy - After Ansible collections' + - 'Prepend Builder - Before main build steps' + - 'Append Builder - After main build steps' + - 'Prepend Final - Before final image steps' + - 'Append Final - After final image steps' + default: 'prepend_base' + commands: + title: Commands + type: array + description: List of commands to execute + items: + type: string + required: ['stepType', 'commands'] + ui:field: AdditionalBuildStepsPicker + + # Step 9: Repository Configuration + - title: Generate and publish + description: Generate and publish the EE definition file and template. + properties: + eeFileName: + title: EE File Name + type: string + description: Name of the Execution Environment file. + ui:field: EEFileNamePicker + ui:help: "Specify the filename for the Execution Environment definition file." 
+ templateDescription: + title: Description + type: string + description: | + Description for the generated Execution Environment definition. + This description is used when displaying the Execution Environment definition in the catalog. + Additionally, this description is also used in the Software Template that is generated with SCM-based publishing. + tags: + title: Tags + description: | + Add tags to make this EE definition discoverable in the catalog. + The default execution-environment tag identifies this as an EE component; keeping it is highly recommended + type: array + default: + - 'execution-environment' + items: + type: string + ui: + options: + addable: true + orderable: true + removable: true + help: "Add one or more tags for the generated template." + publishToSCM: + title: Publish to a SCM repository + description: Publish the EE definition file and template to a SCM repository. + type: boolean + default: true + ui:help: "If unchecked, the EE definition file and template will not be pushed to a SCM repository. Regardless of your selection, you will get a link to download the files locally." + required: + - eeFileName + - templateDescription + dependencies: + publishToSCM: + oneOf: + - properties: + publishToSCM: + const: true + sourceControlProvider: + title: Select source control provider + description: Choose your source control provider + type: string + enum: + - Github + - Gitlab + ui: + component: select + help: Select the source control provider to publish the EE definition files to. + repositoryOwner: + title: SCM repository organization or username + type: string + description: The organization or username that owns the SCM repository + repositoryName: + title: Repository Name + type: string + description: Specify the name of the repository where the EE definition files will be published. + createNewRepository: + title: Create new repository + type: boolean + description: Create a new repository, if the specified one does not exist. + default: false + ui:help: "If unchecked, a new repository will not be created if the specified one does not exist. The generated files will not be published to a repository." 
+ required: + - sourceControlProvider + - repositoryOwner + - repositoryName + - createNewRepository + - properties: + publishToSCM: + const: false + + steps: + # Step 1: Create EE definition files + - id: create-ee-definition + name: Create Execution Environment Definition + action: ansible:create:ee-definition + input: + values: + eeFileName: \${{ parameters.eeFileName }} + eeDescription: \${{ parameters.templateDescription }} + tags: \${{ parameters.tags or [] }} + publishToSCM: \${{ parameters.publishToSCM }} + baseImage: \${{ parameters.baseImage === 'custom' and parameters.customBaseImage or parameters.baseImage }} + customBaseImage: \${{ parameters.customBaseImage or '' }} + popularCollections: \${{ parameters.popularCollections or [] }} + collections: \${{ parameters.collections or [] }} + collectionsFile: \${{ parameters.collectionsFile or [] }} + pythonRequirements: \${{ parameters.pythonRequirements or [] }} + pythonRequirementsFile: \${{ parameters.pythonRequirementsFile or [] }} + systemPackages: \${{ parameters.systemPackages or [] }} + systemPackagesFile: \${{ parameters.systemPackagesFile or [] }} + mcpServers: \${{ parameters.mcpServers or [] }} + additionalBuildSteps: \${{ parameters.additionalBuildSteps or [] }} + + # Step 3: Validate the SCM repository (optional) + - id: prepare-publish + action: ansible:prepare:publish + name: Prepare for publishing + if: \${{ parameters.publishToSCM }} + input: + sourceControlProvider: \${{ parameters.sourceControlProvider }} + repositoryOwner: \${{ parameters.repositoryOwner }} + repositoryName: \${{ parameters.repositoryName }} + createNewRepository: \${{ parameters.createNewRepository }} + eeFileName: \${{ parameters.eeFileName }} + contextDirName: \${{ steps['create-ee-definition'].output.contextDirName }} + + - id: create-catalog-info-file + action: catalog:write + if: \${{ parameters.publishToSCM }} + name: Create catalog component file for the EE Definition + input: + filePath: \${{ steps['create-ee-definition'].output.catalogInfoPath }} + entity: + apiVersion: backstage.io/v1alpha1 + kind: Component + metadata: + name: \${{ parameters.eeFileName }} + description: \${{ parameters.templateDescription }} + tags: \${{ parameters.tags or [] }} + annotations: + backstage.io/techdocs-ref: dir:. 
+ backstage.io/managed-by-location: \${{ steps['prepare-publish'].output.generatedRepoUrl }} + ansible.io/scm-provider: \${{ parameters.sourceControlProvider }} + spec: + type: execution-environment + owner: \${{ steps['create-ee-definition'].output.owner }} + lifecycle: production + + # Step 5: Create and publish to a new GitHub Repository + - id: publish-github + name: Create and publish to a new GitHub Repository + action: publish:github + if: \${{ (parameters.publishToSCM) and (steps['prepare-publish'].output.createNewRepo) and (parameters.sourceControlProvider == 'Github') }} + input: + description: \${{ parameters.templateDescription }} + repoUrl: \${{ steps['prepare-publish'].output.generatedRepoUrl }} + defaultBranch: 'main' + repoVisibility: 'public' + + # Step 5: Create and publish to a new Gitlab Repository + - id: publish-gitlab + name: Create and publish to a new GitLab Repository + action: publish:gitlab + if: \${{ (parameters.publishToSCM) and (steps['prepare-publish'].output.createNewRepo) and parameters.sourceControlProvider == 'Gitlab' }} + input: + repoUrl: \${{ steps['prepare-publish'].output.generatedRepoUrl }} + defaultBranch: 'main' + repoVisibility: 'public' + + # Step 5: Publish generated files as a Github Pull Request + - id: publish-github-pull-request + name: Publish generated files as a Github Pull Request + action: publish:github:pull-request + if: \${{ parameters.publishToSCM and (not steps['prepare-publish'].output.createNewRepo) and (parameters.sourceControlProvider == 'Github') }} + input: + repoUrl: \${{ steps['prepare-publish'].output.generatedRepoUrl }} + branchName: \${{ steps['prepare-publish'].output.generatedBranchName }} + title: \${{ steps['prepare-publish'].output.generatedTitle }} + description: \${{ steps['prepare-publish'].output.generatedDescription }} + + # Step 5: Publish generated files as a Gitlab Merge Request + - id: publish-gitlab-merge-request + name: Publish generated files as a Gitlab Merge Request + action: publish:gitlab:merge-request + if: \${{ parameters.publishToSCM and (not steps['prepare-publish'].output.createNewRepo) and (parameters.sourceControlProvider == 'Gitlab') }} + input: + repoUrl: \${{ steps['prepare-publish'].output.generatedRepoUrl }} + branchName: \${{ steps['prepare-publish'].output.generatedBranchName }} + title: \${{ steps['prepare-publish'].output.generatedTitle }} + description: \${{ steps['prepare-publish'].output.generatedDescription }} + + - id: register-catalog-component + name: Register published EE as a Catalog Component + action: catalog:register + if: \${{ parameters.publishToSCM }} + input: + catalogInfoUrl: \${{ steps['prepare-publish'].output.generatedCatalogInfoUrl }} + optional: true + + output: + links: + - title: \${{ parameters.sourceControlProvider }} Repository + url: \${{ steps['prepare-publish'].output.generatedFullRepoUrl }} + if: \${{ (parameters.publishToSCM) and (steps['prepare-publish'].output.createNewRepo) }} + icon: \${{ parameters.sourceControlProvider | lower }} + + - title: GitHub Pull Request + url: \${{ steps['publish-github-pull-request'].output.remoteUrl }} + if: \${{ (parameters.publishToSCM) and (not steps['prepare-publish'].output.createNewRepo) and (parameters.sourceControlProvider == 'Github') }} + icon: github + + - title: GitLab Merge Request + url: \${{ steps['publish-gitlab-merge-request'].output.mergeRequestUrl }} + if: \${{ (parameters.publishToSCM) and (not steps['prepare-publish'].output.createNewRepo) and (parameters.sourceControlProvider == 'Gitlab') }} + + 
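The step if: expressions above route every run down exactly one publish path. As a readability aid, the same routing expressed as a plain function; this is illustrative only, since the template itself evaluates these conditions:

// Sketch of the routing encoded in the publish steps' `if:` expressions.
type PublishPath =
  | 'skip'                  // publishToSCM is false
  | 'github-new-repo'       // publish:github
  | 'gitlab-new-repo'       // publish:gitlab
  | 'github-pull-request'   // publish:github:pull-request
  | 'gitlab-merge-request'; // publish:gitlab:merge-request

function selectPublishPath(options: {
  publishToSCM: boolean;
  createNewRepo: boolean; // output of the prepare-publish step
  sourceControlProvider: 'Github' | 'Gitlab';
}): PublishPath {
  const { publishToSCM, createNewRepo, sourceControlProvider } = options;
  if (!publishToSCM) return 'skip';
  if (createNewRepo) {
    return sourceControlProvider === 'Github'
      ? 'github-new-repo'
      : 'gitlab-new-repo';
  }
  return sourceControlProvider === 'Github'
    ? 'github-pull-request'
    : 'gitlab-merge-request';
}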
- title: View details in catalog + icon: catalog + url: \${{ steps['create-ee-definition'].output.generatedEntityRef }} + if: \${{ not (steps['publish-github-pull-request'].output.remoteUrl or steps['publish-gitlab-merge-request'].output.mergeRequestUrl) }} + + text: + - title: Next Steps + content: | + \${{ steps['create-ee-definition'].output.readmeContent }} +`; +} + +function generateAnsibleConfigContent(): string { + return `[galaxy] +server_list=automation_hub_published, automation_hub_validated, release_galaxy + +[galaxy_server.release_galaxy] +url=https://galaxy.ansible.com/ + +[galaxy_server.automation_hub_published] +url=https://console.redhat.com/api/automation-hub/content/published/ +auth_url=https://sso.redhat.com/auth/realms/redhat-external/protocol/openid-connect/token +# Add the token for the automation hub published server +token= + +[galaxy_server.automation_hub_validated] +url=https://console.redhat.com/api/automation-hub/content/validated/ +auth_url=https://sso.redhat.com/auth/realms/redhat-external/protocol/openid-connect/token +# Add the token for the automation hub validated server +token= +`; +} + +function generateEECatalogEntity( + componentName: string, + description: string, + tags: string[], + owner: string, + eeDefinitionContent: string, + readmeContent: string, + mcpVarsContent: string, + ansibleConfigContent: string, + eeTemplateContent: string, +) { + const catalogEntity: any = { + apiVersion: 'backstage.io/v1alpha1', + kind: 'Component', + metadata: { + name: componentName, + title: componentName, + description: description, + tags: tags, + annotations: { + 'backstage.io/managed-by-location': `url:127.0.0.1`, + 'backstage.io/managed-by-origin-location': `url:127.0.0.1`, + 'ansible.io/download-experience': 'true', + }, + }, + spec: { + type: 'execution-environment', + lifecycle: 'production', + owner: owner, + definition: eeDefinitionContent, + template: eeTemplateContent, + readme: readmeContent, + ansible_cfg: ansibleConfigContent, + }, + }; + + if (mcpVarsContent !== '') { + catalogEntity.spec.mcp_vars = mcpVarsContent; + } + return catalogEntity; +} + +function mergeCollections( + collections: Collection[], + popularCollections: string[], + parsedCollections: Array>, +): Collection[] { + const collectionsRequirements: Collection[] = []; + + // Add individual collections + if (collections) { + collectionsRequirements.push(...collections); + } + + // Add popular collections (convert string names to Collection objects) + if (popularCollections) { + const popularCollectionObjects = popularCollections.map(name => ({ name })); + collectionsRequirements.push(...popularCollectionObjects); + } + + // Add content from uploaded collection requirements file + if (parsedCollections && Array.isArray(parsedCollections)) { + parsedCollections.forEach(item => { + if (item && typeof item === 'object' && 'name' in item) { + collectionsRequirements.push(item as Collection); + } + }); + } + + // Remove duplicates based on collection name + const uniqueCollections = Object.values( + collectionsRequirements.reduce>((acc, curr) => { + const existing = acc[curr.name]; + + // If nothing stored yet, take current + if (!existing) { + acc[curr.name] = curr; + return acc; + } + + // Rule 1: Any entry without version wins immediately (no comparison needed) + // the most recent version will automatically be pulled from AH/Galaxy + if (!existing.version) { + return acc; // existing stays + } + + // if the current entry has no version, it wins + // discarding the other ones + if 
(!curr.version) { + acc[curr.name] = curr; // curr wins due to no version + return acc; + } + + // Rule 2: Compare semantic versions, keep higher + if (semver.gt(curr.version, existing.version)) { + acc[curr.name] = curr; + } + + return acc; + }, {}), + ); + + return uniqueCollections; +} + +function mergeRequirements( + pythonRequirements: string[], + parsedPythonRequirements: string[], +): string[] { + const requirements: string[] = []; + + // Add individual requirements + if (pythonRequirements) { + requirements.push(...pythonRequirements); + } + + // Add content from uploaded Python requirements file + if (parsedPythonRequirements) { + requirements.push(...parsedPythonRequirements); + } + + // Remove duplicates + return Array.from(new Set(requirements)); +} + +function mergePackages( + systemPackages: string[], + parsedSystemPackages: string[], +): string[] { + const packages: string[] = []; + + // Add individual packages + if (systemPackages) { + packages.push(...systemPackages); + } + + // Add content from uploaded Python requirements file + if (parsedSystemPackages) { + packages.push(...parsedSystemPackages); + } + + // Remove duplicates + return Array.from(new Set(packages)); +} + +function parseTextRequirementsFile(decodedContent: string): string[] { + let parsedRequirements: string[] = []; + try { + if (decodedContent) { + parsedRequirements = decodedContent + .split('\n') + .map(line => line.trim()) + .filter(line => line.length > 0 && !line.startsWith('#')); + } + } catch (error: any) { + throw new Error( + `Failed to parse Python requirements file: ${error.message}`, + ); + } + return parsedRequirements; +} + +function parseCollectionsFile(decodedCollectionsContent: string): Collection[] { + if (!decodedCollectionsContent?.trim()) { + return []; + } + + try { + const parsedYaml = yaml.load(decodedCollectionsContent.trim()); + + const validated = CollectionRequirementsSchema.parse(parsedYaml); + + return validated.collections; + } catch (err: any) { + // this will result from the content not conforming to the schema defined above + if (err instanceof z.ZodError) { + throw new Error( + `Invalid collections file structure:\n${err.issues.map(e => `- ${e.path.join('.')}: ${e.message}`).join('\n')}`, + ); + } + + // this will result from the content not being valid YAML or any other error + throw new Error(`Failed to parse collections file: ${err.message}`); + } +} + +function generateMCPBuilderSteps( + mcpServers: string[], + parsedCollections: Collection[], + additionalBuildSteps: AdditionalBuildStep[], +) { + // If mcpServers are specified, add them to the collections list + // and add the MCP install playbook command to the additional build steps + const mcpInstallCmd = `RUN ansible-playbook ansible.mcp_builder.install_mcp -e mcp_servers=${mcpServers.join(',')} -e @/tmp/mcp-vars.yaml`; + + parsedCollections.push( + { name: 'ansible.mcp_builder' }, + { name: 'ansible.mcp' }, + ); + + // Find if there's already a step with stepType 'append_final' + const appendFinalStep = additionalBuildSteps.find( + step => step.stepType === 'append_final', + ); + + if (appendFinalStep) { + // If found, add the MCP install playbook command to its commands array as the first command + appendFinalStep.commands.unshift(mcpInstallCmd); + } else { + // Otherwise, create a new step entry + additionalBuildSteps.push({ + stepType: 'append_final', + commands: [mcpInstallCmd], + }); + } +} + +function modifyAdditionalBuildSteps( + additionalBuildSteps: AdditionalBuildStep[], + mcpServers: string[], +) { + 
+  // the ansible.cfg step is mandatory
+  const prependBaseStepCommands: string[] = [
+    'COPY _build/configs/ansible.cfg /etc/ansible/ansible.cfg',
+  ];
+  let appendFinalStepCommands: string = 'RUN rm -f /etc/ansible/ansible.cfg';
+
+  if (mcpServers.length > 0) {
+    // the mcp-vars.yaml step is required only if MCP servers are specified
+    prependBaseStepCommands.push(
+      'COPY _build/configs/mcp-vars.yaml /tmp/mcp-vars.yaml',
+    );
+    // remove the mcp-vars.yaml file after the build only if MCP servers are specified
+    appendFinalStepCommands += ' /tmp/mcp-vars.yaml';
+  }
+
+  // Find if there's already a step with stepType 'prepend_base'
+  const prependBaseStep = additionalBuildSteps.find(
+    step => step.stepType === 'prepend_base',
+  );
+
+  if (prependBaseStep) {
+    // If found, add the ansible.cfg (and mcp-vars.yaml) COPY commands to its commands array
+    prependBaseStep.commands.push(...prependBaseStepCommands);
+  } else {
+    // Otherwise, create a new step entry
+    additionalBuildSteps.push({
+      stepType: 'prepend_base',
+      commands: prependBaseStepCommands,
+    });
+  }
+
+  // Find if there's already a step with stepType 'append_final'
+  const appendFinalStep = additionalBuildSteps.find(
+    step => step.stepType === 'append_final',
+  );
+
+  if (appendFinalStep) {
+    // If found, add the cleanup command to its commands array
+    appendFinalStep.commands.push(appendFinalStepCommands);
+  } else {
+    // Otherwise, create a new step entry
+    additionalBuildSteps.push({
+      stepType: 'append_final',
+      commands: [appendFinalStepCommands],
+    });
+  }
+}
+
+function generateMCPVarsContent(mcpServers: string[]): string {
+  // the 'common' role does not need to be explicitly installed,
+  // but its vars should be included in the MCP vars file
+  mcpServers.push('common');
+
+  // Filter sections matching roles
+  const filtered = MCPSERVER_VARS.filter((entry: any) =>
+    mcpServers.includes(entry.role),
+  );
+
+  // Build final YAML string
+  let output: string = '---\n';
+
+  for (const entry of filtered) {
+    // Dump only the "vars" section (if it exists and is not empty)
+    if (entry.vars && Object.keys(entry.vars).length > 0) {
+      output += `# vars for ${entry.role}\n`;
+      const varsYaml = yaml.dump(entry.vars);
+      // Indentation safety: yaml.dump already returns valid YAML
+      // yaml.dump adds a trailing newline, so we append it directly
+      output += varsYaml;
+      output += '\n';
+    }
+  }
+  // drop common from the list of MCP servers
+  // it was only added to get its vars
+  mcpServers.pop();
+
+  // Ensure exactly one trailing newline (yaml.dump already adds one, but trim to be safe)
+  return `${output.trimEnd()}\n`;
+}
+
+function validateEEDefinition(eeDefinition: string): boolean {
+  if (!eeDefinition?.trim()) {
+    throw new Error('EE definition content is empty');
+  }
+
+  // load the generated EE definition YAML content
+  let parsed: {};
+  try {
+    parsed = yaml.load(eeDefinition.trim()) as {};
+  } catch (e: any) {
+    throw new Error(
+      `Invalid YAML syntax in the generated EE definition: ${e.message}`,
+    );
+  }
+
+  // validate the generated EE definition YAML content against the schema
+  try {
+    EEDefinitionSchema.parse(parsed);
+    return true;
+  } catch (e: any) {
+    if (e instanceof z.ZodError) {
+      const formatted = e.issues
+        .map(err => `- ${err.path.join('.')}: ${err.message}`)
+        .join('\n');
+
+      throw new Error(
+        `Schema validation failed for the generated EE definition:\n${formatted}`,
+      );
+    }
+
+    throw new Error(
+      `Unknown error validating the generated EE definition: ${e.message}`,
+    );
+  }
+}
diff --git
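For reference, a minimal definition that validateEEDefinition accepts under the EEDefinitionSchema introduced in the new schemas.ts below: only version and images.base_image.name are required, everything else is optional. The base image tag and the import path are illustrative assumptions, not values taken from the plugin:

// Illustrative sample; adjust the import path and image to your setup.
import * as yaml from 'js-yaml';
import { EEDefinitionSchema } from './schemas';

const sampleDefinition = `
version: 3
images:
  base_image:
    name: registry.example.com/ansible/ee-minimal:latest
dependencies:
  galaxy:
    collections:
      - name: ansible.posix
  python:
    - requests>=2.28.0
  system:
    - libssh-devel [platform:rpm]
additional_build_steps:
  prepend_base:
    - COPY _build/configs/ansible.cfg /etc/ansible/ansible.cfg
  append_final:
    - RUN rm -f /etc/ansible/ansible.cfg
`;

// Parses the YAML and validates its shape; a strict schema means any
// unexpected key raises a readable Zod error instead of passing silently.
EEDefinitionSchema.parse(yaml.load(sampleDefinition));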
a/plugins/scaffolder-backend-module-backstage-rhaap/src/actions/helpers/schemas.ts b/plugins/scaffolder-backend-module-backstage-rhaap/src/actions/helpers/schemas.ts new file mode 100644 index 00000000..661ac055 --- /dev/null +++ b/plugins/scaffolder-backend-module-backstage-rhaap/src/actions/helpers/schemas.ts @@ -0,0 +1,87 @@ +/* +Various schema definitions for validating the input data +*/ + +import { z } from 'zod'; + +export const CollectionSchema = z + .object({ + name: z.string(), + version: z.string().optional(), + signatures: z.array(z.string()).optional(), + source: z.string().optional(), + type: z.enum(['file', 'galaxy', 'git', 'url', 'dir', 'subdirs']).optional(), + }) + .strict(); + +export const CollectionRequirementsSchema = z + .object({ + collections: z.array(CollectionSchema), + }) + .strict(); + +export const GalaxyDependenciesSchema = z + .object({ + collections: z.array(CollectionSchema), + }) + .strict(); + +export const DependenciesSchema = z + .object({ + python: z.array(z.string()).optional(), + system: z.array(z.string()).optional(), + galaxy: GalaxyDependenciesSchema.optional(), + }) + .strict(); + +export const ImagesSchema = z + .object({ + base_image: z + .object({ + name: z.string(), + }) + .strict(), + }) + .strict(); + +export const AdditionalBuildFilesSchema = z + .array( + z + .object({ + src: z.string(), + dest: z.string(), + }) + .strict(), + ) + .optional(); + +export const AdditionalBuildStepsSchema = z + .object({ + prepend_base: z.array(z.string()).optional(), + append_base: z.array(z.string()).optional(), + prepend_galaxy: z.array(z.string()).optional(), + append_galaxy: z.array(z.string()).optional(), + prepend_builder: z.array(z.string()).optional(), + append_builder: z.array(z.string()).optional(), + prepend_final: z.array(z.string()).optional(), + append_final: z.array(z.string()).optional(), + }) + .strict(); + +export const OptionsSchema = z + .object({ + package_manager_path: z.string().optional(), + }) + .strict(); + +// final schema for the entire EE definition YAML file +export const EEDefinitionSchema = z + .object({ + version: z.number(), + images: ImagesSchema, + dependencies: DependenciesSchema.optional(), + additional_build_files: AdditionalBuildFilesSchema.optional(), + additional_build_steps: AdditionalBuildStepsSchema.optional(), + options: OptionsSchema.optional(), + }) + .strict(); diff --git a/plugins/scaffolder-backend-module-backstage-rhaap/src/actions/helpers/useCaseMaker.test.ts b/plugins/scaffolder-backend-module-backstage-rhaap/src/actions/helpers/useCaseMaker.test.ts index 8e2a7336..05fcef40 100644 --- a/plugins/scaffolder-backend-module-backstage-rhaap/src/actions/helpers/useCaseMaker.test.ts +++ b/plugins/scaffolder-backend-module-backstage-rhaap/src/actions/helpers/useCaseMaker.test.ts @@ -196,6 +196,20 @@ describe('ansible-aap:useCaseMaker:github', () => { ); }, ), + http.head( + 'https://api.github.com/repos/testOwner/existingRepo', + // @ts-ignore + () => { + return HttpResponse.json({}, { status: 200 }); + }, + ), + http.head( + 'https://api.github.com/repos/testOwner/nonExistentRepo', + // @ts-ignore + () => { + return HttpResponse.json({}, { status: 404 }); + }, + ), ]; const server = setupServer(...handlers); @@ -328,6 +342,103 @@ describe('ansible-aap:useCaseMaker:github', () => { useCaseMaker.devfilePushToGithub(validOptions), ).rejects.toThrow("Cannot read properties of undefined (reading 'status')"); }); + + it('generateRepositoryUrl - should generate correct GitHub URL', async () => { + config = new 
ConfigReader(MOCK_CONF.data); + ansibleConfig = getAnsibleConfig(config); + useCaseMaker = new UseCaseMaker({ + ansibleConfig, + logger, + organization, + scmType, + apiClient: mockAnsibleService, + useCases, + token: MOCK_TOKEN, + }); + + const url = await useCaseMaker.generateRepositoryUrl({ + repoOwner: 'testOwner', + repoName: 'testRepo', + }); + + expect(url).toBe('github.com?repo=testRepo&owner=testOwner'); + }); + + it('checkIfRepositoryExists - should return true when repository exists', async () => { + config = new ConfigReader(MOCK_CONF.data); + ansibleConfig = getAnsibleConfig(config); + useCaseMaker = new UseCaseMaker({ + ansibleConfig, + logger, + organization, + scmType, + apiClient: mockAnsibleService, + useCases, + token: MOCK_TOKEN, + }); + + const exists = await useCaseMaker.checkIfRepositoryExists({ + repoOwner: 'testOwner', + repoName: 'existingRepo', + }); + + expect(exists).toBe(true); + }); + + it('checkIfRepositoryExists - should return false when repository does not exist', async () => { + config = new ConfigReader(MOCK_CONF.data); + ansibleConfig = getAnsibleConfig(config); + useCaseMaker = new UseCaseMaker({ + ansibleConfig, + logger, + organization, + scmType, + apiClient: mockAnsibleService, + useCases, + token: MOCK_TOKEN, + }); + + const exists = await useCaseMaker.checkIfRepositoryExists({ + repoOwner: 'testOwner', + repoName: 'nonExistentRepo', + }); + + expect(exists).toBe(false); + }); + + it('checkIfRepositoryExists - should throw error for non-404 errors', async () => { + config = new ConfigReader(MOCK_CONF.data); + ansibleConfig = getAnsibleConfig(config); + useCaseMaker = new UseCaseMaker({ + ansibleConfig, + logger, + organization, + scmType, + apiClient: mockAnsibleService, + useCases, + token: MOCK_TOKEN, + }); + + // Add a handler that returns 500 error + server.use( + http.head( + 'https://api.github.com/repos/testOwner/testRepo', + // @ts-ignore + () => { + return HttpResponse.json({}, { status: 500 }); + }, + ), + ); + + await expect( + useCaseMaker.checkIfRepositoryExists({ + repoOwner: 'testOwner', + repoName: 'testRepo', + }), + ).rejects.toThrow( + 'Error checking if Github Repository testOwner/testRepo exists', + ); + }); }); describe('ansible-aap:useCaseMaker:gitlab', () => { @@ -536,6 +647,18 @@ spec: `); }, ), + http.get( + 'https://gitlab.com/api/v4/projects/testOwner%2FexistingRepo', + () => { + return HttpResponse.json({ id: 12345 }, { status: 200 }); + }, + ), + http.get( + 'https://gitlab.com/api/v4/projects/testOwner%2FnonExistentRepo', + () => { + return HttpResponse.json({}, { status: 404 }); + }, + ), ]; const server = setupServer(...handlers); @@ -676,7 +799,7 @@ spec: expect(result).toBeNull(); }); - it('should throw an error if tfails to fetch repository details', async () => { + it('should throw an error if it fails to fetch repository details', async () => { const invalidOptions = { value: 'devfile content', repositoryUrl: 'https://gitlab.com/invalid-url', @@ -716,4 +839,264 @@ spec: useCaseMaker.devfilePushToGitLab(validOptions), ).rejects.toThrow('Failed to fetch repository details: Unauthorized'); }); + + it('generateRepositoryUrl - should generate correct GitLab URL', async () => { + config = new ConfigReader(MOCK_CONF.data); + ansibleConfig = getAnsibleConfig(config); + useCaseMaker = new UseCaseMaker({ + ansibleConfig, + logger, + organization, + scmType, + apiClient: mockAnsibleService, + useCases, + token: MOCK_TOKEN, + }); + + const url = await useCaseMaker.generateRepositoryUrl({ + repoOwner: 'testOwner', + 
repoName: 'testRepo', + }); + + expect(url).toBe('gitlab.com?repo=testRepo&owner=testOwner'); + }); + + it('checkIfRepositoryExists - should return true when GitLab repository exists', async () => { + config = new ConfigReader(MOCK_CONF.data); + ansibleConfig = getAnsibleConfig(config); + useCaseMaker = new UseCaseMaker({ + ansibleConfig, + logger, + organization, + scmType, + apiClient: mockAnsibleService, + useCases, + token: MOCK_TOKEN, + }); + + const exists = await useCaseMaker.checkIfRepositoryExists({ + repoOwner: 'testOwner', + repoName: 'existingRepo', + }); + + expect(exists).toBe(true); + }); + + it('checkIfRepositoryExists - should return false when GitLab repository does not exist', async () => { + config = new ConfigReader(MOCK_CONF.data); + ansibleConfig = getAnsibleConfig(config); + useCaseMaker = new UseCaseMaker({ + ansibleConfig, + logger, + organization, + scmType, + apiClient: mockAnsibleService, + useCases, + token: MOCK_TOKEN, + }); + + const exists = await useCaseMaker.checkIfRepositoryExists({ + repoOwner: 'testOwner', + repoName: 'nonExistentRepo', + }); + + expect(exists).toBe(false); + }); + + it('checkIfRepositoryExists - should throw error for non-404 GitLab errors', async () => { + config = new ConfigReader(MOCK_CONF.data); + ansibleConfig = getAnsibleConfig(config); + useCaseMaker = new UseCaseMaker({ + ansibleConfig, + logger, + organization, + scmType, + apiClient: mockAnsibleService, + useCases, + token: MOCK_TOKEN, + }); + + // Mock fetch to throw an error (simulating network error) + const originalFetch = global.fetch; + global.fetch = jest.fn().mockRejectedValueOnce(new Error('Server error')); + + await expect( + useCaseMaker.checkIfRepositoryExists({ + repoOwner: 'testOwner', + repoName: 'testRepo', + }), + ).rejects.toThrow( + 'Error checking if Gitlab Repository testOwner/testRepo exists: Server error', + ); + + // Restore original fetch + global.fetch = originalFetch; + }); + + it('fetchGitlabFileContent - should successfully fetch file content from GitLab', async () => { + const fileContent = '# Test File\n\nThis is a test file content.'; + server.use( + http.get( + 'https://gitlab.com/testOwner/testRepo/-/raw/main/path/to/file.md', + () => { + return HttpResponse.text(fileContent); + }, + ), + ); + + config = new ConfigReader(MOCK_CONF.data); + ansibleConfig = getAnsibleConfig(config); + useCaseMaker = new UseCaseMaker({ + ansibleConfig, + logger, + organization, + scmType, + apiClient: mockAnsibleService, + useCases, + token: MOCK_TOKEN, + }); + + const content = await useCaseMaker.fetchGitlabFileContent({ + owner: 'testOwner', + repo: 'testRepo', + filePath: 'path/to/file.md', + branch: 'main', + }); + + expect(content).toBe(fileContent); + }); + + it('fetchGitlabFileContent - should throw error when GitLab host is not configured', async () => { + config = new ConfigReader(MOCK_CONF.data); + ansibleConfig = getAnsibleConfig(config); + useCaseMaker = new UseCaseMaker({ + ansibleConfig, + logger, + organization, + scmType, + apiClient: mockAnsibleService, + useCases, + token: MOCK_TOKEN, + }); + + // Directly set scmIntegration to an object without host to test the error case + useCaseMaker.scmIntegration = { + token: 'mockToken', + // host is missing + } as any; + + await expect( + useCaseMaker.fetchGitlabFileContent({ + owner: 'testOwner', + repo: 'testRepo', + filePath: 'path/to/file.md', + branch: 'main', + }), + ).rejects.toThrow('Not Gitlab host configured.'); + }); + + it('fetchGitlabFileContent - should handle fetch errors', async () => { 
+ const originalFetch = global.fetch; + global.fetch = jest + .fn() + .mockRejectedValueOnce(new Error('Failed to fetch')); + + config = new ConfigReader(MOCK_CONF.data); + ansibleConfig = getAnsibleConfig(config); + useCaseMaker = new UseCaseMaker({ + ansibleConfig, + logger, + organization, + scmType, + apiClient: mockAnsibleService, + useCases, + token: MOCK_TOKEN, + }); + + await expect( + useCaseMaker.fetchGitlabFileContent({ + owner: 'testOwner', + repo: 'testRepo', + filePath: 'path/to/file.md', + branch: 'main', + }), + ).rejects.toThrow('Error fetching file content: Failed to fetch'); + + global.fetch = originalFetch; + }); + + it('fetchGitlabFileContent - should work without token', async () => { + const fileContent = '# Test File\n\nThis is a test file content.'; + const MOCK_CONF_NO_TOKEN = { + data: { + ...MOCK_CONF.data, + integrations: { + gitlab: [ + { + host: 'gitlab.com', + // No token + }, + ], + }, + }, + }; + + server.use( + http.get( + 'https://gitlab.com/testOwner/testRepo/-/raw/main/path/to/file.md', + () => { + return HttpResponse.text(fileContent); + }, + ), + ); + + config = new ConfigReader(MOCK_CONF_NO_TOKEN.data); + ansibleConfig = getAnsibleConfig(config); + useCaseMaker = new UseCaseMaker({ + ansibleConfig, + logger, + organization, + scmType, + apiClient: mockAnsibleService, + useCases, + token: MOCK_TOKEN, + }); + + const content = await useCaseMaker.fetchGitlabFileContent({ + owner: 'testOwner', + repo: 'testRepo', + filePath: 'path/to/file.md', + branch: 'main', + }); + + expect(content).toBe(fileContent); + }); + + it('fetchGitlabFileContent - should handle network errors', async () => { + const originalFetch = global.fetch; + global.fetch = jest.fn().mockRejectedValueOnce(new Error('Network error')); + + config = new ConfigReader(MOCK_CONF.data); + ansibleConfig = getAnsibleConfig(config); + useCaseMaker = new UseCaseMaker({ + ansibleConfig, + logger, + organization, + scmType, + apiClient: mockAnsibleService, + useCases, + token: MOCK_TOKEN, + }); + + await expect( + useCaseMaker.fetchGitlabFileContent({ + owner: 'testOwner', + repo: 'testRepo', + filePath: 'path/to/file.md', + branch: 'main', + }), + ).rejects.toThrow('Error fetching file content: Network error'); + + global.fetch = originalFetch; + }); }); diff --git a/plugins/scaffolder-backend-module-backstage-rhaap/src/actions/helpers/useCaseMaker.ts b/plugins/scaffolder-backend-module-backstage-rhaap/src/actions/helpers/useCaseMaker.ts index 0d811887..01e1aaa0 100644 --- a/plugins/scaffolder-backend-module-backstage-rhaap/src/actions/helpers/useCaseMaker.ts +++ b/plugins/scaffolder-backend-module-backstage-rhaap/src/actions/helpers/useCaseMaker.ts @@ -749,6 +749,141 @@ export class UseCaseMaker { this.logger.info(`End saving templates locally.`); } + async generateRepositoryUrl(options: { + repoOwner: string; + repoName: string; + }): Promise { + const { repoOwner, repoName } = options; + return `${this.scmIntegration?.host}?repo=${repoName}&owner=${repoOwner}`; + } + + async fetchGithubFileContent(options: { + owner: string; + repo: string; + filePath: string; + branch: string; + }): Promise { + const { owner, repo, filePath, branch } = options; + let readmeContent: string = ''; + + this.logger.info( + `Fetching file content from ${owner}/${repo}/${filePath} on branch ${branch}`, + ); + + // we have initialized octokit in the constructor + // so a host must already be configured + // hence an explicit check is not needed + try { + const response = await this.octokit.request( + 'GET 
/repos/{owner}/{repo}/contents/{path}', + { + owner: owner, + repo: repo, + path: filePath, + ref: branch, + headers: { + accept: 'application/vnd.github.raw', + }, + }, + ); + if (response && response.status === 200 && response.data) { + readmeContent = response.data as unknown as string; + } + } catch (e: any) { + throw new Error(`Error fetching file content: ${e.message}`); + } + return readmeContent; + } + + async fetchGitlabFileContent(options: { + owner: string; + repo: string; + filePath: string; + branch: string; + }): Promise { + const { owner, repo, filePath, branch } = options; + let response; + + if (!this.scmIntegration?.host) { + throw new Error('Not Gitlab host configured.'); + } + + const host = this.scmIntegration.host; + + try { + const headers = { + 'Content-Type': 'application/json', + ...(this.scmIntegration?.token && { + 'PRIVATE-TOKEN': this.scmIntegration.token, + }), + }; + + this.logger.info( + `Fetching file content from ${owner}/${repo}/${filePath} on branch ${branch}`, + ); + response = await fetch( + `https://${host}/${owner}/${repo}/-/raw/${branch}/${filePath}`, + { headers }, + ); + + return response.text(); + } catch (error: any) { + throw new Error(`Error fetching file content: ${error.message}`); + } + } + + async checkIfRepositoryExists(options: { + repoOwner: string; + repoName: string; + }): Promise { + const { repoOwner, repoName } = options; + let exists = false; + let response; + + this.logger.info( + `[${UseCaseMaker.pluginLogName}] Checking if ${this.scmType} Repository ${repoOwner}/${repoName} exists`, + ); + + try { + if (this.scmType === 'Github') { + response = await this.octokit.request('HEAD /repos/{owner}/{repo}', { + owner: repoOwner, + repo: repoName, + }); + } else if (this.scmType === 'Gitlab') { + const gitlabApiUrl = this.scmIntegration?.apiBaseUrl; + const headers = { + 'Content-Type': 'application/json', + ...(this.scmIntegration?.token && { + 'PRIVATE-TOKEN': this.scmIntegration.token, + }), + }; + + response = await fetch( + `${gitlabApiUrl}/projects/${encodeURIComponent( + repoOwner, + )}%2F${encodeURIComponent(repoName)}`, + { headers }, + ); + } + + if (response && response.status === 200) { + exists = true; + } + } catch (error: any) { + if (error.status === 404) { + this.logger.info( + `[${UseCaseMaker.pluginLogName}] ${this.scmType} Repository ${repoOwner}/${repoName} does not exist`, + ); + } else { + throw new Error( + `Error checking if ${this.scmType} Repository ${repoOwner}/${repoName} exists: ${error.message}`, + ); + } + } + return exists; + } + private async createRepositoryIfNotExists(options: { githubConfig: GithubConfig; }): Promise { diff --git a/plugins/scaffolder-backend-module-backstage-rhaap/src/actions/index.ts b/plugins/scaffolder-backend-module-backstage-rhaap/src/actions/index.ts index d0d30b1b..1731ded3 100644 --- a/plugins/scaffolder-backend-module-backstage-rhaap/src/actions/index.ts +++ b/plugins/scaffolder-backend-module-backstage-rhaap/src/actions/index.ts @@ -21,3 +21,5 @@ export { createJobTemplate } from './aapCreateJobTemplate'; export { launchJobTemplate } from './aapLaunchJobTemplate'; export { cleanUp } from './aapCleanUp'; export { createShowCases } from './aapCreateShowCases'; +export { createEEDefinitionAction } from './createEEDefinition'; +export { prepareForPublishAction } from './prepareForPublish'; diff --git a/plugins/scaffolder-backend-module-backstage-rhaap/src/actions/prepareForPublish.test.ts 
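A hedged sketch of how the new UseCaseMaker helpers compose inside an action, assuming an already-constructed instance and example owner/repo names; the derived browse URL mirrors what prepareForPublish does, not a helper the class exposes:

// Illustrative composition; './helpers' is the path used by prepareForPublish.
import { UseCaseMaker } from './helpers';

async function resolveRepo(maker: UseCaseMaker) {
  const repoOwner = 'acme';
  const repoName = 'ee-definitions';

  // HEAD /repos/{owner}/{repo} for GitHub, GET /projects/{owner}%2F{repo}
  // for GitLab; a 404 yields false, any other failure is rethrown.
  const exists = await maker.checkIfRepositoryExists({ repoOwner, repoName });

  // Shape expected by the publish:github / publish:gitlab actions,
  // e.g. 'github.com?repo=ee-definitions&owner=acme'.
  const repoUrl = await maker.generateRepositoryUrl({ repoOwner, repoName });

  // A browsable https URL can be derived from the same pieces.
  const [host, query] = repoUrl.split('?');
  const params = new URLSearchParams(query);
  const browseUrl = `https://${host}/${params.get('owner') ?? repoOwner}/${
    params.get('repo') ?? repoName
  }`;

  return { exists, repoUrl, browseUrl };
}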
b/plugins/scaffolder-backend-module-backstage-rhaap/src/actions/prepareForPublish.test.ts new file mode 100644 index 00000000..85786fc4 --- /dev/null +++ b/plugins/scaffolder-backend-module-backstage-rhaap/src/actions/prepareForPublish.test.ts @@ -0,0 +1,745 @@ +/* + * Copyright 2025 The Ansible plugin Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// Mock external dependencies first (before imports for proper hoisting) +jest.mock('./helpers', () => ({ + UseCaseMaker: jest.fn(), +})); + +jest.mock('crypto', () => ({ + randomBytes: jest.fn(), +})); + +import { randomBytes } from 'crypto'; +import { mockServices } from '@backstage/backend-test-utils'; +import { prepareForPublishAction } from './prepareForPublish'; +import { UseCaseMaker } from './helpers'; +import { AnsibleConfig } from '@ansible/backstage-rhaap-common'; + +const mockRandomBytes = randomBytes as jest.MockedFunction< + (size: number) => Buffer +>; +const MockUseCaseMaker = UseCaseMaker as jest.MockedClass; + +describe('prepareForPublish', () => { + const logger = mockServices.logger.mock(); + const mockWorkspacePath = '/tmp/test-workspace'; + const mockAnsibleConfig: AnsibleConfig = { + githubIntegration: { + host: 'github.com', + }, + gitlabIntegration: { + host: 'gitlab.com', + }, + } as AnsibleConfig; + + let mockUseCaseMakerInstance: { + checkIfRepositoryExists: jest.Mock; + generateRepositoryUrl: jest.Mock; + }; + + beforeEach(() => { + jest.clearAllMocks(); + mockRandomBytes.mockReturnValue(Buffer.from('abcd', 'hex')); + + mockUseCaseMakerInstance = { + checkIfRepositoryExists: jest.fn(), + generateRepositoryUrl: jest.fn(), + }; + + MockUseCaseMaker.mockImplementation(() => { + return mockUseCaseMakerInstance as any; + }); + }); + + describe('repository existence check functionality', () => { + it('should set createNewRepo to false when repository exists', async () => { + mockUseCaseMakerInstance.checkIfRepositoryExists.mockResolvedValue(true); + mockUseCaseMakerInstance.generateRepositoryUrl.mockResolvedValue( + 'github.com?repo=test-repo&owner=test-owner', + ); + + const action = prepareForPublishAction({ + ansibleConfig: mockAnsibleConfig, + }); + const ctx = { + input: { + sourceControlProvider: 'Github', + repositoryOwner: 'test-owner', + repositoryName: 'test-repo', + createNewRepository: false, + eeFileName: 'test-ee', + contextDirName: 'test-ee', + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + expect( + mockUseCaseMakerInstance.checkIfRepositoryExists, + ).toHaveBeenCalledWith({ + repoOwner: 'test-owner', + repoName: 'test-repo', + }); + expect(ctx.output).toHaveBeenCalledWith('createNewRepo', false); + expect(logger.info).toHaveBeenCalledWith( + 'Github Repository test-owner/test-repo exists: true', + ); + }); + + it('should set createNewRepo to true when repository does not exist and createNewRepository is true', async () => { + mockUseCaseMakerInstance.checkIfRepositoryExists.mockResolvedValue(false); + 
mockUseCaseMakerInstance.generateRepositoryUrl.mockResolvedValue( + 'github.com?repo=test-repo&owner=test-owner', + ); + + const action = prepareForPublishAction({ + ansibleConfig: mockAnsibleConfig, + }); + const ctx = { + input: { + sourceControlProvider: 'Github', + repositoryOwner: 'test-owner', + repositoryName: 'test-repo', + createNewRepository: true, + eeFileName: 'test-ee', + contextDirName: 'test-ee', + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + expect(ctx.output).toHaveBeenCalledWith('createNewRepo', true); + expect(logger.info).toHaveBeenCalledWith( + 'A new Github repository test-owner/test-repo will be created.', + ); + }); + + it('should throw error when repository does not exist and createNewRepository is false', async () => { + mockUseCaseMakerInstance.checkIfRepositoryExists.mockResolvedValue(false); + + const action = prepareForPublishAction({ + ansibleConfig: mockAnsibleConfig, + }); + const ctx = { + input: { + sourceControlProvider: 'Github', + repositoryOwner: 'test-owner', + repositoryName: 'test-repo', + createNewRepository: false, + eeFileName: 'test-ee', + contextDirName: 'test-ee', + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await expect(action.handler(ctx)).rejects.toThrow( + 'Github Repository test-owner/test-repo does not exist and creating a new repository was not enabled.', + ); + }); + + it('should work with Gitlab provider', async () => { + mockUseCaseMakerInstance.checkIfRepositoryExists.mockResolvedValue(true); + mockUseCaseMakerInstance.generateRepositoryUrl.mockResolvedValue( + 'gitlab.com?repo=test-repo&owner=test-owner', + ); + + const action = prepareForPublishAction({ + ansibleConfig: mockAnsibleConfig, + }); + const ctx = { + input: { + sourceControlProvider: 'Gitlab', + repositoryOwner: 'test-owner', + repositoryName: 'test-repo', + createNewRepository: false, + eeFileName: 'test-ee', + contextDirName: 'test-ee', + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + expect(logger.info).toHaveBeenCalledWith( + 'Gitlab Repository test-owner/test-repo exists: true', + ); + }); + }); + + describe('generateRepositoryUrl functionality', () => { + it('should generate and output repository URL', async () => { + const mockRepoUrl = 'github.com?repo=test-repo&owner=test-owner'; + mockUseCaseMakerInstance.checkIfRepositoryExists.mockResolvedValue(true); + mockUseCaseMakerInstance.generateRepositoryUrl.mockResolvedValue( + mockRepoUrl, + ); + + const action = prepareForPublishAction({ + ansibleConfig: mockAnsibleConfig, + }); + const ctx = { + input: { + sourceControlProvider: 'Github', + repositoryOwner: 'test-owner', + repositoryName: 'test-repo', + createNewRepository: false, + eeFileName: 'test-ee', + contextDirName: 'test-ee', + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + expect( + mockUseCaseMakerInstance.generateRepositoryUrl, + ).toHaveBeenCalledWith({ + repoOwner: 'test-owner', + repoName: 'test-repo', + }); + expect(ctx.output).toHaveBeenCalledWith('generatedRepoUrl', mockRepoUrl); + expect(logger.info).toHaveBeenCalledWith( + `Generated repository URL: ${mockRepoUrl}`, + ); + }); + + it('should generate repository URL even when creating new repository', async () => { + const mockRepoUrl = 'github.com?repo=new-repo&owner=test-owner'; + 
mockUseCaseMakerInstance.checkIfRepositoryExists.mockResolvedValue(false); + mockUseCaseMakerInstance.generateRepositoryUrl.mockResolvedValue( + mockRepoUrl, + ); + + const action = prepareForPublishAction({ + ansibleConfig: mockAnsibleConfig, + }); + const ctx = { + input: { + sourceControlProvider: 'Github', + repositoryOwner: 'test-owner', + repositoryName: 'new-repo', + createNewRepository: true, + eeFileName: 'test-ee', + contextDirName: 'test-ee', + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + expect(mockUseCaseMakerInstance.generateRepositoryUrl).toHaveBeenCalled(); + expect(ctx.output).toHaveBeenCalledWith('generatedRepoUrl', mockRepoUrl); + }); + }); + + describe('PR/MR generation functionality', () => { + it('should generate PR title, description, and branch name when repository exists', async () => { + mockUseCaseMakerInstance.checkIfRepositoryExists.mockResolvedValue(true); + mockUseCaseMakerInstance.generateRepositoryUrl.mockResolvedValue( + 'github.com?repo=test-repo&owner=test-owner', + ); + mockRandomBytes.mockReturnValue(Buffer.from('1234', 'hex')); + + const action = prepareForPublishAction({ + ansibleConfig: mockAnsibleConfig, + }); + const ctx = { + input: { + sourceControlProvider: 'Github', + repositoryOwner: 'test-owner', + repositoryName: 'test-repo', + createNewRepository: false, + eeFileName: 'MyTestEE', + contextDirName: 'test-ee', + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + expect(ctx.output).toHaveBeenCalledWith( + 'generatedTitle', + '[AAP] Adds/updates files for Execution Environment MyTestEE', + ); + expect(ctx.output).toHaveBeenCalledWith( + 'generatedDescription', + 'This Pull Request adds Execution Environment files generated from Ansible Portal.', + ); + expect(ctx.output).toHaveBeenCalledWith( + 'generatedBranchName', + 'mytestee-1234', + ); + }); + + it('should generate MR title and description for Gitlab', async () => { + mockUseCaseMakerInstance.checkIfRepositoryExists.mockResolvedValue(true); + mockUseCaseMakerInstance.generateRepositoryUrl.mockResolvedValue( + 'gitlab.com?repo=test-repo&owner=test-owner', + ); + + const action = prepareForPublishAction({ + ansibleConfig: mockAnsibleConfig, + }); + const ctx = { + input: { + sourceControlProvider: 'Gitlab', + repositoryOwner: 'test-owner', + repositoryName: 'test-repo', + createNewRepository: false, + eeFileName: 'TestEE', + contextDirName: 'test-ee', + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + expect(ctx.output).toHaveBeenCalledWith( + 'generatedDescription', + 'This Merge Request adds Execution Environment files generated from Ansible Portal.', + ); + }); + + it('should not generate PR/MR fields when creating new repository', async () => { + mockUseCaseMakerInstance.checkIfRepositoryExists.mockResolvedValue(false); + mockUseCaseMakerInstance.generateRepositoryUrl.mockResolvedValue( + 'github.com?repo=new-repo&owner=test-owner', + ); + + const action = prepareForPublishAction({ + ansibleConfig: mockAnsibleConfig, + }); + const ctx = { + input: { + sourceControlProvider: 'Github', + repositoryOwner: 'test-owner', + repositoryName: 'new-repo', + createNewRepository: true, + eeFileName: 'test-ee', + contextDirName: 'test-ee', + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + 
expect(ctx.output).not.toHaveBeenCalledWith( + 'generatedTitle', + expect.any(String), + ); + expect(ctx.output).not.toHaveBeenCalledWith( + 'generatedDescription', + expect.any(String), + ); + expect(ctx.output).not.toHaveBeenCalledWith( + 'generatedBranchName', + expect.any(String), + ); + }); + + it('should lowercase EE file name in branch name', async () => { + mockUseCaseMakerInstance.checkIfRepositoryExists.mockResolvedValue(true); + mockUseCaseMakerInstance.generateRepositoryUrl.mockResolvedValue( + 'github.com?repo=test-repo&owner=test-owner', + ); + mockRandomBytes.mockReturnValue(Buffer.from('abcd', 'hex')); + + const action = prepareForPublishAction({ + ansibleConfig: mockAnsibleConfig, + }); + const ctx = { + input: { + sourceControlProvider: 'Github', + repositoryOwner: 'test-owner', + repositoryName: 'test-repo', + createNewRepository: false, + eeFileName: 'MyCustomEE', + contextDirName: 'test-ee', + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + expect(ctx.output).toHaveBeenCalledWith( + 'generatedBranchName', + 'mycustomee-abcd', + ); + }); + }); + + describe('catalog info URL generation functionality', () => { + it('should generate catalog info URL for Github', async () => { + mockUseCaseMakerInstance.checkIfRepositoryExists.mockResolvedValue(true); + mockUseCaseMakerInstance.generateRepositoryUrl.mockResolvedValue( + 'github.com?repo=test-repo&owner=test-owner', + ); + + const action = prepareForPublishAction({ + ansibleConfig: mockAnsibleConfig, + }); + const ctx = { + input: { + sourceControlProvider: 'Github', + repositoryOwner: 'test-owner', + repositoryName: 'test-repo', + createNewRepository: false, + eeFileName: 'test-ee', + contextDirName: 'my-ee', + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + expect(ctx.output).toHaveBeenCalledWith( + 'generatedCatalogInfoUrl', + 'https://github.com/test-owner/test-repo/blob/main/my-ee/catalog-info.yaml', + ); + expect(logger.info).toHaveBeenCalledWith( + 'Generated repository contents URL: https://github.com/test-owner/test-repo/blob/main/my-ee/catalog-info.yaml', + ); + }); + + it('should generate catalog info URL for Gitlab', async () => { + mockUseCaseMakerInstance.checkIfRepositoryExists.mockResolvedValue(true); + mockUseCaseMakerInstance.generateRepositoryUrl.mockResolvedValue( + 'gitlab.com?repo=test-repo&owner=test-owner', + ); + + const action = prepareForPublishAction({ + ansibleConfig: mockAnsibleConfig, + }); + const ctx = { + input: { + sourceControlProvider: 'Gitlab', + repositoryOwner: 'test-owner', + repositoryName: 'test-repo', + createNewRepository: false, + eeFileName: 'test-ee', + contextDirName: 'my-ee', + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + expect(ctx.output).toHaveBeenCalledWith( + 'generatedCatalogInfoUrl', + 'https://gitlab.com/test-owner/test-repo/-/blob/main/my-ee/catalog-info.yaml', + ); + }); + + it('should handle repository URL with query parameters', async () => { + mockUseCaseMakerInstance.checkIfRepositoryExists.mockResolvedValue(true); + mockUseCaseMakerInstance.generateRepositoryUrl.mockResolvedValue( + 'github.com?repo=test-repo&owner=test-owner&other=param', + ); + + const action = prepareForPublishAction({ + ansibleConfig: mockAnsibleConfig, + }); + const ctx = { + input: { + sourceControlProvider: 'Github', + repositoryOwner: 'test-owner', + repositoryName: 'test-repo', + 
createNewRepository: false, + eeFileName: 'test-ee', + contextDirName: 'test-ee', + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + // Should split on '?' and use only the host part + expect(ctx.output).toHaveBeenCalledWith( + 'generatedCatalogInfoUrl', + 'https://github.com/test-owner/test-repo/blob/main/test-ee/catalog-info.yaml', + ); + }); + }); + + describe('full repo URL generation functionality', () => { + it('should generate full repo URL for Github when creating new repository', async () => { + mockUseCaseMakerInstance.checkIfRepositoryExists.mockResolvedValue(false); + mockUseCaseMakerInstance.generateRepositoryUrl.mockResolvedValue( + 'github.com?repo=new-repo&owner=test-owner', + ); + + const action = prepareForPublishAction({ + ansibleConfig: mockAnsibleConfig, + }); + const ctx = { + input: { + sourceControlProvider: 'Github', + repositoryOwner: 'test-owner', + repositoryName: 'new-repo', + createNewRepository: true, + eeFileName: 'test-ee', + contextDirName: 'my-ee', + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + expect(ctx.output).toHaveBeenCalledWith( + 'generatedFullRepoUrl', + 'https://github.com/test-owner/new-repo/blob/main/my-ee/', + ); + }); + + it('should generate full repo URL for Gitlab when creating new repository', async () => { + mockUseCaseMakerInstance.checkIfRepositoryExists.mockResolvedValue(false); + mockUseCaseMakerInstance.generateRepositoryUrl.mockResolvedValue( + 'gitlab.com?repo=new-repo&owner=test-owner', + ); + + const action = prepareForPublishAction({ + ansibleConfig: mockAnsibleConfig, + }); + const ctx = { + input: { + sourceControlProvider: 'Gitlab', + repositoryOwner: 'test-owner', + repositoryName: 'new-repo', + createNewRepository: true, + eeFileName: 'test-ee', + contextDirName: 'my-ee', + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + expect(ctx.output).toHaveBeenCalledWith( + 'generatedFullRepoUrl', + 'https://gitlab.com/test-owner/new-repo/-/blob/main/my-ee/', + ); + }); + + it('should not generate full repo URL when repository exists', async () => { + mockUseCaseMakerInstance.checkIfRepositoryExists.mockResolvedValue(true); + mockUseCaseMakerInstance.generateRepositoryUrl.mockResolvedValue( + 'github.com?repo=test-repo&owner=test-owner', + ); + + const action = prepareForPublishAction({ + ansibleConfig: mockAnsibleConfig, + }); + const ctx = { + input: { + sourceControlProvider: 'Github', + repositoryOwner: 'test-owner', + repositoryName: 'test-repo', + createNewRepository: false, + eeFileName: 'test-ee', + contextDirName: 'test-ee', + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + expect(ctx.output).not.toHaveBeenCalledWith( + 'generatedFullRepoUrl', + expect.any(String), + ); + }); + }); + + describe('UseCaseMaker initialization', () => { + it('should initialize UseCaseMaker with correct parameters', async () => { + mockUseCaseMakerInstance.checkIfRepositoryExists.mockResolvedValue(true); + mockUseCaseMakerInstance.generateRepositoryUrl.mockResolvedValue( + 'github.com?repo=test-repo&owner=test-owner', + ); + + const action = prepareForPublishAction({ + ansibleConfig: mockAnsibleConfig, + }); + const ctx = { + input: { + sourceControlProvider: 'Github', + repositoryOwner: 'test-owner', + repositoryName: 'test-repo', + createNewRepository: false, + eeFileName: 
'test-ee', + contextDirName: 'test-ee', + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + expect(MockUseCaseMaker).toHaveBeenCalledWith({ + ansibleConfig: mockAnsibleConfig, + logger, + scmType: 'Github', + apiClient: null, + useCases: [], + organization: null, + token: null, + }); + }); + + it('should initialize UseCaseMaker with Gitlab scmType', async () => { + mockUseCaseMakerInstance.checkIfRepositoryExists.mockResolvedValue(true); + mockUseCaseMakerInstance.generateRepositoryUrl.mockResolvedValue( + 'gitlab.com?repo=test-repo&owner=test-owner', + ); + + const action = prepareForPublishAction({ + ansibleConfig: mockAnsibleConfig, + }); + const ctx = { + input: { + sourceControlProvider: 'Gitlab', + repositoryOwner: 'test-owner', + repositoryName: 'test-repo', + createNewRepository: false, + eeFileName: 'test-ee', + contextDirName: 'test-ee', + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await action.handler(ctx); + + expect(MockUseCaseMaker).toHaveBeenCalledWith({ + ansibleConfig: mockAnsibleConfig, + logger, + scmType: 'Gitlab', + apiClient: null, + useCases: [], + organization: null, + token: null, + }); + }); + }); + + describe('error handling', () => { + it('should handle checkIfRepositoryExists errors', async () => { + const errorMessage = 'Repository check failed'; + mockUseCaseMakerInstance.checkIfRepositoryExists.mockRejectedValue( + new Error(errorMessage), + ); + + const action = prepareForPublishAction({ + ansibleConfig: mockAnsibleConfig, + }); + const ctx = { + input: { + sourceControlProvider: 'Github', + repositoryOwner: 'test-owner', + repositoryName: 'test-repo', + createNewRepository: false, + eeFileName: 'test-ee', + contextDirName: 'test-ee', + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await expect(action.handler(ctx)).rejects.toThrow(errorMessage); + }); + + it('should handle generateRepositoryUrl errors', async () => { + const errorMessage = 'URL generation failed'; + mockUseCaseMakerInstance.checkIfRepositoryExists.mockResolvedValue(true); + mockUseCaseMakerInstance.generateRepositoryUrl.mockRejectedValue( + new Error(errorMessage), + ); + + const action = prepareForPublishAction({ + ansibleConfig: mockAnsibleConfig, + }); + const ctx = { + input: { + sourceControlProvider: 'Github', + repositoryOwner: 'test-owner', + repositoryName: 'test-repo', + createNewRepository: false, + eeFileName: 'test-ee', + contextDirName: 'test-ee', + }, + logger, + workspacePath: mockWorkspacePath, + output: jest.fn(), + } as any; + + await expect(action.handler(ctx)).rejects.toThrow(errorMessage); + }); + }); + + describe('action schema and metadata', () => { + it('should have correct action id', () => { + const action = prepareForPublishAction({ + ansibleConfig: mockAnsibleConfig, + }); + expect(action.id).toBe('ansible:prepare:publish'); + }); + + it('should have correct action description', () => { + const action = prepareForPublishAction({ + ansibleConfig: mockAnsibleConfig, + }); + expect(action.description).toBe('Check if a repository exists'); + }); + }); +}); diff --git a/plugins/scaffolder-backend-module-backstage-rhaap/src/actions/prepareForPublish.ts b/plugins/scaffolder-backend-module-backstage-rhaap/src/actions/prepareForPublish.ts new file mode 100644 index 00000000..402d7157 --- /dev/null +++ b/plugins/scaffolder-backend-module-backstage-rhaap/src/actions/prepareForPublish.ts @@ -0,0 +1,195 @@ +import { 
createTemplateAction } from '@backstage/plugin-scaffolder-node'; +import { AnsibleConfig } from '@ansible/backstage-rhaap-common'; +import { UseCaseMaker } from './helpers'; +import { randomBytes } from 'crypto'; + +interface CheckRepositoryExistsInput { + sourceControlProvider: string; + repositoryOwner: string; + repositoryName: string; + createNewRepository: boolean; + eeFileName: string; + contextDirName: string; +} + +export function prepareForPublishAction(options: { + ansibleConfig: AnsibleConfig; +}) { + const { ansibleConfig } = options; + return createTemplateAction({ + id: 'ansible:prepare:publish', + description: 'Check if a repository exists', + schema: { + input: { + type: 'object', + required: [ + 'sourceControlProvider', + 'repositoryOwner', + 'repositoryName', + 'eeFileName', + 'contextDirName', + ], + properties: { + sourceControlProvider: { type: 'string' }, + repositoryOwner: { type: 'string' }, + repositoryName: { type: 'string' }, + eeFileName: { type: 'string' }, + createNewRepository: { type: 'boolean' }, + contextDirName: { type: 'string' }, + }, + }, + output: { + type: 'object', + properties: { + createNewRepo: { + title: + 'Specifies if the specified repository needs to be created or not', + type: 'boolean', + }, + generatedRepoUrl: { + title: + 'The URL of the repository generated from SCM integration settings', + type: 'string', + }, + normalizedRepoUrl: { + title: + 'The normalized URL of the repository (used for catalog component registration)', + type: 'string', + }, + generatedTitle: { + title: 'The title of the PR/MR', + type: 'string', + }, + generatedDescription: { + title: 'The description of the PR/MR', + type: 'string', + }, + generatedBranchName: { + title: 'The name of the branch to be created', + type: 'string', + }, + generatedCatalogInfoUrl: { + title: 'The (generated) URL of the catalog-info.yaml file', + type: 'string', + }, + generatedFullRepoUrl: { + title: 'The (generated) URL of the repository contents', + type: 'string', + }, + }, + }, + }, + async handler(ctx) { + const { input, logger } = ctx; + const values = input as unknown as CheckRepositoryExistsInput; + const sourceControlProvider = values.sourceControlProvider; + const repositoryOwner = values.repositoryOwner; + const repositoryName = values.repositoryName; + const createNewRepository = values.createNewRepository; + const eeFileName = values.eeFileName; + const contextDirName = values.contextDirName; + let createNewRepo = false; + + try { + const useCaseMaker = new UseCaseMaker({ + ansibleConfig: ansibleConfig, + logger, + scmType: sourceControlProvider, + apiClient: null, + useCases: [], + organization: null, + token: null, + }); + + const exists = await useCaseMaker.checkIfRepositoryExists({ + repoOwner: repositoryOwner, + repoName: repositoryName, + }); + + logger.info( + `${sourceControlProvider} Repository ${repositoryOwner}/${repositoryName} exists: ${exists}`, + ); + + if (exists) { + createNewRepo = false; + } else if (!exists && createNewRepository) { + logger.info( + `A new ${sourceControlProvider} repository ${repositoryOwner}/${repositoryName} will be created.`, + ); + createNewRepo = true; + } else { + throw new Error( + `${sourceControlProvider} Repository ${repositoryOwner}/${repositoryName} does not exist and creating a new repository was not enabled.`, + ); + } + + ctx.output('createNewRepo', createNewRepo); + + // Generate the repository URL from SCM integration settings for further publish steps + // Required in both cases - repo exists or not + const 
generatedRepoUrl = await useCaseMaker.generateRepositoryUrl({ + repoOwner: repositoryOwner, + repoName: repositoryName, + }); + logger.info(`Generated repository URL: ${generatedRepoUrl}`); + ctx.output('generatedRepoUrl', generatedRepoUrl); + + // create a normalized repository URL (required for catalog component registration) + let normalizedRepoUrl; + try { + const [hostPart, queryPart] = generatedRepoUrl.split('?'); + const params = new URLSearchParams(queryPart); + const repo = params.get('repo'); + const repoOwner = params.get('owner'); + + if (repo && repoOwner) { + normalizedRepoUrl = `${hostPart}/${repoOwner}/${repo}`; + } + } catch (e) { + normalizedRepoUrl = ''; + } + logger.info(`Normalized repository URL: ${normalizedRepoUrl}`); + ctx.output('normalizedRepoUrl', normalizedRepoUrl); + + // TO-DO: make the default branch name configurable + let branchName = 'main'; + + // If a new repository does not have to be created + // and we have reached this far it means that a PR/MR needs to be created + if (!createNewRepo) { + const title = `[AAP] Adds/updates files for Execution Environment ${eeFileName}`; + const description = `This ${ + sourceControlProvider === 'Gitlab' + ? 'Merge Request' + : 'Pull Request' + } adds Execution Environment files generated from Ansible Portal.`; + branchName = `${eeFileName.toLowerCase()}-${randomBytes(2).toString('hex')}`; + + ctx.output('generatedTitle', title); + ctx.output('generatedDescription', description); + ctx.output('generatedBranchName', branchName); + } + + // Required for catalog component registration + const [hostPart, _] = generatedRepoUrl.split('?'); + let catalogInfoUrl = ''; + let fullRepoUrl = ''; + // The URL structure is different for Github and Gitlab + if (sourceControlProvider === 'Github') { + catalogInfoUrl = `https://${hostPart}/${repositoryOwner}/${repositoryName}/blob/main/${contextDirName}/catalog-info.yaml`; + fullRepoUrl = `https://${hostPart}/${repositoryOwner}/${repositoryName}/blob/main/${contextDirName}/`; + } else if (sourceControlProvider === 'Gitlab') { + catalogInfoUrl = `https://${hostPart}/${repositoryOwner}/${repositoryName}/-/blob/main/${contextDirName}/catalog-info.yaml`; + fullRepoUrl = `https://${hostPart}/${repositoryOwner}/${repositoryName}/-/blob/main/${contextDirName}/`; + } + logger.info(`Generated repository contents URL: ${catalogInfoUrl}`); + ctx.output('generatedCatalogInfoUrl', catalogInfoUrl); + if (createNewRepo) { + ctx.output('generatedFullRepoUrl', fullRepoUrl); + } + } catch (error: any) { + throw new Error(`${error.message}`); + } + }, + }); +} diff --git a/plugins/scaffolder-backend-module-backstage-rhaap/src/actions/utils/utils.test.ts b/plugins/scaffolder-backend-module-backstage-rhaap/src/actions/utils/utils.test.ts new file mode 100644 index 00000000..30afe065 --- /dev/null +++ b/plugins/scaffolder-backend-module-backstage-rhaap/src/actions/utils/utils.test.ts @@ -0,0 +1,177 @@ +/* + * Copyright 2024 The Ansible plugin Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import { parseUploadedFileContent } from './utils'; + +describe('parseUploadedFileContent', () => { + it('should parse valid base64 data URL with text/plain content type', () => { + const content = 'Hello, World!'; + const base64Content = Buffer.from(content).toString('base64'); + const dataUrl = `data:text/plain;base64,${base64Content}`; + + const result = parseUploadedFileContent(dataUrl); + + expect(result).toBe(content); + }); + + it('should parse valid base64 data URL with application/json content type', () => { + const content = '{"key": "value"}'; + const base64Content = Buffer.from(content).toString('base64'); + const dataUrl = `data:application/json;base64,${base64Content}`; + + const result = parseUploadedFileContent(dataUrl); + + expect(result).toBe(content); + }); + + it('should parse valid base64 data URL with yaml content type', () => { + const content = 'name: test\nversion: 1.0.0'; + const base64Content = Buffer.from(content).toString('base64'); + const dataUrl = `data:text/yaml;base64,${base64Content}`; + + const result = parseUploadedFileContent(dataUrl); + + expect(result).toBe(content); + }); + + it('should trim whitespace from decoded content', () => { + const content = ' Hello, World! \n'; + const base64Content = Buffer.from(content).toString('base64'); + const dataUrl = `data:text/plain;base64,${base64Content}`; + + const result = parseUploadedFileContent(dataUrl); + + expect(result).toBe(content.trim()); + }); + + it('should parse empty content', () => { + const content = ''; + const base64Content = Buffer.from(content).toString('base64'); + const dataUrl = `data:text/plain;base64,${base64Content}`; + + const result = parseUploadedFileContent(dataUrl); + + expect(result).toBe(''); + }); + + it('should parse multiline content', () => { + const content = 'Line 1\nLine 2\nLine 3'; + const base64Content = Buffer.from(content).toString('base64'); + const dataUrl = `data:text/plain;base64,${base64Content}`; + + const result = parseUploadedFileContent(dataUrl); + + expect(result).toBe(content); + }); + + it('should parse content with special characters', () => { + const content = 'Hello! 
@#$%^&*()_+-=[]{}|;:,.<>?'; + const base64Content = Buffer.from(content).toString('base64'); + const dataUrl = `data:text/plain;base64,${base64Content}`; + + const result = parseUploadedFileContent(dataUrl); + + expect(result).toBe(content); + }); + + it('should return empty string for input without base64 marker', () => { + const dataUrl = 'not a base64 data URL'; + + const result = parseUploadedFileContent(dataUrl); + + expect(result).toBe(''); + }); + + it('should return empty string for empty input string', () => { + const result = parseUploadedFileContent(''); + + expect(result).toBe(''); + }); + + it('should throw error for invalid data URL format missing semicolon', () => { + const invalidDataUrl = 'data:text/plainbase64,SGVsbG8='; + + expect(() => { + parseUploadedFileContent(invalidDataUrl); + }).toThrow('Invalid data URL format for the file uploaded'); + }); + + it('should throw error for invalid data URL format missing data prefix', () => { + const invalidDataUrl = 'text/plain;base64,SGVsbG8='; + + expect(() => { + parseUploadedFileContent(invalidDataUrl); + }).toThrow('Invalid data URL format for the file uploaded'); + }); + + it('should return empty string for data URL with empty base64 data', () => { + const invalidDataUrl = 'data:text/plain;base64,'; + + const result = parseUploadedFileContent(invalidDataUrl); + + expect(result).toBe(''); + }); + + it('should decode invalid base64 data without throwing (Buffer.from behavior)', () => { + // Invalid base64 characters - Buffer.from doesn't throw, it decodes what it can + const invalidDataUrl = 'data:text/plain;base64,!!!invalid!!!'; + + const result = parseUploadedFileContent(invalidDataUrl); + + // Buffer.from will decode invalid base64 without throwing + // The result will be decoded bytes (may be garbage) + expect(typeof result).toBe('string'); + }); + + it('should handle data URL with charset parameter', () => { + const content = 'Hello, World!'; + const base64Content = Buffer.from(content).toString('base64'); + const dataUrl = `data:text/plain;charset=utf-8;base64,${base64Content}`; + + const result = parseUploadedFileContent(dataUrl); + + expect(result).toBe(content); + }); + + it('should handle string input that is not a string type (TypeScript type check)', () => { + // This test ensures the function handles the type check correctly + // In JavaScript/TypeScript runtime, if a non-string is passed, it should still work + // but the function checks typeof dataUrl === 'string' + const result = parseUploadedFileContent( + 'data:text/plain;base64,SGVsbG8=' as string, + ); + + expect(result).toBe('Hello'); + }); + + it('should throw error when Buffer.from fails to parse base64 data', () => { + const dataUrl = 'data:text/plain;base64,SGVsbG8='; + const originalBufferFrom = Buffer.from; + const mockError = new Error('Invalid base64 encoding'); + + // Mock Buffer.from to throw an error + Buffer.from = jest.fn(() => { + throw mockError; + }); + + expect(() => { + parseUploadedFileContent(dataUrl); + }).toThrow('Failed to parse data URL: Invalid base64 encoding'); + + // Restore original Buffer.from + Buffer.from = originalBufferFrom; + }); +}); diff --git a/plugins/scaffolder-backend-module-backstage-rhaap/src/actions/utils/utils.ts b/plugins/scaffolder-backend-module-backstage-rhaap/src/actions/utils/utils.ts new file mode 100644 index 00000000..7ebc83e4 --- /dev/null +++ b/plugins/scaffolder-backend-module-backstage-rhaap/src/actions/utils/utils.ts @@ -0,0 +1,24 @@ +/* +Utility functions for the scaffolder backend module. 
+*/ + +export function parseUploadedFileContent(dataUrl: string): string { + // Start parsing of uploaded file content + let decodedContent = ''; + + if (typeof dataUrl === 'string' && dataUrl.includes('base64,')) { + const matches = dataUrl.match(/^data:(.*?);base64,(.*)$/); + if (!matches) { + throw new Error('Invalid data URL format for the file uploaded'); + } + const base64Data = matches[2]; + try { + decodedContent = Buffer.from(base64Data, 'base64') + .toString('utf-8') + .trim(); + } catch (error: any) { + throw new Error(`Failed to parse data URL: ${error.message}`); + } + } + return decodedContent; +} diff --git a/plugins/scaffolder-backend-module-backstage-rhaap/src/module.test.ts b/plugins/scaffolder-backend-module-backstage-rhaap/src/module.test.ts index 1f1fc21a..ab94b29f 100644 --- a/plugins/scaffolder-backend-module-backstage-rhaap/src/module.test.ts +++ b/plugins/scaffolder-backend-module-backstage-rhaap/src/module.test.ts @@ -13,6 +13,10 @@ jest.mock('./actions', () => ({ launchJobTemplate: jest.fn(() => 'action5'), cleanUp: jest.fn(() => 'action6'), createShowCases: jest.fn(() => 'action7'), + createEEDefinitionAction: jest.fn(() => 'action8'), + createEETemplateAction: jest.fn(() => 'action9'), + prepareForPublishAction: jest.fn(() => 'action10'), + createEECatalogInfoAction: jest.fn(() => 'action11'), })); jest.mock('./filters', () => ({ @@ -25,6 +29,10 @@ jest.mock('./autocomplete', () => ({ handleAutocompleteRequest: jest.fn(() => Promise.resolve({ results: [] })), })); +jest.mock('./router', () => ({ + createRouter: jest.fn(() => Promise.resolve(jest.fn())), +})); + jest.mock('@ansible/backstage-rhaap-common', () => ({ getAnsibleConfig: jest.fn(() => ({ ansible: 'config' })), ansibleServiceRef: Symbol('ansibleServiceRef'), @@ -51,9 +59,24 @@ describe('scaffolderModuleAnsible', () => { scaffolder: { addActions: jest.fn() }, scaffolderTemplating: { addTemplateFilters: jest.fn() }, autocomplete: { addAutocompleteProvider: jest.fn() }, - config: { some: 'config' }, + config: { + getString: jest.fn((key: string) => { + if (key === 'app.baseUrl') { + return 'http://localhost:3000'; + } + return ''; + }), + }, logger: { info: jest.fn(), debug: jest.fn(), error: jest.fn() }, ansibleService: { name: 'ansibleService' }, + httpRouter: { use: jest.fn() }, + auth: { + getOwnServiceCredentials: jest.fn(), + getPluginRequestToken: jest.fn(), + }, + discovery: { + getBaseUrl: jest.fn(), + }, }; // --- Get registrations from module --- @@ -122,5 +145,8 @@ describe('scaffolderModuleAnsible', () => { fakeEnv.ansibleService, (getAnsibleConfig as jest.Mock).mock.results[0].value, ); + + // --- Verify httpRouter.use call --- + expect(fakeEnv.httpRouter.use).toHaveBeenCalledTimes(1); }); }); diff --git a/plugins/scaffolder-backend-module-backstage-rhaap/src/module.ts b/plugins/scaffolder-backend-module-backstage-rhaap/src/module.ts index 1836ba5d..eb05a11a 100644 --- a/plugins/scaffolder-backend-module-backstage-rhaap/src/module.ts +++ b/plugins/scaffolder-backend-module-backstage-rhaap/src/module.ts @@ -35,6 +35,8 @@ import { createProjectAction, createShowCases, launchJobTemplate, + createEEDefinitionAction, + prepareForPublishAction, } from './actions'; import { @@ -43,6 +45,9 @@ import { useCaseNameFilter, } from './filters'; import { handleAutocompleteRequest } from './autocomplete'; + +import { createRouter } from './router'; + /** * @public * The Ansible Module for the Scaffolder Backend @@ -59,6 +64,9 @@ export const scaffolderModuleAnsible = createBackendModule({ 
scaffolderTemplating: scaffolderTemplatingExtensionPoint, autocomplete: scaffolderAutocompleteExtensionPoint, ansibleService: ansibleServiceRef, + auth: coreServices.auth, + discovery: coreServices.discovery, + httpRouter: coreServices.httpRouter, }, async init({ scaffolder, @@ -67,8 +75,12 @@ export const scaffolderModuleAnsible = createBackendModule({ scaffolderTemplating, autocomplete, ansibleService, + auth, + discovery, + httpRouter, }) { const ansibleConfig = getAnsibleConfig(config); + const frontendUrl = config.getString('app.baseUrl'); scaffolder.addActions( createAnsibleContentAction(config, ansibleConfig), createProjectAction(ansibleService), @@ -77,6 +89,14 @@ export const scaffolderModuleAnsible = createBackendModule({ launchJobTemplate(ansibleService), cleanUp(ansibleService), createShowCases(ansibleService, ansibleConfig), + createEEDefinitionAction({ + frontendUrl, + auth, + discovery, + }), + prepareForPublishAction({ + ansibleConfig: ansibleConfig, + }), ); scaffolderTemplating.addTemplateFilters({ useCaseNameFilter: useCaseNameFilter, @@ -103,6 +123,12 @@ export const scaffolderModuleAnsible = createBackendModule({ ansibleService, }), }); + httpRouter.use( + (await createRouter({ + logger, + ansibleConfig, + })) as any, + ); }, }); }, diff --git a/plugins/scaffolder-backend-module-backstage-rhaap/src/router.test.ts b/plugins/scaffolder-backend-module-backstage-rhaap/src/router.test.ts new file mode 100644 index 00000000..b24fce1f --- /dev/null +++ b/plugins/scaffolder-backend-module-backstage-rhaap/src/router.test.ts @@ -0,0 +1,515 @@ +/* + * Copyright 2025 The Ansible plugin Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +// Mock external dependencies first (before imports for proper hoisting) +jest.mock('./actions/helpers/useCaseMaker', () => ({ + UseCaseMaker: jest.fn(), +})); + +import express from 'express'; +import request from 'supertest'; +import { createRouter } from './router'; +import { UseCaseMaker } from './actions/helpers/useCaseMaker'; +import { LoggerService } from '@backstage/backend-plugin-api'; +import { AnsibleConfig } from '@ansible/backstage-rhaap-common'; + +const MockUseCaseMaker = UseCaseMaker as jest.MockedClass; + +describe('createRouter', () => { + let app: express.Express; + let mockLogger: jest.Mocked; + let mockAnsibleConfig: AnsibleConfig; + let mockUseCaseMakerInstance: { + checkIfRepositoryExists: jest.Mock; + fetchGithubFileContent: jest.Mock; + fetchGitlabFileContent: jest.Mock; + }; + + beforeEach(async () => { + mockLogger = { + info: jest.fn(), + error: jest.fn(), + warn: jest.fn(), + debug: jest.fn(), + child: jest.fn().mockReturnThis(), + } as unknown as jest.Mocked; + + mockAnsibleConfig = { + githubIntegration: { + host: 'github.com', + }, + gitlabIntegration: { + host: 'gitlab.com', + }, + } as AnsibleConfig; + + mockUseCaseMakerInstance = { + checkIfRepositoryExists: jest.fn(), + fetchGithubFileContent: jest.fn(), + fetchGitlabFileContent: jest.fn(), + }; + + MockUseCaseMaker.mockImplementation(() => { + return mockUseCaseMakerInstance as any; + }); + + const router = await createRouter({ + logger: mockLogger, + ansibleConfig: mockAnsibleConfig, + }); + + app = express().use(router); + }); + + afterEach(() => { + jest.clearAllMocks(); + }); + + describe('GET /aap/get_ee_readme', () => { + it('should return 400 when scm parameter is missing', async () => { + const response = await request(app).get('/aap/get_ee_readme').query({ + owner: 'test-owner', + repository: 'test-repo', + subdir: 'ee', + }); + + expect(response.status).toBe(400); + expect(response.body).toEqual({ + error: 'Missing required query parameters: scm\n', + }); + expect( + mockUseCaseMakerInstance.checkIfRepositoryExists, + ).not.toHaveBeenCalled(); + expect( + mockUseCaseMakerInstance.fetchGithubFileContent, + ).not.toHaveBeenCalled(); + expect( + mockUseCaseMakerInstance.fetchGitlabFileContent, + ).not.toHaveBeenCalled(); + }); + + it('should return 400 when owner parameter is missing', async () => { + const response = await request(app).get('/aap/get_ee_readme').query({ + scm: 'Github', + repository: 'test-repo', + subdir: 'ee', + }); + + expect(response.status).toBe(400); + expect(response.body).toEqual({ + error: 'Missing required query parameters: owner\n', + }); + expect( + mockUseCaseMakerInstance.checkIfRepositoryExists, + ).not.toHaveBeenCalled(); + expect( + mockUseCaseMakerInstance.fetchGithubFileContent, + ).not.toHaveBeenCalled(); + expect( + mockUseCaseMakerInstance.fetchGitlabFileContent, + ).not.toHaveBeenCalled(); + }); + + it('should return 400 when repository parameter is missing', async () => { + const response = await request(app).get('/aap/get_ee_readme').query({ + scm: 'Github', + owner: 'test-owner', + subdir: 'ee', + }); + + expect(response.status).toBe(400); + expect(response.body).toEqual({ + error: 'Missing required query parameters: repository\n', + }); + expect( + mockUseCaseMakerInstance.checkIfRepositoryExists, + ).not.toHaveBeenCalled(); + expect( + mockUseCaseMakerInstance.fetchGithubFileContent, + ).not.toHaveBeenCalled(); + expect( + mockUseCaseMakerInstance.fetchGitlabFileContent, + ).not.toHaveBeenCalled(); + }); + + it('should return 400 when multiple 
required parameters are missing', async () => { + const response = await request(app).get('/aap/get_ee_readme').query({ + scm: 'Github', + }); + + expect(response.status).toBe(400); + expect(response.body).toEqual({ + error: 'Missing required query parameters: owner, repository, subdir\n', + }); + expect( + mockUseCaseMakerInstance.checkIfRepositoryExists, + ).not.toHaveBeenCalled(); + expect( + mockUseCaseMakerInstance.fetchGithubFileContent, + ).not.toHaveBeenCalled(); + expect( + mockUseCaseMakerInstance.fetchGitlabFileContent, + ).not.toHaveBeenCalled(); + }); + + it('should return 404 when repository does not exist', async () => { + mockUseCaseMakerInstance.checkIfRepositoryExists.mockResolvedValue(false); + + const response = await request(app).get('/aap/get_ee_readme').query({ + scm: 'Github', + owner: 'test-owner', + repository: 'test-repo', + subdir: 'ee', + }); + + expect(response.status).toBe(404); + expect(response.text).toBe( + 'Unable to fetch EE README because the repository does not exist\n', + ); + expect( + mockUseCaseMakerInstance.checkIfRepositoryExists, + ).toHaveBeenCalledWith({ + repoOwner: 'test-owner', + repoName: 'test-repo', + }); + expect( + mockUseCaseMakerInstance.fetchGithubFileContent, + ).not.toHaveBeenCalled(); + expect( + mockUseCaseMakerInstance.fetchGitlabFileContent, + ).not.toHaveBeenCalled(); + }); + + it('should return 400 when SCM type is unsupported', async () => { + const response = await request(app).get('/aap/get_ee_readme').query({ + scm: 'bitbucket', + owner: 'test-owner', + repository: 'test-repo', + subdir: 'ee', + }); + + expect(response.status).toBe(400); + expect(response.body).toEqual({ + error: + "Unsupported SCM type 'bitbucket'. Supported values are: Github, Gitlab", + }); + expect( + mockUseCaseMakerInstance.checkIfRepositoryExists, + ).not.toHaveBeenCalled(); + expect( + mockUseCaseMakerInstance.fetchGithubFileContent, + ).not.toHaveBeenCalled(); + expect( + mockUseCaseMakerInstance.fetchGitlabFileContent, + ).not.toHaveBeenCalled(); + }); + + it('should successfully fetch README for GitHub repository', async () => { + mockUseCaseMakerInstance.checkIfRepositoryExists.mockResolvedValue(true); + const mockReadmeContent = '# Test README\n\nThis is a test README.'; + mockUseCaseMakerInstance.fetchGithubFileContent.mockResolvedValue( + mockReadmeContent, + ); + + const response = await request(app).get('/aap/get_ee_readme').query({ + scm: 'Github', + owner: 'test-owner', + repository: 'test-repo', + subdir: 'ee', + }); + + expect(response.status).toBe(200); + expect(response.text).toBe(mockReadmeContent); + expect(response.headers['content-type']).toContain('text/markdown'); + expect( + mockUseCaseMakerInstance.checkIfRepositoryExists, + ).toHaveBeenCalledWith({ + repoOwner: 'test-owner', + repoName: 'test-repo', + }); + expect( + mockUseCaseMakerInstance.fetchGithubFileContent, + ).toHaveBeenCalledWith({ + owner: 'test-owner', + repo: 'test-repo', + filePath: 'ee/README.md', + branch: 'main', + }); + expect( + mockUseCaseMakerInstance.fetchGitlabFileContent, + ).not.toHaveBeenCalled(); + }); + + it('should successfully fetch README for GitHub repository without host parameter', async () => { + mockUseCaseMakerInstance.checkIfRepositoryExists.mockResolvedValue(true); + const mockReadmeContent = '# Test README\n\nThis is a test README.'; + mockUseCaseMakerInstance.fetchGithubFileContent.mockResolvedValue( + mockReadmeContent, + ); + + const response = await request(app).get('/aap/get_ee_readme').query({ + scm: 'Github', + owner: 
'test-owner', + repository: 'test-repo', + subdir: 'ee', + // host is intentionally omitted for GitHub + }); + + expect(response.status).toBe(200); + expect(response.text).toBe(mockReadmeContent); + expect(response.headers['content-type']).toContain('text/markdown'); + expect( + mockUseCaseMakerInstance.checkIfRepositoryExists, + ).toHaveBeenCalledWith({ + repoOwner: 'test-owner', + repoName: 'test-repo', + }); + expect( + mockUseCaseMakerInstance.fetchGithubFileContent, + ).toHaveBeenCalledWith({ + owner: 'test-owner', + repo: 'test-repo', + filePath: 'ee/README.md', + branch: 'main', + }); + expect( + mockUseCaseMakerInstance.fetchGitlabFileContent, + ).not.toHaveBeenCalled(); + }); + + it('should successfully fetch README for GitLab repository', async () => { + mockUseCaseMakerInstance.checkIfRepositoryExists.mockResolvedValue(true); + const mockReadmeContent = '# GitLab README\n\nThis is a GitLab README.'; + mockUseCaseMakerInstance.fetchGitlabFileContent.mockResolvedValue( + mockReadmeContent, + ); + + const response = await request(app).get('/aap/get_ee_readme').query({ + scm: 'Gitlab', + host: 'gitlab.example.com', + owner: 'test-owner', + repository: 'test-repo', + subdir: 'ee', + }); + + expect(response.status).toBe(200); + expect(response.text).toBe(mockReadmeContent); + expect(response.headers['content-type']).toContain('text/markdown'); + expect( + mockUseCaseMakerInstance.checkIfRepositoryExists, + ).toHaveBeenCalledWith({ + repoOwner: 'test-owner', + repoName: 'test-repo', + }); + expect( + mockUseCaseMakerInstance.fetchGitlabFileContent, + ).toHaveBeenCalledWith({ + owner: 'test-owner', + repo: 'test-repo', + filePath: 'ee/README.md', + branch: 'main', + }); + expect( + mockUseCaseMakerInstance.fetchGithubFileContent, + ).not.toHaveBeenCalled(); + }); + + it('should return 400 when SCM type is case-sensitive (GITHUB)', async () => { + const response = await request(app).get('/aap/get_ee_readme').query({ + scm: 'GITHUB', + owner: 'test-owner', + repository: 'test-repo', + subdir: 'ee', + }); + + expect(response.status).toBe(400); + expect(response.body).toEqual({ + error: + "Unsupported SCM type 'GITHUB'. Supported values are: Github, Gitlab", + }); + expect( + mockUseCaseMakerInstance.checkIfRepositoryExists, + ).not.toHaveBeenCalled(); + expect( + mockUseCaseMakerInstance.fetchGithubFileContent, + ).not.toHaveBeenCalled(); + expect( + mockUseCaseMakerInstance.fetchGitlabFileContent, + ).not.toHaveBeenCalled(); + }); + + it('should return 400 when SCM type is case-sensitive (GITLAB)', async () => { + const response = await request(app).get('/aap/get_ee_readme').query({ + scm: 'GITLAB', + host: 'gitlab.example.com', + owner: 'test-owner', + repository: 'test-repo', + subdir: 'ee', + }); + + expect(response.status).toBe(400); + expect(response.body).toEqual({ + error: + "Unsupported SCM type 'GITLAB'. 
Supported values are: Github, Gitlab", + }); + expect( + mockUseCaseMakerInstance.checkIfRepositoryExists, + ).not.toHaveBeenCalled(); + expect( + mockUseCaseMakerInstance.fetchGithubFileContent, + ).not.toHaveBeenCalled(); + expect( + mockUseCaseMakerInstance.fetchGitlabFileContent, + ).not.toHaveBeenCalled(); + }); + + it('should handle errors when checkIfRepositoryExists throws', async () => { + const mockError = new Error('Repository check failed'); + mockUseCaseMakerInstance.checkIfRepositoryExists.mockRejectedValue( + mockError, + ); + + const response = await request(app).get('/aap/get_ee_readme').query({ + scm: 'Github', + owner: 'test-owner', + repository: 'test-repo', + subdir: 'ee', + }); + + expect(response.status).toBe(500); + expect( + mockUseCaseMakerInstance.checkIfRepositoryExists, + ).toHaveBeenCalledWith({ + repoOwner: 'test-owner', + repoName: 'test-repo', + }); + expect( + mockUseCaseMakerInstance.fetchGithubFileContent, + ).not.toHaveBeenCalled(); + expect( + mockUseCaseMakerInstance.fetchGitlabFileContent, + ).not.toHaveBeenCalled(); + }); + + it('should handle errors when fetchGithubFileContent throws', async () => { + mockUseCaseMakerInstance.checkIfRepositoryExists.mockResolvedValue(true); + const mockError = new Error('Failed to fetch README'); + mockUseCaseMakerInstance.fetchGithubFileContent.mockRejectedValue( + mockError, + ); + + const response = await request(app).get('/aap/get_ee_readme').query({ + scm: 'Github', + owner: 'test-owner', + repository: 'test-repo', + subdir: 'ee', + }); + + expect(response.status).toBe(500); + expect( + mockUseCaseMakerInstance.checkIfRepositoryExists, + ).toHaveBeenCalledWith({ + repoOwner: 'test-owner', + repoName: 'test-repo', + }); + expect( + mockUseCaseMakerInstance.fetchGithubFileContent, + ).toHaveBeenCalledWith({ + owner: 'test-owner', + repo: 'test-repo', + filePath: 'ee/README.md', + branch: 'main', + }); + expect( + mockUseCaseMakerInstance.fetchGitlabFileContent, + ).not.toHaveBeenCalled(); + }); + + it('should return 400 when subdir parameter is empty', async () => { + const response = await request(app).get('/aap/get_ee_readme').query({ + scm: 'Github', + owner: 'test-owner', + repository: 'test-repo', + subdir: '', + }); + + expect(response.status).toBe(400); + expect(response.body).toEqual({ + error: 'Missing required query parameters: subdir\n', + }); + expect( + mockUseCaseMakerInstance.checkIfRepositoryExists, + ).not.toHaveBeenCalled(); + expect( + mockUseCaseMakerInstance.fetchGithubFileContent, + ).not.toHaveBeenCalled(); + expect( + mockUseCaseMakerInstance.fetchGitlabFileContent, + ).not.toHaveBeenCalled(); + }); + + it('should return 400 when subdir parameter is missing', async () => { + const response = await request(app).get('/aap/get_ee_readme').query({ + scm: 'Github', + owner: 'test-owner', + repository: 'test-repo', + }); + + expect(response.status).toBe(400); + expect(response.body).toEqual({ + error: 'Missing required query parameters: subdir\n', + }); + expect( + mockUseCaseMakerInstance.checkIfRepositoryExists, + ).not.toHaveBeenCalled(); + expect( + mockUseCaseMakerInstance.fetchGithubFileContent, + ).not.toHaveBeenCalled(); + expect( + mockUseCaseMakerInstance.fetchGitlabFileContent, + ).not.toHaveBeenCalled(); + }); + + it('should create UseCaseMaker with correct parameters', async () => { + mockUseCaseMakerInstance.checkIfRepositoryExists.mockResolvedValue(false); + + await request(app).get('/aap/get_ee_readme').query({ + scm: 'Github', + owner: 'test-owner', + repository: 'test-repo', + 
subdir: 'ee', + }); + + expect(MockUseCaseMaker).toHaveBeenCalledWith({ + ansibleConfig: mockAnsibleConfig, + logger: mockLogger, + scmType: 'Github', + apiClient: null, + useCases: [], + organization: null, + token: null, + }); + }); + }); + + describe('Router setup', () => { + it('should handle undefined routes', async () => { + const response = await request(app).get('/nonexistent'); + + expect(response.status).toBe(404); + }); + }); +}); diff --git a/plugins/scaffolder-backend-module-backstage-rhaap/src/router.ts b/plugins/scaffolder-backend-module-backstage-rhaap/src/router.ts new file mode 100644 index 00000000..c0f941ff --- /dev/null +++ b/plugins/scaffolder-backend-module-backstage-rhaap/src/router.ts @@ -0,0 +1,99 @@ +/* + * Copyright 2025 The Ansible plugin Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import express from 'express'; +import Router from 'express-promise-router'; + +import { LoggerService } from '@backstage/backend-plugin-api'; +import { UseCaseMaker } from './actions/helpers/useCaseMaker'; +import { AnsibleConfig } from '@ansible/backstage-rhaap-common'; + +export async function createRouter(options: { + logger: LoggerService; + ansibleConfig: AnsibleConfig; +}): Promise { + const { logger, ansibleConfig } = options; + const router = Router(); + + router.get('/aap/get_ee_readme', async (req, res) => { + // these query parameters are required + // host is optional for now with Github + const required = ['scm', 'owner', 'repository', 'subdir']; + + const missing = required.filter(p => !req.query[p]); + if (missing.length > 0) { + return res.status(400).json({ + error: `Missing required query parameters: ${missing.join(', ')}\n`, + }); + } + + const scm = req.query.scm!.toString(); + const owner = req.query.owner!.toString(); + const repository = req.query.repository!.toString(); + const subdir = req.query.subdir!.toString(); + + // Only allow supported SCM types + const allowedScm = ['Github', 'Gitlab']; + if (!allowedScm.includes(scm)) { + return res.status(400).json({ + error: `Unsupported SCM type '${scm}'. 
Supported values are: ${allowedScm.join(', ')}`, + }); + } + const useCaseMaker = new UseCaseMaker({ + ansibleConfig: ansibleConfig, + logger, + scmType: scm as string, + apiClient: null, + useCases: [], + organization: null, + token: null, + }); + + const repoExists = await useCaseMaker.checkIfRepositoryExists({ + repoOwner: owner as string, + repoName: repository as string, + }); + + if (!repoExists) { + return res + .status(404) + .send( + 'Unable to fetch EE README because the repository does not exist\n', + ); + } + + // Determine the correct README URL + let readmeContent: string = ''; + if (scm === 'Github') { + readmeContent = await useCaseMaker.fetchGithubFileContent({ + owner: owner, + repo: repository, + filePath: `${subdir}/README.md`, + branch: 'main', + }); + } else if (scm === 'Gitlab') { + readmeContent = await useCaseMaker.fetchGitlabFileContent({ + owner: owner, + repo: repository, + filePath: `${subdir}/README.md`, + branch: 'main', + }); + } + res.type('text/markdown'); + return res.send(readmeContent); + }); + + return router; +} diff --git a/yarn.lock b/yarn.lock index d6f7a50d..a3060737 100644 --- a/yarn.lock +++ b/yarn.lock @@ -201,6 +201,7 @@ __metadata: "@backstage/backend-dynamic-feature-service": "npm:^0.7.0" "@backstage/backend-plugin-api": "npm:^1.3.1" "@backstage/backend-test-utils": "npm:^1.5.0" + "@backstage/catalog-model": "npm:^1.7.5" "@backstage/cli": "npm:^0.33.1" "@backstage/config": "npm:^1.3.2" "@backstage/errors": "npm:^1.2.7" @@ -214,14 +215,24 @@ __metadata: "@backstage/types": "npm:^1.2.1" "@janus-idp/cli": "npm:^3.6.1" "@octokit/core": "npm:^5.0.0" + "@types/express": "npm:^5.0.3" "@types/jest": "npm:^29.5.12" + "@types/js-yaml": "npm:^4" "@types/node": "npm:^22.13.4" "@types/node-fetch": "npm:2.6.11" + "@types/semver": "npm:^7" + dedent: "npm:^1.7.0" + express: "npm:^5.1.0" + express-promise-router: "npm:^4.1.1" isomorphic-git: "npm:^1.23.0" + js-yaml: "npm:^4.1.0" msw: "npm:2.4.9" node-fetch: "npm:^2.6.7" + semver: "npm:^7.7.3" + supertest: "npm:^7.1.4" undici: "npm:6.21.2" yaml: "npm:^2.0.0" + zod: "npm:^4.1.12" peerDependencies: react: ^17.0.0 || ^18.0.0 languageName: unknown @@ -15685,7 +15696,7 @@ __metadata: languageName: node linkType: hard -"@types/js-yaml@npm:^4.0.1": +"@types/js-yaml@npm:^4, @types/js-yaml@npm:^4.0.1": version: 4.0.9 resolution: "@types/js-yaml@npm:4.0.9" checksum: 10c0/24de857aa8d61526bbfbbaa383aa538283ad17363fcd5bb5148e2c7f604547db36646440e739d78241ed008702a8920665d1add5618687b6743858fae00da211 @@ -16092,6 +16103,13 @@ __metadata: languageName: node linkType: hard +"@types/semver@npm:^7": + version: 7.7.1 + resolution: "@types/semver@npm:7.7.1" + checksum: 10c0/c938aef3bf79a73f0f3f6037c16e2e759ff40c54122ddf0b2583703393d8d3127130823facb880e694caa324eb6845628186aac1997ee8b31dc2d18fafe26268 + languageName: node + linkType: hard + "@types/send@npm:*": version: 0.17.5 resolution: "@types/send@npm:0.17.5" @@ -20749,6 +20767,18 @@ __metadata: languageName: node linkType: hard +"dedent@npm:^1.7.0": + version: 1.7.0 + resolution: "dedent@npm:1.7.0" + peerDependencies: + babel-plugin-macros: ^3.1.0 + peerDependenciesMeta: + babel-plugin-macros: + optional: true + checksum: 10c0/c5e8a8beb5072bd5e520cb64b27a82d7ec3c2a63ee5ce47dbc2a05d5b7700cefd77a992a752cd0a8b1d979c1db06b14fb9486e805f3ad6088eda6e07cd9bf2d5 + languageName: node + linkType: hard + "deep-equal@npm:^2.0.5": version: 2.2.3 resolution: "deep-equal@npm:2.2.3" @@ -34243,6 +34273,15 @@ __metadata: languageName: node linkType: hard +"semver@npm:^7.7.3": + version: 7.7.3 
+ resolution: "semver@npm:7.7.3" + bin: + semver: bin/semver.js + checksum: 10c0/4afe5c986567db82f44c8c6faef8fe9df2a9b1d98098fc1721f57c696c4c21cebd572f297fc21002f81889492345b8470473bc6f4aff5fb032a6ea59ea2bc45e + languageName: node + linkType: hard + "send@npm:0.19.0": version: 0.19.0 resolution: "send@npm:0.19.0" @@ -35589,7 +35628,7 @@ __metadata: languageName: node linkType: hard -"supertest@npm:^7.1.1": +"supertest@npm:^7.1.1, supertest@npm:^7.1.4": version: 7.1.4 resolution: "supertest@npm:7.1.4" dependencies: @@ -38618,6 +38657,13 @@ __metadata: languageName: node linkType: hard +"zod@npm:^4.1.12": + version: 4.1.12 + resolution: "zod@npm:4.1.12" + checksum: 10c0/b64c1feb19e99d77075261eaf613e0b2be4dfcd3551eff65ad8b4f2a079b61e379854d066f7d447491fcf193f45babd8095551a9d47973d30b46b6d8e2c46774 + languageName: node + linkType: hard + "zstd-codec@npm:^0.1.5": version: 0.1.5 resolution: "zstd-codec@npm:0.1.5"