diff --git a/src/errors.ts b/src/errors.ts
index 924c4ec..775a25d 100644
--- a/src/errors.ts
+++ b/src/errors.ts
@@ -8,4 +8,5 @@ export const ErrorMessage = {
   referenceNotAllowed: (ref: string) => `${JSON_SCHEMA_PROPERTY_REF} not allowed here: ${ref}`,
   refNotFound: (ref: string) => `${JSON_SCHEMA_PROPERTY_REF} can't be resolved: ${ref}`,
   refNotValidFormat: (ref: string) => `${JSON_SCHEMA_PROPERTY_REF} can't be parsed: ${ref}`,
+  duplicateParameter: (name: string, location: string) => `Duplicate parameter detected: name='${name}', in='${location}'`,
 } as const
diff --git a/src/origins.ts b/src/origins.ts
index 64ed023..1954692 100644
--- a/src/origins.ts
+++ b/src/origins.ts
@@ -100,6 +100,111 @@ export function concatenateArraysInProperty(source: Jso, target: Jso, propertyKe
   })
 }
 
+export function removeDuplicates<T>(
+  array: T[],
+  getUniqueKey: (item: T) => string | undefined,
+  onDuplicate?: (item: T, index: number, firstIndex: number) => void
+): T[] {
+  const seen = new Map<string, number>() // key -> first index
+
+  for (let i = 0; i < array.length; i++) {
+    const item = array[i]
+    const key = getUniqueKey(item)
+
+    if (key === undefined) {
+      continue // Skip items without valid keys
+    }
+
+    const firstIndex = seen.get(key)
+    if (firstIndex !== undefined) {
+      // Duplicate found
+      onDuplicate?.(item, i, firstIndex)
+      delete array[i] // Create sparse array
+    } else {
+      seen.set(key, i)
+    }
+  }
+
+  return array // Returns sparse array
+}
+
+export function densify<T>(sparseArray: T[], originsFlag: symbol | undefined): T[] {
+  const dense: T[] = []
+  const originsRecord: OriginsMetaRecord = {}
+
+  for (let i = 0; i < sparseArray.length; i++) {
+    if (i in sparseArray) { // Check if index exists (not deleted)
+      const newIndex = dense.length
+      dense[newIndex] = sparseArray[i]
+
+      // Copy origins if they exist
+      if (originsFlag) {
+        const sourceOrigins = resolveOrigins(sparseArray, i, originsFlag)
+        if (sourceOrigins) {
+          originsRecord[newIndex] = sourceOrigins
+        }
+      }
+    }
+  }
+
+  // Set origins for the dense array
+  if (originsFlag && Object.keys(originsRecord).length > 0) {
+    setJsoProperty(dense as any, originsFlag, originsRecord)
+  }
+
+  return dense
+}
+
+export function addUniqueElements(
+  source: Jso,
+  target: Jso,
+  propertyKey: PropertyKey,
+  originsFlag: symbol | undefined,
+  getUniqueKey: (item: any) => string | undefined
+): void {
+  const sourceArray = getJsoProperty(source, propertyKey)
+  if (!isArray(sourceArray)) {
+    return
+  }
+
+  let targetArray = getJsoProperty(target, propertyKey)
+  if (targetArray === undefined) {
+    targetArray = []
+    copyOrigins(source, target, propertyKey, propertyKey, originsFlag)
+  }
+
+  if (!isArray(targetArray)) {
+    return
+  }
+
+  setJsoProperty(target, propertyKey, targetArray)
+
+  // Build set of existing keys in target
+  const existingKeys = new Set<string>()
+  for (const item of targetArray) {
+    const key = getUniqueKey(item)
+    if (key !== undefined) {
+      existingKeys.add(key)
+    }
+  }
+
+  // Add source items that don't exist in target
+  for (let i = 0; i < sourceArray.length; i++) {
+    const item = sourceArray[i]
+    const key = getUniqueKey(item)
+
+    // Skip if already exists in target (operation overrides path) or key is undefined
+    if (key === undefined || existingKeys.has(key)) {
+      continue
+    }
+
+    // Add to target
+    const targetIndex = targetArray.length
+    targetArray[targetIndex] = item
+    copyOrigins(sourceArray, targetArray, i, targetIndex, originsFlag)
+  }
+}
+
 export function resolveOriginsMetaRecord(source: Jso, originsFlag: symbol | undefined): OriginsMetaRecord | undefined {
   if (!originsFlag) {
     return undefined
@@ -214,4 +319,4 @@ export function copyOriginsForArray(sourceArray: unknown[], targetArray: unknown
 
 export const createSelfOriginsCloneHook: (originsFlag: symbol | undefined) => SyncCloneHook = (originsFlag: symbol | undefined) => {
   return createSelfMetaCloneHook('selfOriginResolver', originsFlag, [])
-}
\ No newline at end of file
+}
diff --git a/src/rules/openapi.ts b/src/rules/openapi.ts
index 6aa9b7a..9efcd29 100644
--- a/src/rules/openapi.ts
+++ b/src/rules/openapi.ts
@@ -76,7 +76,7 @@ import {
   OPEN_API_PROPERTY_TAGS,
 } from './openapi.const'
 
-import { pathItemsUnification } from '../unifies/openapi'
+import { pathItemsUnification, deduplicateParameters } from '../unifies/openapi'
 import {
   calculateHeaderName,
   calculateHeaderPlace,
@@ -626,6 +626,7 @@ const openApiParametersRules = (version: OpenApiSpecVersion): NormalizationRules
     hashOwner: true,
   },
   validate: checkType(TYPE_ARRAY),
+  unify: deduplicateParameters,
 })
 
 const openApiRequestRules = (version: OpenApiSpecVersion): NormalizationRules => ({
diff --git a/src/unifies/openapi.ts b/src/unifies/openapi.ts
index bbf271c..400feb0 100644
--- a/src/unifies/openapi.ts
+++ b/src/unifies/openapi.ts
@@ -1,14 +1,53 @@
 import { UnifyFunction } from '../types'
-import { isObject } from '@netcracker/qubership-apihub-json-crawl'
+import { isArray, isObject } from '@netcracker/qubership-apihub-json-crawl'
 import { OpenAPIV3 } from 'openapi-types'
 import { OPEN_API_HTTP_METHODS } from '../rules/openapi.const'
-import { cleanSeveralOrigins, concatenateArraysInProperty, copyProperty } from '../origins'
+import {
+  addUniqueElements,
+  cleanSeveralOrigins,
+  concatenateArraysInProperty,
+  copyProperty,
+  densify,
+  removeDuplicates,
+} from '../origins'
+import { ErrorMessage } from '../errors'
 import PathItemObject = OpenAPIV3.PathItemObject
+import ParameterObject = OpenAPIV3.ParameterObject
 
 const EXTENSION_PREFIX = 'x-'
 
+const getParameterUniqueKey = (param: ParameterObject): string | undefined => {
+  if (!isObject(param)) {
+    return undefined
+  }
+  const name = param.name
+  const inValue = param.in
+  if (name !== undefined && inValue !== undefined) {
+    return `${name}:${inValue}`
+  }
+  return undefined
+}
+
+export const deduplicateParameters: UnifyFunction = (jso, ctx) => {
+  if (!isArray(jso)) {
+    return jso
+  }
+
+  const sparseArray = removeDuplicates(
+    jso,
+    getParameterUniqueKey,
+    (item, index, firstIndex) => {
+      // Report duplicate error
+      const message = ErrorMessage.duplicateParameter(item?.name, item?.in)
+      ctx.options.onUnifyError?.(message, ctx.path, jso, new Error(message))
+    }
+  )
+
+  return densify(sparseArray, ctx.options.originsFlag)
+}
+
 export const pathItemsUnification: UnifyFunction = (value, { options }) => {
-  if (!isObject(value)) { return value}
+  if (!isObject(value)) { return value }
   const pathItem: PathItemObject = value
   const {
     parameters: pathParameters,
@@ -41,7 +80,7 @@ export const pathItemsUnification: UnifyFunction = (value, { options }) => {
     }
     result[method] = operation //origin copy already.
     // Deep copy not allowed!!! So only mutations :(
-    concatenateArraysInProperty(pathItem, operation, 'parameters', options.originsFlag)
+    addUniqueElements(pathItem, operation, 'parameters', options.originsFlag, getParameterUniqueKey)
     concatenateArraysInProperty(pathItem, operation, 'servers', options.originsFlag)
     if (operation.summary === undefined && pathSummary !== undefined) {
       copyProperty(pathItem, operation, 'summary', options.originsFlag)
diff --git a/src/utils.ts b/src/utils.ts
index 14bfb3a..cd0c1b2 100644
--- a/src/utils.ts
+++ b/src/utils.ts
@@ -383,4 +383,4 @@ export const removeDuplicatesWithMergeOrigins = <T>(array: T[], originFlag: symb
   setOriginsForArray(uniqueItems, originFlag, itemOrigins)
 
   return uniqueItems
-}
\ No newline at end of file
+}
diff --git a/test/oas/bugs.test.ts b/test/oas/bugs.test.ts
index 7d451d1..7b06d94 100644
--- a/test/oas/bugs.test.ts
+++ b/test/oas/bugs.test.ts
@@ -2,7 +2,7 @@ import { TEST_DEFAULTS_FLAG, TEST_HASH_FLAG, TEST_SYNTHETIC_ALL_OF_FLAG } from '
 import { denormalize, JSON_SCHEMA_PROPERTY_DEPRECATED, normalize, OriginLeafs, resolveOrigins } from '../../src'
 import outOfMemoryCauseTooManyCombinations from '../resources/out-of-memory-cause-too-many-combinations.json'
 import bugWithWrongOrigins from '../resources/bug-with-wrong-origins.json'
-import bugWithSpearedArray from '../resources/bug-with-speared-array.json'
+import bugWithSparseArray from '../resources/bug-with-sparse-array.json'
 import { TEST_INLINE_REFS_FLAG, TEST_ORIGINS_FLAG, TEST_ORIGINS_FOR_DEFAULTS, TEST_SYNTHETIC_TITLE_FLAG } from '../helpers'
 import { isObject, syncCrawl } from '@netcracker/qubership-apihub-json-crawl'
 import 'jest-extended'
@@ -239,8 +239,8 @@ describe('Bugs', () => {
     expect(result.paths['customer'].get.parameters[0].schema).not.toHaveProperty([TEST_INLINE_REFS_FLAG])
   })
 
-  it('speared array', () => {
-    const result = normalize(bugWithSpearedArray, {
+  it('sparse array', () => {
+    const result = normalize(bugWithSparseArray, {
       validate: true,
       liftCombiners: true,
       unify: true,
@@ -250,6 +250,10 @@
       hashFlag: TEST_HASH_FLAG,
       inlineRefsFlag: TEST_INLINE_REFS_FLAG,
     }) as any
-    expect(result).toHaveProperty(['paths', '/datasets/events', 'get', 'parameters', 'length'], 10) /*1 and 3 index will be missing*/
+
+    expect(result).toHaveProperty(['paths', '/datasets/events', 'get', 'parameters', 'length'], 8)
+    // Verify all parameters are valid objects
+    const params = result.paths['/datasets/events'].get.parameters
+    expect(params.every((p: any) => p && typeof p === 'object')).toBe(true)
   })
 })
diff --git a/test/oas/unify.origins.test.ts b/test/oas/unify.origins.test.ts
index 5d96f7a..56d894d 100644
--- a/test/oas/unify.origins.test.ts
+++ b/test/oas/unify.origins.test.ts
@@ -273,4 +273,43 @@
     })
   })
+
+  it('should preserve origins when unifying path item and deduplicating operation parameters', () => {
+    const spec = {
+      openapi: '3.0.0',
+      info: { title: 'Test API', version: '1.0.0' },
+      paths: {
+        'test': {
+          parameters: [
+            { name: 'id', in: 'query', schema: { type: 'string' } },
+            { name: 'id', in: 'query', schema: { type: 'string' } }, // duplicate
+            { name: 'limit', in: 'query', schema: { type: 'integer' } },
+          ],
+          get: {
+            parameters: [
+              { name: 'offset', in: 'query', schema: { type: 'string' } },
+              { name: 'offset', in: 'query', schema: { type: 'string' } }, // duplicate
+              { name: 'maxItems', in: 'query', schema: { type: 'integer' } },
+            ],
+            responses: {
+              '200': { description: 'OK' },
+            },
+          },
+        },
+      },
+    }
+
+    const result = normalize(spec, OPTIONS) as any
+
+    // Should have 4 parameters after deduplication
+    const params = result.paths?.['test']?.get?.parameters
+    expect(params).toHaveLength(4)
+
+    commonOriginsCheck(result, { source: spec })
+    const resultWithHmr = convertOriginToHumanReadable(result, TEST_ORIGINS_FLAG)
+    expect(resultWithHmr).toHaveProperty(['paths', 'test', 'get', 'parameters', TEST_ORIGINS_FLAG, 0], ['paths/test/get/parameters/0'])
+    expect(resultWithHmr).toHaveProperty(['paths', 'test', 'get', 'parameters', TEST_ORIGINS_FLAG, 1], ['paths/test/get/parameters/2'])
+    expect(resultWithHmr).toHaveProperty(['paths', 'test', 'get', 'parameters', TEST_ORIGINS_FLAG, 2], ['paths/test/parameters/0'])
+    expect(resultWithHmr).toHaveProperty(['paths', 'test', 'get', 'parameters', TEST_ORIGINS_FLAG, 3], ['paths/test/parameters/2'])
+  })
 
 })
diff --git a/test/oas/unify.parameters.test.ts b/test/oas/unify.parameters.test.ts
new file mode 100644
index 0000000..6f83942
--- /dev/null
+++ b/test/oas/unify.parameters.test.ts
@@ -0,0 +1,438 @@
+import { normalize } from '../../src'
+import { TEST_HASH_FLAG, TEST_INLINE_REFS_FLAG, TEST_ORIGINS_FLAG, TEST_SYNTHETIC_TITLE_FLAG } from '../helpers'
+import { expect, describe, it } from '@jest/globals'
+import 'jest-extended'
+
+describe('Parameter Deduplication', () => {
+  const baseOptions = {
+    validate: true,
+    liftCombiners: true,
+    unify: true,
+    allowNotValidSyntheticChanges: true,
+    syntheticTitleFlag: TEST_SYNTHETIC_TITLE_FLAG,
+    originsFlag: TEST_ORIGINS_FLAG,
+    hashFlag: TEST_HASH_FLAG,
+    inlineRefsFlag: TEST_INLINE_REFS_FLAG,
+  }
+
+  describe('Duplicates in Path Item Parameters', () => {
+    it('should detect and remove duplicate parameters at path level', () => {
+      const errors: string[] = []
+      const spec = {
+        openapi: '3.0.0',
+        info: { title: 'Test API', version: '1.0.0' },
+        paths: {
+          '/users/{id}': {
+            parameters: [
+              { name: 'id', in: 'path', required: true, schema: { type: 'string' } },
+              { name: 'id', in: 'path', required: true, schema: { type: 'string' } }, // duplicate
+            ],
+            get: {
+              responses: {
+                '200': { description: 'OK' },
+              },
+            },
+          },
+        },
+      }
+
+      const result = normalize(spec, {
+        ...baseOptions,
+        onUnifyError: (message) => {
+          errors.push(message)
+        },
+      }) as any
+
+      // Should report error for duplicate
+      expect(errors).toHaveLength(1)
+      expect(errors[0]).toContain("Duplicate parameter detected: name='id', in='path'")
+
+      // Path-level parameters are moved to operations, so check operation parameters
+      // After pathItemsUnification, path.parameters is removed and moved to each operation
+      const operationParams = result.paths?.['/users/{id}']?.get?.parameters
+      expect(operationParams).toHaveLength(1)
+      expect(operationParams[0]).toMatchObject({
+        name: 'id',
+        in: 'path',
+      })
+    })
+
+    it('should allow parameters with same name but different location', () => {
+      const errors: string[] = []
+      const spec = {
+        openapi: '3.0.0',
+        info: { title: 'Test API', version: '1.0.0' },
+        paths: {
+          '/users': {
+            parameters: [
+              { name: 'id', in: 'query', schema: { type: 'string' } },
+              { name: 'id', in: 'header', schema: { type: 'string' } }, // different location
+            ],
+            get: {
+              responses: {
+                '200': { description: 'OK' },
+              },
+            },
+          },
+        },
+      }
+
+      const result = normalize(spec, {
+        ...baseOptions,
+        onUnifyError: (message) => {
+          errors.push(message)
+        },
+      }) as any
+
+      // Should NOT report error
+      expect(errors).toHaveLength(0)
+
+      // Should keep both parameters (in operation after pathItemsUnification)
+      const operationParams = result.paths?.['/users']?.get?.parameters
+      expect(operationParams).toHaveLength(2)
+    })
+  })
+
+  describe('Duplicates in Operation Parameters', () => {
+    it('should detect and remove duplicate parameters at operation level', () => {
+      const errors: string[] = []
+      const spec = {
+        openapi: '3.0.0',
+        info: { title: 'Test API', version: '1.0.0' },
+        paths: {
+          '/users': {
+            get: {
+              parameters: [
+                { name: 'limit', in: 'query', schema: { type: 'integer' } },
+                { name: 'limit', in: 'query', schema: { type: 'integer' } }, // duplicate
+              ],
+              responses: {
+                '200': { description: 'OK' },
+              },
+            },
+          },
+        },
+      }
+
+      const result = normalize(spec, {
+        ...baseOptions,
+        onUnifyError: (message) => {
+          errors.push(message)
+        },
+      }) as any
+
+      // Should report error for duplicate
+      expect(errors).toHaveLength(1)
+      expect(errors[0]).toContain("Duplicate parameter detected: name='limit', in='query'")
+
+      // Should keep only first occurrence
+      const operationParams = result.paths?.['/users']?.get?.parameters
+      expect(operationParams).toHaveLength(1)
+      expect(operationParams[0]).toMatchObject({
+        name: 'limit',
+        in: 'query',
+      })
+    })
+  })
+
+  describe('Operation Parameters Override Path Parameters', () => {
+    it('should allow operation parameter to override path parameter without error', () => {
+      const errors: string[] = []
+      const spec = {
+        openapi: '3.0.0',
+        info: { title: 'Test API', version: '1.0.0' },
+        paths: {
+          '/users/{id}': {
+            parameters: [
+              { name: 'id', in: 'path', required: true, schema: { type: 'string' }, description: 'Path level' },
+            ],
+            get: {
+              parameters: [
+                { name: 'id', in: 'path', required: true, schema: { type: 'integer' }, description: 'Operation level' },
+              ],
+              responses: {
+                '200': { description: 'OK' },
+              },
+            },
+          },
+        },
+      }
+
+      const result = normalize(spec, {
+        ...baseOptions,
+        onUnifyError: (message) => {
+          errors.push(message)
+        },
+      }) as any
+
+      // Should NOT report error (override is valid)
+      expect(errors).toHaveLength(0)
+
+      // Should keep operation-level definition
+      const operationParams = result.paths?.['/users/{id}']?.get?.parameters
+      expect(operationParams).toHaveLength(1)
+      expect(operationParams[0]).toMatchObject({
+        name: 'id',
+        in: 'path',
+        description: 'Operation level',
+        schema: { type: 'integer' },
+      })
+    })
+
+    it('should merge non-overridden path parameters with operation parameters', () => {
+      const errors: string[] = []
+      const spec = {
+        openapi: '3.0.0',
+        info: { title: 'Test API', version: '1.0.0' },
+        paths: {
+          '/users/{id}': {
+            parameters: [
+              { name: 'id', in: 'path', required: true, schema: { type: 'string' } },
+              { name: 'apiKey', in: 'header', required: true, schema: { type: 'string' } },
+            ],
+            get: {
+              parameters: [
+                { name: 'limit', in: 'query', schema: { type: 'integer' } },
+              ],
+              responses: {
+                '200': { description: 'OK' },
+              },
+            },
+          },
+        },
+      }
+
+      const result = normalize(spec, {
+        ...baseOptions,
+        onUnifyError: (message) => {
+          errors.push(message)
+        },
+      }) as any
+
+      // Should NOT report any errors
+      expect(errors).toHaveLength(0)
+
+      // Should have all three parameters
+      const operationParams = result.paths?.['/users/{id}']?.get?.parameters
+      expect(operationParams).toHaveLength(3)
+
+      // Operation parameters come first, then non-overridden path parameters are appended
+      expect(operationParams[0]).toMatchObject({ name: 'limit', in: 'query' })
+      expect(operationParams[1]).toMatchObject({ name: 'id', in: 'path' })
+      expect(operationParams[2]).toMatchObject({ name: 'apiKey', in: 'header' })
+    })
+  })
+
+  describe('Mixed Scenarios', () => {
+    it('should handle path duplicates + operation override correctly', () => {
+      const errors: string[] = []
+      const spec = {
+        openapi: '3.0.0',
+        info: { title: 'Test API', version: '1.0.0' },
+        paths: {
+          '/users/{id}': {
+            parameters: [
+              { name: 'id', in: 'path', required: true, schema: { type: 'string' } },
+              { name: 'id', in: 'path', required: true, schema: { type: 'string' } }, // duplicate at path level
+              { name: 'apiKey', in: 'header', schema: { type: 'string' } },
+            ],
+            get: {
+              parameters: [
+                { name: 'id', in: 'path', required: true, schema: { type: 'integer' } }, // overrides path
+                { name: 'limit', in: 'query', schema: { type: 'integer' } },
+                { name: 'limit', in: 'query', schema: { type: 'integer' } }, // duplicate at operation level
+              ],
+              responses: {
+                '200': { description: 'OK' },
+              },
+            },
+          },
+        },
+      }
+
+      const result = normalize(spec, {
+        ...baseOptions,
+        onUnifyError: (message) => {
+          errors.push(message)
+        },
+      }) as any
+
+      // Note: pathItemsUnification happens before deduplication, so only one error is reported
+      expect(errors).toHaveLength(1)
+      expect(errors[0]).toContain("Duplicate parameter detected: name='limit', in='query'")
+
+      // Final result should have 3 unique parameters
+      const operationParams = result.paths?.['/users/{id}']?.get?.parameters
+      expect(operationParams).toHaveLength(3)
+
+      // apiKey from path (not overridden)
+      expect(operationParams.find((p: any) => p.name === 'apiKey')).toMatchObject({
+        name: 'apiKey',
+        in: 'header',
+      })
+
+      // id from operation (overrides path)
+      const idParam = operationParams.find((p: any) => p.name === 'id')
+      expect(idParam).toMatchObject({
+        name: 'id',
+        in: 'path',
+        schema: { type: 'integer' }, // operation version
+      })
+
+      // limit from operation
+      expect(operationParams.find((p: any) => p.name === 'limit')).toMatchObject({
+        name: 'limit',
+        in: 'query',
+      })
+    })
+
+    it('should handle multiple operations with different overrides', () => {
+      const errors: string[] = []
+      const spec = {
+        openapi: '3.0.0',
+        info: { title: 'Test API', version: '1.0.0' },
+        paths: {
+          '/users/{id}': {
+            parameters: [
+              { name: 'id', in: 'path', required: true, schema: { type: 'string' } },
+            ],
+            get: {
+              parameters: [
+                { name: 'id', in: 'path', required: true, schema: { type: 'integer' } }, // override
+              ],
+              responses: {
+                '200': { description: 'OK' },
+              },
+            },
+            post: {
+              parameters: [
+                { name: 'limit', in: 'query', schema: { type: 'integer' } }, // no override
+              ],
+              responses: {
+                '201': { description: 'Created' },
+              },
+            },
+          },
+        },
+      }
+
+      const result = normalize(spec, {
+        ...baseOptions,
+        onUnifyError: (message) => {
+          errors.push(message)
+        },
+      }) as any
+
+      // Should NOT report any errors
+      expect(errors).toHaveLength(0)
+
+      // GET should have overridden id
+      const getParams = result.paths?.['/users/{id}']?.get?.parameters
+      expect(getParams).toHaveLength(1)
+      expect(getParams[0]).toMatchObject({
+        name: 'id',
+        in: 'path',
+        schema: { type: 'integer' },
+      })
+
+      // POST should have both operation limit and path id (operation params first)
+      const postParams = result.paths?.['/users/{id}']?.post?.parameters
+      expect(postParams).toHaveLength(2)
+      expect(postParams[0]).toMatchObject({ name: 'limit', in: 'query' })
+      expect(postParams[1]).toMatchObject({ name: 'id', in: 'path', schema: { type: 'string' } })
+    })
+  })
+
+  describe('No Duplicates', () => {
+    it('should not report errors when there are no duplicates', () => {
+      const errors: string[] = []
+      const spec = {
+        openapi: '3.0.0',
+        info: { title: 'Test API', version: '1.0.0' },
+        paths: {
+          '/users': {
+            parameters: [
+              { name: 'apiKey', in: 'header', schema: { type: 'string' } },
+            ],
+            get: {
+              parameters: [
+                { name: 'limit', in: 'query', schema: { type: 'integer' } },
+                { name: 'offset', in: 'query', schema: { type: 'integer' } },
+              ],
+              responses: {
+                '200': { description: 'OK' },
+              },
+            },
+          },
+        },
+      }
+
+      const result = normalize(spec, {
+        ...baseOptions,
+        onUnifyError: (message) => {
+          errors.push(message)
+        },
+      }) as any
+
+      // Should NOT report any errors
+      expect(errors).toHaveLength(0)
+
+      // Should have all 3 parameters
+      const operationParams = result.paths?.['/users']?.get?.parameters
+      expect(operationParams).toHaveLength(3)
+    })
+  })
+
+  describe('Sparse Array Handling', () => {
+    it('should handle sparse arrays with null/invalid parameters', () => {
+      const validateErrors: string[] = []
+      const unifyErrors: string[] = []
+      const spec = {
+        openapi: '3.0.0',
+        info: { title: 'Test API', version: '1.0.0' },
+        paths: {
+          '/test': {
+            parameters: [
+              null, // invalid - creates sparse array
+              { name: 'id', in: 'path', schema: { type: 'string' } },
+              { name: 'limit', in: 'query', schema: { type: 'integer' } },
+            ],
+            get: {
+              parameters: [
+                { name: 'offset', in: 'query', schema: { type: 'integer' } },
+                'invalid', // invalid - creates sparse array
+              ],
+              responses: {
+                '200': { description: 'OK' },
+              },
+            },
+          },
+        },
+      }
+
+      const result = normalize(spec, {
+        ...baseOptions,
+        onValidateError: (message) => {
+          validateErrors.push(message)
+        },
+        onUnifyError: (message) => {
+          unifyErrors.push(message)
+        },
+      }) as any
+
+      // Should not report errors for invalid items (they don't have name+in so can't be duplicates)
+      expect(unifyErrors).toHaveLength(0)
+      expect(validateErrors).toHaveLength(2)
+
+      // After densification, parameters array should be dense (no holes)
+      const params = result.paths?.['/test']?.get?.parameters
+      expect(params).toBeDefined()
+      expect(Array.isArray(params)).toBe(true)
+      expect(params).toHaveLength(3)
+      expect(params[0]).toMatchObject({ name: 'offset', in: 'query', schema: { type: 'integer' } })
+      expect(params[1]).toMatchObject({ name: 'id', in: 'path', schema: { type: 'string' } })
+      expect(params[2]).toMatchObject({ name: 'limit', in: 'query', schema: { type: 'integer' } })
+    })
+  })
+})
+
diff --git a/test/resources/bug-with-speared-array.json b/test/resources/bug-with-sparse-array.json
similarity index 100%
rename from test/resources/bug-with-speared-array.json
rename to test/resources/bug-with-sparse-array.json