From 07d431f2cefdcb62295525bc47b84afba0f3abd1 Mon Sep 17 00:00:00 2001 From: Josh Mock Date: Thu, 10 Apr 2025 15:26:27 -0500 Subject: [PATCH 01/22] WIP: Switch integration tests to build code instead of run dynamically --- .dockerignore | 3 + .gitignore | 3 + .npmignore | 3 + package.json | 6 +- scripts/download-artifacts.js | 169 ++--- scripts/generate-docs-examples.js | 2 +- test/integration/helper.js | 124 ---- test/integration/index.js | 450 +----------- test/integration/reporter.js | 115 ---- test/integration/test-builder.js | 391 +++++++++++ test/integration/test-runner.js | 1072 ----------------------------- 11 files changed, 489 insertions(+), 1849 deletions(-) delete mode 100644 test/integration/helper.js delete mode 100644 test/integration/reporter.js create mode 100644 test/integration/test-builder.js delete mode 100644 test/integration/test-runner.js diff --git a/.dockerignore b/.dockerignore index a448fae9c..c2031b20f 100644 --- a/.dockerignore +++ b/.dockerignore @@ -6,3 +6,6 @@ elasticsearch lib junit-output .tap +rest-api-spec +yaml-rest-tests +generated-tests diff --git a/.gitignore b/.gitignore index adec49623..2d7e63d04 100644 --- a/.gitignore +++ b/.gitignore @@ -68,3 +68,6 @@ bun.lockb test-results processinfo .tap +rest-api-spec +yaml-rest-tests +generated-tests diff --git a/.npmignore b/.npmignore index 8a921bbd6..3f909d8c7 100644 --- a/.npmignore +++ b/.npmignore @@ -74,3 +74,6 @@ CONTRIBUTING.md src bun.lockb .tap +rest-api-spec +yaml-rest-tests +generated-tests diff --git a/package.json b/package.json index 1fa5906d4..d86ba62ca 100644 --- a/package.json +++ b/package.json @@ -18,7 +18,9 @@ "test:coverage-100": "npm run build && tap --coverage --100", "test:coverage-report": "npm run build && tap --coverage && nyc report --reporter=text-lcov > coverage.lcov", "test:coverage-ui": "npm run build && tap --coverage --coverage-report=html", - "test:integration": "tsc && node test/integration/index.js", + "test:integration-build": "npm run build 
&& node test/integration/index.js", + "test:integration": "npm run test:integration-build && env TEST_ES_STACK=1 tap run --jobs=1 generated-tests/", + "test:integration-serverless": "npm run test:integration-build && env TEST_ES_SERVERLESS=1 tap run --jobs=1 generated-tests/", "lint": "ts-standard src", "lint:fix": "ts-standard --fix src", "license-checker": "license-checker --production --onlyAllow='MIT;Apache-2.0;Apache1.1;ISC;BSD-3-Clause;BSD-2-Clause;0BSD'", @@ -77,7 +79,7 @@ "node-fetch": "2.7.0", "ora": "5.4.1", "proxy": "1.0.2", - "rimraf": "3.0.2", + "rimraf": "5.0.10", "semver": "7.7.1", "split2": "4.2.0", "stoppable": "1.1.0", diff --git a/scripts/download-artifacts.js b/scripts/download-artifacts.js index d8d5e189e..5be3617c5 100644 --- a/scripts/download-artifacts.js +++ b/scripts/download-artifacts.js @@ -3,162 +3,103 @@ * SPDX-License-Identifier: Apache-2.0 */ -'use strict' - const { join } = require('path') -const minimist = require('minimist') const stream = require('stream') const { promisify } = require('util') const { createWriteStream, promises } = require('fs') -const rimraf = require('rimraf') +const { rimraf } = require('rimraf') const fetch = require('node-fetch') const crossZip = require('cross-zip') const ora = require('ora') -const { mkdir, writeFile } = promises +const { mkdir, cp } = promises const pipeline = promisify(stream.pipeline) const unzip = promisify(crossZip.unzip) -const rm = promisify(rimraf) - -const esFolder = join(__dirname, '..', 'elasticsearch') -const zipFolder = join(esFolder, 'artifacts.zip') -const specFolder = join(esFolder, 'rest-api-spec', 'api') -const freeTestFolder = join(esFolder, 'rest-api-spec', 'test', 'free') -const xPackTestFolder = join(esFolder, 'rest-api-spec', 'test', 'platinum') -const artifactInfo = join(esFolder, 'info.json') - -async function downloadArtifacts (opts) { - if (typeof opts.version !== 'string') { - throw new Error('Missing version') - } +const testYamlFolder = join(__dirname, '..', 
'yaml-rest-tests') +const zipFile = join(__dirname, '..', 'elasticsearch-clients-tests.zip') + +const schemaFolder = join(__dirname, '..', 'schema') +const schemaJson = join(schemaFolder, 'schema.json') + +async function downloadArtifacts (localTests, version = 'main') { const log = ora('Checking out spec and test').start() - log.text = 'Resolving versions' - let resolved - try { - resolved = await resolve(opts.version, opts.hash) - } catch (err) { - log.fail(err.message) - process.exit(1) - } + const { GITHUB_TOKEN } = process.env - opts.id = opts.id || resolved.id - opts.hash = opts.hash || resolved.hash - opts.version = resolved.version + log.text = 'Clean tests folder' + await rimraf(testYamlFolder) + await mkdir(testYamlFolder, { recursive: true }) - const info = loadInfo() + log.text = 'Fetching test YAML files' - if (info && info.version === opts.version) { - if (info.hash === opts.hash && info.id === opts.id) { - log.succeed('The artifact copy present locally is already up to date') - return + if (localTests) { + log.text = `Copying local tests from ${localTests}` + await cp(localTests, testYamlFolder, { recursive: true }) + } else { + if (!GITHUB_TOKEN) { + log.fail("Missing required environment variable 'GITHUB_TOKEN'") + process.exit(1) } - } - log.text = 'Cleanup checkouts/elasticsearch' - await rm(esFolder) - await mkdir(esFolder, { recursive: true }) + const response = await fetch('https://api.github.com/repos/elastic/elasticsearch-clients-tests/zipball/main', { + headers: { + Authorization: `Bearer ${GITHUB_TOKEN}`, + Accept: 'application/vnd.github+json' + } + }) - log.text = 'Downloading artifacts' - const response = await fetch(resolved.url) - if (!response.ok) { - log.fail(`unexpected response ${response.statusText}`) - process.exit(1) - } - await pipeline(response.body, createWriteStream(zipFolder)) + if (!response.ok) { + log.fail(`unexpected response ${response.statusText}`) + process.exit(1) + } - log.text = 'Unzipping' - await 
unzip(zipFolder, esFolder) + log.text = 'Downloading tests zipball' + await pipeline(response.body, createWriteStream(zipFile)) - log.text = 'Cleanup' - await rm(zipFolder) + log.text = 'Unzipping tests' + await unzip(zipFile, testYamlFolder) - log.text = 'Update info' - await writeFile(artifactInfo, JSON.stringify(opts), 'utf8') + log.text = 'Cleanup' + await rimraf(zipFile) + } - log.succeed('Done') -} + log.text = 'Fetching Elasticsearch specification' + await rimraf(schemaFolder) + await mkdir(schemaFolder, { recursive: true }) -function loadInfo () { - try { - return require(artifactInfo) - } catch (err) { - return null + let specVersion = version + if (version !== 'main') { + specVersion = version.split('.').slice(0, 2).join('.') } -} -async function resolve (version, hash) { - const response = await fetch(`https://artifacts-api.elastic.co/v1/versions/${version}`) + const response = await fetch(`https://raw.githubusercontent.com/elastic/elasticsearch-specification/${specVersion}/output/schema/schema.json`) if (!response.ok) { - throw new Error(`unexpected response ${response.statusText}`) + log.fail(`unexpected response ${response.statusText}`) + process.exit(1) } - const data = await response.json() - const esBuilds = data.version.builds - .filter(build => build.projects.elasticsearch != null) - .map(build => { - return { - projects: build.projects.elasticsearch, - buildId: build.build_id, - date: build.start_time, - version: build.version - } - }) - .sort((a, b) => { - const dA = new Date(a.date) - const dB = new Date(b.date) - if (dA > dB) return -1 - if (dA < dB) return 1 - return 0 - }) - - if (hash != null) { - const build = esBuilds.find(build => build.projects.commit_hash === hash) - if (!build) { - throw new Error(`Can't find any build with hash '${hash}'`) - } - const zipKey = Object.keys(build.projects.packages).find(key => key.startsWith('rest-resources-zip-') && key.endsWith('.zip')) - return { - url: build.projects.packages[zipKey].url, - id: 
build.buildId, - hash: build.projects.commit_hash, - version: build.version - } - } + log.text = 'Downloading schema.json' + await pipeline(response.body, createWriteStream(schemaJson)) - const lastBuild = esBuilds[0] - const zipKey = Object.keys(lastBuild.projects.packages).find(key => key.startsWith('rest-resources-zip-') && key.endsWith('.zip')) - return { - url: lastBuild.projects.packages[zipKey].url, - id: lastBuild.buildId, - hash: lastBuild.projects.commit_hash, - version: lastBuild.version - } + log.succeed('Done') } -async function main (options) { - delete options._ - await downloadArtifacts(options) +async function main () { + await downloadArtifacts() } + if (require.main === module) { process.on('unhandledRejection', function (err) { console.error(err) process.exit(1) }) - const options = minimist(process.argv.slice(2), { - string: ['id', 'version', 'hash'] - }) - main(options).catch(t => { + main().catch(t => { console.log(t) process.exit(2) }) } module.exports = downloadArtifacts -module.exports.locations = { - specFolder, - freeTestFolder, - xPackTestFolder -} +module.exports.locations = { testYamlFolder, zipFile, schemaJson } diff --git a/scripts/generate-docs-examples.js b/scripts/generate-docs-examples.js index 3a6813f7a..8026547c3 100644 --- a/scripts/generate-docs-examples.js +++ b/scripts/generate-docs-examples.js @@ -6,7 +6,7 @@ const { join } = require('path') const { writeFile } = require('fs/promises') const fetch = require('node-fetch') -const rimraf = require('rimraf') +const { rimraf } = require('rimraf') const ora = require('ora') const { convertRequests } = require('@elastic/request-converter') const minimist = require('minimist') diff --git a/test/integration/helper.js b/test/integration/helper.js deleted file mode 100644 index bfe2535fa..000000000 --- a/test/integration/helper.js +++ /dev/null @@ -1,124 +0,0 @@ -/* - * Copyright Elasticsearch B.V. 
and contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -'use strict' - -const assert = require('node:assert') -const fetch = require('node-fetch') - -function runInParallel (client, operation, options, clientOptions) { - if (options.length === 0) return Promise.resolve() - const operations = options.map(opts => { - const api = delve(client, operation).bind(client) - return api(opts, clientOptions) - }) - - return Promise.all(operations) -} - -// code from https://github.com/developit/dlv -// needed to support an edge case: `a\.b` -// where `a.b` is a single field: { 'a.b': true } -function delve (obj, key, def, p) { - p = 0 - // handle the key with a dot inside that is not a part of the path - // and removes the backslashes from the key - key = key.split - ? key.split(/(? k.replace(/\\/g, '')) - : key.replace(/\\/g, '') - while (obj && p < key.length) obj = obj[key[p++]] - return (obj === undefined || p < key.length) ? def : obj -} - -function to (promise) { - return promise.then(data => [null, data], err => [err, undefined]) -} - -const sleep = ms => new Promise(resolve => setTimeout(resolve, ms)) - -function isXPackTemplate (name) { - if (name.startsWith('.monitoring-')) { - return true - } - if (name.startsWith('.watch') || name.startsWith('.triggered_watches')) { - return true - } - if (name.startsWith('.data-frame-')) { - return true - } - if (name.startsWith('.ml-')) { - return true - } - if (name.startsWith('.transform-')) { - return true - } - if (name.startsWith('.deprecation-')) { - return true - } - switch (name) { - case '.watches': - case 'logstash-index-template': - case '.logstash-management': - case 'security_audit_log': - case '.slm-history': - case '.async-search': - case 'saml-service-provider': - case 'ilm-history': - case 'logs': - case 'logs-settings': - case 'logs-mappings': - case 'metrics': - case 'metrics-settings': - case 'metrics-mappings': - case 'synthetics': - case 'synthetics-settings': - case 'synthetics-mappings': - case 
'.snapshot-blob-cache': - case 'data-streams-mappings': - return true - } - return false -} - -async function getSpec () { - const response = await fetch('https://raw.githubusercontent.com/elastic/elasticsearch-specification/main/output/schema/schema.json') - return await response.json() -} - -let spec = null - -// some keys for the path used in the yaml test are not support in the client -// for example: snapshot.createRepository({ repository }) will not work. -// This code changes the params to the appropriate name, in the example above, -// "repository" will be renamed to "name" -async function updateParams (cmd) { - if (spec == null) { - spec = await getSpec() - } - const endpoint = spec.endpoints.find(endpoint => endpoint.name === cmd.api) - assert(endpoint != null) - if (endpoint.request == null) return cmd - - const type = spec.types.find(type => type.name.name === endpoint.request.name && type.name.namespace === endpoint.request.namespace) - assert(type != null) - - const pathParams = type.path.reduce((acc, val) => { - if (val.codegenName != null) { - acc[val.name] = val.codegenName - } - return acc - }, {}) - - for (const key in cmd.params) { - if (pathParams[key] != null) { - cmd.params[pathParams[key]] = cmd.params[key] - delete cmd.params[key] - } - } - - return cmd -} - -module.exports = { runInParallel, delve, to, sleep, isXPackTemplate, updateParams } diff --git a/test/integration/index.js b/test/integration/index.js index f226ee893..7ab553f15 100644 --- a/test/integration/index.js +++ b/test/integration/index.js @@ -10,436 +10,44 @@ process.on('unhandledRejection', function (err) { process.exit(1) }) -const { writeFileSync, readFileSync, readdirSync, statSync } = require('fs') -const { join, sep } = require('path') -const yaml = require('js-yaml') -const minimist = require('minimist') -const ms = require('ms') -const { Client } = require('../../index') -const build = require('./test-runner') -const { sleep } = require('./helper') -const 
createJunitReporter = require('./reporter') +const assert = require('node:assert') +const globby = require('globby') const downloadArtifacts = require('../../scripts/download-artifacts') -const yamlFolder = downloadArtifacts.locations.freeTestFolder -const xPackYamlFolder = downloadArtifacts.locations.xPackTestFolder +const buildTests = require('./test-builder') -const MAX_API_TIME = 1000 * 90 -const MAX_FILE_TIME = 1000 * 30 -const MAX_TEST_TIME = 1000 * 3 +const yamlFolder = downloadArtifacts.locations.testYamlFolder -const options = minimist(process.argv.slice(2), { - boolean: ['bail'], - string: ['suite', 'test'] -}) - -const freeSkips = { - // working on fixes for these - '/free/aggregations/bucket_selector.yml': ['bad script'], - '/free/aggregations/bucket_script.yml': ['bad script'], - - // either the YAML test definition is wrong, or this fails because JSON.stringify is coercing "1.0" to "1" - '/free/aggregations/percentiles_bucket.yml': ['*'], - - // not supported yet - '/free/cluster.desired_nodes/10_basic.yml': ['*'], - - // Cannot find methods on `Internal` object - '/free/cluster.desired_balance/10_basic.yml': ['*'], - '/free/cluster.desired_nodes/20_dry_run.yml': ['*'], - '/free/cluster.prevalidate_node_removal/10_basic.yml': ['*'], - - // the v8 client never sends the scroll_id in querystring, - // the way the test is structured causes a security exception - 'free/scroll/10_basic.yml': ['Body params override query string'], - 'free/scroll/11_clear.yml': [ - 'Body params with array param override query string', - 'Body params with string param scroll id override query string' - ], - 'free/cat.allocation/10_basic.yml': ['*'], - 'free/cat.snapshots/10_basic.yml': ['Test cat snapshots output'], - - 'indices.stats/50_disk_usage.yml': ['Disk usage stats'], - 'indices.stats/60_field_usage.yml': ['Field usage stats'], - - // skipping because we are booting ES with `discovery.type=single-node` - // and this test will fail because of this configuration - 
'nodes.stats/30_discovery.yml': ['*'], - - // the expected error is returning a 503, - // which triggers a retry and the node to be marked as dead - 'search.aggregation/240_max_buckets.yml': ['*'], - - // long values and json do not play nicely together - 'search.aggregation/40_range.yml': ['Min and max long range bounds'], - - // the yaml runner assumes that null means "does not exists", - // while null is a valid json value, so the check will fail - 'search/320_disallow_queries.yml': ['Test disallow expensive queries'], - 'free/tsdb/90_unsupported_operations.yml': ['noop update'] +const getAllFiles = async dir => { + const files = await globby(dir, { + expandDirectories: { + extensions: ['yml', 'yaml'] + } + }) + return files.sort() } -const platinumDenyList = { - 'api_key/10_basic.yml': ['Test get api key'], - 'api_key/20_query.yml': ['*'], - 'api_key/11_invalidation.yml': ['Test invalidate api key by realm name'], - 'analytics/histogram.yml': ['Histogram requires values in increasing order'], - - // object keys must me strings, and `0.0.toString()` is `0` - 'ml/evaluate_data_frame.yml': [ - 'Test binary_soft_classifition precision', - 'Test binary_soft_classifition recall', - 'Test binary_soft_classifition confusion_matrix' - ], - - // The cleanup fails with a index not found when retrieving the jobs - 'ml/get_datafeed_stats.yml': ['Test get datafeed stats when total_search_time_ms mapping is missing'], - 'ml/bucket_correlation_agg.yml': ['Test correlation bucket agg simple'], - - // start should be a string - 'ml/jobs_get_result_overall_buckets.yml': ['Test overall buckets given epoch start and end params'], - - // this can't happen with the client - 'ml/start_data_frame_analytics.yml': ['Test start with inconsistent body/param ids'], - 'ml/stop_data_frame_analytics.yml': ['Test stop with inconsistent body/param ids'], - 'ml/preview_datafeed.yml': ['*'], - - // Investigate why is failing - 'ml/inference_crud.yml': ['*'], - 'ml/categorization_agg.yml': ['Test 
categorization aggregation with poor settings'], - 'ml/filter_crud.yml': ['*'], - - // investigate why this is failing - 'monitoring/bulk/10_basic.yml': ['*'], - 'monitoring/bulk/20_privileges.yml': ['*'], - 'license/20_put_license.yml': ['*'], - 'snapshot/10_basic.yml': ['*'], - 'snapshot/20_operator_privileges_disabled.yml': ['*'], - - // the body is correct, but the regex is failing - 'sql/sql.yml': ['Getting textual representation'], - 'searchable_snapshots/10_usage.yml': ['*'], - 'service_accounts/10_basic.yml': ['*'], - - // we are setting two certificates in the docker config - 'ssl/10_basic.yml': ['*'], - 'token/10_basic.yml': ['*'], - 'token/11_invalidation.yml': ['*'], - - // very likely, the index template has not been loaded yet. - // we should run a indices.existsTemplate, but the name of the - // template may vary during time. - 'transforms_crud.yml': [ - 'Test basic transform crud', - 'Test transform with query and array of indices in source', - 'Test PUT continuous transform', - 'Test PUT continuous transform without delay set' - ], - 'transforms_force_delete.yml': [ - 'Test force deleting a running transform' - ], - 'transforms_cat_apis.yml': ['*'], - 'transforms_start_stop.yml': ['*'], - 'transforms_stats.yml': ['*'], - 'transforms_stats_continuous.yml': ['*'], - 'transforms_update.yml': ['*'], - - // js does not support ulongs - 'unsigned_long/10_basic.yml': ['*'], - 'unsigned_long/20_null_value.yml': ['*'], - 'unsigned_long/30_multi_fields.yml': ['*'], - 'unsigned_long/40_different_numeric.yml': ['*'], - 'unsigned_long/50_script_values.yml': ['*'], - - // the v8 client flattens the body into the parent object - 'platinum/users/10_basic.yml': ['Test put user with different username in body'], - - // docker issue? 
- 'watcher/execute_watch/60_http_input.yml': ['*'], - - // the checks are correct, but for some reason the test is failing on js side - // I bet is because the backslashes in the rg - 'watcher/execute_watch/70_invalid.yml': ['*'], - 'watcher/put_watch/10_basic.yml': ['*'], - 'xpack/15_basic.yml': ['*'], - - // test that are failing that needs to be investigated - // the error cause can either be in the yaml test or in the specification - - // start should be a string in the yaml test - 'platinum/ml/delete_job_force.yml': ['Test force delete an open job that is referred by a started datafeed'], - 'platinum/ml/evaluate_data_frame.yml': ['*'], - 'platinum/ml/get_datafeed_stats.yml': ['*'], - - // start should be a string in the yaml test - 'platinum/ml/start_stop_datafeed.yml': ['*'] +async function doTestBuilder (clientOptions) { + const files = await getAllFiles(yamlFolder) + await buildTests(files, clientOptions) } -function runner (opts = {}) { - const options = { node: opts.node } - if (opts.isXPack) { - options.tls = { - ca: readFileSync(join(__dirname, '..', '..', '.buildkite', 'certs', 'ca.crt'), 'utf8'), - rejectUnauthorized: false - } - } - const client = new Client(options) - log('Loading yaml suite') - start({ client, isXPack: opts.isXPack }) +if (require.main === module) { + const node = process.env.TEST_ES_SERVER + const apiKey = process.env.ES_API_SECRET_KEY + const password = process.env.ELASTIC_PASSWORD + assert(node != null, 'Environment variable missing: TEST_ES_SERVER') + assert(apiKey != null || password != null, 'Environment variable missing: ES_API_SECRET_KEY or ELASTIC_PASSWORD') + const clientOptions = { node } + if (apiKey != null) { + clientOptions.auth = { apiKey } + } else { + clientOptions.auth = { username: 'elastic', password } + } + doTestBuilder(clientOptions) + .then(() => process.exit(0)) .catch(err => { - if (err.name === 'ResponseError') { - console.error(err) - console.log(JSON.stringify(err.meta, null, 2)) - } else { - 
console.error(err) - } + console.error(err) process.exit(1) }) } - -async function waitCluster (client, times = 0) { - try { - await client.cluster.health({ wait_for_status: 'green', timeout: '50s' }) - } catch (err) { - if (++times < 10) { - await sleep(5000) - return waitCluster(client, times) - } - console.error(err) - process.exit(1) - } -} - -async function start ({ client, isXPack }) { - log('Waiting for Elasticsearch') - await waitCluster(client) - - const body = await client.info() - const { number: version, build_hash: hash } = body.version - - log(`Downloading artifacts for hash ${hash}...`) - await downloadArtifacts({ hash, version }) - - log(`Testing ${isXPack ? 'Platinum' : 'Free'} api...`) - const junit = createJunitReporter() - const junitTestSuites = junit.testsuites(`Integration test for ${isXPack ? 'Platinum' : 'Free'} api`) - - const stats = { - total: 0, - skip: 0, - pass: 0, - assertions: 0 - } - const folders = getAllFiles(isXPack ? xPackYamlFolder : yamlFolder) - .filter(t => !/(README|TODO)/g.test(t)) - // we cluster the array based on the folder names, - // to provide a better test log output - .reduce((arr, file) => { - const path = file.slice(file.indexOf('/rest-api-spec/test'), file.lastIndexOf('/')) - let inserted = false - for (let i = 0; i < arr.length; i++) { - if (arr[i][0].includes(path)) { - inserted = true - arr[i].push(file) - break - } - } - if (!inserted) arr.push([file]) - return arr - }, []) - - const totalTime = now() - for (const folder of folders) { - // pretty name - const apiName = folder[0].slice( - folder[0].indexOf(`${sep}rest-api-spec${sep}test`) + 19, - folder[0].lastIndexOf(sep) - ) - - log('Testing ' + apiName.slice(1)) - const apiTime = now() - - for (const file of folder) { - const testRunner = build({ - client, - version, - isXPack: file.includes('platinum') - }) - const fileTime = now() - const data = readFileSync(file, 'utf8') - // get the test yaml (as object), some file has multiple yaml documents inside, 
- // every document is separated by '---', so we split on the separator - // and then we remove the empty strings, finally we parse them - const tests = data - .split('\n---\n') - .map(s => s.trim()) - // empty strings - .filter(Boolean) - .map(parse) - // null values - .filter(Boolean) - - // get setup and teardown if present - let setupTest = null - let teardownTest = null - for (const test of tests) { - if (test.setup) setupTest = test.setup - if (test.teardown) teardownTest = test.teardown - } - - const cleanPath = file.slice(file.lastIndexOf(apiName)) - - // skip if --suite CLI arg doesn't match - if (options.suite && !cleanPath.endsWith(options.suite)) continue - - log(' ' + cleanPath) - const junitTestSuite = junitTestSuites.testsuite(apiName.slice(1) + ' - ' + cleanPath) - - for (const test of tests) { - const testTime = now() - const name = Object.keys(test)[0] - - // skip setups, teardowns and anything that doesn't match --test flag when present - if (name === 'setup' || name === 'teardown') continue - if (options.test && !name.endsWith(options.test)) continue - - const junitTestCase = junitTestSuite.testcase(name, `node_${process.version}: ${cleanPath}`) - - stats.total += 1 - if (shouldSkip(isXPack, file, name)) { - stats.skip += 1 - junitTestCase.skip('This test is in the skip list of the client') - junitTestCase.end() - continue - } - log(' - ' + name) - try { - await testRunner.run(setupTest, test[name], teardownTest, stats, junitTestCase) - stats.pass += 1 - } catch (err) { - junitTestCase.failure(err) - junitTestCase.end() - junitTestSuite.end() - junitTestSuites.end() - generateJunitXmlReport(junit, isXPack ? 'platinum' : 'free') - err.meta = JSON.stringify(err.meta ?? 
{}, null, 2) - console.error(err) - - if (options.bail) { - process.exit(1) - } else { - continue - } - } - const totalTestTime = now() - testTime - junitTestCase.end() - if (totalTestTime > MAX_TEST_TIME) { - log(' took too long: ' + ms(totalTestTime)) - } else { - log(' took: ' + ms(totalTestTime)) - } - } - junitTestSuite.end() - const totalFileTime = now() - fileTime - if (totalFileTime > MAX_FILE_TIME) { - log(` ${cleanPath} took too long: ` + ms(totalFileTime)) - } else { - log(` ${cleanPath} took: ` + ms(totalFileTime)) - } - } - const totalApiTime = now() - apiTime - if (totalApiTime > MAX_API_TIME) { - log(`${apiName} took too long: ` + ms(totalApiTime)) - } else { - log(`${apiName} took: ` + ms(totalApiTime)) - } - } - junitTestSuites.end() - generateJunitXmlReport(junit, isXPack ? 'platinum' : 'free') - log(`Total testing time: ${ms(now() - totalTime)}`) - log(`Test stats: - - Total: ${stats.total} - - Skip: ${stats.skip} - - Pass: ${stats.pass} - - Fail: ${stats.total - (stats.pass + stats.skip)} - - Assertions: ${stats.assertions} - `) -} - -function log (text) { - process.stdout.write(text + '\n') -} - -function now () { - const ts = process.hrtime() - return (ts[0] * 1e3) + (ts[1] / 1e6) -} - -function parse (data) { - let doc - try { - doc = yaml.load(data, { schema: yaml.CORE_SCHEMA }) - } catch (err) { - console.error(err) - return - } - return doc -} - -function generateJunitXmlReport (junit, suite) { - writeFileSync( - join(__dirname, '..', '..', `${suite}-report-junit.xml`), - junit.prettyPrint() - ) -} - -if (require.main === module) { - const scheme = process.env.TEST_SUITE === 'platinum' ? 
'https' : 'http' - const node = process.env.TEST_ES_SERVER || `${scheme}://elastic:changeme@localhost:9200` - const opts = { - node, - isXPack: process.env.TEST_SUITE !== 'free' - } - runner(opts) -} - -const shouldSkip = (isXPack, file, name) => { - if (options.suite || options.test) return false - - let list = Object.keys(freeSkips) - for (let i = 0; i < list.length; i++) { - const freeTest = freeSkips[list[i]] - for (let j = 0; j < freeTest.length; j++) { - if (file.endsWith(list[i]) && (name === freeTest[j] || freeTest[j] === '*')) { - const testName = file.slice(file.indexOf(`${sep}elasticsearch${sep}`)) + ' / ' + name - log(`Skipping test ${testName} because it is denylisted in the free test suite`) - return true - } - } - } - - if (file.includes('x-pack') || isXPack) { - list = Object.keys(platinumDenyList) - for (let i = 0; i < list.length; i++) { - const platTest = platinumDenyList[list[i]] - for (let j = 0; j < platTest.length; j++) { - if (file.endsWith(list[i]) && (name === platTest[j] || platTest[j] === '*')) { - const testName = file.slice(file.indexOf(`${sep}elasticsearch${sep}`)) + ' / ' + name - log(`Skipping test ${testName} because it is denylisted in the platinum test suite`) - return true - } - } - } - } - - return false -} - -const getAllFiles = dir => - readdirSync(dir).reduce((files, file) => { - const name = join(dir, file) - const isDirectory = statSync(name).isDirectory() - return isDirectory ? [...files, ...getAllFiles(name)] : [...files, name] - }, []) - -module.exports = runner diff --git a/test/integration/reporter.js b/test/integration/reporter.js deleted file mode 100644 index 165478c50..000000000 --- a/test/integration/reporter.js +++ /dev/null @@ -1,115 +0,0 @@ -/* - * Copyright Elasticsearch B.V. 
and contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -'use strict' - -const assert = require('node:assert') -const { create } = require('xmlbuilder2') - -function createJunitReporter () { - const report = {} - - return { testsuites, prettyPrint } - - function prettyPrint () { - return create(report).end({ prettyPrint: true }) - } - - function testsuites (name) { - assert(name, 'The testsuites name is required') - assert(report.testsuites === undefined, 'Cannot set more than one testsuites block') - const startTime = Date.now() - - report.testsuites = { - '@id': new Date().toISOString(), - '@name': name - } - - const testsuiteList = [] - - return { - testsuite: createTestSuite(testsuiteList), - end () { - report.testsuites['@time'] = Math.round((Date.now() - startTime) / 1000) - report.testsuites['@tests'] = testsuiteList.reduce((acc, val) => { - acc += val['@tests'] - return acc - }, 0) - report.testsuites['@failures'] = testsuiteList.reduce((acc, val) => { - acc += val['@failures'] - return acc - }, 0) - report.testsuites['@skipped'] = testsuiteList.reduce((acc, val) => { - acc += val['@skipped'] - return acc - }, 0) - if (testsuiteList.length) { - report.testsuites.testsuite = testsuiteList - } - } - } - } - - function createTestSuite (testsuiteList) { - return function testsuite (name) { - assert(name, 'The testsuite name is required') - const startTime = Date.now() - const suite = { - '@id': new Date().toISOString(), - '@name': name - } - const testcaseList = [] - testsuiteList.push(suite) - return { - testcase: createTestCase(testcaseList), - end () { - suite['@time'] = Math.round((Date.now() - startTime) / 1000) - suite['@tests'] = testcaseList.length - suite['@failures'] = testcaseList.filter(t => t.failure).length - suite['@skipped'] = testcaseList.filter(t => t.skipped).length - if (testcaseList.length) { - suite.testcase = testcaseList - } - } - } - } - } - - function createTestCase (testcaseList) { - return function testcase (name, file) { - 
assert(name, 'The testcase name is required') - const startTime = Date.now() - const tcase = { - '@id': new Date().toISOString(), - '@name': name - } - if (file) tcase['@file'] = file - testcaseList.push(tcase) - return { - failure (error) { - assert(error, 'The failure error object is required') - tcase.failure = { - '#': error.stack, - '@message': error.message, - '@type': error.code - } - }, - skip (reason) { - if (typeof reason !== 'string') { - reason = JSON.stringify(reason, null, 2) - } - tcase.skipped = { - '#': reason - } - }, - end () { - tcase['@time'] = Math.round((Date.now() - startTime) / 1000) - } - } - } - } -} - -module.exports = createJunitReporter diff --git a/test/integration/test-builder.js b/test/integration/test-builder.js new file mode 100644 index 000000000..be75a6e59 --- /dev/null +++ b/test/integration/test-builder.js @@ -0,0 +1,391 @@ +/* + * Copyright Elasticsearch B.V. and contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +'use strict' + +const { join, sep } = require('node:path') +const { readFileSync, writeFileSync, promises } = require('node:fs') +const yaml = require('js-yaml') +const { rimraf } = require('rimraf') +const { mkdir } = promises + +const generatedTestsPath = join(__dirname, '..', '..', 'generated-tests') + +const stackSkips = [ + // test builder doesn't support "(local)" or `exists` action + 'indices/resolve_cluster.yml' +] + +const serverlessSkips = [ + // TODO: sql.getAsync does not set a content-type header but ES expects one + // transport only sets a content-type if the body is not empty + 'sql/10_basic.yml', + // TODO: bulk call in setup fails due to "malformed action/metadata line" + // bulk body is being sent as a Buffer, unsure if related. 
+ 'transform/10_basic.yml', + // TODO: scripts_painless_execute expects {"result":"0.1"}, gets {"result":"0"} + // body sent as Buffer, unsure if related + 'script/10_basic.yml', + // TODO: expects {"outlier_detection.auc_roc.value":0.99995}, gets {"outlier_detection.auc_roc.value":0.5} + // remove if/when https://github.com/elastic/elasticsearch-clients-tests/issues/37 is resolved + 'machine_learning/data_frame_evaluate.yml', + // TODO: Cannot perform requested action because job [job-crud-test-apis] is not open + 'machine_learning/jobs_crud.yml', + // TODO: test runner needs to support ignoring 410 errors + 'enrich/10_basic.yml', + // TODO: parameter `enabled` is not allowed in source + // Same underlying problem as https://github.com/elastic/elasticsearch-clients-tests/issues/55 + 'cluster/component_templates.yml', + // TODO: expecting `ct_field` field mapping to be returned, but instead only finds `field` + 'indices/simulate_template.yml', + 'indices/simulate_index_template.yml', + // TODO: test currently times out + 'inference/10_basic.yml', + // TODO: Fix: "Trained model deployment [test_model] is not allocated to any nodes" + 'machine_learning/20_trained_model_serverless.yml', + // TODO: query_rules api not available yet + 'query_rules/10_query_rules.yml', + 'query_rules/20_rulesets.yml', + 'query_rules/30_test.yml', + // TODO: security.putRole API not available + 'security/50_roles_serverless.yml', + // TODO: expected undefined to equal 'some_table' + 'entsearch/50_connector_updates.yml', + // TODO: resource_not_found_exception + 'tasks_serverless.yml', +] + +function parse (data) { + let doc + try { + doc = yaml.load(data, { schema: yaml.CORE_SCHEMA }) + } catch (err) { + console.error(err) + return + } + return doc +} + +async function build (yamlFiles, clientOptions) { + await rimraf(generatedTestsPath) + await mkdir(generatedTestsPath, { recursive: true }) + + for (const file of yamlFiles) { + const apiName = file.split(`${sep}tests${sep}`)[1] + const 
data = readFileSync(file, 'utf8') + + const tests = data + .split('\n---\n') + .map(s => s.trim()) + // empty strings + .filter(Boolean) + .map(parse) + // null values + .filter(Boolean) + + let code = "import { test } from 'tap'\n" + code += "import { Client } from '@elastic/elasticsearch'\n\n" + + const requires = tests.find(test => test.requires != null) + let skip = new Set() + if (requires != null) { + const { serverless = true, stack = true } = requires.requires + if (!serverless) skip.add('process.env.TEST_ES_SERVERLESS === "1"') + if (!stack) skip.add('process.env.TEST_ES_STACK === "1"') + } + + if (stackSkips.includes(apiName)) skip.add('process.env.TEST_ES_STACK === "1"') + if (serverlessSkips.includes(apiName)) skip.add('process.env.TEST_ES_SERVERLESS === "1"') + + if (skip.size > 0) { + code += `test('${apiName}', { skip: ${Array.from(skip).join(' || ')} }, t => {\n` + } else { + code += `test('${apiName}', t => {\n` + } + + for (const test of tests) { + if (test.setup != null) { + code += ' t.before(async () => {\n' + code += indent(buildActions(test.setup), 4) + code += ' })\n\n' + } + + if (test.teardown != null) { + code += ' t.after(async () => {\n' + code += indent(buildActions(test.teardown), 4) + code += ' })\n\n' + } + + for (const key of Object.keys(test).filter(k => !['setup', 'teardown', 'requires'].includes(k))) { + if (test[key].find(action => Object.keys(action)[0] === 'skip') != null) { + code += ` t.test('${key}', { skip: true }, async t => {\n` + } else { + code += ` t.test('${key}', async t => {\n` + } + code += indent(buildActions(test[key]), 4) + code += '\n t.end()\n' + code += ' })\n' + } + // if (test.requires != null) requires = test.requires + } + + code += '\n t.end()\n' + code += '})\n' + + const testDir = join(generatedTestsPath, apiName.split(sep).slice(0, -1).join(sep)) + const testFile = join(testDir, apiName.split(sep).pop().replace(/\.ya?ml$/, '.mjs')) + await mkdir(testDir, { recursive: true }) + 
writeFileSync(testFile, code, 'utf8') + } + + function buildActions (actions) { + let code = `const client = new Client(${JSON.stringify(clientOptions, null, 2)})\n` + code += 'let response\n\n' + + for (const action of actions) { + const key = Object.keys(action)[0] + switch (key) { + case 'do': + code += buildDo(action.do) + break + case 'set': + code += buildSet(action.set) + break + case 'transform_and_set': + code += buildTransformAndSet(action.transform_and_set) + break + case 'match': + code += buildMatch(action.match) + break + case 'lt': + code += buildLt(action.lt) + break + case 'lte': + code += buildLte(action.lte) + break + case 'gt': + code += buildGt(action.gt) + break + case 'gte': + code += buildGte(action.gte) + break + case 'length': + code += buildLength(action.length) + break + case 'is_true': + code += buildIsTrue(action.is_true) + break + case 'is_false': + code += buildIsFalse(action.is_false) + break + case 'contains': + code += buildContains(action.contains) + case 'skip': + break + default: + break + } + } + return code + } +} + +function buildDo (action) { + let code = '' + const keys = Object.keys(action) + if (keys.includes('catch')) { + code += 'try {\n' + code += indent(buildRequest(action)) + code += '} catch (err) {\n' + code += ` t.match(err.message, ${buildValLiteral(action.catch)})\n` + code += '}\n' + } else { + code += buildRequest(action) + } + return code +} + +function buildRequest(action) { + let code = '' + for (const key of Object.keys(action)) { + if (key === 'catch') continue + const params = action[key] + const options = { meta: true } + if (params.ignore != null) { + if (Array.isArray(params.ignore)) { + options.ignore = params.ignore + } else { + options.ignore = [params.ignore] + } + } + code += `response = await client.${toCamelCase(key)}(${buildApiParams(action[key])}, ${JSON.stringify(options)})\n` + } + return code +} + +function buildSet (action) { + const key = Object.keys(action)[0] + const varName = 
action[key] + const path = buildPath(key) + return `let ${varName} = response.body${path}\n` +} + +function buildTransformAndSet (action) { + return `// TODO buildTransformAndSet: ${JSON.stringify(action)}\n` +} + +function buildMatch (action) { + const key = Object.keys(action)[0] + const path = buildPath(key) + const val = buildValLiteral(action[key]) + let lookup = `response.body${path}` + + if (lookup === 'response.body[body]') lookup = 'JSON.stringify(response.body) === "" ? null : JSON.stringify(response.body)' + if (lookup === "response.body['']") lookup = 'JSON.stringify(response.body) === "" ? null : JSON.stringify(response.body)' + + if (val.startsWith('/')) { + return `t.ok(${lookup}.match(${val}), '${key} should match regex ${val}')\n` + } else if (typeof action[key] === 'object') { + return `t.match(${lookup}, ${val})\n` + } else { + return `t.equal(${lookup}, ${val})\n` + } +} + +function buildLt (action) { + const key = Object.keys(action)[0] + const path = buildPath(key) + const val = buildValLiteral(action[key]) + return `t.ok(response.body${path} < ${val})\n` +} + +function buildLte (action) { + const key = Object.keys(action)[0] + const path = buildPath(key) + const val = buildValLiteral(action[key]) + return `t.ok(response.body${path} <= ${val})\n` +} + +function buildGt (action) { + const key = Object.keys(action)[0] + const path = buildPath(key) + const val = buildValLiteral(action[key]) + return `t.ok(response.body${path} > ${val})\n` +} + +function buildGte (action) { + const key = Object.keys(action)[0] + const path = buildPath(key) + const val = buildValLiteral(action[key]) + return `t.ok(response.body${path} >= ${val})\n` +} + +function buildLength (action) { + const key = Object.keys(action)[0] + const path = buildPath(key) + const val = buildValLiteral(action[key]) + return `t.equal(response.body${path}.length, ${val})\n` +} + +function buildIsTrue (action) { + let lookup = `response.body${buildPath(action)}` + return `t.ok(${lookup} 
=== "true" || (Boolean(${lookup}) && ${lookup} !== "false"), \`${lookup} should be truthy. found: \$\{JSON.stringify(${lookup})\}\`)\n` +} + +function buildIsFalse (action) { + let lookup = `response.body${buildPath(action)}` + return `t.ok(${lookup} === "false" || !Boolean(${lookup}), \`${lookup} should be falsy. found: \$\{JSON.stringify(${lookup})\}\`)\n` +} + +function buildContains (action) { + const key = Object.keys(action)[0] + const path = buildPath(key) + const val = buildValLiteral(action[key]) + return `t.ok(response.body${path}.includes(${val}))\n` +} + +function buildApiParams (params) { + if (Object.keys(params).length === 0) { + return 'undefined' + } else { + const out = {} + Object.keys(params).filter(k => k !== 'ignore').forEach(k => out[k] = params[k]) + return buildValLiteral(out) + } +} + +function toCamelCase (name) { + return name.replace(/_([a-z])/g, g => g[1].toUpperCase()) +} + +function indent (str, spaces) { + const tabs = ' '.repeat(spaces) + return str.replace(/\s+$/, '').split('\n').map(l => `${tabs}${l}`).join('\n') + '\n' +} + +function buildPath (path) { + if (path === 'response') return '' + return path.split('.').map(step => { + if (step === 'response' || step === 'body') { + return '' + } else if (parseInt(step, 10).toString() === step) { + return `[${step}]` + } else if (step.match(/^\$[a-zA-Z0-9_]+$/)) { + const lookup = step.replace(/^\$/, '') + if (lookup === 'body') return '' + return `[${lookup}]` + } else if (step === '') { + return '' + } else { + return `['${step}']` + } + }).join('') +} + +function buildValLiteral (val) { + if (isRegExp(val)) { + return JSON.stringify(val).replace(/^"/, '').replace(/"$/, '').replace(/\\\\/, '\\') + } else if (isVariable(val)) { + if (val === '$body') return '' + return val.replace(/^\$/, '') + } else if (isPlainObject(val)) { + return JSON.stringify(cleanObject(val), null, 2).replace(/"\$([a-zA-Z0-9_]+)"/g, '$1') + } else { + return JSON.stringify(val) + } +} + +function isRegExp 
(str) { + return typeof str === 'string' && str.startsWith('/') && str.endsWith('/') +} + +function isVariable (str) { + return typeof str === 'string' && str.match(/^\$[a-zA-Z0-9_]+$/) != null +} + +function cleanObject (obj) { + Object.keys(obj).forEach(key => { + let val = obj[key] + if (typeof val === 'string' && val.trim().startsWith('{') && val.trim().endsWith('}')) { + // attempt to parse as object + try { + val = JSON.parse(val) + } catch { + } + } else if (isPlainObject(val)) { + val = cleanObject(val) + } else if (Array.isArray(val)) { + val = val.map(item => isPlainObject(item) ? cleanObject(item) : item) + } + obj[key] = val + }) + return obj +} + +function isPlainObject(obj) { + return typeof obj === 'object' && !Array.isArray(obj) && obj != null +} + +module.exports = build diff --git a/test/integration/test-runner.js b/test/integration/test-runner.js deleted file mode 100644 index 856b23567..000000000 --- a/test/integration/test-runner.js +++ /dev/null @@ -1,1072 +0,0 @@ -/* - * Copyright Elasticsearch B.V. 
and contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -'use strict' - -/* eslint camelcase: 0 */ - -const chai = require('chai') -const semver = require('semver') -const helper = require('./helper') -const { join } = require('path') -const { locations } = require('../../scripts/download-artifacts') -const packageJson = require('../../package.json') - -chai.config.showDiff = true -chai.config.truncateThreshold = 0 -const { assert } = chai - -const { delve, to, isXPackTemplate, sleep, updateParams } = helper - -const supportedFeatures = [ - 'gtelte', - 'regex', - 'benchmark', - 'stash_in_path', - 'groovy_scripting', - 'headers', - 'transform_and_set', - 'catch_unauthorized', - 'arbitrary_key' -] - -function build (opts = {}) { - const client = opts.client - const esVersion = opts.version - const isXPack = opts.isXPack - const stash = new Map() - let response = null - - /** - * Runs a cleanup, removes all indices, aliases, templates, and snapshots - * @returns {Promise} - */ - async function cleanup (isXPack) { - response = null - stash.clear() - - await client.cluster.health({ - wait_for_no_initializing_shards: true, - timeout: '70s', - level: 'shards' - }) - - if (isXPack) { - // wipe rollup jobs - const jobsList = await client.rollup.getJobs({ id: '_all' }) - const jobsIds = jobsList.jobs.map(j => j.config.id) - await helper.runInParallel( - client, 'rollup.stopJob', - jobsIds.map(j => ({ id: j, wait_for_completion: true })) - ) - await helper.runInParallel( - client, 'rollup.deleteJob', - jobsIds.map(j => ({ id: j })) - ) - - // delete slm policies - const policies = await client.slm.getLifecycle() - await helper.runInParallel( - client, 'slm.deleteLifecycle', - Object.keys(policies).map(p => ({ policy_id: p })) - ) - - // remove 'x_pack_rest_user', used in some xpack test - try { - await client.security.deleteUser({ username: 'x_pack_rest_user' }, { ignore: [404] }) - } catch { - // do nothing - } - - const searchableSnapshotIndices = await 
client.cluster.state({ - metric: 'metadata', - filter_path: 'metadata.indices.*.settings.index.store.snapshot' - }) - if (searchableSnapshotIndices.metadata != null && searchableSnapshotIndices.metadata.indices != null) { - await helper.runInParallel( - client, 'indices.delete', - Object.keys(searchableSnapshotIndices.metadata.indices).map(i => ({ index: i })), - { ignore: [404] } - ) - } - } - - // clean snapshots - const repositories = await client.snapshot.getRepository() - for (const repository of Object.keys(repositories)) { - await client.snapshot.delete({ repository, snapshot: '*' }, { ignore: [404] }) - await client.snapshot.deleteRepository({ name: repository }, { ignore: [404] }) - } - - if (isXPack) { - // clean data streams - await client.indices.deleteDataStream({ name: '*', expand_wildcards: 'all' }) - } - - // clean all indices - await client.indices.delete({ - index: [ - '*', - '-.ds-ilm-history-*' - ], - expand_wildcards: 'open,closed,hidden' - }, { - ignore: [404] - }) - - // delete templates - const templates = await client.cat.templates({ h: 'name' }) - for (const template of templates.split('\n').filter(Boolean)) { - if (isXPackTemplate(template)) continue - const body = await client.indices.deleteTemplate({ name: template }, { ignore: [404] }) - if (JSON.stringify(body).includes(`index_template [${template}] missing`)) { - await client.indices.deleteIndexTemplate({ name: template }, { ignore: [404] }) - } - } - - // delete component template - const body = await client.cluster.getComponentTemplate() - const components = body.component_templates.filter(c => !isXPackTemplate(c.name)).map(c => c.name) - if (components.length > 0) { - try { - await client.cluster.deleteComponentTemplate({ name: components.join(',') }, { ignore: [404] }) - } catch { - // do nothing - } - } - - // Remove any cluster setting - const settings = await client.cluster.getSettings() - const newSettings = {} - for (const setting in settings) { - if 
(Object.keys(settings[setting]).length === 0) continue - newSettings[setting] = {} - for (const key in settings[setting]) { - newSettings[setting][`${key}.*`] = null - } - } - if (Object.keys(newSettings).length > 0) { - await client.cluster.putSettings(newSettings) - } - - if (isXPack) { - // delete ilm policies - const preserveIlmPolicies = [ - 'ilm-history-ilm-policy', - 'slm-history-ilm-policy', - 'watch-history-ilm-policy', - 'watch-history-ilm-policy-16', - 'ml-size-based-ilm-policy', - 'logs', - 'metrics', - 'synthetics', - '7-days-default', - '30-days-default', - '90-days-default', - '180-days-default', - '365-days-default', - '.fleet-actions-results-ilm-policy', - '.fleet-file-data-ilm-policy', - '.fleet-files-ilm-policy', - '.deprecation-indexing-ilm-policy', - '.monitoring-8-ilm-policy', - 'behavioral_analytics-events-default_policy' - ] - const policies = await client.ilm.getLifecycle() - for (const policy in policies) { - if (preserveIlmPolicies.includes(policy)) continue - await client.ilm.deleteLifecycle({ name: policy }) - } - - // delete autofollow patterns - const patterns = await client.ccr.getAutoFollowPattern() - for (const { name } of patterns.patterns) { - await client.ccr.deleteAutoFollowPattern({ name }) - } - - // delete all tasks - const nodesTask = await client.tasks.list() - const tasks = Object.keys(nodesTask.nodes) - .reduce((acc, node) => { - const { tasks } = nodesTask.nodes[node] - Object.keys(tasks).forEach(id => { - if (tasks[id].cancellable) acc.push(id) - }) - return acc - }, []) - - await helper.runInParallel( - client, 'tasks.cancel', - tasks.map(id => ({ task_id: id })) - ) - - // cleanup ml - const jobsList = await client.ml.getJobs() - const jobsIds = jobsList.jobs.map(j => j.job_id) - await helper.runInParallel( - client, 'ml.deleteJob', - jobsIds.map(j => ({ job_id: j, force: true })) - ) - - const dataFrame = await client.ml.getDataFrameAnalytics() - const dataFrameIds = dataFrame.data_frame_analytics.map(d => d.id) - 
await helper.runInParallel( - client, 'ml.deleteDataFrameAnalytics', - dataFrameIds.map(d => ({ id: d, force: true })) - ) - - const calendars = await client.ml.getCalendars() - const calendarsId = calendars.calendars.map(c => c.calendar_id) - await helper.runInParallel( - client, 'ml.deleteCalendar', - calendarsId.map(c => ({ calendar_id: c })) - ) - - const training = await client.ml.getTrainedModels() - const trainingId = training.trained_model_configs - .filter(t => t.created_by !== '_xpack') - .map(t => t.model_id) - await helper.runInParallel( - client, 'ml.deleteTrainedModel', - trainingId.map(t => ({ model_id: t, force: true })) - ) - - // cleanup transforms - const transforms = await client.transform.getTransform() - const transformsId = transforms.transforms.map(t => t.id) - await helper.runInParallel( - client, 'transform.deleteTransform', - transformsId.map(t => ({ transform_id: t, force: true })) - ) - } - - const shutdownNodes = await client.shutdown.getNode() - if (shutdownNodes._nodes == null && shutdownNodes.cluster_name == null) { - for (const node of shutdownNodes.nodes) { - await client.shutdown.deleteNode({ node_id: node.node_id }) - } - } - - // wait for pending task before resolving the promise - await sleep(100) - while (true) { - const body = await client.cluster.pendingTasks() - if (body.tasks.length === 0) break - await sleep(500) - } - } - - /** - * Runs the given test. 
- * It runs the test components in the following order: - * - skip check - * - xpack user - * - setup - * - the actual test - * - teardown - * - xpack cleanup - * - cleanup - * @param {object} setup (null if not needed) - * @param {object} test - * @param {object} teardown (null if not needed) - * @returns {Promise} - */ - async function run (setup, test, teardown, stats, junit) { - // if we should skip a feature in the setup/teardown section - // we should skip the entire test file - const skip = getSkip(setup) || getSkip(teardown) - if (skip && shouldSkip(esVersion, skip)) { - junit.skip(skip) - logSkip(skip) - return - } - - if (isXPack) { - // Some xpack test requires this user - // tap.comment('Creating x-pack user') - try { - await client.security.putUser({ - username: 'x_pack_rest_user', - password: 'x-pack-test-password', - roles: ['superuser'] - }) - } catch (err) { - assert.ifError(err, 'should not error: security.putUser') - } - } - - if (setup) await exec('Setup', setup, stats, junit) - - await exec('Test', test, stats, junit) - - if (teardown) await exec('Teardown', teardown, stats, junit) - - await cleanup(isXPack) - } - - /** - * Fill the stashed values of a command - * let's say the we have stashed the `master` value, - * is_true: nodes.$master.transport.profiles - * becomes - * is_true: nodes.new_value.transport.profiles - * @param {object|string} the action to update - * @returns {object|string} the updated action - */ - function fillStashedValues (obj) { - if (typeof obj === 'string') { - return getStashedValues(obj) - } - // iterate every key of the object - for (const key in obj) { - const val = obj[key] - // if the key value is a string, and the string includes '${' - // that we must update the content of '${...}'. 
- // eg: 'Basic ${auth}' we search the stahed value 'auth' - // and the resulting value will be 'Basic valueOfAuth' - if (typeof val === 'string' && val.includes('${')) { - while (obj[key].includes('${')) { - const val = obj[key] - const start = val.indexOf('${') - const end = val.indexOf('}', val.indexOf('${')) - const stashedKey = val.slice(start + 2, end) - const stashed = stash.get(stashedKey) - obj[key] = val.slice(0, start) + stashed + val.slice(end + 1) - } - continue - } - // handle json strings, eg: '{"hello":"$world"}' - if (typeof val === 'string' && val.includes('"$')) { - while (obj[key].includes('"$')) { - const val = obj[key] - const start = val.indexOf('"$') - const end = val.indexOf('"', start + 1) - const stashedKey = val.slice(start + 2, end) - const stashed = '"' + stash.get(stashedKey) + '"' - obj[key] = val.slice(0, start) + stashed + val.slice(end + 1) - } - continue - } - // if the key value is a string, and the string includes '$' - // we run the "update value" code - if (typeof val === 'string' && val.includes('$')) { - // update the key value - obj[key] = getStashedValues(val) - continue - } - - // go deep in the object - if (val !== null && typeof val === 'object') { - fillStashedValues(val) - } - } - - return obj - - function getStashedValues (str) { - const arr = str - // we split the string on the dots - // handle the key with a dot inside that is not a part of the path - .split(/(? { - if (part[0] === '$') { - const stashed = stash.get(part.slice(1)) - if (stashed == null) { - throw new Error(`Cannot find stashed value '${part}' for '${JSON.stringify(obj)}'`) - } - return stashed - } - return part - }) - - // recreate the string value only if the array length is higher than one - // otherwise return the first element which in some test this could be a number, - // and call `.join` will coerce it to a string. - return arr.length > 1 ? 
arr.join('.') : arr[0] - } - } - - /** - * Stashes a value - * @param {string} the key to search in the previous response - * @param {string} the name to identify the stashed value - * @returns {TestRunner} - */ - function set (key, name) { - if (key.includes('_arbitrary_key_')) { - let currentVisit = null - for (const path of key.split('.')) { - if (path === '_arbitrary_key_') { - const keys = Object.keys(currentVisit) - const arbitraryKey = keys[getRandomInt(0, keys.length)] - stash.set(name, arbitraryKey) - } else { - currentVisit = delve(response, path) - } - } - } else { - stash.set(name, delve(response, key)) - } - } - - /** - * Applies a given transformation and stashes the result. - * @param {string} the name to identify the stashed value - * @param {string} the transformation function as string - * @returns {TestRunner} - */ - function transform_and_set (name, transform) { - if (/base64EncodeCredentials/.test(transform)) { - const [user, password] = transform - .slice(transform.indexOf('(') + 1, -1) - .replace(/ /g, '') - .split(',') - const userAndPassword = `${delve(response, user)}:${delve(response, password)}` - stash.set(name, Buffer.from(userAndPassword).toString('base64')) - } else { - throw new Error(`Unknown transform: '${transform}'`) - } - } - - /** - * Runs a client command - * @param {object} the action to perform - * @returns {Promise} - */ - async function doAction (action, stats) { - const cmd = await updateParams(parseDo(action)) - let api - try { - api = delve(client, cmd.method).bind(client) - } catch (err) { - console.error(`\nError: Cannot find the method '${cmd.method}' in the client.\n`) - process.exit(1) - } - - if (action.headers) { - switch (action.headers['Content-Type'] || action.headers['content-type']) { - case 'application/json': - delete action.headers['Content-Type'] - delete action.headers['content-type'] - action.headers['Content-Type'] = `application/vnd.elasticsearch+json; 
compatible-with=${packageJson.version.split('.')[0]}` - break - case 'application/x-ndjson': - delete action.headers['Content-Type'] - delete action.headers['content-type'] - action.headers['Content-Type'] = `application/vnd.elasticsearch+x-ndjson; compatible-with=${packageJson.version.split('.')[0]}` - break - } - } - - const options = { ignore: cmd.params.ignore, headers: action.headers, meta: true } - if (!Array.isArray(options.ignore)) options.ignore = [options.ignore] - if (cmd.params.ignore) delete cmd.params.ignore - - // ndjson apis should always send the body as an array - if (isNDJson(cmd.api) && !Array.isArray(cmd.params.body)) { - cmd.params.body = [cmd.params.body] - } - - if (typeof cmd.params.body === 'string' && !isNDJson(cmd.api)) { - cmd.params.body = JSON.parse(cmd.params.body) - } - - let err, result - try { - [err, result] = await to(api(cmd.params, options)) - } catch (exc) { - if (JSON.stringify(exc).includes('resource_already_exists_exception')) { - console.warn(`Resource already exists: ${JSON.stringify(cmd.params)}`) - // setup task was already done because cleanup didn't catch it? do nothing - } else { - throw exc - } - } - let warnings = result ? result.warnings : null - const body = result ? 
result.body : null - - if (action.warnings && warnings === null) { - assert.fail('We should get a warning header', action.warnings) - } else if (!action.warnings && warnings !== null) { - // if there is only the 'default shard will change' - // warning we skip the check, because the yaml - // spec may not be updated - let hasDefaultShardsWarning = false - warnings.forEach(h => { - if (/default\snumber\sof\sshards/g.test(h)) { - hasDefaultShardsWarning = true - } - }) - - if (hasDefaultShardsWarning === true && warnings.length > 1) { - assert.fail('We are not expecting warnings', warnings) - } - } else if (action.warnings && warnings !== null) { - // if the yaml warnings do not contain the - // 'default shard will change' warning - // we do not check it presence in the warnings array - // because the yaml spec may not be updated - let hasDefaultShardsWarning = false - action.warnings.forEach(h => { - if (/default\snumber\sof\sshards/g.test(h)) { - hasDefaultShardsWarning = true - } - }) - - if (hasDefaultShardsWarning === false) { - warnings = warnings.filter(h => !h.test(/default\snumber\sof\sshards/g)) - } - - stats.assertions += 1 - assert.deepEqual(warnings, action.warnings) - } - - if (action.catch) { - stats.assertions += 1 - assert.ok(err, `Expecting an error, but instead got ${JSON.stringify(err)}, the response was ${JSON.stringify(result)}`) - assert.ok( - parseDoError(err, action.catch), - `the error should match: ${action.catch}, found ${JSON.stringify(err.body)}` - ) - try { - response = JSON.parse(err.body) - } catch (e) { - response = err.body - } - } else { - stats.assertions += 1 - assert.ifError(err, `should not error: ${cmd.method}`, action) - response = body - } - } - - /** - * Runs an actual test - * @param {string} the name of the test - * @param {object} the actions to perform - * @returns {Promise} - */ - async function exec (name, actions, stats, junit) { - // tap.comment(name) - for (const action of actions) { - if (action.skip) { - if 
(shouldSkip(esVersion, action.skip)) { - junit.skip(fillStashedValues(action.skip)) - logSkip(fillStashedValues(action.skip)) - break - } - } - - if (action.do) { - await doAction(fillStashedValues(action.do), stats) - } - - if (action.set) { - const key = Object.keys(action.set)[0] - set(fillStashedValues(key), action.set[key]) - } - - if (action.transform_and_set) { - const key = Object.keys(action.transform_and_set)[0] - transform_and_set(key, action.transform_and_set[key]) - } - - if (action.match) { - stats.assertions += 1 - const key = Object.keys(action.match)[0] - match( - // in some cases, the yaml refers to the body with an empty string - key.split('.')[0] === '$body' || key === '' - ? response - : delve(response, fillStashedValues(key)), - key.split('.')[0] === '$body' - ? action.match[key] - : fillStashedValues(action.match)[key], - action.match, - response - ) - } - - if (action.lt) { - stats.assertions += 1 - const key = Object.keys(action.lt)[0] - lt( - delve(response, fillStashedValues(key)), - fillStashedValues(action.lt)[key], - response - ) - } - - if (action.gt) { - stats.assertions += 1 - const key = Object.keys(action.gt)[0] - gt( - delve(response, fillStashedValues(key)), - fillStashedValues(action.gt)[key], - response - ) - } - - if (action.lte) { - stats.assertions += 1 - const key = Object.keys(action.lte)[0] - lte( - delve(response, fillStashedValues(key)), - fillStashedValues(action.lte)[key], - response - ) - } - - if (action.gte) { - stats.assertions += 1 - const key = Object.keys(action.gte)[0] - gte( - delve(response, fillStashedValues(key)), - fillStashedValues(action.gte)[key], - response - ) - } - - if (action.length) { - stats.assertions += 1 - const key = Object.keys(action.length)[0] - length( - key === '$body' || key === '' - ? response - : delve(response, fillStashedValues(key)), - key === '$body' - ? 
action.length[key] - : fillStashedValues(action.length)[key], - response - ) - } - - if (action.is_true) { - stats.assertions += 1 - const isTrue = fillStashedValues(action.is_true) - is_true( - delve(response, isTrue), - isTrue, - response - ) - } - - if (action.is_false) { - stats.assertions += 1 - const isFalse = fillStashedValues(action.is_false) - is_false( - delve(response, isFalse), - isFalse, - response - ) - } - } - } - - return { run } -} - -/** - * Asserts that the given value is truthy - * @param {any} the value to check - * @param {string} an optional message - * @param {any} debugging metadata to attach to any assertion errors - * @returns {TestRunner} - */ -function is_true (val, msg, response) { - try { - assert.ok((typeof val === 'string' && val.toLowerCase() === 'true') || val, `expect truthy value: ${msg} - value: ${JSON.stringify(val)}`) - } catch (err) { - err.response = JSON.stringify(response) - throw err - } -} - -/** - * Asserts that the given value is falsey - * @param {any} the value to check - * @param {string} an optional message - * @param {any} debugging metadata to attach to any assertion errors - * @returns {TestRunner} - */ -function is_false (val, msg, response) { - try { - assert.ok((typeof val === 'string' && val.toLowerCase() === 'false') || !val, `expect falsey value: ${msg} - value: ${JSON.stringify(val)}`) - } catch (err) { - err.response = JSON.stringify(response) - throw err - } -} - -/** - * Asserts that two values are the same - * @param {any} the first value - * @param {any} the second value - * @param {any} debugging metadata to attach to any assertion errors - * @returns {TestRunner} - */ -function match (val1, val2, action, response) { - try { - // both values are objects - if (typeof val1 === 'object' && typeof val2 === 'object') { - assert.deepEqual(val1, val2, typeof action === 'object' ? 
JSON.stringify(action) : action) - // the first value is the body as string and the second a pattern string - } else if ( - typeof val1 === 'string' && typeof val2 === 'string' && - val2.startsWith('/') && (val2.endsWith('/\n') || val2.endsWith('/')) - ) { - const regStr = val2 - .replace(/(^|[^\\])#.*/g, '$1') - .replace(/(^|[^\\])\s+/g, '$1') - .slice(1, -1) - // 'm' adds the support for multiline regex - assert.match(val1, new RegExp(regStr, 'm'), `should match pattern provided: ${val2}, but got: ${val1}: ${JSON.stringify(action)}`) - } else if (typeof val1 === 'string' && typeof val2 === 'string') { - // string comparison - assert.include(val1, val2, `should include pattern provided: ${val2}, but got: ${val1}: ${JSON.stringify(action)}`) - } else { - // everything else - assert.equal(val1, val2, `should be equal: ${val1} - ${val2}, action: ${JSON.stringify(action)}`) - } - } catch (err) { - err.response = JSON.stringify(response) - throw err - } -} - -/** - * Asserts that the first value is less than the second - * It also verifies that the two values are numbers - * @param {any} the first value - * @param {any} the second value - * @param {any} debugging metadata to attach to any assertion errors - * @returns {TestRunner} - */ -function lt (val1, val2, response) { - try { - ;[val1, val2] = getNumbers(val1, val2) - assert.ok(val1 < val2) - } catch (err) { - err.response = JSON.stringify(response) - throw err - } -} - -/** - * Asserts that the first value is greater than the second - * It also verifies that the two values are numbers - * @param {any} the first value - * @param {any} the second value - * @param {any} debugging metadata to attach to any assertion errors - * @returns {TestRunner} - */ -function gt (val1, val2, response) { - try { - ;[val1, val2] = getNumbers(val1, val2) - assert.ok(val1 > val2) - } catch (err) { - err.response = JSON.stringify(response) - throw err - } -} - -/** - * Asserts that the first value is less than or equal the second - * 
It also verifies that the two values are numbers - * @param {any} the first value - * @param {any} the second value - * @param {any} debugging metadata to attach to any assertion errors - * @returns {TestRunner} - */ -function lte (val1, val2, response) { - try { - ;[val1, val2] = getNumbers(val1, val2) - assert.ok(val1 <= val2) - } catch (err) { - err.response = JSON.stringify(response) - throw err - } -} - -/** - * Asserts that the first value is greater than or equal the second - * It also verifies that the two values are numbers - * @param {any} the first value - * @param {any} the second value - * @param {any} debugging metadata to attach to any assertion errors - * @returns {TestRunner} -*/ -function gte (val1, val2, response) { - try { - ;[val1, val2] = getNumbers(val1, val2) - assert.ok(val1 >= val2) - } catch (err) { - err.response = JSON.stringify(response) - throw err - } -} - -/** - * Asserts that the given value has the specified length - * @param {string|object|array} the object to check - * @param {number} the expected length - * @param {any} debugging metadata to attach to any assertion errors - * @returns {TestRunner} - */ -function length (val, len, response) { - try { - if (typeof val === 'string' || Array.isArray(val)) { - assert.equal(val.length, len) - } else if (typeof val === 'object' && val !== null) { - assert.equal(Object.keys(val).length, len) - } else { - assert.fail(`length: the given value is invalid: ${val}`) - } - } catch (err) { - err.response = JSON.stringify(response) - throw err - } -} - -/** - * Gets a `do` action object and returns a structured object, - * where the action is the key and the parameter is the value. 
- * Eg: - * { - * 'indices.create': { - * 'index': 'test' - * }, - * 'warnings': [ - * '[index] is deprecated' - * ] - * } - * becomes - * { - * method: 'indices.create', - * params: { - * index: 'test' - * }, - * warnings: [ - * '[index] is deprecated' - * ] - * } - * @param {object} - * @returns {object} - */ -function parseDo (action) { - action = JSON.parse(JSON.stringify(action)) - - if (typeof action === 'string') action = { [action]: {} } - if (Array.isArray(action)) action = action[0] - - return Object.keys(action).reduce((acc, val) => { - switch (val) { - case 'catch': - acc.catch = action.catch - break - case 'warnings': - acc.warnings = action.warnings - break - case 'node_selector': - acc.node_selector = action.node_selector - break - default: - // converts underscore to camelCase - // eg: put_mapping => putMapping - acc.method = val.replace(/_([a-z])/g, g => g[1].toUpperCase()) - acc.api = val - acc.params = action[val] // camelify(action[val]) - if (typeof acc.params.body === 'string') { - try { - acc.params.body = JSON.parse(acc.params.body) - } catch (err) {} - } - } - return acc - }, {}) - - // function camelify (obj) { - // const newObj = {} - - // // TODO: add camelCase support for this fields - // const doNotCamelify = ['copy_settings'] - - // for (const key in obj) { - // const val = obj[key] - // let newKey = key - // if (!~doNotCamelify.indexOf(key)) { - // // if the key starts with `_` we should not camelify the first occurence - // // eg: _source_include => _sourceInclude - // newKey = key[0] === '_' - // ? 
'_' + key.slice(1).replace(/_([a-z])/g, k => k[1].toUpperCase()) - // : key.replace(/_([a-z])/g, k => k[1].toUpperCase()) - // } - - // if ( - // val !== null && - // typeof val === 'object' && - // !Array.isArray(val) && - // key !== 'body' - // ) { - // newObj[newKey] = camelify(val) - // } else { - // newObj[newKey] = val - // } - // } - - // return newObj - // } -} - -function parseDoError (err, spec) { - const httpErrors = { - bad_request: 400, - unauthorized: 401, - forbidden: 403, - missing: 404, - request_timeout: 408, - conflict: 409, - unavailable: 503 - } - - if (httpErrors[spec]) { - return err.statusCode === httpErrors[spec] - } - - if (spec === 'request') { - return err.statusCode >= 400 && err.statusCode < 600 - } - - if (spec.startsWith('/') && spec.endsWith('/')) { - return new RegExp(spec.slice(1, -1), 'g').test(JSON.stringify(err.body)) - } - - if (spec === 'param') { - // the new client do not perform runtime checks, - // but it relies on typescript informing the user - return true - // return err instanceof ConfigurationError - } - - return false -} - -function getSkip (arr) { - if (!Array.isArray(arr)) return null - for (let i = 0; i < arr.length; i++) { - if (arr[i].skip) return arr[i].skip - } - return null -} - -// Gets two *maybe* numbers and returns two valida numbers -// it throws if one or both are not a valid number -// the returned value is an array with the new values -function getNumbers (val1, val2) { - const val1Numeric = Number(val1) - if (isNaN(val1Numeric)) { - throw new TypeError(`val1 is not a valid number: ${val1}`) - } - const val2Numeric = Number(val2) - if (isNaN(val2Numeric)) { - throw new TypeError(`val2 is not a valid number: ${val2}`) - } - return [val1Numeric, val2Numeric] -} - -function getRandomInt (min, max) { - return Math.floor(Math.random() * (max - min)) + min -} - -/** - * Logs a skip - * @param {object} the actions - * @returns {TestRunner} - */ -function logSkip (action) { - if (action.reason && 
action.version) { - console.log(`Skip: ${action.reason} (${action.version})`) - } else if (action.features) { - console.log(`Skip: ${JSON.stringify(action.features)})`) - } else { - console.log('Skipped') - } -} - -/** - * Decides if a test should be skipped - * @param {object} the actions - * @returns {boolean} - */ -function shouldSkip (esVersion, action) { - let shouldSkip = false - // skip based on the version - if (action.version) { - if (action.version.trim() === 'all') return true - const versions = action.version.split(',').filter(Boolean) - for (const version of versions) { - const [min, max] = version.split('-').map(v => v.trim()) - // if both `min` and `max` are specified - if (min && max) { - shouldSkip = semver.satisfies(esVersion, action.version) - // if only `min` is specified - } else if (min) { - shouldSkip = semver.gte(esVersion, min) - // if only `max` is specified - } else if (max) { - shouldSkip = semver.lte(esVersion, max) - // something went wrong! - } else { - throw new Error(`skip: Bad version range: ${action.version}`) - } - } - } - - if (shouldSkip) return true - - if (action.features) { - if (!Array.isArray(action.features)) action.features = [action.features] - // returns true if one of the features is not present in the supportedFeatures - shouldSkip = !!action.features.filter(f => !~supportedFeatures.indexOf(f)).length - } - - if (shouldSkip) return true - - return false -} - -function isNDJson (api) { - const spec = require(join(locations.specFolder, `${api}.json`)) - const { content_type } = spec[Object.keys(spec)[0]].headers - return Boolean(content_type && content_type.includes('application/x-ndjson')) -} - -/** - * Updates the array syntax of keys and values - * eg: 'hits.hits.1.stuff' to 'hits.hits[1].stuff' - * @param {object} the action to update - * @returns {obj} the updated action - */ -// function updateArraySyntax (obj) { -// const newObj = {} - -// for (const key in obj) { -// const newKey = key.replace(/\.\d{1,}\./g, v 
=> `[${v.slice(1, -1)}].`) -// const val = obj[key] - -// if (typeof val === 'string') { -// newObj[newKey] = val.replace(/\.\d{1,}\./g, v => `[${v.slice(1, -1)}].`) -// } else if (val !== null && typeof val === 'object') { -// newObj[newKey] = updateArraySyntax(val) -// } else { -// newObj[newKey] = val -// } -// } - -// return newObj -// } - -module.exports = build From e06b54b3724d2b793d3218b76434854c6b93f8c1 Mon Sep 17 00:00:00 2001 From: Josh Mock Date: Mon, 14 Apr 2025 15:18:27 -0500 Subject: [PATCH 02/22] Pass test cert when running https locally --- test/integration/index.js | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/test/integration/index.js b/test/integration/index.js index 7ab553f15..d4be0aaab 100644 --- a/test/integration/index.js +++ b/test/integration/index.js @@ -11,6 +11,9 @@ process.on('unhandledRejection', function (err) { }) const assert = require('node:assert') +const url = require('node:url') +const fs = require('node:fs') +const path = require('node:path') const globby = require('globby') const downloadArtifacts = require('../../scripts/download-artifacts') @@ -44,6 +47,13 @@ if (require.main === module) { } else { clientOptions.auth = { username: 'elastic', password } } + const nodeUrl = new url.URL(node) + if (nodeUrl.protocol === 'https:') { + clientOptions.tls = { + ca: fs.readFileSync(path.join(__dirname, '..', '..', '.buildkite', 'certs', 'ca.crt'), 'utf8'), + rejectUnauthorized: false + } + } doTestBuilder(clientOptions) .then(() => process.exit(0)) .catch(err => { From caeee835ea77a96b7d293c0ec097fe2f89f3d359 Mon Sep 17 00:00:00 2001 From: Josh Mock Date: Mon, 14 Apr 2025 15:19:12 -0500 Subject: [PATCH 03/22] Don't try to reinitialize existing variables --- test/integration/test-builder.js | 19 +++++++++++++++---- 1 file changed, 15 insertions(+), 4 deletions(-) diff --git a/test/integration/test-builder.js b/test/integration/test-builder.js index be75a6e59..d40da2268 100644 --- a/test/integration/test-builder.js +++ 
b/test/integration/test-builder.js @@ -144,6 +144,8 @@ async function build (yamlFiles, clientOptions) { let code = `const client = new Client(${JSON.stringify(clientOptions, null, 2)})\n` code += 'let response\n\n' + const vars = new Set() + for (const action of actions) { const key = Object.keys(action)[0] switch (key) { @@ -151,7 +153,9 @@ async function build (yamlFiles, clientOptions) { code += buildDo(action.do) break case 'set': - code += buildSet(action.set) + const setResult = buildSet(action.set, vars) + vars.add(setResult.varName) + code += setResult.code break case 'transform_and_set': code += buildTransformAndSet(action.transform_and_set) @@ -225,11 +229,18 @@ function buildRequest(action) { return code } -function buildSet (action) { +function buildSet (action, vars) { const key = Object.keys(action)[0] const varName = action[key] - const path = buildPath(key) - return `let ${varName} = response.body${path}\n` + const lookup = buildLookup(key) + + let code = '' + if (vars.has(varName)) { + code = `${varName} = ${lookup}\n` + } else { + code =`let ${varName} = ${lookup}\n` + } + return { code, varName } } function buildTransformAndSet (action) { From 133b361dec9037837479f08a6f52a2c8c91f7494 Mon Sep 17 00:00:00 2001 From: Josh Mock Date: Mon, 14 Apr 2025 15:20:26 -0500 Subject: [PATCH 04/22] Support for overriding HTTP headers --- test/integration/test-builder.js | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/test/integration/test-builder.js b/test/integration/test-builder.js index d40da2268..62b025044 100644 --- a/test/integration/test-builder.js +++ b/test/integration/test-builder.js @@ -213,10 +213,18 @@ function buildDo (action) { function buildRequest(action) { let code = '' + + const options = { meta: true } + for (const key of Object.keys(action)) { if (key === 'catch') continue + + if (key === 'headers') { + options.headers = action.headers + continue + } + const params = action[key] - const options = { meta: true 
} if (params.ignore != null) { if (Array.isArray(params.ignore)) { options.ignore = params.ignore @@ -224,6 +232,7 @@ function buildRequest(action) { options.ignore = [params.ignore] } } + code += `response = await client.${toCamelCase(key)}(${buildApiParams(action[key])}, ${JSON.stringify(options)})\n` } return code From b3b9262bc80cbea0146c0033faee3ff5ced4e9b0 Mon Sep 17 00:00:00 2001 From: Josh Mock Date: Tue, 15 Apr 2025 12:46:12 -0500 Subject: [PATCH 05/22] Add support for 'exists' action --- test/integration/test-builder.js | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/test/integration/test-builder.js b/test/integration/test-builder.js index 62b025044..728167b73 100644 --- a/test/integration/test-builder.js +++ b/test/integration/test-builder.js @@ -186,9 +186,14 @@ async function build (yamlFiles, clientOptions) { break case 'contains': code += buildContains(action.contains) + break + case 'exists': + code += buildExists(action.exists) + break case 'skip': break default: + console.warn(`Action not supported: ${key}`) break } } @@ -326,6 +331,11 @@ function buildContains (action) { return `t.ok(response.body${path}.includes(${val}))\n` } +function buildExists (keyName) { + const lookup = buildLookup(key) + return `t.ok(${lookup} != null, \`Key "${keyName}" not found in response body: \$\{JSON.stringify(response.body, null, 2)\}\`)\n` +} + function buildApiParams (params) { if (Object.keys(params).length === 0) { return 'undefined' From 38d2383f967fe5f57a5c4a601e4bea9768fd5aa0 Mon Sep 17 00:00:00 2001 From: Josh Mock Date: Tue, 15 Apr 2025 12:46:35 -0500 Subject: [PATCH 06/22] Add a bunch of skips to stack tests --- test/integration/test-builder.js | 54 ++++++++++++++++++++++++++++++-- 1 file changed, 52 insertions(+), 2 deletions(-) diff --git a/test/integration/test-builder.js b/test/integration/test-builder.js index 728167b73..29de176b7 100644 --- a/test/integration/test-builder.js +++ b/test/integration/test-builder.js @@ -14,8 +14,58 @@ 
const { mkdir } = promises const generatedTestsPath = join(__dirname, '..', '..', 'generated-tests') const stackSkips = [ - // test builder doesn't support "(local)" or `exists` action - 'indices/resolve_cluster.yml' + // client bug: ILM request takes a "body" param, but "body" is a special keyword in the JS client + 'ilm/10_basic.yml', + // health report is... not healthy + 'health_report.yml', + // TODO: `contains` action only supports checking for primitives inside arrays or strings inside strings, not referenced values like objects inside arrays + 'entsearch/10_basic.yml', + // no handler found for uri [/knn_test/_knn_search] + 'knn_search.yml', + // TODO: fix license on ES startup - "Operation failed: Current license is basic." + 'license/10_stack.yml', + // response.body should be truthy. found: "" + 'logstash/10_basic.yml', + // test definition bug? security_exception: unable to authenticate user [x_pack_rest_user] for REST request [/_ml/trained_models/test_model/definition/0] + 'machine_learning/clear_tm_deployment_cache.yml', + // client bug: 0.99995 does not equal 0.5 + 'machine_learning/data_frame_evaluate.yml', + // test definition bug? 
regex has whitespace, maybe needs to be removed + 'machine_learning/explain_data_frame_analytics.yml', + // client bug: 4 != 227 + 'machine_learning/preview_datafeed.yml', + // test definition bug: error message does not match + 'machine_learning/revert_model_snapshot.yml', + // test definition bug: error message does not match + 'machine_learning/update_model_snapshot.yml', + // version_conflict_engine_exception + 'machine_learning/jobs_crud.yml', + // test definition bug: error message does not match + 'machine_learning/model_snapshots.yml', + // test definition bug: error message does not match + 'query_rules/30_test.yml', + // client bug: 0 != 0.1 + 'script/10_basic.yml', + // client bug: request takes a "body" param, but "body" is a special keyword in the JS client + 'searchable_snapshots/10_basic.yml', + // test builder bug: does `match` action need to support "array contains value"? + 'security/10_api_key_basic.yml', + // test definition bug: error message does not match + 'security/140_user.yml', + // test definition bug: error message does not match + 'security/30_privileges_stack.yml', + // test definition bug: error message does not match + 'security/change_password.yml', + // test builder bug: media_type_header_exception + 'simulate/ingest.yml', + // client bug: request takes a "body" param, but "body" is a special keyword in the JS client + 'snapshot/10_basic.yml', + // test definition bug: illegal_argument_exception + 'sql/10_basic.yml', + // test definition bug: illegal_argument_exception + 'text_structure/10_basic.yml', + // test definition bug: illegal_argument_exception + 'transform/10_basic.yml', ] const serverlessSkips = [ From 2b304f7ae468d27ccc377d8e8ba0f0437141538b Mon Sep 17 00:00:00 2001 From: Josh Mock Date: Tue, 15 Apr 2025 12:47:03 -0500 Subject: [PATCH 07/22] Tweaks to try/catch tests --- test/integration/test-builder.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/integration/test-builder.js 
b/test/integration/test-builder.js index 29de176b7..c5dc34bcb 100644 --- a/test/integration/test-builder.js +++ b/test/integration/test-builder.js @@ -256,9 +256,9 @@ function buildDo (action) { const keys = Object.keys(action) if (keys.includes('catch')) { code += 'try {\n' - code += indent(buildRequest(action)) + code += indent(buildRequest(action), 2) code += '} catch (err) {\n' - code += ` t.match(err.message, ${buildValLiteral(action.catch)})\n` + code += ` t.match(err.toString(), ${buildValLiteral(action.catch)})\n` code += '}\n' } else { code += buildRequest(action) From 07b22e2cbf9510f07883c1f2ce3dd27cb090f627 Mon Sep 17 00:00:00 2001 From: Josh Mock Date: Tue, 15 Apr 2025 12:48:43 -0500 Subject: [PATCH 08/22] Improve accuracy of value lookup generation --- test/integration/test-builder.js | 70 +++++++++++++++----------------- 1 file changed, 33 insertions(+), 37 deletions(-) diff --git a/test/integration/test-builder.js b/test/integration/test-builder.js index c5dc34bcb..2c58c6f90 100644 --- a/test/integration/test-builder.js +++ b/test/integration/test-builder.js @@ -313,76 +313,72 @@ function buildTransformAndSet (action) { function buildMatch (action) { const key = Object.keys(action)[0] - const path = buildPath(key) + let lookup = buildLookup(key) const val = buildValLiteral(action[key]) - let lookup = `response.body${path}` - - if (lookup === 'response.body[body]') lookup = 'JSON.stringify(response.body) === "" ? null : JSON.stringify(response.body)' - if (lookup === "response.body['']") lookup = 'JSON.stringify(response.body) === "" ? 
null : JSON.stringify(response.body)' - - if (val.startsWith('/')) { - return `t.ok(${lookup}.match(${val}), '${key} should match regex ${val}')\n` - } else if (typeof action[key] === 'object') { - return `t.match(${lookup}, ${val})\n` - } else { - return `t.equal(${lookup}, ${val})\n` - } + return `t.match(${lookup}, ${val})\n` } function buildLt (action) { const key = Object.keys(action)[0] - const path = buildPath(key) + const lookup = buildLookup(key) const val = buildValLiteral(action[key]) - return `t.ok(response.body${path} < ${val})\n` + return `t.ok(${lookup} < ${val})\n` } function buildLte (action) { const key = Object.keys(action)[0] - const path = buildPath(key) + const lookup = buildLookup(key) const val = buildValLiteral(action[key]) - return `t.ok(response.body${path} <= ${val})\n` + return `t.ok(${lookup} <= ${val})\n` } function buildGt (action) { const key = Object.keys(action)[0] - const path = buildPath(key) + const lookup = buildLookup(key) const val = buildValLiteral(action[key]) - return `t.ok(response.body${path} > ${val})\n` + return `t.ok(${lookup} > ${val})\n` } function buildGte (action) { const key = Object.keys(action)[0] - const path = buildPath(key) + const lookup = buildLookup(key) const val = buildValLiteral(action[key]) - return `t.ok(response.body${path} >= ${val})\n` + return `t.ok(${lookup} >= ${val})\n` } function buildLength (action) { const key = Object.keys(action)[0] - const path = buildPath(key) + const lookup = buildLookup(key) const val = buildValLiteral(action[key]) - return `t.equal(response.body${path}.length, ${val})\n` + + let code = '' + code += `if (typeof ${lookup} === 'object' && !Array.isArray(${lookup})) {\n` + code += ` t.equal(Object.keys(${lookup}).length, ${val})\n` + code += `} else {\n` + code += ` t.equal(${lookup}.length, ${val})\n` + code += `}\n` + return code } function buildIsTrue (action) { - let lookup = `response.body${buildPath(action)}` - return `t.ok(${lookup} === "true" || 
(Boolean(${lookup}) && ${lookup} !== "false"), \`${lookup} should be truthy. found: \$\{JSON.stringify(${lookup})\}\`)\n` + let lookup = `${buildLookup(action)}` + return `t.ok(${lookup} === "true" || (Boolean(${lookup}) && ${lookup} !== "false"), \`${action} should be truthy. found: \$\{JSON.stringify(${lookup})\}\`)\n` } function buildIsFalse (action) { - let lookup = `response.body${buildPath(action)}` - return `t.ok(${lookup} === "false" || !Boolean(${lookup}), \`${lookup} should be falsy. found: \$\{JSON.stringify(${lookup})\}\`)\n` + let lookup = `${buildLookup(action)}` + return `t.ok(${lookup} === "false" || !Boolean(${lookup}), \`${action} should be falsy. found: \$\{JSON.stringify(${lookup})\}\`)\n` } function buildContains (action) { const key = Object.keys(action)[0] - const path = buildPath(key) + const lookup = buildLookup(key) const val = buildValLiteral(action[key]) - return `t.ok(response.body${path}.includes(${val}))\n` + return `t.ok(${lookup}.includes(${val}), '"${val}" not found in ${key}')\n` } function buildExists (keyName) { - const lookup = buildLookup(key) + const lookup = buildLookup(keyName) return `t.ok(${lookup} != null, \`Key "${keyName}" not found in response body: \$\{JSON.stringify(response.body, null, 2)\}\`)\n` } @@ -391,7 +387,7 @@ function buildApiParams (params) { return 'undefined' } else { const out = {} - Object.keys(params).filter(k => k !== 'ignore').forEach(k => out[k] = params[k]) + Object.keys(params).filter(k => k !== 'ignore' && k !== 'headers').forEach(k => out[k] = params[k]) return buildValLiteral(out) } } @@ -405,12 +401,11 @@ function indent (str, spaces) { return str.replace(/\s+$/, '').split('\n').map(l => `${tabs}${l}`).join('\n') + '\n' } -function buildPath (path) { - if (path === 'response') return '' - return path.split('.').map(step => { - if (step === 'response' || step === 'body') { - return '' - } else if (parseInt(step, 10).toString() === step) { +function buildLookup (path) { + if (path === '$body') 
return 'JSON.stringify(response.body)' + + const outPath = path.split('.').map(step => { + if (parseInt(step, 10).toString() === step) { return `[${step}]` } else if (step.match(/^\$[a-zA-Z0-9_]+$/)) { const lookup = step.replace(/^\$/, '') @@ -422,6 +417,7 @@ function buildPath (path) { return `['${step}']` } }).join('') + return `response.body${outPath}` } function buildValLiteral (val) { From 6f0bee8b9b045e7ba90524c7615839b080230538 Mon Sep 17 00:00:00 2001 From: Josh Mock Date: Tue, 15 Apr 2025 12:49:14 -0500 Subject: [PATCH 09/22] Improve accuracy of value literal generation --- test/integration/test-builder.js | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/test/integration/test-builder.js b/test/integration/test-builder.js index 2c58c6f90..be3fb06c0 100644 --- a/test/integration/test-builder.js +++ b/test/integration/test-builder.js @@ -421,10 +421,11 @@ function buildLookup (path) { } function buildValLiteral (val) { + if (typeof val === 'string') val = val.trim() if (isRegExp(val)) { - return JSON.stringify(val).replace(/^"/, '').replace(/"$/, '').replace(/\\\\/, '\\') + return JSON.stringify(val).replace(/^"/, '').replace(/"$/, '').replaceAll('\\\\', '\\') } else if (isVariable(val)) { - if (val === '$body') return '' + if (val === '$body') return 'JSON.stringify(response.body)' return val.replace(/^\$/, '') } else if (isPlainObject(val)) { return JSON.stringify(cleanObject(val), null, 2).replace(/"\$([a-zA-Z0-9_]+)"/g, '$1') From f76037ddd947dce2208cb55f168e7a94528c5531 Mon Sep 17 00:00:00 2001 From: Josh Mock Date: Tue, 15 Apr 2025 14:00:35 -0500 Subject: [PATCH 10/22] Improvements to $body value expression --- test/integration/test-builder.js | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/test/integration/test-builder.js b/test/integration/test-builder.js index be3fb06c0..c40d5e54f 100644 --- a/test/integration/test-builder.js +++ b/test/integration/test-builder.js @@ -362,19 +362,23 @@ function 
buildLength (action) { function buildIsTrue (action) { let lookup = `${buildLookup(action)}` - return `t.ok(${lookup} === "true" || (Boolean(${lookup}) && ${lookup} !== "false"), \`${action} should be truthy. found: \$\{JSON.stringify(${lookup})\}\`)\n` + let errMessage = `\`${action} should be truthy. found: '\$\{JSON.stringify(${lookup})\}'\`` + if (lookup.includes('JSON.stringify')) errMessage = `\`${action} should be truthy. found: '\$\{${lookup}\}'\`` + return `t.ok(${lookup} === "true" || (Boolean(${lookup}) && ${lookup} !== "false"), ${errMessage})\n` } function buildIsFalse (action) { let lookup = `${buildLookup(action)}` - return `t.ok(${lookup} === "false" || !Boolean(${lookup}), \`${action} should be falsy. found: \$\{JSON.stringify(${lookup})\}\`)\n` + let errMessage = `\`${action} should be falsy. found: '\$\{JSON.stringify(${lookup})\}'\`` + if (lookup.includes('JSON.stringify')) errMessage = `\`${action} should be falsy. found: '\$\{${lookup}\}'\`` + return `t.ok(${lookup} === "false" || !Boolean(${lookup}), ${errMessage})\n` } function buildContains (action) { const key = Object.keys(action)[0] const lookup = buildLookup(key) const val = buildValLiteral(action[key]) - return `t.ok(${lookup}.includes(${val}), '"${val}" not found in ${key}')\n` + return `t.ok(${lookup}.includes(${val}), '${JSON.stringify(val)} not found in ${key}')\n` } function buildExists (keyName) { @@ -402,7 +406,7 @@ function indent (str, spaces) { } function buildLookup (path) { - if (path === '$body') return 'JSON.stringify(response.body)' + if (path === '$body') return '(typeof response.body === "string" ? 
response.body : JSON.stringify(response.body))' const outPath = path.split('.').map(step => { if (parseInt(step, 10).toString() === step) { From 52c90532f8fbbbf4e51245a5947a945394ad5d64 Mon Sep 17 00:00:00 2001 From: Josh Mock Date: Tue, 15 Apr 2025 14:00:47 -0500 Subject: [PATCH 11/22] More test skips --- test/integration/test-builder.js | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/test/integration/test-builder.js b/test/integration/test-builder.js index c40d5e54f..64ce97dd2 100644 --- a/test/integration/test-builder.js +++ b/test/integration/test-builder.js @@ -14,12 +14,20 @@ const { mkdir } = promises const generatedTestsPath = join(__dirname, '..', '..', 'generated-tests') const stackSkips = [ + // test definition bug: response is empty string + 'cat/fielddata.yml', + // test definition bug: response is empty string + 'cluster/delete_voting_config_exclusions.yml', + // test definition bug: response is empty string + 'cluster/voting_config_exclusions.yml', // client bug: ILM request takes a "body" param, but "body" is a special keyword in the JS client 'ilm/10_basic.yml', // health report is... not healthy 'health_report.yml', // TODO: `contains` action only supports checking for primitives inside arrays or strings inside strings, not referenced values like objects inside arrays 'entsearch/10_basic.yml', + // test definition bug: error message does not match + 'entsearch/30_sync_jobs_stack.yml', // no handler found for uri [/knn_test/_knn_search] 'knn_search.yml', // TODO: fix license on ES startup - "Operation failed: Current license is basic." 
@@ -54,6 +62,8 @@ const stackSkips = [ 'security/140_user.yml', // test definition bug: error message does not match 'security/30_privileges_stack.yml', + // unknown issue: $profile.enabled path doesn't exist in response + 'security/130_user_profile.yml', // test definition bug: error message does not match 'security/change_password.yml', // test builder bug: media_type_header_exception From 51e85571104d44a7771ca7264f106eac252596ac Mon Sep 17 00:00:00 2001 From: Josh Mock Date: Tue, 15 Apr 2025 14:11:20 -0500 Subject: [PATCH 12/22] Ensure test runner pulls correct tests by version --- scripts/download-artifacts.js | 15 +++++++-------- test/integration/index.js | 13 +++++++++++-- 2 files changed, 18 insertions(+), 10 deletions(-) diff --git a/scripts/download-artifacts.js b/scripts/download-artifacts.js index 5be3617c5..c15ed4ae1 100644 --- a/scripts/download-artifacts.js +++ b/scripts/download-artifacts.js @@ -27,11 +27,15 @@ async function downloadArtifacts (localTests, version = 'main') { const { GITHUB_TOKEN } = process.env + if (version !== 'main') { + version = version.split('.').slice(0, 2).join('.') + } + log.text = 'Clean tests folder' await rimraf(testYamlFolder) await mkdir(testYamlFolder, { recursive: true }) - log.text = 'Fetching test YAML files' + log.text = `Fetch test YAML files for version ${version}` if (localTests) { log.text = `Copying local tests from ${localTests}` @@ -42,7 +46,7 @@ async function downloadArtifacts (localTests, version = 'main') { process.exit(1) } - const response = await fetch('https://api.github.com/repos/elastic/elasticsearch-clients-tests/zipball/main', { + const response = await fetch(`https://api.github.com/repos/elastic/elasticsearch-clients-tests/zipball/${version}`, { headers: { Authorization: `Bearer ${GITHUB_TOKEN}`, Accept: 'application/vnd.github+json' @@ -68,12 +72,7 @@ async function downloadArtifacts (localTests, version = 'main') { await rimraf(schemaFolder) await mkdir(schemaFolder, { recursive: true }) - 
let specVersion = version - if (version !== 'main') { - specVersion = version.split('.').slice(0, 2).join('.') - } - - const response = await fetch(`https://raw.githubusercontent.com/elastic/elasticsearch-specification/${specVersion}/output/schema/schema.json`) + const response = await fetch(`https://raw.githubusercontent.com/elastic/elasticsearch-specification/${version}/output/schema/schema.json`) if (!response.ok) { log.fail(`unexpected response ${response.statusText}`) process.exit(1) diff --git a/test/integration/index.js b/test/integration/index.js index d4be0aaab..a4d51ea4e 100644 --- a/test/integration/index.js +++ b/test/integration/index.js @@ -15,6 +15,7 @@ const url = require('node:url') const fs = require('node:fs') const path = require('node:path') const globby = require('globby') +const semver = require('semver') const downloadArtifacts = require('../../scripts/download-artifacts') const buildTests = require('./test-builder') @@ -30,7 +31,8 @@ const getAllFiles = async dir => { return files.sort() } -async function doTestBuilder (clientOptions) { +async function doTestBuilder (version, clientOptions) { + await downloadArtifacts(undefined, version) const files = await getAllFiles(yamlFolder) await buildTests(files, clientOptions) } @@ -39,8 +41,14 @@ if (require.main === module) { const node = process.env.TEST_ES_SERVER const apiKey = process.env.ES_API_SECRET_KEY const password = process.env.ELASTIC_PASSWORD + let version = process.env.STACK_VERSION + assert(node != null, 'Environment variable missing: TEST_ES_SERVER') assert(apiKey != null || password != null, 'Environment variable missing: ES_API_SECRET_KEY or ELASTIC_PASSWORD') + assert(version != null, 'Environment variable missing: STACK_VERSION') + + version = semver.clean(version.includes('SNAPSHOT') ? 
version.split('-')[0] : version) + const clientOptions = { node } if (apiKey != null) { clientOptions.auth = { apiKey } @@ -54,7 +62,8 @@ if (require.main === module) { rejectUnauthorized: false } } - doTestBuilder(clientOptions) + + doTestBuilder(version, clientOptions) .then(() => process.exit(0)) .catch(err => { console.error(err) From e7fb0e6239079c948c5fa22ce61df85087073438 Mon Sep 17 00:00:00 2001 From: Josh Mock Date: Tue, 15 Apr 2025 14:35:46 -0500 Subject: [PATCH 13/22] Update CI pipeline to reflect changes to integration tests --- .buildkite/pipeline.yml | 10 ++++------ .buildkite/run-client.sh | 36 ++++++++++++++++++++---------------- 2 files changed, 24 insertions(+), 22 deletions(-) diff --git a/.buildkite/pipeline.yml b/.buildkite/pipeline.yml index c5146fc68..477dcadc3 100644 --- a/.buildkite/pipeline.yml +++ b/.buildkite/pipeline.yml @@ -1,17 +1,15 @@ --- steps: - - label: ":elasticsearch: :javascript: ES JavaScript ({{ matrix.nodejs }}) Test Suite: {{ matrix.suite }}" + - label: ":elasticsearch: :javascript: ES JavaScript ({{ matrix.nodejs }})" agents: provider: "gcp" env: NODE_VERSION: "{{ matrix.nodejs }}" - TEST_SUITE: "{{ matrix.suite }}" - STACK_VERSION: 8.16.0 + TEST_SUITE: "platinum" + STACK_VERSION: 9.0.0 + GITHUB_TOKEN_PATH: "secret/ci/elastic-elasticsearch-js/github-token" matrix: setup: - suite: - - "free" - - "platinum" nodejs: - "18" - "20" diff --git a/.buildkite/run-client.sh b/.buildkite/run-client.sh index 59ed168e7..c089c4c22 100755 --- a/.buildkite/run-client.sh +++ b/.buildkite/run-client.sh @@ -10,22 +10,26 @@ export NODE_VERSION=${NODE_VERSION:-18} echo "--- :javascript: Building Docker image" docker build \ - --file "$script_path/Dockerfile" \ - --tag elastic/elasticsearch-js \ - --build-arg NODE_VERSION="$NODE_VERSION" \ - . + --file "$script_path/Dockerfile" \ + --tag elastic/elasticsearch-js \ + --build-arg NODE_VERSION="$NODE_VERSION" \ + . 
-echo "--- :javascript: Running $TEST_SUITE tests" +GITHUB_TOKEN=$(vault read -field=token "$GITHUB_TOKEN_PATH") +export GITHUB_TOKEN + +echo "--- :javascript: Running tests" mkdir -p "$repo/junit-output" docker run \ - --network="${network_name}" \ - --env "TEST_ES_SERVER=${elasticsearch_url}" \ - --env "ELASTIC_PASSWORD=${elastic_password}" \ - --env "TEST_SUITE=${TEST_SUITE}" \ - --env "ELASTIC_USER=elastic" \ - --env "BUILDKITE=true" \ - --volume "$repo/junit-output:/junit-output" \ - --name elasticsearch-js \ - --rm \ - elastic/elasticsearch-js \ - bash -c "npm run test:integration; [ -f ./$TEST_SUITE-report-junit.xml ] && mv ./$TEST_SUITE-report-junit.xml /junit-output/junit-$BUILDKITE_JOB_ID.xml || echo 'No JUnit artifact found'" + --network="${network_name}" \ + --env "TEST_ES_SERVER=${elasticsearch_url}" \ + --env "ELASTIC_PASSWORD=${elastic_password}" \ + --env "STACK_VERSION=$STACK_VERSION" \ + --env "GITHUB_TOKEN=$GITHUB_TOKEN" \ + --env "ELASTIC_USER=elastic" \ + --env "BUILDKITE=true" \ + --volume "$repo/junit-output:/junit-output" \ + --name elasticsearch-js \ + --rm \ + elastic/elasticsearch-js \ + bash -c "npm run test:integration; [ -f ./report-junit.xml ] && mv ./report-junit.xml /junit-output/junit-$BUILDKITE_JOB_ID.xml || echo 'No JUnit artifact found'" From 3d83998dd9b53ed3f2c8e1bac1530e6ae11a33ba Mon Sep 17 00:00:00 2001 From: Josh Mock Date: Wed, 16 Apr 2025 09:38:51 -0500 Subject: [PATCH 14/22] Add back deleted module that is used elsewhere --- test/integration/helper.js | 124 +++++++++++++++++++++++++++++++++++++ 1 file changed, 124 insertions(+) create mode 100644 test/integration/helper.js diff --git a/test/integration/helper.js b/test/integration/helper.js new file mode 100644 index 000000000..bfe2535fa --- /dev/null +++ b/test/integration/helper.js @@ -0,0 +1,124 @@ +/* + * Copyright Elasticsearch B.V. 
and contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +'use strict' + +const assert = require('node:assert') +const fetch = require('node-fetch') + +function runInParallel (client, operation, options, clientOptions) { + if (options.length === 0) return Promise.resolve() + const operations = options.map(opts => { + const api = delve(client, operation).bind(client) + return api(opts, clientOptions) + }) + + return Promise.all(operations) +} + +// code from https://github.com/developit/dlv +// needed to support an edge case: `a\.b` +// where `a.b` is a single field: { 'a.b': true } +function delve (obj, key, def, p) { + p = 0 + // handle the key with a dot inside that is not a part of the path + // and removes the backslashes from the key + key = key.split + ? key.split(/(?<!\\)\./g).map(k => k.replace(/\\/g, '')) + : key.replace(/\\/g, '') + while (obj && p < key.length) obj = obj[key[p++]] + return (obj === undefined || p < key.length) ? def : obj +} + +function to (promise) { + return promise.then(data => [null, data], err => [err, undefined]) +} + +const sleep = ms => new Promise(resolve => setTimeout(resolve, ms)) + +function isXPackTemplate (name) { + if (name.startsWith('.monitoring-')) { + return true + } + if (name.startsWith('.watch') || name.startsWith('.triggered_watches')) { + return true + } + if (name.startsWith('.data-frame-')) { + return true + } + if (name.startsWith('.ml-')) { + return true + } + if (name.startsWith('.transform-')) { + return true + } + if (name.startsWith('.deprecation-')) { + return true + } + switch (name) { + case '.watches': + case 'logstash-index-template': + case '.logstash-management': + case 'security_audit_log': + case '.slm-history': + case '.async-search': + case 'saml-service-provider': + case 'ilm-history': + case 'logs': + case 'logs-settings': + case 'logs-mappings': + case 'metrics': + case 'metrics-settings': + case 'metrics-mappings': + case 'synthetics': + case 'synthetics-settings': + case 'synthetics-mappings': + case 
'.snapshot-blob-cache': + case 'data-streams-mappings': + return true + } + return false +} + +async function getSpec () { + const response = await fetch('https://raw.githubusercontent.com/elastic/elasticsearch-specification/main/output/schema/schema.json') + return await response.json() +} + +let spec = null + +// some keys for the path used in the yaml test are not support in the client +// for example: snapshot.createRepository({ repository }) will not work. +// This code changes the params to the appropriate name, in the example above, +// "repository" will be renamed to "name" +async function updateParams (cmd) { + if (spec == null) { + spec = await getSpec() + } + const endpoint = spec.endpoints.find(endpoint => endpoint.name === cmd.api) + assert(endpoint != null) + if (endpoint.request == null) return cmd + + const type = spec.types.find(type => type.name.name === endpoint.request.name && type.name.namespace === endpoint.request.namespace) + assert(type != null) + + const pathParams = type.path.reduce((acc, val) => { + if (val.codegenName != null) { + acc[val.name] = val.codegenName + } + return acc + }, {}) + + for (const key in cmd.params) { + if (pathParams[key] != null) { + cmd.params[pathParams[key]] = cmd.params[key] + delete cmd.params[key] + } + } + + return cmd +} + +module.exports = { runInParallel, delve, to, sleep, isXPackTemplate, updateParams } From 8c542fc59ddb8738e2f5b151260a5482799953ac Mon Sep 17 00:00:00 2001 From: Josh Mock Date: Mon, 21 Apr 2025 13:11:13 -0500 Subject: [PATCH 15/22] Ensure tests are reported in JUnit format --- package.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/package.json b/package.json index f29350877..39bf68e3b 100644 --- a/package.json +++ b/package.json @@ -19,8 +19,8 @@ "test:coverage-report": "npm run build && tap --coverage && nyc report --reporter=text-lcov > coverage.lcov", "test:coverage-ui": "npm run build && tap --coverage --coverage-report=html", "test:integration-build": 
"npm run build && node test/integration/index.js", - "test:integration": "npm run test:integration-build && env TEST_ES_STACK=1 tap run --jobs=1 generated-tests/", - "test:integration-serverless": "npm run test:integration-build && env TEST_ES_SERVERLESS=1 tap run --jobs=1 generated-tests/", + "test:integration": "npm run test:integration-build && env TEST_ES_STACK=1 tap run --jobs=1 --reporter=junit --reporter-file=report-junit.xml generated-tests/", + "test:integration-serverless": "npm run test:integration-build && env TEST_ES_SERVERLESS=1 tap run --jobs=1 --reporter=junit --reporter-file=report-junit.xml generated-tests/", "lint": "ts-standard src", "lint:fix": "ts-standard --fix src", "license-checker": "license-checker --production --onlyAllow='MIT;Apache-2.0;Apache1.1;ISC;BSD-3-Clause;BSD-2-Clause;0BSD'", From ba8255921e8dd1f7828404a23f8250c97130a82f Mon Sep 17 00:00:00 2001 From: Josh Mock Date: Mon, 21 Apr 2025 13:29:41 -0500 Subject: [PATCH 16/22] Add debugging info to Buildkite step to figure out why it's failing --- .buildkite/run-client.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.buildkite/run-client.sh b/.buildkite/run-client.sh index c089c4c22..38d915432 100755 --- a/.buildkite/run-client.sh +++ b/.buildkite/run-client.sh @@ -32,4 +32,4 @@ docker run \ --name elasticsearch-js \ --rm \ elastic/elasticsearch-js \ - bash -c "npm run test:integration; [ -f ./report-junit.xml ] && mv ./report-junit.xml /junit-output/junit-$BUILDKITE_JOB_ID.xml || echo 'No JUnit artifact found'" + bash -c "pwd; ls -la; npm run test:integration; [ -f ./report-junit.xml ] && mv ./report-junit.xml /junit-output/junit-$BUILDKITE_JOB_ID.xml || echo 'No JUnit artifact found'" From e9129cb16c9c718318a080afda2c55b52ee9ecb4 Mon Sep 17 00:00:00 2001 From: Josh Mock Date: Mon, 21 Apr 2025 13:38:40 -0500 Subject: [PATCH 17/22] Make sure to mount the repo, silly --- .buildkite/run-client.sh | 2 ++ 1 file changed, 2 insertions(+) diff --git 
a/.buildkite/run-client.sh b/.buildkite/run-client.sh index 38d915432..b3fc9b863 100755 --- a/.buildkite/run-client.sh +++ b/.buildkite/run-client.sh @@ -28,6 +28,8 @@ docker run \ --env "GITHUB_TOKEN=$GITHUB_TOKEN" \ --env "ELASTIC_USER=elastic" \ --env "BUILDKITE=true" \ + --volume "/usr/src/app/node_modules" \ + --volume "$repo:/usr/src/app" \ --volume "$repo/junit-output:/junit-output" \ --name elasticsearch-js \ --rm \ From 6f334571bcde3e57e75d973d9c16cf1c50b90961 Mon Sep 17 00:00:00 2001 From: Josh Mock Date: Mon, 21 Apr 2025 13:59:20 -0500 Subject: [PATCH 18/22] Turn off JUnit debugging --- .buildkite/run-client.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.buildkite/run-client.sh b/.buildkite/run-client.sh index b3fc9b863..34431ac25 100755 --- a/.buildkite/run-client.sh +++ b/.buildkite/run-client.sh @@ -34,4 +34,4 @@ docker run \ --name elasticsearch-js \ --rm \ elastic/elasticsearch-js \ - bash -c "pwd; ls -la; npm run test:integration; [ -f ./report-junit.xml ] && mv ./report-junit.xml /junit-output/junit-$BUILDKITE_JOB_ID.xml || echo 'No JUnit artifact found'" + bash -c "npm run test:integration; [ -f ./report-junit.xml ] && mv ./report-junit.xml /junit-output/junit-$BUILDKITE_JOB_ID.xml || echo 'No JUnit artifact found'" From f60388fee621b55464f022c5d9e4c3dd78279717 Mon Sep 17 00:00:00 2001 From: Josh Mock Date: Mon, 21 Apr 2025 13:59:47 -0500 Subject: [PATCH 19/22] Let pipeline manage stack/serverless env vars --- .buildkite/pipeline.yml | 1 + .buildkite/run-client.sh | 5 +++-- package.json | 3 +-- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/.buildkite/pipeline.yml b/.buildkite/pipeline.yml index 477dcadc3..7b9b3e75e 100644 --- a/.buildkite/pipeline.yml +++ b/.buildkite/pipeline.yml @@ -8,6 +8,7 @@ steps: TEST_SUITE: "platinum" STACK_VERSION: 9.0.0 GITHUB_TOKEN_PATH: "secret/ci/elastic-elasticsearch-js/github-token" + TEST_ES_STACK: "1" matrix: setup: nodejs: diff --git a/.buildkite/run-client.sh 
b/.buildkite/run-client.sh index 34431ac25..872d57812 100755 --- a/.buildkite/run-client.sh +++ b/.buildkite/run-client.sh @@ -22,10 +22,11 @@ echo "--- :javascript: Running tests" mkdir -p "$repo/junit-output" docker run \ --network="${network_name}" \ + --env TEST_ES_STACK \ + --env STACK_VERSION \ + --env GITHUB_TOKEN \ --env "TEST_ES_SERVER=${elasticsearch_url}" \ --env "ELASTIC_PASSWORD=${elastic_password}" \ - --env "STACK_VERSION=$STACK_VERSION" \ - --env "GITHUB_TOKEN=$GITHUB_TOKEN" \ --env "ELASTIC_USER=elastic" \ --env "BUILDKITE=true" \ --volume "/usr/src/app/node_modules" \ diff --git a/package.json b/package.json index 39bf68e3b..7da89b905 100644 --- a/package.json +++ b/package.json @@ -19,8 +19,7 @@ "test:coverage-report": "npm run build && tap --coverage && nyc report --reporter=text-lcov > coverage.lcov", "test:coverage-ui": "npm run build && tap --coverage --coverage-report=html", "test:integration-build": "npm run build && node test/integration/index.js", - "test:integration": "npm run test:integration-build && env TEST_ES_STACK=1 tap run --jobs=1 --reporter=junit --reporter-file=report-junit.xml generated-tests/", - "test:integration-serverless": "npm run test:integration-build && env TEST_ES_SERVERLESS=1 tap run --jobs=1 --reporter=junit --reporter-file=report-junit.xml generated-tests/", + "test:integration": "npm run test:integration-build && env tap run --jobs=1 --reporter=junit --reporter-file=report-junit.xml generated-tests/", "lint": "ts-standard src", "lint:fix": "ts-standard --fix src", "license-checker": "license-checker --production --onlyAllow='MIT;Apache-2.0;Apache1.1;ISC;BSD-3-Clause;BSD-2-Clause;0BSD'", From c789bb101f804bfcd4a4270c6602e77f569b9e0e Mon Sep 17 00:00:00 2001 From: Josh Mock Date: Mon, 21 Apr 2025 14:00:02 -0500 Subject: [PATCH 20/22] Adjustments to CI agent specs --- .buildkite/pipeline.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.buildkite/pipeline.yml b/.buildkite/pipeline.yml 
index 7b9b3e75e..5c1439376 100644 --- a/.buildkite/pipeline.yml +++ b/.buildkite/pipeline.yml @@ -1,8 +1,11 @@ --- +agents: + image: family/core-ubuntu-2204 + memory: "8G" + cpu: "2" + steps: - label: ":elasticsearch: :javascript: ES JavaScript ({{ matrix.nodejs }})" - agents: - provider: "gcp" env: NODE_VERSION: "{{ matrix.nodejs }}" TEST_SUITE: "platinum" @@ -20,9 +23,6 @@ steps: - wait: ~ continue_on_failure: true - label: ":junit: Test results" - agents: - provider: "gcp" - image: family/core-ubuntu-2204 plugins: - junit-annotate#v2.6.0: artifacts: "junit-output/junit-*.xml" From d806f41986be1c2c53bec574f82153b0e10e58d5 Mon Sep 17 00:00:00 2001 From: Josh Mock Date: Mon, 21 Apr 2025 14:09:34 -0500 Subject: [PATCH 21/22] Don't commit downloaded artifacts --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index 2d7e63d04..07e49ff7b 100644 --- a/.gitignore +++ b/.gitignore @@ -71,3 +71,4 @@ processinfo rest-api-spec yaml-rest-tests generated-tests +schema From 54b6f291f717ed82e9dcd97ab4acf05d3679e234 Mon Sep 17 00:00:00 2001 From: Josh Mock Date: Mon, 21 Apr 2025 14:11:20 -0500 Subject: [PATCH 22/22] Use GCP as agent provider --- .buildkite/pipeline.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.buildkite/pipeline.yml b/.buildkite/pipeline.yml index 5c1439376..8a7e176b1 100644 --- a/.buildkite/pipeline.yml +++ b/.buildkite/pipeline.yml @@ -1,5 +1,6 @@ --- agents: + provider: "gcp" image: family/core-ubuntu-2204 memory: "8G" cpu: "2"