Mirror of https://github.com/elastic/kibana.git (synced 2025-04-24 01:38:56 -04:00)
[ftr] add first-class support for playwright journeys (#140680)
* [ftr] add first-class support for playwright journeys
* [CI] Auto-commit changed files from 'node scripts/generate codeowners'
* fix jest test
* remove ability to customize kibana server args, if we need it we can add it back
* remove dev dir that doesn't exist
* fix typo
* prevent duplicated array conversion logic by sharing flag reader
* remove destructuring of option
* fix scalability config and config_path import
* fix start_servers args and tests
* include simple readme
* fix jest tests and support build re-use when changes are just to jest tests

Co-authored-by: kibanamachine <42973632+kibanamachine@users.noreply.github.com>
Parent: 2bc9b77e9c
Commit: 50b3b57d9e
197 changed files with 4828 additions and 4096 deletions
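For context on the layout change below: each journey is now a standalone TypeScript file under x-pack/performance/journeys/ instead of a per-journey FTR config directory under x-pack/test/performance/journeys/. A minimal sketch of what such a file might look like follows; the Journey builder API from @kbn/journeys and the option/step names here are assumptions for illustration, not code taken from this diff.

import { Journey } from '@kbn/journeys';

// Hypothetical journey file, e.g. x-pack/performance/journeys/example_dashboard.ts.
export const journey = new Journey({
  // archive to load before the journey runs (assumed option name)
  kbnArchives: ['x-pack/performance/kbn_archives/example_dashboard'],
})
  .step('Go to Dashboards Page', async ({ page, kbnUrl }) => {
    await page.goto(kbnUrl.get('/app/dashboards'));
  })
  .step('Open the example dashboard', async ({ page }) => {
    await page.click('text="Example Dashboard"');
  });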
@@ -56,7 +56,7 @@ disabled:
  - x-pack/test/fleet_packages/config.ts

  # Scalability testing config that we run in its own pipeline
  - x-pack/test/performance/scalability/config.ts
  - x-pack/test/scalability/config.ts

defaultQueue: 'n2-4-spot'
enabled:

@@ -267,10 +267,10 @@ enabled:
  - x-pack/test/ui_capabilities/spaces_only/config.ts
  - x-pack/test/upgrade_assistant_integration/config.js
  - x-pack/test/usage_collection/config.ts
  - x-pack/test/performance/journeys/ecommerce_dashboard/config.ts
  - x-pack/test/performance/journeys/flight_dashboard/config.ts
  - x-pack/test/performance/journeys/login/config.ts
  - x-pack/test/performance/journeys/many_fields_discover/config.ts
  - x-pack/test/performance/journeys/promotion_tracking_dashboard/config.ts
  - x-pack/test/performance/journeys/web_logs_dashboard/config.ts
  - x-pack/test/performance/journeys/data_stress_test_lens/config.ts
  - x-pack/performance/journeys/ecommerce_dashboard.ts
  - x-pack/performance/journeys/flight_dashboard.ts
  - x-pack/performance/journeys/login.ts
  - x-pack/performance/journeys/many_fields_discover.ts
  - x-pack/performance/journeys/promotion_tracking_dashboard.ts
  - x-pack/performance/journeys/web_logs_dashboard.ts
  - x-pack/performance/journeys/data_stress_test_lens.ts
@@ -1,19 +1,19 @@
steps:
  - label: ':male-mechanic::skin-tone-2: Pre-Build'
  - label: '👨🔧 Pre-Build'
    command: .buildkite/scripts/lifecycle/pre_build.sh
    agents:
      queue: kibana-default

  - wait

  - label: ':factory_worker: Build Kibana Distribution and Plugins'
  - label: '🧑🏭 Build Kibana Distribution and Plugins'
    command: .buildkite/scripts/steps/build_kibana.sh
    agents:
      queue: c2-16
    key: build
    if: "build.env('KIBANA_BUILD_ID') == null || build.env('KIBANA_BUILD_ID') == ''"

  - label: ':muscle: Performance Tests with Playwright config'
  - label: '💪 Performance Tests with Playwright config'
    command: .buildkite/scripts/steps/functional/performance_playwright.sh
    agents:
      queue: kb-static-ubuntu

@@ -21,13 +21,13 @@ steps:
    key: tests
    timeout_in_minutes: 60

  - label: ':shipit: Performance Tests dataset extraction for scalability benchmarking'
  - label: '🚢 Performance Tests dataset extraction for scalability benchmarking'
    command: .buildkite/scripts/steps/functional/scalability_dataset_extraction.sh
    agents:
      queue: n2-2
    depends_on: tests

  - label: ':chart_with_upwards_trend: Report performance metrics to ci-stats'
  - label: '📈 Report performance metrics to ci-stats'
    command: .buildkite/scripts/steps/functional/report_performance_metrics.sh
    agents:
      queue: n2-2

@@ -36,7 +36,7 @@ steps:
  - wait: ~
    continue_on_failure: true

  - label: ':male_superhero::skin-tone-2: Post-Build'
  - label: '🦸 Post-Build'
    command: .buildkite/scripts/lifecycle/post_build.sh
    agents:
      queue: kibana-default
@@ -42,7 +42,12 @@
      "kibana_versions_check": true,
      "kibana_build_reuse": true,
      "kibana_build_reuse_pipeline_slugs": ["kibana-pull-request", "kibana-on-merge"],
      "kibana_build_reuse_regexes": ["^test/", "^x-pack/test/"]
      "kibana_build_reuse_regexes": [
        "^test/",
        "^x-pack/test/",
        "/__snapshots__/",
        "\\.test\\.(ts|tsx|js|jsx)"
      ]
    }
  ]
}
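The expanded kibana_build_reuse_regexes list is what lets CI skip rebuilding the distribution when a change only touches test code (per the commit note about build re-use for jest-only changes): presumably a build is reused when every changed path matches at least one of these patterns. A self-contained TypeScript sketch of that matching logic follows, written only to illustrate the config; the real check lives in the Buildkite scripts and is not part of this file.

// Illustrative mirror of the kibana_build_reuse_regexes entries above.
const buildReuseRegexes = [/^test\//, /^x-pack\/test\//, /\/__snapshots__\//, /\.test\.(ts|tsx|js|jsx)/];

function canReuseBuild(changedPaths: string[]): boolean {
  // Reuse is only safe if every changed file matches one of the "test-only" patterns.
  return changedPaths.every((path) => buildReuseRegexes.some((re) => re.test(path)));
}

console.log(canReuseBuild(['x-pack/plugins/lens/common/utils.test.ts'])); // true
console.log(canReuseBuild(['src/core/server/http/http_server.ts'])); // false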
@@ -4,16 +4,33 @@ set -euo pipefail

source .buildkite/scripts/common/util.sh

is_test_execution_step

.buildkite/scripts/bootstrap.sh
# These tests are running on static workers so we have to make sure we delete the previous build of Kibana
rm -rf "$KIBANA_BUILD_LOCATION"
.buildkite/scripts/download_build_artifacts.sh

echo --- Run Performance Tests with Playwright config
echo "--- 🦺 Starting Elasticsearch"

node scripts/es snapshot&
export esPid=$!
trap 'kill ${esPid}' EXIT

esPid=$!
export TEST_ES_URL=http://elastic:changeme@localhost:9200
export TEST_ES_DISABLE_STARTUP=true

# Pings the es server every second for up to 2 minutes until it is green
curl \
  --fail \
  --silent \
  --retry 120 \
  --retry-delay 1 \
  --retry-connrefused \
  -XGET "${TEST_ES_URL}/_cluster/health?wait_for_nodes=>=1&wait_for_status=yellow" \
  > /dev/null

echo "✅ ES is ready and will continue to run in the background"

# unset env vars defined in other parts of CI for automatic APM collection of
# Kibana. We manage APM config in our FTR config and performance service, and

@@ -29,39 +46,27 @@ unset ELASTIC_APM_SERVER_URL
unset ELASTIC_APM_SECRET_TOKEN
unset ELASTIC_APM_GLOBAL_LABELS

for journey in x-pack/performance/journeys/*; do
set +e

export TEST_ES_URL=http://elastic:changeme@localhost:9200
export TEST_ES_DISABLE_STARTUP=true
phases=("WARMUP" "TEST")
for phase in "${phases[@]}"; do
echo "--- $journey - $phase"

# Pings the es server every second for 2 mins until its status is green
curl --retry 120 \
  --retry-delay 1 \
  --retry-all-errors \
  -I -XGET "${TEST_ES_URL}/_cluster/health?wait_for_nodes=>=1&wait_for_status=yellow"
export TEST_PERFORMANCE_PHASE="$phase"
node scripts/functional_tests \
  --config "$journey" \
  --kibana-install-dir "$KIBANA_BUILD_LOCATION" \
  --debug \
  --bail

journeys=("login" "ecommerce_dashboard" "flight_dashboard" "web_logs_dashboard" "promotion_tracking_dashboard" "many_fields_discover" "data_stress_test_lens")
status=$?
if [ $status -ne 0 ]; then
echo "^^^ +++"
echo "❌ FTR failed with status code: $status"
exit 1
fi
done

for i in "${journeys[@]}"; do
echo "JOURNEY[${i}] is running"

export TEST_PERFORMANCE_PHASE=WARMUP
export JOURNEY_NAME="${i}"

checks-reporter-with-killswitch "Run Performance Tests with Playwright Config (Journey:${i},Phase: WARMUP)" \
  node scripts/functional_tests \
  --config "x-pack/test/performance/journeys/${i}/config.ts" \
  --kibana-install-dir "$KIBANA_BUILD_LOCATION" \
  --debug \
  --bail

export TEST_PERFORMANCE_PHASE=TEST

checks-reporter-with-killswitch "Run Performance Tests with Playwright Config (Journey:${i},Phase: TEST)" \
  node scripts/functional_tests \
  --config "x-pack/test/performance/journeys/${i}/config.ts" \
  --kibana-install-dir "$KIBANA_BUILD_LOCATION" \
  --debug \
  --bail
set -e
done

kill "$esPid"
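The curl retry loop above only needs Elasticsearch to answer the cluster-health API with at least yellow status before the journeys start. For readers more comfortable in TypeScript, an equivalent readiness poll is sketched below; it assumes a Node 18+ runtime with global fetch and is illustration only, the CI script itself uses curl.

// Illustrative TypeScript equivalent of the script's ES readiness check.
const esUrl = new URL(process.env.TEST_ES_URL ?? 'http://elastic:changeme@localhost:9200');
const basicAuth = Buffer.from(`${esUrl.username}:${esUrl.password}`).toString('base64');

async function waitForEs(timeoutSeconds = 120): Promise<void> {
  for (let attempt = 0; attempt < timeoutSeconds; attempt++) {
    try {
      // fetch() rejects URLs with embedded credentials, so they are sent as a Basic auth header.
      const res = await fetch(
        `${esUrl.origin}/_cluster/health?wait_for_nodes=>=1&wait_for_status=yellow`,
        { headers: { authorization: `Basic ${basicAuth}` } }
      );
      if (res.ok) return; // the cluster reported at least yellow status
    } catch {
      // connection refused while Elasticsearch is still starting; keep retrying
    }
    await new Promise((resolve) => setTimeout(resolve, 1000)); // retry once per second
  }
  throw new Error(`Elasticsearch was not ready after ${timeoutSeconds} seconds`);
}

waitForEs().then(() => console.log('✅ ES is ready'));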
@@ -15,19 +15,16 @@ OUTPUT_DIR="${KIBANA_DIR}/${OUTPUT_REL}"
.buildkite/scripts/bootstrap.sh

echo "--- Extract APM metrics"
scalabilityJourneys=("login" "ecommerce_dashboard" "flight_dashboard" "web_logs_dashboard" "promotion_tracking_dashboard" "many_fields_discover")
for journey in x-pack/performance/journeys/*; do
echo "Looking for journey=${journey} and BUILD_ID=${BUILD_ID} in APM traces"

for i in "${scalabilityJourneys[@]}"; do
JOURNEY_NAME="${i}"
echo "Looking for JOURNEY=${JOURNEY_NAME} and BUILD_ID=${BUILD_ID} in APM traces"

node scripts/extract_performance_testing_dataset \
  --config "x-pack/test/performance/journeys/${i}/config.ts" \
  --buildId "${BUILD_ID}" \
  --es-url "${ES_SERVER_URL}" \
  --es-username "${USER_FROM_VAULT}" \
  --es-password "${PASS_FROM_VAULT}" \
  --without-static-resources
node scripts/extract_performance_testing_dataset \
  --config "${journey}" \
  --buildId "${BUILD_ID}" \
  --es-url "${ES_SERVER_URL}" \
  --es-username "${USER_FROM_VAULT}" \
  --es-password "${PASS_FROM_VAULT}" \
  --without-static-resources
done

echo "--- Creating scalability dataset in ${OUTPUT_REL}"
@@ -178,6 +178,7 @@ const DEV_PATTERNS = [
  'x-pack/{dev-tools,tasks,scripts,test,build_chromium}/**/*',
  'x-pack/plugins/*/server/scripts/**/*',
  'x-pack/plugins/fleet/cypress',
  'x-pack/performance/**/*',
];

/** Restricted imports with suggested alternatives */
4 .github/CODEOWNERS (vendored)
|
@ -861,9 +861,12 @@ packages/kbn-eslint-plugin-disable @elastic/kibana-operations
|
|||
packages/kbn-eslint-plugin-eslint @elastic/kibana-operations
|
||||
packages/kbn-eslint-plugin-imports @elastic/kibana-operations
|
||||
packages/kbn-expect @elastic/kibana-operations
|
||||
packages/kbn-failed-test-reporter-cli @elastic/kibana-operations
|
||||
packages/kbn-field-types @elastic/kibana-app-services
|
||||
packages/kbn-find-used-node-modules @elastic/kibana-operations
|
||||
packages/kbn-flot-charts @elastic/kibana-operations
|
||||
packages/kbn-ftr-common-functional-services @elastic/kibana-operations
|
||||
packages/kbn-ftr-screenshot-filename @elastic/kibana-operations
|
||||
packages/kbn-generate @elastic/kibana-operations
|
||||
packages/kbn-get-repo-files @elastic/kibana-operations
|
||||
packages/kbn-handlebars @elastic/kibana-security
|
||||
|
@ -874,6 +877,7 @@ packages/kbn-import-resolver @elastic/kibana-operations
|
|||
packages/kbn-interpreter @elastic/kibana-app-services
|
||||
packages/kbn-io-ts-utils @elastic/apm-ui
|
||||
packages/kbn-jest-serializers @elastic/kibana-operations
|
||||
packages/kbn-journeys @elastic/kibana-operations
|
||||
packages/kbn-kibana-manifest-schema @elastic/kibana-operations
|
||||
packages/kbn-logging @elastic/kibana-core
|
||||
packages/kbn-logging-mocks @elastic/kibana-core
|
||||
|
|
|
@@ -26,6 +26,7 @@ layout: landing
  { pageId: "kibDevDocsOpsWritingStableFunctionalTests" },
  { pageId: "kibDevDocsOpsFlakyTestRunner" },
  { pageId: "kibDevDocsOpsCiStats" },
  { pageId: "kibDevDocsOpsJourneys" },
]}
/>
@ -702,11 +702,15 @@
|
|||
"@kbn/eslint-plugin-eslint": "link:bazel-bin/packages/kbn-eslint-plugin-eslint",
|
||||
"@kbn/eslint-plugin-imports": "link:bazel-bin/packages/kbn-eslint-plugin-imports",
|
||||
"@kbn/expect": "link:bazel-bin/packages/kbn-expect",
|
||||
"@kbn/failed-test-reporter-cli": "link:bazel-bin/packages/kbn-failed-test-reporter-cli",
|
||||
"@kbn/find-used-node-modules": "link:bazel-bin/packages/kbn-find-used-node-modules",
|
||||
"@kbn/ftr-common-functional-services": "link:bazel-bin/packages/kbn-ftr-common-functional-services",
|
||||
"@kbn/ftr-screenshot-filename": "link:bazel-bin/packages/kbn-ftr-screenshot-filename",
|
||||
"@kbn/generate": "link:bazel-bin/packages/kbn-generate",
|
||||
"@kbn/get-repo-files": "link:bazel-bin/packages/kbn-get-repo-files",
|
||||
"@kbn/import-resolver": "link:bazel-bin/packages/kbn-import-resolver",
|
||||
"@kbn/jest-serializers": "link:bazel-bin/packages/kbn-jest-serializers",
|
||||
"@kbn/journeys": "link:bazel-bin/packages/kbn-journeys",
|
||||
"@kbn/kibana-manifest-schema": "link:bazel-bin/packages/kbn-kibana-manifest-schema",
|
||||
"@kbn/managed-vscode-config": "link:bazel-bin/packages/kbn-managed-vscode-config",
|
||||
"@kbn/managed-vscode-config-cli": "link:bazel-bin/packages/kbn-managed-vscode-config-cli",
|
||||
|
@ -1017,8 +1021,11 @@
|
|||
"@types/kbn__es-types": "link:bazel-bin/packages/kbn-es-types/npm_module_types",
|
||||
"@types/kbn__eslint-plugin-disable": "link:bazel-bin/packages/kbn-eslint-plugin-disable/npm_module_types",
|
||||
"@types/kbn__eslint-plugin-imports": "link:bazel-bin/packages/kbn-eslint-plugin-imports/npm_module_types",
|
||||
"@types/kbn__failed-test-reporter-cli": "link:bazel-bin/packages/kbn-failed-test-reporter-cli/npm_module_types",
|
||||
"@types/kbn__field-types": "link:bazel-bin/packages/kbn-field-types/npm_module_types",
|
||||
"@types/kbn__find-used-node-modules": "link:bazel-bin/packages/kbn-find-used-node-modules/npm_module_types",
|
||||
"@types/kbn__ftr-common-functional-services": "link:bazel-bin/packages/kbn-ftr-common-functional-services/npm_module_types",
|
||||
"@types/kbn__ftr-screenshot-filename": "link:bazel-bin/packages/kbn-ftr-screenshot-filename/npm_module_types",
|
||||
"@types/kbn__generate": "link:bazel-bin/packages/kbn-generate/npm_module_types",
|
||||
"@types/kbn__get-repo-files": "link:bazel-bin/packages/kbn-get-repo-files/npm_module_types",
|
||||
"@types/kbn__handlebars": "link:bazel-bin/packages/kbn-handlebars/npm_module_types",
|
||||
|
@ -1032,6 +1039,7 @@
|
|||
"@types/kbn__interpreter": "link:bazel-bin/packages/kbn-interpreter/npm_module_types",
|
||||
"@types/kbn__io-ts-utils": "link:bazel-bin/packages/kbn-io-ts-utils/npm_module_types",
|
||||
"@types/kbn__jest-serializers": "link:bazel-bin/packages/kbn-jest-serializers/npm_module_types",
|
||||
"@types/kbn__journeys": "link:bazel-bin/packages/kbn-journeys/npm_module_types",
|
||||
"@types/kbn__kbn-ci-stats-performance-metrics": "link:bazel-bin/packages/kbn-kbn-ci-stats-performance-metrics/npm_module_types",
|
||||
"@types/kbn__kibana-manifest-schema": "link:bazel-bin/packages/kbn-kibana-manifest-schema/npm_module_types",
|
||||
"@types/kbn__logging": "link:bazel-bin/packages/kbn-logging/npm_module_types",
|
||||
|
|
|
@ -204,9 +204,12 @@ filegroup(
|
|||
"//packages/kbn-eslint-plugin-eslint:build",
|
||||
"//packages/kbn-eslint-plugin-imports:build",
|
||||
"//packages/kbn-expect:build",
|
||||
"//packages/kbn-failed-test-reporter-cli:build",
|
||||
"//packages/kbn-field-types:build",
|
||||
"//packages/kbn-find-used-node-modules:build",
|
||||
"//packages/kbn-flot-charts:build",
|
||||
"//packages/kbn-ftr-common-functional-services:build",
|
||||
"//packages/kbn-ftr-screenshot-filename:build",
|
||||
"//packages/kbn-generate:build",
|
||||
"//packages/kbn-get-repo-files:build",
|
||||
"//packages/kbn-handlebars:build",
|
||||
|
@ -217,6 +220,7 @@ filegroup(
|
|||
"//packages/kbn-interpreter:build",
|
||||
"//packages/kbn-io-ts-utils:build",
|
||||
"//packages/kbn-jest-serializers:build",
|
||||
"//packages/kbn-journeys:build",
|
||||
"//packages/kbn-kibana-manifest-schema:build",
|
||||
"//packages/kbn-logging:build",
|
||||
"//packages/kbn-logging-mocks:build",
|
||||
|
@ -514,8 +518,11 @@ filegroup(
|
|||
"//packages/kbn-es-types:build_types",
|
||||
"//packages/kbn-eslint-plugin-disable:build_types",
|
||||
"//packages/kbn-eslint-plugin-imports:build_types",
|
||||
"//packages/kbn-failed-test-reporter-cli:build_types",
|
||||
"//packages/kbn-field-types:build_types",
|
||||
"//packages/kbn-find-used-node-modules:build_types",
|
||||
"//packages/kbn-ftr-common-functional-services:build_types",
|
||||
"//packages/kbn-ftr-screenshot-filename:build_types",
|
||||
"//packages/kbn-generate:build_types",
|
||||
"//packages/kbn-get-repo-files:build_types",
|
||||
"//packages/kbn-handlebars:build_types",
|
||||
|
@ -526,6 +533,7 @@ filegroup(
|
|||
"//packages/kbn-interpreter:build_types",
|
||||
"//packages/kbn-io-ts-utils:build_types",
|
||||
"//packages/kbn-jest-serializers:build_types",
|
||||
"//packages/kbn-journeys:build_types",
|
||||
"//packages/kbn-kibana-manifest-schema:build_types",
|
||||
"//packages/kbn-logging:build_types",
|
||||
"//packages/kbn-logging-mocks:build_types",
|
||||
|
|
|
@@ -9,4 +9,5 @@
export * from './src/run';
export * from './src/run_with_commands';
export * from './src/flags';
export * from './src/flags_reader';
export type { CleanupTask } from './src/cleanup';

@@ -53,6 +53,10 @@ export function mergeFlagOptions(global: FlagOptions = {}, local: FlagOptions =
  };
}

export const DEFAULT_FLAG_ALIASES = {
  v: 'verbose',
};

export function getFlags(
  argv: string[],
  flagOptions: RunOptions['flags'] = {},

@@ -67,7 +71,7 @@ export function getFlags(
    boolean: [...(flagOptions.boolean || []), ...logLevelFlags, 'help'],
    alias: {
      ...flagOptions.alias,
      v: 'verbose',
      ...DEFAULT_FLAG_ALIASES,
    },
    default: flagOptions.default,
    unknown: (name: string) => {
344 packages/kbn-dev-cli-runner/src/flags_reader.test.ts (new file)
|
@ -0,0 +1,344 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { createAbsolutePathSerializer } from '@kbn/jest-serializers';
|
||||
|
||||
import { getFlags } from './flags';
|
||||
import { FlagsReader } from './flags_reader';
|
||||
|
||||
const FLAGS = {
|
||||
string: 'string',
|
||||
astring: ['foo', 'bar'],
|
||||
num: '1234',
|
||||
bool: true,
|
||||
missing: undefined,
|
||||
};
|
||||
|
||||
const basic = new FlagsReader(FLAGS);
|
||||
|
||||
expect.addSnapshotSerializer(createAbsolutePathSerializer());
|
||||
|
||||
describe('#string()', () => {
|
||||
it('returns a single string, regardless of flag count', () => {
|
||||
expect(basic.string('string')).toMatchInlineSnapshot(`"string"`);
|
||||
expect(basic.string('astring')).toBe(FLAGS.astring.at(-1));
|
||||
});
|
||||
|
||||
it('returns undefined when flag is missing', () => {
|
||||
expect(basic.string('missing')).toMatchInlineSnapshot(`undefined`);
|
||||
});
|
||||
|
||||
it('throws for non-string flags', () => {
|
||||
expect(() => basic.string('bool')).toThrowErrorMatchingInlineSnapshot(
|
||||
`"expected --bool to be a string"`
|
||||
);
|
||||
});
|
||||
|
||||
describe('required version', () => {
|
||||
it('throws when flag is missing', () => {
|
||||
expect(() => basic.requiredString('missing')).toThrowErrorMatchingInlineSnapshot(
|
||||
`"missing required flag --missing"`
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('#arrayOfStrings()', () => {
|
||||
it('returns an array of strings for string flags, regardless of count', () => {
|
||||
expect(basic.arrayOfStrings('string')).toMatchInlineSnapshot(`
|
||||
Array [
|
||||
"string",
|
||||
]
|
||||
`);
|
||||
expect(basic.arrayOfStrings('astring')).toMatchInlineSnapshot(`
|
||||
Array [
|
||||
"foo",
|
||||
"bar",
|
||||
]
|
||||
`);
|
||||
});
|
||||
|
||||
it('returns undefined when flag is missing', () => {
|
||||
expect(basic.arrayOfStrings('missing')).toMatchInlineSnapshot(`undefined`);
|
||||
});
|
||||
|
||||
it('throws for non-string flags', () => {
|
||||
expect(() => basic.arrayOfStrings('bool')).toThrowErrorMatchingInlineSnapshot(
|
||||
`"expected --bool to be a string"`
|
||||
);
|
||||
});
|
||||
|
||||
describe('required version', () => {
|
||||
it('throws when flag is missing', () => {
|
||||
expect(() => basic.requiredArrayOfStrings('missing')).toThrowErrorMatchingInlineSnapshot(
|
||||
`"missing required flag --missing"`
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('#enum()', () => {
|
||||
it('validates that values match options', () => {
|
||||
expect(basic.enum('string', ['a', 'string', 'b'])).toMatchInlineSnapshot(`"string"`);
|
||||
expect(basic.enum('missing', ['a', 'b'])).toMatchInlineSnapshot(`undefined`);
|
||||
expect(() => basic.enum('string', ['a', 'b'])).toThrowErrorMatchingInlineSnapshot(
|
||||
`"invalid --string, expected one of \\"a\\", \\"b\\""`
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('#path()', () => {
|
||||
it('parses the string to an absolute path based on CWD', () => {
|
||||
expect(basic.path('string')).toMatchInlineSnapshot(`<absolute path>/string`);
|
||||
expect(basic.path('missing')).toMatchInlineSnapshot(`undefined`);
|
||||
});
|
||||
|
||||
describe('required version', () => {
|
||||
it('throws if the flag is missing', () => {
|
||||
expect(() => basic.requiredPath('missing')).toThrowErrorMatchingInlineSnapshot(
|
||||
`"missing required flag --missing"`
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('array version', () => {
|
||||
it('parses a list of paths', () => {
|
||||
expect(basic.arrayOfPaths('astring')).toMatchInlineSnapshot(`
|
||||
Array [
|
||||
<absolute path>/foo,
|
||||
<absolute path>/bar,
|
||||
]
|
||||
`);
|
||||
});
|
||||
|
||||
describe('required version', () => {
|
||||
it('throws if the flag is missing', () => {
|
||||
expect(() => basic.requiredArrayOfPaths('missing')).toThrowErrorMatchingInlineSnapshot(
|
||||
`"missing required flag --missing"`
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('#number()', () => {
|
||||
it('parses strings as numbers', () => {
|
||||
expect(basic.number('num')).toMatchInlineSnapshot(`1234`);
|
||||
expect(basic.number('missing')).toMatchInlineSnapshot(`undefined`);
|
||||
expect(() => basic.number('bool')).toThrowErrorMatchingInlineSnapshot(
|
||||
`"expected --bool to be a string"`
|
||||
);
|
||||
expect(() => basic.number('string')).toThrowErrorMatchingInlineSnapshot(
|
||||
`"unable to parse --string value [string] as a number"`
|
||||
);
|
||||
expect(() => basic.number('astring')).toThrowErrorMatchingInlineSnapshot(
|
||||
`"unable to parse --astring value [bar] as a number"`
|
||||
);
|
||||
});
|
||||
|
||||
describe('required version', () => {
|
||||
it('throws if the flag is missing', () => {
|
||||
expect(() => basic.requiredNumber('missing')).toThrowErrorMatchingInlineSnapshot(
|
||||
`"missing required flag --missing"`
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('#boolean()', () => {
|
||||
it('ensures flag is boolean, requires value', () => {
|
||||
expect(basic.boolean('bool')).toMatchInlineSnapshot(`true`);
|
||||
expect(() => basic.boolean('missing')).toThrowErrorMatchingInlineSnapshot(
|
||||
`"expected --missing to be a boolean"`
|
||||
);
|
||||
expect(() => basic.boolean('string')).toThrowErrorMatchingInlineSnapshot(
|
||||
`"expected --string to be a boolean"`
|
||||
);
|
||||
expect(() => basic.boolean('astring')).toThrowErrorMatchingInlineSnapshot(
|
||||
`"expected --astring to be a boolean"`
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('#getPositionals()', () => {
|
||||
it('returns all positional arguments in flags', () => {
|
||||
const flags = new FlagsReader({
|
||||
...FLAGS,
|
||||
_: ['a', 'b', 'c'],
|
||||
});
|
||||
|
||||
expect(flags.getPositionals()).toMatchInlineSnapshot(`
|
||||
Array [
|
||||
"a",
|
||||
"b",
|
||||
"c",
|
||||
]
|
||||
`);
|
||||
});
|
||||
|
||||
it('handles missing _ flag', () => {
|
||||
const flags = new FlagsReader({});
|
||||
expect(flags.getPositionals()).toMatchInlineSnapshot(`Array []`);
|
||||
});
|
||||
});
|
||||
|
||||
describe('#getUnused()', () => {
|
||||
it('returns a map of all unused flags', () => {
|
||||
const flags = new FlagsReader({
|
||||
a: '1',
|
||||
b: '2',
|
||||
c: '3',
|
||||
});
|
||||
|
||||
expect(flags.getUnused()).toMatchInlineSnapshot(`
|
||||
Map {
|
||||
"a" => "1",
|
||||
"b" => "2",
|
||||
"c" => "3",
|
||||
}
|
||||
`);
|
||||
|
||||
flags.number('a');
|
||||
flags.number('b');
|
||||
|
||||
expect(flags.getUnused()).toMatchInlineSnapshot(`
|
||||
Map {
|
||||
"c" => "3",
|
||||
}
|
||||
`);
|
||||
});
|
||||
|
||||
it('ignores the default flags which are forced on commands', () => {
|
||||
const rawFlags = getFlags(['--a=1'], {
|
||||
string: ['a'],
|
||||
});
|
||||
|
||||
const flags = new FlagsReader(rawFlags, {
|
||||
aliases: {
|
||||
v: 'verbose',
|
||||
},
|
||||
});
|
||||
|
||||
expect(flags.getUnused()).toMatchInlineSnapshot(`
|
||||
Map {
|
||||
"a" => "1",
|
||||
}
|
||||
`);
|
||||
flags.number('a');
|
||||
expect(flags.getUnused()).toMatchInlineSnapshot(`Map {}`);
|
||||
});
|
||||
|
||||
it('treats aliased flags as used', () => {
|
||||
const flags = new FlagsReader(
|
||||
{
|
||||
f: true,
|
||||
force: true,
|
||||
v: true,
|
||||
verbose: true,
|
||||
},
|
||||
{
|
||||
aliases: {
|
||||
f: 'force',
|
||||
v: 'verbose',
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
expect(flags.getUnused()).toMatchInlineSnapshot(`
|
||||
Map {
|
||||
"f" => true,
|
||||
"force" => true,
|
||||
}
|
||||
`);
|
||||
flags.boolean('force');
|
||||
expect(flags.getUnused()).toMatchInlineSnapshot(`Map {}`);
|
||||
flags.boolean('v');
|
||||
expect(flags.getUnused()).toMatchInlineSnapshot(`Map {}`);
|
||||
});
|
||||
|
||||
it('treats failed reads as "uses"', () => {
|
||||
const flags = new FlagsReader({ a: 'b' });
|
||||
|
||||
expect(flags.getUnused()).toMatchInlineSnapshot(`
|
||||
Map {
|
||||
"a" => "b",
|
||||
}
|
||||
`);
|
||||
expect(() => flags.number('a')).toThrowError();
|
||||
expect(flags.getUnused()).toMatchInlineSnapshot(`Map {}`);
|
||||
});
|
||||
});
|
||||
|
||||
describe('#getUsed()', () => {
|
||||
it('returns a map of all used flags', () => {
|
||||
const flags = new FlagsReader({
|
||||
a: '1',
|
||||
b: '2',
|
||||
c: '3',
|
||||
});
|
||||
|
||||
expect(flags.getUsed()).toMatchInlineSnapshot(`Map {}`);
|
||||
|
||||
flags.number('a');
|
||||
flags.number('b');
|
||||
|
||||
expect(flags.getUsed()).toMatchInlineSnapshot(`
|
||||
Map {
|
||||
"a" => "1",
|
||||
"b" => "2",
|
||||
}
|
||||
`);
|
||||
});
|
||||
|
||||
it('treats aliased flags as used', () => {
|
||||
const flags = new FlagsReader(
|
||||
{
|
||||
f: true,
|
||||
force: true,
|
||||
v: true,
|
||||
verbose: true,
|
||||
},
|
||||
{
|
||||
aliases: {
|
||||
f: 'force',
|
||||
v: 'verbose',
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
expect(flags.getUsed()).toMatchInlineSnapshot(`Map {}`);
|
||||
flags.boolean('force');
|
||||
expect(flags.getUsed()).toMatchInlineSnapshot(`
|
||||
Map {
|
||||
"force" => true,
|
||||
"f" => true,
|
||||
}
|
||||
`);
|
||||
flags.boolean('v');
|
||||
expect(flags.getUsed()).toMatchInlineSnapshot(`
|
||||
Map {
|
||||
"force" => true,
|
||||
"f" => true,
|
||||
"v" => true,
|
||||
"verbose" => true,
|
||||
}
|
||||
`);
|
||||
});
|
||||
|
||||
it('treats failed reads as "uses"', () => {
|
||||
const flags = new FlagsReader({ a: 'b' });
|
||||
|
||||
expect(flags.getUsed()).toMatchInlineSnapshot(`Map {}`);
|
||||
expect(() => flags.number('a')).toThrowError();
|
||||
expect(flags.getUsed()).toMatchInlineSnapshot(`
|
||||
Map {
|
||||
"a" => "b",
|
||||
}
|
||||
`);
|
||||
});
|
||||
});
|
267 packages/kbn-dev-cli-runner/src/flags_reader.ts (new file)
|
@ -0,0 +1,267 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import Path from 'path';
|
||||
|
||||
import { createFlagError } from '@kbn/dev-cli-errors';
|
||||
import { LOG_LEVEL_FLAGS } from '@kbn/tooling-log';
|
||||
|
||||
type FlagValue = string | string[] | boolean;
|
||||
const FORCED_FLAGS = new Set([...LOG_LEVEL_FLAGS.map((l) => l.name), 'help']);
|
||||
|
||||
const makeAbsolute = (rel: string) => Path.resolve(process.cwd(), rel);
|
||||
|
||||
const nonUndefinedValues = (e: [string, FlagValue | undefined]): e is [string, FlagValue] =>
|
||||
e[1] !== undefined;
|
||||
|
||||
export class FlagsReader {
|
||||
private readonly used: Map<string, FlagValue>;
|
||||
private readonly unused: Map<string, FlagValue>;
|
||||
private readonly _: string[];
|
||||
private readonly aliasMap: Map<string, string>;
|
||||
|
||||
constructor(
|
||||
flags: Record<string, FlagValue | undefined>,
|
||||
private readonly opts?: { aliases?: Record<string, string> }
|
||||
) {
|
||||
this.used = new Map();
|
||||
this.unused = new Map(
|
||||
Object.entries(flags)
|
||||
.filter(nonUndefinedValues)
|
||||
.filter((e) => e[0] !== 'unexpected')
|
||||
);
|
||||
this.aliasMap = new Map(
|
||||
Object.entries(this.opts?.aliases ?? []).flatMap(([a, b]) => [
|
||||
[a, b],
|
||||
[b, a],
|
||||
])
|
||||
);
|
||||
|
||||
this._ = this.arrayOfStrings('_') ?? [];
|
||||
}
|
||||
|
||||
private use(key: string) {
|
||||
const alias = this.aliasMap.get(key);
|
||||
|
||||
const used = this.used.get(key);
|
||||
if (used !== undefined) {
|
||||
return used;
|
||||
}
|
||||
|
||||
const unused = this.unused.get(key);
|
||||
if (unused !== undefined) {
|
||||
this.used.set(key, unused);
|
||||
this.unused.delete(key);
|
||||
|
||||
if (alias !== undefined) {
|
||||
this.used.set(alias, unused);
|
||||
this.unused.delete(alias);
|
||||
}
|
||||
}
|
||||
|
||||
return unused;
|
||||
}
|
||||
|
||||
/**
|
||||
* Read a string flag that supports multiple instances into an array of strings. If the
|
||||
* flag is only passed once an array with a single item will be returned. If the flag is not
|
||||
* passed then undefined will be returned.
|
||||
*/
|
||||
arrayOfStrings(key: string) {
|
||||
const value = this.use(key);
|
||||
|
||||
switch (typeof value) {
|
||||
case 'boolean':
|
||||
throw createFlagError(`expected --${key} to be a string`);
|
||||
case 'string':
|
||||
return value ? [value] : [];
|
||||
default:
|
||||
return value;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Same as #arrayOfStrings() except when the flag is not passed a "flag error" is thrown telling
|
||||
* the user that the flag is required and shows them the help text.
|
||||
*/
|
||||
requiredArrayOfStrings(key: string) {
|
||||
const value = this.arrayOfStrings(key);
|
||||
if (value === undefined) {
|
||||
throw createFlagError(`missing required flag --${key}`);
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
/**
|
||||
* Read the value of a string flag. If the flag is passed multiple times the last value is returned. If
|
||||
* the flag is not passed then undefined is returned.
|
||||
*/
|
||||
string(key: string) {
|
||||
const value = this.use(key);
|
||||
|
||||
switch (typeof value) {
|
||||
case 'undefined':
|
||||
return undefined;
|
||||
case 'string':
|
||||
return value || undefined; // convert "" to undefined
|
||||
case 'object':
|
||||
const last = value.at(-1);
|
||||
if (last === undefined) {
|
||||
throw createFlagError(`expected --${key} to be a string`);
|
||||
}
|
||||
return last || undefined; // convert "" to undefined
|
||||
default:
|
||||
throw createFlagError(`expected --${key} to be a string`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Same as #string() except when the flag is passed it is validated against a list
|
||||
* of valid values
|
||||
*/
|
||||
enum<T extends string>(key: string, values: readonly T[]) {
|
||||
const value = this.string(key);
|
||||
if (value === undefined) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (values.includes(value as T)) {
|
||||
return value as T;
|
||||
}
|
||||
|
||||
throw createFlagError(`invalid --${key}, expected one of "${values.join('", "')}"`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Same as #string() except when a flag is not passed a "flag error" is thrown telling the user
|
||||
* that the flag is required and shows them the help text.
|
||||
*/
|
||||
requiredString(key: string) {
|
||||
const value = this.string(key);
|
||||
if (value === undefined) {
|
||||
throw createFlagError(`missing required flag --${key}`);
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
/**
|
||||
* Same as #string(), except that when there is a value for the string it is resolved to an
|
||||
* absolute path based on the current working directory
|
||||
*/
|
||||
path(key: string) {
|
||||
const value = this.string(key);
|
||||
if (value !== undefined) {
|
||||
return makeAbsolute(value);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Same as #requiredString() except that values are converted to absolute paths based on the
|
||||
* current working directory
|
||||
*/
|
||||
requiredPath(key: string) {
|
||||
return makeAbsolute(this.requiredString(key));
|
||||
}
|
||||
|
||||
/**
|
||||
* Same as #arrayOfStrings(), except that when there are values they are resolved to
|
||||
* absolute paths based on the current working directory
|
||||
*/
|
||||
arrayOfPaths(key: string) {
|
||||
const value = this.arrayOfStrings(key);
|
||||
if (value !== undefined) {
|
||||
return value.map(makeAbsolute);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Same as #requiredArrayOfStrings(), except that values are resolved to absolute paths
|
||||
* based on the current working directory
|
||||
*/
|
||||
requiredArrayOfPaths(key: string) {
|
||||
return this.requiredArrayOfStrings(key).map(makeAbsolute);
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses the provided flag as a number; if the value does not parse to a valid number
|
||||
* using Number.parseFloat() then a "flag error" is thrown. If the flag is not passed
|
||||
* undefined is returned.
|
||||
*/
|
||||
number(key: string) {
|
||||
const value = this.string(key);
|
||||
if (value === undefined) {
|
||||
return;
|
||||
}
|
||||
|
||||
const num = Number.parseFloat(value);
|
||||
if (Number.isNaN(num)) {
|
||||
throw createFlagError(`unable to parse --${key} value [${value}] as a number`);
|
||||
}
|
||||
|
||||
return num;
|
||||
}
|
||||
|
||||
/**
|
||||
* Same as #number() except that when the flag is missing a "flag error" is thrown
|
||||
*/
|
||||
requiredNumber(key: string) {
|
||||
const value = this.number(key);
|
||||
if (value === undefined) {
|
||||
throw createFlagError(`missing required flag --${key}`);
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
/**
|
||||
* Read a boolean flag value, if the flag is properly defined as a "boolean" in the run options
|
||||
* then the value will always be a boolean, defaulting to `false`, so there is no need for an
|
||||
* optional/requiredBoolean() method.
|
||||
*/
|
||||
boolean(key: string) {
|
||||
const value = this.use(key);
|
||||
if (typeof value !== 'boolean') {
|
||||
throw createFlagError(`expected --${key} to be a boolean`);
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the positional arguments passed, includes any values that are not associated with
|
||||
* a specific --flag
|
||||
*/
|
||||
getPositionals() {
|
||||
return this._.slice(0);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns all of the unused flags. When a flag is read via any of the key-specific methods
|
||||
* the key is marked as "used" and this method will return a map of just the flags which
|
||||
* have not been used yet (excluding the default flags like --debug, --verbose, and --help)
|
||||
*/
|
||||
getUnused() {
|
||||
return new Map(
|
||||
[...this.unused.entries()].filter(([key]) => {
|
||||
const alias = this.aliasMap.get(key);
|
||||
if (alias !== undefined && FORCED_FLAGS.has(alias)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return !FORCED_FLAGS.has(key);
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns all of the used flags. When a flag is read via any of the key-specific methods
|
||||
* the key is marked as "used" and from then on this method will return a map including that
|
||||
* and any other key used by these methods.
|
||||
*/
|
||||
getUsed() {
|
||||
return new Map(this.used);
|
||||
}
|
||||
}
|
|
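Because the run context now carries a FlagsReader instance (see the run.ts and run_with_commands.ts changes below), dev CLIs can read typed, validated flag values instead of inspecting the raw flags object. A minimal usage sketch follows; the flag names --config, --retries and --dry-run are illustrative only and not taken from this diff.

import { run } from '@kbn/dev-cli-runner';

run(
  async ({ log, flagsReader }) => {
    // requiredPath() resolves the value to an absolute path and throws a flag error if missing.
    const configPath = flagsReader.requiredPath('config');
    // number() parses the string value, throwing a flag error if it is not numeric.
    const retries = flagsReader.number('retries') ?? 0;
    // Flags declared as boolean always come back as true/false.
    const dryRun = flagsReader.boolean('dry-run');

    log.info(`config=${configPath} retries=${retries} dryRun=${dryRun}`);
  },
  {
    flags: {
      string: ['config', 'retries'],
      boolean: ['dry-run'],
      help: `
        --config     Path to a config file (required)
        --retries    Number of retries, defaults to 0
        --dry-run    Log what would happen without doing it
      `,
    },
  }
);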
@@ -10,7 +10,8 @@ import { pickLevelFromFlags, ToolingLog, LogLevel } from '@kbn/tooling-log';
import { ProcRunner, withProcRunner } from '@kbn/dev-proc-runner';
import { createFlagError } from '@kbn/dev-cli-errors';

import { Flags, getFlags, FlagOptions } from './flags';
import { Flags, getFlags, FlagOptions, DEFAULT_FLAG_ALIASES } from './flags';
import { FlagsReader } from './flags_reader';
import { getHelp } from './help';
import { CleanupTask, Cleanup } from './cleanup';
import { Metrics, MetricsMeta } from './metrics';

@@ -21,6 +22,7 @@ export interface RunContext {
  procRunner: ProcRunner;
  statsMeta: MetricsMeta;
  addCleanupTask: (task: CleanupTask) => void;
  flagsReader: FlagsReader;
}
export type RunFn = (context: RunContext) => Promise<void> | void;

@@ -71,6 +73,12 @@ export async function run(fn: RunFn, options: RunOptions = {}) {
        procRunner,
        statsMeta: metrics.meta,
        addCleanupTask: cleanup.add.bind(cleanup),
        flagsReader: new FlagsReader(flags, {
          aliases: {
            ...options.flags?.alias,
            ...DEFAULT_FLAG_ALIASES,
          },
        }),
      });
    });
  } catch (error) {

@@ -9,6 +9,7 @@
import { ToolingLog, ToolingLogCollectingWriter } from '@kbn/tooling-log';
import { ProcRunner } from '@kbn/dev-proc-runner';

import { FlagsReader } from './flags_reader';
import { RunWithCommands } from './run_with_commands';

const testLog = new ToolingLog();

@@ -44,6 +45,7 @@ it('extends the context using extendContext()', async () => {
  expect(context).toEqual({
    log: expect.any(ToolingLog),
    flags: expect.any(Object),
    flagsReader: expect.any(FlagsReader),
    addCleanupTask: expect.any(Function),
    procRunner: expect.any(ProcRunner),
    statsMeta: expect.any(Map),

@@ -11,7 +11,8 @@ import { withProcRunner } from '@kbn/dev-proc-runner';
import { createFlagError } from '@kbn/dev-cli-errors';

import { RunContext, RunOptions } from './run';
import { getFlags, FlagOptions, mergeFlagOptions } from './flags';
import { getFlags, FlagOptions, mergeFlagOptions, DEFAULT_FLAG_ALIASES } from './flags';
import { FlagsReader } from './flags_reader';
import { Cleanup } from './cleanup';
import { getHelpForAllCommands, getCommandLevelHelp } from './help';
import { Metrics } from './metrics';

@@ -116,6 +117,12 @@ export class RunWithCommands<T> {
      procRunner,
      statsMeta: metrics.meta,
      addCleanupTask: cleanup.add.bind(cleanup),
      flagsReader: new FlagsReader(commandFlags, {
        aliases: {
          ...commandFlagOptions.alias,
          ...DEFAULT_FLAG_ALIASES,
        },
      }),
    };

    const extendedContext = {
145 packages/kbn-failed-test-reporter-cli/BUILD.bazel (new file)
|
@ -0,0 +1,145 @@
|
|||
load("@npm//@bazel/typescript:index.bzl", "ts_config")
|
||||
load("@build_bazel_rules_nodejs//:index.bzl", "js_library")
|
||||
load("//src/dev/bazel:index.bzl", "jsts_transpiler", "pkg_npm", "pkg_npm_types", "ts_project")
|
||||
|
||||
PKG_DIRNAME = "kbn-failed-test-reporter-cli"
|
||||
PKG_REQUIRE_NAME = "@kbn/failed-test-reporter-cli"
|
||||
|
||||
SOURCE_FILES = glob(
|
||||
[
|
||||
"**/*.ts",
|
||||
"**/*.html",
|
||||
],
|
||||
exclude = [
|
||||
"**/*.config.js",
|
||||
"**/*.mock.*",
|
||||
"**/*.test.*",
|
||||
"**/*.stories.*",
|
||||
"**/__snapshots__/**",
|
||||
"**/integration_tests/**",
|
||||
"**/mocks/**",
|
||||
"**/scripts/**",
|
||||
"**/storybook/**",
|
||||
"**/test_fixtures/**",
|
||||
"**/test_helpers/**",
|
||||
],
|
||||
)
|
||||
|
||||
SRCS = SOURCE_FILES
|
||||
|
||||
filegroup(
|
||||
name = "srcs",
|
||||
srcs = SRCS,
|
||||
)
|
||||
|
||||
NPM_MODULE_EXTRA_FILES = [
|
||||
"package.json",
|
||||
]
|
||||
|
||||
# In this array place runtime dependencies, including other packages and NPM packages
|
||||
# which must be available for this code to run.
|
||||
#
|
||||
# To reference other packages use:
|
||||
# "//repo/relative/path/to/package"
|
||||
# eg. "//packages/kbn-utils"
|
||||
#
|
||||
# To reference a NPM package use:
|
||||
# "@npm//name-of-package"
|
||||
# eg. "@npm//lodash"
|
||||
RUNTIME_DEPS = [
|
||||
]
|
||||
|
||||
# In this array place dependencies necessary to build the types, which will include the
|
||||
# :npm_module_types target of other packages and packages from NPM, including @types/*
|
||||
# packages.
|
||||
#
|
||||
# To reference the types for another package use:
|
||||
# "//repo/relative/path/to/package:npm_module_types"
|
||||
# eg. "//packages/kbn-utils:npm_module_types"
|
||||
#
|
||||
# References to NPM packages work the same as RUNTIME_DEPS
|
||||
TYPES_DEPS = [
|
||||
"//packages/kbn-utils:npm_module_types",
|
||||
"//packages/kbn-ci-stats-reporter:npm_module_types",
|
||||
"//packages/kbn-dev-cli-runner:npm_module_types",
|
||||
"//packages/kbn-dev-cli-errors:npm_module_types",
|
||||
"//packages/kbn-dev-utils:npm_module_types",
|
||||
"//packages/kbn-tooling-log:npm_module_types",
|
||||
"//packages/kbn-ftr-screenshot-filename:npm_module_types",
|
||||
"//packages/kbn-jest-serializers:npm_module_types",
|
||||
"//packages/kbn-journeys:npm_module_types",
|
||||
"@npm//@elastic/elasticsearch",
|
||||
"@npm//@types/node",
|
||||
"@npm//@types/he",
|
||||
"@npm//@types/jest",
|
||||
"@npm//@types/strip-ansi",
|
||||
"@npm//@types/normalize-path",
|
||||
"@npm//@types/xml2js",
|
||||
"@npm//axios",
|
||||
"@npm//dedent",
|
||||
"@npm//globby",
|
||||
]
|
||||
|
||||
jsts_transpiler(
|
||||
name = "target_node",
|
||||
srcs = SRCS,
|
||||
build_pkg_name = package_name(),
|
||||
additional_args = [
|
||||
"--copy-files"
|
||||
],
|
||||
)
|
||||
|
||||
ts_config(
|
||||
name = "tsconfig",
|
||||
src = "tsconfig.json",
|
||||
deps = [
|
||||
"//:tsconfig.base.json",
|
||||
"//:tsconfig.bazel.json",
|
||||
],
|
||||
)
|
||||
|
||||
ts_project(
|
||||
name = "tsc_types",
|
||||
args = ['--pretty'],
|
||||
srcs = SRCS,
|
||||
deps = TYPES_DEPS,
|
||||
declaration = True,
|
||||
declaration_map = True,
|
||||
emit_declaration_only = True,
|
||||
out_dir = "target_types",
|
||||
tsconfig = ":tsconfig",
|
||||
)
|
||||
|
||||
js_library(
|
||||
name = PKG_DIRNAME,
|
||||
srcs = NPM_MODULE_EXTRA_FILES,
|
||||
deps = RUNTIME_DEPS + [":target_node"],
|
||||
package_name = PKG_REQUIRE_NAME,
|
||||
visibility = ["//visibility:public"],
|
||||
)
|
||||
|
||||
pkg_npm(
|
||||
name = "npm_module",
|
||||
deps = [":" + PKG_DIRNAME],
|
||||
)
|
||||
|
||||
filegroup(
|
||||
name = "build",
|
||||
srcs = [":npm_module"],
|
||||
visibility = ["//visibility:public"],
|
||||
)
|
||||
|
||||
pkg_npm_types(
|
||||
name = "npm_module_types",
|
||||
srcs = SRCS,
|
||||
deps = [":tsc_types"],
|
||||
package_name = PKG_REQUIRE_NAME,
|
||||
tsconfig = ":tsconfig",
|
||||
visibility = ["//visibility:public"],
|
||||
)
|
||||
|
||||
filegroup(
|
||||
name = "build_types",
|
||||
srcs = [":npm_module_types"],
|
||||
visibility = ["//visibility:public"],
|
||||
)
|
3 packages/kbn-failed-test-reporter-cli/README.md (new file)
@@ -0,0 +1,3 @@
# @kbn/failed-test-reporter-cli

Empty package generated by @kbn/generate
@ -0,0 +1,208 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import Path from 'path';
|
||||
|
||||
import { REPO_ROOT } from '@kbn/utils';
|
||||
import { run } from '@kbn/dev-cli-runner';
|
||||
import { createFailError, createFlagError } from '@kbn/dev-cli-errors';
|
||||
import { CiStatsReporter } from '@kbn/ci-stats-reporter';
|
||||
import globby from 'globby';
|
||||
import normalize from 'normalize-path';
|
||||
|
||||
import { getFailures } from './get_failures';
|
||||
import { GithubApi } from './github_api';
|
||||
import { updateFailureIssue, createFailureIssue } from './report_failure';
|
||||
import { readTestReport, getRootMetadata } from './test_report';
|
||||
import { addMessagesToReport } from './add_messages_to_report';
|
||||
import { getReportMessageIter } from './report_metadata';
|
||||
import { reportFailuresToEs } from './report_failures_to_es';
|
||||
import { reportFailuresToFile } from './report_failures_to_file';
|
||||
import { getBuildkiteMetadata } from './buildkite_metadata';
|
||||
import { ExistingFailedTestIssues } from './existing_failed_test_issues';
|
||||
|
||||
const DEFAULT_PATTERNS = [Path.resolve(REPO_ROOT, 'target/junit/**/*.xml')];
|
||||
const DISABLE_MISSING_TEST_REPORT_ERRORS =
|
||||
process.env.DISABLE_MISSING_TEST_REPORT_ERRORS === 'true';
|
||||
|
||||
run(
|
||||
async ({ log, flags }) => {
|
||||
const indexInEs = flags['index-errors'];
|
||||
|
||||
let updateGithub = flags['github-update'];
|
||||
if (updateGithub && !process.env.GITHUB_TOKEN) {
|
||||
throw createFailError(
|
||||
'GITHUB_TOKEN environment variable must be set, otherwise use --no-github-update flag'
|
||||
);
|
||||
}
|
||||
|
||||
let branch: string = '';
|
||||
if (updateGithub) {
|
||||
let isPr = false;
|
||||
|
||||
if (process.env.BUILDKITE === 'true') {
|
||||
branch = process.env.BUILDKITE_BRANCH || '';
|
||||
isPr = process.env.BUILDKITE_PULL_REQUEST === 'true';
|
||||
updateGithub = process.env.REPORT_FAILED_TESTS_TO_GITHUB === 'true';
|
||||
} else {
|
||||
// JOB_NAME is formatted as `elastic+kibana+7.x` in some places and `elastic+kibana+7.x/JOB=kibana-intake,node=immutable` in others
|
||||
const jobNameSplit = (process.env.JOB_NAME || '').split(/\+|\//);
|
||||
branch = jobNameSplit.length >= 3 ? jobNameSplit[2] : process.env.GIT_BRANCH || '';
|
||||
isPr = !!process.env.ghprbPullId;
|
||||
|
||||
const isMainOrVersion = branch === 'main' || branch.match(/^\d+\.(x|\d+)$/);
|
||||
if (!isMainOrVersion || isPr) {
|
||||
log.info('Failure issues only created on main/version branch jobs');
|
||||
updateGithub = false;
|
||||
}
|
||||
}
|
||||
|
||||
if (!branch) {
|
||||
throw createFailError(
|
||||
'Unable to determine originating branch from job name or other environment variables'
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
const githubApi = new GithubApi({
|
||||
log,
|
||||
token: process.env.GITHUB_TOKEN,
|
||||
dryRun: !updateGithub,
|
||||
});
|
||||
|
||||
const bkMeta = getBuildkiteMetadata();
|
||||
|
||||
try {
|
||||
const buildUrl = flags['build-url'] || (updateGithub ? '' : 'http://buildUrl');
|
||||
if (typeof buildUrl !== 'string' || !buildUrl) {
|
||||
throw createFlagError('Missing --build-url or process.env.BUILD_URL');
|
||||
}
|
||||
|
||||
const patterns = (flags._.length ? flags._ : DEFAULT_PATTERNS).map((p) =>
|
||||
normalize(Path.resolve(p))
|
||||
);
|
||||
log.info('Searching for reports at', patterns);
|
||||
const reportPaths = await globby(patterns, {
|
||||
absolute: true,
|
||||
});
|
||||
|
||||
if (!reportPaths.length && DISABLE_MISSING_TEST_REPORT_ERRORS) {
|
||||
// it is fine for code coverage to not have test results
|
||||
return;
|
||||
}
|
||||
|
||||
if (!reportPaths.length) {
|
||||
throw createFailError(`Unable to find any junit reports with patterns [${patterns}]`);
|
||||
}
|
||||
|
||||
log.info('found', reportPaths.length, 'junit reports', reportPaths);
|
||||
|
||||
const existingIssues = new ExistingFailedTestIssues(log);
|
||||
for (const reportPath of reportPaths) {
|
||||
const report = await readTestReport(reportPath);
|
||||
const messages = Array.from(getReportMessageIter(report));
|
||||
const failures = getFailures(report);
|
||||
|
||||
await existingIssues.loadForFailures(failures);
|
||||
|
||||
if (indexInEs) {
|
||||
await reportFailuresToEs(log, failures);
|
||||
}
|
||||
|
||||
for (const failure of failures) {
|
||||
const pushMessage = (msg: string) => {
|
||||
messages.push({
|
||||
classname: failure.classname,
|
||||
name: failure.name,
|
||||
message: msg,
|
||||
});
|
||||
};
|
||||
|
||||
if (failure.likelyIrrelevant) {
|
||||
pushMessage(
|
||||
'Failure is likely irrelevant' +
|
||||
(updateGithub ? ', so an issue was not created or updated' : '')
|
||||
);
|
||||
continue;
|
||||
}
|
||||
|
||||
const existingIssue = existingIssues.getForFailure(failure);
|
||||
if (existingIssue) {
|
||||
const { newBody, newCount } = await updateFailureIssue(
|
||||
buildUrl,
|
||||
existingIssue,
|
||||
githubApi,
|
||||
branch
|
||||
);
|
||||
const url = existingIssue.github.htmlUrl;
|
||||
existingIssue.github.body = newBody;
|
||||
failure.githubIssue = url;
|
||||
failure.failureCount = updateGithub ? newCount : newCount - 1;
|
||||
pushMessage(`Test has failed ${newCount - 1} times on tracked branches: ${url}`);
|
||||
if (updateGithub) {
|
||||
pushMessage(`Updated existing issue: ${url} (fail count: ${newCount})`);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
const newIssue = await createFailureIssue(buildUrl, failure, githubApi, branch);
|
||||
existingIssues.addNewlyCreated(failure, newIssue);
|
||||
pushMessage('Test has not failed recently on tracked branches');
|
||||
if (updateGithub) {
|
||||
pushMessage(`Created new issue: ${newIssue.html_url}`);
|
||||
failure.githubIssue = newIssue.html_url;
|
||||
}
|
||||
failure.failureCount = updateGithub ? 1 : 0;
|
||||
}
|
||||
|
||||
// mutates report to include messages and writes updated report to disk
|
||||
await addMessagesToReport({
|
||||
report,
|
||||
messages,
|
||||
log,
|
||||
reportPath,
|
||||
dryRun: !flags['report-update'],
|
||||
});
|
||||
|
||||
await reportFailuresToFile(log, failures, bkMeta, getRootMetadata(report));
|
||||
}
|
||||
} finally {
|
||||
await CiStatsReporter.fromEnv(log).metrics([
|
||||
{
|
||||
group: 'github api request count',
|
||||
id: `failed test reporter`,
|
||||
value: githubApi.getRequestCount(),
|
||||
meta: Object.fromEntries(
|
||||
Object.entries(bkMeta).map(
|
||||
([k, v]) => [`buildkite${k[0].toUpperCase()}${k.slice(1)}`, v] as const
|
||||
)
|
||||
),
|
||||
},
|
||||
]);
|
||||
}
|
||||
},
|
||||
{
|
||||
description: `a cli that opens issues or updates existing issues based on junit reports`,
|
||||
flags: {
|
||||
boolean: ['github-update', 'report-update'],
|
||||
string: ['build-url'],
|
||||
default: {
|
||||
'github-update': true,
|
||||
'report-update': true,
|
||||
'index-errors': true,
|
||||
'build-url': process.env.BUILD_URL,
|
||||
},
|
||||
help: `
|
||||
--no-github-update Execute the CLI without writing to Github
|
||||
--no-report-update Execute the CLI without writing to the JUnit reports
|
||||
--no-index-errors Execute the CLI without indexing failures into Elasticsearch
|
||||
--build-url URL of the failed build, defaults to process.env.BUILD_URL
|
||||
`,
|
||||
},
|
||||
}
|
||||
);
|
|
@ -0,0 +1,199 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import Path from 'path';
|
||||
import Fs from 'fs';
|
||||
import { createHash } from 'crypto';
|
||||
|
||||
import globby from 'globby';
|
||||
import { ToolingLog } from '@kbn/tooling-log';
|
||||
import { REPO_ROOT } from '@kbn/utils';
|
||||
import { escape } from 'he';
|
||||
import { FtrScreenshotFilename } from '@kbn/ftr-screenshot-filename';
|
||||
import { JourneyScreenshots } from '@kbn/journeys';
|
||||
|
||||
import { BuildkiteMetadata } from './buildkite_metadata';
|
||||
import { TestFailure } from './get_failures';
|
||||
|
||||
interface JourneyMeta {
|
||||
journeyName: string;
|
||||
}
|
||||
function getJourneyMetadata(rootMeta: Record<string, unknown>): JourneyMeta | undefined {
|
||||
const { journeyName } = rootMeta;
|
||||
if (typeof journeyName === 'string') {
|
||||
return { journeyName };
|
||||
}
|
||||
|
||||
return undefined;
|
||||
}
|
||||
|
||||
async function getJourneySnapshotHtml(log: ToolingLog, journeyMeta: JourneyMeta) {
|
||||
let screenshots;
|
||||
try {
|
||||
screenshots = await JourneyScreenshots.load(journeyMeta.journeyName);
|
||||
} catch (error) {
|
||||
log.error(`Failed to load journey screenshots: ${error.message}`);
|
||||
return '';
|
||||
}
|
||||
|
||||
return [
|
||||
'<section>',
|
||||
'<h5>Steps</h5>',
|
||||
...screenshots.get().flatMap(({ title, path }) => {
|
||||
const base64 = Fs.readFileSync(path, 'base64');
|
||||
|
||||
return [
|
||||
`<p><strong>${escape(title)}</strong></p>`,
|
||||
`<img class="screenshot img-fluid img-thumbnail" src="data:image/png;base64,${base64}" />`,
|
||||
];
|
||||
}),
|
||||
'</section>',
|
||||
].join('\n');
|
||||
}
|
||||
|
||||
let _allScreenshotsCache: Array<{ path: string; name: string }> | undefined;
|
||||
function getAllScreenshots(log: ToolingLog) {
|
||||
return (_allScreenshotsCache ??= findAllScreenshots(log));
|
||||
}
|
||||
function findAllScreenshots(log: ToolingLog) {
|
||||
try {
|
||||
return globby
|
||||
.sync(
|
||||
[
|
||||
'test/functional/**/screenshots/failure/*.png',
|
||||
'x-pack/test/functional/**/screenshots/failure/*.png',
|
||||
],
|
||||
{
|
||||
cwd: REPO_ROOT,
|
||||
absolute: true,
|
||||
}
|
||||
)
|
||||
.map((path) => ({
|
||||
path,
|
||||
name: Path.basename(path, Path.extname(path)),
|
||||
}));
|
||||
} catch (error) {
|
||||
log.error(`Failed to find screenshots: ${error.message}`);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
function getFtrScreenshotHtml(log: ToolingLog, failureName: string) {
|
||||
return getAllScreenshots(log)
|
||||
.filter((s) => s.name.startsWith(FtrScreenshotFilename.create(failureName, { ext: false })))
|
||||
.map((s) => {
|
||||
const base64 = Fs.readFileSync(s.path).toString('base64');
|
||||
return `<img class="screenshot img-fluid img-thumbnail" src="data:image/png;base64,${base64}" />`;
|
||||
})
|
||||
.join('\n');
|
||||
}
|
||||
|
||||
export async function reportFailuresToFile(
|
||||
log: ToolingLog,
|
||||
failures: TestFailure[],
|
||||
bkMeta: BuildkiteMetadata,
|
||||
rootMeta: Record<string, unknown>
|
||||
) {
|
||||
if (!failures?.length) {
|
||||
return;
|
||||
}
|
||||
|
||||
const journeyMeta = getJourneyMetadata(rootMeta);
|
||||
|
||||
// Jest could, in theory, fail 1000s of tests and write 1000s of failures
|
||||
// So let's just write files for the first 20
|
||||
for (const failure of failures.slice(0, 20)) {
|
||||
const hash = createHash('md5').update(failure.name).digest('hex');
|
||||
const filenameBase = `${
|
||||
process.env.BUILDKITE_JOB_ID ? process.env.BUILDKITE_JOB_ID + '_' : ''
|
||||
}${hash}`;
|
||||
const dir = Path.join('target', 'test_failures');
|
||||
|
||||
const failureLog = [
|
||||
['Test:', '-----', failure.classname, failure.name, ''],
|
||||
['Failure:', '--------', failure.failure],
|
||||
failure['system-out'] ? ['', 'Standard Out:', '-------------', failure['system-out']] : [],
|
||||
]
|
||||
.flat()
|
||||
.join('\n');
|
||||
|
||||
const failureJSON = JSON.stringify(
|
||||
{
|
||||
...failure,
|
||||
hash,
|
||||
buildId: bkMeta.buildId,
|
||||
jobId: bkMeta.jobId,
|
||||
url: bkMeta.url,
|
||||
jobUrl: bkMeta.jobUrl,
|
||||
jobName: bkMeta.jobName,
|
||||
},
|
||||
null,
|
||||
2
|
||||
);
|
||||
|
||||
const failureHTML = Fs.readFileSync(
|
||||
require.resolve('./report_failures_to_file_html_template.html')
|
||||
)
|
||||
.toString()
|
||||
.replace('$TITLE', escape(failure.name))
|
||||
.replace(
|
||||
'$MAIN',
|
||||
`
|
||||
${failure.classname
|
||||
.split('.')
|
||||
.map((part) => `<h5>${escape(part.replace('·', '.'))}</h5>`)
|
||||
.join('')}
|
||||
<hr />
|
||||
<p><strong>${escape(failure.name)}</strong></p>
|
||||
<p>
|
||||
<small>
|
||||
<strong>Failures in tracked branches</strong>: <span class="badge rounded-pill bg-danger">${
|
||||
failure.failureCount || 0
|
||||
}</span>
|
||||
${
|
||||
failure.githubIssue
|
||||
? `<br /><a href="${escape(failure.githubIssue)}">${escape(
|
||||
failure.githubIssue
|
||||
)}</a>`
|
||||
: ''
|
||||
}
|
||||
</small>
|
||||
</p>
|
||||
${
|
||||
bkMeta.jobUrl
|
||||
? `<p>
|
||||
<small>
|
||||
<strong>Buildkite Job</strong><br />
|
||||
<a href="${escape(bkMeta.jobUrl)}">${escape(bkMeta.jobUrl)}</a>
|
||||
</small>
|
||||
</p>`
|
||||
: ''
|
||||
}
|
||||
<pre>${escape(failure.failure)}</pre>
|
||||
${
|
||||
journeyMeta
|
||||
? await getJourneySnapshotHtml(log, journeyMeta)
|
||||
: getFtrScreenshotHtml(log, failure.name)
|
||||
}
|
||||
${
|
||||
failure['system-out']
|
||||
? `
|
||||
<h5>Stdout</h5>
|
||||
<pre>${escape(failure['system-out'] || '')}</pre>
|
||||
`
|
||||
: ''
|
||||
}
|
||||
`
|
||||
);
|
||||
|
||||
Fs.mkdirSync(dir, { recursive: true });
|
||||
Fs.writeFileSync(Path.join(dir, `${filenameBase}.log`), failureLog, 'utf8');
|
||||
Fs.writeFileSync(Path.join(dir, `${filenameBase}.html`), failureHTML, 'utf8');
|
||||
Fs.writeFileSync(Path.join(dir, `${filenameBase}.json`), failureJSON, 'utf8');
|
||||
}
|
||||
}
|
|
@@ -35,6 +35,8 @@ export interface TestSuite {
    failures: string;
    /* number of skipped tests as a string */
    skipped: string;
    /* optional JSON encoded metadata */
    'metadata-json'?: string;
  };
  testcase?: TestCase[];
}

@@ -93,3 +95,22 @@ export function* makeFailedTestCaseIter(report: TestReport) {
    yield testCase as FailedTestCase;
  }
}

export function getRootMetadata(report: TestReport): Record<string, unknown> {
  const json =
    ('testsuites' in report
      ? report.testsuites?.testsuite?.[0]?.$?.['metadata-json']
      : report.testsuite?.$?.['metadata-json']) ?? '{}';

  try {
    const obj = JSON.parse(json);

    if (typeof obj === 'object' && obj !== null) {
      return obj;
    }

    return {};
  } catch {
    return {};
  }
}
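For context, the `metadata-json` attribute parsed here appears to be the JSON blob the journey FTR config (see `journey_ftr_config.ts` below) attaches to the junit suite. A hedged sketch of the round trip, with made-up values:

```ts
// Illustrative suite attributes; the journey name and step names are examples.
const suiteAttributes = {
  name: 'Journey: example_journey',
  failures: '1',
  skipped: '0',
  'metadata-json': JSON.stringify({
    journeyName: 'example_journey',
    stepNames: ['Go to Discover Page', 'Expand the first document'],
  }),
};

// getRootMetadata() would hand back the parsed object, e.g.
// { journeyName: 'example_journey', stepNames: [...] }, which
// reportFailuresToFile() uses to locate journey screenshots.
const rootMeta = JSON.parse(suiteAttributes['metadata-json']);
```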
|
|
@@ -6,4 +6,4 @@
 * Side Public License, v 1.
 */

export { runFailedTestsReporterCli } from './run_failed_tests_reporter_cli';
import './failed_tests_reporter/failed_tests_reporter_cli';
|
|
@@ -6,7 +6,8 @@
 * Side Public License, v 1.
 */

export { runTestsCli } from './run_tests/cli';
export { processOptions as processRunTestsCliOptions } from './run_tests/args';
export { startServersCli } from './start_servers/cli';
export { processOptions as processStartServersCliOptions } from './start_servers/args';

packages/kbn-failed-test-reporter-cli/jest.config.js (new file)
module.exports = {
  preset: '@kbn/test/jest_node',
  rootDir: '../..',
  roots: ['<rootDir>/packages/kbn-failed-test-reporter-cli'],
};
|
packages/kbn-failed-test-reporter-cli/kibana.jsonc (new file)
@@ -0,0 +1,8 @@
{
  "type": "shared-common",
  "id": "@kbn/failed-test-reporter-cli",
  "owner": "@elastic/kibana-operations",
  "devOnly": true,
  "runtimeDeps": [],
  "typeDeps": [],
}

packages/kbn-failed-test-reporter-cli/package.json (new file)
@@ -0,0 +1,7 @@
{
  "name": "@kbn/failed-test-reporter-cli",
  "private": true,
  "version": "1.0.0",
  "main": "./target_node/index.js",
  "license": "SSPL-1.0 OR Elastic License 2.0"
}

packages/kbn-failed-test-reporter-cli/tsconfig.json (new file)
@@ -0,0 +1,17 @@
{
  "extends": "../../tsconfig.bazel.json",
  "compilerOptions": {
    "declaration": true,
    "declarationMap": true,
    "emitDeclarationOnly": true,
    "outDir": "target_types",
    "stripInternal": false,
    "types": [
      "jest",
      "node"
    ]
  },
  "include": [
    "**/*.ts",
  ]
}
|
packages/kbn-ftr-common-functional-services/BUILD.bazel (new file, 127 lines)
|
@ -0,0 +1,127 @@
|
|||
load("@npm//@bazel/typescript:index.bzl", "ts_config")
|
||||
load("@build_bazel_rules_nodejs//:index.bzl", "js_library")
|
||||
load("//src/dev/bazel:index.bzl", "jsts_transpiler", "pkg_npm", "pkg_npm_types", "ts_project")
|
||||
|
||||
PKG_DIRNAME = "kbn-ftr-common-functional-services"
|
||||
PKG_REQUIRE_NAME = "@kbn/ftr-common-functional-services"
|
||||
|
||||
SOURCE_FILES = glob(
|
||||
[
|
||||
"**/*.ts",
|
||||
],
|
||||
exclude = [
|
||||
"**/*.config.js",
|
||||
"**/*.mock.*",
|
||||
"**/*.test.*",
|
||||
"**/*.stories.*",
|
||||
"**/__snapshots__/**",
|
||||
"**/integration_tests/**",
|
||||
"**/mocks/**",
|
||||
"**/scripts/**",
|
||||
"**/storybook/**",
|
||||
"**/test_fixtures/**",
|
||||
"**/test_helpers/**",
|
||||
],
|
||||
)
|
||||
|
||||
SRCS = SOURCE_FILES
|
||||
|
||||
filegroup(
|
||||
name = "srcs",
|
||||
srcs = SRCS,
|
||||
)
|
||||
|
||||
NPM_MODULE_EXTRA_FILES = [
|
||||
"package.json",
|
||||
]
|
||||
|
||||
# In this array place runtime dependencies, including other packages and NPM packages
|
||||
# which must be available for this code to run.
|
||||
#
|
||||
# To reference other packages use:
|
||||
# "//repo/relative/path/to/package"
|
||||
# eg. "//packages/kbn-utils"
|
||||
#
|
||||
# To reference a NPM package use:
|
||||
# "@npm//name-of-package"
|
||||
# eg. "@npm//lodash"
|
||||
RUNTIME_DEPS = [
|
||||
]
|
||||
|
||||
# In this array place dependencies necessary to build the types, which will include the
|
||||
# :npm_module_types target of other packages and packages from NPM, including @types/*
|
||||
# packages.
|
||||
#
|
||||
# To reference the types for another package use:
|
||||
# "//repo/relative/path/to/package:npm_module_types"
|
||||
# eg. "//packages/kbn-utils:npm_module_types"
|
||||
#
|
||||
# References to NPM packages work the same as RUNTIME_DEPS
|
||||
TYPES_DEPS = [
|
||||
"@npm//@types/node",
|
||||
"@npm//@types/jest",
|
||||
"//packages/kbn-tooling-log:npm_module_types",
|
||||
"//packages/kbn-es-archiver:npm_module_types",
|
||||
"//packages/kbn-test:npm_module_types",
|
||||
]
|
||||
|
||||
jsts_transpiler(
|
||||
name = "target_node",
|
||||
srcs = SRCS,
|
||||
build_pkg_name = package_name(),
|
||||
)
|
||||
|
||||
ts_config(
|
||||
name = "tsconfig",
|
||||
src = "tsconfig.json",
|
||||
deps = [
|
||||
"//:tsconfig.base.json",
|
||||
"//:tsconfig.bazel.json",
|
||||
],
|
||||
)
|
||||
|
||||
ts_project(
|
||||
name = "tsc_types",
|
||||
args = ['--pretty'],
|
||||
srcs = SRCS,
|
||||
deps = TYPES_DEPS,
|
||||
declaration = True,
|
||||
declaration_map = True,
|
||||
emit_declaration_only = True,
|
||||
out_dir = "target_types",
|
||||
tsconfig = ":tsconfig",
|
||||
)
|
||||
|
||||
js_library(
|
||||
name = PKG_DIRNAME,
|
||||
srcs = NPM_MODULE_EXTRA_FILES,
|
||||
deps = RUNTIME_DEPS + [":target_node"],
|
||||
package_name = PKG_REQUIRE_NAME,
|
||||
visibility = ["//visibility:public"],
|
||||
)
|
||||
|
||||
pkg_npm(
|
||||
name = "npm_module",
|
||||
deps = [":" + PKG_DIRNAME],
|
||||
)
|
||||
|
||||
filegroup(
|
||||
name = "build",
|
||||
srcs = [":npm_module"],
|
||||
visibility = ["//visibility:public"],
|
||||
)
|
||||
|
||||
pkg_npm_types(
|
||||
name = "npm_module_types",
|
||||
srcs = SRCS,
|
||||
deps = [":tsc_types"],
|
||||
package_name = PKG_REQUIRE_NAME,
|
||||
tsconfig = ":tsconfig",
|
||||
visibility = ["//visibility:public"],
|
||||
)
|
||||
|
||||
filegroup(
|
||||
name = "build_types",
|
||||
srcs = [":npm_module_types"],
|
||||
visibility = ["//visibility:public"],
|
||||
)
|
packages/kbn-ftr-common-functional-services/README.md (new file)
@@ -0,0 +1,3 @@
# @kbn/ftr-common-functional-services

A collection of very common services used by all functional FTR configs, moved to a package so that we can start putting FTR configs in packages.
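To make that concrete, here is a minimal sketch (not part of this commit) of a package-local FTR config that reuses these services instead of redefining them; the base config and test file paths are hypothetical.

```ts
import { FtrConfigProviderContext } from '@kbn/test';
import { commonFunctionalServices } from '@kbn/ftr-common-functional-services';

// Hypothetical config file living next to the tests it runs.
export default async function ({ readConfigFile }: FtrConfigProviderContext) {
  const baseConfig = await readConfigFile(require.resolve('../../test/functional/config.base.js'));

  return {
    ...baseConfig.getAll(),
    // es, kibanaServer, esArchiver and retry come from the shared package
    services: commonFunctionalServices,
    testFiles: [require.resolve('./my_test')],
  };
}
```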
|
packages/kbn-ftr-common-functional-services/index.ts (new file)
@@ -0,0 +1,21 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import { ProvidedType } from '@kbn/test';
export { services as commonFunctionalServices } from './services/all';

import { KibanaServerProvider } from './services/kibana_server';
export type KibanaServer = ProvidedType<typeof KibanaServerProvider>;

export { RetryService } from './services/retry';

import { EsArchiverProvider } from './services/es_archiver';
export type EsArchiver = ProvidedType<typeof EsArchiverProvider>;

import { EsProvider } from './services/es';
export type Es = ProvidedType<typeof EsProvider>;
|
packages/kbn-ftr-common-functional-services/jest.config.js (new file, 13 lines)
|
@ -0,0 +1,13 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
module.exports = {
|
||||
preset: '@kbn/test/jest_node',
|
||||
rootDir: '../..',
|
||||
roots: ['<rootDir>/packages/kbn-ftr-common-functional-services'],
|
||||
};
|
packages/kbn-ftr-common-functional-services/kibana.jsonc (new file, 8 lines)
|
@ -0,0 +1,8 @@
|
|||
{
|
||||
"type": "shared-common",
|
||||
"id": "@kbn/ftr-common-functional-services",
|
||||
"owner": "@elastic/kibana-operations",
|
||||
"devOnly": true,
|
||||
"runtimeDeps": [],
|
||||
"typeDeps": [],
|
||||
}
|
packages/kbn-ftr-common-functional-services/package.json (new file, 7 lines)
|
@ -0,0 +1,7 @@
|
|||
{
|
||||
"name": "@kbn/ftr-common-functional-services",
|
||||
"private": true,
|
||||
"version": "1.0.0",
|
||||
"main": "./target_node/index.js",
|
||||
"license": "SSPL-1.0 OR Elastic License 2.0"
|
||||
}
|
packages/kbn-ftr-common-functional-services/services/all.ts (new file)
@@ -0,0 +1,19 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import { EsArchiverProvider } from './es_archiver';
import { EsProvider } from './es';
import { KibanaServerProvider } from './kibana_server';
import { RetryService } from './retry';

export const services = {
  es: EsProvider,
  kibanaServer: KibanaServerProvider,
  esArchiver: EsArchiverProvider,
  retry: RetryService,
};
|
|
@@ -9,12 +9,9 @@
import { Client } from '@elastic/elasticsearch';

import { systemIndicesSuperuser, createEsClientForFtrConfig } from '@kbn/test';
import { FtrProviderContext } from '../ftr_provider_context';
import { FtrProviderContext } from './ftr_provider_context';

/*
 registers Kibana-specific @elastic/elasticsearch client instance.
 */
export function ElasticsearchProvider({ getService }: FtrProviderContext): Client {
export function EsProvider({ getService }: FtrProviderContext): Client {
  const config = getService('config');

  return createEsClientForFtrConfig(config, {
|
|
@@ -7,17 +7,15 @@
 */

import { EsArchiver } from '@kbn/es-archiver';
import { FtrProviderContext } from '../ftr_provider_context';
import * as KibanaServer from './kibana_server';
import { FtrProviderContext } from './ftr_provider_context';
import { extendEsArchiver } from './kibana_server';

export function EsArchiverProvider({ getService }: FtrProviderContext): EsArchiver {
  const config = getService('config');
  const client = getService('es');
  const lifecycle = getService('lifecycle');
  const log = getService('log');
  const kibanaServer = getService('kibanaServer');
  const retry = getService('retry');
  const esArchives: string[] = config.get('testData.esArchives');

  const esArchiver = new EsArchiver({
    client,

@@ -25,26 +23,12 @@ export function EsArchiverProvider({ getService }: FtrProviderContext): EsArchiver {
    kbnClient: kibanaServer,
  });

  KibanaServer.extendEsArchiver({
  extendEsArchiver({
    esArchiver,
    kibanaServer,
    retry,
    defaults: config.get('uiSettings.defaults'),
  });

  if (esArchives.length) {
    lifecycle.beforeTests.add(async () => {
      for (const archive of esArchives) {
        await esArchiver.load(archive);
      }
    });

    lifecycle.cleanup.add(async () => {
      for (const archive of esArchives) {
        await esArchiver.unload(archive);
      }
    });
  }

  return esArchiver;
}
|
|
packages/kbn-ftr-common-functional-services/services/ftr_provider_context.ts (new file)
@@ -0,0 +1,16 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import { GenericFtrProviderContext, GenericFtrService } from '@kbn/test';

import type { services } from './all';

type Services = typeof services;

export type FtrProviderContext = GenericFtrProviderContext<Services, {}>;
export class FtrService extends GenericFtrService<FtrProviderContext> {}
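Any new service added to this package can build on the `FtrService` base class exported above. A minimal sketch (not part of this commit), assuming the base class exposes the provider context as `this.ctx` the way other FTR services in the repo do; the service name and file are hypothetical.

```ts
// e.g. packages/kbn-ftr-common-functional-services/services/example.ts (hypothetical)
import { FtrService } from './ftr_provider_context';

export class ExampleService extends FtrService {
  private readonly log = this.ctx.getService('log');
  private readonly retry = this.ctx.getService('retry');

  async waitUntil(description: string, check: () => Promise<boolean>) {
    this.log.debug(`waiting for: ${description}`);
    await this.retry.waitFor(description, check);
  }
}
```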
|
|
@@ -9,7 +9,7 @@
import Url from 'url';
import { KbnClient } from '@kbn/test';

import { FtrProviderContext } from '../../ftr_provider_context';
import { FtrProviderContext } from '../ftr_provider_context';

export function KibanaServerProvider({ getService }: FtrProviderContext): KbnClient {
  const log = getService('log');

@@ -17,7 +17,6 @@ export function KibanaServerProvider({ getService }: FtrProviderContext): KbnClient {
  const lifecycle = getService('lifecycle');
  const url = Url.format(config.get('servers.kibana'));
  const defaults = config.get('uiSettings.defaults');
  const kbnArchives: string[] = config.get('testData.kbnArchives');

  const kbn = new KbnClient({
    log,

@@ -32,18 +31,5 @@ export function KibanaServerProvider({ getService }: FtrProviderContext): KbnClient {
    });
  }

  if (kbnArchives.length) {
    lifecycle.beforeTests.add(async () => {
      for (const archive of kbnArchives) {
        await kbn.importExport.load(archive);
      }
    });
    lifecycle.cleanup.add(async () => {
      for (const archive of kbnArchives) {
        await kbn.importExport.unload(archive);
      }
    });
  }

  return kbn;
}
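The `testData.esArchives` / `testData.kbnArchives` settings read by the providers above are plain FTR config values; archives listed there are loaded before the tests start and unloaded during cleanup. A hedged sketch, with placeholder archive names and base config path:

```ts
import { FtrConfigProviderContext } from '@kbn/test';

export default async function ({ readConfigFile }: FtrConfigProviderContext) {
  const base = await readConfigFile(require.resolve('../../test/functional/config.base.js'));

  return {
    ...base.getAll(),
    testData: {
      // placeholder archives, loaded automatically by esArchiver / kibanaServer
      esArchives: ['x-pack/test/functional/es_archives/some_data'],
      kbnArchives: ['x-pack/test/functional/fixtures/kbn_archiver/some_objects'],
    },
  };
}
```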
|
|
@@ -6,7 +6,7 @@
 * Side Public License, v 1.
 */

import { FtrService } from '../../ftr_provider_context';
import { FtrService } from '../ftr_provider_context';
import { retryForSuccess } from './retry_for_success';
import { retryForTruthy } from './retry_for_truthy';
|
||||
|
packages/kbn-ftr-common-functional-services/tsconfig.json (new file, 17 lines)
|
@ -0,0 +1,17 @@
|
|||
{
|
||||
"extends": "../../tsconfig.bazel.json",
|
||||
"compilerOptions": {
|
||||
"declaration": true,
|
||||
"declarationMap": true,
|
||||
"emitDeclarationOnly": true,
|
||||
"outDir": "target_types",
|
||||
"stripInternal": false,
|
||||
"types": [
|
||||
"jest",
|
||||
"node"
|
||||
]
|
||||
},
|
||||
"include": [
|
||||
"**/*.ts",
|
||||
]
|
||||
}
|
packages/kbn-ftr-screenshot-filename/BUILD.bazel (new file, 125 lines)
|
@ -0,0 +1,125 @@
|
|||
load("@npm//@bazel/typescript:index.bzl", "ts_config")
|
||||
load("@build_bazel_rules_nodejs//:index.bzl", "js_library")
|
||||
load("//src/dev/bazel:index.bzl", "jsts_transpiler", "pkg_npm", "pkg_npm_types", "ts_project")
|
||||
|
||||
PKG_DIRNAME = "kbn-ftr-screenshot-filename"
|
||||
PKG_REQUIRE_NAME = "@kbn/ftr-screenshot-filename"
|
||||
|
||||
SOURCE_FILES = glob(
|
||||
[
|
||||
"**/*.ts",
|
||||
],
|
||||
exclude = [
|
||||
"**/*.config.js",
|
||||
"**/*.mock.*",
|
||||
"**/*.test.*",
|
||||
"**/*.stories.*",
|
||||
"**/__snapshots__/**",
|
||||
"**/integration_tests/**",
|
||||
"**/mocks/**",
|
||||
"**/scripts/**",
|
||||
"**/storybook/**",
|
||||
"**/test_fixtures/**",
|
||||
"**/test_helpers/**",
|
||||
],
|
||||
)
|
||||
|
||||
SRCS = SOURCE_FILES
|
||||
|
||||
filegroup(
|
||||
name = "srcs",
|
||||
srcs = SRCS,
|
||||
)
|
||||
|
||||
NPM_MODULE_EXTRA_FILES = [
|
||||
"package.json",
|
||||
]
|
||||
|
||||
# In this array place runtime dependencies, including other packages and NPM packages
|
||||
# which must be available for this code to run.
|
||||
#
|
||||
# To reference other packages use:
|
||||
# "//repo/relative/path/to/package"
|
||||
# eg. "//packages/kbn-utils"
|
||||
#
|
||||
# To reference a NPM package use:
|
||||
# "@npm//name-of-package"
|
||||
# eg. "@npm//lodash"
|
||||
RUNTIME_DEPS = [
|
||||
]
|
||||
|
||||
# In this array place dependencies necessary to build the types, which will include the
|
||||
# :npm_module_types target of other packages and packages from NPM, including @types/*
|
||||
# packages.
|
||||
#
|
||||
# To reference the types for another package use:
|
||||
# "//repo/relative/path/to/package:npm_module_types"
|
||||
# eg. "//packages/kbn-utils:npm_module_types"
|
||||
#
|
||||
# References to NPM packages work the same as RUNTIME_DEPS
|
||||
TYPES_DEPS = [
|
||||
"@npm//@types/node",
|
||||
"@npm//@types/jest",
|
||||
"@npm//tslib",
|
||||
]
|
||||
|
||||
jsts_transpiler(
|
||||
name = "target_node",
|
||||
srcs = SRCS,
|
||||
build_pkg_name = package_name(),
|
||||
)
|
||||
|
||||
ts_config(
|
||||
name = "tsconfig",
|
||||
src = "tsconfig.json",
|
||||
deps = [
|
||||
"//:tsconfig.base.json",
|
||||
"//:tsconfig.bazel.json",
|
||||
],
|
||||
)
|
||||
|
||||
ts_project(
|
||||
name = "tsc_types",
|
||||
args = ['--pretty'],
|
||||
srcs = SRCS,
|
||||
deps = TYPES_DEPS,
|
||||
declaration = True,
|
||||
declaration_map = True,
|
||||
emit_declaration_only = True,
|
||||
out_dir = "target_types",
|
||||
tsconfig = ":tsconfig",
|
||||
)
|
||||
|
||||
js_library(
|
||||
name = PKG_DIRNAME,
|
||||
srcs = NPM_MODULE_EXTRA_FILES,
|
||||
deps = RUNTIME_DEPS + [":target_node"],
|
||||
package_name = PKG_REQUIRE_NAME,
|
||||
visibility = ["//visibility:public"],
|
||||
)
|
||||
|
||||
pkg_npm(
|
||||
name = "npm_module",
|
||||
deps = [":" + PKG_DIRNAME],
|
||||
)
|
||||
|
||||
filegroup(
|
||||
name = "build",
|
||||
srcs = [":npm_module"],
|
||||
visibility = ["//visibility:public"],
|
||||
)
|
||||
|
||||
pkg_npm_types(
|
||||
name = "npm_module_types",
|
||||
srcs = SRCS,
|
||||
deps = [":tsc_types"],
|
||||
package_name = PKG_REQUIRE_NAME,
|
||||
tsconfig = ":tsconfig",
|
||||
visibility = ["//visibility:public"],
|
||||
)
|
||||
|
||||
filegroup(
|
||||
name = "build_types",
|
||||
srcs = [":npm_module_types"],
|
||||
visibility = ["//visibility:public"],
|
||||
)
|
packages/kbn-ftr-screenshot-filename/README.md (new file)
@@ -0,0 +1,3 @@
# @kbn/ftr-screenshot-filename

A simple package that exposes a helper function for generating a unique screenshot filename that can be found by `node scripts/failed_test_reporter`.
|
|
packages/kbn-ftr-screenshot-filename/ftr_screenshot_filename.ts (new file)
@@ -0,0 +1,15 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import { createHash } from 'crypto';

export function create(fullTitle: string, opts?: { ext?: boolean }) {
  const truncatedName = fullTitle.replaceAll(/[^ a-zA-Z0-9-]+/g, '').slice(0, 80);
  const failureNameHash = createHash('sha256').update(fullTitle).digest('hex');
  return `${truncatedName}-${failureNameHash}${opts?.ext === false ? '' : `.png`}`;
}
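A minimal sketch of how a test helper could use this to name a failure screenshot so the failed-test reporter (see `getFtrScreenshotHtml` earlier in this commit) can match it back to the failing test; the output directory and usage are made up for illustration.

```ts
import Fs from 'fs';
import Path from 'path';

import { FtrScreenshotFilename } from '@kbn/ftr-screenshot-filename';

function saveFailureScreenshot(fullTestTitle: string, png: Buffer) {
  // produces something like "my suite my test-<sha256 of the full title>.png"
  const filename = FtrScreenshotFilename.create(fullTestTitle);

  // assumed location scanned by findAllScreenshots()
  const dir = Path.resolve('test/functional', 'screenshots', 'failure');
  Fs.mkdirSync(dir, { recursive: true });
  Fs.writeFileSync(Path.join(dir, filename), png);
}
```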
|
packages/kbn-ftr-screenshot-filename/index.ts (new file)
@@ -0,0 +1,11 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import * as FtrScreenshotFilename from './ftr_screenshot_filename';

export { FtrScreenshotFilename };
|
packages/kbn-ftr-screenshot-filename/jest.config.js (new file, 13 lines)
|
@ -0,0 +1,13 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
module.exports = {
|
||||
preset: '@kbn/test/jest_node',
|
||||
rootDir: '../..',
|
||||
roots: ['<rootDir>/packages/kbn-ftr-screenshot-filename'],
|
||||
};
|
packages/kbn-ftr-screenshot-filename/kibana.jsonc (new file, 8 lines)
|
@ -0,0 +1,8 @@
|
|||
{
|
||||
"type": "shared-common",
|
||||
"id": "@kbn/ftr-screenshot-filename",
|
||||
"owner": "@elastic/kibana-operations",
|
||||
"devOnly": true,
|
||||
"runtimeDeps": [],
|
||||
"typeDeps": [],
|
||||
}
|
packages/kbn-ftr-screenshot-filename/package.json (new file, 7 lines)
|
@ -0,0 +1,7 @@
|
|||
{
|
||||
"name": "@kbn/ftr-screenshot-filename",
|
||||
"private": true,
|
||||
"version": "1.0.0",
|
||||
"main": "./target_node/index.js",
|
||||
"license": "SSPL-1.0 OR Elastic License 2.0"
|
||||
}
|
packages/kbn-ftr-screenshot-filename/tsconfig.json (new file, 17 lines)
|
@ -0,0 +1,17 @@
|
|||
{
|
||||
"extends": "../../tsconfig.bazel.json",
|
||||
"compilerOptions": {
|
||||
"declaration": true,
|
||||
"declarationMap": true,
|
||||
"emitDeclarationOnly": true,
|
||||
"outDir": "target_types",
|
||||
"stripInternal": false,
|
||||
"types": [
|
||||
"jest",
|
||||
"node"
|
||||
]
|
||||
},
|
||||
"include": [
|
||||
"**/*.ts",
|
||||
]
|
||||
}
|
packages/kbn-journeys/BUILD.bazel (new file, 134 lines)
|
@ -0,0 +1,134 @@
|
|||
load("@npm//@bazel/typescript:index.bzl", "ts_config")
|
||||
load("@build_bazel_rules_nodejs//:index.bzl", "js_library")
|
||||
load("//src/dev/bazel:index.bzl", "jsts_transpiler", "pkg_npm", "pkg_npm_types", "ts_project")
|
||||
|
||||
PKG_DIRNAME = "kbn-journeys"
|
||||
PKG_REQUIRE_NAME = "@kbn/journeys"
|
||||
|
||||
SOURCE_FILES = glob(
|
||||
[
|
||||
"**/*.ts",
|
||||
],
|
||||
exclude = [
|
||||
"**/*.config.js",
|
||||
"**/*.mock.*",
|
||||
"**/*.test.*",
|
||||
"**/*.stories.*",
|
||||
"**/__snapshots__/**",
|
||||
"**/integration_tests/**",
|
||||
"**/mocks/**",
|
||||
"**/scripts/**",
|
||||
"**/storybook/**",
|
||||
"**/test_fixtures/**",
|
||||
"**/test_helpers/**",
|
||||
],
|
||||
)
|
||||
|
||||
SRCS = SOURCE_FILES
|
||||
|
||||
filegroup(
|
||||
name = "srcs",
|
||||
srcs = SRCS,
|
||||
)
|
||||
|
||||
NPM_MODULE_EXTRA_FILES = [
|
||||
"package.json",
|
||||
]
|
||||
|
||||
# In this array place runtime dependencies, including other packages and NPM packages
|
||||
# which must be available for this code to run.
|
||||
#
|
||||
# To reference other packages use:
|
||||
# "//repo/relative/path/to/package"
|
||||
# eg. "//packages/kbn-utils"
|
||||
#
|
||||
# To reference a NPM package use:
|
||||
# "@npm//name-of-package"
|
||||
# eg. "@npm//lodash"
|
||||
RUNTIME_DEPS = [
|
||||
]
|
||||
|
||||
# In this array place dependencies necessary to build the types, which will include the
|
||||
# :npm_module_types target of other packages and packages from NPM, including @types/*
|
||||
# packages.
|
||||
#
|
||||
# To reference the types for another package use:
|
||||
# "//repo/relative/path/to/package:npm_module_types"
|
||||
# eg. "//packages/kbn-utils:npm_module_types"
|
||||
#
|
||||
# References to NPM packages work the same as RUNTIME_DEPS
|
||||
TYPES_DEPS = [
|
||||
"@npm//@types/node",
|
||||
"@npm//@types/mocha",
|
||||
"@npm//playwright",
|
||||
"@npm//uuid",
|
||||
"@npm//axios",
|
||||
"@npm//callsites",
|
||||
"@npm//rxjs",
|
||||
"@npm//elastic-apm-node",
|
||||
"//packages/kbn-ftr-common-functional-services:npm_module_types",
|
||||
"//packages/kbn-ftr-screenshot-filename:npm_module_types",
|
||||
"//packages/kbn-test:npm_module_types",
|
||||
"//packages/kbn-utils:npm_module_types",
|
||||
]
|
||||
|
||||
jsts_transpiler(
|
||||
name = "target_node",
|
||||
srcs = SRCS,
|
||||
build_pkg_name = package_name(),
|
||||
)
|
||||
|
||||
ts_config(
|
||||
name = "tsconfig",
|
||||
src = "tsconfig.json",
|
||||
deps = [
|
||||
"//:tsconfig.base.json",
|
||||
"//:tsconfig.bazel.json",
|
||||
],
|
||||
)
|
||||
|
||||
ts_project(
|
||||
name = "tsc_types",
|
||||
args = ['--pretty'],
|
||||
srcs = SRCS,
|
||||
deps = TYPES_DEPS,
|
||||
declaration = True,
|
||||
declaration_map = True,
|
||||
emit_declaration_only = True,
|
||||
out_dir = "target_types",
|
||||
tsconfig = ":tsconfig",
|
||||
)
|
||||
|
||||
js_library(
|
||||
name = PKG_DIRNAME,
|
||||
srcs = NPM_MODULE_EXTRA_FILES,
|
||||
deps = RUNTIME_DEPS + [":target_node"],
|
||||
package_name = PKG_REQUIRE_NAME,
|
||||
visibility = ["//visibility:public"],
|
||||
)
|
||||
|
||||
pkg_npm(
|
||||
name = "npm_module",
|
||||
deps = [":" + PKG_DIRNAME],
|
||||
)
|
||||
|
||||
filegroup(
|
||||
name = "build",
|
||||
srcs = [":npm_module"],
|
||||
visibility = ["//visibility:public"],
|
||||
)
|
||||
|
||||
pkg_npm_types(
|
||||
name = "npm_module_types",
|
||||
srcs = SRCS,
|
||||
deps = [":tsc_types"],
|
||||
package_name = PKG_REQUIRE_NAME,
|
||||
tsconfig = ":tsconfig",
|
||||
visibility = ["//visibility:public"],
|
||||
)
|
||||
|
||||
filegroup(
|
||||
name = "build_types",
|
||||
srcs = [":npm_module_types"],
|
||||
visibility = ["//visibility:public"],
|
||||
)
|
packages/kbn-journeys/README.mdx (new file)
@@ -0,0 +1,32 @@
---
id: kibDevDocsOpsJourneys
slug: /kibana-dev-docs/ops/journeys
title: Journeys
description: A new style of functional test, focused on performance testing for now
tags: ['kibana', 'dev', 'contributor', 'operations', 'performance', 'functional', 'testing']
---

Journeys are a slightly newer take on Functional Tests, currently powered by [playwright](https://playwright.dev/docs).

A Journey is a single pathway through Kibana and looks something like this:

```ts
import { Journey } from '@kbn/journeys';
import { subj } from '@kbn/test-subj-selector';

export const journey = new Journey({
  esArchives: [ ... ],
  kbnArchives: [ ... ],
  scalabilitySetup: { ... },
})
  .step('Go to Discover Page', async ({ page, kbnUrl }) => {
    await page.goto(kbnUrl.get(`/app/discover`));
    await page.waitForSelector(subj('discoverDocTable'));
  })

  .step('Expand the first document', async ({ page }) => {
    const expandButtons = page.locator(subj('docTableExpandToggleColumn'));
    await expandButtons.first().click();
    await page.locator('text="Expanded document"');
  });
```
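Since the example above elides `scalabilitySetup`, here is an illustrative value using the `ScalabilitySetup` shape defined in `journey/journey_config.ts` later in this commit; the durations and user counts are made-up examples, not recommendations.

```ts
import type { ScalabilitySetup } from '@kbn/journeys';

const exampleScalabilitySetup: ScalabilitySetup = {
  maxDuration: '10m',
  warmup: [{ action: 'constantConcurrentUsers', userCount: 10, duration: '30s' }],
  test: [
    { action: 'rampConcurrentUsers', duration: '2m', minUsersCount: 10, maxUsersCount: 50 },
    { action: 'constantConcurrentUsers', userCount: 50, duration: '3m' },
  ],
};
```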
|
packages/kbn-journeys/index.ts (new file)
@@ -0,0 +1,15 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

export { JourneyConfig } from './journey/journey_config';
export type { ScalabilityAction, ScalabilitySetup } from './journey/journey_config';

export { Journey } from './journey/journey';
export type { Step } from './journey/journey';

export { JourneyScreenshots } from './journey/journey_screenshots';
|
packages/kbn-journeys/jest.config.js (new file, 13 lines)
|
@ -0,0 +1,13 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
module.exports = {
|
||||
preset: '@kbn/test/jest_node',
|
||||
rootDir: '../..',
|
||||
roots: ['<rootDir>/packages/kbn-journeys'],
|
||||
};
|
packages/kbn-journeys/journey/journey.ts (new file, 125 lines)
|
@ -0,0 +1,125 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { inspect } from 'util';
|
||||
|
||||
import { Page } from 'playwright';
|
||||
import callsites from 'callsites';
|
||||
import { ToolingLog } from '@kbn/tooling-log';
|
||||
import { FtrConfigProvider } from '@kbn/test';
|
||||
import { FtrProviderContext } from '@kbn/ftr-common-functional-services';
|
||||
|
||||
import { Auth } from '../services/auth';
|
||||
import { InputDelays } from '../services/input_delays';
|
||||
import { KibanaUrl } from '../services/kibana_url';
|
||||
|
||||
import { JourneyFtrHarness } from './journey_ftr_harness';
|
||||
import { makeFtrConfigProvider } from './journey_ftr_config';
|
||||
import { JourneyConfig, JourneyConfigOptions } from './journey_config';
|
||||
|
||||
export interface BaseStepCtx {
|
||||
page: Page;
|
||||
log: ToolingLog;
|
||||
inputDelays: InputDelays;
|
||||
kbnUrl: KibanaUrl;
|
||||
}
|
||||
|
||||
export type AnyStep = Step<{}>;
|
||||
|
||||
export interface Step<CtxExt extends object> {
|
||||
name: string;
|
||||
index: number;
|
||||
fn(ctx: BaseStepCtx & CtxExt): Promise<void>;
|
||||
}
|
||||
|
||||
const CONFIG_PROVIDER_CACHE = new WeakMap<Journey<any>, FtrConfigProvider>();
|
||||
|
||||
export class Journey<CtxExt extends object> {
|
||||
static convertToFtrConfigProvider(journey: Journey<any>) {
|
||||
const cached = CONFIG_PROVIDER_CACHE.get(journey);
|
||||
if (cached) {
|
||||
return cached;
|
||||
}
|
||||
|
||||
const provider = makeFtrConfigProvider(journey.config, journey.#steps);
|
||||
CONFIG_PROVIDER_CACHE.set(journey, provider);
|
||||
return provider;
|
||||
}
|
||||
|
||||
/**
|
||||
* Load a journey from a file path
|
||||
*/
|
||||
static async load(path: string) {
|
||||
let m;
|
||||
try {
|
||||
m = await import(path);
|
||||
} catch (error) {
|
||||
throw new Error(`Unable to load file: ${path}`);
|
||||
}
|
||||
|
||||
if (!m || !m.journey) {
|
||||
throw new Error(`[${path}] is not a journey`);
|
||||
}
|
||||
|
||||
const journey = m.journey;
|
||||
if (journey instanceof Journey) {
|
||||
return journey;
|
||||
}
|
||||
|
||||
const dbg = inspect(journey);
|
||||
throw new Error(`[${path}] does not export a Journey like it should, received ${dbg}`);
|
||||
}
|
||||
|
||||
#steps: Array<Step<CtxExt>> = [];
|
||||
|
||||
config: JourneyConfig<CtxExt>;
|
||||
|
||||
/**
|
||||
* Create a Journey which should be exported from a file in the
|
||||
* x-pack/performance/journeys directory.
|
||||
*/
|
||||
constructor(opts?: JourneyConfigOptions<CtxExt>) {
|
||||
const path = callsites().at(1)?.getFileName();
|
||||
|
||||
if (!path) {
|
||||
throw new Error('unable to determine path of journey config file');
|
||||
}
|
||||
|
||||
this.config = new JourneyConfig(path, opts);
|
||||
}
|
||||
|
||||
/**
|
||||
* Define a step of this Journey. Steps are only separated from each other
|
||||
* to aid in reading/debugging the journey and reading its logging output.
|
||||
*
|
||||
* If a journey fails, a failure report will be created with a screenshot
|
||||
* at the point of failure as well as a screenshot at the end of every
|
||||
* step.
|
||||
*/
|
||||
step(name: string, fn: (ctx: BaseStepCtx & CtxExt) => Promise<void>) {
|
||||
this.#steps.push({
|
||||
name,
|
||||
index: this.#steps.length,
|
||||
fn,
|
||||
});
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
/** called by FTR to setup tests */
|
||||
protected testProvider({ getService }: FtrProviderContext) {
|
||||
new JourneyFtrHarness(
|
||||
getService('log'),
|
||||
getService('config'),
|
||||
getService('esArchiver'),
|
||||
getService('kibanaServer'),
|
||||
new Auth(getService('config'), getService('log'), getService('kibanaServer')),
|
||||
this.config
|
||||
).initMochaSuite(this.#steps);
|
||||
}
|
||||
}
|
packages/kbn-journeys/journey/journey_config.ts (new file, 155 lines)
|
@ -0,0 +1,155 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import Path from 'path';
|
||||
|
||||
import { REPO_ROOT } from '@kbn/utils';
|
||||
|
||||
import { BaseStepCtx } from './journey';
|
||||
|
||||
export interface RampConcurrentUsersAction {
|
||||
action: 'rampConcurrentUsers';
|
||||
/**
|
||||
* Duration strings must be formatted as string that starts with an integer and
|
||||
* ends with either "m" or "s" for minutes and seconds, respectively
|
||||
*
|
||||
* eg: "1m" or "30s"
|
||||
*/
|
||||
duration: string;
|
||||
minUsersCount: number;
|
||||
maxUsersCount: number;
|
||||
}
|
||||
|
||||
export interface ConstantConcurrentUsersAction {
|
||||
action: 'constantConcurrentUsers';
|
||||
/**
|
||||
* Duration strings must be formatted as string that starts with an integer and
|
||||
* ends with either "m" or "s" for minutes and seconds, respectively
|
||||
*
|
||||
* eg: "1m" or "30s"
|
||||
*/
|
||||
duration: string;
|
||||
userCount: number;
|
||||
}
|
||||
|
||||
export type ScalabilityAction = RampConcurrentUsersAction | ConstantConcurrentUsersAction;
|
||||
|
||||
export interface ScalabilitySetup {
|
||||
/**
|
||||
* Duration strings must be formatted as string that starts with an integer and
|
||||
* ends with either "m" or "s" for minutes and seconds, respectively
|
||||
*
|
||||
* eg: "1m" or "30s"
|
||||
*/
|
||||
maxDuration: string;
|
||||
warmup: ScalabilityAction[];
|
||||
test: ScalabilityAction[];
|
||||
}
|
||||
|
||||
export interface JourneyConfigOptions<CtxExt> {
|
||||
/**
|
||||
* Set to `true` to skip this journey. should probably be preceded
|
||||
* by a link to a Github issue where the reasoning for why this was
|
||||
* skipped and not just deleted is outlined.
|
||||
*/
|
||||
skipped?: boolean;
|
||||
/**
|
||||
* Scalability configuration used to customize automatically generated
|
||||
* scalability tests. For now chat with Dima/Operations if you want to
|
||||
* use this option.
|
||||
*/
|
||||
scalabilitySetup?: ScalabilitySetup;
|
||||
/**
|
||||
* These labels will be attached to all APM data created when running
|
||||
* this journey.
|
||||
*/
|
||||
extraApmLabels?: Record<string, string>;
|
||||
/**
|
||||
* A list of kbnArchives which will be automatically loaded/unloaded
|
||||
* for this journey.
|
||||
*/
|
||||
kbnArchives?: string[];
|
||||
/**
|
||||
* A list of esArchives which will be automatically loaded/unloaded
|
||||
* for this journey.
|
||||
*/
|
||||
esArchives?: string[];
|
||||
/**
|
||||
* By default the API is used to get a cookie that can be used for all
|
||||
* navigation requests to Kibana, so that we don't ever see the login
|
||||
* screen. Set this to `false` to disable this behavior.
|
||||
*/
|
||||
skipAutoLogin?: boolean;
|
||||
/**
|
||||
* Use this to extend the context provided to each step. This function
|
||||
* is called with the default context and returns an object that will
|
||||
* be merged with the default context provided to each step function.
|
||||
*/
|
||||
extendContext?: (ctx: BaseStepCtx) => CtxExt;
|
||||
}
|
||||
|
||||
export class JourneyConfig<CtxExt extends object> {
|
||||
#opts: JourneyConfigOptions<CtxExt>;
|
||||
#path: string;
|
||||
#name: string;
|
||||
|
||||
constructor(path: string, opts: JourneyConfigOptions<CtxExt> = {}) {
|
||||
this.#path = path;
|
||||
this.#name = Path.basename(this.#path, Path.extname(this.#path));
|
||||
this.#opts = opts;
|
||||
}
|
||||
|
||||
getEsArchives() {
|
||||
return this.#opts.esArchives ?? [];
|
||||
}
|
||||
|
||||
getKbnArchives() {
|
||||
return this.#opts.kbnArchives ?? [];
|
||||
}
|
||||
|
||||
isXpack() {
|
||||
return this.getRepoRelPath().split(Path.sep).at(0) === 'x-pack';
|
||||
}
|
||||
|
||||
getExtraApmLabels() {
|
||||
return this.#opts.extraApmLabels ? { ...this.#opts.extraApmLabels } : {};
|
||||
}
|
||||
|
||||
getRepoRelPath() {
|
||||
return Path.relative(REPO_ROOT, this.getPath());
|
||||
}
|
||||
|
||||
getPath() {
|
||||
return this.#path;
|
||||
}
|
||||
|
||||
getName() {
|
||||
return this.#name;
|
||||
}
|
||||
|
||||
shouldAutoLogin() {
|
||||
return !this.#opts.skipAutoLogin;
|
||||
}
|
||||
|
||||
isSkipped() {
|
||||
return !!this.#opts.skipped;
|
||||
}
|
||||
|
||||
getScalabilityConfig() {
|
||||
return this.#opts.scalabilitySetup;
|
||||
}
|
||||
|
||||
getExtendedStepCtx(ctx: BaseStepCtx): BaseStepCtx & CtxExt {
|
||||
const ext = this.#opts.extendContext ?? (() => ({} as CtxExt));
|
||||
|
||||
return {
|
||||
...ctx,
|
||||
...ext(ctx),
|
||||
};
|
||||
}
|
||||
}
|
packages/kbn-journeys/journey/journey_ftr_config.ts (new file, 127 lines)
|
@ -0,0 +1,127 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import Path from 'path';
|
||||
|
||||
import { v4 as uuidV4 } from 'uuid';
|
||||
import { REPO_ROOT } from '@kbn/utils';
|
||||
import { FtrConfigProviderContext, FtrConfigProvider } from '@kbn/test';
|
||||
import { commonFunctionalServices } from '@kbn/ftr-common-functional-services';
|
||||
|
||||
import { AnyStep } from './journey';
|
||||
import { JourneyConfig } from './journey_config';
|
||||
|
||||
// These "secret" values are intentionally written in the source. We would make the APM server accept anonymous traffic if we could
|
||||
const APM_SERVER_URL = 'https://kibana-ops-e2e-perf.apm.us-central1.gcp.cloud.es.io:443';
|
||||
const APM_PUBLIC_TOKEN = 'CTs9y3cvcfq13bQqsB';
|
||||
|
||||
export function makeFtrConfigProvider(
|
||||
config: JourneyConfig<any>,
|
||||
steps: AnyStep[]
|
||||
): FtrConfigProvider {
|
||||
return async ({ readConfigFile }: FtrConfigProviderContext) => {
|
||||
const baseConfig = (
|
||||
await readConfigFile(
|
||||
Path.resolve(
|
||||
REPO_ROOT,
|
||||
config.isXpack()
|
||||
? 'x-pack/test/functional/config.base.js'
|
||||
: 'test/functional/config.base.js'
|
||||
)
|
||||
)
|
||||
).getAll();
|
||||
|
||||
const testBuildId = process.env.BUILDKITE_BUILD_ID ?? `local-${uuidV4()}`;
|
||||
const testJobId = process.env.BUILDKITE_JOB_ID ?? `local-${uuidV4()}`;
|
||||
const prId = process.env.GITHUB_PR_NUMBER
|
||||
? Number.parseInt(process.env.GITHUB_PR_NUMBER, 10)
|
||||
: undefined;
|
||||
|
||||
if (Number.isNaN(prId)) {
|
||||
throw new Error('invalid GITHUB_PR_NUMBER environment variable');
|
||||
}
|
||||
|
||||
const telemetryLabels: Record<string, string | boolean | undefined | number> = {
|
||||
branch: process.env.BUILDKITE_BRANCH,
|
||||
ciBuildId: process.env.BUILDKITE_BUILD_ID,
|
||||
ciBuildJobId: process.env.BUILDKITE_JOB_ID,
|
||||
ciBuildNumber: Number(process.env.BUILDKITE_BUILD_NUMBER) || 0,
|
||||
gitRev: process.env.BUILDKITE_COMMIT,
|
||||
isPr: prId !== undefined,
|
||||
...(prId !== undefined ? { prId } : {}),
|
||||
ciBuildName: process.env.BUILDKITE_PIPELINE_SLUG,
|
||||
journeyName: config.getName(),
|
||||
};
|
||||
|
||||
return {
|
||||
...baseConfig,
|
||||
|
||||
mochaOpts: {
|
||||
...baseConfig.mochaOpts,
|
||||
bail: true,
|
||||
},
|
||||
|
||||
services: commonFunctionalServices,
|
||||
pageObjects: {},
|
||||
|
||||
servicesRequiredForTestAnalysis: ['performance', 'journeyConfig'],
|
||||
|
||||
junit: {
|
||||
reportName: `Journey: ${config.getName()}`,
|
||||
metadata: {
|
||||
journeyName: config.getName(),
|
||||
stepNames: steps.map((s) => s.name),
|
||||
},
|
||||
},
|
||||
|
||||
kbnTestServer: {
|
||||
...baseConfig.kbnTestServer,
|
||||
// delay shutdown by 15 seconds to ensure that APM can report the data it collects during test execution
|
||||
delayShutdown: 15_000,
|
||||
|
||||
serverArgs: [
|
||||
...baseConfig.kbnTestServer.serverArgs,
|
||||
`--telemetry.optIn=${process.env.TEST_PERFORMANCE_PHASE === 'TEST'}`,
|
||||
`--telemetry.labels=${JSON.stringify(telemetryLabels)}`,
|
||||
'--csp.strict=false',
|
||||
'--csp.warnLegacyBrowsers=false',
|
||||
],
|
||||
|
||||
env: {
|
||||
ELASTIC_APM_ACTIVE: process.env.TEST_PERFORMANCE_PHASE ? 'true' : 'false',
|
||||
ELASTIC_APM_CONTEXT_PROPAGATION_ONLY: 'false',
|
||||
ELASTIC_APM_ENVIRONMENT: process.env.CI ? 'ci' : 'development',
|
||||
ELASTIC_APM_TRANSACTION_SAMPLE_RATE: '1.0',
|
||||
ELASTIC_APM_SERVER_URL: APM_SERVER_URL,
|
||||
ELASTIC_APM_SECRET_TOKEN: APM_PUBLIC_TOKEN,
|
||||
// capture request body for both errors and request transactions
|
||||
// https://www.elastic.co/guide/en/apm/agent/nodejs/current/configuration.html#capture-body
|
||||
ELASTIC_APM_CAPTURE_BODY: 'all',
|
||||
// capture request headers
|
||||
// https://www.elastic.co/guide/en/apm/agent/nodejs/current/configuration.html#capture-headers
|
||||
ELASTIC_APM_CAPTURE_HEADERS: true,
|
||||
// request body with bigger size will be trimmed.
|
||||
// 300_000 is the default of the APM server.
|
||||
// for a body with larger size, we might need to reconfigure the APM server to increase the limit.
|
||||
// https://www.elastic.co/guide/en/apm/agent/nodejs/current/configuration.html#long-field-max-length
|
||||
ELASTIC_APM_LONG_FIELD_MAX_LENGTH: 300_000,
|
||||
ELASTIC_APM_GLOBAL_LABELS: Object.entries({
|
||||
...config.getExtraApmLabels(),
|
||||
testJobId,
|
||||
testBuildId,
|
||||
journeyName: config.getName(),
|
||||
ftrConfig: config.getRepoRelPath(),
|
||||
performancePhase: process.env.TEST_PERFORMANCE_PHASE,
|
||||
})
|
||||
.flatMap(([key, value]) => (value == null ? [] : `${key}=${value}`))
|
||||
.join(','),
|
||||
},
|
||||
},
|
||||
};
|
||||
};
|
||||
}
|
packages/kbn-journeys/journey/journey_ftr_harness.ts (new file, 410 lines)
|
@ -0,0 +1,410 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import Url from 'url';
|
||||
import { inspect, format } from 'util';
|
||||
import { setTimeout } from 'timers/promises';
|
||||
|
||||
import * as Rx from 'rxjs';
|
||||
import apmNode from 'elastic-apm-node';
|
||||
import playwright, { ChromiumBrowser, Page, BrowserContext, CDPSession, Request } from 'playwright';
|
||||
import { asyncMap, asyncForEach } from '@kbn/std';
|
||||
import { ToolingLog } from '@kbn/tooling-log';
|
||||
import { Config } from '@kbn/test';
|
||||
import { EsArchiver, KibanaServer } from '@kbn/ftr-common-functional-services';
|
||||
|
||||
import { Auth } from '../services/auth';
|
||||
import { getInputDelays } from '../services/input_delays';
|
||||
import { KibanaUrl } from '../services/kibana_url';
|
||||
|
||||
import type { Step, AnyStep } from './journey';
|
||||
import type { JourneyConfig } from './journey_config';
|
||||
import { JourneyScreenshots } from './journey_screenshots';
|
||||
|
||||
export class JourneyFtrHarness {
|
||||
private readonly screenshots: JourneyScreenshots;
|
||||
|
||||
constructor(
|
||||
private readonly log: ToolingLog,
|
||||
private readonly config: Config,
|
||||
private readonly esArchiver: EsArchiver,
|
||||
private readonly kibanaServer: KibanaServer,
|
||||
private readonly auth: Auth,
|
||||
private readonly journeyConfig: JourneyConfig<any>
|
||||
) {
|
||||
this.screenshots = new JourneyScreenshots(this.journeyConfig.getName());
|
||||
}
|
||||
|
||||
private browser: ChromiumBrowser | undefined;
|
||||
private page: Page | undefined;
|
||||
private client: CDPSession | undefined;
|
||||
private context: BrowserContext | undefined;
|
||||
private currentSpanStack: Array<apmNode.Span | null> = [];
|
||||
private currentTransaction: apmNode.Transaction | undefined | null = undefined;
|
||||
|
||||
private pageTeardown$ = new Rx.Subject<Page>();
|
||||
private telemetryTrackerSubs = new Map<Page, Rx.Subscription>();
|
||||
|
||||
private apm: apmNode.Agent | null = null;
|
||||
|
||||
private async setupApm() {
|
||||
const kbnTestServerEnv = this.config.get(`kbnTestServer.env`);
|
||||
|
||||
this.apm = apmNode.start({
|
||||
serviceName: 'functional test runner',
|
||||
environment: process.env.CI ? 'ci' : 'development',
|
||||
active: kbnTestServerEnv.ELASTIC_APM_ACTIVE !== 'false',
|
||||
serverUrl: kbnTestServerEnv.ELASTIC_APM_SERVER_URL,
|
||||
secretToken: kbnTestServerEnv.ELASTIC_APM_SECRET_TOKEN,
|
||||
globalLabels: kbnTestServerEnv.ELASTIC_APM_GLOBAL_LABELS,
|
||||
transactionSampleRate: kbnTestServerEnv.ELASTIC_APM_TRANSACTION_SAMPLE_RATE,
|
||||
logger: {
|
||||
warn: (...args: any[]) => {
|
||||
this.log.warning('APM WARN', ...args);
|
||||
},
|
||||
info: (...args: any[]) => {
|
||||
this.log.info('APM INFO', ...args);
|
||||
},
|
||||
fatal: (...args: any[]) => {
|
||||
this.log.error(format('APM FATAL', ...args));
|
||||
},
|
||||
error: (...args: any[]) => {
|
||||
this.log.error(format('APM ERROR', ...args));
|
||||
},
|
||||
debug: (...args: any[]) => {
|
||||
this.log.debug('APM DEBUG', ...args);
|
||||
},
|
||||
trace: (...args: any[]) => {
|
||||
this.log.verbose('APM TRACE', ...args);
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
if (this.currentTransaction) {
|
||||
throw new Error(`Transaction exist, end prev transaction ${this.currentTransaction?.name}`);
|
||||
}
|
||||
|
||||
this.currentTransaction = this.apm?.startTransaction(
|
||||
`Journey: ${this.journeyConfig.getName()}`,
|
||||
'performance'
|
||||
);
|
||||
}
|
||||
|
||||
private async setupBrowserAndPage() {
|
||||
const browser = await this.getBrowserInstance();
|
||||
this.context = await browser.newContext({ bypassCSP: true });
|
||||
|
||||
if (this.journeyConfig.shouldAutoLogin()) {
|
||||
const cookie = await this.auth.login({ username: 'elastic', password: 'changeme' });
|
||||
await this.context.addCookies([cookie]);
|
||||
}
|
||||
|
||||
this.page = await this.context.newPage();
|
||||
|
||||
if (!process.env.NO_BROWSER_LOG) {
|
||||
this.page.on('console', this.onConsoleEvent);
|
||||
}
|
||||
|
||||
await this.sendCDPCommands(this.context, this.page);
|
||||
|
||||
this.trackTelemetryRequests(this.page);
|
||||
await this.interceptBrowserRequests(this.page);
|
||||
}
|
||||
|
||||
private async onSetup() {
|
||||
await Promise.all([
|
||||
this.setupApm(),
|
||||
this.setupBrowserAndPage(),
|
||||
asyncForEach(this.journeyConfig.getEsArchives(), async (esArchive) => {
|
||||
await this.esArchiver.load(esArchive);
|
||||
}),
|
||||
asyncForEach(this.journeyConfig.getKbnArchives(), async (kbnArchive) => {
|
||||
await this.kibanaServer.importExport.load(kbnArchive);
|
||||
}),
|
||||
]);
|
||||
}
|
||||
|
||||
private async tearDownBrowserAndPage() {
|
||||
if (this.page) {
|
||||
const telemetryTracker = this.telemetryTrackerSubs.get(this.page);
|
||||
this.telemetryTrackerSubs.delete(this.page);
|
||||
|
||||
if (telemetryTracker && !telemetryTracker.closed) {
|
||||
this.log.info(`Waiting for telemetry requests, including starting within next 3 secs`);
|
||||
this.pageTeardown$.next(this.page);
|
||||
await new Promise<void>((resolve) => telemetryTracker.add(resolve));
|
||||
}
|
||||
|
||||
this.log.info('destroying page');
|
||||
await this.client?.detach();
|
||||
await this.page.close();
|
||||
await this.context?.close();
|
||||
}
|
||||
|
||||
if (this.browser) {
|
||||
this.log.info('closing browser');
|
||||
await this.browser.close();
|
||||
}
|
||||
}
|
||||
|
||||
private async teardownApm() {
|
||||
if (!this.apm) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (this.currentTransaction) {
|
||||
this.currentTransaction.end('Success');
|
||||
this.currentTransaction = undefined;
|
||||
}
|
||||
|
||||
const apmStarted = this.apm.isStarted();
|
||||
// @ts-expect-error
|
||||
const apmActive = apmStarted && this.apm._conf.active;
|
||||
|
||||
if (!apmActive) {
|
||||
this.log.warning('APM is not active');
|
||||
return;
|
||||
}
|
||||
|
||||
this.log.info('Flushing APM');
|
||||
await new Promise<void>((resolve) => this.apm?.flush(() => resolve()));
|
||||
// wait for the HTTP request that apm.flush() starts, which we
|
||||
// can't track but hope it is started within 3 seconds, node will stay
|
||||
// alive for active requests
|
||||
// https://github.com/elastic/apm-agent-nodejs/issues/2088
|
||||
await setTimeout(3000);
|
||||
}
|
||||
|
||||
private async onTeardown() {
|
||||
await Promise.all([
|
||||
this.tearDownBrowserAndPage(),
|
||||
this.teardownApm(),
|
||||
asyncForEach(this.journeyConfig.getEsArchives(), async (esArchive) => {
|
||||
await this.esArchiver.unload(esArchive);
|
||||
}),
|
||||
asyncForEach(this.journeyConfig.getKbnArchives(), async (kbnArchive) => {
|
||||
await this.kibanaServer.importExport.unload(kbnArchive);
|
||||
}),
|
||||
]);
|
||||
}
|
||||
|
||||
private async onStepSuccess(step: AnyStep) {
|
||||
if (!this.page) {
|
||||
return;
|
||||
}
|
||||
|
||||
await this.screenshots.addSuccess(step, await this.page.screenshot());
|
||||
}
|
||||
|
||||
  private async onStepError(step: AnyStep, err: Error) {
    if (this.currentTransaction) {
      this.currentTransaction.end(`Failure ${err.message}`);
      this.currentTransaction = undefined;
    }

    if (this.page) {
      await this.screenshots.addError(step, await this.page.screenshot());
    }
  }

  private async withSpan<T>(name: string, type: string | undefined, block: () => Promise<T>) {
    if (!this.currentTransaction) {
      return await block();
    }

    const span = this.apm?.startSpan(name, type ?? null, {
      childOf: this.currentTransaction,
    });
    if (!span) {
      return await block();
    }

    try {
      this.currentSpanStack.unshift(span);
      const result = await block();
      span.setOutcome('success');
      span.end();
      return result;
    } catch (error) {
      span.setOutcome('failure');
      span.end();
      throw error;
    } finally {
      if (span !== this.currentSpanStack.shift()) {
        // eslint-disable-next-line no-unsafe-finally
        throw new Error('span stack mismatch');
      }
    }
  }

  private getCurrentTraceparent() {
    return (this.currentSpanStack.length ? this.currentSpanStack[0] : this.currentTransaction)
      ?.traceparent;
  }

  private async getBrowserInstance() {
    if (this.browser) {
      return this.browser;
    }
    return await this.withSpan('Browser creation', 'setup', async () => {
      const headless = !!(process.env.TEST_BROWSER_HEADLESS || process.env.CI);
      this.browser = await playwright.chromium.launch({ headless, timeout: 60_000 });
      return this.browser;
    });
  }

  private async sendCDPCommands(context: BrowserContext, page: Page) {
    const client = await context.newCDPSession(page);

    await client.send('Network.clearBrowserCache');
    await client.send('Network.setCacheDisabled', { cacheDisabled: true });
    await client.send('Network.emulateNetworkConditions', {
      latency: 100,
      downloadThroughput: 750_000,
      uploadThroughput: 750_000,
      offline: false,
    });

    return client;
  }

  private telemetryTrackerCount = 0;

  private trackTelemetryRequests(page: Page) {
    const id = ++this.telemetryTrackerCount;

    const requestFailure$ = Rx.fromEvent<Request>(page, 'requestfailed');
    const requestSuccess$ = Rx.fromEvent<Request>(page, 'requestfinished');
    const request$ = Rx.fromEvent<Request>(page, 'request').pipe(
      Rx.takeUntil(
        this.pageTeardown$.pipe(
          Rx.first((p) => p === page),
          Rx.delay(3000)
          // If EBT client buffers:
          // Rx.mergeMap(async () => {
          //   await page.waitForFunction(() => {
          //     // return window.kibana_ebt_client.buffer_size == 0
          //   });
          // })
        )
      ),
      Rx.mergeMap((request) => {
        if (!request.url().includes('telemetry-staging.elastic.co')) {
          return Rx.EMPTY;
        }

        this.log.debug(`Waiting for telemetry request #${id} to complete`);
        return Rx.merge(requestFailure$, requestSuccess$).pipe(
          Rx.first((r) => r === request),
          Rx.tap({
            complete: () => this.log.debug(`Telemetry request #${id} complete`),
          })
        );
      })
    );

    this.telemetryTrackerSubs.set(page, request$.subscribe());
  }

  private async interceptBrowserRequests(page: Page) {
    await page.route('**', async (route, request) => {
      const headers = await request.allHeaders();
      const traceparent = this.getCurrentTraceparent();
      if (traceparent && request.isNavigationRequest()) {
        await route.continue({ headers: { traceparent, ...headers } });
      } else {
        await route.continue();
      }
    });
  }

  #_ctx?: Record<string, unknown>;
  private getCtx() {
    if (this.#_ctx) {
      return this.#_ctx;
    }

    const page = this.page;

    if (!page) {
      throw new Error('performance service is not properly initialized');
    }

    this.#_ctx = this.journeyConfig.getExtendedStepCtx({
      page,
      log: this.log,
      inputDelays: getInputDelays(),
      kbnUrl: new KibanaUrl(
        new URL(
          Url.format({
            protocol: this.config.get('servers.kibana.protocol'),
            hostname: this.config.get('servers.kibana.hostname'),
            port: this.config.get('servers.kibana.port'),
          })
        )
      ),
    });

    return this.#_ctx;
  }

  public initMochaSuite(steps: Array<Step<any>>) {
    const journeyName = this.journeyConfig.getName();

    (this.journeyConfig.isSkipped() ? describe.skip : describe)(`Journey[${journeyName}]`, () => {
      before(async () => await this.onSetup());
      after(async () => await this.onTeardown());

      for (const step of steps) {
        it(step.name, async () => {
          await this.withSpan(`step: ${step.name}`, 'step', async () => {
            try {
              await step.fn(this.getCtx());
              await this.onStepSuccess(step);
            } catch (e) {
              const error = new Error(`Step [${step.name}] failed: ${e.message}`);
              error.stack = e.stack;
              await this.onStepError(step, error);
              throw error; // Rethrow error if step fails otherwise it is silently passing
            }
          });
        });
      }
    });
  }

  private onConsoleEvent = async (message: playwright.ConsoleMessage) => {
    try {
      const { url, lineNumber, columnNumber } = message.location();
      const location = `${url}:${lineNumber}:${columnNumber}`;

      const args = await asyncMap(message.args(), (handle) => handle.jsonValue());
      const text = args.length
        ? args.map((arg) => (typeof arg === 'string' ? arg : inspect(arg, false, null))).join(' ')
        : message.text();

      if (
        url.includes('kbn-ui-shared-deps-npm.dll.js') &&
        text.includes('moment construction falls')
      ) {
        // ignore errors from moment about constructing dates with invalid formats
        return;
      }

      const type = message.type();
      const method = type === 'debug' ? type : type === 'warning' ? 'error' : 'info';
      const name = type === 'warning' ? 'error' : 'log';
      this.log[method](`[console.${name}] @ ${location}:\n${text}`);
    } catch (error) {
      const dbg = inspect(message);
      this.log.error(
        `Error interpreting browser console.log:\nerror:${error.message}\nmessage:\n${dbg}`
      );
    }
  };
}
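For orientation, a minimal sketch of a journey file that consumes the step context built above (page, log, kbnUrl, inputDelays). The constructor option, archive path, and selector below are illustrative assumptions, not taken from this diff.

import { Journey } from '@kbn/journeys';

export const journey = new Journey({
  esArchives: ['x-pack/performance/es_archives/sample_data_flights'], // illustrative archive path
}).step('Open the dashboard listing', async ({ page, kbnUrl, log }) => {
  // kbnUrl.app() resolves against the Kibana server configured for the run
  await page.goto(kbnUrl.app('dashboards'));
  await page.waitForSelector('#dashboardListingHeading'); // assumed selector
  log.info('dashboard listing rendered');
});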
packages/kbn-journeys/journey/journey_screenshots.ts (new file, 128 lines)
@@ -0,0 +1,128 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import Path from 'path';
import Fsp from 'fs/promises';

import * as Rx from 'rxjs';
import { REPO_ROOT } from '@kbn/utils';
import { FtrScreenshotFilename } from '@kbn/ftr-screenshot-filename';

import type { AnyStep } from './journey';

interface StepShot {
  type: 'success' | 'failure';
  title: string;
  filename: string;
}

interface Manifest {
  steps: StepShot[];
}

const isObj = (v: unknown): v is Record<string, unknown> => typeof v === 'object' && v !== null;
const isString = (v: unknown): v is string => typeof v === 'string';
const isStepShot = (v: unknown): v is StepShot =>
  isObj(v) &&
  (v.type === 'success' || v.type === 'failure') &&
  isString(v.title) &&
  isString(v.filename);

const write = async (path: string, content: string | Buffer) => {
  await Fsp.mkdir(Path.dirname(path), { recursive: true });
  await Fsp.writeFile(path, content);
};

export class JourneyScreenshots {
  static async load(journeyName: string) {
    const screenshots = new JourneyScreenshots(journeyName);

    const json = await Fsp.readFile(screenshots.#manifestPath, 'utf8');
    const manifest = JSON.parse(json);

    if (!isObj(manifest)) {
      throw new Error('invalid manifest, json parsed but not to an object');
    }

    const { steps } = manifest;

    if (!Array.isArray(steps) || !steps.every(isStepShot)) {
      throw new Error('invalid manifest, steps must be an array of StepShot objects');
    }

    screenshots.#manifest = { steps };
    return screenshots;
  }

  readonly #dir: string;
  readonly #manifestPath: string;

  #manifest: Manifest = {
    steps: [],
  };

  constructor(journeyName: string) {
    this.#dir = Path.resolve(REPO_ROOT, 'data/journey_screenshots', journeyName);
    this.#manifestPath = Path.resolve(this.#dir, 'manifest.json');
  }

  readonly #isLocked = new Rx.BehaviorSubject<boolean>(false);
  async lock(fn: () => Promise<void>) {
    if (this.#isLocked.getValue()) {
      do {
        await Rx.firstValueFrom(this.#isLocked.pipe(Rx.skip(1)));
      } while (this.#isLocked.getValue());
    }

    try {
      this.#isLocked.next(true);
      await fn();
    } finally {
      this.#isLocked.next(false);
    }
  }

  async addError(step: AnyStep, screenshot: Buffer) {
    await this.lock(async () => {
      const filename = FtrScreenshotFilename.create(`${step.index}-${step.name}-failure`);
      this.#manifest.steps.push({
        type: 'failure',
        title: `Step #${step.index + 1}: ${step.name} - FAILED`,
        filename,
      });

      await Promise.all([
        write(Path.resolve(this.#dir, 'manifest.json'), JSON.stringify(this.#manifest)),
        write(Path.resolve(this.#dir, filename), screenshot),
      ]);
    });
  }

  async addSuccess(step: AnyStep, screenshot: Buffer) {
    await this.lock(async () => {
      const filename = FtrScreenshotFilename.create(`${step.index}-${step.name}`);
      this.#manifest.steps.push({
        type: 'success',
        title: `Step #${step.index + 1}: ${step.name} - DONE`,
        filename,
      });

      await Promise.all([
        write(Path.resolve(this.#dir, 'manifest.json'), JSON.stringify(this.#manifest)),
        write(Path.resolve(this.#dir, filename), screenshot),
      ]);
    });
  }

  get() {
    return this.#manifest.steps.map((stepShot) => ({
      ...stepShot,
      path: Path.resolve(this.#dir, stepShot.filename),
    }));
  }
}
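A minimal sketch of how the manifest above might be consumed after a run; the import location is an assumption (the class lives in packages/kbn-journeys), and the journey name is illustrative.

import { JourneyScreenshots } from '@kbn/journeys'; // assumed export location

async function printShots(journeyName: string) {
  // load() reads data/journey_screenshots/<journeyName>/manifest.json
  const screenshots = await JourneyScreenshots.load(journeyName);
  for (const shot of screenshots.get()) {
    console.log(`${shot.type}: ${shot.title} -> ${shot.path}`);
  }
}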
packages/kbn-journeys/kibana.jsonc (new file, 8 lines)
@@ -0,0 +1,8 @@
{
  "type": "shared-common",
  "id": "@kbn/journeys",
  "owner": "@elastic/kibana-operations",
  "devOnly": true,
  "runtimeDeps": [],
  "typeDeps": [],
}
packages/kbn-journeys/package.json (new file, 7 lines)
@@ -0,0 +1,7 @@
{
  "name": "@kbn/journeys",
  "private": true,
  "version": "1.0.0",
  "main": "./target_node/index.js",
  "license": "SSPL-1.0 OR Elastic License 2.0"
}
packages/kbn-journeys/services/auth.ts (new file, 85 lines)
@@ -0,0 +1,85 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import Url from 'url';
import { format } from 'util';

import axios, { AxiosResponse } from 'axios';
import { ToolingLog } from '@kbn/tooling-log';
import { Config } from '@kbn/test';
import { KibanaServer } from '@kbn/ftr-common-functional-services';

export interface Credentials {
  username: string;
  password: string;
}

function extractCookieValue(authResponse: AxiosResponse) {
  return authResponse.headers['set-cookie']?.[0].toString().split(';')[0].split('sid=')[1] ?? '';
}
export class Auth {
  constructor(
    private readonly config: Config,
    private readonly log: ToolingLog,
    private readonly kibanaServer: KibanaServer
  ) {}

  public async login({ username, password }: Credentials) {
    const baseUrl = new URL(
      Url.format({
        protocol: this.config.get('servers.kibana.protocol'),
        hostname: this.config.get('servers.kibana.hostname'),
        port: this.config.get('servers.kibana.port'),
      })
    );

    const loginUrl = new URL('/internal/security/login', baseUrl);
    const provider = baseUrl.hostname === 'localhost' ? 'basic' : 'cloud-basic';

    this.log.info('fetching auth cookie from', loginUrl.href);
    const authResponse = await axios.request({
      url: loginUrl.href,
      method: 'post',
      data: {
        providerType: 'basic',
        providerName: provider,
        currentURL: new URL('/login?next=%2F', baseUrl).href,
        params: { username, password },
      },
      headers: {
        'content-type': 'application/json',
        'kbn-version': await this.kibanaServer.version.get(),
        'sec-fetch-mode': 'cors',
        'sec-fetch-site': 'same-origin',
      },
      validateStatus: () => true,
      maxRedirects: 0,
    });

    const cookie = extractCookieValue(authResponse);
    if (cookie) {
      this.log.info('captured auth cookie');
    } else {
      this.log.error(
        format('unable to determine auth cookie from response', {
          status: `${authResponse.status} ${authResponse.statusText}`,
          body: authResponse.data,
          headers: authResponse.headers,
        })
      );

      throw new Error(`failed to determine auth cookie`);
    }

    return {
      name: 'sid',
      value: cookie,
      url: baseUrl.href,
    };
  }
}
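The cookie object returned by login() is shaped so it can be handed straight to Playwright. A sketch of that hand-off, assuming the Auth instance and browser context are wired up elsewhere and using sample credentials:

import type { BrowserContext } from 'playwright';

// Sketch only: `auth` is an Auth instance built from the FTR config, log and kibanaServer.
async function loginBrowser(auth: Auth, context: BrowserContext) {
  const cookie = await auth.login({ username: 'elastic', password: 'changeme' }); // sample credentials
  await context.addCookies([cookie]); // { name: 'sid', value, url }
}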
@@ -1,10 +1,12 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */
interface InputDelays {

export interface InputDelays {
  TYPING: number;
  MOUSE_CLICK: number;
}
@@ -20,7 +22,7 @@ const PROFILES: Record<string, InputDelays> = {
  },
};

export function InputDelaysProvider(): InputDelays {
export function getInputDelays(): InputDelays {
  const profile = PROFILES[process.env.INPUT_DELAY_PROFILE ?? 'user'];

  if (!profile) {
packages/kbn-journeys/services/kibana_url.ts (new file, 60 lines)
@@ -0,0 +1,60 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

export interface PathOptions {
  /**
   * Query string parameters
   */
  params?: Record<string, string>;
  /**
   * The hash value of the URL
   */
  hash?: string;
}

export class KibanaUrl {
  #baseUrl: URL;

  constructor(baseUrl: URL) {
    this.#baseUrl = baseUrl;
  }

  /**
   * Get an absolute URL based on Kibana's URL
   * @param rel relative url, resolved relative to Kibana's url
   * @param options optional modifications to apply to the URL
   */
  get(rel?: string, options?: PathOptions) {
    const url = new URL(rel ?? '/', this.#baseUrl);

    if (options?.params) {
      for (const [key, value] of Object.entries(options.params)) {
        url.searchParams.set(key, value);
      }
    }

    if (options?.hash !== undefined) {
      url.hash = options.hash;
    }

    return url.href;
  }

  /**
   * Get the URL for an app
   * @param appName name of the app to get the URL for
   * @param options optional modifications to apply to the URL
   */
  app(appName: string, options?: PathOptions) {
    return this.get(`/app/${appName}`, options);
  }

  toString() {
    return this.#baseUrl.href;
  }
}
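A quick usage sketch of the KibanaUrl class defined above; the base URL is the conventional FTR default and is only an assumption here.

const kbnUrl = new KibanaUrl(new URL('http://localhost:5620'));

kbnUrl.get();                                               // http://localhost:5620/
kbnUrl.app('discover', { hash: '/view/abc' });              // http://localhost:5620/app/discover#/view/abc
kbnUrl.get('/api/status', { params: { pretty: 'true' } });  // http://localhost:5620/api/status?pretty=true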
packages/kbn-journeys/tsconfig.json (new file, 17 lines)
@@ -0,0 +1,17 @@
{
  "extends": "../../tsconfig.bazel.json",
  "compilerOptions": {
    "declaration": true,
    "declarationMap": true,
    "emitDeclarationOnly": true,
    "outDir": "target_types",
    "stripInternal": false,
    "types": [
      "mocha",
      "node"
    ]
  },
  "include": [
    "**/*.ts",
  ]
}
@@ -66,8 +66,8 @@ RUNTIME_DEPS = [
TYPES_DEPS = [
  "//packages/kbn-dev-cli-errors:npm_module_types",
  "//packages/kbn-dev-cli-runner:npm_module_types",
  "//packages/kbn-test:npm_module_types",
  "//packages/kbn-tooling-log:npm_module_types",
  "//packages/kbn-journeys:npm_module_types",
  "@npm//@elastic/elasticsearch",
  "@npm//@types/node",
  "@npm//@types/jest",
@@ -12,16 +12,13 @@
 *
 *************************************************************/

import path from 'path';

import { run } from '@kbn/dev-cli-runner';
import { createFlagError } from '@kbn/dev-cli-errors';
import { EsVersion, readConfigFile } from '@kbn/test';
import path from 'path';
import { extractor } from './extractor';
import { ScalabilitySetup, TestData } from './types';
import { Journey } from '@kbn/journeys';

interface Vars {
  [key: string]: string;
}
import { extractor } from './extractor';

export async function runExtractor() {
  run(
@@ -50,50 +47,7 @@ export async function runExtractor() {
        throw createFlagError('--es-password must be defined');
      }

      const configPath = flags.config;
      if (typeof configPath !== 'string') {
        throw createFlagError('--config must be a string');
      }
      const config = await readConfigFile(log, EsVersion.getDefault(), path.resolve(configPath));

      const scalabilitySetup: ScalabilitySetup = config.get('scalabilitySetup');

      if (!scalabilitySetup) {
        log.warning(
          `'scalabilitySetup' is not defined in config file, output file for Kibana scalability run won't be generated`
        );
      }

      const testData: TestData = config.get('testData');

      const env = config.get(`kbnTestServer.env`);
      if (
        typeof env !== 'object' ||
        typeof env.ELASTIC_APM_GLOBAL_LABELS !== 'string' ||
        !env.ELASTIC_APM_GLOBAL_LABELS.includes('journeyName=')
      ) {
        log.error(
          `'journeyName' must be defined in config file:

          env: {
            ...config.kbnTestServer.env,
            ELASTIC_APM_GLOBAL_LABELS: Object.entries({
              journeyName: <journey name>,
            })
          },`
        );
        return;
      }

      const envVars: Vars = env.ELASTIC_APM_GLOBAL_LABELS.split(',').reduce(
        (acc: Vars, pair: string) => {
          const [key, value] = pair.split('=');
          return { ...acc, [key]: value };
        },
        {}
      );
      const journeyName = envVars.journeyName;

      const withoutStaticResources = !!flags['without-static-resources'] || false;
      const buildId = flags.buildId;
      if (buildId && typeof buildId !== 'string') {
        throw createFlagError('--buildId must be a string');
@@ -102,11 +56,37 @@ export async function runExtractor() {
        throw createFlagError('--buildId must be defined');
      }

      const withoutStaticResources = !!flags['without-static-resources'] || false;
      const configPath = flags.config;
      if (typeof configPath !== 'string') {
        throw createFlagError('--config must be a string');
      }
      const journey = await Journey.load(path.resolve(configPath));

      const scalabilitySetup = journey.config.getScalabilityConfig();
      if (!scalabilitySetup) {
        log.warning(
          `'scalabilitySetup' is not defined in config file, output file for Kibana scalability run won't be generated`
        );
      }

      const testData = {
        esArchives: journey.config.getEsArchives(),
        kbnArchives: journey.config.getKbnArchives(),
      };

      return extractor({
        param: { journeyName, scalabilitySetup, testData, buildId, withoutStaticResources },
        client: { baseURL, username, password },
        param: {
          journeyName: journey.config.getName(),
          scalabilitySetup,
          testData,
          buildId,
          withoutStaticResources,
        },
        client: {
          baseURL,
          username,
          password,
        },
        log,
      });
    },
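For context, the code removed above parsed ELASTIC_APM_GLOBAL_LABELS into a plain record to recover the journey name. A self-contained sketch of that parsing, with an illustrative label string (the ftrConfig label is an assumption):

const labels = 'journeyName=login,ftrConfig=x-pack/performance/journeys/login.ts'; // illustrative value
const vars = labels.split(',').reduce<Record<string, string>>((acc, pair) => {
  const [key, value] = pair.split('=');
  return { ...acc, [key]: value };
}, {});
// vars => { journeyName: 'login', ftrConfig: 'x-pack/performance/journeys/login.ts' }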
@@ -7,6 +7,7 @@
 */

import { ToolingLog } from '@kbn/tooling-log';
import { ScalabilitySetup } from '@kbn/journeys';

export interface Request {
  transactionId: string;
@@ -31,19 +32,6 @@ export interface Stream<T extends Request> {
  requests: T[];
}

export interface InjectionStep {
  action: string;
  minUsersCount?: number;
  maxUsersCount: number;
  duration: string;
}

export interface ScalabilitySetup {
  warmup: InjectionStep[];
  test: InjectionStep[];
  maxDuration: string;
}

export interface TestData {
  kbnArchives?: string[];
  esArchives?: string[];
@@ -52,7 +40,7 @@ export interface TestData {
export interface CLIParams {
  param: {
    journeyName: string;
    scalabilitySetup: ScalabilitySetup;
    scalabilitySetup?: ScalabilitySetup;
    testData: TestData;
    buildId: string;
    withoutStaticResources: boolean;
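For reference, a value matching the ScalabilitySetup/InjectionStep shape shown above might look like this; the action names and numbers are illustrative assumptions, not taken from this diff.

import type { ScalabilitySetup } from '@kbn/journeys';

const scalabilitySetup: ScalabilitySetup = {
  maxDuration: '10m',
  warmup: [{ action: 'constantConcurrentUsers', maxUsersCount: 10, duration: '30s' }],
  test: [{ action: 'rampConcurrentUsers', minUsersCount: 10, maxUsersCount: 50, duration: '2m' }],
};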
@@ -99,6 +99,7 @@ TYPES_DEPS = [
  "//packages/kbn-tooling-log:npm_module_types",
  "//packages/kbn-bazel-packages:npm_module_types",
  "//packages/kbn-get-repo-files:npm_module_types",
  "//packages/kbn-ftr-screenshot-filename:npm_module_types",
  "@npm//@elastic/elasticsearch",
  "@npm//@jest/console",
  "@npm//@jest/reporters",
@@ -116,6 +117,7 @@ TYPES_DEPS = [
  "@npm//jest-snapshot",
  "@npm//redux",
  "@npm//rxjs",
  "@npm//playwright",
  "@npm//xmlbuilder",
  "@npm//@types/archiver",
  "@npm//@types/chance",
@@ -6,23 +6,13 @@
 * Side Public License, v 1.
 */

// @internal
import {
  runTestsCli,
  processRunTestsCliOptions,
  startServersCli,
  processStartServersCliOptions,
  // @ts-ignore not typed yet
} from './src/functional_tests/cli';

export { KbnClientRequesterError } from './src/kbn_client/kbn_client_requester_error';

// @internal
export { runTestsCli, processRunTestsCliOptions, startServersCli, processStartServersCliOptions };
export { startServersCli, startServers } from './src/functional_tests/start_servers';

// @ts-ignore not typed yet
// @internal
export { runTests, startServers } from './src/functional_tests/tasks';
export { runTestsCli, runTests } from './src/functional_tests/run_tests';

export { getKibanaCliArg, getKibanaCliLoggers } from './src/functional_tests/lib/kibana_cli_args';

@@ -48,15 +38,9 @@ export {
  systemIndicesSuperuser,
} from './src/kbn';

export { readConfigFile } from './src/functional_test_runner/lib/config/read_config_file';

export { runFtrCli } from './src/functional_test_runner/cli';

// @internal
export { setupJUnitReportGeneration, escapeCdata } from './src/mocha';

export { runFailedTestsReporterCli } from './src/failed_tests_reporter';

export { CI_PARALLEL_PROCESS_PREFIX } from './src/ci_parallel_process_prefix';

export * from './src/functional_test_runner';
@@ -1,167 +0,0 @@ (deleted file)
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import { createHash } from 'crypto';
import { mkdirSync, readdirSync, readFileSync, statSync, writeFileSync } from 'fs';
import { join, basename, resolve } from 'path';

import { ToolingLog } from '@kbn/tooling-log';
import { REPO_ROOT } from '@kbn/utils';
import { escape } from 'he';

import { BuildkiteMetadata } from './buildkite_metadata';
import { TestFailure } from './get_failures';

const findScreenshots = (dirPath: string, allScreenshots: string[] = []) => {
  const files = readdirSync(dirPath);

  for (const file of files) {
    if (statSync(join(dirPath, file)).isDirectory()) {
      if (file.match(/node_modules/)) {
        continue;
      }

      allScreenshots = findScreenshots(join(dirPath, file), allScreenshots);
    } else {
      const fullPath = join(dirPath, file);
      if (fullPath.match(/screenshots\/failure\/.+\.png$/)) {
        allScreenshots.push(fullPath);
      }
    }
  }

  return allScreenshots;
};

export function reportFailuresToFile(
  log: ToolingLog,
  failures: TestFailure[],
  bkMeta: BuildkiteMetadata
) {
  if (!failures?.length) {
    return;
  }

  let screenshots: string[];
  try {
    screenshots = [
      ...findScreenshots(join(REPO_ROOT, 'test', 'functional')),
      ...findScreenshots(join(REPO_ROOT, 'x-pack', 'test', 'functional')),
    ];
  } catch (e) {
    log.error(e as Error);
    screenshots = [];
  }

  const screenshotsByName: Record<string, string> = {};
  for (const screenshot of screenshots) {
    const [name] = basename(screenshot).split('.');
    screenshotsByName[name] = screenshot;
  }

  // Jest could, in theory, fail 1000s of tests and write 1000s of failures
  // So let's just write files for the first 20
  for (const failure of failures.slice(0, 20)) {
    const hash = createHash('md5').update(failure.name).digest('hex');
    const filenameBase = `${
      process.env.BUILDKITE_JOB_ID ? process.env.BUILDKITE_JOB_ID + '_' : ''
    }${hash}`;
    const dir = join('target', 'test_failures');

    const failureLog = [
      ['Test:', '-----', failure.classname, failure.name, ''],
      ['Failure:', '--------', failure.failure],
      failure['system-out'] ? ['', 'Standard Out:', '-------------', failure['system-out']] : [],
    ]
      .flat()
      .join('\n');

    const failureJSON = JSON.stringify(
      {
        ...failure,
        hash,
        buildId: bkMeta.buildId,
        jobId: bkMeta.jobId,
        url: bkMeta.url,
        jobUrl: bkMeta.jobUrl,
        jobName: bkMeta.jobName,
      },
      null,
      2
    );

    let screenshot = '';
    const truncatedName = failure.name.replace(/([^ a-zA-Z0-9-]+)/g, '_').slice(0, 80);
    const failureNameHash = createHash('sha256').update(failure.name).digest('hex');
    const screenshotName = `${truncatedName}-${failureNameHash}`;

    if (screenshotsByName[screenshotName]) {
      try {
        screenshot = readFileSync(screenshotsByName[screenshotName]).toString('base64');
      } catch (e) {
        log.error(e as Error);
      }
    }

    const screenshotHtml = screenshot
      ? `<img class="screenshot img-fluid img-thumbnail" src="data:image/png;base64,${screenshot}" />`
      : '';

    const failureHTML = readFileSync(
      resolve(
        REPO_ROOT,
        'packages/kbn-test/src/failed_tests_reporter/report_failures_to_file_html_template.html'
      )
    )
      .toString()
      .replace('$TITLE', escape(failure.name))
      .replace(
        '$MAIN',
        `
        ${failure.classname
          .split('.')
          .map((part) => `<h5>${escape(part.replace('·', '.'))}</h5>`)
          .join('')}
        <hr />
        <p><strong>${escape(failure.name)}</strong></p>
        <p>
          <small>
            <strong>Failures in tracked branches</strong>: <span class="badge rounded-pill bg-danger">${
              failure.failureCount || 0
            }</span>
            ${
              failure.githubIssue
                ? `<br /><a href="${escape(failure.githubIssue)}">${escape(
                    failure.githubIssue
                  )}</a>`
                : ''
            }
          </small>
        </p>
        ${
          bkMeta.jobUrl
            ? `<p>
                <small>
                  <strong>Buildkite Job</strong><br />
                  <a href="${escape(bkMeta.jobUrl)}">${escape(bkMeta.jobUrl)}</a>
                </small>
              </p>`
            : ''
        }
        <pre>${escape(failure.failure)}</pre>
        ${screenshotHtml}
        <pre>${escape(failure['system-out'] || '')}</pre>
      `
      );

    mkdirSync(dir, { recursive: true });
    writeFileSync(join(dir, `${filenameBase}.log`), failureLog, 'utf8');
    writeFileSync(join(dir, `${filenameBase}.html`), failureHTML, 'utf8');
    writeFileSync(join(dir, `${filenameBase}.json`), failureJSON, 'utf8');
  }
}
@@ -1,210 +0,0 @@ (deleted file)
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import Path from 'path';

import { REPO_ROOT } from '@kbn/utils';
import { run } from '@kbn/dev-cli-runner';
import { createFailError, createFlagError } from '@kbn/dev-cli-errors';
import { CiStatsReporter } from '@kbn/ci-stats-reporter';
import globby from 'globby';
import normalize from 'normalize-path';

import { getFailures } from './get_failures';
import { GithubApi } from './github_api';
import { updateFailureIssue, createFailureIssue } from './report_failure';
import { readTestReport } from './test_report';
import { addMessagesToReport } from './add_messages_to_report';
import { getReportMessageIter } from './report_metadata';
import { reportFailuresToEs } from './report_failures_to_es';
import { reportFailuresToFile } from './report_failures_to_file';
import { getBuildkiteMetadata } from './buildkite_metadata';
import { ExistingFailedTestIssues } from './existing_failed_test_issues';

const DEFAULT_PATTERNS = [Path.resolve(REPO_ROOT, 'target/junit/**/*.xml')];
const DISABLE_MISSING_TEST_REPORT_ERRORS =
  process.env.DISABLE_MISSING_TEST_REPORT_ERRORS === 'true';

export function runFailedTestsReporterCli() {
  run(
    async ({ log, flags }) => {
      const indexInEs = flags['index-errors'];

      let updateGithub = flags['github-update'];
      if (updateGithub && !process.env.GITHUB_TOKEN) {
        throw createFailError(
          'GITHUB_TOKEN environment variable must be set, otherwise use --no-github-update flag'
        );
      }

      let branch: string = '';
      if (updateGithub) {
        let isPr = false;

        if (process.env.BUILDKITE === 'true') {
          branch = process.env.BUILDKITE_BRANCH || '';
          isPr = process.env.BUILDKITE_PULL_REQUEST === 'true';
          updateGithub = process.env.REPORT_FAILED_TESTS_TO_GITHUB === 'true';
        } else {
          // JOB_NAME is formatted as `elastic+kibana+7.x` in some places and `elastic+kibana+7.x/JOB=kibana-intake,node=immutable` in others
          const jobNameSplit = (process.env.JOB_NAME || '').split(/\+|\//);
          branch = jobNameSplit.length >= 3 ? jobNameSplit[2] : process.env.GIT_BRANCH || '';
          isPr = !!process.env.ghprbPullId;

          const isMainOrVersion = branch === 'main' || branch.match(/^\d+\.(x|\d+)$/);
          if (!isMainOrVersion || isPr) {
            log.info('Failure issues only created on main/version branch jobs');
            updateGithub = false;
          }
        }

        if (!branch) {
          throw createFailError(
            'Unable to determine originating branch from job name or other environment variables'
          );
        }
      }

      const githubApi = new GithubApi({
        log,
        token: process.env.GITHUB_TOKEN,
        dryRun: !updateGithub,
      });

      const bkMeta = getBuildkiteMetadata();

      try {
        const buildUrl = flags['build-url'] || (updateGithub ? '' : 'http://buildUrl');
        if (typeof buildUrl !== 'string' || !buildUrl) {
          throw createFlagError('Missing --build-url or process.env.BUILD_URL');
        }

        const patterns = (flags._.length ? flags._ : DEFAULT_PATTERNS).map((p) =>
          normalize(Path.resolve(p))
        );
        log.info('Searching for reports at', patterns);
        const reportPaths = await globby(patterns, {
          absolute: true,
        });

        if (!reportPaths.length && DISABLE_MISSING_TEST_REPORT_ERRORS) {
          // it is fine for code coverage to not have test results
          return;
        }

        if (!reportPaths.length) {
          throw createFailError(`Unable to find any junit reports with patterns [${patterns}]`);
        }

        log.info('found', reportPaths.length, 'junit reports', reportPaths);

        const existingIssues = new ExistingFailedTestIssues(log);
        for (const reportPath of reportPaths) {
          const report = await readTestReport(reportPath);
          const messages = Array.from(getReportMessageIter(report));
          const failures = getFailures(report);

          await existingIssues.loadForFailures(failures);

          if (indexInEs) {
            await reportFailuresToEs(log, failures);
          }

          for (const failure of failures) {
            const pushMessage = (msg: string) => {
              messages.push({
                classname: failure.classname,
                name: failure.name,
                message: msg,
              });
            };

            if (failure.likelyIrrelevant) {
              pushMessage(
                'Failure is likely irrelevant' +
                  (updateGithub ? ', so an issue was not created or updated' : '')
              );
              continue;
            }

            const existingIssue = existingIssues.getForFailure(failure);
            if (existingIssue) {
              const { newBody, newCount } = await updateFailureIssue(
                buildUrl,
                existingIssue,
                githubApi,
                branch
              );
              const url = existingIssue.github.htmlUrl;
              existingIssue.github.body = newBody;
              failure.githubIssue = url;
              failure.failureCount = updateGithub ? newCount : newCount - 1;
              pushMessage(`Test has failed ${newCount - 1} times on tracked branches: ${url}`);
              if (updateGithub) {
                pushMessage(`Updated existing issue: ${url} (fail count: ${newCount})`);
              }
              continue;
            }

            const newIssue = await createFailureIssue(buildUrl, failure, githubApi, branch);
            existingIssues.addNewlyCreated(failure, newIssue);
            pushMessage('Test has not failed recently on tracked branches');
            if (updateGithub) {
              pushMessage(`Created new issue: ${newIssue.html_url}`);
              failure.githubIssue = newIssue.html_url;
            }
            failure.failureCount = updateGithub ? 1 : 0;
          }

          // mutates report to include messages and writes updated report to disk
          await addMessagesToReport({
            report,
            messages,
            log,
            reportPath,
            dryRun: !flags['report-update'],
          });

          reportFailuresToFile(log, failures, bkMeta);
        }
      } finally {
        await CiStatsReporter.fromEnv(log).metrics([
          {
            group: 'github api request count',
            id: `failed test reporter`,
            value: githubApi.getRequestCount(),
            meta: Object.fromEntries(
              Object.entries(bkMeta).map(
                ([k, v]) => [`buildkite${k[0].toUpperCase()}${k.slice(1)}`, v] as const
              )
            ),
          },
        ]);
      }
    },
    {
      description: `a cli that opens issues or updates existing issues based on junit reports`,
      flags: {
        boolean: ['github-update', 'report-update'],
        string: ['build-url'],
        default: {
          'github-update': true,
          'report-update': true,
          'index-errors': true,
          'build-url': process.env.BUILD_URL,
        },
        help: `
          --no-github-update Execute the CLI without writing to Github
          --no-report-update Execute the CLI without writing to the JUnit reports
          --no-index-errors  Execute the CLI without indexing failures into Elasticsearch
          --build-url        URL of the failed build, defaults to process.env.BUILD_URL
        `,
      },
    }
  );
}
@@ -9,26 +9,15 @@
import Path from 'path';
import { inspect } from 'util';

import { run, Flags } from '@kbn/dev-cli-runner';
import { run } from '@kbn/dev-cli-runner';
import { createFlagError } from '@kbn/dev-cli-errors';
import { ToolingLog } from '@kbn/tooling-log';
import { getTimeReporter } from '@kbn/ci-stats-reporter';
import exitHook from 'exit-hook';

import { readConfigFile, EsVersion } from './lib';
import { FunctionalTestRunner } from './functional_test_runner';

const makeAbsolutePath = (v: string) => Path.resolve(process.cwd(), v);
const toArray = (v: string | string[]) => ([] as string[]).concat(v || []);
const parseInstallDir = (flags: Flags) => {
  const flag = flags['kibana-install-dir'];

  if (typeof flag !== 'string' && flag !== undefined) {
    throw createFlagError('--kibana-install-dir must be a string or not defined');
  }

  return flag ? makeAbsolutePath(flag) : undefined;
};

export function runFtrCli() {
  const runStartTime = Date.now();
  const toolingLog = new ToolingLog({
@@ -37,52 +26,49 @@ export function runFtrCli() {
  });
  const reportTime = getTimeReporter(toolingLog, 'scripts/functional_test_runner');
  run(
    async ({ flags, log }) => {
      const esVersion = flags['es-version'] || undefined; // convert "" to undefined
      if (esVersion !== undefined && typeof esVersion !== 'string') {
        throw createFlagError('expected --es-version to be a string');
    async ({ flagsReader, log }) => {
      const esVersionInput = flagsReader.string('es-version');

      const configPaths = [
        ...(flagsReader.arrayOfStrings('config') ?? []),
        ...(flagsReader.arrayOfStrings('journey') ?? []),
      ].map((rel) => Path.resolve(rel));
      if (configPaths.length !== 1) {
        throw createFlagError(`Expected there to be exactly one --config/--journey flag`);
      }

      const configRel = flags.config;
      if (typeof configRel !== 'string' || !configRel) {
        throw createFlagError('--config is required');
      }
      const configPath = makeAbsolutePath(configRel);

      const functionalTestRunner = new FunctionalTestRunner(
        log,
        configPath,
        {
          mochaOpts: {
            bail: flags.bail,
            dryRun: flags['dry-run'],
            grep: flags.grep || undefined,
            invert: flags.invert,
          },
          kbnTestServer: {
            installDir: parseInstallDir(flags),
          },
          suiteFiles: {
            include: toArray(flags.include as string | string[]).map(makeAbsolutePath),
            exclude: toArray(flags.exclude as string | string[]).map(makeAbsolutePath),
          },
          suiteTags: {
            include: toArray(flags['include-tag'] as string | string[]),
            exclude: toArray(flags['exclude-tag'] as string | string[]),
          },
          updateBaselines: flags.updateBaselines || flags.u,
          updateSnapshots: flags.updateSnapshots || flags.u,
      const esVersion = esVersionInput ? new EsVersion(esVersionInput) : EsVersion.getDefault();
      const settingOverrides = {
        mochaOpts: {
          bail: flagsReader.boolean('bail'),
          dryRun: flagsReader.boolean('dry-run'),
          grep: flagsReader.string('grep'),
          invert: flagsReader.boolean('invert'),
        },
        esVersion
      );
        kbnTestServer: {
          installDir: flagsReader.path('kibana-install-dir'),
        },
        suiteFiles: {
          include: flagsReader.arrayOfPaths('include') ?? [],
          exclude: flagsReader.arrayOfPaths('exclude') ?? [],
        },
        suiteTags: {
          include: flagsReader.arrayOfStrings('include-tag') ?? [],
          exclude: flagsReader.arrayOfStrings('exclude-tag') ?? [],
        },
        updateBaselines: flagsReader.boolean('updateBaselines') || flagsReader.boolean('u'),
        updateSnapshots: flagsReader.boolean('updateSnapshots') || flagsReader.boolean('u'),
      };

      await functionalTestRunner.readConfigFile();
      const config = await readConfigFile(log, esVersion, configPaths[0], settingOverrides);

      if (flags.throttle) {
      const functionalTestRunner = new FunctionalTestRunner(log, config, esVersion);

      if (flagsReader.boolean('throttle')) {
        process.env.TEST_THROTTLE_NETWORK = '1';
      }

      if (flags.headless) {
      if (flagsReader.boolean('headless')) {
        process.env.TEST_BROWSER_HEADLESS = '1';
      }

@@ -95,7 +81,7 @@
        await reportTime(runStartTime, 'total', {
          success: false,
          err: err.message,
          ...flags,
          ...Object.fromEntries(flagsReader.getUsed().entries()),
        });
        log.indent(-log.getIndent());
        log.error(err);
@@ -103,7 +89,7 @@
      } else {
        await reportTime(runStartTime, 'total', {
          success: true,
          ...flags,
          ...Object.fromEntries(flagsReader.getUsed().entries()),
        });
      }

@@ -118,7 +104,7 @@
      exitHook(teardown);

      try {
        if (flags['test-stats']) {
        if (flagsReader.boolean('test-stats')) {
          process.stderr.write(
            JSON.stringify(await functionalTestRunner.getTestStats(), null, 2) + '\n'
          );
@@ -139,6 +125,7 @@
      flags: {
        string: [
          'config',
          'journey',
          'grep',
          'include',
          'exclude',
@@ -159,7 +146,8 @@
          'dry-run',
        ],
        help: `
          --config=path      path to a config file
          --config=path      path to a config file (either this or --journey is required)
          --journey=path     path to a journey file (either this or --config is required)
          --bail             stop tests after the first failure
          --grep <pattern>   pattern used to select which tests to run
          --invert           invert grep to exclude tests
@@ -15,6 +15,7 @@
import { EventEmitter } from 'events';

export interface Suite {
  currentTest?: Test;
  suites: Suite[];
  tests: Test[];
  title: string;
Some files were not shown because too many files have changed in this diff.