[8.3] [CI] Move kibana-buildkite-library into kibana (#134787) (#135350)

* [CI] Move kibana-buildkite-library into kibana (#134787)

(cherry picked from commit 0b0b68786a)

# Conflicts:
#	.buildkite/package-lock.json
#	.buildkite/package.json
#	.buildkite/pipelines/on_merge.yml
#	.buildkite/scripts/pipelines/pull_request/pipeline.ts
#	.buildkite/scripts/steps/code_coverage/clean_coverage_paths.ts
#	.buildkite/scripts/steps/code_coverage/util.sh
#	.buildkite/scripts/steps/es_snapshots/promote.sh
#	.buildkite/scripts/steps/trigger_pipeline.ts

* Update package-lock.json
Brian Seeders 2022-06-28 15:00:44 -04:00 committed by GitHub
parent f6b319f142
commit ae972e1b3a
64 changed files with 4470 additions and 110 deletions

.buildkite/.mocharc.json
@@ -0,0 +1,5 @@
{
"extension": ["ts"],
"spec": "**/*.test.ts",
"require": "ts-node/register"
}
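With this config, mocha picks up any `*.test.ts` file under `.buildkite/` and compiles it on the fly via `ts-node/register` (mocha, chai, and ts-node are added to devDependencies below). A minimal hypothetical spec, just to show the shape these tests take:

import { expect } from 'chai';

describe('example', () => {
  it('runs under ts-node/register', () => {
    expect(1 + 1).to.eql(2);
  });
});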

.buildkite/.npmrc
@@ -0,0 +1 @@
legacy-peer-deps=true

@@ -3,6 +3,7 @@
## Directory Structure
- `hooks` - special directory used by Buildkite agents for [hooks](https://buildkite.com/docs/agent/v3/hooks)
- `pipeline-utils` - shared TypeScript utils for use in pipeline scripts
- `pipelines` - contains pipeline definitions
- `scripts/common` - scripts that get `source`d by other scripts to set environment variables or import shared functions
- `scripts/lifecycle` - general scripts for tasks that run before or after individual steps or the entire build

File diff suppressed because it is too large.

@@ -1,8 +1,33 @@
{
"name": "kibana-buildkite",
"version": "1.0.0",
"private": true,
"description": "Kibana Buildkite",
"scripts": {
"test": "mocha",
"test:watch": "mocha --watch"
},
"dependencies": {
"kibana-buildkite-library": "git+https://git@github.com/elastic/kibana-buildkite-library#4ecaba35293fb635cf92ca205ee84fca52f19e2e"
"@octokit/rest": "^18.10.0",
"axios": "^0.21.4",
"globby": "^11.1.0",
"js-yaml": "^4.1.0",
"minimatch": "^5.0.1",
"tslib": "*"
},
"devDependencies": {
"@types/chai": "^4.2.10",
"@types/js-yaml": "^4.0.5",
"@types/minimatch": "^3.0.5",
"@types/mocha": "^7.0.2",
"@types/node": "^15.12.2",
"chai": "^4.2.0",
"mocha": "^8.2.1",
"nock": "^12.0.2",
"ts-node": "^10.7.0",
"typescript": "^4.6.4"
},
"imports": {
"#pipeline-utils": "./pipeline-utils/index.ts",
"#pipeline-utils/*": "./pipeline-utils/*"
}
}
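The `imports` map above is what lets scripts drop the old `kibana-buildkite-library` git dependency: they now resolve the shared utils through the `#pipeline-utils` subpath (see the migrated scripts at the end of this diff). A minimal sketch of a hypothetical consuming script, run through ts-node like the others:

import { BuildkiteClient } from '#pipeline-utils';

const buildkite = new BuildkiteClient();

(async () => {
  // getCurrentBuildStatus() reads BUILDKITE_PIPELINE_SLUG and
  // BUILDKITE_BUILD_NUMBER from the environment, as defined in the client below
  const status = await buildkite.getCurrentBuildStatus();
  console.log('build success:', status.success);
})();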

@@ -0,0 +1,267 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import { expect } from 'chai';
import { BuildkiteClient } from './client';
import { Build } from './types/build';
import { Job } from './types/job';
describe('BuildkiteClient', () => {
let buildkite: BuildkiteClient;
beforeEach(() => {
buildkite = new BuildkiteClient();
});
describe('getBuildStatus', () => {
it('does not have hasNonPreemptionRetries for preemption retries', async () => {
const job: Job = {
id: 'id-1',
retried_in_job_id: 'id-2',
state: 'failed',
agent: {
meta_data: ['spot=true'],
},
retried: true,
exit_status: -1,
type: 'script',
} as Job;
const retry: Job = {
id: 'id-2',
state: 'passed',
agent: {
meta_data: ['spot=true'],
},
type: 'script',
} as Job;
const build = {
id: 'id',
state: 'passed',
jobs: [job, retry],
} as Build;
const buildStatus = buildkite.getBuildStatus(build);
expect(buildStatus.success).to.eql(true);
expect(buildStatus.hasRetries).to.eql(true);
expect(buildStatus.hasNonPreemptionRetries).to.eql(false);
});
it('has hasNonPreemptionRetries for spot non-preemption retries', async () => {
const job: Job = {
id: 'id-1',
retried_in_job_id: 'id-2',
state: 'failed',
agent: {
meta_data: ['spot=true'],
},
retried: true,
exit_status: 1,
type: 'script',
} as Job;
const retry: Job = {
id: 'id-2',
state: 'passed',
agent: {
meta_data: ['spot=true'],
},
type: 'script',
} as Job;
const build = {
id: 'id',
state: 'passed',
jobs: [job, retry],
} as Build;
const buildStatus = buildkite.getBuildStatus(build);
expect(buildStatus.success).to.eql(true);
expect(buildStatus.hasRetries).to.eql(true);
expect(buildStatus.hasNonPreemptionRetries).to.eql(true);
});
it('has hasNonPreemptionRetries for non-spot retries with exit code -1', async () => {
const job: Job = {
id: 'id-1',
retried_in_job_id: 'id-2',
state: 'failed',
retried: true,
exit_status: -1,
type: 'script',
} as Job;
const retry: Job = {
id: 'id-2',
state: 'passed',
type: 'script',
} as Job;
const build = {
id: 'id',
state: 'passed',
jobs: [job, retry],
} as Build;
const buildStatus = buildkite.getBuildStatus(build);
expect(buildStatus.success).to.eql(true);
expect(buildStatus.hasRetries).to.eql(true);
expect(buildStatus.hasNonPreemptionRetries).to.eql(true);
});
it('returns failure if build is failed and all jobs passed', async () => {
const job = {
id: 'id_1',
state: 'passed',
} as Job;
const build = {
id: 'id',
state: 'failed',
jobs: [job],
} as Build;
const result = buildkite.getBuildStatus(build);
expect(result.success).to.eql(false);
});
});
describe('getJobStatus', () => {
it('returns success if job is successful', async () => {
const job = {
id: 'id',
state: 'passed',
type: 'script',
} as Job;
const build = {
id: 'id',
state: 'passed',
jobs: [job],
} as Build;
const result = buildkite.getJobStatus(build, job);
expect(result.success).to.eql(true);
});
it('returns failure if job is unsuccessful', async () => {
const job = {
id: 'id',
state: 'failed',
type: 'script',
} as Job;
const build = {
id: 'id',
state: 'failed',
jobs: [job],
} as Build;
const result = buildkite.getJobStatus(build, job);
expect(result.success).to.eql(false);
});
it('returns success if retried job is successful', async () => {
const job = {
id: 'id_1',
state: 'failed',
retried: true,
retried_in_job_id: 'id_2',
} as Job;
const jobRetry = {
id: 'id_2',
state: 'passed',
} as Job;
const build = {
id: 'id',
state: 'passed',
jobs: [job, jobRetry],
} as Build;
const result = buildkite.getJobStatus(build, job);
expect(result.success).to.eql(true);
});
it('returns failure if retried job is unsuccessful', async () => {
const job = {
id: 'id_1',
state: 'failed',
retried: true,
retried_in_job_id: 'id_2',
} as Job;
const jobRetry = {
id: 'id_2',
state: 'failed',
} as Job;
const build = {
id: 'id',
state: 'failed',
jobs: [job, jobRetry],
} as Build;
const result = buildkite.getJobStatus(build, job);
expect(result.success).to.eql(false);
});
it('returns failure if job is waiting_failed', async () => {
const job = {
id: 'id_1',
state: 'waiting_failed',
} as Job;
const build = {
id: 'id',
state: 'failed',
jobs: [job],
} as Build;
const result = buildkite.getJobStatus(build, job);
expect(result.success).to.eql(false);
});
it('returns success if job is broken but of type: manual', async () => {
const job = {
id: 'id',
state: 'broken',
type: 'manual',
} as Job;
const build = {
id: 'id',
state: 'passed',
jobs: [job],
} as Build;
const result = buildkite.getJobStatus(build, job);
expect(result.success).to.eql(true);
});
it('returns success if job is broken but has no exit status', async () => {
const job = {
id: 'id',
state: 'broken',
type: 'script',
exit_status: null,
} as Job;
const build = {
id: 'id',
state: 'passed',
jobs: [job],
} as Build;
const result = buildkite.getJobStatus(build, job);
expect(result.success).to.eql(true);
});
});
});

@@ -0,0 +1,267 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import axios, { AxiosInstance } from 'axios';
import { execSync } from 'child_process';
import { dump } from 'js-yaml';
import { parseLinkHeader } from './parse_link_header';
import { Artifact } from './types/artifact';
import { Build, BuildStatus } from './types/build';
import { Job, JobState } from './types/job';
export interface BuildkiteClientConfig {
baseUrl?: string;
token?: string;
}
export interface BuildkiteGroup {
group: string;
steps: BuildkiteStep[];
}
export interface BuildkiteStep {
command: string;
label: string;
parallelism?: number;
agents: {
queue: string;
};
timeout_in_minutes?: number;
key?: string;
depends_on?: string | string[];
retry?: {
automatic: Array<{
exit_status: string;
limit: number;
}>;
};
env?: { [key: string]: string };
}
export interface BuildkiteTriggerBuildParams {
commit: string;
branch: string;
env?: Record<string, string>;
author?: {
name: string;
email: string;
};
ignore_pipeline_branch_filters?: boolean;
message?: string;
meta_data?: Record<string, string>;
pull_request_base_branch?: string;
pull_request_id?: string | number;
pull_request_repository?: string;
}
export class BuildkiteClient {
http: AxiosInstance;
constructor(config: BuildkiteClientConfig = {}) {
const BUILDKITE_BASE_URL =
config.baseUrl ?? process.env.BUILDKITE_BASE_URL ?? 'https://api.buildkite.com';
const BUILDKITE_TOKEN = config.token ?? process.env.BUILDKITE_TOKEN;
// const BUILDKITE_AGENT_BASE_URL =
// process.env.BUILDKITE_AGENT_BASE_URL || 'https://agent.buildkite.com/v3';
// const BUILDKITE_AGENT_TOKEN = process.env.BUILDKITE_AGENT_TOKEN;
this.http = axios.create({
baseURL: BUILDKITE_BASE_URL,
headers: {
Authorization: `Bearer ${BUILDKITE_TOKEN}`,
},
});
// this.agentHttp = axios.create({
// baseURL: BUILDKITE_AGENT_BASE_URL,
// headers: {
// Authorization: `Token ${BUILDKITE_AGENT_TOKEN}`,
// },
// });
}
getBuild = async (
pipelineSlug: string,
buildNumber: string | number,
includeRetriedJobs = false
): Promise<Build> => {
// TODO properly assemble URL
const link = `v2/organizations/elastic/pipelines/${pipelineSlug}/builds/${buildNumber}?include_retried_jobs=${includeRetriedJobs.toString()}`;
const resp = await this.http.get(link);
return resp.data as Build;
};
getCurrentBuild = (includeRetriedJobs = false) => {
if (!process.env.BUILDKITE_PIPELINE_SLUG || !process.env.BUILDKITE_BUILD_NUMBER) {
throw new Error(
'BUILDKITE_PIPELINE_SLUG and BUILDKITE_BUILD_NUMBER must be set to get current build'
);
}
return this.getBuild(
process.env.BUILDKITE_PIPELINE_SLUG,
process.env.BUILDKITE_BUILD_NUMBER,
includeRetriedJobs
);
};
getJobStatus = (build: Build, job: Job): { success: boolean; state: JobState } => {
if (job.retried) {
const retriedJob = build.jobs.find((j) => j.id === job.retried_in_job_id);
if (!retriedJob) {
throw Error(`Couldn't find retried job ID ${job.retried_in_job_id} for job ${job.id}`);
}
return this.getJobStatus(build, retriedJob);
}
let success: boolean;
// "Manual" steps are for input, when they are skipped, they have state: broken in the API
// So let's always mark them as successful, they can't really fail
// `broken` used to be in this list, but has been removed, it's essentially another type of skip status
// https://buildkite.com/docs/pipelines/defining-steps#job-states - See "Differentiating between broken, skipped and canceled states:"
success =
job.type === 'manual' ||
![
'failed',
'timed_out',
'timing_out',
'waiting_failed',
'unblocked_failed',
'blocked_failed',
].includes(job.state);
if (job.soft_failed) {
success = true;
}
return {
success,
state: job.state,
};
};
getBuildStatus = (build: Build): BuildStatus => {
let hasRetries = false;
let hasNonPreemptionRetries = false;
let success = build.state !== 'failed';
for (const job of build.jobs) {
if (job.retried) {
hasRetries = true;
const isPreemptionFailure =
job.state === 'failed' &&
job.agent?.meta_data?.includes('spot=true') &&
job.exit_status === -1;
if (!isPreemptionFailure) {
hasNonPreemptionRetries = true;
}
}
const state = this.getJobStatus(build, job);
success = success && state.success;
}
return {
state: build.state,
success,
hasRetries,
hasNonPreemptionRetries,
};
};
getCurrentBuildStatus = async (includeRetriedJobs = false) => {
return this.getBuildStatus(await this.getCurrentBuild(includeRetriedJobs));
};
getArtifacts = async (
pipelineSlug: string,
buildNumber: string | number
): Promise<Artifact[]> => {
let link = `v2/organizations/elastic/pipelines/${pipelineSlug}/builds/${buildNumber}/artifacts?per_page=100`;
const artifacts = [];
// Don't get stuck in an infinite loop or follow more than 50 pages
for (let i = 0; i < 50; i++) {
if (!link) {
break;
}
const resp = await this.http.get(link);
link = '';
artifacts.push(await resp.data);
if (resp.headers.link) {
const result = parseLinkHeader(resp.headers.link as string);
if (result?.next) {
link = result.next;
}
}
}
return artifacts.flat();
};
getArtifactsForCurrentBuild = (): Promise<Artifact[]> => {
if (!process.env.BUILDKITE_PIPELINE_SLUG || !process.env.BUILDKITE_BUILD_NUMBER) {
throw new Error(
'BUILDKITE_PIPELINE_SLUG and BUILDKITE_BUILD_NUMBER must be set to get current build'
);
}
return this.getArtifacts(
process.env.BUILDKITE_PIPELINE_SLUG,
process.env.BUILDKITE_BUILD_NUMBER
);
};
// https://buildkite.com/docs/apis/rest-api/builds#create-a-build
triggerBuild = async (
pipelineSlug: string,
options: BuildkiteTriggerBuildParams
): Promise<Build> => {
const url = `v2/organizations/elastic/pipelines/${pipelineSlug}/builds`;
return (await this.http.post(url, options)).data;
};
setMetadata = (key: string, value: string) => {
execSync(`buildkite-agent meta-data set '${key}'`, {
input: value,
stdio: ['pipe', 'inherit', 'inherit'],
});
};
setAnnotation = (
context: string,
style: 'info' | 'success' | 'warning' | 'error',
value: string
) => {
execSync(`buildkite-agent annotate --context '${context}' --style '${style}'`, {
input: value,
stdio: ['pipe', 'inherit', 'inherit'],
});
};
uploadArtifacts = (pattern: string) => {
execSync(`buildkite-agent artifact upload '${pattern}'`, {
stdio: ['ignore', 'inherit', 'inherit'],
});
};
uploadSteps = (steps: Array<BuildkiteStep | BuildkiteGroup>) => {
execSync(`buildkite-agent pipeline upload`, {
input: dump({ steps }),
stdio: ['pipe', 'inherit', 'inherit'],
});
};
}
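As a usage sketch (not part of this change): pipeline scripts assemble `BuildkiteStep` objects and hand them to `uploadSteps`, which serializes them to YAML and pipes them into `buildkite-agent pipeline upload`. The label, script path, and queue here are illustrative:

import { BuildkiteClient, BuildkiteStep } from '#pipeline-utils';

const client = new BuildkiteClient();

const step: BuildkiteStep = {
  label: 'Example Checks',
  command: '.buildkite/scripts/steps/example.sh', // illustrative path
  agents: { queue: 'n2-4-spot' },
  timeout_in_minutes: 60,
  retry: {
    // exit status -1 usually means the agent was lost, e.g. spot preemption
    automatic: [{ exit_status: '-1', limit: 3 }],
  },
};

client.uploadSteps([step]);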

@@ -6,7 +6,5 @@
* Side Public License, v 1.
*/
module.exports = {
BASE_BUCKET_DAILY: 'kibana-ci-es-snapshots-daily',
BASE_BUCKET_PERMANENT: 'kibana-ci-es-snapshots-permanent',
};
export * from './client';
export * from './types';

@@ -0,0 +1,23 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import { expect } from 'chai';
import { parseLinkHeader } from './parse_link_header';
describe('parseLinkHeader', () => {
it('should parse link header', () => {
const result = parseLinkHeader(
'<https://api.buildkite.com/v2/organizations/elastic/agents?page=2&per_page=1>; rel="next", <https://api.buildkite.com/v2/organizations/elastic/agents?page=5&per_page=1>; rel="last"'
);
expect(result).to.eql({
last: 'https://api.buildkite.com/v2/organizations/elastic/agents?page=5&per_page=1',
next: 'https://api.buildkite.com/v2/organizations/elastic/agents?page=2&per_page=1',
});
});
});

@@ -0,0 +1,25 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
export function parseLinkHeader(header: string): null | Record<string, string> {
if (!header) {
return null;
}
const entries = header.split(',').map((p) => p.trim());
const parsed: Record<string, string> = {};
for (const entry of entries) {
const parts = entry.split(';', 2).map((p) => p.trim());
const url = parts[0].slice(1, -1);
const rel = parts[1].replace(/rel="?([^"]+)"?$/, '$1');
parsed[rel] = url;
}
return parsed;
}

@@ -0,0 +1,24 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
export interface Agent {
id: string;
url: string;
web_url: string;
name: string;
connection_state: string;
ip_address: string;
hostname: string;
user_agent: string;
version: string;
creator?: string | null;
created_at: string;
last_job_finished_at?: string | null;
priority: number;
meta_data?: null | [string];
}

@@ -0,0 +1,23 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
export interface Artifact {
id: string;
job_id: string;
url: string;
download_url: string;
state: 'new' | 'error' | 'finished' | 'deleted';
path: string;
dirname: string;
filename: string;
mime_type: string;
file_size: number;
glob_path?: string;
original_path?: string;
sha1sum: string;
}

@@ -0,0 +1,64 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import { Job } from './job';
import { Pipeline } from './pipeline';
export type BuildState =
| 'running'
| 'scheduled'
| 'passed'
| 'failed'
| 'blocked'
| 'canceled'
| 'canceling'
| 'skipped'
| 'not_run'
| 'finished';
export interface BuildStatus {
state: BuildState;
success: boolean;
hasRetries: boolean;
hasNonPreemptionRetries: boolean;
}
export interface Build {
id: string;
url: string;
web_url: string;
number: number;
state: BuildState;
blocked: boolean;
message: string;
commit: string;
branch: string;
author: { name: string; email: string };
env: Record<string, string>;
created_at: string;
scheduled_at: string;
started_at: string;
finished_at: string;
meta_data: Record<string, string>;
creator: {
avatar_url: string;
created_at: string;
email: string;
id: string;
name: string;
};
source: string;
jobs: Job[];
pipeline: Pipeline;
pull_request?: {
id: string;
base: string;
repository: string;
};
}

@@ -0,0 +1,11 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
export * from './build';
export * from './job';
export * from './pipeline';

@@ -0,0 +1,69 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import { Agent } from './agent';
export type JobState =
| 'pending'
| 'waiting'
| 'waiting_failed'
| 'blocked'
| 'blocked_failed'
| 'unblocked'
| 'unblocked_failed'
| 'limiting'
| 'limited'
| 'scheduled'
| 'assigned'
| 'accepted'
| 'running'
| 'passed'
| 'failed'
| 'canceling'
| 'canceled'
| 'timing_out'
| 'timed_out'
| 'skipped'
| 'broken';
export interface Job {
id: string;
type: string;
name: string;
step_key: string;
state: JobState;
logs_url: string;
raw_log_url: string;
command: string;
exit_status: null | number;
artifact_paths: string;
artifacts_url: string;
created_at: string;
scheduled_at: string;
runnable_at: string;
started_at: string;
finished_at: string;
agent: Agent;
agent_query_rules: string[];
web_url: string;
retried: boolean;
retried_in_job_id: string;
retries_count: number;
soft_failed: boolean;
unblocked_by: {
id: string;
name: string;
email: string;
avatar_url: string;
created_at: string;
};
unblockable: boolean;
unblock_url: string;
parallel_group_index?: null | number;
parallel_group_total?: null | number;
}

@@ -0,0 +1,54 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
export interface Pipeline {
id: string;
url: string;
web_url: string;
name: string;
slug: string;
repository: string;
builds_url: string;
badge_url: string;
created_at: string;
default_branch: string;
description: string;
branch_configuration: string;
skip_queued_branch_builds: boolean;
skip_queued_branch_builds_filter: string;
cancel_running_branch_builds: boolean;
cancel_running_branch_builds_filter: string;
cluster_id: string;
scheduled_builds_count: number;
running_builds_count: number;
scheduled_jobs_count: number;
running_jobs_count: number;
waiting_jobs_count: number;
provider: {
id: string;
webhook_url: string;
settings: Record<string, string>;
};
steps: Step[];
configuration: string;
env: Record<string, string>;
}
export interface Step {
type: string;
name: string;
command: string;
artifact_paths: string;
branch_configuration: string;
env: Record<string, string>;
timeout_in_minutes: number;
agent_query_rules: string[];
}

@@ -0,0 +1,211 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import axios, { Method, AxiosRequestConfig } from 'axios';
export interface CiStatsClientConfig {
baseUrl?: string;
token?: string;
}
export interface CiStatsBuild {
id: string;
}
export interface CiStatsPrReport {
md: string;
success: boolean;
}
export interface CompleteSuccessBuildSource {
jobName: string;
jobRunner: string;
completedAt: string;
commit: string;
startedAt: string;
branch: string;
result: 'SUCCESS';
jobId: string;
targetBranch: string | null;
fromKibanaCiProduction: boolean;
requiresValidMetrics: boolean | null;
jobUrl: string;
mergeBase: string | null;
}
export interface TestGroupRunOrderResponse {
sources: unknown;
types: Array<{
type: string;
count: number;
groups: Array<{
durationMin: number;
names: string[];
}>;
tooLong?: Array<{ config: string; durationMin: number }>;
namesWithoutDurations: string[];
}>;
}
interface RequestOptions {
path: string;
method?: Method;
params?: AxiosRequestConfig['params'];
body?: AxiosRequestConfig['data'];
maxAttempts?: number;
}
export class CiStatsClient {
private readonly baseUrl: string;
private readonly defaultHeaders: Record<string, string>;
constructor(config: CiStatsClientConfig = {}) {
const CI_STATS_HOST = config.baseUrl ?? process.env.CI_STATS_HOST;
const CI_STATS_TOKEN = config.token ?? process.env.CI_STATS_TOKEN;
this.baseUrl = `https://${CI_STATS_HOST}`;
this.defaultHeaders = {
Authorization: `token ${CI_STATS_TOKEN}`,
};
}
createBuild = async () => {
const resp = await this.request<CiStatsBuild>({
method: 'POST',
path: '/v1/build',
body: {
jenkinsJobName: process.env.BUILDKITE_PIPELINE_SLUG,
jenkinsJobId: process.env.BUILDKITE_BUILD_NUMBER,
jenkinsUrl: process.env.BUILDKITE_BUILD_URL,
prId: process.env.GITHUB_PR_NUMBER || null,
},
});
return resp.data;
};
addGitInfo = async (buildId: string) => {
await this.request({
method: 'POST',
path: '/v1/git_info',
params: {
buildId,
},
body: {
branch: (process.env.BUILDKITE_BRANCH || '').replace(/^(refs\/heads\/|origin\/)/, ''),
commit: process.env.BUILDKITE_COMMIT,
targetBranch:
process.env.GITHUB_PR_TARGET_BRANCH ||
process.env.BUILDKITE_PULL_REQUEST_BASE_BRANCH ||
null,
mergeBase: process.env.GITHUB_PR_MERGE_BASE || null,
},
});
};
markBuildAsValidBaseline = async (buildId: string) => {
await this.request({
method: 'POST',
path: `/v1/build/_is_valid_baseline`,
params: {
id: buildId,
},
});
};
completeBuild = async (buildStatus: string, buildId: string) => {
await this.request({
method: 'POST',
path: `/v1/build/_complete`,
params: {
id: buildId,
},
body: {
result: buildStatus,
},
});
};
getPrReport = async (buildId: string) => {
const resp = await this.request<CiStatsPrReport>({
path: `v2/pr_report`,
params: {
buildId,
},
});
return resp.data;
};
pickTestGroupRunOrder = async (body: {
sources: Array<
| {
branch: string;
jobName: string;
}
| {
prId: string;
jobName: string;
}
| {
commit: string;
jobName: string;
}
>;
groups: Array<{
type: string;
defaultMin?: number;
maxMin: number;
minimumIsolationMin?: number;
overheadMin?: number;
names: string[];
}>;
}) => {
console.log('requesting test group run order from ci-stats:');
console.log(JSON.stringify(body, null, 2));
const resp = await axios.request<TestGroupRunOrderResponse>({
method: 'POST',
baseURL: this.baseUrl,
headers: this.defaultHeaders,
url: '/v2/_pick_test_group_run_order',
data: body,
});
return resp.data;
};
private async request<T>({ method, path, params, body, maxAttempts = 3 }: RequestOptions) {
let attempt = 0;
while (true) {
attempt += 1;
try {
return await axios.request<T>({
method,
baseURL: this.baseUrl,
url: path,
params,
data: body,
headers: this.defaultHeaders,
});
} catch (error) {
console.error('CI Stats request error:', error);
if (attempt < maxAttempts) {
const sec = attempt * 3;
console.log('waiting', sec, 'seconds before retrying');
await new Promise((resolve) => setTimeout(resolve, sec * 1000));
continue;
}
throw error;
}
}
}
}

@@ -0,0 +1,13 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
export * from './client';
export * from './on_complete';
export * from './on_metrics_viable';
export * from './on_start';
export * from './pick_test_group_run_order';

@@ -0,0 +1,42 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import { BuildkiteClient } from '../buildkite';
import { CiStatsClient } from './client';
const buildkite = new BuildkiteClient();
const ciStats = new CiStatsClient();
export async function onComplete() {
if (!process.env.CI_STATS_BUILD_ID) {
return;
}
const result = buildkite.getBuildStatus(await buildkite.getCurrentBuild());
const status = result.success ? 'SUCCESS' : 'FAILURE';
console.log('Job Status:', result);
await ciStats.completeBuild(status, process.env.CI_STATS_BUILD_ID);
if (!process.env.GITHUB_PR_NUMBER) {
return;
}
const report = await ciStats.getPrReport(process.env.CI_STATS_BUILD_ID);
if (report?.md) {
buildkite.setMetadata('pr_comment:ci_stats_report:body', report.md);
const annotationType = report?.success ? 'info' : 'error';
buildkite.setAnnotation('ci-stats-report', annotationType, report.md);
}
if (report && !report.success) {
console.log('+++ CI Stats Report');
console.error('Failing build due to CI Stats report. See annotation at top of build.');
process.exit(1);
}
}

@@ -0,0 +1,20 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import { CiStatsClient } from './client';
const ciStats = new CiStatsClient();
export async function onMetricsViable() {
if (!process.env.CI_STATS_BUILD_ID) {
return;
}
console.log('Marking build as a "valid baseline" so that it can be used to power PR reports');
await ciStats.markBuildAsValidBaseline(process.env.CI_STATS_BUILD_ID);
}

@@ -0,0 +1,18 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import { execSync } from 'child_process';
import { CiStatsClient } from './client';
const ciStats = new CiStatsClient();
export async function onStart() {
const build = await ciStats.createBuild();
execSync(`buildkite-agent meta-data set ci_stats_build_id "${build.id}"`);
await ciStats.addGitInfo(build.id);
}

@@ -0,0 +1,397 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import * as Fs from 'fs';
import * as globby from 'globby';
import minimatch from 'minimatch';
import { load as loadYaml } from 'js-yaml';
import { BuildkiteClient, BuildkiteStep } from '../buildkite';
import { CiStatsClient, TestGroupRunOrderResponse } from './client';
type RunGroup = TestGroupRunOrderResponse['types'][0];
const getRequiredEnv = (name: string) => {
const value = process.env[name];
if (typeof value !== 'string' || !value) {
throw new Error(`Missing required environment variable "${name}"`);
}
return value;
};
function getRunGroup(bk: BuildkiteClient, types: RunGroup[], typeName: string): RunGroup {
const type = types.find((t) => t.type === typeName);
if (!type) {
throw new Error(`missing test group run order for group [${typeName}]`);
}
const misses = type.namesWithoutDurations.length;
if (misses > 0) {
bk.setAnnotation(
`test-group-missing-durations:${typeName}`,
'warning',
[
misses === 1
? `The following "${typeName}" config doesn't have a recorded time in ci-stats so the automatically-determined test groups might be a little unbalanced.`
: `The following "${typeName}" configs don't have recorded times in ci-stats so the automatically-determined test groups might be a little unbalanced.`,
misses === 1
? `If this is a new config then this warning can be ignored as times will be reported soon.`
: `If these are new configs then this warning can be ignored as times will be reported soon.`,
misses === 1
? `The other possibility is that there aren't any tests in this config, so times are never reported.`
: `The other possibility is that there aren't any tests in these configs, so times are never reported.`,
'Empty test configs should be removed',
'',
...type.namesWithoutDurations.map((n) => ` - ${n}`),
].join('\n')
);
}
const tooLongs = type.tooLong?.length ?? 0;
if (tooLongs > 0) {
bk.setAnnotation(
`test-group-too-long:${typeName}`,
'error',
[
tooLongs === 1
? `The following "${typeName}" config has a duration that exceeds the maximum amount of time desired for a single CI job. Please split it up.`
: `The following "${typeName}" configs have durations that exceed the maximum amount of time desired for a single CI job. Please split them up.`,
'',
...(type.tooLong ?? []).map(
({ config, durationMin }) => ` - ${config}: ${durationMin} minutes`
),
].join('\n')
);
}
return type;
}
function getTrackedBranch(): string {
let pkg;
try {
pkg = JSON.parse(Fs.readFileSync('package.json', 'utf8'));
} catch (_) {
const error = _ instanceof Error ? _ : new Error(`${_} thrown`);
throw new Error(`unable to read kibana's package.json file: ${error.message}`);
}
const branch = pkg.branch;
if (typeof branch !== 'string') {
throw new Error('missing `branch` field from package.json file');
}
return branch;
}
function isObj(x: unknown): x is Record<string, unknown> {
return typeof x === 'object' && x !== null;
}
function getEnabledFtrConfigs(patterns?: string[]) {
try {
const configs = loadYaml(Fs.readFileSync('.buildkite/ftr_configs.yml', 'utf8'));
if (!isObj(configs)) {
throw new Error('expected yaml file to parse to an object');
}
if (!configs.enabled) {
throw new Error('expected yaml file to have an "enabled" key');
}
if (
!Array.isArray(configs.enabled) ||
!configs.enabled.every((p): p is string => typeof p === 'string')
) {
throw new Error('expected "enabled" value to be an array of strings');
}
if (!patterns) {
return configs.enabled;
}
return configs.enabled.filter((path) => patterns.some((pattern) => minimatch(path, pattern)));
} catch (_) {
const error = _ instanceof Error ? _ : new Error(`${_} thrown`);
throw new Error(`unable to parse ftr_configs.yml file: ${error.message}`);
}
}
export async function pickTestGroupRunOrder() {
const bk = new BuildkiteClient();
const ciStats = new CiStatsClient();
// these keys are synchronized in a few places by storing them in the env during builds
const UNIT_TYPE = getRequiredEnv('TEST_GROUP_TYPE_UNIT');
const INTEGRATION_TYPE = getRequiredEnv('TEST_GROUP_TYPE_INTEGRATION');
const FUNCTIONAL_TYPE = getRequiredEnv('TEST_GROUP_TYPE_FUNCTIONAL');
const JEST_MAX_MINUTES = process.env.JEST_MAX_MINUTES
? parseFloat(process.env.JEST_MAX_MINUTES)
: 50;
if (Number.isNaN(JEST_MAX_MINUTES)) {
throw new Error(`invalid JEST_MAX_MINUTES: ${process.env.JEST_MAX_MINUTES}`);
}
const FUNCTIONAL_MAX_MINUTES = process.env.FUNCTIONAL_MAX_MINUTES
? parseFloat(process.env.FUNCTIONAL_MAX_MINUTES)
: 37;
if (Number.isNaN(FUNCTIONAL_MAX_MINUTES)) {
throw new Error(`invalid FUNCTIONAL_MAX_MINUTES: ${process.env.FUNCTIONAL_MAX_MINUTES}`);
}
const LIMIT_CONFIG_TYPE = process.env.LIMIT_CONFIG_TYPE
? process.env.LIMIT_CONFIG_TYPE.split(',')
.map((t) => t.trim())
.filter(Boolean)
: ['unit', 'integration', 'functional'];
const FTR_CONFIG_PATTERNS = process.env.FTR_CONFIG_PATTERNS
? process.env.FTR_CONFIG_PATTERNS.split(',')
.map((t) => t.trim())
.filter(Boolean)
: undefined;
const FUNCTIONAL_MINIMUM_ISOLATION_MIN = process.env.FUNCTIONAL_MINIMUM_ISOLATION_MIN
? parseFloat(process.env.FUNCTIONAL_MINIMUM_ISOLATION_MIN)
: undefined;
if (
FUNCTIONAL_MINIMUM_ISOLATION_MIN !== undefined &&
Number.isNaN(FUNCTIONAL_MINIMUM_ISOLATION_MIN)
) {
throw new Error(
`invalid FUNCTIONAL_MINIMUM_ISOLATION_MIN: ${process.env.FUNCTIONAL_MINIMUM_ISOLATION_MIN}`
);
}
const FTR_CONFIGS_RETRY_COUNT = process.env.FTR_CONFIGS_RETRY_COUNT
? parseInt(process.env.FTR_CONFIGS_RETRY_COUNT, 10)
: 1;
if (Number.isNaN(FTR_CONFIGS_RETRY_COUNT)) {
throw new Error(`invalid FTR_CONFIGS_RETRY_COUNT: ${process.env.FTR_CONFIGS_RETRY_COUNT}`);
}
const FTR_CONFIGS_DEPS =
process.env.FTR_CONFIGS_DEPS !== undefined
? process.env.FTR_CONFIGS_DEPS.split(',')
.map((t) => t.trim())
.filter(Boolean)
: ['build'];
const ftrConfigs = LIMIT_CONFIG_TYPE.includes('functional')
? getEnabledFtrConfigs(FTR_CONFIG_PATTERNS)
: [];
const jestUnitConfigs = LIMIT_CONFIG_TYPE.includes('unit')
? globby.sync(['**/jest.config.js', '!**/__fixtures__/**'], {
cwd: process.cwd(),
absolute: false,
})
: [];
const jestIntegrationConfigs = LIMIT_CONFIG_TYPE.includes('integration')
? globby.sync(['**/jest.integration.config.js', '!**/__fixtures__/**'], {
cwd: process.cwd(),
absolute: false,
})
: [];
if (!ftrConfigs.length && !jestUnitConfigs.length && !jestIntegrationConfigs.length) {
throw new Error('unable to find any unit, integration, or FTR configs');
}
const trackedBranch = getTrackedBranch();
const ownBranch = process.env.BUILDKITE_BRANCH as string;
const pipelineSlug = process.env.BUILDKITE_PIPELINE_SLUG as string;
const prNumber = process.env.GITHUB_PR_NUMBER as string | undefined;
const { sources, types } = await ciStats.pickTestGroupRunOrder({
sources: [
// try to get times from a recent successful job on this PR
...(prNumber
? [
{
prId: prNumber,
jobName: 'kibana-pull-request',
},
]
: []),
// if we are running on an external job, like kibana-code-coverage-main, try finding times that are specific to that job
...(!prNumber && pipelineSlug !== 'kibana-on-merge'
? [
{
branch: ownBranch,
jobName: pipelineSlug,
},
{
branch: trackedBranch,
jobName: pipelineSlug,
},
]
: []),
// try to get times from the mergeBase commit
...(process.env.GITHUB_PR_MERGE_BASE
? [
{
commit: process.env.GITHUB_PR_MERGE_BASE,
jobName: 'kibana-on-merge',
},
]
: []),
// fallback to the latest times from the tracked branch
{
branch: trackedBranch,
jobName: 'kibana-on-merge',
},
// finally fallback to the latest times from the main branch in case this branch is brand new
{
branch: 'main',
jobName: 'kibana-on-merge',
},
],
groups: [
{
type: UNIT_TYPE,
defaultMin: 3,
maxMin: JEST_MAX_MINUTES,
overheadMin: 0.2,
names: jestUnitConfigs,
},
{
type: INTEGRATION_TYPE,
defaultMin: 10,
maxMin: JEST_MAX_MINUTES,
overheadMin: 0.2,
names: jestIntegrationConfigs,
},
{
type: FUNCTIONAL_TYPE,
defaultMin: 60,
maxMin: FUNCTIONAL_MAX_MINUTES,
minimumIsolationMin: FUNCTIONAL_MINIMUM_ISOLATION_MIN,
overheadMin: 1.5,
names: ftrConfigs,
},
],
});
console.log('test run order is determined by builds:');
console.dir(sources, { depth: Infinity, maxArrayLength: Infinity });
const unit = getRunGroup(bk, types, UNIT_TYPE);
const integration = getRunGroup(bk, types, INTEGRATION_TYPE);
const functional = getRunGroup(bk, types, FUNCTIONAL_TYPE);
// write the config for each step to an artifact that can be used by the individual jest jobs
Fs.writeFileSync('jest_run_order.json', JSON.stringify({ unit, integration }, null, 2));
bk.uploadArtifacts('jest_run_order.json');
// write the config for functional steps to an artifact that can be used by the individual functional jobs
Fs.writeFileSync('ftr_run_order.json', JSON.stringify(functional, null, 2));
bk.uploadArtifacts('ftr_run_order.json');
let smallFtrConfigsCounter = 0;
const getSmallFtrConfigsLabel = () => {
return `Super Quick FTR Configs #${++smallFtrConfigsCounter}`;
};
// upload the step definitions to Buildkite
bk.uploadSteps(
[
unit.count > 0
? {
label: 'Jest Tests',
command: getRequiredEnv('JEST_UNIT_SCRIPT'),
parallelism: unit.count,
timeout_in_minutes: 90,
key: 'jest',
agents: {
queue: 'n2-4-spot',
},
retry: {
automatic: [
{
exit_status: '-1',
limit: 3,
},
],
},
}
: [],
integration.count > 0
? {
label: 'Jest Integration Tests',
command: getRequiredEnv('JEST_INTEGRATION_SCRIPT'),
parallelism: integration.count,
timeout_in_minutes: 120,
key: 'jest-integration',
agents: {
queue: 'n2-4-spot',
},
retry: {
automatic: [
{
exit_status: '-1',
limit: 3,
},
],
},
}
: [],
functional.count > 0
? FUNCTIONAL_MINIMUM_ISOLATION_MIN === undefined
? {
label: 'FTR Configs',
key: 'ftr-configs',
depends_on: FTR_CONFIGS_DEPS,
parallelism: functional.count,
command: getRequiredEnv('FTR_CONFIGS_SCRIPT'),
timeout_in_minutes: 150,
agents: {
queue: 'n2-4-spot-2',
},
retry: {
automatic: [
{ exit_status: '-1', limit: 3 },
...(FTR_CONFIGS_RETRY_COUNT > 0
? [{ exit_status: '*', limit: FTR_CONFIGS_RETRY_COUNT }]
: []),
],
},
}
: {
group: 'FTR Configs',
key: 'ftr-configs',
depends_on: FTR_CONFIGS_DEPS,
steps: functional.groups
.map(
(group, i): BuildkiteStep => ({
label: group.names.length === 1 ? group.names[0] : getSmallFtrConfigsLabel(),
command: getRequiredEnv('FTR_CONFIGS_SCRIPT'),
timeout_in_minutes: 150,
agents: {
queue: 'n2-4-spot-2',
},
env: {
FTR_CONFIG_GROUP_INDEX: `${i}`,
},
retry: {
automatic: [
{ exit_status: '-1', limit: 3 },
...(FTR_CONFIGS_RETRY_COUNT > 0
? [{ exit_status: '*', limit: FTR_CONFIGS_RETRY_COUNT }]
: []),
],
},
})
)
.sort((a, b) => a.label.localeCompare(b.label)),
}
: [],
].flat()
);
}
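For reference, the `jest_run_order.json` artifact written above follows the `RunGroup` shape returned by ci-stats; a hypothetical consumer inside one of the parallel Jest jobs could select its slice of configs by parallel-job index (`BUILDKITE_PARALLEL_JOB` is the index Buildkite assigns to each job of a parallel step):

import * as Fs from 'fs';

// Hypothetical consumer; assumes jest_run_order.json was downloaded into cwd.
const runOrder = JSON.parse(Fs.readFileSync('jest_run_order.json', 'utf8'));
const jobIndex = Number(process.env.BUILDKITE_PARALLEL_JOB ?? '0');

// Each group lists the config paths packed to fit under JEST_MAX_MINUTES.
const { names, durationMin } = runOrder.unit.groups[jobIndex];
console.log(`running ${names.length} jest configs (~${durationMin} min)`);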

@@ -0,0 +1,132 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import { RestEndpointMethodTypes } from '@octokit/rest';
import { expect } from 'chai';
import { areChangesSkippable, doAnyChangesMatch } from './github';
describe('github', () => {
const getMockChangedFile = (filename: string, previousFilename = '') => {
return {
filename,
previous_filename: previousFilename || undefined,
} as RestEndpointMethodTypes['pulls']['listFiles']['response']['data'][number];
};
describe('doAnyChangesMatch', () => {
const required = [/^\/required/];
describe('should return true', () => {
it('when any file matches', async () => {
const match = await doAnyChangesMatch(required, [
getMockChangedFile('/required/index.js'),
getMockChangedFile('/package.json'),
]);
expect(match).to.eql(true);
});
it('when all files match', async () => {
const match = await doAnyChangesMatch(required, [
getMockChangedFile('/required/index.js'),
getMockChangedFile('/required/package.json'),
]);
expect(match).to.eql(true);
});
});
describe('should return false', () => {
it('when no files match with one file', async () => {
const match = await doAnyChangesMatch(required, [getMockChangedFile('/index.js')]);
expect(match).to.eql(false);
});
it('when no files match with multiple files', async () => {
const match = await doAnyChangesMatch(required, [
getMockChangedFile('/index.js'),
getMockChangedFile('/package.json'),
]);
expect(match).to.eql(false);
});
});
});
describe('areChangesSkippable', () => {
const skippable = [/^docs\//, /^rfcs\//, /\.md$/];
const required = [/required\.md$/];
describe('should not be skippable', () => {
it('when non-skippable files are present', async () => {
const execute = await areChangesSkippable(skippable, required, [
getMockChangedFile('docs/required.md'),
getMockChangedFile('package.json'),
]);
expect(execute).to.eql(false);
});
it('when all files are non-skippable, non-required', async () => {
const execute = await areChangesSkippable(skippable, required, [
getMockChangedFile('package.json'),
]);
expect(execute).to.eql(false);
});
it('when a required file is present', async () => {
const execute = await areChangesSkippable(skippable, required, [
getMockChangedFile('docs/required.md'),
getMockChangedFile('docs/whatever.md'),
]);
expect(execute).to.eql(false);
});
it('when a required file is renamed', async () => {
const execute = await areChangesSkippable(skippable, required, [
getMockChangedFile('docs/skipme.md', 'docs/required.md'),
]);
expect(execute).to.eql(false);
});
});
describe('should be skippable', () => {
it('when all files are skippable', async () => {
const execute = await areChangesSkippable(skippable, required, [
getMockChangedFile('docs/index.js'),
getMockChangedFile('README.md'),
]);
expect(execute).to.eql(true);
});
it('when all files are skippable and no required files are passed in', async () => {
const execute = await areChangesSkippable(
skippable,
[],
[getMockChangedFile('docs/index.js'), getMockChangedFile('README.md')]
);
expect(execute).to.eql(true);
});
it('when renamed files new and old locations are skippable', async () => {
const execute = await areChangesSkippable(skippable, required, [
getMockChangedFile('docs/index.js', 'docs/old.js'),
getMockChangedFile('README.md', 'DOCS.md'),
]);
expect(execute).to.eql(true);
});
});
});
});

@@ -0,0 +1,93 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import { Octokit, RestEndpointMethodTypes } from '@octokit/rest';
const github = new Octokit({
auth: process.env.GITHUB_TOKEN,
});
let prChangesCache: null | RestEndpointMethodTypes['pulls']['listFiles']['response']['data'] = null;
export const getPrChanges = async (
owner = process.env.GITHUB_PR_BASE_OWNER,
repo = process.env.GITHUB_PR_BASE_REPO,
prNumber: undefined | string | number = process.env.GITHUB_PR_NUMBER
) => {
if (!owner || !repo || !prNumber) {
throw Error(
"Couldn't retrieve Github PR info from environment variables in order to retrieve PR changes"
);
}
const files = await github.paginate(github.pulls.listFiles, {
owner,
repo,
pull_number: typeof prNumber === 'number' ? prNumber : parseInt(prNumber, 10),
per_page: 100,
});
return files;
};
export const getPrChangesCached = async () => {
prChangesCache = prChangesCache || (await getPrChanges());
return prChangesCache;
};
export const areChangesSkippable = async (
skippablePaths: RegExp[],
requiredPaths: RegExp[] = [],
changes: null | RestEndpointMethodTypes['pulls']['listFiles']['response']['data'] = null
) => {
const prChanges = changes || (await getPrChangesCached());
if (prChanges.length >= 3000) {
return false;
}
if (requiredPaths?.length) {
const someFilesMatchRequired = requiredPaths.some((path) =>
prChanges.some(
(change) => change.filename.match(path) || change.previous_filename?.match(path)
)
);
if (someFilesMatchRequired) {
return false;
}
}
const someFilesNotSkippable = prChanges.some(
(change) =>
!skippablePaths.some(
(path) =>
change.filename.match(path) &&
(!change.previous_filename || change.previous_filename.match(path))
)
);
return !someFilesNotSkippable;
};
export const doAnyChangesMatch = async (
requiredPaths: RegExp[],
changes: null | RestEndpointMethodTypes['pulls']['listFiles']['response']['data'] = null
) => {
const prChanges = changes || (await getPrChangesCached());
if (prChanges.length >= 3000) {
return true;
}
const anyFilesMatchRequired = requiredPaths.some((path) =>
prChanges.some((change) => change.filename.match(path) || change.previous_filename?.match(path))
);
return anyFilesMatchRequired;
};

@@ -0,0 +1,9 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
export * from './github';

@@ -0,0 +1,12 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
export * from './buildkite';
export * as CiStats from './ci-stats';
export * from './github';
export * as TestFailures from './test-failures';

@@ -0,0 +1,110 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import { expect } from 'chai';
import { Artifact } from '../buildkite/types/artifact';
import { TestFailure, getAnnotation, getSlackMessage, getPrComment } from './annotate';
let mockFailure: TestFailure;
let mockArtifacts: Record<string, Artifact>;
describe('Annotate', () => {
beforeEach(() => {
mockFailure = {
url: 'https://buildkite.com/elastic/kibana-pull-request/builds/53',
jobId: 'job-id',
buildId: 'build-id',
hash: 'hash',
name: 'test should fail',
classname: 'Chrome UI Functional Tests.test/functional/apps/console/_console·ts',
jobName: 'OSS CI Group #1',
} as TestFailure;
mockArtifacts = {
'job-idhash': {
id: 'artifact-id',
} as Artifact,
};
});
describe('getAnnotation', () => {
it('should create an annotation without logs link if artifact is missing', () => {
const annotation = getAnnotation([mockFailure], {});
expect(annotation).to.eql(
'**Test Failures**<br />\n[[job]](https://buildkite.com/elastic/kibana-pull-request/builds/53#job-id) OSS CI Group #1 / test should fail'
);
});
it('should create an annotation with logs link if artifact is present', () => {
const annotation = getAnnotation([mockFailure], mockArtifacts);
expect(annotation).to.eql(
'**Test Failures**<br />\n[[job]](https://buildkite.com/elastic/kibana-pull-request/builds/53#job-id) [[logs]](https://buildkite.com/organizations/elastic/pipelines/kibana-pull-request/builds/53/jobs/job-id/artifacts/artifact-id) OSS CI Group #1 / test should fail'
);
});
});
describe('getSlackMessage', () => {
it('should create an annotation without logs link if artifact is missing', () => {
const annotation = getSlackMessage([mockFailure, mockFailure], {});
expect(annotation).to.eql(
'*Test Failures*\n' +
'<https://buildkite.com/elastic/kibana-pull-request/builds/53#job-id|[job]> OSS CI Group #1 / test should fail\n' +
'<https://buildkite.com/elastic/kibana-pull-request/builds/53#job-id|[job]> OSS CI Group #1 / test should fail'
);
});
it('should create an annotation with logs link if artifact is present', () => {
const annotation = getSlackMessage([mockFailure], mockArtifacts);
expect(annotation).to.eql(
'*Test Failures*\n<https://buildkite.com/elastic/kibana-pull-request/builds/53#job-id|[job]> <https://buildkite.com/organizations/elastic/pipelines/kibana-pull-request/builds/53/jobs/job-id/artifacts/artifact-id|[logs]> OSS CI Group #1 / test should fail'
);
});
it('should create an annotation with 1 failure count if count present', () => {
mockFailure.failureCount = 1;
mockFailure.githubIssue = 'https://github.com/some/failure/link/1234';
const annotation = getSlackMessage([mockFailure], mockArtifacts);
expect(annotation).to.eql(
'*Test Failures*\n<https://buildkite.com/elastic/kibana-pull-request/builds/53#job-id|[job]> <https://buildkite.com/organizations/elastic/pipelines/kibana-pull-request/builds/53/jobs/job-id/artifacts/artifact-id|[logs]> <https://github.com/some/failure/link/1234|[1 failure]> OSS CI Group #1 / test should fail'
);
});
it('should create an annotation with 2+ failures count if count present', () => {
mockFailure.failureCount = 2;
mockFailure.githubIssue = 'https://github.com/some/failure/link/1234';
const annotation = getSlackMessage([mockFailure], mockArtifacts);
expect(annotation).to.eql(
'*Test Failures*\n<https://buildkite.com/elastic/kibana-pull-request/builds/53#job-id|[job]> <https://buildkite.com/organizations/elastic/pipelines/kibana-pull-request/builds/53/jobs/job-id/artifacts/artifact-id|[logs]> <https://github.com/some/failure/link/1234|[2 failures]> OSS CI Group #1 / test should fail'
);
});
});
describe('getPrComment', () => {
it('should create an annotation without logs link if artifact is missing', () => {
const annotation = getPrComment([mockFailure], {});
expect(annotation).to.eql(
'### Test Failures\n* [[job]](https://buildkite.com/elastic/kibana-pull-request/builds/53#job-id) OSS CI Group #<span></span>1 / test should fail'
);
});
it('should create an annotation with logs link if artifact is present', () => {
const annotation = getPrComment([mockFailure], mockArtifacts);
expect(annotation).to.eql(
'### Test Failures\n* [[job]](https://buildkite.com/elastic/kibana-pull-request/builds/53#job-id) [[logs]](https://buildkite.com/organizations/elastic/pipelines/kibana-pull-request/builds/53/jobs/job-id/artifacts/artifact-id) OSS CI Group #<span></span>1 / test should fail'
);
});
});
});

@@ -0,0 +1,185 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import { execSync } from 'child_process';
import { mkdirSync, readdirSync, readFileSync, statSync } from 'fs';
import { join } from 'path';
import { BuildkiteClient } from '..';
import { Artifact } from '../buildkite/types/artifact';
const buildkite = new BuildkiteClient();
export interface TestFailure {
name: string;
classname: string;
time: string;
'metadata-json'?: string | undefined;
failure: string;
likelyIrrelevant: boolean;
'system-out'?: string | undefined;
hash: string;
buildId: string;
jobId: string;
url: string;
jobName: string;
githubIssue?: string;
failureCount?: number;
}
const recursiveReadDir = (dirPath: string, allFiles: string[] = []) => {
const files = readdirSync(dirPath);
for (const file of files) {
if (statSync(join(dirPath, file)).isDirectory()) {
allFiles = recursiveReadDir(join(dirPath, file), allFiles);
} else {
allFiles.push(join(dirPath, file));
}
}
return allFiles;
};
export const getAnnotation = (
failures: TestFailure[],
failureHtmlArtifacts: Record<string, Artifact>
): string => {
return (
`**Test Failures**<br />\n` +
failures
.map((failure) => {
const lookup = failure.jobId + failure.hash;
const jobUrl = `${failure.url}#${failure.jobId}`;
const artifactUrl =
lookup in failureHtmlArtifacts
? `${failure.url.replace(
'https://buildkite.com/elastic',
'https://buildkite.com/organizations/elastic/pipelines'
)}/jobs/${failure.jobId}/artifacts/${failureHtmlArtifacts[lookup].id}`
: '';
const logsLink = artifactUrl ? ` [[logs]](${artifactUrl})` : '';
return `[[job]](${jobUrl})${logsLink} ${failure.jobName} / ${failure.name}`;
})
.join('<br />\n')
);
};
export const getPrComment = (
failures: TestFailure[],
failureHtmlArtifacts: Record<string, Artifact>
): string => {
return (
`### Test Failures\n` +
failures
.map((failure) => {
const lookup = failure.jobId + failure.hash;
const jobUrl = `${failure.url}#${failure.jobId}`;
const artifactUrl =
lookup in failureHtmlArtifacts
? `${failure.url.replace(
'https://buildkite.com/elastic',
'https://buildkite.com/organizations/elastic/pipelines'
)}/jobs/${failure.jobId}/artifacts/${failureHtmlArtifacts[lookup].id}`
: '';
const logsLink = artifactUrl ? ` [[logs]](${artifactUrl})` : '';
// job name could have #<number> in it, which GitHub will link to an issue, so we need to "escape" it with spans
return `* [[job]](${jobUrl})${logsLink} ${failure.jobName.replace(
'#',
'#<span></span>'
)} / ${failure.name}`;
})
.join('\n')
);
};
export const getSlackMessage = (
failures: TestFailure[],
failureHtmlArtifacts: Record<string, Artifact>
): string => {
return (
`*Test Failures*\n` +
failures
.map((failure) => {
const lookup = failure.jobId + failure.hash;
const jobUrl = `${failure.url}#${failure.jobId}`;
const artifactUrl =
lookup in failureHtmlArtifacts
? `${failure.url.replace(
'https://buildkite.com/elastic',
'https://buildkite.com/organizations/elastic/pipelines'
)}/jobs/${failure.jobId}/artifacts/${failureHtmlArtifacts[lookup].id}`
: '';
const logsLink = artifactUrl ? ` <${artifactUrl}|[logs]>` : '';
const failuresCount =
failure.failureCount && failure.githubIssue
? ` <${failure.githubIssue}|[${failure.failureCount} failure${
failure.failureCount > 1 ? 's' : ''
}]>`
: '';
return `<${jobUrl}|[job]>${logsLink}${failuresCount} ${failure.jobName} / ${failure.name}`;
})
.join('\n')
);
};
export const annotateTestFailures = async () => {
const exec = (cmd: string) => execSync(cmd, { stdio: 'inherit' });
const failureDir = 'target/process-test-failures';
mkdirSync(failureDir, { recursive: true });
const artifacts = await buildkite.getArtifactsForCurrentBuild();
const failureHtmlArtifacts: Record<string, Artifact> = {};
for (const artifact of artifacts) {
if (artifact.path.match(/test_failures\/.*?\.html$/)) {
const [jobId, hash] = artifact.filename.split(/_|\./);
failureHtmlArtifacts[jobId + hash] = artifact;
}
}
exec(
`buildkite-agent artifact download --include-retried-jobs "target/test_failures/*.json" "${failureDir}"`
);
const failures: TestFailure[] = recursiveReadDir(failureDir)
.map((file) => {
try {
if (file.endsWith('.json')) {
return JSON.parse(readFileSync(file).toString());
}
} catch (ex) {
console.error((ex as Error).message);
}
return null;
})
.filter((f) => f)
.sort((a, b) => a.name.localeCompare(b.name));
buildkite.setAnnotation('test_failures', 'error', getAnnotation(failures, failureHtmlArtifacts));
if (process.env.PR_COMMENTS_ENABLED === 'true') {
buildkite.setMetadata(
'pr_comment:test_failures:body',
getPrComment(failures, failureHtmlArtifacts)
);
}
if (process.env.SLACK_NOTIFICATIONS_ENABLED === 'true') {
buildkite.setMetadata(
'slack:test_failures:body',
getSlackMessage(failures, failureHtmlArtifacts)
);
}
};

View file

@ -0,0 +1,9 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
export * from './annotate';

View file

@ -5,4 +5,4 @@ set -euo pipefail
UUID="$(cat /proc/sys/kernel/random/uuid)"
export UUID
node .buildkite/pipelines/flaky_tests/pipeline.js | buildkite-agent pipeline upload
ts-node .buildkite/pipelines/flaky_tests/pipeline.ts | buildkite-agent pipeline upload

View file

@ -6,6 +6,8 @@
* Side Public License, v 1.
*/
import { groups } from './groups.json';
const configJson = process.env.KIBANA_FLAKY_TEST_RUNNER_CONFIG;
if (!configJson) {
console.error('+++ Triggering directly is not supported anymore');
@ -16,10 +18,6 @@ if (!configJson) {
process.exit(1);
}
const groups = /** @type {Array<{key: string, name: string, ciGroups: number }>} */ (
require('./groups.json').groups
);
const concurrency = process.env.KIBANA_FLAKY_TEST_CONCURRENCY
? parseInt(process.env.KIBANA_FLAKY_TEST_CONCURRENCY, 10)
: 25;
@ -33,8 +31,8 @@ if (Number.isNaN(concurrency)) {
const BASE_JOBS = 1;
const MAX_JOBS = 500;
function getTestSuitesFromJson(json) {
const fail = (errorMsg) => {
function getTestSuitesFromJson(json: string) {
const fail = (errorMsg: string) => {
console.error('+++ Invalid test config provided');
console.error(`${errorMsg}: ${json}`);
process.exit(1);
@ -108,7 +106,7 @@ if (totalJobs > MAX_JOBS) {
process.exit(1);
}
const steps = [];
const steps: any[] = [];
const pipeline = {
env: {
IGNORE_SHIP_CI_STATS_ERROR: 'true',
@ -137,7 +135,7 @@ for (const testSuite of testSuites) {
},
label: `FTR Config: ${testSuite.ftrConfig}`,
parallelism: testSuite.count,
concurrency: concurrency,
concurrency,
concurrency_group: process.env.UUID,
concurrency_method: 'eager',
agents: {
@ -159,7 +157,7 @@ for (const testSuite of testSuites) {
switch (keyParts[0]) {
case 'cypress':
const CYPRESS_SUITE = keyParts[1];
const group = groups.find((group) => group.key.includes(CYPRESS_SUITE));
const group = groups.find((g) => g.key.includes(CYPRESS_SUITE));
if (!group) {
throw new Error(
`Group configuration was not found in groups.json for the following cypress suite: {${CYPRESS_SUITE}}.`
@ -171,7 +169,7 @@ for (const testSuite of testSuites) {
agents: { queue: 'ci-group-6' },
depends_on: 'build',
parallelism: testSuite.count,
concurrency: concurrency,
concurrency,
concurrency_group: process.env.UUID,
concurrency_method: 'eager',
});

View file

@ -6,7 +6,7 @@
* Side Public License, v 1.
*/
const { TestFailures } = require('kibana-buildkite-library');
import { TestFailures } from '#pipeline-utils';
(async () => {
try {

View file

@ -6,7 +6,7 @@
* Side Public License, v 1.
*/
const { BuildkiteClient } = require('kibana-buildkite-library');
import { BuildkiteClient } from '#pipeline-utils';
(async () => {
try {

View file

@ -6,7 +6,7 @@
* Side Public License, v 1.
*/
const { CiStats } = require('kibana-buildkite-library');
import { CiStats } from '#pipeline-utils';
(async () => {
try {

View file

@ -6,7 +6,7 @@
* Side Public License, v 1.
*/
const { CiStats } = require('kibana-buildkite-library');
import { CiStats } from '#pipeline-utils';
(async () => {
try {

View file

@ -6,7 +6,7 @@
* Side Public License, v 1.
*/
const { CiStats } = require('kibana-buildkite-library');
import { CiStats } from '#pipeline-utils';
(async () => {
try {

View file

@ -2,14 +2,14 @@
set -euo pipefail
BUILD_SUCCESSFUL=$(node "$(dirname "${0}")/build_status.js")
BUILD_SUCCESSFUL=$(ts-node "$(dirname "${0}")/build_status.ts")
export BUILD_SUCCESSFUL
if [[ "${GITHUB_BUILD_COMMIT_STATUS_ENABLED:-}" != "true" ]]; then
"$(dirname "${0}")/commit_status_complete.sh"
fi
node "$(dirname "${0}")/ci_stats_complete.js"
ts-node "$(dirname "${0}")/ci_stats_complete.ts"
if [[ "${GITHUB_PR_NUMBER:-}" ]]; then
DOCS_CHANGES_URL="https://kibana_${GITHUB_PR_NUMBER}.docs-preview.app.elstc.co/diff"

View file

@ -3,7 +3,7 @@
set -euo pipefail
echo '--- Agent Debug Info'
node .buildkite/scripts/lifecycle/print_agent_links.js || true
ts-node .buildkite/scripts/lifecycle/print_agent_links.ts || true
IS_TEST_EXECUTION_STEP="$(buildkite-agent meta-data get "${BUILDKITE_JOB_ID}_is_test_execution_step" --default '')"
@ -33,6 +33,6 @@ if [[ "$IS_TEST_EXECUTION_STEP" == "true" ]]; then
if [[ -d 'target/test_failures' ]]; then
buildkite-agent artifact upload 'target/test_failures/**/*'
node .buildkite/scripts/lifecycle/annotate_test_failures.js
ts-node .buildkite/scripts/lifecycle/annotate_test_failures.ts
fi
fi

View file

@ -11,7 +11,7 @@ fi
export CI_STATS_TOKEN="$(retry 5 5 vault read -field=api_token secret/kibana-issues/dev/kibana_ci_stats)"
export CI_STATS_HOST="$(retry 5 5 vault read -field=api_host secret/kibana-issues/dev/kibana_ci_stats)"
node "$(dirname "${0}")/ci_stats_start.js"
ts-node "$(dirname "${0}")/ci_stats_start.ts"
# We resolve the latest manifest URL at the beginning of the build to ensure that all steps in the build will use the same manifest
# Otherwise, the manifest could change if a step is running around the time that a new one is promoted

View file

@ -4,16 +4,21 @@ set -euo pipefail
source .buildkite/scripts/common/util.sh
echo '--- Setup environment vars'
source .buildkite/scripts/common/env.sh
source .buildkite/scripts/common/setup_node.sh
BUILDKITE_TOKEN="$(retry 5 5 vault read -field=buildkite_token_all_jobs secret/kibana-issues/dev/buildkite-ci)"
export BUILDKITE_TOKEN
echo '--- Install buildkite dependencies'
echo '--- Install/build buildkite dependencies'
npm install -g ts-node
cd '.buildkite'
retry 5 15 npm ci
cd ..
echo '--- Agent Debug/SSH Info'
node .buildkite/scripts/lifecycle/print_agent_links.js || true
ts-node .buildkite/scripts/lifecycle/print_agent_links.ts || true
if [[ "$(curl -is metadata.google.internal || true)" ]]; then
echo ""
@ -22,7 +27,6 @@ if [[ "$(curl -is metadata.google.internal || true)" ]]; then
echo ""
fi
echo '--- Job Environment Setup'
# Set up a custom ES Snapshot Manifest if one has been specified for this build
@ -136,15 +140,6 @@ BAZEL_LOCAL_DEV_CACHE_CREDENTIALS_FILE="$HOME/.kibana-ci-bazel-remote-cache-loca
export BAZEL_LOCAL_DEV_CACHE_CREDENTIALS_FILE
retry 5 5 vault read -field=service_account_json secret/kibana-issues/dev/kibana-ci-bazel-remote-cache-local-dev > "$BAZEL_LOCAL_DEV_CACHE_CREDENTIALS_FILE"
# By default, all steps should set up these things to get a full environment before running
# It can be skipped for pipeline upload steps though, to make job start time a little faster
if [[ "${SKIP_CI_SETUP:-}" != "true" ]]; then
if [[ -d .buildkite/scripts && "${BUILDKITE_COMMAND:-}" != "buildkite-agent pipeline upload"* ]]; then
source .buildkite/scripts/common/env.sh
source .buildkite/scripts/common/setup_node.sh
fi
fi
PIPELINE_PRE_COMMAND=${PIPELINE_PRE_COMMAND:-".buildkite/scripts/lifecycle/pipelines/$BUILDKITE_PIPELINE_SLUG/pre_command.sh"}
if [[ -f "$PIPELINE_PRE_COMMAND" ]]; then
source "$PIPELINE_PRE_COMMAND"

View file

@ -6,7 +6,7 @@
* Side Public License, v 1.
*/
const { BuildkiteClient } = require('kibana-buildkite-library');
import { BuildkiteClient } from '#pipeline-utils';
(async () => {
try {
@ -14,7 +14,9 @@ const { BuildkiteClient } = require('kibana-buildkite-library');
const build = await client.getCurrentBuild();
const job = build.jobs.find((j) => j.id === process.env.BUILDKITE_JOB_ID);
const startTime = job ? new Date(job.started_at) : new Date().getTime() - 60 * 60 * 1000;
const startTime = job
? new Date(job.started_at)
: new Date(new Date().getTime() - 60 * 60 * 1000);
const twoHours = new Date(startTime.getTime() + 2 * 60 * 60 * 1000);
const METRICS_URL = [

View file

@ -2,4 +2,4 @@
set -euo pipefail
node .buildkite/scripts/pipelines/pull_request/pipeline.js
ts-node .buildkite/scripts/pipelines/pull_request/pipeline.ts

View file

@ -6,21 +6,27 @@
* Side Public License, v 1.
*/
const execSync = require('child_process').execSync;
const fs = require('fs');
const { areChangesSkippable, doAnyChangesMatch } = require('kibana-buildkite-library');
const prConfigs = require('../../../pull_requests.json');
import { execSync } from 'child_process';
import fs from 'fs';
import prConfigs from '../../../pull_requests.json';
import { areChangesSkippable, doAnyChangesMatch } from '#pipeline-utils';
const prConfig = prConfigs.jobs.find((job) => job.pipelineSlug === 'kibana-pull-request');
if (!prConfig) {
console.error(`'kibana-pull-request' pipeline not found in .buildkite/pull_requests.json`);
process.exit(1);
}
const GITHUB_PR_LABELS = process.env.GITHUB_PR_LABELS ?? '';
const REQUIRED_PATHS = prConfig.always_require_ci_on_changed.map((r) => new RegExp(r, 'i'));
const SKIPPABLE_PR_MATCHERS = prConfig.skip_ci_on_only_changed.map((r) => new RegExp(r, 'i'));
const getPipeline = (filename, removeSteps = true) => {
const getPipeline = (filename: string, removeSteps = true) => {
const str = fs.readFileSync(filename).toString();
return removeSteps ? str.replace(/^steps:/, '') : str;
};
const uploadPipeline = (pipelineContent) => {
const uploadPipeline = (pipelineContent: string | object) => {
const str =
typeof pipelineContent === 'string' ? pipelineContent : JSON.stringify(pipelineContent);
@ -57,7 +63,7 @@ const uploadPipeline = (pipelineContent) => {
/^x-pack\/plugins\/triggers_actions_ui\/public\/application\/context\/actions_connectors_context\.tsx/,
/^x-pack\/test\/security_solution_cypress/,
])) ||
process.env.GITHUB_PR_LABELS.includes('ci:all-cypress-suites')
GITHUB_PR_LABELS.includes('ci:all-cypress-suites')
) {
pipeline.push(getPipeline('.buildkite/pipelines/pull_request/security_solution.yml'));
}
@ -71,35 +77,35 @@ const uploadPipeline = (pipelineContent) => {
/^x-pack\/plugins\/rule_registry/,
/^x-pack\/plugins\/task_manager/,
])) ||
process.env.GITHUB_PR_LABELS.includes('ci:all-cypress-suites')
GITHUB_PR_LABELS.includes('ci:all-cypress-suites')
) {
pipeline.push(getPipeline('.buildkite/pipelines/pull_request/response_ops.yml'));
}
if (
(await doAnyChangesMatch([/^x-pack\/plugins\/cases/])) ||
process.env.GITHUB_PR_LABELS.includes('ci:all-cypress-suites')
GITHUB_PR_LABELS.includes('ci:all-cypress-suites')
) {
pipeline.push(getPipeline('.buildkite/pipelines/pull_request/response_ops_cases.yml'));
}
if (
(await doAnyChangesMatch([/^x-pack\/plugins\/apm/])) ||
process.env.GITHUB_PR_LABELS.includes('ci:all-cypress-suites')
GITHUB_PR_LABELS.includes('ci:all-cypress-suites')
) {
pipeline.push(getPipeline('.buildkite/pipelines/pull_request/apm_cypress.yml'));
}
if (
(await doAnyChangesMatch([/^x-pack\/plugins\/fleet/, /^x-pack\/test\/fleet_cypress/])) ||
process.env.GITHUB_PR_LABELS.includes('ci:all-cypress-suites')
GITHUB_PR_LABELS.includes('ci:all-cypress-suites')
) {
pipeline.push(getPipeline('.buildkite/pipelines/pull_request/fleet_cypress.yml'));
}
if (
(await doAnyChangesMatch([/^x-pack\/plugins\/osquery/, /^x-pack\/test\/osquery_cypress/])) ||
process.env.GITHUB_PR_LABELS.includes('ci:all-cypress-suites')
GITHUB_PR_LABELS.includes('ci:all-cypress-suites')
) {
pipeline.push(getPipeline('.buildkite/pipelines/pull_request/osquery_cypress.yml'));
}
@ -108,7 +114,7 @@ const uploadPipeline = (pipelineContent) => {
pipeline.push(getPipeline('.buildkite/pipelines/pull_request/synthetics_plugin.yml'));
}
if (process.env.GITHUB_PR_LABELS.includes('ci:deploy-cloud')) {
if (GITHUB_PR_LABELS.includes('ci:deploy-cloud')) {
pipeline.push(getPipeline('.buildkite/pipelines/pull_request/deploy_cloud.yml'));
}

View file

@ -4,4 +4,4 @@ set -euo pipefail
source .buildkite/scripts/common/util.sh
node .buildkite/scripts/lifecycle/ci_stats_ready.js
ts-node .buildkite/scripts/lifecycle/ci_stats_ready.ts

View file

@ -2,4 +2,4 @@
set -euo pipefail
node .buildkite/scripts/steps/cloud/purge.js
ts-node .buildkite/scripts/steps/cloud/purge.ts

View file

@ -6,12 +6,14 @@
* Side Public License, v 1.
*/
const { execSync } = require('child_process');
import { execSync } from 'child_process';
const deploymentsListJson = execSync('ecctl deployment list --output json').toString();
const { deployments } = JSON.parse(deploymentsListJson);
const prDeployments = deployments.filter((deployment) => deployment.name.startsWith('kibana-pr-'));
const prDeployments = deployments.filter((deployment: any) =>
deployment.name.startsWith('kibana-pr-')
);
const deploymentsToPurge = [];
@ -29,7 +31,7 @@ for (const deployment of prDeployments) {
if (pullRequest.state !== 'OPEN') {
console.log(`Pull Request #${prNumber} is no longer open, will delete associated deployment`);
deploymentsToPurge.push(deployment);
} else if (!pullRequest.labels.filter((label) => label.name === 'ci:deploy-cloud')) {
} else if (!pullRequest.labels.some((label: any) => label.name === 'ci:deploy-cloud')) {
console.log(
`Pull Request #${prNumber} no longer has the ci:deploy-cloud label, will delete associated deployment`
);

View file

@ -0,0 +1,14 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import { readFileSync, writeFileSync } from 'fs';
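// Usage: ts-node clean_coverage_paths.ts <file> <search> <replace>
// Replaces every occurrence of <search> in <file> with <replace>, in place.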
const file = process.argv[2];
const search = process.argv[3];
const replace = process.argv[4];
writeFileSync(file, readFileSync(file).toString().replaceAll(search, replace));

View file

@ -0,0 +1,85 @@
#!/usr/bin/env bash
set -euo pipefail
header() {
local fileName=$1
echo "" >"$fileName"
echo "### File Name:" >>"$fileName"
printf " %s\n\n" "$fileName" >>"$fileName"
}
# $1 file name, ex: "target/dir-listing-jest.txt"
# $2 directory to be listed, ex: target/kibana-coverage/jest
dirListing() {
local fileName=$1
local dir=$2
header "$fileName"
ls -l "$dir" >>"$fileName"
printf "\n### %s \n\tlisted to: %s\n" "$dir" "$fileName"
buildkite-agent artifact upload "$fileName"
printf "\n### %s Uploaded\n" "$fileName"
}
replacePaths() {
local dirName=$1
local search=$2
local replace=$3
for x in $(find "$dirName" -maxdepth 1 -type f -name '*.json'); do
ts-node .buildkite/scripts/steps/code_coverage/clean_coverage_paths.ts \
"$x" \
"$search" \
"$replace"
done
}
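# Example (illustrative values): replacePaths target/kibana-coverage/jest "<agent build dir>" "$PWD"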
fileHeads() {
local fileName=$1
local dir=$2
local ext=${3:-'*.json'}
header "$fileName"
while read -r x; do
printf "\n### BEGIN %s\n\n" "$x" >>"$fileName"
head -2 "$x" >>"$fileName"
printf "\n### END %s\n\n" "$x" >>"$fileName"
done <<<"$(find "$dir" -maxdepth 1 -type f -name "$ext")"
buildkite-agent artifact upload "$fileName"
printf "\n### %s Uploaded\n" "$fileName"
}
collectAndUpload() {
local fileName=$1
local dir=$2
tar -czf "$fileName" "$dir"
buildkite-agent artifact upload "$fileName"
printf "\n### %s Uploaded\n" "$fileName"
}
# Jest, Jest Integration, and FTR configs use this to record ("tell")
# the last stage they ran.
uploadRanFile() {
local ran=$1
mkdir -p target/ran_files
local fileName="target/ran_files/$ran.txt"
echo "$ran" >"$fileName"
buildkite-agent artifact upload "$fileName"
}
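# e.g. uploadRanFile "jest" writes "jest" to target/ran_files/jest.txt and uploads it as a build artifact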

View file

@ -0,0 +1,10 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
export const BASE_BUCKET_DAILY = 'kibana-ci-es-snapshots-daily';
export const BASE_BUCKET_PERMANENT = 'kibana-ci-es-snapshots-permanent';

View file

@ -111,7 +111,7 @@ cd "$destination"
find ./* -exec bash -c "shasum -a 512 {} > {}.sha512" \;
cd "$BUILDKITE_BUILD_CHECKOUT_PATH"
node "$(dirname "${0}")/create_manifest.js" "$destination"
ts-node "$(dirname "${0}")/create_manifest.ts" "$destination"
ES_SNAPSHOT_MANIFEST="$(buildkite-agent meta-data get ES_SNAPSHOT_MANIFEST)"

View file

@ -6,9 +6,19 @@
* Side Public License, v 1.
*/
const fs = require('fs');
const { execSync } = require('child_process');
const { BASE_BUCKET_DAILY } = require('./bucket_config');
import fs from 'fs';
import { execSync } from 'child_process';
import { BASE_BUCKET_DAILY } from './bucket_config';
interface ManifestEntry {
filename?: string;
checksum: string;
url: string;
version: string;
platform: string;
architecture: string;
license: string;
}
(async () => {
console.log('--- Create ES Snapshot Manifest');
@ -40,7 +50,7 @@ const { BASE_BUCKET_DAILY } = require('./bucket_config');
try {
const files = fs.readdirSync(destination);
const manifestEntries = files
const manifestEntries: ManifestEntry[] = files
.filter((filename) => !filename.match(/.sha512$/))
.filter((filename) => !filename.match(/.json$/))
.map((filename) => {
@ -51,7 +61,7 @@ const { BASE_BUCKET_DAILY } = require('./bucket_config');
DESTINATION = DESTINATION || `${VERSION}/archives/${SNAPSHOT_ID}`;
return {
filename: filename,
filename,
checksum: filename + '.sha512',
url: `https://storage.googleapis.com/${BASE_BUCKET_DAILY}/${DESTINATION}/${filename}`,
version: parts[1],

View file

@ -10,4 +10,4 @@ cat << EOF | buildkite-agent annotate --style "info"
$ES_SNAPSHOT_MANIFEST
EOF
node "$(dirname "${0}")/promote_manifest.js" "$ES_SNAPSHOT_MANIFEST"
ts-node "$(dirname "${0}")/promote_manifest.ts" "$ES_SNAPSHOT_MANIFEST"

View file

@ -6,9 +6,9 @@
* Side Public License, v 1.
*/
const fs = require('fs');
const { execSync } = require('child_process');
const { BASE_BUCKET_DAILY, BASE_BUCKET_PERMANENT } = require('./bucket_config');
import fs from 'fs';
import { execSync } from 'child_process';
import { BASE_BUCKET_DAILY, BASE_BUCKET_PERMANENT } from './bucket_config';
(async () => {
try {

View file

@ -4,4 +4,4 @@ set -euo pipefail
.buildkite/scripts/bootstrap.sh
node .buildkite/scripts/steps/storybooks/build_and_upload.js
ts-node .buildkite/scripts/steps/storybooks/build_and_upload.ts

View file

@ -6,9 +6,9 @@
* Side Public License, v 1.
*/
const execSync = require('child_process').execSync;
const fs = require('fs');
const path = require('path');
import { execSync } from 'child_process';
import fs from 'fs';
import path from 'path';
// TODO - how to generate this dynamically?
const STORYBOOKS = [
@ -46,14 +46,14 @@ const GITHUB_CONTEXT = 'Build and Publish Storybooks';
const STORYBOOK_DIRECTORY =
process.env.BUILDKITE_PULL_REQUEST && process.env.BUILDKITE_PULL_REQUEST !== 'false'
? `pr-${process.env.BUILDKITE_PULL_REQUEST}`
: process.env.BUILDKITE_BRANCH.replace('/', '__');
: (process.env.BUILDKITE_BRANCH ?? '').replace('/', '__');
const STORYBOOK_BUCKET = 'ci-artifacts.kibana.dev/storybooks';
const STORYBOOK_BUCKET_URL = `https://${STORYBOOK_BUCKET}/${STORYBOOK_DIRECTORY}`;
const STORYBOOK_BASE_URL = `${STORYBOOK_BUCKET_URL}/${process.env.BUILDKITE_COMMIT}`;
const exec = (...args) => execSync(args.join(' '), { stdio: 'inherit' });
const exec = (...args: string[]) => execSync(args.join(' '), { stdio: 'inherit' });
const ghStatus = (state, description) =>
const ghStatus = (state: string, description: string) =>
exec(
`gh api "repos/elastic/kibana/statuses/${process.env.BUILDKITE_COMMIT}"`,
`-f state=${state}`,
@ -83,8 +83,8 @@ const upload = () => {
.toString()
.trim()
.split('\n')
.map((path) => path.replace('/', ''))
.filter((path) => path !== 'composite');
.map((filePath) => filePath.replace('/', ''))
.filter((filePath) => filePath !== 'composite');
const listHtml = storybooks
.map((storybook) => `<li><a href="${STORYBOOK_BASE_URL}/${storybook}">${storybook}</a></li>`)

View file

@ -5,4 +5,4 @@ set -euo pipefail
source .buildkite/scripts/common/util.sh
echo '--- Pick Test Group Run Order'
node "$(dirname "${0}")/pick_test_group_run_order.js"
ts-node "$(dirname "${0}")/pick_test_group_run_order.ts"

View file

@ -6,7 +6,7 @@
* Side Public License, v 1.
*/
const { CiStats } = require('kibana-buildkite-library');
import { CiStats } from '#pipeline-utils';
(async () => {
try {

View file

@ -0,0 +1,33 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import { BuildkiteClient } from '#pipeline-utils';
const pipelineSlug = process.argv[2];
const branch = process.argv[3] || 'main';
const commit = process.argv[4] || 'HEAD';
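// Usage: ts-node trigger_pipeline.ts <pipeline-slug> [branch] [commit] (branch defaults to 'main', commit to 'HEAD')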
(async () => {
try {
const client = new BuildkiteClient();
const build = await client.triggerBuild(pipelineSlug, {
commit,
branch,
ignore_pipeline_branch_filters: true, // Required because of a Buildkite bug
});
console.log(`Triggered build: ${build.web_url}`);
process.exit(0);
} catch (ex) {
console.error('Buildkite API Error', ex.toString());
if (ex.response) {
console.error('HTTP Error Response Status', ex.response.status);
console.error('HTTP Error Response Body', ex.response.data);
}
process.exit(1);
}
})();

View file

@ -12,4 +12,4 @@ find . -path "*target/public/*" -name "stats.json" | while read line; do
./node_modules/.bin/webpack-bundle-analyzer $line --report "built_assets/webpack_bundle_analyzer/$PLUGIN.html" --mode static --no-open
done
node .buildkite/scripts/steps/webpack_bundle_analyzer/upload.js
ts-node .buildkite/scripts/steps/webpack_bundle_analyzer/upload.ts

View file

@ -6,23 +6,23 @@
* Side Public License, v 1.
*/
const execSync = require('child_process').execSync;
const fs = require('fs');
const path = require('path');
import { execSync } from 'child_process';
import fs from 'fs';
import path from 'path';
const GITHUB_CONTEXT = 'Build and Publish Webpack bundle analyzer reports';
const WEBPACK_REPORTS =
process.env.BUILDKITE_PULL_REQUEST && process.env.BUILDKITE_PULL_REQUEST !== 'false'
? `pr-${process.env.BUILDKITE_PULL_REQUEST}`
: process.env.BUILDKITE_BRANCH.replace('/', '__');
: (process.env.BUILDKITE_BRANCH ?? '').replace('/', '__');
const WEBPACK_REPORTS_BUCKET = 'ci-artifacts.kibana.dev/webpack_bundle_analyzer';
const WEBPACK_REPORTS_BUCKET_URL = `https://${WEBPACK_REPORTS_BUCKET}/${WEBPACK_REPORTS}`;
const WEBPACK_REPORTS_BASE_URL = `${WEBPACK_REPORTS_BUCKET_URL}/${process.env.BUILDKITE_COMMIT}`;
const exec = (...args) => execSync(args.join(' '), { stdio: 'inherit' });
const exec = (...args: string[]) => execSync(args.join(' '), { stdio: 'inherit' });
const ghStatus = (state, description) =>
const ghStatus = (state: string, description: string) =>
exec(
`gh api "repos/elastic/kibana/statuses/${process.env.BUILDKITE_COMMIT}"`,
`-f state=${state}`,

23
.buildkite/tsconfig.json Normal file
View file

@ -0,0 +1,23 @@
{
"extends": "../tsconfig.base.json",
"compilerOptions": {
"declaration": true,
"declarationMap": true,
"incremental": false,
"composite": false,
"emitDeclarationOnly": true,
"outDir": "./target/types",
"types": ["node", "mocha"],
"paths": {
"#pipeline-utils": [".buildkite/pipeline-utils/index.ts"],
"#pipeline-utils/*": [".buildkite/pipeline-utils/*"]
}
},
"include": [
"pipeline-utils/**/*",
"pipelines/**/*",
"scripts/**/*",
"pipelines/flaky_tests/groups.json",
"pull_requests.json"
]
}

View file

@ -0,0 +1,11 @@
{
"compilerOptions": {
"strict": true,
"module": "commonjs",
"moduleResolution": "node",
"resolveJsonModule": true,
"noEmit": true,
"lib": ["es2020"]
},
"include": ["src/**/*.test.ts"]
}

View file

@ -1724,5 +1724,16 @@ module.exports = {
quotes: ['error', 'single', { avoidEscape: true, allowTemplateLiterals: false }],
},
},
/**
* Code inside .buildkite runs separately from everything else in CI, before bootstrap, with ts-node. It needs a few tweaks because of this.
*/
{
files: '.buildkite/**/*.{js,ts}',
rules: {
'no-console': 'off',
'@kbn/imports/no_unresolvable_imports': 'off',
},
},
],
};

View file

@ -31,6 +31,7 @@ export const PROJECTS = [
createProject('test/tsconfig.json', { name: 'kibana/test' }),
createProject('x-pack/test/tsconfig.json', { name: 'x-pack/test' }),
createProject('src/core/tsconfig.json'),
createProject('.buildkite/tsconfig.json'),
createProject('x-pack/plugins/drilldowns/url_drilldown/tsconfig.json', {
name: 'security_solution/cypress',