[7.4] [failedTestsReporter] load github issues on demand (#479… (#48003)
parent 41233a880d
commit efaedfd00c

3 changed files with 49 additions and 23 deletions
@@ -10,7 +10,7 @@ To fetch some JUnit reports from a recent build on CI, visit its `Google Cloud S
 copy(`wget "${Array.from($$('a[href$=".xml"]')).filter(a => a.innerText === 'Download').map(a => a.href.replace('https://storage.cloud.google.com/', 'https://storage.googleapis.com/')).join('" "')}"`)
 ```
 
-This copies a script to download the reporets, which can be executed in the `test/junit` directory.
+This copies a script to download the reports, which you should execute in the `test/junit` directory.
 
 Next, run the CLI in `--dry-run` mode so that it doesn't actually communicate with Github.
 
@@ -61,10 +61,12 @@ export class GithubApi {
     }
   }
 
-  async getAllFailedTestIssues() {
-    this.log.info('Fetching failed-test issues');
-    const issues: GithubIssue[] = [];
-    let nextRequest: RequestOptions = {
+  private failedTestIssuesPageCache: {
+    pages: GithubIssue[][];
+    nextRequest: RequestOptions | undefined;
+  } = {
+    pages: [],
+    nextRequest: {
       safeForDryRun: true,
       method: 'GET',
       url: Url.resolve(BASE_URL, 'issues'),
@@ -72,30 +74,55 @@ export class GithubApi {
         state: 'all',
         per_page: '100',
         labels: 'failed-test',
+        sort: 'updated',
+        direction: 'desc',
+      },
     },
   };
 
-    while (true) {
-      const resp = await this.request<GithubIssue[]>(nextRequest, []);
+  /**
+   * Iterate the `failed-test` issues from elastic/kibana, each response
+   * from Github is cached and subsequent calls to this method will first
+   * iterate the previous responses from Github, then start requesting
+   * more pages of issues from github until all pages have been cached.
+   *
+   * Aborting the iterator part way through will prevent unnecessary request
+   * to Github from being issued.
+   */
+  async *iterateCachedFailedTestIssues() {
+    const cache = this.failedTestIssuesPageCache;
 
-      for (const issue of resp.data) {
-        issues.push(issue);
-      }
+    // start from page 0, and progress forward if we have cache or a request that will load that cache page
+    for (let page = 0; page < cache.pages.length || cache.nextRequest; page++) {
+      if (page >= cache.pages.length && cache.nextRequest) {
+        const resp = await this.request<GithubIssue[]>(cache.nextRequest, []);
+        cache.pages.push(resp.data);
 
-      const parsed =
+        const link =
           typeof resp.headers.link === 'string' ? parseLinkHeader(resp.headers.link) : undefined;
-      if (parsed && parsed.next && parsed.next.url) {
-        nextRequest = {
+
+        cache.nextRequest =
+          link && link.next && link.next.url
+            ? {
                 safeForDryRun: true,
                 method: 'GET',
-          url: parsed.next.url,
-        };
-      } else {
-        break;
+                url: link.next.url,
+              }
+            : undefined;
+      }
+
+      for (const issue of cache.pages[page]) {
+        yield issue;
+      }
     }
   }
 
-    return issues;
+  async findFailedTestIssue(test: (issue: GithubIssue) => boolean) {
+    for await (const issue of this.iterateCachedFailedTestIssues()) {
+      if (test(issue)) {
+        return issue;
+      }
+    }
   }
 
   async editIssueBodyAndEnsureOpen(issueNumber: number, newBody: string) {
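The core of this change is the cached, lazily-paginated async generator: a page is only requested from Github when iteration runs past what has already been cached, and aborting the iteration early stops further requests. The sketch below illustrates the same pattern outside of `GithubApi`; `Issue`, `fetchPage`, and the in-memory pages are hypothetical stand-ins for `GithubIssue`, `this.request`, and Github's paginated issues endpoint.

```ts
interface Issue {
  number: number;
  body: string;
}

// Hypothetical stand-in for an HTTP request against a paginated issues endpoint:
// returns one page of issues plus the page number to request next, if any.
async function fetchPage(page: number): Promise<{ data: Issue[]; next?: number }> {
  const all: Issue[][] = [
    [{ number: 1, body: 'test.class: Foo' }, { number: 2, body: 'test.class: Bar' }],
    [{ number: 3, body: 'test.class: Baz' }],
  ];
  return { data: all[page], next: page + 1 < all.length ? page + 1 : undefined };
}

const cache: { pages: Issue[][]; nextPage: number | undefined } = {
  pages: [],
  nextPage: 0,
};

// Yield cached issues first; only hit the "API" when iteration runs past the cache.
async function* iterateCachedIssues() {
  for (let page = 0; page < cache.pages.length || cache.nextPage !== undefined; page++) {
    if (page >= cache.pages.length && cache.nextPage !== undefined) {
      const resp = await fetchPage(cache.nextPage);
      cache.pages.push(resp.data);
      cache.nextPage = resp.next;
    }

    for (const issue of cache.pages[page]) {
      yield issue;
    }
  }
}

// Find the first issue matching a predicate; returning out of the for-await
// loop early means later pages are never requested.
async function findIssue(test: (issue: Issue) => boolean) {
  for await (const issue of iterateCachedIssues()) {
    if (test(issue)) {
      return issue;
    }
  }
}

findIssue(issue => issue.body.includes('Bar')).then(issue => {
  // Only the first page was fetched to satisfy this lookup; a later call
  // re-reads the cached page before requesting more.
  console.log(issue);
});
```

In the diff above, `failedTestIssuesPageCache` plays the role of `cache`, holding the fetched pages plus the `RequestOptions` for the next page (derived from the response's `Link` header), and `findFailedTestIssue` is the predicate-based consumer.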
@@ -61,7 +61,6 @@ export function runFailedTestsReporterCli() {
       }
 
       const githubApi = new GithubApi(log, process.env.GITHUB_TOKEN, dryRun);
-      const issues = await githubApi.getAllFailedTestIssues();
       const reportPaths = await globby(['target/junit/**/*.xml'], {
         cwd: REPO_ROOT,
         absolute: true,
@@ -69,7 +68,7 @@ export function runFailedTestsReporterCli() {
 
       for (const reportPath of reportPaths) {
         for (const failure of await getFailures(log, reportPath)) {
-          const existingIssue = issues.find(
+          const existingIssue = await githubApi.findFailedTestIssue(
             i =>
               getIssueMetadata(i.body, 'test.class') === failure.classname &&
               getIssueMetadata(i.body, 'test.name') === failure.name
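Because `findFailedTestIssue` is now called once per JUnit failure, it matters that repeated lookups share the page cache instead of re-fetching from Github. A minimal sketch of that property, with a request counter and an in-memory page source standing in for the real `GithubApi` (all names here are illustrative, not taken from the actual code):

```ts
interface Issue {
  number: number;
  title: string;
}

const pages: Issue[][] = [
  [{ number: 1, title: 'Failing test: Foo' }],
  [{ number: 2, title: 'Failing test: Bar' }],
];

let requestCount = 0;

const cache: { pages: Issue[][]; nextPage: number | undefined } = { pages: [], nextPage: 0 };

async function* iterateCachedIssues() {
  for (let page = 0; page < cache.pages.length || cache.nextPage !== undefined; page++) {
    if (page >= cache.pages.length && cache.nextPage !== undefined) {
      requestCount += 1; // stands in for one HTTP request to the issues endpoint
      cache.pages.push(pages[cache.nextPage]);
      cache.nextPage = cache.nextPage + 1 < pages.length ? cache.nextPage + 1 : undefined;
    }
    yield* cache.pages[page];
  }
}

async function findIssue(test: (issue: Issue) => boolean) {
  for await (const issue of iterateCachedIssues()) {
    if (test(issue)) {
      return issue;
    }
  }
}

(async () => {
  await findIssue(i => i.title.includes('Foo')); // fetches page 0 only
  await findIssue(i => i.title.includes('Bar')); // reuses page 0 from cache, fetches page 1
  await findIssue(i => i.title.includes('Foo')); // served entirely from cache
  console.log(requestCount); // 2 — one request per page, regardless of how many lookups ran
})();
```

With the previous `getAllFailedTestIssues`, every page of `failed-test` issues was fetched up front on every run; with the cached iterator, the number of requests is bounded by the number of pages actually needed across all lookups.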