mirror of
https://github.com/elastic/kibana.git
synced 2025-04-24 17:59:23 -04:00
This commit is contained in:
parent
40dcf12c37
commit
e43b2bc5c4
4 changed files with 354 additions and 6 deletions
|
@ -7,9 +7,11 @@
|
|||
|
||||
/**
 * Entry point for the InfraOps GraphQL API integration test suite.
 * Registers each endpoint-specific test file with the test runner.
 *
 * Fix: the scraped diff had merged old and new hunk lines, leaving
 * './metrics' and './waffle' registered twice; each file is now loaded
 * exactly once, in alphabetical order.
 */
export default function ({ loadTestFile }) {
  describe('InfraOps GraphQL Endpoints', () => {
    loadTestFile(require.resolve('./capabilities'));
    loadTestFile(require.resolve('./log_entries'));
    loadTestFile(require.resolve('./log_summary'));
    loadTestFile(require.resolve('./metrics'));
    loadTestFile(require.resolve('./sources'));
    loadTestFile(require.resolve('./waffle'));
  });
}
|
||||
|
|
256
x-pack/test/api_integration/apis/infra/log_entries.ts
Normal file
256
x-pack/test/api_integration/apis/infra/log_entries.ts
Normal file
|
@ -0,0 +1,256 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import { ascending, pairs } from 'd3-array';
|
||||
import expect from 'expect.js';
|
||||
import gql from 'graphql-tag';
|
||||
|
||||
import { InfraTimeKey } from '../../../../plugins/infra/common/graphql/types';
|
||||
import { KbnTestProvider } from './types';
|
||||
|
||||
const KEY_WITHIN_DATA_RANGE = {
|
||||
time: new Date('2018-10-17T19:50:00.000Z').valueOf(),
|
||||
tiebreaker: 0,
|
||||
};
|
||||
const EARLIEST_KEY_WITH_DATA = {
|
||||
time: new Date('2018-10-17T19:42:22.000Z').valueOf(),
|
||||
tiebreaker: 5497614,
|
||||
};
|
||||
const LATEST_KEY_WITH_DATA = {
|
||||
time: new Date('2018-10-17T19:57:21.611Z').valueOf(),
|
||||
tiebreaker: 5603910,
|
||||
};
|
||||
|
||||
// GraphQL query fetching log entries centered on a given time key, with a
// configurable number of entries before and after it, plus flags indicating
// whether more entries exist beyond each edge of the returned window.
const logEntriesAroundQuery = gql`
  query LogEntriesAroundQuery(
    $timeKey: InfraTimeKeyInput!
    $countBefore: Int = 0
    $countAfter: Int = 0
    $filterQuery: String
  ) {
    source(id: "default") {
      id
      logEntriesAround(
        key: $timeKey
        countBefore: $countBefore
        countAfter: $countAfter
        filterQuery: $filterQuery
      ) {
        start {
          time
          tiebreaker
        }
        end {
          time
          tiebreaker
        }
        hasMoreBefore
        hasMoreAfter
        entries {
          gid
          key {
            time
            tiebreaker
          }
          message {
            ... on InfraLogMessageFieldSegment {
              field
              value
            }
            ... on InfraLogMessageConstantSegment {
              constant
            }
          }
        }
      }
    }
  }
`;
|
||||
|
||||
// GraphQL query fetching all log entries between two time keys (inclusive
// range delimited by $startKey and $endKey), with the same entry shape as
// the "around" query above.
const logEntriesBetweenQuery = gql`
  query LogEntriesBetweenQuery(
    $startKey: InfraTimeKeyInput!
    $endKey: InfraTimeKeyInput!
    $filterQuery: String
  ) {
    source(id: "default") {
      id
      logEntriesBetween(startKey: $startKey, endKey: $endKey, filterQuery: $filterQuery) {
        start {
          time
          tiebreaker
        }
        end {
          time
          tiebreaker
        }
        hasMoreBefore
        hasMoreAfter
        entries {
          gid
          key {
            time
            tiebreaker
          }
          message {
            ... on InfraLogMessageFieldSegment {
              field
              value
            }
            ... on InfraLogMessageConstantSegment {
              constant
            }
          }
        }
      }
    }
  }
`;
|
||||
|
||||
const logEntriesTests: KbnTestProvider = ({ getService }) => {
|
||||
const esArchiver = getService('esArchiver');
|
||||
const client = getService('infraOpsGraphQLClient');
|
||||
|
||||
describe('log entry apis', () => {
|
||||
before(() => esArchiver.load('infra'));
|
||||
after(() => esArchiver.unload('infra'));
|
||||
|
||||
describe('logEntriesAround', () => {
|
||||
it('should return newer and older log entries when present', async () => {
|
||||
const {
|
||||
data: {
|
||||
source: { logEntriesAround },
|
||||
},
|
||||
} = await client.query<any>({
|
||||
query: logEntriesAroundQuery,
|
||||
variables: {
|
||||
timeKey: KEY_WITHIN_DATA_RANGE,
|
||||
countBefore: 100,
|
||||
countAfter: 100,
|
||||
},
|
||||
});
|
||||
|
||||
expect(logEntriesAround).to.have.property('entries');
|
||||
expect(logEntriesAround.entries).to.have.length(200);
|
||||
expect(isSorted(ascendingTimeKey)(logEntriesAround.entries)).to.equal(true);
|
||||
|
||||
expect(logEntriesAround.hasMoreBefore).to.equal(true);
|
||||
expect(logEntriesAround.hasMoreAfter).to.equal(true);
|
||||
});
|
||||
|
||||
it('should indicate if no older entries are present', async () => {
|
||||
const {
|
||||
data: {
|
||||
source: { logEntriesAround },
|
||||
},
|
||||
} = await client.query<any>({
|
||||
query: logEntriesAroundQuery,
|
||||
variables: {
|
||||
timeKey: EARLIEST_KEY_WITH_DATA,
|
||||
countBefore: 100,
|
||||
countAfter: 100,
|
||||
},
|
||||
});
|
||||
|
||||
expect(logEntriesAround.hasMoreBefore).to.equal(false);
|
||||
expect(logEntriesAround.hasMoreAfter).to.equal(true);
|
||||
});
|
||||
|
||||
it('should indicate if no newer entries are present', async () => {
|
||||
const {
|
||||
data: {
|
||||
source: { logEntriesAround },
|
||||
},
|
||||
} = await client.query<any>({
|
||||
query: logEntriesAroundQuery,
|
||||
variables: {
|
||||
timeKey: LATEST_KEY_WITH_DATA,
|
||||
countBefore: 100,
|
||||
countAfter: 100,
|
||||
},
|
||||
});
|
||||
|
||||
expect(logEntriesAround.hasMoreBefore).to.equal(true);
|
||||
expect(logEntriesAround.hasMoreAfter).to.equal(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('logEntriesBetween', () => {
|
||||
it('should return log entries between the start and end keys', async () => {
|
||||
const {
|
||||
data: {
|
||||
source: { logEntriesBetween },
|
||||
},
|
||||
} = await client.query<any>({
|
||||
query: logEntriesBetweenQuery,
|
||||
variables: {
|
||||
startKey: EARLIEST_KEY_WITH_DATA,
|
||||
endKey: KEY_WITHIN_DATA_RANGE,
|
||||
},
|
||||
});
|
||||
|
||||
expect(logEntriesBetween).to.have.property('entries');
|
||||
expect(logEntriesBetween.entries).to.not.be.empty();
|
||||
expect(isSorted(ascendingTimeKey)(logEntriesBetween.entries)).to.equal(true);
|
||||
|
||||
expect(
|
||||
ascendingTimeKey(logEntriesBetween.entries[0], { key: EARLIEST_KEY_WITH_DATA })
|
||||
).to.be.above(-1);
|
||||
expect(
|
||||
ascendingTimeKey(logEntriesBetween.entries[logEntriesBetween.entries.length - 1], {
|
||||
key: KEY_WITHIN_DATA_RANGE,
|
||||
})
|
||||
).to.be.below(1);
|
||||
});
|
||||
|
||||
it('should return results consistent with logEntriesAround', async () => {
|
||||
const {
|
||||
data: {
|
||||
source: { logEntriesAround },
|
||||
},
|
||||
} = await client.query<any>({
|
||||
query: logEntriesAroundQuery,
|
||||
variables: {
|
||||
timeKey: KEY_WITHIN_DATA_RANGE,
|
||||
countBefore: 100,
|
||||
countAfter: 100,
|
||||
},
|
||||
});
|
||||
|
||||
const {
|
||||
data: {
|
||||
source: { logEntriesBetween },
|
||||
},
|
||||
} = await client.query<any>({
|
||||
query: logEntriesBetweenQuery,
|
||||
variables: {
|
||||
startKey: {
|
||||
time: logEntriesAround.start.time,
|
||||
tiebreaker: logEntriesAround.start.tiebreaker - 1,
|
||||
},
|
||||
endKey: {
|
||||
time: logEntriesAround.end.time,
|
||||
tiebreaker: logEntriesAround.end.tiebreaker + 1,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
expect(logEntriesBetween).to.eql(logEntriesAround);
|
||||
});
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
// tslint:disable-next-line no-default-export
|
||||
export default logEntriesTests;
|
||||
|
||||
const isSorted = <Value>(comparator: (first: Value, second: Value) => number) => (
|
||||
values: Value[]
|
||||
) => pairs(values, comparator).every(order => order <= 0);
|
||||
|
||||
// Comparator ordering entries by time key: primarily by timestamp, falling
// back to the tiebreaker when timestamps are equal. The `||` relies on d3's
// `ascending` returning 0 (falsy) for equal values to trigger the fallback.
const ascendingTimeKey = (first: { key: InfraTimeKey }, second: { key: InfraTimeKey }) =>
  ascending(first.key.time, second.key.time) ||
  ascending(first.key.tiebreaker, second.key.tiebreaker);
|
86
x-pack/test/api_integration/apis/infra/log_summary.ts
Normal file
86
x-pack/test/api_integration/apis/infra/log_summary.ts
Normal file
|
@ -0,0 +1,86 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License;
|
||||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import { pairs } from 'd3-array';
|
||||
import expect from 'expect.js';
|
||||
import gql from 'graphql-tag';
|
||||
|
||||
import { KbnTestProvider } from './types';
|
||||
|
||||
const EARLIEST_TIME_WITH_DATA = new Date('2018-10-17T19:42:22.000Z').valueOf();
|
||||
const LATEST_TIME_WITH_DATA = new Date('2018-10-17T19:57:21.611Z').valueOf();
|
||||
|
||||
// GraphQL query fetching a bucketed histogram of log entry counts between
// two timestamps, with a caller-supplied bucket size and optional filter.
const logSummaryBetweenQuery = gql`
  query LogSummary(
    $sourceId: ID = "default"
    $start: Float!
    $end: Float!
    $bucketSize: Float!
    $filterQuery: String
  ) {
    source(id: $sourceId) {
      id
      logSummaryBetween(
        start: $start
        end: $end
        bucketSize: $bucketSize
        filterQuery: $filterQuery
      ) {
        start
        end
        buckets {
          start
          end
          entriesCount
        }
      }
    }
  }
`;
|
||||
|
||||
const logSummaryTests: KbnTestProvider = ({ getService }) => {
|
||||
const esArchiver = getService('esArchiver');
|
||||
const client = getService('infraOpsGraphQLClient');
|
||||
|
||||
describe('logSummaryBetween', () => {
|
||||
before(() => esArchiver.load('infra'));
|
||||
after(() => esArchiver.unload('infra'));
|
||||
|
||||
it('should return empty and non-empty consecutive buckets', async () => {
|
||||
const start = EARLIEST_TIME_WITH_DATA;
|
||||
const end = LATEST_TIME_WITH_DATA + (LATEST_TIME_WITH_DATA - EARLIEST_TIME_WITH_DATA);
|
||||
const bucketSize = Math.ceil((end - start) / 10);
|
||||
|
||||
const {
|
||||
data: {
|
||||
source: { logSummaryBetween },
|
||||
},
|
||||
} = await client.query<any>({
|
||||
query: logSummaryBetweenQuery,
|
||||
variables: {
|
||||
start,
|
||||
end,
|
||||
bucketSize,
|
||||
},
|
||||
});
|
||||
|
||||
expect(logSummaryBetween).to.have.property('buckets');
|
||||
expect(logSummaryBetween.buckets).to.have.length(10);
|
||||
expect(
|
||||
logSummaryBetween.buckets.filter((bucket: any) => bucket.entriesCount > 0)
|
||||
).to.have.length(5);
|
||||
expect(
|
||||
pairs(
|
||||
logSummaryBetween.buckets,
|
||||
(first: any, second: any) => first.end === second.start
|
||||
).every(pair => pair)
|
||||
).to.equal(true);
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
// tslint:disable-next-line no-default-export
|
||||
export default logSummaryTests;
|
|
@ -6,16 +6,22 @@
|
|||
|
||||
import { format as formatUrl } from 'url';
|
||||
import fetch from 'node-fetch';
|
||||
import { InMemoryCache } from 'apollo-cache-inmemory';
|
||||
import { InMemoryCache, IntrospectionFragmentMatcher } from 'apollo-cache-inmemory';
|
||||
import { ApolloClient } from 'apollo-client';
|
||||
import { HttpLink } from 'apollo-link-http';
|
||||
|
||||
import introspectionQueryResultData from '../../../plugins/infra/common/graphql/introspection.json';
|
||||
|
||||
export function InfraOpsGraphQLProvider({ getService }) {
|
||||
const config = getService('config');
|
||||
const kbnURL = formatUrl(config.get('servers.kibana'));
|
||||
|
||||
return new ApolloClient({
|
||||
cache: new InMemoryCache(),
|
||||
cache: new InMemoryCache({
|
||||
fragmentMatcher: new IntrospectionFragmentMatcher({
|
||||
introspectionQueryResultData,
|
||||
}),
|
||||
}),
|
||||
link: new HttpLink({
|
||||
credentials: 'same-origin',
|
||||
fetch,
|
||||
|
@ -25,6 +31,4 @@ export function InfraOpsGraphQLProvider({ getService }) {
|
|||
uri: `${kbnURL}/api/infra/graphql`,
|
||||
}),
|
||||
});
|
||||
|
||||
}
|
||||
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue