[Logs UI] HTTP endpoint for Log Summary (#51903)
* Scaffold `/logs/summary` endpoint
* Use HTTP API for the log summary
* Handle queries in log summary
* Simplify `useLogSummary` implementation
* Scaffold `/logs/summary/highlights` API
* Use HTTP endpoint for log summary highlights
* Tweak `highlightTerms`
* Deduplicate ES queries for summary endpoints
* Clean GraphQL implementation
* Make tests pass
* Handle server errors
* Pass source to the API
* Cleanup tests
* Future-proof response types

  Wrap the existing response into a `{ data: ... }` object to allow adding metadata in the future.

* Refactor tests with `@testing-library/react-hooks`
* Adapt endpoint to NP
* Tweak types in the test
* Split API methods into separate files
* Flatten highlights API
* Restructure `common/http_api/logs` folder

  We will group relevant codecs and types into `log_entries`, splitting summary and summary_highlights as two individual concepts.

* Reorganize route files
* Resurrect changes in `server/lib/{adapter,domain}`

  Replace some of the exported types from GraphQL with io-ts codecs.

* Wire the route with the domain library
* Remove dead types
* Clean up test file
* Fix merge mishap

Co-authored-by: Elastic Machine <elasticmachine@users.noreply.github.com>
This commit is contained in:
parent 254b18c20b
commit 0a1ffd9921

24 changed files with 516 additions and 1492 deletions
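In short, the GraphQL fields `logSummaryBetween` and `logSummaryHighlightsBetween` are replaced by two POST endpoints whose payloads are validated with io-ts codecs. A minimal sketch of the new round trip, using the codecs this commit adds under `common/http_api/log_entries` (the import path is abbreviated here for illustration):

```typescript
// Illustrative sketch only; the real plugin code calls these endpoints via `kfetch` (see below).
import { logEntriesSummaryRequestRT, LOG_ENTRIES_SUMMARY_PATH } from 'common/http_api'; // path abbreviated

// The request body is checked against `logEntriesSummaryRequestRT` on both sides of the wire.
const requestBody = logEntriesSummaryRequestRT.encode({
  sourceId: 'default',
  startDate: 1575000000000, // millisecond timestamps
  endDate: 1575000060000,
  bucketSize: 1000,
  query: null, // optional filter query
});

// POST ${LOG_ENTRIES_SUMMARY_PATH} responds with
// { data: { start, end, buckets: [{ start, end, entriesCount }, ...] } },
// wrapped in `data` so metadata can be added later without breaking clients.
```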
@@ -34,10 +34,6 @@ export interface InfraSource
  logEntriesBetween: InfraLogEntryInterval;
  /** Sequences of log entries matching sets of highlighting queries within an interval */
  logEntryHighlights: InfraLogEntryInterval[];
  /** A consecutive span of summary buckets within an interval */
  logSummaryBetween: InfraLogSummaryInterval;
  /** Spans of summary highlight buckets within an interval */
  logSummaryHighlightsBetween: InfraLogSummaryHighlightInterval[];

  logItem: InfraLogItem;
  /** A snapshot of nodes */

@@ -208,50 +204,6 @@ export interface InfraLogEntryFieldColumn
  /** A list of highlighted substrings of the value */
  highlights: string[];
}
/** A consecutive sequence of log summary buckets */
export interface InfraLogSummaryInterval {
  /** The millisecond timestamp corresponding to the start of the interval covered by the summary */
  start?: number | null;
  /** The millisecond timestamp corresponding to the end of the interval covered by the summary */
  end?: number | null;
  /** The query the log entries were filtered by */
  filterQuery?: string | null;
  /** A list of the log entries */
  buckets: InfraLogSummaryBucket[];
}
/** A log summary bucket */
export interface InfraLogSummaryBucket {
  /** The start timestamp of the bucket */
  start: number;
  /** The end timestamp of the bucket */
  end: number;
  /** The number of entries inside the bucket */
  entriesCount: number;
}
/** A consecutive sequence of log summary highlight buckets */
export interface InfraLogSummaryHighlightInterval {
  /** The millisecond timestamp corresponding to the start of the interval covered by the summary */
  start?: number | null;
  /** The millisecond timestamp corresponding to the end of the interval covered by the summary */
  end?: number | null;
  /** The query the log entries were filtered by */
  filterQuery?: string | null;
  /** The query the log entries were highlighted with */
  highlightQuery?: string | null;
  /** A list of the log entries */
  buckets: InfraLogSummaryHighlightBucket[];
}
/** A log summary highlight bucket */
export interface InfraLogSummaryHighlightBucket {
  /** The start timestamp of the bucket */
  start: number;
  /** The end timestamp of the bucket */
  end: number;
  /** The number of highlighted entries inside the bucket */
  entriesCount: number;
  /** The time key of a representative of the highlighted log entries in this bucket */
  representativeKey: InfraTimeKey;
}

export interface InfraLogItem {
  /** The ID of the document */

@@ -472,28 +424,6 @@ export interface LogEntryHighlightsInfraSourceArgs
  /** The highlighting to apply to the log entries */
  highlights: InfraLogEntryHighlightInput[];
}
export interface LogSummaryBetweenInfraSourceArgs {
  /** The millisecond timestamp that corresponds to the start of the interval */
  start: number;
  /** The millisecond timestamp that corresponds to the end of the interval */
  end: number;
  /** The size of each bucket in milliseconds */
  bucketSize: number;
  /** The query to filter the log entries by */
  filterQuery?: string | null;
}
export interface LogSummaryHighlightsBetweenInfraSourceArgs {
  /** The millisecond timestamp that corresponds to the start of the interval */
  start: number;
  /** The millisecond timestamp that corresponds to the end of the interval */
  end: number;
  /** The size of each bucket in milliseconds */
  bucketSize: number;
  /** The query to filter the log entries by */
  filterQuery?: string | null;
  /** The highlighting to apply to the log entries */
  highlightQueries: string[];
}
export interface LogItemInfraSourceArgs {
  id: string;
}

@@ -753,99 +683,6 @@ export namespace LogEntryHighlightsQuery
  export type Entries = InfraLogEntryHighlightFields.Fragment;
}

export namespace LogSummaryHighlightsQuery {
  export type Variables = {
    sourceId?: string | null;
    start: number;
    end: number;
    bucketSize: number;
    highlightQueries: string[];
    filterQuery?: string | null;
  };

  export type Query = {
    __typename?: 'Query';

    source: Source;
  };

  export type Source = {
    __typename?: 'InfraSource';

    id: string;

    logSummaryHighlightsBetween: LogSummaryHighlightsBetween[];
  };

  export type LogSummaryHighlightsBetween = {
    __typename?: 'InfraLogSummaryHighlightInterval';

    start?: number | null;

    end?: number | null;

    buckets: Buckets[];
  };

  export type Buckets = {
    __typename?: 'InfraLogSummaryHighlightBucket';

    start: number;

    end: number;

    entriesCount: number;

    representativeKey: RepresentativeKey;
  };

  export type RepresentativeKey = InfraTimeKeyFields.Fragment;
}

export namespace LogSummary {
  export type Variables = {
    sourceId?: string | null;
    start: number;
    end: number;
    bucketSize: number;
    filterQuery?: string | null;
  };

  export type Query = {
    __typename?: 'Query';

    source: Source;
  };

  export type Source = {
    __typename?: 'InfraSource';

    id: string;

    logSummaryBetween: LogSummaryBetween;
  };

  export type LogSummaryBetween = {
    __typename?: 'InfraLogSummaryInterval';

    start?: number | null;

    end?: number | null;

    buckets: Buckets[];
  };

  export type Buckets = {
    __typename?: 'InfraLogSummaryBucket';

    start: number;

    end: number;

    entriesCount: number;
  };
}

export namespace MetricsQuery {
  export type Variables = {
    sourceId: string;
@@ -6,4 +6,5 @@

export * from './log_analysis';
export * from './metadata_api';
export * from './log_entries';
export * from './metrics_explorer';

@@ -4,4 +4,5 @@
 * you may not use this file except in compliance with the Elastic License.
 */

export * from './log_summary';
export * from './summary';
export * from './summary_highlights';
@@ -0,0 +1,37 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import * as rt from 'io-ts';

export const LOG_ENTRIES_SUMMARY_PATH = '/api/log_entries/summary';

export const logEntriesSummaryRequestRT = rt.type({
  sourceId: rt.string,
  startDate: rt.number,
  endDate: rt.number,
  bucketSize: rt.number,
  query: rt.union([rt.string, rt.undefined, rt.null]),
});

export type LogEntriesSummaryRequest = rt.TypeOf<typeof logEntriesSummaryRequestRT>;

export const logEntriesSummaryBucketRT = rt.type({
  start: rt.number,
  end: rt.number,
  entriesCount: rt.number,
});

export type LogEntriesSummaryBucket = rt.TypeOf<typeof logEntriesSummaryBucketRT>;

export const logEntriesSummaryResponseRT = rt.type({
  data: rt.type({
    start: rt.number,
    end: rt.number,
    buckets: rt.array(logEntriesSummaryBucketRT),
  }),
});

export type LogEntriesSummaryResponse = rt.TypeOf<typeof logEntriesSummaryResponseRT>;
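These codecs double as compile-time types and runtime validators. A small sketch of how a payload could be checked against `logEntriesSummaryResponseRT` (the sample payloads and the `isRight` check are illustrative, not part of the change):

```typescript
import { isRight } from 'fp-ts/lib/Either';
import { logEntriesSummaryResponseRT } from './summary'; // hypothetical relative import

// A payload shaped like the endpoint's response decodes successfully...
const valid = logEntriesSummaryResponseRT.decode({
  data: { start: 98500, end: 101500, buckets: [{ start: 98500, end: 99500, entriesCount: 7 }] },
});
console.log(isRight(valid)); // true

// ...while a payload missing `entriesCount` is rejected with structured errors.
const invalid = logEntriesSummaryResponseRT.decode({
  data: { start: 98500, end: 101500, buckets: [{ start: 98500, end: 99500 }] },
});
console.log(isRight(invalid)); // false
```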
@@ -0,0 +1,48 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import * as rt from 'io-ts';
import { logEntriesSummaryRequestRT, logEntriesSummaryBucketRT } from './summary';

export const LOG_ENTRIES_SUMMARY_HIGHLIGHTS_PATH = '/api/log_entries/summary_highlights';

export const logEntriesSummaryHighlightsRequestRT = rt.intersection([
  logEntriesSummaryRequestRT,
  rt.type({
    highlightTerms: rt.array(rt.string),
  }),
]);

export type LogEntriesSummaryHighlightsRequest = rt.TypeOf<
  typeof logEntriesSummaryHighlightsRequestRT
>;

export const logEntriesSummaryHighlightsBucketRT = rt.intersection([
  logEntriesSummaryBucketRT,
  rt.type({
    representativeKey: rt.type({
      time: rt.number,
      tiebreaker: rt.number,
    }),
  }),
]);

export type LogEntriesSummaryHighlightsBucket = rt.TypeOf<
  typeof logEntriesSummaryHighlightsBucketRT
>;

export const logEntriesSummaryHighlightsResponseRT = rt.type({
  data: rt.array(
    rt.type({
      start: rt.number,
      end: rt.number,
      buckets: rt.array(logEntriesSummaryHighlightsBucketRT),
    })
  ),
});

export type LogEntriesSummaryHighlightsResponse = rt.TypeOf<
  typeof logEntriesSummaryHighlightsResponseRT
>;
@@ -0,0 +1,33 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import { fold } from 'fp-ts/lib/Either';
import { pipe } from 'fp-ts/lib/pipeable';
import { identity } from 'fp-ts/lib/function';
import { kfetch } from 'ui/kfetch';

import { throwErrors, createPlainError } from '../../../../../common/runtime_types';

import {
  LOG_ENTRIES_SUMMARY_HIGHLIGHTS_PATH,
  LogEntriesSummaryHighlightsRequest,
  logEntriesSummaryHighlightsRequestRT,
  logEntriesSummaryHighlightsResponseRT,
} from '../../../../../common/http_api';

export const fetchLogSummaryHighlights = async (
  requestArgs: LogEntriesSummaryHighlightsRequest
) => {
  const response = await kfetch({
    method: 'POST',
    pathname: LOG_ENTRIES_SUMMARY_HIGHLIGHTS_PATH,
    body: JSON.stringify(logEntriesSummaryHighlightsRequestRT.encode(requestArgs)),
  });

  return pipe(
    logEntriesSummaryHighlightsResponseRT.decode(response),
    fold(throwErrors(createPlainError), identity)
  );
};
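Decoding happens before the promise resolves, so callers receive either a typed value or a thrown error, never an unvalidated payload. A hypothetical call site (argument values are illustrative):

```typescript
// Sketch of a caller; the function and field names come from the diff above,
// the concrete values are made up for illustration.
async function loadErrorHighlights() {
  const response = await fetchLogSummaryHighlights({
    sourceId: 'default',
    startDate: 1575000000000,
    endDate: 1575000060000,
    bucketSize: 1000,
    query: null,
    highlightTerms: ['error'],
  });

  // `data` holds one result set per highlight term, each with its own buckets.
  for (const { buckets } of response.data) {
    for (const bucket of buckets) {
      console.log(bucket.representativeKey.time, bucket.entriesCount);
    }
  }
}
```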
@@ -1,44 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import gql from 'graphql-tag';

import { sharedFragments } from '../../../../common/graphql/shared';

export const logSummaryHighlightsQuery = gql`
  query LogSummaryHighlightsQuery(
    $sourceId: ID = "default"
    $start: Float!
    $end: Float!
    $bucketSize: Float!
    $highlightQueries: [String!]!
    $filterQuery: String
  ) {
    source(id: $sourceId) {
      id
      logSummaryHighlightsBetween(
        start: $start
        end: $end
        bucketSize: $bucketSize
        highlightQueries: $highlightQueries
        filterQuery: $filterQuery
      ) {
        start
        end
        buckets {
          start
          end
          entriesCount
          representativeKey {
            ...InfraTimeKeyFields
          }
        }
      }
    }
  }

  ${sharedFragments.InfraTimeKey}
`;

@@ -7,12 +7,9 @@
import { useEffect, useMemo, useState } from 'react';
import { debounce } from 'lodash';

import { LogSummaryHighlightsQuery } from '../../../graphql/types';
import { DependencyError, useApolloClient } from '../../../utils/apollo_context';
import { useTrackedPromise } from '../../../utils/use_tracked_promise';
import { logSummaryHighlightsQuery } from './log_summary_highlights.gql_query';

export type LogSummaryHighlights = LogSummaryHighlightsQuery.Query['source']['logSummaryHighlightsBetween'];
import { fetchLogSummaryHighlights } from './api/fetch_log_summary_highlights';
import { LogEntriesSummaryHighlightsResponse } from '../../../../common/http_api';

export const useLogSummaryHighlights = (
  sourceId: string,

@@ -23,41 +20,32 @@ export const useLogSummaryHighlights = (
  filterQuery: string | null,
  highlightTerms: string[]
) => {
  const apolloClient = useApolloClient();
  const [logSummaryHighlights, setLogSummaryHighlights] = useState<LogSummaryHighlights>([]);
  const [logSummaryHighlights, setLogSummaryHighlights] = useState<
    LogEntriesSummaryHighlightsResponse['data']
  >([]);

  const [loadLogSummaryHighlightsRequest, loadLogSummaryHighlights] = useTrackedPromise(
    {
      cancelPreviousOn: 'resolution',
      createPromise: async () => {
        if (!apolloClient) {
          throw new DependencyError('Failed to load source: No apollo client available.');
        }
        if (!start || !end || !highlightTerms.length) {
          throw new Error('Skipping request: Insufficient parameters');
        }

        return await apolloClient.query<
          LogSummaryHighlightsQuery.Query,
          LogSummaryHighlightsQuery.Variables
        >({
          fetchPolicy: 'no-cache',
          query: logSummaryHighlightsQuery,
          variables: {
            sourceId,
            start,
            end,
            bucketSize,
            highlightQueries: [highlightTerms[0]],
            filterQuery,
          },
        return await fetchLogSummaryHighlights({
          sourceId,
          startDate: start,
          endDate: end,
          bucketSize,
          query: filterQuery,
          highlightTerms,
        });
      },
      onResolve: response => {
        setLogSummaryHighlights(response.data.source.logSummaryHighlightsBetween);
        setLogSummaryHighlights(response.data);
      },
    },
    [apolloClient, sourceId, start, end, bucketSize, filterQuery, highlightTerms]
    [sourceId, start, end, bucketSize, filterQuery, highlightTerms]
  );

  const debouncedLoadSummaryHighlights = useMemo(() => debounce(loadLogSummaryHighlights, 275), [

@@ -0,0 +1,32 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import { fold } from 'fp-ts/lib/Either';
import { pipe } from 'fp-ts/lib/pipeable';
import { identity } from 'fp-ts/lib/function';
import { kfetch } from 'ui/kfetch';

import { throwErrors, createPlainError } from '../../../../../common/runtime_types';

import {
  LOG_ENTRIES_SUMMARY_PATH,
  LogEntriesSummaryRequest,
  logEntriesSummaryRequestRT,
  logEntriesSummaryResponseRT,
} from '../../../../../common/http_api';

export const fetchLogSummary = async (requestArgs: LogEntriesSummaryRequest) => {
  const response = await kfetch({
    method: 'POST',
    pathname: LOG_ENTRIES_SUMMARY_PATH,
    body: JSON.stringify(logEntriesSummaryRequestRT.encode(requestArgs)),
  });

  return pipe(
    logEntriesSummaryResponseRT.decode(response),
    fold(throwErrors(createPlainError), identity)
  );
};

@@ -1,35 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import gql from 'graphql-tag';

export const logSummaryQuery = gql`
  query LogSummary(
    $sourceId: ID = "default"
    $start: Float!
    $end: Float!
    $bucketSize: Float!
    $filterQuery: String
  ) {
    source(id: $sourceId) {
      id
      logSummaryBetween(
        start: $start
        end: $end
        bucketSize: $bucketSize
        filterQuery: $filterQuery
      ) {
        start
        end
        buckets {
          start
          end
          entriesCount
        }
      }
    }
  }
`;
@@ -4,271 +4,170 @@
 * you may not use this file except in compliance with the Elastic License.
 */

import React from 'react';
import { mountHook } from 'test_utils/enzyme_helpers';
import { renderHook } from '@testing-library/react-hooks';

import { ApolloClientContext } from '../../../utils/apollo_context';
import { useLogSummary } from './log_summary';

import { fetchLogSummary } from './api/fetch_log_summary';

// Typescript doesn't know that `fetchLogSummary` is a jest mock.
// We use a second variable with a type cast to help the compiler further down the line.
jest.mock('./api/fetch_log_summary', () => ({ fetchLogSummary: jest.fn() }));
const fetchLogSummaryMock = fetchLogSummary as jest.MockedFunction<typeof fetchLogSummary>;

describe('useLogSummary hook', () => {
  beforeEach(() => {
    fetchLogSummaryMock.mockClear();
  });

  it('provides an empty list of buckets by default', () => {
    const mockApolloClient = {
      query: jest.fn(),
    };

    const { getLastHookValue } = mountHook(
      () => useLogSummary('SOURCE_ID', null, 1000, null),
      createMockApolloProvider(mockApolloClient)
    );

    expect(getLastHookValue().buckets).toEqual([]);
    const { result } = renderHook(() => useLogSummary('SOURCE_ID', null, 1000, null));
    expect(result.current.buckets).toEqual([]);
  });

  /**
   * This is skipped until `act` can deal with async operations, see comment
   * below.
   *
   * The test cases below this are a temporary alternative until the
   * shortcomings of the `act` function have been overcome.
   */
  it.skip('queries for new summary buckets when the source id changes', async () => {
  it('queries for new summary buckets when the source id changes', async () => {
    const firstMockResponse = createMockResponse([{ start: 99000, end: 101000, entriesCount: 1 }]);
    const secondMockResponse = createMockResponse([{ start: 99000, end: 101000, entriesCount: 2 }]);
    const mockApolloClient = {
      query: jest
        .fn()
        .mockResolvedValueOnce(firstMockResponse)
        .mockResolvedValueOnce(secondMockResponse),
    };

    const { act, getLastHookValue } = mountHook(
    fetchLogSummaryMock
      .mockResolvedValueOnce(firstMockResponse)
      .mockResolvedValueOnce(secondMockResponse);

    const { result, waitForNextUpdate, rerender } = renderHook(
      ({ sourceId }) => useLogSummary(sourceId, 100000, 1000, null),
      createMockApolloProvider(mockApolloClient),
      { sourceId: 'INITIAL_SOURCE_ID' }
      {
        initialProps: { sourceId: 'INITIAL_SOURCE_ID' },
      }
    );

    expect(mockApolloClient.query).toHaveBeenCalledTimes(1);
    expect(mockApolloClient.query).toHaveBeenLastCalledWith(
    await waitForNextUpdate();

    expect(fetchLogSummaryMock).toHaveBeenCalledTimes(1);
    expect(fetchLogSummaryMock).toHaveBeenLastCalledWith(
      expect.objectContaining({
        variables: expect.objectContaining({
          sourceId: 'INITIAL_SOURCE_ID',
        }),
        sourceId: 'INITIAL_SOURCE_ID',
      })
    );
    expect(getLastHookValue().buckets).toEqual(
      firstMockResponse.data.source.logSummaryBetween.buckets
    );
    expect(result.current.buckets).toEqual(firstMockResponse.data.buckets);

    // DOESN'T WORK YET until https://github.com/facebook/react/pull/14853 has been merged
    await act(async (_, setArgs) => {
      setArgs({ sourceId: 'CHANGED_SOURCE_ID' });
    rerender({ sourceId: 'CHANGED_SOURCE_ID' });
    await waitForNextUpdate();

      // wait for the promise queue to be processed
      await mockApolloClient.query();
    });

    expect(mockApolloClient.query).toHaveBeenCalledTimes(2);
    expect(mockApolloClient.query).toHaveBeenLastCalledWith(
    expect(fetchLogSummaryMock).toHaveBeenCalledTimes(2);
    expect(fetchLogSummaryMock).toHaveBeenLastCalledWith(
      expect.objectContaining({
        variables: expect.objectContaining({
          sourceId: 'CHANGED_SOURCE_ID',
        }),
        sourceId: 'CHANGED_SOURCE_ID',
      })
    );
    expect(getLastHookValue().buckets).toEqual(
      secondMockResponse.data.source.logSummaryBetween.buckets
    );
    expect(result.current.buckets).toEqual(secondMockResponse.data.buckets);
  });

  /**
   * The following test cases use a bad workaround to avoid the problems
   * exhibited by the skipped test case above. Instead of a real Promise we
   * fake a synchronously resolving promise-like return value to avoid any
   * async behavior.
   *
   * They should be rewritten to the cleaner async/await style shown in the
   * test case above once `act` is capable of dealing with it.
   */

  it('queries for new summary buckets when the source id changes - workaround', () => {
  it('queries for new summary buckets when the filter query changes', async () => {
    const firstMockResponse = createMockResponse([{ start: 99000, end: 101000, entriesCount: 1 }]);
    const secondMockResponse = createMockResponse([{ start: 99000, end: 101000, entriesCount: 2 }]);
    const mockApolloClient = {
      query: jest
        .fn()
        .mockReturnValueOnce(createSyncMockPromise(firstMockResponse))
        .mockReturnValueOnce(createSyncMockPromise(secondMockResponse)),
    };

    const { act, getLastHookValue } = mountHook(
      ({ sourceId }) => useLogSummary(sourceId, 100000, 1000, null),
      createMockApolloProvider(mockApolloClient),
      { sourceId: 'INITIAL_SOURCE_ID' }
    );
    fetchLogSummaryMock
      .mockResolvedValueOnce(firstMockResponse)
      .mockResolvedValueOnce(secondMockResponse);

    expect(mockApolloClient.query).toHaveBeenCalledTimes(1);
    expect(mockApolloClient.query).toHaveBeenLastCalledWith(
      expect.objectContaining({
        variables: expect.objectContaining({
          sourceId: 'INITIAL_SOURCE_ID',
        }),
      })
    );
    expect(getLastHookValue().buckets).toEqual(
      firstMockResponse.data.source.logSummaryBetween.buckets
    );

    act((_, setArgs) => {
      setArgs({ sourceId: 'CHANGED_SOURCE_ID' });
    });

    expect(mockApolloClient.query).toHaveBeenCalledTimes(2);
    expect(mockApolloClient.query).toHaveBeenLastCalledWith(
      expect.objectContaining({
        variables: expect.objectContaining({
          sourceId: 'CHANGED_SOURCE_ID',
        }),
      })
    );
    expect(getLastHookValue().buckets).toEqual(
      secondMockResponse.data.source.logSummaryBetween.buckets
    );
  });

  it('queries for new summary buckets when the filter query changes', () => {
    const firstMockResponse = createMockResponse([{ start: 99000, end: 101000, entriesCount: 1 }]);
    const secondMockResponse = createMockResponse([{ start: 99000, end: 101000, entriesCount: 2 }]);
    const mockApolloClient = {
      query: jest
        .fn()
        .mockReturnValueOnce(createSyncMockPromise(firstMockResponse))
        .mockReturnValueOnce(createSyncMockPromise(secondMockResponse)),
    };

    const { act, getLastHookValue } = mountHook(
    const { result, waitForNextUpdate, rerender } = renderHook(
      ({ filterQuery }) => useLogSummary('SOURCE_ID', 100000, 1000, filterQuery),
      createMockApolloProvider(mockApolloClient),
      { filterQuery: 'INITIAL_FILTER_QUERY' }
      {
        initialProps: { filterQuery: 'INITIAL_FILTER_QUERY' },
      }
    );

    expect(mockApolloClient.query).toHaveBeenCalledTimes(1);
    expect(mockApolloClient.query).toHaveBeenLastCalledWith(
    await waitForNextUpdate();

    expect(fetchLogSummaryMock).toHaveBeenCalledTimes(1);
    expect(fetchLogSummaryMock).toHaveBeenLastCalledWith(
      expect.objectContaining({
        variables: expect.objectContaining({
          filterQuery: 'INITIAL_FILTER_QUERY',
        }),
        query: 'INITIAL_FILTER_QUERY',
      })
    );
    expect(getLastHookValue().buckets).toEqual(
      firstMockResponse.data.source.logSummaryBetween.buckets
    );
    expect(result.current.buckets).toEqual(firstMockResponse.data.buckets);

    act((_, setArgs) => {
      setArgs({ filterQuery: 'CHANGED_FILTER_QUERY' });
    });
    rerender({ filterQuery: 'CHANGED_FILTER_QUERY' });
    await waitForNextUpdate();

    expect(mockApolloClient.query).toHaveBeenCalledTimes(2);
    expect(mockApolloClient.query).toHaveBeenLastCalledWith(
    expect(fetchLogSummaryMock).toHaveBeenCalledTimes(2);
    expect(fetchLogSummaryMock).toHaveBeenLastCalledWith(
      expect.objectContaining({
        variables: expect.objectContaining({
          filterQuery: 'CHANGED_FILTER_QUERY',
        }),
        query: 'CHANGED_FILTER_QUERY',
      })
    );
    expect(getLastHookValue().buckets).toEqual(
      secondMockResponse.data.source.logSummaryBetween.buckets
    );
    expect(result.current.buckets).toEqual(secondMockResponse.data.buckets);
  });

  it('queries for new summary buckets when the midpoint time changes', () => {
    const mockApolloClient = {
      query: jest
        .fn()
        .mockReturnValueOnce(createSyncMockPromise(createMockResponse([])))
        .mockReturnValueOnce(createSyncMockPromise(createMockResponse([]))),
    };
  it('queries for new summary buckets when the midpoint time changes', async () => {
    fetchLogSummaryMock
      .mockResolvedValueOnce(createMockResponse([]))
      .mockResolvedValueOnce(createMockResponse([]));

    const { act } = mountHook(
    const { waitForNextUpdate, rerender } = renderHook(
      ({ midpointTime }) => useLogSummary('SOURCE_ID', midpointTime, 1000, null),
      createMockApolloProvider(mockApolloClient),
      { midpointTime: 100000 }
      {
        initialProps: { midpointTime: 100000 },
      }
    );

    expect(mockApolloClient.query).toHaveBeenCalledTimes(1);
    expect(mockApolloClient.query).toHaveBeenLastCalledWith(
    await waitForNextUpdate();
    expect(fetchLogSummaryMock).toHaveBeenCalledTimes(1);
    expect(fetchLogSummaryMock).toHaveBeenLastCalledWith(
      expect.objectContaining({
        variables: expect.objectContaining({
          start: 98500,
          end: 101500,
        }),
        startDate: 98500,
        endDate: 101500,
      })
    );

    act((_, setArgs) => {
      setArgs({ midpointTime: 200000 });
    });
    rerender({ midpointTime: 200000 });
    await waitForNextUpdate();

    expect(mockApolloClient.query).toHaveBeenCalledTimes(2);
    expect(mockApolloClient.query).toHaveBeenLastCalledWith(
    expect(fetchLogSummaryMock).toHaveBeenCalledTimes(2);
    expect(fetchLogSummaryMock).toHaveBeenLastCalledWith(
      expect.objectContaining({
        variables: expect.objectContaining({
          start: 198500,
          end: 201500,
        }),
        startDate: 198500,
        endDate: 201500,
      })
    );
  });

  it('queries for new summary buckets when the interval size changes', () => {
    const mockApolloClient = {
      query: jest
        .fn()
        .mockReturnValueOnce(createSyncMockPromise(createMockResponse([])))
        .mockReturnValueOnce(createSyncMockPromise(createMockResponse([]))),
    };
  it('queries for new summary buckets when the interval size changes', async () => {
    fetchLogSummaryMock
      .mockResolvedValueOnce(createMockResponse([]))
      .mockResolvedValueOnce(createMockResponse([]));

    const { act } = mountHook(
    const { waitForNextUpdate, rerender } = renderHook(
      ({ intervalSize }) => useLogSummary('SOURCE_ID', 100000, intervalSize, null),
      createMockApolloProvider(mockApolloClient),
      { intervalSize: 1000 }
      {
        initialProps: { intervalSize: 1000 },
      }
    );

    expect(mockApolloClient.query).toHaveBeenCalledTimes(1);
    expect(mockApolloClient.query).toHaveBeenLastCalledWith(
    await waitForNextUpdate();
    expect(fetchLogSummaryMock).toHaveBeenCalledTimes(1);
    expect(fetchLogSummaryMock).toHaveBeenLastCalledWith(
      expect.objectContaining({
        variables: expect.objectContaining({
          bucketSize: 10,
          start: 98500,
          end: 101500,
        }),
        bucketSize: 10,
        startDate: 98500,
        endDate: 101500,
      })
    );

    act((_, setArgs) => {
      setArgs({ intervalSize: 2000 });
    });
    rerender({ intervalSize: 2000 });
    await waitForNextUpdate();

    expect(mockApolloClient.query).toHaveBeenCalledTimes(2);
    expect(mockApolloClient.query).toHaveBeenLastCalledWith(
    expect(fetchLogSummaryMock).toHaveBeenCalledTimes(2);
    expect(fetchLogSummaryMock).toHaveBeenLastCalledWith(
      expect.objectContaining({
        variables: expect.objectContaining({
          bucketSize: 20,
          start: 97000,
          end: 103000,
        }),
        bucketSize: 20,
        startDate: 97000,
        endDate: 103000,
      })
    );
  });
});

const createMockApolloProvider = (mockClient: any): React.FunctionComponent => ({ children }) => (
  <ApolloClientContext.Provider value={mockClient}>{children}</ApolloClientContext.Provider>
);

const createMockResponse = (
  buckets: Array<{ start: number; end: number; entriesCount: number }>
) => ({ data: { source: { logSummaryBetween: { buckets } } } });

const createSyncMockPromise = <Value extends any>(value: Value) => ({
  then: (callback: (value: Value) => any) => callback(value),
});
) => ({ data: { buckets, start: Number.NEGATIVE_INFINITY, end: Number.POSITIVE_INFINITY } });
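The expected values in these tests encode the buffering arithmetic of `useLogSummaryBufferInterval`, whose source is not part of this diff. Judging purely from the fixtures, the loaded buffer appears to span 1.5x the interval size on either side of the midpoint and to use one hundredth of the interval as the bucket size. A hypothetical helper capturing that inference:

```typescript
// Inferred from the test expectations above; hypothetical, for illustration only.
const bufferFor = (midpointTime: number, intervalSize: number) => ({
  start: midpointTime - 1.5 * intervalSize, // (100000, 1000) -> 98500
  end: midpointTime + 1.5 * intervalSize, // (100000, 1000) -> 101500
  bucketSize: intervalSize / 100, // 1000 -> 10, 2000 -> 20
});
```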
@@ -6,14 +6,12 @@

import { useState } from 'react';

import { LogSummary as LogSummaryQuery } from '../../../graphql/types';
import { useApolloClient } from '../../../utils/apollo_context';
import { useCancellableEffect } from '../../../utils/cancellable_effect';
import { logSummaryQuery } from './log_summary.gql_query';
import { useLogSummaryBufferInterval } from './use_log_summary_buffer_interval';
import { fetchLogSummary } from './api/fetch_log_summary';
import { LogEntriesSummaryResponse } from '../../../../common/http_api';

export type LogSummaryBetween = LogSummaryQuery.Query['source']['logSummaryBetween'];
export type LogSummaryBuckets = LogSummaryBetween['buckets'];
export type LogSummaryBuckets = LogEntriesSummaryResponse['data']['buckets'];

export const useLogSummary = (
  sourceId: string,

@@ -21,9 +19,7 @@ export const useLogSummary = (
  intervalSize: number,
  filterQuery: string | null
) => {
  const [logSummaryBetween, setLogSummaryBetween] = useState<LogSummaryBetween>({ buckets: [] });
  const apolloClient = useApolloClient();

  const [logSummaryBuckets, setLogSummaryBuckets] = useState<LogSummaryBuckets>([]);
  const { start: bufferStart, end: bufferEnd, bucketSize } = useLogSummaryBufferInterval(
    midpointTime,
    intervalSize

@@ -31,33 +27,27 @@

  useCancellableEffect(
    getIsCancelled => {
      if (!apolloClient || bufferStart === null || bufferEnd === null) {
      if (bufferStart === null || bufferEnd === null) {
        return;
      }

      apolloClient
        .query<LogSummaryQuery.Query, LogSummaryQuery.Variables>({
          fetchPolicy: 'no-cache',
          query: logSummaryQuery,
          variables: {
            filterQuery,
            sourceId,
            start: bufferStart,
            end: bufferEnd,
            bucketSize,
          },
        })
        .then(response => {
          if (!getIsCancelled()) {
            setLogSummaryBetween(response.data.source.logSummaryBetween);
          }
        });
      fetchLogSummary({
        sourceId,
        startDate: bufferStart,
        endDate: bufferEnd,
        bucketSize,
        query: filterQuery,
      }).then(response => {
        if (!getIsCancelled()) {
          setLogSummaryBuckets(response.data.buckets);
        }
      });
    },
    [apolloClient, sourceId, filterQuery, bufferStart, bufferEnd, bucketSize]
    [sourceId, filterQuery, bufferStart, bufferEnd, bucketSize]
  );

  return {
    buckets: logSummaryBetween.buckets,
    buckets: logSummaryBuckets,
    start: bufferStart,
    end: bufferEnd,
  };
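The hook's public shape is unchanged by the migration: consumers still receive `buckets`, `start`, and `end`. A hypothetical consumer (the component and prop names are illustrative, not from this commit):

```tsx
// Hypothetical consumer of the hook; component and prop names are illustrative.
import React from 'react';
import { useLogSummary } from './log_summary';

export const LogSummaryList: React.FC<{ sourceId: string; midpointTime: number }> = ({
  sourceId,
  midpointTime,
}) => {
  // Re-fetches whenever the source, midpoint, interval size, or filter changes.
  const { buckets } = useLogSummary(sourceId, midpointTime, 1000 * 60, null);

  return (
    <ul>
      {buckets.map(bucket => (
        <li key={bucket.start}>
          {bucket.start} to {bucket.end}: {bucket.entriesCount} entries
        </li>
      ))}
    </ul>
  );
};
```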
@@ -286,134 +286,6 @@
            "isDeprecated": false,
            "deprecationReason": null
          },
          {
            "name": "logSummaryBetween",
            "description": "A consecutive span of summary buckets within an interval",
            "args": [
              {
                "name": "start",
                "description": "The millisecond timestamp that corresponds to the start of the interval",
                "type": {
                  "kind": "NON_NULL",
                  "name": null,
                  "ofType": { "kind": "SCALAR", "name": "Float", "ofType": null }
                },
                "defaultValue": null
              },
              {
                "name": "end",
                "description": "The millisecond timestamp that corresponds to the end of the interval",
                "type": {
                  "kind": "NON_NULL",
                  "name": null,
                  "ofType": { "kind": "SCALAR", "name": "Float", "ofType": null }
                },
                "defaultValue": null
              },
              {
                "name": "bucketSize",
                "description": "The size of each bucket in milliseconds",
                "type": {
                  "kind": "NON_NULL",
                  "name": null,
                  "ofType": { "kind": "SCALAR", "name": "Float", "ofType": null }
                },
                "defaultValue": null
              },
              {
                "name": "filterQuery",
                "description": "The query to filter the log entries by",
                "type": { "kind": "SCALAR", "name": "String", "ofType": null },
                "defaultValue": null
              }
            ],
            "type": {
              "kind": "NON_NULL",
              "name": null,
              "ofType": { "kind": "OBJECT", "name": "InfraLogSummaryInterval", "ofType": null }
            },
            "isDeprecated": false,
            "deprecationReason": null
          },
          {
            "name": "logSummaryHighlightsBetween",
            "description": "Spans of summary highlight buckets within an interval",
            "args": [
              {
                "name": "start",
                "description": "The millisecond timestamp that corresponds to the start of the interval",
                "type": {
                  "kind": "NON_NULL",
                  "name": null,
                  "ofType": { "kind": "SCALAR", "name": "Float", "ofType": null }
                },
                "defaultValue": null
              },
              {
                "name": "end",
                "description": "The millisecond timestamp that corresponds to the end of the interval",
                "type": {
                  "kind": "NON_NULL",
                  "name": null,
                  "ofType": { "kind": "SCALAR", "name": "Float", "ofType": null }
                },
                "defaultValue": null
              },
              {
                "name": "bucketSize",
                "description": "The size of each bucket in milliseconds",
                "type": {
                  "kind": "NON_NULL",
                  "name": null,
                  "ofType": { "kind": "SCALAR", "name": "Float", "ofType": null }
                },
                "defaultValue": null
              },
              {
                "name": "filterQuery",
                "description": "The query to filter the log entries by",
                "type": { "kind": "SCALAR", "name": "String", "ofType": null },
                "defaultValue": null
              },
              {
                "name": "highlightQueries",
                "description": "The highlighting to apply to the log entries",
                "type": {
                  "kind": "NON_NULL",
                  "name": null,
                  "ofType": {
                    "kind": "LIST",
                    "name": null,
                    "ofType": {
                      "kind": "NON_NULL",
                      "name": null,
                      "ofType": { "kind": "SCALAR", "name": "String", "ofType": null }
                    }
                  }
                },
                "defaultValue": null
              }
            ],
            "type": {
              "kind": "NON_NULL",
              "name": null,
              "ofType": {
                "kind": "LIST",
                "name": null,
                "ofType": {
                  "kind": "NON_NULL",
                  "name": null,
                  "ofType": {
                    "kind": "OBJECT",
                    "name": "InfraLogSummaryHighlightInterval",
                    "ofType": null
                  }
                }
              }
            },
            "isDeprecated": false,
            "deprecationReason": null
          },
          {
            "name": "logItem",
            "description": "",

@@ -1665,234 +1537,6 @@
      "enumValues": null,
      "possibleTypes": null
    },
    {
      "kind": "OBJECT",
      "name": "InfraLogSummaryInterval",
      "description": "A consecutive sequence of log summary buckets",
      "fields": [
        {
          "name": "start",
          "description": "The millisecond timestamp corresponding to the start of the interval covered by the summary",
          "args": [],
          "type": { "kind": "SCALAR", "name": "Float", "ofType": null },
          "isDeprecated": false,
          "deprecationReason": null
        },
        {
          "name": "end",
          "description": "The millisecond timestamp corresponding to the end of the interval covered by the summary",
          "args": [],
          "type": { "kind": "SCALAR", "name": "Float", "ofType": null },
          "isDeprecated": false,
          "deprecationReason": null
        },
        {
          "name": "filterQuery",
          "description": "The query the log entries were filtered by",
          "args": [],
          "type": { "kind": "SCALAR", "name": "String", "ofType": null },
          "isDeprecated": false,
          "deprecationReason": null
        },
        {
          "name": "buckets",
          "description": "A list of the log entries",
          "args": [],
          "type": {
            "kind": "NON_NULL",
            "name": null,
            "ofType": {
              "kind": "LIST",
              "name": null,
              "ofType": {
                "kind": "NON_NULL",
                "name": null,
                "ofType": { "kind": "OBJECT", "name": "InfraLogSummaryBucket", "ofType": null }
              }
            }
          },
          "isDeprecated": false,
          "deprecationReason": null
        }
      ],
      "inputFields": null,
      "interfaces": [],
      "enumValues": null,
      "possibleTypes": null
    },
    {
      "kind": "OBJECT",
      "name": "InfraLogSummaryBucket",
      "description": "A log summary bucket",
      "fields": [
        {
          "name": "start",
          "description": "The start timestamp of the bucket",
          "args": [],
          "type": {
            "kind": "NON_NULL",
            "name": null,
            "ofType": { "kind": "SCALAR", "name": "Float", "ofType": null }
          },
          "isDeprecated": false,
          "deprecationReason": null
        },
        {
          "name": "end",
          "description": "The end timestamp of the bucket",
          "args": [],
          "type": {
            "kind": "NON_NULL",
            "name": null,
            "ofType": { "kind": "SCALAR", "name": "Float", "ofType": null }
          },
          "isDeprecated": false,
          "deprecationReason": null
        },
        {
          "name": "entriesCount",
          "description": "The number of entries inside the bucket",
          "args": [],
          "type": {
            "kind": "NON_NULL",
            "name": null,
            "ofType": { "kind": "SCALAR", "name": "Int", "ofType": null }
          },
          "isDeprecated": false,
          "deprecationReason": null
        }
      ],
      "inputFields": null,
      "interfaces": [],
      "enumValues": null,
      "possibleTypes": null
    },
    {
      "kind": "OBJECT",
      "name": "InfraLogSummaryHighlightInterval",
      "description": "A consecutive sequence of log summary highlight buckets",
      "fields": [
        {
          "name": "start",
          "description": "The millisecond timestamp corresponding to the start of the interval covered by the summary",
          "args": [],
          "type": { "kind": "SCALAR", "name": "Float", "ofType": null },
          "isDeprecated": false,
          "deprecationReason": null
        },
        {
          "name": "end",
          "description": "The millisecond timestamp corresponding to the end of the interval covered by the summary",
          "args": [],
          "type": { "kind": "SCALAR", "name": "Float", "ofType": null },
          "isDeprecated": false,
          "deprecationReason": null
        },
        {
          "name": "filterQuery",
          "description": "The query the log entries were filtered by",
          "args": [],
          "type": { "kind": "SCALAR", "name": "String", "ofType": null },
          "isDeprecated": false,
          "deprecationReason": null
        },
        {
          "name": "highlightQuery",
          "description": "The query the log entries were highlighted with",
          "args": [],
          "type": { "kind": "SCALAR", "name": "String", "ofType": null },
          "isDeprecated": false,
          "deprecationReason": null
        },
        {
          "name": "buckets",
          "description": "A list of the log entries",
          "args": [],
          "type": {
            "kind": "NON_NULL",
            "name": null,
            "ofType": {
              "kind": "LIST",
              "name": null,
              "ofType": {
                "kind": "NON_NULL",
                "name": null,
                "ofType": {
                  "kind": "OBJECT",
                  "name": "InfraLogSummaryHighlightBucket",
                  "ofType": null
                }
              }
            }
          },
          "isDeprecated": false,
          "deprecationReason": null
        }
      ],
      "inputFields": null,
      "interfaces": [],
      "enumValues": null,
      "possibleTypes": null
    },
    {
      "kind": "OBJECT",
      "name": "InfraLogSummaryHighlightBucket",
      "description": "A log summary highlight bucket",
      "fields": [
        {
          "name": "start",
          "description": "The start timestamp of the bucket",
          "args": [],
          "type": {
            "kind": "NON_NULL",
            "name": null,
            "ofType": { "kind": "SCALAR", "name": "Float", "ofType": null }
          },
          "isDeprecated": false,
          "deprecationReason": null
        },
        {
          "name": "end",
          "description": "The end timestamp of the bucket",
          "args": [],
          "type": {
            "kind": "NON_NULL",
            "name": null,
            "ofType": { "kind": "SCALAR", "name": "Float", "ofType": null }
          },
          "isDeprecated": false,
          "deprecationReason": null
        },
        {
          "name": "entriesCount",
          "description": "The number of highlighted entries inside the bucket",
          "args": [],
          "type": {
            "kind": "NON_NULL",
            "name": null,
            "ofType": { "kind": "SCALAR", "name": "Int", "ofType": null }
          },
          "isDeprecated": false,
          "deprecationReason": null
        },
        {
          "name": "representativeKey",
          "description": "The time key of a representative of the highlighted log entries in this bucket",
          "args": [],
          "type": {
            "kind": "NON_NULL",
            "name": null,
            "ofType": { "kind": "OBJECT", "name": "InfraTimeKey", "ofType": null }
          },
          "isDeprecated": false,
          "deprecationReason": null
        }
      ],
      "inputFields": null,
      "interfaces": [],
      "enumValues": null,
      "possibleTypes": null
    },
    {
      "kind": "OBJECT",
      "name": "InfraLogItem",
@@ -36,10 +36,6 @@ export interface InfraSource
  logEntriesBetween: InfraLogEntryInterval;
  /** Sequences of log entries matching sets of highlighting queries within an interval */
  logEntryHighlights: InfraLogEntryInterval[];
  /** A consecutive span of summary buckets within an interval */
  logSummaryBetween: InfraLogSummaryInterval;
  /** Spans of summary highlight buckets within an interval */
  logSummaryHighlightsBetween: InfraLogSummaryHighlightInterval[];

  logItem: InfraLogItem;
  /** A snapshot of nodes */

@@ -210,50 +206,6 @@ export interface InfraLogEntryFieldColumn
  /** A list of highlighted substrings of the value */
  highlights: string[];
}
/** A consecutive sequence of log summary buckets */
export interface InfraLogSummaryInterval {
  /** The millisecond timestamp corresponding to the start of the interval covered by the summary */
  start?: number | null;
  /** The millisecond timestamp corresponding to the end of the interval covered by the summary */
  end?: number | null;
  /** The query the log entries were filtered by */
  filterQuery?: string | null;
  /** A list of the log entries */
  buckets: InfraLogSummaryBucket[];
}
/** A log summary bucket */
export interface InfraLogSummaryBucket {
  /** The start timestamp of the bucket */
  start: number;
  /** The end timestamp of the bucket */
  end: number;
  /** The number of entries inside the bucket */
  entriesCount: number;
}
/** A consecutive sequence of log summary highlight buckets */
export interface InfraLogSummaryHighlightInterval {
  /** The millisecond timestamp corresponding to the start of the interval covered by the summary */
  start?: number | null;
  /** The millisecond timestamp corresponding to the end of the interval covered by the summary */
  end?: number | null;
  /** The query the log entries were filtered by */
  filterQuery?: string | null;
  /** The query the log entries were highlighted with */
  highlightQuery?: string | null;
  /** A list of the log entries */
  buckets: InfraLogSummaryHighlightBucket[];
}
/** A log summary highlight bucket */
export interface InfraLogSummaryHighlightBucket {
  /** The start timestamp of the bucket */
  start: number;
  /** The end timestamp of the bucket */
  end: number;
  /** The number of highlighted entries inside the bucket */
  entriesCount: number;
  /** The time key of a representative of the highlighted log entries in this bucket */
  representativeKey: InfraTimeKey;
}

export interface InfraLogItem {
  /** The ID of the document */

@@ -474,28 +426,6 @@ export interface LogEntryHighlightsInfraSourceArgs
  /** The highlighting to apply to the log entries */
  highlights: InfraLogEntryHighlightInput[];
}
export interface LogSummaryBetweenInfraSourceArgs {
  /** The millisecond timestamp that corresponds to the start of the interval */
  start: number;
  /** The millisecond timestamp that corresponds to the end of the interval */
  end: number;
  /** The size of each bucket in milliseconds */
  bucketSize: number;
  /** The query to filter the log entries by */
  filterQuery?: string | null;
}
export interface LogSummaryHighlightsBetweenInfraSourceArgs {
  /** The millisecond timestamp that corresponds to the start of the interval */
  start: number;
  /** The millisecond timestamp that corresponds to the end of the interval */
  end: number;
  /** The size of each bucket in milliseconds */
  bucketSize: number;
  /** The query to filter the log entries by */
  filterQuery?: string | null;
  /** The highlighting to apply to the log entries */
  highlightQueries: string[];
}
export interface LogItemInfraSourceArgs {
  id: string;
}

@@ -755,99 +685,6 @@ export namespace LogEntryHighlightsQuery
  export type Entries = InfraLogEntryHighlightFields.Fragment;
}

export namespace LogSummaryHighlightsQuery {
  export type Variables = {
    sourceId?: string | null;
    start: number;
    end: number;
    bucketSize: number;
    highlightQueries: string[];
    filterQuery?: string | null;
  };

  export type Query = {
    __typename?: 'Query';

    source: Source;
  };

  export type Source = {
    __typename?: 'InfraSource';

    id: string;

    logSummaryHighlightsBetween: LogSummaryHighlightsBetween[];
  };

  export type LogSummaryHighlightsBetween = {
    __typename?: 'InfraLogSummaryHighlightInterval';

    start?: number | null;

    end?: number | null;

    buckets: Buckets[];
  };

  export type Buckets = {
    __typename?: 'InfraLogSummaryHighlightBucket';

    start: number;

    end: number;

    entriesCount: number;

    representativeKey: RepresentativeKey;
  };

  export type RepresentativeKey = InfraTimeKeyFields.Fragment;
}

export namespace LogSummary {
  export type Variables = {
    sourceId?: string | null;
    start: number;
    end: number;
    bucketSize: number;
    filterQuery?: string | null;
  };

  export type Query = {
    __typename?: 'Query';

    source: Source;
  };

  export type Source = {
    __typename?: 'InfraSource';

    id: string;

    logSummaryBetween: LogSummaryBetween;
  };

  export type LogSummaryBetween = {
    __typename?: 'InfraLogSummaryInterval';

    start?: number | null;

    end?: number | null;

    buckets: Buckets[];
  };

  export type Buckets = {
    __typename?: 'InfraLogSummaryBucket';

    start: number;

    end: number;

    entriesCount: number;
  };
}

export namespace MetricsQuery {
  export type Variables = {
    sourceId: string;
@@ -21,7 +21,6 @@ import {
} from '../../graphql/types';
import { InfraLogEntriesDomain } from '../../lib/domains/log_entries_domain';
import { SourceConfigurationRuntimeType } from '../../lib/sources';
import { UsageCollector } from '../../usage/usage_collector';
import { parseFilterQuery } from '../../utils/serialized_query';
import { ChildResolverOf, InfraResolverOf } from '../../utils/typed_resolvers';
import { QuerySourceResolver } from '../sources/resolvers';

@@ -41,16 +40,6 @@ export type InfraSourceLogEntryHighlightsResolver = ChildResolverOf<
  QuerySourceResolver
>;

export type InfraSourceLogSummaryBetweenResolver = ChildResolverOf<
  InfraResolverOf<InfraSourceResolvers.LogSummaryBetweenResolver>,
  QuerySourceResolver
>;

export type InfraSourceLogSummaryHighlightsBetweenResolver = ChildResolverOf<
  InfraResolverOf<InfraSourceResolvers.LogSummaryHighlightsBetweenResolver>,
  QuerySourceResolver
>;

export type InfraSourceLogItem = ChildResolverOf<
  InfraResolverOf<InfraSourceResolvers.LogItemResolver>,
  QuerySourceResolver

@@ -63,8 +52,6 @@ export const createLogEntriesResolvers = (libs: {
    logEntriesAround: InfraSourceLogEntriesAroundResolver;
    logEntriesBetween: InfraSourceLogEntriesBetweenResolver;
    logEntryHighlights: InfraSourceLogEntryHighlightsResolver;
    logSummaryBetween: InfraSourceLogSummaryBetweenResolver;
    logSummaryHighlightsBetween: InfraSourceLogSummaryHighlightsBetweenResolver;
    logItem: InfraSourceLogItem;
  };
  InfraLogEntryColumn: {

@@ -150,40 +137,6 @@ export const createLogEntriesResolvers = (libs: {
        entries,
      }));
    },
    async logSummaryBetween(source, args, { req }) {
      UsageCollector.countLogs();
      const buckets = await libs.logEntries.getLogSummaryBucketsBetween(
        req,
        source.id,
        args.start,
        args.end,
        args.bucketSize,
        parseFilterQuery(args.filterQuery)
      );

      return {
        start: buckets.length > 0 ? buckets[0].start : null,
        end: buckets.length > 0 ? buckets[buckets.length - 1].end : null,
        buckets,
      };
    },
    async logSummaryHighlightsBetween(source, args, { req }) {
      const summaryHighlightSets = await libs.logEntries.getLogSummaryHighlightBucketsBetween(
        req,
        source.id,
        args.start,
        args.end,
        args.bucketSize,
        args.highlightQueries.filter(highlightQuery => !!highlightQuery),
        parseFilterQuery(args.filterQuery)
      );

      return summaryHighlightSets.map(buckets => ({
        start: buckets.length > 0 ? buckets[0].start : null,
        end: buckets.length > 0 ? buckets[buckets.length - 1].end : null,
        buckets,
      }));
    },
    async logItem(source, args, { req }) {
      const sourceConfiguration = pipe(
        SourceConfigurationRuntimeType.decode(source.configuration),
@@ -82,28 +82,6 @@ export const logEntriesSchema = gql`
    countAfter: Int!
  }

  "A log summary bucket"
  type InfraLogSummaryBucket {
    "The start timestamp of the bucket"
    start: Float!
    "The end timestamp of the bucket"
    end: Float!
    "The number of entries inside the bucket"
    entriesCount: Int!
  }

  "A log summary highlight bucket"
  type InfraLogSummaryHighlightBucket {
    "The start timestamp of the bucket"
    start: Float!
    "The end timestamp of the bucket"
    end: Float!
    "The number of highlighted entries inside the bucket"
    entriesCount: Int!
    "The time key of a representative of the highlighted log entries in this bucket"
    representativeKey: InfraTimeKey!
  }

  "A consecutive sequence of log entries"
  type InfraLogEntryInterval {
    "The key corresponding to the start of the interval covered by the entries"
@@ -122,32 +100,6 @@ export const logEntriesSchema = gql`
    entries: [InfraLogEntry!]!
  }

  "A consecutive sequence of log summary buckets"
  type InfraLogSummaryInterval {
    "The millisecond timestamp corresponding to the start of the interval covered by the summary"
    start: Float
    "The millisecond timestamp corresponding to the end of the interval covered by the summary"
    end: Float
    "The query the log entries were filtered by"
    filterQuery: String
    "A list of the log entries"
    buckets: [InfraLogSummaryBucket!]!
  }

  "A consecutive sequence of log summary highlight buckets"
  type InfraLogSummaryHighlightInterval {
    "The millisecond timestamp corresponding to the start of the interval covered by the summary"
    start: Float
    "The millisecond timestamp corresponding to the end of the interval covered by the summary"
    end: Float
    "The query the log entries were filtered by"
    filterQuery: String
    "The query the log entries were highlighted with"
    highlightQuery: String
    "A list of the log entries"
    buckets: [InfraLogSummaryHighlightBucket!]!
  }

  type InfraLogItemField {
    "The flattened field name"
    field: String!
@@ -198,30 +150,6 @@ export const logEntriesSchema = gql`
      "The highlighting to apply to the log entries"
      highlights: [InfraLogEntryHighlightInput!]!
    ): [InfraLogEntryInterval!]!
    "A consecutive span of summary buckets within an interval"
    logSummaryBetween(
      "The millisecond timestamp that corresponds to the start of the interval"
      start: Float!
      "The millisecond timestamp that corresponds to the end of the interval"
      end: Float!
      "The size of each bucket in milliseconds"
      bucketSize: Float!
      "The query to filter the log entries by"
      filterQuery: String
    ): InfraLogSummaryInterval!
    "Spans of summary highlight buckets within an interval"
    logSummaryHighlightsBetween(
      "The millisecond timestamp that corresponds to the start of the interval"
      start: Float!
      "The millisecond timestamp that corresponds to the end of the interval"
      end: Float!
      "The size of each bucket in milliseconds"
      bucketSize: Float!
      "The query to filter the log entries by"
      filterQuery: String
      "The highlighting to apply to the log entries"
      highlightQueries: [String!]!
    ): [InfraLogSummaryHighlightInterval!]!
    logItem(id: ID!): InfraLogItem!
  }
`;
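Note: with the schema fields above removed, the same data now travels over HTTP. A sketch of the request and response shapes of the replacement summary endpoint, inferred from the route handler later in this diff; the concrete path string and all values are illustrative assumptions, not taken from this commit.

// Illustrative request/response for the HTTP summary endpoint; shapes inferred
// from the route handler below, values and path are assumptions.
const exampleRequest = {
  sourceId: 'default',
  startDate: 1539805342000, // millisecond timestamp, like the former GraphQL `start` arg
  endDate: 1539806241611, // millisecond timestamp, like the former GraphQL `end` arg
  bucketSize: 90000, // bucket width in milliseconds
  query: null, // serialized filter query, if any
};

const exampleResponse = {
  data: {
    start: 1539805342000,
    end: 1539806241611,
    buckets: [{ start: 1539805342000, end: 1539805432000, entriesCount: 42 }],
  },
};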
@@ -62,10 +62,6 @@ export interface InfraSource {
  logEntriesBetween: InfraLogEntryInterval;
  /** Sequences of log entries matching sets of highlighting queries within an interval */
  logEntryHighlights: InfraLogEntryInterval[];
  /** A consecutive span of summary buckets within an interval */
  logSummaryBetween: InfraLogSummaryInterval;
  /** Spans of summary highlight buckets within an interval */
  logSummaryHighlightsBetween: InfraLogSummaryHighlightInterval[];

  logItem: InfraLogItem;
  /** A snapshot of nodes */
@@ -236,50 +232,6 @@ export interface InfraLogEntryFieldColumn {
  /** A list of highlighted substrings of the value */
  highlights: string[];
}
/** A consecutive sequence of log summary buckets */
export interface InfraLogSummaryInterval {
  /** The millisecond timestamp corresponding to the start of the interval covered by the summary */
  start?: number | null;
  /** The millisecond timestamp corresponding to the end of the interval covered by the summary */
  end?: number | null;
  /** The query the log entries were filtered by */
  filterQuery?: string | null;
  /** A list of the log entries */
  buckets: InfraLogSummaryBucket[];
}
/** A log summary bucket */
export interface InfraLogSummaryBucket {
  /** The start timestamp of the bucket */
  start: number;
  /** The end timestamp of the bucket */
  end: number;
  /** The number of entries inside the bucket */
  entriesCount: number;
}
/** A consecutive sequence of log summary highlight buckets */
export interface InfraLogSummaryHighlightInterval {
  /** The millisecond timestamp corresponding to the start of the interval covered by the summary */
  start?: number | null;
  /** The millisecond timestamp corresponding to the end of the interval covered by the summary */
  end?: number | null;
  /** The query the log entries were filtered by */
  filterQuery?: string | null;
  /** The query the log entries were highlighted with */
  highlightQuery?: string | null;
  /** A list of the log entries */
  buckets: InfraLogSummaryHighlightBucket[];
}
/** A log summary highlight bucket */
export interface InfraLogSummaryHighlightBucket {
  /** The start timestamp of the bucket */
  start: number;
  /** The end timestamp of the bucket */
  end: number;
  /** The number of highlighted entries inside the bucket */
  entriesCount: number;
  /** The time key of a representative of the highlighted log entries in this bucket */
  representativeKey: InfraTimeKey;
}

export interface InfraLogItem {
  /** The ID of the document */
@@ -500,28 +452,6 @@ export interface LogEntryHighlightsInfraSourceArgs {
  /** The highlighting to apply to the log entries */
  highlights: InfraLogEntryHighlightInput[];
}
export interface LogSummaryBetweenInfraSourceArgs {
  /** The millisecond timestamp that corresponds to the start of the interval */
  start: number;
  /** The millisecond timestamp that corresponds to the end of the interval */
  end: number;
  /** The size of each bucket in milliseconds */
  bucketSize: number;
  /** The query to filter the log entries by */
  filterQuery?: string | null;
}
export interface LogSummaryHighlightsBetweenInfraSourceArgs {
  /** The millisecond timestamp that corresponds to the start of the interval */
  start: number;
  /** The millisecond timestamp that corresponds to the end of the interval */
  end: number;
  /** The size of each bucket in milliseconds */
  bucketSize: number;
  /** The query to filter the log entries by */
  filterQuery?: string | null;
  /** The highlighting to apply to the log entries */
  highlightQueries: string[];
}
export interface LogItemInfraSourceArgs {
  id: string;
}
@@ -744,14 +674,6 @@ export namespace InfraSourceResolvers {
    logEntriesBetween?: LogEntriesBetweenResolver<InfraLogEntryInterval, TypeParent, Context>;
    /** Sequences of log entries matching sets of highlighting queries within an interval */
    logEntryHighlights?: LogEntryHighlightsResolver<InfraLogEntryInterval[], TypeParent, Context>;
    /** A consecutive span of summary buckets within an interval */
    logSummaryBetween?: LogSummaryBetweenResolver<InfraLogSummaryInterval, TypeParent, Context>;
    /** Spans of summary highlight buckets within an interval */
    logSummaryHighlightsBetween?: LogSummaryHighlightsBetweenResolver<
      InfraLogSummaryHighlightInterval[],
      TypeParent,
      Context
    >;

    logItem?: LogItemResolver<InfraLogItem, TypeParent, Context>;
    /** A snapshot of nodes */
@@ -836,40 +758,6 @@ export namespace InfraSourceResolvers {
    highlights: InfraLogEntryHighlightInput[];
  }

  export type LogSummaryBetweenResolver<
    R = InfraLogSummaryInterval,
    Parent = InfraSource,
    Context = InfraContext
  > = Resolver<R, Parent, Context, LogSummaryBetweenArgs>;
  export interface LogSummaryBetweenArgs {
    /** The millisecond timestamp that corresponds to the start of the interval */
    start: number;
    /** The millisecond timestamp that corresponds to the end of the interval */
    end: number;
    /** The size of each bucket in milliseconds */
    bucketSize: number;
    /** The query to filter the log entries by */
    filterQuery?: string | null;
  }

  export type LogSummaryHighlightsBetweenResolver<
    R = InfraLogSummaryHighlightInterval[],
    Parent = InfraSource,
    Context = InfraContext
  > = Resolver<R, Parent, Context, LogSummaryHighlightsBetweenArgs>;
  export interface LogSummaryHighlightsBetweenArgs {
    /** The millisecond timestamp that corresponds to the start of the interval */
    start: number;
    /** The millisecond timestamp that corresponds to the end of the interval */
    end: number;
    /** The size of each bucket in milliseconds */
    bucketSize: number;
    /** The query to filter the log entries by */
    filterQuery?: string | null;
    /** The highlighting to apply to the log entries */
    highlightQueries: string[];
  }

  export type LogItemResolver<
    R = InfraLogItem,
    Parent = InfraSource,
@@ -1422,145 +1310,6 @@ export namespace InfraLogEntryFieldColumnResolvers {
    Context = InfraContext
  > = Resolver<R, Parent, Context>;
}
/** A consecutive sequence of log summary buckets */
export namespace InfraLogSummaryIntervalResolvers {
  export interface Resolvers<Context = InfraContext, TypeParent = InfraLogSummaryInterval> {
    /** The millisecond timestamp corresponding to the start of the interval covered by the summary */
    start?: StartResolver<number | null, TypeParent, Context>;
    /** The millisecond timestamp corresponding to the end of the interval covered by the summary */
    end?: EndResolver<number | null, TypeParent, Context>;
    /** The query the log entries were filtered by */
    filterQuery?: FilterQueryResolver<string | null, TypeParent, Context>;
    /** A list of the log entries */
    buckets?: BucketsResolver<InfraLogSummaryBucket[], TypeParent, Context>;
  }

  export type StartResolver<
    R = number | null,
    Parent = InfraLogSummaryInterval,
    Context = InfraContext
  > = Resolver<R, Parent, Context>;
  export type EndResolver<
    R = number | null,
    Parent = InfraLogSummaryInterval,
    Context = InfraContext
  > = Resolver<R, Parent, Context>;
  export type FilterQueryResolver<
    R = string | null,
    Parent = InfraLogSummaryInterval,
    Context = InfraContext
  > = Resolver<R, Parent, Context>;
  export type BucketsResolver<
    R = InfraLogSummaryBucket[],
    Parent = InfraLogSummaryInterval,
    Context = InfraContext
  > = Resolver<R, Parent, Context>;
}
/** A log summary bucket */
export namespace InfraLogSummaryBucketResolvers {
  export interface Resolvers<Context = InfraContext, TypeParent = InfraLogSummaryBucket> {
    /** The start timestamp of the bucket */
    start?: StartResolver<number, TypeParent, Context>;
    /** The end timestamp of the bucket */
    end?: EndResolver<number, TypeParent, Context>;
    /** The number of entries inside the bucket */
    entriesCount?: EntriesCountResolver<number, TypeParent, Context>;
  }

  export type StartResolver<
    R = number,
    Parent = InfraLogSummaryBucket,
    Context = InfraContext
  > = Resolver<R, Parent, Context>;
  export type EndResolver<
    R = number,
    Parent = InfraLogSummaryBucket,
    Context = InfraContext
  > = Resolver<R, Parent, Context>;
  export type EntriesCountResolver<
    R = number,
    Parent = InfraLogSummaryBucket,
    Context = InfraContext
  > = Resolver<R, Parent, Context>;
}
/** A consecutive sequence of log summary highlight buckets */
export namespace InfraLogSummaryHighlightIntervalResolvers {
  export interface Resolvers<
    Context = InfraContext,
    TypeParent = InfraLogSummaryHighlightInterval
  > {
    /** The millisecond timestamp corresponding to the start of the interval covered by the summary */
    start?: StartResolver<number | null, TypeParent, Context>;
    /** The millisecond timestamp corresponding to the end of the interval covered by the summary */
    end?: EndResolver<number | null, TypeParent, Context>;
    /** The query the log entries were filtered by */
    filterQuery?: FilterQueryResolver<string | null, TypeParent, Context>;
    /** The query the log entries were highlighted with */
    highlightQuery?: HighlightQueryResolver<string | null, TypeParent, Context>;
    /** A list of the log entries */
    buckets?: BucketsResolver<InfraLogSummaryHighlightBucket[], TypeParent, Context>;
  }

  export type StartResolver<
    R = number | null,
    Parent = InfraLogSummaryHighlightInterval,
    Context = InfraContext
  > = Resolver<R, Parent, Context>;
  export type EndResolver<
    R = number | null,
    Parent = InfraLogSummaryHighlightInterval,
    Context = InfraContext
  > = Resolver<R, Parent, Context>;
  export type FilterQueryResolver<
    R = string | null,
    Parent = InfraLogSummaryHighlightInterval,
    Context = InfraContext
  > = Resolver<R, Parent, Context>;
  export type HighlightQueryResolver<
    R = string | null,
    Parent = InfraLogSummaryHighlightInterval,
    Context = InfraContext
  > = Resolver<R, Parent, Context>;
  export type BucketsResolver<
    R = InfraLogSummaryHighlightBucket[],
    Parent = InfraLogSummaryHighlightInterval,
    Context = InfraContext
  > = Resolver<R, Parent, Context>;
}
/** A log summary highlight bucket */
export namespace InfraLogSummaryHighlightBucketResolvers {
  export interface Resolvers<Context = InfraContext, TypeParent = InfraLogSummaryHighlightBucket> {
    /** The start timestamp of the bucket */
    start?: StartResolver<number, TypeParent, Context>;
    /** The end timestamp of the bucket */
    end?: EndResolver<number, TypeParent, Context>;
    /** The number of highlighted entries inside the bucket */
    entriesCount?: EntriesCountResolver<number, TypeParent, Context>;
    /** The time key of a representative of the highlighted log entries in this bucket */
    representativeKey?: RepresentativeKeyResolver<InfraTimeKey, TypeParent, Context>;
  }

  export type StartResolver<
    R = number,
    Parent = InfraLogSummaryHighlightBucket,
    Context = InfraContext
  > = Resolver<R, Parent, Context>;
  export type EndResolver<
    R = number,
    Parent = InfraLogSummaryHighlightBucket,
    Context = InfraContext
  > = Resolver<R, Parent, Context>;
  export type EntriesCountResolver<
    R = number,
    Parent = InfraLogSummaryHighlightBucket,
    Context = InfraContext
  > = Resolver<R, Parent, Context>;
  export type RepresentativeKeyResolver<
    R = InfraTimeKey,
    Parent = InfraLogSummaryHighlightBucket,
    Context = InfraContext
  > = Resolver<R, Parent, Context>;
}

export namespace InfraLogItemResolvers {
  export interface Resolvers<Context = InfraContext, TypeParent = InfraLogItem> {
@@ -19,6 +19,10 @@ import { initMetricExplorerRoute } from './routes/metrics_explorer';
import { initMetadataRoute } from './routes/metadata';
import { initSnapshotRoute } from './routes/snapshot';
import { initNodeDetailsRoute } from './routes/node_details';
import {
  initLogEntriesSummaryRoute,
  initLogEntriesSummaryHighlightsRoute,
} from './routes/log_entries';
import { initInventoryMetaRoute } from './routes/inventory_metadata';

export const initInfraServer = (libs: InfraBackendLibs) => {
@@ -38,6 +42,8 @@ export const initInfraServer = (libs: InfraBackendLibs) => {
  initSnapshotRoute(libs);
  initNodeDetailsRoute(libs);
  initValidateLogAnalysisIndicesRoute(libs);
  initLogEntriesSummaryRoute(libs);
  initLogEntriesSummaryHighlightsRoute(libs);
  initMetricExplorerRoute(libs);
  initMetadataRoute(libs);
  initInventoryMetaRoute(libs);
@@ -11,12 +11,10 @@ import { RequestHandlerContext } from 'src/core/server';
import { TimeKey } from '../../../../common/time';
import { JsonObject } from '../../../../common/typed_json';
import {
  InfraLogEntry,
  InfraLogItem,
  InfraLogMessageSegment,
  InfraLogSummaryBucket,
  InfraLogSummaryHighlightBucket,
} from '../../../graphql/types';
  LogEntriesSummaryBucket,
  LogEntriesSummaryHighlightsBucket,
} from '../../../../common/http_api';
import { InfraLogEntry, InfraLogItem, InfraLogMessageSegment } from '../../../graphql/types';
import {
  InfraSourceConfiguration,
  InfraSources,
@@ -218,7 +216,7 @@ export class InfraLogEntriesDomain {
    end: number,
    bucketSize: number,
    filterQuery?: LogEntryQuery
  ): Promise<InfraLogSummaryBucket[]> {
  ): Promise<LogEntriesSummaryBucket[]> {
    const { configuration } = await this.libs.sources.getSourceConfiguration(
      requestContext,
      sourceId
@@ -242,7 +240,7 @@ export class InfraLogEntriesDomain {
    bucketSize: number,
    highlightQueries: string[],
    filterQuery?: LogEntryQuery
  ): Promise<InfraLogSummaryHighlightBucket[][]> {
  ): Promise<LogEntriesSummaryHighlightsBucket[][]> {
    const { configuration } = await this.libs.sources.getSourceConfiguration(
      requestContext,
      sourceId
@@ -402,7 +400,7 @@ const logSummaryBucketHasEntries = (bucket: LogSummaryBucket) =>

const convertLogSummaryBucketToSummaryHighlightBucket = (
  bucket: LogSummaryBucket
): InfraLogSummaryHighlightBucket => ({
): LogEntriesSummaryHighlightsBucket => ({
  entriesCount: bucket.entriesCount,
  start: bucket.start,
  end: bucket.end,
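Note: the hunk above cuts off before the converted bucket's representativeKey field. A sketch of the complete conversion, assuming the adapter-level LogSummaryBucket exposes the time keys of its top entries; the topEntryKeys field name is an assumption, not taken from this diff.

// Sketch only: `topEntryKeys` is an assumed field on the adapter's LogSummaryBucket.
const convertLogSummaryBucketToSummaryHighlightBucketSketch = (
  bucket: LogSummaryBucket
): LogEntriesSummaryHighlightsBucket => ({
  entriesCount: bucket.entriesCount,
  start: bucket.start,
  end: bucket.end,
  representativeKey: bucket.topEntryKeys[0], // one highlighted entry stands in for the bucket
});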
@@ -4,10 +4,5 @@
 * you may not use this file except in compliance with the Elastic License.
 */

export interface LogSummaryBucket {
  count: number;
  end: number;
  start: number;
}

export type SummaryBucketSize = 'y' | 'M' | 'w' | 'd' | 'h' | 'm' | 's';
export * from './summary';
export * from './summary_highlights';
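Note: the re-exported ./summary module itself is not shown in this excerpt. A hedged sketch of what it plausibly contains, assembled from the names and fields the routes and tests below use; the exact codec composition and the path value are assumptions.

import * as rt from 'io-ts';

// Assumed path value; only the LOG_ENTRIES_SUMMARY_PATH name appears in this diff.
export const LOG_ENTRIES_SUMMARY_PATH = '/api/log_entries/summary';

export const logEntriesSummaryRequestRT = rt.type({
  sourceId: rt.string,
  startDate: rt.number, // millisecond timestamp (interval start)
  endDate: rt.number, // millisecond timestamp (interval end, exclusive)
  bucketSize: rt.number, // bucket width in milliseconds
  query: rt.union([rt.string, rt.null]), // serialized filter query, if any
});

export const logEntriesSummaryBucketRT = rt.type({
  start: rt.number,
  end: rt.number,
  entriesCount: rt.number,
});

// The response payload is wrapped in a `data` envelope.
export const logEntriesSummaryResponseRT = rt.type({
  data: rt.type({
    start: rt.number,
    end: rt.number,
    buckets: rt.array(logEntriesSummaryBucketRT),
  }),
});

export type LogEntriesSummaryBucket = rt.TypeOf<typeof logEntriesSummaryBucketRT>;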
@@ -0,0 +1,66 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import Boom from 'boom';

import { pipe } from 'fp-ts/lib/pipeable';
import { fold } from 'fp-ts/lib/Either';
import { identity } from 'fp-ts/lib/function';
import { schema } from '@kbn/config-schema';

import { throwErrors } from '../../../common/runtime_types';

import { InfraBackendLibs } from '../../lib/infra_types';
import {
  LOG_ENTRIES_SUMMARY_PATH,
  logEntriesSummaryRequestRT,
  logEntriesSummaryResponseRT,
} from '../../../common/http_api/log_entries';
import { parseFilterQuery } from '../../utils/serialized_query';

const escapeHatch = schema.object({}, { allowUnknowns: true });

export const initLogEntriesSummaryRoute = ({ framework, logEntries }: InfraBackendLibs) => {
  framework.registerRoute(
    {
      method: 'post',
      path: LOG_ENTRIES_SUMMARY_PATH,
      validate: { body: escapeHatch },
    },
    async (requestContext, request, response) => {
      try {
        const payload = pipe(
          logEntriesSummaryRequestRT.decode(request.body),
          fold(throwErrors(Boom.badRequest), identity)
        );
        const { sourceId, startDate, endDate, bucketSize, query } = payload;

        const buckets = await logEntries.getLogSummaryBucketsBetween(
          requestContext,
          sourceId,
          startDate,
          endDate,
          bucketSize,
          parseFilterQuery(query)
        );

        return response.ok({
          body: logEntriesSummaryResponseRT.encode({
            data: {
              start: startDate,
              end: endDate,
              buckets,
            },
          }),
        });
      } catch (error) {
        return response.internalError({
          body: error.message,
        });
      }
    }
  );
};
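Note: a minimal sketch of calling the new route from outside Kibana, assuming the endpoint is mounted at /api/log_entries/summary (the value behind LOG_ENTRIES_SUMMARY_PATH is not shown in this diff) and that a global fetch is available.

const fetchLogSummary = async (kibanaUrl: string) => {
  const response = await fetch(`${kibanaUrl}/api/log_entries/summary`, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      'kbn-xsrf': 'anything', // Kibana rejects state-changing requests without this header
    },
    body: JSON.stringify({
      sourceId: 'default',
      startDate: Date.now() - 15 * 60 * 1000, // last 15 minutes
      endDate: Date.now(),
      bucketSize: 60 * 1000, // one bucket per minute
      query: null,
    }),
  });
  const { data } = await response.json();
  return data.buckets; // [{ start, end, entriesCount }, ...]
};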
@@ -0,0 +1,70 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import Boom from 'boom';

import { pipe } from 'fp-ts/lib/pipeable';
import { fold } from 'fp-ts/lib/Either';
import { identity } from 'fp-ts/lib/function';
import { schema } from '@kbn/config-schema';

import { throwErrors } from '../../../common/runtime_types';

import { InfraBackendLibs } from '../../lib/infra_types';
import {
  LOG_ENTRIES_SUMMARY_HIGHLIGHTS_PATH,
  logEntriesSummaryHighlightsRequestRT,
  logEntriesSummaryHighlightsResponseRT,
} from '../../../common/http_api/log_entries';
import { parseFilterQuery } from '../../utils/serialized_query';

const escapeHatch = schema.object({}, { allowUnknowns: true });

export const initLogEntriesSummaryHighlightsRoute = ({
  framework,
  logEntries,
}: InfraBackendLibs) => {
  framework.registerRoute(
    {
      method: 'post',
      path: LOG_ENTRIES_SUMMARY_HIGHLIGHTS_PATH,
      validate: { body: escapeHatch },
    },
    async (requestContext, request, response) => {
      try {
        const payload = pipe(
          logEntriesSummaryHighlightsRequestRT.decode(request.body),
          fold(throwErrors(Boom.badRequest), identity)
        );
        const { sourceId, startDate, endDate, bucketSize, query, highlightTerms } = payload;

        const bucketsPerHighlightTerm = await logEntries.getLogSummaryHighlightBucketsBetween(
          requestContext,
          sourceId,
          startDate,
          endDate,
          bucketSize,
          highlightTerms,
          parseFilterQuery(query)
        );

        return response.ok({
          body: logEntriesSummaryHighlightsResponseRT.encode({
            data: bucketsPerHighlightTerm.map(buckets => ({
              start: startDate,
              end: endDate,
              buckets,
            })),
          }),
        });
      } catch (error) {
        return response.internalError({
          body: error.message,
        });
      }
    }
  );
};
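Note: the highlights route accepts the same interval fields as the summary route, plus the highlightTerms array it forwards to getLogSummaryHighlightBucketsBetween; one bucket series comes back per term. An illustrative request body (all values are assumptions):

const exampleHighlightsRequest = {
  sourceId: 'default',
  startDate: 1546732800000, // millisecond timestamps, illustrative only
  endDate: 1546736400000,
  bucketSize: 360000,
  query: null,
  highlightTerms: ['error'], // response: { data: [{ start, end, buckets }] }, one entry per term
};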
@@ -5,76 +5,72 @@
 */

import expect from '@kbn/expect';

import { pairs } from 'd3-array';
import gql from 'graphql-tag';

import { pipe } from 'fp-ts/lib/pipeable';
import { identity } from 'fp-ts/lib/function';
import { fold } from 'fp-ts/lib/Either';

import {
  createPlainError,
  throwErrors,
} from '../../../../legacy/plugins/infra/common/runtime_types';

import {
  LOG_ENTRIES_SUMMARY_PATH,
  logEntriesSummaryRequestRT,
  logEntriesSummaryResponseRT,
} from '../../../../legacy/plugins/infra/common/http_api/log_entries';

import { FtrProviderContext } from '../../ftr_provider_context';

const EARLIEST_TIME_WITH_DATA = new Date('2018-10-17T19:42:22.000Z').valueOf();
const LATEST_TIME_WITH_DATA = new Date('2018-10-17T19:57:21.611Z').valueOf();

const logSummaryBetweenQuery = gql`
  query LogSummary(
    $sourceId: ID = "default"
    $start: Float!
    $end: Float!
    $bucketSize: Float!
    $filterQuery: String
  ) {
    source(id: $sourceId) {
      id
      logSummaryBetween(
        start: $start
        end: $end
        bucketSize: $bucketSize
        filterQuery: $filterQuery
      ) {
        start
        end
        buckets {
          start
          end
          entriesCount
        }
      }
    }
  }
`;
const COMMON_HEADERS = {
  'kbn-xsrf': 'some-xsrf-token',
};

export default function({ getService }: FtrProviderContext) {
  const esArchiver = getService('esArchiver');
  const client = getService('infraOpsGraphQLClient');
  const supertest = getService('supertest');

  describe('logSummaryBetween', () => {
    before(() => esArchiver.load('infra/metrics_and_logs'));
    after(() => esArchiver.unload('infra/metrics_and_logs'));

    it('should return empty and non-empty consecutive buckets', async () => {
      const start = EARLIEST_TIME_WITH_DATA;
      const end = LATEST_TIME_WITH_DATA + (LATEST_TIME_WITH_DATA - EARLIEST_TIME_WITH_DATA);
      const bucketSize = Math.ceil((end - start) / 10);
      const startDate = EARLIEST_TIME_WITH_DATA;
      const endDate = LATEST_TIME_WITH_DATA + (LATEST_TIME_WITH_DATA - EARLIEST_TIME_WITH_DATA);
      const bucketSize = Math.ceil((endDate - startDate) / 10);

      const {
        data: {
          source: { logSummaryBetween },
        },
      } = await client.query<any>({
        query: logSummaryBetweenQuery,
        variables: {
          start,
          end,
          bucketSize,
        },
      });
      const { body } = await supertest
        .post(LOG_ENTRIES_SUMMARY_PATH)
        .set(COMMON_HEADERS)
        .send(
          logEntriesSummaryRequestRT.encode({
            sourceId: 'default',
            startDate,
            endDate,
            bucketSize,
            query: null,
          })
        )
        .expect(200);

      expect(logSummaryBetween).to.have.property('buckets');
      expect(logSummaryBetween.buckets).to.have.length(10);
      const logSummaryResponse = pipe(
        logEntriesSummaryResponseRT.decode(body),
        fold(throwErrors(createPlainError), identity)
      );

      expect(logSummaryResponse.data.buckets).to.have.length(10);
      expect(
        logSummaryBetween.buckets.filter((bucket: any) => bucket.entriesCount > 0)
        logSummaryResponse.data.buckets.filter((bucket: any) => bucket.entriesCount > 0)
      ).to.have.length(5);
      expect(
        pairs(
          logSummaryBetween.buckets,
          logSummaryResponse.data.buckets,
          (first: any, second: any) => first.end === second.start
        ).every(pair => pair)
      ).to.equal(true);
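Note: both tests validate the response body with the same pipe/fold idiom. Factored out as a standalone sketch for clarity; the decodeOrThrow helper name is invented for illustration, not part of this commit.

import * as rt from 'io-ts';
import { pipe } from 'fp-ts/lib/pipeable';
import { fold } from 'fp-ts/lib/Either';
import { identity } from 'fp-ts/lib/function';

// Decode `input` with `codec`; throw on the Left (validation failure) branch,
// otherwise pass the decoded value through unchanged via `identity`.
const decodeOrThrow = <C extends rt.Mixed>(codec: C, input: unknown): rt.TypeOf<C> =>
  pipe(
    codec.decode(input),
    fold(() => {
      throw new Error('response failed runtime validation');
    }, identity)
  );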
@@ -8,10 +8,27 @@ import expect from '@kbn/expect';
import { ascending, pairs } from 'd3-array';
import gql from 'graphql-tag';

import { pipe } from 'fp-ts/lib/pipeable';
import { identity } from 'fp-ts/lib/function';
import { fold } from 'fp-ts/lib/Either';

import {
  createPlainError,
  throwErrors,
} from '../../../../legacy/plugins/infra/common/runtime_types';

import { FtrProviderContext } from '../../ftr_provider_context';
import { sharedFragments } from '../../../../legacy/plugins/infra/common/graphql/shared';
import { InfraTimeKey } from '../../../../legacy/plugins/infra/public/graphql/types';
import {
  LOG_ENTRIES_SUMMARY_PATH,
  logEntriesSummaryRequestRT,
  logEntriesSummaryResponseRT,
} from '../../../../legacy/plugins/infra/common/http_api/log_entries';

const COMMON_HEADERS = {
  'kbn-xsrf': 'some-xsrf-token',
};
const KEY_WITHIN_DATA_RANGE = {
  time: new Date('2019-01-06T00:00:00.000Z').valueOf(),
  tiebreaker: 0,
@@ -28,6 +45,7 @@ const LATEST_KEY_WITH_DATA = {
export default function({ getService }: FtrProviderContext) {
  const esArchiver = getService('esArchiver');
  const client = getService('infraOpsGraphQLClient');
  const supertest = getService('supertest');

  describe('logs without epoch_millis format', () => {
    before(() => esArchiver.load('infra/logs_without_epoch_millis'));
@@ -74,26 +92,31 @@ export default function({ getService }: FtrProviderContext) {
    });

    it('logSummaryBetween should return non-empty buckets', async () => {
      const start = EARLIEST_KEY_WITH_DATA.time;
      const end = LATEST_KEY_WITH_DATA.time + 1; // the interval end is exclusive
      const bucketSize = Math.ceil((end - start) / 10);
      const startDate = EARLIEST_KEY_WITH_DATA.time;
      const endDate = LATEST_KEY_WITH_DATA.time + 1; // the interval end is exclusive
      const bucketSize = Math.ceil((endDate - startDate) / 10);

      const {
        data: {
          source: { logSummaryBetween },
        },
      } = await client.query<any>({
        query: logSummaryBetweenQuery,
        variables: {
          start,
          end,
          bucketSize,
        },
      });
      const { body } = await supertest
        .post(LOG_ENTRIES_SUMMARY_PATH)
        .set(COMMON_HEADERS)
        .send(
          logEntriesSummaryRequestRT.encode({
            sourceId: 'default',
            startDate,
            endDate,
            bucketSize,
            query: null,
          })
        )
        .expect(200);

      const logSummaryResponse = pipe(
        logEntriesSummaryResponseRT.decode(body),
        fold(throwErrors(createPlainError), identity)
      );

      expect(logSummaryBetween).to.have.property('buckets');
      expect(
        logSummaryBetween.buckets.filter((bucket: any) => bucket.entriesCount > 0)
        logSummaryResponse.data.buckets.filter((bucket: any) => bucket.entriesCount > 0)
      ).to.have.length(2);
    });
  });
@@ -161,34 +184,6 @@ const logEntriesBetweenQuery = gql`
  ${sharedFragments.InfraLogEntryFields}
`;

const logSummaryBetweenQuery = gql`
  query LogSummary(
    $sourceId: ID = "default"
    $start: Float!
    $end: Float!
    $bucketSize: Float!
    $filterQuery: String
  ) {
    source(id: $sourceId) {
      id
      logSummaryBetween(
        start: $start
        end: $end
        bucketSize: $bucketSize
        filterQuery: $filterQuery
      ) {
        start
        end
        buckets {
          start
          end
          entriesCount
        }
      }
    }
  }
`;

const isSorted = <Value>(comparator: (first: Value, second: Value) => number) => (
  values: Value[]
) => pairs(values, comparator).every(order => order <= 0);
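Note: the isSorted helper above relies on d3-array's pairs(values, reducer), which applies the comparator to each adjacent pair; a sorted array yields only non-positive comparator results. Example usage:

import { ascending } from 'd3-array';

isSorted(ascending)([1, 2, 3]); // true  — every adjacent pair is in order
isSorted(ascending)([3, 1, 2]); // false — the first pair compares positive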