Mirror of https://github.com/elastic/kibana.git, synced 2025-06-28 11:05:39 -04:00
[APM] TypeScript performance fix (#120754)

* [APM] TypeScript performance fix
* Clarify use_apm_params split with comments
* Remove ts-expect-error for security plugin
* Fix type error in searchtypes
This commit is contained in:
parent c3d9c0ce8e
commit 5b359a90b0

12 changed files with 548 additions and 585 deletions
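The heart of the first hunk below is replacing the hand-rolled PathsOf type, which spelled out one conditional branch per tuple length (up to eleven routes), with a single `keyof MapRoutes<TRoutes> & string`. The following minimal TypeScript sketch illustrates the idea with simplified stand-in types; the real Route and MapRoutes in @kbn/typed-react-router-config are considerably more involved, so treat this as an assumption-laden toy rather than the actual implementation.

// Toy types, not the actual Kibana ones.
interface Route {
  path: string;
  children?: Route[];
}

// "Before"-style: one conditional branch per supported tuple length
// (truncated to two branches here; the removed code went up to eleven).
type PathsOfByLength<TRoutes extends Route[]> = TRoutes extends []
  ? never
  : TRoutes extends [Route]
  ? TRoutes[0]['path']
  : TRoutes extends [Route, Route]
  ? TRoutes[0]['path'] | TRoutes[1]['path']
  : string;

// "After"-style: map every route to a key once, then read the keys.
type MapRoutesToy<TRoutes extends Route[]> = {
  [TRoute in TRoutes[number] as TRoute['path']]: TRoute;
};
type PathsOfToy<TRoutes extends Route[]> = keyof MapRoutesToy<TRoutes> & string;

// Both resolve to '/a' | '/b' for this input; the keyof form stays a shallow
// key lookup rather than a long conditional chain, which is the performance
// intent stated in the commit title.
type Example = PathsOfToy<[{ path: '/a' }, { path: '/b' }]>;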
@ -13,97 +13,13 @@ import { RequiredKeys, ValuesType } from 'utility-types';
|
|||
// import { unconst } from '../unconst';
|
||||
import { NormalizePath } from './utils';
|
||||
|
||||
type PathsOfRoute<TRoute extends Route> =
|
||||
| TRoute['path']
|
||||
| (TRoute extends { children: Route[] }
|
||||
? AppendPath<TRoute['path'], '/*'> | PathsOf<TRoute['children']>
|
||||
: never);
|
||||
// type PathsOfRoute<TRoute extends Route> =
|
||||
// | TRoute['path']
|
||||
// | (TRoute extends { children: Route[] }
|
||||
// ? AppendPath<TRoute['path'], '/*'> | PathsOf<TRoute['children']>
|
||||
// : never);
|
||||
|
||||
export type PathsOf<TRoutes extends Route[]> = TRoutes extends []
|
||||
? never
|
||||
: TRoutes extends [Route]
|
||||
? PathsOfRoute<TRoutes[0]>
|
||||
: TRoutes extends [Route, Route]
|
||||
? PathsOfRoute<TRoutes[0]> | PathsOfRoute<TRoutes[1]>
|
||||
: TRoutes extends [Route, Route, Route]
|
||||
? PathsOfRoute<TRoutes[0]> | PathsOfRoute<TRoutes[1]> | PathsOfRoute<TRoutes[2]>
|
||||
: TRoutes extends [Route, Route, Route, Route]
|
||||
?
|
||||
| PathsOfRoute<TRoutes[0]>
|
||||
| PathsOfRoute<TRoutes[1]>
|
||||
| PathsOfRoute<TRoutes[2]>
|
||||
| PathsOfRoute<TRoutes[3]>
|
||||
: TRoutes extends [Route, Route, Route, Route, Route]
|
||||
?
|
||||
| PathsOfRoute<TRoutes[0]>
|
||||
| PathsOfRoute<TRoutes[1]>
|
||||
| PathsOfRoute<TRoutes[2]>
|
||||
| PathsOfRoute<TRoutes[3]>
|
||||
| PathsOfRoute<TRoutes[4]>
|
||||
: TRoutes extends [Route, Route, Route, Route, Route, Route]
|
||||
?
|
||||
| PathsOfRoute<TRoutes[0]>
|
||||
| PathsOfRoute<TRoutes[1]>
|
||||
| PathsOfRoute<TRoutes[2]>
|
||||
| PathsOfRoute<TRoutes[3]>
|
||||
| PathsOfRoute<TRoutes[4]>
|
||||
| PathsOfRoute<TRoutes[5]>
|
||||
: TRoutes extends [Route, Route, Route, Route, Route, Route, Route]
|
||||
?
|
||||
| PathsOfRoute<TRoutes[0]>
|
||||
| PathsOfRoute<TRoutes[1]>
|
||||
| PathsOfRoute<TRoutes[2]>
|
||||
| PathsOfRoute<TRoutes[3]>
|
||||
| PathsOfRoute<TRoutes[4]>
|
||||
| PathsOfRoute<TRoutes[5]>
|
||||
| PathsOfRoute<TRoutes[6]>
|
||||
: TRoutes extends [Route, Route, Route, Route, Route, Route, Route, Route]
|
||||
?
|
||||
| PathsOfRoute<TRoutes[0]>
|
||||
| PathsOfRoute<TRoutes[1]>
|
||||
| PathsOfRoute<TRoutes[2]>
|
||||
| PathsOfRoute<TRoutes[3]>
|
||||
| PathsOfRoute<TRoutes[4]>
|
||||
| PathsOfRoute<TRoutes[5]>
|
||||
| PathsOfRoute<TRoutes[6]>
|
||||
| PathsOfRoute<TRoutes[7]>
|
||||
: TRoutes extends [Route, Route, Route, Route, Route, Route, Route, Route, Route]
|
||||
?
|
||||
| PathsOfRoute<TRoutes[0]>
|
||||
| PathsOfRoute<TRoutes[1]>
|
||||
| PathsOfRoute<TRoutes[2]>
|
||||
| PathsOfRoute<TRoutes[3]>
|
||||
| PathsOfRoute<TRoutes[4]>
|
||||
| PathsOfRoute<TRoutes[5]>
|
||||
| PathsOfRoute<TRoutes[6]>
|
||||
| PathsOfRoute<TRoutes[7]>
|
||||
| PathsOfRoute<TRoutes[8]>
|
||||
: TRoutes extends [Route, Route, Route, Route, Route, Route, Route, Route, Route, Route]
|
||||
?
|
||||
| PathsOfRoute<TRoutes[0]>
|
||||
| PathsOfRoute<TRoutes[1]>
|
||||
| PathsOfRoute<TRoutes[2]>
|
||||
| PathsOfRoute<TRoutes[3]>
|
||||
| PathsOfRoute<TRoutes[4]>
|
||||
| PathsOfRoute<TRoutes[5]>
|
||||
| PathsOfRoute<TRoutes[6]>
|
||||
| PathsOfRoute<TRoutes[7]>
|
||||
| PathsOfRoute<TRoutes[8]>
|
||||
| PathsOfRoute<TRoutes[9]>
|
||||
: TRoutes extends [Route, Route, Route, Route, Route, Route, Route, Route, Route, Route, Route]
|
||||
?
|
||||
| PathsOfRoute<TRoutes[0]>
|
||||
| PathsOfRoute<TRoutes[1]>
|
||||
| PathsOfRoute<TRoutes[2]>
|
||||
| PathsOfRoute<TRoutes[3]>
|
||||
| PathsOfRoute<TRoutes[4]>
|
||||
| PathsOfRoute<TRoutes[5]>
|
||||
| PathsOfRoute<TRoutes[6]>
|
||||
| PathsOfRoute<TRoutes[7]>
|
||||
| PathsOfRoute<TRoutes[8]>
|
||||
| PathsOfRoute<TRoutes[9]>
|
||||
| PathsOfRoute<TRoutes[10]>
|
||||
: string;
|
||||
export type PathsOf<TRoutes extends Route[]> = keyof MapRoutes<TRoutes> & string;
|
||||
|
||||
export interface RouteMatch<TRoute extends Route = Route> {
|
||||
route: TRoute;
|
||||
|
@ -347,6 +263,14 @@ type MapRoutes<TRoutes, TParents extends Route[] = []> = TRoutes extends [Route]
|
|||
|
||||
// const routes = unconst([
|
||||
// {
|
||||
// path: '/link-to/transaction/{transactionId}',
|
||||
// element,
|
||||
// },
|
||||
// {
|
||||
// path: '/link-to/trace/{traceId}',
|
||||
// element,
|
||||
// },
|
||||
// {
|
||||
// path: '/',
|
||||
// element,
|
||||
// children: [
|
||||
|
@ -393,6 +317,10 @@ type MapRoutes<TRoutes, TParents extends Route[] = []> = TRoutes extends [Route]
|
|||
// element,
|
||||
// },
|
||||
// {
|
||||
// path: '/settings/agent-keys',
|
||||
// element,
|
||||
// },
|
||||
// {
|
||||
// path: '/settings',
|
||||
// element,
|
||||
// },
|
||||
|
@ -430,11 +358,19 @@ type MapRoutes<TRoutes, TParents extends Route[] = []> = TRoutes extends [Route]
|
|||
// element,
|
||||
// },
|
||||
// {
|
||||
// path: '/services/:serviceName/transactions/view',
|
||||
// element,
|
||||
// },
|
||||
// {
|
||||
// path: '/services/:serviceName/dependencies',
|
||||
// element,
|
||||
// },
|
||||
// {
|
||||
// path: '/services/:serviceName/errors',
|
||||
// element,
|
||||
// children: [
|
||||
// {
|
||||
// path: '/:groupId',
|
||||
// path: '/services/:serviceName/errors/:groupId',
|
||||
// element,
|
||||
// params: t.type({
|
||||
// path: t.type({
|
||||
|
@ -443,7 +379,7 @@ type MapRoutes<TRoutes, TParents extends Route[] = []> = TRoutes extends [Route]
|
|||
// }),
|
||||
// },
|
||||
// {
|
||||
// path: '/services/:serviceName',
|
||||
// path: '/services/:serviceName/errors',
|
||||
// element,
|
||||
// params: t.partial({
|
||||
// query: t.partial({
|
||||
|
@ -457,15 +393,33 @@ type MapRoutes<TRoutes, TParents extends Route[] = []> = TRoutes extends [Route]
|
|||
// ],
|
||||
// },
|
||||
// {
|
||||
// path: '/services/:serviceName/foo',
|
||||
// path: '/services/:serviceName/metrics',
|
||||
// element,
|
||||
// },
|
||||
// {
|
||||
// path: '/services/:serviceName/bar',
|
||||
// path: '/services/:serviceName/nodes',
|
||||
// element,
|
||||
// children: [
|
||||
// {
|
||||
// path: '/services/{serviceName}/nodes/{serviceNodeName}/metrics',
|
||||
// element,
|
||||
// },
|
||||
// {
|
||||
// path: '/services/:serviceName/nodes',
|
||||
// element,
|
||||
// },
|
||||
// ],
|
||||
// },
|
||||
// {
|
||||
// path: '/services/:serviceName/service-map',
|
||||
// element,
|
||||
// },
|
||||
// {
|
||||
// path: '/services/:serviceName/baz',
|
||||
// path: '/services/:serviceName/logs',
|
||||
// element,
|
||||
// },
|
||||
// {
|
||||
// path: '/services/:serviceName/profiling',
|
||||
// element,
|
||||
// },
|
||||
// {
|
||||
|
@ -497,6 +451,24 @@ type MapRoutes<TRoutes, TParents extends Route[] = []> = TRoutes extends [Route]
|
|||
// element,
|
||||
// },
|
||||
// {
|
||||
// path: '/backends',
|
||||
// element,
|
||||
// children: [
|
||||
// {
|
||||
// path: '/backends/{backendName}/overview',
|
||||
// element,
|
||||
// },
|
||||
// {
|
||||
// path: '/backends/overview',
|
||||
// element,
|
||||
// },
|
||||
// {
|
||||
// path: '/backends',
|
||||
// element,
|
||||
// },
|
||||
// ],
|
||||
// },
|
||||
// {
|
||||
// path: '/',
|
||||
// element,
|
||||
// },
|
||||
|
@ -509,10 +481,11 @@ type MapRoutes<TRoutes, TParents extends Route[] = []> = TRoutes extends [Route]
|
|||
// type Routes = typeof routes;
|
||||
|
||||
// type Mapped = keyof MapRoutes<Routes>;
|
||||
// type Paths = PathsOf<Routes>;
|
||||
|
||||
// type Bar = ValuesType<Match<Routes, '/*'>>['route']['path'];
|
||||
// type Foo = OutputOf<Routes, '/*'>;
|
||||
// type Baz = OutputOf<Routes, '/services/:serviceName/baz'>;
|
||||
// // type Baz = OutputOf<Routes, '/services/:serviceName/errors'>;
|
||||
|
||||
// const { path }: Foo = {} as any;
|
||||
|
||||
|
@ -520,4 +493,4 @@ type MapRoutes<TRoutes, TParents extends Route[] = []> = TRoutes extends [Route]
|
|||
// return {} as any;
|
||||
// }
|
||||
|
||||
// const params = _useApmParams('/*');
|
||||
// // const params = _useApmParams('/services/:serviceName/nodes/*');
|
||||
|
|
|
@ -9,6 +9,11 @@
|
|||
import { ValuesType, UnionToIntersection } from 'utility-types';
|
||||
import * as estypes from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
|
||||
|
||||
interface AggregationsAggregationContainer extends Record<string, any> {
|
||||
aggs?: any;
|
||||
aggregations?: any;
|
||||
}
|
||||
|
||||
type InvalidAggregationRequest = unknown;
|
||||
|
||||
// ensures aggregations work with requests where aggregation options are a union type,
|
||||
|
@ -31,7 +36,7 @@ type KeysOfSources<T extends any[]> = T extends [any]
|
|||
? KeyOfSource<T[0]> & KeyOfSource<T[1]> & KeyOfSource<T[2]> & KeyOfSource<T[3]>
|
||||
: Record<string, null | string | number>;
|
||||
|
||||
type CompositeKeysOf<TAggregationContainer extends estypes.AggregationsAggregationContainer> =
|
||||
type CompositeKeysOf<TAggregationContainer extends AggregationsAggregationContainer> =
|
||||
TAggregationContainer extends {
|
||||
composite: { sources: [...infer TSource] };
|
||||
}
|
||||
|
@ -40,7 +45,7 @@ type CompositeKeysOf<TAggregationContainer extends estypes.AggregationsAggregati
|
|||
|
||||
type Source = estypes.SearchSourceFilter | boolean | estypes.Fields;
|
||||
|
||||
type TopMetricKeysOf<TAggregationContainer extends estypes.AggregationsAggregationContainer> =
|
||||
type TopMetricKeysOf<TAggregationContainer extends AggregationsAggregationContainer> =
|
||||
TAggregationContainer extends { top_metrics: { metrics: { field: infer TField } } }
|
||||
? TField
|
||||
: TAggregationContainer extends { top_metrics: { metrics: Array<{ field: infer TField }> } }
|
||||
|
@ -92,17 +97,9 @@ type HitsOf<
|
|||
>
|
||||
>;
|
||||
|
||||
type AggregationTypeName = Exclude<
|
||||
keyof estypes.AggregationsAggregationContainer,
|
||||
'aggs' | 'aggregations'
|
||||
>;
|
||||
type AggregationMap = Partial<Record<string, AggregationsAggregationContainer>>;
|
||||
|
||||
type AggregationMap = Partial<Record<string, estypes.AggregationsAggregationContainer>>;
|
||||
|
||||
type TopLevelAggregationRequest = Pick<
|
||||
estypes.AggregationsAggregationContainer,
|
||||
'aggs' | 'aggregations'
|
||||
>;
|
||||
type TopLevelAggregationRequest = Pick<AggregationsAggregationContainer, 'aggs' | 'aggregations'>;
|
||||
|
||||
type MaybeKeyed<
|
||||
TAggregationContainer,
|
||||
|
@ -113,448 +110,460 @@ type MaybeKeyed<
|
|||
: { buckets: TBucket[] };
|
||||
|
||||
export type AggregateOf<
|
||||
TAggregationContainer extends estypes.AggregationsAggregationContainer,
|
||||
TAggregationContainer extends AggregationsAggregationContainer,
|
||||
TDocument
|
||||
> = (Record<AggregationTypeName, unknown> & {
|
||||
adjacency_matrix: {
|
||||
buckets: Array<
|
||||
{
|
||||
key: string;
|
||||
doc_count: number;
|
||||
} & SubAggregateOf<TAggregationContainer, TDocument>
|
||||
>;
|
||||
};
|
||||
auto_date_histogram: {
|
||||
interval: string;
|
||||
buckets: Array<
|
||||
{
|
||||
key: number;
|
||||
key_as_string: string;
|
||||
doc_count: number;
|
||||
} & SubAggregateOf<TAggregationContainer, TDocument>
|
||||
>;
|
||||
};
|
||||
avg: {
|
||||
value: number | null;
|
||||
value_as_string?: string;
|
||||
};
|
||||
avg_bucket: {
|
||||
value: number | null;
|
||||
};
|
||||
boxplot: {
|
||||
min: number | null;
|
||||
max: number | null;
|
||||
q1: number | null;
|
||||
q2: number | null;
|
||||
q3: number | null;
|
||||
};
|
||||
bucket_script: {
|
||||
value: unknown;
|
||||
};
|
||||
cardinality: {
|
||||
value: number;
|
||||
};
|
||||
children: {
|
||||
doc_count: number;
|
||||
} & SubAggregateOf<TAggregationContainer, TDocument>;
|
||||
composite: {
|
||||
after_key: CompositeKeysOf<TAggregationContainer>;
|
||||
buckets: Array<
|
||||
{
|
||||
doc_count: number;
|
||||
key: CompositeKeysOf<TAggregationContainer>;
|
||||
} & SubAggregateOf<TAggregationContainer, TDocument>
|
||||
>;
|
||||
};
|
||||
cumulative_cardinality: {
|
||||
value: number;
|
||||
};
|
||||
cumulative_sum: {
|
||||
value: number;
|
||||
};
|
||||
date_histogram: MaybeKeyed<
|
||||
TAggregationContainer,
|
||||
{
|
||||
key: number;
|
||||
key_as_string: string;
|
||||
doc_count: number;
|
||||
} & SubAggregateOf<TAggregationContainer, TDocument>
|
||||
>;
|
||||
date_range: MaybeKeyed<
|
||||
TAggregationContainer,
|
||||
Partial<{ from: string | number; from_as_string: string }> &
|
||||
Partial<{ to: string | number; to_as_string: string }> & {
|
||||
doc_count: number;
|
||||
key: string;
|
||||
}
|
||||
>;
|
||||
derivative:
|
||||
| {
|
||||
value: number | null;
|
||||
}
|
||||
| undefined;
|
||||
extended_stats: {
|
||||
count: number;
|
||||
min: number | null;
|
||||
max: number | null;
|
||||
avg: number | null;
|
||||
sum: number;
|
||||
sum_of_squares: number | null;
|
||||
variance: number | null;
|
||||
variance_population: number | null;
|
||||
variance_sampling: number | null;
|
||||
std_deviation: number | null;
|
||||
std_deviation_population: number | null;
|
||||
std_deviation_sampling: number | null;
|
||||
std_deviation_bounds: {
|
||||
upper: number | null;
|
||||
lower: number | null;
|
||||
upper_population: number | null;
|
||||
lower_population: number | null;
|
||||
upper_sampling: number | null;
|
||||
lower_sampling: number | null;
|
||||
};
|
||||
} & (
|
||||
| {
|
||||
min_as_string: string;
|
||||
max_as_string: string;
|
||||
avg_as_string: string;
|
||||
sum_of_squares_as_string: string;
|
||||
variance_population_as_string: string;
|
||||
variance_sampling_as_string: string;
|
||||
std_deviation_as_string: string;
|
||||
std_deviation_population_as_string: string;
|
||||
std_deviation_sampling_as_string: string;
|
||||
std_deviation_bounds_as_string: {
|
||||
upper: string;
|
||||
lower: string;
|
||||
upper_population: string;
|
||||
lower_population: string;
|
||||
upper_sampling: string;
|
||||
lower_sampling: string;
|
||||
};
|
||||
}
|
||||
| {}
|
||||
);
|
||||
extended_stats_bucket: {
|
||||
count: number;
|
||||
min: number | null;
|
||||
max: number | null;
|
||||
avg: number | null;
|
||||
sum: number | null;
|
||||
sum_of_squares: number | null;
|
||||
variance: number | null;
|
||||
variance_population: number | null;
|
||||
variance_sampling: number | null;
|
||||
std_deviation: number | null;
|
||||
std_deviation_population: number | null;
|
||||
std_deviation_sampling: number | null;
|
||||
std_deviation_bounds: {
|
||||
upper: number | null;
|
||||
lower: number | null;
|
||||
upper_population: number | null;
|
||||
lower_population: number | null;
|
||||
upper_sampling: number | null;
|
||||
lower_sampling: number | null;
|
||||
};
|
||||
};
|
||||
filter: {
|
||||
doc_count: number;
|
||||
} & SubAggregateOf<TAggregationContainer, TDocument>;
|
||||
filters: {
|
||||
buckets: TAggregationContainer extends { filters: { filters: any[] } }
|
||||
? Array<
|
||||
> = ValuesType<
|
||||
Pick<
|
||||
Record<string, unknown> & {
|
||||
adjacency_matrix: {
|
||||
buckets: Array<
|
||||
{
|
||||
key: string;
|
||||
doc_count: number;
|
||||
} & SubAggregateOf<TAggregationContainer, TDocument>
|
||||
>
|
||||
: TAggregationContainer extends { filters: { filters: Record<string, any> } }
|
||||
? {
|
||||
[key in keyof TAggregationContainer['filters']['filters']]: {
|
||||
>;
|
||||
};
|
||||
auto_date_histogram: {
|
||||
interval: string;
|
||||
buckets: Array<
|
||||
{
|
||||
key: number;
|
||||
key_as_string: string;
|
||||
doc_count: number;
|
||||
} & SubAggregateOf<TAggregationContainer, TDocument>;
|
||||
} & (TAggregationContainer extends { filters: { other_bucket_key: infer TOtherBucketKey } }
|
||||
? Record<
|
||||
TOtherBucketKey & string,
|
||||
{ doc_count: number } & SubAggregateOf<TAggregationContainer, TDocument>
|
||||
>
|
||||
: unknown) &
|
||||
(TAggregationContainer extends { filters: { other_bucket: true } }
|
||||
? { _other: { doc_count: number } & SubAggregateOf<TAggregationContainer, TDocument> }
|
||||
: unknown)
|
||||
: unknown;
|
||||
};
|
||||
geo_bounds: {
|
||||
top_left: {
|
||||
lat: number | null;
|
||||
lon: number | null;
|
||||
};
|
||||
bottom_right: {
|
||||
lat: number | null;
|
||||
lon: number | null;
|
||||
};
|
||||
};
|
||||
geo_centroid: {
|
||||
count: number;
|
||||
location: {
|
||||
lat: number;
|
||||
lon: number;
|
||||
};
|
||||
};
|
||||
geo_distance: MaybeKeyed<
|
||||
TAggregationContainer,
|
||||
{
|
||||
from: number;
|
||||
to?: number;
|
||||
doc_count: number;
|
||||
} & SubAggregateOf<TAggregationContainer, TDocument>
|
||||
>;
|
||||
geo_hash: {
|
||||
buckets: Array<
|
||||
{
|
||||
doc_count: number;
|
||||
key: string;
|
||||
} & SubAggregateOf<TAggregationContainer, TDocument>
|
||||
>;
|
||||
};
|
||||
geotile_grid: {
|
||||
buckets: Array<
|
||||
{
|
||||
doc_count: number;
|
||||
key: string;
|
||||
} & SubAggregateOf<TAggregationContainer, TDocument>
|
||||
>;
|
||||
};
|
||||
global: {
|
||||
doc_count: number;
|
||||
} & SubAggregateOf<TAggregationContainer, TDocument>;
|
||||
histogram: MaybeKeyed<
|
||||
TAggregationContainer,
|
||||
{
|
||||
key: number;
|
||||
doc_count: number;
|
||||
} & SubAggregateOf<TAggregationContainer, TDocument>
|
||||
>;
|
||||
ip_range: MaybeKeyed<
|
||||
TAggregationContainer,
|
||||
{
|
||||
key: string;
|
||||
from?: string;
|
||||
to?: string;
|
||||
doc_count: number;
|
||||
},
|
||||
TAggregationContainer extends { ip_range: { ranges: Array<infer TRangeType> } }
|
||||
? TRangeType extends { key: infer TKeys }
|
||||
? TKeys
|
||||
: string
|
||||
: string
|
||||
>;
|
||||
inference: {
|
||||
value: number;
|
||||
prediction_probability: number;
|
||||
prediction_score: number;
|
||||
};
|
||||
max: {
|
||||
value: number | null;
|
||||
value_as_string?: string;
|
||||
};
|
||||
max_bucket: {
|
||||
value: number | null;
|
||||
};
|
||||
min: {
|
||||
value: number | null;
|
||||
value_as_string?: string;
|
||||
};
|
||||
min_bucket: {
|
||||
value: number | null;
|
||||
};
|
||||
median_absolute_deviation: {
|
||||
value: number | null;
|
||||
};
|
||||
moving_avg:
|
||||
| {
|
||||
} & SubAggregateOf<TAggregationContainer, TDocument>
|
||||
>;
|
||||
};
|
||||
avg: {
|
||||
value: number | null;
|
||||
}
|
||||
| undefined;
|
||||
moving_fn: {
|
||||
value: number | null;
|
||||
};
|
||||
moving_percentiles: TAggregationContainer extends Record<string, { keyed: false }>
|
||||
? Array<{ key: number; value: number | null }>
|
||||
: Record<string, number | null> | undefined;
|
||||
missing: {
|
||||
doc_count: number;
|
||||
} & SubAggregateOf<TAggregationContainer, TDocument>;
|
||||
multi_terms: {
|
||||
doc_count_error_upper_bound: number;
|
||||
sum_other_doc_count: number;
|
||||
buckets: Array<
|
||||
{
|
||||
value_as_string?: string;
|
||||
};
|
||||
avg_bucket: {
|
||||
value: number | null;
|
||||
};
|
||||
boxplot: {
|
||||
min: number | null;
|
||||
max: number | null;
|
||||
q1: number | null;
|
||||
q2: number | null;
|
||||
q3: number | null;
|
||||
};
|
||||
bucket_script: {
|
||||
value: unknown;
|
||||
};
|
||||
cardinality: {
|
||||
value: number;
|
||||
};
|
||||
children: {
|
||||
doc_count: number;
|
||||
key: string[];
|
||||
} & SubAggregateOf<TAggregationContainer, TDocument>
|
||||
>;
|
||||
};
|
||||
nested: {
|
||||
doc_count: number;
|
||||
} & SubAggregateOf<TAggregationContainer, TDocument>;
|
||||
normalize: {
|
||||
value: number | null;
|
||||
// TODO: should be perhaps based on input? ie when `format` is specified
|
||||
value_as_string?: string;
|
||||
};
|
||||
parent: {
|
||||
doc_count: number;
|
||||
} & SubAggregateOf<TAggregationContainer, TDocument>;
|
||||
percentiles: {
|
||||
values: TAggregationContainer extends Record<string, { keyed: false }>
|
||||
? Array<{ key: number; value: number | null }>
|
||||
: Record<string, number | null>;
|
||||
};
|
||||
percentile_ranks: {
|
||||
values: TAggregationContainer extends Record<string, { keyed: false }>
|
||||
? Array<{ key: number; value: number | null }>
|
||||
: Record<string, number | null>;
|
||||
};
|
||||
percentiles_bucket: {
|
||||
values: TAggregationContainer extends Record<string, { keyed: false }>
|
||||
? Array<{ key: number; value: number | null }>
|
||||
: Record<string, number | null>;
|
||||
};
|
||||
range: MaybeKeyed<
|
||||
TAggregationContainer,
|
||||
{
|
||||
key: string;
|
||||
from?: number;
|
||||
from_as_string?: string;
|
||||
to?: number;
|
||||
to_as_string?: string;
|
||||
doc_count: number;
|
||||
},
|
||||
TAggregationContainer extends { range: { ranges: Array<infer TRangeType> } }
|
||||
? TRangeType extends { key: infer TKeys }
|
||||
? TKeys
|
||||
: string
|
||||
: string
|
||||
>;
|
||||
rare_terms: Array<
|
||||
{
|
||||
key: string | number;
|
||||
doc_count: number;
|
||||
} & SubAggregateOf<TAggregationContainer, TDocument>
|
||||
>;
|
||||
rate: {
|
||||
value: number | null;
|
||||
};
|
||||
reverse_nested: {
|
||||
doc_count: number;
|
||||
} & SubAggregateOf<TAggregationContainer, TDocument>;
|
||||
sampler: {
|
||||
doc_count: number;
|
||||
} & SubAggregateOf<TAggregationContainer, TDocument>;
|
||||
scripted_metric: {
|
||||
value: unknown;
|
||||
};
|
||||
serial_diff: {
|
||||
value: number | null;
|
||||
// TODO: should be perhaps based on input? ie when `format` is specified
|
||||
value_as_string?: string;
|
||||
};
|
||||
significant_terms: {
|
||||
doc_count: number;
|
||||
bg_count: number;
|
||||
buckets: Array<
|
||||
{
|
||||
key: string | number;
|
||||
score: number;
|
||||
} & SubAggregateOf<TAggregationContainer, TDocument>;
|
||||
composite: {
|
||||
after_key: CompositeKeysOf<TAggregationContainer>;
|
||||
buckets: Array<
|
||||
{
|
||||
doc_count: number;
|
||||
key: CompositeKeysOf<TAggregationContainer>;
|
||||
} & SubAggregateOf<TAggregationContainer, TDocument>
|
||||
>;
|
||||
};
|
||||
cumulative_cardinality: {
|
||||
value: number;
|
||||
};
|
||||
cumulative_sum: {
|
||||
value: number;
|
||||
};
|
||||
date_histogram: MaybeKeyed<
|
||||
TAggregationContainer,
|
||||
{
|
||||
key: number;
|
||||
key_as_string: string;
|
||||
doc_count: number;
|
||||
} & SubAggregateOf<TAggregationContainer, TDocument>
|
||||
>;
|
||||
date_range: MaybeKeyed<
|
||||
TAggregationContainer,
|
||||
Partial<{ from: string | number; from_as_string: string }> &
|
||||
Partial<{ to: string | number; to_as_string: string }> & {
|
||||
doc_count: number;
|
||||
key: string;
|
||||
}
|
||||
>;
|
||||
derivative:
|
||||
| {
|
||||
value: number | null;
|
||||
}
|
||||
| undefined;
|
||||
extended_stats: {
|
||||
count: number;
|
||||
min: number | null;
|
||||
max: number | null;
|
||||
avg: number | null;
|
||||
sum: number;
|
||||
sum_of_squares: number | null;
|
||||
variance: number | null;
|
||||
variance_population: number | null;
|
||||
variance_sampling: number | null;
|
||||
std_deviation: number | null;
|
||||
std_deviation_population: number | null;
|
||||
std_deviation_sampling: number | null;
|
||||
std_deviation_bounds: {
|
||||
upper: number | null;
|
||||
lower: number | null;
|
||||
upper_population: number | null;
|
||||
lower_population: number | null;
|
||||
upper_sampling: number | null;
|
||||
lower_sampling: number | null;
|
||||
};
|
||||
} & (
|
||||
| {
|
||||
min_as_string: string;
|
||||
max_as_string: string;
|
||||
avg_as_string: string;
|
||||
sum_of_squares_as_string: string;
|
||||
variance_population_as_string: string;
|
||||
variance_sampling_as_string: string;
|
||||
std_deviation_as_string: string;
|
||||
std_deviation_population_as_string: string;
|
||||
std_deviation_sampling_as_string: string;
|
||||
std_deviation_bounds_as_string: {
|
||||
upper: string;
|
||||
lower: string;
|
||||
upper_population: string;
|
||||
lower_population: string;
|
||||
upper_sampling: string;
|
||||
lower_sampling: string;
|
||||
};
|
||||
}
|
||||
| {}
|
||||
);
|
||||
extended_stats_bucket: {
|
||||
count: number;
|
||||
min: number | null;
|
||||
max: number | null;
|
||||
avg: number | null;
|
||||
sum: number | null;
|
||||
sum_of_squares: number | null;
|
||||
variance: number | null;
|
||||
variance_population: number | null;
|
||||
variance_sampling: number | null;
|
||||
std_deviation: number | null;
|
||||
std_deviation_population: number | null;
|
||||
std_deviation_sampling: number | null;
|
||||
std_deviation_bounds: {
|
||||
upper: number | null;
|
||||
lower: number | null;
|
||||
upper_population: number | null;
|
||||
lower_population: number | null;
|
||||
upper_sampling: number | null;
|
||||
lower_sampling: number | null;
|
||||
};
|
||||
};
|
||||
filter: {
|
||||
doc_count: number;
|
||||
} & SubAggregateOf<TAggregationContainer, TDocument>;
|
||||
filters: {
|
||||
buckets: TAggregationContainer extends { filters: { filters: any[] } }
|
||||
? Array<
|
||||
{
|
||||
doc_count: number;
|
||||
} & SubAggregateOf<TAggregationContainer, TDocument>
|
||||
>
|
||||
: TAggregationContainer extends { filters: { filters: Record<string, any> } }
|
||||
? {
|
||||
[key in keyof TAggregationContainer['filters']['filters']]: {
|
||||
doc_count: number;
|
||||
} & SubAggregateOf<TAggregationContainer, TDocument>;
|
||||
} & (TAggregationContainer extends {
|
||||
filters: { other_bucket_key: infer TOtherBucketKey };
|
||||
}
|
||||
? Record<
|
||||
TOtherBucketKey & string,
|
||||
{ doc_count: number } & SubAggregateOf<TAggregationContainer, TDocument>
|
||||
>
|
||||
: unknown) &
|
||||
(TAggregationContainer extends { filters: { other_bucket: true } }
|
||||
? {
|
||||
_other: { doc_count: number } & SubAggregateOf<
|
||||
TAggregationContainer,
|
||||
TDocument
|
||||
>;
|
||||
}
|
||||
: unknown)
|
||||
: unknown;
|
||||
};
|
||||
geo_bounds: {
|
||||
top_left: {
|
||||
lat: number | null;
|
||||
lon: number | null;
|
||||
};
|
||||
bottom_right: {
|
||||
lat: number | null;
|
||||
lon: number | null;
|
||||
};
|
||||
};
|
||||
geo_centroid: {
|
||||
count: number;
|
||||
location: {
|
||||
lat: number;
|
||||
lon: number;
|
||||
};
|
||||
};
|
||||
geo_distance: MaybeKeyed<
|
||||
TAggregationContainer,
|
||||
{
|
||||
from: number;
|
||||
to?: number;
|
||||
doc_count: number;
|
||||
} & SubAggregateOf<TAggregationContainer, TDocument>
|
||||
>;
|
||||
geo_hash: {
|
||||
buckets: Array<
|
||||
{
|
||||
doc_count: number;
|
||||
key: string;
|
||||
} & SubAggregateOf<TAggregationContainer, TDocument>
|
||||
>;
|
||||
};
|
||||
geotile_grid: {
|
||||
buckets: Array<
|
||||
{
|
||||
doc_count: number;
|
||||
key: string;
|
||||
} & SubAggregateOf<TAggregationContainer, TDocument>
|
||||
>;
|
||||
};
|
||||
global: {
|
||||
doc_count: number;
|
||||
} & SubAggregateOf<TAggregationContainer, TDocument>;
|
||||
histogram: MaybeKeyed<
|
||||
TAggregationContainer,
|
||||
{
|
||||
key: number;
|
||||
doc_count: number;
|
||||
} & SubAggregateOf<TAggregationContainer, TDocument>
|
||||
>;
|
||||
ip_range: MaybeKeyed<
|
||||
TAggregationContainer,
|
||||
{
|
||||
key: string;
|
||||
from?: string;
|
||||
to?: string;
|
||||
doc_count: number;
|
||||
},
|
||||
TAggregationContainer extends { ip_range: { ranges: Array<infer TRangeType> } }
|
||||
? TRangeType extends { key: infer TKeys }
|
||||
? TKeys
|
||||
: string
|
||||
: string
|
||||
>;
|
||||
inference: {
|
||||
value: number;
|
||||
prediction_probability: number;
|
||||
prediction_score: number;
|
||||
};
|
||||
max: {
|
||||
value: number | null;
|
||||
value_as_string?: string;
|
||||
};
|
||||
max_bucket: {
|
||||
value: number | null;
|
||||
};
|
||||
min: {
|
||||
value: number | null;
|
||||
value_as_string?: string;
|
||||
};
|
||||
min_bucket: {
|
||||
value: number | null;
|
||||
};
|
||||
median_absolute_deviation: {
|
||||
value: number | null;
|
||||
};
|
||||
moving_avg:
|
||||
| {
|
||||
value: number | null;
|
||||
}
|
||||
| undefined;
|
||||
moving_fn: {
|
||||
value: number | null;
|
||||
};
|
||||
moving_percentiles: TAggregationContainer extends Record<string, { keyed: false }>
|
||||
? Array<{ key: number; value: number | null }>
|
||||
: Record<string, number | null> | undefined;
|
||||
missing: {
|
||||
doc_count: number;
|
||||
} & SubAggregateOf<TAggregationContainer, TDocument>;
|
||||
multi_terms: {
|
||||
doc_count_error_upper_bound: number;
|
||||
sum_other_doc_count: number;
|
||||
buckets: Array<
|
||||
{
|
||||
doc_count: number;
|
||||
key: string[];
|
||||
} & SubAggregateOf<TAggregationContainer, TDocument>
|
||||
>;
|
||||
};
|
||||
nested: {
|
||||
doc_count: number;
|
||||
} & SubAggregateOf<TAggregationContainer, TDocument>;
|
||||
normalize: {
|
||||
value: number | null;
|
||||
// TODO: should be perhaps based on input? ie when `format` is specified
|
||||
value_as_string?: string;
|
||||
};
|
||||
parent: {
|
||||
doc_count: number;
|
||||
} & SubAggregateOf<TAggregationContainer, TDocument>;
|
||||
percentiles: {
|
||||
values: TAggregationContainer extends Record<string, { keyed: false }>
|
||||
? Array<{ key: number; value: number | null }>
|
||||
: Record<string, number | null>;
|
||||
};
|
||||
percentile_ranks: {
|
||||
values: TAggregationContainer extends Record<string, { keyed: false }>
|
||||
? Array<{ key: number; value: number | null }>
|
||||
: Record<string, number | null>;
|
||||
};
|
||||
percentiles_bucket: {
|
||||
values: TAggregationContainer extends Record<string, { keyed: false }>
|
||||
? Array<{ key: number; value: number | null }>
|
||||
: Record<string, number | null>;
|
||||
};
|
||||
range: MaybeKeyed<
|
||||
TAggregationContainer,
|
||||
{
|
||||
key: string;
|
||||
from?: number;
|
||||
from_as_string?: string;
|
||||
to?: number;
|
||||
to_as_string?: string;
|
||||
doc_count: number;
|
||||
},
|
||||
TAggregationContainer extends { range: { ranges: Array<infer TRangeType> } }
|
||||
? TRangeType extends { key: infer TKeys }
|
||||
? TKeys
|
||||
: string
|
||||
: string
|
||||
>;
|
||||
rare_terms: Array<
|
||||
{
|
||||
key: string | number;
|
||||
doc_count: number;
|
||||
} & SubAggregateOf<TAggregationContainer, TDocument>
|
||||
>;
|
||||
rate: {
|
||||
value: number | null;
|
||||
};
|
||||
reverse_nested: {
|
||||
doc_count: number;
|
||||
} & SubAggregateOf<TAggregationContainer, TDocument>;
|
||||
sampler: {
|
||||
doc_count: number;
|
||||
} & SubAggregateOf<TAggregationContainer, TDocument>;
|
||||
scripted_metric: {
|
||||
value: unknown;
|
||||
};
|
||||
serial_diff: {
|
||||
value: number | null;
|
||||
// TODO: should be perhaps based on input? ie when `format` is specified
|
||||
value_as_string?: string;
|
||||
};
|
||||
significant_terms: {
|
||||
doc_count: number;
|
||||
bg_count: number;
|
||||
} & SubAggregateOf<TAggregationContainer, TDocument>
|
||||
>;
|
||||
};
|
||||
significant_text: {
|
||||
doc_count: number;
|
||||
buckets: Array<{
|
||||
key: string;
|
||||
doc_count: number;
|
||||
score: number;
|
||||
bg_count: number;
|
||||
}>;
|
||||
};
|
||||
stats: {
|
||||
count: number;
|
||||
min: number | null;
|
||||
max: number | null;
|
||||
avg: number | null;
|
||||
sum: number;
|
||||
} & (
|
||||
| {
|
||||
min_as_string: string;
|
||||
max_as_string: string;
|
||||
avg_as_string: string;
|
||||
sum_as_string: string;
|
||||
}
|
||||
| {}
|
||||
);
|
||||
stats_bucket: {
|
||||
count: number;
|
||||
min: number | null;
|
||||
max: number | null;
|
||||
avg: number | null;
|
||||
sum: number;
|
||||
};
|
||||
string_stats: {
|
||||
count: number;
|
||||
min_length: number | null;
|
||||
max_length: number | null;
|
||||
avg_length: number | null;
|
||||
entropy: number | null;
|
||||
distribution: Record<string, number>;
|
||||
};
|
||||
sum: {
|
||||
value: number | null;
|
||||
value_as_string?: string;
|
||||
};
|
||||
sum_bucket: {
|
||||
value: number | null;
|
||||
};
|
||||
terms: {
|
||||
doc_count_error_upper_bound: number;
|
||||
sum_other_doc_count: number;
|
||||
buckets: Array<
|
||||
{
|
||||
doc_count: number;
|
||||
key: string | number;
|
||||
} & SubAggregateOf<TAggregationContainer, TDocument>
|
||||
>;
|
||||
};
|
||||
top_hits: {
|
||||
hits: {
|
||||
total: {
|
||||
value: number;
|
||||
relation: 'eq' | 'gte';
|
||||
buckets: Array<
|
||||
{
|
||||
key: string | number;
|
||||
score: number;
|
||||
doc_count: number;
|
||||
bg_count: number;
|
||||
} & SubAggregateOf<TAggregationContainer, TDocument>
|
||||
>;
|
||||
};
|
||||
max_score: number | null;
|
||||
hits: TAggregationContainer extends { top_hits: estypes.AggregationsTopHitsAggregation }
|
||||
? HitsOf<TAggregationContainer['top_hits'], TDocument>
|
||||
: estypes.SearchHitsMetadata<TDocument>;
|
||||
};
|
||||
};
|
||||
top_metrics: {
|
||||
top: Array<{
|
||||
sort: number[] | string[];
|
||||
metrics: Record<TopMetricKeysOf<TAggregationContainer>, string | number | null>;
|
||||
}>;
|
||||
};
|
||||
weighted_avg: { value: number | null };
|
||||
value_count: {
|
||||
value: number;
|
||||
};
|
||||
// t_test: {} not defined
|
||||
})[ValidAggregationKeysOf<TAggregationContainer> & AggregationTypeName];
|
||||
significant_text: {
|
||||
doc_count: number;
|
||||
buckets: Array<{
|
||||
key: string;
|
||||
doc_count: number;
|
||||
score: number;
|
||||
bg_count: number;
|
||||
}>;
|
||||
};
|
||||
stats: {
|
||||
count: number;
|
||||
min: number | null;
|
||||
max: number | null;
|
||||
avg: number | null;
|
||||
sum: number;
|
||||
} & (
|
||||
| {
|
||||
min_as_string: string;
|
||||
max_as_string: string;
|
||||
avg_as_string: string;
|
||||
sum_as_string: string;
|
||||
}
|
||||
| {}
|
||||
);
|
||||
stats_bucket: {
|
||||
count: number;
|
||||
min: number | null;
|
||||
max: number | null;
|
||||
avg: number | null;
|
||||
sum: number;
|
||||
};
|
||||
string_stats: {
|
||||
count: number;
|
||||
min_length: number | null;
|
||||
max_length: number | null;
|
||||
avg_length: number | null;
|
||||
entropy: number | null;
|
||||
distribution: Record<string, number>;
|
||||
};
|
||||
sum: {
|
||||
value: number | null;
|
||||
value_as_string?: string;
|
||||
};
|
||||
sum_bucket: {
|
||||
value: number | null;
|
||||
};
|
||||
terms: {
|
||||
doc_count_error_upper_bound: number;
|
||||
sum_other_doc_count: number;
|
||||
buckets: Array<
|
||||
{
|
||||
doc_count: number;
|
||||
key: string | number;
|
||||
} & SubAggregateOf<TAggregationContainer, TDocument>
|
||||
>;
|
||||
};
|
||||
top_hits: {
|
||||
hits: {
|
||||
total: {
|
||||
value: number;
|
||||
relation: 'eq' | 'gte';
|
||||
};
|
||||
max_score: number | null;
|
||||
hits: TAggregationContainer extends { top_hits: estypes.AggregationsTopHitsAggregation }
|
||||
? HitsOf<TAggregationContainer['top_hits'], TDocument>
|
||||
: estypes.SearchHitsMetadata<TDocument>;
|
||||
};
|
||||
};
|
||||
top_metrics: {
|
||||
top: Array<{
|
||||
sort: number[] | string[];
|
||||
metrics: Record<TopMetricKeysOf<TAggregationContainer>, string | number | null>;
|
||||
}>;
|
||||
};
|
||||
weighted_avg: { value: number | null };
|
||||
value_count: {
|
||||
value: number;
|
||||
};
|
||||
// t_test: {} not defined
|
||||
},
|
||||
Exclude<ValidAggregationKeysOf<TAggregationContainer>, 'aggs' | 'aggregations'> & string
|
||||
>
|
||||
>;
|
||||
|
||||
type AggregateOfMap<TAggregationMap extends AggregationMap | undefined, TDocument> = {
|
||||
[TAggregationName in keyof TAggregationMap]: Required<TAggregationMap>[TAggregationName] extends estypes.AggregationsAggregationContainer
|
||||
[TAggregationName in keyof TAggregationMap]: Required<TAggregationMap>[TAggregationName] extends AggregationsAggregationContainer
|
||||
? AggregateOf<TAggregationMap[TAggregationName], TDocument>
|
||||
: never; // using never means we effectively ignore optional keys, using {} creates a union type of { ... } | {}
|
||||
};
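The long hunks above rework the typed-search helpers (the "searchtypes" fix from the commit message): the generic constraints move from estypes.AggregationsAggregationContainer to a local, deliberately loose AggregationsAggregationContainer that just extends Record<string, any>, and AggregateOf now picks the response shape with ValuesType<Pick<...>> over the map of known aggregations instead of indexing into one large intersection. A toy sketch of that second idea follows; the aggregation names and shapes are invented, and the benefit described is the commit's stated goal rather than something this sketch measures.

import { ValuesType } from 'utility-types';

// Invented aggregation result map, standing in for the real one above.
type ToyAggregationResults = {
  avg: { value: number | null };
  cardinality: { value: number };
  terms: { buckets: Array<{ key: string | number; doc_count: number }> };
};

type ToyRequest = { avg: { field: string } };

// "Before"-style: intersect the map with a Record over every aggregation name,
// then index by the keys present in the request.
type AggregateOfIndexed<TRequest> = (Record<keyof ToyAggregationResults, unknown> &
  ToyAggregationResults)[keyof TRequest & keyof ToyAggregationResults];

// "After"-style: Pick only the requested keys, then take the values of that
// much smaller object type.
type AggregateOfPicked<TRequest> = ValuesType<
  Pick<ToyAggregationResults, keyof TRequest & keyof ToyAggregationResults>
>;

// Both evaluate to { value: number | null } for ToyRequest.
type Before = AggregateOfIndexed<ToyRequest>;
type After = AggregateOfPicked<ToyRequest>;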
@@ -19,7 +19,7 @@ import { useAnomalyDetectionJobsContext } from '../../../context/anomaly_detecti
import { useApmPluginContext } from '../../../context/apm_plugin/use_apm_plugin_context';
import { useLegacyUrlParams } from '../../../context/url_params_context/use_url_params';
import { useLocalStorage } from '../../../hooks/useLocalStorage';
import { useApmParams } from '../../../hooks/use_apm_params';
import { useAnyOfApmParams } from '../../../hooks/use_apm_params';
import { FETCH_STATUS, useFetcher } from '../../../hooks/use_fetcher';
import { useTimeRange } from '../../../hooks/use_time_range';
import { useUpgradeAssistantHref } from '../../shared/Links/kibana';

@@ -46,9 +46,7 @@ function useServicesFetcher() {

const {
query: { rangeFrom, rangeTo, environment, kuery },
} =
// @ts-ignore 4.3.5 upgrade - Type instantiation is excessively deep and possibly infinite.
useApmParams('/services/{serviceName}', '/services');
} = useAnyOfApmParams('/services/{serviceName}', '/services');

const { start, end } = useTimeRange({ rangeFrom, rangeTo });
@@ -16,7 +16,7 @@ import { useLegacyUrlParams } from '../../../context/url_params_context/use_url_
import { APMQueryParams } from '../../shared/Links/url_helpers';
import { CytoscapeContext } from './Cytoscape';
import { getAnimationOptions, getNodeHeight } from './cytoscape_options';
import { useApmParams } from '../../../hooks/use_apm_params';
import { useAnyOfApmParams } from '../../../hooks/use_apm_params';

const ControlsContainer = euiStyled('div')`
left: ${({ theme }) => theme.eui.gutterTypes.gutterMedium};

@@ -107,7 +107,7 @@ export function Controls() {

const {
query: { kuery },
} = useApmParams('/service-map', '/services/{serviceName}/service-map');
} = useAnyOfApmParams('/service-map', '/services/{serviceName}/service-map');

const [zoom, setZoom] = useState((cy && cy.zoom()) || 1);
const duration = parseInt(theme.eui.euiAnimSpeedFast, 10);
@@ -13,7 +13,7 @@ import React from 'react';
import { useUiTracker } from '../../../../../../observability/public';
import { ContentsProps } from '.';
import { NodeStats } from '../../../../../common/service_map';
import { useApmParams } from '../../../../hooks/use_apm_params';
import { useAnyOfApmParams } from '../../../../hooks/use_apm_params';
import { useApmRouter } from '../../../../hooks/use_apm_router';
import { FETCH_STATUS, useFetcher } from '../../../../hooks/use_fetcher';
import { ApmRoutes } from '../../../routing/apm_route_config';

@@ -25,8 +25,7 @@ export function BackendContents({
start,
end,
}: ContentsProps) {
// @ts-ignore 4.3.5 upgrade - Type instantiation is excessively deep and possibly infinite.
const { query } = useApmParams(
const { query } = useAnyOfApmParams(
'/service-map',
'/services/{serviceName}/service-map'
);
@@ -15,7 +15,7 @@ import { useUiTracker } from '../../../../../observability/public';
import { TimeRangeComparisonEnum } from '../../../../common/runtime_types/comparison_type_rt';
import { useLegacyUrlParams } from '../../../context/url_params_context/use_url_params';
import { useApmPluginContext } from '../../../context/apm_plugin/use_apm_plugin_context';
import { useApmParams } from '../../../hooks/use_apm_params';
import { useAnyOfApmParams } from '../../../hooks/use_apm_params';
import { useBreakpoints } from '../../../hooks/use_breakpoints';
import { useTimeRange } from '../../../hooks/use_time_range';
import * as urlHelpers from '../../shared/Links/url_helpers';

@@ -121,8 +121,7 @@ export function TimeComparison() {
const { isSmall } = useBreakpoints();
const {
query: { rangeFrom, rangeTo },
// @ts-expect-error Type instantiation is excessively deep and possibly infinite.
} = useApmParams('/services', '/backends/*', '/services/{serviceName}');
} = useAnyOfApmParams('/services', '/backends/*', '/services/{serviceName}');

const { exactStart, exactEnd } = useTimeRange({
rangeFrom,
@@ -4,42 +4,29 @@
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/

import { ValuesType } from 'utility-types';
import { TypeOf, PathsOf, useParams } from '@kbn/typed-react-router-config';
import { ApmRoutes } from '../components/routing/apm_route_config';

export function useApmParams<TPath extends PathsOf<ApmRoutes>>(
// these three different functions exist purely to speed up completions from
// TypeScript. One overloaded function is expensive because of the size of the
// union type that is created.

export function useMaybeApmParams<TPath extends PathsOf<ApmRoutes>>(
path: TPath,
optional: true
): TypeOf<ApmRoutes, TPath> | undefined;
): TypeOf<ApmRoutes, TPath> | undefined {
return useParams(path, optional);
}

export function useApmParams<TPath extends PathsOf<ApmRoutes>>(
path: TPath
): TypeOf<ApmRoutes, TPath>;

export function useApmParams<
TPath1 extends PathsOf<ApmRoutes>,
TPath2 extends PathsOf<ApmRoutes>
>(
path1: TPath1,
path2: TPath2
): TypeOf<ApmRoutes, TPath1> | TypeOf<ApmRoutes, TPath2>;

export function useApmParams<
TPath1 extends PathsOf<ApmRoutes>,
TPath2 extends PathsOf<ApmRoutes>,
TPath3 extends PathsOf<ApmRoutes>
>(
path1: TPath1,
path2: TPath2,
path3: TPath3
):
| TypeOf<ApmRoutes, TPath1>
| TypeOf<ApmRoutes, TPath2>
| TypeOf<ApmRoutes, TPath3>;

export function useApmParams(
...args: any[]
): TypeOf<ApmRoutes, PathsOf<ApmRoutes>> | undefined {
return useParams(...args);
): TypeOf<ApmRoutes, TPath> {
return useParams(path)!;
}

export function useAnyOfApmParams<TPaths extends Array<PathsOf<ApmRoutes>>>(
...paths: TPaths
): TypeOf<ApmRoutes, ValuesType<TPaths>> {
return useParams(...paths)!;
}
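The rewritten use_apm_params.ts above is the center of the change: the single useApmParams with one-, two-, and three-path overloads, each of which forced TypeScript to materialize a large union over ApmRoutes, is split into useApmParams (exactly one path), useMaybeApmParams (one path that may not match), and useAnyOfApmParams (an explicit small set of paths). A hedged usage sketch follows; the wrapper hook and the import depth are made up, while the hook names and route strings come from the hunks above.

// Hypothetical call site; adjust the relative import to wherever this lives.
import {
  useApmParams,
  useAnyOfApmParams,
  useMaybeApmParams,
} from '../../../hooks/use_apm_params';

export function useExampleParams() {
  // Exactly one route: the single-path hook, typed as TypeOf<ApmRoutes, '/services/{serviceName}'>.
  const {
    query: { rangeFrom, rangeTo },
  } = useApmParams('/services/{serviceName}');

  // A small explicit set of routes: this replaces the old overloaded
  // useApmParams(path1, path2) calls shown at the call sites earlier in the diff.
  const {
    query: { kuery },
  } = useAnyOfApmParams('/service-map', '/services/{serviceName}/service-map');

  // Params for a route that may not be active come back as `... | undefined`.
  const maybeServiceParams = useMaybeApmParams('/services/{serviceName}', true);

  return { rangeFrom, rangeTo, kuery, hasServiceParams: maybeServiceParams !== undefined };
}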
@@ -14,6 +14,8 @@ export function useApmRouter() {
const { core } = useApmPluginContext();

const link = (...args: [any]) => {
// @ts-expect-error router.link() expects never type, because
// no routes are specified. that's okay.
return core.http.basePath.prepend('/app/apm' + router.link(...args));
};
@@ -110,7 +110,6 @@ export async function getErrorGroupMainStatistics({
);

return (
// @ts-ignore 4.3.5 upgrade - Expression produces a union type that is too complex to represent. ts(2590)
response.aggregations?.error_groups.buckets.map((bucket) => ({
groupId: bucket.key as string,
name: getErrorName(bucket.sample.hits.hits[0]._source),
@@ -120,7 +120,6 @@ export async function getServiceAnomalies({
const relevantBuckets = uniqBy(
sortBy(
// make sure we only return data for jobs that are available in this space
// @ts-ignore 4.3.5 upgrade
typedAnomalyResponse.aggregations?.services.buckets.filter((bucket) =>
jobIds.includes(bucket.key.jobId as string)
) ?? [],
@@ -168,7 +168,6 @@ export async function getServiceInstancesTransactionStatistics<
const { timeseries } = serviceNodeBucket;
return {
serviceNodeName,
// @ts-ignore 4.3.5 upgrade - Expression produces a union type that is too complex to represent.
errorRate: timeseries.buckets.map((dateBucket) => ({
x: dateBucket.key,
y: dateBucket.failures.doc_count / dateBucket.doc_count,
@@ -27,7 +27,6 @@ describe('query for signal', () => {
server = serverMock.create();
({ context } = requestContextMock.createTools());

// @ts-expect-error 4.3.5 upgrade
// eslint-disable-next-line @typescript-eslint/no-explicit-any
ruleDataClient.getReader().search.mockResolvedValue(getEmptySignalsResponse() as any);