mirror of
https://github.com/elastic/kibana.git
synced 2025-06-27 18:51:07 -04:00
Update ES client to 7.16-canary.6 (#117305)
* bump client version * bump version to canary 6 * remove unnecessary ts-expect-error in packages * fix errors in src/core * fix errors in data_views * fix errors in actions * fix errors in fleet * fix errors in ilm * fix errors in index_management * mute errors in ML * fix errors in transform * mute errors in infra * fix errors in ingest_pipeline * fix errors in security_solution * fix errors in snapshot_restore * fix errors in upgrade_assistant * fix and mute errors in APM * mute errors in license_management * mute errors in logstash * mute errors in osquery * mute errors in remote clusters * mute errors in task_manager * mute errors in uptime * mute errors in monitoring * mute errors in task_manager * mute errors in triggers_actions_ui * fix errors in rule_registry * fix errors in security * update infra ts-ignore. there are errors on the localhost but no errors on CI * bump to a version with module resolution fix * update test in apm * remove unnecessary ts-expect-error in kbn-es-query Co-authored-by: Kibana Machine <42973632+kibanamachine@users.noreply.github.com>
This commit is contained in:
parent
ad0907593a
commit
bd0ef1eac9
99 changed files with 136 additions and 119 deletions
|
@ -97,7 +97,7 @@
|
|||
"@elastic/apm-rum-react": "^1.3.1",
|
||||
"@elastic/charts": "38.0.2",
|
||||
"@elastic/datemath": "link:bazel-bin/packages/elastic-datemath",
|
||||
"@elastic/elasticsearch": "npm:@elastic/elasticsearch-canary@^7.16.0-canary.4",
|
||||
"@elastic/elasticsearch": "npm:@elastic/elasticsearch-canary@^7.16.0-canary.7",
|
||||
"@elastic/ems-client": "7.16.0",
|
||||
"@elastic/eui": "39.1.2",
|
||||
"@elastic/filesaver": "1.1.2",
|
||||
|
|
|
@ -109,7 +109,6 @@ describe('kuery functions', () => {
|
|||
const node = nodeTypes.function.buildNode('geoBoundingBox', 'geo', params);
|
||||
const result = geoBoundingBox.toElasticsearchQuery(node, indexPattern);
|
||||
|
||||
// @ts-expect-error @elastic/elasticsearch doesn't support ignore_unmapped in QueryDslGeoBoundingBoxQuery
|
||||
expect(result.geo_bounding_box!.ignore_unmapped).toBe(true);
|
||||
});
|
||||
|
||||
|
|
|
@ -53,7 +53,6 @@ export function toElasticsearchQuery(
|
|||
}
|
||||
|
||||
return {
|
||||
// @ts-expect-error @elastic/elasticsearch doesn't support ignore_unmapped in QueryDslGeoBoundingBoxQuery
|
||||
geo_bounding_box: {
|
||||
[fieldName]: queryParams,
|
||||
ignore_unmapped: true,
|
||||
|
|
|
@ -49,7 +49,6 @@ export function toElasticsearchQuery(
|
|||
}
|
||||
|
||||
return {
|
||||
// @ts-expect-error @elastic/elasticsearch doesn't support ignore_unmapped in QueryDslGeoPolygonQuery
|
||||
geo_polygon: {
|
||||
[fieldName]: queryParams,
|
||||
ignore_unmapped: true,
|
||||
|
|
|
@ -12,8 +12,5 @@ export const deletePolicy = async (
|
|||
esClient: ElasticsearchClient,
|
||||
policy: string
|
||||
): Promise<unknown> => {
|
||||
return (
|
||||
// @ts-expect-error policy_id is required by mistake. fixed in the v8.0
|
||||
(await esClient.ilm.deleteLifecycle({ policy })).body
|
||||
);
|
||||
return (await esClient.ilm.deleteLifecycle({ policy })).body;
|
||||
};
|
||||
|
|
|
@ -164,7 +164,7 @@ describe('ElasticIndex', () => {
|
|||
client.tasks.get.mockResolvedValue(
|
||||
elasticsearchClientMock.createSuccessTransportRequestPromise({
|
||||
completed: true,
|
||||
} as estypes.TaskGetResponse)
|
||||
} as estypes.TasksGetResponse)
|
||||
);
|
||||
|
||||
const info = {
|
||||
|
@ -248,7 +248,7 @@ describe('ElasticIndex', () => {
|
|||
reason: 'all shards failed',
|
||||
failed_shards: [],
|
||||
},
|
||||
} as estypes.TaskGetResponse)
|
||||
} as estypes.TasksGetResponse)
|
||||
);
|
||||
|
||||
const info = {
|
||||
|
|
|
@ -450,7 +450,7 @@ function withIndex(
|
|||
client.tasks.get.mockReturnValue(
|
||||
elasticsearchClientMock.createSuccessTransportRequestPromise({
|
||||
completed: true,
|
||||
} as estypes.TaskGetResponse)
|
||||
} as estypes.TasksGetResponse)
|
||||
);
|
||||
client.search.mockReturnValue(
|
||||
elasticsearchClientMock.createSuccessTransportRequestPromise(searchResult(0) as any)
|
||||
|
|
|
@ -242,7 +242,7 @@ const mockV2MigrationOptions = () => {
|
|||
error: undefined,
|
||||
failures: [],
|
||||
task: { description: 'task description' } as any,
|
||||
} as estypes.TaskGetResponse)
|
||||
} as estypes.TasksGetResponse)
|
||||
);
|
||||
|
||||
options.client.search = jest
|
||||
|
|
|
@ -100,7 +100,7 @@ export const createIndex = ({
|
|||
* - acknowledged=true, shards_acknowledged=true, index creation complete
|
||||
*/
|
||||
return Either.right({
|
||||
acknowledged: res.body.acknowledged,
|
||||
acknowledged: Boolean(res.body.acknowledged),
|
||||
shardsAcknowledged: res.body.shards_acknowledged,
|
||||
});
|
||||
})
|
||||
|
|
|
@ -49,7 +49,7 @@ export function getSortingParams(
|
|||
{
|
||||
[sortField]: {
|
||||
order: sortOrder,
|
||||
unmapped_type: rootField.type,
|
||||
unmapped_type: rootField.type as estypes.MappingFieldType,
|
||||
},
|
||||
},
|
||||
],
|
||||
|
@ -73,7 +73,7 @@ export function getSortingParams(
|
|||
{
|
||||
[key]: {
|
||||
order: sortOrder,
|
||||
unmapped_type: field.type,
|
||||
unmapped_type: field.type as estypes.MappingFieldType,
|
||||
},
|
||||
},
|
||||
],
|
||||
|
|
|
@ -259,7 +259,6 @@ export async function updateObjectsSpaces({
|
|||
// @ts-expect-error BulkOperation.retry_on_conflict, BulkOperation.routing. BulkOperation.version, and BulkOperation.version_type are optional
|
||||
bulkOperationParams.push({ update: documentMetadata }, { doc: documentToSave });
|
||||
} else {
|
||||
// @ts-expect-error BulkOperation.retry_on_conflict, BulkOperation.routing. BulkOperation.version, and BulkOperation.version_type are optional
|
||||
bulkOperationParams.push({ delete: documentMetadata });
|
||||
}
|
||||
}
|
||||
|
|
|
@ -116,7 +116,6 @@ export function readFieldCapsResponse(
|
|||
}),
|
||||
{}
|
||||
),
|
||||
// @ts-expect-error
|
||||
metadata_field: capsByType[types[0]].metadata_field,
|
||||
};
|
||||
// This is intentionally using a "hash" and a "push" to be highly optimized with very large indexes
|
||||
|
@ -133,7 +132,6 @@ export function readFieldCapsResponse(
|
|||
searchable: isSearchable,
|
||||
aggregatable: isAggregatable,
|
||||
readFromDocValues: shouldReadFieldFromDocValues(isAggregatable, esType),
|
||||
// @ts-expect-error
|
||||
metadata_field: capsByType[types[0]].metadata_field,
|
||||
};
|
||||
// This is intentionally using a "hash" and a "push" to be highly optimized with very large indexes
|
||||
|
|
|
@ -7,7 +7,7 @@
|
|||
|
||||
import { ApiResponse, estypes } from '@elastic/elasticsearch';
|
||||
|
||||
type ResponseFailures = Array<Pick<estypes.BulkDeleteResponseItem, '_id' | 'status' | 'result'>>;
|
||||
type ResponseFailures = Array<Pick<estypes.BulkResponseItem, '_id' | 'status' | 'result'>>;
|
||||
|
||||
export function extractBulkResponseDeleteFailures(
|
||||
response: ApiResponse<estypes.BulkResponse, unknown>
|
||||
|
|
|
@ -271,6 +271,7 @@ const getWaterfallDuration = (waterfallItems: IWaterfallItem[]) =>
|
|||
|
||||
const getWaterfallItems = (items: TraceAPIResponse['traceDocs']) =>
|
||||
items.map((item) => {
|
||||
// @ts-expect-error processor doesn't exist on Profile
|
||||
const docType = item.processor.event;
|
||||
switch (docType) {
|
||||
case 'span':
|
||||
|
@ -278,7 +279,7 @@ const getWaterfallItems = (items: TraceAPIResponse['traceDocs']) =>
|
|||
case 'transaction':
|
||||
return getTransactionItem(item as Transaction);
|
||||
}
|
||||
});
|
||||
}) as IWaterfallSpanOrTransaction[];
|
||||
|
||||
function reparentSpans(waterfallItems: IWaterfallSpanOrTransaction[]) {
|
||||
// find children that needs to be re-parented and map them to their correct parent id
|
||||
|
|
|
@ -51,6 +51,7 @@ describe('Error count alert', () => {
|
|||
const params = { threshold: 2, windowSize: 5, windowUnit: 'm' };
|
||||
|
||||
services.scopedClusterClient.asCurrentUser.search.mockReturnValue(
|
||||
// @ts-expect-error not full interface
|
||||
elasticsearchClientMock.createSuccessTransportRequestPromise({
|
||||
hits: {
|
||||
hits: [],
|
||||
|
|
|
@ -58,6 +58,7 @@ describe('Transaction error rate alert', () => {
|
|||
});
|
||||
|
||||
services.scopedClusterClient.asCurrentUser.search.mockReturnValue(
|
||||
// @ts-expect-error not full interface
|
||||
elasticsearchClientMock.createSuccessTransportRequestPromise({
|
||||
hits: {
|
||||
hits: [],
|
||||
|
|
|
@ -43,7 +43,9 @@ Object {
|
|||
"size": 1,
|
||||
"sort": Array [
|
||||
Object {
|
||||
"_score": "desc",
|
||||
"_score": Object {
|
||||
"order": "desc",
|
||||
},
|
||||
},
|
||||
Object {
|
||||
"@timestamp": Object {
|
||||
|
|
|
@ -4,7 +4,7 @@
|
|||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import type { estypes } from '@elastic/elasticsearch';
|
||||
import { asMutableArray } from '../../../common/utils/as_mutable_array';
|
||||
import {
|
||||
ERROR_GROUP_ID,
|
||||
|
@ -55,9 +55,9 @@ export async function getErrorGroupSample({
|
|||
},
|
||||
},
|
||||
sort: asMutableArray([
|
||||
{ _score: 'desc' }, // sort by _score first to ensure that errors with transaction.sampled:true ends up on top
|
||||
{ '@timestamp': { order: 'desc' } }, // sort by timestamp to get the most recent error
|
||||
] as const),
|
||||
{ _score: { order: 'desc' as const } }, // sort by _score first to ensure that errors with transaction.sampled:true ends up on top
|
||||
{ '@timestamp': { order: 'desc' as const } }, // sort by timestamp to get the most recent error
|
||||
]) as estypes.SearchSortCombinations[],
|
||||
},
|
||||
};
|
||||
|
||||
|
|
|
@ -66,9 +66,9 @@ export const fetchBooleanFieldStats = async (
|
|||
termFilters
|
||||
);
|
||||
const { body } = await esClient.search(request);
|
||||
const aggregations = body.aggregations as {
|
||||
const aggregations = body.aggregations as unknown as {
|
||||
sample: {
|
||||
sampled_value_count: estypes.AggregationsFiltersBucketItemKeys;
|
||||
sampled_value_count: estypes.AggregationsFiltersBucketItem;
|
||||
sampled_values: estypes.AggregationsTermsAggregate<TopValueBucket>;
|
||||
};
|
||||
};
|
||||
|
|
|
@ -66,7 +66,7 @@ export const fetchKeywordFieldStats = async (
|
|||
termFilters
|
||||
);
|
||||
const { body } = await esClient.search(request);
|
||||
const aggregations = body.aggregations as {
|
||||
const aggregations = body.aggregations as unknown as {
|
||||
sample: {
|
||||
sampled_top: estypes.AggregationsTermsAggregate<TopValueBucket>;
|
||||
};
|
||||
|
|
|
@ -87,7 +87,7 @@ export const fetchNumericFieldStats = async (
|
|||
);
|
||||
const { body } = await esClient.search(request);
|
||||
|
||||
const aggregations = body.aggregations as {
|
||||
const aggregations = body.aggregations as unknown as {
|
||||
sample: {
|
||||
sampled_top: estypes.AggregationsTermsAggregate<TopValueBucket>;
|
||||
sampled_percentiles: estypes.AggregationsHdrPercentilesAggregate;
|
||||
|
|
|
@ -36,7 +36,6 @@ const mappings: Mappings = {
|
|||
dynamic_templates: [
|
||||
{
|
||||
// force string to keyword (instead of default of text + keyword)
|
||||
// @ts-expect-error @elastic/elasticsearch expects here mapping: MappingPropertyBase
|
||||
strings: {
|
||||
match_mapping_type: 'string',
|
||||
mapping: {
|
||||
|
|
|
@ -315,11 +315,11 @@ export async function bulkUpdateAgents(
|
|||
});
|
||||
|
||||
return {
|
||||
items: res.body.items.map((item: estypes.BulkResponseItemContainer) => ({
|
||||
// @ts-expect-error ErrorCause is not assignable to Error
|
||||
items: res.body.items.map((item) => ({
|
||||
id: item.update!._id as string,
|
||||
success: !item.update!.error,
|
||||
// @ts-expect-error ErrorCause is not assignable to Error
|
||||
error: item.update!.error as Error,
|
||||
error: item.update!.error,
|
||||
})),
|
||||
};
|
||||
}
|
||||
|
|
|
@ -192,7 +192,6 @@ function putComponentTemplate(
|
|||
const { name, body, create = false } = params;
|
||||
return {
|
||||
clusterPromise: esClient.cluster.putComponentTemplate(
|
||||
// @ts-expect-error body is missing required key `settings`. TemplateMapEntry has settings *or* mappings
|
||||
{ name, body, create },
|
||||
{ ignore: [404] }
|
||||
),
|
||||
|
|
|
@ -212,12 +212,12 @@ export interface IndexLifecyclePolicy {
|
|||
index: string;
|
||||
managed: boolean;
|
||||
action?: string;
|
||||
action_time_millis?: number;
|
||||
age?: string;
|
||||
action_time_millis?: string | number;
|
||||
age?: string | number;
|
||||
failed_step?: string;
|
||||
failed_step_retry_count?: number;
|
||||
is_auto_retryable_error?: boolean;
|
||||
lifecycle_date_millis?: number;
|
||||
lifecycle_date_millis?: string | number;
|
||||
phase?: string;
|
||||
phase_execution?: {
|
||||
policy: string;
|
||||
|
|
|
@ -32,10 +32,10 @@ const indexLifecycleDataEnricher = async (
|
|||
index: '*',
|
||||
});
|
||||
|
||||
// @ts-expect-error Property 'phase_definition' is missing in type 'IlmExplainLifecycleLifecycleExplainPhaseExecution'
|
||||
return indicesList.map((index: IndexWithoutIlm) => {
|
||||
return {
|
||||
...index,
|
||||
// @ts-expect-error @elastic/elasticsearch https://github.com/elastic/elasticsearch-specification/issues/531
|
||||
ilm: { ...(ilmIndicesData[index.name] || {}) },
|
||||
};
|
||||
});
|
||||
|
|
|
@ -17,7 +17,6 @@ async function deletePolicies(client: ElasticsearchClient, policyName: string):
|
|||
ignore: [404],
|
||||
};
|
||||
|
||||
// @ts-expect-error @elastic/elasticsearch DeleteSnapshotLifecycleRequest.policy_id is required
|
||||
return client.ilm.deleteLifecycle({ policy: policyName }, options);
|
||||
}
|
||||
|
||||
|
|
|
@ -54,7 +54,6 @@ async function fetchIndicesCall(
|
|||
aliases: aliases.length ? aliases : 'none',
|
||||
// @ts-expect-error @elastic/elasticsearch https://github.com/elastic/elasticsearch-specification/issues/532
|
||||
hidden: index.settings.index.hidden === 'true',
|
||||
// @ts-expect-error @elastic/elasticsearch https://github.com/elastic/elasticsearch-specification/issues/532
|
||||
data_stream: index.data_stream!,
|
||||
});
|
||||
}
|
||||
|
|
|
@ -54,7 +54,6 @@ export const saveTemplate = async ({
|
|||
|
||||
return await client.asCurrentUser.indices.putTemplate({
|
||||
name: template.name,
|
||||
// @ts-expect-error @elastic/elasticsearch https://github.com/elastic/elasticsearch-specification/issues/533
|
||||
order,
|
||||
include_type_name,
|
||||
body: {
|
||||
|
|
|
@ -4,7 +4,7 @@
|
|||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import type { estypes } from '@elastic/elasticsearch';
|
||||
import { schema, TypeOf } from '@kbn/config-schema';
|
||||
|
||||
import { RouteDependencies } from '../../../types';
|
||||
|
@ -31,7 +31,7 @@ export function registerSimulateRoute({ router, lib: { handleEsError } }: RouteD
|
|||
// Issue: https://github.com/elastic/elasticsearch/issues/59152
|
||||
index_patterns: ['a_fake_index_pattern_that_wont_match_any_indices'],
|
||||
},
|
||||
});
|
||||
} as estypes.IndicesSimulateTemplateRequest);
|
||||
|
||||
return response.ok({ body: templatePreview });
|
||||
} catch (error) {
|
||||
|
|
|
@ -54,7 +54,7 @@ describe('ExpressionChart', () => {
|
|||
metricAlias: 'metricbeat-*',
|
||||
inventoryDefaultView: 'host',
|
||||
metricsExplorerDefaultView: 'host',
|
||||
|
||||
// @ts-ignore message is missing
|
||||
fields: {
|
||||
timestamp: '@timestamp',
|
||||
container: 'container.id',
|
||||
|
|
|
@ -10,6 +10,7 @@ import { InfraWaffleMapOptions, InfraFormatterType } from '../../../../lib/lib';
|
|||
import { SnapshotMetricType } from '../../../../../common/inventory_models/types';
|
||||
|
||||
const options: InfraWaffleMapOptions = {
|
||||
// @ts-ignore message is missing
|
||||
fields: {
|
||||
container: 'container.id',
|
||||
pod: 'kubernetes.pod.uid',
|
||||
|
|
|
@ -10,8 +10,8 @@ import { fold, map } from 'fp-ts/lib/Either';
|
|||
import { constant, identity } from 'fp-ts/lib/function';
|
||||
import { pipe } from 'fp-ts/lib/pipeable';
|
||||
import * as runtimeTypes from 'io-ts';
|
||||
import { compact } from 'lodash';
|
||||
import { JsonArray } from '@kbn/utility-types';
|
||||
import { compact } from 'lodash';
|
||||
import type { InfraPluginRequestHandlerContext } from '../../../types';
|
||||
import {
|
||||
LogEntriesAdapter,
|
||||
|
@ -46,7 +46,7 @@ export class InfraKibanaLogEntriesAdapter implements LogEntriesAdapter {
|
|||
const highlightClause = highlightQuery
|
||||
? {
|
||||
highlight: {
|
||||
boundary_scanner: 'word',
|
||||
boundary_scanner: 'word' as const,
|
||||
fields: fields.reduce(
|
||||
(highlightFieldConfigs, fieldName) => ({
|
||||
...highlightFieldConfigs,
|
||||
|
|
|
@ -5,9 +5,9 @@
|
|||
* 2.0.
|
||||
*/
|
||||
|
||||
import { mapValues, first, last, isNaN, isNumber, isObject, has } from 'lodash';
|
||||
import moment from 'moment';
|
||||
import { ElasticsearchClient } from 'kibana/server';
|
||||
import { mapValues, first, last, isNaN, isNumber, isObject, has } from 'lodash';
|
||||
import {
|
||||
isTooManyBucketsPreviewException,
|
||||
TOO_MANY_BUCKETS_PREVIEW_EXCEPTION,
|
||||
|
@ -222,6 +222,7 @@ const getMetric: (
|
|||
return groupedResults;
|
||||
}
|
||||
const { body: result } = await esClient.search({
|
||||
// @ts-expect-error buckets_path is not compatible with @elastic/elasticsearch
|
||||
body: searchBody,
|
||||
index,
|
||||
});
|
||||
|
|
|
@ -85,11 +85,13 @@ export const initLogSourceConfigurationRoutes = ({ framework, sources }: InfraBa
|
|||
? sources.updateSourceConfiguration(
|
||||
requestContext.core.savedObjects.client,
|
||||
sourceId,
|
||||
// @ts-ignore
|
||||
patchedSourceConfigurationProperties
|
||||
)
|
||||
: sources.createSourceConfiguration(
|
||||
requestContext.core.savedObjects.client,
|
||||
sourceId,
|
||||
// @ts-ignore
|
||||
patchedSourceConfigurationProperties
|
||||
));
|
||||
|
||||
|
|
|
@ -88,11 +88,13 @@ export const initMetricsSourceConfigurationRoutes = (libs: InfraBackendLibs) =>
|
|||
? sources.updateSourceConfiguration(
|
||||
requestContext.core.savedObjects.client,
|
||||
sourceId,
|
||||
// @ts-ignore
|
||||
patchedSourceConfigurationProperties
|
||||
)
|
||||
: sources.createSourceConfiguration(
|
||||
requestContext.core.savedObjects.client,
|
||||
sourceId,
|
||||
// @ts-ignore
|
||||
patchedSourceConfigurationProperties
|
||||
));
|
||||
|
||||
|
|
|
@ -113,7 +113,6 @@ export const logEntriesSearchStrategyProvider = ({
|
|||
messageFormattingRules,
|
||||
]): IEsSearchRequest => {
|
||||
return {
|
||||
// @ts-expect-error @elastic/elasticsearch declares indices_boost as Record<string, number>
|
||||
params: createGetLogEntriesQuery(
|
||||
indices,
|
||||
params.startTimestamp,
|
||||
|
|
|
@ -84,7 +84,6 @@ export const logEntrySearchStrategyProvider = ({
|
|||
tiebreakerField,
|
||||
runtimeMappings,
|
||||
}): IEsSearchRequest => ({
|
||||
// @ts-expect-error `Field` is not assignable to `SearchRequest.docvalue_fields`
|
||||
params: createGetLogEntryQuery(
|
||||
indices,
|
||||
params.logEntryId,
|
||||
|
|
|
@ -53,7 +53,6 @@ export const createGetLogEntriesQuery = (
|
|||
},
|
||||
},
|
||||
fields,
|
||||
// @ts-expect-error @elastic/elasticsearch doesn't declare "runtime_mappings" property
|
||||
runtime_mappings: runtimeMappings,
|
||||
_source: false,
|
||||
...createSortClause(sortDirection, timestampField, tiebreakerField),
|
||||
|
@ -87,7 +86,7 @@ const createHighlightClause = (highlightQuery: JsonObject | undefined, fields: s
|
|||
highlightQuery
|
||||
? {
|
||||
highlight: {
|
||||
boundary_scanner: 'word',
|
||||
boundary_scanner: 'word' as const,
|
||||
fields: fields.reduce(
|
||||
(highlightFieldConfigs, fieldName) => ({
|
||||
...highlightFieldConfigs,
|
||||
|
|
|
@ -32,7 +32,6 @@ export const createGetLogEntryQuery = (
|
|||
},
|
||||
},
|
||||
fields: ['*'],
|
||||
// @ts-expect-error @elastic/elasticsearch doesn't declare "runtime_mappings" property
|
||||
runtime_mappings: runtimeMappings,
|
||||
sort: [{ [timestampField]: 'desc' }, { [tiebreakerField]: 'desc' }],
|
||||
_source: false,
|
||||
|
|
|
@ -41,6 +41,7 @@ export const registerPrivilegesRoute = ({ router, config }: RouteDependencies) =
|
|||
const {
|
||||
body: { has_all_requested: hasAllPrivileges, cluster },
|
||||
} = await clusterClient.asCurrentUser.security.hasPrivileges({
|
||||
// @ts-expect-error SecurityClusterPrivilege doesn't contain all possible priviledges
|
||||
body: { cluster: APP_CLUSTER_REQUIRED_PRIVILEGES },
|
||||
});
|
||||
|
||||
|
|
|
@ -38,7 +38,7 @@ export const registerSimulateRoute = ({
|
|||
verbose,
|
||||
body: {
|
||||
pipeline,
|
||||
docs: documents as estypes.IngestSimulatePipelineDocument[],
|
||||
docs: documents as estypes.IngestSimulateDocument[],
|
||||
},
|
||||
});
|
||||
|
||||
|
|
|
@ -27,6 +27,7 @@ export async function getPermissions({ isSecurityEnabled, client }: GetPermissio
|
|||
};
|
||||
|
||||
try {
|
||||
// @ts-expect-error SecurityClusterPrivilege doesn't contain all possible priviledges
|
||||
const { body: response } = await client.asCurrentUser.security.hasPrivileges(options);
|
||||
return {
|
||||
hasPermission: response.cluster.manage,
|
||||
|
|
|
@ -47,6 +47,7 @@ export function registerPipelineSaveRoute(
|
|||
|
||||
await client.asCurrentUser.logstash.putPipeline({
|
||||
id: pipeline.id,
|
||||
// @ts-expect-error description is required
|
||||
body: pipeline.upstreamJSON,
|
||||
});
|
||||
|
||||
|
|
|
@ -88,7 +88,7 @@ const MlAnomalyAlertTrigger: FC<MlAnomalyAlertTriggerProps> = ({
|
|||
const availableResultTypes = useMemo(() => {
|
||||
if (jobConfigs.length === 0) return Object.values(ANOMALY_RESULT_TYPE);
|
||||
|
||||
return (jobConfigs ?? []).some((v) => v.analysis_config.influencers.length > 0)
|
||||
return (jobConfigs ?? []).some((v) => v.analysis_config.influencers!.length > 0)
|
||||
? Object.values(ANOMALY_RESULT_TYPE)
|
||||
: [ANOMALY_RESULT_TYPE.BUCKET, ANOMALY_RESULT_TYPE.RECORD];
|
||||
}, [jobConfigs]);
|
||||
|
|
|
@ -26,7 +26,7 @@ export function chartLoaderProvider(mlResultsService: MlResultsService) {
|
|||
const resp = await mlResultsService.getEventRateData(
|
||||
job.datafeed_config.indices.join(),
|
||||
job.datafeed_config.query,
|
||||
job.data_description.time_field,
|
||||
job.data_description.time_field!,
|
||||
job.data_counts.earliest_record_timestamp,
|
||||
job.data_counts.latest_record_timestamp,
|
||||
intervalMs,
|
||||
|
|
|
@ -83,7 +83,7 @@ export class JobCreator {
|
|||
this._calendars = [];
|
||||
this._datafeed_config = createEmptyDatafeed(this._indexPatternTitle);
|
||||
this._detectors = this._job_config.analysis_config.detectors;
|
||||
this._influencers = this._job_config.analysis_config.influencers;
|
||||
this._influencers = this._job_config.analysis_config.influencers!;
|
||||
|
||||
if (typeof indexPattern.timeFieldName === 'string') {
|
||||
this._job_config.data_description.time_field = indexPattern.timeFieldName;
|
||||
|
@ -370,7 +370,7 @@ export class JobCreator {
|
|||
}
|
||||
|
||||
public get timeFieldName(): string {
|
||||
return this._job_config.data_description.time_field;
|
||||
return this._job_config.data_description.time_field!;
|
||||
}
|
||||
|
||||
public set timeFieldName(fieldName: string) {
|
||||
|
@ -766,7 +766,7 @@ export class JobCreator {
|
|||
this._datafeed_config = datafeed;
|
||||
|
||||
this._detectors = this._job_config.analysis_config.detectors;
|
||||
this._influencers = this._job_config.analysis_config.influencers;
|
||||
this._influencers = this._job_config.analysis_config.influencers!;
|
||||
if (this._job_config.groups === undefined) {
|
||||
this._job_config.groups = [];
|
||||
}
|
||||
|
|
|
@ -72,7 +72,7 @@ export class SingleMetricJobCreator extends JobCreator {
|
|||
delete this._datafeed_config.aggregations;
|
||||
|
||||
const functionName = this._aggs[0].dslName;
|
||||
const timeField = this._job_config.data_description.time_field;
|
||||
const timeField = this._job_config.data_description.time_field!;
|
||||
|
||||
const duration = parseInterval(this._job_config.analysis_config.bucket_span, true);
|
||||
if (duration === null) {
|
||||
|
|
|
@ -142,7 +142,7 @@ export const useModelMemoryEstimator = (
|
|||
analysisConfig: jobCreator.jobConfig.analysis_config,
|
||||
indexPattern: jobCreator.indexPatternTitle,
|
||||
query: jobCreator.datafeedConfig.query,
|
||||
timeFieldName: jobCreator.jobConfig.data_description.time_field,
|
||||
timeFieldName: jobCreator.jobConfig.data_description.time_field!,
|
||||
earliestMs: jobCreator.start,
|
||||
latestMs: jobCreator.end,
|
||||
});
|
||||
|
|
|
@ -50,7 +50,7 @@ export class AnomalyDetectorService {
|
|||
}
|
||||
const influencers = new Set<string>();
|
||||
for (const job of jobs) {
|
||||
for (const influencer of job.analysis_config.influencers) {
|
||||
for (const influencer of job.analysis_config.influencers!) {
|
||||
influencers.add(influencer);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -260,7 +260,7 @@ export class AnomalyExplorerChartsService {
|
|||
detector.function === ML_JOB_AGGREGATION.LAT_LONG
|
||||
? ML_JOB_AGGREGATION.LAT_LONG
|
||||
: mlFunctionToESAggregation(detector.function),
|
||||
timeField: job.data_description.time_field,
|
||||
timeField: job.data_description.time_field!,
|
||||
interval: job.analysis_config.bucket_span,
|
||||
datafeedConfig: job.datafeed_config,
|
||||
summaryCountFieldName: job.analysis_config.summary_count_field_name,
|
||||
|
|
|
@ -89,9 +89,10 @@ const cardinalityCheckProvider = (client: IScopedClusterClient) => {
|
|||
new Set<string>()
|
||||
);
|
||||
|
||||
// @ts-expect-error influencers is always an array
|
||||
const normalizedInfluencers: estypes.Field[] = Array.isArray(influencers)
|
||||
? influencers
|
||||
: [influencers];
|
||||
? influencers!
|
||||
: [influencers!];
|
||||
const maxBucketFieldCardinalities = normalizedInfluencers.filter(
|
||||
(influencerField) =>
|
||||
!!influencerField &&
|
||||
|
|
|
@ -1127,7 +1127,7 @@ export class DataRecognizer {
|
|||
// set the fallback range for all jobs
|
||||
// as there may not be a common query, we use a match_all
|
||||
const { start: fallbackStart, end: fallbackEnd } = await this._getFallbackTimeRange(
|
||||
firstJobTimeField,
|
||||
firstJobTimeField!,
|
||||
{ match_all: {} }
|
||||
);
|
||||
start = fallbackStart;
|
||||
|
@ -1139,7 +1139,7 @@ export class DataRecognizer {
|
|||
let latestMs = end;
|
||||
if (earliestMs === undefined || latestMs === undefined) {
|
||||
const timeFieldRange = await this._getFallbackTimeRange(
|
||||
job.config.data_description.time_field,
|
||||
job.config.data_description.time_field!,
|
||||
query
|
||||
);
|
||||
earliestMs = timeFieldRange.start;
|
||||
|
@ -1150,7 +1150,7 @@ export class DataRecognizer {
|
|||
job.config.analysis_config,
|
||||
this._indexPatternName,
|
||||
query,
|
||||
job.config.data_description.time_field,
|
||||
job.config.data_description.time_field!,
|
||||
earliestMs,
|
||||
latestMs
|
||||
);
|
||||
|
|
|
@ -113,7 +113,7 @@ class FieldsService {
|
|||
this._mlClusterClient,
|
||||
this._dataViewsService
|
||||
);
|
||||
const rollupConfigs: estypes.RollupGetRollupCapabilitiesRollupCapabilitySummary[] | null =
|
||||
const rollupConfigs: estypes.RollupGetRollupCapsRollupCapabilitySummary[] | null =
|
||||
await rollupService.getRollupJobs();
|
||||
|
||||
// if a rollup index has been specified, yet there are no
|
||||
|
@ -137,7 +137,7 @@ class FieldsService {
|
|||
}
|
||||
|
||||
function combineAllRollupFields(
|
||||
rollupConfigs: estypes.RollupGetRollupCapabilitiesRollupCapabilitySummary[]
|
||||
rollupConfigs: estypes.RollupGetRollupCapsRollupCapabilitySummary[]
|
||||
): RollupFields {
|
||||
const rollupFields: RollupFields = {};
|
||||
rollupConfigs.forEach((conf) => {
|
||||
|
|
|
@ -29,7 +29,7 @@ export async function rollupServiceProvider(
|
|||
let jobIndexPatterns: string[] = [indexPattern];
|
||||
|
||||
async function getRollupJobs(): Promise<
|
||||
estypes.RollupGetRollupCapabilitiesRollupCapabilitySummary[] | null
|
||||
estypes.RollupGetRollupCapsRollupCapabilitySummary[] | null
|
||||
> {
|
||||
if (
|
||||
rollupIndexPatternObject !== null &&
|
||||
|
|
|
@ -69,7 +69,7 @@ export async function validateJob(
|
|||
const timeField = job.data_description.time_field;
|
||||
const timeRange = await fs.getTimeFieldRange(
|
||||
index,
|
||||
timeField,
|
||||
timeField!,
|
||||
job.datafeed_config.query,
|
||||
job.datafeed_config.runtime_mappings,
|
||||
job.datafeed_config.indices_options
|
||||
|
|
|
@ -17,7 +17,7 @@ export async function validateInfluencers(job: CombinedJob) {
|
|||
validateJobObject(job);
|
||||
|
||||
const messages = [];
|
||||
const influencers = job.analysis_config.influencers;
|
||||
const influencers = job.analysis_config.influencers!;
|
||||
|
||||
const detectorFieldNames: string[] = [];
|
||||
job.analysis_config.detectors.forEach((d) => {
|
||||
|
|
|
@ -63,7 +63,7 @@ export async function validateModelMemoryLimit(
|
|||
job.analysis_config,
|
||||
job.datafeed_config.indices.join(','),
|
||||
job.datafeed_config.query,
|
||||
job.data_description.time_field,
|
||||
job.data_description.time_field!,
|
||||
duration!.start as number,
|
||||
duration!.end as number,
|
||||
true,
|
||||
|
|
|
@ -29,7 +29,7 @@ const MIN_TIME_SPAN_READABLE = '2 hours';
|
|||
|
||||
export async function isValidTimeField({ asCurrentUser }: IScopedClusterClient, job: CombinedJob) {
|
||||
const index = job.datafeed_config.indices.join(',');
|
||||
const timeField = job.data_description.time_field;
|
||||
const timeField = job.data_description.time_field!;
|
||||
|
||||
// check if time_field is of type 'date' or 'date_nanos'
|
||||
const { body: fieldCaps } = await asCurrentUser.fieldCaps({
|
||||
|
|
|
@ -638,7 +638,7 @@ export function resultsServiceProvider(mlClient: MlClient, client?: IScopedClust
|
|||
}
|
||||
|
||||
const jobConfig = jobsResponse.jobs[0];
|
||||
const timefield = jobConfig.data_description.time_field;
|
||||
const timefield = jobConfig.data_description.time_field!;
|
||||
const bucketSpan = jobConfig.analysis_config.bucket_span;
|
||||
|
||||
if (datafeedConfig === undefined) {
|
||||
|
@ -708,6 +708,7 @@ export function resultsServiceProvider(mlClient: MlClient, client?: IScopedClust
|
|||
const [bucketResp, annotationResp, { body: modelSnapshotsResp }] = await Promise.all([
|
||||
mlClient.getBuckets({
|
||||
job_id: jobId,
|
||||
// @ts-expect-error page is not defined on body
|
||||
body: { desc: true, start: String(start), end: String(end), page: { from: 0, size: 1000 } },
|
||||
}),
|
||||
getAnnotations({
|
||||
|
|
|
@ -221,6 +221,7 @@ export function jobRoutes({ router, routeGuard }: RouteInitialization) {
|
|||
const { jobId } = request.params;
|
||||
const { body } = await mlClient.updateJob({
|
||||
job_id: jobId,
|
||||
// @ts-expect-error detectors cannot be undefined
|
||||
body: request.body,
|
||||
});
|
||||
return response.ok({
|
||||
|
|
|
@ -115,7 +115,7 @@ export function registerCollector(usageCollection: UsageCollectionSetup, kibanaI
|
|||
},
|
||||
});
|
||||
|
||||
const aggResponse = result.body.aggregations as {
|
||||
const aggResponse = result.body.aggregations as unknown as {
|
||||
count_by_result_type: {
|
||||
buckets: Array<{
|
||||
key: AnomalyResultType;
|
||||
|
|
|
@ -63,7 +63,7 @@ const checkLatestMonitoringIsLegacy = async (context: RequestHandlerContext, ind
|
|||
|
||||
const {
|
||||
types: { buckets },
|
||||
} = aggregations as { types: { buckets: Array<{ key: string }> } };
|
||||
} = aggregations as unknown as { types: { buckets: Array<{ key: string }> } };
|
||||
counts.mbIndicesCount = buckets.filter(({ key }: { key: string }) => key.includes('-mb-')).length;
|
||||
|
||||
counts.legacyIndicesCount = buckets.length - counts.mbIndicesCount;
|
||||
|
|
|
@ -81,6 +81,7 @@ describe('processAggregations', () => {
|
|||
],
|
||||
},
|
||||
};
|
||||
// @ts-expect-error input doesn't implement a full interface
|
||||
const { platforms, policies, overlap } = processAggregations(input);
|
||||
expect(platforms).toEqual([
|
||||
{
|
||||
|
@ -107,6 +108,7 @@ describe('processAggregations', () => {
|
|||
],
|
||||
},
|
||||
};
|
||||
// @ts-expect-error input doesn't implement a full interface
|
||||
const { platforms, policies, overlap } = processAggregations(input);
|
||||
expect(platforms).toEqual([]);
|
||||
expect(policies).toEqual([
|
||||
|
@ -158,6 +160,7 @@ describe('processAggregations', () => {
|
|||
],
|
||||
},
|
||||
};
|
||||
// @ts-expect-error input doesn't implement a full interface
|
||||
const { platforms, policies, overlap } = processAggregations(input);
|
||||
expect(platforms).toEqual([
|
||||
{
|
||||
|
|
|
@ -21,6 +21,7 @@ describe('extractBeatUsageMetrics', () => {
|
|||
expect(
|
||||
extractBeatUsageMetrics({
|
||||
aggregations: {
|
||||
// @ts-expect-error not full interface
|
||||
lastDay: {
|
||||
max_rss: {
|
||||
value: 1,
|
||||
|
@ -42,6 +43,7 @@ describe('extractBeatUsageMetrics', () => {
|
|||
expect(
|
||||
extractBeatUsageMetrics({
|
||||
aggregations: {
|
||||
// @ts-expect-error not full interface
|
||||
lastDay: {
|
||||
max_rss: {
|
||||
value: 1,
|
||||
|
|
|
@ -164,6 +164,7 @@ describe('UPDATE remote clusters', () => {
|
|||
|
||||
test('updates v1 proxy cluster', async () => {
|
||||
remoteInfoMockFn.mockResolvedValueOnce(
|
||||
// @ts-expect-error not full interface
|
||||
createApiResponse({
|
||||
body: {
|
||||
test: {
|
||||
|
|
|
@ -311,8 +311,6 @@ export class ResourceInstaller {
|
|||
settings: {
|
||||
'index.lifecycle': {
|
||||
name: ilmPolicyName,
|
||||
// TODO: fix the types in the ES package, they don't include rollover_alias???
|
||||
// @ts-expect-error
|
||||
rollover_alias: primaryNamespacedAlias,
|
||||
},
|
||||
},
|
||||
|
@ -399,7 +397,7 @@ export class ResourceInstaller {
|
|||
return clusterClient.cluster.putComponentTemplate(template);
|
||||
}
|
||||
|
||||
private async createOrUpdateIndexTemplate(template: estypes.IndicesPutIndexTemplateRequest) {
|
||||
private async createOrUpdateIndexTemplate(template: estypes.IndicesSimulateTemplateRequest) {
|
||||
const { logger, getClusterClient } = this.options;
|
||||
const clusterClient = await getClusterClient();
|
||||
|
||||
|
@ -414,6 +412,7 @@ export class ResourceInstaller {
|
|||
);
|
||||
}
|
||||
|
||||
// @ts-expect-error estypes.IndicesSimulateTemplateRequest.name is option but requied in estypes.IndicesPutIndexTemplateRequest
|
||||
return clusterClient.indices.putIndexTemplate(template);
|
||||
}
|
||||
|
||||
|
|
|
@ -60,7 +60,9 @@ export function checkPrivilegesWithRequestFactory(
|
|||
const clusterClient = await getClusterClient();
|
||||
const { body } = await clusterClient.asScoped(request).asCurrentUser.security.hasPrivileges({
|
||||
body: {
|
||||
// @ts-expect-error SecurityClusterPrivilege doesn't contain all possible priviledges
|
||||
cluster: privileges.elasticsearch?.cluster,
|
||||
// @ts-expect-error SecurityIndexPrivilege doesn't contain all possible priviledges
|
||||
index: Object.entries(privileges.elasticsearch?.index ?? {}).map(
|
||||
([name, indexPrivileges]) => ({
|
||||
names: [name],
|
||||
|
|
|
@ -17,11 +17,11 @@ describe('getSignalVersionsByIndex', () => {
|
|||
|
||||
it('properly transforms the elasticsearch aggregation', async () => {
|
||||
esClient.search.mockResolvedValueOnce({
|
||||
// @ts-expect-error mocking only what we need
|
||||
body: {
|
||||
aggregations: {
|
||||
signals_indices: {
|
||||
buckets: [
|
||||
// @ts-expect-error mocking only what we need
|
||||
{
|
||||
key: 'index1',
|
||||
signal_versions: {
|
||||
|
|
|
@ -72,7 +72,7 @@ export const getSignalVersionsByIndex = async ({
|
|||
},
|
||||
});
|
||||
|
||||
const aggs = response.body.aggregations as SignalVersionsAggResponse['aggregations'];
|
||||
const aggs = response.body.aggregations as unknown as SignalVersionsAggResponse['aggregations'];
|
||||
const indexBuckets = aggs.signals_indices.buckets;
|
||||
|
||||
return index.reduce<SignalVersionsByIndex>((agg, _index) => {
|
||||
|
|
|
@ -71,6 +71,6 @@ export const getSignalsIndicesInRange = async ({
|
|||
},
|
||||
});
|
||||
|
||||
const aggs = response.body.aggregations as IndexesResponse['aggregations'];
|
||||
const aggs = response.body.aggregations as unknown as IndexesResponse['aggregations'];
|
||||
return aggs.indexes.buckets.map((bucket) => bucket.key);
|
||||
};
|
||||
|
|
|
@ -179,7 +179,7 @@ export interface ThreatEnrichment {
|
|||
}
|
||||
|
||||
export interface SortWithTieBreaker {
|
||||
[key: string]: string;
|
||||
[key: string]: 'asc' | 'desc';
|
||||
}
|
||||
|
||||
export interface ThreatMatchNamedQuery {
|
||||
|
|
|
@ -32,6 +32,7 @@ describe('transformThresholdNormalizedResultsToEcs', () => {
|
|||
aggregations: {
|
||||
'threshold_0:source.ip': {
|
||||
buckets: [
|
||||
// @ts-expect-error not full primary_interface
|
||||
{
|
||||
key: '127.0.0.1',
|
||||
doc_count: 15,
|
||||
|
@ -141,6 +142,7 @@ describe('transformThresholdNormalizedResultsToEcs', () => {
|
|||
aggregations: {
|
||||
'threshold_0:source.ip': {
|
||||
buckets: [
|
||||
// @ts-expect-error not full interface
|
||||
{
|
||||
key: '127.0.0.1',
|
||||
doc_count: 15,
|
||||
|
@ -203,6 +205,7 @@ describe('transformThresholdNormalizedResultsToEcs', () => {
|
|||
aggregations: {
|
||||
threshold_0: {
|
||||
buckets: [
|
||||
// @ts-expect-error not full interface
|
||||
{
|
||||
key: '',
|
||||
doc_count: 15,
|
||||
|
|
|
@ -24,7 +24,6 @@ import { getHttpEdges } from './helpers';
|
|||
import { buildHttpQuery } from './query.http_network.dsl';
|
||||
|
||||
export const networkHttp: SecuritySolutionFactory<NetworkQueries.http> = {
|
||||
// @ts-expect-error dns_name_query_count is not conpatible with @elastic/elasticsearch
|
||||
buildDsl: (options: NetworkHttpRequestOptions) => {
|
||||
if (options.pagination && options.pagination.querySize >= DEFAULT_MAX_TABLE_QUERY_SIZE) {
|
||||
throw new Error(`No query size above ${DEFAULT_MAX_TABLE_QUERY_SIZE}`);
|
||||
|
|
|
@ -53,6 +53,7 @@ export function registerAppRoutes({
|
|||
body: { has_all_requested: hasAllPrivileges, cluster },
|
||||
} = await clusterClient.asCurrentUser.security.hasPrivileges({
|
||||
body: {
|
||||
// @ts-expect-error SecurityClusterPrivilege doesn't contain all possible priviledges
|
||||
cluster: [...APP_REQUIRED_CLUSTER_PRIVILEGES, ...APP_SLM_CLUSTER_PRIVILEGES],
|
||||
},
|
||||
});
|
||||
|
@ -73,6 +74,7 @@ export function registerAppRoutes({
|
|||
}
|
||||
|
||||
const indexHasAllPrivileges = APP_RESTORE_INDEX_PRIVILEGES.every((privilege) =>
|
||||
// @ts-expect-error SecurityIndexPrivilege doesn't contain all possible priviledges
|
||||
privileges.includes(privilege)
|
||||
);
|
||||
|
||||
|
|
|
@ -157,7 +157,6 @@ export function registerSnapshotsRoutes({
|
|||
repositories,
|
||||
// @ts-expect-error @elastic/elasticsearch https://github.com/elastic/elasticsearch-specification/issues/845
|
||||
errors: fetchedSnapshots?.failures,
|
||||
// @ts-expect-error @elastic/elasticsearch "total" is a new field in the response
|
||||
total: fetchedSnapshots?.total,
|
||||
},
|
||||
});
|
||||
|
|
|
@ -516,6 +516,7 @@ describe('Workload Statistics Aggregator', () => {
|
|||
const taskStore = taskStoreMock.create({});
|
||||
taskStore.aggregate
|
||||
.mockResolvedValueOnce(
|
||||
// @ts-expect-error not full interface
|
||||
mockAggregatedResult().then((res) =>
|
||||
setTaskTypeCount(res, 'alerting_telemetry', {
|
||||
idle: 2,
|
||||
|
@ -524,6 +525,7 @@ describe('Workload Statistics Aggregator', () => {
|
|||
)
|
||||
.mockRejectedValueOnce(new Error('Elasticsearch has gone poof'))
|
||||
.mockResolvedValueOnce(
|
||||
// @ts-expect-error not full interface
|
||||
mockAggregatedResult().then((res) =>
|
||||
setTaskTypeCount(res, 'alerting_telemetry', {
|
||||
idle: 1,
|
||||
|
@ -685,6 +687,8 @@ describe('Workload Statistics Aggregator', () => {
|
|||
|
||||
return new Promise<void>((resolve, reject) => {
|
||||
let errorWasThrowAt = 0;
|
||||
|
||||
// @ts-expect-error not full interface
|
||||
taskStore.aggregate.mockImplementation(async () => {
|
||||
if (errorWasThrowAt === 0) {
|
||||
errorWasThrowAt = Date.now();
|
||||
|
|
|
@ -452,12 +452,15 @@ function hasAggregations(
|
|||
);
|
||||
}
|
||||
export interface WorkloadAggregationResponse {
|
||||
// @ts-expect-error is not compatible with AggregationsAggregate
|
||||
taskType: TaskTypeAggregation;
|
||||
// @ts-expect-error is not compatible with AggregationsAggregate
|
||||
schedule: ScheduleAggregation;
|
||||
idleTasks: IdleTasksAggregation;
|
||||
nonRecurringTasks: {
|
||||
doc_count: number;
|
||||
};
|
||||
// @ts-expect-error is not compatible with AggregationsAggregate
|
||||
ownerIds: {
|
||||
ownerIds: {
|
||||
value: number;
|
||||
|
@ -465,6 +468,7 @@ export interface WorkloadAggregationResponse {
|
|||
};
|
||||
[otherAggs: string]: estypes.AggregationsAggregate;
|
||||
}
|
||||
// @ts-expect-error key doesn't accept a string
|
||||
export interface TaskTypeAggregation extends estypes.AggregationsFiltersAggregate {
|
||||
buckets: Array<{
|
||||
doc_count: number;
|
||||
|
@ -481,6 +485,7 @@ export interface TaskTypeAggregation extends estypes.AggregationsFiltersAggregat
|
|||
doc_count_error_upper_bound?: number | undefined;
|
||||
sum_other_doc_count?: number | undefined;
|
||||
}
|
||||
// @ts-expect-error key doesn't accept a string
|
||||
export interface ScheduleAggregation extends estypes.AggregationsFiltersAggregate {
|
||||
buckets: Array<{
|
||||
doc_count: number;
|
||||
|
|
|
@ -102,10 +102,7 @@ if (doc['task.runAt'].size()!=0) {
|
|||
},
|
||||
},
|
||||
};
|
||||
export const SortByRunAtAndRetryAt = SortByRunAtAndRetryAtScript as unknown as Record<
|
||||
string,
|
||||
estypes.SearchSort
|
||||
>;
|
||||
export const SortByRunAtAndRetryAt = SortByRunAtAndRetryAtScript as estypes.SearchSortContainer;
|
||||
|
||||
export const updateFieldsAndMarkAsFailed = (
|
||||
fieldUpdates: {
|
||||
|
|
|
@ -69,7 +69,7 @@ export const FilterTermForm: FilterAggConfigTerm['aggTypeConfig']['FilterAggForm
|
|||
if (
|
||||
!(
|
||||
isEsSearchResponseWithAggregations(response) &&
|
||||
isMultiBucketAggregate<estypes.AggregationsKeyedBucketKeys>(
|
||||
isMultiBucketAggregate<estypes.AggregationsKeyedBucket>(
|
||||
response.aggregations.field_values
|
||||
)
|
||||
)
|
||||
|
|
|
@ -33,6 +33,7 @@ export function registerPrivilegesRoute({ router, license }: RouteDependencies)
|
|||
body: { has_all_requested: hasAllPrivileges, cluster },
|
||||
} = await ctx.core.elasticsearch.client.asCurrentUser.security.hasPrivileges({
|
||||
body: {
|
||||
// @ts-expect-error SecurityClusterPrivilege doesn't contain all possible priviledges
|
||||
cluster: APP_CLUSTER_PRIVILEGES,
|
||||
},
|
||||
});
|
||||
|
|
|
@ -555,7 +555,6 @@ const previewTransformHandler: RequestHandler<
|
|||
try {
|
||||
const reqBody = req.body;
|
||||
const { body } = await ctx.core.elasticsearch.client.asCurrentUser.transform.previewTransform({
|
||||
// @ts-expect-error max_page_search_size is required in TransformPivot
|
||||
body: reqBody,
|
||||
});
|
||||
if (isLatestTransform(reqBody)) {
|
||||
|
|
|
@ -54,6 +54,7 @@ export function registerTransformNodesRoutes({ router, license }: RouteDependenc
|
|||
body: { has_all_requested: hasAllPrivileges },
|
||||
} = await ctx.core.elasticsearch.client.asCurrentUser.security.hasPrivileges({
|
||||
body: {
|
||||
// @ts-expect-error SecurityClusterPrivilege doesn't contain all possible priviledges
|
||||
cluster: NODES_INFO_PRIVILEGES,
|
||||
},
|
||||
});
|
||||
|
|
|
@ -59,6 +59,7 @@ describe('timeSeriesQuery', () => {
|
|||
describe('getResultFromEs', () => {
|
||||
it('correctly parses time series results for count aggregation', () => {
|
||||
expect(
|
||||
// @ts-expect-error not full interface
|
||||
getResultFromEs(true, false, {
|
||||
took: 0,
|
||||
timed_out: false,
|
||||
|
|
|
@ -152,6 +152,7 @@ export function getResultFromEs(
|
|||
const dateAgg = aggregations.dateAgg;
|
||||
|
||||
aggregations.groupAgg = {
|
||||
// @ts-expect-error doesn't contain required doc_count, key_as_string
|
||||
buckets: [{ key: 'all documents', dateAgg }],
|
||||
};
|
||||
|
||||
|
|
|
@ -5,10 +5,7 @@
|
|||
* 2.0.
|
||||
*/
|
||||
|
||||
import {
|
||||
MigrationDeprecationInfoDeprecation,
|
||||
MigrationDeprecationInfoResponse,
|
||||
} from '@elastic/elasticsearch/api/types';
|
||||
import type { estypes } from '@elastic/elasticsearch';
|
||||
import { SavedObject, SavedObjectAttributes } from 'src/core/public';
|
||||
|
||||
export type DeprecationSource = 'Kibana' | 'Elasticsearch';
|
||||
|
@ -187,8 +184,8 @@ export interface IndexSettingAction {
|
|||
deprecatedSettings: string[];
|
||||
}
|
||||
export interface EnrichedDeprecationInfo
|
||||
extends Omit<MigrationDeprecationInfoDeprecation, 'level'> {
|
||||
type: keyof MigrationDeprecationInfoResponse;
|
||||
extends Omit<estypes.MigrationDeprecationsDeprecation, 'level'> {
|
||||
type: keyof estypes.MigrationDeprecationsResponse;
|
||||
isCritical: boolean;
|
||||
index?: string;
|
||||
correctiveAction?: ReindexAction | MlAction | IndexSettingAction;
|
||||
|
|
|
@ -8,7 +8,7 @@
|
|||
import _ from 'lodash';
|
||||
import { RequestEvent } from '@elastic/elasticsearch/lib/Transport';
|
||||
import { elasticsearchServiceMock } from 'src/core/server/mocks';
|
||||
import { MigrationDeprecationInfoResponse } from '@elastic/elasticsearch/api/types';
|
||||
import { estypes } from '@elastic/elasticsearch';
|
||||
|
||||
import { getESUpgradeStatus } from './es_deprecations_status';
|
||||
import fakeDeprecations from './__fixtures__/fake_deprecations.json';
|
||||
|
@ -32,7 +32,7 @@ describe('getESUpgradeStatus', () => {
|
|||
};
|
||||
|
||||
// @ts-expect-error mock data is too loosely typed
|
||||
const deprecationsResponse: MigrationDeprecationInfoResponse = _.cloneDeep(fakeDeprecations);
|
||||
const deprecationsResponse: estypes.MigrationDeprecationsResponse = _.cloneDeep(fakeDeprecations);
|
||||
|
||||
const esClient = elasticsearchServiceMock.createScopedClusterClient();
|
||||
|
||||
|
|
|
@ -5,10 +5,7 @@
|
|||
* 2.0.
|
||||
*/
|
||||
|
||||
import {
|
||||
MigrationDeprecationInfoDeprecation,
|
||||
MigrationDeprecationInfoResponse,
|
||||
} from '@elastic/elasticsearch/api/types';
|
||||
import type { estypes } from '@elastic/elasticsearch';
|
||||
import { IScopedClusterClient } from 'src/core/server';
|
||||
import { indexSettingDeprecations } from '../../common/constants';
|
||||
import { EnrichedDeprecationInfo, ESUpgradeStatus } from '../../common/types';
|
||||
|
@ -41,8 +38,8 @@ export async function getESUpgradeStatus(
|
|||
combinedDeprecations = combinedDeprecations.concat(withoutSystemIndices);
|
||||
} else {
|
||||
const deprecationsByType = deprecations[
|
||||
deprecationType as keyof MigrationDeprecationInfoResponse
|
||||
] as MigrationDeprecationInfoDeprecation[];
|
||||
deprecationType as keyof estypes.MigrationDeprecationsResponse
|
||||
] as estypes.MigrationDeprecationsDeprecation[];
|
||||
|
||||
const enrichedDeprecationInfo = deprecationsByType.map(
|
||||
({
|
||||
|
@ -59,7 +56,7 @@ export async function getESUpgradeStatus(
|
|||
details,
|
||||
message,
|
||||
url,
|
||||
type: deprecationType as keyof MigrationDeprecationInfoResponse,
|
||||
type: deprecationType as keyof estypes.MigrationDeprecationsResponse,
|
||||
isCritical: level === 'critical',
|
||||
resolveDuringUpgrade,
|
||||
correctiveAction: getCorrectiveAction(message, metadata),
|
||||
|
@ -85,7 +82,7 @@ export async function getESUpgradeStatus(
|
|||
|
||||
// Reformats the index deprecations to an array of deprecation warnings extended with an index field.
|
||||
const getCombinedIndexInfos = async (
|
||||
deprecations: MigrationDeprecationInfoResponse,
|
||||
deprecations: estypes.MigrationDeprecationsResponse,
|
||||
dataClient: IScopedClusterClient
|
||||
) => {
|
||||
const indices = Object.keys(deprecations.index_settings).reduce(
|
||||
|
|
|
@ -99,7 +99,7 @@ export const getDeprecatedSettingWarning = (
|
|||
): ReindexWarning | undefined => {
|
||||
const { settings } = flatSettings;
|
||||
|
||||
const deprecatedSettingsInUse = Object.keys(settings).filter((setting) => {
|
||||
const deprecatedSettingsInUse = Object.keys(settings || {}).filter((setting) => {
|
||||
return deprecatedSettings.indexOf(setting) > -1;
|
||||
});
|
||||
|
||||
|
|
|
@ -20,7 +20,7 @@ interface MetaProperties {
|
|||
}
|
||||
|
||||
export interface FlatSettings {
|
||||
settings: estypes.IndicesIndexState['settings'];
|
||||
settings?: estypes.IndicesIndexState['settings'];
|
||||
mappings?: {
|
||||
properties?: MappingProperties;
|
||||
_meta?: MetaProperties;
|
||||
|
@ -29,7 +29,7 @@ export interface FlatSettings {
|
|||
|
||||
// Specific to 7.x-8 upgrade
|
||||
export interface FlatSettingsWithTypeName {
|
||||
settings: estypes.IndicesIndexState['settings'];
|
||||
settings?: estypes.IndicesIndexState['settings'];
|
||||
mappings?: {
|
||||
[typeName: string]: {
|
||||
properties?: MappingProperties;
|
||||
|
|
|
@ -42,6 +42,7 @@ export const getCertsRequestBody = ({
|
|||
body: {
|
||||
from: pageIndex * size,
|
||||
size,
|
||||
// @ts-expect-error direction is not a string, but a union of literals
|
||||
sort: asMutableArray([
|
||||
{
|
||||
[sort]: {
|
||||
|
@ -178,6 +179,7 @@ export const processCertsResult = (result: CertificatesResults): CertResult => {
|
|||
common_name: commonName,
|
||||
};
|
||||
});
|
||||
// @ts-expect-error aggregations is unknown
|
||||
const total = result.aggregations?.total?.value ?? 0;
|
||||
return { certs, total };
|
||||
};
|
||||
|
|
|
@ -27,6 +27,7 @@ describe('getJourneyScreenshot', () => {
|
|||
expect(
|
||||
await getJourneyScreenshot({
|
||||
uptimeEsClient: mockSearchResult([], {
|
||||
// @ts-expect-error not full interface
|
||||
step: {
|
||||
image: {
|
||||
hits: {
|
||||
|
@ -94,6 +95,7 @@ describe('getJourneyScreenshot', () => {
|
|||
expect(
|
||||
await getJourneyScreenshot({
|
||||
uptimeEsClient: mockSearchResult([], {
|
||||
// @ts-expect-error not full interface
|
||||
step: { image: { hits: { hits: [screenshotRefResult], total: 1 } } },
|
||||
}),
|
||||
checkGroup: 'checkGroup',
|
||||
|
|
|
@ -96,7 +96,6 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
|
|||
await esClient.snapshot.deleteRepository({
|
||||
repository: REPO_NAME,
|
||||
});
|
||||
// @ts-expect-error @elastic/elasticsearch DeleteSnapshotLifecycleRequest.policy_id is required
|
||||
await esClient.ilm.deleteLifecycle({ policy: POLICY_NAME });
|
||||
await esClient.indices.deleteIndexTemplate({ name: indexTemplateName });
|
||||
});
|
||||
|
|
|
@ -93,7 +93,6 @@ export default ({ getService }: FtrProviderContext) => {
|
|||
const olderDate = moment().subtract(100, 'days').valueOf();
|
||||
await es.index({
|
||||
index: '.kibana',
|
||||
type: '_doc',
|
||||
body: {
|
||||
type: 'lens-ui-telemetry',
|
||||
'lens-ui-telemetry': {
|
||||
|
|
|
@ -44,6 +44,7 @@ export default function ({ getService }: FtrProviderContext) {
|
|||
);
|
||||
const { body: result } = await client.search({
|
||||
index,
|
||||
// @ts-expect-error buckets_path is incompatible. expected 'string | string[] | Record<string, string> | undefined'.
|
||||
body: searchBody,
|
||||
});
|
||||
|
||||
|
@ -67,6 +68,7 @@ export default function ({ getService }: FtrProviderContext) {
|
|||
);
|
||||
const { body: result } = await client.search({
|
||||
index,
|
||||
// @ts-expect-error search is incompatible
|
||||
body: searchBody,
|
||||
});
|
||||
|
||||
|
@ -89,6 +91,7 @@ export default function ({ getService }: FtrProviderContext) {
|
|||
);
|
||||
const { body: result } = await client.search({
|
||||
index,
|
||||
// @ts-expect-error search is incompatible
|
||||
body: searchBody,
|
||||
});
|
||||
|
||||
|
@ -110,6 +113,7 @@ export default function ({ getService }: FtrProviderContext) {
|
|||
);
|
||||
const { body: result } = await client.search({
|
||||
index,
|
||||
// @ts-expect-error search is incompatible
|
||||
body: searchBody,
|
||||
});
|
||||
|
||||
|
|
|
@ -60,6 +60,7 @@ export default function ApiTest({ getService }: FtrProviderContext) {
|
|||
`);
|
||||
expectSnapshot(
|
||||
response.body.traceDocs.map((doc) =>
|
||||
// @ts-expect-error processor doesn't exist on Profile
|
||||
doc.processor.event === 'transaction'
|
||||
? // @ts-expect-error
|
||||
`${doc.transaction.name} (transaction)`
|
||||
|
|
|
@ -32,7 +32,6 @@ export default function ({ getService }: FtrProviderContext) {
|
|||
body: { _source },
|
||||
} = await es.get<Record<string, any>>({
|
||||
id: generateRawID(id, type),
|
||||
type: '_doc',
|
||||
index: '.kibana',
|
||||
});
|
||||
return _source?.[type];
|
||||
|
|
|
@ -34,7 +34,6 @@ export default ({ getPageObjects, getService }: FtrProviderContext) => {
|
|||
});
|
||||
after(async () => {
|
||||
await esClient.snapshot.deleteRepository({ repository: repoName });
|
||||
// @ts-expect-error @elastic/elasticsearch DeleteSnapshotLifecycleRequest.policy_id is required
|
||||
await esClient.ilm.deleteLifecycle({ policy: policyName });
|
||||
});
|
||||
|
||||
|
|
|
@ -2379,10 +2379,10 @@
|
|||
dependencies:
|
||||
"@elastic/ecs-helpers" "^1.1.0"
|
||||
|
||||
"@elastic/elasticsearch@npm:@elastic/elasticsearch-canary@^7.16.0-canary.4":
|
||||
version "7.16.0-canary.4"
|
||||
resolved "https://registry.yarnpkg.com/@elastic/elasticsearch-canary/-/elasticsearch-canary-7.16.0-canary.4.tgz#366fe186f8cb0eeee00aba12de5269cc4104461c"
|
||||
integrity sha512-Z20xpt9nCkjh+nU2UOnSGV/ZxZtu1ZHX2IQD38IYeng/Nhp0tBYP3zR27v4oCa5Jj2fB/XUnlB/jp+SXtschoQ==
|
||||
"@elastic/elasticsearch@npm:@elastic/elasticsearch-canary@^7.16.0-canary.7":
|
||||
version "7.16.0-canary.7"
|
||||
resolved "https://registry.yarnpkg.com/@elastic/elasticsearch-canary/-/elasticsearch-canary-7.16.0-canary.7.tgz#f05b23b20fd356761d61a094db08a3828b363da0"
|
||||
integrity sha512-zrBL0f5OgkjIb2OCSSrQYUqNV8Hy5lUyrwkO7c028qHRKlOEz5lQo7NCQmX52tlEMfQ8hk9+jBipzU/5iLEqTQ==
|
||||
dependencies:
|
||||
debug "^4.3.1"
|
||||
hpagent "^0.1.1"
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue