Upgrade @elastic/elasticsearch@8.5.0-canary.1 (#145416)

Co-authored-by: Kibana Machine <42973632+kibanamachine@users.noreply.github.com>

commit 5efededc06
parent 56916574ef

28 changed files with 58 additions and 79 deletions
@@ -106,7 +106,7 @@
     "@elastic/apm-rum-react": "^1.4.2",
     "@elastic/charts": "50.2.1",
     "@elastic/datemath": "5.0.3",
-    "@elastic/elasticsearch": "npm:@elastic/elasticsearch-canary@8.4.0-canary.1",
+    "@elastic/elasticsearch": "npm:@elastic/elasticsearch-canary@8.5.0-canary.1",
     "@elastic/ems-client": "8.3.3",
     "@elastic/eui": "70.2.4",
     "@elastic/filesaver": "1.1.2",
@@ -72,7 +72,7 @@ export function getBulkOperationError(
   id: string,
   rawResponse: {
     status: number;
-    error?: { type: string; reason: string; index: string };
+    error?: { type: string; reason?: string; index: string };
     // Other fields are present on a bulk operation result but they are irrelevant for this function
   }
 ): Payload | undefined {
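With the 8.5 client types, the error `reason` on bulk operation results is optional, so anything that uses it as a message needs a fallback. A minimal TypeScript sketch of that pattern (the `describeBulkError` helper and its types are hypothetical, not the Kibana implementation):

```ts
// Hypothetical helper, not Kibana code: `reason` may now be undefined,
// so provide a fallback before using it as a human-readable message.
interface BulkOperationError {
  type: string;
  reason?: string;
  index: string;
}

function describeBulkError(error: BulkOperationError): string {
  // Fall back to the error type when Elasticsearch omits a reason.
  return error.reason ?? `Unexpected bulk response [${error.type}]`;
}
```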
@@ -1560,7 +1560,6 @@ export class SavedObjectsRepository implements ISavedObjectsRepository {
             // @ts-expect-error @elastic/elasticsearch _source is optional
             ...this._rawToSavedObject(hit),
             score: hit._score!,
-            // @ts-expect-error @elastic/elasticsearch _source is optional
             sort: hit.sort,
           })
       ),
@@ -10,7 +10,7 @@ import type * as estypes from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
 export const isWriteBlockException = (errorCause?: estypes.ErrorCause): boolean => {
   return (
     errorCause?.type === 'cluster_block_exception' &&
-    errorCause?.reason.match(/index \[.+] blocked by: \[FORBIDDEN\/8\/.+ \(api\)\]/) !== null
+    errorCause?.reason?.match(/index \[.+] blocked by: \[FORBIDDEN\/8\/.+ \(api\)\]/) !== null
   );
 };
 
@@ -28,7 +28,7 @@ export const isIndexNotFoundException = (errorCause?: estypes.ErrorCause): boole
 export const isClusterShardLimitExceeded = (errorCause?: estypes.ErrorCause): boolean => {
   return (
     errorCause?.type === 'validation_exception' &&
-    errorCause?.reason.match(
+    errorCause?.reason?.match(
       /this action would add .* shards, but this cluster currently has .* maximum normal shards open/
     ) !== null
   );
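Both migration checks follow the same pattern: `ErrorCause.reason` is now optional in the client types, so the regex test is reached through optional chaining. A hedged sketch of the shape (the pattern and comparison are simplified placeholders, not the exact Kibana checks):

```ts
import type * as estypes from '@elastic/elasticsearch/lib/api/types';

// Simplified placeholder check: optional chaining yields undefined when
// `reason` is absent, so `.match()` is never called on undefined.
const reasonMatches = (
  errorCause: estypes.ErrorCause | undefined,
  pattern: RegExp
): boolean => errorCause?.reason?.match(pattern) != null;
```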
@@ -18,7 +18,7 @@ import {
 
 /** @internal */
 export interface WaitForTaskResponse {
-  error: Option.Option<{ type: string; reason: string; index?: string }>;
+  error: Option.Option<{ type: string; reason?: string; index?: string }>;
   completed: boolean;
   failures: Option.Option<any[]>;
   description?: string;
@@ -226,7 +226,6 @@ export function mergeTimeShifts(
       const bucketKey = bucketAgg.type.getShiftedKey(bucketAgg, bucket.key, shift);
       // if a bucket is missing in the map, create an empty one
       if (!baseBucketMap[bucketKey]) {
-        // @ts-expect-error 'number' is not comparable to type 'AggregationsAggregate'.
         baseBucketMap[String(bucketKey)] = {
           key: bucketKey,
         } as GenericBucket;
@@ -17,7 +17,7 @@ export interface FailedShard {
 
 export interface Reason {
   type: string;
-  reason: string;
+  reason?: string;
   script_stack?: string[];
   position?: {
     offset: number,
@@ -11,7 +11,7 @@ import type {
   FieldCapsResponse,
   TermsEnumRequest,
   TermsEnumResponse,
-} from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
+} from '@elastic/elasticsearch/lib/api/types';
 import { ValuesType } from 'utility-types';
 import { ElasticsearchClient, KibanaRequest } from '@kbn/core/server';
 import type { ESSearchRequest, InferSearchResponseOf } from '@kbn/es-types';
@@ -40,10 +40,8 @@ export async function getServiceNamesFromTermsEnum({
           ProcessorEvent.error,
         ],
       },
-      body: {
-        size: maxNumberOfServices,
-        field: SERVICE_NAME,
-      },
+      size: maxNumberOfServices,
+      field: SERVICE_NAME,
     }
   );
 
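The APM helpers drop the `body` wrapper because the 8.x client accepts request parameters at the top level. A sketch of the flattened `terms_enum` call against a plain client rather than the APM event client wrapper (the node URL, index pattern, and field are assumptions for illustration):

```ts
import { Client } from '@elastic/elasticsearch';

const client = new Client({ node: 'http://localhost:9200' }); // assumed local node

async function listServiceNames(): Promise<string[]> {
  // Parameters sit at the top level; no `body: { ... }` wrapper is needed.
  const response = await client.termsEnum({
    index: 'traces-apm*', // hypothetical index pattern
    field: 'service.name',
    size: 500,
  });
  return response.terms;
}
```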
@@ -33,18 +33,16 @@ export async function getSuggestionsWithTermsEnum({
         ProcessorEvent.metric,
       ],
     },
-    body: {
-      case_insensitive: true,
-      field: fieldName,
-      size,
-      string: fieldValue,
-      index_filter: {
-        range: {
-          ['@timestamp']: {
-            gte: start,
-            lte: end,
-            format: 'epoch_millis',
-          },
+    case_insensitive: true,
+    field: fieldName,
+    size,
+    string: fieldValue,
+    index_filter: {
+      range: {
+        ['@timestamp']: {
+          gte: start,
+          lte: end,
+          format: 'epoch_millis',
         },
       },
     },
@@ -89,19 +89,17 @@ export async function getTraceSamplesByQuery({
         ProcessorEvent.error,
       ],
     },
-    body: {
-      size: 1000,
-      filter: {
-        bool: {
-          filter: [
-            ...rangeQuery(start, end),
-            ...environmentQuery(environment),
-          ],
-        },
+    size: 1000,
+    filter: {
+      bool: {
+        filter: [
+          ...rangeQuery(start, end),
+          ...environmentQuery(environment),
+        ],
       },
-      event_category_field: PROCESSOR_EVENT,
-      query,
     },
+    event_category_field: PROCESSOR_EVENT,
+    query,
     filter_path: 'hits.sequences.events._source.trace.id',
   })
 ).hits?.sequences?.flatMap((sequence) =>
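The same flattening applies here: query-level options move up beside transport options such as `filter_path`. A simplified stand-in using a plain `_search` call rather than the APM event client's EQL search (index name, query, and document shape are placeholders):

```ts
import { Client } from '@elastic/elasticsearch';

// Placeholder document shape for the typed response.
interface TraceDoc {
  trace: { id: string };
}

async function sampleTraceIds(client: Client): Promise<Array<string | undefined>> {
  const response = await client.search<TraceDoc>({
    index: 'traces-apm*', // hypothetical index pattern
    size: 1000,
    query: { match_all: {} }, // placeholder query
    filter_path: 'hits.hits._source.trace.id',
  });
  // `_source` is optional in the response types, hence the optional chaining.
  return response.hits.hits.map((hit) => hit._source?.trace.id);
}
```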
@@ -195,7 +195,7 @@ export const parseMlInferenceParametersFromPipeline = (
     return null;
   }
   return {
-    destination_field: inferenceProcessor.target_field.replace('ml.inference.', ''),
+    destination_field: inferenceProcessor.target_field?.replace('ml.inference.', ''),
     model_id: inferenceProcessor.model_id,
     pipeline_name: name,
     source_field: sourceField,
@@ -568,7 +568,6 @@ export function getQueryBodyWithAuthFilter(
           },
           {
             bool: {
-              // @ts-expect-error undefined is not assignable as QueryDslTermQuery value
               should: namespaceQuery,
             },
           },
@@ -714,7 +713,6 @@ export function getQueryBody(
         },
       },
     },
-    // @ts-expect-error undefined is not assignable as QueryDslTermQuery value
     namespaceQuery,
   ];
 
@@ -102,7 +102,6 @@ describe('experimental_datastream_features', () => {
           settings: {},
           mappings: {
             _source: {
-              // @ts-expect-error
               mode: 'stored',
             },
             properties: {
@@ -163,7 +163,6 @@ export class KibanaFramework {
         } as estypes.MsearchRequest);
         break;
       case 'fieldCaps':
-        // @ts-expect-error FieldCapsRequest.fields is not optional, CallWithRequestParams.fields is
         apiResult = elasticsearch.client.asCurrentUser.fieldCaps({
           ...params,
         });
@@ -47,7 +47,7 @@ const isEsAggError = (e: Error | EsAggError): e is EsAggError => {
 
 function getNestedErrorClauseWithContext({
   type,
-  reason,
+  reason = '',
   caused_by: causedBy,
   lang,
   script,
@@ -73,7 +73,7 @@ function getNestedErrorClauseWithContext({
 }
 
 function getNestedErrorClause(e: ErrorCause | Reason): ReasonDescription[] {
-  const { type, reason, caused_by: causedBy } = e;
+  const { type, reason = '', caused_by: causedBy } = e;
   // Painless scripts errors are nested within the failed_shards property
   if ('failed_shards' in e) {
     if (e.failed_shards) {
@@ -18,7 +18,6 @@ interface PutLicenseArg {
 export async function putLicense({ acknowledge, client, licensing, license }: PutLicenseArg) {
   try {
     const response = await client.asCurrentUser.license.post({
-      // @ts-expect-error license is not typed in LM code
       body: license,
       acknowledge,
     });
@@ -97,7 +97,6 @@ export const getSearchAfterFromResponse = <T>({
 }: {
   response: estypes.SearchResponse<T>;
 }): string[] | undefined =>
-  // @ts-expect-error @elastic/elasticsearch SortResults contains null
   response.hits.hits.length > 0
     ? response.hits.hits[response.hits.hits.length - 1].sort
     : undefined;
@@ -38,9 +38,7 @@ export function indicesRoutes({ router, routeGuard }: RouteInitialization) {
         body: { index, fields: requestFields },
       } = request;
       const fields =
-        requestFields !== undefined && Array.isArray(requestFields)
-          ? requestFields.join(',')
-          : '*';
+        requestFields !== undefined && Array.isArray(requestFields) ? requestFields : '*';
       const body = await client.asCurrentUser.fieldCaps({ index, fields }, { maxRetries: 0 });
       return response.ok({ body });
     } catch (e) {
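The route no longer joins the requested fields into a comma-separated string because the client types accept `fields` as either a single string or an array. A small sketch of the call shape (index and field names are illustrative):

```ts
import { Client } from '@elastic/elasticsearch';

async function getFieldCaps(client: Client) {
  // `fields` may be a wildcard string ('*') or an array of field names.
  return client.fieldCaps(
    { index: 'my-index', fields: ['@timestamp', 'message'] }, // assumed names
    { maxRetries: 0 }
  );
}
```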
@@ -39,7 +39,6 @@ export function termsQuery(
     return [];
   }
 
-  // @ts-expect-error undefined and null aren't assignable
   return [{ terms: { [field]: filtered } }];
 }
 
@@ -132,7 +132,6 @@ export async function fetchRollupSavedSearches(
 
     savedSearchesList = await getSavedObjectsList({
       ...searchProps,
-      // @ts-expect-error@elastic/elasticsearch SortResults might contain null
       searchAfter: savedSearchesList.hits.hits[savedSearchesList.hits.hits.length - 1].sort,
     });
   }
@@ -201,7 +200,6 @@ export async function fetchRollupVisualizations(
 
     savedVisualizationsList = await getSavedObjectsList({
       ...searchProps,
-      // @ts-expect-error@elastic/elasticsearch SortResults might contain null
       searchAfter: sort,
     });
   }
@@ -35,14 +35,15 @@ export const errorAggregator = (
 ): BulkResponseErrorAggregation => {
   return response.items.reduce<BulkResponseErrorAggregation>((accum, item) => {
     if (item.create?.error != null && !ignoreStatusCodes.includes(item.create.status)) {
-      if (accum[item.create.error.reason] == null) {
-        accum[item.create.error.reason] = {
+      const reason = item.create.error.reason ?? 'unknown';
+      if (accum[reason] == null) {
+        accum[reason] = {
           count: 1,
           statusCode: item.create.status,
         };
       } else {
-        accum[item.create.error.reason] = {
-          count: accum[item.create.error.reason].count + 1,
+        accum[reason] = {
+          count: accum[reason].count + 1,
           statusCode: item.create.status,
         };
       }
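The test helper now keys its aggregation on a definite string, since `reason` can be undefined in the bulk item types. A standalone sketch of the same reduce pattern (types are simplified stand-ins; `'unknown'` is the fallback bucket when Elasticsearch omits a reason):

```ts
// Simplified stand-ins for the bulk response item types used by the helper.
interface BulkItemError {
  reason?: string;
}

type ErrorAggregation = Record<string, { count: number }>;

function aggregateErrors(errors: BulkItemError[]): ErrorAggregation {
  return errors.reduce<ErrorAggregation>((accum, error) => {
    const reason = error.reason ?? 'unknown'; // definite key even when reason is missing
    accum[reason] = { count: (accum[reason]?.count ?? 0) + 1 };
    return accum;
  }, {});
}
```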
@@ -293,6 +293,7 @@ describe('API Keys', () => {
         id: '123',
         name: 'key-name',
         api_key: 'abc123',
+        encoded: 'utf8',
       });
       const result = await apiKeys.grantAsInternalUser(
         httpServerMock.createKibanaRequest({
@@ -308,6 +309,7 @@ describe('API Keys', () => {
         api_key: 'abc123',
         id: '123',
         name: 'key-name',
+        encoded: 'utf8',
       });
       expect(mockValidateKibanaPrivileges).not.toHaveBeenCalled(); // this is only called if kibana_role_descriptors is defined
       expect(mockClusterClient.asInternalUser.security.grantApiKey).toHaveBeenCalledWith({
@@ -273,10 +273,7 @@ export class APIKeys {
     // User needs `manage_api_key` or `grant_api_key` privilege to use this API
     let result: GrantAPIKeyResult;
     try {
-      result = await this.clusterClient.asInternalUser.security.grantApiKey({
-        // @ts-expect-error @elastic/elasticsearch api_key.role_descriptors doesn't support `Record<string, any>`
-        body: params,
-      });
+      result = await this.clusterClient.asInternalUser.security.grantApiKey({ body: params });
       this.logger.debug('API key was granted successfully');
     } catch (e) {
       this.logger.error(`Failed to grant API key: ${e.message}`);
@@ -70,8 +70,10 @@ export class Tokens {
 
       return {
         accessToken,
-        refreshToken,
-        // @ts-expect-error @elastic/elasticsearch user metadata defined as Record<string, any>
+        // We can safely use a non-null assertion for the refresh token since `refresh_token` grant type guarantees that
+        // getToken API will always return a new refresh token, unlike some other grant types (e.g. client_credentials).
+        refreshToken: refreshToken!,
+        // @ts-expect-error many optional properties are string | null | undefined while we declare them as string | undefined
         authenticationInfo: authenticationInfo as AuthenticationInfo,
       };
     } catch (err) {
@@ -5,10 +5,7 @@
  * 2.0.
  */
 
-import type {
-  SecurityActivateUserProfileRequest,
-  SecurityUserProfileWithMetadata,
-} from '@elastic/elasticsearch/lib/api/types';
+import type { SecurityActivateUserProfileRequest } from '@elastic/elasticsearch/lib/api/types';
 import type { SecurityUserProfile } from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
 
 import type { IClusterClient, KibanaRequest, Logger } from '@kbn/core/server';
@@ -330,11 +327,10 @@ export class UserProfileService {
 
     let body;
     try {
-      // @ts-expect-error Invalid response format.
-      body = (await clusterClient.asInternalUser.security.getUserProfile({
+      body = await clusterClient.asInternalUser.security.getUserProfile({
         uid: userSession.value.userProfileId,
         data: dataPath ? prefixCommaSeparatedValues(dataPath, KIBANA_DATA_ROOT) : undefined,
-      })) as { profiles: SecurityUserProfileWithMetadata[] };
+      });
     } catch (error) {
       this.logger.error(
         `Failed to retrieve user profile for the current user [sid=${getPrintableSessionId(
@@ -368,11 +364,10 @@ export class UserProfileService {
     }
 
     try {
-      // @ts-expect-error Invalid response format.
-      const body = (await clusterClient.asInternalUser.security.getUserProfile({
+      const body = await clusterClient.asInternalUser.security.getUserProfile({
         uid: [...uids].join(','),
         data: dataPath ? prefixCommaSeparatedValues(dataPath, KIBANA_DATA_ROOT) : undefined,
-      })) as { profiles: SecurityUserProfileWithMetadata[] };
+      });
 
       return body.profiles.map((rawUserProfile) => parseUserProfile<D>(rawUserProfile));
     } catch (error) {
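With the 8.5 types, `security.getUserProfile` resolves to a response that already exposes a `profiles` array, so the manual cast and the suppression comment go away. A hedged sketch against a plain client (the uid handling mirrors the change above; everything else is illustrative):

```ts
import { Client } from '@elastic/elasticsearch';

async function getProfileUids(client: Client, uids: string[]): Promise<string[]> {
  // The typed response exposes `profiles` directly; no `as` cast is required.
  const body = await client.security.getUserProfile({ uid: uids.join(',') });
  return body.profiles.map((profile) => profile.uid);
}
```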
@@ -382,14 +382,15 @@ export const errorAggregator = (
 ): BulkResponseErrorAggregation => {
   return response.items.reduce<BulkResponseErrorAggregation>((accum, item) => {
     if (item.create?.error != null && !ignoreStatusCodes.includes(item.create.status)) {
-      if (accum[item.create.error.reason] == null) {
-        accum[item.create.error.reason] = {
+      const reason = item.create.error.reason ?? 'unknown';
+      if (accum[reason] == null) {
+        accum[reason] = {
           count: 1,
           statusCode: item.create.status,
         };
       } else {
-        accum[item.create.error.reason] = {
-          count: accum[item.create.error.reason].count + 1,
+        accum[reason] = {
+          count: accum[reason].count + 1,
           statusCode: item.create.status,
         };
       }
@@ -1499,10 +1499,10 @@
   dependencies:
     "@elastic/ecs-helpers" "^1.1.0"
 
-"@elastic/elasticsearch@npm:@elastic/elasticsearch-canary@8.4.0-canary.1":
-  version "8.4.0-canary.1"
-  resolved "https://registry.yarnpkg.com/@elastic/elasticsearch-canary/-/elasticsearch-canary-8.4.0-canary.1.tgz#af951826a67eb8a97562014e90447087c3bccee9"
-  integrity sha512-BWC3u2SfNBZBPUB/M7Qt6jl/8QcnG1+tzYto4pTZW/AaQn6raTyKtdta78Vi9Tj83MzA2skz7Y0FfCmPudwPsQ==
+"@elastic/elasticsearch@npm:@elastic/elasticsearch-canary@8.5.0-canary.1":
+  version "8.5.0-canary.1"
+  resolved "https://registry.yarnpkg.com/@elastic/elasticsearch-canary/-/elasticsearch-canary-8.5.0-canary.1.tgz#3e2f40bc2e58d4f1c1cc391b2d594db59c5e0cd8"
+  integrity sha512-WmiK5A04tUwpAdbPeLo9ONV/T6foYYa1EjGQFxTPmEPLyY9s0B14o1JAJ7OnzQe9qmDahSJksSA1xLr0LdutmA==
   dependencies:
     "@elastic/transport" "^8.2.0"
     tslib "^2.4.0"