Mirror of https://github.com/elastic/kibana.git (synced 2025-04-25 02:09:32 -04:00)
Update dependency @elastic/elasticsearch to ^8.15.0 (main) (#190378)
Co-authored-by: elastic-renovate-prod[bot] <174716857+elastic-renovate-prod[bot]@users.noreply.github.com>
Co-authored-by: kibanamachine <42973632+kibanamachine@users.noreply.github.com>
Co-authored-by: Alejandro Fernández Haro <alejandro.haro@elastic.co>
Co-authored-by: Walter Rafelsberger <walter.rafelsberger@elastic.co>
parent 745ecfdd30
commit 68a924411b
27 changed files with 145 additions and 145 deletions
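Most of the churn below follows a handful of renames in the 8.15 client types: InlineScript is folded into Script, the inference client methods drop their Model suffix (getModel/putModel/deleteModel become get/put/delete), and the inference types are renamed (InferenceModelConfig becomes InferenceInferenceEndpoint, InferenceModelConfigContainer becomes InferenceInferenceEndpointInfo). A minimal before/after sketch of the InlineScript change; the script body here is a hypothetical example, not code from this commit:

import type * as estypes from '@elastic/elasticsearch/lib/api/types';

// 8.14: inline scripts had a dedicated type.
// const script: estypes.InlineScript = { source: "doc['count'].value > 0", lang: 'painless' };

// 8.15: InlineScript is gone; estypes.Script covers inline scripts as well.
const script: estypes.Script = {
  source: "doc['count'].value > 0", // hypothetical script body
  lang: 'painless',
  params: {},
};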
@@ -113,7 +113,7 @@
     "@elastic/datemath": "5.0.3",
     "@elastic/ebt": "1.0.0",
     "@elastic/ecs": "^8.11.1",
-    "@elastic/elasticsearch": "^8.14.0",
+    "@elastic/elasticsearch": "^8.15.0",
     "@elastic/ems-client": "8.5.3",
     "@elastic/eui": "95.7.0",
     "@elastic/filesaver": "1.1.2",
@@ -9,6 +9,7 @@
 import type {
   PropertyName as EsPropertyName,
   MappingProperty as EsMappingProperty,
+  MappingPropertyBase as EsMappingPropertyBase,
 } from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
 
 /**
@@ -64,7 +65,8 @@ export interface SavedObjectsMappingProperties {
  *
  * @public
  */
-export type SavedObjectsFieldMapping = EsMappingProperty & {
+export type SavedObjectsFieldMapping = EsMappingProperty &
+  EsMappingPropertyBase & {
   /**
    * The dynamic property of the mapping, either `false` or `'strict'`. If
    * unspecified `dynamic: 'strict'` will be inherited from the top-level
@@ -79,4 +81,4 @@ export type SavedObjectsFieldMapping = EsMappingProperty & {
  * to avoid type failures on all code using accessing them via `SavedObjectsFieldMapping.properties`.
  */
   properties?: Record<EsPropertyName, EsMappingProperty>;
-};
+  };
@@ -37,7 +37,7 @@ export type ScriptedPhraseFilter = Filter & {
   meta: PhraseFilterMeta;
   query: {
     script: {
-      script: estypes.InlineScript;
+      script: estypes.Script;
     };
   };
 };
@@ -134,7 +134,7 @@ export const getPhraseScript = (field: DataViewFieldBase, value: PhraseFilterVal
       params: {
         value: convertedValue,
       },
-    } as estypes.InlineScript,
+    } as estypes.Script,
   };
 };
 
@@ -65,7 +65,7 @@ export type ScriptedRangeFilter = Filter & {
   meta: RangeFilterMeta;
   query: {
     script: {
-      script: estypes.InlineScript;
+      script: estypes.Script;
     };
   };
 };
@@ -189,7 +189,7 @@ export const buildRangeFilter = (
  * @internal
  */
 export const getRangeScript = (field: DataViewFieldBase, params: RangeFilterParams) => {
-  const knownParams: estypes.InlineScript['params'] = mapValues(
+  const knownParams: estypes.Script['params'] = mapValues(
     pickBy(params, (val, key) => key in operators),
     (value) => (field.type === 'number' && typeof value === 'string' ? parseFloat(value) : value)
   );
@@ -7,7 +7,7 @@
  */
 
 import type {
-  InlineScript,
+  Script,
   MappingRuntimeField,
   MappingRuntimeFields,
 } from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
@@ -52,7 +52,7 @@ export interface MainAggregation extends NamedAggregation {
 }
 
 export interface GroupingRuntimeField extends MappingRuntimeField {
-  script: InlineScript & {
+  script: Script & {
     params: Record<string, any>;
   };
 }
@@ -25,7 +25,7 @@ import { FormattedMessage } from '@kbn/i18n-react';
 import { cloneDeep } from 'lodash';
 import useDebounce from 'react-use/lib/useDebounce';
 import type {
-  InferenceModelConfigContainer,
+  InferenceInferenceEndpointInfo,
   MappingTypeMapping,
 } from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
 import { createSemanticTextCombinedField, getFieldNames, getNameCollisionMsg } from './utils';
@@ -61,14 +61,14 @@ export const SemanticTextForm: FC<Props> = ({ addCombinedField, hasNameCollision
 
   useEffect(() => {
     http
-      .fetch<InferenceModelConfigContainer[]>('/internal/data_visualizer/inference_services', {
+      .fetch<InferenceInferenceEndpointInfo[]>('/internal/data_visualizer/inference_services', {
         method: 'GET',
         version: '1',
       })
       .then((response) => {
         const inferenceServiceOptions = response.map((service) => ({
-          value: service.model_id,
-          text: service.model_id,
+          value: service.inference_id,
+          text: service.inference_id,
         }));
         setInferenceServices(inferenceServiceOptions);
         setSelectedInference(inferenceServiceOptions[0]?.value ?? undefined);
@@ -304,10 +304,8 @@ const getDataComparisonQuery = ({
 
   if (rangeFilter && isPopulatedObject<string, QueryDslBoolQuery>(query, ['bool'])) {
     if (Array.isArray(query.bool.filter)) {
-      // @ts-expect-error gte and lte can be numeric
       query.bool.filter.push(rangeFilter);
     } else {
-      // @ts-expect-error gte and lte can be numeric
       query.bool.filter = [rangeFilter];
     }
   }
@@ -83,8 +83,7 @@ export function routes(coreSetup: CoreSetup<StartDeps, unknown>, logger: Logger)
     async (context, request, response) => {
       try {
         const esClient = (await context.core).elasticsearch.client;
-        // @ts-expect-error types are wrong
-        const { endpoints } = await esClient.asCurrentUser.inference.getModel({
+        const { endpoints } = await esClient.asCurrentUser.inference.get({
           inference_id: '_all',
         });
 
@@ -50,7 +50,6 @@ const getSemanticTextFields = (
   return Object.entries(fields).flatMap(([key, value]) => {
     const currentPath: string = path ? `${path}.${key}` : key;
     const currentField: Array<{ path: string; source: SemanticTextProperty }> =
-      // @ts-expect-error because semantic_text type isn't incorporated in API type yet
       value.type === 'semantic_text' ? [{ path: currentPath, source: value }] : [];
     if (hasProperties(value)) {
       const childSemanticTextFields: Array<{ path: string; source: SemanticTextProperty }> =
@@ -11,7 +11,7 @@ import { i18n } from '@kbn/i18n';
 import { EuiAccordion, EuiSpacer } from '@elastic/eui';
 import type { MlUrlConfig } from '@kbn/ml-anomaly-utils';
 import type { DataFrameAnalyticsConfig } from '@kbn/ml-data-frame-analytics-utils';
-import type { DeepPartial } from '../../../../../../../common/types/common';
+import type { DeepPartial } from '@kbn/utility-types';
 import { Description } from './description';
 import { CustomUrlsWrapper } from '../../../../../components/custom_urls';
 import {
@@ -16,7 +16,8 @@ import {
   type DataFrameAnalysisConfigType,
   type FeatureProcessor,
 } from '@kbn/ml-data-frame-analytics-utils';
-import type { DeepPartial, DeepReadonly } from '../../../../../../../common/types/common';
+import type { DeepPartial } from '@kbn/utility-types';
+import type { DeepReadonly } from '../../../../../../../common/types/common';
 import { checkPermission } from '../../../../../capabilities/check_capabilities';
 import { mlNodesAvailable } from '../../../../../ml_nodes_check';
 
@@ -14,7 +14,7 @@ import { isEqual } from 'lodash';
 import type { Observable } from 'rxjs';
 import { from, isObservable, Subject } from 'rxjs';
 import { distinctUntilChanged, flatMap, scan, shareReplay } from 'rxjs';
-import type { DeepPartial } from '../../../common/types/common';
+import type { DeepPartial } from '@kbn/utility-types';
 import { jobSelectionActionCreator } from './actions';
 import { EXPLORER_ACTION } from './explorer_constants';
 import type { ExplorerState } from './reducers';
@@ -7,6 +7,7 @@
 
 import { useMemo } from 'react';
 
+import type { DeepPartial } from '@kbn/utility-types';
 import type { NewJobCapsResponse } from '@kbn/ml-anomaly-utils';
 import type {
   AnalyticsMapReturnType,
@@ -21,7 +22,6 @@ import type { HttpService } from '../http_service';
 import { useMlKibana } from '../../contexts/kibana';
 
 import type { ValidateAnalyticsJobResponse } from '../../../../common/constants/validation';
-import type { DeepPartial } from '../../../../common/types/common';
 import type { JobMessage } from '../../../../common/types/audit_message';
 import type { PutDataFrameAnalyticsResponseSchema } from '../../../../server/routes/schemas/data_frame_analytics_schema';
 
@@ -11,11 +11,6 @@ import type { ModelConfig } from '@kbn/inference_integration_flyout/types';
 import type { HttpService } from '../http_service';
 import { ML_INTERNAL_BASE_PATH } from '../../../../common/constants/app';
 
-// TODO remove inference_id when esType has been updated to include it
-export interface GetInferenceEndpointsResponse extends estypes.InferenceModelConfigContainer {
-  inference_id: string;
-}
-
 export function inferenceModelsApiProvider(httpService: HttpService) {
   return {
     /**
@@ -29,7 +24,7 @@ export function inferenceModelsApiProvider(httpService: HttpService) {
       taskType: InferenceTaskType,
       modelConfig: ModelConfig
     ) {
-      const result = await httpService.http<estypes.InferencePutModelResponse>({
+      const result = await httpService.http<estypes.InferencePutResponse>({
         path: `${ML_INTERNAL_BASE_PATH}/_inference/${taskType}/${inferenceId}`,
         method: 'PUT',
         body: JSON.stringify(modelConfig),
@@ -41,9 +36,7 @@ export function inferenceModelsApiProvider(httpService: HttpService) {
      * Gets all inference endpoints
      */
     async getAllInferenceEndpoints() {
-      const result = await httpService.http<{
-        endpoints: GetInferenceEndpointsResponse[];
-      }>({
+      const result = await httpService.http<estypes.InferenceGetResponse>({
         path: `${ML_INTERNAL_BASE_PATH}/_inference/all`,
         method: 'GET',
         version: '1',
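The hunks above drop the hand-rolled GetInferenceEndpointsResponse in favour of the client's own response types. A rough sketch of how those types line up in 8.15; the helper below is illustrative and not part of this commit:

import type * as estypes from '@elastic/elasticsearch/lib/api/types';

// PUT _inference/{task_type}/{inference_id} resolves to InferencePutResponse, and the
// get-all response is InferenceGetResponse, whose `endpoints` array is typed as
// InferenceInferenceEndpointInfo[] and already carries `inference_id`, so the local
// interface that patched it in is no longer needed.
type GetResponse = estypes.InferenceGetResponse;
type Endpoint = GetResponse['endpoints'][number];

const pickInferenceIds = (response: GetResponse): string[] =>
  response.endpoints.map((endpoint: Endpoint) => endpoint.inference_id);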
@@ -9,12 +9,12 @@ import type { JobsHealthService } from './jobs_health_service';
 import { jobsHealthServiceProvider } from './jobs_health_service';
 import type { DatafeedsService } from '../../models/job_service/datafeeds';
 import type { Logger } from '@kbn/core/server';
+import type { DeepPartial } from '@kbn/utility-types';
 import type { MlClient } from '../ml_client';
 import type { MlJob, MlJobStats } from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
 import type { AnnotationService } from '../../models/annotation_service/annotation';
 import type { JobsHealthExecutorOptions } from './register_jobs_monitoring_rule_type';
 import type { JobAuditMessagesService } from '../../models/job_audit_messages/job_audit_messages';
-import type { DeepPartial } from '../../../common/types/common';
 import type { FieldFormatsRegistryProvider } from '../../../common/types/kibana';
 
 const MOCK_DATE_NOW = 1487076708000;
@@ -10,7 +10,7 @@ import type { IScopedClusterClient } from '@kbn/core/server';
 import { JOB_MAP_NODE_TYPES, type MapElements } from '@kbn/ml-data-frame-analytics-utils';
 import { flatten } from 'lodash';
 import type {
-  InferenceModelConfig,
+  InferenceInferenceEndpoint,
   InferenceTaskType,
   TasksTaskInfo,
   TransformGetTransformTransformSummary,
@@ -591,19 +591,19 @@ export class ModelsProvider {
    * Puts the requested Inference endpoint id into elasticsearch, triggering elasticsearch to create the inference endpoint id
    * @param inferenceId - Inference Endpoint Id
    * @param taskType - Inference Task type. Either sparse_embedding or text_embedding
-   * @param modelConfig - Model configuration based on service type
+   * @param inferenceConfig - Model configuration based on service type
    */
   async createInferenceEndpoint(
     inferenceId: string,
     taskType: InferenceTaskType,
-    modelConfig: InferenceModelConfig
+    inferenceConfig: InferenceInferenceEndpoint
   ) {
     try {
-      const result = await this._client.asCurrentUser.inference.putModel(
+      const result = await this._client.asCurrentUser.inference.put(
        {
          inference_id: inferenceId,
          task_type: taskType,
-          model_config: modelConfig,
+          inference_config: inferenceConfig,
        },
        { maxRetries: 0 }
      );
@@ -613,7 +613,7 @@ export class ModelsProvider {
       // Erroring out is misleading in these cases, so we return the model_id and task_type
       if (error.name === 'TimeoutError') {
         return {
-          model_id: modelConfig.service,
+          model_id: inferenceConfig.service,
           task_type: taskType,
         };
       } else {
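On the Elasticsearch JS client itself, the same rename shows up as shorter method names on the inference namespace. A small sketch of the 8.15 calls used in the hunks above and below; the client instance and config values are placeholders, only the method and property names come from this commit:

import { Client } from '@elastic/elasticsearch';
import type {
  InferenceInferenceEndpoint,
  InferenceTaskType,
} from '@elastic/elasticsearch/lib/api/types';

const client = new Client({ node: 'http://localhost:9200' }); // placeholder connection

async function inferenceRoundTrip(
  inferenceId: string,
  taskType: InferenceTaskType,
  inferenceConfig: InferenceInferenceEndpoint
) {
  // 8.14: inference.putModel / inference.getModel / inference.deleteModel
  // 8.15: inference.put / inference.get / inference.delete, with model_config renamed to inference_config
  await client.inference.put(
    { inference_id: inferenceId, task_type: taskType, inference_config: inferenceConfig },
    { maxRetries: 0 }
  );
  const { endpoints } = await client.inference.get({ inference_id: '_all' });
  await client.inference.delete({ inference_id: inferenceId, task_type: taskType });
  return endpoints;
}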
@@ -6,7 +6,10 @@
  */
 import type { CloudSetup } from '@kbn/cloud-plugin/server';
 import { schema } from '@kbn/config-schema';
-import type { InferenceModelConfig, InferenceTaskType } from '@elastic/elasticsearch/lib/api/types';
+import type {
+  InferenceInferenceEndpoint,
+  InferenceTaskType,
+} from '@elastic/elasticsearch/lib/api/types';
 import type { InferenceAPIConfigResponse } from '@kbn/ml-trained-models-utils';
 import type { RouteInitialization } from '../types';
 import { createInferenceSchema } from './schemas/inference_schema';
@@ -46,7 +49,7 @@ export function inferenceModelRoutes(
         const body = await modelsProvider(client, mlClient, cloud).createInferenceEndpoint(
           inferenceId,
           taskType as InferenceTaskType,
-          request.body as InferenceModelConfig
+          request.body as InferenceInferenceEndpoint
         );
         const { syncSavedObjects } = syncSavedObjectsFactory(client, mlSavedObjectService);
         await syncSavedObjects(false);
@@ -50,7 +50,6 @@ export const getPingHistogram: UMElasticsearchQueryFn<
     body: {
       query: {
         bool: {
-          // @ts-expect-error upgrade typescript v5.1.6
           filter: [...filter, SUMMARY_FILTER, EXCLUDE_RUN_ONCE_FILTER],
         },
       },
@@ -81,7 +80,6 @@ export const getPingHistogram: UMElasticsearchQueryFn<
   });
 
   const { body: result } = await uptimeEsClient.search(params, 'getPingsOverTime');
-  // @ts-expect-error upgrade typescript v5.1.6
   const buckets = result?.aggregations?.timeseries?.buckets ?? [];
 
   const histogram = buckets.map((bucket: Pick<(typeof buckets)[0], 'key' | 'down' | 'up'>) => {
@@ -367,7 +367,8 @@ const allowedFilterKeysSchema = t.union([
   t.literal('range'),
   t.literal('rank_feature'),
   t.literal('regexp'),
-  t.literal('rule_query'),
+  t.literal('rule'),
+  t.literal('semantic'),
   t.literal('shape'),
   t.literal('simple_query_string'),
   t.literal('span_containing'),
@@ -30,7 +30,6 @@ import {
   AggregateName,
   AggregationsAggregate,
   AggregationsMultiBucketAggregateBase,
-  InlineScript,
   MappingRuntimeFields,
   QueryDslQueryContainer,
   SortCombinations,
@@ -826,7 +825,6 @@ export class AlertsClient {
     const result = await this.esClient.updateByQuery({
       index,
       conflicts: 'proceed',
-      body: {
       script: {
         source: `if (ctx._source['${ALERT_WORKFLOW_STATUS}'] != null) {
             ctx._source['${ALERT_WORKFLOW_STATUS}'] = '${status}'
@@ -835,9 +833,8 @@ export class AlertsClient {
             ctx._source.signal.status = '${status}'
           }`,
         lang: 'painless',
-      } as InlineScript,
-      query: fetchAndAuditResponse.authorizedQuery as Omit<QueryDslQueryContainer, 'script'>,
       },
+      query: fetchAndAuditResponse.authorizedQuery as Omit<QueryDslQueryContainer, 'script'>,
       ignore_unavailable: true,
     });
     return result;
@@ -965,14 +962,12 @@ export class AlertsClient {
       await this.esClient.updateByQuery({
         index,
         conflicts: 'proceed',
-        body: {
         script: {
           source: painlessScript,
           lang: 'painless',
           params: { caseIds },
-        } as InlineScript,
-        query: esQuery,
         },
+        query: esQuery,
         ignore_unavailable: true,
       });
     } catch (err) {
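Besides dropping the InlineScript cast, the AlertsClient hunks also remove the body wrapper around updateByQuery, passing script and query at the top level of the request. A minimal sketch of the flattened call shape; the index, script, and query values here are placeholders:

import { Client } from '@elastic/elasticsearch';

const esClient = new Client({ node: 'http://localhost:9200' }); // placeholder connection

async function acknowledgeAlerts() {
  await esClient.updateByQuery({
    index: 'my-alerts-index', // placeholder index
    conflicts: 'proceed',
    script: {
      source: "ctx._source['kibana.alert.workflow_status'] = 'acknowledged'", // illustrative script
      lang: 'painless',
    },
    query: { match_all: {} }, // placeholder query
    ignore_unavailable: true,
  });
}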
@@ -105,7 +105,9 @@ describe('remove cases from alerts', () => {
 
     expect(esClientMock.updateByQuery.mock.calls[0][0]).toMatchInlineSnapshot(`
       Object {
-        "body": Object {
+        "conflicts": "proceed",
+        "ignore_unavailable": true,
+        "index": "undefined-*",
         "query": Object {
           "bool": Object {
             "filter": Array [
@@ -166,10 +168,6 @@ describe('remove cases from alerts', () => {
             }
           }",
         },
-        },
-        "conflicts": "proceed",
-        "ignore_unavailable": true,
-        "index": "undefined-*",
       }
     `);
   });
@@ -13,7 +13,7 @@ describe('deleteInferenceEndpoint', () => {
   beforeEach(() => {
     mockClient = {
       inference: {
-        deleteModel: jest.fn(),
+        delete: jest.fn(),
       },
     };
   });
@@ -24,7 +24,7 @@ describe('deleteInferenceEndpoint', () => {
 
     await deleteInferenceEndpoint(mockClient, type, id);
 
-    expect(mockClient.inference.deleteModel).toHaveBeenCalledWith({
+    expect(mockClient.inference.delete).toHaveBeenCalledWith({
       inference_id: id,
       task_type: type,
     });
@@ -19,6 +19,6 @@ export const deleteInferenceEndpoint = async (
   id: string
 ) => {
   if (isTaskType(type)) {
-    return await client.inference.deleteModel({ inference_id: id, task_type: type });
+    return await client.inference.delete({ inference_id: id, task_type: type });
   }
 };
@@ -5,6 +5,8 @@
  * 2.0.
  */
 
+import type { EqlHitsSequence } from '@elastic/elasticsearch/lib/api/types';
+
 /**
  * Defines the search types you can have from Elasticsearch within a
  * doc._source. It uses recursive types of "| SearchTypes[]" to designate
@@ -32,10 +34,7 @@ export interface BaseHit<T> {
   fields?: Record<string, SearchTypes[]>;
 }
 
-export interface EqlSequence<T> {
-  join_keys: SearchTypes[];
-  events: Array<BaseHit<T>>;
-}
+export type EqlSequence<T> = EqlHitsSequence<T>;
 
 export interface EqlSearchResponse<T> {
   is_partial: boolean;
@@ -1406,7 +1406,7 @@ describe('TaskStore', () => {
       childEsClient.updateByQuery.mockResponse({
         hits: { hits: [], total: 0, updated: 100, version_conflicts: 0 },
       } as UpdateByQueryResponse);
-      await store.updateByQuery({ script: '' }, { max_docs: 10 });
+      await store.updateByQuery({ script: { source: '' } }, { max_docs: 10 });
      expect(childEsClient.updateByQuery).toHaveBeenCalledWith(expect.any(Object), {
        requestTimeout: 1000,
      });
@@ -28,6 +28,13 @@ import { TransformHealthColoredDot } from './transform_health_colored_dot';
 import type { SectionConfig, SectionItem } from './expanded_row_column_view';
 import { ExpandedRowColumnView } from './expanded_row_column_view';
 
+const notAvailableMessage = i18n.translate(
+  'xpack.transform.transformList.transformDetails.notAvailable',
+  {
+    defaultMessage: 'n/a',
+  }
+);
+
 interface ExpandedRowDetailsPaneProps {
   item: TransformListRow;
   onAlertEdit: (alertRule: TransformHealthAlertRule) => void;
@@ -166,17 +173,23 @@ export const ExpandedRowDetailsPane: FC<ExpandedRowDetailsPaneProps> = ({ item,
     if (displayStats.checkpointing.next.checkpoint_progress !== undefined) {
       checkpointingItems.push({
         title: 'next.checkpoint_progress.total_docs',
-        description: displayStats.checkpointing.next.checkpoint_progress.total_docs,
+        description:
+          displayStats.checkpointing.next.checkpoint_progress.total_docs ?? notAvailableMessage,
       });
       checkpointingItems.push({
         title: 'next.checkpoint_progress.docs_remaining',
-        description: displayStats.checkpointing.next.checkpoint_progress.docs_remaining,
+        description:
+          displayStats.checkpointing.next.checkpoint_progress.docs_remaining ??
+          notAvailableMessage,
       });
       checkpointingItems.push({
         title: 'next.checkpoint_progress.percent_complete',
-        description: `${Math.round(
+        description:
+          typeof displayStats.checkpointing.next.checkpoint_progress.percent_complete === 'number'
+            ? `${Math.round(
           displayStats.checkpointing.next.checkpoint_progress.percent_complete
-        )}%`,
+              )}%`
+            : notAvailableMessage,
       });
     }
   }
yarn.lock (27 lines changed)
@@ -1745,12 +1745,12 @@
     "@elastic/transport" "^8.3.1"
     tslib "^2.4.0"
 
-"@elastic/elasticsearch@^8.14.0":
-  version "8.14.0"
-  resolved "https://registry.yarnpkg.com/@elastic/elasticsearch/-/elasticsearch-8.14.0.tgz#93b1f2a7cb6cc5cd1ceebf5060576bc690432e0a"
-  integrity sha512-MGrgCI4y+Ozssf5Q2IkVJlqt5bUMnKIICG2qxeOfrJNrVugMCBCAQypyesmSSocAtNm8IX3LxfJ3jQlFHmKe2w==
+"@elastic/elasticsearch@^8.15.0":
+  version "8.15.0"
+  resolved "https://registry.yarnpkg.com/@elastic/elasticsearch/-/elasticsearch-8.15.0.tgz#cb29b3ae33203c545d435cf3dc4b557c8b4961d5"
+  integrity sha512-mG90EMdTDoT6GFSdqpUAhWK9LGuiJo6tOWqs0Usd/t15mPQDj7ZqHXfCBqNkASZpwPZpbAYVjd57S6nbUBINCg==
   dependencies:
-    "@elastic/transport" "^8.6.0"
+    "@elastic/transport" "^8.7.0"
     tslib "^2.4.0"
 
 "@elastic/ems-client@8.5.3":
@@ -1935,11 +1935,12 @@
     undici "^5.21.2"
     yaml "^2.2.2"
 
-"@elastic/transport@^8.3.1", "@elastic/transport@^8.6.0":
-  version "8.6.0"
-  resolved "https://registry.yarnpkg.com/@elastic/transport/-/transport-8.6.0.tgz#8de9794c87eb0fd2bdb2c6c1e32792aeb06b32bc"
-  integrity sha512-/Ucpztrc+urZK8yCtFBUu2LePYJNnukgZSUUApUzGH/SxejqkH526Nph7aru8I0vZwdW5wqgCHSOIq3J7tIxGg==
+"@elastic/transport@^8.3.1", "@elastic/transport@^8.7.0":
+  version "8.7.0"
+  resolved "https://registry.yarnpkg.com/@elastic/transport/-/transport-8.7.0.tgz#006987fc5583f61c266e0b1003371e82efc7a6b5"
+  integrity sha512-IqXT7a8DZPJtqP2qmX1I2QKmxYyN27kvSW4g6pInESE1SuGwZDp2FxHJ6W2kwmYOJwQdAt+2aWwzXO5jHo9l4A==
   dependencies:
+    "@opentelemetry/api" "1.x"
     debug "^4.3.4"
     hpagent "^1.0.0"
     ms "^2.1.3"
@@ -7943,10 +7944,10 @@
   dependencies:
     "@opentelemetry/api" "^1.0.0"
 
-"@opentelemetry/api@^1.0.0", "@opentelemetry/api@^1.1.0", "@opentelemetry/api@^1.4.1":
-  version "1.8.0"
-  resolved "https://registry.yarnpkg.com/@opentelemetry/api/-/api-1.8.0.tgz#5aa7abb48f23f693068ed2999ae627d2f7d902ec"
-  integrity sha512-I/s6F7yKUDdtMsoBWXJe8Qz40Tui5vsuKCWJEWVL+5q9sSWRzzx6v2KeNsOBEwd94j0eWkpWCH4yB6rZg9Mf0w==
+"@opentelemetry/api@1.x", "@opentelemetry/api@^1.0.0", "@opentelemetry/api@^1.1.0", "@opentelemetry/api@^1.4.1":
+  version "1.9.0"
+  resolved "https://registry.yarnpkg.com/@opentelemetry/api/-/api-1.9.0.tgz#d03eba68273dc0f7509e2a3d5cba21eae10379fe"
+  integrity sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg==
 
 "@opentelemetry/core@1.15.0":
   version "1.15.0"