mirror of https://github.com/elastic/kibana.git (synced 2025-04-24 01:38:56 -04:00)

[Monitoring] Ensure Stack Monitoring UI works with metricbeat-* indices (#96205)
* WIP
* WIP
* Remove unnecessary fields
* Work on node detail page
* Cluster overview looking good
* Index page
* Fix types
* ML jobs
* CCR
* CCR
* We just need total here
* Standalone cluster fix
* Re-enable logstash
* Fix jest test
* Fix tests
* Fix tests
* Fix unused import
* Add new MB-based archives
* Add actual archives
* Fix types
* Add this file back in
* Fix tests and add more
* Update whitelist
* Renames
* Renames
* Only do ccs if enabled
* Updates
* Comment out
* More tests passing
* Another passing test
* More passing, yay
* Forgot to add the actual tests, wow
* CCR
* Fix CI issues
* Missed a field here
* Kibana tests passing
* Fix type issues
* Fix type
* Fix types
* Good chunk of logstash work
* Fix types
* Fix jest test
* Fix issue with get usage in logstash code
* Fix small bug here
* Update archives with proper mappings
* Handle both legacy and mb fields properly
* Fixes
* Fix jest test
* Fix issue
* Getting setup tests in a better state
* Only beats is failing now, which is good
* Progress on cluster listing
* Functional tests passing!
* More progress on cluster tests
* More cluster work
* Fix test
* Last recovery working
* Fix types
* Fix tests
* More tweaks
* Fix snapshot
* Use stats instead of kibana_stats
* Use node and node_stats for logstash
* Beats tests passing
* APM tests passing
* API tests passing!
* Fix types
* Fix tests
* Renames beats-with-restarted-instance archive dirs (kebab case is disallowed for all newly added files)
* Renames logstash-pipeline dirs (kebab case disallowed for all new files)
* Renames multi-basic dirs (kebab case disallowed for all new files)
* Renames singlecluster-* dirs (kebab case disallowed for all new files)
* Fixes inaccurate path change for archive setup
* Reverts changes to rebuild_all script

Co-authored-by: Jason Rhodes <jason.matthew.rhodes@gmail.com>
Co-authored-by: Kibana Machine <42973632+kibanamachine@users.noreply.github.com>
This commit is contained in:
parent
298eccb1ff
commit
c9ce295a0b
272 changed files with 1,179,027 additions and 240,813 deletions
@@ -139,7 +139,6 @@ export const TEMPORARILY_IGNORED_PATHS = [
  'test/functional/apps/management/exports/_import_objects-conflicts.json',
  'x-pack/legacy/plugins/index_management/public/lib/editSettings.js',
  'x-pack/legacy/plugins/license_management/public/store/reducers/licenseManagement.js',
  'x-pack/plugins/monitoring/public/components/sparkline/__mocks__/plugins/xpack_main/jquery_flot.js',
  'x-pack/plugins/monitoring/public/icons/health-gray.svg',
  'x-pack/plugins/monitoring/public/icons/health-green.svg',
  'x-pack/plugins/monitoring/public/icons/health-red.svg',
@@ -150,28 +149,4 @@ export const TEMPORARILY_IGNORED_PATHS = [
  'x-pack/plugins/reporting/server/export_types/common/assets/fonts/roboto/Roboto-Medium.ttf',
  'x-pack/plugins/reporting/server/export_types/common/assets/fonts/roboto/Roboto-Regular.ttf',
  'x-pack/plugins/reporting/server/export_types/common/assets/img/logo-grey.png',
  'x-pack/test/functional/es_archives/monitoring/beats-with-restarted-instance/data.json.gz',
  'x-pack/test/functional/es_archives/monitoring/beats-with-restarted-instance/mappings.json',
  'x-pack/test/functional/es_archives/monitoring/logstash-pipelines/data.json.gz',
  'x-pack/test/functional/es_archives/monitoring/logstash-pipelines/mappings.json',
  'x-pack/test/functional/es_archives/monitoring/multi-basic/data.json.gz',
  'x-pack/test/functional/es_archives/monitoring/multi-basic/mappings.json',
  'x-pack/test/functional/es_archives/monitoring/singlecluster-basic-beats/data.json.gz',
  'x-pack/test/functional/es_archives/monitoring/singlecluster-basic-beats/mappings.json',
  'x-pack/test/functional/es_archives/monitoring/singlecluster-green-gold/data.json.gz',
  'x-pack/test/functional/es_archives/monitoring/singlecluster-green-gold/mappings.json',
  'x-pack/test/functional/es_archives/monitoring/singlecluster-green-platinum/data.json.gz',
  'x-pack/test/functional/es_archives/monitoring/singlecluster-green-platinum/mappings.json',
  'x-pack/test/functional/es_archives/monitoring/singlecluster-green-trial-two-nodes-one-cgrouped/data.json.gz',
  'x-pack/test/functional/es_archives/monitoring/singlecluster-green-trial-two-nodes-one-cgrouped/mappings.json',
  'x-pack/test/functional/es_archives/monitoring/singlecluster-red-platinum/data.json.gz',
  'x-pack/test/functional/es_archives/monitoring/singlecluster-red-platinum/mappings.json',
  'x-pack/test/functional/es_archives/monitoring/singlecluster-three-nodes-shard-relocation/data.json.gz',
  'x-pack/test/functional/es_archives/monitoring/singlecluster-three-nodes-shard-relocation/mappings.json',
  'x-pack/test/functional/es_archives/monitoring/singlecluster-yellow-basic/data.json.gz',
  'x-pack/test/functional/es_archives/monitoring/singlecluster-yellow-basic/mappings.json',
  'x-pack/test/functional/es_archives/monitoring/singlecluster-yellow-platinum--with-10-alerts/data.json.gz',
  'x-pack/test/functional/es_archives/monitoring/singlecluster-yellow-platinum--with-10-alerts/mappings.json',
  'x-pack/test/functional/es_archives/monitoring/singlecluster-yellow-platinum/data.json.gz',
  'x-pack/test/functional/es_archives/monitoring/singlecluster-yellow-platinum/mappings.json',
];
@@ -11,7 +11,6 @@ import { IHttpFetchError } from 'src/core/public';
import { InvalidNodeError } from './invalid_node';
import { DocumentTitle } from '../../../../components/document_title';
import { ErrorPageBody } from '../../../error';

interface Props {
  name: string;
  error: IHttpFetchError;
@@ -30,13 +29,11 @@ export const PageError = ({ error, name }: Props) => {
        })
      }
    />
    {
      (error.body.statusCode === 404 ? (
        <InvalidNodeError nodeName={name} />
      ) : (
        <ErrorPageBody message={error.message} />
      ))
    }
    {error.body?.statusCode === 404 ? (
      <InvalidNodeError nodeName={name} />
    ) : (
      <ErrorPageBody message={error.message} />
    )}
    </>
  );
};
@@ -39,15 +39,31 @@ export interface ElasticsearchSourceKibanaStats
    response_times?: {
      max?: number;
    };
    transport_address?: string;
    host?: string;
  };
  os?: {
    memory?: {
      free_in_bytes?: number;
    };
    load?: {
      '1m'?: number;
    };
  };
  response_times?: {
    average?: number;
    max?: number;
  };
  requests?: {
    total?: number;
  };
  process?: {
    uptime_in_millis?: number;
    memory?: {
      resident_set_size_in_bytes?: number;
    };
  };
  concurrent_connections?: number;
}

export interface ElasticsearchSourceLogstashPipelineVertex {
@@ -100,6 +116,7 @@ export interface ElasticsearchNodeStats

export interface ElasticsearchIndexStats {
  index?: string;
  name?: string;
  shards: {
    primaries: number;
  };
@@ -139,15 +156,21 @@ export interface ElasticsearchLegacySource
          heap_max_in_bytes?: number;
        };
      };
      fs: {
        available_in_bytes?: number;
        total_in_bytes?: number;
      };
      versions?: string[];
    };
    indices?: {
      count?: number;
      docs?: {
        deleted?: number;
        count?: number;
      };
      shards?: {
        total?: number;
        primaries?: number;
      };
      store?: {
        size_in_bytes?: number;
@@ -156,6 +179,7 @@ export interface ElasticsearchLegacySource
  };
  cluster_state?: {
    status?: string;
    state_uuid?: string;
    nodes?: {
      [nodeUuid: string]: {
        ephemeral_id?: string;
@@ -189,14 +213,46 @@ export interface ElasticsearchLegacySource
  };
  logstash_stats?: {
    timestamp?: string;
    logstash?: {};
    events?: {};
    reloads?: {};
    logstash?: {
      timestamp?: string;
      pipeline: {
        batch_size: number;
        workers: number;
      };
      http_address: string;
      name: string;
      host: string;
      uuid: string;
      version: string;
      status: string;
    };
    queue?: {
      type?: string;
    };
    jvm?: {
      uptime_in_millis?: number;
      mem?: {
        heap_used_percent?: number;
      };
    };
    process?: {
      cpu?: {
        percent?: number;
      };
    };
    os?: {
      cpu?: {
        load_average?: {
          '1m'?: number;
        };
      };
    };
    events?: {
      out?: number;
    };
    reloads?: {
      failures?: number;
      successes?: number;
    };
  };
  beats_stats?: {
@@ -276,14 +332,12 @@ export interface ElasticsearchLegacySource
  };
  index_stats?: ElasticsearchIndexStats;
  node_stats?: ElasticsearchNodeStats;
  service?: {
    address?: string;
  };
  shard?: {
    index?: string;
    shard?: string;
    state?: string;
    primary?: boolean;
    relocating_node?: string;
    relocating_node: string | null;
    node?: string;
  };
  ccr_stats?: {
@@ -303,17 +357,309 @@ export interface ElasticsearchLegacySource
}

export interface ElasticsearchIndexRecoveryShard {
  start_time_in_millis: number;
  stop_time_in_millis: number;
  id?: number;
  name?: string;
  stage?: string;
  type?: string;
  primary?: boolean;
  source?: {
    name?: string;
    transport_address?: string;
  };
  target?: {
    name?: string;
    transport_address?: string;
  };
  index?: {
    files?: {
      percent?: string;
      recovered?: number;
      total?: number;
      reused?: number;
    };
    size?: {
      recovered_in_bytes?: number;
      reused_in_bytes?: number;
      total_in_bytes?: number;
    };
  };
  start_time_in_millis?: number;
  stop_time_in_millis?: number;
  translog?: {
    total?: number;
    percent?: string;
    total_on_start?: number;
  };
}

export interface ElasticsearchMetricbeatNode {
  name?: string;
  stats?: ElasticsearchNodeStats;
}

export interface ElasticsearchMetricbeatSource {
  '@timestamp'?: string;
  service?: {
    address?: string;
  };
  elasticsearch?: {
    node?: ElasticsearchLegacySource['source_node'] & ElasticsearchMetricbeatNode;
    index?: ElasticsearchIndexStats & {
      recovery?: ElasticsearchIndexRecoveryShard;
    };
    version?: string;
    shard?: ElasticsearchLegacySource['shard'] & {
      number?: string;
      relocating_node?: {
        id?: string;
      };
    };
    ml?: {
      job?: {
        id?: string;
        state?: string;
        model_size?: {};
        data_counts?: {
          processed_record_count?: number;
        };
        forecasts_stats?: {
          total?: number;
        };
      };
    };
    ccr?: {
      leader?: {
        index?: string;
      };
      follower?: {
        index?: string;
        shard?: {
          number?: number;
        };
        time_since_last_read?: {
          ms?: number;
        };
        operations_written?: number;
        failed_read_requests?: number;
      };

      read_exceptions?: Array<{
        exception?: {
          type?: string;
        };
      }>;
    };
    cluster?: {
      name?: string;
      id?: string;
      stats?: {
        license?: ElasticsearchLegacySource['license'];
        state?: {
          state_uuid?: string;
          master_node?: string;
          nodes?: {
            [uuid: string]: {};
          };
        };
        status?: string;
        version?: string;
        indices?: {
          total?: number;
          docs?: {
            deleted?: number;
            total?: number;
          };
          shards?: {
            count?: number;
            primaries?: number;
          };
          store?: {
            size?: {
              bytes?: number;
            };
          };
        };
        nodes?: {
          versions?: string[];
          count?: number;
          jvm?: {
            max_uptime?: {
              ms?: number;
            };
            memory?: {
              heap?: {
                used?: {
                  bytes?: number;
                };
                max?: {
                  bytes?: number;
                };
              };
            };
          };
          fs?: {
            available?: {
              bytes?: number;
            };
            total?: {
              bytes?: number;
            };
          };
        };
        stack?: {
          xpack?: {
            ccr?: {
              available?: boolean;
              enabled?: boolean;
            };
          };
        };
      };
    };
  };
  kibana?: {
    kibana?: {
      transport_address?: string;
      name?: string;
      host?: string;
      uuid?: string;
      status?: string;
    };
    stats?: {
      concurrent_connections?: number;
      process?: {
        uptime?: {
          ms?: number;
        };
        memory?: {
          heap?: {
            size_limit?: {
              bytes?: number;
            };
          };
          resident_set_size?: {
            bytes?: number;
          };
        };
      };
      os?: {
        load?: {
          '1m'?: number;
        };
        memory?: {
          free_in_bytes?: number;
        };
      };
      request?: {
        disconnects?: number;
        total?: number;
      };
      response_time?: {
        avg?: {
          ms?: number;
        };
        max?: {
          ms?: number;
        };
      };
    };
  };
  logstash?: {
    node?: {
      stats?: {
        timestamp?: string;
        logstash?: {
          pipeline: {
            batch_size: number;
            workers: number;
          };
          http_address: string;
          name: string;
          host: string;
          uuid: string;
          version: string;
          status: string;
        };
        queue?: {
          type?: string;
        };
        jvm?: {
          uptime_in_millis?: number;
          mem?: {
            heap_used_percent?: number;
          };
        };
        process?: {
          cpu?: {
            percent?: number;
          };
        };
        os?: {
          cpu?: {
            load_average?: {
              '1m'?: number;
            };
          };
        };
        events?: {
          out?: number;
        };
        reloads?: {
          failures?: number;
          successes?: number;
        };
      };
    };
  };
  beat?: {
    stats?: {
      timestamp?: string;
      beat?: {
        uuid?: string;
        name?: string;
        type?: string;
        version?: string;
        host?: string;
      };
      handles?: {
        limit?: {
          hard?: number;
          soft?: number;
        };
      };
      info?: {
        uptime?: {
          ms?: number;
        };
      };
      memstats?: {
        memory?: {
          alloc?: number;
        };
      };
      libbeat?: {
        config?: {
          reloads?: number;
        };
        output?: {
          type?: string;
          read?: {
            errors?: number;
          };
          write?: {
            bytes?: string;
            errors?: number;
          };
        };
        pipeline?: {
          events?: {
            total?: number;
            published?: number;
            dropped?: number;
          };
        };
      };
    };
  };
}
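The two top-level document shapes above are what the rest of this commit bridges: ElasticsearchLegacySource for documents written by internal collection into .monitoring-* indices, and ElasticsearchMetricbeatSource for documents shipped by Metricbeat into metricbeat-* indices. A minimal sketch of a reader that accepts either shape — the metricbeat-side path is taken from the interface above, while the `cluster_stats` nesting on the legacy side is inferred from other hunks in this diff, and the `./es` import path is assumed; the helper itself is illustrative, not part of the commit:

    // Illustrative helper, not in the diff: coalesce the metricbeat-* field
    // with its legacy .monitoring-* equivalent on a document of either shape.
    import type { ElasticsearchLegacySource, ElasticsearchMetricbeatSource } from './es';

    type MonitoringDoc = ElasticsearchLegacySource & ElasticsearchMetricbeatSource;

    function getHeapMaxBytes(doc: MonitoringDoc): number | undefined {
      return (
        // metricbeat-* shape (defined above)
        doc.elasticsearch?.cluster?.stats?.nodes?.jvm?.memory?.heap?.max?.bytes ??
        // legacy shape (cluster_stats nesting assumed from other hunks)
        doc.cluster_stats?.nodes?.jvm?.mem?.heap_max_in_bytes
      );
    }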
38 x-pack/plugins/monitoring/common/types/filebeat.ts (new file)
@@ -0,0 +1,38 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

export interface FilebeatResponse {
  hits?: {
    hits: FilebeatResponseHit[];
    total: {
      value: number;
    };
  };
  aggregations?: any;
}

export interface FilebeatResponseHit {
  _source: {
    message?: string;
    log?: {
      level?: string;
    };
    '@timestamp': string;
    event?: {
      dataset?: string;
    };
    elasticsearch?: {
      component?: string;
      index?: {
        name?: string;
      };
      node?: {
        name?: string;
      };
    };
  };
}
@@ -48,7 +48,7 @@ export function BeatsPanel(props) {
    />
  ) : null;

  const beatTypes = props.beats.types.map((beat, index) => {
  const beatTypes = get(props, 'beats.types', []).map((beat, index) => {
    return [
      <EuiDescriptionListTitle
        key={`beat-types-type-${index}`}
@@ -424,8 +424,8 @@ export function ElasticsearchPanel(props) {
        </EuiDescriptionListTitle>
        <EuiDescriptionListDescription data-test-subj="esDiskAvailable">
          <BytesPercentageUsage
            usedBytes={get(nodes, 'fs.available_in_bytes')}
            maxBytes={get(nodes, 'fs.total_in_bytes')}
            usedBytes={get(nodes, 'fs.available.bytes', get(nodes, 'fs.available_in_bytes'))}
            maxBytes={get(nodes, 'fs.total.bytes', get(nodes, 'fs.total_in_bytes'))}
          />
        </EuiDescriptionListDescription>
        <EuiDescriptionListTitle className="eui-textBreakWord">
@@ -437,8 +437,16 @@ export function ElasticsearchPanel(props) {
        </EuiDescriptionListTitle>
        <EuiDescriptionListDescription data-test-subj="esJvmHeap">
          <BytesPercentageUsage
            usedBytes={get(nodes, 'jvm.mem.heap_used_in_bytes')}
            maxBytes={get(nodes, 'jvm.mem.heap_max_in_bytes')}
            usedBytes={get(
              nodes,
              'jvm.mem.heap.used.bytes',
              get(nodes, 'jvm.mem.heap_used_in_bytes')
            )}
            maxBytes={get(
              nodes,
              'jvm.mem.heap.max.bytes',
              get(nodes, 'jvm.mem.heap_max_in_bytes')
            )}
          />
        </EuiDescriptionListDescription>
      </EuiDescriptionList>
@@ -489,7 +497,7 @@ export function ElasticsearchPanel(props) {
          data-test-subj="esDocumentsCount"
          className="eui-textBreakWord"
        >
          {formatNumber(get(indices, 'docs.count'), 'int_commas')}
          {formatNumber(get(indices, 'docs.total', get(indices, 'docs.count')), 'int_commas')}
        </EuiDescriptionListDescription>

        <EuiDescriptionListTitle className="eui-textBreakWord">
@@ -499,7 +507,10 @@ export function ElasticsearchPanel(props) {
          />
        </EuiDescriptionListTitle>
        <EuiDescriptionListDescription data-test-subj="esDiskUsage">
          {formatNumber(get(indices, 'store.size_in_bytes'), 'byte')}
          {formatNumber(
            get(indices, 'store.size.bytes', get(indices, 'store.size_in_bytes')),
            'byte'
          )}
        </EuiDescriptionListDescription>

        <EuiDescriptionListTitle className="eui-textBreakWord">
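Each change above follows one idiom: `get(source, metricbeatPath, get(source, legacyPath))` — try the metricbeat-style field first and fall back to the legacy one, so the panel renders correctly whichever pipeline produced the document. Extracted as a standalone sketch (hypothetical helper, not part of the diff):

    import { get } from 'lodash';

    // Hypothetical helper: prefer the metricbeat-* path, fall back to legacy.
    const readMetric = (source: unknown, mbPath: string, legacyPath: string) =>
      get(source, mbPath, get(source, legacyPath));

    // Usage mirroring the panel above:
    // readMetric(nodes, 'fs.available.bytes', 'fs.available_in_bytes');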
@@ -47,13 +47,7 @@ exports[`CcrShard that is renders an exception properly 1`] = `
`;

exports[`CcrShard that it renders normally 1`] = `
<EuiPage
  style={
    Object {
      "backgroundColor": "white",
    }
  }
>
<EuiPage>
  <EuiPageBody>
    <EuiPanel>
      <Status
@@ -128,7 +128,7 @@ export class CcrShard extends PureComponent {
    const { stat, oldestStat, formattedLeader, alerts } = this.props;

    return (
      <EuiPage style={{ backgroundColor: 'white' }}>
      <EuiPage>
        <EuiPageBody>
          <EuiPanel>
            <Status
@@ -6,23 +6,21 @@
 */

import React from 'react';
import { get } from 'lodash';
import { SummaryStatus } from '../../summary_status';
import { formatMetric } from '../../../lib/format_number';
import { i18n } from '@kbn/i18n';
import { AlertsStatus } from '../../../alerts/status';

export function Status({ stat, formattedLeader, oldestStat, alerts = {} }) {
  const {
    follower_index: followerIndex,
    shard_id: shardId,
    operations_written: operationsReceived,
    failed_read_requests: failedFetches,
  } = stat;

  const {
    operations_written: oldestOperationsReceived,
    failed_read_requests: oldestFailedFetches,
  } = oldestStat;
  const followerIndex = stat.follower_index || get(stat, 'follower.index');
  const shardId = stat.shard_id || get(stat, 'follower.shard.number');
  const operationsReceived = stat.operations_written || get(stat, 'follower.operations_written');
  const failedFetches = stat.failed_read_requests || get(stat, 'requests.failed.read.count');
  const oldestOperationsReceived =
    oldestStat.operations_written || get(oldestStat, 'follower.operations_written');
  const oldestFailedFetches =
    oldestStat.failed_read_requests || get(oldestStat, 'requests.failed.read.count');

  const metrics = [
    {
@@ -31,8 +31,16 @@ function sortByName(item) {

export class Assigned extends React.Component {
  createShard = (shard) => {
    const type = shard.primary ? 'primary' : 'replica';
    const key = `${shard.index}.${shard.node}.${type}.${shard.state}.${shard.shard}`;
    const type = get(shard, 'shard.primary', shard.primary) ? 'primary' : 'replica';
    const key = `${get(shard, 'index.name', shard.index)}.${get(
      shard,
      'node.name',
      shard.node
    )}.${type}.${get(shard, 'shard.state', shard.state)}.${get(
      shard,
      'shard.number',
      shard.shard
    )}`;
    return <Shard shard={shard} key={key} />;
  };

@@ -6,6 +6,7 @@
 */

import React from 'react';
import { get } from 'lodash';
import { calculateClass } from '../lib/calculate_class';
import { vents } from '../lib/vents';
import { i18n } from '@kbn/i18n';
@@ -65,9 +66,11 @@ export class Shard extends React.Component {

  generateKey = (relocating) => {
    const shard = this.props.shard;
    const shardType = shard.primary ? 'primary' : 'replica';
    const additionId = shard.state === 'UNASSIGNED' ? Math.random() : '';
    const node = relocating ? shard.relocating_node : shard.node;
    const shardType = get(shard, 'shard.primary', shard.primary) ? 'primary' : 'replica';
    const additionId = get(shard, 'shard.state', shard.state) === 'UNASSIGNED' ? Math.random() : '';
    const node = relocating
      ? get(shard, 'relocation_node.uuid', shard.relocating_node)
      : get(shard, 'shard.name', shard.node);
    return shard.index + '.' + node + '.' + shardType + '.' + shard.shard + additionId;
  };

@@ -93,9 +96,9 @@ export class Shard extends React.Component {
    const shard = this.props.shard;
    const classes = calculateClass(shard);
    const color = getColor(classes);
    const classification = classes + ' ' + shard.shard;
    const classification = classes + ' ' + get(shard, 'shard.number', shard.shard);

    let shardUi = <EuiBadge color={color}>{shard.shard}</EuiBadge>;
    let shardUi = <EuiBadge color={color}>{get(shard, 'shard.number', shard.shard)}</EuiBadge>;
    const tooltipContent =
      shard.tooltip_message ||
      i18n.translate('xpack.monitoring.elasticsearch.shardAllocation.shardDisplayName', {
@@ -4,6 +4,7 @@
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */
import { get } from 'lodash';

export function calculateClass(item, initial) {
  const classes = [item.type];
@@ -12,9 +13,16 @@ export function calculateClass(item, initial) {
  }
  if (item.type === 'shard') {
    classes.push('monShard');
    classes.push((item.primary && 'primary') || 'replica');
    classes.push(item.state.toLowerCase());
    if (item.state === 'UNASSIGNED' && item.primary) {
    if (get(item, 'shard.primary', item.primary)) {
      classes.push('primary');
    } else {
      classes.push('replica');
    }
    classes.push(get(item, 'shard.state', item.state).toLowerCase());
    if (
      get(item, 'shard.state', item.state) === 'UNASSIGNED' &&
      get(item, 'shard.primary', item.primary)
    ) {
      classes.push('emergency');
    }
  }
@@ -41,7 +41,8 @@ export function decorateShards(shards, nodes) {
        );
      }
    }
    return upperFirst(shard.state.toLowerCase());
    const state = get(shard, 'state', get(shard, 'shard.state'));
    return upperFirst(state.toLowerCase());
  }

  return shards.map((shard) => {
@@ -5,7 +5,7 @@
 * 2.0.
 */

import { find, some, reduce, values, sortBy } from 'lodash';
import { find, some, reduce, values, sortBy, get } from 'lodash';
import { hasPrimaryChildren } from '../lib/has_primary_children';
import { decorateShards } from '../lib/decorate_shards';

@@ -28,8 +28,8 @@ export function nodesByIndices() {
  }

  function createIndexAddShard(obj, shard) {
    const node = shard.node || 'unassigned';
    const index = shard.index;
    const node = get(shard, 'node.name', shard.node || 'unassigned');
    const index = get(shard, 'index.name', shard.index);
    if (!obj[node]) {
      createNode(obj, nodes[node], node);
    }
@@ -111,7 +111,17 @@ export function monitoringMlListingProvider() {
        }
      );

      scope.$watch('jobs', (jobs = []) => {
      scope.$watch('jobs', (_jobs = []) => {
        const jobs = _jobs.map((job) => {
          if (job.ml) {
            return {
              ...job.ml.job,
              node: job.node,
              job_id: job.ml.job.id,
            };
          }
          return job;
        });
        const mlTable = (
          <EuiPage>
            <EuiPageBody>
@@ -75,8 +75,16 @@ uiRoutes.when('/elasticsearch/ccr/:index/shard/:shardId', {
        i18n.translate('xpack.monitoring.elasticsearch.ccr.shard.pageTitle', {
          defaultMessage: 'Elasticsearch Ccr Shard - Index: {followerIndex} Shard: {shardId}',
          values: {
            followerIndex: get(pageData, 'stat.follower_index'),
            shardId: get(pageData, 'stat.shard_id'),
            followerIndex: get(
              pageData,
              'stat.follower.index',
              get(pageData, 'stat.follower_index')
            ),
            shardId: get(
              pageData,
              'stat.follower.shard.number',
              get(pageData, 'stat.shard_id')
            ),
          },
        })
      );
@@ -6,7 +6,7 @@
 */
import { ElasticsearchClient } from 'kibana/server';
import { AlertCluster, AlertClusterHealth } from '../../../common/types/alerts';
import { ElasticsearchSource } from '../../../common/types/es';
import { ElasticsearchSource, ElasticsearchResponse } from '../../../common/types/es';

export async function fetchClusterHealth(
  esClient: ElasticsearchClient,
@@ -59,8 +59,9 @@ export async function fetchClusterHealth(
    },
  };

  const { body: response } = await esClient.search<ElasticsearchSource>(params);
  return response.hits.hits.map((hit) => {
  const result = await esClient.search<ElasticsearchSource>(params);
  const response: ElasticsearchResponse = result.body as ElasticsearchResponse;
  return (response.hits?.hits ?? []).map((hit) => {
    return {
      health: hit._source!.cluster_state?.status,
      clusterUuid: hit._source!.cluster_uuid,
@@ -6,7 +6,7 @@
 */
import { ElasticsearchClient } from 'kibana/server';
import { AlertCluster, AlertVersions } from '../../../common/types/alerts';
import { ElasticsearchSource } from '../../../common/types/es';
import { ElasticsearchSource, ElasticsearchResponse } from '../../../common/types/es';

export async function fetchElasticsearchVersions(
  esClient: ElasticsearchClient,
@@ -60,8 +60,9 @@ export async function fetchElasticsearchVersions(
    },
  };

  const { body: response } = await esClient.search<ElasticsearchSource>(params);
  return response.hits.hits.map((hit) => {
  const result = await esClient.search<ElasticsearchSource>(params);
  const response: ElasticsearchResponse = result.body as ElasticsearchResponse;
  return (response.hits?.hits ?? []).map((hit) => {
    const versions = hit._source!.cluster_stats?.nodes?.versions ?? [];
    return {
      versions,
@@ -94,13 +94,13 @@ export const apmUuidsAgg = (maxBucketSize) => ({
});

export const apmAggResponseHandler = (response) => {
  const apmTotal = get(response, 'aggregations.total.value', null);
  const apmTotal = get(response, 'aggregations.total.value', 0);

  const eventsTotalMax = get(response, 'aggregations.max_events_total.value', null);
  const eventsTotalMin = get(response, 'aggregations.min_events_total.value', null);
  const memRssMax = get(response, 'aggregations.max_mem_rss_total.value', null);
  const memRssMin = get(response, 'aggregations.min_mem_rss_total.value', null);
  const memTotal = get(response, 'aggregations.max_mem_total_total.value', null);
  const eventsTotalMax = get(response, 'aggregations.max_events_total.value', 0);
  const eventsTotalMin = get(response, 'aggregations.min_events_total.value', 0);
  const memRssMax = get(response, 'aggregations.max_mem_rss_total.value', 0);
  const memRssMin = get(response, 'aggregations.min_mem_rss_total.value', 0);
  const memTotal = get(response, 'aggregations.max_mem_total_total.value', 0);

  return {
    apmTotal,
@@ -32,7 +32,7 @@ export async function getTimeOfLastEvent({
    size: 1,
    ignoreUnavailable: true,
    body: {
      _source: ['timestamp'],
      _source: ['beats_stats.timestamp', '@timestamp'],
      sort: [
        {
          timestamp: {
@@ -60,5 +60,8 @@ export async function getTimeOfLastEvent({
  };

  const response = await callWithRequest(req, 'search', params);
  return response.hits?.hits.length ? response.hits?.hits[0]?._source.timestamp : undefined;
  return response.hits?.hits.length
    ? response.hits?.hits[0]?._source.beats_stats?.timestamp ??
      response.hits?.hits[0]?._source['@timestamp']
    : undefined;
}
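The `_source` change above is the read-side half of the same compatibility story: request both candidate timestamp fields, then coalesce with `??` when reading the hit. Isolated as a sketch (illustrative, not the module's actual export):

    // Illustrative: whichever timestamp field exists on the returned hit wins.
    const lastEventTime = (hit: {
      _source: { beats_stats?: { timestamp?: string }; '@timestamp'?: string };
    }) => hit._source.beats_stats?.timestamp ?? hit._source['@timestamp'];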
@@ -18,7 +18,7 @@ export function createApmQuery(options = {}) {
  options = defaults(options, {
    filters: [],
    metric: ApmMetric.getMetricFields(),
    type: 'beats_stats',
    types: ['stats', 'beats_stats'],
  });

  options.filters.push({
@@ -29,36 +29,40 @@ export function handleResponse(

  const firstHit = response.hits.hits[0];

  let firstStats = null;
  const stats = firstHit._source.beats_stats ?? {};

  let firstStatsMetrics = null;
  if (
    firstHit.inner_hits?.first_hit?.hits?.hits &&
    firstHit.inner_hits?.first_hit?.hits?.hits.length > 0 &&
    firstHit.inner_hits.first_hit.hits.hits[0]._source.beats_stats
    firstHit.inner_hits?.first_hit?.hits?.hits.length > 0
  ) {
    firstStats = firstHit.inner_hits.first_hit.hits.hits[0]._source.beats_stats;
    firstStatsMetrics =
      firstHit.inner_hits.first_hit.hits.hits[0]._source.beats_stats?.metrics ??
      firstHit.inner_hits.first_hit.hits.hits[0]._source.beat?.stats;
  }

  const eventsTotalFirst = firstStats?.metrics?.libbeat?.pipeline?.events?.total;
  const eventsEmittedFirst = firstStats?.metrics?.libbeat?.pipeline?.events?.published;
  const eventsDroppedFirst = firstStats?.metrics?.libbeat?.pipeline?.events?.dropped;
  const bytesWrittenFirst = firstStats?.metrics?.libbeat?.output?.write?.bytes;
  const stats = firstHit._source.beats_stats ?? firstHit._source?.beat?.stats;
  const statsMetrics = firstHit._source.beats_stats?.metrics ?? firstHit._source?.beat?.stats;

  const eventsTotalLast = stats.metrics?.libbeat?.pipeline?.events?.total;
  const eventsEmittedLast = stats.metrics?.libbeat?.pipeline?.events?.published;
  const eventsDroppedLast = stats.metrics?.libbeat?.pipeline?.events?.dropped;
  const bytesWrittenLast = stats.metrics?.libbeat?.output?.write?.bytes;
  const eventsTotalFirst = firstStatsMetrics?.libbeat?.pipeline?.events?.total ?? null;
  const eventsEmittedFirst = firstStatsMetrics?.libbeat?.pipeline?.events?.published ?? null;
  const eventsDroppedFirst = firstStatsMetrics?.libbeat?.pipeline?.events?.dropped ?? null;
  const bytesWrittenFirst = firstStatsMetrics?.libbeat?.output?.write?.bytes ?? null;

  const eventsTotalLast = statsMetrics?.libbeat?.pipeline?.events?.total ?? null;
  const eventsEmittedLast = statsMetrics?.libbeat?.pipeline?.events?.published ?? null;
  const eventsDroppedLast = statsMetrics?.libbeat?.pipeline?.events?.dropped ?? null;
  const bytesWrittenLast = statsMetrics?.libbeat?.output?.write?.bytes ?? null;

  return {
    uuid: apmUuid,
    transportAddress: stats.beat?.host,
    version: stats.beat?.version,
    name: stats.beat?.name,
    type: upperFirst(stats.beat?.type) || null,
    output: upperFirst(stats.metrics?.libbeat?.output?.type) || null,
    configReloads: stats.metrics?.libbeat?.config?.reloads,
    uptime: stats.metrics?.beat?.info?.uptime?.ms,
    transportAddress: stats?.beat?.host,
    version: stats?.beat?.version,
    name: stats?.beat?.name,
    type: upperFirst(stats?.beat?.type) || null,
    output: upperFirst(statsMetrics?.libbeat?.output?.type) ?? null,
    configReloads: statsMetrics?.libbeat?.config?.reloads ?? null,
    uptime:
      firstHit._source.beats_stats?.metrics?.beat?.info?.uptime?.ms ??
      firstHit._source.beat?.stats?.info?.uptime?.ms,
    eventsTotal: getDiffCalculation(eventsTotalLast, eventsTotalFirst),
    eventsEmitted: getDiffCalculation(eventsEmittedLast, eventsEmittedFirst),
    eventsDropped: getDiffCalculation(eventsDroppedLast, eventsDroppedFirst),
@@ -110,6 +114,22 @@ export async function getApmInfo(
      'hits.hits.inner_hits.first_hit.hits.hits._source.beats_stats.metrics.libbeat.pipeline.events.total',
      'hits.hits.inner_hits.first_hit.hits.hits._source.beats_stats.metrics.libbeat.pipeline.events.dropped',
      'hits.hits.inner_hits.first_hit.hits.hits._source.beats_stats.metrics.libbeat.output.write.bytes',

      'hits.hits._source.beat.stats.beat.host',
      'hits.hits._source.beat.stats.beat.version',
      'hits.hits._source.beat.stats.beat.name',
      'hits.hits._source.beat.stats.beat.type',
      'hits.hits._source.beat.stats.libbeat.output.type',
      'hits.hits._source.beat.stats.libbeat.pipeline.events.published',
      'hits.hits._source.beat.stats.libbeat.pipeline.events.total',
      'hits.hits._source.beat.stats.libbeat.pipeline.events.dropped',
      'hits.hits._source.beat.stats.libbeat.output.write.bytes',
      'hits.hits._source.beat.stats.libbeat.config.reloads',
      'hits.hits._source.beat.stats.info.uptime.ms',
      'hits.hits.inner_hits.first_hit.hits.hits._source.beat.stats.libbeat.pipeline.events.published',
      'hits.hits.inner_hits.first_hit.hits.hits._source.beat.stats.libbeat.pipeline.events.total',
      'hits.hits.inner_hits.first_hit.hits.hits._source.beat.stats.libbeat.pipeline.events.dropped',
      'hits.hits.inner_hits.first_hit.hits.hits._source.beat.stats.libbeat.output.write.bytes',
    ],
    body: {
      sort: { timestamp: { order: 'desc', unmapped_type: 'long' } },
@@ -125,7 +145,10 @@ export async function getApmInfo(
        inner_hits: {
          name: 'first_hit',
          size: 1,
          sort: { 'beats_stats.timestamp': { order: 'asc', unmapped_type: 'long' } },
          sort: [
            { 'beats_stats.timestamp': { order: 'asc', unmapped_type: 'long' } },
            { '@timestamp': { order: 'asc', unmapped_type: 'long' } },
          ],
        },
      },
    },
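The doubled `sort` entries work because `unmapped_type: 'long'` tells Elasticsearch to treat an index that has no mapping for a sort field as if the field were an unvalued `long`, instead of failing the search — so one `inner_hits` clause serves both legacy `.monitoring-beats-*` and `metricbeat-*` indices. The clause in isolation (assumed shape, mirroring the diff above):

    const firstHitInnerHits = {
      name: 'first_hit',
      size: 1,
      sort: [
        // legacy documents carry beats_stats.timestamp
        { 'beats_stats.timestamp': { order: 'asc', unmapped_type: 'long' } },
        // metricbeat documents carry @timestamp
        { '@timestamp': { order: 'asc', unmapped_type: 'long' } },
      ],
    };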
@@ -21,18 +21,21 @@ import { ElasticsearchResponse, ElasticsearchResponseHit } from '../../../common
export function handleResponse(response: ElasticsearchResponse, start: number, end: number) {
  const initial = { ids: new Set(), beats: [] };
  const { beats } = response.hits?.hits.reduce((accum: any, hit: ElasticsearchResponseHit) => {
    const stats = hit._source.beats_stats;
    const stats = hit._source.beats_stats ?? hit._source.beat?.stats;
    const statsMetrics = hit._source.beats_stats?.metrics ?? hit._source.beat?.stats;
    if (!stats) {
      return accum;
    }

    let earliestStats = null;
    if (
      hit.inner_hits?.earliest?.hits?.hits &&
      hit.inner_hits?.earliest?.hits?.hits.length > 0 &&
      hit.inner_hits.earliest.hits.hits[0]._source.beats_stats
    ) {
      earliestStats = hit.inner_hits.earliest.hits.hits[0]._source.beats_stats;
    let earliestStatsMetrics = null;
    if (hit.inner_hits?.earliest?.hits?.hits && hit.inner_hits?.earliest?.hits?.hits.length > 0) {
      earliestStats =
        hit.inner_hits.earliest.hits.hits[0]._source.beats_stats ??
        hit.inner_hits.earliest.hits.hits[0]._source.beat?.stats;
      earliestStatsMetrics =
        hit.inner_hits.earliest.hits.hits[0]._source.beats_stats?.metrics ??
        hit.inner_hits.earliest.hits.hits[0]._source.beat?.stats;
    }

    const uuid = stats?.beat?.uuid;
@@ -46,44 +49,47 @@ export function handleResponse(response: ElasticsearchResponse, start: number, e

    // add the beat
    const rateOptions = {
      hitTimestamp: stats.timestamp,
      earliestHitTimestamp: earliestStats?.timestamp,
      hitTimestamp: stats?.timestamp ?? hit._source['@timestamp'],
      earliestHitTimestamp:
        earliestStats?.timestamp ?? hit.inner_hits?.earliest.hits?.hits[0]._source['@timestamp'],
      timeWindowMin: start,
      timeWindowMax: end,
    };

    const { rate: bytesSentRate } = calculateRate({
      latestTotal: stats.metrics?.libbeat?.output?.write?.bytes,
      earliestTotal: earliestStats?.metrics?.libbeat?.output?.write?.bytes,
      latestTotal: statsMetrics?.libbeat?.output?.write?.bytes,
      earliestTotal: earliestStatsMetrics?.libbeat?.output?.write?.bytes,
      ...rateOptions,
    });

    const { rate: totalEventsRate } = calculateRate({
      latestTotal: stats.metrics?.libbeat?.pipeline?.events?.total,
      earliestTotal: earliestStats?.metrics?.libbeat?.pipeline?.events?.total,
      latestTotal: statsMetrics?.libbeat?.pipeline?.events?.total,
      earliestTotal: earliestStatsMetrics?.libbeat?.pipeline?.events?.total,
      ...rateOptions,
    });

    const errorsWrittenLatest = stats.metrics?.libbeat?.output?.write?.errors ?? 0;
    const errorsWrittenEarliest = earliestStats?.metrics?.libbeat?.output?.write?.errors ?? 0;
    const errorsReadLatest = stats.metrics?.libbeat?.output?.read?.errors ?? 0;
    const errorsReadEarliest = earliestStats?.metrics?.libbeat?.output?.read?.errors ?? 0;
    const errorsWrittenLatest = statsMetrics?.libbeat?.output?.write?.errors ?? 0;
    const errorsWrittenEarliest = earliestStatsMetrics?.libbeat?.output?.write?.errors ?? 0;
    const errorsReadLatest = statsMetrics?.libbeat?.output?.read?.errors ?? 0;
    const errorsReadEarliest = earliestStatsMetrics?.libbeat?.output?.read?.errors ?? 0;
    const errors = getDiffCalculation(
      errorsWrittenLatest + errorsReadLatest,
      errorsWrittenEarliest + errorsReadEarliest
    );

    accum.beats.push({
      uuid: stats.beat?.uuid,
      name: stats.beat?.name,
      type: upperFirst(stats.beat?.type),
      output: upperFirst(stats.metrics?.libbeat?.output?.type),
      uuid: stats?.beat?.uuid,
      name: stats?.beat?.name,
      type: upperFirst(stats?.beat?.type),
      output: upperFirst(statsMetrics?.libbeat?.output?.type),
      total_events_rate: totalEventsRate,
      bytes_sent_rate: bytesSentRate,
      errors,
      memory: stats.metrics?.beat?.memstats?.memory_alloc,
      version: stats.beat?.version,
      time_of_last_event: hit._source.timestamp,
      memory:
        hit._source.beats_stats?.metrics?.beat?.memstats?.memory_alloc ??
        hit._source.beat?.stats?.memstats?.memory?.alloc,
      version: stats?.beat?.version,
      time_of_last_event: hit._source.beats_stats?.timestamp ?? hit._source['@timestamp'],
    });

    return accum;
@@ -106,6 +112,7 @@ export async function getApms(req: LegacyRequest, apmIndexPattern: string, clust
    filterPath: [
      // only filter path can filter for inner_hits
      'hits.hits._source.timestamp',
      'hits.hits._source.@timestamp',
      'hits.hits._source.beats_stats.beat.uuid',
      'hits.hits._source.beats_stats.beat.name',
      'hits.hits._source.beats_stats.beat.host',
@@ -115,20 +122,36 @@ export async function getApms(req: LegacyRequest, apmIndexPattern: string, clust
      'hits.hits._source.beats_stats.metrics.libbeat.output.read.errors',
      'hits.hits._source.beats_stats.metrics.libbeat.output.write.errors',
      'hits.hits._source.beats_stats.metrics.beat.memstats.memory_alloc',
      'hits.hits._source.beat.stats.beat.uuid',
      'hits.hits._source.beat.stats.beat.name',
      'hits.hits._source.beat.stats.beat.host',
      'hits.hits._source.beat.stats.beat.type',
      'hits.hits._source.beat.stats.beat.version',
      'hits.hits._source.beat.stats.libbeat.output.type',
      'hits.hits._source.beat.stats.libbeat.output.read.errors',
      'hits.hits._source.beat.stats.libbeat.output.write.errors',
      'hits.hits._source.beat.stats.memstats.memory.alloc',

      // latest hits for calculating metrics
      'hits.hits._source.beats_stats.timestamp',
      'hits.hits._source.beats_stats.metrics.libbeat.output.write.bytes',
      'hits.hits._source.beats_stats.metrics.libbeat.pipeline.events.total',
      'hits.hits._source.beat.stats.libbeat.output.write.bytes',
      'hits.hits._source.beat.stats.libbeat.pipeline.events.total',

      // earliest hits for calculating metrics
      'hits.hits.inner_hits.earliest.hits.hits._source.beats_stats.timestamp',
      'hits.hits.inner_hits.earliest.hits.hits._source.beats_stats.metrics.libbeat.output.write.bytes',
      'hits.hits.inner_hits.earliest.hits.hits._source.beats_stats.metrics.libbeat.pipeline.events.total',
      'hits.hits.inner_hits.earliest.hits.hits._source.@timestamp',
      'hits.hits.inner_hits.earliest.hits.hits._source.beat.stats.libbeat.output.write.bytes',
      'hits.hits.inner_hits.earliest.hits.hits._source.beat.stats.libbeat.pipeline.events.total',

      // earliest hits for calculating diffs
      'hits.hits.inner_hits.earliest.hits.hits._source.beats_stats.metrics.libbeat.output.read.errors',
      'hits.hits.inner_hits.earliest.hits.hits._source.beats_stats.metrics.libbeat.output.write.errors',
      'hits.hits.inner_hits.earliest.hits.hits._source.beats_stats.metrics.libbeat.output.read.errors',
      'hits.hits.inner_hits.earliest.hits.hits._source.beats_stats.metrics.libbeat.output.write.errors',
    ],
    body: {
      query: createApmQuery({
@@ -141,7 +164,10 @@ export async function getApms(req: LegacyRequest, apmIndexPattern: string, clust
        inner_hits: {
          name: 'earliest',
          size: 1,
          sort: [{ 'beats_stats.timestamp': { order: 'asc', unmapped_type: 'long' } }],
          sort: [
            { 'beats_stats.timestamp': { order: 'asc', unmapped_type: 'long' } },
            { '@timestamp': { order: 'asc', unmapped_type: 'long' } },
          ],
        },
      },
      sort: [
@@ -5,6 +5,7 @@
 * 2.0.
 */

import { get } from 'lodash';
import { checkParam } from '../error_missing_required';
import { createApmQuery } from './create_apm_query';
import { ApmMetric } from '../metrics';
@@ -40,7 +41,7 @@ export function getApmsForClusters(req, apmIndexPattern, clusters) {

  return Promise.all(
    clusters.map(async (cluster) => {
      const clusterUuid = cluster.cluster_uuid;
      const clusterUuid = get(cluster, 'elasticsearch.cluster.id', cluster.cluster_uuid);
      const params = {
        index: apmIndexPattern,
        size: 0,
@@ -101,7 +101,7 @@ export const beatsUuidsAgg = (maxBucketSize) => ({
export const beatsAggResponseHandler = (response) => {
  // beat types stat
  const buckets = get(response, 'aggregations.types.buckets', []);
  const beatTotal = get(response, 'aggregations.total.value', null);
  const beatTotal = get(response, 'aggregations.total.value', 0);
  const beatTypes = buckets.reduce((types, typeBucket) => {
    return [
      ...types,
@@ -112,10 +112,10 @@ export const beatsAggResponseHandler = (response) => {
    ];
  }, []);

  const eventsTotalMax = get(response, 'aggregations.max_events_total.value', null);
  const eventsTotalMin = get(response, 'aggregations.min_events_total.value', null);
  const bytesSentMax = get(response, 'aggregations.max_bytes_sent_total.value', null);
  const bytesSentMin = get(response, 'aggregations.min_bytes_sent_total.value', null);
  const eventsTotalMax = get(response, 'aggregations.max_events_total.value', 0);
  const eventsTotalMin = get(response, 'aggregations.min_events_total.value', 0);
  const bytesSentMax = get(response, 'aggregations.max_bytes_sent_total.value', 0);
  const bytesSentMin = get(response, 'aggregations.min_bytes_sent_total.value', 0);

  return {
    beatTotal,
@@ -21,7 +21,7 @@ export function createBeatsQuery(options = {}) {
  options = defaults(options, {
    filters: [],
    metric: BeatsMetric.getMetricFields(),
    type: 'beats_stats',
    types: ['stats', 'beats_stats'],
  });

  // avoid showing APM Server stats alongside other Beats because APM Server will have its own UI
@@ -22,28 +22,36 @@ export function handleResponse(response: ElasticsearchResponse, beatUuid: string

  const firstHit = response.hits.hits[0];

  let firstStats = null;
  let firstStatsMetrics = null;
  if (
    firstHit.inner_hits?.first_hit?.hits?.hits &&
    firstHit.inner_hits?.first_hit?.hits?.hits.length > 0 &&
    firstHit.inner_hits.first_hit.hits.hits[0]._source.beats_stats
    firstHit.inner_hits?.first_hit?.hits?.hits.length > 0
  ) {
    firstStats = firstHit.inner_hits.first_hit.hits.hits[0]._source.beats_stats;
    firstStatsMetrics =
      firstHit.inner_hits.first_hit.hits.hits[0]._source.beats_stats?.metrics ??
      firstHit.inner_hits.first_hit.hits.hits[0]._source.beat?.stats;
  }

  const stats = firstHit._source.beats_stats ?? {};
  const stats = firstHit._source.beats_stats ?? firstHit._source?.beat?.stats;
  const statsMetrics = firstHit._source.beats_stats?.metrics ?? firstHit._source?.beat?.stats;

  const eventsTotalFirst = firstStats?.metrics?.libbeat?.pipeline?.events?.total ?? null;
  const eventsEmittedFirst = firstStats?.metrics?.libbeat?.pipeline?.events?.published ?? null;
  const eventsDroppedFirst = firstStats?.metrics?.libbeat?.pipeline?.events?.dropped ?? null;
  const bytesWrittenFirst = firstStats?.metrics?.libbeat?.output?.write?.bytes ?? null;
  const eventsTotalFirst = firstStatsMetrics?.libbeat?.pipeline?.events?.total ?? null;
  const eventsEmittedFirst = firstStatsMetrics?.libbeat?.pipeline?.events?.published ?? null;
  const eventsDroppedFirst = firstStatsMetrics?.libbeat?.pipeline?.events?.dropped ?? null;
  const bytesWrittenFirst = firstStatsMetrics?.libbeat?.output?.write?.bytes ?? null;

  const eventsTotalLast = stats?.metrics?.libbeat?.pipeline?.events?.total ?? null;
  const eventsEmittedLast = stats?.metrics?.libbeat?.pipeline?.events?.published ?? null;
  const eventsDroppedLast = stats?.metrics?.libbeat?.pipeline?.events?.dropped ?? null;
  const bytesWrittenLast = stats?.metrics?.libbeat?.output?.write?.bytes ?? null;
  const handlesHardLimit = stats?.metrics?.beat?.handles?.limit?.hard ?? null;
  const handlesSoftLimit = stats?.metrics?.beat?.handles?.limit?.soft ?? null;
  const eventsTotalLast = statsMetrics?.libbeat?.pipeline?.events?.total ?? null;
  const eventsEmittedLast = statsMetrics?.libbeat?.pipeline?.events?.published ?? null;
  const eventsDroppedLast = statsMetrics?.libbeat?.pipeline?.events?.dropped ?? null;
  const bytesWrittenLast = statsMetrics?.libbeat?.output?.write?.bytes ?? null;
  const handlesHardLimit =
    firstHit._source.beats_stats?.metrics?.beat?.handles?.limit?.hard ??
    firstHit._source.beat?.stats?.handles?.limit?.hard ??
    null;
  const handlesSoftLimit =
    firstHit._source.beats_stats?.metrics?.beat?.handles?.limit?.soft ??
    firstHit._source.beat?.stats?.handles?.limit?.soft ??
    null;

  return {
    uuid: beatUuid,
@@ -51,9 +59,11 @@ export function handleResponse(response: ElasticsearchResponse, beatUuid: string
    version: stats?.beat?.version ?? null,
    name: stats?.beat?.name ?? null,
    type: upperFirst(stats?.beat?.type) ?? null,
    output: upperFirst(stats?.metrics?.libbeat?.output?.type) ?? null,
    configReloads: stats?.metrics?.libbeat?.config?.reloads ?? null,
    uptime: stats?.metrics?.beat?.info?.uptime?.ms ?? null,
    output: upperFirst(statsMetrics?.libbeat?.output?.type) ?? null,
    configReloads: statsMetrics?.libbeat?.config?.reloads ?? null,
    uptime:
      firstHit._source.beats_stats?.metrics?.beat?.info?.uptime?.ms ??
      firstHit._source.beat?.stats?.info?.uptime?.ms,
    eventsTotal: getDiffCalculation(eventsTotalLast, eventsTotalFirst) ?? null,
    eventsEmitted: getDiffCalculation(eventsEmittedLast, eventsEmittedFirst) ?? null,
    eventsDropped: getDiffCalculation(eventsDroppedLast, eventsDroppedFirst) ?? null,
@@ -82,22 +92,39 @@ export async function getBeatSummary(
    ignoreUnavailable: true,
    filterPath: [
      'hits.hits._source.beats_stats.beat.host',
      'hits.hits._source.beat.stats.beat.host',
      'hits.hits._source.beats_stats.beat.version',
      'hits.hits._source.beat.stats.beat.version',
      'hits.hits._source.beats_stats.beat.name',
      'hits.hits._source.beat.stats.beat.name',
      'hits.hits._source.beats_stats.beat.type',
      'hits.hits._source.beat.stats.beat.type',
      'hits.hits._source.beats_stats.metrics.libbeat.output.type',
      'hits.hits._source.beat.stats.libbeat.output.type',
      'hits.hits._source.beats_stats.metrics.libbeat.pipeline.events.published',
      'hits.hits._source.beat.stats.libbeat.pipeline.events.published',
      'hits.hits._source.beats_stats.metrics.libbeat.pipeline.events.total',
      'hits.hits._source.beat.stats.libbeat.pipeline.events.total',
      'hits.hits._source.beats_stats.metrics.libbeat.pipeline.events.dropped',
      'hits.hits._source.beat.stats.libbeat.pipeline.events.dropped',
      'hits.hits._source.beats_stats.metrics.libbeat.output.write.bytes',
      'hits.hits._source.beat.stats.libbeat.output.write.bytes',
      'hits.hits._source.beats_stats.metrics.libbeat.config.reloads',
      'hits.hits._source.beat.stats.libbeat.config.reloads',
      'hits.hits._source.beats_stats.metrics.beat.info.uptime.ms',
      'hits.hits._source.beats_stats.metrics.beat.handles.limit.hard',
      'hits.hits._source.beat.stats.info.uptime.ms',
      'hits.hits._source.beats_stats.metrics.beat.handles.limit.s',
      'hits.hits._source.beat.stats.handles.limit.hard',
      'hits.hits._source.beats_stats.metrics.beat.handles.limit.soft',
      'hits.hits._source.beat.stats.handles.limit.soft',
      'hits.hits.inner_hits.first_hit.hits.hits._source.beats_stats.metrics.libbeat.pipeline.events.published',
      'hits.hits.inner_hits.first_hit.hits.hits._source.beat.stats.libbeat.pipeline.events.published',
      'hits.hits.inner_hits.first_hit.hits.hits._source.beats_stats.metrics.libbeat.pipeline.events.total',
      'hits.hits.inner_hits.first_hit.hits.hits._source.beat.stats.libbeat.pipeline.events.total',
      'hits.hits.inner_hits.first_hit.hits.hits._source.beats_stats.metrics.libbeat.pipeline.events.dropped',
      'hits.hits.inner_hits.first_hit.hits.hits._source.beat.stats.libbeat.pipeline.events.dropped',
      'hits.hits.inner_hits.first_hit.hits.hits._source.beats_stats.metrics.libbeat.output.write.bytes',
      'hits.hits.inner_hits.first_hit.hits.hits._source.beat.stats.libbeat.output.write.bytes',
    ],
    body: {
      sort: { timestamp: { order: 'desc', unmapped_type: 'long' } },
@@ -112,7 +139,10 @@ export async function getBeatSummary(
        inner_hits: {
          name: 'first_hit',
          size: 1,
          sort: { 'beats_stats.timestamp': { order: 'asc', unmapped_type: 'long' } },
          sort: [
            { 'beats_stats.timestamp': { order: 'asc', unmapped_type: 'long' } },
            { '@timestamp': { order: 'asc', unmapped_type: 'long' } },
          ],
        },
      },
    },
@@ -34,7 +34,8 @@ export function handleResponse(response: ElasticsearchResponse, start: number, e
const hits = response.hits?.hits ?? [];
const initial: { ids: Set<string>; beats: Beat[] } = { ids: new Set(), beats: [] };
const { beats } = hits.reduce((accum, hit) => {
const stats = hit._source.beats_stats;
const stats = hit._source.beats_stats ?? hit._source.beat?.stats;
const statsMetrics = hit._source.beats_stats?.metrics ?? hit._source.beat?.stats;
const uuid = stats?.beat?.uuid;

if (!uuid) {
@@ -50,38 +51,41 @@ export function handleResponse(response: ElasticsearchResponse, start: number, e
accum.ids.add(uuid);

let earliestStats = null;
if (
hit.inner_hits?.earliest?.hits?.hits &&
hit.inner_hits?.earliest?.hits?.hits.length > 0 &&
hit.inner_hits.earliest.hits.hits[0]._source.beats_stats
) {
earliestStats = hit.inner_hits.earliest.hits.hits[0]._source.beats_stats;
let earliestStatsMetrics = null;
if (hit.inner_hits?.earliest?.hits?.hits && hit.inner_hits?.earliest?.hits?.hits.length > 0) {
earliestStats =
hit.inner_hits.earliest.hits.hits[0]._source.beats_stats ??
hit.inner_hits.earliest.hits.hits[0]._source.beat?.stats;
earliestStatsMetrics =
hit.inner_hits.earliest.hits.hits[0]._source.beats_stats?.metrics ??
hit.inner_hits.earliest.hits.hits[0]._source.beat?.stats;
}

// add the beat
const rateOptions = {
hitTimestamp: stats?.timestamp,
earliestHitTimestamp: earliestStats?.timestamp,
hitTimestamp: stats?.timestamp ?? hit._source['@timestamp'],
earliestHitTimestamp:
earliestStats?.timestamp ?? hit.inner_hits?.earliest.hits?.hits[0]._source['@timestamp'],
timeWindowMin: start,
timeWindowMax: end,
};

const { rate: bytesSentRate } = calculateRate({
latestTotal: stats?.metrics?.libbeat?.output?.write?.bytes,
earliestTotal: earliestStats?.metrics?.libbeat?.output?.write?.bytes,
latestTotal: statsMetrics?.libbeat?.output?.write?.bytes,
earliestTotal: earliestStatsMetrics?.libbeat?.output?.write?.bytes,
...rateOptions,
});

const { rate: totalEventsRate } = calculateRate({
latestTotal: stats?.metrics?.libbeat?.pipeline?.events?.total,
earliestTotal: earliestStats?.metrics?.libbeat?.pipeline?.events?.total,
latestTotal: statsMetrics?.libbeat?.pipeline?.events?.total,
earliestTotal: earliestStatsMetrics?.libbeat?.pipeline?.events?.total,
...rateOptions,
});

const errorsWrittenLatest = stats?.metrics?.libbeat?.output?.write?.errors ?? 0;
const errorsWrittenEarliest = earliestStats?.metrics?.libbeat?.output?.write?.errors ?? 0;
const errorsReadLatest = stats?.metrics?.libbeat?.output?.read?.errors ?? 0;
const errorsReadEarliest = earliestStats?.metrics?.libbeat?.output?.read?.errors ?? 0;
const errorsWrittenLatest = statsMetrics?.libbeat?.output?.write?.errors ?? 0;
const errorsWrittenEarliest = earliestStatsMetrics?.libbeat?.output?.write?.errors ?? 0;
const errorsReadLatest = statsMetrics?.libbeat?.output?.read?.errors ?? 0;
const errorsReadEarliest = earliestStatsMetrics?.libbeat?.output?.read?.errors ?? 0;
const errors = getDiffCalculation(
errorsWrittenLatest + errorsReadLatest,
errorsWrittenEarliest + errorsReadEarliest
@@ -91,11 +95,13 @@ export function handleResponse(response: ElasticsearchResponse, start: number, e
uuid: stats?.beat?.uuid,
name: stats?.beat?.name,
type: upperFirst(stats?.beat?.type),
output: upperFirst(stats?.metrics?.libbeat?.output?.type),
output: upperFirst(statsMetrics?.libbeat?.output?.type),
total_events_rate: totalEventsRate,
bytes_sent_rate: bytesSentRate,
errors,
memory: stats?.metrics?.beat?.memstats?.memory_alloc,
memory:
hit._source.beats_stats?.metrics?.beat?.memstats?.memory_alloc ??
hit._source.beat?.stats?.memstats?.memory?.alloc,
version: stats?.beat?.version,
});

@@ -119,28 +125,45 @@ export async function getBeats(req: LegacyRequest, beatsIndexPattern: string, cl
filterPath: [
// only filter path can filter for inner_hits
'hits.hits._source.beats_stats.beat.uuid',
'hits.hits._source.beat.stats.beat.uuid',
'hits.hits._source.beats_stats.beat.name',
'hits.hits._source.beat.stats.beat.name',
'hits.hits._source.beats_stats.beat.host',
'hits.hits._source.beat.stats.beat.host',
'hits.hits._source.beats_stats.beat.type',
'hits.hits._source.beat.stats.beat.type',
'hits.hits._source.beats_stats.beat.version',
'hits.hits._source.beat.stats.beat.version',
'hits.hits._source.beats_stats.metrics.libbeat.output.type',
'hits.hits._source.beat.stats.libbeat.output.type',
'hits.hits._source.beats_stats.metrics.libbeat.output.read.errors',
'hits.hits._source.beat.stats.libbeat.output.read.errors',
'hits.hits._source.beats_stats.metrics.libbeat.output.write.errors',
'hits.hits._source.beat.stats.libbeat.output.write.errors',
'hits.hits._source.beats_stats.metrics.beat.memstats.memory_alloc',
'hits.hits._source.beat.stats.memstats.memory.alloc',

// latest hits for calculating metrics
'hits.hits._source.beats_stats.timestamp',
'hits.hits._source.@timestamp',
'hits.hits._source.beats_stats.metrics.libbeat.output.write.bytes',
'hits.hits._source.beat.stats.libbeat.output.write.bytes',
'hits.hits._source.beats_stats.metrics.libbeat.pipeline.events.total',
'hits.hits._source.beat.stats.libbeat.pipeline.events.total',

// earliest hits for calculating metrics
'hits.hits.inner_hits.earliest.hits.hits._source.beats_stats.timestamp',
'hits.hits.inner_hits.earliest.hits.hits._source.@timestamp',
'hits.hits.inner_hits.earliest.hits.hits._source.beat.stats.libbeat.output.write.bytes',
'hits.hits.inner_hits.earliest.hits.hits._source.beats_stats.metrics.libbeat.output.write.bytes',
'hits.hits.inner_hits.earliest.hits.hits._source.beat.stats.libbeat.pipeline.events.total',
'hits.hits.inner_hits.earliest.hits.hits._source.beats_stats.metrics.libbeat.pipeline.events.total',

// earliest hits for calculating diffs
'hits.hits.inner_hits.earliest.hits.hits._source.beats_stats.metrics.libbeat.output.read.errors',
'hits.hits.inner_hits.earliest.hits.hits._source.beat.stats.libbeat.output.read.errors',
'hits.hits.inner_hits.earliest.hits.hits._source.beats_stats.metrics.libbeat.output.write.errors',
'hits.hits.inner_hits.earliest.hits.hits._source.beat.stats.libbeat.output.write.errors',
],
body: {
query: createBeatsQuery({
@@ -153,7 +176,10 @@ export async function getBeats(req: LegacyRequest, beatsIndexPattern: string, cl
inner_hits: {
name: 'earliest',
size: 1,
sort: [{ 'beats_stats.timestamp': { order: 'asc', unmapped_type: 'long' } }],
sort: [
{ 'beats_stats.timestamp': { order: 'asc', unmapped_type: 'long' } },
{ '@timestamp': { order: 'asc', unmapped_type: 'long' } },
],
},
},
sort: [
@@ -5,6 +5,7 @@
* 2.0.
*/

import { get } from 'lodash';
import { checkParam } from '../error_missing_required';
import { BeatsClusterMetric } from '../metrics';
import { createBeatsQuery } from './create_beats_query';
@@ -39,7 +40,7 @@ export function getBeatsForClusters(req, beatsIndexPattern, clusters) {

return Promise.all(
clusters.map(async (cluster) => {
const clusterUuid = cluster.cluster_uuid;
const clusterUuid = get(cluster, 'elasticsearch.cluster.id', cluster.cluster_uuid);
const params = {
index: beatsIndexPattern,
size: 0,
@@ -14,10 +14,10 @@ describe('get_beats_for_clusters', () => {
expect(handleResponse(clusterUuid, response)).toEqual({
clusterUuid: 'foo_uuid',
stats: {
totalEvents: null,
bytesSent: null,
totalEvents: 0,
bytesSent: 0,
beats: {
total: null,
total: 0,
types: [],
},
},

@@ -11,10 +11,10 @@ describe('beats/get_stats', () => {
it('Handle empty response', () => {
expect(handleResponse()).toEqual({
stats: {
bytesSent: null,
totalEvents: null,
bytesSent: 0,
totalEvents: 0,
},
total: null,
total: 0,
types: [],
});
});
@@ -7,7 +7,7 @@

import { isFunction, get } from 'lodash';

export function appendMetricbeatIndex(config, indexPattern, bypass = false) {
export function appendMetricbeatIndex(config, indexPattern, ccs, bypass = false) {
if (bypass) {
return indexPattern;
}
@@ -21,6 +21,10 @@ export function appendMetricbeatIndex(config, indexPattern, bypass = false) {
mbIndex = get(config, 'ui.metricbeat.index');
}

if (ccs) {
mbIndex = `${mbIndex},${ccs}:${mbIndex}`;
}

return `${indexPattern},${mbIndex}`;
}

@@ -46,7 +50,12 @@ export function prefixIndexPattern(config, indexPattern, ccs, monitoringIndicesO
}

if (!ccsEnabled || !ccs) {
return appendMetricbeatIndex(config, indexPattern, monitoringIndicesOnly);
return appendMetricbeatIndex(
config,
indexPattern,
ccsEnabled ? ccs : undefined,
monitoringIndicesOnly
);
}

const patterns = indexPattern.split(',');
@@ -57,11 +66,12 @@ export function prefixIndexPattern(config, indexPattern, ccs, monitoringIndicesO
return appendMetricbeatIndex(
config,
`${prefixedPattern},${indexPattern}`,
ccs,
monitoringIndicesOnly
);
}

return appendMetricbeatIndex(config, prefixedPattern, monitoringIndicesOnly);
return appendMetricbeatIndex(config, prefixedPattern, ccs, monitoringIndicesOnly);
}

/**
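A self-contained sketch of what the new `ccs` parameter does to the metricbeat index pattern. The config lookup is stubbed out here; the real function reads the index from the Kibana config service:

```ts
// With a CCS prefix, query both the local and the remote copy of the
// metricbeat index; without one, behavior is unchanged.
function appendMetricbeatIndexSketch(
  mbIndex: string,
  indexPattern: string,
  ccs?: string,
  bypass = false
): string {
  if (bypass) {
    return indexPattern;
  }
  if (ccs) {
    mbIndex = `${mbIndex},${ccs}:${mbIndex}`;
  }
  return `${indexPattern},${mbIndex}`;
}

// '.monitoring-es-*,remote1:.monitoring-es-*,metricbeat-*,remote1:metricbeat-*'
console.log(
  appendMetricbeatIndexSketch('metricbeat-*', '.monitoring-es-*,remote1:.monitoring-es-*', 'remote1')
);
```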
@@ -340,10 +340,10 @@
"versions": []
},
"beats": {
"totalEvents": null,
"bytesSent": null,
"totalEvents": 0,
"bytesSent": 0,
"beats": {
"total": null,
"total": 0,
"types": []
}
}
@@ -679,10 +679,10 @@
"versions": []
},
"beats": {
"totalEvents": null,
"bytesSent": null,
"totalEvents": 0,
"bytesSent": 0,
"beats": {
"total": null,
"total": 0,
"types": []
}
}
@@ -11,11 +11,11 @@ Array [
"apm": undefined,
"beats": Object {
"beats": Object {
"total": null,
"total": 0,
"types": Array [],
},
"bytesSent": null,
"totalEvents": null,
"bytesSent": 0,
"totalEvents": 0,
},
"ccs": "proddy1",
"cluster_name": "Custom name",
@@ -29,25 +29,7 @@ Array [
"deleted": 0,
},
"shards": Object {
"index": Object {
"primaries": Object {
"avg": 1,
"max": 1,
"min": 1,
},
"replication": Object {
"avg": 0,
"max": 0,
"min": 0,
},
"shards": Object {
"avg": 1,
"max": 1,
"min": 1,
},
},
"primaries": 1,
"replication": 0,
"total": 1,
},
"store": Object {
@@ -60,7 +42,6 @@ Array [
},
"fs": Object {
"available_in_bytes": 224468717568,
"free_in_bytes": 228403855360,
"total_in_bytes": 499963170816,
},
"jvm": Object {
@@ -115,11 +96,11 @@ Array [
"apm": undefined,
"beats": Object {
"beats": Object {
"total": null,
"total": 0,
"types": Array [],
},
"bytesSent": null,
"totalEvents": null,
"bytesSent": 0,
"totalEvents": 0,
},
"ccs": undefined,
"cluster_name": "monitoring-one",
@@ -133,25 +114,7 @@ Array [
"deleted": 1,
},
"shards": Object {
"index": Object {
"primaries": Object {
"avg": 1,
"max": 1,
"min": 1,
},
"replication": Object {
"avg": 0,
"max": 0,
"min": 0,
},
"shards": Object {
"avg": 1,
"max": 1,
"min": 1,
},
},
"primaries": 6,
"replication": 0,
"total": 6,
},
"store": Object {
@@ -164,7 +127,6 @@ Array [
},
"fs": Object {
"available_in_bytes": 224468783104,
"free_in_bytes": 228403920896,
"total_in_bytes": 499963170816,
},
"jvm": Object {
@@ -224,11 +186,11 @@ Array [
"apm": undefined,
"beats": Object {
"beats": Object {
"total": null,
"total": 0,
"types": Array [],
},
"bytesSent": null,
"totalEvents": null,
"bytesSent": 0,
"totalEvents": 0,
},
"ccs": "proddy1",
"cluster_name": "Custom name",
@@ -242,25 +204,7 @@ Array [
"deleted": 0,
},
"shards": Object {
"index": Object {
"primaries": Object {
"avg": 1,
"max": 1,
"min": 1,
},
"replication": Object {
"avg": 0,
"max": 0,
"min": 0,
},
"shards": Object {
"avg": 1,
"max": 1,
"min": 1,
},
},
"primaries": 1,
"replication": 0,
"total": 1,
},
"store": Object {
@@ -273,7 +217,6 @@ Array [
},
"fs": Object {
"available_in_bytes": 224468717568,
"free_in_bytes": 228403855360,
"total_in_bytes": 499963170816,
},
"jvm": Object {
@@ -328,11 +271,11 @@ Array [
"apm": undefined,
"beats": Object {
"beats": Object {
"total": null,
"total": 0,
"types": Array [],
},
"bytesSent": null,
"totalEvents": null,
"bytesSent": 0,
"totalEvents": 0,
},
"ccs": undefined,
"cluster_name": "monitoring-one",
@@ -346,25 +289,7 @@ Array [
"deleted": 1,
},
"shards": Object {
"index": Object {
"primaries": Object {
"avg": 1,
"max": 1,
"min": 1,
},
"replication": Object {
"avg": 0,
"max": 0,
"min": 0,
},
"shards": Object {
"avg": 1,
"max": 1,
"min": 1,
},
},
"primaries": 6,
"replication": 0,
"total": 6,
},
"store": Object {
@@ -377,7 +302,6 @@ Array [
},
"fs": Object {
"available_in_bytes": 224468783104,
"free_in_bytes": 228403920896,
"total_in_bytes": 499963170816,
},
"jvm": Object {
@@ -31,13 +31,20 @@ async function findSupportedBasicLicenseCluster(
index: kbnIndexPattern,
size: 1,
ignoreUnavailable: true,
filterPath: 'hits.hits._source.cluster_uuid',
filterPath: ['hits.hits._source.cluster_uuid', 'hits.hits._source.cluster.id'],
body: {
sort: { timestamp: { order: 'desc', unmapped_type: 'long' } },
query: {
bool: {
filter: [
{ term: { type: 'kibana_stats' } },
{
bool: {
should: [
{ term: { type: 'kibana_stats' } },
{ term: { 'metricset.name': 'stats' } },
],
},
},
{ term: { 'kibana_stats.kibana.uuid': kibanaUuid } },
{ range: { timestamp: { gte, lte, format: 'strict_date_optional_time' } } },
],
@@ -80,7 +87,7 @@ export function flagSupportedClusters(req: LegacyRequest, kbnIndexPattern: strin
const serverLog = (message: string) => req.getLogger('supported-clusters').debug(message);
const flagAllSupported = (clusters: ElasticsearchModifiedSource[]) => {
clusters.forEach((cluster) => {
if (cluster.license) {
if (cluster.license || cluster.elasticsearch?.cluster?.stats?.license) {
cluster.isSupported = true;
}
});
@@ -100,7 +107,9 @@ export function flagSupportedClusters(req: LegacyRequest, kbnIndexPattern: strin
}
if (linkedClusterCount > 1) {
const basicLicenseCount = clusters.reduce((accumCount, cluster) => {
if (cluster.license && cluster.license.type === 'basic') {
const licenseType =
cluster.license?.type ?? cluster.elasticsearch?.cluster?.stats?.license?.type;
if (licenseType === 'basic') {
accumCount++;
}
return accumCount;
@@ -129,7 +138,10 @@ export function flagSupportedClusters(req: LegacyRequest, kbnIndexPattern: strin
'Found some basic license clusters in monitoring data. Only non-basic will be supported.'
);
clusters.forEach((cluster) => {
if (cluster.license && cluster.license.type !== 'basic') {
if (
cluster.license?.type !== 'basic' &&
cluster.elasticsearch?.cluster?.stats?.license?.type !== 'basic'
) {
cluster.isSupported = true;
}
});
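The query-shape change above recurs throughout this commit: a `bool.should` nested inside the filter array accepts either the legacy `type` keyword or the Metricbeat `metricset.name`, so one query matches documents from both shippers. A sketch of the pattern (`buildTypeFilter` is our name for it, not a Kibana export):

```ts
function buildTypeFilter(legacyType: string, metricsetName: string) {
  return {
    bool: {
      should: [
        { term: { type: legacyType } },
        { term: { 'metricset.name': metricsetName } },
      ],
      // Explicit for clarity; in filter context with no `must`,
      // Elasticsearch already defaults minimum_should_match to 1.
      minimum_should_match: 1,
    },
  };
}
```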
@@ -14,11 +14,11 @@ import { ElasticsearchSource } from '../../../common/types/es';
* @return top-level cluster summary data
*/
export function getClusterStatus(cluster: ElasticsearchSource, shardStats: unknown) {
const clusterStats = cluster.cluster_stats ?? {};
const clusterNodes = clusterStats.nodes ?? {};
const clusterIndices = clusterStats.indices ?? {};
const clusterStatsLegacy = cluster.cluster_stats;
const clusterStatsMB = cluster.elasticsearch?.cluster?.stats;

const clusterTotalShards = clusterIndices.shards?.total ?? 0;
const clusterTotalShards =
clusterStatsLegacy?.indices?.shards?.total ?? clusterStatsMB?.indices?.shards?.count ?? 0;
let unassignedShardsTotal = 0;
const unassignedShards = get(shardStats, 'indicesTotals.unassigned');
if (unassignedShards !== undefined) {
@@ -28,17 +28,31 @@ export function getClusterStatus(cluster: ElasticsearchSource, shardStats: unkno
const totalShards = clusterTotalShards + unassignedShardsTotal;

return {
status: cluster.cluster_state?.status ?? 'unknown',
status:
cluster.elasticsearch?.cluster?.stats?.status ?? cluster.cluster_state?.status ?? 'unknown',
// index-based stats
indicesCount: clusterIndices.count ?? 0,
documentCount: clusterIndices.docs?.count ?? 0,
dataSize: clusterIndices.store?.size_in_bytes ?? 0,
indicesCount: clusterStatsLegacy?.indices?.count ?? clusterStatsMB?.indices?.total ?? 0,
documentCount:
clusterStatsLegacy?.indices?.docs?.count ?? clusterStatsMB?.indices?.docs?.total ?? 0,
dataSize:
clusterStatsMB?.indices?.store?.size?.bytes ??
clusterStatsLegacy?.indices?.store?.size_in_bytes ??
0,
// node-based stats
nodesCount: clusterNodes.count?.total ?? 0,
upTime: clusterNodes.jvm?.max_uptime_in_millis ?? 0,
version: clusterNodes.versions ?? null,
memUsed: clusterNodes.jvm?.mem?.heap_used_in_bytes ?? 0,
memMax: clusterNodes.jvm?.mem?.heap_max_in_bytes ?? 0,
nodesCount: clusterStatsLegacy?.nodes?.count?.total ?? clusterStatsMB?.nodes?.count ?? 0,
upTime:
clusterStatsMB?.nodes?.jvm?.max_uptime?.ms ??
clusterStatsLegacy?.nodes?.jvm?.max_uptime_in_millis ??
0,
version: clusterStatsMB?.nodes?.versions ?? clusterStatsLegacy?.nodes?.versions ?? null,
memUsed:
clusterStatsMB?.nodes?.jvm?.memory?.heap?.used?.bytes ??
clusterStatsLegacy?.nodes?.jvm?.mem?.heap_used_in_bytes ??
0,
memMax:
clusterStatsMB?.nodes?.jvm?.memory?.heap?.max?.bytes ??
clusterStatsLegacy?.nodes?.jvm?.mem?.heap_max_in_bytes ??
0,
unassignedShards: unassignedShardsTotal,
totalShards,
};
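Worth spelling out why these chains use `??` rather than `||`: numeric stats like shard or node counts can legitimately be 0, and `??` only falls through on null/undefined, so a real 0 from one schema is never masked by the other. A two-line demonstration:

```ts
const legacyCount: number | undefined = 0;
const mbCount: number | undefined = 42;

console.log(legacyCount ?? mbCount ?? 0); // 0  (the legacy value wins)
console.log(legacyCount || mbCount || 0); // 42 (|| would wrongly skip the 0)
```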
@@ -7,7 +7,7 @@

import { notFound } from '@hapi/boom';
import { set } from '@elastic/safer-lodash-set';
import { findIndex } from 'lodash';
import { get } from 'lodash';
import { getClustersStats } from './get_clusters_stats';
import { flagSupportedClusters } from './flag_supported_clusters';
import { getMlJobsForCluster } from '../elasticsearch';
@@ -98,7 +98,7 @@ export async function getClustersFromRequest(

cluster.logs = isInCodePath(codePaths, [CODE_PATH_LOGS])
? await getLogTypes(req, filebeatIndexPattern, {
clusterUuid: cluster.cluster_uuid,
clusterUuid: get(cluster, 'elasticsearch.cluster.id', cluster.cluster_uuid),
start,
end,
})
@@ -122,7 +122,7 @@ export async function getClustersFromRequest(
alertsClient,
req.server.plugins.monitoring.info,
undefined,
clusters.map((cluster) => cluster.cluster_uuid)
clusters.map((cluster) => get(cluster, 'elasticsearch.cluster.id', cluster.cluster_uuid))
);

for (const cluster of clusters) {
@@ -142,7 +142,9 @@ export async function getClustersFromRequest(
accum[alertName] = {
...value,
states: value.states.filter(
(state) => state.state.cluster.clusterUuid === cluster.cluster_uuid
(state) =>
state.state.cluster.clusterUuid ===
get(cluster, 'elasticsearch.cluster.id', cluster.cluster_uuid)
),
};
} else {
@@ -177,7 +179,10 @@ export async function getClustersFromRequest(
: [];
// add the kibana data to each cluster
kibanas.forEach((kibana) => {
const clusterIndex = findIndex(clusters, { cluster_uuid: kibana.clusterUuid });
const clusterIndex = clusters.findIndex(
(cluster) =>
get(cluster, 'elasticsearch.cluster.id', cluster.cluster_uuid) === kibana.clusterUuid
);
set(clusters[clusterIndex], 'kibana', kibana.stats);
});

@@ -186,8 +191,10 @@ export async function getClustersFromRequest(
const logstashes = await getLogstashForClusters(req, lsIndexPattern, clusters);
const pipelines = await getLogstashPipelineIds(req, lsIndexPattern, { clusterUuid }, 1);
logstashes.forEach((logstash) => {
const clusterIndex = findIndex(clusters, { cluster_uuid: logstash.clusterUuid });

const clusterIndex = clusters.findIndex(
(cluster) =>
get(cluster, 'elasticsearch.cluster.id', cluster.cluster_uuid) === logstash.clusterUuid
);
// withhold LS overview stats until there is at least 1 pipeline
if (logstash.clusterUuid === clusterUuid && !pipelines.length) {
logstash.stats = {};
@@ -201,7 +208,10 @@ export async function getClustersFromRequest(
? await getBeatsForClusters(req, beatsIndexPattern, clusters)
: [];
beatsByCluster.forEach((beats) => {
const clusterIndex = findIndex(clusters, { cluster_uuid: beats.clusterUuid });
const clusterIndex = clusters.findIndex(
(cluster) =>
get(cluster, 'elasticsearch.cluster.id', cluster.cluster_uuid) === beats.clusterUuid
);
set(clusters[clusterIndex], 'beats', beats.stats);
});

@@ -210,12 +220,17 @@ export async function getClustersFromRequest(
? await getApmsForClusters(req, apmIndexPattern, clusters)
: [];
apmsByCluster.forEach((apm) => {
const clusterIndex = findIndex(clusters, { cluster_uuid: apm.clusterUuid });
const { stats, config } = apm;
clusters[clusterIndex].apm = {
...stats,
config,
};
const clusterIndex = clusters.findIndex(
(cluster) =>
get(cluster, 'elasticsearch.cluster.id', cluster.cluster_uuid) === apm.clusterUuid
);
if (clusterIndex >= 0) {
const { stats, config } = apm;
clusters[clusterIndex].apm = {
...stats,
config,
};
}
});

// check ccr configuration
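The apm hunk also adds an explicit `clusterIndex >= 0` guard: `Array.prototype.findIndex` returns -1 on a miss, and `clusters[-1].apm = ...` would throw a TypeError. A minimal illustration of the guard with a hypothetical row shape of our own:

```ts
interface ClusterRow {
  uuid: string;
  apm?: { events: number };
}

function attachApm(clusters: ClusterRow[], clusterUuid: string, events: number): void {
  const clusterIndex = clusters.findIndex((cluster) => cluster.uuid === clusterUuid);
  if (clusterIndex >= 0) {
    clusters[clusterIndex].apm = { events };
  }
  // A -1 miss is silently skipped rather than crashing the overview page.
}

const rows: ClusterRow[] = [{ uuid: 'abc' }];
attachApm(rows, 'missing', 10); // no-op
attachApm(rows, 'abc', 10); // rows[0].apm is now { events: 10 }
```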
@@ -54,8 +54,8 @@ export function getClustersState(
checkParam(esIndexPattern, 'esIndexPattern in cluster/getClustersHealth');

const clusterUuids = clusters
.filter((cluster) => !cluster.cluster_state)
.map((cluster) => cluster.cluster_uuid);
.filter((cluster) => !cluster.cluster_state || !cluster.elasticsearch?.cluster?.stats?.state)
.map((cluster) => cluster.cluster_uuid || cluster.elasticsearch?.cluster?.id);

// we only need to fetch the cluster state if we don't already have it
// newer documents (those from the version 6 schema and later already have the cluster state with cluster stats)
@@ -69,8 +69,9 @@ export function getClustersState(
ignoreUnavailable: true,
filterPath: [
'hits.hits._source.cluster_uuid',
'hits.hits._source.elasticsearch.cluster.id',
'hits.hits._source.cluster_state',
'hits.hits._source.cluster_state',
'hits.hits._source.elasticsearch.cluster.stats.state',
],
body: {
query: {
@@ -57,15 +57,26 @@ function fetchClusterStats(req: LegacyRequest, esIndexPattern: string, clusterUu
filterPath: [
'hits.hits._index',
'hits.hits._source.cluster_uuid',
'hits.hits._source.elasticsearch.cluster.id',
'hits.hits._source.cluster_name',
'hits.hits._source.elasticsearch.cluster.name',
'hits.hits._source.version',
'hits.hits._source.elasticsearch.version',
'hits.hits._source.elasticsearch.cluster.node.version',
'hits.hits._source.license.status', // license data only includes necessary fields to drive UI
'hits.hits._source.elasticsearch.cluster.stats.license.status',
'hits.hits._source.license.type',
'hits.hits._source.elasticsearch.cluster.stats.license.type',
'hits.hits._source.license.issue_date',
'hits.hits._source.elasticsearch.cluster.stats.license.issue_date',
'hits.hits._source.license.expiry_date',
'hits.hits._source.elasticsearch.cluster.stats.license.expiry_date',
'hits.hits._source.license.expiry_date_in_millis',
'hits.hits._source.elasticsearch.cluster.stats.license.expiry_date_in_millis',
'hits.hits._source.cluster_stats',
'hits.hits._source.elasticsearch.cluster.stats',
'hits.hits._source.cluster_state',
'hits.hits._source.elasticsearch.cluster.stats.state',
'hits.hits._source.cluster_settings.cluster.metadata.display_name',
],
body: {
@@ -1,99 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/

import { pick, omit, get } from 'lodash';
import { calculateOverallStatus } from '../calculate_overall_status';
import { LOGGING_TAG } from '../../../common/constants';
import { MonitoringLicenseError } from '../errors/custom_errors';

export function getClustersSummary(server, clusters, kibanaUuid, isCcrEnabled) {
return clusters.map((cluster) => {
const {
isSupported,
cluster_uuid: clusterUuid,
version,
license,
cluster_stats: clusterStats,
logstash,
kibana,
ml,
beats,
apm,
alerts,
ccs,
cluster_settings: clusterSettings,
logs,
} = cluster;

const clusterName = get(clusterSettings, 'cluster.metadata.display_name', cluster.cluster_name);

// check for any missing licenses
if (!license) {
const clusterId = cluster.name || clusterName || clusterUuid;
server.log(
['error', LOGGING_TAG],
"Could not find license information for cluster = '" +
clusterId +
"'. " +
"Please check the cluster's master node server logs for errors or warnings."
);
throw new MonitoringLicenseError(clusterId);
}

const {
status: licenseStatus,
type: licenseType,
expiry_date_in_millis: licenseExpiry,
} = license;

const indices = pick(clusterStats.indices, ['count', 'docs', 'shards', 'store']);

const jvm = {
max_uptime_in_millis: clusterStats.nodes.jvm.max_uptime_in_millis,
mem: clusterStats.nodes.jvm.mem,
};

const nodes = {
fs: clusterStats.nodes.fs,
count: {
total: clusterStats.nodes.count.total,
},
jvm,
};
const { status } = cluster.cluster_state;

return {
isSupported,
cluster_uuid: clusterUuid,
cluster_name: clusterName,
version,
license: {
status: licenseStatus,
type: licenseType,
expiry_date_in_millis: licenseExpiry,
},
elasticsearch: {
cluster_stats: {
indices,
nodes,
status,
},
logs,
},
logstash,
kibana: omit(kibana, 'uuids'),
ml,
ccs,
beats,
apm,
alerts,
isPrimary: kibana ? kibana.uuids.includes(kibanaUuid) : false,
status: calculateOverallStatus([status, (kibana && kibana.status) || null]),
isCcrEnabled,
};
});
}
@@ -10,7 +10,9 @@ import { getClustersSummary } from './get_clusters_summary';

const mockLog = jest.fn();
const mockServer = {
log: mockLog,
log: {
error: mockLog,
},
};

describe('getClustersSummary', () => {
@@ -34,7 +36,6 @@ describe('getClustersSummary', () => {

expect(() => getClustersSummary(mockServer, fakeClusters)).toThrow('Monitoring License Error');
expect(mockLog).toHaveBeenCalledWith(
['error', 'monitoring'],
"Could not find license information for cluster = 'Custom name'. " +
"Please check the cluster's master node server logs for errors or warnings."
);
@@ -0,0 +1,161 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/

import { omit, get } from 'lodash';
import {
ElasticsearchModifiedSource,
ElasticsearchLegacySource,
ElasticsearchSourceKibanaStats,
} from '../../../common/types/es';
// @ts-ignore
import { calculateOverallStatus } from '../calculate_overall_status';
// @ts-ignore
import { MonitoringLicenseError } from '../errors/custom_errors';

type EnhancedClusters = ElasticsearchModifiedSource & {
license: ElasticsearchLegacySource['license'];
[key: string]: any;
};

type EnhancedKibana = ElasticsearchSourceKibanaStats['kibana'] & {
uuids?: string[];
};

export function getClustersSummary(
server: any,
clusters: EnhancedClusters[],
kibanaUuid: string,
isCcrEnabled: boolean
) {
return clusters.map((cluster) => {
const {
isSupported,
logstash,
kibana,
ml,
beats,
apm,
alerts,
ccs,
cluster_settings: clusterSettings,
logs,
} = cluster;

const license = cluster.license || cluster.elasticsearch?.cluster?.stats?.license;
const version = cluster.version || cluster.elasticsearch?.version;
const clusterUuid = cluster.cluster_uuid || cluster.elasticsearch?.cluster?.id;
const clusterStatsLegacy = cluster.cluster_stats;
const clusterStatsMB = cluster.elasticsearch?.cluster?.stats;

const clusterName = get(
clusterSettings,
'cluster.metadata.display_name',
cluster.elasticsearch?.cluster?.name ?? cluster.cluster_name
);

// check for any missing licenses
if (!license) {
const clusterId = cluster.name || clusterName || clusterUuid;
server.log.error(
"Could not find license information for cluster = '" +
clusterId +
"'. " +
"Please check the cluster's master node server logs for errors or warnings."
);
throw new MonitoringLicenseError(clusterId);
}

const {
status: licenseStatus,
type: licenseType,
expiry_date_in_millis: licenseExpiry,
} = license;

const indices = {
count: clusterStatsLegacy?.indices?.count ?? clusterStatsMB?.indices?.total,
docs: {
deleted:
clusterStatsLegacy?.indices?.docs?.deleted ?? clusterStatsMB?.indices?.docs?.deleted,
count: clusterStatsLegacy?.indices?.docs?.count ?? clusterStatsMB?.indices?.docs?.total,
},
shards: {
total: clusterStatsLegacy?.indices?.shards?.total ?? clusterStatsMB?.indices?.shards?.count,
primaries:
clusterStatsLegacy?.indices?.shards?.primaries ??
clusterStatsMB?.indices?.shards?.primaries,
},
store: {
size_in_bytes:
clusterStatsLegacy?.indices?.store?.size_in_bytes ??
clusterStatsMB?.indices?.store?.size?.bytes,
},
};

const jvm = {
max_uptime_in_millis:
clusterStatsLegacy?.nodes?.jvm?.max_uptime_in_millis ??
clusterStatsMB?.nodes?.jvm?.max_uptime?.ms,
mem: {
heap_max_in_bytes:
clusterStatsLegacy?.nodes?.jvm?.mem?.heap_max_in_bytes ??
clusterStatsMB?.nodes?.jvm?.memory?.heap?.max?.bytes,
heap_used_in_bytes:
clusterStatsLegacy?.nodes?.jvm?.mem?.heap_used_in_bytes ??
clusterStatsMB?.nodes?.jvm?.memory?.heap?.used?.bytes,
},
};

const nodes = {
fs: {
total_in_bytes:
clusterStatsLegacy?.nodes?.fs?.total_in_bytes ?? clusterStatsMB?.nodes?.fs?.total?.bytes,
available_in_bytes:
clusterStatsLegacy?.nodes?.fs?.available_in_bytes ??
clusterStatsMB?.nodes?.fs?.available?.bytes,
},
count: {
total: clusterStatsLegacy?.nodes?.count?.total ?? clusterStatsMB?.nodes?.count,
},
jvm,
};
const { status } = cluster.cluster_state ??
cluster?.elasticsearch?.cluster?.stats ?? { status: null };

return {
isSupported,
cluster_uuid: clusterUuid,
cluster_name: clusterName,
version,
license: {
status: licenseStatus,
type: licenseType,
expiry_date_in_millis: licenseExpiry,
},
elasticsearch: {
cluster_stats: {
indices,
nodes,
status,
},
logs,
},
logstash,
kibana: omit(kibana, 'uuids'),
ml,
ccs,
beats,
apm,
alerts,
isPrimary: kibana ? (kibana as EnhancedKibana).uuids?.includes(kibanaUuid) : false,
status: calculateOverallStatus([
status,
(kibana && (kibana as EnhancedKibana).status) || null,
]),
isCcrEnabled,
};
});
}
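One side effect of this rewrite: logging moves from the legacy tag-based call (`server.log(['error', tag], message)`) to the new platform's leveled logger (`server.log.error(message)`), which is why the jest mock in the test above changed shape. A minimal double for the new interface:

```ts
// Jest double matching the new `server.log.error(message)` shape.
const mockError = jest.fn();
const mockServer = { log: { error: mockError } };

// After triggering the missing-license path, an assertion might look like:
// expect(mockError).toHaveBeenCalledWith(
//   expect.stringContaining('Could not find license information')
// );
```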
@@ -32,7 +32,7 @@ export function getIndexPatterns(server, additionalPatterns = {}, ccs = '*') {
...Object.keys(additionalPatterns).reduce((accum, varName) => {
return {
...accum,
[varName]: prefixIndexPattern(config, additionalPatterns[varName], ccs),
[varName]: prefixIndexPattern(config, additionalPatterns[varName], ccs, true),
};
}, {}),
};
@@ -52,13 +52,22 @@ export function createTimeFilter(options) {
*/
export function createQuery(options) {
options = defaults(options, { filters: [] });
const { type, clusterUuid, uuid, filters } = options;
const { type, types, clusterUuid, uuid, filters } = options;

const isFromStandaloneCluster = clusterUuid === STANDALONE_CLUSTER_CLUSTER_UUID;

let typeFilter;
if (type) {
typeFilter = { bool: { should: [{ term: { type } }, { term: { 'metricset.name': type } }] } };
} else if (types) {
typeFilter = {
bool: {
should: [
...types.map((type) => ({ term: { type } })),
...types.map((type) => ({ term: { 'metricset.name': type } })),
],
},
};
}

let clusterUuidFilter;
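For a concrete sense of what the new `types` branch produces, here is the expansion for `types: ['ml_job', 'job_stats']` (the ML jobs query later in this commit). Every entry is tried both as a legacy `type` and as a Metricbeat `metricset.name`:

```ts
const types = ['ml_job', 'job_stats'];
const typeFilter = {
  bool: {
    should: [
      ...types.map((type) => ({ term: { type } })),
      ...types.map((type) => ({ term: { 'metricset.name': type } })),
    ],
  },
};
// {"bool":{"should":[{"term":{"type":"ml_job"}},{"term":{"type":"job_stats"}},
//   {"term":{"metricset.name":"ml_job"}},{"term":{"metricset.name":"job_stats"}}]}}
console.log(JSON.stringify(typeFilter));
```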
@@ -43,7 +43,15 @@ function getUuid(req, metric) {
}

function defaultCalculation(bucket, key) {
const value = get(bucket, key, null);
const mbKey = `metric_mb_deriv.normalized_value`;
const legacyValue = get(bucket, key, null);
const mbValue = get(bucket, mbKey, null);
let value;
if (!isNaN(mbValue) && mbValue > 0) {
value = mbValue;
} else {
value = legacyValue;
}
// negatives suggest derivatives that have been reset (usually due to restarts that reset the count)
if (value < 0) {
return null;
@@ -54,6 +62,17 @@ function defaultCalculation(bucket, key) {

function createMetricAggs(metric) {
if (metric.derivative) {
const mbDerivative = metric.mbField
? {
metric_mb_deriv: {
derivative: {
buckets_path: 'metric_mb',
gap_policy: 'skip',
unit: NORMALIZED_DERIVATIVE_UNIT,
},
},
}
: {};
return {
metric_deriv: {
derivative: {
@@ -62,6 +81,7 @@ function createMetricAggs(metric) {
unit: NORMALIZED_DERIVATIVE_UNIT,
},
},
...mbDerivative,
...metric.aggs,
};
}
@@ -97,6 +117,13 @@ async function fetchSeries(
},
...createMetricAggs(metric),
};
if (metric.mbField) {
dateHistogramSubAggs.metric_mb = {
[metric.metricAgg]: {
field: metric.mbField,
},
};
}
}

let aggs = {
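Putting those two hunks together, this is roughly the shape of the date_histogram sub-aggregations once a metric declares an `mbField`. The field names below are purely illustrative, and `'1s'` stands in for the `NORMALIZED_DERIVATIVE_UNIT` constant the real code uses:

```ts
const dateHistogramSubAggs = {
  metric: { max: { field: 'indices.store.size_in_bytes' } }, // legacy field
  metric_deriv: {
    derivative: { buckets_path: 'metric', gap_policy: 'skip', unit: '1s' },
  },
  metric_mb: { max: { field: 'elasticsearch.index.store.size.bytes' } }, // MB field
  metric_mb_deriv: {
    derivative: { buckets_path: 'metric_mb', gap_policy: 'skip', unit: '1s' },
  },
};
// defaultCalculation() then prefers metric_mb_deriv.normalized_value when it
// is a positive number, and falls back to the legacy bucket value otherwise.
```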
@@ -209,7 +236,7 @@ function isObject(value) {
}

function countBuckets(data, count = 0) {
if (data.buckets) {
if (data && data.buckets) {
count += data.buckets.length;
for (const bucket of data.buckets) {
for (const key of Object.keys(bucket)) {
@@ -218,7 +245,7 @@ function countBuckets(data, count = 0) {
}
}
}
} else {
} else if (data) {
for (const key of Object.keys(data)) {
if (isObject(data[key])) {
count = countBuckets(data[key], count);
@@ -15,15 +15,8 @@ import { createQuery } from '../create_query';
import { ElasticsearchResponse } from '../../../common/types/es';
import { LegacyRequest } from '../../types';

export function handleResponse(response: ElasticsearchResponse) {
const isEnabled = response.hits?.hits[0]?._source.stack_stats?.xpack?.ccr?.enabled ?? undefined;
const isAvailable =
response.hits?.hits[0]?._source.stack_stats?.xpack?.ccr?.available ?? undefined;
return isEnabled && isAvailable;
}

export async function checkCcrEnabled(req: LegacyRequest, esIndexPattern: string) {
checkParam(esIndexPattern, 'esIndexPattern in getNodes');
checkParam(esIndexPattern, 'esIndexPattern in checkCcrEnabled');

const start = moment.utc(req.payload.timeRange.min).valueOf();
const end = moment.utc(req.payload.timeRange.max).valueOf();
@@ -45,10 +38,17 @@ export async function checkCcrEnabled(req: LegacyRequest, esIndexPattern: string
}),
sort: [{ timestamp: { order: 'desc', unmapped_type: 'long' } }],
},
filterPath: ['hits.hits._source.stack_stats.xpack.ccr'],
filterPath: [
'hits.hits._source.stack_stats.xpack.ccr',
'hits.hits._source.elasticsearch.cluster.stats.stack.xpack.ccr',
],
};

const { callWithRequest } = req.server.plugins.elasticsearch.getCluster('monitoring');
const response = await callWithRequest(req, 'search', params);
return handleResponse(response);
const response: ElasticsearchResponse = await callWithRequest(req, 'search', params);
const legacyCcr = response.hits?.hits[0]?._source.stack_stats?.xpack?.ccr;
const mbCcr = response.hits?.hits[0]?._source?.elasticsearch?.cluster?.stats?.stack?.xpack?.ccr;
const isEnabled = legacyCcr?.enabled ?? mbCcr?.enabled;
const isAvailable = legacyCcr?.available ?? mbCcr?.available;
return isEnabled && isAvailable;
}
@@ -5,7 +5,7 @@
* 2.0.
*/

import { handleLastRecoveries, filterOldShardActivity } from './get_last_recovery';
import { handleLegacyLastRecoveries, filterOldShardActivity } from './get_last_recovery';

describe('get_last_recovery', () => {
// Note: times are from the epoch!
@@ -47,24 +47,24 @@ describe('get_last_recovery', () => {

it('No hits results in an empty array', () => {
// Note: we don't expect it to touch hits without total === 1
expect(handleLastRecoveries({ hits: { hits: [] } }, new Date(0))).toHaveLength(0);
expect(handleLegacyLastRecoveries({ hits: { hits: [] } }, new Date(0))).toHaveLength(0);
});

it('Filters on stop time', () => {
expect(handleLastRecoveries(resp, new Date(0))).toHaveLength(5);
expect(handleLastRecoveries(resp, new Date(99))).toHaveLength(5);
expect(handleLastRecoveries(resp, new Date(100))).toHaveLength(5);
expect(handleLastRecoveries(resp, new Date(101))).toHaveLength(3);
expect(handleLastRecoveries(resp, new Date(501))).toHaveLength(0);
expect(handleLegacyLastRecoveries(resp, new Date(0))).toHaveLength(5);
expect(handleLegacyLastRecoveries(resp, new Date(99))).toHaveLength(5);
expect(handleLegacyLastRecoveries(resp, new Date(100))).toHaveLength(5);
expect(handleLegacyLastRecoveries(resp, new Date(101))).toHaveLength(3);
expect(handleLegacyLastRecoveries(resp, new Date(501))).toHaveLength(0);

const filteredActivities = handleLastRecoveries(resp, new Date(301));
const filteredActivities = handleLegacyLastRecoveries(resp, new Date(301));

expect(filteredActivities).toHaveLength(1);
expect(filteredActivities[0].stop_time_in_millis).toEqual(500);
});

it('Sorts based on start time (descending)', () => {
const sortedActivities = handleLastRecoveries(resp, new Date(0));
const sortedActivities = handleLegacyLastRecoveries(resp, new Date(0));

expect(sortedActivities[0].start_time_in_millis).toEqual(100);
expect(sortedActivities[4].start_time_in_millis).toEqual(0);
@@ -13,7 +13,11 @@ import { checkParam } from '../error_missing_required';
import { createQuery } from '../create_query';
// @ts-ignore
import { ElasticsearchMetric } from '../metrics';
import { ElasticsearchResponse, ElasticsearchIndexRecoveryShard } from '../../../common/types/es';
import {
ElasticsearchResponse,
ElasticsearchIndexRecoveryShard,
ElasticsearchResponseHit,
} from '../../../common/types/es';
import { LegacyRequest } from '../../types';

/**
@@ -28,9 +32,12 @@ import { LegacyRequest } from '../../types';
* @returns {boolean} true to keep
*/
export function filterOldShardActivity(startMs: number) {
return (activity: ElasticsearchIndexRecoveryShard) => {
return (activity?: ElasticsearchIndexRecoveryShard) => {
// either it's still going and there is no stop time, or the stop time happened after we started looking for one
return !_.isNumber(activity.stop_time_in_millis) || activity.stop_time_in_millis >= startMs;
return (
activity &&
(!_.isNumber(activity.stop_time_in_millis) || activity.stop_time_in_millis >= startMs)
);
};
}

@@ -42,19 +49,44 @@ export function filterOldShardActivity(startMs: number) {
* @param {Date} start The start time from the request payload (expected to be of type {@code Date})
* @returns {Object[]} An array of shards representing active shard activity from {@code _source.index_recovery.shards}.
*/
export function handleLastRecoveries(resp: ElasticsearchResponse, start: number) {
export function handleLegacyLastRecoveries(resp: ElasticsearchResponse, start: number) {
if (resp.hits?.hits.length === 1) {
const data = (resp.hits?.hits[0]?._source.index_recovery?.shards ?? []).filter(
filterOldShardActivity(moment.utc(start).valueOf())
);
data.sort((a, b) => b.start_time_in_millis - a.start_time_in_millis);
data.sort((a, b) => (b.start_time_in_millis ?? 0) - (a.start_time_in_millis ?? 0));
return data;
}

return [];
}

export function getLastRecovery(req: LegacyRequest, esIndexPattern: string) {
// For MB, we index individual documents instead of a single document with a list of recovered shards
// This means we need to query a bit differently to end up with the same result. We need to ensure
// that our recovered shards are within the same time window to match the legacy query (of size: 1)
export function handleMbLastRecoveries(resp: ElasticsearchResponse, start: number) {
const hits = resp.hits?.hits ?? [];
const groupedByTimestamp = hits.reduce(
(accum: { [timestamp: string]: ElasticsearchResponseHit[] }, hit) => {
const timestamp = hit._source['@timestamp'] ?? '';
accum[timestamp] = accum[timestamp] || [];
accum[timestamp].push(hit);
return accum;
},
{}
);
const maxTimestamp = resp.aggregations?.max_timestamp?.value_as_string;
const mapped = (groupedByTimestamp[maxTimestamp] ?? []).map(
(hit) => hit._source.elasticsearch?.index?.recovery
);
const filtered = mapped.filter(filterOldShardActivity(moment.utc(start).valueOf()));
filtered.sort((a, b) =>
a && b ? (b.start_time_in_millis ?? 0) - (a.start_time_in_millis ?? 0) : 0
);
return filtered;
}

export async function getLastRecovery(req: LegacyRequest, esIndexPattern: string, size: number) {
checkParam(esIndexPattern, 'esIndexPattern in elasticsearch/getLastRecovery');

const start = req.payload.timeRange.min;
@@ -62,7 +94,7 @@ export function getLastRecovery(req: LegacyRequest, esIndexPattern: string) {
const clusterUuid = req.params.clusterUuid;

const metric = ElasticsearchMetric.getMetricFields();
const params = {
const legacyParams = {
index: esIndexPattern,
size: 1,
ignoreUnavailable: true,
@@ -72,9 +104,31 @@ export function getLastRecovery(req: LegacyRequest, esIndexPattern: string) {
query: createQuery({ type: 'index_recovery', start, end, clusterUuid, metric }),
},
};
const mbParams = {
index: esIndexPattern,
size,
ignoreUnavailable: true,
body: {
_source: ['elasticsearch.index.recovery', '@timestamp'],
sort: { timestamp: { order: 'desc', unmapped_type: 'long' } },
query: createQuery({ type: 'index_recovery', start, end, clusterUuid, metric }),
aggs: {
max_timestamp: {
max: {
field: '@timestamp',
},
},
},
},
};

const { callWithRequest } = req.server.plugins.elasticsearch.getCluster('monitoring');
return callWithRequest(req, 'search', params).then((resp) => {
return handleLastRecoveries(resp, start);
});
const [legacyResp, mbResp] = await Promise.all([
callWithRequest(req, 'search', legacyParams),
callWithRequest(req, 'search', mbParams),
]);
const legacyResult = handleLegacyLastRecoveries(legacyResp, start);
const mbResult = handleMbLastRecoveries(mbResp, start);

return [...legacyResult, ...mbResult];
}
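The grouping trick in `handleMbLastRecoveries` deserves a standalone sketch: since Metricbeat writes one recovery document per shard, only the documents sharing the newest `@timestamp` together correspond to the legacy single `size: 1` snapshot. Types here are pared down to what the logic needs, and the max timestamp is computed inline rather than via the aggregation the real code uses:

```ts
interface RecoveryHit {
  '@timestamp': string;
  shard: { start_time_in_millis: number };
}

function latestSnapshot(hits: RecoveryHit[]): RecoveryHit[] {
  if (hits.length === 0) return [];
  // ISO-8601 strings of equal precision compare correctly lexicographically.
  const maxTimestamp = hits.reduce((max, hit) =>
    hit['@timestamp'] > max['@timestamp'] ? hit : max
  )['@timestamp'];
  return hits.filter((hit) => hit['@timestamp'] === maxTimestamp);
}

const hits: RecoveryHit[] = [
  { '@timestamp': '2021-04-01T10:00:00Z', shard: { start_time_in_millis: 1 } },
  { '@timestamp': '2021-04-01T10:00:10Z', shard: { start_time_in_millis: 2 } },
  { '@timestamp': '2021-04-01T10:00:10Z', shard: { start_time_in_millis: 3 } },
];
console.log(latestSnapshot(hits).length); // 2 — only the newest collection interval
```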
@@ -21,7 +21,18 @@ import { LegacyRequest } from '../../types';
*/
export function handleResponse(response: ElasticsearchResponse) {
const hits = response.hits?.hits;
return hits?.map((hit) => hit._source.job_stats) ?? [];
return (
hits?.map((hit) => {
const job = hit._source.job_stats ?? hit._source.elasticsearch;
return {
...job,
node: {
...job?.node,
name: job?.node?.name ?? job?.node?.id,
},
};
}) ?? []
);
}

export function getMlJobs(req: LegacyRequest, esIndexPattern: string) {
@@ -39,17 +50,24 @@ export function getMlJobs(req: LegacyRequest, esIndexPattern: string) {
ignoreUnavailable: true,
filterPath: [
'hits.hits._source.job_stats.job_id',
'hits.hits._source.elasticsearch.ml.job.id',
'hits.hits._source.job_stats.state',
'hits.hits._source.elasticsearch.ml.job.state',
'hits.hits._source.job_stats.data_counts.processed_record_count',
'hits.hits._source.elasticsearch.ml.job.data_counts.processed_record_count',
'hits.hits._source.job_stats.model_size_stats.model_bytes',
'hits.hits._source.elasticsearch.ml.job.model_size_stats.model_bytes',
'hits.hits._source.job_stats.forecasts_stats.total',
'hits.hits._source.elasticsearch.ml.job.forecasts_stats.total',
'hits.hits._source.job_stats.node.id',
'hits.hits._source.elasticsearch.node.id',
'hits.hits._source.job_stats.node.name',
'hits.hits._source.elasticsearch.node.name',
],
body: {
sort: { timestamp: { order: 'desc', unmapped_type: 'long' } },
collapse: { field: 'job_stats.job_id' },
query: createQuery({ type: 'job_stats', start, end, clusterUuid, metric }),
query: createQuery({ types: ['ml_job', 'job_stats'], start, end, clusterUuid, metric }),
},
};

@@ -66,7 +84,7 @@ export function getMlJobsForCluster(
esIndexPattern: string,
cluster: ElasticsearchSource
) {
const license = cluster.license ?? {};
const license = cluster.license ?? cluster.elasticsearch?.cluster?.stats?.license ?? {};

if (license.status === 'active' && includes(ML_SUPPORTED_LICENSES, license.type)) {
// ML is supported
@@ -80,7 +98,7 @@ export function getMlJobsForCluster(
ignoreUnavailable: true,
filterPath: 'aggregations.jobs_count.value',
body: {
query: createQuery({ type: 'job_stats', start, end, clusterUuid, metric }),
query: createQuery({ types: ['ml_job', 'job_stats'], start, end, clusterUuid, metric }),
aggs: {
jobs_count: { cardinality: { field: 'job_stats.job_id' } },
},
@@ -18,7 +18,9 @@ import { LegacyRequest } from '../../../types';

export function handleResponse(shardStats: any, indexUuid: string) {
return (response: ElasticsearchResponse) => {
const indexStats = response.hits?.hits[0]?._source.index_stats;
const indexStats =
response.hits?.hits[0]?._source.index_stats ??
response.hits?.hits[0]?._source.elasticsearch?.index;
const primaries = indexStats?.primaries;
const total = indexStats?.total;

@@ -74,14 +76,30 @@ export function getIndexSummary(
checkParam(esIndexPattern, 'esIndexPattern in elasticsearch/getIndexSummary');

const metric = ElasticsearchMetric.getMetricFields();
const filters = [{ term: { 'index_stats.index': indexUuid } }];
const filters = [
{
bool: {
should: [
{ term: { 'index_stats.index': indexUuid } },
{ term: { 'elasticsearch.index.name': indexUuid } },
],
},
},
];
const params = {
index: esIndexPattern,
size: 1,
ignoreUnavailable: true,
body: {
sort: { timestamp: { order: 'desc', unmapped_type: 'long' } },
query: createQuery({ type: 'index_stats', start, end, clusterUuid, metric, filters }),
query: createQuery({
types: ['index', 'index_stats'],
start,
end,
clusterUuid,
metric,
filters,
}),
},
};

@@ -29,12 +29,16 @@ export function handleResponse(
// map the hits
const hits = resp?.hits?.hits ?? [];
return hits.map((hit) => {
const stats = hit._source.index_stats;
const earliestStats = hit.inner_hits?.earliest?.hits?.hits[0]?._source.index_stats;
const stats = hit._source.index_stats ?? hit._source.elasticsearch?.index;
const earliestStats =
hit.inner_hits?.earliest?.hits?.hits[0]?._source.index_stats ??
hit.inner_hits?.earliest?.hits?.hits[0]?._source.elasticsearch?.index;

const rateOptions = {
hitTimestamp: hit._source.timestamp,
earliestHitTimestamp: hit.inner_hits?.earliest?.hits?.hits[0]?._source.timestamp,
hitTimestamp: hit._source.timestamp ?? hit._source['@timestamp'],
earliestHitTimestamp:
hit.inner_hits?.earliest?.hits?.hits[0]?._source.timestamp ??
hit.inner_hits?.earliest?.hits?.hits[0]?._source['@timestamp'],
timeWindowMin: min,
timeWindowMax: max,
};
@@ -53,7 +57,7 @@ export function handleResponse(
...rateOptions,
});

const shardStatsForIndex = get(shardStats, ['indices', stats?.index ?? '']);
const shardStatsForIndex = get(shardStats, ['indices', stats?.index ?? stats?.name ?? '']);
let status;
let statusSort;
let unassignedShards;
@@ -77,7 +81,7 @@ export function handleResponse(
}

return {
name: stats?.index,
name: stats?.index ?? stats?.name,
status,
doc_count: stats?.primaries?.docs?.count,
data_size: stats?.total?.store?.size_in_bytes,
@@ -116,22 +120,31 @@ export function buildGetIndicesQuery(
filterPath: [
// only filter path can filter for inner_hits
'hits.hits._source.index_stats.index',
'hits.hits._source.elasticsearch.index.name',
'hits.hits._source.index_stats.primaries.docs.count',
'hits.hits._source.elasticsearch.index.primaries.docs.count',
'hits.hits._source.index_stats.total.store.size_in_bytes',
'hits.hits._source.elasticsearch.index.total.store.size_in_bytes',

// latest hits for calculating metrics
'hits.hits._source.timestamp',
'hits.hits._source.@timestamp',
'hits.hits._source.index_stats.primaries.indexing.index_total',
'hits.hits._source.elasticsearch.index.primaries.indexing.index_total',
'hits.hits._source.index_stats.total.search.query_total',
'hits.hits._source.elasticsearch.index.total.search.query_total',

// earliest hits for calculating metrics
'hits.hits.inner_hits.earliest.hits.hits._source.timestamp',
'hits.hits.inner_hits.earliest.hits.hits._source.@timestamp',
'hits.hits.inner_hits.earliest.hits.hits._source.index_stats.primaries.indexing.index_total',
'hits.hits.inner_hits.earliest.hits.hits._source.elasticsearch.index.primaries.indexing.index_total',
'hits.hits.inner_hits.earliest.hits.hits._source.index_stats.total.search.query_total',
'hits.hits.inner_hits.earliest.hits.hits._source.elasticsearch.index.total.search.query_total',
],
body: {
query: createQuery({
type: 'index_stats',
types: ['index', 'index_stats'],
start,
end,
clusterUuid,
@@ -25,7 +25,9 @@ export function calculateNodeType(node, masterNodeId) {
return attr === 'false';
}

if (node.uuid !== undefined && node.uuid === masterNodeId) {
const uuid = node.uuid ?? node.id;

if (uuid !== undefined && uuid === masterNodeId) {
return 'master';
}
if (includes(node.node_ids, masterNodeId)) {
@@ -112,6 +112,7 @@ export async function getNodes(
},
filterPath: [
'hits.hits._source.source_node',
'hits.hits._source.service.address',
'hits.hits._source.elasticsearch.node',
'aggregations.nodes.buckets.key',
...LISTING_METRICS_PATHS,
@@ -26,12 +26,13 @@ export function mapNodesInfo(
clusterStats?: ElasticsearchModifiedSource,
nodesShardCount?: { nodes: { [nodeId: string]: { shardCount: number } } }
) {
const clusterState = clusterStats?.cluster_state ?? { nodes: {} };
const clusterState =
clusterStats?.cluster_state ?? clusterStats?.elasticsearch?.cluster?.stats?.state;

return nodeHits.reduce((prev, node) => {
const sourceNode = node._source.source_node || node._source.elasticsearch?.node;

const calculatedNodeType = calculateNodeType(sourceNode, clusterState.master_node);
const calculatedNodeType = calculateNodeType(sourceNode, clusterState?.master_node);
const { nodeType, nodeTypeLabel, nodeTypeClass } = getNodeTypeClassLabel(
sourceNode,
calculatedNodeType
@@ -40,13 +41,13 @@ export function mapNodesInfo(
if (!uuid) {
return prev;
}
const isOnline = !isUndefined(clusterState.nodes ? clusterState.nodes[uuid] : undefined);
const isOnline = !isUndefined(clusterState?.nodes ? clusterState.nodes[uuid] : undefined);

return {
...prev,
[uuid]: {
name: sourceNode?.name,
transport_address: sourceNode?.transport_address,
transport_address: node._source.service?.address ?? sourceNode?.transport_address,
type: nodeType,
isOnline,
nodeTypeLabel,
@@ -6,16 +6,38 @@
  */

 import { get } from 'lodash';
+// @ts-ignore
 import { checkParam } from '../../error_missing_required';
+// @ts-ignore
 import { createQuery } from '../../create_query';
+// @ts-ignore
 import { ElasticsearchMetric } from '../../metrics';
+// @ts-ignore
 import { calculateIndicesTotals } from './calculate_shard_stat_indices_totals';
+import { LegacyRequest } from '../../../types';
+import { ElasticsearchModifiedSource } from '../../../../common/types/es';

-async function getUnassignedShardData(req, esIndexPattern, cluster) {
+async function getUnassignedShardData(
+  req: LegacyRequest,
+  esIndexPattern: string,
+  cluster: ElasticsearchModifiedSource
+) {
   const config = req.server.config();
   const maxBucketSize = config.get('monitoring.ui.max_bucket_size');
   const metric = ElasticsearchMetric.getMetricFields();

+  const filters = [];
+  if (cluster.cluster_state?.state_uuid) {
+    filters.push({ term: { state_uuid: cluster.cluster_state?.state_uuid } });
+  } else if (cluster.elasticsearch?.cluster?.stats?.state?.state_uuid) {
+    filters.push({
+      term: {
+        'elasticsearch.cluster.stats.state.state_uuid':
+          cluster.elasticsearch?.cluster?.stats?.state?.state_uuid,
+      },
+    });
+  }
+
   const params = {
     index: esIndexPattern,
     size: 0,
@@ -23,10 +45,10 @@ async function getUnassignedShardData(req, esIndexPattern, cluster) {
     body: {
       sort: { timestamp: { order: 'desc', unmapped_type: 'long' } },
       query: createQuery({
-        type: 'shards',
-        clusterUuid: cluster.cluster_uuid,
+        types: ['shard', 'shards'],
+        clusterUuid: cluster.cluster_uuid ?? cluster.elasticsearch?.cluster?.id,
         metric,
-        filters: [{ term: { state_uuid: get(cluster, 'cluster_state.state_uuid') } }],
+        filters,
       }),
       aggs: {
         indices: {
@@ -60,34 +82,41 @@ async function getUnassignedShardData(req, esIndexPattern, cluster) {
   return await callWithRequest(req, 'search', params);
 }

-export async function getIndicesUnassignedShardStats(req, esIndexPattern, cluster) {
+export async function getIndicesUnassignedShardStats(
+  req: LegacyRequest,
+  esIndexPattern: string,
+  cluster: ElasticsearchModifiedSource
+) {
   checkParam(esIndexPattern, 'esIndexPattern in elasticsearch/getShardStats');

   const response = await getUnassignedShardData(req, esIndexPattern, cluster);
-  const indices = get(response, 'aggregations.indices.buckets', []).reduce((accum, bucket) => {
-    const index = bucket.key;
-    const states = get(bucket, 'state.primary.buckets', []);
-    const unassignedReplica = states
-      .filter((state) => state.key_as_string === 'false')
-      .reduce((total, state) => total + state.doc_count, 0);
-    const unassignedPrimary = states
-      .filter((state) => state.key_as_string === 'true')
-      .reduce((total, state) => total + state.doc_count, 0);
+  const indices = get(response, 'aggregations.indices.buckets', []).reduce(
+    (accum: any, bucket: any) => {
+      const index = bucket.key;
+      const states = get(bucket, 'state.primary.buckets', []);
+      const unassignedReplica = states
+        .filter((state: any) => state.key_as_string === 'false')
+        .reduce((total: number, state: any) => total + state.doc_count, 0);
+      const unassignedPrimary = states
+        .filter((state: any) => state.key_as_string === 'true')
+        .reduce((total: number, state: any) => total + state.doc_count, 0);

-    let status = 'green';
-    if (unassignedReplica > 0) {
-      status = 'yellow';
-    }
-    if (unassignedPrimary > 0) {
-      status = 'red';
-    }
+      let status = 'green';
+      if (unassignedReplica > 0) {
+        status = 'yellow';
+      }
+      if (unassignedPrimary > 0) {
+        status = 'red';
+      }

-    accum[index] = {
-      unassigned: { primary: unassignedPrimary, replica: unassignedReplica },
-      status,
-    };
-    return accum;
-  }, {});
+      accum[index] = {
+        unassigned: { primary: unassignedPrimary, replica: unassignedReplica },
+        status,
+      };
+      return accum;
+    },
+    {}
+  );

   const indicesTotals = calculateIndicesTotals(indices);
   return { indices, indicesTotals };
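The reducer above derives an index health colour from unassigned shard counts: any unassigned replica turns the index yellow, any unassigned primary turns it red. A standalone sketch of just that rule:

    type IndexStatus = 'green' | 'yellow' | 'red';

    function indexStatus(unassignedPrimary: number, unassignedReplica: number): IndexStatus {
      // Red dominates yellow: a missing primary is worse than a missing replica.
      if (unassignedPrimary > 0) return 'red';
      if (unassignedReplica > 0) return 'yellow';
      return 'green';
    }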
@@ -6,15 +6,36 @@
  */

 import { get } from 'lodash';
+// @ts-ignore
 import { checkParam } from '../../error_missing_required';
+// @ts-ignore
 import { createQuery } from '../../create_query';
+// @ts-ignore
 import { ElasticsearchMetric } from '../../metrics';
+import { LegacyRequest } from '../../../types';
+import { ElasticsearchModifiedSource } from '../../../../common/types/es';

-async function getShardCountPerNode(req, esIndexPattern, cluster) {
+async function getShardCountPerNode(
+  req: LegacyRequest,
+  esIndexPattern: string,
+  cluster: ElasticsearchModifiedSource
+) {
   const config = req.server.config();
   const maxBucketSize = config.get('monitoring.ui.max_bucket_size');
   const metric = ElasticsearchMetric.getMetricFields();

+  const filters = [];
+  if (cluster.cluster_state?.state_uuid) {
+    filters.push({ term: { state_uuid: cluster.cluster_state?.state_uuid } });
+  } else if (cluster.elasticsearch?.cluster?.stats?.state?.state_uuid) {
+    filters.push({
+      term: {
+        'elasticsearch.cluster.stats.state.state_uuid':
+          cluster.elasticsearch?.cluster?.stats?.state?.state_uuid,
+      },
+    });
+  }
+
   const params = {
     index: esIndexPattern,
     size: 0,
@@ -22,10 +43,10 @@ async function getShardCountPerNode(req, esIndexPattern, cluster) {
     body: {
       sort: { timestamp: { order: 'desc', unmapped_type: 'long' } },
       query: createQuery({
-        type: 'shards',
-        clusterUuid: cluster.cluster_uuid,
+        types: ['shard', 'shards'],
+        clusterUuid: cluster.cluster_uuid ?? cluster.elasticsearch?.cluster?.id,
         metric,
-        filters: [{ term: { state_uuid: get(cluster, 'cluster_state.state_uuid') } }],
+        filters,
       }),
       aggs: {
         nodes: {
@@ -42,13 +63,20 @@ async function getShardCountPerNode(req, esIndexPattern, cluster) {
   return await callWithRequest(req, 'search', params);
 }

-export async function getNodesShardCount(req, esIndexPattern, cluster) {
+export async function getNodesShardCount(
+  req: LegacyRequest,
+  esIndexPattern: string,
+  cluster: ElasticsearchModifiedSource
+) {
   checkParam(esIndexPattern, 'esIndexPattern in elasticsearch/getShardStats');

   const response = await getShardCountPerNode(req, esIndexPattern, cluster);
-  const nodes = get(response, 'aggregations.nodes.buckets', []).reduce((accum, bucket) => {
-    accum[bucket.key] = { shardCount: bucket.doc_count };
-    return accum;
-  }, {});
+  const nodes = get(response, 'aggregations.nodes.buckets', []).reduce(
+    (accum: any, bucket: any) => {
+      accum[bucket.key] = { shardCount: bucket.doc_count };
+      return accum;
+    },
+    {}
+  );
   return { nodes };
 }
@@ -13,7 +13,6 @@ import { createQuery } from '../../create_query';
 import { ElasticsearchMetric } from '../../metrics';
 import { ElasticsearchResponse, ElasticsearchLegacySource } from '../../../../common/types/es';
 import { LegacyRequest } from '../../../types';

 export function handleResponse(response: ElasticsearchResponse) {
   const hits = response.hits?.hits;
   if (!hits) {
@@ -23,16 +22,31 @@ export function handleResponse(response: ElasticsearchResponse) {
   // deduplicate any shards from earlier days with the same cluster state state_uuid
   const uniqueShards = new Set<string>();

   // map into object with shard and source properties
   return hits.reduce((shards: Array<ElasticsearchLegacySource['shard']>, hit) => {
-    const shard = hit._source.shard;
+    const legacyShard = hit._source.shard;
+    const mbShard = hit._source.elasticsearch;

-    if (shard) {
+    if (legacyShard || mbShard) {
+      const index = mbShard?.index?.name ?? legacyShard?.index;
+      const shardNumber = mbShard?.shard?.number ?? legacyShard?.shard;
+      const primary = mbShard?.shard?.primary ?? legacyShard?.primary;
+      const relocatingNode =
+        mbShard?.shard?.relocating_node?.id ?? legacyShard?.relocating_node ?? null;
+      const node = mbShard?.node?.id ?? legacyShard?.node;
       // note: if the request is for a node, then it's enough to deduplicate without primary, but for indices it displays both
-      const shardId = `${shard.index}-${shard.shard}-${shard.primary}-${shard.relocating_node}-${shard.node}`;
+      const shardId = `${index}-${shardNumber}-${primary}-${relocatingNode}-${node}`;

       if (!uniqueShards.has(shardId)) {
-        shards.push(shard);
+        // @ts-ignore
+        shards.push({
+          index,
+          node,
+          primary,
+          relocating_node: relocatingNode,
+          shard: shardNumber,
+          state: legacyShard?.state ?? mbShard?.shard?.state,
+        });
         uniqueShards.add(shardId);
       }
     }
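Deduplication here keys a Set on a composite string of every field that distinguishes a shard copy. A minimal standalone sketch of the technique:

    interface ShardDoc {
      index: string;
      shard: number;
      primary: boolean;
      relocating_node: string | null;
      node: string;
    }

    function dedupeShards(docs: ShardDoc[]): ShardDoc[] {
      const seen = new Set<string>();
      const out: ShardDoc[] = [];
      for (const d of docs) {
        // Identical keys mean the same shard copy reported twice
        // (e.g. under the same cluster-state state_uuid on different days).
        const key = `${d.index}-${d.shard}-${d.primary}-${d.relocating_node}-${d.node}`;
        if (!seen.has(key)) {
          seen.add(key);
          out.push(d);
        }
      }
      return out;
    }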
@@ -52,10 +66,34 @@ export function getShardAllocation(
 ) {
   checkParam(esIndexPattern, 'esIndexPattern in elasticsearch/getShardAllocation');

-  const filters = [{ term: { state_uuid: stateUuid } }, shardFilter];
+  const filters = [
+    {
+      bool: {
+        should: [
+          {
+            term: {
+              state_uuid: stateUuid,
+            },
+          },
+          {
+            term: {
+              'elasticsearch.cluster.state.id': stateUuid,
+            },
+          },
+        ],
+      },
+    },
+    shardFilter,
+  ];

   if (!showSystemIndices) {
     filters.push({
-      bool: { must_not: [{ prefix: { 'shard.index': '.' } }] },
+      bool: {
+        must_not: [
+          { prefix: { 'shard.index': '.' } },
+          { prefix: { 'elasticsearch.index.name': '.' } },
+        ],
+      },
     });
   }

@@ -67,7 +105,7 @@ export function getShardAllocation(
     size: config.get('monitoring.ui.max_bucket_size'),
     ignoreUnavailable: true,
     body: {
-      query: createQuery({ type: 'shards', clusterUuid, metric, filters }),
+      query: createQuery({ types: ['shard', 'shards'], clusterUuid, metric, filters }),
     },
   };
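Because legacy and Metricbeat documents store the cluster-state id under different keys (`state_uuid` vs `elasticsearch.cluster.state.id`), single `term` filters become `bool.should` clauses that accept either. A sketch of a helper that builds such a filter (hypothetical, not part of the commit):

    // Build a filter matching `value` in any one of several candidate fields.
    function termOnAnyField(fields: string[], value: string) {
      return {
        bool: {
          should: fields.map((field) => ({ term: { [field]: value } })),
          // Elasticsearch defaults to 1 when a bool has only `should`
          // clauses; stated explicitly here for clarity.
          minimum_should_match: 1,
        },
      };
    }

    // termOnAnyField(['state_uuid', 'elasticsearch.cluster.state.id'], stateUuid)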
@@ -6,14 +6,27 @@
  */

 import { get } from 'lodash';
+// @ts-ignore
 import { checkParam } from '../../error_missing_required';
+// @ts-ignore
 import { createQuery } from '../../create_query';
+// @ts-ignore
 import { ElasticsearchMetric } from '../../metrics';
+// @ts-ignore
 import { normalizeIndexShards, normalizeNodeShards } from './normalize_shard_objects';
+// @ts-ignore
 import { getShardAggs } from './get_shard_stat_aggs';
+// @ts-ignore
 import { calculateIndicesTotals } from './calculate_shard_stat_indices_totals';
+import { LegacyRequest } from '../../../types';
+import { ElasticsearchResponse, ElasticsearchModifiedSource } from '../../../../common/types/es';

-export function handleResponse(resp, includeNodes, includeIndices, cluster) {
+export function handleResponse(
+  resp: ElasticsearchResponse,
+  includeNodes: boolean,
+  includeIndices: boolean,
+  cluster: ElasticsearchModifiedSource
+) {
   let indices;
   let indicesTotals;
   let nodes;
@@ -25,7 +38,11 @@ export function handleResponse(resp, includeNodes, includeIndices, cluster) {
   }

   if (includeNodes) {
-    const masterNode = get(cluster, 'cluster_state.master_node');
+    const masterNode = get(
+      cluster,
+      'elasticsearch.cluster.stats.state.master_node',
+      get(cluster, 'cluster_state.master_node')
+    );
     nodes = resp.aggregations.nodes.buckets.reduce(normalizeNodeShards(masterNode), {});
   }

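Note the nesting trick above: lodash `get` takes a default value, and here the default is itself another `get` on the legacy path, expressing "metricbeat path first, legacy path second". A generalized helper could make that intent explicit (hypothetical sketch, not in the commit):

    import { get } from 'lodash';

    // Read the first path that resolves to a defined value.
    function getFirst<T>(obj: unknown, paths: string[], fallback?: T): T | undefined {
      for (const path of paths) {
        const value = get(obj, path);
        if (value !== undefined) {
          return value;
        }
      }
      return fallback;
    }

    // getFirst(cluster, [
    //   'elasticsearch.cluster.stats.state.master_node',
    //   'cluster_state.master_node',
    // ])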
@@ -37,21 +54,44 @@ export function handleResponse(resp, includeNodes, includeIndices, cluster) {
 }

 export function getShardStats(
-  req,
-  esIndexPattern,
-  cluster,
+  req: LegacyRequest,
+  esIndexPattern: string,
+  cluster: ElasticsearchModifiedSource,
   { includeNodes = false, includeIndices = false, indexName = null, nodeUuid = null } = {}
 ) {
   checkParam(esIndexPattern, 'esIndexPattern in elasticsearch/getShardStats');

   const config = req.server.config();
   const metric = ElasticsearchMetric.getMetricFields();
-  const filters = [{ term: { state_uuid: get(cluster, 'cluster_state.state_uuid') } }];
+  const filters = [];
+  if (cluster.cluster_state?.state_uuid) {
+    filters.push({ term: { state_uuid: cluster.cluster_state.state_uuid } });
+  } else if (cluster.elasticsearch?.cluster?.stats?.state?.state_uuid) {
+    filters.push({
+      term: {
+        'elasticsearch.cluster.state.id': cluster.elasticsearch.cluster.stats.state.state_uuid,
+      },
+    });
+  }
   if (indexName) {
-    filters.push({ term: { 'shard.index': indexName } });
+    filters.push({
+      bool: {
+        should: [
+          { term: { 'shard.index': indexName } },
+          { term: { 'elasticsearch.index.name': indexName } },
+        ],
+      },
+    });
   }
   if (nodeUuid) {
-    filters.push({ term: { 'shard.node': nodeUuid } });
+    filters.push({
+      bool: {
+        should: [
+          { term: { 'shard.node': nodeUuid } },
+          { term: { 'elasticsearch.node.id': nodeUuid } },
+        ],
+      },
+    });
   }
   const params = {
     index: esIndexPattern,
@@ -60,8 +100,8 @@ export function getShardStats(
     body: {
       sort: { timestamp: { order: 'desc', unmapped_type: 'long' } },
       query: createQuery({
-        type: 'shards',
-        clusterUuid: cluster.cluster_uuid,
+        types: ['shard', 'shards'],
+        clusterUuid: cluster.cluster_uuid ?? cluster.elasticsearch?.cluster?.id,
         metric,
         filters,
       }),
@@ -14,12 +14,15 @@ import { LegacyRequest } from '../../types';
 import { ElasticsearchResponse } from '../../../common/types/es';

 export function handleResponse(resp: ElasticsearchResponse) {
-  const source = resp.hits?.hits[0]?._source.kibana_stats;
-  const kibana = source?.kibana;
+  const legacySource = resp.hits?.hits[0]?._source.kibana_stats;
+  const mbSource = resp.hits?.hits[0]?._source.kibana?.stats;
+  const kibana = resp.hits?.hits[0]?._source.kibana?.kibana ?? legacySource?.kibana;
   return merge(kibana, {
-    availability: calculateAvailability(source?.timestamp),
-    os_memory_free: source?.os?.memory?.free_in_bytes,
-    uptime: source?.process?.uptime_in_millis,
+    availability: calculateAvailability(
+      resp.hits?.hits[0]?._source['@timestamp'] ?? legacySource?.timestamp
+    ),
+    os_memory_free: mbSource?.os?.memory?.free_in_bytes ?? legacySource?.os?.memory?.free_in_bytes,
+    uptime: mbSource?.process?.uptime?.ms ?? legacySource?.process?.uptime_in_millis,
   });
 }
@@ -36,9 +39,13 @@ export function getKibanaInfo(
     ignoreUnavailable: true,
     filterPath: [
       'hits.hits._source.kibana_stats.kibana',
+      'hits.hits._source.kibana.kibana',
       'hits.hits._source.kibana_stats.os.memory.free_in_bytes',
+      'hits.hits._source.kibana.stats.os.memory.free_in_bytes',
       'hits.hits._source.kibana_stats.process.uptime_in_millis',
+      'hits.hits._source.kibana.stats.process.uptime.ms',
       'hits.hits._source.kibana_stats.timestamp',
+      'hits.hits._source.@timestamp',
     ],
     body: {
       query: {
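Every `filterPath` in the commit now lists both schemas side by side, so the response carries whichever variant the document actually has; filter-path entries that match nothing are simply dropped by Elasticsearch and cost nothing. A sketch of the idea in isolation (hypothetical pattern value):

    // Request both the legacy and the metricbeat variant of each field.
    const searchParams = {
      index: kbnIndexPattern, // e.g. '.monitoring-kibana-*,metricbeat-*' (illustrative)
      filterPath: [
        'hits.hits._source.kibana_stats.kibana', // legacy internal collection
        'hits.hits._source.kibana.kibana',       // metricbeat
      ],
    };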
@@ -1,77 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0; you may not use this file except in compliance with the Elastic License
- * 2.0.
- */
-
-import { get } from 'lodash';
-import moment from 'moment';
-import { checkParam } from '../error_missing_required';
-import { createQuery } from '../create_query';
-import { calculateAvailability } from '../calculate_availability';
-import { KibanaMetric } from '../metrics';
-
-/*
- * Get detailed info for Kibanas in the cluster
- * for Kibana listing page
- * For each instance:
- *  - name
- *  - status
- *  - memory
- *  - os load average
- *  - requests
- *  - response times
- */
-export function getKibanas(req, kbnIndexPattern, { clusterUuid }) {
-  checkParam(kbnIndexPattern, 'kbnIndexPattern in getKibanas');
-
-  const config = req.server.config();
-  const start = moment.utc(req.payload.timeRange.min).valueOf();
-  const end = moment.utc(req.payload.timeRange.max).valueOf();
-
-  const params = {
-    index: kbnIndexPattern,
-    size: config.get('monitoring.ui.max_bucket_size'),
-    ignoreUnavailable: true,
-    body: {
-      query: createQuery({
-        type: 'kibana_stats',
-        start,
-        end,
-        clusterUuid,
-        metric: KibanaMetric.getMetricFields(),
-      }),
-      collapse: {
-        field: 'kibana_stats.kibana.uuid',
-      },
-      sort: [{ timestamp: { order: 'desc', unmapped_type: 'long' } }],
-      _source: [
-        'timestamp',
-        'kibana_stats.process.memory.resident_set_size_in_bytes',
-        'kibana_stats.os.load.1m',
-        'kibana_stats.response_times.average',
-        'kibana_stats.response_times.max',
-        'kibana_stats.requests.total',
-        'kibana_stats.kibana.transport_address',
-        'kibana_stats.kibana.name',
-        'kibana_stats.kibana.host',
-        'kibana_stats.kibana.uuid',
-        'kibana_stats.kibana.status',
-        'kibana_stats.concurrent_connections',
-      ],
-    },
-  };
-
-  const { callWithRequest } = req.server.plugins.elasticsearch.getCluster('monitoring');
-  return callWithRequest(req, 'search', params).then((resp) => {
-    const instances = get(resp, 'hits.hits', []);
-
-    return instances.map((hit) => {
-      return {
-        ...get(hit, '_source.kibana_stats'),
-        availability: calculateAvailability(get(hit, '_source.timestamp')),
-      };
-    });
-  });
-}

x-pack/plugins/monitoring/server/lib/kibana/get_kibanas.ts (new file, 151 lines)
@@ -0,0 +1,151 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import moment from 'moment';
+// @ts-ignore
+import { checkParam } from '../error_missing_required';
+// @ts-ignore
+import { createQuery } from '../create_query';
+// @ts-ignore
+import { calculateAvailability } from '../calculate_availability';
+// @ts-ignore
+import { KibanaMetric } from '../metrics';
+import { LegacyRequest } from '../../types';
+import { ElasticsearchResponse } from '../../../common/types/es';
+
+interface Kibana {
+  process?: {
+    memory?: {
+      resident_set_size_in_bytes?: number;
+    };
+  };
+  os?: {
+    load?: {
+      '1m'?: number;
+    };
+  };
+  response_times?: {
+    average?: number;
+    max?: number;
+  };
+  requests?: {
+    total?: number;
+  };
+  concurrent_connections?: number;
+  kibana?: {
+    transport_address?: string;
+    name?: string;
+    host?: string;
+    uuid?: string;
+    status?: string;
+  };
+  availability: boolean;
+}
+
+/*
+ * Get detailed info for Kibanas in the cluster
+ * for Kibana listing page
+ * For each instance:
+ *  - name
+ *  - status
+ *  - memory
+ *  - os load average
+ *  - requests
+ *  - response times
+ */
+export async function getKibanas(
+  req: LegacyRequest,
+  kbnIndexPattern: string,
+  { clusterUuid }: { clusterUuid: string }
+) {
+  checkParam(kbnIndexPattern, 'kbnIndexPattern in getKibanas');
+
+  const config = req.server.config();
+  const start = moment.utc(req.payload.timeRange.min).valueOf();
+  const end = moment.utc(req.payload.timeRange.max).valueOf();
+
+  const params = {
+    index: kbnIndexPattern,
+    size: config.get('monitoring.ui.max_bucket_size'),
+    ignoreUnavailable: true,
+    body: {
+      query: createQuery({
+        types: ['kibana_stats', 'stats'],
+        start,
+        end,
+        clusterUuid,
+        metric: KibanaMetric.getMetricFields(),
+      }),
+      collapse: {
+        field: 'kibana_stats.kibana.uuid',
+      },
+      sort: [{ timestamp: { order: 'desc', unmapped_type: 'long' } }],
+      _source: [
+        'timestamp',
+        '@timestamp',
+        'kibana_stats.process.memory.resident_set_size_in_bytes',
+        'kibana.stats.process.memory.resident_set_size.bytes',
+        'kibana_stats.os.load.1m',
+        'kibana.stats.os.load.1m',
+        'kibana_stats.response_times.average',
+        'kibana.stats.response_time.avg.ms',
+        'kibana_stats.response_times.max',
+        'kibana.stats.response_time.max.ms',
+        'kibana_stats.requests.total',
+        'kibana.stats.request.total',
+        'kibana_stats.kibana.transport_address',
+        'kibana.kibana.transport_address',
+        'kibana_stats.kibana.name',
+        'kibana.kibana.name',
+        'kibana_stats.kibana.host',
+        'kibana.kibana.host',
+        'kibana_stats.kibana.uuid',
+        'kibana.kibana.uuid',
+        'kibana_stats.kibana.status',
+        'kibana.kibana.status',
+        'kibana_stats.concurrent_connections',
+        'kibana.stats.concurrent_connections',
+      ],
+    },
+  };
+
+  const { callWithRequest } = req.server.plugins.elasticsearch.getCluster('monitoring');
+  const response: ElasticsearchResponse = await callWithRequest(req, 'search', params);
+  const instances = response.hits?.hits ?? [];
+
+  return instances.map((hit) => {
+    const legacyStats = hit._source.kibana_stats;
+    const mbStats = hit._source.kibana?.stats;
+
+    const kibana: Kibana = {
+      kibana: hit._source.kibana?.kibana ?? legacyStats?.kibana,
+      concurrent_connections:
+        mbStats?.concurrent_connections ?? legacyStats?.concurrent_connections,
+      process: {
+        memory: {
+          resident_set_size_in_bytes:
+            mbStats?.process?.memory?.resident_set_size?.bytes ??
+            legacyStats?.process?.memory?.resident_set_size_in_bytes,
+        },
+      },
+      os: {
+        load: {
+          '1m': mbStats?.os?.load?.['1m'] ?? legacyStats?.os?.load?.['1m'],
+        },
+      },
+      response_times: {
+        average: mbStats?.response_time?.avg?.ms ?? legacyStats?.response_times?.average,
+        max: mbStats?.response_time?.max?.ms ?? legacyStats?.response_times?.max,
+      },
+      requests: {
+        total: mbStats?.request?.total ?? legacyStats?.requests?.total,
+      },
+      availability: calculateAvailability(hit._source['@timestamp'] ?? hit._source.timestamp),
+    };
+    return kibana;
+  });
+}
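The rewritten getKibanas no longer spreads the raw legacy document; it maps both document shapes into the single `Kibana` interface so downstream UI code stays schema-agnostic. A condensed sketch of the normalization idea, with hypothetical minimal types:

    interface LegacyDoc { kibana_stats?: { requests?: { total?: number } } }
    interface MbDoc { kibana?: { stats?: { request?: { total?: number } } } }

    // One normalized value out, two document shapes in.
    function requestTotal(hit: LegacyDoc & MbDoc): number | undefined {
      return hit.kibana?.stats?.request?.total ?? hit.kibana_stats?.requests?.total;
    }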
@@ -32,7 +32,7 @@ export function getKibanasForClusters(req, kbnIndexPattern, clusters) {
   const end = req.payload.timeRange.max;

   return Bluebird.map(clusters, (cluster) => {
-    const clusterUuid = cluster.cluster_uuid;
+    const clusterUuid = get(cluster, 'elasticsearch.cluster.id', cluster.cluster_uuid);
     const metric = KibanaClusterMetric.getMetricFields();
     const params = {
       index: kbnIndexPattern,
@@ -40,7 +40,7 @@ export function getKibanasForClusters(req, kbnIndexPattern, clusters) {
       ignoreUnavailable: true,
       body: {
         query: createQuery({
-          type: 'kibana_stats',
+          types: ['stats', 'kibana_stats'],
           start,
           end,
           clusterUuid,
@@ -5,25 +5,40 @@
  * 2.0.
  */

 import { get } from 'lodash';
+// @ts-ignore
 import { checkParam } from '../error_missing_required';
+// @ts-ignore
 import { createTimeFilter } from '../create_query';
+// @ts-ignore
 import { detectReason } from './detect_reason';
+// @ts-ignore
 import { detectReasonFromException } from './detect_reason_from_exception';
+import { LegacyRequest } from '../../types';
+import { FilebeatResponse } from '../../../common/types/filebeat';

-async function handleResponse(response, req, filebeatIndexPattern, opts) {
-  const result = {
+interface LogType {
+  level?: string;
+  count?: number;
+}
+
+async function handleResponse(
+  response: FilebeatResponse,
+  req: LegacyRequest,
+  filebeatIndexPattern: string,
+  opts: { clusterUuid: string; nodeUuid: string; indexUuid: string; start: number; end: number }
+) {
+  const result: { enabled: boolean; types: LogType[]; reason?: any } = {
     enabled: false,
     types: [],
   };

-  const typeBuckets = get(response, 'aggregations.types.buckets', []);
+  const typeBuckets = response.aggregations?.types?.buckets ?? [];
   if (typeBuckets.length) {
     result.enabled = true;
-    result.types = typeBuckets.map((typeBucket) => {
+    result.types = typeBuckets.map((typeBucket: any) => {
       return {
         type: typeBucket.key.split('.')[1],
-        levels: typeBucket.levels.buckets.map((levelBucket) => {
+        levels: typeBucket.levels.buckets.map((levelBucket: any) => {
           return {
             level: levelBucket.key.toLowerCase(),
             count: levelBucket.doc_count,
@@ -39,9 +54,15 @@ async function handleResponse(response, req, filebeatIndexPattern, opts) {
 }

 export async function getLogTypes(
-  req,
-  filebeatIndexPattern,
-  { clusterUuid, nodeUuid, indexUuid, start, end }
+  req: LegacyRequest,
+  filebeatIndexPattern: string,
+  {
+    clusterUuid,
+    nodeUuid,
+    indexUuid,
+    start,
+    end,
+  }: { clusterUuid: string; nodeUuid: string; indexUuid: string; start: number; end: number }
 ) {
   checkParam(filebeatIndexPattern, 'filebeatIndexPattern in logs/getLogTypes');
@@ -90,7 +111,10 @@ export async function getLogTypes(
   };

   const { callWithRequest } = req.server.plugins.elasticsearch.getCluster('monitoring');
-  let result = {};
+  let result: { enabled: boolean; types: LogType[]; reason?: any } = {
+    enabled: false,
+    types: [],
+  };
   try {
     const response = await callWithRequest(req, 'search', params);
     result = await handleResponse(response, req, filebeatIndexPattern, {
@@ -6,37 +6,59 @@
  */

 import moment from 'moment';
 import { get } from 'lodash';
+// @ts-ignore
 import { checkParam } from '../error_missing_required';
+// @ts-ignore
 import { createTimeFilter } from '../create_query';
+// @ts-ignore
 import { detectReason } from './detect_reason';
+// @ts-ignore
 import { formatUTCTimestampForTimezone } from '../format_timezone';
+// @ts-ignore
 import { getTimezone } from '../get_timezone';
+// @ts-ignore
 import { detectReasonFromException } from './detect_reason_from_exception';
+import { LegacyRequest } from '../../types';
+import { FilebeatResponse } from '../../../common/types/filebeat';

-async function handleResponse(response, req, filebeatIndexPattern, opts) {
-  const result = {
+interface Log {
+  timestamp?: string;
+  component?: string;
+  node?: string;
+  index?: string;
+  level?: string;
+  type?: string;
+  message?: string;
+}
+
+async function handleResponse(
+  response: FilebeatResponse,
+  req: LegacyRequest,
+  filebeatIndexPattern: string,
+  opts: { clusterUuid: string; nodeUuid: string; indexUuid: string; start: number; end: number }
+) {
+  const result: { enabled: boolean; logs: Log[]; reason?: any } = {
     enabled: false,
     logs: [],
   };

   const timezone = await getTimezone(req);
-  const hits = get(response, 'hits.hits', []);
+  const hits = response.hits?.hits ?? [];
   if (hits.length) {
     result.enabled = true;
     result.logs = hits.map((hit) => {
       const source = hit._source;
-      const type = get(source, 'event.dataset').split('.')[1];
-      const utcTimestamp = moment(get(source, '@timestamp')).valueOf();
+      const type = (source.event?.dataset ?? '').split('.')[1];
+      const utcTimestamp = moment(source['@timestamp']).valueOf();

       return {
         timestamp: formatUTCTimestampForTimezone(utcTimestamp, timezone),
-        component: get(source, 'elasticsearch.component'),
-        node: get(source, 'elasticsearch.node.name'),
-        index: get(source, 'elasticsearch.index.name'),
-        level: get(source, 'log.level'),
+        component: source.elasticsearch?.component,
+        node: source.elasticsearch?.node?.name,
+        index: source.elasticsearch?.index?.name,
+        level: source.log?.level,
         type,
-        message: get(source, 'message'),
+        message: source.message,
       };
     });
   } else {
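Typing the Filebeat response lets optional chaining replace lodash `get`, with one behavioural subtlety worth noting: the old `get(source, 'event.dataset').split('.')[1]` throws when the field is missing, while the new form degrades gracefully. A sketch of the difference:

    const source: { event?: { dataset?: string } } = {};

    // Old style: get() would return undefined here, so .split() throws.
    // const type = get(source, 'event.dataset').split('.')[1];

    // New style: coalesce to '' first; ''.split('.') yields [''], so
    // type becomes undefined instead of crashing.
    const type = (source.event?.dataset ?? '').split('.')[1];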
@@ -47,10 +69,16 @@ async function handleResponse(response, req, filebeatIndexPattern, opts) {
 }

 export async function getLogs(
-  config,
-  req,
-  filebeatIndexPattern,
-  { clusterUuid, nodeUuid, indexUuid, start, end }
+  config: { get: (key: string) => any },
+  req: LegacyRequest,
+  filebeatIndexPattern: string,
+  {
+    clusterUuid,
+    nodeUuid,
+    indexUuid,
+    start,
+    end,
+  }: { clusterUuid: string; nodeUuid: string; indexUuid: string; start: number; end: number }
 ) {
   checkParam(filebeatIndexPattern, 'filebeatIndexPattern in logs/getLogs');

@@ -94,9 +122,12 @@ export async function getLogs(

   const { callWithRequest } = req.server.plugins.elasticsearch.getCluster('monitoring');

-  let result = {};
+  let result: { enabled: boolean; logs: Log[]; reason?: any } = {
+    enabled: false,
+    logs: [],
+  };
   try {
-    const response = await callWithRequest(req, 'search', params);
+    const response: FilebeatResponse = await callWithRequest(req, 'search', params);
     result = await handleResponse(response, req, filebeatIndexPattern, {
       clusterUuid,
       nodeUuid,
@@ -13,7 +13,7 @@ import { InfraPluginSetup } from '../../../../infra/server';

 export const initInfraSource = (config: MonitoringConfig, infraPlugin: InfraPluginSetup) => {
   if (infraPlugin) {
-    const filebeatIndexPattern = prefixIndexPattern(config, config.ui.logs.index, '*');
+    const filebeatIndexPattern = prefixIndexPattern(config, config.ui.logs.index, '*', true);
     infraPlugin.defineInternalSourceConfiguration(INFRA_SOURCE_ID, {
       name: 'Elastic Stack Logs',
       logIndices: {
@@ -44,14 +44,14 @@ export function getLogstashForClusters(req, lsIndexPattern, clusters) {
   const config = req.server.config();

   return Bluebird.map(clusters, (cluster) => {
-    const clusterUuid = cluster.cluster_uuid;
+    const clusterUuid = get(cluster, 'elasticsearch.cluster.id', cluster.cluster_uuid);
     const params = {
       index: lsIndexPattern,
       size: 0,
       ignoreUnavailable: true,
       body: {
         query: createQuery({
-          type: 'logstash_stats',
+          types: ['stats', 'logstash_stats'],
           start,
           end,
           clusterUuid,
@@ -148,6 +148,31 @@ export function getLogstashForClusters(req, lsIndexPattern, clusters) {
           },
         },
       },
+      pipelines_nested_mb: {
+        nested: {
+          path: 'logstash.node.stats.pipelines',
+        },
+        aggs: {
+          pipelines: {
+            sum_bucket: {
+              buckets_path: 'queue_types>num_pipelines',
+            },
+          },
+          queue_types: {
+            terms: {
+              field: 'logstash.node.stats.pipelines.queue.type',
+              size: config.get('monitoring.ui.max_bucket_size'),
+            },
+            aggs: {
+              num_pipelines: {
+                cardinality: {
+                  field: 'logstash.node.stats.pipelines.id',
+                },
+              },
+            },
+          },
+        },
+      },
       events_in_total: {
         sum_bucket: {
           buckets_path: 'logstash_uuids>events_in_total_per_node',
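Metricbeat stores per-pipeline Logstash stats as a `nested` field, so the new aggregation must wrap its terms/cardinality pair in a `nested` scope before counting distinct pipeline ids per queue type. A trimmed standalone sketch of that DSL shape (sizes are illustrative):

    // Count distinct pipeline ids per queue type inside a nested field.
    const pipelinesNestedAgg = {
      nested: { path: 'logstash.node.stats.pipelines' },
      aggs: {
        queue_types: {
          terms: { field: 'logstash.node.stats.pipelines.queue.type', size: 10 },
          aggs: {
            num_pipelines: {
              cardinality: { field: 'logstash.node.stats.pipelines.id' },
            },
          },
        },
      },
    };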
@@ -199,6 +224,11 @@ export function getLogstashForClusters(req, lsIndexPattern, clusters) {
         maxUptime = get(aggregations, 'max_uptime.value');
       }

+      let types = get(aggregations, 'pipelines_nested_mb.queue_types.buckets', []);
+      if (!types || types.length === 0) {
+        types = get(aggregations, 'pipelines_nested.queue_types.buckets', []);
+      }
+
       return {
         clusterUuid,
         stats: {
@@ -208,8 +238,10 @@ export function getLogstashForClusters(req, lsIndexPattern, clusters) {
           avg_memory: memory,
           avg_memory_used: memoryUsed,
           max_uptime: maxUptime,
-          pipeline_count: get(aggregations, 'pipelines_nested.pipelines.value', 0),
-          queue_types: getQueueTypes(get(aggregations, 'pipelines_nested.queue_types.buckets', [])),
+          pipeline_count:
+            get(aggregations, 'pipelines_nested_mb.pipelines.value') ||
+            get(aggregations, 'pipelines_nested.pipelines.value', 0),
+          queue_types: getQueueTypes(types),
           versions: logstashVersions.map((versionBucket) => versionBucket.key),
         },
       };
@@ -17,14 +17,15 @@ import { STANDALONE_CLUSTER_CLUSTER_UUID } from '../../../common/constants';
 import { standaloneClusterFilter } from '../standalone_clusters/standalone_cluster_query_filter';

 export function handleResponse(resp: ElasticsearchResponse) {
-  const source = resp.hits?.hits[0]?._source?.logstash_stats;
-  const logstash = source?.logstash;
+  const legacyStats = resp.hits?.hits[0]?._source?.logstash_stats;
+  const mbStats = resp.hits?.hits[0]?._source?.logstash?.node?.stats;
+  const logstash = mbStats?.logstash ?? legacyStats?.logstash;
   const info = merge(logstash, {
-    availability: calculateAvailability(source?.timestamp),
-    events: source?.events,
-    reloads: source?.reloads,
-    queue_type: source?.queue?.type,
-    uptime: source?.jvm?.uptime_in_millis,
+    availability: calculateAvailability(mbStats?.timestamp ?? legacyStats?.timestamp),
+    events: mbStats?.events ?? legacyStats?.events,
+    reloads: mbStats?.reloads ?? legacyStats?.reloads,
+    queue_type: mbStats?.queue?.type ?? legacyStats?.queue?.type,
+    uptime: mbStats?.jvm?.uptime_in_millis ?? legacyStats?.jvm?.uptime_in_millis,
   });
   return info;
 }
@@ -47,11 +48,17 @@ export function getNodeInfo(
     ignoreUnavailable: true,
     filterPath: [
       'hits.hits._source.logstash_stats.events',
+      'hits.hits._source.logstash.node.stats.events',
       'hits.hits._source.logstash_stats.jvm.uptime_in_millis',
+      'hits.hits._source.logstash.node.stats.jvm.uptime_in_millis',
       'hits.hits._source.logstash_stats.logstash',
+      'hits.hits._source.logstash.node.stats.logstash',
       'hits.hits._source.logstash_stats.queue.type',
+      'hits.hits._source.logstash.node.stats.queue.type',
       'hits.hits._source.logstash_stats.reloads',
+      'hits.hits._source.logstash.node.stats.reloads',
       'hits.hits._source.logstash_stats.timestamp',
+      'hits.hits._source.logstash.node.stats.timestamp',
     ],
     body: {
       query: {
@@ -1,78 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0; you may not use this file except in compliance with the Elastic License
- * 2.0.
- */
-
-import { get } from 'lodash';
-import moment from 'moment';
-import { checkParam } from '../error_missing_required';
-import { createQuery } from '../create_query';
-import { calculateAvailability } from '../calculate_availability';
-import { LogstashMetric } from '../metrics';
-
-/*
- * Get detailed info for Logstash's in the cluster
- * for Logstash nodes listing page
- * For each instance:
- *  - name
- *  - status
- *  - JVM memory
- *  - os load average
- *  - events
- *  - config reloads
- */
-export function getNodes(req, lsIndexPattern, { clusterUuid }) {
-  checkParam(lsIndexPattern, 'lsIndexPattern in getNodes');
-
-  const config = req.server.config();
-  const start = moment.utc(req.payload.timeRange.min).valueOf();
-  const end = moment.utc(req.payload.timeRange.max).valueOf();
-
-  const params = {
-    index: lsIndexPattern,
-    size: config.get('monitoring.ui.max_bucket_size'), // FIXME
-    ignoreUnavailable: true,
-    body: {
-      query: createQuery({
-        start,
-        end,
-        clusterUuid,
-        metric: LogstashMetric.getMetricFields(),
-        type: 'logstash_stats',
-      }),
-      collapse: {
-        field: 'logstash_stats.logstash.uuid',
-      },
-      sort: [{ timestamp: { order: 'desc', unmapped_type: 'long' } }],
-      _source: [
-        'timestamp',
-        'logstash_stats.process.cpu.percent',
-        'logstash_stats.jvm.mem.heap_used_percent',
-        'logstash_stats.os.cpu.load_average.1m',
-        'logstash_stats.events.out',
-        'logstash_stats.logstash.http_address',
-        'logstash_stats.logstash.name',
-        'logstash_stats.logstash.host',
-        'logstash_stats.logstash.uuid',
-        'logstash_stats.logstash.status',
-        'logstash_stats.logstash.pipeline',
-        'logstash_stats.reloads',
-        'logstash_stats.logstash.version',
-      ],
-    },
-  };
-
-  const { callWithRequest } = req.server.plugins.elasticsearch.getCluster('monitoring');
-  return callWithRequest(req, 'search', params).then((resp) => {
-    const instances = get(resp, 'hits.hits', []);
-
-    return instances.map((hit) => {
-      return {
-        ...get(hit, '_source.logstash_stats'),
-        availability: calculateAvailability(get(hit, '_source.timestamp')),
-      };
-    });
-  });
-}

x-pack/plugins/monitoring/server/lib/logstash/get_nodes.ts (new file, 168 lines)
@@ -0,0 +1,168 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import moment from 'moment';
+// @ts-ignore
+import { checkParam } from '../error_missing_required';
+// @ts-ignore
+import { createQuery } from '../create_query';
+// @ts-ignore
+import { calculateAvailability } from '../calculate_availability';
+// @ts-ignore
+import { LogstashMetric } from '../metrics';
+import { LegacyRequest } from '../../types';
+import { ElasticsearchResponse } from '../../../common/types/es';
+
+interface Logstash {
+  jvm?: {
+    mem?: {
+      heap_used_percent?: number;
+    };
+  };
+  logstash?: {
+    pipeline?: {
+      batch_size?: number;
+      workers?: number;
+    };
+    http_address?: string;
+    name?: string;
+    host?: string;
+    uuid?: string;
+    version?: string;
+    status?: string;
+  };
+  process?: {
+    cpu?: {
+      percent?: number;
+    };
+  };
+  os?: {
+    cpu?: {
+      load_average?: {
+        '1m'?: number;
+      };
+    };
+  };
+  events?: {
+    out?: number;
+  };
+  reloads?: {
+    failures?: number;
+    successes?: number;
+  };
+  availability?: boolean;
+}
+
+/*
+ * Get detailed info for Logstash's in the cluster
+ * for Logstash nodes listing page
+ * For each instance:
+ *  - name
+ *  - status
+ *  - JVM memory
+ *  - os load average
+ *  - events
+ *  - config reloads
+ */
+export async function getNodes(
+  req: LegacyRequest,
+  lsIndexPattern: string,
+  { clusterUuid }: { clusterUuid: string }
+) {
+  checkParam(lsIndexPattern, 'lsIndexPattern in getNodes');
+
+  const config = req.server.config();
+  const start = moment.utc(req.payload.timeRange.min).valueOf();
+  const end = moment.utc(req.payload.timeRange.max).valueOf();
+
+  const params = {
+    index: lsIndexPattern,
+    size: config.get('monitoring.ui.max_bucket_size'), // FIXME
+    ignoreUnavailable: true,
+    body: {
+      query: createQuery({
+        start,
+        end,
+        clusterUuid,
+        metric: LogstashMetric.getMetricFields(),
+        types: ['stats', 'logstash_stats'],
+      }),
+      collapse: {
+        field: 'logstash_stats.logstash.uuid',
+      },
+      sort: [{ timestamp: { order: 'desc', unmapped_type: 'long' } }],
+      _source: [
+        'timestamp',
+        '@timestamp',
+        'logstash_stats.process.cpu.percent',
+        'logstash.node.stats.process.cpu.percent',
+        'logstash_stats.jvm.mem.heap_used_percent',
+        'logstash.node.stats.jvm.mem.heap_used_percent',
+        'logstash_stats.os.cpu.load_average.1m',
+        'logstash.node.stats.os.cpu.load_average.1m',
+        'logstash_stats.events.out',
+        'logstash.node.stats.events.out',
+        'logstash_stats.logstash.http_address',
+        'logstash.node.stats.logstash.http_address',
+        'logstash_stats.logstash.name',
+        'logstash.node.stats.logstash.name',
+        'logstash_stats.logstash.host',
+        'logstash.node.stats.logstash.host',
+        'logstash_stats.logstash.uuid',
+        'logstash.node.stats.logstash.uuid',
+        'logstash_stats.logstash.status',
+        'logstash.node.stats.logstash.status',
+        'logstash_stats.logstash.pipeline',
+        'logstash.node.stats.logstash.pipeline',
+        'logstash_stats.reloads',
+        'logstash.node.stats.reloads',
+        'logstash_stats.logstash.version',
+        'logstash.node.stats.logstash.version',
+      ],
+    },
+  };
+
+  const { callWithRequest } = req.server.plugins.elasticsearch.getCluster('monitoring');
+  const response: ElasticsearchResponse = await callWithRequest(req, 'search', params);
+  return response.hits?.hits.map((hit) => {
+    const legacyStats = hit._source.logstash_stats;
+    const mbStats = hit._source.logstash?.node?.stats;
+
+    const logstash: Logstash = {
+      logstash: mbStats?.logstash ?? legacyStats?.logstash,
+      jvm: {
+        mem: {
+          heap_used_percent:
+            mbStats?.jvm?.mem?.heap_used_percent ?? legacyStats?.jvm?.mem?.heap_used_percent,
+        },
+      },
+      process: {
+        cpu: {
+          percent: mbStats?.process?.cpu?.percent ?? legacyStats?.process?.cpu?.percent,
+        },
+      },
+      os: {
+        cpu: {
+          load_average: {
+            '1m':
+              mbStats?.os?.cpu?.load_average?.['1m'] ?? legacyStats?.os?.cpu?.load_average?.['1m'],
+          },
+        },
+      },
+      events: {
+        out: mbStats?.events?.out ?? legacyStats?.events?.out,
+      },
+      reloads: {
+        failures: mbStats?.reloads?.failures ?? legacyStats?.reloads?.failures,
+        successes: mbStats?.reloads?.successes ?? legacyStats?.reloads?.successes,
+      },
+      availability: calculateAvailability(hit._source['@timestamp'] ?? hit._source.timestamp),
+    };
+
+    return logstash;
+  });
+}
@@ -103,7 +103,16 @@ async function getPaginatedThroughputData(pipelines, req, lsIndexPattern, throug
       req,
       lsIndexPattern,
       [throughputMetric],
-      [],
+      [
+        {
+          bool: {
+            should: [
+              { term: { type: 'logstash_stats' } },
+              { term: { 'metricset.name': 'stats' } },
+            ],
+          },
+        },
+      ],
       {
         pipeline,
       },
@@ -133,7 +142,13 @@ async function getPaginatedNodesData(pipelines, req, lsIndexPattern, nodesCountM
       req,
      lsIndexPattern,
       [nodesCountMetric],
-      [],
+      [
+        {
+          bool: {
+            should: [{ term: { type: 'logstash_stats' } }, { term: { 'metricset.name': 'stats' } }],
+          },
+        },
+      ],
       { pageOfPipelines: pipelines },
       2
     );
@@ -170,9 +185,24 @@ async function getThroughputPipelines(req, lsIndexPattern, pipelines, throughput
   const metricsResponse = await Promise.all(
     pipelines.map((pipeline) => {
       return new Promise(async (resolve) => {
-        const data = await getMetrics(req, lsIndexPattern, [throughputMetric], [], {
-          pipeline,
-        });
+        const data = await getMetrics(
+          req,
+          lsIndexPattern,
+          [throughputMetric],
+          [
+            {
+              bool: {
+                should: [
+                  { term: { type: 'logstash_stats' } },
+                  { term: { 'metricset.name': 'stats' } },
+                ],
+              },
+            },
+          ],
+          {
+            pipeline,
+          }
+        );

        resolve(reduceData(pipeline, data));
       });
@@ -183,9 +213,21 @@ async function getThroughputPipelines(req, lsIndexPattern, pipelines, throughput
 }

 async function getNodePipelines(req, lsIndexPattern, pipelines, nodesCountMetric) {
-  const metricData = await getMetrics(req, lsIndexPattern, [nodesCountMetric], [], {
-    pageOfPipelines: pipelines,
-  });
+  const metricData = await getMetrics(
+    req,
+    lsIndexPattern,
+    [nodesCountMetric],
+    [
+      {
+        bool: {
+          should: [{ term: { type: 'logstash_stats' } }, { term: { 'metricset.name': 'stats' } }],
+        },
+      },
+    ],
+    {
+      pageOfPipelines: pipelines,
+    }
+  );

   const metricObject = metricData[nodesCountMetric][0];
   const pipelinesData = pipelines.map(({ id }) => {
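Four call sites in this file pass the same `bool.should` filter so the metrics query matches legacy documents by `type` and Metricbeat documents by `metricset.name`. Extracting it into a shared constant would avoid the repetition; a sketch of what that could look like (hypothetical helper, not in the commit):

    // Matches a legacy-collected logstash_stats doc or its metricbeat equivalent.
    const LOGSTASH_STATS_FILTER = {
      bool: {
        should: [
          { term: { type: 'logstash_stats' } },
          { term: { 'metricset.name': 'stats' } },
        ],
      },
    };

    // const data = await getMetrics(req, lsIndexPattern, [metric], [LOGSTASH_STATS_FILTER], opts);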
@@ -28,7 +28,7 @@ export async function getLogstashPipelineIds(
     index: logstashIndexPattern,
     size: 0,
     ignoreUnavailable: true,
-    filterPath: ['aggregations.nest.id.buckets'],
+    filterPath: ['aggregations.nest.id.buckets', 'aggregations.nest_mb.id.buckets'],
     body: {
       query: createQuery({
         start,
@@ -64,14 +64,50 @@ export async function getLogstashPipelineIds(
           },
         },
       },
+      nest_mb: {
+        nested: {
+          path: 'logstash.node.stats.pipelines',
+        },
+        aggs: {
+          id: {
+            terms: {
+              field: 'logstash.node.stats.pipelines.id',
+              size,
+            },
+            aggs: {
+              unnest_mb: {
+                reverse_nested: {},
+                aggs: {
+                  nodes: {
+                    terms: {
+                      field: 'logstash.node.stats.logstash.uuid',
+                      size,
+                    },
+                  },
+                },
+              },
+            },
+          },
+        },
+      },
     },
   };

   const { callWithRequest } = req.server.plugins.elasticsearch.getCluster('monitoring');
   const response = await callWithRequest(req, 'search', params);
-  return get(response, 'aggregations.nest.id.buckets', []).map((bucket) => ({
-    id: bucket.key,
-    nodeIds: get(bucket, 'unnest.nodes.buckets', []).map((item) => item.key),
-  }));
+  let buckets = get(response, 'aggregations.nest_mb.id.buckets', []);
+  if (!buckets || buckets.length === 0) {
+    buckets = get(response, 'aggregations.nest.id.buckets', []);
+  }
+  return buckets.map((bucket) => {
+    let nodeBuckets = get(bucket, 'unnest_mb.nodes.buckets', []);
+    if (!nodeBuckets || nodeBuckets.length === 0) {
+      nodeBuckets = get(bucket, 'unnest.nodes.buckets', []);
+    }
+    return {
+      id: bucket.key,
+      nodeIds: nodeBuckets.map((item) => item.key),
+    };
+  });
 }
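The `reverse_nested` aggregation above hops back from the nested pipeline documents to the root document so node uuids can be collected per pipeline id. A trimmed sketch of that shape in isolation (illustrative sizes):

    // For each nested pipeline id, gather the parent documents' node uuids.
    const nestMbAgg = {
      nested: { path: 'logstash.node.stats.pipelines' },
      aggs: {
        id: {
          terms: { field: 'logstash.node.stats.pipelines.id', size: 10 },
          aggs: {
            unnest_mb: {
              reverse_nested: {}, // back to the root document
              aggs: {
                nodes: { terms: { field: 'logstash.node.stats.logstash.uuid', size: 10 } },
              },
            },
          },
        },
      },
    };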
@@ -157,7 +157,7 @@ export function getPipelineStatsAggregation(
   end = version.lastSeen;

   const query = createQuery({
-    type: 'logstash_stats',
+    types: ['stats', 'logstash_stats'],
     start,
     end,
     metric: LogstashMetric.getMetricFields(),
@@ -28,7 +28,7 @@ function fetchPipelineVersions(...args) {
     },
   ];
   const query = createQuery({
-    type: 'logstash_stats',
+    types: ['stats', 'logstash_stats'],
     metric: LogstashMetric.getMetricFields(),
     clusterUuid,
     filters,
@@ -200,7 +200,7 @@ export function getPipelineVertexStatsAggregation(
   end = version.lastSeen;

   const query = createQuery({
-    type: 'logstash_stats',
+    types: ['stats', 'logstash_stats'],
     start,
     end,
     metric: LogstashMetric.getMetricFields(),
@@ -3511,6 +3511,7 @@ Object {
   "format": "0,0.[00]",
   "getDateHistogramSubAggs": [Function],
   "label": "Pipeline Throughput",
+  "mbField": "logstash.node.stats.pipelines.events.out",
   "timestampField": "logstash_stats.timestamp",
   "units": "e/s",
   "uuidField": "logstash_stats.logstash.uuid",
@@ -263,11 +263,23 @@ export class LogstashPipelineThroughputMetric extends LogstashMetric {
           unit: NORMALIZED_DERIVATIVE_UNIT,
         },
       },
+      metric_mb_deriv: {
+        derivative: {
+          buckets_path: 'sum_mb',
+          gap_policy: 'skip',
+          unit: NORMALIZED_DERIVATIVE_UNIT,
+        },
+      },
       sum: {
         sum_bucket: {
           buckets_path: 'by_node_id>nest>pipeline>events_stats',
         },
       },
+      sum_mb: {
+        sum_bucket: {
+          buckets_path: 'by_node_id>nest_mb>pipeline>events_stats',
+        },
+      },
       by_node_id: {
         terms: {
           field: 'logstash_stats.logstash.uuid',
@@ -296,6 +308,27 @@ export class LogstashPipelineThroughputMetric extends LogstashMetric {
             },
           },
         },
+        nest_mb: {
+          nested: {
+            path: 'logstash.node.stats.pipelines',
+          },
+          aggs: {
+            pipeline: {
+              filter: {
+                term: {
+                  'logstash.node.stats.pipelines.id': pipeline.id,
+                },
+              },
+              aggs: {
+                events_stats: {
+                  max: {
+                    field: this.mbField,
+                  },
+                },
+              },
+            },
+          },
+        },
       },
     },
   };
@@ -342,12 +375,42 @@ export class LogstashPipelineNodeCountMetric extends LogstashMetric {
           },
         },
       },
+      pipelines_mb_nested: {
+        nested: {
+          path: 'logstash.node.stats.pipelines',
+        },
+        aggs: {
+          by_pipeline_id: {
+            terms: {
+              field: 'logstash.node.stats.pipelines.id',
+              size: 1000,
+              ...termAggExtras,
+            },
+            aggs: {
+              to_root: {
+                reverse_nested: {},
+                aggs: {
+                  node_count: {
+                    cardinality: {
+                      field: this.field,
+                    },
+                  },
+                },
+              },
+            },
+          },
+        },
+      },
     };
   };

   this.calculation = (bucket) => {
     const pipelineNodesCounts = {};
-    const pipelineBuckets = _.get(bucket, 'pipelines_nested.by_pipeline_id.buckets', []);
+    const legacyPipelineBuckets = _.get(bucket, 'pipelines_nested.by_pipeline_id.buckets', []);
+    const mbPipelineBuckets = _.get(bucket, 'pipelines_mb_nested.by_pipeline_id.buckets', []);
+    const pipelineBuckets = legacyPipelineBuckets.length
+      ? legacyPipelineBuckets
+      : mbPipelineBuckets;
     pipelineBuckets.forEach((pipelineBucket) => {
       pipelineNodesCounts[pipelineBucket.key] = _.get(pipelineBucket, 'to_root.node_count.value');
     });
@@ -430,6 +430,7 @@ export const metrics = {
     units: 'B',
   }),
   logstash_cluster_pipeline_throughput: new LogstashPipelineThroughputMetric({
+    mbField: 'logstash.node.stats.pipelines.events.out',
     field: 'logstash_stats.pipelines.events.out',
     label: pipelineThroughputLabel,
     description: pipelineThroughputDescription,
@@ -38,18 +38,20 @@ const getRecentMonitoringDocuments = async (req, indexPatterns, clusterUuid, nod
     filters.push({ term: { cluster_uuid: clusterUuid } });
   }

-  const nodesClause = [];
+  const nodesClause = {};
   if (nodeUuid) {
-    nodesClause.push({
-      bool: {
-        should: [
-          { term: { 'node_stats.node_id': nodeUuid } },
-          { term: { 'kibana_stats.kibana.uuid': nodeUuid } },
-          { term: { 'beats_stats.beat.uuid': nodeUuid } },
-          { term: { 'logstash_stats.logstash.uuid': nodeUuid } },
-        ],
-      },
-    });
+    nodesClause.must = [
+      {
+        bool: {
+          should: [
+            { term: { 'node_stats.node_id': nodeUuid } },
+            { term: { 'kibana_stats.kibana.uuid': nodeUuid } },
+            { term: { 'beats_stats.beat.uuid': nodeUuid } },
+            { term: { 'logstash_stats.logstash.uuid': nodeUuid } },
+          ],
+        },
+      },
+    ];
   }

   const params = {
@@ -61,7 +63,7 @@ const getRecentMonitoringDocuments = async (req, indexPatterns, clusterUuid, nod
       query: {
         bool: {
           filter: filters,
-          must: nodesClause,
+          ...nodesClause,
         },
       },
       aggs: {
@@ -77,9 +79,21 @@ const getRecentMonitoringDocuments = async (req, indexPatterns, clusterUuid, nod
             size,
           },
           aggs: {
-            by_timestamp: {
-              max: {
-                field: 'timestamp',
+            single_type: {
+              filter: {
+                bool: {
+                  should: [
+                    { term: { type: 'node_stats' } },
+                    { term: { 'metricset.name': 'node_stats' } },
+                  ],
+                },
+              },
+              aggs: {
+                by_timestamp: {
+                  max: {
+                    field: 'timestamp',
+                  },
+                },
               },
             },
           },
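This `single_type` wrapper recurs for each product below (kibana, beats, logstash): a `filter` sub-aggregation scopes the per-uuid metrics to documents of one logical type, whether identified by the legacy `type` field or by `metricset.name`. A sketch of a helper that would generate the wrapper (hypothetical, not in the commit):

    // Wrap per-product metrics in a filter agg so mixed-schema documents
    // in the same uuid bucket don't pollute the timestamps.
    const singleTypeAgg = (legacyType: string, metricsetName: string) => ({
      filter: {
        bool: {
          should: [
            { term: { type: legacyType } },
            { term: { 'metricset.name': metricsetName } },
          ],
        },
      },
      aggs: {
        by_timestamp: { max: { field: 'timestamp' } },
      },
    });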
@@ -90,9 +104,21 @@ const getRecentMonitoringDocuments = async (req, indexPatterns, clusterUuid, nod
             size,
           },
           aggs: {
-            by_timestamp: {
-              max: {
-                field: 'timestamp',
+            single_type: {
+              filter: {
+                bool: {
+                  should: [
+                    { term: { type: 'kibana_stats' } },
+                    { term: { 'metricset.name': 'stats' } },
+                  ],
+                },
+              },
+              aggs: {
+                by_timestamp: {
+                  max: {
+                    field: 'timestamp',
+                  },
+                },
               },
             },
           },
@@ -103,21 +129,33 @@ const getRecentMonitoringDocuments = async (req, indexPatterns, clusterUuid, nod
             size,
           },
           aggs: {
-            by_timestamp: {
-              max: {
-                field: 'timestamp',
-              },
-            },
-            beat_type: {
-              terms: {
-                field: 'beats_stats.beat.type',
-                size,
-              },
-            },
-            cluster_uuid: {
-              terms: {
-                field: 'cluster_uuid',
-                size,
+            single_type: {
+              filter: {
+                bool: {
+                  should: [
+                    { term: { type: 'beats_stats' } },
+                    { term: { 'metricset.name': 'beats_stats' } },
+                  ],
+                },
+              },
+              aggs: {
+                by_timestamp: {
+                  max: {
+                    field: 'timestamp',
+                  },
+                },
+                beat_type: {
+                  terms: {
+                    field: 'beats_stats.beat.type',
+                    size,
+                  },
+                },
+                cluster_uuid: {
+                  terms: {
+                    field: 'cluster_uuid',
+                    size,
+                  },
+                },
               },
             },
           },
@@ -128,15 +166,27 @@ const getRecentMonitoringDocuments = async (req, indexPatterns, clusterUuid, nod
             size,
           },
           aggs: {
-            by_timestamp: {
-              max: {
-                field: 'timestamp',
-              },
-            },
-            cluster_uuid: {
-              terms: {
-                field: 'cluster_uuid',
-                size,
+            single_type: {
+              filter: {
+                bool: {
+                  should: [
+                    { term: { type: 'logstash_stats' } },
+                    { term: { 'metricset.name': 'stats' } },
+                  ],
+                },
+              },
+              aggs: {
+                by_timestamp: {
+                  max: {
+                    field: 'timestamp',
+                  },
+                },
+                cluster_uuid: {
+                  terms: {
+                    field: 'cluster_uuid',
+                    size,
+                  },
+                },
               },
             },
           },
@@ -224,8 +274,18 @@ function getUuidBucketName(productName) {
   }
 }

+function matchesMetricbeatIndex(metricbeatIndex, index) {
+  if (index.includes(metricbeatIndex)) {
+    return true;
+  }
+  if (metricbeatIndex.includes('*')) {
+    return new RegExp(metricbeatIndex).test(index);
+  }
+  return false;
+}
+
 function isBeatFromAPM(bucket) {
-  const beatType = get(bucket, 'beat_type');
+  const beatType = get(bucket, 'single_type.beat_type');
   if (!beatType) {
     return false;
   }
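matchesMetricbeatIndex treats the configured index pattern as a regular expression whenever it contains `*`. Note that `new RegExp('metricbeat-*')` is not glob semantics: the `*` quantifies the preceding `-`, `.` matches any character, and the test is unanchored, so the check is deliberately permissive rather than exact. If exactness mattered, a stricter glob-to-regex conversion might look like this (hypothetical helper):

    // Convert a glob like 'metricbeat-*' into an anchored regex,
    // escaping regex metacharacters and mapping '*' to '.*'.
    function globToRegExp(glob: string): RegExp {
      const escaped = glob.replace(/[.+?^${}()|[\]\\]/g, '\\$&');
      return new RegExp(`^${escaped.replace(/\*/g, '.*')}$`);
    }

    globToRegExp('metricbeat-*').test('metricbeat-7.12.0'); // true
    globToRegExp('metricbeat-*').test('mymetricbeat-x');    // false (anchored)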
@@ -364,6 +424,7 @@ export const getCollectionStatus = async (
 ) => {
   const config = req.server.config();
   const kibanaUuid = config.get('server.uuid');
+  const metricbeatIndex = config.get('monitoring.ui.metricbeat.index');
   const size = config.get('monitoring.ui.max_bucket_size');
   const hasPermissions = await hasNecessaryPermissions(req);

@@ -399,8 +460,18 @@ export const getCollectionStatus = async (

const status = PRODUCTS.reduce((products, product) => {
const token = product.token || product.name;
const indexBuckets = indicesBuckets.filter((bucket) => bucket.key.includes(token));
const uuidBucketName = getUuidBucketName(product.name);
const indexBuckets = indicesBuckets.filter((bucket) => {
if (bucket.key.includes(token)) {
return true;
}
if (matchesMetricbeatIndex(metricbeatIndex, bucket.key)) {
if (get(bucket, `${uuidBucketName}.buckets`, []).length) {
return true;
}
}
return false;
});

const productStatus = {
totalUniqueInstanceCount: 0,
@@ -422,7 +493,9 @@ export const getCollectionStatus = async (
// If there is a single bucket, then they are fully migrated or fully on the internal collector
else if (indexBuckets.length === 1) {
const singleIndexBucket = indexBuckets[0];
const isFullyMigrated = singleIndexBucket.key.includes(METRICBEAT_INDEX_NAME_UNIQUE_TOKEN);
const isFullyMigrated =
singleIndexBucket.key.includes(METRICBEAT_INDEX_NAME_UNIQUE_TOKEN) ||
matchesMetricbeatIndex(metricbeatIndex, singleIndexBucket.key);

const map = isFullyMigrated ? fullyMigratedUuidsMap : internalCollectorsUuidsMap;
const uuidBuckets = get(singleIndexBucket, `${uuidBucketName}.buckets`, []);
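With this change, a single index bucket counts as fully migrated when its name carries the `-mb-` unique token (`METRICBEAT_INDEX_NAME_UNIQUE_TOKEN`) or matches the configured metricbeat index. A hypothetical `classify` wrapper around the expression above, assuming the default `metricbeat-*` setting and illustrative index names:

// Hypothetical wrapper around the isFullyMigrated expression above.
const classify = (indexName: string) =>
  indexName.includes('-mb-') || matchesMetricbeatIndex('metricbeat-*', indexName);

classify('.monitoring-kibana-7-2021.04.01');    // false -> internal collection
classify('.monitoring-kibana-7-mb-2021.04.01'); // true  -> Metricbeat via .monitoring-*-mb-*
classify('metricbeat-7.12.0-2021.04.01');       // true  -> Metricbeat writing to its own indices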
@@ -430,17 +503,18 @@ export const getCollectionStatus = async (
if (shouldSkipBucket(product, bucket)) {
continue;
}
const { key, by_timestamp: byTimestamp } = bucket;
const { key, single_type: singleType } = bucket;
if (!map[key]) {
const { by_timestamp: byTimestamp } = singleType;
map[key] = { lastTimestamp: get(byTimestamp, 'value') };
if (product.name === KIBANA_SYSTEM_ID && key === kibanaUuid) {
map[key].isPrimary = true;
}
if (product.name === BEATS_SYSTEM_ID) {
map[key].beatType = get(bucket.beat_type, 'buckets[0].key');
map[key].beatType = get(bucket.single_type, 'beat_type.buckets[0].key');
}
if (bucket.cluster_uuid) {
map[key].clusterUuid = get(bucket.cluster_uuid, 'buckets[0].key', '') || null;
if (singleType.cluster_uuid) {
map[key].clusterUuid = get(singleType.cluster_uuid, 'buckets[0].key', '') || null;
}
}
}
@@ -502,7 +576,8 @@ export const getCollectionStatus = async (
for (const indexBucket of indexBuckets) {
const isFullyMigrated =
considerAllInstancesMigrated ||
indexBucket.key.includes(METRICBEAT_INDEX_NAME_UNIQUE_TOKEN);
indexBucket.key.includes(METRICBEAT_INDEX_NAME_UNIQUE_TOKEN) ||
matchesMetricbeatIndex(metricbeatIndex, indexBucket.key);
const map = isFullyMigrated ? fullyMigratedUuidsMap : internalCollectorsUuidsMap;
const otherMap = !isFullyMigrated ? fullyMigratedUuidsMap : internalCollectorsUuidsMap;

@@ -512,7 +587,8 @@ export const getCollectionStatus = async (
continue;
}

const { key, by_timestamp: byTimestamp } = bucket;
const { key, single_type: singleType } = bucket;
const { by_timestamp: byTimestamp } = singleType;
if (!map[key]) {
if (otherMap[key]) {
partiallyMigratedUuidsMap[key] = otherMap[key] || {};
@@ -523,10 +599,10 @@ export const getCollectionStatus = async (
map[key].isPrimary = true;
}
if (product.name === BEATS_SYSTEM_ID) {
map[key].beatType = get(bucket.beat_type, 'buckets[0].key');
map[key].beatType = get(singleType.beat_type, 'buckets[0].key');
}
if (bucket.cluster_uuid) {
map[key].clusterUuid = get(bucket.cluster_uuid, 'buckets[0].key', '') || null;
if (singleType.cluster_uuid) {
map[key].clusterUuid = get(singleType.cluster_uuid, 'buckets[0].key', '') || null;
}
}
}

@@ -34,6 +34,9 @@ const mockReq = (
if (prop === 'server.uuid') {
return 'kibana-1234';
}
if (prop === 'monitoring.ui.metricbeat.index') {
return 'metricbeat-*';
}
}),
};
},
@@ -104,24 +107,27 @@ describe('getCollectionStatus', () => {
buckets: [
{
key: '.monitoring-es-7-2019',
es_uuids: { buckets: [{ key: 'es_1' }] },
es_uuids: { buckets: [{ key: 'es_1', single_type: {} }] },
},
{
key: '.monitoring-kibana-7-2019',
kibana_uuids: { buckets: [{ key: 'kibana_1' }] },
kibana_uuids: { buckets: [{ key: 'kibana_1', single_type: {} }] },
},
{
key: '.monitoring-beats-7-2019',
beats_uuids: {
buckets: [
{ key: 'apm_1', beat_type: { buckets: [{ key: 'apm-server' }] } },
{ key: 'beats_1' },
{
key: 'apm_1',
single_type: { beat_type: { buckets: [{ key: 'apm-server' }] } },
},
{ key: 'beats_1', single_type: {} },
],
},
},
{
key: '.monitoring-logstash-7-2019',
logstash_uuids: { buckets: [{ key: 'logstash_1' }] },
logstash_uuids: { buckets: [{ key: 'logstash_1', single_type: {} }] },
},
],
},
@@ -158,19 +164,19 @@ describe('getCollectionStatus', () => {
buckets: [
{
key: '.monitoring-es-7-mb-2019',
es_uuids: { buckets: [{ key: 'es_1' }] },
es_uuids: { buckets: [{ key: 'es_1', single_type: {} }] },
},
{
key: '.monitoring-kibana-7-mb-2019',
kibana_uuids: { buckets: [{ key: 'kibana_1' }] },
kibana_uuids: { buckets: [{ key: 'kibana_1', single_type: {} }] },
},
{
key: '.monitoring-beats-7-2019',
beats_uuids: { buckets: [{ key: 'beats_1' }] },
beats_uuids: { buckets: [{ key: 'beats_1', single_type: {} }] },
},
{
key: '.monitoring-logstash-7-2019',
logstash_uuids: { buckets: [{ key: 'logstash_1' }] },
logstash_uuids: { buckets: [{ key: 'logstash_1', single_type: {} }] },
},
],
},
@@ -203,23 +209,30 @@ describe('getCollectionStatus', () => {
buckets: [
{
key: '.monitoring-es-7-mb-2019',
es_uuids: { buckets: [{ key: 'es_1' }] },
es_uuids: { buckets: [{ key: 'es_1', single_type: {} }] },
},
{
key: '.monitoring-kibana-7-mb-2019',
kibana_uuids: { buckets: [{ key: 'kibana_1' }, { key: 'kibana_2' }] },
kibana_uuids: {
buckets: [
{ key: 'kibana_1', single_type: {} },
{ key: 'kibana_2', single_type: {} },
],
},
},
{
key: '.monitoring-kibana-7-2019',
kibana_uuids: { buckets: [{ key: 'kibana_1', by_timestamp: { value: 12 } }] },
kibana_uuids: {
buckets: [{ key: 'kibana_1', single_type: { by_timestamp: { value: 12 } } }],
},
},
{
key: '.monitoring-beats-7-2019',
beats_uuids: { buckets: [{ key: 'beats_1' }] },
beats_uuids: { buckets: [{ key: 'beats_1', single_type: {} }] },
},
{
key: '.monitoring-logstash-7-2019',
logstash_uuids: { buckets: [{ key: 'logstash_1' }] },
logstash_uuids: { buckets: [{ key: 'logstash_1', single_type: {} }] },
},
],
},

@@ -18,5 +18,15 @@ export const getStandaloneClusterDefinition = () => {
count: {},
},
},
elasticsearch: {
cluster: {
stats: {
nodes: {
jvm: {},
count: {},
},
},
},
},
};
};

@@ -15,7 +15,25 @@ export async function hasStandaloneClusters(req, indexPatterns) {
return list;
}, []);

const filters = [standaloneClusterFilter];
const filters = [
standaloneClusterFilter,
{
bool: {
should: [
{
terms: {
type: ['logstash_stats', 'logstash_state', 'beats_stats', 'beats_state'],
},
},
{
terms: {
'metricset.name': ['logstash_stats', 'logstash_state', 'beats_stats', 'beats_state'],
},
},
],
},
},
];
// Not every page will contain a time range so check for that
if (req.payload.timeRange) {
const start = req.payload.timeRange.min;
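The standalone-cluster check now pairs the existing standalone filter with a schema-agnostic restriction to Logstash and Beats documents. A sketch of how such a filters array is typically consumed, assuming the bool/filter wrapping used by the search helpers in this plugin (the exact request shape below is an assumption, not quoted from the PR):

// Sketch only: standaloneClusterFilter keeps docs without a real cluster
// association, and the added bool.should keeps only Logstash/Beats docs
// from either collection path (legacy `type` or Metricbeat `metricset.name`).
const params = {
  index: indexPatterns,
  size: 0,
  body: {
    query: { bool: { filter: filters } },
  },
};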
@@ -333,6 +333,7 @@ export class MonitoringPlugin
}
},
server: {
log: this.log,
route: () => {},
config: legacyConfigWrapper,
newPlatform: {

@@ -13,7 +13,11 @@ import { handleError } from '../../../../lib/errors/handle_error';
// @ts-ignore
import { prefixIndexPattern } from '../../../../lib/ccs_utils';
import { INDEX_PATTERN_ELASTICSEARCH } from '../../../../../common/constants';
import { ElasticsearchResponse, ElasticsearchSource } from '../../../../../common/types/es';
import {
ElasticsearchResponse,
ElasticsearchLegacySource,
ElasticsearchMetricbeatSource,
} from '../../../../../common/types/es';
import { LegacyRequest } from '../../../../types';

function getBucketScript(max: string, min: string) {
@@ -97,9 +101,13 @@ function buildRequest(
size: maxBucketSize,
filterPath: [
'hits.hits.inner_hits.by_shard.hits.hits._source.ccr_stats.read_exceptions',
'hits.hits.inner_hits.by_shard.hits.hits._source.elasticsearch.ccr.read_exceptions',
'hits.hits.inner_hits.by_shard.hits.hits._source.ccr_stats.follower_index',
'hits.hits.inner_hits.by_shard.hits.hits._source.elasticsearch.ccr.follower.index',
'hits.hits.inner_hits.by_shard.hits.hits._source.ccr_stats.shard_id',
'hits.hits.inner_hits.by_shard.hits.hits._source.elasticsearch.ccr.follower.shard.number',
'hits.hits.inner_hits.by_shard.hits.hits._source.ccr_stats.time_since_last_read_millis',
'hits.hits.inner_hits.by_shard.hits.hits._source.elasticsearch.ccr.follower.time_since_last_read.ms',
'aggregations.by_follower_index.buckets.key',
'aggregations.by_follower_index.buckets.leader_index.buckets.key',
'aggregations.by_follower_index.buckets.leader_index.buckets.remote_cluster.buckets.key',
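The filterPath doubles every legacy `ccr_stats` field with its Metricbeat `elasticsearch.ccr` counterpart. The correspondence, read directly from the pairs above (the object itself is illustrative and does not exist in the PR):

// Legacy internal-collection field -> Metricbeat field, per the filterPath pairs.
const legacyToMetricbeat: Record<string, string> = {
  'ccr_stats.read_exceptions': 'elasticsearch.ccr.read_exceptions',
  'ccr_stats.follower_index': 'elasticsearch.ccr.follower.index',
  'ccr_stats.shard_id': 'elasticsearch.ccr.follower.shard.number',
  'ccr_stats.time_since_last_read_millis':
    'elasticsearch.ccr.follower.time_since_last_read.ms',
};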
@@ -115,10 +123,23 @@ function buildRequest(
bool: {
must: [
{
term: {
type: {
value: 'ccr_stats',
},
bool: {
should: [
{
term: {
type: {
value: 'ccr_stats',
},
},
},
{
term: {
'metricset.name': {
value: 'ccr',
},
},
},
],
},
},
{
@@ -209,29 +230,28 @@ export function ccrRoute(server: {

try {
const { callWithRequest } = req.server.plugins.elasticsearch.getCluster('monitoring');
const response: ElasticsearchResponse = await callWithRequest(
req,
'search',
buildRequest(req, config, esIndexPattern)
);
const params = buildRequest(req, config, esIndexPattern);
const response: ElasticsearchResponse = await callWithRequest(req, 'search', params);

if (!response || Object.keys(response).length === 0) {
return { data: [] };
}

const fullStats: {
[key: string]: Array<NonNullable<ElasticsearchSource['ccr_stats']>>;
[key: string]: Array<
| NonNullable<ElasticsearchLegacySource['ccr_stats']>
| NonNullable<ElasticsearchMetricbeatSource['elasticsearch']>['ccr']
>;
} =
response.hits?.hits.reduce((accum, hit) => {
const innerHits = hit.inner_hits?.by_shard.hits?.hits ?? [];
const innerHitsSource = innerHits.map(
(innerHit) =>
innerHit._source.ccr_stats as NonNullable<ElasticsearchSource['ccr_stats']>
);
const grouped = groupBy(
innerHitsSource,
(stat) => `${stat.follower_index}:${stat.shard_id}`
);
const grouped = groupBy(innerHits, (innerHit) => {
if (innerHit._source.ccr_stats) {
return `${innerHit._source.ccr_stats.follower_index}:${innerHit._source.ccr_stats.shard_id}`;
} else if (innerHit._source.elasticsearch?.ccr?.follower?.shard) {
return `${innerHit._source.elasticsearch?.ccr?.follower?.index}:${innerHit._source.elasticsearch?.ccr?.follower?.shard?.number}`;
}
});

return {
...accum,
@@ -268,14 +288,25 @@ export function ccrRoute(server: {

stat.shards = get(bucket, 'by_shard_id.buckets').reduce(
(accum2: any, shardBucket: any) => {
const fullStat = fullStats[`${bucket.key}:${shardBucket.key}`][0] ?? {};
const fullStat: any = fullStats[`${bucket.key}:${shardBucket.key}`][0];
const fullLegacyStat: ElasticsearchLegacySource = fullStat._source?.ccr_stats
? fullStat._source
: null;
const fullMbStat: ElasticsearchMetricbeatSource = fullStat._source?.elasticsearch?.ccr
? fullStat._source
: null;
const readExceptions =
fullLegacyStat?.ccr_stats?.read_exceptions ??
fullMbStat?.elasticsearch?.ccr?.read_exceptions ??
[];
const shardStat = {
shardId: shardBucket.key,
error: fullStat.read_exceptions?.length
? fullStat.read_exceptions[0].exception?.type
: null,
error: readExceptions.length ? readExceptions[0].exception?.type : null,
opsSynced: get(shardBucket, 'ops_synced.value'),
syncLagTime: fullStat.time_since_last_read_millis,
syncLagTime:
// @ts-ignore
fullLegacyStat?.ccr_stats?.time_since_last_read_millis ??
fullMbStat?.elasticsearch?.ccr?.follower?.time_since_last_read?.ms,
syncLagOps: get(shardBucket, 'lag_ops.value'),
syncLagOpsLeader: get(shardBucket, 'leader_lag_ops.value'),
syncLagOpsFollower: get(shardBucket, 'follower_lag_ops.value'),

@@ -37,9 +37,13 @@ async function getCcrStat(req: LegacyRequest, esIndexPattern: string, filters: u
size: 1,
filterPath: [
'hits.hits._source.ccr_stats',
'hits.hits._source.elasticsearch.ccr',
'hits.hits._source.timestamp',
'hits.hits._source.@timestamp',
'hits.hits.inner_hits.oldest.hits.hits._source.ccr_stats.operations_written',
'hits.hits.inner_hits.oldest.hits.hits._source.elasticsearch.ccr.follower.operations_written',
'hits.hits.inner_hits.oldest.hits.hits._source.ccr_stats.failed_read_requests',
'hits.hits.inner_hits.oldest.hits.hits._source.elasticsearch.ccr.requests.failed.read.count',
],
body: {
sort: [{ timestamp: { order: 'desc', unmapped_type: 'long' } }],
@@ -102,10 +106,23 @@ export function ccrShardRoute(server: { route: (p: any) => void; config: () => {

const filters = [
{
term: {
type: {
value: 'ccr_stats',
},
bool: {
should: [
{
term: {
type: {
value: 'ccr_stats',
},
},
},
{
term: {
'metricset.name': {
value: 'ccr',
},
},
},
],
},
},
{
@@ -138,16 +155,23 @@ export function ccrShardRoute(server: { route: (p: any) => void; config: () => {
getCcrStat(req, esIndexPattern, filters),
]);

const stat = ccrResponse.hits?.hits[0]?._source.ccr_stats ?? {};
const oldestStat =
ccrResponse.hits?.hits[0].inner_hits?.oldest.hits?.hits[0]?._source.ccr_stats ?? {};
const legacyStat = ccrResponse.hits?.hits[0]?._source.ccr_stats;
const mbStat = ccrResponse.hits?.hits[0]?._source.elasticsearch?.ccr;
const oldestLegacyStat =
ccrResponse.hits?.hits[0].inner_hits?.oldest.hits?.hits[0]?._source.ccr_stats;
const oldestMBStat =
ccrResponse.hits?.hits[0].inner_hits?.oldest.hits?.hits[0]?._source.elasticsearch?.ccr;

const leaderIndex = mbStat ? mbStat?.leader?.index : legacyStat?.leader_index;

return {
metrics,
stat,
formattedLeader: getFormattedLeaderIndex(stat.leader_index ?? ''),
timestamp: ccrResponse.hits?.hits[0]?._source.timestamp,
oldestStat,
stat: mbStat ?? legacyStat,
formattedLeader: getFormattedLeaderIndex(leaderIndex ?? ''),
timestamp:
ccrResponse.hits?.hits[0]?._source['@timestamp'] ??
ccrResponse.hits?.hits[0]?._source.timestamp,
oldestStat: oldestMBStat ?? oldestLegacyStat,
};
} catch (err) {
return handleError(err, req);

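Worth noting in the hunk above: the old code defaulted `stat` and `oldestStat` to `{}`, which would defeat a `??` fallback chain, since an empty object is not nullish. Dropping the defaults lets optional chaining yield `undefined`, so the Metricbeat-first chain falls through cleanly. A condensed sketch, where `source` stands in for `ccrResponse.hits?.hits[0]?._source`:

// With optional chaining, a missing branch yields undefined and ?? falls
// through to the legacy shape; an `?? {}` default would short-circuit it.
const stat = mbStat ?? legacyStat;                             // MB shape wins when present
const timestamp = source?.['@timestamp'] ?? source?.timestamp; // Metricbeat writes @timestamp
const oldestStat = oldestMBStat ?? oldestLegacyStat;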
@@ -51,7 +51,8 @@ export function esIndexRoute(server) {
const filebeatIndexPattern = prefixIndexPattern(
config,
config.get('monitoring.ui.logs.index'),
'*'
'*',
true
);
const isAdvanced = req.payload.is_advanced;
const metricSet = isAdvanced ? metricSetAdvanced : metricSetOverview;
@@ -78,8 +79,19 @@ export function esIndexRoute(server) {
let shardAllocation;
if (!isAdvanced) {
// TODO: Why so many fields needed for a single component (shard legend)?
const shardFilter = { term: { 'shard.index': indexUuid } };
const stateUuid = get(cluster, 'cluster_state.state_uuid');
const shardFilter = {
bool: {
should: [
{ term: { 'shard.index': indexUuid } },
{ term: { 'elasticsearch.index.name': indexUuid } },
],
},
};
const stateUuid = get(
cluster,
'elasticsearch.cluster.stats.state.state_uuid',
get(cluster, 'cluster_state.state_uuid')
);
const allocationOptions = {
shardFilter,
stateUuid,
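The `stateUuid` lookup uses lodash `get` with a second `get` as the default argument. Because the default is an ordinary argument, the fallback lookup always executes, but its result is only used when the first path resolves to `undefined`. A minimal sketch of the idiom, using the route's `cluster` object:

import { get } from 'lodash';

// get(obj, path, defaultValue): defaultValue is evaluated eagerly, but only
// used when the primary path is missing. Here the Metricbeat shape is tried
// first and the legacy shape supplies the fallback value.
const stateUuid = get(
  cluster,
  'elasticsearch.cluster.stats.state.state_uuid', // Metricbeat document shape
  get(cluster, 'cluster_state.state_uuid') // legacy internal-collection shape
);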
@@ -52,7 +52,8 @@ export function esNodeRoute(server) {
const filebeatIndexPattern = prefixIndexPattern(
config,
config.get('monitoring.ui.logs.index'),
'*'
'*',
true
);
const isAdvanced = req.payload.is_advanced;

@@ -76,7 +77,11 @@ export function esNodeRoute(server) {
try {
const cluster = await getClusterStats(req, esIndexPattern, clusterUuid);

const clusterState = get(cluster, 'cluster_state', { nodes: {} });
const clusterState = get(
cluster,
'cluster_state',
get(cluster, 'elasticsearch.cluster.stats.state')
);
const shardStats = await getShardStats(req, esIndexPattern, cluster, {
includeIndices: true,
includeNodes: true,
@@ -91,13 +96,23 @@ export function esNodeRoute(server) {
const metrics = await getMetrics(req, esIndexPattern, metricSet, [
{ term: { 'source_node.uuid': nodeUuid } },
]);

let logs;
let shardAllocation;
if (!isAdvanced) {
// TODO: Why so many fields needed for a single component (shard legend)?
const shardFilter = { term: { 'shard.node': nodeUuid } };
const stateUuid = get(cluster, 'cluster_state.state_uuid');
const shardFilter = {
bool: {
should: [
{ term: { 'shard.node': nodeUuid } },
{ term: { 'elasticsearch.node.name': nodeUuid } },
],
},
};
const stateUuid = get(
cluster,
'cluster_state.state_uuid',
get(cluster, 'elasticsearch.cluster.stats.state.state_uuid')
);
const allocationOptions = {
shardFilter,
stateUuid,

@@ -43,7 +43,8 @@ export function esOverviewRoute(server) {
const filebeatIndexPattern = prefixIndexPattern(
config,
config.get('monitoring.ui.logs.index'),
'*'
'*',
true
);

const start = req.payload.timeRange.min;

@@ -53,7 +54,7 @@ export function esOverviewRoute(server) {
const [clusterStats, metrics, shardActivity, logs] = await Promise.all([
getClusterStats(req, esIndexPattern, clusterUuid),
getMetrics(req, esIndexPattern, metricSet),
getLastRecovery(req, esIndexPattern),
getLastRecovery(req, esIndexPattern, config.get('monitoring.ui.max_bucket_size')),
getLogs(config, req, filebeatIndexPattern, { clusterUuid, start, end }),
]);
const indicesUnassignedShardStats = await getIndicesUnassignedShardStats(
@@ -62,12 +63,13 @@ export function esOverviewRoute(server) {
clusterStats
);

return {
const result = {
clusterStatus: getClusterStatus(clusterStats, indicesUnassignedShardStats),
metrics,
logs,
shardActivity,
};
return result;
} catch (err) {
throw handleError(err, req);
}

@@ -43,7 +43,16 @@ export function kibanaOverviewRoute(server) {
try {
const [clusterStatus, metrics] = await Promise.all([
getKibanaClusterStatus(req, kbnIndexPattern, { clusterUuid }),
getMetrics(req, kbnIndexPattern, metricSet),
getMetrics(req, kbnIndexPattern, metricSet, [
{
bool: {
should: [
{ term: { type: 'kibana_stats' } },
{ term: { 'metricset.name': 'stats' } },
],
},
},
]),
]);

return {

@@ -72,7 +72,16 @@ export function logstashNodeRoute(server) {

try {
const [metrics, nodeSummary] = await Promise.all([
getMetrics(req, lsIndexPattern, metricSet),
getMetrics(req, lsIndexPattern, metricSet, [
{
bool: {
should: [
{ term: { type: 'logstash_stats' } },
{ term: { 'metricset.name': 'stats' } },
],
},
},
]),
getNodeInfo(req, lsIndexPattern, { clusterUuid, logstashUuid }),
]);

@@ -52,7 +52,16 @@ export function logstashOverviewRoute(server) {

try {
const [metrics, clusterStatus] = await Promise.all([
getMetrics(req, lsIndexPattern, metricSet),
getMetrics(req, lsIndexPattern, metricSet, [
{
bool: {
should: [
{ term: { type: 'logstash_stats' } },
{ term: { 'metricset.name': 'stats' } },
],
},
},
]),
getClusterStatus(req, lsIndexPattern, { clusterUuid }),
]);

@@ -288,7 +288,10 @@ export async function fetchLogstashStats(
{ terms: { cluster_uuid: clusterUuids } },
{
bool: {
must: { term: { type: 'logstash_stats' } },
should: [
{ term: { type: 'logstash_stats' } },
{ term: { 'metricset.name': 'stats' } },
],
},
},
],

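One behavioral detail in the hunk above: swapping `must` for `should` inside this bool, which then contains no other clauses, changes its meaning from "must be a legacy logstash_stats document" to "match either schema", because a bool query containing only `should` clauses defaults to `minimum_should_match: 1`. A minimal sketch of the two query fragments:

// Before: only internal-collection documents qualified.
const legacyOnly = { bool: { must: { term: { type: 'logstash_stats' } } } };

// After: either collection path qualifies; minimum_should_match defaults to 1
// when a bool query has only should clauses, so at least one term must hit.
const eitherSchema = {
  bool: {
    should: [
      { term: { type: 'logstash_stats' } },
      { term: { 'metricset.name': 'stats' } },
    ],
  },
};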
@@ -119,6 +119,7 @@ export interface LegacyRequest {
}

export interface LegacyServer {
log: Logger;
route: (params: any) => void;
config: () => {
get: (key: string) => string | undefined;

@@ -9,7 +9,7 @@
"config": {
"container": false
},
"timeOfLastEvent": "2018-08-31T13:59:21.201Z"
"timeOfLastEvent": "2018-08-31T13:59:21.199Z"
},
"metrics": {
"apm_cpu": [

@@ -147,7 +147,7 @@
"isDerivative": false
},
"data": [
[1535723880000, 5212816],
[1535723880000, 4996912],
[1535723910000, 4996912],
[1535723940000, 4886176]
]
@@ -957,7 +957,7 @@
"data": [
[
1535723880000,
5212816
4996912
],
[
1535723910000,

@@ -1003,6 +1003,6 @@
"config": {
"container": false
},
"timeOfLastEvent": "2018-08-31T13:59:21.201Z"
"timeOfLastEvent": "2018-08-31T13:59:21.199Z"
}
}

@@ -8,7 +8,10 @@
export default function ({ loadTestFile }) {
  describe('APM', () => {
    loadTestFile(require.resolve('./overview'));
    loadTestFile(require.resolve('./overview_mb'));
    loadTestFile(require.resolve('./instances'));
    loadTestFile(require.resolve('./instances_mb'));
    loadTestFile(require.resolve('./instance'));
    loadTestFile(require.resolve('./instance_mb'));
  });
}

@@ -0,0 +1,42 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import expect from '@kbn/expect';
import apmInstanceFixture from './fixtures/instance';

export default function ({ getService }) {
  const supertest = getService('supertest');
  const esArchiver = getService('esArchiver');

  describe('instance detail mb', () => {
    const archive = 'monitoring/apm_mb';
    const timeRange = {
      min: '2018-08-31T12:59:49.104Z',
      max: '2018-08-31T13:59:49.104Z',
    };

    before('load archive', () => {
      return esArchiver.load(archive);
    });

    after('unload archive', () => {
      return esArchiver.unload(archive);
    });

    it('should get apm instance data', async () => {
      const { body } = await supertest
        .post(
          '/api/monitoring/v1/clusters/GUtE4UwgSR-XUICRDEFKkA/apm/9b16f434-2092-4983-a401-80a2b61c79d6'
        )
        .set('kbn-xsrf', 'xxx')
        .send({ timeRange })
        .expect(200);

      expect(body).to.eql(apmInstanceFixture);
    });
  });
}

@@ -39,7 +39,7 @@ export default function ({ getService }) {
apms: {
total: 2,
},
timeOfLastEvent: '2018-08-31T13:59:21.201Z',
timeOfLastEvent: '2018-08-31T13:59:21.199Z',
},
apms: [
{

@@ -52,7 +52,7 @@ export default function ({ getService }) {
errors: 0,
memory: 3445920,
version: '7.0.0-alpha1',
time_of_last_event: '2018-08-31T13:59:21.201Z',
time_of_last_event: '2018-08-31T13:59:21.199Z',
},
{
uuid: '9b16f434-2092-4983-a401-80a2b61c79d6',

@@ -64,7 +64,7 @@ export default function ({ getService }) {
errors: 0,
memory: 3087640,
version: '7.0.0-alpha1',
time_of_last_event: '2018-08-31T13:59:21.165Z',
time_of_last_event: '2018-08-31T13:59:21.163Z',
},
],
};

@@ -0,0 +1,75 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import expect from '@kbn/expect';

export default function ({ getService }) {
  const supertest = getService('supertest');
  const esArchiver = getService('esArchiver');

  describe('list mb', () => {
    const archive = 'monitoring/apm_mb';
    const timeRange = {
      min: '2018-08-31T12:59:49.104Z',
      max: '2018-08-31T13:59:49.104Z',
    };

    before('load clusters archive', () => {
      return esArchiver.load(archive);
    });

    after('unload clusters archive', () => {
      return esArchiver.unload(archive);
    });

    it('should load multiple clusters', async () => {
      const { body } = await supertest
        .post('/api/monitoring/v1/clusters/GUtE4UwgSR-XUICRDEFKkA/apm/instances')
        .set('kbn-xsrf', 'xxx')
        .send({ timeRange })
        .expect(200);

      const expected = {
        stats: {
          totalEvents: 18,
          apms: {
            total: 2,
          },
          timeOfLastEvent: '2018-08-31T13:59:21.199Z',
        },
        apms: [
          {
            uuid: '55f1089b-43b1-472a-919a-344667bae595',
            name: 'd06490170f2b',
            type: 'Apm-server',
            output: 'Elasticsearch',
            total_events_rate: 0.0033333333333333335,
            bytes_sent_rate: 5.7316666666666665,
            errors: 0,
            memory: 3445920,
            version: '7.0.0-alpha1',
            time_of_last_event: '2018-08-31T13:59:21.199Z',
          },
          {
            uuid: '9b16f434-2092-4983-a401-80a2b61c79d6',
            name: '01323afae1fb',
            type: 'Apm-server',
            output: 'Elasticsearch',
            total_events_rate: 0.0016666666666666668,
            bytes_sent_rate: 2.9105555555555553,
            errors: 0,
            memory: 3087640,
            version: '7.0.0-alpha1',
            time_of_last_event: '2018-08-31T13:59:21.163Z',
          },
        ],
      };

      expect(body).to.eql(expected);
    });
  });
}

@@ -0,0 +1,40 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import expect from '@kbn/expect';
import apmClusterFixture from './fixtures/cluster';

export default function ({ getService }) {
  const supertest = getService('supertest');
  const esArchiver = getService('esArchiver');

  describe('overview mb', () => {
    const archive = 'monitoring/apm_mb';
    const timeRange = {
      min: '2018-08-31T12:59:49.104Z',
      max: '2018-08-31T13:59:49.104Z',
    };

    before('load archive', () => {
      return esArchiver.load(archive);
    });

    after('unload archive', () => {
      return esArchiver.unload(archive);
    });

    it('should summarize apm cluster with metrics', async () => {
      const { body } = await supertest
        .post('/api/monitoring/v1/clusters/GUtE4UwgSR-XUICRDEFKkA/apm')
        .set('kbn-xsrf', 'xxx')
        .send({ timeRange })
        .expect(200);

      expect(body).to.eql(apmClusterFixture);
    });
  });
}

@@ -13,7 +13,7 @@ export default function ({ getService }) {
const esArchiver = getService('esArchiver');

describe('instance detail', () => {
const archive = 'monitoring/beats-with-restarted-instance';
const archive = 'monitoring/beats_with_restarted_instance';
const timeRange = {
min: '2018-02-09T20:49:00Z',
max: '2018-02-09T21:50:00Z',
Some files were not shown because too many files have changed in this diff.