mirror of
https://github.com/elastic/kibana.git
synced 2025-04-23 09:19:04 -04:00
[8.0] [Stack Monitoring] update rules queries to support metricbeat 8.0 [fixed PR] (#125748) (#125877)
* [Stack Monitoring] update rules queries to support metricbeat 8.0 [fixed PR] (#125748) * update queries to use metricset.name and fix paths * fix ccr query * fix cluster health query paths * update elasticsearch version mismatch * update fetchLicense paths * use ecs properties in fetch_index_shard_size * Clarified comment about createDatasetFilter * Re-editing a clarifying comment * Small comment edit * Revert "use ecs properties in fetch_index_shard_size" This reverts commit cdf8b2493e
. * simplify fetch_index_shard_size document handling Co-authored-by: neptunian <sandra.gonzales@elastic.co> Co-authored-by: klacabane <kevin.lacabane@elastic.co> (cherry picked from commit 7126bc9cc1
) # Conflicts: # x-pack/plugins/monitoring/server/lib/alerts/create_dataset_query_filter.ts # x-pack/plugins/monitoring/server/lib/alerts/fetch_ccr_read_exceptions.test.ts # x-pack/plugins/monitoring/server/lib/alerts/fetch_ccr_read_exceptions.ts # x-pack/plugins/monitoring/server/lib/alerts/fetch_cluster_health.test.ts # x-pack/plugins/monitoring/server/lib/alerts/fetch_cluster_health.ts # x-pack/plugins/monitoring/server/lib/alerts/fetch_clusters.test.ts # x-pack/plugins/monitoring/server/lib/alerts/fetch_clusters.ts # x-pack/plugins/monitoring/server/lib/alerts/fetch_cpu_usage_node_stats.test.ts # x-pack/plugins/monitoring/server/lib/alerts/fetch_cpu_usage_node_stats.ts # x-pack/plugins/monitoring/server/lib/alerts/fetch_disk_usage_node_stats.test.ts # x-pack/plugins/monitoring/server/lib/alerts/fetch_disk_usage_node_stats.ts # x-pack/plugins/monitoring/server/lib/alerts/fetch_elasticsearch_versions.test.ts # x-pack/plugins/monitoring/server/lib/alerts/fetch_elasticsearch_versions.ts # x-pack/plugins/monitoring/server/lib/alerts/fetch_index_shard_size.test.ts # x-pack/plugins/monitoring/server/lib/alerts/fetch_index_shard_size.ts # x-pack/plugins/monitoring/server/lib/alerts/fetch_kibana_versions.test.ts # x-pack/plugins/monitoring/server/lib/alerts/fetch_kibana_versions.ts # x-pack/plugins/monitoring/server/lib/alerts/fetch_licenses.test.ts # x-pack/plugins/monitoring/server/lib/alerts/fetch_licenses.ts # x-pack/plugins/monitoring/server/lib/alerts/fetch_logstash_versions.test.ts # x-pack/plugins/monitoring/server/lib/alerts/fetch_logstash_versions.ts # x-pack/plugins/monitoring/server/lib/alerts/fetch_memory_usage_node_stats.test.ts # x-pack/plugins/monitoring/server/lib/alerts/fetch_memory_usage_node_stats.ts # x-pack/plugins/monitoring/server/lib/alerts/fetch_missing_monitoring_data.test.ts # x-pack/plugins/monitoring/server/lib/alerts/fetch_missing_monitoring_data.ts # x-pack/plugins/monitoring/server/lib/alerts/fetch_nodes_from_cluster_stats.test.ts # 
x-pack/plugins/monitoring/server/lib/alerts/fetch_nodes_from_cluster_stats.ts # x-pack/plugins/monitoring/server/lib/alerts/fetch_thread_pool_rejections_stats.ts * import createDatasetFilter * target correct filter property Co-authored-by: Jason Rhodes <jason.matthew.rhodes@gmail.com>
This commit is contained in:
parent
9863d0bd97
commit
2dfb0c40bd
16 changed files with 181 additions and 149 deletions
|
@ -0,0 +1,45 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
/**
|
||||
* We expect that metricset and dataset will be aligned where dataset
|
||||
* is the full {product}.{metricset}, whereas metricset doesn't include
|
||||
* the product, e.g. dataset is elasticsearch.cluster_stats and metricset is
|
||||
* just cluster_stats.
|
||||
*
|
||||
* Unfortunately, this doesn't *always* seem to be the case, and sometimes
|
||||
* the "metricset" value is different. For this reason, we've left these
|
||||
* two as separate arguments to this function, at least until this is resolved.
|
||||
*
|
||||
* More info: https://github.com/elastic/kibana/pull/119112/files#r772605936
|
||||
*
|
||||
* @param {string} type matches legacy data
|
||||
* @param {string} metricset matches standalone beats
|
||||
* @param {string} dataset matches agent integration data streams
|
||||
*/
|
||||
export const createDatasetFilter = (type: string, metricset: string, dataset: string) => ({
|
||||
bool: {
|
||||
should: [
|
||||
{
|
||||
term: {
|
||||
type,
|
||||
},
|
||||
},
|
||||
{
|
||||
term: {
|
||||
'metricset.name': metricset,
|
||||
},
|
||||
},
|
||||
{
|
||||
term: {
|
||||
'data_stream.dataset': dataset,
|
||||
},
|
||||
},
|
||||
],
|
||||
minimum_should_match: 1,
|
||||
},
|
||||
});
|
|
@ -8,6 +8,7 @@
|
|||
import { ElasticsearchClient } from 'kibana/server';
|
||||
import { get } from 'lodash';
|
||||
import { CCRReadExceptionsStats } from '../../../common/types/alerts';
|
||||
import { createDatasetFilter } from './create_dataset_query_filter';
|
||||
|
||||
export async function fetchCCRReadExceptions(
|
||||
esClient: ElasticsearchClient,
|
||||
|
@ -26,20 +27,35 @@ export async function fetchCCRReadExceptions(
|
|||
bool: {
|
||||
filter: [
|
||||
{
|
||||
nested: {
|
||||
path: 'ccr_stats.read_exceptions',
|
||||
query: {
|
||||
exists: {
|
||||
field: 'ccr_stats.read_exceptions.exception',
|
||||
bool: {
|
||||
should: [
|
||||
{
|
||||
nested: {
|
||||
ignore_unmapped: true,
|
||||
path: 'ccr_stats.read_exceptions',
|
||||
query: {
|
||||
exists: {
|
||||
field: 'ccr_stats.read_exceptions.exception',
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
term: {
|
||||
type: 'ccr_stats',
|
||||
{
|
||||
nested: {
|
||||
ignore_unmapped: true,
|
||||
path: 'elasticsearch.ccr.read_exceptions',
|
||||
query: {
|
||||
exists: {
|
||||
field: 'elasticsearch.ccr.read_exceptions.exception',
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
minimum_should_match: 1,
|
||||
},
|
||||
},
|
||||
createDatasetFilter('ccr_stats', 'ccr', 'elasticsearch.ccr'),
|
||||
{
|
||||
range: {
|
||||
timestamp: {
|
||||
|
@ -78,9 +94,13 @@ export async function fetchCCRReadExceptions(
|
|||
_source: {
|
||||
includes: [
|
||||
'cluster_uuid',
|
||||
'elasticsearch.cluster.id',
|
||||
'ccr_stats.read_exceptions',
|
||||
'elasticsearch.ccr.read_exceptions',
|
||||
'ccr_stats.shard_id',
|
||||
'elasticsearch.ccr.shard_id',
|
||||
'ccr_stats.leader_index',
|
||||
'elasticsearch.ccr.leader.index',
|
||||
],
|
||||
},
|
||||
size: 1,
|
||||
|
@ -118,15 +138,19 @@ export async function fetchCCRReadExceptions(
|
|||
|
||||
for (const followerIndexBucket of followerIndicesBuckets) {
|
||||
const followerIndex = followerIndexBucket.key;
|
||||
const {
|
||||
_index: monitoringIndexName,
|
||||
_source: { ccr_stats: ccrStats, cluster_uuid: clusterUuid },
|
||||
} = get(followerIndexBucket, 'hits.hits.hits[0]');
|
||||
const {
|
||||
read_exceptions: readExceptions,
|
||||
leader_index: leaderIndex,
|
||||
shard_id: shardId,
|
||||
} = ccrStats;
|
||||
const clusterUuid =
|
||||
get(followerIndexBucket, 'hits.hits.hits[0]._source.cluster_uuid') ||
|
||||
get(followerIndexBucket, 'hits.hits.hits[0]_source.elasticsearch.cluster.id');
|
||||
|
||||
const monitoringIndexName = get(followerIndexBucket, 'hits.hits.hits[0]._index');
|
||||
const ccrStats =
|
||||
get(followerIndexBucket, 'hits.hits.hits[0]._source.ccr_stats') ||
|
||||
get(followerIndexBucket, 'hits.hits.hits[0]._source.elasticsearch.ccr');
|
||||
|
||||
const { read_exceptions: readExceptions, shard_id: shardId } = ccrStats;
|
||||
|
||||
const leaderIndex = ccrStats.leaderIndex || ccrStats.leader.index;
|
||||
|
||||
const { exception: lastReadException } = readExceptions[readExceptions.length - 1];
|
||||
|
||||
stats.push({
|
||||
|
|
|
@ -7,6 +7,7 @@
|
|||
import { ElasticsearchClient } from 'kibana/server';
|
||||
import { AlertCluster, AlertClusterHealth } from '../../../common/types/alerts';
|
||||
import { ElasticsearchSource, ElasticsearchResponse } from '../../../common/types/es';
|
||||
import { createDatasetFilter } from './create_dataset_query_filter';
|
||||
|
||||
export async function fetchClusterHealth(
|
||||
esClient: ElasticsearchClient,
|
||||
|
@ -18,7 +19,9 @@ export async function fetchClusterHealth(
|
|||
index,
|
||||
filter_path: [
|
||||
'hits.hits._source.cluster_state.status',
|
||||
'hits.hits._source.elasticsearch.cluster.stats.status',
|
||||
'hits.hits._source.cluster_uuid',
|
||||
'hits.hits._source.elasticsearch.cluster.id',
|
||||
'hits.hits._index',
|
||||
],
|
||||
body: {
|
||||
|
@ -39,11 +42,7 @@ export async function fetchClusterHealth(
|
|||
cluster_uuid: clusters.map((cluster) => cluster.clusterUuid),
|
||||
},
|
||||
},
|
||||
{
|
||||
term: {
|
||||
type: 'cluster_stats',
|
||||
},
|
||||
},
|
||||
createDatasetFilter('cluster_stats', 'cluster_stats', 'elasticsearch.cluster_stats'),
|
||||
{
|
||||
range: {
|
||||
timestamp: {
|
||||
|
@ -73,8 +72,9 @@ export async function fetchClusterHealth(
|
|||
const response: ElasticsearchResponse = result.body as ElasticsearchResponse;
|
||||
return (response.hits?.hits ?? []).map((hit) => {
|
||||
return {
|
||||
health: hit._source!.cluster_state?.status,
|
||||
clusterUuid: hit._source!.cluster_uuid,
|
||||
health:
|
||||
hit._source!.cluster_state?.status || hit._source!.elasticsearch?.cluster?.stats?.status,
|
||||
clusterUuid: hit._source!.cluster_uuid || hit._source!.elasticsearch?.cluster?.id,
|
||||
ccs: hit._index.includes(':') ? hit._index.split(':')[0] : undefined,
|
||||
} as AlertClusterHealth;
|
||||
});
|
||||
|
|
|
@ -8,6 +8,7 @@
|
|||
import { ElasticsearchClient } from 'kibana/server';
|
||||
import { get } from 'lodash';
|
||||
import { AlertCluster } from '../../../common/types/alerts';
|
||||
import { createDatasetFilter } from './create_dataset_query_filter';
|
||||
|
||||
interface RangeFilter {
|
||||
[field: string]: {
|
||||
|
@ -26,18 +27,16 @@ export async function fetchClusters(
|
|||
filter_path: [
|
||||
'hits.hits._source.cluster_settings.cluster.metadata.display_name',
|
||||
'hits.hits._source.cluster_uuid',
|
||||
'hits.hits._source.elasticsearch.cluster.id',
|
||||
'hits.hits._source.cluster_name',
|
||||
'hits.hits._source.elasticsearch.cluster.name',
|
||||
],
|
||||
body: {
|
||||
size: 1000,
|
||||
query: {
|
||||
bool: {
|
||||
filter: [
|
||||
{
|
||||
term: {
|
||||
type: 'cluster_stats',
|
||||
},
|
||||
},
|
||||
createDatasetFilter('cluster_stats', 'cluster_stats', 'elasticsearch.cluster_stats'),
|
||||
{
|
||||
range: rangeFilter,
|
||||
},
|
||||
|
@ -51,59 +50,16 @@ export async function fetchClusters(
|
|||
};
|
||||
|
||||
const { body: response } = await esClient.search(params);
|
||||
return get(response, 'hits.hits', []).map((hit: any) => {
|
||||
const clusterName: string =
|
||||
get(hit, '_source.cluster_settings.cluster.metadata.display_name') ||
|
||||
get(hit, '_source.cluster_name') ||
|
||||
get(hit, '_source.cluster_uuid');
|
||||
return {
|
||||
clusterUuid: get(hit, '_source.cluster_uuid'),
|
||||
clusterName,
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
export async function fetchClustersLegacy(
|
||||
callCluster: any,
|
||||
index: string,
|
||||
rangeFilter: RangeFilter = { timestamp: { gte: 'now-2m' } }
|
||||
): Promise<AlertCluster[]> {
|
||||
const params = {
|
||||
index,
|
||||
filter_path: [
|
||||
'hits.hits._source.cluster_settings.cluster.metadata.display_name',
|
||||
'hits.hits._source.cluster_uuid',
|
||||
'hits.hits._source.cluster_name',
|
||||
],
|
||||
body: {
|
||||
size: 1000,
|
||||
query: {
|
||||
bool: {
|
||||
filter: [
|
||||
{
|
||||
term: {
|
||||
type: 'cluster_stats',
|
||||
},
|
||||
},
|
||||
{
|
||||
range: rangeFilter,
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
collapse: {
|
||||
field: 'cluster_uuid',
|
||||
},
|
||||
},
|
||||
};
|
||||
const response = await callCluster('search', params);
|
||||
return get(response, 'hits.hits', []).map((hit: any) => {
|
||||
const clusterName: string =
|
||||
get(hit, '_source.cluster_settings.cluster.metadata.display_name') ||
|
||||
get(hit, '_source.cluster_name') ||
|
||||
get(hit, '_source.cluster_uuid');
|
||||
get(hit, '_source.elasticsearch.cluster.name') ||
|
||||
get(hit, '_source.cluster_uuid') ||
|
||||
get(hit, '_source.elasticsearch.cluster.id');
|
||||
return {
|
||||
clusterUuid: get(hit, '_source.cluster_uuid'),
|
||||
clusterUuid: get(hit, '_source.cluster_uuid') || get(hit, '_source.elasticsearch.cluster.id'),
|
||||
clusterName,
|
||||
};
|
||||
});
|
||||
|
|
|
@ -213,7 +213,16 @@ describe('fetchCpuUsageNodeStats', () => {
|
|||
bool: {
|
||||
filter: [
|
||||
{ terms: { cluster_uuid: ['abc123'] } },
|
||||
{ term: { type: 'node_stats' } },
|
||||
{
|
||||
bool: {
|
||||
should: [
|
||||
{ term: { type: 'node_stats' } },
|
||||
{ term: { 'metricset.name': 'node_stats' } },
|
||||
{ term: { 'data_stream.dataset': 'elasticsearch.node_stats' } },
|
||||
],
|
||||
minimum_should_match: 1,
|
||||
},
|
||||
},
|
||||
{ range: { timestamp: { format: 'epoch_millis', gte: 0, lte: 0 } } },
|
||||
{
|
||||
bool: { should: [{ exists: { field: 'cluster_uuid' } }], minimum_should_match: 1 },
|
||||
|
|
|
@ -10,6 +10,7 @@ import { get } from 'lodash';
|
|||
import moment from 'moment';
|
||||
import { NORMALIZED_DERIVATIVE_UNIT } from '../../../common/constants';
|
||||
import { AlertCluster, AlertCpuUsageNodeStats } from '../../../common/types/alerts';
|
||||
import { createDatasetFilter } from './create_dataset_query_filter';
|
||||
|
||||
interface NodeBucketESResponse {
|
||||
key: string;
|
||||
|
@ -48,11 +49,7 @@ export async function fetchCpuUsageNodeStats(
|
|||
cluster_uuid: clusters.map((cluster) => cluster.clusterUuid),
|
||||
},
|
||||
},
|
||||
{
|
||||
term: {
|
||||
type: 'node_stats',
|
||||
},
|
||||
},
|
||||
createDatasetFilter('node_stats', 'node_stats', 'elasticsearch.node_stats'),
|
||||
{
|
||||
range: {
|
||||
timestamp: {
|
||||
|
|
|
@ -8,6 +8,7 @@
|
|||
import { ElasticsearchClient } from 'kibana/server';
|
||||
import { get } from 'lodash';
|
||||
import { AlertCluster, AlertDiskUsageNodeStats } from '../../../common/types/alerts';
|
||||
import { createDatasetFilter } from './create_dataset_query_filter';
|
||||
|
||||
export async function fetchDiskUsageNodeStats(
|
||||
esClient: ElasticsearchClient,
|
||||
|
@ -31,11 +32,7 @@ export async function fetchDiskUsageNodeStats(
|
|||
cluster_uuid: clustersIds,
|
||||
},
|
||||
},
|
||||
{
|
||||
term: {
|
||||
type: 'node_stats',
|
||||
},
|
||||
},
|
||||
createDatasetFilter('node_stats', 'node_stats', 'elasticsearch.node_stats'),
|
||||
{
|
||||
range: {
|
||||
timestamp: {
|
||||
|
|
|
@ -7,6 +7,7 @@
|
|||
import { ElasticsearchClient } from 'kibana/server';
|
||||
import { AlertCluster, AlertVersions } from '../../../common/types/alerts';
|
||||
import { ElasticsearchSource, ElasticsearchResponse } from '../../../common/types/es';
|
||||
import { createDatasetFilter } from './create_dataset_query_filter';
|
||||
|
||||
export async function fetchElasticsearchVersions(
|
||||
esClient: ElasticsearchClient,
|
||||
|
@ -19,8 +20,10 @@ export async function fetchElasticsearchVersions(
|
|||
index,
|
||||
filter_path: [
|
||||
'hits.hits._source.cluster_stats.nodes.versions',
|
||||
'hits.hits._source.elasticsearch.cluster.stats.nodes.versions',
|
||||
'hits.hits._index',
|
||||
'hits.hits._source.cluster_uuid',
|
||||
'hits.hits._source.elasticsearch.cluster.id',
|
||||
],
|
||||
body: {
|
||||
size: clusters.length,
|
||||
|
@ -40,11 +43,7 @@ export async function fetchElasticsearchVersions(
|
|||
cluster_uuid: clusters.map((cluster) => cluster.clusterUuid),
|
||||
},
|
||||
},
|
||||
{
|
||||
term: {
|
||||
type: 'cluster_stats',
|
||||
},
|
||||
},
|
||||
createDatasetFilter('cluster_stats', 'cluster_stats', 'elasticsearch.cluster_stats'),
|
||||
{
|
||||
range: {
|
||||
timestamp: {
|
||||
|
@ -73,10 +72,13 @@ export async function fetchElasticsearchVersions(
|
|||
const result = await esClient.search<ElasticsearchSource>(params);
|
||||
const response: ElasticsearchResponse = result.body as ElasticsearchResponse;
|
||||
return (response.hits?.hits ?? []).map((hit) => {
|
||||
const versions = hit._source!.cluster_stats?.nodes?.versions ?? [];
|
||||
const versions =
|
||||
hit._source!.cluster_stats?.nodes?.versions ??
|
||||
hit._source!.elasticsearch?.cluster?.stats?.nodes?.versions ??
|
||||
[];
|
||||
return {
|
||||
versions,
|
||||
clusterUuid: hit._source!.cluster_uuid,
|
||||
clusterUuid: hit._source!.elasticsearch?.cluster?.id || hit._source!.cluster_uuid,
|
||||
ccs: hit._index.includes(':') ? hit._index.split(':')[0] : undefined,
|
||||
};
|
||||
});
|
||||
|
|
|
@ -10,6 +10,7 @@ import { AlertCluster, IndexShardSizeStats } from '../../../common/types/alerts'
|
|||
import { ElasticsearchIndexStats, ElasticsearchResponseHit } from '../../../common/types/es';
|
||||
import { ESGlobPatterns, RegExPatterns } from '../../../common/es_glob_patterns';
|
||||
import { Globals } from '../../static_globals';
|
||||
import { createDatasetFilter } from './create_dataset_query_filter';
|
||||
|
||||
type TopHitType = ElasticsearchResponseHit & {
|
||||
_source: { index_stats?: Partial<ElasticsearchIndexStats> };
|
||||
|
@ -41,12 +42,8 @@ export async function fetchIndexShardSize(
|
|||
size: 0,
|
||||
query: {
|
||||
bool: {
|
||||
must: [
|
||||
{
|
||||
match: {
|
||||
type: 'index_stats',
|
||||
},
|
||||
},
|
||||
filter: [
|
||||
createDatasetFilter('index_stats', 'index', 'elasticsearch.index'),
|
||||
{
|
||||
range: {
|
||||
timestamp: {
|
||||
|
@ -86,6 +83,8 @@ export async function fetchIndexShardSize(
|
|||
'_index',
|
||||
'index_stats.shards.primaries',
|
||||
'index_stats.primaries.store.size_in_bytes',
|
||||
'elasticsearch.index.shards.primaries',
|
||||
'elasticsearch.index.primaries.store.size_in_bytes',
|
||||
],
|
||||
},
|
||||
size: 1,
|
||||
|
@ -102,7 +101,7 @@ export async function fetchIndexShardSize(
|
|||
try {
|
||||
if (filterQuery) {
|
||||
const filterQueryObject = JSON.parse(filterQuery);
|
||||
params.body.query.bool.must.push(filterQueryObject);
|
||||
params.body.query.bool.filter.push(filterQueryObject);
|
||||
}
|
||||
} catch (e) {
|
||||
// meh
|
||||
|
@ -127,10 +126,8 @@ export async function fetchIndexShardSize(
|
|||
if (!topHit || !ESGlobPatterns.isValid(shardIndex, validIndexPatterns)) {
|
||||
continue;
|
||||
}
|
||||
const {
|
||||
_index: monitoringIndexName,
|
||||
_source: { index_stats: indexStats },
|
||||
} = topHit;
|
||||
const { _index: monitoringIndexName, _source } = topHit;
|
||||
const indexStats = _source.index_stats || _source.elasticsearch?.index;
|
||||
|
||||
if (!indexStats || !indexStats.primaries) {
|
||||
continue;
|
||||
|
|
|
@ -7,6 +7,7 @@
|
|||
import { ElasticsearchClient } from 'kibana/server';
|
||||
import { get } from 'lodash';
|
||||
import { AlertCluster, AlertVersions } from '../../../common/types/alerts';
|
||||
import { createDatasetFilter } from './create_dataset_query_filter';
|
||||
|
||||
interface ESAggResponse {
|
||||
key: string;
|
||||
|
@ -32,11 +33,7 @@ export async function fetchKibanaVersions(
|
|||
cluster_uuid: clusters.map((cluster) => cluster.clusterUuid),
|
||||
},
|
||||
},
|
||||
{
|
||||
term: {
|
||||
type: 'kibana_stats',
|
||||
},
|
||||
},
|
||||
createDatasetFilter('kibana_stats', 'stats', 'kibana.stats'),
|
||||
{
|
||||
range: {
|
||||
timestamp: {
|
||||
|
|
|
@ -7,6 +7,7 @@
|
|||
import { ElasticsearchClient } from 'kibana/server';
|
||||
import { AlertLicense, AlertCluster } from '../../../common/types/alerts';
|
||||
import { ElasticsearchSource } from '../../../common/types/es';
|
||||
import { createDatasetFilter } from './create_dataset_query_filter';
|
||||
|
||||
export async function fetchLicenses(
|
||||
esClient: ElasticsearchClient,
|
||||
|
@ -18,7 +19,9 @@ export async function fetchLicenses(
|
|||
index,
|
||||
filter_path: [
|
||||
'hits.hits._source.license.*',
|
||||
'hits.hits._source.elasticsearch.cluster.stats.license.*',
|
||||
'hits.hits._source.cluster_uuid',
|
||||
'hits.hits._source.elasticsearch.cluster.id',
|
||||
'hits.hits._index',
|
||||
],
|
||||
body: {
|
||||
|
@ -39,11 +42,7 @@ export async function fetchLicenses(
|
|||
cluster_uuid: clusters.map((cluster) => cluster.clusterUuid),
|
||||
},
|
||||
},
|
||||
{
|
||||
term: {
|
||||
type: 'cluster_stats',
|
||||
},
|
||||
},
|
||||
createDatasetFilter('cluster_stats', 'cluster_stats', 'elasticsearch.cluster_stats'),
|
||||
{
|
||||
range: {
|
||||
timestamp: {
|
||||
|
@ -72,12 +71,13 @@ export async function fetchLicenses(
|
|||
const { body: response } = await esClient.search<ElasticsearchSource>(params);
|
||||
return (
|
||||
response?.hits?.hits.map((hit) => {
|
||||
const rawLicense = hit._source!.license ?? {};
|
||||
const rawLicense =
|
||||
hit._source!.license ?? hit._source?.elasticsearch?.cluster?.stats?.license ?? {};
|
||||
const license: AlertLicense = {
|
||||
status: rawLicense.status ?? '',
|
||||
type: rawLicense.type ?? '',
|
||||
expiryDateMS: rawLicense.expiry_date_in_millis ?? 0,
|
||||
clusterUuid: hit._source!.cluster_uuid,
|
||||
clusterUuid: hit._source?.elasticsearch?.cluster?.id || hit._source!.cluster_uuid,
|
||||
ccs: hit._index,
|
||||
};
|
||||
return license;
|
||||
|
|
|
@ -7,6 +7,7 @@
|
|||
import { ElasticsearchClient } from 'kibana/server';
|
||||
import { get } from 'lodash';
|
||||
import { AlertCluster, AlertVersions } from '../../../common/types/alerts';
|
||||
import { createDatasetFilter } from './create_dataset_query_filter';
|
||||
|
||||
interface ESAggResponse {
|
||||
key: string;
|
||||
|
@ -32,11 +33,7 @@ export async function fetchLogstashVersions(
|
|||
cluster_uuid: clusters.map((cluster) => cluster.clusterUuid),
|
||||
},
|
||||
},
|
||||
{
|
||||
term: {
|
||||
type: 'logstash_stats',
|
||||
},
|
||||
},
|
||||
createDatasetFilter('logstash_stats', 'node_stats', 'logstash.node_stats'),
|
||||
{
|
||||
range: {
|
||||
timestamp: {
|
||||
|
|
|
@ -8,6 +8,7 @@
|
|||
import { ElasticsearchClient } from 'kibana/server';
|
||||
import { get } from 'lodash';
|
||||
import { AlertCluster, AlertMemoryUsageNodeStats } from '../../../common/types/alerts';
|
||||
import { createDatasetFilter } from './create_dataset_query_filter';
|
||||
|
||||
export async function fetchMemoryUsageNodeStats(
|
||||
esClient: ElasticsearchClient,
|
||||
|
@ -32,11 +33,7 @@ export async function fetchMemoryUsageNodeStats(
|
|||
cluster_uuid: clustersIds,
|
||||
},
|
||||
},
|
||||
{
|
||||
term: {
|
||||
type: 'node_stats',
|
||||
},
|
||||
},
|
||||
createDatasetFilter('node_stats', 'node_stats', 'elasticsearch.node_stats'),
|
||||
{
|
||||
range: {
|
||||
timestamp: {
|
||||
|
|
|
@ -8,6 +8,7 @@
|
|||
import { ElasticsearchClient } from 'kibana/server';
|
||||
import { get } from 'lodash';
|
||||
import { AlertCluster, AlertMissingData } from '../../../common/types/alerts';
|
||||
import { createDatasetFilter } from './create_dataset_query_filter';
|
||||
|
||||
interface ClusterBucketESResponse {
|
||||
key: string;
|
||||
|
@ -64,6 +65,7 @@ export async function fetchMissingMonitoringData(
|
|||
cluster_uuid: clusters.map((cluster) => cluster.clusterUuid),
|
||||
},
|
||||
},
|
||||
createDatasetFilter('node_stats', 'node_stats', 'elasticsearch.node_stats'),
|
||||
{
|
||||
range: {
|
||||
timestamp: {
|
||||
|
@ -106,7 +108,7 @@ export async function fetchMissingMonitoringData(
|
|||
},
|
||||
],
|
||||
_source: {
|
||||
includes: ['_index', 'source_node.name'],
|
||||
includes: ['source_node.name', 'elasticsearch.node.name'],
|
||||
},
|
||||
},
|
||||
},
|
||||
|
@ -142,7 +144,10 @@ export async function fetchMissingMonitoringData(
|
|||
const nodeId = uuidBucket.key;
|
||||
const indexName = get(uuidBucket, `document.hits.hits[0]._index`);
|
||||
const differenceInMs = nowInMs - uuidBucket.most_recent.value;
|
||||
const nodeName = get(uuidBucket, `document.hits.hits[0]._source.source_node.name`, nodeId);
|
||||
const nodeName =
|
||||
get(uuidBucket, `document.hits.hits[0]._source.source_node.name`) ||
|
||||
get(uuidBucket, `document.hits.hits[0]._source.elasticsearch.node.name`) ||
|
||||
nodeId;
|
||||
|
||||
uniqueList[`${clusterUuid}${nodeId}`] = {
|
||||
nodeId,
|
||||
|
|
|
@ -7,6 +7,7 @@
|
|||
import { ElasticsearchClient } from 'kibana/server';
|
||||
import { AlertCluster, AlertClusterStatsNodes } from '../../../common/types/alerts';
|
||||
import { ElasticsearchSource } from '../../../common/types/es';
|
||||
import { createDatasetFilter } from './create_dataset_query_filter';
|
||||
|
||||
function formatNode(
|
||||
nodes: NonNullable<NonNullable<ElasticsearchSource['cluster_state']>['nodes']> | undefined
|
||||
|
@ -45,11 +46,7 @@ export async function fetchNodesFromClusterStats(
|
|||
query: {
|
||||
bool: {
|
||||
filter: [
|
||||
{
|
||||
term: {
|
||||
type: 'cluster_stats',
|
||||
},
|
||||
},
|
||||
createDatasetFilter('cluster_stats', 'cluster_stats', 'elasticsearch.cluster_stats'),
|
||||
{
|
||||
range: {
|
||||
timestamp: {
|
||||
|
@ -78,7 +75,7 @@ export async function fetchNodesFromClusterStats(
|
|||
},
|
||||
],
|
||||
_source: {
|
||||
includes: ['cluster_state.nodes_hash', 'cluster_state.nodes'],
|
||||
includes: ['cluster_state.nodes', 'elasticsearch.cluster.stats.nodes'],
|
||||
},
|
||||
size: 2,
|
||||
},
|
||||
|
@ -111,8 +108,12 @@ export async function fetchNodesFromClusterStats(
|
|||
const indexName = hits[0]._index;
|
||||
nodes.push({
|
||||
clusterUuid,
|
||||
recentNodes: formatNode(hits[0]._source.cluster_state?.nodes),
|
||||
priorNodes: formatNode(hits[1]._source.cluster_state?.nodes),
|
||||
recentNodes: formatNode(
|
||||
hits[0]._source.cluster_state?.nodes || hits[0]._source.elasticsearch.cluster.stats.nodes
|
||||
),
|
||||
priorNodes: formatNode(
|
||||
hits[1]._source.cluster_state?.nodes || hits[1]._source.elasticsearch.cluster.stats.nodes
|
||||
),
|
||||
ccs: indexName.includes(':') ? indexName.split(':')[0] : undefined,
|
||||
});
|
||||
}
|
||||
|
|
|
@ -8,6 +8,7 @@
|
|||
import { ElasticsearchClient } from 'kibana/server';
|
||||
import { get } from 'lodash';
|
||||
import { AlertCluster, AlertThreadPoolRejectionsStats } from '../../../common/types/alerts';
|
||||
import { createDatasetFilter } from './create_dataset_query_filter';
|
||||
|
||||
const invalidNumberValue = (value: number) => {
|
||||
return isNaN(value) || value === undefined || value === null;
|
||||
|
@ -24,7 +25,12 @@ const getTopHits = (threadType: string, order: 'asc' | 'desc') => ({
|
|||
},
|
||||
],
|
||||
_source: {
|
||||
includes: [`node_stats.thread_pool.${threadType}.rejected`, 'source_node.name'],
|
||||
includes: [
|
||||
`node_stats.thread_pool.${threadType}.rejected`,
|
||||
`elasticsearch.node.stats.thread_pool.${threadType}.rejected.count`,
|
||||
'source_node.name',
|
||||
'elasticsearch.node.name',
|
||||
],
|
||||
},
|
||||
size: 1,
|
||||
},
|
||||
|
@ -53,11 +59,7 @@ export async function fetchThreadPoolRejectionStats(
|
|||
cluster_uuid: clustersIds,
|
||||
},
|
||||
},
|
||||
{
|
||||
term: {
|
||||
type: 'node_stats',
|
||||
},
|
||||
},
|
||||
createDatasetFilter('node_stats', 'node_stats', 'elasticsearch.node_stats'),
|
||||
{
|
||||
range: {
|
||||
timestamp: {
|
||||
|
@ -126,8 +128,11 @@ export async function fetchThreadPoolRejectionStats(
|
|||
}
|
||||
|
||||
const rejectedPath = `_source.node_stats.thread_pool.${threadType}.rejected`;
|
||||
const newRejectionCount = Number(get(mostRecentDoc, rejectedPath));
|
||||
const oldRejectionCount = Number(get(leastRecentDoc, rejectedPath));
|
||||
const rejectedPathEcs = `_source.elasticsearch.node.stats.thread_pool.${threadType}.rejected.count`;
|
||||
const newRejectionCount =
|
||||
Number(get(mostRecentDoc, rejectedPath)) || Number(get(mostRecentDoc, rejectedPathEcs));
|
||||
const oldRejectionCount =
|
||||
Number(get(leastRecentDoc, rejectedPath)) || Number(get(leastRecentDoc, rejectedPathEcs));
|
||||
|
||||
if (invalidNumberValue(newRejectionCount) || invalidNumberValue(oldRejectionCount)) {
|
||||
continue;
|
||||
|
@ -138,7 +143,10 @@ export async function fetchThreadPoolRejectionStats(
|
|||
? newRejectionCount
|
||||
: newRejectionCount - oldRejectionCount;
|
||||
const indexName = mostRecentDoc._index;
|
||||
const nodeName = get(mostRecentDoc, '_source.source_node.name') || node.key;
|
||||
const nodeName =
|
||||
get(mostRecentDoc, '_source.source_node.name') ||
|
||||
get(mostRecentDoc, '_source.elasticsearch.node.name') ||
|
||||
node.key;
|
||||
const nodeStat = {
|
||||
rejectionCount,
|
||||
type: threadType,
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue