[Stack Monitoring] update rules queries to support metricbeat 8.0 [fixed PR] (#125748)
* update queries to use metricset.name and fix paths
* fix ccr query
* fix cluster health query paths
* update elasticsearch version mismatch
* update fetchLicense paths
* use ecs properties in fetch_index_shard_size
* Clarified comment about createDatasetFilter
* Re-editing a clarifying comment
* Small comment edit
* Revert "use ecs properties in fetch_index_shard_size"
This reverts commit cdf8b2493e.
* simplify fetch_index_shard_size document handling
Co-authored-by: neptunian <sandra.gonzales@elastic.co>
Co-authored-by: klacabane <kevin.lacabane@elastic.co>
Parent: b8ae75b3f7
Commit: 7126bc9cc1
28 changed files with 193 additions and 103 deletions
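Taken together, the query changes below make each rule match all three collection modes (legacy internal collection via `type`, standalone Metricbeat via `metricset.name`, and the Elastic Agent integration via `data_stream.dataset`), while the result handling falls back from legacy `_source` paths to their ECS counterparts. A non-exhaustive sketch of the path pairs, compiled from the hunks below:

// Sketch only: legacy _source paths and the ECS paths this commit adds as
// fallbacks (collected from the diffs below; not an exhaustive list).
const LEGACY_TO_ECS: Record<string, string> = {
  cluster_uuid: 'elasticsearch.cluster.id',
  cluster_name: 'elasticsearch.cluster.name',
  'cluster_state.status': 'elasticsearch.cluster.stats.status',
  'cluster_state.nodes': 'elasticsearch.cluster.stats.nodes',
  'cluster_stats.nodes.versions': 'elasticsearch.cluster.stats.nodes.versions',
  license: 'elasticsearch.cluster.stats.license',
  'source_node.name': 'elasticsearch.node.name',
  'ccr_stats.read_exceptions': 'elasticsearch.ccr.read_exceptions',
};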
@@ -5,12 +5,33 @@
  * 2.0.
  */

-export const createDatasetFilter = (legacyType: string, dataset: string) => ({
+/**
+ * We expect that metricset and dataset will be aligned where dataset
+ * is the full {product}.{metricset}, whereas metricset doesn't include
+ * the product, e.g. dataset is elasticsearch.cluster_stats and metricset is
+ * just cluster_stats.
+ *
+ * Unfortunately, this doesn't *always* seem to be the case, and sometimes
+ * the "metricset" value is different. For this reason, we've left these
+ * two as separate arguments to this function, at least until this is resolved.
+ *
+ * More info: https://github.com/elastic/kibana/pull/119112/files#r772605936
+ *
+ * @param {string} type matches legacy data
+ * @param {string} metricset matches standalone beats
+ * @param {string} dataset matches agent integration data streams
+ */
+export const createDatasetFilter = (type: string, metricset: string, dataset: string) => ({
   bool: {
     should: [
       {
         term: {
-          type: legacyType,
+          type,
         },
       },
+      {
+        term: {
+          'metricset.name': metricset,
+        },
+      },
       {
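For illustration, a sketch of the filter the three-argument helper now produces, using the cluster_stats arguments that appear in the calls and test expectations below:

// Hypothetical usage; the expected object shape is taken verbatim from the
// test expectations later in this commit.
const filter = createDatasetFilter('cluster_stats', 'cluster_stats', 'elasticsearch.cluster_stats');
// filter deep-equals:
// {
//   bool: {
//     should: [
//       { term: { type: 'cluster_stats' } },                                // legacy collection
//       { term: { 'metricset.name': 'cluster_stats' } },                    // standalone Metricbeat
//       { term: { 'data_stream.dataset': 'elasticsearch.cluster_stats' } }, // Agent integration
//     ],
//     minimum_should_match: 1,
//   },
// }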
@@ -47,15 +47,39 @@ describe('fetchCCReadExceptions', () => {
       bool: {
         filter: [
           {
-            nested: {
-              path: 'ccr_stats.read_exceptions',
-              query: { exists: { field: 'ccr_stats.read_exceptions.exception' } },
+            bool: {
+              should: [
+                {
+                  nested: {
+                    ignore_unmapped: true,
+                    path: 'ccr_stats.read_exceptions',
+                    query: {
+                      exists: {
+                        field: 'ccr_stats.read_exceptions.exception',
+                      },
+                    },
+                  },
+                },
+                {
+                  nested: {
+                    ignore_unmapped: true,
+                    path: 'elasticsearch.ccr.read_exceptions',
+                    query: {
+                      exists: {
+                        field: 'elasticsearch.ccr.read_exceptions.exception',
+                      },
+                    },
+                  },
+                },
+              ],
+              minimum_should_match: 1,
+            },
           },
           {
             bool: {
               should: [
                 { term: { type: 'ccr_stats' } },
+                { term: { 'metricset.name': 'ccr' } },
                 { term: { 'data_stream.dataset': 'elasticsearch.ccr' } },
               ],
               minimum_should_match: 1,
@@ -82,9 +106,13 @@ describe('fetchCCReadExceptions', () => {
       _source: {
         includes: [
           'cluster_uuid',
+          'elasticsearch.cluster.id',
           'ccr_stats.read_exceptions',
+          'elasticsearch.ccr.read_exceptions',
           'ccr_stats.shard_id',
+          'elasticsearch.ccr.shard_id',
           'ccr_stats.leader_index',
+          'elasticsearch.ccr.leader.index',
         ],
       },
       size: 1,
@@ -35,16 +35,35 @@ export async function fetchCCRReadExceptions(
       bool: {
         filter: [
           {
-            nested: {
-              path: 'ccr_stats.read_exceptions',
-              query: {
-                exists: {
-                  field: 'ccr_stats.read_exceptions.exception',
+            bool: {
+              should: [
+                {
+                  nested: {
+                    ignore_unmapped: true,
+                    path: 'ccr_stats.read_exceptions',
+                    query: {
+                      exists: {
+                        field: 'ccr_stats.read_exceptions.exception',
+                      },
+                    },
+                  },
+                },
+                {
+                  nested: {
+                    ignore_unmapped: true,
+                    path: 'elasticsearch.ccr.read_exceptions',
+                    query: {
+                      exists: {
+                        field: 'elasticsearch.ccr.read_exceptions.exception',
+                      },
+                    },
+                  },
+                },
+              ],
+              minimum_should_match: 1,
             },
           },
-          createDatasetFilter('ccr_stats', 'elasticsearch.ccr'),
+          createDatasetFilter('ccr_stats', 'ccr', 'elasticsearch.ccr'),
           {
             range: {
               timestamp: {
@@ -83,9 +102,13 @@ export async function fetchCCRReadExceptions(
       _source: {
         includes: [
           'cluster_uuid',
+          'elasticsearch.cluster.id',
           'ccr_stats.read_exceptions',
+          'elasticsearch.ccr.read_exceptions',
           'ccr_stats.shard_id',
+          'elasticsearch.ccr.shard_id',
           'ccr_stats.leader_index',
+          'elasticsearch.ccr.leader.index',
         ],
       },
       size: 1,
@@ -123,15 +146,19 @@ export async function fetchCCRReadExceptions(
   for (const followerIndexBucket of followerIndicesBuckets) {
     const followerIndex = followerIndexBucket.key;
-    const {
-      _index: monitoringIndexName,
-      _source: { ccr_stats: ccrStats, cluster_uuid: clusterUuid },
-    } = get(followerIndexBucket, 'hits.hits.hits[0]');
-    const {
-      read_exceptions: readExceptions,
-      leader_index: leaderIndex,
-      shard_id: shardId,
-    } = ccrStats;
+    const clusterUuid =
+      get(followerIndexBucket, 'hits.hits.hits[0]._source.cluster_uuid') ||
+      get(followerIndexBucket, 'hits.hits.hits[0]._source.elasticsearch.cluster.id');
+
+    const monitoringIndexName = get(followerIndexBucket, 'hits.hits.hits[0]._index');
+    const ccrStats =
+      get(followerIndexBucket, 'hits.hits.hits[0]._source.ccr_stats') ||
+      get(followerIndexBucket, 'hits.hits.hits[0]._source.elasticsearch.ccr');
+
+    const { read_exceptions: readExceptions, shard_id: shardId } = ccrStats;
+
+    const leaderIndex = ccrStats.leader_index || ccrStats.leader.index;

     const { exception: lastReadException } = readExceptions[readExceptions.length - 1];

     stats.push({
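The read side follows the same pattern throughout this commit: try the legacy `_source` path first, then the ECS path written by Metricbeat 8 and the integration. A minimal standalone sketch (the helper name and the `doc` shape are illustrative, not part of the change):

import { get } from 'lodash';

// Illustrative helper mirroring the fallbacks used in fetchCCRReadExceptions:
// prefer the legacy internal-collection path, then the ECS path.
function firstPath(doc: unknown, legacyPath: string, ecsPath: string) {
  return get(doc, legacyPath) || get(doc, ecsPath);
}

// e.g. firstPath(bucket, 'hits.hits.hits[0]._source.cluster_uuid',
//                        'hits.hits.hits[0]._source.elasticsearch.cluster.id')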
@@ -65,7 +65,9 @@ describe('fetchClusterHealth', () => {
           '*:.monitoring-es-*,.monitoring-es-*,*:metrics-elasticsearch.cluster_stats-*,metrics-elasticsearch.cluster_stats-*',
         filter_path: [
           'hits.hits._source.cluster_state.status',
+          'hits.hits._source.elasticsearch.cluster.stats.status',
           'hits.hits._source.cluster_uuid',
+          'hits.hits._source.elasticsearch.cluster.id',
           'hits.hits._index',
         ],
         body: {
@@ -79,6 +81,7 @@ describe('fetchClusterHealth', () => {
               bool: {
                 should: [
                   { term: { type: 'cluster_stats' } },
+                  { term: { 'metricset.name': 'cluster_stats' } },
                   { term: { 'data_stream.dataset': 'elasticsearch.cluster_stats' } },
                 ],
                 minimum_should_match: 1,
@@ -27,7 +27,9 @@ export async function fetchClusterHealth(
     index: indexPatterns,
     filter_path: [
       'hits.hits._source.cluster_state.status',
+      'hits.hits._source.elasticsearch.cluster.stats.status',
       'hits.hits._source.cluster_uuid',
+      'hits.hits._source.elasticsearch.cluster.id',
       'hits.hits._index',
     ],
     body: {
@@ -48,7 +50,7 @@ export async function fetchClusterHealth(
               cluster_uuid: clusters.map((cluster) => cluster.clusterUuid),
             },
           },
-          createDatasetFilter('cluster_stats', 'elasticsearch.cluster_stats'),
+          createDatasetFilter('cluster_stats', 'cluster_stats', 'elasticsearch.cluster_stats'),
           {
             range: {
               timestamp: {
@@ -77,8 +79,9 @@ export async function fetchClusterHealth(
   const response = await esClient.search<ElasticsearchSource>(params);
   return (response.hits?.hits ?? []).map((hit) => {
     return {
-      health: hit._source!.cluster_state?.status,
-      clusterUuid: hit._source!.cluster_uuid,
+      health:
+        hit._source!.cluster_state?.status || hit._source!.elasticsearch?.cluster?.stats?.status,
+      clusterUuid: hit._source!.cluster_uuid || hit._source!.elasticsearch?.cluster?.id,
       ccs: hit._index.includes(':') ? hit._index.split(':')[0] : undefined,
     } as AlertClusterHealth;
   });
@@ -87,7 +87,9 @@ describe('fetchClusters', () => {
       filter_path: [
         'hits.hits._source.cluster_settings.cluster.metadata.display_name',
         'hits.hits._source.cluster_uuid',
+        'hits.hits._source.elasticsearch.cluster.id',
         'hits.hits._source.cluster_name',
+        'hits.hits._source.elasticsearch.cluster.name',
       ],
       body: {
         size: 1000,
@@ -98,6 +100,7 @@ describe('fetchClusters', () => {
             bool: {
               should: [
                 { term: { type: 'cluster_stats' } },
+                { term: { 'metricset.name': 'cluster_stats' } },
                 { term: { 'data_stream.dataset': 'elasticsearch.cluster_stats' } },
               ],
               minimum_should_match: 1,
@@ -35,14 +35,16 @@ export async function fetchClusters(
     filter_path: [
       'hits.hits._source.cluster_settings.cluster.metadata.display_name',
       'hits.hits._source.cluster_uuid',
+      'hits.hits._source.elasticsearch.cluster.id',
       'hits.hits._source.cluster_name',
+      'hits.hits._source.elasticsearch.cluster.name',
     ],
     body: {
       size: 1000,
       query: {
         bool: {
           filter: [
-            createDatasetFilter('cluster_stats', 'elasticsearch.cluster_stats'),
+            createDatasetFilter('cluster_stats', 'cluster_stats', 'elasticsearch.cluster_stats'),
             {
               range: rangeFilter,
             },
@@ -56,59 +58,16 @@ export async function fetchClusters(
   };

   const response = await esClient.search(params);
   return get(response, 'hits.hits', []).map((hit: any) => {
     const clusterName: string =
       get(hit, '_source.cluster_settings.cluster.metadata.display_name') ||
       get(hit, '_source.cluster_name') ||
-      get(hit, '_source.cluster_uuid');
+      get(hit, '_source.elasticsearch.cluster.name') ||
+      get(hit, '_source.cluster_uuid') ||
+      get(hit, '_source.elasticsearch.cluster.id');
     return {
-      clusterUuid: get(hit, '_source.cluster_uuid'),
+      clusterUuid: get(hit, '_source.cluster_uuid') || get(hit, '_source.elasticsearch.cluster.id'),
       clusterName,
     };
   });
 }
-
-export async function fetchClustersLegacy(
-  callCluster: any,
-  index: string,
-  rangeFilter: RangeFilter = { timestamp: { gte: 'now-2m' } }
-): Promise<AlertCluster[]> {
-  const params = {
-    index,
-    filter_path: [
-      'hits.hits._source.cluster_settings.cluster.metadata.display_name',
-      'hits.hits._source.cluster_uuid',
-      'hits.hits._source.cluster_name',
-    ],
-    body: {
-      size: 1000,
-      query: {
-        bool: {
-          filter: [
-            {
-              term: {
-                type: 'cluster_stats',
-              },
-            },
-            {
-              range: rangeFilter,
-            },
-          ],
-        },
-      },
-      collapse: {
-        field: 'cluster_uuid',
-      },
-    },
-  };
-  const response = await callCluster('search', params);
-  return get(response, 'hits.hits', []).map((hit: any) => {
-    const clusterName: string =
-      get(hit, '_source.cluster_settings.cluster.metadata.display_name') ||
-      get(hit, '_source.cluster_name') ||
-      get(hit, '_source.cluster_uuid');
-    return {
-      clusterUuid: get(hit, '_source.cluster_uuid'),
-      clusterName,
-    };
-  });
-}
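A minimal sketch of the name-resolution order `fetchClusters` now applies per hit (taken directly from the chain above): explicit display name, then legacy name, then ECS name, then either form of the cluster id:

import { get } from 'lodash';

// Sketch of the precedence used above; `hit` is an Elasticsearch search hit.
function resolveClusterName(hit: any): string {
  return (
    get(hit, '_source.cluster_settings.cluster.metadata.display_name') ||
    get(hit, '_source.cluster_name') ||
    get(hit, '_source.elasticsearch.cluster.name') ||
    get(hit, '_source.cluster_uuid') ||
    get(hit, '_source.elasticsearch.cluster.id')
  );
}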
@@ -227,6 +227,7 @@ describe('fetchCpuUsageNodeStats', () => {
             bool: {
               should: [
                 { term: { type: 'node_stats' } },
+                { term: { 'metricset.name': 'node_stats' } },
                 { term: { 'data_stream.dataset': 'elasticsearch.node_stats' } },
               ],
               minimum_should_match: 1,
@@ -58,7 +58,7 @@ export async function fetchCpuUsageNodeStats(
               cluster_uuid: clusters.map((cluster) => cluster.clusterUuid),
             },
           },
-          createDatasetFilter('node_stats', 'elasticsearch.node_stats'),
+          createDatasetFilter('node_stats', 'node_stats', 'elasticsearch.node_stats'),
          {
             range: {
               timestamp: {
@@ -101,6 +101,7 @@ describe('fetchDiskUsageNodeStats', () => {
             bool: {
               should: [
                 { term: { type: 'node_stats' } },
+                { term: { 'metricset.name': 'node_stats' } },
                 { term: { 'data_stream.dataset': 'elasticsearch.node_stats' } },
               ],
               minimum_should_match: 1,
@@ -40,7 +40,7 @@ export async function fetchDiskUsageNodeStats(
               cluster_uuid: clustersIds,
             },
           },
-          createDatasetFilter('node_stats', 'elasticsearch.node_stats'),
+          createDatasetFilter('node_stats', 'node_stats', 'elasticsearch.node_stats'),
           {
             range: {
               timestamp: {
@@ -70,8 +70,10 @@ describe('fetchElasticsearchVersions', () => {
           '*:.monitoring-es-*,.monitoring-es-*,*:metrics-elasticsearch.cluster_stats-*,metrics-elasticsearch.cluster_stats-*',
         filter_path: [
           'hits.hits._source.cluster_stats.nodes.versions',
+          'hits.hits._source.elasticsearch.cluster.stats.nodes.versions',
           'hits.hits._index',
           'hits.hits._source.cluster_uuid',
+          'hits.hits._source.elasticsearch.cluster.id',
         ],
         body: {
           size: 1,
@@ -84,6 +86,7 @@ describe('fetchElasticsearchVersions', () => {
               bool: {
                 should: [
                   { term: { type: 'cluster_stats' } },
+                  { term: { 'metricset.name': 'cluster_stats' } },
                   { term: { 'data_stream.dataset': 'elasticsearch.cluster_stats' } },
                 ],
                 minimum_should_match: 1,
@@ -28,8 +28,10 @@ export async function fetchElasticsearchVersions(
     index: indexPatterns,
     filter_path: [
       'hits.hits._source.cluster_stats.nodes.versions',
+      'hits.hits._source.elasticsearch.cluster.stats.nodes.versions',
       'hits.hits._index',
       'hits.hits._source.cluster_uuid',
+      'hits.hits._source.elasticsearch.cluster.id',
     ],
     body: {
       size: clusters.length,
@@ -49,7 +51,7 @@ export async function fetchElasticsearchVersions(
               cluster_uuid: clusters.map((cluster) => cluster.clusterUuid),
             },
           },
-          createDatasetFilter('cluster_stats', 'elasticsearch.cluster_stats'),
+          createDatasetFilter('cluster_stats', 'cluster_stats', 'elasticsearch.cluster_stats'),
           {
             range: {
               timestamp: {
@@ -77,10 +79,13 @@ export async function fetchElasticsearchVersions(

   const response = await esClient.search<ElasticsearchSource>(params);
   return (response.hits?.hits ?? []).map((hit) => {
-    const versions = hit._source!.cluster_stats?.nodes?.versions ?? [];
+    const versions =
+      hit._source!.cluster_stats?.nodes?.versions ??
+      hit._source!.elasticsearch?.cluster?.stats?.nodes?.versions ??
+      [];
     return {
       versions,
-      clusterUuid: hit._source!.cluster_uuid,
+      clusterUuid: hit._source!.elasticsearch?.cluster?.id || hit._source!.cluster_uuid,
       ccs: hit._index.includes(':') ? hit._index.split(':')[0] : undefined,
     };
   });
@@ -161,6 +161,7 @@ describe('fetchIndexShardSize', () => {
             bool: {
               should: [
                 { term: { type: 'index_stats' } },
+                { term: { 'metricset.name': 'index' } },
                 { term: { 'data_stream.dataset': 'elasticsearch.index' } },
               ],
               minimum_should_match: 1,
@@ -185,6 +186,8 @@ describe('fetchIndexShardSize', () => {
                   '_index',
                   'index_stats.shards.primaries',
                   'index_stats.primaries.store.size_in_bytes',
+                  'elasticsearch.index.shards.primaries',
+                  'elasticsearch.index.primaries.store.size_in_bytes',
                 ],
               },
               size: 1,
@@ -50,7 +50,7 @@ export async function fetchIndexShardSize(
       query: {
         bool: {
           filter: [
-            createDatasetFilter('index_stats', 'elasticsearch.index'),
+            createDatasetFilter('index_stats', 'index', 'elasticsearch.index'),
             {
               range: {
                 timestamp: {
@@ -90,6 +90,8 @@ export async function fetchIndexShardSize(
                 '_index',
                 'index_stats.shards.primaries',
                 'index_stats.primaries.store.size_in_bytes',
+                'elasticsearch.index.shards.primaries',
+                'elasticsearch.index.primaries.store.size_in_bytes',
               ],
             },
             size: 1,
@@ -131,10 +133,8 @@ export async function fetchIndexShardSize(
       if (!topHit || !ESGlobPatterns.isValid(shardIndex, validIndexPatterns)) {
         continue;
       }
-      const {
-        _index: monitoringIndexName,
-        _source: { index_stats: indexStats },
-      } = topHit;
+      const { _index: monitoringIndexName, _source } = topHit;
+      const indexStats = _source.index_stats || _source.elasticsearch?.index;
+
       if (!indexStats || !indexStats.primaries) {
         continue;
@@ -102,6 +102,7 @@ describe('fetchKibanaVersions', () => {
             bool: {
               should: [
                 { term: { type: 'kibana_stats' } },
+                { term: { 'metricset.name': 'stats' } },
                 { term: { 'data_stream.dataset': 'kibana.stats' } },
               ],
               minimum_should_match: 1,
@@ -41,7 +41,7 @@ export async function fetchKibanaVersions(
               cluster_uuid: clusters.map((cluster) => cluster.clusterUuid),
             },
           },
-          createDatasetFilter('kibana_stats', 'kibana.stats'),
+          createDatasetFilter('kibana_stats', 'stats', 'kibana.stats'),
           {
             range: {
               timestamp: {
@@ -84,7 +84,9 @@ describe('fetchLicenses', () => {
           '*:.monitoring-es-*,.monitoring-es-*,*:metrics-elasticsearch.cluster_stats-*,metrics-elasticsearch.cluster_stats-*',
         filter_path: [
           'hits.hits._source.license.*',
+          'hits.hits._source.elasticsearch.cluster.stats.license.*',
           'hits.hits._source.cluster_uuid',
+          'hits.hits._source.elasticsearch.cluster.id',
           'hits.hits._index',
         ],
         body: {
@@ -98,6 +100,7 @@ describe('fetchLicenses', () => {
               bool: {
                 should: [
                   { term: { type: 'cluster_stats' } },
+                  { term: { 'metricset.name': 'cluster_stats' } },
                   { term: { 'data_stream.dataset': 'elasticsearch.cluster_stats' } },
                 ],
                 minimum_should_match: 1,
@@ -27,7 +27,9 @@ export async function fetchLicenses(
     index: indexPatterns,
     filter_path: [
       'hits.hits._source.license.*',
+      'hits.hits._source.elasticsearch.cluster.stats.license.*',
       'hits.hits._source.cluster_uuid',
+      'hits.hits._source.elasticsearch.cluster.id',
       'hits.hits._index',
     ],
     body: {
@@ -48,7 +50,7 @@ export async function fetchLicenses(
               cluster_uuid: clusters.map((cluster) => cluster.clusterUuid),
             },
           },
-          createDatasetFilter('cluster_stats', 'elasticsearch.cluster_stats'),
+          createDatasetFilter('cluster_stats', 'cluster_stats', 'elasticsearch.cluster_stats'),
           {
             range: {
               timestamp: {
@@ -77,12 +79,13 @@ export async function fetchLicenses(
   const response = await esClient.search<ElasticsearchSource>(params);
   return (
     response?.hits?.hits.map((hit) => {
-      const rawLicense = hit._source!.license ?? {};
+      const rawLicense =
+        hit._source!.license ?? hit._source?.elasticsearch?.cluster?.stats?.license ?? {};
       const license: AlertLicense = {
         status: rawLicense.status ?? '',
         type: rawLicense.type ?? '',
         expiryDateMS: rawLicense.expiry_date_in_millis ?? 0,
-        clusterUuid: hit._source!.cluster_uuid,
+        clusterUuid: hit._source?.elasticsearch?.cluster?.id || hit._source!.cluster_uuid,
         ccs: hit._index,
       };
       return license;
@@ -107,6 +107,7 @@ describe('fetchLogstashVersions', () => {
             bool: {
               should: [
                 { term: { type: 'logstash_stats' } },
+                { term: { 'metricset.name': 'node_stats' } },
                 { term: { 'data_stream.dataset': 'logstash.node_stats' } },
               ],
               minimum_should_match: 1,
@@ -41,7 +41,7 @@ export async function fetchLogstashVersions(
               cluster_uuid: clusters.map((cluster) => cluster.clusterUuid),
             },
           },
-          createDatasetFilter('logstash_stats', 'logstash.node_stats'),
+          createDatasetFilter('logstash_stats', 'node_stats', 'logstash.node_stats'),
           {
             range: {
               timestamp: {
@@ -130,6 +130,7 @@ describe('fetchMemoryUsageNodeStats', () => {
             bool: {
               should: [
                 { term: { type: 'node_stats' } },
+                { term: { 'metricset.name': 'node_stats' } },
                 { term: { 'data_stream.dataset': 'elasticsearch.node_stats' } },
               ],
               minimum_should_match: 1,
@@ -41,7 +41,7 @@ export async function fetchMemoryUsageNodeStats(
               cluster_uuid: clustersIds,
             },
           },
-          createDatasetFilter('node_stats', 'elasticsearch.node_stats'),
+          createDatasetFilter('node_stats', 'node_stats', 'elasticsearch.node_stats'),
           {
             range: {
               timestamp: {
@@ -189,6 +189,7 @@ describe('fetchMissingMonitoringData', () => {
             bool: {
               should: [
                 { term: { type: 'node_stats' } },
+                { term: { 'metricset.name': 'node_stats' } },
                 { term: { 'data_stream.dataset': 'elasticsearch.node_stats' } },
               ],
               minimum_should_match: 1,
@@ -210,7 +211,9 @@ describe('fetchMissingMonitoringData', () => {
                   top_hits: {
                     size: 1,
                     sort: [{ timestamp: { order: 'desc', unmapped_type: 'long' } }],
-                    _source: { includes: ['_index', 'source_node.name'] },
+                    _source: {
+                      includes: ['source_node.name', 'elasticsearch.node.name'],
+                    },
                   },
                 },
               },
@@ -221,7 +224,7 @@ describe('fetchMissingMonitoringData', () => {
       },
     });
   });
   it('should call ES with correct query when ccs disabled', async () => {
     const now = 10;
     const clusters = [
       {
@@ -74,7 +74,7 @@ export async function fetchMissingMonitoringData(
               cluster_uuid: clusters.map((cluster) => cluster.clusterUuid),
             },
           },
-          createDatasetFilter('node_stats', 'elasticsearch.node_stats'),
+          createDatasetFilter('node_stats', 'node_stats', 'elasticsearch.node_stats'),
           {
             range: {
               timestamp: {
@@ -117,7 +117,7 @@ export async function fetchMissingMonitoringData(
               },
             ],
             _source: {
-              includes: ['_index', 'source_node.name'],
+              includes: ['source_node.name', 'elasticsearch.node.name'],
             },
           },
         },
@@ -153,7 +153,10 @@ export async function fetchMissingMonitoringData(
       const nodeId = uuidBucket.key;
       const indexName = get(uuidBucket, `document.hits.hits[0]._index`);
       const differenceInMs = nowInMs - uuidBucket.most_recent.value;
-      const nodeName = get(uuidBucket, `document.hits.hits[0]._source.source_node.name`, nodeId);
+      const nodeName =
+        get(uuidBucket, `document.hits.hits[0]._source.source_node.name`) ||
+        get(uuidBucket, `document.hits.hits[0]._source.elasticsearch.node.name`) ||
+        nodeId;

       uniqueList[`${clusterUuid}${nodeId}`] = {
         nodeId,
@@ -172,6 +172,7 @@ describe('fetchNodesFromClusterStats', () => {
             bool: {
               should: [
                 { term: { type: 'cluster_stats' } },
+                { term: { 'metricset.name': 'cluster_stats' } },
                 { term: { 'data_stream.dataset': 'elasticsearch.cluster_stats' } },
               ],
               minimum_should_match: 1,
@@ -188,7 +189,9 @@ describe('fetchNodesFromClusterStats', () => {
           top: {
             top_hits: {
               sort: [{ timestamp: { order: 'desc', unmapped_type: 'long' } }],
-              _source: { includes: ['cluster_state.nodes_hash', 'cluster_state.nodes'] },
+              _source: {
+                includes: ['cluster_state.nodes', 'elasticsearch.cluster.stats.nodes'],
+              },
               size: 2,
             },
           },
@@ -54,7 +54,7 @@ export async function fetchNodesFromClusterStats(
       query: {
         bool: {
           filter: [
-            createDatasetFilter('cluster_stats', 'elasticsearch.cluster_stats'),
+            createDatasetFilter('cluster_stats', 'cluster_stats', 'elasticsearch.cluster_stats'),
            {
               range: {
                 timestamp: {
@@ -83,7 +83,7 @@ export async function fetchNodesFromClusterStats(
             },
           ],
           _source: {
-            includes: ['cluster_state.nodes_hash', 'cluster_state.nodes'],
+            includes: ['cluster_state.nodes', 'elasticsearch.cluster.stats.nodes'],
           },
           size: 2,
         },
@@ -116,8 +116,12 @@ export async function fetchNodesFromClusterStats(
     const indexName = hits[0]._index;
     nodes.push({
       clusterUuid,
-      recentNodes: formatNode(hits[0]._source.cluster_state?.nodes),
-      priorNodes: formatNode(hits[1]._source.cluster_state?.nodes),
+      recentNodes: formatNode(
+        hits[0]._source.cluster_state?.nodes || hits[0]._source.elasticsearch.cluster.stats.nodes
+      ),
+      priorNodes: formatNode(
+        hits[1]._source.cluster_state?.nodes || hits[1]._source.elasticsearch.cluster.stats.nodes
+      ),
       ccs: indexName.includes(':') ? indexName.split(':')[0] : undefined,
     });
   }
@@ -28,7 +28,12 @@ const getTopHits = (threadType: string, order: 'asc' | 'desc') => ({
       },
     ],
     _source: {
-      includes: [`node_stats.thread_pool.${threadType}.rejected`, 'source_node.name'],
+      includes: [
+        `node_stats.thread_pool.${threadType}.rejected`,
+        `elasticsearch.node.stats.thread_pool.${threadType}.rejected.count`,
+        'source_node.name',
+        'elasticsearch.node.name',
+      ],
     },
     size: 1,
   },
@@ -62,7 +67,7 @@ export async function fetchThreadPoolRejectionStats(
               cluster_uuid: clustersIds,
             },
           },
-          createDatasetFilter('node_stats', 'elasticsearch.node_stats'),
+          createDatasetFilter('node_stats', 'node_stats', 'elasticsearch.node_stats'),
           {
             range: {
               timestamp: {
@@ -131,8 +136,11 @@ export async function fetchThreadPoolRejectionStats(
     }

     const rejectedPath = `_source.node_stats.thread_pool.${threadType}.rejected`;
-    const newRejectionCount = Number(get(mostRecentDoc, rejectedPath));
-    const oldRejectionCount = Number(get(leastRecentDoc, rejectedPath));
+    const rejectedPathEcs = `_source.elasticsearch.node.stats.thread_pool.${threadType}.rejected.count`;
+    const newRejectionCount =
+      Number(get(mostRecentDoc, rejectedPath)) || Number(get(mostRecentDoc, rejectedPathEcs));
+    const oldRejectionCount =
+      Number(get(leastRecentDoc, rejectedPath)) || Number(get(leastRecentDoc, rejectedPathEcs));

     if (invalidNumberValue(newRejectionCount) || invalidNumberValue(oldRejectionCount)) {
       continue;
@@ -143,7 +151,10 @@ export async function fetchThreadPoolRejectionStats(
       ? newRejectionCount
       : newRejectionCount - oldRejectionCount;
     const indexName = mostRecentDoc._index;
-    const nodeName = get(mostRecentDoc, '_source.source_node.name') || node.key;
+    const nodeName =
+      get(mostRecentDoc, '_source.source_node.name') ||
+      get(mostRecentDoc, '_source.elasticsearch.node.name') ||
+      node.key;
     const nodeStat = {
       rejectionCount,
       type: threadType,
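A compact sketch of the per-thread-pool read above, assuming lodash `get` and the two document shapes named in the hunk (the helper name is illustrative, not part of the change):

import { get } from 'lodash';

// Read a thread pool rejection count from either document shape,
// e.g. threadType = 'search' or 'write'.
function readRejectionCount(doc: unknown, threadType: string): number {
  const legacyPath = `_source.node_stats.thread_pool.${threadType}.rejected`;
  const ecsPath = `_source.elasticsearch.node.stats.thread_pool.${threadType}.rejected.count`;
  return Number(get(doc, legacyPath)) || Number(get(doc, ecsPath));
}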