[Stack Monitoring] compatibility for agent data streams (#119112)

* update queries for elasticsearch package

* fix unit test

* add getConfigCcs helper function

* modify rest of es queries

* update logstash and kibana queries to use new createQuery

* change beats and apm to use new createQuery

* update createQuery and remove old one

* make getIndexPattern take request to check for ccs

* fix unit test

* fix unit tests

* update queries and createQuery

* don't add metric constant without dataset in query

* fix types

* fix type

* comment out mb tests

* fix unit test

* fix unit test

* fix

* fix function param

* change to getMetrics name

* change to node_stats

* comment out metricbeat tests

* fix types

* improve types and readability for test

* remove passing of data stream type for now

* add tests for createQuery changes

* update getNewIndexPatterns to take one dataset

* add unit test for getNewIndexPatterns

* fix types

* remove metrics from filter, update tests

* update createNewIndexPatterns to accept new config instead of legacy

* update alert queries to include data stream index patterns

* update comment

* fix defaulting ccs to * for non-cluster requests

* update elasticsearch enterprise module

* update unit test

* remove data_stream.type from queries

* change entsearch to metricbeat module name enterprisesearch

* undo ccs cluster stats change

* fix import

* update alert queries

* fix unit test

* update unit test

* change shard size query to use filter

* change must to filter fix

* update findSupportedBasicLicenseCluster index pattern

* add ccs param to cluster request functions

* update queries for ccs in get_clusters_from_request

* update getBeatsForClusters query

* update clusters apm query

* update enterprisesearch query

* move index pattern to query in fetch for alerts, fix ccs

* remove metricbeat config from alert tests

* fix ts

* add metricset.name back to queries

* comment tests back in

* remove enterprise search checking for standalone cluster to fix test

* update es index metricset name from index_stats to index for mb data

* fix type

* fetchClusters creates index pattern

* fix type

* remove monitoring.ui.metricbeat.index from config and usage in getCollectionStatus

* fix type

Co-authored-by: Kibana Machine <42973632+kibanamachine@users.noreply.github.com>
Sandra G 2022-01-20 17:13:23 -05:00 committed by GitHub
parent 356861d23b
commit eb17b10203
151 changed files with 1678 additions and 1160 deletions
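
The change that repeats across the fetchers in this diff: they no longer receive an index argument. Each fetcher now derives its own index list (the legacy .monitoring-* patterns plus the metricbeat 8 / agent data streams, optionally prefixed for cross-cluster search) and filters on either the legacy type field or data_stream.dataset. A condensed sketch of that recurring pattern, using the helper names introduced below (the exact options vary per fetcher):

import { getNewIndexPatterns } from '../cluster/get_index_patterns';
import { createDatasetFilter } from './create_dataset_query_filter';
import { getConfigCcs } from '../../../common/ccs_utils';
import { Globals } from '../../static_globals';

// Build the combined legacy + data-stream index pattern; passing '*' also targets remote clusters.
const indexPatterns = getNewIndexPatterns({
  config: Globals.app.config,
  moduleType: 'elasticsearch',
  dataset: 'node_stats',
  ccs: getConfigCcs(Globals.app.config) ? '*' : undefined,
});

// Match documents from either collection mode inside the query's filter clause.
const datasetFilter = createDatasetFilter('node_stats', 'elasticsearch.node_stats');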


@ -13,32 +13,17 @@ type Config = Partial<MonitoringConfig> & {
get?: (key: string) => any;
};
export function appendMetricbeatIndex(
config: Config,
indexPattern: string,
ccs?: string,
bypass: boolean = false
) {
if (bypass) {
return indexPattern;
}
// Leverage this function to also append the dynamic metricbeat index too
let mbIndex = null;
export function getConfigCcs(config: Config): boolean {
let ccsEnabled = false;
// TODO: NP
// This function is called with both NP config and LP config
if (isFunction(config.get)) {
mbIndex = config.get('monitoring.ui.metricbeat.index');
ccsEnabled = config.get('monitoring.ui.ccs.enabled');
} else {
mbIndex = get(config, 'ui.metricbeat.index');
ccsEnabled = get(config, 'ui.ccs.enabled');
}
if (ccs) {
mbIndex = `${mbIndex},${ccs}:${mbIndex}`;
}
return `${indexPattern},${mbIndex}`;
return ccsEnabled;
}
/**
* Prefix all comma separated index patterns within the original {@code indexPattern}.
*
@ -50,28 +35,10 @@ export function appendMetricbeatIndex(
* @param {String} ccs The optional cluster-prefix to prepend.
* @return {String} The index pattern with the {@code cluster} prefix appropriately prepended.
*/
export function prefixIndexPattern(
config: Config,
indexPattern: string,
ccs?: string,
monitoringIndicesOnly: boolean = false
) {
let ccsEnabled = false;
// TODO: NP
// This function is called with both NP config and LP config
if (isFunction(config.get)) {
ccsEnabled = config.get('monitoring.ui.ccs.enabled');
} else {
ccsEnabled = get(config, 'ui.ccs.enabled');
}
export function prefixIndexPattern(config: Config, indexPattern: string, ccs?: string) {
const ccsEnabled = getConfigCcs(config);
if (!ccsEnabled || !ccs) {
return appendMetricbeatIndex(
config,
indexPattern,
ccsEnabled ? ccs : undefined,
monitoringIndicesOnly
);
return indexPattern;
}
const patterns = indexPattern.split(',');
@ -79,15 +46,9 @@ export function prefixIndexPattern(
// if a wildcard is used, then we also want to search the local indices
if (ccs === '*') {
return appendMetricbeatIndex(
config,
`${prefixedPattern},${indexPattern}`,
ccs,
monitoringIndicesOnly
);
return `${prefixedPattern},${indexPattern}`;
}
return appendMetricbeatIndex(config, prefixedPattern, ccs, monitoringIndicesOnly);
return prefixedPattern;
}
/**

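To make the slimmed-down helpers concrete, a hedged sketch of the values implied by the branches above, assuming monitoring.ui.ccs.enabled is true (the non-wildcard case relies on the prefixing logic documented but not shown in this hunk):

// prefixIndexPattern now only handles the ccs prefix; the metricbeat index is no longer appended.
prefixIndexPattern(config, '.monitoring-es-*');           // '.monitoring-es-*' (no ccs requested)
prefixIndexPattern(config, '.monitoring-es-*', 'remote'); // 'remote:.monitoring-es-*'
prefixIndexPattern(config, '.monitoring-es-*', '*');      // '*:.monitoring-es-*,.monitoring-es-*'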

@ -132,6 +132,9 @@ export const INDEX_PATTERN_ELASTICSEARCH = '.monitoring-es-*';
// ECS-compliant patterns (metricbeat >8 and agent)
export const INDEX_PATTERN_ELASTICSEARCH_ECS = '.monitoring-es-8-*';
export const INDEX_PATTERN_ENTERPRISE_SEARCH = '.monitoring-ent-search-*';
export const DS_INDEX_PATTERN_METRICS = 'metrics';
export const DS_INDEX_PATTERN_LOGS = 'logs';
export const DS_INDEX_PATTERN_ES = 'elasticsearch';
// This is the unique token that exists in monitoring indices collected by metricbeat
export const METRICBEAT_INDEX_NAME_UNIQUE_TOKEN = '-mb-';
@ -586,3 +589,12 @@ export const ALERT_EMAIL_SERVICES = ['gmail', 'hotmail', 'icloud', 'outlook365',
export const SAVED_OBJECT_TELEMETRY = 'monitoring-telemetry';
export const TELEMETRY_METRIC_BUTTON_CLICK = 'btnclick__';
export type INDEX_PATTERN_TYPES =
| 'elasticsearch'
| 'kibana'
| 'logstash'
| 'beats'
| 'enterprisesearch';
export type DS_INDEX_PATTERN_TYPES = typeof DS_INDEX_PATTERN_METRICS | typeof DS_INDEX_PATTERN_LOGS;
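
For context, the data-stream index patterns these constants feed into follow a metrics-<module>.<dataset>-* shape (the format is taken from the unit-test expectations further down in this diff). A hypothetical composition, for illustration only:

// Illustrative sketch; the real assembly happens inside getNewIndexPatterns.
const dsPattern = (moduleType: string, dataset: string) =>
  `${DS_INDEX_PATTERN_METRICS}-${moduleType}.${dataset}-*`;

dsPattern(DS_INDEX_PATTERN_ES, 'node_stats'); // 'metrics-elasticsearch.node_stats-*'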


@ -28,10 +28,7 @@ import {
CommonAlertParams,
} from '../../common/types/alerts';
import { fetchClusters } from '../lib/alerts/fetch_clusters';
import { getCcsIndexPattern } from '../lib/alerts/get_ccs_index_pattern';
import { INDEX_PATTERN_ELASTICSEARCH } from '../../common/constants';
import { AlertSeverity } from '../../common/enums';
import { appendMetricbeatIndex } from '../lib/alerts/append_mb_index';
import { parseDuration } from '../../../alerting/common';
import { Globals } from '../static_globals';
@ -226,23 +223,14 @@ export class BaseRule {
);
const esClient = services.scopedClusterClient.asCurrentUser;
const availableCcs = Globals.app.config.ui.ccs.enabled;
const clusters = await this.fetchClusters(esClient, params as CommonAlertParams, availableCcs);
const data = await this.fetchData(params, esClient, clusters, availableCcs);
const clusters = await this.fetchClusters(esClient, params as CommonAlertParams);
const data = await this.fetchData(params, esClient, clusters);
return await this.processData(data, clusters, services, state);
}
protected async fetchClusters(
esClient: ElasticsearchClient,
params: CommonAlertParams,
ccs?: boolean
) {
let esIndexPattern = appendMetricbeatIndex(Globals.app.config, INDEX_PATTERN_ELASTICSEARCH);
if (ccs) {
esIndexPattern = getCcsIndexPattern(esIndexPattern, ccs);
}
protected async fetchClusters(esClient: ElasticsearchClient, params: CommonAlertParams) {
if (!params.limit) {
return await fetchClusters(esClient, esIndexPattern);
return await fetchClusters(esClient);
}
const limit = parseDuration(params.limit);
const rangeFilter = this.ruleOptions.fetchClustersRange
@ -253,14 +241,13 @@ export class BaseRule {
},
}
: undefined;
return await fetchClusters(esClient, esIndexPattern, rangeFilter);
return await fetchClusters(esClient, rangeFilter);
}
protected async fetchData(
params: CommonAlertParams | unknown,
esClient: ElasticsearchClient,
clusters: AlertCluster[],
availableCcs: boolean
clusters: AlertCluster[]
): Promise<Array<AlertData & unknown>> {
throw new Error('Child classes must implement `fetchData`');
}


@ -39,7 +39,6 @@ jest.mock('../static_globals', () => ({
config: {
ui: {
ccs: { enabled: true },
metricbeat: { index: 'metricbeat-*' },
container: { elasticsearch: { enabled: false } },
},
},


@ -22,18 +22,12 @@ import {
CCRReadExceptionsStats,
} from '../../common/types/alerts';
import { AlertInstance } from '../../../alerting/server';
import {
INDEX_PATTERN_ELASTICSEARCH,
RULE_CCR_READ_EXCEPTIONS,
RULE_DETAILS,
} from '../../common/constants';
import { RULE_CCR_READ_EXCEPTIONS, RULE_DETAILS } from '../../common/constants';
import { fetchCCRReadExceptions } from '../lib/alerts/fetch_ccr_read_exceptions';
import { getCcsIndexPattern } from '../lib/alerts/get_ccs_index_pattern';
import { AlertMessageTokenType, AlertSeverity } from '../../common/enums';
import { parseDuration } from '../../../alerting/common/parse_duration';
import { SanitizedAlert, RawAlertInstance } from '../../../alerting/common';
import { AlertingDefaults, createLink } from './alert_helpers';
import { appendMetricbeatIndex } from '../lib/alerts/append_mb_index';
import { Globals } from '../static_globals';
export class CCRReadExceptionsRule extends BaseRule {
@ -72,20 +66,14 @@ export class CCRReadExceptionsRule extends BaseRule {
protected async fetchData(
params: CommonAlertParams,
esClient: ElasticsearchClient,
clusters: AlertCluster[],
availableCcs: boolean
clusters: AlertCluster[]
): Promise<AlertData[]> {
let esIndexPattern = appendMetricbeatIndex(Globals.app.config, INDEX_PATTERN_ELASTICSEARCH);
if (availableCcs) {
esIndexPattern = getCcsIndexPattern(esIndexPattern, availableCcs);
}
const { duration: durationString } = params;
const duration = parseDuration(durationString);
const endMs = +new Date();
const startMs = endMs - duration;
const stats = await fetchCCRReadExceptions(
esClient,
esIndexPattern,
startMs,
endMs,
Globals.app.config.ui.max_bucket_size,


@ -21,7 +21,6 @@ jest.mock('../static_globals', () => ({
config: {
ui: {
ccs: { enabled: true },
metricbeat: { index: 'metricbeat-*' },
},
},
},


@ -19,17 +19,10 @@ import {
AlertInstanceState,
} from '../../common/types/alerts';
import { AlertInstance } from '../../../alerting/server';
import {
RULE_CLUSTER_HEALTH,
LEGACY_RULE_DETAILS,
INDEX_PATTERN_ELASTICSEARCH,
} from '../../common/constants';
import { RULE_CLUSTER_HEALTH, LEGACY_RULE_DETAILS } from '../../common/constants';
import { AlertMessageTokenType, AlertClusterHealthType, AlertSeverity } from '../../common/enums';
import { AlertingDefaults } from './alert_helpers';
import { SanitizedAlert } from '../../../alerting/common';
import { Globals } from '../static_globals';
import { getCcsIndexPattern } from '../lib/alerts/get_ccs_index_pattern';
import { appendMetricbeatIndex } from '../lib/alerts/append_mb_index';
import { fetchClusterHealth } from '../lib/alerts/fetch_cluster_health';
const RED_STATUS_MESSAGE = i18n.translate('xpack.monitoring.alerts.clusterHealth.redMessage', {
@ -66,19 +59,9 @@ export class ClusterHealthRule extends BaseRule {
protected async fetchData(
params: CommonAlertParams,
esClient: ElasticsearchClient,
clusters: AlertCluster[],
availableCcs: boolean
clusters: AlertCluster[]
): Promise<AlertData[]> {
let esIndexPattern = appendMetricbeatIndex(Globals.app.config, INDEX_PATTERN_ELASTICSEARCH);
if (availableCcs) {
esIndexPattern = getCcsIndexPattern(esIndexPattern, availableCcs);
}
const healths = await fetchClusterHealth(
esClient,
clusters,
esIndexPattern,
params.filterQuery
);
const healths = await fetchClusterHealth(esClient, clusters, params.filterQuery);
return healths.map((clusterHealth) => {
const shouldFire = clusterHealth.health !== AlertClusterHealthType.Green;
const severity =


@ -27,7 +27,6 @@ jest.mock('../static_globals', () => ({
config: {
ui: {
ccs: { enabled: true },
metricbeat: { index: 'metricbeat-*' },
container: { elasticsearch: { enabled: false } },
},
},


@ -23,16 +23,14 @@ import {
CommonAlertFilter,
} from '../../common/types/alerts';
import { AlertInstance } from '../../../alerting/server';
import { INDEX_PATTERN_ELASTICSEARCH, RULE_CPU_USAGE, RULE_DETAILS } from '../../common/constants';
import { RULE_CPU_USAGE, RULE_DETAILS } from '../../common/constants';
// @ts-ignore
import { ROUNDED_FLOAT } from '../../common/formatting';
import { fetchCpuUsageNodeStats } from '../lib/alerts/fetch_cpu_usage_node_stats';
import { getCcsIndexPattern } from '../lib/alerts/get_ccs_index_pattern';
import { AlertMessageTokenType, AlertSeverity } from '../../common/enums';
import { RawAlertInstance, SanitizedAlert } from '../../../alerting/common';
import { parseDuration } from '../../../alerting/common/parse_duration';
import { AlertingDefaults, createLink } from './alert_helpers';
import { appendMetricbeatIndex } from '../lib/alerts/append_mb_index';
import { Globals } from '../static_globals';
export class CpuUsageRule extends BaseRule {
@ -60,20 +58,14 @@ export class CpuUsageRule extends BaseRule {
protected async fetchData(
params: CommonAlertParams,
esClient: ElasticsearchClient,
clusters: AlertCluster[],
availableCcs: boolean
clusters: AlertCluster[]
): Promise<AlertData[]> {
let esIndexPattern = appendMetricbeatIndex(Globals.app.config, INDEX_PATTERN_ELASTICSEARCH);
if (availableCcs) {
esIndexPattern = getCcsIndexPattern(esIndexPattern, availableCcs);
}
const duration = parseDuration(params.duration);
const endMs = +new Date();
const startMs = endMs - duration;
const stats = await fetchCpuUsageNodeStats(
esClient,
clusters,
esIndexPattern,
startMs,
endMs,
Globals.app.config.ui.max_bucket_size,


@ -40,7 +40,6 @@ jest.mock('../static_globals', () => ({
config: {
ui: {
ccs: { enabled: true },
metricbeat: { index: 'metricbeat-*' },
container: { elasticsearch: { enabled: false } },
},
},


@ -23,15 +23,13 @@ import {
CommonAlertFilter,
} from '../../common/types/alerts';
import { AlertInstance } from '../../../alerting/server';
import { INDEX_PATTERN_ELASTICSEARCH, RULE_DISK_USAGE, RULE_DETAILS } from '../../common/constants';
import { RULE_DISK_USAGE, RULE_DETAILS } from '../../common/constants';
// @ts-ignore
import { ROUNDED_FLOAT } from '../../common/formatting';
import { fetchDiskUsageNodeStats } from '../lib/alerts/fetch_disk_usage_node_stats';
import { getCcsIndexPattern } from '../lib/alerts/get_ccs_index_pattern';
import { AlertMessageTokenType, AlertSeverity } from '../../common/enums';
import { RawAlertInstance, SanitizedAlert } from '../../../alerting/common';
import { AlertingDefaults, createLink } from './alert_helpers';
import { appendMetricbeatIndex } from '../lib/alerts/append_mb_index';
import { Globals } from '../static_globals';
export class DiskUsageRule extends BaseRule {
@ -59,18 +57,12 @@ export class DiskUsageRule extends BaseRule {
protected async fetchData(
params: CommonAlertParams,
esClient: ElasticsearchClient,
clusters: AlertCluster[],
availableCcs: boolean
clusters: AlertCluster[]
): Promise<AlertData[]> {
let esIndexPattern = appendMetricbeatIndex(Globals.app.config, INDEX_PATTERN_ELASTICSEARCH);
if (availableCcs) {
esIndexPattern = getCcsIndexPattern(esIndexPattern, availableCcs);
}
const { duration, threshold } = params;
const stats = await fetchDiskUsageNodeStats(
esClient,
clusters,
esIndexPattern,
duration as string,
Globals.app.config.ui.max_bucket_size,
params.filterQuery


@ -28,7 +28,6 @@ jest.mock('../static_globals', () => ({
config: {
ui: {
ccs: { enabled: true },
metricbeat: { index: 'metricbeat-*' },
container: { elasticsearch: { enabled: false } },
},
},


@ -18,17 +18,11 @@ import {
AlertVersions,
} from '../../common/types/alerts';
import { AlertInstance } from '../../../alerting/server';
import {
RULE_ELASTICSEARCH_VERSION_MISMATCH,
LEGACY_RULE_DETAILS,
INDEX_PATTERN_ELASTICSEARCH,
} from '../../common/constants';
import { RULE_ELASTICSEARCH_VERSION_MISMATCH, LEGACY_RULE_DETAILS } from '../../common/constants';
import { AlertSeverity } from '../../common/enums';
import { AlertingDefaults } from './alert_helpers';
import { SanitizedAlert } from '../../../alerting/common';
import { Globals } from '../static_globals';
import { getCcsIndexPattern } from '../lib/alerts/get_ccs_index_pattern';
import { appendMetricbeatIndex } from '../lib/alerts/append_mb_index';
import { fetchElasticsearchVersions } from '../lib/alerts/fetch_elasticsearch_versions';
export class ElasticsearchVersionMismatchRule extends BaseRule {
@ -55,17 +49,11 @@ export class ElasticsearchVersionMismatchRule extends BaseRule {
protected async fetchData(
params: CommonAlertParams,
esClient: ElasticsearchClient,
clusters: AlertCluster[],
availableCcs: boolean
clusters: AlertCluster[]
): Promise<AlertData[]> {
let esIndexPattern = appendMetricbeatIndex(Globals.app.config, INDEX_PATTERN_ELASTICSEARCH);
if (availableCcs) {
esIndexPattern = getCcsIndexPattern(esIndexPattern, availableCcs);
}
const elasticsearchVersions = await fetchElasticsearchVersions(
esClient,
clusters,
esIndexPattern,
Globals.app.config.ui.max_bucket_size,
params.filterQuery
);


@ -28,7 +28,6 @@ jest.mock('../static_globals', () => ({
config: {
ui: {
ccs: { enabled: true },
metricbeat: { index: 'metricbeat-*' },
container: { elasticsearch: { enabled: false } },
},
},


@ -18,17 +18,11 @@ import {
AlertVersions,
} from '../../common/types/alerts';
import { AlertInstance } from '../../../alerting/server';
import {
RULE_KIBANA_VERSION_MISMATCH,
LEGACY_RULE_DETAILS,
INDEX_PATTERN_KIBANA,
} from '../../common/constants';
import { RULE_KIBANA_VERSION_MISMATCH, LEGACY_RULE_DETAILS } from '../../common/constants';
import { AlertSeverity } from '../../common/enums';
import { AlertingDefaults } from './alert_helpers';
import { SanitizedAlert } from '../../../alerting/common';
import { Globals } from '../static_globals';
import { getCcsIndexPattern } from '../lib/alerts/get_ccs_index_pattern';
import { appendMetricbeatIndex } from '../lib/alerts/append_mb_index';
import { fetchKibanaVersions } from '../lib/alerts/fetch_kibana_versions';
export class KibanaVersionMismatchRule extends BaseRule {
@ -68,17 +62,11 @@ export class KibanaVersionMismatchRule extends BaseRule {
protected async fetchData(
params: CommonAlertParams,
esClient: ElasticsearchClient,
clusters: AlertCluster[],
availableCcs: boolean
clusters: AlertCluster[]
): Promise<AlertData[]> {
let kibanaIndexPattern = appendMetricbeatIndex(Globals.app.config, INDEX_PATTERN_KIBANA);
if (availableCcs) {
kibanaIndexPattern = getCcsIndexPattern(kibanaIndexPattern, availableCcs);
}
const kibanaVersions = await fetchKibanaVersions(
esClient,
clusters,
kibanaIndexPattern,
Globals.app.config.ui.max_bucket_size,
params.filterQuery
);


@ -40,7 +40,6 @@ jest.mock('../static_globals', () => ({
config: {
ui: {
ccs: { enabled: true },
metricbeat: { index: 'metricbeat-*' },
container: { elasticsearch: { enabled: false } },
},
},


@ -22,17 +22,11 @@ import {
IndexShardSizeStats,
} from '../../common/types/alerts';
import { AlertInstance } from '../../../alerting/server';
import {
INDEX_PATTERN_ELASTICSEARCH,
RULE_LARGE_SHARD_SIZE,
RULE_DETAILS,
} from '../../common/constants';
import { RULE_LARGE_SHARD_SIZE, RULE_DETAILS } from '../../common/constants';
import { fetchIndexShardSize } from '../lib/alerts/fetch_index_shard_size';
import { getCcsIndexPattern } from '../lib/alerts/get_ccs_index_pattern';
import { AlertMessageTokenType, AlertSeverity } from '../../common/enums';
import { SanitizedAlert, RawAlertInstance } from '../../../alerting/common';
import { AlertingDefaults, createLink } from './alert_helpers';
import { appendMetricbeatIndex } from '../lib/alerts/append_mb_index';
import { Globals } from '../static_globals';
export class LargeShardSizeRule extends BaseRule {
@ -60,19 +54,13 @@ export class LargeShardSizeRule extends BaseRule {
protected async fetchData(
params: CommonAlertParams & { indexPattern: string },
esClient: ElasticsearchClient,
clusters: AlertCluster[],
availableCcs: boolean
clusters: AlertCluster[]
): Promise<AlertData[]> {
let esIndexPattern = appendMetricbeatIndex(Globals.app.config, INDEX_PATTERN_ELASTICSEARCH);
if (availableCcs) {
esIndexPattern = getCcsIndexPattern(esIndexPattern, availableCcs);
}
const { threshold, indexPattern: shardIndexPatterns } = params;
const stats = await fetchIndexShardSize(
esClient,
clusters,
esIndexPattern,
threshold!,
shardIndexPatterns,
Globals.app.config.ui.max_bucket_size,


@ -34,7 +34,6 @@ jest.mock('../static_globals', () => ({
ui: {
show_license_expiration: true,
ccs: { enabled: true },
metricbeat: { index: 'metricbeat-*' },
container: { elasticsearch: { enabled: false } },
},
},


@ -21,17 +21,11 @@ import {
AlertLicenseState,
} from '../../common/types/alerts';
import { AlertExecutorOptions, AlertInstance } from '../../../alerting/server';
import {
RULE_LICENSE_EXPIRATION,
LEGACY_RULE_DETAILS,
INDEX_PATTERN_ELASTICSEARCH,
} from '../../common/constants';
import { RULE_LICENSE_EXPIRATION, LEGACY_RULE_DETAILS } from '../../common/constants';
import { AlertMessageTokenType, AlertSeverity } from '../../common/enums';
import { AlertingDefaults } from './alert_helpers';
import { SanitizedAlert } from '../../../alerting/common';
import { Globals } from '../static_globals';
import { getCcsIndexPattern } from '../lib/alerts/get_ccs_index_pattern';
import { appendMetricbeatIndex } from '../lib/alerts/append_mb_index';
import { fetchLicenses } from '../lib/alerts/fetch_licenses';
const EXPIRES_DAYS = [60, 30, 14, 7];
@ -80,14 +74,9 @@ export class LicenseExpirationRule extends BaseRule {
protected async fetchData(
params: CommonAlertParams,
esClient: ElasticsearchClient,
clusters: AlertCluster[],
availableCcs: boolean
clusters: AlertCluster[]
): Promise<AlertData[]> {
let esIndexPattern = appendMetricbeatIndex(Globals.app.config, INDEX_PATTERN_ELASTICSEARCH);
if (availableCcs) {
esIndexPattern = getCcsIndexPattern(esIndexPattern, availableCcs);
}
const licenses = await fetchLicenses(esClient, clusters, esIndexPattern, params.filterQuery);
const licenses = await fetchLicenses(esClient, clusters, params.filterQuery);
return licenses.map((license) => {
const { clusterUuid, type, expiryDateMS, status, ccs } = license;


@ -29,7 +29,6 @@ jest.mock('../static_globals', () => ({
ui: {
show_license_expiration: true,
ccs: { enabled: true },
metricbeat: { index: 'metricbeat-*' },
container: { elasticsearch: { enabled: false } },
},
},


@ -18,17 +18,11 @@ import {
AlertVersions,
} from '../../common/types/alerts';
import { AlertInstance } from '../../../alerting/server';
import {
RULE_LOGSTASH_VERSION_MISMATCH,
LEGACY_RULE_DETAILS,
INDEX_PATTERN_LOGSTASH,
} from '../../common/constants';
import { RULE_LOGSTASH_VERSION_MISMATCH, LEGACY_RULE_DETAILS } from '../../common/constants';
import { AlertSeverity } from '../../common/enums';
import { AlertingDefaults } from './alert_helpers';
import { SanitizedAlert } from '../../../alerting/common';
import { Globals } from '../static_globals';
import { getCcsIndexPattern } from '../lib/alerts/get_ccs_index_pattern';
import { appendMetricbeatIndex } from '../lib/alerts/append_mb_index';
import { fetchLogstashVersions } from '../lib/alerts/fetch_logstash_versions';
export class LogstashVersionMismatchRule extends BaseRule {
@ -55,17 +49,11 @@ export class LogstashVersionMismatchRule extends BaseRule {
protected async fetchData(
params: CommonAlertParams,
esClient: ElasticsearchClient,
clusters: AlertCluster[],
availableCcs: boolean
clusters: AlertCluster[]
): Promise<AlertData[]> {
let logstashIndexPattern = appendMetricbeatIndex(Globals.app.config, INDEX_PATTERN_LOGSTASH);
if (availableCcs) {
logstashIndexPattern = getCcsIndexPattern(logstashIndexPattern, availableCcs);
}
const logstashVersions = await fetchLogstashVersions(
esClient,
clusters,
logstashIndexPattern,
Globals.app.config.ui.max_bucket_size,
params.filterQuery
);


@ -27,7 +27,6 @@ jest.mock('../static_globals', () => ({
config: {
ui: {
ccs: { enabled: true },
metricbeat: { index: 'metricbeat-*' },
container: { elasticsearch: { enabled: false } },
},
},


@ -23,19 +23,13 @@ import {
CommonAlertFilter,
} from '../../common/types/alerts';
import { AlertInstance } from '../../../alerting/server';
import {
INDEX_PATTERN_ELASTICSEARCH,
RULE_MEMORY_USAGE,
RULE_DETAILS,
} from '../../common/constants';
import { RULE_MEMORY_USAGE, RULE_DETAILS } from '../../common/constants';
// @ts-ignore
import { ROUNDED_FLOAT } from '../../common/formatting';
import { fetchMemoryUsageNodeStats } from '../lib/alerts/fetch_memory_usage_node_stats';
import { getCcsIndexPattern } from '../lib/alerts/get_ccs_index_pattern';
import { AlertMessageTokenType, AlertSeverity } from '../../common/enums';
import { RawAlertInstance, SanitizedAlert } from '../../../alerting/common';
import { AlertingDefaults, createLink } from './alert_helpers';
import { appendMetricbeatIndex } from '../lib/alerts/append_mb_index';
import { parseDuration } from '../../../alerting/common/parse_duration';
import { Globals } from '../static_globals';
@ -64,13 +58,8 @@ export class MemoryUsageRule extends BaseRule {
protected async fetchData(
params: CommonAlertParams,
esClient: ElasticsearchClient,
clusters: AlertCluster[],
availableCcs: boolean
clusters: AlertCluster[]
): Promise<AlertData[]> {
let esIndexPattern = appendMetricbeatIndex(Globals.app.config, INDEX_PATTERN_ELASTICSEARCH);
if (availableCcs) {
esIndexPattern = getCcsIndexPattern(esIndexPattern, availableCcs);
}
const { duration, threshold } = params;
const parsedDuration = parseDuration(duration as string);
const endMs = +new Date();
@ -79,7 +68,6 @@ export class MemoryUsageRule extends BaseRule {
const stats = await fetchMemoryUsageNodeStats(
esClient,
clusters,
esIndexPattern,
startMs,
endMs,
Globals.app.config.ui.max_bucket_size,


@ -29,7 +29,6 @@ jest.mock('../static_globals', () => ({
ui: {
show_license_expiration: true,
ccs: { enabled: true },
metricbeat: { index: 'metricbeat-*' },
container: { elasticsearch: { enabled: false } },
},
},


@ -20,12 +20,10 @@ import {
AlertNodeState,
} from '../../common/types/alerts';
import { AlertInstance } from '../../../alerting/server';
import { INDEX_PATTERN, RULE_MISSING_MONITORING_DATA, RULE_DETAILS } from '../../common/constants';
import { getCcsIndexPattern } from '../lib/alerts/get_ccs_index_pattern';
import { RULE_MISSING_MONITORING_DATA, RULE_DETAILS } from '../../common/constants';
import { AlertMessageTokenType, AlertSeverity } from '../../common/enums';
import { RawAlertInstance, SanitizedAlert } from '../../../alerting/common';
import { parseDuration } from '../../../alerting/common/parse_duration';
import { appendMetricbeatIndex } from '../lib/alerts/append_mb_index';
import { fetchMissingMonitoringData } from '../lib/alerts/fetch_missing_monitoring_data';
import { AlertingDefaults, createLink } from './alert_helpers';
import { Globals } from '../static_globals';
@ -59,20 +57,14 @@ export class MissingMonitoringDataRule extends BaseRule {
protected async fetchData(
params: CommonAlertParams,
esClient: ElasticsearchClient,
clusters: AlertCluster[],
availableCcs: boolean
clusters: AlertCluster[]
): Promise<AlertData[]> {
let indexPattern = appendMetricbeatIndex(Globals.app.config, INDEX_PATTERN);
if (availableCcs) {
indexPattern = getCcsIndexPattern(indexPattern, availableCcs);
}
const duration = parseDuration(params.duration);
const limit = parseDuration(params.limit!);
const now = +new Date();
const missingData = await fetchMissingMonitoringData(
esClient,
clusters,
indexPattern,
Globals.app.config.ui.max_bucket_size,
now,
now - limit - LIMIT_BUFFER,


@ -34,7 +34,6 @@ jest.mock('../static_globals', () => ({
config: {
ui: {
ccs: { enabled: true },
metricbeat: { index: 'metricbeat-*' },
container: { elasticsearch: { enabled: false } },
},
},


@ -20,19 +20,11 @@ import {
AlertNodesChangedState,
} from '../../common/types/alerts';
import { AlertInstance } from '../../../alerting/server';
import {
RULE_NODES_CHANGED,
LEGACY_RULE_DETAILS,
INDEX_PATTERN_ELASTICSEARCH,
} from '../../common/constants';
import { RULE_NODES_CHANGED, LEGACY_RULE_DETAILS } from '../../common/constants';
import { AlertingDefaults } from './alert_helpers';
import { SanitizedAlert } from '../../../alerting/common';
import { Globals } from '../static_globals';
import { fetchNodesFromClusterStats } from '../lib/alerts/fetch_nodes_from_cluster_stats';
import { getCcsIndexPattern } from '../lib/alerts/get_ccs_index_pattern';
import { appendMetricbeatIndex } from '../lib/alerts/append_mb_index';
import { AlertSeverity } from '../../common/enums';
interface AlertNodesChangedStates {
removed: AlertClusterStatsNode[];
added: AlertClusterStatsNode[];
@ -104,17 +96,11 @@ export class NodesChangedRule extends BaseRule {
protected async fetchData(
params: CommonAlertParams,
esClient: ElasticsearchClient,
clusters: AlertCluster[],
availableCcs: boolean
clusters: AlertCluster[]
): Promise<AlertData[]> {
let esIndexPattern = appendMetricbeatIndex(Globals.app.config, INDEX_PATTERN_ELASTICSEARCH);
if (availableCcs) {
esIndexPattern = getCcsIndexPattern(esIndexPattern, availableCcs);
}
const nodesFromClusterStats = await fetchNodesFromClusterStats(
esClient,
clusters,
esIndexPattern,
params.filterQuery
);
return nodesFromClusterStats.map((nodes) => {


@ -21,13 +21,10 @@ import {
AlertThreadPoolRejectionsStats,
} from '../../common/types/alerts';
import { AlertInstance } from '../../../alerting/server';
import { INDEX_PATTERN_ELASTICSEARCH } from '../../common/constants';
import { fetchThreadPoolRejectionStats } from '../lib/alerts/fetch_thread_pool_rejections_stats';
import { getCcsIndexPattern } from '../lib/alerts/get_ccs_index_pattern';
import { AlertMessageTokenType, AlertSeverity } from '../../common/enums';
import { Alert, RawAlertInstance } from '../../../alerting/common';
import { AlertingDefaults, createLink } from './alert_helpers';
import { appendMetricbeatIndex } from '../lib/alerts/append_mb_index';
import { Globals } from '../static_globals';
type ActionVariables = Array<{ name: string; description: string }>;
@ -70,20 +67,13 @@ export class ThreadPoolRejectionsRuleBase extends BaseRule {
protected async fetchData(
params: ThreadPoolRejectionsAlertParams,
esClient: ElasticsearchClient,
clusters: AlertCluster[],
availableCcs: boolean
clusters: AlertCluster[]
): Promise<AlertData[]> {
let esIndexPattern = appendMetricbeatIndex(Globals.app.config, INDEX_PATTERN_ELASTICSEARCH);
if (availableCcs) {
esIndexPattern = getCcsIndexPattern(esIndexPattern, availableCcs);
}
const { threshold, duration } = params;
const stats = await fetchThreadPoolRejectionStats(
esClient,
clusters,
esIndexPattern,
Globals.app.config.ui.max_bucket_size,
this.threadPoolType,
duration,


@ -29,7 +29,6 @@ jest.mock('../static_globals', () => ({
ui: {
show_license_expiration: true,
ccs: { enabled: true },
metricbeat: { index: 'metricbeat-*' },
container: { elasticsearch: { enabled: false } },
},
},


@ -29,7 +29,6 @@ jest.mock('../static_globals', () => ({
ui: {
show_license_expiration: true,
ccs: { enabled: true },
metricbeat: { index: 'metricbeat-*' },
container: { elasticsearch: { enabled: false } },
},
},


@ -102,9 +102,6 @@ describe('config schema', () => {
"index": "filebeat-*",
},
"max_bucket_size": 10000,
"metricbeat": Object {
"index": "metricbeat-*",
},
"min_interval_seconds": 10,
"show_license_expiration": true,
},


@ -32,9 +32,6 @@ export const configSchema = schema.object({
logs: schema.object({
index: schema.string({ defaultValue: 'filebeat-*' }),
}),
metricbeat: schema.object({
index: schema.string({ defaultValue: 'metricbeat-*' }),
}),
max_bucket_size: schema.number({ defaultValue: 10000 }),
elasticsearch: monitoringElasticsearchConfigSchema,
container: schema.object({


@ -11,8 +11,6 @@ import { MonitoringConfig } from '../../config';
import { getStackProductsUsage } from './lib/get_stack_products_usage';
import { fetchLicenseType } from './lib/fetch_license_type';
import { MonitoringUsage, StackProductUsage, MonitoringClusterStackProductUsage } from './types';
import { INDEX_PATTERN_ELASTICSEARCH } from '../../../common/constants';
import { getCcsIndexPattern } from '../../lib/alerts/get_ccs_index_pattern';
import { fetchClusters } from '../../lib/alerts/fetch_clusters';
export function getMonitoringUsageCollector(
@ -106,8 +104,7 @@ export function getMonitoringUsageCollector(
: getClient().asInternalUser;
const usageClusters: MonitoringClusterStackProductUsage[] = [];
const availableCcs = config.ui.ccs.enabled;
const elasticsearchIndex = getCcsIndexPattern(INDEX_PATTERN_ELASTICSEARCH, availableCcs);
const clusters = await fetchClusters(callCluster, elasticsearchIndex);
const clusters = await fetchClusters(callCluster);
for (const cluster of clusters) {
const license = await fetchLicenseType(callCluster, availableCcs, cluster.clusterUuid);
const stackProducts = await getStackProductsUsage(


@ -1,12 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { MonitoringConfig } from '../../config';
export function appendMetricbeatIndex(config: MonitoringConfig, indexPattern: string) {
return `${indexPattern},${config.ui.metricbeat.index}`;
}


@ -0,0 +1,24 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
export const createDatasetFilter = (legacyType: string, dataset: string) => ({
bool: {
should: [
{
term: {
type: legacyType,
},
},
{
term: {
'data_stream.dataset': dataset,
},
},
],
minimum_should_match: 1,
},
});
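
A short usage sketch: dropped into a query's filter array, the helper matches documents written by internal collection (legacy type field) as well as by metricbeat 8 / agent (data_stream.dataset):

const filter = [
  createDatasetFilter('node_stats', 'elasticsearch.node_stats'),
  // which expands to:
  // { bool: { should: [ { term: { type: 'node_stats' } },
  //                     { term: { 'data_stream.dataset': 'elasticsearch.node_stats' } } ],
  //           minimum_should_match: 1 } }
];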


@ -8,17 +8,26 @@
import { ElasticsearchClient } from 'kibana/server';
import { get } from 'lodash';
import { CCRReadExceptionsStats } from '../../../common/types/alerts';
import { getNewIndexPatterns } from '../cluster/get_index_patterns';
import { createDatasetFilter } from './create_dataset_query_filter';
import { Globals } from '../../static_globals';
import { getConfigCcs } from '../../../common/ccs_utils';
export async function fetchCCRReadExceptions(
esClient: ElasticsearchClient,
index: string,
startMs: number,
endMs: number,
size: number,
filterQuery?: string
): Promise<CCRReadExceptionsStats[]> {
const indexPatterns = getNewIndexPatterns({
config: Globals.app.config,
moduleType: 'elasticsearch',
dataset: 'ccr',
ccs: getConfigCcs(Globals.app.config) ? '*' : undefined,
});
const params = {
index,
index: indexPatterns,
filter_path: ['aggregations.remote_clusters.buckets'],
body: {
size: 0,
@ -35,11 +44,7 @@ export async function fetchCCRReadExceptions(
},
},
},
{
term: {
type: 'ccr_stats',
},
},
createDatasetFilter('ccr_stats', 'elasticsearch.ccr'),
{
range: {
timestamp: {


@ -10,6 +10,18 @@ import type * as estypes from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
import { elasticsearchClientMock } from '../../../../../../src/core/server/elasticsearch/client/mocks';
import { fetchClusterHealth } from './fetch_cluster_health';
jest.mock('../../static_globals', () => ({
Globals: {
app: {
config: {
ui: {
ccs: { enabled: true },
},
},
},
},
}));
describe('fetchClusterHealth', () => {
it('should return the cluster health', async () => {
const status = 'green';
@ -34,9 +46,8 @@ describe('fetchClusterHealth', () => {
);
const clusters = [{ clusterUuid, clusterName: 'foo' }];
const index = '.monitoring-es-*';
const health = await fetchClusterHealth(esClient, clusters, index);
const health = await fetchClusterHealth(esClient, clusters);
expect(health).toEqual([
{
health: status,


@ -7,15 +7,24 @@
import { ElasticsearchClient } from 'kibana/server';
import { AlertCluster, AlertClusterHealth } from '../../../common/types/alerts';
import { ElasticsearchSource, ElasticsearchResponse } from '../../../common/types/es';
import { createDatasetFilter } from './create_dataset_query_filter';
import { Globals } from '../../static_globals';
import { getConfigCcs } from '../../../common/ccs_utils';
import { getNewIndexPatterns } from '../cluster/get_index_patterns';
export async function fetchClusterHealth(
esClient: ElasticsearchClient,
clusters: AlertCluster[],
index: string,
filterQuery?: string
): Promise<AlertClusterHealth[]> {
const indexPatterns = getNewIndexPatterns({
config: Globals.app.config,
moduleType: 'elasticsearch',
dataset: 'cluster_stats',
ccs: getConfigCcs(Globals.app.config) ? '*' : undefined,
});
const params = {
index,
index: indexPatterns,
filter_path: [
'hits.hits._source.cluster_state.status',
'hits.hits._source.cluster_uuid',
@ -39,11 +48,7 @@ export async function fetchClusterHealth(
cluster_uuid: clusters.map((cluster) => cluster.clusterUuid),
},
},
{
term: {
type: 'cluster_stats',
},
},
createDatasetFilter('cluster_stats', 'elasticsearch.cluster_stats'),
{
range: {
timestamp: {


@ -11,6 +11,18 @@ import { elasticsearchClientMock } from 'src/core/server/elasticsearch/client/mo
import { elasticsearchServiceMock } from 'src/core/server/mocks';
import { fetchClusters } from './fetch_clusters';
jest.mock('../../static_globals', () => ({
Globals: {
app: {
config: {
ui: {
ccs: { enabled: true },
},
},
},
},
}));
describe('fetchClusters', () => {
const clusterUuid = '1sdfds734';
const clusterName = 'monitoring';
@ -31,8 +43,7 @@ describe('fetchClusters', () => {
},
} as estypes.SearchResponse)
);
const index = '.monitoring-es-*';
const result = await fetchClusters(esClient, index);
const result = await fetchClusters(esClient);
expect(result).toEqual([{ clusterUuid, clusterName }]);
});
@ -60,15 +71,13 @@ describe('fetchClusters', () => {
},
} as estypes.SearchResponse)
);
const index = '.monitoring-es-*';
const result = await fetchClusters(esClient, index);
const result = await fetchClusters(esClient);
expect(result).toEqual([{ clusterUuid, clusterName: metadataName }]);
});
it('should limit the time period in the query', async () => {
const esClient = elasticsearchServiceMock.createScopedClusterClient().asCurrentUser;
const index = '.monitoring-es-*';
await fetchClusters(esClient, index);
await fetchClusters(esClient);
const params = esClient.search.mock.calls[0][0] as any;
expect(params?.body?.query.bool.filter[1].range.timestamp.gte).toBe('now-2m');
});


@ -8,6 +8,10 @@
import { ElasticsearchClient } from 'kibana/server';
import { get } from 'lodash';
import { AlertCluster } from '../../../common/types/alerts';
import { getNewIndexPatterns } from '../cluster/get_index_patterns';
import { createDatasetFilter } from './create_dataset_query_filter';
import { Globals } from '../../static_globals';
import { getConfigCcs } from '../../../common/ccs_utils';
interface RangeFilter {
[field: string]: {
@ -18,11 +22,15 @@ interface RangeFilter {
export async function fetchClusters(
esClient: ElasticsearchClient,
index: string,
rangeFilter: RangeFilter = { timestamp: { gte: 'now-2m' } }
): Promise<AlertCluster[]> {
const indexPatterns = getNewIndexPatterns({
config: Globals.app.config,
moduleType: 'elasticsearch',
ccs: getConfigCcs(Globals.app.config) ? '*' : undefined,
});
const params = {
index,
index: indexPatterns,
filter_path: [
'hits.hits._source.cluster_settings.cluster.metadata.display_name',
'hits.hits._source.cluster_uuid',
@ -33,11 +41,7 @@ export async function fetchClusters(
query: {
bool: {
filter: [
{
term: {
type: 'cluster_stats',
},
},
createDatasetFilter('cluster_stats', 'elasticsearch.cluster_stats'),
{
range: rangeFilter,
},


@ -10,6 +10,18 @@ import type * as estypes from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
import { elasticsearchClientMock } from '../../../../../../src/core/server/elasticsearch/client/mocks';
import { fetchCpuUsageNodeStats } from './fetch_cpu_usage_node_stats';
jest.mock('../../static_globals', () => ({
Globals: {
app: {
config: {
ui: {
ccs: { enabled: true },
},
},
},
},
}));
describe('fetchCpuUsageNodeStats', () => {
const esClient = elasticsearchClientMock.createScopedClusterClient().asCurrentUser;
const clusters = [
@ -18,7 +30,6 @@ describe('fetchCpuUsageNodeStats', () => {
clusterName: 'test',
},
];
const index = '.monitoring-es-*';
const startMs = 0;
const endMs = 0;
const size = 10;
@ -62,7 +73,7 @@ describe('fetchCpuUsageNodeStats', () => {
},
})
);
const result = await fetchCpuUsageNodeStats(esClient, clusters, index, startMs, endMs, size);
const result = await fetchCpuUsageNodeStats(esClient, clusters, startMs, endMs, size);
expect(result).toEqual([
{
clusterUuid: clusters[0].clusterUuid,
@ -129,7 +140,7 @@ describe('fetchCpuUsageNodeStats', () => {
},
})
);
const result = await fetchCpuUsageNodeStats(esClient, clusters, index, startMs, endMs, size);
const result = await fetchCpuUsageNodeStats(esClient, clusters, startMs, endMs, size);
expect(result).toEqual([
{
clusterUuid: clusters[0].clusterUuid,
@ -189,7 +200,7 @@ describe('fetchCpuUsageNodeStats', () => {
},
})
);
const result = await fetchCpuUsageNodeStats(esClient, clusters, index, startMs, endMs, size);
const result = await fetchCpuUsageNodeStats(esClient, clusters, startMs, endMs, size);
expect(result[0].ccs).toBe('foo');
});
@ -203,9 +214,10 @@ describe('fetchCpuUsageNodeStats', () => {
});
const filterQuery =
'{"bool":{"should":[{"exists":{"field":"cluster_uuid"}}],"minimum_should_match":1}}';
await fetchCpuUsageNodeStats(esClient, clusters, index, startMs, endMs, size, filterQuery);
await fetchCpuUsageNodeStats(esClient, clusters, startMs, endMs, size, filterQuery);
expect(params).toStrictEqual({
index: '.monitoring-es-*',
index:
'*:.monitoring-es-*,.monitoring-es-*,*:metrics-elasticsearch.node_stats-*,metrics-elasticsearch.node_stats-*',
filter_path: ['aggregations'],
body: {
size: 0,
@ -213,7 +225,15 @@ describe('fetchCpuUsageNodeStats', () => {
bool: {
filter: [
{ terms: { cluster_uuid: ['abc123'] } },
{
bool: {
should: [
{ term: { type: 'node_stats' } },
{ term: { 'data_stream.dataset': 'elasticsearch.node_stats' } },
],
minimum_should_match: 1,
},
},
{ range: { timestamp: { format: 'epoch_millis', gte: 0, lte: 0 } } },
{
bool: { should: [{ exists: { field: 'cluster_uuid' } }], minimum_should_match: 1 },

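The index string asserted above is the combined pattern getNewIndexPatterns produces when ccs is enabled: each of the legacy and data-stream patterns appears in both a local and a remote (*:) form. Spelled out:

// equivalent to the expected index in the test above
const expectedIndex = [
  '*:.monitoring-es-*',
  '.monitoring-es-*',
  '*:metrics-elasticsearch.node_stats-*',
  'metrics-elasticsearch.node_stats-*',
].join(',');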

@ -10,6 +10,10 @@ import { get } from 'lodash';
import moment from 'moment';
import { NORMALIZED_DERIVATIVE_UNIT } from '../../../common/constants';
import { AlertCluster, AlertCpuUsageNodeStats } from '../../../common/types/alerts';
import { createDatasetFilter } from './create_dataset_query_filter';
import { getNewIndexPatterns } from '../cluster/get_index_patterns';
import { Globals } from '../../static_globals';
import { getConfigCcs } from '../../../common/ccs_utils';
interface NodeBucketESResponse {
key: string;
@ -26,7 +30,6 @@ interface ClusterBucketESResponse {
export async function fetchCpuUsageNodeStats(
esClient: ElasticsearchClient,
clusters: AlertCluster[],
index: string,
startMs: number,
endMs: number,
size: number,
@ -35,8 +38,15 @@ export async function fetchCpuUsageNodeStats(
// Using pure MS didn't seem to work well with the date_histogram interval
// but minutes does
const intervalInMinutes = moment.duration(endMs - startMs).asMinutes();
const indexPatterns = getNewIndexPatterns({
config: Globals.app.config,
moduleType: 'elasticsearch',
dataset: 'node_stats',
ccs: getConfigCcs(Globals.app.config) ? '*' : undefined,
});
const params = {
index,
index: indexPatterns,
filter_path: ['aggregations'],
body: {
size: 0,
@ -48,11 +58,7 @@ export async function fetchCpuUsageNodeStats(
cluster_uuid: clusters.map((cluster) => cluster.clusterUuid),
},
},
{
term: {
type: 'node_stats',
},
},
createDatasetFilter('node_stats', 'elasticsearch.node_stats'),
{
range: {
timestamp: {


@ -10,6 +10,18 @@ import { elasticsearchClientMock } from '../../../../../../src/core/server/elast
import { elasticsearchServiceMock } from 'src/core/server/mocks';
import { fetchDiskUsageNodeStats } from './fetch_disk_usage_node_stats';
jest.mock('../../static_globals', () => ({
Globals: {
app: {
config: {
ui: {
ccs: { enabled: true },
},
},
},
},
}));
describe('fetchDiskUsageNodeStats', () => {
const esClient = elasticsearchServiceMock.createScopedClusterClient().asCurrentUser;
@ -19,7 +31,6 @@ describe('fetchDiskUsageNodeStats', () => {
clusterName: 'test-cluster',
},
];
const index = '.monitoring-es-*';
const duration = '5m';
const size = 10;
@ -63,7 +74,7 @@ describe('fetchDiskUsageNodeStats', () => {
})
);
const result = await fetchDiskUsageNodeStats(esClient, clusters, index, duration, size);
const result = await fetchDiskUsageNodeStats(esClient, clusters, duration, size);
expect(result).toEqual([
{
clusterUuid: clusters[0].clusterUuid,


@ -8,18 +8,27 @@
import { ElasticsearchClient } from 'kibana/server';
import { get } from 'lodash';
import { AlertCluster, AlertDiskUsageNodeStats } from '../../../common/types/alerts';
import { createDatasetFilter } from './create_dataset_query_filter';
import { Globals } from '../../static_globals';
import { getConfigCcs } from '../../../common/ccs_utils';
import { getNewIndexPatterns } from '../cluster/get_index_patterns';
export async function fetchDiskUsageNodeStats(
esClient: ElasticsearchClient,
clusters: AlertCluster[],
index: string,
duration: string,
size: number,
filterQuery?: string
): Promise<AlertDiskUsageNodeStats[]> {
const clustersIds = clusters.map((cluster) => cluster.clusterUuid);
const indexPatterns = getNewIndexPatterns({
config: Globals.app.config,
moduleType: 'elasticsearch',
dataset: 'node_stats',
ccs: getConfigCcs(Globals.app.config) ? '*' : undefined,
});
const params = {
index,
index: indexPatterns,
filter_path: ['aggregations'],
body: {
size: 0,
@ -31,11 +40,7 @@ export async function fetchDiskUsageNodeStats(
cluster_uuid: clustersIds,
},
},
{
term: {
type: 'node_stats',
},
},
createDatasetFilter('node_stats', 'elasticsearch.node_stats'),
{
range: {
timestamp: {


@ -11,6 +11,18 @@ import { elasticsearchServiceMock } from 'src/core/server/mocks';
import { fetchElasticsearchVersions } from './fetch_elasticsearch_versions';
import type * as estypes from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
jest.mock('../../static_globals', () => ({
Globals: {
app: {
config: {
ui: {
ccs: { enabled: true },
},
},
},
},
}));
describe('fetchElasticsearchVersions', () => {
const esClient = elasticsearchServiceMock.createScopedClusterClient().asCurrentUser;
@ -45,7 +57,7 @@ describe('fetchElasticsearchVersions', () => {
} as estypes.SearchResponse)
);
const result = await fetchElasticsearchVersions(esClient, clusters, index, size);
const result = await fetchElasticsearchVersions(esClient, clusters, size);
expect(result).toEqual([
{
clusterUuid: clusters[0].clusterUuid,


@ -7,16 +7,25 @@
import { ElasticsearchClient } from 'kibana/server';
import { AlertCluster, AlertVersions } from '../../../common/types/alerts';
import { ElasticsearchSource, ElasticsearchResponse } from '../../../common/types/es';
import { createDatasetFilter } from './create_dataset_query_filter';
import { Globals } from '../../static_globals';
import { getConfigCcs } from '../../../common/ccs_utils';
import { getNewIndexPatterns } from '../cluster/get_index_patterns';
export async function fetchElasticsearchVersions(
esClient: ElasticsearchClient,
clusters: AlertCluster[],
index: string,
size: number,
filterQuery?: string
): Promise<AlertVersions[]> {
const indexPatterns = getNewIndexPatterns({
config: Globals.app.config,
moduleType: 'elasticsearch',
dataset: 'cluster_stats',
ccs: getConfigCcs(Globals.app.config) ? '*' : undefined,
});
const params = {
index,
index: indexPatterns,
filter_path: [
'hits.hits._source.cluster_stats.nodes.versions',
'hits.hits._index',
@ -40,11 +49,7 @@ export async function fetchElasticsearchVersions(
cluster_uuid: clusters.map((cluster) => cluster.clusterUuid),
},
},
{
term: {
type: 'cluster_stats',
},
},
createDatasetFilter('cluster_stats', 'elasticsearch.cluster_stats'),
{
range: {
timestamp: {


@ -9,7 +9,10 @@ import { ElasticsearchClient } from 'kibana/server';
import { AlertCluster, IndexShardSizeStats } from '../../../common/types/alerts';
import { ElasticsearchIndexStats, ElasticsearchResponseHit } from '../../../common/types/es';
import { ESGlobPatterns, RegExPatterns } from '../../../common/es_glob_patterns';
import { createDatasetFilter } from './create_dataset_query_filter';
import { Globals } from '../../static_globals';
import { getConfigCcs } from '../../../common/ccs_utils';
import { getNewIndexPatterns } from '../cluster/get_index_patterns';
type TopHitType = ElasticsearchResponseHit & {
_source: { index_stats?: Partial<ElasticsearchIndexStats> };
@ -28,25 +31,26 @@ const gbMultiplier = 1000000000;
export async function fetchIndexShardSize(
esClient: ElasticsearchClient,
clusters: AlertCluster[],
index: string,
threshold: number,
shardIndexPatterns: string,
size: number,
filterQuery?: string
): Promise<IndexShardSizeStats[]> {
const indexPatterns = getNewIndexPatterns({
config: Globals.app.config,
moduleType: 'elasticsearch',
dataset: 'index',
ccs: getConfigCcs(Globals.app.config) ? '*' : undefined,
});
const params = {
index,
index: indexPatterns,
filter_path: ['aggregations.clusters.buckets'],
body: {
size: 0,
query: {
bool: {
must: [
{
match: {
type: 'index_stats',
},
},
filter: [
createDatasetFilter('index_stats', 'elasticsearch.index'),
{
range: {
timestamp: {
@ -102,7 +106,7 @@ export async function fetchIndexShardSize(
try {
if (filterQuery) {
const filterQueryObject = JSON.parse(filterQuery);
params.body.query.bool.must.push(filterQueryObject);
params.body.query.bool.filter.push(filterQueryObject);
}
} catch (e) {
// meh

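The must to filter change above (and the matching change to the filterQuery push) keeps the same set of matching documents but runs the clauses in filter context, where they are not scored and can be cached; for a size: 0 aggregation query the score was never used anyway. A sketch of the resulting query shape, under that assumption:

const query = {
  bool: {
    filter: [
      createDatasetFilter('index_stats', 'elasticsearch.index'),
      { range: { timestamp: { gte: 'now-5m', lte: 'now' } } },
      // ...the parsed filterQuery object is pushed onto this same array
    ],
  },
};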

@ -10,6 +10,18 @@ import { fetchKibanaVersions } from './fetch_kibana_versions';
import { elasticsearchClientMock } from '../../../../../../src/core/server/elasticsearch/client/mocks';
import { elasticsearchServiceMock } from 'src/core/server/mocks';
jest.mock('../../static_globals', () => ({
Globals: {
app: {
config: {
ui: {
ccs: { enabled: true },
},
},
},
},
}));
describe('fetchKibanaVersions', () => {
const esClient = elasticsearchServiceMock.createScopedClusterClient().asCurrentUser;
const clusters = [
@ -66,7 +78,7 @@ describe('fetchKibanaVersions', () => {
})
);
const result = await fetchKibanaVersions(esClient, clusters, index, size);
const result = await fetchKibanaVersions(esClient, clusters, size);
expect(result).toEqual([
{
clusterUuid: clusters[0].clusterUuid,


@ -7,6 +7,10 @@
import { ElasticsearchClient } from 'kibana/server';
import { get } from 'lodash';
import { AlertCluster, AlertVersions } from '../../../common/types/alerts';
import { createDatasetFilter } from './create_dataset_query_filter';
import { Globals } from '../../static_globals';
import { getConfigCcs } from '../../../common/ccs_utils';
import { getNewIndexPatterns } from '../cluster/get_index_patterns';
interface ESAggResponse {
key: string;
@ -15,12 +19,17 @@ interface ESAggResponse {
export async function fetchKibanaVersions(
esClient: ElasticsearchClient,
clusters: AlertCluster[],
index: string,
size: number,
filterQuery?: string
): Promise<AlertVersions[]> {
const indexPatterns = getNewIndexPatterns({
config: Globals.app.config,
moduleType: 'kibana',
dataset: 'stats',
ccs: getConfigCcs(Globals.app.config) ? '*' : undefined,
});
const params = {
index,
index: indexPatterns,
filter_path: ['aggregations'],
body: {
size: 0,
@ -32,11 +41,7 @@ export async function fetchKibanaVersions(
cluster_uuid: clusters.map((cluster) => cluster.clusterUuid),
},
},
{
term: {
type: 'kibana_stats',
},
},
createDatasetFilter('kibana_stats', 'kibana.stats'),
{
range: {
timestamp: {


@ -10,6 +10,18 @@ import { elasticsearchClientMock } from '../../../../../../src/core/server/elast
import { elasticsearchServiceMock } from 'src/core/server/mocks';
import type * as estypes from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
jest.mock('../../static_globals', () => ({
Globals: {
app: {
config: {
ui: {
ccs: { enabled: true },
},
},
},
},
}));
describe('fetchLicenses', () => {
const clusterName = 'MyCluster';
const clusterUuid = 'clusterA';


@ -7,15 +7,24 @@
import { ElasticsearchClient } from 'kibana/server';
import { AlertLicense, AlertCluster } from '../../../common/types/alerts';
import { ElasticsearchSource } from '../../../common/types/es';
import { createDatasetFilter } from './create_dataset_query_filter';
import { Globals } from '../../static_globals';
import { getConfigCcs } from '../../../common/ccs_utils';
import { getNewIndexPatterns } from '../cluster/get_index_patterns';
export async function fetchLicenses(
esClient: ElasticsearchClient,
clusters: AlertCluster[],
index: string,
filterQuery?: string
): Promise<AlertLicense[]> {
const indexPatterns = getNewIndexPatterns({
config: Globals.app.config,
moduleType: 'elasticsearch',
dataset: 'cluster_stats',
ccs: getConfigCcs(Globals.app.config) ? '*' : undefined,
});
const params = {
index,
index: indexPatterns,
filter_path: [
'hits.hits._source.license.*',
'hits.hits._source.cluster_uuid',
@ -39,11 +48,7 @@ export async function fetchLicenses(
cluster_uuid: clusters.map((cluster) => cluster.clusterUuid),
},
},
{
term: {
type: 'cluster_stats',
},
},
createDatasetFilter('cluster_stats', 'elasticsearch.cluster_stats'),
{
range: {
timestamp: {


@ -10,6 +10,18 @@ import { fetchLogstashVersions } from './fetch_logstash_versions';
import { elasticsearchClientMock } from '../../../../../../src/core/server/elasticsearch/client/mocks';
import { elasticsearchServiceMock } from 'src/core/server/mocks';
jest.mock('../../static_globals', () => ({
Globals: {
app: {
config: {
ui: {
ccs: { enabled: true },
},
},
},
},
}));
describe('fetchLogstashVersions', () => {
const esClient = elasticsearchServiceMock.createScopedClusterClient().asCurrentUser;
const clusters = [
@ -66,7 +78,7 @@ describe('fetchLogstashVersions', () => {
})
);
const result = await fetchLogstashVersions(esClient, clusters, index, size);
const result = await fetchLogstashVersions(esClient, clusters, size);
expect(result).toEqual([
{
clusterUuid: clusters[0].clusterUuid,


@ -7,6 +7,10 @@
import { ElasticsearchClient } from 'kibana/server';
import { get } from 'lodash';
import { AlertCluster, AlertVersions } from '../../../common/types/alerts';
import { createDatasetFilter } from './create_dataset_query_filter';
import { Globals } from '../../static_globals';
import { getConfigCcs } from '../../../common/ccs_utils';
import { getNewIndexPatterns } from '../cluster/get_index_patterns';
interface ESAggResponse {
key: string;
@ -15,12 +19,17 @@ interface ESAggResponse {
export async function fetchLogstashVersions(
esClient: ElasticsearchClient,
clusters: AlertCluster[],
index: string,
size: number,
filterQuery?: string
): Promise<AlertVersions[]> {
const indexPatterns = getNewIndexPatterns({
config: Globals.app.config,
moduleType: 'logstash',
dataset: 'node_stats',
ccs: getConfigCcs(Globals.app.config) ? '*' : undefined,
});
const params = {
index,
index: indexPatterns,
filter_path: ['aggregations'],
body: {
size: 0,
@ -32,11 +41,7 @@ export async function fetchLogstashVersions(
cluster_uuid: clusters.map((cluster) => cluster.clusterUuid),
},
},
{
term: {
type: 'logstash_stats',
},
},
createDatasetFilter('logstash_stats', 'logstash.node_stats'),
{
range: {
timestamp: {


@ -8,19 +8,28 @@
import { ElasticsearchClient } from 'kibana/server';
import { get } from 'lodash';
import { AlertCluster, AlertMemoryUsageNodeStats } from '../../../common/types/alerts';
import { createDatasetFilter } from './create_dataset_query_filter';
import { Globals } from '../../static_globals';
import { getConfigCcs } from '../../../common/ccs_utils';
import { getNewIndexPatterns } from '../cluster/get_index_patterns';
export async function fetchMemoryUsageNodeStats(
esClient: ElasticsearchClient,
clusters: AlertCluster[],
index: string,
startMs: number,
endMs: number,
size: number,
filterQuery?: string
): Promise<AlertMemoryUsageNodeStats[]> {
const clustersIds = clusters.map((cluster) => cluster.clusterUuid);
const indexPatterns = getNewIndexPatterns({
config: Globals.app.config,
moduleType: 'elasticsearch',
dataset: 'node_stats',
ccs: getConfigCcs(Globals.app.config) ? '*' : undefined,
});
const params = {
index,
index: indexPatterns,
filter_path: ['aggregations'],
body: {
size: 0,
@ -32,11 +41,7 @@ export async function fetchMemoryUsageNodeStats(
cluster_uuid: clustersIds,
},
},
{
term: {
type: 'node_stats',
},
},
createDatasetFilter('node_stats', 'elasticsearch.node_stats'),
{
range: {
timestamp: {

View file

@ -9,6 +9,18 @@
import { elasticsearchClientMock } from '../../../../../../src/core/server/elasticsearch/client/mocks';
import { fetchMissingMonitoringData } from './fetch_missing_monitoring_data';
jest.mock('../../static_globals', () => ({
Globals: {
app: {
config: {
ui: {
ccs: { enabled: true },
},
},
},
},
}));
function getResponse(
index: string,
products: Array<{
@ -42,7 +54,6 @@ function getResponse(
describe('fetchMissingMonitoringData', () => {
const esClient = elasticsearchClientMock.createScopedClusterClient().asCurrentUser;
const index = '.monitoring-*';
const startMs = 100;
const size = 10;
@ -87,7 +98,7 @@ describe('fetchMissingMonitoringData', () => {
},
})
);
const result = await fetchMissingMonitoringData(esClient, clusters, index, size, now, startMs);
const result = await fetchMissingMonitoringData(esClient, clusters, size, now, startMs);
expect(result).toEqual([
{
nodeId: 'nodeUuid1',
@ -137,7 +148,7 @@ describe('fetchMissingMonitoringData', () => {
},
})
);
const result = await fetchMissingMonitoringData(esClient, clusters, index, size, now, startMs);
const result = await fetchMissingMonitoringData(esClient, clusters, size, now, startMs);
expect(result).toEqual([
{
nodeId: 'nodeUuid1',

View file

@ -8,6 +8,9 @@
import { ElasticsearchClient } from 'kibana/server';
import { get } from 'lodash';
import { AlertCluster, AlertMissingData } from '../../../common/types/alerts';
import { Globals } from '../../static_globals';
import { getConfigCcs } from '../../../common/ccs_utils';
import { getNewIndexPatterns } from '../cluster/get_index_patterns';
interface ClusterBucketESResponse {
key: string;
@ -44,15 +47,21 @@ interface TopHitESResponse {
export async function fetchMissingMonitoringData(
esClient: ElasticsearchClient,
clusters: AlertCluster[],
index: string,
size: number,
nowInMs: number,
startMs: number,
filterQuery?: string
): Promise<AlertMissingData[]> {
const endMs = nowInMs;
// changing this to only search ES because of changes related to https://github.com/elastic/kibana/issues/83309
const indexPatterns = getNewIndexPatterns({
config: Globals.app.config,
moduleType: 'elasticsearch',
dataset: 'node_stats',
ccs: getConfigCcs(Globals.app.config) ? '*' : undefined,
});
const params = {
index,
index: indexPatterns,
filter_path: ['aggregations.clusters.buckets'],
body: {
size: 0,

View file

@ -7,6 +7,10 @@
import { ElasticsearchClient } from 'kibana/server';
import { AlertCluster, AlertClusterStatsNodes } from '../../../common/types/alerts';
import { ElasticsearchSource } from '../../../common/types/es';
import { createDatasetFilter } from './create_dataset_query_filter';
import { Globals } from '../../static_globals';
import { getConfigCcs } from '../../../common/ccs_utils';
import { getNewIndexPatterns } from '../cluster/get_index_patterns';
function formatNode(
nodes: NonNullable<NonNullable<ElasticsearchSource['cluster_state']>['nodes']> | undefined
@ -26,11 +30,16 @@ function formatNode(
export async function fetchNodesFromClusterStats(
esClient: ElasticsearchClient,
clusters: AlertCluster[],
index: string,
filterQuery?: string
): Promise<AlertClusterStatsNodes[]> {
const indexPatterns = getNewIndexPatterns({
config: Globals.app.config,
moduleType: 'elasticsearch',
dataset: 'cluster_stats',
ccs: getConfigCcs(Globals.app.config) ? '*' : undefined,
});
const params = {
index,
index: indexPatterns,
filter_path: ['aggregations.clusters.buckets'],
body: {
size: 0,
@ -45,11 +54,7 @@ export async function fetchNodesFromClusterStats(
query: {
bool: {
filter: [
{
term: {
type: 'cluster_stats',
},
},
createDatasetFilter('cluster_stats', 'elasticsearch.cluster_stats'),
{
range: {
timestamp: {

View file

@ -8,6 +8,10 @@
import { ElasticsearchClient } from 'kibana/server';
import { get } from 'lodash';
import { AlertCluster, AlertThreadPoolRejectionsStats } from '../../../common/types/alerts';
import { createDatasetFilter } from './create_dataset_query_filter';
import { Globals } from '../../static_globals';
import { getConfigCcs } from '../../../common/ccs_utils';
import { getNewIndexPatterns } from '../cluster/get_index_patterns';
const invalidNumberValue = (value: number) => {
return isNaN(value) || value === undefined || value === null;
@ -33,15 +37,20 @@ const getTopHits = (threadType: string, order: 'asc' | 'desc') => ({
export async function fetchThreadPoolRejectionStats(
esClient: ElasticsearchClient,
clusters: AlertCluster[],
index: string,
size: number,
threadType: string,
duration: string,
filterQuery?: string
): Promise<AlertThreadPoolRejectionsStats[]> {
const clustersIds = clusters.map((cluster) => cluster.clusterUuid);
const indexPatterns = getNewIndexPatterns({
config: Globals.app.config,
moduleType: 'elasticsearch',
dataset: 'node_stats',
ccs: getConfigCcs(Globals.app.config) ? '*' : undefined,
});
const params = {
index,
index: indexPatterns,
filter_path: ['aggregations'],
body: {
size: 0,
@ -53,11 +62,7 @@ export async function fetchThreadPoolRejectionStats(
cluster_uuid: clustersIds,
},
},
{
term: {
type: 'node_stats',
},
},
createDatasetFilter('node_stats', 'elasticsearch.node_stats'),
{
range: {
timestamp: {

View file

@ -25,7 +25,9 @@ export function createApmQuery(options: {
const opts = {
filters: [] as any[],
metric: ApmMetric.getMetricFields(),
types: ['stats', 'beats_stats'],
type: 'beats_stats',
metricset: 'stats',
dsDataset: 'beats.stats',
...(options ?? {}),
};
@ -38,6 +40,5 @@ export function createApmQuery(options: {
},
},
});
return createQuery(opts);
}
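Note: with the `types` array gone, the singular `type`, `metricset`, and `dsDataset` defaults above are folded by createQuery into one bool/should clause (see the create_query changes and tests later in this diff). Roughly, the resulting type filter for APM/Beats stats documents is:
// Approximate filter produced from the defaults above; the shape follows the
// create_query unit tests later in this diff.
const apmTypeFilter = {
  bool: {
    should: [
      { term: { 'data_stream.dataset': 'beats.stats' } }, // Elastic Agent data streams
      { term: { 'metricset.name': 'stats' } }, // standalone metricbeat
      { term: { type: 'beats_stats' } }, // legacy internal collection
    ],
  },
};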

View file

@ -6,12 +6,13 @@
*/
import { LegacyRequest, Cluster } from '../../types';
import { checkParam } from '../error_missing_required';
import { createApmQuery } from './create_apm_query';
import { ApmMetric } from '../metrics';
import { apmAggResponseHandler, apmUuidsAgg, apmAggFilterPath } from './_apm_stats';
import { getTimeOfLastEvent } from './_get_time_of_last_event';
import { ElasticsearchResponse } from '../../../common/types/es';
import { getLegacyIndexPattern } from '../cluster/get_index_patterns';
import { Globals } from '../../static_globals';
export function handleResponse(clusterUuid: string, response: ElasticsearchResponse) {
const { apmTotal, totalEvents, memRss, versions } = apmAggResponseHandler(response);
@ -32,24 +33,24 @@ export function handleResponse(clusterUuid: string, response: ElasticsearchRespo
};
}
export function getApmsForClusters(
req: LegacyRequest,
apmIndexPattern: string,
clusters: Cluster[]
) {
checkParam(apmIndexPattern, 'apmIndexPattern in apms/getApmsForClusters');
export function getApmsForClusters(req: LegacyRequest, clusters: Cluster[], ccs?: string) {
const start = req.payload.timeRange.min;
const end = req.payload.timeRange.max;
const config = req.server.config();
const maxBucketSize = config.get('monitoring.ui.max_bucket_size');
const cgroup = config.get('monitoring.ui.container.apm.enabled');
const indexPatterns = getLegacyIndexPattern({
moduleType: 'beats',
ccs: ccs || req.payload.ccs,
config: Globals.app.config,
});
return Promise.all(
clusters.map(async (cluster) => {
const clusterUuid = cluster.elasticsearch?.cluster?.id ?? cluster.cluster_uuid;
const params = {
index: apmIndexPattern,
index: indexPatterns,
size: 0,
ignore_unavailable: true,
filter_path: apmAggFilterPath,
@ -70,7 +71,7 @@ export function getApmsForClusters(
getTimeOfLastEvent({
req,
callWithRequest,
apmIndexPattern,
apmIndexPattern: indexPatterns,
start,
end,
clusterUuid,

View file

@ -26,9 +26,12 @@ export function createBeatsQuery(options: {
end?: number;
}) {
const opts = {
moduleType: 'beats',
filters: [] as any[],
metric: BeatsMetric.getMetricFields(),
types: ['stats', 'beats_stats'],
type: 'beats_stats',
metricset: 'stats',
dsDataset: 'beats.stats',
...(options ?? {}),
};

View file

@ -5,12 +5,13 @@
* 2.0.
*/
import { checkParam } from '../error_missing_required';
import { BeatsClusterMetric } from '../metrics';
import { createBeatsQuery } from './create_beats_query';
import { beatsAggFilterPath, beatsUuidsAgg, beatsAggResponseHandler } from './_beats_stats';
import type { ElasticsearchResponse } from '../../../common/types/es';
import { LegacyRequest, Cluster } from '../../types';
import { getLegacyIndexPattern } from '../cluster/get_index_patterns';
import { Globals } from '../../static_globals';
export function handleResponse(clusterUuid: string, response: ElasticsearchResponse) {
const { beatTotal, beatTypes, totalEvents, bytesSent } = beatsAggResponseHandler(response);
@ -31,23 +32,21 @@ export function handleResponse(clusterUuid: string, response: ElasticsearchRespo
};
}
export function getBeatsForClusters(
req: LegacyRequest,
beatsIndexPattern: string,
clusters: Cluster[]
) {
checkParam(beatsIndexPattern, 'beatsIndexPattern in beats/getBeatsForClusters');
export function getBeatsForClusters(req: LegacyRequest, clusters: Cluster[], ccs: string) {
const start = req.payload.timeRange.min;
const end = req.payload.timeRange.max;
const config = req.server.config();
const maxBucketSize = config.get('monitoring.ui.max_bucket_size');
const indexPatterns = getLegacyIndexPattern({
moduleType: 'beats',
ccs,
config: Globals.app.config,
});
return Promise.all(
clusters.map(async (cluster) => {
const clusterUuid = cluster.elasticsearch?.cluster?.id ?? cluster.cluster_uuid;
const params = {
index: beatsIndexPattern,
index: indexPatterns,
size: 0,
ignore_unavailable: true,
filter_path: beatsAggFilterPath,

View file

@ -10,15 +10,24 @@ import { checkParam } from '../error_missing_required';
import { STANDALONE_CLUSTER_CLUSTER_UUID } from '../../../common/constants';
import { ElasticsearchResponse, ElasticsearchModifiedSource } from '../../../common/types/es';
import { LegacyRequest } from '../../types';
import { getNewIndexPatterns } from './get_index_patterns';
import { Globals } from '../../static_globals';
async function findSupportedBasicLicenseCluster(
req: LegacyRequest,
clusters: ElasticsearchModifiedSource[],
kbnIndexPattern: string,
ccs: string,
kibanaUuid: string,
serverLog: (message: string) => void
) {
checkParam(kbnIndexPattern, 'kbnIndexPattern in cluster/findSupportedBasicLicenseCluster');
const dataset = 'stats';
const moduleType = 'kibana';
const indexPatterns = getNewIndexPatterns({
config: Globals.app.config,
moduleType,
dataset,
ccs,
});
serverLog(
`Detected all clusters in monitoring data have basic license. Checking for supported admin cluster UUID for Kibana ${kibanaUuid}.`
@ -28,7 +37,7 @@ async function findSupportedBasicLicenseCluster(
const gte = req.payload.timeRange.min;
const lte = req.payload.timeRange.max;
const kibanaDataResult: ElasticsearchResponse = (await callWithRequest(req, 'search', {
index: kbnIndexPattern,
index: indexPatterns,
size: 1,
ignore_unavailable: true,
filter_path: ['hits.hits._source.cluster_uuid', 'hits.hits._source.cluster.id'],
@ -41,7 +50,7 @@ async function findSupportedBasicLicenseCluster(
bool: {
should: [
{ term: { type: 'kibana_stats' } },
{ term: { 'metricset.name': 'stats' } },
{ term: { 'data_stream.dataset': 'kibana.stats' } },
],
},
},
@ -58,7 +67,6 @@ async function findSupportedBasicLicenseCluster(
cluster.isSupported = true;
}
}
serverLog(
`Found basic license admin cluster UUID for Monitoring UI support: ${supportedClusterUuid}.`
);
@ -80,9 +88,7 @@ async function findSupportedBasicLicenseCluster(
* Non-Basic license clusters and any cluster in a single-cluster environment
* are also flagged as supported in this method.
*/
export function flagSupportedClusters(req: LegacyRequest, kbnIndexPattern: string) {
checkParam(kbnIndexPattern, 'kbnIndexPattern in cluster/flagSupportedClusters');
export function flagSupportedClusters(req: LegacyRequest, ccs: string) {
const config = req.server.config();
const serverLog = (message: string) => req.getLogger('supported-clusters').debug(message);
const flagAllSupported = (clusters: ElasticsearchModifiedSource[]) => {
@ -124,13 +130,7 @@ export function flagSupportedClusters(req: LegacyRequest, kbnIndexPattern: strin
// if all linked are basic licenses
if (linkedClusterCount === basicLicenseCount) {
const kibanaUuid = config.get('server.uuid') as string;
return await findSupportedBasicLicenseCluster(
req,
clusters,
kbnIndexPattern,
kibanaUuid,
serverLog
);
return await findSupportedBasicLicenseCluster(req, clusters, ccs, kibanaUuid, serverLog);
}
// if some non-basic licenses

View file

@ -13,19 +13,31 @@ import { createQuery } from '../create_query';
import { ElasticsearchMetric } from '../metrics';
import { ElasticsearchResponse } from '../../../common/types/es';
import { LegacyRequest } from '../../types';
import { getNewIndexPatterns } from './get_index_patterns';
import { Globals } from '../../static_globals';
export function getClusterLicense(req: LegacyRequest, esIndexPattern: string, clusterUuid: string) {
checkParam(esIndexPattern, 'esIndexPattern in getClusterLicense');
// is this being used anywhere? not called within the app
export function getClusterLicense(req: LegacyRequest, clusterUuid: string) {
const dataset = 'cluster_stats';
const moduleType = 'elasticsearch';
const indexPattern = getNewIndexPatterns({
config: Globals.app.config,
moduleType,
dataset,
ccs: req.payload.ccs,
});
const params = {
index: esIndexPattern,
index: indexPattern,
size: 1,
ignore_unavailable: true,
filter_path: ['hits.hits._source.license'],
body: {
sort: { timestamp: { order: 'desc', unmapped_type: 'long' } },
query: createQuery({
type: 'cluster_stats',
type: dataset,
dsDataset: `${moduleType}.${dataset}`,
metricset: dataset,
clusterUuid,
metric: ElasticsearchMetric.getMetricFields(),
}),

View file

@ -14,11 +14,10 @@ import { getClustersStats } from './get_clusters_stats';
* This will fetch the cluster stats and cluster state as a single object for the cluster specified by the {@code req}.
*
* @param {Object} req The incoming user's request
* @param {String} esIndexPattern The Elasticsearch index pattern
* @param {String} clusterUuid The requested cluster's UUID
* @return {Promise} The object cluster response.
*/
export function getClusterStats(req: LegacyRequest, esIndexPattern: string, clusterUuid: string) {
export function getClusterStats(req: LegacyRequest, clusterUuid: string) {
if (!clusterUuid) {
throw badRequest(
i18n.translate('xpack.monitoring.clusterStats.uuidNotSpecifiedErrorMessage', {
@ -29,7 +28,7 @@ export function getClusterStats(req: LegacyRequest, esIndexPattern: string, clus
}
// passing clusterUuid so `get_clusters` will filter for single cluster
return getClustersStats(req, esIndexPattern, clusterUuid).then((clusters) => {
return getClustersStats(req, clusterUuid).then((clusters) => {
if (!clusters || clusters.length === 0) {
throw notFound(
i18n.translate('xpack.monitoring.clusterStats.uuidNotFoundErrorMessage', {

View file

@ -52,15 +52,7 @@ export async function getClustersFromRequest(
codePaths,
}: { clusterUuid: string; start: number; end: number; codePaths: string[] }
) {
const {
esIndexPattern,
kbnIndexPattern,
lsIndexPattern,
beatsIndexPattern,
apmIndexPattern,
enterpriseSearchIndexPattern,
filebeatIndexPattern,
} = indexPatterns;
const { filebeatIndexPattern } = indexPatterns;
const config = req.server.config();
const isStandaloneCluster = clusterUuid === STANDALONE_CLUSTER_CLUSTER_UUID;
@ -71,18 +63,11 @@ export async function getClustersFromRequest(
clusters.push(getStandaloneClusterDefinition());
} else {
// get clusters with stats and cluster state
clusters = await getClustersStats(req, esIndexPattern, clusterUuid);
clusters = await getClustersStats(req, clusterUuid, '*');
}
if (!clusterUuid && !isStandaloneCluster) {
const indexPatternsToCheckForNonClusters = [
lsIndexPattern,
beatsIndexPattern,
apmIndexPattern,
enterpriseSearchIndexPattern,
];
if (await hasStandaloneClusters(req, indexPatternsToCheckForNonClusters)) {
if (await hasStandaloneClusters(req, '*')) {
clusters.push(getStandaloneClusterDefinition());
}
}
@ -106,7 +91,7 @@ export async function getClustersFromRequest(
// add ml jobs and alerts data
const mlJobs = isInCodePath(codePaths, [CODE_PATH_ML])
? await getMlJobsForCluster(req, esIndexPattern, cluster)
? await getMlJobsForCluster(req, cluster, '*')
: null;
if (mlJobs !== null) {
cluster.ml = { jobs: mlJobs };
@ -128,7 +113,7 @@ export async function getClustersFromRequest(
}
// update clusters with license check results
const getSupportedClusters = flagSupportedClusters(req, kbnIndexPattern);
const getSupportedClusters = flagSupportedClusters(req, '*');
clusters = await getSupportedClusters(clusters);
// add alerts data
@ -184,7 +169,7 @@ export async function getClustersFromRequest(
// add kibana data
const kibanas =
isInCodePath(codePaths, [CODE_PATH_KIBANA]) && !isStandaloneCluster
? await getKibanasForClusters(req, kbnIndexPattern, clusters)
? await getKibanasForClusters(req, clusters, '*')
: [];
// add the kibana data to each cluster
kibanas.forEach((kibana) => {
@ -197,8 +182,8 @@ export async function getClustersFromRequest(
// add logstash data
if (isInCodePath(codePaths, [CODE_PATH_LOGSTASH])) {
const logstashes = await getLogstashForClusters(req, lsIndexPattern, clusters);
const pipelines = await getLogstashPipelineIds({ req, lsIndexPattern, clusterUuid, size: 1 });
const logstashes = await getLogstashForClusters(req, clusters, '*');
const pipelines = await getLogstashPipelineIds({ req, clusterUuid, size: 1, ccs: '*' });
logstashes.forEach((logstash) => {
const clusterIndex = clusters.findIndex(
(cluster) =>
@ -214,7 +199,7 @@ export async function getClustersFromRequest(
// add beats data
const beatsByCluster = isInCodePath(codePaths, [CODE_PATH_BEATS])
? await getBeatsForClusters(req, beatsIndexPattern, clusters)
? await getBeatsForClusters(req, clusters, '*')
: [];
beatsByCluster.forEach((beats) => {
const clusterIndex = clusters.findIndex(
@ -226,7 +211,7 @@ export async function getClustersFromRequest(
// add apm data
const apmsByCluster = isInCodePath(codePaths, [CODE_PATH_APM])
? await getApmsForClusters(req, apmIndexPattern, clusters)
? await getApmsForClusters(req, clusters, '*')
: [];
apmsByCluster.forEach((apm) => {
const clusterIndex = clusters.findIndex(
@ -244,7 +229,7 @@ export async function getClustersFromRequest(
// add Enterprise Search data
const enterpriseSearchByCluster = isInCodePath(codePaths, [CODE_PATH_ENTERPRISE_SEARCH])
? await getEnterpriseSearchForClusters(req, enterpriseSearchIndexPattern, clusters)
? await getEnterpriseSearchForClusters(req, clusters, '*')
: [];
enterpriseSearchByCluster.forEach((entSearch) => {
const clusterIndex = clusters.findIndex(
@ -259,7 +244,7 @@ export async function getClustersFromRequest(
});
// check ccr configuration
const isCcrEnabled = await checkCcrEnabled(req, esIndexPattern);
const isCcrEnabled = await checkCcrEnabled(req, '*');
const kibanaUuid = config.get('server.uuid')!;

View file

@ -10,6 +10,8 @@ import { find } from 'lodash';
import { checkParam } from '../error_missing_required';
import { ElasticsearchResponse, ElasticsearchModifiedSource } from '../../../common/types/es';
import { LegacyRequest } from '../../types';
import { getNewIndexPatterns } from './get_index_patterns';
import { Globals } from '../../static_globals';
/**
* Augment the {@code clusters} with their cluster states from the {@code response}.
@ -46,13 +48,7 @@ export function handleResponse(
*
* If there is no cluster state available for any cluster, then it will be returned without any cluster state information.
*/
export function getClustersState(
req: LegacyRequest,
esIndexPattern: string,
clusters: ElasticsearchModifiedSource[]
) {
checkParam(esIndexPattern, 'esIndexPattern in cluster/getClustersHealth');
export function getClustersState(req: LegacyRequest, clusters: ElasticsearchModifiedSource[]) {
const clusterUuids = clusters
.filter((cluster) => !cluster.cluster_state || !cluster.elasticsearch?.cluster?.stats?.state)
.map((cluster) => cluster.cluster_uuid || cluster.elasticsearch?.cluster?.id);
@ -63,8 +59,14 @@ export function getClustersState(
return Promise.resolve(clusters);
}
const indexPatterns = getNewIndexPatterns({
config: Globals.app.config,
moduleType: 'elasticsearch',
ccs: req.payload.ccs,
});
const params = {
index: esIndexPattern,
index: indexPatterns,
size: clusterUuids.length,
ignore_unavailable: true,
filter_path: [

View file

@ -43,7 +43,7 @@ describe('handleClusterStats', () => {
},
};
const clusters = handleClusterStats(response, { log: () => undefined });
const clusters = handleClusterStats(response);
expect(clusters.length).toEqual(1);
expect(clusters[0].ccs).toEqual('cluster_one');

View file

@ -16,21 +16,22 @@ import { parseCrossClusterPrefix } from '../../../common/ccs_utils';
import { getClustersState } from './get_clusters_state';
import { ElasticsearchResponse, ElasticsearchModifiedSource } from '../../../common/types/es';
import { LegacyRequest } from '../../types';
import { getNewIndexPatterns } from './get_index_patterns';
import { Globals } from '../../static_globals';
/**
* This will fetch the cluster stats and cluster state as a single object per cluster.
*
* @param {Object} req The incoming user's request
* @param {String} esIndexPattern The Elasticsearch index pattern
* @param {String} clusterUuid (optional) If not undefined, getClusters will filter for a single cluster
* @return {Promise} A promise containing an array of clusters.
*/
export function getClustersStats(req: LegacyRequest, esIndexPattern: string, clusterUuid: string) {
export function getClustersStats(req: LegacyRequest, clusterUuid: string, ccs?: string) {
return (
fetchClusterStats(req, esIndexPattern, clusterUuid)
fetchClusterStats(req, clusterUuid, ccs)
.then((response) => handleClusterStats(response))
// augment older documents (e.g., from 2.x - 5.4) with their cluster_state
.then((clusters) => getClustersState(req, esIndexPattern, clusters))
.then((clusters) => getClustersState(req, clusters))
);
}
@ -38,12 +39,19 @@ export function getClustersStats(req: LegacyRequest, esIndexPattern: string, clu
* Query cluster_stats for all the cluster data
*
* @param {Object} req (required) - server request
* @param {String} esIndexPattern (required) - index pattern to use in searching for cluster_stats data
* @param {String} clusterUuid (optional) - if not undefined, getClusters filters for a single clusterUuid
* @return {Promise} Object representing each cluster.
*/
function fetchClusterStats(req: LegacyRequest, esIndexPattern: string, clusterUuid: string) {
checkParam(esIndexPattern, 'esIndexPattern in getClusters');
function fetchClusterStats(req: LegacyRequest, clusterUuid: string, ccs?: string) {
const dataset = 'cluster_stats';
const moduleType = 'elasticsearch';
const indexPattern = getNewIndexPatterns({
config: Globals.app.config,
moduleType,
dataset,
// this will be either *, a request value, or null
ccs: ccs || req.payload.ccs,
});
const config = req.server.config();
// Get the params from the POST body for the request
@ -51,7 +59,7 @@ function fetchClusterStats(req: LegacyRequest, esIndexPattern: string, clusterUu
const end = req.payload.timeRange.max;
const metric = ElasticsearchMetric.getMetricFields();
const params = {
index: esIndexPattern,
index: indexPattern,
size: config.get('monitoring.ui.max_bucket_size'),
ignore_unavailable: true,
filter_path: [
@ -80,7 +88,15 @@ function fetchClusterStats(req: LegacyRequest, esIndexPattern: string, clusterUu
'hits.hits._source.cluster_settings.cluster.metadata.display_name',
],
body: {
query: createQuery({ type: 'cluster_stats', start, end, metric, clusterUuid }),
query: createQuery({
type: dataset,
dsDataset: `${moduleType}.${dataset}`,
metricset: dataset,
start,
end,
metric,
clusterUuid,
}),
collapse: {
field: 'cluster_uuid',
},

View file

@ -0,0 +1,102 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { MonitoringConfig } from '../..';
import { getNewIndexPatterns } from './get_index_patterns';
const getConfigWithCcs = (ccsEnabled: boolean) => {
return {
ui: {
ccs: {
enabled: ccsEnabled,
},
},
} as MonitoringConfig;
};
describe('getNewIndexPatterns', () => {
beforeEach(() => {
jest.resetModules();
});
it('returns local elasticsearch index patterns when ccs is enabled (default true) and no ccs payload', () => {
const indexPatterns = getNewIndexPatterns({
config: getConfigWithCcs(true),
moduleType: 'elasticsearch',
});
expect(indexPatterns).toBe('.monitoring-es-*,metrics-elasticsearch.*-*');
});
it('returns ecs only elasticsearch index patterns when specifying ecsLegacyOnly: true', () => {
const indexPatterns = getNewIndexPatterns({
config: getConfigWithCcs(true),
moduleType: 'elasticsearch',
ecsLegacyOnly: true,
});
expect(indexPatterns).toBe('.monitoring-es-8-*,metrics-elasticsearch.*-*');
});
it('returns local kibana index patterns when ccs is enabled with no ccs payload', () => {
const indexPatterns = getNewIndexPatterns({
config: getConfigWithCcs(true),
moduleType: 'kibana',
});
expect(indexPatterns).toBe('.monitoring-kibana-*,metrics-kibana.*-*');
});
it('returns logstash index patterns when ccs is enabled and no ccs payload', () => {
const indexPatterns = getNewIndexPatterns({
config: getConfigWithCcs(true),
moduleType: 'logstash',
});
expect(indexPatterns).toBe('.monitoring-logstash-*,metrics-logstash.*-*');
});
it('returns beats index patterns when ccs is enabled and no ccs payload', () => {
const indexPatterns = getNewIndexPatterns({
config: getConfigWithCcs(true),
moduleType: 'beats',
});
expect(indexPatterns).toBe('.monitoring-beats-*,metrics-beats.*-*');
});
it('returns elasticsearch index patterns with dataset', () => {
const indexPatterns = getNewIndexPatterns({
config: getConfigWithCcs(true),
moduleType: 'elasticsearch',
dataset: 'cluster_stats',
});
expect(indexPatterns).toBe('.monitoring-es-*,metrics-elasticsearch.cluster_stats-*');
});
it('returns elasticsearch index patterns without ccs prefixes when ccs is disabled', () => {
const indexPatterns = getNewIndexPatterns({
config: getConfigWithCcs(false),
moduleType: 'elasticsearch',
});
expect(indexPatterns).toBe('.monitoring-es-*,metrics-elasticsearch.*-*');
});
it('returns elasticsearch index patterns without ccs prefixes when ccs is disabled but ccs request payload has a value', () => {
const indexPatterns = getNewIndexPatterns({
config: getConfigWithCcs(false),
ccs: 'myccs',
moduleType: 'elasticsearch',
});
expect(indexPatterns).toBe('.monitoring-es-*,metrics-elasticsearch.*-*');
});
it('returns elasticsearch index patterns with custom ccs prefixes when ccs is enabled and ccs request payload has a value', () => {
const indexPatterns = getNewIndexPatterns({
config: getConfigWithCcs(true),
ccs: 'myccs',
moduleType: 'elasticsearch',
});
expect(indexPatterns).toBe('myccs:.monitoring-es-*,myccs:metrics-elasticsearch.*-*');
});
it('returns elasticsearch index patterns with ccs prefixes and local index patterns when ccs is enabled and ccs request payload value is *', () => {
const indexPatterns = getNewIndexPatterns({
config: getConfigWithCcs(true),
ccs: '*',
moduleType: 'elasticsearch',
});
expect(indexPatterns).toBe(
'*:.monitoring-es-*,.monitoring-es-*,*:metrics-elasticsearch.*-*,metrics-elasticsearch.*-*'
);
});
});

View file

@ -9,12 +9,17 @@ import { LegacyServer } from '../../types';
import { prefixIndexPattern } from '../../../common/ccs_utils';
import {
INDEX_PATTERN_ELASTICSEARCH,
INDEX_PATTERN_ELASTICSEARCH_ECS,
INDEX_PATTERN_KIBANA,
INDEX_PATTERN_LOGSTASH,
INDEX_PATTERN_BEATS,
INDEX_ALERTS,
DS_INDEX_PATTERN_TYPES,
DS_INDEX_PATTERN_METRICS,
INDEX_PATTERN_TYPES,
INDEX_PATTERN_ENTERPRISE_SEARCH,
} from '../../../common/constants';
import { MonitoringConfig } from '../..';
export function getIndexPatterns(
server: LegacyServer,
@ -44,10 +49,90 @@ export function getIndexPatterns(
...Object.keys(additionalPatterns).reduce((accum, varName) => {
return {
...accum,
[varName]: prefixIndexPattern(config, additionalPatterns[varName], ccs, true),
[varName]: prefixIndexPattern(config, additionalPatterns[varName], ccs),
};
}, {}),
};
return indexPatterns;
}
// "legacy" index patterns are those backed by the .monitoring-* indices
export function getLegacyIndexPattern({
moduleType,
ecsLegacyOnly = false,
config,
ccs,
}: {
moduleType: INDEX_PATTERN_TYPES;
ecsLegacyOnly?: boolean;
config: MonitoringConfig;
ccs?: string;
}) {
let indexPattern = '';
switch (moduleType) {
case 'elasticsearch':
// there may be cases where we only want the legacy ecs version index pattern (>=8.0)
indexPattern = ecsLegacyOnly ? INDEX_PATTERN_ELASTICSEARCH_ECS : INDEX_PATTERN_ELASTICSEARCH;
break;
case 'kibana':
indexPattern = INDEX_PATTERN_KIBANA;
break;
case 'logstash':
indexPattern = INDEX_PATTERN_LOGSTASH;
break;
case 'beats':
indexPattern = INDEX_PATTERN_BEATS;
break;
case 'enterprisesearch':
indexPattern = INDEX_PATTERN_ENTERPRISE_SEARCH;
break;
default:
throw new Error(`invalid module type to create index pattern: ${moduleType}`);
}
return prefixIndexPattern(config, indexPattern, ccs);
}
export function getDsIndexPattern({
type = DS_INDEX_PATTERN_METRICS,
moduleType,
dataset,
namespace = '*',
config,
ccs,
}: {
type?: string;
dataset?: string;
moduleType: INDEX_PATTERN_TYPES;
namespace?: string;
config: MonitoringConfig;
ccs?: string;
}): string {
let datasetsPattern = '';
if (dataset) {
datasetsPattern = `${moduleType}.${dataset}`;
} else {
datasetsPattern = `${moduleType}.*`;
}
return prefixIndexPattern(config, `${type}-${datasetsPattern}-${namespace}`, ccs);
}
export function getNewIndexPatterns({
config,
moduleType,
type = DS_INDEX_PATTERN_METRICS,
dataset,
namespace = '*',
ccs,
ecsLegacyOnly,
}: {
config: MonitoringConfig;
moduleType: INDEX_PATTERN_TYPES;
type?: DS_INDEX_PATTERN_TYPES;
dataset?: string;
namespace?: string;
ccs?: string;
ecsLegacyOnly?: boolean;
}): string {
const legacyIndexPattern = getLegacyIndexPattern({ moduleType, ecsLegacyOnly, config, ccs });
const dsIndexPattern = getDsIndexPattern({ type, moduleType, dataset, namespace, config, ccs });
return `${legacyIndexPattern},${dsIndexPattern}`;
}
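Note: a usage sketch that mirrors the unit tests above; the returned string is always the legacy pattern plus the data stream pattern, optionally narrowed to a dataset and prefixed for cross-cluster search.
// Mirrors the 'elasticsearch index patterns with dataset' test above
// (getConfigWithCcs is the test helper defined at the top of that test file).
const pattern = getNewIndexPatterns({
  config: getConfigWithCcs(true),
  moduleType: 'elasticsearch',
  dataset: 'cluster_stats',
});
// => '.monitoring-es-*,metrics-elasticsearch.cluster_stats-*'
// With ccs: 'myccs' and no dataset, the same call yields
// 'myccs:.monitoring-es-*,myccs:metrics-elasticsearch.*-*'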

View file

@ -1,114 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { set } from '@elastic/safer-lodash-set';
import { MissingRequiredError } from './error_missing_required';
import { ElasticsearchMetric } from './metrics';
import { createQuery } from './create_query';
let metric;
describe('Create Query', () => {
beforeEach(() => {
metric = ElasticsearchMetric.getMetricFields();
});
it('Allows UUID to not be passed', () => {
const options = { metric };
const result = createQuery(options);
const expected = set({}, 'bool.filter', []);
expect(result).toEqual(expected);
});
it('Uses Elasticsearch timestamp field for start and end time range by default', () => {
const options = {
uuid: 'abc123',
start: '2016-03-01 10:00:00',
end: '2016-03-01 10:00:01',
metric,
};
const result = createQuery(options);
let expected = {};
expected = set(expected, 'bool.filter[0].term', {
'source_node.uuid': 'abc123',
});
expected = set(expected, 'bool.filter[1].range.timestamp', {
format: 'epoch_millis',
gte: 1456826400000,
lte: 1456826401000,
});
expect(result).toEqual(expected);
});
it('Injects uuid and timestamp fields dynamically, based on metric', () => {
const options = {
uuid: 'abc123',
start: '2016-03-01 10:00:00',
end: '2016-03-01 10:00:01',
metric: {
uuidField: 'testUuidField',
timestampField: 'testTimestampField',
},
};
const result = createQuery(options);
let expected = set({}, 'bool.filter[0].term.testUuidField', 'abc123');
expected = set(expected, 'bool.filter[1].range.testTimestampField', {
format: 'epoch_millis',
gte: 1456826400000,
lte: 1456826401000,
});
expect(result).toEqual(expected);
});
it('Throws if missing metric.timestampField', () => {
function callCreateQuery() {
const options = {}; // missing metric object
return createQuery(options);
}
expect(callCreateQuery).toThrowError(MissingRequiredError);
});
it('Throws if given uuid but missing metric.uuidField', () => {
function callCreateQuery() {
const options = { uuid: 'abc123', metric };
delete options.metric.uuidField;
return createQuery(options);
}
expect(callCreateQuery).toThrowError(MissingRequiredError);
});
// TODO: tests were not running and need to be updated to pass
it.skip('Uses `type` option to add type filter with minimal fields', () => {
const options = { type: 'test-type-yay', metric };
const result = createQuery(options);
let expected = {};
expected = set(expected, 'bool.filter[0].term', { type: 'test-type-yay' });
expect(result).to.be.eql(expected);
});
it.skip('Uses `type` option to add type filter with all other option fields', () => {
const options = {
type: 'test-type-yay',
uuid: 'abc123',
start: '2016-03-01 10:00:00',
end: '2016-03-01 10:00:01',
metric,
};
const result = createQuery(options);
let expected = {};
expected = set(expected, 'bool.filter[0].term', { type: 'test-type-yay' });
expected = set(expected, 'bool.filter[1].term', {
'source_node.uuid': 'abc123',
});
expected = set(expected, 'bool.filter[2].range.timestamp', {
format: 'epoch_millis',
gte: 1456826400000,
lte: 1456826401000,
});
expect(result).to.be.eql(expected);
});
});

View file

@ -0,0 +1,231 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { MissingRequiredError } from './error_missing_required';
import { ElasticsearchMetric } from './metrics';
import { createQuery } from './create_query';
interface Metric {
uuidField?: string;
timestampField: string;
}
let metric: Metric;
describe('Create Query', () => {
beforeEach(() => {
metric = ElasticsearchMetric.getMetricFields();
});
it('Allows UUID to not be passed', () => {
const options = { metric, clusterUuid: 'cuid123' };
expect(createQuery(options)).toEqual({
bool: { filter: [{ term: { cluster_uuid: 'cuid123' } }] },
});
});
it('Uses Elasticsearch timestamp field for start and end time range by default', () => {
const options = {
clusterUuid: 'cuid123',
uuid: 'abc123',
start: 1456826400000,
end: 14568264010000,
metric,
};
expect(createQuery(options)).toEqual({
bool: {
filter: [
{ term: { cluster_uuid: 'cuid123' } },
{ term: { 'source_node.uuid': 'abc123' } },
{
range: {
timestamp: { format: 'epoch_millis', gte: 1456826400000, lte: 14568264010000 },
},
},
],
},
});
});
it('Injects uuid and timestamp fields dynamically, based on metric', () => {
const options = {
clusterUuid: 'cuid123',
uuid: 'abc123',
start: 1456826400000,
end: 14568264010000,
metric: {
uuidField: 'testUuidField',
timestampField: 'testTimestampField',
},
};
expect(createQuery(options)).toEqual({
bool: {
filter: [
{ term: { cluster_uuid: 'cuid123' } },
{ term: { testUuidField: 'abc123' } },
{
range: {
testTimestampField: {
format: 'epoch_millis',
gte: 1456826400000,
lte: 14568264010000,
},
},
},
],
},
});
});
it('Throws if missing metric.timestampField', () => {
function callCreateQuery() {
const options = { clusterUuid: 'cuid123' }; // missing metric object
return createQuery(options);
}
expect(callCreateQuery).toThrowError(MissingRequiredError);
});
it('Throws if given uuid but missing metric.uuidField', () => {
function callCreateQuery() {
const options = { uuid: 'abc123', clusterUuid: 'cuid123', metric };
delete options.metric.uuidField;
return createQuery(options);
}
expect(callCreateQuery).toThrowError(MissingRequiredError);
});
it('Uses `type` option to add type filter with minimal fields', () => {
const options = { type: 'cluster_stats', clusterUuid: 'cuid123', metric };
expect(createQuery(options)).toEqual({
bool: {
filter: [
{ bool: { should: [{ term: { type: 'cluster_stats' } }] } },
{ term: { cluster_uuid: 'cuid123' } },
],
},
});
});
it('Uses `type` option to add type filter with all other option fields and no data stream fields', () => {
const options = {
type: 'cluster_stats',
clusterUuid: 'cuid123',
uuid: 'abc123',
start: 1456826400000,
end: 14568264000000,
metric,
};
expect(createQuery(options)).toEqual({
bool: {
filter: [
{ bool: { should: [{ term: { type: 'cluster_stats' } }] } },
{ term: { cluster_uuid: 'cuid123' } },
{ term: { 'source_node.uuid': 'abc123' } },
{
range: {
timestamp: { format: 'epoch_millis', gte: 1456826400000, lte: 14568264000000 },
},
},
],
},
});
});
it('Uses `dsDataset` option to add filter with all other option fields', () => {
const options = {
dsDataset: 'elasticsearch.cluster_stats',
clusterUuid: 'cuid123',
uuid: 'abc123',
start: 1456826400000,
end: 14568264000000,
metric,
};
expect(createQuery(options)).toEqual({
bool: {
filter: [
{
bool: {
should: [{ term: { 'data_stream.dataset': 'elasticsearch.cluster_stats' } }],
},
},
{ term: { cluster_uuid: 'cuid123' } },
{ term: { 'source_node.uuid': 'abc123' } },
{
range: {
timestamp: { format: 'epoch_millis', gte: 1456826400000, lte: 14568264000000 },
},
},
],
},
});
});
it('Uses legacy `type`, `dsDataset`, `metricset` options to add type filters and data stream filters with minimal fields, defaulting to the `metrics` data_stream type', () => {
const options = {
type: 'cluster_stats',
metricset: 'cluster_stats',
dsDataset: 'elasticsearch.cluster_stats',
clusterUuid: 'cuid123',
metric,
};
expect(createQuery(options)).toEqual({
bool: {
filter: [
{
bool: {
should: [
{
term: {
'data_stream.dataset': 'elasticsearch.cluster_stats',
},
},
{
term: {
'metricset.name': 'cluster_stats',
},
},
{ term: { type: 'cluster_stats' } },
],
},
},
{ term: { cluster_uuid: 'cuid123' } },
],
},
});
});
it('Uses legacy `type`, `metricset`, `dsDataset`, and `filters` options', () => {
const options = {
type: 'cluster_stats',
metricset: 'cluster_stats',
dsDataset: 'elasticsearch.cluster_stats',
clusterUuid: 'cuid123',
metric,
filters: [
{
term: { 'source_node.uuid': `nuid123` },
},
],
};
expect(createQuery(options)).toEqual({
bool: {
filter: [
{
bool: {
should: [
{ term: { 'data_stream.dataset': 'elasticsearch.cluster_stats' } },
{ term: { 'metricset.name': 'cluster_stats' } },
{ term: { type: 'cluster_stats' } },
],
},
},
{ term: { cluster_uuid: 'cuid123' } },
{ term: { 'source_node.uuid': 'nuid123' } },
],
},
});
});
});

View file

@ -56,7 +56,9 @@ export function createTimeFilter(options: {
* document UUIDs, start time and end time, and injecting additional filters.
*
* Options object:
* @param {String} options.type - `type` field value of the documents
* @param {string} options.type - `type` field value of the documents in legacy .monitoring indices
* @param {string} options.dsDataset - `data_stream.dataset` field values of the documents
* @param {string} options.metricset - `metricset.name` field values of the documents
* @param {Array} options.filters - additional filters to add to the `bool` section of the query. Default: []
* @param {string} options.clusterUuid - a UUID of the cluster. Required.
* @param {string} options.uuid - a UUID of the metric to filter for, or `null` if UUID should not be part of the query
@ -64,30 +66,44 @@ export function createTimeFilter(options: {
* @param {Date} options.end - numeric timestamp (optional)
* @param {Metric} options.metric - Metric instance or metric fields object @see ElasticsearchMetric.getMetricFields
*/
export function createQuery(options: {
interface CreateQueryOptions {
type?: string;
types?: string[];
dsDataset?: string;
metricset?: string;
filters?: any[];
clusterUuid: string;
uuid?: string;
start?: number;
end?: number;
metric?: { uuidField?: string; timestampField: string };
}) {
const { type, types, clusterUuid, uuid, filters } = defaults(options, { filters: [] });
}
export function createQuery(options: CreateQueryOptions) {
const { type, metricset, dsDataset, clusterUuid, uuid, filters } = defaults(options, {
filters: [],
});
const isFromStandaloneCluster = clusterUuid === STANDALONE_CLUSTER_CLUSTER_UUID;
const terms = [];
let typeFilter: any;
// data_stream.dataset matches agent integration data streams
if (dsDataset) {
terms.push({ term: { 'data_stream.dataset': dsDataset } });
}
// metricset.name matches standalone beats
if (metricset) {
terms.push({ term: { 'metricset.name': metricset } });
}
// type matches legacy data
if (type) {
typeFilter = { bool: { should: [{ term: { type } }, { term: { 'metricset.name': type } }] } };
} else if (types) {
terms.push({ term: { type } });
}
if (terms.length) {
typeFilter = {
bool: {
should: [
...types.map((t) => ({ term: { type: t } })),
...types.map((t) => ({ term: { 'metricset.name': t } })),
],
should: [...terms],
},
};
}
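Note: callers can now pass any combination of the legacy `type`, standalone metricbeat `metricset`, and agent `dsDataset` identifiers and get back a single bool/should type filter. An example drawn from the create_query unit tests earlier in this diff:
const query = createQuery({
  type: 'cluster_stats',
  metricset: 'cluster_stats',
  dsDataset: 'elasticsearch.cluster_stats',
  clusterUuid: 'cuid123',
  metric: ElasticsearchMetric.getMetricFields(),
});
// query.bool.filter[0].bool.should matches any of:
//   { term: { 'data_stream.dataset': 'elasticsearch.cluster_stats' } }
//   { term: { 'metricset.name': 'cluster_stats' } }
//   { term: { type: 'cluster_stats' } }
// query.bool.filter[1] is { term: { cluster_uuid: 'cuid123' } }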

View file

@ -16,6 +16,18 @@ import aggMetricsBuckets from './__fixtures__/agg_metrics_buckets';
const min = 1498968000000; // 2017-07-02T04:00:00.000Z
const max = 1499054399999; // 2017-07-03T03:59:59.999Z
jest.mock('../../static_globals', () => ({
Globals: {
app: {
config: {
ui: {
ccs: { enabled: true },
},
},
},
},
}));
function getMockReq(metricsBuckets = []) {
const config = {
get: sinon.stub(),
@ -59,27 +71,25 @@ function getMockReq(metricsBuckets = []) {
};
}
const indexPattern = [];
describe('getMetrics and getSeries', () => {
it('should return metrics with non-derivative metric', async () => {
const req = getMockReq(nonDerivMetricsBuckets);
const metricSet = ['node_cpu_utilization'];
const result = await getMetrics(req, indexPattern, metricSet);
const result = await getMetrics(req, 'elasticsearch', metricSet);
expect(result).toMatchSnapshot();
});
it('should return metrics with derivative metric', async () => {
const req = getMockReq(derivMetricsBuckets);
const metricSet = ['cluster_search_request_rate'];
const result = await getMetrics(req, indexPattern, metricSet);
const result = await getMetrics(req, 'elasticsearch', metricSet);
expect(result).toMatchSnapshot();
});
it('should return metrics with metric containing custom aggs', async () => {
const req = getMockReq(aggMetricsBuckets);
const metricSet = ['cluster_index_latency'];
const result = await getMetrics(req, indexPattern, metricSet);
const result = await getMetrics(req, 'elasticsearch', metricSet);
expect(result).toMatchSnapshot();
});
@ -91,14 +101,14 @@ describe('getMetrics and getSeries', () => {
keys: ['index_mem_fixed_bit_set', 'index_mem_versions'],
},
];
const result = await getMetrics(req, indexPattern, metricSet);
const result = await getMetrics(req, 'elasticsearch', metricSet);
expect(result).toMatchSnapshot();
});
it('should return metrics with metric that uses default calculation', async () => {
const req = getMockReq(nonDerivMetricsBuckets);
const metricSet = ['kibana_max_response_times'];
const result = await getMetrics(req, indexPattern, metricSet);
const result = await getMetrics(req, 'elasticsearch', metricSet);
expect(result).toMatchSnapshot();
});
});
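Note: as the updated assertions above show, the details API now takes a module type rather than a prebuilt index pattern; getSeries resolves the pattern itself via getNewIndexPatterns (see the get_series changes below). A minimal call now looks like:
// Callers pass the module type; the index pattern is resolved internally.
const result = await getMetrics(req, 'elasticsearch', ['node_cpu_utilization']);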

View file

@ -11,20 +11,21 @@ import { getSeries } from './get_series';
import { calculateTimeseriesInterval } from '../calculate_timeseries_interval';
import { getTimezone } from '../get_timezone';
import { LegacyRequest } from '../../types';
import { INDEX_PATTERN_TYPES } from '../../../common/constants';
type Metric = string | { keys: string | string[]; name: string };
// TODO: Switch to an options object argument here
export async function getMetrics(
req: LegacyRequest,
indexPattern: string,
moduleType: INDEX_PATTERN_TYPES,
metricSet: Metric[] = [],
filters: Array<Record<string, any>> = [],
metricOptions: Record<string, any> = {},
numOfBuckets: number = 0,
groupBy: string | Record<string, any> | null = null
) {
checkParam(indexPattern, 'indexPattern in details/getMetrics');
checkParam(moduleType, 'moduleType in details/getMetrics');
checkParam(metricSet, 'metricSet in details/getMetrics');
const config = req.server.config();
@ -53,7 +54,7 @@ export async function getMetrics(
return Promise.all(
metricNames.map((metricName) => {
return getSeries(req, indexPattern, metricName, metricOptions, filters, groupBy, {
return getSeries(req, moduleType, metricName, metricOptions, filters, groupBy, {
min,
max,
bucketSize,

View file

@ -16,9 +16,12 @@ import { formatTimestampToDuration } from '../../../common';
import {
NORMALIZED_DERIVATIVE_UNIT,
CALCULATE_DURATION_UNTIL,
INDEX_PATTERN_TYPES,
STANDALONE_CLUSTER_CLUSTER_UUID,
} from '../../../common/constants';
import { formatUTCTimestampForTimezone } from '../format_timezone';
import { getNewIndexPatterns } from '../cluster/get_index_patterns';
import { Globals } from '../../static_globals';
type SeriesBucket = Bucket & { metric_mb_deriv?: { normalized_value: number } };
@ -117,7 +120,7 @@ function createMetricAggs(metric: Metric) {
async function fetchSeries(
req: LegacyRequest,
indexPattern: string,
moduleType: INDEX_PATTERN_TYPES,
metric: Metric,
metricOptions: any,
groupBy: string | Record<string, any> | null,
@ -175,8 +178,14 @@ async function fetchSeries(
};
}
const indexPatterns = getNewIndexPatterns({
config: Globals.app.config,
moduleType,
ccs: req.payload.ccs,
});
const params = {
index: indexPattern,
index: indexPatterns,
size: 0,
ignore_unavailable: true,
body: {
@ -327,14 +336,15 @@ function handleSeries(
* TODO: This should be expanded to accept multiple metrics in a single request to allow a single date histogram to be used.
*
* @param {Object} req The incoming user's request.
* @param {String} indexPattern The relevant index pattern (not just for Elasticsearch!).
* @param {String} moduleType The relevant module, e.g. elasticsearch, kibana, or logstash.
* @param {String} metricName The name of the metric being plotted.
* @param {Array} filters Any filters that should be applied to the query.
* @return {Promise} The object response containing the {@code timeRange}, {@code metric}, and {@code data}.
*/
export async function getSeries(
req: LegacyRequest,
indexPattern: string,
moduleType: INDEX_PATTERN_TYPES,
metricName: string,
metricOptions: Record<string, any>,
filters: Array<Record<string, any>>,
@ -346,7 +356,7 @@ export async function getSeries(
timezone,
}: { min: string | number; max: string | number; bucketSize: number; timezone: string }
) {
checkParam(indexPattern, 'indexPattern in details/getSeries');
checkParam(moduleType, 'moduleType in details/getSeries');
const metric = metrics[metricName];
if (!metric) {
@ -354,7 +364,7 @@ export async function getSeries(
}
const response = await fetchSeries(
req,
indexPattern,
moduleType,
metric,
metricOptions,
groupBy,

View file

@ -14,9 +14,18 @@ import { ElasticsearchMetric } from '../metrics';
import { createQuery } from '../create_query';
import { ElasticsearchResponse } from '../../../common/types/es';
import { LegacyRequest } from '../../types';
import { getNewIndexPatterns } from '../cluster/get_index_patterns';
import { Globals } from '../../static_globals';
export async function checkCcrEnabled(req: LegacyRequest, esIndexPattern: string) {
checkParam(esIndexPattern, 'esIndexPattern in checkCcrEnabled');
export async function checkCcrEnabled(req: LegacyRequest, ccs: string) {
const dataset = 'cluster_stats';
const moduleType = 'elasticsearch';
const indexPatterns = getNewIndexPatterns({
config: Globals.app.config,
moduleType,
dataset,
ccs,
});
const start = moment.utc(req.payload.timeRange.min).valueOf();
const end = moment.utc(req.payload.timeRange.max).valueOf();
@ -25,12 +34,14 @@ export async function checkCcrEnabled(req: LegacyRequest, esIndexPattern: string
const metricFields = ElasticsearchMetric.getMetricFields();
const params = {
index: esIndexPattern,
index: indexPatterns,
size: 1,
ignore_unavailable: true,
body: {
query: createQuery({
type: 'cluster_stats',
type: dataset,
dsDataset: `${moduleType}.${dataset}`,
metricset: dataset,
start,
end,
clusterUuid,

View file

@ -19,6 +19,8 @@ import {
ElasticsearchResponseHit,
} from '../../../common/types/es';
import { LegacyRequest } from '../../types';
import { getNewIndexPatterns } from '../cluster/get_index_patterns';
import { Globals } from '../../static_globals';
/**
* Filter out shard activity that we do not care about.
@ -86,37 +88,63 @@ export function handleMbLastRecoveries(resp: ElasticsearchResponse, start: numbe
return filtered;
}
export async function getLastRecovery(
req: LegacyRequest,
esIndexPattern: string,
esIndexPatternEcs: string,
size: number
) {
checkParam(esIndexPattern, 'esIndexPattern in elasticsearch/getLastRecovery');
export async function getLastRecovery(req: LegacyRequest, size: number) {
const start = req.payload.timeRange.min;
const end = req.payload.timeRange.max;
const clusterUuid = req.params.clusterUuid;
const metric = ElasticsearchMetric.getMetricFields();
const dataset = 'index_recovery';
const moduleType = 'elasticsearch';
const indexPattern = getNewIndexPatterns({
config: Globals.app.config,
moduleType,
dataset,
ccs: req.payload.ccs,
});
const legacyParams = {
index: esIndexPattern,
index: indexPattern,
size: 1,
ignore_unavailable: true,
body: {
_source: ['index_recovery.shards'],
sort: { timestamp: { order: 'desc', unmapped_type: 'long' } },
query: createQuery({ type: 'index_recovery', start, end, clusterUuid, metric }),
query: createQuery({
type: dataset,
metricset: dataset,
start,
end,
clusterUuid,
metric,
}),
},
};
const indexPatternEcs = getNewIndexPatterns({
config: Globals.app.config,
moduleType,
dataset,
ccs: req.payload.ccs,
ecsLegacyOnly: true,
});
const ecsParams = {
index: esIndexPatternEcs,
index: indexPatternEcs,
size,
ignore_unavailable: true,
body: {
_source: ['elasticsearch.index.recovery', '@timestamp'],
sort: { timestamp: { order: 'desc', unmapped_type: 'long' } },
query: createQuery({ type: 'index_recovery', start, end, clusterUuid, metric }),
query: createQuery({
type: dataset,
dsDataset: `${moduleType}.${dataset}`,
metricset: dataset,
start,
end,
clusterUuid,
metric,
}),
aggs: {
max_timestamp: {
max: {

View file

@ -15,6 +15,8 @@ import { ElasticsearchMetric } from '../metrics';
import { ML_SUPPORTED_LICENSES } from '../../../common/constants';
import { ElasticsearchResponse, ElasticsearchSource } from '../../../common/types/es';
import { LegacyRequest } from '../../types';
import { getNewIndexPatterns } from '../cluster/get_index_patterns';
import { Globals } from '../../static_globals';
/*
* Get a listing of jobs along with some metric data to use for the listing
@ -37,17 +39,26 @@ export function handleResponse(response: ElasticsearchResponse) {
export type MLJobs = ReturnType<typeof handleResponse>;
export function getMlJobs(req: LegacyRequest, esIndexPattern: string) {
checkParam(esIndexPattern, 'esIndexPattern in getMlJobs');
export function getMlJobs(req: LegacyRequest) {
const config = req.server.config();
const maxBucketSize = config.get('monitoring.ui.max_bucket_size');
const start = req.payload.timeRange.min; // no wrapping in moment :)
const end = req.payload.timeRange.max;
const clusterUuid = req.params.clusterUuid;
const metric = ElasticsearchMetric.getMetricFields();
const dataset = 'ml_job';
const type = 'job_stats';
const moduleType = 'elasticsearch';
const indexPatterns = getNewIndexPatterns({
config: Globals.app.config,
ccs: req.payload.ccs,
moduleType,
dataset,
});
const params = {
index: esIndexPattern,
index: indexPatterns,
size: maxBucketSize,
ignore_unavailable: true,
filter_path: [
@ -69,7 +80,15 @@ export function getMlJobs(req: LegacyRequest, esIndexPattern: string) {
body: {
sort: { timestamp: { order: 'desc', unmapped_type: 'long' } },
collapse: { field: 'job_stats.job_id' },
query: createQuery({ types: ['ml_job', 'job_stats'], start, end, clusterUuid, metric }),
query: createQuery({
type,
dsDataset: `${moduleType}.${dataset}`,
metricset: dataset,
start,
end,
clusterUuid,
metric,
}),
},
};
@ -81,11 +100,7 @@ export function getMlJobs(req: LegacyRequest, esIndexPattern: string) {
* the issue is that cardinality isn't guaranteed to be accurate,
* but it will be as long as the precision threshold is >= the actual value
*/
export function getMlJobsForCluster(
req: LegacyRequest,
esIndexPattern: string,
cluster: ElasticsearchSource
) {
export function getMlJobsForCluster(req: LegacyRequest, cluster: ElasticsearchSource, ccs: string) {
const license = cluster.license ?? cluster.elasticsearch?.cluster?.stats?.license ?? {};
if (license.status === 'active' && includes(ML_SUPPORTED_LICENSES, license.type)) {
@ -94,19 +109,37 @@ export function getMlJobsForCluster(
const end = req.payload.timeRange.max;
const clusterUuid = req.params.clusterUuid;
const metric = ElasticsearchMetric.getMetricFields();
const type = 'job_stats';
const dataset = 'ml_job';
const moduleType = 'elasticsearch';
const indexPatterns = getNewIndexPatterns({
config: Globals.app.config,
moduleType,
dataset,
ccs,
});
const params = {
index: esIndexPattern,
index: indexPatterns,
size: 0,
ignore_unavailable: true,
filter_path: 'aggregations.jobs_count.value',
body: {
query: createQuery({ types: ['ml_job', 'job_stats'], start, end, clusterUuid, metric }),
query: createQuery({
type,
dsDataset: `${moduleType}.${dataset}`,
metricset: dataset,
start,
end,
clusterUuid,
metric,
}),
aggs: {
jobs_count: { cardinality: { field: 'job_stats.job_id' } },
},
},
};
const { callWithRequest } = req.server.plugins.elasticsearch.getCluster('monitoring');
return callWithRequest(req, 'search', params).then((response: ElasticsearchResponse) => {

View file

@ -15,6 +15,8 @@ import { createQuery } from '../../create_query';
import { ElasticsearchMetric } from '../../metrics';
import { ElasticsearchResponse } from '../../../../common/types/es';
import { LegacyRequest } from '../../../types';
import { getNewIndexPatterns } from '../../cluster/get_index_patterns';
import { Globals } from '../../../static_globals';
export function handleResponse(shardStats: any, indexUuid: string) {
return (response: ElasticsearchResponse) => {
@ -64,7 +66,6 @@ export function handleResponse(shardStats: any, indexUuid: string) {
export function getIndexSummary(
req: LegacyRequest,
esIndexPattern: string,
shardStats: any,
{
clusterUuid,
@ -73,7 +74,15 @@ export function getIndexSummary(
end,
}: { clusterUuid: string; indexUuid: string; start: number; end: number }
) {
checkParam(esIndexPattern, 'esIndexPattern in elasticsearch/getIndexSummary');
const dataset = 'index'; // data_stream.dataset
const type = 'index_stats'; // legacy
const moduleType = 'elasticsearch';
const indexPatterns = getNewIndexPatterns({
config: Globals.app.config,
dataset,
moduleType,
ccs: req.payload.ccs,
});
const metric = ElasticsearchMetric.getMetricFields();
const filters = [
@ -87,13 +96,15 @@ export function getIndexSummary(
},
];
const params = {
index: esIndexPattern,
index: indexPatterns,
size: 1,
ignore_unavailable: true,
body: {
sort: { timestamp: { order: 'desc', unmapped_type: 'long' } },
query: createQuery({
types: ['index', 'index_stats'],
type,
dsDataset: `${moduleType}.${dataset}`,
metricset: dataset,
start,
end,
clusterUuid,

View file

@ -19,6 +19,8 @@ import { calculateRate } from '../../calculate_rate';
import { getUnassignedShards } from '../shards';
import { ElasticsearchResponse } from '../../../../common/types/es';
import { LegacyRequest } from '../../../types';
import { getNewIndexPatterns } from '../../cluster/get_index_patterns';
import { Globals } from '../../../static_globals';
export function handleResponse(
resp: ElasticsearchResponse,
@ -95,7 +97,7 @@ export function handleResponse(
}
export function buildGetIndicesQuery(
esIndexPattern: string,
req: LegacyRequest,
clusterUuid: string,
{
start,
@ -113,9 +115,18 @@ export function buildGetIndicesQuery(
});
}
const metricFields = ElasticsearchMetric.getMetricFields();
const dataset = 'index'; // data_stream.dataset
const type = 'index_stats'; // legacy
const moduleType = 'elasticsearch';
const indexPatterns = getNewIndexPatterns({
config: Globals.app.config,
ccs: req.payload.ccs,
dataset,
moduleType,
});
return {
index: esIndexPattern,
index: indexPatterns,
size,
ignore_unavailable: true,
filter_path: [
@ -145,7 +156,9 @@ export function buildGetIndicesQuery(
],
body: {
query: createQuery({
types: ['index', 'index_stats'],
type,
dsDataset: `${moduleType}.${dataset}`,
metricset: dataset,
start,
end,
clusterUuid,
@ -167,17 +180,14 @@ export function buildGetIndicesQuery(
export function getIndices(
req: LegacyRequest,
esIndexPattern: string,
showSystemIndices: boolean = false,
shardStats: any
) {
checkParam(esIndexPattern, 'esIndexPattern in elasticsearch/getIndices');
const { min: start, max: end } = req.payload.timeRange;
const clusterUuid = req.params.clusterUuid;
const config = req.server.config();
const params = buildGetIndicesQuery(esIndexPattern, clusterUuid, {
const params = buildGetIndicesQuery(req, clusterUuid, {
start,
end,
showSystemIndices,

View file

@ -24,6 +24,8 @@ import {
ElasticsearchLegacySource,
} from '../../../../common/types/es';
import { LegacyRequest } from '../../../types';
import { getNewIndexPatterns } from '../../cluster/get_index_patterns';
import { Globals } from '../../../static_globals';
export function handleResponse(
clusterState: ElasticsearchSource['cluster_state'],
@ -100,7 +102,6 @@ export function handleResponse(
export function getNodeSummary(
req: LegacyRequest,
esIndexPattern: string,
clusterState: ElasticsearchSource['cluster_state'],
shardStats: any,
{
@ -110,25 +111,40 @@ export function getNodeSummary(
end,
}: { clusterUuid: string; nodeUuid: string; start: number; end: number }
) {
checkParam(esIndexPattern, 'esIndexPattern in elasticsearch/getNodeSummary');
// Build up the Elasticsearch request
const metric = ElasticsearchMetric.getMetricFields();
const filters = [
{
term: { 'source_node.uuid': nodeUuid },
},
];
const dataset = 'node_stats';
const moduleType = 'elasticsearch';
const indexPatterns = getNewIndexPatterns({
config: Globals.app.config,
ccs: req.payload.ccs,
dataset,
moduleType,
});
const params = {
index: esIndexPattern,
index: indexPatterns,
size: 1,
ignore_unavailable: true,
body: {
sort: { timestamp: { order: 'desc', unmapped_type: 'long' } },
query: createQuery({ type: 'node_stats', start, end, clusterUuid, metric, filters }),
query: createQuery({
type: dataset,
dsDataset: `${moduleType}.${dataset}`,
metricset: dataset,
start,
end,
clusterUuid,
metric,
filters,
}),
},
};
const { callWithRequest } = req.server.plugins.elasticsearch.getCluster('monitoring');
return callWithRequest(req, 'search', params).then(
handleResponse(clusterState, shardStats, nodeUuid)

View file

@ -7,6 +7,18 @@
import { getNodeIds } from './get_node_ids';
jest.mock('../../../../static_globals', () => ({
Globals: {
app: {
config: {
ui: {
ccs: { enabled: true },
},
},
},
},
}));
describe('getNodeIds', () => {
it('should return a list of ids and uuids', async () => {
const callWithRequest = jest.fn().mockReturnValue({
@ -37,6 +49,9 @@ describe('getNodeIds', () => {
},
},
server: {
config: () => ({
get: () => true,
}),
plugins: {
elasticsearch: {
getCluster: () => ({

View file

@ -10,16 +10,26 @@ import { get } from 'lodash';
import { ElasticsearchMetric } from '../../../metrics';
import { createQuery } from '../../../create_query';
import { LegacyRequest, Bucket } from '../../../../types';
import { getNewIndexPatterns } from '../../../cluster/get_index_patterns';
import { Globals } from '../../../../static_globals';
export async function getNodeIds(
req: LegacyRequest,
indexPattern: string,
{ clusterUuid }: { clusterUuid: string },
size: number
) {
const start = moment.utc(req.payload.timeRange.min).valueOf();
const end = moment.utc(req.payload.timeRange.max).valueOf();
const dataset = 'node_stats';
const moduleType = 'elasticsearch';
const indexPattern = getNewIndexPatterns({
config: Globals.app.config,
ccs: req.payload.ccs,
moduleType,
dataset,
});
const params = {
index: indexPattern,
size: 0,
@ -27,7 +37,9 @@ export async function getNodeIds(
filter_path: ['aggregations.composite_data.buckets'],
body: {
query: createQuery({
type: 'node_stats',
type: dataset,
dsDataset: `${moduleType}.${dataset}`,
metricset: dataset,
start,
end,
metric: ElasticsearchMetric.getMetricFields(),
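
Hunks like this one replace a caller-supplied index pattern with getNewIndexPatterns, which combines the legacy .monitoring-* pattern with the matching data-stream pattern and applies any cross-cluster prefix. The helper below is a simplified stand-in to illustrate the idea only; the concrete pattern names and the real logic in cluster/get_index_patterns.ts may differ.

// Simplified stand-in for getNewIndexPatterns (illustration only; actual behaviour and
// pattern names live in server/lib/cluster/get_index_patterns.ts and may differ).
function sketchIndexPatterns({
  config,
  moduleType,
  dataset,
  ccs,
}: {
  config: { ui: { ccs: { enabled: boolean } } };
  moduleType: string;
  dataset?: string; // e.g. 'node_stats'; omitted to match every dataset of the module
  ccs?: string;     // e.g. 'remote' or '*'
}): string {
  const legacy = `.monitoring-${moduleType === 'elasticsearch' ? 'es' : moduleType}-*`;
  const dataStream = `metrics-${moduleType}.${dataset ?? '*'}-*`;
  const patterns = [legacy, dataStream];
  if (config.ui.ccs.enabled && ccs) {
    // Prefix each pattern with the remote cluster name so the query also hits remote clusters.
    patterns.push(...patterns.map((p) => `${ccs}:${p}`));
  }
  return patterns.join(',');
}

// e.g. sketchIndexPatterns({ config, moduleType: 'elasticsearch', dataset: 'node_stats', ccs: 'remote' })
//   -> '.monitoring-es-*,metrics-elasticsearch.node_stats-*,remote:.monitoring-es-*,remote:metrics-elasticsearch.node_stats-*'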

View file

@ -6,7 +6,6 @@
*/
import moment from 'moment';
import { checkParam } from '../../../error_missing_required';
import { createQuery } from '../../../create_query';
import { calculateAuto } from '../../../calculate_auto';
import { ElasticsearchMetric } from '../../../metrics';
@ -15,6 +14,8 @@ import { handleResponse } from './handle_response';
import { LISTING_METRICS_NAMES, LISTING_METRICS_PATHS } from './nodes_listing_metrics';
import { LegacyRequest } from '../../../../types';
import { ElasticsearchModifiedSource } from '../../../../../common/types/es';
import { getNewIndexPatterns } from '../../../cluster/get_index_patterns';
import { Globals } from '../../../../static_globals';
/* Run an aggregation on node_stats to get stat data for the selected time
* range for all the active nodes. Every option is a key to a configuration
@ -35,13 +36,10 @@ import { ElasticsearchModifiedSource } from '../../../../../common/types/es';
*/
export async function getNodes(
req: LegacyRequest,
esIndexPattern: string,
pageOfNodes: Array<{ uuid: string }>,
clusterStats: ElasticsearchModifiedSource,
nodesShardCount: { nodes: { [nodeId: string]: { shardCount: number } } }
) {
checkParam(esIndexPattern, 'esIndexPattern in getNodes');
const start = moment.utc(req.payload.timeRange.min).valueOf();
const orgStart = start;
const end = moment.utc(req.payload.timeRange.max).valueOf();
@ -67,13 +65,24 @@ export async function getNodes(
},
];
const dataset = 'node_stats';
const moduleType = 'elasticsearch';
const indexPatterns = getNewIndexPatterns({
config: Globals.app.config,
ccs: req.payload.ccs,
moduleType,
dataset,
});
const params = {
index: esIndexPattern,
index: indexPatterns,
size: config.get('monitoring.ui.max_bucket_size'),
ignore_unavailable: true,
body: {
query: createQuery({
type: 'node_stats',
type: dataset,
dsDataset: `${moduleType}.${dataset}`,
metricset: dataset,
start,
end,
clusterUuid,
@ -112,7 +121,6 @@ export async function getNodes(
...LISTING_METRICS_PATHS,
],
};
const { callWithRequest } = req.server.plugins.elasticsearch.getCluster('monitoring');
const response = await callWithRequest(req, 'search', params);

View file

@ -47,7 +47,6 @@ describe('getPaginatedNodes', () => {
}),
},
};
const esIndexPattern = '.monitoring-es-*';
const clusterUuid = '1abc';
const metricSet = ['foo', 'bar'];
const pagination = { index: 0, size: 10 };
@ -74,7 +73,6 @@ describe('getPaginatedNodes', () => {
it('should return a subset based on the pagination parameters', async () => {
const nodes = await getPaginatedNodes(
req,
esIndexPattern,
{ clusterUuid },
metricSet,
pagination,
@ -94,7 +92,6 @@ describe('getPaginatedNodes', () => {
it('should return a sorted subset', async () => {
const nodes = await getPaginatedNodes(
req,
esIndexPattern,
{ clusterUuid },
metricSet,
pagination,
@ -111,17 +108,11 @@ describe('getPaginatedNodes', () => {
});
});
it('should return a filterd subset', async () => {
const nodes = await getPaginatedNodes(
req,
esIndexPattern,
{ clusterUuid },
metricSet,
pagination,
sort,
'tw',
{ clusterStats, nodesShardCount }
);
it('should return a filtered subset', async () => {
const nodes = await getPaginatedNodes(req, { clusterUuid }, metricSet, pagination, sort, 'tw', {
clusterStats,
nodesShardCount,
});
expect(nodes).toEqual({
pageOfNodes: [{ name: 'two', uuid: 2, isOnline: false, shardCount: 5, foo: 12 }],
totalNodeCount: 1,

View file

@ -41,7 +41,6 @@ interface Node {
export async function getPaginatedNodes(
req: LegacyRequest,
esIndexPattern: string,
{ clusterUuid }: { clusterUuid: string },
metricSet: string[],
pagination: { index: number; size: number },
@ -59,7 +58,7 @@ export async function getPaginatedNodes(
) {
const config = req.server.config();
const size = Number(config.get('monitoring.ui.max_bucket_size'));
const nodes: Node[] = await getNodeIds(req, esIndexPattern, { clusterUuid }, size);
const nodes: Node[] = await getNodeIds(req, { clusterUuid }, size);
// Add `isOnline` and shards from the cluster state and shard stats
const clusterState = clusterStats?.cluster_state ?? { nodes: {} };
@ -87,13 +86,14 @@ export async function getPaginatedNodes(
};
const metricSeriesData = await getMetrics(
req,
esIndexPattern,
'elasticsearch',
metricSet,
filters,
{ nodes },
4,
groupBy
);
for (const metricName in metricSeriesData) {
if (!metricSeriesData.hasOwnProperty(metricName)) {
continue;
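
getMetrics follows the same pattern: callers now pass the module name rather than a prebuilt index pattern, and resolving the concrete indices is assumed to happen inside getMetrics. A sketch of the call-site change, mirroring the hunk above:

// Sketch of the call-site change (mirrors the hunk above; getMetrics internals are assumed).
// before: getMetrics(req, esIndexPattern, metricSet, filters, { nodes }, 4, groupBy)
// after:  the module name is passed and getMetrics resolves the index patterns itself.
const metricSeriesData = await getMetrics(
  req,
  'elasticsearch',  // module name replaces the esIndexPattern argument
  metricSet,
  filters,
  { nodes },
  4,                // unchanged positional argument carried over from the existing call
  groupBy
);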

View file

@ -7,6 +7,18 @@
import { getIndicesUnassignedShardStats } from './get_indices_unassigned_shard_stats';
jest.mock('../../../static_globals', () => ({
Globals: {
app: {
config: {
ui: {
ccs: { enabled: true },
},
},
},
},
}));
describe('getIndicesUnassignedShardStats', () => {
it('should return the unassigned shard stats for indices', async () => {
const indices = {
@ -16,6 +28,7 @@ describe('getIndicesUnassignedShardStats', () => {
};
const req = {
payload: {},
server: {
config: () => ({
get: () => {},
@ -52,9 +65,8 @@ describe('getIndicesUnassignedShardStats', () => {
},
},
};
const esIndexPattern = '*';
const cluster = {};
const stats = await getIndicesUnassignedShardStats(req, esIndexPattern, cluster);
const stats = await getIndicesUnassignedShardStats(req, cluster);
expect(stats.indices).toEqual(indices);
});
});

View file

@ -16,12 +16,10 @@ import { ElasticsearchMetric } from '../../metrics';
import { calculateIndicesTotals } from './calculate_shard_stat_indices_totals';
import { LegacyRequest } from '../../../types';
import { ElasticsearchModifiedSource } from '../../../../common/types/es';
import { getNewIndexPatterns } from '../../cluster/get_index_patterns';
import { Globals } from '../../../static_globals';
async function getUnassignedShardData(
req: LegacyRequest,
esIndexPattern: string,
cluster: ElasticsearchModifiedSource
) {
async function getUnassignedShardData(req: LegacyRequest, cluster: ElasticsearchModifiedSource) {
const config = req.server.config();
const maxBucketSize = config.get('monitoring.ui.max_bucket_size');
const metric = ElasticsearchMetric.getMetricFields();
@ -38,14 +36,26 @@ async function getUnassignedShardData(
});
}
const dataset = 'shard'; // data_stream.dataset
const type = 'shards'; // legacy
const moduleType = 'elasticsearch';
const indexPattern = getNewIndexPatterns({
config: Globals.app.config,
ccs: req.payload.ccs,
moduleType,
dataset,
});
const params = {
index: esIndexPattern,
index: indexPattern,
size: 0,
ignore_unavailable: true,
body: {
sort: { timestamp: { order: 'desc', unmapped_type: 'long' } },
query: createQuery({
types: ['shard', 'shards'],
type,
dsDataset: `${moduleType}.${dataset}`,
metricset: dataset,
clusterUuid: cluster.cluster_uuid ?? cluster.elasticsearch?.cluster?.id,
metric,
filters,
@ -84,12 +94,9 @@ async function getUnassignedShardData(
export async function getIndicesUnassignedShardStats(
req: LegacyRequest,
esIndexPattern: string,
cluster: ElasticsearchModifiedSource
) {
checkParam(esIndexPattern, 'esIndexPattern in elasticsearch/getShardStats');
const response = await getUnassignedShardData(req, esIndexPattern, cluster);
const response = await getUnassignedShardData(req, cluster);
const indices = get(response, 'aggregations.indices.buckets', []).reduce(
(accum: any, bucket: any) => {
const index = bucket.key;
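
The shard queries above resolve the cluster UUID with cluster.cluster_uuid ?? cluster.elasticsearch?.cluster?.id, so both legacy and ECS-shaped cluster documents are accepted. A minimal sketch of that fallback; the interface is narrowed here for illustration (the real code uses ElasticsearchModifiedSource):

// Minimal sketch of the cluster UUID fallback used by the shard queries above.
interface ClusterDoc {
  cluster_uuid?: string;                          // legacy internal collection
  elasticsearch?: { cluster?: { id?: string } };  // package / ECS-shaped documents
}

function resolveClusterUuid(cluster: ClusterDoc): string | undefined {
  return cluster.cluster_uuid ?? cluster.elasticsearch?.cluster?.id;
}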

View file

@ -7,6 +7,18 @@
import { getNodesShardCount } from './get_nodes_shard_count';
jest.mock('../../../static_globals', () => ({
Globals: {
app: {
config: {
ui: {
ccs: { enabled: true },
},
},
},
},
}));
describe('getNodeShardCount', () => {
it('should return the shard count per node', async () => {
const nodes = {
@ -16,6 +28,7 @@ describe('getNodeShardCount', () => {
};
const req = {
payload: {},
server: {
config: () => ({
get: () => {},
@ -38,9 +51,8 @@ describe('getNodeShardCount', () => {
},
},
};
const esIndexPattern = '*';
const cluster = {};
const counts = await getNodesShardCount(req, esIndexPattern, cluster);
const counts = await getNodesShardCount(req, cluster);
expect(counts.nodes).toEqual(nodes);
});
});

View file

@ -14,12 +14,10 @@ import { createQuery } from '../../create_query';
import { ElasticsearchMetric } from '../../metrics';
import { LegacyRequest } from '../../../types';
import { ElasticsearchModifiedSource } from '../../../../common/types/es';
import { getNewIndexPatterns } from '../../cluster/get_index_patterns';
import { Globals } from '../../../static_globals';
async function getShardCountPerNode(
req: LegacyRequest,
esIndexPattern: string,
cluster: ElasticsearchModifiedSource
) {
async function getShardCountPerNode(req: LegacyRequest, cluster: ElasticsearchModifiedSource) {
const config = req.server.config();
const maxBucketSize = config.get('monitoring.ui.max_bucket_size');
const metric = ElasticsearchMetric.getMetricFields();
@ -35,15 +33,26 @@ async function getShardCountPerNode(
},
});
}
const dataset = 'shard'; // data_stream.dataset
const type = 'shards'; // legacy
const moduleType = 'elasticsearch';
const indexPattern = getNewIndexPatterns({
config: Globals.app.config,
ccs: req.payload.ccs,
moduleType,
dataset,
});
const params = {
index: esIndexPattern,
index: indexPattern,
size: 0,
ignore_unavailable: true,
body: {
sort: { timestamp: { order: 'desc', unmapped_type: 'long' } },
query: createQuery({
types: ['shard', 'shards'],
type,
dsDataset: `${moduleType}.${dataset}`,
metricset: dataset,
clusterUuid: cluster.cluster_uuid ?? cluster.elasticsearch?.cluster?.id,
metric,
filters,
@ -63,14 +72,8 @@ async function getShardCountPerNode(
return await callWithRequest(req, 'search', params);
}
export async function getNodesShardCount(
req: LegacyRequest,
esIndexPattern: string,
cluster: ElasticsearchModifiedSource
) {
checkParam(esIndexPattern, 'esIndexPattern in elasticsearch/getShardStats');
const response = await getShardCountPerNode(req, esIndexPattern, cluster);
export async function getNodesShardCount(req: LegacyRequest, cluster: ElasticsearchModifiedSource) {
const response = await getShardCountPerNode(req, cluster);
const nodes = get(response, 'aggregations.nodes.buckets', []).reduce(
(accum: any, bucket: any) => {
accum[bucket.key] = { shardCount: bucket.doc_count };

View file

@ -6,13 +6,16 @@
*/
// @ts-ignore
import { checkParam } from '../../error_missing_required';
import { StringOptions } from '@kbn/config-schema/target_types/types';
// @ts-ignore
import { createQuery } from '../../create_query';
// @ts-ignore
import { ElasticsearchMetric } from '../../metrics';
import { ElasticsearchResponse, ElasticsearchLegacySource } from '../../../../common/types/es';
import { LegacyRequest } from '../../../types';
import { getNewIndexPatterns } from '../../cluster/get_index_patterns';
import { Globals } from '../../../static_globals';
export function handleResponse(response: ElasticsearchResponse) {
const hits = response.hits?.hits;
if (!hits) {
@ -57,15 +60,12 @@ export function handleResponse(response: ElasticsearchResponse) {
export function getShardAllocation(
req: LegacyRequest,
esIndexPattern: string,
{
shardFilter,
stateUuid,
showSystemIndices = false,
}: { shardFilter: any; stateUuid: string; showSystemIndices: boolean }
) {
checkParam(esIndexPattern, 'esIndexPattern in elasticsearch/getShardAllocation');
const filters = [
{
bool: {
@ -100,15 +100,32 @@ export function getShardAllocation(
const config = req.server.config();
const clusterUuid = req.params.clusterUuid;
const metric = ElasticsearchMetric.getMetricFields();
const dataset = 'shard'; // data_stream.dataset
const type = 'shards'; // legacy
const moduleType = 'elasticsearch';
const indexPatterns = getNewIndexPatterns({
config: Globals.app.config,
ccs: req.payload.ccs,
dataset,
moduleType,
});
const params = {
index: esIndexPattern,
index: indexPatterns,
size: config.get('monitoring.ui.max_bucket_size'),
ignore_unavailable: true,
body: {
query: createQuery({ types: ['shard', 'shards'], clusterUuid, metric, filters }),
query: createQuery({
type,
dsDataset: `${moduleType}.${dataset}`,
metricset: dataset,
clusterUuid,
metric,
filters,
}),
},
};
const { callWithRequest } = req.server.plugins.elasticsearch.getCluster('monitoring');
return callWithRequest(req, 'search', params).then(handleResponse);
}

View file

@ -20,6 +20,8 @@ import { getShardAggs } from './get_shard_stat_aggs';
import { calculateIndicesTotals } from './calculate_shard_stat_indices_totals';
import { LegacyRequest } from '../../../types';
import { ElasticsearchResponse, ElasticsearchModifiedSource } from '../../../../common/types/es';
import { getNewIndexPatterns } from '../../cluster/get_index_patterns';
import { Globals } from '../../../static_globals';
export function handleResponse(
resp: ElasticsearchResponse,
@ -55,11 +57,18 @@ export function handleResponse(
export function getShardStats(
req: LegacyRequest,
esIndexPattern: string,
cluster: ElasticsearchModifiedSource,
{ includeNodes = false, includeIndices = false, indexName = null, nodeUuid = null } = {}
) {
checkParam(esIndexPattern, 'esIndexPattern in elasticsearch/getShardStats');
const dataset = 'shard'; // data_stream.dataset
const type = 'shards'; // legacy
const moduleType = 'elasticsearch';
const indexPatterns = getNewIndexPatterns({
config: Globals.app.config,
ccs: req.payload.ccs,
moduleType,
dataset,
});
const config = req.server.config();
const metric = ElasticsearchMetric.getMetricFields();
@ -95,13 +104,15 @@ export function getShardStats(
});
}
const params = {
index: esIndexPattern,
index: indexPatterns,
size: 0,
ignore_unavailable: true,
body: {
sort: { timestamp: { order: 'desc', unmapped_type: 'long' } },
query: createQuery({
types: ['shard', 'shards'],
type,
dsDataset: `${moduleType}.${dataset}`,
metricset: dataset,
clusterUuid: cluster.cluster_uuid ?? cluster.elasticsearch?.cluster?.id,
metric,
filters,
@ -111,7 +122,6 @@ export function getShardStats(
},
},
};
const { callWithRequest } = req.server.plugins.elasticsearch.getCluster('monitoring');
return callWithRequest(req, 'search', params).then((resp) => {
return handleResponse(resp, includeNodes, includeIndices, cluster);

View file

@ -48,7 +48,8 @@ export function normalizeNodeShards(masterNode: string) {
[node.key]: {
shardCount: node.doc_count,
indexCount: node.index_count.value,
name: node.node_names.buckets[0].key,
// This field is always missing for package data, possibly a package issue: the elasticsearch.node.name ECS field doesn't exist, so fall back to a placeholder name.
name: node.node_names.buckets[0]?.key || 'NO NAME',
node_ids: nodeIds,
type: calculateNodeType(_node, masterNode), // put the "star" icon on the node link in the shard allocator
},

View file

@ -16,7 +16,6 @@ import { STANDALONE_CLUSTER_CLUSTER_UUID } from '../../../common/constants';
*/
export function createEnterpriseSearchQuery(options: {
filters?: any[];
types?: string[];
metric?: EnterpriseSearchMetricFields;
uuid?: string;
start?: number;
@ -25,7 +24,6 @@ export function createEnterpriseSearchQuery(options: {
const opts = {
filters: [] as any[],
metric: EnterpriseSearchMetric.getMetricFields(),
types: ['health', 'stats'],
clusterUuid: STANDALONE_CLUSTER_CLUSTER_UUID, // This is to disable the stack monitoring clusterUuid filter
...(options ?? {}),
};
@ -33,6 +31,26 @@ export function createEnterpriseSearchQuery(options: {
opts.filters.push({
bool: {
should: [
{
term: {
type: 'health',
},
},
{
term: {
type: 'stats',
},
},
{
term: {
'metricset.name': 'health',
},
},
{
term: {
'metricset.name': 'stats',
},
},
{ term: { 'event.dataset': 'enterprisesearch.health' } },
{ term: { 'event.dataset': 'enterprisesearch.stats' } },
],
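
Enterprise Search has no createQuery-style type handling yet, so the document-type filter is spelled out as individual term clauses above. For reference, an equivalent and more compact formulation using terms queries is shown below; this is an illustration only, the commit keeps the explicit clauses, and minimum_should_match is assumed since the hunk is truncated.

// Equivalent shape for the Enterprise Search document-type filter (illustration only).
const entSearchDocTypeFilter = {
  bool: {
    should: [
      { terms: { type: ['health', 'stats'] } },              // legacy internal collection
      { terms: { 'metricset.name': ['health', 'stats'] } },  // standalone Metricbeat
      { terms: { 'event.dataset': ['enterprisesearch.health', 'enterprisesearch.stats'] } },
    ],
    minimum_should_match: 1, // assumed; not visible in the truncated hunk above
  },
};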

View file

@ -7,7 +7,6 @@
import { ElasticsearchResponse } from '../../../common/types/es';
import { LegacyRequest, Cluster } from '../../types';
import { checkParam } from '../error_missing_required';
import { createEnterpriseSearchQuery } from './create_enterprise_search_query';
import { EnterpriseSearchMetric } from '../metrics';
import {
@ -15,6 +14,8 @@ import {
entSearchAggResponseHandler,
entSearchUuidsAgg,
} from './_enterprise_search_stats';
import { getLegacyIndexPattern } from '../cluster/get_index_patterns';
import { Globals } from '../../static_globals';
function handleResponse(clusterUuid: string, response: ElasticsearchResponse) {
const stats = entSearchAggResponseHandler(response);
@ -27,24 +28,25 @@ function handleResponse(clusterUuid: string, response: ElasticsearchResponse) {
export function getEnterpriseSearchForClusters(
req: LegacyRequest,
entSearchIndexPattern: string,
clusters: Cluster[]
clusters: Cluster[],
ccs: string
) {
checkParam(
entSearchIndexPattern,
'entSearchIndexPattern in enterprise_earch/getEnterpriseSearchForClusters'
);
const start = req.payload.timeRange.min;
const end = req.payload.timeRange.max;
const config = req.server.config();
const maxBucketSize = config.get('monitoring.ui.max_bucket_size');
const indexPatterns = getLegacyIndexPattern({
moduleType: 'enterprisesearch',
ccs,
config: Globals.app.config,
});
return Promise.all(
clusters.map(async (cluster) => {
const clusterUuid = cluster.elasticsearch?.cluster?.id ?? cluster.cluster_uuid;
const params = {
index: entSearchIndexPattern,
index: indexPatterns,
size: 0,
ignore_unavailable: true,
filter_path: entSearchAggFilterPath,

View file

@ -8,28 +8,30 @@
import moment from 'moment';
import { ElasticsearchResponse } from '../../../common/types/es';
import { LegacyRequest } from '../../types';
import { checkParam } from '../error_missing_required';
import { createEnterpriseSearchQuery } from './create_enterprise_search_query';
import {
entSearchAggFilterPath,
entSearchUuidsAgg,
entSearchAggResponseHandler,
} from './_enterprise_search_stats';
import { getLegacyIndexPattern } from '../cluster/get_index_patterns';
import { Globals } from '../../static_globals';
export async function getStats(
req: LegacyRequest,
entSearchIndexPattern: string,
clusterUuid: string
) {
checkParam(entSearchIndexPattern, 'entSearchIndexPattern in getStats');
export async function getStats(req: LegacyRequest, clusterUuid: string) {
const config = req.server.config();
const start = moment.utc(req.payload.timeRange.min).valueOf();
const end = moment.utc(req.payload.timeRange.max).valueOf();
const maxBucketSize = config.get('monitoring.ui.max_bucket_size');
// just get the legacy pattern since no integration exists yet
const indexPattern = getLegacyIndexPattern({
moduleType: 'enterprisesearch',
config: Globals.app.config,
ccs: req.payload.ccs,
});
const params = {
index: entSearchIndexPattern,
index: indexPattern,
filter_path: entSearchAggFilterPath,
size: 0,
ignore_unavailable: true,

View file

@ -12,6 +12,8 @@ import { checkParam, MissingRequiredError } from '../error_missing_required';
import { calculateAvailability } from '../calculate_availability';
import { LegacyRequest } from '../../types';
import { ElasticsearchResponse } from '../../../common/types/es';
import { getNewIndexPatterns } from '../cluster/get_index_patterns';
import { Globals } from '../../static_globals';
import { buildKibanaInfo } from './build_kibana_info';
export function handleResponse(resp: ElasticsearchResponse) {
@ -32,13 +34,16 @@ export function handleResponse(resp: ElasticsearchResponse) {
export function getKibanaInfo(
req: LegacyRequest,
kbnIndexPattern: string,
{ clusterUuid, kibanaUuid }: { clusterUuid: string; kibanaUuid: string }
) {
checkParam(kbnIndexPattern, 'kbnIndexPattern in getKibanaInfo');
const moduleType = 'kibana';
const indexPatterns = getNewIndexPatterns({
config: Globals.app.config,
ccs: req.payload.ccs,
moduleType,
});
const params = {
index: kbnIndexPattern,
index: indexPatterns,
size: 1,
ignore_unavailable: true,
filter_path: [

View file

@ -15,6 +15,8 @@ import { calculateAvailability } from '../calculate_availability';
// @ts-ignore
import { KibanaMetric } from '../metrics';
import { LegacyRequest } from '../../types';
import { getNewIndexPatterns } from '../cluster/get_index_patterns';
import { Globals } from '../../static_globals';
import { ElasticsearchResponse, ElasticsearchResponseHit } from '../../../common/types/es';
import { KibanaInfo, buildKibanaInfo } from './build_kibana_info';
@ -52,24 +54,28 @@ interface Kibana {
* - requests
* - response times
*/
export async function getKibanas(
req: LegacyRequest,
kbnIndexPattern: string,
{ clusterUuid }: { clusterUuid: string }
) {
checkParam(kbnIndexPattern, 'kbnIndexPattern in getKibanas');
export async function getKibanas(req: LegacyRequest, { clusterUuid }: { clusterUuid: string }) {
const config = req.server.config();
const start = moment.utc(req.payload.timeRange.min).valueOf();
const end = moment.utc(req.payload.timeRange.max).valueOf();
const moduleType = 'kibana';
const type = 'kibana_stats';
const dataset = 'stats';
const indexPatterns = getNewIndexPatterns({
config: Globals.app.config,
ccs: req.payload.ccs,
moduleType,
dataset,
});
const params = {
index: kbnIndexPattern,
index: indexPatterns,
size: config.get('monitoring.ui.max_bucket_size'),
ignore_unavailable: true,
body: {
query: createQuery({
types: ['kibana_stats', 'stats'],
type,
dsDataset: `${moduleType}.${dataset}`,
metricset: dataset,
start,
end,
clusterUuid,
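
The two Kibana hunks above call getNewIndexPatterns differently: getKibanaInfo passes only the module type, while getKibanas narrows to the stats dataset. A side-by-side sketch of the two call shapes follows; the resulting pattern strings are assumptions for illustration.

// Side-by-side sketch of the Kibana call shapes above (illustration only; the exact
// pattern strings returned by getNewIndexPatterns are assumptions).
const allKibanaMetrics = getNewIndexPatterns({
  config: Globals.app.config,
  ccs: req.payload.ccs,
  moduleType: 'kibana',   // no dataset: match every Kibana metricset
});

const kibanaStats = getNewIndexPatterns({
  config: Globals.app.config,
  ccs: req.payload.ccs,
  moduleType: 'kibana',
  dataset: 'stats',       // narrow to stats, e.g. something like metrics-kibana.stats-*
});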

Some files were not shown because too many files have changed in this diff.