Mirror of https://github.com/elastic/kibana.git
Revert "Added new index pattern" (#56804)

This reverts commit 0f175060d1.

Parent: 8243f5dfba
Commit: 7276087e8a

14 changed files with 104 additions and 380 deletions
@@ -55,7 +55,7 @@ describe('Alerts Cluster Search', () => {
     const { mockReq, callWithRequestStub } = createStubs(mockQueryResult, featureStub);
     return alertsClusterSearch(
       mockReq,
-      '.monitoring-alerts,monitoring-alerts',
+      '.monitoring-alerts',
       { cluster_uuid: 'cluster-1234' },
       checkLicense
     ).then(alerts => {
@@ -68,7 +68,7 @@ describe('Alerts Cluster Search', () => {
     const { mockReq, callWithRequestStub } = createStubs(mockQueryResult, featureStub);
     return alertsClusterSearch(
       mockReq,
-      '.monitoring-alerts,monitoring-alerts',
+      '.monitoring-alerts',
       { cluster_uuid: 'cluster-1234' },
       checkLicense,
       { size: 3 }
@@ -89,15 +89,9 @@ describe('Alerts Cluster Search', () => {
         issue_date: 'fake-issue_date',
       },
     };
-    return alertsClusterSearch(
-      mockReq,
-      '.monitoring-alerts,monitoring-alerts',
-      cluster,
-      checkLicense,
-      {
-        size: 3,
-      }
-    ).then(alerts => {
+    return alertsClusterSearch(mockReq, '.monitoring-alerts', cluster, checkLicense, {
+      size: 3,
+    }).then(alerts => {
       expect(alerts).to.have.length(3);
       expect(alerts[0]).to.eql(mockAlerts[0]);
       expect(alerts[1]).to.eql({
@@ -128,15 +122,9 @@ describe('Alerts Cluster Search', () => {
         issue_date: 'fake-issue_date',
       },
     };
-    return alertsClusterSearch(
-      mockReq,
-      '.monitoring-alerts,monitoring-alerts',
-      cluster,
-      checkLicense,
-      {
-        size: 3,
-      }
-    ).then(alerts => {
+    return alertsClusterSearch(mockReq, '.monitoring-alerts', cluster, checkLicense, {
+      size: 3,
+    }).then(alerts => {
       expect(alerts).to.have.length(1);
       expect(alerts[0]).to.eql({
         metadata: {
@@ -167,7 +155,7 @@ describe('Alerts Cluster Search', () => {
     const { mockReq, callWithRequestStub } = createStubs({}, featureStub);
     return alertsClusterSearch(
       mockReq,
-      '.monitoring-alerts,monitoring-alerts',
+      '.monitoring-alerts',
       { cluster_uuid: 'cluster-1234' },
       checkLicense
     ).then(alerts => {
@@ -189,7 +177,7 @@ describe('Alerts Cluster Search', () => {
     const { mockReq, callWithRequestStub } = createStubs({}, featureStub);
     return alertsClusterSearch(
      mockReq,
-      '.monitoring-alerts,monitoring-alerts',
+      '.monitoring-alerts',
       { cluster_uuid: 'cluster-1234' },
       checkLicense
     ).then(alerts => {
@@ -73,37 +73,34 @@ describe('Alerts Clusters Aggregation', () => {

   it('aggregates alert count summary by cluster', () => {
     const { mockReq } = createStubs(mockQueryResult, featureStub);
-    return alertsClustersAggregation(
-      mockReq,
-      '.monitoring-alerts,monitoring-alerts',
-      clusters,
-      checkLicense
-    ).then(result => {
-      expect(result).to.eql({
-        alertsMeta: { enabled: true },
-        'cluster-abc0': undefined,
-        'cluster-abc1': {
-          count: 1,
-          high: 0,
-          low: 1,
-          medium: 0,
-        },
-        'cluster-abc2': {
-          count: 2,
-          high: 0,
-          low: 0,
-          medium: 2,
-        },
-        'cluster-abc3': {
-          count: 3,
-          high: 3,
-          low: 0,
-          medium: 0,
-        },
-        'cluster-no-license': undefined,
-        'cluster-invalid': undefined,
-      });
-    });
+    return alertsClustersAggregation(mockReq, '.monitoring-alerts', clusters, checkLicense).then(
+      result => {
+        expect(result).to.eql({
+          alertsMeta: { enabled: true },
+          'cluster-abc0': undefined,
+          'cluster-abc1': {
+            count: 1,
+            high: 0,
+            low: 1,
+            medium: 0,
+          },
+          'cluster-abc2': {
+            count: 2,
+            high: 0,
+            low: 0,
+            medium: 2,
+          },
+          'cluster-abc3': {
+            count: 3,
+            high: 3,
+            low: 0,
+            medium: 0,
+          },
+          'cluster-no-license': undefined,
+          'cluster-invalid': undefined,
+        });
+      }
+    );
   });

   it('aggregates alert count summary by cluster include static alert', () => {
@@ -116,7 +113,7 @@ describe('Alerts Clusters Aggregation', () => {

     return alertsClustersAggregation(
       mockReq,
-      '.monitoring-alerts,monitoring-alerts',
+      '.monitoring-alerts',
       newClusters,
       checkLicense
     ).then(result => {
@@ -166,16 +163,13 @@ describe('Alerts Clusters Aggregation', () => {
       const checkLicense = () => ({ clusterAlerts: { enabled: true } });
       const { mockReq } = createStubs(mockQueryResult, featureStub);

-      return alertsClustersAggregation(
-        mockReq,
-        '.monitoring-alerts,monitoring-alerts',
-        clusters,
-        checkLicense
-      ).then(result => {
-        expect(result).to.eql({
-          alertsMeta: { enabled: false, message: 'monitoring cluster license is fail' },
-        });
-      });
+      return alertsClustersAggregation(mockReq, '.monitoring-alerts', clusters, checkLicense).then(
+        result => {
+          expect(result).to.eql({
+            alertsMeta: { enabled: false, message: 'monitoring cluster license is fail' },
+          });
+        }
+      );
     });

     it('returns the input set if disabled because production cluster checks', () => {
@@ -187,56 +181,53 @@ describe('Alerts Clusters Aggregation', () => {
       const checkLicense = () => ({ clusterAlerts: { enabled: false } });
       const { mockReq } = createStubs(mockQueryResult, featureStub);

-      return alertsClustersAggregation(
-        mockReq,
-        '.monitoring-alerts,monitoring-alerts',
-        clusters,
-        checkLicense
-      ).then(result => {
-        expect(result).to.eql({
-          alertsMeta: { enabled: true },
-          'cluster-abc0': {
-            clusterMeta: {
-              enabled: false,
-              message:
-                'Cluster [cluster-abc0-name] license type [test_license] does not support Cluster Alerts',
+      return alertsClustersAggregation(mockReq, '.monitoring-alerts', clusters, checkLicense).then(
+        result => {
+          expect(result).to.eql({
+            alertsMeta: { enabled: true },
+            'cluster-abc0': {
+              clusterMeta: {
+                enabled: false,
+                message:
+                  'Cluster [cluster-abc0-name] license type [test_license] does not support Cluster Alerts',
+              },
             },
-          },
-          'cluster-abc1': {
-            clusterMeta: {
-              enabled: false,
-              message:
-                'Cluster [cluster-abc1-name] license type [test_license] does not support Cluster Alerts',
+            'cluster-abc1': {
+              clusterMeta: {
+                enabled: false,
+                message:
+                  'Cluster [cluster-abc1-name] license type [test_license] does not support Cluster Alerts',
+              },
             },
-          },
-          'cluster-abc2': {
-            clusterMeta: {
-              enabled: false,
-              message:
-                'Cluster [cluster-abc2-name] license type [test_license] does not support Cluster Alerts',
+            'cluster-abc2': {
+              clusterMeta: {
+                enabled: false,
+                message:
+                  'Cluster [cluster-abc2-name] license type [test_license] does not support Cluster Alerts',
+              },
             },
-          },
-          'cluster-abc3': {
-            clusterMeta: {
-              enabled: false,
-              message:
-                'Cluster [cluster-abc3-name] license type [test_license] does not support Cluster Alerts',
+            'cluster-abc3': {
+              clusterMeta: {
+                enabled: false,
+                message:
+                  'Cluster [cluster-abc3-name] license type [test_license] does not support Cluster Alerts',
+              },
             },
-          },
-          'cluster-no-license': {
-            clusterMeta: {
-              enabled: false,
-              message: `Cluster [cluster-no-license-name] license type [undefined] does not support Cluster Alerts`,
+            'cluster-no-license': {
+              clusterMeta: {
+                enabled: false,
+                message: `Cluster [cluster-no-license-name] license type [undefined] does not support Cluster Alerts`,
+              },
             },
-          },
-          'cluster-invalid': {
-            clusterMeta: {
-              enabled: false,
-              message: `Cluster [cluster-invalid-name] license type [undefined] does not support Cluster Alerts`,
+            'cluster-invalid': {
+              clusterMeta: {
+                enabled: false,
+                message: `Cluster [cluster-invalid-name] license type [undefined] does not support Cluster Alerts`,
+              },
             },
-          },
-        });
-      });
+          });
+        }
+      );
     });
   });
 });
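The assertions above also fix the shape `alertsClustersAggregation` resolves with: an `alertsMeta` flag plus, per cluster UUID, either `undefined` or a `{ count, high, medium, low }` severity summary. A minimal sketch of producing that per-cluster summary from a list of alert documents; the numeric severity cut-offs below are illustrative assumptions, not Kibana's actual constants:

// Sketch only: folds alert docs into the { count, high, medium, low } shape the
// tests above expect. The severity thresholds are assumed for illustration.
function summarizeClusterAlerts(alertDocs) {
  return alertDocs.reduce(
    (summary, alert) => {
      summary.count += 1;
      const severity = alert.metadata.severity;
      if (severity >= 2000) summary.high += 1;
      else if (severity >= 1000) summary.medium += 1;
      else summary.low += 1;
      return summary;
    },
    { count: 0, high: 0, medium: 0, low: 0 }
  );
}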
@@ -10,8 +10,7 @@ import { parseCrossClusterPrefix, prefixIndexPattern } from '../ccs_utils';

 describe('ccs_utils', () => {
   describe('prefixIndexPattern', () => {
-    const indexPattern =
-      '.monitoring-xyz-1-*,.monitoring-xyz-2-*,monitoring-xyz-1-*,monitoring-xyz-2-*';
+    const indexPattern = '.monitoring-xyz-1-*,.monitoring-xyz-2-*';

     it('returns the index pattern if ccs is not enabled', () => {
       const get = sinon.stub();
@@ -54,11 +53,9 @@ describe('ccs_utils', () => {
       const abcPattern = prefixIndexPattern(config, indexPattern, 'aBc');
       const underscorePattern = prefixIndexPattern(config, indexPattern, 'cluster_one');

-      expect(abcPattern).to.eql(
-        'aBc:.monitoring-xyz-1-*,aBc:.monitoring-xyz-2-*,aBc:monitoring-xyz-1-*,aBc:monitoring-xyz-2-*'
-      );
+      expect(abcPattern).to.eql('aBc:.monitoring-xyz-1-*,aBc:.monitoring-xyz-2-*');
       expect(underscorePattern).to.eql(
-        'cluster_one:.monitoring-xyz-1-*,cluster_one:.monitoring-xyz-2-*,cluster_one:monitoring-xyz-1-*,cluster_one:monitoring-xyz-2-*'
+        'cluster_one:.monitoring-xyz-1-*,cluster_one:.monitoring-xyz-2-*'
       );
       expect(get.callCount).to.eql(2);
     });
@@ -72,11 +69,7 @@ describe('ccs_utils', () => {
       const pattern = prefixIndexPattern(config, indexPattern, '*');

       // it should have BOTH patterns so that it searches all CCS clusters and the local cluster
-      expect(pattern).to.eql(
-        '*:.monitoring-xyz-1-*,*:.monitoring-xyz-2-*,*:monitoring-xyz-1-*,*:monitoring-xyz-2-*' +
-          ',' +
-          indexPattern
-      );
+      expect(pattern).to.eql('*:.monitoring-xyz-1-*,*:.monitoring-xyz-2-*' + ',' + indexPattern);
       expect(get.callCount).to.eql(1);
     });
   });
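Taken together, the expectations above describe `prefixIndexPattern` after the revert: every comma-separated pattern is prefixed with the requested cluster name, and the special `*` prefix also keeps the unprefixed patterns so the local cluster is still searched. A minimal sketch consistent with those assertions (not the actual Kibana implementation; the `ccsEnabled` flag stands in for the config lookup the tests stub via `get`):

// Sketch of the behavior asserted above; prefixing is skipped when CCS is disabled.
function prefixIndexPatternSketch(ccsEnabled, indexPattern, ccs) {
  if (!ccsEnabled || !ccs) {
    return indexPattern;
  }
  const prefixed = indexPattern
    .split(',')
    .map(pattern => `${ccs}:${pattern}`)
    .join(',');
  // '*' means "every remote cluster", so the local (unprefixed) patterns are kept as well.
  return ccs === '*' ? `${prefixed},${indexPattern}` : prefixed;
}

// prefixIndexPatternSketch(true, '.monitoring-xyz-1-*,.monitoring-xyz-2-*', 'aBc')
//   -> 'aBc:.monitoring-xyz-1-*,aBc:.monitoring-xyz-2-*'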
@@ -84,25 +77,18 @@ describe('ccs_utils', () => {
   describe('parseCrossClusterPrefix', () => {
     it('returns ccs prefix for index with one', () => {
       expect(parseCrossClusterPrefix('abc:.monitoring-es-6-2017.07.28')).to.eql('abc');
-      expect(parseCrossClusterPrefix('abc:monitoring-es-6-2017.07.28')).to.eql('abc');
       expect(parseCrossClusterPrefix('abc_123:.monitoring-es-6-2017.07.28')).to.eql('abc_123');
-      expect(parseCrossClusterPrefix('abc_123:monitoring-es-6-2017.07.28')).to.eql('abc_123');
       expect(parseCrossClusterPrefix('broken:example:.monitoring-es-6-2017.07.28')).to.eql(
         'broken'
       );
-      expect(parseCrossClusterPrefix('broken:example:monitoring-es-6-2017.07.28')).to.eql('broken');
       expect(parseCrossClusterPrefix('with-a-dash:.monitoring-es-6-2017.07.28')).to.eql(
         'with-a-dash'
       );
-      expect(parseCrossClusterPrefix('with-a-dash:monitoring-es-6-2017.07.28')).to.eql(
-        'with-a-dash'
-      );
       expect(parseCrossClusterPrefix('something:not-monitoring')).to.eql('something');
     });

     it('returns null when no prefix exists', () => {
       expect(parseCrossClusterPrefix('.monitoring-es-6-2017.07.28')).to.be(null);
-      expect(parseCrossClusterPrefix('monitoring-es-6-2017.07.28')).to.be(null);
       expect(parseCrossClusterPrefix('random')).to.be(null);
     });
   });
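The surviving `parseCrossClusterPrefix` cases likewise pin down its contract: everything before the first `:` is the cross-cluster prefix, and an index name with no `:` yields `null`. A minimal sketch under those assumptions:

// Sketch matching the assertions above: 'abc:.monitoring-...' -> 'abc',
// 'broken:example:...' -> 'broken', and null when there is no ':' at all.
function parseCrossClusterPrefixSketch(indexName) {
  const separator = indexName.indexOf(':');
  return separator === -1 ? null : indexName.substring(0, separator);
}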
@@ -47,7 +47,7 @@ describe('getNodeIds', () => {
     };
     const clusterUuid = '1cb';

-    const result = await getNodeIds(req, '.monitoring-es-*,monitoring-es-*', { clusterUuid }, 10);
+    const result = await getNodeIds(req, '.monitoring-es-*', { clusterUuid }, 10);
     expect(result).toEqual([
       { name: 'foobar', uuid: 1 },
       { name: 'barfoo', uuid: 2 },
@@ -45,7 +45,7 @@ describe('getPaginatedNodes', () => {
       }),
     },
   };
-  const esIndexPattern = '.monitoring-es-*,monitoring-es-*';
+  const esIndexPattern = '.monitoring-es-*';
   const clusterUuid = '1abc';
   const metricSet = ['foo', 'bar'];
   const pagination = { index: 0, size: 10 };
@@ -20,7 +20,7 @@ import { getMetrics } from '../../../details/get_metrics';
  * and returns that so the caller can perform their normal call to get the time-series data.
  *
  * @param {*} req - Server request object
- * @param {*} esIndexPattern - The index pattern to search against (`.monitoring-es-*,monitoring-es-*`)
+ * @param {*} esIndexPattern - The index pattern to search against (`.monitoring-es-*`)
  * @param {*} uuids - The optional `clusterUuid` and `nodeUuid` to filter the results from
  * @param {*} metricSet - The array of metrics that are sortable in the UI
  * @param {*} pagination - ({ index, size })
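The JSDoc above, together with the getPaginatedNodes test earlier in this diff, shows the argument shapes the pagination helper expects once the revert lands. The values below are lifted from that test; the call itself is only indicated in a comment, since the function stays Kibana-internal:

// Argument shapes from the getPaginatedNodes test in this commit; after the revert the
// index pattern covers the system indices only.
const esIndexPattern = '.monitoring-es-*';
const uuids = { clusterUuid: '1abc' };      // nodeUuid omitted: no single-node filter
const metricSet = ['foo', 'bar'];           // placeholder metric names from the test
const pagination = { index: 0, size: 10 };  // zero-based page index and page size
// A call would then look like:
//   getPaginatedNodes(req, esIndexPattern, uuids, metricSet, pagination, ...)
// and resolve with the filtered, sorted page plus the metrics to fetch time series for.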
@@ -30,7 +30,7 @@ export async function verifyMonitoringAuth(req) {

 /**
  * Reach out to the Monitoring cluster and ensure that it believes the current user has the privileges necessary
- * to make API calls against .monitoring-*,monitoring-* indices.
+ * to make API calls against .monitoring-* indices.
  *
  * @param req {Object} the server route handler request object
  * @return {Promise} That either resolves with no response (void) or an exception.
@@ -20,7 +20,7 @@ import { getMetrics } from '../details/get_metrics';
  * and returns that so the caller can perform their normal call to get the time-series data.
  *
  * @param {*} req - Server request object
- * @param {*} lsIndexPattern - The index pattern to search against (`.monitoring-logstash-*,monitoring-logstash-*`)
+ * @param {*} lsIndexPattern - The index pattern to search against (`.monitoring-logstash-*`)
  * @param {*} uuids - The optional `clusterUuid` and `logstashUuid` to filter the results from
  * @param {*} metricSet - The array of metrics that are sortable in the UI
  * @param {*} pagination - ({ index, size })
@@ -1,241 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-
-import expect from '@kbn/expect';
-import sinon from 'sinon';
-import { getCollectionStatus } from '../';
-import { getIndexPatterns } from '../../../cluster/get_index_patterns';
-
-const liveClusterUuid = 'a12';
-const mockReq = (searchResult = {}) => {
-  return {
-    server: {
-      newPlatform: {
-        setup: {
-          plugins: {
-            usageCollection: {
-              getCollectorByType: () => ({
-                isReady: () => false,
-              }),
-            },
-          },
-        },
-      },
-      config() {
-        return {
-          get: sinon
-            .stub()
-            .withArgs('server.uuid')
-            .returns('kibana-1234'),
-        };
-      },
-      usage: {
-        collectorSet: {
-          getCollectorByType: () => ({
-            isReady: () => false,
-          }),
-        },
-      },
-      plugins: {
-        elasticsearch: {
-          getCluster() {
-            return {
-              callWithRequest(_req, type, params) {
-                if (
-                  type === 'transport.request' &&
-                  params &&
-                  params.path === '/_cluster/state/cluster_uuid'
-                ) {
-                  return Promise.resolve({ cluster_uuid: liveClusterUuid });
-                }
-                if (type === 'transport.request' && params && params.path === '/_nodes') {
-                  return Promise.resolve({ nodes: {} });
-                }
-                if (type === 'cat.indices') {
-                  return Promise.resolve([1]);
-                }
-                return Promise.resolve(searchResult);
-              },
-            };
-          },
-        },
-      },
-    },
-  };
-};
-
-describe('getCollectionStatus none system index', () => {
-  it('should handle all stack products with internal monitoring', async () => {
-    const req = mockReq({
-      aggregations: {
-        indices: {
-          buckets: [
-            {
-              key: 'monitoring-es-7-2019',
-              es_uuids: { buckets: [{ key: 'es_1' }] },
-            },
-            {
-              key: 'monitoring-kibana-7-2019',
-              kibana_uuids: { buckets: [{ key: 'kibana_1' }] },
-            },
-            {
-              key: 'monitoring-beats-7-2019',
-              beats_uuids: {
-                buckets: [
-                  { key: 'apm_1', beat_type: { buckets: [{ key: 'apm-server' }] } },
-                  { key: 'beats_1' },
-                ],
-              },
-            },
-            {
-              key: 'monitoring-logstash-7-2019',
-              logstash_uuids: { buckets: [{ key: 'logstash_1' }] },
-            },
-          ],
-        },
-      },
-    });
-
-    const result = await getCollectionStatus(req, getIndexPatterns(req.server));
-
-    expect(result.kibana.totalUniqueInstanceCount).to.be(1);
-    expect(result.kibana.totalUniqueFullyMigratedCount).to.be(0);
-    expect(result.kibana.byUuid.kibana_1.isInternalCollector).to.be(true);
-
-    expect(result.beats.totalUniqueInstanceCount).to.be(1);
-    expect(result.beats.totalUniqueFullyMigratedCount).to.be(0);
-    expect(result.beats.byUuid.beats_1.isInternalCollector).to.be(true);
-
-    expect(result.apm.totalUniqueInstanceCount).to.be(1);
-    expect(result.apm.totalUniqueFullyMigratedCount).to.be(0);
-    expect(result.apm.byUuid.apm_1.isInternalCollector).to.be(true);
-
-    expect(result.logstash.totalUniqueInstanceCount).to.be(1);
-    expect(result.logstash.totalUniqueFullyMigratedCount).to.be(0);
-    expect(result.logstash.byUuid.logstash_1.isInternalCollector).to.be(true);
-
-    expect(result.elasticsearch.totalUniqueInstanceCount).to.be(1);
-    expect(result.elasticsearch.totalUniqueFullyMigratedCount).to.be(0);
-    expect(result.elasticsearch.byUuid.es_1.isInternalCollector).to.be(true);
-  });
-
-  it('should handle some stack products as fully migrated', async () => {
-    const req = mockReq({
-      aggregations: {
-        indices: {
-          buckets: [
-            {
-              key: 'monitoring-es-7-mb-2019',
-              es_uuids: { buckets: [{ key: 'es_1' }] },
-            },
-            {
-              key: 'monitoring-kibana-7-mb-2019',
-              kibana_uuids: { buckets: [{ key: 'kibana_1' }] },
-            },
-            {
-              key: 'monitoring-beats-7-2019',
-              beats_uuids: { buckets: [{ key: 'beats_1' }] },
-            },
-            {
-              key: 'monitoring-logstash-7-2019',
-              logstash_uuids: { buckets: [{ key: 'logstash_1' }] },
-            },
-          ],
-        },
-      },
-    });
-
-    const result = await getCollectionStatus(req, getIndexPatterns(req.server));
-
-    expect(result.kibana.totalUniqueInstanceCount).to.be(1);
-    expect(result.kibana.totalUniqueFullyMigratedCount).to.be(1);
-    expect(result.kibana.byUuid.kibana_1.isFullyMigrated).to.be(true);
-
-    expect(result.beats.totalUniqueInstanceCount).to.be(1);
-    expect(result.beats.totalUniqueFullyMigratedCount).to.be(0);
-    expect(result.beats.byUuid.beats_1.isInternalCollector).to.be(true);
-
-    expect(result.logstash.totalUniqueInstanceCount).to.be(1);
-    expect(result.logstash.totalUniqueFullyMigratedCount).to.be(0);
-    expect(result.logstash.byUuid.logstash_1.isInternalCollector).to.be(true);
-
-    expect(result.elasticsearch.totalUniqueInstanceCount).to.be(1);
-    expect(result.elasticsearch.totalUniqueFullyMigratedCount).to.be(1);
-    expect(result.elasticsearch.byUuid.es_1.isFullyMigrated).to.be(true);
-  });
-
-  it('should handle some stack products as partially migrated', async () => {
-    const req = mockReq({
-      aggregations: {
-        indices: {
-          buckets: [
-            {
-              key: 'monitoring-es-7-mb-2019',
-              es_uuids: { buckets: [{ key: 'es_1' }] },
-            },
-            {
-              key: 'monitoring-kibana-7-mb-2019',
-              kibana_uuids: { buckets: [{ key: 'kibana_1' }, { key: 'kibana_2' }] },
-            },
-            {
-              key: 'monitoring-kibana-7-2019',
-              kibana_uuids: { buckets: [{ key: 'kibana_1', by_timestamp: { value: 12 } }] },
-            },
-            {
-              key: 'monitoring-beats-7-2019',
-              beats_uuids: { buckets: [{ key: 'beats_1' }] },
-            },
-            {
-              key: 'monitoring-logstash-7-2019',
-              logstash_uuids: { buckets: [{ key: 'logstash_1' }] },
-            },
-          ],
-        },
-      },
-    });
-
-    const result = await getCollectionStatus(req, getIndexPatterns(req.server));
-
-    expect(result.kibana.totalUniqueInstanceCount).to.be(2);
-    expect(result.kibana.totalUniqueFullyMigratedCount).to.be(1);
-    expect(result.kibana.byUuid.kibana_1.isPartiallyMigrated).to.be(true);
-    expect(result.kibana.byUuid.kibana_1.lastInternallyCollectedTimestamp).to.be(12);
-
-    expect(result.beats.totalUniqueInstanceCount).to.be(1);
-    expect(result.beats.totalUniqueFullyMigratedCount).to.be(0);
-    expect(result.beats.byUuid.beats_1.isInternalCollector).to.be(true);
-
-    expect(result.logstash.totalUniqueInstanceCount).to.be(1);
-    expect(result.logstash.totalUniqueFullyMigratedCount).to.be(0);
-    expect(result.logstash.byUuid.logstash_1.isInternalCollector).to.be(true);
-
-    expect(result.elasticsearch.totalUniqueInstanceCount).to.be(1);
-    expect(result.elasticsearch.totalUniqueFullyMigratedCount).to.be(1);
-    expect(result.elasticsearch.byUuid.es_1.isFullyMigrated).to.be(true);
-  });
-
-  it('should detect products based on other indices', async () => {
-    const req = mockReq(
-      {},
-      {
-        responses: [
-          { hits: { total: { value: 1 } } },
-          { hits: { total: { value: 1 } } },
-          { hits: { total: { value: 1 } } },
-          { hits: { total: { value: 1 } } },
-          { hits: { total: { value: 1 } } },
-        ],
-      }
-    );
-
-    const result = await getCollectionStatus(req, getIndexPatterns(req.server), liveClusterUuid);
-
-    expect(result.kibana.detected.doesExist).to.be(true);
-    expect(result.elasticsearch.detected.doesExist).to.be(true);
-    expect(result.beats.detected.mightExist).to.be(true);
-    expect(result.logstash.detected.mightExist).to.be(true);
-  });
-});
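Although the revert deletes the test file above, its assertions still document the result shape of `getCollectionStatus`: one entry per product with `totalUniqueInstanceCount`, `totalUniqueFullyMigratedCount`, and per-UUID flags such as `isInternalCollector`, `isPartiallyMigrated`, and `isFullyMigrated`. A small consumer sketch of that shape; the helper itself is hypothetical:

// Hypothetical helper reading the result shape the deleted tests assert on.
function isProductFullyMigrated(collectionStatus, product) {
  const { totalUniqueInstanceCount, totalUniqueFullyMigratedCount } = collectionStatus[product];
  return totalUniqueInstanceCount > 0 && totalUniqueFullyMigratedCount === totalUniqueInstanceCount;
}

// e.g. isProductFullyMigrated(result, 'kibana') would be true in the "fully migrated"
// case above and false in the "internal monitoring" case.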
@@ -242,7 +242,7 @@ async function hasNecessaryPermissions(req) {
  * Determines if we should ignore this bucket from this product.
  *
  * We need this logic because APM and Beats are separate products, but their
- * monitoring data appears in the same index (.monitoring-beats-*,monitoring-beats-*) and the single
+ * monitoring data appears in the same index (.monitoring-beats-*) and the single
  * way to determine the difference between two documents in that index
  * is `beats_stats.beat.type` which we are performing a terms agg in the above query.
  * If that value is `apm-server` and we're attempting to calculating status for beats
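The comment above spells out the disambiguation rule: APM and Beats write to the same `.monitoring-beats-*` indices, and the `beats_stats.beat.type` terms aggregation is the only way to tell their documents apart. A minimal sketch of that check with hypothetical names (not the actual Kibana function):

// Hypothetical sketch of the rule described above: skip apm-server buckets when tallying
// Beats, and skip everything else when tallying APM.
function shouldSkipBucketSketch(product, uuidBucket) {
  const beatTypes = ((uuidBucket.beat_type || {}).buckets || []).map(b => b.key);
  const isApmServer = beatTypes.includes('apm-server');
  if (product === 'beats' && isApmServer) {
    return true;
  }
  if (product === 'apm' && !isApmServer) {
    return true;
  }
  return false;
}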
@@ -325,7 +325,7 @@ async function getLiveElasticsearchCollectionEnabled(req) {
  * }

  * @param {*} req Standard request object. Can contain a timeRange to use for the query
- * @param {*} indexPatterns Map of index patterns to search against (will be all .monitoring-*,monitoring-* indices)
+ * @param {*} indexPatterns Map of index patterns to search against (will be all .monitoring-* indices)
  * @param {*} clusterUuid Optional and will be used to filter down the query if used
  * @param {*} nodeUuid Optional and will be used to filter down the query if used
  * @param {*} skipLiveData Optional and will not make any live api calls if set to true
@@ -19,11 +19,11 @@ describe.skip('get_all_stats', () => {
         get: sinon
           .stub()
           .withArgs('xpack.monitoring.elasticsearch.index_pattern')
-          .returns('.monitoring-es-N-*,monitoring-es-N-*')
+          .returns('.monitoring-es-N-*')
           .withArgs('xpack.monitoring.kibana.index_pattern')
-          .returns('.monitoring-kibana-N-*,monitoring-kibana-N-*')
+          .returns('.monitoring-kibana-N-*')
           .withArgs('xpack.monitoring.logstash.index_pattern')
-          .returns('.monitoring-logstash-N-*,monitoring-logstash-N-*')
+          .returns('.monitoring-logstash-N-*')
           .withArgs('xpack.monitoring.max_bucket_size')
           .returns(size),
       }),
@@ -20,7 +20,7 @@ describe('get_cluster_uuids', () => {
         get: sinon
           .stub()
           .withArgs('xpack.monitoring.elasticsearch.index_pattern')
-          .returns('.monitoring-es-N-*,monitoring-es-N-*')
+          .returns('.monitoring-es-N-*')
           .withArgs('xpack.monitoring.max_bucket_size')
           .returns(size),
       }),
@@ -20,7 +20,7 @@ describe('get_es_stats', () => {
         get: sinon
           .stub()
           .withArgs('xpack.monitoring.elasticsearch.index_pattern')
-          .returns('.monitoring-es-N-*,monitoring-es-N-*')
+          .returns('.monitoring-es-N-*')
           .withArgs('xpack.monitoring.max_bucket_size')
           .returns(size),
       }),
@@ -24,7 +24,7 @@ describe('get_high_level_stats', () => {
         get: sinon
           .stub()
           .withArgs(`xpack.monitoring.${product}.index_pattern`)
-          .returns(`.monitoring-${product}-N-*,monitoring-${product}-N-*`)
+          .returns(`.monitoring-${product}-N-*`)
           .withArgs('xpack.monitoring.max_bucket_size')
           .returns(size),
       }),