commit 5192b8f12c
parent 0327f0e14c

Co-authored-by: Kibana Machine <42973632+kibanamachine@users.noreply.github.com>
Co-authored-by: Felix Stürmer <weltenwort@users.noreply.github.com>

8 changed files with 459 additions and 403 deletions
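Every hunk below applies the same deflaking pattern: assertions that read a just-rendered, just-sorted, or just-filtered table are wrapped in the FTR `retry` service, so a read that races the table re-render is retried instead of failing the run. A minimal sketch of the pattern, assuming the usual FTR test context (`getService`, `@kbn/expect`); the test body and expected row count are illustrative, not part of this commit:

import expect from '@kbn/expect';

export default function ({ getService }) {
  const retry = getService('retry');
  const nodesList = getService('monitoringElasticsearchNodes');

  it('should have a nodes table with correct rows', async () => {
    // retry.try() re-runs the callback until it returns without throwing,
    // or rethrows the last error once the retry service times out.
    await retry.try(async () => {
      const rows = await nodesList.getRows();
      expect(rows.length).to.be(3);
    });
  });
}

The key design point is that every read involved in an assertion happens inside the callback, so each retry attempt sees a fresh snapshot of the table.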
@@ -12,13 +12,13 @@ export default function ({ getService, getPageObjects }) {
   const overview = getService('monitoringClusterOverview');
   const nodesList = getService('monitoringElasticsearchNodes');
   const esClusterSummaryStatus = getService('monitoringElasticsearchSummaryStatus');
+  const retry = getService('retry');

   describe('Elasticsearch nodes listing', function () {
     // FF issue: https://github.com/elastic/kibana/issues/35551
     this.tags(['skipFirefox']);

-    // FLAKY: https://github.com/elastic/kibana/issues/116533
-    describe.skip('with offline node', () => {
+    describe('with offline node', () => {
       const { setup, tearDown } = getLifecycleMethods(getService, getPageObjects);

       before(async () => {
@@ -59,58 +59,62 @@ export default function ({ getService, getPageObjects }) {
       this.tags(['skipCloud']);

       it('should have a nodes table with correct rows with default sorting', async () => {
-        const rows = await nodesList.getRows();
-        expect(rows.length).to.be(3);
+        // retry in case the table hasn't had time to re-render
+        await retry.try(async () => {
+          const rows = await nodesList.getRows();
+          expect(rows.length).to.be(3);

-        const nodesAll = await nodesList.getNodesAll();
-        const tableData = [
-          {
-            name: 'whatever-01',
-            status: 'Status: Online',
-            cpu: '0%',
-            cpuText: 'Trending\nup\nMax value\n3%\nMin value\n0%\nApplies to current time period',
-            load: '3.28',
-            loadText:
-              'Trending\nup\nMax value\n3.71\nMin value\n2.19\nApplies to current time period',
-            memory: '39%',
-            memoryText:
-              'Trending\ndown\nMax value\n52%\nMin value\n25%\nApplies to current time period',
-            disk: '173.9 GB',
-            diskText:
-              'Trending\ndown\nMax value\n173.9 GB\nMin value\n173.9 GB\nApplies to current time period',
-            shards: '38',
-          },
-          {
-            name: 'whatever-02',
-            status: 'Status: Online',
-            cpu: '2%',
-            cpuText:
-              'Trending\ndown\nMax value\n3%\nMin value\n0%\nApplies to current time period',
-            load: '3.28',
-            loadText:
-              'Trending\nup\nMax value\n3.73\nMin value\n2.29\nApplies to current time period',
-            memory: '25%',
-            memoryText:
-              'Trending\ndown\nMax value\n49%\nMin value\n25%\nApplies to current time period',
-            disk: '173.9 GB',
-            diskText:
-              'Trending\ndown\nMax value\n173.9 GB\nMin value\n173.9 GB\nApplies to current time period',
-            shards: '38',
-          },
-          { name: 'whatever-03', status: 'Status: Offline' },
-        ];
-        nodesAll.forEach((obj, node) => {
-          expect(nodesAll[node].name).to.be(tableData[node].name);
-          expect(nodesAll[node].status).to.be(tableData[node].status);
-          expect(nodesAll[node].cpu).to.be(tableData[node].cpu);
-          expect(nodesAll[node].cpuText).to.be(tableData[node].cpuText);
-          expect(nodesAll[node].load).to.be(tableData[node].load);
-          expect(nodesAll[node].loadText).to.be(tableData[node].loadText);
-          expect(nodesAll[node].memory).to.be(tableData[node].memory);
-          expect(nodesAll[node].memoryText).to.be(tableData[node].memoryText);
-          expect(nodesAll[node].disk).to.be(tableData[node].disk);
-          expect(nodesAll[node].diskText).to.be(tableData[node].diskText);
-          expect(nodesAll[node].shards).to.be(tableData[node].shards);
-        });
+          const nodesAll = await nodesList.getNodesAll();
+          const tableData = [
+            {
+              name: 'whatever-01',
+              status: 'Status: Online',
+              cpu: '0%',
+              cpuText:
+                'Trending\nup\nMax value\n3%\nMin value\n0%\nApplies to current time period',
+              load: '3.28',
+              loadText:
+                'Trending\nup\nMax value\n3.71\nMin value\n2.19\nApplies to current time period',
+              memory: '39%',
+              memoryText:
+                'Trending\ndown\nMax value\n52%\nMin value\n25%\nApplies to current time period',
+              disk: '173.9 GB',
+              diskText:
+                'Trending\ndown\nMax value\n173.9 GB\nMin value\n173.9 GB\nApplies to current time period',
+              shards: '38',
+            },
+            {
+              name: 'whatever-02',
+              status: 'Status: Online',
+              cpu: '2%',
+              cpuText:
+                'Trending\ndown\nMax value\n3%\nMin value\n0%\nApplies to current time period',
+              load: '3.28',
+              loadText:
+                'Trending\nup\nMax value\n3.73\nMin value\n2.29\nApplies to current time period',
+              memory: '25%',
+              memoryText:
+                'Trending\ndown\nMax value\n49%\nMin value\n25%\nApplies to current time period',
+              disk: '173.9 GB',
+              diskText:
+                'Trending\ndown\nMax value\n173.9 GB\nMin value\n173.9 GB\nApplies to current time period',
+              shards: '38',
+            },
+            { name: 'whatever-03', status: 'Status: Offline' },
+          ];
+          nodesAll.forEach((obj, node) => {
+            expect(nodesAll[node].name).to.be(tableData[node].name);
+            expect(nodesAll[node].status).to.be(tableData[node].status);
+            expect(nodesAll[node].cpu).to.be(tableData[node].cpu);
+            expect(nodesAll[node].cpuText).to.be(tableData[node].cpuText);
+            expect(nodesAll[node].load).to.be(tableData[node].load);
+            expect(nodesAll[node].loadText).to.be(tableData[node].loadText);
+            expect(nodesAll[node].memory).to.be(tableData[node].memory);
+            expect(nodesAll[node].memoryText).to.be(tableData[node].memoryText);
+            expect(nodesAll[node].disk).to.be(tableData[node].disk);
+            expect(nodesAll[node].diskText).to.be(tableData[node].diskText);
+            expect(nodesAll[node].shards).to.be(tableData[node].shards);
+          });
+        });
       });
@@ -118,22 +122,26 @@ export default function ({ getService, getPageObjects }) {
         await nodesList.clickCpuCol();
         await nodesList.clickCpuCol();

-        const nodesAll = await nodesList.getNodesAll();
-        const tableData = [
-          {
-            cpu: '2%',
-            cpuText:
-              'Trending\ndown\nMax value\n3%\nMin value\n0%\nApplies to current time period',
-          },
-          {
-            cpu: '0%',
-            cpuText: 'Trending\nup\nMax value\n3%\nMin value\n0%\nApplies to current time period',
-          },
-          { cpu: undefined, cpuText: undefined },
-        ];
-        nodesAll.forEach((obj, node) => {
-          expect(nodesAll[node].cpu).to.be(tableData[node].cpu);
-          expect(nodesAll[node].cpuText).to.be(tableData[node].cpuText);
-        });
+        // retry in case the table hasn't had time to re-render
+        await retry.try(async () => {
+          const nodesAll = await nodesList.getNodesAll();
+          const tableData = [
+            {
+              cpu: '2%',
+              cpuText:
+                'Trending\ndown\nMax value\n3%\nMin value\n0%\nApplies to current time period',
+            },
+            {
+              cpu: '0%',
+              cpuText:
+                'Trending\nup\nMax value\n3%\nMin value\n0%\nApplies to current time period',
+            },
+            { cpu: undefined, cpuText: undefined },
+          ];
+          nodesAll.forEach((obj, node) => {
+            expect(nodesAll[node].cpu).to.be(tableData[node].cpu);
+            expect(nodesAll[node].cpuText).to.be(tableData[node].cpuText);
+          });
+        });
       });
@@ -141,23 +149,26 @@ export default function ({ getService, getPageObjects }) {
         await nodesList.clickLoadCol();
         await nodesList.clickLoadCol();

-        const nodesAll = await nodesList.getNodesAll();
-        const tableData = [
-          {
-            load: '3.28',
-            loadText:
-              'Trending\nup\nMax value\n3.71\nMin value\n2.19\nApplies to current time period',
-          },
-          {
-            load: '3.28',
-            loadText:
-              'Trending\nup\nMax value\n3.73\nMin value\n2.29\nApplies to current time period',
-          },
-          { load: undefined },
-        ];
-        nodesAll.forEach((obj, node) => {
-          expect(nodesAll[node].load).to.be(tableData[node].load);
-          expect(nodesAll[node].loadText).to.be(tableData[node].loadText);
-        });
+        // retry in case the table hasn't had time to re-render
+        await retry.try(async () => {
+          const nodesAll = await nodesList.getNodesAll();
+          const tableData = [
+            {
+              load: '3.28',
+              loadText:
+                'Trending\nup\nMax value\n3.71\nMin value\n2.19\nApplies to current time period',
+            },
+            {
+              load: '3.28',
+              loadText:
+                'Trending\nup\nMax value\n3.73\nMin value\n2.29\nApplies to current time period',
+            },
+            { load: undefined },
+          ];
+          nodesAll.forEach((obj, node) => {
+            expect(nodesAll[node].load).to.be(tableData[node].load);
+            expect(nodesAll[node].loadText).to.be(tableData[node].loadText);
+          });
+        });
       });
@@ -166,14 +177,17 @@ export default function ({ getService, getPageObjects }) {
         await nodesList.clickNameCol();
         await nodesList.clickNameCol();

-        const nodesAll = await nodesList.getNodesAll();
-        const tableData = [
-          { name: 'whatever-01' },
-          { name: 'whatever-02' },
-          { name: 'whatever-03' },
-        ];
-        nodesAll.forEach((obj, node) => {
-          expect(nodesAll[node].name).to.be(tableData[node].name);
-        });
+        // retry in case the table hasn't had time to re-render
+        await retry.try(async () => {
+          const nodesAll = await nodesList.getNodesAll();
+          const tableData = [
+            { name: 'whatever-01' },
+            { name: 'whatever-02' },
+            { name: 'whatever-03' },
+          ];
+          nodesAll.forEach((obj, node) => {
+            expect(nodesAll[node].name).to.be(tableData[node].name);
+          });
+        });
       });
@@ -181,14 +195,17 @@ export default function ({ getService, getPageObjects }) {
         await nodesList.clickStatusCol();
         await nodesList.clickStatusCol();

-        const nodesAll = await nodesList.getNodesAll();
-        const tableData = [
-          { status: 'Status: Online' },
-          { status: 'Status: Online' },
-          { status: 'Status: Offline' },
-        ];
-        nodesAll.forEach((obj, node) => {
-          expect(nodesAll[node].status).to.be(tableData[node].status);
-        });
+        // retry in case the table hasn't had time to re-render
+        await retry.try(async () => {
+          const nodesAll = await nodesList.getNodesAll();
+          const tableData = [
+            { status: 'Status: Online' },
+            { status: 'Status: Online' },
+            { status: 'Status: Offline' },
+          ];
+          nodesAll.forEach((obj, node) => {
+            expect(nodesAll[node].status).to.be(tableData[node].status);
+          });
+        });
       });
@@ -196,23 +213,26 @@ export default function ({ getService, getPageObjects }) {
         await nodesList.clickMemoryCol();
         await nodesList.clickMemoryCol();

-        const nodesAll = await nodesList.getNodesAll();
-        const tableData = [
-          {
-            memory: '39%',
-            memoryText:
-              'Trending\ndown\nMax value\n52%\nMin value\n25%\nApplies to current time period',
-          },
-          {
-            memory: '25%',
-            memoryText:
-              'Trending\ndown\nMax value\n49%\nMin value\n25%\nApplies to current time period',
-          },
-          { memory: undefined, memoryText: undefined },
-        ];
-        nodesAll.forEach((obj, node) => {
-          expect(nodesAll[node].memory).to.be(tableData[node].memory);
-          expect(nodesAll[node].memoryText).to.be(tableData[node].memoryText);
-        });
+        // retry in case the table hasn't had time to re-render
+        await retry.try(async () => {
+          const nodesAll = await nodesList.getNodesAll();
+          const tableData = [
+            {
+              memory: '39%',
+              memoryText:
+                'Trending\ndown\nMax value\n52%\nMin value\n25%\nApplies to current time period',
+            },
+            {
+              memory: '25%',
+              memoryText:
+                'Trending\ndown\nMax value\n49%\nMin value\n25%\nApplies to current time period',
+            },
+            { memory: undefined, memoryText: undefined },
+          ];
+          nodesAll.forEach((obj, node) => {
+            expect(nodesAll[node].memory).to.be(tableData[node].memory);
+            expect(nodesAll[node].memoryText).to.be(tableData[node].memoryText);
+          });
+        });
       });
@@ -220,23 +240,26 @@ export default function ({ getService, getPageObjects }) {
         await nodesList.clickDiskCol();
         await nodesList.clickDiskCol();

-        const nodesAll = await nodesList.getNodesAll();
-        const tableData = [
-          {
-            disk: '173.9 GB',
-            diskText:
-              'Trending\ndown\nMax value\n173.9 GB\nMin value\n173.9 GB\nApplies to current time period',
-          },
-          {
-            disk: '173.9 GB',
-            diskText:
-              'Trending\ndown\nMax value\n173.9 GB\nMin value\n173.9 GB\nApplies to current time period',
-          },
-          { disk: undefined },
-        ];
-        nodesAll.forEach((obj, node) => {
-          expect(nodesAll[node].disk).to.be(tableData[node].disk);
-          expect(nodesAll[node].diskText).to.be(tableData[node].diskText);
-        });
+        // retry in case the table hasn't had time to re-render
+        await retry.try(async () => {
+          const nodesAll = await nodesList.getNodesAll();
+          const tableData = [
+            {
+              disk: '173.9 GB',
+              diskText:
+                'Trending\ndown\nMax value\n173.9 GB\nMin value\n173.9 GB\nApplies to current time period',
+            },
+            {
+              disk: '173.9 GB',
+              diskText:
+                'Trending\ndown\nMax value\n173.9 GB\nMin value\n173.9 GB\nApplies to current time period',
+            },
+            { disk: undefined },
+          ];
+          nodesAll.forEach((obj, node) => {
+            expect(nodesAll[node].disk).to.be(tableData[node].disk);
+            expect(nodesAll[node].diskText).to.be(tableData[node].diskText);
+          });
+        });
       });
@@ -244,16 +267,18 @@ export default function ({ getService, getPageObjects }) {
         await nodesList.clickShardsCol();
         await nodesList.clickShardsCol();

-        const nodesAll = await nodesList.getNodesAll();
-        const tableData = [{ shards: '38' }, { shards: '38' }, { shards: undefined }];
-        nodesAll.forEach((obj, node) => {
-          expect(nodesAll[node].shards).to.be(tableData[node].shards);
-        });
+        // retry in case the table hasn't had time to re-render
+        await retry.try(async () => {
+          const nodesAll = await nodesList.getNodesAll();
+          const tableData = [{ shards: '38' }, { shards: '38' }, { shards: undefined }];
+          nodesAll.forEach((obj, node) => {
+            expect(nodesAll[node].shards).to.be(tableData[node].shards);
+          });
+        });
       });
     });

-    // FLAKY: https://github.com/elastic/kibana/issues/100438
-    describe.skip('with only online nodes', () => {
+    describe('with only online nodes', () => {
       const { setup, tearDown } = getLifecycleMethods(getService, getPageObjects);

       before(async () => {
@@ -265,6 +290,8 @@ export default function ({ getService, getPageObjects }) {
         }
       );

+      await overview.closeAlertsModal();
+
       // go to nodes listing
       await overview.clickEsNodes();
       expect(await nodesList.isOnListing()).to.be(true);
@@ -289,14 +316,24 @@ export default function ({ getService, getPageObjects }) {

       it('should filter for specific indices', async () => {
         await nodesList.setFilter('01');
-        const rows = await nodesList.getRows();
-        expect(rows.length).to.be(1);
+
+        // retry in case the table hasn't had time to re-render
+        await retry.try(async () => {
+          const rows = await nodesList.getRows();
+          expect(rows.length).to.be(1);
+        });

         await nodesList.clearFilter();
       });

       it('should filter for non-existent index', async () => {
         await nodesList.setFilter('foobar');
-        await nodesList.assertNoData();
+
+        // retry in case the table hasn't had time to re-render
+        await retry.try(async () => {
+          await nodesList.assertNoData();
+        });

         await nodesList.clearFilter();
       });
     });

@@ -12,13 +12,13 @@ export default function ({ getService, getPageObjects }) {
   const overview = getService('monitoringClusterOverview');
   const nodesList = getService('monitoringElasticsearchNodes');
   const esClusterSummaryStatus = getService('monitoringElasticsearchSummaryStatus');
+  const retry = getService('retry');

   describe('Elasticsearch nodes listing mb', function () {
     // FF issue: https://github.com/elastic/kibana/issues/35551
     this.tags(['skipFirefox']);

-    // FLAKY: https://github.com/elastic/kibana/issues/116065
-    describe.skip('with offline node', () => {
+    describe('with offline node', () => {
       const { setup, tearDown } = getLifecycleMethods(getService, getPageObjects);

       before(async () => {
@@ -60,58 +60,62 @@ export default function ({ getService, getPageObjects }) {
       this.tags(['skipCloud']);

       it('should have a nodes table with correct rows with default sorting', async () => {
-        const rows = await nodesList.getRows();
-        expect(rows.length).to.be(3);
+        // retry in case the table hasn't had time to re-render
+        await retry.try(async () => {
+          const rows = await nodesList.getRows();
+          expect(rows.length).to.be(3);

-        const nodesAll = await nodesList.getNodesAll();
-        const tableData = [
-          {
-            name: 'whatever-01',
-            status: 'Status: Online',
-            cpu: '0%',
-            cpuText: 'Trending\nup\nMax value\n3%\nMin value\n0%\nApplies to current time period',
-            load: '3.28',
-            loadText:
-              'Trending\nup\nMax value\n3.71\nMin value\n2.19\nApplies to current time period',
-            memory: '39%',
-            memoryText:
-              'Trending\ndown\nMax value\n52%\nMin value\n25%\nApplies to current time period',
-            disk: '173.9 GB',
-            diskText:
-              'Trending\ndown\nMax value\n173.9 GB\nMin value\n173.9 GB\nApplies to current time period',
-            shards: '38',
-          },
-          {
-            name: 'whatever-02',
-            status: 'Status: Online',
-            cpu: '2%',
-            cpuText:
-              'Trending\ndown\nMax value\n3%\nMin value\n0%\nApplies to current time period',
-            load: '3.28',
-            loadText:
-              'Trending\nup\nMax value\n3.73\nMin value\n2.29\nApplies to current time period',
-            memory: '25%',
-            memoryText:
-              'Trending\ndown\nMax value\n49%\nMin value\n25%\nApplies to current time period',
-            disk: '173.9 GB',
-            diskText:
-              'Trending\ndown\nMax value\n173.9 GB\nMin value\n173.9 GB\nApplies to current time period',
-            shards: '38',
-          },
-          { name: 'whatever-03', status: 'Status: Offline' },
-        ];
-        nodesAll.forEach((obj, node) => {
-          expect(nodesAll[node].name).to.be(tableData[node].name);
-          expect(nodesAll[node].status).to.be(tableData[node].status);
-          expect(nodesAll[node].cpu).to.be(tableData[node].cpu);
-          expect(nodesAll[node].cpuText).to.be(tableData[node].cpuText);
-          expect(nodesAll[node].load).to.be(tableData[node].load);
-          expect(nodesAll[node].loadText).to.be(tableData[node].loadText);
-          expect(nodesAll[node].memory).to.be(tableData[node].memory);
-          expect(nodesAll[node].memoryText).to.be(tableData[node].memoryText);
-          expect(nodesAll[node].disk).to.be(tableData[node].disk);
-          expect(nodesAll[node].diskText).to.be(tableData[node].diskText);
-          expect(nodesAll[node].shards).to.be(tableData[node].shards);
-        });
+          const nodesAll = await nodesList.getNodesAll();
+          const tableData = [
+            {
+              name: 'whatever-01',
+              status: 'Status: Online',
+              cpu: '0%',
+              cpuText:
+                'Trending\nup\nMax value\n3%\nMin value\n0%\nApplies to current time period',
+              load: '3.28',
+              loadText:
+                'Trending\nup\nMax value\n3.71\nMin value\n2.19\nApplies to current time period',
+              memory: '39%',
+              memoryText:
+                'Trending\ndown\nMax value\n52%\nMin value\n25%\nApplies to current time period',
+              disk: '173.9 GB',
+              diskText:
+                'Trending\ndown\nMax value\n173.9 GB\nMin value\n173.9 GB\nApplies to current time period',
+              shards: '38',
+            },
+            {
+              name: 'whatever-02',
+              status: 'Status: Online',
+              cpu: '2%',
+              cpuText:
+                'Trending\ndown\nMax value\n3%\nMin value\n0%\nApplies to current time period',
+              load: '3.28',
+              loadText:
+                'Trending\nup\nMax value\n3.73\nMin value\n2.29\nApplies to current time period',
+              memory: '25%',
+              memoryText:
+                'Trending\ndown\nMax value\n49%\nMin value\n25%\nApplies to current time period',
+              disk: '173.9 GB',
+              diskText:
+                'Trending\ndown\nMax value\n173.9 GB\nMin value\n173.9 GB\nApplies to current time period',
+              shards: '38',
+            },
+            { name: 'whatever-03', status: 'Status: Offline' },
+          ];
+          nodesAll.forEach((obj, node) => {
+            expect(nodesAll[node].name).to.be(tableData[node].name);
+            expect(nodesAll[node].status).to.be(tableData[node].status);
+            expect(nodesAll[node].cpu).to.be(tableData[node].cpu);
+            expect(nodesAll[node].cpuText).to.be(tableData[node].cpuText);
+            expect(nodesAll[node].load).to.be(tableData[node].load);
+            expect(nodesAll[node].loadText).to.be(tableData[node].loadText);
+            expect(nodesAll[node].memory).to.be(tableData[node].memory);
+            expect(nodesAll[node].memoryText).to.be(tableData[node].memoryText);
+            expect(nodesAll[node].disk).to.be(tableData[node].disk);
+            expect(nodesAll[node].diskText).to.be(tableData[node].diskText);
+            expect(nodesAll[node].shards).to.be(tableData[node].shards);
+          });
+        });
       });
@@ -119,22 +123,26 @@ export default function ({ getService, getPageObjects }) {
         await nodesList.clickCpuCol();
         await nodesList.clickCpuCol();

-        const nodesAll = await nodesList.getNodesAll();
-        const tableData = [
-          {
-            cpu: '2%',
-            cpuText:
-              'Trending\ndown\nMax value\n3%\nMin value\n0%\nApplies to current time period',
-          },
-          {
-            cpu: '0%',
-            cpuText: 'Trending\nup\nMax value\n3%\nMin value\n0%\nApplies to current time period',
-          },
-          { cpu: undefined, cpuText: undefined },
-        ];
-        nodesAll.forEach((obj, node) => {
-          expect(nodesAll[node].cpu).to.be(tableData[node].cpu);
-          expect(nodesAll[node].cpuText).to.be(tableData[node].cpuText);
-        });
+        // retry in case the table hasn't had time to re-render
+        await retry.try(async () => {
+          const nodesAll = await nodesList.getNodesAll();
+          const tableData = [
+            {
+              cpu: '2%',
+              cpuText:
+                'Trending\ndown\nMax value\n3%\nMin value\n0%\nApplies to current time period',
+            },
+            {
+              cpu: '0%',
+              cpuText:
+                'Trending\nup\nMax value\n3%\nMin value\n0%\nApplies to current time period',
+            },
+            { cpu: undefined, cpuText: undefined },
+          ];
+          nodesAll.forEach((obj, node) => {
+            expect(nodesAll[node].cpu).to.be(tableData[node].cpu);
+            expect(nodesAll[node].cpuText).to.be(tableData[node].cpuText);
+          });
+        });
       });
@@ -142,23 +150,26 @@ export default function ({ getService, getPageObjects }) {
         await nodesList.clickLoadCol();
         await nodesList.clickLoadCol();

-        const nodesAll = await nodesList.getNodesAll();
-        const tableData = [
-          {
-            load: '3.28',
-            loadText:
-              'Trending\nup\nMax value\n3.71\nMin value\n2.19\nApplies to current time period',
-          },
-          {
-            load: '3.28',
-            loadText:
-              'Trending\nup\nMax value\n3.73\nMin value\n2.29\nApplies to current time period',
-          },
-          { load: undefined },
-        ];
-        nodesAll.forEach((obj, node) => {
-          expect(nodesAll[node].load).to.be(tableData[node].load);
-          expect(nodesAll[node].loadText).to.be(tableData[node].loadText);
-        });
+        // retry in case the table hasn't had time to re-render
+        await retry.try(async () => {
+          const nodesAll = await nodesList.getNodesAll();
+          const tableData = [
+            {
+              load: '3.28',
+              loadText:
+                'Trending\nup\nMax value\n3.71\nMin value\n2.19\nApplies to current time period',
+            },
+            {
+              load: '3.28',
+              loadText:
+                'Trending\nup\nMax value\n3.73\nMin value\n2.29\nApplies to current time period',
+            },
+            { load: undefined },
+          ];
+          nodesAll.forEach((obj, node) => {
+            expect(nodesAll[node].load).to.be(tableData[node].load);
+            expect(nodesAll[node].loadText).to.be(tableData[node].loadText);
+          });
+        });
       });
@@ -167,14 +178,17 @@ export default function ({ getService, getPageObjects }) {
         await nodesList.clickNameCol();
         await nodesList.clickNameCol();

-        const nodesAll = await nodesList.getNodesAll();
-        const tableData = [
-          { name: 'whatever-01' },
-          { name: 'whatever-02' },
-          { name: 'whatever-03' },
-        ];
-        nodesAll.forEach((obj, node) => {
-          expect(nodesAll[node].name).to.be(tableData[node].name);
-        });
+        // retry in case the table hasn't had time to re-render
+        await retry.try(async () => {
+          const nodesAll = await nodesList.getNodesAll();
+          const tableData = [
+            { name: 'whatever-01' },
+            { name: 'whatever-02' },
+            { name: 'whatever-03' },
+          ];
+          nodesAll.forEach((obj, node) => {
+            expect(nodesAll[node].name).to.be(tableData[node].name);
+          });
+        });
       });
@@ -182,14 +196,17 @@ export default function ({ getService, getPageObjects }) {
         await nodesList.clickStatusCol();
         await nodesList.clickStatusCol();

-        const nodesAll = await nodesList.getNodesAll();
-        const tableData = [
-          { status: 'Status: Online' },
-          { status: 'Status: Online' },
-          { status: 'Status: Offline' },
-        ];
-        nodesAll.forEach((obj, node) => {
-          expect(nodesAll[node].status).to.be(tableData[node].status);
-        });
+        // retry in case the table hasn't had time to re-render
+        await retry.try(async () => {
+          const nodesAll = await nodesList.getNodesAll();
+          const tableData = [
+            { status: 'Status: Online' },
+            { status: 'Status: Online' },
+            { status: 'Status: Offline' },
+          ];
+          nodesAll.forEach((obj, node) => {
+            expect(nodesAll[node].status).to.be(tableData[node].status);
+          });
+        });
       });
@@ -197,23 +214,26 @@ export default function ({ getService, getPageObjects }) {
         await nodesList.clickMemoryCol();
         await nodesList.clickMemoryCol();

-        const nodesAll = await nodesList.getNodesAll();
-        const tableData = [
-          {
-            memory: '39%',
-            memoryText:
-              'Trending\ndown\nMax value\n52%\nMin value\n25%\nApplies to current time period',
-          },
-          {
-            memory: '25%',
-            memoryText:
-              'Trending\ndown\nMax value\n49%\nMin value\n25%\nApplies to current time period',
-          },
-          { memory: undefined, memoryText: undefined },
-        ];
-        nodesAll.forEach((obj, node) => {
-          expect(nodesAll[node].memory).to.be(tableData[node].memory);
-          expect(nodesAll[node].memoryText).to.be(tableData[node].memoryText);
-        });
+        // retry in case the table hasn't had time to re-render
+        await retry.try(async () => {
+          const nodesAll = await nodesList.getNodesAll();
+          const tableData = [
+            {
+              memory: '39%',
+              memoryText:
+                'Trending\ndown\nMax value\n52%\nMin value\n25%\nApplies to current time period',
+            },
+            {
+              memory: '25%',
+              memoryText:
+                'Trending\ndown\nMax value\n49%\nMin value\n25%\nApplies to current time period',
+            },
+            { memory: undefined, memoryText: undefined },
+          ];
+          nodesAll.forEach((obj, node) => {
+            expect(nodesAll[node].memory).to.be(tableData[node].memory);
+            expect(nodesAll[node].memoryText).to.be(tableData[node].memoryText);
+          });
+        });
       });
@@ -221,23 +241,26 @@ export default function ({ getService, getPageObjects }) {
         await nodesList.clickDiskCol();
         await nodesList.clickDiskCol();

-        const nodesAll = await nodesList.getNodesAll();
-        const tableData = [
-          {
-            disk: '173.9 GB',
-            diskText:
-              'Trending\ndown\nMax value\n173.9 GB\nMin value\n173.9 GB\nApplies to current time period',
-          },
-          {
-            disk: '173.9 GB',
-            diskText:
-              'Trending\ndown\nMax value\n173.9 GB\nMin value\n173.9 GB\nApplies to current time period',
-          },
-          { disk: undefined },
-        ];
-        nodesAll.forEach((obj, node) => {
-          expect(nodesAll[node].disk).to.be(tableData[node].disk);
-          expect(nodesAll[node].diskText).to.be(tableData[node].diskText);
-        });
+        // retry in case the table hasn't had time to re-render
+        await retry.try(async () => {
+          const nodesAll = await nodesList.getNodesAll();
+          const tableData = [
+            {
+              disk: '173.9 GB',
+              diskText:
+                'Trending\ndown\nMax value\n173.9 GB\nMin value\n173.9 GB\nApplies to current time period',
+            },
+            {
+              disk: '173.9 GB',
+              diskText:
+                'Trending\ndown\nMax value\n173.9 GB\nMin value\n173.9 GB\nApplies to current time period',
+            },
+            { disk: undefined },
+          ];
+          nodesAll.forEach((obj, node) => {
+            expect(nodesAll[node].disk).to.be(tableData[node].disk);
+            expect(nodesAll[node].diskText).to.be(tableData[node].diskText);
+          });
+        });
       });
@@ -245,10 +268,13 @@ export default function ({ getService, getPageObjects }) {
         await nodesList.clickShardsCol();
         await nodesList.clickShardsCol();

-        const nodesAll = await nodesList.getNodesAll();
-        const tableData = [{ shards: '38' }, { shards: '38' }, { shards: undefined }];
-        nodesAll.forEach((obj, node) => {
-          expect(nodesAll[node].shards).to.be(tableData[node].shards);
-        });
+        // retry in case the table hasn't had time to re-render
+        await retry.try(async () => {
+          const nodesAll = await nodesList.getNodesAll();
+          const tableData = [{ shards: '38' }, { shards: '38' }, { shards: undefined }];
+          nodesAll.forEach((obj, node) => {
+            expect(nodesAll[node].shards).to.be(tableData[node].shards);
+          });
+        });
       });
     });
@@ -292,14 +318,24 @@ export default function ({ getService, getPageObjects }) {

       it('should filter for specific indices', async () => {
         await nodesList.setFilter('01');
-        const rows = await nodesList.getRows();
-        expect(rows.length).to.be(1);
+
+        // retry in case the table hasn't had time to re-render
+        await retry.try(async () => {
+          const rows = await nodesList.getRows();
+          expect(rows.length).to.be(1);
+        });

         await nodesList.clearFilter();
       });

       it('should filter for non-existent index', async () => {
         await nodesList.setFilter('foobar');
-        await nodesList.assertNoData();
+
+        // retry in case the table hasn't had time to re-render
+        await retry.try(async () => {
+          await nodesList.assertNoData();
+        });

         await nodesList.clearFilter();
       });
     });

@@ -67,19 +67,15 @@ export default function ({ getService, getPageObjects }) {

       await pipelinesList.clickIdCol();

-      const pipelinesAll = await pipelinesList.getPipelinesAll();
-
-      const tableData = [
-        { id: 'nginx_logs', eventsEmittedRate: '62.5 e/s', nodeCount: '1' },
-        { id: 'test_interpolation', eventsEmittedRate: '0 e/s', nodeCount: '1' },
-        { id: 'tweets_about_labradoodles', eventsEmittedRate: '1.2 e/s', nodeCount: '1' },
-      ];
-
-      // check the all data in the table
-      pipelinesAll.forEach((obj, index) => {
-        expect(pipelinesAll[index].id).to.be(tableData[index].id);
-        expect(pipelinesAll[index].eventsEmittedRate).to.be(tableData[index].eventsEmittedRate);
-        expect(pipelinesAll[index].nodeCount).to.be(tableData[index].nodeCount);
-      });
+      // retry in case the table hasn't had time to re-render
+      await retry.try(async () => {
+        const pipelinesAll = await pipelinesList.getPipelinesAll();
+
+        expect(pipelinesAll).to.eql([
+          { id: 'nginx_logs', eventsEmittedRate: '62.5 e/s', nodeCount: '1' },
+          { id: 'test_interpolation', eventsEmittedRate: '0 e/s', nodeCount: '1' },
+          { id: 'tweets_about_labradoodles', eventsEmittedRate: '1.2 e/s', nodeCount: '1' },
+        ]);
+      });
     });
@@ -89,19 +85,15 @@ export default function ({ getService, getPageObjects }) {
       const rows = await pipelinesList.getRows();
       expect(rows.length).to.be(3);

-      const pipelinesAll = await pipelinesList.getPipelinesAll();
-
-      const tableData = [
-        { id: 'test_interpolation', eventsEmittedRate: '0 e/s', nodeCount: '1' },
-        { id: 'tweets_about_labradoodles', eventsEmittedRate: '1.2 e/s', nodeCount: '1' },
-        { id: 'nginx_logs', eventsEmittedRate: '62.5 e/s', nodeCount: '1' },
-      ];
-
-      // check the all data in the table
-      pipelinesAll.forEach((obj, index) => {
-        expect(pipelinesAll[index].id).to.be(tableData[index].id);
-        expect(pipelinesAll[index].eventsEmittedRate).to.be(tableData[index].eventsEmittedRate);
-        expect(pipelinesAll[index].nodeCount).to.be(tableData[index].nodeCount);
-      });
+      // retry in case the table hasn't had time to re-render
+      await retry.try(async () => {
+        const pipelinesAll = await pipelinesList.getPipelinesAll();
+
+        expect(pipelinesAll).to.eql([
+          { id: 'test_interpolation', eventsEmittedRate: '0 e/s', nodeCount: '1' },
+          { id: 'tweets_about_labradoodles', eventsEmittedRate: '1.2 e/s', nodeCount: '1' },
+          { id: 'nginx_logs', eventsEmittedRate: '62.5 e/s', nodeCount: '1' },
+        ]);
+      });
     });

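The pipeline-table hunks make a second change besides the retry wrapper: the per-field `forEach` assertions are collapsed into a single `expect(...).to.eql(...)` deep-equality check. A minimal sketch of the before/after, assuming `@kbn/expect`; the row data is taken from the hunks above:

// Before: one assertion per field, so a failure reports a single field of a
// single row, and a too-short actual array silently passes (forEach simply
// iterates fewer times and never checks the missing rows).
pipelinesAll.forEach((obj, index) => {
  expect(pipelinesAll[index].id).to.be(tableData[index].id);
  expect(pipelinesAll[index].eventsEmittedRate).to.be(tableData[index].eventsEmittedRate);
  expect(pipelinesAll[index].nodeCount).to.be(tableData[index].nodeCount);
});

// After: one order-sensitive deep comparison over the whole table; a row-count
// mismatch fails too, and the failure message prints both arrays at once,
// which is easier to read in CI logs and inside a retry.try() loop.
expect(pipelinesAll).to.eql([
  { id: 'nginx_logs', eventsEmittedRate: '62.5 e/s', nodeCount: '1' },
  { id: 'test_interpolation', eventsEmittedRate: '0 e/s', nodeCount: '1' },
  { id: 'tweets_about_labradoodles', eventsEmittedRate: '1.2 e/s', nodeCount: '1' },
]);
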
@@ -67,19 +67,15 @@ export default function ({ getService, getPageObjects }) {

       await pipelinesList.clickIdCol();

-      const pipelinesAll = await pipelinesList.getPipelinesAll();
-
-      const tableData = [
-        { id: 'nginx_logs', eventsEmittedRate: '62.5 e/s', nodeCount: '1' },
-        { id: 'test_interpolation', eventsEmittedRate: '0 e/s', nodeCount: '1' },
-        { id: 'tweets_about_labradoodles', eventsEmittedRate: '1.2 e/s', nodeCount: '1' },
-      ];
-
-      // check the all data in the table
-      pipelinesAll.forEach((obj, index) => {
-        expect(pipelinesAll[index].id).to.be(tableData[index].id);
-        expect(pipelinesAll[index].eventsEmittedRate).to.be(tableData[index].eventsEmittedRate);
-        expect(pipelinesAll[index].nodeCount).to.be(tableData[index].nodeCount);
-      });
+      // retry in case the table hasn't had time to re-render
+      await retry.try(async () => {
+        const pipelinesAll = await pipelinesList.getPipelinesAll();
+
+        expect(pipelinesAll).to.eql([
+          { id: 'nginx_logs', eventsEmittedRate: '62.5 e/s', nodeCount: '1' },
+          { id: 'test_interpolation', eventsEmittedRate: '0 e/s', nodeCount: '1' },
+          { id: 'tweets_about_labradoodles', eventsEmittedRate: '1.2 e/s', nodeCount: '1' },
+        ]);
+      });
     });
@@ -89,19 +85,15 @@ export default function ({ getService, getPageObjects }) {
       const rows = await pipelinesList.getRows();
       expect(rows.length).to.be(3);

-      const pipelinesAll = await pipelinesList.getPipelinesAll();
-
-      const tableData = [
-        { id: 'test_interpolation', eventsEmittedRate: '0 e/s', nodeCount: '1' },
-        { id: 'tweets_about_labradoodles', eventsEmittedRate: '1.2 e/s', nodeCount: '1' },
-        { id: 'nginx_logs', eventsEmittedRate: '62.5 e/s', nodeCount: '1' },
-      ];
-
-      // check the all data in the table
-      pipelinesAll.forEach((obj, index) => {
-        expect(pipelinesAll[index].id).to.be(tableData[index].id);
-        expect(pipelinesAll[index].eventsEmittedRate).to.be(tableData[index].eventsEmittedRate);
-        expect(pipelinesAll[index].nodeCount).to.be(tableData[index].nodeCount);
-      });
+      // retry in case the table hasn't had time to re-render
+      await retry.try(async () => {
+        const pipelinesAll = await pipelinesList.getPipelinesAll();
+
+        expect(pipelinesAll).to.eql([
+          { id: 'test_interpolation', eventsEmittedRate: '0 e/s', nodeCount: '1' },
+          { id: 'tweets_about_labradoodles', eventsEmittedRate: '1.2 e/s', nodeCount: '1' },
+          { id: 'nginx_logs', eventsEmittedRate: '62.5 e/s', nodeCount: '1' },
+        ]);
+      });
     });

@@ -12,6 +12,7 @@ export default function ({ getService, getPageObjects }) {
   const clusterOverview = getService('monitoringClusterOverview');
   const nodes = getService('monitoringLogstashNodes');
   const logstashSummaryStatus = getService('monitoringLogstashSummaryStatus');
+  const retry = getService('retry');

   describe('Logstash nodes', () => {
     const { setup, tearDown } = getLifecycleMethods(getService, getPageObjects);
@@ -41,8 +42,11 @@ export default function ({ getService, getPageObjects }) {
       });
     });
     it('should have a nodes table with the correct number of rows', async () => {
-      const rows = await nodes.getRows();
-      expect(rows.length).to.be(2);
+      // retry in case the table hasn't had time to re-render
+      await retry.try(async () => {
+        const rows = await nodes.getRows();
+        expect(rows.length).to.be(2);
+      });
     });
     it('should have a nodes table with the correct data', async () => {
       const nodesAll = await nodes.getNodesAll();
@@ -96,8 +100,13 @@ export default function ({ getService, getPageObjects }) {

     it('should filter for specific nodes', async () => {
       await nodes.setFilter('sha');
-      const rows = await nodes.getRows();
-      expect(rows.length).to.be(2);
+
+      // retry in case the table hasn't had time to re-render
+      await retry.try(async () => {
+        const rows = await nodes.getRows();
+        expect(rows.length).to.be(2);
+      });

       await nodes.clearFilter();
     });
   });

@@ -12,6 +12,7 @@ export default function ({ getService, getPageObjects }) {
   const clusterOverview = getService('monitoringClusterOverview');
   const nodes = getService('monitoringLogstashNodes');
   const logstashSummaryStatus = getService('monitoringLogstashSummaryStatus');
+  const retry = getService('retry');

   describe('Logstash nodes mb', () => {
     const { setup, tearDown } = getLifecycleMethods(getService, getPageObjects);
@@ -41,8 +42,10 @@ export default function ({ getService, getPageObjects }) {
       });
     });
     it('should have a nodes table with the correct number of rows', async () => {
-      const rows = await nodes.getRows();
-      expect(rows.length).to.be(2);
+      await retry.try(async () => {
+        const rows = await nodes.getRows();
+        expect(rows.length).to.be(2);
+      });
     });
     it('should have a nodes table with the correct data', async () => {
       const nodesAll = await nodes.getNodesAll();
@@ -96,8 +99,12 @@ export default function ({ getService, getPageObjects }) {

     it('should filter for specific nodes', async () => {
       await nodes.setFilter('sha');
-      const rows = await nodes.getRows();
-      expect(rows.length).to.be(2);
+
+      await retry.try(async () => {
+        const rows = await nodes.getRows();
+        expect(rows.length).to.be(2);
+      });

       await nodes.clearFilter();
     });
   });

@@ -15,8 +15,7 @@ export default function ({ getService, getPageObjects }) {
   const pipelinesList = getService('monitoringLogstashPipelines');
   const lsClusterSummaryStatus = getService('monitoringLogstashSummaryStatus');

-  // FLAKY: https://github.com/elastic/kibana/issues/116070
-  describe.skip('Logstash pipelines', () => {
+  describe('Logstash pipelines', () => {
     const { setup, tearDown } = getLifecycleMethods(getService, getPageObjects);

     before(async () => {
@@ -51,43 +50,36 @@ export default function ({ getService, getPageObjects }) {

       await pipelinesList.clickIdCol();

-      const pipelinesAll = await pipelinesList.getPipelinesAll();
-
-      const tableData = [
-        { id: 'main', eventsEmittedRate: '162.5 e/s', nodeCount: '1' },
-        { id: 'nginx_logs', eventsEmittedRate: '62.5 e/s', nodeCount: '1' },
-        { id: 'test_interpolation', eventsEmittedRate: '0 e/s', nodeCount: '1' },
-        { id: 'tweets_about_labradoodles', eventsEmittedRate: '1.2 e/s', nodeCount: '1' },
-      ];
-
-      // check the all data in the table
-      pipelinesAll.forEach((obj, index) => {
-        expect(pipelinesAll[index].id).to.be(tableData[index].id);
-        expect(pipelinesAll[index].eventsEmittedRate).to.be(tableData[index].eventsEmittedRate);
-        expect(pipelinesAll[index].nodeCount).to.be(tableData[index].nodeCount);
-      });
+      // retry in case the table hasn't had time to re-render
+      await retry.try(async () => {
+        const pipelinesAll = await pipelinesList.getPipelinesAll();
+
+        expect(pipelinesAll).to.eql([
+          { id: 'main', eventsEmittedRate: '162.5 e/s', nodeCount: '1' },
+          { id: 'nginx_logs', eventsEmittedRate: '62.5 e/s', nodeCount: '1' },
+          { id: 'test_interpolation', eventsEmittedRate: '0 e/s', nodeCount: '1' },
+          { id: 'tweets_about_labradoodles', eventsEmittedRate: '1.2 e/s', nodeCount: '1' },
+        ]);
+      });
     });

     it('should have Pipelines Table showing correct rows after sorting by Events Emitted Rate Asc', async () => {
       await pipelinesList.clickEventsEmittedRateCol();

-      const rows = await pipelinesList.getRows();
-      expect(rows.length).to.be(4);
-
-      const pipelinesAll = await pipelinesList.getPipelinesAll();
-
-      const tableData = [
-        { id: 'test_interpolation', eventsEmittedRate: '0 e/s', nodeCount: '1' },
-        { id: 'tweets_about_labradoodles', eventsEmittedRate: '1.2 e/s', nodeCount: '1' },
-        { id: 'nginx_logs', eventsEmittedRate: '62.5 e/s', nodeCount: '1' },
-        { id: 'main', eventsEmittedRate: '162.5 e/s', nodeCount: '1' },
-      ];
-
-      // check the all data in the table
-      pipelinesAll.forEach((obj, index) => {
-        expect(pipelinesAll[index].id).to.be(tableData[index].id);
-        expect(pipelinesAll[index].eventsEmittedRate).to.be(tableData[index].eventsEmittedRate);
-        expect(pipelinesAll[index].nodeCount).to.be(tableData[index].nodeCount);
-      });
+      // retry in case the table hasn't had time to re-render
+      await retry.try(async () => {
+        const rows = await pipelinesList.getRows();
+        expect(rows.length).to.be(4);
+
+        const pipelinesAll = await pipelinesList.getPipelinesAll();
+
+        expect(pipelinesAll).to.eql([
+          { id: 'test_interpolation', eventsEmittedRate: '0 e/s', nodeCount: '1' },
+          { id: 'tweets_about_labradoodles', eventsEmittedRate: '1.2 e/s', nodeCount: '1' },
+          { id: 'nginx_logs', eventsEmittedRate: '62.5 e/s', nodeCount: '1' },
+          { id: 'main', eventsEmittedRate: '162.5 e/s', nodeCount: '1' },
+        ]);
+      });
     });

@@ -15,8 +15,7 @@ export default function ({ getService, getPageObjects }) {
   const pipelinesList = getService('monitoringLogstashPipelines');
   const lsClusterSummaryStatus = getService('monitoringLogstashSummaryStatus');

-  // FLAKY: https://github.com/elastic/kibana/issues/121172
-  describe.skip('Logstash pipelines mb', () => {
+  describe('Logstash pipelines mb', () => {
     const { setup, tearDown } = getLifecycleMethods(getService, getPageObjects);

     before(async () => {
@@ -51,43 +50,35 @@ export default function ({ getService, getPageObjects }) {

       await pipelinesList.clickIdCol();

-      const pipelinesAll = await pipelinesList.getPipelinesAll();
-
-      const tableData = [
-        { id: 'main', eventsEmittedRate: '162.5 e/s', nodeCount: '1' },
-        { id: 'nginx_logs', eventsEmittedRate: '62.5 e/s', nodeCount: '1' },
-        { id: 'test_interpolation', eventsEmittedRate: '0 e/s', nodeCount: '1' },
-        { id: 'tweets_about_labradoodles', eventsEmittedRate: '1.2 e/s', nodeCount: '1' },
-      ];
-
-      // check the all data in the table
-      pipelinesAll.forEach((obj, index) => {
-        expect(pipelinesAll[index].id).to.be(tableData[index].id);
-        expect(pipelinesAll[index].eventsEmittedRate).to.be(tableData[index].eventsEmittedRate);
-        expect(pipelinesAll[index].nodeCount).to.be(tableData[index].nodeCount);
-      });
+      // retry in case the table hasn't had time to re-render
+      await retry.try(async () => {
+        const pipelinesAll = await pipelinesList.getPipelinesAll();
+
+        expect(pipelinesAll).to.eql([
+          { id: 'main', eventsEmittedRate: '162.5 e/s', nodeCount: '1' },
+          { id: 'nginx_logs', eventsEmittedRate: '62.5 e/s', nodeCount: '1' },
+          { id: 'test_interpolation', eventsEmittedRate: '0 e/s', nodeCount: '1' },
+          { id: 'tweets_about_labradoodles', eventsEmittedRate: '1.2 e/s', nodeCount: '1' },
+        ]);
+      });
     });

     it('should have Pipelines Table showing correct rows after sorting by Events Emitted Rate Asc', async () => {
       await pipelinesList.clickEventsEmittedRateCol();

-      const rows = await pipelinesList.getRows();
-      expect(rows.length).to.be(4);
-
-      const pipelinesAll = await pipelinesList.getPipelinesAll();
-
-      const tableData = [
-        { id: 'test_interpolation', eventsEmittedRate: '0 e/s', nodeCount: '1' },
-        { id: 'tweets_about_labradoodles', eventsEmittedRate: '1.2 e/s', nodeCount: '1' },
-        { id: 'nginx_logs', eventsEmittedRate: '62.5 e/s', nodeCount: '1' },
-        { id: 'main', eventsEmittedRate: '162.5 e/s', nodeCount: '1' },
-      ];
-
-      // check the all data in the table
-      pipelinesAll.forEach((obj, index) => {
-        expect(pipelinesAll[index].id).to.be(tableData[index].id);
-        expect(pipelinesAll[index].eventsEmittedRate).to.be(tableData[index].eventsEmittedRate);
-        expect(pipelinesAll[index].nodeCount).to.be(tableData[index].nodeCount);
-      });
+      // retry in case the table hasn't had time to re-render
+      await retry.try(async () => {
+        const rows = await pipelinesList.getRows();
+        expect(rows.length).to.be(4);
+
+        const pipelinesAll = await pipelinesList.getPipelinesAll();
+
+        expect(pipelinesAll).to.eql([
+          { id: 'test_interpolation', eventsEmittedRate: '0 e/s', nodeCount: '1' },
+          { id: 'tweets_about_labradoodles', eventsEmittedRate: '1.2 e/s', nodeCount: '1' },
+          { id: 'nginx_logs', eventsEmittedRate: '62.5 e/s', nodeCount: '1' },
+          { id: 'main', eventsEmittedRate: '162.5 e/s', nodeCount: '1' },
+        ]);
+      });
     });