[App Search] Add Crawl Type and Domains columns to Crawl Requests table (#118917) (#119181)

Co-authored-by: Byron Hulcher <byronhulcher@gmail.com>
Authored by Kibana Machine on 2021-11-19 11:33:00 -05:00, committed by GitHub
parent 8c31588fb9
commit c517030fee
9 changed files with 208 additions and 24 deletions

View file

@@ -0,0 +1,71 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import React from 'react';
import '../../../__mocks__/engine_logic.mock';
import { shallow } from 'enzyme';
import { EuiBadge } from '@elastic/eui';
import { mountWithIntl } from '../../../../test_helpers';
import { CrawlEvent, CrawlerStatus, CrawlType } from '../types';
import { CrawlEventTypeBadge } from './crawl_event_type_badge';
const MOCK_EVENT: CrawlEvent = {
id: '618d0e66abe97bc688328900',
status: CrawlerStatus.Pending,
stage: 'crawl',
createdAt: 'Mon, 31 Aug 2020 17:00:00 +0000',
beganAt: null,
completedAt: null,
type: CrawlType.Full,
crawlConfig: {
domainAllowlist: ['https://www.elastic.co'],
},
};
describe('CrawlEventTypeBadge', () => {
it('renders a badge for process crawls', () => {
const wrapper = mountWithIntl(
<CrawlEventTypeBadge event={{ ...MOCK_EVENT, stage: 'process' }} />
);
const badge = wrapper.find(EuiBadge);
expect(badge.prop('color')).toEqual('hollow');
expect(badge.text()).toEqual('Re-applied crawl rules');
});
it('renders a badge for partial crawls', () => {
const wrapper = mountWithIntl(
<CrawlEventTypeBadge event={{ ...MOCK_EVENT, type: CrawlType.Partial }} />
);
const badge = wrapper.find(EuiBadge);
expect(badge.prop('color')).toEqual('hollow');
expect(badge.text()).toEqual('Partial');
});
it('renders a badge for full crawls', () => {
const wrapper = mountWithIntl(
<CrawlEventTypeBadge event={{ ...MOCK_EVENT, type: CrawlType.Full }} />
);
const badge = wrapper.find(EuiBadge);
expect(badge.prop('color')).toBeUndefined();
expect(badge.text()).toEqual('Full');
});
it('is empty by default', () => {
const wrapper = shallow(<CrawlEventTypeBadge event={{} as CrawlEvent} />);
expect(wrapper.isEmptyRender()).toBe(true);
});
});

View file

@@ -0,0 +1,36 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import React from 'react';
import { EuiBadge } from '@elastic/eui';
import { i18n } from '@kbn/i18n';
import { CrawlEvent, CrawlType, readableCrawlTypes } from '../types';
export const CrawlEventTypeBadge: React.FC<{ event: CrawlEvent }> = ({ event }) => {
if (event.stage === 'process') {
return (
<EuiBadge color="hollow">
{i18n.translate(
'xpack.enterpriseSearch.appSearch.crawler.crawlTypeOptions.reAppliedCrawlRules',
{
defaultMessage: 'Re-applied crawl rules',
}
)}
</EuiBadge>
);
}
if (event.type === CrawlType.Full) {
return <EuiBadge>{readableCrawlTypes[CrawlType.Full]}</EuiBadge>;
}
if (event.type === CrawlType.Partial) {
return <EuiBadge color="hollow">{readableCrawlTypes[CrawlType.Partial]}</EuiBadge>;
}
return null;
};

View file

@@ -16,7 +16,7 @@ import { EuiBasicTable, EuiEmptyPrompt } from '@elastic/eui';
import { mountWithIntl } from '../../../../test_helpers';
-import { CrawlEvent, CrawlerStatus } from '../types';
+import { CrawlEvent, CrawlerStatus, CrawlType } from '../types';
import { CrawlRequestsTable } from './crawl_requests_table';
@@ -30,6 +30,10 @@ const values: { events: CrawlEvent[] } = {
createdAt: 'Mon, 31 Aug 2020 17:00:00 +0000',
beganAt: null,
completedAt: null,
+type: CrawlType.Full,
+crawlConfig: {
+domainAllowlist: ['https://www.elastic.co'],
+},
},
],
};
@@ -60,6 +64,16 @@ describe('CrawlRequestsTable', () => {
expect(tableContent).toContain('Aug 31, 2020');
});
+it('renders a type column', () => {
+expect(tableContent).toContain('Crawl type');
+expect(tableContent).toContain('Full');
+});
+it('renders a domains column', () => {
+expect(tableContent).toContain('Domains');
+// TODO How to test for the contents of this badge?
+});
it('renders a status column', () => {
expect(tableContent).toContain('Status');
expect(tableContent).toContain('Pending');
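
The TODO in the domains test above asks how to assert on the badge contents. One possible approach (a sketch, not part of this commit, assuming the table is mounted into `wrapper` via mountWithIntl and EuiBadge is added to the '@elastic/eui' import) is to find the badge with Enzyme and check its rendered text; the mock event's domainAllowlist holds a single domain, so the badge should read '1':

// Sketch only: locate the Domains badge and assert on its text.
const domainsBadge = wrapper.find(EuiBadge).last();
expect(domainsBadge.text()).toEqual('1');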

View file

@@ -9,21 +9,17 @@ import React from 'react';
import { useValues } from 'kea';
-import {
-EuiBasicTable,
-EuiEmptyPrompt,
-EuiIconTip,
-EuiTableFieldDataColumnType,
-} from '@elastic/eui';
+import { EuiBadge, EuiBasicTable, EuiBasicTableColumn, EuiEmptyPrompt } from '@elastic/eui';
import { i18n } from '@kbn/i18n';
import { CrawlerLogic } from '../crawler_logic';
import { CrawlEvent, readableCrawlerStatuses } from '../types';
+import { CrawlEventTypeBadge } from './crawl_event_type_badge';
import { CustomFormattedTimestamp } from './custom_formatted_timestamp';
-const columns: Array<EuiTableFieldDataColumnType<CrawlEvent>> = [
+const columns: Array<EuiBasicTableColumn<CrawlEvent>> = [
{
field: 'id',
name: i18n.translate(
@@ -45,6 +41,25 @@ const columns: Array<EuiTableFieldDataColumnType<CrawlEvent>> = [
<CustomFormattedTimestamp timestamp={createdAt} />
),
},
+{
+field: 'type',
+name: i18n.translate(
+'xpack.enterpriseSearch.appSearch.crawler.crawlRequestsTable.column.crawlType',
+{
+defaultMessage: 'Crawl type',
+}
+),
+render: (_, event: CrawlEvent) => <CrawlEventTypeBadge event={event} />,
+},
+{
+name: i18n.translate(
+'xpack.enterpriseSearch.appSearch.crawler.crawlRequestsTable.column.domains',
+{
+defaultMessage: 'Domains',
+}
+),
+render: (event: CrawlEvent) => <EuiBadge>{event.crawlConfig.domainAllowlist.length}</EuiBadge>,
+},
{
field: 'status',
name: i18n.translate(
@@ -53,22 +68,7 @@ const columns: Array<EuiTableFieldDataColumnType<CrawlEvent>> = [
defaultMessage: 'Status',
}
),
-align: 'right',
-render: (status: CrawlEvent['status'], event: CrawlEvent) => (
-<>
-{event.stage === 'process' && (
-<EuiIconTip
-aria-label="Process crawl"
-size="m"
-type="iInCircle"
-color="primary"
-position="top"
-content="Re-applied crawl rules"
-/>
-)}
-{readableCrawlerStatuses[status]}
-</>
-),
+render: (status: CrawlEvent['status']) => readableCrawlerStatuses[status],
},
];
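
A note on the Domains column above: because it is defined without a `field`, EuiBasicTable treats it as a computed column and passes the entire row item to `render`, which is why the callback receives the whole CrawlEvent rather than a single field value. A minimal sketch of the same pattern (illustrative only, not from this commit):

// A field-less (computed) column: render receives the full item.
const domainsColumn: EuiBasicTableColumn<CrawlEvent> = {
  name: 'Domains',
  render: (event: CrawlEvent) => event.crawlConfig.domainAllowlist.join(', '),
};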

View file

@@ -25,6 +25,7 @@ import {
CrawlerRules,
CrawlerStatus,
CrawlRule,
+CrawlType,
} from './types';
import { crawlerDataServerToClient } from './utils';
@@ -132,6 +133,10 @@ describe('CrawlerLogic', () => {
createdAt: 'Mon, 31 Aug 2020 17:00:00 +0000',
beganAt: null,
completedAt: null,
+type: CrawlType.Full,
+crawlConfig: {
+domainAllowlist: ['elastic.co'],
+},
},
],
mostRecentCrawlRequest: {

View file

@@ -29,6 +29,7 @@ import {
CrawlerRules,
CrawlerStatus,
CrawlEventFromServer,
+CrawlType,
} from './types';
const domains: CrawlerDomainFromServer[] = [
@@ -73,6 +74,10 @@ const events: CrawlEventFromServer[] = [
created_at: 'Mon, 31 Aug 2020 11:00:00 +0000',
began_at: 'Mon, 31 Aug 2020 12:00:00 +0000',
completed_at: 'Mon, 31 Aug 2020 13:00:00 +0000',
+type: CrawlType.Full,
+crawl_config: {
+domain_allowlist: ['moviedatabase.com', 'swiftype.com'],
+},
},
{
id: 'b',
@@ -81,6 +86,10 @@ const events: CrawlEventFromServer[] = [
created_at: 'Mon, 31 Aug 2020 14:00:00 +0000',
began_at: 'Mon, 31 Aug 2020 15:00:00 +0000',
completed_at: 'Mon, 31 Aug 2020 16:00:00 +0000',
+type: CrawlType.Partial,
+crawl_config: {
+domain_allowlist: ['swiftype.com'],
+},
},
];

View file

@@ -179,6 +179,10 @@ export enum CrawlerStatus {
Skipped = 'skipped',
}
+export enum CrawlType {
+Full = 'full',
+Partial = 'partial',
+}
export interface CrawlRequestFromServer {
id: string;
status: CrawlerStatus;
@@ -197,6 +201,13 @@ export interface CrawlRequest {
export type CrawlEventStage = 'crawl' | 'process';
+export interface CrawlConfig {
+domainAllowlist: string[];
+}
+export interface CrawlConfigFromServer {
+domain_allowlist: string[];
+}
export interface CrawlEventFromServer {
id: string;
stage: CrawlEventStage;
@@ -204,6 +215,8 @@ export interface CrawlEventFromServer {
created_at: string;
began_at: string | null;
completed_at: string | null;
+type: CrawlType;
+crawl_config: CrawlConfigFromServer;
}
export interface CrawlEvent {
@@ -213,6 +226,8 @@ export interface CrawlEvent {
createdAt: string;
beganAt: string | null;
completedAt: string | null;
+type: CrawlType;
+crawlConfig: CrawlConfig;
}
export const readableCrawlerStatuses: { [key in CrawlerStatus]: string } = {
@@ -258,6 +273,17 @@ export const readableCrawlerStatuses: { [key in CrawlerStatus]: string } = {
),
};
+export const readableCrawlTypes: { [key in CrawlType]: string } = {
+[CrawlType.Full]: i18n.translate(
+'xpack.enterpriseSearch.appSearch.crawler.crawlTypeOptions.full',
+{ defaultMessage: 'Full' }
+),
+[CrawlType.Partial]: i18n.translate(
+'xpack.enterpriseSearch.appSearch.crawler.crawlTypeOptions.partial',
+{ defaultMessage: 'Partial' }
+),
+};
export interface CrawlSchedule {
frequency: number;
unit: CrawlUnits;

View file

@@ -17,6 +17,7 @@ import {
CrawlerData,
CrawlRequest,
CrawlerDomain,
+CrawlType,
} from './types';
import {
@@ -162,6 +163,10 @@ describe('crawlerDataServerToClient', () => {
created_at: 'Mon, 31 Aug 2020 17:00:00 +0000',
began_at: null,
completed_at: null,
+type: CrawlType.Full,
+crawl_config: {
+domain_allowlist: ['https://www.elastic.co'],
+},
},
],
most_recent_crawl_request: {
@@ -211,6 +216,10 @@ describe('crawlerDataServerToClient', () => {
createdAt: 'Mon, 31 Aug 2020 17:00:00 +0000',
beganAt: null,
completedAt: null,
+type: 'full',
+crawlConfig: {
+domainAllowlist: ['https://www.elastic.co'],
+},
},
]);
expect(output.mostRecentCrawlRequest).toEqual({

View file

@@ -20,6 +20,8 @@ import {
CrawlerRules,
CrawlEventFromServer,
CrawlEvent,
+CrawlConfigFromServer,
+CrawlConfig,
} from './types';
export function crawlerDomainServerToClient(payload: CrawlerDomainFromServer): CrawlerDomain {
@@ -80,6 +82,14 @@ export function crawlRequestServerToClient(crawlRequest: CrawlRequestFromServer)
};
}
+export function crawlConfigServerToClient(crawlConfig: CrawlConfigFromServer): CrawlConfig {
+const { domain_allowlist: domainAllowlist } = crawlConfig;
+return {
+domainAllowlist,
+};
+}
export function crawlerEventServerToClient(event: CrawlEventFromServer): CrawlEvent {
const {
id,
@@ -88,6 +98,8 @@ export function crawlerEventServerToClient(event: CrawlEventFromServer): CrawlEv
created_at: createdAt,
began_at: beganAt,
completed_at: completedAt,
+type,
+crawl_config: crawlConfig,
} = event;
return {
@@ -97,6 +109,8 @@ export function crawlerEventServerToClient(event: CrawlEventFromServer): CrawlEv
createdAt,
beganAt,
completedAt,
+type,
+crawlConfig: crawlConfigServerToClient(crawlConfig),
};
}
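
Taken together, crawlerEventServerToClient now maps the snake_case server payload, including the new type and crawl_config fields, onto the camelCase client shape. A minimal usage sketch (assuming the types and converters above; the event values are illustrative):

// Sketch only: converting a server event to its client-side shape.
const serverEvent: CrawlEventFromServer = {
  id: 'a',
  stage: 'crawl',
  status: CrawlerStatus.Pending,
  created_at: 'Mon, 31 Aug 2020 17:00:00 +0000',
  began_at: null,
  completed_at: null,
  type: CrawlType.Full,
  crawl_config: { domain_allowlist: ['https://www.elastic.co'] },
};
const clientEvent = crawlerEventServerToClient(serverEvent);
// clientEvent.type === 'full'
// clientEvent.crawlConfig.domainAllowlist[0] === 'https://www.elastic.co'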