mirror of https://github.com/elastic/kibana.git
synced 2025-04-24 09:48:58 -04:00
[App Search] Migrate Crawler Status Indicator, Crawler Status Banner, and Crawl Request polling (#107603)
This commit is contained in:
parent 8665f36cf3
commit 125a22c233
14 changed files with 1045 additions and 64 deletions
@@ -0,0 +1,59 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { setMockValues } from '../../../../__mocks__/kea_logic';

import React from 'react';

import { shallow } from 'enzyme';

import { EuiCallOut } from '@elastic/eui';

import { CrawlerStatus } from '../types';

import { CrawlerStatusBanner } from './crawler_status_banner';

describe('CrawlerStatusBanner', () => {
  beforeEach(() => {
    jest.clearAllMocks();
  });

  [CrawlerStatus.Starting, CrawlerStatus.Running, CrawlerStatus.Canceling].forEach((status) => {
    describe(`when the status is ${status}`, () => {
      it('renders a callout', () => {
        setMockValues({
          mostRecentCrawlRequestStatus: status,
        });

        const wrapper = shallow(<CrawlerStatusBanner />);

        expect(wrapper.find(EuiCallOut)).toHaveLength(1);
      });
    });
  });

  [
    CrawlerStatus.Success,
    CrawlerStatus.Failed,
    CrawlerStatus.Canceled,
    CrawlerStatus.Pending,
    CrawlerStatus.Suspended,
    CrawlerStatus.Suspending,
  ].forEach((status) => {
    describe(`when the status is ${status}`, () => {
      it('does not render a banner/callout', () => {
        setMockValues({
          mostRecentCrawlRequestStatus: status,
        });

        const wrapper = shallow(<CrawlerStatusBanner />);

        expect(wrapper.isEmptyRender()).toBe(true);
      });
    });
  });
});
@@ -0,0 +1,40 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import React from 'react';

import { useValues } from 'kea';

import { EuiCallOut } from '@elastic/eui';

import { i18n } from '@kbn/i18n';

import { CrawlerOverviewLogic } from '../crawler_overview_logic';
import { CrawlerStatus } from '../types';

export const CrawlerStatusBanner: React.FC = () => {
  const { mostRecentCrawlRequestStatus } = useValues(CrawlerOverviewLogic);

  if (
    mostRecentCrawlRequestStatus === CrawlerStatus.Running ||
    mostRecentCrawlRequestStatus === CrawlerStatus.Starting ||
    mostRecentCrawlRequestStatus === CrawlerStatus.Canceling
  ) {
    return (
      <EuiCallOut
        iconType="iInCircle"
        title={i18n.translate(
          'xpack.enterpriseSearch.appSearch.crawler.crawlerStatusBanner.changesCalloutTitle',
          {
            defaultMessage:
              "Changes you make now won't take effect until the start of your next crawl.",
          }
        )}
      />
    );
  }

  return null;
};
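For orientation, the banner is meant to be mounted unconditionally at the top of a crawler page: it renders an EuiCallOut only while a crawl is active (Starting, Running, or Canceling) and renders nothing otherwise. A minimal composition sketch, assuming the same imports as above (the CrawlerPageBody wrapper is hypothetical; the real consumers in this commit are CrawlerOverview and CrawlerSingleDomain):

import React from 'react';

import { EuiSpacer } from '@elastic/eui';

import { CrawlerStatusBanner } from './crawler_status_banner';

// Hypothetical page body, for illustration only: because the banner returns
// null for inactive statuses, pages can render it unconditionally and let the
// component decide whether the callout appears.
export const CrawlerPageBody: React.FC = () => (
  <>
    <CrawlerStatusBanner />
    <EuiSpacer size="l" />
    {/* ...rest of the page content... */}
  </>
);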
@@ -0,0 +1,156 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { setMockActions, setMockValues } from '../../../../../__mocks__/kea_logic';

import React from 'react';

import { shallow } from 'enzyme';

import { EuiButton } from '@elastic/eui';

import { CrawlerDomain, CrawlerStatus } from '../../types';

import { CrawlerStatusIndicator } from './crawler_status_indicator';
import { StopCrawlPopoverContextMenu } from './stop_crawl_popover_context_menu';

const MOCK_VALUES = {
  domains: [{}, {}] as CrawlerDomain[],
  mostRecentCrawlRequestStatus: CrawlerStatus.Success,
};

const MOCK_ACTIONS = {
  startCrawl: jest.fn(),
  stopCrawl: jest.fn(),
};

describe('CrawlerStatusIndicator', () => {
  beforeEach(() => {
    jest.clearAllMocks();
    setMockActions(MOCK_ACTIONS);
  });

  describe('when status is not a valid status', () => {
    it('is disabled', () => {
      // this tests a code path that should be impossible to reach, since status should always be a CrawlerStatus,
      // but we use a switch statement and need to test the default case to receive 100% coverage
      setMockValues({
        ...MOCK_VALUES,
        mostRecentCrawlRequestStatus: null,
      });

      const wrapper = shallow(<CrawlerStatusIndicator />);
      expect(wrapper.is(EuiButton)).toEqual(true);
      expect(wrapper.render().text()).toContain('Start a crawl');
      expect(wrapper.prop('disabled')).toEqual(true);
    });
  });

  describe('when there are no domains', () => {
    it('is disabled', () => {
      setMockValues({
        ...MOCK_VALUES,
        domains: [],
      });

      const wrapper = shallow(<CrawlerStatusIndicator />);
      expect(wrapper.is(EuiButton)).toEqual(true);
      expect(wrapper.render().text()).toContain('Start a crawl');
      expect(wrapper.prop('disabled')).toEqual(true);
    });
  });

  describe('when the status is success', () => {
    it('renders a CrawlerStatusIndicator with a start crawl button', () => {
      setMockValues({
        ...MOCK_VALUES,
        mostRecentCrawlRequestStatus: CrawlerStatus.Success,
      });

      const wrapper = shallow(<CrawlerStatusIndicator />);
      expect(wrapper.is(EuiButton)).toEqual(true);
      expect(wrapper.render().text()).toContain('Start a crawl');
      expect(wrapper.prop('onClick')).toEqual(MOCK_ACTIONS.startCrawl);
    });
  });

  [CrawlerStatus.Failed, CrawlerStatus.Canceled].forEach((status) => {
    describe(`when the status is ready for retry: ${status}`, () => {
      it('renders a CrawlerStatusIndicator with a retry crawl button', () => {
        setMockValues({
          ...MOCK_VALUES,
          mostRecentCrawlRequestStatus: status,
        });

        const wrapper = shallow(<CrawlerStatusIndicator />);
        expect(wrapper.is(EuiButton)).toEqual(true);
        expect(wrapper.render().text()).toContain('Retry crawl');
        expect(wrapper.prop('onClick')).toEqual(MOCK_ACTIONS.startCrawl);
      });
    });
  });

  [CrawlerStatus.Pending, CrawlerStatus.Suspended].forEach((status) => {
    describe(`when the status is ${status}`, () => {
      it('renders a CrawlerStatusIndicator with a pending indicator', () => {
        setMockValues({
          ...MOCK_VALUES,
          mostRecentCrawlRequestStatus: status,
        });

        const wrapper = shallow(<CrawlerStatusIndicator />);
        expect(wrapper.is(EuiButton)).toEqual(true);
        expect(wrapper.render().text()).toContain('Pending...');
        expect(wrapper.prop('disabled')).toEqual(true);
        expect(wrapper.prop('isLoading')).toEqual(true);
      });
    });
  });

  describe('when the status is Starting', () => {
    it('renders an appropriate CrawlerStatusIndicator', () => {
      setMockValues({
        ...MOCK_VALUES,
        mostRecentCrawlRequestStatus: CrawlerStatus.Starting,
      });

      const wrapper = shallow(<CrawlerStatusIndicator />);
      expect(wrapper.is(EuiButton)).toEqual(true);
      expect(wrapper.render().text()).toContain('Starting...');
      expect(wrapper.prop('isLoading')).toEqual(true);
    });
  });

  describe('when the status is Running', () => {
    it('renders a stop crawl popover menu', () => {
      setMockValues({
        ...MOCK_VALUES,
        mostRecentCrawlRequestStatus: CrawlerStatus.Running,
      });

      const wrapper = shallow(<CrawlerStatusIndicator />);
      expect(wrapper.is(StopCrawlPopoverContextMenu)).toEqual(true);
      expect(wrapper.prop('stopCrawl')).toEqual(MOCK_ACTIONS.stopCrawl);
    });
  });

  [CrawlerStatus.Canceling, CrawlerStatus.Suspending].forEach((status) => {
    describe(`when the status is ${status}`, () => {
      it('renders a CrawlerStatusIndicator with a stopping indicator', () => {
        setMockValues({
          ...MOCK_VALUES,
          mostRecentCrawlRequestStatus: status,
        });

        const wrapper = shallow(<CrawlerStatusIndicator />);
        expect(wrapper.is(EuiButton)).toEqual(true);
        expect(wrapper.render().text()).toContain('Stopping...');
        expect(wrapper.prop('isLoading')).toEqual(true);
      });
    });
  });
});
@@ -0,0 +1,106 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import React from 'react';

import { useActions, useValues } from 'kea';

import { EuiButton } from '@elastic/eui';

import { i18n } from '@kbn/i18n';

import { CrawlerOverviewLogic } from '../../crawler_overview_logic';
import { CrawlerStatus } from '../../types';

import { StopCrawlPopoverContextMenu } from './stop_crawl_popover_context_menu';

export const CrawlerStatusIndicator: React.FC = () => {
  const { domains, mostRecentCrawlRequestStatus } = useValues(CrawlerOverviewLogic);
  const { startCrawl, stopCrawl } = useActions(CrawlerOverviewLogic);

  const disabledButton = (
    <EuiButton disabled>
      {i18n.translate(
        'xpack.enterpriseSearch.appSearch.crawler.crawlerStatusIndicator.startACrawlButtonLabel',
        {
          defaultMessage: 'Start a crawl',
        }
      )}
    </EuiButton>
  );

  if (domains.length === 0) {
    return disabledButton;
  }

  switch (mostRecentCrawlRequestStatus) {
    case CrawlerStatus.Success:
      return (
        <EuiButton fill onClick={startCrawl}>
          {i18n.translate(
            'xpack.enterpriseSearch.appSearch.crawler.crawlerStatusIndicator.startACrawlButtonLabel',
            {
              defaultMessage: 'Start a crawl',
            }
          )}
        </EuiButton>
      );
    case CrawlerStatus.Failed:
    case CrawlerStatus.Canceled:
      return (
        <EuiButton fill onClick={startCrawl}>
          {i18n.translate(
            'xpack.enterpriseSearch.appSearch.crawler.crawlerStatusIndicator.retryCrawlButtonLabel',
            {
              defaultMessage: 'Retry crawl',
            }
          )}
        </EuiButton>
      );
    case CrawlerStatus.Pending:
    case CrawlerStatus.Suspended:
      return (
        <EuiButton disabled isLoading>
          {i18n.translate(
            'xpack.enterpriseSearch.appSearch.crawler.crawlerStatusIndicator.pendingButtonLabel',
            {
              defaultMessage: 'Pending...',
            }
          )}
        </EuiButton>
      );
    case CrawlerStatus.Starting:
      return (
        <EuiButton isLoading>
          {i18n.translate(
            'xpack.enterpriseSearch.appSearch.crawler.crawlerStatusIndicator.startingButtonLabel',
            {
              defaultMessage: 'Starting...',
            }
          )}
        </EuiButton>
      );
    case CrawlerStatus.Running:
      return <StopCrawlPopoverContextMenu stopCrawl={stopCrawl} />;
    case CrawlerStatus.Canceling:
    case CrawlerStatus.Suspending:
      return (
        <EuiButton isLoading fill>
          {i18n.translate(
            'xpack.enterpriseSearch.appSearch.crawler.crawlerStatusIndicator.stoppingButtonLabel',
            {
              defaultMessage: 'Stopping...',
            }
          )}
        </EuiButton>
      );
    default:
      // We should never get here: it would require a CrawlerStatus option not covered
      // by the switch cases above
      return disabledButton;
  }
};
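Worth noting about the design: every CrawlerStatus is funneled through a single switch, so each status maps to exactly one button state, and the default branch reuses the same disabled "Start a crawl" button as the no-domains case. An unexpected or missing status therefore degrades to a safe, inert control instead of crashing the page header.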
@@ -0,0 +1,51 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import React from 'react';

import { shallow } from 'enzyme';

import {
  EuiButton,
  EuiContextMenuItem,
  EuiContextMenuPanel,
  EuiPopover,
  EuiResizeObserver,
} from '@elastic/eui';

import { mountWithIntl } from '../../../../../test_helpers';

import { StopCrawlPopoverContextMenu } from './stop_crawl_popover_context_menu';

const stopCrawl = jest.fn();

describe('StopCrawlPopoverContextMenu', () => {
  it('is initially closed', () => {
    const wrapper = shallow(<StopCrawlPopoverContextMenu stopCrawl={stopCrawl} />);

    expect(wrapper.is(EuiPopover)).toBe(true);
    expect(wrapper.prop('isOpen')).toEqual(false);
  });

  it('can be opened to stop crawls', () => {
    const wrapper = mountWithIntl(<StopCrawlPopoverContextMenu stopCrawl={stopCrawl} />);

    wrapper.find(EuiButton).simulate('click');

    expect(wrapper.find(EuiPopover).prop('isOpen')).toEqual(true);

    const menuItem = wrapper
      .find(EuiContextMenuPanel)
      .find(EuiResizeObserver)
      .find(EuiContextMenuItem);

    expect(menuItem).toHaveLength(1);

    menuItem.simulate('click');

    expect(stopCrawl).toHaveBeenCalled();
  });
});
@@ -0,0 +1,86 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import React, { useState } from 'react';

import {
  EuiButton,
  EuiContextMenuItem,
  EuiContextMenuPanel,
  EuiFlexGroup,
  EuiFlexItem,
  EuiLoadingSpinner,
  EuiPopover,
} from '@elastic/eui';

import { i18n } from '@kbn/i18n';

interface StopCrawlPopoverContextMenuProps {
  stopCrawl(): void;
}

export const StopCrawlPopoverContextMenu: React.FC<StopCrawlPopoverContextMenuProps> = ({
  stopCrawl,
  ...rest
}) => {
  const [isPopoverOpen, setPopover] = useState(false);

  const togglePopover = () => setPopover(!isPopoverOpen);

  const closePopover = () => setPopover(false);

  return (
    <EuiPopover
      {...rest}
      button={
        <EuiButton
          iconType="arrowDown"
          iconSide="right"
          onClick={togglePopover}
          className="crawlInProgressButton"
        >
          <EuiFlexGroup alignItems="center" responsive={false} gutterSize="s">
            <EuiFlexItem grow={false}>
              <EuiLoadingSpinner size="m" />
            </EuiFlexItem>
            <EuiFlexItem>
              {i18n.translate(
                'xpack.enterpriseSearch.appSearch.crawler.crawlerStatusIndicator.crawlingButtonLabel',
                {
                  defaultMessage: 'Crawling...',
                }
              )}
            </EuiFlexItem>
          </EuiFlexGroup>
        </EuiButton>
      }
      isOpen={isPopoverOpen}
      closePopover={closePopover}
      panelPaddingSize="none"
      anchorPosition="downLeft"
    >
      <EuiContextMenuPanel
        items={[
          <EuiContextMenuItem
            key="cancel crawl"
            icon="cross"
            onClick={() => {
              closePopover();
              stopCrawl();
            }}
          >
            {i18n.translate(
              'xpack.enterpriseSearch.appSearch.crawler.crawlerStatusIndicator.cancelCrawlMenuItemLabel',
              {
                defaultMessage: 'Cancel Crawl',
              }
            )}
          </EuiContextMenuItem>,
        ]}
      />
    </EuiPopover>
  );
};
@@ -13,10 +13,14 @@ import React from 'react';

import { shallow } from 'enzyme';

+import { getPageHeaderActions } from '../../../test_helpers';
+
import { AddDomainFlyout } from './components/add_domain/add_domain_flyout';
import { AddDomainForm } from './components/add_domain/add_domain_form';
import { AddDomainFormSubmitButton } from './components/add_domain/add_domain_form_submit_button';
import { CrawlRequestsTable } from './components/crawl_requests_table';
+import { CrawlerStatusBanner } from './components/crawler_status_banner';
+import { CrawlerStatusIndicator } from './components/crawler_status_indicator/crawler_status_indicator';
import { DomainsTable } from './components/domains_table';
import { CrawlerOverview } from './crawler_overview';
import {
@@ -75,6 +79,7 @@ const crawlRequests: CrawlRequestFromServer[] = [
describe('CrawlerOverview', () => {
  const mockActions = {
    fetchCrawlerData: jest.fn(),
+    getLatestCrawlRequests: jest.fn(),
  };

  const mockValues = {
@@ -88,12 +93,26 @@ describe('CrawlerOverview', () => {
    setMockActions(mockActions);
  });

-  it('calls fetchCrawlerData on page load', () => {
+  it('calls fetchCrawlerData and starts polling on page load', () => {
    setMockValues(mockValues);

    shallow(<CrawlerOverview />);

    expect(mockActions.fetchCrawlerData).toHaveBeenCalledTimes(1);
+    expect(mockActions.getLatestCrawlRequests).toHaveBeenCalledWith(false);
  });

+  it('contains a crawler status banner', () => {
+    setMockValues(mockValues);
+    const wrapper = shallow(<CrawlerOverview />);
+
+    expect(wrapper.find(CrawlerStatusBanner)).toHaveLength(1);
+  });
+
+  it('contains a crawler status indicator', () => {
+    const wrapper = shallow(<CrawlerOverview />);
+
+    expect(getPageHeaderActions(wrapper).find(CrawlerStatusIndicator)).toHaveLength(1);
+  });
+
  it('hides the domain and crawl request tables when there are no domains, and no crawl requests', () => {
@@ -21,6 +21,8 @@ import { AddDomainFlyout } from './components/add_domain/add_domain_flyout';
import { AddDomainForm } from './components/add_domain/add_domain_form';
import { AddDomainFormSubmitButton } from './components/add_domain/add_domain_form_submit_button';
import { CrawlRequestsTable } from './components/crawl_requests_table';
+import { CrawlerStatusBanner } from './components/crawler_status_banner';
+import { CrawlerStatusIndicator } from './components/crawler_status_indicator/crawler_status_indicator';
import { DomainsTable } from './components/domains_table';
import { CRAWLER_TITLE } from './constants';
import { CrawlerOverviewLogic } from './crawler_overview_logic';
@@ -28,18 +30,24 @@ import { CrawlerOverviewLogic } from './crawler_overview_logic';
export const CrawlerOverview: React.FC = () => {
  const { crawlRequests, dataLoading, domains } = useValues(CrawlerOverviewLogic);

-  const { fetchCrawlerData } = useActions(CrawlerOverviewLogic);
+  const { fetchCrawlerData, getLatestCrawlRequests } = useActions(CrawlerOverviewLogic);

  useEffect(() => {
    fetchCrawlerData();
+    getLatestCrawlRequests(false);
  }, []);

  return (
    <AppSearchPageTemplate
      pageChrome={getEngineBreadcrumbs([CRAWLER_TITLE])}
-      pageHeader={{ pageTitle: CRAWLER_TITLE }}
+      pageHeader={{
+        pageTitle: CRAWLER_TITLE,
+        rightSideItems: [<CrawlerStatusIndicator />],
+      }}
      isLoading={dataLoading}
    >
+      <CrawlerStatusBanner />
+      <EuiSpacer size="l" />
      {domains.length > 0 ? (
        <>
          <EuiFlexGroup direction="row" alignItems="stretch">
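The getLatestCrawlRequests(false) call on mount is deliberate: passing false suppresses the follow-up fetchCrawlerData refresh that the polling logic would otherwise trigger when the latest crawl request is already in a terminal state (Success, Failed, Canceled), since this effect already calls fetchCrawlerData directly and a second fetch would be redundant.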
@@ -23,15 +23,16 @@ import {
  CrawlerRules,
  CrawlerStatus,
  CrawlRequest,
  CrawlRequestFromServer,
  CrawlRule,
} from './types';
-import { crawlerDataServerToClient, crawlRequestServerToClient } from './utils';
+import { crawlerDataServerToClient } from './utils';

const DEFAULT_VALUES: CrawlerOverviewValues = {
  crawlRequests: [],
  dataLoading: true,
  domains: [],
+  mostRecentCrawlRequestStatus: CrawlerStatus.Success,
+  timeoutId: null,
};

const DEFAULT_CRAWL_RULE: CrawlRule = {
@@ -55,36 +56,50 @@ const MOCK_SERVER_CRAWLER_DATA: CrawlerDataFromServer = {
  ],
};

-const MOCK_SERVER_CRAWL_REQUESTS_DATA: CrawlRequestFromServer[] = [
-  {
-    id: '618d0e66abe97bc688328900',
-    status: CrawlerStatus.Pending,
-    created_at: 'Mon, 31 Aug 2020 17:00:00 +0000',
-    began_at: null,
-    completed_at: null,
-  },
-];
-
const MOCK_CLIENT_CRAWLER_DATA = crawlerDataServerToClient(MOCK_SERVER_CRAWLER_DATA);
-const MOCK_CLIENT_CRAWL_REQUESTS_DATA = MOCK_SERVER_CRAWL_REQUESTS_DATA.map(
-  crawlRequestServerToClient
-);

describe('CrawlerOverviewLogic', () => {
-  const { mount } = new LogicMounter(CrawlerOverviewLogic);
+  const { mount, unmount } = new LogicMounter(CrawlerOverviewLogic);
  const { http } = mockHttpValues;
  const { flashAPIErrors, flashSuccessToast } = mockFlashMessageHelpers;

  beforeEach(() => {
    jest.clearAllMocks();
+    jest.useFakeTimers(); // this should be run before every test to reset these mocks
    mount();
  });

+  afterAll(() => {
+    jest.useRealTimers();
+  });
+
  it('has expected default values', () => {
    expect(CrawlerOverviewLogic.values).toEqual(DEFAULT_VALUES);
  });

  describe('actions', () => {
+    describe('clearTimeoutId', () => {
+      it('clears the timeout in the logic', () => {
+        mount({
+          timeoutId: setTimeout(() => {}, 1),
+        });
+
+        CrawlerOverviewLogic.actions.clearTimeoutId();
+
+        expect(CrawlerOverviewLogic.values.timeoutId).toEqual(null);
+      });
+    });
+
+    describe('onCreateNewTimeout', () => {
+      it('sets the timeout in the logic', () => {
+        const timeout = setTimeout(() => {}, 1);
+
+        CrawlerOverviewLogic.actions.onCreateNewTimeout(timeout);
+
+        expect(CrawlerOverviewLogic.values.timeoutId).toEqual(timeout);
+      });
+    });
+
    describe('onReceiveCrawlerData', () => {
      const crawlerData: CrawlerData = {
        domains: [
@@ -139,42 +154,20 @@ describe('CrawlerOverviewLogic', () => {
    describe('fetchCrawlerData', () => {
      it('updates logic with data that has been converted from server to client', async () => {
        jest.spyOn(CrawlerOverviewLogic.actions, 'onReceiveCrawlerData');
-        // TODO this spyOn should be removed when crawl request polling is added
-        jest.spyOn(CrawlerOverviewLogic.actions, 'onReceiveCrawlRequests');
-
-        // TODO this first mock for MOCK_SERVER_CRAWL_REQUESTS_DATA should be removed when crawl request polling is added
-        http.get.mockReturnValueOnce(Promise.resolve(MOCK_SERVER_CRAWL_REQUESTS_DATA));
        http.get.mockReturnValueOnce(Promise.resolve(MOCK_SERVER_CRAWLER_DATA));

        CrawlerOverviewLogic.actions.fetchCrawlerData();
        await nextTick();

-        expect(http.get).toHaveBeenNthCalledWith(
-          1,
-          '/api/app_search/engines/some-engine/crawler/crawl_requests'
-        );
-        expect(CrawlerOverviewLogic.actions.onReceiveCrawlRequests).toHaveBeenCalledWith(
-          MOCK_CLIENT_CRAWL_REQUESTS_DATA
-        );
-
-        expect(http.get).toHaveBeenNthCalledWith(2, '/api/app_search/engines/some-engine/crawler');
+        expect(http.get).toHaveBeenCalledWith('/api/app_search/engines/some-engine/crawler');
        expect(CrawlerOverviewLogic.actions.onReceiveCrawlerData).toHaveBeenCalledWith(
          MOCK_CLIENT_CRAWLER_DATA
        );
      });

-      // TODO this test should be removed when crawl request polling is added
-      it('calls flashApiErrors when there is an error on the request for crawl results', async () => {
-        http.get.mockReturnValueOnce(Promise.reject('error'));
-        CrawlerOverviewLogic.actions.fetchCrawlerData();
-        await nextTick();
-
-        expect(flashAPIErrors).toHaveBeenCalledWith('error');
-      });
-
      it('calls flashApiErrors when there is an error on the request for crawler data', async () => {
-        // TODO this first mock for MOCK_SERVER_CRAWL_REQUESTS_DATA should be removed when crawl request polling is added
-        http.get.mockReturnValueOnce(Promise.resolve(MOCK_SERVER_CRAWL_REQUESTS_DATA));
        http.get.mockReturnValueOnce(Promise.reject('error'));

        CrawlerOverviewLogic.actions.fetchCrawlerData();
        await nextTick();

@@ -185,8 +178,8 @@ describe('CrawlerOverviewLogic', () => {
    describe('deleteDomain', () => {
      it('calls onReceiveCrawlerData with retrieved data that has been converted from server to client', async () => {
        jest.spyOn(CrawlerOverviewLogic.actions, 'onReceiveCrawlerData');

        http.delete.mockReturnValue(Promise.resolve(MOCK_SERVER_CRAWLER_DATA));

        CrawlerOverviewLogic.actions.deleteDomain({ id: '1234' } as CrawlerDomain);
        await nextTick();

@@ -204,11 +197,248 @@ describe('CrawlerOverviewLogic', () => {

      it('calls flashApiErrors when there is an error', async () => {
        http.delete.mockReturnValue(Promise.reject('error'));

        CrawlerOverviewLogic.actions.deleteDomain({ id: '1234' } as CrawlerDomain);
        await nextTick();

        expect(flashAPIErrors).toHaveBeenCalledWith('error');
      });
    });

+    describe('startCrawl', () => {
+      describe('success path', () => {
+        it('creates a new crawl request and then fetches the latest crawl requests', async () => {
+          jest.spyOn(CrawlerOverviewLogic.actions, 'getLatestCrawlRequests');
+          http.post.mockReturnValueOnce(Promise.resolve());
+
+          CrawlerOverviewLogic.actions.startCrawl();
+          await nextTick();
+
+          expect(http.post).toHaveBeenCalledWith(
+            '/api/app_search/engines/some-engine/crawler/crawl_requests'
+          );
+          expect(CrawlerOverviewLogic.actions.getLatestCrawlRequests).toHaveBeenCalled();
+        });
+      });
+
+      describe('on failure', () => {
+        it('flashes an error message', async () => {
+          http.post.mockReturnValueOnce(Promise.reject('error'));
+
+          CrawlerOverviewLogic.actions.startCrawl();
+          await nextTick();
+
+          expect(flashAPIErrors).toHaveBeenCalledWith('error');
+        });
+      });
+    });
+
+    describe('stopCrawl', () => {
+      describe('success path', () => {
+        it('stops the crawl and then fetches the latest crawl requests', async () => {
+          jest.spyOn(CrawlerOverviewLogic.actions, 'getLatestCrawlRequests');
+          http.post.mockReturnValueOnce(Promise.resolve());
+
+          CrawlerOverviewLogic.actions.stopCrawl();
+          await nextTick();
+
+          expect(http.post).toHaveBeenCalledWith(
+            '/api/app_search/engines/some-engine/crawler/crawl_requests/cancel'
+          );
+          expect(CrawlerOverviewLogic.actions.getLatestCrawlRequests).toHaveBeenCalled();
+        });
+      });
+
+      describe('on failure', () => {
+        it('flashes an error message', async () => {
+          jest.spyOn(CrawlerOverviewLogic.actions, 'getLatestCrawlRequests');
+          http.post.mockReturnValueOnce(Promise.reject('error'));
+
+          CrawlerOverviewLogic.actions.stopCrawl();
+          await nextTick();
+
+          expect(flashAPIErrors).toHaveBeenCalledWith('error');
+        });
+      });
+    });
+
+    describe('createNewTimeoutForCrawlRequests', () => {
+      it('saves the timeout ID in the logic', () => {
+        jest.spyOn(CrawlerOverviewLogic.actions, 'onCreateNewTimeout');
+        jest.spyOn(CrawlerOverviewLogic.actions, 'getLatestCrawlRequests');
+
+        CrawlerOverviewLogic.actions.createNewTimeoutForCrawlRequests(2000);
+
+        expect(setTimeout).toHaveBeenCalledWith(expect.any(Function), 2000);
+        expect(CrawlerOverviewLogic.actions.onCreateNewTimeout).toHaveBeenCalled();
+
+        jest.runAllTimers();
+
+        expect(CrawlerOverviewLogic.actions.getLatestCrawlRequests).toHaveBeenCalled();
+      });
+
+      it('clears a timeout if one already exists', () => {
+        const timeoutId = setTimeout(() => {}, 1);
+        mount({
+          timeoutId,
+        });
+
+        CrawlerOverviewLogic.actions.createNewTimeoutForCrawlRequests(2000);
+
+        expect(clearTimeout).toHaveBeenCalledWith(timeoutId);
+      });
+    });
+
+    describe('getLatestCrawlRequests', () => {
+      describe('on success', () => {
+        [
+          CrawlerStatus.Pending,
+          CrawlerStatus.Starting,
+          CrawlerStatus.Running,
+          CrawlerStatus.Canceling,
+        ].forEach((status) => {
+          it(`creates a new timeout for status ${status}`, async () => {
+            jest.spyOn(CrawlerOverviewLogic.actions, 'createNewTimeoutForCrawlRequests');
+            http.get.mockReturnValueOnce(Promise.resolve([{ status }]));
+
+            CrawlerOverviewLogic.actions.getLatestCrawlRequests();
+            await nextTick();
+
+            expect(
+              CrawlerOverviewLogic.actions.createNewTimeoutForCrawlRequests
+            ).toHaveBeenCalled();
+          });
+        });
+
+        [CrawlerStatus.Success, CrawlerStatus.Failed, CrawlerStatus.Canceled].forEach((status) => {
+          it(`clears the timeout and fetches data for status ${status}`, async () => {
+            jest.spyOn(CrawlerOverviewLogic.actions, 'clearTimeoutId');
+            jest.spyOn(CrawlerOverviewLogic.actions, 'fetchCrawlerData');
+            http.get.mockReturnValueOnce(Promise.resolve([{ status }]));
+
+            CrawlerOverviewLogic.actions.getLatestCrawlRequests();
+            await nextTick();
+
+            expect(CrawlerOverviewLogic.actions.clearTimeoutId).toHaveBeenCalled();
+            expect(CrawlerOverviewLogic.actions.fetchCrawlerData).toHaveBeenCalled();
+          });
+
+          it(`optionally suppresses fetching data for status ${status}`, async () => {
+            jest.spyOn(CrawlerOverviewLogic.actions, 'clearTimeoutId');
+            jest.spyOn(CrawlerOverviewLogic.actions, 'fetchCrawlerData');
+            http.get.mockReturnValueOnce(Promise.resolve([{ status }]));
+
+            CrawlerOverviewLogic.actions.getLatestCrawlRequests(false);
+            await nextTick();
+
+            expect(CrawlerOverviewLogic.actions.clearTimeoutId).toHaveBeenCalled();
+            expect(CrawlerOverviewLogic.actions.fetchCrawlerData).toHaveBeenCalledTimes(0);
+          });
+        });
+      });
+
+      describe('on failure', () => {
+        it('creates a new timeout', async () => {
+          jest.spyOn(CrawlerOverviewLogic.actions, 'createNewTimeoutForCrawlRequests');
+          http.get.mockReturnValueOnce(Promise.reject());
+
+          CrawlerOverviewLogic.actions.getLatestCrawlRequests();
+          await nextTick();
+
+          expect(CrawlerOverviewLogic.actions.createNewTimeoutForCrawlRequests).toHaveBeenCalled();
+        });
+      });
+    });
  });

+  describe('selectors', () => {
+    describe('mostRecentCrawlRequestStatus', () => {
+      it('is Success when there are no crawl requests', () => {
+        mount({
+          crawlRequests: [],
+        });
+
+        expect(CrawlerOverviewLogic.values.mostRecentCrawlRequestStatus).toEqual(
+          CrawlerStatus.Success
+        );
+      });
+
+      it('is Success when there are only skipped crawl requests', () => {
+        mount({
+          crawlRequests: [
+            {
+              id: '2',
+              status: CrawlerStatus.Skipped,
+              createdAt: 'Mon, 31 Aug 2020 17:00:00 +0000',
+              beganAt: null,
+              completedAt: null,
+            },
+            {
+              id: '1',
+              status: CrawlerStatus.Skipped,
+              createdAt: 'Mon, 30 Aug 2020 17:00:00 +0000',
+              beganAt: null,
+              completedAt: null,
+            },
+          ],
+        });
+
+        expect(CrawlerOverviewLogic.values.mostRecentCrawlRequestStatus).toEqual(
+          CrawlerStatus.Success
+        );
+      });
+
+      it('is the first non-skipped crawl request status', () => {
+        mount({
+          crawlRequests: [
+            {
+              id: '3',
+              status: CrawlerStatus.Skipped,
+              createdAt: 'Mon, 31 Aug 2020 17:00:00 +0000',
+              beganAt: null,
+              completedAt: null,
+            },
+            {
+              id: '2',
+              status: CrawlerStatus.Failed,
+              createdAt: 'Mon, 30 Aug 2020 17:00:00 +0000',
+              beganAt: null,
+              completedAt: null,
+            },
+            {
+              id: '1',
+              status: CrawlerStatus.Success,
+              createdAt: 'Mon, 29 Aug 2020 17:00:00 +0000',
+              beganAt: null,
+              completedAt: null,
+            },
+          ],
+        });
+
+        expect(CrawlerOverviewLogic.values.mostRecentCrawlRequestStatus).toEqual(
+          CrawlerStatus.Failed
+        );
+      });
+    });
+  });
+
+  describe('events', () => {
+    describe('beforeUnmount', () => {
+      it('clears the timeout if there is one', () => {
+        jest.spyOn(global, 'clearTimeout');
+
+        const timeoutId = setTimeout(() => {}, 1);
+        mount({
+          timeoutId,
+        });
+        unmount();
+
+        expect(clearTimeout).toHaveBeenCalledWith(timeoutId);
+      });
+
+      it('does not crash if no timeout exists', () => {
+        mount({ timeoutId: null });
+        unmount();
+      });
+    });
+  });
});
@@ -14,7 +14,13 @@ import { flashAPIErrors, flashSuccessToast } from '../../../shared/flash_message
import { HttpLogic } from '../../../shared/http';
import { EngineLogic } from '../engine';

-import { CrawlerData, CrawlerDomain, CrawlRequest, CrawlRequestFromServer } from './types';
+import {
+  CrawlerData,
+  CrawlerDomain,
+  CrawlRequest,
+  CrawlRequestFromServer,
+  CrawlerStatus,
+} from './types';
import { crawlerDataServerToClient, crawlRequestServerToClient } from './utils';

export const DELETE_DOMAIN_MESSAGE = (domainUrl: string) =>
@@ -28,17 +34,28 @@ export const DELETE_DOMAIN_MESSAGE = (domainUrl: string) =>
  }
);

+const POLLING_DURATION = 1000;
+const POLLING_DURATION_ON_FAILURE = 5000;
+
export interface CrawlerOverviewValues {
  crawlRequests: CrawlRequest[];
  dataLoading: boolean;
  domains: CrawlerDomain[];
+  mostRecentCrawlRequestStatus: CrawlerStatus;
+  timeoutId: NodeJS.Timeout | null;
}

interface CrawlerOverviewActions {
+  clearTimeoutId(): void;
+  createNewTimeoutForCrawlRequests(duration: number): { duration: number };
  deleteDomain(domain: CrawlerDomain): { domain: CrawlerDomain };
  fetchCrawlerData(): void;
+  getLatestCrawlRequests(refreshData?: boolean): { refreshData?: boolean };
+  onCreateNewTimeout(timeoutId: NodeJS.Timeout): { timeoutId: NodeJS.Timeout };
  onReceiveCrawlerData(data: CrawlerData): { data: CrawlerData };
  onReceiveCrawlRequests(crawlRequests: CrawlRequest[]): { crawlRequests: CrawlRequest[] };
+  startCrawl(): void;
+  stopCrawl(): void;
}

export const CrawlerOverviewLogic = kea<
@@ -46,10 +63,16 @@ export const CrawlerOverviewLogic = kea<
>({
  path: ['enterprise_search', 'app_search', 'crawler', 'crawler_overview'],
  actions: {
+    clearTimeoutId: true,
+    createNewTimeoutForCrawlRequests: (duration) => ({ duration }),
    deleteDomain: (domain) => ({ domain }),
    fetchCrawlerData: true,
+    getLatestCrawlRequests: (refreshData) => ({ refreshData }),
+    onCreateNewTimeout: (timeoutId) => ({ timeoutId }),
    onReceiveCrawlerData: (data) => ({ data }),
    onReceiveCrawlRequests: (crawlRequests) => ({ crawlRequests }),
+    startCrawl: () => null,
+    stopCrawl: () => null,
  },
  reducers: {
    dataLoading: [
@@ -70,24 +93,33 @@ export const CrawlerOverviewLogic = kea<
        onReceiveCrawlRequests: (_, { crawlRequests }) => crawlRequests,
      },
    ],
+    timeoutId: [
+      null,
+      {
+        clearTimeoutId: () => null,
+        onCreateNewTimeout: (_, { timeoutId }) => timeoutId,
+      },
+    ],
  },
-  listeners: ({ actions }) => ({
+  selectors: ({ selectors }) => ({
+    mostRecentCrawlRequestStatus: [
+      () => [selectors.crawlRequests],
+      (crawlRequests: CrawlerOverviewValues['crawlRequests']) => {
+        const eligibleCrawlRequests = crawlRequests.filter(
+          (req) => req.status !== CrawlerStatus.Skipped
+        );
+        if (eligibleCrawlRequests.length === 0) {
+          return CrawlerStatus.Success;
+        }
+        return eligibleCrawlRequests[0].status;
+      },
+    ],
+  }),
+  listeners: ({ actions, values }) => ({
    fetchCrawlerData: async () => {
      const { http } = HttpLogic.values;
      const { engineName } = EngineLogic.values;

-      // TODO Remove fetching crawl requests here once Crawl Request Polling is implemented
-      try {
-        const crawlResultsResponse: CrawlRequestFromServer[] = await http.get(
-          `/api/app_search/engines/${engineName}/crawler/crawl_requests`
-        );
-
-        const crawlRequests = crawlResultsResponse.map(crawlRequestServerToClient);
-        actions.onReceiveCrawlRequests(crawlRequests);
-      } catch (e) {
-        flashAPIErrors(e);
-      }
-
      try {
        const response = await http.get(`/api/app_search/engines/${engineName}/crawler`);

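A note on the selector's semantics: crawlRequests is assumed to arrive newest-first, so eligibleCrawlRequests[0] is the most recent request that wasn't Skipped; an empty or all-skipped list deliberately reads as Success, which keeps the status indicator in its idle "Start a crawl" state for engines that have never crawled.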
@@ -118,5 +150,78 @@ export const CrawlerOverviewLogic = kea<
        flashAPIErrors(e);
      }
    },
+    startCrawl: async () => {
+      const { http } = HttpLogic.values;
+      const { engineName } = EngineLogic.values;
+
+      try {
+        await http.post(`/api/app_search/engines/${engineName}/crawler/crawl_requests`);
+        actions.getLatestCrawlRequests();
+      } catch (e) {
+        flashAPIErrors(e);
+      }
+    },
+    stopCrawl: async () => {
+      const { http } = HttpLogic.values;
+      const { engineName } = EngineLogic.values;
+
+      try {
+        await http.post(`/api/app_search/engines/${engineName}/crawler/crawl_requests/cancel`);
+        actions.getLatestCrawlRequests();
+      } catch (e) {
+        flashAPIErrors(e);
+      }
+    },
+    createNewTimeoutForCrawlRequests: ({ duration }) => {
+      if (values.timeoutId) {
+        clearTimeout(values.timeoutId);
+      }
+
+      const timeoutId = setTimeout(() => {
+        actions.getLatestCrawlRequests();
+      }, duration);
+
+      actions.onCreateNewTimeout(timeoutId);
+    },
+    getLatestCrawlRequests: async ({ refreshData = true }) => {
+      const { http } = HttpLogic.values;
+      const { engineName } = EngineLogic.values;
+
+      try {
+        const crawlRequestsFromServer: CrawlRequestFromServer[] = await http.get(
+          `/api/app_search/engines/${engineName}/crawler/crawl_requests`
+        );
+        const crawlRequests = crawlRequestsFromServer.map(crawlRequestServerToClient);
+        actions.onReceiveCrawlRequests(crawlRequests);
+        if (
+          [
+            CrawlerStatus.Pending,
+            CrawlerStatus.Starting,
+            CrawlerStatus.Running,
+            CrawlerStatus.Canceling,
+          ].includes(crawlRequests[0]?.status)
+        ) {
+          actions.createNewTimeoutForCrawlRequests(POLLING_DURATION);
+        } else if (
+          [CrawlerStatus.Success, CrawlerStatus.Failed, CrawlerStatus.Canceled].includes(
+            crawlRequests[0]?.status
+          )
+        ) {
+          actions.clearTimeoutId();
+          if (refreshData) {
+            actions.fetchCrawlerData();
+          }
+        }
+      } catch (e) {
+        actions.createNewTimeoutForCrawlRequests(POLLING_DURATION_ON_FAILURE);
+      }
+    },
  }),
+  events: ({ values }) => ({
+    beforeUnmount: () => {
+      if (values.timeoutId) {
+        clearTimeout(values.timeoutId);
+      }
+    },
+  }),
});
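Taken together, startCrawl/stopCrawl, getLatestCrawlRequests, and createNewTimeoutForCrawlRequests form a self-rescheduling poll loop. A rough sketch of the control flow, as TypeScript comments for illustration only (not part of the commit):

// startCrawl() / stopCrawl()
//   └─ POST crawl_requests (or crawl_requests/cancel)
//   └─ getLatestCrawlRequests()
//        └─ GET crawl_requests, store the newest-first list via onReceiveCrawlRequests
//        ├─ newest status is Pending | Starting | Running | Canceling
//        │    └─ createNewTimeoutForCrawlRequests(POLLING_DURATION)             // poll again in 1s
//        ├─ newest status is Success | Failed | Canceled
//        │    └─ clearTimeoutId(), then fetchCrawlerData() unless refreshData === false
//        └─ request failed
//             └─ createNewTimeoutForCrawlRequests(POLLING_DURATION_ON_FAILURE)  // retry in 5s
//
// beforeUnmount clears any pending timeout so the loop cannot outlive the page.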
@@ -15,17 +15,28 @@ import { shallow } from 'enzyme';

import { EuiCode } from '@elastic/eui';

+import { getPageHeaderActions } from '../../../test_helpers';
+
+import { CrawlerStatusBanner } from './components/crawler_status_banner';
+import { CrawlerStatusIndicator } from './components/crawler_status_indicator/crawler_status_indicator';
import { CrawlerSingleDomain } from './crawler_single_domain';

const MOCK_VALUES = {
+  // CrawlerSingleDomainLogic
  dataLoading: false,
  domain: {
    url: 'https://elastic.co',
  },
+  // CrawlerOverviewLogic
+  domains: [],
+  crawlRequests: [],
};

const MOCK_ACTIONS = {
+  fetchCrawlerData: jest.fn(),
  fetchDomainData: jest.fn(),
+  getLatestCrawlRequests: jest.fn(),
};

describe('CrawlerSingleDomain', () => {
@@ -40,10 +51,10 @@ describe('CrawlerSingleDomain', () => {
    const wrapper = shallow(<CrawlerSingleDomain />);

    expect(wrapper.find(EuiCode).render().text()).toContain('https://elastic.co');
-    expect(wrapper.prop('pageHeader')).toEqual({ pageTitle: 'https://elastic.co' });
+    expect(wrapper.prop('pageHeader').pageTitle).toEqual('https://elastic.co');
  });

-  it('uses a placeholder for the page title and page chrome if a domain has not been', () => {
+  it('uses a placeholder for the page title and page chrome if a domain has not been set', () => {
    setMockValues({
      ...MOCK_VALUES,
      domain: null,
@@ -51,6 +62,18 @@ describe('CrawlerSingleDomain', () => {

    const wrapper = shallow(<CrawlerSingleDomain />);

-    expect(wrapper.prop('pageHeader')).toEqual({ pageTitle: 'Loading...' });
+    expect(wrapper.prop('pageHeader').pageTitle).toEqual('Loading...');
  });

+  it('contains a crawler status banner', () => {
+    const wrapper = shallow(<CrawlerSingleDomain />);
+
+    expect(wrapper.find(CrawlerStatusBanner)).toHaveLength(1);
+  });
+
+  it('contains a crawler status indicator', () => {
+    const wrapper = shallow(<CrawlerSingleDomain />);
+
+    expect(getPageHeaderActions(wrapper).find(CrawlerStatusIndicator)).toHaveLength(1);
+  });
});
@@ -11,13 +11,15 @@ import { useParams } from 'react-router-dom';

import { useActions, useValues } from 'kea';

-import { EuiCode } from '@elastic/eui';
+import { EuiCode, EuiSpacer } from '@elastic/eui';

import { i18n } from '@kbn/i18n';

import { getEngineBreadcrumbs } from '../engine';
import { AppSearchPageTemplate } from '../layout';

+import { CrawlerStatusBanner } from './components/crawler_status_banner';
+import { CrawlerStatusIndicator } from './components/crawler_status_indicator/crawler_status_indicator';
import { CRAWLER_TITLE } from './constants';
import { CrawlerSingleDomainLogic } from './crawler_single_domain_logic';
@@ -41,9 +43,11 @@ export const CrawlerSingleDomain: React.FC = () => {
  return (
    <AppSearchPageTemplate
      pageChrome={getEngineBreadcrumbs([CRAWLER_TITLE, displayDomainUrl])}
-      pageHeader={{ pageTitle: displayDomainUrl }}
+      pageHeader={{ pageTitle: displayDomainUrl, rightSideItems: [<CrawlerStatusIndicator />] }}
      isLoading={dataLoading}
    >
+      <CrawlerStatusBanner />
+      <EuiSpacer size="l" />
      <EuiCode>{JSON.stringify(domain, null, 2)}</EuiCode>
    </AppSearchPageTemplate>
  );
@@ -76,6 +76,72 @@ describe('crawler routes', () => {
    });
  });

+  describe('POST /api/app_search/engines/{name}/crawler/crawl_requests', () => {
+    let mockRouter: MockRouter;
+
+    beforeEach(() => {
+      jest.clearAllMocks();
+      mockRouter = new MockRouter({
+        method: 'post',
+        path: '/api/app_search/engines/{name}/crawler/crawl_requests',
+      });
+
+      registerCrawlerRoutes({
+        ...mockDependencies,
+        router: mockRouter.router,
+      });
+    });
+
+    it('creates a request to enterprise search', () => {
+      expect(mockRequestHandler.createRequest).toHaveBeenCalledWith({
+        path: '/api/as/v0/engines/:name/crawler/crawl_requests',
+      });
+    });
+
+    it('validates correctly with name', () => {
+      const request = { params: { name: 'some-engine' } };
+      mockRouter.shouldValidate(request);
+    });
+
+    it('fails validation without name', () => {
+      const request = { params: {} };
+      mockRouter.shouldThrow(request);
+    });
+  });
+
+  describe('POST /api/app_search/engines/{name}/crawler/crawl_requests/cancel', () => {
+    let mockRouter: MockRouter;
+
+    beforeEach(() => {
+      jest.clearAllMocks();
+      mockRouter = new MockRouter({
+        method: 'post',
+        path: '/api/app_search/engines/{name}/crawler/crawl_requests/cancel',
+      });
+
+      registerCrawlerRoutes({
+        ...mockDependencies,
+        router: mockRouter.router,
+      });
+    });
+
+    it('creates a request to enterprise search', () => {
+      expect(mockRequestHandler.createRequest).toHaveBeenCalledWith({
+        path: '/api/as/v0/engines/:name/crawler/crawl_requests/active/cancel',
+      });
+    });
+
+    it('validates correctly with name', () => {
+      const request = { params: { name: 'some-engine' } };
+      mockRouter.shouldValidate(request);
+    });
+
+    it('fails validation without name', () => {
+      const request = { params: {} };
+      mockRouter.shouldThrow(request);
+    });
+  });
+
  describe('POST /api/app_search/engines/{name}/crawler/domains', () => {
    let mockRouter: MockRouter;

|
@ -41,6 +41,34 @@ export function registerCrawlerRoutes({
|
|||
})
|
||||
);
|
||||
|
||||
router.post(
|
||||
{
|
||||
path: '/api/app_search/engines/{name}/crawler/crawl_requests',
|
||||
validate: {
|
||||
params: schema.object({
|
||||
name: schema.string(),
|
||||
}),
|
||||
},
|
||||
},
|
||||
enterpriseSearchRequestHandler.createRequest({
|
||||
path: '/api/as/v0/engines/:name/crawler/crawl_requests',
|
||||
})
|
||||
);
|
||||
|
||||
router.post(
|
||||
{
|
||||
path: '/api/app_search/engines/{name}/crawler/crawl_requests/cancel',
|
||||
validate: {
|
||||
params: schema.object({
|
||||
name: schema.string(),
|
||||
}),
|
||||
},
|
||||
},
|
||||
enterpriseSearchRequestHandler.createRequest({
|
||||
path: '/api/as/v0/engines/:name/crawler/crawl_requests/active/cancel',
|
||||
})
|
||||
);
|
||||
|
||||
router.post(
|
||||
{
|
||||
path: '/api/app_search/engines/{name}/crawler/domains',
|
||||
|
|
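Both new routes are thin proxies: Kibana validates only the {name} path parameter, then enterpriseSearchRequestHandler forwards the call to the corresponding Enterprise Search endpoint, with {name} mapped onto the :name placeholder. Note that the cancel route targets /crawl_requests/active/cancel upstream, i.e. it always cancels the currently active crawl rather than a crawl identified by ID.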