[App Search] Migrate Crawl Schedule form (#108066)

Byron Hulcher 2021-08-11 12:45:25 -04:00 committed by GitHub
parent ae73cf8416
commit 0d55d30c97
12 changed files with 1066 additions and 28 deletions

View file

@ -0,0 +1,98 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { setMockActions, setMockValues } from '../../../../../__mocks__/kea_logic';
import '../../../../../__mocks__/shallow_useeffect.mock';
import React from 'react';
import { shallow, ShallowWrapper } from 'enzyme';
import {
EuiButton,
EuiButtonEmpty,
EuiFieldNumber,
EuiForm,
EuiSelect,
EuiSwitch,
} from '@elastic/eui';
import { CrawlUnits } from '../../types';
import { AutomaticCrawlScheduler } from './automatic_crawl_scheduler';
const MOCK_ACTIONS = {
// AutomaticCrawlSchedulerLogic
fetchCrawlSchedule: jest.fn(),
setCrawlFrequency: jest.fn(),
setCrawlUnit: jest.fn(),
saveChanges: jest.fn(),
toggleCrawlAutomatically: jest.fn(),
// ManageCrawlsPopoverLogic
closePopover: jest.fn(),
};
const MOCK_VALUES = {
crawlAutomatically: false,
crawlFrequency: 7,
crawlUnit: CrawlUnits.days,
isSubmitting: false,
};
describe('AutomaticCrawlScheduler', () => {
let wrapper: ShallowWrapper;
beforeEach(() => {
setMockActions(MOCK_ACTIONS);
setMockValues(MOCK_VALUES);
wrapper = shallow(<AutomaticCrawlScheduler />);
});
it('calls fetchCrawlSchedule on component load', () => {
expect(MOCK_ACTIONS.fetchCrawlSchedule).toHaveBeenCalled();
});
it('renders', () => {
expect(wrapper.find(EuiForm)).toHaveLength(1);
expect(wrapper.find(EuiFieldNumber)).toHaveLength(1);
expect(wrapper.find(EuiSelect)).toHaveLength(1);
});
it('saves changes on form submit', () => {
const preventDefault = jest.fn();
wrapper.find(EuiForm).simulate('submit', { preventDefault });
expect(preventDefault).toHaveBeenCalled();
expect(MOCK_ACTIONS.saveChanges).toHaveBeenCalled();
});
it('contains a switch that toggles automatic crawling', () => {
wrapper.find(EuiSwitch).simulate('change');
expect(MOCK_ACTIONS.toggleCrawlAutomatically).toHaveBeenCalled();
});
it('contains a number field that updates the crawl frequency', () => {
wrapper.find(EuiFieldNumber).simulate('change', { target: { value: '10' } });
expect(MOCK_ACTIONS.setCrawlFrequency).toHaveBeenCalledWith(10);
});
it('contains a select field that updates the crawl unit', () => {
wrapper.find(EuiSelect).simulate('change', { target: { value: CrawlUnits.weeks } });
expect(MOCK_ACTIONS.setCrawlUnit).toHaveBeenCalledWith(CrawlUnits.weeks);
});
it('contains a button to close the popover', () => {
expect(wrapper.find(EuiButtonEmpty).prop('onClick')).toEqual(MOCK_ACTIONS.closePopover);
});
it('contains a submit button', () => {
expect(wrapper.find(EuiButton).prop('type')).toEqual('submit');
});
});

View file

@ -0,0 +1,204 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import React, { useEffect } from 'react';
import { useActions, useValues } from 'kea';
import {
EuiButton,
EuiButtonEmpty,
EuiFieldNumber,
EuiFlexGroup,
EuiFlexItem,
EuiForm,
EuiFormRow,
EuiLink,
EuiPopoverFooter,
EuiSelect,
EuiSpacer,
EuiSwitch,
EuiText,
htmlIdGenerator,
} from '@elastic/eui';
import { i18n } from '@kbn/i18n';
import { FormattedMessage } from '@kbn/i18n/react';
import {
DAYS_UNIT_LABEL,
HOURS_UNIT_LABEL,
MONTHS_UNIT_LABEL,
WEEKS_UNIT_LABEL,
} from '../../../../../shared/constants/units';
import { CANCEL_BUTTON_LABEL, SAVE_BUTTON_LABEL } from '../../../../../shared/constants';
import { DOCS_PREFIX } from '../../../../routes';
import { CrawlUnits } from '../../types';
import { AutomaticCrawlSchedulerLogic } from './automatic_crawl_scheduler_logic';
import { ManageCrawlsPopoverLogic } from './manage_crawls_popover_logic';
export const AutomaticCrawlScheduler: React.FC = () => {
const {
fetchCrawlSchedule,
setCrawlFrequency,
setCrawlUnit,
saveChanges,
toggleCrawlAutomatically,
} = useActions(AutomaticCrawlSchedulerLogic);
const { closePopover } = useActions(ManageCrawlsPopoverLogic);
const { crawlAutomatically, crawlFrequency, crawlUnit, isSubmitting } = useValues(
AutomaticCrawlSchedulerLogic
);
useEffect(() => {
fetchCrawlSchedule();
}, []);
const formId = htmlIdGenerator('AutomaticCrawlScheduler')();
return (
<EuiForm
onSubmit={(event) => {
event.preventDefault();
saveChanges();
}}
component="form"
id={formId}
>
<EuiSpacer size="s" />
<EuiText size="s" color="subdued">
<FormattedMessage
id="xpack.enterpriseSearch.appSearch.crawler.automaticCrawlSchedule.formDescription"
defaultMessage="Don't worry about it, we'll start a crawl for you. {readMoreMessage}."
values={{
readMoreMessage: (
<EuiLink href={`${DOCS_PREFIX}/web-crawler.html`} target="_blank">
{i18n.translate(
'xpack.enterpriseSearch.appSearch.crawler.automaticCrawlSchedule.readMoreLink',
{
defaultMessage: 'Read more.',
}
)}
</EuiLink>
),
}}
/>
</EuiText>
<EuiSpacer size="m" />
<EuiFormRow display="rowCompressed">
<EuiSwitch
autoFocus
checked={crawlAutomatically}
label={
<EuiText>
{i18n.translate(
'xpack.enterpriseSearch.appSearch.crawler.automaticCrawlSchedule.crawlAutomaticallySwitchLabel',
{
defaultMessage: 'Crawl automatically',
}
)}
</EuiText>
}
onChange={toggleCrawlAutomatically}
compressed
/>
</EuiFormRow>
<EuiFormRow display="rowCompressed">
<EuiFlexGroup direction="row" gutterSize="s" alignItems="center">
<EuiFlexItem grow={false}>
<EuiText>
{i18n.translate(
'xpack.enterpriseSearch.appSearch.crawler.automaticCrawlSchedule.crawlUnitsPrefix',
{
defaultMessage: 'Every',
}
)}
</EuiText>
</EuiFlexItem>
<EuiFlexItem grow={false}>
<EuiFieldNumber
aria-label={i18n.translate(
'xpack.enterpriseSearch.appSearch.crawler.automaticCrawlSchedule.scheduleFrequencyLabel',
{
defaultMessage: 'Schedule frequency',
}
)}
disabled={!crawlAutomatically}
fullWidth={false}
min={0}
max={99}
compressed
value={crawlFrequency}
onChange={(e) => setCrawlFrequency(parseInt(e.target.value, 10))}
/>
</EuiFlexItem>
<EuiFlexItem grow={false}>
<EuiSelect
aria-label={i18n.translate(
'xpack.enterpriseSearch.appSearch.crawler.automaticCrawlSchedule.scheduleUnitsLabel',
{
defaultMessage: 'Schedule units of time',
}
)}
disabled={!crawlAutomatically}
compressed
options={[
{
text: HOURS_UNIT_LABEL,
value: CrawlUnits.hours,
},
{
text: DAYS_UNIT_LABEL,
value: CrawlUnits.days,
},
{
text: WEEKS_UNIT_LABEL,
value: CrawlUnits.weeks,
},
{
text: MONTHS_UNIT_LABEL,
value: CrawlUnits.months,
},
]}
value={crawlUnit}
onChange={(e) => setCrawlUnit(e.target.value as CrawlUnits)}
/>
</EuiFlexItem>
<EuiFlexItem />
</EuiFlexGroup>
</EuiFormRow>
<EuiSpacer />
<EuiText size="xs" color="subdued">
{i18n.translate(
'xpack.enterpriseSearch.appSearch.crawler.automaticCrawlSchedule.scheduleDescription',
{
defaultMessage: 'The crawl schedule applies to every domain on this engine.',
}
)}
</EuiText>
<EuiPopoverFooter>
<EuiFormRow display="rowCompressed">
<EuiFlexGroup>
<EuiFlexItem>
<EuiButtonEmpty onClick={closePopover}>{CANCEL_BUTTON_LABEL}</EuiButtonEmpty>
</EuiFlexItem>
<EuiFlexItem>
<EuiButton form={formId} type="submit" isLoading={isSubmitting} fill>
{SAVE_BUTTON_LABEL}
</EuiButton>
</EuiFlexItem>
</EuiFlexGroup>
</EuiFormRow>
</EuiPopoverFooter>
</EuiForm>
);
};
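
For orientation, a minimal sketch of how this form might be mounted as a popover panel; it mirrors the content: <AutomaticCrawlScheduler /> panel registered later in this commit, but the surrounding panels array here is only an illustration:

import React from 'react';
import { EuiContextMenuPanelDescriptor } from '@elastic/eui';

import { AutomaticCrawlScheduler } from './automatic_crawl_scheduler';

// Illustrative only: the scheduler form owns schedule state via
// AutomaticCrawlSchedulerLogic, while the popover that hosts this panel
// owns open/close state via ManageCrawlsPopoverLogic.closePopover.
export const examplePanels: EuiContextMenuPanelDescriptor[] = [
  {
    id: 1,
    title: 'Automatic crawling',
    width: 400,
    content: <AutomaticCrawlScheduler />,
  },
];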

View file

@ -0,0 +1,307 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import {
LogicMounter,
mockHttpValues,
mockFlashMessageHelpers,
} from '../../../../../__mocks__/kea_logic';
import '../../../../__mocks__/engine_logic.mock';
jest.mock('./manage_crawls_popover_logic', () => ({
ManageCrawlsPopoverLogic: {
actions: {
closePopover: jest.fn(),
},
},
}));
import { nextTick } from '@kbn/test/jest';
import { CrawlUnits } from '../../types';
import { AutomaticCrawlSchedulerLogic } from './automatic_crawl_scheduler_logic';
import { ManageCrawlsPopoverLogic } from './manage_crawls_popover_logic';
describe('AutomaticCrawlSchedulerLogic', () => {
const { mount } = new LogicMounter(AutomaticCrawlSchedulerLogic);
const { http } = mockHttpValues;
const { flashAPIErrors, flashSuccessToast } = mockFlashMessageHelpers;
beforeEach(() => {
jest.clearAllMocks();
});
it('has expected default values', () => {
mount();
expect(AutomaticCrawlSchedulerLogic.values).toEqual({
crawlAutomatically: false,
crawlFrequency: 7,
crawlUnit: CrawlUnits.days,
isSubmitting: false,
});
});
describe('actions', () => {
describe('clearCrawlSchedule', () => {
it('sets crawl schedule related values to their defaults', () => {
mount({
crawlAutomatically: true,
crawlFrequency: 36,
crawlUnit: CrawlUnits.hours,
});
AutomaticCrawlSchedulerLogic.actions.clearCrawlSchedule();
expect(AutomaticCrawlSchedulerLogic.values).toMatchObject({
crawlAutomatically: false,
crawlFrequency: 7,
crawlUnit: CrawlUnits.days,
});
});
});
describe('toggleCrawlAutomatically', () => {
it('toggles the ability to crawl automatically', () => {
mount({
crawlAutomatically: false,
});
AutomaticCrawlSchedulerLogic.actions.toggleCrawlAutomatically();
expect(AutomaticCrawlSchedulerLogic.values.crawlAutomatically).toEqual(true);
AutomaticCrawlSchedulerLogic.actions.toggleCrawlAutomatically();
expect(AutomaticCrawlSchedulerLogic.values.crawlAutomatically).toEqual(false);
});
});
describe('onDoneSubmitting', () => {
  it('sets isSubmitting to false', () => {
    mount({
      isSubmitting: true,
    });
    AutomaticCrawlSchedulerLogic.actions.onDoneSubmitting();
    expect(AutomaticCrawlSchedulerLogic.values.isSubmitting).toEqual(false);
  });
});
describe('setCrawlFrequency', () => {
it("sets the crawl schedule's frequency", () => {
mount({
crawlFrequency: 36,
});
AutomaticCrawlSchedulerLogic.actions.setCrawlFrequency(12);
expect(AutomaticCrawlSchedulerLogic.values.crawlFrequency).toEqual(12);
});
});
describe('setCrawlSchedule', () => {
it("sets the crawl schedule's frequency and unit, and enables crawling automatically", () => {
mount();
AutomaticCrawlSchedulerLogic.actions.setCrawlSchedule({
frequency: 3,
unit: CrawlUnits.hours,
});
expect(AutomaticCrawlSchedulerLogic.values).toMatchObject({
crawlAutomatically: true,
crawlFrequency: 3,
crawlUnit: CrawlUnits.hours,
});
});
});
describe('setCrawlUnit', () => {
it("sets the crawl schedule's unit", () => {
mount({
crawlUnit: CrawlUnits.months,
});
AutomaticCrawlSchedulerLogic.actions.setCrawlUnit(CrawlUnits.weeks);
expect(AutomaticCrawlSchedulerLogic.values.crawlUnit).toEqual(CrawlUnits.weeks);
});
});
});
describe('listeners', () => {
describe('deleteCrawlSchedule', () => {
it('resets the states of the crawl scheduler and popover, and shows a toast, on success', async () => {
jest.spyOn(AutomaticCrawlSchedulerLogic.actions, 'clearCrawlSchedule');
jest.spyOn(ManageCrawlsPopoverLogic.actions, 'closePopover');
jest.spyOn(AutomaticCrawlSchedulerLogic.actions, 'onDoneSubmitting');
http.delete.mockReturnValueOnce(Promise.resolve());
AutomaticCrawlSchedulerLogic.actions.deleteCrawlSchedule();
await nextTick();
expect(AutomaticCrawlSchedulerLogic.actions.clearCrawlSchedule).toHaveBeenCalled();
expect(flashSuccessToast).toHaveBeenCalledWith(expect.any(String));
expect(ManageCrawlsPopoverLogic.actions.closePopover).toHaveBeenCalled();
expect(AutomaticCrawlSchedulerLogic.actions.onDoneSubmitting).toHaveBeenCalled();
});
describe('error paths', () => {
it('resets the states of the crawl scheduler and popover on a 404 response', async () => {
jest.spyOn(AutomaticCrawlSchedulerLogic.actions, 'clearCrawlSchedule');
jest.spyOn(ManageCrawlsPopoverLogic.actions, 'closePopover');
jest.spyOn(AutomaticCrawlSchedulerLogic.actions, 'onDoneSubmitting');
http.delete.mockReturnValueOnce(
Promise.reject({
response: { status: 404 },
})
);
AutomaticCrawlSchedulerLogic.actions.deleteCrawlSchedule();
await nextTick();
expect(AutomaticCrawlSchedulerLogic.actions.clearCrawlSchedule).toHaveBeenCalled();
expect(ManageCrawlsPopoverLogic.actions.closePopover).toHaveBeenCalled();
expect(AutomaticCrawlSchedulerLogic.actions.onDoneSubmitting).toHaveBeenCalled();
});
it('flashes an error on a non-404 response', async () => {
jest.spyOn(AutomaticCrawlSchedulerLogic.actions, 'onDoneSubmitting');
http.delete.mockReturnValueOnce(
Promise.reject({
response: { status: 500 },
})
);
AutomaticCrawlSchedulerLogic.actions.deleteCrawlSchedule();
await nextTick();
expect(flashAPIErrors).toHaveBeenCalledWith({
response: { status: 500 },
});
expect(AutomaticCrawlSchedulerLogic.actions.onDoneSubmitting).toHaveBeenCalled();
});
});
});
describe('fetchCrawlSchedule', () => {
it('sets the state of the crawl scheduler on success', async () => {
jest.spyOn(AutomaticCrawlSchedulerLogic.actions, 'setCrawlSchedule');
http.get.mockReturnValueOnce(
Promise.resolve({
unit: CrawlUnits.days,
frequency: '30',
})
);
AutomaticCrawlSchedulerLogic.actions.fetchCrawlSchedule();
await nextTick();
expect(AutomaticCrawlSchedulerLogic.actions.setCrawlSchedule).toHaveBeenCalledWith({
unit: CrawlUnits.days,
frequency: '30',
});
});
describe('error paths', () => {
it('resets the states of the crawl scheduler on a 404 response', async () => {
jest.spyOn(AutomaticCrawlSchedulerLogic.actions, 'clearCrawlSchedule');
http.get.mockReturnValueOnce(
Promise.reject({
response: { status: 404 },
})
);
AutomaticCrawlSchedulerLogic.actions.fetchCrawlSchedule();
await nextTick();
expect(AutomaticCrawlSchedulerLogic.actions.clearCrawlSchedule).toHaveBeenCalled();
});
it('flashes an error on a non-404 response', async () => {
http.get.mockReturnValueOnce(
Promise.reject({
response: { status: 500 },
})
);
AutomaticCrawlSchedulerLogic.actions.fetchCrawlSchedule();
await nextTick();
expect(flashAPIErrors).toHaveBeenCalledWith({
response: { status: 500 },
});
});
});
});
describe('saveChanges', () => {
it('updates or creates a crawl schedule if the user has chosen to crawl automatically', () => {
jest.spyOn(AutomaticCrawlSchedulerLogic.actions, 'submitCrawlSchedule');
mount({
crawlAutomatically: true,
});
AutomaticCrawlSchedulerLogic.actions.saveChanges();
expect(AutomaticCrawlSchedulerLogic.actions.submitCrawlSchedule).toHaveBeenCalled();
});
it('deletes the crawl schedule if the user has chosen to disable automatic crawling', () => {
jest.spyOn(AutomaticCrawlSchedulerLogic.actions, 'deleteCrawlSchedule');
mount({
crawlAutomatically: false,
});
AutomaticCrawlSchedulerLogic.actions.saveChanges();
expect(AutomaticCrawlSchedulerLogic.actions.deleteCrawlSchedule).toHaveBeenCalled();
});
});
describe('submitCrawlSchedule', () => {
it('sets the states of the crawl scheduler and closes the popover on success', async () => {
jest.spyOn(AutomaticCrawlSchedulerLogic.actions, 'setCrawlSchedule');
jest.spyOn(AutomaticCrawlSchedulerLogic.actions, 'onDoneSubmitting');
http.put.mockReturnValueOnce(
Promise.resolve({
unit: CrawlUnits.days,
frequency: 30,
})
);
AutomaticCrawlSchedulerLogic.actions.submitCrawlSchedule();
await nextTick();
expect(AutomaticCrawlSchedulerLogic.actions.setCrawlSchedule).toHaveBeenCalledWith({
unit: CrawlUnits.days,
frequency: 30,
});
expect(ManageCrawlsPopoverLogic.actions.closePopover).toHaveBeenCalled();
expect(AutomaticCrawlSchedulerLogic.actions.onDoneSubmitting).toHaveBeenCalled();
});
it('flashes an error callout if there is an error', async () => {
jest.spyOn(AutomaticCrawlSchedulerLogic.actions, 'onDoneSubmitting');
http.put.mockReturnValueOnce(
Promise.reject({
response: { status: 500 },
})
);
AutomaticCrawlSchedulerLogic.actions.submitCrawlSchedule();
await nextTick();
expect(flashAPIErrors).toHaveBeenCalledWith({
response: { status: 500 },
});
expect(AutomaticCrawlSchedulerLogic.actions.onDoneSubmitting).toHaveBeenCalled();
});
});
});
});

View file

@ -0,0 +1,189 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { kea, MakeLogicType } from 'kea';
import { i18n } from '@kbn/i18n';
import { flashAPIErrors, flashSuccessToast } from '../../../../../shared/flash_messages';
import { HttpLogic } from '../../../../../shared/http';
import { EngineLogic } from '../../../engine';
import { CrawlSchedule, CrawlUnits } from '../../types';
import { ManageCrawlsPopoverLogic } from './manage_crawls_popover_logic';
export interface AutomaticCrawlSchedulerLogicValues {
crawlAutomatically: boolean;
crawlFrequency: CrawlSchedule['frequency'];
crawlUnit: CrawlSchedule['unit'];
isSubmitting: boolean;
}
const DEFAULT_VALUES: Pick<AutomaticCrawlSchedulerLogicValues, 'crawlFrequency' | 'crawlUnit'> = {
crawlFrequency: 7,
crawlUnit: CrawlUnits.days,
};
export interface AutomaticCrawlSchedulerLogicActions {
clearCrawlSchedule(): void;
deleteCrawlSchedule(): void;
disableCrawlAutomatically(): void;
onDoneSubmitting(): void;
enableCrawlAutomatically(): void;
fetchCrawlSchedule(): void;
saveChanges(): void;
setCrawlFrequency(
crawlFrequency: CrawlSchedule['frequency']
): { crawlFrequency: CrawlSchedule['frequency'] };
setCrawlSchedule(crawlSchedule: CrawlSchedule): { crawlSchedule: CrawlSchedule };
setCrawlUnit(crawlUnit: CrawlSchedule['unit']): { crawlUnit: CrawlSchedule['unit'] };
submitCrawlSchedule(): void;
toggleCrawlAutomatically(): void;
}
export const AutomaticCrawlSchedulerLogic = kea<
MakeLogicType<AutomaticCrawlSchedulerLogicValues, AutomaticCrawlSchedulerLogicActions>
>({
path: ['enterprise_search', 'app_search', 'crawler', 'automatic_crawl_scheduler'],
actions: () => ({
clearCrawlSchedule: true,
deleteCrawlSchedule: true,
disableCrawlAutomatically: true,
onDoneSubmitting: true,
enableCrawlAutomatically: true,
fetchCrawlSchedule: true,
saveChanges: true,
setCrawlSchedule: (crawlSchedule: CrawlSchedule) => ({ crawlSchedule }),
submitCrawlSchedule: true,
setCrawlFrequency: (crawlFrequency: CrawlSchedule['frequency']) => ({ crawlFrequency }),
setCrawlUnit: (crawlUnit: CrawlUnits) => ({ crawlUnit }),
toggleCrawlAutomatically: true,
}),
reducers: () => ({
crawlAutomatically: [
false,
{
clearCrawlSchedule: () => false,
setCrawlSchedule: () => true,
toggleCrawlAutomatically: (crawlAutomatically) => !crawlAutomatically,
},
],
crawlFrequency: [
DEFAULT_VALUES.crawlFrequency,
{
clearCrawlSchedule: () => DEFAULT_VALUES.crawlFrequency,
setCrawlSchedule: (_, { crawlSchedule: { frequency } }) => frequency,
setCrawlFrequency: (_, { crawlFrequency }) => crawlFrequency,
},
],
crawlUnit: [
DEFAULT_VALUES.crawlUnit,
{
clearCrawlSchedule: () => DEFAULT_VALUES.crawlUnit,
setCrawlSchedule: (_, { crawlSchedule: { unit } }) => unit,
setCrawlUnit: (_, { crawlUnit }) => crawlUnit,
},
],
isSubmitting: [
false,
{
deleteCrawlSchedule: () => true,
onDoneSubmitting: () => false,
submitCrawlSchedule: () => true,
},
],
}),
listeners: ({ actions, values }) => ({
deleteCrawlSchedule: async () => {
const { http } = HttpLogic.values;
const { engineName } = EngineLogic.values;
const { closePopover } = ManageCrawlsPopoverLogic.actions;
try {
await http.delete(`/api/app_search/engines/${engineName}/crawler/crawl_schedule`);
actions.clearCrawlSchedule();
flashSuccessToast(
i18n.translate(
'xpack.enterpriseSearch.appSearch.crawler.automaticCrawlScheduler.disableCrawlSchedule.successMessage',
{
defaultMessage: 'Automatic crawling has been disabled.',
}
)
);
closePopover();
} catch (e) {
// A 404 is expected and means the user has no crawl schedule to delete
if (e.response?.status === 404) {
actions.clearCrawlSchedule();
closePopover();
} else {
flashAPIErrors(e);
// Keep the popover open
}
} finally {
actions.onDoneSubmitting();
}
},
fetchCrawlSchedule: async () => {
const { http } = HttpLogic.values;
const { engineName } = EngineLogic.values;
try {
const crawlSchedule: CrawlSchedule = await http.get(
`/api/app_search/engines/${engineName}/crawler/crawl_schedule`
);
actions.setCrawlSchedule(crawlSchedule);
} catch (e) {
// A 404 is expected and means the user does not have a crawl schedule
// for this engine. We continue to use the defaults.
if (e.response?.status === 404) {
actions.clearCrawlSchedule();
} else {
flashAPIErrors(e);
}
}
},
saveChanges: () => {
if (values.crawlAutomatically) {
actions.submitCrawlSchedule();
} else {
actions.deleteCrawlSchedule();
}
},
submitCrawlSchedule: async () => {
const { http } = HttpLogic.values;
const { engineName } = EngineLogic.values;
const { closePopover } = ManageCrawlsPopoverLogic.actions;
try {
const crawlSchedule: CrawlSchedule = await http.put(
`/api/app_search/engines/${engineName}/crawler/crawl_schedule`,
{
body: JSON.stringify({
unit: values.crawlUnit,
frequency: values.crawlFrequency,
}),
}
);
actions.setCrawlSchedule(crawlSchedule);
flashSuccessToast(
i18n.translate(
'xpack.enterpriseSearch.appSearch.crawler.automaticCrawlScheduler.submitCrawlSchedule.successMessage',
{
defaultMessage: 'Your automatic crawling schedule has been updated.',
}
)
);
closePopover();
} catch (e) {
flashAPIErrors(e);
} finally {
actions.onDoneSubmitting();
}
},
}),
});
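
For reference, a sketch of the HTTP contract these listeners assume; the HttpLike interface below is a stand-in for Kibana's http client, and the engine name is a placeholder:

import { CrawlSchedule } from '../../types';

// Minimal structural type for the pieces of the http client used above.
interface HttpLike {
  get<T>(path: string): Promise<T>;
  put<T>(path: string, options: { body: string }): Promise<T>;
  delete(path: string): Promise<unknown>;
}

export const exampleCrawlScheduleCalls = async (http: HttpLike, engineName: string) => {
  // Read the current schedule; a 404 means no schedule has been configured yet.
  const current = await http.get<CrawlSchedule>(
    `/api/app_search/engines/${engineName}/crawler/crawl_schedule`
  );

  // Create or update the schedule; the body matches the PUT route's validation.
  const updated = await http.put<CrawlSchedule>(
    `/api/app_search/engines/${engineName}/crawler/crawl_schedule`,
    { body: JSON.stringify({ unit: current.unit, frequency: 7 }) }
  );

  // Deleting the schedule disables automatic crawling.
  await http.delete(`/api/app_search/engines/${engineName}/crawler/crawl_schedule`);

  return updated;
};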

View file

@ -9,7 +9,7 @@ import { setMockActions, setMockValues } from '../../../../../__mocks__/kea_logi
import React from 'react';
-import { shallow } from 'enzyme';
+import { ReactWrapper, shallow } from 'enzyme';
import {
EuiButton,
@ -22,6 +22,7 @@ import {
import { mountWithIntl } from '../../../../../test_helpers';
import { CrawlerDomain } from '../../types';
import { AutomaticCrawlScheduler } from './automatic_crawl_scheduler';
import { ManageCrawlsPopover } from './manage_crawls_popover';
const MOCK_ACTIONS = {
@ -57,22 +58,33 @@ describe('ManageCrawlsPopover', () => {
expect(wrapper.find(EuiContextMenuPanel)).toHaveLength(0);
});
-it('includes a context menu when open', () => {
-setMockValues({
-...MOCK_VALUES,
-isOpen: true,
+describe('when open', () => {
+let wrapper: ReactWrapper;
+let menuItems: ReactWrapper;
+beforeEach(() => {
+setMockValues({
+...MOCK_VALUES,
+isOpen: true,
+});
+wrapper = mountWithIntl(<ManageCrawlsPopover domain={MOCK_DOMAIN} />);
+menuItems = wrapper
+.find(EuiContextMenuPanel)
+.find(EuiResizeObserver)
+.find(EuiContextMenuItem);
});
-const wrapper = mountWithIntl(<ManageCrawlsPopover domain={MOCK_DOMAIN} />);
+it('includes a button to reapply crawl rules', () => {
+menuItems.at(0).simulate('click');
+expect(MOCK_ACTIONS.reApplyCrawlRules).toHaveBeenCalledWith(MOCK_DOMAIN);
+});
-const menuItems = wrapper
-.find(EuiContextMenuPanel)
-.find(EuiResizeObserver)
-.find(EuiContextMenuItem);
+it('includes a form to set a crawl schedule', () => {
+menuItems.at(1).simulate('click');
-expect(menuItems).toHaveLength(1);
-menuItems.first().simulate('click');
-expect(MOCK_ACTIONS.reApplyCrawlRules).toHaveBeenCalledWith(MOCK_DOMAIN);
+expect(wrapper.find(EuiContextMenuPanel).find(AutomaticCrawlScheduler)).toHaveLength(1);
});
});
});

View file

@ -15,7 +15,7 @@ import { i18n } from '@kbn/i18n';
import { CrawlerDomain } from '../../types';
-// import { AutomaticCrawlScheduler } from './automatic_crawl_scheduler';
+import { AutomaticCrawlScheduler } from './automatic_crawl_scheduler';
import { ManageCrawlsPopoverLogic } from './manage_crawls_popover_logic';
@ -40,19 +40,25 @@ export const ManageCrawlsPopover: React.FC<ManageCrawlsPopoverProps> = ({ domain
icon: 'refresh',
onClick: () => reApplyCrawlRules(domain),
},
-// {
-// name: 'Automatic Crawling',
-// icon: 'gear',
-// panel: 1,
-// },
+{
+name: i18n.translate(
+'xpack.enterpriseSearch.appSearch.crawler.manageCrawlsPopover.automaticCrawlingButtonLabel',
+{ defaultMessage: 'Automatic crawling' }
+),
+icon: 'gear',
+panel: 1,
+},
],
},
-// {
-// id: 1,
-// title: 'Automatic Crawling',
-// width: 400,
-// content: <AutomaticCrawlScheduler />,
-// },
+{
+id: 1,
+title: i18n.translate(
+'xpack.enterpriseSearch.appSearch.crawler.manageCrawlsPopover.automaticCrawlingTitle',
+{ defaultMessage: 'Automatic crawling' }
+),
+width: 400,
+content: <AutomaticCrawlScheduler />,
+},
];
return (

View file

@ -76,7 +76,7 @@ describe('ManageCrawlsPopoverLogic', () => {
} as CrawlerDomain);
await nextTick();
-expect(flashSuccessToast).toHaveBeenCalled();
+expect(flashSuccessToast).toHaveBeenCalledWith(expect.any(String));
expect(ManageCrawlsPopoverLogic.actions.closePopover).toHaveBeenCalled();
});

View file

@ -7,6 +7,8 @@
import { kea, MakeLogicType } from 'kea';
import { i18n } from '@kbn/i18n';
import { flashAPIErrors, flashSuccessToast } from '../../../../../shared/flash_messages';
import { HttpLogic } from '../../../../../shared/http';
import { EngineLogic } from '../../../engine';
@ -55,7 +57,14 @@ export const ManageCrawlsPopoverLogic = kea<
body: JSON.stringify(requestBody),
});
-flashSuccessToast('Crawl Rules are being re-applied in the background');
+flashSuccessToast(
+i18n.translate(
+'xpack.enterpriseSearch.appSearch.crawler.manageCrawlsPopover.reApplyCrawlRules.successMessage',
+{
+defaultMessage: 'Crawl rules are being re-applied in the background',
+}
+)
+);
} catch (e) {
flashAPIErrors(e);
} finally {

View file

@ -175,3 +175,17 @@ export const readableCrawlerStatuses: { [key in CrawlerStatus]: string } = {
{ defaultMessage: 'Skipped' }
),
};
export interface CrawlSchedule {
frequency: number;
unit: CrawlUnits;
}
// The BE uses a singular form of each unit
// See shared_togo/app/models/shared_togo/crawler/crawl_schedule.rb
export enum CrawlUnits {
hours = 'hour',
days = 'day',
weeks = 'week',
months = 'month',
}
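
A small usage sketch of the new types (the import path and variable name are illustrative); note that the enum values are the singular strings the backend expects on the wire:

import { CrawlSchedule, CrawlUnits } from '../../types';

// "Every 2 weeks" expressed as a CrawlSchedule value.
const biweekly: CrawlSchedule = {
  frequency: 2,
  unit: CrawlUnits.weeks,
};

// Serializes with the singular unit string: {"frequency":2,"unit":"week"}
JSON.stringify(biweekly);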

View file

@ -0,0 +1,24 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { i18n } from '@kbn/i18n';
export const HOURS_UNIT_LABEL = i18n.translate('xpack.enterpriseSearch.units.hoursLabel', {
defaultMessage: 'Hours',
});
export const DAYS_UNIT_LABEL = i18n.translate('xpack.enterpriseSearch.units.daysLabel', {
defaultMessage: 'Days',
});
export const WEEKS_UNIT_LABEL = i18n.translate('xpack.enterpriseSearch.units.weeksLabel', {
defaultMessage: 'Weeks',
});
export const MONTHS_UNIT_LABEL = i18n.translate('xpack.enterpriseSearch.units.monthsLabel', {
defaultMessage: 'Months',
});

View file

@ -365,4 +365,133 @@ describe('crawler routes', () => {
mockRouter.shouldThrow(request);
});
});
describe('GET /api/app_search/engines/{name}/crawler/crawl_schedule', () => {
let mockRouter: MockRouter;
beforeEach(() => {
jest.clearAllMocks();
mockRouter = new MockRouter({
method: 'get',
path: '/api/app_search/engines/{name}/crawler/crawl_schedule',
});
registerCrawlerRoutes({
...mockDependencies,
router: mockRouter.router,
});
});
it('creates a request to enterprise search', () => {
expect(mockRequestHandler.createRequest).toHaveBeenCalledWith({
path: '/api/as/v0/engines/:name/crawler/crawl_schedule',
});
});
it('validates correctly', () => {
const request = {
params: { name: 'some-engine' },
};
mockRouter.shouldValidate(request);
});
it('fails validation without a name param', () => {
const request = {
params: {},
};
mockRouter.shouldThrow(request);
});
});
describe('PUT /api/app_search/engines/{name}/crawler/crawl_schedule', () => {
let mockRouter: MockRouter;
beforeEach(() => {
jest.clearAllMocks();
mockRouter = new MockRouter({
method: 'put',
path: '/api/app_search/engines/{name}/crawler/crawl_schedule',
});
registerCrawlerRoutes({
...mockDependencies,
router: mockRouter.router,
});
});
it('creates a request to enterprise search', () => {
expect(mockRequestHandler.createRequest).toHaveBeenCalledWith({
path: '/api/as/v0/engines/:name/crawler/crawl_schedule',
});
});
it('validates correctly', () => {
const request = {
params: { name: 'some-engine' },
body: { unit: 'day', frequency: 7 },
};
mockRouter.shouldValidate(request);
});
it('fails validation without a name param', () => {
const request = {
params: {},
body: { unit: 'day', frequency: 7 },
};
mockRouter.shouldThrow(request);
});
it('fails validation without a unit property in body', () => {
const request = {
params: { name: 'some-engine' },
body: { frequency: 7 },
};
mockRouter.shouldThrow(request);
});
it('fails validation without a frequency property in body', () => {
const request = {
params: { name: 'some-engine' },
body: { unit: 'day' },
};
mockRouter.shouldThrow(request);
});
});
describe('DELETE /api/app_search/engines/{name}/crawler/crawl_schedule', () => {
let mockRouter: MockRouter;
beforeEach(() => {
jest.clearAllMocks();
mockRouter = new MockRouter({
method: 'delete',
path: '/api/app_search/engines/{name}/crawler/crawl_schedule',
});
registerCrawlerRoutes({
...mockDependencies,
router: mockRouter.router,
});
});
it('creates a request to enterprise search', () => {
expect(mockRequestHandler.createRequest).toHaveBeenCalledWith({
path: '/api/as/v0/engines/:name/crawler/crawl_schedule',
});
});
it('validates correctly', () => {
const request = {
params: { name: 'some-engine' },
};
mockRouter.shouldValidate(request);
});
it('fails validation without a name param', () => {
const request = {
params: {},
};
mockRouter.shouldThrow(request);
});
});
});

View file

@ -158,4 +158,50 @@ export function registerCrawlerRoutes({
path: '/api/as/v0/engines/:name/crawler/process_crawls',
})
);
router.get(
{
path: '/api/app_search/engines/{name}/crawler/crawl_schedule',
validate: {
params: schema.object({
name: schema.string(),
}),
},
},
enterpriseSearchRequestHandler.createRequest({
path: '/api/as/v0/engines/:name/crawler/crawl_schedule',
})
);
router.put(
{
path: '/api/app_search/engines/{name}/crawler/crawl_schedule',
validate: {
params: schema.object({
name: schema.string(),
}),
body: schema.object({
unit: schema.string(),
frequency: schema.number(),
}),
},
},
enterpriseSearchRequestHandler.createRequest({
path: '/api/as/v0/engines/:name/crawler/crawl_schedule',
})
);
router.delete(
{
path: '/api/app_search/engines/{name}/crawler/crawl_schedule',
validate: {
params: schema.object({
name: schema.string(),
}),
},
},
enterpriseSearchRequestHandler.createRequest({
path: '/api/as/v0/engines/:name/crawler/crawl_schedule',
})
);
}
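
As a reference for what the PUT route accepts, a small self-contained check against the same body schema using @kbn/config-schema (the constant name is illustrative):

import { schema } from '@kbn/config-schema';

// Same shape as the PUT body validation registered above.
const crawlScheduleBody = schema.object({
  unit: schema.string(),
  frequency: schema.number(),
});

crawlScheduleBody.validate({ unit: 'day', frequency: 7 }); // ok
// crawlScheduleBody.validate({ unit: 'day' }); // would throw: frequency is required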