[8.6] [Enterprise Search] Fix support for cron scheduling for Elastic Crawler Indices (#146357) (#146463)
# Backport

This will backport the following commits from `main` to `8.6`:

- [[Enterprise Search] Fix support for cron scheduling for Elastic Crawler Indices (#146357)](https://github.com/elastic/kibana/pull/146357)

### Questions?

Please refer to the [Backport tool documentation](https://github.com/sqren/backport)

Co-authored-by: Byron Hulcher <byronhulcher@gmail.com>
Parent: 301bd83458
Commit: c4de26f9e0

3 changed files with 59 additions and 287 deletions
Changes to the `AutomaticCrawlScheduler` component:

```diff
@@ -27,7 +27,6 @@ import {
 
 import { i18n } from '@kbn/i18n';
 
-import { CrawlerIndex } from '../../../../../../../common/types/indices';
 import {
   HOURS_UNIT_LABEL,
   DAYS_UNIT_LABEL,
@@ -36,23 +35,21 @@ import {
 } from '../../../../../shared/constants';
 import { EnterpriseSearchCronEditor } from '../../../../../shared/cron_editor/enterprise_search_cron_editor';
 import { docLinks } from '../../../../../shared/doc_links/doc_links';
-import { UpdateConnectorSchedulingApiLogic } from '../../../../api/connector/update_connector_scheduling_api_logic';
 import { CrawlUnits } from '../../../../api/crawler/types';
 import { isCrawlerIndex } from '../../../../utils/indices';
-import { IndexViewLogic } from '../../index_view_logic';
 
 import { AutomaticCrawlSchedulerLogic } from './automatic_crawl_scheduler_logic';
 
 export const AutomaticCrawlScheduler: React.FC = () => {
-  const { index } = useValues(IndexViewLogic);
-  const { makeRequest } = useActions(UpdateConnectorSchedulingApiLogic);
+  const {
+    setCrawlAutomatically,
+    setCrawlFrequency,
+    setCrawlUnit,
+    setUseConnectorSchedule,
+    submitConnectorSchedule,
+  } = useActions(AutomaticCrawlSchedulerLogic);
 
-  const scheduling = (index as CrawlerIndex)?.connector?.scheduling;
-
-  const { setCrawlFrequency, setCrawlUnit, setUseConnectorSchedule, toggleCrawlAutomatically } =
-    useActions(AutomaticCrawlSchedulerLogic);
-
-  const { crawlAutomatically, crawlFrequency, crawlUnit, useConnectorSchedule } = useValues(
+  const { index, crawlAutomatically, crawlFrequency, crawlUnit, useConnectorSchedule } = useValues(
     AutomaticCrawlSchedulerLogic
   );
 
@@ -84,7 +81,7 @@ export const AutomaticCrawlScheduler: React.FC = () => {
             defaultMessage: 'Enable recurring crawls with the following schedule',
           }
         )}
-        onChange={toggleCrawlAutomatically}
+        onChange={(e) => setCrawlAutomatically(e.target.checked)}
         compressed
       />
     </EuiFormRow>
@@ -124,11 +121,11 @@ export const AutomaticCrawlScheduler: React.FC = () => {
       >
         <EnterpriseSearchCronEditor
           disabled={!crawlAutomatically || !useConnectorSchedule}
-          scheduling={scheduling}
+          scheduling={index.connector.scheduling}
           onChange={(newScheduling) =>
-            makeRequest({
-              connectorId: index.connector.id,
-              scheduling: { ...newScheduling },
+            submitConnectorSchedule({
+              ...newScheduling,
+              enabled: true,
             })
           }
         />
```
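The net effect of the component change: the cron editor no longer calls `UpdateConnectorSchedulingApiLogic.makeRequest` directly with a `connectorId` read off the index; it dispatches the logic's new `submitConnectorSchedule` action with the edited schedule forced to `enabled: true`, and the logic resolves the connector id itself. A minimal sketch of that payload, assuming (not shown in this diff) the 8.x `ConnectorScheduling` shape of `{ enabled: boolean; interval: string }` with a Quartz-style cron interval:

```ts
// Sketch only: assumed ConnectorScheduling shape; the real type is imported
// from common/types/connectors in the logic file further down.
interface ConnectorScheduling {
  enabled: boolean;
  interval: string; // Quartz-style cron, e.g. '0 0 0 * * ?' = daily at midnight
}

// What the cron editor's onChange now produces: the edited schedule spread
// into a new object with `enabled` forced on.
function buildSubmittedSchedule(newScheduling: ConnectorScheduling): ConnectorScheduling {
  return { ...newScheduling, enabled: true };
}

// Example: the cron expression is preserved; only `enabled` changes.
const submitted = buildSubmittedSchedule({ enabled: false, interval: '0 0 0 * * ?' });
// => { enabled: true, interval: '0 0 0 * * ?' }
```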
The `AutomaticCrawlSchedulerLogic` test suite is deleted outright:

```diff
@@ -1,266 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0; you may not use this file except in compliance with the Elastic License
- * 2.0.
- */
-
-import {
-  LogicMounter,
-  mockHttpValues,
-  mockFlashMessageHelpers,
-} from '../../../../../__mocks__/kea_logic';
-import '../../__mocks__/index_name_logic.mock';
-
-import { nextTick } from '@kbn/test-jest-helpers';
-
-import { CrawlUnits } from '../../../../api/crawler/types';
-
-import { AutomaticCrawlSchedulerLogic } from './automatic_crawl_scheduler_logic';
-
-describe('AutomaticCrawlSchedulerLogic', () => {
-  const { mount } = new LogicMounter(AutomaticCrawlSchedulerLogic);
-  const { http } = mockHttpValues;
-  const { flashAPIErrors } = mockFlashMessageHelpers;
-
-  beforeEach(() => {
-    jest.clearAllMocks();
-  });
-
-  it('has expected default values', () => {
-    mount();
-
-    expect(AutomaticCrawlSchedulerLogic.values).toEqual({
-      crawlAutomatically: false,
-      crawlFrequency: 24,
-      crawlUnit: CrawlUnits.hours,
-      isSubmitting: false,
-      useConnectorSchedule: false,
-    });
-  });
-
-  describe('actions', () => {
-    describe('clearCrawlSchedule', () => {
-      it('sets crawl schedule related values to their defaults', () => {
-        mount({
-          crawlAutomatically: true,
-          crawlFrequency: 36,
-          crawlUnit: CrawlUnits.hours,
-        });
-
-        AutomaticCrawlSchedulerLogic.actions.clearCrawlSchedule();
-
-        expect(AutomaticCrawlSchedulerLogic.values).toMatchObject({
-          crawlAutomatically: false,
-          crawlFrequency: 24,
-          crawlUnit: CrawlUnits.hours,
-        });
-      });
-    });
-
-    describe('toggleCrawlAutomatically', () => {
-      it('toggles the ability to crawl automatically', () => {
-        mount({
-          crawlAutomatically: false,
-        });
-
-        AutomaticCrawlSchedulerLogic.actions.toggleCrawlAutomatically();
-
-        expect(AutomaticCrawlSchedulerLogic.values.crawlAutomatically).toEqual(true);
-
-        AutomaticCrawlSchedulerLogic.actions.toggleCrawlAutomatically();
-
-        expect(AutomaticCrawlSchedulerLogic.values.crawlAutomatically).toEqual(false);
-      });
-    });
-
-    describe('onDoneSubmitting', () => {
-      mount({
-        isSubmitting: true,
-      });
-
-      AutomaticCrawlSchedulerLogic.actions.onDoneSubmitting();
-
-      expect(AutomaticCrawlSchedulerLogic.values.isSubmitting).toEqual(false);
-    });
-
-    describe('setCrawlFrequency', () => {
-      it("sets the crawl schedule's frequency", () => {
-        mount({
-          crawlFrequency: 36,
-        });
-
-        AutomaticCrawlSchedulerLogic.actions.setCrawlFrequency(12);
-
-        expect(AutomaticCrawlSchedulerLogic.values.crawlFrequency).toEqual(12);
-      });
-    });
-
-    describe('setCrawlSchedule', () => {
-      it("sets the crawl schedule's frequency and unit, and enables crawling automatically", () => {
-        mount();
-
-        AutomaticCrawlSchedulerLogic.actions.setCrawlSchedule({
-          frequency: 3,
-          unit: CrawlUnits.hours,
-          useConnectorSchedule: true,
-        });
-
-        expect(AutomaticCrawlSchedulerLogic.values).toMatchObject({
-          crawlAutomatically: true,
-          crawlFrequency: 3,
-          crawlUnit: CrawlUnits.hours,
-        });
-      });
-    });
-
-    describe('setCrawlUnit', () => {
-      it("sets the crawl schedule's unit", () => {
-        mount({
-          crawlUnit: CrawlUnits.months,
-        });
-
-        AutomaticCrawlSchedulerLogic.actions.setCrawlUnit(CrawlUnits.weeks);
-
-        expect(AutomaticCrawlSchedulerLogic.values.crawlUnit).toEqual(CrawlUnits.weeks);
-      });
-    });
-  });
-
-  describe('listeners', () => {
-    describe('deleteCrawlSchedule', () => {
-      describe('error paths', () => {
-        it('resets the states of the crawl scheduler on a 404 response', async () => {
-          jest.spyOn(AutomaticCrawlSchedulerLogic.actions, 'onDoneSubmitting');
-          http.delete.mockReturnValueOnce(
-            Promise.reject({
-              response: { status: 404 },
-            })
-          );
-
-          AutomaticCrawlSchedulerLogic.actions.deleteCrawlSchedule();
-          await nextTick();
-
-          expect(AutomaticCrawlSchedulerLogic.actions.onDoneSubmitting).toHaveBeenCalled();
-        });
-
-        it('flashes an error on a non-404 response', async () => {
-          jest.spyOn(AutomaticCrawlSchedulerLogic.actions, 'onDoneSubmitting');
-          http.delete.mockReturnValueOnce(
-            Promise.reject({
-              response: { status: 500 },
-            })
-          );
-
-          AutomaticCrawlSchedulerLogic.actions.deleteCrawlSchedule();
-          await nextTick();
-
-          expect(flashAPIErrors).toHaveBeenCalledWith({
-            response: { status: 500 },
-          });
-          expect(AutomaticCrawlSchedulerLogic.actions.onDoneSubmitting).toHaveBeenCalled();
-        });
-      });
-    });
-
-    describe('fetchCrawlSchedule', () => {
-      it('set the state of the crawl scheduler on success', async () => {
-        jest.spyOn(AutomaticCrawlSchedulerLogic.actions, 'setCrawlSchedule');
-        http.get.mockReturnValueOnce(
-          Promise.resolve({
-            unit: CrawlUnits.days,
-            frequency: '30',
-          })
-        );
-
-        AutomaticCrawlSchedulerLogic.actions.fetchCrawlSchedule();
-        await nextTick();
-
-        expect(AutomaticCrawlSchedulerLogic.actions.setCrawlSchedule).toHaveBeenCalledWith({
-          unit: CrawlUnits.days,
-          frequency: '30',
-        });
-      });
-
-      describe('error paths', () => {
-        it('flashes an error on a non-404 response', async () => {
-          http.get.mockReturnValueOnce(
-            Promise.reject({
-              response: { status: 500 },
-            })
-          );
-
-          AutomaticCrawlSchedulerLogic.actions.fetchCrawlSchedule();
-          await nextTick();
-
-          expect(flashAPIErrors).toHaveBeenCalledWith({
-            response: { status: 500 },
-          });
-        });
-      });
-    });
-
-    describe('saveChanges', () => {
-      it('updates or creates a crawl schedule if the user has chosen to crawl automatically', () => {
-        jest.spyOn(AutomaticCrawlSchedulerLogic.actions, 'submitCrawlSchedule');
-        mount({
-          crawlAutomatically: true,
-        });
-
-        AutomaticCrawlSchedulerLogic.actions.saveChanges();
-
-        expect(AutomaticCrawlSchedulerLogic.actions.submitCrawlSchedule);
-      });
-
-      it('deletes the crawl schedule if the user has chosen to disable automatic crawling', () => {
-        jest.spyOn(AutomaticCrawlSchedulerLogic.actions, 'deleteCrawlSchedule');
-        mount({
-          crawlAutomatically: false,
-        });
-
-        AutomaticCrawlSchedulerLogic.actions.saveChanges();
-
-        expect(AutomaticCrawlSchedulerLogic.actions.deleteCrawlSchedule);
-      });
-    });
-
-    describe('submitCrawlSchedule', () => {
-      it('sets the states of the crawl scheduler and closes the popover on success', async () => {
-        jest.spyOn(AutomaticCrawlSchedulerLogic.actions, 'setCrawlSchedule');
-        jest.spyOn(AutomaticCrawlSchedulerLogic.actions, 'onDoneSubmitting');
-        http.put.mockReturnValueOnce(
-          Promise.resolve({
-            unit: CrawlUnits.days,
-            frequency: 30,
-          })
-        );
-
-        AutomaticCrawlSchedulerLogic.actions.submitCrawlSchedule();
-        await nextTick();
-
-        expect(AutomaticCrawlSchedulerLogic.actions.setCrawlSchedule).toHaveBeenCalledWith({
-          unit: CrawlUnits.days,
-          frequency: 30,
-        });
-        expect(AutomaticCrawlSchedulerLogic.actions.onDoneSubmitting).toHaveBeenCalled();
-      });
-
-      it('flashes an error callout if there is an error', async () => {
-        jest.spyOn(AutomaticCrawlSchedulerLogic.actions, 'onDoneSubmitting');
-        http.delete.mockReturnValueOnce(
-          Promise.reject({
-            response: { status: 500 },
-          })
-        );
-
-        AutomaticCrawlSchedulerLogic.actions.deleteCrawlSchedule();
-        await nextTick();
-
-        expect(flashAPIErrors).toHaveBeenCalledWith({
-          response: { status: 500 },
-        });
-        expect(AutomaticCrawlSchedulerLogic.actions.onDoneSubmitting).toHaveBeenCalled();
-      });
-    });
-  });
-});
```
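These specs covered the pre-connector scheduling behaviour, including the `toggleCrawlAutomatically` action that this commit removes; the commit adds no replacement spec. The state change that replaces the toggle is set-style rather than flip-style, shown here as a self-contained kea sketch (an illustration, not code from the PR):

```ts
import { kea, MakeLogicType } from 'kea';

interface Values {
  crawlAutomatically: boolean;
}
interface Actions {
  setCrawlAutomatically(crawlAutomatically: boolean): { crawlAutomatically: boolean };
}

// Where the old toggle action blindly flipped state, the set action records
// exactly what the EuiSwitch reported (e.target.checked), so the store cannot
// drift out of sync with the control that drives it.
export const ExampleSchedulerLogic = kea<MakeLogicType<Values, Actions>>({
  actions: {
    setCrawlAutomatically: (crawlAutomatically) => ({ crawlAutomatically }),
  },
  reducers: {
    crawlAutomatically: [
      false, // same default as the real logic
      {
        setCrawlAutomatically: (_, { crawlAutomatically }) => crawlAutomatically,
      },
    ],
  },
});
```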
Changes to `AutomaticCrawlSchedulerLogic` itself:

```diff
@@ -7,16 +7,27 @@
 
 import { kea, MakeLogicType } from 'kea';
 
+import { ConnectorScheduling } from '../../../../../../../common/types/connectors';
+
+import { CrawlerIndex } from '../../../../../../../common/types/indices';
+import { Actions } from '../../../../../shared/api_logic/create_api_logic';
+
 import { flashAPIErrors } from '../../../../../shared/flash_messages';
 import { HttpLogic } from '../../../../../shared/http';
+import {
+  UpdateConnectorSchedulingApiLogic,
+  UpdateConnectorSchedulingArgs,
+} from '../../../../api/connector/update_connector_scheduling_api_logic';
 import { CrawlSchedule, CrawlScheduleFromServer, CrawlUnits } from '../../../../api/crawler/types';
 import { crawlScheduleServerToClient } from '../../../../api/crawler/utils';
 import { IndexNameLogic } from '../../index_name_logic';
+import { IndexViewLogic } from '../../index_view_logic';
 
 export interface AutomaticCrawlSchedulerLogicValues {
   crawlAutomatically: boolean;
   crawlFrequency: CrawlSchedule['frequency'];
   crawlUnit: CrawlSchedule['unit'];
+  index: CrawlerIndex;
   isSubmitting: boolean;
   useConnectorSchedule: CrawlSchedule['useConnectorSchedule'];
 }
@@ -33,7 +44,9 @@ export interface AutomaticCrawlSchedulerLogicActions {
   onDoneSubmitting(): void;
   enableCrawlAutomatically(): void;
   fetchCrawlSchedule(): void;
+  makeUpdateConnectorSchedulingRequest: Actions<{}, UpdateConnectorSchedulingArgs>['makeRequest'];
   saveChanges(): void;
+  setCrawlAutomatically(crawlAutomatically: boolean): { crawlAutomatically: boolean };
   setCrawlFrequency(crawlFrequency: CrawlSchedule['frequency']): {
     crawlFrequency: CrawlSchedule['frequency'];
   };
@@ -42,14 +55,25 @@ export interface AutomaticCrawlSchedulerLogicActions {
   setUseConnectorSchedule(useConnectorSchedule: CrawlSchedule['useConnectorSchedule']): {
     useConnectorSchedule: CrawlSchedule['useConnectorSchedule'];
   };
+  submitConnectorSchedule(scheduling: ConnectorScheduling): { scheduling: ConnectorScheduling };
   submitCrawlSchedule(): void;
-  toggleCrawlAutomatically(): void;
+  updateConnectorSchedulingApiError: Actions<{}, UpdateConnectorSchedulingArgs>['apiError'];
 }
 
 export const AutomaticCrawlSchedulerLogic = kea<
   MakeLogicType<AutomaticCrawlSchedulerLogicValues, AutomaticCrawlSchedulerLogicActions>
 >({
   path: ['enterprise_search', 'crawler', 'automatic_crawl_scheduler_logic'],
+  connect: {
+    actions: [
+      UpdateConnectorSchedulingApiLogic,
+      [
+        'makeRequest as makeUpdateConnectorSchedulingRequest',
+        'apiError as updateConnectorSchedulingApiError',
+      ],
+    ],
+    values: [IndexViewLogic, ['index']],
+  },
   actions: () => ({
     clearCrawlSchedule: true,
     deleteCrawlSchedule: true,
@@ -59,19 +83,20 @@ export const AutomaticCrawlSchedulerLogic = kea<
     fetchCrawlSchedule: true,
     saveChanges: true,
     setCrawlSchedule: (crawlSchedule: CrawlSchedule) => ({ crawlSchedule }),
+    submitConnectorSchedule: (scheduling) => ({ scheduling }),
     submitCrawlSchedule: true,
+    setCrawlAutomatically: (crawlAutomatically) => ({ crawlAutomatically }),
     setCrawlFrequency: (crawlFrequency: string) => ({ crawlFrequency }),
     setCrawlUnit: (crawlUnit: CrawlUnits) => ({ crawlUnit }),
     setUseConnectorSchedule: (useConnectorSchedule) => ({ useConnectorSchedule }),
-    toggleCrawlAutomatically: true,
   }),
   reducers: () => ({
     crawlAutomatically: [
       false,
       {
         clearCrawlSchedule: () => false,
+        setCrawlAutomatically: (_, { crawlAutomatically }) => crawlAutomatically,
         setCrawlSchedule: () => true,
-        toggleCrawlAutomatically: (crawlAutomatically) => !crawlAutomatically,
       },
     ],
     crawlFrequency: [
@@ -80,6 +105,8 @@ export const AutomaticCrawlSchedulerLogic = kea<
         clearCrawlSchedule: () => DEFAULT_VALUES.crawlFrequency,
         setCrawlSchedule: (_, { crawlSchedule: { frequency } }) => frequency,
         setCrawlFrequency: (_, { crawlFrequency }) => crawlFrequency,
+        setUseConnectorSchedule: (crawlFrequency) =>
+          crawlFrequency || DEFAULT_VALUES.crawlFrequency,
       },
     ],
     crawlUnit: [
@@ -88,6 +115,7 @@ export const AutomaticCrawlSchedulerLogic = kea<
         clearCrawlSchedule: () => DEFAULT_VALUES.crawlUnit,
         setCrawlSchedule: (_, { crawlSchedule: { unit } }) => unit,
         setCrawlUnit: (_, { crawlUnit }) => crawlUnit,
+        setUseConnectorSchedule: (crawlUnit) => crawlUnit || DEFAULT_VALUES.crawlUnit,
       },
     ],
     isSubmitting: [
@@ -101,6 +129,8 @@ export const AutomaticCrawlSchedulerLogic = kea<
     useConnectorSchedule: [
       false,
       {
+        setCrawlAutomatically: (useConnectorSchedule, { crawlAutomatically }) =>
+          crawlAutomatically || useConnectorSchedule,
         setCrawlSchedule: (_, { crawlSchedule: { useConnectorSchedule = false } }) =>
           useConnectorSchedule,
         setUseConnectorSchedule: (_, { useConnectorSchedule }) => useConnectorSchedule,
@@ -148,11 +178,21 @@ export const AutomaticCrawlSchedulerLogic = kea<
       } else {
         actions.deleteCrawlSchedule();
       }
+      actions.submitConnectorSchedule({
+        ...values.index.connector.scheduling,
+        enabled: values.crawlAutomatically && values.useConnectorSchedule,
+      });
     },
-    setCrawlUnit: actions.saveChanges,
+    setCrawlAutomatically: actions.saveChanges,
     setCrawlFrequency: actions.saveChanges,
+    setCrawlUnit: actions.saveChanges,
     setUseConnectorSchedule: actions.saveChanges,
-    toggleCrawlAutomatically: actions.saveChanges,
+    submitConnectorSchedule: ({ scheduling }) => {
+      actions.makeUpdateConnectorSchedulingRequest({
+        connectorId: values.index.connector.id,
+        scheduling,
+      });
+    },
     submitCrawlSchedule: async () => {
       const { http } = HttpLogic.values;
       const { indexName } = IndexNameLogic.values;
@@ -179,6 +219,7 @@ export const AutomaticCrawlSchedulerLogic = kea<
         actions.onDoneSubmitting();
       }
     },
+    updateConnectorSchedulingApiError: (e) => flashAPIErrors(e),
   }),
   events: ({ actions }) => ({
     afterMount: () => {
```
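Two patterns in this file carry the fix. kea's `connect` block borrows `makeRequest` and `apiError` from `UpdateConnectorSchedulingApiLogic` under local aliases and reads `index` from `IndexViewLogic`; and every schedule edit funnels through `saveChanges`, which now also submits the connector schedule with `enabled` set to `crawlAutomatically && useConnectorSchedule`. A self-contained sketch of the aliasing pattern, using stand-in logics rather than the Kibana ones:

```ts
import { kea } from 'kea';

// Stand-in for an API logic exposing a `makeRequest` action.
const StandInApiLogic = kea({
  actions: {
    makeRequest: (payload: { connectorId: string }) => ({ payload }),
  },
});

// Stand-in for a view logic exposing an `index` value.
const StandInViewLogic = kea({
  reducers: {
    index: [{ connector: { id: 'demo-connector' } }, {}],
  },
});

export const StandInSchedulerLogic = kea({
  connect: {
    // Borrow the action under a local, more descriptive alias…
    actions: [StandInApiLogic, ['makeRequest as makeUpdateRequest']],
    // …and read `index` as though it were a local value.
    values: [StandInViewLogic, ['index']],
  },
  listeners: ({ values }) => ({
    // Listening on the alias: fires whenever the aliased action dispatches.
    makeUpdateRequest: ({ payload }) => {
      console.log(payload.connectorId, values.index.connector.id);
    },
  }),
});
```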