mirror of
https://github.com/elastic/kibana.git
synced 2025-04-24 17:59:23 -04:00
[App Search] Added a SitemapsTable to the Crawler view (#108405)
This commit is contained in:
parent
a8b4433294
commit
5bfba1b014
8 changed files with 562 additions and 1 deletions
|
@ -0,0 +1,185 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { mockFlashMessageHelpers, setMockActions } from '../../../../__mocks__/kea_logic';
|
||||
|
||||
import React from 'react';
|
||||
|
||||
import { shallow } from 'enzyme';
|
||||
|
||||
import { EuiEmptyPrompt, EuiFieldText } from '@elastic/eui';
|
||||
|
||||
import { GenericEndpointInlineEditableTable } from '../../../../shared/tables/generic_endpoint_inline_editable_table';
|
||||
|
||||
import { mountWithIntl } from '../../../../test_helpers';
|
||||
|
||||
import { SitemapsTable } from './sitemaps_table';
|
||||
|
||||
// Tests for the SitemapsTable component: rendering, the single URL column,
// route calculation, the empty state, and the onAdd/onUpdate/onDelete handlers.
describe('SitemapsTable', () => {
  const { clearFlashMessages, flashSuccessToast } = mockFlashMessageHelpers;
  const engineName = 'my-engine';
  const sitemaps = [
    { id: '1', url: 'http://www.example.com/sitemap.xml' },
    { id: '2', url: 'http://www.example.com/whatever/sitemaps.xml' },
  ];
  // Domain fixture; only `id` (used to build routes) and `sitemaps` matter here.
  const domain = {
    createdOn: '2018-01-01T00:00:00.000Z',
    documentCount: 10,
    id: '6113e1407a2f2e6f42489794',
    url: 'https://www.elastic.co',
    crawlRules: [],
    entryPoints: [],
    sitemaps,
  };

  beforeEach(() => {
    jest.clearAllMocks();
  });

  it('renders', () => {
    const wrapper = shallow(
      <SitemapsTable domain={domain} engineName={engineName} items={domain.sitemaps} />
    );

    expect(wrapper.find(GenericEndpointInlineEditableTable).exists()).toBe(true);
  });

  describe('the first and only column in the table', () => {
    it('shows the url of a sitemap', () => {
      const sitemap = { id: '1', url: 'http://www.example.com/sitemap.xml' };

      const wrapper = shallow(
        <SitemapsTable domain={domain} engineName={engineName} items={domain.sitemaps} />
      );

      const columns = wrapper.find(GenericEndpointInlineEditableTable).prop('columns');
      const column = shallow(<div>{columns[0].render(sitemap)}</div>);
      expect(column.html()).toContain('http://www.example.com/sitemap.xml');
    });

    it('can show the url of a sitemap as editable', () => {
      const sitemap = { id: '1', url: 'http://www.example.com/sitemap.xml' };
      const onChange = jest.fn();

      const wrapper = shallow(
        <SitemapsTable domain={domain} engineName={engineName} items={domain.sitemaps} />
      );

      const columns = wrapper.find(GenericEndpointInlineEditableTable).prop('columns');
      const column = shallow(
        <div>
          {columns[0].editingRender(sitemap, onChange, { isInvalid: false, isLoading: false })}
        </div>
      );

      const textField = column.find(EuiFieldText);
      expect(textField.props()).toEqual(
        expect.objectContaining({
          value: 'http://www.example.com/sitemap.xml',
          disabled: false, // It would be disabled if isLoading is true
          isInvalid: false,
        })
      );

      textField.simulate('change', { target: { value: '/foo' } });
      expect(onChange).toHaveBeenCalledWith('/foo');
    });
  });

  describe('routes', () => {
    it('can calculate an update and delete route correctly', () => {
      const wrapper = shallow(
        <SitemapsTable domain={domain} engineName={engineName} items={domain.sitemaps} />
      );

      const table = wrapper.find(GenericEndpointInlineEditableTable);

      const sitemap = { id: '1', url: '/whatever' };
      expect(table.prop('deleteRoute')(sitemap)).toEqual(
        '/api/app_search/engines/my-engine/crawler/domains/6113e1407a2f2e6f42489794/sitemaps/1'
      );
      expect(table.prop('updateRoute')(sitemap)).toEqual(
        '/api/app_search/engines/my-engine/crawler/domains/6113e1407a2f2e6f42489794/sitemaps/1'
      );
    });
  });

  it('shows a no items message when there are no sitemaps to show', () => {
    const wrapper = shallow(
      <SitemapsTable domain={domain} engineName={engineName} items={domain.sitemaps} />
    );

    const editNewItems = jest.fn();
    const table = wrapper.find(GenericEndpointInlineEditableTable);
    const message = mountWithIntl(<div>{table.prop('noItemsMessage')!(editNewItems)}</div>);
    expect(message.find(EuiEmptyPrompt).exists()).toBe(true);
  });

  describe('when a sitemap is added', () => {
    it('should update the sitemaps for the current domain, and clear flash messages', () => {
      const updateSitemaps = jest.fn();
      setMockActions({
        updateSitemaps,
      });
      const wrapper = shallow(
        <SitemapsTable domain={domain} engineName={engineName} items={domain.sitemaps} />
      );
      const table = wrapper.find(GenericEndpointInlineEditableTable);

      const sitemapThatWasAdded = { id: '2', value: 'bar' };
      const updatedSitemaps = [
        { id: '1', value: 'foo' },
        { id: '2', value: 'bar' },
      ];
      table.prop('onAdd')(sitemapThatWasAdded, updatedSitemaps);
      expect(updateSitemaps).toHaveBeenCalledWith(updatedSitemaps);
      expect(clearFlashMessages).toHaveBeenCalled();
    });
  });

  describe('when a sitemap is updated', () => {
    it('should update the sitemaps for the current domain, and clear flash messages', () => {
      const updateSitemaps = jest.fn();
      setMockActions({
        updateSitemaps,
      });
      const wrapper = shallow(
        <SitemapsTable domain={domain} engineName={engineName} items={domain.sitemaps} />
      );
      const table = wrapper.find(GenericEndpointInlineEditableTable);

      const sitemapThatWasUpdated = { id: '2', value: 'bar' };
      const updatedSitemaps = [
        { id: '1', value: 'foo' },
        { id: '2', value: 'baz' },
      ];
      table.prop('onUpdate')(sitemapThatWasUpdated, updatedSitemaps);
      expect(updateSitemaps).toHaveBeenCalledWith(updatedSitemaps);
      expect(clearFlashMessages).toHaveBeenCalled();
    });
  });

  describe('when a sitemap is deleted', () => {
    it('should update the sitemaps for the current domain, clear flash messages, and show a success', () => {
      const updateSitemaps = jest.fn();
      setMockActions({
        updateSitemaps,
      });
      const wrapper = shallow(
        <SitemapsTable domain={domain} engineName={engineName} items={domain.sitemaps} />
      );
      const table = wrapper.find(GenericEndpointInlineEditableTable);

      const sitemapThatWasDeleted = { id: '2', value: 'bar' };
      const updatedSitemaps = [{ id: '1', value: 'foo' }];
      table.prop('onDelete')(sitemapThatWasDeleted, updatedSitemaps);
      expect(updateSitemaps).toHaveBeenCalledWith(updatedSitemaps);
      expect(clearFlashMessages).toHaveBeenCalled();
      expect(flashSuccessToast).toHaveBeenCalled();
    });
  });
});
|
|
@ -0,0 +1,124 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import React from 'react';
|
||||
|
||||
import { useActions } from 'kea';
|
||||
|
||||
import { EuiButton, EuiEmptyPrompt, EuiFieldText, EuiText } from '@elastic/eui';
|
||||
|
||||
import { i18n } from '@kbn/i18n';
|
||||
|
||||
import { clearFlashMessages, flashSuccessToast } from '../../../../shared/flash_messages';
|
||||
import { GenericEndpointInlineEditableTable } from '../../../../shared/tables/generic_endpoint_inline_editable_table';
|
||||
import { InlineEditableTableColumn } from '../../../../shared/tables/inline_editable_table/types';
|
||||
import { ItemWithAnID } from '../../../../shared/tables/types';
|
||||
import { CrawlerSingleDomainLogic } from '../crawler_single_domain_logic';
|
||||
import { CrawlerDomain, Sitemap } from '../types';
|
||||
|
||||
// Shared label used both for the table's add-row button and the empty-state CTA.
const ADD_BUTTON_LABEL = i18n.translate(
  'xpack.enterpriseSearch.appSearch.crawler.sitemapsTable.addButtonLabel',
  { defaultMessage: 'Add sitemap' }
);
|
||||
|
||||
interface SitemapsTableProps {
  // Domain whose sitemaps are managed; its `id` is used to build the API routes.
  domain: CrawlerDomain;
  // Engine that owns the crawler domain (also part of the API routes).
  engineName: string;
  // Sitemaps to display; callers pass `domain.sitemaps`.
  items: Sitemap[];
}
|
||||
|
||||
/**
 * Inline-editable table of sitemap URLs for a single crawler domain.
 * CRUD calls go through GenericEndpointInlineEditableTable to the Kibana
 * sitemap routes; successful responses update CrawlerSingleDomainLogic.
 */
export const SitemapsTable: React.FC<SitemapsTableProps> = ({ domain, engineName, items }) => {
  const { updateSitemaps } = useActions(CrawlerSingleDomainLogic);
  // The single editable field on a sitemap row.
  const field = 'url';

  const columns: Array<InlineEditableTableColumn<ItemWithAnID>> = [
    {
      editingRender: (sitemap, onChange, { isInvalid, isLoading }) => (
        <EuiFieldText
          fullWidth
          value={(sitemap as Sitemap)[field]}
          onChange={(e) => onChange(e.target.value)}
          disabled={isLoading}
          isInvalid={isInvalid}
        />
      ),
      render: (sitemap) => <EuiText size="s">{(sitemap as Sitemap)[field]}</EuiText>,
      name: i18n.translate('xpack.enterpriseSearch.appSearch.crawler.sitemapsTable.urlTableHead', {
        defaultMessage: 'URL',
      }),
      field,
    },
  ];

  // Kibana routes that proxy to Enterprise Search (see crawler_sitemaps server routes).
  const sitemapsRoute = `/api/app_search/engines/${engineName}/crawler/domains/${domain.id}/sitemaps`;
  const getSitemapRoute = (sitemap: Sitemap) =>
    `/api/app_search/engines/${engineName}/crawler/domains/${domain.id}/sitemaps/${sitemap.id}`;

  return (
    <GenericEndpointInlineEditableTable
      addButtonText={ADD_BUTTON_LABEL}
      columns={columns}
      description={
        <p>
          {i18n.translate('xpack.enterpriseSearch.appSearch.crawler.sitemapsTable.description', {
            defaultMessage: 'Specify sitemap URLs for the crawler on this domain.',
          })}
        </p>
      }
      instanceId="SitemapsTable"
      items={items}
      canRemoveLastItem
      noItemsMessage={(editNewItem) => (
        <>
          <EuiEmptyPrompt
            title={
              <h4>
                {i18n.translate(
                  'xpack.enterpriseSearch.appSearch.crawler.sitemapsTable.emptyMessageTitle',
                  {
                    defaultMessage: 'There are no existing sitemaps.',
                  }
                )}
              </h4>
            }
            titleSize="s"
            body={
              // i18n consistency fix: this copy was previously a hard-coded
              // English string while every other user-facing string in this
              // file is translated. defaultMessage is unchanged.
              <EuiText>
                {i18n.translate(
                  'xpack.enterpriseSearch.appSearch.crawler.sitemapsTable.emptyMessageDescription',
                  {
                    defaultMessage: 'Add a sitemap to specify an entry point for the crawler.',
                  }
                )}
              </EuiText>
            }
            actions={<EuiButton onClick={editNewItem}>{ADD_BUTTON_LABEL}</EuiButton>}
          />
        </>
      )}
      addRoute={sitemapsRoute}
      deleteRoute={getSitemapRoute}
      updateRoute={getSitemapRoute}
      dataProperty="sitemaps"
      onAdd={(_, newSitemaps) => {
        updateSitemaps(newSitemaps as Sitemap[]);
        clearFlashMessages();
      }}
      onDelete={(_, newSitemaps) => {
        updateSitemaps(newSitemaps as Sitemap[]);
        clearFlashMessages();
        flashSuccessToast(
          i18n.translate(
            'xpack.enterpriseSearch.appSearch.crawler.sitemapsTable.deleteSuccessToastMessage',
            {
              defaultMessage: 'The sitemap has been deleted.',
            }
          )
        );
      }}
      onUpdate={(_, newSitemaps) => {
        updateSitemaps(newSitemaps as Sitemap[]);
        clearFlashMessages();
      }}
      title={i18n.translate('xpack.enterpriseSearch.appSearch.crawler.sitemapsTable.title', {
        defaultMessage: 'Sitemaps',
      })}
      disableReordering
    />
  );
};
|
|
@ -23,6 +23,7 @@ import { CrawlerStatusIndicator } from './components/crawler_status_indicator/cr
|
|||
import { DeleteDomainPanel } from './components/delete_domain_panel';
|
||||
import { EntryPointsTable } from './components/entry_points_table';
|
||||
import { ManageCrawlsPopover } from './components/manage_crawls_popover/manage_crawls_popover';
|
||||
import { SitemapsTable } from './components/sitemaps_table';
|
||||
import { CRAWLER_TITLE } from './constants';
|
||||
import { CrawlerSingleDomainLogic } from './crawler_single_domain_logic';
|
||||
|
||||
|
@ -59,6 +60,10 @@ export const CrawlerSingleDomain: React.FC = () => {
|
|||
<EntryPointsTable domain={domain} engineName={engineName} items={domain.entryPoints} />
|
||||
</EuiPanel>
|
||||
<EuiSpacer size="xl" />
|
||||
<EuiPanel paddingSize="l" hasBorder>
|
||||
<SitemapsTable domain={domain} engineName={engineName} items={domain.sitemaps} />
|
||||
</EuiPanel>
|
||||
<EuiSpacer size="xl" />
|
||||
</>
|
||||
)}
|
||||
<EuiTitle size="s">
|
||||
|
|
|
@ -81,6 +81,36 @@ describe('CrawlerSingleDomainLogic', () => {
|
|||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('updateSitemaps', () => {
|
||||
beforeEach(() => {
|
||||
mount({
|
||||
domain: {
|
||||
id: '507f1f77bcf86cd799439011',
|
||||
sitemaps: [],
|
||||
},
|
||||
});
|
||||
|
||||
CrawlerSingleDomainLogic.actions.updateSitemaps([
|
||||
{
|
||||
id: '1234',
|
||||
url: 'http://www.example.com/sitemap.xml',
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it('should update the sitemaps on the domain', () => {
|
||||
expect(CrawlerSingleDomainLogic.values.domain).toEqual({
|
||||
id: '507f1f77bcf86cd799439011',
|
||||
sitemaps: [
|
||||
{
|
||||
id: '1234',
|
||||
url: 'http://www.example.com/sitemap.xml',
|
||||
},
|
||||
],
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('listeners', () => {
|
||||
|
|
|
@ -14,7 +14,7 @@ import { KibanaLogic } from '../../../shared/kibana';
|
|||
import { ENGINE_CRAWLER_PATH } from '../../routes';
|
||||
import { EngineLogic, generateEnginePath } from '../engine';
|
||||
|
||||
import { CrawlerDomain, EntryPoint } from './types';
|
||||
import { CrawlerDomain, EntryPoint, Sitemap } from './types';
|
||||
import { crawlerDomainServerToClient, getDeleteDomainSuccessMessage } from './utils';
|
||||
|
||||
export interface CrawlerSingleDomainValues {
|
||||
|
@ -27,6 +27,7 @@ interface CrawlerSingleDomainActions {
|
|||
fetchDomainData(domainId: string): { domainId: string };
|
||||
onReceiveDomainData(domain: CrawlerDomain): { domain: CrawlerDomain };
|
||||
updateEntryPoints(entryPoints: EntryPoint[]): { entryPoints: EntryPoint[] };
|
||||
updateSitemaps(entryPoints: Sitemap[]): { sitemaps: Sitemap[] };
|
||||
}
|
||||
|
||||
export const CrawlerSingleDomainLogic = kea<
|
||||
|
@ -38,6 +39,7 @@ export const CrawlerSingleDomainLogic = kea<
|
|||
fetchDomainData: (domainId) => ({ domainId }),
|
||||
onReceiveDomainData: (domain) => ({ domain }),
|
||||
updateEntryPoints: (entryPoints) => ({ entryPoints }),
|
||||
updateSitemaps: (sitemaps) => ({ sitemaps }),
|
||||
},
|
||||
reducers: {
|
||||
dataLoading: [
|
||||
|
@ -52,6 +54,8 @@ export const CrawlerSingleDomainLogic = kea<
|
|||
onReceiveDomainData: (_, { domain }) => domain,
|
||||
updateEntryPoints: (currentDomain, { entryPoints }) =>
|
||||
({ ...currentDomain, entryPoints } as CrawlerDomain),
|
||||
updateSitemaps: (currentDomain, { sitemaps }) =>
|
||||
({ ...currentDomain, sitemaps } as CrawlerDomain),
|
||||
},
|
||||
],
|
||||
},
|
||||
|
|
|
@ -0,0 +1,134 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { mockDependencies, mockRequestHandler, MockRouter } from '../../__mocks__';
|
||||
|
||||
import { registerCrawlerSitemapRoutes } from './crawler_sitemaps';
|
||||
|
||||
describe('crawler sitemap routes', () => {
|
||||
describe('POST /api/app_search/engines/{engineName}/crawler/domains/{domainId}/sitemaps', () => {
|
||||
let mockRouter: MockRouter;
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
mockRouter = new MockRouter({
|
||||
method: 'post',
|
||||
path: '/api/app_search/engines/{engineName}/crawler/domains/{domainId}/sitemaps',
|
||||
});
|
||||
|
||||
registerCrawlerSitemapRoutes({
|
||||
...mockDependencies,
|
||||
router: mockRouter.router,
|
||||
});
|
||||
});
|
||||
|
||||
it('creates a request to enterprise search', () => {
|
||||
expect(mockRequestHandler.createRequest).toHaveBeenCalledWith({
|
||||
path: '/api/as/v0/engines/:engineName/crawler/domains/:domainId/sitemaps',
|
||||
params: {
|
||||
respond_with: 'index',
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('validates correctly with required params', () => {
|
||||
const request = {
|
||||
params: { engineName: 'some-engine', domainId: '1234' },
|
||||
body: {
|
||||
url: 'http://www.example.com/sitemaps.xml',
|
||||
},
|
||||
};
|
||||
mockRouter.shouldValidate(request);
|
||||
});
|
||||
|
||||
it('fails otherwise', () => {
|
||||
const request = { params: {}, body: {} };
|
||||
mockRouter.shouldThrow(request);
|
||||
});
|
||||
});
|
||||
|
||||
describe('PUT /api/app_search/engines/{engineName}/crawler/domains/{domainId}/sitemaps/{sitemapId}', () => {
|
||||
let mockRouter: MockRouter;
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
mockRouter = new MockRouter({
|
||||
method: 'put',
|
||||
path:
|
||||
'/api/app_search/engines/{engineName}/crawler/domains/{domainId}/sitemaps/{sitemapId}',
|
||||
});
|
||||
|
||||
registerCrawlerSitemapRoutes({
|
||||
...mockDependencies,
|
||||
router: mockRouter.router,
|
||||
});
|
||||
});
|
||||
|
||||
it('creates a request to enterprise search', () => {
|
||||
expect(mockRequestHandler.createRequest).toHaveBeenCalledWith({
|
||||
path: '/api/as/v0/engines/:engineName/crawler/domains/:domainId/sitemaps/:sitemapId',
|
||||
params: {
|
||||
respond_with: 'index',
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('validates correctly with required params', () => {
|
||||
const request = {
|
||||
params: { engineName: 'some-engine', domainId: '1234', sitemapId: '5678' },
|
||||
body: {
|
||||
url: 'http://www.example.com/sitemaps.xml',
|
||||
},
|
||||
};
|
||||
mockRouter.shouldValidate(request);
|
||||
});
|
||||
|
||||
it('fails otherwise', () => {
|
||||
const request = { params: {}, body: {} };
|
||||
mockRouter.shouldThrow(request);
|
||||
});
|
||||
});
|
||||
|
||||
describe('DELETE /api/app_search/engines/{engineName}/crawler/domains/{domainId}/sitemaps/{sitemapId}', () => {
|
||||
let mockRouter: MockRouter;
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
mockRouter = new MockRouter({
|
||||
method: 'delete',
|
||||
path:
|
||||
'/api/app_search/engines/{engineName}/crawler/domains/{domainId}/sitemaps/{sitemapId}',
|
||||
});
|
||||
|
||||
registerCrawlerSitemapRoutes({
|
||||
...mockDependencies,
|
||||
router: mockRouter.router,
|
||||
});
|
||||
});
|
||||
|
||||
it('creates a request to enterprise search', () => {
|
||||
expect(mockRequestHandler.createRequest).toHaveBeenCalledWith({
|
||||
path: '/api/as/v0/engines/:engineName/crawler/domains/:domainId/sitemaps/:sitemapId',
|
||||
params: {
|
||||
respond_with: 'index',
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('validates correctly with required params', () => {
|
||||
const request = {
|
||||
params: { engineName: 'some-engine', domainId: '1234', sitemapId: '5678' },
|
||||
};
|
||||
mockRouter.shouldValidate(request);
|
||||
});
|
||||
|
||||
it('fails otherwise', () => {
|
||||
const request = { params: {} };
|
||||
mockRouter.shouldThrow(request);
|
||||
});
|
||||
});
|
||||
});
|
|
@ -0,0 +1,77 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { schema } from '@kbn/config-schema';
|
||||
|
||||
import { RouteDependencies } from '../../plugin';
|
||||
|
||||
export function registerCrawlerSitemapRoutes({
|
||||
router,
|
||||
enterpriseSearchRequestHandler,
|
||||
}: RouteDependencies) {
|
||||
router.post(
|
||||
{
|
||||
path: '/api/app_search/engines/{engineName}/crawler/domains/{domainId}/sitemaps',
|
||||
validate: {
|
||||
params: schema.object({
|
||||
engineName: schema.string(),
|
||||
domainId: schema.string(),
|
||||
}),
|
||||
body: schema.object({
|
||||
url: schema.string(),
|
||||
}),
|
||||
},
|
||||
},
|
||||
enterpriseSearchRequestHandler.createRequest({
|
||||
path: '/api/as/v0/engines/:engineName/crawler/domains/:domainId/sitemaps',
|
||||
params: {
|
||||
respond_with: 'index',
|
||||
},
|
||||
})
|
||||
);
|
||||
|
||||
router.put(
|
||||
{
|
||||
path: '/api/app_search/engines/{engineName}/crawler/domains/{domainId}/sitemaps/{sitemapId}',
|
||||
validate: {
|
||||
params: schema.object({
|
||||
engineName: schema.string(),
|
||||
domainId: schema.string(),
|
||||
sitemapId: schema.string(),
|
||||
}),
|
||||
body: schema.object({
|
||||
url: schema.string(),
|
||||
}),
|
||||
},
|
||||
},
|
||||
enterpriseSearchRequestHandler.createRequest({
|
||||
path: '/api/as/v0/engines/:engineName/crawler/domains/:domainId/sitemaps/:sitemapId',
|
||||
params: {
|
||||
respond_with: 'index',
|
||||
},
|
||||
})
|
||||
);
|
||||
|
||||
router.delete(
|
||||
{
|
||||
path: '/api/app_search/engines/{engineName}/crawler/domains/{domainId}/sitemaps/{sitemapId}',
|
||||
validate: {
|
||||
params: schema.object({
|
||||
engineName: schema.string(),
|
||||
domainId: schema.string(),
|
||||
sitemapId: schema.string(),
|
||||
}),
|
||||
},
|
||||
},
|
||||
enterpriseSearchRequestHandler.createRequest({
|
||||
path: '/api/as/v0/engines/:engineName/crawler/domains/:domainId/sitemaps/:sitemapId',
|
||||
params: {
|
||||
respond_with: 'index',
|
||||
},
|
||||
})
|
||||
);
|
||||
}
|
|
@ -11,6 +11,7 @@ import { registerAnalyticsRoutes } from './analytics';
|
|||
import { registerApiLogsRoutes } from './api_logs';
|
||||
import { registerCrawlerRoutes } from './crawler';
|
||||
import { registerCrawlerEntryPointRoutes } from './crawler_entry_points';
|
||||
import { registerCrawlerSitemapRoutes } from './crawler_sitemaps';
|
||||
import { registerCredentialsRoutes } from './credentials';
|
||||
import { registerCurationsRoutes } from './curations';
|
||||
import { registerDocumentsRoutes, registerDocumentRoutes } from './documents';
|
||||
|
@ -46,4 +47,5 @@ export const registerAppSearchRoutes = (dependencies: RouteDependencies) => {
|
|||
registerOnboardingRoutes(dependencies);
|
||||
registerCrawlerRoutes(dependencies);
|
||||
registerCrawlerEntryPointRoutes(dependencies);
|
||||
registerCrawlerSitemapRoutes(dependencies);
|
||||
};
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue