Mirror of https://github.com/elastic/kibana.git (synced 2025-04-24 17:59:23 -04:00)
[Enterprise Search] Fix bug limiting domains displayed on Crawl with custom settings flyouts (#137623) (#137787)
(cherry picked from commit 1f33c5bdc6)
Co-authored-by: Byron Hulcher <byron.hulcher@elastic.co>
This commit is contained in:
parent 738c4a7674
commit fc1847c1af
8 changed files with 179 additions and 17 deletions
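
At a high level, the commit replaces a single unpaginated GET of the crawler domain_configs endpoint with a loop that walks every page of results before dispatching onRecieveDomainConfigData, so the custom-settings flyout is no longer limited to the first page of domains. Below is a minimal standalone sketch of that pattern, not code from this commit: PageMeta, PaginatedResponse, HttpGet, and fetchAllDomainConfigs are illustrative names, and the real implementation lives in CrawlCustomSettingsFlyoutLogic using Kibana's HttpLogic client.

// Sketch only: mirrors the pagination pattern introduced by this commit.
// All type and function names here are hypothetical, chosen for illustration.

interface PageMeta {
  current: number;
  size: number;
  total_pages: number;
}

interface PaginatedResponse<T> {
  meta: { page: PageMeta };
  results: T[];
}

type HttpGet = <T>(
  path: string,
  options?: { query?: Record<string, number> }
) => Promise<T>;

async function fetchAllDomainConfigs<TServer, TClient>(
  httpGet: HttpGet,
  path: string,
  serverToClient: (item: TServer) => TClient
): Promise<TClient[]> {
  let collected: TClient[] = [];
  let nextPage = 1;
  let totalPages = 1;
  let pageSize = 100; // first request asks for up to 100 items

  // Keep requesting pages until the server-reported total_pages is exhausted.
  while (nextPage <= totalPages) {
    const { results, meta } = await httpGet<PaginatedResponse<TServer>>(path, {
      query: { 'page[current]': nextPage, 'page[size]': pageSize },
    });

    collected = [...collected, ...results.map(serverToClient)];

    // Advance using the metadata the server actually returned.
    nextPage = meta.page.current + 1;
    totalPages = meta.page.total_pages;
    pageSize = meta.page.size;
  }

  return collected;
}

The changed hunks follow.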
@@ -48,8 +48,16 @@ describe('CrawlCustomSettingsFlyoutLogic', () => {
  describe('fetchDomainConfigData', () => {
    it('updates logic with data that has been converted from server to client', async () => {
      jest.spyOn(CrawlCustomSettingsFlyoutLogic.actions, 'onRecieveDomainConfigData');

      http.get.mockReturnValueOnce(
        Promise.resolve({
          meta: {
            page: {
              current: 1,
              size: 1,
              total_pages: 2,
            },
          },
          results: [
            {
              id: '1234',
@@ -61,12 +69,50 @@ describe('CrawlCustomSettingsFlyoutLogic', () => {
        })
      );

      http.get.mockReturnValueOnce(
        Promise.resolve({
          meta: {
            page: {
              current: 2,
              size: 1,
              total_pages: 2,
            },
          },
          results: [
            {
              id: '5678',
              name: 'https://www.swiftype.com',
              seed_urls: [],
              sitemap_urls: [],
            },
          ],
        })
      );

      CrawlCustomSettingsFlyoutLogic.actions.fetchDomainConfigData();
      await nextTick();

      expect(http.get).toHaveBeenCalledWith(
        '/internal/app_search/engines/some-engine/crawler/domain_configs'
      expect(http.get).toHaveBeenNthCalledWith(
        1,
        '/internal/enterprise_search/engines/some-engine/crawler/domain_configs',
        {
          query: {
            'page[current]': 1,
            'page[size]': 100,
          },
        }
      );
      expect(http.get).toHaveBeenNthCalledWith(
        2,
        '/internal/enterprise_search/engines/some-engine/crawler/domain_configs',
        {
          query: {
            'page[current]': 2,
            'page[size]': 1,
          },
        }
      );

      expect(
        CrawlCustomSettingsFlyoutLogic.actions.onRecieveDomainConfigData
      ).toHaveBeenCalledWith([
@@ -76,6 +122,12 @@ describe('CrawlCustomSettingsFlyoutLogic', () => {
          seedUrls: [],
          sitemapUrls: [],
        },
        {
          id: '5678',
          name: 'https://www.swiftype.com',
          seedUrls: [],
          sitemapUrls: [],
        },
      ]);
    });

@@ -7,10 +7,10 @@
import { kea, MakeLogicType } from 'kea';

import { Meta } from '../../../../../../../common/types';
import { flashAPIErrors } from '../../../../../shared/flash_messages';
import { HttpLogic } from '../../../../../shared/http';
import { EngineLogic } from '../../../engine';

import { CrawlerLogic, CrawlRequestOverrides } from '../../crawler_logic';
import { DomainConfig, DomainConfigFromServer } from '../../types';
import { domainConfigServerToClient } from '../../utils';
@@ -198,12 +198,29 @@ export const CrawlCustomSettingsFlyoutLogic = kea<
      const { http } = HttpLogic.values;
      const { engineName } = EngineLogic.values;

      let domainConfigs: DomainConfig[] = [];
      let nextPage: number = 1;
      let totalPages: number = 1;
      let pageSize: number = 100;
      try {
        const { results } = await http.get<{
          results: DomainConfigFromServer[];
        }>(`/internal/app_search/engines/${engineName}/crawler/domain_configs`);
        while (nextPage <= totalPages) {
          const {
            results,
            meta: { page },
          } = await http.get<{
            meta: Meta;
            results: DomainConfigFromServer[];
          }>(`/internal/enterprise_search/engines/${engineName}/crawler/domain_configs`, {
            query: { 'page[current]': nextPage, 'page[size]': pageSize },
          });

          domainConfigs = [...domainConfigs, ...results.map(domainConfigServerToClient)];

          nextPage = page.current + 1;
          totalPages = page.total_pages;
          pageSize = page.size;
        }

        const domainConfigs = results.map(domainConfigServerToClient);
        actions.onRecieveDomainConfigData(domainConfigs);
      } catch (e) {
        flashAPIErrors(e);
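
A note on the design choice visible in the loop above: the first request asks for a page size of 100, but every iteration re-reads page.current, page.total_pages, and page.size from the response metadata, so the loop follows whatever paging the server actually applied rather than assuming the requested size was honored. The accumulated domainConfigs array is only handed to onRecieveDomainConfigData once the final page has been fetched.
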
@@ -10,7 +10,6 @@ import '../../_mocks_/index_name_logic.mock';
import { nextTick } from '@kbn/test-jest-helpers';

import { itShowsServerErrorAsFlashMessage } from '../../../../../test_helpers';

import { DomainConfig } from '../../../../api/crawler/types';
import { CrawlerLogic } from '../crawler_logic';
@@ -49,8 +48,16 @@ describe('CrawlCustomSettingsFlyoutLogic', () => {
  describe('fetchDomainConfigData', () => {
    it('updates logic with data that has been converted from server to client', async () => {
      jest.spyOn(CrawlCustomSettingsFlyoutLogic.actions, 'onRecieveDomainConfigData');

      http.get.mockReturnValueOnce(
        Promise.resolve({
          meta: {
            page: {
              current: 1,
              size: 1,
              total_pages: 2,
            },
          },
          results: [
            {
              id: '1234',
@@ -62,11 +69,48 @@ describe('CrawlCustomSettingsFlyoutLogic', () => {
        })
      );

      http.get.mockReturnValueOnce(
        Promise.resolve({
          meta: {
            page: {
              current: 2,
              size: 1,
              total_pages: 2,
            },
          },
          results: [
            {
              id: '5678',
              name: 'https://www.swiftype.com',
              seed_urls: [],
              sitemap_urls: [],
            },
          ],
        })
      );

      CrawlCustomSettingsFlyoutLogic.actions.fetchDomainConfigData();
      await nextTick();

      expect(http.get).toHaveBeenCalledWith(
        '/internal/enterprise_search/indices/index-name/crawler/domain_configs'
      expect(http.get).toHaveBeenNthCalledWith(
        1,
        '/internal/enterprise_search/indices/index-name/crawler/domain_configs',
        {
          query: {
            'page[current]': 1,
            'page[size]': 100,
          },
        }
      );
      expect(http.get).toHaveBeenNthCalledWith(
        2,
        '/internal/enterprise_search/indices/index-name/crawler/domain_configs',
        {
          query: {
            'page[current]': 2,
            'page[size]': 1,
          },
        }
      );
      expect(
        CrawlCustomSettingsFlyoutLogic.actions.onRecieveDomainConfigData
@@ -77,6 +121,12 @@ describe('CrawlCustomSettingsFlyoutLogic', () => {
          seedUrls: [],
          sitemapUrls: [],
        },
        {
          id: '5678',
          name: 'https://www.swiftype.com',
          seedUrls: [],
          sitemapUrls: [],
        },
      ]);
    });

@@ -7,6 +7,7 @@
import { kea, MakeLogicType } from 'kea';

import { Meta } from '../../../../../../../common/types';
import { flashAPIErrors } from '../../../../../shared/flash_messages';
import { HttpLogic } from '../../../../../shared/http';
import { GetCrawlerApiLogic } from '../../../../api/crawler/get_crawler_api_logic';
@@ -202,12 +203,31 @@ export const CrawlCustomSettingsFlyoutLogic = kea<
    fetchDomainConfigData: async () => {
      const { http } = HttpLogic.values;
      const { indexName } = IndexNameLogic.values;
      try {
        const { results } = await http.get<{
          results: DomainConfigFromServer[];
        }>(`/internal/enterprise_search/indices/${indexName}/crawler/domain_configs`);

        const domainConfigs = results.map(domainConfigServerToClient);
      let domainConfigs: DomainConfig[] = [];
      let totalPages: number = 1;
      let nextPage: number = 1;
      let pageSize: number = 100;

      try {
        while (nextPage <= totalPages) {
          const {
            results,
            meta: { page },
          } = await http.get<{
            meta: Meta;
            results: DomainConfigFromServer[];
          }>(`/internal/enterprise_search/indices/${indexName}/crawler/domain_configs`, {
            query: { 'page[current]': nextPage, 'page[size]': pageSize },
          });

          domainConfigs = [...domainConfigs, ...results.map(domainConfigServerToClient)];

          nextPage = page.current + 1;
          totalPages = page.total_pages;
          pageSize = page.size;
        }

        actions.onRecieveDomainConfigData(domainConfigs);
      } catch (e) {
        flashAPIErrors(e);

@@ -698,11 +698,16 @@ describe('crawler routes', () => {
    });

    it('validates correctly with name', () => {
      const request = { params: { name: 'some-engine' } };
      const request = { params: { name: 'some-engine' }, query: { 'page[current]': 4 } };
      mockRouter.shouldValidate(request);
    });

    it('fails validation without name', () => {
    it('validates correctly with page[current]', () => {
      const request = { params: { name: 'some-engine' }, query: { 'page[size]': 100 } };
      mockRouter.shouldValidate(request);
    });

    it('fails validation without page[size]', () => {
      const request = { params: {} };
      mockRouter.shouldThrow(request);
    });

@@ -283,6 +283,10 @@ export function registerCrawlerRoutes({
        params: schema.object({
          name: schema.string(),
        }),
        query: schema.object({
          'page[current]': schema.maybe(schema.number()),
          'page[size]': schema.maybe(schema.number()),
        }),
      },
    },
    enterpriseSearchRequestHandler.createRequest({
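
For context, here is a hedged, standalone illustration of how the optional query schema added above behaves under @kbn/config-schema validation. It is not part of this commit, and the querySchema constant is a made-up name, but the schema.object / schema.maybe / schema.number calls mirror the route definition.

// Illustration only: with schema.maybe(), requests carrying either, both, or
// neither page parameter pass validation, while non-numeric values still fail.
import { schema } from '@kbn/config-schema';

const querySchema = schema.object({
  'page[current]': schema.maybe(schema.number()),
  'page[size]': schema.maybe(schema.number()),
});

querySchema.validate({ 'page[current]': 4 }); // passes
querySchema.validate({ 'page[size]': 100 }); // passes
querySchema.validate({}); // passes: both fields are optional
// querySchema.validate({ 'page[size]': 'all' }); // would throw: not a number
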
@@ -689,6 +689,16 @@ describe('crawler routes', () => {
      mockRouter.shouldValidate(request);
    });

    it('validates correctly with page[current]', () => {
      const request = { params: { indexName: 'index-name' }, query: { 'page[current]': 4 } };
      mockRouter.shouldValidate(request);
    });

    it('validates correctly with page[size]', () => {
      const request = { params: { indexName: 'index-name' }, query: { 'page[size]': 100 } };
      mockRouter.shouldValidate(request);
    });

    it('fails validation without name', () => {
      const request = { params: {} };
      mockRouter.shouldThrow(request);

@@ -274,6 +274,10 @@ export function registerCrawlerRoutes(routeDependencies: RouteDependencies) {
        params: schema.object({
          indexName: schema.string(),
        }),
        query: schema.object({
          'page[current]': schema.maybe(schema.number()),
          'page[size]': schema.maybe(schema.number()),
        }),
      },
    },
    enterpriseSearchRequestHandler.createRequest({