Mirror of https://github.com/elastic/kibana.git (synced 2025-04-23 17:28:26 -04:00)
[Search] Remove webcrawler endpoints (#208827)
## Summary

- Removes webcrawler endpoints
- Removes unused cloud_health endpoint
- Removes the Enterprise Search node request handler

### Checklist

- [ ] [Unit or functional tests](https://www.elastic.co/guide/en/kibana/master/development-tests.html) were updated or added to match the most common scenarios
Parent: a149a1147b
Commit: ea32413c08
39 changed files with 10 additions and 3483 deletions
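For orientation before the diff: the central type-level effect of this change is that the crawler-backed index variant drops out of the shared ingestion index union. The sketch below is a condensed before/after view of that change, simplified from the hunks that follow; it is illustrative rather than the literal file contents, and field details are omitted.

```typescript
import type { ConnectorIndex, ElasticsearchIndex } from '@kbn/search-connectors';

// Before: a crawler-backed index type was part of the union.
// interface CrawlerIndex extends ElasticsearchIndex {
//   connector: Connector;
//   crawler: Crawler;
// }
// type ElasticsearchIndexWithIngestion = ElasticsearchIndex | ConnectorIndex | CrawlerIndex;

// After: only API-backed and connector-backed indices remain.
export type ElasticsearchIndexWithIngestion = ElasticsearchIndex | ConnectorIndex;
```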
@@ -17162,21 +17162,16 @@
"xpack.enterpriseSearch.server.routes.addAnalyticsCollection.analyticsCollectionExistsError": "Le nom de collection existe déjà. Choisissez un autre nom.",
"xpack.enterpriseSearch.server.routes.addAnalyticsCollection.analyticsCollectionNotFoundErrorMessage": "Collection d'analyses introuvable",
"xpack.enterpriseSearch.server.routes.addConnector.connectorExistsError": "Le connecteur ou l'index existe déjà",
"xpack.enterpriseSearch.server.routes.addCrawler.connectorExistsError": "Un connecteur existe déjà pour cet index",
"xpack.enterpriseSearch.server.routes.addCrawler.crawlerExistsError": "Un robot d'indexation existe déjà pour cet index",
"xpack.enterpriseSearch.server.routes.addCrawler.indexExistsError": "L'index existe déjà.",
"xpack.enterpriseSearch.server.routes.checkKibanaLogsMessage": "{errorMessage} Vérifiez les logs du serveur Kibana pour plus de détails.",
"xpack.enterpriseSearch.server.routes.connectors.expensive_query_not_allowed_error": "Les requêtes de recherche lourdes ne sont pas autorisées. \"recherche.autoriser_recherches_lourdes\" est défini comme faux",
"xpack.enterpriseSearch.server.routes.connectors.generateConfiguration.indexAlreadyExistsError": "Impossible de trouver un nom de connecteur unique",
"xpack.enterpriseSearch.server.routes.connectors.resource_not_found_error": "Le connecteur avec l'ID {connectorId} est introuvable.",
"xpack.enterpriseSearch.server.routes.connectors.statusTransitionError": "Le travail de synchronisation du connecteur ne peut pas être annulé. Le connecteur est déjà annulé ou n'est pas dans un état annulable.",
"xpack.enterpriseSearch.server.routes.createApiIndex.connectorExistsError": "Un connecteur existe déjà pour cet index",
"xpack.enterpriseSearch.server.routes.createApiIndex.crawlerExistsError": "Un robot d'indexation existe déjà pour cet index",
"xpack.enterpriseSearch.server.routes.createApiIndex.indexExistsError": "L'index existe déjà.",
"xpack.enterpriseSearch.server.routes.createSearchApplication.searchApplciationExistsError": "Le nom de l’application de recherche est déjà pris. Choisissez un autre nom.",
"xpack.enterpriseSearch.server.routes.createSearchApplication.searchApplicationInvalidName": "Nom de l'application de recherche non valide. {exceptionReason}",
"xpack.enterpriseSearch.server.routes.errorLogMessage": "Une erreur s'est produite lors de la résolution de la requête en {requestUrl} : {errorMessage}",
"xpack.enterpriseSearch.server.routes.fetchCrawlerMultipleSchedules.documentNotFoundError": "Les données du robot d'indexation sont introuvables.",
"xpack.enterpriseSearch.server.routes.fetchSearchApplicationFieldCapabilities.error": "Impossible de trouver l'application de recherche",
"xpack.enterpriseSearch.server.routes.fetchSearchApplicationFieldCapabilities.missingAliasError": "L'alias de l'application de recherche est manquant.",
"xpack.enterpriseSearch.server.routes.indices.existsErrorLogMessage": "Une erreur s'est produite lors de la résolution de la requête en {requestUrl}",

@@ -17184,10 +17179,8 @@
"xpack.enterpriseSearch.server.routes.indices.pipelines.indexMissingError": "L'index {indexName} n'existe pas",
"xpack.enterpriseSearch.server.routes.indices.pipelines.pipelineMissingError": "Le pipeline {pipelineName} n'existe pas",
"xpack.enterpriseSearch.server.routes.indices.pipelines.pipelineNotFoundError": "Le pipeline {pipelineName} n'existe pas",
"xpack.enterpriseSearch.server.routes.recreateConnector.connectorExistsError": "Un connecteur existe déjà pour cet index",
"xpack.enterpriseSearch.server.routes.unauthorizedError": "Vous ne disposez pas d'autorisations suffisantes.",
"xpack.enterpriseSearch.server.routes.uncaughtExceptionError": "Search a rencontré une erreur.",
"xpack.enterpriseSearch.server.routes.updateHtmlExtraction.noCrawlerFound": "Impossible de trouver un robot d'indexation pour cet index",
"xpack.enterpriseSearch.server.utils.invalidEnumValue": "Valeur {value} non autorisée pour le champ {fieldName}",
"xpack.enterpriseSearch.shared.flashMessages.defaultErrorMessage": "Une erreur inattendue s'est produite",
"xpack.enterpriseSearch.shared.searchLabsBanner.logoAltLabel": "Ateliers Elastic Search",

@@ -17021,21 +17021,16 @@
"xpack.enterpriseSearch.server.routes.addAnalyticsCollection.analyticsCollectionExistsError": "コレクション名はすでに存在します。別の名前を選択してください。",
"xpack.enterpriseSearch.server.routes.addAnalyticsCollection.analyticsCollectionNotFoundErrorMessage": "分析コレクションが見つかりません",
"xpack.enterpriseSearch.server.routes.addConnector.connectorExistsError": "コネクターまたはインデックスはすでに存在します",
"xpack.enterpriseSearch.server.routes.addCrawler.connectorExistsError": "このインデックスのコネクターはすでに存在します",
"xpack.enterpriseSearch.server.routes.addCrawler.crawlerExistsError": "このインデックスのクローラーはすでに存在します",
"xpack.enterpriseSearch.server.routes.addCrawler.indexExistsError": "このインデックスはすでに存在します",
"xpack.enterpriseSearch.server.routes.checkKibanaLogsMessage": "{errorMessage}詳細については、Kibanaサーバーログを確認してください。",
"xpack.enterpriseSearch.server.routes.connectors.expensive_query_not_allowed_error": "高コストの検索クエリーは許可されません。\"search.allow_expensive_queries\"はfalseに設定されています。",
"xpack.enterpriseSearch.server.routes.connectors.generateConfiguration.indexAlreadyExistsError": "一意のコネクター名が見つかりません",
"xpack.enterpriseSearch.server.routes.connectors.resource_not_found_error": "ID \"{connectorId}\"のコネクターが見つかりません。",
"xpack.enterpriseSearch.server.routes.connectors.statusTransitionError": "コネクター同期ジョブをキャンセルできませんでした。コネクターはすでにキャンセルされているか、キャンセル可能な状態ではありません。",
"xpack.enterpriseSearch.server.routes.createApiIndex.connectorExistsError": "このインデックスのコネクターはすでに存在します",
"xpack.enterpriseSearch.server.routes.createApiIndex.crawlerExistsError": "このインデックスのクローラーはすでに存在します",
"xpack.enterpriseSearch.server.routes.createApiIndex.indexExistsError": "このインデックスはすでに存在します",
"xpack.enterpriseSearch.server.routes.createSearchApplication.searchApplciationExistsError": "検索アプリケーション名はすでに取得されています。別の名前を選択してください。",
"xpack.enterpriseSearch.server.routes.createSearchApplication.searchApplicationInvalidName": "無効な検索アプリケーション名です。{exceptionReason}",
"xpack.enterpriseSearch.server.routes.errorLogMessage": "{requestUrl}へのリクエストの解決中にエラーが発生しました:{errorMessage}",
"xpack.enterpriseSearch.server.routes.fetchCrawlerMultipleSchedules.documentNotFoundError": "クローラーデータが見つかりませんでした。",
"xpack.enterpriseSearch.server.routes.fetchSearchApplicationFieldCapabilities.error": "検索アプリケーションが見つかりませんでした。",
"xpack.enterpriseSearch.server.routes.fetchSearchApplicationFieldCapabilities.missingAliasError": "検索アプリケーションのエイリアスが見つかりません。",
"xpack.enterpriseSearch.server.routes.indices.existsErrorLogMessage": "{requestUrl}へのリクエストの解決中にエラーが発生しました",

@@ -17043,10 +17038,8 @@
"xpack.enterpriseSearch.server.routes.indices.pipelines.indexMissingError": "インデックス{indexName}が存在しません",
"xpack.enterpriseSearch.server.routes.indices.pipelines.pipelineMissingError": "パイプライン{pipelineName}が存在しません",
"xpack.enterpriseSearch.server.routes.indices.pipelines.pipelineNotFoundError": "パイプライン{pipelineName}が存在しません",
"xpack.enterpriseSearch.server.routes.recreateConnector.connectorExistsError": "このインデックスのコネクターはすでに存在します",
"xpack.enterpriseSearch.server.routes.unauthorizedError": "十分な権限がありません。",
"xpack.enterpriseSearch.server.routes.uncaughtExceptionError": "検索でエラーが発生しました。",
"xpack.enterpriseSearch.server.routes.updateHtmlExtraction.noCrawlerFound": "このインデックスのクローラーが見つかりませんでした",
"xpack.enterpriseSearch.server.utils.invalidEnumValue": "フィールド{fieldName}の値{value}が正しくありません",
"xpack.enterpriseSearch.shared.flashMessages.defaultErrorMessage": "予期しないエラーが発生しました",
"xpack.enterpriseSearch.shared.searchLabsBanner.logoAltLabel": "Elastic Search Labs",

@@ -16744,31 +16744,24 @@
"xpack.enterpriseSearch.server.routes.addAnalyticsCollection.analyticsCollectionExistsError": "集合名称已存在。请选择其他名称。",
"xpack.enterpriseSearch.server.routes.addAnalyticsCollection.analyticsCollectionNotFoundErrorMessage": "未找到分析集合",
"xpack.enterpriseSearch.server.routes.addConnector.connectorExistsError": "连接器或索引已存在",
"xpack.enterpriseSearch.server.routes.addCrawler.connectorExistsError": "此索引的连接器已存在",
"xpack.enterpriseSearch.server.routes.addCrawler.crawlerExistsError": "此索引的网络爬虫已存在",
"xpack.enterpriseSearch.server.routes.addCrawler.indexExistsError": "此索引已存在",
"xpack.enterpriseSearch.server.routes.checkKibanaLogsMessage": "{errorMessage} 请查阅 Kibana 服务器日志了解详情。",
"xpack.enterpriseSearch.server.routes.connectors.expensive_query_not_allowed_error": "不允许执行资源密集型搜索查询。已将'search.allow_expensive_queries'设置为 false",
"xpack.enterpriseSearch.server.routes.connectors.generateConfiguration.indexAlreadyExistsError": "找不到唯一的连接器名称",
"xpack.enterpriseSearch.server.routes.connectors.resource_not_found_error": "找不到 ID 为 {connectorId} 的连接器。",
"xpack.enterpriseSearch.server.routes.connectors.statusTransitionError": "无法取消连接器同步作业。连接器已取消或未处于可取消状态。",
"xpack.enterpriseSearch.server.routes.createApiIndex.connectorExistsError": "此索引的连接器已存在",
"xpack.enterpriseSearch.server.routes.createApiIndex.crawlerExistsError": "此索引的网络爬虫已存在",
"xpack.enterpriseSearch.server.routes.createApiIndex.indexExistsError": "此索引已存在",
"xpack.enterpriseSearch.server.routes.createSearchApplication.searchApplciationExistsError": "搜索应用程序名称已占用。请选择其他名称。",
"xpack.enterpriseSearch.server.routes.createSearchApplication.searchApplicationInvalidName": "搜索应用程序名称无效。{exceptionReason}",
"xpack.enterpriseSearch.server.routes.errorLogMessage": "解析 {requestUrl} 请求时出错:{errorMessage}",
"xpack.enterpriseSearch.server.routes.fetchCrawlerMultipleSchedules.documentNotFoundError": "找不到网络爬虫数据。",
"xpack.enterpriseSearch.server.routes.fetchSearchApplicationFieldCapabilities.error": "找不到搜索应用程序",
"xpack.enterpriseSearch.server.routes.fetchSearchApplicationFieldCapabilities.missingAliasError": "搜索应用程序别名缺失。",
"xpack.enterpriseSearch.server.routes.indices.existsErrorLogMessage": "解析 {requestUrl} 请求时出错",
"xpack.enterpriseSearch.server.routes.indices.pipelines.indexMissingError": "索引 {indexName} 不存在",
"xpack.enterpriseSearch.server.routes.indices.pipelines.pipelineMissingError": "管道 {pipelineName} 不存在",
"xpack.enterpriseSearch.server.routes.indices.pipelines.pipelineNotFoundError": "管道 {pipelineName} 不存在",
"xpack.enterpriseSearch.server.routes.recreateConnector.connectorExistsError": "此索引的连接器已存在",
"xpack.enterpriseSearch.server.routes.unauthorizedError": "您的权限不足。",
"xpack.enterpriseSearch.server.routes.uncaughtExceptionError": "搜索遇到错误。",
"xpack.enterpriseSearch.server.routes.updateHtmlExtraction.noCrawlerFound": "找不到此索引的网络爬虫",
"xpack.enterpriseSearch.server.utils.invalidEnumValue": "字段 {fieldName} 的非法值 {value}",
"xpack.enterpriseSearch.shared.flashMessages.defaultErrorMessage": "发生意外错误",
"xpack.enterpriseSearch.shared.searchLabsBanner.logoAltLabel": "Elastic Search Labs",

@@ -5,21 +5,13 @@
 * 2.0.
 */

import { Connector, ConnectorIndex, ElasticsearchIndex } from '@kbn/search-connectors';

import { Crawler } from './crawler';
import type { ConnectorIndex, ElasticsearchIndex } from '@kbn/search-connectors';

export interface AlwaysShowPattern {
  alias_pattern: string;
  index_pattern: string;
}

// TODO: Remove this type
export interface CrawlerIndex extends ElasticsearchIndex {
  connector: Connector;
  crawler: Crawler;
}

export interface ElasticsearchIndexWithPrivileges extends ElasticsearchIndex {
  alias: boolean;
  privileges: {

@@ -28,4 +20,4 @@ export interface ElasticsearchIndexWithPrivileges extends ElasticsearchIndex {
  };
}

export type ElasticsearchIndexWithIngestion = ElasticsearchIndex | ConnectorIndex | CrawlerIndex;
export type ElasticsearchIndexWithIngestion = ElasticsearchIndex | ConnectorIndex;

@@ -280,10 +280,6 @@ export const indices: ElasticsearchIndexWithIngestion[] = [
      sync_now: false,
    },
    count: 1,
    crawler: {
      id: '5',
      index_name: 'connector-crawler',
    },
    hidden: false,
    name: 'crawler',
    total: {

@@ -1,23 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { CloudHealth } from '../../../../../common/stats';

import { createApiLogic } from '../../../shared/api_logic/create_api_logic';
import { HttpLogic } from '../../../shared/http';

export type FetchCloudHealthResponse = CloudHealth;

export const fetchCloudHealth = async () => {
  const route = '/internal/enterprise_search/stats/cloud_health';
  return await HttpLogic.values.http.get<FetchCloudHealthResponse>(route);
};

export const FetchCloudHealthApiLogic = createApiLogic(
  ['enterprise_search_content', 'fetch_cloud_health_api_logic'],
  fetchCloudHealth
);

@@ -5,7 +5,7 @@
 * 2.0.
 */

import React, { useEffect, useMemo } from 'react';
import React, { useMemo } from 'react';

import { useActions, useValues } from 'kea';

@@ -22,8 +22,6 @@ import {
} from '../../../../shared/licensing_callout/licensing_callout';
import { AddConnectorApiLogic } from '../../../api/connector/add_connector_api_logic';

import { FetchCloudHealthApiLogic } from '../../../api/stats/fetch_cloud_health_api_logic';

import { AddConnectorLogic } from './add_connector_logic';
import { NewConnectorTemplate } from './new_connector_template';

@@ -60,14 +58,6 @@ export const MethodConnector: React.FC<MethodConnectorProps> = ({

  const isGated = isNative && !isCloud && !hasPlatinumLicense;

  const { makeRequest: fetchCloudHealth } = useActions(FetchCloudHealthApiLogic);

  useEffect(() => {
    if (isCloud) {
      fetchCloudHealth({});
    }
  }, [isCloud]);

  return (
    <EuiFlexGroup direction="column">
      {isGated && (

@@ -9,12 +9,8 @@ import { ElasticsearchIndex, ElasticsearchViewIndexExtension } from '@kbn/search

import { ConnectorIndex } from '@kbn/search-connectors/types/indices';

import { CrawlerIndex } from '../../../common/types/indices';

export type ConnectorViewIndex = ConnectorIndex & ElasticsearchViewIndexExtension;

export type CrawlerViewIndex = CrawlerIndex & ElasticsearchViewIndexExtension;

export type ApiViewIndex = ElasticsearchIndex & ElasticsearchViewIndexExtension;

export type ElasticsearchViewIndex = CrawlerViewIndex | ConnectorViewIndex | ApiViewIndex;
export type ElasticsearchViewIndex = ConnectorViewIndex | ApiViewIndex;

@@ -22,7 +22,7 @@ import {
import { ENTERPRISE_SEARCH_CONNECTOR_CRAWLER_SERVICE_TYPE } from '../../../../common/constants';
import { ElasticsearchIndexWithIngestion } from '../../../../common/types/indices';

import { ApiViewIndex, CrawlerViewIndex, ElasticsearchViewIndex } from '../types';
import { ApiViewIndex, ElasticsearchViewIndex } from '../types';

export function isConnectorIndex(
  index: ElasticsearchIndexWithIngestion | null | undefined

@@ -104,7 +104,6 @@ export function getContentExtractionDisabled(index?: ElasticsearchIndexWithInges
}

export function indexToViewIndex(index: ElasticsearchIndex): ConnectorViewIndex;
export function indexToViewIndex(index: ElasticsearchIndex): CrawlerViewIndex;
export function indexToViewIndex(index: ElasticsearchIndex): ApiViewIndex {
  const extraFields = {
    ingestionMethod: getIngestionMethod(index),

@@ -1,14 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

export const mockHttpAgent = jest.fn();

jest.mock('../lib/enterprise_search_http_agent', () => ({
  entSearchHttpAgent: {
    getHttpAgent: () => mockHttpAgent,
  },
}));

@@ -6,11 +6,4 @@
 */

export { MockRouter } from './router.mock';
export {
  mockConfig,
  mockLogger,
  mockRequestHandler,
  mockDependencies,
} from './routerDependencies.mock';

export { mockHttpAgent } from './http_agent.mock';
export { mockConfig, mockLogger, mockDependencies } from './routerDependencies.mock';

@@ -14,13 +14,6 @@ import { GlobalConfigService } from '../services/global_config_service';

export const mockLogger = loggingSystemMock.createLogger().get();

export const mockRequestHandler = {
  createRequest: jest.fn(() => () => {}),
  hasValidData(data: any) {
    return (this.createRequest as jest.Mock).mock.calls[0][0].hasValidData(data);
  },
};

export const mockMl = mlPluginServerMock.createSetupContract();

export const mockConfig: ConfigType = {

@@ -50,7 +43,6 @@ export const mockConfig: ConfigType = {
export const mockDependencies = {
  // Mock router should be handled on a per-test basis
  config: mockConfig,
  enterpriseSearchRequestHandler: mockRequestHandler as any,
  getSavedObjectsService: jest.fn(),
  getStartServices: jest.fn(),
  globalConfigService: new GlobalConfigService(),

@@ -15,7 +15,6 @@ import {

import { ErrorCode } from '../../../common/types/error_codes';

import { fetchCrawlerByIndexName } from '../crawler/fetch_crawlers';
import { generateApiKey } from '../indices/generate_api_key';
import { textAnalysisSettings } from '../indices/text_analysis';

@@ -26,7 +25,6 @@ jest.mock('@kbn/search-connectors', () => ({
  deleteConnectorById: jest.fn(),
  fetchConnectorByIndexName: jest.fn(),
}));
jest.mock('../crawler/fetch_crawlers', () => ({ fetchCrawlerByIndexName: jest.fn() }));
jest.mock('../indices/generate_api_key', () => ({ generateApiKey: jest.fn() }));

describe('addConnector lib function', () => {

@@ -70,7 +68,6 @@ describe('addConnector lib function', () => {
    }));
    mockClient.asCurrentUser.indices.exists.mockImplementation(() => false);
    (fetchConnectorByIndexName as jest.Mock).mockImplementation(() => undefined);
    (fetchCrawlerByIndexName as jest.Mock).mockImplementation(() => undefined);
    mockClient.asCurrentUser.indices.getMapping.mockImplementation(() => connectorsIndicesMapping);
    (generateApiKey as jest.Mock).mockImplementation(() => undefined);

@@ -112,7 +109,6 @@ describe('addConnector lib function', () => {
    }));
    mockClient.asCurrentUser.indices.exists.mockImplementation(() => false);
    (fetchConnectorByIndexName as jest.Mock).mockImplementation(() => undefined);
    (fetchCrawlerByIndexName as jest.Mock).mockImplementation(() => undefined);
    mockClient.asCurrentUser.indices.getMapping.mockImplementation(() => connectorsIndicesMapping);
    (generateApiKey as jest.Mock).mockImplementation(() => ({
      id: 'api-key-id',

@@ -157,7 +153,6 @@ describe('addConnector lib function', () => {
    }));
    mockClient.asCurrentUser.indices.exists.mockImplementation(() => true);
    (fetchConnectorByIndexName as jest.Mock).mockImplementation(() => undefined);
    (fetchCrawlerByIndexName as jest.Mock).mockImplementation(() => undefined);
    mockClient.asCurrentUser.indices.getMapping.mockImplementation(() => connectorsIndicesMapping);

    await expect(

@@ -175,7 +170,6 @@ describe('addConnector lib function', () => {
    mockClient.asCurrentUser.index.mockImplementation(() => ({ _id: 'fakeId' }));
    mockClient.asCurrentUser.indices.exists.mockImplementation(() => false);
    (fetchConnectorByIndexName as jest.Mock).mockImplementation(() => true);
    (fetchCrawlerByIndexName as jest.Mock).mockImplementation(() => undefined);
    mockClient.asCurrentUser.indices.getMapping.mockImplementation(() => connectorsIndicesMapping);

    await expect(

@@ -189,34 +183,10 @@ describe('addConnector lib function', () => {
    expect(mockClient.asCurrentUser.indices.create).not.toHaveBeenCalled();
  });

  it('should reject if crawler already exists', async () => {
    mockClient.asCurrentUser.index.mockImplementation(() => ({ _id: 'fakeId' }));
    (createConnector as jest.Mock).mockImplementation(() => ({
      id: 'fakeId',
      index_name: 'index_name',
    }));
    mockClient.asCurrentUser.indices.exists.mockImplementation(() => false);
    (fetchConnectorByIndexName as jest.Mock).mockImplementation(() => undefined);
    (fetchCrawlerByIndexName as jest.Mock).mockImplementation(() => true);
    mockClient.asCurrentUser.indices.getMapping.mockImplementation(() => connectorsIndicesMapping);

    await expect(
      addConnector(mockClient as unknown as IScopedClusterClient, {
        indexName: 'index_name',
        isNative: false,
        language: 'en',
        name: '',
      })
    ).rejects.toEqual(new Error(ErrorCode.CRAWLER_ALREADY_EXISTS));
    expect(mockClient.asCurrentUser.indices.create).not.toHaveBeenCalled();
    expect(createConnector).not.toHaveBeenCalled();
  });

  it('should reject with index already exists if connector and index already exist', async () => {
    mockClient.asCurrentUser.index.mockImplementation(() => ({ _id: 'fakeId' }));
    mockClient.asCurrentUser.indices.exists.mockImplementation(() => true);
    (fetchConnectorByIndexName as jest.Mock).mockImplementation(() => true);
    (fetchCrawlerByIndexName as jest.Mock).mockImplementation(() => undefined);
    mockClient.asCurrentUser.indices.getMapping.mockImplementation(() => connectorsIndicesMapping);

    await expect(

@@ -239,7 +209,6 @@ describe('addConnector lib function', () => {
    }));
    mockClient.asCurrentUser.indices.exists.mockImplementation(() => false);
    (fetchConnectorByIndexName as jest.Mock).mockImplementation(() => ({ id: 'connectorId' }));
    (fetchCrawlerByIndexName as jest.Mock).mockImplementation(() => undefined);
    mockClient.asCurrentUser.indices.getMapping.mockImplementation(() => connectorsIndicesMapping);
    (generateApiKey as jest.Mock).mockImplementation(() => ({
      id: 'api-key-id',

@@ -20,7 +20,6 @@ import { ENTERPRISE_SEARCH_CONNECTOR_CRAWLER_SERVICE_TYPE } from '../../../commo

import { ErrorCode } from '../../../common/types/error_codes';

import { fetchCrawlerByIndexName } from '../crawler/fetch_crawlers';
import { createIndex } from '../indices/create_index';
import { generateApiKey } from '../indices/generate_api_key';
import { getDefaultPipeline } from '../pipelines/get_default_pipeline';

@@ -53,11 +52,7 @@ export const addConnector = async (
      throw new Error(ErrorCode.CONNECTOR_DOCUMENT_ALREADY_EXISTS);
    }
  }
  const crawler = await fetchCrawlerByIndexName(client, index);

  if (crawler) {
    throw new Error(ErrorCode.CRAWLER_ALREADY_EXISTS);
  }
  await createIndex(client, index, input.language, false);
  }

@@ -1,36 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { IScopedClusterClient } from '@kbn/core/server';

import { CONNECTORS_INDEX, Connector } from '@kbn/search-connectors';

const CUSTOM_SCHEDULING = 'custom_scheduling';

export const fetchCrawlerCustomSchedulingByIndexName = async (
  client: IScopedClusterClient,
  indexName: string
): Promise<Connector | undefined> => {
  const crawlerResult = await client.asCurrentUser.search<Connector>({
    index: CONNECTORS_INDEX,
    query: { term: { index_name: indexName } },
    _source: CUSTOM_SCHEDULING,
  });
  const result = crawlerResult.hits.hits[0]?._source;
  return result;
};

export const fetchCrawlerCustomSchedulingKeysByIndexName = async (
  client: IScopedClusterClient,
  indexName: string
): Promise<string[]> => {
  const crawlerCustomSchedules = await fetchCrawlerCustomSchedulingByIndexName(client, indexName);

  return crawlerCustomSchedules?.custom_scheduling
    ? Object.keys(crawlerCustomSchedules.custom_scheduling)
    : [];
};

@@ -1,116 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { QueryDslQueryContainer } from '@elastic/elasticsearch/lib/api/types';
import { IScopedClusterClient } from '@kbn/core/server';
import { Connector, CONNECTORS_INDEX } from '@kbn/search-connectors';

import { Crawler, CrawlRequest } from '../../../common/types/crawler';
import { fetchAll } from '../fetch_all';

const CRAWLER_CONFIGURATIONS_INDEX = '.ent-search-actastic-crawler2_configurations_v2';
const CRAWLER_CRAWL_REQUESTS_INDEX = '.ent-search-actastic-crawler2_crawl_requests_v2';

export const fetchMostRecentCrawlerRequestByConfigurationId = async (
  client: IScopedClusterClient,
  configurationId: string
): Promise<CrawlRequest | undefined> => {
  try {
    const crawlRequestResult = await client.asCurrentUser.search<CrawlRequest>({
      index: CRAWLER_CRAWL_REQUESTS_INDEX,
      query: { term: { configuration_oid: configurationId } },
      sort: 'created_at:desc',
    });
    const result = crawlRequestResult.hits.hits[0]?._source;

    return result;
  } catch (error) {
    return undefined;
  }
};

export const fetchCrawlerByIndexName = async (
  client: IScopedClusterClient,
  indexName: string
): Promise<Crawler | undefined> => {
  let crawler: Crawler | undefined;
  try {
    const crawlerResult = await client.asCurrentUser.search<Crawler>({
      index: CRAWLER_CONFIGURATIONS_INDEX,
      query: { term: { index_name: indexName } },
    });
    crawler = crawlerResult.hits.hits[0]?._source;
  } catch (error) {
    return undefined;
  }

  if (crawler) {
    try {
      const mostRecentCrawlRequest = await fetchMostRecentCrawlerRequestByConfigurationId(
        client,
        crawler.id
      );

      return {
        ...crawler,
        most_recent_crawl_request_status: mostRecentCrawlRequest?.status,
      };
    } catch (error) {
      return crawler;
    }
  }

  return undefined;
};

export const fetchCrawlers = async (
  client: IScopedClusterClient,
  indexNames?: string[]
): Promise<Crawler[]> => {
  const query: QueryDslQueryContainer = indexNames
    ? { terms: { index_name: indexNames } }
    : { match_all: {} };
  let crawlers: Crawler[];
  try {
    crawlers = await fetchAll<Crawler>(client, CRAWLER_CONFIGURATIONS_INDEX, query);
  } catch (error) {
    return [];
  }

  try {
    // TODO replace this with an aggregation query
    const crawlersWithStatuses = await Promise.all(
      crawlers.map(async (crawler): Promise<Crawler> => {
        const mostRecentCrawlRequest = await fetchMostRecentCrawlerRequestByConfigurationId(
          client,
          crawler.id
        );

        return {
          ...crawler,
          most_recent_crawl_request_status: mostRecentCrawlRequest?.status,
        };
      })
    );
    return crawlersWithStatuses;
  } catch (error) {
    return crawlers;
  }
};

export const fetchCrawlerDocumentIdByIndexName = async (
  client: IScopedClusterClient,
  indexName: string
): Promise<string> => {
  const crawlerResult = await client.asCurrentUser.search<Connector>({
    index: CONNECTORS_INDEX,
    query: { term: { index_name: indexName } },
    _source: '_id',
  });
  const crawlerId = crawlerResult.hits.hits[0]?._id!;
  return crawlerId;
};

@@ -1,120 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { IScopedClusterClient } from '@kbn/core/server';

import { CONNECTORS_INDEX, ConnectorStatus } from '@kbn/search-connectors';

import { recreateConnectorDocument } from './post_connector';

describe('recreateConnectorDocument lib function', () => {
  const mockClient = {
    asCurrentUser: {
      index: jest.fn(),
    },
    asInternalUser: {},
  };

  beforeEach(() => {
    jest.clearAllMocks();
  });

  it('should recreate connector document', async () => {
    mockClient.asCurrentUser.index.mockResolvedValue({ _id: 'connectorId' });

    await recreateConnectorDocument(mockClient as unknown as IScopedClusterClient, 'indexName');
    expect(mockClient.asCurrentUser.index).toHaveBeenCalledWith({
      document: {
        api_key_id: null,
        api_key_secret_id: null,
        configuration: {},
        custom_scheduling: {},
        deleted: false,
        description: null,
        error: null,
        features: null,
        filtering: [
          {
            active: {
              advanced_snippet: {
                created_at: expect.any(String),
                updated_at: expect.any(String),
                value: {},
              },
              rules: [
                {
                  created_at: expect.any(String),
                  field: '_',
                  id: 'DEFAULT',
                  order: 0,
                  policy: 'include',
                  rule: 'regex',
                  updated_at: expect.any(String),
                  value: '.*',
                },
              ],
              validation: {
                errors: [],
                state: 'valid',
              },
            },
            domain: 'DEFAULT',
            draft: {
              advanced_snippet: {
                created_at: expect.any(String),
                updated_at: expect.any(String),
                value: {},
              },
              rules: [
                {
                  created_at: expect.any(String),
                  field: '_',
                  id: 'DEFAULT',
                  order: 0,
                  policy: 'include',
                  rule: 'regex',
                  updated_at: expect.any(String),
                  value: '.*',
                },
              ],
              validation: {
                errors: [],
                state: 'valid',
              },
            },
          },
        ],
        index_name: 'indexName',
        is_native: false,
        language: '',
        last_access_control_sync_error: null,
        last_access_control_sync_scheduled_at: null,
        last_access_control_sync_status: null,
        last_deleted_document_count: null,
        last_incremental_sync_scheduled_at: null,
        last_indexed_document_count: null,
        last_seen: null,
        last_sync_error: null,
        last_sync_scheduled_at: null,
        last_sync_status: null,
        last_synced: null,
        name: 'indexName',
        pipeline: null,
        scheduling: {
          access_control: { enabled: false, interval: '0 0 0 * * ?' },
          full: { enabled: false, interval: '0 0 0 * * ?' },
          incremental: { enabled: false, interval: '0 0 0 * * ?' },
        },
        service_type: 'elastic-crawler',
        status: ConnectorStatus.CONFIGURED,
        sync_now: false,
      },
      index: CONNECTORS_INDEX,
      refresh: 'wait_for',
    });
  });
});

@@ -1,34 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { IScopedClusterClient } from '@kbn/core-elasticsearch-server';

import { createConnectorDocument, CONNECTORS_INDEX, ConnectorStatus } from '@kbn/search-connectors';

import { ENTERPRISE_SEARCH_CONNECTOR_CRAWLER_SERVICE_TYPE } from '../../../common/constants';
import { stripSearchPrefix } from '../../../common/utils/strip_search_prefix';

export const recreateConnectorDocument = async (
  client: IScopedClusterClient,
  indexName: string
) => {
  const document = createConnectorDocument({
    indexName,
    isNative: false,
    // The search index has already been created so we don't need the language, which we can't retrieve anymore anyway
    language: '',
    name: stripSearchPrefix(indexName),
    pipeline: null,
    serviceType: ENTERPRISE_SEARCH_CONNECTOR_CRAWLER_SERVICE_TYPE,
  });
  const result = await client.asCurrentUser.index({
    document: { ...document, status: ConnectorStatus.CONFIGURED },
    index: CONNECTORS_INDEX,
    refresh: 'wait_for',
  });
  return result._id;
};

@@ -1,101 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { IScopedClusterClient } from '@kbn/core/server';

import { CONNECTORS_INDEX } from '@kbn/search-connectors';

import {
  CrawlerCustomScheduleMappingServer,
  CrawlerCustomScheduleMappingClient,
  CrawlerCustomScheduleServer,
} from '../../../common/types/crawler';

import { fetchCrawlerCustomSchedulingKeysByIndexName } from './fetch_crawler_multiple_schedules';
import { fetchCrawlerDocumentIdByIndexName } from './fetch_crawlers';

const convertCustomScheduleMappingClientToServer = (
  customSchedules: CrawlerCustomScheduleMappingClient
): CrawlerCustomScheduleMappingServer => {
  const customSchedulesServer = Array.from(customSchedules, ([scheduleName, customSchedule]) => {
    const { name, interval, configurationOverrides, enabled } = customSchedule;

    const {
      // eslint-disable-next-line @typescript-eslint/naming-convention
      maxCrawlDepth: max_crawl_depth,
      // eslint-disable-next-line @typescript-eslint/naming-convention
      sitemapDiscoveryDisabled: sitemap_discovery_disabled,
      // eslint-disable-next-line @typescript-eslint/naming-convention
      domainAllowlist: domain_allowlist,
      // eslint-disable-next-line @typescript-eslint/naming-convention
      sitemapUrls: sitemap_urls,
      // eslint-disable-next-line @typescript-eslint/naming-convention
      seedUrls: seed_urls,
    } = configurationOverrides;

    const scheduleServer: CrawlerCustomScheduleServer = {
      name,
      interval,
      configuration_overrides: {
        max_crawl_depth,
        sitemap_discovery_disabled,
        domain_allowlist,
        sitemap_urls,
        seed_urls,
      },
      enabled,
    };

    return [scheduleName, scheduleServer];
  }).reduce((map, scheduleEntry) => {
    const [name, schedule] = scheduleEntry;
    map.set(name, schedule);
    return map;
  }, new Map());
  return customSchedulesServer;
};

export const postCrawlerCustomScheduling = async (
  client: IScopedClusterClient,
  indexName: string,
  customSchedules: CrawlerCustomScheduleMappingClient
) => {
  const connectorId = await fetchCrawlerDocumentIdByIndexName(client, indexName);
  const customSchedulingPayload = convertCustomScheduleMappingClientToServer(customSchedules);

  const existingCustomScheduleKeys = await fetchCrawlerCustomSchedulingKeysByIndexName(
    client,
    indexName
  );
  const newCustomScheduleKeys = Array.from(customSchedulingPayload.keys());
  const scheduleKeysToDelete = existingCustomScheduleKeys.filter(
    (key) => !newCustomScheduleKeys.includes(key)
  );

  // Handle deleted schedules
  if (scheduleKeysToDelete.length > 0) {
    const scriptSource = scheduleKeysToDelete
      .map((scheduleKey) => `ctx._source['custom_scheduling'].remove('${scheduleKey}');`)
      .join(' ');

    await client.asCurrentUser.update({
      index: CONNECTORS_INDEX,
      id: connectorId,
      script: {
        source: scriptSource,
      },
    });
  }

  return await client.asCurrentUser.update({
    index: CONNECTORS_INDEX,
    id: connectorId,
    doc: {
      custom_scheduling: Object.fromEntries(customSchedulingPayload),
    },
  });
};

@@ -1,49 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { IScopedClusterClient } from '@kbn/core/server';

import { Connector, CONNECTORS_INDEX } from '@kbn/search-connectors';

import { updateHtmlExtraction } from './put_html_extraction';

describe('updateHtmlExtraction lib function', () => {
  const mockClient = {
    asCurrentUser: {
      update: jest.fn(),
    },
    asInternalUser: {},
  };

  beforeEach(() => {
    jest.clearAllMocks();
  });

  it('should update connector configuration', async () => {
    mockClient.asCurrentUser.update.mockResolvedValue(true);
    const mockConnector = {
      configuration: { test: { label: 'haha', value: 'this' } },
      id: 'connectorId',
    };

    await updateHtmlExtraction(
      mockClient as unknown as IScopedClusterClient,
      true,
      mockConnector as any as Connector
    );
    expect(mockClient.asCurrentUser.update).toHaveBeenCalledWith({
      doc: {
        configuration: {
          ...mockConnector.configuration,
          extract_full_html: { label: 'Extract full HTML', value: true },
        },
      },
      id: 'connectorId',
      index: CONNECTORS_INDEX,
    });
  });
});

@@ -1,30 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { IScopedClusterClient } from '@kbn/core-elasticsearch-server';

import { CONNECTORS_INDEX, Connector } from '@kbn/search-connectors';

export async function updateHtmlExtraction(
  client: IScopedClusterClient,
  htmlExtraction: boolean,
  connector: Connector
) {
  return await client.asCurrentUser.update({
    doc: {
      configuration: {
        ...connector.configuration,
        extract_full_html: {
          label: 'Extract full HTML',
          value: htmlExtraction,
        },
      },
    },
    id: connector.id,
    index: CONNECTORS_INDEX,
  });
}

@@ -1,118 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

jest.mock('fs', () => ({ readFileSync: jest.fn() }));
import { readFileSync } from 'fs';

import http from 'http';
import https from 'https';

import { ConfigType } from '..';

import { entSearchHttpAgent } from './enterprise_search_http_agent';

describe('entSearchHttpAgent', () => {
  describe('initializeHttpAgent', () => {
    it('creates an https.Agent when host URL is using HTTPS', () => {
      entSearchHttpAgent.initializeHttpAgent({
        host: 'https://example.org',
        ssl: {},
      } as ConfigType);
      expect(entSearchHttpAgent.getHttpAgent()).toBeInstanceOf(https.Agent);
    });

    it('creates an http.Agent when host URL is using HTTP', () => {
      entSearchHttpAgent.initializeHttpAgent({
        host: 'http://example.org',
        ssl: {},
      } as ConfigType);
      expect(entSearchHttpAgent.getHttpAgent()).toBeInstanceOf(http.Agent);
    });

    describe('fallbacks', () => {
      it('initializes a http.Agent when host URL is invalid', () => {
        entSearchHttpAgent.initializeHttpAgent({
          host: '##!notarealurl#$',
          ssl: {},
        } as ConfigType);
        expect(entSearchHttpAgent.getHttpAgent()).toBeInstanceOf(http.Agent);
      });

      it('should be an http.Agent when host URL is empty', () => {
        entSearchHttpAgent.initializeHttpAgent({
          host: undefined,
          ssl: {},
        } as ConfigType);
        expect(entSearchHttpAgent.getHttpAgent()).toBeInstanceOf(http.Agent);
      });
    });
  });

  describe('loadCertificateAuthorities', () => {
    describe('happy path', () => {
      beforeEach(() => {
        jest.clearAllMocks();
        (readFileSync as jest.Mock).mockImplementation((path: string) => `content-of-${path}`);
      });

      it('reads certificate authorities when ssl.certificateAuthorities is a string', () => {
        const certs = entSearchHttpAgent.loadCertificateAuthorities('some-path');
        expect(readFileSync).toHaveBeenCalledTimes(1);
        expect(certs).toEqual(['content-of-some-path']);
      });

      it('reads certificate authorities when ssl.certificateAuthorities is an array', () => {
        const certs = entSearchHttpAgent.loadCertificateAuthorities(['some-path', 'another-path']);
        expect(readFileSync).toHaveBeenCalledTimes(2);
        expect(certs).toEqual(['content-of-some-path', 'content-of-another-path']);
      });

      it('does not read anything when ssl.certificateAuthorities is empty', () => {
        const certs = entSearchHttpAgent.loadCertificateAuthorities(undefined);
        expect(readFileSync).toHaveBeenCalledTimes(0);
        expect(certs).toEqual([]);
      });
    });

    describe('error handling', () => {
      beforeEach(() => {
        const realFs = jest.requireActual('fs');
        (readFileSync as jest.Mock).mockImplementation((path: string) => realFs.readFileSync(path));
      });

      it('throws if certificateAuthorities is invalid', () => {
        expect(() => entSearchHttpAgent.loadCertificateAuthorities('/invalid/ca')).toThrow(
          "ENOENT: no such file or directory, open '/invalid/ca'"
        );
      });
    });
  });

  describe('getAgentOptions', () => {
    it('verificationMode: none', () => {
      expect(entSearchHttpAgent.getAgentOptions('none')).toEqual({
        rejectUnauthorized: false,
      });
    });

    it('verificationMode: certificate', () => {
      expect(entSearchHttpAgent.getAgentOptions('certificate')).toEqual({
        rejectUnauthorized: true,
        checkServerIdentity: expect.any(Function),
      });

      const { checkServerIdentity } = entSearchHttpAgent.getAgentOptions('certificate') as any;
      expect(checkServerIdentity()).toEqual(undefined);
    });

    it('verificationMode: full', () => {
      expect(entSearchHttpAgent.getAgentOptions('full')).toEqual({
        rejectUnauthorized: true,
      });
    });
  });
});

@@ -1,84 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { readFileSync } from 'fs';
import http from 'http';
import https from 'https';
import { PeerCertificate } from 'tls';

import { ConfigType } from '..';

export type HttpAgent = http.Agent | https.Agent;
interface AgentOptions {
  rejectUnauthorized?: boolean;
  checkServerIdentity?: ((host: string, cert: PeerCertificate) => Error | undefined) | undefined;
}

/*
 * Returns an HTTP agent to be used for requests to Enterprise Search APIs
 */
class EnterpriseSearchHttpAgent {
  public httpAgent: HttpAgent = new http.Agent();

  getHttpAgent() {
    return this.httpAgent;
  }

  initializeHttpAgent(config: ConfigType) {
    if (!config.host) return;

    try {
      const parsedHost = new URL(config.host);
      if (parsedHost.protocol === 'https:') {
        this.httpAgent = new https.Agent({
          ca: this.loadCertificateAuthorities(config.ssl.certificateAuthorities),
          ...this.getAgentOptions(config.ssl.verificationMode),
        });
      }
    } catch {
      // Ignore URL parsing errors and fall back to the HTTP agent
    }
  }

  /*
   * Loads custom CA certificate files and returns all certificates as an array
   * This is a potentially expensive operation & why this helper is a class
   * initialized once on plugin init
   */
  loadCertificateAuthorities(certificates: string | string[] | undefined): string[] {
    if (!certificates) return [];

    const paths = Array.isArray(certificates) ? certificates : [certificates];
    return paths.map((path) => readFileSync(path, 'utf8'));
  }

  /*
   * Convert verificationMode to rejectUnauthorized for more consistent config settings
   * with the rest of Kibana
   */
  getAgentOptions(verificationMode: 'full' | 'certificate' | 'none') {
    const agentOptions: AgentOptions = {};

    switch (verificationMode) {
      case 'none':
        agentOptions.rejectUnauthorized = false;
        break;
      case 'certificate':
        agentOptions.rejectUnauthorized = true;
        agentOptions.checkServerIdentity = () => undefined;
        break;
      case 'full':
      default:
        agentOptions.rejectUnauthorized = true;
        break;
    }

    return agentOptions;
  }
}

export const entSearchHttpAgent = new EnterpriseSearchHttpAgent();

@@ -1,503 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { mockConfig, mockLogger, mockHttpAgent } from '../__mocks__';

import {
  ENTERPRISE_SEARCH_KIBANA_COOKIE,
  JSON_HEADER,
  ERROR_CONNECTING_HEADER,
  READ_ONLY_MODE_HEADER,
} from '../../common/constants';

import { EnterpriseSearchRequestHandler } from './enterprise_search_request_handler';

jest.mock('node-fetch');
// eslint-disable-next-line @typescript-eslint/no-var-requires
const fetchMock = require('node-fetch') as jest.Mock;

const { Response } = jest.requireActual('node-fetch');

const responseMock = {
  custom: jest.fn(),
  customError: jest.fn(),
};
const mockExpectedResponseHeaders = {
  [READ_ONLY_MODE_HEADER]: 'false',
};

describe('EnterpriseSearchRequestHandler', () => {
  const enterpriseSearchRequestHandler = new EnterpriseSearchRequestHandler({
    config: mockConfig,
    log: mockLogger,
  }) as any;

  beforeEach(() => {
    jest.clearAllMocks();
    fetchMock.mockReset();
  });

  describe('createRequest()', () => {
    it('makes an API call and returns the response', async () => {
      const responseBody = {
        results: [{ name: 'engine1' }],
        meta: { page: { total_results: 1 } },
      };

      enterpriseSearchAPI.mockReturn(responseBody);

      const requestHandler = enterpriseSearchRequestHandler.createRequest({
        path: '/as/credentials/collection',
      });

      await makeAPICall(requestHandler, {
        query: {
          type: 'indexed',
          pageIndex: 1,
        },
      });

      enterpriseSearchAPI.shouldHaveBeenCalledWith(
        'http://localhost:3002/as/credentials/collection?type=indexed&pageIndex=1',
        { method: 'GET' }
      );

      expect(responseMock.custom).toHaveBeenCalledWith({
        body: responseBody,
        statusCode: 200,
        headers: mockExpectedResponseHeaders,
      });
    });

    describe('request passing', () => {
      it('passes route method', async () => {
        const requestHandler = enterpriseSearchRequestHandler.createRequest({
          path: '/api/example',
        });

        await makeAPICall(requestHandler, { route: { method: 'POST' } });
        enterpriseSearchAPI.shouldHaveBeenCalledWith('http://localhost:3002/api/example', {
          method: 'POST',
        });

        await makeAPICall(requestHandler, { route: { method: 'DELETE' } });
        enterpriseSearchAPI.shouldHaveBeenCalledWith('http://localhost:3002/api/example', {
          method: 'DELETE',
        });
      });

      it('passes request body', async () => {
        const requestHandler = enterpriseSearchRequestHandler.createRequest({
          path: '/api/example',
        });
        await makeAPICall(requestHandler, { body: { bodacious: true } });

        enterpriseSearchAPI.shouldHaveBeenCalledWith('http://localhost:3002/api/example', {
          body: '{"bodacious":true}',
        });
      });

      it('passes a body if that body is a string buffer', async () => {
        const requestHandler = enterpriseSearchRequestHandler.createRequest({
          path: '/api/example',
        });
        await makeAPICall(requestHandler, { body: Buffer.from('{"bodacious":true}') });

        enterpriseSearchAPI.shouldHaveBeenCalledWith('http://localhost:3002/api/example', {
          body: '{"bodacious":true}',
        });
      });

      it('passes request params', async () => {
        const requestHandler = enterpriseSearchRequestHandler.createRequest({
          path: '/api/example',
        });
        await makeAPICall(requestHandler, { query: { someQuery: false } });

        enterpriseSearchAPI.shouldHaveBeenCalledWith(
          'http://localhost:3002/api/example?someQuery=false'
        );
      });

      it('passes custom params set by the handler, which override request params', async () => {
        const requestHandler = enterpriseSearchRequestHandler.createRequest({
          path: '/api/example',
          params: { someQuery: true },
        });
        await makeAPICall(requestHandler, { query: { someQuery: false } });

        enterpriseSearchAPI.shouldHaveBeenCalledWith(
          'http://localhost:3002/api/example?someQuery=true'
        );
      });

      it('correctly encodes query string parameters', async () => {
        const requestHandler = enterpriseSearchRequestHandler.createRequest({
          path: '/api/example',
        });
        await makeAPICall(requestHandler, { query: { 'page[current]': 1 } });

        enterpriseSearchAPI.shouldHaveBeenCalledWith(
          'http://localhost:3002/api/example?page%5Bcurrent%5D=1'
        );
      });

      describe('encodePathParams', () => {
        it('correctly replaces :pathVariables with request.params', async () => {
          const requestHandler = enterpriseSearchRequestHandler.createRequest({
            path: '/api/examples/:example/some/:id',
          });
          await makeAPICall(requestHandler, { params: { example: 'hello', id: 'world' } });

          enterpriseSearchAPI.shouldHaveBeenCalledWith(
            'http://localhost:3002/api/examples/hello/some/world'
          );
        });

        it('correctly encodes path params as URI components', async () => {
          const requestHandler = enterpriseSearchRequestHandler.createRequest({
            path: '/api/examples/:example',
          });
          await makeAPICall(requestHandler, { params: { example: 'hello#@/$%^/&[]{}/";world' } });

          enterpriseSearchAPI.shouldHaveBeenCalledWith(
            'http://localhost:3002/api/examples/hello%23%40%2F%24%25%5E%2F%26%5B%5D%7B%7D%2F%22%3Bworld'
          );
        });
      });
    });

    describe('response passing', () => {
      it('returns the response status code from Enterprise Search', async () => {
        enterpriseSearchAPI.mockReturn({}, { status: 201 });

        const requestHandler = enterpriseSearchRequestHandler.createRequest({
          path: '/api/example',
        });
        await makeAPICall(requestHandler);

        enterpriseSearchAPI.shouldHaveBeenCalledWith('http://localhost:3002/api/example');
        expect(responseMock.custom).toHaveBeenCalledWith({
          body: {},
          statusCode: 201,
          headers: mockExpectedResponseHeaders,
        });
      });

      it('filters out any _sessionData passed back from Enterprise Search', async () => {
        const jsonWithSessionData = {
          _sessionData: {
            secrets: 'no peeking',
          },
          regular: 'data',
        };

        enterpriseSearchAPI.mockReturn(jsonWithSessionData, { headers: JSON_HEADER });

        const requestHandler = enterpriseSearchRequestHandler.createRequest({ path: '/api/prep' });
        await makeAPICall(requestHandler);

        expect(responseMock.custom).toHaveBeenCalledWith({
          statusCode: 200,
          body: {
            regular: 'data',
          },
          headers: mockExpectedResponseHeaders,
        });
      });

      it('passes back the response body as-is if hasJsonResponse is false', async () => {
        const mockFile = new File(['mockFile'], 'mockFile.json');
        enterpriseSearchAPI.mockReturn(mockFile);

        const requestHandler = enterpriseSearchRequestHandler.createRequest({
          path: '/api/file',
          hasJsonResponse: false,
        });
        await makeAPICall(requestHandler);

        enterpriseSearchAPI.shouldHaveBeenCalledWith('http://localhost:3002/api/file');
        expect(responseMock.custom).toHaveBeenCalledWith({
          body: expect.any(Buffer), // Unfortunately Response() buffers the body so we can't actually inspect/equality assert on it
          statusCode: 200,
          headers: mockExpectedResponseHeaders,
        });
      });
    });
  });

  describe('error responses', () => {
    describe('handleClientError()', () => {
      afterEach(() => {
        enterpriseSearchAPI.shouldHaveBeenCalledWith('http://localhost:3002/api/4xx');
        expect(mockLogger.error).not.toHaveBeenCalled();
      });

      it('passes back json.error', async () => {
        const error = 'some error message';
        enterpriseSearchAPI.mockReturn({ error }, { status: 404, headers: JSON_HEADER });

        const requestHandler = enterpriseSearchRequestHandler.createRequest({ path: '/api/4xx' });
        await makeAPICall(requestHandler);

        expect(responseMock.customError).toHaveBeenCalledWith({
          statusCode: 404,
          body: {
            message: 'some error message',
            attributes: { errors: ['some error message'] },
          },
          headers: mockExpectedResponseHeaders,
        });
      });

      it('passes back json.errors', async () => {
        const errors = ['one', 'two', 'three'];
        enterpriseSearchAPI.mockReturn({ errors }, { status: 400, headers: JSON_HEADER });

        const requestHandler = enterpriseSearchRequestHandler.createRequest({ path: '/api/4xx' });
        await makeAPICall(requestHandler);

        expect(responseMock.customError).toHaveBeenCalledWith({
          statusCode: 400,
          body: {
            message: 'one,two,three',
            attributes: { errors: ['one', 'two', 'three'] },
          },
          headers: mockExpectedResponseHeaders,
        });
      });

      it('handles empty json', async () => {
        enterpriseSearchAPI.mockReturn({}, { status: 400, headers: JSON_HEADER });

        const requestHandler = enterpriseSearchRequestHandler.createRequest({ path: '/api/4xx' });
        await makeAPICall(requestHandler);

        expect(responseMock.customError).toHaveBeenCalledWith({
          statusCode: 400,
          body: {
            message: 'Bad Request',
            attributes: { errors: ['Bad Request'] },
          },
          headers: mockExpectedResponseHeaders,
        });
      });

      it('handles invalid json', async () => {
        enterpriseSearchAPI.mockReturn('invalid' as any, { status: 400, headers: JSON_HEADER });

        const requestHandler = enterpriseSearchRequestHandler.createRequest({ path: '/api/4xx' });
        await makeAPICall(requestHandler);

        expect(responseMock.customError).toHaveBeenCalledWith({
          statusCode: 400,
          body: {
            message: 'Bad Request',
            attributes: { errors: ['Bad Request'] },
          },
          headers: mockExpectedResponseHeaders,
        });
      });

      it('handles blank bodies', async () => {
        enterpriseSearchAPI.mockReturn(undefined as any, { status: 404 });

        const requestHandler = enterpriseSearchRequestHandler.createRequest({ path: '/api/4xx' });
        await makeAPICall(requestHandler);

        expect(responseMock.customError).toHaveBeenCalledWith({
          statusCode: 404,
          body: {
            message: 'Not Found',
            attributes: { errors: ['Not Found'] },
          },
          headers: mockExpectedResponseHeaders,
        });
      });
    });

    it('handleServerError()', async () => {
      enterpriseSearchAPI.mockReturn('something crashed!' as any, { status: 500 });
      const requestHandler = enterpriseSearchRequestHandler.createRequest({ path: '/api/5xx' });

      await makeAPICall(requestHandler);
      enterpriseSearchAPI.shouldHaveBeenCalledWith('http://localhost:3002/api/5xx');
|
||||
|
||||
expect(responseMock.customError).toHaveBeenCalledWith({
|
||||
statusCode: 502,
|
||||
body: expect.stringContaining('Enterprise Search encountered an internal server error'),
|
||||
headers: mockExpectedResponseHeaders,
|
||||
});
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
'Enterprise Search Server Error 500 at <http://localhost:3002/api/5xx>: "something crashed!"'
|
||||
);
|
||||
});
|
||||
|
||||
it('handleReadOnlyModeError()', async () => {
|
||||
enterpriseSearchAPI.mockReturn(
|
||||
{ errors: ['Read only mode'] },
|
||||
{ status: 503, headers: { ...JSON_HEADER, [READ_ONLY_MODE_HEADER]: 'true' } }
|
||||
);
|
||||
const requestHandler = enterpriseSearchRequestHandler.createRequest({ path: '/api/503' });
|
||||
|
||||
await makeAPICall(requestHandler);
|
||||
enterpriseSearchAPI.shouldHaveBeenCalledWith('http://localhost:3002/api/503');
|
||||
|
||||
expect(responseMock.customError).toHaveBeenCalledWith({
|
||||
statusCode: 503,
|
||||
body: expect.stringContaining('Enterprise Search is in read-only mode'),
|
||||
headers: { [READ_ONLY_MODE_HEADER]: 'true' },
|
||||
});
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
'Cannot perform action: Enterprise Search is in read-only mode. Actions that create, update, or delete information are disabled.'
|
||||
);
|
||||
});
|
||||
|
||||
it('handleInvalidDataError()', async () => {
|
||||
enterpriseSearchAPI.mockReturn({ results: false });
|
||||
const requestHandler = enterpriseSearchRequestHandler.createRequest({
|
||||
path: '/api/invalid',
|
||||
hasValidData: (body?: any) => Array.isArray(body?.results),
|
||||
});
|
||||
|
||||
await makeAPICall(requestHandler);
|
||||
enterpriseSearchAPI.shouldHaveBeenCalledWith('http://localhost:3002/api/invalid');
|
||||
|
||||
expect(responseMock.customError).toHaveBeenCalledWith({
|
||||
statusCode: 502,
|
||||
body: 'Invalid data received from Enterprise Search',
|
||||
headers: mockExpectedResponseHeaders,
|
||||
});
|
||||
expect(mockLogger.error).toHaveBeenCalledWith(
|
||||
'Invalid data received from <http://localhost:3002/api/invalid>: {"results":false}'
|
||||
);
|
||||
});
|
||||
|
||||
it('handleConnectionError()', async () => {
|
||||
enterpriseSearchAPI.mockReturnError();
|
||||
const requestHandler = enterpriseSearchRequestHandler.createRequest({ path: '/api/failed' });
|
||||
|
||||
await makeAPICall(requestHandler);
|
||||
enterpriseSearchAPI.shouldHaveBeenCalledWith('http://localhost:3002/api/failed');
|
||||
|
||||
expect(responseMock.customError).toHaveBeenCalledWith({
|
||||
statusCode: 502,
|
||||
body: 'Error connecting to Enterprise Search: Failed',
|
||||
headers: { ...mockExpectedResponseHeaders, [ERROR_CONNECTING_HEADER]: 'true' },
|
||||
});
|
||||
expect(mockLogger.error).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
describe('handleAuthenticationError()', () => {
|
||||
afterEach(async () => {
|
||||
const requestHandler = enterpriseSearchRequestHandler.createRequest({
|
||||
path: '/api/unauthenticated',
|
||||
});
|
||||
await makeAPICall(requestHandler);
|
||||
|
||||
enterpriseSearchAPI.shouldHaveBeenCalledWith('http://localhost:3002/api/unauthenticated');
|
||||
expect(responseMock.customError).toHaveBeenCalledWith({
|
||||
statusCode: 502,
|
||||
body: 'Cannot authenticate Enterprise Search user',
|
||||
headers: { ...mockExpectedResponseHeaders, [ERROR_CONNECTING_HEADER]: 'true' },
|
||||
});
|
||||
expect(mockLogger.error).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('errors when receiving a 401 response', async () => {
|
||||
enterpriseSearchAPI.mockReturn({}, { status: 401 });
|
||||
});
|
||||
|
||||
it('errors when redirected to /login', async () => {
|
||||
enterpriseSearchAPI.mockReturn({}, { url: 'http://localhost:3002/login' });
|
||||
});
|
||||
|
||||
it('errors when redirected to /ent/select', async () => {
|
||||
enterpriseSearchAPI.mockReturn({}, { url: 'http://localhost:3002/ent/select' });
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('setResponseHeaders', async () => {
|
||||
enterpriseSearchAPI.mockReturn('anything' as any, {
|
||||
headers: { [READ_ONLY_MODE_HEADER]: 'true' },
|
||||
});
|
||||
const requestHandler = enterpriseSearchRequestHandler.createRequest({ path: '/' });
|
||||
await makeAPICall(requestHandler);
|
||||
|
||||
expect(enterpriseSearchRequestHandler.headers).toEqual({
|
||||
[READ_ONLY_MODE_HEADER]: 'true',
|
||||
});
|
||||
});
|
||||
|
||||
describe('setSessionData', () => {
|
||||
it('sets the value of wsOAuthTokenPackage in a cookie', async () => {
|
||||
const tokenPackage = 'some_encrypted_secrets';
|
||||
|
||||
const mockNow = 'Thu, 04 Mar 2021 22:40:32 GMT';
|
||||
const mockInAnHour = 'Thu, 04 Mar 2021 23:40:32 GMT';
|
||||
jest.spyOn(global.Date, 'now').mockImplementationOnce(() => {
|
||||
return new Date(mockNow).valueOf();
|
||||
});
|
||||
|
||||
const sessionDataBody = {
|
||||
_sessionData: { wsOAuthTokenPackage: tokenPackage },
|
||||
regular: 'data',
|
||||
};
|
||||
|
||||
enterpriseSearchAPI.mockReturn(sessionDataBody, { headers: JSON_HEADER });
|
||||
|
||||
const requestHandler = enterpriseSearchRequestHandler.createRequest({ path: '/' });
|
||||
await makeAPICall(requestHandler);
|
||||
|
||||
expect(enterpriseSearchRequestHandler.headers).toEqual({
|
||||
['set-cookie']: `${ENTERPRISE_SEARCH_KIBANA_COOKIE}=${tokenPackage}; Path=/; Expires=${mockInAnHour}; SameSite=Lax; HttpOnly`,
|
||||
...mockExpectedResponseHeaders,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('isEmptyObj', async () => {
|
||||
expect(enterpriseSearchRequestHandler.isEmptyObj({})).toEqual(true);
|
||||
expect(enterpriseSearchRequestHandler.isEmptyObj({ empty: false })).toEqual(false);
|
||||
});
|
||||
});
|
||||
|
||||
const makeAPICall = (handler: Function, params = {}) => {
|
||||
const request = {
|
||||
headers: { authorization: 'Basic 123' },
|
||||
route: { method: 'GET' },
|
||||
body: {},
|
||||
...params,
|
||||
};
|
||||
return handler(null, request, responseMock);
|
||||
};
|
||||
|
||||
const enterpriseSearchAPI = {
|
||||
shouldHaveBeenCalledWith(expectedUrl: string, expectedParams = {}) {
|
||||
expect(fetchMock).toHaveBeenCalledWith(expectedUrl, {
|
||||
headers: { Authorization: 'Basic 123', ...JSON_HEADER },
|
||||
method: 'GET',
|
||||
body: undefined,
|
||||
agent: mockHttpAgent,
|
||||
...expectedParams,
|
||||
});
|
||||
},
|
||||
mockReturn(response?: object, options?: any) {
|
||||
fetchMock.mockImplementation(() => {
|
||||
const headers = Object.assign({}, mockExpectedResponseHeaders, options?.headers);
|
||||
return Promise.resolve(
|
||||
new Response(response ? JSON.stringify(response) : undefined, { ...options, headers })
|
||||
);
|
||||
});
|
||||
},
|
||||
mockReturnError() {
|
||||
fetchMock.mockImplementation(() => {
|
||||
return Promise.reject('Failed');
|
||||
});
|
||||
},
|
||||
};
|
|
@@ -1,352 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import fetch, { Response } from 'node-fetch';
|
||||
import querystring from 'querystring';
|
||||
|
||||
import {
|
||||
RequestHandler,
|
||||
RequestHandlerContext,
|
||||
KibanaRequest,
|
||||
KibanaResponseFactory,
|
||||
Logger,
|
||||
} from '@kbn/core/server';
|
||||
|
||||
import { ConfigType } from '..';
|
||||
|
||||
import {
|
||||
ENTERPRISE_SEARCH_KIBANA_COOKIE,
|
||||
JSON_HEADER,
|
||||
ERROR_CONNECTING_HEADER,
|
||||
READ_ONLY_MODE_HEADER,
|
||||
} from '../../common/constants';
|
||||
|
||||
import { entSearchHttpAgent } from './enterprise_search_http_agent';
|
||||
|
||||
interface ConstructorDependencies {
|
||||
config: ConfigType;
|
||||
log: Logger;
|
||||
}
|
||||
interface RequestParams {
|
||||
path: string;
|
||||
params?: object;
|
||||
hasJsonResponse?: boolean;
|
||||
hasValidData?: Function;
|
||||
}
|
||||
interface ErrorResponse {
|
||||
message: string;
|
||||
attributes: {
|
||||
errors: string[];
|
||||
};
|
||||
}
|
||||
export interface IEnterpriseSearchRequestHandler {
|
||||
createRequest(requestParams?: RequestParams): RequestHandler<unknown, unknown, unknown>;
|
||||
}
|
||||
|
||||
/**
|
||||
* This helper lib creates a single standard DRY way of handling
|
||||
* Enterprise Search API requests.
|
||||
*
|
||||
* This handler assumes that it will essentially just proxy the
|
||||
* Enterprise Search API request, so the request body and request
|
||||
* parameters are simply passed through.
|
||||
*/
|
||||
export class EnterpriseSearchRequestHandler {
|
||||
private enterpriseSearchUrl: string;
|
||||
private log: Logger;
|
||||
private headers: Record<string, string> = {};
|
||||
private customHeaders: Record<string, string> = {};
|
||||
|
||||
constructor({ config, log }: ConstructorDependencies) {
|
||||
this.log = log;
|
||||
this.enterpriseSearchUrl = config.host as string;
|
||||
this.customHeaders = config.customHeaders as Record<string, string>;
|
||||
}
|
||||
|
||||
createRequest({
|
||||
path,
|
||||
params = {},
|
||||
hasJsonResponse = true,
|
||||
hasValidData = () => true,
|
||||
}: RequestParams) {
|
||||
return async (
|
||||
_context: RequestHandlerContext,
|
||||
request: KibanaRequest<unknown, unknown, unknown>,
|
||||
response: KibanaResponseFactory
|
||||
) => {
|
||||
try {
|
||||
// Set up API URL
|
||||
const encodedPath = this.encodePathParams(path, request.params as Record<string, string>);
|
||||
const queryParams = { ...(request.query as object), ...params };
|
||||
const queryString = !this.isEmptyObj(queryParams)
|
||||
? `?${querystring.stringify(queryParams)}`
|
||||
: '';
|
||||
const url = encodeURI(this.enterpriseSearchUrl) + encodedPath + queryString;
|
||||
|
||||
// Set up API options
|
||||
const options = {
|
||||
method: request.route.method as string,
|
||||
headers: {
|
||||
Authorization: request.headers.authorization as string,
|
||||
...JSON_HEADER,
|
||||
...this.customHeaders,
|
||||
},
|
||||
body: this.getBodyAsString(request.body as object | Buffer),
|
||||
agent: entSearchHttpAgent.getHttpAgent(),
|
||||
};
|
||||
|
||||
// Call the Enterprise Search API
|
||||
const apiResponse = await fetch(url, options);
|
||||
|
||||
// Handle response headers
|
||||
this.setResponseHeaders(apiResponse);
|
||||
|
||||
// Handle unauthenticated users / authentication redirects
|
||||
if (
|
||||
apiResponse.status === 401 ||
|
||||
apiResponse.url.endsWith('/login') ||
|
||||
apiResponse.url.endsWith('/ent/select')
|
||||
) {
|
||||
return this.handleAuthenticationError(response);
|
||||
}
|
||||
|
||||
// Handle 400-500+ responses from the Enterprise Search server
|
||||
const { status } = apiResponse;
|
||||
if (status >= 500) {
|
||||
if (this.headers[READ_ONLY_MODE_HEADER] === 'true') {
|
||||
// Handle 503 read-only mode errors
|
||||
return this.handleReadOnlyModeError(response);
|
||||
} else {
|
||||
// Handle unexpected server errors
|
||||
return this.handleServerError(response, apiResponse, url);
|
||||
}
|
||||
} else if (status >= 400) {
|
||||
return this.handleClientError(response, apiResponse);
|
||||
}
|
||||
|
||||
// Check returned data
|
||||
let responseBody;
|
||||
|
||||
if (hasJsonResponse) {
|
||||
const json = await apiResponse.json();
|
||||
|
||||
if (!hasValidData(json)) {
|
||||
return this.handleInvalidDataError(response, url, json);
|
||||
}
|
||||
|
||||
// Intercept data that is meant for the server side session
|
||||
const { _sessionData, ...responseJson } = json;
|
||||
if (_sessionData) {
|
||||
this.setSessionData(_sessionData);
|
||||
responseBody = responseJson;
|
||||
} else {
|
||||
responseBody = json;
|
||||
}
|
||||
} else {
|
||||
responseBody = apiResponse.body;
|
||||
}
|
||||
|
||||
// Pass successful responses back to the front-end
|
||||
return response.custom({
|
||||
statusCode: status,
|
||||
headers: this.headers,
|
||||
body: responseBody,
|
||||
});
|
||||
} catch (e) {
|
||||
// Catch connection errors
|
||||
return this.handleConnectionError(response, e);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* There are a number of different expected incoming bodies that we handle & pass on to Enterprise Search for ingestion:
|
||||
* - Standard object data (should be JSON stringified)
|
||||
* - Empty (should be passed as undefined and not as an empty obj)
|
||||
* - Raw buffers (passed on as a string, occurs when using the `skipBodyValidation` lib helper)
|
||||
*/
|
||||
getBodyAsString(body: object | Buffer): string | undefined {
|
||||
if (Buffer.isBuffer(body)) return body.toString();
|
||||
if (this.isEmptyObj(body)) return undefined;
|
||||
return JSON.stringify(body);
|
||||
}
|
||||
|
||||
/**
|
||||
* This path helper is similar to React Router's generatePath, but much simpler &
|
||||
* does not use regexes. It enables us to pass a static '/foo/:bar/baz' string to
|
||||
* createRequest({ path }) and have :bar be automatically replaced by the value of
|
||||
* request.params.bar.
|
||||
* It also (very importantly) wraps all URL request params with encodeURIComponent(),
|
||||
* which is an extra layer of encoding required by the Enterprise Search server in
|
||||
* order to correctly & safely parse user-generated IDs with special characters in
|
||||
* their names - just encodeURI alone won't work.
|
||||
*/
|
||||
encodePathParams(path: string, params: Record<string, string>) {
|
||||
const hasParams = path.includes(':');
|
||||
if (!hasParams) {
|
||||
return path;
|
||||
} else {
|
||||
return path
|
||||
.split('/')
|
||||
.map((pathPart) => {
|
||||
const isParam = pathPart.startsWith(':');
|
||||
if (!isParam) {
|
||||
return pathPart;
|
||||
} else {
|
||||
const pathParam = pathPart.replace(':', '');
|
||||
return encodeURIComponent(params[pathParam]);
|
||||
}
|
||||
})
|
||||
.join('/');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Attempt to grab a usable error body from Enterprise Search - this isn't
|
||||
* always possible because some of our internal endpoints send back blank
|
||||
* bodies, and sometimes the server sends back Ruby on Rails error pages
|
||||
*/
|
||||
async getErrorResponseBody(apiResponse: Response) {
|
||||
const { statusText } = apiResponse;
|
||||
const contentType = apiResponse.headers.get('content-type') || '';
|
||||
|
||||
// Default response
|
||||
let body: ErrorResponse = {
|
||||
message: statusText,
|
||||
attributes: { errors: [statusText] },
|
||||
};
|
||||
|
||||
try {
|
||||
if (contentType.includes('application/json')) {
|
||||
// Try parsing body as JSON
|
||||
const json = await apiResponse.json();
|
||||
|
||||
// Some of our internal endpoints return either an `error` or `errors` key,
|
||||
// which can both return either a string or array of strings ¯\_(ツ)_/¯
|
||||
const errors = json.error || json.errors || [statusText];
|
||||
body = {
|
||||
message: errors.toString(),
|
||||
attributes: { errors: Array.isArray(errors) ? errors : [errors] },
|
||||
};
|
||||
} else {
|
||||
// Try parsing body as text/html
|
||||
const text = await apiResponse.text();
|
||||
if (text) {
|
||||
body = {
|
||||
message: text,
|
||||
attributes: { errors: [text] },
|
||||
};
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
// Fail silently
|
||||
}
|
||||
|
||||
return body;
|
||||
}
|
||||
|
||||
/**
|
||||
* Error response helpers
|
||||
*/
|
||||
|
||||
async handleClientError(response: KibanaResponseFactory, apiResponse: Response) {
|
||||
const { status } = apiResponse;
|
||||
const body = await this.getErrorResponseBody(apiResponse);
|
||||
|
||||
return response.customError({ statusCode: status, headers: this.headers, body });
|
||||
}
|
||||
|
||||
async handleServerError(response: KibanaResponseFactory, apiResponse: Response, url: string) {
|
||||
const { status } = apiResponse;
|
||||
const { message } = await this.getErrorResponseBody(apiResponse);
|
||||
|
||||
// Don't expose server errors to the front-end, as they may contain sensitive stack traces
|
||||
const errorMessage =
|
||||
'Enterprise Search encountered an internal server error. Please contact your system administrator if the problem persists.';
|
||||
|
||||
this.log.error(`Enterprise Search Server Error ${status} at <${url}>: ${message}`);
|
||||
return response.customError({ statusCode: 502, headers: this.headers, body: errorMessage });
|
||||
}
|
||||
|
||||
handleReadOnlyModeError(response: KibanaResponseFactory) {
|
||||
const errorMessage =
|
||||
'Enterprise Search is in read-only mode. Actions that create, update, or delete information are disabled.';
|
||||
|
||||
this.log.error(`Cannot perform action: ${errorMessage}`);
|
||||
return response.customError({ statusCode: 503, headers: this.headers, body: errorMessage });
|
||||
}
|
||||
|
||||
handleInvalidDataError(response: KibanaResponseFactory, url: string, json: object) {
|
||||
const errorMessage = 'Invalid data received from Enterprise Search';
|
||||
|
||||
this.log.error(`Invalid data received from <${url}>: ${JSON.stringify(json)}`);
|
||||
return response.customError({ statusCode: 502, headers: this.headers, body: errorMessage });
|
||||
}
|
||||
|
||||
handleConnectionError(response: KibanaResponseFactory, e: Error) {
|
||||
const errorMessage = `Error connecting to Enterprise Search: ${e?.message || e.toString()}`;
|
||||
const headers = { ...this.headers, [ERROR_CONNECTING_HEADER]: 'true' };
|
||||
|
||||
this.log.error(errorMessage);
|
||||
if (e instanceof Error) this.log.debug(e.stack as string);
|
||||
|
||||
return response.customError({ statusCode: 502, headers, body: errorMessage });
|
||||
}
|
||||
|
||||
/**
|
||||
* Note: Kibana auto logs users out when it receives a 401 response, so we want to catch and
|
||||
* return 401 responses from Enterprise Search as a 502 so Kibana sessions aren't interrupted
|
||||
*/
|
||||
handleAuthenticationError(response: KibanaResponseFactory) {
|
||||
const errorMessage = 'Cannot authenticate Enterprise Search user';
|
||||
const headers = { ...this.headers, [ERROR_CONNECTING_HEADER]: 'true' };
|
||||
|
||||
this.log.error(errorMessage);
|
||||
return response.customError({ statusCode: 502, headers, body: errorMessage });
|
||||
}
|
||||
|
||||
/**
|
||||
* Set response headers
|
||||
*
|
||||
* Currently just forwards the read-only mode header, but we can expand this
|
||||
* in the future to pass more headers from Enterprise Search as we need them
|
||||
*/
|
||||
|
||||
setResponseHeaders(apiResponse: Response) {
|
||||
const readOnlyMode = apiResponse.headers.get(READ_ONLY_MODE_HEADER);
|
||||
this.headers[READ_ONLY_MODE_HEADER] = readOnlyMode as 'true' | 'false';
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract Session Data
|
||||
*
|
||||
* In the future, this will set the keys passed back from Enterprise Search
|
||||
* into the Kibana login session.
|
||||
* For now we'll explicitly look for the Workplace Search OAuth token package
|
||||
* and stuff it into a cookie so it can be picked up later when we proxy the
|
||||
* OAuth callback.
|
||||
*/
|
||||
setSessionData(sessionData: { [key: string]: string }) {
|
||||
if (sessionData.wsOAuthTokenPackage) {
|
||||
const anHourFromNow = new Date(Date.now());
|
||||
anHourFromNow.setHours(anHourFromNow.getHours() + 1);
|
||||
|
||||
const cookiePayload = `${ENTERPRISE_SEARCH_KIBANA_COOKIE}=${sessionData.wsOAuthTokenPackage};`;
|
||||
const cookieRestrictions = `Path=/; Expires=${anHourFromNow.toUTCString()}; SameSite=Lax; HttpOnly`;
|
||||
|
||||
this.headers['set-cookie'] = `${cookiePayload} ${cookieRestrictions}`;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Misc helpers
|
||||
*/
|
||||
|
||||
isEmptyObj(obj: object) {
|
||||
return Object.keys(obj).length === 0;
|
||||
}
|
||||
}
|
|
@@ -10,10 +10,6 @@ import { IScopedClusterClient } from '@kbn/core/server';
|
|||
|
||||
import { fetchConnectorByIndexName } from '@kbn/search-connectors';
|
||||
|
||||
import { ENTERPRISE_SEARCH_CONNECTOR_CRAWLER_SERVICE_TYPE } from '../../../common/constants';
|
||||
|
||||
import { fetchCrawlerByIndexName } from '../crawler/fetch_crawlers';
|
||||
|
||||
import { fetchIndex } from './fetch_index';
|
||||
|
||||
jest.mock('@kbn/search-connectors', () => ({
|
||||
|
@@ -29,10 +25,6 @@ jest.mock('@kbn/search-connectors', () => ({
|
|||
fetchConnectorByIndexName: jest.fn(),
|
||||
}));
|
||||
|
||||
jest.mock('../crawler/fetch_crawlers', () => ({
|
||||
fetchCrawlerByIndexName: jest.fn(),
|
||||
}));
|
||||
|
||||
describe('fetchIndex lib function', () => {
|
||||
const mockClient = {
|
||||
asCurrentUser: {
|
||||
|
@@ -103,7 +95,6 @@ describe('fetchIndex lib function', () => {
|
|||
index_name: { aliases: [], data: 'full index' },
|
||||
})
|
||||
);
|
||||
(fetchCrawlerByIndexName as jest.Mock).mockImplementationOnce(() => Promise.resolve(undefined));
|
||||
(fetchConnectorByIndexName as jest.Mock).mockImplementationOnce(() =>
|
||||
Promise.resolve(undefined)
|
||||
);
|
||||
|
@@ -142,69 +133,6 @@ describe('fetchIndex lib function', () => {
|
|||
});
|
||||
});
|
||||
|
||||
it('should return data and stats for index and crawler if crawler is present', async () => {
|
||||
mockClient.asCurrentUser.indices.get.mockImplementation(() =>
|
||||
Promise.resolve({
|
||||
index_name: { aliases: [], data: 'full index' },
|
||||
})
|
||||
);
|
||||
(fetchCrawlerByIndexName as jest.Mock).mockImplementationOnce(() =>
|
||||
Promise.resolve({
|
||||
id: '1234',
|
||||
})
|
||||
);
|
||||
(fetchConnectorByIndexName as jest.Mock).mockImplementationOnce(() =>
|
||||
Promise.resolve(undefined)
|
||||
);
|
||||
mockClient.asCurrentUser.indices.stats.mockImplementation(() => Promise.resolve(statsResponse));
|
||||
|
||||
await expect(
|
||||
fetchIndex(mockClient as unknown as IScopedClusterClient, 'index_name')
|
||||
).resolves.toEqual({
|
||||
...result,
|
||||
crawler: {
|
||||
id: '1234',
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should return data and stats for index and crawler if a crawler registered as a connector is present', async () => {
|
||||
mockClient.asCurrentUser.count.mockReturnValue({ count: 0 });
|
||||
mockClient.asCurrentUser.search.mockReturnValue({
|
||||
hits: {
|
||||
hits: [{ _source: { status: 'in_progress' } }, { _source: { status: 'completed' } }],
|
||||
},
|
||||
});
|
||||
mockClient.asCurrentUser.indices.get.mockImplementation(() =>
|
||||
Promise.resolve({
|
||||
index_name: { aliases: [], data: 'full index' },
|
||||
})
|
||||
);
|
||||
(fetchCrawlerByIndexName as jest.Mock).mockImplementationOnce(() =>
|
||||
Promise.resolve({
|
||||
id: '1234',
|
||||
})
|
||||
);
|
||||
(fetchConnectorByIndexName as jest.Mock).mockImplementationOnce(() =>
|
||||
Promise.resolve({
|
||||
doc: 'doc',
|
||||
service_type: ENTERPRISE_SEARCH_CONNECTOR_CRAWLER_SERVICE_TYPE,
|
||||
})
|
||||
);
|
||||
mockClient.asCurrentUser.indices.stats.mockImplementation(() => Promise.resolve(statsResponse));
|
||||
|
||||
await expect(
|
||||
fetchIndex(mockClient as unknown as IScopedClusterClient, 'index_name')
|
||||
).resolves.toEqual({
|
||||
...result,
|
||||
connector: { doc: 'doc', service_type: ENTERPRISE_SEARCH_CONNECTOR_CRAWLER_SERVICE_TYPE },
|
||||
count: 0,
|
||||
crawler: { id: '1234' },
|
||||
has_in_progress_syncs: true,
|
||||
has_pending_syncs: false,
|
||||
});
|
||||
});
|
||||
|
||||
it('should throw a 404 error if the index cannot be found', async () => {
|
||||
mockClient.asCurrentUser.indices.get.mockImplementation(() => Promise.resolve({}));
|
||||
(fetchConnectorByIndexName as jest.Mock).mockImplementationOnce(() =>
|
||||
@@ -19,7 +19,6 @@ import {
|
|||
import { ENTERPRISE_SEARCH_CONNECTOR_CRAWLER_SERVICE_TYPE } from '../../../common/constants';
|
||||
import { ElasticsearchIndexWithIngestion } from '../../../common/types/indices';
|
||||
import { isIndexNotFoundException } from '../../utils/identify_exceptions';
|
||||
import { fetchCrawlerByIndexName } from '../crawler/fetch_crawlers';
|
||||
|
||||
import { mapIndexStats } from './utils/map_index_stats';
|
||||
|
||||
|
@@ -93,10 +92,5 @@ export const fetchIndex = async (
|
|||
};
|
||||
}
|
||||
|
||||
const crawler = await fetchCrawlerByIndexName(client, index);
|
||||
if (crawler) {
|
||||
return { ...indexResult, connector, crawler };
|
||||
}
|
||||
|
||||
return indexResult;
|
||||
};
|
||||
|
|
|
@@ -9,7 +9,6 @@ import { IScopedClusterClient } from '@kbn/core/server';
|
|||
import { fetchConnectors } from '@kbn/search-connectors';
|
||||
|
||||
import { isNotNullish } from '../../../common/utils/is_not_nullish';
|
||||
import { fetchCrawlers } from '../crawler/fetch_crawlers';
|
||||
|
||||
import { getUnattachedIndexData } from './utils/get_index_data';
|
||||
|
||||
|
@@ -24,12 +23,8 @@ export const fetchUnattachedIndices = async (
|
|||
}> => {
|
||||
const { indexNames } = await getUnattachedIndexData(client, searchQuery);
|
||||
const connectors = await fetchConnectors(client.asCurrentUser, indexNames);
|
||||
const crawlers = await fetchCrawlers(client, indexNames);
|
||||
|
||||
const connectedIndexNames = [
|
||||
...connectors.map((con) => con.index_name).filter(isNotNullish),
|
||||
...crawlers.map((crawler) => crawler.index_name).filter(isNotNullish),
|
||||
];
|
||||
const connectedIndexNames = [...connectors.map((con) => con.index_name).filter(isNotNullish)];
|
||||
|
||||
const indexNameSlice = indexNames
|
||||
.filter((indexName) => !connectedIndexNames.includes(indexName))
|
||||
|
|
|
@@ -51,15 +51,11 @@ import {
|
|||
import { WS_TELEMETRY_NAME } from './collectors/workplace_search/telemetry';
|
||||
import { registerEnterpriseSearchIntegrations } from './integrations';
|
||||
|
||||
import { entSearchHttpAgent } from './lib/enterprise_search_http_agent';
|
||||
import { EnterpriseSearchRequestHandler } from './lib/enterprise_search_request_handler';
|
||||
|
||||
import { registerEnterpriseSearchRoutes } from './routes/enterprise_search';
|
||||
import { registerAnalyticsRoutes } from './routes/enterprise_search/analytics';
|
||||
import { registerApiKeysRoutes } from './routes/enterprise_search/api_keys';
|
||||
import { registerConfigDataRoute } from './routes/enterprise_search/config_data';
|
||||
import { registerConnectorRoutes } from './routes/enterprise_search/connectors';
|
||||
import { registerCrawlerRoutes } from './routes/enterprise_search/crawler/crawler';
|
||||
import { registerStatsRoutes } from './routes/enterprise_search/stats';
|
||||
import { registerTelemetryRoute } from './routes/enterprise_search/telemetry';
|
||||
|
||||
|
@@ -131,11 +127,6 @@ export class EnterpriseSearchPlugin implements Plugin<void, void, PluginsSetup,
|
|||
);
|
||||
}
|
||||
|
||||
/*
|
||||
* Initialize config.ssl.certificateAuthorities file(s) - required for all API calls (+ access checks)
|
||||
*/
|
||||
entSearchHttpAgent.initializeHttpAgent(config);
|
||||
|
||||
/**
|
||||
* Register space/feature control
|
||||
*/
|
||||
|
@@ -238,10 +229,8 @@ export class EnterpriseSearchPlugin implements Plugin<void, void, PluginsSetup,
|
|||
* Register routes
|
||||
*/
|
||||
const router = http.createRouter();
|
||||
const enterpriseSearchRequestHandler = new EnterpriseSearchRequestHandler({ config, log });
|
||||
const dependencies: RouteDependencies = {
|
||||
config,
|
||||
enterpriseSearchRequestHandler,
|
||||
getStartServices,
|
||||
globalConfigService: this.globalConfigService,
|
||||
licensing,
|
||||
|
@@ -254,7 +243,6 @@ export class EnterpriseSearchPlugin implements Plugin<void, void, PluginsSetup,
|
|||
registerEnterpriseSearchRoutes(dependencies);
|
||||
// Enterprise Search Routes
|
||||
if (config.hasConnectors) registerConnectorRoutes(dependencies);
|
||||
if (config.hasWebCrawler) registerCrawlerRoutes(dependencies);
|
||||
registerStatsRoutes(dependencies);
|
||||
|
||||
// Analytics Routes (stand-alone product)
|
||||
|
@@ -314,6 +302,7 @@ export class EnterpriseSearchPlugin implements Plugin<void, void, PluginsSetup,
|
|||
* Register a config for the search guide
|
||||
*/
|
||||
if (config.hasWebCrawler) {
|
||||
// TODO: Do we remove this guide with the removal of native crawler?
|
||||
guidedOnboarding?.registerGuideConfig(websiteSearchGuideId, websiteSearchGuideConfig);
|
||||
}
|
||||
if (config.hasConnectors) {
|
||||
|
|
|
@@ -1,715 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { MockRouter, mockDependencies, mockRequestHandler } from '../../../__mocks__';
|
||||
|
||||
import { registerCrawlerRoutes } from './crawler';
|
||||
|
||||
describe('crawler routes', () => {
|
||||
describe('POST /internal/enterprise_search/crawler', () => {
|
||||
let mockRouter: MockRouter;
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
mockRouter = new MockRouter({
|
||||
method: 'post',
|
||||
path: '/internal/enterprise_search/crawler',
|
||||
});
|
||||
|
||||
registerCrawlerRoutes({
|
||||
...mockDependencies,
|
||||
router: mockRouter.router,
|
||||
});
|
||||
});
|
||||
|
||||
it('validates correctly with name and language', () => {
|
||||
const request = { body: { index_name: 'index-name', language: 'en' } };
|
||||
mockRouter.shouldValidate(request);
|
||||
});
|
||||
|
||||
it('validates correctly when language is null', () => {
|
||||
const request = { body: { index_name: 'index-name', language: null } };
|
||||
mockRouter.shouldValidate(request);
|
||||
});
|
||||
|
||||
it('fails validation without name', () => {
|
||||
const request = { body: { language: 'en' } };
|
||||
mockRouter.shouldThrow(request);
|
||||
});
|
||||
|
||||
it('fails validation without language', () => {
|
||||
const request = { body: { index_name: 'index-name' } };
|
||||
mockRouter.shouldThrow(request);
|
||||
});
|
||||
});
|
||||
|
||||
describe('GET /internal/enterprise_search/indices/{indexName}/crawler', () => {
|
||||
let mockRouter: MockRouter;
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
mockRouter = new MockRouter({
|
||||
method: 'get',
|
||||
path: '/internal/enterprise_search/indices/{indexName}/crawler',
|
||||
});
|
||||
|
||||
registerCrawlerRoutes({
|
||||
...mockDependencies,
|
||||
router: mockRouter.router,
|
||||
});
|
||||
});
|
||||
|
||||
it('creates a request to enterprise search', () => {
|
||||
expect(mockRequestHandler.createRequest).toHaveBeenCalledWith({
|
||||
path: '/api/ent/v1/internal/indices/:indexName/crawler2',
|
||||
});
|
||||
});
|
||||
|
||||
it('validates correctly with name', () => {
|
||||
const request = { params: { indexName: 'index-name' } };
|
||||
mockRouter.shouldValidate(request);
|
||||
});
|
||||
|
||||
it('fails validation without name', () => {
|
||||
const request = { params: {} };
|
||||
mockRouter.shouldThrow(request);
|
||||
});
|
||||
});
|
||||
|
||||
describe('GET /internal/enterprise_search/indices/{indexName}/crawler/crawl_requests/{crawlRequestId}', () => {
|
||||
let mockRouter: MockRouter;
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
mockRouter = new MockRouter({
|
||||
method: 'get',
|
||||
path: '/internal/enterprise_search/indices/{indexName}/crawler/crawl_requests/{crawlRequestId}',
|
||||
});
|
||||
|
||||
registerCrawlerRoutes({
|
||||
...mockDependencies,
|
||||
router: mockRouter.router,
|
||||
});
|
||||
});
|
||||
|
||||
it('creates a request to enterprise search', () => {
|
||||
expect(mockRequestHandler.createRequest).toHaveBeenCalledWith({
|
||||
path: '/api/ent/v1/internal/indices/:indexName/crawler2/crawl_requests/:crawlRequestId',
|
||||
});
|
||||
});
|
||||
|
||||
it('validates correctly with name and id', () => {
|
||||
const request = { params: { crawlRequestId: '12345', indexName: 'index-name' } };
|
||||
mockRouter.shouldValidate(request);
|
||||
});
|
||||
|
||||
it('fails validation without name', () => {
|
||||
const request = { params: { crawlRequestId: '12345' } };
|
||||
mockRouter.shouldThrow(request);
|
||||
});
|
||||
|
||||
it('fails validation without id', () => {
|
||||
const request = { params: { indexName: 'index-name' } };
|
||||
mockRouter.shouldThrow(request);
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /internal/enterprise_search/indices/{indexName}/crawler/crawl_requests', () => {
|
||||
let mockRouter: MockRouter;
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
mockRouter = new MockRouter({
|
||||
method: 'post',
|
||||
path: '/internal/enterprise_search/indices/{indexName}/crawler/crawl_requests',
|
||||
});
|
||||
|
||||
registerCrawlerRoutes({
|
||||
...mockDependencies,
|
||||
router: mockRouter.router,
|
||||
});
|
||||
});
|
||||
|
||||
it('creates a request to enterprise search', () => {
|
||||
expect(mockRequestHandler.createRequest).toHaveBeenCalledWith({
|
||||
path: '/api/ent/v1/internal/indices/:indexName/crawler2/crawl_requests',
|
||||
});
|
||||
});
|
||||
|
||||
it('validates correctly with name', () => {
|
||||
const request = { params: { indexName: 'index-name' } };
|
||||
mockRouter.shouldValidate(request);
|
||||
});
|
||||
|
||||
it('validates correctly with domain urls', () => {
|
||||
const request = {
|
||||
body: { overrides: { domain_allowlist: ['https://www.elastic.co'] } },
|
||||
params: { indexName: 'index-name' },
|
||||
};
|
||||
mockRouter.shouldValidate(request);
|
||||
});
|
||||
|
||||
it('validates correctly with max crawl depth', () => {
|
||||
const request = {
|
||||
body: { overrides: { max_crawl_depth: 10 } },
|
||||
params: { indexName: 'index-name' },
|
||||
};
|
||||
mockRouter.shouldValidate(request);
|
||||
});
|
||||
|
||||
it('validates correctly with seed urls', () => {
|
||||
const request = {
|
||||
body: { overrides: { seed_urls: ['https://www.elastic.co/guide'] } },
|
||||
params: { indexName: 'index-name' },
|
||||
};
|
||||
mockRouter.shouldValidate(request);
|
||||
});
|
||||
|
||||
it('validates correctly with sitemap urls', () => {
|
||||
const request = {
|
||||
body: { overrides: { sitemap_urls: ['https://www.elastic.co/sitemap1.xml'] } },
|
||||
params: { indexName: 'index-name' },
|
||||
};
|
||||
mockRouter.shouldValidate(request);
|
||||
});
|
||||
|
||||
it('validates correctly when we set sitemap discovery', () => {
|
||||
const request = {
|
||||
body: { overrides: { sitemap_discovery_disabled: true } },
|
||||
params: { indexName: 'index-name' },
|
||||
};
|
||||
mockRouter.shouldValidate(request);
|
||||
});
|
||||
|
||||
it('validates correctly with empty overrides', () => {
|
||||
const request = { body: { overrides: {} }, params: { indexName: 'index-name' } };
|
||||
mockRouter.shouldValidate(request);
|
||||
});
|
||||
|
||||
it('fails validation without name', () => {
|
||||
const request = { params: {} };
|
||||
mockRouter.shouldThrow(request);
|
||||
});
|
||||
});
|
||||
|
||||
describe('GET /internal/enterprise_search/indices/{indexName}/crawler/domains', () => {
|
||||
let mockRouter: MockRouter;
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
mockRouter = new MockRouter({
|
||||
method: 'get',
|
||||
path: '/internal/enterprise_search/indices/{indexName}/crawler/domains',
|
||||
});
|
||||
|
||||
registerCrawlerRoutes({
|
||||
...mockDependencies,
|
||||
router: mockRouter.router,
|
||||
});
|
||||
});
|
||||
|
||||
it('creates a request to enterprise search', () => {
|
||||
expect(mockRequestHandler.createRequest).toHaveBeenCalledWith({
|
||||
path: '/api/ent/v1/internal/indices/:indexName/crawler2/domains',
|
||||
});
|
||||
});
|
||||
|
||||
it('validates correctly', () => {
|
||||
const request = {
|
||||
params: { indexName: 'index-name' },
|
||||
query: {
|
||||
'page[current]': 5,
|
||||
'page[size]': 10,
|
||||
},
|
||||
};
|
||||
mockRouter.shouldValidate(request);
|
||||
});
|
||||
|
||||
it('fails validation without required params', () => {
|
||||
const request = { params: {} };
|
||||
mockRouter.shouldThrow(request);
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /internal/enterprise_search/indices/{indexName}/crawler/crawl_requests/cancel', () => {
|
||||
let mockRouter: MockRouter;
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
mockRouter = new MockRouter({
|
||||
method: 'post',
|
||||
path: '/internal/enterprise_search/indices/{indexName}/crawler/crawl_requests/cancel',
|
||||
});
|
||||
|
||||
registerCrawlerRoutes({
|
||||
...mockDependencies,
|
||||
router: mockRouter.router,
|
||||
});
|
||||
});
|
||||
|
||||
it('creates a request to enterprise search', () => {
|
||||
expect(mockRequestHandler.createRequest).toHaveBeenCalledWith({
|
||||
path: '/api/ent/v1/internal/indices/:indexName/crawler2/crawl_requests/active/cancel',
|
||||
});
|
||||
});
|
||||
|
||||
it('validates correctly with name', () => {
|
||||
const request = { params: { indexName: 'index-name' } };
|
||||
mockRouter.shouldValidate(request);
|
||||
});
|
||||
|
||||
it('fails validation without name', () => {
|
||||
const request = { params: {} };
|
||||
mockRouter.shouldThrow(request);
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /internal/enterprise_search/indices/{indexName}/crawler/domains', () => {
|
||||
let mockRouter: MockRouter;
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
mockRouter = new MockRouter({
|
||||
method: 'post',
|
||||
path: '/internal/enterprise_search/indices/{indexName}/crawler/domains',
|
||||
});
|
||||
|
||||
registerCrawlerRoutes({
|
||||
...mockDependencies,
|
||||
router: mockRouter.router,
|
||||
});
|
||||
});
|
||||
|
||||
it('creates a request to enterprise search', () => {
|
||||
expect(mockRequestHandler.createRequest).toHaveBeenCalledWith({
|
||||
path: '/api/ent/v1/internal/indices/:indexName/crawler2/domains',
|
||||
});
|
||||
});
|
||||
|
||||
it('validates correctly with params and body', () => {
|
||||
const request = {
|
||||
body: { entry_points: [{ value: '/guide' }], name: 'https://elastic.co/guide' },
|
||||
params: { indexName: 'index-name' },
|
||||
};
|
||||
mockRouter.shouldValidate(request);
|
||||
});
|
||||
|
||||
it('fails validation without a name param', () => {
|
||||
const request = {
|
||||
body: { entry_points: [{ value: '/guide' }], name: 'https://elastic.co/guide' },
|
||||
params: {},
|
||||
};
|
||||
mockRouter.shouldThrow(request);
|
||||
});
|
||||
|
||||
it('fails validation without a body', () => {
|
||||
const request = {
|
||||
body: {},
|
||||
params: { indexName: 'index-name' },
|
||||
};
|
||||
mockRouter.shouldThrow(request);
|
||||
});
|
||||
});
|
||||
|
||||
describe('DELETE /internal/enterprise_search/indices/{indexName}/crawler/domains/{domainId}', () => {
|
||||
let mockRouter: MockRouter;
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
mockRouter = new MockRouter({
|
||||
method: 'delete',
|
||||
path: '/internal/enterprise_search/indices/{indexName}/crawler/domains/{domainId}',
|
||||
});
|
||||
|
||||
registerCrawlerRoutes({
|
||||
...mockDependencies,
|
||||
router: mockRouter.router,
|
||||
});
|
||||
});
|
||||
|
||||
it('creates a request to enterprise search', () => {
|
||||
expect(mockRequestHandler.createRequest).toHaveBeenCalledWith({
|
||||
path: '/api/ent/v1/internal/indices/:indexName/crawler2/domains/:domainId',
|
||||
});
|
||||
});
|
||||
|
||||
it('validates correctly with name and id', () => {
|
||||
const request = { params: { domainId: '1234', indexName: 'index-name' } };
|
||||
mockRouter.shouldValidate(request);
|
||||
});
|
||||
|
||||
it('fails validation without name', () => {
|
||||
const request = { params: { domainId: '1234' } };
|
||||
mockRouter.shouldThrow(request);
|
||||
});
|
||||
|
||||
it('fails validation without id', () => {
|
||||
const request = { params: { indexName: 'index-name' } };
|
||||
mockRouter.shouldThrow(request);
|
||||
});
|
||||
});
|
||||
|
||||
describe('PUT /internal/enterprise_search/indices/{indexName}/crawler/domains/{domainId}', () => {
|
||||
let mockRouter: MockRouter;
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
mockRouter = new MockRouter({
|
||||
method: 'put',
|
||||
path: '/internal/enterprise_search/indices/{indexName}/crawler/domains/{domainId}',
|
||||
});
|
||||
|
||||
registerCrawlerRoutes({
|
||||
...mockDependencies,
|
||||
router: mockRouter.router,
|
||||
});
|
||||
});
|
||||
|
||||
it('creates a request to enterprise search', () => {
|
||||
expect(mockRequestHandler.createRequest).toHaveBeenCalledWith({
|
||||
path: '/api/ent/v1/internal/indices/:indexName/crawler2/domains/:domainId',
|
||||
});
|
||||
});
|
||||
|
||||
it('validates correctly with crawl rules', () => {
|
||||
const request = {
|
||||
body: {
|
||||
crawl_rules: [
|
||||
{
|
||||
id: '5678',
|
||||
order: 1,
|
||||
},
|
||||
],
|
||||
},
|
||||
params: { domainId: '1234', indexName: 'index-name' },
|
||||
};
|
||||
mockRouter.shouldValidate(request);
|
||||
});
|
||||
|
||||
it('validates correctly with deduplication enabled', () => {
|
||||
const request = {
|
||||
body: {
|
||||
deduplication_enabled: true,
|
||||
},
|
||||
params: { domainId: '1234', indexName: 'index-name' },
|
||||
};
|
||||
mockRouter.shouldValidate(request);
|
||||
});
|
||||
|
||||
it('validates correctly with deduplication fields', () => {
|
||||
const request = {
|
||||
body: {
|
||||
deduplication_fields: ['title', 'description'],
|
||||
},
|
||||
params: { domainId: '1234', indexName: 'index-name' },
|
||||
};
|
||||
mockRouter.shouldValidate(request);
|
||||
});
|
||||
});
|
||||
|
||||
describe('GET /internal/enterprise_search/indices/{indexName}/crawler/domains/{domainId}', () => {
|
||||
let mockRouter: MockRouter;
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
mockRouter = new MockRouter({
|
||||
method: 'get',
|
||||
path: '/internal/enterprise_search/indices/{indexName}/crawler/domains/{domainId}',
|
||||
});
|
||||
|
||||
registerCrawlerRoutes({
|
||||
...mockDependencies,
|
||||
router: mockRouter.router,
|
||||
});
|
||||
});
|
||||
|
||||
it('creates a request to enterprise search', () => {
|
||||
expect(mockRequestHandler.createRequest).toHaveBeenCalledWith({
|
||||
path: '/api/ent/v1/internal/indices/:indexName/crawler2/domains/:domainId',
|
||||
});
|
||||
});
|
||||
|
||||
it('validates correctly with name and id', () => {
|
||||
const request = { params: { domainId: '1234', indexName: 'index-name' } };
|
||||
mockRouter.shouldValidate(request);
|
||||
});
|
||||
|
||||
it('fails validation without name', () => {
|
||||
const request = { params: { domainId: '1234' } };
|
||||
mockRouter.shouldThrow(request);
|
||||
});
|
||||
|
||||
it('fails validation without id', () => {
|
||||
const request = { params: { indexName: 'index-name' } };
|
||||
mockRouter.shouldThrow(request);
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /internal/enterprise_search/crawler/validate_url', () => {
|
||||
let mockRouter: MockRouter;
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
mockRouter = new MockRouter({
|
||||
method: 'post',
|
||||
path: '/internal/enterprise_search/crawler/validate_url',
|
||||
});
|
||||
|
||||
registerCrawlerRoutes({
|
||||
...mockDependencies,
|
||||
router: mockRouter.router,
|
||||
});
|
||||
});
|
||||
|
||||
it('creates a request to enterprise search', () => {
|
||||
expect(mockRequestHandler.createRequest).toHaveBeenCalledWith({
|
||||
path: '/api/ent/v1/internal/crawler2/validate_url',
|
||||
});
|
||||
});
|
||||
|
||||
it('validates correctly with body', () => {
|
||||
const request = {
|
||||
body: { checks: ['tcp', 'url_request'], url: 'elastic.co' },
|
||||
};
|
||||
mockRouter.shouldValidate(request);
|
||||
});
|
||||
|
||||
it('fails validation without a body', () => {
|
||||
const request = {
|
||||
body: {},
|
||||
};
|
||||
mockRouter.shouldThrow(request);
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /internal/enterprise_search/indices/{indexName}/crawler/process_crawls', () => {
|
||||
let mockRouter: MockRouter;
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
mockRouter = new MockRouter({
|
||||
method: 'post',
|
||||
path: '/internal/enterprise_search/indices/{indexName}/crawler/process_crawls',
|
||||
});
|
||||
|
||||
registerCrawlerRoutes({
|
||||
...mockDependencies,
|
||||
router: mockRouter.router,
|
||||
});
|
||||
});
|
||||
|
||||
it('creates a request to enterprise search', () => {
|
||||
expect(mockRequestHandler.createRequest).toHaveBeenCalledWith({
|
||||
path: '/api/ent/v1/internal/indices/:indexName/crawler2/process_crawls',
|
||||
});
|
||||
});
|
||||
|
||||
it('validates correctly', () => {
|
||||
const request = {
|
||||
body: { domains: ['https://elastic.co', 'https://swiftype.com'] },
|
||||
params: { indexName: 'index-name' },
|
||||
};
|
||||
mockRouter.shouldValidate(request);
|
||||
});
|
||||
|
||||
it('validates correctly without body', () => {
|
||||
const request = {
|
||||
body: {},
|
||||
params: { indexName: 'index-name' },
|
||||
};
|
||||
mockRouter.shouldValidate(request);
|
||||
});
|
||||
|
||||
it('fails validation without a name param', () => {
|
||||
const request = {
|
||||
body: { domains: ['https://elastic.co', 'https://swiftype.com'] },
|
||||
params: {},
|
||||
};
|
||||
mockRouter.shouldThrow(request);
|
||||
});
|
||||
});
|
||||
|
||||
describe('GET /internal/enterprise_search/indices/{indexName}/crawler/crawl_schedule', () => {
|
||||
let mockRouter: MockRouter;
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
mockRouter = new MockRouter({
|
||||
method: 'get',
|
||||
path: '/internal/enterprise_search/indices/{indexName}/crawler/crawl_schedule',
|
||||
});
|
||||
|
||||
registerCrawlerRoutes({
|
||||
...mockDependencies,
|
||||
router: mockRouter.router,
|
||||
});
|
||||
});
|
||||
|
||||
it('creates a request to enterprise search', () => {
|
||||
expect(mockRequestHandler.createRequest).toHaveBeenCalledWith({
|
||||
path: '/api/ent/v1/internal/indices/:indexName/crawler2/crawl_schedule',
|
||||
});
|
||||
});
|
||||
|
||||
it('validates correctly', () => {
|
||||
const request = {
|
||||
params: { indexName: 'index-name' },
|
||||
};
|
||||
mockRouter.shouldValidate(request);
|
||||
});
|
||||
|
||||
it('fails validation without a name param', () => {
|
||||
const request = {
|
||||
params: {},
|
||||
};
|
||||
mockRouter.shouldThrow(request);
|
||||
});
|
||||
});
|
||||
|
||||
describe('PUT /internal/enterprise_search/indices/{indexName}/crawler/crawl_schedule', () => {
|
||||
let mockRouter: MockRouter;
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
mockRouter = new MockRouter({
|
||||
method: 'put',
|
||||
path: '/internal/enterprise_search/indices/{indexName}/crawler/crawl_schedule',
|
||||
});
|
||||
|
||||
registerCrawlerRoutes({
|
||||
...mockDependencies,
|
||||
router: mockRouter.router,
|
||||
});
|
||||
});
|
||||
|
||||
it('creates a request to enterprise search', () => {
|
||||
expect(mockRequestHandler.createRequest).toHaveBeenCalledWith({
|
||||
path: '/api/ent/v1/internal/indices/:indexName/crawler2/crawl_schedule',
|
||||
});
|
||||
});
|
||||
|
||||
it('validates correctly', () => {
|
||||
const request = {
|
||||
body: { frequency: 7, unit: 'day', use_connector_schedule: true },
|
||||
params: { indexName: 'index-name' },
|
||||
};
|
||||
mockRouter.shouldValidate(request);
|
||||
});
|
||||
|
||||
it('fails validation without a name param', () => {
|
||||
const request = {
|
||||
body: { frequency: 7, unit: 'day', use_connector_schedule: true },
|
||||
params: {},
|
||||
};
|
||||
mockRouter.shouldThrow(request);
|
||||
});
|
||||
|
||||
it('fails validation without a unit property in body', () => {
|
||||
const request = {
|
||||
body: { frequency: 7, use_connector_schedule: true },
|
||||
params: { indexName: 'index-name' },
|
||||
};
|
||||
mockRouter.shouldThrow(request);
|
||||
});
|
||||
|
||||
it('fails validation without a frequency property in body', () => {
|
||||
const request = {
|
||||
body: { unit: 'day', use_connector_schedule: true },
|
||||
params: { indexName: 'index-name' },
|
||||
};
|
||||
mockRouter.shouldThrow(request);
|
||||
});
|
||||
|
||||
it('fails validation without a use_connector_schedule property in body', () => {
|
||||
const request = {
|
||||
body: { frequency: 7, unit: 'day' },
|
||||
params: { indexName: 'index-name' },
|
||||
};
|
||||
mockRouter.shouldThrow(request);
|
||||
});
|
||||
});
|
||||
|
||||
describe('DELETE /internal/enterprise_search/indices/{indexName}/crawler/crawl_schedule', () => {
|
||||
let mockRouter: MockRouter;
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
mockRouter = new MockRouter({
|
||||
method: 'delete',
|
||||
path: '/internal/enterprise_search/indices/{indexName}/crawler/crawl_schedule',
|
||||
});
|
||||
|
||||
registerCrawlerRoutes({
|
||||
...mockDependencies,
|
||||
router: mockRouter.router,
|
||||
});
|
||||
});
|
||||
|
||||
it('creates a request to enterprise search', () => {
|
||||
expect(mockRequestHandler.createRequest).toHaveBeenCalledWith({
|
||||
path: '/api/ent/v1/internal/indices/:indexName/crawler2/crawl_schedule',
|
||||
});
|
||||
});
|
||||
|
||||
it('validates correctly', () => {
|
||||
const request = {
|
||||
params: { indexName: 'index-name' },
|
||||
};
|
||||
mockRouter.shouldValidate(request);
|
||||
});
|
||||
|
||||
it('fails validation without a name param', () => {
|
||||
const request = {
|
||||
params: {},
|
||||
};
|
||||
mockRouter.shouldThrow(request);
|
||||
});
|
||||
});
|
||||
|
||||
describe('GET /internal/enterprise_search/indices/{indexName}/crawler/domain_configs', () => {
|
||||
let mockRouter: MockRouter;
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
mockRouter = new MockRouter({
|
||||
method: 'get',
|
||||
path: '/internal/enterprise_search/indices/{indexName}/crawler/domain_configs',
|
||||
});
|
||||
|
||||
registerCrawlerRoutes({
|
||||
...mockDependencies,
|
||||
router: mockRouter.router,
|
||||
});
|
||||
});
|
||||
|
||||
it('creates a request to enterprise search', () => {
|
||||
expect(mockRequestHandler.createRequest).toHaveBeenCalledWith({
|
||||
path: '/api/ent/v1/internal/indices/:indexName/crawler2/domain_configs',
|
||||
});
|
||||
});
|
||||
|
||||
it('validates correctly with name', () => {
|
||||
const request = { params: { indexName: 'index-name' } };
|
||||
mockRouter.shouldValidate(request);
|
||||
});
|
||||
|
||||
it('validates correctly with page[current]', () => {
|
||||
const request = { params: { indexName: 'index-name' }, query: { 'page[current]': 4 } };
|
||||
mockRouter.shouldValidate(request);
|
||||
});
|
||||
|
||||
it('validates correctly with page[size]', () => {
|
||||
const request = { params: { indexName: 'index-name' }, query: { 'page[size]': 100 } };
|
||||
mockRouter.shouldValidate(request);
|
||||
});
|
||||
|
||||
it('fails validation without name', () => {
|
||||
const request = { params: {} };
|
||||
mockRouter.shouldThrow(request);
|
||||
});
|
||||
});
|
||||
});
|
|
@@ -1,484 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { schema } from '@kbn/config-schema';
|
||||
|
||||
import { i18n } from '@kbn/i18n';
|
||||
|
||||
import { deleteConnectorById, fetchConnectorByIndexName } from '@kbn/search-connectors';
|
||||
|
||||
import { ENTERPRISE_SEARCH_CONNECTOR_CRAWLER_SERVICE_TYPE } from '../../../../common/constants';
|
||||
|
||||
import { ErrorCode } from '../../../../common/types/error_codes';
|
||||
import { addConnector } from '../../../lib/connectors/add_connector';
|
||||
import { fetchCrawlerByIndexName } from '../../../lib/crawler/fetch_crawlers';
|
||||
import { recreateConnectorDocument } from '../../../lib/crawler/post_connector';
|
||||
import { updateHtmlExtraction } from '../../../lib/crawler/put_html_extraction';
|
||||
import { deleteIndex } from '../../../lib/indices/delete_index';
|
||||
import type { RouteDependencies } from '../../../types';
|
||||
import { createError } from '../../../utils/create_error';
|
||||
import { elasticsearchErrorHandler } from '../../../utils/elasticsearch_error_handler';
|
||||
|
||||
import { registerCrawlerCrawlRulesRoutes } from './crawler_crawl_rules';
|
||||
import { registerCrawlerEntryPointRoutes } from './crawler_entry_points';
|
||||
import { registerCrawlerMultipleSchedulesRoutes } from './crawler_multiple_schedules';
|
||||
import { registerCrawlerSitemapRoutes } from './crawler_sitemaps';
|
||||
|
||||
export function registerCrawlerRoutes(routeDependencies: RouteDependencies) {
|
||||
const { router, enterpriseSearchRequestHandler, log } = routeDependencies;
|
||||
|
||||
router.post(
|
||||
{
|
||||
path: '/internal/enterprise_search/crawler',
|
||||
validate: {
|
||||
body: schema.object({
|
||||
index_name: schema.string(),
|
||||
language: schema.oneOf([schema.string(), schema.literal(null)]),
|
||||
}),
|
||||
},
|
||||
},
|
||||
elasticsearchErrorHandler(log, async (context, request, response) => {
|
||||
const connParams = {
|
||||
deleteExistingConnector: true,
|
||||
indexName: request.body.index_name,
|
||||
isNative: true,
|
||||
language: request.body.language,
|
||||
name: null,
|
||||
serviceType: ENTERPRISE_SEARCH_CONNECTOR_CRAWLER_SERVICE_TYPE,
|
||||
};
|
||||
const { client } = (await context.core).elasticsearch;
|
||||
|
||||
const indexExists = await client.asCurrentUser.indices.exists({
|
||||
index: request.body.index_name,
|
||||
});
|
||||
if (indexExists) {
|
||||
return createError({
|
||||
errorCode: ErrorCode.INDEX_ALREADY_EXISTS,
|
||||
message: i18n.translate(
|
||||
'xpack.enterpriseSearch.server.routes.addCrawler.indexExistsError',
|
||||
{
|
||||
defaultMessage: 'This index already exists',
|
||||
}
|
||||
),
|
||||
response,
|
||||
statusCode: 409,
|
||||
});
|
||||
}
|
||||
|
||||
const crawler = await fetchCrawlerByIndexName(client, request.body.index_name);
|
||||
if (crawler) {
|
||||
return createError({
|
||||
errorCode: ErrorCode.CRAWLER_ALREADY_EXISTS,
|
||||
message: i18n.translate(
|
||||
'xpack.enterpriseSearch.server.routes.addCrawler.crawlerExistsError',
|
||||
{
|
||||
defaultMessage: 'A crawler for this index already exists',
|
||||
}
|
||||
),
|
||||
response,
|
||||
statusCode: 409,
|
||||
});
|
||||
}
|
||||
|
||||
const connector = await fetchConnectorByIndexName(
|
||||
client.asCurrentUser,
|
||||
request.body.index_name
|
||||
);
|
||||
if (connector) {
|
||||
return createError({
|
||||
errorCode: ErrorCode.CONNECTOR_DOCUMENT_ALREADY_EXISTS,
|
||||
message: i18n.translate(
|
||||
'xpack.enterpriseSearch.server.routes.addCrawler.connectorExistsError',
|
||||
{
|
||||
defaultMessage: 'A connector for this index already exists',
|
||||
}
|
||||
),
|
||||
response,
|
||||
statusCode: 409,
|
||||
});
|
||||
}
|
||||
|
||||
try {
|
||||
await addConnector(client, connParams);
|
||||
const res = await enterpriseSearchRequestHandler.createRequest({
|
||||
path: '/api/ent/v1/internal/indices',
|
||||
})(context, request, response);
|
||||
|
||||
if (res.status !== 200) {
|
||||
throw new Error(res.payload.message);
|
||||
}
|
||||
return res;
|
||||
} catch (error) {
|
||||
// clean up connector index if it was created
|
||||
const createdConnector = await fetchConnectorByIndexName(
|
||||
client.asCurrentUser,
|
||||
request.body.index_name
|
||||
);
|
||||
if (createdConnector) {
|
||||
await deleteConnectorById(client.asCurrentUser, createdConnector.id);
|
||||
if (createdConnector.index_name) {
|
||||
await deleteIndex(client, createdConnector.index_name);
|
||||
}
|
||||
}
|
||||
|
||||
throw error;
|
||||
}
|
||||
})
|
||||
);
|
||||
|
||||
router.post(
|
||||
{
|
||||
path: '/internal/enterprise_search/crawler/validate_url',
|
||||
validate: {
|
||||
body: schema.object({
|
||||
checks: schema.arrayOf(schema.string()),
|
||||
url: schema.string(),
|
||||
}),
|
||||
},
|
||||
},
|
||||
enterpriseSearchRequestHandler.createRequest({
|
||||
path: '/api/ent/v1/internal/crawler2/validate_url',
|
||||
})
|
||||
);
|
||||
|
||||
router.get(
|
||||
{
|
||||
path: '/internal/enterprise_search/indices/{indexName}/crawler',
|
||||
validate: {
|
||||
params: schema.object({
|
||||
indexName: schema.string(),
|
||||
}),
|
||||
},
|
||||
},
|
||||
enterpriseSearchRequestHandler.createRequest({
|
||||
path: '/api/ent/v1/internal/indices/:indexName/crawler2',
|
||||
})
|
||||
);
|
||||
|
||||
router.post(
|
||||
{
|
||||
path: '/internal/enterprise_search/indices/{indexName}/crawler/crawl_requests',
|
||||
validate: {
|
||||
body: schema.object({
|
||||
overrides: schema.maybe(
|
||||
schema.object({
|
||||
domain_allowlist: schema.maybe(schema.arrayOf(schema.string())),
|
||||
max_crawl_depth: schema.maybe(schema.number()),
|
||||
seed_urls: schema.maybe(schema.arrayOf(schema.string())),
|
||||
sitemap_discovery_disabled: schema.maybe(schema.boolean()),
|
||||
sitemap_urls: schema.maybe(schema.arrayOf(schema.string())),
|
||||
})
|
||||
),
|
||||
}),
|
||||
params: schema.object({
|
||||
indexName: schema.string(),
|
||||
}),
|
||||
},
|
||||
},
|
||||
enterpriseSearchRequestHandler.createRequest({
|
||||
path: '/api/ent/v1/internal/indices/:indexName/crawler2/crawl_requests',
|
||||
})
|
||||
);
|
||||
|
||||
router.post(
|
||||
{
|
||||
path: '/internal/enterprise_search/indices/{indexName}/crawler/crawl_requests/cancel',
|
||||
validate: {
|
||||
params: schema.object({
|
||||
indexName: schema.string(),
|
||||
}),
|
||||
},
|
||||
},
|
||||
enterpriseSearchRequestHandler.createRequest({
|
||||
path: '/api/ent/v1/internal/indices/:indexName/crawler2/crawl_requests/active/cancel',
|
||||
})
|
||||
);
|
||||
|
||||
router.get(
|
||||
{
|
||||
path: '/internal/enterprise_search/indices/{indexName}/crawler/crawl_requests/{crawlRequestId}',
|
||||
validate: {
|
||||
params: schema.object({
|
||||
crawlRequestId: schema.string(),
|
||||
indexName: schema.string(),
|
||||
}),
|
||||
},
|
||||
},
|
||||
enterpriseSearchRequestHandler.createRequest({
|
||||
path: '/api/ent/v1/internal/indices/:indexName/crawler2/crawl_requests/:crawlRequestId',
|
||||
})
|
||||
);
|
||||
|
||||
router.get(
|
||||
{
|
||||
path: '/internal/enterprise_search/indices/{indexName}/crawler/domains',
|
||||
validate: {
|
||||
params: schema.object({
|
||||
indexName: schema.string(),
|
||||
}),
|
||||
query: schema.object({
|
||||
'page[current]': schema.number(),
|
||||
'page[size]': schema.number(),
|
||||
}),
|
||||
},
|
||||
},
|
||||
enterpriseSearchRequestHandler.createRequest({
|
||||
path: '/api/ent/v1/internal/indices/:indexName/crawler2/domains',
|
||||
})
|
||||
);
|
||||
|
||||
router.post(
|
||||
{
|
||||
path: '/internal/enterprise_search/indices/{indexName}/crawler/domains',
|
||||
validate: {
|
||||
body: schema.object({
|
||||
entry_points: schema.arrayOf(
|
||||
schema.object({
|
||||
value: schema.string(),
|
||||
})
|
||||
),
|
||||
name: schema.string(),
|
||||
}),
|
||||
params: schema.object({
|
||||
indexName: schema.string(),
|
||||
}),
|
||||
},
|
||||
},
|
||||
enterpriseSearchRequestHandler.createRequest({
|
||||
path: '/api/ent/v1/internal/indices/:indexName/crawler2/domains',
|
||||
})
|
||||
);
|
||||
|
||||
router.get(
|
||||
{
|
||||
path: '/internal/enterprise_search/indices/{indexName}/crawler/domains/{domainId}',
|
||||
validate: {
|
||||
params: schema.object({
|
||||
domainId: schema.string(),
|
||||
indexName: schema.string(),
|
||||
}),
|
||||
},
|
||||
},
|
||||
enterpriseSearchRequestHandler.createRequest({
|
||||
path: '/api/ent/v1/internal/indices/:indexName/crawler2/domains/:domainId',
|
||||
})
|
||||
);
|
||||
|
||||
router.put(
|
||||
{
|
||||
path: '/internal/enterprise_search/indices/{indexName}/crawler/domains/{domainId}',
|
||||
validate: {
|
||||
body: schema.object({
|
||||
auth: schema.maybe(
|
||||
schema.nullable(
|
||||
schema.object({
|
||||
header: schema.maybe(schema.string()),
|
||||
password: schema.maybe(schema.string()),
|
||||
type: schema.string(),
|
||||
username: schema.maybe(schema.string()),
|
||||
})
|
||||
)
|
||||
),
|
||||
crawl_rules: schema.maybe(
|
||||
schema.arrayOf(
|
||||
schema.object({
|
||||
id: schema.string(),
|
||||
order: schema.number(),
|
||||
})
|
||||
)
|
||||
),
|
||||
deduplication_enabled: schema.maybe(schema.boolean()),
|
||||
deduplication_fields: schema.maybe(schema.arrayOf(schema.string())),
|
||||
}),
|
||||
params: schema.object({
|
||||
domainId: schema.string(),
|
||||
indexName: schema.string(),
|
||||
}),
|
||||
},
|
||||
},
|
||||
enterpriseSearchRequestHandler.createRequest({
|
||||
path: '/api/ent/v1/internal/indices/:indexName/crawler2/domains/:domainId',
|
||||
})
|
||||
);
|
||||
|
||||
router.delete(
|
||||
{
|
||||
path: '/internal/enterprise_search/indices/{indexName}/crawler/domains/{domainId}',
|
||||
validate: {
|
||||
params: schema.object({
|
||||
domainId: schema.string(),
|
||||
indexName: schema.string(),
|
||||
}),
|
||||
},
|
||||
},
|
||||
enterpriseSearchRequestHandler.createRequest({
|
||||
path: '/api/ent/v1/internal/indices/:indexName/crawler2/domains/:domainId',
|
||||
})
|
||||
);
|
||||
|
||||
router.get(
|
||||
{
|
||||
path: '/internal/enterprise_search/indices/{indexName}/crawler/domain_configs',
|
||||
validate: {
|
||||
params: schema.object({
|
||||
indexName: schema.string(),
|
||||
}),
|
||||
query: schema.object({
|
||||
'page[current]': schema.maybe(schema.number()),
|
||||
'page[size]': schema.maybe(schema.number()),
|
||||
}),
|
||||
},
|
||||
},
|
||||
enterpriseSearchRequestHandler.createRequest({
|
||||
path: '/api/ent/v1/internal/indices/:indexName/crawler2/domain_configs',
|
||||
})
|
||||
);
|
||||
|
||||
router.post(
|
||||
{
|
||||
path: '/internal/enterprise_search/indices/{indexName}/crawler/process_crawls',
|
||||
validate: {
|
||||
body: schema.object({
|
||||
domains: schema.maybe(schema.arrayOf(schema.string())),
|
||||
}),
|
||||
params: schema.object({
|
||||
indexName: schema.string(),
|
||||
}),
|
||||
},
|
||||
},
|
||||
enterpriseSearchRequestHandler.createRequest({
|
||||
path: '/api/ent/v1/internal/indices/:indexName/crawler2/process_crawls',
|
||||
})
|
||||
);
|
||||
|
||||
router.get(
|
||||
{
|
||||
path: '/internal/enterprise_search/indices/{indexName}/crawler/crawl_schedule',
|
||||
validate: {
|
||||
params: schema.object({
|
||||
indexName: schema.string(),
|
||||
}),
|
||||
},
|
||||
},
|
||||
enterpriseSearchRequestHandler.createRequest({
|
||||
path: '/api/ent/v1/internal/indices/:indexName/crawler2/crawl_schedule',
|
||||
})
|
||||
);
|
||||
|
||||
router.put(
|
||||
{
|
||||
path: '/internal/enterprise_search/indices/{indexName}/crawler/crawl_schedule',
|
||||
validate: {
|
||||
body: schema.object({
|
||||
frequency: schema.number(),
|
||||
unit: schema.string(),
|
||||
use_connector_schedule: schema.boolean(),
|
||||
}),
|
||||
params: schema.object({
|
||||
indexName: schema.string(),
|
||||
}),
|
||||
},
|
||||
},
|
||||
enterpriseSearchRequestHandler.createRequest({
|
||||
path: '/api/ent/v1/internal/indices/:indexName/crawler2/crawl_schedule',
|
||||
})
|
||||
);
|
||||
|
||||
router.delete(
|
||||
{
|
||||
path: '/internal/enterprise_search/indices/{indexName}/crawler/crawl_schedule',
|
||||
validate: {
|
||||
params: schema.object({
|
||||
indexName: schema.string(),
|
||||
}),
|
||||
},
|
||||
},
|
||||
enterpriseSearchRequestHandler.createRequest({
|
||||
path: '/api/ent/v1/internal/indices/:indexName/crawler2/crawl_schedule',
|
||||
})
|
||||
);
|
||||
|
||||
router.put(
|
||||
{
|
||||
path: '/internal/enterprise_search/indices/{indexName}/crawler/html_extraction',
|
||||
validate: {
|
||||
body: schema.object({
|
||||
extract_full_html: schema.boolean(),
|
||||
}),
|
||||
params: schema.object({
|
||||
indexName: schema.string(),
|
||||
}),
|
||||
},
|
||||
},
|
||||
elasticsearchErrorHandler(log, async (context, request, response) => {
|
||||
const { client } = (await context.core).elasticsearch;
|
||||
|
||||
const connector = await fetchConnectorByIndexName(
|
||||
client.asCurrentUser,
|
||||
request.params.indexName
|
||||
);
|
||||
if (
|
||||
connector &&
|
||||
connector.service_type === ENTERPRISE_SEARCH_CONNECTOR_CRAWLER_SERVICE_TYPE
|
||||
) {
|
||||
await updateHtmlExtraction(client, request.body.extract_full_html, connector);
|
||||
return response.ok();
|
||||
} else {
|
||||
return createError({
|
||||
errorCode: ErrorCode.RESOURCE_NOT_FOUND,
|
||||
message: i18n.translate(
|
||||
'xpack.enterpriseSearch.server.routes.updateHtmlExtraction.noCrawlerFound',
|
||||
{
|
||||
defaultMessage: 'Could not find a crawler for this index',
|
||||
}
|
||||
),
|
||||
response,
|
||||
statusCode: 404,
|
||||
});
|
||||
}
|
||||
})
|
||||
);
|
||||
|
||||
router.post(
|
||||
{
|
||||
path: '/internal/enterprise_search/indices/{indexName}/crawler/connector',
|
||||
validate: {
|
||||
params: schema.object({
|
||||
indexName: schema.string(),
|
||||
}),
|
||||
},
|
||||
},
|
||||
elasticsearchErrorHandler(log, async (context, request, response) => {
|
||||
const { client } = (await context.core).elasticsearch;
|
||||
const connector = await fetchConnectorByIndexName(
|
||||
client.asCurrentUser,
|
||||
request.params.indexName
|
||||
);
|
||||
if (connector) {
|
||||
return createError({
|
||||
errorCode: ErrorCode.CONNECTOR_DOCUMENT_ALREADY_EXISTS,
|
||||
message: i18n.translate(
|
||||
'xpack.enterpriseSearch.server.routes.recreateConnector.connectorExistsError',
|
||||
{
|
||||
defaultMessage: 'A connector for this index already exists',
|
||||
}
|
||||
),
|
||||
response,
|
||||
statusCode: 409,
|
||||
});
|
||||
}
|
||||
|
||||
const connectorId = await recreateConnectorDocument(client, request.params.indexName);
|
||||
return response.ok({ body: { connector_id: connectorId } });
|
||||
})
|
||||
);
|
||||
|
||||
registerCrawlerCrawlRulesRoutes(routeDependencies);
|
||||
registerCrawlerEntryPointRoutes(routeDependencies);
|
||||
registerCrawlerSitemapRoutes(routeDependencies);
|
||||
registerCrawlerMultipleSchedulesRoutes(routeDependencies);
|
||||
}
|
|

@@ -1,82 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { schema } from '@kbn/config-schema';

import type { RouteDependencies } from '../../../types';

export function registerCrawlerCrawlRulesRoutes({
  router,
  enterpriseSearchRequestHandler,
}: RouteDependencies) {
  router.post(
    {
      path: '/internal/enterprise_search/indices/{indexName}/crawler/domains/{domainId}/crawl_rules',
      validate: {
        params: schema.object({
          indexName: schema.string(),
          domainId: schema.string(),
        }),
        body: schema.object({
          pattern: schema.string(),
          policy: schema.string(),
          rule: schema.string(),
        }),
      },
    },
    enterpriseSearchRequestHandler.createRequest({
      path: '/api/ent/v1/internal/indices/:indexName/crawler2/domains/:domainId/crawl_rules',
      params: {
        respond_with: 'index',
      },
    })
  );

  router.put(
    {
      path: '/internal/enterprise_search/indices/{indexName}/crawler/domains/{domainId}/crawl_rules/{crawlRuleId}',
      validate: {
        params: schema.object({
          indexName: schema.string(),
          domainId: schema.string(),
          crawlRuleId: schema.string(),
        }),
        body: schema.object({
          order: schema.number(),
          pattern: schema.string(),
          policy: schema.string(),
          rule: schema.string(),
        }),
      },
    },
    enterpriseSearchRequestHandler.createRequest({
      path: '/api/ent/v1/internal/indices/:indexName/crawler2/domains/:domainId/crawl_rules/:crawlRuleId',
      params: {
        respond_with: 'index',
      },
    })
  );

  router.delete(
    {
      path: '/internal/enterprise_search/indices/{indexName}/crawler/domains/{domainId}/crawl_rules/{crawlRuleId}',
      validate: {
        params: schema.object({
          indexName: schema.string(),
          domainId: schema.string(),
          crawlRuleId: schema.string(),
        }),
      },
    },
    enterpriseSearchRequestHandler.createRequest({
      path: '/api/ent/v1/internal/indices/:indexName/crawler2/domains/:domainId/crawl_rules/:crawlRuleId',
      params: {
        respond_with: 'index',
      },
    })
  );
}

@@ -1,77 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { schema } from '@kbn/config-schema';

import type { RouteDependencies } from '../../../types';

export function registerCrawlerEntryPointRoutes({
  router,
  enterpriseSearchRequestHandler,
}: RouteDependencies) {
  router.post(
    {
      path: '/internal/enterprise_search/indices/{indexName}/crawler/domains/{domainId}/entry_points',
      validate: {
        params: schema.object({
          indexName: schema.string(),
          domainId: schema.string(),
        }),
        body: schema.object({
          value: schema.string(),
        }),
      },
    },
    enterpriseSearchRequestHandler.createRequest({
      path: '/api/ent/v1/internal/indices/:indexName/crawler2/domains/:domainId/entry_points',
      params: {
        respond_with: 'index',
      },
    })
  );

  router.put(
    {
      path: '/internal/enterprise_search/indices/{indexName}/crawler/domains/{domainId}/entry_points/{entryPointId}',
      validate: {
        params: schema.object({
          indexName: schema.string(),
          domainId: schema.string(),
          entryPointId: schema.string(),
        }),
        body: schema.object({
          value: schema.string(),
        }),
      },
    },
    enterpriseSearchRequestHandler.createRequest({
      path: '/api/ent/v1/internal/indices/:indexName/crawler2/domains/:domainId/entry_points/:entryPointId',
      params: {
        respond_with: 'index',
      },
    })
  );

  router.delete(
    {
      path: '/internal/enterprise_search/indices/{indexName}/crawler/domains/{domainId}/entry_points/{entryPointId}',
      validate: {
        params: schema.object({
          indexName: schema.string(),
          domainId: schema.string(),
          entryPointId: schema.string(),
        }),
      },
    },
    enterpriseSearchRequestHandler.createRequest({
      path: '/api/ent/v1/internal/indices/:indexName/crawler2/domains/:domainId/entry_points/:entryPointId',
      params: {
        respond_with: 'index',
      },
    })
  );
}

@@ -1,113 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { schema } from '@kbn/config-schema';

import type { RouteDependencies } from '../../../types';

const extractionRuleSchema = schema.object({
  extraction_rule: schema.object({
    description: schema.string(),
    rules: schema.arrayOf(
      schema.object({
        content_from: schema.object({
          value: schema.nullable(schema.string()),
          value_type: schema.string(),
        }),
        field_name: schema.string(),
        multiple_objects_handling: schema.string(),
        selector: schema.string(),
        source_type: schema.string(),
      })
    ),
    url_filters: schema.arrayOf(
      schema.object({ filter: schema.string(), pattern: schema.string() })
    ),
  }),
});

export function registerCrawlerExtractionRulesRoutes({
  router,
  enterpriseSearchRequestHandler,
}: RouteDependencies) {
  router.post(
    {
      path: '/internal/enterprise_search/indices/{indexName}/crawler/domains/{domainId}/extraction_rules',
      validate: {
        body: extractionRuleSchema,
        params: schema.object({
          domainId: schema.string(),
          indexName: schema.string(),
        }),
      },
    },
    enterpriseSearchRequestHandler.createRequest({
      params: {
        respond_with: 'index',
      },
      path: '/api/ent/v1/internal/indices/:indexName/crawler2/domains/:domainId/extraction_rules',
    })
  );

  router.put(
    {
      path: '/internal/enterprise_search/indices/{indexName}/crawler/domains/{domainId}/extraction_rules/{crawlRuleId}',
      validate: {
        body: extractionRuleSchema,
        params: schema.object({
          crawlRuleId: schema.string(),
          domainId: schema.string(),
          indexName: schema.string(),
        }),
      },
    },
    enterpriseSearchRequestHandler.createRequest({
      params: {
        respond_with: 'index',
      },
      path: '/api/ent/v1/internal/indices/:indexName/crawler2/domains/:domainId/extraction_rules/:crawlRuleId',
    })
  );

  router.delete(
    {
      path: '/internal/enterprise_search/indices/{indexName}/crawler/domains/{domainId}/extraction_rules/{crawlRuleId}',
      validate: {
        params: schema.object({
          crawlRuleId: schema.string(),
          domainId: schema.string(),
          indexName: schema.string(),
        }),
      },
    },
    enterpriseSearchRequestHandler.createRequest({
      params: {
        respond_with: 'index',
      },
      path: '/api/ent/v1/internal/indices/:indexName/crawler2/domains/:domainId/extraction_rules/:crawlRuleId',
    })
  );

  router.get(
    {
      path: '/internal/enterprise_search/indices/{indexName}/crawler/domains/{domainId}/extraction_rules/{crawlRuleId}',
      validate: {
        params: schema.object({
          crawlRuleId: schema.string(),
          domainId: schema.string(),
          indexName: schema.string(),
        }),
      },
    },
    enterpriseSearchRequestHandler.createRequest({
      params: {
        respond_with: 'index',
      },
      path: '/api/ent/v1/internal/indices/:indexName/crawler2/domains/:domainId/extraction_rules/:crawlRuleId',
    })
  );
}

@@ -1,93 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { schema } from '@kbn/config-schema';

import { i18n } from '@kbn/i18n';

import { ErrorCode } from '../../../../common/types/error_codes';

import { fetchCrawlerCustomSchedulingByIndexName } from '../../../lib/crawler/fetch_crawler_multiple_schedules';
import { postCrawlerCustomScheduling } from '../../../lib/crawler/post_crawler_multiple_schedules';
import type { RouteDependencies } from '../../../types';
import { createError } from '../../../utils/create_error';
import { elasticsearchErrorHandler } from '../../../utils/elasticsearch_error_handler';

export function registerCrawlerMultipleSchedulesRoutes({ router, log }: RouteDependencies) {
  router.post(
    {
      path: '/internal/enterprise_search/indices/{indexName}/crawler/custom_scheduling',
      validate: {
        params: schema.object({
          indexName: schema.string(),
        }),
        body: schema.mapOf(
          schema.string(),
          schema.object({
            name: schema.string(),
            interval: schema.string(),
            enabled: schema.boolean(),
            configurationOverrides: schema.object({
              maxCrawlDepth: schema.maybe(schema.number()),
              sitemapDiscoveryDisabled: schema.maybe(schema.boolean()),
              domainAllowlist: schema.maybe(schema.arrayOf(schema.string())),
              sitemapUrls: schema.maybe(schema.arrayOf(schema.string())),
              seedUrls: schema.maybe(schema.arrayOf(schema.string())),
            }),
          })
        ),
      },
    },
    elasticsearchErrorHandler(log, async (context, request, response) => {
      const { client } = (await context.core).elasticsearch;
      const { params, body } = request;
      await postCrawlerCustomScheduling(client, params.indexName, body);
      return response.ok();
    })
  );

  router.get(
    {
      path: '/internal/enterprise_search/indices/{indexName}/crawler/custom_scheduling',
      validate: {
        params: schema.object({
          indexName: schema.string(),
        }),
      },
    },
    elasticsearchErrorHandler(log, async (context, request, response) => {
      const { client } = (await context.core).elasticsearch;
      try {
        const { params } = request;
        const customScheduling = await fetchCrawlerCustomSchedulingByIndexName(
          client,
          params.indexName
        );
        return response.ok({
          body: customScheduling,
          headers: { 'content-type': 'application/json' },
        });
      } catch (error) {
        if ((error as Error).message === ErrorCode.DOCUMENT_NOT_FOUND) {
          return createError({
            errorCode: (error as Error).message as ErrorCode,
            message: i18n.translate(
              'xpack.enterpriseSearch.server.routes.fetchCrawlerMultipleSchedules.documentNotFoundError',
              {
                defaultMessage: 'Crawler data could not be found.',
              }
            ),
            response,
            statusCode: 404,
          });
        }

        throw error;
      }
    })
  );
}

@@ -1,77 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { schema } from '@kbn/config-schema';

import type { RouteDependencies } from '../../../types';

export function registerCrawlerSitemapRoutes({
  router,
  enterpriseSearchRequestHandler,
}: RouteDependencies) {
  router.post(
    {
      path: '/internal/enterprise_search/indices/{indexName}/crawler/domains/{domainId}/sitemaps',
      validate: {
        params: schema.object({
          indexName: schema.string(),
          domainId: schema.string(),
        }),
        body: schema.object({
          url: schema.string(),
        }),
      },
    },
    enterpriseSearchRequestHandler.createRequest({
      path: '/api/ent/v1/internal/indices/:indexName/crawler2/domains/:domainId/sitemaps',
      params: {
        respond_with: 'index',
      },
    })
  );

  router.put(
    {
      path: '/internal/enterprise_search/indices/{indexName}/crawler/domains/{domainId}/sitemaps/{sitemapId}',
      validate: {
        params: schema.object({
          indexName: schema.string(),
          domainId: schema.string(),
          sitemapId: schema.string(),
        }),
        body: schema.object({
          url: schema.string(),
        }),
      },
    },
    enterpriseSearchRequestHandler.createRequest({
      path: '/api/ent/v1/internal/indices/:indexName/crawler2/domains/:domainId/sitemaps/:sitemapId',
      params: {
        respond_with: 'index',
      },
    })
  );

  router.delete(
    {
      path: '/internal/enterprise_search/indices/{indexName}/crawler/domains/{domainId}/sitemaps/{sitemapId}',
      validate: {
        params: schema.object({
          indexName: schema.string(),
          domainId: schema.string(),
          sitemapId: schema.string(),
        }),
      },
    },
    enterpriseSearchRequestHandler.createRequest({
      path: '/api/ent/v1/internal/indices/:indexName/crawler2/domains/:domainId/sitemaps/:sitemapId',
      params: {
        respond_with: 'index',
      },
    })
  );
}

@@ -35,8 +35,6 @@ import type {
   MlInferenceHistoryResponse,
 } from '../../../common/types/pipelines';
 
-import { fetchCrawlerByIndexName, fetchCrawlers } from '../../lib/crawler/fetch_crawlers';
-
 import { createIndex } from '../../lib/indices/create_index';
 import { deleteAccessControlIndex } from '../../lib/indices/delete_access_control_index';
 import { indexOrAliasExists } from '../../lib/indices/exists_index';

@@ -72,12 +70,7 @@ import {
 } from '../../utils/identify_exceptions';
 import { getPrefixedInferencePipelineProcessorName } from '../../utils/ml_inference_pipeline_utils';
 
-export function registerIndexRoutes({
-  router,
-  enterpriseSearchRequestHandler,
-  log,
-  ml,
-}: RouteDependencies) {
+export function registerIndexRoutes({ router, log, ml }: RouteDependencies) {
   router.get(
     { path: '/internal/enterprise_search/search_indices', validate: false },
     elasticsearchErrorHandler(log, async (context, _, response) => {

@@ -127,11 +120,9 @@ export function registerIndexRoutes({
         size
       );
       const connectors = await fetchConnectors(client.asCurrentUser, indexNames);
-      const crawlers = await fetchCrawlers(client, indexNames);
       const enrichedIndices = indices.map((index) => ({
         ...index,
         connector: connectors.find((connector) => connector.index_name === index.name),
-        crawler: crawlers.find((crawler) => crawler.index_name === index.name),
       }));
 
       return response.ok({

@@ -198,19 +189,8 @@ export function registerIndexRoutes({
       const { client } = (await context.core).elasticsearch;
 
       try {
-        const crawler = await fetchCrawlerByIndexName(client, indexName);
         const connector = await fetchConnectorByIndexName(client.asCurrentUser, indexName);
 
-        if (crawler) {
-          const crawlerRes = await enterpriseSearchRequestHandler.createRequest({
-            path: `/api/ent/v1/internal/indices/${indexName}`,
-          })(context, request, response);
-
-          if (crawlerRes.status !== 200) {
-            throw new Error(crawlerRes.payload.message);
-          }
-        }
-
         if (connector) {
           if (connector.service_type === CRAWLER_SERVICE_TYPE) {
             await deleteConnectorById(client.asCurrentUser, connector.id);

@@ -588,22 +568,6 @@ export function registerIndexRoutes({
         });
       }
 
-      const crawler = await fetchCrawlerByIndexName(client, request.body.index_name);
-
-      if (crawler) {
-        return createError({
-          errorCode: ErrorCode.CRAWLER_ALREADY_EXISTS,
-          message: i18n.translate(
-            'xpack.enterpriseSearch.server.routes.createApiIndex.crawlerExistsError',
-            {
-              defaultMessage: 'A crawler for this index already exists',
-            }
-          ),
-          response,
-          statusCode: 409,
-        });
-      }
-
       const connector = await fetchConnectorByIndexName(
         client.asCurrentUser,
         request.body.index_name

@@ -11,11 +11,7 @@ import { fetchSyncJobsStats } from '../../lib/stats/get_sync_jobs';
 import type { RouteDependencies } from '../../types';
 import { elasticsearchErrorHandler } from '../../utils/elasticsearch_error_handler';
 
-export function registerStatsRoutes({
-  enterpriseSearchRequestHandler,
-  log,
-  router,
-}: RouteDependencies) {
+export function registerStatsRoutes({ log, router }: RouteDependencies) {
   router.get(
     {
       path: '/internal/enterprise_search/stats/sync_jobs',

@@ -32,21 +28,4 @@ export function registerStatsRoutes({
       return response.ok({ body });
     })
   );
-  router.get(
-    {
-      path: '/internal/enterprise_search/stats/cloud_health',
-      validate: {},
-    },
-    elasticsearchErrorHandler(log, async (context, request, response) => {
-      const MIN_MEMORY = 1289748481;
-      const entSearchResponse = await enterpriseSearchRequestHandler.createRequest({
-        path: '/api/ent/v1/internal/health',
-      })(context, request, response);
-      const hasMinConnectorMemory =
-        entSearchResponse.payload?.jvm?.memory_usage?.heap_max > MIN_MEMORY;
-      return response.ok({
-        body: { has_min_connector_memory: hasMinConnectorMemory },
-      });
-    })
-  );
 }

@@ -27,7 +27,6 @@ import type { SecurityPluginSetup } from '@kbn/security-plugin/server';
 import type { SpacesPluginStart } from '@kbn/spaces-plugin/server';
 import type { UsageCollectionSetup } from '@kbn/usage-collection-plugin/server';
 
-import type { IEnterpriseSearchRequestHandler } from './lib/enterprise_search_request_handler';
 import type { GlobalConfigService } from './services/global_config_service';
 
 import type { ConfigType } from '.';

@@ -55,7 +54,6 @@ export interface PluginsStart {
 
 export interface RouteDependencies {
   config: ConfigType;
-  enterpriseSearchRequestHandler: IEnterpriseSearchRequestHandler;
   getSavedObjectsService?(): SavedObjectsServiceStart;
   getStartServices: StartServicesAccessor<PluginsStart, unknown>;
   globalConfigService: GlobalConfigService;