Update usage collectors for connectors to collect full telemetry data (#176366)

## Summary

This PR updates the usage collectors for the `enterprise_search` and `serverless_search` plugins to collect full telemetry data, via the [List connectors API](https://www.elastic.co/guide/en/elasticsearch/reference/current/list-connector-api.html) and the [List connector sync jobs API](https://www.elastic.co/guide/en/elasticsearch/reference/current/list-connector-sync-jobs-api.html).

### Checklist

- [x] [Unit or functional tests](https://www.elastic.co/guide/en/kibana/master/development-tests.html) were updated or added to match the most common scenarios

### For maintainers

- [ ] This was checked for breaking API changes and was [labeled appropriately](https://www.elastic.co/guide/en/kibana/master/contributing.html#kibana-release-notes-process)

Co-authored-by: Kibana Machine <42973632+kibanamachine@users.noreply.github.com>
Commit 68a2bd7c3c (parent 70f6349317) in elastic/kibana: 24 changed files with 3713 additions and 245 deletions.
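For orientation before the diff: the collection flow added here first lists every connector, then pages through all connector sync jobs, and finally folds both into per-connector stats objects. Below is a condensed sketch of that loop, using the `fetchConnectors`/`fetchSyncJobs` helpers from `@kbn/search-connectors` that the new code relies on; error handling and the per-data-source details are omitted, so treat it as a sketch rather than the exact implementation.

```ts
import type { ElasticsearchClient } from '@kbn/core-elasticsearch-server';
import { fetchConnectors, fetchSyncJobs, type ConnectorSyncJob } from '@kbn/search-connectors';

// Sketch: gather every connector and every sync job (paginated) -- the raw
// inputs that collectConnectorStats() in the diff turns into telemetry documents.
async function gatherTelemetryInputs(client: ElasticsearchClient) {
  const connectors = await fetchConnectors(client);

  const syncJobs: ConnectorSyncJob[] = [];
  let from = 0;
  let hasMore: boolean | undefined;
  do {
    const page = await fetchSyncJobs(client, undefined, from);
    syncJobs.push(...page.data);
    hasMore = page._meta.page.has_more_hits_than_total;
    from += page._meta.page.size;
  } while (hasMore);

  return { connectors, syncJobs };
}
```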
packages/kbn-search-connectors/lib/collect_connector_stats.test.ts (new file, +89 lines)
@@ -0,0 +1,89 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import { ConnectorSyncJob, Paginate } from '../types';
import { fetchConnectors, fetchSyncJobs } from '..';
import { collectConnectorStats } from './collect_connector_stats';
import {
  expectedDeletedConnectorStats,
  expectedMysqlConnectorStats,
  expectedSpoConnectorStats,
  mysqlConnector,
  mysqlFullSyncJob,
  orphanedSyncJob,
  spoAccessControlSyncJob,
  spoConnector,
  spoFullSyncJob,
  spoIncrementalSyncJob,
} from './collect_connector_stats_test_data';

jest.mock('.', () => ({
  fetchConnectors: jest.fn(),
  fetchSyncJobs: jest.fn(),
}));

describe('collect connector stats', () => {
  const mockClient = {
    indices: {
      stats: jest.fn(),
    },
    search: jest.fn(),
  };
  const mockSyncJobsResponse: Paginate<ConnectorSyncJob> = {
    _meta: {
      page: {
        from: 0,
        size: 5,
        total: 5,
        has_more_hits_than_total: false,
      },
    },
    data: [
      spoFullSyncJob,
      spoIncrementalSyncJob,
      spoAccessControlSyncJob,
      mysqlFullSyncJob,
      orphanedSyncJob,
    ],
  };
  it('should collect connector stats', async () => {
    (fetchConnectors as jest.Mock).mockImplementation(() => [spoConnector, mysqlConnector]);
    (fetchSyncJobs as jest.Mock).mockImplementation(() => mockSyncJobsResponse);
    mockClient.indices.stats.mockImplementation((params: { index: any }) =>
      Promise.resolve({
        _all: {
          primaries: {
            docs: {
              count: params.index === spoConnector.index_name ? 1000 : 2000,
            },
            store: {
              size_in_bytes: params.index === spoConnector.index_name ? 10000 : 20000,
            },
          },
        },
      })
    );
    mockClient.search.mockImplementation(() =>
      Promise.resolve({
        aggregations: {
          table_count: {
            value: 7,
          },
        },
      })
    );

    const collectedConnectorStats = await collectConnectorStats(mockClient as any);

    expect(collectedConnectorStats.sort((a, b) => (a.id > b.id ? 1 : -1))).toEqual([
      expectedSpoConnectorStats,
      expectedMysqlConnectorStats,
      expectedDeletedConnectorStats,
    ]);
  });
});
packages/kbn-search-connectors/lib/collect_connector_stats.ts (new file, +403 lines)
@@ -0,0 +1,403 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import { ElasticsearchClient } from '@kbn/core-elasticsearch-server';
import { AggregationsCardinalityAggregate } from '@elastic/elasticsearch/lib/api/types';
import {
  Connector,
  ConnectorConfigProperties,
  ConnectorStats,
  ConnectorSyncJob,
  CRAWLER_SERVICE_TYPE,
  DataSourceSpecificStats,
  DocumentsStats,
  fetchConnectors,
  fetchSyncJobs,
  SyncJobStats,
  SyncJobStatsByState,
  SyncJobStatsByType,
  SyncJobStatsDetails,
  SyncJobType,
  SyncStatus,
  TriggerMethod,
} from '..';

export const collectConnectorStats = async (
  client: ElasticsearchClient
): Promise<ConnectorStats[]> => {
  const connectors = await fetchConnectors(client);
  const syncJobs: ConnectorSyncJob[] = [];

  let hasMore: boolean | undefined;
  let from = 0;
  do {
    const result = await fetchSyncJobs(client, undefined, from);
    syncJobs.push(...result.data);
    hasMore = result._meta.page.has_more_hits_than_total;
    from += result._meta.page.size;
  } while (hasMore);

  const connectorStatsArray: ConnectorStats[] = [];
  const syncJobsMap = groupSyncJobsByConnector(syncJobs);
  for (const connector of connectors) {
    // skip crawlers
    if (connector.service_type === CRAWLER_SERVICE_TYPE) {
      continue;
    }
    const connectorStats: ConnectorStats = {
      id: connector.id,
      serviceType: connector.service_type,
      isNative: connector.is_native,
      isDeleted: false,
      status: connector.status,
      indexName: connector.index_name,
      dlsEnabled: !!connector.configuration.use_document_level_security?.value,
      sslEnabled: connector.configuration.ssl_enabled
        ? !!(connector.configuration.ssl_enabled as ConnectorConfigProperties).value
        : false,
      fetchSelectively: fetchSelectively(connector),
      textExtractionServiceEnabled: !!connector.configuration.use_text_extraction_service?.value,
      documents: await documentsStats(client, connector),
      dataSourceSpecific: await dataSourceSpecificStats(client, connector),
      scheduling: {
        accessControl: connector.scheduling.access_control,
        full: connector.scheduling.full,
        incremental: connector.scheduling.incremental,
      },
    };

    if (connector.pipeline) {
      connectorStats.ingestPipeline = {
        name: connector.pipeline.name,
        extractBinaryContent: connector.pipeline.extract_binary_content,
        reduceWhitespace: connector.pipeline.reduce_whitespace,
        runMLInference: connector.pipeline.run_ml_inference,
      };
    }

    if (connector.filtering.length > 0) {
      const filtering = connector.filtering[0];
      connectorStats.syncRules = {
        active: {
          withBasicRules:
            Array.isArray(filtering?.active?.rules) && filtering.active.rules.length > 1,
          withAdvancedRules:
            !!filtering?.active?.advanced_snippet?.value &&
            Object.keys(filtering.active.advanced_snippet.value).length > 0,
        },
        draft: {
          withBasicRules:
            Array.isArray(filtering?.draft?.rules) && filtering.draft.rules.length > 1,
          withAdvancedRules:
            !!filtering?.draft?.advanced_snippet?.value &&
            Object.keys(filtering.draft.advanced_snippet.value).length > 0,
        },
      };
    }

    if (syncJobsMap.has(connector.id)) {
      // @ts-ignore
      connectorStats.syncJobs = syncJobsStats(syncJobsMap.get(connector.id));
      syncJobsMap.delete(connector.id);
    }
    connectorStatsArray.push(connectorStats);
  }

  // process orphaned sync jobs
  for (const [connectorId, orphanedSyncJobs] of syncJobsMap) {
    const connectorStats: ConnectorStats = {
      id: connectorId,
      isDeleted: true,
      syncJobs: syncJobsStats(orphanedSyncJobs),
    };
    connectorStatsArray.push(connectorStats);
  }

  return connectorStatsArray;
};

function groupSyncJobsByConnector(syncJobs: ConnectorSyncJob[]): Map<string, ConnectorSyncJob[]> {
  const syncJobMaps: Map<string, ConnectorSyncJob[]> = new Map<string, ConnectorSyncJob[]>();
  for (const syncJob of syncJobs) {
    // filter out sync jobs for crawler
    if (syncJob.connector.service_type === CRAWLER_SERVICE_TYPE) {
      continue;
    }
    const connectorId = syncJob.connector.id ? syncJob.connector.id : 'undefined';
    if (!syncJobMaps.has(connectorId)) {
      syncJobMaps.set(connectorId, []);
    }
    // @ts-ignore
    syncJobMaps.get(connectorId).push(syncJob);
  }
  return syncJobMaps;
}

function fetchSelectively(connector: Connector): boolean {
  const rcfMap: Record<string, string> = {
    azure_blob_storage: 'containers',
    confluence: 'spaces',
    github: 'repositories',
    jira: 'projects',
    mssql: 'tables',
    mysql: 'tables',
    oracle: 'tables',
    postgresql: 'tables',
    s3: 'buckets',
    servicenow: 'services',
    sharepoint_online: 'site_collections',
    sharepoint_server: 'site_collections',
  };

  if (!connector.service_type || !(connector.service_type in rcfMap)) {
    return false;
  }

  const rcfField = rcfMap[connector.service_type];
  if (!(rcfField in connector.configuration)) {
    return false;
  }

  return !(
    (connector.configuration[rcfField] as ConnectorConfigProperties).value as string
  ).includes('*');
}

const documentsStats = async (
  client: ElasticsearchClient,
  connector: Connector
): Promise<DocumentsStats> => {
  const stats: DocumentsStats = {
    total: 0,
    volume: 0,
    inLastSync: connector.last_indexed_document_count ? connector.last_indexed_document_count : 0,
  };
  if (!connector.index_name) {
    return stats;
  }
  try {
    const indicesStatsResponse = await client.indices.stats({ index: connector.index_name });
    stats.total = indicesStatsResponse._all.primaries?.docs?.count ?? 0;
    stats.volume = indicesStatsResponse._all.primaries?.store?.size_in_bytes ?? 0;
  } catch (e) {
    /* empty */
  }

  return stats;
};

const dataSourceSpecificStats = async (
  client: ElasticsearchClient,
  connector: Connector
): Promise<DataSourceSpecificStats> => {
  const stats: DataSourceSpecificStats = {};
  switch (connector.service_type) {
    case 'confluence':
      stats.confluence = {
        dataSourceType: (connector.configuration.data_source as ConnectorConfigProperties)
          ?.value as string,
      };
      break;
    case 'github':
      stats.github = {
        isCloud:
          (connector.configuration.data_source as ConnectorConfigProperties)?.value ===
          'github_cloud',
      };
      break;
    case 'jira':
      stats.jira = {
        dataSourceType: (connector.configuration.data_source as ConnectorConfigProperties)
          ?.value as string,
      };
      break;
    case 'mongodb':
      stats.mongodb = {
        directConnect: !!(connector.configuration.direct_connection as ConnectorConfigProperties)
          ?.value,
      };
      break;
    case 'mssql':
      stats.mssql = {
        validateHost: !!(connector.configuration.validate_host as ConnectorConfigProperties)?.value,
        tables: connector.index_name ? await tableCounts(client, connector.index_name, 'table') : 0,
      };
      break;
    case 'mysql':
      stats.mysql = {
        tables: connector.index_name ? await tableCounts(client, connector.index_name, 'Table') : 0,
      };
      break;
    case 'oracle':
      stats.oracle = {
        tables: connector.index_name ? await tableCounts(client, connector.index_name, 'Table') : 0,
      };
      break;
    case 'postgresql':
      stats.postgresql = {
        tables: connector.index_name ? await tableCounts(client, connector.index_name, 'table') : 0,
      };
      break;
    case 'slack':
      stats.slack = {
        autoJoinChannelsEnabled: !!(
          connector.configuration.auto_join_channels as ConnectorConfigProperties
        )?.value,
        syncUsersEnabled: !!(connector.configuration.sync_users as ConnectorConfigProperties)
          ?.value,
        fetchLastNDays: (connector.configuration.fetch_last_n_days as ConnectorConfigProperties)
          ?.value as number,
      };
      break;
    case 'zoom':
      stats.zoom = {
        recordingAge: (connector.configuration.recording_age as ConnectorConfigProperties)
          ?.value as number,
      };
      break;
  }
  return stats;
};

const tableCounts = async (
  client: ElasticsearchClient,
  indexName: string,
  tableField: string
): Promise<number> => {
  try {
    const aggs = {
      table_count: {
        cardinality: {
          field: `${tableField}.keyword`,
        },
      },
    };

    const searchResponse = await client.search({ index: indexName, aggs, size: 0 });

    return (
      (searchResponse.aggregations?.table_count as AggregationsCardinalityAggregate).value ?? 0
    );
  } catch (e) {
    return 0;
  }
};

function syncJobsStats(syncJobs: ConnectorSyncJob[]): SyncJobStats {
  const stats: SyncJobStats = {
    overall: syncJobsStatsDetails(syncJobs),
  };

  const syncJobsWithTextExtractionServiceEnabled = syncJobs.filter(
    (syncJob) => !!syncJob.connector.configuration.use_text_extraction_service?.value
  );
  if (syncJobsWithTextExtractionServiceEnabled.length > 0) {
    stats.withTextExtractionServiceEnabled = syncJobsStatsDetails(
      syncJobsWithTextExtractionServiceEnabled
    );
  }

  return stats;
}

function syncJobsStatsDetails(syncJobs: ConnectorSyncJob[]): SyncJobStatsDetails {
  const stats: SyncJobStatsDetails = {
    total: syncJobs.length,
  };
  const last30DaysSyncJobs = recentSyncJobs(30, syncJobs);
  if (last30DaysSyncJobs.length > 0) {
    stats.last30Days = syncJobsStatsByType(last30DaysSyncJobs);
  }
  const last7DaysSyncJobs = recentSyncJobs(7, syncJobs);
  if (last7DaysSyncJobs.length > 0) {
    stats.last7Days = syncJobsStatsByType(last7DaysSyncJobs);
  }
  return stats;
}

function recentSyncJobs(days: number, syncJobs: ConnectorSyncJob[]): ConnectorSyncJob[] {
  const today = new Date();
  const nDaysAgo = new Date(today.setDate(today.getDate() - days));
  return syncJobs.filter((syncJob) => {
    const createdAt = new Date(syncJob.created_at);
    return !isNaN(createdAt.getDate()) && createdAt > nDaysAgo;
  });
}

function syncJobsStatsByType(syncJobs: ConnectorSyncJob[]): SyncJobStatsByType {
  const stats: SyncJobStatsByType = {
    overall: syncJobsStatsByState(syncJobs),
  };
  const fullSyncJobs = syncJobs.filter((syncJob) => syncJob.job_type === SyncJobType.FULL);
  if (fullSyncJobs.length > 0) {
    stats.full = syncJobsStatsByState(fullSyncJobs);
  }
  const incrementalSyncJobs = syncJobs.filter(
    (syncJob) => syncJob.job_type === SyncJobType.INCREMENTAL
  );
  if (incrementalSyncJobs.length > 0) {
    stats.incremental = syncJobsStatsByState(incrementalSyncJobs);
  }
  const accessControlSyncJobs = syncJobs.filter(
    (syncJob) => syncJob.job_type === SyncJobType.ACCESS_CONTROL
  );
  if (accessControlSyncJobs.length > 0) {
    stats.accessControl = syncJobsStatsByState(accessControlSyncJobs);
  }
  return stats;
}

function syncJobsStatsByState(syncJobs: ConnectorSyncJob[]): SyncJobStatsByState {
  let manual = 0;
  let scheduled = 0;
  let completed = 0;
  let errored = 0;
  let canceled = 0;
  let suspended = 0;
  let idle = 0;
  let running = 0;
  let duration = 0;

  for (const syncJob of syncJobs) {
    completed += syncJob.status === SyncStatus.COMPLETED ? 1 : 0;
    errored += syncJob.status === SyncStatus.ERROR ? 1 : 0;
    canceled += syncJob.status === SyncStatus.CANCELED ? 1 : 0;
    suspended += syncJob.status === SyncStatus.SUSPENDED ? 1 : 0;
    running += syncJob.status === SyncStatus.IN_PROGRESS ? 1 : 0;
    manual += syncJob.trigger_method === TriggerMethod.ON_DEMAND ? 1 : 0;
    scheduled += syncJob.trigger_method === TriggerMethod.SCHEDULED ? 1 : 0;

    if (syncJob.status in [SyncStatus.IN_PROGRESS, SyncStatus.CANCELING] && syncJob.last_seen) {
      const lastSeen = new Date(syncJob.last_seen);
      // A sync job with last_seen not updated for more than 5 mins is considered idle
      if (!isNaN(lastSeen.getTime()) && new Date().getTime() - lastSeen.getTime() > 5 * 60 * 1000) {
        idle += 1;
      }
    }
    if (syncJob.started_at && syncJob.completed_at) {
      const startedAt = new Date(syncJob.started_at);
      const completedAt = new Date(syncJob.completed_at);
      if (!isNaN(startedAt.getTime()) && !isNaN(completedAt.getTime())) {
        duration += Math.floor((completedAt.getTime() - startedAt.getTime()) / 1000);
      }
    }
  }

  return {
    total: syncJobs.length,
    manual,
    scheduled,
    completed,
    errored,
    canceled,
    suspended,
    idle,
    running,
    totalDurationSeconds: duration,
  } as SyncJobStatsByState;
}
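As a usage note, the telemetry collector changes further down in this diff call `collectConnectorStats` from a Kibana usage collector's `fetch` method. The following is a stripped-down sketch of that wiring; the real registration (in the `telemetry.ts` changes below) declares a much richer schema mirroring `ConnectorStats` and adds counts and logging, so the reduced shape and schema here are placeholders for illustration only.

```ts
import type { UsageCollectionSetup } from '@kbn/usage-collection-plugin/server';
import { collectConnectorStats } from '@kbn/search-connectors';

// Reduced telemetry shape for the sketch; the actual Telemetry interface below
// carries the full ConnectorStats objects plus native/clients counts.
interface ConnectorsTelemetrySketch {
  connectors: Array<{ id: string; isDeleted: boolean }>;
}

export const registerConnectorsUsageCollectorSketch = (usageCollection: UsageCollectionSetup) => {
  const collector = usageCollection.makeUsageCollector<ConnectorsTelemetrySketch>({
    type: 'connectors',
    isReady: () => true,
    schema: {
      connectors: {
        type: 'array',
        items: { id: { type: 'keyword' }, isDeleted: { type: 'boolean' } },
      },
    },
    async fetch({ esClient }) {
      // Collect full per-connector stats and project down to the sketch shape.
      const stats = await collectConnectorStats(esClient);
      return { connectors: stats.map(({ id, isDeleted }) => ({ id, isDeleted })) };
    },
  });
  usageCollection.registerCollector(collector);
};
```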
packages/kbn-search-connectors/lib/collect_connector_stats_test_data.ts (new file, +598 lines)
@@ -0,0 +1,598 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
// @ts-nocheck
|
||||
import {
|
||||
Connector,
|
||||
ConnectorStats,
|
||||
ConnectorStatus,
|
||||
ConnectorSyncJob,
|
||||
SyncJobType,
|
||||
SyncStatus,
|
||||
TriggerMethod,
|
||||
} from '..';
|
||||
|
||||
const now = Date.now();
|
||||
|
||||
export const spoConnector: Connector = {
|
||||
id: '1',
|
||||
index_name: 'search_spo',
|
||||
is_native: false,
|
||||
status: ConnectorStatus.CONNECTED,
|
||||
service_type: 'sharepoint_online',
|
||||
last_indexed_document_count: 1000,
|
||||
pipeline: {
|
||||
extract_binary_content: false,
|
||||
name: 'ent-search-generic-ingestion',
|
||||
reduce_whitespace: true,
|
||||
run_ml_inference: false,
|
||||
},
|
||||
scheduling: {
|
||||
access_control: {
|
||||
enabled: true,
|
||||
interval: '0 0 0 * * ?',
|
||||
},
|
||||
full: {
|
||||
enabled: true,
|
||||
interval: '0 0 0 * * ?',
|
||||
},
|
||||
incremental: {
|
||||
enabled: false,
|
||||
interval: '0 0 0 * * ?',
|
||||
},
|
||||
},
|
||||
configuration: {
|
||||
use_document_level_security: {
|
||||
value: true,
|
||||
},
|
||||
use_text_extraction_service: {
|
||||
value: true,
|
||||
},
|
||||
site_collections: {
|
||||
value: 'test-site',
|
||||
},
|
||||
},
|
||||
filtering: [
|
||||
{
|
||||
active: {
|
||||
advanced_snippet: {
|
||||
value: {},
|
||||
},
|
||||
rules: [
|
||||
{
|
||||
id: 'DEFAULT',
|
||||
},
|
||||
{
|
||||
id: 'NEW',
|
||||
},
|
||||
],
|
||||
},
|
||||
domain: 'DEFAULT',
|
||||
draft: {
|
||||
advanced_snippet: {
|
||||
value: {
|
||||
id: 'NEW',
|
||||
},
|
||||
},
|
||||
rules: [
|
||||
{
|
||||
id: 'DEFAULT',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
export const mysqlConnector: Connector = {
|
||||
id: '2',
|
||||
index_name: 'search_mysql',
|
||||
is_native: true,
|
||||
status: ConnectorStatus.ERROR,
|
||||
service_type: 'mysql',
|
||||
last_indexed_document_count: 2000,
|
||||
pipeline: {
|
||||
extract_binary_content: true,
|
||||
name: 'ent-search-generic-ingestion',
|
||||
reduce_whitespace: true,
|
||||
run_ml_inference: false,
|
||||
},
|
||||
scheduling: {
|
||||
access_control: {
|
||||
enabled: false,
|
||||
interval: '0 0 0 * * ?',
|
||||
},
|
||||
full: {
|
||||
enabled: true,
|
||||
interval: '0 0 0 * * ?',
|
||||
},
|
||||
incremental: {
|
||||
enabled: false,
|
||||
interval: '0 0 0 * * ?',
|
||||
},
|
||||
},
|
||||
configuration: {
|
||||
use_document_level_security: {
|
||||
value: false,
|
||||
},
|
||||
use_text_extraction_service: {
|
||||
value: false,
|
||||
},
|
||||
tables: {
|
||||
value: '*',
|
||||
},
|
||||
ssl_enabled: {
|
||||
value: true,
|
||||
},
|
||||
},
|
||||
filtering: [
|
||||
{
|
||||
active: {
|
||||
advanced_snippet: {
|
||||
value: {},
|
||||
},
|
||||
rules: [
|
||||
{
|
||||
id: 'DEFAULT',
|
||||
},
|
||||
],
|
||||
},
|
||||
domain: 'DEFAULT',
|
||||
draft: {
|
||||
advanced_snippet: {
|
||||
value: {},
|
||||
},
|
||||
rules: [
|
||||
{
|
||||
id: 'DEFAULT',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
export const spoFullSyncJob: ConnectorSyncJob = {
|
||||
id: '1',
|
||||
job_type: SyncJobType.FULL,
|
||||
status: SyncStatus.COMPLETED,
|
||||
trigger_method: TriggerMethod.SCHEDULED,
|
||||
connector: {
|
||||
id: spoConnector.id,
|
||||
configuration: {
|
||||
use_text_extraction_service: {
|
||||
value: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
// created 10 days ago
|
||||
created_at: new Date(now - 10 * 24 * 3600 * 1000).toISOString(),
|
||||
// started 3 days ago
|
||||
started_at: new Date(now - 10 * 24 * 3600 * 1000).toISOString(),
|
||||
// completed after 100 seconds
|
||||
completed_at: new Date(now - 10 * 24 * 3600 * 1000 + 100 * 1000).toISOString(),
|
||||
};
|
||||
|
||||
export const spoIncrementalSyncJob: ConnectorSyncJob = {
|
||||
id: '2',
|
||||
job_type: SyncJobType.INCREMENTAL,
|
||||
status: SyncStatus.ERROR,
|
||||
trigger_method: TriggerMethod.ON_DEMAND,
|
||||
connector: {
|
||||
id: spoConnector.id,
|
||||
configuration: {
|
||||
use_text_extraction_service: {
|
||||
value: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
// created 3 days ago
|
||||
created_at: new Date(now - 3 * 24 * 3600 * 1000).toISOString(),
|
||||
// started 3 days ago
|
||||
started_at: new Date(now - 3 * 24 * 3600 * 1000).toISOString(),
|
||||
// completed after 100 seconds
|
||||
completed_at: new Date(now - 3 * 24 * 3600 * 1000 + 100 * 1000).toISOString(),
|
||||
};
|
||||
|
||||
export const spoAccessControlSyncJob: ConnectorSyncJob = {
|
||||
id: '3',
|
||||
job_type: SyncJobType.ACCESS_CONTROL,
|
||||
status: SyncStatus.COMPLETED,
|
||||
trigger_method: TriggerMethod.ON_DEMAND,
|
||||
connector: {
|
||||
id: spoConnector.id,
|
||||
configuration: {
|
||||
use_text_extraction_service: {
|
||||
value: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
// created 3 days ago
|
||||
created_at: new Date(now - 3 * 24 * 3600 * 1000).toISOString(),
|
||||
// started 3 days ago
|
||||
started_at: new Date(now - 3 * 24 * 3600 * 1000).toISOString(),
|
||||
// completed after 20 seconds
|
||||
completed_at: new Date(now - 3 * 24 * 3600 * 1000 + 20 * 1000).toISOString(),
|
||||
};
|
||||
|
||||
export const mysqlFullSyncJob: ConnectorSyncJob = {
|
||||
id: '4',
|
||||
job_type: SyncJobType.FULL,
|
||||
status: SyncStatus.COMPLETED,
|
||||
trigger_method: TriggerMethod.SCHEDULED,
|
||||
connector: {
|
||||
id: mysqlConnector.id,
|
||||
configuration: {
|
||||
use_text_extraction_service: {
|
||||
value: false,
|
||||
},
|
||||
},
|
||||
},
|
||||
// created 12 days ago
|
||||
created_at: new Date(now - 12 * 24 * 3600 * 1000).toISOString(),
|
||||
// started 12 days ago
|
||||
started_at: new Date(now - 12 * 24 * 3600 * 1000).toISOString(),
|
||||
// completed after 200 seconds
|
||||
completed_at: new Date(now - 12 * 24 * 3600 * 1000 + 200 * 1000).toISOString(),
|
||||
};
|
||||
|
||||
export const orphanedSyncJob: ConnectorSyncJob = {
|
||||
id: '5',
|
||||
job_type: SyncJobType.FULL,
|
||||
status: SyncStatus.COMPLETED,
|
||||
trigger_method: TriggerMethod.ON_DEMAND,
|
||||
connector: {
|
||||
id: '3',
|
||||
configuration: {
|
||||
use_text_extraction_service: {
|
||||
value: false,
|
||||
},
|
||||
},
|
||||
},
|
||||
// created 12 days ago
|
||||
created_at: new Date(now - 12 * 24 * 3600 * 1000).toISOString(),
|
||||
// started 12 days ago
|
||||
started_at: new Date(now - 12 * 24 * 3600 * 1000).toISOString(),
|
||||
// completed after 200 seconds
|
||||
completed_at: new Date(now - 12 * 24 * 3600 * 1000 + 200 * 1000).toISOString(),
|
||||
};
|
||||
|
||||
export const expectedSpoConnectorStats: ConnectorStats = {
|
||||
id: spoConnector.id,
|
||||
serviceType: spoConnector.service_type,
|
||||
isNative: spoConnector.is_native,
|
||||
isDeleted: false,
|
||||
status: spoConnector.status,
|
||||
indexName: spoConnector.index_name,
|
||||
dlsEnabled: true,
|
||||
sslEnabled: false,
|
||||
fetchSelectively: true,
|
||||
textExtractionServiceEnabled: true,
|
||||
documents: {
|
||||
total: 1000,
|
||||
volume: 10000,
|
||||
inLastSync: 1000,
|
||||
},
|
||||
dataSourceSpecific: {},
|
||||
scheduling: {
|
||||
accessControl: spoConnector.scheduling.access_control,
|
||||
full: spoConnector.scheduling.full,
|
||||
incremental: spoConnector.scheduling.incremental,
|
||||
},
|
||||
syncRules: {
|
||||
active: {
|
||||
withBasicRules: true,
|
||||
withAdvancedRules: false,
|
||||
},
|
||||
draft: {
|
||||
withBasicRules: false,
|
||||
withAdvancedRules: true,
|
||||
},
|
||||
},
|
||||
ingestPipeline: {
|
||||
name: spoConnector.pipeline.name,
|
||||
extractBinaryContent: spoConnector.pipeline.extract_binary_content,
|
||||
reduceWhitespace: spoConnector.pipeline.reduce_whitespace,
|
||||
runMLInference: spoConnector.pipeline.run_ml_inference,
|
||||
},
|
||||
syncJobs: {
|
||||
overall: {
|
||||
total: 3,
|
||||
last30Days: {
|
||||
overall: {
|
||||
total: 3,
|
||||
manual: 2,
|
||||
scheduled: 1,
|
||||
completed: 2,
|
||||
errored: 1,
|
||||
canceled: 0,
|
||||
suspended: 0,
|
||||
idle: 0,
|
||||
running: 0,
|
||||
totalDurationSeconds: 220,
|
||||
},
|
||||
accessControl: {
|
||||
total: 1,
|
||||
manual: 1,
|
||||
scheduled: 0,
|
||||
completed: 1,
|
||||
errored: 0,
|
||||
canceled: 0,
|
||||
suspended: 0,
|
||||
idle: 0,
|
||||
running: 0,
|
||||
totalDurationSeconds: 20,
|
||||
},
|
||||
full: {
|
||||
total: 1,
|
||||
manual: 0,
|
||||
scheduled: 1,
|
||||
completed: 1,
|
||||
errored: 0,
|
||||
canceled: 0,
|
||||
suspended: 0,
|
||||
idle: 0,
|
||||
running: 0,
|
||||
totalDurationSeconds: 100,
|
||||
},
|
||||
incremental: {
|
||||
total: 1,
|
||||
manual: 1,
|
||||
scheduled: 0,
|
||||
completed: 0,
|
||||
errored: 1,
|
||||
canceled: 0,
|
||||
suspended: 0,
|
||||
idle: 0,
|
||||
running: 0,
|
||||
totalDurationSeconds: 100,
|
||||
},
|
||||
},
|
||||
last7Days: {
|
||||
overall: {
|
||||
total: 2,
|
||||
manual: 2,
|
||||
scheduled: 0,
|
||||
completed: 1,
|
||||
errored: 1,
|
||||
canceled: 0,
|
||||
suspended: 0,
|
||||
idle: 0,
|
||||
running: 0,
|
||||
totalDurationSeconds: 120,
|
||||
},
|
||||
accessControl: {
|
||||
total: 1,
|
||||
manual: 1,
|
||||
scheduled: 0,
|
||||
completed: 1,
|
||||
errored: 0,
|
||||
canceled: 0,
|
||||
suspended: 0,
|
||||
idle: 0,
|
||||
running: 0,
|
||||
totalDurationSeconds: 20,
|
||||
},
|
||||
incremental: {
|
||||
total: 1,
|
||||
manual: 1,
|
||||
scheduled: 0,
|
||||
completed: 0,
|
||||
errored: 1,
|
||||
canceled: 0,
|
||||
suspended: 0,
|
||||
idle: 0,
|
||||
running: 0,
|
||||
totalDurationSeconds: 100,
|
||||
},
|
||||
},
|
||||
},
|
||||
withTextExtractionServiceEnabled: {
|
||||
total: 3,
|
||||
last30Days: {
|
||||
overall: {
|
||||
total: 3,
|
||||
manual: 2,
|
||||
scheduled: 1,
|
||||
completed: 2,
|
||||
errored: 1,
|
||||
canceled: 0,
|
||||
suspended: 0,
|
||||
idle: 0,
|
||||
running: 0,
|
||||
totalDurationSeconds: 220,
|
||||
},
|
||||
accessControl: {
|
||||
total: 1,
|
||||
manual: 1,
|
||||
scheduled: 0,
|
||||
completed: 1,
|
||||
errored: 0,
|
||||
canceled: 0,
|
||||
suspended: 0,
|
||||
idle: 0,
|
||||
running: 0,
|
||||
totalDurationSeconds: 20,
|
||||
},
|
||||
full: {
|
||||
total: 1,
|
||||
manual: 0,
|
||||
scheduled: 1,
|
||||
completed: 1,
|
||||
errored: 0,
|
||||
canceled: 0,
|
||||
suspended: 0,
|
||||
idle: 0,
|
||||
running: 0,
|
||||
totalDurationSeconds: 100,
|
||||
},
|
||||
incremental: {
|
||||
total: 1,
|
||||
manual: 1,
|
||||
scheduled: 0,
|
||||
completed: 0,
|
||||
errored: 1,
|
||||
canceled: 0,
|
||||
suspended: 0,
|
||||
idle: 0,
|
||||
running: 0,
|
||||
totalDurationSeconds: 100,
|
||||
},
|
||||
},
|
||||
last7Days: {
|
||||
overall: {
|
||||
total: 2,
|
||||
manual: 2,
|
||||
scheduled: 0,
|
||||
completed: 1,
|
||||
errored: 1,
|
||||
canceled: 0,
|
||||
suspended: 0,
|
||||
idle: 0,
|
||||
running: 0,
|
||||
totalDurationSeconds: 120,
|
||||
},
|
||||
accessControl: {
|
||||
total: 1,
|
||||
manual: 1,
|
||||
scheduled: 0,
|
||||
completed: 1,
|
||||
errored: 0,
|
||||
canceled: 0,
|
||||
suspended: 0,
|
||||
idle: 0,
|
||||
running: 0,
|
||||
totalDurationSeconds: 20,
|
||||
},
|
||||
incremental: {
|
||||
total: 1,
|
||||
manual: 1,
|
||||
scheduled: 0,
|
||||
completed: 0,
|
||||
errored: 1,
|
||||
canceled: 0,
|
||||
suspended: 0,
|
||||
idle: 0,
|
||||
running: 0,
|
||||
totalDurationSeconds: 100,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
export const expectedMysqlConnectorStats: ConnectorStats = {
|
||||
id: mysqlConnector.id,
|
||||
serviceType: mysqlConnector.service_type,
|
||||
isNative: mysqlConnector.is_native,
|
||||
isDeleted: false,
|
||||
status: mysqlConnector.status,
|
||||
indexName: mysqlConnector.index_name,
|
||||
dlsEnabled: false,
|
||||
sslEnabled: true,
|
||||
fetchSelectively: false,
|
||||
textExtractionServiceEnabled: false,
|
||||
documents: {
|
||||
total: 2000,
|
||||
volume: 20000,
|
||||
inLastSync: 2000,
|
||||
},
|
||||
dataSourceSpecific: {
|
||||
mysql: {
|
||||
tables: 7,
|
||||
},
|
||||
},
|
||||
scheduling: {
|
||||
accessControl: mysqlConnector.scheduling.access_control,
|
||||
full: mysqlConnector.scheduling.full,
|
||||
incremental: mysqlConnector.scheduling.incremental,
|
||||
},
|
||||
syncRules: {
|
||||
active: {
|
||||
withBasicRules: false,
|
||||
withAdvancedRules: false,
|
||||
},
|
||||
draft: {
|
||||
withBasicRules: false,
|
||||
withAdvancedRules: false,
|
||||
},
|
||||
},
|
||||
ingestPipeline: {
|
||||
name: mysqlConnector.pipeline.name,
|
||||
extractBinaryContent: mysqlConnector.pipeline.extract_binary_content,
|
||||
reduceWhitespace: mysqlConnector.pipeline.reduce_whitespace,
|
||||
runMLInference: mysqlConnector.pipeline.run_ml_inference,
|
||||
},
|
||||
syncJobs: {
|
||||
overall: {
|
||||
total: 1,
|
||||
last30Days: {
|
||||
overall: {
|
||||
total: 1,
|
||||
manual: 0,
|
||||
scheduled: 1,
|
||||
completed: 1,
|
||||
errored: 0,
|
||||
canceled: 0,
|
||||
suspended: 0,
|
||||
idle: 0,
|
||||
running: 0,
|
||||
totalDurationSeconds: 200,
|
||||
},
|
||||
full: {
|
||||
total: 1,
|
||||
manual: 0,
|
||||
scheduled: 1,
|
||||
completed: 1,
|
||||
errored: 0,
|
||||
canceled: 0,
|
||||
suspended: 0,
|
||||
idle: 0,
|
||||
running: 0,
|
||||
totalDurationSeconds: 200,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
export const expectedDeletedConnectorStats: ConnectorStats = {
|
||||
id: orphanedSyncJob.connector.id,
|
||||
isDeleted: true,
|
||||
syncJobs: {
|
||||
overall: {
|
||||
total: 1,
|
||||
last30Days: {
|
||||
overall: {
|
||||
total: 1,
|
||||
manual: 1,
|
||||
scheduled: 0,
|
||||
completed: 1,
|
||||
errored: 0,
|
||||
canceled: 0,
|
||||
suspended: 0,
|
||||
idle: 0,
|
||||
running: 0,
|
||||
totalDurationSeconds: 200,
|
||||
},
|
||||
full: {
|
||||
total: 1,
|
||||
manual: 1,
|
||||
scheduled: 0,
|
||||
completed: 1,
|
||||
errored: 0,
|
||||
canceled: 0,
|
||||
suspended: 0,
|
||||
idle: 0,
|
||||
running: 0,
|
||||
totalDurationSeconds: 200,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
|
@@ -91,7 +91,9 @@ describe('createConnectorDocument', () => {
  last_access_control_sync_error: null,
  last_access_control_sync_scheduled_at: null,
  last_access_control_sync_status: null,
  last_deleted_document_count: null,
  last_incremental_sync_scheduled_at: null,
  last_indexed_document_count: null,
  last_seen: null,
  last_sync_error: null,
  last_sync_scheduled_at: null,
@@ -101,7 +101,9 @@ export function createConnectorDocument({
  last_access_control_sync_error: null,
  last_access_control_sync_scheduled_at: null,
  last_access_control_sync_status: null,
  last_deleted_document_count: null,
  last_incremental_sync_scheduled_at: null,
  last_indexed_document_count: null,
  last_seen: null,
  last_sync_error: null,
  last_sync_scheduled_at: null,
@@ -7,6 +7,7 @@
 */

export * from './cancel_syncs';
export * from './collect_connector_stats';
export * from './create_connector';
export * from './create_connector_document';
export * from './create_connector_secret';
packages/kbn-search-connectors/types/connector_stats.ts (new file, +124 lines)
@@ -0,0 +1,124 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import { ConnectorStatus } from '..';

export interface ConnectorStats {
  id: string;
  serviceType?: string | null;
  isNative?: boolean;
  isDeleted: boolean;
  status?: ConnectorStatus;
  indexName?: string | null;
  dlsEnabled?: boolean;
  sslEnabled?: boolean;
  fetchSelectively?: boolean;
  textExtractionServiceEnabled?: boolean;
  documents?: DocumentsStats;
  dataSourceSpecific?: DataSourceSpecificStats;
  scheduling?: {
    accessControl: Scheduling;
    full: Scheduling;
    incremental: Scheduling;
  };
  syncRules?: {
    active: {
      withBasicRules: boolean;
      withAdvancedRules: boolean;
    };
    draft: {
      withBasicRules: boolean;
      withAdvancedRules: boolean;
    };
  };
  ingestPipeline?: {
    name: string;
    extractBinaryContent: boolean;
    reduceWhitespace: boolean;
    runMLInference: boolean;
  };
  syncJobs?: SyncJobStats;
}

export interface DataSourceSpecificStats {
  confluence?: {
    dataSourceType: string;
  };
  github?: {
    isCloud: boolean;
  };
  jira?: {
    dataSourceType: string;
  };
  mongodb?: {
    directConnect: boolean;
  };
  mssql?: {
    validateHost: boolean;
    tables: number;
  };
  mysql?: {
    tables: number;
  };
  oracle?: {
    tables: number;
  };
  postgresql?: {
    tables: number;
  };
  slack?: {
    autoJoinChannelsEnabled: boolean;
    syncUsersEnabled: boolean;
    fetchLastNDays: number;
  };
  zoom?: {
    recordingAge: number;
  };
}

export interface DocumentsStats {
  total: number;
  volume: number;
  inLastSync: number;
}

interface Scheduling {
  enabled: boolean;
  interval: string;
}

export interface SyncJobStats {
  overall: SyncJobStatsDetails;
  withTextExtractionServiceEnabled?: SyncJobStatsDetails;
}

export interface SyncJobStatsDetails {
  total: number;
  last30Days?: SyncJobStatsByType;
  last7Days?: SyncJobStatsByType;
}

export interface SyncJobStatsByType {
  overall: SyncJobStatsByState;
  accessControl?: SyncJobStatsByState;
  full?: SyncJobStatsByState;
  incremental?: SyncJobStatsByState;
}

export interface SyncJobStatsByState {
  total: number;
  manual: number;
  scheduled: number;
  completed: number;
  errored: number;
  canceled: number;
  suspended: number;
  idle: number;
  running: number;
  totalDurationSeconds: number;
}
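To make the optional fields concrete, here is an illustrative pair of values satisfying this interface: a live connector populates most fields, while an orphaned (deleted) connector carries only `id`, `isDeleted`, and its aggregated `syncJobs`, matching the orphaned-sync-job branch in `collectConnectorStats` above. The values and the import path are assumptions for illustration only.

```ts
import { ConnectorStats } from '@kbn/search-connectors';

// Illustrative values only; not taken from real telemetry.
const liveConnector: ConnectorStats = {
  id: 'connector-1',
  serviceType: 'mysql',
  isNative: true,
  isDeleted: false,
  dlsEnabled: false,
  documents: { total: 2000, volume: 20000, inLastSync: 2000 },
  dataSourceSpecific: { mysql: { tables: 7 } },
};

const deletedConnector: ConnectorStats = {
  id: 'connector-2',
  isDeleted: true,
  syncJobs: {
    overall: { total: 1 },
  },
};
```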
@@ -230,7 +230,9 @@ export interface Connector {
  last_access_control_sync_error: string | null;
  last_access_control_sync_scheduled_at: string | null;
  last_access_control_sync_status: SyncStatus | null;
  last_deleted_document_count: number | null;
  last_incremental_sync_scheduled_at: string | null;
  last_indexed_document_count: number | null;
  last_seen: string | null;
  last_sync_error: string | null;
  last_sync_scheduled_at: string | null;
@@ -8,6 +8,7 @@

export * from './connectors';
export * from './connectors_api';
export * from './connector_stats';
export * from './native_connectors';
export * from './optimistic_concurrency';
export * from './pagination';
x-pack/plugins/enterprise_search/common/types/connector_stats.ts (new file, +121 lines)
@@ -0,0 +1,121 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

export interface ConnectorStats {
  id: string;
  serviceType?: string | null;
  isNative?: boolean;
  isDeleted: boolean;
  status?: string;
  indexName?: string | null;
  dlsEnabled?: boolean;
  sslEnabled?: boolean;
  fetchSelectively?: boolean;
  textExtractionServiceEnabled?: boolean;
  documents?: DocumentsStats;
  dataSourceSpecific?: DataSourceSpecificStats;
  scheduling?: {
    accessControl: Scheduling;
    full: Scheduling;
    incremental: Scheduling;
  };
  syncRules?: {
    active: {
      withBasicRules: boolean;
      withAdvancedRules: boolean;
    };
    draft: {
      withBasicRules: boolean;
      withAdvancedRules: boolean;
    };
  };
  ingestPipeline?: {
    name: string;
    extractBinaryContent: boolean;
    reduceWhitespace: boolean;
    runMLInference: boolean;
  };
  syncJobs?: SyncJobStats;
}

export interface DataSourceSpecificStats {
  confluence?: {
    dataSourceType: string;
  };
  github?: {
    isCloud: boolean;
  };
  jira?: {
    dataSourceType: string;
  };
  mongodb?: {
    directConnect: boolean;
  };
  mssql?: {
    validateHost: boolean;
    tables: number;
  };
  mysql?: {
    tables: number;
  };
  oracle?: {
    tables: number;
  };
  postgresql?: {
    tables: number;
  };
  slack?: {
    autoJoinChannelsEnabled: boolean;
    syncUsersEnabled: boolean;
    fetchLastNDays: number;
  };
  zoom?: {
    recordingAge: number;
  };
}

export interface DocumentsStats {
  total: number;
  volume: number;
  inLastSync: number;
}

interface Scheduling {
  enabled: boolean;
  interval: string;
}

export interface SyncJobStats {
  overall: SyncJobStatsDetails;
  withTextExtractionServiceEnabled?: SyncJobStatsDetails;
}

export interface SyncJobStatsDetails {
  total: number;
  last30Days?: SyncJobStatsByType;
  last7Days?: SyncJobStatsByType;
}

export interface SyncJobStatsByType {
  overall: SyncJobStatsByState;
  accessControl?: SyncJobStatsByState;
  full?: SyncJobStatsByState;
  incremental?: SyncJobStatsByState;
}

export interface SyncJobStatsByState {
  total: number;
  manual: number;
  scheduled: number;
  completed: number;
  errored: number;
  canceled: number;
  suspended: number;
  idle: number;
  running: number;
  totalDurationSeconds: number;
}
@@ -71,5 +71,6 @@ export interface ClientConfigType {
  };
}

export type { ConnectorStats } from './connector_stats';
export type { ElasticsearchIndexWithPrivileges } from './indices';
export type { KibanaDeps } from './kibana_deps';
@@ -121,7 +121,9 @@ export const indices: ElasticsearchIndexWithIngestion[] = [
  last_access_control_sync_error: null,
  last_access_control_sync_scheduled_at: null,
  last_access_control_sync_status: SyncStatus.COMPLETED,
  last_deleted_document_count: null,
  last_incremental_sync_scheduled_at: null,
  last_indexed_document_count: null,
  last_seen: null,
  last_sync_error: null,
  last_sync_scheduled_at: null,
@@ -248,7 +250,9 @@ export const indices: ElasticsearchIndexWithIngestion[] = [
  last_access_control_sync_error: null,
  last_access_control_sync_scheduled_at: null,
  last_access_control_sync_status: SyncStatus.COMPLETED,
  last_deleted_document_count: null,
  last_incremental_sync_scheduled_at: null,
  last_indexed_document_count: null,
  last_seen: null,
  last_sync_error: null,
  last_sync_scheduled_at: null,
@@ -127,7 +127,9 @@ export const connectorIndex: ConnectorViewIndex = {
  last_access_control_sync_error: null,
  last_access_control_sync_scheduled_at: null,
  last_access_control_sync_status: SyncStatus.COMPLETED,
  last_deleted_document_count: null,
  last_incremental_sync_scheduled_at: null,
  last_indexed_document_count: null,
  last_seen: null,
  last_sync_error: null,
  last_sync_scheduled_at: null,
@@ -258,7 +260,9 @@ export const crawlerIndex: CrawlerViewIndex = {
  last_access_control_sync_error: null,
  last_access_control_sync_scheduled_at: null,
  last_access_control_sync_status: SyncStatus.COMPLETED,
  last_deleted_document_count: null,
  last_incremental_sync_scheduled_at: null,
  last_indexed_document_count: null,
  last_seen: null,
  last_sync_error: null,
  last_sync_scheduled_at: null,
@ -5,19 +5,18 @@
|
|||
* 2.0.
|
||||
*/
|
||||
|
||||
import { mockLogger } from '../../__mocks__';
|
||||
|
||||
import { collectConnectorStats } from '@kbn/search-connectors';
|
||||
import { createCollectorFetchContextMock } from '@kbn/usage-collection-plugin/server/mocks';
|
||||
|
||||
import { ConnectorStats } from '../../../common/types';
|
||||
|
||||
import { registerTelemetryUsageCollector } from './telemetry';
|
||||
|
||||
const indexNotFoundError = {
|
||||
meta: {
|
||||
body: {
|
||||
error: {
|
||||
type: 'index_not_found_exception',
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
jest.mock('@kbn/search-connectors', () => ({
|
||||
collectConnectorStats: jest.fn(),
|
||||
}));
|
||||
|
||||
describe('Connectors Telemetry Usage Collector', () => {
|
||||
const makeUsageCollectorStub = jest.fn();
|
||||
|
@ -32,7 +31,7 @@ describe('Connectors Telemetry Usage Collector', () => {
|
|||
|
||||
describe('registerTelemetryUsageCollector', () => {
|
||||
it('should make and register the usage collector', () => {
|
||||
registerTelemetryUsageCollector(usageCollectionMock);
|
||||
registerTelemetryUsageCollector(usageCollectionMock, mockLogger);
|
||||
|
||||
expect(registerStub).toHaveBeenCalledTimes(1);
|
||||
expect(makeUsageCollectorStub).toHaveBeenCalledTimes(1);
|
||||
|
@ -43,42 +42,30 @@ describe('Connectors Telemetry Usage Collector', () => {
|
|||
|
||||
describe('fetchTelemetryMetrics', () => {
|
||||
it('should return telemetry data', async () => {
|
||||
const fetchContextMock = createCollectorFetchContextMock();
|
||||
fetchContextMock.esClient.count = jest.fn().mockImplementation((query: any) =>
|
||||
Promise.resolve({
|
||||
count: query.query.bool.filter[0].term.is_native ? 5 : 2,
|
||||
})
|
||||
);
|
||||
registerTelemetryUsageCollector(usageCollectionMock);
|
||||
const connectorStats: ConnectorStats = {
|
||||
id: '1',
|
||||
isDeleted: false,
|
||||
};
|
||||
(collectConnectorStats as jest.Mock).mockImplementation(() => [connectorStats]);
|
||||
registerTelemetryUsageCollector(usageCollectionMock, mockLogger);
|
||||
const telemetryMetrics = await makeUsageCollectorStub.mock.calls[0][0].fetch(
|
||||
fetchContextMock
|
||||
createCollectorFetchContextMock()
|
||||
);
|
||||
|
||||
expect(telemetryMetrics).toEqual({
|
||||
native: {
|
||||
total: 5,
|
||||
},
|
||||
clients: {
|
||||
total: 2,
|
||||
},
|
||||
connectors: [connectorStats],
|
||||
});
|
||||
});
|
||||
it('should return default telemetry on index not found error', async () => {
|
||||
const fetchContextMock = createCollectorFetchContextMock();
|
||||
fetchContextMock.esClient.count = jest
|
||||
.fn()
|
||||
.mockImplementation(() => Promise.reject(indexNotFoundError));
|
||||
registerTelemetryUsageCollector(usageCollectionMock);
|
||||
it('should return default telemetry when collectConnectorStats raises error', async () => {
|
||||
(collectConnectorStats as jest.Mock).mockImplementation(() => {
|
||||
throw new Error();
|
||||
});
|
||||
registerTelemetryUsageCollector(usageCollectionMock, mockLogger);
|
||||
const telemetryMetrics = await makeUsageCollectorStub.mock.calls[0][0].fetch(
|
||||
fetchContextMock
|
||||
createCollectorFetchContextMock()
|
||||
);
|
||||
expect(telemetryMetrics).toEqual({
|
||||
native: {
|
||||
total: 0,
|
||||
},
|
||||
clients: {
|
||||
total: 0,
|
||||
},
|
||||
connectors: [],
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
@ -5,49 +5,329 @@
|
|||
* 2.0.
|
||||
*/
|
||||
|
||||
import { ElasticsearchClient } from '@kbn/core/server';
|
||||
import { ElasticsearchClient, Logger } from '@kbn/core/server';
|
||||
|
||||
import { CONNECTORS_INDEX } from '@kbn/search-connectors';
|
||||
import { collectConnectorStats } from '@kbn/search-connectors';
|
||||
import { UsageCollectionSetup } from '@kbn/usage-collection-plugin/server';
|
||||
|
||||
import { isIndexNotFoundException } from '../../utils/identify_exceptions';
|
||||
import { ConnectorStats } from '../../../common/types';
|
||||
|
||||
interface Telemetry {
|
||||
native: {
|
||||
total: number;
|
||||
};
|
||||
clients: {
|
||||
total: number;
|
||||
};
|
||||
connectors: ConnectorStats[];
|
||||
}
|
||||
|
||||
const defaultTelemetryMetrics: Telemetry = {
|
||||
native: {
|
||||
total: 0,
|
||||
},
|
||||
clients: {
|
||||
total: 0,
|
||||
},
|
||||
connectors: [],
|
||||
};
|
||||
|
||||
/**
|
||||
* Register the telemetry collector
|
||||
*/
|
||||
|
||||
export const registerTelemetryUsageCollector = (usageCollection: UsageCollectionSetup) => {
|
||||
export const registerTelemetryUsageCollector = (
|
||||
usageCollection: UsageCollectionSetup,
|
||||
log: Logger
|
||||
) => {
|
||||
const telemetryUsageCollector = usageCollection.makeUsageCollector<Telemetry>({
|
||||
type: 'connectors',
|
||||
isReady: () => true,
|
||||
schema: {
|
||||
native: {
|
||||
total: { type: 'long' },
|
||||
},
|
||||
clients: {
|
||||
total: { type: 'long' },
|
||||
connectors: {
|
||||
type: 'array',
|
||||
items: {
|
||||
id: { type: 'keyword' },
|
||||
serviceType: { type: 'keyword' },
|
||||
isNative: { type: 'boolean' },
|
||||
isDeleted: { type: 'boolean' },
|
||||
status: { type: 'keyword' },
|
||||
indexName: { type: 'keyword' },
|
||||
dlsEnabled: { type: 'boolean' },
|
||||
sslEnabled: { type: 'boolean' },
|
||||
fetchSelectively: { type: 'boolean' },
|
||||
textExtractionServiceEnabled: { type: 'boolean' },
|
||||
documents: {
|
||||
total: { type: 'long' },
|
||||
volume: { type: 'long' },
|
||||
inLastSync: { type: 'long' },
|
||||
},
|
||||
dataSourceSpecific: {
|
||||
confluence: {
|
||||
dataSourceType: { type: 'keyword' },
|
||||
},
|
||||
github: {
|
||||
isCloud: { type: 'boolean' },
|
||||
},
|
||||
jira: {
|
||||
dataSourceType: { type: 'keyword' },
|
||||
},
|
||||
mongodb: {
|
||||
directConnect: { type: 'boolean' },
|
||||
},
|
||||
mssql: {
|
||||
validateHost: { type: 'boolean' },
|
||||
tables: { type: 'long' },
|
||||
},
|
||||
mysql: {
|
||||
tables: { type: 'long' },
|
||||
},
|
||||
oracle: {
|
||||
tables: { type: 'long' },
|
||||
},
|
||||
postgresql: {
|
||||
tables: { type: 'long' },
|
||||
},
|
||||
slack: {
|
||||
autoJoinChannelsEnabled: { type: 'boolean' },
|
||||
syncUsersEnabled: { type: 'boolean' },
|
||||
fetchLastNDays: { type: 'long' },
|
||||
},
|
||||
zoom: {
|
||||
recordingAge: { type: 'long' },
|
||||
},
|
||||
},
|
||||
scheduling: {
|
||||
accessControl: {
|
||||
enabled: { type: 'boolean' },
|
||||
interval: { type: 'text' },
|
||||
},
|
||||
full: {
|
||||
enabled: { type: 'boolean' },
|
||||
interval: { type: 'text' },
|
||||
},
|
||||
incremental: {
|
||||
enabled: { type: 'boolean' },
|
||||
interval: { type: 'text' },
|
||||
},
|
||||
},
|
||||
syncRules: {
|
||||
active: {
|
||||
withBasicRules: { type: 'boolean' },
|
||||
withAdvancedRules: { type: 'boolean' },
|
||||
},
|
||||
draft: {
|
||||
withBasicRules: { type: 'boolean' },
|
||||
withAdvancedRules: { type: 'boolean' },
|
||||
},
|
||||
},
|
||||
ingestPipeline: {
|
||||
name: { type: 'keyword' },
|
||||
extractBinaryContent: { type: 'boolean' },
|
||||
reduceWhitespace: { type: 'boolean' },
|
||||
runMLInference: { type: 'boolean' },
|
||||
},
|
||||
syncJobs: {
|
||||
overall: {
|
||||
total: { type: 'long' },
|
||||
last30Days: {
|
||||
overall: {
|
||||
total: { type: 'long' },
|
||||
manual: { type: 'long' },
|
||||
scheduled: { type: 'long' },
|
||||
completed: { type: 'long' },
|
||||
errored: { type: 'long' },
|
||||
canceled: { type: 'long' },
|
||||
suspended: { type: 'long' },
|
||||
idle: { type: 'long' },
|
||||
running: { type: 'long' },
|
||||
totalDurationSeconds: { type: 'long' },
|
||||
},
|
||||
accessControl: {
|
||||
total: { type: 'long' },
|
||||
manual: { type: 'long' },
|
||||
scheduled: { type: 'long' },
|
||||
completed: { type: 'long' },
|
||||
errored: { type: 'long' },
|
||||
canceled: { type: 'long' },
|
||||
suspended: { type: 'long' },
|
||||
idle: { type: 'long' },
|
||||
running: { type: 'long' },
|
||||
totalDurationSeconds: { type: 'long' },
|
||||
},
|
||||
full: {
|
||||
total: { type: 'long' },
|
||||
manual: { type: 'long' },
|
||||
scheduled: { type: 'long' },
|
||||
completed: { type: 'long' },
|
||||
errored: { type: 'long' },
|
||||
canceled: { type: 'long' },
|
||||
suspended: { type: 'long' },
|
||||
idle: { type: 'long' },
|
||||
running: { type: 'long' },
|
||||
totalDurationSeconds: { type: 'long' },
|
||||
},
|
||||
incremental: {
|
||||
total: { type: 'long' },
|
||||
manual: { type: 'long' },
|
||||
scheduled: { type: 'long' },
|
||||
completed: { type: 'long' },
|
||||
errored: { type: 'long' },
|
||||
canceled: { type: 'long' },
|
||||
suspended: { type: 'long' },
|
||||
idle: { type: 'long' },
|
||||
running: { type: 'long' },
|
||||
totalDurationSeconds: { type: 'long' },
|
||||
},
|
||||
},
|
||||
last7Days: {
|
||||
overall: {
|
||||
total: { type: 'long' },
|
||||
manual: { type: 'long' },
|
||||
scheduled: { type: 'long' },
|
||||
completed: { type: 'long' },
|
||||
errored: { type: 'long' },
|
||||
canceled: { type: 'long' },
|
||||
suspended: { type: 'long' },
|
||||
idle: { type: 'long' },
|
||||
running: { type: 'long' },
|
||||
totalDurationSeconds: { type: 'long' },
|
||||
},
|
||||
accessControl: {
|
||||
total: { type: 'long' },
|
||||
manual: { type: 'long' },
|
||||
scheduled: { type: 'long' },
|
||||
completed: { type: 'long' },
|
||||
errored: { type: 'long' },
|
||||
canceled: { type: 'long' },
|
||||
suspended: { type: 'long' },
|
||||
idle: { type: 'long' },
|
||||
running: { type: 'long' },
|
||||
totalDurationSeconds: { type: 'long' },
|
||||
},
|
||||
full: {
|
||||
total: { type: 'long' },
|
||||
manual: { type: 'long' },
|
||||
scheduled: { type: 'long' },
|
||||
completed: { type: 'long' },
|
||||
errored: { type: 'long' },
|
||||
canceled: { type: 'long' },
|
||||
suspended: { type: 'long' },
|
||||
idle: { type: 'long' },
running: { type: 'long' },
totalDurationSeconds: { type: 'long' },
},
incremental: {
total: { type: 'long' },
manual: { type: 'long' },
scheduled: { type: 'long' },
completed: { type: 'long' },
errored: { type: 'long' },
canceled: { type: 'long' },
suspended: { type: 'long' },
idle: { type: 'long' },
running: { type: 'long' },
totalDurationSeconds: { type: 'long' },
},
},
},
withTextExtractionServiceEnabled: {
total: { type: 'long' },
last30Days: {
overall: {
total: { type: 'long' },
manual: { type: 'long' },
scheduled: { type: 'long' },
completed: { type: 'long' },
errored: { type: 'long' },
canceled: { type: 'long' },
suspended: { type: 'long' },
idle: { type: 'long' },
running: { type: 'long' },
totalDurationSeconds: { type: 'long' },
},
accessControl: {
total: { type: 'long' },
manual: { type: 'long' },
scheduled: { type: 'long' },
completed: { type: 'long' },
errored: { type: 'long' },
canceled: { type: 'long' },
suspended: { type: 'long' },
idle: { type: 'long' },
running: { type: 'long' },
totalDurationSeconds: { type: 'long' },
},
full: {
total: { type: 'long' },
manual: { type: 'long' },
scheduled: { type: 'long' },
completed: { type: 'long' },
errored: { type: 'long' },
canceled: { type: 'long' },
suspended: { type: 'long' },
idle: { type: 'long' },
running: { type: 'long' },
totalDurationSeconds: { type: 'long' },
},
incremental: {
total: { type: 'long' },
manual: { type: 'long' },
scheduled: { type: 'long' },
completed: { type: 'long' },
errored: { type: 'long' },
canceled: { type: 'long' },
suspended: { type: 'long' },
idle: { type: 'long' },
running: { type: 'long' },
totalDurationSeconds: { type: 'long' },
},
},
last7Days: {
overall: {
total: { type: 'long' },
manual: { type: 'long' },
scheduled: { type: 'long' },
completed: { type: 'long' },
errored: { type: 'long' },
canceled: { type: 'long' },
suspended: { type: 'long' },
idle: { type: 'long' },
running: { type: 'long' },
totalDurationSeconds: { type: 'long' },
},
accessControl: {
total: { type: 'long' },
manual: { type: 'long' },
scheduled: { type: 'long' },
completed: { type: 'long' },
errored: { type: 'long' },
canceled: { type: 'long' },
suspended: { type: 'long' },
idle: { type: 'long' },
running: { type: 'long' },
totalDurationSeconds: { type: 'long' },
},
full: {
total: { type: 'long' },
manual: { type: 'long' },
scheduled: { type: 'long' },
completed: { type: 'long' },
errored: { type: 'long' },
canceled: { type: 'long' },
suspended: { type: 'long' },
idle: { type: 'long' },
running: { type: 'long' },
totalDurationSeconds: { type: 'long' },
},
incremental: {
total: { type: 'long' },
manual: { type: 'long' },
scheduled: { type: 'long' },
completed: { type: 'long' },
errored: { type: 'long' },
canceled: { type: 'long' },
suspended: { type: 'long' },
idle: { type: 'long' },
running: { type: 'long' },
totalDurationSeconds: { type: 'long' },
},
},
},
},
},
},
},
},
async fetch({ esClient }) {
return await fetchTelemetryMetrics(esClient);
return await fetchTelemetryMetrics(esClient, log);
},
});
usageCollection.registerCollector(telemetryUsageCollector);

@ -56,61 +336,17 @@ export const registerTelemetryUsageCollector = (usageCollection: UsageCollection
/**
* Fetch the aggregated telemetry metrics
*/

// @ts-ignore
export const fetchTelemetryMetrics = async (client: ElasticsearchClient): Promise<Telemetry> => {
export const fetchTelemetryMetrics = async (
client: ElasticsearchClient,
log: Logger
): Promise<Telemetry> => {
try {
const [nativeCountResponse, clientsCountResponse] = await Promise.all([
client.count({
index: CONNECTORS_INDEX,
query: {
bool: {
filter: [
{
term: {
is_native: true,
},
},
],
must_not: [
{
term: {
service_type: {
value: 'elastic-crawler',
},
},
},
],
},
},
}),
client.count({
index: CONNECTORS_INDEX,
query: {
bool: {
filter: [
{
term: {
is_native: false,
},
},
],
},
},
}),
]);

const connectors = await collectConnectorStats(client);
return {
native: {
total: nativeCountResponse.count,
},
clients: {
total: clientsCountResponse.count,
},
connectors,
} as Telemetry;
} catch (error) {
if (isIndexNotFoundException(error)) {
return defaultTelemetryMetrics;
}
log.error(`Couldn't fetch telemetry due to error: ${error}`);
return defaultTelemetryMetrics;
}
};
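For context, `isIndexNotFoundException` (imported from `@kbn/search-connectors/utils/identify_exceptions`) is what lets the collector fall back to `defaultTelemetryMetrics` when the connectors index does not exist yet. A minimal sketch of such a guard is shown below; the helper name and error shape are assumptions based on the `indexNotFoundError` fixture used in the serverless telemetry test further down, not the actual implementation.

```ts
// Illustrative sketch only -- the real check lives in
// @kbn/search-connectors/utils/identify_exceptions. The error shape assumed
// here mirrors the indexNotFoundError fixture from the telemetry tests.
interface EsErrorLike {
  meta?: { body?: { error?: { type?: string } } };
}

const looksLikeIndexNotFound = (error: unknown): boolean =>
  (error as EsErrorLike)?.meta?.body?.error?.type === 'index_not_found_exception';
```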
@ -93,7 +93,9 @@ describe('recreateConnectorDocument lib function', () => {
last_access_control_sync_error: null,
last_access_control_sync_scheduled_at: null,
last_access_control_sync_status: null,
last_deleted_document_count: null,
last_incremental_sync_scheduled_at: null,
last_indexed_document_count: null,
last_seen: null,
last_sync_error: null,
last_sync_scheduled_at: null,
@ -288,7 +288,7 @@ export class EnterpriseSearchPlugin implements Plugin {

if (usageCollection) {
registerESTelemetryUsageCollector(usageCollection, savedObjectsStarted, this.logger);
registerCNTelemetryUsageCollector(usageCollection);
registerCNTelemetryUsageCollector(usageCollection, this.logger);
if (config.canDeployEntSearch) {
registerASTelemetryUsageCollector(usageCollection, savedObjectsStarted, this.logger);
registerWSTelemetryUsageCollector(usageCollection, savedObjectsStarted, this.logger);
x-pack/plugins/serverless_search/common/types/connector_stats.ts (new file, 121 additions)
@ -0,0 +1,121 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/

export interface ConnectorStats {
id: string;
serviceType?: string | null;
isNative?: boolean;
isDeleted: boolean;
status?: string;
indexName?: string | null;
dlsEnabled?: boolean;
sslEnabled?: boolean;
fetchSelectively?: boolean;
textExtractionServiceEnabled?: boolean;
documents?: DocumentsStats;
dataSourceSpecific?: DataSourceSpecificStats;
scheduling?: {
accessControl: Scheduling;
full: Scheduling;
incremental: Scheduling;
};
syncRules?: {
active: {
withBasicRules: boolean;
withAdvancedRules: boolean;
};
draft: {
withBasicRules: boolean;
withAdvancedRules: boolean;
};
};
ingestPipeline?: {
name: string;
extractBinaryContent: boolean;
reduceWhitespace: boolean;
runMLInference: boolean;
};
syncJobs?: SyncJobStats;
}

export interface DataSourceSpecificStats {
confluence?: {
dataSourceType: string;
};
github?: {
isCloud: boolean;
};
jira?: {
dataSourceType: string;
};
mongodb?: {
directConnect: boolean;
};
mssql?: {
validateHost: boolean;
tables: number;
};
mysql?: {
tables: number;
};
oracle?: {
tables: number;
};
postgresql?: {
tables: number;
};
slack?: {
autoJoinChannelsEnabled: boolean;
syncUsersEnabled: boolean;
fetchLastNDays: number;
};
zoom?: {
recordingAge: number;
};
}

export interface DocumentsStats {
total: number;
volume: number;
inLastSync: number;
}

interface Scheduling {
enabled: boolean;
interval: string;
}

export interface SyncJobStats {
overall: SyncJobStatsDetails;
withTextExtractionServiceEnabled?: SyncJobStatsDetails;
}

export interface SyncJobStatsDetails {
total: number;
last30Days?: SyncJobStatsByType;
last7Days?: SyncJobStatsByType;
}

export interface SyncJobStatsByType {
overall: SyncJobStatsByState;
accessControl?: SyncJobStatsByState;
full?: SyncJobStatsByState;
incremental?: SyncJobStatsByState;
}

export interface SyncJobStatsByState {
total: number;
manual: number;
scheduled: number;
completed: number;
errored: number;
canceled: number;
suspended: number;
idle: number;
running: number;
totalDurationSeconds: number;
}
@ -31,3 +31,5 @@ export interface FetchIndexResult {
stats?: IndicesStatsIndicesStats;
};
}

export type { ConnectorStats } from './connector_stats';
@ -7,16 +7,15 @@

import { registerTelemetryUsageCollector } from './telemetry';
import { createCollectorFetchContextMock } from '@kbn/usage-collection-plugin/server/mocks';
import { loggingSystemMock } from '@kbn/core-logging-server-mocks';
import { collectConnectorStats } from '@kbn/search-connectors';
import { ConnectorStats } from '../../../common/types';

const indexNotFoundError = {
meta: {
body: {
error: {
type: 'index_not_found_exception',
},
},
},
};
jest.mock('@kbn/search-connectors', () => ({
collectConnectorStats: jest.fn(),
}));

const mockLogger = loggingSystemMock.createLogger().get();

describe('Connectors Serverless Telemetry Usage Collector', () => {
const makeUsageCollectorStub = jest.fn();

@ -32,7 +31,7 @@ describe('Connectors Serverless Telemetry Usage Collector', () => {

describe('registerTelemetryUsageCollector', () => {
it('should make and register the usage collector', () => {
registerTelemetryUsageCollector(usageCollectionMock);
registerTelemetryUsageCollector(usageCollectionMock, mockLogger);

expect(registerStub).toHaveBeenCalledTimes(1);
expect(makeUsageCollectorStub).toHaveBeenCalledTimes(1);

@ -43,42 +42,30 @@ describe('Connectors Serverless Telemetry Usage Collector', () => {

describe('fetchTelemetryMetrics', () => {
it('should return telemetry data', async () => {
const fetchContextMock = createCollectorFetchContextMock();
fetchContextMock.esClient.count = jest.fn().mockImplementation((query: any) =>
Promise.resolve({
count: query.query.bool.filter[0].term.is_native ? 5 : 2,
})
);
registerTelemetryUsageCollector(usageCollectionMock);
const connectorStats: ConnectorStats = {
id: '1',
isDeleted: false,
};
(collectConnectorStats as jest.Mock).mockImplementation(() => [connectorStats]);
registerTelemetryUsageCollector(usageCollectionMock, mockLogger);
const telemetryMetrics = await makeUsageCollectorStub.mock.calls[0][0].fetch(
fetchContextMock
createCollectorFetchContextMock()
);

expect(telemetryMetrics).toEqual({
native: {
total: 5,
},
clients: {
total: 2,
},
connectors: [connectorStats],
});
});
it('should return default telemetry on index not found error', async () => {
const fetchContextMock = createCollectorFetchContextMock();
fetchContextMock.esClient.count = jest
.fn()
.mockImplementation(() => Promise.reject(indexNotFoundError));
registerTelemetryUsageCollector(usageCollectionMock);
it('should return default telemetry when collectConnectorStats raises error', async () => {
(collectConnectorStats as jest.Mock).mockImplementation(() => {
throw new Error();
});
registerTelemetryUsageCollector(usageCollectionMock, mockLogger);
const telemetryMetrics = await makeUsageCollectorStub.mock.calls[0][0].fetch(
fetchContextMock
createCollectorFetchContextMock()
);
expect(telemetryMetrics).toEqual({
native: {
total: 0,
},
clients: {
total: 0,
},
connectors: [],
});
});
});
@ -5,48 +5,329 @@
* 2.0.
*/

import { ElasticsearchClient } from '@kbn/core/server';
import { ElasticsearchClient, Logger } from '@kbn/core/server';

import { CONNECTORS_INDEX } from '@kbn/search-connectors';
import { collectConnectorStats } from '@kbn/search-connectors';
import { UsageCollectionSetup } from '@kbn/usage-collection-plugin/server';
import { isIndexNotFoundException } from '@kbn/search-connectors/utils/identify_exceptions';

import { ConnectorStats } from '../../../common/types';

interface Telemetry {
native: {
total: number;
};
clients: {
total: number;
};
connectors: ConnectorStats[];
}

const defaultTelemetryMetrics: Telemetry = {
native: {
total: 0,
},
clients: {
total: 0,
},
connectors: [],
};

/**
* Register the telemetry collector
*/

export const registerTelemetryUsageCollector = (usageCollection: UsageCollectionSetup) => {
export const registerTelemetryUsageCollector = (
usageCollection: UsageCollectionSetup,
log: Logger
) => {
const telemetryUsageCollector = usageCollection.makeUsageCollector<Telemetry>({
type: 'connectors_serverless',
isReady: () => true,
schema: {
native: {
total: { type: 'long' },
},
clients: {
total: { type: 'long' },
connectors: {
type: 'array',
items: {
id: { type: 'keyword' },
serviceType: { type: 'keyword' },
isNative: { type: 'boolean' },
isDeleted: { type: 'boolean' },
status: { type: 'keyword' },
indexName: { type: 'keyword' },
dlsEnabled: { type: 'boolean' },
sslEnabled: { type: 'boolean' },
fetchSelectively: { type: 'boolean' },
textExtractionServiceEnabled: { type: 'boolean' },
documents: {
total: { type: 'long' },
volume: { type: 'long' },
inLastSync: { type: 'long' },
},
dataSourceSpecific: {
confluence: {
dataSourceType: { type: 'keyword' },
},
github: {
isCloud: { type: 'boolean' },
},
jira: {
dataSourceType: { type: 'keyword' },
},
mongodb: {
directConnect: { type: 'boolean' },
},
mssql: {
validateHost: { type: 'boolean' },
tables: { type: 'long' },
},
mysql: {
tables: { type: 'long' },
},
oracle: {
tables: { type: 'long' },
},
postgresql: {
tables: { type: 'long' },
},
slack: {
autoJoinChannelsEnabled: { type: 'boolean' },
syncUsersEnabled: { type: 'boolean' },
fetchLastNDays: { type: 'long' },
},
zoom: {
recordingAge: { type: 'long' },
},
},
scheduling: {
accessControl: {
enabled: { type: 'boolean' },
interval: { type: 'text' },
},
full: {
enabled: { type: 'boolean' },
interval: { type: 'text' },
},
incremental: {
enabled: { type: 'boolean' },
interval: { type: 'text' },
},
},
syncRules: {
active: {
withBasicRules: { type: 'boolean' },
withAdvancedRules: { type: 'boolean' },
},
draft: {
withBasicRules: { type: 'boolean' },
withAdvancedRules: { type: 'boolean' },
},
},
ingestPipeline: {
name: { type: 'keyword' },
extractBinaryContent: { type: 'boolean' },
reduceWhitespace: { type: 'boolean' },
runMLInference: { type: 'boolean' },
},
syncJobs: {
overall: {
total: { type: 'long' },
last30Days: {
overall: {
total: { type: 'long' },
manual: { type: 'long' },
scheduled: { type: 'long' },
completed: { type: 'long' },
errored: { type: 'long' },
canceled: { type: 'long' },
suspended: { type: 'long' },
idle: { type: 'long' },
running: { type: 'long' },
totalDurationSeconds: { type: 'long' },
},
accessControl: {
total: { type: 'long' },
manual: { type: 'long' },
scheduled: { type: 'long' },
completed: { type: 'long' },
errored: { type: 'long' },
canceled: { type: 'long' },
suspended: { type: 'long' },
idle: { type: 'long' },
running: { type: 'long' },
totalDurationSeconds: { type: 'long' },
},
full: {
total: { type: 'long' },
manual: { type: 'long' },
scheduled: { type: 'long' },
completed: { type: 'long' },
errored: { type: 'long' },
canceled: { type: 'long' },
suspended: { type: 'long' },
idle: { type: 'long' },
running: { type: 'long' },
totalDurationSeconds: { type: 'long' },
},
incremental: {
total: { type: 'long' },
manual: { type: 'long' },
scheduled: { type: 'long' },
completed: { type: 'long' },
errored: { type: 'long' },
canceled: { type: 'long' },
suspended: { type: 'long' },
idle: { type: 'long' },
running: { type: 'long' },
totalDurationSeconds: { type: 'long' },
},
},
last7Days: {
overall: {
total: { type: 'long' },
manual: { type: 'long' },
scheduled: { type: 'long' },
completed: { type: 'long' },
errored: { type: 'long' },
canceled: { type: 'long' },
suspended: { type: 'long' },
idle: { type: 'long' },
running: { type: 'long' },
totalDurationSeconds: { type: 'long' },
},
accessControl: {
total: { type: 'long' },
manual: { type: 'long' },
scheduled: { type: 'long' },
completed: { type: 'long' },
errored: { type: 'long' },
canceled: { type: 'long' },
suspended: { type: 'long' },
idle: { type: 'long' },
running: { type: 'long' },
totalDurationSeconds: { type: 'long' },
},
full: {
total: { type: 'long' },
manual: { type: 'long' },
scheduled: { type: 'long' },
completed: { type: 'long' },
errored: { type: 'long' },
canceled: { type: 'long' },
suspended: { type: 'long' },
idle: { type: 'long' },
running: { type: 'long' },
totalDurationSeconds: { type: 'long' },
},
incremental: {
total: { type: 'long' },
manual: { type: 'long' },
scheduled: { type: 'long' },
completed: { type: 'long' },
errored: { type: 'long' },
canceled: { type: 'long' },
suspended: { type: 'long' },
idle: { type: 'long' },
running: { type: 'long' },
totalDurationSeconds: { type: 'long' },
},
},
},
withTextExtractionServiceEnabled: {
total: { type: 'long' },
last30Days: {
overall: {
total: { type: 'long' },
manual: { type: 'long' },
scheduled: { type: 'long' },
completed: { type: 'long' },
errored: { type: 'long' },
canceled: { type: 'long' },
suspended: { type: 'long' },
idle: { type: 'long' },
running: { type: 'long' },
totalDurationSeconds: { type: 'long' },
},
accessControl: {
total: { type: 'long' },
manual: { type: 'long' },
scheduled: { type: 'long' },
completed: { type: 'long' },
errored: { type: 'long' },
canceled: { type: 'long' },
suspended: { type: 'long' },
idle: { type: 'long' },
running: { type: 'long' },
totalDurationSeconds: { type: 'long' },
},
full: {
total: { type: 'long' },
manual: { type: 'long' },
scheduled: { type: 'long' },
completed: { type: 'long' },
errored: { type: 'long' },
canceled: { type: 'long' },
suspended: { type: 'long' },
idle: { type: 'long' },
running: { type: 'long' },
totalDurationSeconds: { type: 'long' },
},
incremental: {
total: { type: 'long' },
manual: { type: 'long' },
scheduled: { type: 'long' },
completed: { type: 'long' },
errored: { type: 'long' },
canceled: { type: 'long' },
suspended: { type: 'long' },
idle: { type: 'long' },
running: { type: 'long' },
totalDurationSeconds: { type: 'long' },
},
},
last7Days: {
overall: {
total: { type: 'long' },
manual: { type: 'long' },
scheduled: { type: 'long' },
completed: { type: 'long' },
errored: { type: 'long' },
canceled: { type: 'long' },
suspended: { type: 'long' },
idle: { type: 'long' },
running: { type: 'long' },
totalDurationSeconds: { type: 'long' },
},
accessControl: {
total: { type: 'long' },
manual: { type: 'long' },
scheduled: { type: 'long' },
completed: { type: 'long' },
errored: { type: 'long' },
canceled: { type: 'long' },
suspended: { type: 'long' },
idle: { type: 'long' },
running: { type: 'long' },
totalDurationSeconds: { type: 'long' },
},
full: {
total: { type: 'long' },
manual: { type: 'long' },
scheduled: { type: 'long' },
completed: { type: 'long' },
errored: { type: 'long' },
canceled: { type: 'long' },
suspended: { type: 'long' },
idle: { type: 'long' },
running: { type: 'long' },
totalDurationSeconds: { type: 'long' },
},
incremental: {
total: { type: 'long' },
manual: { type: 'long' },
scheduled: { type: 'long' },
completed: { type: 'long' },
errored: { type: 'long' },
canceled: { type: 'long' },
suspended: { type: 'long' },
idle: { type: 'long' },
running: { type: 'long' },
totalDurationSeconds: { type: 'long' },
},
},
},
},
},
},
},
},
async fetch({ esClient }) {
return await fetchTelemetryMetrics(esClient);
return await fetchTelemetryMetrics(esClient, log);
},
});
usageCollection.registerCollector(telemetryUsageCollector);

@ -56,60 +337,17 @@ export const registerTelemetryUsageCollector = (usageCollection: UsageCollection
* Fetch the aggregated telemetry metrics
*/

// @ts-ignore
export const fetchTelemetryMetrics = async (client: ElasticsearchClient): Promise<Telemetry> => {
export const fetchTelemetryMetrics = async (
client: ElasticsearchClient,
log: Logger
): Promise<Telemetry> => {
try {
const [nativeCountResponse, clientsCountResponse] = await Promise.all([
client.count({
index: CONNECTORS_INDEX,
query: {
bool: {
filter: [
{
term: {
is_native: true,
},
},
],
must_not: [
{
term: {
service_type: {
value: 'elastic-crawler',
},
},
},
],
},
},
}),
client.count({
index: CONNECTORS_INDEX,
query: {
bool: {
filter: [
{
term: {
is_native: false,
},
},
],
},
},
}),
]);

const connectors = await collectConnectorStats(client);
return {
native: {
total: nativeCountResponse.count,
},
clients: {
total: clientsCountResponse.count,
},
connectors,
} as Telemetry;
} catch (error) {
if (isIndexNotFoundException(error)) {
return defaultTelemetryMetrics;
}
log.error(`Couldn't fetch telemetry due to error: ${error}`);
return defaultTelemetryMetrics;
}
};
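For reference, a successful `fetch` for this collector reports an object with the `Telemetry` shape defined above. A hypothetical payload is sketched below; the counts and the single connector entry are invented purely to illustrate the reported structure.

```ts
// Hypothetical Telemetry payload; all values are made up for illustration.
const exampleTelemetry = {
  native: { total: 5 },
  clients: { total: 2 },
  connectors: [
    {
      id: 'connector-1',
      serviceType: 'sharepoint_online',
      isNative: true,
      isDeleted: false,
      documents: { total: 1000, volume: 25000, inLastSync: 100 },
    },
  ],
};
```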
@ -99,7 +99,7 @@ export class ServerlessSearchPlugin

if (usageCollection) {
getStartServices().then(() => {
registerTelemetryUsageCollector(usageCollection);
registerTelemetryUsageCollector(usageCollection, this.logger);
});
}
@ -43,5 +43,6 @@
"@kbn/code-editor",
"@kbn/console-plugin",
"@kbn/core-chrome-browser",
"@kbn/core-logging-server-mocks",
]
}
File diff suppressed because it is too large