Mirror of https://github.com/elastic/kibana.git (synced 2025-04-24 09:48:58 -04:00)
* [ML] APM Correlations: Fix usage in load balancing/HA setups. (#115145)

  - The way we customized the use of search strategies caused issues with race conditions when multiple Kibana instances were used for load balancing. This PR migrates away from search strategies and uses regular APM API endpoints.
  - The task that manages calling the sequence of queries for the correlations analysis now lives in a custom React hook (useFailedTransactionsCorrelations / useLatencyCorrelations) instead of a task on the Kibana server side. While they show up as new lines/files in the git diff, the code for the hooks is more or less a combination of the previous useSearchStrategy hook and the server-side service files that managed queries and state.
  - The consuming React UI components only needed minimal changes: the hooks mentioned above return the same data structure as the previously used useSearchStrategy. This also means the functional UI tests didn't need any changes and should pass as is.
  - API integration tests have been added for the individual new endpoints. The test files that were previously used for the search strategies are still there to simulate a full analysis run; the assertions for the resulting data have the same values, only the structure had to be adapted.
  - Previously, all ES queries of the analysis were run sequentially. The new endpoints run ES queries in parallel where possible. Chunking is managed in the hooks on the client side.
  - For now the endpoints use the standard current user's esClient. I tried to use the APM client, but it was missing a wrapper for the fieldCaps method and I ran into a problem when trying to construct a random_score query. Sticking to the esClient allowed most of the functions that run the actual queries to stay unchanged. If possible I'd like to pick this up in a follow-up. All the endpoints still use withApmSpan() now, though.
  - The previous use of generators was also refactored away; as mentioned above, the queries are now run in parallel. Because we might run up to hundreds of similar requests for the correlation analysis, we don't want the analysis to fail if just a single query fails, as it did in the previous search-strategy-based task. I created a util splitAllSettledPromises() to handle Promise.allSettled() and split the results and errors to make the handling easier (see the sketch below). Better naming suggestions are welcome 😅. A future improvement could be to not run individual queries but to combine them into nested aggs or use msearch; that's out of scope for this PR, though.

* [ML] Fix http client types.
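The splitAllSettledPromises() util itself is not part of the diff excerpt shown below. The following is a minimal sketch of what such a helper could look like; the SettledSplit interface and the runQueriesTolerantly usage function are illustrative assumptions, not the PR's actual code.

    // Hypothetical shape for splitting Promise.allSettled() results.
    interface SettledSplit<T> {
      fulfilled: T[];
      rejected: unknown[];
    }

    function splitAllSettledPromises<T>(
      settled: Array<PromiseSettledResult<T>>
    ): SettledSplit<T> {
      return settled.reduce<SettledSplit<T>>(
        (acc, result) => {
          if (result.status === 'fulfilled') {
            acc.fulfilled.push(result.value);
          } else {
            acc.rejected.push(result.reason);
          }
          return acc;
        },
        { fulfilled: [], rejected: [] }
      );
    }

    // Usage: fire many similar correlation queries in parallel and keep the
    // analysis going even if individual queries reject.
    async function runQueriesTolerantly<T>(queries: Array<Promise<T>>) {
      const { fulfilled, rejected } = splitAllSettledPromises(
        await Promise.allSettled(queries)
      );
      return { results: fulfilled, errors: rejected };
    }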
This commit is contained in:
parent 2515207572
commit 0863a7f3a1

97 changed files with 2812 additions and 2757 deletions
@@ -82,9 +82,5 @@ export const KS_TEST_THRESHOLD = 0.1;

export const ERROR_CORRELATION_THRESHOLD = 0.02;

/**
 * Field stats/top values sampling constants
 */

export const SAMPLER_TOP_TERMS_THRESHOLD = 100000;
export const SAMPLER_TOP_TERMS_SHARD_SIZE = 5000;
export const DEFAULT_PERCENTILE_THRESHOLD = 95;
export const DEBOUNCE_INTERVAL = 100;
@@ -24,12 +24,8 @@ export interface FailedTransactionsCorrelation extends FieldValuePair {

export type FailedTransactionsCorrelationsImpactThreshold =
  typeof FAILED_TRANSACTIONS_IMPACT_THRESHOLD[keyof typeof FAILED_TRANSACTIONS_IMPACT_THRESHOLD];

export interface FailedTransactionsCorrelationsParams {
  percentileThreshold: number;
}

export interface FailedTransactionsCorrelationsRawResponse {
  log: string[];
export interface FailedTransactionsCorrelationsResponse {
  ccsWarning: boolean;
  failedTransactionsCorrelations?: FailedTransactionsCorrelation[];
  percentileThresholdValue?: number;
  overallHistogram?: HistogramItem[];
@@ -6,9 +6,9 @@
 */

import type * as estypes from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
import { SearchStrategyParams } from './types';
import { CorrelationsParams } from './types';

export interface FieldStatsCommonRequestParams extends SearchStrategyParams {
export interface FieldStatsCommonRequestParams extends CorrelationsParams {
  samplerShardSize: number;
}
@@ -14,22 +14,8 @@ export interface LatencyCorrelation extends FieldValuePair {
  ksTest: number;
}

export interface LatencyCorrelationSearchServiceProgress {
  started: number;
  loadedHistogramStepsize: number;
  loadedOverallHistogram: number;
  loadedFieldCandidates: number;
  loadedFieldValuePairs: number;
  loadedHistograms: number;
}

export interface LatencyCorrelationsParams {
  percentileThreshold: number;
  analyzeCorrelations: boolean;
}

export interface LatencyCorrelationsRawResponse {
  log: string[];
export interface LatencyCorrelationsResponse {
  ccsWarning: boolean;
  overallHistogram?: HistogramItem[];
  percentileThresholdValue?: number;
  latencyCorrelations?: LatencyCorrelation[];
@@ -26,35 +26,20 @@ export interface ResponseHit {
  _source: ResponseHitSource;
}

export interface RawResponseBase {
  ccsWarning: boolean;
  took: number;
}

export interface SearchStrategyClientParamsBase {
export interface CorrelationsClientParams {
  environment: string;
  kuery: string;
  serviceName?: string;
  transactionName?: string;
  transactionType?: string;
}

export interface RawSearchStrategyClientParams
  extends SearchStrategyClientParamsBase {
  start?: string;
  end?: string;
}

export interface SearchStrategyClientParams
  extends SearchStrategyClientParamsBase {
  start: number;
  end: number;
}

export interface SearchStrategyServerParams {
export interface CorrelationsServerParams {
  index: string;
  includeFrozen?: boolean;
}

export type SearchStrategyParams = SearchStrategyClientParams &
  SearchStrategyServerParams;
export type CorrelationsParams = CorrelationsClientParams &
  CorrelationsServerParams;
@@ -6,9 +6,9 @@
 */

import { FIELDS_TO_ADD_AS_CANDIDATE } from '../constants';
import { hasPrefixToInclude } from '../utils';
import { hasPrefixToInclude } from './has_prefix_to_include';

import type { FieldValuePair } from '../../../../common/search_strategies/types';
import type { FieldValuePair } from '../types';

export const getPrioritizedFieldValuePairs = (
  fieldValuePairs: FieldValuePair[]
@@ -5,4 +5,5 @@
 * 2.0.
 */

export { failedTransactionsCorrelationsSearchServiceProvider } from './failed_transactions_correlations_search_service';
export { getPrioritizedFieldValuePairs } from './get_prioritized_field_value_pairs';
export { hasPrefixToInclude } from './has_prefix_to_include';
@@ -1,15 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

export const APM_SEARCH_STRATEGIES = {
  APM_FAILED_TRANSACTIONS_CORRELATIONS: 'apmFailedTransactionsCorrelations',
  APM_LATENCY_CORRELATIONS: 'apmLatencyCorrelations',
} as const;
export type ApmSearchStrategies =
  typeof APM_SEARCH_STRATEGIES[keyof typeof APM_SEARCH_STRATEGIES];

export const DEFAULT_PERCENTILE_THRESHOLD = 95;
@@ -19,7 +19,7 @@ import {
import React, { Fragment, useState } from 'react';
import { i18n } from '@kbn/i18n';
import { FormattedMessage } from '@kbn/i18n/react';
import { FieldStats } from '../../../../../common/search_strategies/field_stats_types';
import { FieldStats } from '../../../../../common/correlations/field_stats_types';
import { OnAddFilter, TopValues } from './top_values';
import { useTheme } from '../../../../hooks/use_theme';
@@ -14,7 +14,7 @@ import {
  EuiToolTip,
} from '@elastic/eui';
import { i18n } from '@kbn/i18n';
import { FieldStats } from '../../../../../common/search_strategies/field_stats_types';
import { FieldStats } from '../../../../../common/correlations/field_stats_types';
import { asPercent } from '../../../../../common/utils/formatters';
import { useTheme } from '../../../../hooks/use_theme';
@@ -1,38 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { EuiAccordion, EuiCode, EuiPanel } from '@elastic/eui';
import React from 'react';
import { i18n } from '@kbn/i18n';
import { asAbsoluteDateTime } from '../../../../common/utils/formatters';

interface Props {
  logMessages: string[];
}
export function CorrelationsLog({ logMessages }: Props) {
  return (
    <EuiAccordion
      id="apmCorrelationsLogAccordion"
      buttonContent={i18n.translate('xpack.apm.correlations.logButtonContent', {
        defaultMessage: 'Log',
      })}
    >
      <EuiPanel color="subdued">
        {logMessages.map((logMessage, i) => {
          const [timestamp, message] = logMessage.split(': ');
          return (
            <p key={i}>
              <small>
                <EuiCode>{asAbsoluteDateTime(timestamp)}</EuiCode> {message}
              </small>
            </p>
          );
        })}
      </EuiPanel>
    </EuiAccordion>
  );
}
@@ -14,7 +14,7 @@ import type { Criteria } from '@elastic/eui/src/components/basic_table/basic_tab
import { FETCH_STATUS } from '../../../hooks/use_fetcher';
import { useUiTracker } from '../../../../../observability/public';
import { useTheme } from '../../../hooks/use_theme';
import type { FieldValuePair } from '../../../../common/search_strategies/types';
import type { FieldValuePair } from '../../../../common/correlations/types';

const PAGINATION_SIZE_OPTIONS = [5, 10, 20, 50];
|
@ -29,23 +29,16 @@ import type { Direction } from '@elastic/eui/src/services/sort/sort_direction';
|
|||
import { i18n } from '@kbn/i18n';
|
||||
import { FormattedMessage } from '@kbn/i18n/react';
|
||||
|
||||
import {
|
||||
enableInspectEsQueries,
|
||||
useUiTracker,
|
||||
} from '../../../../../observability/public';
|
||||
import { useUiTracker } from '../../../../../observability/public';
|
||||
|
||||
import { asPercent } from '../../../../common/utils/formatters';
|
||||
import { FailedTransactionsCorrelation } from '../../../../common/search_strategies/failed_transactions_correlations/types';
|
||||
import {
|
||||
APM_SEARCH_STRATEGIES,
|
||||
DEFAULT_PERCENTILE_THRESHOLD,
|
||||
} from '../../../../common/search_strategies/constants';
|
||||
import { FieldStats } from '../../../../common/search_strategies/field_stats_types';
|
||||
import { FailedTransactionsCorrelation } from '../../../../common/correlations/failed_transactions_correlations/types';
|
||||
import { DEFAULT_PERCENTILE_THRESHOLD } from '../../../../common/correlations/constants';
|
||||
import { FieldStats } from '../../../../common/correlations/field_stats_types';
|
||||
|
||||
import { useApmPluginContext } from '../../../context/apm_plugin/use_apm_plugin_context';
|
||||
import { useLocalStorage } from '../../../hooks/useLocalStorage';
|
||||
import { FETCH_STATUS } from '../../../hooks/use_fetcher';
|
||||
import { useSearchStrategy } from '../../../hooks/use_search_strategy';
|
||||
import { useTheme } from '../../../hooks/use_theme';
|
||||
|
||||
import { ImpactBar } from '../../shared/ImpactBar';
|
||||
|
@ -53,14 +46,12 @@ import { push } from '../../shared/Links/url_helpers';
|
|||
|
||||
import { CorrelationsTable } from './correlations_table';
|
||||
import { FailedTransactionsCorrelationsHelpPopover } from './failed_transactions_correlations_help_popover';
|
||||
import { isErrorMessage } from './utils/is_error_message';
|
||||
import { getFailedTransactionsCorrelationImpactLabel } from './utils/get_failed_transactions_correlation_impact_label';
|
||||
import { getOverallHistogram } from './utils/get_overall_histogram';
|
||||
import {
|
||||
TransactionDistributionChart,
|
||||
TransactionDistributionChartData,
|
||||
} from '../../shared/charts/transaction_distribution_chart';
|
||||
import { CorrelationsLog } from './correlations_log';
|
||||
import { CorrelationsEmptyStatePrompt } from './empty_state_prompt';
|
||||
import { CrossClusterSearchCompatibilityWarning } from './cross_cluster_search_warning';
|
||||
import { CorrelationsProgressControls } from './progress_controls';
|
||||
|
@ -68,6 +59,8 @@ import { useTransactionColors } from './use_transaction_colors';
|
|||
import { CorrelationsContextPopover } from './context_popover';
|
||||
import { OnAddFilter } from './context_popover/top_values';
|
||||
|
||||
import { useFailedTransactionsCorrelations } from './use_failed_transactions_correlations';
|
||||
|
||||
export function FailedTransactionsCorrelations({
|
||||
onFilter,
|
||||
}: {
|
||||
|
@ -77,18 +70,12 @@ export function FailedTransactionsCorrelations({
|
|||
const transactionColors = useTransactionColors();
|
||||
|
||||
const {
|
||||
core: { notifications, uiSettings },
|
||||
core: { notifications },
|
||||
} = useApmPluginContext();
|
||||
const trackApmEvent = useUiTracker({ app: 'apm' });
|
||||
|
||||
const inspectEnabled = uiSettings.get<boolean>(enableInspectEsQueries);
|
||||
|
||||
const { progress, response, startFetch, cancelFetch } = useSearchStrategy(
|
||||
APM_SEARCH_STRATEGIES.APM_FAILED_TRANSACTIONS_CORRELATIONS,
|
||||
{
|
||||
percentileThreshold: DEFAULT_PERCENTILE_THRESHOLD,
|
||||
}
|
||||
);
|
||||
const { progress, response, startFetch, cancelFetch } =
|
||||
useFailedTransactionsCorrelations();
|
||||
|
||||
const fieldStats: Record<string, FieldStats> | undefined = useMemo(() => {
|
||||
return response.fieldStats?.reduce((obj, field) => {
|
||||
|
@ -97,7 +84,6 @@ export function FailedTransactionsCorrelations({
|
|||
}, {} as Record<string, FieldStats>);
|
||||
}, [response?.fieldStats]);
|
||||
|
||||
const progressNormalized = progress.loaded / progress.total;
|
||||
const { overallHistogram, hasData, status } = getOverallHistogram(
|
||||
response,
|
||||
progress.isRunning
|
||||
|
@ -368,7 +354,7 @@ export function FailedTransactionsCorrelations({
|
|||
}, [fieldStats, onAddFilter, showStats]);
|
||||
|
||||
useEffect(() => {
|
||||
if (isErrorMessage(progress.error)) {
|
||||
if (progress.error) {
|
||||
notifications.toasts.addDanger({
|
||||
title: i18n.translate(
|
||||
'xpack.apm.correlations.failedTransactions.errorTitle',
|
||||
|
@ -377,7 +363,7 @@ export function FailedTransactionsCorrelations({
|
|||
'An error occurred performing correlations on failed transactions',
|
||||
}
|
||||
),
|
||||
text: progress.error.toString(),
|
||||
text: progress.error,
|
||||
});
|
||||
}
|
||||
}, [progress.error, notifications.toasts]);
|
||||
|
@ -439,7 +425,7 @@ export function FailedTransactionsCorrelations({
|
|||
|
||||
const showCorrelationsEmptyStatePrompt =
|
||||
correlationTerms.length < 1 &&
|
||||
(progressNormalized === 1 || !progress.isRunning);
|
||||
(progress.loaded === 1 || !progress.isRunning);
|
||||
|
||||
const transactionDistributionChartData: TransactionDistributionChartData[] =
|
||||
[];
|
||||
|
@ -457,8 +443,8 @@ export function FailedTransactionsCorrelations({
|
|||
if (Array.isArray(response.errorHistogram)) {
|
||||
transactionDistributionChartData.push({
|
||||
id: i18n.translate(
|
||||
'xpack.apm.transactionDistribution.chart.allFailedTransactionsLabel',
|
||||
{ defaultMessage: 'All failed transactions' }
|
||||
'xpack.apm.transactionDistribution.chart.failedTransactionsLabel',
|
||||
{ defaultMessage: 'Failed transactions' }
|
||||
),
|
||||
histogram: response.errorHistogram,
|
||||
});
|
||||
|
@ -525,7 +511,7 @@ export function FailedTransactionsCorrelations({
|
|||
<EuiText color="subdued" size="xs">
|
||||
<FormattedMessage
|
||||
id="xpack.apm.transactionDetails.tabs.failedTransactionsCorrelationsChartDescription"
|
||||
defaultMessage="Log-log plot for latency (x) by transactions (y) with overlapping bands for {br}{allTransactions}, {allFailedTransactions} and {focusTransaction}."
|
||||
defaultMessage="Log-log plot for latency (x) by transactions (y) with overlapping bands for {br}{allTransactions}, {failedTransactions} and {focusTransaction}."
|
||||
values={{
|
||||
br: <br />,
|
||||
allTransactions: (
|
||||
|
@ -536,13 +522,13 @@ export function FailedTransactionsCorrelations({
|
|||
/>
|
||||
</span>
|
||||
),
|
||||
allFailedTransactions: (
|
||||
failedTransactions: (
|
||||
<span
|
||||
style={{ color: transactionColors.ALL_FAILED_TRANSACTIONS }}
|
||||
>
|
||||
<FormattedMessage
|
||||
id="xpack.apm.transactionDetails.tabs.failedTransactionsCorrelationsChartAllFailedTransactions"
|
||||
defaultMessage="all failed transactions"
|
||||
id="xpack.apm.transactionDetails.tabs.failedTransactionsCorrelationsChartFailedTransactions"
|
||||
defaultMessage="failed transactions"
|
||||
/>
|
||||
</span>
|
||||
),
|
||||
|
@ -621,7 +607,7 @@ export function FailedTransactionsCorrelations({
|
|||
<EuiSpacer size="s" />
|
||||
|
||||
<CorrelationsProgressControls
|
||||
progress={progressNormalized}
|
||||
progress={progress.loaded}
|
||||
isRunning={progress.isRunning}
|
||||
onRefresh={startFetch}
|
||||
onCancel={cancelFetch}
|
||||
|
@ -654,7 +640,6 @@ export function FailedTransactionsCorrelations({
|
|||
)}
|
||||
{showCorrelationsEmptyStatePrompt && <CorrelationsEmptyStatePrompt />}
|
||||
</div>
|
||||
{inspectEnabled && <CorrelationsLog logMessages={response.log ?? []} />}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
|
|
@ -18,8 +18,7 @@ import { dataPluginMock } from 'src/plugins/data/public/mocks';
|
|||
import type { IKibanaSearchResponse } from 'src/plugins/data/public';
|
||||
import { EuiThemeProvider } from 'src/plugins/kibana_react/common';
|
||||
import { createKibanaReactContext } from 'src/plugins/kibana_react/public';
|
||||
import type { LatencyCorrelationsRawResponse } from '../../../../common/search_strategies/latency_correlations/types';
|
||||
import type { RawResponseBase } from '../../../../common/search_strategies/types';
|
||||
import type { LatencyCorrelationsResponse } from '../../../../common/correlations/latency_correlations/types';
|
||||
import { MockUrlParamsContextProvider } from '../../../context/url_params_context/mock_url_params_context_provider';
|
||||
import { ApmPluginContextValue } from '../../../context/apm_plugin/apm_plugin_context';
|
||||
import {
|
||||
|
@ -35,9 +34,7 @@ function Wrapper({
|
|||
dataSearchResponse,
|
||||
}: {
|
||||
children?: ReactNode;
|
||||
dataSearchResponse: IKibanaSearchResponse<
|
||||
LatencyCorrelationsRawResponse & RawResponseBase
|
||||
>;
|
||||
dataSearchResponse: IKibanaSearchResponse<LatencyCorrelationsResponse>;
|
||||
}) {
|
||||
const mockDataSearch = jest.fn(() => of(dataSearchResponse));
|
||||
|
||||
|
@ -99,9 +96,7 @@ describe('correlations', () => {
|
|||
isRunning: true,
|
||||
rawResponse: {
|
||||
ccsWarning: false,
|
||||
took: 1234,
|
||||
latencyCorrelations: [],
|
||||
log: [],
|
||||
},
|
||||
}}
|
||||
>
|
||||
|
@ -122,9 +117,7 @@ describe('correlations', () => {
|
|||
isRunning: false,
|
||||
rawResponse: {
|
||||
ccsWarning: false,
|
||||
took: 1234,
|
||||
latencyCorrelations: [],
|
||||
log: [],
|
||||
},
|
||||
}}
|
||||
>
|
||||
|
|
|
@ -25,22 +25,15 @@ import { EuiTableSortingType } from '@elastic/eui/src/components/basic_table/tab
|
|||
import { i18n } from '@kbn/i18n';
|
||||
import { FormattedMessage } from '@kbn/i18n/react';
|
||||
|
||||
import {
|
||||
enableInspectEsQueries,
|
||||
useUiTracker,
|
||||
} from '../../../../../observability/public';
|
||||
import { useUiTracker } from '../../../../../observability/public';
|
||||
|
||||
import { asPreciseDecimal } from '../../../../common/utils/formatters';
|
||||
import {
|
||||
APM_SEARCH_STRATEGIES,
|
||||
DEFAULT_PERCENTILE_THRESHOLD,
|
||||
} from '../../../../common/search_strategies/constants';
|
||||
import { LatencyCorrelation } from '../../../../common/search_strategies/latency_correlations/types';
|
||||
import { FieldStats } from '../../../../common/search_strategies/field_stats_types';
|
||||
import { DEFAULT_PERCENTILE_THRESHOLD } from '../../../../common/correlations/constants';
|
||||
import { LatencyCorrelation } from '../../../../common/correlations/latency_correlations/types';
|
||||
import { FieldStats } from '../../../../common/correlations/field_stats_types';
|
||||
|
||||
import { useApmPluginContext } from '../../../context/apm_plugin/use_apm_plugin_context';
|
||||
import { FETCH_STATUS } from '../../../hooks/use_fetcher';
|
||||
import { useSearchStrategy } from '../../../hooks/use_search_strategy';
|
||||
|
||||
import {
|
||||
TransactionDistributionChart,
|
||||
|
@ -50,33 +43,24 @@ import { push } from '../../shared/Links/url_helpers';
|
|||
|
||||
import { CorrelationsTable } from './correlations_table';
|
||||
import { LatencyCorrelationsHelpPopover } from './latency_correlations_help_popover';
|
||||
import { isErrorMessage } from './utils/is_error_message';
|
||||
import { getOverallHistogram } from './utils/get_overall_histogram';
|
||||
import { CorrelationsLog } from './correlations_log';
|
||||
import { CorrelationsEmptyStatePrompt } from './empty_state_prompt';
|
||||
import { CrossClusterSearchCompatibilityWarning } from './cross_cluster_search_warning';
|
||||
import { CorrelationsProgressControls } from './progress_controls';
|
||||
import { useTransactionColors } from './use_transaction_colors';
|
||||
import { CorrelationsContextPopover } from './context_popover';
|
||||
import { OnAddFilter } from './context_popover/top_values';
|
||||
import { useLatencyCorrelations } from './use_latency_correlations';
|
||||
|
||||
export function LatencyCorrelations({ onFilter }: { onFilter: () => void }) {
|
||||
const transactionColors = useTransactionColors();
|
||||
|
||||
const {
|
||||
core: { notifications, uiSettings },
|
||||
core: { notifications },
|
||||
} = useApmPluginContext();
|
||||
|
||||
const displayLog = uiSettings.get<boolean>(enableInspectEsQueries);
|
||||
|
||||
const { progress, response, startFetch, cancelFetch } = useSearchStrategy(
|
||||
APM_SEARCH_STRATEGIES.APM_LATENCY_CORRELATIONS,
|
||||
{
|
||||
percentileThreshold: DEFAULT_PERCENTILE_THRESHOLD,
|
||||
analyzeCorrelations: true,
|
||||
}
|
||||
);
|
||||
const progressNormalized = progress.loaded / progress.total;
|
||||
const { progress, response, startFetch, cancelFetch } =
|
||||
useLatencyCorrelations();
|
||||
const { overallHistogram, hasData, status } = getOverallHistogram(
|
||||
response,
|
||||
progress.isRunning
|
||||
|
@ -90,7 +74,7 @@ export function LatencyCorrelations({ onFilter }: { onFilter: () => void }) {
|
|||
}, [response?.fieldStats]);
|
||||
|
||||
useEffect(() => {
|
||||
if (isErrorMessage(progress.error)) {
|
||||
if (progress.error) {
|
||||
notifications.toasts.addDanger({
|
||||
title: i18n.translate(
|
||||
'xpack.apm.correlations.latencyCorrelations.errorTitle',
|
||||
|
@ -98,7 +82,7 @@ export function LatencyCorrelations({ onFilter }: { onFilter: () => void }) {
|
|||
defaultMessage: 'An error occurred fetching correlations',
|
||||
}
|
||||
),
|
||||
text: progress.error.toString(),
|
||||
text: progress.error,
|
||||
});
|
||||
}
|
||||
}, [progress.error, notifications.toasts]);
|
||||
|
@ -288,8 +272,7 @@ export function LatencyCorrelations({ onFilter }: { onFilter: () => void }) {
|
|||
|
||||
const showCorrelationsTable = progress.isRunning || histogramTerms.length > 0;
|
||||
const showCorrelationsEmptyStatePrompt =
|
||||
histogramTerms.length < 1 &&
|
||||
(progressNormalized === 1 || !progress.isRunning);
|
||||
histogramTerms.length < 1 && (progress.loaded === 1 || !progress.isRunning);
|
||||
|
||||
const transactionDistributionChartData: TransactionDistributionChartData[] =
|
||||
[];
|
||||
|
@ -382,7 +365,7 @@ export function LatencyCorrelations({ onFilter }: { onFilter: () => void }) {
|
|||
<EuiSpacer size="s" />
|
||||
|
||||
<CorrelationsProgressControls
|
||||
progress={progressNormalized}
|
||||
progress={progress.loaded}
|
||||
isRunning={progress.isRunning}
|
||||
onRefresh={startFetch}
|
||||
onCancel={cancelFetch}
|
||||
|
@ -415,7 +398,6 @@ export function LatencyCorrelations({ onFilter }: { onFilter: () => void }) {
|
|||
)}
|
||||
{showCorrelationsEmptyStatePrompt && <CorrelationsEmptyStatePrompt />}
|
||||
</div>
|
||||
{displayLog && <CorrelationsLog logMessages={response.log ?? []} />}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
|
|
@ -0,0 +1,399 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import React, { ReactNode } from 'react';
|
||||
import { merge } from 'lodash';
|
||||
import { createMemoryHistory } from 'history';
|
||||
import { renderHook, act } from '@testing-library/react-hooks';
|
||||
|
||||
import { ApmPluginContextValue } from '../../../context/apm_plugin/apm_plugin_context';
|
||||
import {
|
||||
mockApmPluginContextValue,
|
||||
MockApmPluginContextWrapper,
|
||||
} from '../../../context/apm_plugin/mock_apm_plugin_context';
|
||||
import { delay } from '../../../utils/testHelpers';
|
||||
|
||||
import { fromQuery } from '../../shared/Links/url_helpers';
|
||||
|
||||
import { useFailedTransactionsCorrelations } from './use_failed_transactions_correlations';
|
||||
|
||||
function wrapper({
|
||||
children,
|
||||
error = false,
|
||||
}: {
|
||||
children?: ReactNode;
|
||||
error: boolean;
|
||||
}) {
|
||||
const httpMethodMock = jest.fn().mockImplementation(async (endpoint) => {
|
||||
await delay(100);
|
||||
if (error) {
|
||||
throw new Error('Something went wrong');
|
||||
}
|
||||
switch (endpoint) {
|
||||
case '/internal/apm/latency/overall_distribution':
|
||||
return {
|
||||
overallHistogram: [{ key: 'the-key', doc_count: 1234 }],
|
||||
percentileThresholdValue: 1.234,
|
||||
};
|
||||
case '/internal/apm/correlations/field_candidates':
|
||||
return { fieldCandidates: ['field-1', 'field2'] };
|
||||
case '/internal/apm/correlations/field_value_pairs':
|
||||
return {
|
||||
fieldValuePairs: [
|
||||
{ fieldName: 'field-name-1', fieldValue: 'field-value-1' },
|
||||
],
|
||||
};
|
||||
case '/internal/apm/correlations/p_values':
|
||||
return {
|
||||
failedTransactionsCorrelations: [
|
||||
{
|
||||
fieldName: 'field-name-1',
|
||||
fieldValue: 'field-value-1',
|
||||
doc_count: 123,
|
||||
bg_count: 1234,
|
||||
score: 0.66,
|
||||
pValue: 0.01,
|
||||
normalizedScore: 0.85,
|
||||
failurePercentage: 30,
|
||||
successPercentage: 70,
|
||||
histogram: [{ key: 'the-key', doc_count: 123 }],
|
||||
},
|
||||
],
|
||||
};
|
||||
case '/internal/apm/correlations/field_stats':
|
||||
return {
|
||||
stats: [
|
||||
{ fieldName: 'field-name-1', count: 123 },
|
||||
{ fieldName: 'field-name-2', count: 1111 },
|
||||
],
|
||||
};
|
||||
default:
|
||||
return {};
|
||||
}
|
||||
});
|
||||
|
||||
const history = createMemoryHistory();
|
||||
jest.spyOn(history, 'push');
|
||||
jest.spyOn(history, 'replace');
|
||||
|
||||
history.replace({
|
||||
pathname: '/services/the-service-name/transactions/view',
|
||||
search: fromQuery({
|
||||
transactionName: 'the-transaction-name',
|
||||
rangeFrom: 'now-15m',
|
||||
rangeTo: 'now',
|
||||
}),
|
||||
});
|
||||
|
||||
const mockPluginContext = merge({}, mockApmPluginContextValue, {
|
||||
core: { http: { get: httpMethodMock, post: httpMethodMock } },
|
||||
}) as unknown as ApmPluginContextValue;
|
||||
|
||||
return (
|
||||
<MockApmPluginContextWrapper history={history} value={mockPluginContext}>
|
||||
{children}
|
||||
</MockApmPluginContextWrapper>
|
||||
);
|
||||
}
|
||||
|
||||
describe('useFailedTransactionsCorrelations', () => {
|
||||
beforeEach(async () => {
|
||||
jest.useFakeTimers();
|
||||
});
|
||||
// Running all pending timers and switching to real timers using Jest
|
||||
afterEach(() => {
|
||||
jest.runOnlyPendingTimers();
|
||||
jest.useRealTimers();
|
||||
});
|
||||
|
||||
describe('when successfully loading results', () => {
|
||||
it('should automatically start fetching results', async () => {
|
||||
const { result, unmount } = renderHook(
|
||||
() => useFailedTransactionsCorrelations(),
|
||||
{
|
||||
wrapper,
|
||||
}
|
||||
);
|
||||
|
||||
try {
|
||||
expect(result.current.progress).toEqual({
|
||||
isRunning: true,
|
||||
loaded: 0,
|
||||
});
|
||||
expect(result.current.response).toEqual({ ccsWarning: false });
|
||||
expect(typeof result.current.startFetch).toEqual('function');
|
||||
expect(typeof result.current.cancelFetch).toEqual('function');
|
||||
} finally {
|
||||
unmount();
|
||||
}
|
||||
});
|
||||
|
||||
it('should not have received any results after 50ms', async () => {
|
||||
const { result, unmount } = renderHook(
|
||||
() => useFailedTransactionsCorrelations(),
|
||||
{
|
||||
wrapper,
|
||||
}
|
||||
);
|
||||
|
||||
try {
|
||||
jest.advanceTimersByTime(50);
|
||||
|
||||
expect(result.current.progress).toEqual({
|
||||
isRunning: true,
|
||||
loaded: 0,
|
||||
});
|
||||
expect(result.current.response).toEqual({ ccsWarning: false });
|
||||
} finally {
|
||||
unmount();
|
||||
}
|
||||
});
|
||||
|
||||
it('should receive partial updates and finish running', async () => {
|
||||
const { result, unmount, waitFor } = renderHook(
|
||||
() => useFailedTransactionsCorrelations(),
|
||||
{
|
||||
wrapper,
|
||||
}
|
||||
);
|
||||
|
||||
try {
|
||||
jest.advanceTimersByTime(50);
|
||||
await waitFor(() => expect(result.current.progress.loaded).toBe(0));
|
||||
jest.advanceTimersByTime(100);
|
||||
await waitFor(() => expect(result.current.progress.loaded).toBe(0));
|
||||
jest.advanceTimersByTime(100);
|
||||
await waitFor(() => expect(result.current.progress.loaded).toBe(0.05));
|
||||
|
||||
expect(result.current.progress).toEqual({
|
||||
error: undefined,
|
||||
isRunning: true,
|
||||
loaded: 0.05,
|
||||
});
|
||||
expect(result.current.response).toEqual({
|
||||
ccsWarning: false,
|
||||
fieldStats: undefined,
|
||||
errorHistogram: [
|
||||
{
|
||||
doc_count: 1234,
|
||||
key: 'the-key',
|
||||
},
|
||||
],
|
||||
failedTransactionsCorrelations: undefined,
|
||||
overallHistogram: [
|
||||
{
|
||||
doc_count: 1234,
|
||||
key: 'the-key',
|
||||
},
|
||||
],
|
||||
percentileThresholdValue: 1.234,
|
||||
});
|
||||
|
||||
jest.advanceTimersByTime(100);
|
||||
await waitFor(() => expect(result.current.progress.loaded).toBe(0.1));
|
||||
|
||||
// field candidates are an implementation detail and
|
||||
// will not be exposed, it will just set loaded to 0.1.
|
||||
expect(result.current.progress).toEqual({
|
||||
error: undefined,
|
||||
isRunning: true,
|
||||
loaded: 0.1,
|
||||
});
|
||||
|
||||
jest.advanceTimersByTime(100);
|
||||
await waitFor(() => expect(result.current.progress.loaded).toBe(1));
|
||||
|
||||
expect(result.current.progress).toEqual({
|
||||
error: undefined,
|
||||
isRunning: true,
|
||||
loaded: 1,
|
||||
});
|
||||
|
||||
expect(result.current.response).toEqual({
|
||||
ccsWarning: false,
|
||||
fieldStats: undefined,
|
||||
errorHistogram: [
|
||||
{
|
||||
doc_count: 1234,
|
||||
key: 'the-key',
|
||||
},
|
||||
],
|
||||
failedTransactionsCorrelations: [
|
||||
{
|
||||
fieldName: 'field-name-1',
|
||||
fieldValue: 'field-value-1',
|
||||
doc_count: 123,
|
||||
bg_count: 1234,
|
||||
score: 0.66,
|
||||
pValue: 0.01,
|
||||
normalizedScore: 0.85,
|
||||
failurePercentage: 30,
|
||||
successPercentage: 70,
|
||||
histogram: [{ key: 'the-key', doc_count: 123 }],
|
||||
},
|
||||
],
|
||||
overallHistogram: [
|
||||
{
|
||||
doc_count: 1234,
|
||||
key: 'the-key',
|
||||
},
|
||||
],
|
||||
percentileThresholdValue: 1.234,
|
||||
});
|
||||
|
||||
jest.advanceTimersByTime(100);
|
||||
await waitFor(() =>
|
||||
expect(result.current.response.fieldStats).toBeDefined()
|
||||
);
|
||||
|
||||
expect(result.current.progress).toEqual({
|
||||
error: undefined,
|
||||
isRunning: false,
|
||||
loaded: 1,
|
||||
});
|
||||
|
||||
expect(result.current.response).toEqual({
|
||||
ccsWarning: false,
|
||||
fieldStats: [
|
||||
{ fieldName: 'field-name-1', count: 123 },
|
||||
{ fieldName: 'field-name-2', count: 1111 },
|
||||
],
|
||||
errorHistogram: [
|
||||
{
|
||||
doc_count: 1234,
|
||||
key: 'the-key',
|
||||
},
|
||||
],
|
||||
failedTransactionsCorrelations: [
|
||||
{
|
||||
fieldName: 'field-name-1',
|
||||
fieldValue: 'field-value-1',
|
||||
doc_count: 123,
|
||||
bg_count: 1234,
|
||||
score: 0.66,
|
||||
pValue: 0.01,
|
||||
normalizedScore: 0.85,
|
||||
failurePercentage: 30,
|
||||
successPercentage: 70,
|
||||
histogram: [{ key: 'the-key', doc_count: 123 }],
|
||||
},
|
||||
],
|
||||
overallHistogram: [
|
||||
{
|
||||
doc_count: 1234,
|
||||
key: 'the-key',
|
||||
},
|
||||
],
|
||||
percentileThresholdValue: 1.234,
|
||||
});
|
||||
} finally {
|
||||
unmount();
|
||||
}
|
||||
});
|
||||
});
|
||||
describe('when throwing an error', () => {
|
||||
it('should automatically start fetching results', async () => {
|
||||
const { result, unmount } = renderHook(
|
||||
() => useFailedTransactionsCorrelations(),
|
||||
{
|
||||
wrapper,
|
||||
initialProps: {
|
||||
error: true,
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
try {
|
||||
expect(result.current.progress).toEqual({
|
||||
isRunning: true,
|
||||
loaded: 0,
|
||||
});
|
||||
} finally {
|
||||
unmount();
|
||||
}
|
||||
});
|
||||
|
||||
it('should still be running after 50ms', async () => {
|
||||
const { result, unmount } = renderHook(
|
||||
() => useFailedTransactionsCorrelations(),
|
||||
{
|
||||
wrapper,
|
||||
initialProps: {
|
||||
error: true,
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
try {
|
||||
jest.advanceTimersByTime(50);
|
||||
|
||||
expect(result.current.progress).toEqual({
|
||||
isRunning: true,
|
||||
loaded: 0,
|
||||
});
|
||||
expect(result.current.response).toEqual({ ccsWarning: false });
|
||||
} finally {
|
||||
unmount();
|
||||
}
|
||||
});
|
||||
|
||||
it('should stop and return an error after more than 100ms', async () => {
|
||||
const { result, unmount, waitFor } = renderHook(
|
||||
() => useFailedTransactionsCorrelations(),
|
||||
{
|
||||
wrapper,
|
||||
initialProps: {
|
||||
error: true,
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
try {
|
||||
jest.advanceTimersByTime(150);
|
||||
await waitFor(() =>
|
||||
expect(result.current.progress.error).toBeDefined()
|
||||
);
|
||||
|
||||
expect(result.current.progress).toEqual({
|
||||
error: 'Something went wrong',
|
||||
isRunning: false,
|
||||
loaded: 0,
|
||||
});
|
||||
} finally {
|
||||
unmount();
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('when canceled', () => {
|
||||
it('should stop running', async () => {
|
||||
const { result, unmount, waitFor } = renderHook(
|
||||
() => useFailedTransactionsCorrelations(),
|
||||
{
|
||||
wrapper,
|
||||
}
|
||||
);
|
||||
|
||||
try {
|
||||
jest.advanceTimersByTime(50);
|
||||
await waitFor(() => expect(result.current.progress.loaded).toBe(0));
|
||||
|
||||
expect(result.current.progress.isRunning).toBe(true);
|
||||
|
||||
act(() => {
|
||||
result.current.cancelFetch();
|
||||
});
|
||||
|
||||
await waitFor(() =>
|
||||
expect(result.current.progress.isRunning).toEqual(false)
|
||||
);
|
||||
} finally {
|
||||
unmount();
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
|
@ -0,0 +1,257 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { useCallback, useEffect, useMemo, useReducer, useRef } from 'react';
|
||||
import { chunk, debounce } from 'lodash';
|
||||
|
||||
import { IHttpFetchError } from 'src/core/public';
|
||||
|
||||
import { EVENT_OUTCOME } from '../../../../common/elasticsearch_fieldnames';
|
||||
import { EventOutcome } from '../../../../common/event_outcome';
|
||||
import {
|
||||
DEBOUNCE_INTERVAL,
|
||||
DEFAULT_PERCENTILE_THRESHOLD,
|
||||
} from '../../../../common/correlations/constants';
|
||||
import type {
|
||||
FailedTransactionsCorrelation,
|
||||
FailedTransactionsCorrelationsResponse,
|
||||
} from '../../../../common/correlations/failed_transactions_correlations/types';
|
||||
|
||||
import { callApmApi } from '../../../services/rest/createCallApmApi';
|
||||
|
||||
import {
|
||||
getInitialResponse,
|
||||
getFailedTransactionsCorrelationsSortedByScore,
|
||||
getReducer,
|
||||
CorrelationsProgress,
|
||||
} from './utils/analysis_hook_utils';
|
||||
import { useFetchParams } from './use_fetch_params';
|
||||
|
||||
// Overall progress is a float from 0 to 1.
|
||||
const LOADED_OVERALL_HISTOGRAM = 0.05;
|
||||
const LOADED_FIELD_CANDIDATES = LOADED_OVERALL_HISTOGRAM + 0.05;
|
||||
const LOADED_DONE = 1;
|
||||
const PROGRESS_STEP_P_VALUES = 0.9;
|
||||
|
||||
export function useFailedTransactionsCorrelations() {
|
||||
const fetchParams = useFetchParams();
|
||||
|
||||
// This use of useReducer (the dispatch function won't get reinstantiated
|
||||
// on every update) and debounce avoids flooding consuming components with updates.
|
||||
// `setResponse.flush()` can be used to enforce an update.
|
||||
const [response, setResponseUnDebounced] = useReducer(
|
||||
getReducer<FailedTransactionsCorrelationsResponse & CorrelationsProgress>(),
|
||||
getInitialResponse()
|
||||
);
|
||||
const setResponse = useMemo(
|
||||
() => debounce(setResponseUnDebounced, DEBOUNCE_INTERVAL),
|
||||
[]
|
||||
);
|
||||
|
||||
const abortCtrl = useRef(new AbortController());
|
||||
|
||||
const startFetch = useCallback(async () => {
|
||||
abortCtrl.current.abort();
|
||||
abortCtrl.current = new AbortController();
|
||||
|
||||
setResponse({
|
||||
...getInitialResponse(),
|
||||
isRunning: true,
|
||||
// explicitly set these to undefined to override a possible previous state.
|
||||
error: undefined,
|
||||
failedTransactionsCorrelations: undefined,
|
||||
percentileThresholdValue: undefined,
|
||||
overallHistogram: undefined,
|
||||
errorHistogram: undefined,
|
||||
fieldStats: undefined,
|
||||
});
|
||||
setResponse.flush();
|
||||
|
||||
try {
|
||||
// `responseUpdate` will be enriched with additional data with subsequent
|
||||
// calls to the overall histogram, field candidates, field value pairs, correlation results
|
||||
// and histogram data for statistically significant results.
|
||||
const responseUpdate: FailedTransactionsCorrelationsResponse = {
|
||||
ccsWarning: false,
|
||||
};
|
||||
|
||||
const [overallHistogramResponse, errorHistogramRespone] =
|
||||
await Promise.all([
|
||||
// Initial call to fetch the overall distribution for the log-log plot.
|
||||
callApmApi({
|
||||
endpoint: 'POST /internal/apm/latency/overall_distribution',
|
||||
signal: abortCtrl.current.signal,
|
||||
params: {
|
||||
body: {
|
||||
...fetchParams,
|
||||
percentileThreshold: DEFAULT_PERCENTILE_THRESHOLD,
|
||||
},
|
||||
},
|
||||
}),
|
||||
callApmApi({
|
||||
endpoint: 'POST /internal/apm/latency/overall_distribution',
|
||||
signal: abortCtrl.current.signal,
|
||||
params: {
|
||||
body: {
|
||||
...fetchParams,
|
||||
percentileThreshold: DEFAULT_PERCENTILE_THRESHOLD,
|
||||
termFilters: [
|
||||
{
|
||||
fieldName: EVENT_OUTCOME,
|
||||
fieldValue: EventOutcome.failure,
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
}),
|
||||
]);
|
||||
|
||||
const { overallHistogram, percentileThresholdValue } =
|
||||
overallHistogramResponse;
|
||||
const { overallHistogram: errorHistogram } = errorHistogramRespone;
|
||||
|
||||
responseUpdate.errorHistogram = errorHistogram;
|
||||
responseUpdate.overallHistogram = overallHistogram;
|
||||
responseUpdate.percentileThresholdValue = percentileThresholdValue;
|
||||
|
||||
if (abortCtrl.current.signal.aborted) {
|
||||
return;
|
||||
}
|
||||
|
||||
setResponse({
|
||||
...responseUpdate,
|
||||
loaded: LOADED_OVERALL_HISTOGRAM,
|
||||
});
|
||||
setResponse.flush();
|
||||
|
||||
const { fieldCandidates: candidates } = await callApmApi({
|
||||
endpoint: 'GET /internal/apm/correlations/field_candidates',
|
||||
signal: abortCtrl.current.signal,
|
||||
params: {
|
||||
query: fetchParams,
|
||||
},
|
||||
});
|
||||
|
||||
if (abortCtrl.current.signal.aborted) {
|
||||
return;
|
||||
}
|
||||
|
||||
const fieldCandidates = candidates.filter((t) => !(t === EVENT_OUTCOME));
|
||||
|
||||
setResponse({
|
||||
loaded: LOADED_FIELD_CANDIDATES,
|
||||
});
|
||||
setResponse.flush();
|
||||
|
||||
const failedTransactionsCorrelations: FailedTransactionsCorrelation[] =
|
||||
[];
|
||||
const fieldsToSample = new Set<string>();
|
||||
const chunkSize = 10;
|
||||
let chunkLoadCounter = 0;
|
||||
|
||||
const fieldCandidatesChunks = chunk(fieldCandidates, chunkSize);
|
||||
|
||||
for (const fieldCandidatesChunk of fieldCandidatesChunks) {
|
||||
const pValues = await callApmApi({
|
||||
endpoint: 'POST /internal/apm/correlations/p_values',
|
||||
signal: abortCtrl.current.signal,
|
||||
params: {
|
||||
body: { ...fetchParams, fieldCandidates: fieldCandidatesChunk },
|
||||
},
|
||||
});
|
||||
|
||||
if (pValues.failedTransactionsCorrelations.length > 0) {
|
||||
pValues.failedTransactionsCorrelations.forEach((d) => {
|
||||
fieldsToSample.add(d.fieldName);
|
||||
});
|
||||
failedTransactionsCorrelations.push(
|
||||
...pValues.failedTransactionsCorrelations
|
||||
);
|
||||
responseUpdate.failedTransactionsCorrelations =
|
||||
getFailedTransactionsCorrelationsSortedByScore([
|
||||
...failedTransactionsCorrelations,
|
||||
]);
|
||||
}
|
||||
|
||||
chunkLoadCounter++;
|
||||
setResponse({
|
||||
...responseUpdate,
|
||||
loaded:
|
||||
LOADED_FIELD_CANDIDATES +
|
||||
(chunkLoadCounter / fieldCandidatesChunks.length) *
|
||||
PROGRESS_STEP_P_VALUES,
|
||||
});
|
||||
|
||||
if (abortCtrl.current.signal.aborted) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
setResponse.flush();
|
||||
|
||||
const { stats } = await callApmApi({
|
||||
endpoint: 'POST /internal/apm/correlations/field_stats',
|
||||
signal: abortCtrl.current.signal,
|
||||
params: {
|
||||
body: {
|
||||
...fetchParams,
|
||||
fieldsToSample: [...fieldsToSample],
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
responseUpdate.fieldStats = stats;
|
||||
setResponse({ ...responseUpdate, loaded: LOADED_DONE, isRunning: false });
|
||||
setResponse.flush();
|
||||
} catch (e) {
|
||||
if (!abortCtrl.current.signal.aborted) {
|
||||
const err = e as Error | IHttpFetchError;
|
||||
setResponse({
|
||||
error:
|
||||
'response' in err
|
||||
? err.body?.message ?? err.response?.statusText
|
||||
: err.message,
|
||||
isRunning: false,
|
||||
});
|
||||
setResponse.flush();
|
||||
}
|
||||
}
|
||||
}, [fetchParams, setResponse]);
|
||||
|
||||
const cancelFetch = useCallback(() => {
|
||||
abortCtrl.current.abort();
|
||||
setResponse({
|
||||
isRunning: false,
|
||||
});
|
||||
setResponse.flush();
|
||||
}, [setResponse]);
|
||||
|
||||
// auto-update
|
||||
useEffect(() => {
|
||||
startFetch();
|
||||
return () => {
|
||||
abortCtrl.current.abort();
|
||||
};
|
||||
}, [startFetch, cancelFetch]);
|
||||
|
||||
const { error, loaded, isRunning, ...returnedResponse } = response;
|
||||
const progress = useMemo(
|
||||
() => ({
|
||||
error,
|
||||
loaded,
|
||||
isRunning,
|
||||
}),
|
||||
[error, loaded, isRunning]
|
||||
);
|
||||
|
||||
return {
|
||||
progress,
|
||||
response: returnedResponse,
|
||||
startFetch,
|
||||
cancelFetch,
|
||||
};
|
||||
}
|
|
@ -0,0 +1,51 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { useMemo } from 'react';
|
||||
|
||||
import { useApmServiceContext } from '../../../context/apm_service/use_apm_service_context';
|
||||
|
||||
import { useApmParams } from '../../../hooks/use_apm_params';
|
||||
import { useTimeRange } from '../../../hooks/use_time_range';
|
||||
|
||||
export const useFetchParams = () => {
|
||||
const { serviceName } = useApmServiceContext();
|
||||
|
||||
const {
|
||||
query: {
|
||||
kuery,
|
||||
environment,
|
||||
rangeFrom,
|
||||
rangeTo,
|
||||
transactionName,
|
||||
transactionType,
|
||||
},
|
||||
} = useApmParams('/services/{serviceName}/transactions/view');
|
||||
|
||||
const { start, end } = useTimeRange({ rangeFrom, rangeTo });
|
||||
|
||||
return useMemo(
|
||||
() => ({
|
||||
serviceName,
|
||||
transactionName,
|
||||
transactionType,
|
||||
kuery,
|
||||
environment,
|
||||
start,
|
||||
end,
|
||||
}),
|
||||
[
|
||||
serviceName,
|
||||
transactionName,
|
||||
transactionType,
|
||||
kuery,
|
||||
environment,
|
||||
start,
|
||||
end,
|
||||
]
|
||||
);
|
||||
};
|
|
@ -0,0 +1,360 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import React, { ReactNode } from 'react';
|
||||
import { merge } from 'lodash';
|
||||
import { createMemoryHistory } from 'history';
|
||||
import { renderHook, act } from '@testing-library/react-hooks';
|
||||
|
||||
import { ApmPluginContextValue } from '../../../context/apm_plugin/apm_plugin_context';
|
||||
import {
|
||||
mockApmPluginContextValue,
|
||||
MockApmPluginContextWrapper,
|
||||
} from '../../../context/apm_plugin/mock_apm_plugin_context';
|
||||
import { delay } from '../../../utils/testHelpers';
|
||||
|
||||
import { fromQuery } from '../../shared/Links/url_helpers';
|
||||
|
||||
import { useLatencyCorrelations } from './use_latency_correlations';
|
||||
|
||||
function wrapper({
|
||||
children,
|
||||
error = false,
|
||||
}: {
|
||||
children?: ReactNode;
|
||||
error: boolean;
|
||||
}) {
|
||||
const httpMethodMock = jest.fn().mockImplementation(async (endpoint) => {
|
||||
await delay(100);
|
||||
if (error) {
|
||||
throw new Error('Something went wrong');
|
||||
}
|
||||
switch (endpoint) {
|
||||
case '/internal/apm/latency/overall_distribution':
|
||||
return {
|
||||
overallHistogram: [{ key: 'the-key', doc_count: 1234 }],
|
||||
percentileThresholdValue: 1.234,
|
||||
};
|
||||
case '/internal/apm/correlations/field_candidates':
|
||||
return { fieldCandidates: ['field-1', 'field2'] };
|
||||
case '/internal/apm/correlations/field_value_pairs':
|
||||
return {
|
||||
fieldValuePairs: [
|
||||
{ fieldName: 'field-name-1', fieldValue: 'field-value-1' },
|
||||
],
|
||||
};
|
||||
case '/internal/apm/correlations/significant_correlations':
|
||||
return {
|
||||
latencyCorrelations: [
|
||||
{
|
||||
fieldName: 'field-name-1',
|
||||
fieldValue: 'field-value-1',
|
||||
correlation: 0.5,
|
||||
histogram: [{ key: 'the-key', doc_count: 123 }],
|
||||
ksTest: 0.001,
|
||||
},
|
||||
],
|
||||
};
|
||||
case '/internal/apm/correlations/field_stats':
|
||||
return {
|
||||
stats: [
|
||||
{ fieldName: 'field-name-1', count: 123 },
|
||||
{ fieldName: 'field-name-2', count: 1111 },
|
||||
],
|
||||
};
|
||||
default:
|
||||
return {};
|
||||
}
|
||||
});
|
||||
|
||||
const history = createMemoryHistory();
|
||||
jest.spyOn(history, 'push');
|
||||
jest.spyOn(history, 'replace');
|
||||
|
||||
history.replace({
|
||||
pathname: '/services/the-service-name/transactions/view',
|
||||
search: fromQuery({
|
||||
transactionName: 'the-transaction-name',
|
||||
rangeFrom: 'now-15m',
|
||||
rangeTo: 'now',
|
||||
}),
|
||||
});
|
||||
|
||||
const mockPluginContext = merge({}, mockApmPluginContextValue, {
|
||||
core: { http: { get: httpMethodMock, post: httpMethodMock } },
|
||||
}) as unknown as ApmPluginContextValue;
|
||||
|
||||
return (
|
||||
<MockApmPluginContextWrapper history={history} value={mockPluginContext}>
|
||||
{children}
|
||||
</MockApmPluginContextWrapper>
|
||||
);
|
||||
}
|
||||
|
||||
describe('useLatencyCorrelations', () => {
|
||||
beforeEach(async () => {
|
||||
jest.useFakeTimers();
|
||||
});
|
||||
afterEach(() => {
|
||||
jest.useRealTimers();
|
||||
});
|
||||
|
||||
describe('when successfully loading results', () => {
|
||||
it('should automatically start fetching results', async () => {
|
||||
const { result, unmount } = renderHook(() => useLatencyCorrelations(), {
|
||||
wrapper,
|
||||
});
|
||||
|
||||
try {
|
||||
expect(result.current.progress).toEqual({
|
||||
isRunning: true,
|
||||
loaded: 0,
|
||||
});
|
||||
expect(result.current.response).toEqual({ ccsWarning: false });
|
||||
expect(typeof result.current.startFetch).toEqual('function');
|
||||
expect(typeof result.current.cancelFetch).toEqual('function');
|
||||
} finally {
|
||||
unmount();
|
||||
}
|
||||
});
|
||||
|
||||
it('should not have received any results after 50ms', async () => {
|
||||
const { result, unmount } = renderHook(() => useLatencyCorrelations(), {
|
||||
wrapper,
|
||||
});
|
||||
|
||||
try {
|
||||
jest.advanceTimersByTime(50);
|
||||
|
||||
expect(result.current.progress).toEqual({
|
||||
isRunning: true,
|
||||
loaded: 0,
|
||||
});
|
||||
expect(result.current.response).toEqual({ ccsWarning: false });
|
||||
} finally {
|
||||
unmount();
|
||||
}
|
||||
});
|
||||
|
||||
it('should receive partial updates and finish running', async () => {
|
||||
const { result, unmount, waitFor } = renderHook(
|
||||
() => useLatencyCorrelations(),
|
||||
{
|
||||
wrapper,
|
||||
}
|
||||
);
|
||||
|
||||
try {
|
||||
jest.advanceTimersByTime(150);
|
||||
await waitFor(() => expect(result.current.progress.loaded).toBe(0.05));
|
||||
|
||||
expect(result.current.progress).toEqual({
|
||||
error: undefined,
|
||||
isRunning: true,
|
||||
loaded: 0.05,
|
||||
});
|
||||
expect(result.current.response).toEqual({
|
||||
ccsWarning: false,
|
||||
fieldStats: undefined,
|
||||
latencyCorrelations: undefined,
|
||||
overallHistogram: [
|
||||
{
|
||||
doc_count: 1234,
|
||||
key: 'the-key',
|
||||
},
|
||||
],
|
||||
percentileThresholdValue: 1.234,
|
||||
});
|
||||
|
||||
jest.advanceTimersByTime(100);
|
||||
await waitFor(() => expect(result.current.progress.loaded).toBe(0.1));
|
||||
|
||||
// field candidates are an implementation detail and
|
||||
// will not be exposed, it will just set loaded to 0.1.
|
||||
expect(result.current.progress).toEqual({
|
||||
error: undefined,
|
||||
isRunning: true,
|
||||
loaded: 0.1,
|
||||
});
|
||||
|
||||
jest.advanceTimersByTime(100);
|
||||
await waitFor(() => expect(result.current.progress.loaded).toBe(0.4));
|
||||
|
||||
// field value pairs are an implementation detail and
|
||||
// will not be exposed, it will just set loaded to 0.4.
|
||||
expect(result.current.progress).toEqual({
|
||||
error: undefined,
|
||||
isRunning: true,
|
||||
loaded: 0.4,
|
||||
});
|
||||
|
||||
jest.advanceTimersByTime(100);
|
||||
await waitFor(() => expect(result.current.progress.loaded).toBe(1));
|
||||
|
||||
expect(result.current.progress).toEqual({
|
||||
error: undefined,
|
||||
isRunning: true,
|
||||
loaded: 1,
|
||||
});
|
||||
|
||||
expect(result.current.response).toEqual({
|
||||
ccsWarning: false,
|
||||
fieldStats: undefined,
|
||||
latencyCorrelations: [
|
||||
{
|
||||
fieldName: 'field-name-1',
|
||||
fieldValue: 'field-value-1',
|
||||
correlation: 0.5,
|
||||
histogram: [{ key: 'the-key', doc_count: 123 }],
|
||||
ksTest: 0.001,
|
||||
},
|
||||
],
|
||||
overallHistogram: [
|
||||
{
|
||||
doc_count: 1234,
|
||||
key: 'the-key',
|
||||
},
|
||||
],
|
||||
percentileThresholdValue: 1.234,
|
||||
});
|
||||
|
||||
jest.advanceTimersByTime(100);
|
||||
await waitFor(() =>
|
||||
expect(result.current.response.fieldStats).toBeDefined()
|
||||
);
|
||||
|
||||
expect(result.current.progress).toEqual({
|
||||
error: undefined,
|
||||
isRunning: false,
|
||||
loaded: 1,
|
||||
});
|
||||
|
||||
expect(result.current.response).toEqual({
|
||||
ccsWarning: false,
|
||||
fieldStats: [
|
||||
{ fieldName: 'field-name-1', count: 123 },
|
||||
{ fieldName: 'field-name-2', count: 1111 },
|
||||
],
|
||||
latencyCorrelations: [
|
||||
{
|
||||
fieldName: 'field-name-1',
|
||||
fieldValue: 'field-value-1',
|
||||
correlation: 0.5,
|
||||
histogram: [{ key: 'the-key', doc_count: 123 }],
|
||||
ksTest: 0.001,
|
||||
},
|
||||
],
|
||||
overallHistogram: [
|
||||
{
|
||||
doc_count: 1234,
|
||||
key: 'the-key',
|
||||
},
|
||||
],
|
||||
percentileThresholdValue: 1.234,
|
||||
});
|
||||
} finally {
|
||||
unmount();
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('when throwing an error', () => {
|
||||
it('should automatically start fetching results', async () => {
|
||||
const { result, unmount } = renderHook(() => useLatencyCorrelations(), {
|
||||
wrapper,
|
||||
initialProps: {
|
||||
error: true,
|
||||
},
|
||||
});
|
||||
|
||||
try {
|
||||
expect(result.current.progress).toEqual({
|
||||
isRunning: true,
|
||||
loaded: 0,
|
||||
});
|
||||
} finally {
|
||||
unmount();
|
||||
}
|
||||
});
|
||||
|
||||
it('should still be running after 50ms', async () => {
|
||||
const { result, unmount } = renderHook(() => useLatencyCorrelations(), {
|
||||
wrapper,
|
||||
initialProps: {
|
||||
error: true,
|
||||
},
|
||||
});
|
||||
|
||||
try {
|
||||
jest.advanceTimersByTime(50);
|
||||
|
||||
expect(result.current.progress).toEqual({
|
||||
isRunning: true,
|
||||
loaded: 0,
|
||||
});
|
||||
expect(result.current.response).toEqual({ ccsWarning: false });
|
||||
} finally {
|
||||
unmount();
|
||||
}
|
||||
});
|
||||
|
||||
it('should stop and return an error after more than 100ms', async () => {
|
||||
const { result, unmount, waitFor } = renderHook(
|
||||
() => useLatencyCorrelations(),
|
||||
{
|
||||
wrapper,
|
||||
initialProps: {
|
||||
error: true,
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
try {
|
||||
jest.advanceTimersByTime(150);
|
||||
await waitFor(() =>
|
||||
expect(result.current.progress.error).toBeDefined()
|
||||
);
|
||||
|
||||
expect(result.current.progress).toEqual({
|
||||
error: 'Something went wrong',
|
||||
isRunning: false,
|
||||
loaded: 0,
|
||||
});
|
||||
} finally {
|
||||
unmount();
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('when canceled', () => {
|
||||
it('should stop running', async () => {
|
||||
const { result, unmount, waitFor } = renderHook(
|
||||
() => useLatencyCorrelations(),
|
||||
{
|
||||
wrapper,
|
||||
}
|
||||
);
|
||||
|
||||
try {
|
||||
jest.advanceTimersByTime(150);
|
||||
await waitFor(() => expect(result.current.progress.loaded).toBe(0.05));
|
||||
|
||||
expect(result.current.progress.isRunning).toBe(true);
|
||||
|
||||
act(() => {
|
||||
result.current.cancelFetch();
|
||||
});
|
||||
|
||||
await waitFor(() =>
|
||||
expect(result.current.progress.isRunning).toEqual(false)
|
||||
);
|
||||
} finally {
|
||||
unmount();
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
|
@ -0,0 +1,275 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { useCallback, useEffect, useMemo, useReducer, useRef } from 'react';
|
||||
import { chunk, debounce } from 'lodash';
|
||||
|
||||
import { IHttpFetchError } from 'src/core/public';
|
||||
|
||||
import {
|
||||
DEBOUNCE_INTERVAL,
|
||||
DEFAULT_PERCENTILE_THRESHOLD,
|
||||
} from '../../../../common/correlations/constants';
|
||||
import type { FieldValuePair } from '../../../../common/correlations/types';
|
||||
import { getPrioritizedFieldValuePairs } from '../../../../common/correlations/utils';
|
||||
import type {
|
||||
LatencyCorrelation,
|
||||
LatencyCorrelationsResponse,
|
||||
} from '../../../../common/correlations/latency_correlations/types';
|
||||
|
||||
import { callApmApi } from '../../../services/rest/createCallApmApi';
|
||||
|
||||
import {
|
||||
getInitialResponse,
|
||||
getLatencyCorrelationsSortedByCorrelation,
|
||||
getReducer,
|
||||
CorrelationsProgress,
|
||||
} from './utils/analysis_hook_utils';
|
||||
import { useFetchParams } from './use_fetch_params';
|
||||
|
||||
// Overall progress is a float from 0 to 1.
|
||||
const LOADED_OVERALL_HISTOGRAM = 0.05;
|
||||
const LOADED_FIELD_CANDIDATES = LOADED_OVERALL_HISTOGRAM + 0.05;
|
||||
const LOADED_FIELD_VALUE_PAIRS = LOADED_FIELD_CANDIDATES + 0.3;
|
||||
const LOADED_DONE = 1;
|
||||
const PROGRESS_STEP_FIELD_VALUE_PAIRS = 0.3;
|
||||
const PROGRESS_STEP_CORRELATIONS = 0.6;
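// Derived milestones, for reference: the overall histogram completes at 0.05,
// field candidates at 0.10, field/value pairs at 0.40, and the correlations
// step fills the remaining 0.6 up to 1.0.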
|
||||
|
||||
export function useLatencyCorrelations() {
|
||||
const fetchParams = useFetchParams();
|
||||
|
||||
// This use of useReducer (the dispatch function won't get reinstantiated
|
||||
// on every update) and debounce avoids flooding consuming components with updates.
|
||||
// `setResponse.flush()` can be used to enforce an update.
|
||||
const [response, setResponseUnDebounced] = useReducer(
|
||||
getReducer<LatencyCorrelationsResponse & CorrelationsProgress>(),
|
||||
getInitialResponse()
|
||||
);
|
||||
const setResponse = useMemo(
|
||||
() => debounce(setResponseUnDebounced, DEBOUNCE_INTERVAL),
|
||||
[]
|
||||
);
|
||||
|
||||
const abortCtrl = useRef(new AbortController());
|
||||
|
||||
const startFetch = useCallback(async () => {
|
||||
abortCtrl.current.abort();
|
||||
abortCtrl.current = new AbortController();
|
||||
|
||||
setResponse({
|
||||
...getInitialResponse(),
|
||||
isRunning: true,
|
||||
// explicitly set these to undefined to override a possible previous state.
|
||||
error: undefined,
|
||||
latencyCorrelations: undefined,
|
||||
percentileThresholdValue: undefined,
|
||||
overallHistogram: undefined,
|
||||
fieldStats: undefined,
|
||||
});
|
||||
setResponse.flush();
|
||||
|
||||
try {
|
||||
// `responseUpdate` will be enriched with additional data with subsequent
|
||||
// calls to the overall histogram, field candidates, field value pairs, correlation results
|
||||
// and histogram data for statistically significant results.
|
||||
const responseUpdate: LatencyCorrelationsResponse = {
|
||||
ccsWarning: false,
|
||||
};
|
||||
|
||||
// Initial call to fetch the overall distribution for the log-log plot.
|
||||
const { overallHistogram, percentileThresholdValue } = await callApmApi({
|
||||
endpoint: 'POST /internal/apm/latency/overall_distribution',
|
||||
signal: abortCtrl.current.signal,
|
||||
params: {
|
||||
body: {
|
||||
...fetchParams,
|
||||
percentileThreshold: DEFAULT_PERCENTILE_THRESHOLD,
|
||||
},
|
||||
},
|
||||
});
|
||||
responseUpdate.overallHistogram = overallHistogram;
|
||||
responseUpdate.percentileThresholdValue = percentileThresholdValue;
|
||||
|
||||
if (abortCtrl.current.signal.aborted) {
|
||||
return;
|
||||
}
|
||||
|
||||
setResponse({
|
||||
...responseUpdate,
|
||||
loaded: LOADED_OVERALL_HISTOGRAM,
|
||||
});
|
||||
setResponse.flush();
|
||||
|
||||
const { fieldCandidates } = await callApmApi({
|
||||
endpoint: 'GET /internal/apm/correlations/field_candidates',
|
||||
signal: abortCtrl.current.signal,
|
||||
params: {
|
||||
query: fetchParams,
|
||||
},
|
||||
});
|
||||
|
||||
if (abortCtrl.current.signal.aborted) {
|
||||
return;
|
||||
}
|
||||
|
||||
setResponse({
|
||||
loaded: LOADED_FIELD_CANDIDATES,
|
||||
});
|
||||
setResponse.flush();
|
||||
|
||||
const chunkSize = 10;
|
||||
let chunkLoadCounter = 0;
|
||||
|
||||
const fieldValuePairs: FieldValuePair[] = [];
|
||||
const fieldCandidateChunks = chunk(fieldCandidates, chunkSize);
|
||||
|
||||
for (const fieldCandidateChunk of fieldCandidateChunks) {
|
||||
const fieldValuePairChunkResponse = await callApmApi({
|
||||
endpoint: 'POST /internal/apm/correlations/field_value_pairs',
|
||||
signal: abortCtrl.current.signal,
|
||||
params: {
|
||||
body: {
|
||||
...fetchParams,
|
||||
fieldCandidates: fieldCandidateChunk,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
if (fieldValuePairChunkResponse.fieldValuePairs.length > 0) {
|
||||
fieldValuePairs.push(...fieldValuePairChunkResponse.fieldValuePairs);
|
||||
}
|
||||
|
||||
if (abortCtrl.current.signal.aborted) {
|
||||
return;
|
||||
}
|
||||
|
||||
chunkLoadCounter++;
|
||||
setResponse({
|
||||
loaded:
|
||||
LOADED_FIELD_CANDIDATES +
|
||||
(chunkLoadCounter / fieldCandidateChunks.length) *
|
||||
PROGRESS_STEP_FIELD_VALUE_PAIRS,
|
||||
});
|
||||
}
|
||||
|
||||
if (abortCtrl.current.signal.aborted) {
|
||||
return;
|
||||
}
|
||||
|
||||
setResponse.flush();
|
||||
|
||||
chunkLoadCounter = 0;
|
||||
|
||||
const fieldsToSample = new Set<string>();
|
||||
const latencyCorrelations: LatencyCorrelation[] = [];
|
||||
const fieldValuePairChunks = chunk(
|
||||
getPrioritizedFieldValuePairs(fieldValuePairs),
|
||||
chunkSize
|
||||
);
|
||||
|
||||
for (const fieldValuePairChunk of fieldValuePairChunks) {
|
||||
const significantCorrelations = await callApmApi({
|
||||
endpoint: 'POST /internal/apm/correlations/significant_correlations',
|
||||
signal: abortCtrl.current.signal,
|
||||
params: {
|
||||
body: { ...fetchParams, fieldValuePairs: fieldValuePairChunk },
|
||||
},
|
||||
});
|
||||
|
||||
if (significantCorrelations.latencyCorrelations.length > 0) {
|
||||
significantCorrelations.latencyCorrelations.forEach((d) => {
|
||||
fieldsToSample.add(d.fieldName);
|
||||
});
|
||||
latencyCorrelations.push(
|
||||
...significantCorrelations.latencyCorrelations
|
||||
);
|
||||
responseUpdate.latencyCorrelations =
|
||||
getLatencyCorrelationsSortedByCorrelation([...latencyCorrelations]);
|
||||
}
|
||||
|
||||
chunkLoadCounter++;
|
||||
setResponse({
|
||||
...responseUpdate,
|
||||
loaded:
|
||||
LOADED_FIELD_VALUE_PAIRS +
|
||||
(chunkLoadCounter / fieldValuePairChunks.length) *
|
||||
PROGRESS_STEP_CORRELATIONS,
|
||||
});
|
||||
|
||||
if (abortCtrl.current.signal.aborted) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
setResponse.flush();
|
||||
|
||||
const { stats } = await callApmApi({
|
||||
endpoint: 'POST /internal/apm/correlations/field_stats',
|
||||
signal: abortCtrl.current.signal,
|
||||
params: {
|
||||
body: {
|
||||
...fetchParams,
|
||||
fieldsToSample: [...fieldsToSample],
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
responseUpdate.fieldStats = stats;
|
||||
setResponse({
|
||||
...responseUpdate,
|
||||
loaded: LOADED_DONE,
|
||||
isRunning: false,
|
||||
});
|
||||
setResponse.flush();
|
||||
} catch (e) {
|
||||
if (!abortCtrl.current.signal.aborted) {
|
||||
const err = e as Error | IHttpFetchError;
|
||||
setResponse({
|
||||
error:
|
||||
'response' in err
|
||||
? err.body?.message ?? err.response?.statusText
|
||||
: err.message,
|
||||
isRunning: false,
|
||||
});
|
||||
setResponse.flush();
|
||||
}
|
||||
}
|
||||
}, [fetchParams, setResponse]);
|
||||
|
||||
const cancelFetch = useCallback(() => {
|
||||
abortCtrl.current.abort();
|
||||
setResponse({
|
||||
isRunning: false,
|
||||
});
|
||||
setResponse.flush();
|
||||
}, [setResponse]);
|
||||
|
||||
// auto-update
|
||||
useEffect(() => {
|
||||
startFetch();
|
||||
return () => {
|
||||
abortCtrl.current.abort();
|
||||
};
|
||||
}, [startFetch, cancelFetch]);
|
||||
|
||||
const { error, loaded, isRunning, ...returnedResponse } = response;
|
||||
const progress = useMemo(
|
||||
() => ({
|
||||
error,
|
||||
loaded: Math.round(loaded * 100) / 100,
|
||||
isRunning,
|
||||
}),
|
||||
[error, loaded, isRunning]
|
||||
);
|
||||
|
||||
return {
|
||||
progress,
|
||||
response: returnedResponse,
|
||||
startFetch,
|
||||
cancelFetch,
|
||||
};
|
||||
}
|
|
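For orientation, here is a minimal sketch (not part of this changeset) of how a consuming
component might use the hook above. The component name and import path are assumptions made
for illustration; only the progress / response / startFetch / cancelFetch shape comes from
the hook itself.

import React from 'react';
// Path assumed for illustration only.
import { useLatencyCorrelations } from './use_latency_correlations';

export function LatencyCorrelationsStatus() {
  const { progress, response, startFetch, cancelFetch } = useLatencyCorrelations();

  // progress.loaded is the 0-1 float maintained by the hook.
  const percent = Math.round(progress.loaded * 100);

  return (
    <div>
      {progress.error ? <p>{progress.error}</p> : <p>{percent}% loaded</p>}
      <p>{(response.latencyCorrelations ?? []).length} significant correlations</p>
      <button onClick={progress.isRunning ? cancelFetch : startFetch}>
        {progress.isRunning ? 'Cancel' : 'Refresh'}
      </button>
    </div>
  );
}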
@@ -0,0 +1,40 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import type { FailedTransactionsCorrelation } from '../../../../../common/correlations/failed_transactions_correlations/types';
import type { LatencyCorrelation } from '../../../../../common/correlations/latency_correlations/types';

export interface CorrelationsProgress {
  error?: string;
  isRunning: boolean;
  loaded: number;
}

export function getLatencyCorrelationsSortedByCorrelation(
  latencyCorrelations: LatencyCorrelation[]
) {
  return latencyCorrelations.sort((a, b) => b.correlation - a.correlation);
}

export function getFailedTransactionsCorrelationsSortedByScore(
  failedTransactionsCorrelations: FailedTransactionsCorrelation[]
) {
  return failedTransactionsCorrelations.sort((a, b) => b.score - a.score);
}

export const getInitialResponse = () => ({
  ccsWarning: false,
  isRunning: false,
  loaded: 0,
});

export const getReducer =
  <T>() =>
  (prev: T, update: Partial<T>): T => ({
    ...prev,
    ...update,
  });
@ -6,7 +6,7 @@
|
|||
*/
|
||||
|
||||
import { getFailedTransactionsCorrelationImpactLabel } from './get_failed_transactions_correlation_impact_label';
|
||||
import { FAILED_TRANSACTIONS_IMPACT_THRESHOLD } from '../../../../../common/search_strategies/failed_transactions_correlations/constants';
|
||||
import { FAILED_TRANSACTIONS_IMPACT_THRESHOLD } from '../../../../../common/correlations/failed_transactions_correlations/constants';
|
||||
|
||||
const EXPECTED_RESULT = {
|
||||
HIGH: {
|
||||
@ -8,8 +8,8 @@
|
|||
import {
|
||||
FailedTransactionsCorrelation,
|
||||
FailedTransactionsCorrelationsImpactThreshold,
|
||||
} from '../../../../../common/search_strategies/failed_transactions_correlations/types';
|
||||
import { FAILED_TRANSACTIONS_IMPACT_THRESHOLD } from '../../../../../common/search_strategies/failed_transactions_correlations/constants';
|
||||
} from '../../../../../common/correlations/failed_transactions_correlations/types';
|
||||
import { FAILED_TRANSACTIONS_IMPACT_THRESHOLD } from '../../../../../common/correlations/failed_transactions_correlations/constants';
|
||||
|
||||
export function getFailedTransactionsCorrelationImpactLabel(
|
||||
pValue: FailedTransactionsCorrelation['pValue']
|
||||
@ -5,14 +5,14 @@
|
|||
* 2.0.
|
||||
*/
|
||||
|
||||
import type { LatencyCorrelationsRawResponse } from '../../../../../common/search_strategies/latency_correlations/types';
|
||||
import type { LatencyCorrelationsResponse } from '../../../../../common/correlations/latency_correlations/types';
|
||||
|
||||
import { getOverallHistogram } from './get_overall_histogram';
|
||||
|
||||
describe('getOverallHistogram', () => {
|
||||
it('returns "loading" when undefined and running', () => {
|
||||
const { overallHistogram, hasData, status } = getOverallHistogram(
|
||||
{} as LatencyCorrelationsRawResponse,
|
||||
{} as LatencyCorrelationsResponse,
|
||||
true
|
||||
);
|
||||
expect(overallHistogram).toStrictEqual(undefined);
|
||||
|
@ -22,7 +22,7 @@ describe('getOverallHistogram', () => {
|
|||
|
||||
it('returns "success" when undefined and not running', () => {
|
||||
const { overallHistogram, hasData, status } = getOverallHistogram(
|
||||
{} as LatencyCorrelationsRawResponse,
|
||||
{} as LatencyCorrelationsResponse,
|
||||
false
|
||||
);
|
||||
expect(overallHistogram).toStrictEqual([]);
|
||||
|
@ -34,7 +34,7 @@ describe('getOverallHistogram', () => {
|
|||
const { overallHistogram, hasData, status } = getOverallHistogram(
|
||||
{
|
||||
overallHistogram: [{ key: 1, doc_count: 1234 }],
|
||||
} as LatencyCorrelationsRawResponse,
|
||||
} as LatencyCorrelationsResponse,
|
||||
true
|
||||
);
|
||||
expect(overallHistogram).toStrictEqual([{ key: 1, doc_count: 1234 }]);
|
||||
|
@ -46,7 +46,7 @@ describe('getOverallHistogram', () => {
|
|||
const { overallHistogram, hasData, status } = getOverallHistogram(
|
||||
{
|
||||
overallHistogram: [{ key: 1, doc_count: 1234 }],
|
||||
} as LatencyCorrelationsRawResponse,
|
||||
} as LatencyCorrelationsResponse,
|
||||
false
|
||||
);
|
||||
expect(overallHistogram).toStrictEqual([{ key: 1, doc_count: 1234 }]);
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
* 2.0.
|
||||
*/
|
||||
|
||||
import type { LatencyCorrelationsRawResponse } from '../../../../../common/search_strategies/latency_correlations/types';
|
||||
import type { LatencyCorrelationsResponse } from '../../../../../common/correlations/latency_correlations/types';
|
||||
|
||||
import { FETCH_STATUS } from '../../../../hooks/use_fetcher';
|
||||
|
||||
|
@ -13,7 +13,7 @@ import { FETCH_STATUS } from '../../../../hooks/use_fetcher';
|
|||
// of fetching more data such as correlation results. That's why we have to determine
|
||||
// the `status` of the data for the latency chart separately.
|
||||
export function getOverallHistogram(
|
||||
data: LatencyCorrelationsRawResponse,
|
||||
data: LatencyCorrelationsResponse,
|
||||
isRunning: boolean
|
||||
) {
|
||||
const overallHistogram =
|
||||
|
|
|
@ -23,7 +23,7 @@ import { FormattedMessage } from '@kbn/i18n/react';
|
|||
import { useUiTracker } from '../../../../../../observability/public';
|
||||
|
||||
import { getDurationFormatter } from '../../../../../common/utils/formatters';
|
||||
import { DEFAULT_PERCENTILE_THRESHOLD } from '../../../../../common/search_strategies/constants';
|
||||
import { DEFAULT_PERCENTILE_THRESHOLD } from '../../../../../common/correlations/constants';
|
||||
|
||||
import { useLegacyUrlParams } from '../../../../context/url_params_context/use_url_params';
|
||||
import { FETCH_STATUS } from '../../../../hooks/use_fetcher';
|
||||
|
@ -165,7 +165,7 @@ export function TransactionDistribution({
|
|||
<EuiText color="subdued" size="xs">
|
||||
<FormattedMessage
|
||||
id="xpack.apm.transactionDetails.tabs.transactionDistributionChartDescription"
|
||||
defaultMessage="Log-log plot for latency (x) by transactions (y) with overlapping bands for {allTransactions} and {allFailedTransactions}."
|
||||
defaultMessage="Log-log plot for latency (x) by transactions (y) with overlapping bands for {allTransactions} and {failedTransactions}."
|
||||
values={{
|
||||
allTransactions: (
|
||||
<span style={{ color: transactionColors.ALL_TRANSACTIONS }}>
|
||||
|
@ -175,13 +175,13 @@ export function TransactionDistribution({
|
|||
/>
|
||||
</span>
|
||||
),
|
||||
allFailedTransactions: (
|
||||
failedTransactions: (
|
||||
<span
|
||||
style={{ color: transactionColors.ALL_FAILED_TRANSACTIONS }}
|
||||
>
|
||||
<FormattedMessage
|
||||
id="xpack.apm.transactionDetails.tabs.transactionDistributionChartAllFailedTransactions"
|
||||
defaultMessage="all failed transactions"
|
||||
id="xpack.apm.transactionDetails.tabs.transactionDistributionChartFailedTransactions"
|
||||
defaultMessage="failed transactions"
|
||||
/>
|
||||
</span>
|
||||
),
|
||||
|
|
|
@ -5,77 +5,41 @@
|
|||
* 2.0.
|
||||
*/
|
||||
|
||||
import { useEffect, useMemo } from 'react';
|
||||
import { useEffect } from 'react';
|
||||
|
||||
import { i18n } from '@kbn/i18n';
|
||||
|
||||
import { DEFAULT_PERCENTILE_THRESHOLD } from '../../../../../common/search_strategies/constants';
|
||||
import { RawSearchStrategyClientParams } from '../../../../../common/search_strategies/types';
|
||||
import { DEFAULT_PERCENTILE_THRESHOLD } from '../../../../../common/correlations/constants';
|
||||
import { EVENT_OUTCOME } from '../../../../../common/elasticsearch_fieldnames';
|
||||
import { EventOutcome } from '../../../../../common/event_outcome';
|
||||
|
||||
import { useApmPluginContext } from '../../../../context/apm_plugin/use_apm_plugin_context';
|
||||
import { useApmServiceContext } from '../../../../context/apm_service/use_apm_service_context';
|
||||
import { useLegacyUrlParams } from '../../../../context/url_params_context/use_url_params';
|
||||
import { useApmParams } from '../../../../hooks/use_apm_params';
|
||||
import { useFetcher, FETCH_STATUS } from '../../../../hooks/use_fetcher';
|
||||
import { useTimeRange } from '../../../../hooks/use_time_range';
|
||||
|
||||
import type { TransactionDistributionChartData } from '../../../shared/charts/transaction_distribution_chart';
|
||||
|
||||
import { isErrorMessage } from '../../correlations/utils/is_error_message';
|
||||
|
||||
function hasRequiredParams(params: RawSearchStrategyClientParams) {
|
||||
const { serviceName, environment, start, end } = params;
|
||||
return serviceName && environment && start && end;
|
||||
}
|
||||
import { useFetchParams } from '../../correlations/use_fetch_params';
|
||||
|
||||
export const useTransactionDistributionChartData = () => {
|
||||
const { serviceName, transactionType } = useApmServiceContext();
|
||||
const params = useFetchParams();
|
||||
|
||||
const {
|
||||
core: { notifications },
|
||||
} = useApmPluginContext();
|
||||
|
||||
const { urlParams } = useLegacyUrlParams();
|
||||
const { transactionName } = urlParams;
|
||||
|
||||
const {
|
||||
query: { kuery, environment, rangeFrom, rangeTo },
|
||||
} = useApmParams('/services/{serviceName}/transactions/view');
|
||||
|
||||
const { start, end } = useTimeRange({ rangeFrom, rangeTo });
|
||||
|
||||
const params = useMemo(
|
||||
() => ({
|
||||
serviceName,
|
||||
transactionName,
|
||||
transactionType,
|
||||
kuery,
|
||||
environment,
|
||||
start,
|
||||
end,
|
||||
}),
|
||||
[
|
||||
serviceName,
|
||||
transactionName,
|
||||
transactionType,
|
||||
kuery,
|
||||
environment,
|
||||
start,
|
||||
end,
|
||||
]
|
||||
);
|
||||
|
||||
const {
|
||||
// TODO The default object has `log: []` to retain compatibility with the shared search strategies code.
|
||||
// Remove once the other tabs are migrated away from search strategies.
|
||||
data: overallLatencyData = { log: [] },
|
||||
data: overallLatencyData = {},
|
||||
status: overallLatencyStatus,
|
||||
error: overallLatencyError,
|
||||
} = useFetcher(
|
||||
(callApmApi) => {
|
||||
if (hasRequiredParams(params)) {
|
||||
if (
|
||||
params.serviceName &&
|
||||
params.environment &&
|
||||
params.start &&
|
||||
params.end
|
||||
) {
|
||||
return callApmApi({
|
||||
endpoint: 'POST /internal/apm/latency/overall_distribution',
|
||||
params: {
|
||||
|
@ -114,12 +78,15 @@ export const useTransactionDistributionChartData = () => {
|
|||
Array.isArray(overallLatencyHistogram) &&
|
||||
overallLatencyHistogram.length > 0;
|
||||
|
||||
// TODO The default object has `log: []` to retain compatibility with the shared search strategies code.
|
||||
// Remove once the other tabs are migrated away from search strategies.
|
||||
const { data: errorHistogramData = { log: [] }, error: errorHistogramError } =
|
||||
const { data: errorHistogramData = {}, error: errorHistogramError } =
|
||||
useFetcher(
|
||||
(callApmApi) => {
|
||||
if (hasRequiredParams(params)) {
|
||||
if (
|
||||
params.serviceName &&
|
||||
params.environment &&
|
||||
params.start &&
|
||||
params.end
|
||||
) {
|
||||
return callApmApi({
|
||||
endpoint: 'POST /internal/apm/latency/overall_distribution',
|
||||
params: {
|
||||
|
@ -171,8 +138,8 @@ export const useTransactionDistributionChartData = () => {
|
|||
if (Array.isArray(errorHistogramData.overallHistogram)) {
|
||||
transactionDistributionChartData.push({
|
||||
id: i18n.translate(
|
||||
'xpack.apm.transactionDistribution.chart.allFailedTransactionsLabel',
|
||||
{ defaultMessage: 'All failed transactions' }
|
||||
'xpack.apm.transactionDistribution.chart.failedTransactionsLabel',
|
||||
{ defaultMessage: 'Failed transactions' }
|
||||
),
|
||||
histogram: errorHistogramData.overallHistogram,
|
||||
});
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
* 2.0.
|
||||
*/
|
||||
|
||||
import type { HistogramItem } from '../../../../../common/search_strategies/types';
|
||||
import type { HistogramItem } from '../../../../../common/correlations/types';
|
||||
|
||||
import { replaceHistogramDotsWithBars } from './index';
|
||||
|
||||
|
|
|
@ -32,7 +32,7 @@ import { i18n } from '@kbn/i18n';
|
|||
import { useChartTheme } from '../../../../../../observability/public';
|
||||
|
||||
import { getDurationFormatter } from '../../../../../common/utils/formatters';
|
||||
import type { HistogramItem } from '../../../../../common/search_strategies/types';
|
||||
import type { HistogramItem } from '../../../../../common/correlations/types';
|
||||
|
||||
import { FETCH_STATUS } from '../../../../hooks/use_fetcher';
|
||||
import { useTheme } from '../../../../hooks/use_theme';
|
||||
|
|
|
@@ -1,218 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { useCallback, useEffect, useReducer, useRef } from 'react';
|
||||
import type { Subscription } from 'rxjs';
|
||||
|
||||
import {
|
||||
IKibanaSearchRequest,
|
||||
IKibanaSearchResponse,
|
||||
isCompleteResponse,
|
||||
isErrorResponse,
|
||||
} from '../../../../../src/plugins/data/public';
|
||||
import { useKibana } from '../../../../../src/plugins/kibana_react/public';
|
||||
|
||||
import type { RawSearchStrategyClientParams } from '../../common/search_strategies/types';
|
||||
import type { RawResponseBase } from '../../common/search_strategies/types';
|
||||
import type {
|
||||
LatencyCorrelationsParams,
|
||||
LatencyCorrelationsRawResponse,
|
||||
} from '../../common/search_strategies/latency_correlations/types';
|
||||
import type {
|
||||
FailedTransactionsCorrelationsParams,
|
||||
FailedTransactionsCorrelationsRawResponse,
|
||||
} from '../../common/search_strategies/failed_transactions_correlations/types';
|
||||
import {
|
||||
ApmSearchStrategies,
|
||||
APM_SEARCH_STRATEGIES,
|
||||
} from '../../common/search_strategies/constants';
|
||||
import { useApmServiceContext } from '../context/apm_service/use_apm_service_context';
|
||||
import { useLegacyUrlParams } from '../context/url_params_context/use_url_params';
|
||||
|
||||
import { ApmPluginStartDeps } from '../plugin';
|
||||
|
||||
import { useApmParams } from './use_apm_params';
|
||||
import { useTimeRange } from './use_time_range';
|
||||
|
||||
interface SearchStrategyProgress {
|
||||
error?: Error;
|
||||
isRunning: boolean;
|
||||
loaded: number;
|
||||
total: number;
|
||||
}
|
||||
|
||||
const getInitialRawResponse = <
|
||||
TRawResponse extends RawResponseBase
|
||||
>(): TRawResponse =>
|
||||
({
|
||||
ccsWarning: false,
|
||||
took: 0,
|
||||
} as TRawResponse);
|
||||
|
||||
const getInitialProgress = (): SearchStrategyProgress => ({
|
||||
isRunning: false,
|
||||
loaded: 0,
|
||||
total: 100,
|
||||
});
|
||||
|
||||
const getReducer =
|
||||
<T>() =>
|
||||
(prev: T, update: Partial<T>): T => ({
|
||||
...prev,
|
||||
...update,
|
||||
});
|
||||
|
||||
interface SearchStrategyReturnBase<TRawResponse extends RawResponseBase> {
|
||||
progress: SearchStrategyProgress;
|
||||
response: TRawResponse;
|
||||
startFetch: () => void;
|
||||
cancelFetch: () => void;
|
||||
}
|
||||
|
||||
// Function overload for Latency Correlations
|
||||
export function useSearchStrategy(
|
||||
searchStrategyName: typeof APM_SEARCH_STRATEGIES.APM_LATENCY_CORRELATIONS,
|
||||
searchStrategyParams: LatencyCorrelationsParams
|
||||
): SearchStrategyReturnBase<LatencyCorrelationsRawResponse & RawResponseBase>;
|
||||
|
||||
// Function overload for Failed Transactions Correlations
|
||||
export function useSearchStrategy(
|
||||
searchStrategyName: typeof APM_SEARCH_STRATEGIES.APM_FAILED_TRANSACTIONS_CORRELATIONS,
|
||||
searchStrategyParams: FailedTransactionsCorrelationsParams
|
||||
): SearchStrategyReturnBase<
|
||||
FailedTransactionsCorrelationsRawResponse & RawResponseBase
|
||||
>;
|
||||
|
||||
export function useSearchStrategy<
|
||||
TRawResponse extends RawResponseBase,
|
||||
TParams = unknown
|
||||
>(
|
||||
searchStrategyName: ApmSearchStrategies,
|
||||
searchStrategyParams?: TParams
|
||||
): SearchStrategyReturnBase<TRawResponse> {
|
||||
const {
|
||||
services: { data },
|
||||
} = useKibana<ApmPluginStartDeps>();
|
||||
|
||||
const { serviceName, transactionType } = useApmServiceContext();
|
||||
const {
|
||||
query: { kuery, environment, rangeFrom, rangeTo },
|
||||
} = useApmParams('/services/{serviceName}/transactions/view');
|
||||
const { start, end } = useTimeRange({ rangeFrom, rangeTo });
|
||||
const { urlParams } = useLegacyUrlParams();
|
||||
const { transactionName } = urlParams;
|
||||
|
||||
const [rawResponse, setRawResponse] = useReducer(
|
||||
getReducer<TRawResponse>(),
|
||||
getInitialRawResponse<TRawResponse>()
|
||||
);
|
||||
|
||||
const [fetchState, setFetchState] = useReducer(
|
||||
getReducer<SearchStrategyProgress>(),
|
||||
getInitialProgress()
|
||||
);
|
||||
|
||||
const abortCtrl = useRef(new AbortController());
|
||||
const searchSubscription$ = useRef<Subscription>();
|
||||
const searchStrategyParamsRef = useRef(searchStrategyParams);
|
||||
|
||||
const startFetch = useCallback(() => {
|
||||
searchSubscription$.current?.unsubscribe();
|
||||
abortCtrl.current.abort();
|
||||
abortCtrl.current = new AbortController();
|
||||
setFetchState({
|
||||
...getInitialProgress(),
|
||||
error: undefined,
|
||||
});
|
||||
|
||||
const request = {
|
||||
params: {
|
||||
environment,
|
||||
serviceName,
|
||||
transactionName,
|
||||
transactionType,
|
||||
kuery,
|
||||
start,
|
||||
end,
|
||||
...(searchStrategyParamsRef.current
|
||||
? { ...searchStrategyParamsRef.current }
|
||||
: {}),
|
||||
},
|
||||
};
|
||||
|
||||
// Submit the search request using the `data.search` service.
|
||||
searchSubscription$.current = data.search
|
||||
.search<
|
||||
IKibanaSearchRequest<RawSearchStrategyClientParams & (TParams | {})>,
|
||||
IKibanaSearchResponse<TRawResponse>
|
||||
>(request, {
|
||||
strategy: searchStrategyName,
|
||||
abortSignal: abortCtrl.current.signal,
|
||||
})
|
||||
.subscribe({
|
||||
next: (response: IKibanaSearchResponse<TRawResponse>) => {
|
||||
setRawResponse(response.rawResponse);
|
||||
setFetchState({
|
||||
isRunning: response.isRunning || false,
|
||||
...(response.loaded ? { loaded: response.loaded } : {}),
|
||||
...(response.total ? { total: response.total } : {}),
|
||||
});
|
||||
|
||||
if (isCompleteResponse(response)) {
|
||||
searchSubscription$.current?.unsubscribe();
|
||||
setFetchState({
|
||||
isRunning: false,
|
||||
});
|
||||
} else if (isErrorResponse(response)) {
|
||||
searchSubscription$.current?.unsubscribe();
|
||||
setFetchState({
|
||||
error: response as unknown as Error,
|
||||
isRunning: false,
|
||||
});
|
||||
}
|
||||
},
|
||||
error: (error: Error) => {
|
||||
setFetchState({
|
||||
error,
|
||||
isRunning: false,
|
||||
});
|
||||
},
|
||||
});
|
||||
}, [
|
||||
searchStrategyName,
|
||||
data.search,
|
||||
environment,
|
||||
serviceName,
|
||||
transactionName,
|
||||
transactionType,
|
||||
kuery,
|
||||
start,
|
||||
end,
|
||||
]);
|
||||
|
||||
const cancelFetch = useCallback(() => {
|
||||
searchSubscription$.current?.unsubscribe();
|
||||
searchSubscription$.current = undefined;
|
||||
abortCtrl.current.abort();
|
||||
setFetchState({
|
||||
isRunning: false,
|
||||
});
|
||||
}, []);
|
||||
|
||||
// auto-update
|
||||
useEffect(() => {
|
||||
startFetch();
|
||||
return cancelFetch;
|
||||
}, [startFetch, cancelFetch]);
|
||||
|
||||
return {
|
||||
progress: fetchState,
|
||||
response: rawResponse,
|
||||
startFetch,
|
||||
cancelFetch,
|
||||
};
|
||||
}
|
|
@ -9,13 +9,13 @@ import { ElasticsearchClient } from 'kibana/server';
|
|||
import type * as estypes from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
|
||||
|
||||
import { buildSamplerAggregation } from '../../utils/field_stats_utils';
|
||||
import { FieldValuePair } from '../../../../../common/search_strategies/types';
|
||||
import { FieldValuePair } from '../../../../../common/correlations/types';
|
||||
import {
|
||||
FieldStatsCommonRequestParams,
|
||||
BooleanFieldStats,
|
||||
Aggs,
|
||||
TopValueBucket,
|
||||
} from '../../../../../common/search_strategies/field_stats_types';
|
||||
} from '../../../../../common/correlations/field_stats_types';
|
||||
import { getQueryWithParams } from '../get_query_with_params';
|
||||
|
||||
export const getBooleanFieldStatsRequest = (
|
|
@ -10,20 +10,20 @@ import { chunk } from 'lodash';
|
|||
import { ES_FIELD_TYPES } from '@kbn/field-types';
|
||||
import {
|
||||
FieldValuePair,
|
||||
SearchStrategyParams,
|
||||
} from '../../../../../common/search_strategies/types';
|
||||
import { getRequestBase } from '../get_request_base';
|
||||
import { fetchKeywordFieldStats } from './get_keyword_field_stats';
|
||||
import { fetchNumericFieldStats } from './get_numeric_field_stats';
|
||||
CorrelationsParams,
|
||||
} from '../../../../../common/correlations/types';
|
||||
import {
|
||||
FieldStats,
|
||||
FieldStatsCommonRequestParams,
|
||||
} from '../../../../../common/search_strategies/field_stats_types';
|
||||
} from '../../../../../common/correlations/field_stats_types';
|
||||
import { getRequestBase } from '../get_request_base';
|
||||
import { fetchKeywordFieldStats } from './get_keyword_field_stats';
|
||||
import { fetchNumericFieldStats } from './get_numeric_field_stats';
|
||||
import { fetchBooleanFieldStats } from './get_boolean_field_stats';
|
||||
|
||||
export const fetchFieldsStats = async (
|
||||
esClient: ElasticsearchClient,
|
||||
params: SearchStrategyParams,
|
||||
params: CorrelationsParams,
|
||||
fieldsToSample: string[],
|
||||
termFilters?: FieldValuePair[]
|
||||
): Promise<{ stats: FieldStats[]; errors: any[] }> => {
|
|
@ -7,15 +7,15 @@
|
|||
|
||||
import { ElasticsearchClient } from 'kibana/server';
|
||||
import type * as estypes from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
|
||||
import { FieldValuePair } from '../../../../../common/search_strategies/types';
|
||||
import { getQueryWithParams } from '../get_query_with_params';
|
||||
import { buildSamplerAggregation } from '../../utils/field_stats_utils';
|
||||
import { FieldValuePair } from '../../../../../common/correlations/types';
|
||||
import {
|
||||
FieldStatsCommonRequestParams,
|
||||
KeywordFieldStats,
|
||||
Aggs,
|
||||
TopValueBucket,
|
||||
} from '../../../../../common/search_strategies/field_stats_types';
|
||||
} from '../../../../../common/correlations/field_stats_types';
|
||||
import { buildSamplerAggregation } from '../../utils/field_stats_utils';
|
||||
import { getQueryWithParams } from '../get_query_with_params';
|
||||
|
||||
export const getKeywordFieldStatsRequest = (
|
||||
params: FieldStatsCommonRequestParams,
|
|
@ -13,8 +13,8 @@ import {
|
|||
FieldStatsCommonRequestParams,
|
||||
TopValueBucket,
|
||||
Aggs,
|
||||
} from '../../../../../common/search_strategies/field_stats_types';
|
||||
import { FieldValuePair } from '../../../../../common/search_strategies/types';
|
||||
} from '../../../../../common/correlations/field_stats_types';
|
||||
import { FieldValuePair } from '../../../../../common/correlations/types';
|
||||
import { getQueryWithParams } from '../get_query_with_params';
|
||||
import { buildSamplerAggregation } from '../../utils/field_stats_utils';
|
||||
|
|
@ -15,7 +15,7 @@ import {
|
|||
PROCESSOR_EVENT,
|
||||
} from '../../../../common/elasticsearch_fieldnames';
|
||||
import { ProcessorEvent } from '../../../../common/processor_event';
|
||||
import { SearchStrategyClientParams } from '../../../../common/search_strategies/types';
|
||||
import { CorrelationsClientParams } from '../../../../common/correlations/types';
|
||||
|
||||
export function getCorrelationsFilters({
|
||||
environment,
|
||||
|
@ -25,7 +25,7 @@ export function getCorrelationsFilters({
|
|||
transactionName,
|
||||
start,
|
||||
end,
|
||||
}: SearchStrategyClientParams) {
|
||||
}: CorrelationsClientParams) {
|
||||
const correlationsFilters: ESFilter[] = [
|
||||
{ term: { [PROCESSOR_EVENT]: ProcessorEvent.transaction } },
|
||||
...rangeQuery(start, end),
|
|
@ -8,8 +8,8 @@
|
|||
import type * as estypes from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
|
||||
import type {
|
||||
FieldValuePair,
|
||||
SearchStrategyParams,
|
||||
} from '../../../../common/search_strategies/types';
|
||||
CorrelationsParams,
|
||||
} from '../../../../common/correlations/types';
|
||||
import { getCorrelationsFilters } from './get_filters';
|
||||
|
||||
export const getTermsQuery = ({ fieldName, fieldValue }: FieldValuePair) => {
|
||||
|
@ -17,7 +17,7 @@ export const getTermsQuery = ({ fieldName, fieldValue }: FieldValuePair) => {
|
|||
};
|
||||
|
||||
interface QueryParams {
|
||||
params: SearchStrategyParams;
|
||||
params: CorrelationsParams;
|
||||
termFilters?: FieldValuePair[];
|
||||
}
|
||||
export const getQueryWithParams = ({ params, termFilters }: QueryParams) => {
|
|
@ -5,12 +5,12 @@
|
|||
* 2.0.
|
||||
*/
|
||||
|
||||
import type { SearchStrategyParams } from '../../../../common/search_strategies/types';
|
||||
import type { CorrelationsParams } from '../../../../common/correlations/types';
|
||||
|
||||
export const getRequestBase = ({
|
||||
index,
|
||||
includeFrozen,
|
||||
}: SearchStrategyParams) => ({
|
||||
}: CorrelationsParams) => ({
|
||||
index,
|
||||
// matches APM's event client settings
|
||||
ignore_throttled: includeFrozen === undefined ? true : !includeFrozen,
|
|
@ -6,11 +6,13 @@
|
|||
*/
|
||||
|
||||
export { fetchFailedTransactionsCorrelationPValues } from './query_failure_correlation';
|
||||
export { fetchPValues } from './query_p_values';
|
||||
export { fetchSignificantCorrelations } from './query_significant_correlations';
|
||||
export { fetchTransactionDurationFieldCandidates } from './query_field_candidates';
|
||||
export { fetchTransactionDurationFieldValuePairs } from './query_field_value_pairs';
|
||||
export { fetchTransactionDurationFractions } from './query_fractions';
|
||||
export { fetchTransactionDurationPercentiles } from './query_percentiles';
|
||||
export { fetchTransactionDurationCorrelation } from './query_correlation';
|
||||
export { fetchTransactionDurationHistograms } from './query_histograms_generator';
|
||||
export { fetchTransactionDurationCorrelationWithHistogram } from './query_correlation_with_histogram';
|
||||
export { fetchTransactionDurationHistogramRangeSteps } from './query_histogram_range_steps';
|
||||
export { fetchTransactionDurationRanges } from './query_ranges';
|
|
@ -13,8 +13,8 @@ import { TRANSACTION_DURATION } from '../../../../common/elasticsearch_fieldname
|
|||
import type {
|
||||
FieldValuePair,
|
||||
ResponseHit,
|
||||
SearchStrategyParams,
|
||||
} from '../../../../common/search_strategies/types';
|
||||
CorrelationsParams,
|
||||
} from '../../../../common/correlations/types';
|
||||
|
||||
import { getQueryWithParams } from './get_query_with_params';
|
||||
import { getRequestBase } from './get_request_base';
|
||||
|
@ -33,7 +33,7 @@ export interface BucketCorrelation {
|
|||
}
|
||||
|
||||
export const getTransactionDurationCorrelationRequest = (
|
||||
params: SearchStrategyParams,
|
||||
params: CorrelationsParams,
|
||||
expectations: number[],
|
||||
ranges: estypes.AggregationsAggregationRange[],
|
||||
fractions: number[],
|
||||
|
@ -87,7 +87,7 @@ export const getTransactionDurationCorrelationRequest = (
|
|||
|
||||
export const fetchTransactionDurationCorrelation = async (
|
||||
esClient: ElasticsearchClient,
|
||||
params: SearchStrategyParams,
|
||||
params: CorrelationsParams,
|
||||
expectations: number[],
|
||||
ranges: estypes.AggregationsAggregationRange[],
|
||||
fractions: number[],
|
|
@ -10,10 +10,9 @@ import type * as estypes from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
|
|||
import type { ElasticsearchClient } from 'src/core/server';
|
||||
import { ENVIRONMENT_ALL } from '../../../../common/environment_filter_values';
|
||||
|
||||
import { searchServiceLogProvider } from '../search_service_log';
|
||||
import { latencyCorrelationsSearchServiceStateProvider } from '../latency_correlations/latency_correlations_search_service_state';
|
||||
import { splitAllSettledPromises } from '../utils';
|
||||
|
||||
import { fetchTransactionDurationHistograms } from './query_histograms_generator';
|
||||
import { fetchTransactionDurationCorrelationWithHistogram } from './query_correlation_with_histogram';
|
||||
|
||||
const params = {
|
||||
index: 'apm-*',
|
||||
|
@ -35,8 +34,8 @@ const fieldValuePairs = [
|
|||
{ fieldName: 'the-field-name-2', fieldValue: 'the-field-value-3' },
|
||||
];
|
||||
|
||||
describe('query_histograms_generator', () => {
|
||||
describe('fetchTransactionDurationHistograms', () => {
|
||||
describe('query_correlation_with_histogram', () => {
|
||||
describe('fetchTransactionDurationCorrelationWithHistogram', () => {
|
||||
it(`doesn't break on failing ES queries and adds messages to the log`, async () => {
|
||||
const esClientSearchMock = jest.fn(
|
||||
(
|
||||
|
@ -54,37 +53,29 @@ describe('query_histograms_generator', () => {
|
|||
search: esClientSearchMock,
|
||||
} as unknown as ElasticsearchClient;
|
||||
|
||||
const state = latencyCorrelationsSearchServiceStateProvider();
|
||||
const { addLogMessage, getLogMessages } = searchServiceLogProvider();
|
||||
|
||||
let loadedHistograms = 0;
|
||||
const items = [];
|
||||
|
||||
for await (const item of fetchTransactionDurationHistograms(
|
||||
esClientMock,
|
||||
addLogMessage,
|
||||
params,
|
||||
state,
|
||||
expectations,
|
||||
ranges,
|
||||
fractions,
|
||||
histogramRangeSteps,
|
||||
totalDocCount,
|
||||
fieldValuePairs
|
||||
)) {
|
||||
if (item !== undefined) {
|
||||
items.push(item);
|
||||
}
|
||||
loadedHistograms++;
|
||||
}
|
||||
const { fulfilled: items, rejected: errors } = splitAllSettledPromises(
|
||||
await Promise.allSettled(
|
||||
fieldValuePairs.map((fieldValuePair) =>
|
||||
fetchTransactionDurationCorrelationWithHistogram(
|
||||
esClientMock,
|
||||
params,
|
||||
expectations,
|
||||
ranges,
|
||||
fractions,
|
||||
histogramRangeSteps,
|
||||
totalDocCount,
|
||||
fieldValuePair
|
||||
)
|
||||
)
|
||||
)
|
||||
);
|
||||
|
||||
expect(items.length).toEqual(0);
|
||||
expect(loadedHistograms).toEqual(3);
|
||||
expect(esClientSearchMock).toHaveBeenCalledTimes(3);
|
||||
expect(getLogMessages().map((d) => d.split(': ')[1])).toEqual([
|
||||
"Failed to fetch correlation/kstest for 'the-field-name-1/the-field-value-1'",
|
||||
"Failed to fetch correlation/kstest for 'the-field-name-2/the-field-value-2'",
|
||||
"Failed to fetch correlation/kstest for 'the-field-name-2/the-field-value-3'",
|
||||
expect(errors.map((e) => (e as Error).toString())).toEqual([
|
||||
'Error: fetchTransactionDurationCorrelation failed, did not return aggregations.',
|
||||
'Error: fetchTransactionDurationCorrelation failed, did not return aggregations.',
|
||||
'Error: fetchTransactionDurationCorrelation failed, did not return aggregations.',
|
||||
]);
|
||||
});
|
||||
|
||||
|
@ -112,34 +103,26 @@ describe('query_histograms_generator', () => {
|
|||
search: esClientSearchMock,
|
||||
} as unknown as ElasticsearchClient;
|
||||
|
||||
const state = latencyCorrelationsSearchServiceStateProvider();
|
||||
const { addLogMessage, getLogMessages } = searchServiceLogProvider();
|
||||
|
||||
let loadedHistograms = 0;
|
||||
const items = [];
|
||||
|
||||
for await (const item of fetchTransactionDurationHistograms(
|
||||
esClientMock,
|
||||
addLogMessage,
|
||||
params,
|
||||
state,
|
||||
expectations,
|
||||
ranges,
|
||||
fractions,
|
||||
histogramRangeSteps,
|
||||
totalDocCount,
|
||||
fieldValuePairs
|
||||
)) {
|
||||
if (item !== undefined) {
|
||||
items.push(item);
|
||||
}
|
||||
loadedHistograms++;
|
||||
}
|
||||
const { fulfilled: items, rejected: errors } = splitAllSettledPromises(
|
||||
await Promise.allSettled(
|
||||
fieldValuePairs.map((fieldValuePair) =>
|
||||
fetchTransactionDurationCorrelationWithHistogram(
|
||||
esClientMock,
|
||||
params,
|
||||
expectations,
|
||||
ranges,
|
||||
fractions,
|
||||
histogramRangeSteps,
|
||||
totalDocCount,
|
||||
fieldValuePair
|
||||
)
|
||||
)
|
||||
)
|
||||
);
|
||||
|
||||
expect(items.length).toEqual(3);
|
||||
expect(loadedHistograms).toEqual(3);
|
||||
expect(esClientSearchMock).toHaveBeenCalledTimes(6);
|
||||
expect(getLogMessages().length).toEqual(0);
|
||||
expect(errors.length).toEqual(0);
|
||||
});
|
||||
});
|
||||
});
|
|
@@ -0,0 +1,65 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import type * as estypes from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
|
||||
|
||||
import type { ElasticsearchClient } from 'src/core/server';
|
||||
|
||||
import type {
|
||||
FieldValuePair,
|
||||
CorrelationsParams,
|
||||
} from '../../../../common/correlations/types';
|
||||
|
||||
import type { LatencyCorrelation } from '../../../../common/correlations/latency_correlations/types';
|
||||
import {
|
||||
CORRELATION_THRESHOLD,
|
||||
KS_TEST_THRESHOLD,
|
||||
} from '../../../../common/correlations/constants';
|
||||
|
||||
import { fetchTransactionDurationCorrelation } from './query_correlation';
|
||||
import { fetchTransactionDurationRanges } from './query_ranges';
|
||||
|
||||
export async function fetchTransactionDurationCorrelationWithHistogram(
|
||||
esClient: ElasticsearchClient,
|
||||
params: CorrelationsParams,
|
||||
expectations: number[],
|
||||
ranges: estypes.AggregationsAggregationRange[],
|
||||
fractions: number[],
|
||||
histogramRangeSteps: number[],
|
||||
totalDocCount: number,
|
||||
fieldValuePair: FieldValuePair
|
||||
): Promise<LatencyCorrelation | undefined> {
|
||||
const { correlation, ksTest } = await fetchTransactionDurationCorrelation(
|
||||
esClient,
|
||||
params,
|
||||
expectations,
|
||||
ranges,
|
||||
fractions,
|
||||
totalDocCount,
|
||||
[fieldValuePair]
|
||||
);
|
||||
|
||||
if (
|
||||
correlation !== null &&
|
||||
correlation > CORRELATION_THRESHOLD &&
|
||||
ksTest !== null &&
|
||||
ksTest < KS_TEST_THRESHOLD
|
||||
) {
|
||||
const logHistogram = await fetchTransactionDurationRanges(
|
||||
esClient,
|
||||
params,
|
||||
histogramRangeSteps,
|
||||
[fieldValuePair]
|
||||
);
|
||||
return {
|
||||
...fieldValuePair,
|
||||
correlation,
|
||||
ksTest,
|
||||
histogram: logHistogram,
|
||||
};
|
||||
}
|
||||
}
|
|
@ -6,7 +6,8 @@
|
|||
*/
|
||||
import type * as estypes from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
|
||||
import { ElasticsearchClient } from 'kibana/server';
|
||||
import { SearchStrategyParams } from '../../../../common/search_strategies/types';
|
||||
import { CorrelationsParams } from '../../../../common/correlations/types';
|
||||
import { FailedTransactionsCorrelation } from '../../../../common/correlations/failed_transactions_correlations/types';
|
||||
import { EVENT_OUTCOME } from '../../../../common/elasticsearch_fieldnames';
|
||||
import { EventOutcome } from '../../../../common/event_outcome';
|
||||
import { fetchTransactionDurationRanges } from './query_ranges';
|
||||
|
@ -14,7 +15,7 @@ import { getQueryWithParams, getTermsQuery } from './get_query_with_params';
|
|||
import { getRequestBase } from './get_request_base';
|
||||
|
||||
export const getFailureCorrelationRequest = (
|
||||
params: SearchStrategyParams,
|
||||
params: CorrelationsParams,
|
||||
fieldName: string
|
||||
): estypes.SearchRequest => {
|
||||
const query = getQueryWithParams({
|
||||
|
@ -65,7 +66,7 @@ export const getFailureCorrelationRequest = (
|
|||
|
||||
export const fetchFailedTransactionsCorrelationPValues = async (
|
||||
esClient: ElasticsearchClient,
|
||||
params: SearchStrategyParams,
|
||||
params: CorrelationsParams,
|
||||
histogramRangeSteps: number[],
|
||||
fieldName: string
|
||||
) => {
|
||||
|
@ -88,7 +89,7 @@ export const fetchFailedTransactionsCorrelationPValues = async (
|
|||
}>;
|
||||
|
||||
// Using for of to sequentially augment the results with histogram data.
|
||||
const result = [];
|
||||
const result: FailedTransactionsCorrelation[] = [];
|
||||
for (const bucket of overallResult.buckets) {
|
||||
// Scale the score into a value from 0 - 1
|
||||
// using a concave piecewise linear function in -log(p-value)
|
|
@ -10,7 +10,7 @@ import type * as estypes from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
|
|||
import type { ElasticsearchClient } from 'src/core/server';
|
||||
import { ENVIRONMENT_ALL } from '../../../../common/environment_filter_values';
|
||||
|
||||
import { hasPrefixToInclude } from '../utils';
|
||||
import { hasPrefixToInclude } from '../../../../common/correlations/utils';
|
||||
|
||||
import {
|
||||
fetchTransactionDurationFieldCandidates,
|
|
@ -11,15 +11,14 @@ import { ES_FIELD_TYPES } from '@kbn/field-types';
|
|||
|
||||
import type { ElasticsearchClient } from 'src/core/server';
|
||||
|
||||
import type { SearchStrategyParams } from '../../../../common/search_strategies/types';
|
||||
|
||||
import type { CorrelationsParams } from '../../../../common/correlations/types';
|
||||
import {
|
||||
FIELD_PREFIX_TO_EXCLUDE_AS_CANDIDATE,
|
||||
FIELDS_TO_ADD_AS_CANDIDATE,
|
||||
FIELDS_TO_EXCLUDE_AS_CANDIDATE,
|
||||
POPULATED_DOC_COUNT_SAMPLE_SIZE,
|
||||
} from '../constants';
|
||||
import { hasPrefixToInclude } from '../utils';
|
||||
} from '../../../../common/correlations/constants';
|
||||
import { hasPrefixToInclude } from '../../../../common/correlations/utils';
|
||||
|
||||
import { getQueryWithParams } from './get_query_with_params';
|
||||
import { getRequestBase } from './get_request_base';
|
||||
|
@ -40,7 +39,7 @@ export const shouldBeExcluded = (fieldName: string) => {
|
|||
};
|
||||
|
||||
export const getRandomDocsRequest = (
|
||||
params: SearchStrategyParams
|
||||
params: CorrelationsParams
|
||||
): estypes.SearchRequest => ({
|
||||
...getRequestBase(params),
|
||||
body: {
|
||||
|
@ -59,7 +58,7 @@ export const getRandomDocsRequest = (
|
|||
|
||||
export const fetchTransactionDurationFieldCandidates = async (
|
||||
esClient: ElasticsearchClient,
|
||||
params: SearchStrategyParams
|
||||
params: CorrelationsParams
|
||||
): Promise<{ fieldCandidates: string[] }> => {
|
||||
const { index } = params;
|
||||
// Get all supported fields
|
|
@ -10,9 +10,6 @@ import type * as estypes from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
|
|||
import type { ElasticsearchClient } from 'src/core/server';
|
||||
import { ENVIRONMENT_ALL } from '../../../../common/environment_filter_values';
|
||||
|
||||
import { searchServiceLogProvider } from '../search_service_log';
|
||||
import { latencyCorrelationsSearchServiceStateProvider } from '../latency_correlations/latency_correlations_search_service_state';
|
||||
|
||||
import {
|
||||
fetchTransactionDurationFieldValuePairs,
|
||||
getTermsAggRequest,
|
||||
|
@ -66,21 +63,14 @@ describe('query_field_value_pairs', () => {
|
|||
search: esClientSearchMock,
|
||||
} as unknown as ElasticsearchClient;
|
||||
|
||||
const { addLogMessage, getLogMessages } = searchServiceLogProvider();
|
||||
const state = latencyCorrelationsSearchServiceStateProvider();
|
||||
|
||||
const resp = await fetchTransactionDurationFieldValuePairs(
|
||||
esClientMock,
|
||||
params,
|
||||
fieldCandidates,
|
||||
state,
|
||||
addLogMessage
|
||||
fieldCandidates
|
||||
);
|
||||
|
||||
const { progress } = state.getState();
|
||||
|
||||
expect(progress.loadedFieldValuePairs).toBe(1);
|
||||
expect(resp).toEqual([
|
||||
expect(resp.errors).toEqual([]);
|
||||
expect(resp.fieldValuePairs).toEqual([
|
||||
{ fieldName: 'myFieldCandidate1', fieldValue: 'myValue1' },
|
||||
{ fieldName: 'myFieldCandidate1', fieldValue: 'myValue2' },
|
||||
{ fieldName: 'myFieldCandidate2', fieldValue: 'myValue1' },
|
||||
|
@ -89,7 +79,6 @@ describe('query_field_value_pairs', () => {
|
|||
{ fieldName: 'myFieldCandidate3', fieldValue: 'myValue2' },
|
||||
]);
|
||||
expect(esClientSearchMock).toHaveBeenCalledTimes(3);
|
||||
expect(getLogMessages()).toEqual([]);
|
||||
});
|
||||
});
|
||||
});
|
|
@@ -0,0 +1,88 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import type { ElasticsearchClient } from 'src/core/server';
|
||||
|
||||
import type * as estypes from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
|
||||
|
||||
import type {
|
||||
FieldValuePair,
|
||||
CorrelationsParams,
|
||||
} from '../../../../common/correlations/types';
|
||||
import { TERMS_SIZE } from '../../../../common/correlations/constants';
|
||||
|
||||
import { splitAllSettledPromises } from '../utils';
|
||||
|
||||
import { getQueryWithParams } from './get_query_with_params';
|
||||
import { getRequestBase } from './get_request_base';
|
||||
|
||||
export const getTermsAggRequest = (
|
||||
params: CorrelationsParams,
|
||||
fieldName: string
|
||||
): estypes.SearchRequest => ({
|
||||
...getRequestBase(params),
|
||||
body: {
|
||||
query: getQueryWithParams({ params }),
|
||||
size: 0,
|
||||
aggs: {
|
||||
attribute_terms: {
|
||||
terms: {
|
||||
field: fieldName,
|
||||
size: TERMS_SIZE,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
const fetchTransactionDurationFieldTerms = async (
|
||||
esClient: ElasticsearchClient,
|
||||
params: CorrelationsParams,
|
||||
fieldName: string
|
||||
): Promise<FieldValuePair[]> => {
|
||||
const resp = await esClient.search(getTermsAggRequest(params, fieldName));
|
||||
|
||||
if (resp.body.aggregations === undefined) {
|
||||
throw new Error(
|
||||
'fetchTransactionDurationFieldTerms failed, did not return aggregations.'
|
||||
);
|
||||
}
|
||||
|
||||
const buckets = (
|
||||
resp.body.aggregations
|
||||
.attribute_terms as estypes.AggregationsMultiBucketAggregate<{
|
||||
key: string;
|
||||
key_as_string?: string;
|
||||
}>
|
||||
)?.buckets;
|
||||
if (buckets?.length >= 1) {
|
||||
return buckets.map((d) => ({
|
||||
fieldName,
|
||||
// The terms aggregation returns boolean fields as { key: 0, key_as_string: "false" },
|
||||
// so we need to pick `key_as_string` if it's present, otherwise searches on boolean fields would fail later on.
|
||||
fieldValue: d.key_as_string ?? d.key,
|
||||
}));
|
||||
}
|
||||
|
||||
return [];
|
||||
};
|
||||
|
||||
export const fetchTransactionDurationFieldValuePairs = async (
|
||||
esClient: ElasticsearchClient,
|
||||
params: CorrelationsParams,
|
||||
fieldCandidates: string[]
|
||||
): Promise<{ fieldValuePairs: FieldValuePair[]; errors: any[] }> => {
|
||||
const { fulfilled: responses, rejected: errors } = splitAllSettledPromises(
|
||||
await Promise.allSettled(
|
||||
fieldCandidates.map((fieldCandidate) =>
|
||||
fetchTransactionDurationFieldTerms(esClient, params, fieldCandidate)
|
||||
)
|
||||
)
|
||||
);
|
||||
|
||||
return { fieldValuePairs: responses.flat(), errors };
|
||||
};
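For reference, splitAllSettledPromises() is imported from '../utils' but its implementation is
not part of this excerpt. A minimal sketch, inferred only from how it is consumed here
(splitting Promise.allSettled() results into fulfilled values and rejection reasons), could
look like the following; the real util may differ in detail:

// Sketch only: split settled promise results into values and rejection reasons.
export function splitAllSettledPromises<T>(
  settled: Array<PromiseSettledResult<T>>
): { fulfilled: T[]; rejected: unknown[] } {
  const fulfilled: T[] = [];
  const rejected: unknown[] = [];

  for (const result of settled) {
    if (result.status === 'fulfilled') {
      fulfilled.push(result.value);
    } else {
      rejected.push(result.reason);
    }
  }

  return { fulfilled, rejected };
}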
|
|
@ -47,6 +47,7 @@ describe('query_fractions', () => {
|
|||
} => {
|
||||
return {
|
||||
body: {
|
||||
hits: { total: { value: 3 } },
|
||||
aggregations: {
|
||||
latency_ranges: {
|
||||
buckets: [{ doc_count: 1 }, { doc_count: 2 }],
|
|
@ -8,14 +8,14 @@
|
|||
import { ElasticsearchClient } from 'kibana/server';
|
||||
import type * as estypes from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
|
||||
|
||||
import { SearchStrategyParams } from '../../../../common/search_strategies/types';
|
||||
import { CorrelationsParams } from '../../../../common/correlations/types';
|
||||
import { TRANSACTION_DURATION } from '../../../../common/elasticsearch_fieldnames';
|
||||
|
||||
import { getQueryWithParams } from './get_query_with_params';
|
||||
import { getRequestBase } from './get_request_base';
|
||||
|
||||
export const getTransactionDurationRangesRequest = (
|
||||
params: SearchStrategyParams,
|
||||
params: CorrelationsParams,
|
||||
ranges: estypes.AggregationsAggregationRange[]
|
||||
): estypes.SearchRequest => ({
|
||||
...getRequestBase(params),
|
||||
|
@ -38,12 +38,20 @@ export const getTransactionDurationRangesRequest = (
|
|||
*/
|
||||
export const fetchTransactionDurationFractions = async (
|
||||
esClient: ElasticsearchClient,
|
||||
params: SearchStrategyParams,
|
||||
params: CorrelationsParams,
|
||||
ranges: estypes.AggregationsAggregationRange[]
|
||||
): Promise<{ fractions: number[]; totalDocCount: number }> => {
|
||||
const resp = await esClient.search(
|
||||
getTransactionDurationRangesRequest(params, ranges)
|
||||
);
|
||||
|
||||
if ((resp.body.hits.total as estypes.SearchTotalHits).value === 0) {
|
||||
return {
|
||||
fractions: [],
|
||||
totalDocCount: 0,
|
||||
};
|
||||
}
|
||||
|
||||
if (resp.body.aggregations === undefined) {
|
||||
throw new Error(
|
||||
'fetchTransactionDurationFractions failed, did not return aggregations.'
|
|
@ -14,14 +14,14 @@ import type {
|
|||
FieldValuePair,
|
||||
HistogramItem,
|
||||
ResponseHit,
|
||||
SearchStrategyParams,
|
||||
} from '../../../../common/search_strategies/types';
|
||||
CorrelationsParams,
|
||||
} from '../../../../common/correlations/types';
|
||||
|
||||
import { getQueryWithParams } from './get_query_with_params';
|
||||
import { getRequestBase } from './get_request_base';
|
||||
|
||||
export const getTransactionDurationHistogramRequest = (
|
||||
params: SearchStrategyParams,
|
||||
params: CorrelationsParams,
|
||||
interval: number,
|
||||
termFilters?: FieldValuePair[]
|
||||
): estypes.SearchRequest => ({
|
||||
|
@ -39,7 +39,7 @@ export const getTransactionDurationHistogramRequest = (
|
|||
|
||||
export const fetchTransactionDurationHistogram = async (
|
||||
esClient: ElasticsearchClient,
|
||||
params: SearchStrategyParams,
|
||||
params: CorrelationsParams,
|
||||
interval: number,
|
||||
termFilters?: FieldValuePair[]
|
||||
): Promise<HistogramItem[]> => {
|
|
@ -12,7 +12,7 @@ import type * as estypes from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
|
|||
import type { ElasticsearchClient } from 'src/core/server';
|
||||
|
||||
import { TRANSACTION_DURATION } from '../../../../common/elasticsearch_fieldnames';
|
||||
import type { SearchStrategyParams } from '../../../../common/search_strategies/types';
|
||||
import type { CorrelationsParams } from '../../../../common/correlations/types';
|
||||
|
||||
import { getQueryWithParams } from './get_query_with_params';
|
||||
import { getRequestBase } from './get_request_base';
|
||||
|
@ -31,7 +31,7 @@ export const getHistogramRangeSteps = (
|
|||
};
|
||||
|
||||
export const getHistogramIntervalRequest = (
|
||||
params: SearchStrategyParams
|
||||
params: CorrelationsParams
|
||||
): estypes.SearchRequest => ({
|
||||
...getRequestBase(params),
|
||||
body: {
|
||||
|
@ -46,7 +46,7 @@ export const getHistogramIntervalRequest = (
|
|||
|
||||
export const fetchTransactionDurationHistogramRangeSteps = async (
|
||||
esClient: ElasticsearchClient,
|
||||
params: SearchStrategyParams
|
||||
params: CorrelationsParams
|
||||
): Promise<number[]> => {
|
||||
const steps = 100;
|
||||
|
|
@@ -0,0 +1,58 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import type { ElasticsearchClient } from 'src/core/server';
|
||||
|
||||
import type { CorrelationsParams } from '../../../../common/correlations/types';
|
||||
import type { FailedTransactionsCorrelation } from '../../../../common/correlations/failed_transactions_correlations/types';
|
||||
import { ERROR_CORRELATION_THRESHOLD } from '../../../../common/correlations/constants';
|
||||
|
||||
import { splitAllSettledPromises } from '../utils';
|
||||
|
||||
import {
|
||||
fetchFailedTransactionsCorrelationPValues,
|
||||
fetchTransactionDurationHistogramRangeSteps,
|
||||
} from './index';
|
||||
|
||||
export const fetchPValues = async (
|
||||
esClient: ElasticsearchClient,
|
||||
paramsWithIndex: CorrelationsParams,
|
||||
fieldCandidates: string[]
|
||||
) => {
|
||||
const histogramRangeSteps = await fetchTransactionDurationHistogramRangeSteps(
|
||||
esClient,
|
||||
paramsWithIndex
|
||||
);
|
||||
|
||||
const { fulfilled, rejected } = splitAllSettledPromises(
|
||||
await Promise.allSettled(
|
||||
fieldCandidates.map((fieldName) =>
|
||||
fetchFailedTransactionsCorrelationPValues(
|
||||
esClient,
|
||||
paramsWithIndex,
|
||||
histogramRangeSteps,
|
||||
fieldName
|
||||
)
|
||||
)
|
||||
)
|
||||
);
|
||||
|
||||
const failedTransactionsCorrelations: FailedTransactionsCorrelation[] =
|
||||
fulfilled
|
||||
.flat()
|
||||
.filter(
|
||||
(record) =>
|
||||
record &&
|
||||
typeof record.pValue === 'number' &&
|
||||
record.pValue < ERROR_CORRELATION_THRESHOLD
|
||||
);
|
||||
|
||||
const ccsWarning =
|
||||
rejected.length > 0 && paramsWithIndex?.index.includes(':');
|
||||
|
||||
return { failedTransactionsCorrelations, ccsWarning };
|
||||
};
|
|
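A minimal calling sketch for the helper above (the candidate field names are hypothetical, not taken from this diff): fetchPValues fans out one p-value query per candidate via Promise.allSettled, keeps only fulfilled results whose pValue is below ERROR_CORRELATION_THRESHOLD, and flags a possible cross-cluster-search issue when any query rejected and the index pattern contains ':'.

// Sketch only - assumes esClient and paramsWithIndex are set up as in the endpoint handlers.
const { failedTransactionsCorrelations, ccsWarning } = await fetchPValues(
  esClient,
  paramsWithIndex,
  ['service.version', 'host.hostname'] // hypothetical field candidates
);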
@@ -10,18 +10,18 @@ import type * as estypes from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
import type { ElasticsearchClient } from 'src/core/server';

import { TRANSACTION_DURATION } from '../../../../common/elasticsearch_fieldnames';
import { SIGNIFICANT_VALUE_DIGITS } from '../../../../common/correlations/constants';
import type {
  FieldValuePair,
  ResponseHit,
  SearchStrategyParams,
} from '../../../../common/search_strategies/types';
  CorrelationsParams,
} from '../../../../common/correlations/types';

import { getQueryWithParams } from './get_query_with_params';
import { getRequestBase } from './get_request_base';
import { SIGNIFICANT_VALUE_DIGITS } from '../constants';

export const getTransactionDurationPercentilesRequest = (
  params: SearchStrategyParams,
  params: CorrelationsParams,
  percents?: number[],
  termFilters?: FieldValuePair[]
): estypes.SearchRequest => {
@@ -50,7 +50,7 @@ export const getTransactionDurationPercentilesRequest = (

export const fetchTransactionDurationPercentiles = async (
  esClient: ElasticsearchClient,
  params: SearchStrategyParams,
  params: CorrelationsParams,
  percents?: number[],
  termFilters?: FieldValuePair[]
): Promise<{ totalDocs: number; percentiles: Record<string, number> }> => {
@@ -13,14 +13,14 @@ import { TRANSACTION_DURATION } from '../../../../common/elasticsearch_fieldname
import type {
  FieldValuePair,
  ResponseHit,
  SearchStrategyParams,
} from '../../../../common/search_strategies/types';
  CorrelationsParams,
} from '../../../../common/correlations/types';

import { getQueryWithParams } from './get_query_with_params';
import { getRequestBase } from './get_request_base';

export const getTransactionDurationRangesRequest = (
  params: SearchStrategyParams,
  params: CorrelationsParams,
  rangesSteps: number[],
  termFilters?: FieldValuePair[]
): estypes.SearchRequest => {
@@ -57,7 +57,7 @@ export const getTransactionDurationRangesRequest = (

export const fetchTransactionDurationRanges = async (
  esClient: ElasticsearchClient,
  params: SearchStrategyParams,
  params: CorrelationsParams,
  rangesSteps: number[],
  termFilters?: FieldValuePair[]
): Promise<Array<{ key: number; doc_count: number }>> => {
@@ -0,0 +1,87 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { range } from 'lodash';

import type { ElasticsearchClient } from 'src/core/server';

import type {
  FieldValuePair,
  CorrelationsParams,
} from '../../../../common/correlations/types';
import { LatencyCorrelation } from '../../../../common/correlations/latency_correlations/types';

import {
  computeExpectationsAndRanges,
  splitAllSettledPromises,
} from '../utils';

import {
  fetchTransactionDurationCorrelationWithHistogram,
  fetchTransactionDurationFractions,
  fetchTransactionDurationHistogramRangeSteps,
  fetchTransactionDurationPercentiles,
} from './index';

export const fetchSignificantCorrelations = async (
  esClient: ElasticsearchClient,
  paramsWithIndex: CorrelationsParams,
  fieldValuePairs: FieldValuePair[]
) => {
  // Create an array of ranges [2, 4, 6, ..., 98]
  const percentileAggregationPercents = range(2, 100, 2);
  const { percentiles: percentilesRecords } =
    await fetchTransactionDurationPercentiles(
      esClient,
      paramsWithIndex,
      percentileAggregationPercents
    );

  // We need to round the percentiles values
  // because the queries we're using based on it
  // later on wouldn't allow numbers with decimals.
  const percentiles = Object.values(percentilesRecords).map(Math.round);

  const { expectations, ranges } = computeExpectationsAndRanges(percentiles);

  const { fractions, totalDocCount } = await fetchTransactionDurationFractions(
    esClient,
    paramsWithIndex,
    ranges
  );

  const histogramRangeSteps = await fetchTransactionDurationHistogramRangeSteps(
    esClient,
    paramsWithIndex
  );

  const { fulfilled, rejected } = splitAllSettledPromises(
    await Promise.allSettled(
      fieldValuePairs.map((fieldValuePair) =>
        fetchTransactionDurationCorrelationWithHistogram(
          esClient,
          paramsWithIndex,
          expectations,
          ranges,
          fractions,
          histogramRangeSteps,
          totalDocCount,
          fieldValuePair
        )
      )
    )
  );

  const latencyCorrelations: LatencyCorrelation[] = fulfilled.filter(
    (d): d is LatencyCorrelation => d !== undefined
  );

  const ccsWarning =
    rejected.length > 0 && paramsWithIndex?.index.includes(':');

  return { latencyCorrelations, ccsWarning, totalDocCount };
};
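For illustration, a hedged usage sketch of the function above; the field/value pair below is made up and not part of the diff:

// Sketch only - runs the full significant-correlations sequence for one candidate pair.
const { latencyCorrelations, ccsWarning, totalDocCount } =
  await fetchSignificantCorrelations(esClient, paramsWithIndex, [
    { fieldName: 'service.version', fieldValue: '1.2.3' }, // hypothetical pair
  ]);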
@@ -6,7 +6,8 @@
 */

import type * as estypes from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
import { PERCENTILES_STEP } from '../constants';

import { PERCENTILES_STEP } from '../../../../common/correlations/constants';

export const computeExpectationsAndRanges = (
  percentiles: number[],
@@ -29,15 +30,17 @@ export const computeExpectationsAndRanges = (
  }
  tempFractions.push(PERCENTILES_STEP / 100);

  const ranges = tempPercentiles.reduce((p, to) => {
    const from = p[p.length - 1]?.to;
    if (from !== undefined) {
      p.push({ from, to });
    } else {
      p.push({ to });
    }
    return p;
  }, [] as Array<{ from?: number; to?: number }>);
  const ranges = tempPercentiles
    .map((tP) => Math.round(tP))
    .reduce((p, to) => {
      const from = p[p.length - 1]?.to;
      if (from !== undefined) {
        p.push({ from, to });
      } else {
        p.push({ to });
      }
      return p;
    }, [] as Array<{ from?: number; to?: number }>);
  if (ranges.length > 0) {
    ranges.push({ from: ranges[ranges.length - 1].to });
  }
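To illustrate the reduce above (assuming, purely for the example, that tempPercentiles already holds deduplicated values such as [100, 200, 300] from the earlier steps outside this hunk), the result is a set of half-open buckets plus an unbounded tail:

// ranges === [{ to: 100 }, { from: 100, to: 200 }, { from: 200, to: 300 }, { from: 300 }]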
@@ -6,4 +6,4 @@
 */

export { computeExpectationsAndRanges } from './compute_expectations_and_ranges';
export { hasPrefixToInclude } from './has_prefix_to_include';
export { splitAllSettledPromises } from './split_all_settled_promises';
@@ -0,0 +1,29 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

interface HandledPromises<T> {
  fulfilled: T[];
  rejected: unknown[];
}

export const splitAllSettledPromises = <T = unknown>(
  promises: Array<PromiseSettledResult<T>>
): HandledPromises<T> =>
  promises.reduce(
    (result, current) => {
      if (current.status === 'fulfilled') {
        result.fulfilled.push(current.value as T);
      } else if (current.status === 'rejected') {
        result.rejected.push(current.reason);
      }
      return result;
    },
    {
      fulfilled: [],
      rejected: [],
    } as HandledPromises<T>
  );
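A small usage sketch of the new util above; the promise values are illustrative only:

const settled = await Promise.allSettled([
  Promise.resolve(1),
  Promise.reject(new Error('boom')),
]);
const { fulfilled, rejected } = splitAllSettledPromises(settled);
// fulfilled === [1]; rejected === [Error: boom]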
@@ -14,8 +14,8 @@ import { withApmSpan } from '../../utils/with_apm_span';
import {
  getHistogramIntervalRequest,
  getHistogramRangeSteps,
} from '../search_strategies/queries/query_histogram_range_steps';
import { getTransactionDurationRangesRequest } from '../search_strategies/queries/query_ranges';
} from '../correlations/queries/query_histogram_range_steps';
import { getTransactionDurationRangesRequest } from '../correlations/queries/query_ranges';

import { getPercentileThresholdValue } from './get_percentile_threshold_value';
import type {
@@ -27,9 +27,7 @@ export async function getOverallLatencyDistribution(
  options: OverallLatencyDistributionOptions
) {
  return withApmSpan('get_overall_latency_distribution', async () => {
    const overallLatencyDistribution: OverallLatencyDistributionResponse = {
      log: [],
    };
    const overallLatencyDistribution: OverallLatencyDistributionResponse = {};

    const { setup, termFilters, ...rawParams } = options;
    const { apmEventClient } = setup;
@@ -9,7 +9,7 @@ import type * as estypes from '@elastic/elasticsearch/lib/api/typesWithBodyKey';

import { ProcessorEvent } from '../../../common/processor_event';

import { getTransactionDurationPercentilesRequest } from '../search_strategies/queries/query_percentiles';
import { getTransactionDurationPercentilesRequest } from '../correlations/queries/query_percentiles';

import type { OverallLatencyDistributionOptions } from './types';
@@ -7,20 +7,19 @@

import type {
  FieldValuePair,
  SearchStrategyClientParams,
} from '../../../common/search_strategies/types';
  CorrelationsClientParams,
} from '../../../common/correlations/types';

import { Setup } from '../helpers/setup_request';

export interface OverallLatencyDistributionOptions
  extends SearchStrategyClientParams {
  extends CorrelationsClientParams {
  percentileThreshold: number;
  termFilters?: FieldValuePair[];
  setup: Setup;
}

export interface OverallLatencyDistributionResponse {
  log: string[];
  percentileThresholdValue?: number;
  overallHistogram?: Array<{
    key: number;
@ -1,259 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { chunk } from 'lodash';
|
||||
|
||||
import type { ElasticsearchClient } from 'src/core/server';
|
||||
|
||||
import { EVENT_OUTCOME } from '../../../../common/elasticsearch_fieldnames';
|
||||
import { EventOutcome } from '../../../../common/event_outcome';
|
||||
import type {
|
||||
SearchStrategyClientParams,
|
||||
SearchStrategyServerParams,
|
||||
RawResponseBase,
|
||||
} from '../../../../common/search_strategies/types';
|
||||
import type {
|
||||
FailedTransactionsCorrelationsParams,
|
||||
FailedTransactionsCorrelationsRawResponse,
|
||||
} from '../../../../common/search_strategies/failed_transactions_correlations/types';
|
||||
import type { ApmIndicesConfig } from '../../settings/apm_indices/get_apm_indices';
|
||||
import { searchServiceLogProvider } from '../search_service_log';
|
||||
import {
|
||||
fetchFailedTransactionsCorrelationPValues,
|
||||
fetchTransactionDurationFieldCandidates,
|
||||
fetchTransactionDurationPercentiles,
|
||||
fetchTransactionDurationRanges,
|
||||
fetchTransactionDurationHistogramRangeSteps,
|
||||
} from '../queries';
|
||||
import type { SearchServiceProvider } from '../search_strategy_provider';
|
||||
|
||||
import { failedTransactionsCorrelationsSearchServiceStateProvider } from './failed_transactions_correlations_search_service_state';
|
||||
|
||||
import { ERROR_CORRELATION_THRESHOLD } from '../constants';
|
||||
import { fetchFieldsStats } from '../queries/field_stats/get_fields_stats';
|
||||
|
||||
type FailedTransactionsCorrelationsSearchServiceProvider =
|
||||
SearchServiceProvider<
|
||||
FailedTransactionsCorrelationsParams & SearchStrategyClientParams,
|
||||
FailedTransactionsCorrelationsRawResponse & RawResponseBase
|
||||
>;
|
||||
|
||||
export const failedTransactionsCorrelationsSearchServiceProvider: FailedTransactionsCorrelationsSearchServiceProvider =
|
||||
(
|
||||
esClient: ElasticsearchClient,
|
||||
getApmIndices: () => Promise<ApmIndicesConfig>,
|
||||
searchServiceParams: FailedTransactionsCorrelationsParams &
|
||||
SearchStrategyClientParams,
|
||||
includeFrozen: boolean
|
||||
) => {
|
||||
const { addLogMessage, getLogMessages } = searchServiceLogProvider();
|
||||
|
||||
const state = failedTransactionsCorrelationsSearchServiceStateProvider();
|
||||
|
||||
async function fetchErrorCorrelations() {
|
||||
try {
|
||||
const indices = await getApmIndices();
|
||||
const params: FailedTransactionsCorrelationsParams &
|
||||
SearchStrategyClientParams &
|
||||
SearchStrategyServerParams = {
|
||||
...searchServiceParams,
|
||||
index: indices.transaction,
|
||||
includeFrozen,
|
||||
};
|
||||
|
||||
// 95th percentile to be displayed as a marker in the log log chart
|
||||
const { totalDocs, percentiles: percentilesResponseThresholds } =
|
||||
await fetchTransactionDurationPercentiles(
|
||||
esClient,
|
||||
params,
|
||||
params.percentileThreshold
|
||||
? [params.percentileThreshold]
|
||||
: undefined
|
||||
);
|
||||
const percentileThresholdValue =
|
||||
percentilesResponseThresholds[`${params.percentileThreshold}.0`];
|
||||
state.setPercentileThresholdValue(percentileThresholdValue);
|
||||
|
||||
addLogMessage(
|
||||
`Fetched ${params.percentileThreshold}th percentile value of ${percentileThresholdValue} based on ${totalDocs} documents.`
|
||||
);
|
||||
|
||||
// finish early if we weren't able to identify the percentileThresholdValue.
|
||||
if (percentileThresholdValue === undefined) {
|
||||
addLogMessage(
|
||||
`Abort service since percentileThresholdValue could not be determined.`
|
||||
);
|
||||
state.setProgress({
|
||||
loadedFieldCandidates: 1,
|
||||
loadedErrorCorrelations: 1,
|
||||
loadedOverallHistogram: 1,
|
||||
loadedFailedTransactionsCorrelations: 1,
|
||||
});
|
||||
state.setIsRunning(false);
|
||||
return;
|
||||
}
|
||||
|
||||
const histogramRangeSteps =
|
||||
await fetchTransactionDurationHistogramRangeSteps(esClient, params);
|
||||
|
||||
const overallLogHistogramChartData =
|
||||
await fetchTransactionDurationRanges(
|
||||
esClient,
|
||||
params,
|
||||
histogramRangeSteps
|
||||
);
|
||||
const errorLogHistogramChartData = await fetchTransactionDurationRanges(
|
||||
esClient,
|
||||
params,
|
||||
histogramRangeSteps,
|
||||
[{ fieldName: EVENT_OUTCOME, fieldValue: EventOutcome.failure }]
|
||||
);
|
||||
|
||||
state.setProgress({ loadedOverallHistogram: 1 });
|
||||
state.setErrorHistogram(errorLogHistogramChartData);
|
||||
state.setOverallHistogram(overallLogHistogramChartData);
|
||||
|
||||
const { fieldCandidates: candidates } =
|
||||
await fetchTransactionDurationFieldCandidates(esClient, params);
|
||||
|
||||
const fieldCandidates = candidates.filter(
|
||||
(t) => !(t === EVENT_OUTCOME)
|
||||
);
|
||||
|
||||
addLogMessage(`Identified ${fieldCandidates.length} fieldCandidates.`);
|
||||
|
||||
state.setProgress({ loadedFieldCandidates: 1 });
|
||||
|
||||
let fieldCandidatesFetchedCount = 0;
|
||||
const fieldsToSample = new Set<string>();
|
||||
if (params !== undefined && fieldCandidates.length > 0) {
|
||||
const batches = chunk(fieldCandidates, 10);
|
||||
for (let i = 0; i < batches.length; i++) {
|
||||
try {
|
||||
const results = await Promise.allSettled(
|
||||
batches[i].map((fieldName) =>
|
||||
fetchFailedTransactionsCorrelationPValues(
|
||||
esClient,
|
||||
params,
|
||||
histogramRangeSteps,
|
||||
fieldName
|
||||
)
|
||||
)
|
||||
);
|
||||
|
||||
results.forEach((result, idx) => {
|
||||
if (result.status === 'fulfilled') {
|
||||
const significantCorrelations = result.value.filter(
|
||||
(record) =>
|
||||
record &&
|
||||
record.pValue !== undefined &&
|
||||
record.pValue < ERROR_CORRELATION_THRESHOLD
|
||||
);
|
||||
|
||||
significantCorrelations.forEach((r) => {
|
||||
fieldsToSample.add(r.fieldName);
|
||||
});
|
||||
|
||||
state.addFailedTransactionsCorrelations(
|
||||
significantCorrelations
|
||||
);
|
||||
} else {
|
||||
// If one of the fields in the batch had an error
|
||||
addLogMessage(
|
||||
`Error getting error correlation for field ${batches[i][idx]}: ${result.reason}.`
|
||||
);
|
||||
}
|
||||
});
|
||||
} catch (e) {
|
||||
state.setError(e);
|
||||
|
||||
if (params?.index.includes(':')) {
|
||||
state.setCcsWarning(true);
|
||||
}
|
||||
} finally {
|
||||
fieldCandidatesFetchedCount += batches[i].length;
|
||||
state.setProgress({
|
||||
loadedFailedTransactionsCorrelations:
|
||||
fieldCandidatesFetchedCount / fieldCandidates.length,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
addLogMessage(
|
||||
`Identified correlations for ${fieldCandidatesFetchedCount} fields out of ${fieldCandidates.length} candidates.`
|
||||
);
|
||||
}
|
||||
|
||||
addLogMessage(
|
||||
`Identified ${fieldsToSample.size} fields to sample for field statistics.`
|
||||
);
|
||||
|
||||
const { stats: fieldStats } = await fetchFieldsStats(
|
||||
esClient,
|
||||
params,
|
||||
[...fieldsToSample],
|
||||
[{ fieldName: EVENT_OUTCOME, fieldValue: EventOutcome.failure }]
|
||||
);
|
||||
|
||||
addLogMessage(
|
||||
`Retrieved field statistics for ${fieldStats.length} fields out of ${fieldsToSample.size} fields.`
|
||||
);
|
||||
|
||||
state.addFieldStats(fieldStats);
|
||||
} catch (e) {
|
||||
state.setError(e);
|
||||
}
|
||||
|
||||
addLogMessage(
|
||||
`Identified ${
|
||||
state.getState().failedTransactionsCorrelations.length
|
||||
} significant correlations relating to failed transactions.`
|
||||
);
|
||||
|
||||
state.setIsRunning(false);
|
||||
}
|
||||
|
||||
fetchErrorCorrelations();
|
||||
|
||||
return () => {
|
||||
const {
|
||||
ccsWarning,
|
||||
error,
|
||||
isRunning,
|
||||
overallHistogram,
|
||||
errorHistogram,
|
||||
percentileThresholdValue,
|
||||
progress,
|
||||
fieldStats,
|
||||
} = state.getState();
|
||||
|
||||
return {
|
||||
cancel: () => {
|
||||
addLogMessage(`Service cancelled.`);
|
||||
state.setIsCancelled(true);
|
||||
},
|
||||
error,
|
||||
meta: {
|
||||
loaded: Math.round(state.getOverallProgress() * 100),
|
||||
total: 100,
|
||||
isRunning,
|
||||
isPartial: isRunning,
|
||||
},
|
||||
rawResponse: {
|
||||
ccsWarning,
|
||||
log: getLogMessages(),
|
||||
took: Date.now() - progress.started,
|
||||
failedTransactionsCorrelations:
|
||||
state.getFailedTransactionsCorrelationsSortedByScore(),
|
||||
overallHistogram,
|
||||
errorHistogram,
|
||||
percentileThresholdValue,
|
||||
fieldStats,
|
||||
},
|
||||
};
|
||||
};
|
||||
};
|
|
@ -1,131 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { FailedTransactionsCorrelation } from '../../../../common/search_strategies/failed_transactions_correlations/types';
|
||||
|
||||
import type { HistogramItem } from '../../../../common/search_strategies/types';
|
||||
import { FieldStats } from '../../../../common/search_strategies/field_stats_types';
|
||||
|
||||
interface Progress {
|
||||
started: number;
|
||||
loadedFieldCandidates: number;
|
||||
loadedErrorCorrelations: number;
|
||||
loadedOverallHistogram: number;
|
||||
loadedFailedTransactionsCorrelations: number;
|
||||
}
|
||||
|
||||
export const failedTransactionsCorrelationsSearchServiceStateProvider = () => {
|
||||
let ccsWarning = false;
|
||||
function setCcsWarning(d: boolean) {
|
||||
ccsWarning = d;
|
||||
}
|
||||
|
||||
let error: Error;
|
||||
function setError(d: Error) {
|
||||
error = d;
|
||||
}
|
||||
|
||||
let isCancelled = false;
|
||||
function setIsCancelled(d: boolean) {
|
||||
isCancelled = d;
|
||||
}
|
||||
|
||||
let isRunning = true;
|
||||
function setIsRunning(d: boolean) {
|
||||
isRunning = d;
|
||||
}
|
||||
|
||||
let errorHistogram: HistogramItem[] | undefined;
|
||||
function setErrorHistogram(d: HistogramItem[]) {
|
||||
errorHistogram = d;
|
||||
}
|
||||
|
||||
let overallHistogram: HistogramItem[] | undefined;
|
||||
function setOverallHistogram(d: HistogramItem[]) {
|
||||
overallHistogram = d;
|
||||
}
|
||||
|
||||
let percentileThresholdValue: number;
|
||||
function setPercentileThresholdValue(d: number) {
|
||||
percentileThresholdValue = d;
|
||||
}
|
||||
|
||||
let progress: Progress = {
|
||||
started: Date.now(),
|
||||
loadedFieldCandidates: 0,
|
||||
loadedErrorCorrelations: 0,
|
||||
loadedOverallHistogram: 0,
|
||||
loadedFailedTransactionsCorrelations: 0,
|
||||
};
|
||||
function getOverallProgress() {
|
||||
return (
|
||||
progress.loadedFieldCandidates * 0.025 +
|
||||
progress.loadedFailedTransactionsCorrelations * (1 - 0.025)
|
||||
);
|
||||
}
|
||||
function setProgress(d: Partial<Omit<Progress, 'started'>>) {
|
||||
progress = {
|
||||
...progress,
|
||||
...d,
|
||||
};
|
||||
}
|
||||
|
||||
const fieldStats: FieldStats[] = [];
|
||||
function addFieldStats(stats: FieldStats[]) {
|
||||
fieldStats.push(...stats);
|
||||
}
|
||||
|
||||
const failedTransactionsCorrelations: FailedTransactionsCorrelation[] = [];
|
||||
function addFailedTransactionsCorrelation(d: FailedTransactionsCorrelation) {
|
||||
failedTransactionsCorrelations.push(d);
|
||||
}
|
||||
function addFailedTransactionsCorrelations(
|
||||
d: FailedTransactionsCorrelation[]
|
||||
) {
|
||||
failedTransactionsCorrelations.push(...d);
|
||||
}
|
||||
|
||||
function getFailedTransactionsCorrelationsSortedByScore() {
|
||||
return failedTransactionsCorrelations.sort((a, b) => b.score - a.score);
|
||||
}
|
||||
|
||||
function getState() {
|
||||
return {
|
||||
ccsWarning,
|
||||
error,
|
||||
isCancelled,
|
||||
isRunning,
|
||||
overallHistogram,
|
||||
errorHistogram,
|
||||
percentileThresholdValue,
|
||||
progress,
|
||||
failedTransactionsCorrelations,
|
||||
fieldStats,
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
addFailedTransactionsCorrelation,
|
||||
addFailedTransactionsCorrelations,
|
||||
getOverallProgress,
|
||||
getState,
|
||||
getFailedTransactionsCorrelationsSortedByScore,
|
||||
setCcsWarning,
|
||||
setError,
|
||||
setIsCancelled,
|
||||
setIsRunning,
|
||||
setOverallHistogram,
|
||||
setErrorHistogram,
|
||||
setPercentileThresholdValue,
|
||||
setProgress,
|
||||
addFieldStats,
|
||||
};
|
||||
};
|
||||
|
||||
export type FailedTransactionsCorrelationsSearchServiceState = ReturnType<
|
||||
typeof failedTransactionsCorrelationsSearchServiceStateProvider
|
||||
>;
|
|
@@ -1,8 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

export { registerSearchStrategies } from './register_search_strategies';
@@ -1,8 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

export { latencyCorrelationsSearchServiceProvider } from './latency_correlations_search_service';
@ -1,293 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { range } from 'lodash';
|
||||
import type { ElasticsearchClient } from 'src/core/server';
|
||||
|
||||
import type {
|
||||
RawResponseBase,
|
||||
SearchStrategyClientParams,
|
||||
SearchStrategyServerParams,
|
||||
} from '../../../../common/search_strategies/types';
|
||||
import type {
|
||||
LatencyCorrelationsParams,
|
||||
LatencyCorrelationsRawResponse,
|
||||
} from '../../../../common/search_strategies/latency_correlations/types';
|
||||
|
||||
import type { ApmIndicesConfig } from '../../settings/apm_indices/get_apm_indices';
|
||||
|
||||
import {
|
||||
fetchTransactionDurationFieldCandidates,
|
||||
fetchTransactionDurationFieldValuePairs,
|
||||
fetchTransactionDurationFractions,
|
||||
fetchTransactionDurationPercentiles,
|
||||
fetchTransactionDurationHistograms,
|
||||
fetchTransactionDurationHistogramRangeSteps,
|
||||
fetchTransactionDurationRanges,
|
||||
} from '../queries';
|
||||
import { computeExpectationsAndRanges } from '../utils';
|
||||
import { searchServiceLogProvider } from '../search_service_log';
|
||||
import type { SearchServiceProvider } from '../search_strategy_provider';
|
||||
|
||||
import { latencyCorrelationsSearchServiceStateProvider } from './latency_correlations_search_service_state';
|
||||
import { fetchFieldsStats } from '../queries/field_stats/get_fields_stats';
|
||||
|
||||
type LatencyCorrelationsSearchServiceProvider = SearchServiceProvider<
|
||||
LatencyCorrelationsParams & SearchStrategyClientParams,
|
||||
LatencyCorrelationsRawResponse & RawResponseBase
|
||||
>;
|
||||
|
||||
export const latencyCorrelationsSearchServiceProvider: LatencyCorrelationsSearchServiceProvider =
|
||||
(
|
||||
esClient: ElasticsearchClient,
|
||||
getApmIndices: () => Promise<ApmIndicesConfig>,
|
||||
searchServiceParams: LatencyCorrelationsParams & SearchStrategyClientParams,
|
||||
includeFrozen: boolean
|
||||
) => {
|
||||
const { addLogMessage, getLogMessages } = searchServiceLogProvider();
|
||||
|
||||
const state = latencyCorrelationsSearchServiceStateProvider();
|
||||
|
||||
async function fetchCorrelations() {
|
||||
let params:
|
||||
| (LatencyCorrelationsParams &
|
||||
SearchStrategyClientParams &
|
||||
SearchStrategyServerParams)
|
||||
| undefined;
|
||||
|
||||
try {
|
||||
const indices = await getApmIndices();
|
||||
params = {
|
||||
...searchServiceParams,
|
||||
index: indices.transaction,
|
||||
includeFrozen,
|
||||
};
|
||||
|
||||
// 95th percentile to be displayed as a marker in the log log chart
|
||||
const { totalDocs, percentiles: percentilesResponseThresholds } =
|
||||
await fetchTransactionDurationPercentiles(
|
||||
esClient,
|
||||
params,
|
||||
params.percentileThreshold
|
||||
? [params.percentileThreshold]
|
||||
: undefined
|
||||
);
|
||||
const percentileThresholdValue =
|
||||
percentilesResponseThresholds[`${params.percentileThreshold}.0`];
|
||||
state.setPercentileThresholdValue(percentileThresholdValue);
|
||||
|
||||
addLogMessage(
|
||||
`Fetched ${params.percentileThreshold}th percentile value of ${percentileThresholdValue} based on ${totalDocs} documents.`
|
||||
);
|
||||
|
||||
// finish early if we weren't able to identify the percentileThresholdValue.
|
||||
if (percentileThresholdValue === undefined) {
|
||||
addLogMessage(
|
||||
`Abort service since percentileThresholdValue could not be determined.`
|
||||
);
|
||||
state.setProgress({
|
||||
loadedHistogramStepsize: 1,
|
||||
loadedOverallHistogram: 1,
|
||||
loadedFieldCandidates: 1,
|
||||
loadedFieldValuePairs: 1,
|
||||
loadedHistograms: 1,
|
||||
});
|
||||
state.setIsRunning(false);
|
||||
return;
|
||||
}
|
||||
|
||||
const histogramRangeSteps =
|
||||
await fetchTransactionDurationHistogramRangeSteps(esClient, params);
|
||||
state.setProgress({ loadedHistogramStepsize: 1 });
|
||||
|
||||
addLogMessage(`Loaded histogram range steps.`);
|
||||
|
||||
if (state.getIsCancelled()) {
|
||||
state.setIsRunning(false);
|
||||
return;
|
||||
}
|
||||
|
||||
const overallLogHistogramChartData =
|
||||
await fetchTransactionDurationRanges(
|
||||
esClient,
|
||||
params,
|
||||
histogramRangeSteps
|
||||
);
|
||||
state.setProgress({ loadedOverallHistogram: 1 });
|
||||
state.setOverallHistogram(overallLogHistogramChartData);
|
||||
|
||||
addLogMessage(`Loaded overall histogram chart data.`);
|
||||
|
||||
if (state.getIsCancelled()) {
|
||||
state.setIsRunning(false);
|
||||
return;
|
||||
}
|
||||
|
||||
// finish early if correlation analysis is not required.
|
||||
if (params.analyzeCorrelations === false) {
|
||||
addLogMessage(
|
||||
`Finish service since correlation analysis wasn't requested.`
|
||||
);
|
||||
state.setProgress({
|
||||
loadedHistogramStepsize: 1,
|
||||
loadedOverallHistogram: 1,
|
||||
loadedFieldCandidates: 1,
|
||||
loadedFieldValuePairs: 1,
|
||||
loadedHistograms: 1,
|
||||
});
|
||||
state.setIsRunning(false);
|
||||
return;
|
||||
}
|
||||
|
||||
// Create an array of ranges [2, 4, 6, ..., 98]
|
||||
const percentileAggregationPercents = range(2, 100, 2);
|
||||
const { percentiles: percentilesRecords } =
|
||||
await fetchTransactionDurationPercentiles(
|
||||
esClient,
|
||||
params,
|
||||
percentileAggregationPercents
|
||||
);
|
||||
|
||||
// We need to round the percentiles values
|
||||
// because the queries we're using based on it
|
||||
// later on wouldn't allow numbers with decimals.
|
||||
const percentiles = Object.values(percentilesRecords).map(Math.round);
|
||||
|
||||
addLogMessage(`Loaded percentiles.`);
|
||||
|
||||
if (state.getIsCancelled()) {
|
||||
state.setIsRunning(false);
|
||||
return;
|
||||
}
|
||||
|
||||
const { fieldCandidates } =
|
||||
await fetchTransactionDurationFieldCandidates(esClient, params);
|
||||
|
||||
addLogMessage(`Identified ${fieldCandidates.length} fieldCandidates.`);
|
||||
|
||||
state.setProgress({ loadedFieldCandidates: 1 });
|
||||
|
||||
const fieldValuePairs = await fetchTransactionDurationFieldValuePairs(
|
||||
esClient,
|
||||
params,
|
||||
fieldCandidates,
|
||||
state,
|
||||
addLogMessage
|
||||
);
|
||||
|
||||
addLogMessage(`Identified ${fieldValuePairs.length} fieldValuePairs.`);
|
||||
|
||||
if (state.getIsCancelled()) {
|
||||
state.setIsRunning(false);
|
||||
return;
|
||||
}
|
||||
|
||||
const { expectations, ranges } =
|
||||
computeExpectationsAndRanges(percentiles);
|
||||
|
||||
const { fractions, totalDocCount } =
|
||||
await fetchTransactionDurationFractions(esClient, params, ranges);
|
||||
|
||||
addLogMessage(
|
||||
`Loaded fractions and totalDocCount of ${totalDocCount}.`
|
||||
);
|
||||
|
||||
const fieldsToSample = new Set<string>();
|
||||
let loadedHistograms = 0;
|
||||
for await (const item of fetchTransactionDurationHistograms(
|
||||
esClient,
|
||||
addLogMessage,
|
||||
params,
|
||||
state,
|
||||
expectations,
|
||||
ranges,
|
||||
fractions,
|
||||
histogramRangeSteps,
|
||||
totalDocCount,
|
||||
fieldValuePairs
|
||||
)) {
|
||||
if (item !== undefined) {
|
||||
state.addLatencyCorrelation(item);
|
||||
fieldsToSample.add(item.fieldName);
|
||||
}
|
||||
loadedHistograms++;
|
||||
state.setProgress({
|
||||
loadedHistograms: loadedHistograms / fieldValuePairs.length,
|
||||
});
|
||||
}
|
||||
|
||||
addLogMessage(
|
||||
`Identified ${
|
||||
state.getState().latencyCorrelations.length
|
||||
} significant correlations out of ${
|
||||
fieldValuePairs.length
|
||||
} field/value pairs.`
|
||||
);
|
||||
|
||||
addLogMessage(
|
||||
`Identified ${fieldsToSample.size} fields to sample for field statistics.`
|
||||
);
|
||||
|
||||
const { stats: fieldStats } = await fetchFieldsStats(esClient, params, [
|
||||
...fieldsToSample,
|
||||
]);
|
||||
|
||||
addLogMessage(
|
||||
`Retrieved field statistics for ${fieldStats.length} fields out of ${fieldsToSample.size} fields.`
|
||||
);
|
||||
state.addFieldStats(fieldStats);
|
||||
} catch (e) {
|
||||
state.setError(e);
|
||||
}
|
||||
|
||||
if (state.getState().error !== undefined && params?.index.includes(':')) {
|
||||
state.setCcsWarning(true);
|
||||
}
|
||||
|
||||
state.setIsRunning(false);
|
||||
}
|
||||
|
||||
function cancel() {
|
||||
addLogMessage(`Service cancelled.`);
|
||||
state.setIsCancelled(true);
|
||||
}
|
||||
|
||||
fetchCorrelations();
|
||||
|
||||
return () => {
|
||||
const {
|
||||
ccsWarning,
|
||||
error,
|
||||
isRunning,
|
||||
overallHistogram,
|
||||
percentileThresholdValue,
|
||||
progress,
|
||||
fieldStats,
|
||||
} = state.getState();
|
||||
|
||||
return {
|
||||
cancel,
|
||||
error,
|
||||
meta: {
|
||||
loaded: Math.round(state.getOverallProgress() * 100),
|
||||
total: 100,
|
||||
isRunning,
|
||||
isPartial: isRunning,
|
||||
},
|
||||
rawResponse: {
|
||||
ccsWarning,
|
||||
log: getLogMessages(),
|
||||
took: Date.now() - progress.started,
|
||||
latencyCorrelations:
|
||||
state.getLatencyCorrelationsSortedByCorrelation(),
|
||||
percentileThresholdValue,
|
||||
overallHistogram,
|
||||
fieldStats,
|
||||
},
|
||||
};
|
||||
};
|
||||
};
|
|
@ -1,62 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { latencyCorrelationsSearchServiceStateProvider } from './latency_correlations_search_service_state';
|
||||
|
||||
describe('search service', () => {
|
||||
describe('latencyCorrelationsSearchServiceStateProvider', () => {
|
||||
it('initializes with default state', () => {
|
||||
const state = latencyCorrelationsSearchServiceStateProvider();
|
||||
const defaultState = state.getState();
|
||||
const defaultProgress = state.getOverallProgress();
|
||||
|
||||
expect(defaultState.ccsWarning).toBe(false);
|
||||
expect(defaultState.error).toBe(undefined);
|
||||
expect(defaultState.isCancelled).toBe(false);
|
||||
expect(defaultState.isRunning).toBe(true);
|
||||
expect(defaultState.overallHistogram).toBe(undefined);
|
||||
expect(defaultState.progress.loadedFieldCandidates).toBe(0);
|
||||
expect(defaultState.progress.loadedFieldValuePairs).toBe(0);
|
||||
expect(defaultState.progress.loadedHistogramStepsize).toBe(0);
|
||||
expect(defaultState.progress.loadedHistograms).toBe(0);
|
||||
expect(defaultState.progress.loadedOverallHistogram).toBe(0);
|
||||
expect(defaultState.progress.started > 0).toBe(true);
|
||||
|
||||
expect(defaultProgress).toBe(0);
|
||||
});
|
||||
|
||||
it('returns updated state', () => {
|
||||
const state = latencyCorrelationsSearchServiceStateProvider();
|
||||
|
||||
state.setCcsWarning(true);
|
||||
state.setError(new Error('the-error-message'));
|
||||
state.setIsCancelled(true);
|
||||
state.setIsRunning(false);
|
||||
state.setOverallHistogram([{ key: 1392202800000, doc_count: 1234 }]);
|
||||
state.setProgress({ loadedHistograms: 0.5 });
|
||||
|
||||
const updatedState = state.getState();
|
||||
const updatedProgress = state.getOverallProgress();
|
||||
|
||||
expect(updatedState.ccsWarning).toBe(true);
|
||||
expect(updatedState.error?.message).toBe('the-error-message');
|
||||
expect(updatedState.isCancelled).toBe(true);
|
||||
expect(updatedState.isRunning).toBe(false);
|
||||
expect(updatedState.overallHistogram).toEqual([
|
||||
{ key: 1392202800000, doc_count: 1234 },
|
||||
]);
|
||||
expect(updatedState.progress.loadedFieldCandidates).toBe(0);
|
||||
expect(updatedState.progress.loadedFieldValuePairs).toBe(0);
|
||||
expect(updatedState.progress.loadedHistogramStepsize).toBe(0);
|
||||
expect(updatedState.progress.loadedHistograms).toBe(0.5);
|
||||
expect(updatedState.progress.loadedOverallHistogram).toBe(0);
|
||||
expect(updatedState.progress.started > 0).toBe(true);
|
||||
|
||||
expect(updatedProgress).toBe(0.45);
|
||||
});
|
||||
});
|
||||
});
|
|
@ -1,121 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import type { HistogramItem } from '../../../../common/search_strategies/types';
|
||||
import type {
|
||||
LatencyCorrelationSearchServiceProgress,
|
||||
LatencyCorrelation,
|
||||
} from '../../../../common/search_strategies/latency_correlations/types';
|
||||
import { FieldStats } from '../../../../common/search_strategies/field_stats_types';
|
||||
|
||||
export const latencyCorrelationsSearchServiceStateProvider = () => {
|
||||
let ccsWarning = false;
|
||||
function setCcsWarning(d: boolean) {
|
||||
ccsWarning = d;
|
||||
}
|
||||
|
||||
let error: Error;
|
||||
function setError(d: Error) {
|
||||
error = d;
|
||||
}
|
||||
|
||||
let isCancelled = false;
|
||||
function getIsCancelled() {
|
||||
return isCancelled;
|
||||
}
|
||||
function setIsCancelled(d: boolean) {
|
||||
isCancelled = d;
|
||||
}
|
||||
|
||||
let isRunning = true;
|
||||
function setIsRunning(d: boolean) {
|
||||
isRunning = d;
|
||||
}
|
||||
|
||||
let overallHistogram: HistogramItem[] | undefined;
|
||||
function setOverallHistogram(d: HistogramItem[]) {
|
||||
overallHistogram = d;
|
||||
}
|
||||
|
||||
let percentileThresholdValue: number;
|
||||
function setPercentileThresholdValue(d: number) {
|
||||
percentileThresholdValue = d;
|
||||
}
|
||||
|
||||
let progress: LatencyCorrelationSearchServiceProgress = {
|
||||
started: Date.now(),
|
||||
loadedHistogramStepsize: 0,
|
||||
loadedOverallHistogram: 0,
|
||||
loadedFieldCandidates: 0,
|
||||
loadedFieldValuePairs: 0,
|
||||
loadedHistograms: 0,
|
||||
};
|
||||
function getOverallProgress() {
|
||||
return (
|
||||
progress.loadedHistogramStepsize * 0.025 +
|
||||
progress.loadedOverallHistogram * 0.025 +
|
||||
progress.loadedFieldCandidates * 0.025 +
|
||||
progress.loadedFieldValuePairs * 0.025 +
|
||||
progress.loadedHistograms * 0.9
|
||||
);
|
||||
}
|
||||
function setProgress(
|
||||
d: Partial<Omit<LatencyCorrelationSearchServiceProgress, 'started'>>
|
||||
) {
|
||||
progress = {
|
||||
...progress,
|
||||
...d,
|
||||
};
|
||||
}
|
||||
|
||||
const latencyCorrelations: LatencyCorrelation[] = [];
|
||||
function addLatencyCorrelation(d: LatencyCorrelation) {
|
||||
latencyCorrelations.push(d);
|
||||
}
|
||||
|
||||
function getLatencyCorrelationsSortedByCorrelation() {
|
||||
return latencyCorrelations.sort((a, b) => b.correlation - a.correlation);
|
||||
}
|
||||
const fieldStats: FieldStats[] = [];
|
||||
function addFieldStats(stats: FieldStats[]) {
|
||||
fieldStats.push(...stats);
|
||||
}
|
||||
|
||||
function getState() {
|
||||
return {
|
||||
ccsWarning,
|
||||
error,
|
||||
isCancelled,
|
||||
isRunning,
|
||||
overallHistogram,
|
||||
percentileThresholdValue,
|
||||
progress,
|
||||
latencyCorrelations,
|
||||
fieldStats,
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
addLatencyCorrelation,
|
||||
getIsCancelled,
|
||||
getOverallProgress,
|
||||
getState,
|
||||
getLatencyCorrelationsSortedByCorrelation,
|
||||
setCcsWarning,
|
||||
setError,
|
||||
setIsCancelled,
|
||||
setIsRunning,
|
||||
setOverallHistogram,
|
||||
setPercentileThresholdValue,
|
||||
setProgress,
|
||||
addFieldStats,
|
||||
};
|
||||
};
|
||||
|
||||
export type LatencyCorrelationsSearchServiceState = ReturnType<
|
||||
typeof latencyCorrelationsSearchServiceStateProvider
|
||||
>;
|
|
@ -1,124 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import type { ElasticsearchClient } from 'src/core/server';
|
||||
|
||||
import type * as estypes from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
|
||||
|
||||
import type {
|
||||
FieldValuePair,
|
||||
SearchStrategyParams,
|
||||
} from '../../../../common/search_strategies/types';
|
||||
|
||||
import type { SearchServiceLog } from '../search_service_log';
|
||||
import type { LatencyCorrelationsSearchServiceState } from '../latency_correlations/latency_correlations_search_service_state';
|
||||
import { TERMS_SIZE } from '../constants';
|
||||
|
||||
import { getQueryWithParams } from './get_query_with_params';
|
||||
import { getRequestBase } from './get_request_base';
|
||||
|
||||
export const getTermsAggRequest = (
|
||||
params: SearchStrategyParams,
|
||||
fieldName: string
|
||||
): estypes.SearchRequest => ({
|
||||
...getRequestBase(params),
|
||||
body: {
|
||||
query: getQueryWithParams({ params }),
|
||||
size: 0,
|
||||
aggs: {
|
||||
attribute_terms: {
|
||||
terms: {
|
||||
field: fieldName,
|
||||
size: TERMS_SIZE,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
const fetchTransactionDurationFieldTerms = async (
|
||||
esClient: ElasticsearchClient,
|
||||
params: SearchStrategyParams,
|
||||
fieldName: string,
|
||||
addLogMessage: SearchServiceLog['addLogMessage']
|
||||
): Promise<FieldValuePair[]> => {
|
||||
try {
|
||||
const resp = await esClient.search(getTermsAggRequest(params, fieldName));
|
||||
|
||||
if (resp.body.aggregations === undefined) {
|
||||
addLogMessage(
|
||||
`Failed to fetch terms for field candidate ${fieldName} fieldValuePairs, no aggregations returned.`,
|
||||
JSON.stringify(resp)
|
||||
);
|
||||
return [];
|
||||
}
|
||||
const buckets = (
|
||||
resp.body.aggregations
|
||||
.attribute_terms as estypes.AggregationsMultiBucketAggregate<{
|
||||
key: string;
|
||||
key_as_string?: string;
|
||||
}>
|
||||
)?.buckets;
|
||||
if (buckets?.length >= 1) {
|
||||
return buckets.map((d) => ({
|
||||
fieldName,
|
||||
// The terms aggregation returns boolean fields as { key: 0, key_as_string: "false" },
|
||||
// so we need to pick `key_as_string` if it's present, otherwise searches on boolean fields would fail later on.
|
||||
fieldValue: d.key_as_string ?? d.key,
|
||||
}));
|
||||
}
|
||||
} catch (e) {
|
||||
addLogMessage(
|
||||
`Failed to fetch terms for field candidate ${fieldName} fieldValuePairs.`,
|
||||
JSON.stringify(e)
|
||||
);
|
||||
}
|
||||
|
||||
return [];
|
||||
};
|
||||
|
||||
async function fetchInSequence(
|
||||
fieldCandidates: string[],
|
||||
fn: (fieldCandidate: string) => Promise<FieldValuePair[]>
|
||||
) {
|
||||
const results = [];
|
||||
|
||||
for (const fieldCandidate of fieldCandidates) {
|
||||
results.push(...(await fn(fieldCandidate)));
|
||||
}
|
||||
|
||||
return results;
|
||||
}
|
||||
|
||||
export const fetchTransactionDurationFieldValuePairs = async (
|
||||
esClient: ElasticsearchClient,
|
||||
params: SearchStrategyParams,
|
||||
fieldCandidates: string[],
|
||||
state: LatencyCorrelationsSearchServiceState,
|
||||
addLogMessage: SearchServiceLog['addLogMessage']
|
||||
): Promise<FieldValuePair[]> => {
|
||||
let fieldValuePairsProgress = 1;
|
||||
|
||||
return await fetchInSequence(
|
||||
fieldCandidates,
|
||||
async function (fieldCandidate: string) {
|
||||
const fieldTerms = await fetchTransactionDurationFieldTerms(
|
||||
esClient,
|
||||
params,
|
||||
fieldCandidate,
|
||||
addLogMessage
|
||||
);
|
||||
|
||||
state.setProgress({
|
||||
loadedFieldValuePairs: fieldValuePairsProgress / fieldCandidates.length,
|
||||
});
|
||||
fieldValuePairsProgress++;
|
||||
|
||||
return fieldTerms;
|
||||
}
|
||||
);
|
||||
};
|
|
@ -1,96 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import type * as estypes from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
|
||||
|
||||
import type { ElasticsearchClient } from 'src/core/server';
|
||||
|
||||
import type {
|
||||
FieldValuePair,
|
||||
SearchStrategyParams,
|
||||
} from '../../../../common/search_strategies/types';
|
||||
|
||||
import type { SearchServiceLog } from '../search_service_log';
|
||||
import type { LatencyCorrelationsSearchServiceState } from '../latency_correlations/latency_correlations_search_service_state';
|
||||
import { CORRELATION_THRESHOLD, KS_TEST_THRESHOLD } from '../constants';
|
||||
|
||||
import { getPrioritizedFieldValuePairs } from './get_prioritized_field_value_pairs';
|
||||
import { fetchTransactionDurationCorrelation } from './query_correlation';
|
||||
import { fetchTransactionDurationRanges } from './query_ranges';
|
||||
|
||||
export async function* fetchTransactionDurationHistograms(
|
||||
esClient: ElasticsearchClient,
|
||||
addLogMessage: SearchServiceLog['addLogMessage'],
|
||||
params: SearchStrategyParams,
|
||||
state: LatencyCorrelationsSearchServiceState,
|
||||
expectations: number[],
|
||||
ranges: estypes.AggregationsAggregationRange[],
|
||||
fractions: number[],
|
||||
histogramRangeSteps: number[],
|
||||
totalDocCount: number,
|
||||
fieldValuePairs: FieldValuePair[]
|
||||
) {
|
||||
for (const item of getPrioritizedFieldValuePairs(fieldValuePairs)) {
|
||||
if (params === undefined || item === undefined || state.getIsCancelled()) {
|
||||
state.setIsRunning(false);
|
||||
return;
|
||||
}
|
||||
|
||||
// If one of the fields have an error
|
||||
// We don't want to stop the whole process
|
||||
try {
|
||||
const { correlation, ksTest } = await fetchTransactionDurationCorrelation(
|
||||
esClient,
|
||||
params,
|
||||
expectations,
|
||||
ranges,
|
||||
fractions,
|
||||
totalDocCount,
|
||||
[item]
|
||||
);
|
||||
|
||||
if (state.getIsCancelled()) {
|
||||
state.setIsRunning(false);
|
||||
return;
|
||||
}
|
||||
|
||||
if (
|
||||
correlation !== null &&
|
||||
correlation > CORRELATION_THRESHOLD &&
|
||||
ksTest !== null &&
|
||||
ksTest < KS_TEST_THRESHOLD
|
||||
) {
|
||||
const logHistogram = await fetchTransactionDurationRanges(
|
||||
esClient,
|
||||
params,
|
||||
histogramRangeSteps,
|
||||
[item]
|
||||
);
|
||||
yield {
|
||||
...item,
|
||||
correlation,
|
||||
ksTest,
|
||||
histogram: logHistogram,
|
||||
};
|
||||
} else {
|
||||
yield undefined;
|
||||
}
|
||||
} catch (e) {
|
||||
// don't fail the whole process for individual correlation queries,
|
||||
// just add the error to the internal log and check if we'd want to set the
|
||||
// cross-cluster search compatibility warning to true.
|
||||
addLogMessage(
|
||||
`Failed to fetch correlation/kstest for '${item.fieldName}/${item.fieldValue}'`,
|
||||
JSON.stringify(e)
|
||||
);
|
||||
if (params?.index.includes(':')) {
|
||||
state.setCcsWarning(true);
|
||||
}
|
||||
yield undefined;
|
||||
}
|
||||
}
|
||||
}
|
|
@@ -1,40 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { PluginSetup as DataPluginSetup } from 'src/plugins/data/server';

import { APM_SEARCH_STRATEGIES } from '../../../common/search_strategies/constants';

import type { ApmIndicesConfig } from '../settings/apm_indices/get_apm_indices';

import { failedTransactionsCorrelationsSearchServiceProvider } from './failed_transactions_correlations';
import { latencyCorrelationsSearchServiceProvider } from './latency_correlations';
import { searchStrategyProvider } from './search_strategy_provider';

export const registerSearchStrategies = (
  registerSearchStrategy: DataPluginSetup['search']['registerSearchStrategy'],
  getApmIndices: () => Promise<ApmIndicesConfig>,
  includeFrozen: boolean
) => {
  registerSearchStrategy(
    APM_SEARCH_STRATEGIES.APM_LATENCY_CORRELATIONS,
    searchStrategyProvider(
      latencyCorrelationsSearchServiceProvider,
      getApmIndices,
      includeFrozen
    )
  );

  registerSearchStrategy(
    APM_SEARCH_STRATEGIES.APM_FAILED_TRANSACTIONS_CORRELATIONS,
    searchStrategyProvider(
      failedTransactionsCorrelationsSearchServiceProvider,
      getApmIndices,
      includeFrozen
    )
  );
};
@ -1,47 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import {
|
||||
searchServiceLogProvider,
|
||||
currentTimeAsString,
|
||||
} from './search_service_log';
|
||||
|
||||
describe('search service', () => {
|
||||
describe('currentTimeAsString', () => {
|
||||
it('returns the current time as a string', () => {
|
||||
const mockDate = new Date(1392202800000);
|
||||
// @ts-ignore ignore the mockImplementation callback error
|
||||
const spy = jest.spyOn(global, 'Date').mockReturnValue(mockDate);
|
||||
|
||||
const timeString = currentTimeAsString();
|
||||
|
||||
expect(timeString).toEqual('2014-02-12T11:00:00.000Z');
|
||||
|
||||
spy.mockRestore();
|
||||
});
|
||||
});
|
||||
|
||||
describe('searchServiceLogProvider', () => {
|
||||
it('adds and retrieves messages from the log', async () => {
|
||||
const { addLogMessage, getLogMessages } = searchServiceLogProvider();
|
||||
|
||||
const mockDate = new Date(1392202800000);
|
||||
// @ts-ignore ignore the mockImplementation callback error
|
||||
const spy = jest.spyOn(global, 'Date').mockReturnValue(mockDate);
|
||||
|
||||
addLogMessage('the first message');
|
||||
addLogMessage('the second message');
|
||||
|
||||
expect(getLogMessages()).toEqual([
|
||||
'2014-02-12T11:00:00.000Z: the first message',
|
||||
'2014-02-12T11:00:00.000Z: the second message',
|
||||
]);
|
||||
|
||||
spy.mockRestore();
|
||||
});
|
||||
});
|
||||
});
|
|
@@ -1,34 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

interface LogMessage {
  timestamp: string;
  message: string;
  error?: string;
}

export const currentTimeAsString = () => new Date().toISOString();

export const searchServiceLogProvider = () => {
  const log: LogMessage[] = [];

  function addLogMessage(message: string, error?: string) {
    log.push({
      timestamp: currentTimeAsString(),
      message,
      ...(error !== undefined ? { error } : {}),
    });
  }

  function getLogMessages() {
    return log.map((l) => `${l.timestamp}: ${l.message}`);
  }

  return { addLogMessage, getLogMessages };
};

export type SearchServiceLog = ReturnType<typeof searchServiceLogProvider>;
@ -1,302 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import type * as estypes from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
|
||||
|
||||
import { SearchStrategyDependencies } from 'src/plugins/data/server';
|
||||
|
||||
import { IKibanaSearchRequest } from '../../../../../../src/plugins/data/common';
|
||||
|
||||
import { ENVIRONMENT_ALL } from '../../../common/environment_filter_values';
|
||||
import type { LatencyCorrelationsParams } from '../../../common/search_strategies/latency_correlations/types';
|
||||
import type { RawSearchStrategyClientParams } from '../../../common/search_strategies/types';
|
||||
|
||||
import type { ApmIndicesConfig } from '../settings/apm_indices/get_apm_indices';
|
||||
|
||||
import { latencyCorrelationsSearchServiceProvider } from './latency_correlations';
|
||||
import { searchStrategyProvider } from './search_strategy_provider';
|
||||
|
||||
// helper to trigger promises in the async search service
|
||||
const flushPromises = () => new Promise(setImmediate);
|
||||
|
||||
const clientFieldCapsMock = () => ({ body: { fields: [] } });
|
||||
|
||||
// minimal client mock to fulfill search requirements of the async search service to succeed
|
||||
const clientSearchMock = (
|
||||
req: estypes.SearchRequest
|
||||
): { body: estypes.SearchResponse } => {
|
||||
let aggregations:
|
||||
| {
|
||||
transaction_duration_percentiles: estypes.AggregationsTDigestPercentilesAggregate;
|
||||
}
|
||||
| {
|
||||
transaction_duration_min: estypes.AggregationsValueAggregate;
|
||||
transaction_duration_max: estypes.AggregationsValueAggregate;
|
||||
}
|
||||
| {
|
||||
logspace_ranges: estypes.AggregationsMultiBucketAggregate<{
|
||||
from: number;
|
||||
doc_count: number;
|
||||
}>;
|
||||
}
|
||||
| {
|
||||
latency_ranges: estypes.AggregationsMultiBucketAggregate<{
|
||||
doc_count: number;
|
||||
}>;
|
||||
}
|
||||
| undefined;
|
||||
|
||||
if (req?.body?.aggs !== undefined) {
|
||||
const aggs = req.body.aggs;
|
||||
// fetchTransactionDurationPercentiles
|
||||
if (aggs.transaction_duration_percentiles !== undefined) {
|
||||
aggregations = { transaction_duration_percentiles: { values: {} } };
|
||||
}
|
||||
|
||||
// fetchTransactionDurationCorrelation
|
||||
if (aggs.logspace_ranges !== undefined) {
|
||||
aggregations = { logspace_ranges: { buckets: [] } };
|
||||
}
|
||||
|
||||
// fetchTransactionDurationFractions
|
||||
if (aggs.latency_ranges !== undefined) {
|
||||
aggregations = { latency_ranges: { buckets: [] } };
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
body: {
|
||||
_shards: {
|
||||
failed: 0,
|
||||
successful: 1,
|
||||
total: 1,
|
||||
},
|
||||
took: 162,
|
||||
timed_out: false,
|
||||
hits: {
|
||||
hits: [],
|
||||
total: {
|
||||
value: 0,
|
||||
relation: 'eq',
|
||||
},
|
||||
},
|
||||
...(aggregations !== undefined ? { aggregations } : {}),
|
||||
},
|
||||
};
|
||||
};
|
||||
|
||||
const getApmIndicesMock = async () =>
|
||||
({ transaction: 'apm-*' } as ApmIndicesConfig);
|
||||
|
||||
describe('APM Correlations search strategy', () => {
|
||||
describe('strategy interface', () => {
|
||||
it('returns a custom search strategy with a `search` and `cancel` function', async () => {
|
||||
const searchStrategy = await searchStrategyProvider(
|
||||
latencyCorrelationsSearchServiceProvider,
|
||||
getApmIndicesMock,
|
||||
false
|
||||
);
|
||||
expect(typeof searchStrategy.search).toBe('function');
|
||||
expect(typeof searchStrategy.cancel).toBe('function');
|
||||
});
|
||||
});
|
||||
|
||||
describe('search', () => {
|
||||
let mockClientFieldCaps: jest.Mock;
|
||||
let mockClientSearch: jest.Mock;
|
||||
let mockGetApmIndicesMock: jest.Mock;
|
||||
let mockDeps: SearchStrategyDependencies;
|
||||
let params: Required<
|
||||
IKibanaSearchRequest<
|
||||
LatencyCorrelationsParams & RawSearchStrategyClientParams
|
||||
>
|
||||
>['params'];
|
||||
|
||||
beforeEach(() => {
|
||||
mockClientFieldCaps = jest.fn(clientFieldCapsMock);
|
||||
mockClientSearch = jest.fn(clientSearchMock);
|
||||
mockGetApmIndicesMock = jest.fn(getApmIndicesMock);
|
||||
mockDeps = {
|
||||
esClient: {
|
||||
asCurrentUser: {
|
||||
fieldCaps: mockClientFieldCaps,
|
||||
search: mockClientSearch,
|
||||
},
|
||||
},
|
||||
} as unknown as SearchStrategyDependencies;
|
||||
params = {
|
||||
start: '2020',
|
||||
end: '2021',
|
||||
environment: ENVIRONMENT_ALL.value,
|
||||
kuery: '',
|
||||
percentileThreshold: 95,
|
||||
analyzeCorrelations: true,
|
||||
};
|
||||
});
|
||||
|
||||
describe('async functionality', () => {
|
||||
describe('when no params are provided', () => {
|
||||
it('throws an error', async () => {
|
||||
const searchStrategy = await searchStrategyProvider(
|
||||
latencyCorrelationsSearchServiceProvider,
|
||||
mockGetApmIndicesMock,
|
||||
false
|
||||
);
|
||||
|
||||
expect(mockGetApmIndicesMock).toHaveBeenCalledTimes(0);
|
||||
|
||||
expect(() => searchStrategy.search({}, {}, mockDeps)).toThrow(
|
||||
'Invalid request parameters.'
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('when no ID is provided', () => {
|
||||
it('performs a client search with params', async () => {
|
||||
const searchStrategy = await searchStrategyProvider(
|
||||
latencyCorrelationsSearchServiceProvider,
|
||||
mockGetApmIndicesMock,
|
||||
false
|
||||
);
|
||||
await searchStrategy.search({ params }, {}, mockDeps).toPromise();
|
||||
|
||||
expect(mockGetApmIndicesMock).toHaveBeenCalledTimes(1);
|
||||
|
||||
const [[request]] = mockClientSearch.mock.calls;
|
||||
|
||||
expect(request.index).toEqual('apm-*');
|
||||
expect(request.body).toEqual(
|
||||
expect.objectContaining({
|
||||
aggs: {
|
||||
transaction_duration_percentiles: {
|
||||
percentiles: {
|
||||
field: 'transaction.duration.us',
|
||||
hdr: { number_of_significant_value_digits: 3 },
|
||||
percents: [95],
|
||||
},
|
||||
},
|
||||
},
|
||||
query: {
|
||||
bool: {
|
||||
filter: [
|
||||
{ term: { 'processor.event': 'transaction' } },
|
||||
{
|
||||
range: {
|
||||
'@timestamp': {
|
||||
format: 'epoch_millis',
|
||||
gte: 1577836800000,
|
||||
lte: 1609459200000,
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
size: 0,
|
||||
track_total_hits: true,
|
||||
})
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('when an ID with params is provided', () => {
|
||||
it('retrieves the current request', async () => {
|
||||
const searchStrategy = await searchStrategyProvider(
|
||||
latencyCorrelationsSearchServiceProvider,
|
||||
mockGetApmIndicesMock,
|
||||
false
|
||||
);
|
||||
const response = await searchStrategy
|
||||
.search({ params }, {}, mockDeps)
|
||||
.toPromise();
|
||||
|
||||
const searchStrategyId = response.id;
|
||||
|
||||
const response2 = await searchStrategy
|
||||
.search({ id: searchStrategyId, params }, {}, mockDeps)
|
||||
.toPromise();
|
||||
|
||||
expect(mockGetApmIndicesMock).toHaveBeenCalledTimes(1);
|
||||
expect(response2).toEqual(
|
||||
expect.objectContaining({ id: searchStrategyId })
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('if the client throws', () => {
|
||||
it('does not emit an error', async () => {
|
||||
mockClientSearch
|
||||
.mockReset()
|
||||
.mockRejectedValueOnce(new Error('client error'));
|
||||
const searchStrategy = await searchStrategyProvider(
|
||||
latencyCorrelationsSearchServiceProvider,
|
||||
mockGetApmIndicesMock,
|
||||
false
|
||||
);
|
||||
const response = await searchStrategy
|
||||
.search({ params }, {}, mockDeps)
|
||||
.toPromise();
|
||||
|
||||
expect(mockGetApmIndicesMock).toHaveBeenCalledTimes(1);
|
||||
|
||||
expect(response).toEqual(
|
||||
expect.objectContaining({ isRunning: true })
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
it('triggers the subscription only once', async () => {
|
||||
expect.assertions(2);
|
||||
const searchStrategy = await searchStrategyProvider(
|
||||
latencyCorrelationsSearchServiceProvider,
|
||||
mockGetApmIndicesMock,
|
||||
false
|
||||
);
|
||||
searchStrategy
|
||||
.search({ params }, {}, mockDeps)
|
||||
.subscribe((response) => {
|
||||
expect(mockGetApmIndicesMock).toHaveBeenCalledTimes(1);
|
||||
expect(response).toEqual(
|
||||
expect.objectContaining({ loaded: 0, isRunning: true })
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('response', () => {
|
||||
it('sends an updated response on consecutive search calls', async () => {
|
||||
const searchStrategy = await searchStrategyProvider(
|
||||
latencyCorrelationsSearchServiceProvider,
|
||||
mockGetApmIndicesMock,
|
||||
false
|
||||
);
|
||||
|
||||
const response1 = await searchStrategy
|
||||
.search({ params }, {}, mockDeps)
|
||||
.toPromise();
|
||||
|
||||
expect(mockGetApmIndicesMock).toHaveBeenCalledTimes(1);
|
||||
expect(typeof response1.id).toEqual('string');
|
||||
expect(response1).toEqual(
|
||||
expect.objectContaining({ loaded: 0, isRunning: true })
|
||||
);
|
||||
|
||||
await flushPromises();
|
||||
|
||||
const response2 = await searchStrategy
|
||||
.search({ id: response1.id, params }, {}, mockDeps)
|
||||
.toPromise();
|
||||
|
||||
expect(mockGetApmIndicesMock).toHaveBeenCalledTimes(1);
|
||||
expect(response2.id).toEqual(response1.id);
|
||||
expect(response2).toEqual(
|
||||
expect.objectContaining({ loaded: 100, isRunning: false })
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
@@ -1,204 +0,0 @@
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import uuid from 'uuid';
|
||||
import { of } from 'rxjs';
|
||||
import { getOrElse } from 'fp-ts/lib/Either';
|
||||
import { pipe } from 'fp-ts/lib/pipeable';
|
||||
import * as t from 'io-ts';
|
||||
import { failure } from 'io-ts/lib/PathReporter';
|
||||
|
||||
import type { ElasticsearchClient } from 'src/core/server';
|
||||
|
||||
import type { ISearchStrategy } from '../../../../../../src/plugins/data/server';
|
||||
import {
|
||||
IKibanaSearchRequest,
|
||||
IKibanaSearchResponse,
|
||||
} from '../../../../../../src/plugins/data/common';
|
||||
|
||||
import type {
|
||||
RawResponseBase,
|
||||
RawSearchStrategyClientParams,
|
||||
SearchStrategyClientParams,
|
||||
} from '../../../common/search_strategies/types';
|
||||
import type {
|
||||
LatencyCorrelationsParams,
|
||||
LatencyCorrelationsRawResponse,
|
||||
} from '../../../common/search_strategies/latency_correlations/types';
|
||||
import type {
|
||||
FailedTransactionsCorrelationsParams,
|
||||
FailedTransactionsCorrelationsRawResponse,
|
||||
} from '../../../common/search_strategies/failed_transactions_correlations/types';
|
||||
import { rangeRt } from '../../routes/default_api_types';
|
||||
import type { ApmIndicesConfig } from '../settings/apm_indices/get_apm_indices';
|
||||
|
||||
interface SearchServiceState<TRawResponse extends RawResponseBase> {
|
||||
cancel: () => void;
|
||||
error: Error;
|
||||
meta: {
|
||||
loaded: number;
|
||||
total: number;
|
||||
isRunning: boolean;
|
||||
isPartial: boolean;
|
||||
};
|
||||
rawResponse: TRawResponse;
|
||||
}
|
||||
|
||||
type GetSearchServiceState<TRawResponse extends RawResponseBase> =
|
||||
() => SearchServiceState<TRawResponse>;
|
||||
|
||||
export type SearchServiceProvider<
|
||||
TSearchStrategyClientParams extends SearchStrategyClientParams,
|
||||
TRawResponse extends RawResponseBase
|
||||
> = (
|
||||
esClient: ElasticsearchClient,
|
||||
getApmIndices: () => Promise<ApmIndicesConfig>,
|
||||
searchServiceParams: TSearchStrategyClientParams,
|
||||
includeFrozen: boolean
|
||||
) => GetSearchServiceState<TRawResponse>;
|
||||
|
||||
// Failed Transactions Correlations function overload
|
||||
export function searchStrategyProvider(
|
||||
searchServiceProvider: SearchServiceProvider<
|
||||
FailedTransactionsCorrelationsParams & SearchStrategyClientParams,
|
||||
FailedTransactionsCorrelationsRawResponse & RawResponseBase
|
||||
>,
|
||||
getApmIndices: () => Promise<ApmIndicesConfig>,
|
||||
includeFrozen: boolean
|
||||
): ISearchStrategy<
|
||||
IKibanaSearchRequest<
|
||||
FailedTransactionsCorrelationsParams & RawSearchStrategyClientParams
|
||||
>,
|
||||
IKibanaSearchResponse<
|
||||
FailedTransactionsCorrelationsRawResponse & RawResponseBase
|
||||
>
|
||||
>;
|
||||
|
||||
// Latency Correlations function overload
|
||||
export function searchStrategyProvider(
|
||||
searchServiceProvider: SearchServiceProvider<
|
||||
LatencyCorrelationsParams & SearchStrategyClientParams,
|
||||
LatencyCorrelationsRawResponse & RawResponseBase
|
||||
>,
|
||||
getApmIndices: () => Promise<ApmIndicesConfig>,
|
||||
includeFrozen: boolean
|
||||
): ISearchStrategy<
|
||||
IKibanaSearchRequest<
|
||||
LatencyCorrelationsParams & RawSearchStrategyClientParams
|
||||
>,
|
||||
IKibanaSearchResponse<LatencyCorrelationsRawResponse & RawResponseBase>
|
||||
>;
|
||||
|
||||
export function searchStrategyProvider<TRequestParams, TResponseParams>(
|
||||
searchServiceProvider: SearchServiceProvider<
|
||||
TRequestParams & SearchStrategyClientParams,
|
||||
TResponseParams & RawResponseBase
|
||||
>,
|
||||
getApmIndices: () => Promise<ApmIndicesConfig>,
|
||||
includeFrozen: boolean
|
||||
): ISearchStrategy<
|
||||
IKibanaSearchRequest<TRequestParams & RawSearchStrategyClientParams>,
|
||||
IKibanaSearchResponse<TResponseParams & RawResponseBase>
|
||||
> {
|
||||
const searchServiceMap = new Map<
|
||||
string,
|
||||
GetSearchServiceState<TResponseParams & RawResponseBase>
|
||||
>();
|
||||
|
||||
return {
|
||||
search: (request, options, deps) => {
|
||||
if (request.params === undefined) {
|
||||
throw new Error('Invalid request parameters.');
|
||||
}
|
||||
|
||||
const { start: startString, end: endString } = request.params;
|
||||
|
||||
// converts string based start/end to epochmillis
|
||||
const decodedRange = pipe(
|
||||
rangeRt.decode({ start: startString, end: endString }),
|
||||
getOrElse<t.Errors, { start: number; end: number }>((errors) => {
|
||||
throw new Error(failure(errors).join('\n'));
|
||||
})
|
||||
);
|
||||
|
||||
// The function to fetch the current state of the search service.
|
||||
// This will be either an existing service for a follow up fetch or a new one for new requests.
|
||||
let getSearchServiceState: GetSearchServiceState<
|
||||
TResponseParams & RawResponseBase
|
||||
>;
|
||||
|
||||
// If the request includes an ID, we require that the search service already exists
|
||||
// otherwise we throw an error. The client should never poll a service that's been cancelled or finished.
|
||||
// This also avoids instantiating search services when the service gets called with random IDs.
|
||||
if (typeof request.id === 'string') {
|
||||
const existingGetSearchServiceState = searchServiceMap.get(request.id);
|
||||
|
||||
if (typeof existingGetSearchServiceState === 'undefined') {
|
||||
throw new Error(
|
||||
`SearchService with ID '${request.id}' does not exist.`
|
||||
);
|
||||
}
|
||||
|
||||
getSearchServiceState = existingGetSearchServiceState;
|
||||
} else {
|
||||
const {
|
||||
start,
|
||||
end,
|
||||
environment,
|
||||
kuery,
|
||||
serviceName,
|
||||
transactionName,
|
||||
transactionType,
|
||||
...requestParams
|
||||
} = request.params;
|
||||
|
||||
getSearchServiceState = searchServiceProvider(
|
||||
deps.esClient.asCurrentUser,
|
||||
getApmIndices,
|
||||
{
|
||||
environment,
|
||||
kuery,
|
||||
serviceName,
|
||||
transactionName,
|
||||
transactionType,
|
||||
start: decodedRange.start,
|
||||
end: decodedRange.end,
|
||||
...(requestParams as unknown as TRequestParams),
|
||||
},
|
||||
includeFrozen
|
||||
);
|
||||
}
|
||||
|
||||
// Reuse the request's id or create a new one.
|
||||
const id = request.id ?? uuid();
|
||||
|
||||
const { error, meta, rawResponse } = getSearchServiceState();
|
||||
|
||||
if (error instanceof Error) {
|
||||
searchServiceMap.delete(id);
|
||||
throw error;
|
||||
} else if (meta.isRunning) {
|
||||
searchServiceMap.set(id, getSearchServiceState);
|
||||
} else {
|
||||
searchServiceMap.delete(id);
|
||||
}
|
||||
|
||||
return of({
|
||||
id,
|
||||
...meta,
|
||||
rawResponse,
|
||||
});
|
||||
},
|
||||
cancel: async (id, options, deps) => {
|
||||
const getSearchServiceState = searchServiceMap.get(id);
|
||||
if (getSearchServiceState !== undefined) {
|
||||
getSearchServiceState().cancel();
|
||||
searchServiceMap.delete(id);
|
||||
}
|
||||
},
|
||||
};
|
||||
}
@@ -15,7 +15,6 @@ import {
PluginInitializerContext,
|
||||
} from 'src/core/server';
|
||||
import { isEmpty, mapValues } from 'lodash';
|
||||
import { SavedObjectsClient } from '../../../../src/core/server';
|
||||
import { mappingFromFieldMap } from '../../rule_registry/common/mapping_from_field_map';
|
||||
import { Dataset } from '../../rule_registry/server';
|
||||
import { APMConfig, APM_SERVER_FEATURE_ID } from '.';
@@ -26,7 +25,6 @@ import { registerFleetPolicyCallbacks } from './lib/fleet/register_fleet_policy_
import { createApmTelemetry } from './lib/apm_telemetry';
|
||||
import { createApmEventClient } from './lib/helpers/create_es_client/create_apm_event_client';
|
||||
import { getInternalSavedObjectsClient } from './lib/helpers/get_internal_saved_objects_client';
|
||||
import { registerSearchStrategies } from './lib/search_strategies';
|
||||
import { createApmAgentConfigurationIndex } from './lib/settings/agent_configuration/create_agent_config_index';
|
||||
import { getApmIndices } from './lib/settings/apm_indices/get_apm_indices';
|
||||
import { createApmCustomLinkIndex } from './lib/settings/custom_link/create_custom_link_index';
@@ -197,25 +195,6 @@ export class APMPlugin
logger: this.logger,
|
||||
});
|
||||
|
||||
// search strategies for async partial search results
|
||||
core.getStartServices().then(([coreStart]) => {
|
||||
(async () => {
|
||||
const savedObjectsClient = new SavedObjectsClient(
|
||||
coreStart.savedObjects.createInternalRepository()
|
||||
);
|
||||
|
||||
const includeFrozen = await coreStart.uiSettings
|
||||
.asScopedToClient(savedObjectsClient)
|
||||
.get(UI_SETTINGS.SEARCH_INCLUDE_FROZEN);
|
||||
|
||||
registerSearchStrategies(
|
||||
plugins.data.search.registerSearchStrategy,
|
||||
boundGetApmIndices,
|
||||
includeFrozen
|
||||
);
|
||||
})();
|
||||
});
|
||||
|
||||
core.deprecations.registerDeprecations({
|
||||
getDeprecations: getDeprecations({
|
||||
cloudSetup: plugins.cloud,
|
256 x-pack/plugins/apm/server/routes/correlations.ts (new file)
@@ -0,0 +1,256 @@
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import * as t from 'io-ts';
|
||||
import Boom from '@hapi/boom';
|
||||
|
||||
import { i18n } from '@kbn/i18n';
|
||||
import { toNumberRt } from '@kbn/io-ts-utils';
|
||||
|
||||
import { isActivePlatinumLicense } from '../../common/license_check';
|
||||
|
||||
import { setupRequest } from '../lib/helpers/setup_request';
|
||||
import {
|
||||
fetchPValues,
|
||||
fetchSignificantCorrelations,
|
||||
fetchTransactionDurationFieldCandidates,
|
||||
fetchTransactionDurationFieldValuePairs,
|
||||
} from '../lib/correlations/queries';
|
||||
import { fetchFieldsStats } from '../lib/correlations/queries/field_stats/get_fields_stats';
|
||||
|
||||
import { withApmSpan } from '../utils/with_apm_span';
|
||||
|
||||
import { createApmServerRoute } from './create_apm_server_route';
|
||||
import { createApmServerRouteRepository } from './create_apm_server_route_repository';
|
||||
import { environmentRt, kueryRt, rangeRt } from './default_api_types';
|
||||
|
||||
const INVALID_LICENSE = i18n.translate('xpack.apm.correlations.license.text', {
|
||||
defaultMessage:
|
||||
'To use the correlations API, you must be subscribed to an Elastic Platinum license.',
|
||||
});
|
||||
|
||||
const fieldCandidatesRoute = createApmServerRoute({
|
||||
endpoint: 'GET /internal/apm/correlations/field_candidates',
|
||||
params: t.type({
|
||||
query: t.intersection([
|
||||
t.partial({
|
||||
serviceName: t.string,
|
||||
transactionName: t.string,
|
||||
transactionType: t.string,
|
||||
}),
|
||||
environmentRt,
|
||||
kueryRt,
|
||||
rangeRt,
|
||||
]),
|
||||
}),
|
||||
options: { tags: ['access:apm'] },
|
||||
handler: async (resources) => {
|
||||
const { context } = resources;
|
||||
if (!isActivePlatinumLicense(context.licensing.license)) {
|
||||
throw Boom.forbidden(INVALID_LICENSE);
|
||||
}
|
||||
|
||||
const { indices } = await setupRequest(resources);
|
||||
const esClient = resources.context.core.elasticsearch.client.asCurrentUser;
|
||||
|
||||
return withApmSpan(
|
||||
'get_correlations_field_candidates',
|
||||
async () =>
|
||||
await fetchTransactionDurationFieldCandidates(esClient, {
|
||||
...resources.params.query,
|
||||
index: indices.transaction,
|
||||
})
|
||||
);
|
||||
},
|
||||
});
|
||||
|
||||
const fieldStatsRoute = createApmServerRoute({
|
||||
endpoint: 'POST /internal/apm/correlations/field_stats',
|
||||
params: t.type({
|
||||
body: t.intersection([
|
||||
t.partial({
|
||||
serviceName: t.string,
|
||||
transactionName: t.string,
|
||||
transactionType: t.string,
|
||||
}),
|
||||
environmentRt,
|
||||
kueryRt,
|
||||
rangeRt,
|
||||
t.type({
|
||||
fieldsToSample: t.array(t.string),
|
||||
}),
|
||||
]),
|
||||
}),
|
||||
options: { tags: ['access:apm'] },
|
||||
handler: async (resources) => {
|
||||
const { context } = resources;
|
||||
if (!isActivePlatinumLicense(context.licensing.license)) {
|
||||
throw Boom.forbidden(INVALID_LICENSE);
|
||||
}
|
||||
|
||||
const { indices } = await setupRequest(resources);
|
||||
const esClient = resources.context.core.elasticsearch.client.asCurrentUser;
|
||||
|
||||
const { fieldsToSample, ...params } = resources.params.body;
|
||||
|
||||
return withApmSpan(
|
||||
'get_correlations_field_stats',
|
||||
async () =>
|
||||
await fetchFieldsStats(
|
||||
esClient,
|
||||
{
|
||||
...params,
|
||||
index: indices.transaction,
|
||||
},
|
||||
fieldsToSample
|
||||
)
|
||||
);
|
||||
},
|
||||
});
|
||||
|
||||
const fieldValuePairsRoute = createApmServerRoute({
|
||||
endpoint: 'POST /internal/apm/correlations/field_value_pairs',
|
||||
params: t.type({
|
||||
body: t.intersection([
|
||||
t.partial({
|
||||
serviceName: t.string,
|
||||
transactionName: t.string,
|
||||
transactionType: t.string,
|
||||
}),
|
||||
environmentRt,
|
||||
kueryRt,
|
||||
rangeRt,
|
||||
t.type({
|
||||
fieldCandidates: t.array(t.string),
|
||||
}),
|
||||
]),
|
||||
}),
|
||||
options: { tags: ['access:apm'] },
|
||||
handler: async (resources) => {
|
||||
const { context } = resources;
|
||||
if (!isActivePlatinumLicense(context.licensing.license)) {
|
||||
throw Boom.forbidden(INVALID_LICENSE);
|
||||
}
|
||||
|
||||
const { indices } = await setupRequest(resources);
|
||||
const esClient = resources.context.core.elasticsearch.client.asCurrentUser;
|
||||
|
||||
const { fieldCandidates, ...params } = resources.params.body;
|
||||
|
||||
return withApmSpan(
|
||||
'get_correlations_field_value_pairs',
|
||||
async () =>
|
||||
await fetchTransactionDurationFieldValuePairs(
|
||||
esClient,
|
||||
{
|
||||
...params,
|
||||
index: indices.transaction,
|
||||
},
|
||||
fieldCandidates
|
||||
)
|
||||
);
|
||||
},
|
||||
});
|
||||
|
||||
const significantCorrelationsRoute = createApmServerRoute({
|
||||
endpoint: 'POST /internal/apm/correlations/significant_correlations',
|
||||
params: t.type({
|
||||
body: t.intersection([
|
||||
t.partial({
|
||||
serviceName: t.string,
|
||||
transactionName: t.string,
|
||||
transactionType: t.string,
|
||||
}),
|
||||
environmentRt,
|
||||
kueryRt,
|
||||
rangeRt,
|
||||
t.type({
|
||||
fieldValuePairs: t.array(
|
||||
t.type({
|
||||
fieldName: t.string,
|
||||
fieldValue: t.union([t.string, toNumberRt]),
|
||||
})
|
||||
),
|
||||
}),
|
||||
]),
|
||||
}),
|
||||
options: { tags: ['access:apm'] },
|
||||
handler: async (resources) => {
|
||||
const { context } = resources;
|
||||
if (!isActivePlatinumLicense(context.licensing.license)) {
|
||||
throw Boom.forbidden(INVALID_LICENSE);
|
||||
}
|
||||
|
||||
const { indices } = await setupRequest(resources);
|
||||
const esClient = resources.context.core.elasticsearch.client.asCurrentUser;
|
||||
|
||||
const { fieldValuePairs, ...params } = resources.params.body;
|
||||
|
||||
const paramsWithIndex = {
|
||||
...params,
|
||||
index: indices.transaction,
|
||||
};
|
||||
|
||||
return withApmSpan(
|
||||
'get_significant_correlations',
|
||||
async () =>
|
||||
await fetchSignificantCorrelations(
|
||||
esClient,
|
||||
paramsWithIndex,
|
||||
fieldValuePairs
|
||||
)
|
||||
);
|
||||
},
|
||||
});
|
||||
|
||||
const pValuesRoute = createApmServerRoute({
|
||||
endpoint: 'POST /internal/apm/correlations/p_values',
|
||||
params: t.type({
|
||||
body: t.intersection([
|
||||
t.partial({
|
||||
serviceName: t.string,
|
||||
transactionName: t.string,
|
||||
transactionType: t.string,
|
||||
}),
|
||||
environmentRt,
|
||||
kueryRt,
|
||||
rangeRt,
|
||||
t.type({
|
||||
fieldCandidates: t.array(t.string),
|
||||
}),
|
||||
]),
|
||||
}),
|
||||
options: { tags: ['access:apm'] },
|
||||
handler: async (resources) => {
|
||||
const { context } = resources;
|
||||
if (!isActivePlatinumLicense(context.licensing.license)) {
|
||||
throw Boom.forbidden(INVALID_LICENSE);
|
||||
}
|
||||
|
||||
const { indices } = await setupRequest(resources);
|
||||
const esClient = resources.context.core.elasticsearch.client.asCurrentUser;
|
||||
|
||||
const { fieldCandidates, ...params } = resources.params.body;
|
||||
|
||||
const paramsWithIndex = {
|
||||
...params,
|
||||
index: indices.transaction,
|
||||
};
|
||||
|
||||
return withApmSpan(
|
||||
'get_p_values',
|
||||
async () => await fetchPValues(esClient, paramsWithIndex, fieldCandidates)
|
||||
);
|
||||
},
|
||||
});
|
||||
|
||||
export const correlationsRouteRepository = createApmServerRouteRepository()
|
||||
.add(pValuesRoute)
|
||||
.add(fieldCandidatesRoute)
|
||||
.add(fieldStatsRoute)
|
||||
.add(fieldValuePairsRoute)
|
||||
.add(significantCorrelationsRoute);
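For orientation, the correlations analysis chains these endpoints on the client: the field candidates feed the field value pairs query, whose result feeds the significant correlations query (the API integration tests further below run the same sequence). A minimal sketch, assuming a generic `callApmApi`-style client that accepts the `{ endpoint, params }` shape used above; the helper name and return typings are illustrative, not part of this change:

// Illustrative only: chains the new correlations endpoints in the order the
// latency analysis uses them. `callApmApi` stands in for any client that
// accepts the { endpoint, params } shape shown in the routes above.
async function runLatencyCorrelationsSketch(
  callApmApi: (req: { endpoint: string; params: unknown }) => Promise<any>,
  query: { environment: string; kuery: string; start: string; end: string }
) {
  // 1. Which fields are worth looking at?
  const { fieldCandidates } = await callApmApi({
    endpoint: 'GET /internal/apm/correlations/field_candidates',
    params: { query },
  });

  // 2. Expand candidates into concrete field/value pairs.
  const { fieldValuePairs } = await callApmApi({
    endpoint: 'POST /internal/apm/correlations/field_value_pairs',
    params: { body: { ...query, fieldCandidates } },
  });

  // 3. Score the pairs against the latency distribution.
  const { latencyCorrelations } = await callApmApi({
    endpoint: 'POST /internal/apm/correlations/significant_correlations',
    params: { body: { ...query, fieldValuePairs } },
  });

  return latencyCorrelations;
}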
@@ -12,6 +12,7 @@ import type {
import { PickByValue } from 'utility-types';
|
||||
import { alertsChartPreviewRouteRepository } from './alerts/chart_preview';
|
||||
import { backendsRouteRepository } from './backends';
|
||||
import { correlationsRouteRepository } from './correlations';
|
||||
import { createApmServerRouteRepository } from './create_apm_server_route_repository';
|
||||
import { environmentsRouteRepository } from './environments';
|
||||
import { errorsRouteRepository } from './errors';
@@ -60,6 +61,7 @@ const getTypedGlobalApmServerRouteRepository = () => {
.merge(sourceMapsRouteRepository)
|
||||
.merge(apmFleetRouteRepository)
|
||||
.merge(backendsRouteRepository)
|
||||
.merge(correlationsRouteRepository)
|
||||
.merge(fallbackToTransactionsRouteRepository)
|
||||
.merge(historicalDataRouteRepository)
|
||||
.merge(eventMetadataRouteRepository);
|
@@ -7,234 +7,211 @@
|
||||
import expect from '@kbn/expect';
|
||||
|
||||
import { IKibanaSearchRequest } from '../../../../../src/plugins/data/common';
|
||||
|
||||
import type { FailedTransactionsCorrelationsParams } from '../../../../plugins/apm/common/search_strategies/failed_transactions_correlations/types';
|
||||
import type { RawSearchStrategyClientParams } from '../../../../plugins/apm/common/search_strategies/types';
|
||||
import { APM_SEARCH_STRATEGIES } from '../../../../plugins/apm/common/search_strategies/constants';
|
||||
|
||||
import { FtrProviderContext } from '../../common/ftr_provider_context';
|
||||
import { parseBfetchResponse } from '../../common/utils/parse_b_fetch';
|
||||
import type { FailedTransactionsCorrelationsResponse } from '../../../../plugins/apm/common/correlations/failed_transactions_correlations/types';
|
||||
import { EVENT_OUTCOME } from '../../../../plugins/apm/common/elasticsearch_fieldnames';
|
||||
import { EventOutcome } from '../../../../plugins/apm/common/event_outcome';
|
||||
|
||||
// These tests go through the full sequence of queries required
|
||||
// to get the final results for a failed transactions correlation analysis.
|
||||
export default function ApiTest({ getService }: FtrProviderContext) {
|
||||
const apmApiClient = getService('apmApiClient');
|
||||
const registry = getService('registry');
|
||||
const retry = getService('retry');
|
||||
const supertest = getService('legacySupertestAsApmReadUser');
|
||||
|
||||
const getRequestBody = () => {
|
||||
const request: IKibanaSearchRequest<
|
||||
FailedTransactionsCorrelationsParams & RawSearchStrategyClientParams
|
||||
> = {
|
||||
params: {
|
||||
environment: 'ENVIRONMENT_ALL',
|
||||
start: '2020',
|
||||
end: '2021',
|
||||
kuery: '',
|
||||
percentileThreshold: 95,
|
||||
},
|
||||
};
|
||||
|
||||
return {
|
||||
batch: [
|
||||
{
|
||||
request,
|
||||
options: { strategy: APM_SEARCH_STRATEGIES.APM_FAILED_TRANSACTIONS_CORRELATIONS },
|
||||
},
|
||||
],
|
||||
};
|
||||
};
|
||||
// This matches the parameters used for the other tab's queries in `../correlations/*`.
|
||||
const getOptions = () => ({
|
||||
environment: 'ENVIRONMENT_ALL',
|
||||
start: '2020',
|
||||
end: '2021',
|
||||
kuery: '',
|
||||
});
|
||||
|
||||
registry.when('failed transactions without data', { config: 'trial', archives: [] }, () => {
|
||||
it.skip('queries the search strategy and returns results', async () => {
|
||||
const intialResponse = await supertest
|
||||
.post(`/internal/bsearch`)
|
||||
.set('kbn-xsrf', 'foo')
|
||||
.send(getRequestBody());
|
||||
it('handles the empty state', async () => {
|
||||
const overallDistributionResponse = await apmApiClient.readUser({
|
||||
endpoint: 'POST /internal/apm/latency/overall_distribution',
|
||||
params: {
|
||||
body: {
|
||||
...getOptions(),
|
||||
percentileThreshold: 95,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
expect(intialResponse.status).to.eql(
|
||||
expect(overallDistributionResponse.status).to.eql(
|
||||
200,
|
||||
`Expected status to be '200', got '${intialResponse.status}'`
|
||||
);
|
||||
expect(intialResponse.body).to.eql(
|
||||
{},
|
||||
`Expected response body to be an empty object, actual response is in the text attribute. Got: '${JSON.stringify(
|
||||
intialResponse.body
|
||||
)}'`
|
||||
`Expected status to be '200', got '${overallDistributionResponse.status}'`
|
||||
);
|
||||
|
||||
const body = parseBfetchResponse(intialResponse)[0];
|
||||
const errorDistributionResponse = await apmApiClient.readUser({
|
||||
endpoint: 'POST /internal/apm/latency/overall_distribution',
|
||||
params: {
|
||||
body: {
|
||||
...getOptions(),
|
||||
percentileThreshold: 95,
|
||||
termFilters: [{ fieldName: EVENT_OUTCOME, fieldValue: EventOutcome.failure }],
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
expect(typeof body.result).to.be('object');
|
||||
const { result } = body;
|
||||
|
||||
expect(typeof result?.id).to.be('string');
|
||||
|
||||
// pass on id for follow up queries
|
||||
const searchStrategyId = result.id;
|
||||
|
||||
// follow up request body including search strategy ID
|
||||
const reqBody = getRequestBody();
|
||||
reqBody.batch[0].request.id = searchStrategyId;
|
||||
|
||||
let followUpResponse: Record<string, any> = {};
|
||||
|
||||
// continues querying until the search strategy finishes
|
||||
await retry.waitForWithTimeout(
|
||||
'search strategy eventually completes and returns full results',
|
||||
5000,
|
||||
async () => {
|
||||
const response = await supertest
|
||||
.post(`/internal/bsearch`)
|
||||
.set('kbn-xsrf', 'foo')
|
||||
.send(reqBody);
|
||||
|
||||
followUpResponse = parseBfetchResponse(response)[0];
|
||||
|
||||
return (
|
||||
followUpResponse?.result?.isRunning === false || followUpResponse?.error !== undefined
|
||||
);
|
||||
}
|
||||
expect(errorDistributionResponse.status).to.eql(
|
||||
200,
|
||||
`Expected status to be '200', got '${errorDistributionResponse.status}'`
|
||||
);
|
||||
|
||||
expect(followUpResponse?.error).to.eql(
|
||||
undefined,
|
||||
`search strategy should not return an error, got: ${JSON.stringify(
|
||||
followUpResponse?.error
|
||||
)}`
|
||||
const fieldCandidatesResponse = await apmApiClient.readUser({
|
||||
endpoint: 'GET /internal/apm/correlations/field_candidates',
|
||||
params: {
|
||||
query: getOptions(),
|
||||
},
|
||||
});
|
||||
|
||||
expect(fieldCandidatesResponse.status).to.eql(
|
||||
200,
|
||||
`Expected status to be '200', got '${fieldCandidatesResponse.status}'`
|
||||
);
|
||||
|
||||
const followUpResult = followUpResponse.result;
|
||||
expect(followUpResult?.isRunning).to.eql(false, 'search strategy should not be running');
|
||||
expect(followUpResult?.isPartial).to.eql(
|
||||
false,
|
||||
'search strategy result should not be partial'
|
||||
const failedTransactionsCorrelationsResponse = await apmApiClient.readUser({
|
||||
endpoint: 'POST /internal/apm/correlations/p_values',
|
||||
params: {
|
||||
body: {
|
||||
...getOptions(),
|
||||
fieldCandidates: fieldCandidatesResponse.body?.fieldCandidates,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
expect(failedTransactionsCorrelationsResponse.status).to.eql(
|
||||
200,
|
||||
`Expected status to be '200', got '${failedTransactionsCorrelationsResponse.status}'`
|
||||
);
|
||||
expect(followUpResult?.id).to.eql(
|
||||
searchStrategyId,
|
||||
'search strategy id should match original id'
|
||||
);
|
||||
expect(followUpResult?.isRestored).to.eql(
|
||||
true,
|
||||
'search strategy response should be restored'
|
||||
);
|
||||
expect(followUpResult?.loaded).to.eql(100, 'loaded state should be 100');
|
||||
expect(followUpResult?.total).to.eql(100, 'total state should be 100');
|
||||
|
||||
expect(typeof followUpResult?.rawResponse).to.be('object');
|
||||
const finalRawResponse: FailedTransactionsCorrelationsResponse = {
|
||||
ccsWarning: failedTransactionsCorrelationsResponse.body?.ccsWarning,
|
||||
percentileThresholdValue: overallDistributionResponse.body?.percentileThresholdValue,
|
||||
overallHistogram: overallDistributionResponse.body?.overallHistogram,
|
||||
failedTransactionsCorrelations:
|
||||
failedTransactionsCorrelationsResponse.body?.failedTransactionsCorrelations,
|
||||
};
|
||||
|
||||
const { rawResponse: finalRawResponse } = followUpResult;
|
||||
|
||||
expect(typeof finalRawResponse?.took).to.be('number');
|
||||
|
||||
expect(finalRawResponse?.failedTransactionsCorrelations.length).to.eql(
|
||||
expect(finalRawResponse?.failedTransactionsCorrelations?.length).to.eql(
|
||||
0,
|
||||
`Expected 0 identified correlations, got ${finalRawResponse?.failedTransactionsCorrelations.length}.`
|
||||
`Expected 0 identified correlations, got ${finalRawResponse?.failedTransactionsCorrelations?.length}.`
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
registry.when('failed transactions with data', { config: 'trial', archives: ['8.0.0'] }, () => {
|
||||
it.skip('queries the search strategy and returns results', async () => {
|
||||
const intialResponse = await supertest
|
||||
.post(`/internal/bsearch`)
|
||||
.set('kbn-xsrf', 'foo')
|
||||
.send(getRequestBody());
|
||||
it('runs queries and returns results', async () => {
|
||||
const overallDistributionResponse = await apmApiClient.readUser({
|
||||
endpoint: 'POST /internal/apm/latency/overall_distribution',
|
||||
params: {
|
||||
body: {
|
||||
...getOptions(),
|
||||
percentileThreshold: 95,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
expect(intialResponse.status).to.eql(
|
||||
expect(overallDistributionResponse.status).to.eql(
|
||||
200,
|
||||
`Expected status to be '200', got '${intialResponse.status}'`
|
||||
);
|
||||
expect(intialResponse.body).to.eql(
|
||||
{},
|
||||
`Expected response body to be an empty object, actual response is in the text attribute. Got: '${JSON.stringify(
|
||||
intialResponse.body
|
||||
)}'`
|
||||
`Expected status to be '200', got '${overallDistributionResponse.status}'`
|
||||
);
|
||||
|
||||
const body = parseBfetchResponse(intialResponse)[0];
|
||||
const errorDistributionResponse = await apmApiClient.readUser({
|
||||
endpoint: 'POST /internal/apm/latency/overall_distribution',
|
||||
params: {
|
||||
body: {
|
||||
...getOptions(),
|
||||
percentileThreshold: 95,
|
||||
termFilters: [{ fieldName: EVENT_OUTCOME, fieldValue: EventOutcome.failure }],
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
expect(typeof body.result).to.be('object');
|
||||
const { result } = body;
|
||||
|
||||
expect(typeof result?.id).to.be('string');
|
||||
|
||||
// pass on id for follow up queries
|
||||
const searchStrategyId = result.id;
|
||||
|
||||
// follow up request body including search strategy ID
|
||||
const reqBody = getRequestBody();
|
||||
reqBody.batch[0].request.id = searchStrategyId;
|
||||
|
||||
let followUpResponse: Record<string, any> = {};
|
||||
|
||||
// continues querying until the search strategy finishes
|
||||
await retry.waitForWithTimeout(
|
||||
'search strategy eventually completes and returns full results',
|
||||
5000,
|
||||
async () => {
|
||||
const response = await supertest
|
||||
.post(`/internal/bsearch`)
|
||||
.set('kbn-xsrf', 'foo')
|
||||
.send(reqBody);
|
||||
|
||||
followUpResponse = parseBfetchResponse(response)[0];
|
||||
|
||||
return (
|
||||
followUpResponse?.result?.isRunning === false || followUpResponse?.error !== undefined
|
||||
);
|
||||
}
|
||||
expect(errorDistributionResponse.status).to.eql(
|
||||
200,
|
||||
`Expected status to be '200', got '${errorDistributionResponse.status}'`
|
||||
);
|
||||
|
||||
expect(followUpResponse?.error).to.eql(
|
||||
undefined,
|
||||
`search strategy should not return an error, got: ${JSON.stringify(
|
||||
followUpResponse?.error
|
||||
)}`
|
||||
const fieldCandidatesResponse = await apmApiClient.readUser({
|
||||
endpoint: 'GET /internal/apm/correlations/field_candidates',
|
||||
params: {
|
||||
query: getOptions(),
|
||||
},
|
||||
});
|
||||
|
||||
expect(fieldCandidatesResponse.status).to.eql(
|
||||
200,
|
||||
`Expected status to be '200', got '${fieldCandidatesResponse.status}'`
|
||||
);
|
||||
|
||||
const followUpResult = followUpResponse.result;
|
||||
expect(followUpResult?.isRunning).to.eql(false, 'search strategy should not be running');
|
||||
expect(followUpResult?.isPartial).to.eql(
|
||||
false,
|
||||
'search strategy result should not be partial'
|
||||
const fieldCandidates = fieldCandidatesResponse.body?.fieldCandidates.filter(
|
||||
(t) => !(t === EVENT_OUTCOME)
|
||||
);
|
||||
expect(followUpResult?.id).to.eql(
|
||||
searchStrategyId,
|
||||
'search strategy id should match original id'
|
||||
|
||||
// Identified 68 fieldCandidates.
|
||||
expect(fieldCandidates.length).to.eql(
|
||||
68,
|
||||
`Expected field candidates length to be '68', got '${fieldCandidates.length}'`
|
||||
);
|
||||
expect(followUpResult?.isRestored).to.eql(
|
||||
true,
|
||||
'search strategy response should be restored'
|
||||
|
||||
const failedTransactionsCorrelationsResponse = await apmApiClient.readUser({
|
||||
endpoint: 'POST /internal/apm/correlations/p_values',
|
||||
params: {
|
||||
body: {
|
||||
...getOptions(),
|
||||
fieldCandidates,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
expect(failedTransactionsCorrelationsResponse.status).to.eql(
|
||||
200,
|
||||
`Expected status to be '200', got '${failedTransactionsCorrelationsResponse.status}'`
|
||||
);
|
||||
expect(followUpResult?.loaded).to.eql(100, 'loaded state should be 100');
|
||||
expect(followUpResult?.total).to.eql(100, 'total state should be 100');
|
||||
|
||||
expect(typeof followUpResult?.rawResponse).to.be('object');
|
||||
const fieldsToSample = new Set<string>();
|
||||
if (failedTransactionsCorrelationsResponse.body?.failedTransactionsCorrelations.length > 0) {
|
||||
failedTransactionsCorrelationsResponse.body?.failedTransactionsCorrelations.forEach((d) => {
|
||||
fieldsToSample.add(d.fieldName);
|
||||
});
|
||||
}
|
||||
|
||||
const { rawResponse: finalRawResponse } = followUpResult;
|
||||
const failedtransactionsFieldStats = await apmApiClient.readUser({
|
||||
endpoint: 'POST /internal/apm/correlations/field_stats',
|
||||
params: {
|
||||
body: {
|
||||
...getOptions(),
|
||||
fieldsToSample: [...fieldsToSample],
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
const finalRawResponse: FailedTransactionsCorrelationsResponse = {
|
||||
ccsWarning: failedTransactionsCorrelationsResponse.body?.ccsWarning,
|
||||
percentileThresholdValue: overallDistributionResponse.body?.percentileThresholdValue,
|
||||
overallHistogram: overallDistributionResponse.body?.overallHistogram,
|
||||
errorHistogram: errorDistributionResponse.body?.overallHistogram,
|
||||
failedTransactionsCorrelations:
|
||||
failedTransactionsCorrelationsResponse.body?.failedTransactionsCorrelations,
|
||||
fieldStats: failedtransactionsFieldStats.body?.stats,
|
||||
};
|
||||
|
||||
expect(typeof finalRawResponse?.took).to.be('number');
|
||||
expect(finalRawResponse?.percentileThresholdValue).to.be(1309695.875);
|
||||
expect(finalRawResponse?.errorHistogram.length).to.be(101);
|
||||
expect(finalRawResponse?.overallHistogram.length).to.be(101);
|
||||
expect(finalRawResponse?.fieldStats.length).to.be(26);
|
||||
expect(finalRawResponse?.errorHistogram?.length).to.be(101);
|
||||
expect(finalRawResponse?.overallHistogram?.length).to.be(101);
|
||||
expect(finalRawResponse?.fieldStats?.length).to.be(26);
|
||||
|
||||
expect(finalRawResponse?.failedTransactionsCorrelations.length).to.eql(
|
||||
expect(finalRawResponse?.failedTransactionsCorrelations?.length).to.eql(
|
||||
30,
|
||||
`Expected 30 identified correlations, got ${finalRawResponse?.failedTransactionsCorrelations.length}.`
|
||||
`Expected 30 identified correlations, got ${finalRawResponse?.failedTransactionsCorrelations?.length}.`
|
||||
);
|
||||
|
||||
expect(finalRawResponse?.log.map((d: string) => d.split(': ')[1])).to.eql([
|
||||
'Fetched 95th percentile value of 1309695.875 based on 1244 documents.',
|
||||
'Identified 68 fieldCandidates.',
|
||||
'Identified correlations for 68 fields out of 68 candidates.',
|
||||
'Identified 26 fields to sample for field statistics.',
|
||||
'Retrieved field statistics for 26 fields out of 26 fields.',
|
||||
'Identified 30 significant correlations relating to failed transactions.',
|
||||
]);
|
||||
|
||||
const sortedCorrelations = finalRawResponse?.failedTransactionsCorrelations.sort();
|
||||
const correlation = sortedCorrelations[0];
|
||||
const sortedCorrelations = finalRawResponse?.failedTransactionsCorrelations?.sort(
|
||||
(a, b) => b.score - a.score
|
||||
);
|
||||
const correlation = sortedCorrelations?.[0];
|
||||
|
||||
expect(typeof correlation).to.be('object');
|
||||
expect(correlation?.doc_count).to.be(31);
@@ -247,10 +224,12 @@ export default function ApiTest({ getService }: FtrProviderContext) {
expect(typeof correlation?.failurePercentage).to.be('number');
|
||||
expect(typeof correlation?.successPercentage).to.be('number');
|
||||
|
||||
const fieldStats = finalRawResponse?.fieldStats[0];
|
||||
const fieldStats = finalRawResponse?.fieldStats?.[0];
|
||||
expect(typeof fieldStats).to.be('object');
|
||||
expect(fieldStats.topValues.length).to.greaterThan(0);
|
||||
expect(fieldStats.topValuesSampleSize).to.greaterThan(0);
|
||||
expect(Array.isArray(fieldStats?.topValues) && fieldStats?.topValues?.length).to.greaterThan(
|
||||
0
|
||||
);
|
||||
expect(fieldStats?.topValuesSampleSize).to.greaterThan(0);
|
||||
});
|
||||
});
|
||||
}
@@ -0,0 +1,55 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import expect from '@kbn/expect';
import { FtrProviderContext } from '../../common/ftr_provider_context';

export default function ApiTest({ getService }: FtrProviderContext) {
  const apmApiClient = getService('apmApiClient');
  const registry = getService('registry');

  const endpoint = 'GET /internal/apm/correlations/field_candidates';

  const getOptions = () => ({
    params: {
      query: {
        environment: 'ENVIRONMENT_ALL',
        start: '2020',
        end: '2021',
        kuery: '',
      },
    },
  });

  registry.when('field candidates without data', { config: 'trial', archives: [] }, () => {
    it('handles the empty state', async () => {
      const response = await apmApiClient.readUser({
        endpoint,
        ...getOptions(),
      });

      expect(response.status).to.be(200);
      expect(response.body?.fieldCandidates.length).to.be(14);
    });
  });

  registry.when(
    'field candidates with data and default args',
    { config: 'trial', archives: ['8.0.0'] },
    () => {
      it('returns field candidates', async () => {
        const response = await apmApiClient.readUser({
          endpoint,
          ...getOptions(),
        });

        expect(response.status).to.eql(200);
        expect(response.body?.fieldCandidates.length).to.be(69);
      });
    }
  );
}
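Going by the assertions in this test, the field candidates endpoint returns a flat list of field names: 14 default candidates when no data is indexed, 69 with the 8.0.0 archive loaded. A rough response shape, with the interface name being illustrative rather than taken from the codebase:

// Shape implied by the assertions above; the interface name is a placeholder.
interface FieldCandidatesResponseSketch {
  // Field names to probe for correlations, e.g. 'service.version', 'host.ip'.
  fieldCandidates: string[];
}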
@@ -0,0 +1,71 @@
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import expect from '@kbn/expect';
|
||||
import { FtrProviderContext } from '../../common/ftr_provider_context';
|
||||
|
||||
export default function ApiTest({ getService }: FtrProviderContext) {
|
||||
const apmApiClient = getService('apmApiClient');
|
||||
const registry = getService('registry');
|
||||
|
||||
const endpoint = 'POST /internal/apm/correlations/field_value_pairs';
|
||||
|
||||
const getOptions = () => ({
|
||||
params: {
|
||||
body: {
|
||||
environment: 'ENVIRONMENT_ALL',
|
||||
start: '2020',
|
||||
end: '2021',
|
||||
kuery: '',
|
||||
fieldCandidates: [
|
||||
'service.version',
|
||||
'service.node.name',
|
||||
'service.framework.version',
|
||||
'service.language.version',
|
||||
'service.runtime.version',
|
||||
'kubernetes.pod.name',
|
||||
'kubernetes.pod.uid',
|
||||
'container.id',
|
||||
'source.ip',
|
||||
'client.ip',
|
||||
'host.ip',
|
||||
'service.environment',
|
||||
'process.args',
|
||||
'http.response.status_code',
|
||||
],
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
registry.when('field value pairs without data', { config: 'trial', archives: [] }, () => {
|
||||
it('handles the empty state', async () => {
|
||||
const response = await apmApiClient.readUser({
|
||||
endpoint,
|
||||
...getOptions(),
|
||||
});
|
||||
|
||||
expect(response.status).to.be(200);
|
||||
expect(response.body?.fieldValuePairs.length).to.be(0);
|
||||
});
|
||||
});
|
||||
|
||||
registry.when(
|
||||
'field value pairs with data and default args',
|
||||
{ config: 'trial', archives: ['8.0.0'] },
|
||||
() => {
|
||||
it('returns field value pairs', async () => {
|
||||
const response = await apmApiClient.readUser({
|
||||
endpoint,
|
||||
...getOptions(),
|
||||
});
|
||||
|
||||
expect(response.status).to.eql(200);
|
||||
expect(response.body?.fieldValuePairs.length).to.be(124);
|
||||
});
|
||||
}
|
||||
);
|
||||
}
@@ -7,134 +7,95 @@
|
||||
import expect from '@kbn/expect';
|
||||
|
||||
import { IKibanaSearchRequest } from '../../../../../src/plugins/data/common';
|
||||
|
||||
import type { LatencyCorrelationsParams } from '../../../../plugins/apm/common/search_strategies/latency_correlations/types';
|
||||
import type { RawSearchStrategyClientParams } from '../../../../plugins/apm/common/search_strategies/types';
|
||||
import { APM_SEARCH_STRATEGIES } from '../../../../plugins/apm/common/search_strategies/constants';
|
||||
|
||||
import { FtrProviderContext } from '../../common/ftr_provider_context';
|
||||
import { parseBfetchResponse } from '../../common/utils/parse_b_fetch';
|
||||
import type { LatencyCorrelationsResponse } from '../../../../plugins/apm/common/correlations/latency_correlations/types';
|
||||
|
||||
// These tests go through the full sequence of queries required
|
||||
// to get the final results for a latency correlation analysis.
|
||||
export default function ApiTest({ getService }: FtrProviderContext) {
|
||||
const apmApiClient = getService('apmApiClient');
|
||||
const registry = getService('registry');
|
||||
const retry = getService('retry');
|
||||
const supertest = getService('legacySupertestAsApmReadUser');
|
||||
|
||||
const getRequestBody = () => {
|
||||
const request: IKibanaSearchRequest<LatencyCorrelationsParams & RawSearchStrategyClientParams> =
|
||||
{
|
||||
params: {
|
||||
environment: 'ENVIRONMENT_ALL',
|
||||
start: '2020',
|
||||
end: '2021',
|
||||
kuery: '',
|
||||
percentileThreshold: 95,
|
||||
analyzeCorrelations: true,
|
||||
},
|
||||
};
|
||||
|
||||
return {
|
||||
batch: [
|
||||
{
|
||||
request,
|
||||
options: { strategy: APM_SEARCH_STRATEGIES.APM_LATENCY_CORRELATIONS },
|
||||
},
|
||||
],
|
||||
};
|
||||
};
|
||||
// This matches the parameters used for the other tab's queries in `../correlations/*`.
|
||||
const getOptions = () => ({
|
||||
environment: 'ENVIRONMENT_ALL',
|
||||
start: '2020',
|
||||
end: '2021',
|
||||
kuery: '',
|
||||
});
|
||||
|
||||
registry.when(
|
||||
'correlations latency_ml overall without data',
|
||||
'correlations latency overall without data',
|
||||
{ config: 'trial', archives: [] },
|
||||
() => {
|
||||
it('handles the empty state', async () => {
|
||||
const intialResponse = await supertest
|
||||
.post(`/internal/bsearch`)
|
||||
.set('kbn-xsrf', 'foo')
|
||||
.send(getRequestBody());
|
||||
const overallDistributionResponse = await apmApiClient.readUser({
|
||||
endpoint: 'POST /internal/apm/latency/overall_distribution',
|
||||
params: {
|
||||
body: {
|
||||
...getOptions(),
|
||||
percentileThreshold: 95,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
expect(intialResponse.status).to.eql(
|
||||
expect(overallDistributionResponse.status).to.eql(
|
||||
200,
|
||||
`Expected status to be '200', got '${intialResponse.status}'`
|
||||
);
|
||||
expect(intialResponse.body).to.eql(
|
||||
{},
|
||||
`Expected response body to be an empty object, actual response is in the text attribute. Got: '${JSON.stringify(
|
||||
intialResponse.body
|
||||
)}'`
|
||||
`Expected status to be '200', got '${overallDistributionResponse.status}'`
|
||||
);
|
||||
|
||||
const body = parseBfetchResponse(intialResponse)[0];
|
||||
const fieldCandidatesResponse = await apmApiClient.readUser({
|
||||
endpoint: 'GET /internal/apm/correlations/field_candidates',
|
||||
params: {
|
||||
query: getOptions(),
|
||||
},
|
||||
});
|
||||
|
||||
expect(typeof body.result).to.be('object');
|
||||
const { result } = body;
|
||||
|
||||
expect(typeof result?.id).to.be('string');
|
||||
|
||||
// pass on id for follow up queries
|
||||
const searchStrategyId = result.id;
|
||||
|
||||
// follow up request body including search strategy ID
|
||||
const reqBody = getRequestBody();
|
||||
reqBody.batch[0].request.id = searchStrategyId;
|
||||
|
||||
let followUpResponse: Record<string, any> = {};
|
||||
|
||||
// continues querying until the search strategy finishes
|
||||
await retry.waitForWithTimeout(
|
||||
'search strategy eventually completes and returns full results',
|
||||
5000,
|
||||
async () => {
|
||||
const response = await supertest
|
||||
.post(`/internal/bsearch`)
|
||||
.set('kbn-xsrf', 'foo')
|
||||
.send(reqBody);
|
||||
|
||||
followUpResponse = parseBfetchResponse(response)[0];
|
||||
|
||||
return (
|
||||
followUpResponse?.result?.isRunning === false || followUpResponse?.error !== undefined
|
||||
);
|
||||
}
|
||||
expect(fieldCandidatesResponse.status).to.eql(
|
||||
200,
|
||||
`Expected status to be '200', got '${fieldCandidatesResponse.status}'`
|
||||
);
|
||||
|
||||
expect(followUpResponse?.error).to.eql(
|
||||
undefined,
|
||||
`search strategy should not return an error, got: ${JSON.stringify(
|
||||
followUpResponse?.error
|
||||
)}`
|
||||
const fieldValuePairsResponse = await apmApiClient.readUser({
|
||||
endpoint: 'POST /internal/apm/correlations/field_value_pairs',
|
||||
params: {
|
||||
body: {
|
||||
...getOptions(),
|
||||
fieldCandidates: fieldCandidatesResponse.body?.fieldCandidates,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
expect(fieldValuePairsResponse.status).to.eql(
|
||||
200,
|
||||
`Expected status to be '200', got '${fieldValuePairsResponse.status}'`
|
||||
);
|
||||
|
||||
const followUpResult = followUpResponse.result;
|
||||
expect(followUpResult?.isRunning).to.eql(false, 'search strategy should not be running');
|
||||
expect(followUpResult?.isPartial).to.eql(
|
||||
false,
|
||||
'search strategy result should not be partial'
|
||||
);
|
||||
expect(followUpResult?.id).to.eql(
|
||||
searchStrategyId,
|
||||
'search strategy id should match original id'
|
||||
);
|
||||
expect(followUpResult?.isRestored).to.eql(
|
||||
true,
|
||||
'search strategy response should be restored'
|
||||
);
|
||||
expect(followUpResult?.loaded).to.eql(100, 'loaded state should be 100');
|
||||
expect(followUpResult?.total).to.eql(100, 'total state should be 100');
|
||||
const significantCorrelationsResponse = await apmApiClient.readUser({
|
||||
endpoint: 'POST /internal/apm/correlations/significant_correlations',
|
||||
params: {
|
||||
body: {
|
||||
...getOptions(),
|
||||
fieldValuePairs: fieldValuePairsResponse.body?.fieldValuePairs,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
expect(typeof followUpResult?.rawResponse).to.be('object');
|
||||
expect(significantCorrelationsResponse.status).to.eql(
|
||||
200,
|
||||
`Expected status to be '200', got '${significantCorrelationsResponse.status}'`
|
||||
);
|
||||
|
||||
const { rawResponse: finalRawResponse } = followUpResult;
|
||||
const finalRawResponse: LatencyCorrelationsResponse = {
|
||||
ccsWarning: significantCorrelationsResponse.body?.ccsWarning,
|
||||
percentileThresholdValue: overallDistributionResponse.body?.percentileThresholdValue,
|
||||
overallHistogram: overallDistributionResponse.body?.overallHistogram,
|
||||
latencyCorrelations: significantCorrelationsResponse.body?.latencyCorrelations,
|
||||
};
|
||||
|
||||
expect(typeof finalRawResponse?.took).to.be('number');
|
||||
expect(finalRawResponse?.percentileThresholdValue).to.be(undefined);
|
||||
expect(finalRawResponse?.overallHistogram).to.be(undefined);
|
||||
expect(finalRawResponse?.latencyCorrelations.length).to.be(0);
|
||||
expect(finalRawResponse?.log.map((d: string) => d.split(': ')[1])).to.eql([
|
||||
'Fetched 95th percentile value of undefined based on 0 documents.',
|
||||
'Abort service since percentileThresholdValue could not be determined.',
|
||||
]);
|
||||
expect(finalRawResponse?.latencyCorrelations?.length).to.be(0);
|
||||
});
|
||||
}
|
||||
);
@@ -144,120 +105,121 @@ export default function ApiTest({ getService }: FtrProviderContext) {
{ config: 'trial', archives: ['8.0.0'] },
|
||||
() => {
|
||||
// putting this into a single `it` because the responses depend on each other
|
||||
it.skip('queries the search strategy and returns results', async () => {
|
||||
const intialResponse = await supertest
|
||||
.post(`/internal/bsearch`)
|
||||
.set('kbn-xsrf', 'foo')
|
||||
.send(getRequestBody());
|
||||
it('runs queries and returns results', async () => {
|
||||
const overallDistributionResponse = await apmApiClient.readUser({
|
||||
endpoint: 'POST /internal/apm/latency/overall_distribution',
|
||||
params: {
|
||||
body: {
|
||||
...getOptions(),
|
||||
percentileThreshold: 95,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
expect(intialResponse.status).to.eql(
|
||||
expect(overallDistributionResponse.status).to.eql(
|
||||
200,
|
||||
`Expected status to be '200', got '${intialResponse.status}'`
|
||||
);
|
||||
expect(intialResponse.body).to.eql(
|
||||
{},
|
||||
`Expected response body to be an empty object, actual response is in the text attribute. Got: '${JSON.stringify(
|
||||
intialResponse.body
|
||||
)}'`
|
||||
`Expected status to be '200', got '${overallDistributionResponse.status}'`
|
||||
);
|
||||
|
||||
const body = parseBfetchResponse(intialResponse)[0];
|
||||
const fieldCandidatesResponse = await apmApiClient.readUser({
|
||||
endpoint: 'GET /internal/apm/correlations/field_candidates',
|
||||
params: {
|
||||
query: getOptions(),
|
||||
},
|
||||
});
|
||||
|
||||
expect(typeof body?.result).to.be('object');
|
||||
const { result } = body;
|
||||
|
||||
expect(typeof result?.id).to.be('string');
|
||||
|
||||
// pass on id for follow up queries
|
||||
const searchStrategyId = result.id;
|
||||
|
||||
expect(result?.loaded).to.be(0);
|
||||
expect(result?.total).to.be(100);
|
||||
-  expect(result?.isRunning).to.be(true);
-  expect(result?.isPartial).to.be(true);
-  expect(result?.isRestored).to.eql(
-    false,
-    `Expected response result to be not restored. Got: '${result?.isRestored}'`
-  );
-  expect(typeof result?.rawResponse).to.be('object');
-
-  const { rawResponse } = result;
-
-  expect(typeof rawResponse?.took).to.be('number');
-  expect(rawResponse?.latencyCorrelations).to.eql([]);
-
-  // follow up request body including search strategy ID
-  const reqBody = getRequestBody();
-  reqBody.batch[0].request.id = searchStrategyId;
-
-  let followUpResponse: Record<string, any> = {};
-
-  // continues querying until the search strategy finishes
-  await retry.waitForWithTimeout(
-    'search strategy eventually completes and returns full results',
-    5000,
-    async () => {
-      const response = await supertest
-        .post(`/internal/bsearch`)
-        .set('kbn-xsrf', 'foo')
-        .send(reqBody);
-      followUpResponse = parseBfetchResponse(response)[0];
-
-      return (
-        followUpResponse?.result?.isRunning === false || followUpResponse?.error !== undefined
-      );
-    }
-  );
-
-  expect(followUpResponse?.error).to.eql(
-    undefined,
-    `Finished search strategy should not return an error, got: ${JSON.stringify(
-      followUpResponse?.error
-    )}`
-  );
-
-  const followUpResult = followUpResponse.result;
-  expect(followUpResult?.isRunning).to.eql(
-    false,
-    `Expected finished result not to be running. Got: ${followUpResult?.isRunning}`
-  );
-  expect(followUpResult?.isPartial).to.eql(
-    false,
-    `Expected finished result not to be partial. Got: ${followUpResult?.isPartial}`
-  );
-  expect(followUpResult?.id).to.be(searchStrategyId);
-  expect(followUpResult?.isRestored).to.be(true);
-  expect(followUpResult?.loaded).to.be(100);
-  expect(followUpResult?.total).to.be(100);
-
-  expect(typeof followUpResult?.rawResponse).to.be('object');
-
-  const { rawResponse: finalRawResponse } = followUpResult;
-
-  expect(typeof finalRawResponse?.took).to.be('number');
+  expect(fieldCandidatesResponse.status).to.eql(
+    200,
+    `Expected status to be '200', got '${fieldCandidatesResponse.status}'`
+  );
+
+  // Identified 69 fieldCandidates.
+  expect(fieldCandidatesResponse.body?.fieldCandidates.length).to.eql(
+    69,
+    `Expected field candidates length to be '69', got '${fieldCandidatesResponse.body?.fieldCandidates.length}'`
+  );
+
+  const fieldValuePairsResponse = await apmApiClient.readUser({
+    endpoint: 'POST /internal/apm/correlations/field_value_pairs',
+    params: {
+      body: {
+        ...getOptions(),
+        fieldCandidates: fieldCandidatesResponse.body?.fieldCandidates,
+      },
+    },
+  });
+
+  expect(fieldValuePairsResponse.status).to.eql(
+    200,
+    `Expected status to be '200', got '${fieldValuePairsResponse.status}'`
+  );
+
+  // Identified 379 fieldValuePairs.
+  expect(fieldValuePairsResponse.body?.fieldValuePairs.length).to.eql(
+    379,
+    `Expected field value pairs length to be '379', got '${fieldValuePairsResponse.body?.fieldValuePairs.length}'`
+  );
+
+  const significantCorrelationsResponse = await apmApiClient.readUser({
+    endpoint: 'POST /internal/apm/correlations/significant_correlations',
+    params: {
+      body: {
+        ...getOptions(),
+        fieldValuePairs: fieldValuePairsResponse.body?.fieldValuePairs,
+      },
+    },
+  });
+
+  expect(significantCorrelationsResponse.status).to.eql(
+    200,
+    `Expected status to be '200', got '${significantCorrelationsResponse.status}'`
+  );
+
+  // Loaded fractions and totalDocCount of 1244.
+  expect(significantCorrelationsResponse.body?.totalDocCount).to.eql(
+    1244,
+    `Expected 1244 total doc count, got ${significantCorrelationsResponse.body?.totalDocCount}.`
+  );
+
+  const fieldsToSample = new Set<string>();
+  if (significantCorrelationsResponse.body?.latencyCorrelations.length > 0) {
+    significantCorrelationsResponse.body?.latencyCorrelations.forEach((d) => {
+      fieldsToSample.add(d.fieldName);
+    });
+  }
+
+  const failedtransactionsFieldStats = await apmApiClient.readUser({
+    endpoint: 'POST /internal/apm/correlations/field_stats',
+    params: {
+      body: {
+        ...getOptions(),
+        fieldsToSample: [...fieldsToSample],
+      },
+    },
+  });
+
+  const finalRawResponse: LatencyCorrelationsResponse = {
+    ccsWarning: significantCorrelationsResponse.body?.ccsWarning,
+    percentileThresholdValue: overallDistributionResponse.body?.percentileThresholdValue,
+    overallHistogram: overallDistributionResponse.body?.overallHistogram,
+    latencyCorrelations: significantCorrelationsResponse.body?.latencyCorrelations,
+    fieldStats: failedtransactionsFieldStats.body?.stats,
+  };

   // Fetched 95th percentile value of 1309695.875 based on 1244 documents.
   expect(finalRawResponse?.percentileThresholdValue).to.be(1309695.875);
-  expect(finalRawResponse?.overallHistogram.length).to.be(101);
-  expect(finalRawResponse?.fieldStats.length).to.be(12);
+  expect(finalRawResponse?.overallHistogram?.length).to.be(101);
+  expect(finalRawResponse?.fieldStats?.length).to.be(12);

-  expect(finalRawResponse?.latencyCorrelations.length).to.eql(
-    13,
-    `Expected 13 identified correlations, got ${finalRawResponse?.latencyCorrelations.length}.`
-  );
+  // Identified 13 significant correlations out of 379 field/value pairs.
+  expect(finalRawResponse?.latencyCorrelations?.length).to.eql(
+    13,
+    `Expected 13 identified correlations, got ${finalRawResponse?.latencyCorrelations?.length}.`
+  );

-  expect(finalRawResponse?.log.map((d: string) => d.split(': ')[1])).to.eql([
-    'Fetched 95th percentile value of 1309695.875 based on 1244 documents.',
-    'Loaded histogram range steps.',
-    'Loaded overall histogram chart data.',
-    'Loaded percentiles.',
-    'Identified 69 fieldCandidates.',
-    'Identified 379 fieldValuePairs.',
-    'Loaded fractions and totalDocCount of 1244.',
-    'Identified 13 significant correlations out of 379 field/value pairs.',
-    'Identified 12 fields to sample for field statistics.',
-    'Retrieved field statistics for 12 fields out of 12 fields.',
-  ]);

-  const correlation = finalRawResponse?.latencyCorrelations[0];
+  const correlation = finalRawResponse?.latencyCorrelations?.sort(
+    (a, b) => b.correlation - a.correlation
+  )[0];

   expect(typeof correlation).to.be('object');
   expect(correlation?.fieldName).to.be('transaction.result');
   expect(correlation?.fieldValue).to.be('success');

@ -265,10 +227,12 @@ export default function ApiTest({ getService }: FtrProviderContext) {
   expect(correlation?.ksTest).to.be(4.806503252860024e-13);
   expect(correlation?.histogram.length).to.be(101);

-  const fieldStats = finalRawResponse?.fieldStats[0];
+  const fieldStats = finalRawResponse?.fieldStats?.[0];
   expect(typeof fieldStats).to.be('object');
-  expect(fieldStats.topValues.length).to.greaterThan(0);
-  expect(fieldStats.topValuesSampleSize).to.greaterThan(0);
+  expect(
+    Array.isArray(fieldStats?.topValues) && fieldStats?.topValues?.length
+  ).to.greaterThan(0);
+  expect(fieldStats?.topValuesSampleSize).to.greaterThan(0);
   });
  }
 );

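Taken together, the adapted assertions above walk the same endpoint chain a client now performs: field candidates, then field/value pairs, then significant correlations, then field statistics, with the pieces assembled into a LatencyCorrelationsResponse at the end. The following is a minimal sketch of that chain, not code from this change: the callApi helper and its signature are assumptions for illustration, the field_candidates path is inferred from the fieldCandidatesResponse used above, and the overall distribution call that supplies percentileThresholdValue and overallHistogram is omitted for brevity.

// Minimal sketch of chaining the correlations endpoints, assuming a generic
// `callApi(endpoint, body)` helper (hypothetical) that POSTs JSON to Kibana
// and returns the parsed response body. Body and response fields mirror the
// integration test above.
type CorrelationsBaseParams = {
  environment: string;
  start: string;
  end: string;
  kuery: string;
};

async function fetchLatencyCorrelations(
  callApi: (endpoint: string, body: Record<string, unknown>) => Promise<any>,
  baseParams: CorrelationsBaseParams
) {
  // 1. Which fields are worth looking at? (path inferred, see note above)
  const { fieldCandidates } = await callApi(
    '/internal/apm/correlations/field_candidates',
    { ...baseParams }
  );

  // 2. Expand the candidates into concrete field/value pairs.
  const { fieldValuePairs } = await callApi(
    '/internal/apm/correlations/field_value_pairs',
    { ...baseParams, fieldCandidates }
  );

  // 3. Run the correlation analysis on those pairs.
  const { latencyCorrelations, ccsWarning, totalDocCount } = await callApi(
    '/internal/apm/correlations/significant_correlations',
    { ...baseParams, fieldValuePairs }
  );

  // 4. Fetch field statistics only for fields that actually correlated.
  const fieldsToSample = [...new Set(latencyCorrelations.map((d: any) => d.fieldName))];
  const { stats } = await callApi('/internal/apm/correlations/field_stats', {
    ...baseParams,
    fieldsToSample,
  });

  return { ccsWarning, totalDocCount, latencyCorrelations, fieldStats: stats };
}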
@ -0,0 +1,71 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import expect from '@kbn/expect';
import { FtrProviderContext } from '../../common/ftr_provider_context';

export default function ApiTest({ getService }: FtrProviderContext) {
  const apmApiClient = getService('apmApiClient');
  const registry = getService('registry');

  const endpoint = 'POST /internal/apm/correlations/p_values';

  const getOptions = () => ({
    params: {
      body: {
        environment: 'ENVIRONMENT_ALL',
        start: '2020',
        end: '2021',
        kuery: '',
        fieldCandidates: [
          'service.version',
          'service.node.name',
          'service.framework.version',
          'service.language.version',
          'service.runtime.version',
          'kubernetes.pod.name',
          'kubernetes.pod.uid',
          'container.id',
          'source.ip',
          'client.ip',
          'host.ip',
          'service.environment',
          'process.args',
          'http.response.status_code',
        ],
      },
    },
  });

  registry.when('p values without data', { config: 'trial', archives: [] }, () => {
    it('handles the empty state', async () => {
      const response = await apmApiClient.readUser({
        endpoint,
        ...getOptions(),
      });

      expect(response.status).to.be(200);
      expect(response.body?.failedTransactionsCorrelations.length).to.be(0);
    });
  });

  registry.when(
    'p values with data and default args',
    { config: 'trial', archives: ['8.0.0'] },
    () => {
      it('returns p values', async () => {
        const response = await apmApiClient.readUser({
          endpoint,
          ...getOptions(),
        });

        expect(response.status).to.eql(200);
        expect(response.body?.failedTransactionsCorrelations.length).to.be(15);
      });
    }
  );
}

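The p_values endpoint above accepts an arbitrary list of fieldCandidates, so a caller can keep individual requests small by batching the candidates itself and merging whichever batches succeed. Below is a minimal sketch of that idea; the callApi helper, the chunk size, and the merge strategy are illustrative assumptions — only the endpoint path and the body/response fields come from the test above.

// Minimal sketch of querying the p_values endpoint in batches so a single
// slow or failing batch does not sink the whole analysis. `callApi` is a
// hypothetical helper that POSTs JSON to Kibana and returns the parsed body.
async function fetchPValuesInChunks(
  callApi: (endpoint: string, body: Record<string, unknown>) => Promise<any>,
  baseParams: { environment: string; start: string; end: string; kuery: string },
  fieldCandidates: string[],
  chunkSize = 5
) {
  // Split the candidates into small batches.
  const chunks: string[][] = [];
  for (let i = 0; i < fieldCandidates.length; i += chunkSize) {
    chunks.push(fieldCandidates.slice(i, i + chunkSize));
  }

  // Fire the batches in parallel and collect whatever settles.
  const settled = await Promise.allSettled(
    chunks.map((chunk) =>
      callApi('/internal/apm/correlations/p_values', {
        ...baseParams,
        fieldCandidates: chunk,
      })
    )
  );

  // Keep results from the batches that succeeded and ignore the rest.
  return settled
    .filter((r): r is PromiseFulfilledResult<any> => r.status === 'fulfilled')
    .flatMap((r) => r.value.failedTransactionsCorrelations ?? []);
}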
@ -0,0 +1,95 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import expect from '@kbn/expect';
import { FtrProviderContext } from '../../common/ftr_provider_context';

export default function ApiTest({ getService }: FtrProviderContext) {
  const apmApiClient = getService('apmApiClient');
  const registry = getService('registry');

  const endpoint = 'POST /internal/apm/correlations/significant_correlations';

  const getOptions = () => ({
    params: {
      body: {
        environment: 'ENVIRONMENT_ALL',
        start: '2020',
        end: '2021',
        kuery: '',
        fieldValuePairs: [
          { fieldName: 'service.version', fieldValue: '2020-08-26 02:09:20' },
          { fieldName: 'service.version', fieldValue: 'None' },
          {
            fieldName: 'service.node.name',
            fieldValue: 'af586da824b28435f3a8c8f0c016096502cd2495d64fb332db23312be88cfff6',
          },
          {
            fieldName: 'service.node.name',
            fieldValue: 'asdf',
          },
          { fieldName: 'service.runtime.version', fieldValue: '12.18.3' },
          { fieldName: 'service.runtime.version', fieldValue: '2.6.6' },
          {
            fieldName: 'kubernetes.pod.name',
            fieldValue: 'opbeans-node-6cf6cf6f58-r5q9l',
          },
          {
            fieldName: 'kubernetes.pod.name',
            fieldValue: 'opbeans-java-6dc7465984-h9sh5',
          },
          {
            fieldName: 'kubernetes.pod.uid',
            fieldValue: '8da9c944-e741-11ea-819e-42010a84004a',
          },
          {
            fieldName: 'kubernetes.pod.uid',
            fieldValue: '8e192c6c-e741-11ea-819e-42010a84004a',
          },
          {
            fieldName: 'container.id',
            fieldValue: 'af586da824b28435f3a8c8f0c016096502cd2495d64fb332db23312be88cfff6',
          },
          {
            fieldName: 'container.id',
            fieldValue: 'asdf',
          },
          { fieldName: 'host.ip', fieldValue: '10.52.6.48' },
          { fieldName: 'host.ip', fieldValue: '10.52.6.50' },
        ],
      },
    },
  });

  registry.when('significant correlations without data', { config: 'trial', archives: [] }, () => {
    it('handles the empty state', async () => {
      const response = await apmApiClient.readUser({
        endpoint,
        ...getOptions(),
      });

      expect(response.status).to.be(200);
      expect(response.body?.latencyCorrelations.length).to.be(0);
    });
  });

  registry.when(
    'significant correlations with data and default args',
    { config: 'trial', archives: ['8.0.0'] },
    () => {
      it('returns significant correlations', async () => {
        const response = await apmApiClient.readUser({
          endpoint,
          ...getOptions(),
        });

        expect(response.status).to.eql(200);
        expect(response.body?.latencyCorrelations.length).to.be(7);
      });
    }
  );
}