mirror of
https://github.com/elastic/kibana.git
synced 2025-04-24 09:48:58 -04:00
Merge branch 'master' of github.com:elastic/kibana into search/expose-resp-in-search-source-emits
This commit is contained in:
commit
da53ea8d93
78 changed files with 1491 additions and 328 deletions
|
@ -6,6 +6,8 @@
|
|||
|
||||
Create {kib} rules.
|
||||
|
||||
WARNING: This API supports <<token-api-authentication>> only.
|
||||
|
||||
[[create-rule-api-request]]
|
||||
==== Request
|
||||
|
||||
|
|
|
@ -6,6 +6,8 @@
|
|||
|
||||
Enable a rule.
|
||||
|
||||
WARNING: This API supports <<token-api-authentication>> only.
|
||||
|
||||
[[enable-rule-api-request]]
|
||||
==== Request
|
||||
|
||||
|
|
|
@ -6,6 +6,8 @@
|
|||
|
||||
Update the attributes for an existing rule.
|
||||
|
||||
WARNING: This API supports <<token-api-authentication>> only.
|
||||
|
||||
[[update-rule-api-request]]
|
||||
==== Request
|
||||
|
||||
|
|
31
docs/settings/url-drilldown-settings.asciidoc
Normal file
31
docs/settings/url-drilldown-settings.asciidoc
Normal file
|
@ -0,0 +1,31 @@
|
|||
[[url-drilldown-settings-kb]]
|
||||
=== URL drilldown settings in {kib}
|
||||
++++
|
||||
<titleabbrev>URL drilldown settings</titleabbrev>
|
||||
++++
|
||||
|
||||
Configure the URL drilldown settings in your `kibana.yml` configuration file.
|
||||
|
||||
[cols="2*<"]
|
||||
|===
|
||||
| [[url-drilldown-enabled]] `url_drilldown.enabled`
|
||||
| When `true`, enables URL drilldowns on your {kib} instance.
|
||||
|
||||
| [[external-URL-policy]] `externalUrl.policy`
|
||||
| Configures the external URL policies. URL drilldowns respect the global *External URL* service, which you can use to deny or allow external URLs.
|
||||
By default all external URLs are allowed.
|
||||
|===
|
||||
|
||||
For example, to allow external URLs only to the `example.com` domain with the `https` scheme, except for the `danger.example.com` sub-domain,
|
||||
which is denied even when `https` scheme is used:
|
||||
|
||||
["source","yml"]
|
||||
-----------
|
||||
externalUrl.policy:
|
||||
- allow: false
|
||||
host: danger.example.com
|
||||
- allow: true
|
||||
host: example.com
|
||||
protocol: https
|
||||
-----------
|
||||
|
|
@ -756,3 +756,4 @@ include::{kib-repo-dir}/settings/security-settings.asciidoc[]
|
|||
include::{kib-repo-dir}/settings/spaces-settings.asciidoc[]
|
||||
include::{kib-repo-dir}/settings/task-manager-settings.asciidoc[]
|
||||
include::{kib-repo-dir}/settings/telemetry-settings.asciidoc[]
|
||||
include::{kib-repo-dir}/settings/url-drilldown-settings.asciidoc[]
|
||||
|
|
|
@ -2,8 +2,8 @@
|
|||
[[drilldowns]]
|
||||
== Create custom dashboard actions
|
||||
|
||||
Custom dashboard actions, also known as drilldowns, allow you to create
|
||||
workflows for analyzing and troubleshooting your data. Drilldowns apply only to the panel that you created the drilldown from, and are not shared across all of the panels. Each panel can have multiple drilldowns.
|
||||
Custom dashboard actions, or _drilldowns_, allow you to create workflows for analyzing and troubleshooting your data.
|
||||
Drilldowns apply only to the panel that you created the drilldown from, and are not shared across all panels. Each panel can have multiple drilldowns.
|
||||
|
||||
Third-party developers can create drilldowns. To learn how to code drilldowns, refer to {kib-repo}blob/{branch}/x-pack/examples/ui_actions_enhanced_examples[this example plugin].
|
||||
|
||||
|
@ -11,27 +11,23 @@ Third-party developers can create drilldowns. To learn how to code drilldowns, r
|
|||
[[supported-drilldowns]]
|
||||
=== Supported drilldowns
|
||||
|
||||
{kib} supports two types of drilldowns.
|
||||
|
||||
[NOTE]
|
||||
=====================================
|
||||
Some drilldowns are paid subscription features, while others are free.
|
||||
For a comparison of the Elastic subscription levels,
|
||||
refer https://www.elastic.co/subscriptions[the subscription page].
|
||||
=====================================
|
||||
{kib} supports dashboard and URL drilldowns.
|
||||
|
||||
[float]
|
||||
[[dashboard-drilldowns]]
|
||||
==== Dashboard drilldowns
|
||||
|
||||
Dashboard drilldowns enable you to open a dashboard from another dashboard,
|
||||
taking the time range, filters, and other parameters with you,
|
||||
taking the time range, filters, and other parameters with you
|
||||
so the context remains the same. Dashboard drilldowns help you to continue your analysis from a new perspective.
|
||||
|
||||
For example, if you have a dashboard that shows the overall status of multiple data center,
|
||||
you can create a drilldown that navigates from the overall status dashboard to a dashboard
|
||||
that shows a single data center or server.
|
||||
|
||||
[role="screenshot"]
|
||||
image:images/drilldown_on_piechart.gif[Drilldown on pie chart that navigates to another dashboard]
|
||||
|
||||
[float]
|
||||
[[url-drilldowns]]
|
||||
==== URL drilldowns
|
||||
|
@ -39,45 +35,25 @@ that shows a single data center or server.
|
|||
URL drilldowns enable you to navigate from a dashboard to internal or external URLs.
|
||||
Destination URLs can be dynamic, depending on the dashboard context or user interaction with a panel.
|
||||
For example, if you have a dashboard that shows data from a Github repository, you can create a URL drilldown
|
||||
that opens Github from the dashboard.
|
||||
that opens Github from the dashboard panel.
|
||||
|
||||
[role="screenshot"]
|
||||
image:images/url_drilldown_go_to_github.gif[Drilldown on pie chart that navigates to Github]
|
||||
|
||||
Some panels support multiple interactions, also known as triggers.
|
||||
The <<url-template-variables,variables>> you use to create a <<url_templating-language, URL template>> depends on the trigger you choose. URL drilldowns support these types of triggers:
|
||||
|
||||
* *Single click* — A single data point in the visualization.
|
||||
* *Single click* — A single data point in the panel.
|
||||
|
||||
* *Range selection* — A range of values in a visualization.
|
||||
* *Range selection* — A range of values in a panel.
|
||||
|
||||
For example, *Single click* has `{{event.value}}` and *Range selection* has `{{event.from}}` and `{{event.to}}`.
|
||||
|
||||
To disable URL drilldowns on your {kib} instance, add the following line to `kibana.yml` config file:
|
||||
|
||||
["source","yml"]
|
||||
-----------
|
||||
url_drilldown.enabled: false
|
||||
-----------
|
||||
|
||||
URL drilldown also respects the global *External URL* service, which can be used to deny/allow external URLs.
|
||||
By default all external URLs are allowed. To configure external URL policies you need to use `externalUrl.policy` setting in `kibana.yml`, for example:
|
||||
|
||||
["source","yml"]
|
||||
-----------
|
||||
externalUrl.policy:
|
||||
- allow: false
|
||||
host: danger.example.com
|
||||
- allow: true
|
||||
host: example.com
|
||||
protocol: https
|
||||
-----------
|
||||
|
||||
The above rules allow external URLs only to `example.com` domain with `https` scheme, except for `danger.example.com` sub-domain,
|
||||
which is denied even when `https` scheme is used.
|
||||
|
||||
[float]
|
||||
[[dashboard-drilldown-supported-panels]]
|
||||
=== Supported panels
|
||||
=== Supported panel types
|
||||
|
||||
The following panels support dashboard and URL drilldowns.
|
||||
The following panel types support drilldowns.
|
||||
|
||||
[options="header"]
|
||||
|===
|
||||
|
@ -138,7 +114,7 @@ The following panels support dashboard and URL drilldowns.
|
|||
|
||||
| TSVB
|
||||
^| X
|
||||
^|
|
||||
^| X
|
||||
|
||||
| Tag Cloud
|
||||
^| X
|
||||
|
@ -160,25 +136,23 @@ The following panels support dashboard and URL drilldowns.
|
|||
|
||||
[float]
|
||||
[[drilldowns-example]]
|
||||
=== Try it: Create a dashboard drilldown
|
||||
=== Create a dashboard drilldown
|
||||
|
||||
To create dashboard drilldowns, you create or locate the dashboards you want to connect, then configure the drilldown that allows you to easily open one dashboard from the other dashboard.
|
||||
|
||||
image:images/drilldown_on_piechart.gif[Drilldown on pie chart that navigates to another dashboard]
|
||||
|
||||
[float]
|
||||
==== Create the dashboard
|
||||
|
||||
. Add the *Sample web logs* data.
|
||||
|
||||
. Create a new dashboard, then add the following panels:
|
||||
. Create a new dashboard, then add the following panels from the *Visualize Library*:
|
||||
|
||||
* *[Logs] Heatmap*
|
||||
* *[Logs] Host, Visits, and Bytes Table*
|
||||
* *[Logs] Total Requests and Bytes*
|
||||
* *[Logs] Visitors by OS*
|
||||
+
|
||||
If you don’t see data for a panel, try changing the <<set-time-filter,time filter>>.
|
||||
If you don’t see the data on a panel, try changing the <<set-time-filter,time filter>>.
|
||||
|
||||
. Save the dashboard. In the *Title* field, enter `Host Overview`.
|
||||
|
||||
|
@ -197,79 +171,82 @@ Filter: `geo.src: CN`
|
|||
|
||||
. Open the *[Logs] Visitors by OS* panel menu, then select *Create drilldown*.
|
||||
|
||||
. Give the drilldown a name, then select *Go to dashboard*.
|
||||
. Click *Go to dashboard*.
|
||||
|
||||
. From the *Choose a destination dashboard* dropdown, select *Host Overview*.
|
||||
.. Give the drilldown a name. For example, `My Drilldown`.
|
||||
|
||||
. To carry over the filter, query, and date range, make sure that *Use filters and query from origin dashboard* and *Use date range from origin dashboard* are selected.
|
||||
+
|
||||
[role="screenshot"]
|
||||
image::images/drilldown_create.png[Create drilldown with entries for drilldown name and destination]
|
||||
.. From the *Choose a destination dashboard* dropdown, select *Host Overview*.
|
||||
|
||||
. Click *Create drilldown*.
|
||||
+
|
||||
The drilldown is stored as dashboard metadata.
|
||||
.. To use the geo.src filter, KQL query, and time filter, select *Use filters and query from origin dashboard* and *Use date range from origin dashboard*.
|
||||
|
||||
.. Click *Create drilldown*.
|
||||
|
||||
. Save the dashboard.
|
||||
+
|
||||
If you fail to save the dashboard, the drilldown is lost when you navigate away from the dashboard.
|
||||
|
||||
. In the *[Logs] Visitors by OS* panel, click *win 8*, then select the drilldown.
|
||||
. In the *[Logs] Visitors by OS* panel, click *win 8*, then select `My Drilldown`.
|
||||
+
|
||||
[role="screenshot"]
|
||||
image::images/drilldown_on_panel.png[Drilldown on pie chart that navigates to another dashboard]
|
||||
|
||||
. On the *Host Overview* dashboard, verify that the search query, filters,
|
||||
and date range are carried over.
|
||||
. On the *Host Overview* dashboard, verify that the geo.src filter, KQL query, and time filter are applied.
|
||||
|
||||
[float]
|
||||
[[create-a-url-drilldown]]
|
||||
=== Try it: Create a URL drilldown
|
||||
=== Create a URL drilldown
|
||||
|
||||
To create URL drilldowns, you add <<variables,variables>> to a URL template, which configures the behavior of the drilldown.
|
||||
|
||||
image:images/url_drilldown_go_to_github.gif[Drilldown on pie chart that navigates to Github]
|
||||
|
||||
. Add the *Sample web logs* data.
|
||||
|
||||
. Open the *[Logs] Web traffic* dashboard. This isn’t data from Github, but works for demonstration purposes.
|
||||
. Open the *[Logs] Web traffic* dashboard.
|
||||
|
||||
. In the toolbar, click *Edit*.
|
||||
|
||||
. Open the *[Logs] Visitors by OS* panel menu, then select *Create drilldown*.
|
||||
|
||||
.. In the *Name* field, enter `Show on Github`.
|
||||
. Click *Go to URL*.
|
||||
|
||||
.. Select *Go to URL*.
|
||||
.. Give the drilldown a name. For example, `Show on Github`.
|
||||
|
||||
.. Enter the URL template:
|
||||
.. For the *Trigger*, select *Single click*.
|
||||
|
||||
.. To navigate to the {kib} repository Github issues, enter the following in the *Enter URL* field:
|
||||
+
|
||||
[source, bash]
|
||||
----
|
||||
https://github.com/elastic/kibana/issues?q=is:issue+is:open+{{event.value}}
|
||||
----
|
||||
+
|
||||
The example URL navigates to {kib} issues on Github. `{{event.value}}` is substituted with a value associated with a selected pie slice.
|
||||
+
|
||||
[role="screenshot"]
|
||||
image:images/url_drilldown_url_template.png[URL template input]
|
||||
`{{event.value}}` is substituted with a value associated with a selected pie slice.
|
||||
|
||||
.. Click *Create drilldown*.
|
||||
+
|
||||
The drilldown is stored as dashboard metadata.
|
||||
|
||||
. Save the dashboard.
|
||||
+
|
||||
If you fail to save the dashboard, the drilldown is lost when you navigate away from the dashboard.
|
||||
|
||||
. On the *[Logs] Visitors by OS* panel, click any chart slice, then select *Show on Github*.
|
||||
+
|
||||
[role="screenshot"]
|
||||
image:images/url_drilldown_popup.png[URL drilldown popup]
|
||||
|
||||
. On the page that lists the issues in the {kib} repository, verify the slice value appears in Github.
|
||||
. In the list of {kib} repository issues, verify that the slice value appears.
|
||||
+
|
||||
[role="screenshot"]
|
||||
image:images/url_drilldown_github.png[Github]
|
||||
|
||||
[float]
|
||||
[[manage-drilldowns]]
|
||||
=== Manage drilldowns
|
||||
|
||||
Make changes to your drilldowns, make a copy of your drilldowns for another panel, and delete drilldowns.
|
||||
|
||||
. Open the panel menu that includes the drilldown, then click *Manage drilldowns*.
|
||||
|
||||
. On the *Manage* tab, use the following options:
|
||||
|
||||
* To change drilldowns, click *Edit* next to the drilldown you want to change, make your changes, then click *Save*.
|
||||
|
||||
* To make a copy, click *Copy* next to the drilldown you want to change, enter the drilldown name, then click *Create drilldown*.
|
||||
|
||||
* To delete a drilldown, select the drilldown you want to delete, then click *Delete*.
|
||||
|
||||
include::url-drilldown.asciidoc[]
|
||||
|
|
|
@ -92,7 +92,7 @@ pageLoadAssetSize:
|
|||
visTypeTable: 94934
|
||||
visTypeTagcloud: 37575
|
||||
visTypeTimelion: 68883
|
||||
visTypeTimeseries: 155203
|
||||
visTypeTimeseries: 55203
|
||||
visTypeVega: 153573
|
||||
visTypeVislib: 242838
|
||||
visTypeXy: 113478
|
||||
|
|
|
@ -1,10 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
export { EditorController, TSVB_EDITOR_NAME } from './editor_controller';
|
||||
export * from './lib';
|
|
@ -8,7 +8,7 @@
|
|||
|
||||
import { i18n } from '@kbn/i18n';
|
||||
|
||||
import { TSVB_EDITOR_NAME } from './application';
|
||||
import { TSVB_EDITOR_NAME } from './application/editor_controller';
|
||||
import { PANEL_TYPES } from '../common/panel_types';
|
||||
import { isStringTypeIndexPattern } from '../common/index_patterns_utils';
|
||||
import { toExpressionAst } from './to_ast';
|
||||
|
|
|
@ -6,13 +6,11 @@
|
|||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import './application/index.scss';
|
||||
|
||||
import { PluginInitializerContext, CoreSetup, CoreStart, Plugin } from 'kibana/public';
|
||||
import { Plugin as ExpressionsPublicPlugin } from '../../expressions/public';
|
||||
import { VisualizationsSetup } from '../../visualizations/public';
|
||||
import { VisualizePluginSetup } from '../../visualize/public';
|
||||
import { EditorController, TSVB_EDITOR_NAME } from './application';
|
||||
import { EditorController, TSVB_EDITOR_NAME } from './application/editor_controller';
|
||||
|
||||
import { createMetricsFn } from './metrics_fn';
|
||||
import { metricsVisDefinition } from './metrics_type';
|
||||
|
|
|
@ -8,7 +8,8 @@
|
|||
|
||||
import { KibanaContext } from '../../data/public';
|
||||
|
||||
import { getTimezone, validateInterval } from './application';
|
||||
import { getTimezone } from './application/lib/get_timezone';
|
||||
import { validateInterval } from './application/lib/validate_interval';
|
||||
import { getUISettings, getDataStart, getCoreStart } from './services';
|
||||
import { MAX_BUCKETS_SETTING, ROUTES } from '../common/constants';
|
||||
import { TimeseriesVisParams } from './types';
|
||||
|
|
|
@ -12,14 +12,16 @@ import { render, unmountComponentAtNode } from 'react-dom';
|
|||
|
||||
import { I18nProvider } from '@kbn/i18n/react';
|
||||
import { IUiSettingsClient } from 'kibana/public';
|
||||
import type { PersistedState } from '../../visualizations/public';
|
||||
import { VisualizationContainer } from '../../visualizations/public';
|
||||
import { ExpressionRenderDefinition } from '../../expressions/common/expression_renderers';
|
||||
import { TimeseriesRenderValue } from './metrics_fn';
|
||||
|
||||
import { VisualizationContainer, PersistedState } from '../../visualizations/public';
|
||||
|
||||
import { isVisTableData, TimeseriesVisData } from '../common/types';
|
||||
import { TimeseriesVisParams } from './types';
|
||||
import { getChartsSetup } from './services';
|
||||
|
||||
import type { TimeseriesVisParams } from './types';
|
||||
import type { ExpressionRenderDefinition } from '../../expressions/common';
|
||||
import type { TimeseriesRenderValue } from './metrics_fn';
|
||||
|
||||
const TimeseriesVisualization = lazy(
|
||||
() => import('./application/components/timeseries_visualization')
|
||||
);
|
||||
|
@ -39,6 +41,10 @@ export const getTimeseriesVisRenderer: (deps: {
|
|||
name: 'timeseries_vis',
|
||||
reuseDomNode: true,
|
||||
render: async (domNode, config, handlers) => {
|
||||
// Build optimization. Move app styles from main bundle
|
||||
// @ts-expect-error TS error, cannot find type declaration for scss
|
||||
await import('./application/index.scss');
|
||||
|
||||
handlers.onDestroy(() => {
|
||||
unmountComponentAtNode(domNode);
|
||||
});
|
||||
|
|
|
@ -7,9 +7,9 @@
|
|||
*/
|
||||
|
||||
import { buildExpression, buildExpressionFunction } from '../../expressions/public';
|
||||
import { Vis } from '../../visualizations/public';
|
||||
import { TimeseriesExpressionFunctionDefinition } from './metrics_fn';
|
||||
import { TimeseriesVisParams } from './types';
|
||||
import type { Vis } from '../../visualizations/public';
|
||||
import type { TimeseriesExpressionFunctionDefinition } from './metrics_fn';
|
||||
import type { TimeseriesVisParams } from './types';
|
||||
|
||||
export const toExpressionAst = (vis: Vis<TimeseriesVisParams>) => {
|
||||
const timeseries = buildExpressionFunction<TimeseriesExpressionFunctionDefinition>('tsvb', {
|
||||
|
|
|
@ -260,9 +260,14 @@ export class AlertsClient {
|
|||
);
|
||||
const username = await this.getUserName();
|
||||
|
||||
const createdAPIKey = data.enabled
|
||||
? await this.createAPIKey(this.generateAPIKeyName(alertType.id, data.name))
|
||||
: null;
|
||||
let createdAPIKey = null;
|
||||
try {
|
||||
createdAPIKey = data.enabled
|
||||
? await this.createAPIKey(this.generateAPIKeyName(alertType.id, data.name))
|
||||
: null;
|
||||
} catch (error) {
|
||||
throw Boom.badRequest(`Error creating rule: could not create API key - ${error.message}`);
|
||||
}
|
||||
|
||||
this.validateActions(alertType, data.actions);
|
||||
|
||||
|
@ -727,9 +732,16 @@ export class AlertsClient {
|
|||
|
||||
const { actions, references } = await this.denormalizeActions(data.actions);
|
||||
const username = await this.getUserName();
|
||||
const createdAPIKey = attributes.enabled
|
||||
? await this.createAPIKey(this.generateAPIKeyName(alertType.id, data.name))
|
||||
: null;
|
||||
|
||||
let createdAPIKey = null;
|
||||
try {
|
||||
createdAPIKey = attributes.enabled
|
||||
? await this.createAPIKey(this.generateAPIKeyName(alertType.id, data.name))
|
||||
: null;
|
||||
} catch (error) {
|
||||
throw Boom.badRequest(`Error updating rule: could not create API key - ${error.message}`);
|
||||
}
|
||||
|
||||
const apiKeyAttributes = this.apiKeyAsAlertAttributes(createdAPIKey, username);
|
||||
const notifyWhen = getAlertNotifyWhenType(data.notifyWhen, data.throttle);
|
||||
|
||||
|
@ -837,12 +849,21 @@ export class AlertsClient {
|
|||
}
|
||||
|
||||
const username = await this.getUserName();
|
||||
|
||||
let createdAPIKey = null;
|
||||
try {
|
||||
createdAPIKey = await this.createAPIKey(
|
||||
this.generateAPIKeyName(attributes.alertTypeId, attributes.name)
|
||||
);
|
||||
} catch (error) {
|
||||
throw Boom.badRequest(
|
||||
`Error updating API key for rule: could not create API key - ${error.message}`
|
||||
);
|
||||
}
|
||||
|
||||
const updateAttributes = this.updateMeta({
|
||||
...attributes,
|
||||
...this.apiKeyAsAlertAttributes(
|
||||
await this.createAPIKey(this.generateAPIKeyName(attributes.alertTypeId, attributes.name)),
|
||||
username
|
||||
),
|
||||
...this.apiKeyAsAlertAttributes(createdAPIKey, username),
|
||||
updatedAt: new Date().toISOString(),
|
||||
updatedBy: username,
|
||||
});
|
||||
|
@ -944,13 +965,20 @@ export class AlertsClient {
|
|||
|
||||
if (attributes.enabled === false) {
|
||||
const username = await this.getUserName();
|
||||
|
||||
let createdAPIKey = null;
|
||||
try {
|
||||
createdAPIKey = await this.createAPIKey(
|
||||
this.generateAPIKeyName(attributes.alertTypeId, attributes.name)
|
||||
);
|
||||
} catch (error) {
|
||||
throw Boom.badRequest(`Error enabling rule: could not create API key - ${error.message}`);
|
||||
}
|
||||
|
||||
const updateAttributes = this.updateMeta({
|
||||
...attributes,
|
||||
enabled: true,
|
||||
...this.apiKeyAsAlertAttributes(
|
||||
await this.createAPIKey(this.generateAPIKeyName(attributes.alertTypeId, attributes.name)),
|
||||
username
|
||||
),
|
||||
...this.apiKeyAsAlertAttributes(createdAPIKey, username),
|
||||
updatedBy: username,
|
||||
updatedAt: new Date().toISOString(),
|
||||
});
|
||||
|
|
|
@ -1701,6 +1701,18 @@ describe('create()', () => {
|
|||
);
|
||||
});
|
||||
|
||||
test('throws an error if API key creation throws', async () => {
|
||||
const data = getMockData();
|
||||
alertsClientParams.createAPIKey.mockImplementation(() => {
|
||||
throw new Error('no');
|
||||
});
|
||||
expect(
|
||||
async () => await alertsClient.create({ data })
|
||||
).rejects.toThrowErrorMatchingInlineSnapshot(
|
||||
`"Error creating rule: could not create API key - no"`
|
||||
);
|
||||
});
|
||||
|
||||
test('throws error when ensureActionTypeEnabled throws', async () => {
|
||||
const data = getMockData();
|
||||
alertTypeRegistry.ensureAlertTypeEnabled.mockImplementation(() => {
|
||||
|
|
|
@ -359,6 +359,17 @@ describe('enable()', () => {
|
|||
);
|
||||
});
|
||||
|
||||
test('throws an error if API key creation throws', async () => {
|
||||
alertsClientParams.createAPIKey.mockImplementation(() => {
|
||||
throw new Error('no');
|
||||
});
|
||||
expect(
|
||||
async () => await alertsClient.enable({ id: '1' })
|
||||
).rejects.toThrowErrorMatchingInlineSnapshot(
|
||||
`"Error enabling rule: could not create API key - no"`
|
||||
);
|
||||
});
|
||||
|
||||
test('falls back when failing to getDecryptedAsInternalUser', async () => {
|
||||
encryptedSavedObjects.getDecryptedAsInternalUser.mockRejectedValue(new Error('Fail'));
|
||||
|
||||
|
|
|
@ -692,6 +692,53 @@ describe('update()', () => {
|
|||
`);
|
||||
});
|
||||
|
||||
it('throws an error if API key creation throws', async () => {
|
||||
alertsClientParams.createAPIKey.mockImplementation(() => {
|
||||
throw new Error('no');
|
||||
});
|
||||
expect(
|
||||
async () =>
|
||||
await alertsClient.update({
|
||||
id: '1',
|
||||
data: {
|
||||
schedule: { interval: '10s' },
|
||||
name: 'abc',
|
||||
tags: ['foo'],
|
||||
params: {
|
||||
bar: true,
|
||||
},
|
||||
throttle: null,
|
||||
notifyWhen: 'onActiveAlert',
|
||||
actions: [
|
||||
{
|
||||
group: 'default',
|
||||
id: '1',
|
||||
params: {
|
||||
foo: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
group: 'default',
|
||||
id: '1',
|
||||
params: {
|
||||
foo: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
group: 'default',
|
||||
id: '2',
|
||||
params: {
|
||||
foo: true,
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
})
|
||||
).rejects.toThrowErrorMatchingInlineSnapshot(
|
||||
`"Error updating rule: could not create API key - no"`
|
||||
);
|
||||
});
|
||||
|
||||
it('should validate params', async () => {
|
||||
alertTypeRegistry.get.mockReturnValueOnce({
|
||||
id: '123',
|
||||
|
|
|
@ -99,13 +99,13 @@ describe('updateApiKey()', () => {
|
|||
references: [],
|
||||
});
|
||||
encryptedSavedObjects.getDecryptedAsInternalUser.mockResolvedValue(existingEncryptedAlert);
|
||||
});
|
||||
|
||||
test('updates the API key for the alert', async () => {
|
||||
alertsClientParams.createAPIKey.mockResolvedValueOnce({
|
||||
apiKeysEnabled: true,
|
||||
result: { id: '234', name: '123', api_key: 'abc' },
|
||||
});
|
||||
});
|
||||
|
||||
test('updates the API key for the alert', async () => {
|
||||
await alertsClient.updateApiKey({ id: '1' });
|
||||
expect(unsecuredSavedObjectsClient.get).not.toHaveBeenCalled();
|
||||
expect(encryptedSavedObjects.getDecryptedAsInternalUser).toHaveBeenCalledWith('alert', '1', {
|
||||
|
@ -145,7 +145,22 @@ describe('updateApiKey()', () => {
|
|||
);
|
||||
});
|
||||
|
||||
test('throws an error if API key creation throws', async () => {
|
||||
alertsClientParams.createAPIKey.mockImplementation(() => {
|
||||
throw new Error('no');
|
||||
});
|
||||
expect(
|
||||
async () => await alertsClient.updateApiKey({ id: '1' })
|
||||
).rejects.toThrowErrorMatchingInlineSnapshot(
|
||||
`"Error updating API key for rule: could not create API key - no"`
|
||||
);
|
||||
});
|
||||
|
||||
test('falls back to SOC when getDecryptedAsInternalUser throws an error', async () => {
|
||||
alertsClientParams.createAPIKey.mockResolvedValueOnce({
|
||||
apiKeysEnabled: true,
|
||||
result: { id: '234', name: '123', api_key: 'abc' },
|
||||
});
|
||||
encryptedSavedObjects.getDecryptedAsInternalUser.mockRejectedValueOnce(new Error('Fail'));
|
||||
unsecuredSavedObjectsClient.create.mockResolvedValueOnce({
|
||||
id: '1',
|
||||
|
|
|
@ -1,6 +1,11 @@
|
|||
@import 'file_datavisualizer_view/index';
|
||||
@import 'results_view/index';
|
||||
@import 'analysis_summary/index';
|
||||
@import 'about_panel/index';
|
||||
@import 'import_summary/index';
|
||||
@import 'analysis_summary/index';
|
||||
@import 'edit_flyout/index';
|
||||
@import 'embedded_map/index';
|
||||
@import 'experimental_badge/index';
|
||||
@import 'file_contents/index';
|
||||
@import 'file_datavisualizer_view/index';
|
||||
@import 'import_summary/index';
|
||||
@import 'results_view/index';
|
||||
@import 'stats_table/index';
|
||||
@import 'top_values/top_values';
|
||||
|
|
|
@ -104,7 +104,7 @@ const Contents: FC<{
|
|||
username: string | null;
|
||||
}> = ({ value, index, username }) => {
|
||||
return (
|
||||
<EuiFlexItem>
|
||||
<EuiFlexItem data-test-subj="fileDataVisFilebeatConfigPanel">
|
||||
<EuiTitle size="s">
|
||||
<h5>
|
||||
<FormattedMessage
|
||||
|
|
|
@ -220,6 +220,7 @@ export const ResultsLinks: FC<Props> = ({
|
|||
<EuiFlexItem>
|
||||
<EuiCard
|
||||
icon={<EuiIcon size="xxl" type={`filebeatApp`} />}
|
||||
data-test-subj="fileDataVisFilebeatConfigLink"
|
||||
title={
|
||||
<FormattedMessage
|
||||
id="xpack.fileDataVisualizer.resultsLinks.fileBeatConfig"
|
||||
|
|
|
@ -485,14 +485,18 @@ const DropsInner = memo(function DropsInner(props: DropsInnerProps) {
|
|||
}, [order, registerDropTarget, dropTypes, keyboardMode]);
|
||||
|
||||
useEffect(() => {
|
||||
let isMounted = true;
|
||||
if (activeDropTarget && activeDropTarget.id !== value.id) {
|
||||
setIsInZone(false);
|
||||
}
|
||||
setTimeout(() => {
|
||||
if (!activeDropTarget) {
|
||||
if (!activeDropTarget && isMounted) {
|
||||
setIsInZone(false);
|
||||
}
|
||||
}, 1000);
|
||||
return () => {
|
||||
isMounted = false;
|
||||
};
|
||||
}, [activeDropTarget, setIsInZone, value.id]);
|
||||
|
||||
const dragEnter = () => {
|
||||
|
|
|
@ -43,7 +43,6 @@ import {
|
|||
import { DragDrop, DragContext, DragDropIdentifier } from '../../../drag_drop';
|
||||
import { Suggestion, switchToSuggestion } from '../suggestion_helpers';
|
||||
import { buildExpression } from '../expression_helpers';
|
||||
import { debouncedComponent } from '../../../debounced_component';
|
||||
import { trackUiEvent } from '../../../lens_ui_telemetry';
|
||||
import {
|
||||
UiActionsStart,
|
||||
|
@ -368,7 +367,7 @@ export const InnerWorkspacePanel = React.memo(function InnerWorkspacePanel({
|
|||
);
|
||||
});
|
||||
|
||||
export const InnerVisualizationWrapper = ({
|
||||
export const VisualizationWrapper = ({
|
||||
expression,
|
||||
framePublicAPI,
|
||||
timefilter,
|
||||
|
@ -619,5 +618,3 @@ export const InnerVisualizationWrapper = ({
|
|||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
export const VisualizationWrapper = debouncedComponent(InnerVisualizationWrapper);
|
||||
|
|
|
@ -1150,6 +1150,83 @@ describe('IndexPatternDimensionEditorPanel', () => {
|
|||
});
|
||||
});
|
||||
|
||||
it('respects groups on moving operations if some columns are not listed in groups', () => {
|
||||
// config:
|
||||
// a: col1,
|
||||
// b: col2, col3
|
||||
// c: col4
|
||||
// col5, col6 not in visualization groups
|
||||
// dragging col3 onto col1 in group a
|
||||
onDrop({
|
||||
...defaultProps,
|
||||
columnId: 'col1',
|
||||
droppedItem: draggingCol3,
|
||||
state: {
|
||||
...testState,
|
||||
layers: {
|
||||
first: {
|
||||
...testState.layers.first,
|
||||
columnOrder: ['col1', 'col2', 'col3', 'col4', 'col5', 'col6'],
|
||||
columns: {
|
||||
...testState.layers.first.columns,
|
||||
col5: {
|
||||
dataType: 'number',
|
||||
operationType: 'count',
|
||||
label: '',
|
||||
isBucketed: false,
|
||||
sourceField: 'Records',
|
||||
},
|
||||
col6: {
|
||||
dataType: 'number',
|
||||
operationType: 'count',
|
||||
label: '',
|
||||
isBucketed: false,
|
||||
sourceField: 'Records',
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
groupId: 'a',
|
||||
dimensionGroups: [
|
||||
{ ...dimensionGroups[0], accessors: [{ columnId: 'col1' }] },
|
||||
{ ...dimensionGroups[1], accessors: [{ columnId: 'col2' }, { columnId: 'col3' }] },
|
||||
{ ...dimensionGroups[2] },
|
||||
],
|
||||
dropType: 'move_compatible',
|
||||
});
|
||||
|
||||
expect(setState).toBeCalledTimes(1);
|
||||
expect(setState).toHaveBeenCalledWith({
|
||||
...testState,
|
||||
layers: {
|
||||
first: {
|
||||
...testState.layers.first,
|
||||
columnOrder: ['col1', 'col2', 'col4', 'col5', 'col6'],
|
||||
columns: {
|
||||
col1: testState.layers.first.columns.col3,
|
||||
col2: testState.layers.first.columns.col2,
|
||||
col4: testState.layers.first.columns.col4,
|
||||
col5: {
|
||||
dataType: 'number',
|
||||
operationType: 'count',
|
||||
label: '',
|
||||
isBucketed: false,
|
||||
sourceField: 'Records',
|
||||
},
|
||||
col6: {
|
||||
dataType: 'number',
|
||||
operationType: 'count',
|
||||
label: '',
|
||||
isBucketed: false,
|
||||
sourceField: 'Records',
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('respects groups on duplicating operations between compatible groups with overwrite', () => {
|
||||
// config:
|
||||
// a: col1,
|
||||
|
|
|
@ -147,9 +147,9 @@ function onMoveCompatible(
|
|||
columns: newColumns,
|
||||
};
|
||||
|
||||
const updatedColumnOrder = getColumnOrder(newLayer);
|
||||
let updatedColumnOrder = getColumnOrder(newLayer);
|
||||
|
||||
reorderByGroups(dimensionGroups, groupId, updatedColumnOrder, columnId);
|
||||
updatedColumnOrder = reorderByGroups(dimensionGroups, groupId, updatedColumnOrder, columnId);
|
||||
|
||||
// Time to replace
|
||||
setState(
|
||||
|
@ -342,8 +342,8 @@ function onSwapCompatible({
|
|||
newColumns[targetId] = sourceColumn;
|
||||
newColumns[sourceId] = targetColumn;
|
||||
|
||||
const updatedColumnOrder = swapColumnOrder(layer.columnOrder, sourceId, targetId);
|
||||
reorderByGroups(dimensionGroups, groupId, updatedColumnOrder, columnId);
|
||||
let updatedColumnOrder = swapColumnOrder(layer.columnOrder, sourceId, targetId);
|
||||
updatedColumnOrder = reorderByGroups(dimensionGroups, groupId, updatedColumnOrder, columnId);
|
||||
|
||||
// Time to replace
|
||||
setState(
|
||||
|
|
|
@ -860,6 +860,44 @@ describe('IndexPattern Data Source', () => {
|
|||
expect(operationDefinitionMap.testReference.toExpression).toHaveBeenCalled();
|
||||
expect(ast.chain[2]).toEqual('mock');
|
||||
});
|
||||
|
||||
it('should keep correct column mapping keys with reference columns present', async () => {
|
||||
const queryBaseState: IndexPatternBaseState = {
|
||||
currentIndexPatternId: '1',
|
||||
layers: {
|
||||
first: {
|
||||
indexPatternId: '1',
|
||||
columnOrder: ['col2', 'col1'],
|
||||
columns: {
|
||||
col1: {
|
||||
label: 'Count of records',
|
||||
dataType: 'date',
|
||||
isBucketed: false,
|
||||
sourceField: 'timefield',
|
||||
operationType: 'unique_count',
|
||||
},
|
||||
col2: {
|
||||
label: 'Reference',
|
||||
dataType: 'number',
|
||||
isBucketed: false,
|
||||
// @ts-expect-error not a valid type
|
||||
operationType: 'testReference',
|
||||
references: ['col1'],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const state = enrichBaseState(queryBaseState);
|
||||
|
||||
const ast = indexPatternDatasource.toExpression(state, 'first') as Ast;
|
||||
expect(JSON.parse(ast.chain[1].arguments.idMap[0] as string)).toEqual({
|
||||
'col-0-col1': expect.objectContaining({
|
||||
id: 'col1',
|
||||
}),
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
|
|
|
@ -1106,11 +1106,11 @@ describe('IndexPattern Data Source suggestions', () => {
|
|||
operation: expect.objectContaining({ dataType: 'date', isBucketed: true }),
|
||||
},
|
||||
{
|
||||
columnId: 'newid',
|
||||
columnId: 'ref',
|
||||
operation: expect.objectContaining({ dataType: 'number', isBucketed: false }),
|
||||
},
|
||||
{
|
||||
columnId: 'ref',
|
||||
columnId: 'newid',
|
||||
operation: expect.objectContaining({ dataType: 'number', isBucketed: false }),
|
||||
},
|
||||
],
|
||||
|
@ -1158,15 +1158,6 @@ describe('IndexPattern Data Source suggestions', () => {
|
|||
table: expect.objectContaining({
|
||||
changeType: 'extended',
|
||||
columns: [
|
||||
{
|
||||
columnId: 'newid',
|
||||
operation: {
|
||||
dataType: 'number',
|
||||
isBucketed: false,
|
||||
label: 'Count of records',
|
||||
scale: 'ratio',
|
||||
},
|
||||
},
|
||||
{
|
||||
columnId: 'ref',
|
||||
operation: {
|
||||
|
@ -1176,6 +1167,15 @@ describe('IndexPattern Data Source suggestions', () => {
|
|||
scale: undefined,
|
||||
},
|
||||
},
|
||||
{
|
||||
columnId: 'newid',
|
||||
operation: {
|
||||
dataType: 'number',
|
||||
isBucketed: false,
|
||||
label: 'Count of records',
|
||||
scale: 'ratio',
|
||||
},
|
||||
},
|
||||
],
|
||||
}),
|
||||
})
|
||||
|
|
|
@ -712,7 +712,12 @@ function addBucket(
|
|||
// they already had, with an extra level of detail.
|
||||
updatedColumnOrder = [...buckets, addedColumnId, ...metrics, ...references];
|
||||
}
|
||||
reorderByGroups(visualizationGroups, targetGroup, updatedColumnOrder, addedColumnId);
|
||||
updatedColumnOrder = reorderByGroups(
|
||||
visualizationGroups,
|
||||
targetGroup,
|
||||
updatedColumnOrder,
|
||||
addedColumnId
|
||||
);
|
||||
const tempLayer = {
|
||||
...resetIncomplete(layer, addedColumnId),
|
||||
columns: { ...layer.columns, [addedColumnId]: column },
|
||||
|
@ -749,16 +754,24 @@ export function reorderByGroups(
|
|||
});
|
||||
const columnGroupIndex: Record<string, number> = {};
|
||||
updatedColumnOrder.forEach((columnId) => {
|
||||
columnGroupIndex[columnId] = orderedVisualizationGroups.findIndex(
|
||||
const groupIndex = orderedVisualizationGroups.findIndex(
|
||||
(group) =>
|
||||
(columnId === addedColumnId && group.groupId === targetGroup) ||
|
||||
group.accessors.some((acc) => acc.columnId === columnId)
|
||||
);
|
||||
if (groupIndex !== -1) {
|
||||
columnGroupIndex[columnId] = groupIndex;
|
||||
} else {
|
||||
// referenced columns won't show up in visualization groups - put them in the back of the list. This will work as they are always metrics
|
||||
columnGroupIndex[columnId] = updatedColumnOrder.length;
|
||||
}
|
||||
});
|
||||
|
||||
updatedColumnOrder.sort((a, b) => {
|
||||
return [...updatedColumnOrder].sort((a, b) => {
|
||||
return columnGroupIndex[a] - columnGroupIndex[b];
|
||||
});
|
||||
} else {
|
||||
return updatedColumnOrder;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -899,12 +912,8 @@ export function getColumnOrder(layer: IndexPatternLayer): string[] {
|
|||
}
|
||||
});
|
||||
|
||||
const [direct, referenceBased] = _.partition(
|
||||
entries,
|
||||
([, col]) => operationDefinitionMap[col.operationType].input !== 'fullReference'
|
||||
);
|
||||
// If a reference has another reference as input, put it last in sort order
|
||||
referenceBased.sort(([idA, a], [idB, b]) => {
|
||||
entries.sort(([idA, a], [idB, b]) => {
|
||||
if ('references' in a && a.references.includes(idB)) {
|
||||
return 1;
|
||||
}
|
||||
|
@ -913,12 +922,9 @@ export function getColumnOrder(layer: IndexPatternLayer): string[] {
|
|||
}
|
||||
return 0;
|
||||
});
|
||||
const [aggregations, metrics] = _.partition(direct, ([, col]) => col.isBucketed);
|
||||
const [aggregations, metrics] = _.partition(entries, ([, col]) => col.isBucketed);
|
||||
|
||||
return aggregations
|
||||
.map(([id]) => id)
|
||||
.concat(metrics.map(([id]) => id))
|
||||
.concat(referenceBased.map(([id]) => id));
|
||||
return aggregations.map(([id]) => id).concat(metrics.map(([id]) => id));
|
||||
}
|
||||
|
||||
// Splits existing columnOrder into the three categories
|
||||
|
|
|
@ -6,6 +6,7 @@
|
|||
*/
|
||||
|
||||
import type { IUiSettingsClient } from 'kibana/public';
|
||||
import { partition } from 'lodash';
|
||||
import {
|
||||
AggFunctionsMapping,
|
||||
EsaggsExpressionFunctionDefinition,
|
||||
|
@ -57,14 +58,24 @@ function getExpressionForLayer(
|
|||
|
||||
const columnEntries = columnOrder.map((colId) => [colId, columns[colId]] as const);
|
||||
|
||||
if (columnEntries.length) {
|
||||
const [referenceEntries, esAggEntries] = partition(
|
||||
columnEntries,
|
||||
([, col]) => operationDefinitionMap[col.operationType]?.input === 'fullReference'
|
||||
);
|
||||
|
||||
if (referenceEntries.length || esAggEntries.length) {
|
||||
const aggs: ExpressionAstExpressionBuilder[] = [];
|
||||
const expressions: ExpressionAstFunction[] = [];
|
||||
columnEntries.forEach(([colId, col]) => {
|
||||
referenceEntries.forEach(([colId, col]) => {
|
||||
const def = operationDefinitionMap[col.operationType];
|
||||
if (def.input === 'fullReference') {
|
||||
expressions.push(...def.toExpression(layer, colId, indexPattern));
|
||||
} else {
|
||||
}
|
||||
});
|
||||
|
||||
esAggEntries.forEach(([colId, col]) => {
|
||||
const def = operationDefinitionMap[col.operationType];
|
||||
if (def.input !== 'fullReference') {
|
||||
const wrapInFilter = Boolean(def.filterable && col.filter);
|
||||
let aggAst = def.toEsAggsFn(
|
||||
col,
|
||||
|
@ -101,8 +112,8 @@ function getExpressionForLayer(
|
|||
}
|
||||
});
|
||||
|
||||
const idMap = columnEntries.reduce((currentIdMap, [colId, column], index) => {
|
||||
const esAggsId = `col-${columnEntries.length === 1 ? 0 : index}-${colId}`;
|
||||
const idMap = esAggEntries.reduce((currentIdMap, [colId, column], index) => {
|
||||
const esAggsId = `col-${index}-${colId}`;
|
||||
return {
|
||||
...currentIdMap,
|
||||
[esAggsId]: {
|
||||
|
|
|
@ -74,6 +74,10 @@ exports[`xy_expression XYChart component it renders area 1`] = `
|
|||
tickFormat={[Function]}
|
||||
title="a"
|
||||
/>
|
||||
<XyEndzones
|
||||
darkMode={false}
|
||||
histogramMode={false}
|
||||
/>
|
||||
<Connect(SpecInstance)
|
||||
areaSeriesStyle={
|
||||
Object {
|
||||
|
@ -271,6 +275,10 @@ exports[`xy_expression XYChart component it renders bar 1`] = `
|
|||
tickFormat={[Function]}
|
||||
title="a"
|
||||
/>
|
||||
<XyEndzones
|
||||
darkMode={false}
|
||||
histogramMode={false}
|
||||
/>
|
||||
<Connect(SpecInstance)
|
||||
areaSeriesStyle={
|
||||
Object {
|
||||
|
@ -476,6 +484,10 @@ exports[`xy_expression XYChart component it renders horizontal bar 1`] = `
|
|||
tickFormat={[Function]}
|
||||
title="a"
|
||||
/>
|
||||
<XyEndzones
|
||||
darkMode={false}
|
||||
histogramMode={false}
|
||||
/>
|
||||
<Connect(SpecInstance)
|
||||
areaSeriesStyle={
|
||||
Object {
|
||||
|
@ -681,6 +693,10 @@ exports[`xy_expression XYChart component it renders line 1`] = `
|
|||
tickFormat={[Function]}
|
||||
title="a"
|
||||
/>
|
||||
<XyEndzones
|
||||
darkMode={false}
|
||||
histogramMode={false}
|
||||
/>
|
||||
<Connect(SpecInstance)
|
||||
areaSeriesStyle={
|
||||
Object {
|
||||
|
@ -878,6 +894,10 @@ exports[`xy_expression XYChart component it renders stacked area 1`] = `
|
|||
tickFormat={[Function]}
|
||||
title="a"
|
||||
/>
|
||||
<XyEndzones
|
||||
darkMode={false}
|
||||
histogramMode={false}
|
||||
/>
|
||||
<Connect(SpecInstance)
|
||||
areaSeriesStyle={
|
||||
Object {
|
||||
|
@ -1083,6 +1103,10 @@ exports[`xy_expression XYChart component it renders stacked bar 1`] = `
|
|||
tickFormat={[Function]}
|
||||
title="a"
|
||||
/>
|
||||
<XyEndzones
|
||||
darkMode={false}
|
||||
histogramMode={false}
|
||||
/>
|
||||
<Connect(SpecInstance)
|
||||
areaSeriesStyle={
|
||||
Object {
|
||||
|
@ -1296,6 +1320,10 @@ exports[`xy_expression XYChart component it renders stacked horizontal bar 1`] =
|
|||
tickFormat={[Function]}
|
||||
title="a"
|
||||
/>
|
||||
<XyEndzones
|
||||
darkMode={false}
|
||||
histogramMode={false}
|
||||
/>
|
||||
<Connect(SpecInstance)
|
||||
areaSeriesStyle={
|
||||
Object {
|
||||
|
|
|
@ -58,6 +58,9 @@ Object {
|
|||
"type": "expression",
|
||||
},
|
||||
],
|
||||
"hideEndzones": Array [
|
||||
true,
|
||||
],
|
||||
"layers": Array [
|
||||
Object {
|
||||
"chain": Array [
|
||||
|
|
|
@ -79,4 +79,16 @@ describe('Axes Settings', () => {
|
|||
false
|
||||
);
|
||||
});
|
||||
|
||||
it('hides the endzone visibility flag if no setter is passed in', () => {
|
||||
const component = shallow(<AxisSettingsPopover {...props} />);
|
||||
expect(component.find('[data-test-subj="lnsshowEndzones"]').length).toBe(0);
|
||||
});
|
||||
|
||||
it('shows the switch if setter is present', () => {
|
||||
const component = shallow(
|
||||
<AxisSettingsPopover {...props} endzonesVisible={true} setEndzoneVisibility={() => {}} />
|
||||
);
|
||||
expect(component.find('[data-test-subj="lnsshowEndzones"]').prop('checked')).toBe(true);
|
||||
});
|
||||
});
|
||||
|
|
|
@ -71,6 +71,14 @@ export interface AxisSettingsPopoverProps {
|
|||
* Toggles the axis title visibility
|
||||
*/
|
||||
toggleAxisTitleVisibility: (axis: AxesSettingsConfigKeys, checked: boolean) => void;
|
||||
/**
|
||||
* Set endzone visibility
|
||||
*/
|
||||
setEndzoneVisibility?: (checked: boolean) => void;
|
||||
/**
|
||||
* Flag whether endzones are visible
|
||||
*/
|
||||
endzonesVisible?: boolean;
|
||||
}
|
||||
const popoverConfig = (
|
||||
axis: AxesSettingsConfigKeys,
|
||||
|
@ -138,6 +146,8 @@ export const AxisSettingsPopover: React.FunctionComponent<AxisSettingsPopoverPro
|
|||
areGridlinesVisible,
|
||||
isAxisTitleVisible,
|
||||
toggleAxisTitleVisibility,
|
||||
setEndzoneVisibility,
|
||||
endzonesVisible,
|
||||
}) => {
|
||||
const [title, setTitle] = useState<string | undefined>(axisTitle);
|
||||
|
||||
|
@ -212,6 +222,20 @@ export const AxisSettingsPopover: React.FunctionComponent<AxisSettingsPopoverPro
|
|||
onChange={() => toggleGridlinesVisibility(axis)}
|
||||
checked={areGridlinesVisible}
|
||||
/>
|
||||
{setEndzoneVisibility && (
|
||||
<>
|
||||
<EuiSpacer size="m" />
|
||||
<EuiSwitch
|
||||
compressed
|
||||
data-test-subj={`lnsshowEndzones`}
|
||||
label={i18n.translate('xpack.lens.xyChart.showEnzones', {
|
||||
defaultMessage: 'Show partial data markers',
|
||||
})}
|
||||
onChange={() => setEndzoneVisibility(!Boolean(endzonesVisible))}
|
||||
checked={Boolean(endzonesVisible)}
|
||||
/>
|
||||
</>
|
||||
)}
|
||||
</ToolbarPopover>
|
||||
);
|
||||
};
|
||||
|
|
|
@ -44,6 +44,7 @@ import { createMockExecutionContext } from '../../../../../src/plugins/expressio
|
|||
import { mountWithIntl } from '@kbn/test/jest';
|
||||
import { chartPluginMock } from '../../../../../src/plugins/charts/public/mocks';
|
||||
import { EmptyPlaceholder } from '../shared_components/empty_placeholder';
|
||||
import { XyEndzones } from './x_domain';
|
||||
|
||||
const onClickValue = jest.fn();
|
||||
const onSelectRange = jest.fn();
|
||||
|
@ -549,6 +550,135 @@ describe('xy_expression', () => {
|
|||
}
|
||||
`);
|
||||
});
|
||||
|
||||
describe('endzones', () => {
|
||||
const { args } = sampleArgs();
|
||||
const data: LensMultiTable = {
|
||||
type: 'lens_multitable',
|
||||
tables: {
|
||||
first: createSampleDatatableWithRows([
|
||||
{ a: 1, b: 2, c: new Date('2021-04-22').valueOf(), d: 'Foo' },
|
||||
{ a: 1, b: 2, c: new Date('2021-04-23').valueOf(), d: 'Foo' },
|
||||
{ a: 1, b: 2, c: new Date('2021-04-24').valueOf(), d: 'Foo' },
|
||||
]),
|
||||
},
|
||||
dateRange: {
|
||||
// first and last bucket are partial
|
||||
fromDate: new Date('2021-04-22T12:00:00.000Z'),
|
||||
toDate: new Date('2021-04-24T12:00:00.000Z'),
|
||||
},
|
||||
};
|
||||
const timeArgs: XYArgs = {
|
||||
...args,
|
||||
layers: [
|
||||
{
|
||||
...args.layers[0],
|
||||
seriesType: 'line',
|
||||
xScaleType: 'time',
|
||||
isHistogram: true,
|
||||
splitAccessor: undefined,
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
test('it extends interval if data is exceeding it', () => {
|
||||
const component = shallow(
|
||||
<XYChart
|
||||
{...defaultProps}
|
||||
minInterval={24 * 60 * 60 * 1000}
|
||||
data={data}
|
||||
args={timeArgs}
|
||||
/>
|
||||
);
|
||||
|
||||
expect(component.find(Settings).prop('xDomain')).toEqual({
|
||||
// shortened to 24th midnight (elastic-charts automatically adds one min interval)
|
||||
max: new Date('2021-04-24').valueOf(),
|
||||
// extended to 22nd midnight because of first bucket
|
||||
min: new Date('2021-04-22').valueOf(),
|
||||
minInterval: 24 * 60 * 60 * 1000,
|
||||
});
|
||||
});
|
||||
|
||||
test('it renders endzone component bridging gap between domain and extended domain', () => {
|
||||
const component = shallow(
|
||||
<XYChart
|
||||
{...defaultProps}
|
||||
minInterval={24 * 60 * 60 * 1000}
|
||||
data={data}
|
||||
args={timeArgs}
|
||||
/>
|
||||
);
|
||||
|
||||
expect(component.find(XyEndzones).dive().find('Endzones').props()).toEqual(
|
||||
expect.objectContaining({
|
||||
domainStart: new Date('2021-04-22T12:00:00.000Z').valueOf(),
|
||||
domainEnd: new Date('2021-04-24T12:00:00.000Z').valueOf(),
|
||||
domainMin: new Date('2021-04-22').valueOf(),
|
||||
domainMax: new Date('2021-04-24').valueOf(),
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
test('should pass enabled histogram mode and min interval to endzones component', () => {
|
||||
const component = shallow(
|
||||
<XYChart
|
||||
{...defaultProps}
|
||||
minInterval={24 * 60 * 60 * 1000}
|
||||
data={data}
|
||||
args={timeArgs}
|
||||
/>
|
||||
);
|
||||
|
||||
expect(component.find(XyEndzones).dive().find('Endzones').props()).toEqual(
|
||||
expect.objectContaining({
|
||||
interval: 24 * 60 * 60 * 1000,
|
||||
isFullBin: false,
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
test('should pass disabled histogram mode and min interval to endzones component', () => {
|
||||
const component = shallow(
|
||||
<XYChart
|
||||
{...defaultProps}
|
||||
minInterval={24 * 60 * 60 * 1000}
|
||||
data={data}
|
||||
args={{
|
||||
...args,
|
||||
layers: [
|
||||
{
|
||||
...args.layers[0],
|
||||
seriesType: 'bar',
|
||||
xScaleType: 'time',
|
||||
isHistogram: true,
|
||||
},
|
||||
],
|
||||
}}
|
||||
/>
|
||||
);
|
||||
|
||||
expect(component.find(XyEndzones).dive().find('Endzones').props()).toEqual(
|
||||
expect.objectContaining({
|
||||
interval: 24 * 60 * 60 * 1000,
|
||||
isFullBin: true,
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
test('it does not render endzones if disabled via settings', () => {
|
||||
const component = shallow(
|
||||
<XYChart
|
||||
{...defaultProps}
|
||||
minInterval={24 * 60 * 60 * 1000}
|
||||
data={data}
|
||||
args={{ ...timeArgs, hideEndzones: true }}
|
||||
/>
|
||||
);
|
||||
|
||||
expect(component.find(XyEndzones).length).toEqual(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
test('it has xDomain undefined if the x is not a time scale or a histogram', () => {
|
||||
|
|
|
@ -57,6 +57,7 @@ import { desanitizeFilterContext } from '../utils';
|
|||
import { fittingFunctionDefinitions, getFitOptions } from './fitting_functions';
|
||||
import { getAxesConfiguration } from './axes_configuration';
|
||||
import { getColorAssignments } from './color_assignment';
|
||||
import { getXDomain, XyEndzones } from './x_domain';
|
||||
|
||||
declare global {
|
||||
interface Window {
|
||||
|
@ -183,6 +184,13 @@ export const xyChart: ExpressionFunctionDefinition<
|
|||
defaultMessage: 'Define how curve type is rendered for a line chart',
|
||||
}),
|
||||
},
|
||||
hideEndzones: {
|
||||
types: ['boolean'],
|
||||
default: false,
|
||||
help: i18n.translate('xpack.lens.xyChart.hideEndzones.help', {
|
||||
defaultMessage: 'Hide endzone markers for partial data',
|
||||
}),
|
||||
},
|
||||
},
|
||||
fn(data: LensMultiTable, args: XYArgs) {
|
||||
return {
|
||||
|
@ -330,9 +338,17 @@ export function XYChart({
|
|||
renderMode,
|
||||
syncColors,
|
||||
}: XYChartRenderProps) {
|
||||
const { legend, layers, fittingFunction, gridlinesVisibilitySettings, valueLabels } = args;
|
||||
const {
|
||||
legend,
|
||||
layers,
|
||||
fittingFunction,
|
||||
gridlinesVisibilitySettings,
|
||||
valueLabels,
|
||||
hideEndzones,
|
||||
} = args;
|
||||
const chartTheme = chartsThemeService.useChartsTheme();
|
||||
const chartBaseTheme = chartsThemeService.useChartsBaseTheme();
|
||||
const darkMode = chartsThemeService.useDarkMode();
|
||||
const filteredLayers = getFilteredLayers(layers, data);
|
||||
|
||||
if (filteredLayers.length === 0) {
|
||||
|
@ -387,15 +403,13 @@ export function XYChart({
|
|||
const isTimeViz = data.dateRange && filteredLayers.every((l) => l.xScaleType === 'time');
|
||||
const isHistogramViz = filteredLayers.every((l) => l.isHistogram);
|
||||
|
||||
const xDomain = isTimeViz
|
||||
? {
|
||||
min: data.dateRange?.fromDate.getTime(),
|
||||
max: data.dateRange?.toDate.getTime(),
|
||||
minInterval,
|
||||
}
|
||||
: isHistogramViz
|
||||
? { minInterval }
|
||||
: undefined;
|
||||
const { baseDomain: rawXDomain, extendedDomain: xDomain } = getXDomain(
|
||||
layers,
|
||||
data,
|
||||
minInterval,
|
||||
Boolean(isTimeViz),
|
||||
Boolean(isHistogramViz)
|
||||
);
|
||||
|
||||
const getYAxesTitles = (
|
||||
axisSeries: Array<{ layer: string; accessor: string }>,
|
||||
|
@ -602,6 +616,22 @@ export function XYChart({
|
|||
/>
|
||||
))}
|
||||
|
||||
{!hideEndzones && (
|
||||
<XyEndzones
|
||||
baseDomain={rawXDomain}
|
||||
extendedDomain={xDomain}
|
||||
darkMode={darkMode}
|
||||
histogramMode={filteredLayers.every(
|
||||
(layer) =>
|
||||
layer.isHistogram &&
|
||||
(layer.seriesType.includes('stacked') || !layer.splitAccessor) &&
|
||||
(layer.seriesType.includes('stacked') ||
|
||||
!layer.seriesType.includes('bar') ||
|
||||
!chartHasMoreThanOneBarSeries)
|
||||
)}
|
||||
/>
|
||||
)}
|
||||
|
||||
{filteredLayers.flatMap((layer, layerIndex) =>
|
||||
layer.accessors.map((accessor, accessorIndex) => {
|
||||
const {
|
||||
|
|
|
@ -51,6 +51,7 @@ describe('#toExpression', () => {
|
|||
fittingFunction: 'Carry',
|
||||
tickLabelsVisibilitySettings: { x: false, yLeft: true, yRight: true },
|
||||
gridlinesVisibilitySettings: { x: false, yLeft: true, yRight: true },
|
||||
hideEndzones: true,
|
||||
layers: [
|
||||
{
|
||||
layerId: 'first',
|
||||
|
|
|
@ -198,6 +198,7 @@ export const buildExpression = (
|
|||
},
|
||||
],
|
||||
valueLabels: [state?.valueLabels || 'hide'],
|
||||
hideEndzones: [state?.hideEndzones || false],
|
||||
layers: validLayers.map((layer) => {
|
||||
const columnToLabel = getColumnToLabelMap(layer, datasourceLayers[layer.layerId]);
|
||||
|
||||
|
|
|
@ -414,6 +414,7 @@ export interface XYArgs {
|
|||
tickLabelsVisibilitySettings?: AxesSettingsConfig & { type: 'lens_xy_tickLabelsConfig' };
|
||||
gridlinesVisibilitySettings?: AxesSettingsConfig & { type: 'lens_xy_gridlinesConfig' };
|
||||
curveType?: XYCurveType;
|
||||
hideEndzones?: boolean;
|
||||
}
|
||||
|
||||
export type XYCurveType = 'LINEAR' | 'CURVE_MONOTONE_X';
|
||||
|
@ -432,6 +433,7 @@ export interface XYState {
|
|||
tickLabelsVisibilitySettings?: AxesSettingsConfig;
|
||||
gridlinesVisibilitySettings?: AxesSettingsConfig;
|
||||
curveType?: XYCurveType;
|
||||
hideEndzones?: boolean;
|
||||
}
|
||||
|
||||
export type State = XYState;
|
||||
|
|
|
@ -818,6 +818,60 @@ describe('xy_visualization', () => {
|
|||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it('should return an error if two incompatible xAccessors (multiple layers) are used', () => {
|
||||
// current incompatibility is only for date and numeric histograms as xAccessors
|
||||
const datasourceLayers = {
|
||||
first: mockDatasource.publicAPIMock,
|
||||
second: createMockDatasource('testDatasource').publicAPIMock,
|
||||
};
|
||||
datasourceLayers.first.getOperationForColumnId = jest.fn((id: string) =>
|
||||
id === 'a'
|
||||
? (({
|
||||
dataType: 'date',
|
||||
scale: 'interval',
|
||||
} as unknown) as Operation)
|
||||
: null
|
||||
);
|
||||
datasourceLayers.second.getOperationForColumnId = jest.fn((id: string) =>
|
||||
id === 'e'
|
||||
? (({
|
||||
dataType: 'number',
|
||||
scale: 'interval',
|
||||
} as unknown) as Operation)
|
||||
: null
|
||||
);
|
||||
expect(
|
||||
xyVisualization.getErrorMessages(
|
||||
{
|
||||
...exampleState(),
|
||||
layers: [
|
||||
{
|
||||
layerId: 'first',
|
||||
seriesType: 'area',
|
||||
splitAccessor: 'd',
|
||||
xAccessor: 'a',
|
||||
accessors: ['b'],
|
||||
},
|
||||
{
|
||||
layerId: 'second',
|
||||
seriesType: 'area',
|
||||
splitAccessor: 'd',
|
||||
xAccessor: 'e',
|
||||
accessors: ['b'],
|
||||
},
|
||||
],
|
||||
},
|
||||
datasourceLayers
|
||||
)
|
||||
).toEqual([
|
||||
{
|
||||
shortMessage: 'Wrong data type for Horizontal axis.',
|
||||
longMessage:
|
||||
'Data type mismatch for the Horizontal axis. Cannot mix date and number interval types.',
|
||||
},
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('#getWarningMessages', () => {
|
||||
|
|
|
@@ -15,8 +15,14 @@ import { PaletteRegistry } from 'src/plugins/charts/public';
import { DataPublicPluginStart } from 'src/plugins/data/public';
import { getSuggestions } from './xy_suggestions';
import { LayerContextMenu, XyToolbar, DimensionEditor } from './xy_config_panel';
import { Visualization, OperationMetadata, VisualizationType, AccessorConfig } from '../types';
import { State, SeriesType, visualizationTypes, XYLayerConfig } from './types';
import {
  Visualization,
  OperationMetadata,
  VisualizationType,
  AccessorConfig,
  DatasourcePublicAPI,
} from '../types';
import { State, SeriesType, visualizationTypes, XYLayerConfig, XYState } from './types';
import { isHorizontalChart } from './state_helpers';
import { toExpression, toPreviewExpression, getSortedAccessors } from './to_expression';
import { LensIconChartBarStacked } from '../assets/chart_bar_stacked';

@@ -374,6 +380,9 @@ export const getXyVisualization = ({
  }

  if (datasourceLayers && state) {
    // temporary fix for #87068
    errors.push(...checkXAccessorCompatibility(state, datasourceLayers));

    for (const layer of state.layers) {
      const datasourceAPI = datasourceLayers[layer.layerId];
      if (datasourceAPI) {

@@ -517,3 +526,47 @@ function newLayerState(seriesType: SeriesType, layerId: string): XYLayerConfig {
    accessors: [],
  };
}

// min requirement for the bug:
// * 2 or more layers
// * at least one with date histogram
// * at least one with interval function
function checkXAccessorCompatibility(
  state: XYState,
  datasourceLayers: Record<string, DatasourcePublicAPI>
) {
  const errors = [];
  const hasDateHistogramSet = state.layers.some(checkIntervalOperation('date', datasourceLayers));
  const hasNumberHistogram = state.layers.some(checkIntervalOperation('number', datasourceLayers));
  if (state.layers.length > 1 && hasDateHistogramSet && hasNumberHistogram) {
    errors.push({
      shortMessage: i18n.translate('xpack.lens.xyVisualization.dataTypeFailureXShort', {
        defaultMessage: `Wrong data type for {axis}.`,
        values: {
          axis: getAxisName('x', { isHorizontal: isHorizontalChart(state.layers) }),
        },
      }),
      longMessage: i18n.translate('xpack.lens.xyVisualization.dataTypeFailureXLong', {
        defaultMessage: `Data type mismatch for the {axis}. Cannot mix date and number interval types.`,
        values: {
          axis: getAxisName('x', { isHorizontal: isHorizontalChart(state.layers) }),
        },
      }),
    });
  }
  return errors;
}

function checkIntervalOperation(
  dataType: 'date' | 'number',
  datasourceLayers: Record<string, DatasourcePublicAPI>
) {
  return (layer: XYLayerConfig) => {
    const datasourceAPI = datasourceLayers[layer.layerId];
    if (!layer.xAccessor) {
      return false;
    }
    const operation = datasourceAPI?.getOperationForColumnId(layer.xAccessor);
    return Boolean(operation?.dataType === dataType && operation.scale === 'interval');
  };
}
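The mismatch check above only fires when two or more layers combine a date-interval X accessor with a number-interval one. A standalone sketch of that rule, not the Lens implementation itself (the layer interval types below are made up for illustration):

// Minimal rule sketch: report the mismatch only for 2+ layers that mix
// a date interval and a number interval on the X axis.
type XIntervalType = 'date' | 'number' | undefined;

function hasMixedIntervalXAccessors(layerXTypes: XIntervalType[]): boolean {
  return (
    layerXTypes.length > 1 && layerXTypes.includes('date') && layerXTypes.includes('number')
  );
}

console.log(hasMixedIntervalXAccessors(['date', 'number'])); // true -> error message is pushed
console.log(hasMixedIntervalXAccessors(['date', 'date'])); // false
console.log(hasMixedIntervalXAccessors(['number'])); // false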
x-pack/plugins/lens/public/xy_visualization/x_domain.tsx (new file, 103 lines)
@@ -0,0 +1,103 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { uniq } from 'lodash';
import React from 'react';
import { Endzones } from '../../../../../src/plugins/charts/public';
import { LensMultiTable } from '../types';
import { LayerArgs } from './types';

export interface XDomain {
  min?: number;
  max?: number;
  minInterval?: number;
}

export const getXDomain = (
  layers: LayerArgs[],
  data: LensMultiTable,
  minInterval: number | undefined,
  isTimeViz: boolean,
  isHistogram: boolean
) => {
  const baseDomain = isTimeViz
    ? {
        min: data.dateRange?.fromDate.getTime(),
        max: data.dateRange?.toDate.getTime(),
        minInterval,
      }
    : isHistogram
    ? { minInterval }
    : undefined;

  if (isHistogram && isFullyQualified(baseDomain)) {
    const xValues = uniq(
      layers
        .flatMap((layer) =>
          data.tables[layer.layerId].rows.map((row) => row[layer.xAccessor!].valueOf() as number)
        )
        .sort()
    );

    const [firstXValue] = xValues;
    const lastXValue = xValues[xValues.length - 1];

    const domainMin = Math.min(firstXValue, baseDomain.min);
    const domainMaxValue = baseDomain.max - baseDomain.minInterval;
    const domainMax = Math.max(domainMaxValue, lastXValue);

    return {
      extendedDomain: {
        min: domainMin,
        max: domainMax,
        minInterval: baseDomain.minInterval,
      },
      baseDomain,
    };
  }

  return {
    baseDomain,
    extendedDomain: baseDomain,
  };
};

function isFullyQualified(
  xDomain: XDomain | undefined
): xDomain is { min: number; max: number; minInterval: number } {
  return Boolean(
    xDomain &&
      typeof xDomain.min === 'number' &&
      typeof xDomain.max === 'number' &&
      xDomain.minInterval
  );
}

export const XyEndzones = function ({
  baseDomain,
  extendedDomain,
  histogramMode,
  darkMode,
}: {
  baseDomain?: XDomain;
  extendedDomain?: XDomain;
  histogramMode: boolean;
  darkMode: boolean;
}) {
  return isFullyQualified(baseDomain) && isFullyQualified(extendedDomain) ? (
    <Endzones
      isFullBin={!histogramMode}
      isDarkMode={darkMode}
      domainStart={baseDomain.min}
      domainEnd={baseDomain.max}
      interval={extendedDomain.minInterval}
      domainMin={extendedDomain.min}
      domainMax={extendedDomain.max}
      hideTooltips={false}
    />
  ) : null;
};
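For orientation, a self-contained sketch of how the extended domain in `getXDomain` behaves for a numeric histogram. The numbers are invented for illustration; the real function also reads the Lens tables and the date range for time-based charts:

// Sketch of the extension rule: grow the min to include the smallest data value,
// and compare the largest data value against (base max - one interval).
interface SketchDomain {
  min: number;
  max: number;
  minInterval: number;
}

function extendDomain(base: SketchDomain, xValues: number[]): SketchDomain {
  const sorted = [...xValues].sort((a, b) => a - b);
  const first = sorted[0];
  const last = sorted[sorted.length - 1];
  return {
    min: Math.min(first, base.min),
    max: Math.max(base.max - base.minInterval, last),
    minInterval: base.minInterval,
  };
}

// Buckets at x = 10..40 with interval 10, configured base domain 15..35:
console.log(extendDomain({ min: 15, max: 35, minInterval: 10 }, [10, 20, 30, 40]));
// -> { min: 10, max: 40, minInterval: 10 }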
@@ -138,6 +138,29 @@ describe('XY Config panels', () => {

      expect(component.find(AxisSettingsPopover).length).toEqual(3);
    });

    it('should pass in endzone visibility setter and current state for time chart', () => {
      (frame.datasourceLayers.first.getOperationForColumnId as jest.Mock).mockReturnValue({
        dataType: 'date',
      });
      const state = testState();
      const component = shallow(
        <XyToolbar
          frame={frame}
          setState={jest.fn()}
          state={{
            ...state,
            hideEndzones: true,
            layers: [{ ...state.layers[0], yConfig: [{ axisMode: 'right', forAccessor: 'foo' }] }],
          }}
        />
      );

      expect(component.find(AxisSettingsPopover).at(0).prop('setEndzoneVisibility')).toBeFalsy();
      expect(component.find(AxisSettingsPopover).at(1).prop('setEndzoneVisibility')).toBeTruthy();
      expect(component.find(AxisSettingsPopover).at(1).prop('endzonesVisible')).toBe(false);
      expect(component.find(AxisSettingsPopover).at(2).prop('setEndzoneVisibility')).toBeFalsy();
    });
  });

  describe('Dimension Editor', () => {
@@ -8,7 +8,7 @@
import './xy_config_panel.scss';
import React, { useMemo, useState, memo } from 'react';
import { i18n } from '@kbn/i18n';
import { Position } from '@elastic/charts';
import { Position, ScaleType } from '@elastic/charts';
import { debounce } from 'lodash';
import {
  EuiButtonGroup,

@@ -37,7 +37,7 @@ import { TooltipWrapper } from './tooltip_wrapper';
import { getAxesConfiguration } from './axes_configuration';
import { PalettePicker } from '../shared_components';
import { getAccessorColorConfig, getColorAssignments } from './color_assignment';
import { getSortedAccessors } from './to_expression';
import { getScaleType, getSortedAccessors } from './to_expression';
import { VisualOptionsPopover } from './visual_options_popover/visual_options_popover';

type UnwrapArray<T> = T extends Array<infer P> ? P : T;

@@ -187,6 +187,23 @@ export const XyToolbar = memo(function XyToolbar(props: VisualizationToolbarProp
    });
  };

  // only allow changing endzone visibility if it could show up theoretically (if it's a time viz)
  const onChangeEndzoneVisiblity = state?.layers.every(
    (layer) =>
      layer.xAccessor &&
      getScaleType(
        props.frame.datasourceLayers[layer.layerId].getOperationForColumnId(layer.xAccessor),
        ScaleType.Linear
      ) === 'time'
  )
    ? (checked: boolean): void => {
        setState({
          ...state,
          hideEndzones: !checked,
        });
      }
    : undefined;

  const legendMode =
    state?.legend.isVisible && !state?.legend.showSingleSeries
      ? 'auto'

@@ -278,6 +295,8 @@ export const XyToolbar = memo(function XyToolbar(props: VisualizationToolbarProp
            toggleGridlinesVisibility={onGridlinesVisibilitySettingsChange}
            isAxisTitleVisible={axisTitlesVisibilitySettings.x}
            toggleAxisTitleVisibility={onAxisTitlesVisibilitySettingsChange}
            endzonesVisible={!state?.hideEndzones}
            setEndzoneVisibility={onChangeEndzoneVisiblity}
          />
          <TooltipWrapper
            tooltipContent={
@@ -5,6 +5,7 @@
 * 2.0.
 */

import { estypes } from '@elastic/elasticsearch';
import { Cluster } from './cluster';

describe('cluster', () => {

@@ -12,7 +13,7 @@ describe('cluster', () => {
  describe('fromUpstreamJSON factory method', () => {
    const upstreamJSON = {
      cluster_uuid: 'S-S4NNZDRV-g9c-JrIhx6A',
    };
    } as estypes.RootNodeInfoResponse;

    it('returns correct Cluster instance', () => {
      const cluster = Cluster.fromUpstreamJSON(upstreamJSON);

@@ -5,28 +5,27 @@
 * 2.0.
 */

import { get } from 'lodash';
import { estypes } from '@elastic/elasticsearch';

/**
 * This model deals with a cluster object from ES and converts it to Kibana downstream
 */
export class Cluster {
  public readonly uuid: string;

  constructor({ uuid }: { uuid: string }) {
    this.uuid = uuid;
  }

  public get downstreamJSON() {
    const json = {
    return {
      uuid: this.uuid,
    };

    return json;
  }

  // generate Pipeline object from elasticsearch response
  static fromUpstreamJSON(upstreamCluster: Record<string, string>) {
    const uuid = get(upstreamCluster, 'cluster_uuid') as string;
  static fromUpstreamJSON(upstreamCluster: estypes.RootNodeInfoResponse) {
    const uuid = upstreamCluster.cluster_uuid;
    return new Cluster({ uuid });
  }
}
@@ -5,20 +5,11 @@
 * 2.0.
 */

import {
  CoreSetup,
  CoreStart,
  ILegacyCustomClusterClient,
  Logger,
  Plugin,
  PluginInitializerContext,
} from 'src/core/server';
import { CoreSetup, CoreStart, Logger, Plugin, PluginInitializerContext } from 'src/core/server';
import { LicensingPluginSetup } from '../../licensing/server';
import { PluginSetupContract as FeaturesPluginSetup } from '../../features/server';
import { SecurityPluginSetup } from '../../security/server';

import { registerRoutes } from './routes';
import type { LogstashRequestHandlerContext } from './types';

interface SetupDeps {
  licensing: LicensingPluginSetup;

@@ -28,8 +19,7 @@ interface SetupDeps {

export class LogstashPlugin implements Plugin {
  private readonly logger: Logger;
  private esClient?: ILegacyCustomClusterClient;
  private coreSetup?: CoreSetup;

  constructor(context: PluginInitializerContext) {
    this.logger = context.logger.get();
  }

@@ -37,7 +27,6 @@ export class LogstashPlugin implements Plugin {
  setup(core: CoreSetup, deps: SetupDeps) {
    this.logger.debug('Setting up Logstash plugin');

    this.coreSetup = core;
    registerRoutes(core.http.createRouter(), deps.security);

    deps.features.registerElasticsearchFeature({

@@ -55,19 +44,5 @@ export class LogstashPlugin implements Plugin {
    });
  }

  start(core: CoreStart) {
    const esClient = core.elasticsearch.legacy.createClient('logstash');

    this.coreSetup!.http.registerRouteHandlerContext<LogstashRequestHandlerContext, 'logstash'>(
      'logstash',
      async (context, request) => {
        return { esClient: esClient.asScoped(request) };
      }
    );
  }
  stop() {
    if (this.esClient) {
      this.esClient.close();
    }
  }
  start(core: CoreStart) {}
}
@@ -18,8 +18,8 @@ export function registerClusterLoadRoute(router: LogstashPluginRouter) {
    },
    wrapRouteWithLicenseCheck(checkLicense, async (context, request, response) => {
      try {
        const client = context.logstash!.esClient;
        const info = await client.callAsCurrentUser('info');
        const { client } = context.core.elasticsearch;
        const { body: info } = await client.asCurrentUser.info();
        return response.ok({
          body: {
            cluster: Cluster.fromUpstreamJSON(info).downstreamJSON,
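The same migration pattern repeats through the Logstash route files below: the scoped client now comes from `context.core.elasticsearch`, and responses are unwrapped from the `{ body }` envelope. A minimal sketch of the pattern with a hypothetical handler (not the actual Kibana route):

import type { RequestHandler } from 'src/core/server';

// Illustrative handler only: read cluster info with the new scoped client and
// return the uuid, mirroring the shape used by the Logstash cluster route.
const getClusterInfoHandler: RequestHandler = async (context, request, response) => {
  const { client } = context.core.elasticsearch;
  const { body: info } = await client.asCurrentUser.info();
  return response.ok({ body: { cluster: { uuid: info.cluster_uuid } } });
};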
@@ -23,14 +23,18 @@ export function registerPipelineDeleteRoute(router: LogstashPluginRouter) {
    wrapRouteWithLicenseCheck(
      checkLicense,
      router.handleLegacyErrors(async (context, request, response) => {
        const client = context.logstash!.esClient;
        const { id } = request.params;
        const { client } = context.core.elasticsearch;

        await client.callAsCurrentUser('transport.request', {
          path: '/_logstash/pipeline/' + encodeURIComponent(request.params.id),
          method: 'DELETE',
        });

        return response.noContent();
        try {
          await client.asCurrentUser.logstash.deletePipeline({ id });
          return response.noContent();
        } catch (e) {
          if (e.statusCode === 404) {
            return response.notFound();
          }
          throw e;
        }
      })
    )
  );
@@ -25,13 +25,13 @@ export function registerPipelineLoadRoute(router: LogstashPluginRouter) {
    wrapRouteWithLicenseCheck(
      checkLicense,
      router.handleLegacyErrors(async (context, request, response) => {
        const client = context.logstash!.esClient;
        const { id } = request.params;
        const { client } = context.core.elasticsearch;

        const result = await client.callAsCurrentUser('transport.request', {
          path: '/_logstash/pipeline/' + encodeURIComponent(request.params.id),
          method: 'GET',
          ignore: [404],
        });
        const { body: result } = await client.asCurrentUser.logstash.getPipeline(
          { id },
          { ignore: [404] }
        );

        if (result[request.params.id] === undefined) {
          return response.notFound();
@@ -42,12 +42,11 @@ export function registerPipelineSaveRoute(
          username = user?.username;
        }

        const client = context.logstash!.esClient;
        const { client } = context.core.elasticsearch;
        const pipeline = Pipeline.fromDownstreamJSON(request.body, request.params.id, username);

        await client.callAsCurrentUser('transport.request', {
          path: '/_logstash/pipeline/' + encodeURIComponent(pipeline.id),
          method: 'PUT',
        await client.asCurrentUser.logstash.putPipeline({
          id: pipeline.id,
          body: pipeline.upstreamJSON,
        });

@@ -6,19 +6,19 @@
 */

import { schema } from '@kbn/config-schema';
import { LegacyAPICaller } from 'src/core/server';
import { ElasticsearchClient } from 'src/core/server';
import { wrapRouteWithLicenseCheck } from '../../../../licensing/server';

import { checkLicense } from '../../lib/check_license';
import type { LogstashPluginRouter } from '../../types';

async function deletePipelines(callWithRequest: LegacyAPICaller, pipelineIds: string[]) {
async function deletePipelines(client: ElasticsearchClient, pipelineIds: string[]) {
  const deletePromises = pipelineIds.map((pipelineId) => {
    return callWithRequest('transport.request', {
      path: '/_logstash/pipeline/' + encodeURIComponent(pipelineId),
      method: 'DELETE',
    })
      .then((success) => ({ success }))
    return client.logstash
      .deletePipeline({
        id: pipelineId,
      })
      .then((response) => ({ success: response.body }))
      .catch((error) => ({ error }));
  });

@@ -45,8 +45,8 @@ export function registerPipelinesDeleteRoute(router: LogstashPluginRouter) {
    wrapRouteWithLicenseCheck(
      checkLicense,
      router.handleLegacyErrors(async (context, request, response) => {
        const client = context.logstash.esClient;
        const results = await deletePipelines(client.callAsCurrentUser, request.body.pipelineIds);
        const client = context.core.elasticsearch.client.asCurrentUser;
        const results = await deletePipelines(client, request.body.pipelineIds);

        return response.ok({ body: { results } });
      })
@@ -6,21 +6,22 @@
 */

import { i18n } from '@kbn/i18n';
import { LegacyAPICaller } from 'src/core/server';
import { ElasticsearchClient } from 'src/core/server';
import type { LogstashPluginRouter } from '../../types';
import { wrapRouteWithLicenseCheck } from '../../../../licensing/server';

import { PipelineListItem } from '../../models/pipeline_list_item';
import { checkLicense } from '../../lib/check_license';

async function fetchPipelines(callWithRequest: LegacyAPICaller) {
  const params = {
    path: '/_logstash/pipeline',
    method: 'GET',
    ignore: [404],
  };

  return await callWithRequest('transport.request', params);
async function fetchPipelines(client: ElasticsearchClient) {
  const { body } = await client.transport.request(
    {
      method: 'GET',
      path: '/_logstash/pipeline',
    },
    { ignore: [404] }
  );
  return body;
}

export function registerPipelinesListRoute(router: LogstashPluginRouter) {

@@ -33,8 +34,8 @@ export function registerPipelinesListRoute(router: LogstashPluginRouter) {
      checkLicense,
      router.handleLegacyErrors(async (context, request, response) => {
        try {
          const client = context.logstash!.esClient;
          const pipelinesRecord = (await fetchPipelines(client.callAsCurrentUser)) as Record<
          const { client } = context.core.elasticsearch;
          const pipelinesRecord = (await fetchPipelines(client.asCurrentUser)) as Record<
            string,
            any
          >;
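Note the second argument to the new client calls: `{ ignore: [404] }` is a per-request transport option that keeps a missing resource from rejecting the promise, so the route can translate it into a 404 response itself. A small illustrative sketch; the client construction and node URL here are assumptions, not part of this change:

import { Client } from '@elastic/elasticsearch';

const client = new Client({ node: 'http://localhost:9200' });

// Resolves instead of throwing when the pipeline is missing, so callers can
// branch on the status code rather than catching an exception.
async function getPipelineOrUndefined(id: string) {
  const { body, statusCode } = await client.logstash.getPipeline({ id }, { ignore: [404] });
  return statusCode === 404 ? undefined : body;
}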
@@ -5,7 +5,7 @@
 * 2.0.
 */

import type { ILegacyScopedClusterClient, IRouter, RequestHandlerContext } from 'src/core/server';
import type { IRouter, RequestHandlerContext } from 'src/core/server';
import type { LicensingApiRequestHandlerContext } from '../../licensing/server';

export interface PipelineListItemOptions {

@@ -19,9 +19,6 @@ export interface PipelineListItemOptions {
 * @internal
 */
export interface LogstashRequestHandlerContext extends RequestHandlerContext {
  logstash: {
    esClient: ILegacyScopedClusterClient;
  };
  licensing: LicensingApiRequestHandlerContext;
}

@@ -9,7 +9,8 @@
    "licensing",
    "management",
    "features",
    "savedObjects"
    "savedObjects",
    "share"
  ],
  "optionalPlugins": [
    "security",
@@ -7,17 +7,28 @@

import { useContext } from 'react';

import type { ScopedHistory } from 'kibana/public';

import { coreMock } from '../../../../../../src/core/public/mocks';
import { dataPluginMock } from '../../../../../../src/plugins/data/public/mocks';
import { savedObjectsPluginMock } from '../../../../../../src/plugins/saved_objects/public/mocks';
import { SharePluginStart } from '../../../../../../src/plugins/share/public';

import { Storage } from '../../../../../../src/plugins/kibana_utils/public';

import type { AppDependencies } from '../app_dependencies';
import { MlSharedContext } from './shared_context';
import type { GetMlSharedImportsReturnType } from '../../shared_imports';

const coreSetup = coreMock.createSetup();
const coreStart = coreMock.createStart();
const dataStart = dataPluginMock.createStartContract();

const appDependencies = {
// Replace mock to support syntax using `.then()` as used in transform code.
coreStart.savedObjects.client.find = jest.fn().mockResolvedValue({ savedObjects: [] });

const appDependencies: AppDependencies = {
  application: coreStart.application,
  chrome: coreStart.chrome,
  data: dataStart,
  docLinks: coreStart.docLinks,

@@ -28,11 +39,15 @@ const appDependencies = {
  storage: ({ get: jest.fn() } as unknown) as Storage,
  overlays: coreStart.overlays,
  http: coreSetup.http,
  history: {} as ScopedHistory,
  savedObjectsPlugin: savedObjectsPluginMock.createStartContract(),
  share: ({ urlGenerators: { getUrlGenerator: jest.fn() } } as unknown) as SharePluginStart,
  ml: {} as GetMlSharedImportsReturnType,
};

export const useAppDependencies = () => {
  const ml = useContext(MlSharedContext);
  return { ...appDependencies, ml, savedObjects: jest.fn() };
  return { ...appDependencies, ml };
};

export const useToastNotifications = () => {
@@ -5,17 +5,19 @@
 * 2.0.
 */

import { CoreSetup, CoreStart } from 'src/core/public';
import { DataPublicPluginStart } from 'src/plugins/data/public';
import { SavedObjectsStart } from 'src/plugins/saved_objects/public';
import { ScopedHistory } from 'kibana/public';
import type { CoreSetup, CoreStart } from 'src/core/public';
import type { DataPublicPluginStart } from 'src/plugins/data/public';
import type { SavedObjectsStart } from 'src/plugins/saved_objects/public';
import type { ScopedHistory } from 'kibana/public';
import type { SharePluginStart } from 'src/plugins/share/public';

import { useKibana } from '../../../../../src/plugins/kibana_react/public';
import { Storage } from '../../../../../src/plugins/kibana_utils/public';
import type { Storage } from '../../../../../src/plugins/kibana_utils/public';

import type { GetMlSharedImportsReturnType } from '../shared_imports';

export interface AppDependencies {
  application: CoreStart['application'];
  chrome: CoreStart['chrome'];
  data: DataPublicPluginStart;
  docLinks: CoreStart['docLinks'];

@@ -28,6 +30,7 @@ export interface AppDependencies {
  overlays: CoreStart['overlays'];
  history: ScopedHistory;
  savedObjectsPlugin: SavedObjectsStart;
  share: SharePluginStart;
  ml: GetMlSharedImportsReturnType;
}

@@ -28,7 +28,6 @@ export {
} from './transform';
export { TRANSFORM_LIST_COLUMN, TransformListAction, TransformListRow } from './transform_list';
export { getTransformProgress, isCompletedBatchTransform } from './transform_stats';
export { getDiscoverUrl } from './navigation';
export {
  getEsAggFromAggConfig,
  isPivotAggsConfigWithUiSupport,
@@ -1,16 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { getDiscoverUrl } from './navigation';

describe('navigation', () => {
  test('getDiscoverUrl should provide encoded url to Discover page', () => {
    expect(getDiscoverUrl('farequote-airline', 'http://example.com')).toBe(
      'http://example.com/app/discover#?_g=()&_a=(index:farequote-airline)'
    );
  });
});
@@ -7,28 +7,9 @@

import React, { FC } from 'react';
import { Redirect } from 'react-router-dom';
import rison from 'rison-node';

import { SECTION_SLUG } from '../constants';

/**
 * Gets a url for navigating to Discover page.
 * @param indexPatternId Index pattern ID.
 * @param baseUrl Base url.
 */
export function getDiscoverUrl(indexPatternId: string, baseUrl: string): string {
  const _g = rison.encode({});

  // Add the index pattern ID to the appState part of the URL.
  const _a = rison.encode({
    index: indexPatternId,
  });

  const hash = `/discover#?_g=${_g}&_a=${_a}`;

  return `${baseUrl}/app${hash}`;
}

export const RedirectToTransformManagement: FC = () => <Redirect to={`/${SECTION_SLUG.HOME}`} />;

export const RedirectToCreateTransform: FC<{ savedObjectId: string }> = ({ savedObjectId }) => (
@@ -28,8 +28,8 @@ export async function mountManagementSection(
  const { http, notifications, getStartServices } = coreSetup;
  const startServices = await getStartServices();
  const [core, plugins] = startServices;
  const { chrome, docLinks, i18n, overlays, savedObjects, uiSettings } = core;
  const { data } = plugins;
  const { application, chrome, docLinks, i18n, overlays, savedObjects, uiSettings } = core;
  const { data, share } = plugins;
  const { docTitle } = chrome;

  // Initialize services

@@ -39,6 +39,7 @@ export async function mountManagementSection(

  // AppCore/AppPlugins to be passed on as React context
  const appDependencies: AppDependencies = {
    application,
    chrome,
    data,
    docLinks,

@@ -51,6 +52,7 @@ export async function mountManagementSection(
    uiSettings,
    history,
    savedObjectsPlugin: plugins.savedObjects,
    share,
    ml: await getMlSharedImports(),
  };

@@ -26,6 +26,11 @@ import {

import { toMountPoint } from '../../../../../../../../../src/plugins/kibana_react/public';

import {
  DISCOVER_APP_URL_GENERATOR,
  DiscoverUrlGeneratorState,
} from '../../../../../../../../../src/plugins/discover/public';

import type { PutTransformsResponseSchema } from '../../../../../../common/api_schemas/transforms';
import {
  isGetTransformsStatsResponseSchema,

@@ -36,7 +41,7 @@ import { PROGRESS_REFRESH_INTERVAL_MS } from '../../../../../../common/constants

import { getErrorMessage } from '../../../../../../common/utils/errors';

import { getTransformProgress, getDiscoverUrl } from '../../../../common';
import { getTransformProgress } from '../../../../common';
import { useApi } from '../../../../hooks/use_api';
import { useAppDependencies, useToastNotifications } from '../../../../app_dependencies';
import { RedirectToTransformManagement } from '../../../../common/navigation';

@@ -86,13 +91,45 @@ export const StepCreateForm: FC<StepCreateFormProps> = React.memo(
    const [progressPercentComplete, setProgressPercentComplete] = useState<undefined | number>(
      undefined
    );
    const [discoverLink, setDiscoverLink] = useState<string>();

    const deps = useAppDependencies();
    const indexPatterns = deps.data.indexPatterns;
    const toastNotifications = useToastNotifications();
    const { getUrlGenerator } = deps.share.urlGenerators;
    const isDiscoverAvailable = deps.application.capabilities.discover?.show ?? false;

    useEffect(() => {
      let unmounted = false;

      onChange({ created, started, indexPatternId });

      const getDiscoverUrl = async (): Promise<void> => {
        const state: DiscoverUrlGeneratorState = {
          indexPatternId,
        };

        let discoverUrlGenerator;
        try {
          discoverUrlGenerator = getUrlGenerator(DISCOVER_APP_URL_GENERATOR);
        } catch (error) {
          // ignore error thrown when url generator is not available
          return;
        }

        const discoverUrl = await discoverUrlGenerator.createUrl(state);
        if (!unmounted) {
          setDiscoverLink(discoverUrl);
        }
      };

      if (started === true && indexPatternId !== undefined && isDiscoverAvailable) {
        getDiscoverUrl();
      }

      return () => {
        unmounted = true;
      };
      // custom comparison
      // eslint-disable-next-line react-hooks/exhaustive-deps
    }, [created, started, indexPatternId]);

@@ -477,7 +514,7 @@ export const StepCreateForm: FC<StepCreateFormProps> = React.memo(
            </EuiPanel>
          </EuiFlexItem>
        )}
        {started === true && indexPatternId !== undefined && (
        {isDiscoverAvailable && discoverLink !== undefined && (
          <EuiFlexItem style={PANEL_ITEM_STYLE}>
            <EuiCard
              icon={<EuiIcon size="xxl" type="discoverApp" />}

@@ -490,7 +527,7 @@ export const StepCreateForm: FC<StepCreateFormProps> = React.memo(
                  defaultMessage: 'Use Discover to explore the transform.',
                }
              )}
              href={getDiscoverUrl(indexPatternId, deps.http.basePath.get())}
              href={discoverLink}
              data-test-subj="transformWizardCardDiscover"
            />
          </EuiFlexItem>
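The removed `getDiscoverUrl` helper is replaced by the share plugin's URL generator registry, used both in the creation wizard above and in the list action added below. A minimal sketch of that lookup in isolation, assuming the generator id and state shape shown in this diff and a `SharePluginStart` contract on hand:

import type { SharePluginStart } from 'src/plugins/share/public';
import {
  DISCOVER_APP_URL_GENERATOR,
  DiscoverUrlGeneratorState,
} from 'src/plugins/discover/public';

// Sketch only: resolve a Discover link for a destination index pattern.
// Returns undefined when the Discover URL generator is not registered (e.g. Discover disabled).
async function resolveDiscoverLink(
  share: SharePluginStart,
  indexPatternId: string
): Promise<string | undefined> {
  const state: DiscoverUrlGeneratorState = { indexPatternId };
  try {
    const generator = share.urlGenerators.getUrlGenerator(DISCOVER_APP_URL_GENERATOR);
    return await generator.createUrl(state);
  } catch (error) {
    // ignore error thrown when the url generator is not available
    return undefined;
  }
}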
@@ -0,0 +1,88 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { cloneDeep } from 'lodash';
import React from 'react';
import { IntlProvider } from 'react-intl';

import { render, waitFor, screen } from '@testing-library/react';

import { TransformListRow } from '../../../../common';
import { isDiscoverActionDisabled, DiscoverActionName } from './discover_action_name';

import transformListRow from '../../../../common/__mocks__/transform_list_row.json';

jest.mock('../../../../../shared_imports');
jest.mock('../../../../../app/app_dependencies');

// @ts-expect-error mock data is too loosely typed
const item: TransformListRow = transformListRow;

describe('Transform: Transform List Actions isDiscoverActionDisabled()', () => {
  it('should be disabled when more than one item is passed in', () => {
    expect(isDiscoverActionDisabled([item, item], false, true)).toBe(true);
  });
  it('should be disabled when forceDisable is true', () => {
    expect(isDiscoverActionDisabled([item], true, true)).toBe(true);
  });
  it('should be disabled when the index pattern is not available', () => {
    expect(isDiscoverActionDisabled([item], false, false)).toBe(true);
  });
  it('should be disabled when the transform started but has no index pattern', () => {
    const itemCopy = cloneDeep(item);
    itemCopy.stats.state = 'started';
    expect(isDiscoverActionDisabled([itemCopy], false, false)).toBe(true);
  });
  it('should be enabled when the transform started and has an index pattern', () => {
    const itemCopy = cloneDeep(item);
    itemCopy.stats.state = 'started';
    expect(isDiscoverActionDisabled([itemCopy], false, true)).toBe(false);
  });
  it('should be enabled when the index pattern is available', () => {
    expect(isDiscoverActionDisabled([item], false, true)).toBe(false);
  });
});

describe('Transform: Transform List Actions <StopAction />', () => {
  it('renders an enabled button', async () => {
    // prepare
    render(
      <IntlProvider locale="en">
        <DiscoverActionName items={[item]} indexPatternExists={true} />
      </IntlProvider>
    );

    // assert
    await waitFor(() => {
      expect(
        screen.queryByTestId('transformDiscoverActionNameText disabled')
      ).not.toBeInTheDocument();
      expect(screen.queryByTestId('transformDiscoverActionNameText enabled')).toBeInTheDocument();
      expect(screen.queryByText('View in Discover')).toBeInTheDocument();
    });
  });

  it('renders a disabled button', async () => {
    // prepare
    const itemCopy = cloneDeep(item);
    itemCopy.stats.checkpointing.last.checkpoint = 0;
    render(
      <IntlProvider locale="en">
        <DiscoverActionName items={[itemCopy]} indexPatternExists={false} />
      </IntlProvider>
    );

    // assert
    await waitFor(() => {
      expect(screen.queryByTestId('transformDiscoverActionNameText disabled')).toBeInTheDocument();
      expect(
        screen.queryByTestId('transformDiscoverActionNameText enabled')
      ).not.toBeInTheDocument();
      expect(screen.queryByText('View in Discover')).toBeInTheDocument();
    });
  });
});
@@ -0,0 +1,97 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import React, { FC } from 'react';
import { i18n } from '@kbn/i18n';
import { EuiToolTip } from '@elastic/eui';

import { TRANSFORM_STATE } from '../../../../../../common/constants';

import { getTransformProgress, TransformListRow } from '../../../../common';

export const discoverActionNameText = i18n.translate(
  'xpack.transform.transformList.discoverActionNameText',
  {
    defaultMessage: 'View in Discover',
  }
);

export const isDiscoverActionDisabled = (
  items: TransformListRow[],
  forceDisable: boolean,
  indexPatternExists: boolean
) => {
  if (items.length !== 1) {
    return true;
  }

  const item = items[0];

  // Disable discover action if it's a batch transform and was never started
  const stoppedTransform = item.stats.state === TRANSFORM_STATE.STOPPED;
  const transformProgress = getTransformProgress(item);
  const isBatchTransform = typeof item.config.sync === 'undefined';
  const transformNeverStarted =
    stoppedTransform === true && transformProgress === undefined && isBatchTransform === true;

  return forceDisable === true || indexPatternExists === false || transformNeverStarted === true;
};

export interface DiscoverActionNameProps {
  indexPatternExists: boolean;
  items: TransformListRow[];
}
export const DiscoverActionName: FC<DiscoverActionNameProps> = ({ indexPatternExists, items }) => {
  const isBulkAction = items.length > 1;

  const item = items[0];

  // Disable discover action if it's a batch transform and was never started
  const stoppedTransform = item.stats.state === TRANSFORM_STATE.STOPPED;
  const transformProgress = getTransformProgress(item);
  const isBatchTransform = typeof item.config.sync === 'undefined';
  const transformNeverStarted =
    stoppedTransform && transformProgress === undefined && isBatchTransform === true;

  let disabledTransformMessage;
  if (isBulkAction === true) {
    disabledTransformMessage = i18n.translate(
      'xpack.transform.transformList.discoverTransformBulkToolTip',
      {
        defaultMessage: 'Links to Discover are not supported as a bulk action.',
      }
    );
  } else if (!indexPatternExists) {
    disabledTransformMessage = i18n.translate(
      'xpack.transform.transformList.discoverTransformNoIndexPatternToolTip',
      {
        defaultMessage: `A Kibana index pattern is required for the destination index to be viewable in Discover`,
      }
    );
  } else if (transformNeverStarted) {
    disabledTransformMessage = i18n.translate(
      'xpack.transform.transformList.discoverTransformToolTip',
      {
        defaultMessage: `The transform needs to be started before it's available in Discover.`,
      }
    );
  }

  if (typeof disabledTransformMessage !== 'undefined') {
    return (
      <EuiToolTip position="top" content={disabledTransformMessage}>
        <span data-test-subj="transformDiscoverActionNameText disabled">
          {discoverActionNameText}
        </span>
      </EuiToolTip>
    );
  }

  return (
    <span data-test-subj="transformDiscoverActionNameText enabled">{discoverActionNameText}</span>
  );
};
@@ -0,0 +1,9 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

export { useDiscoverAction } from './use_action_discover';
export { DiscoverActionName } from './discover_action_name';
@@ -0,0 +1,99 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import React, { useCallback, useEffect, useMemo, useState } from 'react';

import {
  DiscoverUrlGeneratorState,
  DISCOVER_APP_URL_GENERATOR,
} from '../../../../../../../../../src/plugins/discover/public';

import { TransformListAction, TransformListRow } from '../../../../common';

import { useSearchItems } from '../../../../hooks/use_search_items';
import { useAppDependencies } from '../../../../app_dependencies';

import {
  isDiscoverActionDisabled,
  discoverActionNameText,
  DiscoverActionName,
} from './discover_action_name';

const getIndexPatternTitleFromTargetIndex = (item: TransformListRow) =>
  Array.isArray(item.config.dest.index) ? item.config.dest.index.join(',') : item.config.dest.index;

export type DiscoverAction = ReturnType<typeof useDiscoverAction>;
export const useDiscoverAction = (forceDisable: boolean) => {
  const appDeps = useAppDependencies();
  const savedObjectsClient = appDeps.savedObjects.client;
  const indexPatterns = appDeps.data.indexPatterns;
  const { getUrlGenerator } = appDeps.share.urlGenerators;
  const isDiscoverAvailable = !!appDeps.application.capabilities.discover?.show;

  const { getIndexPatternIdByTitle, loadIndexPatterns } = useSearchItems(undefined);

  const [indexPatternsLoaded, setIndexPatternsLoaded] = useState(false);

  useEffect(() => {
    async function checkIndexPatternAvailability() {
      await loadIndexPatterns(savedObjectsClient, indexPatterns);
      setIndexPatternsLoaded(true);
    }

    checkIndexPatternAvailability();
  }, [indexPatterns, loadIndexPatterns, savedObjectsClient]);

  const clickHandler = useCallback(
    async (item: TransformListRow) => {
      let discoverUrlGenerator;
      try {
        discoverUrlGenerator = getUrlGenerator(DISCOVER_APP_URL_GENERATOR);
      } catch (error) {
        // ignore error thrown when url generator is not available
        return;
      }

      const indexPatternTitle = getIndexPatternTitleFromTargetIndex(item);
      const indexPatternId = getIndexPatternIdByTitle(indexPatternTitle);
      const state: DiscoverUrlGeneratorState = {
        indexPatternId,
      };
      const path = await discoverUrlGenerator.createUrl(state);
      appDeps.application.navigateToApp('discover', { path });
    },
    [appDeps.application, getIndexPatternIdByTitle, getUrlGenerator]
  );

  const indexPatternExists = useCallback(
    (item: TransformListRow) => {
      const indexPatternTitle = getIndexPatternTitleFromTargetIndex(item);
      const indexPatternId = getIndexPatternIdByTitle(indexPatternTitle);
      return indexPatternId !== undefined;
    },
    [getIndexPatternIdByTitle]
  );

  const action: TransformListAction = useMemo(
    () => ({
      name: (item: TransformListRow) => {
        return <DiscoverActionName items={[item]} indexPatternExists={indexPatternExists(item)} />;
      },
      available: () => isDiscoverAvailable,
      enabled: (item: TransformListRow) =>
        indexPatternsLoaded &&
        !isDiscoverActionDisabled([item], forceDisable, indexPatternExists(item)),
      description: discoverActionNameText,
      icon: 'visTable',
      type: 'icon',
      onClick: clickHandler,
      'data-test-subj': 'transformActionDiscover',
    }),
    [forceDisable, indexPatternExists, indexPatternsLoaded, isDiscoverAvailable, clickHandler]
  );

  return { action };
};
@@ -5,7 +5,7 @@
 * 2.0.
 */

import { render, fireEvent } from '@testing-library/react';
import { render, fireEvent, waitFor } from '@testing-library/react';
import React from 'react';
import moment from 'moment-timezone';
import { TransformListRow } from '../../../../common';

@@ -41,20 +41,26 @@ describe('Transform: Transform List <ExpandedRow />', () => {
      </MlSharedContext.Provider>
    );

    expect(getByText('Details')).toBeInTheDocument();
    expect(getByText('Stats')).toBeInTheDocument();
    expect(getByText('JSON')).toBeInTheDocument();
    expect(getByText('Messages')).toBeInTheDocument();
    expect(getByText('Preview')).toBeInTheDocument();
    await waitFor(() => {
      expect(getByText('Details')).toBeInTheDocument();
      expect(getByText('Stats')).toBeInTheDocument();
      expect(getByText('JSON')).toBeInTheDocument();
      expect(getByText('Messages')).toBeInTheDocument();
      expect(getByText('Preview')).toBeInTheDocument();

    const tabContent = getByTestId('transformDetailsTabContent');
    expect(tabContent).toBeInTheDocument();
      const tabContent = getByTestId('transformDetailsTabContent');
      expect(tabContent).toBeInTheDocument();

    expect(getByTestId('transformDetailsTab')).toHaveAttribute('aria-selected', 'true');
    expect(within(tabContent).getByText('General')).toBeInTheDocument();
      expect(getByTestId('transformDetailsTab')).toHaveAttribute('aria-selected', 'true');
      expect(within(tabContent).getByText('General')).toBeInTheDocument();
    });

    fireEvent.click(getByTestId('transformStatsTab'));
    expect(getByTestId('transformStatsTab')).toHaveAttribute('aria-selected', 'true');
    expect(within(tabContent).getByText('Stats')).toBeInTheDocument();

    await waitFor(() => {
      expect(getByTestId('transformStatsTab')).toHaveAttribute('aria-selected', 'true');
      const tabContent = getByTestId('transformDetailsTabContent');
      expect(within(tabContent).getByText('Stats')).toBeInTheDocument();
    });
  });
});
@@ -7,20 +7,26 @@

import { renderHook } from '@testing-library/react-hooks';

import { useActions } from './use_actions';

jest.mock('../../../../../shared_imports');
jest.mock('../../../../../app/app_dependencies');

import { useActions } from './use_actions';

describe('Transform: Transform List Actions', () => {
  test('useActions()', () => {
    const { result } = renderHook(() => useActions({ forceDisable: false, transformNodes: 1 }));
  test('useActions()', async () => {
    const { result, waitForNextUpdate } = renderHook(() =>
      useActions({ forceDisable: false, transformNodes: 1 })
    );

    await waitForNextUpdate();

    const actions = result.current.actions;

    // Using `any` for the callback. Somehow the EUI types don't pass
    // on the `data-test-subj` attribute correctly. We're interested
    // in the runtime result here anyway.
    expect(actions.map((a: any) => a['data-test-subj'])).toStrictEqual([
      'transformActionDiscover',
      'transformActionStart',
      'transformActionStop',
      'transformActionEdit',
@@ -13,6 +13,7 @@ import { TransformListRow } from '../../../../common';

import { useCloneAction } from '../action_clone';
import { useDeleteAction, DeleteActionModal } from '../action_delete';
import { useDiscoverAction } from '../action_discover';
import { EditTransformFlyout } from '../edit_transform_flyout';
import { useEditAction } from '../action_edit';
import { useStartAction, StartActionModal } from '../action_start';

@@ -30,6 +31,7 @@ export const useActions = ({
} => {
  const cloneAction = useCloneAction(forceDisable, transformNodes);
  const deleteAction = useDeleteAction(forceDisable);
  const discoverAction = useDiscoverAction(forceDisable);
  const editAction = useEditAction(forceDisable, transformNodes);
  const startAction = useStartAction(forceDisable, transformNodes);
  const stopAction = useStopAction(forceDisable);

@@ -45,6 +47,7 @@ export const useActions = ({
      </>
    ),
    actions: [
      discoverAction.action,
      startAction.action,
      stopAction.action,
      editAction.action,
@@ -13,8 +13,11 @@ jest.mock('../../../../../shared_imports');
jest.mock('../../../../../app/app_dependencies');

describe('Transform: Job List Columns', () => {
  test('useColumns()', () => {
    const { result } = renderHook(() => useColumns([], () => {}, 1, []));
  test('useColumns()', async () => {
    const { result, waitForNextUpdate } = renderHook(() => useColumns([], () => {}, 1, []));

    await waitForNextUpdate();

    const columns: ReturnType<typeof useColumns>['columns'] = result.current.columns;

    expect(columns).toHaveLength(7);
@@ -7,11 +7,12 @@

import { i18n as kbnI18n } from '@kbn/i18n';

import { CoreSetup } from 'src/core/public';
import { DataPublicPluginStart } from 'src/plugins/data/public';
import { HomePublicPluginSetup } from 'src/plugins/home/public';
import { SavedObjectsStart } from 'src/plugins/saved_objects/public';
import { ManagementSetup } from '../../../../src/plugins/management/public';
import type { CoreSetup } from 'src/core/public';
import type { DataPublicPluginStart } from 'src/plugins/data/public';
import type { HomePublicPluginSetup } from 'src/plugins/home/public';
import type { SavedObjectsStart } from 'src/plugins/saved_objects/public';
import type { ManagementSetup } from 'src/plugins/management/public';
import type { SharePluginStart } from 'src/plugins/share/public';
import { registerFeature } from './register_feature';

export interface PluginsDependencies {

@@ -19,6 +20,7 @@ export interface PluginsDependencies {
  management: ManagementSetup;
  home: HomePublicPluginSetup;
  savedObjects: SavedObjectsStart;
  share: SharePluginStart;
}

export class TransformUiPlugin {
@@ -10,13 +10,13 @@ import { FtrProviderContext } from '../../../ftr_provider_context';

export default function ({ getService }: FtrProviderContext) {
  const supertest = getService('supertest');
  const es = getService('legacyEs');
  const es = getService('es');

  describe('load', () => {
    it('should return the ES cluster info', async () => {
      const { body } = await supertest.get('/api/logstash/cluster').expect(200);

      const responseFromES = await es.info();
      const { body: responseFromES } = await es.info();
      expect(body.cluster.uuid).to.eql(responseFromES.cluster_uuid);
    });
  });
@@ -269,6 +269,9 @@ export default function ({ getService }: FtrProviderContext) {

        await ml.testExecution.logTestStep('imports the file');
        await ml.dataVisualizerFileBased.startImportAndWaitForProcessing();

        await ml.testExecution.logTestStep('creates filebeat config');
        await ml.dataVisualizerFileBased.selectCreateFilebeatConfig();
      });
    });
}
@@ -89,6 +89,7 @@ export default function ({ getService }: FtrProviderContext) {
      get destinationIndex(): string {
        return `user-${this.transformId}`;
      },
      discoverAdjustSuperDatePicker: true,
      expected: {
        pivotAdvancedEditorValueArr: ['{', ' "group_by": {', ' "category.keyword": {'],
        pivotAdvancedEditorValue: {

@@ -210,6 +211,7 @@ export default function ({ getService }: FtrProviderContext) {
            ],
          },
        ],
        discoverQueryHits: '7,270',
      },
    } as PivotTransformTestData,
    {

@@ -247,6 +249,7 @@ export default function ({ getService }: FtrProviderContext) {
      get destinationIndex(): string {
        return `user-${this.transformId}`;
      },
      discoverAdjustSuperDatePicker: false,
      expected: {
        pivotAdvancedEditorValueArr: ['{', ' "group_by": {', ' "geoip.country_iso_code": {'],
        pivotAdvancedEditorValue: {

@@ -294,6 +297,7 @@ export default function ({ getService }: FtrProviderContext) {
          rows: 5,
        },
        histogramCharts: [],
        discoverQueryHits: '10',
      },
    } as PivotTransformTestData,
    {

@@ -317,6 +321,7 @@ export default function ({ getService }: FtrProviderContext) {
      get destinationIndex(): string {
        return `user-${this.transformId}`;
      },
      discoverAdjustSuperDatePicker: true,
      expected: {
        latestPreview: {
          column: 0,

@@ -342,6 +347,7 @@ export default function ({ getService }: FtrProviderContext) {
          'July 12th 2019, 23:31:12',
        ],
      },
      discoverQueryHits: '10',
      },
    } as LatestTransformTestData,
  ];

@@ -533,6 +539,26 @@ export default function ({ getService }: FtrProviderContext) {
          progress: testData.expected.row.progress,
        });
      });

      it('navigates to discover and displays results of the destination index', async () => {
        await transform.testExecution.logTestStep('should show the actions popover');
        await transform.table.assertTransformRowActions(testData.transformId, false);

        await transform.testExecution.logTestStep('should navigate to discover');
        await transform.table.clickTransformRowAction('Discover');

        if (testData.discoverAdjustSuperDatePicker) {
          await transform.discover.assertNoResults(testData.destinationIndex);
          await transform.testExecution.logTestStep(
            'should switch quick select lookback to years'
          );
          await transform.discover.assertSuperDatePickerToggleQuickMenuButtonExists();
          await transform.discover.openSuperDatePicker();
          await transform.discover.quickSelectYears();
        }

        await transform.discover.assertDiscoverQueryHits(testData.expected.discoverQueryHits);
      });
    });
  }
});
@@ -66,6 +66,7 @@ export interface BaseTransformTestData {
  transformDescription: string;
  expected: any;
  destinationIndex: string;
  discoverAdjustSuperDatePicker: boolean;
}

export interface PivotTransformTestData extends BaseTransformTestData {
@@ -125,5 +125,11 @@ export function MachineLearningDataVisualizerFileBasedProvider(
        await testSubjects.existOrFail('mlFileImportSuccessCallout');
      });
    },

    async selectCreateFilebeatConfig() {
      await testSubjects.scrollIntoView('fileDataVisFilebeatConfigLink');
      await testSubjects.click('fileDataVisFilebeatConfigLink');
      await testSubjects.existOrFail('fileDataVisFilebeatConfigPanel');
    },
  };
}
x-pack/test/functional/services/transform/discover.ts (new file, 65 lines)
@@ -0,0 +1,65 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import expect from '@kbn/expect';

import { FtrProviderContext } from '../../ftr_provider_context';

export function TransformDiscoverProvider({ getService }: FtrProviderContext) {
  const find = getService('find');
  const testSubjects = getService('testSubjects');

  return {
    async assertDiscoverQueryHits(expectedDiscoverQueryHits: string) {
      await testSubjects.existOrFail('discoverQueryHits');

      const actualDiscoverQueryHits = await testSubjects.getVisibleText('discoverQueryHits');

      expect(actualDiscoverQueryHits).to.eql(
        expectedDiscoverQueryHits,
        `Discover query hits should be ${expectedDiscoverQueryHits}, got ${actualDiscoverQueryHits}`
      );
    },

    async assertNoResults(expectedDestinationIndex: string) {
      // Discover should use the destination index pattern
      const actualIndexPatternSwitchLinkText = await (
        await testSubjects.find('indexPattern-switch-link')
      ).getVisibleText();
      expect(actualIndexPatternSwitchLinkText).to.eql(
        expectedDestinationIndex,
        `Destination index should be ${expectedDestinationIndex}, got ${actualIndexPatternSwitchLinkText}`
      );

      await testSubjects.existOrFail('discoverNoResults');
    },

    async assertSuperDatePickerToggleQuickMenuButtonExists() {
      await testSubjects.existOrFail('superDatePickerToggleQuickMenuButton');
    },

    async openSuperDatePicker() {
      await testSubjects.click('superDatePickerToggleQuickMenuButton');
      await testSubjects.existOrFail('superDatePickerQuickMenu');
    },

    async quickSelectYears() {
      const quickMenuElement = await testSubjects.find('superDatePickerQuickMenu');

      // No test subject, select "Years" to look back 15 years instead of 15 minutes.
      await find.selectValue(`[aria-label*="Time unit"]`, 'y');

      // Apply
      const applyButton = await quickMenuElement.findByClassName('euiQuickSelect__applyButton');
      const actualApplyButtonText = await applyButton.getVisibleText();
      expect(actualApplyButtonText).to.be('Apply');

      await applyButton.click();
      await testSubjects.existOrFail('discoverQueryHits');
    },
  };
}
@@ -9,6 +9,7 @@ import { FtrProviderContext } from '../../ftr_provider_context';

import { TransformAPIProvider } from './api';
import { TransformEditFlyoutProvider } from './edit_flyout';
import { TransformDiscoverProvider } from './discover';
import { TransformManagementProvider } from './management';
import { TransformNavigationProvider } from './navigation';
import { TransformSecurityCommonProvider } from './security_common';

@@ -22,6 +23,7 @@ import { MachineLearningTestResourcesProvider } from '../ml/test_resources';

export function TransformProvider(context: FtrProviderContext) {
  const api = TransformAPIProvider(context);
  const discover = TransformDiscoverProvider(context);
  const editFlyout = TransformEditFlyoutProvider(context);
  const management = TransformManagementProvider(context);
  const navigation = TransformNavigationProvider(context);

@@ -35,6 +37,7 @@ export function TransformProvider(context: FtrProviderContext) {

  return {
    api,
    discover,
    editFlyout,
    management,
    navigation,
@@ -9,6 +9,8 @@ import expect from '@kbn/expect';

import { FtrProviderContext } from '../../ftr_provider_context';

type TransformRowActionName = 'Clone' | 'Delete' | 'Edit' | 'Start' | 'Stop' | 'Discover';

export function TransformTableProvider({ getService }: FtrProviderContext) {
  const retry = getService('retry');
  const testSubjects = getService('testSubjects');

@@ -238,6 +240,7 @@ export function TransformTableProvider({ getService }: FtrProviderContext) {

      await testSubjects.existOrFail('transformActionClone');
      await testSubjects.existOrFail('transformActionDelete');
      await testSubjects.existOrFail('transformActionDiscover');
      await testSubjects.existOrFail('transformActionEdit');

      if (isTransformRunning) {

@@ -251,7 +254,7 @@ export function TransformTableProvider({ getService }: FtrProviderContext) {

    public async assertTransformRowActionEnabled(
      transformId: string,
      action: 'Delete' | 'Start' | 'Stop' | 'Clone' | 'Edit',
      action: TransformRowActionName,
      expectedValue: boolean
    ) {
      const selector = `transformAction${action}`;

@@ -274,7 +277,7 @@ export function TransformTableProvider({ getService }: FtrProviderContext) {

    public async clickTransformRowActionWithRetry(
      transformId: string,
      action: 'Delete' | 'Start' | 'Stop' | 'Clone' | 'Edit'
      action: TransformRowActionName
    ) {
      await retry.tryForTime(30 * 1000, async () => {
        await browser.pressKeys(browser.keys.ESCAPE);

@@ -285,7 +288,7 @@ export function TransformTableProvider({ getService }: FtrProviderContext) {
      });
    }

    public async clickTransformRowAction(action: string) {
    public async clickTransformRowAction(action: TransformRowActionName) {
      await testSubjects.click(`transformAction${action}`);
    }
