Merge branch 'master' into eui/33.0

Greg Thompson 2021-05-11 09:57:49 -05:00
commit 30ade23490
No known key found for this signature in database
GPG key ID: ED1F695C1077B958
855 changed files with 9352 additions and 8219 deletions

.github/CODEOWNERS vendored

@ -188,6 +188,7 @@
/src/core/ @elastic/kibana-core
/src/plugins/saved_objects_tagging_oss @elastic/kibana-core
/config/kibana.yml @elastic/kibana-core
/x-pack/plugins/banners/ @elastic/kibana-core
/x-pack/plugins/features/ @elastic/kibana-core
/x-pack/plugins/licensing/ @elastic/kibana-core
/x-pack/plugins/global_search/ @elastic/kibana-core

Jenkinsfile vendored

@ -6,7 +6,7 @@ kibanaLibrary.load()
kibanaPipeline(timeoutMinutes: 210, checkPrChanges: true, setCommitStatus: true) {
slackNotifications.onFailure(disabled: !params.NOTIFY_ON_FAILURE) {
githubPr.withDefaultPrComments {
ciStats.trackBuild(requireSuccess: githubPr.isPr()) {
ciStats.trackBuild(requireSuccess: githubPr.isTrackedBranchPr()) {
catchError {
retryable.enable()
kibanaPipeline.allCiTasks()


@ -1,15 +1,15 @@
# Kibana Style Guide
---
id: kibStyleGuide
slug: /kibana-dev-docs/styleguide
title: StyleGuide
summary: JavaScript/TypeScript styleguide.
date: 2021-05-06
tags: ['kibana', 'onboarding', 'dev', 'styleguide', 'typescript', 'javascript']
---
This guide applies to all development within the Kibana project and is
recommended for the development of all Kibana plugins.
- [General](#general)
- [HTML](#html)
- [API endpoints](#api-endpoints)
- [TypeScript/JavaScript](#typeScript/javaScript)
- [SASS files](#sass-files)
- [React](#react)
Besides the content in this style guide, the following style guides may also apply
to all development within the Kibana project. Please make sure to also read them:
@ -52,9 +52,7 @@ This part contains style guide rules around general (framework agnostic) HTML us
Use camel case for the values of attributes such as `id` and `data-test-subj` selectors.
```html
<button id="veryImportantButton" data-test-subj="clickMeButton">
Click me
</button>
<button id="veryImportantButton" data-test-subj="clickMeButton">Click me</button>
```
The only exception is in cases where you're dynamically creating the value, and you need to use
@ -378,6 +376,20 @@ import inFoo from 'foo/child';
import inSibling from '../foo/child';
```
#### Avoid export \* in top level index.ts files
The exports in `common/index.ts`, `public/index.ts` and `server/index.ts` dictate a plugin's public API. The public API should be carefully controlled, and using `export *` makes it very easy for a developer working on internal changes to export a new public API unintentionally.
```js
// good
export { foo } from 'foo';
export { child } from './child';
// bad
export * from 'foo/child';
export * from '../foo/child';
```
### Global definitions
Don't do this. Everything should be wrapped in a module that can be depended on
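A minimal sketch of the distinction, with illustrative names that are not from the style guide itself:
```ts
// bad — attaches a helper to the global scope, hiding the dependency
// (window as any).formatName = (first: string, last: string) => `${first} ${last}`;

// good — export from a module so consumers declare the dependency explicitly
export const formatName = (first: string, last: string): string => `${first} ${last}`;
```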
@ -592,20 +604,20 @@ Do not use setters, they cause more problems than they can solve.
### Avoid circular dependencies
As part of a future effort to use correct and idempotent build tools we need our code to be
able to be represented as a directed acyclic graph. We must avoid having circular dependencies
both on code and type imports to achieve that. One of the most critical parts is the plugins
code. We've developed a tool to identify plugins with circular dependencies, which
has allowed us to build a list of plugins that have circular dependencies
between each other.
When building plugins we should avoid importing from plugins
that are known to have circular dependencies at the moment, as well as introducing
new circular dependencies. You can run the same tool we use on our CI locally by
typing `node scripts/find_plugins_with_circular_deps --debug`. It will error out in
case new circular dependencies have been added with your changes
(which will also happen in the CI) as well as print out the current list of
the known circular dependencies which, as mentioned before, should not be imported
by your code until the circular dependencies on these have been solved.
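For illustration, this is the shape of the problem the tool detects — two hypothetical sibling plugins whose imports form a cycle (not real Kibana plugins):
```ts
// plugins/foo/index.ts
import { BAR_ID } from '../bar'; // foo depends on bar

export const FOO_ID = 'foo';
export const describeFoo = () => `${FOO_ID} is a sibling of ${BAR_ID}`;

// plugins/bar/index.ts
import { FOO_ID } from '../foo'; // bar depends on foo — the import graph now has a cycle

export const BAR_ID = 'bar';
export const describeBar = () => `${BAR_ID} is a sibling of ${FOO_ID}`;
```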
## SASS files
@ -626,10 +638,8 @@ import './component.scss';
// All other imports below the SASS import
export const Component = () => {
return (
<div className="plgComponent" />
);
}
return <div className="plgComponent" />;
};
```
```scss


@ -144,6 +144,7 @@ readonly links: {
createSnapshotLifecyclePolicy: string;
createRoleMapping: string;
createRoleMappingTemplates: string;
createRollupJobsRequest: string;
createApiKey: string;
createPipeline: string;
createTransformRequest: string;

File diff suppressed because one or more lines are too long


@ -6,6 +6,7 @@
> Warning: This API is now obsolete.
>
> 7.16
>
<b>Signature:</b>


@ -6,6 +6,7 @@
> Warning: This API is now obsolete.
>
> 7.16
>
<b>Signature:</b>


@ -6,6 +6,7 @@
> Warning: This API is now obsolete.
>
> 7.16
>
<b>Signature:</b>


@ -6,6 +6,7 @@
> Warning: This API is now obsolete.
>
> 7.16
>
<b>Signature:</b>


@ -6,6 +6,7 @@
> Warning: This API is now obsolete.
>
> 7.16
>
<b>Signature:</b>


@ -6,7 +6,7 @@
> Warning: This API is now obsolete.
>
> Use [IClusterClient](./kibana-plugin-core-server.iclusterclient.md)<!-- -->.
> Use [IClusterClient](./kibana-plugin-core-server.iclusterclient.md)<!-- -->. 7.16
>
Represents an Elasticsearch cluster API client created by the platform. It allows to call API on behalf of the internal Kibana user and the actual user that is derived from the request headers (via `asScoped(...)`<!-- -->).


@ -6,7 +6,7 @@
> Warning: This API is now obsolete.
>
> Use [ICustomClusterClient](./kibana-plugin-core-server.icustomclusterclient.md)<!-- -->.
> Use [ICustomClusterClient](./kibana-plugin-core-server.icustomclusterclient.md)<!-- -->. 7.16
>
Represents an Elasticsearch cluster API client created by a plugin. It allows to call API on behalf of the internal Kibana user and the actual user that is derived from the request headers (via `asScoped(...)`<!-- -->).


@ -6,7 +6,7 @@
> Warning: This API is now obsolete.
>
> Use [IScopedClusterClient](./kibana-plugin-core-server.iscopedclusterclient.md)<!-- -->.
> Use [IScopedClusterClient](./kibana-plugin-core-server.iscopedclusterclient.md)<!-- -->. 7.16
>
Serves the same purpose as "normal" `ClusterClient` but exposes additional `callAsCurrentUser` method that doesn't use credentials of the Kibana internal user (as `callAsInternalUser` does) to request Elasticsearch API, but rather passes HTTP headers extracted from the current user request to the API.


@ -6,6 +6,7 @@
> Warning: This API is now obsolete.
>
> 7.16
>
<b>Signature:</b>


@ -6,6 +6,7 @@
> Warning: This API is now obsolete.
>
> 7.16
>
<b>Signature:</b>


@ -6,6 +6,7 @@
> Warning: This API is now obsolete.
>
> 7.16
>
The set of options that defines how API call should be made and result be processed.


@ -6,7 +6,7 @@
> Warning: This API is now obsolete.
>
> Use [IClusterClient](./kibana-plugin-core-server.iclusterclient.md)<!-- -->.
> Use [IClusterClient](./kibana-plugin-core-server.iclusterclient.md)<!-- -->. 7.16
>
Represents an Elasticsearch cluster API client created by the platform. It allows to call API on behalf of the internal Kibana user and the actual user that is derived from the request headers (via `asScoped(...)`<!-- -->).


@ -4,7 +4,7 @@
## LegacyElasticsearchError interface
@<!-- -->deprecated. The new elasticsearch client doesn't wrap errors anymore.
@<!-- -->deprecated. The new elasticsearch client doesn't wrap errors anymore. 7.16
<b>Signature:</b>


@ -6,7 +6,7 @@
> Warning: This API is now obsolete.
>
> Use [IScopedClusterClient.asCurrentUser](./kibana-plugin-core-server.iscopedclusterclient.ascurrentuser.md)<!-- -->.
> Use [IScopedClusterClient.asCurrentUser](./kibana-plugin-core-server.iscopedclusterclient.ascurrentuser.md)<!-- -->. 7.16
>
Calls specified `endpoint` with provided `clientParams` on behalf of the user initiated request to the Kibana server (via HTTP request headers). See [LegacyAPICaller](./kibana-plugin-core-server.legacyapicaller.md)<!-- -->.


@ -6,7 +6,7 @@
> Warning: This API is now obsolete.
>
> Use [IScopedClusterClient.asInternalUser](./kibana-plugin-core-server.iscopedclusterclient.asinternaluser.md)<!-- -->.
> Use [IScopedClusterClient.asInternalUser](./kibana-plugin-core-server.iscopedclusterclient.asinternaluser.md)<!-- -->. 7.16
>
Calls specified `endpoint` with provided `clientParams` on behalf of the Kibana internal user. See [LegacyAPICaller](./kibana-plugin-core-server.legacyapicaller.md)<!-- -->.


@ -6,7 +6,7 @@
> Warning: This API is now obsolete.
>
> Use [scoped cluster client](./kibana-plugin-core-server.iscopedclusterclient.md)<!-- -->.
> Use [scoped cluster client](./kibana-plugin-core-server.iscopedclusterclient.md)<!-- -->. 7.16
>
Serves the same purpose as the normal [cluster client](./kibana-plugin-core-server.iclusterclient.md) but exposes an additional `asCurrentUser` method that doesn't use credentials of the Kibana internal user (as `asInternalUser` does) to request Elasticsearch API, but rather passes HTTP headers extracted from the current user request to the API instead.


@ -108,7 +108,7 @@ The plugin integrates with the core system via lifecycle events: `setup`<!-- -->
| [KibanaRequestRoute](./kibana-plugin-core-server.kibanarequestroute.md) | Request specific route information exposed to a handler. |
| [LegacyAPICaller](./kibana-plugin-core-server.legacyapicaller.md) | |
| [LegacyCallAPIOptions](./kibana-plugin-core-server.legacycallapioptions.md) | The set of options that defines how API call should be made and result be processed. |
| [LegacyElasticsearchError](./kibana-plugin-core-server.legacyelasticsearcherror.md) | @<!-- -->deprecated. The new elasticsearch client doesn't wrap errors anymore. |
| [LegacyElasticsearchError](./kibana-plugin-core-server.legacyelasticsearcherror.md) | @<!-- -->deprecated. The new elasticsearch client doesn't wrap errors anymore. 7.16 |
| [LegacyRequest](./kibana-plugin-core-server.legacyrequest.md) | |
| [LoggerContextConfigInput](./kibana-plugin-core-server.loggercontextconfiginput.md) | |
| [LoggingServiceSetup](./kibana-plugin-core-server.loggingservicesetup.md) | Provides APIs to plugins for customizing the plugin's logger. |


@ -6,6 +6,7 @@
> Warning: This API is now obsolete.
>
> 7.16
>
<b>Signature:</b>


@ -6,6 +6,7 @@
> Warning: This API is now obsolete.
>
> 7.16
>
<b>Signature:</b>


@ -7,6 +7,7 @@
{kib} supports the following authentication mechanisms:
- <<multiple-authentication-providers>>
- <<basic-authentication>>
- <<token-authentication>>
- <<pki-authentication>>
@ -16,7 +17,12 @@
- <<anonymous-authentication>>
- <<http-authentication>>
Enable multiple authentication mechanisms at the same time specifying a prioritized list of the authentication _providers_ (typically of various types) in the configuration. Providers are consulted in ascending order. Make sure each configured provider has a unique name (e.g. `basic1` or `saml1` in the configuration example) and `order` setting. In the event that two or more providers have the same name or `order`, {kib} will fail to start.
For an introduction to {kib}'s security features, including the login process, refer to <<tutorial-secure-access-to-kibana>>.
[[multiple-authentication-providers]]
==== Multiple authentication providers
Enable multiple authentication mechanisms at the same time by specifying a prioritized list of the authentication _providers_ (typically of various types) in the configuration. Providers are consulted in ascending order. Make sure each configured provider has a unique name (e.g. `basic1` or `saml1` in the configuration example) and `order` setting. In the event that two or more providers have the same name or `order`, {kib} will fail to start.
When two or more providers are configured, you can choose the provider you want to use on the Login Selector UI. The order the providers appear is determined by the `order` setting. The appearance of the specific provider entry can be customized with the `description`, `hint`, and `icon` settings.
@ -24,7 +30,7 @@ TIP: To provide login instructions to users, use the `xpack.security.loginHelp`
If you don't want a specific provider to show up at the Login Selector UI (e.g. to only support third-party initiated login) you can hide it with `showInSelector` setting set to `false`. However, in this case, the provider is presented in the provider chain and may be consulted during authentication based on its `order`. To disable the provider, use the `enabled` setting.
TIP: The Login Selector UI can also be disabled or enabled with `xpack.security.authc.selector.enabled` setting.
Here is how your `kibana.yml` and Login Selector UI can look like if you deal with multiple authentication providers:
@ -292,9 +298,9 @@ xpack.security.authc.providers:
order: 1
-----------------------------------------------
IMPORTANT: {kib} uses SPNEGO, which wraps the Kerberos protocol for use with HTTP, extending it to web applications.
At the end of the Kerberos handshake, {kib} forwards the service ticket to {es}, then {es} unpacks the service ticket and responds with an access and refresh token, which are used for subsequent authentication.
On every {es} node that {kib} connects to, the keytab file should always contain the HTTP service principal for the {kib} host.
The HTTP service principal name must have the `HTTP/kibana.domain.local@KIBANA.DOMAIN.LOCAL` format.
@ -386,7 +392,7 @@ xpack.security.authc.providers:
[[anonymous-access-and-embedding]]
===== Anonymous access and embedding
One of the most popular use cases for anonymous access is when you embed {kib} into other applications and don't want to force your users to log in to view it.
If you configured {kib} to use anonymous access as the sole authentication mechanism, you don't need to do anything special while embedding {kib}.
If you have multiple authentication providers enabled, and you want to automatically log in anonymous users when embedding dashboards and visualizations:


@ -108,7 +108,7 @@
"@elastic/good": "^9.0.1-kibana3",
"@elastic/maki": "6.3.0",
"@elastic/node-crypto": "1.2.1",
"@elastic/numeral": "^2.5.0",
"@elastic/numeral": "^2.5.1",
"@elastic/react-search-ui": "^1.5.1",
"@elastic/request-crypto": "1.1.4",
"@elastic/safer-lodash-set": "link:bazel-bin/packages/elastic-safer-lodash-set/npm_module",
@ -315,7 +315,7 @@
"proxy-from-env": "1.0.0",
"proxyquire": "1.8.0",
"puid": "1.0.7",
"puppeteer": "npm:@elastic/puppeteer@5.4.1-patch.1",
"puppeteer": "^8.0.0",
"query-string": "^6.13.2",
"raw-loader": "^3.1.0",
"rbush": "^3.0.1",
@ -587,7 +587,6 @@
"@types/pretty-ms": "^5.0.0",
"@types/prop-types": "^15.7.3",
"@types/proper-lockfile": "^3.0.1",
"@types/puppeteer": "^5.4.1",
"@types/rbush": "^3.0.0",
"@types/reach__router": "^1.2.6",
"@types/react": "^16.9.36",


@ -36,10 +36,9 @@ describe('applyDeprecations', () => {
const addDeprecation = jest.fn();
const createAddDeprecation = jest.fn().mockReturnValue(addDeprecation);
const initialConfig = { foo: 'bar', deprecated: 'deprecated' };
const alteredConfig = { foo: 'bar' };
const handlerA = jest.fn().mockReturnValue(alteredConfig);
const handlerB = jest.fn().mockImplementation((conf) => conf);
const handlerA = jest.fn().mockReturnValue({ unset: [{ path: 'deprecated' }] });
const handlerB = jest.fn().mockReturnValue(undefined);
applyDeprecations(
initialConfig,
@ -47,8 +46,6 @@ describe('applyDeprecations', () => {
createAddDeprecation
);
expect(handlerA).toHaveBeenCalledWith(initialConfig, 'pathA', addDeprecation);
expect(handlerB).toHaveBeenCalledWith(alteredConfig, 'pathB', addDeprecation);
expect(createAddDeprecation).toBeCalledTimes(2);
expect(createAddDeprecation).toHaveBeenNthCalledWith(1, 'pathA');
expect(createAddDeprecation).toHaveBeenNthCalledWith(2, 'pathB');
@ -60,8 +57,15 @@ describe('applyDeprecations', () => {
const initialConfig = { foo: 'bar', deprecated: 'deprecated' };
const alteredConfig = { foo: 'bar' };
const handlerA = jest.fn().mockReturnValue(alteredConfig);
const handlerB = jest.fn().mockImplementation((conf) => conf);
const configs: Array<{ fn: string; config: Record<string, any> }> = [];
const handlerA = jest.fn().mockImplementation((config) => {
// the first argument is mutated between calls, we store a copy of it
configs.push({ fn: 'handlerA', config: { ...config } });
return { unset: [{ path: 'deprecated' }] };
});
const handlerB = jest.fn().mockImplementation((config) => {
configs.push({ fn: 'handlerB', config: { ...config } });
});
applyDeprecations(
initialConfig,
@ -69,8 +73,10 @@ describe('applyDeprecations', () => {
createAddDeprecation
);
expect(handlerA).toHaveBeenCalledWith(initialConfig, 'pathA', addDeprecation);
expect(handlerB).toHaveBeenCalledWith(alteredConfig, 'pathB', addDeprecation);
expect(configs).toEqual([
{ fn: 'handlerA', config: initialConfig },
{ fn: 'handlerB', config: alteredConfig },
]);
});
it('returns the migrated config', () => {
@ -94,4 +100,40 @@ describe('applyDeprecations', () => {
expect(initialConfig).toEqual({ foo: 'bar', deprecated: 'deprecated' });
expect(migrated).toEqual({ foo: 'bar' });
});
it('ignores a command for unknown path', () => {
const addDeprecation = jest.fn();
const createAddDeprecation = jest.fn().mockReturnValue(addDeprecation);
const initialConfig = { foo: 'bar', deprecated: 'deprecated' };
const handler = jest.fn().mockImplementation((config) => {
return { unset: [{ path: 'unknown' }] };
});
const migrated = applyDeprecations(
initialConfig,
[wrapHandler(handler, 'pathA')],
createAddDeprecation
);
expect(migrated).toEqual(initialConfig);
});
it('ignores an unknown command', () => {
const addDeprecation = jest.fn();
const createAddDeprecation = jest.fn().mockReturnValue(addDeprecation);
const initialConfig = { foo: 'bar', deprecated: 'deprecated' };
const handler = jest.fn().mockImplementation((config) => {
return { rewrite: [{ path: 'foo' }] };
});
const migrated = applyDeprecations(
initialConfig,
[wrapHandler(handler, 'pathA')],
createAddDeprecation
);
expect(migrated).toEqual(initialConfig);
});
});


@ -6,7 +6,8 @@
* Side Public License, v 1.
*/
import { cloneDeep } from 'lodash';
import { cloneDeep, unset } from 'lodash';
import { set } from '@elastic/safer-lodash-set';
import { ConfigDeprecationWithContext, AddConfigDeprecation } from './types';
const noopAddDeprecationFactory: () => AddConfigDeprecation = () => () => undefined;
@ -22,9 +23,21 @@ export const applyDeprecations = (
deprecations: ConfigDeprecationWithContext[],
createAddDeprecation: (pluginId: string) => AddConfigDeprecation = noopAddDeprecationFactory
) => {
let processed = cloneDeep(config);
const result = cloneDeep(config);
deprecations.forEach(({ deprecation, path }) => {
processed = deprecation(processed, path, createAddDeprecation(path));
const commands = deprecation(result, path, createAddDeprecation(path));
if (commands) {
if (commands.set) {
commands.set.forEach(function ({ path: commandPath, value }) {
set(result, commandPath, value);
});
}
if (commands.unset) {
commands.unset.forEach(function ({ path: commandPath }) {
unset(result, commandPath);
});
}
}
});
return processed;
return result;
};
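To make the new control flow concrete, here is a minimal sketch of a commands-based handler passing through `applyDeprecations`; the plugin name, config shape, and paths are hypothetical:
```ts
import { applyDeprecations } from './apply_deprecations';
import type { ConfigDeprecation } from './types';

// Handlers no longer mutate or return the config; they describe the change.
const dropLegacyFlag: ConfigDeprecation = (config, fromPath, addDeprecation) => {
  if (config.myplugin?.legacyFlag === undefined) {
    return; // nothing to migrate
  }
  addDeprecation({ message: '"myplugin.legacyFlag" is deprecated and no longer used' });
  return {
    set: [{ path: 'myplugin.enabled', value: true }],
    unset: [{ path: 'myplugin.legacyFlag' }],
  };
};

const migrated = applyDeprecations({ myplugin: { legacyFlag: true } }, [
  { deprecation: dropLegacyFlag, path: 'myplugin' },
]);
// migrated => { myplugin: { enabled: true } }; the input object is left
// untouched because applyDeprecations works on a deep clone.
```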


@ -29,15 +29,15 @@ describe('DeprecationFactory', () => {
property: 'value',
},
};
const processed = rename('deprecated', 'renamed')(rawConfig, 'myplugin', addDeprecation);
expect(processed).toEqual({
myplugin: {
renamed: 'toberenamed',
valid: 'valid',
},
someOtherPlugin: {
property: 'value',
},
const commands = rename('deprecated', 'renamed')(rawConfig, 'myplugin', addDeprecation);
expect(commands).toEqual({
set: [
{
path: 'myplugin.renamed',
value: 'toberenamed',
},
],
unset: [{ path: 'myplugin.deprecated' }],
});
expect(addDeprecation.mock.calls).toMatchInlineSnapshot(`
Array [
@ -64,16 +64,8 @@ describe('DeprecationFactory', () => {
property: 'value',
},
};
const processed = rename('deprecated', 'new')(rawConfig, 'myplugin', addDeprecation);
expect(processed).toEqual({
myplugin: {
new: 'new',
valid: 'valid',
},
someOtherPlugin: {
property: 'value',
},
});
const commands = rename('deprecated', 'new')(rawConfig, 'myplugin', addDeprecation);
expect(commands).toBeUndefined();
expect(addDeprecation).toHaveBeenCalledTimes(0);
});
it('handles nested keys', () => {
@ -88,22 +80,19 @@ describe('DeprecationFactory', () => {
property: 'value',
},
};
const processed = rename('oldsection.deprecated', 'newsection.renamed')(
const commands = rename('oldsection.deprecated', 'newsection.renamed')(
rawConfig,
'myplugin',
addDeprecation
);
expect(processed).toEqual({
myplugin: {
oldsection: {},
newsection: {
renamed: 'toberenamed',
expect(commands).toEqual({
set: [
{
path: 'myplugin.newsection.renamed',
value: 'toberenamed',
},
valid: 'valid',
},
someOtherPlugin: {
property: 'value',
},
],
unset: [{ path: 'myplugin.oldsection.deprecated' }],
});
expect(addDeprecation.mock.calls).toMatchInlineSnapshot(`
Array [
@ -127,11 +116,9 @@ describe('DeprecationFactory', () => {
renamed: 'renamed',
},
};
const processed = rename('deprecated', 'renamed')(rawConfig, 'myplugin', addDeprecation);
expect(processed).toEqual({
myplugin: {
renamed: 'renamed',
},
const commands = rename('deprecated', 'renamed')(rawConfig, 'myplugin', addDeprecation);
expect(commands).toEqual({
unset: [{ path: 'myplugin.deprecated' }],
});
expect(addDeprecation.mock.calls).toMatchInlineSnapshot(`
Array [
@ -162,19 +149,19 @@ describe('DeprecationFactory', () => {
property: 'value',
},
};
const processed = renameFromRoot('myplugin.deprecated', 'myplugin.renamed')(
const commands = renameFromRoot('myplugin.deprecated', 'myplugin.renamed')(
rawConfig,
'does-not-matter',
addDeprecation
);
expect(processed).toEqual({
myplugin: {
renamed: 'toberenamed',
valid: 'valid',
},
someOtherPlugin: {
property: 'value',
},
expect(commands).toEqual({
set: [
{
path: 'myplugin.renamed',
value: 'toberenamed',
},
],
unset: [{ path: 'myplugin.deprecated' }],
});
expect(addDeprecation.mock.calls).toMatchInlineSnapshot(`
Array [
@ -202,19 +189,19 @@ describe('DeprecationFactory', () => {
property: 'value',
},
};
const processed = renameFromRoot('oldplugin.deprecated', 'newplugin.renamed')(
const commands = renameFromRoot('oldplugin.deprecated', 'newplugin.renamed')(
rawConfig,
'does-not-matter',
addDeprecation
);
expect(processed).toEqual({
oldplugin: {
valid: 'valid',
},
newplugin: {
renamed: 'toberenamed',
property: 'value',
},
expect(commands).toEqual({
set: [
{
path: 'newplugin.renamed',
value: 'toberenamed',
},
],
unset: [{ path: 'oldplugin.deprecated' }],
});
expect(addDeprecation.mock.calls).toMatchInlineSnapshot(`
Array [
@ -242,20 +229,12 @@ describe('DeprecationFactory', () => {
property: 'value',
},
};
const processed = renameFromRoot('myplugin.deprecated', 'myplugin.new')(
const commands = renameFromRoot('myplugin.deprecated', 'myplugin.new')(
rawConfig,
'does-not-matter',
addDeprecation
);
expect(processed).toEqual({
myplugin: {
new: 'new',
valid: 'valid',
},
someOtherPlugin: {
property: 'value',
},
});
expect(commands).toBeUndefined();
expect(addDeprecation).toBeCalledTimes(0);
});
@ -266,15 +245,13 @@ describe('DeprecationFactory', () => {
renamed: 'renamed',
},
};
const processed = renameFromRoot('myplugin.deprecated', 'myplugin.renamed')(
const commands = renameFromRoot('myplugin.deprecated', 'myplugin.renamed')(
rawConfig,
'does-not-matter',
addDeprecation
);
expect(processed).toEqual({
myplugin: {
renamed: 'renamed',
},
expect(commands).toEqual({
unset: [{ path: 'myplugin.deprecated' }],
});
expect(addDeprecation.mock.calls).toMatchInlineSnapshot(`
@ -306,14 +283,9 @@ describe('DeprecationFactory', () => {
property: 'value',
},
};
const processed = unused('deprecated')(rawConfig, 'myplugin', addDeprecation);
expect(processed).toEqual({
myplugin: {
valid: 'valid',
},
someOtherPlugin: {
property: 'value',
},
const commands = unused('deprecated')(rawConfig, 'myplugin', addDeprecation);
expect(commands).toEqual({
unset: [{ path: 'myplugin.deprecated' }],
});
expect(addDeprecation.mock.calls).toMatchInlineSnapshot(`
Array [
@ -343,17 +315,10 @@ describe('DeprecationFactory', () => {
property: 'value',
},
};
const processed = unused('section.deprecated')(rawConfig, 'myplugin', addDeprecation);
expect(processed).toEqual({
myplugin: {
valid: 'valid',
section: {},
},
someOtherPlugin: {
property: 'value',
},
const commands = unused('section.deprecated')(rawConfig, 'myplugin', addDeprecation);
expect(commands).toEqual({
unset: [{ path: 'myplugin.section.deprecated' }],
});
expect(addDeprecation.mock.calls).toMatchInlineSnapshot(`
Array [
Array [
@ -379,15 +344,8 @@ describe('DeprecationFactory', () => {
property: 'value',
},
};
const processed = unused('deprecated')(rawConfig, 'myplugin', addDeprecation);
expect(processed).toEqual({
myplugin: {
valid: 'valid',
},
someOtherPlugin: {
property: 'value',
},
});
const commands = unused('deprecated')(rawConfig, 'myplugin', addDeprecation);
expect(commands).toBeUndefined();
expect(addDeprecation).toBeCalledTimes(0);
});
});
@ -403,20 +361,14 @@ describe('DeprecationFactory', () => {
property: 'value',
},
};
const processed = unusedFromRoot('myplugin.deprecated')(
const commands = unusedFromRoot('myplugin.deprecated')(
rawConfig,
'does-not-matter',
addDeprecation
);
expect(processed).toEqual({
myplugin: {
valid: 'valid',
},
someOtherPlugin: {
property: 'value',
},
expect(commands).toEqual({
unset: [{ path: 'myplugin.deprecated' }],
});
expect(addDeprecation.mock.calls).toMatchInlineSnapshot(`
Array [
Array [
@ -442,19 +394,12 @@ describe('DeprecationFactory', () => {
property: 'value',
},
};
const processed = unusedFromRoot('myplugin.deprecated')(
const commands = unusedFromRoot('myplugin.deprecated')(
rawConfig,
'does-not-matter',
addDeprecation
);
expect(processed).toEqual({
myplugin: {
valid: 'valid',
},
someOtherPlugin: {
property: 'value',
},
});
expect(commands).toBeUndefined();
expect(addDeprecation).toBeCalledTimes(0);
});
});


@ -7,13 +7,12 @@
*/
import { get } from 'lodash';
import { set } from '@elastic/safer-lodash-set';
import { unset } from '@kbn/std';
import {
ConfigDeprecation,
AddConfigDeprecation,
ConfigDeprecationFactory,
DeprecatedConfigDetails,
ConfigDeprecationCommand,
} from './types';
const _rename = (
@ -23,20 +22,16 @@ const _rename = (
oldKey: string,
newKey: string,
details?: Partial<DeprecatedConfigDetails>
) => {
): void | ConfigDeprecationCommand => {
const fullOldPath = getPath(rootPath, oldKey);
const oldValue = get(config, fullOldPath);
if (oldValue === undefined) {
return config;
return;
}
unset(config, fullOldPath);
const fullNewPath = getPath(rootPath, newKey);
const newValue = get(config, fullNewPath);
if (newValue === undefined) {
set(config, fullNewPath, oldValue);
addDeprecation({
message: `"${fullOldPath}" is deprecated and has been replaced by "${fullNewPath}"`,
correctiveActions: {
@ -46,6 +41,10 @@ const _rename = (
},
...details,
});
return {
set: [{ path: fullNewPath, value: oldValue }],
unset: [{ path: fullOldPath }],
};
} else {
addDeprecation({
message: `"${fullOldPath}" is deprecated and has been replaced by "${fullNewPath}". However both key are present, ignoring "${fullOldPath}"`,
@ -59,7 +58,9 @@ const _rename = (
});
}
return config;
return {
unset: [{ path: fullOldPath }],
};
};
const _unused = (
@ -68,12 +69,11 @@ const _unused = (
addDeprecation: AddConfigDeprecation,
unusedKey: string,
details?: Partial<DeprecatedConfigDetails>
) => {
): void | ConfigDeprecationCommand => {
const fullPath = getPath(rootPath, unusedKey);
if (get(config, fullPath) === undefined) {
return config;
return;
}
unset(config, fullPath);
addDeprecation({
message: `${fullPath} is deprecated and is no longer used`,
correctiveActions: {
@ -83,7 +83,9 @@ const _unused = (
},
...details,
});
return config;
return {
unset: [{ path: fullPath }],
};
};
const rename = (


@ -8,6 +8,7 @@
export type {
ConfigDeprecation,
ConfigDeprecationCommand,
ConfigDeprecationWithContext,
ConfigDeprecationFactory,
AddConfigDeprecation,


@ -5,7 +5,7 @@
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import type { RecursiveReadonly } from '@kbn/utility-types';
/**
* Config deprecation hook used when invoking a {@link ConfigDeprecation}
*
@ -41,14 +41,29 @@ export interface DeprecatedConfigDetails {
* @remarks
* This should only be manually implemented if {@link ConfigDeprecationFactory} does not provide the proper helpers for a specific
* deprecation need.
* @param config must not be mutated, return {@link ConfigDeprecationCommand} to change config shape.
*
* @example
* ```typescript
* const provider: ConfigDeprecation = (config, path) => ({ unset: [{ path: 'path.to.key' }] })
* ```
* @internal
*/
export type ConfigDeprecation = (
config: RecursiveReadonly<Record<string, any>>,
fromPath: string,
addDeprecation: AddConfigDeprecation
) => void | ConfigDeprecationCommand;
/**
* Outcome of deprecation operation. Allows mutating config values in a declarative way.
*
* @public
*/
export type ConfigDeprecation = (
config: Record<string, any>,
fromPath: string,
addDeprecation: AddConfigDeprecation
) => Record<string, any>;
export interface ConfigDeprecationCommand {
set?: Array<{ path: string; value: any }>;
unset?: Array<{ path: string }>;
}
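As a quick reference, the commands produced by the built-in `rename` and `unused` helpers (mirroring the factory tests above) take this shape, with illustrative paths:
```ts
// rename('oldKey', 'newKey') applied under the `myplugin` root yields:
const renameCommand: ConfigDeprecationCommand = {
  set: [{ path: 'myplugin.newKey', value: 'the old value' }],
  unset: [{ path: 'myplugin.oldKey' }],
};

// unused('deprecatedKey') yields only an unset:
const unusedCommand: ConfigDeprecationCommand = {
  unset: [{ path: 'myplugin.deprecatedKey' }],
};
```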
/**
* A provider that should return a list of {@link ConfigDeprecation}.
@ -60,7 +75,7 @@ export type ConfigDeprecation = (
* const provider: ConfigDeprecationProvider = ({ rename, unused }) => [
* rename('oldKey', 'newKey'),
* unused('deprecatedKey'),
* myCustomDeprecation,
* (config, path) => ({ unset: [{ path: 'path.to.key' }] })
* ]
* ```
*


@ -12,6 +12,7 @@ export type {
ConfigDeprecationProvider,
ConfigDeprecationWithContext,
ConfigDeprecation,
ConfigDeprecationCommand,
} from './deprecation';
export { applyDeprecations, configDeprecationFactory } from './deprecation';


@ -7,6 +7,8 @@ If running elasticsearch from source, elasticsearch needs to be cloned to a sibl
To run, go to the Kibana root and run `node scripts/es --help` to get the latest command line options.
The script attempts to preserve the existing interfaces used by Elasticsearch CLI. This includes passing through options with the `-E` argument and the `ES_JAVA_OPTS` environment variable for Java options.
### Examples
Run a snapshot install with a trial license


@ -236,6 +236,7 @@ exports.Cluster = class Cluster {
* @param {String} installPath
* @param {Object} options
* @property {string|Array} options.esArgs
* @property {string} options.esJavaOpts
* @return {undefined}
*/
_exec(installPath, options = {}) {
@ -268,14 +269,17 @@ exports.Cluster = class Cluster {
this._log.debug('%s %s', ES_BIN, args.join(' '));
options.esEnvVars = options.esEnvVars || {};
let esJavaOpts = `${options.esJavaOpts || ''} ${process.env.ES_JAVA_OPTS || ''}`;
// ES now automatically sets heap size to 50% of the machine's available memory
// so we need to set it to a smaller size for local dev and CI
// especially because we currently run many instances of ES on the same machine during CI
options.esEnvVars.ES_JAVA_OPTS =
(options.esEnvVars.ES_JAVA_OPTS ? `${options.esEnvVars.ES_JAVA_OPTS} ` : '') +
'-Xms1g -Xmx1g';
// initial and max must be the same, so we only need to check the max
if (!esJavaOpts.includes('Xmx')) {
esJavaOpts += ' -Xms1g -Xmx1g';
}
this._log.debug('ES_JAVA_OPTS: %s', esJavaOpts.trim());
this._process = execa(ES_BIN, args, {
cwd: installPath,
@ -283,7 +287,7 @@ exports.Cluster = class Cluster {
...(installPath ? { ES_TMPDIR: path.resolve(installPath, 'ES_TMPDIR') } : {}),
...process.env,
JAVA_HOME: '', // By default, we want to always unset JAVA_HOME so that the bundled JDK will be used
...(options.esEnvVars || {}),
ES_JAVA_OPTS: esJavaOpts.trim(),
},
stdio: ['ignore', 'pipe', 'pipe'],
});


@ -71,11 +71,17 @@ function mockEsBin({ exitCode, start }) {
);
}
const initialEnv = { ...process.env };
beforeEach(() => {
jest.resetAllMocks();
extractConfigFiles.mockImplementation((config) => config);
});
afterEach(() => {
process.env = { ...initialEnv };
});
describe('#installSource()', () => {
it('awaits installSource() promise and returns { installPath }', async () => {
let resolveInstallSource;
@ -355,6 +361,25 @@ describe('#run()', () => {
]
`);
});
it('sets default Java heap', async () => {
mockEsBin({ start: true });
const cluster = new Cluster({ log });
await cluster.run();
expect(execa.mock.calls[0][2].env.ES_JAVA_OPTS).toEqual('-Xms1g -Xmx1g');
});
it('allows Java heap to be overwritten', async () => {
mockEsBin({ start: true });
process.env.ES_JAVA_OPTS = '-Xms5g -Xmx5g';
const cluster = new Cluster({ log });
await cluster.run();
expect(execa.mock.calls[0][2].env.ES_JAVA_OPTS).toEqual('-Xms5g -Xmx5g');
});
});
describe('#stop()', () => {


@ -46,7 +46,7 @@ pageLoadAssetSize:
lens: 96624
licenseManagement: 41817
licensing: 29004
lists: 228500
lists: 280504
logstash: 53548
management: 46112
maps: 80000


@ -21,7 +21,9 @@ export const formatErrors = (errors: t.Errors): string[] => {
.map((entry) => entry.key)
.join(',');
const nameContext = error.context.find((entry) => entry.type?.name?.length > 0);
const nameContext = error.context.find(
(entry) => entry.type != null && entry.type.name != null && entry.type.name.length > 0
);
const suppliedValue =
keyContext !== '' ? keyContext : nameContext != null ? nameContext.type.name : '';
const value = isObject(error.value) ? JSON.stringify(error.value) : error.value;


@ -41,12 +41,14 @@ export * from './from';
export * from './id';
export * from './iso_date_string';
export * from './language';
export * from './list_types';
export * from './max_signals';
export * from './meta';
export * from './name';
export * from './non_empty_array';
export * from './non_empty_or_nullable_string_array';
export * from './non_empty_string';
export * from './non_empty_string_array';
export * from './normalized_ml_job_id';
export * from './only_false_allowed';
export * from './operator';
@ -61,6 +63,7 @@ export * from './severity';
export * from './severity_mapping';
export * from './string_to_positive_number';
export * from './tags';
export * from './test_utils';
export * from './threat';
export * from './threat_mapping';
export * from './threat_subtechnique';


@ -8,7 +8,7 @@
import * as t from 'io-ts';
import { operator } from '../operator';
import { listOperator as operator } from '../list_operator';
import { NonEmptyString } from '../../non_empty_string';
export const entriesExists = t.exact(


@ -10,7 +10,7 @@ import * as t from 'io-ts';
import { NonEmptyString } from '../../non_empty_string';
import { type } from '../type';
import { operator } from '../operator';
import { listOperator as operator } from '../list_operator';
export const entriesList = t.exact(
t.type({


@ -8,7 +8,7 @@
import * as t from 'io-ts';
import { NonEmptyString } from '../../non_empty_string';
import { operator } from '../operator';
import { listOperator as operator } from '../list_operator';
export const entriesMatch = t.exact(
t.type({


@ -8,7 +8,7 @@
import * as t from 'io-ts';
import { operator } from '../operator';
import { listOperator as operator } from '../list_operator';
import { nonEmptyOrNullableStringArray } from '../../non_empty_or_nullable_string_array';
import { NonEmptyString } from '../../non_empty_string';


@ -8,7 +8,7 @@
import * as t from 'io-ts';
import { NonEmptyString } from '../../non_empty_string';
import { operator } from '../operator';
import { listOperator as operator } from '../list_operator';
export const entriesMatchWildcard = t.exact(
t.type({


@ -8,10 +8,11 @@
export * from './comment';
export * from './create_comment';
export * from './default_comments_array';
export * from './default_create_comments_array';
export * from './default_namespace';
export * from './default_namespace_array';
export * from './default_update_comments_array';
export * from './endpoint';
export * from './entries';
export * from './entries_exist';
export * from './entries_list';
@ -26,7 +27,7 @@ export * from './lists';
export * from './lists_default_array';
export * from './non_empty_entries_array';
export * from './non_empty_nested_entries_array';
export * from './operator';
export * from './list_operator';
export * from './os_type';
export * from './type';
export * from './update_comment';


@ -8,14 +8,14 @@
import * as t from 'io-ts';
export const operator = t.keyof({ excluded: null, included: null });
export type Operator = t.TypeOf<typeof operator>;
export enum OperatorEnum {
export const listOperator = t.keyof({ excluded: null, included: null });
export type ListOperator = t.TypeOf<typeof listOperator>;
export enum ListOperatorEnum {
INCLUDED = 'included',
EXCLUDED = 'excluded',
}
export enum OperatorTypeEnum {
export enum ListOperatorTypeEnum {
NESTED = 'nested',
MATCH = 'match',
MATCH_ANY = 'match_any',


@ -37,3 +37,6 @@ export const type = t.keyof({
short: null,
text: null,
});
export const typeOrUndefined = t.union([type, t.undefined]);
export type Type = t.TypeOf<typeof type>;


@ -1,16 +1,15 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import { pipe } from 'fp-ts/lib/pipeable';
import { left } from 'fp-ts/lib/Either';
import { foldLeftRight, getPaths } from '../../shared_imports';
import { NonEmptyStringArray } from './non_empty_string_array';
import { foldLeftRight, getPaths } from '@kbn/securitysolution-io-ts-utils';
import { NonEmptyStringArray } from '.';
describe('non_empty_string_array', () => {
test('it should FAIL validation when given "null"', () => {


@ -1,8 +1,9 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import * as t from 'io-ts';
@ -13,7 +14,6 @@ import { Either } from 'fp-ts/lib/Either';
* - A string that is not empty (which will be turned into an array of size 1)
* - A comma separated string that can turn into an array by splitting on it
* - Example input converted to output: "a,b,c" -> ["a", "b", "c"]
* @deprecated Use packages/kbn-securitysolution-io-ts-utils
*/
export const NonEmptyStringArray = new t.Type<string[], string, unknown>(
'NonEmptyStringArray',
@ -37,12 +37,6 @@ export const NonEmptyStringArray = new t.Type<string[], string, unknown>(
String
);
/**
* @deprecated Use packages/kbn-securitysolution-io-ts-utils
*/
export type NonEmptyStringArray = t.OutputOf<typeof NonEmptyStringArray>;
/**
* @deprecated Use packages/kbn-securitysolution-io-ts-utils
*/
export type NonEmptyStringArrayDecoded = t.TypeOf<typeof NonEmptyStringArray>;


@ -22,5 +22,5 @@ export const parseScheduleDates = (time: string): moment.Moment | null => {
? dateMath.parse(time)
: null;
return formattedDate ?? null;
return formattedDate != null ? formattedDate : null;
};


@ -36,7 +36,7 @@ interface TestClusterFactoryOptions {
* */
dataArchive?: string;
esArgs?: string[];
esEnvVars?: Record<string, any>;
esJavaOpts?: string;
clusterName?: string;
log: ToolingLog;
ssl?: boolean;
@ -52,7 +52,7 @@ export function createTestEsCluster(options: TestClusterFactoryOptions) {
esFrom = esTestConfig.getBuildFrom(),
dataArchive,
esArgs: customEsArgs = [],
esEnvVars,
esJavaOpts,
clusterName: customClusterName = 'es-test-cluster',
ssl,
} = options;
@ -107,7 +107,7 @@ export function createTestEsCluster(options: TestClusterFactoryOptions) {
await cluster.start(installPath, {
password: config.password,
esArgs,
esEnvVars,
esJavaOpts,
});
}


@ -172,7 +172,7 @@ export const schema = Joi.object()
license: Joi.string().default('basic'),
from: Joi.string().default('snapshot'),
serverArgs: Joi.array(),
serverEnvVars: Joi.object(),
esJavaOpts: Joi.string(),
dataArchive: Joi.string(),
ssl: Joi.boolean().default(false),
})


@ -29,7 +29,7 @@ export async function runElasticsearch({
const ssl = config.get('esTestCluster.ssl');
const license = config.get('esTestCluster.license');
const esArgs = config.get('esTestCluster.serverArgs');
const esEnvVars = config.get('esTestCluster.serverEnvVars');
const esJavaOpts = config.get('esTestCluster.esJavaOpts');
const isSecurityEnabled = esArgs.includes('xpack.security.enabled=true');
const cluster = createTestEsCluster({
@ -43,7 +43,7 @@ export async function runElasticsearch({
esFrom: esFrom || config.get('esTestCluster.from'),
dataArchive: config.get('esTestCluster.dataArchive'),
esArgs,
esEnvVars,
esJavaOpts,
ssl,
});


@ -184,7 +184,7 @@ export class DocLinksService {
remoteClustersProxy: `${ELASTICSEARCH_DOCS}modules-remote-clusters.html#proxy-mode`,
remoteClusersProxySettings: `${ELASTICSEARCH_DOCS}modules-remote-clusters.html#remote-cluster-proxy-settings`,
scriptParameters: `${ELASTICSEARCH_DOCS}modules-scripting-using.html#prefer-params`,
transportSettings: `${ELASTICSEARCH_DOCS}modules-transport.html`,
transportSettings: `${ELASTICSEARCH_DOCS}modules-network.html#common-network-settings`,
typesRemoval: `${ELASTICSEARCH_DOCS}removal-of-types.html`,
deprecationLogging: `${ELASTICSEARCH_DOCS}logging.html#deprecation-logging`,
},
@ -319,6 +319,7 @@ export class DocLinksService {
createSnapshotLifecyclePolicy: `${ELASTICSEARCH_DOCS}slm-api-put-policy.html`,
createRoleMapping: `${ELASTICSEARCH_DOCS}security-api-put-role-mapping.html`,
createRoleMappingTemplates: `${ELASTICSEARCH_DOCS}security-api-put-role-mapping.html#_role_templates`,
createRollupJobsRequest: `${ELASTICSEARCH_DOCS}rollup-put-job.html#rollup-put-job-api-request-body`,
createApiKey: `${ELASTICSEARCH_DOCS}security-api-create-api-key.html`,
createPipeline: `${ELASTICSEARCH_DOCS}put-pipeline-api.html`,
createTransformRequest: `${ELASTICSEARCH_DOCS}put-transform.html#put-transform-request-body`,
@ -544,6 +545,7 @@ export interface DocLinksStart {
createSnapshotLifecyclePolicy: string;
createRoleMapping: string;
createRoleMappingTemplates: string;
createRollupJobsRequest: string;
createApiKey: string;
createPipeline: string;
createTransformRequest: string;


@ -629,6 +629,7 @@ export interface DocLinksStart {
createSnapshotLifecyclePolicy: string;
createRoleMapping: string;
createRoleMappingTemplates: string;
createRollupJobsRequest: string;
createApiKey: string;
createPipeline: string;
createTransformRequest: string;


@ -6,29 +6,26 @@
* Side Public License, v 1.
*/
import { has, get } from 'lodash';
import { ConfigDeprecationProvider, ConfigDeprecation } from '@kbn/config';
const configPathDeprecation: ConfigDeprecation = (settings, fromPath, addDeprecation) => {
if (has(process.env, 'CONFIG_PATH')) {
if (process.env?.CONFIG_PATH) {
addDeprecation({
message: `Environment variable CONFIG_PATH is deprecated. It has been replaced with KBN_PATH_CONF pointing to a config folder`,
});
}
return settings;
};
const dataPathDeprecation: ConfigDeprecation = (settings, fromPath, addDeprecation) => {
if (has(process.env, 'DATA_PATH')) {
if (process.env?.DATA_PATH) {
addDeprecation({
message: `Environment variable "DATA_PATH" will be removed. It has been replaced with kibana.yml setting "path.data"`,
});
}
return settings;
};
const rewriteBasePathDeprecation: ConfigDeprecation = (settings, fromPath, addDeprecation) => {
if (has(settings, 'server.basePath') && !has(settings, 'server.rewriteBasePath')) {
if (settings.server?.basePath && !settings.server?.rewriteBasePath) {
addDeprecation({
message:
'You should set server.basePath along with server.rewriteBasePath. Starting in 7.0, Kibana ' +
@ -37,20 +34,19 @@ const rewriteBasePathDeprecation: ConfigDeprecation = (settings, fromPath, addDe
'current behavior and silence this warning.',
});
}
return settings;
};
const rewriteCorsSettings: ConfigDeprecation = (settings, fromPath, addDeprecation) => {
const corsSettings = get(settings, 'server.cors');
if (typeof get(settings, 'server.cors') === 'boolean') {
const corsSettings = settings.server?.cors;
if (typeof corsSettings === 'boolean') {
addDeprecation({
message: '"server.cors" is deprecated and has been replaced by "server.cors.enabled"',
});
settings.server.cors = {
enabled: corsSettings,
return {
set: [{ path: 'server.cors', value: { enabled: corsSettings } }],
};
}
return settings;
};
const cspRulesDeprecation: ConfigDeprecation = (settings, fromPath, addDeprecation) => {
@ -59,7 +55,7 @@ const cspRulesDeprecation: ConfigDeprecation = (settings, fromPath, addDeprecati
const SELF_POLICIES = Object.freeze(['script-src', 'style-src']);
const SELF_STRING = `'self'`;
const rules: string[] = get(settings, 'csp.rules');
const rules: string[] = settings.csp?.rules;
if (rules) {
const parsed = new Map(
rules.map((ruleStr) => {
@ -68,34 +64,39 @@ const cspRulesDeprecation: ConfigDeprecation = (settings, fromPath, addDeprecati
})
);
settings.csp.rules = [...parsed].map(([policy, sourceList]) => {
if (sourceList.find((source) => source.includes(NONCE_STRING))) {
addDeprecation({
message: `csp.rules no longer supports the {nonce} syntax. Replacing with 'self' in ${policy}`,
});
sourceList = sourceList.filter((source) => !source.includes(NONCE_STRING));
return {
set: [
{
path: 'csp.rules',
value: [...parsed].map(([policy, sourceList]) => {
if (sourceList.find((source) => source.includes(NONCE_STRING))) {
addDeprecation({
message: `csp.rules no longer supports the {nonce} syntax. Replacing with 'self' in ${policy}`,
});
sourceList = sourceList.filter((source) => !source.includes(NONCE_STRING));
// Add 'self' if not present
if (!sourceList.find((source) => source.includes(SELF_STRING))) {
sourceList.push(SELF_STRING);
}
}
// Add 'self' if not present
if (!sourceList.find((source) => source.includes(SELF_STRING))) {
sourceList.push(SELF_STRING);
}
}
if (
SELF_POLICIES.includes(policy) &&
!sourceList.find((source) => source.includes(SELF_STRING))
) {
addDeprecation({
message: `csp.rules must contain the 'self' source. Automatically adding to ${policy}.`,
});
sourceList.push(SELF_STRING);
}
if (
SELF_POLICIES.includes(policy) &&
!sourceList.find((source) => source.includes(SELF_STRING))
) {
addDeprecation({
message: `csp.rules must contain the 'self' source. Automatically adding to ${policy}.`,
});
sourceList.push(SELF_STRING);
}
return `${policy} ${sourceList.join(' ')}`.trim();
});
return `${policy} ${sourceList.join(' ')}`.trim();
}),
},
],
};
}
return settings;
};
const mapManifestServiceUrlDeprecation: ConfigDeprecation = (
@ -103,7 +104,7 @@ const mapManifestServiceUrlDeprecation: ConfigDeprecation = (
fromPath,
addDeprecation
) => {
if (has(settings, 'map.manifestServiceUrl')) {
if (settings.map?.manifestServiceUrl) {
addDeprecation({
message:
'You should no longer use the map.manifestServiceUrl setting in kibana.yml to configure the location ' +
@ -112,11 +113,10 @@ const mapManifestServiceUrlDeprecation: ConfigDeprecation = (
'modified for use in production environments.',
});
}
return settings;
};
const opsLoggingEventDeprecation: ConfigDeprecation = (settings, fromPath, addDeprecation) => {
if (has(settings, 'logging.events.ops')) {
if (settings.logging?.events?.ops) {
addDeprecation({
documentationUrl:
'https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx#loggingevents',
@ -127,11 +127,10 @@ const opsLoggingEventDeprecation: ConfigDeprecation = (settings, fromPath, addDe
'https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx',
});
}
return settings;
};
const requestLoggingEventDeprecation: ConfigDeprecation = (settings, fromPath, addDeprecation) => {
if (has(settings, 'logging.events.request') || has(settings, 'logging.events.response')) {
if (settings.logging?.events?.request || settings.logging?.events?.response) {
addDeprecation({
documentationUrl:
'https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx#loggingevents',
@ -142,11 +141,10 @@ const requestLoggingEventDeprecation: ConfigDeprecation = (settings, fromPath, a
'https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx',
});
}
return settings;
};
const timezoneLoggingDeprecation: ConfigDeprecation = (settings, fromPath, addDeprecation) => {
if (has(settings, 'logging.timezone')) {
if (settings.logging?.timezone) {
addDeprecation({
documentationUrl:
'https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx#loggingtimezone',
@ -157,11 +155,10 @@ const timezoneLoggingDeprecation: ConfigDeprecation = (settings, fromPath, addDe
'https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx',
});
}
return settings;
};
const destLoggingDeprecation: ConfigDeprecation = (settings, fromPath, addDeprecation) => {
if (has(settings, 'logging.dest')) {
if (settings.logging?.dest) {
addDeprecation({
documentationUrl:
'https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx#loggingdest',
@ -172,11 +169,10 @@ const destLoggingDeprecation: ConfigDeprecation = (settings, fromPath, addDeprec
'https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx',
});
}
return settings;
};
const quietLoggingDeprecation: ConfigDeprecation = (settings, fromPath, addDeprecation) => {
if (has(settings, 'logging.quiet')) {
if (settings.logging?.quiet) {
addDeprecation({
documentationUrl:
'https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx#loggingquiet',
@ -185,11 +181,10 @@ const quietLoggingDeprecation: ConfigDeprecation = (settings, fromPath, addDepre
'in 8.0. Moving forward, you can use "logging.root.level:error" in your logging configuration. ',
});
}
return settings;
};
const silentLoggingDeprecation: ConfigDeprecation = (settings, fromPath, addDeprecation) => {
if (has(settings, 'logging.silent')) {
if (settings.logging?.silent) {
addDeprecation({
documentationUrl:
'https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx#loggingsilent',
@ -198,11 +193,10 @@ const silentLoggingDeprecation: ConfigDeprecation = (settings, fromPath, addDepr
'in 8.0. Moving forward, you can use "logging.root.level:off" in your logging configuration. ',
});
}
return settings;
};
const verboseLoggingDeprecation: ConfigDeprecation = (settings, fromPath, addDeprecation) => {
if (has(settings, 'logging.verbose')) {
if (settings.logging?.verbose) {
addDeprecation({
documentationUrl:
'https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx#loggingverbose',
@ -211,7 +205,6 @@ const verboseLoggingDeprecation: ConfigDeprecation = (settings, fromPath, addDep
'in 8.0. Moving forward, you can use "logging.root.level:all" in your logging configuration. ',
});
}
return settings;
};
const jsonLoggingDeprecation: ConfigDeprecation = (settings, fromPath, addDeprecation) => {
@ -219,7 +212,7 @@ const jsonLoggingDeprecation: ConfigDeprecation = (settings, fromPath, addDeprec
// the dev CLI code in src/dev/cli_dev_mode/using_server_process.ts manually
// specifies `--logging.json=false`. Since it's executed in a child process, the
// ` legacyLoggingConfigSchema` returns `true` for the TTY check on `process.stdout.isTTY`
if (has(settings, 'logging.json') && settings.env !== 'development') {
if (settings.logging?.json && settings.env !== 'development') {
addDeprecation({
documentationUrl:
'https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx',
@ -232,11 +225,10 @@ const jsonLoggingDeprecation: ConfigDeprecation = (settings, fromPath, addDeprec
'https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx',
});
}
return settings;
};
const logRotateDeprecation: ConfigDeprecation = (settings, fromPath, addDeprecation) => {
if (has(settings, 'logging.rotate')) {
if (settings.logging?.rotate) {
addDeprecation({
documentationUrl:
'https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx#rolling-file-appender',
@ -247,11 +239,10 @@ const logRotateDeprecation: ConfigDeprecation = (settings, fromPath, addDeprecat
'https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx#rolling-file-appender',
});
}
return settings;
};
const logEventsLogDeprecation: ConfigDeprecation = (settings, fromPath, addDeprecation) => {
if (has(settings, 'logging.events.log')) {
if (settings.logging?.events?.log) {
addDeprecation({
documentationUrl:
'https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx#loggingevents',
@ -260,11 +251,10 @@ const logEventsLogDeprecation: ConfigDeprecation = (settings, fromPath, addDepre
'in 8.0. Moving forward, log levels can be customized on a per-logger basis using the new logging configuration. ',
});
}
return settings;
};
const logEventsErrorDeprecation: ConfigDeprecation = (settings, fromPath, addDeprecation) => {
if (has(settings, 'logging.events.error')) {
if (settings.logging?.events?.error) {
addDeprecation({
documentationUrl:
'https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx#loggingevents',
@ -273,18 +263,16 @@ const logEventsErrorDeprecation: ConfigDeprecation = (settings, fromPath, addDep
'in 8.0. Moving forward, you can use "logging.root.level: error" in your logging configuration. ',
});
}
return settings;
};
const logFilterDeprecation: ConfigDeprecation = (settings, fromPath, addDeprecation) => {
if (has(settings, 'logging.filter')) {
if (settings.logging?.filter) {
addDeprecation({
documentationUrl:
'https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx#loggingfilter',
message: '"logging.filter" has been deprecated and will be removed in 8.0.',
});
}
return settings;
};
export const coreDeprecationProvider: ConfigDeprecationProvider = ({ rename, unusedFromRoot }) => [


@ -5,6 +5,7 @@
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import { set } from '@elastic/safer-lodash-set';
import type { ConfigDeprecationProvider } from '@kbn/config';
import { configDeprecationFactory, applyDeprecations } from '@kbn/config';
@ -38,7 +39,7 @@ export const getDeprecationsFor = ({
settings?: Record<string, any>;
path: string;
}) => {
return collectDeprecations(provider, { [path]: settings }, path);
return collectDeprecations(provider, set({}, path, settings), path);
};
export const getDeprecationsForGlobalSettings = ({
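The switch from a computed object key to `set` matters when `path` contains dots; a short sketch of the difference, using a hypothetical path:
```ts
import { set } from '@elastic/safer-lodash-set';

const settings = { verificationMode: 'none' };

// before: one literal key containing dots
const shorthand = { ['elasticsearch.ssl']: settings };
// => { 'elasticsearch.ssl': { verificationMode: 'none' } }

// after: the nested shape that deprecation handlers actually inspect
const nested = set({}, 'elasticsearch.ssl', settings);
// => { elasticsearch: { ssl: { verificationMode: 'none' } } }
```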


@ -147,7 +147,7 @@ const deprecations: ConfigDeprecationProvider = () => [
(settings, fromPath, addDeprecation) => {
const es = settings[fromPath];
if (!es) {
return settings;
return;
}
if (es.username === 'elastic') {
addDeprecation({
@ -171,7 +171,7 @@ const deprecations: ConfigDeprecationProvider = () => [
message: `Setting [${fromPath}.logQueries] is deprecated and no longer used. You should set the log level to "debug" for the "elasticsearch.queries" context in "logging.loggers" or use "logging.verbose: true".`,
});
}
return settings;
return;
},
];

View file

@ -140,6 +140,7 @@ import {
*
* @public
* @deprecated
* @removeBy 7.16
*/
export interface LegacyCallAPIOptions {
/**
@ -157,6 +158,7 @@ export interface LegacyCallAPIOptions {
/**
* @deprecated
* @removeBy 7.16
* @public
* */
export interface LegacyAPICaller {
@ -312,6 +314,7 @@ export interface LegacyAPICaller {
/**
* @deprecated
* @removeBy 7.16
* @public
* */
export interface AssistantAPIClientParams extends GenericParams {
@ -321,17 +324,20 @@ export interface AssistantAPIClientParams extends GenericParams {
/**
* @deprecated
* @removeBy 7.16
* @public
* */
export type MIGRATION_ASSISTANCE_INDEX_ACTION = 'upgrade' | 'reindex';
/**
* @deprecated
* @removeBy 7.16
* @public
* */
export type MIGRATION_DEPRECATION_LEVEL = 'none' | 'info' | 'warning' | 'critical';
/**
* @deprecated
* @removeBy 7.16
* @public
* */
export interface AssistanceAPIResponse {
@ -344,6 +350,7 @@ export interface AssistanceAPIResponse {
/**
* @deprecated
* @removeBy 7.16
* @public
* */
export interface DeprecationAPIClientParams extends GenericParams {
@ -353,6 +360,7 @@ export interface DeprecationAPIClientParams extends GenericParams {
/**
* @deprecated
* @removeBy 7.16
* @public
* */
export interface DeprecationInfo {
@ -364,6 +372,7 @@ export interface DeprecationInfo {
/**
* @deprecated
* @removeBy 7.16
* @public
* */
export interface IndexSettingsDeprecationInfo {
@ -372,6 +381,7 @@ export interface IndexSettingsDeprecationInfo {
/**
* @deprecated
* @removeBy 7.16
* @public
* */
export interface DeprecationAPIResponse {

View file

@ -78,6 +78,7 @@ const callAPI = async (
* See {@link LegacyClusterClient}.
*
* @deprecated Use {@link IClusterClient}.
* @removeBy 7.16
* @public
*/
export type ILegacyClusterClient = Pick<LegacyClusterClient, 'callAsInternalUser' | 'asScoped'>;
@ -89,6 +90,7 @@ export type ILegacyClusterClient = Pick<LegacyClusterClient, 'callAsInternalUser
*
* See {@link LegacyClusterClient}.
* @deprecated Use {@link ICustomClusterClient}.
* @removeBy 7.16
* @public
*/
export type ILegacyCustomClusterClient = Pick<
@ -99,6 +101,7 @@ export type ILegacyCustomClusterClient = Pick<
/**
* {@inheritDoc IClusterClient}
* @deprecated Use {@link IClusterClient}.
* @removeBy 7.16
* @public
*/
export class LegacyClusterClient implements ILegacyClusterClient {

View file

@ -17,6 +17,7 @@ enum ErrorCode {
/**
* @deprecated. The new elasticsearch client doesn't wrap errors anymore.
* @removeBy 7.16
* @public
* */
export interface LegacyElasticsearchError extends Boom.Boom {

View file

@ -19,6 +19,7 @@ import { LegacyAPICaller, LegacyCallAPIOptions } from './api_types';
* See {@link LegacyScopedClusterClient}.
*
* @deprecated Use {@link IScopedClusterClient}.
* @removeBy 7.16
* @public
*/
export type ILegacyScopedClusterClient = Pick<
@ -29,6 +30,7 @@ export type ILegacyScopedClusterClient = Pick<
/**
* {@inheritDoc IScopedClusterClient}
* @deprecated Use {@link IScopedClusterClient | scoped cluster client}.
* @removeBy 7.16
* @public
*/
export class LegacyScopedClusterClient implements ILegacyScopedClusterClient {
@ -46,6 +48,7 @@ export class LegacyScopedClusterClient implements ILegacyScopedClusterClient {
* Kibana internal user.
* See {@link LegacyAPICaller}.
* @deprecated Use {@link IScopedClusterClient.asInternalUser}.
* @removeBy 7.16
*
* @param endpoint - String descriptor of the endpoint e.g. `cluster.getSettings` or `ping`.
* @param clientParams - A dictionary of parameters that will be passed directly to the Elasticsearch JS client.
@ -64,6 +67,7 @@ export class LegacyScopedClusterClient implements ILegacyScopedClusterClient {
* user initiated request to the Kibana server (via HTTP request headers).
* See {@link LegacyAPICaller}.
* @deprecated Use {@link IScopedClusterClient.asCurrentUser}.
* @removeBy 7.16
*
* @param endpoint - String descriptor of the endpoint e.g. `cluster.getSettings` or `ping`.
* @param clientParams - A dictionary of parameters that will be passed directly to the Elasticsearch JS client.

View file

@ -36,6 +36,7 @@ export interface ElasticsearchServiceSetup {
readonly config$: Observable<ElasticsearchConfig>;
/**
* @deprecated
* @removeBy 7.16
* Use {@link ElasticsearchServiceStart.legacy | ElasticsearchServiceStart.legacy.createClient} instead.
*
* Create application specific Elasticsearch cluster API client with customized config. See {@link ILegacyClusterClient}.
@ -60,6 +61,7 @@ export interface ElasticsearchServiceSetup {
) => ILegacyCustomClusterClient;
/**
* @removeBy 7.16
* @deprecated
* Use {@link ElasticsearchServiceStart.legacy | ElasticsearchServiceStart.legacy.client} instead.
*
@ -131,6 +133,9 @@ export interface ElasticsearchServiceStart {
/**
* Create application specific Elasticsearch cluster API client with customized config. See {@link ILegacyClusterClient}.
*
* @deprecated
* @removeBy 7.16
*
* @param type Unique identifier of the client
* @param clientConfig A config consists of Elasticsearch JS client options and
* valid sub-set of Elasticsearch service config.
@ -153,6 +158,9 @@ export interface ElasticsearchServiceStart {
/**
* A pre-configured {@link ILegacyClusterClient | legacy Elasticsearch client}.
*
* @deprecated
* @removeBy 7.16
*
* @example
* ```js
* const client = core.elasticsearch.legacy.client;

View file

@ -11,6 +11,7 @@ import { set } from '@elastic/safer-lodash-set';
import _ from 'lodash';
import { SavedObjectUnsanitizedDoc } from '../../serialization';
import { DocumentMigrator } from './document_migrator';
import { TransformSavedObjectDocumentError } from './transform_saved_object_document_error';
import { loggingSystemMock } from '../../../logging/logging_system.mock';
import { SavedObjectsType } from '../../types';
import { SavedObjectTypeRegistry } from '../../saved_objects_type_registry';
@ -724,6 +725,12 @@ describe('DocumentMigrator', () => {
it('logs the original error and throws a transform error if a document transform fails', () => {
const log = mockLogger;
const failedDoc = {
id: 'smelly',
type: 'dog',
attributes: {},
migrationVersion: {},
};
const migrator = new DocumentMigrator({
...testOpts(),
typeRegistry: createRegistry({
@ -737,12 +744,6 @@ describe('DocumentMigrator', () => {
log,
});
migrator.prepareMigrations();
const failedDoc = {
id: 'smelly',
type: 'dog',
attributes: {},
migrationVersion: {},
};
try {
migrator.migrate(_.cloneDeep(failedDoc));
expect('Did not throw').toEqual('But it should have!');
@ -751,6 +752,7 @@ describe('DocumentMigrator', () => {
"Failed to transform document smelly. Transform: dog:1.2.3
Doc: {\\"id\\":\\"smelly\\",\\"type\\":\\"dog\\",\\"attributes\\":{},\\"migrationVersion\\":{}}"
`);
expect(error).toBeInstanceOf(TransformSavedObjectDocumentError);
expect(loggingSystemMock.collect(mockLoggerFactory).error[0][0]).toMatchInlineSnapshot(
`[Error: Dang diggity!]`
);

View file

@ -62,6 +62,7 @@ import {
SavedObjectsType,
} from '../../types';
import { MigrationLogger } from './migration_logger';
import { TransformSavedObjectDocumentError } from '.';
import { ISavedObjectTypeRegistry } from '../../saved_objects_type_registry';
import { SavedObjectMigrationFn, SavedObjectMigrationMap } from '../types';
import { DEFAULT_NAMESPACE_STRING } from '../../service/lib/utils';
@ -679,9 +680,15 @@ function wrapWithTry(
const failedTransform = `${type.name}:${version}`;
const failedDoc = JSON.stringify(doc);
log.error(error);
throw new Error(
`Failed to transform document ${doc?.id}. Transform: ${failedTransform}\nDoc: ${failedDoc}`
// To make debugging failed migrations easier, we add items needed to convert the
// saved object id to the full raw id (the id only contains the uuid part) and the full error itself
throw new TransformSavedObjectDocumentError(
doc.id,
doc.type,
doc.namespace,
failedTransform,
failedDoc,
error
);
}
};

View file

@ -15,3 +15,9 @@ export type { MigrationResult, MigrationStatus } from './migration_coordinator';
export { createMigrationEsClient } from './migration_es_client';
export type { MigrationEsClient } from './migration_es_client';
export { excludeUnusedTypesQuery } from './elastic_index';
export { TransformSavedObjectDocumentError } from './transform_saved_object_document_error';
export type {
DocumentsTransformFailed,
DocumentsTransformSuccess,
TransformErrorObjects,
} from './migrate_raw_docs';

View file

@ -7,10 +7,17 @@
*/
import { set } from '@elastic/safer-lodash-set';
import * as Either from 'fp-ts/lib/Either';
import _ from 'lodash';
import { SavedObjectTypeRegistry } from '../../saved_objects_type_registry';
import { SavedObjectsSerializer } from '../../serialization';
import { migrateRawDocs } from './migrate_raw_docs';
import {
DocumentsTransformFailed,
DocumentsTransformSuccess,
migrateRawDocs,
migrateRawDocsSafely,
} from './migrate_raw_docs';
import { TransformSavedObjectDocumentError } from './transform_saved_object_document_error';
describe('migrateRawDocs', () => {
test('converts raw docs to saved objects', async () => {
@ -120,3 +127,156 @@ describe('migrateRawDocs', () => {
).rejects.toThrowErrorMatchingInlineSnapshot(`"error during transform"`);
});
});
describe('migrateRawDocsSafely', () => {
beforeEach(() => {
jest.clearAllMocks();
});
test('converts raw docs to saved objects', async () => {
const transform = jest.fn<any, any>((doc: any) => [
set(_.cloneDeep(doc), 'attributes.name', 'HOI!'),
]);
const task = migrateRawDocsSafely(
new SavedObjectsSerializer(new SavedObjectTypeRegistry()),
transform,
[
{ _id: 'a:b', _source: { type: 'a', a: { name: 'AAA' } } },
{ _id: 'c:d', _source: { type: 'c', c: { name: 'DDD' } } },
]
);
const result = (await task()) as Either.Right<DocumentsTransformSuccess>;
expect(result._tag).toEqual('Right');
expect(result.right.processedDocs).toEqual([
{
_id: 'a:b',
_source: { type: 'a', a: { name: 'HOI!' }, migrationVersion: {}, references: [] },
},
{
_id: 'c:d',
_source: { type: 'c', c: { name: 'HOI!' }, migrationVersion: {}, references: [] },
},
]);
const obj1 = {
id: 'b',
type: 'a',
attributes: { name: 'AAA' },
migrationVersion: {},
references: [],
};
const obj2 = {
id: 'd',
type: 'c',
attributes: { name: 'DDD' },
migrationVersion: {},
references: [],
};
expect(transform).toHaveBeenCalledTimes(2);
expect(transform).toHaveBeenNthCalledWith(1, obj1);
expect(transform).toHaveBeenNthCalledWith(2, obj2);
});
test('returns a `left` tag when encountering a corrupt saved object document', async () => {
const transform = jest.fn<any, any>((doc: any) => [
set(_.cloneDeep(doc), 'attributes.name', 'TADA'),
]);
const task = migrateRawDocsSafely(
new SavedObjectsSerializer(new SavedObjectTypeRegistry()),
transform,
[
{ _id: 'foo:b', _source: { type: 'a', a: { name: 'AAA' } } },
{ _id: 'c:d', _source: { type: 'c', c: { name: 'DDD' } } },
]
);
const result = (await task()) as Either.Left<DocumentsTransformFailed>;
expect(transform).toHaveBeenCalledTimes(1);
expect(result._tag).toEqual('Left');
expect(Object.keys(result.left)).toEqual(['type', 'corruptDocumentIds', 'transformErrors']);
expect(result.left.corruptDocumentIds.length).toEqual(1);
expect(result.left.transformErrors.length).toEqual(0);
});
test('handles when one document is transformed into multiple documents', async () => {
const transform = jest.fn<any, any>((doc: any) => [
set(_.cloneDeep(doc), 'attributes.name', 'HOI!'),
{ id: 'bar', type: 'foo', attributes: { name: 'baz' } },
]);
const task = migrateRawDocsSafely(
new SavedObjectsSerializer(new SavedObjectTypeRegistry()),
transform,
[{ _id: 'a:b', _source: { type: 'a', a: { name: 'AAA' } } }]
);
const result = (await task()) as Either.Right<DocumentsTransformSuccess>;
expect(result._tag).toEqual('Right');
expect(result.right.processedDocs).toEqual([
{
_id: 'a:b',
_source: { type: 'a', a: { name: 'HOI!' }, migrationVersion: {}, references: [] },
},
{
_id: 'foo:bar',
_source: { type: 'foo', foo: { name: 'baz' }, references: [] },
},
]);
const obj = {
id: 'b',
type: 'a',
attributes: { name: 'AAA' },
migrationVersion: {},
references: [],
};
expect(transform).toHaveBeenCalledTimes(1);
expect(transform).toHaveBeenCalledWith(obj);
});
test('instance of Either.left containing transform errors when the transform function throws a TransformSavedObjectDocumentError', async () => {
const transform = jest.fn<any, any>((doc: any) => {
throw new TransformSavedObjectDocumentError(
`${doc.id}`,
`${doc.type}`,
`${doc.namespace}`,
`${doc.type}1.2.3`,
JSON.stringify(doc),
new Error('error during transform')
);
});
const task = migrateRawDocsSafely(
new SavedObjectsSerializer(new SavedObjectTypeRegistry()),
transform,
[{ _id: 'a:b', _source: { type: 'a', a: { name: 'AAA' } } }] // this is the raw doc
);
const result = (await task()) as Either.Left<DocumentsTransformFailed>;
expect(transform).toHaveBeenCalledTimes(1);
expect(result._tag).toEqual('Left');
expect(result.left.corruptDocumentIds.length).toEqual(0);
expect(result.left.transformErrors.length).toEqual(1);
expect(result.left.transformErrors[0].err.message).toMatchInlineSnapshot(`
"Failed to transform document b. Transform: a1.2.3
Doc: {\\"type\\":\\"a\\",\\"id\\":\\"b\\",\\"attributes\\":{\\"name\\":\\"AAA\\"},\\"references\\":[],\\"migrationVersion\\":{}}"
`);
});
test("instance of Either.left containing errors when the transform function throws an error that isn't a TransformSavedObjectDocument error", async () => {
const transform = jest.fn<any, any>((doc: any) => {
throw new Error('error during transform');
});
const task = migrateRawDocsSafely(
new SavedObjectsSerializer(new SavedObjectTypeRegistry()),
transform,
[{ _id: 'a:b', _source: { type: 'a', a: { name: 'AAA' } } }] // this is the raw doc
);
const result = (await task()) as Either.Left<DocumentsTransformFailed>;
expect(transform).toHaveBeenCalledTimes(1);
expect(result._tag).toEqual('Left');
expect(result.left.corruptDocumentIds.length).toEqual(0);
expect(result.left.transformErrors.length).toEqual(1);
expect(result.left.transformErrors[0]).toMatchInlineSnapshot(`
Object {
"err": [Error: error during transform],
"rawId": "a:b",
}
`);
});
});

View file

@ -9,13 +9,32 @@
/*
* This file provides logic for migrating raw documents.
*/
import * as TaskEither from 'fp-ts/lib/TaskEither';
import * as Either from 'fp-ts/lib/Either';
import {
SavedObjectSanitizedDoc,
SavedObjectsRawDoc,
SavedObjectsSerializer,
SavedObjectUnsanitizedDoc,
} from '../../serialization';
import { MigrateAndConvertFn } from './document_migrator';
import { TransformSavedObjectDocumentError } from '.';
export interface DocumentsTransformFailed {
readonly type: string;
readonly corruptDocumentIds: string[];
readonly transformErrors: TransformErrorObjects[];
}
export interface DocumentsTransformSuccess {
readonly processedDocs: SavedObjectsRawDoc[];
}
export interface TransformErrorObjects {
readonly rawId: string;
readonly err: TransformSavedObjectDocumentError | Error;
}
type MigrateFn = (
doc: SavedObjectUnsanitizedDoc<unknown>
) => Promise<Array<SavedObjectUnsanitizedDoc<unknown>>>;
/**
* Error thrown when saved object migrations encounter a corrupt saved object.
@ -37,7 +56,6 @@ export class CorruptSavedObjectError extends Error {
/**
* Applies the specified migration function to every saved object document in the list
* of raw docs. Throws a CorruptSavedObjectError for any raw doc that is not a valid saved object.
*
* @param {TransformFn} migrateDoc
* @param {SavedObjectsRawDoc[]} rawDocs
* @returns {SavedObjectsRawDoc[]}
@ -52,15 +70,9 @@ export async function migrateRawDocs(
for (const raw of rawDocs) {
const options = { namespaceTreatment: 'lax' as const };
if (serializer.isRawSavedObject(raw, options)) {
const savedObject = serializer.rawToSavedObject(raw, options);
savedObject.migrationVersion = savedObject.migrationVersion || {};
const savedObject = convertToRawAddMigrationVersion(raw, options, serializer);
processedDocs.push(
...(await migrateDocWithoutBlocking(savedObject)).map((attrs) =>
serializer.savedObjectToRaw({
references: [],
...attrs,
})
)
...(await migrateMapToRawDoc(migrateDocWithoutBlocking, savedObject, serializer))
);
} else {
throw new CorruptSavedObjectError(raw._id);
@ -69,6 +81,58 @@ export async function migrateRawDocs(
return processedDocs;
}
/**
* Applies the specified migration function to every saved object document provided
* and converts the saved object to a raw document.
* Captures the ids and errors from any documents that are not valid saved objects or
* for which the transformation function failed.
* @returns {TaskEither.TaskEither<DocumentsTransformFailed, DocumentsTransformSuccess>}
*/
export function migrateRawDocsSafely(
serializer: SavedObjectsSerializer,
migrateDoc: MigrateAndConvertFn,
rawDocs: SavedObjectsRawDoc[]
): TaskEither.TaskEither<DocumentsTransformFailed, DocumentsTransformSuccess> {
return async () => {
const migrateDocNonBlocking = transformNonBlocking(migrateDoc);
const processedDocs: SavedObjectsRawDoc[] = [];
const transformErrors: TransformErrorObjects[] = [];
const corruptSavedObjectIds: string[] = [];
const options = { namespaceTreatment: 'lax' as const };
for (const raw of rawDocs) {
if (serializer.isRawSavedObject(raw, options)) {
try {
const savedObject = convertToRawAddMigrationVersion(raw, options, serializer);
processedDocs.push(
...(await migrateMapToRawDoc(migrateDocNonBlocking, savedObject, serializer))
);
} catch (err) {
if (err instanceof TransformSavedObjectDocumentError) {
// the doc id we get from the error is only the uuid part
// we use the original raw document _id instead
transformErrors.push({
rawId: raw._id,
err,
});
} else {
transformErrors.push({ rawId: raw._id, err }); // cases we haven't accounted for yet
}
}
} else {
corruptSavedObjectIds.push(raw._id);
}
}
if (corruptSavedObjectIds.length > 0 || transformErrors.length > 0) {
return Either.left({
type: 'documents_transform_failed',
corruptDocumentIds: [...corruptSavedObjectIds],
transformErrors,
});
}
return Either.right({ processedDocs });
};
}
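Because `migrateRawDocsSafely` returns a `TaskEither` instead of throwing, callers run the task and branch on the `Either` tag. A minimal usage sketch — `serializer`, `migrateDoc`, `rawDocs`, `indexDocs`, and `log` are assumed to be in scope and are not part of this diff:

```ts
import * as Either from 'fp-ts/lib/Either';

// A TaskEither is just () => Promise<Either<L, R>>, so invoke it to run the migration.
const task = migrateRawDocsSafely(serializer, migrateDoc, rawDocs);
const result = await task();

if (Either.isRight(result)) {
  // happy path: every document was transformed and re-serialized
  indexDocs(result.right.processedDocs);
} else {
  // failures are accumulated rather than aborting on the first bad document
  const { corruptDocumentIds, transformErrors } = result.left;
  log.error(
    `corrupt docs: ${corruptDocumentIds.length}, failed transforms: ${transformErrors.length}`
  );
}
```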
/**
* Migration transform functions are potentially CPU heavy e.g. doing decryption/encryption
* or (de)/serializing large JSON payloads.
@ -92,3 +156,40 @@ function transformNonBlocking(
});
});
}
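The body of `transformNonBlocking` is mostly elided by the hunk above. For context, a sketch of the `setImmediate` pattern the comment describes — deferring each transform to the next event-loop tick so the server keeps serving requests between CPU-heavy document transforms (the exact wrapper in the source may differ):

```ts
// Sketch only: defers a synchronous, CPU-heavy transform to the next tick.
function transformNonBlockingSketch(transform: MigrateAndConvertFn): MigrateFn {
  return (doc) =>
    new Promise((resolve, reject) => {
      setImmediate(() => {
        try {
          resolve(transform(doc));
        } catch (e) {
          reject(e);
        }
      });
    });
}
```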
/**
* Applies the specified migration function to every saved object document provided
* and converts the saved object to a raw document
* @param {MigrateFn} migrateMethod
* @param {SavedObjectSanitizedDoc<unknown>} savedObject
* @param {SavedObjectsSerializer} serializer
* @returns {Promise<SavedObjectsRawDoc[]>}
*/
async function migrateMapToRawDoc(
migrateMethod: MigrateFn,
savedObject: SavedObjectSanitizedDoc<unknown>,
serializer: SavedObjectsSerializer
): Promise<SavedObjectsRawDoc[]> {
return [...(await migrateMethod(savedObject))].map((attrs) =>
serializer.savedObjectToRaw({
references: [],
...attrs,
})
);
}
/**
* Sanitizes the raw saved object document
* @param {SavedObjectsRawDoc} rawDoc
* @param options
* @param {SavedObjectsSerializer} serializer
* @returns {SavedObjectSanitizedDoc<unknown>}
*/
function convertToRawAddMigrationVersion(
rawDoc: SavedObjectsRawDoc,
options: { namespaceTreatment: 'lax' },
serializer: SavedObjectsSerializer
): SavedObjectSanitizedDoc<unknown> {
const savedObject = serializer.rawToSavedObject(rawDoc, options);
savedObject.migrationVersion = savedObject.migrationVersion || {};
return savedObject;
}

View file

@ -0,0 +1,60 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import { TransformSavedObjectDocumentError } from './transform_saved_object_document_error';
describe('TransformSavedObjectDocumentError', () => {
it('is a special error', () => {
const originalError = new Error('Dang diggity!');
const err = new TransformSavedObjectDocumentError(
'id',
'type',
'namespace',
'failedTransform',
'failedDoc',
originalError
);
expect(err).toBeInstanceOf(TransformSavedObjectDocumentError);
expect(err.id).toEqual('id');
expect(err.namespace).toEqual('namespace');
expect(err.stack).not.toBeNull();
});
it('constructs a special error message', () => {
const originalError = new Error('Dang diggity!');
const err = new TransformSavedObjectDocumentError(
'id',
'type',
'namespace',
'failedTransform',
'failedDoc',
originalError
);
expect(err.message).toMatchInlineSnapshot(
`
"Failed to transform document id. Transform: failedTransform
Doc: failedDoc"
`
);
});
it('handles undefined namespace', () => {
const originalError = new Error('Dang diggity!');
const err = new TransformSavedObjectDocumentError(
'id',
'type',
undefined,
'failedTransform',
'failedDoc',
originalError
);
expect(err.message).toMatchInlineSnapshot(
`
"Failed to transform document id. Transform: failedTransform
Doc: failedDoc"
`
);
});
});

View file

@ -0,0 +1,32 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
/**
* Error thrown when saved object migrations encounter a transformation error.
* Transformation errors happen when a transform function throws an error for an unsanitized saved object.
* The id (doc.id) reported in this error class is just the uuid part and doesn't tell users what the full Elasticsearch id is.
* In order to convert the id to the serialized version further upstream using serializer.generateRawId, we need to provide the following items:
* - namespace: doc.namespace,
* - type: doc.type,
* - id: doc.id,
* The new error class helps with v2 migrations.
* For backward compatibility with v1 migrations, the error message is the same as what was previously thrown as a plain error.
*/
export class TransformSavedObjectDocumentError extends Error {
constructor(
public readonly id: string,
public readonly type: string,
public readonly namespace: string | undefined,
public readonly failedTransform: string, // created by document_migrator wrapWithTry as `${type.name}:${version}`;
public readonly failedDoc: string,
public readonly originalError: Error
) {
super(`Failed to transform document ${id}. Transform: ${failedTransform}\nDoc: ${failedDoc}`);
}
}
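The extra fields exist so the full raw Elasticsearch `_id` can be rebuilt further upstream. A sketch of that reconstruction, assuming a `SavedObjectsSerializer` instance and a logger are in scope, and using a hypothetical `migrate` call that may throw:

```ts
try {
  migrator.migrate(doc); // hypothetical call that can throw during a transform
} catch (err) {
  if (err instanceof TransformSavedObjectDocumentError) {
    // err.id is only the uuid part; combining it with namespace and type
    // yields the raw _id as stored in the index, e.g. 'dashboard:123'.
    const rawId = serializer.generateRawId(err.namespace, err.type, err.id);
    log.error(`Migration failed for raw document ${rawId}`, err.originalError);
  } else {
    throw err;
  }
}
```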

View file

@ -35,7 +35,7 @@ import { SavedObjectsMigrationConfigType } from '../../saved_objects_config';
import { ISavedObjectTypeRegistry } from '../../saved_objects_type_registry';
import { SavedObjectsType } from '../../types';
import { runResilientMigrator } from '../../migrationsv2';
import { migrateRawDocs } from '../core/migrate_raw_docs';
import { migrateRawDocsSafely } from '../core/migrate_raw_docs';
export interface KibanaMigratorOptions {
client: ElasticsearchClient;
@ -135,7 +135,6 @@ export class KibanaMigrator {
if (!rerun) {
this.status$.next({ status: 'running' });
}
this.migrationResult = this.runMigrationsInternal().then((result) => {
// Similar to above, don't publish status updates when rerunning in CI.
if (!rerun) {
@ -185,7 +184,11 @@ export class KibanaMigrator {
logger: this.log,
preMigrationScript: indexMap[index].script,
transformRawDocs: (rawDocs: SavedObjectsRawDoc[]) =>
migrateRawDocs(this.serializer, this.documentMigrator.migrateAndConvert, rawDocs),
migrateRawDocsSafely(
this.serializer,
this.documentMigrator.migrateAndConvert,
rawDocs
),
migrationVersionPerType: this.documentMigrator.migrationVersion,
indexPrefix: index,
migrationsConfig: this.soMigrationsConfig,

View file

@ -129,18 +129,6 @@ describe('actions', () => {
});
});
describe('transformDocs', () => {
it('calls catchRetryableEsClientErrors when the promise rejects', async () => {
const task = Actions.transformDocs(client, () => Promise.resolve([]), [], 'my_index', false);
try {
await task();
} catch (e) {
/** ignore */
}
expect(catchRetryableEsClientErrors).toHaveBeenCalledWith(retryableError);
});
});
describe('reindex', () => {
it('calls catchRetryableEsClientErrors when the promise rejects', async () => {
const task = Actions.reindex(

View file

@ -22,6 +22,10 @@ import {
catchRetryableEsClientErrors,
RetryableEsClientError,
} from './catch_retryable_es_client_errors';
import {
DocumentsTransformFailed,
DocumentsTransformSuccess,
} from '../../migrations/core/migrate_raw_docs';
export type { RetryableEsClientError };
/**
@ -46,6 +50,7 @@ export interface ActionErrorTypeMap {
incompatible_mapping_exception: IncompatibleMappingException;
alias_not_found_exception: AliasNotFound;
remove_index_not_a_concrete_index: RemoveIndexNotAConcreteIndex;
documents_transform_failed: DocumentsTransformFailed;
}
/**
@ -523,28 +528,13 @@ export const closePit = (
};
/*
* Transform outdated docs and write them to the index.
* Transform outdated docs
* */
export const transformDocs = (
client: ElasticsearchClient,
transformRawDocs: TransformRawDocs,
outdatedDocuments: SavedObjectsRawDoc[],
index: string,
// used for testing purposes only
refresh: estypes.Refresh
): TaskEither.TaskEither<
RetryableEsClientError | IndexNotFound | TargetIndexHadWriteBlock,
'bulk_index_succeeded'
> =>
pipe(
TaskEither.tryCatch(
() => transformRawDocs(outdatedDocuments),
(e) => {
throw e;
}
),
TaskEither.chain((docs) => bulkOverwriteTransformedDocuments(client, index, docs, refresh))
);
outdatedDocuments: SavedObjectsRawDoc[]
): TaskEither.TaskEither<DocumentsTransformFailed, DocumentsTransformSuccess> =>
transformRawDocs(outdatedDocuments);
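With the bulk write split out, `transformDocs` is now a thin wrapper and the indexing happens in a separate `*_BULK` state. A sketch of how the two actions compose back into the old single-step behavior, assuming `client`, `transformRawDocs`, `outdatedDocuments`, and `tempIndex` are in scope:

```ts
import * as Either from 'fp-ts/lib/Either';

// transform first, then bulk-index the processed docs into the temp index
const transformThenIndex = async () => {
  const transformed = await transformDocs(transformRawDocs, outdatedDocuments)();
  if (Either.isLeft(transformed)) {
    return transformed; // documents_transform_failed: carried along toward the FATAL path
  }
  // refresh=false: the migration never searches the target index mid-flight
  return bulkOverwriteTransformedDocuments(
    client,
    tempIndex,
    transformed.right.processedDocs,
    false
  )();
};
```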
/** @internal */
export interface ReindexResponse {
@ -747,8 +737,6 @@ export const waitForPickupUpdatedMappingsTask = flow(
}
)
);
/** @internal */
export interface AliasNotFound {
type: 'alias_not_found_exception';
}

View file

@ -41,6 +41,8 @@ import {
import * as Either from 'fp-ts/lib/Either';
import * as Option from 'fp-ts/lib/Option';
import { ResponseError } from '@elastic/elasticsearch/lib/errors';
import { DocumentsTransformFailed, DocumentsTransformSuccess } from '../../migrations/core';
import { TaskEither } from 'fp-ts/lib/TaskEither';
const { startES } = kbnTestServer.createTestServers({
adjustTimeout: (t: number) => jest.setTimeout(t),
@ -1014,41 +1016,30 @@ describe('migration actions', () => {
});
describe('transformDocs', () => {
it('applies "transformRawDocs" and writes result into an index', async () => {
const index = 'transform_docs_index';
it('applies "transformRawDocs" and returns the transformed documents', async () => {
const originalDocs = [
{ _id: 'foo:1', _source: { type: 'dashboard', value: 1 } },
{ _id: 'foo:2', _source: { type: 'dashboard', value: 2 } },
];
const createIndexTask = createIndex(client, index, {
dynamic: true,
properties: {},
});
await createIndexTask();
async function tranformRawDocs(docs: SavedObjectsRawDoc[]): Promise<SavedObjectsRawDoc[]> {
for (const doc of docs) {
doc._source.value += 1;
}
return docs;
function innerTransformRawDocs(
docs: SavedObjectsRawDoc[]
): TaskEither<DocumentsTransformFailed, DocumentsTransformSuccess> {
return async () => {
const processedDocs: SavedObjectsRawDoc[] = [];
for (const doc of docs) {
doc._source.value += 1;
processedDocs.push(doc);
}
return Either.right({ processedDocs });
};
}
const transformTask = transformDocs(innerTransformRawDocs, originalDocs);
const transformTask = transformDocs(client, tranformRawDocs, originalDocs, index, 'wait_for');
const result = (await transformTask()) as Either.Right<'bulk_index_succeeded'>;
expect(result.right).toBe('bulk_index_succeeded');
const { body } = await client.search<{ value: number }>({
index,
});
const hits = body.hits.hits;
const foo1 = hits.find((h) => h._id === 'foo:1');
expect(foo1?._source?.value).toBe(2);
const foo2 = hits.find((h) => h._id === 'foo:2');
const resultsWithProcessDocs = ((await transformTask()) as Either.Right<DocumentsTransformSuccess>)
.right.processedDocs;
expect(resultsWithProcessDocs.length).toEqual(2);
const foo2 = resultsWithProcessDocs.find((h) => h._id === 'foo:2');
expect(foo2?._source?.value).toBe(3);
});
});

View file

@ -100,7 +100,7 @@ describe('migration v2', () => {
await root.setup();
await expect(root.start()).rejects.toThrow(
/Unable to migrate the corrupt saved object document with _id: 'index-pattern:test_index\*'/
'Unable to complete saved object migrations for the [.kibana] index: Migrations failed. Reason: Corrupt saved object documents: index-pattern:test_index*. To allow migrations to proceed, please delete these documents.'
);
const logFileContent = await asyncReadFile(logFilePath, 'utf-8');

View file

@ -0,0 +1,154 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import Path from 'path';
import Fs from 'fs';
import Util from 'util';
import * as kbnTestServer from '../../../../test_helpers/kbn_server';
import { Root } from '../../../root';
const logFilePath = Path.join(__dirname, 'migration_test_corrupt_docs_kibana.log');
const asyncUnlink = Util.promisify(Fs.unlink);
async function removeLogFile() {
// ignore errors if it doesn't exist
await asyncUnlink(logFilePath).catch(() => void 0);
}
describe('migration v2 with corrupt saved object documents', () => {
let esServer: kbnTestServer.TestElasticsearchUtils;
let root: Root;
beforeAll(async () => {
await removeLogFile();
});
afterAll(async () => {
if (root) {
await root.shutdown();
}
if (esServer) {
await esServer.stop();
}
await new Promise((resolve) => setTimeout(resolve, 10000));
});
it('collects corrupt saved object documents across batches', async () => {
const { startES } = kbnTestServer.createTestServers({
adjustTimeout: (t: number) => jest.setTimeout(t),
settings: {
es: {
license: 'basic',
// original uncorrupted SO:
// {
// type: 'foo', // 'bar', 'baz'
// foo: {}, // bar: {}, baz: {}
// migrationVersion: {
// foo: '7.13.0',
// },
// },
// original corrupt SO example:
// {
// id: 'bar:123'
// type: 'foo',
// foo: {},
// migrationVersion: {
// foo: '7.13.0',
// },
// },
// contains migrated index with 8.0 aliases to skip migration, but run outdated doc search
dataArchive: Path.join(
__dirname,
'archives',
'8.0.0_migrated_with_corrupt_outdated_docs.zip'
),
},
},
});
root = createRoot();
esServer = await startES();
const coreSetup = await root.setup();
coreSetup.savedObjects.registerType({
name: 'foo',
hidden: false,
mappings: { properties: {} },
namespaceType: 'agnostic',
migrations: {
'7.14.0': (doc) => doc,
},
});
coreSetup.savedObjects.registerType({
name: 'bar',
hidden: false,
mappings: { properties: {} },
namespaceType: 'agnostic',
migrations: {
'7.14.0': (doc) => doc,
},
});
coreSetup.savedObjects.registerType({
name: 'baz',
hidden: false,
mappings: { properties: {} },
namespaceType: 'agnostic',
migrations: {
'7.14.0': (doc) => doc,
},
});
try {
await root.start();
} catch (err) {
const corruptFooSOs = /foo:/g;
const corruptBarSOs = /bar:/g;
const corruptBazSOs = /baz:/g;
expect(
[
...err.message.matchAll(corruptFooSOs),
...err.message.matchAll(corruptBarSOs),
...err.message.matchAll(corruptBazSOs),
].length
).toEqual(16);
}
});
});
function createRoot() {
return kbnTestServer.createRootWithCorePlugins(
{
migrations: {
skip: false,
enableV2: true,
batchSize: 5,
},
logging: {
appenders: {
file: {
type: 'file',
fileName: logFilePath,
layout: {
type: 'json',
},
},
},
loggers: [
{
name: 'root',
appenders: ['file'],
},
],
},
},
{
oss: true,
}
);
}

View file

@ -10,7 +10,6 @@ import { errors as EsErrors } from '@elastic/elasticsearch';
import * as Option from 'fp-ts/lib/Option';
import { Logger, LogMeta } from '../../logging';
import type { ElasticsearchClient } from '../../elasticsearch';
import { CorruptSavedObjectError } from '../migrations/core/migrate_raw_docs';
import { Model, Next, stateActionMachine } from './state_action_machine';
import { cleanup } from './migrations_state_machine_cleanup';
import { State } from './types';
@ -74,7 +73,6 @@ const logActionResponse = (
) => {
logger.debug(logMessagePrefix + `${state.controlState} RESPONSE`, res as LogMeta);
};
const dumpExecutionLog = (logger: Logger, logMessagePrefix: string, executionLog: ExecutionLog) => {
logger.error(logMessagePrefix + 'migration failed, dumping execution log:');
executionLog.forEach((log) => {
@ -211,11 +209,6 @@ export async function migrationStateActionMachine({
logger.error(e);
dumpExecutionLog(logger, logMessagePrefix, executionLog);
if (e instanceof CorruptSavedObjectError) {
throw new Error(
`${e.message} To allow migrations to proceed, please delete this document from the [${initialState.indexPrefix}_${initialState.kibanaVersion}_001] index.`
);
}
const newError = new Error(
`Unable to complete saved object migrations for the [${initialState.indexPrefix}] index. ${e}`

View file

@ -36,12 +36,15 @@ import type {
CloneTempToSource,
SetTempWriteBlock,
WaitForYellowSourceState,
TransformedDocumentsBulkIndex,
ReindexSourceToTempIndexBulk,
} from './types';
import { SavedObjectsRawDoc } from '..';
import { AliasAction, RetryableEsClientError } from './actions';
import { createInitialState, model } from './model';
import { ResponseType } from './next';
import { SavedObjectsMigrationConfigType } from '../saved_objects_config';
import { TransformErrorObjects, TransformSavedObjectDocumentError } from '../migrations/core';
describe('migrations v2 model', () => {
const baseState: BaseState = {
@ -778,6 +781,8 @@ describe('migrations v2 model', () => {
targetIndex: '.kibana_7.11.0_001',
tempIndexMappings: { properties: {} },
lastHitSortValue: undefined,
corruptDocumentIds: [],
transformErrors: [],
};
it('REINDEX_SOURCE_TO_TEMP_READ -> REINDEX_SOURCE_TO_TEMP_INDEX if the index has outdated documents to reindex', () => {
@ -802,6 +807,23 @@ describe('migrations v2 model', () => {
expect(newState.controlState).toBe('REINDEX_SOURCE_TO_TEMP_CLOSE_PIT');
expect(newState.sourceIndexPitId).toBe('pit_id');
});
it('REINDEX_SOURCE_TO_TEMP_READ -> FATAL if no outdated documents to reindex and transform failures seen with previous outdated documents', () => {
const testState: ReindexSourceToTempRead = {
...state,
corruptDocumentIds: ['a:b'],
transformErrors: [],
};
const res: ResponseType<'REINDEX_SOURCE_TO_TEMP_READ'> = Either.right({
outdatedDocuments: [],
lastHitSortValue: undefined,
});
const newState = model(testState, res) as FatalState;
expect(newState.controlState).toBe('FATAL');
expect(newState.reason).toMatchInlineSnapshot(
`"Migrations failed. Reason: Corrupt saved object documents: a:b. To allow migrations to proceed, please delete these documents."`
);
});
});
describe('REINDEX_SOURCE_TO_TEMP_CLOSE_PIT', () => {
@ -833,37 +855,88 @@ describe('migrations v2 model', () => {
sourceIndexPitId: 'pit_id',
targetIndex: '.kibana_7.11.0_001',
lastHitSortValue: undefined,
corruptDocumentIds: [],
transformErrors: [],
};
const processedDocs = [
{
_id: 'a:b',
_source: { type: 'a', a: { name: 'HOI!' }, migrationVersion: {}, references: [] },
},
] as SavedObjectsRawDoc[];
it('REINDEX_SOURCE_TO_TEMP_INDEX -> REINDEX_SOURCE_TO_TEMP_READ if action succeeded', () => {
const res: ResponseType<'REINDEX_SOURCE_TO_TEMP_INDEX'> = Either.right(
it('REINDEX_SOURCE_TO_TEMP_INDEX -> REINDEX_SOURCE_TO_TEMP_INDEX_BULK if action succeeded', () => {
const res: ResponseType<'REINDEX_SOURCE_TO_TEMP_INDEX'> = Either.right({
processedDocs,
});
const newState = model(state, res);
expect(newState.controlState).toEqual('REINDEX_SOURCE_TO_TEMP_INDEX_BULK');
});
it('REINDEX_SOURCE_TO_TEMP_INDEX -> REINDEX_SOURCE_TO_TEMP_READ if action succeeded but we have carried through previous failures', () => {
const res: ResponseType<'REINDEX_SOURCE_TO_TEMP_INDEX'> = Either.right({
processedDocs,
});
const testState = {
...state,
corruptDocumentIds: ['a:b'],
transformErrors: [],
};
const newState = model(testState, res) as ReindexSourceToTempIndex;
expect(newState.controlState).toEqual('REINDEX_SOURCE_TO_TEMP_READ');
expect(newState.corruptDocumentIds.length).toEqual(1);
expect(newState.transformErrors.length).toEqual(0);
});
it('REINDEX_SOURCE_TO_TEMP_INDEX -> REINDEX_SOURCE_TO_TEMP_READ when response is left documents_transform_failed', () => {
const res: ResponseType<'REINDEX_SOURCE_TO_TEMP_INDEX'> = Either.left({
type: 'documents_transform_failed',
corruptDocumentIds: ['a:b'],
transformErrors: [],
});
const newState = model(state, res) as ReindexSourceToTempRead;
expect(newState.controlState).toEqual('REINDEX_SOURCE_TO_TEMP_READ');
expect(newState.corruptDocumentIds.length).toEqual(1);
expect(newState.transformErrors.length).toEqual(0);
expect(newState.retryCount).toEqual(0);
expect(newState.retryDelay).toEqual(0);
});
});
describe('REINDEX_SOURCE_TO_TEMP_INDEX_BULK', () => {
const transformedDocs = [
{
_id: 'a:b',
_source: { type: 'a', a: { name: 'HOI!' }, migrationVersion: {}, references: [] },
},
] as SavedObjectsRawDoc[];
const reindexSourceToTempIndexBulkState: ReindexSourceToTempIndexBulk = {
...baseState,
controlState: 'REINDEX_SOURCE_TO_TEMP_INDEX_BULK',
transformedDocs,
versionIndexReadyActions: Option.none,
sourceIndex: Option.some('.kibana') as Option.Some<string>,
sourceIndexPitId: 'pit_id',
targetIndex: '.kibana_7.11.0_001',
lastHitSortValue: undefined,
};
test('REINDEX_SOURCE_TO_TEMP_INDEX_BULK -> REINDEX_SOURCE_TO_TEMP_READ if action succeeded', () => {
const res: ResponseType<'REINDEX_SOURCE_TO_TEMP_INDEX_BULK'> = Either.right(
'bulk_index_succeeded'
);
const newState = model(state, res);
const newState = model(reindexSourceToTempIndexBulkState, res);
expect(newState.controlState).toEqual('REINDEX_SOURCE_TO_TEMP_READ');
expect(newState.retryCount).toEqual(0);
expect(newState.retryDelay).toEqual(0);
});
it('REINDEX_SOURCE_TO_TEMP_INDEX -> REINDEX_SOURCE_TO_TEMP_READ when response is left target_index_had_write_block', () => {
const res: ResponseType<'REINDEX_SOURCE_TO_TEMP_INDEX'> = Either.left({
type: 'target_index_had_write_block',
test('REINDEX_SOURCE_TO_TEMP_INDEX_BULK should throw a throwBadResponse error if action failed', () => {
const res: ResponseType<'REINDEX_SOURCE_TO_TEMP_INDEX_BULK'> = Either.left({
type: 'retryable_es_client_error',
message: 'random documents bulk index error',
});
const newState = model(state, res) as ReindexSourceToTempRead;
expect(newState.controlState).toEqual('REINDEX_SOURCE_TO_TEMP_READ');
expect(newState.retryCount).toEqual(0);
expect(newState.retryDelay).toEqual(0);
});
it('REINDEX_SOURCE_TO_TEMP_INDEX -> REINDEX_SOURCE_TO_TEMP_READ when response is left index_not_found_exception for temp index', () => {
const res: ResponseType<'REINDEX_SOURCE_TO_TEMP_INDEX'> = Either.left({
type: 'index_not_found_exception',
index: state.tempIndex,
});
const newState = model(state, res) as ReindexSourceToTempRead;
expect(newState.controlState).toEqual('REINDEX_SOURCE_TO_TEMP_READ');
expect(newState.retryCount).toEqual(0);
expect(newState.retryDelay).toEqual(0);
const newState = model(reindexSourceToTempIndexBulkState, res);
expect(newState.controlState).toEqual('REINDEX_SOURCE_TO_TEMP_INDEX_BULK');
expect(newState.retryCount).toEqual(1);
expect(newState.retryDelay).toEqual(2000);
});
});
@ -943,6 +1016,8 @@ describe('migrations v2 model', () => {
targetIndex: '.kibana_7.11.0_001',
lastHitSortValue: undefined,
hasTransformedDocs: false,
corruptDocumentIds: [],
transformErrors: [],
};
it('OUTDATED_DOCUMENTS_SEARCH_READ -> OUTDATED_DOCUMENTS_TRANSFORM if found documents to transform', () => {
@ -967,6 +1042,37 @@ describe('migrations v2 model', () => {
expect(newState.controlState).toBe('OUTDATED_DOCUMENTS_SEARCH_CLOSE_PIT');
expect(newState.pitId).toBe('pit_id');
});
it('OUTDATED_DOCUMENTS_SEARCH_READ -> FATAL if no outdated documents to transform and we have failed document migrations', () => {
const corruptDocumentIdsCarriedOver = ['a:somethingelse'];
const originalTransformError = new Error('something went wrong');
const transformErr = new TransformSavedObjectDocumentError(
'123',
'vis',
undefined,
'randomvis: 7.12.0',
'failedDoc',
originalTransformError
);
const transformationErrors = [
{ rawId: 'bob:tail', err: transformErr },
] as TransformErrorObjects[];
const res: ResponseType<'OUTDATED_DOCUMENTS_SEARCH_READ'> = Either.right({
outdatedDocuments: [],
lastHitSortValue: undefined,
});
const transformErrorsState: OutdatedDocumentsSearchRead = {
...state,
corruptDocumentIds: [...corruptDocumentIdsCarriedOver],
transformErrors: [...transformationErrors],
};
const newState = model(transformErrorsState, res) as FatalState;
expect(newState.controlState).toBe('FATAL');
expect(newState.reason.includes('Migrations failed. Reason:')).toBe(true);
expect(newState.reason.includes('Corrupt saved object documents: ')).toBe(true);
expect(newState.reason.includes('Transformation errors: ')).toBe(true);
expect(newState.reason.includes('randomvis: 7.12.0')).toBe(true);
});
});
describe('OUTDATED_DOCUMENTS_SEARCH_CLOSE_PIT', () => {
@ -1006,9 +1112,20 @@ describe('migrations v2 model', () => {
});
describe('OUTDATED_DOCUMENTS_TRANSFORM', () => {
const outdatedDocuments = ([
Symbol('raw saved object doc'),
] as unknown) as SavedObjectsRawDoc[];
const outdatedDocuments = [{ _id: '1', _source: { type: 'vis' } }];
const corruptDocumentIds = ['a:somethingelse'];
const originalTransformError = new Error('Dang diggity!');
const transformErr = new TransformSavedObjectDocumentError(
'id',
'type',
'namespace',
'failedTransform',
'failedDoc',
originalTransformError
);
const transformationErrors = [
{ rawId: 'bob:tail', err: transformErr },
] as TransformErrorObjects[];
const outdatedDocumentsTransformState: OutdatedDocumentsTransform = {
...baseState,
controlState: 'OUTDATED_DOCUMENTS_TRANSFORM',
@ -1016,18 +1133,132 @@ describe('migrations v2 model', () => {
sourceIndex: Option.some('.kibana') as Option.Some<string>,
targetIndex: '.kibana_7.11.0_001',
outdatedDocuments,
corruptDocumentIds: [],
transformErrors: [],
pitId: 'pit_id',
lastHitSortValue: [3, 4],
hasTransformedDocs: false,
};
test('OUTDATED_DOCUMENTS_TRANSFORM -> OUTDATED_DOCUMENTS_SEARCH_READ if action succeeds', () => {
const res: ResponseType<'OUTDATED_DOCUMENTS_TRANSFORM'> = Either.right(
'bulk_index_succeeded'
);
const newState = model(outdatedDocumentsTransformState, res);
expect(newState.controlState).toEqual('OUTDATED_DOCUMENTS_SEARCH_READ');
expect(newState.retryCount).toEqual(0);
expect(newState.retryDelay).toEqual(0);
describe('OUTDATED_DOCUMENTS_TRANSFORM if action succeeds', () => {
const processedDocs = [
{
_id: 'a:b',
_source: { type: 'a', a: { name: 'HOI!' }, migrationVersion: {}, references: [] },
},
] as SavedObjectsRawDoc[];
test('OUTDATED_DOCUMENTS_TRANSFORM -> TRANSFORMED_DOCUMENTS_BULK_INDEX if action succeeds', () => {
const res: ResponseType<'OUTDATED_DOCUMENTS_TRANSFORM'> = Either.right({ processedDocs });
const newState = model(
outdatedDocumentsTransformState,
res
) as TransformedDocumentsBulkIndex;
expect(newState.controlState).toEqual('TRANSFORMED_DOCUMENTS_BULK_INDEX');
expect(newState.transformedDocs).toEqual(processedDocs);
expect(newState.retryCount).toEqual(0);
expect(newState.retryDelay).toEqual(0);
});
test('OUTDATED_DOCUMENTS_TRANSFORM -> OUTDATED_DOCUMENTS_SEARCH_READ if there are existing documents that failed transformation', () => {
const outdatedDocumentsTransformStateWithFailedDocuments: OutdatedDocumentsTransform = {
...outdatedDocumentsTransformState,
corruptDocumentIds: [...corruptDocumentIds],
transformErrors: [],
};
const res: ResponseType<'OUTDATED_DOCUMENTS_TRANSFORM'> = Either.right({ processedDocs });
const newState = model(
outdatedDocumentsTransformStateWithFailedDocuments,
res
) as OutdatedDocumentsSearchRead;
expect(newState.controlState).toEqual('OUTDATED_DOCUMENTS_SEARCH_READ');
expect(newState.corruptDocumentIds).toEqual(corruptDocumentIds);
expect(newState.retryCount).toEqual(0);
expect(newState.retryDelay).toEqual(0);
});
test('OUTDATED_DOCUMENTS_TRANSFORM -> OUTDATED_DOCUMENTS_SEARCH_READ if there are existing documents that failed transformation because of transform errors', () => {
const outdatedDocumentsTransformStateWithFailedDocuments: OutdatedDocumentsTransform = {
...outdatedDocumentsTransformState,
corruptDocumentIds: [],
transformErrors: [...transformationErrors],
};
const res: ResponseType<'OUTDATED_DOCUMENTS_TRANSFORM'> = Either.right({ processedDocs });
const newState = model(
outdatedDocumentsTransformStateWithFailedDocuments,
res
) as OutdatedDocumentsSearchRead;
expect(newState.controlState).toEqual('OUTDATED_DOCUMENTS_SEARCH_READ');
expect(newState.corruptDocumentIds.length).toEqual(0);
expect(newState.transformErrors.length).toEqual(1);
expect(newState.retryCount).toEqual(0);
expect(newState.retryDelay).toEqual(0);
});
});
describe('OUTDATED_DOCUMENTS_TRANSFORM if action fails', () => {
test('OUTDATED_DOCUMENTS_TRANSFORM -> OUTDATED_DOCUMENTS_SEARCH_READ adding newly failed documents to state if documents failed the transform', () => {
const res: ResponseType<'OUTDATED_DOCUMENTS_TRANSFORM'> = Either.left({
type: 'documents_transform_failed',
corruptDocumentIds,
transformErrors: [],
});
const newState = model(
outdatedDocumentsTransformState,
res
) as OutdatedDocumentsSearchRead;
expect(newState.controlState).toEqual('OUTDATED_DOCUMENTS_SEARCH_READ');
expect(newState.corruptDocumentIds).toEqual(corruptDocumentIds);
});
test('OUTDATED_DOCUMENTS_TRANSFORM -> OUTDATED_DOCUMENTS_SEARCH_READ combines newly failed documents with those already on state if documents failed the transform', () => {
const newFailedTransformDocumentIds = ['b:other', 'c:__'];
const outdatedDocumentsTransformStateWithFailedDocuments: OutdatedDocumentsTransform = {
...outdatedDocumentsTransformState,
corruptDocumentIds: [...corruptDocumentIds],
transformErrors: [...transformationErrors],
};
const res: ResponseType<'OUTDATED_DOCUMENTS_TRANSFORM'> = Either.left({
type: 'documents_transform_failed',
corruptDocumentIds: newFailedTransformDocumentIds,
transformErrors: transformationErrors,
});
const newState = model(
outdatedDocumentsTransformStateWithFailedDocuments,
res
) as OutdatedDocumentsSearchRead;
expect(newState.controlState).toEqual('OUTDATED_DOCUMENTS_SEARCH_READ');
expect(newState.corruptDocumentIds).toEqual([
...corruptDocumentIds,
...newFailedTransformDocumentIds,
]);
});
});
});
describe('TRANSFORMED_DOCUMENTS_BULK_INDEX', () => {
const transformedDocs = [
{
_id: 'a:b',
_source: { type: 'a', a: { name: 'HOI!' }, migrationVersion: {}, references: [] },
},
] as SavedObjectsRawDoc[];
const transformedDocumentsBulkIndexState: TransformedDocumentsBulkIndex = {
...baseState,
controlState: 'TRANSFORMED_DOCUMENTS_BULK_INDEX',
transformedDocs,
versionIndexReadyActions: Option.none,
sourceIndex: Option.some('.kibana') as Option.Some<string>,
targetIndex: '.kibana_7.11.0_001',
pitId: 'pit_id',
lastHitSortValue: [3, 4],
hasTransformedDocs: false,
};
test('TRANSFORMED_DOCUMENTS_BULK_INDEX should throw a throwBadResponse error if action failed', () => {
const res: ResponseType<'TRANSFORMED_DOCUMENTS_BULK_INDEX'> = Either.left({
type: 'retryable_es_client_error',
message: 'random documents bulk index error',
});
const newState = model(
transformedDocumentsBulkIndexState,
res
) as TransformedDocumentsBulkIndex;
expect(newState.controlState).toEqual('TRANSFORMED_DOCUMENTS_BULK_INDEX');
expect(newState.retryCount).toEqual(1);
expect(newState.retryDelay).toEqual(2000);
});
});

View file

@ -16,7 +16,7 @@ import { IndexMapping } from '../mappings';
import { ResponseType } from './next';
import { SavedObjectsMigrationVersion } from '../types';
import { disableUnknownTypeMappingFields } from '../migrations/core/migration_context';
import { excludeUnusedTypesQuery } from '../migrations/core';
import { excludeUnusedTypesQuery, TransformErrorObjects } from '../migrations/core';
import { SavedObjectsMigrationConfigType } from '../saved_objects_config';
/**
@ -97,6 +97,31 @@ function getAliases(indices: FetchIndexResponse) {
}, {} as Record<string, string>);
}
/**
* Constructs migration failure message strings from corrupt document ids and document transformation errors
*/
function extractTransformFailuresReason(
corruptDocumentIds: string[],
transformErrors: TransformErrorObjects[]
): { corruptDocsReason: string; transformErrsReason: string } {
const corruptDocumentIdReason =
corruptDocumentIds.length > 0
? ` Corrupt saved object documents: ${corruptDocumentIds.join(',')}`
: '';
// we have both the saved object Id and the stack trace in each `transformErrors` item.
const transformErrorsReason =
transformErrors.length > 0
? ' Transformation errors: ' +
transformErrors
.map((errObj) => `${errObj.rawId}: ${errObj.err.message}\n ${errObj.err.stack ?? ''}`)
.join('\n')
: '';
return {
corruptDocsReason: corruptDocumentIdReason,
transformErrsReason: transformErrorsReason,
};
}
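For reference, a sketch of what this assembles when both lists are non-empty — the ids and error below are invented for illustration:

```ts
const { corruptDocsReason, transformErrsReason } = extractTransformFailuresReason(
  ['index-pattern:abc'], // invented corrupt document id
  [{ rawId: 'dashboard:xyz', err: new Error('Dang diggity!') }] // invented transform error
);
// corruptDocsReason === ' Corrupt saved object documents: index-pattern:abc'
// transformErrsReason begins ' Transformation errors: dashboard:xyz: Dang diggity!' (stack follows)
// and the FATAL reason built further down becomes roughly:
// 'Migrations failed. Reason: Corrupt saved object documents: index-pattern:abc
//  Transformation errors: ... To allow migrations to proceed, please delete these documents.'
```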
const delayRetryState = <S extends State>(
state: S,
errorMessage: string,
@ -481,11 +506,15 @@ export const model = (currentState: State, resW: ResponseType<AllActionStates>):
controlState: 'REINDEX_SOURCE_TO_TEMP_READ',
sourceIndexPitId: res.right.pitId,
lastHitSortValue: undefined,
// placeholders to collect document transform problems
corruptDocumentIds: [],
transformErrors: [],
};
} else {
throwBadResponse(stateP, res);
}
} else if (stateP.controlState === 'REINDEX_SOURCE_TO_TEMP_READ') {
// we carry through any failures we've seen while transforming documents in the state
const res = resW as ExcludeRetryableEsError<ResponseType<typeof stateP.controlState>>;
if (Either.isRight(res)) {
if (res.right.outdatedDocuments.length > 0) {
@ -495,11 +524,27 @@ export const model = (currentState: State, resW: ResponseType<AllActionStates>):
outdatedDocuments: res.right.outdatedDocuments,
lastHitSortValue: res.right.lastHitSortValue,
};
} else {
// we don't have any more outdated documents and need to either fail or close the PIT and move on.
if (stateP.corruptDocumentIds.length > 0 || stateP.transformErrors.length > 0) {
const { corruptDocsReason, transformErrsReason } = extractTransformFailuresReason(
stateP.corruptDocumentIds,
stateP.transformErrors
);
return {
...stateP,
controlState: 'FATAL',
reason: `Migrations failed. Reason:${corruptDocsReason}${transformErrsReason}. To allow migrations to proceed, please delete these documents.`,
};
} else {
// we don't have any more outdated documents and we haven't encountered any document transformation issues.
// Close the PIT search and carry on with the happy path.
return {
...stateP,
controlState: 'REINDEX_SOURCE_TO_TEMP_CLOSE_PIT',
};
}
}
return {
...stateP,
controlState: 'REINDEX_SOURCE_TO_TEMP_CLOSE_PIT',
};
} else {
throwBadResponse(stateP, res);
}
@ -516,34 +561,55 @@ export const model = (currentState: State, resW: ResponseType<AllActionStates>):
throwBadResponse(stateP, res);
}
} else if (stateP.controlState === 'REINDEX_SOURCE_TO_TEMP_INDEX') {
// We follow a similar control flow as for
// outdated document search -> outdated document transform -> transform documents bulk index
// collecting issues along the way rather than failing
// REINDEX_SOURCE_TO_TEMP_INDEX handles the document transforms
const res = resW as ExcludeRetryableEsError<ResponseType<typeof stateP.controlState>>;
if (Either.isRight(res)) {
return {
...stateP,
controlState: 'REINDEX_SOURCE_TO_TEMP_READ',
};
} else {
const left = res.left;
if (
isLeftTypeof(left, 'target_index_had_write_block') ||
(isLeftTypeof(left, 'index_not_found_exception') && left.index === stateP.tempIndex)
) {
// index_not_found_exception:
// another instance completed the MARK_VERSION_INDEX_READY and
// removed the temp index.
// target_index_had_write_block
// another instance completed the SET_TEMP_WRITE_BLOCK step adding a
// write block to the temp index.
//
// For simplicity we continue linearly through the next steps even if
// we know another instance already completed these.
if (stateP.corruptDocumentIds.length === 0 && stateP.transformErrors.length === 0) {
return {
...stateP,
controlState: 'REINDEX_SOURCE_TO_TEMP_INDEX_BULK', // handles the actual bulk indexing into temp index
transformedDocs: [...res.right.processedDocs],
};
} else {
// we don't have any transform issues with the current batch of outdated docs, but
// we have carried through previous transformation issues.
// The migration will ultimately fail; before it does, continue to search
// through the remaining docs for more issues and carry the previous failures along in the state.
return {
...stateP,
controlState: 'REINDEX_SOURCE_TO_TEMP_READ',
};
}
// should never happen
throwBadResponse(stateP, res as never);
} else {
// we have failures from the current batch of documents and add them to the lists
const left = res.left;
if (isLeftTypeof(left, 'documents_transform_failed')) {
return {
...stateP,
controlState: 'REINDEX_SOURCE_TO_TEMP_READ',
corruptDocumentIds: [...stateP.corruptDocumentIds, ...left.corruptDocumentIds],
transformErrors: [...stateP.transformErrors, ...left.transformErrors],
};
} else {
// should never happen
throwBadResponse(stateP, res as never);
}
}
} else if (stateP.controlState === 'REINDEX_SOURCE_TO_TEMP_INDEX_BULK') {
const res = resW as ExcludeRetryableEsError<ResponseType<typeof stateP.controlState>>;
if (Either.isRight(res)) {
return {
...stateP,
controlState: 'REINDEX_SOURCE_TO_TEMP_READ',
// we're still on the happy path with no transformation failures seen.
corruptDocumentIds: [],
transformErrors: [],
};
} else {
throwBadResponse(stateP, res);
}
} else if (stateP.controlState === 'SET_TEMP_WRITE_BLOCK') {
const res = resW as ExcludeRetryableEsError<ResponseType<typeof stateP.controlState>>;
@ -611,6 +677,8 @@ export const model = (currentState: State, resW: ResponseType<AllActionStates>):
pitId: res.right.pitId,
lastHitSortValue: undefined,
hasTransformedDocs: false,
corruptDocumentIds: [],
transformErrors: [],
};
} else {
throwBadResponse(stateP, res);
@ -626,14 +694,88 @@ export const model = (currentState: State, resW: ResponseType<AllActionStates>):
lastHitSortValue: res.right.lastHitSortValue,
};
} else {
return {
...stateP,
controlState: 'OUTDATED_DOCUMENTS_SEARCH_CLOSE_PIT',
};
// we don't have any more outdated documents and need to either fail or move on to updating the target mappings.
if (stateP.corruptDocumentIds.length > 0 || stateP.transformErrors.length > 0) {
const { corruptDocsReason, transformErrsReason } = extractTransformFailuresReason(
stateP.corruptDocumentIds,
stateP.transformErrors
);
return {
...stateP,
controlState: 'FATAL',
reason: `Migrations failed. Reason:${corruptDocsReason}${transformErrsReason}. To allow migrations to proceed, please delete these documents.`,
};
} else {
// If there are no more results we have transformed all outdated
// documents and we didn't encounter any corrupt documents or transformation errors
// and can proceed to the next step
return {
...stateP,
controlState: 'OUTDATED_DOCUMENTS_SEARCH_CLOSE_PIT',
};
}
}
} else {
throwBadResponse(stateP, res);
}
} else if (stateP.controlState === 'OUTDATED_DOCUMENTS_TRANSFORM') {
const res = resW as ExcludeRetryableEsError<ResponseType<typeof stateP.controlState>>;
if (Either.isRight(res)) {
// we haven't seen corrupt documents or any transformation errors thus far in the migration
// index the migrated docs
if (stateP.corruptDocumentIds.length === 0 && stateP.transformErrors.length === 0) {
return {
...stateP,
controlState: 'TRANSFORMED_DOCUMENTS_BULK_INDEX',
transformedDocs: [...res.right.processedDocs],
hasTransformedDocs: true,
};
} else {
// We have seen corrupt documents and/or transformation errors
// skip indexing and go straight to reading and transforming more docs
return {
...stateP,
controlState: 'OUTDATED_DOCUMENTS_SEARCH_READ',
};
}
} else {
if (isLeftTypeof(res.left, 'documents_transform_failed')) {
// continue to build up any more transformation errors before failing the migration.
return {
...stateP,
controlState: 'OUTDATED_DOCUMENTS_SEARCH_READ',
corruptDocumentIds: [...stateP.corruptDocumentIds, ...res.left.corruptDocumentIds],
transformErrors: [...stateP.transformErrors, ...res.left.transformErrors],
hasTransformedDocs: false,
};
} else {
throwBadResponse(stateP, res as never);
}
}
} else if (stateP.controlState === 'TRANSFORMED_DOCUMENTS_BULK_INDEX') {
const res = resW as ExcludeRetryableEsError<ResponseType<typeof stateP.controlState>>;
if (Either.isRight(res)) {
return {
...stateP,
controlState: 'OUTDATED_DOCUMENTS_SEARCH_READ',
corruptDocumentIds: [],
transformErrors: [],
hasTransformedDocs: true,
};
} else {
throwBadResponse(stateP, res);
}
} else if (stateP.controlState === 'UPDATE_TARGET_MAPPINGS') {
const res = resW as ExcludeRetryableEsError<ResponseType<typeof stateP.controlState>>;
if (Either.isRight(res)) {
return {
...stateP,
controlState: 'UPDATE_TARGET_MAPPINGS_WAIT_FOR_TASK',
updateTargetMappingsTaskId: res.right.taskId,
};
} else {
throwBadResponse(stateP, res as never);
}
}
} else if (stateP.controlState === 'OUTDATED_DOCUMENTS_REFRESH') {
const res = resW as ExcludeRetryableEsError<ResponseType<typeof stateP.controlState>>;
if (Either.isRight(res)) {
@ -661,28 +803,6 @@ export const model = (currentState: State, resW: ResponseType<AllActionStates>):
} else {
throwBadResponse(stateP, res);
}
} else if (stateP.controlState === 'OUTDATED_DOCUMENTS_TRANSFORM') {
const res = resW as ExcludeRetryableEsError<ResponseType<typeof stateP.controlState>>;
if (Either.isRight(res)) {
return {
...stateP,
controlState: 'OUTDATED_DOCUMENTS_SEARCH_READ',
hasTransformedDocs: true,
};
} else {
throwBadResponse(stateP, res as never);
}
} else if (stateP.controlState === 'UPDATE_TARGET_MAPPINGS') {
const res = resW as ExcludeRetryableEsError<ResponseType<typeof stateP.controlState>>;
if (Either.isRight(res)) {
return {
...stateP,
controlState: 'UPDATE_TARGET_MAPPINGS_WAIT_FOR_TASK',
updateTargetMappingsTaskId: res.right.taskId,
};
} else {
throwBadResponse(stateP, res);
}
} else if (stateP.controlState === 'UPDATE_TARGET_MAPPINGS_WAIT_FOR_TASK') {
const res = resW as ExcludeRetryableEsError<ResponseType<typeof stateP.controlState>>;
if (Either.isRight(res)) {

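// A hedged sketch of `extractTransformFailuresReason`, the helper the FATAL
// transition above destructures. The real implementation lives elsewhere in
// this file; only the inputs and the two-fragment output shape are taken from
// the surrounding diff, and the `rawId`/`err` field names on
// TransformErrorObjects are assumptions. Each fragment leads with a space so
// it reads naturally after the literal `Reason:` in the reason string.
const extractTransformFailuresReason = (
  corruptDocumentIds: string[],
  transformErrors: TransformErrorObjects[]
): { corruptDocsReason: string; transformErrsReason: string } => ({
  // Mention corrupt documents only when some ids were collected.
  corruptDocsReason:
    corruptDocumentIds.length > 0
      ? ` ${corruptDocumentIds.length} corrupt saved object documents were found: ${corruptDocumentIds.join(', ')}`
      : '',
  // Summarize transformation errors per raw document id.
  transformErrsReason:
    transformErrors.length > 0
      ? ` ${transformErrors.length} transformation errors were encountered: ${transformErrors
          .map(({ rawId, err }) => `${rawId}: ${err.message}`)
          .join('; ')}`
      : '',
});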
View file

@ -32,6 +32,8 @@ import type {
SetTempWriteBlock,
WaitForYellowSourceState,
TransformRawDocs,
TransformedDocumentsBulkIndex,
ReindexSourceToTempIndexBulk,
OutdatedDocumentsSearchOpenPit,
OutdatedDocumentsSearchRead,
OutdatedDocumentsSearchClosePit,
@ -82,11 +84,12 @@ export const nextActionMap = (client: ElasticsearchClient, transformRawDocs: Tra
REINDEX_SOURCE_TO_TEMP_CLOSE_PIT: (state: ReindexSourceToTempClosePit) =>
Actions.closePit(client, state.sourceIndexPitId),
REINDEX_SOURCE_TO_TEMP_INDEX: (state: ReindexSourceToTempIndex) =>
Actions.transformDocs(
Actions.transformDocs(transformRawDocs, state.outdatedDocuments),
REINDEX_SOURCE_TO_TEMP_INDEX_BULK: (state: ReindexSourceToTempIndexBulk) =>
Actions.bulkOverwriteTransformedDocuments(
client,
transformRawDocs,
state.outdatedDocuments,
state.tempIndex,
state.transformedDocs,
/**
* Since we don't run a search against the target index, we disable "refresh" to speed up
* the migration process.
@ -121,11 +124,12 @@ export const nextActionMap = (client: ElasticsearchClient, transformRawDocs: Tra
OUTDATED_DOCUMENTS_REFRESH: (state: OutdatedDocumentsRefresh) =>
Actions.refreshIndex(client, state.targetIndex),
OUTDATED_DOCUMENTS_TRANSFORM: (state: OutdatedDocumentsTransform) =>
Actions.transformDocs(
Actions.transformDocs(transformRawDocs, state.outdatedDocuments),
TRANSFORMED_DOCUMENTS_BULK_INDEX: (state: TransformedDocumentsBulkIndex) =>
Actions.bulkOverwriteTransformedDocuments(
client,
transformRawDocs,
state.outdatedDocuments,
state.targetIndex,
state.transformedDocs,
/**
* Since we don't run a search against the target index, we disable "refresh" to speed up
* the migration process.

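// A minimal sketch (an assumption about the consuming wrapper, which is not
// shown in this hunk; `State` comes from ./types) of how the action map above
// drives the migration loop: terminal states yield no further action, every
// other control state resolves to its TaskEither-producing thunk, whose
// result the model then folds over.
const nextAction = (map: ReturnType<typeof nextActionMap>, state: State) =>
  state.controlState === 'DONE' || state.controlState === 'FATAL'
    ? null // terminal: stop polling Elasticsearch
    : map[state.controlState](state as never);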
View file

@ -6,12 +6,18 @@
* Side Public License, v 1.
*/
import * as TaskEither from 'fp-ts/lib/TaskEither';
import * as Option from 'fp-ts/lib/Option';
import { estypes } from '@elastic/elasticsearch';
import { ControlState } from './state_action_machine';
import { AliasAction } from './actions';
import { IndexMapping } from '../mappings';
import { SavedObjectsRawDoc } from '..';
import { TransformErrorObjects } from '../migrations/core';
import {
DocumentsTransformFailed,
DocumentsTransformSuccess,
} from '../migrations/core/migrate_raw_docs';
export type MigrationLogLevel = 'error' | 'info';
@ -175,6 +181,8 @@ export interface ReindexSourceToTempRead extends PostInitState {
readonly controlState: 'REINDEX_SOURCE_TO_TEMP_READ';
readonly sourceIndexPitId: string;
readonly lastHitSortValue: number[] | undefined;
readonly corruptDocumentIds: string[];
readonly transformErrors: TransformErrorObjects[];
}
export interface ReindexSourceToTempClosePit extends PostInitState {
@ -187,6 +195,15 @@ export interface ReindexSourceToTempIndex extends PostInitState {
readonly outdatedDocuments: SavedObjectsRawDoc[];
readonly sourceIndexPitId: string;
readonly lastHitSortValue: number[] | undefined;
readonly corruptDocumentIds: string[];
readonly transformErrors: TransformErrorObjects[];
}
export interface ReindexSourceToTempIndexBulk extends PostInitState {
readonly controlState: 'REINDEX_SOURCE_TO_TEMP_INDEX_BULK';
readonly transformedDocs: SavedObjectsRawDoc[];
readonly sourceIndexPitId: string;
readonly lastHitSortValue: number[] | undefined;
}
export type SetTempWriteBlock = PostInitState & {
@ -233,6 +250,8 @@ export interface OutdatedDocumentsSearchRead extends PostInitState {
readonly pitId: string;
readonly lastHitSortValue: number[] | undefined;
readonly hasTransformedDocs: boolean;
readonly corruptDocumentIds: string[];
readonly transformErrors: TransformErrorObjects[];
}
export interface OutdatedDocumentsSearchClosePit extends PostInitState {
@ -249,12 +268,24 @@ export interface OutdatedDocumentsRefresh extends PostInitState {
}
export interface OutdatedDocumentsTransform extends PostInitState {
/** Transform a batch of outdated documents to their latest version and write them to the target index */
/** Transform a batch of outdated documents to their latest version */
readonly controlState: 'OUTDATED_DOCUMENTS_TRANSFORM';
readonly pitId: string;
readonly outdatedDocuments: SavedObjectsRawDoc[];
readonly lastHitSortValue: number[] | undefined;
readonly hasTransformedDocs: boolean;
readonly corruptDocumentIds: string[];
readonly transformErrors: TransformErrorObjects[];
}
export interface TransformedDocumentsBulkIndex extends PostInitState {
/**
* Write the up-to-date transformed documents to the target index
*/
readonly controlState: 'TRANSFORMED_DOCUMENTS_BULK_INDEX';
readonly transformedDocs: SavedObjectsRawDoc[];
readonly lastHitSortValue: number[] | undefined;
readonly hasTransformedDocs: boolean;
readonly pitId: string;
}
export interface MarkVersionIndexReady extends PostInitState {
@ -351,6 +382,7 @@ export type State =
| ReindexSourceToTempRead
| ReindexSourceToTempClosePit
| ReindexSourceToTempIndex
| ReindexSourceToTempIndexBulk
| SetTempWriteBlock
| CloneTempToSource
| UpdateTargetMappingsState
@ -363,6 +395,7 @@ export type State =
| OutdatedDocumentsRefresh
| MarkVersionIndexReady
| MarkVersionIndexReadyConflict
| TransformedDocumentsBulkIndex
| LegacyCreateReindexTargetState
| LegacySetWriteBlockState
| LegacyReindexState
@ -376,4 +409,6 @@ export type AllControlStates = State['controlState'];
*/
export type AllActionStates = Exclude<AllControlStates, 'FATAL' | 'DONE'>;
export type TransformRawDocs = (rawDocs: SavedObjectsRawDoc[]) => Promise<SavedObjectsRawDoc[]>;
export type TransformRawDocs = (
rawDocs: SavedObjectsRawDoc[]
) => TaskEither.TaskEither<DocumentsTransformFailed, DocumentsTransformSuccess>;
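// Example implementations satisfying the new TaskEither-based signature
// (sketches, not part of this commit; the success/failure shapes are inferred
// from how model.ts reads `res.right.processedDocs` and `res.left`). A
// TaskEither is just a lazy `() => Promise<Either<E, A>>`, so alongside the
// TaskEither import above we only need:
import * as Either from 'fp-ts/lib/Either';

// Happy path: hand every raw doc back unchanged.
const passthroughTransform: TransformRawDocs = (rawDocs: SavedObjectsRawDoc[]) => async () =>
  Either.right({ processedDocs: rawDocs });

// Failure path: report every doc as corrupt, using the same fields the
// OUTDATED_DOCUMENTS_TRANSFORM transition accumulates before going FATAL.
const alwaysCorruptTransform: TransformRawDocs = (rawDocs: SavedObjectsRawDoc[]) => async () =>
  Either.left({
    type: 'documents_transform_failed' as const,
    corruptDocumentIds: rawDocs.map((doc) => doc._id),
    transformErrors: [],
  });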

View file

@ -5,7 +5,6 @@
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import { SavedObjectsClientContract } from '../saved_objects/types';
import { UiSettingsParams, UserProvidedValues, PublicUiSettingsParams } from '../../types';
export type {

View file

@ -558,6 +558,15 @@ describe('ui settings', () => {
bar: 'user-provided',
});
});
it('throws when mutating the result of getAll()', async () => {
const { uiSettings } = setup({ esDocSource: {} });
const result = await uiSettings.getAll();
expect(() => {
result.foo = 'bar';
}).toThrow();
});
});
describe('#get()', () => {

View file

@ -6,7 +6,7 @@
* Side Public License, v 1.
*/
import { defaultsDeep, omit } from 'lodash';
import { omit } from 'lodash';
import { SavedObjectsErrorHelpers } from '../saved_objects';
import { SavedObjectsClientContract } from '../saved_objects/types';
@ -35,10 +35,7 @@ interface UserProvidedValue<T = unknown> {
isOverridden?: boolean;
}
type UiSettingsRawValue = UiSettingsParams & UserProvidedValue;
type UserProvided<T = unknown> = Record<string, UserProvidedValue<T>>;
type UiSettingsRaw = Record<string, UiSettingsRawValue>;
export class UiSettingsClient implements IUiSettingsClient {
private readonly type: UiSettingsServiceOptions['type'];
@ -47,6 +44,7 @@ export class UiSettingsClient implements IUiSettingsClient {
private readonly savedObjectsClient: UiSettingsServiceOptions['savedObjectsClient'];
private readonly overrides: NonNullable<UiSettingsServiceOptions['overrides']>;
private readonly defaults: NonNullable<UiSettingsServiceOptions['defaults']>;
private readonly defaultValues: Record<string, unknown>;
private readonly log: Logger;
private readonly cache: Cache;
@ -56,10 +54,15 @@ export class UiSettingsClient implements IUiSettingsClient {
this.id = id;
this.buildNum = buildNum;
this.savedObjectsClient = savedObjectsClient;
this.defaults = defaults;
this.overrides = overrides;
this.log = log;
this.cache = new Cache();
this.defaults = defaults;
const defaultValues: Record<string, unknown> = {};
Object.keys(this.defaults).forEach((key) => {
defaultValues[key] = this.defaults[key].value;
});
this.defaultValues = defaultValues;
}
getRegistered() {
@ -72,17 +75,21 @@ export class UiSettingsClient implements IUiSettingsClient {
async get<T = any>(key: string): Promise<T> {
const all = await this.getAll();
return all[key];
return all[key] as T;
}
async getAll<T = any>() {
const raw = await this.getRaw();
const result = { ...this.defaultValues };
return Object.keys(raw).reduce((all, key) => {
const item = raw[key];
all[key] = ('userValue' in item ? item.userValue : item.value) as T;
return all;
}, {} as Record<string, T>);
const userProvided = await this.getUserProvided();
Object.keys(userProvided).forEach((key) => {
if (userProvided[key].userValue !== undefined) {
result[key] = userProvided[key].userValue;
}
});
Object.freeze(result);
return result as Record<string, T>;
}
async getUserProvided<T = unknown>(): Promise<UserProvided<T>> {
@ -142,11 +149,6 @@ export class UiSettingsClient implements IUiSettingsClient {
}
}
private async getRaw(): Promise<UiSettingsRaw> {
const userProvided = await this.getUserProvided();
return defaultsDeep({}, userProvided, this.defaults);
}
private validateKey(key: string, value: unknown) {
const definition = this.defaults[key];
if (value === null || definition === undefined) return;

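// Usage sketch of the new getAll() contract (`dateFormat` is just an example
// key): defaults and user-provided values are merged once inside getAll(),
// and the returned record is frozen, so callers must clone before mutating.
async function readSettings(uiSettings: UiSettingsClient) {
  const all = await uiSettings.getAll();
  const dateFormat = all.dateFormat; // merged default or user-provided value
  // all.dateFormat = 'YYYY'; // TypeError in strict mode: the result is frozen
  const mutable = { ...all }; // shallow clone when local modification is needed
  return { dateFormat, mutable };
}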
View file

@ -20,9 +20,7 @@ interface Options {
type CircularDepList = Set<string>;
const allowedList: CircularDepList = new Set([
'x-pack/plugins/apm -> x-pack/plugins/infra',
'x-pack/plugins/lists -> x-pack/plugins/security_solution',
'x-pack/plugins/security -> x-pack/plugins/spaces',
]);
run(

View file

@ -27,7 +27,7 @@ const mockSource2 = { excludes: ['bar-*'] };
const indexPattern = ({
title: 'foo',
fields: [{ name: 'foo-bar' }, { name: 'field1' }, { name: 'field2' }],
fields: [{ name: 'foo-bar' }, { name: 'field1' }, { name: 'field2' }, { name: '_id' }],
getComputedFields,
getSourceFiltering: () => mockSource,
} as unknown) as IndexPattern;
@ -68,7 +68,7 @@ describe('SearchSource', () => {
beforeEach(() => {
const getConfigMock = jest
.fn()
.mockImplementation((param) => param === 'metaFields' && ['_type', '_source'])
.mockImplementation((param) => param === 'metaFields' && ['_type', '_source', '_id'])
.mockName('getConfig');
mockSearchMethod = jest
@ -458,6 +458,28 @@ describe('SearchSource', () => {
expect(request.fields).toEqual([{ field: 'field1' }, { field: 'field2' }]);
});
test('excludes metafields from the request', async () => {
searchSource.setField('index', ({
...indexPattern,
getComputedFields: () => ({
storedFields: [],
scriptFields: [],
docvalueFields: [],
}),
} as unknown) as IndexPattern);
searchSource.setField('fields', [{ field: '*', include_unmapped: 'true' }]);
let request = searchSource.getSearchRequestBody();
expect(request.fields).toEqual([{ field: 'field1' }, { field: 'field2' }]);
searchSource.setField('fields', ['foo-bar', 'foo--bar', 'field1', 'field2']);
request = searchSource.getSearchRequestBody();
expect(request.fields).toEqual([{ field: 'field1' }, { field: 'field2' }]);
searchSource.removeField('fields');
searchSource.setField('fieldsFromSource', ['foo-bar', 'foo--bar', 'field1', 'field2']);
request = searchSource.getSearchRequestBody();
expect(request.fields).toEqual([{ field: 'field1' }, { field: 'field2' }]);
});
test('returns all scripted fields when one fields entry is *', async () => {
searchSource.setField('index', ({
...indexPattern,

View file

@ -682,6 +682,7 @@ export class SearchSource {
searchRequest.body = searchRequest.body || {};
const { body, index, query, filters, highlightAll } = searchRequest;
searchRequest.indexType = this.getIndexType(index);
const metaFields = getConfig(UI_SETTINGS.META_FIELDS);
// get some special field types from the index pattern
const { docvalueFields, scriptFields, storedFields, runtimeFields } = index
@ -712,7 +713,7 @@ export class SearchSource {
body._source = sourceFilters;
}
const filter = fieldWildcardFilter(body._source.excludes, getConfig(UI_SETTINGS.META_FIELDS));
const filter = fieldWildcardFilter(body._source.excludes, metaFields);
// also apply filters to provided fields & default docvalueFields
body.fields = body.fields.filter((fld: SearchFieldValue) => filter(this.getFieldName(fld)));
fieldsFromSource = fieldsFromSource.filter((fld: SearchFieldValue) =>
@ -793,17 +794,21 @@ export class SearchSource {
const field2Name = this.getFieldName(fld2);
return field1Name === field2Name;
}
).map((fld: SearchFieldValue) => {
const fieldName = this.getFieldName(fld);
if (Object.keys(docvaluesIndex).includes(fieldName)) {
// either provide the field object from computed docvalues,
// or merge the user-provided field with the one in docvalues
return typeof fld === 'string'
? docvaluesIndex[fld]
: this.getFieldFromDocValueFieldsOrIndexPattern(docvaluesIndex, fld, index);
}
return fld;
});
)
.filter((fld: SearchFieldValue) => {
return !metaFields.includes(this.getFieldName(fld));
})
.map((fld: SearchFieldValue) => {
const fieldName = this.getFieldName(fld);
if (Object.keys(docvaluesIndex).includes(fieldName)) {
// either provide the field object from computed docvalues,
// or merge the user-provided field with the one in docvalues
return typeof fld === 'string'
? docvaluesIndex[fld]
: this.getFieldFromDocValueFieldsOrIndexPattern(docvaluesIndex, fld, index);
}
return fld;
});
}
} else {
body.fields = filteredDocvalueFields;

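// The new exclusion above, reduced to a self-contained sketch: after the
// wildcard filter runs, any requested field whose name appears in the
// `metaFields` ui setting is dropped before the request body is built.
// Field values may be plain strings or { field } objects, mirroring
// this.getFieldName.
const excludeMetaFields = (fields: Array<string | { field: string }>, metaFields: string[]) =>
  fields.filter((fld) => {
    const name = typeof fld === 'string' ? fld : fld.field;
    return !metaFields.includes(name);
  });

// excludeMetaFields(['_id', 'field1', { field: '_source' }], ['_type', '_source', '_id'])
// -> ['field1']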
View file

@ -0,0 +1,70 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import { registerTestBed, TestBed } from '@kbn/test/jest';
import { FilterEditor, Props } from '.';
import React from 'react';
jest.mock('@elastic/eui', () => {
const original = jest.requireActual('@elastic/eui');
return {
...original,
EuiCodeEditor: (props: any) => (
<input
data-test-subj={props['data-test-subj'] || 'mockEuiCodeEditor'}
value={props.value}
onChange={async (eve: any) => {
props.onChange(eve.target.value);
}}
/>
),
};
});
describe('<FilterEditor />', () => {
describe('writing query dsl', () => {
let testBed: TestBed;
beforeEach(async () => {
const defaultProps: Omit<Props, 'intl'> = {
filter: {
meta: {
type: 'phase',
} as any,
},
indexPatterns: [],
onCancel: jest.fn(),
onSubmit: jest.fn(),
};
testBed = await registerTestBed(FilterEditor, { defaultProps })();
});
it('requires a non-empty JSON object', async () => {
const { exists, find } = testBed;
expect(exists('customEditorInput')).toBe(true);
find('customEditorInput').simulate('change', {
target: { value: '{ }' },
});
expect(find('saveFilter').props().disabled).toBe(true);
find('customEditorInput').simulate('change', {
target: { value: '{' }, // bad JSON
});
expect(find('saveFilter').props().disabled).toBe(true);
find('customEditorInput').simulate('change', {
target: { value: '{ "something": "here" }' },
});
expect(find('saveFilter').props().disabled).toBe(false);
});
});
});

View file

@ -48,7 +48,7 @@ import {
getFilterParams,
} from '../../../../common';
interface Props {
export interface Props {
filter: Filter;
indexPatterns: IIndexPattern[];
onSubmit: (filter: Filter) => void;
@ -333,6 +333,7 @@ class FilterEditorUI extends Component<Props, State> {
mode="json"
width="100%"
height="250px"
data-test-subj="customEditorInput"
/>
</EuiFormRow>
);
@ -415,7 +416,8 @@ class FilterEditorUI extends Component<Props, State> {
if (isCustomEditorOpen) {
try {
return Boolean(JSON.parse(queryDsl));
const queryDslJson = JSON.parse(queryDsl);
return Object.keys(queryDslJson).length > 0;
} catch (e) {
return false;
}

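// The validation introduced above, extracted into a standalone sketch: the
// custom editor only enables "Save" for parseable, non-empty JSON objects,
// which is exactly what the new <FilterEditor /> test exercises.
const isQueryDslValid = (queryDsl: string): boolean => {
  try {
    const queryDslJson = JSON.parse(queryDsl);
    return Object.keys(queryDslJson).length > 0;
  } catch (e) {
    return false; // malformed JSON such as '{'
  }
};

// isQueryDslValid('{ }')                     -> false
// isQueryDslValid('{')                       -> false
// isQueryDslValid('{ "something": "here" }') -> true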
View file

@ -10,7 +10,6 @@ import { compact } from 'lodash';
import { InjectedIntl, injectI18n } from '@kbn/i18n/react';
import classNames from 'classnames';
import React, { Component } from 'react';
import ResizeObserver from 'resize-observer-polyfill';
import { get, isEqual } from 'lodash';
import { EuiIconProps } from '@elastic/eui';
@ -100,8 +99,6 @@ class SearchBarUI extends Component<SearchBarProps, State> {
private services = this.props.kibana.services;
private savedQueryService = this.services.data.query.savedQueries;
public filterBarRef: Element | null = null;
public filterBarWrapperRef: Element | null = null;
public static getDerivedStateFromProps(nextProps: SearchBarProps, prevState: State) {
if (isEqual(prevState.currentProps, nextProps)) {
@ -212,19 +209,6 @@ class SearchBarUI extends Component<SearchBarProps, State> {
);
}
public setFilterBarHeight = () => {
requestAnimationFrame(() => {
const height =
this.filterBarRef && this.state.isFiltersVisible ? this.filterBarRef.clientHeight : 0;
if (this.filterBarWrapperRef) {
this.filterBarWrapperRef.setAttribute('style', `height: ${height}px`);
}
});
};
// member-ordering rules conflict with use-before-declaration rules
public ro = new ResizeObserver(this.setFilterBarHeight);
public onSave = async (savedQueryMeta: SavedQueryMeta, saveAsNew = false) => {
if (!this.state.query) return;
@ -352,20 +336,6 @@ class SearchBarUI extends Component<SearchBarProps, State> {
}
};
public componentDidMount() {
if (this.filterBarRef) {
this.setFilterBarHeight();
this.ro.observe(this.filterBarRef);
}
}
public componentDidUpdate() {
if (this.filterBarRef) {
this.setFilterBarHeight();
this.ro.unobserve(this.filterBarRef);
}
}
public render() {
const savedQueryManagement = this.state.query && this.props.onClearSavedQuery && (
<SavedQueryManagementComponent
@ -422,26 +392,14 @@ class SearchBarUI extends Component<SearchBarProps, State> {
'globalFilterGroup__wrapper-isVisible': this.state.isFiltersVisible,
});
filterBar = (
<div
id="GlobalFilterGroup"
ref={(node) => {
this.filterBarWrapperRef = node;
}}
className={filterGroupClasses}
>
<div
ref={(node) => {
this.filterBarRef = node;
}}
>
<FilterBar
className="globalFilterGroup__filterBar"
filters={this.props.filters!}
onFiltersUpdated={this.props.onFiltersUpdated}
indexPatterns={this.props.indexPatterns!}
appName={this.services.appName}
/>
</div>
<div id="GlobalFilterGroup" className={filterGroupClasses}>
<FilterBar
className="globalFilterGroup__filterBar"
filters={this.props.filters!}
onFiltersUpdated={this.props.onFiltersUpdated}
indexPatterns={this.props.indexPatterns!}
appName={this.services.appName}
/>
</div>
);
}

View file

@ -33,8 +33,9 @@ export class FieldFormatsService {
return {
fieldFormatServiceFactory: async (uiSettings: IUiSettingsClient) => {
const fieldFormatsRegistry = new FieldFormatsRegistry();
const uiConfigs = await uiSettings.getAll();
const coreUiConfigs = await uiSettings.getAll();
const registeredUiSettings = uiSettings.getRegistered();
const uiConfigs = { ...coreUiConfigs };
Object.keys(registeredUiSettings).forEach((key) => {
if (has(uiConfigs, key) && registeredUiSettings[key].type === 'json') {

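// Why the spread was added above: uiSettings.getAll() now returns a frozen
// object (see the ui settings client change), so the registry must work on a
// mutable copy before overriding JSON-typed settings in place. A reduced
// sketch with an example key:
const frozen = Object.freeze({ 'format:defaultTypeMap': '{}' }); // stands in for coreUiConfigs
const copy: Record<string, unknown> = { ...frozen }; // mutable clone, like uiConfigs above
copy['format:defaultTypeMap'] = {}; // fine on the copy; writing to `frozen` throws in strict mode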
View file

@ -6,7 +6,7 @@
* Side Public License, v 1.
*/
import { AddConfigDeprecation, CoreSetup, CoreStart, PluginConfigDescriptor } from 'kibana/server';
import type { CoreSetup, CoreStart, PluginConfigDescriptor } from 'kibana/server';
import { get } from 'lodash';
import { configSchema, ConfigSchema } from '../config';
@ -19,16 +19,12 @@ export const config: PluginConfigDescriptor<ConfigSchema> = {
deprecations: ({ renameFromRoot }) => [
// TODO: Remove deprecation once defaultAppId is deleted
renameFromRoot('kibana.defaultAppId', 'kibana_legacy.defaultAppId', { silent: true }),
(
completeConfig: Record<string, any>,
rootPath: string,
addDeprecation: AddConfigDeprecation
) => {
(completeConfig, rootPath, addDeprecation) => {
if (
get(completeConfig, 'kibana.defaultAppId') === undefined &&
get(completeConfig, 'kibana_legacy.defaultAppId') === undefined
) {
return completeConfig;
return;
}
addDeprecation({
message: `kibana.defaultAppId is deprecated and will be removed in 8.0. Please use the \`defaultRoute\` advanced setting instead`,
@ -40,7 +36,6 @@ export const config: PluginConfigDescriptor<ConfigSchema> = {
],
},
});
return completeConfig;
},
],
};
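// The handler signature after this change, shown on a hypothetical setting
// (`my_plugin.removedSetting` does not exist): the parameters are now
// inferred from PluginConfigDescriptor, and returning void is sufficient;
// a handler only calls addDeprecation when it finds something to report.
// This fragment would slot into a `deprecations` array like the one above:
(completeConfig, rootPath, addDeprecation) => {
  if (get(completeConfig, 'my_plugin.removedSetting') === undefined) {
    return; // nothing to report
  }
  addDeprecation({
    message: `my_plugin.removedSetting is deprecated and will be ignored in 8.0.`,
    correctiveActions: {
      manualSteps: [`Remove "my_plugin.removedSetting" from kibana.yml`],
    },
  });
},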

View file

@ -8,8 +8,9 @@
import { i18n } from '@kbn/i18n';
export const USE_DATA_SERVICE = 'labs:canvas:useDataService';
export const TIME_TO_PRESENT = 'labs:presentation:timeToPresent';
export const LABS_PROJECT_PREFIX = 'labs:';
export const USE_DATA_SERVICE = `${LABS_PROJECT_PREFIX}canvas:useDataService` as const;
export const TIME_TO_PRESENT = `${LABS_PROJECT_PREFIX}presentation:timeToPresent` as const;
export const projectIDs = [TIME_TO_PRESENT, USE_DATA_SERVICE] as const;
export const environmentNames = ['kibana', 'browser', 'session'] as const;
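// What the `as const` assertions buy us: the id unions used elsewhere in this
// plugin can be derived from the arrays instead of being maintained by hand.
// Deriving them like this is an assumption about the real definitions:
export type ProjectID = typeof projectIDs[number];
// -> 'labs:presentation:timeToPresent' | 'labs:canvas:useDataService'
export type EnvironmentName = typeof environmentNames[number];
// -> 'kibana' | 'browser' | 'session'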

View file

@ -7,7 +7,6 @@
*/
import {
environmentNames,
EnvironmentName,
projectIDs,
projects,
@ -15,6 +14,7 @@ import {
Project,
getProjectIDs,
SolutionName,
LABS_PROJECT_PREFIX,
} from '../../../common';
import { PresentationUtilPluginStartDeps } from '../../types';
import { KibanaPluginServiceFactory } from '../create';
@ -31,6 +31,16 @@ export type LabsServiceFactory = KibanaPluginServiceFactory<
PresentationUtilPluginStartDeps
>;
const clearLabsFromStorage = (storage: Storage) => {
projectIDs.forEach((projectID) => storage.removeItem(projectID));
// This is a redundancy to catch any lab keys the loop above may have missed
// (e.g. projects that have since been removed from `projectIDs`).
// We could consider gathering telemetry to see how often this happens, or this may be unnecessary.
Object.keys(storage)
.filter((key) => key.startsWith(LABS_PROJECT_PREFIX))
.forEach((key) => storage.removeItem(key));
};
export const labsServiceFactory: LabsServiceFactory = ({ coreStart }) => {
const { uiSettings } = coreStart;
const localStorage = window.localStorage;
@ -75,17 +85,18 @@ export const labsServiceFactory: LabsServiceFactory = ({ coreStart }) => {
};
const reset = () => {
localStorage.clear();
sessionStorage.clear();
environmentNames.forEach((env) =>
projectIDs.forEach((id) => setProjectStatus(id, env, projects[id].isActive))
);
clearLabsFromStorage(localStorage);
clearLabsFromStorage(sessionStorage);
projectIDs.forEach((id) => setProjectStatus(id, 'kibana', projects[id].isActive));
};
const isProjectEnabled = (id: ProjectID) => getProject(id).status.isEnabled;
return {
getProjectIDs,
getProjects,
getProject,
isProjectEnabled,
reset,
setProjectStatus,
};
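// Net effect of the reset() change, sketched as hypothetical usage
// (`labsService` stands for an instance built by the factory above): only
// keys under the labs prefix are cleared, unrelated storage survives, and
// statuses are re-seeded for the 'kibana' environment only.
window.localStorage.setItem('unrelated:key', 'kept');
labsService.setProjectStatus(TIME_TO_PRESENT, 'browser', true);
labsService.reset();
window.localStorage.getItem('unrelated:key'); // -> 'kept', no blanket clear()
labsService.isProjectEnabled(TIME_TO_PRESENT); // back to the project default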

View file

@ -20,6 +20,7 @@ import {
} from '../../common';
export interface PresentationLabsService {
isProjectEnabled: (id: ProjectID) => boolean;
getProjectIDs: () => typeof projectIDs;
getProject: (id: ProjectID) => Project;
getProjects: (solutions?: SolutionName[]) => Record<ProjectID, Project>;

View file

@ -46,13 +46,17 @@ export const labsServiceFactory: LabsServiceFactory = () => {
};
const reset = () => {
// This is normally not ok, but it's our isolated Storybook instance.
storage.clear();
};
const isProjectEnabled = (id: ProjectID) => getProject(id).status.isEnabled;
return {
getProjectIDs,
getProjects,
getProject,
isProjectEnabled,
reset,
setProjectStatus,
};

View file

@ -64,11 +64,13 @@ export const labsServiceFactory: LabsServiceFactory = () => {
const setProjectStatus = (id: ProjectID, env: EnvironmentName, value: boolean) => {
statuses[id] = { ...statuses[id], [env]: value };
};
const isProjectEnabled = (id: ProjectID) => getProject(id).status.isEnabled;
return {
getProjectIDs,
getProject,
getProjects,
isProjectEnabled,
setProjectStatus,
reset: () => {
statuses = reset();

View file

@ -13,7 +13,8 @@ export default function ({ getService, getPageObjects }) {
const security = getService('security');
const PageObjects = getPageObjects(['common', 'home', 'settings']);
describe('test large number of fields', function () {
// FLAKY: https://github.com/elastic/kibana/issues/89031
describe.skip('test large number of fields', function () {
this.tags(['skipCloud']);
const EXPECTED_FIELD_COUNT = '10006';

View file

@ -14,7 +14,8 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
const PageObjects = getPageObjects(['common', 'visualize', 'visEditor', 'header', 'timePicker']);
const comboBox = getService('comboBox');
describe('dynamic options', () => {
// FLAKY: https://github.com/elastic/kibana/issues/98974
describe.skip('dynamic options', () => {
describe('without chained controls', () => {
beforeEach(async () => {
await PageObjects.common.navigateToApp('visualize');

Some files were not shown because too many files have changed in this diff