Mirror of https://github.com/elastic/kibana.git (synced 2025-04-23 17:28:26 -04:00)

Move KibanaMigrator into Server SavedObjectsService (#43433)

* Rename SavedObjectsService -> SavedObjectsLegacyService
* Expose legacy pluginSpecs from Core LegacyService
* Expose legacy uiExports from Core LegacyService
* Move kibana config to NP
* Expose pluginExtendedConfig from LegacyService
* Make KibanaMigrator NP compatible
* KibanaMigrator -> NP SavedObjectsService
* SavedObjectsService never stops retrying ES connection errors
* Move waiting for migrations to complete until after legacy service start
* Fix ESArchiver's KibanaMigrator
* Fix reload logging config tests
* Run migrations on SavedObjects start
* Fix env tests
* Fix and make legacy tests more robust/isolated
* Clean up code
* Fix invalid config test
* Fix SavedObject migrations logging test
* SavedObjectsService tests
* Lifecycle logging and improve getting the kibanaConfig instance
* Fix awaitMigration bug and test
* Fix typing error
* Review comments
* Remove unnecessary KibanaConfig class
* Move legacy plugin config extension, specs, and uiExports entirely into Core; uiExports, specs, disabledSpecs, and config now get injected into KbnServer
* Fix config deprecation test
* Use existing logger mock
* Create SavedObjectsConfig for migration config
* Define KibanaMigratorContract type
* KibanaMigratorContract -> IKibanaMigrator + docs improvements
* Fix esArchiver's KibanaMigrator
* Fix plugin generator integration test
* ConfigServiceContract -> IConfigService
* Address review comments
* Review nits
* Document migrations.skip config
* Review comments continued...
* awaitMigrations -> runMigrations
* Type improvements

Parent: 0262f6184c
Commit: 85c8232c0b

106 changed files with 1404 additions and 1119 deletions

@@ -1,12 +0,0 @@ (deleted API Documenter page: kibana-plugin-server.internalcorestart.md)
## InternalCoreStart interface

```typescript
export interface InternalCoreStart
```

Changes to the package index page (kibana-plugin-server.md):

@@ -21,8 +21,6 @@ The plugin integrates with the core system via lifecycle events: `setup`
| [ElasticsearchErrorHelpers](./kibana-plugin-server.elasticsearcherrorhelpers.md) | Helpers for working with errors returned from the Elasticsearch service. Since the internal data of errors are subject to change, consumers of the Elasticsearch service should always use these helpers to classify errors instead of checking error internals such as <code>body.error.header[WWW-Authenticate]</code> |
| [KibanaRequest](./kibana-plugin-server.kibanarequest.md) | Kibana specific abstraction for an incoming request. |
| [SavedObjectsErrorHelpers](./kibana-plugin-server.savedobjectserrorhelpers.md) | |
| [SavedObjectsSchema](./kibana-plugin-server.savedobjectsschema.md) | |
| [SavedObjectsSerializer](./kibana-plugin-server.savedobjectsserializer.md) | |
| [ScopedClusterClient](./kibana-plugin-server.scopedclusterclient.md) | Serves the same purpose as "normal" <code>ClusterClient</code> but exposes additional <code>callAsCurrentUser</code> method that doesn't use credentials of the Kibana internal user (as <code>callAsInternalUser</code> does) to request Elasticsearch API, but rather passes HTTP headers extracted from the current user request to the API |

## Enumerations

@@ -52,7 +50,6 @@ The plugin integrates with the core system via lifecycle events: `setup`
| [HttpServiceStart](./kibana-plugin-server.httpservicestart.md) | |
| [IContextContainer](./kibana-plugin-server.icontextcontainer.md) | An object that handles registration of context providers and configuring handlers with context. |
| [IKibanaSocket](./kibana-plugin-server.ikibanasocket.md) | A tiny abstraction for TCP socket. |
| [InternalCoreStart](./kibana-plugin-server.internalcorestart.md) | |
| [IRouter](./kibana-plugin-server.irouter.md) | Registers route handlers for specified resource path and method. |
| [KibanaRequestRoute](./kibana-plugin-server.kibanarequestroute.md) | Request specific route information exposed to a handler. |
| [LegacyRequest](./kibana-plugin-server.legacyrequest.md) | |

@@ -96,7 +93,6 @@ The plugin integrates with the core system via lifecycle events: `setup`
| [SavedObjectsMigrationVersion](./kibana-plugin-server.savedobjectsmigrationversion.md) | Information about the migrations that have been applied to this SavedObject. When Kibana starts up, KibanaMigrator detects outdated documents and migrates them based on this value. For each migration that has been applied, the plugin's name is used as a key and the latest migration version as the value. |
| [SavedObjectsRawDoc](./kibana-plugin-server.savedobjectsrawdoc.md) | A raw document as represented directly in the saved object index. |
| [SavedObjectsResolveImportErrorsOptions](./kibana-plugin-server.savedobjectsresolveimporterrorsoptions.md) | Options to control the "resolve import" operation. |
| [SavedObjectsService](./kibana-plugin-server.savedobjectsservice.md) | |
| [SavedObjectsUpdateOptions](./kibana-plugin-server.savedobjectsupdateoptions.md) | |
| [SavedObjectsUpdateResponse](./kibana-plugin-server.savedobjectsupdateresponse.md) | |
| [SessionStorage](./kibana-plugin-server.sessionstorage.md) | Provides an interface to store and retrieve data across requests. |

The remaining hunks delete the auto-generated API Documenter pages for SavedObjectsSchema, SavedObjectsSerializer, and the legacy SavedObjectsService interface. Each page consisted of breadcrumbs, the signature below, and a parameter table.

@@ -1,20 +0,0 @@ kibana-plugin-server.savedobjectsschema._constructor_.md: Constructs a new instance of the `SavedObjectsSchema` class
```typescript
constructor(schemaDefinition?: SavedObjectsSchemaDefinition);
```

@@ -1,22 +0,0 @@ kibana-plugin-server.savedobjectsschema.getconverttoaliasscript.md
```typescript
getConvertToAliasScript(type: string): string | undefined;
```

@@ -1,23 +0,0 @@ kibana-plugin-server.savedobjectsschema.getindexfortype.md
```typescript
getIndexForType(config: Config, type: string): string | undefined;
```

@@ -1,22 +0,0 @@ kibana-plugin-server.savedobjectsschema.ishiddentype.md
```typescript
isHiddenType(type: string): boolean;
```

@@ -1,22 +0,0 @@ kibana-plugin-server.savedobjectsschema.isnamespaceagnostic.md
```typescript
isNamespaceAgnostic(type: string): boolean;
```

@@ -1,27 +0,0 @@ kibana-plugin-server.savedobjectsschema.md: SavedObjectsSchema class
```typescript
export declare class SavedObjectsSchema
```
Constructors: (constructor)(schemaDefinition). Methods: getConvertToAliasScript(type), getIndexForType(config, type), isHiddenType(type), isNamespaceAgnostic(type).

@@ -1,20 +0,0 @@ kibana-plugin-server.savedobjectsserializer._constructor_.md: Constructs a new instance of the `SavedObjectsSerializer` class
```typescript
constructor(schema: SavedObjectsSchema);
```

@@ -1,26 +0,0 @@ kibana-plugin-server.savedobjectsserializer.generaterawid.md: Given a saved object type and id, generates the compound id that is stored in the raw document.
```typescript
generateRawId(namespace: string | undefined, type: string, id?: string): string;
```

@@ -1,24 +0,0 @@ kibana-plugin-server.savedobjectsserializer.israwsavedobject.md: Determines whether or not the raw document can be converted to a saved object.
```typescript
isRawSavedObject(rawDoc: RawDoc): any;
```

@@ -1,27 +0,0 @@ kibana-plugin-server.savedobjectsserializer.md: SavedObjectsSerializer class
```typescript
export declare class SavedObjectsSerializer
```
Methods: generateRawId(namespace, type, id) - generates the compound id stored in the raw document; isRawSavedObject(rawDoc) - determines whether the raw document can be converted to a saved object; rawToSavedObject(doc) - converts a document from the format stored in elasticsearch to the saved object client format; savedObjectToRaw(savedObj) - converts a document from the saved object client format to the format stored in elasticsearch.

@@ -1,24 +0,0 @@ kibana-plugin-server.savedobjectsserializer.rawtosavedobject.md: Converts a document from the format that is stored in elasticsearch to the saved object client format.
```typescript
rawToSavedObject(doc: RawDoc): SanitizedSavedObjectDoc;
```

@@ -1,24 +0,0 @@ kibana-plugin-server.savedobjectsserializer.savedobjecttoraw.md: Converts a document from the saved object client format to the format that is stored in elasticsearch.
```typescript
savedObjectToRaw(savedObj: SanitizedSavedObjectDoc): RawDoc;
```

@@ -1,11 +0,0 @@ kibana-plugin-server.savedobjectsservice.addscopedsavedobjectsclientwrapperfactory.md
```typescript
addScopedSavedObjectsClientWrapperFactory: ScopedSavedObjectsClientProvider<Request>['addClientWrapperFactory'];
```

@@ -1,22 +0,0 @@ kibana-plugin-server.savedobjectsservice.getsavedobjectsrepository.md
```typescript
getSavedObjectsRepository(...rest: any[]): any;
```

@@ -1,11 +0,0 @@ kibana-plugin-server.savedobjectsservice.getscopedsavedobjectsclient.md
```typescript
getScopedSavedObjectsClient: ScopedSavedObjectsClientProvider<Request>['getClient'];
```

@@ -1,16 +0,0 @@ kibana-plugin-server.savedobjectsservice.importexport.md
```typescript
importExport: {
  objectLimit: number;
  importSavedObjects(options: SavedObjectsImportOptions): Promise<SavedObjectsImportResponse>;
  resolveImportErrors(options: SavedObjectsResolveImportErrorsOptions): Promise<SavedObjectsImportResponse>;
  getSortedObjectsForExport(options: SavedObjectsExportOptions): Promise<Readable>;
};
```

@@ -1,30 +0,0 @@ kibana-plugin-server.savedobjectsservice.md: SavedObjectsService interface
```typescript
export interface SavedObjectsService<Request = any>
```
Properties: addScopedSavedObjectsClientWrapperFactory, getScopedSavedObjectsClient, importExport, SavedObjectsClient, schema, types. Methods: getSavedObjectsRepository(rest).

@@ -1,11 +0,0 @@ kibana-plugin-server.savedobjectsservice.savedobjectsclient.md
```typescript
SavedObjectsClient: typeof SavedObjectsClient;
```

@@ -1,11 +0,0 @@ kibana-plugin-server.savedobjectsservice.schema.md
```typescript
schema: SavedObjectsSchema;
```

@@ -1,11 +0,0 @@ kibana-plugin-server.savedobjectsservice.types.md
```typescript
types: string[];
```
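For orientation on the serializer APIs whose pages are deleted above, here is a hedged usage sketch built only from the documented signatures; the sample type, id, and import path are illustrative assumptions, not part of this commit.

```typescript
// Hedged sketch based on the signatures documented in the deleted pages above.
// The 'dashboard'/'123' values and the import path are illustrative assumptions.
import { SavedObjectsSchema, SavedObjectsSerializer } from 'src/core/server';

const schema = new SavedObjectsSchema();
const serializer = new SavedObjectsSerializer(schema);

// Compound id as stored in the raw saved objects index, e.g. 'dashboard:123'
// when no namespace is given.
const rawId = serializer.generateRawId(undefined, 'dashboard', '123');
```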

kibana.d.ts (vendored, 2 changes)

@@ -42,7 +42,7 @@ export namespace Legacy {
   export type Request = LegacyKibanaServer.Request;
   export type ResponseToolkit = LegacyKibanaServer.ResponseToolkit;
   export type SavedObjectsClient = LegacyKibanaServer.SavedObjectsClient;
-  export type SavedObjectsService = LegacyKibanaServer.SavedObjectsService;
+  export type SavedObjectsService = LegacyKibanaServer.SavedObjectsLegacyService;
   export type Server = LegacyKibanaServer.Server;

   export type InitPluginFunction = LegacyKibanaPluginSpec.InitPluginFunction;

@@ -88,7 +88,12 @@ describe(`running the plugin-generator via 'node scripts/generate_plugin.js plug
     await withProcRunner(log, async proc => {
       await proc.run('kibana', {
         cmd: 'yarn',
-        args: ['start', '--optimize.enabled=false', '--logging.json=false'],
+        args: [
+          'start',
+          '--optimize.enabled=false',
+          '--logging.json=false',
+          '--migrations.skip=true',
+        ],
         cwd: generatedPath,
         wait: /ispec_plugin.+Status changed from uninitialized to green - Ready/,
       });

@@ -25,9 +25,12 @@ const INVALID_CONFIG_PATH = resolve(__dirname, '__fixtures__/invalid_config.yml'

 describe('cli invalid config support', function () {
   it('exits with statusCode 64 and logs a single line when config is invalid', function () {
+    // Unused keys only throw once LegacyService starts, so disable migrations so that Core
+    // will finish the start lifecycle without a running Elasticsearch instance.
     const { error, status, stdout } = spawnSync(process.execPath, [
       'src/cli',
-      '--config', INVALID_CONFIG_PATH
+      '--config', INVALID_CONFIG_PATH,
+      '--migrations.skip=true'
     ], {
       cwd: ROOT_DIR
     });

@@ -83,7 +83,7 @@ describe('Server logging configuration', function () {
   it('should be reloadable via SIGHUP process signaling', async function () {
     expect.assertions(3);

-    child = spawn(process.execPath, [kibanaPath, '--config', testConfigFile, '--oss'], {
+    child = spawn(process.execPath, [kibanaPath, '--config', testConfigFile, '--oss', '--verbose'], {
       stdio: 'pipe'
     });

@@ -114,7 +114,9 @@ describe('Server logging configuration', function () {
           const data = JSON.parse(line);
           sawJson = true;

-          if (data.tags.includes('listening')) {
+          // We know the sighup handler will be registered before
+          // root.setup() is called
+          if (data.message.includes('setting up root')) {
             isJson = false;
             setLoggingJson(false);

@@ -128,10 +130,9 @@ describe('Server logging configuration', function () {
           // the switch yet, so we ignore before switching over.
         } else {
           // Kibana has successfully stopped logging json, so kill the server.
           sawNonjson = true;

-          child.kill();
+          child && child.kill();
+          child = undefined;
         }
       })

@@ -178,10 +179,11 @@ describe('Server logging configuration', function () {
       '--config', testConfigFile,
       '--logging.dest', logPath,
       '--plugins.initialize', 'false',
-      '--logging.json', 'false'
+      '--logging.json', 'false',
+      '--verbose'
     ]);

-    watchFileUntil(logPath, /http server running/, 2 * minute)
+    watchFileUntil(logPath, /starting server/, 2 * minute)
       .then(() => {
         // once the server is running, archive the log file and issue SIGHUP
         fs.renameSync(logPath, logPathArchived);

@@ -190,8 +192,8 @@ describe('Server logging configuration', function () {
       .then(() => watchFileUntil(logPath, /Reloaded logging configuration due to SIGHUP/, 10 * second))
       .then(contents => {
         const lines = contents.toString().split('\n');
-        // should be the first and only new line of the log file
-        expect(lines).toHaveLength(2);
+        // should be the first line of the new log file
         expect(lines[0]).toMatch(/Reloaded logging configuration due to SIGHUP/);
         child.kill();
       })
       .then(done, done);
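The hunks above show only the spawn call and the log assertions; the step that actually triggers the reload is sending SIGHUP to the spawned Kibana process. A minimal sketch of that step, which is assumed rather than shown in this diff:

```typescript
// Assumed harness step, not part of this diff: spawn Kibana, then signal it to
// reload its logging configuration.
import { spawn } from 'child_process';

declare const kibanaPath: string;      // as in the test above
declare const testConfigFile: string;  // as in the test above

const child = spawn(
  process.execPath,
  [kibanaPath, '--config', testConfigFile, '--oss', '--verbose'],
  { stdio: 'pipe' }
);

// ...once the log shows the server is up, ask it to reload logging config:
child.kill('SIGHUP');
```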

@@ -194,7 +194,6 @@ export default function (program) {
     .option('--plugins <path>', 'an alias for --plugin-dir', pluginDirCollector)
     .option('--optimize', 'Optimize and then stop the server');

   if (CAN_REPL) {
     command.option('--repl', 'Run the server with a REPL prompt and access to the server object');
   }

@@ -240,7 +239,7 @@ export default function (program) {
       repl: !!opts.repl,
       basePath: !!opts.basePath,
       optimize: !!opts.optimize,
-      oss: !!opts.oss,
+      oss: !!opts.oss
     },
     features: {
       isClusterModeSupported: CAN_CLUSTER,

@@ -70,6 +70,22 @@ export async function bootstrap({

   const root = new Root(rawConfigService.getConfig$(), env, onRootShutdown);

+  process.on('SIGHUP', () => {
+    const cliLogger = root.logger.get('cli');
+    cliLogger.info('Reloading logging configuration due to SIGHUP.', { tags: ['config'] });
+
+    try {
+      rawConfigService.reloadConfig();
+    } catch (err) {
+      return shutdown(err);
+    }
+
+    cliLogger.info('Reloaded logging configuration due to SIGHUP.', { tags: ['config'] });
+  });
+
+  process.on('SIGINT', () => shutdown());
+  process.on('SIGTERM', () => shutdown());
+
   function shutdown(reason?: Error) {
     rawConfigService.stop();
     return root.shutdown(reason);

@@ -87,22 +103,6 @@ export async function bootstrap({
     cliLogger.info('Optimization done.');
     await shutdown();
   }
-
-  process.on('SIGHUP', () => {
-    const cliLogger = root.logger.get('cli');
-    cliLogger.info('Reloading logging configuration due to SIGHUP.', { tags: ['config'] });
-
-    try {
-      rawConfigService.reloadConfig();
-    } catch (err) {
-      return shutdown(err);
-    }
-
-    cliLogger.info('Reloaded logging configuration due to SIGHUP.', { tags: ['config'] });
-  });
-
-  process.on('SIGINT', () => shutdown());
-  process.on('SIGTERM', () => shutdown());
 }

 function onRootShutdown(reason?: any) {

@@ -20,11 +20,13 @@
 import { BehaviorSubject } from 'rxjs';
 import { ObjectToConfigAdapter } from './object_to_config_adapter';

-import { ConfigService } from './config_service';
+import { IConfigService } from './config_service';

-type ConfigServiceContract = PublicMethodsOf<ConfigService>;
-const createConfigServiceMock = () => {
-  const mocked: jest.Mocked<ConfigServiceContract> = {
+const createConfigServiceMock = ({
+  atPath = {},
+  getConfig$ = {},
+}: { atPath?: Record<string, any>; getConfig$?: Record<string, any> } = {}) => {
+  const mocked: jest.Mocked<IConfigService> = {
     atPath: jest.fn(),
     getConfig$: jest.fn(),
     optionalAtPath: jest.fn(),

@@ -33,8 +35,8 @@ const createConfigServiceMock = () => {
     isEnabledAtPath: jest.fn(),
     setSchema: jest.fn(),
   };
-  mocked.atPath.mockReturnValue(new BehaviorSubject({}));
-  mocked.getConfig$.mockReturnValue(new BehaviorSubject(new ObjectToConfigAdapter({})));
+  mocked.atPath.mockReturnValue(new BehaviorSubject(atPath));
+  mocked.getConfig$.mockReturnValue(new BehaviorSubject(new ObjectToConfigAdapter(getConfig$)));
  mocked.getUsedPaths.mockResolvedValue([]);
  mocked.getUnusedPaths.mockResolvedValue([]);
  mocked.isEnabledAtPath.mockResolvedValue(true);
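The mock factory now accepts seed values, so a test can control what `atPath()` and `getConfig$()` emit. A minimal usage sketch under that assumption; the seed values and import path here are hypothetical, not taken from this commit:

```typescript
// Hypothetical test snippet: seed the config service mock so code under test reading
// config gets deterministic values. create() wraps the seeds in BehaviorSubjects,
// exactly as the diff above shows.
import { configServiceMock } from './config/config_service.mock';

const configService = configServiceMock.create({
  atPath: { skip: true },                       // emitted by configService.atPath(...)
  getConfig$: { kibana: { index: '.kibana' } }, // wrapped in an ObjectToConfigAdapter
});

// Every call to atPath() now returns a BehaviorSubject of `{ skip: true }`,
// regardless of the path argument, which is usually enough for unit tests.
```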

@@ -26,6 +26,9 @@ import { Config, ConfigPath, Env } from '.';
 import { Logger, LoggerFactory } from '../logging';
 import { hasConfigPathIntersection } from './config';

+/** @internal */
+export type IConfigService = PublicMethodsOf<ConfigService>;
+
 /** @internal */
 export class ConfigService {
   private readonly log: Logger;

@@ -17,7 +17,7 @@
  * under the License.
  */

-export { ConfigService } from './config_service';
+export { ConfigService, IConfigService } from './config_service';
 export { RawConfigService } from './raw_config_service';
 export { Config, ConfigPath, isConfigPath, hasConfigPathIntersection } from './config';
 export { ObjectToConfigAdapter } from './object_to_config_adapter';

src/core/server/core_context.mock.ts (new file, 41 lines)

@@ -0,0 +1,41 @@
/*
 * Licensed to Elasticsearch B.V. under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch B.V. licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import { CoreContext } from './core_context';
import { getEnvOptions } from './config/__mocks__/env';
import { Env, IConfigService } from './config';
import { loggingServiceMock } from './logging/logging_service.mock';
import { configServiceMock } from './config/config_service.mock';
import { ILoggingService } from './logging';

function create({
  env = Env.createDefault(getEnvOptions()),
  logger = loggingServiceMock.create(),
  configService = configServiceMock.create(),
}: {
  env?: Env;
  logger?: jest.Mocked<ILoggingService>;
  configService?: jest.Mocked<IConfigService>;
} = {}): CoreContext {
  return { coreId: Symbol(), env, logger, configService };
}

export const mockCoreContext = {
  create,
};

@@ -17,7 +17,7 @@
  * under the License.
  */

-import { ConfigService, Env } from './config';
+import { IConfigService, Env } from './config';
 import { LoggerFactory } from './logging';

 /** @internal */

@@ -31,6 +31,6 @@ export type CoreId = symbol;
 export interface CoreContext {
   coreId: CoreId;
   env: Env;
-  configService: ConfigService;
+  configService: IConfigService;
   logger: LoggerFactory;
 }

src/core/server/elasticsearch/retry_call_cluster.test.ts (new file, 58 lines)

@@ -0,0 +1,58 @@
/* ... Apache 2.0 license header, identical to the one above ... */

import elasticsearch from 'elasticsearch';
import { retryCallCluster } from './retry_call_cluster';

describe('retryCallCluster', () => {
  it('retries ES API calls that rejects with NoConnection errors', () => {
    expect.assertions(1);
    const callEsApi = jest.fn();
    let i = 0;
    callEsApi.mockImplementation(() => {
      return i++ <= 2
        ? Promise.reject(new elasticsearch.errors.NoConnections())
        : Promise.resolve('success');
    });
    const retried = retryCallCluster(callEsApi);
    return expect(retried('endpoint')).resolves.toMatchInlineSnapshot(`"success"`);
  });

  it('rejects when ES API calls reject with other errors', async () => {
    expect.assertions(3);
    const callEsApi = jest.fn();
    let i = 0;
    callEsApi.mockImplementation(() => {
      i++;

      return i === 1
        ? Promise.reject(new Error('unknown error'))
        : i === 2
        ? Promise.resolve('success')
        : i === 3 || i === 4
        ? Promise.reject(new elasticsearch.errors.NoConnections())
        : i === 5
        ? Promise.reject(new Error('unknown error'))
        : null;
    });
    const retried = retryCallCluster(callEsApi);
    await expect(retried('endpoint')).rejects.toMatchInlineSnapshot(`[Error: unknown error]`);
    await expect(retried('endpoint')).resolves.toMatchInlineSnapshot(`"success"`);
    return expect(retried('endpoint')).rejects.toMatchInlineSnapshot(`[Error: unknown error]`);
  });
});

src/core/server/elasticsearch/retry_call_cluster.ts (new file, 58 lines)

@@ -0,0 +1,58 @@
/* ... Apache 2.0 license header, identical to the one above ... */

import { retryWhen, concatMap } from 'rxjs/operators';
import { defer, throwError, iif, timer } from 'rxjs';
import elasticsearch from 'elasticsearch';
import { CallAPIOptions } from '.';

/**
 * Retries the provided Elasticsearch API call when a `NoConnections` error is
 * encountered. The API call will be retried once a second, indefinitely, until
 * a successful response or a different error is received.
 *
 * @param apiCaller
 */

// TODO: Replace with APICaller from './scoped_cluster_client' once #46668 is merged
export function retryCallCluster(
  apiCaller: (
    endpoint: string,
    clientParams: Record<string, any>,
    options?: CallAPIOptions
  ) => Promise<any>
) {
  return (endpoint: string, clientParams: Record<string, any> = {}, options?: CallAPIOptions) => {
    return defer(() => apiCaller(endpoint, clientParams, options))
      .pipe(
        retryWhen(errors =>
          errors.pipe(
            concatMap((error, i) =>
              iif(
                () => error instanceof elasticsearch.errors.NoConnections,
                timer(1000),
                throwError(error)
              )
            )
          )
        )
      )
      .toPromise();
  };
}
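This helper is what lets the new SavedObjectsService keep retrying through Elasticsearch connection outages (per the commit message, it "never stops retrying ES connection errors"). A hedged usage sketch; the `callAsInternalUser` caller and the endpoint name below are assumptions, only `retryCallCluster` itself comes from this commit:

```typescript
// Hypothetical usage: wrap an admin-cluster API caller so callers retry once a second
// while Elasticsearch is unreachable, and fail fast on any other error.
import { retryCallCluster } from './retry_call_cluster';

declare const callAsInternalUser: (
  endpoint: string,
  clientParams: Record<string, any>
) => Promise<any>;

const retryingCall = retryCallCluster(callAsInternalUser);

// Resolves once Elasticsearch becomes reachable; rejects immediately on other errors.
retryingCall('cat.health', {}).then(health => console.log(health));
```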

@@ -35,7 +35,11 @@ jest.doMock('./elasticsearch/elasticsearch_service', () => ({
   ElasticsearchService: jest.fn(() => mockElasticsearchService),
 }));

-export const mockLegacyService = { setup: jest.fn(), start: jest.fn(), stop: jest.fn() };
+export const mockLegacyService = {
+  setup: jest.fn().mockReturnValue({ uiExports: {} }),
+  start: jest.fn(),
+  stop: jest.fn(),
+};
 jest.mock('./legacy/legacy_service', () => ({
   LegacyService: jest.fn(() => mockLegacyService),
 }));

@@ -45,3 +49,9 @@ export const mockConfigService = configServiceMock.create();
 jest.doMock('./config/config_service', () => ({
   ConfigService: jest.fn(() => mockConfigService),
 }));
+
+import { savedObjectsServiceMock } from './saved_objects/saved_objects_service.mock';
+export const mockSavedObjectsService = savedObjectsServiceMock.create();
+jest.doMock('./saved_objects/saved_objects_service', () => ({
+  SavedObjectsService: jest.fn(() => mockSavedObjectsService),
+}));

@@ -55,6 +55,7 @@ import {
 } from './http';
 import { PluginsServiceSetup, PluginsServiceStart, PluginOpaqueId } from './plugins';
 import { ContextSetup } from './context';
+import { SavedObjectsServiceStart } from './saved_objects';

 export { bootstrap } from './bootstrap';
 export { ConfigPath, ConfigService } from './config';

@@ -152,7 +153,7 @@ export {
   SavedObjectsResolveImportErrorsOptions,
   SavedObjectsSchema,
   SavedObjectsSerializer,
-  SavedObjectsService,
+  SavedObjectsLegacyService,
   SavedObjectsUpdateOptions,
   SavedObjectsUpdateResponse,
 } from './saved_objects';

@@ -232,9 +233,11 @@ export interface InternalCoreSetup {
 }

 /**
- * @public
+ * @internal
 */
-export interface InternalCoreStart {} // eslint-disable-line @typescript-eslint/no-empty-interface
+export interface InternalCoreStart {
+  savedObjects: SavedObjectsServiceStart;
+}

 export {
   ContextSetup,

src/core/server/kibana_config.ts (new file, 34 lines)

@@ -0,0 +1,34 @@
/* ... Apache 2.0 license header, identical to the one above ... */

import { schema, TypeOf } from '@kbn/config-schema';

export type KibanaConfigType = TypeOf<typeof config.schema>;

export const config = {
  path: 'kibana',
  schema: schema.object({
    enabled: schema.boolean({ defaultValue: true }),
    defaultAppId: schema.string({ defaultValue: 'home' }),
    index: schema.string({ defaultValue: '.kibana' }),
    disableWelcomeScreen: schema.boolean({ defaultValue: false }),
    autocompleteTerminateAfter: schema.duration({ defaultValue: 100000 }),
    autocompleteTimeout: schema.duration({ defaultValue: 1000 }),
  }),
};
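A sketch of how a core service could read the `kibana.*` config registered above, plus a guess at the shape of the migrations config the commit message mentions ("Create SavedObjectsConfig for migration config", "Document migrations.skip config"). The `migrations` path and its fields are assumptions inferred from the `--migrations.skip=true` flag used in the tests earlier, not taken from this diff:

```typescript
// Sketch only, under the assumptions stated above.
import { first } from 'rxjs/operators';
import { schema } from '@kbn/config-schema';
import { KibanaConfigType } from './kibana_config';
import { IConfigService } from './config';

// Assumed shape of the migrations config behind `--migrations.skip=true`.
export const savedObjectsMigrationConfig = {
  path: 'migrations',
  schema: schema.object({
    skip: schema.boolean({ defaultValue: false }),
  }),
};

export async function readKibanaConfig(configService: IConfigService) {
  const kibanaConfig = await configService
    .atPath<KibanaConfigType>('kibana')
    .pipe(first())
    .toPromise();
  // kibanaConfig.index is the saved objects index, '.kibana' by default.
  return kibanaConfig;
}
```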

@@ -1,53 +1,5 @@
 // Jest Snapshot v1, https://goo.gl/fbAQLP

-exports[`once LegacyService is set up in \`devClusterMaster\` mode creates ClusterManager with base path proxy.: cli args. cluster manager with base path proxy 1`] = `
-Object {
-  "basePath": true,
-  "dev": true,
-  "open": false,
-  "optimize": false,
-  "oss": false,
-  "quiet": true,
-  "repl": false,
-  "silent": false,
-  "watch": false,
-}
-`;
-
-exports[`once LegacyService is set up in \`devClusterMaster\` mode creates ClusterManager with base path proxy.: config. cluster manager with base path proxy 1`] = `
-Object {
-  "server": Object {
-    "autoListen": true,
-  },
-}
-`;
-
-exports[`once LegacyService is set up in \`devClusterMaster\` mode creates ClusterManager without base path proxy.: cluster manager without base path proxy 1`] = `
-Array [
-  Array [
-    Object {
-      "basePath": false,
-      "dev": true,
-      "open": false,
-      "optimize": false,
-      "oss": false,
-      "quiet": false,
-      "repl": false,
-      "silent": true,
-      "watch": false,
-    },
-    Object {
-      "server": Object {
-        "autoListen": true,
-      },
-    },
-    undefined,
-  ],
-]
-`;
-
 exports[`once LegacyService is set up with connection info creates legacy kbnServer and closes it if \`listen\` fails. 1`] = `"something failed"`;

 exports[`once LegacyService is set up with connection info reconfigures logging configuration if new config is received.: applyLoggingConfiguration params 1`] = `
 Array [
   Array [

@@ -60,8 +12,6 @@
 ]
 `;

-exports[`once LegacyService is set up with connection info throws if fails to retrieve initial config. 1`] = `"something failed"`;
-
 exports[`once LegacyService is set up without connection info reconfigures logging configuration if new config is received.: applyLoggingConfiguration params 1`] = `
 Array [
   Array [

@@ -21,6 +21,14 @@ import { BehaviorSubject, throwError } from 'rxjs';

 jest.mock('../../../legacy/server/kbn_server');
 jest.mock('../../../cli/cluster/cluster_manager');
+jest.mock('./plugins/find_legacy_plugin_specs.ts', () => ({
+  findLegacyPluginSpecs: (settings: Record<string, any>) => ({
+    pluginSpecs: [],
+    pluginExtendedConfig: settings,
+    disabledPluginSpecs: [],
+    uiExports: [],
+  }),
+}));

 import { LegacyService } from '.';
 // @ts-ignore: implicit any for JS file

@@ -36,6 +44,8 @@ import { HttpServiceStart, BasePathProxyServer } from '../http';
 import { loggingServiceMock } from '../logging/logging_service.mock';
 import { DiscoveredPlugin, DiscoveredPluginInternal } from '../plugins';
 import { PluginsServiceSetup, PluginsServiceStart } from '../plugins/plugins_service';
+import { SavedObjectsServiceStart } from 'src/core/server/saved_objects/saved_objects_service';
+import { KibanaMigrator } from '../saved_objects/migrations';

 const MockKbnServer: jest.Mock<KbnServer> = KbnServer as any;

@@ -55,6 +65,7 @@ let setupDeps: {
 let startDeps: {
   core: {
     http: HttpServiceStart;
+    savedObjects: SavedObjectsServiceStart;
     plugins: PluginsServiceStart;
   };
   plugins: Record<string, unknown>;

@@ -95,6 +106,9 @@ beforeEach(() => {
       http: {
         isListening: () => true,
       },
+      savedObjects: {
+        migrator: {} as KibanaMigrator,
+      },
       plugins: { contracts: new Map() },
     },
     plugins: {},

@@ -130,13 +144,15 @@ describe('once LegacyService is set up with connection info', () => {

     expect(MockKbnServer).toHaveBeenCalledTimes(1);
     expect(MockKbnServer).toHaveBeenCalledWith(
+      { server: { autoListen: true } },
       { server: { autoListen: true } },
       {
         setupDeps,
         startDeps,
         handledConfigPaths: ['foo.bar'],
         logger,
-      }
+      },
+      { disabledPluginSpecs: [], pluginSpecs: [], uiExports: [] }
     );

     const [mockKbnServer] = MockKbnServer.mock.instances;

@@ -158,13 +174,15 @@ describe('once LegacyService is set up with connection info', () => {

     expect(MockKbnServer).toHaveBeenCalledTimes(1);
     expect(MockKbnServer).toHaveBeenCalledWith(
+      { server: { autoListen: true } },
       { server: { autoListen: true } },
       {
         setupDeps,
         startDeps,
         handledConfigPaths: ['foo.bar'],
         logger,
-      }
+      },
+      { disabledPluginSpecs: [], pluginSpecs: [], uiExports: [] }
     );

     const [mockKbnServer] = MockKbnServer.mock.instances;

@@ -184,7 +202,9 @@ describe('once LegacyService is set up with connection info', () => {
     });

     await legacyService.setup(setupDeps);
-    await expect(legacyService.start(startDeps)).rejects.toThrowErrorMatchingSnapshot();
+    await expect(legacyService.start(startDeps)).rejects.toThrowErrorMatchingInlineSnapshot(
+      `"something failed"`
+    );

     const [mockKbnServer] = MockKbnServer.mock.instances;
     expect(mockKbnServer.listen).toHaveBeenCalled();

@@ -200,8 +220,12 @@ describe('once LegacyService is set up with connection info', () => {
       configService: configService as any,
     });

-    await legacyService.setup(setupDeps);
-    await expect(legacyService.start(startDeps)).rejects.toThrowErrorMatchingSnapshot();
+    await expect(legacyService.setup(setupDeps)).rejects.toThrowErrorMatchingInlineSnapshot(
+      `"something failed"`
+    );
+    await expect(legacyService.start(startDeps)).rejects.toThrowErrorMatchingInlineSnapshot(
+      `"Legacy service is not setup yet."`
+    );

     expect(MockKbnServer).not.toHaveBeenCalled();
     expect(MockClusterManager).not.toHaveBeenCalled();

@@ -285,13 +309,15 @@ describe('once LegacyService is set up without connection info', () => {
   test('creates legacy kbnServer with `autoListen: false`.', () => {
     expect(MockKbnServer).toHaveBeenCalledTimes(1);
     expect(MockKbnServer).toHaveBeenCalledWith(
+      { server: { autoListen: true } },
       { server: { autoListen: true } },
       {
         setupDeps,
         startDeps,
         handledConfigPaths: ['foo.bar'],
         logger,
-      }
+      },
+      { disabledPluginSpecs: [], pluginSpecs: [], uiExports: [] }
     );
   });

@@ -332,9 +358,9 @@ describe('once LegacyService is set up in `devClusterMaster` mode', () => {
     await devClusterLegacyService.setup(setupDeps);
     await devClusterLegacyService.start(startDeps);

-    expect(MockClusterManager.create.mock.calls).toMatchSnapshot(
-      'cluster manager without base path proxy'
-    );
+    const [[cliArgs, , basePathProxy]] = MockClusterManager.create.mock.calls;
+    expect(cliArgs.basePath).toBe(false);
+    expect(basePathProxy).not.toBeDefined();
   });

   test('creates ClusterManager with base path proxy.', async () => {

@@ -355,9 +381,8 @@ describe('once LegacyService is set up in `devClusterMaster` mode', () => {

     expect(MockClusterManager.create).toBeCalledTimes(1);

-    const [[cliArgs, config, basePathProxy]] = MockClusterManager.create.mock.calls;
-    expect(cliArgs).toMatchSnapshot('cli args. cluster manager with base path proxy');
-    expect(config).toMatchSnapshot('config. cluster manager with base path proxy');
+    const [[cliArgs, , basePathProxy]] = MockClusterManager.create.mock.calls;
+    expect(cliArgs.basePath).toEqual(true);
     expect(basePathProxy).toBeInstanceOf(BasePathProxyServer);
   });
 });
@ -18,15 +18,18 @@
|
|||
*/
|
||||
|
||||
import { combineLatest, ConnectableObservable, EMPTY, Observable, Subscription } from 'rxjs';
|
||||
import { first, map, mergeMap, publishReplay, tap } from 'rxjs/operators';
|
||||
import { first, map, publishReplay, tap } from 'rxjs/operators';
|
||||
import { CoreService } from '../../types';
|
||||
import { InternalCoreSetup, InternalCoreStart } from '../../server';
|
||||
import { InternalCoreSetup, InternalCoreStart } from '../';
|
||||
import { SavedObjectsLegacyUiExports } from '../types';
|
||||
import { Config } from '../config';
|
||||
import { CoreContext } from '../core_context';
|
||||
import { DevConfig, DevConfigType } from '../dev';
|
||||
import { BasePathProxyServer, HttpConfig, HttpConfigType } from '../http';
|
||||
import { Logger } from '../logging';
|
||||
import { PluginsServiceSetup, PluginsServiceStart } from '../plugins';
|
||||
import { findLegacyPluginSpecs } from './plugins';
|
||||
import { LegacyPluginSpec } from './plugins/find_legacy_plugin_specs';
|
||||
|
||||
interface LegacyKbnServer {
|
||||
applyLoggingConfiguration: (settings: Readonly<Record<string, any>>) => void;
|
||||
|
@ -70,13 +73,30 @@ export interface LegacyServiceStartDeps {
|
|||
}
|
||||
|
||||
/** @internal */
|
||||
export class LegacyService implements CoreService {
|
||||
export interface LegacyServiceSetup {
|
||||
pluginSpecs: LegacyPluginSpec[];
|
||||
uiExports: SavedObjectsLegacyUiExports;
|
||||
pluginExtendedConfig: Config;
|
||||
}
|
||||
|
||||
/** @internal */
|
||||
export class LegacyService implements CoreService<LegacyServiceSetup> {
|
||||
private readonly log: Logger;
|
||||
private readonly devConfig$: Observable<DevConfig>;
|
||||
private readonly httpConfig$: Observable<HttpConfig>;
|
||||
private kbnServer?: LegacyKbnServer;
|
||||
private configSubscription?: Subscription;
|
||||
private setupDeps?: LegacyServiceSetupDeps;
|
||||
private update$: ConnectableObservable<Config> | undefined;
|
||||
private legacyRawConfig: Config | undefined;
|
||||
private legacyPlugins:
|
||||
| {
|
||||
pluginSpecs: LegacyPluginSpec[];
|
||||
disabledPluginSpecs: LegacyPluginSpec[];
|
||||
uiExports: SavedObjectsLegacyUiExports;
|
||||
}
|
||||
| undefined;
|
||||
private settings: Record<string, any> | undefined;
|
||||
|
||||
constructor(private readonly coreContext: CoreContext) {
|
||||
this.log = coreContext.logger.get('legacy-service');
|
||||
|
@ -87,17 +107,11 @@ export class LegacyService implements CoreService {
|
|||
.atPath<HttpConfigType>('server')
|
||||
.pipe(map(rawConfig => new HttpConfig(rawConfig, coreContext.env)));
|
||||
}
|
||||
|
||||
public async setup(setupDeps: LegacyServiceSetupDeps) {
|
||||
this.setupDeps = setupDeps;
|
||||
}
|
||||
public async start(startDeps: LegacyServiceStartDeps) {
|
||||
const { setupDeps } = this;
|
||||
if (!setupDeps) {
|
||||
throw new Error('Legacy service is not setup yet.');
|
||||
}
|
||||
this.log.debug('starting legacy service');
|
||||
|
||||
const update$ = this.coreContext.configService.getConfig$().pipe(
|
||||
this.update$ = this.coreContext.configService.getConfig$().pipe(
|
||||
tap(config => {
|
||||
if (this.kbnServer !== undefined) {
|
||||
this.kbnServer.applyLoggingConfiguration(config.toRaw());
|
||||
|
@ -107,21 +121,66 @@ export class LegacyService implements CoreService {
|
|||
publishReplay(1)
|
||||
) as ConnectableObservable<Config>;
|
||||
|
||||
this.configSubscription = update$.connect();
|
||||
this.configSubscription = this.update$.connect();
|
||||
|
||||
// Receive initial config and create kbnServer/ClusterManager.
|
||||
this.kbnServer = await update$
|
||||
this.settings = await this.update$
|
||||
.pipe(
|
||||
first(),
|
||||
mergeMap(async config => {
|
||||
if (this.coreContext.env.isDevClusterMaster) {
|
||||
await this.createClusterManager(config);
|
||||
return;
|
||||
}
|
||||
return await this.createKbnServer(config, setupDeps, startDeps);
|
||||
})
|
||||
map(config => getLegacyRawConfig(config))
|
||||
)
|
||||
.toPromise();
|
||||
|
||||
const {
|
||||
pluginSpecs,
|
||||
pluginExtendedConfig,
|
||||
disabledPluginSpecs,
|
||||
uiExports,
|
||||
} = await findLegacyPluginSpecs(this.settings, this.coreContext.logger);
|
||||
|
||||
this.legacyPlugins = {
|
||||
pluginSpecs,
|
||||
disabledPluginSpecs,
|
||||
uiExports,
|
||||
};
|
||||
|
||||
this.legacyRawConfig = pluginExtendedConfig;
|
||||
|
||||
// check for unknown uiExport types
|
||||
if (uiExports.unknown && uiExports.unknown.length > 0) {
|
||||
throw new Error(
|
||||
`Unknown uiExport types: ${uiExports.unknown
|
||||
.map(({ pluginSpec, type }) => `${type} from ${pluginSpec.getId()}`)
|
||||
.join(', ')}`
|
||||
);
|
||||
}
|
||||
|
||||
return {
|
||||
pluginSpecs,
|
||||
uiExports,
|
||||
pluginExtendedConfig,
|
||||
};
|
||||
}
|
||||
|
||||
public async start(startDeps: LegacyServiceStartDeps) {
|
||||
const { setupDeps } = this;
|
||||
if (!setupDeps || !this.legacyRawConfig || !this.legacyPlugins || !this.settings) {
|
||||
throw new Error('Legacy service is not setup yet.');
|
||||
}
|
||||
this.log.debug('starting legacy service');
|
||||
|
||||
// Receive initial config and create kbnServer/ClusterManager.
|
||||
|
||||
if (this.coreContext.env.isDevClusterMaster) {
|
||||
await this.createClusterManager(this.legacyRawConfig);
|
||||
} else {
|
||||
this.kbnServer = await this.createKbnServer(
|
||||
this.settings,
|
||||
this.legacyRawConfig,
|
||||
setupDeps,
|
||||
startDeps,
|
||||
this.legacyPlugins
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
public async stop() {
|
||||
|
@ -151,24 +210,35 @@ export class LegacyService implements CoreService {
|
|||
|
||||
require('../../../cli/cluster/cluster_manager').create(
|
||||
this.coreContext.env.cliArgs,
|
||||
getLegacyRawConfig(config),
|
||||
config,
|
||||
await basePathProxy$.toPromise()
|
||||
);
|
||||
}
|
||||
|
||||
private async createKbnServer(
|
||||
settings: Record<string, any>,
|
||||
config: Config,
|
||||
setupDeps: LegacyServiceSetupDeps,
|
||||
startDeps: LegacyServiceStartDeps
|
||||
startDeps: LegacyServiceStartDeps,
|
||||
legacyPlugins: {
|
||||
pluginSpecs: LegacyPluginSpec[];
|
||||
disabledPluginSpecs: LegacyPluginSpec[];
|
||||
uiExports: SavedObjectsLegacyUiExports;
|
||||
}
|
||||
) {
|
||||
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||
const KbnServer = require('../../../legacy/server/kbn_server');
|
||||
const kbnServer: LegacyKbnServer = new KbnServer(getLegacyRawConfig(config), {
|
||||
handledConfigPaths: await this.coreContext.configService.getUsedPaths(),
|
||||
setupDeps,
|
||||
startDeps,
|
||||
logger: this.coreContext.logger,
|
||||
});
|
||||
const kbnServer: LegacyKbnServer = new KbnServer(
|
||||
settings,
|
||||
config,
|
||||
{
|
||||
handledConfigPaths: await this.coreContext.configService.getUsedPaths(),
|
||||
setupDeps,
|
||||
startDeps,
|
||||
logger: this.coreContext.logger,
|
||||
},
|
||||
legacyPlugins
|
||||
);
|
||||
|
||||
// The kbnWorkerType check is necessary to prevent the repl
|
||||
// from being started multiple times in different processes.
|
||||
src/core/server/legacy/plugins/find_legacy_plugin_specs.ts (new file, 136 lines)
@ -0,0 +1,136 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import { Observable, merge, forkJoin } from 'rxjs';
|
||||
import { toArray, tap, distinct, map } from 'rxjs/operators';
|
||||
import {
|
||||
findPluginSpecs,
|
||||
defaultConfig,
|
||||
// @ts-ignore
|
||||
} from '../../../../legacy/plugin_discovery/find_plugin_specs.js';
|
||||
import { LoggerFactory } from '../../logging';
|
||||
import { collectUiExports as collectLegacyUiExports } from '../../../../legacy/ui/ui_exports/collect_ui_exports';
|
||||
import { Config } from '../../config';
|
||||
|
||||
export interface LegacyPluginPack {
|
||||
getPath(): string;
|
||||
}
|
||||
|
||||
export interface LegacyPluginSpec {
|
||||
getId: () => unknown;
|
||||
getExpectedKibanaVersion: () => string;
|
||||
getConfigPrefix: () => string;
|
||||
}
|
||||
|
||||
export async function findLegacyPluginSpecs(settings: unknown, loggerFactory: LoggerFactory) {
|
||||
const configToMutate: Config = defaultConfig(settings);
|
||||
const {
|
||||
pack$,
|
||||
invalidDirectoryError$,
|
||||
invalidPackError$,
|
||||
otherError$,
|
||||
deprecation$,
|
||||
invalidVersionSpec$,
|
||||
spec$,
|
||||
disabledSpec$,
|
||||
}: {
|
||||
pack$: Observable<LegacyPluginPack>;
|
||||
invalidDirectoryError$: Observable<{ path: string }>;
|
||||
invalidPackError$: Observable<{ path: string }>;
|
||||
otherError$: Observable<unknown>;
|
||||
deprecation$: Observable<unknown>;
|
||||
invalidVersionSpec$: Observable<LegacyPluginSpec>;
|
||||
spec$: Observable<LegacyPluginSpec>;
|
||||
disabledSpec$: Observable<LegacyPluginSpec>;
|
||||
} = findPluginSpecs(settings, configToMutate) as any;
|
||||
|
||||
const logger = loggerFactory.get('legacy-plugins');
|
||||
|
||||
const log$ = merge(
|
||||
pack$.pipe(
|
||||
tap(definition => {
|
||||
const path = definition.getPath();
|
||||
logger.debug(`Found plugin at ${path}`, { path });
|
||||
})
|
||||
),
|
||||
|
||||
invalidDirectoryError$.pipe(
|
||||
tap(error => {
|
||||
logger.warn(`Unable to scan directory for plugins "${error.path}"`, {
|
||||
err: error,
|
||||
dir: error.path,
|
||||
});
|
||||
})
|
||||
),
|
||||
|
||||
invalidPackError$.pipe(
|
||||
tap(error => {
|
||||
logger.warn(`Skipping non-plugin directory at ${error.path}`, {
|
||||
path: error.path,
|
||||
});
|
||||
})
|
||||
),
|
||||
|
||||
otherError$.pipe(
|
||||
tap(error => {
|
||||
// rethrow unhandled errors, which will fail the server
|
||||
throw error;
|
||||
})
|
||||
),
|
||||
|
||||
invalidVersionSpec$.pipe(
|
||||
map(spec => {
|
||||
const name = spec.getId();
|
||||
const pluginVersion = spec.getExpectedKibanaVersion();
|
||||
// @ts-ignore
|
||||
const kibanaVersion = settings.pkg.version;
|
||||
return `Plugin "${name}" was disabled because it expected Kibana version "${pluginVersion}", and found "${kibanaVersion}".`;
|
||||
}),
|
||||
distinct(),
|
||||
tap(message => {
|
||||
logger.warn(message);
|
||||
})
|
||||
),
|
||||
|
||||
deprecation$.pipe(
|
||||
tap(({ spec, message }) => {
|
||||
const deprecationLogger = loggerFactory.get(
|
||||
'plugins',
|
||||
spec.getConfigPrefix(),
|
||||
'config',
|
||||
'deprecation'
|
||||
);
|
||||
deprecationLogger.warn(message);
|
||||
})
|
||||
)
|
||||
);
|
||||
|
||||
const [disabledPluginSpecs, pluginSpecs] = await forkJoin(
|
||||
disabledSpec$.pipe(toArray()),
|
||||
spec$.pipe(toArray()),
|
||||
log$.pipe(toArray())
|
||||
).toPromise();
|
||||
|
||||
return {
|
||||
disabledPluginSpecs,
|
||||
pluginSpecs,
|
||||
pluginExtendedConfig: configToMutate,
|
||||
uiExports: collectLegacyUiExports(pluginSpecs),
|
||||
};
|
||||
}
|
src/core/server/legacy/plugins/index.ts (new file, 19 lines)
@ -0,0 +1,19 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
export { findLegacyPluginSpecs } from './find_legacy_plugin_specs';
|
|
@ -24,4 +24,4 @@ export { LogLevel } from './log_level';
/** @internal */
export { config, LoggingConfigType } from './logging_config';
/** @internal */
export { LoggingService } from './logging_service';
export { LoggingService, ILoggingService } from './logging_service';
|
||||
|
|
|
@ -19,10 +19,9 @@
|
|||
|
||||
// Test helpers to simplify mocking logs and collecting all their outputs
|
||||
import { Logger } from './logger';
|
||||
import { LoggingService } from './logging_service';
|
||||
import { ILoggingService } from './logging_service';
|
||||
import { LoggerFactory } from './logger_factory';
|
||||
|
||||
type LoggingServiceContract = PublicMethodsOf<LoggingService>;
|
||||
type MockedLogger = jest.Mocked<Logger>;
|
||||
|
||||
const createLoggingServiceMock = () => {
|
||||
|
@ -36,7 +35,7 @@ const createLoggingServiceMock = () => {
|
|||
warn: jest.fn(),
|
||||
};
|
||||
|
||||
const mocked: jest.Mocked<LoggingServiceContract> = {
|
||||
const mocked: jest.Mocked<ILoggingService> = {
|
||||
get: jest.fn(),
|
||||
asLoggerFactory: jest.fn(),
|
||||
upgrade: jest.fn(),
|
||||
|
@ -65,7 +64,7 @@ const collectLoggingServiceMock = (loggerFactory: LoggerFactory) => {
|
|||
};
|
||||
|
||||
const clearLoggingServiceMock = (loggerFactory: LoggerFactory) => {
|
||||
const mockedLoggerFactory = (loggerFactory as unknown) as jest.Mocked<LoggingServiceContract>;
|
||||
const mockedLoggerFactory = (loggerFactory as unknown) as jest.Mocked<ILoggingService>;
|
||||
mockedLoggerFactory.get.mockClear();
|
||||
mockedLoggerFactory.asLoggerFactory.mockClear();
|
||||
mockedLoggerFactory.upgrade.mockClear();
|
||||
|
|
|
@ -24,6 +24,7 @@ import { LoggerAdapter } from './logger_adapter';
|
|||
import { LoggerFactory } from './logger_factory';
|
||||
import { LoggingConfigType, LoggerConfigType, LoggingConfig } from './logging_config';
|
||||
|
||||
export type ILoggingService = PublicMethodsOf<LoggingService>;
|
||||
/**
|
||||
* Service that is responsible for maintaining loggers and logger appenders.
|
||||
* @internal
|
||||
|
|
|
@ -30,3 +30,7 @@ export { getSortedObjectsForExport, SavedObjectsExportOptions } from './export';
|
|||
export { SavedObjectsSerializer, RawDoc as SavedObjectsRawDoc } from './serialization';
|
||||
|
||||
export { SavedObjectsMigrationLogger } from './migrations/core/migration_logger';
|
||||
|
||||
export { SavedObjectsService, SavedObjectsServiceStart } from './saved_objects_service';
|
||||
|
||||
export { config } from './saved_objects_config';
|
||||
|
|
|
@ -17,4 +17,10 @@
|
|||
* under the License.
|
||||
*/
|
||||
export { getTypes, getProperty, getRootProperties, getRootPropertiesObjects } from './lib';
|
||||
export { FieldMapping, MappingMeta, MappingProperties, IndexMapping } from './types';
|
||||
export {
|
||||
FieldMapping,
|
||||
MappingMeta,
|
||||
MappingProperties,
|
||||
IndexMapping,
|
||||
SavedObjectsMapping,
|
||||
} from './types';
|
||||
|
|
|
@ -42,6 +42,11 @@ export interface MappingProperties {
|
|||
[field: string]: FieldMapping;
|
||||
}
|
||||
|
||||
export interface SavedObjectsMapping {
|
||||
pluginId: string;
|
||||
properties: MappingProperties;
|
||||
}
|
||||
|
||||
export interface MappingMeta {
|
||||
// A dictionary of key -> md5 hash (e.g. 'dashboard': '24234qdfa3aefa3wa')
|
||||
// with each key being a root-level mapping property, and each value being
|
||||
|
|
|
@ -98,9 +98,19 @@ If a plugin is disabled, all of its documents are retained in the Kibana index. T
Kibana index migrations expose a few config settings which might be tweaked:

* `migrations.scrollDuration` - The [scroll](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-request-scroll.html#scroll-search-context) value used to read batches of documents from the source index. Defaults to `15m`.
* `migrations.batchSize` - The number of documents to read / transform / write at a time during index migrations
* `migrations.pollInterval` - How often, in milliseconds, secondary Kibana instances will poll to see if the primary Kibana instance has finished migrating the index.
* `migrations.scrollDuration` - The
  [scroll](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-request-scroll.html#scroll-search-context)
  value used to read batches of documents from the source index. Defaults to
  `15m`.
* `migrations.batchSize` - The number of documents to read / transform / write
  at a time during index migrations
* `migrations.pollInterval` - How often, in milliseconds, secondary Kibana
  instances will poll to see if the primary Kibana instance has finished
  migrating the index.
* `migrations.skip` - Skip running migrations on startup (defaults to false).
  This should only be used for running integration tests without a running
  elasticsearch cluster. Note: even though migrations won't run on startup,
  individual docs will still be migrated when read from ES.
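For reference, these options all live under the `migrations` prefix in `kibana.yml`. A minimal sketch, with the default values taken from the new saved objects config schema introduced later in this change (override only the settings you need):

```yml
# kibana.yml - saved object migration settings (defaults shown)
migrations.batchSize: 100
migrations.scrollDuration: "15m"
migrations.pollInterval: 1500
migrations.skip: false
```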

## Example

@ -20,6 +20,10 @@
|
|||
import _ from 'lodash';
|
||||
import { RawSavedObjectDoc } from '../../serialization';
|
||||
import { DocumentMigrator } from './document_migrator';
|
||||
import { loggingServiceMock } from '../../../logging/logging_service.mock';
|
||||
|
||||
const mockLoggerFactory = loggingServiceMock.create();
|
||||
const mockLogger = mockLoggerFactory.get('mock logger');
|
||||
|
||||
describe('DocumentMigrator', () => {
|
||||
function testOpts() {
|
||||
|
@ -27,7 +31,7 @@ describe('DocumentMigrator', () => {
|
|||
kibanaVersion: '25.2.3',
|
||||
migrations: {},
|
||||
validateDoc: _.noop,
|
||||
log: jest.fn(),
|
||||
log: mockLogger,
|
||||
};
|
||||
}
|
||||
|
||||
|
@ -474,7 +478,7 @@ describe('DocumentMigrator', () => {
|
|||
});
|
||||
|
||||
it('logs the document and transform that failed', () => {
|
||||
const log = jest.fn();
|
||||
const log = mockLogger;
|
||||
const migrator = new DocumentMigrator({
|
||||
...testOpts(),
|
||||
migrations: {
|
||||
|
@ -497,28 +501,26 @@ describe('DocumentMigrator', () => {
|
|||
expect('Did not throw').toEqual('But it should have!');
|
||||
} catch (error) {
|
||||
expect(error.message).toMatch(/Dang diggity!/);
|
||||
const warning = log.mock.calls.filter(([[level]]) => level === 'warning')[0][1];
|
||||
const warning = loggingServiceMock.collect(mockLoggerFactory).warn[0][0];
|
||||
expect(warning).toContain(JSON.stringify(failedDoc));
|
||||
expect(warning).toContain('dog:1.2.3');
|
||||
}
|
||||
});
|
||||
|
||||
it('logs message in transform function', () => {
|
||||
const logStash: string[] = [];
|
||||
const logTestMsg = '...said the joker to the thief';
|
||||
const migrator = new DocumentMigrator({
|
||||
...testOpts(),
|
||||
migrations: {
|
||||
dog: {
|
||||
'1.2.3': (doc, log) => {
|
||||
log!.info(logTestMsg);
|
||||
log.info(logTestMsg);
|
||||
log.warning(logTestMsg);
|
||||
return doc;
|
||||
},
|
||||
},
|
||||
},
|
||||
log: (path: string[], message: string) => {
|
||||
logStash.push(message);
|
||||
},
|
||||
log: mockLogger,
|
||||
});
|
||||
const doc = {
|
||||
id: 'joker',
|
||||
|
@ -527,7 +529,8 @@ describe('DocumentMigrator', () => {
|
|||
migrationVersion: {},
|
||||
};
|
||||
migrator.migrate(doc);
|
||||
expect(logStash[0]).toEqual(logTestMsg);
|
||||
expect(loggingServiceMock.collect(mockLoggerFactory).info[0][0]).toEqual(logTestMsg);
|
||||
expect(loggingServiceMock.collect(mockLoggerFactory).warn[1][0]).toEqual(logTestMsg);
|
||||
});
|
||||
|
||||
test('extracts the latest migration version info', () => {
|
||||
|
|
|
@ -64,26 +64,26 @@ import Boom from 'boom';
|
|||
import _ from 'lodash';
|
||||
import cloneDeep from 'lodash.clonedeep';
|
||||
import Semver from 'semver';
|
||||
import { Logger } from '../../../logging';
|
||||
import { RawSavedObjectDoc } from '../../serialization';
|
||||
import { SavedObjectsMigrationVersion } from '../../types';
|
||||
import { LogFn, SavedObjectsMigrationLogger, MigrationLogger } from './migration_logger';
|
||||
import { MigrationLogger, SavedObjectsMigrationLogger } from './migration_logger';
|
||||
|
||||
export type TransformFn = (
|
||||
doc: RawSavedObjectDoc,
|
||||
log?: SavedObjectsMigrationLogger
|
||||
) => RawSavedObjectDoc;
|
||||
export type TransformFn = (doc: RawSavedObjectDoc) => RawSavedObjectDoc;
|
||||
|
||||
type MigrationFn = (doc: RawSavedObjectDoc, log: SavedObjectsMigrationLogger) => RawSavedObjectDoc;
|
||||
|
||||
type ValidateDoc = (doc: RawSavedObjectDoc) => void;
|
||||
|
||||
interface MigrationDefinition {
|
||||
[type: string]: { [version: string]: TransformFn };
|
||||
export interface MigrationDefinition {
|
||||
[type: string]: { [version: string]: MigrationFn };
|
||||
}
|
||||
|
||||
interface Opts {
|
||||
kibanaVersion: string;
|
||||
migrations: MigrationDefinition;
|
||||
validateDoc: ValidateDoc;
|
||||
log: LogFn;
|
||||
log: Logger;
|
||||
}
|
||||
|
||||
interface ActiveMigrations {
|
||||
|
@ -125,7 +125,7 @@ export class DocumentMigrator implements VersionedTransformer {
|
|||
constructor(opts: Opts) {
|
||||
validateMigrationDefinition(opts.migrations);
|
||||
|
||||
this.migrations = buildActiveMigrations(opts.migrations, new MigrationLogger(opts.log));
|
||||
this.migrations = buildActiveMigrations(opts.migrations, opts.log);
|
||||
this.transformDoc = buildDocumentTransform({
|
||||
kibanaVersion: opts.kibanaVersion,
|
||||
migrations: this.migrations,
|
||||
|
@ -207,10 +207,7 @@ function validateMigrationDefinition(migrations: MigrationDefinition) {
|
|||
* From: { type: { version: fn } }
|
||||
* To: { type: { latestVersion: string, transforms: [{ version: string, transform: fn }] } }
|
||||
*/
|
||||
function buildActiveMigrations(
|
||||
migrations: MigrationDefinition,
|
||||
log: SavedObjectsMigrationLogger
|
||||
): ActiveMigrations {
|
||||
function buildActiveMigrations(migrations: MigrationDefinition, log: Logger): ActiveMigrations {
|
||||
return _.mapValues(migrations, (versions, prop) => {
|
||||
const transforms = Object.entries(versions)
|
||||
.map(([version, transform]) => ({
|
||||
|
@ -299,15 +296,10 @@ function markAsUpToDate(doc: RawSavedObjectDoc, migrations: ActiveMigrations) {
|
|||
* If a specific transform function fails, this tacks on a bit of information
|
||||
* about the document and transform that caused the failure.
|
||||
*/
|
||||
function wrapWithTry(
|
||||
version: string,
|
||||
prop: string,
|
||||
transform: TransformFn,
|
||||
log: SavedObjectsMigrationLogger
|
||||
) {
|
||||
function wrapWithTry(version: string, prop: string, transform: MigrationFn, log: Logger) {
|
||||
return function tryTransformDoc(doc: RawSavedObjectDoc) {
|
||||
try {
|
||||
const result = transform(doc, log);
|
||||
const result = transform(doc, new MigrationLogger(log));
|
||||
|
||||
// A basic sanity check to help migration authors detect basic errors
|
||||
// (e.g. forgetting to return the transformed doc)
|
||||
|
@ -319,7 +311,7 @@ function wrapWithTry(
|
|||
} catch (error) {
|
||||
const failedTransform = `${prop}:${version}`;
|
||||
const failedDoc = JSON.stringify(doc);
|
||||
log.warning(
|
||||
log.warn(
|
||||
`Failed to transform document ${doc}. Transform: ${failedTransform}\nDoc: ${failedDoc}`
|
||||
);
|
||||
throw error;
|
||||
|
|
|
@ -21,6 +21,7 @@ import _ from 'lodash';
|
|||
import { SavedObjectsSchema } from '../../schema';
|
||||
import { RawSavedObjectDoc, SavedObjectsSerializer } from '../../serialization';
|
||||
import { IndexMigrator } from './index_migrator';
|
||||
import { loggingServiceMock } from '../../../logging/logging_service.mock';
|
||||
|
||||
describe('IndexMigrator', () => {
|
||||
let testOpts: any;
|
||||
|
@ -30,7 +31,7 @@ describe('IndexMigrator', () => {
|
|||
batchSize: 10,
|
||||
callCluster: jest.fn(),
|
||||
index: '.kibana',
|
||||
log: jest.fn(),
|
||||
log: loggingServiceMock.create().get(),
|
||||
mappingProperties: {},
|
||||
pollInterval: 1,
|
||||
scrollDuration: '1m',
|
||||
|
|
|
@ -24,13 +24,14 @@
|
|||
* serves as a central blueprint for what migrations will end up doing.
|
||||
*/
|
||||
|
||||
import { Logger } from 'src/core/server/logging';
|
||||
import { SavedObjectsSerializer } from '../../serialization';
|
||||
import { MappingProperties } from '../../mappings';
|
||||
import { buildActiveMappings } from './build_active_mappings';
|
||||
import { CallCluster } from './call_cluster';
|
||||
import { VersionedTransformer } from './document_migrator';
|
||||
import { fetchInfo, FullIndexInfo } from './elastic_index';
|
||||
import { LogFn, SavedObjectsMigrationLogger, MigrationLogger } from './migration_logger';
|
||||
import { SavedObjectsMigrationLogger, MigrationLogger } from './migration_logger';
|
||||
|
||||
export interface MigrationOpts {
|
||||
batchSize: number;
|
||||
|
@ -38,7 +39,7 @@ export interface MigrationOpts {
|
|||
scrollDuration: string;
|
||||
callCluster: CallCluster;
|
||||
index: string;
|
||||
log: LogFn;
|
||||
log: Logger;
|
||||
mappingProperties: MappingProperties;
|
||||
documentMigrator: VersionedTransformer;
|
||||
serializer: SavedObjectsSerializer;
|
||||
|
@ -71,8 +72,7 @@ export interface Context {
|
|||
* and various info needed to migrate the source index.
|
||||
*/
|
||||
export async function migrationContext(opts: MigrationOpts): Promise<Context> {
|
||||
const { callCluster } = opts;
|
||||
const log = new MigrationLogger(opts.log);
|
||||
const { log, callCluster } = opts;
|
||||
const alias = opts.index;
|
||||
const source = createSourceContext(await fetchInfo(callCluster, alias), alias);
|
||||
const dest = createDestContext(source, alias, opts.mappingProperties);
|
||||
|
@ -82,7 +82,7 @@ export async function migrationContext(opts: MigrationOpts): Promise<Context> {
|
|||
alias,
|
||||
source,
|
||||
dest,
|
||||
log,
|
||||
log: new MigrationLogger(log),
|
||||
batchSize: opts.batchSize,
|
||||
documentMigrator: opts.documentMigrator,
|
||||
pollInterval: opts.pollInterval,
|
||||
|
|
|
@ -17,6 +17,8 @@
 * under the License.
 */

import { Logger } from 'src/core/server/logging';

/*
 * This file provides a helper class for ensuring that all logging
 * in the migration system is done in a fairly uniform way.

@ -32,13 +34,13 @@ export interface SavedObjectsMigrationLogger {
}

export class MigrationLogger implements SavedObjectsMigrationLogger {
  private log: LogFn;
  private logger: Logger;

  constructor(log: LogFn) {
    this.log = log;
  constructor(log: Logger) {
    this.logger = log;
  }

  public info = (msg: string) => this.log(['info', 'migrations'], msg);
  public debug = (msg: string) => this.log(['debug', 'migrations'], msg);
  public warning = (msg: string) => this.log(['warning', 'migrations'], msg);
  public info = (msg: string) => this.logger.info(msg);
  public debug = (msg: string) => this.logger.debug(msg);
  public warning = (msg: string) => this.logger.warn(msg);
}
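As a hedged illustration of what this change means for migration authors: a plugin-registered migration still receives a `SavedObjectsMigrationLogger`, but its calls are now forwarded to the core `Logger`. The saved object type, version, and attribute below are made up, and the import paths are only indicative of the `migrations/core` directory layout:

```typescript
import { RawSavedObjectDoc } from '../../serialization';
import { SavedObjectsMigrationLogger } from './migration_logger';

// Hypothetical migration definition of shape { type: { version: fn } }.
const exampleMigrations = {
  dashboard: {
    '7.4.0': (doc: RawSavedObjectDoc, log: SavedObjectsMigrationLogger): RawSavedObjectDoc => {
      log.info(`Migrating dashboard ${doc.id} to 7.4.0`);
      // Hypothetical transform: default a new attribute when it is missing.
      if (doc.attributes.refreshInterval === undefined) {
        doc.attributes.refreshInterval = { pause: true, value: 0 };
      }
      return doc;
    },
  },
};
```

DocumentMigrator's `wrapWithTry` wraps each such function, so a thrown error is logged via `log.warn` with the failing document and transform before being re-thrown.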
|
||||
|
|
|
@ -17,4 +17,4 @@
|
|||
* under the License.
|
||||
*/
|
||||
|
||||
export { KibanaMigrator } from './kibana';
|
||||
export { KibanaMigrator, IKibanaMigrator } from './kibana';
|
||||
|
|
|
@ -17,4 +17,4 @@
|
|||
* under the License.
|
||||
*/
|
||||
|
||||
export { KibanaMigrator } from './kibana_migrator';
|
||||
export { KibanaMigrator, IKibanaMigrator } from './kibana_migrator';
|
||||
|
|
|
@ -0,0 +1,46 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import { KibanaMigrator, mergeProperties } from './kibana_migrator';
|
||||
import { buildActiveMappings } from '../core';
|
||||
import { SavedObjectsMapping } from '../../mappings';
|
||||
|
||||
const createMigrator = (
|
||||
{
|
||||
savedObjectMappings,
|
||||
}: {
|
||||
savedObjectMappings: SavedObjectsMapping[];
|
||||
} = { savedObjectMappings: [] }
|
||||
) => {
|
||||
const mockMigrator: jest.Mocked<PublicMethodsOf<KibanaMigrator>> = {
|
||||
runMigrations: jest.fn(),
|
||||
getActiveMappings: jest.fn(),
|
||||
migrateDocument: jest.fn(),
|
||||
};
|
||||
|
||||
mockMigrator.getActiveMappings.mockReturnValue(
|
||||
buildActiveMappings({ properties: mergeProperties(savedObjectMappings) })
|
||||
);
|
||||
mockMigrator.migrateDocument.mockImplementation(doc => doc);
|
||||
return mockMigrator;
|
||||
};
|
||||
|
||||
export const mockKibanaMigrator = {
|
||||
create: createMigrator,
|
||||
};
|
|
@ -18,13 +18,14 @@
|
|||
*/
|
||||
|
||||
import _ from 'lodash';
|
||||
import { KbnServer, KibanaMigrator } from './kibana_migrator';
|
||||
import { KibanaMigratorOptions, KibanaMigrator } from './kibana_migrator';
|
||||
import { loggingServiceMock } from '../../../logging/logging_service.mock';
|
||||
|
||||
describe('KibanaMigrator', () => {
|
||||
describe('getActiveMappings', () => {
|
||||
it('returns full index mappings w/ core properties', () => {
|
||||
const { kbnServer } = mockKbnServer();
|
||||
kbnServer.uiExports.savedObjectMappings = [
|
||||
const options = mockOptions();
|
||||
options.savedObjectMappings = [
|
||||
{
|
||||
pluginId: 'aaa',
|
||||
properties: { amap: { type: 'text' } },
|
||||
|
@ -34,13 +35,13 @@ describe('KibanaMigrator', () => {
|
|||
properties: { bmap: { type: 'text' } },
|
||||
},
|
||||
];
|
||||
const mappings = new KibanaMigrator({ kbnServer }).getActiveMappings();
|
||||
const mappings = new KibanaMigrator(options).getActiveMappings();
|
||||
expect(mappings).toMatchSnapshot();
|
||||
});
|
||||
|
||||
it('Fails if duplicate mappings are defined', () => {
|
||||
const { kbnServer } = mockKbnServer();
|
||||
kbnServer.uiExports.savedObjectMappings = [
|
||||
const options = mockOptions();
|
||||
options.savedObjectMappings = [
|
||||
{
|
||||
pluginId: 'aaa',
|
||||
properties: { amap: { type: 'text' } },
|
||||
|
@ -50,56 +51,27 @@ describe('KibanaMigrator', () => {
|
|||
properties: { amap: { type: 'long' } },
|
||||
},
|
||||
];
|
||||
expect(() => new KibanaMigrator({ kbnServer }).getActiveMappings()).toThrow(
|
||||
expect(() => new KibanaMigrator(options).getActiveMappings()).toThrow(
|
||||
/Plugin bbb is attempting to redefine mapping "amap"/
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('awaitMigration', () => {
|
||||
it('changes isMigrated to true if migrations were skipped', async () => {
|
||||
const { kbnServer } = mockKbnServer();
|
||||
kbnServer.server.plugins.elasticsearch = undefined;
|
||||
const result = await new KibanaMigrator({ kbnServer }).awaitMigration();
|
||||
describe('runMigrations', () => {
|
||||
it('resolves isMigrated if migrations were skipped', async () => {
|
||||
const skipMigrations = true;
|
||||
const result = await new KibanaMigrator(mockOptions()).runMigrations(skipMigrations);
|
||||
expect(result).toEqual([{ status: 'skipped' }, { status: 'skipped' }]);
|
||||
});
|
||||
|
||||
it('waits for kbnServer.ready and elasticsearch.ready before attempting migrations', async () => {
|
||||
const { kbnServer } = mockKbnServer();
|
||||
it('only runs migrations once if called multiple times', async () => {
|
||||
const options = mockOptions();
|
||||
const clusterStub = jest.fn<any, any>(() => ({ status: 404 }));
|
||||
const waitUntilReady = jest.fn(async () => undefined);
|
||||
|
||||
kbnServer.server.plugins.elasticsearch = {
|
||||
waitUntilReady,
|
||||
getCluster() {
|
||||
expect(kbnServer.ready as any).toHaveBeenCalledTimes(1);
|
||||
expect(waitUntilReady).toHaveBeenCalledTimes(1);
|
||||
|
||||
return {
|
||||
callWithInternalUser: clusterStub,
|
||||
};
|
||||
},
|
||||
};
|
||||
|
||||
const migrationResults = await new KibanaMigrator({ kbnServer }).awaitMigration();
|
||||
expect(migrationResults.length).toEqual(2);
|
||||
});
|
||||
|
||||
it('only handles and deletes index templates once', async () => {
|
||||
const { kbnServer } = mockKbnServer();
|
||||
const clusterStub = jest.fn<any, any>(() => ({ status: 404 }));
|
||||
const waitUntilReady = jest.fn(async () => undefined);
|
||||
|
||||
kbnServer.server.plugins.elasticsearch = {
|
||||
waitUntilReady,
|
||||
getCluster() {
|
||||
return {
|
||||
callWithInternalUser: clusterStub,
|
||||
};
|
||||
},
|
||||
};
|
||||
|
||||
await new KibanaMigrator({ kbnServer }).awaitMigration();
|
||||
options.callCluster = clusterStub;
|
||||
const migrator = new KibanaMigrator(options);
|
||||
await migrator.runMigrations();
|
||||
await migrator.runMigrations();
|
||||
|
||||
// callCluster with "cat.templates" is called by "deleteIndexTemplates" function
|
||||
// and should only be done once
|
||||
|
@ -111,75 +83,60 @@ describe('KibanaMigrator', () => {
|
|||
});
|
||||
});
|
||||
|
||||
function mockKbnServer({ configValues }: { configValues?: any } = {}) {
|
||||
function mockOptions({ configValues }: { configValues?: any } = {}): KibanaMigratorOptions {
|
||||
const callCluster = jest.fn();
|
||||
const kbnServer: KbnServer = {
|
||||
version: '8.2.3',
|
||||
ready: jest.fn(async () => undefined),
|
||||
uiExports: {
|
||||
savedObjectsManagement: {},
|
||||
savedObjectValidations: {},
|
||||
savedObjectMigrations: {},
|
||||
savedObjectMappings: [
|
||||
{
|
||||
pluginId: 'testtype',
|
||||
properties: {
|
||||
testtype: {
|
||||
properties: {
|
||||
name: { type: 'keyword' },
|
||||
},
|
||||
return {
|
||||
logger: loggingServiceMock.create().get(),
|
||||
kibanaVersion: '8.2.3',
|
||||
savedObjectValidations: {},
|
||||
savedObjectMigrations: {},
|
||||
savedObjectMappings: [
|
||||
{
|
||||
pluginId: 'testtype',
|
||||
properties: {
|
||||
testtype: {
|
||||
properties: {
|
||||
name: { type: 'keyword' },
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
pluginId: 'testtype2',
|
||||
properties: {
|
||||
testtype2: {
|
||||
properties: {
|
||||
name: { type: 'keyword' },
|
||||
},
|
||||
},
|
||||
{
|
||||
pluginId: 'testtype2',
|
||||
properties: {
|
||||
testtype2: {
|
||||
properties: {
|
||||
name: { type: 'keyword' },
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
savedObjectSchemas: {
|
||||
testtype2: {
|
||||
isNamespaceAgnostic: false,
|
||||
indexPattern: 'other-index',
|
||||
},
|
||||
},
|
||||
],
|
||||
savedObjectSchemas: {
|
||||
testtype2: {
|
||||
isNamespaceAgnostic: false,
|
||||
indexPattern: 'other-index',
|
||||
},
|
||||
},
|
||||
server: {
|
||||
config: () => ({
|
||||
get: ((name: string) => {
|
||||
if (configValues && configValues[name]) {
|
||||
return configValues[name];
|
||||
}
|
||||
switch (name) {
|
||||
case 'kibana.index':
|
||||
return '.my-index';
|
||||
case 'migrations.batchSize':
|
||||
return 20;
|
||||
case 'migrations.pollInterval':
|
||||
return 20000;
|
||||
case 'migrations.scrollDuration':
|
||||
return '10m';
|
||||
default:
|
||||
throw new Error(`Unexpected config ${name}`);
|
||||
}
|
||||
}) as any,
|
||||
}),
|
||||
log: _.noop as any,
|
||||
plugins: {
|
||||
elasticsearch: {
|
||||
getCluster: () => ({
|
||||
callWithInternalUser: callCluster,
|
||||
}),
|
||||
waitUntilReady: async () => undefined,
|
||||
},
|
||||
},
|
||||
kibanaConfig: {
|
||||
enabled: true,
|
||||
index: '.my-index',
|
||||
} as KibanaMigratorOptions['kibanaConfig'],
|
||||
savedObjectsConfig: {
|
||||
batchSize: 20,
|
||||
pollInterval: 20000,
|
||||
scrollDuration: '10m',
|
||||
skip: false,
|
||||
},
|
||||
config: {
|
||||
get: (name: string) => {
|
||||
if (configValues && configValues[name]) {
|
||||
return configValues[name];
|
||||
} else {
|
||||
throw new Error(`Unexpected config ${name}`);
|
||||
}
|
||||
},
|
||||
} as KibanaMigratorOptions['config'],
|
||||
callCluster,
|
||||
};
|
||||
|
||||
return { kbnServer, callCluster };
|
||||
}
|
||||
|
|
|
@ -22,81 +22,112 @@
|
|||
* (the shape of the mappings and documents in the index).
|
||||
*/
|
||||
|
||||
import { once } from 'lodash';
|
||||
import { MappingProperties } from '../../mappings';
|
||||
import { Logger } from 'src/core/server/logging';
|
||||
import { KibanaConfigType } from 'src/core/server/kibana_config';
|
||||
import { MappingProperties, SavedObjectsMapping, IndexMapping } from '../../mappings';
|
||||
import { SavedObjectsSchema, SavedObjectsSchemaDefinition } from '../../schema';
|
||||
import { SavedObjectsManagementDefinition } from '../../management';
|
||||
import { RawSavedObjectDoc, SavedObjectsSerializer } from '../../serialization';
|
||||
import { docValidator } from '../../validation';
|
||||
import { buildActiveMappings, CallCluster, IndexMigrator, LogFn } from '../core';
|
||||
import { DocumentMigrator, VersionedTransformer } from '../core/document_migrator';
|
||||
import { docValidator, PropertyValidators } from '../../validation';
|
||||
import { buildActiveMappings, CallCluster, IndexMigrator } from '../core';
|
||||
import {
|
||||
DocumentMigrator,
|
||||
VersionedTransformer,
|
||||
MigrationDefinition,
|
||||
} from '../core/document_migrator';
|
||||
import { createIndexMap } from '../core/build_index_map';
|
||||
import { SavedObjectsConfigType } from '../../saved_objects_config';
|
||||
import { Config } from '../../../config';
|
||||
export interface KbnServer {
|
||||
server: Server;
|
||||
version: string;
|
||||
ready: () => Promise<any>;
|
||||
uiExports: {
|
||||
savedObjectMappings: any[];
|
||||
savedObjectMigrations: any;
|
||||
savedObjectValidations: any;
|
||||
savedObjectSchemas: SavedObjectsSchemaDefinition;
|
||||
savedObjectsManagement: SavedObjectsManagementDefinition;
|
||||
};
|
||||
|
||||
export interface KibanaMigratorOptions {
|
||||
callCluster: CallCluster;
|
||||
config: Config;
|
||||
savedObjectsConfig: SavedObjectsConfigType;
|
||||
kibanaConfig: KibanaConfigType;
|
||||
kibanaVersion: string;
|
||||
logger: Logger;
|
||||
savedObjectMappings: SavedObjectsMapping[];
|
||||
savedObjectMigrations: MigrationDefinition;
|
||||
savedObjectSchemas: SavedObjectsSchemaDefinition;
|
||||
savedObjectValidations: PropertyValidators;
|
||||
}
|
||||
|
||||
interface Server {
|
||||
log: LogFn;
|
||||
config: () => {
|
||||
get: {
|
||||
(path: 'kibana.index' | 'migrations.scrollDuration'): string;
|
||||
(path: 'migrations.batchSize' | 'migrations.pollInterval'): number;
|
||||
};
|
||||
};
|
||||
plugins: { elasticsearch: ElasticsearchPlugin | undefined };
|
||||
}
|
||||
|
||||
interface ElasticsearchPlugin {
|
||||
getCluster: (name: 'admin') => { callWithInternalUser: CallCluster };
|
||||
waitUntilReady: () => Promise<any>;
|
||||
}
|
||||
export type IKibanaMigrator = Pick<KibanaMigrator, keyof KibanaMigrator>;
|
||||
|
||||
/**
|
||||
* Manages the shape of mappings and documents in the Kibana index.
|
||||
*
|
||||
* @export
|
||||
* @class KibanaMigrator
|
||||
*/
|
||||
export class KibanaMigrator {
|
||||
private readonly callCluster: CallCluster;
|
||||
private readonly config: Config;
|
||||
private readonly savedObjectsConfig: SavedObjectsConfigType;
|
||||
private readonly documentMigrator: VersionedTransformer;
|
||||
private readonly kibanaConfig: KibanaConfigType;
|
||||
private readonly log: Logger;
|
||||
private readonly mappingProperties: MappingProperties;
|
||||
private readonly schema: SavedObjectsSchema;
|
||||
private readonly serializer: SavedObjectsSerializer;
|
||||
private migrationResult?: Promise<Array<{ status: string }>>;
|
||||
|
||||
/**
|
||||
* Creates an instance of KibanaMigrator.
|
||||
*/
|
||||
constructor({
|
||||
callCluster,
|
||||
config,
|
||||
kibanaConfig,
|
||||
savedObjectsConfig,
|
||||
kibanaVersion,
|
||||
logger,
|
||||
savedObjectMappings,
|
||||
savedObjectMigrations,
|
||||
savedObjectSchemas,
|
||||
savedObjectValidations,
|
||||
}: KibanaMigratorOptions) {
|
||||
this.config = config;
|
||||
this.callCluster = callCluster;
|
||||
this.kibanaConfig = kibanaConfig;
|
||||
this.savedObjectsConfig = savedObjectsConfig;
|
||||
this.schema = new SavedObjectsSchema(savedObjectSchemas);
|
||||
this.serializer = new SavedObjectsSerializer(this.schema);
|
||||
this.mappingProperties = mergeProperties(savedObjectMappings || []);
|
||||
this.log = logger;
|
||||
this.documentMigrator = new DocumentMigrator({
|
||||
kibanaVersion,
|
||||
migrations: savedObjectMigrations || {},
|
||||
validateDoc: docValidator(savedObjectValidations || {}),
|
||||
log: this.log,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Migrates the mappings and documents in the Kibana index. This will run only
|
||||
* once and subsequent calls will return the result of the original call.
|
||||
*
|
||||
* @returns
|
||||
* @memberof KibanaMigrator
|
||||
* @returns - A promise which resolves once all migrations have been applied.
|
||||
* The promise resolves with an array of migration statuses, one for each
|
||||
* elasticsearch index which was migrated.
|
||||
*/
|
||||
public awaitMigration = once(async () => {
|
||||
const { server } = this.kbnServer;
|
||||
|
||||
// Wait until the plugins have been found an initialized...
|
||||
await this.kbnServer.ready();
|
||||
|
||||
// We can't do anything if the elasticsearch plugin has been disabled.
|
||||
if (!server.plugins.elasticsearch) {
|
||||
server.log(
|
||||
['warning', 'migration'],
|
||||
'The elasticsearch plugin is disabled. Skipping migrations.'
|
||||
);
|
||||
return Object.keys(this.mappingProperties).map(() => ({ status: 'skipped' }));
|
||||
public runMigrations(skipMigrations: boolean = false): Promise<Array<{ status: string }>> {
|
||||
if (this.migrationResult === undefined) {
|
||||
this.migrationResult = this.runMigrationsInternal(skipMigrations);
|
||||
}
|
||||
|
||||
// Wait until elasticsearch is green...
|
||||
await server.plugins.elasticsearch.waitUntilReady();
|
||||
return this.migrationResult;
|
||||
}
|
||||
|
||||
const config = server.config() as Config;
|
||||
const kibanaIndexName = config.get('kibana.index');
|
||||
private runMigrationsInternal(skipMigrations: boolean) {
|
||||
if (skipMigrations) {
|
||||
this.log.warn(
|
||||
'Skipping Saved Object migrations on startup. Note: Individual documents will still be migrated when read or written.'
|
||||
);
|
||||
return Promise.resolve(
|
||||
Object.keys(this.mappingProperties).map(() => ({ status: 'skipped' }))
|
||||
);
|
||||
}
|
||||
|
||||
const kibanaIndexName = this.kibanaConfig.index;
|
||||
const indexMap = createIndexMap({
|
||||
config,
|
||||
config: this.config,
|
||||
kibanaIndexName,
|
||||
indexMap: this.mappingProperties,
|
||||
schema: this.schema,
|
||||
|
@ -104,14 +135,14 @@ export class KibanaMigrator {
|
|||
|
||||
const migrators = Object.keys(indexMap).map(index => {
|
||||
return new IndexMigrator({
|
||||
batchSize: config.get('migrations.batchSize'),
|
||||
callCluster: server.plugins.elasticsearch!.getCluster('admin').callWithInternalUser,
|
||||
batchSize: this.savedObjectsConfig.batchSize,
|
||||
callCluster: this.callCluster,
|
||||
documentMigrator: this.documentMigrator,
|
||||
index,
|
||||
log: this.log,
|
||||
mappingProperties: indexMap[index].typeMappings,
|
||||
pollInterval: config.get('migrations.pollInterval'),
|
||||
scrollDuration: config.get('migrations.scrollDuration'),
|
||||
pollInterval: this.savedObjectsConfig.pollInterval,
|
||||
scrollDuration: this.savedObjectsConfig.scrollDuration,
|
||||
serializer: this.serializer,
|
||||
// Only necessary for the migrator of the kibana index.
|
||||
obsoleteIndexTemplatePattern:
|
||||
|
@ -120,61 +151,22 @@ export class KibanaMigrator {
|
|||
});
|
||||
});
|
||||
|
||||
if (migrators.length === 0) {
|
||||
throw new Error(`Migrations failed to run, no mappings found or Kibana is not "ready".`);
|
||||
}
|
||||
|
||||
return Promise.all(migrators.map(migrator => migrator.migrate()));
|
||||
});
|
||||
|
||||
private kbnServer: KbnServer;
|
||||
private documentMigrator: VersionedTransformer;
|
||||
private mappingProperties: MappingProperties;
|
||||
private log: LogFn;
|
||||
private serializer: SavedObjectsSerializer;
|
||||
private readonly schema: SavedObjectsSchema;
|
||||
|
||||
/**
|
||||
* Creates an instance of KibanaMigrator.
|
||||
*
|
||||
* @param opts
|
||||
* @prop {KbnServer} kbnServer - An instance of the Kibana server object.
|
||||
* @memberof KibanaMigrator
|
||||
*/
|
||||
constructor({ kbnServer }: { kbnServer: KbnServer }) {
|
||||
this.kbnServer = kbnServer;
|
||||
|
||||
this.schema = new SavedObjectsSchema(kbnServer.uiExports.savedObjectSchemas);
|
||||
this.serializer = new SavedObjectsSerializer(this.schema);
|
||||
|
||||
this.mappingProperties = mergeProperties(kbnServer.uiExports.savedObjectMappings || []);
|
||||
|
||||
this.log = (meta: string[], message: string) => kbnServer.server.log(meta, message);
|
||||
|
||||
this.documentMigrator = new DocumentMigrator({
|
||||
kibanaVersion: kbnServer.version,
|
||||
migrations: kbnServer.uiExports.savedObjectMigrations || {},
|
||||
validateDoc: docValidator(kbnServer.uiExports.savedObjectValidations || {}),
|
||||
log: this.log,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets all the index mappings defined by Kibana's enabled plugins.
|
||||
*
|
||||
* @returns
|
||||
* @memberof KibanaMigrator
|
||||
*/
|
||||
public getActiveMappings() {
|
||||
public getActiveMappings(): IndexMapping {
|
||||
return buildActiveMappings({ properties: this.mappingProperties });
|
||||
}
|
||||
|
||||
/**
|
||||
* Migrates an individual doc to the latest version, as defined by the plugin migrations.
|
||||
*
|
||||
* @param {RawSavedObjectDoc} doc
|
||||
* @returns {RawSavedObjectDoc}
|
||||
* @memberof KibanaMigrator
|
||||
* @param doc - The saved object to migrate
|
||||
* @returns `doc` with all registered migrations applied.
|
||||
*/
|
||||
public migrateDocument(doc: RawSavedObjectDoc): RawSavedObjectDoc {
|
||||
return this.documentMigrator.migrate(doc);
|
||||
|
@ -185,7 +177,7 @@ export class KibanaMigrator {
|
|||
* Merges savedObjectMappings properties into a single object, verifying that
|
||||
* no mappings are redefined.
|
||||
*/
|
||||
function mergeProperties(mappings: any[]): MappingProperties {
|
||||
export function mergeProperties(mappings: SavedObjectsMapping[]): MappingProperties {
|
||||
return mappings.reduce((acc, { pluginId, properties }) => {
|
||||
const duplicate = Object.keys(properties).find(k => acc.hasOwnProperty(k));
|
||||
if (duplicate) {
|
||||
|
|
|
@ -17,22 +17,16 @@
 * under the License.
 */

import { collectUiExports } from './collect_ui_exports';
import { schema, TypeOf } from '@kbn/config-schema';

export function uiExportsMixin(kbnServer) {
  kbnServer.uiExports = collectUiExports(
    kbnServer.pluginSpecs
  );
export type SavedObjectsConfigType = TypeOf<typeof config.schema>;

  // check for unknown uiExport types
  const { unknown = [] } = kbnServer.uiExports;
  if (!unknown.length) {
    return;
  }

  throw new Error(`Unknown uiExport types: ${
    unknown
      .map(({ pluginSpec, type }) => `${type} from ${pluginSpec.getId()}`)
      .join(', ')
  }`);
}
export const config = {
  path: 'migrations',
  schema: schema.object({
    batchSize: schema.number({ defaultValue: 100 }),
    scrollDuration: schema.string({ defaultValue: '15m' }),
    pollInterval: schema.number({ defaultValue: 1500 }),
    skip: schema.boolean({ defaultValue: false }),
  }),
};
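A small sketch of how this typed config is meant to be consumed (mirroring what the new `SavedObjectsService` does below); the `IConfigService` import path is an assumption for illustration:

```typescript
import { first } from 'rxjs/operators';
import { IConfigService } from '../config';
import { SavedObjectsConfigType } from './saved_objects_config';

// Resolve the current `migrations` config once, as SavedObjectsService does at setup/start.
async function readMigrationsConfig(configService: IConfigService): Promise<SavedObjectsConfigType> {
  return configService
    .atPath<SavedObjectsConfigType>('migrations')
    .pipe(first())
    .toPromise();
}
```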
src/core/server/saved_objects/saved_objects_service.mock.ts (new file, 49 lines)
@ -0,0 +1,49 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import { SavedObjectsService, SavedObjectsServiceStart } from './saved_objects_service';
|
||||
import { mockKibanaMigrator } from './migrations/kibana/kibana_migrator.mock';
|
||||
|
||||
type SavedObjectsServiceContract = PublicMethodsOf<SavedObjectsService>;
|
||||
|
||||
const createStartContractMock = () => {
|
||||
const startContract: jest.Mocked<SavedObjectsServiceStart> = {
|
||||
migrator: mockKibanaMigrator.create(),
|
||||
};
|
||||
|
||||
return startContract;
|
||||
};
|
||||
|
||||
const createsavedObjectsServiceMock = () => {
|
||||
const mocked: jest.Mocked<SavedObjectsServiceContract> = {
|
||||
setup: jest.fn(),
|
||||
start: jest.fn(),
|
||||
stop: jest.fn(),
|
||||
};
|
||||
|
||||
mocked.setup.mockResolvedValue({});
|
||||
mocked.start.mockResolvedValue(createStartContractMock());
|
||||
mocked.stop.mockResolvedValue();
|
||||
return mocked;
|
||||
};
|
||||
|
||||
export const savedObjectsServiceMock = {
|
||||
create: createsavedObjectsServiceMock,
|
||||
createStartContract: createStartContractMock,
|
||||
};
|
src/core/server/saved_objects/saved_objects_service.test.ts (new file, 113 lines)
@ -0,0 +1,113 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
jest.mock('./migrations/kibana/kibana_migrator');
|
||||
|
||||
import { SavedObjectsService, SavedObjectsSetupDeps } from './saved_objects_service';
|
||||
import { mockCoreContext } from '../core_context.mock';
|
||||
import { KibanaMigrator } from './migrations/kibana/kibana_migrator';
|
||||
import { of } from 'rxjs';
|
||||
import elasticsearch from 'elasticsearch';
|
||||
import { Env } from '../config';
|
||||
import { configServiceMock } from '../mocks';
|
||||
|
||||
afterEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
describe('SavedObjectsService', () => {
|
||||
describe('#setup()', () => {
|
||||
it('creates a KibanaMigrator which retries NoConnections errors from callAsInternalUser', async () => {
|
||||
const coreContext = mockCoreContext.create();
|
||||
let i = 0;
|
||||
const clusterClient = {
|
||||
callAsInternalUser: jest
|
||||
.fn()
|
||||
.mockImplementation(() =>
|
||||
i++ <= 2
|
||||
? Promise.reject(new elasticsearch.errors.NoConnections())
|
||||
: Promise.resolve('success')
|
||||
),
|
||||
};
|
||||
|
||||
const soService = new SavedObjectsService(coreContext);
|
||||
const coreSetup = ({
|
||||
elasticsearch: { adminClient$: of(clusterClient) },
|
||||
legacy: { uiExports: {}, pluginExtendedConfig: {} },
|
||||
} as unknown) as SavedObjectsSetupDeps;
|
||||
|
||||
await soService.setup(coreSetup);
|
||||
|
||||
return expect((KibanaMigrator as jest.Mock).mock.calls[0][0].callCluster()).resolves.toMatch(
|
||||
'success'
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('#start()', () => {
|
||||
it('skips KibanaMigrator migrations when --optimize=true', async () => {
|
||||
const coreContext = mockCoreContext.create({
|
||||
env: ({ cliArgs: { optimize: true }, packageInfo: { version: 'x.x.x' } } as unknown) as Env,
|
||||
});
|
||||
const soService = new SavedObjectsService(coreContext);
|
||||
const coreSetup = ({
|
||||
elasticsearch: { adminClient$: of({ callAsInternalUser: jest.fn() }) },
|
||||
legacy: { uiExports: {}, pluginExtendedConfig: {} },
|
||||
} as unknown) as SavedObjectsSetupDeps;
|
||||
|
||||
await soService.setup(coreSetup);
|
||||
const migrator = (KibanaMigrator as jest.Mock<KibanaMigrator>).mock.instances[0];
|
||||
await soService.start({});
|
||||
expect(migrator.runMigrations).toHaveBeenCalledWith(true);
|
||||
});
|
||||
|
||||
it('skips KibanaMigrator migrations when migrations.skip=true', async () => {
|
||||
const configService = configServiceMock.create({ atPath: { skip: true } });
|
||||
const coreContext = mockCoreContext.create({ configService });
|
||||
const soService = new SavedObjectsService(coreContext);
|
||||
const coreSetup = ({
|
||||
elasticsearch: { adminClient$: of({ callAsInternalUser: jest.fn() }) },
|
||||
legacy: { uiExports: {}, pluginExtendedConfig: {} },
|
||||
} as unknown) as SavedObjectsSetupDeps;
|
||||
|
||||
await soService.setup(coreSetup);
|
||||
const migrator = (KibanaMigrator as jest.Mock<KibanaMigrator>).mock.instances[0];
|
||||
await soService.start({});
|
||||
expect(migrator.runMigrations).toHaveBeenCalledWith(true);
|
||||
});
|
||||
|
||||
it('resolves with KibanaMigrator after waiting for migrations to complete', async () => {
|
||||
const configService = configServiceMock.create({ atPath: { skip: false } });
|
||||
const coreContext = mockCoreContext.create({ configService });
|
||||
const soService = new SavedObjectsService(coreContext);
|
||||
const coreSetup = ({
|
||||
elasticsearch: { adminClient$: of({ callAsInternalUser: jest.fn() }) },
|
||||
legacy: { uiExports: {}, pluginExtendedConfig: {} },
|
||||
} as unknown) as SavedObjectsSetupDeps;
|
||||
|
||||
await soService.setup(coreSetup);
|
||||
const migrator = (KibanaMigrator as jest.Mock<KibanaMigrator>).mock.instances[0];
|
||||
expect(migrator.runMigrations).toHaveBeenCalledTimes(0);
|
||||
const startContract = await soService.start({});
|
||||
expect(startContract.migrator).toBeInstanceOf(KibanaMigrator);
|
||||
expect(migrator.runMigrations).toHaveBeenCalledWith(false);
|
||||
expect(migrator.runMigrations).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
});
|
||||
});
|
src/core/server/saved_objects/saved_objects_service.ts (new file, 126 lines)
@ -0,0 +1,126 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import { CoreService } from 'src/core/types';
|
||||
import { first } from 'rxjs/operators';
|
||||
import { KibanaMigrator, IKibanaMigrator } from './migrations';
|
||||
import { CoreContext } from '../core_context';
|
||||
import { LegacyServiceSetup } from '../legacy/legacy_service';
|
||||
import { ElasticsearchServiceSetup } from '../elasticsearch';
|
||||
import { KibanaConfigType } from '../kibana_config';
|
||||
import { retryCallCluster } from '../elasticsearch/retry_call_cluster';
|
||||
import { SavedObjectsConfigType } from './saved_objects_config';
|
||||
import { Logger } from '..';
|
||||
|
||||
/**
|
||||
* @public
|
||||
*/
|
||||
// eslint-disable-next-line @typescript-eslint/no-empty-interface
|
||||
export interface SavedObjectsServiceSetup {}
|
||||
|
||||
/**
|
||||
* @public
|
||||
*/
|
||||
export interface SavedObjectsServiceStart {
|
||||
migrator: IKibanaMigrator;
|
||||
}
|
||||
|
||||
/** @internal */
|
||||
export interface SavedObjectsSetupDeps {
|
||||
legacy: LegacyServiceSetup;
|
||||
elasticsearch: ElasticsearchServiceSetup;
|
||||
}
|
||||
|
||||
/** @internal */
|
||||
// eslint-disable-next-line @typescript-eslint/no-empty-interface
|
||||
export interface SavedObjectsStartDeps {}
|
||||
|
||||
export class SavedObjectsService
|
||||
implements CoreService<SavedObjectsServiceSetup, SavedObjectsServiceStart> {
|
||||
private migrator: KibanaMigrator | undefined;
|
||||
logger: Logger;
|
||||
|
||||
constructor(private readonly coreContext: CoreContext) {
|
||||
this.logger = coreContext.logger.get('savedobjects-service');
|
||||
}
|
||||
|
||||
public async setup(coreSetup: SavedObjectsSetupDeps) {
|
||||
this.logger.debug('Setting up SavedObjects service');
|
||||
|
||||
const {
|
||||
savedObjectSchemas,
|
||||
savedObjectMappings,
|
||||
savedObjectMigrations,
|
||||
savedObjectValidations,
|
||||
} = await coreSetup.legacy.uiExports;
|
||||
|
||||
const adminClient = await coreSetup.elasticsearch.adminClient$.pipe(first()).toPromise();
|
||||
|
||||
const kibanaConfig = await this.coreContext.configService
|
||||
.atPath<KibanaConfigType>('kibana')
|
||||
.pipe(first())
|
||||
.toPromise();
|
||||
|
||||
const savedObjectsConfig = await this.coreContext.configService
|
||||
.atPath<SavedObjectsConfigType>('migrations')
|
||||
.pipe(first())
|
||||
.toPromise();
|
||||
|
||||
this.migrator = new KibanaMigrator({
|
||||
savedObjectSchemas,
|
||||
savedObjectMappings,
|
||||
savedObjectMigrations,
|
||||
savedObjectValidations,
|
||||
logger: this.coreContext.logger.get('migrations'),
|
||||
kibanaVersion: this.coreContext.env.packageInfo.version,
|
||||
config: coreSetup.legacy.pluginExtendedConfig,
|
||||
savedObjectsConfig,
|
||||
kibanaConfig,
|
||||
callCluster: retryCallCluster(adminClient.callAsInternalUser),
|
||||
});
|
||||
|
||||
return ({} as any) as Promise<SavedObjectsServiceSetup>;
|
||||
}
|
||||
|
||||
public async start(core: SavedObjectsStartDeps): Promise<SavedObjectsServiceStart> {
|
||||
this.logger.debug('Starting SavedObjects service');
|
||||
|
||||
/**
|
||||
* Note: We want to ensure that migrations have completed before
|
||||
* continuing with further Core startup steps that might use SavedObjects
|
||||
* such as running the legacy server, legacy plugins and allowing incoming
|
||||
* HTTP requests.
|
||||
*
|
||||
* However, our build system optimize step and some tests depend on the
|
||||
* HTTP server running without an Elasticsearch server being available.
|
||||
* So, when the `migrations.skip` is true, we skip migrations altogether.
|
||||
*/
|
||||
const cliArgs = this.coreContext.env.cliArgs;
|
||||
const savedObjectsConfig = await this.coreContext.configService
|
||||
.atPath<SavedObjectsConfigType>('migrations')
|
||||
.pipe(first())
|
||||
.toPromise();
|
||||
const skipMigrations = cliArgs.optimize || savedObjectsConfig.skip;
|
||||
await this.migrator!.runMigrations(skipMigrations);
|
||||
|
||||
return { migrator: this.migrator! };
|
||||
}
|
||||
|
||||
public async stop() {}
|
||||
}
|
|
@@ -26,10 +26,12 @@ interface SavedObjectsSchemaTypeDefinition {
  convertToAliasScript?: string;
}

/** @internal */
export interface SavedObjectsSchemaDefinition {
  [key: string]: SavedObjectsSchemaTypeDefinition;
}

/** @internal */
export class SavedObjectsSchema {
  private readonly definition?: SavedObjectsSchemaDefinition;
  constructor(schemaDefinition?: SavedObjectsSchemaDefinition) {

@@ -77,6 +77,7 @@ function assertNonEmptyString(value: string, name: string) {
  }
}

/** @internal */
export class SavedObjectsSerializer {
  private readonly schema: SavedObjectsSchema;

@@ -26,9 +26,10 @@ import { SavedObjectsSchema } from '../schema';
import { SavedObjectsResolveImportErrorsOptions } from '../import/types';

/**
 * @public
 * @internal
 * @deprecated
 */
export interface SavedObjectsService<Request = any> {
export interface SavedObjectsLegacyService<Request = any> {
  // ATTENTION: these types are incomplete
  addScopedSavedObjectsClientWrapperFactory: ScopedSavedObjectsClientProvider<
    Request
@ -263,7 +263,7 @@ describe('SavedObjectsRepository', () => {
|
|||
onBeforeWrite = jest.fn();
|
||||
migrator = {
|
||||
migrateDocument: jest.fn(doc => doc),
|
||||
awaitMigration: async () => ({ status: 'skipped' }),
|
||||
runMigrations: async () => ({ status: 'skipped' }),
|
||||
};
|
||||
|
||||
const serializer = new SavedObjectsSerializer(schema);
|
||||
|
@ -297,7 +297,7 @@ describe('SavedObjectsRepository', () => {
|
|||
});
|
||||
|
||||
it('waits until migrations are complete before proceeding', async () => {
|
||||
migrator.awaitMigration = jest.fn(async () =>
|
||||
migrator.runMigrations = jest.fn(async () =>
|
||||
expect(callAdminCluster).not.toHaveBeenCalled()
|
||||
);
|
||||
|
||||
|
@ -313,7 +313,7 @@ describe('SavedObjectsRepository', () => {
|
|||
}
|
||||
)
|
||||
).resolves.toBeDefined();
|
||||
expect(migrator.awaitMigration).toHaveBeenCalledTimes(1);
|
||||
expect(migrator.runMigrations).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('formats Elasticsearch response', async () => {
|
||||
|
@ -552,7 +552,7 @@ describe('SavedObjectsRepository', () => {
|
|||
|
||||
describe('#bulkCreate', () => {
|
||||
it('waits until migrations are complete before proceeding', async () => {
|
||||
migrator.awaitMigration = jest.fn(async () =>
|
||||
migrator.runMigrations = jest.fn(async () =>
|
||||
expect(callAdminCluster).not.toHaveBeenCalled()
|
||||
);
|
||||
callAdminCluster.mockReturnValue({
|
||||
|
@ -576,7 +576,7 @@ describe('SavedObjectsRepository', () => {
|
|||
])
|
||||
).resolves.toBeDefined();
|
||||
|
||||
expect(migrator.awaitMigration).toHaveBeenCalledTimes(1);
|
||||
expect(migrator.runMigrations).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('formats Elasticsearch request', async () => {
|
||||
|
@ -998,7 +998,7 @@ describe('SavedObjectsRepository', () => {
|
|||
|
||||
describe('#delete', () => {
|
||||
it('waits until migrations are complete before proceeding', async () => {
|
||||
migrator.awaitMigration = jest.fn(async () =>
|
||||
migrator.runMigrations = jest.fn(async () =>
|
||||
expect(callAdminCluster).not.toHaveBeenCalled()
|
||||
);
|
||||
callAdminCluster.mockReturnValue({ result: 'deleted' });
|
||||
|
@ -1008,7 +1008,7 @@ describe('SavedObjectsRepository', () => {
|
|||
})
|
||||
).resolves.toBeDefined();
|
||||
|
||||
expect(migrator.awaitMigration).toHaveBeenCalledTimes(1);
|
||||
expect(migrator.runMigrations).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('throws notFound when ES is unable to find the document', async () => {
|
||||
|
@ -1114,14 +1114,14 @@ describe('SavedObjectsRepository', () => {
|
|||
|
||||
describe('#find', () => {
|
||||
it('waits until migrations are complete before proceeding', async () => {
|
||||
migrator.awaitMigration = jest.fn(async () =>
|
||||
migrator.runMigrations = jest.fn(async () =>
|
||||
expect(callAdminCluster).not.toHaveBeenCalled()
|
||||
);
|
||||
|
||||
callAdminCluster.mockReturnValue(noNamespaceSearchResults);
|
||||
await expect(savedObjectsRepository.find({ type: 'foo' })).resolves.toBeDefined();
|
||||
|
||||
expect(migrator.awaitMigration).toHaveBeenCalledTimes(1);
|
||||
expect(migrator.runMigrations).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('requires type to be defined', async () => {
|
||||
|
@ -1315,7 +1315,7 @@ describe('SavedObjectsRepository', () => {
|
|||
};
|
||||
|
||||
it('waits until migrations are complete before proceeding', async () => {
|
||||
migrator.awaitMigration = jest.fn(async () =>
|
||||
migrator.runMigrations = jest.fn(async () =>
|
||||
expect(callAdminCluster).not.toHaveBeenCalled()
|
||||
);
|
||||
|
||||
|
@ -1324,7 +1324,7 @@ describe('SavedObjectsRepository', () => {
|
|||
savedObjectsRepository.get('index-pattern', 'logstash-*')
|
||||
).resolves.toBeDefined();
|
||||
|
||||
expect(migrator.awaitMigration).toHaveBeenCalledTimes(1);
|
||||
expect(migrator.runMigrations).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('formats Elasticsearch response when there is no namespace', async () => {
|
||||
|
@ -1408,7 +1408,7 @@ describe('SavedObjectsRepository', () => {
|
|||
|
||||
describe('#bulkGet', () => {
|
||||
it('waits until migrations are complete before proceeding', async () => {
|
||||
migrator.awaitMigration = jest.fn(async () =>
|
||||
migrator.runMigrations = jest.fn(async () =>
|
||||
expect(callAdminCluster).not.toHaveBeenCalled()
|
||||
);
|
||||
|
||||
|
@ -1421,7 +1421,7 @@ describe('SavedObjectsRepository', () => {
|
|||
])
|
||||
).resolves.toBeDefined();
|
||||
|
||||
expect(migrator.awaitMigration).toHaveBeenCalledTimes(1);
|
||||
expect(migrator.runMigrations).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('prepends type to id when getting objects when there is no namespace', async () => {
|
||||
|
@ -1662,7 +1662,7 @@ describe('SavedObjectsRepository', () => {
|
|||
});
|
||||
|
||||
it('waits until migrations are complete before proceeding', async () => {
|
||||
migrator.awaitMigration = jest.fn(async () =>
|
||||
migrator.runMigrations = jest.fn(async () =>
|
||||
expect(callAdminCluster).not.toHaveBeenCalled()
|
||||
);
|
||||
|
||||
|
@ -1672,7 +1672,7 @@ describe('SavedObjectsRepository', () => {
|
|||
})
|
||||
).resolves.toBeDefined();
|
||||
|
||||
expect(migrator.awaitMigration).toHaveReturnedTimes(1);
|
||||
expect(migrator.runMigrations).toHaveReturnedTimes(1);
|
||||
});
|
||||
|
||||
it('mockReturnValue current ES document _seq_no and _primary_term encoded as version', async () => {
|
||||
|
|
|
@@ -125,7 +125,7 @@ export class SavedObjectsRepository {
    this._onBeforeWrite = onBeforeWrite;

    this._unwrappedCallCluster = async (...args: Parameters<CallCluster>) => {
      await migrator.awaitMigration();
      await migrator.runMigrations();
      return callCluster(...args);
    };
    this._schema = schema;
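In other words, every raw cluster call made through the repository is gated on the migrator. A minimal sketch of that pattern follows; the memoization is an illustrative assumption rather than the repository's actual code, and it shows why repeated calls become cheap once migrations have finished:

```typescript
// Sketch of the gating idea above, not the actual repository implementation.
type CallCluster = (endpoint: string, clientParams?: Record<string, unknown>) => Promise<unknown>;

function createGatedCallCluster(
  runMigrations: () => Promise<unknown>,
  callCluster: CallCluster
): CallCluster {
  let migrationsDone: Promise<unknown> | undefined;
  return async (endpoint, clientParams) => {
    if (!migrationsDone) {
      migrationsDone = runMigrations(); // assumed to resolve once migrations complete
    }
    await migrationsDone;
    return callCluster(endpoint, clientParams);
  };
}
```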
@@ -18,6 +18,10 @@
 */

import { SavedObjectsClient } from './service/saved_objects_client';
import { SavedObjectsMapping } from './mappings';
import { MigrationDefinition } from './migrations/core/document_migrator';
import { SavedObjectsSchemaDefinition } from './schema';
import { PropertyValidators } from './validation';

/**
 * Information about the migrations that have been applied to this SavedObject.

@@ -201,3 +205,15 @@ export interface SavedObjectsBaseOptions {
 * @public
 */
export type SavedObjectsClientContract = Pick<SavedObjectsClient, keyof SavedObjectsClient>;

/**
 * @internal
 * @deprecated
 */
export interface SavedObjectsLegacyUiExports {
  unknown: [{ pluginSpec: { getId: () => unknown }; type: unknown }] | undefined;
  savedObjectMappings: SavedObjectsMapping[];
  savedObjectMigrations: MigrationDefinition;
  savedObjectSchemas: SavedObjectsSchemaDefinition;
  savedObjectValidations: PropertyValidators;
}
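For context, a hypothetical legacy plugin contribution that would end up aggregated into `SavedObjectsLegacyUiExports` might look like the following; the plugin id, type name, and migration version are invented, and only the overall shape follows the interface above:

```typescript
// Illustrative only: field names follow SavedObjectsLegacyUiExports, values are made up.
const exampleLegacyUiExports = {
  savedObjectMappings: [
    {
      pluginId: 'myPlugin',
      properties: {
        'my-type': { properties: { title: { type: 'keyword' } } },
      },
    },
  ],
  savedObjectMigrations: {
    'my-type': {
      '8.0.0': (doc: { attributes: Record<string, unknown> }) => doc, // no-op migration
    },
  },
  savedObjectSchemas: {
    'my-type': { isNamespaceAgnostic: false },
  },
  savedObjectValidations: {},
};
```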
@ -10,6 +10,8 @@ import { ConfigOptions } from 'elasticsearch';
|
|||
import { DetailedPeerCertificate } from 'tls';
|
||||
import { Duration } from 'moment';
|
||||
import { IncomingHttpHeaders } from 'http';
|
||||
import { KibanaConfigType } from 'src/core/server/kibana_config';
|
||||
import { Logger as Logger_2 } from 'src/core/server/logging';
|
||||
import { ObjectType } from '@kbn/config-schema';
|
||||
import { Observable } from 'rxjs';
|
||||
import { PeerCertificate } from 'tls';
|
||||
|
@ -299,8 +301,12 @@ export interface InternalCoreSetup {
|
|||
http: HttpServiceSetup;
|
||||
}
|
||||
|
||||
// @public (undocumented)
|
||||
// @internal (undocumented)
|
||||
export interface InternalCoreStart {
|
||||
// Warning: (ae-forgotten-export) The symbol "SavedObjectsServiceStart" needs to be exported by the entry point index.d.ts
|
||||
//
|
||||
// (undocumented)
|
||||
savedObjects: SavedObjectsServiceStart;
|
||||
}
|
||||
|
||||
// @public
|
||||
|
@ -396,6 +402,8 @@ export interface LegacyServiceSetupDeps {
|
|||
|
||||
// @public @deprecated (undocumented)
|
||||
export interface LegacyServiceStartDeps {
|
||||
// Warning: (ae-incompatible-release-tags) The symbol "core" is marked as @public, but its signature references "InternalCoreStart" which is marked as @internal
|
||||
//
|
||||
// (undocumented)
|
||||
core: InternalCoreStart & {
|
||||
plugins: PluginsServiceStart;
|
||||
|
@ -955,6 +963,31 @@ export interface SavedObjectsImportUnsupportedTypeError {
|
|||
type: 'unsupported_type';
|
||||
}
|
||||
|
||||
// @internal @deprecated (undocumented)
|
||||
export interface SavedObjectsLegacyService<Request = any> {
|
||||
// Warning: (ae-forgotten-export) The symbol "ScopedSavedObjectsClientProvider" needs to be exported by the entry point index.d.ts
|
||||
//
|
||||
// (undocumented)
|
||||
addScopedSavedObjectsClientWrapperFactory: ScopedSavedObjectsClientProvider<Request>['addClientWrapperFactory'];
|
||||
// (undocumented)
|
||||
getSavedObjectsRepository(...rest: any[]): any;
|
||||
// (undocumented)
|
||||
getScopedSavedObjectsClient: ScopedSavedObjectsClientProvider<Request>['getClient'];
|
||||
// (undocumented)
|
||||
importExport: {
|
||||
objectLimit: number;
|
||||
importSavedObjects(options: SavedObjectsImportOptions): Promise<SavedObjectsImportResponse>;
|
||||
resolveImportErrors(options: SavedObjectsResolveImportErrorsOptions): Promise<SavedObjectsImportResponse>;
|
||||
getSortedObjectsForExport(options: SavedObjectsExportOptions): Promise<Readable>;
|
||||
};
|
||||
// (undocumented)
|
||||
SavedObjectsClient: typeof SavedObjectsClient;
|
||||
// (undocumented)
|
||||
schema: SavedObjectsSchema;
|
||||
// (undocumented)
|
||||
types: string[];
|
||||
}
|
||||
|
||||
// @public (undocumented)
|
||||
export interface SavedObjectsMigrationLogger {
|
||||
// (undocumented)
|
||||
|
@ -1003,9 +1036,7 @@ export interface SavedObjectsResolveImportErrorsOptions {
|
|||
supportedTypes: string[];
|
||||
}
|
||||
|
||||
// Warning: (ae-missing-release-tag) "SavedObjectsSchema" is exported by the package, but it is missing a release tag (@alpha, @beta, @public, or @internal)
|
||||
//
|
||||
// @public (undocumented)
|
||||
// @internal (undocumented)
|
||||
export class SavedObjectsSchema {
|
||||
// Warning: (ae-forgotten-export) The symbol "SavedObjectsSchemaDefinition" needs to be exported by the entry point index.d.ts
|
||||
constructor(schemaDefinition?: SavedObjectsSchemaDefinition);
|
||||
|
@ -1019,9 +1050,7 @@ export class SavedObjectsSchema {
|
|||
isNamespaceAgnostic(type: string): boolean;
|
||||
}
|
||||
|
||||
// Warning: (ae-missing-release-tag) "SavedObjectsSerializer" is exported by the package, but it is missing a release tag (@alpha, @beta, @public, or @internal)
|
||||
//
|
||||
// @public (undocumented)
|
||||
// @internal (undocumented)
|
||||
export class SavedObjectsSerializer {
|
||||
constructor(schema: SavedObjectsSchema);
|
||||
generateRawId(namespace: string | undefined, type: string, id?: string): string;
|
||||
|
@ -1031,33 +1060,6 @@ export class SavedObjectsSerializer {
|
|||
savedObjectToRaw(savedObj: SanitizedSavedObjectDoc): SavedObjectsRawDoc;
|
||||
}
|
||||
|
||||
// @public (undocumented)
|
||||
export interface SavedObjectsService<Request = any> {
|
||||
// Warning: (ae-forgotten-export) The symbol "ScopedSavedObjectsClientProvider" needs to be exported by the entry point index.d.ts
|
||||
//
|
||||
// (undocumented)
|
||||
addScopedSavedObjectsClientWrapperFactory: ScopedSavedObjectsClientProvider<Request>['addClientWrapperFactory'];
|
||||
// (undocumented)
|
||||
getSavedObjectsRepository(...rest: any[]): any;
|
||||
// (undocumented)
|
||||
getScopedSavedObjectsClient: ScopedSavedObjectsClientProvider<Request>['getClient'];
|
||||
// (undocumented)
|
||||
importExport: {
|
||||
objectLimit: number;
|
||||
importSavedObjects(options: SavedObjectsImportOptions): Promise<SavedObjectsImportResponse>;
|
||||
resolveImportErrors(options: SavedObjectsResolveImportErrorsOptions): Promise<SavedObjectsImportResponse>;
|
||||
getSortedObjectsForExport(options: SavedObjectsExportOptions): Promise<Readable>;
|
||||
};
|
||||
// Warning: (ae-incompatible-release-tags) The symbol "SavedObjectsClient" is marked as @public, but its signature references "SavedObjectsClient" which is marked as @internal
|
||||
//
|
||||
// (undocumented)
|
||||
SavedObjectsClient: typeof SavedObjectsClient;
|
||||
// (undocumented)
|
||||
schema: SavedObjectsSchema;
|
||||
// (undocumented)
|
||||
types: string[];
|
||||
}
|
||||
|
||||
// @public (undocumented)
|
||||
export interface SavedObjectsUpdateOptions extends SavedObjectsBaseOptions {
|
||||
// (undocumented)
|
||||
|
|
|
@ -23,6 +23,7 @@ import {
|
|||
mockLegacyService,
|
||||
mockPluginsService,
|
||||
mockConfigService,
|
||||
mockSavedObjectsService,
|
||||
} from './index.test.mocks';
|
||||
|
||||
import { BehaviorSubject } from 'rxjs';
|
||||
|
@ -51,6 +52,7 @@ test('sets up services on "setup"', async () => {
|
|||
expect(mockElasticsearchService.setup).not.toHaveBeenCalled();
|
||||
expect(mockPluginsService.setup).not.toHaveBeenCalled();
|
||||
expect(mockLegacyService.setup).not.toHaveBeenCalled();
|
||||
expect(mockSavedObjectsService.setup).not.toHaveBeenCalled();
|
||||
|
||||
await server.setup();
|
||||
|
||||
|
@ -58,6 +60,7 @@ test('sets up services on "setup"', async () => {
|
|||
expect(mockElasticsearchService.setup).toHaveBeenCalledTimes(1);
|
||||
expect(mockPluginsService.setup).toHaveBeenCalledTimes(1);
|
||||
expect(mockLegacyService.setup).toHaveBeenCalledTimes(1);
|
||||
expect(mockSavedObjectsService.setup).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
test('runs services on "start"', async () => {
|
||||
|
@ -70,10 +73,12 @@ test('runs services on "start"', async () => {
|
|||
|
||||
expect(mockHttpService.start).not.toHaveBeenCalled();
|
||||
expect(mockLegacyService.start).not.toHaveBeenCalled();
|
||||
expect(mockSavedObjectsService.start).not.toHaveBeenCalled();
|
||||
await server.start();
|
||||
|
||||
expect(mockHttpService.start).toHaveBeenCalledTimes(1);
|
||||
expect(mockLegacyService.start).toHaveBeenCalledTimes(1);
|
||||
expect(mockSavedObjectsService.start).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
test('does not fail on "setup" if there are unused paths detected', async () => {
|
||||
|
@ -93,6 +98,7 @@ test('stops services on "stop"', async () => {
|
|||
expect(mockElasticsearchService.stop).not.toHaveBeenCalled();
|
||||
expect(mockPluginsService.stop).not.toHaveBeenCalled();
|
||||
expect(mockLegacyService.stop).not.toHaveBeenCalled();
|
||||
expect(mockSavedObjectsService.stop).not.toHaveBeenCalled();
|
||||
|
||||
await server.stop();
|
||||
|
||||
|
@ -100,6 +106,7 @@ test('stops services on "stop"', async () => {
|
|||
expect(mockElasticsearchService.stop).toHaveBeenCalledTimes(1);
|
||||
expect(mockPluginsService.stop).toHaveBeenCalledTimes(1);
|
||||
expect(mockLegacyService.stop).toHaveBeenCalledTimes(1);
|
||||
expect(mockSavedObjectsService.stop).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
test(`doesn't setup core services if config validation fails`, async () => {
|
||||
|
|
|
@@ -26,11 +26,14 @@ import { HttpService, HttpServiceSetup } from './http';
import { LegacyService } from './legacy';
import { Logger, LoggerFactory } from './logging';
import { PluginsService, config as pluginsConfig } from './plugins';
import { SavedObjectsService } from '../server/saved_objects';

import { config as elasticsearchConfig } from './elasticsearch';
import { config as httpConfig } from './http';
import { config as loggingConfig } from './logging';
import { config as devConfig } from './dev';
import { config as kibanaConfig } from './kibana_config';
import { config as savedObjectsConfig } from './saved_objects';
import { mapToObject } from '../utils/';
import { ContextService } from './context';
import { InternalCoreSetup } from './index';

@@ -42,9 +45,10 @@ export class Server {
  private readonly context: ContextService;
  private readonly elasticsearch: ElasticsearchService;
  private readonly http: HttpService;
  private readonly plugins: PluginsService;
  private readonly legacy: LegacyService;
  private readonly log: Logger;
  private readonly plugins: PluginsService;
  private readonly savedObjects: SavedObjectsService;

  constructor(
    readonly config$: Observable<Config>,

@@ -60,6 +64,7 @@ export class Server {
    this.plugins = new PluginsService(core);
    this.legacy = new LegacyService(core);
    this.elasticsearch = new ElasticsearchService(core);
    this.savedObjects = new SavedObjectsService(core);
  }

  public async setup() {

@@ -88,18 +93,26 @@ export class Server {
    this.registerCoreContext(coreSetup);
    const pluginsSetup = await this.plugins.setup(coreSetup);

    await this.legacy.setup({
    const legacySetup = await this.legacy.setup({
      core: { ...coreSetup, plugins: pluginsSetup },
      plugins: mapToObject(pluginsSetup.contracts),
    });

    await this.savedObjects.setup({
      elasticsearch: elasticsearchServiceSetup,
      legacy: legacySetup,
    });

    return coreSetup;
  }

  public async start() {
    this.log.debug('starting server');
    const pluginsStart = await this.plugins.start({});
    const savedObjectsStart = await this.savedObjects.start({});

    const coreStart = {
      savedObjects: savedObjectsStart,
      plugins: pluginsStart,
    };

@@ -109,6 +122,7 @@ export class Server {
    });

    await this.http.start();

    return coreStart;
  }

@@ -117,6 +131,7 @@ export class Server {

    await this.legacy.stop();
    await this.plugins.stop();
    await this.savedObjects.stop();
    await this.elasticsearch.stop();
    await this.http.stop();
  }

@@ -148,6 +163,8 @@ export class Server {
      [httpConfig.path, httpConfig.schema],
      [pluginsConfig.path, pluginsConfig.schema],
      [devConfig.path, devConfig.schema],
      [kibanaConfig.path, kibanaConfig.schema],
      [savedObjectsConfig.path, savedObjectsConfig.schema],
    ];

    for (const [path, schema] of schemas) {
@@ -77,39 +77,41 @@ export async function deleteKibanaIndices({ client, stats, log }) {
 */
export async function migrateKibanaIndex({ client, log, kibanaPluginIds }) {
  const uiExports = await getUiExports(kibanaPluginIds);
  const version = await loadElasticVersion();
  const kibanaVersion = await loadKibanaVersion();

  const config = {
    'kibana.index': '.kibana',
    'migrations.scrollDuration': '5m',
    'migrations.batchSize': 100,
    'migrations.pollInterval': 100,
    'xpack.task_manager.index': '.kibana_task_manager',
  };
  const ready = async () => undefined;
  const elasticsearch = {
    getCluster: () => ({
      callWithInternalUser: (path, ...args) => _.get(client, path).call(client, ...args),
    }),
    waitUntilReady: ready,

  const migratorOptions = {
    config: { get: path => config[path] },
    savedObjectsConfig: {
      'scrollDuration': '5m',
      'batchSize': 100,
      'pollInterval': 100,
    },
    kibanaConfig: {
      index: '.kibana',
    },
    logger: {
      trace: log.verbose.bind(log),
      debug: log.debug.bind(log),
      info: log.info.bind(log),
      warn: log.warning.bind(log),
      error: log.error.bind(log),
    },
    version: kibanaVersion,
    savedObjectSchemas: uiExports.savedObjectSchemas,
    savedObjectMappings: uiExports.savedObjectMappings,
    savedObjectMigrations: uiExports.savedObjectMigrations,
    savedObjectValidations: uiExports.savedObjectValidations,
    callCluster: (path, ...args) => _.get(client, path).call(client, ...args),
  };

  const server = {
    log: ([logType, messageType], ...args) => log[logType](`[${messageType}] ${args.join(' ')}`),
    config: () => ({ get: path => config[path] }),
    plugins: { elasticsearch },
  };

  const kbnServer = {
    server,
    version,
    uiExports,
    ready,
  };

  return await new KibanaMigrator({ kbnServer }).awaitMigration();
  return await new KibanaMigrator(migratorOptions).runMigrations();
}

async function loadElasticVersion() {
async function loadKibanaVersion() {
  const readFile = promisify(fs.readFile);
  const packageJson = await readFile(path.join(__dirname, '../../../../package.json'));
  return JSON.parse(packageJson).version;
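A hedged usage sketch of the refactored helper above; the client and log names are placeholders, and the real call sites live inside esArchiver:

```typescript
// Illustration only (not from the source): how a test helper might invoke the
// refactored migrateKibanaIndex above. `esClient` and `toolingLog` are placeholders.
async function rebuildKibanaIndex(esClient, toolingLog) {
  await migrateKibanaIndex({
    client: esClient, // elasticsearch client; calls are resolved via _.get(client, path)
    log: toolingLog, // logger exposing verbose/debug/info/warning/error
    kibanaPluginIds: ['kibana'], // plugins whose uiExports supply mappings/migrations
  });
}
```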
@ -39,7 +39,7 @@ import {
|
|||
isInvalidPackError,
|
||||
} from './errors';
|
||||
|
||||
function defaultConfig(settings) {
|
||||
export function defaultConfig(settings) {
|
||||
return Config.withDefaultSchema(
|
||||
transformDeprecations(settings)
|
||||
);
|
||||
|
|
|
@ -103,7 +103,10 @@ export class PluginSpec {
|
|||
throw createInvalidPluginError(this, 'plugin.publicDir must be an absolute path');
|
||||
}
|
||||
if (basename(this._publicDir) !== 'public') {
|
||||
throw createInvalidPluginError(this, `publicDir for plugin ${this.getId()} must end with a "public" directory.`);
|
||||
throw createInvalidPluginError(
|
||||
this,
|
||||
`publicDir for plugin ${this.getId()} must end with a "public" directory.`
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -145,7 +148,9 @@ export class PluginSpec {
|
|||
// the version of kibana down to the patch level. If these two versions need
|
||||
// to diverge, they can specify a kibana.version in the package to indicate the
|
||||
// version of kibana the plugin is intended to work with.
|
||||
return this._kibanaVersion || get(this.getPack().getPkg(), 'kibana.version') || this.getVersion();
|
||||
return (
|
||||
this._kibanaVersion || get(this.getPack().getPkg(), 'kibana.version') || this.getVersion()
|
||||
);
|
||||
}
|
||||
|
||||
isVersionCompatible(actualKibanaVersion) {
|
||||
|
|
|
@ -25,7 +25,7 @@ const RUN_KBN_SERVER_STARTUP = require.resolve('./fixtures/run_kbn_server_startu
|
|||
const SETUP_NODE_ENV = require.resolve('../../../../setup_node_env');
|
||||
const SECOND = 1000;
|
||||
|
||||
describe('config/deprecation warnings mixin', function () {
|
||||
describe('config/deprecation warnings', function () {
|
||||
this.timeout(15 * SECOND);
|
||||
|
||||
let stdio = '';
|
||||
|
@ -99,11 +99,11 @@ describe('config/deprecation warnings mixin', function () {
|
|||
}
|
||||
})
|
||||
.filter(Boolean)
|
||||
.filter(line => (
|
||||
.filter(line =>
|
||||
line.type === 'log' &&
|
||||
line.tags.includes('deprecation') &&
|
||||
line.tags.includes('warning')
|
||||
));
|
||||
);
|
||||
|
||||
expect(deprecationLines).to.have.length(1);
|
||||
expect(deprecationLines[0]).to.have.property('message', 'uiSettings.enabled is deprecated and is no longer used');
|
||||
|
|
|
@@ -154,12 +154,6 @@ export default () => Joi.object({
    data: Joi.string().default(getData())
  }).default(),

  migrations: Joi.object({
    batchSize: Joi.number().default(100),
    scrollDuration: Joi.string().default('15m'),
    pollInterval: Joi.number().default(1500),
  }).default(),

  stats: Joi.object({
    maximumWaitTimeForAllCollectorsInS: Joi.number().default(60)
  }).default(),
src/legacy/server/kbn_server.d.ts
@@ -26,7 +26,7 @@ import {
  ElasticsearchServiceSetup,
  LoggerFactory,
  SavedObjectsClientContract,
  SavedObjectsService,
  SavedObjectsLegacyService,
} from '../../core/server';

import { LegacyServiceSetupDeps, LegacyServiceStartDeps } from '../../core/server/';

@@ -62,7 +62,7 @@ declare module 'hapi' {
  interface Server {
    config: () => KibanaConfig;
    indexPatternsServiceFactory: IndexPatternsServiceFactory;
    savedObjects: SavedObjectsService;
    savedObjects: SavedObjectsLegacyService;
    usage: { collectorSet: any };
    injectUiAppVars: (pluginName: string, getAppVars: () => { [key: string]: any }) => void;
    getHiddenUiAppById(appId: string): UiApp;

@@ -127,4 +127,4 @@ export { Server, Request, ResponseToolkit } from 'hapi';

// Re-export commonly accessed api types.
export { IndexPatternsService } from './index_patterns';
export { SavedObjectsService, SavedObjectsClient } from 'src/core/server';
export { SavedObjectsLegacyService, SavedObjectsClient } from 'src/core/server';
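Legacy code keeps reaching saved objects through the hapi `server.savedObjects` property, now typed as `SavedObjectsLegacyService`. A hedged sketch of a consumer follows; the route path and query are hypothetical, while the client accessors come from the interface shown in this diff:

```typescript
// Hypothetical legacy route, for illustration only. Assumes the hapi Server
// augmentation above; the path and response shape are invented.
import { Server } from 'hapi';

export function registerExampleRoute(server: Server) {
  server.route({
    method: 'GET',
    path: '/api/example/index-pattern-count',
    async handler(request) {
      // getScopedSavedObjectsClient comes from SavedObjectsLegacyService
      const savedObjectsClient = server.savedObjects.getScopedSavedObjectsClient(request);
      const { total } = await savedObjectsClient.find({ type: 'index-pattern' });
      return { total };
    },
  });
}
```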
@ -24,7 +24,6 @@ import { isWorker } from 'cluster';
|
|||
import { fromRoot, pkg } from '../utils';
|
||||
import { Config } from './config';
|
||||
import loggingConfiguration from './logging/configuration';
|
||||
import configSetupMixin from './config/setup';
|
||||
import httpMixin from './http';
|
||||
import { coreMixin } from './core';
|
||||
import { loggingMixin } from './logging';
|
||||
|
@ -50,14 +49,16 @@ import { i18nMixin } from './i18n';
|
|||
const rootDir = fromRoot('.');
|
||||
|
||||
export default class KbnServer {
|
||||
constructor(settings, core) {
|
||||
constructor(settings, config, core, legacyPlugins) {
|
||||
this.name = pkg.name;
|
||||
this.version = pkg.version;
|
||||
this.build = pkg.build || false;
|
||||
this.rootDir = rootDir;
|
||||
this.settings = settings || {};
|
||||
this.config = config;
|
||||
|
||||
const { setupDeps, startDeps, handledConfigPaths, logger } = core;
|
||||
|
||||
this.newPlatform = {
|
||||
coreContext: {
|
||||
logger,
|
||||
|
@ -70,12 +71,13 @@ export default class KbnServer {
|
|||
},
|
||||
};
|
||||
|
||||
this.uiExports = legacyPlugins.uiExports;
|
||||
this.pluginSpecs = legacyPlugins.pluginSpecs;
|
||||
this.disabledPluginSpecs = legacyPlugins.disabledPluginSpecs;
|
||||
|
||||
this.ready = constant(this.mixin(
|
||||
Plugins.waitForInitSetupMixin,
|
||||
|
||||
// sets this.config, reads this.settings
|
||||
configSetupMixin,
|
||||
|
||||
// sets this.server
|
||||
httpMixin,
|
||||
|
||||
|
@ -101,7 +103,7 @@ export default class KbnServer {
|
|||
// tell the config we are done loading plugins
|
||||
configCompleteMixin,
|
||||
|
||||
// setup this.uiExports and this.uiBundles
|
||||
// setup this.uiBundles
|
||||
uiMixin,
|
||||
indexPatternsMixin,
|
||||
|
||||
|
@ -161,8 +163,6 @@ export default class KbnServer {
|
|||
|
||||
const { server, config } = this;
|
||||
|
||||
await server.kibanaMigrator.awaitMigration();
|
||||
|
||||
if (isWorker) {
|
||||
// help parent process know when we are ready
|
||||
process.send(['WORKER_LISTENING']);
|
||||
|
|
|
@ -16,96 +16,9 @@
|
|||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import * as Rx from 'rxjs';
|
||||
import { map, distinct, toArray, tap } from 'rxjs/operators';
|
||||
import { findPluginSpecs } from '../../plugin_discovery';
|
||||
|
||||
import { Plugin } from './lib';
|
||||
|
||||
export async function scanMixin(kbnServer, server, config) {
|
||||
const {
|
||||
pack$,
|
||||
invalidDirectoryError$,
|
||||
invalidPackError$,
|
||||
otherError$,
|
||||
deprecation$,
|
||||
invalidVersionSpec$,
|
||||
spec$,
|
||||
disabledSpec$,
|
||||
} = findPluginSpecs(kbnServer.settings, config);
|
||||
|
||||
const logging$ = Rx.merge(
|
||||
pack$.pipe(
|
||||
tap(definition => {
|
||||
const path = definition.getPath();
|
||||
server.logWithMetadata(['plugin', 'debug'], `Found plugin at ${path}`, {
|
||||
path
|
||||
});
|
||||
})
|
||||
),
|
||||
|
||||
invalidDirectoryError$.pipe(
|
||||
tap(error => {
|
||||
server.logWithMetadata(['plugin', 'warning'], `${error.code}: Unable to scan directory for plugins "${error.path}"`, {
|
||||
err: error,
|
||||
dir: error.path
|
||||
});
|
||||
})
|
||||
),
|
||||
|
||||
invalidPackError$.pipe(
|
||||
tap(error => {
|
||||
server.logWithMetadata(['plugin', 'warning'], `Skipping non-plugin directory at ${error.path}`, {
|
||||
path: error.path
|
||||
});
|
||||
})
|
||||
),
|
||||
|
||||
otherError$.pipe(
|
||||
tap(error => {
|
||||
// rethrow unhandled errors, which will fail the server
|
||||
throw error;
|
||||
})
|
||||
),
|
||||
|
||||
invalidVersionSpec$.pipe(
|
||||
map(spec => {
|
||||
const name = spec.getId();
|
||||
const pluginVersion = spec.getExpectedKibanaVersion();
|
||||
const kibanaVersion = config.get('pkg.version');
|
||||
return `Plugin "${name}" was disabled because it expected Kibana version "${pluginVersion}", and found "${kibanaVersion}".`;
|
||||
}),
|
||||
distinct(),
|
||||
tap(message => {
|
||||
server.log(['plugin', 'warning'], message);
|
||||
})
|
||||
),
|
||||
|
||||
deprecation$.pipe(
|
||||
tap(({ spec, message }) => {
|
||||
server.log(['warning', spec.getConfigPrefix(), 'config', 'deprecation'], message);
|
||||
})
|
||||
)
|
||||
);
|
||||
|
||||
const enabledSpecs$ = spec$.pipe(
|
||||
toArray(),
|
||||
tap(specs => {
|
||||
kbnServer.pluginSpecs = specs;
|
||||
})
|
||||
);
|
||||
|
||||
const disabledSpecs$ = disabledSpec$.pipe(
|
||||
toArray(),
|
||||
tap(specs => {
|
||||
kbnServer.disabledPluginSpecs = specs;
|
||||
})
|
||||
);
|
||||
|
||||
// await completion of enabledSpecs$, disabledSpecs$, and logging$
|
||||
await Rx.merge(logging$, enabledSpecs$, disabledSpecs$).toPromise();
|
||||
|
||||
export async function scanMixin(kbnServer) {
|
||||
kbnServer.plugins = kbnServer.pluginSpecs.map(spec => (
|
||||
new Plugin(kbnServer, spec)
|
||||
));
|
||||
|
|
|
@@ -20,7 +20,6 @@
// Disable lint errors for imports from src/core/server/saved_objects until SavedObjects migration is complete
/* eslint-disable @kbn/eslint/no-restricted-paths */

import { KibanaMigrator } from '../../../core/server/saved_objects/migrations';
import { SavedObjectsSchema } from '../../../core/server/saved_objects/schema';
import { SavedObjectsSerializer } from '../../../core/server/saved_objects/serialization';
import {

@@ -58,7 +57,7 @@ function getImportableAndExportableTypes({ kbnServer, visibleTypes }) {
}

export function savedObjectsMixin(kbnServer, server) {
  const migrator = new KibanaMigrator({ kbnServer });
  const migrator = kbnServer.newPlatform.start.core.savedObjects.migrator;
  const mappings = migrator.getActiveMappings();
  const allTypes = Object.keys(getRootPropertiesObjects(mappings));
  const schema = new SavedObjectsSchema(kbnServer.uiExports.savedObjectSchemas);
@ -18,6 +18,48 @@
|
|||
*/
|
||||
|
||||
import { savedObjectsMixin } from './saved_objects_mixin';
|
||||
// eslint-disable-next-line @kbn/eslint/no-restricted-paths
|
||||
import { mockKibanaMigrator } from '../../../core/server/saved_objects/migrations/kibana/kibana_migrator.mock';
|
||||
|
||||
const savedObjectMappings = [
|
||||
{
|
||||
pluginId: 'testtype',
|
||||
properties: {
|
||||
testtype: {
|
||||
properties: {
|
||||
name: { type: 'keyword' },
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
pluginId: 'testtype2',
|
||||
properties: {
|
||||
doc1: {
|
||||
properties: {
|
||||
name: { type: 'keyword' },
|
||||
},
|
||||
},
|
||||
doc2: {
|
||||
properties: {
|
||||
name: { type: 'keyword' },
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
pluginId: 'secretPlugin',
|
||||
properties: {
|
||||
hiddentype: {
|
||||
properties: {
|
||||
secret: { type: 'keyword' },
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
const migrator = mockKibanaMigrator.create({ savedObjectMappings });
|
||||
|
||||
describe('Saved Objects Mixin', () => {
|
||||
let mockKbnServer;
|
||||
|
@ -55,6 +97,9 @@ describe('Saved Objects Mixin', () => {
|
|||
},
|
||||
};
|
||||
mockKbnServer = {
|
||||
newPlatform: {
|
||||
start: { core: { savedObjects: { migrator } } },
|
||||
},
|
||||
server: mockServer,
|
||||
ready: () => {},
|
||||
pluginSpecs: {
|
||||
|
@ -63,6 +108,7 @@ describe('Saved Objects Mixin', () => {
|
|||
},
|
||||
},
|
||||
uiExports: {
|
||||
savedObjectMappings,
|
||||
savedObjectSchemas: {
|
||||
hiddentype: {
|
||||
hidden: true,
|
||||
|
@ -71,43 +117,6 @@ describe('Saved Objects Mixin', () => {
|
|||
indexPattern: 'other-index',
|
||||
},
|
||||
},
|
||||
savedObjectMappings: [
|
||||
{
|
||||
pluginId: 'testtype',
|
||||
properties: {
|
||||
testtype: {
|
||||
properties: {
|
||||
name: { type: 'keyword' },
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
pluginId: 'testtype2',
|
||||
properties: {
|
||||
doc1: {
|
||||
properties: {
|
||||
name: { type: 'keyword' },
|
||||
},
|
||||
},
|
||||
doc2: {
|
||||
properties: {
|
||||
name: { type: 'keyword' },
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
pluginId: 'secretPlugin',
|
||||
properties: {
|
||||
hiddentype: {
|
||||
properties: {
|
||||
secret: { type: 'keyword' },
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
};
|
||||
});
|
||||
|
@ -290,7 +299,7 @@ describe('Saved Objects Mixin', () => {
|
|||
});
|
||||
|
||||
it('should call underlining callCluster', async () => {
|
||||
stubCallCluster.mockImplementation(method => {
|
||||
mockCallCluster.mockImplementation(method => {
|
||||
if (method === 'indices.get') {
|
||||
return { status: 404 };
|
||||
} else if (method === 'indices.getAlias') {
|
||||
|
@ -301,7 +310,7 @@ describe('Saved Objects Mixin', () => {
|
|||
});
|
||||
const client = await service.getScopedSavedObjectsClient();
|
||||
await client.create('testtype');
|
||||
expect(stubCallCluster).toHaveBeenCalled();
|
||||
expect(mockCallCluster).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
|
|
|
@ -17,14 +17,15 @@
|
|||
* under the License.
|
||||
*/
|
||||
|
||||
// eslint-disable-next-line @kbn/eslint/no-restricted-paths
|
||||
import { SavedObjectsLegacyUiExports } from 'src/core/server/types';
|
||||
// @ts-ignore
|
||||
import { UI_EXPORT_DEFAULTS } from './ui_export_defaults';
|
||||
// @ts-ignore
|
||||
import * as uiExportTypeReducers from './ui_export_types';
|
||||
// @ts-ignore
|
||||
import { reduceExportSpecs } from '../../plugin_discovery';
|
||||
|
||||
export function collectUiExports(pluginSpecs) {
|
||||
return reduceExportSpecs(
|
||||
pluginSpecs,
|
||||
uiExportTypeReducers,
|
||||
UI_EXPORT_DEFAULTS
|
||||
);
|
||||
export function collectUiExports(pluginSpecs: unknown[]): SavedObjectsLegacyUiExports {
|
||||
return reduceExportSpecs(pluginSpecs, uiExportTypeReducers, UI_EXPORT_DEFAULTS);
|
||||
}
|
|
@ -18,4 +18,3 @@
|
|||
*/
|
||||
|
||||
export { collectUiExports } from './collect_ui_exports';
|
||||
export { uiExportsMixin } from './ui_exports_mixin';
|
||||
|
|
|
@ -17,7 +17,6 @@
|
|||
* under the License.
|
||||
*/
|
||||
|
||||
import { uiExportsMixin } from './ui_exports';
|
||||
import { fieldFormatsMixin } from './field_formats';
|
||||
import { tutorialsMixin } from './tutorials_mixin';
|
||||
import { uiAppsMixin } from './ui_apps';
|
||||
|
@ -27,7 +26,6 @@ import { uiRenderMixin } from './ui_render';
|
|||
import { uiSettingsMixin } from './ui_settings';
|
||||
|
||||
export async function uiMixin(kbnServer) {
|
||||
await kbnServer.mixin(uiExportsMixin);
|
||||
await kbnServer.mixin(uiAppsMixin);
|
||||
await kbnServer.mixin(uiBundlesMixin);
|
||||
await kbnServer.mixin(uiSettingsMixin);
|
||||
|
|
|
@ -60,6 +60,7 @@ module.exports = function (grunt) {
|
|||
'--plugins.initialize=false',
|
||||
'--optimize.bundleFilter=tests',
|
||||
'--server.port=5610',
|
||||
'--migrations.skip=true'
|
||||
];
|
||||
|
||||
const NODE = 'node';
|
||||
|
|
|
@ -285,7 +285,7 @@ async function migrateIndex({ callCluster, index, migrations, mappingProperties,
|
|||
obsoleteIndexTemplatePattern,
|
||||
mappingProperties,
|
||||
batchSize: 10,
|
||||
log: _.noop,
|
||||
log: { info: _.noop, debug: _.noop, warn: _.noop },
|
||||
pollInterval: 50,
|
||||
scrollDuration: '5m',
|
||||
serializer: new SavedObjectsSerializer(new SavedObjectsSchema()),
|
||||
|
|
|
@ -14,6 +14,7 @@
|
|||
"**/*.ts",
|
||||
"**/*.tsx",
|
||||
"../typings/lodash.topath/*.ts",
|
||||
"typings/**/*",
|
||||
],
|
||||
"exclude": [
|
||||
"plugin_functional/plugins/**/*"
|
||||
|
|
|
@ -17,10 +17,8 @@
|
|||
* under the License.
|
||||
*/
|
||||
|
||||
import { Config } from './config';
|
||||
import { transformDeprecations } from './transform_deprecations';
|
||||
type MethodKeysOf<T> = {
|
||||
[K in keyof T]: T[K] extends (...args: any[]) => any ? K : never;
|
||||
}[keyof T];
|
||||
|
||||
export default function (kbnServer) {
|
||||
const settings = transformDeprecations(kbnServer.settings);
|
||||
kbnServer.config = Config.withDefaultSchema(settings);
|
||||
}
|
||||
type PublicMethodsOf<T> = Pick<T, MethodKeysOf<T>>;
|
|
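The two mapped types above extract just the callable surface of a class. A small self-contained example (the `Greeter` class is invented for illustration):

```typescript
// MethodKeysOf<T> resolves to the union of T's method names; PublicMethodsOf<T>
// picks only those members, which is convenient for typing hand-rolled mocks.
class Greeter {
  public prefix = 'Hello';
  public greet(name: string): string {
    return `${this.prefix}, ${name}!`;
  }
}

// Equivalent to: { greet(name: string): string }
type GreeterContract = PublicMethodsOf<Greeter>;

const greeterMock: GreeterContract = {
  greet: name => `Hi, ${name}!`,
};
```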
@ -5,7 +5,7 @@
|
|||
*/
|
||||
|
||||
import { ElasticsearchPlugin } from 'src/legacy/core_plugins/elasticsearch';
|
||||
import { SavedObjectsService } from 'src/legacy/server/kbn_server';
|
||||
import { SavedObjectsLegacyService } from 'src/legacy/server/kbn_server';
|
||||
import { callWithInternalUserFactory } from '../../client/call_with_internal_user_factory';
|
||||
|
||||
export interface MlTelemetry {
|
||||
|
@ -30,7 +30,7 @@ export function createMlTelemetry(count: number = 0): MlTelemetry {
|
|||
// savedObjects
|
||||
export function storeMlTelemetry(
|
||||
elasticsearchPlugin: ElasticsearchPlugin,
|
||||
savedObjects: SavedObjectsService,
|
||||
savedObjects: SavedObjectsLegacyService,
|
||||
mlTelemetry: MlTelemetry
|
||||
): void {
|
||||
const savedObjectsClient = getSavedObjectsClient(elasticsearchPlugin, savedObjects);
|
||||
|
@ -42,7 +42,7 @@ export function storeMlTelemetry(
|
|||
// needs savedObjects and elasticsearchPlugin
|
||||
export function getSavedObjectsClient(
|
||||
elasticsearchPlugin: ElasticsearchPlugin,
|
||||
savedObjects: SavedObjectsService
|
||||
savedObjects: SavedObjectsLegacyService
|
||||
): any {
|
||||
const { SavedObjectsClient, getSavedObjectsRepository } = savedObjects;
|
||||
const callWithInternalUser = callWithInternalUserFactory(elasticsearchPlugin);
|
||||
|
@ -52,7 +52,7 @@ export function getSavedObjectsClient(
|
|||
|
||||
export async function incrementFileDataVisualizerIndexCreationCount(
|
||||
elasticsearchPlugin: ElasticsearchPlugin,
|
||||
savedObjects: SavedObjectsService
|
||||
savedObjects: SavedObjectsLegacyService
|
||||
): Promise<void> {
|
||||
const savedObjectsClient = getSavedObjectsClient(elasticsearchPlugin, savedObjects);
|
||||
|
||||
|
|
|
@ -7,7 +7,7 @@
|
|||
import Boom from 'boom';
|
||||
import { i18n } from '@kbn/i18n';
|
||||
import { ServerRoute } from 'hapi';
|
||||
import { KibanaConfig, SavedObjectsService } from 'src/legacy/server/kbn_server';
|
||||
import { KibanaConfig, SavedObjectsLegacyService } from 'src/legacy/server/kbn_server';
|
||||
import { HttpServiceSetup, Logger, PluginInitializerContext } from 'src/core/server';
|
||||
import { ElasticsearchPlugin } from 'src/legacy/core_plugins/elasticsearch';
|
||||
import { XPackMainPlugin } from '../../../xpack_main/xpack_main';
|
||||
|
@ -73,7 +73,7 @@ export interface MlCoreSetup {
|
|||
addAppLinksToSampleDataset: () => any;
|
||||
injectUiAppVars: (id: string, callback: () => {}) => any;
|
||||
http: MlHttpServiceSetup;
|
||||
savedObjects: SavedObjectsService;
|
||||
savedObjects: SavedObjectsLegacyService;
|
||||
usage: {
|
||||
collectorSet: {
|
||||
makeUsageCollector: any;
|
||||
|
@ -99,7 +99,7 @@ export interface RouteInitialization {
|
|||
elasticsearchPlugin: ElasticsearchPlugin;
|
||||
route(route: ServerRoute | ServerRoute[]): void;
|
||||
xpackMainPlugin?: MlXpackMainPlugin;
|
||||
savedObjects?: SavedObjectsService;
|
||||
savedObjects?: SavedObjectsLegacyService;
|
||||
spacesPlugin: any;
|
||||
}
|
||||
export interface UsageInitialization {
|
||||
|
@ -110,7 +110,7 @@ export interface UsageInitialization {
|
|||
register: (collector: any) => void;
|
||||
};
|
||||
};
|
||||
savedObjects: SavedObjectsService;
|
||||
savedObjects: SavedObjectsLegacyService;
|
||||
}
|
||||
|
||||
export class Plugin {
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
*/
|
||||
import {
|
||||
SavedObjectsSchema,
|
||||
SavedObjectsService,
|
||||
SavedObjectsLegacyService,
|
||||
SavedObjectsClientContract,
|
||||
SavedObjectsImportResponse,
|
||||
SavedObjectsImportOptions,
|
||||
|
@ -42,7 +42,7 @@ describe('copySavedObjectsToSpaces', () => {
|
|||
const setup = (setupOpts: SetupOpts) => {
|
||||
const savedObjectsClient = (null as unknown) as SavedObjectsClientContract;
|
||||
|
||||
const savedObjectsService: SavedObjectsService = ({
|
||||
const savedObjectsService: SavedObjectsLegacyService = ({
|
||||
importExport: {
|
||||
objectLimit: 1000,
|
||||
getSortedObjectsForExport:
|
||||
|
@ -73,7 +73,7 @@ describe('copySavedObjectsToSpaces', () => {
|
|||
schema: new SavedObjectsSchema({
|
||||
globalType: { isNamespaceAgnostic: true },
|
||||
}),
|
||||
} as unknown) as SavedObjectsService;
|
||||
} as unknown) as SavedObjectsLegacyService;
|
||||
|
||||
return {
|
||||
savedObjectsClient,
|
||||
|
|
|
@ -4,7 +4,11 @@
|
|||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import { SavedObjectsClientContract, SavedObjectsService, SavedObject } from 'src/core/server';
|
||||
import {
|
||||
SavedObjectsClientContract,
|
||||
SavedObjectsLegacyService,
|
||||
SavedObject,
|
||||
} from 'src/core/server';
|
||||
import { Readable } from 'stream';
|
||||
import { SavedObjectsClientProviderOptions } from 'src/core/server';
|
||||
import { spaceIdToNamespace } from '../utils/namespace';
|
||||
|
@ -20,7 +24,7 @@ export const COPY_TO_SPACES_SAVED_OBJECTS_CLIENT_OPTS: SavedObjectsClientProvide
|
|||
|
||||
export function copySavedObjectsToSpacesFactory(
|
||||
savedObjectsClient: SavedObjectsClientContract,
|
||||
savedObjectsService: SavedObjectsService
|
||||
savedObjectsService: SavedObjectsLegacyService
|
||||
) {
|
||||
const { importExport, types, schema } = savedObjectsService;
|
||||
const eligibleTypes = getEligibleTypes({ types, schema });
|
||||
|
|
|
@ -4,8 +4,11 @@
|
|||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import { SavedObjectsService } from 'src/core/server';
|
||||
import { SavedObjectsLegacyService } from 'src/core/server';
|
||||
|
||||
export function getEligibleTypes({ types, schema }: Pick<SavedObjectsService, 'schema' | 'types'>) {
|
||||
export function getEligibleTypes({
|
||||
types,
|
||||
schema,
|
||||
}: Pick<SavedObjectsLegacyService, 'schema' | 'types'>) {
|
||||
return types.filter(type => !schema.isNamespaceAgnostic(type));
|
||||
}
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
*/
|
||||
import {
|
||||
SavedObjectsSchema,
|
||||
SavedObjectsService,
|
||||
SavedObjectsLegacyService,
|
||||
SavedObjectsClientContract,
|
||||
SavedObjectsImportResponse,
|
||||
SavedObjectsResolveImportErrorsOptions,
|
||||
|
@ -42,7 +42,7 @@ const expectStreamToContainObjects = async (
|
|||
|
||||
describe('resolveCopySavedObjectsToSpacesConflicts', () => {
|
||||
const setup = (setupOpts: SetupOpts) => {
|
||||
const savedObjectsService: SavedObjectsService = ({
|
||||
const savedObjectsService: SavedObjectsLegacyService = ({
|
||||
importExport: {
|
||||
objectLimit: 1000,
|
||||
getSortedObjectsForExport:
|
||||
|
@ -76,7 +76,7 @@ describe('resolveCopySavedObjectsToSpacesConflicts', () => {
|
|||
schema: new SavedObjectsSchema({
|
||||
globalType: { isNamespaceAgnostic: true },
|
||||
}),
|
||||
} as unknown) as SavedObjectsService;
|
||||
} as unknown) as SavedObjectsLegacyService;
|
||||
|
||||
const savedObjectsClient = (null as unknown) as SavedObjectsClientContract;
|
||||
|
||||
|
|
|
@ -4,7 +4,11 @@
|
|||
* you may not use this file except in compliance with the Elastic License.
|
||||
*/
|
||||
|
||||
import { SavedObjectsClientContract, SavedObjectsService, SavedObject } from 'src/core/server';
|
||||
import {
|
||||
SavedObjectsClientContract,
|
||||
SavedObjectsLegacyService,
|
||||
SavedObject,
|
||||
} from 'src/core/server';
|
||||
import { Readable } from 'stream';
|
||||
import { spaceIdToNamespace } from '../utils/namespace';
|
||||
import { CopyOptions, ResolveConflictsOptions, CopyResponse } from './types';
|
||||
|
@ -15,7 +19,7 @@ import { createReadableStreamFromArray } from './lib/readable_stream_from_array'
|
|||
|
||||
export function resolveCopySavedObjectsToSpacesConflictsFactory(
|
||||
savedObjectsClient: SavedObjectsClientContract,
|
||||
savedObjectsService: SavedObjectsService
|
||||
savedObjectsService: SavedObjectsLegacyService
|
||||
) {
|
||||
const { importExport, types, schema } = savedObjectsService;
|
||||
const eligibleTypes = getEligibleTypes({ types, schema });
|
||||
|
|
|
@ -10,7 +10,7 @@ import * as Rx from 'rxjs';
|
|||
import Boom from 'boom';
|
||||
import { getClient } from '../../../../server/lib/get_client_shield';
|
||||
import { createDefaultSpace } from './create_default_space';
|
||||
import { SavedObjectsService } from 'src/core/server';
|
||||
import { SavedObjectsLegacyService } from 'src/core/server';
|
||||
import { ElasticsearchServiceSetup } from 'src/core/server';
|
||||
|
||||
let mockCallWithRequest;
|
||||
|
@ -83,7 +83,7 @@ const createMockDeps = (settings: MockServerSettings = {}) => {
|
|||
|
||||
return {
|
||||
config: mockServer.config(),
|
||||
savedObjects: (mockServer.savedObjects as unknown) as SavedObjectsService,
|
||||
savedObjects: (mockServer.savedObjects as unknown) as SavedObjectsLegacyService,
|
||||
elasticsearch: ({
|
||||
dataClient$: Rx.of({
|
||||
callAsInternalUser: jest.fn(),
|
||||
|
|
|
@ -7,12 +7,12 @@
|
|||
import { i18n } from '@kbn/i18n';
|
||||
|
||||
import { first } from 'rxjs/operators';
|
||||
import { SavedObjectsService, CoreSetup } from 'src/core/server';
|
||||
import { SavedObjectsLegacyService, CoreSetup } from 'src/core/server';
|
||||
import { DEFAULT_SPACE_ID } from '../../common/constants';
|
||||
|
||||
interface Deps {
|
||||
elasticsearch: CoreSetup['elasticsearch'];
|
||||
savedObjects: SavedObjectsService;
|
||||
savedObjects: SavedObjectsLegacyService;
|
||||
}
|
||||
|
||||
export async function createDefaultSpace({ elasticsearch, savedObjects }: Deps) {
|
||||
|
|
|
@ -12,7 +12,7 @@ import { initSpacesOnRequestInterceptor } from './on_request_interceptor';
|
|||
import {
|
||||
HttpServiceSetup,
|
||||
CoreSetup,
|
||||
SavedObjectsService,
|
||||
SavedObjectsLegacyService,
|
||||
SavedObjectsErrorHelpers,
|
||||
} from '../../../../../../../src/core/server';
|
||||
import {
|
||||
|
@ -168,7 +168,7 @@ describe('onPostAuthInterceptor', () => {
|
|||
serverDefaultRoute: defaultRoute,
|
||||
serverBasePath: '',
|
||||
},
|
||||
savedObjects: (savedObjectsService as unknown) as SavedObjectsService,
|
||||
savedObjects: (savedObjectsService as unknown) as SavedObjectsLegacyService,
|
||||
} as LegacyAPI;
|
||||
|
||||
const service = new SpacesService(loggingMock, () => legacyAPI);
|
||||
|
|