Mirror of https://github.com/elastic/kibana.git, synced 2025-04-24 09:48:58 -04:00

Commit c70ee3e9a9 (parent 1b60422704)

* migrate most libs
* migrate last lib files
* fix get_relationships
* fix getSavedObject
* migrate tests to TS
* address review comments
* move test files outside of __jest__ folder

28 changed files with 412 additions and 357 deletions
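The diff below repeats one pattern: each JavaScript lib in the saved objects management plugin gains explicit TypeScript parameter and return types, and its tests move out of __jest__ into sibling .test.ts files. As a rough illustrative sketch of that pattern (not a file from this commit; fetchSomething and its body are hypothetical, modeled on the kfetch helpers changed below):

import { kfetch } from 'ui/kfetch';

// Before the migration this would have been written as
// `export async function fetchSomething(types, includeReferencesDeep = false) { ... }`
// with implicit `any` everywhere; the migrated form pins down each type.
export async function fetchSomething(
  types: string[],
  includeReferencesDeep: boolean = false
): Promise<Blob> {
  return await kfetch({
    method: 'POST',
    pathname: '/api/saved_objects/_export',
    body: JSON.stringify({ type: types, includeReferencesDeep }),
  });
}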
@@ -1,63 +0,0 @@
-/*
- * Licensed to Elasticsearch B.V. under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch B.V. licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import { importLegacyFile } from '../import_legacy_file';
-
-describe('importFile', () => {
-  it('should import a file', async () => {
-    class FileReader {
-      readAsText(text) {
-        this.onload({
-          target: {
-            result: JSON.stringify({ text }),
-          },
-        });
-      }
-    }
-
-    const file = 'foo';
-
-    const imported = await importLegacyFile(file, FileReader);
-    expect(imported).toEqual({ text: file });
-  });
-
-  it('should throw errors', async () => {
-    class FileReader {
-      readAsText() {
-        this.onload({
-          target: {
-            result: 'not_parseable',
-          },
-        });
-      }
-    }
-
-    const file = 'foo';
-
-    try {
-      await importLegacyFile(file, FileReader);
-    } catch (e) {
-      // There isn't a great way to handle throwing exceptions
-      // with async/await but this seems to work :shrug:
-      expect(() => {
-        throw e;
-      }).toThrow();
-    }
-  });
-});

@@ -17,7 +17,7 @@
  * under the License.
  */
 
-import { extractExportDetails, SavedObjectsExportResultDetails } from '../extract_export_details';
+import { extractExportDetails, SavedObjectsExportResultDetails } from './extract_export_details';
 
 describe('extractExportDetails', () => {
   const objLine = (id: string, type: string) => {

@@ -19,7 +19,11 @@
 
 import { kfetch } from 'ui/kfetch';
 
-export async function fetchExportByTypeAndSearch(types, search, includeReferencesDeep = false) {
+export async function fetchExportByTypeAndSearch(
+  types: string[],
+  search: string | undefined,
+  includeReferencesDeep: boolean = false
+): Promise<Blob> {
   return await kfetch({
     method: 'POST',
     pathname: '/api/saved_objects/_export',

@@ -19,7 +19,10 @@
 
 import { kfetch } from 'ui/kfetch';
 
-export async function fetchExportObjects(objects, includeReferencesDeep = false) {
+export async function fetchExportObjects(
+  objects: any[],
+  includeReferencesDeep: boolean = false
+): Promise<Blob> {
   return await kfetch({
     method: 'POST',
     pathname: '/api/saved_objects/_export',

@@ -18,13 +18,14 @@
  */
 
 import { kfetch } from 'ui/kfetch';
+import { SavedObjectsFindOptions } from 'src/core/public';
 import { keysToCamelCaseShallow } from './case_conversion';
 
-export async function findObjects(findOptions) {
+export async function findObjects(findOptions: SavedObjectsFindOptions) {
   const response = await kfetch({
     method: 'GET',
     pathname: '/api/kibana/management/saved_objects/_find',
-    query: findOptions,
+    query: findOptions as Record<string, any>,
   });
 
   return keysToCamelCaseShallow(response);

@@ -17,6 +17,6 @@
  * under the License.
  */
 
-export function getDefaultTitle(object) {
+export function getDefaultTitle(object: { id: string; type: string }) {
   return `${object.type} [id=${object.id}]`;
 }

@@ -17,24 +17,24 @@
  * under the License.
  */
 
-import { getRelationships } from '../get_relationships';
+import { getRelationships } from './get_relationships';
 
 describe('getRelationships', () => {
   it('should make an http request', async () => {
-    const $http = jest.fn();
+    const $http = jest.fn() as any;
     const basePath = 'test';
 
-    await getRelationships('dashboard', 1, ['search', 'index-pattern'], $http, basePath);
+    await getRelationships('dashboard', '1', ['search', 'index-pattern'], $http, basePath);
     expect($http.mock.calls.length).toBe(1);
   });
 
   it('should handle successful responses', async () => {
-    const $http = jest.fn().mockImplementation(() => ({ data: [1, 2] }));
+    const $http = jest.fn().mockImplementation(() => ({ data: [1, 2] })) as any;
     const basePath = 'test';
 
     const response = await getRelationships(
       'dashboard',
-      1,
+      '1',
       ['search', 'index-pattern'],
       $http,
       basePath
@@ -44,23 +44,17 @@ describe('getRelationships', () => {
 
   it('should handle errors', async () => {
     const $http = jest.fn().mockImplementation(() => {
-      throw {
-        data: {
-          error: 'Test error',
-          statusCode: 500,
-        },
+      const err = new Error();
+      (err as any).data = {
+        error: 'Test error',
+        statusCode: 500,
       };
-    });
+      throw err;
+    }) as any;
     const basePath = 'test';
 
-    try {
-      await getRelationships('dashboard', 1, ['search', 'index-pattern'], $http, basePath);
-    } catch (e) {
-      // There isn't a great way to handle throwing exceptions
-      // with async/await but this seems to work :shrug:
-      expect(() => {
-        throw e;
-      }).toThrow();
-    }
+    await expect(
+      getRelationships('dashboard', '1', ['search', 'index-pattern'], $http, basePath)
+    ).rejects.toThrowErrorMatchingInlineSnapshot(`"Test error"`);
   });
 });

@@ -17,9 +17,17 @@
  * under the License.
  */
 
+import { IHttpService } from 'angular';
 import { get } from 'lodash';
+import { SavedObjectRelation } from '../types';
 
-export async function getRelationships(type, id, savedObjectTypes, $http, basePath) {
+export async function getRelationships(
+  type: string,
+  id: string,
+  savedObjectTypes: string[],
+  $http: IHttpService,
+  basePath: string
+): Promise<SavedObjectRelation[]> {
   const url = `${basePath}/api/kibana/management/saved_objects/relationships/${encodeURIComponent(
     type
   )}/${encodeURIComponent(id)}`;
@@ -27,19 +35,19 @@ export async function getRelationships(type, id, savedObjectTypes, $http, basePa
     method: 'GET',
     url,
     params: {
-      savedObjectTypes: savedObjectTypes,
+      savedObjectTypes,
     },
   };
 
   try {
-    const response = await $http(options);
-    return response ? response.data : undefined;
+    const response = await $http<SavedObjectRelation[]>(options);
+    return response?.data;
   } catch (resp) {
-    const respBody = get(resp, 'data', {});
+    const respBody = get(resp, 'data', {}) as any;
     const err = new Error(respBody.message || respBody.error || `${resp.status} Response`);
 
-    err.statusCode = respBody.statusCode || resp.status;
-    err.body = respBody;
+    (err as any).statusCode = respBody.statusCode || resp.status;
+    (err as any).body = respBody;
 
     throw err;
   }

@@ -17,10 +17,18 @@
  * under the License.
  */
 
+import { IHttpService } from 'angular';
 import chrome from 'ui/chrome';
 
 const apiBase = chrome.addBasePath('/api/kibana/management/saved_objects/scroll');
-export async function getSavedObjectCounts($http, typesToInclude, searchString) {
-  const results = await $http.post(`${apiBase}/counts`, { typesToInclude, searchString });
+export async function getSavedObjectCounts(
+  $http: IHttpService,
+  typesToInclude: string[],
+  searchString: string
+): Promise<Record<string, number>> {
+  const results = await $http.post<Record<string, number>>(`${apiBase}/counts`, {
+    typesToInclude,
+    searchString,
+  });
   return results.data;
 }

@@ -17,7 +17,7 @@
  * under the License.
  */
 
-export function getSavedObjectLabel(type) {
+export function getSavedObjectLabel(type: string) {
   switch (type) {
     case 'index-pattern':
     case 'index-patterns':

@@ -19,7 +19,7 @@
 
 import { kfetch } from 'ui/kfetch';
 
-export async function importFile(file, overwriteAll = false) {
+export async function importFile(file: Blob, overwriteAll: boolean = false) {
   const formData = new FormData();
   formData.append('file', file);
   return await kfetch({

@@ -0,0 +1,37 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import { importLegacyFile } from './import_legacy_file';
+
+describe('importFile', () => {
+  it('should import a file with valid json format', async () => {
+    const file = new File([`{"text": "foo"}`], 'file.json');
+
+    const imported = await importLegacyFile(file);
+    expect(imported).toEqual({ text: 'foo' });
+  });
+
+  it('should throw errors when file content is not parseable', async () => {
+    const file = new File([`not_parseable`], 'file.json');
+
+    await expect(importLegacyFile(file)).rejects.toThrowErrorMatchingInlineSnapshot(
+      `"Unexpected token o in JSON at position 1"`
+    );
+  });
+});

@@ -17,10 +17,11 @@
  * under the License.
  */
 
-export async function importLegacyFile(file, FileReader = window.FileReader) {
+export async function importLegacyFile(file: File) {
   return new Promise((resolve, reject) => {
     const fr = new FileReader();
-    fr.onload = ({ target: { result } }) => {
+    fr.onload = event => {
+      const result = event.target!.result as string;
       try {
         resolve(JSON.parse(result));
       } catch (e) {

@@ -17,100 +17,110 @@
  * under the License.
  */
 
-import { canViewInApp } from '../in_app_url';
+import { Capabilities } from '../../../../../../../../core/public';
+import { canViewInApp } from './in_app_url';
 
+const createCapabilities = (sections: Record<string, any>): Capabilities => {
+  return {
+    navLinks: {},
+    management: {},
+    catalogue: {},
+    ...sections,
+  };
+};
+
 describe('canViewInApp', () => {
   it('should handle saved searches', () => {
-    let uiCapabilities = {
+    let uiCapabilities = createCapabilities({
       discover: {
         show: true,
       },
-    };
+    });
     expect(canViewInApp(uiCapabilities, 'search')).toEqual(true);
     expect(canViewInApp(uiCapabilities, 'searches')).toEqual(true);
 
-    uiCapabilities = {
+    uiCapabilities = createCapabilities({
       discover: {
         show: false,
       },
-    };
+    });
     expect(canViewInApp(uiCapabilities, 'search')).toEqual(false);
     expect(canViewInApp(uiCapabilities, 'searches')).toEqual(false);
   });
 
   it('should handle visualizations', () => {
-    let uiCapabilities = {
+    let uiCapabilities = createCapabilities({
       visualize: {
         show: true,
       },
-    };
+    });
     expect(canViewInApp(uiCapabilities, 'visualization')).toEqual(true);
     expect(canViewInApp(uiCapabilities, 'visualizations')).toEqual(true);
 
-    uiCapabilities = {
+    uiCapabilities = createCapabilities({
       visualize: {
         show: false,
       },
-    };
+    });
     expect(canViewInApp(uiCapabilities, 'visualization')).toEqual(false);
     expect(canViewInApp(uiCapabilities, 'visualizations')).toEqual(false);
   });
 
   it('should handle index patterns', () => {
-    let uiCapabilities = {
+    let uiCapabilities = createCapabilities({
       management: {
         kibana: {
           index_patterns: true,
         },
       },
-    };
+    });
     expect(canViewInApp(uiCapabilities, 'index-pattern')).toEqual(true);
     expect(canViewInApp(uiCapabilities, 'index-patterns')).toEqual(true);
     expect(canViewInApp(uiCapabilities, 'indexPatterns')).toEqual(true);
 
-    uiCapabilities = {
+    uiCapabilities = createCapabilities({
       management: {
         kibana: {
           index_patterns: false,
         },
       },
-    };
+    });
     expect(canViewInApp(uiCapabilities, 'index-pattern')).toEqual(false);
     expect(canViewInApp(uiCapabilities, 'index-patterns')).toEqual(false);
     expect(canViewInApp(uiCapabilities, 'indexPatterns')).toEqual(false);
   });
 
   it('should handle dashboards', () => {
-    let uiCapabilities = {
+    let uiCapabilities = createCapabilities({
       dashboard: {
         show: true,
       },
-    };
+    });
     expect(canViewInApp(uiCapabilities, 'dashboard')).toEqual(true);
     expect(canViewInApp(uiCapabilities, 'dashboards')).toEqual(true);
 
-    uiCapabilities = {
+    uiCapabilities = createCapabilities({
       dashboard: {
         show: false,
       },
-    };
+    });
     expect(canViewInApp(uiCapabilities, 'dashboard')).toEqual(false);
     expect(canViewInApp(uiCapabilities, 'dashboards')).toEqual(false);
   });
 
   it('should have a default case', () => {
-    let uiCapabilities = {
+    let uiCapabilities = createCapabilities({
       foo: {
         show: true,
       },
-    };
+    });
     expect(canViewInApp(uiCapabilities, 'foo')).toEqual(true);
 
-    uiCapabilities = {
+    uiCapabilities = createCapabilities({
       foo: {
         show: false,
       },
-    };
+    });
     expect(canViewInApp(uiCapabilities, 'foo')).toEqual(false);
   });
 });

@@ -1,35 +0,0 @@
-/*
- * Licensed to Elasticsearch B.V. under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch B.V. licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-export * from './fetch_export_by_type_and_search';
-export * from './fetch_export_objects';
-export * from './in_app_url';
-export * from './get_relationships';
-export * from './get_saved_object_counts';
-export * from './get_saved_object_label';
-export * from './import_file';
-export * from './import_legacy_file';
-export * from './parse_query';
-export * from './resolve_import_errors';
-export * from './resolve_saved_objects';
-export * from './log_legacy_import';
-export * from './process_import_response';
-export * from './get_default_title';
-export * from './find_objects';
-export * from './extract_export_details';

@@ -0,0 +1,45 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+export { fetchExportByTypeAndSearch } from './fetch_export_by_type_and_search';
+export { fetchExportObjects } from './fetch_export_objects';
+export { canViewInApp } from './in_app_url';
+export { getRelationships } from './get_relationships';
+export { getSavedObjectCounts } from './get_saved_object_counts';
+export { getSavedObjectLabel } from './get_saved_object_label';
+export { importFile } from './import_file';
+export { importLegacyFile } from './import_legacy_file';
+export { parseQuery } from './parse_query';
+export { resolveImportErrors } from './resolve_import_errors';
+export {
+  resolveIndexPatternConflicts,
+  resolveSavedObjects,
+  resolveSavedSearches,
+  saveObject,
+  saveObjects,
+} from './resolve_saved_objects';
+export { logLegacyImport } from './log_legacy_import';
+export {
+  processImportResponse,
+  ProcessedImportResponse,
+  FailedImport,
+} from './process_import_response';
+export { getDefaultTitle } from './get_default_title';
+export { findObjects } from './find_objects';
+export { extractExportDetails, SavedObjectsExportResultDetails } from './extract_export_details';

@@ -1,32 +0,0 @@
-/*
- * Licensed to Elasticsearch B.V. under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch B.V. licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import { parseQuery } from '.';
-
-export const isSameQuery = (query1, query2) => {
-  const parsedQuery1 = parseQuery(query1);
-  const parsedQuery2 = parseQuery(query2);
-
-  if (parsedQuery1.queryText === parsedQuery2.queryText) {
-    if (parsedQuery1.visibleTypes === parsedQuery2.visibleTypes) {
-      return true;
-    }
-  }
-  return false;
-};

@@ -17,7 +17,7 @@
  * under the License.
  */
 
-import { parseQuery } from '../parse_query';
+import { parseQuery } from './parse_query';
 
 describe('getQueryText', () => {
   it('should know how to get the text out of the AST', () => {

@@ -17,15 +17,15 @@
  * under the License.
  */
 
-export function parseQuery(query) {
-  let queryText = undefined;
-  let visibleTypes = undefined;
+export function parseQuery(query: any) {
+  let queryText;
+  let visibleTypes;
 
   if (query) {
     if (query.ast.getTermClauses().length) {
       queryText = query.ast
         .getTermClauses()
-        .map(clause => clause.value)
+        .map((clause: any) => clause.value)
         .join(' ');
     }
     if (query.ast.getFieldClauses('type')) {

@@ -17,7 +17,12 @@
  * under the License.
  */
 
-import { processImportResponse } from '../process_import_response';
+import {
+  SavedObjectsImportConflictError,
+  SavedObjectsImportUnknownError,
+  SavedObjectsImportMissingReferencesError,
+} from 'src/core/public';
+import { processImportResponse } from './process_import_response';
 
 describe('processImportResponse()', () => {
   test('works when no errors exist in the response', () => {
@@ -36,32 +41,28 @@ describe('processImportResponse()', () => {
       successCount: 0,
       errors: [
         {
-          obj: {
-            type: 'a',
-            id: '1',
-          },
+          type: 'a',
+          id: '1',
           error: {
             type: 'conflict',
-          },
+          } as SavedObjectsImportConflictError,
         },
       ],
     };
     const result = processImportResponse(response);
     expect(result.failedImports).toMatchInlineSnapshot(`
-Array [
-  Object {
-    "error": Object {
-      "type": "conflict",
-    },
-    "obj": Object {
-      "obj": Object {
-        "id": "1",
-        "type": "a",
-      },
-    },
-  },
-]
-`);
+      Array [
+        Object {
+          "error": Object {
+            "type": "conflict",
+          },
+          "obj": Object {
+            "id": "1",
+            "type": "a",
+          },
+        },
+      ]
+    `);
   });
 
   test('unknown errors get added to failedImports', () => {
@@ -70,32 +71,28 @@ Array [
       successCount: 0,
       errors: [
         {
-          obj: {
-            type: 'a',
-            id: '1',
-          },
+          type: 'a',
+          id: '1',
           error: {
             type: 'unknown',
-          },
+          } as SavedObjectsImportUnknownError,
         },
       ],
     };
     const result = processImportResponse(response);
    expect(result.failedImports).toMatchInlineSnapshot(`
-Array [
-  Object {
-    "error": Object {
-      "type": "unknown",
-    },
-    "obj": Object {
-      "obj": Object {
-        "id": "1",
-        "type": "a",
-      },
-    },
-  },
-]
-`);
+      Array [
+        Object {
+          "error": Object {
+            "type": "unknown",
+          },
+          "obj": Object {
+            "id": "1",
+            "type": "a",
+          },
+        },
+      ]
+    `);
   });
 
   test('missing references get added to failedImports', () => {
@@ -104,10 +101,8 @@ Array [
       successCount: 0,
       errors: [
         {
-          obj: {
-            type: 'a',
-            id: '1',
-          },
+          type: 'a',
+          id: '1',
           error: {
             type: 'missing_references',
             references: [
@@ -116,31 +111,29 @@ Array [
                 id: '2',
               },
             ],
-          },
+          } as SavedObjectsImportMissingReferencesError,
         },
       ],
     };
     const result = processImportResponse(response);
     expect(result.failedImports).toMatchInlineSnapshot(`
-Array [
-  Object {
-    "error": Object {
-      "references": Array [
-        Object {
-          "id": "2",
-          "type": "index-pattern",
-        },
-      ],
-      "type": "missing_references",
-    },
-    "obj": Object {
-      "obj": Object {
-        "id": "1",
-        "type": "a",
-      },
-    },
-  },
-]
-`);
+      Array [
+        Object {
+          "error": Object {
+            "references": Array [
+              Object {
+                "id": "2",
+                "type": "index-pattern",
+              },
+            ],
+            "type": "missing_references",
+          },
+          "obj": Object {
+            "id": "1",
+            "type": "a",
+          },
+        },
+      ]
+    `);
   });
 });

@@ -26,15 +26,17 @@ import {
   SavedObjectsImportError,
 } from 'src/core/public';
 
+export interface FailedImport {
+  obj: Pick<SavedObjectsImportError, 'id' | 'type' | 'title'>;
+  error:
+    | SavedObjectsImportConflictError
+    | SavedObjectsImportUnsupportedTypeError
+    | SavedObjectsImportMissingReferencesError
+    | SavedObjectsImportUnknownError;
+}
+
 export interface ProcessedImportResponse {
-  failedImports: Array<{
-    obj: Pick<SavedObjectsImportError, 'id' | 'type' | 'title'>;
-    error:
-      | SavedObjectsImportConflictError
-      | SavedObjectsImportUnsupportedTypeError
-      | SavedObjectsImportMissingReferencesError
-      | SavedObjectsImportUnknownError;
-  }>;
+  failedImports: FailedImport[];
   unmatchedReferences: Array<{
     existingIndexPatternId: string;
     list: Array<Record<string, any>>;

@@ -17,12 +17,16 @@
  * under the License.
  */
 
-import { resolveImportErrors } from '../resolve_import_errors';
+import { SavedObjectsImportUnknownError } from 'src/core/public';
+import { kfetch } from 'ui/kfetch';
+import { resolveImportErrors } from './resolve_import_errors';
 
 jest.mock('ui/kfetch', () => ({ kfetch: jest.fn() }));
 
-function getFormData(form) {
-  const formData = {};
+const kfetchMock = kfetch as jest.Mock;
+
+function getFormData(form: Map<string, any>) {
+  const formData: Record<string, any> = {};
   for (const [key, val] of form.entries()) {
     if (key === 'retries') {
       formData[key] = JSON.parse(val);
@@ -69,7 +73,7 @@ Object {
           },
           error: {
             type: 'unknown',
-          },
+          } as SavedObjectsImportUnknownError,
         },
       ],
     },
@@ -94,8 +98,7 @@ Object {
   });
 
   test('resolves conflicts', async () => {
-    const { kfetch } = require('ui/kfetch');
-    kfetch.mockResolvedValueOnce({
+    kfetchMock.mockResolvedValueOnce({
       success: true,
       successCount: 1,
     });
@@ -136,7 +139,7 @@ Object {
        "status": "success",
      }
    `);
-    const formData = getFormData(kfetch.mock.calls[0][0].body);
+    const formData = getFormData(kfetchMock.mock.calls[0][0].body);
    expect(formData).toMatchInlineSnapshot(`
      Object {
        "file": "undefined",
@@ -153,8 +156,7 @@ Object {
   });
 
   test('resolves missing references', async () => {
-    const { kfetch } = require('ui/kfetch');
-    kfetch.mockResolvedValueOnce({
+    kfetchMock.mockResolvedValueOnce({
      success: true,
      successCount: 2,
    });
@@ -201,7 +203,7 @@ Object {
        "status": "success",
      }
    `);
-    const formData = getFormData(kfetch.mock.calls[0][0].body);
+    const formData = getFormData(kfetchMock.mock.calls[0][0].body);
    expect(formData).toMatchInlineSnapshot(`
      Object {
        "file": "undefined",
@@ -274,8 +276,7 @@ Object {
   });
 
   test('handles missing references then conflicts on the same errored objects', async () => {
-    const { kfetch } = require('ui/kfetch');
-    kfetch.mockResolvedValueOnce({
+    kfetchMock.mockResolvedValueOnce({
      success: false,
      successCount: 0,
      errors: [
@@ -288,7 +289,7 @@ Object {
        },
      ],
    });
-    kfetch.mockResolvedValueOnce({
+    kfetchMock.mockResolvedValueOnce({
      success: true,
      successCount: 1,
    });
@@ -333,7 +334,7 @@ Object {
        "status": "success",
      }
    `);
-    const formData1 = getFormData(kfetch.mock.calls[0][0].body);
+    const formData1 = getFormData(kfetchMock.mock.calls[0][0].body);
    expect(formData1).toMatchInlineSnapshot(`
      Object {
        "file": "undefined",
@@ -353,7 +354,7 @@ Object {
        ],
      }
    `);
-    const formData2 = getFormData(kfetch.mock.calls[1][0].body);
+    const formData2 = getFormData(kfetchMock.mock.calls[1][0].body);
    expect(formData2).toMatchInlineSnapshot(`
      Object {
        "file": "undefined",

@@ -18,8 +18,16 @@
  */
 
 import { kfetch } from 'ui/kfetch';
+import { FailedImport } from './process_import_response';
 
-async function callResolveImportErrorsApi(file, retries) {
+interface RetryObject {
+  id: string;
+  type: string;
+  overwrite?: boolean;
+  replaceReferences?: any[];
+}
+
+async function callResolveImportErrorsApi(file: File, retries: any) {
   const formData = new FormData();
   formData.append('file', file);
   formData.append('retries', JSON.stringify(retries));
@@ -39,7 +47,12 @@ function mapImportFailureToRetryObject({
   overwriteDecisionCache,
   replaceReferencesCache,
   state,
-}) {
+}: {
+  failure: FailedImport;
+  overwriteDecisionCache: Map<string, boolean>;
+  replaceReferencesCache: Map<string, any[]>;
+  state: any;
+}): RetryObject | undefined {
   const { isOverwriteAllChecked, unmatchedReferences } = state;
   const isOverwriteGranted =
     isOverwriteAllChecked ||
@@ -86,27 +99,32 @@ function mapImportFailureToRetryObject({
   };
 }
 
-export async function resolveImportErrors({ getConflictResolutions, state }) {
+export async function resolveImportErrors({
+  getConflictResolutions,
+  state,
+}: {
+  getConflictResolutions: (objects: any[]) => Promise<Record<string, boolean>>;
+  state: { importCount: number; failedImports?: FailedImport[] } & Record<string, any>;
+}) {
   const overwriteDecisionCache = new Map();
   const replaceReferencesCache = new Map();
   let { importCount: successImportCount, failedImports: importFailures = [] } = state;
   const { file, isOverwriteAllChecked } = state;
 
-  const doesntHaveOverwriteDecision = ({ obj }) => {
+  const doesntHaveOverwriteDecision = ({ obj }: FailedImport) => {
     return !overwriteDecisionCache.has(`${obj.type}:${obj.id}`);
   };
-  const getOverwriteDecision = ({ obj }) => {
+  const getOverwriteDecision = ({ obj }: FailedImport) => {
     return overwriteDecisionCache.get(`${obj.type}:${obj.id}`);
   };
-  const callMapImportFailure = failure => {
-    return mapImportFailureToRetryObject({
+  const callMapImportFailure = (failure: FailedImport) =>
+    mapImportFailureToRetryObject({
       failure,
       overwriteDecisionCache,
       replaceReferencesCache,
       state,
     });
-  };
-  const isNotSkipped = failure => {
+  const isNotSkipped = (failure: FailedImport) => {
    return (
      (failure.error.type !== 'conflict' && failure.error.type !== 'missing_references') ||
      getOverwriteDecision(failure)
@@ -131,7 +149,7 @@ export async function resolveImportErrors({ getConflictResolutions, state }) {
   }
 
   // Build retries array
-  const retries = importFailures.map(callMapImportFailure).filter(obj => !!obj);
+  const retries = importFailures.map(callMapImportFailure).filter(obj => !!obj) as RetryObject[];
   for (const { error, obj } of importFailures) {
     if (error.type !== 'missing_references') {
       continue;

@@ -22,21 +22,30 @@ import {
   resolveIndexPatternConflicts,
   saveObjects,
   saveObject,
-} from '../resolve_saved_objects';
+} from './resolve_saved_objects';
+import {
+  SavedObject,
+  SavedObjectLoader,
+} from '../../../../../../../../plugins/saved_objects/public';
+import { IndexPatternsContract } from '../../../../../../../../plugins/data/public';
 
-jest.mock('../../../../../../../../../plugins/kibana_utils/public', () => ({
-  SavedObjectNotFound: class SavedObjectNotFound extends Error {
-    constructor(options) {
-      super();
-      for (const option in options) {
-        if (options.hasOwnProperty(option)) {
-          this[option] = options[option];
-        }
-      }
-    }
-  },
-}));
-import { SavedObjectNotFound } from '../../../../../../../../../plugins/kibana_utils/public';
+class SavedObjectNotFound extends Error {
+  constructor(options: Record<string, any>) {
+    super();
+    for (const option in options) {
+      if (options.hasOwnProperty(option)) {
+        (this as any)[option] = options[option];
+      }
+    }
+  }
+}
+
+const openModalMock = jest.fn();
+
+const createObj = (props: Partial<SavedObject>): SavedObject =>
+  ({
+    ...props,
+  } as SavedObject);
 
 describe('resolveSavedObjects', () => {
   describe('resolveSavedObjects', () => {
@@ -61,7 +70,7 @@ describe('resolveSavedObjects', () => {
        },
      ];
 
-      const indexPatterns = {
+      const indexPatterns = ({
        get: async () => {
          return {
            create: () => '2',
@@ -73,7 +82,7 @@
        cache: {
          clear: () => {},
        },
-      };
+      } as unknown) as IndexPatternsContract;
 
      const services = [
        {
@@ -115,11 +124,17 @@
            };
          },
        },
-      ];
+      ] as SavedObjectLoader[];
 
      const overwriteAll = false;
 
-      const result = await resolveSavedObjects(savedObjects, overwriteAll, services, indexPatterns);
+      const result = await resolveSavedObjects(
+        savedObjects,
+        overwriteAll,
+        services,
+        indexPatterns,
+        openModalMock
+      );
 
      expect(result.conflictedIndexPatterns.length).toBe(3);
      expect(result.conflictedSavedObjectsLinkedToSavedSearches.length).toBe(0);
@@ -147,7 +162,7 @@
        },
      ];
 
-      const indexPatterns = {
+      const indexPatterns = ({
        get: async () => {
          return {
            create: () => '2',
@@ -159,7 +174,7 @@
        cache: {
          clear: () => {},
        },
-      };
+      } as unknown) as IndexPatternsContract;
 
      const services = [
        {
@@ -202,11 +217,17 @@
            };
          },
        },
-      ];
+      ] as SavedObjectLoader[];
 
      const overwriteAll = false;
 
-      const result = await resolveSavedObjects(savedObjects, overwriteAll, services, indexPatterns);
+      const result = await resolveSavedObjects(
+        savedObjects,
+        overwriteAll,
+        services,
+        indexPatterns,
+        openModalMock
+      );
 
      expect(result.conflictedIndexPatterns.length).toBe(1);
      expect(result.conflictedSavedObjectsLinkedToSavedSearches.length).toBe(1);
@@ -223,7 +244,7 @@
        {
          obj: {
            searchSource: {
-              getOwnField: field => {
+              getOwnField: (field: string) => {
                return field === 'index' ? '1' : undefined;
              },
            },
@@ -234,7 +255,7 @@
        {
          obj: {
            searchSource: {
-              getOwnField: field => {
+              getOwnField: (field: string) => {
                return field === 'index' ? '3' : undefined;
              },
            },
@@ -277,7 +298,7 @@
        {
          obj: {
            searchSource: {
-              getOwnField: field => {
+              getOwnField: (field: string) => {
                return field === 'index' ? '1' : [{ meta: { index: 'filterIndex' } }];
              },
              setField: jest.fn(),
@@ -289,7 +310,7 @@
        {
          obj: {
            searchSource: {
-              getOwnField: field => {
+              getOwnField: (field: string) => {
                return field === 'index' ? '3' : undefined;
              },
            },
@@ -330,12 +351,12 @@
      const save = jest.fn();
 
      const objs = [
-        {
+        createObj({
          save,
-        },
-        {
+        }),
+        createObj({
          save,
-        },
+        }),
      ];
 
      const overwriteAll = false;
@@ -349,9 +370,9 @@
  describe('saveObject', () => {
    it('should save the object', async () => {
      const save = jest.fn();
-      const obj = {
+      const obj = createObj({
        save,
-      };
+      });
 
      const overwriteAll = false;
 

@@ -18,9 +18,17 @@
  */
 
 import { i18n } from '@kbn/i18n';
+import { OverlayStart } from 'src/core/public';
+import {
+  SavedObject,
+  SavedObjectLoader,
+} from '../../../../../../../../plugins/saved_objects/public';
+import { IndexPatternsContract, IIndexPattern } from '../../../../../../../../plugins/data/public';
 
-async function getSavedObject(doc, services) {
-  const service = services.find(service => service.type === doc._type);
+type SavedObjectsRawDoc = Record<string, any>;
+
+async function getSavedObject(doc: SavedObjectsRawDoc, services: SavedObjectLoader[]) {
+  const service = services.find(s => s.type === doc._type);
   if (!service) {
     return;
   }
@@ -31,7 +39,12 @@ async function getSavedObject(doc, services) {
   return obj;
 }
 
-function addJsonFieldToIndexPattern(target, sourceString, fieldName, indexName) {
+function addJsonFieldToIndexPattern(
+  target: Record<string, any>,
+  sourceString: string,
+  fieldName: string,
+  indexName: string
+) {
   if (sourceString) {
     try {
       target[fieldName] = JSON.parse(sourceString);
@@ -50,7 +63,12 @@ function addJsonFieldToIndexPattern(target, sourceString, fieldName, indexName)
     }
   }
 }
-async function importIndexPattern(doc, indexPatterns, overwriteAll, confirmModalPromise) {
+async function importIndexPattern(
+  doc: SavedObjectsRawDoc,
+  indexPatterns: IndexPatternsContract,
+  overwriteAll: boolean,
+  openConfirm: OverlayStart['openConfirm']
+) {
   // TODO: consolidate this is the code in create_index_pattern_wizard.js
   const emptyPattern = await indexPatterns.make();
   const {
@@ -66,7 +84,7 @@ async function importIndexPattern(doc, indexPatterns, overwriteAll, confirmModal
     id: doc._id,
     title,
     timeFieldName,
-  };
+  } as IIndexPattern;
   if (type) {
     importedIndexPattern.type = type;
   }
@@ -79,9 +97,9 @@ async function importIndexPattern(doc, indexPatterns, overwriteAll, confirmModal
   let newId = await emptyPattern.create(overwriteAll);
   if (!newId) {
     // We can override and we want to prompt for confirmation
-    const isConfirmed = await confirmModalPromise(
+    const isConfirmed = await openConfirm(
       i18n.translate('kbn.management.indexPattern.confirmOverwriteLabel', {
-        values: { title: this.title },
+        values: { title },
         defaultMessage: "Are you sure you want to overwrite '{title}'?",
       }),
       {
@@ -96,7 +114,7 @@ async function importIndexPattern(doc, indexPatterns, overwriteAll, confirmModal
     );
 
     if (isConfirmed) {
-      newId = await emptyPattern.create(true);
+      newId = (await emptyPattern.create(true)) as string;
     } else {
       return;
     }
@@ -105,7 +123,7 @@ async function importIndexPattern(doc, indexPatterns, overwriteAll, confirmModal
   return newId;
 }
 
-async function importDocument(obj, doc, overwriteAll) {
+async function importDocument(obj: SavedObject, doc: SavedObjectsRawDoc, overwriteAll: boolean) {
   await obj.applyESResp({
     references: doc._references || [],
     ...doc,
@@ -113,12 +131,12 @@ async function importDocument(obj, doc, overwriteAll) {
   return await obj.save({ confirmOverwrite: !overwriteAll });
 }
 
-function groupByType(docs) {
+function groupByType(docs: SavedObjectsRawDoc[]): Record<string, SavedObjectsRawDoc[]> {
   const defaultDocTypes = {
     searches: [],
     indexPatterns: [],
     other: [],
-  };
+  } as Record<string, SavedObjectsRawDoc[]>;
 
   return docs.reduce((types, doc) => {
     switch (doc._type) {
@@ -135,14 +153,14 @@ function groupByType(docs) {
   }, defaultDocTypes);
 }
 
-async function awaitEachItemInParallel(list, op) {
+async function awaitEachItemInParallel<T, R>(list: T[], op: (item: T) => R) {
   return await Promise.all(list.map(item => op(item)));
 }
 
 export async function resolveIndexPatternConflicts(
-  resolutions,
-  conflictedIndexPatterns,
-  overwriteAll
+  resolutions: Array<{ oldId: string; newId: string }>,
+  conflictedIndexPatterns: any[],
+  overwriteAll: boolean
 ) {
   let importCount = 0;
 
@@ -160,15 +178,13 @@ export async function resolveIndexPatternConflicts(
     }
 
     // Resolve filter index reference:
-    const filter = (obj.searchSource.getOwnField('filter') || []).map(filter => {
-      if (!(filter.meta && filter.meta.index)) {
-        return filter;
+    const filter = (obj.searchSource.getOwnField('filter') || []).map((f: any) => {
+      if (!(f.meta && f.meta.index)) {
+        return f;
       }
 
-      resolution = resolutions.find(({ oldId }) => oldId === filter.meta.index);
-      return resolution
-        ? { ...filter, ...{ meta: { ...filter.meta, index: resolution.newId } } }
-        : filter;
+      resolution = resolutions.find(({ oldId }) => oldId === f.meta.index);
+      return resolution ? { ...f, ...{ meta: { ...f.meta, index: resolution.newId } } } : f;
     });
 
     if (filter.length > 0) {
@@ -186,7 +202,7 @@ export async function resolveIndexPatternConflicts(
   return importCount;
 }
 
-export async function saveObjects(objs, overwriteAll) {
+export async function saveObjects(objs: SavedObject[], overwriteAll: boolean) {
   let importCount = 0;
   await awaitEachItemInParallel(objs, async obj => {
     if (await saveObject(obj, overwriteAll)) {
@@ -196,11 +212,16 @@ export async function saveObjects(objs, overwriteAll) {
   return importCount;
 }
 
-export async function saveObject(obj, overwriteAll) {
+export async function saveObject(obj: SavedObject, overwriteAll: boolean) {
   return await obj.save({ confirmOverwrite: !overwriteAll });
 }
 
-export async function resolveSavedSearches(savedSearches, services, indexPatterns, overwriteAll) {
+export async function resolveSavedSearches(
+  savedSearches: any[],
+  services: SavedObjectLoader[],
+  indexPatterns: IndexPatternsContract,
+  overwriteAll: boolean
+) {
   let importCount = 0;
   await awaitEachItemInParallel(savedSearches, async searchDoc => {
     const obj = await getSavedObject(searchDoc, services);
@@ -216,18 +237,18 @@ export async function resolveSavedSearches(savedSearches, services, indexPattern
 }
 
 export async function resolveSavedObjects(
-  savedObjects,
-  overwriteAll,
-  services,
-  indexPatterns,
-  confirmModalPromise
+  savedObjects: SavedObjectsRawDoc[],
+  overwriteAll: boolean,
+  services: SavedObjectLoader[],
+  indexPatterns: IndexPatternsContract,
+  confirmModalPromise: OverlayStart['openConfirm']
 ) {
   const docTypes = groupByType(savedObjects);
 
   // Keep track of how many we actually import because the user
   // can cancel an override
   let importedObjectCount = 0;
-  const failedImports = [];
+  const failedImports: any[] = [];
   // Start with the index patterns since everything is dependent on them
   await awaitEachItemInParallel(docTypes.indexPatterns, async indexPatternDoc => {
     try {
@@ -247,18 +268,18 @@ export async function resolveSavedObjects(
 
   // We want to do the same for saved searches, but we want to keep them separate because they need
   // to be applied _first_ because other saved objects can be dependent on those saved searches existing
-  const conflictedSearchDocs = [];
+  const conflictedSearchDocs: any[] = [];
   // Keep a record of the index patterns assigned to our imported saved objects that do not
   // exist. We will provide a way for the user to manually select a new index pattern for those
   // saved objects.
-  const conflictedIndexPatterns = [];
+  const conflictedIndexPatterns: any[] = [];
   // Keep a record of any objects which fail to import for unknown reasons.
 
   // It's possible to have saved objects that link to saved searches which then link to index patterns
   // and those could error out, but the error comes as an index pattern not found error. We can't resolve
   // those the same as way as normal index pattern not found errors, but when those are fixed, it's very
   // likely that these saved objects will work once resaved so keep them around to resave them.
-  const conflictedSavedObjectsLinkedToSavedSearches = [];
+  const conflictedSavedObjectsLinkedToSavedSearches: any[] = [];
 
   await awaitEachItemInParallel(docTypes.searches, async searchDoc => {
     const obj = await getSavedObject(searchDoc, services);

@@ -17,7 +17,25 @@
  * under the License.
  */
 
-import { SavedObjectReference } from 'src/core/public';
+import { SavedObject, SavedObjectReference } from 'src/core/public';
 
+export interface SavedObjectMetadata {
+  icon?: string;
+  title?: string;
+  editUrl?: string;
+  inAppUrl?: { path: string; uiCapabilitiesPath: string };
+}
+
+export type SavedObjectWithMetadata<T = unknown> = SavedObject<T> & {
+  meta: SavedObjectMetadata;
+};
+
+export interface SavedObjectRelation {
+  id: string;
+  type: string;
+  relationship: 'child' | 'parent';
+  meta: SavedObjectMetadata;
+}
+
 export interface ObjectField {
   type: FieldType;

@@ -56,7 +56,7 @@ export class SavedObjectLoader {
    * @param id
    * @returns {Promise<SavedObject>}
    */
-  async get(id: string) {
+  async get(id?: string) {
     // @ts-ignore
     const obj = new this.Class(id);
     return obj.init();
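A recurring test-level change in this commit is the move away from try/catch assertions for async failures to Jest's rejects matchers (see the get_relationships and import_legacy_file tests above). A minimal sketch of the new style, with a hypothetical failingCall helper standing in for the real functions:

// failingCall is illustrative only; the real tests await getRelationships or importLegacyFile.
async function failingCall(): Promise<never> {
  throw new Error('Test error');
}

test('surfaces async errors without try/catch', async () => {
  // Old style: try { await failingCall(); } catch (e) { expect(() => { throw e; }).toThrow(); }
  await expect(failingCall()).rejects.toThrow('Test error');
});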