* [esArchiver] auto-create the default space object when necessary
* [esArchiver] check for spaces plugin to create default space
parent 1ef606e4cc
commit 219bc53591

6 changed files with 74 additions and 10 deletions
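In short: `esArchiver load` now needs to know where Kibana is running. Whenever an archive recreates a `.kibana` index, the loader asks Kibana's status API whether the spaces plugin is enabled and, if so, seeds the reserved default space document. A condensed sketch of that flow, using the same `wreck` and `client.index` calls that appear in the hunks below (`seedDefaultSpaceIfNeeded` is a hypothetical name; the real logic is split between `createCreateIndexStream` and the new `kibana_index` helpers):

import wreck from 'wreck';

// Condensed, assumption-laden sketch of the behavior this commit adds.
async function seedDefaultSpaceIfNeeded({ client, index, kibanaUrl }) {
  // only Kibana indices can hold space documents
  if (!index.startsWith('.kibana')) return;

  // /api/status lists plugin statuses with ids like `plugin:spaces@<version>`
  const { payload } = await wreck.get('/api/status', { baseUrl: kibanaUrl, json: true });
  const spacesEnabled = payload.status.statuses.some(({ id }) => id.startsWith('plugin:spaces@'));
  if (!spacesEnabled) return;

  // seed the reserved default space document that the spaces plugin expects to exist
  await client.index({
    index,
    type: 'doc',
    id: 'space:default',
    body: {
      type: 'space',
      updated_at: new Date().toISOString(),
      space: { name: 'Default Space', description: 'This is the default space', _reserved: true },
    },
  });
}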
@@ -46,7 +46,7 @@ const pipeline = (...streams) => streams
     .pipe(dest)
   ));
 
-export async function loadAction({ name, skipExisting, client, dataDir, log }) {
+export async function loadAction({ name, skipExisting, client, dataDir, log, kibanaUrl }) {
   const inputDir = resolve(dataDir, name);
   const stats = createStats(name, log);
   const files = prioritizeMappings(await readDirectory(inputDir));
@@ -68,7 +68,7 @@ export async function loadAction({ name, skipExisting, client, dataDir, log }) {
 
   await createPromiseFromStreams([
     recordStream,
-    createCreateIndexStream({ client, stats, skipExisting, log }),
+    createCreateIndexStream({ client, stats, skipExisting, log, kibanaUrl }),
     createIndexDocRecordsStream(client, stats),
   ]);
 
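For context on the pipeline above: `createPromiseFromStreams` pipes the listed streams together and resolves once they have all finished, and `createCreateIndexStream` is an object-mode `Transform` over archive records. A minimal illustrative transform in the same style (not the real implementation, just the shape):

import { Transform } from 'stream';

// Illustrative object-mode transform: 'index' records are handled,
// everything else is passed through untouched.
function createExampleRecordStream(handleIndexRecord) {
  return new Transform({
    readableObjectMode: true,
    writableObjectMode: true,
    async transform(record, enc, callback) {
      try {
        if (record && record.type === 'index') {
          await handleIndexRecord(record);
        } else {
          this.push(record);
        }
        callback();
      } catch (err) {
        callback(err);
      }
    },
  });
}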
@@ -42,6 +42,7 @@ const defaultConfigPath = resolveConfigPath('test/functional/config.js');
 cmd
   .description(`CLI to manage archiving/restoring data in elasticsearch`)
   .option('--es-url [url]', 'url for elasticsearch')
+  .option('--kibana-url [url]', 'url for kibana (only necessary if using "load" method)')
   .option(`--dir [path]`, 'where archives are stored')
   .option('--verbose', 'turn on verbose logging')
   .option('--config [path]', 'path to a functional test config file to use for default values', resolveConfigPath, defaultConfigPath)
@@ -83,6 +84,7 @@ async function execute(operation, ...args) {
       // load default values from the specified config file
       const config = await readConfigFile(log, resolve(cmd.config));
       if (!cmd.esUrl) cmd.esUrl = formatUrl(config.get('servers.elasticsearch'));
+      if (!cmd.kibanaUrl) cmd.kibanaUrl = formatUrl(config.get('servers.kibana'));
       if (!cmd.dir) cmd.dir = config.get('esArchiver.directory');
     }
 
@@ -97,6 +99,7 @@ async function execute(operation, ...args) {
     if (!cmd.esUrl) {
       error('You must specify either --es-url or --config flags');
     }
+
     if (!cmd.dir) {
       error('You must specify either --dir or --config flags');
     }
@@ -119,6 +122,7 @@ async function execute(operation, ...args) {
       log,
       client,
       dataDir: resolve(cmd.dir),
+      kibanaUrl: cmd.kibanaUrl
     });
     await esArchiver[operation](...args);
   } finally {
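The new flag follows the same precedence as `--es-url`: an explicit `--kibana-url` wins, otherwise the value is derived from the functional test config passed via `--config`. A small sketch of that resolution (the `resolveKibanaUrl` helper is hypothetical; the real logic is inlined above), plus an illustrative invocation:

import { format as formatUrl } from 'url';

// hypothetical helper mirroring the inline fallback logic above
function resolveKibanaUrl(cmd, config) {
  // an explicit flag takes precedence over the config file
  if (cmd.kibanaUrl) return cmd.kibanaUrl;
  return formatUrl(config.get('servers.kibana'));
}

// illustrative invocation (script path and urls are assumptions):
//   node scripts/es_archiver load my_archive \
//     --es-url http://localhost:9200 \
//     --kibana-url http://localhost:5601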
@@ -26,10 +26,11 @@ import {
 } from './actions';
 
 export class EsArchiver {
-  constructor({ client, dataDir, log }) {
+  constructor({ client, dataDir, log, kibanaUrl }) {
     this.client = client;
     this.dataDir = dataDir;
     this.log = log;
+    this.kibanaUrl = kibanaUrl;
   }
 
   /**
@@ -68,6 +69,7 @@ export class EsArchiver {
       client: this.client,
       dataDir: this.dataDir,
       log: this.log,
+      kibanaUrl: this.kibanaUrl,
     });
   }
 
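Programmatic callers just thread the extra option through the constructor. A hedged usage sketch, assuming an Elasticsearch `client` and tooling `log` are already available, and with the data directory, Kibana URL, and archive name purely illustrative:

import { EsArchiver } from '../../../src/es_archiver'; // adjust the relative path to the caller

async function loadFixtures({ client, log }) {
  const esArchiver = new EsArchiver({
    client,
    log,
    dataDir: '/path/to/archives',        // illustrative
    kibanaUrl: 'http://localhost:5601',  // lets load() create the default space when spaces is enabled
  });

  await esArchiver.load('my_archive');   // archive name is illustrative
}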
@@ -20,11 +20,11 @@
 import { Transform } from 'stream';
 
 import { get, once } from 'lodash';
-import { deleteKibanaIndices } from './kibana_index';
+import { deleteKibanaIndices, isSpacesEnabled, createDefaultSpace } from './kibana_index';
 
 import { deleteIndex } from './delete_index';
 
-export function createCreateIndexStream({ client, stats, skipExisting, log }) {
+export function createCreateIndexStream({ client, stats, skipExisting, log, kibanaUrl }) {
   const skipDocsFromIndices = new Set();
 
   // If we're trying to import Kibana index docs, we need to ensure that
@@ -41,7 +41,7 @@ export function createCreateIndexStream({ client, stats, skipExisting, log }) {
     stream.push(record);
   }
 
-  async function handleIndex(stream, record) {
+  async function handleIndex(record) {
     const { index, settings, mappings } = record.value;
 
     async function attemptToCreate(attemptNumber = 1) {
@@ -49,11 +49,17 @@ export function createCreateIndexStream({ client, stats, skipExisting, log }) {
         if (index.startsWith('.kibana')) {
           await clearKibanaIndices();
         }
+
         await client.indices.create({
           method: 'PUT',
           index,
           body: { settings, mappings },
         });
+
+        if (index.startsWith('.kibana') && await isSpacesEnabled({ kibanaUrl })) {
+          await createDefaultSpace({ index, client });
+        }
+
         stats.createdIndex(index, { settings });
       } catch (err) {
         if (get(err, 'body.error.type') !== 'resource_already_exists_exception' || attemptNumber >= 3) {
@@ -82,7 +88,7 @@ export function createCreateIndexStream({ client, stats, skipExisting, log }) {
       try {
         switch (record && record.type) {
           case 'index':
-            await handleIndex(this, record);
+            await handleIndex(record);
             break;
 
           case 'doc':
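The heart of the change is the guard that runs right after index creation: only `.kibana` indices are candidates, and only when the spaces plugin is actually enabled in the target Kibana. Pulled out on its own (the `maybeCreateDefaultSpace` wrapper is hypothetical; `isSpacesEnabled` and `createDefaultSpace` are the real exports added in the `kibana_index` hunks below):

import { isSpacesEnabled, createDefaultSpace } from './kibana_index';

// hypothetical wrapper around the guard added above
async function maybeCreateDefaultSpace({ index, client, kibanaUrl }) {
  if (index.startsWith('.kibana') && await isSpacesEnabled({ kibanaUrl })) {
    await createDefaultSpace({ index, client });
  }
}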
@@ -22,6 +22,9 @@ import fs from 'fs';
 import path from 'path';
 import { promisify } from 'util';
 import { toArray } from 'rxjs/operators';
+import wreck from 'wreck';
+
+import { deleteIndex } from './delete_index';
 import { collectUiExports } from '../../../ui/ui_exports';
 import { KibanaMigrator } from '../../../server/saved_objects/migrations';
 import { findPluginSpecs } from '../../../plugin_discovery';
@@ -44,18 +47,25 @@ const buildUiExports = _.once(async () => {
 /**
  * Deletes all indices that start with `.kibana`
  */
-export async function deleteKibanaIndices({ client, stats }) {
+export async function deleteKibanaIndices({ client, stats, log }) {
   const kibanaIndices = await client.cat.indices({ index: '.kibana*', format: 'json' });
   const indexNames = kibanaIndices.map(x => x.index);
   if (!indexNames.length) {
     return;
   }
+
   await client.indices.putSettings({
     index: indexNames,
     body: { index: { blocks: { read_only: false } } },
   });
-  await client.indices.delete({ index: indexNames });
-  indexNames.forEach(stats.deletedIndex);
+
+  await deleteIndex({
+    client,
+    stats,
+    index: indexNames,
+    log,
+  });
+
   return indexNames;
 }
 
@@ -102,3 +112,42 @@ async function loadElasticVersion() {
   const packageJson = await readFile(path.join(__dirname, '../../../../package.json'));
   return JSON.parse(packageJson).version;
 }
+
+const spacesEnabledCache = new Map();
+export async function isSpacesEnabled({ kibanaUrl }) {
+  if (!spacesEnabledCache.has(kibanaUrl)) {
+    const statuses = await getKibanaStatuses({ kibanaUrl });
+    spacesEnabledCache.set(kibanaUrl, !!statuses.find(({ id }) => id.startsWith('plugin:spaces@')));
+  }
+
+  return spacesEnabledCache.get(kibanaUrl);
+}
+
+async function getKibanaStatuses({ kibanaUrl }) {
+  try {
+    const { payload } = await wreck.get('/api/status', {
+      baseUrl: kibanaUrl,
+      json: true
+    });
+    return payload.status.statuses;
+  } catch (error) {
+    throw new Error(`Unable to fetch Kibana status API response from Kibana at ${kibanaUrl}`);
+  }
+}
+
+export async function createDefaultSpace({ index, client }) {
+  await client.index({
+    index,
+    type: 'doc',
+    id: 'space:default',
+    body: {
+      type: 'space',
+      updated_at: new Date().toISOString(),
+      space: {
+        name: 'Default Space',
+        description: 'This is the default space',
+        _reserved: true
+      }
+    }
+  });
+}
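`isSpacesEnabled` keys its cache on the Kibana URL, so the status API is hit at most once per Kibana instance per esArchiver process. For reference, the part of the `/api/status` payload it inspects looks roughly like this (plugin ids and versions are illustrative):

// approximate payload shape consumed by getKibanaStatuses/isSpacesEnabled above
const examplePayload = {
  status: {
    statuses: [
      { id: 'plugin:kibana@7.0.0', state: 'green' },
      { id: 'plugin:spaces@7.0.0', state: 'green' },
    ],
  },
};

const statuses = examplePayload.status.statuses;
const spacesEnabled = !!statuses.find(({ id }) => id.startsWith('plugin:spaces@'));
console.log(spacesEnabled); // true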
@@ -17,6 +17,8 @@
  * under the License.
  */
 
+import { format as formatUrl } from 'url';
+
 import { EsArchiver } from '../../../src/es_archiver';
 import * as KibanaServer from './kibana_server';
 
@@ -35,6 +37,7 @@ export function EsArchiverProvider({ getService, hasService }) {
     client,
     dataDir,
     log,
+    kibanaUrl: formatUrl(config.get('servers.kibana'))
   });
 
   if (hasService('kibanaServer')) {
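From a functional test's point of view nothing changes: the service wires up `kibanaUrl` itself, so loading Kibana archives keeps the usual shape (suite and archive names here are illustrative):

export default function ({ getService }) {
  const esArchiver = getService('esArchiver');

  describe('my suite', () => {
    before(async () => {
      await esArchiver.load('my_archive');
    });

    after(async () => {
      await esArchiver.unload('my_archive');
    });
  });
}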