Mirror of https://github.com/elastic/kibana.git (synced 2025-04-24 17:59:23 -04:00)
[Ingest Manager] Move asset getters out of registry (#83214)
## Summary

Packages/archives are no longer limited to the registry. This continues moving file- and cache-related functions from services/registry to services/archive: `getAsset` and `pathParts` move to archive/index (with `pathParts` renamed `getPathParts`). Behavior is unchanged for now, but it is more accurate to keep these helpers out of the registry namespace; registry keeps `fetch*` and the other functions that deal with the online service.
Parent: 5a91818baa
Commit: 4721b3211a

10 changed files with 75 additions and 73 deletions
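
At existing call sites the change is mechanical: same helpers, new import source, and `pathParts` is renamed `getPathParts`. A minimal sketch using the relative import paths that appear in the diffs below (the `readAsset` wrapper is only for illustration):

```ts
// Before: helpers were reached through the registry namespace.
//   import * as Registry from '../../registry';
//   Registry.getAsset(path);
//   Registry.pathParts(path);

// After: the same helpers are imported from services/archive.
import { getAsset, getPathParts } from '../../archive';

const readAsset = (path: string) => ({
  buffer: getAsset(path), // cached Buffer for the asset; throws if it was never unpacked
  parts: getPathParts(path), // AssetParts: pkgkey, service, type, file, dataset, path
});
```
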
@@ -4,9 +4,10 @@
 * you may not use this file except in compliance with the Elastic License.
 */

import { ArchivePackage } from '../../../../common/types';
import { ArchivePackage, AssetParts } from '../../../../common/types';
import { PackageInvalidArchiveError, PackageUnsupportedMediaTypeError } from '../../../errors';
import {
  cacheGet,
  cacheSet,
  cacheDelete,
  getArchiveFilelist,

@@ -100,3 +101,40 @@ export const deletePackageCache = (name: string, version: string) => {
  // this has been populated in unpackArchiveToCache()
  paths?.forEach((path) => cacheDelete(path));
};

export function getPathParts(path: string): AssetParts {
  let dataset;

  let [pkgkey, service, type, file] = path.split('/');

  // if it's a data stream
  if (service === 'data_stream') {
    // save the dataset name
    dataset = type;
    // drop the `data_stream/dataset-name` portion & re-parse
    [pkgkey, service, type, file] = path.replace(`data_stream/${dataset}/`, '').split('/');
  }

  // This is to cover for the fields.yml files inside the "fields" directory
  if (file === undefined) {
    file = type;
    type = 'fields';
    service = '';
  }

  return {
    pkgkey,
    service,
    type,
    file,
    dataset,
    path,
  } as AssetParts;
}

export function getAsset(key: string) {
  const buffer = cacheGet(key);
  if (buffer === undefined) throw new Error(`Cannot find asset ${key}`);

  return buffer;
}
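
For orientation, a small usage sketch from a consumer's point of view (the sample path is illustrative, not taken from this diff): `getPathParts` decomposes an archive path into `AssetParts`, and `getAsset` returns the `Buffer` cached by `unpackArchiveToCache()` or throws if the asset was never cached.

```ts
import { getAsset, getPathParts } from '../../archive';

// Illustrative path; any `<pkgkey>/data_stream/<dataset>/...` entry parses the same way.
const path = 'nginx-1.2.0/data_stream/access/elasticsearch/ingest_pipeline/default.yml';

const parts = getPathParts(path);
// parts === {
//   pkgkey: 'nginx-1.2.0',
//   dataset: 'access',
//   service: 'elasticsearch',
//   type: 'ingest_pipeline',
//   file: 'default.yml',
//   path,
// }

// Buffer cached by unpackArchiveToCache(); throws `Cannot find asset ...` when missing.
const pipelineYaml = getAsset(path).toString('utf-8');
```
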
@@ -5,15 +5,15 @@
 */

import { CallESAsCurrentUser, ElasticsearchAssetType } from '../../../../types';
import * as Registry from '../../registry';
import { getAsset, getPathParts } from '../../archive';

export async function installILMPolicy(paths: string[], callCluster: CallESAsCurrentUser) {
  const ilmPaths = paths.filter((path) => isILMPolicy(path));
  if (!ilmPaths.length) return;
  await Promise.all(
    ilmPaths.map(async (path) => {
      const body = Registry.getAsset(path).toString('utf-8');
      const { file } = Registry.pathParts(path);
      const body = getAsset(path).toString('utf-8');
      const { file } = getPathParts(path);
      const name = file.substr(0, file.lastIndexOf('.'));
      try {
        await callCluster('transport.request', {

@@ -28,7 +28,7 @@ export async function installILMPolicy(paths: string[], callCluster: CallESAsCur
  );
}
const isILMPolicy = (path: string) => {
  const pathParts = Registry.pathParts(path);
  const pathParts = getPathParts(path);
  return pathParts.type === ElasticsearchAssetType.ilmPolicy;
};
export async function policyExists(

@@ -11,7 +11,8 @@ import {
  ElasticsearchAssetType,
  InstallablePackage,
} from '../../../../types';
import * as Registry from '../../registry';
import { ArchiveEntry } from '../../registry';
import { getAsset, getPathParts } from '../../archive';
import { CallESAsCurrentUser } from '../../../../types';
import { saveInstalledEsRefs } from '../../packages/install';
import { getInstallationObject } from '../../packages';

@@ -127,7 +128,7 @@ export async function installPipelinesForDataStream({
      dataStream,
      packageVersion: pkgVersion,
    });
    const content = Registry.getAsset(path).toString('utf-8');
    const content = getAsset(path).toString('utf-8');
    pipelines.push({
      name,
      nameForInstallation,

@@ -192,10 +193,10 @@ async function installPipeline({
  return { id: pipeline.nameForInstallation, type: ElasticsearchAssetType.ingestPipeline };
}

const isDirectory = ({ path }: Registry.ArchiveEntry) => path.endsWith('/');
const isDirectory = ({ path }: ArchiveEntry) => path.endsWith('/');

const isDataStreamPipeline = (path: string, dataStreamDataset: string) => {
  const pathParts = Registry.pathParts(path);
  const pathParts = getPathParts(path);
  return (
    !isDirectory({ path }) &&
    pathParts.type === ElasticsearchAssetType.ingestPipeline &&

@@ -204,7 +205,7 @@ const isDataStreamPipeline = (path: string, dataStreamDataset: string) => {
  );
};
const isPipeline = (path: string) => {
  const pathParts = Registry.pathParts(path);
  const pathParts = getPathParts(path);
  return pathParts.type === ElasticsearchAssetType.ingestPipeline;
};

@@ -17,7 +17,7 @@ import { CallESAsCurrentUser } from '../../../../types';
import { Field, loadFieldsFromYaml, processFields } from '../../fields/field';
import { getPipelineNameForInstallation } from '../ingest_pipeline/install';
import { generateMappings, generateTemplateName, getTemplate } from './template';
import * as Registry from '../../registry';
import { getAsset, getPathParts } from '../../archive';
import { removeAssetsFromInstalledEsByType, saveInstalledEsRefs } from '../../packages/install';

export const installTemplates = async (

@@ -76,9 +76,9 @@ export const installTemplates = async (
const installPreBuiltTemplates = async (paths: string[], callCluster: CallESAsCurrentUser) => {
  const templatePaths = paths.filter((path) => isTemplate(path));
  const templateInstallPromises = templatePaths.map(async (path) => {
    const { file } = Registry.pathParts(path);
    const { file } = getPathParts(path);
    const templateName = file.substr(0, file.lastIndexOf('.'));
    const content = JSON.parse(Registry.getAsset(path).toString('utf8'));
    const content = JSON.parse(getAsset(path).toString('utf8'));
    let templateAPIPath = '_template';

    // v2 index templates need to be installed through the new API endpoint.

@@ -121,9 +121,9 @@ const installPreBuiltComponentTemplates = async (
) => {
  const templatePaths = paths.filter((path) => isComponentTemplate(path));
  const templateInstallPromises = templatePaths.map(async (path) => {
    const { file } = Registry.pathParts(path);
    const { file } = getPathParts(path);
    const templateName = file.substr(0, file.lastIndexOf('.'));
    const content = JSON.parse(Registry.getAsset(path).toString('utf8'));
    const content = JSON.parse(getAsset(path).toString('utf8'));

    const callClusterParams: {
      method: string;

@@ -151,12 +151,12 @@ const installPreBuiltComponentTemplates = async (
};

const isTemplate = (path: string) => {
  const pathParts = Registry.pathParts(path);
  const pathParts = getPathParts(path);
  return pathParts.type === ElasticsearchAssetType.indexTemplate;
};

const isComponentTemplate = (path: string) => {
  const pathParts = Registry.pathParts(path);
  const pathParts = getPathParts(path);
  return pathParts.type === ElasticsearchAssetType.componentTemplate;
};

@@ -4,8 +4,4 @@
 * you may not use this file except in compliance with the Elastic License.
 */

import * as Registry from '../../registry';

export const getAsset = (path: string): Buffer => {
  return Registry.getAsset(path);
};
export { getAsset } from '../../archive';
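
This file is reduced to a bare re-export, so anything that imported `getAsset` from it keeps compiling and now resolves to the implementation in `services/archive`. A hypothetical call site (the import specifier is illustrative, since file names are not captured in this view):

```ts
// Hypothetical consumer; its import specifier does not change with this commit,
// it just resolves to the archive-backed implementation now.
import { getAsset } from '../assets'; // illustrative path
const manifest = getAsset('nginx-1.2.0/manifest.yml').toString('utf-8');
```
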
@@ -7,7 +7,7 @@
import { SavedObjectsClientContract } from 'kibana/server';

import { saveInstalledEsRefs } from '../../packages/install';
import * as Registry from '../../registry';
import { getPathParts } from '../../archive';
import {
  ElasticsearchAssetType,
  EsAssetReference,

@@ -104,7 +104,7 @@ export const installTransform = async (
};

const isTransform = (path: string) => {
  const pathParts = Registry.pathParts(path);
  const pathParts = getPathParts(path);
  return !path.endsWith('/') && pathParts.type === ElasticsearchAssetType.transform;
};

@@ -10,7 +10,7 @@ import {
  SavedObjectsClientContract,
} from 'src/core/server';
import { PACKAGES_SAVED_OBJECT_TYPE } from '../../../../../common';
import * as Registry from '../../registry';
import { getAsset, getPathParts } from '../../archive';
import {
  AssetType,
  KibanaAssetType,

@@ -57,7 +57,7 @@ const AssetInstallers: Record<
};

export async function getKibanaAsset(key: string): Promise<ArchiveAsset> {
  const buffer = Registry.getAsset(key);
  const buffer = getAsset(key);

  // cache values are buffers. convert to string / JSON
  return JSON.parse(buffer.toString('utf8'));

@@ -117,14 +117,14 @@ export async function getKibanaAssets(
): Promise<Record<KibanaAssetType, ArchiveAsset[]>> {
  const kibanaAssetTypes = Object.values(KibanaAssetType);
  const isKibanaAssetType = (path: string) => {
    const parts = Registry.pathParts(path);
    const parts = getPathParts(path);

    return parts.service === 'kibana' && (kibanaAssetTypes as string[]).includes(parts.type);
  };

  const filteredPaths = paths
    .filter(isKibanaAssetType)
    .map<[string, AssetParts]>((path) => [path, Registry.pathParts(path)]);
    .map<[string, AssetParts]>((path) => [path, getPathParts(path)]);

  const assetArrays: Array<Promise<ArchiveAsset[]>> = [];
  for (const assetType of kibanaAssetTypes) {

@@ -6,7 +6,7 @@

import { InstallablePackage } from '../../../types';
import * as Registry from '../registry';
import { getArchiveFilelist } from '../archive/cache';
import { getArchiveFilelist, getAsset } from '../archive';

// paths from RegistryPackage are routes to the assets on EPR
// e.g. `/package/nginx/1.2.0/data_stream/access/fields/fields.yml`

@@ -59,7 +59,7 @@ export async function getAssetsData(
  // Gather all asset data
  const assets = getAssets(packageInfo, filter, datasetName);
  const entries: Registry.ArchiveEntry[] = assets.map((path) => {
    const buffer = Registry.getAsset(path);
    const buffer = getAsset(path);

    return { path, buffer };
  });

@@ -5,7 +5,8 @@
 */

import { AssetParts } from '../../../types';
import { getBufferExtractor, pathParts, splitPkgKey } from './index';
import { getPathParts } from '../archive';
import { getBufferExtractor, splitPkgKey } from './index';
import { untarBuffer, unzipBuffer } from './extract';

const testPaths = [

@@ -46,7 +47,7 @@

test('testPathParts', () => {
  for (const value of testPaths) {
    expect(pathParts(value.path)).toStrictEqual(value.assetParts as AssetParts);
    expect(getPathParts(value.path)).toStrictEqual(value.assetParts as AssetParts);
  }
});

@@ -8,7 +8,6 @@ import semver from 'semver';
import { Response } from 'node-fetch';
import { URL } from 'url';
import {
  AssetParts,
  AssetsGroupedByServiceByType,
  CategoryId,
  CategorySummaryList,

@@ -18,8 +17,12 @@
  RegistrySearchResults,
  RegistrySearchResult,
} from '../../../types';
import { unpackArchiveToCache } from '../archive';
import { cacheGet, getArchiveFilelist, setArchiveFilelist } from '../archive';
import {
  getArchiveFilelist,
  getPathParts,
  setArchiveFilelist,
  unpackArchiveToCache,
} from '../archive';
import { fetchUrl, getResponse, getResponseStream } from './requests';
import { streamToBuffer } from './streams';
import { getRegistryUrl } from './registry_url';

@@ -146,36 +149,6 @@ export async function getRegistryPackage(
  return { paths, registryPackageInfo };
}

export function pathParts(path: string): AssetParts {
  let dataset;

  let [pkgkey, service, type, file] = path.split('/');

  // if it's a data stream
  if (service === 'data_stream') {
    // save the dataset name
    dataset = type;
    // drop the `data_stream/dataset-name` portion & re-parse
    [pkgkey, service, type, file] = path.replace(`data_stream/${dataset}/`, '').split('/');
  }

  // This is to cover for the fields.yml files inside the "fields" directory
  if (file === undefined) {
    file = type;
    type = 'fields';
    service = '';
  }

  return {
    pkgkey,
    service,
    type,
    file,
    dataset,
    path,
  } as AssetParts;
}

export async function ensureCachedArchiveInfo(
  name: string,
  version: string,

@@ -204,19 +177,12 @@ async function fetchArchiveBuffer(
  return { archiveBuffer, archivePath };
}

export function getAsset(key: string) {
  const buffer = cacheGet(key);
  if (buffer === undefined) throw new Error(`Cannot find asset ${key}`);

  return buffer;
}

export function groupPathsByService(paths: string[]): AssetsGroupedByServiceByType {
  const kibanaAssetTypes = Object.values<string>(KibanaAssetType);

  // ASK: best way, if any, to avoid `any`?
  const assets = paths.reduce((map: any, path) => {
    const parts = pathParts(path.replace(/^\/package\//, ''));
    const parts = getPathParts(path.replace(/^\/package\//, ''));
    if (parts.service === 'kibana' && kibanaAssetTypes.includes(parts.type)) {
      if (!map[parts.service]) map[parts.service] = {};
      if (!map[parts.service][parts.type]) map[parts.service][parts.type] = [];
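
As an aside on the `// ASK` comment above (not something this commit changes): one way to drop the `any` is to give the reducer a typed accumulator. A rough sketch with a hypothetical function name; the grouped leaf type (arrays of asset paths) is an assumption, since the rest of the reducer is not shown in this hunk:

```ts
import { getPathParts } from '../archive';
import { KibanaAssetType } from '../../../types';

// Assumed shape: service -> asset type -> asset paths.
type GroupedAssets = Record<string, Record<string, string[]>>;

export function groupKibanaPaths(paths: string[]): GroupedAssets {
  const kibanaAssetTypes = Object.values<string>(KibanaAssetType);
  return paths.reduce<GroupedAssets>((map, path) => {
    const parts = getPathParts(path.replace(/^\/package\//, ''));
    if (parts.service === 'kibana' && kibanaAssetTypes.includes(parts.type)) {
      if (!map[parts.service]) map[parts.service] = {};
      if (!map[parts.service][parts.type]) map[parts.service][parts.type] = [];
      map[parts.service][parts.type].push(parts.path); // assumption: collect the asset paths
    }
    return map;
  }, {});
}
```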