mirror of https://github.com/elastic/kibana.git, synced 2025-04-24 09:48:58 -04:00
[Fleet] Support DLM from package-spec (#161905)

parent 13b8864c48
commit 885fb43651

5 changed files with 112 additions and 34 deletions

@@ -336,6 +336,7 @@ export enum RegistryDataStreamKeys {
   elasticsearch = 'elasticsearch',
   dataset_is_prefix = 'dataset_is_prefix',
   routing_rules = 'routing_rules',
+  lifecycle = 'lifecycle',
 }
 
 export interface RegistryDataStream {
@@ -353,6 +354,7 @@ export interface RegistryDataStream {
   [RegistryDataStreamKeys.elasticsearch]?: RegistryElasticsearch;
   [RegistryDataStreamKeys.dataset_is_prefix]?: boolean;
   [RegistryDataStreamKeys.routing_rules]?: RegistryDataStreamRoutingRules[];
+  [RegistryDataStreamKeys.lifecycle]?: RegistryDataStreamLifecycle;
 }
 
 export interface RegistryElasticsearch {
@@ -385,6 +387,10 @@ export interface RegistryDataStreamRoutingRules {
   }>;
 }
 
+export interface RegistryDataStreamLifecycle {
+  data_retention: string;
+}
+
 export type RegistryVarType =
   | 'integer'
   | 'bool'
@@ -605,6 +611,7 @@ export interface IndexTemplate {
   template: {
     settings: any;
     mappings: any;
+    lifecycle?: any;
   };
   data_stream: { hidden?: boolean };
   composed_of: string[];
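
For orientation, a minimal sketch of a parsed data stream once it carries the new field. The values are illustrative and loosely mirror the unit test added later in this diff; this is not output captured from the change itself.

  // Illustrative only: roughly what parseAndVerifyDataStreams returns for a package
  // whose data stream ships a lifecycle.yml containing `data_retention: "7d"`.
  const dataStream: RegistryDataStream = {
    dataset: 'ds',
    package: 'input-only',
    path: 'stream1',
    release: 'ga',
    title: 'Custom Logs',
    type: 'logs',
    elasticsearch: {},
    lifecycle: { data_retention: '7d' },
  };
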
@@ -459,7 +459,7 @@ describe('parseAndVerifyDataStreams', () => {
         paths: ['input-only-0.1.0/data_stream/stream1/README.md'],
         pkgName: 'input-only',
         pkgVersion: '0.1.0',
-        manifestsAndRoutingRules: {},
+        assetsMap: {},
       })
     ).toThrowError("No manifest.yml file found for data stream 'stream1'");
   });
@@ -470,7 +470,7 @@ describe('parseAndVerifyDataStreams', () => {
         paths: ['input-only-0.1.0/data_stream/stream1/manifest.yml'],
         pkgName: 'input-only',
         pkgVersion: '0.1.0',
-        manifestsAndRoutingRules: {
+        assetsMap: {
           'input-only-0.1.0/data_stream/stream1/manifest.yml': Buffer.alloc(1),
         },
       })
@@ -483,7 +483,7 @@ describe('parseAndVerifyDataStreams', () => {
         paths: ['input-only-0.1.0/data_stream/stream1/manifest.yml'],
         pkgName: 'input-only',
         pkgVersion: '0.1.0',
-        manifestsAndRoutingRules: {
+        assetsMap: {
           'input-only-0.1.0/data_stream/stream1/manifest.yml': Buffer.from(
             `
 title: Custom Logs`,
@@ -502,7 +502,7 @@ describe('parseAndVerifyDataStreams', () => {
         paths: ['input-only-0.1.0/data_stream/stream1/manifest.yml'],
         pkgName: 'input-only',
         pkgVersion: '0.1.0',
-        manifestsAndRoutingRules: {
+        assetsMap: {
           'input-only-0.1.0/data_stream/stream1/manifest.yml': Buffer.from(
             `
 title: Custom Logs
@@ -532,7 +532,7 @@ describe('parseAndVerifyDataStreams', () => {
         paths: ['input-only-0.1.0/data_stream/stream1/manifest.yml'],
         pkgName: 'input-only',
         pkgVersion: '0.1.0',
-        manifestsAndRoutingRules: {
+        assetsMap: {
           'input-only-0.1.0/data_stream/stream1/manifest.yml': Buffer.from(
             `
 title: Custom Logs
@@ -565,7 +565,7 @@ describe('parseAndVerifyDataStreams', () => {
         paths: ['input-only-0.1.0/data_stream/stream1/manifest.yml'],
         pkgName: 'input-only',
         pkgVersion: '0.1.0',
-        manifestsAndRoutingRules: {
+        assetsMap: {
           'input-only-0.1.0/data_stream/stream1/manifest.yml': Buffer.from(
             `
 title: Custom Logs
@@ -610,6 +610,41 @@ describe('parseAndVerifyDataStreams', () => {
       },
     ]);
   });
+
+  it('should parse lifecycle', async () => {
+    expect(
+      parseAndVerifyDataStreams({
+        paths: ['input-only-0.1.0/data_stream/stream1/manifest.yml'],
+        pkgName: 'input-only',
+        pkgVersion: '0.1.0',
+        assetsMap: {
+          'input-only-0.1.0/data_stream/stream1/manifest.yml': Buffer.from(
+            `
+title: Custom Logs
+type: logs
+dataset: ds
+version: 0.1.0`,
+            'utf8'
+          ),
+          'input-only-0.1.0/data_stream/stream1/lifecycle.yml': Buffer.from(
+            `data_retention: "7d"`,
+            'utf8'
+          ),
+        },
+      })
+    ).toEqual([
+      {
+        dataset: 'ds',
+        package: 'input-only',
+        path: 'stream1',
+        release: 'ga',
+        title: 'Custom Logs',
+        type: 'logs',
+        elasticsearch: {},
+        lifecycle: { data_retention: '7d' },
+      },
+    ]);
+  });
 });
 
 describe('parseAndVerifyStreams', () => {

@@ -24,6 +24,8 @@ import type {
   RegistryStream,
   RegistryVarsEntry,
   PackageSpecManifest,
+  RegistryDataStreamRoutingRules,
+  RegistryDataStreamLifecycle,
 } from '../../../../common/types';
 import {
   RegistryInputKeys,
@@ -41,6 +43,7 @@ const readFileAsync = promisify(readFile);
 export const MANIFEST_NAME = 'manifest.yml';
 export const DATASTREAM_MANIFEST_NAME = 'manifest.yml';
 export const DATASTREAM_ROUTING_RULES_NAME = 'routing_rules.yml';
+export const DATASTREAM_LIFECYCLE_NAME = 'lifecycle.yml';
 
 const DEFAULT_RELEASE_VALUE = 'ga';
 
@@ -128,6 +131,18 @@ const registryPolicyTemplateProps = Object.values(RegistryPolicyTemplateKeys);
 const registryStreamProps = Object.values(RegistryStreamKeys);
 const registryDataStreamProps = Object.values(RegistryDataStreamKeys);
 
+const PARSE_AND_VERIFY_ASSETS_NAME = [
+  MANIFEST_NAME,
+  DATASTREAM_ROUTING_RULES_NAME,
+  DATASTREAM_LIFECYCLE_NAME,
+];
+/**
+ * Filter assets needed for the parse and verify archive function
+ */
+export function filterAssetPathForParseAndVerifyArchive(assetPath: string): boolean {
+  return PARSE_AND_VERIFY_ASSETS_NAME.some((endWithPath) => assetPath.endsWith(endWithPath));
+}
+
 /*
 This function generates a package info object (see type `ArchivePackage`) by parsing and verifying the `manifest.yml` file as well
 as the directory structure for the given package archive and other files adhering to the package spec: https://github.com/elastic/package-spec.
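
A quick sketch of how the new filter behaves; the asset paths below are hypothetical and chosen only to illustrate it, not taken from this change:

  // Only paths ending in manifest.yml, routing_rules.yml, or lifecycle.yml survive,
  // which is exactly the set parseAndVerifyArchive needs to read.
  const candidatePaths = [
    'input-only-0.1.0/manifest.yml',
    'input-only-0.1.0/data_stream/stream1/lifecycle.yml',
    'input-only-0.1.0/data_stream/stream1/fields/fields.yml',
  ];
  const kept = candidatePaths.filter(filterAssetPathForParseAndVerifyArchive);
  // kept === ['input-only-0.1.0/manifest.yml', 'input-only-0.1.0/data_stream/stream1/lifecycle.yml']
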
@@ -144,21 +159,18 @@ export async function generatePackageInfoFromArchiveBuffer(
   archiveBuffer: Buffer,
   contentType: string
 ): Promise<{ paths: string[]; packageInfo: ArchivePackage }> {
-  const manifestsAndRoutingRules: AssetsBufferMap = {};
+  const assetsMap: AssetsBufferMap = {};
   const entries = await unpackBufferEntries(archiveBuffer, contentType);
   const paths: string[] = [];
   entries.forEach(({ path: bufferPath, buffer }) => {
     paths.push(bufferPath);
-    if (
-      buffer &&
-      (bufferPath.endsWith(MANIFEST_NAME) || bufferPath.endsWith(DATASTREAM_ROUTING_RULES_NAME))
-    ) {
-      manifestsAndRoutingRules[bufferPath] = buffer;
+    if (buffer && filterAssetPathForParseAndVerifyArchive(bufferPath)) {
+      assetsMap[bufferPath] = buffer;
     }
   });
 
   return {
-    packageInfo: parseAndVerifyArchive(paths, manifestsAndRoutingRules),
+    packageInfo: parseAndVerifyArchive(paths, assetsMap),
     paths,
   };
 }
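
For context, a hedged sketch of how this helper is typically driven; the archive file name and content type here are assumptions for illustration, not part of this change:

  import { readFile } from 'fs/promises';

  // Read a package archive from disk (hypothetical file name) and parse it.
  const archiveBuffer = await readFile('input-only-0.1.0.zip');
  const { paths, packageInfo } = await generatePackageInfoFromArchiveBuffer(
    archiveBuffer,
    'application/zip'
  );
  // packageInfo.data_streams now includes any lifecycle parsed from lifecycle.yml.
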
@@ -171,21 +183,21 @@ export async function _generatePackageInfoFromPaths(
   paths: string[],
   topLevelDir: string
 ): Promise<ArchivePackage> {
-  const manifestsAndRoutingRules: AssetsBufferMap = {};
+  const assetsMap: AssetsBufferMap = {};
   await Promise.all(
     paths.map(async (filePath) => {
-      if (filePath.endsWith(MANIFEST_NAME) || filePath.endsWith(DATASTREAM_ROUTING_RULES_NAME)) {
-        manifestsAndRoutingRules[filePath] = await readFileAsync(filePath);
+      if (filterAssetPathForParseAndVerifyArchive(filePath)) {
+        assetsMap[filePath] = await readFileAsync(filePath);
       }
     })
   );
 
-  return parseAndVerifyArchive(paths, manifestsAndRoutingRules, topLevelDir);
+  return parseAndVerifyArchive(paths, assetsMap, topLevelDir);
 }
 
 export function parseAndVerifyArchive(
   paths: string[],
-  manifestsAndRoutingRules: AssetsBufferMap,
+  assetsMap: AssetsBufferMap,
   topLevelDirOverride?: string
 ): ArchivePackage {
   // The top-level directory must match pkgName-pkgVersion, and no other top-level files or directories may be present
@@ -200,7 +212,7 @@ export function parseAndVerifyArchive(
 
   // The package must contain a manifest file ...
   const manifestFile = path.posix.join(toplevelDir, MANIFEST_NAME);
-  const manifestBuffer = manifestsAndRoutingRules[manifestFile];
+  const manifestBuffer = assetsMap[manifestFile];
   if (!paths.includes(manifestFile) || !manifestBuffer) {
     throw new PackageInvalidArchiveError(
       `Package at top-level directory ${toplevelDir} must contain a top-level ${MANIFEST_NAME} file.`
@@ -249,7 +261,7 @@ export function parseAndVerifyArchive(
     pkgName: parsed.name,
     pkgVersion: parsed.version,
     pkgBasePathOverride: topLevelDirOverride,
-    manifestsAndRoutingRules,
+    assetsMap,
   });
 
   if (parsedDataStreams.length) {
@@ -294,10 +306,10 @@ export function parseAndVerifyDataStreams(opts: {
   paths: string[];
   pkgName: string;
   pkgVersion: string;
-  manifestsAndRoutingRules: AssetsBufferMap;
+  assetsMap: AssetsBufferMap;
   pkgBasePathOverride?: string;
 }): RegistryDataStream[] {
-  const { paths, pkgName, pkgVersion, manifestsAndRoutingRules, pkgBasePathOverride } = opts;
+  const { paths, pkgName, pkgVersion, assetsMap: assetsMap, pkgBasePathOverride } = opts;
   // A data stream is made up of a subdirectory of name-version/data_stream/, containing a manifest.yml
   const dataStreamPaths = new Set<string>();
   const dataStreams: RegistryDataStream[] = [];
@@ -316,7 +328,7 @@ export function parseAndVerifyDataStreams(opts: {
   dataStreamPaths.forEach((dataStreamPath) => {
     const fullDataStreamPath = path.posix.join(dataStreamsBasePath, dataStreamPath);
     const manifestFile = path.posix.join(fullDataStreamPath, DATASTREAM_MANIFEST_NAME);
-    const manifestBuffer = manifestsAndRoutingRules[manifestFile];
+    const manifestBuffer = assetsMap[manifestFile];
     if (!paths.includes(manifestFile) || !manifestBuffer) {
       throw new PackageInvalidArchiveError(
         `No manifest.yml file found for data stream '${dataStreamPath}'`
@@ -333,15 +345,28 @@ export function parseAndVerifyDataStreams(opts: {
     }
 
     // Routing rules
-    const routingRulesFiles = path.posix.join(fullDataStreamPath, DATASTREAM_ROUTING_RULES_NAME);
-    const routingRulesBuffer = manifestsAndRoutingRules[routingRulesFiles];
-    let dataStreamRoutingRules: any;
+    const routingRulesPath = path.posix.join(fullDataStreamPath, DATASTREAM_ROUTING_RULES_NAME);
+    const routingRulesBuffer = assetsMap[routingRulesPath];
+    let dataStreamRoutingRules: RegistryDataStreamRoutingRules[] | undefined;
     if (routingRulesBuffer) {
       try {
         dataStreamRoutingRules = yaml.safeLoad(routingRulesBuffer.toString());
       } catch (error) {
         throw new PackageInvalidArchiveError(
-          `Could not parse package manifest for data stream '${dataStreamPath}': ${error}.`
+          `Could not parse routing rules for data stream '${dataStreamPath}': ${error}.`
         );
       }
     }
+    // Lifecycle
+    const lifecyclePath = path.posix.join(fullDataStreamPath, DATASTREAM_LIFECYCLE_NAME);
+    const lifecyleBuffer = assetsMap[lifecyclePath];
+    let dataStreamLifecyle: RegistryDataStreamLifecycle | undefined;
+    if (lifecyleBuffer) {
+      try {
+        dataStreamLifecyle = yaml.safeLoad(lifecyleBuffer.toString());
+      } catch (error) {
+        throw new PackageInvalidArchiveError(
+          `Could not parse lifecycle for data stream '${dataStreamPath}': ${error}.`
+        );
+      }
+    }
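
As a small worked example of the parsing step above (sample content only, mirroring the new test fixture): a lifecycle.yml holding a single data_retention key loads into the RegistryDataStreamLifecycle shape that gets attached to the data stream further down.

  import yaml from 'js-yaml';

  // The same call the code above makes, shown in isolation on sample content.
  const lifecycle = yaml.safeLoad('data_retention: "7d"') as RegistryDataStreamLifecycle;
  // lifecycle => { data_retention: '7d' }
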
@@ -385,6 +410,10 @@ export function parseAndVerifyDataStreams(opts: {
       dataStreamObject.routing_rules = dataStreamRoutingRules;
     }
 
+    if (dataStreamLifecyle) {
+      dataStreamObject.lifecycle = dataStreamLifecyle;
+    }
+
     if (ingestPipeline) {
       dataStreamObject.ingest_pipeline = ingestPipeline;
     }

@@ -24,7 +24,7 @@ import { appContextService } from '../../app_context';
 
 import { getArchiveEntry, setArchiveEntry, setArchiveFilelist, setPackageInfo } from '.';
 import type { ArchiveEntry } from '.';
-import { DATASTREAM_ROUTING_RULES_NAME, MANIFEST_NAME, parseAndVerifyArchive } from './parse';
+import { filterAssetPathForParseAndVerifyArchive, parseAndVerifyArchive } from './parse';
 
 const ONE_BYTE = 1024 * 1024;
 // could be anything, picked this from https://github.com/elastic/elastic-agent-client/issues/17
@@ -207,7 +207,7 @@ export const getEsPackage = async (
     return undefined;
   }
 
-  const manifestsAndRoutingRules: Record<string, Buffer> = {};
+  const assetsMap: Record<string, Buffer> = {};
   const entries: ArchiveEntry[] = assets.map(packageAssetToArchiveEntry);
   const paths: string[] = [];
   entries.forEach(({ path, buffer }) => {
@@ -216,16 +216,14 @@ export const getEsPackage = async (
       paths.push(path);
     }
     paths.push(path);
-    if (path.endsWith(MANIFEST_NAME) && buffer) {
-      manifestsAndRoutingRules[path] = buffer;
-    } else if (path.endsWith(DATASTREAM_ROUTING_RULES_NAME) && buffer) {
-      manifestsAndRoutingRules[path] = buffer;
+    if (buffer && filterAssetPathForParseAndVerifyArchive(path)) {
+      assetsMap[path] = buffer;
     }
   });
   // // Add asset references to cache
   setArchiveFilelist({ name: pkgName, version: pkgVersion }, paths);
 
-  const packageInfo = parseAndVerifyArchive(paths, manifestsAndRoutingRules);
+  const packageInfo = parseAndVerifyArchive(paths, assetsMap);
   setPackageInfo({ name: pkgName, version: pkgVersion, packageInfo });
 
   return {

@@ -45,6 +45,8 @@ import {
   forEachMappings,
 } from '../../../experimental_datastream_features_helper';
 
+import { appContextService } from '../../../app_context';
+
 import {
   generateMappings,
   generateTemplateName,
@@ -286,6 +288,7 @@ export function buildComponentTemplates(params: {
   pipelineName?: string;
   defaultSettings: IndexTemplate['template']['settings'];
   experimentalDataStreamFeature?: ExperimentalDataStreamFeature;
+  lifecycle?: IndexTemplate['template']['lifecycle'];
 }) {
   const {
     templateName,
@@ -295,6 +298,7 @@ export function buildComponentTemplates(params: {
     mappings,
     pipelineName,
     experimentalDataStreamFeature,
+    lifecycle,
   } = params;
   const packageTemplateName = `${templateName}${PACKAGE_TEMPLATE_SUFFIX}`;
   const userSettingsTemplateName = `${templateName}${USER_SETTINGS_TEMPLATE_SUFFIX}`;
@@ -375,6 +379,7 @@ export function buildComponentTemplates(params: {
             }
           : {}),
       },
+      ...(lifecycle ? { lifecycle } : {}),
     },
     _meta,
   };
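
To make the effect of that conditional spread concrete, a rough sketch of the package component template body when a lifecycle was passed in; the surrounding fields are placeholders, not captured output from this change:

  // Hypothetical shape of the generated package component template.
  const packageComponentTemplate = {
    template: {
      settings: { /* default + registry settings */ },
      mappings: { /* generated from the package's field definitions */ },
      // Present only when prepareTemplate forwarded a lifecycle (see the next hunk):
      lifecycle: { data_retention: '7d' },
    },
    _meta: { /* package metadata */ },
  };
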
@@ -522,6 +527,9 @@ export function prepareTemplate({
   const templateIndexPattern = generateTemplateIndexPattern(dataStream);
   const templatePriority = getTemplatePriority(dataStream);
 
+  const isILMPolicyDisabled = appContextService.getConfig()?.internal?.disableILMPolicies ?? false;
+  const lifecyle = isILMPolicyDisabled && dataStream.lifecycle ? dataStream.lifecycle : undefined;
+
   const pipelineName = getPipelineNameForDatastream({ dataStream, packageVersion });
 
   const defaultSettings = buildDefaultSettings({
@@ -540,6 +548,7 @@ export function prepareTemplate({
     pipelineName,
     registryElasticsearch: dataStream.elasticsearch,
     experimentalDataStreamFeature,
+    lifecycle: lifecyle,
   });
 
   const template = getTemplate({
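
As the hunks above show, a package-defined lifecycle is only forwarded when ILM policies are disabled in Fleet's internal config (the appContextService.getConfig()?.internal?.disableILMPolicies check); otherwise ILM remains in charge of retention. A minimal standalone sketch of that gating, with names reused purely for clarity:

  function resolveLifecycle(
    disableILMPolicies: boolean,
    dataStreamLifecycle?: { data_retention: string }
  ) {
    // DLM from the package is honored only when ILM policies are turned off.
    return disableILMPolicies && dataStreamLifecycle ? dataStreamLifecycle : undefined;
  }

  resolveLifecycle(true, { data_retention: '7d' });  // => { data_retention: '7d' }
  resolveLifecycle(false, { data_retention: '7d' }); // => undefined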