Mirror of https://github.com/elastic/kibana.git
[Fleet] Make upload and registry package info consistent (#126915)
* Update docker image + set up initial validation test
* Get validation test passing
* Remove erroneous test load call
* Address PR review + improve comments + rename validation.ts -> parse.ts
* Replace packages in fleet_packages.json
* Add temp debug log to debug CI failures
* Use a non-colliding package in bundled package tests
* (debug) Add logging output
* (debug) More logging
* (debug) Log bundled package dir in module
* Use absolute path for bundled packages
* Remove debug logs + use KIBANA_BUILD_LOCATION if it exists
* Add support for developer.bundledPackageLocation config value
* (debug) Try some more logs
* (debug) Try some more logs
* Fix test hopefully 🤞
* Fix other failing tests
* Move default for bundled package dir to schema definition
* Fix schema default value for bundledPackageLocation
* Fix snapshot
* Fix regression in bundled packages fetch
Co-authored-by: Kibana Machine <42973632+kibanamachine@users.noreply.github.com>
Parent: 714f3b2b91 · Commit: 88f12fdc37
26 changed files with 312 additions and 80 deletions
@@ -12,4 +12,25 @@
    in order to verify package integrity.
  */
 
-[]
+[
+  {
+    "name": "apm",
+    "version": "8.1.0"
+  },
+  {
+    "name": "elastic_agent",
+    "version": "1.3.0"
+  },
+  {
+    "name": "endpoint",
+    "version": "1.5.0"
+  },
+  {
+    "name": "fleet_server",
+    "version": "1.1.0"
+  },
+  {
+    "name": "synthetics",
+    "version": "0.9.2"
+  }
+]
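For reference, a minimal sketch (not part of the diff) of how this manifest can be read, mirroring the JSON5-based parsing used by the bundled-package validation test added later in this PR; the `repoRoot` argument and the helper name are assumptions.

```ts
import fs from 'fs/promises';
import path from 'path';
import JSON5 from 'json5';

interface BundledPackageEntry {
  name: string;
  version: string;
}

// Hypothetical helper: fleet_packages.json lives at the repository root and may
// contain comments, so it is parsed with JSON5 rather than JSON.parse.
async function readFleetPackages(repoRoot: string): Promise<BundledPackageEntry[]> {
  const raw = await fs.readFile(path.resolve(repoRoot, 'fleet_packages.json'), 'utf8');
  return JSON5.parse(raw) as BundledPackageEntry[];
}
```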
@@ -35,6 +35,7 @@ export interface FleetConfigType {
   developer?: {
     disableRegistryVersionCheck?: boolean;
     allowAgentUpgradeSourceUri?: boolean;
+    bundledPackageLocation?: string;
   };
 }
@@ -305,6 +305,7 @@ export enum RegistryDataStreamKeys {
 }
 
 export interface RegistryDataStream {
+  [key: string]: any;
   [RegistryDataStreamKeys.type]: string;
   [RegistryDataStreamKeys.ilm_policy]?: string;
   [RegistryDataStreamKeys.hidden]?: boolean;
@@ -323,6 +324,7 @@ export interface RegistryElasticsearch {
   privileges?: RegistryDataStreamPrivileges;
   'index_template.settings'?: estypes.IndicesIndexSettings;
   'index_template.mappings'?: estypes.MappingTypeMapping;
+  'ingest_pipeline.name'?: string;
 }
 
 export interface RegistryDataStreamPrivileges {
@@ -70,4 +70,5 @@ export interface PackageSpecScreenshot {
   title: string;
   size?: string;
   type?: string;
+  path?: string;
 }
@@ -5,6 +5,8 @@
  * 2.0.
  */
 
+import path from 'path';
+
 import { schema } from '@kbn/config-schema';
 import type { TypeOf } from '@kbn/config-schema';
 import type { PluginConfigDescriptor, PluginInitializerContext } from 'src/core/server';
@@ -40,6 +42,8 @@ export type {
 } from './types';
 export { AgentNotFoundError, FleetUnauthorizedError } from './errors';
 
+const DEFAULT_BUNDLED_PACKAGE_LOCATION = path.join(__dirname, '../target/bundled_packages');
+
 export const config: PluginConfigDescriptor = {
   exposeToBrowser: {
     epm: true,
@@ -130,6 +134,7 @@ export const config: PluginConfigDescriptor = {
     developer: schema.object({
       disableRegistryVersionCheck: schema.boolean({ defaultValue: false }),
       allowAgentUpgradeSourceUri: schema.boolean({ defaultValue: false }),
+      bundledPackageLocation: schema.string({ defaultValue: DEFAULT_BUNDLED_PACKAGE_LOCATION }),
     }),
   }),
 };
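A minimal sketch (assumed, not from the diff) of how a `@kbn/config-schema` default like this resolves when the `developer` block is omitted from kibana.yml; the stand-in default constant here is illustrative only.

```ts
import path from 'path';
import { schema } from '@kbn/config-schema';

// Assumed stand-in for the plugin's real default, which is computed relative to
// the Fleet plugin's build output directory.
const DEFAULT_BUNDLED_PACKAGE_LOCATION = path.join(__dirname, '../target/bundled_packages');

const developerSchema = schema.object({
  disableRegistryVersionCheck: schema.boolean({ defaultValue: false }),
  allowAgentUpgradeSourceUri: schema.boolean({ defaultValue: false }),
  bundledPackageLocation: schema.string({ defaultValue: DEFAULT_BUNDLED_PACKAGE_LOCATION }),
});

// Validating an empty object fills in every default, so getBundledPackages()
// always receives a usable directory path even when nothing is configured.
const developerConfig = developerSchema.validate({});
console.log(developerConfig.bundledPackageLocation);
```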
@@ -115,7 +115,7 @@ Object {
   "meta": Object {
     "package": Object {
       "name": "apm",
-      "version": "8.2.0-dev3",
+      "version": "8.2.0-dev4",
     },
   },
   "name": "Elastic APM",
@@ -24,7 +24,7 @@ export function useDockerRegistry() {
 
   let dockerProcess: ChildProcess | undefined;
   async function startDockerRegistryServer() {
-    const dockerImage = `docker.elastic.co/package-registry/distribution@sha256:8b4ce36ecdf86e6cfdf781d9df8d564a014add9afc9aec21cf2c5a68ff82d3ab`;
+    const dockerImage = `docker.elastic.co/package-registry/distribution@sha256:b3dfc6a11ff7dce82ba8689ea9eeb54e353c6b4bfd2d28127b20ef72fd8883e9`;
 
     const args = ['run', '--rm', '-p', `${packageRegistryPort}:8080`, dockerImage];
 
@@ -0,0 +1,77 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import path from 'path';
+import fs from 'fs/promises';
+
+import JSON5 from 'json5';
+import { REPO_ROOT } from '@kbn/utils';
+
+import * as Registry from '../services/epm/registry';
+import { generatePackageInfoFromArchiveBuffer } from '../services/epm/archive';
+
+import { createAppContextStartContractMock } from '../mocks';
+import { appContextService } from '../services';
+
+import { useDockerRegistry } from './helpers';
+
+describe('validate bundled packages', () => {
+  const registryUrl = useDockerRegistry();
+  let mockContract: ReturnType<typeof createAppContextStartContractMock>;
+
+  beforeEach(() => {
+    mockContract = createAppContextStartContractMock({ registryUrl });
+    appContextService.start(mockContract);
+  });
+
+  async function getBundledPackageEntries() {
+    const configFilePath = path.resolve(REPO_ROOT, 'fleet_packages.json');
+    const configFile = await fs.readFile(configFilePath, 'utf8');
+    const bundledPackages = JSON5.parse(configFile);
+
+    return bundledPackages as Array<{ name: string; version: string }>;
+  }
+
+  async function setupPackageObjects() {
+    const bundledPackages = await getBundledPackageEntries();
+
+    const packageObjects = await Promise.all(
+      bundledPackages.map(async (bundledPackage) => {
+        const registryPackage = await Registry.getRegistryPackage(
+          bundledPackage.name,
+          bundledPackage.version
+        );
+
+        const packageArchive = await Registry.fetchArchiveBuffer(
+          bundledPackage.name,
+          bundledPackage.version
+        );
+
+        return { registryPackage, packageArchive };
+      })
+    );
+
+    return packageObjects;
+  }
+
+  it('generates matching package info objects for uploaded and registry packages', async () => {
+    const packageObjects = await setupPackageObjects();
+
+    for (const packageObject of packageObjects) {
+      const { registryPackage, packageArchive } = packageObject;
+
+      const archivePackageInfo = await generatePackageInfoFromArchiveBuffer(
+        packageArchive.archiveBuffer,
+        'application/zip'
+      );
+
+      expect(archivePackageInfo.packageInfo.data_streams).toEqual(
+        registryPackage.packageInfo.data_streams
+      );
+    }
+  });
+});
@@ -21,6 +21,7 @@ import type { PackagePolicyServiceInterface } from '../services/package_policy';
 import type { AgentPolicyServiceInterface } from '../services';
 import type { FleetAppContext } from '../plugin';
 import { createMockTelemetryEventsSender } from '../telemetry/__mocks__';
+import type { FleetConfigType } from '../../common';
 import { createFleetAuthzMock } from '../../common';
 import { agentServiceMock } from '../services/agents/agent_service.mock';
 import type { FleetRequestHandlerContext } from '../types';
@@ -39,11 +40,14 @@ export interface MockedFleetAppContext extends FleetAppContext {
   logger: ReturnType<ReturnType<typeof loggingSystemMock.create>['get']>;
 }
 
-export const createAppContextStartContractMock = (): MockedFleetAppContext => {
+export const createAppContextStartContractMock = (
+  configOverrides: Partial<FleetConfigType> = {}
+): MockedFleetAppContext => {
   const config = {
     agents: { enabled: true, elasticsearch: {} },
     enabled: true,
     agentIdVerificationEnabled: true,
+    ...configOverrides,
   };
 
   const config$ = of(config);
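A usage sketch (assumed, not from the diff) of the new `configOverrides` parameter; the import paths and the local registry URL are illustrative, and the first call mirrors how the new bundled-package validation test passes its Docker registry URL.

```ts
// Hypothetical test-side usage; paths are relative to the Fleet server directory.
import { createAppContextStartContractMock } from '../mocks';
import { appContextService } from '../services';

// Override any FleetConfigType field, e.g. the registry URL used by EPM calls.
const mockContract = createAppContextStartContractMock({
  registryUrl: 'http://localhost:12345',
});

// The new developer.bundledPackageLocation value can be overridden the same way.
const contractWithBundledDir = createAppContextStartContractMock({
  developer: { bundledPackageLocation: '/tmp/fleet_bundled_packages' },
});

appContextService.start(mockContract);
```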
@@ -23,7 +23,7 @@ import { getBufferExtractor } from './extract';
 
 export * from './cache';
 export { getBufferExtractor, untarBuffer, unzipBuffer } from './extract';
-export { parseAndVerifyArchiveBuffer as parseAndVerifyArchiveEntries } from './validation';
+export { generatePackageInfoFromArchiveBuffer } from './parse';
 
 export interface ArchiveEntry {
   path: string;
@@ -5,6 +5,7 @@
  * 2.0.
  */
 
+import { merge } from '@kbn/std';
 import yaml from 'js-yaml';
 import { pick, uniq } from 'lodash';
 
@@ -32,6 +33,45 @@ import { unpackBufferEntries } from './index';
 const MANIFESTS: Record<string, Buffer> = {};
 const MANIFEST_NAME = 'manifest.yml';
 
+const DEFAULT_RELEASE_VALUE = 'ga';
+
+// Ingest pipelines are specified in a `data_stream/<name>/elasticsearch/ingest_pipeline/` directory where a `default`
+// ingest pipeline should be specified by one of these filenames.
+const DEFAULT_INGEST_PIPELINE_VALUE = 'default';
+const DEFAULT_INGEST_PIPELINE_FILE_NAME_YML = 'default.yml';
+const DEFAULT_INGEST_PIPELINE_FILE_NAME_JSON = 'default.json';
+
+// Borrowed from https://github.com/elastic/kibana/blob/main/x-pack/plugins/security_solution/common/utils/expand_dotted.ts
+// with some alterations around non-object values. The package registry service expands some dotted fields from manifest files,
+// so we need to do the same here.
+const expandDottedField = (dottedFieldName: string, val: unknown): object => {
+  const parts = dottedFieldName.split('.');
+
+  if (parts.length === 1) {
+    return { [parts[0]]: val };
+  } else {
+    return { [parts[0]]: expandDottedField(parts.slice(1).join('.'), val) };
+  }
+};
+
+export const expandDottedObject = (dottedObj: object) => {
+  if (typeof dottedObj !== 'object' || Array.isArray(dottedObj)) {
+    return dottedObj;
+  }
+  return Object.entries(dottedObj).reduce(
+    (acc, [key, val]) => merge(acc, expandDottedField(key, val)),
+    {}
+  );
+};
+
+export const expandDottedEntries = (obj: object) => {
+  return Object.entries<any>(obj).reduce<any>((acc, [key, value]) => {
+    acc[key] = expandDottedObject(value);
+
+    return acc;
+  }, {} as Record<string, any>);
+};
+
 // not sure these are 100% correct but they do the job here
 // keeping them local until others need them
 type OptionalPropertyOf<T extends object> = Exclude<
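To make the expansion concrete, a small illustrative example (not part of the diff) of what these helpers produce for dotted manifest keys; the sample keys are assumptions chosen to resemble typical `manifest.yml` Elasticsearch settings, and the import path is illustrative.

```ts
import { expandDottedObject, expandDottedEntries } from './parse';

const flatSettings = {
  'index.codec': 'best_compression',
  'index.mapping.total_fields.limit': 10000,
};

// expandDottedObject turns each dotted key into nested objects and deep-merges them:
// { index: { codec: 'best_compression', mapping: { total_fields: { limit: 10000 } } } }
const nested = expandDottedObject(flatSettings);

// expandDottedEntries applies the same expansion one level down, which is how the
// parser treats `index_template.settings` / `index_template.mappings` blocks:
// { settings: { index: { codec: ..., mapping: { total_fields: { limit: 10000 } } } } }
const nestedEntries = expandDottedEntries({ settings: flatSettings });
```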
@@ -76,10 +116,19 @@ const registryPolicyTemplateProps = Object.values(RegistryPolicyTemplateKeys);
 const registryStreamProps = Object.values(RegistryStreamKeys);
 const registryDataStreamProps = Object.values(RegistryDataStreamKeys);
 
-// TODO: everything below performs verification of manifest.yml files, and hence duplicates functionality already implemented in the
-// package registry. At some point this should probably be replaced (or enhanced) with verification based on
-// https://github.com/elastic/package-spec/
-export async function parseAndVerifyArchiveBuffer(
+/*
+  This function generates a package info object (see type `ArchivePackage`) by parsing and verifying the `manifest.yml` file as well
+  as the directory structure for the given package archive and other files adhering to the package spec: https://github.com/elastic/package-spec.
+
+  Currently, this process is duplicative of logic that's already implemented in the Package Registry codebase,
+  e.g. https://github.com/elastic/package-registry/blob/main/packages/package.go. Because of this duplication, it's likely for our parsing/verification
+  logic to fall out of sync with the registry codebase's implementation.
+
+  This should be addressed in https://github.com/elastic/kibana/issues/115032
+  where we'll no longer use the package registry endpoint as a source of truth for package info objects, and instead Fleet will _always_ generate
+  them in the manner implemented below.
+*/
+export async function generatePackageInfoFromArchiveBuffer(
   archiveBuffer: Buffer,
   contentType: string
 ): Promise<{ paths: string[]; packageInfo: ArchivePackage }> {
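A hedged usage sketch of the renamed function as the upload install path uses it; the caller name, archive path, and import path are assumptions.

```ts
import fs from 'fs/promises';
import { generatePackageInfoFromArchiveBuffer } from './parse';

// Hypothetical caller: read a package .zip from disk and generate its package
// info the same way installPackageByUpload does for an uploaded archive.
async function describeUploadedArchive(archivePath: string) {
  const archiveBuffer = await fs.readFile(archivePath);

  const { paths, packageInfo } = await generatePackageInfoFromArchiveBuffer(
    archiveBuffer,
    'application/zip'
  );

  // packageInfo should mirror what the registry returns for the same package,
  // which is exactly what the new jest integration test asserts.
  return { fileCount: paths.length, name: packageInfo.name, version: packageInfo.version };
}
```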
@@ -144,8 +193,13 @@ function parseAndVerifyArchive(paths: string[]): ArchivePackage {
     );
   }
 
-  parsed.data_streams = parseAndVerifyDataStreams(paths, parsed.name, parsed.version);
+  const parsedDataStreams = parseAndVerifyDataStreams(paths, parsed.name, parsed.version);
+
+  if (parsedDataStreams.length) {
+    parsed.data_streams = parsedDataStreams;
+  }
+
   parsed.policy_templates = parseAndVerifyPolicyTemplates(manifest);
 
   // add readme if exists
   const readme = parseAndVerifyReadme(paths, parsed.name, parsed.version);
   if (readme) {
@@ -202,11 +256,11 @@ export function parseAndVerifyDataStreams(
 
   const {
     title: dataStreamTitle,
-    release,
+    release = DEFAULT_RELEASE_VALUE,
     type,
     dataset,
-    ingest_pipeline: ingestPipeline,
     streams: manifestStreams,
+    elasticsearch,
     ...restOfProps
   } = manifest;
   if (!(dataStreamTitle && type)) {
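A tiny illustrative note (assumed example values) on the `release = DEFAULT_RELEASE_VALUE` change above: destructuring defaults only apply when the property is `undefined`, so a manifest that omits `release` now gets `'ga'` while an explicit value is preserved.

```ts
const DEFAULT_RELEASE_VALUE = 'ga';

// Omitted property -> default kicks in; explicit property -> kept as-is.
const { release: r1 = DEFAULT_RELEASE_VALUE } = {} as { release?: string };
const { release: r2 = DEFAULT_RELEASE_VALUE } = { release: 'beta' };
console.log(r1, r2); // 'ga' 'beta'
```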
@@ -214,28 +268,75 @@
       `Invalid manifest for data stream '${dataStreamPath}': one or more fields missing of 'title', 'type'`
     );
   }
 
+  let ingestPipeline;
+  const ingestPipelinePaths = paths.filter((path) =>
+    path.startsWith(`${pkgKey}/data_stream/${dataStreamPath}/elasticsearch/ingest_pipeline`)
+  );
+
+  if (
+    ingestPipelinePaths.length &&
+    (ingestPipelinePaths.some((ingestPipelinePath) =>
+      ingestPipelinePath.endsWith(DEFAULT_INGEST_PIPELINE_FILE_NAME_YML)
+    ) ||
+      ingestPipelinePaths.some((ingestPipelinePath) =>
+        ingestPipelinePath.endsWith(DEFAULT_INGEST_PIPELINE_FILE_NAME_JSON)
+      ))
+  ) {
+    ingestPipeline = DEFAULT_INGEST_PIPELINE_VALUE;
+  }
+
   const streams = parseAndVerifyStreams(manifestStreams, dataStreamPath);
 
+  const parsedElasticsearchEntry: Record<string, any> = {};
+
+  if (ingestPipeline) {
+    parsedElasticsearchEntry['ingest_pipeline.name'] = DEFAULT_INGEST_PIPELINE_VALUE;
+  }
+
+  if (elasticsearch?.privileges) {
+    parsedElasticsearchEntry.privileges = elasticsearch.privileges;
+  }
+
+  if (elasticsearch?.index_template?.mappings) {
+    parsedElasticsearchEntry['index_template.mappings'] = expandDottedEntries(
+      elasticsearch.index_template.mappings
+    );
+  }
+
+  if (elasticsearch?.index_template?.settings) {
+    parsedElasticsearchEntry['index_template.settings'] = expandDottedEntries(
+      elasticsearch.index_template.settings
+    );
+  }
+
+  // Build up the stream object here so we can conditionally insert nullable fields. The package registry omits undefined
+  // fields, so we're mimicking that behavior here.
+  const dataStreamObject: RegistryDataStream = {
+    title: dataStreamTitle,
+    release,
+    type,
+    package: pkgName,
+    dataset: dataset || `${pkgName}.${dataStreamPath}`,
+    path: dataStreamPath,
+    elasticsearch: parsedElasticsearchEntry,
+  };
+
+  if (ingestPipeline) {
+    dataStreamObject.ingest_pipeline = ingestPipeline;
+  }
+
+  if (streams.length) {
+    dataStreamObject.streams = streams;
+  }
+
   dataStreams.push(
-    Object.entries(restOfProps).reduce(
-      (validatedDataStream, [key, value]) => {
-        if (registryDataStreamProps.includes(key as RegistryDataStreamKeys)) {
-          // @ts-expect-error
-          validatedDataStream[key] = value;
-        }
-        return validatedDataStream;
-      },
-      {
-        title: dataStreamTitle,
-        release,
-        type,
-        package: pkgName,
-        dataset: dataset || `${pkgName}.${dataStreamPath}`,
-        ingest_pipeline: ingestPipeline,
-        path: dataStreamPath,
-        streams,
-      }
-    )
+    Object.entries(restOfProps).reduce((validatedDataStream, [key, value]) => {
+      if (registryDataStreamProps.includes(key as RegistryDataStreamKeys)) {
+        validatedDataStream[key] = value;
+      }
+      return validatedDataStream;
+    }, dataStreamObject)
   );
 });
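A small illustrative sketch (assumed values, not from the diff) of why the conditional inserts above matter: a key explicitly assigned `undefined` still exists on the object, whereas the registry simply omits fields it has no value for.

```ts
// With an unconditional assignment, the key exists even when its value is undefined:
const naive: Record<string, unknown> = { title: 'Logs', type: 'logs', ingest_pipeline: undefined };
console.log('ingest_pipeline' in naive); // true

// Building the object conditionally keeps the shape identical to the registry's,
// which omits fields without values:
const conditional: Record<string, unknown> = { title: 'Logs', type: 'logs' };
const ingestPipeline: string | undefined = undefined;
if (ingestPipeline) {
  conditional.ingest_pipeline = ingestPipeline;
}
console.log('ingest_pipeline' in conditional); // false
console.log(Object.keys(naive).length, Object.keys(conditional).length); // 3 2
```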
@@ -261,25 +362,27 @@ export function parseAndVerifyStreams(
         `Invalid manifest for data stream ${dataStreamPath}: stream is missing one or more fields of: input, title`
       );
     }
 
     const vars = parseAndVerifyVars(manifestVars, `data stream ${dataStreamPath}`);
 
     // default template path name see https://github.com/elastic/package-registry/blob/master/util/dataset.go#L143
+    const streamObject: RegistryStream = {
+      input,
+      title: streamTitle,
+      template_path: templatePath || 'stream.yml.hbs',
+    };
+
+    if (vars.length) {
+      streamObject.vars = vars;
+    }
+
     streams.push(
-      Object.entries(restOfProps).reduce(
-        (validatedStream, [key, value]) => {
-          if (registryStreamProps.includes(key as RegistryStreamKeys)) {
-            // @ts-expect-error
-            validatedStream[key] = value;
-          }
-          return validatedStream;
-        },
-        {
-          input,
-          title: streamTitle,
-          vars,
-          template_path: templatePath || 'stream.yml.hbs',
-        } as RegistryStream
-      )
+      Object.entries(restOfProps).reduce((validatedStream, [key, value]) => {
+        if (registryStreamProps.includes(key as RegistryStreamKeys)) {
+          // @ts-expect-error
+          validatedStream[key] = value;
+        }
+        return validatedStream;
+      }, streamObject)
     );
   });
 }
@@ -27,7 +27,7 @@ import { appContextService } from '../../app_context';
 
 import { getArchiveEntry, setArchiveEntry, setArchiveFilelist, setPackageInfo } from './index';
 import type { ArchiveEntry } from './index';
-import { parseAndVerifyPolicyTemplates, parseAndVerifyStreams } from './validation';
+import { parseAndVerifyPolicyTemplates, parseAndVerifyStreams } from './parse';
 
 const ONE_BYTE = 1024 * 1024;
 // could be anything, picked this from https://github.com/elastic/elastic-agent-client/issues/17
@@ -9,19 +9,26 @@ import fs from 'fs/promises';
 import path from 'path';
 
 import type { BundledPackage } from '../../../types';
 import { IngestManagerError } from '../../../errors';
+import { appContextService } from '../../app_context';
 import { splitPkgKey } from '../registry';
 
-const BUNDLED_PACKAGE_DIRECTORY = path.join(__dirname, '../../../../target/bundled_packages');
-
 export async function getBundledPackages(): Promise<BundledPackage[]> {
+  const config = appContextService.getConfig();
+
+  const bundledPackageLocation = config?.developer?.bundledPackageLocation;
+
+  if (!bundledPackageLocation) {
+    throw new IngestManagerError('xpack.fleet.developer.bundledPackageLocation is not configured');
+  }
+
   try {
-    const dirContents = await fs.readdir(BUNDLED_PACKAGE_DIRECTORY);
+    const dirContents = await fs.readdir(bundledPackageLocation);
     const zipFiles = dirContents.filter((file) => file.endsWith('.zip'));
 
     const result = await Promise.all(
       zipFiles.map(async (zipFile) => {
-        const file = await fs.readFile(path.join(BUNDLED_PACKAGE_DIRECTORY, zipFile));
+        const file = await fs.readFile(path.join(bundledPackageLocation, zipFile));
 
         const { pkgName, pkgVersion } = splitPkgKey(zipFile.replace(/\.zip$/, ''));
 
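For context, bundled archives in this directory follow a `<name>-<version>.zip` naming convention and are split back into name/version by `splitPkgKey`. The sketch below is a simplified, assumed stand-in for that helper (the real one also validates the version as semver); it only illustrates the filename convention.

```ts
// Illustrative only: simplified stand-in for splitPkgKey as used on bundled zip names.
const splitZipName = (zipFile: string) => {
  const pkgKey = zipFile.replace(/\.zip$/, '');
  const separatorIndex = pkgKey.lastIndexOf('-');
  return {
    pkgName: pkgKey.substring(0, separatorIndex),
    pkgVersion: pkgKey.substring(separatorIndex + 1),
  };
};

console.log(splitZipName('nginx-1.2.1.zip')); // { pkgName: 'nginx', pkgVersion: '1.2.1' }
```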
@@ -36,7 +43,7 @@ export async function getBundledPackages(): Promise<BundledPackage[]> {
     return result;
   } catch (err) {
     const logger = appContextService.getLogger();
-    logger.debug(`Unable to read bundled packages from ${BUNDLED_PACKAGE_DIRECTORY}`);
+    logger.debug(`Unable to read bundled packages from ${bundledPackageLocation}`);
 
     return [];
   }
@@ -54,7 +54,7 @@ jest.mock('../kibana/index_pattern/install', () => {
 });
 jest.mock('../archive', () => {
   return {
-    parseAndVerifyArchiveEntries: jest.fn(() =>
+    generatePackageInfoFromArchiveBuffer: jest.fn(() =>
       Promise.resolve({ packageInfo: { name: 'apache', version: '1.3.0' } })
     ),
     unpackBufferToCache: jest.fn(),
@@ -32,7 +32,11 @@ import type {
 } from '../../../types';
 import { appContextService } from '../../app_context';
 import * as Registry from '../registry';
-import { setPackageInfo, parseAndVerifyArchiveEntries, unpackBufferToCache } from '../archive';
+import {
+  setPackageInfo,
+  generatePackageInfoFromArchiveBuffer,
+  unpackBufferToCache,
+} from '../archive';
 import { toAssetReference } from '../kibana/assets/install';
 import type { ArchiveAsset } from '../kibana/assets/install';
 
@@ -391,7 +395,7 @@ async function installPackageByUpload({
   let installType: InstallType = 'unknown';
   const telemetryEvent: PackageUpdateEvent = getTelemetryEvent('', '');
   try {
-    const { packageInfo } = await parseAndVerifyArchiveEntries(archiveBuffer, contentType);
+    const { packageInfo } = await generatePackageInfoFromArchiveBuffer(archiveBuffer, contentType);
 
     const installedPkg = await getInstallationObject({
       savedObjectsClient,
@@ -261,7 +261,7 @@ export async function ensureCachedArchiveInfo(
   }
 }
 
-async function fetchArchiveBuffer(
+export async function fetchArchiveBuffer(
   pkgName: string,
   pkgVersion: string
 ): Promise<{ archiveBuffer: Buffer; archivePath: string }> {
@@ -14,7 +14,7 @@ export default function loadTests({ loadTestFile }) {
     loadTestFile(require.resolve('./file'));
     loadTestFile(require.resolve('./template'));
     loadTestFile(require.resolve('./ilm'));
-    // loadTestFile(require.resolve('./install_bundled'));
+    loadTestFile(require.resolve('./install_bundled'));
     loadTestFile(require.resolve('./install_by_upload'));
     loadTestFile(require.resolve('./install_endpoint'));
     loadTestFile(require.resolve('./install_overrides'));
@@ -9,6 +9,7 @@ import expect from '@kbn/expect';
 import fs from 'fs/promises';
 import path from 'path';
 
+import { BUNDLED_PACKAGE_DIR } from '../../config';
 import { FtrProviderContext } from '../../../api_integration/ftr_provider_context';
 import { skipIfNoDockerRegistry } from '../../helpers';
 import { setupFleetAndAgents } from '../agents/services';
@@ -22,30 +23,31 @@ export default function (providerContext: FtrProviderContext) {
     path.dirname(__filename),
     '../fixtures/bundled_packages'
   );
-  const BUNDLED_PACKAGES_DIR = path.join(
-    path.dirname(__filename),
-    '../../../../plugins/fleet/target/bundled_packages'
-  );
 
   const bundlePackage = async (name: string) => {
     try {
-      await fs.access(BUNDLED_PACKAGES_DIR);
+      await fs.access(BUNDLED_PACKAGE_DIR);
     } catch (error) {
-      await fs.mkdir(BUNDLED_PACKAGES_DIR);
+      await fs.mkdir(BUNDLED_PACKAGE_DIR);
     }
 
     await fs.copyFile(
       path.join(BUNDLED_PACKAGE_FIXTURES_DIR, `${name}.zip`),
-      path.join(BUNDLED_PACKAGES_DIR, `${name}.zip`)
+      path.join(BUNDLED_PACKAGE_DIR, `${name}.zip`)
     );
   };
 
   const removeBundledPackages = async () => {
     try {
-      const files = await fs.readdir(BUNDLED_PACKAGES_DIR);
+      const files = await fs.readdir(BUNDLED_PACKAGE_DIR);
 
       for (const file of files) {
-        await fs.unlink(path.join(BUNDLED_PACKAGES_DIR, file));
+        const isFixtureFile = !!(await fs.readFile(path.join(BUNDLED_PACKAGE_FIXTURES_DIR, file)));
+
+        // Only remove fixture files - leave normal bundled packages in place
+        if (isFixtureFile) {
+          await fs.unlink(path.join(BUNDLED_PACKAGE_DIR, file));
+        }
       }
     } catch (error) {
       log.error('Error removing bundled packages');
@@ -63,10 +65,11 @@ export default function (providerContext: FtrProviderContext) {
 
     describe('without registry', () => {
       it('installs from bundled source via api', async () => {
-        await bundlePackage('elastic_agent-1.2.0');
+        // Need to bundle a package that doesn't conflict with those listed in `fleet_packages.json`
+        await bundlePackage('nginx-1.2.1');
 
         const response = await supertest
-          .post(`/api/fleet/epm/packages/elastic_agent/1.2.0`)
+          .post(`/api/fleet/epm/packages/nginx/1.2.1`)
           .set('kbn-xsrf', 'xxxx')
           .type('application/json')
           .send({ force: true })
@@ -76,11 +79,11 @@ export default function (providerContext: FtrProviderContext) {
       });
 
       it('allows for upgrading from newer bundled source when outdated package was installed from bundled source', async () => {
-        await bundlePackage('elastic_agent-1.0.0');
-        await bundlePackage('elastic_agent-1.2.0');
+        await bundlePackage('nginx-1.1.0');
+        await bundlePackage('nginx-1.2.1');
 
         const installResponse = await supertest
-          .post(`/api/fleet/epm/packages/elastic_agent/1.0.0`)
+          .post(`/api/fleet/epm/packages/nginx/1.1.0`)
           .set('kbn-xsrf', 'xxxx')
           .type('application/json')
           .send({ force: true })
@@ -89,7 +92,7 @@ export default function (providerContext: FtrProviderContext) {
         expect(installResponse.body._meta.install_source).to.be('bundled');
 
         const updateResponse = await supertest
-          .post(`/api/fleet/epm/packages/elastic_agent/1.2.0`)
+          .post(`/api/fleet/epm/packages/nginx/1.2.1`)
          .set('kbn-xsrf', 'xxxx')
           .type('application/json')
           .send({ force: true })
@@ -101,10 +104,10 @@ export default function (providerContext: FtrProviderContext) {
 
     describe('with registry', () => {
       it('allows for updating from registry when outdated package is installed from bundled source', async () => {
-        await bundlePackage('elastic_agent-1.2.0');
+        await bundlePackage('nginx-1.1.0');
 
         const bundledInstallResponse = await supertest
-          .post(`/api/fleet/epm/packages/elastic_agent/1.2.0`)
+          .post(`/api/fleet/epm/packages/nginx/1.1.0`)
           .set('kbn-xsrf', 'xxxx')
           .type('application/json')
           .send({ force: true })
@@ -112,8 +115,9 @@ export default function (providerContext: FtrProviderContext) {
 
         expect(bundledInstallResponse.body._meta.install_source).to.be('bundled');
 
+        // Update to one version prior to the bundled version of nginx
         const registryUpdateResponse = await supertest
-          .post(`/api/fleet/epm/packages/elastic_agent/1.3.0`)
+          .post(`/api/fleet/epm/packages/elastic_agent/1.2.0`)
           .set('kbn-xsrf', 'xxxx')
           .type('application/json')
           .send({ force: true })
@@ -75,7 +75,7 @@ export default function (providerContext: FtrProviderContext) {
         .type('application/gzip')
         .send(buf)
         .expect(200);
-      expect(res.body.items.length).to.be(27);
+      expect(res.body.items.length).to.be(29);
     });
 
     it('should install a zip archive correctly and package info should return correctly after validation', async function () {
@@ -86,7 +86,7 @@ export default function (providerContext: FtrProviderContext) {
         .type('application/zip')
         .send(buf)
         .expect(200);
-      expect(res.body.items.length).to.be(27);
+      expect(res.body.items.length).to.be(29);
     });
 
     it('should throw an error if the archive is zip but content type is gzip', async function () {
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -15,7 +15,9 @@ import { defineDockerServersConfig } from '@kbn/test';
 // example: https://beats-ci.elastic.co/blue/organizations/jenkins/Ingest-manager%2Fpackage-storage/detail/snapshot/74/pipeline/257#step-302-log-1.
 // It should be updated any time there is a new Docker image published for the Snapshot Distribution of the Package Registry.
 export const dockerImage =
-  'docker.elastic.co/package-registry/distribution@sha256:8b4ce36ecdf86e6cfdf781d9df8d564a014add9afc9aec21cf2c5a68ff82d3ab';
+  'docker.elastic.co/package-registry/distribution@sha256:b3dfc6a11ff7dce82ba8689ea9eeb54e353c6b4bfd2d28127b20ef72fd8883e9';
+
+export const BUNDLED_PACKAGE_DIR = '/tmp/fleet_bundled_packages';
 
 export default async function ({ readConfigFile }: FtrConfigProviderContext) {
   const xPackAPITestsConfig = await readConfigFile(require.resolve('../api_integration/config.ts'));
@@ -63,6 +65,7 @@ export default async function ({ readConfigFile }: FtrConfigProviderContext) {
       `--xpack.fleet.packages.0.name=endpoint`,
       `--xpack.fleet.packages.0.version=latest`,
       ...(registryPort ? [`--xpack.fleet.registryUrl=http://localhost:${registryPort}`] : []),
+      `--xpack.fleet.developer.bundledPackageLocation=${BUNDLED_PACKAGE_DIR}`,
     ],
   },
 };
@@ -15,7 +15,7 @@ import { pageObjects } from './page_objects';
 // example: https://beats-ci.elastic.co/blue/organizations/jenkins/Ingest-manager%2Fpackage-storage/detail/snapshot/74/pipeline/257#step-302-log-1.
 // It should be updated any time there is a new Docker image published for the Snapshot Distribution of the Package Registry.
 export const dockerImage =
-  'docker.elastic.co/package-registry/distribution@sha256:8b4ce36ecdf86e6cfdf781d9df8d564a014add9afc9aec21cf2c5a68ff82d3ab';
+  'docker.elastic.co/package-registry/distribution@sha256:b3dfc6a11ff7dce82ba8689ea9eeb54e353c6b4bfd2d28127b20ef72fd8883e9';
 
 // the default export of config files must be a config provider
 // that returns an object with the projects config values
@@ -17,7 +17,7 @@ import { pageObjects } from './page_objects';
 // example: https://beats-ci.elastic.co/blue/organizations/jenkins/Ingest-manager%2Fpackage-storage/detail/snapshot/74/pipeline/257#step-302-log-1.
 // It should be updated any time there is a new Docker image published for the Snapshot Distribution of the Package Registry that updates Synthetics.
 export const dockerImage =
-  'docker.elastic.co/package-registry/distribution@sha256:8b4ce36ecdf86e6cfdf781d9df8d564a014add9afc9aec21cf2c5a68ff82d3ab';
+  'docker.elastic.co/package-registry/distribution@sha256:b3dfc6a11ff7dce82ba8689ea9eeb54e353c6b4bfd2d28127b20ef72fd8883e9';
 
 // the default export of config files must be a config provider
 // that returns an object with the projects config values