Mirror of https://github.com/elastic/kibana.git, synced 2025-04-24 17:59:23 -04:00
[Fleet] Support bundling packages as zip archives with Kibana source (#122297)
* Support installing bundled packages during Fleet setup
* Fix failing API integration test
* Sort packages in test
* Move get bundled packages + add tests
* Fix mock installPackage type
* Don't attempt to install already-installed bundled packages
* Use unique installation status for bundled packages
* Fix failing tests
* Fix type error
* Fix logic for latest value
* Remove bundled packages from test since they no longer come back from list endpoint
* Change assertion for empty preconfiguration update request
* Add all bundled packages + refactor install to use Promise.allSettled
* Fix name comparison again
* Rework approach to use preferred source at install-time instead
* Add helper script + update bundled packages + allow updates for upload path
* Fix failing test
* Address code review + add additional tests for update cases
* Fix type error
* Remove configOverride concept for now
* Fix another type error 🙃
* Add build step for bundling fleet packages
* Fix unused import in build step
* Keep bundled_packages directory in place to prevent setup failures
* Remove gitkeep + make bundled package directory lookup fault tolerant
* Update gitignore
* Add doc comment + use build.resolvePath
* Add --use-snapshot-registry CLI flag
* Update snapshots
* Rename arg to use-snapshot-epr
* Don't fail the build on failure to bundle packages
* Update all checksums + don't attempt download multiple times
* Skip checksum process for bundled packages for now
* mkdirp destination directory to fix tests
* Revert build_distributables file
Co-authored-by: Kibana Machine <42973632+kibanamachine@users.noreply.github.com>
parent 40817b2fae
commit 68466226b3

20 changed files with 907 additions and 432 deletions
.gitignore (vendored, 2 changes)

@@ -93,3 +93,5 @@ elastic-agent-*
 fleet-server-*
 elastic-agent.yml
 fleet-server.yml
+
+/x-pack/plugins/fleet/server/bundled_packages
fleet_packages.json (new file, 36 lines)

@@ -0,0 +1,36 @@
/*
  Packages in this file are considered "bundled" and are installed as part of Fleet's setup process. Each entry points to a valid version
  available in the Elastic Package Registry service.

  You may opt in to using the "snapshot" environment of the EPR service by passing the `--use-snapshot-epr` flag to `yarn build`. This will
  cause the package archive download to pull from the "snapshot" environment instead of the "production" environment. Be aware that not all packages
  exist in the snapshot environment, so you may encounter errors when fetching package versions. It's recommended to alter this file to contain _only_ the
  packages you're testing when using the snapshot environment.

  These entries don't currently include a checksum (the sha-512 checksum of each package's `.zip` archive is skipped for now), but they should
  eventually include a package signature, as proposed in https://github.com/elastic/elastic-package/issues/583, in order to verify package integrity.
*/

[
  {
    "name": "apm",
    "version": "8.0.0"
  },
  {
    "name": "elastic_agent",
    "version": "1.3.0"
  },
  {
    "name": "endpoint",
    "version": "1.4.1"
  },
  {
    "name": "fleet_server",
    "version": "1.1.0"
  },
  {
    "name": "synthetics",
    "version": "0.9.0"
  }
]
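For reference, each entry is just a name/version pair; the build task added later in this diff reads the file with JSON5 (so the comment block above is allowed) and derives the EPR archive URL from it. A minimal illustrative sketch of that mapping, assuming the production EPR host and the `${name}-${version}.zip` archive naming used by the bundling task:

```ts
import { readFileSync } from 'fs';
import JSON5 from 'json5';

interface FleetPackage {
  name: string;
  version: string;
}

// Parse fleet_packages.json (JSON5 tolerates the leading comment block).
const fleetPackages: FleetPackage[] = JSON5.parse(readFileSync('fleet_packages.json', 'utf8'));

// Derive the archive URL for each bundled package,
// e.g. https://epr.elastic.co/epr/apm/apm-8.0.0.zip
const archiveUrls = fleetPackages.map(
  ({ name, version }) => `https://epr.elastic.co/epr/${name}/${name}-${version}.zip`
);
```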
@@ -43,6 +43,7 @@ it('build default and oss dist for current platform, without packages, by defaul
   "initialize": true,
   "isRelease": false,
   "targetAllPlatforms": false,
+  "useSnapshotEpr": false,
   "versionQualifier": "",
 },
 "log": <ToolingLog>,

@@ -73,6 +74,7 @@ it('builds packages if --all-platforms is passed', () => {
   "initialize": true,
   "isRelease": false,
   "targetAllPlatforms": true,
+  "useSnapshotEpr": false,
   "versionQualifier": "",
 },
 "log": <ToolingLog>,

@@ -103,6 +105,7 @@ it('limits packages if --rpm passed with --all-platforms', () => {
   "initialize": true,
   "isRelease": false,
   "targetAllPlatforms": true,
+  "useSnapshotEpr": false,
   "versionQualifier": "",
 },
 "log": <ToolingLog>,

@@ -133,6 +136,7 @@ it('limits packages if --deb passed with --all-platforms', () => {
   "initialize": true,
   "isRelease": false,
   "targetAllPlatforms": true,
+  "useSnapshotEpr": false,
   "versionQualifier": "",
 },
 "log": <ToolingLog>,

@@ -164,6 +168,7 @@ it('limits packages if --docker passed with --all-platforms', () => {
   "initialize": true,
   "isRelease": false,
   "targetAllPlatforms": true,
+  "useSnapshotEpr": false,
   "versionQualifier": "",
 },
 "log": <ToolingLog>,

@@ -202,6 +207,7 @@ it('limits packages if --docker passed with --skip-docker-ubi and --all-platform
   "initialize": true,
   "isRelease": false,
   "targetAllPlatforms": true,
+  "useSnapshotEpr": false,
   "versionQualifier": "",
 },
 "log": <ToolingLog>,

@@ -233,6 +239,7 @@ it('limits packages if --all-platforms passed with --skip-docker-ubuntu', () =>
   "initialize": true,
   "isRelease": false,
   "targetAllPlatforms": true,
+  "useSnapshotEpr": false,
   "versionQualifier": "",
 },
 "log": <ToolingLog>,
@@ -40,6 +40,7 @@ export function readCliArgs(argv: string[]) {
       'silent',
       'debug',
       'help',
+      'use-snapshot-epr',
     ],
     alias: {
       v: 'verbose',

@@ -115,6 +116,7 @@ export function readCliArgs(argv: string[]) {
     createDockerUBI: isOsPackageDesired('docker-images') && !Boolean(flags['skip-docker-ubi']),
     createDockerContexts: !Boolean(flags['skip-docker-contexts']),
     targetAllPlatforms: Boolean(flags['all-platforms']),
+    useSnapshotEpr: Boolean(flags['use-snapshot-epr']),
   };

   return {
@@ -30,6 +30,7 @@ export interface BuildOptions {
   versionQualifier: string | undefined;
   targetAllPlatforms: boolean;
   createExamplePlugins: boolean;
+  useSnapshotEpr: boolean;
 }

 export async function buildDistributables(log: ToolingLog, options: BuildOptions): Promise<void> {

@@ -84,6 +85,7 @@ export async function buildDistributables(log: ToolingLog, options: BuildOptions
     await run(Tasks.CleanTypescript);
     await run(Tasks.CleanExtraFilesFromModules);
     await run(Tasks.CleanEmptyFolders);
+    await run(Tasks.BundleFleetPackages);
   }

 /**
src/dev/build/tasks/bundle_fleet_packages.ts (new file, 104 lines)

@@ -0,0 +1,104 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import axios from 'axios';
import JSON5 from 'json5';

// @ts-expect-error untyped internal module used to prevent axios from using xhr adapter in tests
import AxiosHttpAdapter from 'axios/lib/adapters/http';

import { ToolingLog } from '@kbn/dev-utils';
import { closeSync, openSync, writeSync } from 'fs';
import { dirname } from 'path';
import { readCliArgs } from '../args';

import { Task, read, mkdirp } from '../lib';

const BUNDLED_PACKAGES_DIR = 'x-pack/plugins/fleet/server/bundled_packages';

interface FleetPackage {
  name: string;
  version: string;
}

export const BundleFleetPackages: Task = {
  description: 'Bundling fleet packages',

  async run(config, log, build) {
    log.info('Fetching fleet packages from package registry');
    log.indent(4);

    // Support the `--use-snapshot-epr` command line argument to fetch from the snapshot registry
    // in development or test environments
    const { buildOptions } = readCliArgs(process.argv);
    const eprUrl = buildOptions?.useSnapshotEpr
      ? 'https://epr-snapshot.elastic.co'
      : 'https://epr.elastic.co';

    const configFilePath = config.resolveFromRepo('fleet_packages.json');
    const fleetPackages = (await read(configFilePath)) || '[]';

    await Promise.all(
      JSON5.parse(fleetPackages).map(async (fleetPackage: FleetPackage) => {
        const archivePath = `${fleetPackage.name}-${fleetPackage.version}.zip`;
        const archiveUrl = `${eprUrl}/epr/${fleetPackage.name}/${fleetPackage.name}-${fleetPackage.version}.zip`;

        const destination = build.resolvePath(BUNDLED_PACKAGES_DIR, archivePath);

        try {
          await downloadPackageArchive({ log, url: archiveUrl, destination });
        } catch (error) {
          log.warning(`Failed to download bundled package archive ${archivePath}`);
          log.warning(error);
        }
      })
    );
  },
};

/**
 * We need to skip the checksum process on Fleet's bundled packages for now because we can't reliably generate
 * a consistent checksum for the `.zip` file returned from the EPR service. This download process should be updated
 * to verify packages using the proposed package signature field provided in https://github.com/elastic/elastic-package/issues/583
 */
async function downloadPackageArchive({
  log,
  url,
  destination,
}: {
  log: ToolingLog;
  url: string;
  destination: string;
}) {
  log.info(`Downloading bundled package from ${url}`);

  await mkdirp(dirname(destination));
  const file = openSync(destination, 'w');

  try {
    const response = await axios.request({
      url,
      responseType: 'stream',
      adapter: AxiosHttpAdapter,
    });

    await new Promise((resolve, reject) => {
      response.data.on('data', (chunk: Buffer) => {
        writeSync(file, chunk);
      });

      response.data.on('error', reject);
      response.data.on('end', resolve);
    });
  } catch (error) {
    log.warning(`Error downloading bundled package from ${url}`);
    log.warning(error);
  } finally {
    closeSync(file);
  }
}
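The task above deliberately skips checksum verification. As a rough, hypothetical sketch of what verification could look like once checksums or signatures are available (it is not part of this change), a downloaded archive could be streamed through Node's crypto module and compared against an expected sha-512 digest:

```ts
import { createHash } from 'crypto';
import { createReadStream } from 'fs';

// Hypothetical helper: hash a downloaded archive and compare it against an expected
// sha-512 digest (e.g. one that could be recorded alongside each fleet_packages.json entry).
async function verifySha512(archivePath: string, expectedDigest: string): Promise<boolean> {
  const hash = createHash('sha512');

  await new Promise<void>((resolve, reject) => {
    createReadStream(archivePath)
      .on('data', (chunk) => hash.update(chunk))
      .on('error', reject)
      .on('end', resolve);
  });

  return hash.digest('hex') === expectedDigest;
}
```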
@@ -10,6 +10,7 @@ export * from './bin';
 export * from './build_kibana_platform_plugins';
 export * from './build_kibana_example_plugins';
 export * from './build_packages_task';
+export * from './bundle_fleet_packages';
 export * from './clean_tasks';
 export * from './copy_source_task';
 export * from './create_archives_sources_task';
@@ -61,6 +61,9 @@ export const IGNORE_FILE_GLOBS = [

   'x-pack/plugins/maps/server/fonts/**/*',

+  // Bundled package names typically use a format like ${pkgName}-${pkgVersion}, so don't lint them
+  'x-pack/plugins/fleet/server/bundled_packages/**/*',
+
   // Bazel default files
   '**/WORKSPACE.bazel',
   '**/BUILD.bazel',
@@ -60,4 +60,5 @@ export const installationStatuses = {
   Installing: 'installing',
   InstallFailed: 'install_failed',
   NotInstalled: 'not_installed',
+  InstalledBundled: 'installed_bundled',
 } as const;
@@ -45,7 +45,11 @@ export interface DefaultPackagesInstallationError {
 export type InstallType = 'reinstall' | 'reupdate' | 'rollback' | 'update' | 'install' | 'unknown';
 export type InstallSource = 'registry' | 'upload';

-export type EpmPackageInstallStatus = 'installed' | 'installing' | 'install_failed';
+export type EpmPackageInstallStatus =
+  | 'installed'
+  | 'installing'
+  | 'install_failed'
+  | 'installed_bundled';

 export type DetailViewPanelName = 'overview' | 'policies' | 'assets' | 'settings' | 'custom';
 export type ServiceName = 'kibana' | 'elasticsearch';

@@ -410,13 +414,22 @@ export interface PackageUsageStats {
   agent_policy_count: number;
 }

-export type Installable<T> = Installed<T> | Installing<T> | NotInstalled<T> | InstallFailed<T>;
+export type Installable<T> =
+  | InstalledRegistry<T>
+  | Installing<T>
+  | NotInstalled<T>
+  | InstallFailed<T>
+  | InstalledBundled<T>;

-export type Installed<T = {}> = T & {
+export type InstalledRegistry<T = {}> = T & {
   status: InstallationStatus['Installed'];
   savedObject: SavedObject<Installation>;
 };

+export type InstalledBundled<T = {}> = T & {
+  status: InstallationStatus['InstalledBundled'];
+};
+
 export type Installing<T = {}> = T & {
   status: InstallationStatus['Installing'];
   savedObject: SavedObject<Installation>;
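The new `installed_bundled` status has no backing saved object, unlike registry installs. An illustrative, hypothetical pair of type guards showing how a consumer might narrow the widened `Installable` union (assuming these types are re-exported from Fleet's common package):

```ts
import type { Installable, InstalledRegistry, InstalledBundled } from '../../common';

// Registry installs carry a saved object; bundled installs only carry their status.
function isInstalledFromRegistry<T>(pkg: Installable<T>): pkg is InstalledRegistry<T> {
  return pkg.status === 'installed';
}

function isInstalledBundled<T>(pkg: Installable<T>): pkg is InstalledBundled<T> {
  return pkg.status === 'installed_bundled';
}
```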
|
@ -209,9 +209,9 @@ export function parseAndVerifyDataStreams(
|
|||
streams: manifestStreams,
|
||||
...restOfProps
|
||||
} = manifest;
|
||||
if (!(dataStreamTitle && release && type)) {
|
||||
if (!(dataStreamTitle && type)) {
|
||||
throw new PackageInvalidArchiveError(
|
||||
`Invalid manifest for data stream '${dataStreamPath}': one or more fields missing of 'title', 'release', 'type'`
|
||||
`Invalid manifest for data stream '${dataStreamPath}': one or more fields missing of 'title', 'type'`
|
||||
);
|
||||
}
|
||||
const streams = parseAndVerifyStreams(manifestStreams, dataStreamPath);
|
||||
|
|
|
@@ -14,6 +14,7 @@ import type { InstallResult } from '../../../types';

 import { installPackage, isPackageVersionOrLaterInstalled } from './install';
 import type { BulkInstallResponse, IBulkInstallPackageError } from './install';
+import { getBundledPackages } from './get_bundled_packages';

 interface BulkInstallPackagesParams {
   savedObjectsClient: SavedObjectsClientContract;

@@ -21,6 +22,7 @@ interface BulkInstallPackagesParams {
   esClient: ElasticsearchClient;
   force?: boolean;
   spaceId: string;
+  preferredSource?: 'registry' | 'bundled';
 }

 export async function bulkInstallPackages({

@@ -29,9 +31,12 @@ export async function bulkInstallPackages({
   esClient,
   spaceId,
   force,
+  preferredSource = 'registry',
 }: BulkInstallPackagesParams): Promise<BulkInstallResponse[]> {
   const logger = appContextService.getLogger();
-  const installSource = 'registry';
+
+  const bundledPackages = await getBundledPackages();
+
   const packagesResults = await Promise.allSettled(
     packagesToInstall.map((pkg) => {
       if (typeof pkg === 'string') return Registry.fetchFindLatestPackage(pkg);

@@ -39,57 +44,113 @@ export async function bulkInstallPackages({
     })
   );

-  logger.debug(`kicking off bulk install of ${packagesToInstall.join(', ')} from registry`);
+  logger.debug(
+    `kicking off bulk install of ${packagesToInstall.join(
+      ', '
+    )} with preferred source of "${preferredSource}"`
+  );

   const bulkInstallResults = await Promise.allSettled(
     packagesResults.map(async (result, index) => {
       const packageName = getNameFromPackagesToInstall(packagesToInstall, index);
-      if (result.status === 'fulfilled') {
-        const pkgKeyProps = result.value;
-        const installedPackageResult = await isPackageVersionOrLaterInstalled({
-          savedObjectsClient,
-          pkgName: pkgKeyProps.name,
-          pkgVersion: pkgKeyProps.version,
-        });
-        if (installedPackageResult) {
-          const {
-            name,
-            version,
-            installed_es: installedEs,
-            installed_kibana: installedKibana,
-          } = installedPackageResult.package;
-          return {
-            name,
-            version,
-            result: {
-              assets: [...installedEs, ...installedKibana],
-              status: 'already_installed',
-              installType: installedPackageResult.installType,
-            } as InstallResult,
-          };
+
+      if (result.status === 'rejected') {
+        return { name: packageName, error: result.reason };
+      }
+
+      const pkgKeyProps = result.value;
+      const installedPackageResult = await isPackageVersionOrLaterInstalled({
+        savedObjectsClient,
+        pkgName: pkgKeyProps.name,
+        pkgVersion: pkgKeyProps.version,
+      });
+
+      if (installedPackageResult) {
+        const {
+          name,
+          version,
+          installed_es: installedEs,
+          installed_kibana: installedKibana,
+        } = installedPackageResult.package;
+        return {
+          name,
+          version,
+          result: {
+            assets: [...installedEs, ...installedKibana],
+            status: 'already_installed',
+            installType: installedPackageResult.installType,
+          } as InstallResult,
+        };
+      }
+
+      let installResult: InstallResult;
+      const pkgkey = Registry.pkgToPkgKey(pkgKeyProps);
+
+      const bundledPackage = bundledPackages.find((pkg) => pkg.name === pkgkey);
+
+      // If preferred source is bundled packages on disk, attempt to install from disk first, then fall back to registry
+      if (preferredSource === 'bundled') {
+        if (bundledPackage) {
+          logger.debug(
+            `kicking off install of ${pkgKeyProps.name}-${pkgKeyProps.version} from bundled package on disk`
+          );
+          installResult = await installPackage({
+            savedObjectsClient,
+            esClient,
+            installSource: 'upload',
+            archiveBuffer: bundledPackage.buffer,
+            contentType: 'application/zip',
+            spaceId,
+          });
+        } else {
+          installResult = await installPackage({
+            savedObjectsClient,
+            esClient,
+            pkgkey,
+            installSource: 'registry',
+            spaceId,
+            force,
+          });
+        }
-        const installResult = await installPackage({
+      } else {
+        // If preferred source is registry, attempt to install from registry first, then fall back to bundled packages on disk
+        installResult = await installPackage({
           savedObjectsClient,
           esClient,
-          pkgkey: Registry.pkgToPkgKey(pkgKeyProps),
-          installSource,
+          pkgkey,
+          installSource: 'registry',
           spaceId,
           force,
         });
-        if (installResult.error) {
-          return {
-            name: packageName,
-            error: installResult.error,
-            installType: installResult.installType,
-          };
-        } else {
-          return {
-            name: packageName,
-            version: pkgKeyProps.version,
-            result: installResult,
-          };
+
+        // If we initially errored, try to install from bundled package on disk
+        if (installResult.error && bundledPackage) {
+          logger.debug(
+            `kicking off install of ${pkgKeyProps.name}-${pkgKeyProps.version} from bundled package on disk`
+          );
+          installResult = await installPackage({
+            savedObjectsClient,
+            esClient,
+            installSource: 'upload',
+            archiveBuffer: bundledPackage.buffer,
+            contentType: 'application/zip',
+            spaceId,
+          });
+        }
+      }
-      return { name: packageName, error: result.reason };
+
+      if (installResult.error) {
+        return {
+          name: packageName,
+          error: installResult.error,
+          installType: installResult.installType,
+        };
+      }
+      return {
+        name: packageName,
+        version: pkgKeyProps.version,
+        result: installResult,
+      };
     })
   );
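Distilled, the per-package decision above works as follows; this is a condensed, hypothetical sketch rather than the actual implementation, with the install calls abstracted into callbacks:

```ts
interface InstallResult {
  error?: Error;
}

type InstallFn = () => Promise<InstallResult>;

// Prefer one source per package; fall back to the bundled archive only when a
// registry-first install errors. When 'bundled' is preferred but no archive exists
// on disk, the registry is used directly.
async function installWithPreferredSource(opts: {
  preferredSource: 'registry' | 'bundled';
  hasBundledArchive: boolean;
  installFromBundle: InstallFn;
  installFromRegistry: InstallFn;
}): Promise<InstallResult> {
  const { preferredSource, hasBundledArchive, installFromBundle, installFromRegistry } = opts;

  if (preferredSource === 'bundled') {
    return hasBundledArchive ? installFromBundle() : installFromRegistry();
  }

  let result = await installFromRegistry();
  if (result.error && hasBundledArchive) {
    result = await installFromBundle();
  }
  return result;
}
```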
get_bundled_packages.ts (new file, 43 lines)

@@ -0,0 +1,43 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import path from 'path';
import fs from 'fs/promises';

import { appContextService } from '../../app_context';

const BUNDLED_PACKAGE_DIRECTORY = path.join(__dirname, '../../../bundled_packages');

interface BundledPackage {
  name: string;
  buffer: Buffer;
}

export async function getBundledPackages(): Promise<BundledPackage[]> {
  try {
    const dirContents = await fs.readdir(BUNDLED_PACKAGE_DIRECTORY);
    const zipFiles = dirContents.filter((file) => file.endsWith('.zip'));

    const result = await Promise.all(
      zipFiles.map(async (zipFile) => {
        const file = await fs.readFile(path.join(BUNDLED_PACKAGE_DIRECTORY, zipFile));

        return {
          name: zipFile.replace(/\.zip$/, ''),
          buffer: file,
        };
      })
    );

    return result;
  } catch (err) {
    const logger = appContextService.getLogger();
    logger.debug(`Unable to read bundled packages from ${BUNDLED_PACKAGE_DIRECTORY}`);

    return [];
  }
}
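Because each bundled archive is named `${pkgName}-${pkgVersion}.zip`, the `name` returned here matches the package key produced by `Registry.pkgToPkgKey()`, which is how `bulkInstallPackages` pairs registry results with on-disk archives. A small hypothetical usage example:

```ts
import { getBundledPackages } from './get_bundled_packages';

async function findBundledArchive(pkgName: string, pkgVersion: string) {
  const bundledPackages = await getBundledPackages();

  // e.g. 'apm' + '8.0.0' -> 'apm-8.0.0', matching 'apm-8.0.0.zip' on disk.
  const pkgkey = `${pkgName}-${pkgVersion}`;

  return bundledPackages.find((pkg) => pkg.name === pkgkey);
}
```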
@@ -190,10 +190,11 @@ describe('install', () => {
   });

   describe('upload', () => {
-    it('should send telemetry on install failure', async () => {
+    it('should send telemetry on update', async () => {
       jest
         .spyOn(obj, 'getInstallationObject')
         .mockImplementationOnce(() => Promise.resolve({ attributes: { version: '1.2.0' } } as any));
+      jest.spyOn(licenseService, 'hasAtLeast').mockReturnValue(true);
       await installPackage({
         spaceId: DEFAULT_SPACE_ID,
         installSource: 'upload',

@@ -206,13 +207,11 @@ describe('install', () => {
       expect(sendTelemetryEvents).toHaveBeenCalledWith(expect.anything(), undefined, {
         currentVersion: '1.2.0',
         dryRun: false,
-        errorMessage:
-          'Package upload only supports fresh installations. Package apache is already installed, please uninstall first.',
         eventType: 'package-install',
         installType: 'update',
         newVersion: '1.3.0',
         packageName: 'apache',
-        status: 'failure',
+        status: 'success',
       });
     });
@@ -19,11 +19,7 @@ import type {
   InstallSource,
 } from '../../../../common';
 import { AUTO_UPGRADE_POLICIES_PACKAGES } from '../../../../common';
-import {
-  IngestManagerError,
-  PackageOperationNotSupportedError,
-  PackageOutdatedError,
-} from '../../../errors';
+import { IngestManagerError, PackageOutdatedError } from '../../../errors';
 import { PACKAGES_SAVED_OBJECT_TYPE, MAX_TIME_COMPLETE_INSTALL } from '../../../constants';
 import type { KibanaAssetType } from '../../../types';
 import { licenseService } from '../../';

@@ -402,12 +398,6 @@ async function installPackageByUpload({
   telemetryEvent.installType = installType;
   telemetryEvent.currentVersion = installedPkg?.attributes.version || 'not_installed';

-  if (installType !== 'install') {
-    throw new PackageOperationNotSupportedError(
-      `Package upload only supports fresh installations. Package ${packageInfo.name} is already installed, please uninstall first.`
-    );
-  }
-
   const installSource = 'upload';
   const paths = await unpackBufferToCache({
     name: packageInfo.name,

@@ -463,7 +453,9 @@ async function installPackageByUpload({
   }
 }

-export type InstallPackageParams = { spaceId: string } & (
+export type InstallPackageParams = {
+  spaceId: string;
+} & (
   | ({ installSource: Extract<InstallSource, 'registry'> } & InstallRegistryPackageParams)
   | ({ installSource: Extract<InstallSource, 'upload'> } & InstallUploadedArchiveParams)
 );

@@ -472,6 +464,7 @@ export async function installPackage(args: InstallPackageParams) {
   if (!('installSource' in args)) {
     throw new Error('installSource is required');
   }

   const logger = appContextService.getLogger();
   const { savedObjectsClient, esClient } = args;

@@ -488,7 +481,6 @@ export async function installPackage(args: InstallPackageParams) {
     return response;
   } else if (args.installSource === 'upload') {
     const { archiveBuffer, contentType, spaceId } = args;
     logger.debug(`kicking off install of uploaded package`);
     const response = installPackageByUpload({
       savedObjectsClient,
       esClient,
File diff suppressed because it is too large
@@ -21,11 +21,9 @@ import type {
   PreconfiguredOutput,
   PackagePolicy,
 } from '../../common';
+import { PRECONFIGURATION_LATEST_KEYWORD } from '../../common';
 import { SO_SEARCH_LIMIT, normalizeHostsForAgents } from '../../common';
-import {
-  PRECONFIGURATION_DELETION_RECORD_SAVED_OBJECT_TYPE,
-  PRECONFIGURATION_LATEST_KEYWORD,
-} from '../constants';
+import { PRECONFIGURATION_DELETION_RECORD_SAVED_OBJECT_TYPE } from '../constants';

 import { escapeSearchQueryPhrase } from './saved_object';
 import { pkgToPkgKey } from './epm/registry';

@@ -172,19 +170,25 @@ export async function ensurePreconfiguredPackagesAndPolicies(
     );
   }

+  const packagesToInstall = packages.map((pkg) =>
+    pkg.version === PRECONFIGURATION_LATEST_KEYWORD ? pkg.name : pkg
+  );
+
   // Preinstall packages specified in Kibana config
   const preconfiguredPackages = await bulkInstallPackages({
     savedObjectsClient: soClient,
     esClient,
-    packagesToInstall: packages.map((pkg) =>
-      pkg.version === PRECONFIGURATION_LATEST_KEYWORD ? pkg.name : pkg
-    ),
+    packagesToInstall,
     force: true, // Always force outdated packages to be installed if a later version isn't installed
     spaceId,
+    // During setup, we'll try to install preconfigured packages from the versions bundled with Kibana
+    // whenever possible
+    preferredSource: 'bundled',
   });

   const fulfilledPackages = [];
   const rejectedPackages: PreconfigurationError[] = [];

   for (let i = 0; i < preconfiguredPackages.length; i++) {
     const packageResult = preconfiguredPackages[i];
     if ('error' in packageResult) {

@@ -381,7 +385,7 @@ export async function ensurePreconfiguredPackagesAndPolicies(
       }),
     }
   ),
-    packages: fulfilledPackages.map((pkg) => pkgToPkgKey(pkg)),
+    packages: fulfilledPackages.map((pkg) => ('version' in pkg ? pkgToPkgKey(pkg) : pkg.name)),
     nonFatalErrors: [...rejectedPackages, ...rejectedPolicies, ...packagePolicyUpgradeResults],
   };
 }
@@ -54,7 +54,7 @@ export default function (providerContext: FtrProviderContext) {
     };
     const listResponse = await fetchLimitedPackageList();

-    expect(listResponse.items).to.eql(['endpoint']);
+    expect(listResponse.items.sort()).to.eql(['endpoint'].sort());
   });

   it('allows user with only read permission to access', async () => {
@@ -8,7 +8,7 @@
 import expect from '@kbn/expect';
 import { FtrProviderContext } from '../../../api_integration/ftr_provider_context';
 import { skipIfNoDockerRegistry } from '../../helpers';
-import { GetInfoResponse, Installed } from '../../../../plugins/fleet/common';
+import { GetInfoResponse, InstalledRegistry } from '../../../../plugins/fleet/common';
 import { setupFleetAndAgents } from '../agents/services';

 export default function (providerContext: FtrProviderContext) {

@@ -46,7 +46,7 @@ export default function (providerContext: FtrProviderContext) {
         .get(`/api/fleet/epm/packages/endpoint/${latestEndpointVersion}`)
         .expect(200));
       expect(body.item).to.have.property('savedObject');
-      expect((body.item as Installed).savedObject.attributes.install_version).to.eql(
+      expect((body.item as InstalledRegistry).savedObject.attributes.install_version).to.eql(
         latestEndpointVersion
       );
     });
@@ -42,11 +42,7 @@ export default function (providerContext: FtrProviderContext) {
         .send({})
         .expect(200);

-      expect(body).to.eql({
-        packages: [],
-        policies: [],
-        nonFatalErrors: [],
-      });
+      expect(body.nonFatalErrors).to.eql([]);
     });
   });
 }