Mirror of https://github.com/elastic/kibana.git (synced 2025-04-23 09:19:04 -04:00)
[Fleet] Update fleet file storage indices to the new Datastream names (#160998)
## Summary

Updates the Fleet file storage indices to the new data stream names:

| Old Name | New Name |
|-----------|-------------------|
| `.fleet-file-data-*` | `.fleet-fileds-fromhost-data-*` |
| `.fleet-files-*` | `.fleet-fileds-fromhost-meta-*` |
| `.fleet-filedelivery-data-*` | `.fleet-fileds-tohost-data-*` |
| `.fleet-filedelivery-meta-*` | `.fleet-fileds-tohost-meta-*` |

- Removes the code that was initializing the old backing indices
- Updates the `fleet:check-deleted-files-task` to ensure it correctly parses the index name/alias from the underlying chunk backing index
- Updates Security Solution dev scripts, types, and mocks to include the `@timestamp` property and ensures any indexed mocks use `op_type: create`
Parent: e8b2303875
Commit: 66fd6eb0ef
19 changed files with 187 additions and 281 deletions
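For reference, the new index names are derived from the integration name by the utilities in `common/services/file_storage`. A minimal sketch of that naming convention (a hypothetical simplification, not the actual Fleet helpers; the parameter name `forHostDelivery` is illustrative, but the outputs match the updated unit tests further down):

```ts
// Sketch only: simplified stand-ins for getFileMetadataIndexName / getFileDataIndexName.
const getFileMetadataIndexName = (integration: string, forHostDelivery = false): string =>
  forHostDelivery
    ? `.fleet-fileds-tohost-meta-${integration}`
    : `.fleet-fileds-fromhost-meta-${integration}`;

const getFileDataIndexName = (integration: string, forHostDelivery = false): string =>
  forHostDelivery
    ? `.fleet-fileds-tohost-data-${integration}`
    : `.fleet-fileds-fromhost-data-${integration}`;

// getFileMetadataIndexName('foo')    -> '.fleet-fileds-fromhost-meta-foo'
// getFileDataIndexName('foo', true)  -> '.fleet-fileds-tohost-data-foo'
```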
@@ -8,26 +8,26 @@
// File storage indexes supporting file upload from the host to Elastic/Kibana
// If needing to get an integration specific index name, use the utility functions
// found in `common/services/file_storage`
export const FILE_STORAGE_METADATA_INDEX_PATTERN = '.fleet-files-*';
export const FILE_STORAGE_DATA_INDEX_PATTERN = '.fleet-file-data-*';
export const FILE_STORAGE_METADATA_INDEX_PATTERN = '.fleet-fileds-fromhost-meta-*';
export const FILE_STORAGE_DATA_INDEX_PATTERN = '.fleet-fileds-fromhost-data-*';

// File storage indexes supporting user uplaoded files (via kibana) that will be
// File storage indexes supporting user uploaded files (via kibana) that will be
// delivered to the host agent/endpoint
export const FILE_STORAGE_TO_HOST_METADATA_INDEX_PATTERN = '.fleet-filedelivery-meta-*';
export const FILE_STORAGE_TO_HOST_DATA_INDEX_PATTERN = '.fleet-filedelivery-data-*';
export const FILE_STORAGE_TO_HOST_METADATA_INDEX_PATTERN = '.fleet-fileds-tohost-meta-*';
export const FILE_STORAGE_TO_HOST_DATA_INDEX_PATTERN = '.fleet-fileds-tohost-data-*';

// which integrations support file upload and the name to use for the file upload index
export const FILE_STORAGE_INTEGRATION_INDEX_NAMES: Readonly<
Record<
string,
{
Readonly<{
/** name to be used for the index */
name: string;
/** If integration supports files sent from host to ES/Kibana */
fromHost: boolean;
/** If integration supports files to be sent to host from kibana */
toHost: boolean;
}
}>
>
> = {
elastic_agent: { name: 'agent', fromHost: true, toHost: false },
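The `fromHost`/`toHost` flags added to `FILE_STORAGE_INTEGRATION_INDEX_NAMES` describe which transfer directions an integration supports. A hedged sketch of how such a record entry could be used to enumerate the index names an integration needs (illustrative only; this helper is not part of the change):

```ts
// Illustrative only: enumerate the file-storage index names an integration uses,
// based on the fromHost / toHost flags in FILE_STORAGE_INTEGRATION_INDEX_NAMES.
type FileStorageIndexConfig = Readonly<{ name: string; fromHost: boolean; toHost: boolean }>;

const listFileStorageIndexNames = ({ name, fromHost, toHost }: FileStorageIndexConfig): string[] => {
  const names: string[] = [];
  if (fromHost) {
    names.push(`.fleet-fileds-fromhost-meta-${name}`, `.fleet-fileds-fromhost-data-${name}`);
  }
  if (toHost) {
    names.push(`.fleet-fileds-tohost-meta-${name}`, `.fleet-fileds-tohost-data-${name}`);
  }
  return names;
};

// listFileStorageIndexNames({ name: 'agent', fromHost: true, toHost: false })
// -> ['.fleet-fileds-fromhost-meta-agent', '.fleet-fileds-fromhost-data-agent']
```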
@@ -5,24 +5,41 @@
* 2.0.
*/

import { FILE_STORAGE_METADATA_INDEX_PATTERN } from '../constants';

import { getFileDataIndexName, getFileMetadataIndexName } from '..';

import { getIntegrationNameFromIndexName } from './file_storage';

describe('File Storage services', () => {
describe('File Index Names', () => {
it('should generate file metadata index name for files received from host', () => {
expect(getFileMetadataIndexName('foo')).toEqual('.fleet-files-foo');
expect(getFileMetadataIndexName('foo')).toEqual('.fleet-fileds-fromhost-meta-foo');
});

it('should generate file data index name for files received from host', () => {
expect(getFileDataIndexName('foo')).toEqual('.fleet-file-data-foo');
expect(getFileDataIndexName('foo')).toEqual('.fleet-fileds-fromhost-data-foo');
});

it('should generate file metadata index name for files to be delivered to host', () => {
expect(getFileMetadataIndexName('foo', true)).toEqual('.fleet-filedelivery-meta-foo');
expect(getFileMetadataIndexName('foo', true)).toEqual('.fleet-fileds-tohost-meta-foo');
});

it('should generate file data index name for files to be delivered to host', () => {
expect(getFileDataIndexName('foo', true)).toEqual('.fleet-filedelivery-data-foo');
expect(getFileDataIndexName('foo', true)).toEqual('.fleet-fileds-tohost-data-foo');
});
});

describe('getIntegrationNameFromIndexName()', () => {
it.each([
['regular index names', '.fleet-fileds-fromhost-meta-agent'],
['datastream index names', '.ds-.fleet-fileds-fromhost-data-agent-2023.06.30-00001'],
])('should handle %s', (_, index) => {
expect(getIntegrationNameFromIndexName(index, FILE_STORAGE_METADATA_INDEX_PATTERN)).toEqual(
'agent'
);
});

it.todo('should error if index pattern does not include `*`');
});
});
@@ -56,21 +56,19 @@ export const getFileDataIndexName = (
);
};

/**
* Returns the write index name for a given file upload alias name, this is the same for metadata and chunks
* @param aliasName
*/
export const getFileWriteIndexName = (aliasName: string) => aliasName + '-000001';
/**
* Returns back the integration name for a given File Data (chunks) index name.
*
* @example
* // Given a File data index pattern of `.fleet-file-data-*`:
* // Given a File data index pattern of `.fleet-fileds-fromhost-data-*`:
*
* getIntegrationNameFromFileDataIndexName('.fleet-file-data-agent');
* getIntegrationNameFromFileDataIndexName('.fleet-fileds-fromhost-data-agent');
* // return 'agent'
*
* getIntegrationNameFromFileDataIndexName('.fleet-file-data-agent-00001');
* getIntegrationNameFromFileDataIndexName('.ds-.fleet-fileds-fromhost-data-agent');
* // return 'agent'
*
* getIntegrationNameFromFileDataIndexName('.ds-.fleet-fileds-fromhost-data-agent-2023.06.30-00001');
* // return 'agent'
*/
export const getIntegrationNameFromFileDataIndexName = (indexName: string): string => {

@@ -87,7 +85,7 @@ export const getIntegrationNameFromIndexName = (
throw new Error(`Unable to parse index name. No '*' in index pattern: ${indexPattern}`);
}

const indexPieces = indexName.split('-');
const indexPieces = indexName.replace(/^\.ds-/, '').split('-');

if (indexPieces[integrationNameIndexPosition]) {
return indexPieces[integrationNameIndexPosition];

@@ -95,15 +93,3 @@ export const getIntegrationNameFromIndexName = (

throw new Error(`Index name ${indexName} does not seem to be a File storage index`);
};

export const getFileStorageWriteIndexBody = (aliasName: string) => ({
aliases: {
[aliasName]: {
is_write_index: true,
},
},
settings: {
'index.lifecycle.rollover_alias': aliasName,
'index.hidden': true,
},
});
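The key change above is that `getIntegrationNameFromIndexName()` now strips the `.ds-` prefix that Elasticsearch adds to data stream backing indices before splitting the name. A sketch of how the full function could look after this change (only the `.ds-` stripping and the error messages come from the diff; the way `integrationNameIndexPosition` is derived from the pattern is an assumption):

```ts
// Sketch of getIntegrationNameFromIndexName() after this change.
export const getIntegrationNameFromIndexName = (
  indexName: string,
  indexPattern: string
): string => {
  // e.g. '.fleet-fileds-fromhost-meta-*' -> the integration name sits where the '*' is.
  const integrationNameIndexPosition = indexPattern.split('-').indexOf('*');

  if (integrationNameIndexPosition === -1) {
    throw new Error(`Unable to parse index name. No '*' in index pattern: ${indexPattern}`);
  }

  // Backing indices of data streams are prefixed with '.ds-',
  // e.g. '.ds-.fleet-fileds-fromhost-data-agent-2023.06.30-00001'.
  const indexPieces = indexName.replace(/^\.ds-/, '').split('-');

  if (indexPieces[integrationNameIndexPosition]) {
    return indexPieces[integrationNameIndexPosition];
  }

  throw new Error(`Index name ${indexName} does not seem to be a File storage index`);
};
```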
@@ -11,18 +11,9 @@ import type { ElasticsearchClient, Logger } from '@kbn/core/server';

import type { IndicesCreateRequest } from '@elastic/elasticsearch/lib/api/types';

import {
FILE_STORAGE_INTEGRATION_INDEX_NAMES,
FILE_STORAGE_INTEGRATION_NAMES,
} from '../../../../../common/constants';

import { ElasticsearchAssetType } from '../../../../types';
import {
getFileWriteIndexName,
getFileStorageWriteIndexBody,
getPipelineNameForDatastream,
getFileDataIndexName,
getFileMetadataIndexName,
getRegistryDataStreamAssetBaseName,
} from '../../../../../common/services';
import type {

@@ -440,63 +431,6 @@ export async function ensureDefaultComponentTemplates(
);
}

/*
* Given a list of integration names, if the integrations support file upload
* then ensure that the alias has a matching write index, as we use "plain" indices
* not data streams.
* e.g .fleet-file-data-agent must have .fleet-file-data-agent-00001 as the write index
* before files can be uploaded.
*/
export async function ensureFileUploadWriteIndices(opts: {
esClient: ElasticsearchClient;
logger: Logger;
integrationNames: string[];
}) {
const { esClient, logger, integrationNames } = opts;

const integrationsWithFileUpload = integrationNames.filter((integration) =>
FILE_STORAGE_INTEGRATION_NAMES.includes(integration as any)
);

if (!integrationsWithFileUpload.length) return [];

const ensure = (aliasName: string) =>
ensureAliasHasWriteIndex({
esClient,
logger,
aliasName,
writeIndexName: getFileWriteIndexName(aliasName),
body: getFileStorageWriteIndexBody(aliasName),
});

return Promise.all(
integrationsWithFileUpload.flatMap((integrationName) => {
const {
name: indexName,
fromHost,
toHost,
} = FILE_STORAGE_INTEGRATION_INDEX_NAMES[integrationName];
const indexCreateRequests: Array<Promise<void>> = [];

if (fromHost) {
indexCreateRequests.push(
ensure(getFileDataIndexName(indexName)),
ensure(getFileMetadataIndexName(indexName))
);
}

if (toHost) {
indexCreateRequests.push(
ensure(getFileDataIndexName(indexName, true)),
ensure(getFileMetadataIndexName(indexName, true))
);
}

return indexCreateRequests;
})
);
}

export async function ensureComponentTemplate(
esClient: ElasticsearchClient,
logger: Logger,
@@ -37,7 +37,6 @@ import type {
PackageVerificationResult,
IndexTemplateEntry,
} from '../../../types';
import { ensureFileUploadWriteIndices } from '../elasticsearch/template/install';
import { removeLegacyTemplates } from '../elasticsearch/template/remove_legacy';
import { isTopLevelPipeline, deletePreviousPipelines } from '../elasticsearch/ingest_pipeline';
import { installILMPolicy } from '../elasticsearch/ilm/install';

@@ -236,15 +235,6 @@ export async function _installPackage({
logger.warn(`Error removing legacy templates: ${e.message}`);
}

const { diagnosticFileUploadEnabled } = appContextService.getExperimentalFeatures();
if (diagnosticFileUploadEnabled) {
await ensureFileUploadWriteIndices({
integrationNames: [packageInfo.name],
esClient,
logger,
});
}

// update current backing indices of each data stream
await withPackageSpan('Update write indices', () =>
updateCurrentWriteIndices(esClient, logger, indexTemplates)
@@ -91,11 +91,11 @@ describe('FleetFromHostFilesClient', () => {

esClientMock.search.mockImplementation(async (searchRequest = {}) => {
// File metadata
if ((searchRequest.index as string).startsWith('.fleet-files-')) {
if ((searchRequest.index as string).startsWith('.fleet-fileds-fromhost-meta-')) {
return fleetFilesIndexSearchResponse;
}

if ((searchRequest.index as string).startsWith('.fleet-file-data-')) {
if ((searchRequest.index as string).startsWith('.fleet-fileds-fromhost-data-')) {
return fleetFileDataIndexSearchResponse;
}

@@ -111,8 +111,8 @@ describe('FleetFromHostFilesClient', () => {
expect(createEsFileClientMock).toHaveBeenCalledWith({
elasticsearchClient: esClientMock,
logger: loggerMock,
metadataIndex: '.fleet-files-foo',
blobStorageIndex: '.fleet-file-data-foo',
metadataIndex: '.fleet-fileds-fromhost-meta-foo',
blobStorageIndex: '.fleet-fileds-fromhost-data-foo',
indexIsAlias: true,
});
});

@@ -159,7 +159,7 @@ describe('FleetFromHostFilesClient', () => {
},
},
},
index: '.fleet-file-data-foo',
index: '.fleet-fileds-fromhost-data-foo',
size: 0,
});
});
@@ -130,8 +130,8 @@ describe('FleetToHostFilesClient', () => {
expect(createEsFileClientMock).toHaveBeenCalledWith({
elasticsearchClient: esClientMock,
logger: loggerMock,
metadataIndex: '.fleet-filedelivery-meta-foo',
blobStorageIndex: '.fleet-filedelivery-data-foo',
metadataIndex: '.fleet-fileds-tohost-meta-foo',
blobStorageIndex: '.fleet-fileds-tohost-data-foo',
maxSizeBytes: 12345,
indexIsAlias: true,
});
@@ -34,22 +34,27 @@ export async function getFilesByStatus(
abortController: AbortController,
status: FileStatus = 'READY'
): Promise<SearchHit[]> {
const result = await esClient.search(
{
index: FILE_STORAGE_METADATA_INDEX_PATTERN,
body: {
size: ES_SEARCH_LIMIT,
query: {
term: {
'file.Status': status,
const result = await esClient
.search(
{
index: FILE_STORAGE_METADATA_INDEX_PATTERN,
body: {
size: ES_SEARCH_LIMIT,
query: {
term: {
'file.Status': status,
},
},
_source: false,
},
_source: false,
ignore_unavailable: true,
},
ignore_unavailable: true,
},
{ signal: abortController.signal }
);
{ signal: abortController.signal }
)
.catch((err) => {
Error.captureStackTrace(err);
throw err;
});

return result.hits.hits;
}

@@ -84,32 +89,37 @@ export async function fileIdsWithoutChunksByIndex(
return acc;
}, {} as FileIdsByIndex);

const chunks = await esClient.search<{ bid: string }>(
{
index: FILE_STORAGE_DATA_INDEX_PATTERN,
body: {
size: ES_SEARCH_LIMIT,
query: {
bool: {
must: [
{
terms: {
bid: Array.from(allFileIds),
const chunks = await esClient
.search<{ bid: string }>(
{
index: FILE_STORAGE_DATA_INDEX_PATTERN,
body: {
size: ES_SEARCH_LIMIT,
query: {
bool: {
must: [
{
terms: {
bid: Array.from(allFileIds),
},
},
},
{
term: {
last: true,
{
term: {
last: true,
},
},
},
],
],
},
},
_source: ['bid'],
},
_source: ['bid'],
},
},
{ signal: abortController.signal }
);
{ signal: abortController.signal }
)
.catch((err) => {
Error.captureStackTrace(err);
throw err;
});

chunks.hits.hits.forEach((hit) => {
const fileId = hit._source?.bid;

@@ -140,22 +150,27 @@ export function updateFilesStatus(
): Promise<UpdateByQueryResponse[]> {
return Promise.all(
Object.entries(fileIdsByIndex).map(([index, fileIds]) => {
return esClient.updateByQuery(
{
index,
refresh: true,
query: {
ids: {
values: Array.from(fileIds),
return esClient
.updateByQuery(
{
index,
refresh: true,
query: {
ids: {
values: Array.from(fileIds),
},
},
script: {
source: `ctx._source.file.Status = '${status}'`,
lang: 'painless',
},
},
script: {
source: `ctx._source.file.Status = '${status}'`,
lang: 'painless',
},
},
{ signal: abortController.signal }
);
{ signal: abortController.signal }
)
.catch((err) => {
Error.captureStackTrace(err);
throw err;
});
})
);
}
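The refactor above changes each Elasticsearch call in these task helpers from a bare `await esClient.search(...)` to a chained `.catch()` that runs `Error.captureStackTrace(err)` before rethrowing, so rejections carry a stack trace that points at the calling task code. A minimal sketch of the pattern in isolation (index name and query are illustrative only):

```ts
import type { ElasticsearchClient } from '@kbn/core/server';

// Sketch of the error-handling pattern used above: capture a stack trace on the
// rejected promise before rethrowing it, then let callers handle the error.
export const searchWithCapturedStack = async (
  esClient: ElasticsearchClient,
  abortController: AbortController
) =>
  esClient
    .search(
      {
        index: '.fleet-fileds-fromhost-meta-*',
        ignore_unavailable: true,
        body: { size: 10, query: { term: { 'file.Status': 'READY' } } },
      },
      { signal: abortController.signal }
    )
    .catch((err) => {
      Error.captureStackTrace(err);
      throw err;
    });
```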
@@ -86,7 +86,7 @@ export const createFromHostEsSearchResponseMock =
max_score: 0,
hits: [
{
_index: '.fleet-files-foo-000001',
_index: '.fleet-fileds-fromhost-meta-foo-000001',
_id: '123',
_score: 1.0,
_source: {
@@ -15,9 +15,7 @@ import { ensurePreconfiguredPackagesAndPolicies } from '.';
import { appContextService } from './app_context';
import { getInstallations } from './epm/packages';
import { upgradeManagedPackagePolicies } from './managed_package_policies';
import { setupFleet, ensureFleetFileUploadIndices } from './setup';

import { ensureFileUploadWriteIndices } from './epm/elasticsearch/template/install';
import { setupFleet } from './setup';

jest.mock('./preconfiguration');
jest.mock('./preconfiguration/outputs');

@@ -70,8 +68,6 @@ describe('setupFleet', () => {

soClient.find.mockResolvedValue({ saved_objects: [] } as any);
soClient.bulkGet.mockResolvedValue({ saved_objects: [] } as any);

(ensureFileUploadWriteIndices as jest.Mock).mockResolvedValue({});
});

afterEach(async () => {

@@ -138,12 +134,4 @@ describe('setupFleet', () => {
],
});
});

it('should create agent file upload write indices', async () => {
await ensureFleetFileUploadIndices(soClient, esClient);

expect((ensureFileUploadWriteIndices as jest.Mock).mock.calls[0][0].integrationNames).toEqual([
'elastic_agent',
]);
});
});
@@ -12,11 +12,7 @@ import pMap from 'p-map';
import type { ElasticsearchClient, SavedObjectsClientContract } from '@kbn/core/server';
import { DEFAULT_SPACE_ID } from '@kbn/spaces-plugin/common/constants';

import {
AUTO_UPDATE_PACKAGES,
FILE_STORAGE_INTEGRATION_NAMES,
FLEET_ELASTIC_AGENT_PACKAGE,
} from '../../common/constants';
import { AUTO_UPDATE_PACKAGES } from '../../common/constants';
import type { PreconfigurationError } from '../../common/constants';
import type {
DefaultPackagesInstallationError,

@@ -44,10 +40,7 @@ import { ensureDefaultEnrollmentAPIKeyForAgentPolicy } from './api_keys';
import { getRegistryUrl, settingsService } from '.';
import { awaitIfPending } from './setup_utils';
import { ensureFleetFinalPipelineIsInstalled } from './epm/elasticsearch/ingest_pipeline/install';
import {
ensureDefaultComponentTemplates,
ensureFileUploadWriteIndices,
} from './epm/elasticsearch/template/install';
import { ensureDefaultComponentTemplates } from './epm/elasticsearch/template/install';
import { getInstallations, reinstallPackageForInstallation } from './epm/packages';
import { isPackageInstalled } from './epm/packages/install';
import type { UpgradeManagedPackagePoliciesResult } from './managed_package_policies';

@@ -60,7 +53,6 @@ import {
ensurePreconfiguredFleetServerHosts,
getPreconfiguredFleetServerHostFromConfig,
} from './preconfiguration/fleet_server_host';
import { getInstallationsByName } from './epm/packages/get';

export interface SetupStatus {
isInitialized: boolean;

@@ -125,7 +117,6 @@ async function createSetupSideEffects(
logger.debug('Setting up Fleet Elasticsearch assets');
await ensureFleetGlobalEsAssets(soClient, esClient);

await ensureFleetFileUploadIndices(soClient, esClient);
// Ensure that required packages are always installed even if they're left out of the config
const preconfiguredPackageNames = new Set(packages.map((pkg) => pkg.name));

@@ -207,32 +198,6 @@ async function createSetupSideEffects(
};
}

/**
* Ensure ES assets shared by all Fleet index template are installed
*/
export async function ensureFleetFileUploadIndices(
soClient: SavedObjectsClientContract,
esClient: ElasticsearchClient
) {
const { diagnosticFileUploadEnabled } = appContextService.getExperimentalFeatures();
if (!diagnosticFileUploadEnabled) return;
const logger = appContextService.getLogger();
const installedFileUploadIntegrations = await getInstallationsByName({
savedObjectsClient: soClient,
pkgNames: [...FILE_STORAGE_INTEGRATION_NAMES],
});

const integrationNames = installedFileUploadIntegrations.map(({ name }) => name);
if (!integrationNames.includes(FLEET_ELASTIC_AGENT_PACKAGE)) {
integrationNames.push(FLEET_ELASTIC_AGENT_PACKAGE);
}
logger.debug(`Ensuring file upload write indices for ${integrationNames}`);
return ensureFileUploadWriteIndices({
esClient,
logger,
integrationNames,
});
}
/**
* Ensure ES assets shared by all Fleet index template are installed
*/
@@ -72,6 +72,7 @@ export class CheckDeletedFilesTask {
}

this.wasStarted = true;
this.logger.info(`Started with interval of [${INTERVAL}] and timeout of [${TIMEOUT}]`);

try {
await taskManager.ensureScheduled({

@@ -85,7 +86,7 @@ export class CheckDeletedFilesTask {
params: { version: VERSION },
});
} catch (e) {
this.logger.error(`Error scheduling task, received error: ${e}`);
this.logger.error(`Error scheduling task, received error: ${e.message}`, e);
}
};

@@ -104,19 +105,34 @@ export class CheckDeletedFilesTask {
throwUnrecoverableError(new Error('Outdated task version'));
}

this.logger.info(`[runTask()] started`);

const endRun = (msg: string = '') => {
this.logger.info(`[runTask()] ended${msg ? ': ' + msg : ''}`);
};

const [{ elasticsearch }] = await core.getStartServices();
const esClient = elasticsearch.client.asInternalUser;

try {
const readyFiles = await getFilesByStatus(esClient, this.abortController);
if (!readyFiles.length) return;

if (!readyFiles.length) {
endRun('no files to process');
return;
}

const { fileIdsByIndex: deletedFileIdsByIndex, allFileIds: allDeletedFileIds } =
await fileIdsWithoutChunksByIndex(esClient, this.abortController, readyFiles);
if (!allDeletedFileIds.size) return;

if (!allDeletedFileIds.size) {
endRun('No files with deleted chunks');
return;
}

this.logger.info(`Attempting to update ${allDeletedFileIds.size} files to DELETED status`);
this.logger.debug(`Attempting to file ids: ${deletedFileIdsByIndex}`);
this.logger.debug(`Attempting to update file ids: ${deletedFileIdsByIndex}`);

const updatedFilesResponses = await updateFilesStatus(
esClient,
this.abortController,

@@ -130,12 +146,16 @@ export class CheckDeletedFilesTask {
this.logger.warn(`Failed to update ${failures.length} files to DELETED status`);
this.logger.debug(`Failed to update files to DELETED status: ${failures}`);
}

endRun('success');
} catch (err) {
if (err instanceof errors.RequestAbortedError) {
this.logger.warn(`request aborted due to timeout: ${err}`);
endRun();
return;
}
this.logger.error(err);
endRun('error');
}
};
}
@@ -466,6 +466,7 @@ export interface FileUploadMetadata {
transithash: {
sha256: string;
};
'@timestamp': string;
}

export type UploadedFileInfo = Pick<
@@ -209,8 +209,9 @@ export const sendEndpointActionResponse = async (
const fileMeta = await esClient.index({
index: FILE_STORAGE_METADATA_INDEX,
id: getFileDownloadId(action, action.agents[0]),
body: fileMetaDoc,
op_type: 'create',
refresh: 'wait_for',
body: fileMetaDoc,
});

// Index the file content (just one chunk)

@@ -224,12 +225,14 @@ export const sendEndpointActionResponse = async (
document: cborx.encode({
bid: fileMeta._id,
last: true,
'@timestamp': new Date().toISOString(),
data: Buffer.from(
'UEsDBAoACQAAAFZeRFWpAsDLHwAAABMAAAAMABwAYmFkX2ZpbGUudHh0VVQJAANTVjxjU1Y8Y3V4CwABBPUBAAAEFAAAAMOcoyEq/Q4VyG02U9O0LRbGlwP/y5SOCfRKqLz1rsBQSwcIqQLAyx8AAAATAAAAUEsBAh4DCgAJAAAAVl5EVakCwMsfAAAAEwAAAAwAGAAAAAAAAQAAAKSBAAAAAGJhZF9maWxlLnR4dFVUBQADU1Y8Y3V4CwABBPUBAAAEFAAAAFBLBQYAAAAAAQABAFIAAAB1AAAAAAA=',
'base64'
),
}),
refresh: 'wait_for',
op_type: 'create',
},
{
headers: {
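The dev-script changes above reflect how writes to the new data streams work: data streams are append-only, so documents must be indexed with `op_type: 'create'` and must carry an `@timestamp` field. A hedged standalone example with the Elasticsearch JS client (index name and document shape are illustrative, not the script's actual payload):

```ts
import { Client } from '@elastic/elasticsearch';

// Illustrative only: data streams reject plain index/update operations, so the write
// uses op_type 'create' and the document includes the required '@timestamp' field.
const client = new Client({ node: 'http://localhost:9200' });

export const indexFileMetadataMock = async () =>
  client.index({
    index: '.fleet-fileds-fromhost-meta-agent',
    id: 'file1',
    op_type: 'create',
    refresh: 'wait_for',
    document: {
      '@timestamp': new Date().toISOString(),
      upload_id: 'file1',
      file: { name: 'elastic-agent-diagnostics.zip', Status: 'READY' },
    },
  });
```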
@@ -44,7 +44,7 @@ export interface RuntimeServices {
interface CreateRuntimeServicesOptions {
kibanaUrl: string;
elasticsearchUrl: string;
fleetServerUrl: string | undefined;
fleetServerUrl?: string;
username: string;
password: string;
log?: ToolingLog;
@@ -214,6 +214,7 @@ export const generateFileMetadataDocumentMock = (
transithash: {
sha256: 'a0d6d6a2bb73340d4a0ed32b2a46272a19dd111427770c072918aed7a8565010',
},
'@timestamp': new Date().toISOString(),

...overrides,
};
@@ -23,6 +23,25 @@ export default function (providerContext: FtrProviderContext) {

const ES_INDEX_OPTIONS = { headers: { 'X-elastic-product-origin': 'fleet' } };

const cleanupFiles = async () => {
await esClient.deleteByQuery({
index: `${FILE_STORAGE_DATA_AGENT_INDEX},${FILE_STORAGE_METADATA_AGENT_INDEX}`,
refresh: true,
ignore_unavailable: true,
query: {
bool: {
filter: [
{
ids: {
values: ['file1', 'file1.0'],
},
},
],
},
},
});
};

describe('fleet_uploads', () => {
skipIfNoDockerRegistry(providerContext);
setupFleetAndAgents(providerContext);

@@ -30,6 +49,7 @@ export default function (providerContext: FtrProviderContext) {
before(async () => {
await esArchiver.unload('x-pack/test/functional/es_archives/fleet/empty_fleet_server');
await getService('supertest').post(`/api/fleet/setup`).set('kbn-xsrf', 'xxx').send();
await cleanupFiles();

await esClient.create({
index: AGENT_ACTIONS_INDEX,

@@ -60,34 +80,36 @@ export default function (providerContext: FtrProviderContext) {
ES_INDEX_OPTIONS
);

await esClient.update({
await esClient.index({
index: FILE_STORAGE_METADATA_AGENT_INDEX,
id: 'file1',
refresh: true,
op_type: 'create',
body: {
doc_as_upsert: true,
doc: {
upload_id: 'file1',
action_id: 'action1',
agent_id: 'agent1',
file: {
ChunkSize: 4194304,
extension: 'zip',
hash: {},
mime_type: 'application/zip',
mode: '0644',
name: 'elastic-agent-diagnostics-2022-10-07T12-00-00Z-00.zip',
path: '/agent/elastic-agent-diagnostics-2022-10-07T12-00-00Z-00.zip',
size: 24917,
Status: 'READY',
type: 'file',
},
'@timestamp': new Date().toISOString(),
upload_id: 'file1',
action_id: 'action1',
agent_id: 'agent1',
file: {
ChunkSize: 4194304,
extension: 'zip',
hash: {},
mime_type: 'application/zip',
mode: '0644',
name: 'elastic-agent-diagnostics-2022-10-07T12-00-00Z-00.zip',
path: '/agent/elastic-agent-diagnostics-2022-10-07T12-00-00Z-00.zip',
size: 24917,
Status: 'READY',
type: 'file',
},
},
});
});
after(async () => {
await esArchiver.load('x-pack/test/functional/es_archives/fleet/empty_fleet_server');
await Promise.all([
esArchiver.load('x-pack/test/functional/es_archives/fleet/empty_fleet_server'),
cleanupFiles(),
]);
});

it('should get agent uploads', async () => {

@@ -108,17 +130,16 @@ export default function (providerContext: FtrProviderContext) {
});

it('should get agent uploaded file', async () => {
await esClient.update({
await esClient.index({
index: FILE_STORAGE_DATA_AGENT_INDEX,
id: 'file1.0',
op_type: 'create',
refresh: true,
body: {
doc_as_upsert: true,
doc: {
last: true,
bid: 'file1',
data: 'test',
},
'@timestamp': new Date().toISOString(),
last: true,
bid: 'file1',
data: 'test',
},
});
@@ -1,34 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/

import expect from '@kbn/expect';
import {
FILE_STORAGE_DATA_INDEX,
FILE_STORAGE_METADATA_INDEX,
} from '@kbn/security-solution-plugin/common/endpoint/constants';
import { FtrProviderContext } from '../ftr_provider_context';

export default function ({ getService }: FtrProviderContext) {
const esClient = getService('es');

describe('File upload indices', () => {
it('should have created the file data index on install', async () => {
const endpointFileUploadIndexExists = await esClient.indices.exists({
index: FILE_STORAGE_METADATA_INDEX,
});

expect(endpointFileUploadIndexExists).equal(true);
});
it('should have created the files index on install', async () => {
const endpointFileUploadIndexExists = await esClient.indices.exists({
index: FILE_STORAGE_DATA_INDEX,
});

expect(endpointFileUploadIndexExists).equal(true);
});
});
}
@@ -48,7 +48,6 @@ export default function endpointAPIIntegrationTests(providerContext: FtrProvider
loadTestFile(require.resolve('./package'));
loadTestFile(require.resolve('./endpoint_authz'));
loadTestFile(require.resolve('./endpoint_response_actions/execute'));
loadTestFile(require.resolve('./file_upload_index'));
loadTestFile(require.resolve('./endpoint_artifacts/trusted_apps'));
loadTestFile(require.resolve('./endpoint_artifacts/event_filters'));
loadTestFile(require.resolve('./endpoint_artifacts/host_isolation_exceptions'));