[Fleet] When removing an inputs-type package policy, clean up assets (#218582)

Fixes https://github.com/elastic/kibana/issues/209789

## Summary

For `inputs`-type integrations
([docs](https://github.com/elastic/kibana/blob/main/x-pack/platform/plugins/shared/fleet/dev_docs/input_packages.md)),
all the assets are installed when the integration policy is created, not
at package install time (as is the case for "regular" integrations).

However, these assets are not cleaned up when the integration policy is
removed, which leaves behind orphaned assets that no longer work.

- This PR adds a new endpoint that removes the datastream assets:
```
DELETE kbn:/api/fleet/epm/packages/{pkgName}/{pkgVersion}/datastream_assets?packagePolicyId={Id}
```
- The new endpoint is called by the UI when removing the integration
policy (a usage sketch follows this list).
- Only the datastream assets that exactly match the dataset name are
removed; assets shared across the integration are kept and can only be
deleted when the whole integration is uninstalled.
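
For reference, a minimal sketch of the client-side flow, built around the `sendDeletePackageDatastreamAssets` helper added in this PR (import paths and error handling are illustrative; the real wiring lives in the `PackagePolicyDeleteProvider` changes further down):

```ts
// Sketch only: import paths and surrounding component logic are simplified.
import { sendDeletePackageDatastreamAssets } from '../hooks';
import type { PackagePolicyPackage } from '../types';

async function cleanupInputPackageAssets(
  pkg: PackagePolicyPackage | undefined,
  packagePolicyId: string,
  notifyError: (message: string) => void
) {
  // Assets are installed per policy only for `input` type packages,
  // so there is nothing to clean up for regular integrations.
  if (pkg?.type !== 'input') {
    return;
  }
  const assetsData = await sendDeletePackageDatastreamAssets(
    { pkgName: pkg.name, pkgVersion: pkg.version },
    { packagePolicyId }
  );
  if (assetsData?.error?.message) {
    // The UI surfaces this as a danger toast and still proceeds with deleting the policy.
    notifyError(`Error: ${assetsData.error.message}`);
  }
}
```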

Additional changes:
- Light refactoring of the functions for inputs-type integrations to
make the code more readable
- Updated the dev_docs about input-type integrations, which hadn't been
touched for a long time

### Testing
- Install an input-type package, for instance "custom logs"
- Check the assets created under the `Assets` tab
- Check that the package has only this one integration policy
- Remove the integration policy for the package - a warning is shown:

<img width="937" alt="Screenshot 2025-05-09 at 16 58 51"
src="https://github.com/user-attachments/assets/0f86ab38-e0a9-47f5-91f5-71b83e17f2e3"
/>

- Verify that the assets related to the package are cleaned up as well
- Try again with several integration policies - in this case the cleanup
doesn't happen (a direct API call sketch follows these steps)
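
To exercise the endpoint directly instead of going through the UI, here is a minimal sketch against a local Kibana; the host, credentials, and IDs are placeholders for your own setup:

```ts
// Hypothetical manual check; run with Node 18+ against a local Kibana instance.
const KIBANA_URL = 'http://localhost:5601';
const AUTH = 'Basic ' + Buffer.from('elastic:changeme').toString('base64');

async function deleteDatastreamAssets(pkgName: string, pkgVersion: string, packagePolicyId: string) {
  const res = await fetch(
    `${KIBANA_URL}/api/fleet/epm/packages/${pkgName}/${pkgVersion}/datastream_assets` +
      `?packagePolicyId=${encodeURIComponent(packagePolicyId)}`,
    {
      method: 'DELETE',
      headers: {
        'kbn-xsrf': 'true', // required CSRF header (see the OpenAPI spec below)
        Authorization: AUTH,
      },
    }
  );
  // A 200 response has the shape { success: boolean }; a 400 carries a generic error body.
  return res.json();
}
```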

### Delete assets when there are two integration policies with different
dataset names

The dataset names are `udp.generic` and `udp.test` - in the video I
deleted the policy `udp-2`, which has dataset name `udp.test`, and the
related assets are no longer present:


https://github.com/user-attachments/assets/23350051-1b26-4e52-914d-62f784809c80

### Delete assets when there are two integration policies with the same
dataset name
In this case there are two different policies with the same dataset name
`udp.generic`. When deleting one of the policies, no warning to remove
the assets is shown; they can only be deleted when there is a single
remaining policy using them (see the sketch after the video):


https://github.com/user-attachments/assets/f75668dd-a4ce-4f5a-ba5d-c99911278dfc
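
The guard for this case is the server-side dataset-usage check added in this PR (`isInputPackageDatasetUsedByMultiplePolicies`, see the diff below); a simplified sketch of the idea, with the import path assumed:

```ts
import type { PackagePolicy } from '../types'; // illustrative path

// Returns true when more than one package policy stream targets the same dataset;
// in that case the per-dataset assets must not be removed yet.
function datasetUsedByMultiplePolicies(
  packagePolicies: PackagePolicy[],
  datasetName: string,
  pkgName: string
): boolean {
  const matchingStreams = packagePolicies
    .filter((policy) => policy.package?.name === pkgName)
    .flatMap((policy) => policy.inputs[0]?.streams ?? [])
    .filter((stream) => stream.vars?.['data_stream.dataset']?.value === datasetName);
  return matchingStreams.length > 1;
}
```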



### Checklist

- [ ] Any text added follows [EUI's writing
guidelines](https://elastic.github.io/eui/#/guidelines/writing), uses
sentence case text and includes [i18n
support](https://github.com/elastic/kibana/blob/main/src/platform/packages/shared/kbn-i18n/README.md)
- [ ]
[Documentation](https://www.elastic.co/guide/en/kibana/master/development-documentation.html)
was added for features that require explanation or tutorials
- [ ] [Unit or functional
tests](https://www.elastic.co/guide/en/kibana/master/development-tests.html)
were updated or added to match the most common scenarios

---------

Co-authored-by: Elastic Machine <elasticmachine@users.noreply.github.com>
Co-authored-by: jillguyonnet <jill.guyonnet@gmail.com>
Co-authored-by: kibanamachine <42973632+kibanamachine@users.noreply.github.com>
Cristina Amico 2025-05-30 09:15:22 +02:00 committed by GitHub
parent 25c17fc4f0
commit 0ba35cb0cc
27 changed files with 1769 additions and 308 deletions


@ -27957,6 +27957,102 @@
]
}
},
"/api/fleet/epm/packages/{pkgName}/{pkgVersion}/datastream_assets": {
"delete": {
"description": "[Required authorization] Route required privileges: integrations-all AND fleet-agent-policies-all.",
"operationId": "delete-fleet-epm-packages-pkgname-pkgversion-datastream-assets",
"parameters": [
{
"description": "A required header to protect against CSRF attacks",
"in": "header",
"name": "kbn-xsrf",
"required": true,
"schema": {
"example": "true",
"type": "string"
}
},
{
"in": "path",
"name": "pkgName",
"required": true,
"schema": {
"type": "string"
}
},
{
"in": "path",
"name": "pkgVersion",
"required": true,
"schema": {
"type": "string"
}
},
{
"in": "query",
"name": "packagePolicyId",
"required": true,
"schema": {
"type": "string"
}
}
],
"responses": {
"200": {
"content": {
"application/json": {
"schema": {
"additionalProperties": false,
"properties": {
"success": {
"type": "boolean"
}
},
"required": [
"success"
],
"type": "object"
}
}
}
},
"400": {
"content": {
"application/json": {
"schema": {
"additionalProperties": false,
"description": "Generic Error",
"properties": {
"attributes": {},
"error": {
"type": "string"
},
"errorType": {
"type": "string"
},
"message": {
"type": "string"
},
"statusCode": {
"type": "number"
}
},
"required": [
"message",
"attributes"
],
"type": "object"
}
}
}
}
},
"summary": "Delete assets for an input package",
"tags": [
"Elastic Package Manager (EPM)"
]
}
},
"/api/fleet/epm/packages/{pkgName}/{pkgVersion}/kibana_assets": {
"delete": {
"description": "[Required authorization] Route required privileges: integrations-all AND fleet-agent-policies-all.",


@ -27957,6 +27957,102 @@
]
}
},
"/api/fleet/epm/packages/{pkgName}/{pkgVersion}/datastream_assets": {
"delete": {
"description": "[Required authorization] Route required privileges: integrations-all AND fleet-agent-policies-all.",
"operationId": "delete-fleet-epm-packages-pkgname-pkgversion-datastream-assets",
"parameters": [
{
"description": "A required header to protect against CSRF attacks",
"in": "header",
"name": "kbn-xsrf",
"required": true,
"schema": {
"example": "true",
"type": "string"
}
},
{
"in": "path",
"name": "pkgName",
"required": true,
"schema": {
"type": "string"
}
},
{
"in": "path",
"name": "pkgVersion",
"required": true,
"schema": {
"type": "string"
}
},
{
"in": "query",
"name": "packagePolicyId",
"required": true,
"schema": {
"type": "string"
}
}
],
"responses": {
"200": {
"content": {
"application/json": {
"schema": {
"additionalProperties": false,
"properties": {
"success": {
"type": "boolean"
}
},
"required": [
"success"
],
"type": "object"
}
}
}
},
"400": {
"content": {
"application/json": {
"schema": {
"additionalProperties": false,
"description": "Generic Error",
"properties": {
"attributes": {},
"error": {
"type": "string"
},
"errorType": {
"type": "string"
},
"message": {
"type": "string"
},
"statusCode": {
"type": "number"
}
},
"required": [
"message",
"attributes"
],
"type": "object"
}
}
}
}
},
"summary": "Delete assets for an input package",
"tags": [
"Elastic Package Manager (EPM)"
]
}
},
"/api/fleet/epm/packages/{pkgName}/{pkgVersion}/kibana_assets": {
"delete": {
"description": "[Required authorization] Route required privileges: integrations-all AND fleet-agent-policies-all.",


@ -28092,6 +28092,68 @@ paths:
summary: Get a package file
tags:
- Elastic Package Manager (EPM)
/api/fleet/epm/packages/{pkgName}/{pkgVersion}/datastream_assets:
delete:
description: '[Required authorization] Route required privileges: integrations-all AND fleet-agent-policies-all.'
operationId: delete-fleet-epm-packages-pkgname-pkgversion-datastream-assets
parameters:
- description: A required header to protect against CSRF attacks
in: header
name: kbn-xsrf
required: true
schema:
example: 'true'
type: string
- in: path
name: pkgName
required: true
schema:
type: string
- in: path
name: pkgVersion
required: true
schema:
type: string
- in: query
name: packagePolicyId
required: true
schema:
type: string
responses:
'200':
content:
application/json:
schema:
additionalProperties: false
type: object
properties:
success:
type: boolean
required:
- success
'400':
content:
application/json:
schema:
additionalProperties: false
description: Generic Error
type: object
properties:
attributes: {}
error:
type: string
errorType:
type: string
message:
type: string
statusCode:
type: number
required:
- message
- attributes
summary: Delete assets for an input package
tags:
- Elastic Package Manager (EPM)
/api/fleet/epm/packages/{pkgName}/{pkgVersion}/kibana_assets:
delete:
description: '[Required authorization] Route required privileges: integrations-all AND fleet-agent-policies-all.'


@ -30334,6 +30334,68 @@ paths:
summary: Get a package file
tags:
- Elastic Package Manager (EPM)
/api/fleet/epm/packages/{pkgName}/{pkgVersion}/datastream_assets:
delete:
description: '[Required authorization] Route required privileges: integrations-all AND fleet-agent-policies-all.'
operationId: delete-fleet-epm-packages-pkgname-pkgversion-datastream-assets
parameters:
- description: A required header to protect against CSRF attacks
in: header
name: kbn-xsrf
required: true
schema:
example: 'true'
type: string
- in: path
name: pkgName
required: true
schema:
type: string
- in: path
name: pkgVersion
required: true
schema:
type: string
- in: query
name: packagePolicyId
required: true
schema:
type: string
responses:
'200':
content:
application/json:
schema:
additionalProperties: false
type: object
properties:
success:
type: boolean
required:
- success
'400':
content:
application/json:
schema:
additionalProperties: false
description: Generic Error
type: object
properties:
attributes: {}
error:
type: string
errorType:
type: string
message:
type: string
statusCode:
type: number
required:
- message
- attributes
summary: Delete assets for an input package
tags:
- Elastic Package Manager (EPM)
/api/fleet/epm/packages/{pkgName}/{pkgVersion}/kibana_assets:
delete:
description: '[Required authorization] Route required privileges: integrations-all AND fleet-agent-policies-all.'


@ -50,6 +50,7 @@ export const EPM_API_ROUTES = {
STATS_PATTERN: `${EPM_PACKAGES_MANY}/{pkgName}/stats`,
BULK_ASSETS_PATTERN: `${EPM_API_ROOT}/bulk_assets`,
INPUTS_PATTERN: `${EPM_API_ROOT}/templates/{pkgName}/{pkgVersion}/inputs`,
PACKAGES_DATASTREAM_ASSETS: `${EPM_API_ROOT}/packages/{pkgName}/{pkgVersion}/datastream_assets`,
REAUTHORIZE_TRANSFORMS: `${EPM_PACKAGES_ONE}/transforms/authorize`,
};


@ -145,6 +145,12 @@ export const epmRouteService = {
getUpdateCustomIntegrationsPath: (pkgName: string) => {
return EPM_API_ROUTES.UPDATE_CUSTOM_INTEGRATIONS_PATTERN.replace('{pkgName}', pkgName);
},
getDeletePackageDatastreamAssets: (pkgName: string, pkgVersion: string) => {
return EPM_API_ROUTES.PACKAGES_DATASTREAM_ASSETS.replace('{pkgName}', pkgName).replace(
'{pkgVersion}',
pkgVersion
);
},
};
export const packagePolicyRouteService = {


@ -14,6 +14,7 @@ export interface PackagePolicyPackage {
version: string;
experimental_data_stream_features?: ExperimentalDataStreamFeature[];
requires_root?: boolean;
type?: string;
}
export interface PackagePolicyConfigRecordEntry {


@ -234,3 +234,17 @@ export type GetInputsTemplatesResponse =
| {
inputs: any;
};
export interface DeletePackageDatastreamAssetsRequest {
params: {
pkgName: string;
pkgVersion: string;
};
query: {
packagePolicyId: string;
};
}
export interface DeletePackageDatastreamAssetsResponse {
success: boolean;
}


@ -1,34 +1,45 @@
# Input Packages
Input packages are a specific type of integrations available since version 8.8.0.
Github issue: https://github.com/elastic/kibana/issues/133296
Design Document: https://docs.google.com/document/d/1vzBh2frnlxEBBcab8-ist8pu_PDZUjOHdsV-QuUn_WA/edit
## Background
## Background
To enable the collection, transformation, and analysis of data flowing between elasticsearch and an external data source like 1Password or Apache HTTP server, a user needs a bunch of components including agent configuration, inputs, ingest pipelines, data streams, index templates, visualizations, etc. When these assets and their definitions are encapsulated together, we call it a package.
To enable the collection, transformation, and analysis of data flowing between elasticsearch and an external data source, a user needs a bunch of components including agent configuration, inputs, ingest pipelines, data streams, index templates, visualizations, etc. When these assets and their definitions are encapsulated together, we call it a package.
Until input packages, there used to be only one type of package - "integration". An input-only package (input package) is composed of only an input and agent configuration. These packages leave all other assets for users to define and are very generic intentionally.
Until `input` packages, there used to be only one type of package - `integration`. An input-type package is composed of only an input and agent configuration. These packages leave all other assets for users to define and are very generic intentionally.
An example of an input package is the `logs` package.
## Dataset customization
## Summary
Currently, the dataset is the only way that a user can customize an input only integration, the user can specify a new or existing dataset and the data will be sent there.
Input-type packages only define the input, allowing the user to create the data stream dynamically.
To allow this customization, kibana only creates the index and component templates at package policy creation time (as opposed to at package install time as with integration packages). Related code [here](https://github.com/hop-dev/kibana/blob/08d44fe52b3900c80242d2446feef7b7a7f9e2af/x-pack/platform/plugins/shared/fleet/server/services/epm/packages/_install_package.ts#L219)
The only way a user can customize an `input` type package is by specifying a new or existing dataset to send the data to.
- To allow this, these packages include the `data_stream.dataset` variable definition in their package spec
- They can only have one datastream (as opposed to multiple ones for integration-type packages)
The index templates etc are only created if the user selected to send to a datastream which doesn't currently exist.
Another important difference with integration-type packages is that Kibana creates the assets (index and component templates, ingest pipelines etc) at integration policy creation time (as opposed to at package install time).
There are also some key differences regarding the generated assets:
- They are tied to the policy: if the integration is installed without generating a policy those assets are empty.
- They are only created if the user selects a datastream which doesn't yet exist.
In Fleet code it's possible to find the related code by searching for `packageInfo.type === 'input'` or similar queries.
### UI dataset selector
The dataset selector in the UI shows the user all available datasets to send to, or gives the option to send to a new datastream.
The dataset selector in the UI shows the user all available datasets to send to or gives the option to send to a new datastream.
To get all available datastream we use the datastreams API which returns all fleet managed datastreams. Fleet managed is detected using the datastream metadata, this is why all a users datastreams will not show in this selector.
To get all available datastream Fleet uses the datastreams API which returns all Fleet "managed" datastreams, detected using the datastream metadata. This is why all a users datastreams will not show in this selector.
## Package structure
## Package structure
An input-only package expects the following structure:
An input-type package expects the following structure:
```
├── agent/input
│ └── input.yml.hbs
@ -41,27 +52,17 @@ An input-only package expects the following structure:
│ └── input-logo.svg
└── manifest.yml
```
1. File manifest.yml describes the properties of the package, similarly to the Integration type, including a policy template, and all Beats input options.
1. File `manifest.yml` describes the properties of the package, similarly to the `integration` type, including a policy template, and all Beats input options.
- type: input
- File input.yml contains definitions of fields set by the input, generic ones, and unrelated to the processed data.
- File `input.yml` contains definitions of fields set by the input, generic ones, and unrelated to the processed data.
2. File input.yml.hbs is a template used to build the agents policy.
2. File `input.yml.hbs` is a template used to build the agents policy.
- It needs to include if-conditions for all Beats input options.
- It will accept a special configuration for extra Beats processors. For example: to strip some fields on the agent level before passing to the ingest pipeline.
3. Files changelog.yml, docs/*, img/* are the same as for integration-type packages.
3. Files `changelog.yml`, `docs/*`, `img/*` are the same as for `integration` type packages.
## FAQ
### Are input packages finished?
### Why don't input packages allow assets e.g dashboards to be defined?
No! Dataset is currently the only thing that we allow to be customized, but we could also have UIs for:
- An ingest pipeline - a user could select one or many ingestion pipelines from the ingest pipelines available in the cluster.
- Custom field mapping - a user could provide their own field mapping and they would be added to the index template on creation
This was intended for a 'phase 3' of input packages which we never got to.
### Why don't input packages allow assets e.g dashboards to be defined?
Because input packages allow dataset to be configured, the data could end up anywhere. Therefore assets e.g a dashboard would not know where to point to fetch their data. This isn;t an impossible problem to solve but this is the reason input packages don't currently allow assets.
Because input packages allow dataset to be configured, the data could end up anywhere. Therefore a dashboard would not know where to point to fetch their data. This isn't an impossible problem to solve but this is the reason input packages don't currently allow assets.


@ -9,22 +9,23 @@ import React from 'react';
import { act, fireEvent } from '@testing-library/react';
import { useAuthz } from '../../../../../../../../hooks/use_authz';
import { useMultipleAgentPolicies } from '../../../../../../../../hooks/use_multiple_agent_policies';
import { createIntegrationsTestRendererMock } from '../../../../../../../../mock';
import type { AgentPolicy } from '../../../../../../types';
import { useAuthz, useMultipleAgentPolicies } from '../../../../../../hooks';
import { PackagePolicyAgentsCell } from './package_policy_agents_cell';
jest.mock('../../../../../../hooks', () => ({
...jest.requireActual('../../../../../../hooks'),
useAuthz: jest.fn(),
useMultipleAgentPolicies: jest.fn(),
useConfirmForceInstall: jest.fn(),
}));
jest.mock('../../../../../../../../hooks/use_multiple_agent_policies');
jest.mock('../../../../../../../../hooks/use_authz');
const useMultipleAgentPoliciesMock = useMultipleAgentPolicies as jest.MockedFunction<
typeof useMultipleAgentPolicies
>;
const mockedUseAuthz = useAuthz as jest.MockedFunction<typeof useAuthz>;
function renderCell({
agentPolicies = [] as AgentPolicy[],
onAddAgent = () => {},
@ -43,11 +44,14 @@ function renderCell({
describe('PackagePolicyAgentsCell', () => {
beforeEach(() => {
jest.mocked(useAuthz).mockReturnValue({
mockedUseAuthz.mockReturnValue({
fleet: {
addAgents: true,
addFleetServers: true,
},
integrations: {
writeIntegrationPolicies: true,
},
} as any);
});
@ -57,7 +61,9 @@ describe('PackagePolicyAgentsCell', () => {
describe('when multiple agent policies is disabled', () => {
beforeEach(() => {
useMultipleAgentPoliciesMock.mockReturnValue({ canUseMultipleAgentPolicies: false });
useMultipleAgentPoliciesMock.mockImplementation(() => {
return { canUseMultipleAgentPolicies: false };
});
});
test('it should display add agent button if count is 0', async () => {
@ -68,7 +74,7 @@ describe('PackagePolicyAgentsCell', () => {
} as AgentPolicy,
],
});
utils.debug();
await act(async () => {
expect(utils.queryByText('Add agent')).toBeInTheDocument();
});
@ -136,7 +142,7 @@ describe('PackagePolicyAgentsCell', () => {
} as AgentPolicy,
],
});
utils.debug();
await act(async () => {
expect(utils.queryByText('Add agent')).not.toBeInTheDocument();
expect(utils.queryByTestId('LinkedAgentCountLink')).toBeInTheDocument();
@ -145,10 +151,13 @@ describe('PackagePolicyAgentsCell', () => {
});
test('Add agent button should be disabled if canAddAgents is false', async () => {
jest.mocked(useAuthz).mockReturnValue({
mockedUseAuthz.mockReturnValue({
fleet: {
addAgents: false,
},
integrations: {
writeIntegrationPolicies: true,
},
} as any);
const utils = renderCell({
@ -166,16 +175,12 @@ describe('PackagePolicyAgentsCell', () => {
describe('when multiple agent policies is enabled', () => {
beforeEach(() => {
useMultipleAgentPoliciesMock.mockReturnValue({ canUseMultipleAgentPolicies: true });
useMultipleAgentPoliciesMock.mockImplementation(() => {
return { canUseMultipleAgentPolicies: true };
});
});
test('it should display agent count sum and popover if agent count > 0', async () => {
jest.mocked(useAuthz).mockReturnValue({
fleet: {
addAgents: false,
},
} as any);
const utils = renderCell({
agentPolicies: [
{
@ -188,6 +193,7 @@ describe('PackagePolicyAgentsCell', () => {
} as AgentPolicy,
],
});
await act(async () => {
expect(utils.queryByText('300')).toBeInTheDocument();
expect(utils.queryByText('Add agent')).not.toBeInTheDocument();


@ -25,6 +25,7 @@ import type {
InMemoryPackagePolicy,
PackageInfo,
PackagePolicy,
PackagePolicyPackage,
} from '../../../../../types';
import {
useLink,
@ -49,7 +50,8 @@ export const PackagePoliciesPage = ({
packageInfo: PackageInfo;
embedded?: boolean;
}) => {
const { name, version } = packageInfo;
const { name, version, type } = packageInfo;
const { search } = useLocation();
const queryParams = useMemo(() => new URLSearchParams(search), [search]);
const addAgentToPolicyIdFromParams = useMemo(
@ -79,18 +81,21 @@ export const PackagePoliciesPage = ({
packagePolicy,
}: { agentPolicies: AgentPolicy[]; packagePolicy: PackagePolicy },
index: number
) => {
const hasUpgrade = isPackagePolicyUpgradable(packagePolicy);
): { agentPolicies: AgentPolicy[]; packagePolicy: InMemoryPackagePolicy; rowIndex: number } => {
return {
agentPolicies,
packagePolicy: {
...packagePolicy,
hasUpgrade,
package: {
...(packagePolicy?.package as PackagePolicyPackage),
type,
},
hasUpgrade: isPackagePolicyUpgradable(packagePolicy),
},
rowIndex: index,
};
},
[isPackagePolicyUpgradable]
[isPackagePolicyUpgradable, type]
);
// States and data for agent-based policies table
@ -122,9 +127,10 @@ export const PackagePoliciesPage = ({
}`,
});
useEffect(() => {
setAgentBasedPackageAndAgentPolicies(
!agentBasedData?.items ? [] : agentBasedData.items.map(mapPoliciesData)
);
const mappedPoliciesData = !agentBasedData?.items
? []
: agentBasedData.items.map(mapPoliciesData);
setAgentBasedPackageAndAgentPolicies(mappedPoliciesData);
}, [agentBasedData, mapPoliciesData]);
// States and data for agentless policies table


@ -35,6 +35,7 @@ jest.mock('../hooks', () => {
}),
};
});
jest.mock('../applications/integrations/sections/epm/screens/detail/policies/package_policies');
const useMultipleAgentPoliciesMock = useMultipleAgentPolicies as jest.MockedFunction<
typeof useMultipleAgentPolicies


@ -147,6 +147,7 @@ export const PackagePolicyActionsMenu: React.FunctionComponent<{
from={from}
agentPolicies={agentPolicies}
key="packagePolicyDelete"
packagePolicyPackage={packagePolicy.package}
>
{(deletePackagePoliciesPrompt) => {
return (


@ -19,13 +19,15 @@ import {
useMultipleAgentPolicies,
useLink,
useDeletePackagePolicyMutation,
sendDeletePackageDatastreamAssets,
} from '../hooks';
import { AGENTS_PREFIX } from '../../common/constants';
import type { AgentPolicy } from '../types';
import type { AgentPolicy, PackagePolicyPackage } from '../types';
interface Props {
agentPolicies?: AgentPolicy[];
from?: 'fleet-policy-list' | undefined;
packagePolicyPackage?: PackagePolicyPackage;
children: (deletePackagePoliciesPrompt: DeletePackagePoliciesPrompt) => React.ReactElement;
}
@ -40,6 +42,7 @@ export const PackagePolicyDeleteProvider: React.FunctionComponent<Props> = ({
agentPolicies,
from,
children,
packagePolicyPackage,
}) => {
const { notifications } = useStartServices();
const {
@ -56,7 +59,6 @@ export const PackagePolicyDeleteProvider: React.FunctionComponent<Props> = ({
const { canUseMultipleAgentPolicies } = useMultipleAgentPolicies();
const { mutateAsync: deletePackagePolicyMutationAsync } = useDeletePackagePolicyMutation();
const isShared = useMemo(() => {
if (agentPolicies?.length !== 1) {
return false;
@ -119,11 +121,22 @@ export const PackagePolicyDeleteProvider: React.FunctionComponent<Props> = ({
() => agentPolicies?.map((p) => p.name).join(', '),
[agentPolicies]
);
const deletePackagePolicies = useCallback(async () => {
setIsLoading(true);
try {
/**
* Try to delete assets if there are any
*/
if (packagePolicyPackage?.type === 'input') {
const assetsData = await sendDeletePackageDatastreamAssets(
{ pkgName: packagePolicyPackage?.name, pkgVersion: packagePolicyPackage?.version },
{ packagePolicyId: packagePolicies[0] }
);
if (assetsData?.error?.message) {
notifications.toasts.addDanger(`Error: ${assetsData.error.message}`);
}
}
const data = await deletePackagePolicyMutationAsync({ packagePolicyIds: packagePolicies });
const successfulResults = data?.filter((result) => result.success) || [];
const failedResults = data?.filter((result) => !result.success) || [];
@ -183,20 +196,21 @@ export const PackagePolicyDeleteProvider: React.FunctionComponent<Props> = ({
} catch (e) {
notifications.toasts.addDanger(
i18n.translate('xpack.fleet.deletePackagePolicy.fatalErrorNotificationTitle', {
defaultMessage: 'Error deleting integration',
defaultMessage: `Error deleting integration`,
})
);
}
closeModal();
}, [
closeModal,
packagePolicies,
notifications.toasts,
agentPolicies,
closeModal,
deletePackagePolicyMutationAsync,
getPath,
history,
packagePolicyPackage,
agentPolicies,
notifications.toasts,
from,
history,
getPath,
]);
const renderModal = () => {
@ -251,6 +265,22 @@ export const PackagePolicyDeleteProvider: React.FunctionComponent<Props> = ({
buttonColor="danger"
confirmButtonDisabled={isLoading || isLoadingAgentsCount}
>
{packagePolicyPackage?.type === 'input' && (
<>
<EuiCallOut
color="warning"
iconType="alert"
title={
<FormattedMessage
id="xpack.fleet.deletePackagePolicy.confirmModal.inputPackage.message"
defaultMessage="This action will also remove the installed assets"
/>
}
data-test-subj="InputPackageCallOut"
/>
<EuiSpacer size="m" />
</>
)}
{(hasMultipleAgentPolicies || isShared) && (
<>
<EuiCallOut


@ -36,6 +36,8 @@ import type {
GetOneBulkOperationPackagesResponse,
GetStatsResponse,
BulkUninstallPackagesRequest,
DeletePackageDatastreamAssetsRequest,
DeletePackageDatastreamAssetsResponse,
} from '../../../common/types';
import { API_VERSIONS } from '../../../common/constants';
@ -467,6 +469,18 @@ export const sendGetBulkAssets = (body: GetBulkAssetsRequest['body']) => {
});
};
export const sendDeletePackageDatastreamAssets = (
{ pkgName, pkgVersion }: DeletePackageDatastreamAssetsRequest['params'],
query: DeletePackageDatastreamAssetsRequest['query']
) => {
return sendRequest<DeletePackageDatastreamAssetsResponse>({
path: epmRouteService.getDeletePackageDatastreamAssets(pkgName, pkgVersion),
method: 'delete',
version: API_VERSIONS.public.v1,
query,
});
};
export function useGetInputsTemplatesQuery(
{ pkgName, pkgVersion }: GetInputsTemplatesRequest['params'],
query: GetInputsTemplatesRequest['query']


@ -12,5 +12,6 @@ export interface InMemoryPackagePolicy extends PackagePolicy {
packageName?: string;
packageTitle?: string;
packageVersion?: string;
type?: string;
hasUpgrade: boolean;
}


@ -61,6 +61,8 @@ import {
GetOneBulkOperationPackagesResponseSchema,
BulkUninstallPackagesRequestSchema,
CustomIntegrationRequestSchema,
DeletePackageDatastreamAssetsRequestSchema,
DeletePackageDatastreamAssetsResponseSchema,
} from '../../types';
import type { FleetConfigType } from '../../config';
import { FLEET_API_PRIVILEGES } from '../../constants/api_privileges';
@ -96,6 +98,7 @@ import {
postBulkUninstallPackagesHandler,
getOneBulkOperationPackagesHandler,
} from './bulk_handler';
import { deletePackageDatastreamAssetsHandler } from './package_datastream_assets_handler';
const MAX_FILE_SIZE_BYTES = 104857600; // 100MB
@ -845,4 +848,31 @@ export const registerRoutes = (router: FleetAuthzRouter, config: FleetConfigType
},
updateCustomIntegrationHandler
);
router.versioned
.delete({
path: EPM_API_ROUTES.PACKAGES_DATASTREAM_ASSETS,
security: INSTALL_PACKAGES_SECURITY,
summary: `Delete assets for an input package`,
options: {
tags: ['oas-tag:Elastic Package Manager (EPM)'],
},
})
.addVersion(
{
version: API_VERSIONS.public.v1,
validate: {
request: DeletePackageDatastreamAssetsRequestSchema,
response: {
200: {
body: () => DeletePackageDatastreamAssetsResponseSchema,
},
400: {
body: genericErrorResponse,
},
},
},
},
deletePackageDatastreamAssetsHandler
);
};


@ -0,0 +1,316 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { httpServerMock } from '@kbn/core/server/mocks';
import { type MockedLogger, loggerMock } from '@kbn/logging-mocks';
import { getPackageInfo } from '../../services/epm/packages/get';
import type { FleetRequestHandlerContext, PackagePolicyClient } from '../..';
import { packagePolicyService } from '../../services/package_policy';
import { xpackMocks } from '../../mocks';
import {
checkExistingDataStreamsAreFromDifferentPackage,
findDataStreamsFromDifferentPackages,
getDatasetName,
removeAssetsForInputPackagePolicy,
isInputPackageDatasetUsedByMultiplePolicies,
} from '../../services/epm/packages/input_type_packages';
import { appContextService } from '../../services';
import { FleetNotFoundError } from '../../errors';
import { deletePackageDatastreamAssetsHandler } from './package_datastream_assets_handler';
jest.mock('../../services/epm/packages/get');
jest.mock('../../services/epm/packages/input_type_packages');
jest.mock('../../services/package_policy', () => {
return {
packagePolicyService: {
list: jest.fn(),
},
};
});
const packagePolicyServiceMock = packagePolicyService as jest.Mocked<PackagePolicyClient>;
const mockedGetPackageInfo = getPackageInfo as jest.Mock<ReturnType<typeof getPackageInfo>>;
const mockedGetDatasetName = getDatasetName as jest.Mock<ReturnType<typeof getDatasetName>>;
const mockedFindDataStreamsFromDifferentPackages =
findDataStreamsFromDifferentPackages as jest.Mock<
ReturnType<typeof findDataStreamsFromDifferentPackages>
>;
const mockedCheckExistingDataStreamsAreFromDifferentPackage =
checkExistingDataStreamsAreFromDifferentPackage as jest.Mock<
ReturnType<typeof checkExistingDataStreamsAreFromDifferentPackage>
>;
const mockedRemoveAssetsForInputPackagePolicy = removeAssetsForInputPackagePolicy as jest.Mock<
ReturnType<typeof removeAssetsForInputPackagePolicy>
>;
const mockedIsInputPackageDatasetUsedByMultiplePolicies =
isInputPackageDatasetUsedByMultiplePolicies as jest.Mock<
ReturnType<typeof isInputPackageDatasetUsedByMultiplePolicies>
>;
describe('deletePackageDatastreamAssetsHandler', () => {
let context: FleetRequestHandlerContext;
let response: ReturnType<typeof httpServerMock.createResponseFactory>;
let logger: MockedLogger;
beforeAll(async () => {
logger = loggerMock.create();
appContextService.getLogger = () => logger;
appContextService.getInternalUserSOClientWithoutSpaceExtension = jest.fn();
});
beforeEach(() => {
context = xpackMocks.createRequestHandlerContext() as unknown as FleetRequestHandlerContext;
response = httpServerMock.createResponseFactory();
jest.resetAllMocks();
});
const testPackagePolicy = {
id: 'test-package-policy',
name: 'Test policy',
policy_ids: ['agent-policy'],
description: 'Test policy description',
namespace: 'default',
inputs: [],
package: {
name: 'logs',
title: 'Test',
version: '1.0.0',
},
} as any;
const packagePolicy1 = {
id: 'policy1',
name: 'Policy',
policy_ids: ['agent-policy'],
description: 'Policy description',
namespace: 'default',
inputs: [],
package: {
name: 'logs',
title: 'Test',
version: '1.0.0',
},
} as any;
it('should remove assets', async () => {
mockedGetPackageInfo.mockResolvedValue({
name: 'logs',
version: '1.0.0',
type: 'input',
status: 'installed',
} as any);
const request = httpServerMock.createKibanaRequest({
params: {
pkgName: 'test',
pkgVersion: '1.0.0',
},
query: {
packagePolicyId: 'policy1',
},
});
packagePolicyServiceMock.list.mockResolvedValue({
items: [packagePolicy1, testPackagePolicy],
} as any);
mockedGetDatasetName.mockReturnValue('custom');
mockedFindDataStreamsFromDifferentPackages.mockResolvedValue({
existingDataStreams: [],
dataStream: {},
} as any);
mockedCheckExistingDataStreamsAreFromDifferentPackage.mockReturnValue(false);
await deletePackageDatastreamAssetsHandler(context, request, response);
expect(response.ok).toHaveBeenCalledWith({
body: { success: true },
});
await expect(mockedRemoveAssetsForInputPackagePolicy).toHaveBeenCalledWith({
packageInfo: {
name: 'logs',
version: '1.0.0',
type: 'input',
status: 'installed',
},
logger: expect.anything(),
datasetName: 'custom',
esClient: expect.anything(),
savedObjectsClient: expect.anything(),
});
});
it('should throw not found error if the version in packageInfo not found', async () => {
mockedGetPackageInfo.mockResolvedValue({} as any);
const request = httpServerMock.createKibanaRequest({
params: {
pkgName: 'test',
pkgVersion: '1.0.0',
},
});
await expect(
deletePackageDatastreamAssetsHandler(context, request, response)
).rejects.toThrowError(new FleetNotFoundError('Version is not installed'));
await expect(mockedRemoveAssetsForInputPackagePolicy).not.toHaveBeenCalled();
});
it('should throw not found error if the version in packageInfo is different', async () => {
mockedGetPackageInfo.mockResolvedValue({
name: 'logs',
version: '1.1.0',
type: 'input',
status: 'installed',
} as any);
const request = httpServerMock.createKibanaRequest({
params: {
pkgName: 'test',
pkgVersion: '1.0.0',
},
});
await expect(
deletePackageDatastreamAssetsHandler(context, request, response)
).rejects.toThrowError(new FleetNotFoundError('Version is not installed'));
await expect(mockedRemoveAssetsForInputPackagePolicy).not.toHaveBeenCalled();
});
it('should throw not found error if package is not an input type package', async () => {
mockedGetPackageInfo.mockResolvedValue({
name: 'logs',
version: '1.0.0',
type: 'package',
status: 'installed',
} as any);
const request = httpServerMock.createKibanaRequest({
params: {
pkgName: 'test',
pkgVersion: '1.0.0',
},
});
await expect(
deletePackageDatastreamAssetsHandler(context, request, response)
).rejects.toThrowError(
new FleetNotFoundError('Requested package test-1.0.0 is not an input package')
);
await expect(mockedRemoveAssetsForInputPackagePolicy).not.toHaveBeenCalled();
});
it('should throw not found error if package policy id does not exist', async () => {
mockedGetPackageInfo.mockResolvedValue({
name: 'logs',
version: '1.0.0',
type: 'input',
status: 'installed',
} as any);
const request = httpServerMock.createKibanaRequest({
params: {
pkgName: 'test',
pkgVersion: '1.0.0',
},
query: {
packagePolicyId: 'idontexist',
},
});
packagePolicyServiceMock.list.mockRejectedValueOnce(
new Error('Saved object [ingest-package-policies/idontexist] not found')
);
await expect(
deletePackageDatastreamAssetsHandler(context, request, response)
).rejects.toThrowError('Saved object [ingest-package-policies/idontexist] not found');
await expect(mockedRemoveAssetsForInputPackagePolicy).not.toHaveBeenCalled();
});
it('should throw error if the datastreams also exist on different packages', async () => {
mockedGetPackageInfo.mockResolvedValue({
name: 'logs',
version: '1.0.0',
type: 'input',
status: 'installed',
} as any);
const request = httpServerMock.createKibanaRequest({
params: {
pkgName: 'test',
pkgVersion: '1.0.0',
},
query: {
packagePolicyId: 'policy1',
},
});
packagePolicyServiceMock.list.mockResolvedValue({
items: [testPackagePolicy, packagePolicy1],
} as any);
mockedGetDatasetName.mockReturnValue('custom');
mockedFindDataStreamsFromDifferentPackages.mockResolvedValue({
existingDataStreams: [
{ name: 'datastream1', _meta: { package: { name: 'integration-test' } } },
],
dataStream: {},
} as any);
mockedCheckExistingDataStreamsAreFromDifferentPackage.mockReturnValue(true);
await expect(
deletePackageDatastreamAssetsHandler(context, request, response)
).rejects.toThrowError(
`Datastreams matching custom exist on other packages and cannot be removed`
);
await expect(mockedRemoveAssetsForInputPackagePolicy).not.toHaveBeenCalled();
});
it('should throw error if the datastreams exist on other package policies on different namespaces', async () => {
mockedGetPackageInfo.mockResolvedValue({
name: 'logs',
version: '1.0.0',
type: 'input',
status: 'installed',
} as any);
const request = httpServerMock.createKibanaRequest({
params: {
pkgName: 'test',
pkgVersion: '1.0.0',
},
query: {
packagePolicyId: 'policy1',
},
});
packagePolicyServiceMock.list.mockResolvedValue({
items: [
packagePolicy1,
testPackagePolicy,
{
...testPackagePolicy,
id: 'namespace-new',
namespace: 'new',
inputs: [{ streams: { vars: { 'datastream.dataset': { value: 'custom' } } } }],
},
],
} as any);
mockedGetDatasetName.mockReturnValue('custom');
mockedFindDataStreamsFromDifferentPackages.mockResolvedValue({
existingDataStreams: [],
dataStream: {},
} as any);
mockedIsInputPackageDatasetUsedByMultiplePolicies.mockReturnValue(true);
await expect(
deletePackageDatastreamAssetsHandler(context, request, response)
).rejects.toThrowError(
`Datastreams matching custom are in use by other package policies and cannot be removed`
);
await expect(mockedRemoveAssetsForInputPackagePolicy).not.toHaveBeenCalled();
});
});


@ -0,0 +1,108 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import type { TypeOf } from '@kbn/config-schema';
import semverValid from 'semver/functions/valid';
import { FleetError, FleetNotFoundError, PackagePolicyRequestError } from '../../errors';
import { appContextService, packagePolicyService } from '../../services';
import { getPackageInfo } from '../../services/epm/packages/get';
import type { DeletePackageDatastreamAssetsRequestSchema, FleetRequestHandler } from '../../types';
import {
checkExistingDataStreamsAreFromDifferentPackage,
findDataStreamsFromDifferentPackages,
getDatasetName,
isInputPackageDatasetUsedByMultiplePolicies,
removeAssetsForInputPackagePolicy,
} from '../../services/epm/packages/input_type_packages';
import { PACKAGE_POLICY_SAVED_OBJECT_TYPE } from '../../constants';
export const deletePackageDatastreamAssetsHandler: FleetRequestHandler<
TypeOf<typeof DeletePackageDatastreamAssetsRequestSchema.params>,
TypeOf<typeof DeletePackageDatastreamAssetsRequestSchema.query>
> = async (context, request, response) => {
const fleetContext = await context.fleet;
const savedObjectsClient = fleetContext.internalSoClient;
const coreContext = await context.core;
const esClient = coreContext.elasticsearch.client.asInternalUser;
const logger = appContextService.getLogger();
const { pkgName, pkgVersion } = request.params;
const { packagePolicyId } = request.query;
try {
const packageInfo = await getPackageInfo({
savedObjectsClient,
pkgName,
pkgVersion,
});
if (pkgVersion && !semverValid(pkgVersion)) {
throw new PackagePolicyRequestError('Package version is not a valid semver');
}
if (!packageInfo || packageInfo.version !== pkgVersion) {
throw new FleetNotFoundError('Version is not installed');
}
if (packageInfo?.type !== 'input') {
throw new PackagePolicyRequestError(
`Requested package ${pkgName}-${pkgVersion} is not an input package`
);
}
const allSpacesSoClient = appContextService.getInternalUserSOClientWithoutSpaceExtension();
const { items: allPackagePolicies } = await packagePolicyService.list(allSpacesSoClient, {
kuery: `${PACKAGE_POLICY_SAVED_OBJECT_TYPE}.package.name:${pkgName}`,
spaceId: '*',
});
const packagePolicy = allPackagePolicies.find((policy) => policy.id === packagePolicyId);
if (!packagePolicy) {
throw new FleetNotFoundError(`Package policy with id ${packagePolicyId} not found`);
}
const datasetName = getDatasetName(packagePolicy?.inputs);
const datasetNameUsedByMultiplePolicies = isInputPackageDatasetUsedByMultiplePolicies(
allPackagePolicies,
datasetName,
pkgName
);
if (datasetNameUsedByMultiplePolicies) {
throw new FleetError(
`Datastreams matching ${datasetName} are in use by other package policies and cannot be removed`
);
}
const { existingDataStreams } = await findDataStreamsFromDifferentPackages(
datasetName,
packageInfo,
esClient
);
const existingDataStreamsAreFromDifferentPackage =
checkExistingDataStreamsAreFromDifferentPackage(packageInfo, existingDataStreams);
if (existingDataStreamsAreFromDifferentPackage) {
throw new FleetError(
`Datastreams matching ${datasetName} exist on other packages and cannot be removed`
);
}
logger.info(`Removing datastreams matching ${datasetName}`);
await removeAssetsForInputPackagePolicy({
packageInfo,
logger,
datasetName,
esClient,
savedObjectsClient,
});
return response.ok({ body: { success: true } });
} catch (error) {
logger.error(`error ${error.message}`);
throw error;
}
};


@ -0,0 +1,428 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { savedObjectsClientMock } from '@kbn/core/server/mocks';
import type { ElasticsearchClient } from '@kbn/core/server';
import { appContextService } from '../../app_context';
import { PackageNotFoundError } from '../../../errors';
import { dataStreamService } from '../../data_streams';
import { getInstalledPackageWithAssets, getInstallation } from './get';
import { optimisticallyAddEsAssetReferences } from './es_assets_reference';
import {
installAssetsForInputPackagePolicy,
removeAssetsForInputPackagePolicy,
isInputPackageDatasetUsedByMultiplePolicies,
} from './input_type_packages';
import { cleanupAssets } from './remove';
jest.mock('../../data_streams');
jest.mock('./get');
jest.mock('./install_index_template_pipeline');
jest.mock('./es_assets_reference');
jest.mock('./remove');
const cleanupAssetsMock = cleanupAssets as jest.MockedFunction<typeof cleanupAssets>;
jest.mock('../../app_context', () => {
const logger = { error: jest.fn(), debug: jest.fn(), warn: jest.fn(), info: jest.fn() };
const mockedSavedObjectTagging = {
createInternalAssignmentService: jest.fn(),
createTagClient: jest.fn(),
};
return {
appContextService: {
getLogger: jest.fn(() => {
return logger;
}),
getTelemetryEventsSender: jest.fn(),
getSavedObjects: jest.fn(() => ({
createImporter: jest.fn(),
})),
getConfig: jest.fn(() => ({})),
getSavedObjectsTagging: jest.fn(() => mockedSavedObjectTagging),
getInternalUserSOClientForSpaceId: jest.fn(),
getExperimentalFeatures: jest.fn(),
},
};
});
describe('installAssetsForInputPackagePolicy', () => {
beforeEach(() => {
jest.mocked(optimisticallyAddEsAssetReferences).mockReset();
});
it('should do nothing for non input package', async () => {
const mockedLogger = jest.mocked(appContextService.getLogger());
await installAssetsForInputPackagePolicy({
pkgInfo: {
type: 'integration',
} as any,
soClient: savedObjectsClientMock.create(),
esClient: {} as ElasticsearchClient,
force: false,
logger: mockedLogger,
packagePolicy: {} as any,
});
expect(jest.mocked(optimisticallyAddEsAssetReferences)).not.toBeCalled();
});
const TEST_PKG_INFO_INPUT = {
type: 'input',
name: 'test',
version: '1.0.0',
policy_templates: [
{
name: 'log',
type: 'log',
},
],
};
it('should throw for input package if package is not installed', async () => {
jest.mocked(dataStreamService).getMatchingDataStreams.mockResolvedValue([]);
jest.mocked(getInstalledPackageWithAssets).mockResolvedValue(undefined);
const mockedLogger = jest.mocked(appContextService.getLogger());
await expect(() =>
installAssetsForInputPackagePolicy({
pkgInfo: TEST_PKG_INFO_INPUT as any,
soClient: savedObjectsClientMock.create(),
esClient: {} as ElasticsearchClient,
force: false,
logger: mockedLogger,
packagePolicy: {
inputs: [
{
type: 'log',
streams: [{ data_stream: { type: 'log' }, vars: { dataset: 'test.tata' } }],
},
],
} as any,
})
).rejects.toThrowError(PackageNotFoundError);
});
it('should install es index patterns for input package if package is installed', async () => {
jest.mocked(dataStreamService).getMatchingDataStreams.mockResolvedValue([]);
jest.mocked(getInstalledPackageWithAssets).mockResolvedValue({
installation: {
name: 'test',
version: '1.0.0',
},
packageInfo: TEST_PKG_INFO_INPUT,
assetsMap: new Map(),
paths: [],
} as any);
const mockedLogger = jest.mocked(appContextService.getLogger());
await installAssetsForInputPackagePolicy({
pkgInfo: TEST_PKG_INFO_INPUT as any,
soClient: savedObjectsClientMock.create(),
esClient: {} as ElasticsearchClient,
force: false,
logger: mockedLogger,
packagePolicy: {
inputs: [
{
name: 'log',
type: 'log',
streams: [
{
data_stream: { type: 'log' },
vars: { 'data_stream.dataset': { value: 'test.tata' } },
},
],
},
],
} as any,
});
expect(jest.mocked(optimisticallyAddEsAssetReferences)).toBeCalledWith(
expect.anything(),
expect.anything(),
expect.anything(),
{
'test.tata': 'log-test.tata-*',
}
);
});
});
describe('removeAssetsForInputPackagePolicy', () => {
beforeEach(() => {
jest.mocked(cleanupAssetsMock).mockReset();
});
it('should do nothing for non input package', async () => {
const mockedLogger = jest.mocked(appContextService.getLogger());
await removeAssetsForInputPackagePolicy({
packageInfo: {
type: 'integration',
} as any,
datasetName: 'test',
savedObjectsClient: savedObjectsClientMock.create(),
esClient: {} as ElasticsearchClient,
logger: mockedLogger,
});
expect(cleanupAssetsMock).not.toBeCalled();
});
it('should do nothing for input packages with status !== than installed', async () => {
const mockedLogger = jest.mocked(appContextService.getLogger());
await removeAssetsForInputPackagePolicy({
packageInfo: {
type: 'input',
status: 'not_installed',
} as any,
datasetName: 'test',
savedObjectsClient: savedObjectsClientMock.create(),
esClient: {} as ElasticsearchClient,
logger: mockedLogger,
});
expect(cleanupAssetsMock).not.toBeCalled();
});
it('should clean up assets for input packages with status = installed', async () => {
const mockedLogger = jest.mocked(appContextService.getLogger());
const installation = {
name: 'logs',
version: '1.0.0',
installed_kibana: [],
installed_es: [
{
id: 'logs@custom',
type: 'component_template',
},
{
id: 'udp@custom',
type: 'component_template',
},
{
id: 'logs-udp.test',
type: 'index_template',
},
{
id: 'logs-udp.test@package',
type: 'component_template',
},
],
es_index_patterns: {
generic: 'logs-udp.generic-*',
test: 'logs-udp.test-*',
},
} as any;
jest.mocked(getInstallation).mockResolvedValue(installation);
await removeAssetsForInputPackagePolicy({
packageInfo: {
type: 'input',
status: 'installed',
name: 'logs',
version: '1.0.0',
} as any,
datasetName: 'test',
savedObjectsClient: savedObjectsClientMock.create(),
esClient: {} as ElasticsearchClient,
logger: mockedLogger,
});
expect(cleanupAssetsMock).toBeCalledWith(
'test',
{
es_index_patterns: { test: 'logs-udp.test-*' },
installed_es: [
{ id: 'logs-udp.test', type: 'index_template' },
{ id: 'logs-udp.test@package', type: 'component_template' },
],
installed_kibana: [],
name: 'logs',
package_assets: [],
version: '1.0.0',
},
installation,
expect.anything(),
expect.anything()
);
});
it('should clean up assets matching exactly the datasetName', async () => {
const mockedLogger = jest.mocked(appContextService.getLogger());
const installation = {
name: 'logs',
version: '1.0.0',
installed_kibana: [],
installed_es: [
{
id: 'logs-udp.test',
type: 'index_template',
},
{
id: 'logs-udp.test@package',
type: 'component_template',
},
{
id: 'logs-udp.test1',
type: 'index_template',
},
{
id: 'logs-udp.test1@package',
type: 'component_template',
},
],
} as any;
jest.mocked(getInstallation).mockResolvedValue(installation);
await removeAssetsForInputPackagePolicy({
packageInfo: {
type: 'input',
status: 'installed',
name: 'logs',
version: '1.0.0',
} as any,
datasetName: 'test',
savedObjectsClient: savedObjectsClientMock.create(),
esClient: {} as ElasticsearchClient,
logger: mockedLogger,
});
expect(cleanupAssetsMock).toBeCalledWith(
'test',
{
installed_es: [
{ id: 'logs-udp.test', type: 'index_template' },
{ id: 'logs-udp.test@package', type: 'component_template' },
],
installed_kibana: [],
es_index_patterns: {},
name: 'logs',
package_assets: [],
version: '1.0.0',
},
installation,
expect.anything(),
expect.anything()
);
});
it('should not clean up assets for input packages with status not installed', async () => {
const mockedLogger = jest.mocked(appContextService.getLogger());
jest.mocked(getInstallation).mockResolvedValue(undefined);
await removeAssetsForInputPackagePolicy({
packageInfo: {
type: 'input',
status: 'installed',
name: 'logs',
version: '1.0.0',
} as any,
datasetName: 'test',
savedObjectsClient: savedObjectsClientMock.create(),
esClient: {} as ElasticsearchClient,
logger: mockedLogger,
});
expect(cleanupAssetsMock).not.toBeCalled();
});
it('should log error if cleanupAssets failed', async () => {
const mockedLogger = jest.mocked(appContextService.getLogger());
jest.mocked(getInstallation).mockResolvedValue({
name: 'logs',
version: '1.0.0',
} as any);
cleanupAssetsMock.mockRejectedValueOnce('error');
await removeAssetsForInputPackagePolicy({
packageInfo: {
type: 'input',
status: 'installed',
name: 'logs',
version: '1.0.0',
} as any,
datasetName: 'test',
savedObjectsClient: savedObjectsClientMock.create(),
esClient: {} as ElasticsearchClient,
logger: mockedLogger,
});
expect(mockedLogger.error).toBeCalled();
});
describe('isInputPackageDatasetUsedByMultiplePolicies', () => {
const policy1 = {
id: 'policy1',
name: 'Policy',
policy_ids: ['agent-policy'],
description: 'Policy description',
namespace: 'default',
inputs: [],
package: {
name: 'logs',
title: 'Test',
version: '1.0.0',
type: 'input',
},
};
const policy2 = {
id: 'test-package-policy',
name: 'Test policy',
policy_ids: ['agent-policy'],
description: 'Test policy description',
namespace: 'default',
inputs: [],
package: {
name: 'logs',
title: 'Test',
version: '1.0.0',
type: 'input',
},
};
it('should return false if there are no other policies using the dataset', async () => {
const res = await isInputPackageDatasetUsedByMultiplePolicies(
[policy1, policy2] as any,
'generic',
'logs'
);
expect(res).toEqual(false);
});
it('should return true if there other policies using the same dataset ', async () => {
const res = await isInputPackageDatasetUsedByMultiplePolicies(
[
{
...policy1,
inputs: [
{
streams: [
{ vars: { 'data_stream.dataset': { value: 'udp.generic', type: 'text' } } },
],
},
],
namespace: 'another',
},
{
...policy2,
inputs: [
{
streams: [
{ vars: { 'data_stream.dataset': { value: 'udp.generic', type: 'text' } } },
],
},
],
},
] as any,
'udp.generic',
'logs'
);
expect(res).toEqual(true);
});
});
});


@ -0,0 +1,268 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import type { ElasticsearchClient, SavedObjectsClientContract, Logger } from '@kbn/core/server';
import type { IndicesDataStream } from 'elasticsearch-8.x/lib/api/types';
import type {
NewPackagePolicy,
NewPackagePolicyInput,
PackageInfo,
PackagePolicy,
} from '../../../types';
import { DATASET_VAR_NAME, DATA_STREAM_TYPE_VAR_NAME } from '../../../../common/constants';
import { PackagePolicyValidationError, PackageNotFoundError, FleetError } from '../../../errors';
import { dataStreamService } from '../..';
import * as Registry from '../registry';
import { createArchiveIteratorFromMap } from '../archive/archive_iterator';
import { getNormalizedDataStreams } from '../../../../common/services';
import { generateESIndexPatterns } from '../elasticsearch/template/template';
import type { PackageInstallContext } from '../../../../common/types';
import { getInstallation, getInstalledPackageWithAssets } from './get';
import { installIndexTemplatesAndPipelines } from './install_index_template_pipeline';
import { optimisticallyAddEsAssetReferences } from './es_assets_reference';
import { cleanupAssets } from './remove';
export const getDatasetName = (packagePolicyInput: NewPackagePolicyInput[]): string =>
packagePolicyInput[0].streams[0]?.vars?.[DATASET_VAR_NAME]?.value;
export const findDataStreamsFromDifferentPackages = async (
datasetName: string,
pkgInfo: PackageInfo,
esClient: ElasticsearchClient,
dataStreamType?: string
) => {
const [dataStream] = getNormalizedDataStreams(pkgInfo, datasetName, dataStreamType);
const existingDataStreams = await dataStreamService.getMatchingDataStreams(esClient, {
type: dataStream.type,
dataset: datasetName,
});
return { dataStream, existingDataStreams };
};
export const checkExistingDataStreamsAreFromDifferentPackage = (
pkgInfo: PackageInfo,
existingDataStreams: IndicesDataStream[]
) => {
return (existingDataStreams || []).some((ds) => ds._meta?.package?.name !== pkgInfo.name);
};
export const isInputPackageDatasetUsedByMultiplePolicies = (
packagePolicies: PackagePolicy[],
datasetName: string,
pkgName: string
) => {
const allStreams = packagePolicies
.filter(
(packagePolicy) =>
packagePolicy?.package?.name === pkgName || packagePolicy?.package?.type === 'input'
)
.flatMap((packagePolicy) => {
return packagePolicy?.inputs[0]?.streams ?? [];
});
const filtered = allStreams.filter(
(stream) => stream.vars?.[DATASET_VAR_NAME]?.value === datasetName
);
return filtered.length > 1;
};
// install the assets needed for inputs type packages
export async function installAssetsForInputPackagePolicy(opts: {
pkgInfo: PackageInfo;
logger: Logger;
packagePolicy: NewPackagePolicy;
esClient: ElasticsearchClient;
soClient: SavedObjectsClientContract;
force: boolean;
}) {
const { pkgInfo, logger, packagePolicy, esClient, soClient, force } = opts;
if (pkgInfo.type !== 'input') return;
const datasetName = getDatasetName(packagePolicy.inputs);
const dataStreamType =
packagePolicy.inputs[0].streams[0].vars?.[DATA_STREAM_TYPE_VAR_NAME]?.value ||
packagePolicy.inputs[0].streams[0].data_stream?.type ||
'logs';
const { dataStream, existingDataStreams } = await findDataStreamsFromDifferentPackages(
datasetName,
pkgInfo,
esClient,
dataStreamType
);
if (existingDataStreams.length) {
const existingDataStreamsAreFromDifferentPackage =
checkExistingDataStreamsAreFromDifferentPackage(pkgInfo, existingDataStreams);
if (existingDataStreamsAreFromDifferentPackage && !force) {
// user has opted to send data to an existing data stream which is managed by another
// package. This means certain custom setting such as elasticsearch settings
// defined by the package will not have been applied which could lead
// to unforeseen circumstances, so force flag must be used.
const streamIndexPattern = dataStreamService.streamPartsToIndexPattern({
type: dataStream.type,
dataset: datasetName,
});
throw new PackagePolicyValidationError(
`Datastreams matching "${streamIndexPattern}" already exist and are not managed by this package, force flag is required`
);
} else {
logger.info(
`Data stream for dataset ${datasetName} already exists, skipping index template creation`
);
return;
}
}
const existingIndexTemplate = await dataStreamService.getMatchingIndexTemplate(esClient, {
type: dataStream.type,
dataset: datasetName,
});
if (existingIndexTemplate) {
const indexTemplateOwnedByDifferentPackage =
existingIndexTemplate._meta?.package?.name !== pkgInfo.name;
if (indexTemplateOwnedByDifferentPackage && !force) {
// index template already exists but there is no data stream yet
// we do not want to override the index template
throw new PackagePolicyValidationError(
`Index template "${dataStream.type}-${datasetName}" already exist and is not managed by this package, force flag is required`
);
} else {
logger.info(
`Index template "${dataStream.type}-${datasetName}" already exists, skipping index template creation`
);
return;
}
}
const installedPkgWithAssets = await getInstalledPackageWithAssets({
savedObjectsClient: soClient,
pkgName: pkgInfo.name,
logger,
});
let packageInstallContext: PackageInstallContext | undefined;
if (!installedPkgWithAssets) {
throw new PackageNotFoundError(
`Error while creating index templates: unable to find installed package ${pkgInfo.name}`
);
}
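// Use the registry archive when the installed version differs from the requested one, otherwise reuse the stored assets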
try {
if (installedPkgWithAssets.installation.version !== pkgInfo.version) {
const pkg = await Registry.getPackage(pkgInfo.name, pkgInfo.version, {
ignoreUnverified: force,
});
const archiveIterator = createArchiveIteratorFromMap(pkg.assetsMap);
packageInstallContext = {
packageInfo: pkg.packageInfo,
paths: pkg.paths,
archiveIterator,
};
} else {
const archiveIterator = createArchiveIteratorFromMap(installedPkgWithAssets.assetsMap);
packageInstallContext = {
packageInfo: installedPkgWithAssets.packageInfo,
paths: installedPkgWithAssets.paths,
archiveIterator,
};
}
await installIndexTemplatesAndPipelines({
installedPkg: installedPkgWithAssets.installation,
packageInstallContext,
esReferences: installedPkgWithAssets.installation.installed_es || [],
savedObjectsClient: soClient,
esClient,
logger,
onlyForDataStreams: [dataStream],
});
// Update ES index patterns
await optimisticallyAddEsAssetReferences(
soClient,
installedPkgWithAssets.installation.name,
[],
generateESIndexPatterns([dataStream])
);
} catch (error) {
logger.warn(`installAssetsForInputPackagePolicy error: ${error}`);
}
}
// Remove the assets installed for input-type packages
export async function removeAssetsForInputPackagePolicy(opts: {
packageInfo: PackageInfo;
datasetName: string;
logger: Logger;
esClient: ElasticsearchClient;
savedObjectsClient: SavedObjectsClientContract;
}) {
const { logger, packageInfo, esClient, savedObjectsClient, datasetName } = opts;
if (packageInfo.type === 'input' && packageInfo.status === 'installed') {
logger.info(`Removing assets for input package ${packageInfo.name}:${packageInfo.version}`);
try {
const installation = await getInstallation({
savedObjectsClient,
pkgName: packageInfo.name,
});
if (!installation) {
throw new FleetError(`${packageInfo.name} is not installed`);
}
const {
installed_es: installedEs,
installed_kibana: installedKibana,
es_index_patterns: esIndexPatterns,
} = installation;
// regex matching names on a word boundary, so `generic` matches but `generic1` does not
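// e.g. for dataset "udp.generic", "logs-udp.generic@package" matches while "logs-udp.generic1" does not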
const regex = new RegExp(`${datasetName}\\b`);
const filteredInstalledEs = installedEs.filter((asset) => asset.id.search(regex) > -1);
const filteredInstalledKibana = installedKibana.filter(
(asset) => asset.id.search(regex) > -1
);
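// Only the index pattern belonging to this dataset is selected for deletion; patterns for other datasets stay on the installation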
const filteredEsIndexPatterns: Record<string, string> = {};
if (esIndexPatterns) {
filteredEsIndexPatterns[datasetName] = esIndexPatterns[datasetName];
}
const installationToDelete = {
...installation,
installed_es: filteredInstalledEs,
installed_kibana: filteredInstalledKibana,
es_index_patterns: filteredEsIndexPatterns,
package_assets: [],
};
await cleanupAssets(
datasetName,
installationToDelete,
installation,
esClient,
savedObjectsClient
);
} catch (error) {
logger.error(
`Failed to remove assets for input package ${packageInfo.name}:${packageInfo.version}: ${error.message}`
);
}
}
}


@@ -16,24 +16,21 @@ import { sendTelemetryEvents } from '../../upgrade_sender';
import { licenseService } from '../../license';
import { auditLoggingService } from '../../audit_logging';
import { appContextService } from '../../app_context';
import { ConcurrentInstallOperationError, FleetError, PackageNotFoundError } from '../../../errors';
import { ConcurrentInstallOperationError, FleetError } from '../../../errors';
import { isAgentlessEnabled, isOnlyAgentlessIntegration } from '../../utils/agentless';
import * as Registry from '../registry';
import { dataStreamService } from '../../data_streams';
import {
createInstallation,
handleInstallPackageFailure,
installAssetsForInputPackagePolicy,
installPackage,
isPackageVersionOrLaterInstalled,
} from './install';
import * as installStateMachine from './install_state_machine/_state_machine_package_install';
import { getBundledPackageByPkgKey } from './bundled_packages';
import { getInstalledPackageWithAssets, getInstallationObject } from './get';
import { optimisticallyAddEsAssetReferences } from './es_assets_reference';
import { getInstallationObject } from './get';
jest.mock('../../data_streams');
jest.mock('./get');
@@ -540,106 +537,6 @@ describe('install', () => {
});
});
describe('installAssetsForInputPackagePolicy', () => {
beforeEach(() => {
jest.mocked(optimisticallyAddEsAssetReferences).mockReset();
});
it('should do nothing for non input package', async () => {
const mockedLogger = jest.mocked(appContextService.getLogger());
await installAssetsForInputPackagePolicy({
pkgInfo: {
type: 'integration',
} as any,
soClient: savedObjectsClientMock.create(),
esClient: {} as ElasticsearchClient,
force: false,
logger: mockedLogger,
packagePolicy: {} as any,
});
});
const TEST_PKG_INFO_INPUT = {
type: 'input',
name: 'test',
version: '1.0.0',
policy_templates: [
{
name: 'log',
type: 'log',
},
],
};
it('should throw for input package if package is not installed', async () => {
jest.mocked(dataStreamService).getMatchingDataStreams.mockResolvedValue([]);
jest.mocked(getInstalledPackageWithAssets).mockResolvedValue(undefined);
const mockedLogger = jest.mocked(appContextService.getLogger());
await expect(() =>
installAssetsForInputPackagePolicy({
pkgInfo: TEST_PKG_INFO_INPUT as any,
soClient: savedObjectsClientMock.create(),
esClient: {} as ElasticsearchClient,
force: false,
logger: mockedLogger,
packagePolicy: {
inputs: [
{
type: 'log',
streams: [{ data_stream: { type: 'log' }, vars: { dataset: 'test.tata' } }],
},
],
} as any,
})
).rejects.toThrowError(PackageNotFoundError);
});
it('should install es index patterns for input package if package is installed', async () => {
jest.mocked(dataStreamService).getMatchingDataStreams.mockResolvedValue([]);
jest.mocked(getInstalledPackageWithAssets).mockResolvedValue({
installation: {
name: 'test',
version: '1.0.0',
},
packageInfo: TEST_PKG_INFO_INPUT,
assetsMap: new Map(),
paths: [],
} as any);
const mockedLogger = jest.mocked(appContextService.getLogger());
await installAssetsForInputPackagePolicy({
pkgInfo: TEST_PKG_INFO_INPUT as any,
soClient: savedObjectsClientMock.create(),
esClient: {} as ElasticsearchClient,
force: false,
logger: mockedLogger,
packagePolicy: {
inputs: [
{
name: 'log',
type: 'log',
streams: [
{
data_stream: { type: 'log' },
vars: { 'data_stream.dataset': { value: 'test.tata' } },
},
],
},
],
} as any,
});
expect(jest.mocked(optimisticallyAddEsAssetReferences)).toBeCalledWith(
expect.anything(),
expect.anything(),
expect.anything(),
{
'test.tata': 'log-test.tata-*',
}
);
});
});
describe('handleInstallPackageFailure', () => {
const mockedLogger = jest.mocked(appContextService.getLogger());
const savedObjectsClient = savedObjectsClientMock.create();


@@ -41,25 +41,19 @@ import type {
InstallSource,
InstallType,
KibanaAssetType,
NewPackagePolicy,
PackageInfo,
PackageVerificationResult,
InstallResultStatus,
} from '../../../types';
import {
AUTO_UPGRADE_POLICIES_PACKAGES,
CUSTOM_INTEGRATION_PACKAGE_SPEC_VERSION,
DATASET_VAR_NAME,
DATA_STREAM_TYPE_VAR_NAME,
GENERIC_DATASET_NAME,
} from '../../../../common/constants';
import {
FleetError,
PackageOutdatedError,
PackagePolicyValidationError,
ConcurrentInstallOperationError,
FleetUnauthorizedError,
PackageNotFoundError,
FleetTooManyRequestsError,
PackageInvalidDeploymentMode,
} from '../../../errors';
@@ -68,7 +62,7 @@ import {
MAX_TIME_COMPLETE_INSTALL,
MAX_REINSTALL_RETRIES,
} from '../../../constants';
import { dataStreamService, licenseService } from '../..';
import { licenseService } from '../..';
import { appContextService } from '../../app_context';
import * as Registry from '../registry';
import {
@@ -90,7 +84,7 @@ import { _stateMachineInstallPackage } from './install_state_machine/_state_mach
import { formatVerificationResultForSO } from './package_verification';
import { getInstallation, getInstallationObject } from './get';
import { getInstalledPackageWithAssets, getPackageSavedObjects } from './get';
import { getPackageSavedObjects } from './get';
import { removeOldAssets } from './cleanup';
import { getBundledPackageByPkgKey } from './bundled_packages';
import { convertStringToTitle, generateDescription } from './custom_integrations/utils';
@@ -100,8 +94,6 @@ import { generateDatastreamEntries } from './custom_integrations/assets/dataset/
import { checkForNamingCollision } from './custom_integrations/validation/check_naming_collision';
import { checkDatasetsNameFormat } from './custom_integrations/validation/check_dataset_name_format';
import { addErrorToLatestFailedAttempts } from './install_errors_helpers';
import { installIndexTemplatesAndPipelines } from './install_index_template_pipeline';
import { optimisticallyAddEsAssetReferences } from './es_assets_reference';
import { setLastUploadInstallCache, getLastUploadInstallCache } from './utils';
import { removeInstallation } from './remove';
@@ -1302,130 +1294,6 @@ export async function ensurePackagesCompletedInstall(
return installingPackages;
}
export async function installAssetsForInputPackagePolicy(opts: {
pkgInfo: PackageInfo;
logger: Logger;
packagePolicy: NewPackagePolicy;
esClient: ElasticsearchClient;
soClient: SavedObjectsClientContract;
force: boolean;
}) {
const { pkgInfo, logger, packagePolicy, esClient, soClient, force } = opts;
if (pkgInfo.type !== 'input') return;
const datasetName = packagePolicy.inputs[0].streams[0].vars?.[DATASET_VAR_NAME]?.value;
const dataStreamType =
packagePolicy.inputs[0].streams[0].vars?.[DATA_STREAM_TYPE_VAR_NAME]?.value ||
packagePolicy.inputs[0].streams[0].data_stream?.type ||
'logs';
const [dataStream] = getNormalizedDataStreams(pkgInfo, datasetName, dataStreamType);
const existingDataStreams = await dataStreamService.getMatchingDataStreams(esClient, {
type: dataStream.type,
dataset: datasetName,
});
if (existingDataStreams.length) {
const existingDataStreamsAreFromDifferentPackage = existingDataStreams.some(
(ds) => ds._meta?.package?.name !== pkgInfo.name
);
if (existingDataStreamsAreFromDifferentPackage && !force) {
// user has opted to send data to an existing data stream which is managed by another
// package. This means certain custom setting such as elasticsearch settings
// defined by the package will not have been applied which could lead
// to unforeseen circumstances, so force flag must be used.
const streamIndexPattern = dataStreamService.streamPartsToIndexPattern({
type: dataStream.type,
dataset: datasetName,
});
throw new PackagePolicyValidationError(
`Datastreams matching "${streamIndexPattern}" already exist and are not managed by this package, force flag is required`
);
} else {
logger.info(
`Data stream for dataset ${datasetName} already exists, skipping index template creation for ${packagePolicy.id}`
);
return;
}
}
const existingIndexTemplate = await dataStreamService.getMatchingIndexTemplate(esClient, {
type: dataStream.type,
dataset: datasetName,
});
if (existingIndexTemplate) {
const indexTemplateOwnnedByDifferentPackage =
existingIndexTemplate._meta?.package?.name !== pkgInfo.name;
if (indexTemplateOwnnedByDifferentPackage && !force) {
// index template already exists but there is no data stream yet
// we do not want to override the index template
throw new PackagePolicyValidationError(
`Index template "${dataStream.type}-${datasetName}" already exist and is not managed by this package, force flag is required`
);
} else {
logger.info(
`Index template "${dataStream.type}-${datasetName}" already exists, skipping index template creation for ${packagePolicy.id}`
);
return;
}
}
const installedPkgWithAssets = await getInstalledPackageWithAssets({
savedObjectsClient: soClient,
pkgName: pkgInfo.name,
logger,
});
let packageInstallContext: PackageInstallContext | undefined;
if (!installedPkgWithAssets) {
throw new PackageNotFoundError(
`Error while creating index templates: unable to find installed package ${pkgInfo.name}`
);
}
try {
if (installedPkgWithAssets.installation.version !== pkgInfo.version) {
const pkg = await Registry.getPackage(pkgInfo.name, pkgInfo.version, {
ignoreUnverified: force,
});
const archiveIterator = createArchiveIteratorFromMap(pkg.assetsMap);
packageInstallContext = {
packageInfo: pkg.packageInfo,
paths: pkg.paths,
archiveIterator,
};
} else {
const archiveIterator = createArchiveIteratorFromMap(installedPkgWithAssets.assetsMap);
packageInstallContext = {
packageInfo: installedPkgWithAssets.packageInfo,
paths: installedPkgWithAssets.paths,
archiveIterator,
};
}
await installIndexTemplatesAndPipelines({
installedPkg: installedPkgWithAssets.installation,
packageInstallContext,
esReferences: installedPkgWithAssets.installation.installed_es || [],
savedObjectsClient: soClient,
esClient,
logger,
onlyForDataStreams: [dataStream],
});
// Upate ES index patterns
await optimisticallyAddEsAssetReferences(
soClient,
installedPkgWithAssets.installation.name,
[],
generateESIndexPatterns([dataStream])
);
} catch (error) {
logger.warn(`installAssetsForInputPackagePolicy error: ${error}`);
}
}
interface NoPkgArgs {
pkgVersion: string;
installedPkg?: undefined;


@@ -11,7 +11,7 @@ import { ElasticsearchAssetType, PACKAGES_SAVED_OBJECT_TYPE } from '../../../../
import { packagePolicyService } from '../..';
import { auditLoggingService } from '../../audit_logging';
import { deleteESAsset, removeInstallation } from './remove';
import { deleteESAsset, removeInstallation, cleanupAssets } from './remove';
jest.mock('../..', () => {
return {
@@ -144,4 +144,98 @@ describe('deleteESAsset', () => {
expect.anything()
);
});
describe('cleanupAssets', () => {
let soClientMock: any;
const esClientMock = {} as any;
beforeEach(() => {
soClientMock = {
get: jest
.fn()
.mockResolvedValue({ attributes: { installed_kibana: [], installed_es: [] } }),
update: jest.fn().mockImplementation(async (type, id, data) => {
return {
id,
type,
attributes: {},
references: [],
};
}),
delete: jest.fn(),
find: jest.fn().mockResolvedValue({ saved_objects: [] }),
bulkResolve: jest.fn().mockResolvedValue({ resolved_objects: [] }),
} as any;
});
it('should remove assets marked for deletion', async () => {
const installation = {
name: 'test',
version: '1.0.0',
installed_kibana: [],
installed_es: [
{
id: 'logs@custom',
type: 'component_template',
},
{
id: 'udp@custom',
type: 'component_template',
},
{
id: 'logs-udp.generic',
type: 'index_template',
},
{
id: 'logs-udp.generic@package',
type: 'component_template',
},
],
es_index_patterns: {
generic: 'logs-generic-*',
'udp.generic': 'logs-udp.generic-*',
'udp.test': 'logs-udp.test-*',
},
} as any;
const installationToDelete = {
name: 'test',
version: '1.0.0',
installed_kibana: [],
installed_es: [
{
id: 'logs-udp.generic',
type: 'index_template',
},
{
id: 'logs-udp.generic@package',
type: 'component_template',
},
],
} as any;
await cleanupAssets(
'generic',
installationToDelete,
installation,
esClientMock,
soClientMock
);
expect(soClientMock.update).toBeCalledWith('epm-packages', 'test', {
installed_es: [
{
id: 'logs@custom',
type: 'component_template',
},
{
id: 'udp@custom',
type: 'component_template',
},
],
installed_kibana: [],
es_index_patterns: {
'udp.generic': 'logs-udp.generic-*',
'udp.test': 'logs-udp.test-*',
},
});
});
});
});


@@ -6,6 +6,7 @@
*/
import type { ElasticsearchClient, SavedObjectsClientContract } from '@kbn/core/server';
import { differenceBy } from 'lodash';
import type { SavedObject } from '@kbn/core/server';
@@ -383,8 +384,8 @@ async function deleteIndexTemplate(esClient: ElasticsearchClient, name: string):
if (name && name !== '*') {
try {
await esClient.indices.deleteIndexTemplate({ name }, { ignore: [404] });
} catch {
throw new FleetError(`Error deleting index template ${name}`);
} catch (error) {
throw new FleetError(`Error deleting index template ${name}: ${error.message}`);
}
}
}
@@ -395,7 +396,7 @@ async function deleteComponentTemplate(esClient: ElasticsearchClient, name: stri
try {
await esClient.cluster.deleteComponentTemplate({ name }, { ignore: [404] });
} catch (error) {
throw new FleetError(`Error deleting component template ${name}`);
throw new FleetError(`Error deleting component template ${name}: ${error.message}`);
}
}
}
@@ -488,6 +489,45 @@ export function cleanupTransforms(
return deleteTransforms(esClient, idsToDelete);
}
/**
* This function deletes assets for a given installation and updates the package SO accordingly.
*
* It is used to delete assets installed for input packages when they are no longer relevant,
* e.g. when a package policy is deleted and the package has no more policies.
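*
* Only the assets listed in `installationToDelete` are removed; `originalInstallation`
* provides the full reference lists used to compute what remains on the package saved object.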
*/
export async function cleanupAssets(
datasetName: string,
installationToDelete: Installation,
originalInstallation: Installation,
esClient: ElasticsearchClient,
soClient: SavedObjectsClientContract
) {
await deleteAssets(installationToDelete, esClient);
const {
installed_es: installedEs,
installed_kibana: installedKibana,
es_index_patterns: installedIndexPatterns,
} = originalInstallation;
const { installed_es: ESToRemove, installed_kibana: kibanaToRemove } = installationToDelete;
if (installedIndexPatterns && installedIndexPatterns[datasetName]) {
delete installedIndexPatterns[datasetName];
}
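// Persist the pruned asset references and index patterns on the package saved object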
await soClient.update(PACKAGES_SAVED_OBJECT_TYPE, originalInstallation.name, {
installed_es: differenceBy(installedEs, ESToRemove, 'id'),
installed_kibana: differenceBy(installedKibana, kibanaToRemove, 'id'),
es_index_patterns: installedIndexPatterns,
});
auditLoggingService.writeCustomSoAuditLog({
action: 'update',
id: originalInstallation.name,
name: originalInstallation.name,
savedObjectType: PACKAGES_SAVED_OBJECT_TYPE,
});
}
async function updateUninstallStatusToFailed(
savedObjectsClient: SavedObjectsClientContract,
pkgName: string,


@@ -132,7 +132,7 @@ import type {
RunExternalCallbacksPackagePolicyArgument,
RunExternalCallbacksPackagePolicyResponse,
} from './package_policy_service';
import { installAssetsForInputPackagePolicy } from './epm/packages/install';
import { installAssetsForInputPackagePolicy } from './epm/packages/input_type_packages';
import { auditLoggingService } from './audit_logging';
import {
extractAndUpdateSecrets,


@@ -345,6 +345,10 @@ export const InstallKibanaAssetsResponseSchema = schema.object({
success: schema.boolean(),
});
export const DeletePackageDatastreamAssetsResponseSchema = schema.object({
success: schema.boolean(),
});
export const BulkInstallPackagesResponseItemSchema = schema.oneOf([
schema.object({
name: schema.string(),
@@ -665,6 +669,16 @@ export const DeleteKibanaAssetsRequestSchema = {
}),
};
export const DeletePackageDatastreamAssetsRequestSchema = {
params: schema.object({
pkgName: schema.string(),
pkgVersion: schema.string(),
}),
query: schema.object({
packagePolicyId: schema.string(),
}),
};
export const GetInputsRequestSchema = {
params: schema.object({
pkgName: schema.string(),