[9.0] Chunk saved object creation by 10 (#221479) (#223194)

# Backport

This will backport the following commits from `main` to `9.0`:
- [Chunk saved object creation by 10
(#221479)](https://github.com/elastic/kibana/pull/221479)

<!--- Backport version: 9.6.6 -->

### Questions?
Please refer to the [Backport tool
documentation](https://github.com/sorenlouv/backport)

<!--BACKPORT [{"author":{"name":"Khristinin
Nikita","email":"nikita.khristinin@elastic.co"},"sourceCommit":{"committedDate":"2025-06-10T09:26:30Z","message":"Chunk
saved object creation by 10 (#221479)\n\n## Summary\n\nBulk create saved
object for manual rule run in chunks\n\nIssue
[here](https://github.com/elastic/security-team/issues/12678)\n\n---------\n\nCo-authored-by:
Elastic Machine
<elasticmachine@users.noreply.github.com>\nCo-authored-by: kibanamachine
<42973632+kibanamachine@users.noreply.github.com>","sha":"29b11809b7b2973161932e46d7d1f84a80aafeaf","branchLabelMapping":{"^v9.1.0$":"main","^v(\\d+).(\\d+).\\d+$":"$1.$2"}},"sourcePullRequest":{"labels":["release_note:skip","backport:prev-minor","backport:version","v9.1.0","v8.19.0","v9.0.3","v8.18.3"],"title":"Chunk
saved object creation by
10","number":221479,"url":"https://github.com/elastic/kibana/pull/221479","mergeCommit":{"message":"Chunk
saved object creation by 10 (#221479)\n\n## Summary\n\nBulk create saved
object for manual rule run in chunks\n\nIssue
[here](https://github.com/elastic/security-team/issues/12678)\n\n---------\n\nCo-authored-by:
Elastic Machine
<elasticmachine@users.noreply.github.com>\nCo-authored-by: kibanamachine
<42973632+kibanamachine@users.noreply.github.com>","sha":"29b11809b7b2973161932e46d7d1f84a80aafeaf"}},"sourceBranch":"main","suggestedTargetBranches":["8.19","9.0","8.18"],"targetPullRequestStates":[{"branch":"main","label":"v9.1.0","branchLabelMappingKey":"^v9.1.0$","isSourceBranch":true,"state":"MERGED","url":"https://github.com/elastic/kibana/pull/221479","number":221479,"mergeCommit":{"message":"Chunk
saved object creation by 10 (#221479)\n\n## Summary\n\nBulk create saved
object for manual rule run in chunks\n\nIssue
[here](https://github.com/elastic/security-team/issues/12678)\n\n---------\n\nCo-authored-by:
Elastic Machine
<elasticmachine@users.noreply.github.com>\nCo-authored-by: kibanamachine
<42973632+kibanamachine@users.noreply.github.com>","sha":"29b11809b7b2973161932e46d7d1f84a80aafeaf"}},{"branch":"8.19","label":"v8.19.0","branchLabelMappingKey":"^v(\\d+).(\\d+).\\d+$","isSourceBranch":false,"state":"NOT_CREATED"},{"branch":"9.0","label":"v9.0.3","branchLabelMappingKey":"^v(\\d+).(\\d+).\\d+$","isSourceBranch":false,"state":"NOT_CREATED"},{"branch":"8.18","label":"v8.18.3","branchLabelMappingKey":"^v(\\d+).(\\d+).\\d+$","isSourceBranch":false,"state":"NOT_CREATED"}]}]
BACKPORT-->

Co-authored-by: Khristinin Nikita <nikita.khristinin@elastic.co>
Co-authored-by: Elastic Machine <elasticmachine@users.noreply.github.com>
This commit is contained in:
Kibana Machine 2025-06-12 15:09:02 +02:00 committed by GitHub
parent 5419984424
commit e835dff82e
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
2 changed files with 60 additions and 5 deletions

View file

@@ -2552,6 +2552,54 @@ describe('BackfillClient', () => {
`Error deleting tasks with IDs: def with errors: delete failed` `Error deleting tasks with IDs: def with errors: delete failed`
); );
}); });
test('should process bulk create in chunks of 10', async () => {
  // 25 rules/params so the client must split the bulkCreate into 10 + 10 + 5.
  const totalCount = 25;
  const batchSize = 10;
  const mockRules = Array.from({ length: totalCount }, (_, i) => getMockRule({ id: `${i + 1}` }));
  const mockData = Array.from({ length: totalCount }, (_, i) => getMockData({ ruleId: `${i + 1}` }));

  // Build one canned bulkCreate response per expected chunk.
  const mockResponses = [];
  for (let start = 0; start < totalCount; start += batchSize) {
    const end = Math.min(start + batchSize, totalCount);
    const savedObjects = [];
    for (let idx = start; idx < end; idx++) {
      savedObjects.push(getBulkCreateParam(`id-${idx}`, `${idx + 1}`, getMockAdHocRunAttributes()));
    }
    mockResponses.push({ saved_objects: savedObjects });
  }

  // Each successive bulkCreate call resolves with the next chunk's response.
  for (const response of mockResponses) {
    unsecuredSavedObjectsClient.bulkCreate.mockResolvedValueOnce(response);
  }

  const result = await backfillClient.bulkQueue({
    actionsClient,
    auditLogger,
    params: mockData,
    rules: mockRules,
    ruleTypeRegistry,
    spaceId: 'default',
    unsecuredSavedObjectsClient,
    eventLogClient,
    internalSavedObjectsRepository,
    eventLogger,
  });

  // Three calls, sized 10, 10, and 5.
  expect(unsecuredSavedObjectsClient.bulkCreate).toHaveBeenCalledTimes(3);
  expect(unsecuredSavedObjectsClient.bulkCreate.mock.calls[0][0]).toHaveLength(10);
  expect(unsecuredSavedObjectsClient.bulkCreate.mock.calls[1][0]).toHaveLength(10);
  expect(unsecuredSavedObjectsClient.bulkCreate.mock.calls[2][0]).toHaveLength(5);
  // All chunk results are concatenated into a single result set.
  expect(result).toHaveLength(25);
});
}); });
describe('findOverlappingBackfills()', () => { describe('findOverlappingBackfills()', () => {

View file

@@ -181,12 +181,19 @@ export class BackfillClient {
); );
} }
// Bulk create the saved object // Bulk create the saved objects in chunks of 10 to manage resource usage
const bulkCreateResponse = await unsecuredSavedObjectsClient.bulkCreate<AdHocRunSO>( const chunkSize = 10;
adHocSOsToCreate const allSavedObjects: Array<SavedObject<AdHocRunSO>> = [];
);
const transformedResponse: ScheduleBackfillResults = bulkCreateResponse.saved_objects.map( for (let i = 0; i < adHocSOsToCreate.length; i += chunkSize) {
const chunk = adHocSOsToCreate.slice(i, i + chunkSize);
const bulkCreateChunkResponse = await unsecuredSavedObjectsClient.bulkCreate<AdHocRunSO>(
chunk
);
allSavedObjects.push(...bulkCreateChunkResponse.saved_objects);
}
const transformedResponse: ScheduleBackfillResults = allSavedObjects.map(
(so: SavedObject<AdHocRunSO>, index: number) => { (so: SavedObject<AdHocRunSO>, index: number) => {
if (so.error) { if (so.error) {
auditLogger?.log( auditLogger?.log(