Mirror of https://github.com/elastic/kibana.git, synced 2025-04-24 01:38:56 -04:00
# Backport

This will backport the following commits from `main` to `9.0`:

- [[Automatic Import] Fix chunking of samples in ecs mapping (#214702)](https://github.com/elastic/kibana/pull/214702)

<!--- Backport version: 9.6.6 -->

### Questions ?

Please refer to the [Backport tool documentation](https://github.com/sorenlouv/backport)

Co-authored-by: Bharat Pasupula <123897612+bhapas@users.noreply.github.com>
This commit is contained in:
parent
542d800c42
commit
5de3b67a96
2 changed files with 52 additions and 22 deletions
```diff
@@ -17,4 +17,31 @@ describe('test chunks', () => {
       JSON.stringify({ c: { d: 3 }, e: 4 }, null, 2),
     ]);
   });
+  it('handles empty objects', () => {
+    const objects = ['{}', '{}'];
+    const chunkSize = 2;
+    const result = mergeAndChunkSamples(objects, chunkSize);
+    expect(result).toStrictEqual([]);
+  });
+
+  it('handles large objects with custom chunk size', () => {
+    const objects = ['{"a": 1, "b": 2, "c": 3, "d": 4}', '{"e": 5, "f": 6, "g": 7, "h": 8}'];
+    const chunkSize = 3;
+    const result = mergeAndChunkSamples(objects, chunkSize);
+    expect(result).toStrictEqual([
+      JSON.stringify({ a: 1, b: 2, c: 3 }, null, 2),
+      JSON.stringify({ d: 4, e: 5, f: 6 }, null, 2),
+      JSON.stringify({ g: 7, h: 8 }, null, 2),
+    ]);
+  });
+
+  it('safely handles prototype pollution attempts', () => {
+    const objects = [
+      '{"a": 1, "__proto__": {"polluted": true}, "constructor": {"ignored": true}}',
+      '{"b": 2, "prototype": {"unsafe": true}, "constructor": {"bad": true}}',
+    ];
+    const chunkSize = 2;
+    const result = mergeAndChunkSamples(objects, chunkSize);
+    expect(result).toStrictEqual([JSON.stringify({ a: 1, b: 2 }, null, 2)]);
+  });
 });
```
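The tests above pin down two behaviours: samples are merged before chunking (so a single chunk such as `{ d: 4, e: 5, f: 6 }` can span values from different input samples), and keys named `__proto__`, `constructor`, and `prototype` are dropped from the output. The unsafe-key check relies on the `isUnsafeProperty` helper imported from `../../util/samples`, whose implementation is not part of this diff. A minimal sketch of what such a check typically looks like follows; the parameter names and the `hasOwnProperty` guard are assumptions, not the actual helper:

```ts
// Hypothetical sketch only: the real isUnsafeProperty in ../../util/samples
// is not shown in this diff and may differ.
function isUnsafeProperty(key: string, obj: Record<string, unknown>): boolean {
  return (
    // Reject key names that can reach or mutate the prototype chain.
    key === '__proto__' ||
    key === 'constructor' ||
    key === 'prototype' ||
    // Assumed reason the object is passed in: ignore inherited keys.
    !Object.prototype.hasOwnProperty.call(obj, key)
  );
}
```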
```diff
@@ -6,7 +6,7 @@
  */
 /* eslint-disable @typescript-eslint/no-explicit-any */
 
-import { merge } from '../../util/samples';
+import { isUnsafeProperty, merge } from '../../util/samples';
 
 interface NestedObject {
   [key: string]: any;
@@ -40,32 +40,35 @@ function generateChunks(mergedSamples: NestedObject, chunkSize: number): NestedO
   function traverse(current: NestedObject, path: string[] = []) {
     for (const [key, value] of Object.entries(current)) {
-      const newPath = [...path, key];
-
-      // If the value is a nested object, recurse into it
-      if (typeof value === 'object' && value !== null && !Array.isArray(value)) {
-        traverse(value, newPath);
-      } else {
-        // For non-object values, add them to the current chunk
-        let target = currentChunk;
-
-        // Recreate the nested structure in the current chunk
-        for (let i = 0; i < newPath.length - 1; i++) {
-          if (!(newPath[i] in target)) {
-            target[newPath[i]] = {};
-          }
-          target = target[newPath[i]];
-        }
-
-        // Add the value to the deepest level of the structure
-        target[newPath[newPath.length - 1]] = value;
-        currentSize++;
-
-        // If the chunk is full, add it to the chunks and start a new chunk
-        if (currentSize === chunkSize) {
-          chunks.push(currentChunk);
-          currentChunk = {};
-          currentSize = 0;
-        }
-      }
+      if (!isUnsafeProperty(key, current)) {
+        const newPath = [...path, key];
+
+        // If the value is a nested object, recurse into it
+        if (typeof value === 'object' && value !== null && !Array.isArray(value)) {
+          traverse(value, newPath);
+        } else {
+          // For non-object values, add them to the current chunk
+          let target = currentChunk;
+
+          // Recreate the nested structure in the current chunk
+          for (let i = 0; i < newPath.length - 1; i++) {
+            const pathSegment = newPath[i];
+            if (isUnsafeProperty(pathSegment, target) || !(pathSegment in target)) {
+              target[pathSegment] = {};
+            }
+            target = target[newPath[i]];
+          }
+
+          // Add the value to the deepest level of the structure
+          target[newPath[newPath.length - 1]] = value;
+          currentSize++;
+
+          // If the chunk is full, add it to the chunks and start a new chunk
+          if (currentSize === chunkSize) {
+            chunks.push(currentChunk);
+            currentChunk = {};
+            currentSize = 0;
+          }
+        }
+      }
     }
   }
```
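For context, the behaviour the hunk above implements is: skip unsafe keys, rebuild each leaf value's nested path inside the current chunk, and close the chunk once it holds `chunkSize` leaf values. The self-contained sketch below illustrates that flow against an already-merged sample; it is a simplified stand-in with a hard-coded unsafe-key set, not the module's exported `mergeAndChunkSamples`:

```ts
// Simplified sketch of the chunking behaviour exercised by the tests above.
type NestedObject = { [key: string]: unknown };

const UNSAFE_KEYS = new Set(['__proto__', 'constructor', 'prototype']);

function chunkMergedSample(merged: NestedObject, chunkSize: number): string[] {
  const chunks: NestedObject[] = [];
  let current: NestedObject = {};
  let size = 0;

  const traverse = (node: NestedObject, path: string[]): void => {
    for (const [key, value] of Object.entries(node)) {
      if (UNSAFE_KEYS.has(key)) continue; // drop prototype-polluting keys
      const newPath = [...path, key];
      if (typeof value === 'object' && value !== null && !Array.isArray(value)) {
        traverse(value as NestedObject, newPath);
      } else {
        // Rebuild the nested structure for this leaf inside the current chunk.
        let target = current;
        for (const segment of newPath.slice(0, -1)) {
          if (!(segment in target)) {
            target[segment] = {};
          }
          target = target[segment] as NestedObject;
        }
        target[newPath[newPath.length - 1]] = value;
        // Close the chunk once it holds chunkSize leaf values.
        if (++size === chunkSize) {
          chunks.push(current);
          current = {};
          size = 0;
        }
      }
    }
  };

  traverse(merged, []);
  if (size > 0) {
    chunks.push(current); // keep the final, possibly partial chunk
  }
  return chunks.map((chunk) => JSON.stringify(chunk, null, 2));
}

// Mirrors the "handles large objects with custom chunk size" test: the merged
// sample splits into chunks of at most three leaf values.
console.log(chunkMergedSample({ a: 1, b: 2, c: 3, d: 4, e: 5, f: 6, g: 7, h: 8 }, 3));
```

Running the example prints three chunks, `{ a, b, c }`, `{ d, e, f }`, and `{ g, h }`, matching the expected output in the test.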