mirror of
https://github.com/elastic/kibana.git
synced 2025-04-24 09:48:58 -04:00
[Logs+] Aggregate unmanaged datasets (#162144)
## 📓 Summary
Closes #162061
This implementation updates and aggregates the unmanaged datasets once
they are retrieved by the state machine on initialization.
I implemented this step in the state machine rather than in the
dataset service, to keep the service pure and able to serve the whole
response in case we need to use it in other plugins/use cases.
a5883521-6dd7-4291-9b90-0cd7665bed65
---------
Co-authored-by: Marco Antonio Ghiani <marcoantonio.ghiani@elastic.co>
This commit is contained in:
parent
9a87af0cc3
commit
e1b4910f3c
2 changed files with 24 additions and 3 deletions
|
@ -11,6 +11,7 @@ import { IndexPattern } from '@kbn/io-ts-utils';
|
|||
import { DatasetId, DatasetType, IntegrationType } from '../types';
|
||||
|
||||
type IntegrationBase = Pick<IntegrationType, 'name' | 'title' | 'icons' | 'version'>;
|
||||
|
||||
interface DatasetDeps extends DatasetType {
|
||||
iconType?: IconType;
|
||||
}
|
||||
|
@ -41,6 +42,12 @@ export class Dataset {
|
|||
: this.title;
|
||||
}
|
||||
|
||||
getDatasetWildcard(): IndexPattern {
  // Dataset names follow the `{type}-{dataset}-{namespace}` convention
  // (e.g. `logs-nginx.access-default`). Keep the type and dataset segments
  // and replace the namespace with `*` so the pattern matches every namespace.
  const [type, dataset] = this.name.split('-');

  return `${type}-${dataset}-*` as IndexPattern;
}
|
||||
|
||||
toDataviewSpec(): DataViewSpec {
|
||||
// Invert the property because the API returns the index pattern as `name` and a readable name as `title`
|
||||
return {
|
||||
|
@ -68,4 +75,15 @@ export class Dataset {
|
|||
iconType: 'editorChecklist',
|
||||
});
|
||||
}
|
||||
|
||||
public static createWildcardDatasetsFrom(datasets: Dataset[]) {
  // Derive the wildcard pattern for every dataset and deduplicate
  // the results by collecting them into a Set.
  const wildcards = new Set<IndexPattern>(
    datasets.map((dataset) => dataset.getDatasetWildcard())
  );

  // Build a fresh Dataset instance for each unique wildcard pattern.
  return [...wildcards].map((wildcard) => Dataset.create({ name: wildcard }));
}
|
||||
}
|
||||
|
|
|
@ -7,6 +7,7 @@
|
|||
|
||||
import { isEmpty, isError, omitBy } from 'lodash';
|
||||
import { assign, createMachine } from 'xstate';
|
||||
import { Dataset } from '../../../../common/datasets';
|
||||
import { IDatasetsClient } from '../../../services/datasets';
|
||||
import { DEFAULT_CONTEXT } from './defaults';
|
||||
import type {
|
||||
|
@ -39,7 +40,7 @@ export const createPureDatasetsStateMachine = (
|
|||
src: 'loadDatasets',
|
||||
onDone: {
|
||||
target: 'loaded',
|
||||
actions: ['storeInCache', 'storeDatasets', 'storeSearch'],
|
||||
actions: ['storeInCache', 'aggregateAndStoreDatasets', 'storeSearch'],
|
||||
},
|
||||
onError: 'loadingFailed',
|
||||
},
|
||||
|
@ -77,8 +78,10 @@ export const createPureDatasetsStateMachine = (
|
|||
// Store search from search event
|
||||
...('search' in event && { search: event.search }),
|
||||
})),
|
||||
storeDatasets: assign((_context, event) =>
|
||||
'data' in event && !isError(event.data) ? { datasets: event.data.items } : {}
|
||||
aggregateAndStoreDatasets: assign((_context, event) =>
|
||||
'data' in event && !isError(event.data)
|
||||
? { datasets: Dataset.createWildcardDatasetsFrom(event.data.items) }
|
||||
: {}
|
||||
),
|
||||
storeInCache: (context, event) => {
|
||||
if ('data' in event && !isError(event.data)) {
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue