Mirror of https://github.com/elastic/kibana.git, synced 2025-04-23 09:19:04 -04:00
[ML] Improving empty object creation (#191518)
Replacing instances of empty object creation with `Object.create(null)` to remove any risk of prototype pollution. Only touching server and common code. Follows on from https://github.com/elastic/kibana/pull/186821
This commit is contained in: parent ac4a786c38, commit 5ea65f9ed7
25 changed files with 94 additions and 88 deletions
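Before the per-file hunks, a minimal standalone sketch of the bug class this change defends against (illustrative only, not code from this PR): on a plain `{}`, attacker-controlled keys can reach through to the shared `Object.prototype`, while a null-prototype object offers no such path.

```ts
// Minimal standalone sketch of prototype pollution – not code from this PR.
const plain: Record<string, string> = {};
const nullProto: Record<string, string> = Object.create(null);

// On a plain object, a '__proto__' key reaches through to the shared prototype:
const key = '__proto__'; // imagine this arriving from user-controlled input
(plain as any)[key]['polluted'] = true; // mutates Object.prototype for the whole process
console.log(({} as any).polluted);      // true – every plain object now "has" the property

// A null-prototype object has no such escape hatch:
console.log((nullProto as any)[key]);          // undefined – the write above would throw here
console.log(Object.getPrototypeOf(nullProto)); // null
```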
@@ -24,7 +24,7 @@ const COLORS = [
   euiVars.euiColorPrimary,
 ];
 
-const colorMap: Record<string, string> = {};
+const colorMap: Record<string, string> = Object.create(null);
 
 export function tabColor(name: string): string {
   if (colorMap[name] === undefined) {
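The `colorMap` hunk shows the subtler half of the problem: `tabColor` probes `colorMap[name] === undefined` with a caller-supplied `name`, and on a plain object an inherited member can satisfy that probe with the wrong value. A small illustrative sketch (not Kibana code):

```ts
const colorMap: Record<string, string> = {};
// A tab named "toString" or "constructor" collides with Object.prototype:
console.log(colorMap['toString'] === undefined); // false – it finds the inherited method

const safeMap: Record<string, string> = Object.create(null);
console.log(safeMap['toString'] === undefined);  // true – nothing is inherited
```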
@@ -255,7 +255,7 @@ export function registerAnomalyDetectionAlertType({
       spaceId,
       rule,
     }: ExecutorOptions<MlAnomalyDetectionAlertParams>) => {
-      const fakeRequest = {} as KibanaRequest;
+      const fakeRequest = Object.create(null) as KibanaRequest;
       const alertingService = mlSharedServices.alertingServiceProvider(
         services.savedObjectsClient,
         fakeRequest
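A note on the `fakeRequest` pattern in this hunk and the next: `as KibanaRequest` is purely a compile-time assertion, so the swap from `{}` to `Object.create(null)` is invisible to the type checker; at runtime the object still has none of the request's fields, it just no longer inherits anything either. A hedged sketch with a stand-in interface (the real `KibanaRequest` has many more members):

```ts
// Stand-in interface – not the real KibanaRequest.
interface FakeableRequest {
  headers?: Record<string, string>;
}

// `as` only asserts the type; nothing about the runtime value changes.
const fakeRequest = Object.create(null) as FakeableRequest;

console.log(fakeRequest.headers);                // undefined, exactly as with `{} as FakeableRequest`
console.log(Object.getPrototypeOf(fakeRequest)); // null – no Object.prototype in the chain
```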
@@ -266,7 +266,7 @@ export function registerJobsMonitoringRuleType({
       throw new AlertsClientError();
     }
 
-    const fakeRequest = {} as KibanaRequest;
+    const fakeRequest = Object.create(null) as KibanaRequest;
     const { getTestsResults } = mlServicesProviders.jobsHealthServiceProvider(
       services.savedObjectsClient,
       fakeRequest,
@@ -13,7 +13,7 @@ import {
 } from '../../../../common/types/capabilities';
 
 export function getAdminCapabilities() {
-  const caps: any = {};
+  const caps: any = Object.create(null);
   Object.keys(adminMlCapabilities).forEach((k) => {
     caps[k] = true;
   });
@@ -21,7 +21,7 @@ export function getAdminCapabilities() {
 }
 
 export function getUserCapabilities() {
-  const caps: any = {};
+  const caps: any = Object.create(null);
   Object.keys(userMlCapabilities).forEach((k) => {
     caps[k] = true;
   });
@@ -65,8 +65,8 @@ const cardinalityCheckProvider = (client: IScopedClusterClient) => {
 
   const { detectors, influencers, bucket_span: bucketSpan } = analysisConfig;
 
-  let overallCardinality = {};
-  let maxBucketCardinality = {};
+  let overallCardinality = Object.create(null);
+  let maxBucketCardinality = Object.create(null);
 
   // Get fields required for the model memory estimation
   const overallCardinalityFields: Set<string> = detectors.reduce(
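One type-level consequence visible in the hunk above: `Object.create(null)` is typed `any`, so an un-annotated `let overallCardinality = Object.create(null)` infers `any`, where `let overallCardinality = {}` previously inferred `{}`. Declarations that carry an explicit annotation (such as `let rollupFields: RollupFields = ...` later in this diff) keep their typing. A small sketch of the difference (an observation about the pattern, not a claim from the PR):

```ts
let inferredFromLiteral = {};                 // inferred type: {}
let inferredFromCreate = Object.create(null); // inferred type: any

// The literal-typed variable still rejects bogus member access:
// inferredFromLiteral.someField;             // compile error on type {}
inferredFromCreate.someField;                 // compiles: `any` disables checking

// An explicit annotation restores checking while keeping the null prototype:
const checked: Record<string, number> = Object.create(null);
checked['count'] = 1;
```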
@@ -228,7 +228,7 @@ export class AnalyticsManager {
   private async getInitialElementsModelRoot(modelId: string): Promise<InitialElementsReturnType> {
     const resultElements = [];
     const modelElements = [];
-    const details: any = {};
+    const details: any = Object.create(null);
     let data: estypes.MlTrainedModelConfig | estypes.MlDataframeAnalyticsSummary | undefined;
     // fetch model data and create model elements
     data = this.findTrainedModel(modelId);
@@ -296,7 +296,7 @@ export class AnalyticsManager {
   ): Promise<InitialElementsReturnType> {
     const resultElements = [];
     const modelElements = [];
-    const details: any = {};
+    const details: any = Object.create(null);
     const data = this.findJob(jobId);
 
     const nextLinkId = data?.source?.index[0];
@@ -351,7 +351,7 @@ export class AnalyticsManager {
     try {
       await this.initData();
       // Create first node for incoming analyticsId or modelId
-      let initialData: InitialElementsReturnType = {} as InitialElementsReturnType;
+      let initialData: InitialElementsReturnType = Object.create(null) as InitialElementsReturnType;
       const job = analyticsId === undefined ? undefined : this.findJob(analyticsId);
       if (analyticsId !== undefined && job !== undefined) {
         const jobCreateTime = job.create_time!;
@@ -355,7 +355,7 @@ export class DataRecognizer {
 
     const jobs: ModuleJob[] = [];
     const datafeeds: ModuleDatafeed[] = [];
-    const kibana: KibanaObjects = {};
+    const kibana: KibanaObjects = Object.create(null);
     // load all of the job configs
     if (isModule(module)) {
       const tempJobs: ModuleJob[] = module.jobs.map((j) => ({
@@ -595,7 +595,7 @@ export class DataRecognizer {
   }
 
   public async dataRecognizerJobsExist(moduleId: string): Promise<JobExistResult> {
-    const results = {} as JobExistResult;
+    const results = Object.create(null) as JobExistResult;
 
     // Load the module with the specified ID and check if the jobs
     // in the module have been created.
@@ -851,7 +851,7 @@ export class DataRecognizer {
     start?: number,
     end?: number
   ): Promise<{ [key: string]: DatafeedResponse }> {
-    const results = {} as { [key: string]: DatafeedResponse };
+    const results = Object.create(null) as { [key: string]: DatafeedResponse };
     for (const datafeed of datafeeds) {
       results[datafeed.id] = await this._startDatafeed(datafeed, start, end);
     }
@@ -957,7 +957,9 @@ export class DataRecognizer {
   // creates an empty results object,
   // listing each job/datafeed/savedObject with a save success boolean
   private _createResultsTemplate(moduleConfig: Module): DataRecognizerConfigResponse {
-    const results: DataRecognizerConfigResponse = {} as DataRecognizerConfigResponse;
+    const results: DataRecognizerConfigResponse = Object.create(
+      null
+    ) as DataRecognizerConfigResponse;
     const reducedConfig = {
       jobs: moduleConfig.jobs,
       datafeeds: moduleConfig.datafeeds,
@@ -982,7 +984,7 @@ export class DataRecognizer {
       if (Array.isArray(reducedConfig[i])) {
         createResultsItems(reducedConfig[i] as any[], results, i);
       } else {
-        results[i] = {} as any;
+        results[i] = Object.create(null);
         Object.keys(reducedConfig[i]).forEach((k) => {
           createResultsItems((reducedConfig[i] as Module['kibana'])[k] as any[], results[i], k);
         });
@@ -1174,7 +1176,7 @@ export class DataRecognizer {
       );
 
       if (!job.config.analysis_limits) {
-        job.config.analysis_limits = {} as AnalysisLimits;
+        job.config.analysis_limits = Object.create(null) as AnalysisLimits;
       }
 
       job.config.analysis_limits.model_memory_limit = modelMemoryLimit;
@@ -1206,7 +1208,7 @@ export class DataRecognizer {
       // so set the jobs mml to be the max
 
      if (!job.config.analysis_limits) {
-        job.config.analysis_limits = {} as AnalysisLimits;
+        job.config.analysis_limits = Object.create(null) as AnalysisLimits;
      }
 
      job.config.analysis_limits.model_memory_limit = maxMml;
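A caveat worth keeping in mind for all of the `results`-style objects in this diff: a null-prototype object carries no `hasOwnProperty`, `toString`, or other `Object.prototype` members of its own, so such helpers must not be called as methods on the object itself. A sketch of the safe alternatives (not code from the PR; `Object.hasOwn` requires ES2022 / Node 16.9+):

```ts
const results: Record<string, boolean> = Object.create(null);
results['datafeed-1'] = true;

// results.hasOwnProperty('datafeed-1');           // TypeError at runtime: not a function
console.log(Object.hasOwn(results, 'datafeed-1')); // true – safe own-property check
console.log('datafeed-1' in results);              // true – also safe
// console.log(`${results}`);                      // TypeError: no inherited toString
```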
@@ -246,7 +246,7 @@ export class DataVisualizer {
   ): Promise<BatchStats[]> {
     // Batch up fields by type, getting stats for multiple fields at a time.
     const batches: Field[][] = [];
-    const batchedFields: { [key: string]: Field[][] } = {};
+    const batchedFields: { [key: string]: Field[][] } = Object.create(null);
     each(fields, (field) => {
       if (field.fieldName === undefined) {
         // undefined fieldName is used for a document count request.
@@ -584,7 +584,7 @@ export class DataVisualizer {
       { maxRetries: 0 }
     );
 
-    const buckets: { [key: string]: number } = {};
+    const buckets: { [key: string]: number } = Object.create(null);
     const dataByTimeBucket: Array<{ key: string; doc_count: number }> = get(
       body,
       ['aggregations', 'eventRate', 'buckets'],
@@ -628,7 +628,7 @@ export class DataVisualizer {
       () => (count += PERCENTILE_SPACING)
     );
 
-    const aggs: { [key: string]: any } = {};
+    const aggs: { [key: string]: any } = Object.create(null);
     fields.forEach((field, i) => {
       const safeFieldName = getSafeAggregationName(field.fieldName, i);
       aggs[`${safeFieldName}_field_stats`] = {
@@ -757,7 +757,7 @@ export class DataVisualizer {
     const size = 0;
     const filterCriteria = buildBaseFilterCriteria(timeFieldName, earliestMs, latestMs, query);
 
-    const aggs: Aggs = {};
+    const aggs: Aggs = Object.create(null);
     fields.forEach((field, i) => {
       const safeFieldName = getSafeAggregationName(field.fieldName, i);
       const top = {
@@ -839,7 +839,7 @@ export class DataVisualizer {
     const size = 0;
     const filterCriteria = buildBaseFilterCriteria(timeFieldName, earliestMs, latestMs, query);
 
-    const aggs: Aggs = {};
+    const aggs: Aggs = Object.create(null);
     fields.forEach((field, i) => {
       const safeFieldName = getSafeAggregationName(field.fieldName, i);
       aggs[`${safeFieldName}_field_stats`] = {
@@ -910,7 +910,7 @@ export class DataVisualizer {
     const size = 0;
     const filterCriteria = buildBaseFilterCriteria(timeFieldName, earliestMs, latestMs, query);
 
-    const aggs: Aggs = {};
+    const aggs: Aggs = Object.create(null);
     fields.forEach((field, i) => {
       const safeFieldName = getSafeAggregationName(field.fieldName, i);
       aggs[`${safeFieldName}_value_count`] = {
@@ -146,7 +146,7 @@ export function fieldsServiceProvider({ asCurrentUser }: IScopedClusterClient) {
       mustCriteria.push(query);
     }
 
-    const runtimeMappings: any = {};
+    const runtimeMappings: any = Object.create(null);
     const aggs = fieldsToAgg.reduce(
       (obj, field) => {
         if (
@@ -77,7 +77,7 @@ export class FilterManager {
     ]);
 
     // Build a map of filter_ids against jobs and detectors using that filter.
-    let filtersInUse: FiltersInUse = {};
+    let filtersInUse: FiltersInUse = Object.create(null);
     if (results[JOBS] && (results[JOBS] as estypes.MlGetJobsResponse).jobs) {
       filtersInUse = this.buildFiltersInUse((results[JOBS] as estypes.MlGetJobsResponse).jobs);
     }
@@ -136,7 +136,7 @@ export class FilterManager {
 
   buildFiltersInUse(jobsList: Job[]) {
     // Build a map of filter_ids against jobs and detectors using that filter.
-    const filtersInUse: FiltersInUse = {};
+    const filtersInUse: FiltersInUse = Object.create(null);
     jobsList.forEach((job) => {
       const detectors = job.analysis_config.detectors;
       detectors.forEach((detector) => {
@@ -89,7 +89,7 @@ export function jobAuditMessagesProvider(
       gte = `now-${from}`;
     }
 
-    let timeFilter = {};
+    let timeFilter = Object.create(null);
     if (from !== null) {
       timeFilter = {
         range: {
@@ -41,7 +41,7 @@ export function datafeedsProvider(client: IScopedClusterClient, mlClient: MlClie
       return acc;
     }, {} as { [id: string]: boolean });
 
-    const results: Results = {};
+    const results: Results = Object.create(null);
 
     async function doStart(datafeedId: string): Promise<{ started: boolean; error?: string }> {
       if (doStartsCalled[datafeedId] === false) {
@@ -114,7 +114,7 @@ export function datafeedsProvider(client: IScopedClusterClient, mlClient: MlClie
   }
 
   async function stopDatafeeds(datafeedIds: string[]) {
-    const results: Results = {};
+    const results: Results = Object.create(null);
 
     for (const datafeedId of datafeedIds) {
      try {
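As a design note: accumulators like `results` keyed by arbitrary ids could also use a `Map`, which sidesteps prototype pollution without the null-prototype idiom; the plain-object shape is presumably kept here because these values are typed and returned to clients as JSON. A hedged sketch of the alternative, not what the PR does:

```ts
// Hypothetical alternative design, not from this PR.
const results = new Map<string, { started: boolean; error?: string }>();
results.set('datafeed-1', { started: true });

console.log(results.get('__proto__'));    // undefined – Map keys never touch a prototype
console.log(Object.fromEntries(results)); // convert when a plain-object response is needed
```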
@@ -22,8 +22,8 @@ export function groupsProvider(mlClient: MlClient) {
   const calMngr = new CalendarManager(mlClient);
 
   async function getAllGroups() {
-    const groups: { [id: string]: Group } = {};
-    const jobIds: { [id: string]: undefined | null } = {};
+    const groups: { [id: string]: Group } = Object.create(null);
+    const jobIds: { [id: string]: undefined | null } = Object.create(null);
     const [body, calendars] = await Promise.all([mlClient.getJobs(), calMngr.getAllCalendars()]);
 
     const { jobs } = body;
@@ -70,7 +70,7 @@ export function groupsProvider(mlClient: MlClient) {
   }
 
   async function updateGroups(jobs: UpdateGroupsRequest['jobs']) {
-    const results: Results = {};
+    const results: Results = Object.create(null);
     for (const job of jobs) {
       const { jobId, groups } = job;
       try {
@@ -89,7 +89,7 @@ export function jobsProvider(
     deleteUserAnnotations = false,
     deleteAlertingRules = false
   ) {
-    const results: Results = {};
+    const results: Results = Object.create(null);
     const datafeedIds = await getDatafeedIdsByJobId();
 
     if (deleteAlertingRules && rulesClient) {
@@ -149,7 +149,7 @@ export function jobsProvider(
   }
 
   async function closeJobs(jobIds: string[]) {
-    const results: Results = {};
+    const results: Results = Object.create(null);
     for (const jobId of jobIds) {
       try {
         await mlClient.closeJob({ job_id: jobId });
@@ -185,7 +185,7 @@ export function jobsProvider(
   }
 
   async function resetJobs(jobIds: string[], deleteUserAnnotations = false) {
-    const results: ResetJobsResponse = {};
+    const results: ResetJobsResponse = Object.create(null);
     for (const jobId of jobIds) {
       try {
         // @ts-expect-error @elastic-elasticsearch resetJob response incorrect, missing task
@@ -229,7 +229,7 @@ export function jobsProvider(
   async function jobsSummary(jobIds: string[] = []) {
     const fullJobsList: CombinedJobWithStats[] = await createFullJobsList();
     const fullJobsIds = fullJobsList.map((job) => job.job_id);
-    let auditMessagesByJob: { [id: string]: AuditMessage } = {};
+    let auditMessagesByJob: { [id: string]: AuditMessage } = Object.create(null);
 
     // even if there are errors getting the audit messages, we still want to show the full list
     try {
@@ -309,12 +309,12 @@ export function jobsProvider(
 
   async function jobsWithTimerange() {
     const fullJobsList = await createFullJobsList();
-    const jobsMap: { [id: string]: string[] } = {};
+    const jobsMap: { [id: string]: string[] } = Object.create(null);
 
     const jobs = fullJobsList.map((job) => {
       jobsMap[job.job_id] = job.groups || [];
       const hasDatafeed = isPopulatedObject(job.datafeed_config);
-      const timeRange: { to?: number; from?: number } = {};
+      const timeRange: { to?: number; from?: number } = Object.create(null);
 
       const dataCounts = job.data_counts;
       if (dataCounts !== undefined) {
@@ -378,9 +378,9 @@ export function jobsProvider(
 
   async function createFullJobsList(jobIds: string[] = []) {
     const jobs: CombinedJobWithStats[] = [];
-    const groups: { [jobId: string]: string[] } = {};
-    const datafeeds: { [id: string]: DatafeedWithStats } = {};
-    const calendarsByJobId: { [jobId: string]: string[] } = {};
+    const groups: { [jobId: string]: string[] } = Object.create(null);
+    const datafeeds: { [id: string]: DatafeedWithStats } = Object.create(null);
+    const calendarsByJobId: { [jobId: string]: string[] } = Object.create(null);
     const globalCalendars: string[] = [];
 
     const jobIdsString = jobIds.join();
@@ -583,7 +583,7 @@ export function jobsProvider(
     jobIds: string[] = [],
     allSpaces: boolean = false
   ): Promise<JobsExistResponse> {
-    const results: JobsExistResponse = {};
+    const results: JobsExistResponse = Object.create(null);
     for (const jobId of jobIds) {
       try {
         if (jobId === '') {
@@ -669,7 +669,7 @@ export function jobsProvider(
     jobs: Array<{ job: Job; datafeed: Datafeed }>,
     authHeader: AuthorizationHeader
   ) {
-    const results: BulkCreateResults = {};
+    const results: BulkCreateResults = Object.create(null);
     await Promise.all(
       jobs.map(async ({ job, datafeed }) => {
         results[job.job_id] = { job: { success: false }, datafeed: { success: false } };
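It is worth noting (an observation about the pattern, not a claim from the PR) that null-prototype objects serialize exactly like plain objects, so the JSON responses built from accumulators such as `jobsMap` above do not change shape:

```ts
const jobsMap: { [id: string]: string[] } = Object.create(null);
jobsMap['my-job'] = ['group-a'];

console.log(JSON.stringify(jobsMap)); // {"my-job":["group-a"]} – same output as with {}
```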
@@ -69,7 +69,7 @@ export function newJobLineChartProvider({ asCurrentUser }: IScopedClusterClient)
 function processSearchResults(resp: any, fields: string[]): ProcessedResults {
   const aggregationsByTime = get(resp, ['aggregations', 'times', 'buckets'], []);
 
-  const tempResults: Record<DtrIndex, Result[]> = {};
+  const tempResults: Record<DtrIndex, Result[]> = Object.create(null);
   fields.forEach((f, i) => (tempResults[i] = []));
 
   aggregationsByTime.forEach((dataForTime: any) => {
@@ -166,7 +166,10 @@ function getSearchJsonFromConfig(
 
   json.body.query = query;
 
-  const aggs: Record<number, Record<string, { field: string; percents?: string[] }>> = {};
+  const aggs: Record<
+    number,
+    Record<string, { field: string; percents?: string[] }>
+  > = Object.create(null);
 
   aggFieldNamePairs.forEach(({ agg, field }, i) => {
     if (field !== null && field !== EVENT_RATE_FIELD_ID) {
@@ -75,7 +75,7 @@ export function newJobPopulationChartProvider({ asCurrentUser }: IScopedClusterC
 function processSearchResults(resp: any, fields: string[]): ProcessedResults {
   const aggregationsByTime = get(resp, ['aggregations', 'times', 'buckets'], []);
 
-  const tempResults: Record<DtrIndex, Result[]> = {};
+  const tempResults: Record<DtrIndex, Result[]> = Object.create(null);
   fields.forEach((f, i) => (tempResults[i] = []));
 
   aggregationsByTime.forEach((dataForTime: any) => {
@@ -188,7 +188,7 @@ function getPopulationSearchJsonFromConfig(
 
   json.body.query = query;
 
-  const aggs: any = {};
+  const aggs: any = Object.create(null);
 
   aggFieldNamePairs.forEach(({ agg, field, by }, i) => {
     if (field === EVENT_RATE_FIELD_ID) {
@@ -125,7 +125,7 @@ class FieldsService {
   // the _indexPattern will be replaced with a comma separated list
   // of index patterns from all of the rollup jobs
   public async getData(includeNested: boolean = false): Promise<NewJobCaps> {
-    let rollupFields: RollupFields = {};
+    let rollupFields: RollupFields = Object.create(null);
 
     if (this._isRollup) {
       const rollupService = await rollupServiceProvider(
@@ -159,7 +159,7 @@ class FieldsService {
 function combineAllRollupFields(
   rollupConfigs: estypes.RollupGetRollupCapsRollupCapabilitySummary[]
 ): RollupFields {
-  const rollupFields: RollupFields = {};
+  const rollupFields: RollupFields = Object.create(null);
   rollupConfigs.forEach((conf) => {
     Object.keys(conf.fields).forEach((fieldName) => {
       if (rollupFields[fieldName] === undefined) {
@@ -23,7 +23,7 @@ callAs.search.mockResponse({
   hits: { total: { value: 1, relation: ES_CLIENT_TOTAL_HITS_RELATION.EQ } },
 });
 
-const authHeader: AuthorizationHeader = {};
+const authHeader: AuthorizationHeader = Object.create(null);
 
 const mlClusterClient = {
   asCurrentUser: callAs,
@@ -227,7 +227,7 @@ export function anomalyChartsDataProvider(mlClient: MlClient, client: IScopedClu
       esSearchRequest.query!.bool!.minimum_should_match = shouldCriteria.length / 2;
     }
 
-    esSearchRequest.aggs!.byTime.aggs = {};
+    esSearchRequest.aggs!.byTime.aggs = Object.create(null);
 
     if (metricFieldName !== undefined && metricFieldName !== '' && metricFunction) {
       const metricAgg: any = {
@@ -258,7 +258,7 @@ export function anomalyChartsDataProvider(mlClient: MlClient, client: IScopedClu
         }
         esSearchRequest.aggs!.byTime.aggs = tempAggs;
       } else {
-        esSearchRequest.aggs!.byTime.aggs.metric = metricAgg;
+        esSearchRequest.aggs!.byTime.aggs!.metric = metricAgg;
       }
     } else {
       // if metricFieldName is not defined, it's probably a variation of the non zero count function
@@ -465,9 +465,9 @@ export function anomalyChartsDataProvider(mlClient: MlClient, client: IScopedClu
       return { records: [], errors: undefined };
     }
     // Aggregate by job, detector, and analysis fields (partition, by, over).
-    const aggregatedData: Record<string, any> = {};
+    const aggregatedData: Record<string, any> = Object.create(null);
 
-    const jobsErrorMessage: Record<string, string> = {};
+    const jobsErrorMessage: Record<string, string> = Object.create(null);
     each(anomalyRecords, (record) => {
       // Check if we can plot a chart for this record, depending on whether the source data
       // is chartable, and if model plot is enabled for the job.
@@ -516,13 +516,13 @@ export function anomalyChartsDataProvider(mlClient: MlClient, client: IScopedClu
       }
       const jobId = record.job_id;
       if (aggregatedData[jobId] === undefined) {
-        aggregatedData[jobId] = {};
+        aggregatedData[jobId] = Object.create(null);
       }
       const detectorsForJob = aggregatedData[jobId];
 
       const detectorIndex = record.detector_index;
       if (detectorsForJob[detectorIndex] === undefined) {
-        detectorsForJob[detectorIndex] = {};
+        detectorsForJob[detectorIndex] = Object.create(null);
       }
 
       // TODO - work out how best to display results from detectors with just an over field.
@@ -534,11 +534,11 @@ export function anomalyChartsDataProvider(mlClient: MlClient, client: IScopedClu
       const groupsForDetector = detectorsForJob[detectorIndex];
 
       if (groupsForDetector[firstFieldName] === undefined) {
-        groupsForDetector[firstFieldName] = {};
+        groupsForDetector[firstFieldName] = Object.create(null);
       }
       const valuesForGroup: Record<string, any> = groupsForDetector[firstFieldName];
       if (valuesForGroup[firstFieldValue] === undefined) {
-        valuesForGroup[firstFieldValue] = {};
+        valuesForGroup[firstFieldValue] = Object.create(null);
       }
 
       const dataForGroupValue = valuesForGroup[firstFieldValue];
@@ -568,12 +568,12 @@ export function anomalyChartsDataProvider(mlClient: MlClient, client: IScopedClu
 
       if (secondFieldName !== undefined && secondFieldValue !== undefined) {
         if (dataForGroupValue[secondFieldName] === undefined) {
-          dataForGroupValue[secondFieldName] = {};
+          dataForGroupValue[secondFieldName] = Object.create(null);
         }
 
         const splitsForGroup = dataForGroupValue[secondFieldName];
         if (splitsForGroup[secondFieldValue] === undefined) {
-          splitsForGroup[secondFieldValue] = {};
+          splitsForGroup[secondFieldValue] = Object.create(null);
         }
 
         const dataForSplitValue = splitsForGroup[secondFieldValue];
@@ -604,7 +604,7 @@ export function anomalyChartsDataProvider(mlClient: MlClient, client: IScopedClu
     });
 
     // Group job id by error message instead of by job:
-    const errorMessages: Record<string, Set<string>> | undefined = {};
+    const errorMessages: Record<string, Set<string>> = Object.create(null);
     Object.keys(jobsErrorMessage).forEach((jobId) => {
       const msg = jobsErrorMessage[jobId];
       if (errorMessages[msg] === undefined) {
@@ -907,13 +907,13 @@ export function anomalyChartsDataProvider(mlClient: MlClient, client: IScopedClu
     handleError = (errorMsg: string, jobId: string) => {
       // Group the jobIds by the type of error message
       if (!errorMessages) {
-        errorMessages = {};
+        errorMessages = Object.create(null);
       }
 
-      if (errorMessages[errorMsg]) {
-        errorMessages[errorMsg].add(jobId);
+      if (errorMessages![errorMsg]) {
+        errorMessages![errorMsg].add(jobId);
       } else {
-        errorMessages[errorMsg] = new Set([jobId]);
+        errorMessages![errorMsg] = new Set([jobId]);
       }
     };
   }
@@ -1408,7 +1408,7 @@ export function anomalyChartsDataProvider(mlClient: MlClient, client: IScopedClu
     const dataByJobId = get(resp, ['aggregations', 'jobs', 'buckets'], []);
     each(dataByJobId, (dataForJob: any) => {
       const jobId: string = dataForJob.key;
-      const resultsForTime: Record<string, any> = {};
+      const resultsForTime: Record<string, any> = Object.create(null);
       const dataByTime = get(dataForJob, ['times', 'buckets'], []);
       each(dataByTime, (dataForTime: any) => {
         const time: string = dataForTime.key;
@@ -1543,7 +1543,7 @@ export function anomalyChartsDataProvider(mlClient: MlClient, client: IScopedClu
       typeof metricFunction === 'string'
     ) {
       // @ts-ignore
-      body.aggs.sample.aggs.byTime.aggs.entities.aggs = {};
+      body.aggs.sample.aggs.byTime.aggs.entities.aggs = Object.create(null);
 
       const metricAgg = {
         [metricFunction]: {
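The `errorMessages!` assertions added in the `handleError` hunk above follow from a TypeScript detail: `Object.create(null)` is typed `any`, and assigning `any` to a variable only narrows it back to its declared type, which here still includes `undefined` (the old `errorMessages = {}` narrowed directly to the object type). A simplified sketch of the behaviour, with stand-in types:

```ts
// Simplified stand-ins for the real types in the hunk.
let errorMessages: Record<string, Set<string>> | undefined;

const handleError = (errorMsg: string, jobId: string) => {
  if (!errorMessages) {
    // Object.create(null) is typed `any`; assigning `any` only narrows the
    // variable back to its declared type, which still includes `undefined`.
    errorMessages = Object.create(null);
  }

  // Hence the non-null assertions: without `!`, the compiler still considers
  // `errorMessages` possibly undefined at these accesses.
  if (errorMessages![errorMsg]) {
    errorMessages![errorMsg].add(jobId);
  } else {
    errorMessages![errorMsg] = new Set([jobId]);
  }
};

handleError('too many buckets', 'job-1');
```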
@@ -54,7 +54,7 @@ interface Influencer {
  * @param source
  */
 export function getTypicalAndActualValues(source: MlAnomalyRecordDoc) {
-  const result: { actual?: number[]; typical?: number[] } = {};
+  const result: { actual?: number[]; typical?: number[] } = Object.create(null);
 
   const functionDescription = source.function_description || '';
   const causes = source.causes || [];
@@ -254,9 +254,9 @@ export function resultsServiceProvider(mlClient: MlClient, client?: IScopedClust
       (item: any) => item.entityName === 'mlcategory'
     );
     if (categoryAnomalies.length > 0) {
-      tableData.examplesByJobId = {};
+      tableData.examplesByJobId = Object.create(null);
 
-      const categoryIdsByJobId: { [key: string]: any } = {};
+      const categoryIdsByJobId: { [key: string]: any } = Object.create(null);
       categoryAnomalies.forEach((anomaly) => {
         if (categoryIdsByJobId[anomaly.jobId] === undefined) {
           categoryIdsByJobId[anomaly.jobId] = [];
@@ -420,7 +420,7 @@ export function resultsServiceProvider(mlClient: MlClient, client?: IScopedClust
       ['aggregations', 'byJobId', 'buckets'],
       []
     );
-    const timestampByJobId: { [key: string]: number | undefined } = {};
+    const timestampByJobId: { [key: string]: number | undefined } = Object.create(null);
     bucketsByJobId.forEach((bucket) => {
       timestampByJobId[bucket.key] = bucket.maxTimestamp.value;
     });
@@ -446,7 +446,7 @@ export function resultsServiceProvider(mlClient: MlClient, client?: IScopedClust
       [jobId]
     );
 
-    const examplesByCategoryId: { [key: string]: any } = {};
+    const examplesByCategoryId: { [key: string]: any } = Object.create(null);
     // @ts-expect-error incorrect search response type
     if (body.hits.total.value > 0) {
       body.hits.hits.forEach((hit: any) => {
@@ -668,7 +668,7 @@ export function dataFrameAnalyticsRoutes(
     routeGuard.fullLicenseAPIGuard(async ({ client, mlClient, request, response }) => {
       try {
         const { analyticsIds, allSpaces } = request.body;
-        const results: { [id: string]: { exists: boolean } } = {};
+        const results: { [id: string]: { exists: boolean } } = Object.create(null);
         for (const id of analyticsIds) {
           try {
             const body = allSpaces
@@ -617,7 +617,8 @@ export function trainedModelsRoutes(
       try {
         const { deploymentId, modelId } = request.params;
 
-        const results: Record<string, { success: boolean; error?: ErrorType }> = {};
+        const results: Record<string, { success: boolean; error?: ErrorType }> =
+          Object.create(null);
 
         for (const id of deploymentId.split(',')) {
           try {
@@ -404,7 +404,7 @@ export function checksFactory(
       .reduce((acc, cur) => {
         const type = cur.type;
         if (acc[type] === undefined) {
-          acc[type] = {};
+          acc[type] = Object.create(null);
         }
         acc[type][cur.jobId] = cur.namespaces;
         return acc;
@@ -75,7 +75,7 @@ export function mlSavedObjectServiceFactory(
     currentSpaceOnly: boolean = true
   ) {
     await isMlReady();
-    const filterObject: JobObjectFilter = {};
+    const filterObject: JobObjectFilter = Object.create(null);
 
     if (jobType !== undefined) {
       filterObject.type = jobType;
@@ -225,7 +225,7 @@ export function mlSavedObjectServiceFactory(
 
   async function getAllJobObjectsForAllSpaces(jobType?: JobType, jobId?: string) {
     await isMlReady();
-    const filterObject: JobObjectFilter = {};
+    const filterObject: JobObjectFilter = Object.create(null);
 
     if (jobType !== undefined) {
       filterObject.type = jobType;
@@ -359,7 +359,7 @@ export function mlSavedObjectServiceFactory(
       return {};
     }
 
-    const results: SavedObjectResult = {};
+    const results: SavedObjectResult = Object.create(null);
     const jobs = await _getJobObjects(jobType);
     const jobObjectIdMap = new Map<string, string>();
     const jobObjectsToUpdate: Array<{ type: string; id: string }> = [];
@@ -463,7 +463,7 @@ export function mlSavedObjectServiceFactory(
 
   async function _getTrainedModelObjects(modelId?: string, currentSpaceOnly: boolean = true) {
     await isMlReady();
-    const filterObject: TrainedModelObjectFilter = {};
+    const filterObject: TrainedModelObjectFilter = Object.create(null);
 
     if (modelId !== undefined) {
       filterObject.model_id = modelId;
@@ -725,7 +725,7 @@ export function mlSavedObjectServiceFactory(
     if (modelIds.length === 0 || (spacesToAdd.length === 0 && spacesToRemove.length === 0)) {
       return {};
     }
-    const results: SavedObjectResult = {};
+    const results: SavedObjectResult = Object.create(null);
     const models = await _getTrainedModelObjects();
     const trainedModelObjectIdMap = new Map<string, string>();
     const objectsToUpdate: Array<{ type: string; id: string }> = [];
@@ -61,7 +61,7 @@ export function syncSavedObjectsFactory(
         if (job.checks.savedObjectExits === false) {
           const type = 'anomaly-detector';
           if (results.savedObjectsCreated[type] === undefined) {
-            results.savedObjectsCreated[type] = {};
+            results.savedObjectsCreated[type] = Object.create(null);
           }
           if (simulate === true) {
             results.savedObjectsCreated[type]![job.jobId] = { success: true };
@@ -88,7 +88,7 @@ export function syncSavedObjectsFactory(
         if (job.checks.savedObjectExits === false) {
           const type = 'data-frame-analytics';
           if (results.savedObjectsCreated[type] === undefined) {
-            results.savedObjectsCreated[type] = {};
+            results.savedObjectsCreated[type] = Object.create(null);
           }
           if (simulate === true) {
             results.savedObjectsCreated[type]![job.jobId] = { success: true };
@@ -119,7 +119,7 @@ export function syncSavedObjectsFactory(
         const { modelId } = model;
         const type = 'trained-model';
         if (results.savedObjectsCreated[type] === undefined) {
-          results.savedObjectsCreated[type] = {};
+          results.savedObjectsCreated[type] = Object.create(null);
         }
         if (simulate === true) {
           results.savedObjectsCreated[type]![modelId] = { success: true };
@@ -161,7 +161,7 @@ export function syncSavedObjectsFactory(
         if (job.checks.jobExists === false) {
           const type = 'anomaly-detector';
           if (results.savedObjectsDeleted[type] === undefined) {
-            results.savedObjectsDeleted[type] = {};
+            results.savedObjectsDeleted[type] = Object.create(null);
           }
           if (simulate === true) {
             results.savedObjectsDeleted[type]![job.jobId] = { success: true };
@@ -191,7 +191,7 @@ export function syncSavedObjectsFactory(
         if (job.checks.jobExists === false) {
           const type = 'data-frame-analytics';
           if (results.savedObjectsDeleted[type] === undefined) {
-            results.savedObjectsDeleted[type] = {};
+            results.savedObjectsDeleted[type] = Object.create(null);
           }
           if (simulate === true) {
             results.savedObjectsDeleted[type]![job.jobId] = { success: true };
@@ -225,7 +225,7 @@ export function syncSavedObjectsFactory(
         const { modelId, namespaces } = model;
         const type = 'trained-model';
         if (results.savedObjectsDeleted[type] === undefined) {
-          results.savedObjectsDeleted[type] = {};
+          results.savedObjectsDeleted[type] = Object.create(null);
         }
 
         if (simulate === true) {
@@ -265,7 +265,7 @@ export function syncSavedObjectsFactory(
             adJobsById[job.jobId].datafeedId !== job.datafeedId)
         ) {
           if (results.datafeedsAdded[type] === undefined) {
-            results.datafeedsAdded[type] = {};
+            results.datafeedsAdded[type] = Object.create(null);
           }
           // add datafeed id for jobs where the datafeed exists but the id is missing from the saved object
           // or if the datafeed id in the saved object is not the same as the one attached to the job in es
@@ -298,7 +298,7 @@ export function syncSavedObjectsFactory(
             job.datafeedId !== undefined
         ) {
           if (results.datafeedsRemoved[type] === undefined) {
-            results.datafeedsRemoved[type] = {};
+            results.datafeedsRemoved[type] = Object.create(null);
           }
           // remove datafeed id for jobs where the datafeed no longer exists but the id is populated in the saved object
           if (simulate === true) {