kibana/x-pack/plugins/task_manager/server/task_store.test.ts
Mike Côté cb2e28d1e4
Fix task manager polling flow controls (#153491)
Fixes https://github.com/elastic/kibana/issues/151938

In this PR, I'm rewriting the Task Manager poller so it no longer runs
concurrently when timeouts occur, while also fixing the issue where
polling requests would pile up when polling takes time. To support this,
I've also made the following changes:
- Removed the observable monitor and the
`xpack.task_manager.max_poll_inactivity_cycles` setting
- Made the task store `search` and `updateByQuery` functions perform no
retries. This prevents requests from retrying 5x whenever a timeout
occurs, which caused each call to take up to 2 1/2 minutes before Kibana
saw the error (now down to 30s each). We rely on the polling cycle to
handle retries in these situations (see the sketch after this list).
- Switched the task poller tests to use `sinon` for faking timers
- Removed the `assertStillInSetup` checks on plugin setup. They felt
like a maintenance burden that was no longer necessary with my code
changes.
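
To illustrate the retry change, here's a minimal sketch (not the actual
`TaskStore` code) of what "no retries" means for the Elasticsearch client,
assuming the `@elastic/elasticsearch` client defaults (30s `requestTimeout`)
and an illustrative `searchClaimedTasks` helper:

```
import { Client } from '@elastic/elasticsearch';

const esClient = new Client({ node: 'http://localhost:9200' });

// The client retries timeouts, so a request only fails after
// requestTimeout * (1 + maxRetries). Routing the store's search and
// updateByQuery calls through a child client with retries disabled
// surfaces the timeout after a single attempt (~30s by default).
const esClientWithoutRetries = esClient.child({ maxRetries: 0 });

async function searchClaimedTasks(index: string) {
  // If this search times out, the error reaches the poller right away;
  // the next polling cycle acts as the retry.
  return await esClientWithoutRetries.search({
    index,
    body: { query: { term: { type: 'task' } } },
  });
}
```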

The main code changes are within these files (please review them
thoroughly so the polling cycle doesn't suddenly stop):
- x-pack/plugins/task_manager/server/polling/task_poller.ts
- x-pack/plugins/task_manager/server/polling_lifecycle.ts (easier to
review if you disregard whitespace `?w=1`)

## To verify
1. Tasks run normally (create a rule or something else that goes through
task manager regularly).
2. When the update by query takes a while, the request is cancelled
after 30s (or the manually configured timeout).
3. When the search for claimed tasks query takes a while, the request is
cancelled after 30s (or the manually configured timeout).

**Tips:**
<details><summary>how to slow down the search for claimed tasks
query</summary>

```
diff --git a/x-pack/plugins/task_manager/server/queries/task_claiming.ts b/x-pack/plugins/task_manager/server/queries/task_claiming.ts
index 07042650a37..2caefd63672 100644
--- a/x-pack/plugins/task_manager/server/queries/task_claiming.ts
+++ b/x-pack/plugins/task_manager/server/queries/task_claiming.ts
@@ -247,7 +247,7 @@ export class TaskClaiming {
         taskTypes,
       });

-    const docs = tasksUpdated > 0 ? await this.sweepForClaimedTasks(taskTypes, size) : [];
+    const docs = await this.sweepForClaimedTasks(taskTypes, size);

     this.emitEvents(docs.map((doc) => asTaskClaimEvent(doc.id, asOk(doc))));

@@ -346,6 +346,13 @@ export class TaskClaiming {
       size,
       sort: SortByRunAtAndRetryAt,
       seq_no_primary_term: true,
+      aggs: {
+        delay: {
+          shard_delay: {
+            value: '40s',
+          },
+        },
+      },
     });

     return docs;
```
</details>

<details><summary>how to slow down update by query requests</summary>
Not the cleanest way, but you'll see occasional request timeouts from
the updateByQuery calls. I had more luck after creating rules that run
every 1s (see the sketch after this section).

```
diff --git a/x-pack/plugins/task_manager/server/task_store.ts b/x-pack/plugins/task_manager/server/task_store.ts
index a06ee7b918a..07aa81e5388 100644
--- a/x-pack/plugins/task_manager/server/task_store.ts
+++ b/x-pack/plugins/task_manager/server/task_store.ts
@@ -126,6 +126,7 @@ export class TaskStore {
       // Timeouts are retried and make requests timeout after (requestTimeout * (1 + maxRetries))
       // The poller doesn't need retry logic because it will try again at the next polling cycle
       maxRetries: 0,
+      requestTimeout: 900,
     });
   }

@@ -458,6 +459,7 @@ export class TaskStore {
           ignore_unavailable: true,
           refresh: true,
           conflicts: 'proceed',
+          requests_per_second: 1,
           body: {
             ...opts,
             max_docs,
```
</details>
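
If you don't want to set up rules, a hypothetical alternative for generating
the same frequent task manager load is to register a no-op task type and
schedule it on a 1s interval from a dev-only plugin. The `loadTest` task
type, the helper names, and the `@kbn/task-manager-plugin/server` import
path below are assumptions for illustration, not part of this PR:

```
import type {
  TaskManagerSetupContract,
  TaskManagerStartContract,
} from '@kbn/task-manager-plugin/server';

export function registerLoadTestTask(taskManager: TaskManagerSetupContract) {
  taskManager.registerTaskDefinitions({
    loadTest: {
      title: 'loadTest',
      createTaskRunner: () => ({
        async run() {
          // No-op: the goal is just to exercise task claiming on every poll.
          return { state: {} };
        },
      }),
    },
  });
}

export async function scheduleLoadTestTasks(taskManager: TaskManagerStartContract) {
  // A handful of recurring 1s tasks keeps updateByQuery and the claimed
  // tasks search busy on every polling cycle.
  for (let i = 0; i < 5; i++) {
    await taskManager.ensureScheduled({
      id: `load-test-${i}`,
      taskType: 'loadTest',
      schedule: { interval: '1s' },
      params: {},
      state: {},
    });
  }
}
```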

---------

Co-authored-by: Kibana Machine <42973632+kibanamachine@users.noreply.github.com>
2023-05-03 09:33:10 -04:00


/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { Client } from '@elastic/elasticsearch';
import type * as estypes from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
import _ from 'lodash';
import { first } from 'rxjs/operators';
import {
TaskInstance,
TaskStatus,
TaskLifecycleResult,
SerializedConcreteTaskInstance,
} from './task';
import { elasticsearchServiceMock, savedObjectsServiceMock } from '@kbn/core/server/mocks';
import { TaskStore, SearchOpts, AggregationOpts } from './task_store';
import { savedObjectsRepositoryMock } from '@kbn/core/server/mocks';
import { SavedObjectAttributes, SavedObjectsErrorHelpers } from '@kbn/core/server';
import { TaskTypeDictionary } from './task_type_dictionary';
import { mockLogger } from './test_utils';
import { AdHocTaskCounter } from './lib/adhoc_task_counter';
import { asErr } from './lib/result_type';
const savedObjectsClient = savedObjectsRepositoryMock.create();
const serializer = savedObjectsServiceMock.createSerializer();
const adHocTaskCounter = new AdHocTaskCounter();
const randomId = () => `id-${_.random(1, 20)}`;
beforeEach(() => jest.resetAllMocks());
const mockedDate = new Date('2019-02-12T21:01:22.479Z');
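// Replace the global Date so that `new Date()` and `Date.now()` always
// return the fixed mockedDate, keeping serialized timestamps deterministic.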
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(global as any).Date = class Date {
constructor() {
return mockedDate;
}
static now() {
return mockedDate.getTime();
}
};
const taskDefinitions = new TaskTypeDictionary(mockLogger());
taskDefinitions.registerTaskDefinitions({
report: {
title: 'report',
createTaskRunner: jest.fn(),
},
dernstraight: {
title: 'dernstraight',
createTaskRunner: jest.fn(),
},
yawn: {
title: 'yawn',
createTaskRunner: jest.fn(),
},
});
describe('TaskStore', () => {
describe('schedule', () => {
let store: TaskStore;
beforeAll(() => {
store = new TaskStore({
index: 'tasky',
taskManagerId: '',
serializer,
esClient: elasticsearchServiceMock.createClusterClient().asInternalUser,
definitions: taskDefinitions,
savedObjectsRepository: savedObjectsClient,
adHocTaskCounter,
});
});
afterEach(() => {
adHocTaskCounter.reset();
});
async function testSchedule(task: unknown) {
savedObjectsClient.create.mockImplementation(async (type: string, attributes: unknown) => ({
id: 'testid',
type,
attributes,
references: [],
version: '123',
}));
const result = await store.schedule(task as TaskInstance);
expect(savedObjectsClient.create).toHaveBeenCalledTimes(1);
return result;
}
test('serializes the params and state', async () => {
const task = {
id: 'id',
params: { hello: 'world' },
state: { foo: 'bar' },
taskType: 'report',
traceparent: 'apmTraceparent',
};
const result = await testSchedule(task);
expect(savedObjectsClient.create).toHaveBeenCalledWith(
'task',
{
attempts: 0,
schedule: undefined,
params: '{"hello":"world"}',
retryAt: null,
runAt: '2019-02-12T21:01:22.479Z',
scheduledAt: '2019-02-12T21:01:22.479Z',
scope: undefined,
startedAt: null,
state: '{"foo":"bar"}',
status: 'idle',
taskType: 'report',
user: undefined,
traceparent: 'apmTraceparent',
},
{
id: 'id',
refresh: false,
}
);
expect(result).toEqual({
id: 'testid',
attempts: 0,
schedule: undefined,
params: { hello: 'world' },
retryAt: null,
runAt: mockedDate,
scheduledAt: mockedDate,
scope: undefined,
startedAt: null,
state: { foo: 'bar' },
status: 'idle',
taskType: 'report',
user: undefined,
version: '123',
traceparent: 'apmTraceparent',
});
});
test('returns a concrete task instance', async () => {
const task = {
params: { hello: 'world' },
state: { foo: 'bar' },
taskType: 'report',
};
const result = await testSchedule(task);
expect(result).toMatchObject({
...task,
id: 'testid',
});
});
test('sets runAt to now if not specified', async () => {
await testSchedule({ taskType: 'dernstraight', params: {}, state: {} });
expect(savedObjectsClient.create).toHaveBeenCalledTimes(1);
const attributes = savedObjectsClient.create.mock
.calls[0][1] as SerializedConcreteTaskInstance;
expect(new Date(attributes.runAt as string).getTime()).toEqual(mockedDate.getTime());
});
test('ensures params and state are not null', async () => {
await testSchedule({ taskType: 'yawn' });
expect(savedObjectsClient.create).toHaveBeenCalledTimes(1);
const attributes = savedObjectsClient.create.mock
.calls[0][1] as SerializedConcreteTaskInstance;
expect(attributes.params).toEqual('{}');
expect(attributes.state).toEqual('{}');
});
test('errors if the task type is unknown', async () => {
await expect(testSchedule({ taskType: 'nope', params: {}, state: {} })).rejects.toThrow(
/Unsupported task type "nope"/i
);
});
test('pushes error from saved objects client to errors$', async () => {
const task: TaskInstance = {
id: 'id',
params: { hello: 'world' },
state: { foo: 'bar' },
taskType: 'report',
};
const firstErrorPromise = store.errors$.pipe(first()).toPromise();
savedObjectsClient.create.mockRejectedValue(new Error('Failure'));
await expect(store.schedule(task)).rejects.toThrowErrorMatchingInlineSnapshot(`"Failure"`);
expect(await firstErrorPromise).toMatchInlineSnapshot(`[Error: Failure]`);
});
test('increments adHocTaskCounter', async () => {
const task: TaskInstance = {
id: 'id',
params: { hello: 'world' },
state: { foo: 'bar' },
taskType: 'report',
};
await testSchedule(task);
expect(adHocTaskCounter.count).toEqual(1);
});
test('does not increment adHocTaskCounter if the task is recurring', async () => {
const task: TaskInstance = {
id: 'id',
params: { hello: 'world' },
state: { foo: 'bar' },
taskType: 'report',
schedule: { interval: '1m' },
};
await testSchedule(task);
expect(adHocTaskCounter.count).toEqual(0);
});
});
describe('fetch', () => {
let store: TaskStore;
let esClient: ReturnType<typeof elasticsearchServiceMock.createClusterClient>['asInternalUser'];
let childEsClient: ReturnType<
typeof elasticsearchServiceMock.createClusterClient
>['asInternalUser'];
beforeAll(() => {
esClient = elasticsearchServiceMock.createClusterClient().asInternalUser;
childEsClient = elasticsearchServiceMock.createClusterClient().asInternalUser;
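// TaskStore runs its fetch searches through a child client obtained from
// esClient.child(), so have child() return a separate mock to assert against.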
esClient.child.mockReturnValue(childEsClient as unknown as Client);
store = new TaskStore({
index: 'tasky',
taskManagerId: '',
serializer,
esClient,
definitions: taskDefinitions,
savedObjectsRepository: savedObjectsClient,
adHocTaskCounter,
});
});
async function testFetch(opts?: SearchOpts, hits: Array<estypes.SearchHit<unknown>> = []) {
childEsClient.search.mockResponse({
hits: { hits, total: hits.length },
} as estypes.SearchResponse);
const result = await store.fetch(opts);
expect(childEsClient.search).toHaveBeenCalledTimes(1);
return {
result,
args: childEsClient.search.mock.calls[0][0],
};
}
test('empty call filters by type, sorts by runAt and id', async () => {
const { args } = await testFetch();
expect(args).toMatchObject({
index: 'tasky',
body: {
sort: [{ 'task.runAt': 'asc' }],
query: { term: { type: 'task' } },
},
});
});
test('allows custom queries', async () => {
const { args } = await testFetch({
query: {
term: { 'task.taskType': 'bar' },
},
});
expect(args).toMatchObject({
body: {
query: {
bool: {
must: [{ term: { type: 'task' } }, { term: { 'task.taskType': 'bar' } }],
},
},
},
});
});
test('pushes error from call cluster to errors$', async () => {
const firstErrorPromise = store.errors$.pipe(first()).toPromise();
childEsClient.search.mockRejectedValue(new Error('Failure'));
await expect(store.fetch()).rejects.toThrowErrorMatchingInlineSnapshot(`"Failure"`);
expect(await firstErrorPromise).toMatchInlineSnapshot(`[Error: Failure]`);
});
});
describe('aggregate', () => {
let store: TaskStore;
let esClient: ReturnType<typeof elasticsearchServiceMock.createClusterClient>['asInternalUser'];
beforeAll(() => {
esClient = elasticsearchServiceMock.createClusterClient().asInternalUser;
store = new TaskStore({
index: 'tasky',
taskManagerId: '',
serializer,
esClient,
definitions: taskDefinitions,
savedObjectsRepository: savedObjectsClient,
adHocTaskCounter,
});
});
async function testAggregate(
opts: AggregationOpts,
hits: Array<estypes.SearchHit<unknown>> = []
) {
esClient.search.mockResponse({
hits: { hits, total: hits.length },
aggregations: {},
} as estypes.SearchResponse);
const result = await store.aggregate(opts);
expect(esClient.search).toHaveBeenCalledTimes(1);
return {
result,
args: esClient.search.mock.calls[0][0],
};
}
test('empty call filters by type, sets size to 0, passes aggregation to esClient', async () => {
const { args } = await testAggregate({
aggs: { testAgg: { terms: { field: 'task.taskType' } } },
});
expect(args).toMatchObject({
index: 'tasky',
body: {
size: 0,
query: { bool: { filter: [{ term: { type: 'task' } }] } },
aggs: { testAgg: { terms: { field: 'task.taskType' } } },
},
});
});
test('allows custom queries', async () => {
const { args } = await testAggregate({
aggs: { testAgg: { terms: { field: 'task.taskType' } } },
query: {
term: { 'task.taskType': 'bar' },
},
});
expect(args).toMatchObject({
body: {
size: 0,
query: {
bool: {
must: [
{ bool: { filter: [{ term: { type: 'task' } }] } },
{ term: { 'task.taskType': 'bar' } },
],
},
},
aggs: { testAgg: { terms: { field: 'task.taskType' } } },
},
});
});
test('allows runtime mappings', async () => {
const { args } = await testAggregate({
aggs: { testAgg: { terms: { field: 'task.taskType' } } },
runtime_mappings: { testMapping: { type: 'long', script: { source: `` } } },
});
expect(args).toMatchObject({
body: {
size: 0,
query: { bool: { filter: [{ term: { type: 'task' } }] } },
aggs: { testAgg: { terms: { field: 'task.taskType' } } },
runtime_mappings: { testMapping: { type: 'long', script: { source: `` } } },
},
});
});
test('throws error when esClient.search throws error', async () => {
esClient.search.mockRejectedValue(new Error('Failure'));
await expect(store.aggregate({ aggs: {} })).rejects.toThrowErrorMatchingInlineSnapshot(
`"Failure"`
);
});
});
describe('update', () => {
let store: TaskStore;
let esClient: ReturnType<typeof elasticsearchServiceMock.createClusterClient>['asInternalUser'];
beforeAll(() => {
esClient = elasticsearchServiceMock.createClusterClient().asInternalUser;
store = new TaskStore({
index: 'tasky',
taskManagerId: '',
serializer,
esClient,
definitions: taskDefinitions,
savedObjectsRepository: savedObjectsClient,
adHocTaskCounter,
});
});
test('refreshes the index, handles versioning', async () => {
const task = {
runAt: mockedDate,
scheduledAt: mockedDate,
startedAt: null,
retryAt: null,
id: 'task:324242',
params: { hello: 'world' },
state: { foo: 'bar' },
taskType: 'report',
attempts: 3,
status: 'idle' as TaskStatus,
version: '123',
ownerId: null,
traceparent: 'myTraceparent',
};
savedObjectsClient.update.mockImplementation(
async (type: string, id: string, attributes: SavedObjectAttributes) => {
return {
id,
type,
attributes,
references: [],
version: '123',
};
}
);
const result = await store.update(task);
expect(savedObjectsClient.update).toHaveBeenCalledWith(
'task',
task.id,
{
attempts: task.attempts,
schedule: undefined,
params: JSON.stringify(task.params),
retryAt: null,
runAt: task.runAt.toISOString(),
scheduledAt: mockedDate.toISOString(),
scope: undefined,
startedAt: null,
state: JSON.stringify(task.state),
status: task.status,
taskType: task.taskType,
user: undefined,
ownerId: null,
traceparent: 'myTraceparent',
},
{ version: '123', refresh: false }
);
expect(result).toEqual({
...task,
schedule: undefined,
retryAt: null,
scope: undefined,
startedAt: null,
user: undefined,
version: '123',
});
});
test('pushes error from saved objects client to errors$', async () => {
const task = {
runAt: mockedDate,
scheduledAt: mockedDate,
startedAt: null,
retryAt: null,
id: 'task:324242',
params: { hello: 'world' },
state: { foo: 'bar' },
taskType: 'report',
attempts: 3,
status: 'idle' as TaskStatus,
version: '123',
ownerId: null,
traceparent: '',
};
const firstErrorPromise = store.errors$.pipe(first()).toPromise();
savedObjectsClient.update.mockRejectedValue(new Error('Failure'));
await expect(store.update(task)).rejects.toThrowErrorMatchingInlineSnapshot(`"Failure"`);
expect(await firstErrorPromise).toMatchInlineSnapshot(`[Error: Failure]`);
});
});
describe('bulkUpdate', () => {
let store: TaskStore;
beforeAll(() => {
store = new TaskStore({
index: 'tasky',
taskManagerId: '',
serializer,
esClient: elasticsearchServiceMock.createClusterClient().asInternalUser,
definitions: taskDefinitions,
savedObjectsRepository: savedObjectsClient,
adHocTaskCounter,
});
});
test('pushes error from saved objects client to errors$', async () => {
const task = {
runAt: mockedDate,
scheduledAt: mockedDate,
startedAt: null,
retryAt: null,
id: 'task:324242',
params: { hello: 'world' },
state: { foo: 'bar' },
taskType: 'report',
attempts: 3,
status: 'idle' as TaskStatus,
version: '123',
ownerId: null,
traceparent: '',
};
const firstErrorPromise = store.errors$.pipe(first()).toPromise();
savedObjectsClient.bulkUpdate.mockRejectedValue(new Error('Failure'));
await expect(store.bulkUpdate([task])).rejects.toThrowErrorMatchingInlineSnapshot(
`"Failure"`
);
expect(await firstErrorPromise).toMatchInlineSnapshot(`[Error: Failure]`);
});
});
describe('remove', () => {
let store: TaskStore;
beforeAll(() => {
store = new TaskStore({
index: 'tasky',
taskManagerId: '',
serializer,
esClient: elasticsearchServiceMock.createClusterClient().asInternalUser,
definitions: taskDefinitions,
savedObjectsRepository: savedObjectsClient,
adHocTaskCounter,
});
});
test('removes the task with the specified id', async () => {
const id = randomId();
const result = await store.remove(id);
expect(result).toBeUndefined();
expect(savedObjectsClient.delete).toHaveBeenCalledWith('task', id);
});
test('pushes error from saved objects client to errors$', async () => {
const firstErrorPromise = store.errors$.pipe(first()).toPromise();
savedObjectsClient.delete.mockRejectedValue(new Error('Failure'));
await expect(store.remove(randomId())).rejects.toThrowErrorMatchingInlineSnapshot(
`"Failure"`
);
expect(await firstErrorPromise).toMatchInlineSnapshot(`[Error: Failure]`);
});
});
describe('bulkRemove', () => {
let store: TaskStore;
const tasksIdsToDelete = [randomId(), randomId()];
beforeAll(() => {
store = new TaskStore({
index: 'tasky',
taskManagerId: '',
serializer,
esClient: elasticsearchServiceMock.createClusterClient().asInternalUser,
definitions: taskDefinitions,
savedObjectsRepository: savedObjectsClient,
adHocTaskCounter,
});
});
test('removes the tasks with the specified ids', async () => {
const result = await store.bulkRemove(tasksIdsToDelete);
expect(result).toBeUndefined();
expect(savedObjectsClient.bulkDelete).toHaveBeenCalledWith([
{ type: 'task', id: tasksIdsToDelete[0] },
{ type: 'task', id: tasksIdsToDelete[1] },
]);
});
test('pushes error from saved objects client to errors$', async () => {
const firstErrorPromise = store.errors$.pipe(first()).toPromise();
savedObjectsClient.bulkDelete.mockRejectedValue(new Error('Failure'));
await expect(store.bulkRemove(tasksIdsToDelete)).rejects.toThrowErrorMatchingInlineSnapshot(
`"Failure"`
);
expect(await firstErrorPromise).toMatchInlineSnapshot(`[Error: Failure]`);
});
});
describe('get', () => {
let store: TaskStore;
beforeAll(() => {
store = new TaskStore({
index: 'tasky',
taskManagerId: '',
serializer,
esClient: elasticsearchServiceMock.createClusterClient().asInternalUser,
definitions: taskDefinitions,
savedObjectsRepository: savedObjectsClient,
adHocTaskCounter,
});
});
test('gets the task with the specified id', async () => {
const task = {
runAt: mockedDate,
scheduledAt: mockedDate,
startedAt: null,
retryAt: null,
id: randomId(),
params: { hello: 'world' },
state: { foo: 'bar' },
taskType: 'report',
attempts: 3,
status: 'idle' as TaskStatus,
version: '123',
ownerId: null,
};
savedObjectsClient.get.mockImplementation(async (type: string, objectId: string) => ({
id: objectId,
type,
attributes: {
..._.omit(task, 'id'),
..._.mapValues(_.pick(task, ['params', 'state']), (value) => JSON.stringify(value)),
},
references: [],
version: '123',
}));
const result = await store.get(task.id);
expect(result).toEqual(task);
expect(savedObjectsClient.get).toHaveBeenCalledWith('task', task.id);
});
test('pushes error from saved objects client to errors$', async () => {
const firstErrorPromise = store.errors$.pipe(first()).toPromise();
savedObjectsClient.get.mockRejectedValue(new Error('Failure'));
await expect(store.get(randomId())).rejects.toThrowErrorMatchingInlineSnapshot(`"Failure"`);
expect(await firstErrorPromise).toMatchInlineSnapshot(`[Error: Failure]`);
});
});
describe('bulkGet', () => {
let store: TaskStore;
beforeAll(() => {
store = new TaskStore({
index: 'tasky',
taskManagerId: '',
serializer,
esClient: elasticsearchServiceMock.createClusterClient().asInternalUser,
definitions: taskDefinitions,
savedObjectsRepository: savedObjectsClient,
adHocTaskCounter,
});
});
test('gets a task specified by id', async () => {
savedObjectsClient.bulkGet.mockResolvedValue({ saved_objects: [] });
await store.bulkGet(['1', '2']);
expect(savedObjectsClient.bulkGet).toHaveBeenCalledWith([
{ type: 'task', id: '1' },
{ type: 'task', id: '2' },
]);
});
test('returns error when task not found', async () => {
savedObjectsClient.bulkGet.mockResolvedValue({
saved_objects: [
{
type: 'task',
id: '1',
attributes: {},
references: [],
error: {
error: 'Oh no',
message: 'Oh no',
statusCode: 404,
},
},
],
});
const result = await store.bulkGet(['1']);
expect(result).toEqual([
asErr({
type: 'task',
id: '1',
error: {
error: 'Oh no',
message: 'Oh no',
statusCode: 404,
},
}),
]);
});
test('pushes error from saved objects client to errors$', async () => {
const firstErrorPromise = store.errors$.pipe(first()).toPromise();
savedObjectsClient.bulkGet.mockRejectedValue(new Error('Failure'));
await expect(store.bulkGet([randomId()])).rejects.toThrowErrorMatchingInlineSnapshot(
`"Failure"`
);
expect(await firstErrorPromise).toMatchInlineSnapshot(`[Error: Failure]`);
});
});
describe('getLifecycle', () => {
test('returns the task status if the task exists ', async () => {
expect.assertions(5);
return Promise.all(
Object.values(TaskStatus).map(async (status) => {
const task = {
runAt: mockedDate,
scheduledAt: mockedDate,
startedAt: null,
retryAt: null,
id: randomId(),
params: { hello: 'world' },
state: { foo: 'bar' },
taskType: 'report',
attempts: 3,
status: status as TaskStatus,
version: '123',
ownerId: null,
traceparent: 'myTraceparent',
};
savedObjectsClient.get.mockImplementation(async (type: string, objectId: string) => ({
id: objectId,
type,
attributes: {
..._.omit(task, 'id'),
..._.mapValues(_.pick(task, ['params', 'state']), (value) => JSON.stringify(value)),
},
references: [],
version: '123',
}));
const store = new TaskStore({
index: 'tasky',
taskManagerId: '',
serializer,
esClient: elasticsearchServiceMock.createClusterClient().asInternalUser,
definitions: taskDefinitions,
savedObjectsRepository: savedObjectsClient,
adHocTaskCounter,
});
expect(await store.getLifecycle(task.id)).toEqual(status);
})
);
});
test('returns NotFound status if the task doesnt exists ', async () => {
savedObjectsClient.get.mockRejectedValueOnce(
SavedObjectsErrorHelpers.createGenericNotFoundError('type', 'id')
);
const store = new TaskStore({
index: 'tasky',
taskManagerId: '',
serializer,
esClient: elasticsearchServiceMock.createClusterClient().asInternalUser,
definitions: taskDefinitions,
savedObjectsRepository: savedObjectsClient,
adHocTaskCounter,
});
expect(await store.getLifecycle(randomId())).toEqual(TaskLifecycleResult.NotFound);
});
test('throws if an unknown error takes place ', async () => {
savedObjectsClient.get.mockRejectedValueOnce(
SavedObjectsErrorHelpers.createBadRequestError()
);
const store = new TaskStore({
index: 'tasky',
taskManagerId: '',
serializer,
esClient: elasticsearchServiceMock.createClusterClient().asInternalUser,
definitions: taskDefinitions,
savedObjectsRepository: savedObjectsClient,
adHocTaskCounter,
});
return expect(store.getLifecycle(randomId())).rejects.toThrow('Bad Request');
});
});
describe('bulkSchedule', () => {
let store: TaskStore;
beforeAll(() => {
store = new TaskStore({
index: 'tasky',
taskManagerId: '',
serializer,
esClient: elasticsearchServiceMock.createClusterClient().asInternalUser,
definitions: taskDefinitions,
savedObjectsRepository: savedObjectsClient,
adHocTaskCounter,
});
});
afterEach(() => {
adHocTaskCounter.reset();
});
async function testBulkSchedule(task: unknown) {
savedObjectsClient.bulkCreate.mockImplementation(async () => ({
saved_objects: [
{
id: 'testid',
type: 'test',
attributes: {
attempts: 0,
params: '{"hello":"world"}',
retryAt: null,
runAt: '2019-02-12T21:01:22.479Z',
scheduledAt: '2019-02-12T21:01:22.479Z',
startedAt: null,
state: '{"foo":"bar"}',
status: 'idle',
taskType: 'report',
traceparent: 'apmTraceparent',
},
references: [],
version: '123',
},
],
}));
const result = await store.bulkSchedule(task as TaskInstance[]);
expect(savedObjectsClient.bulkCreate).toHaveBeenCalledTimes(1);
return result;
}
test('serializes the params and state', async () => {
const task = {
id: 'id',
params: { hello: 'world' },
state: { foo: 'bar' },
taskType: 'report',
traceparent: 'apmTraceparent',
};
const result = await testBulkSchedule([task]);
expect(savedObjectsClient.bulkCreate).toHaveBeenCalledWith(
[
{
id: 'id',
type: 'task',
attributes: {
attempts: 0,
params: '{"hello":"world"}',
retryAt: null,
runAt: '2019-02-12T21:01:22.479Z',
scheduledAt: '2019-02-12T21:01:22.479Z',
startedAt: null,
state: '{"foo":"bar"}',
status: 'idle',
taskType: 'report',
traceparent: 'apmTraceparent',
},
},
],
{ refresh: false }
);
expect(result).toEqual([
{
id: 'testid',
attempts: 0,
schedule: undefined,
params: { hello: 'world' },
retryAt: null,
runAt: mockedDate,
scheduledAt: mockedDate,
scope: undefined,
startedAt: null,
state: { foo: 'bar' },
status: 'idle',
taskType: 'report',
user: undefined,
version: '123',
traceparent: 'apmTraceparent',
},
]);
});
test('returns a concrete task instance', async () => {
const task = {
params: { hello: 'world' },
state: { foo: 'bar' },
taskType: 'report',
};
const result = await testBulkSchedule([task]);
expect(result).toMatchObject([
{
...task,
id: 'testid',
},
]);
});
test('errors if the task type is unknown', async () => {
await expect(testBulkSchedule([{ taskType: 'nope', params: {}, state: {} }])).rejects.toThrow(
/Unsupported task type "nope"/i
);
});
test('pushes error from saved objects client to errors$', async () => {
const task: TaskInstance = {
id: 'id',
params: { hello: 'world' },
state: { foo: 'bar' },
taskType: 'report',
};
const firstErrorPromise = store.errors$.pipe(first()).toPromise();
savedObjectsClient.bulkCreate.mockRejectedValue(new Error('Failure'));
await expect(store.bulkSchedule([task])).rejects.toThrowErrorMatchingInlineSnapshot(
`"Failure"`
);
expect(await firstErrorPromise).toMatchInlineSnapshot(`[Error: Failure]`);
});
test('increments adHocTaskCounter', async () => {
const task: TaskInstance = {
id: 'id',
params: { hello: 'world' },
state: { foo: 'bar' },
taskType: 'report',
};
const result = await testBulkSchedule([task]);
expect(adHocTaskCounter.count).toEqual(result.length);
});
test('does not increment adHocTaskCounter if the task is recurring', async () => {
const task: TaskInstance = {
id: 'id',
params: { hello: 'world' },
state: { foo: 'bar' },
taskType: 'report',
schedule: { interval: '1m' },
};
await testBulkSchedule([task]);
expect(adHocTaskCounter.count).toEqual(0);
});
});
});