mirror of
https://github.com/elastic/kibana.git
synced 2025-04-23 17:28:26 -04:00
[EBT] Use analytics module (#189052)
The EBT package has been moved to a separate NPM package ([@elastic/ebt](https://www.npmjs.com/package/@elastic/ebt)). The npm package is on version `0.0.x` until we finish the reviews, then I'll publish the `1.0.0` version before merging this PR. The PR is mostly code deletes after moving the code to the public ebt github repo https://github.com/elastic/ebt The significant changes are: 1. removed the `packages/analytics/ebt` package from kibana 2. remove @kbn/ebt references in favor of the npm package. 3. Added a util package to provide the package with the telemetry endpoint and headers This was previously baked into the package, but now I've rewired it to be provided from Kibana. This way we have more control over the URL and headers we use to send EBT telemetry for our elastic endpoint, which will probably be different between users of this package, and this way we'll also avoid republishing the package if we ever want to change these details. --------- Co-authored-by: kibanamachine <42973632+kibanamachine@users.noreply.github.com> Co-authored-by: Elastic Machine <elasticmachine@users.noreply.github.com> Co-authored-by: Alejandro Fernández Haro <afharo@gmail.com>
This commit is contained in:
parent
5219a1f14d
commit
8e10d0eff2
122 changed files with 181 additions and 8275 deletions
1
.github/CODEOWNERS
vendored
1
.github/CODEOWNERS
vendored
|
@ -376,7 +376,6 @@ packages/kbn-discover-utils @elastic/kibana-data-discovery
|
|||
packages/kbn-doc-links @elastic/docs
|
||||
packages/kbn-docs-utils @elastic/kibana-operations
|
||||
packages/kbn-dom-drag-drop @elastic/kibana-visualizations @elastic/kibana-data-discovery
|
||||
packages/analytics/ebt @elastic/kibana-core
|
||||
packages/kbn-ebt-tools @elastic/kibana-core
|
||||
x-pack/packages/security-solution/ecs_data_quality_dashboard @elastic/security-threat-hunting-explore
|
||||
x-pack/plugins/ecs_data_quality_dashboard @elastic/security-threat-hunting-explore
|
||||
|
|
|
@ -110,6 +110,7 @@
|
|||
"@elastic/apm-rum-react": "^2.0.3",
|
||||
"@elastic/charts": "66.1.0",
|
||||
"@elastic/datemath": "5.0.3",
|
||||
"@elastic/ebt": "1.0.0",
|
||||
"@elastic/ecs": "^8.11.1",
|
||||
"@elastic/elasticsearch": "^8.14.0",
|
||||
"@elastic/ems-client": "8.5.3",
|
||||
|
@ -436,7 +437,6 @@
|
|||
"@kbn/discover-utils": "link:packages/kbn-discover-utils",
|
||||
"@kbn/doc-links": "link:packages/kbn-doc-links",
|
||||
"@kbn/dom-drag-drop": "link:packages/kbn-dom-drag-drop",
|
||||
"@kbn/ebt": "link:packages/analytics/ebt",
|
||||
"@kbn/ebt-tools": "link:packages/kbn-ebt-tools",
|
||||
"@kbn/ecs-data-quality-dashboard": "link:x-pack/packages/security-solution/ecs_data_quality_dashboard",
|
||||
"@kbn/ecs-data-quality-dashboard-plugin": "link:x-pack/plugins/ecs_data_quality_dashboard",
|
||||
|
|
|
@ -1,11 +0,0 @@
|
|||
# @kbn/ebt/*
|
||||
|
||||
This module implements the Analytics client used for Event-Based Telemetry. The intention of the client is to be usable on both: the UI and the Server sides.
|
||||
|
||||
## Client
|
||||
|
||||
`@kbn/ebt/client` holds the public APIs to report events, enrich the events' context and set up the transport mechanisms. Refer to the [client's docs](./client/README.md) for more information.
|
||||
|
||||
## Prebuilt shippers
|
||||
|
||||
Elastic-approved shippers are available as `@kbn/ebt/shippers/*` packages. Refer to the [shippers' docs](./shippers/README.md) for more information.
|
|
@ -1,348 +0,0 @@
|
|||
# @kbn/ebt/client
|
||||
|
||||
This module implements the Analytics client used for Event-Based Telemetry. The intention of the client is to be usable on both: the UI and the Server sides.
|
||||
|
||||
## How to use it
|
||||
|
||||
It all starts by creating the client with the `createAnalytics` API:
|
||||
|
||||
```typescript
|
||||
import { createAnalytics } from '@kbn/ebt/client';
|
||||
|
||||
const analytics = createAnalytics({
|
||||
// Set to `true` when running in developer mode.
|
||||
// It enables development helpers like schema validation and extra debugging features.
|
||||
isDev: false,
|
||||
// Set to `staging` if you don't want your events to be sent to the production cluster. Useful for CI & QA environments.
|
||||
sendTo: 'production',
|
||||
// The application's instrumented logger
|
||||
logger,
|
||||
});
|
||||
```
|
||||
|
||||
### Reporting events
|
||||
|
||||
Reporting events is as simple as calling the `reportEvent` API every time your application needs to track an event:
|
||||
|
||||
```typescript
|
||||
analytics.reportEvent('my_unique_event_name', myEventProperties);
|
||||
```
|
||||
|
||||
But first, it requires a setup phase where the application must declare the event and the structure of the `eventProperties`:
|
||||
|
||||
```typescript
|
||||
analytics.registerEventType({
|
||||
eventType: 'my_unique_event_name',
|
||||
schema: {
|
||||
my_keyword: {
|
||||
type: 'keyword',
|
||||
_meta: {
|
||||
description: 'Represents the key property...'
|
||||
}
|
||||
},
|
||||
my_number: {
|
||||
type: 'long',
|
||||
_meta: {
|
||||
description: 'Indicates the number of times...',
|
||||
optional: true
|
||||
}
|
||||
},
|
||||
my_complex_unknown_meta_object: {
|
||||
type: 'pass_through',
|
||||
_meta: {
|
||||
description: 'Unknown object that contains the key-values...'
|
||||
}
|
||||
},
|
||||
my_array_of_str: {
|
||||
type: 'array',
|
||||
items: {
|
||||
type: 'text',
|
||||
_meta: {
|
||||
description: 'List of tags...'
|
||||
}
|
||||
}
|
||||
},
|
||||
my_object: {
|
||||
properties: {
|
||||
my_timestamp: {
|
||||
type: 'date',
|
||||
_meta: {
|
||||
description: 'timestamp when the user...'
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
my_array_of_objects: {
|
||||
type: 'array',
|
||||
items: {
|
||||
properties: {
|
||||
my_bool_prop: {
|
||||
type: 'boolean',
|
||||
_meta: {
|
||||
description: '`true` when...'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
```
|
||||
|
||||
For more information about how to declare the schemas, refer to the section [Schema definition](#schema-definition).
|
||||
|
||||
### Enriching events
|
||||
|
||||
Context is important! For that reason, the client internally appends the timestamp in which the event was generated and any additional context provided by the Context Providers. To register a context provider use the `registerContextProvider` API:
|
||||
|
||||
```typescript
|
||||
analytics.registerContextProvider({
|
||||
name: 'my_context_provider',
|
||||
// RxJS Observable that emits every time the context changes. For example: a License changes from `basic` to `trial`.
|
||||
context$,
|
||||
// Similar to the `reportEvent` API, schema defining the structure of the expected output of the context$ observable.
|
||||
schema,
|
||||
})
|
||||
```
|
||||
|
||||
### Setting the user's opt-in consent
|
||||
|
||||
The client cannot send any data until the user provides consent. At the beginning, the client will internally enqueue any incoming events until the consent is either granted or refused.
|
||||
|
||||
To set the user's selection use the `optIn` API:
|
||||
|
||||
```typescript
|
||||
analytics.optIn({
|
||||
global: {
|
||||
enabled: true, // The user granted consent
|
||||
shippers: {
|
||||
shipperA: false, // Shipper A is explicitly disabled for all events
|
||||
}
|
||||
},
|
||||
event_types: {
|
||||
my_unique_event_name: {
|
||||
enabled: true, // The consent is explicitly granted to send this type of event (only if global === true)
|
||||
shippers: {
|
||||
shipperB: false, // Shipper B is not allowed to report this event.
|
||||
}
|
||||
},
|
||||
my_other_event_name: {
|
||||
enabled: false, // The consent is not granted to send this type of event.
|
||||
}
|
||||
}
|
||||
})
|
||||
```
|
||||
|
||||
### Explicit flush of the events
|
||||
|
||||
If, at any given point (usually testing or during shutdowns) we need to make sure that all the pending events
|
||||
in the queue are sent. The `flush` API returns a promise that will resolve as soon as all events in the queue are sent.
|
||||
|
||||
```typescript
|
||||
await analytics.flush()
|
||||
```
|
||||
|
||||
### Shipping events
|
||||
|
||||
In order to report the event to an analytics tool, we need to register the shippers our application wants to use. To register a shipper use the API `registerShipper`:
|
||||
|
||||
```typescript
|
||||
analytics.registerShipper(ShipperClass, shipperOptions);
|
||||
```
|
||||
|
||||
There are some prebuilt shippers in this package that can be enabled using the API above. Additionally, each application can register their own custom shippers.
|
||||
|
||||
#### Prebuilt shippers
|
||||
|
||||
Refer to the [shippers' documentation](../shippers/README.md) for more information.
|
||||
|
||||
#### Custom shippers
|
||||
|
||||
To use your own shipper, you just need to implement and register it!:
|
||||
|
||||
```typescript
|
||||
import type {
|
||||
AnalyticsClientInitContext,
|
||||
Event,
|
||||
EventContext,
|
||||
IShipper,
|
||||
TelemetryCounter
|
||||
} from '@kbn/ebt/client';
|
||||
|
||||
class MyVeryOwnShipper implements IShipper {
|
||||
constructor(myOptions: MyOptions, initContext: AnalyticsClientInitContext) {
|
||||
// ...
|
||||
}
|
||||
|
||||
public reportEvents(events: Event[]): void {
|
||||
// Send the events to the analytics platform
|
||||
}
|
||||
public optIn(isOptedIn: boolean): void {
|
||||
// Start/stop any sending mechanisms
|
||||
}
|
||||
|
||||
public extendContext(newContext: EventContext): void {
|
||||
// Call any custom APIs to internally set the context
|
||||
}
|
||||
|
||||
// Emit any success/failed/dropped activity
|
||||
public telemetryCounter$: Observable<TelemetryCounter>;
|
||||
}
|
||||
|
||||
// Register the custom shipper
|
||||
analytics.registerShipper(MyVeryOwnShipper, myOptions);
|
||||
```
|
||||
|
||||
### Schema definition
|
||||
|
||||
Schemas are a framework that allows us to document the structure of the events that our application will report. It is useful to understand the meaning of the events that we report. And, at the same time, it serves as an extra validation step from the developer's point of view.
|
||||
|
||||
The syntax of a schema is a _simplified ES mapping on steroids_: it removes some of the ES mapping complexity, and at the same time, it includes features that are specific to the telemetry collection.
|
||||
|
||||
**DISCLAIMER:** **The schema is not a direct mapping to ES indices.** The final structure of how the event is stored will depend on many factors like the context providers, shippers and final analytics solution.
|
||||
|
||||
#### Schema Specification: Primitive data types (`string`, `number`, `boolean`)
|
||||
|
||||
When declaring primitive values like `string` or `number`, the basic schema must contain both: `type` and `_meta`.
|
||||
|
||||
The `type` value depends on the type of the content to report in that field. Refer to the table below for the values allowed in the schema `type`:
|
||||
|
||||
| Typescript `type` | Schema `type` |
|
||||
|:-----------------:|:-----------------------:|
|
||||
| `boolean` | `boolean` |
|
||||
| `string` | `keyword` |
|
||||
| `string` | `text` |
|
||||
| `string` | `date` (for ISO format) |
|
||||
| `number` | `date` (for ms format) |
|
||||
| `number` | `byte` |
|
||||
| `number` | `short` |
|
||||
| `number` | `integer` |
|
||||
| `number` | `long` |
|
||||
| `number` | `double` |
|
||||
| `number` | `float` |
|
||||
|
||||
```typescript
|
||||
const stringSchema: SchemaValue<string> = {
|
||||
type: 'text',
|
||||
_meta: {
|
||||
description: 'Description of the feature that was broken',
|
||||
optional: false,
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
For the `_meta`, refer to [Schema Specification: `_meta`](#schema-specification-_meta).
|
||||
|
||||
#### Schema Specification: Objects
|
||||
|
||||
Declaring the schema of an object contains 2 main attributes: `properties` and an optional `_meta`:
|
||||
|
||||
The `properties` attribute is an object with all the keys that the original object may include:
|
||||
|
||||
```typescript
|
||||
interface MyObject {
|
||||
an_id: string;
|
||||
a_description: string;
|
||||
a_number?: number;
|
||||
a_boolean: boolean;
|
||||
}
|
||||
|
||||
const objectSchema: SchemaObject<MyObject> = {
|
||||
properties: {
|
||||
an_id: {
|
||||
type: 'keyword',
|
||||
_meta: {
|
||||
description: 'The ID of the element that generated the event',
|
||||
optional: false,
|
||||
},
|
||||
},
|
||||
a_description: {
|
||||
type: 'text',
|
||||
_meta: {
|
||||
description: 'The human readable description of the element that generated the event',
|
||||
optional: false,
|
||||
},
|
||||
},
|
||||
a_number: {
|
||||
type: 'long',
|
||||
_meta: {
|
||||
description: 'The number of times the element is used',
|
||||
optional: true,
|
||||
},
|
||||
},
|
||||
a_boolean: {
|
||||
type: 'boolean',
|
||||
_meta: {
|
||||
description: 'Is the element still active',
|
||||
optional: false,
|
||||
},
|
||||
},
|
||||
},
|
||||
_meta: {
|
||||
description: 'MyObject represents the events generated by elements in the UI when ...',
|
||||
optional: false,
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
For the optional `_meta`, refer to [Schema Specification: `_meta`](#schema-specification-_meta).
|
||||
|
||||
#### Schema Specification: Arrays
|
||||
|
||||
Declaring the schema of an array contains 2 main attributes: `items` and an optional `_meta`:
|
||||
|
||||
The `items` attribute is an object declaring the schema of the elements inside the array. At the moment, we only support arrays of one type, so `Array<string | number>` are not allowed.
|
||||
|
||||
```typescript
|
||||
type MyArray = string[];
|
||||
|
||||
const arraySchema: SchemaArray<MyArray> = {
|
||||
items: {
|
||||
type: 'keyword',
|
||||
_meta: {
|
||||
description: 'Tag attached to the element...',
|
||||
optional: false,
|
||||
},
|
||||
},
|
||||
_meta: {
|
||||
description: 'List of tags attached to the element...',
|
||||
optional: false,
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
For the optional `_meta`, refer to [Schema Specification: `_meta`](#schema-specification-_meta).
|
||||
|
||||
#### Schema Specification: Special type `pass_through`
|
||||
|
||||
In case a property in the schema is just used to pass through some unknown content that is declared and validated somewhere else, or that can dynamically grow and shrink, you may use the `type: 'pass_through'` option. It behaves like a [primitive data type](#schema-specification-primitive-data-types-string-number-boolean):
|
||||
|
||||
```typescript
|
||||
type MyUnknownType = unknown;
|
||||
|
||||
const passThroughSchema: SchemaValue<MyUnknownType> = {
|
||||
type: 'pass_through',
|
||||
_meta: {
|
||||
description: 'Payload context received from the HTTP request...',
|
||||
optional: false,
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
For the optional `_meta`, refer to [Schema Specification: `_meta`](#schema-specification-_meta).
|
||||
|
||||
#### Schema Specification: `_meta`
|
||||
|
||||
The `_meta` adds the invaluable information of a `description` and whether a field is `optional` in the payload.
|
||||
|
||||
It can be attached to any schema definition as seen in the examples above. For high-order types, like arrays or objects, the `_meta` field is optional. For first-order types, like numbers, strings, booleans or `pass_through`, the `_meta` key is mandatory.
|
||||
|
||||
The field `_meta.optional` is not required unless the schema is describing an optional field. In that case, `_meta.optional: true` is required. However, it's highly encouraged to be explicit about declaring it even when the described field is not optional.
|
||||
|
||||
### Schema Validation
|
||||
|
||||
Apart from documentation, the schema is used to validate the payload during the dev cycle. This adds an extra layer of confidence over the data to be sent.
|
||||
|
||||
The validation, however, is disabled in production because users cannot do anything to fix the bug after it is released. Additionally, receiving _buggy_ events can be considered an additional insight into how our users use our products. For example, the buggy event can be caused by a user following an unexpected path in the UI like clicking an "Upload" button when the file has not been selected [#125013](https://github.com/elastic/kibana/issues/125013). In those cases, receiving the _incomplete_ event tells us the user didn't select a file, but they still hit the "Upload" button.
|
||||
|
||||
The validation is performed with the `io-ts` library. In order to do that, the schema is firstly parsed into the `io-ts` equivalent, and then used to validate the event & context payloads.
|
|
@ -1,64 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import type {
|
||||
AnalyticsClientInitContext,
|
||||
IAnalyticsClient as AnalyticsClient,
|
||||
} from './src/analytics_client';
|
||||
import { AnalyticsClient as AnalyticsClientClass } from './src/analytics_client';
|
||||
|
||||
/**
|
||||
* Creates an {@link AnalyticsClient}.
|
||||
* @param initContext The initial context to create the client {@link AnalyticsClientInitContext}
|
||||
*/
|
||||
export function createAnalytics(initContext: AnalyticsClientInitContext): AnalyticsClient {
|
||||
return new AnalyticsClientClass(initContext);
|
||||
}
|
||||
|
||||
export type {
|
||||
IAnalyticsClient as AnalyticsClient,
|
||||
// Types for the constructor
|
||||
AnalyticsClientInitContext,
|
||||
// Types for the registerShipper API
|
||||
ShipperClassConstructor,
|
||||
RegisterShipperOpts,
|
||||
// Types for the optIn API
|
||||
OptInConfig,
|
||||
OptInConfigPerType,
|
||||
ShipperName,
|
||||
// Types for the registerContextProvider API
|
||||
ContextProviderOpts,
|
||||
ContextProviderName,
|
||||
// Types for the registerEventType API
|
||||
EventTypeOpts,
|
||||
} from './src/analytics_client';
|
||||
|
||||
export type {
|
||||
Event,
|
||||
EventContext,
|
||||
EventType,
|
||||
TelemetryCounter,
|
||||
TelemetryCounterType,
|
||||
} from './src/events';
|
||||
|
||||
export type {
|
||||
RootSchema,
|
||||
SchemaObject,
|
||||
SchemaArray,
|
||||
SchemaChildValue,
|
||||
SchemaMeta,
|
||||
SchemaValue,
|
||||
SchemaMetaOptional,
|
||||
PossibleSchemaTypes,
|
||||
AllowedSchemaBooleanTypes,
|
||||
AllowedSchemaNumberTypes,
|
||||
AllowedSchemaStringTypes,
|
||||
AllowedSchemaTypes,
|
||||
} from './src/schema';
|
||||
|
||||
export type { IShipper } from './src/shippers';
|
File diff suppressed because it is too large
Load diff
|
@ -1,359 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import type { Mixed } from 'io-ts';
|
||||
import type { Observable } from 'rxjs';
|
||||
import { BehaviorSubject, Subject, combineLatest, from, merge } from 'rxjs';
|
||||
import {
|
||||
buffer,
|
||||
bufferCount,
|
||||
concatMap,
|
||||
delay,
|
||||
filter,
|
||||
groupBy,
|
||||
map,
|
||||
mergeMap,
|
||||
share,
|
||||
shareReplay,
|
||||
skipWhile,
|
||||
takeUntil,
|
||||
tap,
|
||||
} from 'rxjs';
|
||||
import type { LogMeta } from '@kbn/logging';
|
||||
import type { IShipper } from '../shippers';
|
||||
import type {
|
||||
AnalyticsClientInitContext,
|
||||
ContextProviderName,
|
||||
ContextProviderOpts,
|
||||
EventTypeOpts,
|
||||
IAnalyticsClient,
|
||||
OptInConfig,
|
||||
RegisterShipperOpts,
|
||||
ShipperClassConstructor,
|
||||
} from './types';
|
||||
import type { Event, EventContext, EventType, TelemetryCounter } from '../events';
|
||||
import { ShippersRegistry } from './shippers_registry';
|
||||
import { OptInConfigService } from './opt_in_config';
|
||||
import { ContextService } from './context_service';
|
||||
import { schemaToIoTs, validateSchema } from '../schema/validation';
|
||||
|
||||
/**
 * {@link LogMeta} shape used when debug-logging a reported event
 * (see `reportEvent`): the full event payload travels under `ebt_event`.
 */
interface EventDebugLogMeta extends LogMeta {
  // The reported event, attached verbatim to the debug log entry.
  ebt_event: Event<unknown>;
}
|
||||
|
||||
export class AnalyticsClient implements IAnalyticsClient {
|
||||
private readonly internalTelemetryCounter$ = new Subject<TelemetryCounter>();
|
||||
public readonly telemetryCounter$: Observable<TelemetryCounter> =
|
||||
this.internalTelemetryCounter$.pipe(share()); // Using `share` so we can have multiple subscribers
|
||||
/**
|
||||
* This queue holds all the events until both conditions occur:
|
||||
* 1. We know the user's optIn decision.
|
||||
* 2. We have, at least, one registered shipper.
|
||||
* @private
|
||||
*/
|
||||
private readonly internalEventQueue$ = new Subject<Event>();
|
||||
private readonly shippersRegistry = new ShippersRegistry();
|
||||
/**
|
||||
* Observable used to report when a shipper is registered.
|
||||
* @private
|
||||
*/
|
||||
private readonly shipperRegistered$ = new Subject<void>();
|
||||
private readonly eventTypeRegistry = new Map<
|
||||
EventType,
|
||||
EventTypeOpts<unknown> & { validator?: Mixed }
|
||||
>();
|
||||
private readonly contextService: ContextService;
|
||||
private readonly context$ = new BehaviorSubject<Partial<EventContext>>({});
|
||||
private readonly optInConfig$ = new BehaviorSubject<OptInConfigService | undefined>(undefined);
|
||||
private readonly optInConfigWithReplay$ = this.optInConfig$.pipe(
|
||||
filter((optInConfig): optInConfig is OptInConfigService => typeof optInConfig !== 'undefined'),
|
||||
shareReplay(1)
|
||||
);
|
||||
private readonly contextWithReplay$ = this.context$.pipe(
|
||||
skipWhile(() => !this.optInConfig$.value), // Do not forward the context events until we have an optInConfig value
|
||||
shareReplay(1)
|
||||
);
|
||||
|
||||
constructor(private readonly initContext: AnalyticsClientInitContext) {
|
||||
this.contextService = new ContextService(
|
||||
this.context$,
|
||||
this.initContext.isDev,
|
||||
this.initContext.logger.get('context-service')
|
||||
);
|
||||
this.reportEnqueuedEventsWhenClientIsReady();
|
||||
}
|
||||
|
||||
public reportEvent = <EventTypeData extends object>(
|
||||
eventType: EventType,
|
||||
eventData: EventTypeData
|
||||
) => {
|
||||
// Fetch the timestamp as soon as we receive the event.
|
||||
const timestamp = new Date().toISOString();
|
||||
|
||||
this.internalTelemetryCounter$.next({
|
||||
type: 'enqueued',
|
||||
source: 'client',
|
||||
event_type: eventType,
|
||||
code: 'enqueued',
|
||||
count: 1,
|
||||
});
|
||||
|
||||
const eventTypeOpts = this.eventTypeRegistry.get(eventType);
|
||||
if (!eventTypeOpts) {
|
||||
this.internalTelemetryCounter$.next({
|
||||
type: 'dropped',
|
||||
source: 'client',
|
||||
event_type: eventType,
|
||||
code: 'UnregisteredType',
|
||||
count: 1,
|
||||
});
|
||||
throw new Error(
|
||||
`Attempted to report event type "${eventType}", before registering it. Use the "registerEventType" API to register it.`
|
||||
);
|
||||
}
|
||||
|
||||
// If the validator is registered (dev-mode only), perform the validation.
|
||||
if (eventTypeOpts.validator) {
|
||||
validateSchema<EventTypeData>(
|
||||
`Event Type '${eventType}'`,
|
||||
eventTypeOpts.validator,
|
||||
eventData
|
||||
);
|
||||
}
|
||||
|
||||
const event: Event = {
|
||||
timestamp,
|
||||
event_type: eventType,
|
||||
context: this.context$.value,
|
||||
properties: eventData as unknown as Record<string, unknown>,
|
||||
};
|
||||
|
||||
this.initContext.logger.debug<EventDebugLogMeta>(`Report event "${eventType}"`, {
|
||||
ebt_event: event,
|
||||
});
|
||||
|
||||
const optInConfig = this.optInConfig$.value;
|
||||
|
||||
if (optInConfig?.isEventTypeOptedIn(eventType) === false) {
|
||||
// If opted out, skip early
|
||||
return;
|
||||
}
|
||||
|
||||
if (typeof optInConfig === 'undefined') {
|
||||
// If the opt-in config is not provided yet, we need to enqueue the event to an internal queue
|
||||
this.internalEventQueue$.next(event);
|
||||
} else {
|
||||
this.sendToShipper(eventType, [event]);
|
||||
}
|
||||
};
|
||||
|
||||
public registerEventType = <EventTypeData>(eventTypeOps: EventTypeOpts<EventTypeData>) => {
|
||||
if (this.eventTypeRegistry.get(eventTypeOps.eventType)) {
|
||||
throw new Error(`Event Type "${eventTypeOps.eventType}" is already registered.`);
|
||||
}
|
||||
|
||||
this.eventTypeRegistry.set(eventTypeOps.eventType, {
|
||||
...eventTypeOps,
|
||||
validator: this.initContext.isDev ? schemaToIoTs(eventTypeOps.schema) : undefined,
|
||||
});
|
||||
};
|
||||
|
||||
public optIn = (optInConfig: OptInConfig) => {
|
||||
const optInConfigInstance = new OptInConfigService(optInConfig);
|
||||
this.optInConfig$.next(optInConfigInstance);
|
||||
};
|
||||
|
||||
public registerContextProvider = <Context>(contextProviderOpts: ContextProviderOpts<Context>) => {
|
||||
this.contextService.registerContextProvider(contextProviderOpts);
|
||||
};
|
||||
|
||||
public removeContextProvider = (name: ContextProviderName) => {
|
||||
this.contextService.removeContextProvider(name);
|
||||
};
|
||||
|
||||
public registerShipper = <Shipper extends IShipper, ShipperConfig>(
|
||||
ShipperClass: ShipperClassConstructor<Shipper, ShipperConfig>,
|
||||
shipperConfig: ShipperConfig,
|
||||
{ exclusiveEventTypes = [] }: RegisterShipperOpts = {}
|
||||
) => {
|
||||
const shipperName = ShipperClass.shipperName;
|
||||
const shipper = new ShipperClass(shipperConfig, {
|
||||
...this.initContext,
|
||||
logger: this.initContext.logger.get('shipper', shipperName),
|
||||
});
|
||||
if (exclusiveEventTypes.length) {
|
||||
// This feature is not intended to be supported in the MVP.
|
||||
// I can remove it if we think it causes more bad than good.
|
||||
exclusiveEventTypes.forEach((eventType) => {
|
||||
this.shippersRegistry.addEventExclusiveShipper(eventType, shipperName, shipper);
|
||||
});
|
||||
} else {
|
||||
this.shippersRegistry.addGlobalShipper(shipperName, shipper);
|
||||
}
|
||||
|
||||
// Subscribe to the shipper's telemetryCounter$ and pass it over to the client's-level observable
|
||||
shipper.telemetryCounter$?.subscribe((counter) =>
|
||||
this.internalTelemetryCounter$.next({
|
||||
...counter,
|
||||
source: shipperName, // Enforce the shipper's name in the `source`
|
||||
})
|
||||
);
|
||||
|
||||
// Spread the optIn configuration updates
|
||||
this.optInConfigWithReplay$.subscribe((optInConfig) => {
|
||||
const isOptedIn = optInConfig.isShipperOptedIn(shipperName);
|
||||
try {
|
||||
shipper.optIn(isOptedIn);
|
||||
} catch (err) {
|
||||
this.initContext.logger.warn(
|
||||
`Failed to set isOptedIn:${isOptedIn} in shipper ${shipperName}`,
|
||||
err
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
// Spread the global context if it has custom extendContext method
|
||||
if (shipper.extendContext) {
|
||||
this.contextWithReplay$.subscribe((context) => {
|
||||
try {
|
||||
shipper.extendContext!(context);
|
||||
} catch (err) {
|
||||
this.initContext.logger.warn(
|
||||
`Shipper "${shipperName}" failed to extend the context`,
|
||||
err
|
||||
);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Notify that a shipper is registered
|
||||
this.shipperRegistered$.next();
|
||||
};
|
||||
|
||||
public flush = async () => {
|
||||
await Promise.all(
|
||||
[...this.shippersRegistry.allShippers.entries()].map(async ([shipperName, shipper]) => {
|
||||
try {
|
||||
await shipper.flush();
|
||||
} catch (err) {
|
||||
this.initContext.logger.warn(`Failed to flush shipper "${shipperName}"`, err);
|
||||
}
|
||||
})
|
||||
);
|
||||
};
|
||||
|
||||
public shutdown = async () => {
|
||||
await this.flush();
|
||||
this.shippersRegistry.allShippers.forEach((shipper, shipperName) => {
|
||||
try {
|
||||
shipper.shutdown();
|
||||
} catch (err) {
|
||||
this.initContext.logger.warn(`Failed to shutdown shipper "${shipperName}"`, err);
|
||||
}
|
||||
});
|
||||
this.internalEventQueue$.complete();
|
||||
this.internalTelemetryCounter$.complete();
|
||||
this.shipperRegistered$.complete();
|
||||
this.optInConfig$.complete();
|
||||
this.context$.complete();
|
||||
};
|
||||
|
||||
/**
|
||||
* Forwards the `events` to the registered shippers, bearing in mind if the shipper is opted-in for that eventType.
|
||||
* @param eventType The event type's name
|
||||
* @param events A bulk array of events matching the eventType.
|
||||
* @private
|
||||
*/
|
||||
private sendToShipper(eventType: EventType, events: Event[]) {
|
||||
let sentToShipper = false;
|
||||
this.shippersRegistry.getShippersForEventType(eventType).forEach((shipper, shipperName) => {
|
||||
const isShipperOptedIn = this.optInConfig$.value?.isShipperOptedIn(shipperName, eventType);
|
||||
|
||||
// Only send it to the non-explicitly opted-out shippers
|
||||
if (isShipperOptedIn) {
|
||||
sentToShipper = true;
|
||||
try {
|
||||
shipper.reportEvents(events);
|
||||
} catch (err) {
|
||||
this.initContext.logger.warn(
|
||||
`Failed to report event "${eventType}" via shipper "${shipperName}"`,
|
||||
err
|
||||
);
|
||||
}
|
||||
}
|
||||
});
|
||||
if (sentToShipper) {
|
||||
this.internalTelemetryCounter$.next({
|
||||
type: 'sent_to_shipper',
|
||||
source: 'client',
|
||||
event_type: eventType,
|
||||
code: 'OK',
|
||||
count: events.length,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Once the client is ready (it has a valid optInConfig and at least one shipper),
|
||||
* flush any early events and ship them or discard them based on the optInConfig.
|
||||
* @private
|
||||
*/
|
||||
private reportEnqueuedEventsWhenClientIsReady() {
|
||||
// Observer that will emit when both events occur: the OptInConfig is set + a shipper has been registered
|
||||
const configReceivedAndShipperReceivedObserver$ = combineLatest([
|
||||
this.optInConfigWithReplay$,
|
||||
merge([
|
||||
this.shipperRegistered$,
|
||||
// Merging shipperRegistered$ with the optInConfigWithReplay$ when optedIn is false, so that we don't need to wait for the shipper if opted-in === false
|
||||
this.optInConfigWithReplay$.pipe(filter((cfg) => cfg?.isOptedIn() === false)),
|
||||
]),
|
||||
]);
|
||||
|
||||
// Flush the internal queue when we get any optInConfig and, at least, 1 shipper
|
||||
this.internalEventQueue$
|
||||
.pipe(
|
||||
// Take until will close the observer once we reach the condition below
|
||||
takeUntil(configReceivedAndShipperReceivedObserver$),
|
||||
|
||||
// Accumulate the events until we can send them
|
||||
buffer(configReceivedAndShipperReceivedObserver$),
|
||||
|
||||
// Minimal delay only to make this chain async and let the optIn operation to complete first.
|
||||
delay(0),
|
||||
|
||||
// Re-emit the context to make sure all the shippers got it (only if opted-in)
|
||||
tap(() => {
|
||||
if (this.optInConfig$.value?.isOptedIn()) {
|
||||
this.context$.next(this.context$.value);
|
||||
}
|
||||
}),
|
||||
|
||||
// Minimal delay only to make this chain async and let
|
||||
// the context update operation to complete first.
|
||||
delay(0),
|
||||
|
||||
// Flatten the array of events
|
||||
concatMap((events) => from(events)),
|
||||
|
||||
// Discard opted-out events
|
||||
filter((event) => this.optInConfig$.value?.isEventTypeOptedIn(event.event_type) === true),
|
||||
|
||||
// Let's group the requests per eventType for easier batching
|
||||
groupBy((event) => event.event_type),
|
||||
mergeMap((groupedObservable) =>
|
||||
groupedObservable.pipe(
|
||||
bufferCount(1000), // Batching up-to 1000 events per event type for backpressure reasons
|
||||
map((events) => ({ eventType: groupedObservable.key, events }))
|
||||
)
|
||||
)
|
||||
)
|
||||
.subscribe(({ eventType, events }) => {
|
||||
this.sendToShipper(eventType, events);
|
||||
});
|
||||
}
|
||||
}
|
|
@ -1,353 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { loggerMock, type MockedLogger } from '@kbn/logging-mocks';
|
||||
import { BehaviorSubject, firstValueFrom, lastValueFrom, Subject, take, toArray } from 'rxjs';
|
||||
import type { EventContext } from '../events';
|
||||
import { ContextService } from './context_service';
|
||||
|
||||
describe('ContextService', () => {
|
||||
let globalContext$: Subject<Partial<EventContext>>;
|
||||
let contextService: ContextService;
|
||||
let logger: MockedLogger;
|
||||
|
||||
beforeEach(() => {
|
||||
globalContext$ = new BehaviorSubject<Partial<EventContext>>({});
|
||||
logger = loggerMock.create();
|
||||
contextService = new ContextService(globalContext$, true, logger);
|
||||
});
|
||||
|
||||
test('Registers a context provider', async () => {
|
||||
const context$ = new Subject<{ a_field: boolean }>();
|
||||
contextService.registerContextProvider({
|
||||
name: 'contextProviderA',
|
||||
schema: {
|
||||
a_field: {
|
||||
type: 'boolean',
|
||||
_meta: {
|
||||
description: 'a_field description',
|
||||
},
|
||||
},
|
||||
},
|
||||
context$,
|
||||
});
|
||||
|
||||
const globalContextPromise = lastValueFrom(globalContext$.pipe(take(2), toArray()));
|
||||
context$.next({ a_field: true });
|
||||
await expect(globalContextPromise).resolves.toEqual([
|
||||
{}, // Original empty state
|
||||
{ a_field: true },
|
||||
]);
|
||||
});
|
||||
|
||||
test('It does not break if context emits `undefined`', async () => {
|
||||
contextService = new ContextService(
|
||||
globalContext$,
|
||||
false, // setting to `false` so the validation piece of logic does not kick in.
|
||||
logger
|
||||
);
|
||||
|
||||
const context$ = new Subject<{ a_field: boolean } | undefined | void>();
|
||||
contextService.registerContextProvider({
|
||||
name: 'contextProviderA',
|
||||
schema: {
|
||||
a_field: {
|
||||
type: 'boolean',
|
||||
_meta: {
|
||||
description: 'a_field description',
|
||||
},
|
||||
},
|
||||
},
|
||||
context$,
|
||||
});
|
||||
|
||||
const globalContextPromise = lastValueFrom(globalContext$.pipe(take(3), toArray()));
|
||||
context$.next();
|
||||
context$.next(undefined);
|
||||
await expect(globalContextPromise).resolves.toEqual([
|
||||
{}, // Original empty state
|
||||
{},
|
||||
{},
|
||||
]);
|
||||
});
|
||||
|
||||
test('It does not break for BehaviourSubjects (emitting as soon as they connect)', async () => {
|
||||
const context$ = new BehaviorSubject<{ a_field: boolean }>({ a_field: true });
|
||||
contextService.registerContextProvider({
|
||||
name: 'contextProviderA',
|
||||
schema: {
|
||||
a_field: {
|
||||
type: 'boolean',
|
||||
_meta: {
|
||||
description: 'a_field description',
|
||||
},
|
||||
},
|
||||
},
|
||||
context$,
|
||||
});
|
||||
|
||||
const globalContextPromise = lastValueFrom(globalContext$.pipe(take(1), toArray()));
|
||||
await expect(globalContextPromise).resolves.toEqual([
|
||||
{ a_field: true }, // No original empty state
|
||||
]);
|
||||
});
|
||||
|
||||
test('Merges all the contexts together', async () => {
|
||||
const contextA$ = new Subject<{ a_field: boolean }>();
|
||||
contextService.registerContextProvider({
|
||||
name: 'contextProviderA',
|
||||
schema: {
|
||||
a_field: {
|
||||
type: 'boolean',
|
||||
_meta: {
|
||||
description: 'a_field description',
|
||||
},
|
||||
},
|
||||
},
|
||||
context$: contextA$,
|
||||
});
|
||||
|
||||
const contextB$ = new Subject<{ a_field?: boolean; b_field: number }>();
|
||||
contextService.registerContextProvider({
|
||||
name: 'contextProviderB',
|
||||
schema: {
|
||||
a_field: {
|
||||
type: 'boolean',
|
||||
_meta: {
|
||||
description: 'a_field description',
|
||||
optional: true,
|
||||
},
|
||||
},
|
||||
b_field: {
|
||||
type: 'long',
|
||||
_meta: {
|
||||
description: 'b_field description',
|
||||
},
|
||||
},
|
||||
},
|
||||
context$: contextB$,
|
||||
});
|
||||
|
||||
const globalContextPromise = lastValueFrom(globalContext$.pipe(take(6), toArray()));
|
||||
contextA$.next({ a_field: true });
|
||||
contextB$.next({ b_field: 1 });
|
||||
contextB$.next({ a_field: false, b_field: 1 });
|
||||
contextA$.next({ a_field: true });
|
||||
contextB$.next({ b_field: 2 });
|
||||
await expect(globalContextPromise).resolves.toEqual([
|
||||
{}, // Original empty state
|
||||
{ a_field: true },
|
||||
{ a_field: true, b_field: 1 }, // Merged A & B
|
||||
{ a_field: false, b_field: 1 }, // a_field updated from B
|
||||
{ a_field: false, b_field: 1 }, // a_field still from B because it was registered later.
|
||||
// We may want to change this last behaviour in the future but, for now, it's fine.
|
||||
{ a_field: true, b_field: 2 }, // a_field is now taken from A because B is not providing it yet.
|
||||
]);
|
||||
});
|
||||
|
||||
test('The global context is not polluted by context providers removing reported fields', async () => {
|
||||
const context$ = new Subject<{ a_field?: boolean; b_field: number }>();
|
||||
contextService.registerContextProvider({
|
||||
name: 'contextProviderA',
|
||||
schema: {
|
||||
a_field: {
|
||||
type: 'boolean',
|
||||
_meta: {
|
||||
description: 'a_field description',
|
||||
optional: true,
|
||||
},
|
||||
},
|
||||
b_field: {
|
||||
type: 'long',
|
||||
_meta: {
|
||||
description: 'b_field description',
|
||||
},
|
||||
},
|
||||
},
|
||||
context$,
|
||||
});
|
||||
|
||||
const globalContextPromise = lastValueFrom(globalContext$.pipe(take(6), toArray()));
|
||||
context$.next({ b_field: 1 });
|
||||
context$.next({ a_field: false, b_field: 1 });
|
||||
context$.next({ a_field: true, b_field: 1 });
|
||||
context$.next({ b_field: 1 });
|
||||
context$.next({ a_field: true, b_field: 2 });
|
||||
await expect(globalContextPromise).resolves.toEqual([
|
||||
{}, // Original empty state
|
||||
{ b_field: 1 },
|
||||
{ a_field: false, b_field: 1 },
|
||||
{ a_field: true, b_field: 1 },
|
||||
{ b_field: 1 }, // a_field is removed because the context provider removed it.
|
||||
{ a_field: true, b_field: 2 },
|
||||
]);
|
||||
});
|
||||
|
||||
test('The undefined values are not forwarded to the global context', async () => {
|
||||
const context$ = new Subject<{ a_field?: boolean; b_field: number }>();
|
||||
contextService.registerContextProvider({
|
||||
name: 'contextProviderA',
|
||||
schema: {
|
||||
a_field: {
|
||||
type: 'boolean',
|
||||
_meta: {
|
||||
description: 'a_field description',
|
||||
optional: true,
|
||||
},
|
||||
},
|
||||
b_field: {
|
||||
type: 'long',
|
||||
_meta: {
|
||||
description: 'b_field description',
|
||||
},
|
||||
},
|
||||
},
|
||||
context$,
|
||||
});
|
||||
|
||||
const globalContextPromise = firstValueFrom(globalContext$.pipe(take(6), toArray()));
|
||||
context$.next({ b_field: 1 });
|
||||
context$.next({ a_field: false, b_field: 1 });
|
||||
context$.next({ a_field: true, b_field: 1 });
|
||||
context$.next({ b_field: 1 });
|
||||
context$.next({ a_field: undefined, b_field: 2 });
|
||||
await expect(globalContextPromise).resolves.toEqual([
|
||||
{}, // Original empty state
|
||||
{ b_field: 1 },
|
||||
{ a_field: false, b_field: 1 },
|
||||
{ a_field: true, b_field: 1 },
|
||||
{ b_field: 1 }, // a_field is removed because the context provider removed it.
|
||||
{ b_field: 2 }, // a_field is not forwarded because it is `undefined`
|
||||
]);
|
||||
});
|
||||
|
||||
test('Fails to register 2 context providers with the same name', () => {
|
||||
contextService.registerContextProvider({
|
||||
name: 'contextProviderA',
|
||||
schema: {
|
||||
a_field: {
|
||||
type: 'boolean',
|
||||
_meta: {
|
||||
description: 'a_field description',
|
||||
},
|
||||
},
|
||||
},
|
||||
context$: new Subject<{ a_field: boolean }>(),
|
||||
});
|
||||
expect(() => {
|
||||
contextService.registerContextProvider({
|
||||
name: 'contextProviderA',
|
||||
schema: {
|
||||
a_field: {
|
||||
type: 'boolean',
|
||||
_meta: {
|
||||
description: 'a_field description',
|
||||
},
|
||||
},
|
||||
},
|
||||
context$: new Subject<{ a_field: boolean }>(),
|
||||
});
|
||||
}).toThrowErrorMatchingInlineSnapshot(
|
||||
`"Context provider with name 'contextProviderA' already registered"`
|
||||
);
|
||||
});
|
||||
|
||||
test('Does not remove the context provider after it completes', async () => {
|
||||
const context$ = new Subject<{ a_field: boolean }>();
|
||||
|
||||
// eslint-disable-next-line dot-notation
|
||||
const contextProvidersRegistry = contextService['contextProvidersRegistry'];
|
||||
|
||||
// The context registry is empty
|
||||
expect(contextProvidersRegistry.size).toBe(0);
|
||||
|
||||
contextService.registerContextProvider({
|
||||
name: 'contextProviderA',
|
||||
schema: {
|
||||
a_field: {
|
||||
type: 'boolean',
|
||||
_meta: {
|
||||
description: 'a_field description',
|
||||
},
|
||||
},
|
||||
},
|
||||
context$,
|
||||
});
|
||||
|
||||
const globalContextPromise = lastValueFrom(globalContext$.pipe(take(4), toArray()));
|
||||
context$.next({ a_field: true });
|
||||
// The size of the registry grows on the first emission
|
||||
expect(contextProvidersRegistry.size).toBe(1);
|
||||
|
||||
context$.next({ a_field: true });
|
||||
// Still in the registry
|
||||
expect(contextProvidersRegistry.size).toBe(1);
|
||||
context$.complete();
|
||||
// Still in the registry
|
||||
expect(contextProvidersRegistry.size).toBe(1);
|
||||
contextService.removeContextProvider('contextProviderA');
|
||||
// The context provider is removed from the registry
|
||||
expect(contextProvidersRegistry.size).toBe(0);
|
||||
await expect(globalContextPromise).resolves.toEqual([
|
||||
{}, // Original empty state
|
||||
{ a_field: true },
|
||||
{ a_field: true },
|
||||
{},
|
||||
]);
|
||||
});
|
||||
|
||||
test('validates the input and logs the error if invalid', () => {
|
||||
const context$ = new Subject<{ a_field: boolean } | undefined>();
|
||||
contextService.registerContextProvider({
|
||||
name: 'contextProviderA',
|
||||
schema: {
|
||||
a_field: {
|
||||
type: 'boolean',
|
||||
_meta: {
|
||||
description: 'a_field description',
|
||||
},
|
||||
},
|
||||
},
|
||||
context$,
|
||||
});
|
||||
|
||||
context$.next(undefined);
|
||||
expect(logger.error).toHaveBeenCalledTimes(1);
|
||||
expect((logger.error.mock.calls[0][0] as Error).message).toContain(
|
||||
`Failed to validate payload coming from "Context Provider 'contextProviderA'"`
|
||||
);
|
||||
});
|
||||
|
||||
test('it does not stop the subscription after an error', async () => {
|
||||
const context$ = new Subject<{ a_field: boolean } | undefined>();
|
||||
contextService.registerContextProvider({
|
||||
name: 'contextProviderA',
|
||||
schema: {
|
||||
a_field: {
|
||||
type: 'boolean',
|
||||
_meta: {
|
||||
description: 'a_field description',
|
||||
},
|
||||
},
|
||||
},
|
||||
context$,
|
||||
});
|
||||
|
||||
const globalContextPromise = lastValueFrom(globalContext$.pipe(take(2), toArray()));
|
||||
context$.next({ a_field: '123' as unknown as boolean }); // cause the error
|
||||
expect(logger.error).toHaveBeenCalledTimes(1);
|
||||
expect((logger.error.mock.calls[0][0] as Error).message).toContain(
|
||||
`Failed to validate payload coming from "Context Provider 'contextProviderA'"`
|
||||
);
|
||||
context$.next({ a_field: true }); // send a good one
|
||||
await expect(globalContextPromise).resolves.toEqual([
|
||||
{}, // Original empty state
|
||||
{ a_field: true }, // 2nd emission (the errored one does not spread)
|
||||
]);
|
||||
});
|
||||
});
|
|
@ -1,113 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import type { Subject, Subscription } from 'rxjs';
|
||||
import { filter } from 'rxjs';
|
||||
import type { Logger } from '@kbn/logging';
|
||||
import type { EventContext } from '../events';
|
||||
import type { ContextProviderName, ContextProviderOpts } from './types';
|
||||
import { schemaToIoTs, validateSchema } from '../schema/validation';
|
||||
|
||||
export class ContextService {
|
||||
private readonly contextProvidersRegistry = new Map<ContextProviderName, unknown>();
|
||||
private readonly contextProvidersSubscriptions = new Map<ContextProviderName, Subscription>();
|
||||
|
||||
constructor(
|
||||
private readonly context$: Subject<Partial<EventContext>>,
|
||||
private readonly isDevMode: boolean,
|
||||
private readonly logger: Logger
|
||||
) {}
|
||||
|
||||
/**
|
||||
* Registers a context provider, and subscribes to any updates from it.
|
||||
* @param contextProviderOpts The options to register the context provider {@link ContextProviderOpts}
|
||||
*/
|
||||
public registerContextProvider<Context>({
|
||||
name,
|
||||
context$,
|
||||
schema,
|
||||
}: ContextProviderOpts<Context>) {
|
||||
if (this.contextProvidersSubscriptions.has(name)) {
|
||||
throw new Error(`Context provider with name '${name}' already registered`);
|
||||
}
|
||||
|
||||
// Declare the validator only in dev-mode
|
||||
const validator = this.isDevMode ? schemaToIoTs(schema) : undefined;
|
||||
|
||||
const subscription = context$
|
||||
.pipe(
|
||||
filter((context) => {
|
||||
if (validator) {
|
||||
try {
|
||||
validateSchema(
|
||||
`Context Provider '${name}'`,
|
||||
validator,
|
||||
context as Record<string, unknown>
|
||||
);
|
||||
} catch (validationError) {
|
||||
this.logger.error(validationError);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
})
|
||||
)
|
||||
.subscribe((context) => {
|
||||
// We store each context linked to the context provider, so they can increase and reduce
|
||||
// the number of fields they report without having left-overs in the global context.
|
||||
this.contextProvidersRegistry.set(name, context);
|
||||
|
||||
// For every context change, we rebuild the global context.
|
||||
// It's better to do it here than to rebuild it for every reportEvent.
|
||||
this.updateGlobalContext();
|
||||
});
|
||||
|
||||
this.contextProvidersSubscriptions.set(name, subscription);
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes the context provider from the registry, unsubscribes from it, and rebuilds the global context.
|
||||
* @param name The name of the context provider to remove.
|
||||
*/
|
||||
public removeContextProvider(name: ContextProviderName) {
|
||||
this.contextProvidersSubscriptions.get(name)?.unsubscribe();
|
||||
this.contextProvidersRegistry.delete(name);
|
||||
this.updateGlobalContext();
|
||||
}
|
||||
|
||||
/**
|
||||
* Loops through all the context providers and sets the global context
|
||||
* @private
|
||||
*/
|
||||
private updateGlobalContext() {
|
||||
this.context$.next(
|
||||
[...this.contextProvidersRegistry.values()].reduce((acc: Partial<EventContext>, context) => {
|
||||
return {
|
||||
...acc,
|
||||
...this.removeEmptyValues(context),
|
||||
};
|
||||
}, {} as Partial<EventContext>)
|
||||
);
|
||||
}
|
||||
|
||||
private removeEmptyValues(context: unknown) {
|
||||
if (!isObject(context)) {
|
||||
return {};
|
||||
}
|
||||
return Object.keys(context).reduce((acc, key) => {
|
||||
if (context[key] !== undefined) {
|
||||
acc[key] = context[key];
|
||||
}
|
||||
return acc;
|
||||
}, {} as Partial<EventContext>);
|
||||
}
|
||||
}
|
||||
|
||||
function isObject(value: unknown): value is Record<string, unknown> {
|
||||
return typeof value === 'object' && value !== null;
|
||||
}
|
|
@ -1,27 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
export type {
|
||||
IAnalyticsClient,
|
||||
// Types for the constructor
|
||||
AnalyticsClientInitContext,
|
||||
// Types for the registerShipper API
|
||||
ShipperClassConstructor,
|
||||
RegisterShipperOpts,
|
||||
// Types for the optIn API
|
||||
OptInConfig,
|
||||
OptInConfigPerType,
|
||||
ShipperName,
|
||||
// Types for the registerContextProvider API
|
||||
ContextProviderOpts,
|
||||
ContextProviderName,
|
||||
// Types for the registerEventType API
|
||||
EventTypeOpts,
|
||||
} from './types';
|
||||
|
||||
export { AnalyticsClient } from './analytics_client';
|
|
@ -1,28 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { Subject } from 'rxjs';
|
||||
import type { IAnalyticsClient } from './types';
|
||||
|
||||
function createMockedAnalyticsClient(): jest.Mocked<IAnalyticsClient> {
|
||||
return {
|
||||
optIn: jest.fn(),
|
||||
reportEvent: jest.fn(),
|
||||
registerEventType: jest.fn(),
|
||||
registerContextProvider: jest.fn(),
|
||||
removeContextProvider: jest.fn(),
|
||||
registerShipper: jest.fn(),
|
||||
telemetryCounter$: new Subject(),
|
||||
flush: jest.fn(),
|
||||
shutdown: jest.fn(),
|
||||
};
|
||||
}
|
||||
|
||||
export const analyticsClientMock = {
|
||||
create: createMockedAnalyticsClient,
|
||||
};
|
|
@ -1,329 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { OptInConfigService } from './opt_in_config';
|
||||
|
||||
describe('OptInConfigService', () => {
|
||||
describe('isOptedIn', () => {
|
||||
test('Returns `true` when `global.enabled: true`', () => {
|
||||
const config = new OptInConfigService({ global: { enabled: true } });
|
||||
expect(config.isOptedIn()).toBe(true);
|
||||
});
|
||||
|
||||
test('Returns `false` when `global.enabled: false`', () => {
|
||||
const config = new OptInConfigService({ global: { enabled: false } });
|
||||
expect(config.isOptedIn()).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('isEventTypeOptedIn', () => {
|
||||
test('Returns `true` when `global.enabled: true` and no eventType specific config is provided', () => {
|
||||
const config = new OptInConfigService({ global: { enabled: true } });
|
||||
expect(config.isEventTypeOptedIn('test-event-1')).toBe(true);
|
||||
});
|
||||
|
||||
test('Returns `false` when `global.enabled: false` and no eventType specific config is provided', () => {
|
||||
const config = new OptInConfigService({ global: { enabled: false } });
|
||||
expect(config.isEventTypeOptedIn('test-event-1')).toBe(false);
|
||||
});
|
||||
|
||||
test('Returns `true` when `global.enabled: true` and event_type config exists but not for the requested eventType', () => {
|
||||
const config = new OptInConfigService({
|
||||
global: { enabled: true },
|
||||
event_types: {
|
||||
'test-event-2': { enabled: true },
|
||||
'test-event-3': { enabled: false },
|
||||
},
|
||||
});
|
||||
expect(config.isEventTypeOptedIn('test-event-1')).toBe(true);
|
||||
});
|
||||
|
||||
test('Returns `false` when `global.enabled: false` and event_type config exists but not for the requested eventType', () => {
|
||||
const config = new OptInConfigService({
|
||||
global: { enabled: false },
|
||||
event_types: {
|
||||
'test-event-2': { enabled: true },
|
||||
'test-event-3': { enabled: false },
|
||||
},
|
||||
});
|
||||
expect(config.isEventTypeOptedIn('test-event-1')).toBe(false);
|
||||
});
|
||||
|
||||
test('Returns `true` when `global.enabled: true` and event_type config exists and it is `true`', () => {
|
||||
const config = new OptInConfigService({
|
||||
global: { enabled: true },
|
||||
event_types: {
|
||||
'test-event-1': { enabled: true },
|
||||
},
|
||||
});
|
||||
expect(config.isEventTypeOptedIn('test-event-1')).toBe(true);
|
||||
});
|
||||
|
||||
test('Returns `false` when `global.enabled: false` and event_type config exists and it is `true`', () => {
|
||||
const config = new OptInConfigService({
|
||||
global: { enabled: false },
|
||||
event_types: {
|
||||
'test-event-1': { enabled: true },
|
||||
},
|
||||
});
|
||||
expect(config.isEventTypeOptedIn('test-event-1')).toBe(false);
|
||||
});
|
||||
|
||||
test('Returns `false` when `global.enabled: true` and event_type config exists and it is `false`', () => {
|
||||
const config = new OptInConfigService({
|
||||
global: { enabled: true },
|
||||
event_types: {
|
||||
'test-event-1': { enabled: false },
|
||||
},
|
||||
});
|
||||
expect(config.isEventTypeOptedIn('test-event-1')).toBe(false);
|
||||
});
|
||||
|
||||
test('Returns `false` when `global.enabled: false` and event_type config exists and it is `false`', () => {
|
||||
const config = new OptInConfigService({
|
||||
global: { enabled: false },
|
||||
event_types: {
|
||||
'test-event-1': { enabled: false },
|
||||
},
|
||||
});
|
||||
expect(config.isEventTypeOptedIn('test-event-1')).toBe(false);
|
||||
});
|
||||
});
|
||||
describe('isShipperOptedIn', () => {
|
||||
test('Returns `true` when `global.enabled: true` and no shipper specific config is provided', () => {
|
||||
const config = new OptInConfigService({ global: { enabled: true } });
|
||||
expect(config.isShipperOptedIn('test-shipper-1')).toBe(true);
|
||||
expect(config.isShipperOptedIn('test-shipper-1', 'test-event-1')).toBe(true);
|
||||
});
|
||||
|
||||
test('Returns `false` when `global.enabled: false` and no shipper specific config is provided', () => {
|
||||
const config = new OptInConfigService({ global: { enabled: false } });
|
||||
expect(config.isShipperOptedIn('test-shipper-1')).toBe(false);
|
||||
expect(config.isShipperOptedIn('test-shipper-1', 'test-event-1')).toBe(false);
|
||||
});
|
||||
|
||||
test('Returns `true` when `global.enabled: true` and shipper config exists but not for the requested eventType', () => {
|
||||
const config = new OptInConfigService({
|
||||
global: {
|
||||
enabled: true,
|
||||
shippers: {
|
||||
'test-shipper-2': true,
|
||||
'test-shipper-3': false,
|
||||
},
|
||||
},
|
||||
});
|
||||
expect(config.isShipperOptedIn('test-shipper-1')).toBe(true);
|
||||
expect(config.isShipperOptedIn('test-shipper-1', 'test-event-1')).toBe(true);
|
||||
});
|
||||
|
||||
test('Returns `false` when `global.enabled: false` and shipper config exists but not for the requested eventType', () => {
|
||||
const config = new OptInConfigService({
|
||||
global: {
|
||||
enabled: false,
|
||||
shippers: {
|
||||
'test-shipper-2': true,
|
||||
'test-shipper-3': false,
|
||||
},
|
||||
},
|
||||
});
|
||||
expect(config.isShipperOptedIn('test-shipper-1')).toBe(false);
|
||||
expect(config.isShipperOptedIn('test-shipper-1', 'test-event-1')).toBe(false);
|
||||
});
|
||||
|
||||
test('Returns `true` when `global.enabled: true` and shipper config exists and it is `true`', () => {
|
||||
const config = new OptInConfigService({
|
||||
global: {
|
||||
enabled: true,
|
||||
shippers: {
|
||||
'test-shipper-1': true,
|
||||
},
|
||||
},
|
||||
});
|
||||
expect(config.isShipperOptedIn('test-shipper-1')).toBe(true);
|
||||
expect(config.isShipperOptedIn('test-shipper-1', 'test-event-1')).toBe(true);
|
||||
});
|
||||
|
||||
test('Returns `false` when `global.enabled: false` and shipper config exists and it is `true`', () => {
|
||||
const config = new OptInConfigService({
|
||||
global: {
|
||||
enabled: false,
|
||||
shippers: {
|
||||
'test-shipper-1': true,
|
||||
},
|
||||
},
|
||||
});
|
||||
expect(config.isShipperOptedIn('test-shipper-1')).toBe(false);
|
||||
expect(config.isShipperOptedIn('test-shipper-1', 'test-event-1')).toBe(false);
|
||||
});
|
||||
|
||||
test('Returns `false` when `global.enabled: true` and shipper config exists and it is `false`', () => {
|
||||
const config = new OptInConfigService({
|
||||
global: {
|
||||
enabled: true,
|
||||
shippers: {
|
||||
'test-shipper-1': false,
|
||||
},
|
||||
},
|
||||
});
|
||||
expect(config.isShipperOptedIn('test-shipper-1')).toBe(false);
|
||||
expect(config.isShipperOptedIn('test-shipper-1', 'test-event-1')).toBe(false);
|
||||
});
|
||||
|
||||
test('Returns `false` when `global.enabled: false` and shipper config exists and it is `false`', () => {
|
||||
const config = new OptInConfigService({
|
||||
global: {
|
||||
enabled: false,
|
||||
shippers: {
|
||||
'test-shipper-1': false,
|
||||
},
|
||||
},
|
||||
});
|
||||
expect(config.isShipperOptedIn('test-shipper-1')).toBe(false);
|
||||
expect(config.isShipperOptedIn('test-shipper-1', 'test-event-1')).toBe(false);
|
||||
});
|
||||
|
||||
describe('with event_type config', () => {
|
||||
test('Returns `true` when `global.enabled: true`, `shipper: true` and `event: true` (no `event.shippers`)', () => {
|
||||
const config = new OptInConfigService({
|
||||
global: {
|
||||
enabled: true,
|
||||
shippers: {
|
||||
'test-shipper-1': true,
|
||||
},
|
||||
},
|
||||
event_types: {
|
||||
'test-event-1': {
|
||||
enabled: true,
|
||||
},
|
||||
},
|
||||
});
|
||||
expect(config.isShipperOptedIn('test-shipper-1', 'test-event-1')).toBe(true);
|
||||
});
|
||||
|
||||
test('Returns `true` when `global.enabled: true`, `shipper: true`, `event: true` (`event.shippers` exists but for others)', () => {
|
||||
const config = new OptInConfigService({
|
||||
global: {
|
||||
enabled: true,
|
||||
shippers: {
|
||||
'test-shipper-1': true,
|
||||
},
|
||||
},
|
||||
event_types: {
|
||||
'test-event-1': {
|
||||
enabled: true,
|
||||
shippers: {
|
||||
'test-shipper-2': false,
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
expect(config.isShipperOptedIn('test-shipper-1', 'test-event-1')).toBe(true);
|
||||
});
|
||||
|
||||
test('Returns `true` when `global.enabled: true`, `shipper: true`, `event: true` (`event.shipper: true`)', () => {
|
||||
const config = new OptInConfigService({
|
||||
global: {
|
||||
enabled: true,
|
||||
shippers: {
|
||||
'test-shipper-1': true,
|
||||
},
|
||||
},
|
||||
event_types: {
|
||||
'test-event-1': {
|
||||
enabled: true,
|
||||
shippers: {
|
||||
'test-shipper-1': true,
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
expect(config.isShipperOptedIn('test-shipper-1', 'test-event-1')).toBe(true);
|
||||
});
|
||||
|
||||
test('Returns `false` when `global.enabled: false`, `shipper: true`, `event: true` (`event.shipper: true`)', () => {
|
||||
const config = new OptInConfigService({
|
||||
global: {
|
||||
enabled: false,
|
||||
shippers: {
|
||||
'test-shipper-1': true,
|
||||
},
|
||||
},
|
||||
event_types: {
|
||||
'test-event-1': {
|
||||
enabled: true,
|
||||
shippers: {
|
||||
'test-shipper-1': true,
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
expect(config.isShipperOptedIn('test-shipper-1', 'test-event-1')).toBe(false);
|
||||
});
|
||||
|
||||
test('Returns `false` when `global.enabled: true`, `shipper: false`, `event: true` (`event.shipper: true`)', () => {
|
||||
const config = new OptInConfigService({
|
||||
global: {
|
||||
enabled: true,
|
||||
shippers: {
|
||||
'test-shipper-1': false,
|
||||
},
|
||||
},
|
||||
event_types: {
|
||||
'test-event-1': {
|
||||
enabled: true,
|
||||
shippers: {
|
||||
'test-shipper-1': true,
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
expect(config.isShipperOptedIn('test-shipper-1', 'test-event-1')).toBe(false);
|
||||
});
|
||||
|
||||
test('Returns `false` when `global.enabled: true`, `shipper: true`, `event: false` (`event.shipper: true`)', () => {
|
||||
const config = new OptInConfigService({
|
||||
global: {
|
||||
enabled: true,
|
||||
shippers: {
|
||||
'test-shipper-1': true,
|
||||
},
|
||||
},
|
||||
event_types: {
|
||||
'test-event-1': {
|
||||
enabled: false,
|
||||
shippers: {
|
||||
'test-shipper-1': true,
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
expect(config.isShipperOptedIn('test-shipper-1', 'test-event-1')).toBe(false);
|
||||
});
|
||||
|
||||
test('Returns `false` when `global.enabled: true`, `shipper: true`, `event: true` (`event.shipper: false`)', () => {
|
||||
const config = new OptInConfigService({
|
||||
global: {
|
||||
enabled: true,
|
||||
shippers: {
|
||||
'test-shipper-1': true,
|
||||
},
|
||||
},
|
||||
event_types: {
|
||||
'test-event-1': {
|
||||
enabled: true,
|
||||
shippers: {
|
||||
'test-shipper-1': false,
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
expect(config.isShipperOptedIn('test-shipper-1', 'test-event-1')).toBe(false);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
|
@ -1,71 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import type { OptInConfig, ShipperName } from './types';
|
||||
import type { EventType } from '../events';
|
||||
|
||||
export class OptInConfigService {
|
||||
constructor(private readonly optInConfig: OptInConfig) {}
|
||||
|
||||
/**
|
||||
* Is globally opted in?
|
||||
*/
|
||||
public isOptedIn(): boolean {
|
||||
return this.optInConfig.global.enabled;
|
||||
}
|
||||
|
||||
/**
|
||||
* Is the given event type opted in?
|
||||
* @param eventType the event type to check
|
||||
*/
|
||||
public isEventTypeOptedIn(eventType: EventType): boolean {
|
||||
if (!this.isOptedIn()) {
|
||||
return false;
|
||||
}
|
||||
// In case of not provided a specific eventType consent, we assume opted-in
|
||||
const isEventTypeOptedIn =
|
||||
(this.optInConfig.event_types && this.optInConfig.event_types[eventType]?.enabled) ?? true;
|
||||
|
||||
return isEventTypeOptedIn;
|
||||
}
|
||||
|
||||
/**
|
||||
* Is the given shipper opted in?
|
||||
* @param shipperName the shipper to check
|
||||
* @param eventType the event type to check for the shipper
|
||||
*/
|
||||
public isShipperOptedIn(shipperName: ShipperName, eventType?: EventType): boolean {
|
||||
if (!this.isOptedIn()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// In case of not provided a specific shipper consent, we assume opted-in
|
||||
const isShipperGloballyOptedIn: boolean =
|
||||
(this.optInConfig.global.shippers && this.optInConfig.global.shippers[shipperName]) ?? true;
|
||||
|
||||
if (!isShipperGloballyOptedIn) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (eventType) {
|
||||
if (!this.isEventTypeOptedIn(eventType)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const eventTypeOptInConfig =
|
||||
this.optInConfig.event_types && this.optInConfig.event_types[eventType];
|
||||
// In case of not provided a specific eventType-level shipper consent, we assume opted-in
|
||||
const isEventTypeShipperOptedIn: boolean =
|
||||
(eventTypeOptInConfig?.shippers && eventTypeOptInConfig.shippers[shipperName]) ?? true;
|
||||
|
||||
return isEventTypeShipperOptedIn;
|
||||
} else {
|
||||
return isShipperGloballyOptedIn;
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,123 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { ShippersRegistry } from './shippers_registry';
|
||||
import { shippersMock } from '../shippers/mocks';
|
||||
|
||||
// Unit tests for ShippersRegistry: registration uniqueness, the shared
// `allShippers` index, and the global-shippers fallback in
// `getShippersForEventType`.
describe('ShippersRegistry', () => {
  let shippersRegistry: ShippersRegistry;

  beforeEach(() => {
    shippersRegistry = new ShippersRegistry();
  });

  describe('Global Shippers', () => {
    test('adds a shipper without an error', () => {
      const shipper = shippersMock.createShipper();
      expect(shippersRegistry.allShippers.size).toBe(0);
      shippersRegistry.addGlobalShipper('testShipper', shipper);
      expect(shippersRegistry.allShippers.size).toBe(1);
    });

    test('fails to add the same shipper name twice (even when the shipper implementation is different)', () => {
      const shipper1 = shippersMock.createShipper();
      const shipper2 = shippersMock.createShipper();
      shippersRegistry.addGlobalShipper('testShipper', shipper1);
      expect(() =>
        shippersRegistry.addGlobalShipper('testShipper', shipper2)
      ).toThrowErrorMatchingInlineSnapshot(`"Shipper \\"testShipper\\" is already registered"`);
    });

    test('adds multiple shippers with different names (even when the shipper implementation is the same)', () => {
      const shipper = shippersMock.createShipper(); // Explicitly testing with the same shipper implementation

      expect(shippersRegistry.allShippers.size).toBe(0);
      shippersRegistry.addGlobalShipper('testShipper1', shipper);
      expect(shippersRegistry.allShippers.size).toBe(1);
      shippersRegistry.addGlobalShipper('testShipper2', shipper);
      expect(shippersRegistry.allShippers.size).toBe(2);
    });

    test('returns a global shipper if there is no event-type specific shipper', () => {
      const shipper = shippersMock.createShipper();
      const shipperName = 'testShipper';
      expect(shippersRegistry.allShippers.size).toBe(0);
      shippersRegistry.addGlobalShipper(shipperName, shipper);
      expect(shippersRegistry.allShippers.size).toBe(1);

      const shippersForEventType = shippersRegistry.getShippersForEventType(
        `RandomEvent${Date.now()}`
      );
      // Bracket access reaches the private map to assert the fallback IS the global map.
      // eslint-disable-next-line dot-notation
      expect(shippersForEventType).toBe(shippersRegistry['globalShippers']);
      expect(shippersForEventType.size).toBe(1);
      expect(shippersForEventType.get(shipperName)).toBe(shipper);
    });
  });

  describe('Event-Exclusive Shippers', () => {
    test('adds a shipper without an error', () => {
      const shipper = shippersMock.createShipper();
      expect(shippersRegistry.allShippers.size).toBe(0);
      shippersRegistry.addEventExclusiveShipper('testEvent', 'testShipper', shipper);
      expect(shippersRegistry.allShippers.size).toBe(1);
    });

    test('fails to add the same shipper name twice (even when the shipper implementation is different)', () => {
      const shipper1 = shippersMock.createShipper();
      const shipper2 = shippersMock.createShipper();
      shippersRegistry.addEventExclusiveShipper('testEvent', 'testShipper', shipper1);
      expect(() =>
        shippersRegistry.addEventExclusiveShipper('testEvent', 'testShipper', shipper2)
      ).toThrowErrorMatchingInlineSnapshot(
        `"testShipper is already registered for event-type testEvent"`
      );
    });

    test('adds multiple shippers with different names (even when the shipper implementation is the same)', () => {
      const shipper = shippersMock.createShipper(); // Explicitly testing with the same shipper implementation

      expect(shippersRegistry.allShippers.size).toBe(0);
      shippersRegistry.addEventExclusiveShipper('testEvent', 'testShipper1', shipper);
      expect(shippersRegistry.allShippers.size).toBe(1);
      shippersRegistry.addEventExclusiveShipper('testEvent', 'testShipper2', shipper);
      expect(shippersRegistry.allShippers.size).toBe(2);
    });

    test('adds the same shipper to different event types. The allShippers count does not increase', () => {
      const shipper = shippersMock.createShipper(); // Explicitly testing with the same shipper implementation

      expect(shippersRegistry.allShippers.size).toBe(0);
      shippersRegistry.addEventExclusiveShipper('testEvent1', 'testShipper', shipper);
      expect(shippersRegistry.allShippers.size).toBe(1);
      shippersRegistry.addEventExclusiveShipper('testEvent2', 'testShipper', shipper);
      expect(shippersRegistry.allShippers.size).toBe(1); // This is still 1 because the shipper is the same
    });

    test('returns an event-specific shipper', () => {
      const shipper = shippersMock.createShipper();
      const shipperName = 'testShipper';
      const eventTypeName = 'testEvent';
      expect(shippersRegistry.allShippers.size).toBe(0);
      shippersRegistry.addEventExclusiveShipper(eventTypeName, shipperName, shipper);
      expect(shippersRegistry.allShippers.size).toBe(1);

      const shippersForEventType = shippersRegistry.getShippersForEventType(eventTypeName);
      expect(shippersForEventType.size).toBe(1);
      expect(shippersForEventType.get(shipperName)).toBe(shipper);

      // No event-specific shipper found, returns global but no shippers found in global
      const shippersForEventTypeNotFound = shippersRegistry.getShippersForEventType(
        `RandomEvent${Date.now()}`
      );
      // eslint-disable-next-line dot-notation
      expect(shippersForEventTypeNotFound).toBe(shippersRegistry['globalShippers']);
      expect(shippersForEventTypeNotFound.size).toBe(0);
    });
  });
});
|
|
@ -1,73 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import type { EventType } from '../events';
|
||||
import type { IShipper } from '../shippers';
|
||||
import type { ShipperName } from './types';
|
||||
|
||||
/**
 * Holds the map of the { [shipperName]: shipperInstance }
 */
export type ShippersMap = Map<ShipperName, IShipper>;
|
||||
|
||||
export class ShippersRegistry {
|
||||
/**
|
||||
* Holds all the shippers: global and eventTypeExclusive.
|
||||
* This helps to avoid looping over all the shippers when we just need them all.
|
||||
*/
|
||||
public readonly allShippers: ShippersMap = new Map();
|
||||
/**
|
||||
* Holds the shippers that are not registered as exclusive to any event-type
|
||||
*/
|
||||
private readonly globalShippers: ShippersMap = new Map();
|
||||
/**
|
||||
* Holds the shippers that are exclusive to an event-type in the format of { [eventType]: ShippersMap }
|
||||
*/
|
||||
private readonly eventTypeExclusiveShippers: Map<EventType, ShippersMap> = new Map();
|
||||
|
||||
/**
|
||||
* Adds shipper to the registry.
|
||||
* @param shipperName The unique name of the shipper.
|
||||
* @param shipper The initialized shipper.
|
||||
*/
|
||||
public addGlobalShipper(shipperName: ShipperName, shipper: IShipper) {
|
||||
if (this.globalShippers.get(shipperName)) {
|
||||
throw new Error(`Shipper "${shipperName}" is already registered`);
|
||||
}
|
||||
this.globalShippers.set(shipperName, shipper);
|
||||
this.allShippers.set(shipperName, shipper);
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds an event-type exclusive shipper.
|
||||
* @param eventType The name of the event type
|
||||
* @param shipperName The unique name for the shipper.
|
||||
* @param shipper The initialized shipper.
|
||||
*/
|
||||
public addEventExclusiveShipper(
|
||||
eventType: EventType,
|
||||
shipperName: ShipperName,
|
||||
shipper: IShipper
|
||||
) {
|
||||
const eventExclusiveMap = this.eventTypeExclusiveShippers.get(eventType) || new Map();
|
||||
if (eventExclusiveMap.get(shipperName)) {
|
||||
throw new Error(`${shipperName} is already registered for event-type ${eventType}`);
|
||||
}
|
||||
eventExclusiveMap.set(shipperName, shipper);
|
||||
this.eventTypeExclusiveShippers.set(eventType, eventExclusiveMap);
|
||||
this.allShippers.set(shipperName, shipper);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the shippers that must be used for the specified event type.
|
||||
* @param eventType The name of the event type.
|
||||
*/
|
||||
public getShippersForEventType(eventType: EventType): ShippersMap {
|
||||
return this.eventTypeExclusiveShippers.get(eventType) || this.globalShippers;
|
||||
}
|
||||
}
|
|
@ -1,226 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import type { Observable } from 'rxjs';
|
||||
|
||||
// If we are going to export this to a separate NPM module in the future,
|
||||
// we'll need to revisit this import.
|
||||
import type { Logger } from '@kbn/logging';
|
||||
|
||||
import type { IShipper } from '../shippers';
|
||||
import type { EventType, TelemetryCounter } from '../events';
|
||||
import type { RootSchema } from '../schema';
|
||||
|
||||
/**
 * General settings of the analytics client.
 * Also handed to every shipper on construction (see {@link ShipperClassConstructor}).
 */
export interface AnalyticsClientInitContext {
  /**
   * Boolean indicating if it's running in developer mode.
   */
  isDev: boolean;
  /**
   * Specify if the shippers should send their data to the production or staging environments.
   */
  sendTo: 'production' | 'staging';
  /**
   * Application-provided logger.
   */
  logger: Logger;
}
|
||||
|
||||
/**
 * Shipper Name used for indexed structures. Only used to improve the readability of the types
 */
export type ShipperName = string;

/**
 * Constructor of a {@link IShipper}
 */
export interface ShipperClassConstructor<Shipper extends IShipper, Config> {
  /**
   * The shipper's unique name
   */
  shipperName: ShipperName;

  /**
   * The constructor
   * @param config The shipper's custom config
   * @param initContext Common context {@link AnalyticsClientInitContext}
   */
  new (config: Config, initContext: AnalyticsClientInitContext): Shipper;
}

/**
 * Optional options to register a shipper
 */
export interface RegisterShipperOpts {
  /**
   * List of event types that will be received only by this shipper.
   * @deprecated
   * @internal Set as internal and deprecated until we come up with the best design for this.
   * Not in the scope of the initial MVP.
   */
  exclusiveEventTypes?: EventType[];
}
|
||||
|
||||
/**
 * Sets whether a type of event is enabled/disabled globally or per shipper.
 */
export interface OptInConfigPerType {
  /**
   * The event type is globally enabled.
   */
  enabled: boolean;
  /**
   * Controls if an event type should be disabled for a specific type of shipper.
   * A shipper missing from this map is treated as opted-in.
   * @example If the event type is automatically tracked by ShipperA, the config would look like:
   * ```
   * {
   *   enabled: true,
   *   shippers: {
   *     ShipperA: false
   *   }
   * }
   * ```
   */
  shippers?: Record<ShipperName, boolean | undefined>;
}

/**
 * Options for the optIn API
 */
export interface OptInConfig {
  /**
   * Controls the global enabled/disabled behaviour of the client and shippers.
   */
  global: OptInConfigPerType;
  /**
   * Controls if an event type should be disabled for a specific type of shipper.
   * An event type missing from this map is treated as opted-in.
   * @example If "clicks" are automatically tracked by ShipperA, the config would look like:
   * ```
   * {
   *   global: { enabled: true },
   *   event_types: {
   *     click: {
   *       enabled: true,
   *       shippers: {
   *         ShipperA: false
   *       }
   *     }
   *   }
   * }
   * ```
   */
  event_types?: Record<EventType, OptInConfigPerType | undefined>;
}
|
||||
|
||||
/**
 * ContextProviderName used for indexed structures. Only used to improve the readability of the types
 */
export type ContextProviderName = string;

/**
 * Definition of a context provider
 */
export interface ContextProviderOpts<Context> {
  /**
   * The name of the provider.
   */
  name: ContextProviderName;
  /**
   * Observable that emits the custom context.
   */
  context$: Observable<Context>;
  /**
   * Schema declaring and documenting the expected output in the context$
   *
   * @remark During development, it may be used to validate the provided values.
   */
  schema: RootSchema<Context>;
}

/**
 * Definition of an Event Type.
 */
export interface EventTypeOpts<EventTypeData> {
  /**
   * The event type's unique name.
   */
  eventType: EventType;
  /**
   * Schema declaring and documenting the expected structure of this event type.
   *
   * @remark During development, it may be used to validate the provided values.
   */
  schema: RootSchema<EventTypeData>;
}
|
||||
|
||||
/**
 * Analytics client's public APIs
 */
export interface IAnalyticsClient {
  /**
   * Reports a telemetry event.
   * @param eventType The event type registered via the `registerEventType` API.
   * @param eventData The properties matching the schema declared in the `registerEventType` API.
   *
   * @track-adoption
   */
  reportEvent: <EventTypeData extends object>(
    eventType: EventType,
    eventData: EventTypeData
  ) => void;
  /**
   * Registers the event type that will be emitted via the reportEvent API.
   * @param eventTypeOps The definition of the event type {@link EventTypeOpts}.
   */
  registerEventType: <EventTypeData>(eventTypeOps: EventTypeOpts<EventTypeData>) => void;

  /**
   * Set up the shipper that will be used to report the telemetry events.
   * @param Shipper The {@link IShipper} class to instantiate the shipper.
   * @param shipperConfig The config specific to the Shipper to instantiate.
   * @param opts Additional options to register the shipper {@link RegisterShipperOpts}.
   */
  registerShipper: <Shipper extends IShipper, ShipperConfig>(
    Shipper: ShipperClassConstructor<Shipper, ShipperConfig>,
    shipperConfig: ShipperConfig,
    opts?: RegisterShipperOpts
  ) => void;
  /**
   * Used to control the user's consent to report the data.
   * In the advanced mode, it allows to "cherry-pick" which events and shippers are enabled/disabled.
   * @param optInConfig {@link OptInConfig}
   */
  optIn: (optInConfig: OptInConfig) => void;
  /**
   * Registers the context provider to enrich any reported events.
   * @param contextProviderOpts {@link ContextProviderOpts}
   *
   * @track-adoption
   */
  registerContextProvider: <Context>(contextProviderOpts: ContextProviderOpts<Context>) => void;
  /**
   * Removes the context provider and stops enriching the events from its context.
   * @param contextProviderName The name of the context provider to remove.
   */
  removeContextProvider: (contextProviderName: ContextProviderName) => void;
  /**
   * Observable to emit the stats of the processed events.
   */
  readonly telemetryCounter$: Observable<TelemetryCounter>;
  /**
   * Forces all shippers to send all their enqueued events and fulfills the returned promise.
   */
  flush: () => Promise<void>;
  /**
   * Stops the client. Flushing any pending events in the process.
   */
  shutdown: () => Promise<void>;
}
|
|
@ -1,15 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
// Type-only re-exports forming the public surface of the `events` module.
export type {
  Event,
  EventType,
  EventContext,
  TelemetryCounter,
  TelemetryCounterType,
} from './types';
|
|
@ -1,128 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import type { ShipperName } from '../analytics_client';
|
||||
|
||||
/**
 * Definition of the context that can be appended to the events through the {@link IAnalyticsClient.registerContextProvider}.
 * All fields are optional; extra keys beyond the ones listed are allowed.
 */
export interface EventContext {
  /**
   * The UUID of the cluster
   */
  cluster_uuid?: string;
  /**
   * The name of the cluster.
   */
  cluster_name?: string;
  /**
   * The license ID.
   */
  license_id?: string;
  /**
   * The unique user ID.
   */
  userId?: string;
  /**
   * The Cloud ID.
   */
  cloudId?: string;
  /**
   * `true` if the user is logged in via the Elastic Cloud authentication provider.
   */
  isElasticCloudUser?: boolean;
  /**
   * The product's version.
   */
  version?: string;
  /**
   * The name of the current page.
   */
  pageName?: string;
  /**
   * The current application ID.
   */
  applicationId?: string;
  /**
   * The current entity ID (dashboard ID, visualization ID, etc.).
   */
  entityId?: string;

  /**
   * Additional keys are allowed.
   */
  [key: string]: unknown;
}
|
||||
|
||||
/**
 * Event Type used for indexed structures. Only used to improve the readability of the types
 */
export type EventType = string;

/**
 * Indicates if the event contains data about succeeded, failed or dropped events:
 * - enqueued: The event was accepted and will be sent to the shippers when they become available (and opt-in === true).
 * - sent_to_shipper: The event was sent to at least one shipper.
 * - succeeded: The event was successfully sent by the shipper.
 * - failed: There was an error when processing/shipping the event. Refer to the Telemetry Counter's code for the reason.
 * - dropped: The event was dropped from the queue. Refer to the Telemetry Counter's code for the reason.
 */
export type TelemetryCounterType =
  | 'enqueued'
  | 'sent_to_shipper'
  | 'succeeded'
  | 'failed'
  | 'dropped';

/**
 * Shape of the events emitted by the telemetryCounter$ observable
 */
export interface TelemetryCounter {
  /**
   * {@link TelemetryCounterType}
   */
  type: TelemetryCounterType;
  /**
   * Who emitted the event? It can be "client" or the name of the shipper.
   */
  source: 'client' | ShipperName;
  /**
   * The event type the success/failure/drop event refers to.
   */
  event_type: EventType;
  /**
   * Code to provide additional information about the success or failure. Examples are 200/400/504/ValidationError/UnknownError
   */
  code: string;
  /**
   * The number of events that this counter refers to.
   */
  count: number;
}

/**
 * Definition of the full event structure
 */
export interface Event<Properties = Record<string, unknown>> {
  /**
   * The time the event was generated in ISO format.
   */
  timestamp: string;
  /**
   * The event type.
   */
  event_type: EventType;
  /**
   * The specific properties of the event type.
   */
  properties: Properties;
  /**
   * The {@link EventContext} enriched during the processing pipeline.
   */
  context: EventContext;
}
|
|
@ -1,10 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
// Re-exports the package's mocks from their respective subfolders.
export { shippersMock } from './shippers/mocks';
export { analyticsClientMock } from './analytics_client/mocks';
|
|
@ -1,22 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
// Type-only re-exports forming the public surface of the `schema` module.
export type {
  RootSchema,
  SchemaObject,
  SchemaArray,
  SchemaChildValue,
  SchemaMeta,
  SchemaValue,
  SchemaMetaOptional,
  PossibleSchemaTypes,
  AllowedSchemaBooleanTypes,
  AllowedSchemaNumberTypes,
  AllowedSchemaStringTypes,
  AllowedSchemaTypes,
} from './types';
|
|
@ -1,650 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { PossibleSchemaTypes, RootSchema, SchemaValue } from './types';
|
||||
|
||||
describe('schema types', () => {
|
||||
describe('PossibleSchemaTypes', () => {
|
||||
test('it should only allow "string" types', () => {
|
||||
let valueType: PossibleSchemaTypes<string> = 'keyword';
|
||||
valueType = 'text';
|
||||
valueType = 'date';
|
||||
|
||||
// @ts-expect-error
|
||||
valueType = 'boolean';
|
||||
// @ts-expect-error
|
||||
valueType = 'long';
|
||||
// @ts-expect-error
|
||||
valueType = 'integer';
|
||||
// @ts-expect-error
|
||||
valueType = 'short';
|
||||
// @ts-expect-error
|
||||
valueType = 'byte';
|
||||
// @ts-expect-error
|
||||
valueType = 'double';
|
||||
// @ts-expect-error
|
||||
valueType = 'float';
|
||||
|
||||
expect(valueType).not.toBeUndefined(); // <-- Only to stop the var-not-used complain
|
||||
});
|
||||
test('it should only allow "number" types', () => {
|
||||
let valueType: PossibleSchemaTypes<number> = 'long';
|
||||
valueType = 'integer';
|
||||
valueType = 'short';
|
||||
valueType = 'byte';
|
||||
valueType = 'double';
|
||||
valueType = 'float';
|
||||
valueType = 'date';
|
||||
|
||||
// @ts-expect-error
|
||||
valueType = 'boolean';
|
||||
// @ts-expect-error
|
||||
valueType = 'keyword';
|
||||
|
||||
expect(valueType).not.toBeUndefined(); // <-- Only to stop the var-not-used complain
|
||||
});
|
||||
test('it should only allow "boolean" types', () => {
|
||||
let valueType: PossibleSchemaTypes<boolean> = 'boolean';
|
||||
// @ts-expect-error
|
||||
valueType = 'integer';
|
||||
// @ts-expect-error
|
||||
valueType = 'short';
|
||||
// @ts-expect-error
|
||||
valueType = 'byte';
|
||||
// @ts-expect-error
|
||||
valueType = 'double';
|
||||
// @ts-expect-error
|
||||
valueType = 'float';
|
||||
// @ts-expect-error
|
||||
valueType = 'date';
|
||||
|
||||
// @ts-expect-error
|
||||
valueType = 'keyword';
|
||||
|
||||
expect(valueType).not.toBeUndefined(); // <-- Only to stop the var-not-used complain
|
||||
});
|
||||
});
|
||||
|
||||
describe('SchemaValue', () => {
|
||||
describe('Pass Through', () => {
|
||||
test('it should allow "pass_through" and enforce the _meta.description', () => {
|
||||
let valueType: SchemaValue<string> = {
|
||||
type: 'pass_through',
|
||||
_meta: {
|
||||
description: 'Some description',
|
||||
},
|
||||
};
|
||||
|
||||
valueType = {
|
||||
type: 'pass_through',
|
||||
_meta: {
|
||||
description: 'Some description',
|
||||
optional: false,
|
||||
},
|
||||
};
|
||||
|
||||
valueType = {
|
||||
type: 'pass_through',
|
||||
_meta: {
|
||||
description: 'Some description',
|
||||
// @ts-expect-error optional can't be true when the types don't set the value as optional
|
||||
optional: true,
|
||||
},
|
||||
};
|
||||
|
||||
// @ts-expect-error because it's missing the _meta.description
|
||||
valueType = { type: 'pass_through' };
|
||||
expect(valueType).not.toBeUndefined(); // <-- Only to stop the var-not-used complain
|
||||
});
|
||||
test('it should enforce `_meta.optional: true`', () => {
|
||||
let valueType: SchemaValue<string | undefined> = {
|
||||
type: 'pass_through',
|
||||
_meta: {
|
||||
description: 'Some description',
|
||||
optional: true,
|
||||
},
|
||||
};
|
||||
|
||||
valueType = {
|
||||
type: 'pass_through',
|
||||
_meta: {
|
||||
description: 'Some description',
|
||||
// @ts-expect-error because optional can't be false when the value can be undefined
|
||||
optional: false,
|
||||
},
|
||||
};
|
||||
expect(valueType).not.toBeUndefined(); // <-- Only to stop the var-not-used complain
|
||||
});
|
||||
});
|
||||
|
||||
describe('Plain value', () => {
|
||||
test('it should allow the correct type and enforce the _meta.description', () => {
|
||||
let valueType: SchemaValue<string> = {
|
||||
type: 'keyword',
|
||||
_meta: {
|
||||
description: 'Some description',
|
||||
},
|
||||
};
|
||||
|
||||
valueType = {
|
||||
type: 'keyword',
|
||||
_meta: {
|
||||
description: 'Some description',
|
||||
optional: false,
|
||||
},
|
||||
};
|
||||
|
||||
valueType = {
|
||||
// @ts-expect-error because the type does not match
|
||||
type: 'long',
|
||||
_meta: {
|
||||
description: 'Some description',
|
||||
optional: false,
|
||||
},
|
||||
};
|
||||
|
||||
valueType = {
|
||||
type: 'keyword',
|
||||
_meta: {
|
||||
description: 'Some description',
|
||||
// @ts-expect-error optional can't be true when the types don't set the value as optional
|
||||
optional: true,
|
||||
},
|
||||
};
|
||||
|
||||
// @ts-expect-error because it's missing the _meta.description
|
||||
valueType = { type: 'keyword' };
|
||||
expect(valueType).not.toBeUndefined(); // <-- Only to stop the var-not-used complain
|
||||
});
|
||||
test('it should enforce `_meta.optional: true`', () => {
|
||||
let valueType: SchemaValue<string | undefined> = {
|
||||
type: 'keyword',
|
||||
_meta: {
|
||||
description: 'Some description',
|
||||
optional: true,
|
||||
},
|
||||
};
|
||||
|
||||
valueType = {
|
||||
type: 'keyword',
|
||||
_meta: {
|
||||
description: 'Some description',
|
||||
// @ts-expect-error because optional can't be false when the value can be undefined
|
||||
optional: false,
|
||||
},
|
||||
};
|
||||
expect(valueType).not.toBeUndefined(); // <-- Only to stop the var-not-used complain
|
||||
});
|
||||
});
|
||||
|
||||
describe('Date value', () => {
|
||||
test('it should allow the correct type and enforce the _meta.description', () => {
|
||||
let valueType: SchemaValue<Date> = {
|
||||
type: 'date',
|
||||
_meta: {
|
||||
description: 'Some description',
|
||||
},
|
||||
};
|
||||
|
||||
valueType = {
|
||||
type: 'keyword',
|
||||
_meta: {
|
||||
description: 'Some description',
|
||||
optional: false,
|
||||
},
|
||||
};
|
||||
|
||||
valueType = {
|
||||
// @ts-expect-error because the type does not match
|
||||
type: 'long',
|
||||
_meta: {
|
||||
description: 'Some description',
|
||||
optional: false,
|
||||
},
|
||||
};
|
||||
|
||||
valueType = {
|
||||
type: 'keyword',
|
||||
_meta: {
|
||||
description: 'Some description',
|
||||
// @ts-expect-error optional can't be true when the types don't set the value as optional
|
||||
optional: true,
|
||||
},
|
||||
};
|
||||
|
||||
// @ts-expect-error because it's missing the _meta.description
|
||||
valueType = { type: 'date' };
|
||||
expect(valueType).not.toBeUndefined(); // <-- Only to stop the var-not-used complain
|
||||
});
|
||||
test('it should enforce `_meta.optional: true`', () => {
|
||||
let valueType: SchemaValue<Date | undefined> = {
|
||||
type: 'date',
|
||||
_meta: {
|
||||
description: 'Some description',
|
||||
optional: true,
|
||||
},
|
||||
};
|
||||
|
||||
valueType = {
|
||||
type: 'date',
|
||||
_meta: {
|
||||
description: 'Some description',
|
||||
// @ts-expect-error because optional can't be false when the value can be undefined
|
||||
optional: false,
|
||||
},
|
||||
};
|
||||
expect(valueType).not.toBeUndefined(); // <-- Only to stop the var-not-used complain
|
||||
});
|
||||
});
|
||||
|
||||
describe('Object value', () => {
|
||||
test('it should allow "pass_through" and enforce the _meta.description', () => {
|
||||
let valueType: SchemaValue<{ a_value: string }> = {
|
||||
type: 'pass_through',
|
||||
_meta: {
|
||||
description: 'Some description',
|
||||
},
|
||||
};
|
||||
|
||||
// @ts-expect-error because it's missing the _meta.description
|
||||
valueType = { type: 'pass_through' };
|
||||
expect(valueType).not.toBeUndefined(); // <-- Only to stop the var-not-used complain
|
||||
});
|
||||
|
||||
test('it should expect the proper object-schema definition, and allows some _meta at the object level as well', () => {
|
||||
let valueType: SchemaValue<{ a_value: string }> = {
|
||||
properties: {
|
||||
a_value: {
|
||||
type: 'keyword',
|
||||
_meta: {
|
||||
description: 'Some description',
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
valueType = {
|
||||
properties: {
|
||||
a_value: {
|
||||
type: 'keyword',
|
||||
_meta: {
|
||||
description: 'Some description',
|
||||
optional: false,
|
||||
},
|
||||
},
|
||||
},
|
||||
_meta: {
|
||||
description: 'Description at the object level',
|
||||
},
|
||||
};
|
||||
|
||||
valueType = {
|
||||
properties: {
|
||||
a_value: {
|
||||
type: 'keyword',
|
||||
_meta: {
|
||||
description: 'Some description',
|
||||
optional: false,
|
||||
},
|
||||
},
|
||||
// @ts-expect-error b_value does not exist in the object definition
|
||||
b_value: {
|
||||
type: 'keyword',
|
||||
_meta: {
|
||||
description: 'Some description',
|
||||
optional: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
_meta: {
|
||||
description: 'Description at the object level',
|
||||
},
|
||||
};
|
||||
|
||||
// @ts-expect-error because it's missing object properties
|
||||
valueType = { properties: {} };
|
||||
expect(valueType).not.toBeUndefined(); // <-- Only to stop the var-not-used complain
|
||||
});
|
||||
|
||||
test('it should enforce `_meta.optional: true`', () => {
|
||||
const objectValueType: SchemaValue<{ a_value: string } | undefined> = {
|
||||
properties: {
|
||||
a_value: {
|
||||
type: 'keyword',
|
||||
_meta: {
|
||||
description: 'Some description',
|
||||
},
|
||||
},
|
||||
},
|
||||
_meta: {
|
||||
description: 'Optional object',
|
||||
optional: true,
|
||||
},
|
||||
};
|
||||
expect(objectValueType).not.toBeUndefined(); // <-- Only to stop the var-not-used complain
|
||||
|
||||
let valueType: SchemaValue<{ a_value?: string }> = {
|
||||
properties: {
|
||||
a_value: {
|
||||
type: 'keyword',
|
||||
_meta: {
|
||||
description: 'Some description',
|
||||
optional: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
valueType = {
|
||||
properties: {
|
||||
a_value: {
|
||||
type: 'keyword',
|
||||
// @ts-expect-error because it should provide optional: true
|
||||
_meta: {
|
||||
description: 'Some description',
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
expect(valueType).not.toBeUndefined(); // <-- Only to stop the var-not-used complain
|
||||
});
|
||||
});
|
||||
|
||||
describe('Array value', () => {
|
||||
test('it should allow "pass_through" and enforce the _meta.description', () => {
|
||||
let valueType: SchemaValue<Array<{ a_value: string }>> = {
|
||||
type: 'pass_through',
|
||||
_meta: {
|
||||
description: 'Some description',
|
||||
},
|
||||
};
|
||||
|
||||
// @ts-expect-error because it's missing the _meta.description
|
||||
valueType = { type: 'pass_through' };
|
||||
expect(valueType).not.toBeUndefined(); // <-- Only to stop the var-not-used complain
|
||||
});
|
||||
|
||||
test('it should expect the proper array-schema definition, and allows some _meta at the object level as well', () => {
|
||||
let valueType: SchemaValue<Array<{ a_value: string }>> = {
|
||||
type: 'array',
|
||||
items: {
|
||||
properties: {
|
||||
a_value: {
|
||||
type: 'keyword',
|
||||
_meta: {
|
||||
description: 'Some description',
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
valueType = {
|
||||
type: 'array',
|
||||
items: {
|
||||
properties: {
|
||||
a_value: {
|
||||
type: 'keyword',
|
||||
_meta: {
|
||||
description: 'Some description',
|
||||
optional: false,
|
||||
},
|
||||
},
|
||||
},
|
||||
_meta: {
|
||||
description: 'Description at the object level',
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
// @ts-expect-error because it's missing the items definition
|
||||
valueType = { type: 'array' };
|
||||
// @ts-expect-error because it's missing the items definition
|
||||
valueType = { type: 'array', items: {} };
|
||||
// @ts-expect-error because it's missing the items' properties definition
|
||||
valueType = { type: 'array', items: { properties: {} } };
|
||||
expect(valueType).not.toBeUndefined(); // <-- Only to stop the var-not-used complain
|
||||
});
|
||||
|
||||
test('it should enforce `_meta.optional: true`', () => {
|
||||
const arrayValueType: SchemaValue<Array<{ a_value: string }> | undefined> = {
|
||||
type: 'array',
|
||||
items: {
|
||||
properties: {
|
||||
a_value: {
|
||||
type: 'keyword',
|
||||
_meta: {
|
||||
description: 'Some description',
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
_meta: {
|
||||
description: 'Optional object',
|
||||
optional: true,
|
||||
},
|
||||
};
|
||||
expect(arrayValueType).not.toBeUndefined(); // <-- Only to stop the var-not-used complain
|
||||
|
||||
const objectValueType: SchemaValue<Array<{ a_value: string } | undefined>> = {
|
||||
type: 'array',
|
||||
items: {
|
||||
properties: {
|
||||
a_value: {
|
||||
type: 'keyword',
|
||||
_meta: {
|
||||
description: 'Some description',
|
||||
},
|
||||
},
|
||||
},
|
||||
_meta: {
|
||||
description: 'Optional object',
|
||||
optional: true,
|
||||
},
|
||||
},
|
||||
};
|
||||
expect(objectValueType).not.toBeUndefined(); // <-- Only to stop the var-not-used complain
|
||||
|
||||
let valueType: SchemaValue<Array<{ a_value?: string }>> = {
|
||||
type: 'array',
|
||||
items: {
|
||||
properties: {
|
||||
a_value: {
|
||||
type: 'keyword',
|
||||
_meta: {
|
||||
description: 'Some description',
|
||||
optional: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
valueType = {
|
||||
type: 'array',
|
||||
items: {
|
||||
properties: {
|
||||
a_value: {
|
||||
type: 'keyword',
|
||||
// @ts-expect-error because it should provide optional: true
|
||||
_meta: {
|
||||
description: 'Some description',
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
expect(valueType).not.toBeUndefined(); // <-- Only to stop the var-not-used complain
|
||||
});
|
||||
|
||||
test('it should expect support readonly arrays', () => {
|
||||
let valueType: SchemaValue<ReadonlyArray<{ a_value: string }>> = {
|
||||
type: 'array',
|
||||
items: {
|
||||
properties: {
|
||||
a_value: {
|
||||
type: 'keyword',
|
||||
_meta: {
|
||||
description: 'Some description',
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
valueType = {
|
||||
type: 'array',
|
||||
items: {
|
||||
properties: {
|
||||
a_value: {
|
||||
type: 'keyword',
|
||||
_meta: {
|
||||
description: 'Some description',
|
||||
optional: false,
|
||||
},
|
||||
},
|
||||
},
|
||||
_meta: {
|
||||
description: 'Description at the object level',
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
// @ts-expect-error because it's missing the items definition
|
||||
valueType = { type: 'array' };
|
||||
// @ts-expect-error because it's missing the items definition
|
||||
valueType = { type: 'array', items: {} };
|
||||
// @ts-expect-error because it's missing the items' properties definition
|
||||
valueType = { type: 'array', items: { properties: {} } };
|
||||
expect(valueType).not.toBeUndefined(); // <-- Only to stop the var-not-used complain
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('RootSchema', () => {
  // Identity helper: exists only so TS type-checks the literal against RootSchema<Base>.
  // These tests assert nothing at runtime; failures surface as compile errors.
  const registerSchema = <Base>(schema: RootSchema<Base>) => schema;
  test('it works with the explicit types', () => {
    // The schema literal must satisfy the explicitly provided event interface
    registerSchema<{
      my_keyword: string;
      my_number?: number;
      my_complex_unknown_meta_object: Record<string, unknown>;
      my_array_of_str: string[];
      my_object: { my_timestamp: string };
      my_array_of_objects: Array<{ my_bool_prop: boolean }>;
    }>({
      my_keyword: {
        type: 'keyword',
        _meta: {
          description: 'Represents the key property...',
        },
      },
      my_number: {
        // Optional field in the interface => `_meta.optional: true` is required
        type: 'long',
        _meta: {
          description: 'Indicates the number of times...',
          optional: true,
        },
      },
      my_complex_unknown_meta_object: {
        type: 'pass_through',
        _meta: {
          description: 'Unknown object that contains the key-values...',
        },
      },
      my_array_of_str: {
        type: 'array',
        items: {
          type: 'text',
          _meta: {
            description: 'List of tags...',
          },
        },
      },
      my_object: {
        properties: {
          my_timestamp: {
            type: 'date',
            _meta: {
              description: 'timestamp when the user...',
            },
          },
        },
      },
      my_array_of_objects: {
        type: 'array',
        items: {
          properties: {
            my_bool_prop: {
              type: 'boolean',
              _meta: {
                description: '`true` when...',
              },
            },
          },
        },
      },
    });
  });
  test('it works with implicit types', () => {
    // With no explicit generic, Base is inferred from the literal itself
    registerSchema({});
    registerSchema({
      my_keyword: {
        type: 'keyword',
        _meta: {
          description: 'Represents the key property...',
        },
      },
      my_number: {
        type: 'long',
        _meta: {
          description: 'Indicates the number of times...',
          optional: true,
        },
      },
      my_complex_unknown_meta_object: {
        type: 'pass_through',
        _meta: {
          description: 'Unknown object that contains the key-values...',
        },
      },
      my_array_of_str: {
        type: 'array',
        items: {
          type: 'text',
          _meta: {
            description: 'List of tags...',
          },
        },
      },
      my_object: {
        properties: {
          my_timestamp: {
            type: 'date',
            _meta: {
              description: 'timestamp when the user...',
            },
          },
        },
      },
      my_array_of_objects: {
        type: 'array',
        items: {
          properties: {
            my_bool_prop: {
              type: 'boolean',
              _meta: {
                description: '`true` when...',
              },
            },
          },
        },
      },
    });
  });
});
|
||||
});
|
|
@ -1,184 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
/** Schema types that may describe a numeric value ('date' is valid for both numbers and strings) **/
export type AllowedSchemaNumberTypes =
  | 'long'
  | 'integer'
  | 'short'
  | 'byte'
  | 'double'
  | 'float'
  | 'date';
/** Schema types that may describe a string value **/
export type AllowedSchemaStringTypes = 'keyword' | 'text' | 'date';
/** Schema type describing a boolean value **/
export type AllowedSchemaBooleanTypes = 'boolean';

/**
 * Possible type values in the schema
 */
export type AllowedSchemaTypes =
  | AllowedSchemaNumberTypes
  | AllowedSchemaStringTypes
  | AllowedSchemaBooleanTypes;

/**
 * Helper to ensure the declared types match the schema types:
 * maps a TS value type to the subset of schema `type` strings allowed for it.
 */
export type PossibleSchemaTypes<Value> = Value extends string | Date
  ? AllowedSchemaStringTypes
  : Value extends number
  ? AllowedSchemaNumberTypes
  : Value extends boolean
  ? AllowedSchemaBooleanTypes
  : // allow any schema type from the union if typescript is unable to resolve the exact U type
    AllowedSchemaTypes;

/**
 * Schema to define a primitive value
 */
export interface SchemaChildValue<Value> {
  /** The type of the value (constrained by the TS type of `Value`) */
  type: PossibleSchemaTypes<NonNullable<Value>>;
  /** Meta properties of the value: description and is optional */
  _meta: {
    /** A description of the value */
    description: string; // Intentionally enforcing the descriptions here
  } & SchemaMetaOptional<Value>;
}

/**
 * Type that defines all the possible values that the Schema accepts.
 * These types definitions are helping to identify earlier the possible missing `properties` nesting when
 * manually defining the schemas.
 */
export type SchemaValue<Value> =
  // Always allow the pass_through no matter what the value is
  | {
      /** Type specification of a pass through object */
      type: 'pass_through';
      /** Meta properties of the pass through: description and is optional */
      _meta: {
        /** A description of the value */
        description: string; // Intentionally enforcing the descriptions here
      } & SchemaMetaOptional<Value>;
    }
  | (unknown extends Value
      ? // If the Value is unknown (TS can't infer the type), allow any type of schema
        SchemaArray<unknown, Value> | SchemaObject<Value> | SchemaChildValue<Value>
      : // Otherwise, try to infer the type and enforce the schema
      // (arrays checked first, then Date — which is an object but must be a primitive schema — then plain objects)
      NonNullable<Value> extends Array<infer U> | ReadonlyArray<infer U>
      ? SchemaArray<U, Value>
      : NonNullable<Value> extends Date
      ? SchemaChildValue<Value>
      : NonNullable<Value> extends object
      ? SchemaObject<Value>
      : SchemaChildValue<Value>);

/**
 * Enforces { optional: true } if the value can be undefined,
 * and { optional?: false } (i.e. omitted or false) if it cannot.
 * When `Value` is unknown, either is accepted.
 */
export type SchemaMetaOptional<Value> = unknown extends Value
  ? { optional?: boolean }
  : undefined extends Value
  ? { optional: true }
  : { optional?: false };
|
||||
|
||||
/**
 * Schema meta with optional description.
 * Shared by the object- and array-level schemas, where `_meta` itself is optional.
 */
export interface SchemaMeta<Value> {
  /** Meta properties of the value: description and is optional */
  _meta?: {
    /** A description of the value */
    description?: string;
  } & SchemaMetaOptional<Value>;
}

/**
 * Schema to represent an array
 */
export interface SchemaArray<Value, Base> extends SchemaMeta<Base> {
  /** The type must be an array */
  type: 'array';
  /** The schema of the items in the array is defined in the `items` property */
  items: SchemaValue<Value>;
}

/**
 * Schema to represent an object
 */
export interface SchemaObject<Value> extends SchemaMeta<Value> {
  /**
   * The schemas of the keys of the object are defined in the `properties` object.
   * `Required<Value>` forces every key to be declared, even optional ones
   * (their optionality is then expressed via `_meta.optional`).
   */
  properties: {
    [Key in keyof Required<Value>]: SchemaValue<Value[Key]>;
  };
}

/**
 * Schema definition to match the structure of the properties provided.
 *
 * @example
 * {
 *   my_keyword: {
 *     type: 'keyword',
 *     _meta: {
 *       description: 'Represents the key property...'
 *     }
 *   },
 *   my_number: {
 *     type: 'long',
 *     _meta: {
 *       description: 'Indicates the number of times...',
 *       optional: true
 *     }
 *   },
 *   my_complex_unknown_meta_object: {
 *     type: 'pass_through',
 *     _meta: {
 *       description: 'Unknown object that contains the key-values...'
 *     }
 *   },
 *   my_array_of_str: {
 *     type: 'array',
 *     items: {
 *       type: 'text',
 *       _meta: {
 *         description: 'List of tags...'
 *       }
 *     }
 *   },
 *   my_object: {
 *     properties: {
 *       my_timestamp: {
 *         type: 'date',
 *         _meta: {
 *           description: 'timestamp when the user...'
 *         }
 *       }
 *     }
 *   },
 *   my_array_of_objects: {
 *     type: 'array',
 *     items: {
 *       properties: {
 *         my_bool_prop: {
 *           type: 'boolean',
 *           _meta: {
 *             description: '`true` when...'
 *           }
 *         }
 *       }
 *     }
 *   }
 * }
 */
export type RootSchema<Base> = SchemaObject<Base>['properties'];
|
|
@ -1,45 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import * as t from 'io-ts';
|
||||
import { either, isLeft } from 'fp-ts/lib/Either';
|
||||
import { excess } from './excess';
|
||||
|
||||
describe('excess', () => {
|
||||
test('should pass validation when not found extra properties', () => {
|
||||
const validator = excess(t.interface({ a_string: t.string, a_number: t.number }));
|
||||
const invalidObj = { a_string: 'test', a_number: 1 };
|
||||
expect(validator.is(invalidObj)).toBe(true);
|
||||
const result = validator.decode(invalidObj);
|
||||
expect(isLeft(result)).toBe(false);
|
||||
});
|
||||
|
||||
test('should not pass validation when found extra properties', () => {
|
||||
const validator = excess(t.interface({ a_string: t.string, a_number: t.number }));
|
||||
const invalidObj = { a_string: 'test', a_number: 1, another_string: 'test' };
|
||||
expect(validator.is(invalidObj)).toBe(false);
|
||||
const result = validator.decode(invalidObj);
|
||||
expect(isLeft(result)).toBe(true);
|
||||
either.mapLeft(result, (validationError) =>
|
||||
expect(validationError[0].message).toBe(`excess key 'another_string' found`)
|
||||
);
|
||||
});
|
||||
|
||||
test('should not pass validation when found a non-declared property in an all-optional object', () => {
|
||||
const validator = excess(t.partial({ a_string: t.string, a_number: t.number }));
|
||||
const invalidObj = { another_string: 'test' };
|
||||
expect(validator.is(invalidObj)).toBe(false);
|
||||
const result = validator.decode(invalidObj);
|
||||
expect(isLeft(result)).toBe(true);
|
||||
either.mapLeft(result, (validationErrors) =>
|
||||
expect(validationErrors.map((err) => err.message)).toStrictEqual([
|
||||
`excess key 'another_string' found`,
|
||||
])
|
||||
);
|
||||
});
|
||||
});
|
|
@ -1,121 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
// Extra IO-TS type to not allow more keys than the defined ones.
|
||||
// Extracted from https://github.com/gcanti/io-ts/issues/322
|
||||
|
||||
import * as t from 'io-ts';
|
||||
import { either, Either, isRight, left, right, Right } from 'fp-ts/lib/Either';
|
||||
|
||||
/**
 * Builds a type guard that discriminates io-ts codecs by their internal `_tag` property.
 * @param tag The runtime `_tag` value to match (e.g. 'InterfaceType').
 */
const getIsCodec =
  <T extends t.Any>(tag: string) =>
  (codec: t.Any): codec is T =>
    (codec as t.Any & { _tag: string })._tag === tag;

/** `true` when the codec was built with `t.interface(...)` */
const isInterfaceCodec = getIsCodec<t.InterfaceType<t.Props>>('InterfaceType');
/** `true` when the codec was built with `t.partial(...)` */
const isPartialCodec = getIsCodec<t.PartialType<t.Props>>('PartialType');
/** `true` when the codec was built with `t.intersection(...)` */
const isIntersectionType = getIsCodec<t.IntersectionType<t.Mixed[]>>('IntersectionType');
|
||||
|
||||
/**
 * Extracts the flat `props` map from an io-ts codec:
 * unwraps refinement/readonly wrappers and merges every member of an intersection.
 * The switch is exhaustive over `t.HasProps['_tag']`, so no default branch is needed.
 */
const getProps = (codec: t.HasProps): t.Props => {
  switch (codec._tag) {
    case 'RefinementType':
    case 'ReadonlyType':
      // Wrapper codecs: recurse into the wrapped codec
      return getProps(codec.type);
    case 'InterfaceType':
    case 'StrictType':
    case 'PartialType':
      return codec.props;
    case 'IntersectionType':
      // Merge all members' props; later members overwrite duplicate keys
      return codec.types.reduce<t.Props>((props, type) => Object.assign(props, getProps(type)), {});
  }
};
|
||||
|
||||
const getNameFromProps = (props: t.Props, isPartial: boolean): string =>
|
||||
Object.keys(props)
|
||||
.map((k) => `${k}${isPartial ? '?' : ''}: ${props[k].name}`)
|
||||
.join(', ');
|
||||
|
||||
/**
 * Provides a human-readable definition of the io-ts validator.
 * @param codec The io-ts declaration passed as an argument to the Excess method.
 * @remarks Since we currently use it only with objects, we'll cover the IntersectionType and PartialType
 */
const getExcessTypeName = (codec: t.Any): string => {
  if (isIntersectionType(codec)) {
    // Render each member of the intersection: interfaces as required props,
    // partials as optional (`?`) props, anything else by its own codec name.
    return `{ ${codec.types
      .map((subCodec) => {
        if (isInterfaceCodec(subCodec)) {
          return getNameFromProps(subCodec.props, false);
        }
        if (isPartialCodec(subCodec)) {
          return getNameFromProps(subCodec.props, true);
        }
        return subCodec.name;
      })
      .filter(Boolean) // drop empty renderings (e.g. a member with no props)
      .join(', ')} }`;
  }
  // Fallback for non-intersection codecs
  return `Excess<${codec.name}>`;
};
|
||||
|
||||
const stripKeys = <T>(o: T, props: t.Props): Either<string[], T> => {
|
||||
const keys = Object.getOwnPropertyNames(o);
|
||||
const propsKeys = Object.getOwnPropertyNames(props);
|
||||
|
||||
propsKeys.forEach((pk) => {
|
||||
const index = keys.indexOf(pk);
|
||||
if (index !== -1) {
|
||||
keys.splice(index, 1);
|
||||
}
|
||||
});
|
||||
|
||||
return keys.length ? left(keys) : right(o);
|
||||
};
|
||||
|
||||
/**
 * Validate if there are any keys that exist in the validated object, but they don't in the validation object.
 * @param codec The io-ts schema to wrap with this validation
 * @param name (optional) Replace the custom logic to name the validation error by providing a static name.
 * @returns An {@link ExcessType} codec that behaves like `codec` but rejects undeclared keys.
 */
export const excess = <C extends t.HasProps>(
  codec: C,
  name: string = getExcessTypeName(codec)
): ExcessType<C> => {
  const props: t.Props = getProps(codec);
  return new ExcessType<C>(
    name,
    // `is`: valid only when there are no excess keys AND the wrapped codec accepts the value
    (u): u is C => isRight(stripKeys(u, props)) && codec.is(u),
    // `validate`: must be a record, then pass the wrapped codec, then report one error per excess key
    (u, c) =>
      either.chain(t.UnknownRecord.validate(u, c), () =>
        either.chain(codec.validate(u, c), (a) =>
          either.mapLeft(stripKeys<C>(a, props), (keys) =>
            keys.map((k) => ({
              value: a[k],
              context: c,
              message: `excess key '${k}' found`,
            }))
          )
        )
      ),
    // `encode`: delegate to the wrapped codec; the cast assumes the value carries no excess keys
    // at encode time (i.e. it already passed `is`/`validate`)
    (a) => codec.encode((stripKeys(a, props) as Right<any>).right),
    codec
  );
};
|
||||
|
||||
/**
 * io-ts codec that wraps another codec (`type`) and additionally rejects
 * objects carrying keys not declared by the wrapped codec. Instances are
 * created exclusively by {@link excess}, which supplies the guard/validate/encode logic.
 */
class ExcessType<C extends t.Any, A = C['_A'], O = A, I = unknown> extends t.Type<A, O, I> {
  // Discriminator following the io-ts `_tag` convention
  public readonly _tag: 'ExcessType' = 'ExcessType';
  constructor(
    name: string,
    is: ExcessType<C, A, O, I>['is'],
    validate: ExcessType<C, A, O, I>['validate'],
    encode: ExcessType<C, A, O, I>['encode'],
    /** The wrapped codec, kept accessible for introspection */
    public readonly type: C
  ) {
    super(name, is, validate, encode);
  }
}
|
|
@ -1,177 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import type { AllowedSchemaTypes, RootSchema } from '../types';
|
||||
import { schemaToIoTs } from './schema_to_io_ts';
|
||||
|
||||
describe(`convertSchemaToIoTs`, () => {
|
||||
test('fail with anything other than an object', () => {
|
||||
// @ts-expect-error
|
||||
expect(() => schemaToIoTs(null)).toThrow();
|
||||
});
|
||||
test('invalid type => errors with malformed schema', () => {
|
||||
expect(() =>
|
||||
schemaToIoTs({
|
||||
// @ts-expect-error Non-valid type
|
||||
an_invalid_field: { type: 'invalid', _meta: { description: 'Test description' } },
|
||||
})
|
||||
).toThrow(/Malformed schema/);
|
||||
});
|
||||
test('array type missing `items` => errors with malformed schema', () => {
|
||||
expect(() =>
|
||||
schemaToIoTs({
|
||||
// @ts-expect-error Non-valid array-construct
|
||||
an_invalid_field: { type: 'array' },
|
||||
})
|
||||
).toThrow(/Malformed schema/);
|
||||
});
|
||||
test('minimal schemas and empty value => pass', () => {
|
||||
const validator = schemaToIoTs({});
|
||||
expect(validator.is({})).toBe(true);
|
||||
});
|
||||
test('value has fields not defined in the schema => fail', () => {
|
||||
const validator = schemaToIoTs({});
|
||||
expect(validator.is({ version: 'some-version' })).toBe(false);
|
||||
expect(validator.is({ an_array: [{ docs: { missing: 1 } }] })).toBe(false);
|
||||
});
|
||||
test('support optional fields', () => {
|
||||
const validator = schemaToIoTs<unknown>({
|
||||
an_optional_field: {
|
||||
type: 'keyword',
|
||||
_meta: {
|
||||
description: 'An optional field',
|
||||
optional: true,
|
||||
},
|
||||
},
|
||||
an_optional_obj: {
|
||||
_meta: { optional: true },
|
||||
properties: {
|
||||
other_field: { type: 'short', _meta: { description: 'Test description' } },
|
||||
},
|
||||
},
|
||||
an_optional_array: {
|
||||
type: 'array',
|
||||
items: { type: 'short', _meta: { description: 'Test description' } },
|
||||
_meta: { optional: true },
|
||||
},
|
||||
});
|
||||
expect(validator.is({})).toBe(true);
|
||||
});
|
||||
test('value has nested-fields not defined in the schema => fail', () => {
|
||||
const schemas: Array<RootSchema<unknown>> = [
|
||||
{
|
||||
an_array: {
|
||||
type: 'array',
|
||||
_meta: { description: 'Test description' },
|
||||
items: {
|
||||
properties: {},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
an_array: {
|
||||
type: 'array',
|
||||
_meta: { description: 'Test description' },
|
||||
items: {
|
||||
properties: { docs: { properties: {} } },
|
||||
},
|
||||
},
|
||||
},
|
||||
];
|
||||
schemas.forEach((schema) => {
|
||||
const validator = schemaToIoTs(schema);
|
||||
expect(validator.is({ an_array: [{ docs: { missing: 1 } }] })).toBe(false);
|
||||
});
|
||||
});
|
||||
test('value has nested-fields defined in the schema, but with wrong type => fail', () => {
|
||||
const validator = schemaToIoTs({
|
||||
an_array: {
|
||||
type: 'array',
|
||||
items: {
|
||||
properties: {
|
||||
docs: {
|
||||
properties: {
|
||||
field: { type: 'short', _meta: { description: 'Test description' } },
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
expect(validator.is({ an_array: [{ docs: { field: 'abc' } }] })).toBe(false);
|
||||
});
|
||||
test.each([
|
||||
'boolean',
|
||||
'byte',
|
||||
'double',
|
||||
'float',
|
||||
'integer',
|
||||
'long',
|
||||
'short',
|
||||
] as AllowedSchemaTypes[])('Expected type %s, but got string', (type) => {
|
||||
const validator = schemaToIoTs({
|
||||
a_field: { type, _meta: { description: 'Test description' } },
|
||||
});
|
||||
expect(validator.is({ a_field: 'abc' })).toBe(false);
|
||||
});
|
||||
test.each(['keyword', 'text', 'date'] as AllowedSchemaTypes[])(
|
||||
'Expected type %s, but got number',
|
||||
(type) => {
|
||||
const validator = schemaToIoTs({
|
||||
a_field: { type, _meta: { description: 'Test description' } },
|
||||
});
|
||||
expect(validator.is({ a_field: 1234 })).toBe(false);
|
||||
}
|
||||
);
|
||||
test('Support DYNAMIC_KEY', () => {
|
||||
const validator = schemaToIoTs({
|
||||
a_field: {
|
||||
properties: { DYNAMIC_KEY: { type: 'short', _meta: { description: 'Test description' } } },
|
||||
},
|
||||
});
|
||||
expect(validator.is({ a_field: { some_key: 1234 } })).toBe(true);
|
||||
});
|
||||
test('Support DYNAMIC_KEY + known props', () => {
|
||||
const validator = schemaToIoTs({
|
||||
a_field: {
|
||||
properties: {
|
||||
DYNAMIC_KEY: { type: 'short', _meta: { description: 'Test description' } },
|
||||
known_prop: { type: 'short', _meta: { description: 'Test description' } },
|
||||
},
|
||||
},
|
||||
});
|
||||
expect(validator.is({ a_field: { some_key: 1234, known_prop: 1234 } })).toBe(true);
|
||||
});
|
||||
test('value has nested-fields defined in the schema => succeed', () => {
|
||||
const validator = schemaToIoTs({
|
||||
an_array: {
|
||||
type: 'array',
|
||||
items: {
|
||||
properties: {
|
||||
docs: {
|
||||
properties: {
|
||||
field: { type: 'short', _meta: { description: 'Test description' } },
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
expect(validator.is({ an_array: [{ docs: { field: 1 } }] })).toBe(true);
|
||||
});
|
||||
|
||||
test('allow pass_through properties', () => {
|
||||
const validator = schemaToIoTs({
|
||||
im_only_passing_through_data: {
|
||||
type: 'pass_through',
|
||||
_meta: { description: 'Test description' },
|
||||
},
|
||||
});
|
||||
expect(validator.is({ im_only_passing_through_data: [{ docs: { field: 1 } }] })).toBe(true);
|
||||
});
|
||||
});
|
|
@ -1,121 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import * as t from 'io-ts';
|
||||
import type { RootSchema, SchemaArray, SchemaObject, SchemaValue } from '../types';
|
||||
import { excess } from './excess';
|
||||
|
||||
/**
 * Is it a tuple of t.Mixed?
 * @param schemas Array of io-ts schemas
 * @returns `true` (narrowing to a two-element tuple) when exactly two schemas are present.
 * Callers build either one or two schemas, and `t.union` is only applied in the two-schema case.
 */
function isOneOfCandidate(schemas: t.Mixed[]): schemas is [t.Mixed, t.Mixed] {
  return schemas.length === 2;
}
|
||||
|
||||
/**
 * Converts each {@link SchemaValue} to the io-ts equivalent
 * @param value The {@link SchemaValue} to parse
 * @throws Error when the schema declares an unsupported type or an array without `items`
 * (callers wrap this with the failing key path — see `entriesToObjectIoTs`).
 */
function schemaValueToIoTs<Value>(value: SchemaValue<Value>): t.Mixed {
  // We need to check the pass_through type on top of everything
  if ((value as { type: 'pass_through' }).type === 'pass_through') {
    return t.unknown;
  }

  if ('properties' in value) {
    // `DYNAMIC_KEY` is a reserved property name: its schema applies to arbitrary keys
    const { DYNAMIC_KEY, ...properties } = value.properties as SchemaObject<Value>['properties'] & {
      DYNAMIC_KEY?: SchemaValue<unknown>;
    };
    const schemas: t.Mixed[] = [schemaObjectToIoTs<Record<string, unknown>>({ properties })];
    if (DYNAMIC_KEY) {
      schemas.push(t.record(t.string, schemaValueToIoTs(DYNAMIC_KEY)));
    }
    // With both fixed props and DYNAMIC_KEY present, accept either shape via a union
    return isOneOfCandidate(schemas) ? t.union(schemas) : schemas[0];
  } else {
    const valueType = value.type; // Copied in here because of TS reasons, it's not available in the `default` case
    switch (valueType) {
      case 'boolean':
        return t.boolean;
      case 'keyword':
      case 'text':
      case 'date':
        return t.string;
      case 'byte':
      case 'double':
      case 'float':
      case 'integer':
      case 'long':
      case 'short':
        return t.number;
      case 'array':
        if ('items' in value) {
          return t.array(schemaValueToIoTs((value as SchemaArray<unknown, unknown>).items));
        }
        throw new Error(`Schema type must include the "items" declaration.`);
      default:
        throw new Error(`Unsupported schema type ${valueType}.`);
    }
  }
}
|
||||
|
||||
/**
|
||||
* Loops through a list of [key, SchemaValue] tuples to convert them into a valid io-ts parameter to define objects.
|
||||
* @param entries Array of tuples [key, {@link SchemaValue}]. Typically, coming from Object.entries(SchemaObject).
|
||||
*/
|
||||
function entriesToObjectIoTs<Value>(
|
||||
entries: Array<[string, SchemaValue<Value>]>
|
||||
): Record<string, t.Mixed> {
|
||||
return Object.fromEntries(
|
||||
entries.map(([key, value]) => {
|
||||
try {
|
||||
return [key, schemaValueToIoTs(value)];
|
||||
} catch (err) {
|
||||
err.failedKey = [key, ...(err.failedKey || [])];
|
||||
throw err;
|
||||
}
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts a {@link SchemaObject} to the io-ts equivalent.
|
||||
* @param schemaObject The {@link SchemaObject} to parse.
|
||||
*/
|
||||
function schemaObjectToIoTs<Value>(
|
||||
schemaObject: SchemaObject<Value>
|
||||
): t.Type<Record<string, unknown>> {
|
||||
const objectEntries: Array<[string, SchemaValue<unknown>]> = Object.entries(
|
||||
schemaObject.properties
|
||||
);
|
||||
|
||||
const requiredFields = objectEntries.filter(([key, { _meta }]) => _meta?.optional !== true);
|
||||
const optionalFields = objectEntries.filter(([key, { _meta }]) => _meta?.optional === true);
|
||||
|
||||
return excess(
|
||||
t.intersection([
|
||||
t.interface(entriesToObjectIoTs(requiredFields)),
|
||||
t.partial(entriesToObjectIoTs(optionalFields)),
|
||||
])
|
||||
);
|
||||
}
|
||||
|
||||
/**
 * Converts a {@link RootSchema} to an io-ts validation object.
 * @param rootSchema The {@link RootSchema} to be parsed.
 * @throws Error with a `Malformed schema for key [...]` message when any nested value is invalid.
 */
export function schemaToIoTs<Base>(rootSchema: RootSchema<Base>): t.Type<Record<string, unknown>> {
  try {
    // The root schema is the `properties` map of an implicit top-level object
    return schemaObjectToIoTs({ properties: rootSchema });
  } catch (err) {
    if (err.failedKey) {
      // `failedKey` is accumulated per level in entriesToObjectIoTs, yielding the full dotted path
      err.message = `Malformed schema for key [${err.failedKey.join('.')}]: ${err.message}`;
    }
    throw err;
  }
}
|
|
@@ -1,110 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { validateSchema } from './validate_schema';
|
||||
import { schemaToIoTs } from './schema_to_io_ts';
|
||||
|
||||
// Integration tests for validateSchema() + schemaToIoTs(): build an io-ts validator
// from an event schema and assert on the human-friendly error messages produced.
describe('validateSchema', () => {
  describe('successful', () => {
    // Payload matches the declared schema exactly => no error is thrown.
    test('valid object', () => {
      expect(() =>
        validateSchema(
          'test source',
          schemaToIoTs({
            an_object: {
              properties: { a_field: { type: 'keyword', _meta: { description: 'A test field' } } },
            },
          }),
          { an_object: { a_field: 'test' } }
        )
      ).not.toThrow();
    });
  });
  describe('failed', () => {
    // Extra top-level key not present in the schema => "excess key" error at the root path.
    test('object is valid but it has some extra fields not declared in the schema', () => {
      expect(() =>
        validateSchema(
          'test source',
          schemaToIoTs({
            an_object: {
              properties: { a_field: { type: 'keyword', _meta: { description: 'A test field' } } },
            },
          }),
          { an_object: { a_field: 'test' }, another_object: { a_field: 'test' } }
        )
      ).toThrowErrorMatchingInlineSnapshot(`
        "Failed to validate payload coming from \\"test source\\":
        - []: excess key 'another_object' found"
      `);
    });

    // Extra key nested inside a declared object => "excess key" error at the nested path.
    test('object is valid but it has some extra nested fields not declared in the schema', () => {
      expect(() =>
        validateSchema(
          'test source',
          schemaToIoTs({
            an_object: {
              properties: { a_field: { type: 'keyword', _meta: { description: 'A test field' } } },
            },
          }),
          { an_object: { a_field: 'test', an_extra_field: 'test' } }
        )
      ).toThrowErrorMatchingInlineSnapshot(`
        "Failed to validate payload coming from \\"test source\\":
        - [an_object]: excess key 'an_extra_field' found"
      `);
    });

    // A required key is missing (optional keys may be absent without error).
    test('the object is not valid because it is missing a key', () => {
      expect(() =>
        validateSchema(
          'test source',
          schemaToIoTs<unknown>({
            an_object: {
              properties: { a_field: { type: 'keyword', _meta: { description: 'A test field' } } },
            },
            an_optional_object: {
              properties: { a_field: { type: 'keyword', _meta: { description: 'A test field' } } },
              _meta: { optional: true },
            },
          }),
          { another_object: { a_field: 'test' } }
        )
      ).toThrowErrorMatchingInlineSnapshot(`
        "Failed to validate payload coming from \\"test source\\":
        - [an_object]: {\\"expected\\":\\"{ a_field: string }\\",\\"actual\\":\\"undefined\\",\\"value\\":\\"undefined\\"}"
      `);
    });

    // Multiple independent violations are all listed in a single error message.
    test('lists multiple errors', () => {
      expect(() =>
        validateSchema(
          'test source',
          schemaToIoTs<unknown>({
            an_object: {
              properties: { a_field: { type: 'keyword', _meta: { description: 'A test field' } } },
            },
            an_optional_object: {
              properties: { a_field: { type: 'keyword', _meta: { description: 'A test field' } } },
              _meta: { optional: true },
            },
          }),
          {
            an_object: { a_field: 'test', an_extra_field: 'test' },
            an_optional_object: {},
            another_object: { a_field: 'test' },
          }
        )
      ).toThrowErrorMatchingInlineSnapshot(`
        "Failed to validate payload coming from \\"test source\\":
        - [an_object]: excess key 'an_extra_field' found
        - [an_optional_object.a_field]: {\\"expected\\":\\"string\\",\\"actual\\":\\"undefined\\",\\"value\\":\\"undefined\\"}"
      `);
    });
  });
});
|
|
@@ -1,74 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import type { Context, Type } from 'io-ts';
|
||||
import { either } from 'fp-ts/lib/Either';
|
||||
|
||||
/**
|
||||
* Validates the event according to the schema validator generated by {@link convertSchemaToIoTs}.
|
||||
* @throws Error when the event does not comply with the schema.
|
||||
* @param validator The io-ts validator for the event.
|
||||
* @param payload The payload to validate.
|
||||
*/
|
||||
export function validateSchema<Payload>(
|
||||
sourceName: string,
|
||||
validator: Type<Payload>,
|
||||
payload: Payload
|
||||
): void {
|
||||
// Run io-ts validation to the event
|
||||
const result = validator.decode(payload);
|
||||
|
||||
either.mapLeft(result, (validationErrors) => {
|
||||
const humanFriendlyErrors = validationErrors
|
||||
.map(
|
||||
(err) => `[${getFullPathKey(err.context)}]: ${err.message ?? readableContext(err.context)}`
|
||||
)
|
||||
.filter((errMsg, idx, listOfErrMsgs) => listOfErrMsgs.indexOf(errMsg, idx + 1) === -1);
|
||||
throw new Error(
|
||||
`Failed to validate payload coming from "${sourceName}":\n\t- ${humanFriendlyErrors.join(
|
||||
'\n\t- '
|
||||
)}`
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Picks the relevant fields of the validation error's context
|
||||
* @param context The {@link Context} coming from the validation error
|
||||
*/
|
||||
function readableContext(context: Context) {
|
||||
// The information provided, the last context is good enough.
|
||||
// Otherwise, repeating the values for every nested key is too noisy.
|
||||
const last = context[context.length - 1];
|
||||
return JSON.stringify({
|
||||
expected: last.type.name,
|
||||
// Explicitly printing `undefined` to make it more obvious in the message
|
||||
actual: typeof last.actual,
|
||||
value: last.actual === undefined ? 'undefined' : last.actual,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Prints the full path to the key that raised the validation error.
|
||||
* @param context The {@link Context} coming from the validation error
|
||||
*/
|
||||
function getFullPathKey(context: Context): string {
|
||||
return (
|
||||
context
|
||||
// Remove the context provided by InterfaceType and PartialType because their keys are simply numeric indices
|
||||
.filter(
|
||||
(ctx) =>
|
||||
!['InterfaceType', 'PartialType'].includes(
|
||||
(ctx.type as Type<unknown> & { _tag: string })._tag
|
||||
)
|
||||
)
|
||||
.map(({ key }) => key)
|
||||
.filter(Boolean)
|
||||
.join('.')
|
||||
);
|
||||
}
|
|
@@ -1,9 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
export type { IShipper } from './types';
|
|
@@ -1,33 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { Subject } from 'rxjs';
|
||||
import type { IShipper } from './types';
|
||||
import type { TelemetryCounter } from '../events';
|
||||
|
||||
/**
 * Factory returning a fully jest-mocked {@link IShipper} for use in unit tests.
 */
function createShipper(): jest.Mocked<IShipper> {
  return new MockedShipper();
}

/**
 * Jest-mocked implementation of the {@link IShipper} contract.
 * Every API is a `jest.fn()` so tests can assert on the calls; `telemetryCounter$`
 * is a real rxjs Subject so tests can emit and observe telemetry counters.
 */
class MockedShipper implements IShipper {
  // Name reported for this mock shipper.
  public static shipperName = 'mocked-shipper';

  constructor() {}

  public optIn = jest.fn();
  public reportEvents = jest.fn();
  public extendContext = jest.fn();
  public telemetryCounter$ = new Subject<TelemetryCounter>();
  public flush = jest.fn();
  public shutdown = jest.fn();
}

/** Public mock helpers for the shippers module. */
export const shippersMock = {
  createShipper,
  MockedShipper,
};
|
|
@@ -1,43 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { Observable } from 'rxjs';
|
||||
import { Event, EventContext, TelemetryCounter } from '../events';
|
||||
|
||||
/**
 * Basic structure of a Shipper
 */
export interface IShipper {
  /**
   * Adapts and ships the event to the persisting/analytics solution.
   * @param events batched events {@link Event}
   */
  reportEvents: (events: Event[]) => void;
  /**
   * Stops/restarts the shipping mechanism based on the value of isOptedIn
   * @param isOptedIn `true` for resume sending events. `false` to stop.
   */
  optIn: (isOptedIn: boolean) => void;
  /**
   * Perform any necessary calls to the persisting/analytics solution to set the event's context.
   * Optional: shippers that do not track any context may omit it.
   * @param newContext The full new context to set {@link EventContext}
   */
  extendContext?: (newContext: EventContext) => void;
  /**
   * Observable to emit the stats of the processed events.
   * Optional: shippers without delivery stats may omit it.
   */
  telemetryCounter$?: Observable<TelemetryCounter>;
  /**
   * Sends all the enqueued events and fulfills the returned promise.
   */
  flush: () => Promise<void>;
  /**
   * Shutdown the shipper.
   */
  shutdown: () => void;
}
|
|
@@ -1,57 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
// Exporting the types here as a utility only
|
||||
// The recommended way of using this library is to import from the subdirectories /client, /shippers/*
|
||||
// The reason is to avoid leaking server-side code to the browser, and vice-versa
|
||||
export type {
|
||||
AnalyticsClient,
|
||||
// Types for the constructor
|
||||
AnalyticsClientInitContext,
|
||||
// Types for the registerShipper API
|
||||
ShipperClassConstructor,
|
||||
RegisterShipperOpts,
|
||||
// Types for the optIn API
|
||||
OptInConfig,
|
||||
OptInConfigPerType,
|
||||
ShipperName,
|
||||
// Types for the registerContextProvider API
|
||||
ContextProviderOpts,
|
||||
ContextProviderName,
|
||||
// Types for the registerEventType API
|
||||
EventTypeOpts,
|
||||
// Events
|
||||
Event,
|
||||
EventContext,
|
||||
EventType,
|
||||
TelemetryCounter,
|
||||
TelemetryCounterType,
|
||||
// Schema
|
||||
RootSchema,
|
||||
SchemaObject,
|
||||
SchemaArray,
|
||||
SchemaChildValue,
|
||||
SchemaMeta,
|
||||
SchemaValue,
|
||||
SchemaMetaOptional,
|
||||
PossibleSchemaTypes,
|
||||
AllowedSchemaBooleanTypes,
|
||||
AllowedSchemaNumberTypes,
|
||||
AllowedSchemaStringTypes,
|
||||
AllowedSchemaTypes,
|
||||
// Shippers
|
||||
IShipper,
|
||||
} from './client';
|
||||
export type { ElasticV3ShipperOptions } from './shippers/elastic_v3/common';
|
||||
export type { ElasticV3BrowserShipper } from './shippers/elastic_v3/browser';
|
||||
export type { ElasticV3ServerShipper } from './shippers/elastic_v3/server';
|
||||
export type {
|
||||
FullStoryShipperConfig,
|
||||
FullStoryShipper,
|
||||
FullStorySnippetConfig,
|
||||
} from './shippers/fullstory';
|
|
@@ -1,13 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
module.exports = {
|
||||
preset: '@kbn/test',
|
||||
rootDir: '../../..',
|
||||
roots: ['<rootDir>/packages/analytics/ebt'],
|
||||
};
|
|
@@ -1,5 +0,0 @@
|
|||
{
|
||||
"type": "shared-common",
|
||||
"id": "@kbn/ebt",
|
||||
"owner": "@elastic/kibana-core"
|
||||
}
|
|
@@ -1,6 +0,0 @@
|
|||
{
|
||||
"name": "@kbn/ebt",
|
||||
"private": true,
|
||||
"version": "1.0.0",
|
||||
"license": "SSPL-1.0 OR Elastic License 2.0"
|
||||
}
|
|
@@ -1,7 +0,0 @@
|
|||
# @kbn/ebt/shippers/*
|
||||
|
||||
This directory holds the implementation of the _built-in_ shippers provided by the Analytics client. At the moment, the shippers are:
|
||||
|
||||
* [FullStory](./fullstory/README.md)
|
||||
* [Elastic V3 (Browser shipper)](./elastic_v3/browser/README.md)
|
||||
* [Elastic V3 (Server-side shipper)](./elastic_v3/server/README.md)
|
|
@@ -1,25 +0,0 @@
|
|||
# @kbn/ebt/shippers/elastic_v3/browser
|
||||
|
||||
UI-side implementation of the Elastic V3 shipper for the `@kbn/ebt/client`.
|
||||
|
||||
## How to use it
|
||||
|
||||
This module is intended to be used **on the browser only**. Due to the nature of the UI events, they are usually more scattered in time, and we can assume a much lower load than the server. For that reason, it doesn't apply the necessary backpressure mechanisms to prevent the server from getting overloaded with too many events, nor does it identify whether the server sits behind a firewall in order to discard any incoming events. Refer to `@kbn/ebt/shippers/elastic_v3/server` for the server-side implementation.
|
||||
|
||||
```typescript
|
||||
import { ElasticV3BrowserShipper } from "@kbn/ebt/shippers/elastic_v3/browser";
|
||||
|
||||
analytics.registerShipper(ElasticV3BrowserShipper, { channelName: 'myChannel', version: '1.0.0' });
|
||||
```
|
||||
|
||||
## Configuration
|
||||
|
||||
| Name | Description |
|
||||
|:-------------:|:-------------------------------------------------------------------------------------------|
|
||||
| `channelName` | The name of the channel to send the events. |
|
||||
| `version` | The version of the application generating the events. |
|
||||
| `debug` | When `true`, it logs the responses from the remote Telemetry Service. Defaults to `false`. |
|
||||
|
||||
## Transmission protocol
|
||||
|
||||
This shipper sends the events to the Elastic Internal Telemetry Service. The incoming events are buffered for up to 1 second to attempt to send them in a single request.
|
|
@@ -1,10 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
export type { ElasticV3ShipperOptions } from '../common';
|
||||
export { ElasticV3BrowserShipper } from './src/browser_shipper';
|
|
@@ -1,319 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { loggerMock } from '@kbn/logging-mocks';
|
||||
import { firstValueFrom } from 'rxjs';
|
||||
import type { AnalyticsClientInitContext, Event } from '../../../../client';
|
||||
import { ElasticV3BrowserShipper } from './browser_shipper';
|
||||
|
||||
// Unit tests for the browser Elastic V3 shipper. `fetch` is replaced by a jest mock
// and jest fake timers drive the shipper's 1-second buffering window.
describe('ElasticV3BrowserShipper', () => {
  // Single fixed event so the NDJSON request body asserted below is deterministic.
  const events: Event[] = [
    {
      timestamp: '2020-01-01T00:00:00.000Z',
      event_type: 'test-event-type',
      context: {},
      properties: {},
    },
  ];

  const initContext: AnalyticsClientInitContext = {
    sendTo: 'staging',
    isDev: true,
    logger: loggerMock.create(),
  };

  let shipper: ElasticV3BrowserShipper;

  let fetchMock: jest.Mock;

  beforeEach(() => {
    jest.useFakeTimers();

    // Default behaviour: the remote telemetry endpoint responds 200 OK.
    fetchMock = jest.fn().mockResolvedValue({
      status: 200,
      ok: true,
      text: () => Promise.resolve('{"status": "ok"}'),
    });

    Object.defineProperty(global, 'fetch', {
      value: fetchMock,
      writable: true,
    });

    shipper = new ElasticV3BrowserShipper(
      { version: '1.2.3', channelName: 'test-channel', debug: true },
      initContext
    );
  });

  afterEach(() => {
    shipper.shutdown();
    jest.useRealTimers();
  });

  test("custom sendTo overrides Analytics client's", () => {
    const prodShipper = new ElasticV3BrowserShipper(
      { version: '1.2.3', channelName: 'test-channel', debug: true, sendTo: 'production' },
      initContext
    );

    // eslint-disable-next-line dot-notation
    expect(prodShipper['url']).not.toEqual(shipper['url']);
  });

  test('set optIn should update the isOptedIn$ observable', () => {
    // eslint-disable-next-line dot-notation
    const internalOptIn$ = shipper['isOptedIn$'];

    // Initially undefined
    expect(internalOptIn$.value).toBeUndefined();

    shipper.optIn(true);
    expect(internalOptIn$.value).toBe(true);

    shipper.optIn(false);
    expect(internalOptIn$.value).toBe(false);
  });

  test('set extendContext should store local values: clusterUuid and licenseId', () => {
    // eslint-disable-next-line dot-notation
    const getInternalClusterUuid = () => shipper['clusterUuid'];
    // eslint-disable-next-line dot-notation
    const getInternalLicenseId = () => shipper['licenseId'];

    // Initial values
    expect(getInternalClusterUuid()).toBe('UNKNOWN');
    expect(getInternalLicenseId()).toBeUndefined();

    shipper.extendContext({ cluster_uuid: 'test-cluster-uuid' });
    expect(getInternalClusterUuid()).toBe('test-cluster-uuid');
    expect(getInternalLicenseId()).toBeUndefined();

    shipper.extendContext({ license_id: 'test-license-id' });
    expect(getInternalClusterUuid()).toBe('test-cluster-uuid');
    expect(getInternalLicenseId()).toBe('test-license-id');

    shipper.extendContext({ cluster_uuid: 'test-cluster-uuid-2', license_id: 'test-license-id-2' });
    expect(getInternalClusterUuid()).toBe('test-cluster-uuid-2');
    expect(getInternalLicenseId()).toBe('test-license-id-2');
  });

  // The internal queue buffers for 1s before attempting any request.
  test('calls to reportEvents do not call `fetch` straight away (buffer of 1s)', () => {
    shipper.reportEvents(events);
    expect(fetchMock).not.toHaveBeenCalled();
  });

  // Until an optIn decision exists, buffered events must not be sent.
  test('calls to reportEvents do not call `fetch` after 1s because no optIn value is set yet', async () => {
    shipper.reportEvents(events);
    await jest.advanceTimersByTimeAsync(1000);
    expect(fetchMock).not.toHaveBeenCalled();
  });

  // Opted-in: after the 1s buffer, the batch goes out and a "succeeded" counter is emitted.
  test('calls to reportEvents call `fetch` after 1s when optIn value is set to true', async () => {
    shipper.reportEvents(events);
    shipper.optIn(true);
    const counter = firstValueFrom(shipper.telemetryCounter$);
    await jest.advanceTimersByTimeAsync(1000);
    expect(fetchMock).toHaveBeenCalledWith(
      'https://telemetry-staging.elastic.co/v3/send/test-channel',
      {
        body: '{"timestamp":"2020-01-01T00:00:00.000Z","event_type":"test-event-type","context":{},"properties":{}}\n',
        headers: {
          'content-type': 'application/x-ndjson',
          'x-elastic-cluster-id': 'UNKNOWN',
          'x-elastic-stack-version': '1.2.3',
        },
        keepalive: true,
        method: 'POST',
        query: { debug: true },
      }
    );
    await expect(counter).resolves.toMatchInlineSnapshot(`
      Object {
        "code": "200",
        "count": 1,
        "event_type": "test-event-type",
        "source": "elastic_v3_browser",
        "type": "succeeded",
      }
    `);
  });

  test('calls to reportEvents do not call `fetch` after 1s when optIn value is set to false', async () => {
    shipper.reportEvents(events);
    shipper.optIn(false);
    await jest.advanceTimersByTimeAsync(1000);
    expect(fetchMock).not.toHaveBeenCalled();
  });

  // flush() bypasses the 1s buffer and sends immediately.
  test('calls to flush forces the client to send all the pending events', async () => {
    shipper.optIn(true);
    shipper.reportEvents(events);
    const counter = firstValueFrom(shipper.telemetryCounter$);
    await shipper.flush();
    expect(fetchMock).toHaveBeenCalledWith(
      'https://telemetry-staging.elastic.co/v3/send/test-channel',
      {
        body: '{"timestamp":"2020-01-01T00:00:00.000Z","event_type":"test-event-type","context":{},"properties":{}}\n',
        headers: {
          'content-type': 'application/x-ndjson',
          'x-elastic-cluster-id': 'UNKNOWN',
          'x-elastic-stack-version': '1.2.3',
        },
        keepalive: true,
        method: 'POST',
        query: { debug: true },
      }
    );
    await expect(counter).resolves.toMatchInlineSnapshot(`
      Object {
        "code": "200",
        "count": 1,
        "event_type": "test-event-type",
        "source": "elastic_v3_browser",
        "type": "succeeded",
      }
    `);
  });

  test('calls to flush resolve immediately if there is nothing to send', async () => {
    shipper.optIn(true);
    await shipper.flush();
    expect(fetchMock).toHaveBeenCalledTimes(0);
  });

  test('calling flush multiple times does not keep hanging', async () => {
    await expect(shipper.flush()).resolves.toBe(undefined);
    await expect(shipper.flush()).resolves.toBe(undefined);
    await Promise.all([shipper.flush(), shipper.flush()]);
  });

  test('calling flush after shutdown does not keep hanging', async () => {
    shipper.shutdown();
    await expect(shipper.flush()).resolves.toBe(undefined);
  });

  // shutdown() releases the buffer immediately, so pending events are sent synchronously.
  test('calls to reportEvents call `fetch` when shutting down if optIn value is set to true', async () => {
    shipper.reportEvents(events);
    shipper.optIn(true);
    const counter = firstValueFrom(shipper.telemetryCounter$);
    shipper.shutdown();
    expect(fetchMock).toHaveBeenCalledWith(
      'https://telemetry-staging.elastic.co/v3/send/test-channel',
      {
        body: '{"timestamp":"2020-01-01T00:00:00.000Z","event_type":"test-event-type","context":{},"properties":{}}\n',
        headers: {
          'content-type': 'application/x-ndjson',
          'x-elastic-cluster-id': 'UNKNOWN',
          'x-elastic-stack-version': '1.2.3',
        },
        keepalive: true,
        method: 'POST',
        query: { debug: true },
      }
    );
    await expect(counter).resolves.toMatchInlineSnapshot(`
      Object {
        "code": "200",
        "count": 1,
        "event_type": "test-event-type",
        "source": "elastic_v3_browser",
        "type": "succeeded",
      }
    `);
  });

  test('does not add the query.debug: true property to the request if the shipper is not set with the debug flag', async () => {
    shipper = new ElasticV3BrowserShipper(
      { version: '1.2.3', channelName: 'test-channel' },
      initContext
    );
    shipper.reportEvents(events);
    shipper.optIn(true);
    await jest.advanceTimersByTimeAsync(1000);
    expect(fetchMock).toHaveBeenCalledWith(
      'https://telemetry-staging.elastic.co/v3/send/test-channel',
      {
        body: '{"timestamp":"2020-01-01T00:00:00.000Z","event_type":"test-event-type","context":{},"properties":{}}\n',
        headers: {
          'content-type': 'application/x-ndjson',
          'x-elastic-cluster-id': 'UNKNOWN',
          'x-elastic-stack-version': '1.2.3',
        },
        keepalive: true,
        method: 'POST',
      }
    );
  });

  // Network-level failure: a "failed" counter carrying the error message as the code.
  test('handles when the fetch request fails', async () => {
    fetchMock.mockRejectedValueOnce(new Error('Failed to fetch'));
    shipper.reportEvents(events);
    shipper.optIn(true);
    const counter = firstValueFrom(shipper.telemetryCounter$);
    await jest.advanceTimersByTimeAsync(1000);
    expect(fetchMock).toHaveBeenCalledWith(
      'https://telemetry-staging.elastic.co/v3/send/test-channel',
      {
        body: '{"timestamp":"2020-01-01T00:00:00.000Z","event_type":"test-event-type","context":{},"properties":{}}\n',
        headers: {
          'content-type': 'application/x-ndjson',
          'x-elastic-cluster-id': 'UNKNOWN',
          'x-elastic-stack-version': '1.2.3',
        },
        keepalive: true,
        method: 'POST',
        query: { debug: true },
      }
    );
    await expect(counter).resolves.toMatchInlineSnapshot(`
      Object {
        "code": "Failed to fetch",
        "count": 1,
        "event_type": "test-event-type",
        "source": "elastic_v3_browser",
        "type": "failed",
      }
    `);
  });

  // HTTP-level failure: the response status becomes the "failed" counter code.
  test('handles when the fetch request fails (request completes but not OK response)', async () => {
    fetchMock.mockResolvedValue({
      ok: false,
      status: 400,
      text: () => Promise.resolve('{"status": "not ok"}'),
    });
    shipper.reportEvents(events);
    shipper.optIn(true);
    const counter = firstValueFrom(shipper.telemetryCounter$);
    await jest.advanceTimersByTimeAsync(1000);
    expect(fetchMock).toHaveBeenCalledWith(
      'https://telemetry-staging.elastic.co/v3/send/test-channel',
      {
        body: '{"timestamp":"2020-01-01T00:00:00.000Z","event_type":"test-event-type","context":{},"properties":{}}\n',
        headers: {
          'content-type': 'application/x-ndjson',
          'x-elastic-cluster-id': 'UNKNOWN',
          'x-elastic-stack-version': '1.2.3',
        },
        keepalive: true,
        method: 'POST',
        query: { debug: true },
      }
    );
    await expect(counter).resolves.toMatchInlineSnapshot(`
      Object {
        "code": "400",
        "count": 1,
        "event_type": "test-event-type",
        "source": "elastic_v3_browser",
        "type": "failed",
      }
    `);
  });
});
|
|
@@ -1,182 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import {
|
||||
BehaviorSubject,
|
||||
interval,
|
||||
Subject,
|
||||
bufferWhen,
|
||||
concatMap,
|
||||
skipWhile,
|
||||
firstValueFrom,
|
||||
map,
|
||||
merge,
|
||||
} from 'rxjs';
|
||||
import type {
|
||||
AnalyticsClientInitContext,
|
||||
Event,
|
||||
EventContext,
|
||||
IShipper,
|
||||
TelemetryCounter,
|
||||
} from '../../../../client';
|
||||
import { ElasticV3ShipperOptions, ErrorWithCode } from '../../common';
|
||||
import { buildHeaders, buildUrl, createTelemetryCounterHelper, eventsToNDJSON } from '../../common';
|
||||
|
||||
/**
|
||||
* Elastic V3 shipper to use in the browser.
|
||||
*/
|
||||
export class ElasticV3BrowserShipper implements IShipper {
|
||||
/** Shipper's unique name */
|
||||
public static shipperName = 'elastic_v3_browser';
|
||||
|
||||
/** Observable to emit the stats of the processed events. */
|
||||
public readonly telemetryCounter$ = new Subject<TelemetryCounter>();
|
||||
|
||||
private readonly reportTelemetryCounters = createTelemetryCounterHelper(
|
||||
this.telemetryCounter$,
|
||||
ElasticV3BrowserShipper.shipperName
|
||||
);
|
||||
private readonly url: string;
|
||||
|
||||
private readonly internalQueue$ = new Subject<Event>();
|
||||
private readonly flush$ = new Subject<void>();
|
||||
private readonly queueFlushed$ = new Subject<void>();
|
||||
|
||||
private readonly isOptedIn$ = new BehaviorSubject<boolean | undefined>(undefined);
|
||||
private clusterUuid: string = 'UNKNOWN';
|
||||
private licenseId: string | undefined;
|
||||
|
||||
/**
|
||||
* Creates a new instance of the {@link ElasticV3BrowserShipper}.
|
||||
* @param options {@link ElasticV3ShipperOptions}
|
||||
* @param initContext {@link AnalyticsClientInitContext}
|
||||
*/
|
||||
constructor(
|
||||
private readonly options: ElasticV3ShipperOptions,
|
||||
private readonly initContext: AnalyticsClientInitContext
|
||||
) {
|
||||
this.setUpInternalQueueSubscriber();
|
||||
this.url = buildUrl({
|
||||
sendTo: options.sendTo ?? initContext.sendTo,
|
||||
channelName: options.channelName,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Uses the `cluster_uuid` and `license_id` from the context to hold them in memory for the generation of the headers
|
||||
* used later on in the HTTP request.
|
||||
* @param newContext The full new context to set {@link EventContext}
|
||||
*/
|
||||
public extendContext(newContext: EventContext) {
|
||||
if (newContext.cluster_uuid) {
|
||||
this.clusterUuid = newContext.cluster_uuid;
|
||||
}
|
||||
if (newContext.license_id) {
|
||||
this.licenseId = newContext.license_id;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* When `false`, it flushes the internal queue and stops sending events.
|
||||
* @param isOptedIn `true` for resume sending events. `false` to stop.
|
||||
*/
|
||||
public optIn(isOptedIn: boolean) {
|
||||
this.isOptedIn$.next(isOptedIn);
|
||||
}
|
||||
|
||||
/**
|
||||
* Enqueues the events to be sent to in a batched approach.
|
||||
* @param events batched events {@link Event}
|
||||
*/
|
||||
public reportEvents(events: Event[]) {
|
||||
events.forEach((event) => {
|
||||
this.internalQueue$.next(event);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Triggers a flush of the internal queue to attempt to send any events held in the queue
|
||||
* and resolves the returned promise once the queue is emptied.
|
||||
*/
|
||||
public async flush() {
|
||||
if (this.flush$.isStopped) {
|
||||
// If called after shutdown, return straight away
|
||||
return;
|
||||
}
|
||||
|
||||
const promise = firstValueFrom(this.queueFlushed$);
|
||||
this.flush$.next();
|
||||
await promise;
|
||||
}
|
||||
|
||||
/**
|
||||
* Shuts down the shipper.
|
||||
* Triggers a flush of the internal queue to attempt to send any events held in the queue.
|
||||
*/
|
||||
public shutdown() {
|
||||
this.internalQueue$.complete(); // NOTE: When completing the observable, the buffer logic does not wait and releases any buffered events.
|
||||
this.flush$.complete();
|
||||
}
|
||||
|
||||
private setUpInternalQueueSubscriber() {
|
||||
this.internalQueue$
|
||||
.pipe(
|
||||
// Buffer events for 1 second or until we have an optIn value
|
||||
bufferWhen(() =>
|
||||
merge(
|
||||
this.flush$,
|
||||
interval(1000).pipe(skipWhile(() => this.isOptedIn$.value === undefined))
|
||||
)
|
||||
),
|
||||
// Send events (one batch at a time)
|
||||
concatMap(async (events) => {
|
||||
// Only send if opted-in and there's anything to send
|
||||
if (this.isOptedIn$.value === true && events.length > 0) {
|
||||
await this.sendEvents(events);
|
||||
}
|
||||
}),
|
||||
map(() => this.queueFlushed$.next())
|
||||
)
|
||||
.subscribe();
|
||||
}
|
||||
|
||||
private async sendEvents(events: Event[]) {
|
||||
try {
|
||||
const code = await this.makeRequest(events);
|
||||
this.reportTelemetryCounters(events, { code });
|
||||
} catch (error) {
|
||||
this.reportTelemetryCounters(events, { code: error.code, error });
|
||||
}
|
||||
}
|
||||
|
||||
private async makeRequest(events: Event[]): Promise<string> {
|
||||
const response = await fetch(this.url, {
|
||||
method: 'POST',
|
||||
body: eventsToNDJSON(events),
|
||||
headers: buildHeaders(this.clusterUuid, this.options.version, this.licenseId),
|
||||
...(this.options.debug && { query: { debug: true } }),
|
||||
// Allow the request to outlive the page in case the tab is closed
|
||||
keepalive: true,
|
||||
});
|
||||
|
||||
if (this.options.debug) {
|
||||
this.initContext.logger.debug(
|
||||
`[${ElasticV3BrowserShipper.shipperName}]: ${response.status} - ${await response.text()}`
|
||||
);
|
||||
}
|
||||
|
||||
if (!response.ok) {
|
||||
throw new ErrorWithCode(
|
||||
`${response.status} - ${await response.text()}`,
|
||||
`${response.status}`
|
||||
);
|
||||
}
|
||||
|
||||
return `${response.status}`;
|
||||
}
|
||||
}
|
|
@ -1,10 +0,0 @@
|
|||
# @kbn/ebt/shippers/elastic_v3/common
|
||||
|
||||
This package holds the common code for the Elastic V3 shippers:
|
||||
|
||||
- Types defining the Shipper configuration `ElasticV3ShipperOptions`
|
||||
- `buildUrl` utility helps decide which URL to use depending on whether the shipper is configured to send to production or staging.
|
||||
- `eventsToNdjson` utility converts any array of events to NDJSON format.
|
||||
- `reportTelemetryCounters` helps with building the TelemetryCounter to emit after processing an event.
|
||||
|
||||
It should be considered an internal package and should not be used other than by the shipper implementations: `@kbn/ebt/shippers/elastic_v3/browser` and `@kbn/ebt/shippers/elastic_v3/server`
|
|
@ -1,15 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
export { buildHeaders } from './src/build_headers';
|
||||
export { buildUrl } from './src/build_url';
|
||||
export type { BuildUrlOptions } from './src/build_url';
|
||||
export { ErrorWithCode } from './src/error_with_code';
|
||||
export { eventsToNDJSON } from './src/events_to_ndjson';
|
||||
export { createTelemetryCounterHelper } from './src/report_telemetry_counters';
|
||||
export type { ElasticV3ShipperOptions } from './src/types';
|
|
@ -1,28 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
/** The options to build the URL of the V3 API. */
|
||||
export interface BuildUrlOptions {
|
||||
/** Whether to send it to production or staging. */
|
||||
sendTo: 'production' | 'staging';
|
||||
/** The name of the channel to send the data to. */
|
||||
channelName: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Builds the URL for the V3 API.
|
||||
* @param urlOptions The options to build the URL of the V3 API.
|
||||
*/
|
||||
export function buildUrl(urlOptions: BuildUrlOptions): string {
|
||||
const { sendTo, channelName } = urlOptions;
|
||||
const baseUrl =
|
||||
sendTo === 'production'
|
||||
? 'https://telemetry.elastic.co'
|
||||
: 'https://telemetry-staging.elastic.co';
|
||||
return `${baseUrl}/v3/send/${channelName}`;
|
||||
}
|
|
@ -1,20 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { ErrorWithCode } from './error_with_code';
|
||||
|
||||
describe('ErrorWithCode', () => {
|
||||
const error = new ErrorWithCode('test', 'test_code');
|
||||
test('message and code properties are publicly accessible', () => {
|
||||
expect(error.message).toBe('test');
|
||||
expect(error.code).toBe('test_code');
|
||||
});
|
||||
test('extends error', () => {
|
||||
expect(error).toBeInstanceOf(Error);
|
||||
});
|
||||
});
|
|
@ -1,21 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
/**
|
||||
* Custom error to report the error message with an additional error code.
|
||||
*/
|
||||
export class ErrorWithCode extends Error {
|
||||
/**
|
||||
* Constructor of the error.
|
||||
* @param message The error message.
|
||||
* @param code The code of the error.
|
||||
*/
|
||||
constructor(message: string, public readonly code: string) {
|
||||
super(message);
|
||||
}
|
||||
}
|
|
@ -1,50 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import type { Event } from '../../../../client';
|
||||
import { eventsToNDJSON } from './events_to_ndjson';
|
||||
|
||||
describe('eventsToNDJSON', () => {
|
||||
test('works with one event', () => {
|
||||
const event: Event = {
|
||||
timestamp: '2020-01-01T00:00:00.000Z',
|
||||
event_type: 'event_type',
|
||||
context: {},
|
||||
properties: {},
|
||||
};
|
||||
|
||||
// Mind the extra line at the bottom
|
||||
expect(eventsToNDJSON([event])).toMatchInlineSnapshot(`
|
||||
"{\\"timestamp\\":\\"2020-01-01T00:00:00.000Z\\",\\"event_type\\":\\"event_type\\",\\"context\\":{},\\"properties\\":{}}
|
||||
"
|
||||
`);
|
||||
});
|
||||
|
||||
test('works with many events', () => {
|
||||
const events: Event[] = [
|
||||
{
|
||||
timestamp: '2020-01-01T00:00:00.000Z',
|
||||
event_type: 'event_type',
|
||||
context: {},
|
||||
properties: {},
|
||||
},
|
||||
{
|
||||
timestamp: '2020-01-02T00:00:00.000Z',
|
||||
event_type: 'event_type',
|
||||
context: {},
|
||||
properties: {},
|
||||
},
|
||||
];
|
||||
|
||||
expect(eventsToNDJSON(events)).toMatchInlineSnapshot(`
|
||||
"{\\"timestamp\\":\\"2020-01-01T00:00:00.000Z\\",\\"event_type\\":\\"event_type\\",\\"context\\":{},\\"properties\\":{}}
|
||||
{\\"timestamp\\":\\"2020-01-02T00:00:00.000Z\\",\\"event_type\\":\\"event_type\\",\\"context\\":{},\\"properties\\":{}}
|
||||
"
|
||||
`);
|
||||
});
|
||||
});
|
|
@ -1,17 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import type { Event } from '../../../../client';
|
||||
|
||||
/**
|
||||
* Converts an array of events to a single ndjson string.
|
||||
* @param events An array of events {@link Event}
|
||||
*/
|
||||
export function eventsToNDJSON(events: Event[]): string {
|
||||
return `${events.map((event) => JSON.stringify(event)).join('\n')}\n`;
|
||||
}
|
|
@ -1,202 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { firstValueFrom, Subject, take, toArray } from 'rxjs';
|
||||
import type { Event, TelemetryCounter } from '../../../../client';
|
||||
import { createTelemetryCounterHelper } from './report_telemetry_counters';
|
||||
|
||||
describe('reportTelemetryCounters', () => {
|
||||
let reportTelemetryCounters: ReturnType<typeof createTelemetryCounterHelper>;
|
||||
let telemetryCounter$: Subject<TelemetryCounter>;
|
||||
|
||||
beforeEach(() => {
|
||||
telemetryCounter$ = new Subject<TelemetryCounter>();
|
||||
reportTelemetryCounters = createTelemetryCounterHelper(telemetryCounter$, 'my_shipper');
|
||||
});
|
||||
|
||||
test('emits a success counter for one event', async () => {
|
||||
const events: Event[] = [
|
||||
{
|
||||
timestamp: '2020-01-01T00:00:00.000Z',
|
||||
event_type: 'event_type_a',
|
||||
context: {},
|
||||
properties: {},
|
||||
},
|
||||
];
|
||||
|
||||
const counters = firstValueFrom(telemetryCounter$);
|
||||
|
||||
reportTelemetryCounters(events);
|
||||
|
||||
await expect(counters).resolves.toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"code": "OK",
|
||||
"count": 1,
|
||||
"event_type": "event_type_a",
|
||||
"source": "my_shipper",
|
||||
"type": "succeeded",
|
||||
}
|
||||
`);
|
||||
});
|
||||
|
||||
test('emits a success counter for one event with custom code', async () => {
|
||||
const events: Event[] = [
|
||||
{
|
||||
timestamp: '2020-01-01T00:00:00.000Z',
|
||||
event_type: 'event_type_a',
|
||||
context: {},
|
||||
properties: {},
|
||||
},
|
||||
];
|
||||
|
||||
const counters = firstValueFrom(telemetryCounter$);
|
||||
|
||||
reportTelemetryCounters(events, { code: 'my_code' });
|
||||
|
||||
await expect(counters).resolves.toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"code": "my_code",
|
||||
"count": 1,
|
||||
"event_type": "event_type_a",
|
||||
"source": "my_shipper",
|
||||
"type": "succeeded",
|
||||
}
|
||||
`);
|
||||
});
|
||||
|
||||
test('emits a counter with custom type', async () => {
|
||||
const events: Event[] = [
|
||||
{
|
||||
timestamp: '2020-01-01T00:00:00.000Z',
|
||||
event_type: 'event_type_a',
|
||||
context: {},
|
||||
properties: {},
|
||||
},
|
||||
];
|
||||
|
||||
const counters = firstValueFrom(telemetryCounter$);
|
||||
|
||||
reportTelemetryCounters(events, {
|
||||
type: 'dropped',
|
||||
code: 'my_code',
|
||||
});
|
||||
|
||||
await expect(counters).resolves.toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"code": "my_code",
|
||||
"count": 1,
|
||||
"event_type": "event_type_a",
|
||||
"source": "my_shipper",
|
||||
"type": "dropped",
|
||||
}
|
||||
`);
|
||||
});
|
||||
|
||||
test('emits a failure counter for one event with error message as a code', async () => {
|
||||
const events: Event[] = [
|
||||
{
|
||||
timestamp: '2020-01-01T00:00:00.000Z',
|
||||
event_type: 'event_type_a',
|
||||
context: {},
|
||||
properties: {},
|
||||
},
|
||||
];
|
||||
|
||||
const counters = firstValueFrom(telemetryCounter$);
|
||||
|
||||
reportTelemetryCounters(events, {
|
||||
error: new Error('Something went terribly wrong'),
|
||||
});
|
||||
|
||||
await expect(counters).resolves.toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"code": "Something went terribly wrong",
|
||||
"count": 1,
|
||||
"event_type": "event_type_a",
|
||||
"source": "my_shipper",
|
||||
"type": "failed",
|
||||
}
|
||||
`);
|
||||
});
|
||||
|
||||
test('emits a failure counter for one event with custom code', async () => {
|
||||
const events: Event[] = [
|
||||
{
|
||||
timestamp: '2020-01-01T00:00:00.000Z',
|
||||
event_type: 'event_type_a',
|
||||
context: {},
|
||||
properties: {},
|
||||
},
|
||||
];
|
||||
|
||||
const counters = firstValueFrom(telemetryCounter$);
|
||||
|
||||
reportTelemetryCounters(events, {
|
||||
code: 'my_code',
|
||||
error: new Error('Something went terribly wrong'),
|
||||
});
|
||||
|
||||
await expect(counters).resolves.toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"code": "my_code",
|
||||
"count": 1,
|
||||
"event_type": "event_type_a",
|
||||
"source": "my_shipper",
|
||||
"type": "failed",
|
||||
}
|
||||
`);
|
||||
});
|
||||
|
||||
test('emits a success counter for multiple events of different types', async () => {
|
||||
const events: Event[] = [
|
||||
// 2 types a
|
||||
{
|
||||
timestamp: '2020-01-01T00:00:00.000Z',
|
||||
event_type: 'event_type_a',
|
||||
context: {},
|
||||
properties: {},
|
||||
},
|
||||
{
|
||||
timestamp: '2020-01-01T00:00:00.000Z',
|
||||
event_type: 'event_type_a',
|
||||
context: {},
|
||||
properties: {},
|
||||
},
|
||||
// 1 type b
|
||||
{
|
||||
timestamp: '2020-01-01T00:00:00.000Z',
|
||||
event_type: 'event_type_b',
|
||||
context: {},
|
||||
properties: {},
|
||||
},
|
||||
];
|
||||
|
||||
const counters = firstValueFrom(telemetryCounter$.pipe(take(2), toArray()));
|
||||
|
||||
reportTelemetryCounters(events);
|
||||
|
||||
await expect(counters).resolves.toMatchInlineSnapshot(`
|
||||
Array [
|
||||
Object {
|
||||
"code": "OK",
|
||||
"count": 2,
|
||||
"event_type": "event_type_a",
|
||||
"source": "my_shipper",
|
||||
"type": "succeeded",
|
||||
},
|
||||
Object {
|
||||
"code": "OK",
|
||||
"count": 1,
|
||||
"event_type": "event_type_b",
|
||||
"source": "my_shipper",
|
||||
"type": "succeeded",
|
||||
},
|
||||
]
|
||||
`);
|
||||
});
|
||||
});
|
|
@ -1,62 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import type { Subject } from 'rxjs';
|
||||
import type { Event, TelemetryCounter, TelemetryCounterType } from '../../../../client';
|
||||
|
||||
/**
|
||||
* Creates a telemetry counter helper to make it easier to generate them
|
||||
* @param telemetryCounter$ The observable that will be used to emit the telemetry counters
|
||||
* @param source The name of the shipper that is sending the events.
|
||||
*/
|
||||
export function createTelemetryCounterHelper(
|
||||
telemetryCounter$: Subject<TelemetryCounter>,
|
||||
source: string
|
||||
) {
|
||||
/**
|
||||
* Triggers a telemetry counter for each event type.
|
||||
* @param events The events to trigger the telemetry counter for.
|
||||
* @param type The type of telemetry counter to trigger.
|
||||
* @param code The success or error code for additional detail about the result.
|
||||
* @param error The error that occurred, if any.
|
||||
*/
|
||||
return (
|
||||
events: Event[],
|
||||
{
|
||||
type,
|
||||
code,
|
||||
error,
|
||||
}: {
|
||||
type?: TelemetryCounterType;
|
||||
code?: string;
|
||||
error?: Error;
|
||||
} = {}
|
||||
) => {
|
||||
const eventTypeCounts = countEventTypes(events);
|
||||
Object.entries(eventTypeCounts).forEach(([eventType, count]) => {
|
||||
telemetryCounter$.next({
|
||||
source,
|
||||
type: type ?? (error ? 'failed' : 'succeeded'),
|
||||
code: code ?? error?.message ?? 'OK',
|
||||
count,
|
||||
event_type: eventType,
|
||||
});
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
function countEventTypes(events: Event[]) {
|
||||
return events.reduce((acc, event) => {
|
||||
if (acc[event.event_type]) {
|
||||
acc[event.event_type] += 1;
|
||||
} else {
|
||||
acc[event.event_type] = 1;
|
||||
}
|
||||
return acc;
|
||||
}, {} as Record<string, number>);
|
||||
}
|
|
@ -1,29 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
/**
|
||||
* Options for the Elastic V3 shipper
|
||||
*/
|
||||
export interface ElasticV3ShipperOptions {
|
||||
/**
|
||||
* The name of the channel to stream all the events to.
|
||||
*/
|
||||
channelName: string;
|
||||
/**
|
||||
* The product's version.
|
||||
*/
|
||||
version: string;
|
||||
/**
|
||||
* Provide it to override the Analytics client's default configuration.
|
||||
*/
|
||||
sendTo?: 'staging' | 'production';
|
||||
/**
|
||||
* Should show debug information about the requests it makes to the V3 API.
|
||||
*/
|
||||
debug?: boolean;
|
||||
}
|
|
@ -1,25 +0,0 @@
|
|||
# @kbn/ebt/shippers/elastic_v3/server
|
||||
|
||||
Server-side implementation of the Elastic V3 shipper for the `@kbn/ebt/client`.
|
||||
|
||||
## How to use it
|
||||
|
||||
This module is intended to be used **on the server-side only**. It is specially designed to apply the necessary backpressure mechanisms to prevent the server from getting overloaded with too many events and identify if the server sits behind a firewall to discard any incoming events. Refer to `@kbn/ebt/shippers/elastic_v3/browser` for the browser-side implementation.
|
||||
|
||||
```typescript
|
||||
import { ElasticV3ServerShipper } from "@kbn/ebt/shippers/elastic_v3/server";
|
||||
|
||||
analytics.registerShipper(ElasticV3ServerShipper, { channelName: 'myChannel', version: '1.0.0' });
|
||||
```
|
||||
|
||||
## Configuration
|
||||
|
||||
| Name | Description |
|
||||
|:-------------:|:-------------------------------------------------------------------------------------------|
|
||||
| `channelName` | The name of the channel to send the events. |
|
||||
| `version` | The version of the application generating the events. |
|
||||
| `debug` | When `true`, it logs the responses from the remote Telemetry Service. Defaults to `false`. |
|
||||
|
||||
## Transmission protocol
|
||||
|
||||
This shipper sends the events to the Elastic Internal Telemetry Service. It holds up to 1000 events in a shared queue. Any additional incoming events once it's full will be dropped. It sends the events from the queue in batches of up to 10kB every 10 seconds. When shutting down, it'll send all the remaining events in the queue.
|
|
@ -1,10 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
export type { ElasticV3ShipperOptions } from '../common';
|
||||
export { ElasticV3ServerShipper } from './src/server_shipper';
|
|
@ -1,15 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
export const fetchMock = jest.fn().mockResolvedValue({
|
||||
status: 200,
|
||||
ok: true,
|
||||
text: () => Promise.resolve('{"status": "ok"}'),
|
||||
});
|
||||
|
||||
jest.doMock('node-fetch', () => fetchMock);
|
|
@ -1,571 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { loggerMock } from '@kbn/logging-mocks';
|
||||
import { firstValueFrom } from 'rxjs';
|
||||
import type { AnalyticsClientInitContext, Event } from '../../../../client';
|
||||
import { fetchMock } from './server_shipper.test.mocks';
|
||||
import { ElasticV3ServerShipper } from './server_shipper';
|
||||
|
||||
const SECONDS = 1000;
|
||||
const MINUTES = 60 * SECONDS;
|
||||
|
||||
describe('ElasticV3ServerShipper', () => {
|
||||
const events: Event[] = [
|
||||
{
|
||||
timestamp: '2020-01-01T00:00:00.000Z',
|
||||
event_type: 'test-event-type',
|
||||
context: {},
|
||||
properties: {},
|
||||
},
|
||||
];
|
||||
|
||||
const initContext: AnalyticsClientInitContext = {
|
||||
sendTo: 'staging',
|
||||
isDev: true,
|
||||
logger: loggerMock.create(),
|
||||
};
|
||||
|
||||
let shipper: ElasticV3ServerShipper;
|
||||
|
||||
// eslint-disable-next-line dot-notation
|
||||
const setLastBatchSent = (ms: number) => (shipper['lastBatchSent'] = ms);
|
||||
|
||||
beforeEach(() => {
|
||||
jest.useFakeTimers({ legacyFakeTimers: false });
|
||||
|
||||
shipper = new ElasticV3ServerShipper(
|
||||
{ version: '1.2.3', channelName: 'test-channel', debug: true },
|
||||
initContext
|
||||
);
|
||||
// eslint-disable-next-line dot-notation
|
||||
shipper['firstTimeOffline'] = null; // The tests think connectivity is OK initially for easier testing.
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
shipper.shutdown();
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
test('set optIn should update the isOptedIn$ observable', () => {
|
||||
// eslint-disable-next-line dot-notation
|
||||
const getInternalOptIn = () => shipper['isOptedIn$'].value;
|
||||
|
||||
// Initially undefined
|
||||
expect(getInternalOptIn()).toBeUndefined();
|
||||
|
||||
shipper.optIn(true);
|
||||
expect(getInternalOptIn()).toBe(true);
|
||||
|
||||
shipper.optIn(false);
|
||||
expect(getInternalOptIn()).toBe(false);
|
||||
});
|
||||
|
||||
test('clears the queue after optIn: false', () => {
|
||||
shipper.reportEvents(events);
|
||||
// eslint-disable-next-line dot-notation
|
||||
expect(shipper['internalQueue'].length).toBe(1);
|
||||
|
||||
shipper.optIn(false);
|
||||
// eslint-disable-next-line dot-notation
|
||||
expect(shipper['internalQueue'].length).toBe(0);
|
||||
});
|
||||
|
||||
test('set extendContext should store local values: clusterUuid and licenseId', () => {
|
||||
// eslint-disable-next-line dot-notation
|
||||
const getInternalClusterUuid = () => shipper['clusterUuid'];
|
||||
// eslint-disable-next-line dot-notation
|
||||
const getInternalLicenseId = () => shipper['licenseId'];
|
||||
|
||||
// Initial values
|
||||
expect(getInternalClusterUuid()).toBe('UNKNOWN');
|
||||
expect(getInternalLicenseId()).toBeUndefined();
|
||||
|
||||
shipper.extendContext({ cluster_uuid: 'test-cluster-uuid' });
|
||||
expect(getInternalClusterUuid()).toBe('test-cluster-uuid');
|
||||
expect(getInternalLicenseId()).toBeUndefined();
|
||||
|
||||
shipper.extendContext({ license_id: 'test-license-id' });
|
||||
expect(getInternalClusterUuid()).toBe('test-cluster-uuid');
|
||||
expect(getInternalLicenseId()).toBe('test-license-id');
|
||||
|
||||
shipper.extendContext({ cluster_uuid: 'test-cluster-uuid-2', license_id: 'test-license-id-2' });
|
||||
expect(getInternalClusterUuid()).toBe('test-cluster-uuid-2');
|
||||
expect(getInternalLicenseId()).toBe('test-license-id-2');
|
||||
});
|
||||
|
||||
test('calls to reportEvents do not call `fetch` straight away', () => {
|
||||
shipper.reportEvents(events);
|
||||
expect(fetchMock).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test('calls to reportEvents do not call `fetch` after 10 minutes because no optIn value is set yet', async () => {
|
||||
shipper.reportEvents(events);
|
||||
await jest.advanceTimersByTimeAsync(10 * MINUTES);
|
||||
expect(fetchMock).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test('calls to reportEvents call `fetch` after 10 seconds when optIn value is set to true', async () => {
|
||||
shipper.reportEvents(events);
|
||||
shipper.optIn(true);
|
||||
const counter = firstValueFrom(shipper.telemetryCounter$);
|
||||
setLastBatchSent(Date.now() - 10 * SECONDS);
|
||||
await jest.advanceTimersByTimeAsync(1 * SECONDS); // Moving 1 second should be enough to trigger the logic
|
||||
expect(fetchMock).toHaveBeenCalledWith(
|
||||
'https://telemetry-staging.elastic.co/v3/send/test-channel',
|
||||
{
|
||||
body: '{"timestamp":"2020-01-01T00:00:00.000Z","event_type":"test-event-type","context":{},"properties":{}}\n',
|
||||
headers: {
|
||||
'content-type': 'application/x-ndjson',
|
||||
'x-elastic-cluster-id': 'UNKNOWN',
|
||||
'x-elastic-stack-version': '1.2.3',
|
||||
},
|
||||
method: 'POST',
|
||||
query: { debug: true },
|
||||
}
|
||||
);
|
||||
await expect(counter).resolves.toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"code": "200",
|
||||
"count": 1,
|
||||
"event_type": "test-event-type",
|
||||
"source": "elastic_v3_server",
|
||||
"type": "succeeded",
|
||||
}
|
||||
`);
|
||||
});
|
||||
|
||||
test('calls to reportEvents do not call `fetch` after 10 seconds when optIn value is set to false', async () => {
|
||||
shipper.reportEvents(events);
|
||||
shipper.optIn(false);
|
||||
setLastBatchSent(Date.now() - 10 * SECONDS);
|
||||
await jest.advanceTimersByTimeAsync(1 * SECONDS); // Moving 1 second should be enough to trigger the logic
|
||||
expect(fetchMock).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test('calls to reportEvents call `fetch` when shutting down if optIn value is set to true', async () => {
|
||||
shipper.reportEvents(events);
|
||||
shipper.optIn(true);
|
||||
const counter = firstValueFrom(shipper.telemetryCounter$);
|
||||
shipper.shutdown();
|
||||
jest.advanceTimersToNextTimer(); // We are handling the shutdown in a promise, so we need to wait for the next tick.
|
||||
expect(fetchMock).toHaveBeenCalledWith(
|
||||
'https://telemetry-staging.elastic.co/v3/send/test-channel',
|
||||
{
|
||||
body: '{"timestamp":"2020-01-01T00:00:00.000Z","event_type":"test-event-type","context":{},"properties":{}}\n',
|
||||
headers: {
|
||||
'content-type': 'application/x-ndjson',
|
||||
'x-elastic-cluster-id': 'UNKNOWN',
|
||||
'x-elastic-stack-version': '1.2.3',
|
||||
},
|
||||
method: 'POST',
|
||||
query: { debug: true },
|
||||
}
|
||||
);
|
||||
await expect(counter).resolves.toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"code": "200",
|
||||
"count": 1,
|
||||
"event_type": "test-event-type",
|
||||
"source": "elastic_v3_server",
|
||||
"type": "succeeded",
|
||||
}
|
||||
`);
|
||||
});
|
||||
|
||||
test('does not add the query.debug: true property to the request if the shipper is not set with the debug flag', async () => {
|
||||
shipper = new ElasticV3ServerShipper(
|
||||
{ version: '1.2.3', channelName: 'test-channel' },
|
||||
initContext
|
||||
);
|
||||
// eslint-disable-next-line dot-notation
|
||||
shipper['firstTimeOffline'] = null;
|
||||
shipper.reportEvents(events);
|
||||
shipper.optIn(true);
|
||||
setLastBatchSent(Date.now() - 10 * SECONDS);
|
||||
await jest.advanceTimersByTimeAsync(1 * SECONDS); // Moving 1 second should be enough to trigger the logic
|
||||
expect(fetchMock).toHaveBeenCalledWith(
|
||||
'https://telemetry-staging.elastic.co/v3/send/test-channel',
|
||||
{
|
||||
body: '{"timestamp":"2020-01-01T00:00:00.000Z","event_type":"test-event-type","context":{},"properties":{}}\n',
|
||||
headers: {
|
||||
'content-type': 'application/x-ndjson',
|
||||
'x-elastic-cluster-id': 'UNKNOWN',
|
||||
'x-elastic-stack-version': '1.2.3',
|
||||
},
|
||||
method: 'POST',
|
||||
}
|
||||
);
|
||||
});
|
||||
|
||||
test('sends when the queue overflows the 10kB leaky bucket one batch every 10s', async () => {
|
||||
expect.assertions(2 * 9 + 2);
|
||||
|
||||
shipper.reportEvents(new Array(1000).fill(events[0]));
|
||||
shipper.optIn(true);
|
||||
|
||||
// Due to the size of the test events, it matches 8 rounds.
|
||||
for (let i = 0; i < 9; i++) {
|
||||
const counter = firstValueFrom(shipper.telemetryCounter$);
|
||||
setLastBatchSent(Date.now() - 10 * SECONDS);
|
||||
await jest.advanceTimersByTimeAsync(1 * SECONDS); // Moving 1 second should be enough to trigger the logic
|
||||
expect(fetchMock).toHaveBeenNthCalledWith(
|
||||
i + 1,
|
||||
'https://telemetry-staging.elastic.co/v3/send/test-channel',
|
||||
{
|
||||
body: new Array(103)
|
||||
.fill(
|
||||
'{"timestamp":"2020-01-01T00:00:00.000Z","event_type":"test-event-type","context":{},"properties":{}}\n'
|
||||
)
|
||||
.join(''),
|
||||
headers: {
|
||||
'content-type': 'application/x-ndjson',
|
||||
'x-elastic-cluster-id': 'UNKNOWN',
|
||||
'x-elastic-stack-version': '1.2.3',
|
||||
},
|
||||
method: 'POST',
|
||||
query: { debug: true },
|
||||
}
|
||||
);
|
||||
await expect(counter).resolves.toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"code": "200",
|
||||
"count": 103,
|
||||
"event_type": "test-event-type",
|
||||
"source": "elastic_v3_server",
|
||||
"type": "succeeded",
|
||||
}
|
||||
`);
|
||||
jest.advanceTimersToNextTimer();
|
||||
}
|
||||
// eslint-disable-next-line dot-notation
|
||||
expect(shipper['internalQueue'].length).toBe(1000 - 9 * 103); // 73
|
||||
|
||||
// If we call it again, it should not enqueue all the events (only the ones to fill the queue):
|
||||
shipper.reportEvents(new Array(1000).fill(events[0]));
|
||||
// eslint-disable-next-line dot-notation
|
||||
expect(shipper['internalQueue'].length).toBe(1000);
|
||||
});
|
||||
|
||||
test('handles when the fetch request fails', async () => {
|
||||
fetchMock.mockRejectedValueOnce(new Error('Failed to fetch'));
|
||||
shipper.reportEvents(events);
|
||||
shipper.optIn(true);
|
||||
const counter = firstValueFrom(shipper.telemetryCounter$);
|
||||
setLastBatchSent(Date.now() - 10 * SECONDS);
|
||||
await jest.advanceTimersByTimeAsync(1 * SECONDS); // Moving 1 second should be enough to trigger the logic
|
||||
expect(fetchMock).toHaveBeenCalledWith(
|
||||
'https://telemetry-staging.elastic.co/v3/send/test-channel',
|
||||
{
|
||||
body: '{"timestamp":"2020-01-01T00:00:00.000Z","event_type":"test-event-type","context":{},"properties":{}}\n',
|
||||
headers: {
|
||||
'content-type': 'application/x-ndjson',
|
||||
'x-elastic-cluster-id': 'UNKNOWN',
|
||||
'x-elastic-stack-version': '1.2.3',
|
||||
},
|
||||
method: 'POST',
|
||||
query: { debug: true },
|
||||
}
|
||||
);
|
||||
await expect(counter).resolves.toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"code": "Failed to fetch",
|
||||
"count": 1,
|
||||
"event_type": "test-event-type",
|
||||
"source": "elastic_v3_server",
|
||||
"type": "failed",
|
||||
}
|
||||
`);
|
||||
});
|
||||
|
||||
test('handles when the fetch request fails (request completes but not OK response)', async () => {
  // The request completes but the server answers with a non-2xx status.
  fetchMock.mockResolvedValueOnce({
    ok: false,
    status: 400,
    text: () => Promise.resolve('{"status": "not ok"}'),
  });
  shipper.reportEvents(events);
  shipper.optIn(true);
  const counter = firstValueFrom(shipper.telemetryCounter$);
  // Pretend the last batch went out long ago so the leaky-bucket pacing allows a new send.
  setLastBatchSent(Date.now() - 10 * SECONDS);
  await jest.advanceTimersByTimeAsync(1 * SECONDS); // Moving 1 second should be enough to trigger the logic
  expect(fetchMock).toHaveBeenCalledWith(
    'https://telemetry-staging.elastic.co/v3/send/test-channel',
    {
      body: '{"timestamp":"2020-01-01T00:00:00.000Z","event_type":"test-event-type","context":{},"properties":{}}\n',
      headers: {
        'content-type': 'application/x-ndjson',
        'x-elastic-cluster-id': 'UNKNOWN',
        'x-elastic-stack-version': '1.2.3',
      },
      method: 'POST',
      query: { debug: true },
    }
  );
  // HTTP failures report the status code (as a string) as the counter code.
  await expect(counter).resolves.toMatchInlineSnapshot(`
    Object {
      "code": "400",
      "count": 1,
      "event_type": "test-event-type",
      "source": "elastic_v3_server",
      "type": "failed",
    }
  `);
});
|
||||
|
||||
describe('Connectivity Checks', () => {
  // The shipper probes the endpoint with an OPTIONS request only while it believes
  // it may be offline (firstTimeOffline !== null) AND the user is opted in.
  describe('connectivity check when connectivity is confirmed (firstTimeOffline === null)', () => {
    test.each([undefined, false, true])('does not run for opt-in %p', async (optInValue) => {
      if (optInValue !== undefined) {
        shipper.optIn(optInValue);
      }

      // From the start, it doesn't check connectivity because already confirmed
      expect(fetchMock).not.toHaveBeenCalledWith(
        'https://telemetry-staging.elastic.co/v3/send/test-channel',
        { method: 'OPTIONS' }
      );

      // Wait a big time (1 minute should be enough, but for the sake of tests...)
      await jest.advanceTimersByTimeAsync(10 * MINUTES);

      expect(fetchMock).not.toHaveBeenCalledWith(
        'https://telemetry-staging.elastic.co/v3/send/test-channel',
        { method: 'OPTIONS' }
      );
    });
  });

  describe('connectivity check with initial unknown state of the connectivity', () => {
    beforeEach(() => {
      // eslint-disable-next-line dot-notation
      shipper['firstTimeOffline'] = undefined; // Initial unknown state of the connectivity
    });

    test.each([undefined, false])('does not run for opt-in %p', async (optInValue) => {
      if (optInValue !== undefined) {
        shipper.optIn(optInValue);
      }

      // From the start, it doesn't check connectivity because already confirmed
      expect(fetchMock).not.toHaveBeenCalledWith(
        'https://telemetry-staging.elastic.co/v3/send/test-channel',
        { method: 'OPTIONS' }
      );

      // Wait a big time (1 minute should be enough, but for the sake of tests...)
      await jest.advanceTimersByTimeAsync(10 * MINUTES);

      expect(fetchMock).not.toHaveBeenCalledWith(
        'https://telemetry-staging.elastic.co/v3/send/test-channel',
        { method: 'OPTIONS' }
      );
    });

    test('runs as soon as opt-in is set to true', () => {
      shipper.optIn(true);

      // The opt-in emission short-circuits the 1-minute wait for the first connectivity check.
      expect(fetchMock).toHaveBeenNthCalledWith(
        1,
        'https://telemetry-staging.elastic.co/v3/send/test-channel',
        { method: 'OPTIONS' }
      );
    });
  });

  describe('connectivity check with the connectivity confirmed to be faulty', () => {
    beforeEach(() => {
      // eslint-disable-next-line dot-notation
      shipper['firstTimeOffline'] = 100; // Failed at some point
    });

    test.each([undefined, false])('does not run for opt-in %p', async (optInValue) => {
      if (optInValue !== undefined) {
        shipper.optIn(optInValue);
      }

      // From the start, it doesn't check connectivity because already confirmed
      expect(fetchMock).not.toHaveBeenCalledWith(
        'https://telemetry-staging.elastic.co/v3/send/test-channel',
        { method: 'OPTIONS' }
      );

      // Wait a big time (1 minute should be enough, but for the sake of tests...)
      await jest.advanceTimersByTimeAsync(10 * MINUTES);

      expect(fetchMock).not.toHaveBeenCalledWith(
        'https://telemetry-staging.elastic.co/v3/send/test-channel',
        { method: 'OPTIONS' }
      );
    });

    test('runs as soon as opt-in is set to true', () => {
      shipper.optIn(true);

      // The opt-in emission short-circuits the 1-minute wait for the first connectivity check.
      expect(fetchMock).toHaveBeenNthCalledWith(
        1,
        'https://telemetry-staging.elastic.co/v3/send/test-channel',
        { method: 'OPTIONS' }
      );
    });
  });

  describe('after report failure', () => {
    // generate the report failure for each test
    beforeEach(async () => {
      fetchMock.mockRejectedValueOnce(new Error('Failed to fetch'));
      shipper.reportEvents(events);
      shipper.optIn(true);
      const counter = firstValueFrom(shipper.telemetryCounter$);
      setLastBatchSent(Date.now() - 10 * SECONDS);
      await jest.advanceTimersByTimeAsync(1 * SECONDS); // Moving 1 second should be enough to trigger the logic
      expect(fetchMock).toHaveBeenNthCalledWith(
        1,
        'https://telemetry-staging.elastic.co/v3/send/test-channel',
        {
          body: '{"timestamp":"2020-01-01T00:00:00.000Z","event_type":"test-event-type","context":{},"properties":{}}\n',
          headers: {
            'content-type': 'application/x-ndjson',
            'x-elastic-cluster-id': 'UNKNOWN',
            'x-elastic-stack-version': '1.2.3',
          },
          method: 'POST',
          query: { debug: true },
        }
      );
      await expect(counter).resolves.toMatchInlineSnapshot(`
        Object {
          "code": "Failed to fetch",
          "count": 1,
          "event_type": "test-event-type",
          "source": "elastic_v3_server",
          "type": "failed",
        }
      `);
    });

    test('connectivity check runs periodically', async () => {
      // While offline, an OPTIONS probe is retried on a (backing-off) timer.
      fetchMock.mockRejectedValueOnce(new Error('Failed to fetch'));
      await jest.advanceTimersByTimeAsync(1 * MINUTES);
      expect(fetchMock).toHaveBeenNthCalledWith(
        2,
        'https://telemetry-staging.elastic.co/v3/send/test-channel',
        { method: 'OPTIONS' }
      );
      fetchMock.mockResolvedValueOnce({ ok: false });
      await jest.advanceTimersByTimeAsync(2 * MINUTES);
      expect(fetchMock).toHaveBeenNthCalledWith(
        3,
        'https://telemetry-staging.elastic.co/v3/send/test-channel',
        { method: 'OPTIONS' }
      );
    });
  });

  describe('after being offline for longer than 24h', () => {
    beforeEach(() => {
      shipper.optIn(true);
      shipper.reportEvents(events);
      // eslint-disable-next-line dot-notation
      expect(shipper['internalQueue'].length).toBe(1);
      // eslint-disable-next-line dot-notation
      shipper['firstTimeOffline'] = 100; // Epoch ms far in the past => offline for > 24h
    });

    test('the following connectivity check clears the queue', async () => {
      fetchMock.mockRejectedValueOnce(new Error('Failed to fetch'));
      await jest.advanceTimersByTimeAsync(1 * MINUTES);
      expect(fetchMock).toHaveBeenNthCalledWith(
        1,
        'https://telemetry-staging.elastic.co/v3/send/test-channel',
        { method: 'OPTIONS' }
      );
      // eslint-disable-next-line dot-notation
      expect(shipper['internalQueue'].length).toBe(0);
    });

    test('new events are not added to the queue', async () => {
      fetchMock.mockRejectedValueOnce(new Error('Failed to fetch'));
      await jest.advanceTimersByTimeAsync(1 * MINUTES);
      expect(fetchMock).toHaveBeenNthCalledWith(
        1,
        'https://telemetry-staging.elastic.co/v3/send/test-channel',
        { method: 'OPTIONS' }
      );
      // eslint-disable-next-line dot-notation
      expect(shipper['internalQueue'].length).toBe(0);

      // After 24h offline, reportEvents becomes a no-op.
      shipper.reportEvents(events);
      // eslint-disable-next-line dot-notation
      expect(shipper['internalQueue'].length).toBe(0);
    });

    test('regains the connection', async () => {
      fetchMock.mockResolvedValueOnce({ ok: true });
      await jest.advanceTimersByTimeAsync(1 * MINUTES);
      expect(fetchMock).toHaveBeenNthCalledWith(
        1,
        'https://telemetry-staging.elastic.co/v3/send/test-channel',
        { method: 'OPTIONS' }
      );
      // A successful probe marks the shipper online again (null = online).
      // eslint-disable-next-line dot-notation
      expect(shipper['firstTimeOffline']).toBe(null);

      // Once online, no further OPTIONS probes should happen.
      await jest.advanceTimersByTimeAsync(10 * MINUTES);
      expect(fetchMock).not.toHaveBeenNthCalledWith(
        2,
        'https://telemetry-staging.elastic.co/v3/send/test-channel',
        { method: 'OPTIONS' }
      );
    });
  });
});
|
||||
|
||||
describe('flush method', () => {
  test('resolves straight away if it should not send anything', async () => {
    await expect(shipper.flush()).resolves.toBe(undefined);
  });

  test('resolves when all the ongoing requests are complete', async () => {
    shipper.optIn(true);
    shipper.reportEvents(events);
    expect(fetchMock).toHaveBeenCalledTimes(0);
    // While the request is in flight, the in-flight counter must read 1.
    fetchMock.mockImplementation(async () => {
      // eslint-disable-next-line dot-notation
      expect(shipper['inFlightRequests$'].value).toBe(1);
    });
    // flush() must force the send and only resolve once the request settled.
    await expect(shipper.flush()).resolves.toBe(undefined);
    expect(fetchMock).toHaveBeenCalledWith(
      'https://telemetry-staging.elastic.co/v3/send/test-channel',
      {
        body: '{"timestamp":"2020-01-01T00:00:00.000Z","event_type":"test-event-type","context":{},"properties":{}}\n',
        headers: {
          'content-type': 'application/x-ndjson',
          'x-elastic-cluster-id': 'UNKNOWN',
          'x-elastic-stack-version': '1.2.3',
        },
        method: 'POST',
        query: { debug: true },
      }
    );
  });

  test('calling flush multiple times does not keep hanging', async () => {
    await expect(shipper.flush()).resolves.toBe(undefined);
    await expect(shipper.flush()).resolves.toBe(undefined);
    // Concurrent flushes must also all resolve.
    await Promise.all([shipper.flush(), shipper.flush()]);
  });

  test('calling flush after shutdown does not keep hanging', async () => {
    shipper.shutdown();
    await expect(shipper.flush()).resolves.toBe(undefined);
  });
});
|
||||
});
|
|
@ -1,370 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import fetch from 'node-fetch';
|
||||
import {
|
||||
filter,
|
||||
Subject,
|
||||
ReplaySubject,
|
||||
interval,
|
||||
merge,
|
||||
timer,
|
||||
retryWhen,
|
||||
tap,
|
||||
delayWhen,
|
||||
takeUntil,
|
||||
map,
|
||||
BehaviorSubject,
|
||||
exhaustMap,
|
||||
mergeMap,
|
||||
skip,
|
||||
firstValueFrom,
|
||||
} from 'rxjs';
|
||||
import {
|
||||
type ElasticV3ShipperOptions,
|
||||
buildHeaders,
|
||||
buildUrl,
|
||||
createTelemetryCounterHelper,
|
||||
eventsToNDJSON,
|
||||
ErrorWithCode,
|
||||
} from '../../common';
|
||||
import type {
|
||||
AnalyticsClientInitContext,
|
||||
Event,
|
||||
EventContext,
|
||||
IShipper,
|
||||
TelemetryCounter,
|
||||
} from '../../../../client';
|
||||
|
||||
// Time unit helpers, in milliseconds.
const SECOND = 1000;
const MINUTE = 60 * SECOND;
const HOUR = 60 * MINUTE;
// One kibibyte; batches are capped at 10 * KIB per send.
const KIB = 1024;
// Hard cap on events held in memory; anything beyond this is dropped and counted.
const MAX_NUMBER_OF_EVENTS_IN_INTERNAL_QUEUE = 1000;
// Leaky-bucket pacing: never send batches more often than this.
const MIN_TIME_SINCE_LAST_SEND = 10 * SECOND;
|
||||
|
||||
/**
 * Elastic V3 shipper to use on the server side.
 *
 * Buffers events in an in-memory queue (leaky bucket) and periodically POSTs
 * them as NDJSON to the Elastic V3 telemetry endpoint. While the endpoint is
 * unreachable it probes connectivity with OPTIONS requests using exponential
 * backoff; after 24h offline it drops the queue and stops accepting events.
 */
export class ElasticV3ServerShipper implements IShipper {
  /** Shipper's unique name */
  public static shipperName = 'elastic_v3_server';

  /** Observable to emit the stats of the processed events. */
  public readonly telemetryCounter$ = new Subject<TelemetryCounter>();

  // Helper that reports success/failure/dropped counters through telemetryCounter$.
  private readonly reportTelemetryCounters = createTelemetryCounterHelper(
    this.telemetryCounter$,
    ElasticV3ServerShipper.shipperName
  );

  // Leaky-bucket buffer of events waiting to be sent (max MAX_NUMBER_OF_EVENTS_IN_INTERNAL_QUEUE).
  private readonly internalQueue: Event[] = [];
  // ReplaySubject(1) so late subscribers still observe that shutdown already happened.
  private readonly shutdown$ = new ReplaySubject<void>(1);
  private readonly flush$ = new Subject<void>();
  // Number of HTTP requests currently in flight; flush() waits for it to reach 0.
  private readonly inFlightRequests$ = new BehaviorSubject<number>(0);
  // undefined = opt-in not decided yet; true/false once known.
  private readonly isOptedIn$ = new BehaviorSubject<boolean | undefined>(undefined);

  private readonly url: string;

  // Timestamp of the last batch sent, used for the MIN_TIME_SINCE_LAST_SEND pacing.
  private lastBatchSent = Date.now();

  // Values forwarded in the request headers; refreshed via extendContext().
  private clusterUuid: string = 'UNKNOWN';
  private licenseId?: string;

  /**
   * Specifies when it went offline:
   * - `undefined` means it doesn't know yet whether it is online or offline
   * - `null` means it's online
   * - `number` means it's offline since that time
   * @private
   */
  private firstTimeOffline?: number | null;

  /**
   * Creates a new instance of the {@link ElasticV3ServerShipper}.
   * @param options {@link ElasticV3ShipperOptions}
   * @param initContext {@link AnalyticsClientInitContext}
   */
  constructor(
    private readonly options: ElasticV3ShipperOptions,
    private readonly initContext: AnalyticsClientInitContext
  ) {
    this.url = buildUrl({
      sendTo: options.sendTo ?? initContext.sendTo,
      channelName: options.channelName,
    });
    this.setInternalSubscriber();
    this.checkConnectivity();
  }

  /**
   * Uses the `cluster_uuid` and `license_id` from the context to hold them in memory for the generation of the headers
   * used later on in the HTTP request.
   * @param newContext The full new context to set {@link EventContext}
   */
  public extendContext(newContext: EventContext) {
    if (newContext.cluster_uuid) {
      this.clusterUuid = newContext.cluster_uuid;
    }
    if (newContext.license_id) {
      this.licenseId = newContext.license_id;
    }
  }

  /**
   * When `false`, it flushes the internal queue and stops sending events.
   * @param isOptedIn `true` for resume sending events. `false` to stop.
   */
  public optIn(isOptedIn: boolean) {
    this.isOptedIn$.next(isOptedIn);

    if (isOptedIn === false) {
      // Discard anything pending: the user does not want it sent.
      this.internalQueue.length = 0;
    }
  }

  /**
   * Enqueues the events to be sent via the leaky bucket algorithm.
   * @param events batched events {@link Event}
   */
  public reportEvents(events: Event[]) {
    // If opted out OR offline for longer than 24 hours, skip processing any events.
    if (
      this.isOptedIn$.value === false ||
      (this.firstTimeOffline && Date.now() - this.firstTimeOffline > 24 * HOUR)
    ) {
      return;
    }

    const freeSpace = MAX_NUMBER_OF_EVENTS_IN_INTERNAL_QUEUE - this.internalQueue.length;

    // As per design, we only want store up-to 1000 events at a time. Drop anything that goes beyond that limit
    if (freeSpace < events.length) {
      const toDrop = events.length - freeSpace;
      // NOTE: splice mutates the caller-provided array, removing the dropped tail.
      const droppedEvents = events.splice(-toDrop, toDrop);
      this.reportTelemetryCounters(droppedEvents, {
        type: 'dropped',
        code: 'queue_full',
      });
    }

    this.internalQueue.push(...events);
  }

  /**
   * Triggers a flush of the internal queue to attempt to send any events held in the queue
   * and resolves the returned promise once the queue is emptied.
   */
  public async flush() {
    if (this.flush$.isStopped) {
      // If called after shutdown, return straight away
      return;
    }

    const promise = firstValueFrom(
      this.inFlightRequests$.pipe(
        skip(1), // Skipping the first value because BehaviourSubjects always emit the current value on subscribe.
        filter((count) => count === 0) // Wait until all the inflight requests are completed.
      )
    );
    this.flush$.next();
    await promise;
  }

  /**
   * Shuts down the shipper.
   * Triggers a flush of the internal queue to attempt to send any events held in the queue.
   */
  public shutdown() {
    this.shutdown$.next();
    this.flush$.complete();
    this.shutdown$.complete();
    this.isOptedIn$.complete();
  }

  /**
   * Checks the server has connectivity to the remote endpoint.
   * The frequency of the connectivity tests will back off, starting with 1 minute, and multiplying by 2
   * until it reaches 1 hour. Then, it’ll keep the 1h frequency until it reaches 24h without connectivity.
   * At that point, it clears the queue and stops accepting events in the queue.
   * The connectivity checks will continue to happen every 1 hour just in case it regains it at any point.
   * @private
   */
  private checkConnectivity() {
    // Backoff state shared with the retry pipeline below via closure.
    let backoff = 1 * MINUTE;
    merge(
      timer(0, 1 * MINUTE),
      // Also react to opt-in changes to avoid being stalled for 1 minute for the first connectivity check.
      // More details in: https://github.com/elastic/kibana/issues/135647
      this.isOptedIn$
    )
      .pipe(
        takeUntil(this.shutdown$),
        // Only probe while opted in and not confirmed online (null = online).
        filter(() => this.isOptedIn$.value === true && this.firstTimeOffline !== null),
        // Using exhaustMap here because one request at a time is enough to check the connectivity.
        exhaustMap(async () => {
          const { ok } = await fetch(this.url, {
            method: 'OPTIONS',
          });

          if (!ok) {
            throw new Error(`Failed to connect to ${this.url}`);
          }

          // Probe succeeded: mark online and reset the backoff.
          this.firstTimeOffline = null;
          backoff = 1 * MINUTE;
        }),
        retryWhen((errors) =>
          errors.pipe(
            takeUntil(this.shutdown$),
            tap(() => {
              if (!this.firstTimeOffline) {
                // First observed failure: remember when we went offline.
                this.firstTimeOffline = Date.now();
              } else if (Date.now() - this.firstTimeOffline > 24 * HOUR) {
                // Offline for over 24h: give up on the queued events.
                this.internalQueue.length = 0;
              }
              // Exponential backoff, capped at 1 hour between probes.
              backoff = backoff * 2;
              if (backoff > 1 * HOUR) {
                backoff = 1 * HOUR;
              }
            }),
            delayWhen(() => timer(backoff))
          )
        )
      )
      .subscribe();
  }

  /**
   * Wires the internal send loop: a paced ticker plus flush/shutdown triggers
   * drive batches out of the internal queue through {@link sendEvents}.
   * @private
   */
  private setInternalSubscriber() {
    // Create an emitter that emits when MIN_TIME_SINCE_LAST_SEND have passed since the last time we sent the data
    const minimumTimeSinceLastSent$ = interval(SECOND).pipe(
      filter(() => Date.now() - this.lastBatchSent >= MIN_TIME_SINCE_LAST_SEND)
    );

    merge(
      minimumTimeSinceLastSent$.pipe(
        takeUntil(this.shutdown$),
        map(() => ({ shouldFlush: false }))
      ),
      // Whenever a `flush` request comes in
      this.flush$.pipe(map(() => ({ shouldFlush: true }))),
      // Attempt to send one last time on shutdown, flushing the queue
      this.shutdown$.pipe(map(() => ({ shouldFlush: true })))
    )
      .pipe(
        // Only move ahead if it's opted-in and online, and there are some events in the queue
        filter(() => {
          const shouldSendAnything =
            this.isOptedIn$.value === true &&
            this.firstTimeOffline === null &&
            this.internalQueue.length > 0;

          // If it should not send anything, re-emit the inflight request observable just in case it's already 0
          if (!shouldSendAnything) {
            this.inFlightRequests$.next(this.inFlightRequests$.value);
          }

          return shouldSendAnything;
        }),

        // Send the events:
        // 1. Set lastBatchSent and retrieve the events to send (clearing the queue) in a synchronous operation to avoid race conditions.
        map(({ shouldFlush }) => {
          this.lastBatchSent = Date.now();
          return this.getEventsToSend(shouldFlush);
        }),
        // 2. Skip empty buffers (just to be sure)
        filter((events) => events.length > 0),
        // 3. Actually send the events
        // Using `mergeMap` here because we want to send events whenever the emitter says so:
        // We don't want to skip emissions (exhaustMap) or enqueue them (concatMap).
        mergeMap((eventsToSend) => this.sendEvents(eventsToSend))
      )
      .subscribe();
  }

  /**
   * Calculates the size of the queue in bytes.
   * @returns The number of bytes held in the queue.
   * @private
   */
  private getQueueByteSize(queue: Event[]) {
    return queue.reduce((acc, event) => {
      return acc + this.getEventSize(event);
    }, 0);
  }

  /**
   * Calculates the size of the event in bytes.
   * @param event The event to calculate the size of.
   * @returns The number of bytes held in the event.
   * @private
   */
  private getEventSize(event: Event) {
    return Buffer.from(JSON.stringify(event)).length;
  }

  /**
   * Returns a queue of events of up-to 10kB. Or all events in the queue if it's a FLUSH action.
   * @remarks It mutates the internal queue by removing from it the events returned by this method.
   * @private
   */
  private getEventsToSend(shouldFlush: boolean): Event[] {
    // If the internal queue is already smaller than the minimum batch size, or it's a flush action, do a direct assignment.
    if (shouldFlush || this.getQueueByteSize(this.internalQueue) < 10 * KIB) {
      return this.internalQueue.splice(0, this.internalQueue.length);
    }
    // Otherwise, we'll feed the events to the leaky bucket queue until we reach 10kB.
    const queue: Event[] = [];
    let queueByteSize = 0;
    while (queueByteSize < 10 * KIB) {
      // Non-null assertion is safe: the branch above guarantees the queue holds >= 10kB.
      const event = this.internalQueue.shift()!;
      queueByteSize += this.getEventSize(event);
      queue.push(event);
    }
    return queue;
  }

  /**
   * Sends one batch, updating the in-flight counter and the telemetry counters.
   * Never throws: failures are reported through the counters instead.
   * @param events The batch to report.
   * @private
   */
  private async sendEvents(events: Event[]) {
    this.initContext.logger.debug(`Reporting ${events.length} events...`);
    this.inFlightRequests$.next(this.inFlightRequests$.value + 1);
    try {
      const code = await this.makeRequest(events);
      this.reportTelemetryCounters(events, { code });
      this.initContext.logger.debug(`Reported ${events.length} events...`);
    } catch (error) {
      this.initContext.logger.debug(`Failed to report ${events.length} events...`);
      this.initContext.logger.debug(error);
      this.reportTelemetryCounters(events, { code: error.code, error });
      // Back to "unknown" connectivity so the checker re-probes the endpoint.
      this.firstTimeOffline = undefined;
    }
    this.inFlightRequests$.next(Math.max(0, this.inFlightRequests$.value - 1));
  }

  /**
   * POSTs the events as NDJSON to the telemetry endpoint.
   * @param events The batch to serialize and send.
   * @returns The HTTP status code as a string.
   * @throws ErrorWithCode when the response is not OK (code = HTTP status).
   * @private
   */
  private async makeRequest(events: Event[]): Promise<string> {
    const response = await fetch(this.url, {
      method: 'POST',
      body: eventsToNDJSON(events),
      headers: buildHeaders(this.clusterUuid, this.options.version, this.licenseId),
      ...(this.options.debug && { query: { debug: true } }),
    });

    if (this.options.debug) {
      this.initContext.logger.debug(`${response.status} - ${await response.text()}`);
    }

    if (!response.ok) {
      throw new ErrorWithCode(
        `${response.status} - ${await response.text()}`,
        `${response.status}`
      );
    }

    return `${response.status}`;
  }
}
|
|
@ -1,31 +0,0 @@
|
|||
# @kbn/ebt/shippers/fullstory
|
||||
|
||||
FullStory implementation as a shipper for the `@kbn/ebt/client`.
|
||||
|
||||
## How to use it
|
||||
|
||||
This module is intended to be used **on the browser only**. It does not support server-side events.
|
||||
|
||||
```typescript
|
||||
import { FullStoryShipper } from "@kbn/ebt/shippers/fullstory";
|
||||
|
||||
analytics.registerShipper(FullStoryShipper, { fullStoryOrgId: '12345' })
|
||||
```
|
||||
|
||||
## Configuration
|
||||
|
||||
| Name | Description |
|
||||
|:----------------:|:----------------------------------------------------------------------------------------------------------------------------------|
|
||||
| `fullStoryOrgId` | FullStory account ID |
|
||||
| `host`           | The host to send the data to. Used to overcome ad blockers by routing through custom DNS entries. If not specified, it defaults to `fullstory.com`. |
|
||||
| `scriptUrl` | The URL to load the FullStory client from. Falls back to `edge.fullstory.com/s/fs.js` if not specified. |
|
||||
| `debug` | Whether the debug logs should be printed to the console. Defaults to `false`. |
|
||||
| `namespace` | The name of the variable where the API is stored: `window[namespace]`. Defaults to `FS`. |
|
||||
|
||||
## FullStory Custom Events Rate Limits
|
||||
|
||||
FullStory limits the number of custom events that can be sent per second ([docs](https://help.fullstory.com/hc/en-us/articles/360020623234#custom-property-rate-limiting)). In order to comply with that limit, this shipper will only emit the event types registered in the allow-list defined in the constant [CUSTOM_EVENT_TYPES_ALLOWLIST](./src/fullstory_shipper.ts). We may change this behaviour in the future to a remotely-controlled list of events or rely on the opt-in _cherry-pick_ config mechanism of the Analytics Client.
|
||||
|
||||
## Transmission protocol
|
||||
|
||||
This shipper relies on FullStory official snippet. The internals about how it transfers the data are not documented.
|
|
@ -1,11 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
// Public entry point of the FullStory shipper package: re-export the shipper
// class and the configuration types consumers need to register it.
export { FullStoryShipper } from './src/fullstory_shipper';
export type { FullStoryShipperConfig } from './src/fullstory_shipper';
export type { FullStorySnippetConfig } from './src/load_snippet';
|
|
@ -1,146 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { formatPayload } from './format_payload';
|
||||
|
||||
describe('formatPayload', () => {
|
||||
test('appends `_str` to string values', () => {
|
||||
const payload = {
|
||||
foo: 'bar',
|
||||
baz: ['qux'],
|
||||
};
|
||||
|
||||
expect(formatPayload(payload)).toEqual({
|
||||
foo_str: payload.foo,
|
||||
baz_strs: payload.baz,
|
||||
});
|
||||
});
|
||||
|
||||
test('appends `_int` to integer values', () => {
|
||||
const payload = {
|
||||
foo: 1,
|
||||
baz: [100000],
|
||||
};
|
||||
|
||||
expect(formatPayload(payload)).toEqual({
|
||||
foo_int: payload.foo,
|
||||
baz_ints: payload.baz,
|
||||
});
|
||||
});
|
||||
|
||||
test('appends `_real` to integer values', () => {
|
||||
const payload = {
|
||||
foo: 1.5,
|
||||
baz: [100000.5],
|
||||
};
|
||||
|
||||
expect(formatPayload(payload)).toEqual({
|
||||
foo_real: payload.foo,
|
||||
baz_reals: payload.baz,
|
||||
});
|
||||
});
|
||||
|
||||
test('appends `_bool` to booleans values', () => {
|
||||
const payload = {
|
||||
foo: true,
|
||||
baz: [false],
|
||||
};
|
||||
|
||||
expect(formatPayload(payload)).toEqual({
|
||||
foo_bool: payload.foo,
|
||||
baz_bools: payload.baz,
|
||||
});
|
||||
});
|
||||
|
||||
test('appends `_date` to Date values', () => {
|
||||
const payload = {
|
||||
foo: new Date(),
|
||||
baz: [new Date()],
|
||||
};
|
||||
|
||||
expect(formatPayload(payload)).toEqual({
|
||||
foo_date: payload.foo,
|
||||
baz_dates: payload.baz,
|
||||
});
|
||||
});
|
||||
|
||||
test('supports nested values', () => {
|
||||
const payload = {
|
||||
nested: {
|
||||
foo: 'bar',
|
||||
baz: ['qux'],
|
||||
},
|
||||
};
|
||||
|
||||
expect(formatPayload(payload)).toEqual({
|
||||
nested: {
|
||||
foo_str: payload.nested.foo,
|
||||
baz_strs: payload.nested.baz,
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
test('does not mutate reserved keys', () => {
|
||||
const payload = {
|
||||
uid: 'uid',
|
||||
displayName: 'displayName',
|
||||
email: 'email',
|
||||
acctId: 'acctId',
|
||||
website: 'website',
|
||||
pageName: 'pageName',
|
||||
};
|
||||
|
||||
expect(formatPayload(payload)).toEqual(payload);
|
||||
});
|
||||
|
||||
test('removes undefined values', () => {
|
||||
const payload = {
|
||||
foo: undefined,
|
||||
baz: [undefined],
|
||||
};
|
||||
|
||||
expect(formatPayload(payload)).toEqual({});
|
||||
});
|
||||
|
||||
test('throws if null is provided', () => {
|
||||
const payload = {
|
||||
foo: null,
|
||||
baz: [null],
|
||||
};
|
||||
|
||||
expect(() => formatPayload(payload)).toThrowErrorMatchingInlineSnapshot(
|
||||
`"Unsupported type: object"`
|
||||
);
|
||||
});
|
||||
|
||||
describe('String to Date identification', () => {
|
||||
test('appends `_date` to ISO string values', () => {
|
||||
const payload = {
|
||||
foo: new Date().toISOString(),
|
||||
baz: [new Date().toISOString()],
|
||||
};
|
||||
|
||||
expect(formatPayload(payload)).toEqual({
|
||||
foo_date: payload.foo,
|
||||
baz_dates: payload.baz,
|
||||
});
|
||||
});
|
||||
|
||||
test('appends `_str` to random string values', () => {
|
||||
const payload = {
|
||||
foo: 'test-1',
|
||||
baz: ['test-1'],
|
||||
};
|
||||
|
||||
expect(formatPayload(payload)).toEqual({
|
||||
foo_str: payload.foo,
|
||||
baz_strs: payload.baz,
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
|
@ -1,84 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import moment from 'moment';
|
||||
|
||||
// https://help.fullstory.com/hc/en-us/articles/360020623234#reserved-properties
// Keys that FullStory treats specially: they must be sent as-is, with no
// type-suffix renaming applied (unlike every other property).
const FULLSTORY_RESERVED_PROPERTIES = [
  'uid',
  'displayName',
  'email',
  'acctId',
  'website',
  // https://developer.fullstory.com/page-variables
  'pageName',
];
|
||||
|
||||
export function formatPayload(context: object): Record<string, unknown> {
|
||||
// format context keys as required for env vars, see docs: https://help.fullstory.com/hc/en-us/articles/360020623234
|
||||
return Object.fromEntries(
|
||||
Object.entries(context)
|
||||
// Discard any undefined values
|
||||
.map<[string, unknown]>(([key, value]) => {
|
||||
return Array.isArray(value)
|
||||
? [key, value.filter((v) => typeof v !== 'undefined')]
|
||||
: [key, value];
|
||||
})
|
||||
.filter(
|
||||
([, value]) => typeof value !== 'undefined' && (!Array.isArray(value) || value.length > 0)
|
||||
)
|
||||
// Transform key names according to the FullStory needs
|
||||
.map(([key, value]) => {
|
||||
if (FULLSTORY_RESERVED_PROPERTIES.includes(key)) {
|
||||
return [key, value];
|
||||
}
|
||||
if (isRecord(value)) {
|
||||
return [key, formatPayload(value)];
|
||||
}
|
||||
const valueType = getFullStoryType(value);
|
||||
const formattedKey = valueType ? `${key}_${valueType}` : key;
|
||||
return [formattedKey, value];
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
/**
 * Returns the FullStory type suffix for a primitive (or array of primitives):
 * `str`/`date` for strings, `int`/`real` for numbers, `bool` for booleans and
 * `date` for Date instances. Arrays return the plural form (e.g. `strs`).
 * Throws for unsupported values (e.g. `null`, functions, plain objects).
 */
function getFullStoryType(value: unknown) {
  // For arrays, make the decision based on the first element
  const isArray = Array.isArray(value);
  const v = isArray ? value[0] : value;
  let type: string;
  switch (typeof v) {
    case 'string':
      // Strict ISO-8601 strings are reported as dates; anything else is a plain string.
      type = moment(v, moment.ISO_8601, true).isValid() ? 'date' : 'str';
      break;
    case 'number':
      type = Number.isInteger(v) ? 'int' : 'real';
      break;
    case 'boolean':
      type = 'bool';
      break;
    case 'object':
      if (isDate(v)) {
        type = 'date';
        break;
      }
    // Intentional fallthrough: non-Date objects (including `null`) are unsupported.
    default:
      throw new Error(`Unsupported type: ${typeof v}`);
  }

  // convert to plural form for arrays
  return isArray ? `${type}s` : type;
}
|
||||
|
||||
function isRecord(value: unknown): value is Record<string, unknown> {
|
||||
return typeof value === 'object' && value !== null && !Array.isArray(value) && !isDate(value);
|
||||
}
|
||||
|
||||
function isDate(value: unknown): value is Date {
|
||||
return value instanceof Date;
|
||||
}
|
|
@ -1,32 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import * as RxJS from 'rxjs';
|
||||
import type { FullStoryApi } from './types';
|
||||
|
||||
// Fully-mocked FullStory browser API. Tests import this to assert which FS calls
// the shipper performed and with which arguments.
export const fullStoryApiMock: jest.Mocked<FullStoryApi> = {
  identify: jest.fn(),
  setUserVars: jest.fn(),
  setVars: jest.fn(),
  consent: jest.fn(),
  restart: jest.fn(),
  shutdown: jest.fn(),
  event: jest.fn(),
};

// Replace the real snippet loader so the shipper under test receives the mock
// instead of injecting the actual FullStory script tag.
jest.doMock('./load_snippet', () => {
  return {
    loadSnippet: () => fullStoryApiMock,
  };
});

// Neutralize `debounceTime` so the shipper's page-vars pipeline emits
// synchronously and tests can assert without waiting for timers.
jest.doMock('rxjs', () => {
  return {
    ...RxJS,
    debounceTime: () => RxJS.identity,
  };
});
|
|
@ -1,227 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { loggerMock } from '@kbn/logging-mocks';
|
||||
import { fullStoryApiMock } from './fullstory_shipper.test.mocks';
|
||||
import { FullStoryShipper } from './fullstory_shipper';
|
||||
|
||||
// Unit tests for FullStoryShipper. The FS API and the snippet loader are mocked
// in fullstory_shipper.test.mocks, and rxjs `debounceTime` is stubbed out there
// so context updates can be asserted synchronously.
describe('FullStoryShipper', () => {
  let fullstoryShipper: FullStoryShipper;

  beforeEach(() => {
    jest.resetAllMocks();
    fullstoryShipper = new FullStoryShipper(
      {
        debug: true,
        fullStoryOrgId: 'test-org-id',
      },
      {
        logger: loggerMock.create(),
        sendTo: 'staging',
        isDev: true,
      }
    );
  });

  afterEach(() => {
    // Tear down the shipper's internal subscriptions between tests.
    fullstoryShipper.shutdown();
  });

  describe('extendContext', () => {
    describe('FS.identify', () => {
      test('calls `identify` when the userId is provided', () => {
        const userId = 'test-user-id';
        fullstoryShipper.extendContext({ userId });
        expect(fullStoryApiMock.identify).toHaveBeenCalledWith(userId);
      });

      test('calls `identify` again only if the userId changes', () => {
        const userId = 'test-user-id';
        fullstoryShipper.extendContext({ userId });
        expect(fullStoryApiMock.identify).toHaveBeenCalledTimes(1);
        expect(fullStoryApiMock.identify).toHaveBeenCalledWith(userId);

        fullstoryShipper.extendContext({ userId });
        expect(fullStoryApiMock.identify).toHaveBeenCalledTimes(1); // still only called once

        fullstoryShipper.extendContext({ userId: `${userId}-1` });
        expect(fullStoryApiMock.identify).toHaveBeenCalledTimes(2); // called again because the user changed
        expect(fullStoryApiMock.identify).toHaveBeenCalledWith(`${userId}-1`);
      });
    });

    describe('FS.setUserVars', () => {
      test('calls `setUserVars` when isElasticCloudUser: true is provided', () => {
        fullstoryShipper.extendContext({ isElasticCloudUser: true });
        expect(fullStoryApiMock.setUserVars).toHaveBeenCalledWith({
          // eslint-disable-next-line @typescript-eslint/naming-convention
          isElasticCloudUser_bool: true,
        });
      });

      test('calls `setUserVars` when isElasticCloudUser: false is provided', () => {
        fullstoryShipper.extendContext({ isElasticCloudUser: false });
        expect(fullStoryApiMock.setUserVars).toHaveBeenCalledWith({
          // eslint-disable-next-line @typescript-eslint/naming-convention
          isElasticCloudUser_bool: false,
        });
      });
    });

    describe('FS.setVars', () => {
      test('calls `setVars` when version is provided', () => {
        fullstoryShipper.extendContext({ version: '1.2.3' });
        expect(fullStoryApiMock.setVars).toHaveBeenCalledWith('page', {
          version_str: '1.2.3',
          version_major_int: 1,
          version_minor_int: 2,
          version_patch_int: 3,
        });
      });

      test('calls `setVars` when cloudId is provided', () => {
        fullstoryShipper.extendContext({ cloudId: 'test-es-org-id' });
        expect(fullStoryApiMock.setVars).toHaveBeenCalledWith('page', {
          // eslint-disable-next-line @typescript-eslint/naming-convention
          cloudId_str: 'test-es-org-id',
        });
      });

      test('merges both: version and cloudId if both are provided', () => {
        fullstoryShipper.extendContext({ version: '1.2.3', cloudId: 'test-es-org-id' });
        expect(fullStoryApiMock.setVars).toHaveBeenCalledWith('page', {
          // eslint-disable-next-line @typescript-eslint/naming-convention
          cloudId_str: 'test-es-org-id',
          version_str: '1.2.3',
          version_major_int: 1,
          version_minor_int: 2,
          version_patch_int: 3,
        });
      });

      test('adds the rest of the context to `setVars` (only if they match one of the valid keys)', () => {
        const context = {
          userId: 'test-user-id',
          version: '1.2.3',
          cloudId: 'test-es-org-id',
          labels: { serverless: 'test' },
          foo: 'bar',
        };
        fullstoryShipper.extendContext(context);
        // `userId` and `foo` are not page-vars keys, so they must not be forwarded.
        expect(fullStoryApiMock.setVars).toHaveBeenCalledWith('page', {
          version_str: '1.2.3',
          version_major_int: 1,
          version_minor_int: 2,
          version_patch_int: 3,
          // eslint-disable-next-line @typescript-eslint/naming-convention
          cloudId_str: 'test-es-org-id',
          labels: { serverless_str: 'test' },
        });
      });

      test('emits once only if nothing changes', () => {
        const context = {
          userId: 'test-user-id',
          version: '1.2.3',
          cloudId: 'test-es-org-id',
          labels: { serverless: 'test' },
          foo: 'bar',
        };
        fullstoryShipper.extendContext(context);
        fullstoryShipper.extendContext(context);
        expect(fullStoryApiMock.setVars).toHaveBeenCalledTimes(1);
        fullstoryShipper.extendContext(context);
        expect(fullStoryApiMock.setVars).toHaveBeenCalledTimes(1);
      });
    });
  });

  describe('optIn', () => {
    test('should call consent true and restart when isOptIn: true', () => {
      fullstoryShipper.optIn(true);
      expect(fullStoryApiMock.consent).toHaveBeenCalledWith(true);
      expect(fullStoryApiMock.restart).toHaveBeenCalled();
    });

    test('should call consent false and shutdown when isOptIn: false', () => {
      fullstoryShipper.optIn(false);
      expect(fullStoryApiMock.consent).toHaveBeenCalledWith(false);
      expect(fullStoryApiMock.shutdown).toHaveBeenCalled();
    });
  });

  describe('reportEvents', () => {
    test('calls the API once per event in the array with the properties transformed', () => {
      fullstoryShipper.reportEvents([
        {
          event_type: 'test-event-1',
          timestamp: '2020-01-01T00:00:00.000Z',
          properties: { test: 'test-1' },
          context: { pageName: 'test-page-1' },
        },
        {
          event_type: 'test-event-2',
          timestamp: '2020-01-01T00:00:00.000Z',
          properties: { other_property: 'test-2' },
          context: { pageName: 'test-page-1' },
        },
      ]);

      expect(fullStoryApiMock.event).toHaveBeenCalledTimes(2);
      expect(fullStoryApiMock.event).toHaveBeenCalledWith('test-event-1', {
        test_str: 'test-1',
      });
      expect(fullStoryApiMock.event).toHaveBeenCalledWith('test-event-2', {
        other_property_str: 'test-2',
      });
    });

    test('filters the events by the allow-list', () => {
      // Recreate the shipper with an allow-list to exercise the filtering path.
      fullstoryShipper = new FullStoryShipper(
        {
          eventTypesAllowlist: ['valid-event-1', 'valid-event-2'],
          debug: true,
          fullStoryOrgId: 'test-org-id',
        },
        {
          logger: loggerMock.create(),
          sendTo: 'staging',
          isDev: true,
        }
      );
      fullstoryShipper.reportEvents([
        {
          event_type: 'test-event-1', // Should be filtered out.
          timestamp: '2020-01-01T00:00:00.000Z',
          properties: { test: 'test-1' },
          context: { pageName: 'test-page-1' },
        },
        {
          event_type: 'valid-event-1',
          timestamp: '2020-01-01T00:00:00.000Z',
          properties: { test: 'test-1' },
          context: { pageName: 'test-page-1' },
        },
        {
          event_type: 'valid-event-2',
          timestamp: '2020-01-01T00:00:00.000Z',
          properties: { test: 'test-2' },
          context: { pageName: 'test-page-1' },
        },
      ]);

      expect(fullStoryApiMock.event).toHaveBeenCalledTimes(2);
      expect(fullStoryApiMock.event).toHaveBeenCalledWith('valid-event-1', {
        test_str: 'test-1',
      });
      expect(fullStoryApiMock.event).toHaveBeenCalledWith('valid-event-2', {
        test_str: 'test-2',
      });
    });
  });
});
|
|
@ -1,235 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { Subject, distinct, debounceTime, map, filter, Subscription } from 'rxjs';
|
||||
import { get, has } from 'lodash';
|
||||
import { set } from '@kbn/safer-lodash-set';
|
||||
import type { AnalyticsClientInitContext, EventContext, Event, IShipper } from '../../../client';
|
||||
import type { FullStoryApi } from './types';
|
||||
import type { FullStorySnippetConfig } from './load_snippet';
|
||||
import { formatPayload } from './format_payload';
|
||||
import { loadSnippet } from './load_snippet';
|
||||
import { getParsedVersion } from './get_parsed_version';
|
||||
|
||||
// Context keys that are forwarded to FS.setVars('page'). FullStory caps the number
// of page properties (20 per page / 500 across all pages), so the shipper
// cherry-picks only these.
const PAGE_VARS_KEYS = [
  // Page-specific keys
  'pageName',
  'page',
  'entityId',
  'applicationId',

  // Deployment-specific keys
  'version', // x4, split to version_major, version_minor, version_patch for easier filtering
  'buildSha', // Useful for Serverless
  'cloudId',
  'deploymentId',
  'projectId', // projectId and deploymentId are mutually exclusive. They shouldn't be sent in the same offering.
  'cluster_name',
  'cluster_uuid',
  'cluster_version',
  'labels.serverless',
  'license_id',
  'license_status',
  'license_type',

  // Session-specific
  'session_id',
  'preferred_languages',
] as const;

/**
 * FullStory shipper configuration.
 */
export interface FullStoryShipperConfig extends FullStorySnippetConfig {
  /**
   * FullStory's custom events rate limit is very aggressive.
   * If this setting is provided, it'll only send the event types specified in this list.
   */
  eventTypesAllowlist?: string[];
  /**
   * FullStory only allows calling setVars('page') once per navigation.
   * This setting defines how much time to hold from calling the API while additional lazy context is being resolved.
   */
  pageVarsDebounceTimeMs?: number;
}

// User-level fields extracted from the event context and routed to
// FS.identify / FS.setUserVars (as opposed to page-level vars above).
interface FullStoryUserVars {
  userId?: string;
  isElasticCloudUser?: boolean;
  cloudIsElasticStaffOwned?: boolean;
  cloudTrialEndDate?: string;
}

// The subset of the event context eligible to become FullStory page vars.
type FullStoryPageContext = Pick<EventContext, (typeof PAGE_VARS_KEYS)[number]>;
|
||||
|
||||
/**
 * FullStory shipper.
 *
 * Routes analytics context and events to the three FullStory APIs:
 * `FS.identify` (user id), `FS.setUserVars` (user-level vars),
 * `FS.setVars('page', …)` (page-level vars) and `FS.event` (custom events).
 */
export class FullStoryShipper implements IShipper {
  /** Shipper's unique name */
  public static shipperName = 'FullStory';

  // Handle to the FS API returned by the snippet loader.
  private readonly fullStoryApi: FullStoryApi;
  // Last userId sent via FS.identify, to avoid re-identifying the same user.
  private lastUserId: string | undefined;
  private readonly eventTypesAllowlist?: string[];
  // Streams fed by extendContext(); each pipes into its matching FS API below.
  private readonly pageContext$ = new Subject<EventContext>();
  private readonly userContext$ = new Subject<FullStoryUserVars>();
  private readonly subscriptions = new Subscription();

  /**
   * Creates a new instance of the FullStoryShipper.
   * @param config {@link FullStoryShipperConfig}
   * @param initContext {@link AnalyticsClientInitContext}
   */
  constructor(
    config: FullStoryShipperConfig,
    private readonly initContext: AnalyticsClientInitContext
  ) {
    const { eventTypesAllowlist, pageVarsDebounceTimeMs = 500, ...snippetConfig } = config;
    this.fullStoryApi = loadSnippet(snippetConfig);
    this.eventTypesAllowlist = eventTypesAllowlist;

    // User-level pipeline: only push to FS when any of the user vars change.
    this.subscriptions.add(
      this.userContext$
        .pipe(
          distinct(({ userId, isElasticCloudUser, cloudIsElasticStaffOwned, cloudTrialEndDate }) =>
            [userId, isElasticCloudUser, cloudIsElasticStaffOwned, cloudTrialEndDate].join('-')
          )
        )
        .subscribe((userVars) => this.updateUserVars(userVars))
    );

    // Page-level pipeline: cherry-pick → drop empties → dedupe → debounce → FS.setVars.
    this.subscriptions.add(
      this.pageContext$
        .pipe(
          map((newContext) => {
            // Cherry-picking fields because FS limits the number of fields that can be sent.
            // > Note: You can capture up to 20 unique page properties (exclusive of pageName) for any given page
            // > and up to 500 unique page properties across all pages.
            // https://help.fullstory.com/hc/en-us/articles/1500004101581-FS-setVars-API-Sending-custom-page-data-to-FullStory
            return PAGE_VARS_KEYS.reduce((acc, key) => {
              if (has(newContext, key)) {
                set(acc, key, get(newContext, key));
              }
              return acc;
            }, {} as Partial<FullStoryPageContext> & Record<string, unknown>);
          }),
          filter((pageVars) => Object.keys(pageVars).length > 0),
          // Wait for anything to actually change.
          distinct((pageVars) => {
            const sortedKeys = Object.keys(pageVars).sort();
            return sortedKeys.map((key) => pageVars[key]).join('-');
          }),
          // We need some debounce time to ensure everything is updated before calling FS because some properties cannot be changed twice for the same URL.
          debounceTime(pageVarsDebounceTimeMs)
        )
        .subscribe((pageVars) => {
          this.initContext.logger.debug(
            () => `Calling FS.setVars with context ${JSON.stringify(pageVars)}`
          );
          this.fullStoryApi.setVars('page', {
            ...formatPayload(pageVars),
            // `version` additionally expands into major/minor/patch int fields.
            ...(pageVars.version ? getParsedVersion(pageVars.version) : {}),
          });
        })
    );
  }

  /**
   * Calls `fs.identify`, `fs.setUserVars` and `fs.setVars` depending on the fields provided in the newContext.
   * @param newContext The full new context to set {@link EventContext}
   */
  public extendContext(newContext: EventContext): void {
    this.initContext.logger.debug(() => `Received context ${JSON.stringify(newContext)}`);

    // FullStory requires different APIs for different type of contexts:
    // User-level context.
    this.userContext$.next(newContext);
    // Event-level context. At the moment, only the scope `page` is supported by FullStory for webapps.
    this.pageContext$.next(newContext);
  }

  /**
   * Stops/restarts the shipping mechanism based on the value of isOptedIn
   * @param isOptedIn `true` for resume sending events. `false` to stop.
   */
  public optIn(isOptedIn: boolean): void {
    this.initContext.logger.debug(`Setting FS to optIn ${isOptedIn}`);
    // FullStory uses 2 different opt-in methods:
    // - `consent` is needed to allow collecting information about the components
    //   declared as "Record with user consent" (https://help.fullstory.com/hc/en-us/articles/360020623574).
    //   We need to explicitly call `consent` if for the "Record with user content" feature to work.
    this.fullStoryApi.consent(isOptedIn);
    // - `restart` and `shutdown` fully start/stop the collection of data.
    if (isOptedIn) {
      this.fullStoryApi.restart();
    } else {
      this.fullStoryApi.shutdown();
    }
  }

  /**
   * Filters the events by the eventTypesAllowlist from the config.
   * Then it transforms the event into a FS valid format and calls `fs.event`.
   * @param events batched events {@link Event}
   */
  public reportEvents(events: Event[]): void {
    this.initContext.logger.debug(`Reporting ${events.length} events to FS`);
    events
      // When no allowlist is configured, everything goes through (`?? true`).
      .filter((event) => this.eventTypesAllowlist?.includes(event.event_type) ?? true)
      .forEach((event) => {
        // We only read event.properties and discard the rest because the context is already sent in the other APIs.
        this.fullStoryApi.event(event.event_type, formatPayload(event.properties));
      });
  }

  /**
   * Flushes all internal queues of the shipper.
   * It doesn't really do anything inside because this shipper doesn't hold any internal queues.
   */
  public async flush() {}

  /**
   * Shuts down the shipper.
   */
  public shutdown() {
    this.subscriptions.unsubscribe();
  }

  // Pushes user-level vars to FS.identify / FS.setUserVars as appropriate.
  private updateUserVars({
    userId,
    isElasticCloudUser,
    cloudIsElasticStaffOwned,
    cloudTrialEndDate,
  }: FullStoryUserVars) {
    // Call it only when the userId changes
    if (userId && userId !== this.lastUserId) {
      this.initContext.logger.debug(`Calling FS.identify with userId ${userId}`);
      // We need to call the API for every new userId (restarting the session).
      this.fullStoryApi.identify(userId);
      this.lastUserId = userId;
    }

    // User-level context
    if (
      typeof isElasticCloudUser === 'boolean' ||
      typeof cloudIsElasticStaffOwned === 'boolean' ||
      cloudTrialEndDate
    ) {
      const userVars = {
        isElasticCloudUser,
        cloudIsElasticStaffOwned,
        cloudTrialEndDate,
      };
      this.initContext.logger.debug(
        () => `Calling FS.setUserVars with ${JSON.stringify(userVars)}`
      );
      this.fullStoryApi.setUserVars(formatPayload(userVars));
    }
  }
}
|
|
@ -1,38 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { getParsedVersion } from './get_parsed_version';
|
||||
|
||||
// Unit tests for getParsedVersion: semver-like strings are split into
// FullStory-friendly integer fields; invalid input yields NaN rather than throwing.
describe('getParsedVersion', () => {
  test('parses a version string', () => {
    expect(getParsedVersion('1.2.3')).toEqual({
      version_str: '1.2.3',
      version_major_int: 1,
      version_minor_int: 2,
      version_patch_int: 3,
    });
  });

  test('parses a version string with extra label', () => {
    // parseInt stops at the first non-numeric character, so '3-SNAPSHOT' → 3.
    expect(getParsedVersion('1.2.3-SNAPSHOT')).toEqual({
      version_str: '1.2.3-SNAPSHOT',
      version_major_int: 1,
      version_minor_int: 2,
      version_patch_int: 3,
    });
  });

  test('does not throw for invalid version', () => {
    expect(getParsedVersion('INVALID_VERSION')).toEqual({
      version_str: 'INVALID_VERSION',
      version_major_int: NaN,
      version_minor_int: NaN,
      version_patch_int: NaN,
    });
  });
});
|
|
@ -1,22 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
export function getParsedVersion(version: string): {
|
||||
version_str: string;
|
||||
version_major_int: number;
|
||||
version_minor_int: number;
|
||||
version_patch_int: number;
|
||||
} {
|
||||
const [major, minor, patch] = version.split('.');
|
||||
return {
|
||||
version_str: version,
|
||||
version_major_int: parseInt(major, 10),
|
||||
version_minor_int: parseInt(minor, 10),
|
||||
version_patch_int: parseInt(patch, 10),
|
||||
};
|
||||
}
|
|
@ -1,32 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { loadSnippet } from './load_snippet';
|
||||
|
||||
// Smoke test for loadSnippet: with the DOM stubbed just enough for the snippet
// to run, the returned object must expose the full FullStory API surface.
describe('loadSnippet', () => {
  beforeAll(() => {
    // Define necessary window and document global variables for the tests
    jest
      .spyOn(global.document, 'getElementsByTagName')
      .mockReturnValue([
        { parentNode: { insertBefore: jest.fn() } },
      ] as unknown as HTMLCollectionOf<Element>);
  });

  it('should return the FullStory API', () => {
    const fullStoryApi = loadSnippet({ debug: true, fullStoryOrgId: 'foo' });
    expect(fullStoryApi).toBeDefined();
    expect(fullStoryApi.event).toBeDefined();
    expect(fullStoryApi.consent).toBeDefined();
    expect(fullStoryApi.restart).toBeDefined();
    expect(fullStoryApi.shutdown).toBeDefined();
    expect(fullStoryApi.identify).toBeDefined();
    expect(fullStoryApi.setUserVars).toBeDefined();
    expect(fullStoryApi.setVars).toBeDefined();
  });
});
|
|
@ -1,92 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import type { FullStoryApi } from './types';
|
||||
|
||||
/**
|
||||
* FullStory basic configuration.
|
||||
*/
|
||||
/**
 * FullStory basic configuration.
 */
export interface FullStorySnippetConfig {
  /**
   * The FullStory account id.
   */
  fullStoryOrgId: string;
  /**
   * The host to send the data to. Used to overcome AdBlockers by using custom DNSs.
   * If not specified, it defaults to `fullstory.com`.
   */
  host?: string;
  /**
   * The URL to load the FullStory client from. Falls back to `edge.fullstory.com/s/fs.js` if not specified.
   */
  scriptUrl?: string;
  /**
   * Whether the debug logs should be printed to the console.
   */
  debug?: boolean;
  /**
   * The name of the variable where the API is stored: `window[namespace]`. Defaults to `FS`.
   */
  namespace?: string;
}

/**
 * Injects the FullStory capture snippet into the page and returns the resulting
 * FS API object. The snippet reads its configuration from the `window._fs_*`
 * globals set below.
 *
 * @throws Error if the snippet did not install the API on `window[namespace]`.
 */
export function loadSnippet({
  scriptUrl = 'https://edge.fullstory.com/s/fs.js',
  fullStoryOrgId,
  host = 'fullstory.com',
  namespace = 'FS',
  debug = false,
}: FullStorySnippetConfig): FullStoryApi {
  // The snippet below reads these globals; they must be set before it runs.
  window._fs_debug = debug;
  window._fs_host = host;
  window._fs_script = scriptUrl;
  window._fs_org = fullStoryOrgId;
  window._fs_namespace = namespace;

  // Official FullStory capture snippet (v1.3.0), kept as-is (minified) on purpose
  // so it stays diffable against FullStory's published version.
  /* eslint-disable dot-notation,prettier/prettier,@typescript-eslint/no-shadow,prefer-rest-params,@typescript-eslint/no-unused-expressions */
  (function(m,n,e,t,l,o,g,y){
    if (e in m) {if(m.console && m.console.log) { m.console.log('FullStory namespace conflict. Please set window["_fs_namespace"].');} return;}
    // @ts-expect-error
    g=m[e]=function(a,b,s){g.q?g.q.push([a,b,s]):g._api(a,b,s);};g.q=[];
    // @ts-expect-error
    o=n.createElement(t);o.async=1;o.crossOrigin='anonymous';o.src=_fs_script;
    // @ts-expect-error
    y=n.getElementsByTagName(t)[0];y.parentNode.insertBefore(o,y);
    // @ts-expect-error
    g.identify=function(i,v,s){g(l,{uid:i},s);if(v)g(l,v,s)};g.setUserVars=function(v,s){g(l,v,s)};g.event=function(i,v,s){g('event',{n:i,p:v},s)};
    // @ts-expect-error
    g.anonymize=function(){g.identify(!!0)};
    // @ts-expect-error
    g.shutdown=function(){g("rec",!1)};g.restart=function(){g("rec",!0)};
    // @ts-expect-error
    g.log = function(a,b){g("log",[a,b])};
    // @ts-expect-error
    g.consent=function(a){g("consent",!arguments.length||a)};
    // @ts-expect-error
    g.identifyAccount=function(i,v){o='account';v=v||{};v.acctId=i;g(o,v)};
    // @ts-expect-error
    g.clearUserCookie=function(){};
    // @ts-expect-error
    g.setVars=function(n, p){g('setVars',[n,p]);};
    // @ts-expect-error
    g._w={};y='XMLHttpRequest';g._w[y]=m[y];y='fetch';g._w[y]=m[y];
    // @ts-expect-error
    if(m[y])m[y]=function(){return g._w[y].apply(this,arguments)};
    // @ts-expect-error
    g._v="1.3.0";

  })(window,document,window['_fs_namespace'],'script','user');

  const fullStoryApi = window[namespace as 'FS'];

  if (!fullStoryApi) {
    throw new Error('FullStory snippet failed to load. Check browser logs for more information.');
  }

  return fullStoryApi;
}
|
|
@ -1,74 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
/**
|
||||
* Definition of the FullStory API.
|
||||
* Docs are available at https://developer.fullstory.com/.
|
||||
*/
|
||||
export interface FullStoryApi {
  /**
   * Identify a User
   * https://developer.fullstory.com/identify
   * @param userId unique identifier of the user to tie the session to
   * @param userVars optional additional user-level variables
   */
  identify(userId: string, userVars?: Record<string, unknown>): void;

  /**
   * Set User Variables
   * https://developer.fullstory.com/user-variables
   * @param userVars user-level variables (type-suffixed keys)
   */
  setUserVars(userVars: Record<string, unknown>): void;

  /**
   * Setting page variables
   * https://developer.fullstory.com/page-variables
   * @param scope only the 'page' scope is supported for web apps
   * @param pageProperties page-level variables (type-suffixed keys)
   */
  setVars(scope: 'page', pageProperties: Record<string, unknown>): void;

  /**
   * Sending custom event data into FullStory
   * https://developer.fullstory.com/custom-events
   * @param eventName name of the custom event
   * @param eventProperties event payload (type-suffixed keys)
   */
  event(eventName: string, eventProperties: Record<string, unknown>): void;

  /**
   * Selectively record parts of your site based on explicit user consent
   * https://developer.fullstory.com/consent
   * @param isOptedIn true if the user has opted in to tracking
   */
  consent(isOptedIn: boolean): void;

  /**
   * Restart session recording after it has been shutdown
   * https://developer.fullstory.com/restart-recording
   */
  restart(): void;

  /**
   * Stop recording a session
   * https://developer.fullstory.com/stop-recording
   */
  shutdown(): void;
}

// Globals read by the FullStory snippet. They must be assigned on `window`
// before the snippet executes (see load_snippet.ts).
declare global {
  interface Window {
    _fs_debug: boolean;
    _fs_host: string;
    _fs_org: string;
    _fs_namespace: string;
    _fs_script: string;
    FS: FullStoryApi;
  }
}
|
|
@ -1,23 +0,0 @@
|
|||
{
|
||||
"extends": "../../../tsconfig.base.json",
|
||||
"compilerOptions": {
|
||||
"outDir": "target/types",
|
||||
"types": [
|
||||
"jest",
|
||||
"node",
|
||||
"react"
|
||||
]
|
||||
},
|
||||
"include": [
|
||||
"**/*.ts",
|
||||
"**/*.tsx",
|
||||
],
|
||||
"exclude": [
|
||||
"target/**/*"
|
||||
],
|
||||
"kbn_references": [
|
||||
"@kbn/logging-mocks",
|
||||
"@kbn/logging",
|
||||
"@kbn/safer-lodash-set",
|
||||
]
|
||||
}
|
|
@ -6,7 +6,7 @@
|
|||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { AnalyticsClient } from '@kbn/ebt/client';
|
||||
import { AnalyticsClient } from '@elastic/ebt/client';
|
||||
import { Subject } from 'rxjs';
|
||||
|
||||
export const analyticsClientMock: jest.Mocked<AnalyticsClient> = {
|
||||
|
@ -21,6 +21,6 @@ export const analyticsClientMock: jest.Mocked<AnalyticsClient> = {
|
|||
shutdown: jest.fn(),
|
||||
};
|
||||
|
||||
jest.doMock('@kbn/ebt/client', () => ({
|
||||
jest.doMock('@elastic/ebt/client', () => ({
|
||||
createAnalytics: () => analyticsClientMock,
|
||||
}));
|
||||
|
|
|
@ -7,8 +7,8 @@
|
|||
*/
|
||||
|
||||
import { of, Subscription } from 'rxjs';
|
||||
import type { AnalyticsClient } from '@kbn/ebt/client';
|
||||
import { createAnalytics } from '@kbn/ebt/client';
|
||||
import type { AnalyticsClient } from '@elastic/ebt/client';
|
||||
import { createAnalytics } from '@elastic/ebt/client';
|
||||
import { registerPerformanceMetricEventType } from '@kbn/ebt-tools';
|
||||
import type { CoreContext } from '@kbn/core-base-browser-internal';
|
||||
import type { InternalInjectedMetadataSetup } from '@kbn/core-injected-metadata-browser-internal';
|
||||
|
@ -32,9 +32,6 @@ export class AnalyticsService {
|
|||
this.analyticsClient = createAnalytics({
|
||||
isDev: core.env.mode.dev,
|
||||
logger: core.logger.get('analytics'),
|
||||
// TODO: We need to be able to edit sendTo once we resolve the telemetry config.
|
||||
// For now, we are relying on whether it's a distributable or running from source.
|
||||
sendTo: core.env.packageInfo.dist ? 'production' : 'staging',
|
||||
});
|
||||
|
||||
this.registerBuildInfoAnalyticsContext(core);
|
||||
|
|
|
@ -7,7 +7,7 @@
|
|||
*/
|
||||
|
||||
import { fromEvent } from 'rxjs';
|
||||
import type { AnalyticsClient } from '@kbn/ebt/client';
|
||||
import type { AnalyticsClient } from '@elastic/ebt/client';
|
||||
|
||||
/** HTML attributes that should be skipped from reporting because they might contain data we do not wish to collect */
|
||||
const HTML_ATTRIBUTES_TO_REMOVE = [
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
import type { AnalyticsClient } from '@kbn/ebt/client';
|
||||
import type { AnalyticsClient } from '@elastic/ebt/client';
|
||||
import { reportPerformanceMetricEvent } from '@kbn/ebt-tools';
|
||||
|
||||
export function trackPerformanceMeasureEntries(analytics: AnalyticsClient, isDevMode: boolean) {
|
||||
|
|
|
@ -7,7 +7,7 @@
|
|||
*/
|
||||
|
||||
import { debounceTime, fromEvent, map, merge, of, shareReplay } from 'rxjs';
|
||||
import type { AnalyticsClient, RootSchema } from '@kbn/ebt/client';
|
||||
import type { AnalyticsClient, RootSchema } from '@elastic/ebt/client';
|
||||
|
||||
export interface ViewportSize {
|
||||
viewport_width: number;
|
||||
|
|
|
@ -11,8 +11,7 @@
|
|||
"@kbn/core-injected-metadata-browser-internal",
|
||||
"@kbn/core-analytics-browser",
|
||||
"@kbn/core-base-browser-mocks",
|
||||
"@kbn/core-injected-metadata-browser-mocks",
|
||||
"@kbn/ebt",
|
||||
"@kbn/core-injected-metadata-browser-mocks"
|
||||
],
|
||||
"exclude": ["target/**/*"]
|
||||
}
|
||||
|
|
|
@ -48,4 +48,4 @@ export type {
|
|||
AllowedSchemaTypes,
|
||||
// Shippers
|
||||
IShipper,
|
||||
} from '@kbn/ebt/client';
|
||||
} from '@elastic/ebt/client';
|
||||
|
|
|
@ -6,7 +6,7 @@
|
|||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import type { AnalyticsClient } from '@kbn/ebt/client';
|
||||
import type { AnalyticsClient } from '@elastic/ebt/client';
|
||||
|
||||
/**
|
||||
* Exposes the public APIs of the AnalyticsClient during the setup phase.
|
||||
|
|
|
@ -10,9 +10,7 @@
|
|||
"include": [
|
||||
"**/*.ts"
|
||||
],
|
||||
"kbn_references": [
|
||||
"@kbn/ebt",
|
||||
],
|
||||
"kbn_references": [],
|
||||
"exclude": [
|
||||
"target/**/*",
|
||||
]
|
||||
|
|
|
@ -6,7 +6,7 @@
|
|||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { AnalyticsClient } from '@kbn/ebt/client';
|
||||
import { AnalyticsClient } from '@elastic/ebt/client';
|
||||
import { Subject } from 'rxjs';
|
||||
|
||||
export const analyticsClientMock: jest.Mocked<AnalyticsClient> = {
|
||||
|
@ -21,6 +21,6 @@ export const analyticsClientMock: jest.Mocked<AnalyticsClient> = {
|
|||
flush: jest.fn(),
|
||||
};
|
||||
|
||||
jest.doMock('@kbn/ebt/client', () => ({
|
||||
jest.doMock('@elastic/ebt/client', () => ({
|
||||
createAnalytics: () => analyticsClientMock,
|
||||
}));
|
||||
|
|
|
@ -7,8 +7,8 @@
|
|||
*/
|
||||
|
||||
import { of } from 'rxjs';
|
||||
import type { AnalyticsClient } from '@kbn/ebt/client';
|
||||
import { createAnalytics } from '@kbn/ebt/client';
|
||||
import type { AnalyticsClient } from '@elastic/ebt/client';
|
||||
import { createAnalytics } from '@elastic/ebt/client';
|
||||
import { registerPerformanceMetricEventType } from '@kbn/ebt-tools';
|
||||
import type { CoreContext } from '@kbn/core-base-server-internal';
|
||||
import type {
|
||||
|
@ -24,9 +24,6 @@ export class AnalyticsService {
|
|||
this.analyticsClient = createAnalytics({
|
||||
isDev: core.env.mode.dev,
|
||||
logger: core.logger.get('analytics'),
|
||||
// TODO: We need to be able to edit sendTo once we resolve the telemetry config.
|
||||
// For now, we are relying on whether it's a distributable or running from source.
|
||||
sendTo: core.env.packageInfo.dist ? 'production' : 'staging',
|
||||
});
|
||||
|
||||
this.registerBuildInfoAnalyticsContext(core);
|
||||
|
|
|
@ -15,8 +15,7 @@
|
|||
"@kbn/core-base-server-internal",
|
||||
"@kbn/core-analytics-server",
|
||||
"@kbn/config-mocks",
|
||||
"@kbn/core-base-server-mocks",
|
||||
"@kbn/ebt",
|
||||
"@kbn/core-base-server-mocks"
|
||||
],
|
||||
"exclude": [
|
||||
"target/**/*",
|
||||
|
|
|
@ -48,4 +48,4 @@ export type {
|
|||
AllowedSchemaTypes,
|
||||
// Shippers
|
||||
IShipper,
|
||||
} from '@kbn/ebt/client';
|
||||
} from '@elastic/ebt/client';
|
||||
|
|
|
@ -6,7 +6,7 @@
|
|||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import type { AnalyticsClient } from '@kbn/ebt/client';
|
||||
import type { AnalyticsClient } from '@elastic/ebt/client';
|
||||
|
||||
/**
|
||||
* Exposes the public APIs of the AnalyticsClient during the preboot phase
|
||||
|
|
|
@ -10,9 +10,7 @@
|
|||
"include": [
|
||||
"**/*.ts"
|
||||
],
|
||||
"kbn_references": [
|
||||
"@kbn/ebt"
|
||||
],
|
||||
"kbn_references": [],
|
||||
"exclude": [
|
||||
"target/**/*",
|
||||
]
|
||||
|
|
|
@ -6,7 +6,7 @@
|
|||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { type RootSchema, type EventTypeOpts } from '@kbn/ebt/client';
|
||||
import { type RootSchema, type EventTypeOpts } from '@elastic/ebt/client';
|
||||
|
||||
export enum EventMetric {
|
||||
TOAST_DISMISSED = 'global_toast_list_toast_dismissed',
|
||||
|
|
|
@ -30,8 +30,7 @@
|
|||
"@kbn/core-mount-utils-browser",
|
||||
"@kbn/react-kibana-context-render",
|
||||
"@kbn/core-analytics-browser",
|
||||
"@kbn/core-analytics-browser-mocks",
|
||||
"@kbn/ebt",
|
||||
"@kbn/core-analytics-browser-mocks"
|
||||
],
|
||||
"exclude": [
|
||||
"target/**/*",
|
||||
|
|
|
@ -18,7 +18,7 @@ import {
|
|||
import { map, distinctUntilChanged, shareReplay, takeUntil, debounceTime } from 'rxjs';
|
||||
import { isDeepStrictEqual } from 'util';
|
||||
|
||||
import type { RootSchema } from '@kbn/ebt/client';
|
||||
import type { RootSchema } from '@elastic/ebt/client';
|
||||
import type { Logger, LogMeta } from '@kbn/logging';
|
||||
import type { CoreContext, CoreService } from '@kbn/core-base-server-internal';
|
||||
import type { PluginName } from '@kbn/core-base-common';
|
||||
|
|
|
@ -41,7 +41,6 @@
|
|||
"@kbn/core-analytics-server-mocks",
|
||||
"@kbn/core-logging-server-internal",
|
||||
"@kbn/core-logging-server-mocks",
|
||||
"@kbn/ebt",
|
||||
],
|
||||
"exclude": [
|
||||
"target/**/*",
|
||||
|
|
|
@ -6,7 +6,7 @@
|
|||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { createAnalytics, type AnalyticsClient } from '@kbn/ebt/client';
|
||||
import { createAnalytics, type AnalyticsClient } from '@elastic/ebt/client';
|
||||
import { loggerMock } from '@kbn/logging-mocks';
|
||||
import { registerPerformanceMetricEventType, reportPerformanceMetricEvent } from './helpers';
|
||||
import { METRIC_EVENT_SCHEMA } from './schema';
|
||||
|
@ -18,7 +18,6 @@ describe('performance metric event helpers', () => {
|
|||
beforeEach(() => {
|
||||
analyticsClient = createAnalytics({
|
||||
isDev: true, // Explicitly setting `true` to ensure we have event validation to make sure the events sent pass our validation.
|
||||
sendTo: 'staging',
|
||||
logger: loggerMock.create(),
|
||||
});
|
||||
});
|
||||
|
@ -39,7 +38,6 @@ describe('performance metric event helpers', () => {
|
|||
beforeEach(() => {
|
||||
analyticsClient = createAnalytics({
|
||||
isDev: true, // Explicitly setting `true` to ensure we have event validation to make sure the events sent pass our validation.
|
||||
sendTo: 'staging',
|
||||
logger: loggerMock.create(),
|
||||
});
|
||||
registerPerformanceMetricEventType(analyticsClient);
|
||||
|
|
|
@ -6,7 +6,7 @@
|
|||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import type { AnalyticsClient } from '@kbn/ebt/client';
|
||||
import type { AnalyticsClient } from '@elastic/ebt/client';
|
||||
import { type PerformanceMetricEvent, METRIC_EVENT_SCHEMA } from './schema';
|
||||
|
||||
const PERFORMANCE_METRIC_EVENT_TYPE = 'performance_metric';
|
||||
|
|
|
@ -6,7 +6,7 @@
|
|||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import type { RootSchema } from '@kbn/ebt/client';
|
||||
import type { RootSchema } from '@elastic/ebt/client';
|
||||
|
||||
/**
|
||||
* Structure of the `metric` event
|
||||
|
|
|
@ -6,8 +6,7 @@
|
|||
},
|
||||
"include": ["**/*.ts", "**/*.tsx"],
|
||||
"kbn_references": [
|
||||
"@kbn/logging-mocks",
|
||||
"@kbn/ebt",
|
||||
"@kbn/logging-mocks"
|
||||
],
|
||||
"exclude": ["target/**/*"]
|
||||
}
|
||||
|
|
|
@ -6,11 +6,11 @@
|
|||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { buildHeaders } from './build_headers';
|
||||
import { buildShipperHeaders } from './build_headers';
|
||||
|
||||
describe('buildHeaders', () => {
|
||||
describe('buildShipperHeaders', () => {
|
||||
test('builds the headers as expected in the V3 endpoints', () => {
|
||||
expect(buildHeaders('test-cluster', '1.2.3', 'test-license')).toMatchInlineSnapshot(`
|
||||
expect(buildShipperHeaders('test-cluster', '1.2.3', 'test-license')).toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"content-type": "application/x-ndjson",
|
||||
"x-elastic-cluster-id": "test-cluster",
|
||||
|
@ -21,7 +21,7 @@ describe('buildHeaders', () => {
|
|||
});
|
||||
|
||||
test('if license is not provided, it skips the license header', () => {
|
||||
expect(buildHeaders('test-cluster', '1.2.3')).toMatchInlineSnapshot(`
|
||||
expect(buildShipperHeaders('test-cluster', '1.2.3')).toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"content-type": "application/x-ndjson",
|
||||
"x-elastic-cluster-id": "test-cluster",
|
|
@ -6,17 +6,23 @@
|
|||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import type { BuildShipperHeaders } from '@elastic/ebt/shippers/elastic_v3/common';
|
||||
|
||||
/**
|
||||
* Returns the headers to send to the Remote Telemetry Service.
|
||||
* @param clusterUuid The UUID of the ES cluster.
|
||||
* @param version The version of the ES cluster.
|
||||
* @param licenseId The ID of the license (if available).
|
||||
*/
|
||||
export function buildHeaders(clusterUuid: string, version: string, licenseId?: string) {
|
||||
export const buildShipperHeaders: BuildShipperHeaders = (
|
||||
clusterUuid: string,
|
||||
version: string,
|
||||
licenseId?: string
|
||||
) => {
|
||||
return {
|
||||
'content-type': 'application/x-ndjson',
|
||||
'x-elastic-cluster-id': clusterUuid,
|
||||
'x-elastic-stack-version': version,
|
||||
...(licenseId && { 'x-elastic-license-id': licenseId }),
|
||||
};
|
||||
}
|
||||
};
|
|
@ -6,17 +6,19 @@
|
|||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { buildUrl } from './build_url';
|
||||
import { createBuildShipperUrl } from './build_url';
|
||||
|
||||
describe('buildUrl', () => {
|
||||
test('returns production URL', () => {
|
||||
expect(buildUrl({ sendTo: 'production', channelName: 'test-channel' })).toBe(
|
||||
const buildShipperUrl = createBuildShipperUrl('production');
|
||||
expect(buildShipperUrl({ channelName: 'test-channel' })).toBe(
|
||||
'https://telemetry.elastic.co/v3/send/test-channel'
|
||||
);
|
||||
});
|
||||
|
||||
test('returns staging URL', () => {
|
||||
expect(buildUrl({ sendTo: 'staging', channelName: 'test-channel' })).toBe(
|
||||
const buildShipperUrl = createBuildShipperUrl('staging');
|
||||
expect(buildShipperUrl({ channelName: 'test-channel' })).toBe(
|
||||
'https://telemetry-staging.elastic.co/v3/send/test-channel'
|
||||
);
|
||||
});
|
26
src/plugins/telemetry/common/ebt_v3_endpoint/build_url.ts
Normal file
26
src/plugins/telemetry/common/ebt_v3_endpoint/build_url.ts
Normal file
|
@ -0,0 +1,26 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import type {
|
||||
BuildShipperUrlOptions,
|
||||
BuildShipperUrl,
|
||||
} from '@elastic/ebt/shippers/elastic_v3/common';
|
||||
|
||||
/**
|
||||
* Builds the URL for the V3 API.
|
||||
*/
|
||||
export const createBuildShipperUrl =
|
||||
(sendTo: 'production' | 'staging'): BuildShipperUrl =>
|
||||
(urlOptions: BuildShipperUrlOptions): string => {
|
||||
const { channelName } = urlOptions;
|
||||
const baseUrl =
|
||||
sendTo === 'production'
|
||||
? 'https://telemetry.elastic.co'
|
||||
: 'https://telemetry-staging.elastic.co';
|
||||
return `${baseUrl}/v3/send/${channelName}`;
|
||||
};
|
|
@ -6,5 +6,5 @@
|
|||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
export { schemaToIoTs } from './schema_to_io_ts';
|
||||
export { validateSchema } from './validate_schema';
|
||||
export { buildShipperHeaders } from './build_headers';
|
||||
export { createBuildShipperUrl } from './build_url';
|
|
@ -5,14 +5,15 @@
|
|||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
/* eslint-disable dot-notation */
|
||||
import { of } from 'rxjs';
|
||||
import { ElasticV3BrowserShipper } from '@kbn/ebt/shippers/elastic_v3/browser';
|
||||
import { ElasticV3BrowserShipper } from '@elastic/ebt/shippers/elastic_v3/browser';
|
||||
import { coreMock } from '@kbn/core/public/mocks';
|
||||
import { homePluginMock } from '@kbn/home-plugin/public/mocks';
|
||||
import { screenshotModePluginMock } from '@kbn/screenshot-mode-plugin/public/mocks';
|
||||
import { HomePublicPluginSetup } from '@kbn/home-plugin/public';
|
||||
import { ScreenshotModePluginSetup } from '@kbn/screenshot-mode-plugin/public';
|
||||
import { buildShipperHeaders } from '../common/ebt_v3_endpoint';
|
||||
import { isSyntheticsMonitorMock } from './plugin.test.mock';
|
||||
import { TelemetryPlugin } from './plugin';
|
||||
|
||||
|
@ -64,24 +65,44 @@ describe('TelemetryPlugin', () => {
|
|||
it('registers the UI telemetry shipper', () => {
|
||||
const initializerContext = coreMock.createPluginInitializerContext();
|
||||
const coreSetupMock = coreMock.createSetup();
|
||||
const telemetryPlugin = new TelemetryPlugin(initializerContext);
|
||||
|
||||
new TelemetryPlugin(initializerContext).setup(coreSetupMock, { screenshotMode, home });
|
||||
telemetryPlugin['getSendToEnv'] = jest.fn();
|
||||
telemetryPlugin.setup(coreSetupMock, { screenshotMode, home });
|
||||
|
||||
expect(telemetryPlugin['getSendToEnv']).toHaveBeenCalledTimes(1);
|
||||
expect(telemetryPlugin['getSendToEnv']).toHaveBeenCalledWith(undefined);
|
||||
|
||||
expect(coreSetupMock.analytics.registerShipper).toHaveBeenCalledWith(
|
||||
ElasticV3BrowserShipper,
|
||||
{ channelName: 'kibana-browser', version: 'version', sendTo: 'staging' }
|
||||
{
|
||||
channelName: 'kibana-browser',
|
||||
version: 'version',
|
||||
buildShipperUrl: expect.any(Function),
|
||||
buildShipperHeaders,
|
||||
}
|
||||
);
|
||||
});
|
||||
|
||||
it('registers the UI telemetry shipper (pointing to prod)', () => {
|
||||
const initializerContext = coreMock.createPluginInitializerContext({ sendUsageTo: 'prod' });
|
||||
const coreSetupMock = coreMock.createSetup();
|
||||
const telemetryPlugin = new TelemetryPlugin(initializerContext);
|
||||
|
||||
new TelemetryPlugin(initializerContext).setup(coreSetupMock, { screenshotMode, home });
|
||||
telemetryPlugin['getSendToEnv'] = jest.fn();
|
||||
telemetryPlugin.setup(coreSetupMock, { screenshotMode, home });
|
||||
|
||||
expect(telemetryPlugin['getSendToEnv']).toHaveBeenCalledTimes(1);
|
||||
expect(telemetryPlugin['getSendToEnv']).toHaveBeenCalledWith('prod');
|
||||
|
||||
expect(coreSetupMock.analytics.registerShipper).toHaveBeenCalledWith(
|
||||
ElasticV3BrowserShipper,
|
||||
{ channelName: 'kibana-browser', version: 'version', sendTo: 'production' }
|
||||
{
|
||||
channelName: 'kibana-browser',
|
||||
version: 'version',
|
||||
buildShipperUrl: expect.any(Function),
|
||||
buildShipperHeaders,
|
||||
}
|
||||
);
|
||||
});
|
||||
});
|
||||
|
|
|
@ -22,10 +22,11 @@ import type {
|
|||
ScreenshotModePluginStart,
|
||||
} from '@kbn/screenshot-mode-plugin/public';
|
||||
import type { HomePublicPluginSetup } from '@kbn/home-plugin/public';
|
||||
import { ElasticV3BrowserShipper } from '@kbn/ebt/shippers/elastic_v3/browser';
|
||||
import { ElasticV3BrowserShipper } from '@elastic/ebt/shippers/elastic_v3/browser';
|
||||
import { isSyntheticsMonitor } from '@kbn/analytics-collection-utils';
|
||||
|
||||
import { BehaviorSubject, map, switchMap, tap } from 'rxjs';
|
||||
import { buildShipperHeaders, createBuildShipperUrl } from '../common/ebt_v3_endpoint';
|
||||
|
||||
import type { TelemetryConfigLabels } from '../server/config';
|
||||
import { FetchTelemetryConfigRoute, INTERNAL_VERSION } from '../common/routes';
|
||||
import type { v2 } from '../common/types';
|
||||
|
@ -192,10 +193,12 @@ export class TelemetryPlugin
|
|||
},
|
||||
});
|
||||
|
||||
const sendTo = this.getSendToEnv(config.sendUsageTo);
|
||||
analytics.registerShipper(ElasticV3BrowserShipper, {
|
||||
channelName: 'kibana-browser',
|
||||
version: currentKibanaVersion,
|
||||
sendTo: config.sendUsageTo === 'prod' ? 'production' : 'staging',
|
||||
buildShipperHeaders,
|
||||
buildShipperUrl: createBuildShipperUrl(sendTo),
|
||||
});
|
||||
|
||||
this.telemetrySender = new TelemetrySender(this.telemetryService, async () => {
|
||||
|
@ -296,6 +299,10 @@ export class TelemetryPlugin
|
|||
this.telemetrySender?.stop();
|
||||
}
|
||||
|
||||
private getSendToEnv(sendUsageTo: string): 'production' | 'staging' {
|
||||
return sendUsageTo === 'prod' ? 'production' : 'staging';
|
||||
}
|
||||
|
||||
/**
|
||||
* Kibana should skip telemetry collection if reporting is taking a screenshot
|
||||
* or Synthetics monitoring is navigating Kibana.
|
||||
|
|
|
@ -5,11 +5,12 @@
|
|||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { ElasticV3ServerShipper } from '@kbn/ebt/shippers/elastic_v3/server';
|
||||
/* eslint-disable dot-notation */
|
||||
import { ElasticV3ServerShipper } from '@elastic/ebt/shippers/elastic_v3/server';
|
||||
import { coreMock } from '@kbn/core/server/mocks';
|
||||
import { usageCollectionPluginMock } from '@kbn/usage-collection-plugin/server/mocks';
|
||||
import { telemetryCollectionManagerPluginMock } from '@kbn/telemetry-collection-manager-plugin/server/mocks';
|
||||
import { buildShipperHeaders } from '../common/ebt_v3_endpoint';
|
||||
import { TelemetryPlugin } from './plugin';
|
||||
import type { NodeRoles } from '@kbn/core-node-server';
|
||||
|
||||
|
@ -40,14 +41,25 @@ describe('TelemetryPlugin', () => {
|
|||
const initializerContext = coreMock.createPluginInitializerContext();
|
||||
const coreSetupMock = coreMock.createSetup();
|
||||
|
||||
new TelemetryPlugin(initializerContext).setup(coreSetupMock, {
|
||||
const telemetryPlugin = new TelemetryPlugin(initializerContext);
|
||||
telemetryPlugin['getSendToEnv'] = jest.fn();
|
||||
|
||||
telemetryPlugin.setup(coreSetupMock, {
|
||||
usageCollection: usageCollectionPluginMock.createSetupContract(),
|
||||
telemetryCollectionManager: telemetryCollectionManagerPluginMock.createSetupContract(),
|
||||
});
|
||||
|
||||
expect(telemetryPlugin['getSendToEnv']).toHaveBeenCalledTimes(1);
|
||||
expect(telemetryPlugin['getSendToEnv']).toHaveBeenCalledWith(undefined);
|
||||
|
||||
expect(coreSetupMock.analytics.registerShipper).toHaveBeenCalledWith(
|
||||
ElasticV3ServerShipper,
|
||||
{ channelName: 'kibana-server', version: 'version', sendTo: 'staging' }
|
||||
{
|
||||
channelName: 'kibana-server',
|
||||
version: 'version',
|
||||
buildShipperUrl: expect.any(Function),
|
||||
buildShipperHeaders,
|
||||
}
|
||||
);
|
||||
});
|
||||
|
||||
|
@ -55,14 +67,25 @@ describe('TelemetryPlugin', () => {
|
|||
const initializerContext = coreMock.createPluginInitializerContext({ sendUsageTo: 'prod' });
|
||||
const coreSetupMock = coreMock.createSetup();
|
||||
|
||||
new TelemetryPlugin(initializerContext).setup(coreSetupMock, {
|
||||
const telemetryPlugin = new TelemetryPlugin(initializerContext);
|
||||
telemetryPlugin['getSendToEnv'] = jest.fn();
|
||||
|
||||
telemetryPlugin.setup(coreSetupMock, {
|
||||
usageCollection: usageCollectionPluginMock.createSetupContract(),
|
||||
telemetryCollectionManager: telemetryCollectionManagerPluginMock.createSetupContract(),
|
||||
});
|
||||
|
||||
expect(telemetryPlugin['getSendToEnv']).toHaveBeenCalledTimes(1);
|
||||
expect(telemetryPlugin['getSendToEnv']).toHaveBeenCalledWith('prod');
|
||||
|
||||
expect(coreSetupMock.analytics.registerShipper).toHaveBeenCalledWith(
|
||||
ElasticV3ServerShipper,
|
||||
{ channelName: 'kibana-server', version: 'version', sendTo: 'production' }
|
||||
{
|
||||
channelName: 'kibana-server',
|
||||
version: 'version',
|
||||
buildShipperUrl: expect.any(Function),
|
||||
buildShipperHeaders,
|
||||
}
|
||||
);
|
||||
});
|
||||
});
|
||||
|
@ -76,7 +99,6 @@ describe('TelemetryPlugin', () => {
|
|||
|
||||
const plugin = new TelemetryPlugin(initializerContext);
|
||||
|
||||
// eslint-disable-next-line dot-notation
|
||||
const startFetcherMock = (plugin['startFetcher'] = jest.fn());
|
||||
|
||||
plugin.setup(coreMock.createSetup(), {
|
||||
|
|
|
@ -22,7 +22,7 @@ import {
|
|||
map,
|
||||
} from 'rxjs';
|
||||
|
||||
import { ElasticV3ServerShipper } from '@kbn/ebt/shippers/elastic_v3/server';
|
||||
import { ElasticV3ServerShipper } from '@elastic/ebt/shippers/elastic_v3/server';
|
||||
|
||||
import type { UsageCollectionSetup } from '@kbn/usage-collection-plugin/server';
|
||||
import type {
|
||||
|
@ -41,6 +41,7 @@ import type { SecurityPluginStart } from '@kbn/security-plugin/server';
|
|||
import { SavedObjectsClient } from '@kbn/core/server';
|
||||
|
||||
import apm from 'elastic-apm-node';
|
||||
import { buildShipperHeaders, createBuildShipperUrl } from '../common/ebt_v3_endpoint';
|
||||
import {
|
||||
type TelemetrySavedObject,
|
||||
getTelemetrySavedObject,
|
||||
|
@ -171,10 +172,12 @@ export class TelemetryPlugin implements Plugin<TelemetryPluginSetup, TelemetryPl
|
|||
|
||||
const currentKibanaVersion = this.currentKibanaVersion;
|
||||
|
||||
const sendTo = this.getSendToEnv(this.initialConfig.sendUsageTo);
|
||||
analytics.registerShipper(ElasticV3ServerShipper, {
|
||||
channelName: 'kibana-server',
|
||||
version: currentKibanaVersion,
|
||||
sendTo: this.initialConfig.sendUsageTo === 'prod' ? 'production' : 'staging',
|
||||
buildShipperHeaders,
|
||||
buildShipperUrl: createBuildShipperUrl(sendTo),
|
||||
});
|
||||
|
||||
analytics.registerContextProvider<{ labels: TelemetryConfigLabels }>({
|
||||
|
@ -262,6 +265,10 @@ export class TelemetryPlugin implements Plugin<TelemetryPluginSetup, TelemetryPl
|
|||
this.fetcherTask.stop();
|
||||
}
|
||||
|
||||
private getSendToEnv(sendUsageTo: string): 'production' | 'staging' {
|
||||
return sendUsageTo === 'prod' ? 'production' : 'staging';
|
||||
}
|
||||
|
||||
private async getOptInStatus(): Promise<boolean | undefined> {
|
||||
const internalRepositoryClient = await firstValueFrom(this.savedObjectsInternalClient$, {
|
||||
defaultValue: undefined,
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Add a link
Reference in a new issue