[Logs UI] Add <LogStream/> documentation to the infra Storybook (#87169)

Co-authored-by: Felix Stürmer <weltenwort@users.noreply.github.com>
This commit is contained in:
Alejandro Fernández Gómez 2021-01-12 17:33:50 +01:00 committed by GitHub
parent 807e8bdeb2
commit 1a9836b296
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
6 changed files with 478 additions and 176 deletions

View file

@ -440,6 +440,7 @@
"@types/enzyme": "^3.10.5",
"@types/eslint": "^6.1.3",
"@types/extract-zip": "^1.6.2",
"@types/faker": "^5.1.5",
"@types/fancy-log": "^1.3.1",
"@types/fetch-mock": "^7.3.1",
"@types/file-saver": "^2.0.0",
@ -647,7 +648,7 @@
"eslint-plugin-react-hooks": "^4.2.0",
"eslint-plugin-react-perf": "^3.2.3",
"expose-loader": "^0.7.5",
"faker": "1.1.0",
"faker": "^5.1.0",
"fancy-log": "^1.3.2",
"fast-glob": "2.2.7",
"fetch-mock": "^7.3.9",

View file

@ -1,171 +0,0 @@
# Embeddable `<LogStream />` component
The purpose of this component is to allow you, the developer, to have your very own Log Stream in your plugin.
The component is exposed through `infra/public`. Since Kibana uses relative paths, it is up to you to find out how to import it (sorry).
```tsx
import { LogStream } from '../../../../../../infra/public';
```
## Prerequisites
To use the component, there are several things you need to ensure in your plugin:
- In your plugin's `kibana.json` plugin, add `"infra"` to `requiredPlugins`.
- The component needs to be mounted inside the hierarchy of a [`kibana-react` provider](https://github.com/elastic/kibana/blob/b2d0aa7b7fae1c89c8f9e8854ae73e71be64e765/src/plugins/kibana_react/README.md#L45).
## Usage
The simplest way to use the component is with a date range, passed with the `startTimestamp` and `endTimestamp` props.
```tsx
const endTimestamp = Date.now();
const startTimestamp = endTimestamp - 15 * 60 * 1000; // 15 minutes
<LogStream startTimestamp={startTimestamp} endTimestamp={endTimestamp} />;
```
This will show a list of log entries between the time range, in ascending order (oldest first), but with the scroll position all the way to the bottom (showing the newest entries).
### Filtering data
You might want to show specific data for the purpose of your plugin. Maybe you want to show log lines from a specific host, or for an APM trace. You can pass a KQL expression via the `query` prop.
```tsx
<LogStream
startTimestamp={startTimestamp}
endTimestamp={endTimestamp}
query="trace.id: 18fabada9384abd4"
/>
```
### Modifying rendering
By default the component will initially load at the bottom of the list, showing the newest entries. You can change what log line is shown in the center via the `center` prop. The prop takes a [`LogEntriesCursor`](https://github.com/elastic/kibana/blob/0a6c748cc837c016901f69ff05d81395aa2d41c8/x-pack/plugins/infra/common/http_api/log_entries/common.ts#L9-L13).
```tsx
<LogStream
startTimestamp={startTimestamp}
endTimestamp={endTimestamp}
center={{ time: ..., tiebreaker: ... }}
/>
```
If you want to highlight a specific log line, you can do so by passing its ID in the `highlight` prop.
```tsx
<LogStream startTimestamp={startTimestamp} endTimestamp={endTimestamp} highlight="abcde12345" />
```
### Source configuration
The infra plugin has the concept of "source configuration" to store settings for the logs UI. The component will use the source configuration to determine which indices to query or what columns to show.
By default the `<LogStream />` uses the `"default"` source configuration, but if your plugin uses a different one you can specify it via the `sourceId` prop.
```tsx
<LogStream startTimestamp={startTimestamp} endTimestamp={endTimestamp} sourceId="my_source" />
```
### Custom columns
It is possible to change what columns are loaded without creating a whole new source configuration. To do so the component supports the `columns` prop. The default configuration can be replicated as follows.
```tsx
<LogStream
startTimestamp={...}
endTimestamp={...}
columns={[
{ type: 'timestamp' },
{ type: 'field', field: 'event.dataset' }
{ type: 'message' },
]}
/>
```
There are three column types:
<table>
<tr>
<td>`type: "timestamp"`
<td>The configured timestamp field. Defaults to `@timestamp`.
<tr>
<td>`type: "message"`
<td>The value of the `message` field if it exists. If it doesn't, the component will try to recompose the original log line using values of other fields.
<tr>
<td>`type: "field"`
<td>A specific field specified in the `field` property.
</table>
### Custom column rendering
Besides customizing what columns are shown, you can also customize how a column is rendered. You can customize the width of the column, the text of the header, and how each value is rendered within the cell
#### `width` option
The `width` modifies the width of the column. It can be a number (in pixels) or a string with a valid CSS value.
```tsx
<LogStream
startTimestamp={...}
endTimestamp={...}
columns={[
{ type: 'timestamp', width: 100 }, // Same as "100px"
{ type: 'field', field: 'event.dataset', width: "50%" }
]}
/>
```
#### `header` option
The `header` takes either a `boolean` value that specifies if the header should be rendered or not, or a `string` with the text to render.
```tsx
<LogStream
startTimestamp={...}
endTimestamp={...}
columns={[
// Don't show anything in the header
{ type: 'timestamp', header: false },
// Show a custom string in the header
{ type: 'field', field: 'event.dataset', header: "Dataset of the event" }
]}
/>
```
The default is `true`, which renders the default values for each column type:
| Column type | Default value |
| ----------- | ------------------------------------- |
| `timestamp` | Date of the top-most visible log line |
| `message` | `"Message"` literal |
| `field` | Field name |
#### `render` option
The `render` takes a function to customize the rendering of the column. The first argument is the value of the column. The function must return a valid `ReactNode`.
```tsx
<LogStream
startTimestamp={...}
endTimestamp={...}
columns={[
{ type: 'timestamp', render: (timestamp) => <b>{new Date(timestamp).toString()}</b>; },
{ type: 'field', field: 'log.level', render: (value) => value === 'warn' ? '⚠️' : '' }
{ type: 'message', render: (message) => message.toUpperCase() }
]}
/>
```
The first argument's type depends on the column type.
| Column type | Type of the `value` |
| ----------- | ---------------------------------------------------------------------- |
| `timestamp` | `number`. The epoch_millis of the log line |
| `message` | `string`. The processed log message |
| `field` | `JsonValue`. The type of the field itself. Must be checked at runtime. |
### Considerations
As mentioned in the prerequisites, the component relies on `kibana-react` to access kibana's core services. If this is not the case the component will throw an exception when rendering. We advise to use an `<EuiErrorBoundary>` in your component hierarchy to catch this error if necessary.

View file

@ -0,0 +1,348 @@
import { Meta, Story, Canvas, ArgsTable } from '@storybook/addon-docs/blocks';
import { Subject } from 'rxjs';
import { I18nProvider } from '@kbn/i18n/react';
import { EuiThemeProvider } from '../../../../observability/public';
import { KibanaContextProvider } from '../../../../../../src/plugins/kibana_react/public';
import { DEFAULT_SOURCE_CONFIGURATION } from '../../test_utils/source_configuration';
import { generateFakeEntries, ENTRIES_EMPTY } from '../../test_utils/entries';
import { LogStream } from './';
<!-- Prework -->
export const startTimestamp = 1595145600000;
export const endTimestamp = startTimestamp + 15 * 60 * 1000;
// Fake `http.fetch` implementation backing the stories. It serves the
// canned source configuration and synthesizes log entries on demand so the
// <LogStream /> component can run without a Kibana server.
export const fetch = function (url, params) {
  switch (url) {
    case '/api/infra/log_source_configurations/default':
      return DEFAULT_SOURCE_CONFIGURATION;
    case '/api/log_entries/entries': {
      // Block scope is required here: `const` declarations directly inside a
      // `case` clause leak across the whole switch (no-case-declarations).
      const body = JSON.parse(params.body);
      // When the component pages past either edge of the requested range,
      // report an empty result so it stops asking for more.
      if (body.after?.time === body.endTimestamp || body.before?.time === body.startTimestamp) {
        return ENTRIES_EMPTY;
      } else {
        const entries = generateFakeEntries(
          200,
          body.startTimestamp,
          body.endTimestamp,
          body.columns || DEFAULT_SOURCE_CONFIGURATION.data.configuration.logColumns
        );
        return {
          data: {
            entries,
            topCursor: entries[0].cursor,
            bottomCursor: entries[entries.length - 1].cursor,
            hasMoreBefore: false,
          },
        };
      }
    }
    default:
      return {};
  }
};
// Minimal `uiSettings` stub: only the date formats the log column
// renderers read are provided; anything else resolves to `undefined`.
export const uiSettings = {
  get: (setting) => {
    const knownSettings = {
      dateFormat: 'MMM D, YYYY @ HH:mm:ss.SSS',
      'dateFormat:scaled': [['', 'HH:mm:ss.SSS']],
    };
    return knownSettings[setting];
  },
  // The stories never change settings, so an ever-silent stream suffices.
  get$: () => new Subject(),
};
// Storybook render template: forwards the story args straight to <LogStream />.
export const Template = (args) => <LogStream {...args} />;
<Meta
title="infra/LogStream"
component={LogStream}
decorators={[
(story) => (
<I18nProvider>
<EuiThemeProvider>
<KibanaContextProvider services={{ http: { fetch }, uiSettings }}>
{story()}
</KibanaContextProvider>
</EuiThemeProvider>
</I18nProvider>
),
]}
/>
# Embeddable `<LogStream />` component
The purpose of this component is to allow you, the developer, to have your very own Log Stream in your plugin.
The component is exposed through `infra/public`. Since Kibana uses relative paths, it is up to you to find out how to import it (sorry).
```tsx
import { LogStream } from '../../../../../../infra/public';
// ^^ Modify appropriately
```
## Prerequisites
To use the component your plugin needs to meet certain criteria:
- Ensure `"infra"` is specified as a `requiredPlugins` in your plugin's `kibana.json`.
- Ensure the `<LogStream />` component is mounted inside the hierarchy of a [`kibana-react` provider](https://github.com/elastic/kibana/blob/b2d0aa7b7fae1c89c8f9e8854ae73e71be64e765/src/plugins/kibana_react/README.md#L45).
## Usage
The simplest way to use the component is with a date range
```tsx
const endTimestamp = Date.now();
const startTimestamp = endTimestamp - 15 * 60 * 1000; // 15 minutes
<LogStream startTimestamp={startTimestamp} endTimestamp={endTimestamp} />;
```
This will show a list of log entries between the specified timestamps.
<Canvas>
<Story name="Default" args={{ startTimestamp, endTimestamp }}>
{Template.bind({})}
</Story>
</Canvas>
## Query log entries
You might want to show specific log entries in your plugin. Maybe you want to show log lines from a specific host, or for an APM trace. The component has a `query` prop that accepts valid KQL expressions.
```tsx
<LogStream
startTimestamp={startTimestamp}
endTimestamp={endTimestamp}
query="trace.id: 18fabada9384abd4"
/>
```
## Center the view on a specific entry
By default the component will load at the bottom of the list, showing the newest entries. You can change the rendering point with the `center` prop. The prop takes a [`LogEntriesCursor`](https://github.com/elastic/kibana/blob/0a6c748cc837c016901f69ff05d81395aa2d41c8/x-pack/plugins/infra/common/http_api/log_entries/common.ts#L9-L13).
```tsx
<LogStream
startTimestamp={startTimestamp}
endTimestamp={endTimestamp}
center={{ time: 1595146275000, tiebreaker: 150 }}
/>
```
<Canvas>
<Story
name="CenteredView"
args={{ startTimestamp, endTimestamp, center: { time: 1595146275000, tiebreaker: 150 } }}
>
{Template.bind({})}
</Story>
</Canvas>
## Highlight a specific entry
The component can highlight a specific line via the `highlight` prop. It takes the `id` of the log entry. Note that this prop doesn't center the view around that log line.
```tsx
<LogStream startTimestamp={startTimestamp} endTimestamp={endTimestamp} highlight="entry-197" />
```
<Canvas>
<Story name="HighlightedEntry" args={{ startTimestamp, endTimestamp, highlight: 'entry-197' }}>
{Template.bind({})}
</Story>
</Canvas>
## Column configuration
By default the component will use the same columns as the Logs UI:
- `@timestamp` of the log.
- `event.dataset` field.
- The log message. This might be the `message` field, or a reconstruction based on other fields.
These columns are user-configurable. When the end user changes the default columns those changes will be reflected in the `<LogStream />` component.
If the default columns don't work for the use case of your plugin, or you don't want your plugin to be affected by user changes, you can specify which columns you want. We offer two mechanisms for this.
### With a `columns` prop
The easiest way is to specify what columns you want with the `columns` prop.
```tsx
<LogStream
startTimestamp={startTimestamp}
endTimestamp={endTimestamp}
columns={[
{ type: 'timestamp' },
{ type: 'field', field: 'log.level' },
{ type: 'field', field: 'host.name' },
{ type: 'message' },
]}
/>
```
<Canvas>
<Story
name="CustomColumns"
args={{
startTimestamp,
endTimestamp,
columns: [
{ type: 'timestamp' },
{ type: 'field', field: 'log.level' },
{ type: 'field', field: 'host.name' },
{ type: 'message' },
],
}}
>
{Template.bind({})}
</Story>
</Canvas>
The rendering of the column headers and the cell contents can also be customized with the following properties:
<table>
<tbody>
<tr>
<td>
<code>width</code>
</td>
<td>
<code>number | string</code>
</td>
<td>The width of the column. Accepts any valid `flex-basis` value.</td>
</tr>
<tr>
<td>
<code>header</code>
</td>
<td>
<code>boolean | string</code>
<br />
Defaults to <code>true</code>
</td>
<td>
When `boolean`, decide if the header should render or not.
<br />
When `string`, show the string contents in the header.
</td>
</tr>
<tr>
<td>
<code>render</code>
</td>
<td>
<code>(timestamp: number) => ReactNode</code> for the `timestamp` column
<br />
<code>(message: string) => ReactNode</code> for the `message` column.
<br />
<code>(value: JsonValue) => ReactNode</code> for the `field` columns.
</td>
<td>How should the column value render</td>
</tr>
</tbody>
</table>
```tsx
<LogStream
startTimestamp={startTimestamp}
endTimestamp={endTimestamp}
columns={[
{ type: 'timestamp', header: 'When?' },
{
type: 'field',
field: 'log.level',
header: false,
width: 24,
render: (value) => {
switch (value) {
case 'debug':
return '🐞';
case 'info':
return '';
case 'warn':
return '⚠️';
case 'error':
return '❌';
}
},
},
{ type: 'message' },
]}
/>
```
<Canvas>
<Story
name="CustomColumnRendering"
args={{
startTimestamp,
endTimestamp,
columns: [
{ type: 'timestamp', header: 'When?' },
{
type: 'field',
field: 'log.level',
header: false,
width: 24,
render: (value) => {
switch (value) {
case 'debug':
return '🐞';
case 'info':
return '';
case 'warn':
return '⚠️';
case 'error':
return '❌';
}
},
},
{ type: 'message' },
],
}}
>
{Template.bind({})}
</Story>
</Canvas>
### With a source configuration
The infra plugin has the concept of a "source configuration", a collection of settings that apply to the logs and metrics UIs. The component uses the source configuration to determine which indices to query or what columns to show.
The `<LogStream />` component will use the `"default"` source configuration. If you want to use your own configuration, you need to first create it when you initialize your plugin, and then specify it in the `<LogStream />` component with the `sourceId` prop.
```tsx
// Your `plugin/init.ts`
class MyPlugin {
// ...
setup(core, plugins) {
plugins.infra.defineInternalSourceConfiguration(
'my_source', // ID for your source configuration
{
logAlias: 'some-index-*', // Optional. what ES index to query.
logColumns: [
{ timestampColumn: { id: '...uuid4' }, // The `@timestamp` column.
{ fieldColumn: { id: '...uuid4', field: 'some_field' }}, // Any column(s) you want.
{ messageColumn: { id: '...uuid' }} // The `message` column.
]
}
);
}
}
// Somewhere else on your code
<LogStream
sourceId="my_source"
startTimestamp={...}
endTimestamp={...}
/>
```

View file

@ -0,0 +1,73 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import faker from 'faker';
import { LogEntry } from '../../common/http_api';
import { LogSourceConfiguration } from '../containers/logs/log_source';
// Canned empty response for the log entries API: no entries and null
// cursors, used by the stories to signal the edge of the available data.
export const ENTRIES_EMPTY = {
  data: {
    entries: [],
    topCursor: null,
    bottomCursor: null,
  },
};
/**
 * Generate `count` deterministic-shaped fake log entries spread evenly
 * across the [startTimestamp, endTimestamp] interval, with one cell per
 * configured log column. The last entry is pinned exactly to
 * `endTimestamp` so the range boundary is always represented.
 */
export function generateFakeEntries(
  count: number,
  startTimestamp: number,
  endTimestamp: number,
  columns: LogSourceConfiguration['configuration']['logColumns']
): LogEntry[] {
  const step = Math.floor((endTimestamp - startTimestamp) / count);
  return Array.from({ length: count }, (_, index) => {
    // Pin the final entry to the end of the range; earlier entries advance
    // in uniform steps from the start.
    const time = index === count - 1 ? endTimestamp : startTimestamp + step * index;
    return {
      id: `entry-${index}`,
      context: {},
      cursor: { time, tiebreaker: index },
      columns: columns.map((column) => {
        if ('timestampColumn' in column) {
          return { columnId: column.timestampColumn.id, timestamp: time };
        }
        if ('messageColumn' in column) {
          return {
            columnId: column.messageColumn.id,
            message: [{ field: 'message', value: [fakeColumnValue('message')], highlights: [] }],
          };
        }
        return {
          columnId: column.fieldColumn.id,
          field: column.fieldColumn.field,
          value: [fakeColumnValue(column.fieldColumn.field)],
          highlights: [],
        };
      }),
    };
  });
}
// Produce a plausible fake value for the given field name; unknown fields
// fall back to plain lorem text.
function fakeColumnValue(field: string): string {
  if (field === 'message') {
    return faker.fake(
      '{{internet.ip}} - [{{date.past}}] "GET {{internet.url}} HTTP/1.1" 200 {{random.number}} "-" "{{internet.userAgent}}"'
    );
  }
  if (field === 'event.dataset') {
    return faker.fake('{{hacker.noun}}.{{hacker.noun}}');
  }
  if (field === 'log.file.path') {
    return faker.system.filePath();
  }
  if (field === 'log.level') {
    return faker.random.arrayElement(['debug', 'info', 'warn', 'error']);
  }
  if (field === 'host.name') {
    return faker.hacker.noun();
  }
  return faker.lorem.sentence();
}

View file

@ -0,0 +1,46 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { GetLogSourceConfigurationSuccessResponsePayload } from '../../common/http_api/log_sources';
// Canned successful response for the log source configuration API route,
// used by the Storybook stories in place of a live Kibana server.
export const DEFAULT_SOURCE_CONFIGURATION: GetLogSourceConfigurationSuccessResponsePayload = {
  data: {
    id: 'default',
    version: 'WzQwNiwxXQ==',
    updatedAt: 1608559663482,
    origin: 'stored',
    configuration: {
      name: 'Default',
      description: '',
      logAlias: 'kibana_sample_data_logs*',
      fields: {
        container: 'container.id',
        host: 'host.name',
        pod: 'kubernetes.pod.uid',
        tiebreaker: '_doc',
        timestamp: '@timestamp',
      },
      logColumns: [
        {
          timestampColumn: {
            id: '5e7f964a-be8a-40d8-88d2-fbcfbdca0e2f',
          },
        },
        {
          fieldColumn: {
            // Fixed: the id previously contained a stray leading space
            // (' eb9777a8-…'); ids are opaque but should be clean UUIDs.
            id: 'eb9777a8-fcd3-420e-ba7d-172fff6da7a2',
            field: 'event.dataset',
          },
        },
        {
          messageColumn: {
            id: 'b645d6da-824b-4723-9a2a-e8cece1645c0',
          },
        },
      ],
    },
  },
};

View file

@ -4775,6 +4775,11 @@
resolved "https://registry.yarnpkg.com/@types/extract-zip/-/extract-zip-1.6.2.tgz#5c7eb441c41136167a42b88b64051e6260c29e86"
integrity sha1-XH60QcQRNhZ6QriLZAUeYmDCnoY=
"@types/faker@^5.1.5":
version "5.1.5"
resolved "https://registry.yarnpkg.com/@types/faker/-/faker-5.1.5.tgz#f14b015e0100232bb00c6dd7611505efb08709a0"
integrity sha512-2uEQFb7bsx68rqD4F8q95wZq6LTLOyexjv6BnvJogCO4jStkyc6IDEkODPQcWfovI6g6M3uPQ2/uD/oedJKkNw==
"@types/fancy-log@^1.3.1":
version "1.3.1"
resolved "https://registry.yarnpkg.com/@types/fancy-log/-/fancy-log-1.3.1.tgz#dd94fbc8c2e2ab8ab402ca8d04bb8c34965f0696"
@ -13369,10 +13374,10 @@ extsprintf@1.3.0, extsprintf@^1.2.0:
resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.3.0.tgz#96918440e3041a7a414f8c52e3c574eb3c3e1e05"
integrity sha1-lpGEQOMEGnpBT4xS48V06zw+HgU=
faker@1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/faker/-/faker-1.1.0.tgz#230738ebd37edad9de4a421de12922bd8206a872"
integrity sha1-Iwc469N+2tneSkId4SkivYIGqHI=
faker@^5.1.0:
version "5.1.0"
resolved "https://registry.yarnpkg.com/faker/-/faker-5.1.0.tgz#e10fa1dec4502551aee0eb771617a7e7b94692e8"
integrity sha512-RrWKFSSA/aNLP0g3o2WW1Zez7/MnMr7xkiZmoCfAGZmdkDQZ6l2KtuXHN5XjdvpRjDl8+3vf+Rrtl06Z352+Mw==
fancy-log@^1.3.2:
version "1.3.2"