[Automatic Import] use setup and troubleshooting templates in readme (#206477)

This commit is contained in:
Hanna Tamoudi 2025-01-14 16:40:20 +01:00 committed by GitHub
parent ffccfdc62c
commit e6e580e35f
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
20 changed files with 157 additions and 31 deletions

View file

@@ -5,6 +5,8 @@
* 2.0.
*/
import { InputType } from '../../common';
export const CEL_EXISTING_AUTH_CONFIG_FIELDS = [
'oauth_id',
'oauth_secret',
@@ -28,6 +30,18 @@ export const DEFAULT_CEL_PROGRAM = `# // Fetch the agent's public IP every minut
export const DEFAULT_URL = 'https://server.example.com:8089/api';
export const INPUTS_INCLUDE_SSL_CONFIG: readonly InputType[] = [
'aws-cloudwatch',
'aws-s3',
'azure-blob-storage',
'azure-eventhub',
'gcp-pubsub',
'gcs',
'http_endpoint',
'kafka',
'tcp',
];
// The version of the package specification format used by this package https://github.com/elastic/package-spec/blob/main/spec/changelog.yml
export const FORMAT_VERSION = '3.1.4';

View file

@@ -5,10 +5,11 @@
* 2.0.
*/
import { testIntegration } from '../../__jest__/fixtures/build_integration';
import { ensureDirSync, createSync } from '../util';
import { configure } from 'nunjucks';
import { configure, Environment } from 'nunjucks';
import { join as joinPath } from 'path';
import { testIntegration } from '../../__jest__/fixtures/build_integration';
import { DataStream } from '../../common';
import { createSync, ensureDirSync } from '../util';
import { createReadme } from './readme_files';
jest.mock('../util', () => ({
@@ -18,6 +19,7 @@ jest.mock('../util', () => ({
}));
describe('createReadme', () => {
const getTemplateSpy = jest.spyOn(Environment.prototype, 'getTemplate');
const integrationPath = 'path';
const templateDir = joinPath(__dirname, '../templates');
@@ -179,4 +181,106 @@ describe('createReadme', () => {
expect.stringContaining(firstDatastreamFieldsDisplayed)
);
});
it('Should call input setup and troubleshooting templates', () => {
const dataStreams = [
{
name: 'example-datastream',
inputTypes: ['filestream', 'tcp', 'udp'],
},
] as DataStream[];
createReadme(integrationPath, testIntegration.name, dataStreams, []);
const calledTemplateNames = getTemplateSpy.mock.calls.map((call) => call[0]);
expect(calledTemplateNames).toEqual(
expect.arrayContaining([
expect.stringContaining('./readme/setup/filestream.md.njk'),
expect.stringContaining('./readme/setup/tcp.md.njk'),
expect.stringContaining('./readme/setup/udp.md.njk'),
expect.stringContaining('./readme/troubleshooting/filestream.md.njk'),
expect.stringContaining('./readme/troubleshooting/tcp.md.njk'),
expect.stringContaining('./readme/troubleshooting/udp.md.njk'),
])
);
});
it('Should not throw any error if input template does not exist', () => {
const dataStreams = [
{
name: 'example-datastream',
inputTypes: ['fake'],
},
] as unknown as DataStream[];
expect(() =>
createReadme(integrationPath, testIntegration.name, dataStreams, [])
).not.toThrow();
const calledTemplateNames = getTemplateSpy.mock.calls.map((call) => call[0]);
expect(calledTemplateNames).toEqual(
expect.arrayContaining([
expect.stringContaining('./readme/setup/fake.md.njk'),
expect.stringContaining('./readme/troubleshooting/fake.md.njk'),
])
);
});
it('Should pass a list of unique input types to the readme', () => {
const dataStreams = [
{
name: 'datastream1',
inputTypes: ['filestream', 'tcp', 'udp'],
},
{
name: 'datastream2',
inputTypes: ['filestream', 'tcp', 'aws-s3'],
},
] as DataStream[];
createReadme(integrationPath, testIntegration.name, dataStreams, []);
const calls = getTemplateSpy.mock.calls;
for (const input of ['filestream', 'tcp', 'udp', 'aws-s3']) {
const filteredCalls = calls.filter(
(call) =>
call.some(
(arg) => typeof arg === 'string' && arg.includes(`./readme/setup/${input}.md.njk`)
) && call.some((arg) => typeof arg === 'string' && arg.includes('description_readme.njk'))
);
// Assert that there are exactly 2 calls for each input type (one for the build_readme and one for the package_readme)
expect(filteredCalls.length).toBe(2);
}
});
it('Should call ssl template if input can be configured with ssl', () => {
const dataStreams = [
{
name: 'example-datastream',
inputTypes: ['aws-s3'],
},
] as DataStream[];
createReadme(integrationPath, testIntegration.name, dataStreams, []);
const calledTemplateNames = getTemplateSpy.mock.calls.map((call) => call[0]);
expect(calledTemplateNames).toEqual(
expect.arrayContaining([expect.stringContaining('./readme/setup/ssl-tls.md.njk')])
);
});
it('Should not call ssl template if input cannot be configured with ssl', () => {
const dataStreams = [
{
name: 'example-datastream',
inputTypes: ['journald'],
},
] as DataStream[];
createReadme(integrationPath, testIntegration.name, dataStreams, []);
const calledTemplateNames = getTemplateSpy.mock.calls.map((call) => call[0]);
expect(calledTemplateNames).not.toContain('./readme/setup/ssl-tls.md.njk');
});
});

View file

@@ -8,8 +8,9 @@
import { Environment, FileSystemLoader } from 'nunjucks';
import { join as joinPath } from 'path';
import { DataStream } from '../../common';
import { DataStream, InputType } from '../../common';
import { createSync, ensureDirSync } from '../util';
import { INPUTS_INCLUDE_SSL_CONFIG } from './constants';
export function createReadme(
packageDir: string,
@@ -58,12 +59,23 @@ function createReadmeFile(
});
const template = env.getTemplate(templateName);
const uniqueInputs = getUniqueInputs(datastreams);
const renderedTemplate = template.render({
package_name: integrationName,
datastreams,
fields,
inputs: uniqueInputs,
include_ssl: shouldIncludeSSLDocumentation(uniqueInputs),
});
createSync(joinPath(targetDir, 'README.md'), renderedTemplate);
}
function getUniqueInputs(datastreams: DataStream[]): InputType[] {
return [...new Set(datastreams.flatMap((d) => d.inputTypes))];
}
function shouldIncludeSSLDocumentation(inputs: InputType[]): boolean {
return inputs.some((item) => INPUTS_INCLUDE_SSL_CONFIG.includes(item));
}
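
As a minimal sketch (not part of this diff) of how the two new helpers behave together, assuming the same `DataStream` shape used in the tests above: duplicate input types across data streams collapse to a single entry, and one SSL-capable input is enough to flip `include_ssl`.

```ts
import { DataStream, InputType } from '../../common';
import { INPUTS_INCLUDE_SSL_CONFIG } from './constants';

const datastreams = [
  { name: 'datastream1', inputTypes: ['filestream', 'tcp'] },
  { name: 'datastream2', inputTypes: ['tcp', 'aws-s3'] },
] as DataStream[];

// Same Set-based dedup as getUniqueInputs above.
const uniqueInputs: InputType[] = [...new Set(datastreams.flatMap((d) => d.inputTypes))];
// -> ['filestream', 'tcp', 'aws-s3']

// 'aws-s3' is listed in INPUTS_INCLUDE_SSL_CONFIG, so the template
// receives include_ssl: true and renders the SSL/TLS section.
const includeSsl = uniqueInputs.some((input) => INPUTS_INCLUDE_SSL_CONFIG.includes(input));
```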

View file

@@ -2,7 +2,8 @@
{% for datastream in datastreams %}
### {{ datastream.title }}
{{ datastream.description }}{% endfor %}
{% for data_stream in fields %}
{% raw %}{{fields {% endraw %}"{{ data_stream.datastream }}"{% raw %}}}{% endraw %}
{{ datastream.description }}
{% endfor %}
{% for field in fields %}
{% raw %}{{fields {% endraw %}"{{ field.datastream }}"{% raw %}}}{% endraw %}
{% endfor %}

View file

@@ -55,18 +55,30 @@ Check the [setup guidelines](https://www.elastic.co/guide/en/integrations-develo
### Install the integration
1. In Kibana, go to **Management** > **Integrations**.
2. In **Search for integrations* search bar, type {{ package_name }}.
2. In **Search for integrations** search bar, type {{ package_name }}.
3. Click the **{{ package_name }}** integration from the search results.
4. Click the **Add {{ package_name }}** button to add the integration.
5. Add all the required integration configuration parameters.
6. Click **Save and continue** to save the integration.
{% for input in inputs %}
{% include "./readme/setup/" + input + ".md.njk" ignore missing %}
{% endfor %}
{% if include_ssl %}
{% include "./readme/setup/ssl-tls.md.njk" %}
{% endif %}
## Troubleshooting (optional)
- If some fields appear conflicted under the ``logs-*`` or ``metrics-*`` data views, this issue can be resolved by [reindexing](https://www.elastic.co/guide/en/elasticsearch/reference/current/use-a-data-stream.html#reindex-with-a-data-stream) the impacted data stream.
Provide information about special cases and exceptions that aren't necessary for getting started or won't be applicable to all users. Check the [troubleshooting guidelines](https://www.elastic.co/guide/en/integrations-developer/current/documentation-guidelines.html#idg-docs-guidelines-troubleshooting) for more information.
{% for input in inputs %}
{% include "./readme/troubleshooting/" + input + ".md.njk" ignore missing %}
{% endfor %}
## Reference
Provide detailed information about the log or metric types we support within the integration. Check the [reference guidelines](https://www.elastic.co/guide/en/integrations-developer/current/documentation-guidelines.html#idg-docs-guidelines-reference) for more information.
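
The `ignore missing` modifier on the include above is what keeps README generation from throwing for inputs without a dedicated template (the 'fake' input test earlier relies on this). A standalone sketch of that nunjucks behavior, using the same `Environment`/`FileSystemLoader` setup as `readme_files.ts`:

```ts
import { Environment, FileSystemLoader } from 'nunjucks';
import { join as joinPath } from 'path';

const env = new Environment(new FileSystemLoader(joinPath(__dirname, '../templates')));

// With `ignore missing`, an unresolved include renders as an empty string
// instead of raising a template-not-found error.
const rendered = env.renderString(
  '{% include "./readme/setup/" + input + ".md.njk" ignore missing %}',
  { input: 'fake' }
);
// rendered === '' for an unknown input; known inputs yield their setup section.
```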

View file

@@ -1,4 +1,5 @@
{% include "./description_readme.njk" %}
{% for datastream in datastreams %}
### {{ datastream.title }}

View file

@@ -1,5 +1,3 @@
### Collecting logs from AWS CloudWatch
When collecting logs from CloudWatch is enabled, users can retrieve logs from all log streams in a specific log group. The `filterLogEvents` AWS API is used to list log events from the specified log group. Amazon CloudWatch Logs can be used to store log files from Amazon Elastic Compute Cloud (EC2), AWS CloudTrail, Route53, and other sources.
{% include "ssl-tls.md.njk" %}
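
For illustration only: the `filterLogEvents` call described above can be reproduced with the AWS SDK for JavaScript v3; the region and log group name here are placeholders.

```ts
import { CloudWatchLogsClient, FilterLogEventsCommand } from '@aws-sdk/client-cloudwatch-logs';

async function listGroupEvents() {
  const client = new CloudWatchLogsClient({ region: 'us-east-1' }); // placeholder region
  // Omitting logStreamNames returns events from all log streams in the group.
  const { events } = await client.send(
    new FilterLogEventsCommand({ logGroupName: '/example/log-group' }) // placeholder group
  );
  return events ?? [];
}
```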

View file

@@ -22,5 +22,3 @@ The SQS notification method is enabled setting `queue_url` configuration value.
- A separate SQS queue and S3 bucket notification is required for each enabled data stream.
- Permissions for the above AWS S3 bucket and SQS queues should be configured according to the [Filebeat S3 input documentation](https://www.elastic.co/guide/en/beats/filebeat/current/filebeat-input-aws-s3.html#_aws_permissions_2).
- Data collection via AWS S3 Bucket and AWS SQS are mutually exclusive in this case.
{% include "ssl-tls.md.njk" %}

View file

@@ -25,5 +25,3 @@ When you run the Elastic Agent behind a firewall, to ensure proper communication
##### Storage Account Container
Port `443` is used for secure communication with the Storage Account container. This port is commonly used for HTTPS traffic. By allowing traffic on port 443, the Elastic Agent can securely access and interact with the Storage Account container, which is essential for storing and retrieving checkpoint data for each event hub partition.
{% include "ssl-tls.md.njk" %}

View file

@@ -174,5 +174,3 @@ For more information, check the following documents:
- [What ports do I need to open on the firewall?](https://learn.microsoft.com/en-us/azure/event-hubs/event-hubs-faq#what-ports-do-i-need-to-open-on-the-firewall) from the [Event Hubs frequently asked questions](https://learn.microsoft.com/en-us/azure/event-hubs/event-hubs-faq#what-ports-do-i-need-to-open-on-the-firewall).
- [AMQP outbound port requirements](https://learn.microsoft.com/en-us/azure/service-bus-messaging/service-bus-amqp-protocol-guide#amqp-outbound-port-requirements)
{% include "ssl-tls.md.njk" %}

View file

@@ -1,3 +1,3 @@
### Collecting logs from Filestream
Identify the log location on the system. Determine the directory or file path where logs are stored. Then, add this path to the integration configuration.

View file

@@ -13,5 +13,3 @@
**NOTE**:
- Create a unique Pub/Sub topic per data stream.
{% include "ssl-tls.md.njk" %}

View file

@@ -34,5 +34,3 @@ A sample JSON Credentials file looks as follows:
"universe_domain": "dummy-universe-domain.com"
}
```
{% include "ssl-tls.md.njk" %}

View file

@@ -1,5 +1,3 @@
### Collecting logs from HTTP endpoint
Specify the address and port that will be used to initialize a listening HTTP server that collects incoming HTTP POST requests containing a JSON body. The body must be either an object or an array of objects. Any other data types will result in an HTTP 400 (Bad Request) response. For arrays, one document is created for each object in the array.
{% include "ssl-tls.md.njk" %}
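
As a sketch of the accepted body shapes (the listener address below is a placeholder): an array of objects produces one document per element, while any other JSON type is rejected with HTTP 400.

```ts
// Placeholder address; use the host/port configured for the http_endpoint input.
const res = await fetch('http://localhost:8080/', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  // An array of objects -> one document per element.
  body: JSON.stringify([{ event: 'login' }, { event: 'logout' }]),
});
// A body such as JSON.stringify('just a string') would get a 400 (Bad Request).
```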

View file

@@ -1,3 +1,3 @@
### Collecting logs from journald
The journald input is available on Linux systems with systemd installed.

View file

@@ -1,5 +1,3 @@
### Collecting logs from Kafka
This integration collects logs and metrics from [Kafka](https://kafka.apache.org) servers.
{% include "ssl-tls.md.njk" %}

View file

@@ -1,5 +1,3 @@
### Collecting logs from TCP
Specify the address and port that will be used to initialize a listening TCP socket that collects any TCP traffic received and sends each line as a document to Elasticsearch.
{% include "ssl-tls.md.njk" %}
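
A sketch of the line-per-document behavior (host and port are placeholders): each newline-terminated write becomes its own document.

```ts
import { createConnection } from 'net';

// Placeholder host/port; use the address configured for the TCP input.
const socket = createConnection({ host: 'localhost', port: 9000 }, () => {
  socket.write('first event\n'); // becomes one document
  socket.write('second event\n'); // becomes another document
  socket.end();
});
```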

View file

@@ -1,3 +1,3 @@
### Collecting logs from UDP
Specify the address and port that will be used to initialize a listening UDP socket that collects any UDP traffic received and sends each line as a document to Elasticsearch.

View file

@@ -6,4 +6,4 @@ Common error types:
- Period is lower than 60 seconds
- Missing roles in the Service Account
- Misconfigured settings, like "Project Id"

View file

@@ -7,4 +7,4 @@ If you encounter an error while ingesting data, it might be due to the data coll
"message": "failed eval: net/http: request canceled (Client.Timeout or context cancellation while reading body)"
}
}
```