# Support local file path for `xpack.productDocBase.artifactRepositoryUrl` (#217046)
## Summary

Closes https://github.com/elastic/kibana/issues/216583

Adds support for a local file path in the `xpack.productDocBase.artifactRepositoryUrl` setting. If a local path with the `file://` protocol is provided, it must point to a directory containing the artifacts and the `index.xml` file.

#### How to test

1. Download the XML and zip files from https://kibana-knowledge-base-artifacts.elastic.co
2. Create a folder, e.g. `mkdir /Users/<my_user>/test_artifacts`, and place all the files there. The XML file must be named `index.xml`.
3. Add `xpack.productDocBase.artifactRepositoryUrl: 'file:///Users/<my_user>/test_artifacts'` to your `kibana.dev.yml`.
4. Go to `/app/management/kibana/observabilityAiAssistantManagement` in Kibana and install the Elastic documentation.
5. The Kibana dev server should report `[2025-04-07T14:05:10.640+02:00][INFO ][plugins.productDocBase.package-installer] Documentation installation successful for product [security] and version [8.17]`
6. Check the `data/ai-kb-artifacts` folder in your Kibana repo; it should contain zip files with the docs.

### Checklist

- [x] Any text added follows [EUI's writing guidelines](https://elastic.github.io/eui/#/guidelines/writing), uses sentence case text and includes [i18n support](https://github.com/elastic/kibana/blob/main/src/platform/packages/shared/kbn-i18n/README.md)
- [x] [Documentation](https://www.elastic.co/guide/en/kibana/master/development-documentation.html) was added for features that require explanation or tutorials
- [x] [Unit or functional tests](https://www.elastic.co/guide/en/kibana/master/development-tests.html) were updated or added to match the most common scenarios
- [x] The PR description includes the appropriate Release Notes section, and the correct `release_note:*` label is applied per the [guidelines](https://www.elastic.co/guide/en/kibana/master/contributing.html#kibana-release-notes-process)
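For reference, a minimal sketch (not part of this PR) of how the `file://` value from step 3 maps to the `index.xml` file the installer reads. It assumes Node's built-in `url` and `path` modules and a hypothetical user directory `/Users/jane/test_artifacts`; the PR's actual resolution logic is in the diff below.

```ts
// Illustrative only; the directory is hypothetical.
import { URL } from 'url';
import Path from 'path';

// Value configured in kibana.dev.yml (step 3 above)
const artifactRepositoryUrl = 'file:///Users/jane/test_artifacts';

const parsedUrl = new URL(artifactRepositoryUrl);
// A 'file:' protocol switches the installer from HTTP requests to local file reads.
console.log(parsedUrl.protocol); // 'file:'

// The version listing is expected at <directory>/index.xml
console.log(Path.join(parsedUrl.pathname, 'index.xml'));
// '/Users/jane/test_artifacts/index.xml'
```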
This commit is contained in: parent 5d96f36e54, commit 6722f142a4
6 changed files with 218 additions and 11 deletions
@@ -10,11 +10,11 @@ applies_to:

 # AI Assistant settings in {{kib}} [ai-assistant-settings-kb]

 `xpack.productDocBase.artifactRepositoryUrl`

-: Url of the repository to use to download and install the Elastic product documentation artifacts for the AI assistants. Defaults to `https://kibana-knowledge-base-artifacts.elastic.co`
+: Url of the repository to use to download and install the Elastic product documentation artifacts for the AI assistants. Supports both HTTP(S) URLs and local file paths (`file://`). Defaults to `https://kibana-knowledge-base-artifacts.elastic.co`

 ## Configuring product documentation for air-gapped environments [configuring-product-doc-for-airgap]

-Installing product documentation requires network access to its artifact repository. For air-gapped environments, or environments where remote network traffic is blocked or filtered, the artifact repository must be manually deployed somewhere accessible by the Kibana deployment.
+Installing product documentation requires network access to its artifact repository. In air-gapped environments, or environments where remote network traffic is blocked or filtered, you can use a local artifact repository by specifying the path with the `file://` URI scheme.

 Deploying a custom product documentation repository can be done in 2 ways: using a S3 bucket, or using a CDN.
@@ -5,11 +5,14 @@
  * 2.0.
  */

+import * as fs from 'fs';
 import fetch, { Response } from 'node-fetch';
 import { fetchArtifactVersions } from './fetch_artifact_versions';
 import { getArtifactName, DocumentationProduct, ProductName } from '@kbn/product-doc-common';

 jest.mock('node-fetch');
+jest.mock('fs');

 const fetchMock = fetch as jest.MockedFn<typeof fetch>;

 const createResponse = ({
@@ -41,6 +44,7 @@ const createResponse = ({
 };

 const artifactRepositoryUrl = 'https://lost.com';
+const localArtifactRepositoryUrl = 'file://usr/local/local_artifacts';

 const expectVersions = (
   versions: Partial<Record<ProductName, string[]>>
@@ -58,6 +62,7 @@ const expectVersions = (
 describe('fetchArtifactVersions', () => {
   beforeEach(() => {
     fetchMock.mockReset();
+    jest.clearAllMocks();
   });

   const mockResponse = (responseText: string) => {
@@ -67,6 +72,13 @@ describe('fetchArtifactVersions', () => {
     fetchMock.mockResolvedValue(response as Response);
   };

+  const mockFileResponse = (responseText: string) => {
+    const mockData = Buffer.from(responseText);
+    (fs.readFile as unknown as jest.Mock).mockImplementation((path, callback) => {
+      callback(null, mockData);
+    });
+  };
+
   it('calls fetch with the right parameters', async () => {
     mockResponse(createResponse({ artifactNames: [] }));

@@ -76,6 +88,56 @@ describe('fetchArtifactVersions', () => {
     expect(fetchMock).toHaveBeenCalledWith(`${artifactRepositoryUrl}?max-keys=1000`);
   });

+  it('parses the local file', async () => {
+    const artifactNames = [
+      getArtifactName({ productName: 'kibana', productVersion: '8.16' }),
+      getArtifactName({ productName: 'elasticsearch', productVersion: '8.16' }),
+    ];
+    mockFileResponse(createResponse({ artifactNames }));
+
+    const result = await fetchArtifactVersions({
+      artifactRepositoryUrl: localArtifactRepositoryUrl,
+    });
+
+    expect(fs.readFile as unknown as jest.Mock).toHaveBeenCalledWith(
+      '/local/local_artifacts/index.xml',
+      expect.any(Function)
+    );
+
+    expect(result).toEqual({
+      elasticsearch: ['8.16'],
+      kibana: ['8.16'],
+      observability: [],
+      security: [],
+    });
+  });
+
+  it('supports win32 env', async () => {
+    const artifactNames = [
+      getArtifactName({ productName: 'kibana', productVersion: '8.16' }),
+      getArtifactName({ productName: 'elasticsearch', productVersion: '8.16' }),
+    ];
+    mockFileResponse(createResponse({ artifactNames }));
+
+    const originalPlatform = process.platform;
+    Object.defineProperty(process, 'platform', {
+      value: 'win32',
+    });
+
+    await fetchArtifactVersions({
+      artifactRepositoryUrl: 'file:///C:/path/local_artifacts',
+    });
+
+    expect(fs.readFile as unknown as jest.Mock).toHaveBeenCalledWith(
+      'C:/path/local_artifacts/index.xml',
+      expect.any(Function)
+    );
+
+    Object.defineProperty(process, 'platform', {
+      value: originalPlatform,
+    });
+  });
+
   it('returns the list of versions from the repository', async () => {
     const artifactNames = [
       getArtifactName({ productName: 'kibana', productVersion: '8.16' }),
@@ -5,9 +5,13 @@
  * 2.0.
  */

-import { DocumentationProduct, parseArtifactName, type ProductName } from '@kbn/product-doc-common';
+import * as fs from 'fs';
 import fetch from 'node-fetch';
+import Path from 'path';
+import { URL } from 'url';
 import { parseString } from 'xml2js';
+import { type ProductName, DocumentationProduct, parseArtifactName } from '@kbn/product-doc-common';
+import { resolveLocalArtifactsPath } from '../utils/local_artifacts';

 type ArtifactAvailableVersions = Record<ProductName, string[]>;

@@ -16,8 +20,17 @@ export const fetchArtifactVersions = async ({
 }: {
   artifactRepositoryUrl: string;
 }): Promise<ArtifactAvailableVersions> => {
-  const res = await fetch(`${artifactRepositoryUrl}?max-keys=1000`);
-  const xml = await res.text();
+  const parsedUrl = new URL(artifactRepositoryUrl);
+
+  let xml: string;
+  if (parsedUrl.protocol === 'file:') {
+    const file = await fetchLocalFile(parsedUrl);
+    xml = file.toString();
+  } else {
+    const res = await fetch(`${artifactRepositoryUrl}?max-keys=1000`);
+    xml = await res.text();
+  }
+
   return new Promise((resolve, reject) => {
     parseString(xml, (err, result: ListBucketResponse) => {
       if (err) {
@@ -50,6 +63,21 @@ export const fetchArtifactVersions = async ({
   });
 };

+function fetchLocalFile(parsedUrl: URL): Promise<Buffer> {
+  return new Promise((resolve, reject) => {
+    const normalizedPath = resolveLocalArtifactsPath(parsedUrl);
+    const xmlFilePath = Path.join(normalizedPath, 'index.xml');
+
+    fs.readFile(xmlFilePath, (err, data) => {
+      if (err) {
+        reject(err);
+      } else {
+        resolve(data);
+      }
+    });
+  });
+}
+
 interface ListBucketResponse {
   ListBucketResult: {
     Name?: string[];
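A rough usage sketch of `fetchArtifactVersions` with a local repository. The relative import path and the directory are illustrative; the expected return shape follows the tests earlier in this diff.

```ts
import { fetchArtifactVersions } from './fetch_artifact_versions';

async function listLocalVersions() {
  // Reads /usr/local/local_artifacts/index.xml instead of issuing an HTTP request.
  const versions = await fetchArtifactVersions({
    artifactRepositoryUrl: 'file:///usr/local/local_artifacts',
  });
  // e.g. { kibana: ['8.16'], elasticsearch: ['8.16'], observability: [], security: [] }
  return versions;
}
```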
@@ -0,0 +1,86 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { createReadStream } from 'fs';
+import { mkdir } from 'fs/promises';
+import fetch from 'node-fetch';
+import { downloadToDisk } from './download';
+
+jest.mock('fs', () => ({
+  createReadStream: jest.fn().mockReturnValue({
+    on: jest.fn(),
+    pipe: jest.fn(),
+  }),
+  createWriteStream: jest.fn(() => ({
+    on: jest.fn((event, callback) => {
+      if (event === 'finish') {
+        callback();
+      }
+    }),
+    pipe: jest.fn(),
+  })),
+}));
+
+jest.mock('fs/promises', () => ({
+  mkdir: jest.fn(),
+}));
+
+jest.mock('node-fetch', () => jest.fn());
+
+describe('downloadToDisk', () => {
+  const mockFileUrl = 'http://example.com/file.txt';
+  const mockFilePath = '/path/to/file.txt';
+  const mockDirPath = '/path/to';
+  const mockLocalPath = '/local/path/to/file.txt';
+
+  beforeEach(() => {
+    jest.clearAllMocks();
+  });
+
+  it('should create the directory if it does not exist', async () => {
+    (fetch as unknown as jest.Mock).mockResolvedValue({
+      body: {
+        pipe: jest.fn(),
+        on: jest.fn(),
+      },
+    });
+
+    await downloadToDisk(mockFileUrl, mockFilePath);
+
+    expect(mkdir).toHaveBeenCalledWith(mockDirPath, { recursive: true });
+  });
+
+  it('should download a file from a remote URL', async () => {
+    const mockResponseBody = {
+      pipe: jest.fn(),
+      on: jest.fn((event, callback) => {}),
+    };
+
+    (fetch as unknown as jest.Mock).mockResolvedValue({
+      body: mockResponseBody,
+    });
+
+    await downloadToDisk(mockFileUrl, mockFilePath);
+
+    expect(fetch).toHaveBeenCalledWith(mockFileUrl);
+  });
+
+  it('should copy a file from a local file URL', async () => {
+    const mockLocalFileUrl = 'file:///local/path/to/file.txt';
+
+    await downloadToDisk(mockLocalFileUrl, mockFilePath);
+
+    expect(createReadStream).toHaveBeenCalledWith(mockLocalPath);
+  });
+
+  it('should handle errors during the download process', async () => {
+    const mockError = new Error('Download failed');
+    (fetch as unknown as jest.Mock).mockRejectedValue(mockError);
+
+    await expect(downloadToDisk(mockFileUrl, mockFilePath)).rejects.toThrow('Download failed');
+  });
+});
@@ -5,19 +5,32 @@
  * 2.0.
  */

-import { createWriteStream } from 'fs';
+import { type ReadStream, createReadStream, createWriteStream } from 'fs';
 import { mkdir } from 'fs/promises';
 import Path from 'path';
 import fetch from 'node-fetch';
+import { resolveLocalArtifactsPath } from './local_artifacts';

 export const downloadToDisk = async (fileUrl: string, filePath: string) => {
   const dirPath = Path.dirname(filePath);
   await mkdir(dirPath, { recursive: true });
-  const res = await fetch(fileUrl);
-  const fileStream = createWriteStream(filePath);
+  const writeStream = createWriteStream(filePath);
+  let readStream: ReadStream | NodeJS.ReadableStream;
+
+  const parsedUrl = new URL(fileUrl);
+
+  if (parsedUrl.protocol === 'file:') {
+    const path = resolveLocalArtifactsPath(parsedUrl);
+    readStream = createReadStream(path);
+  } else {
+    const res = await fetch(fileUrl);
+
+    readStream = res.body;
+  }
+
   await new Promise((resolve, reject) => {
-    res.body.pipe(fileStream);
-    res.body.on('error', reject);
-    fileStream.on('finish', resolve);
+    readStream.pipe(writeStream);
+    readStream.on('error', reject);
+    writeStream.on('finish', resolve);
   });
 };
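A hedged usage sketch of `downloadToDisk`: the URLs, artifact names, and target paths below are illustrative, not actual artifact locations.

```ts
import { downloadToDisk } from './download';

async function fetchArtifacts() {
  // Remote source: the response body is streamed into the target file via node-fetch.
  await downloadToDisk(
    'https://kibana-knowledge-base-artifacts.elastic.co/artifact.zip',
    'data/ai-kb-artifacts/artifact.zip'
  );

  // Local source: the file is copied with a read stream; no HTTP request is made.
  await downloadToDisk(
    'file:///usr/local/local_artifacts/artifact.zip',
    'data/ai-kb-artifacts/artifact.zip'
  );
}
```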
@@ -0,0 +1,18 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+/** Resolve a path to the artifacts folder */
+export function resolveLocalArtifactsPath(parsedUrl: URL): string {
+  if (parsedUrl.protocol !== 'file:') {
+    throw new Error(`Expected file URL, got ${parsedUrl.protocol}`);
+  }
+  const filePath = parsedUrl.pathname;
+  // On Windows, remove leading "/" (e.g., file:///C:/path should be C:/path)
+  const normalizedPath =
+    process.platform === 'win32' && filePath.startsWith('/') ? filePath.substring(1) : filePath;
+  return normalizedPath;
+}
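A behavior sketch for `resolveLocalArtifactsPath`, mirroring the tests earlier in this diff; the directories are illustrative.

```ts
import { URL } from 'url';
import { resolveLocalArtifactsPath } from './local_artifacts';

// Three slashes: empty host, so the pathname is used as-is on POSIX platforms.
resolveLocalArtifactsPath(new URL('file:///usr/local/local_artifacts'));
// => '/usr/local/local_artifacts'

// Two slashes: 'usr' is parsed as the URL host and dropped, since only pathname is read.
resolveLocalArtifactsPath(new URL('file://usr/local/local_artifacts'));
// => '/local/local_artifacts'

// Windows drive paths: the leading '/' is stripped when process.platform === 'win32'.
resolveLocalArtifactsPath(new URL('file:///C:/path/local_artifacts'));
// => 'C:/path/local_artifacts' on win32, '/C:/path/local_artifacts' elsewhere
```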