Introduce Elasticsearch service. (#28344)

Aleh Zasypkin 2019-02-28 17:22:07 +02:00 committed by GitHub
parent b8c9d0afa0
commit 0835cd30ca
109 changed files with 3415 additions and 1957 deletions

View file

@ -48,7 +48,7 @@ function numberToDuration(numberMs: number) {
return momentDuration(numberMs);
}
export function ensureDuration(value?: Duration | string | number) {
export function ensureDuration(value: Duration | string | number) {
if (typeof value === 'string') {
return stringToDuration(value);
}

View file

@ -48,6 +48,8 @@ import {
TypeOf,
TypeOptions,
UnionType,
URIOptions,
URIType,
} from './types';
export { ObjectType, TypeOf, Type };
@ -65,6 +67,10 @@ function string(options?: StringOptions): Type<string> {
return new StringType(options);
}
function uri(options?: URIOptions): Type<string> {
return new URIType(options);
}
function literal<T extends string | number | boolean>(value: T): Type<T> {
return new LiteralType(value);
}
@ -188,6 +194,7 @@ export const schema = {
recordOf,
siblingRef,
string,
uri,
};
export type Schema = typeof schema;
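
For context, here is a minimal sketch of how a consumer could use the newly registered `uri` type. It is illustration only (not part of this commit) and the config keys are invented:

import { schema, TypeOf } from '@kbn/config-schema';

// Hypothetical config schema built with the new `schema.uri` type.
const exampleConfig = schema.object({
  // Accepts any RFC 3986 compliant URI.
  callbackUrl: schema.uri(),
  // Restricts allowed schemes and provides a default value.
  elasticsearchUrl: schema.uri({
    scheme: ['http', 'https'],
    defaultValue: 'http://localhost:9200',
  }),
});

type ExampleConfig = TypeOf<typeof exampleConfig>;

// `validate` returns the value on success and throws a descriptive error otherwise.
const parsed: ExampleConfig = exampleConfig.validate({ callbackUrl: 'urn:elastic:kibana' });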

View file

@ -0,0 +1,25 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`#scheme returns error when URI has wrong scheme 1`] = `"expected URI with scheme [http|https] but got [ftp://elastic.co]."`;
exports[`#scheme returns error when URI has wrong scheme 2`] = `"expected URI with scheme [http|https] but got [file:///kibana.log]."`;
exports[`#validate throws when returns string 1`] = `"validator failure"`;
exports[`is required by default 1`] = `"expected value of type [string] but got [undefined]."`;
exports[`returns error when not string 1`] = `"expected value of type [string] but got [number]."`;
exports[`returns error when not string 2`] = `"expected value of type [string] but got [Array]."`;
exports[`returns error when not string 3`] = `"expected value of type [string] but got [RegExp]."`;
exports[`returns error when value is not a URI 1`] = `"value is [3domain.local] but it must be a valid URI (see RFC 3986)."`;
exports[`returns error when value is not a URI 2`] = `"value is [http://8010:0:0:0:9:500:300C:200A] but it must be a valid URI (see RFC 3986)."`;
exports[`returns error when value is not a URI 3`] = `"value is [-] but it must be a valid URI (see RFC 3986)."`;
exports[`returns error when value is not a URI 4`] = `"value is [https://example.com?baz[]=foo&baz[]=bar] but it must be a valid URI (see RFC 3986)."`;
exports[`returns error when value is not a URI 5`] = `"value is [http://aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa] but it must be a valid URI (see RFC 3986)."`;

View file

@ -20,7 +20,7 @@
import { duration as momentDuration } from 'moment';
import { schema } from '..';
const { duration } = schema;
const { duration, object, contextRef, siblingRef } = schema;
test('returns value by default', () => {
expect(duration().validate('123s')).toMatchSnapshot();
@ -58,6 +58,70 @@ describe('#defaultValue', () => {
}).validate(undefined)
).toMatchSnapshot();
});
test('can be a function that returns compatible type', () => {
expect(
duration({
defaultValue: () => 600,
}).validate(undefined)
).toMatchInlineSnapshot(`"PT0.6S"`);
expect(
duration({
defaultValue: () => '1h',
}).validate(undefined)
).toMatchInlineSnapshot(`"PT1H"`);
expect(
duration({
defaultValue: () => momentDuration(1, 'hour'),
}).validate(undefined)
).toMatchInlineSnapshot(`"PT1H"`);
});
test('can be a reference to a moment.Duration', () => {
expect(
object({
source: duration({ defaultValue: 600 }),
target: duration({ defaultValue: siblingRef('source') }),
fromContext: duration({ defaultValue: contextRef('val') }),
}).validate(undefined, { val: momentDuration(700, 'ms') })
).toMatchInlineSnapshot(`
Object {
"fromContext": "PT0.7S",
"source": "PT0.6S",
"target": "PT0.6S",
}
`);
expect(
object({
source: duration({ defaultValue: '1h' }),
target: duration({ defaultValue: siblingRef('source') }),
fromContext: duration({ defaultValue: contextRef('val') }),
}).validate(undefined, { val: momentDuration(2, 'hour') })
).toMatchInlineSnapshot(`
Object {
"fromContext": "PT2H",
"source": "PT1H",
"target": "PT1H",
}
`);
expect(
object({
source: duration({ defaultValue: momentDuration(1, 'hour') }),
target: duration({ defaultValue: siblingRef('source') }),
fromContext: duration({ defaultValue: contextRef('val') }),
}).validate(undefined, { val: momentDuration(2, 'hour') })
).toMatchInlineSnapshot(`
Object {
"fromContext": "PT2H",
"source": "PT1H",
"target": "PT1H",
}
`);
});
});
test('returns error when not string or non-safe positive integer', () => {

View file

@ -21,20 +21,33 @@ import typeDetect from 'type-detect';
import { Duration, ensureDuration } from '../duration';
import { SchemaTypeError } from '../errors';
import { internals } from '../internals';
import { Reference } from '../references';
import { Type } from './type';
type DurationValueType = Duration | string | number;
export interface DurationOptions {
// we need to special-case defaultValue as we want to handle string inputs too
defaultValue?: DurationValueType | Reference<DurationValueType> | (() => DurationValueType);
validate?: (value: Duration) => string | void;
defaultValue?: Duration | string | number;
}
export class DurationType extends Type<Duration> {
constructor(options: DurationOptions = {}) {
super(internals.duration(), {
...options,
defaultValue: ensureDuration(options.defaultValue),
});
let defaultValue;
if (typeof options.defaultValue === 'function') {
const originalDefaultValue = options.defaultValue;
defaultValue = () => ensureDuration(originalDefaultValue());
} else if (
typeof options.defaultValue === 'string' ||
typeof options.defaultValue === 'number'
) {
defaultValue = ensureDuration(options.defaultValue);
} else {
defaultValue = options.defaultValue;
}
super(internals.duration(), { ...options, defaultValue });
}
protected handleError(type: string, { message, value }: Record<string, any>, path: string[]) {
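
The practical payoff of the more flexible `defaultValue` handling above is that duration defaults can now be expressed as factories or references to other config values (the Elasticsearch config below relies on exactly this for `pingTimeout`). A brief sketch, illustration only and not part of the commit; the key names other than `requestTimeout`/`pingTimeout` are invented:

import { schema } from '@kbn/config-schema';
import { duration as momentDuration } from 'moment';

// `defaultValue` may now be a number of milliseconds, a duration string,
// a moment.Duration, a factory function, or a reference to a sibling value.
const timeouts = schema.object({
  requestTimeout: schema.duration({ defaultValue: '30s' }),
  // Defaults to whatever `requestTimeout` resolves to.
  pingTimeout: schema.duration({ defaultValue: schema.siblingRef('requestTimeout') }),
  // Defaults to a lazily computed moment.Duration.
  idleTimeout: schema.duration({ defaultValue: () => momentDuration(5, 'minutes') }),
});

// All three durations receive their defaults when the value is absent.
timeouts.validate({});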

View file

@ -32,3 +32,4 @@ export { ObjectType, Props, TypeOf } from './object_type';
export { RecordOfOptions, RecordOfType } from './record_type';
export { StringOptions, StringType } from './string_type';
export { UnionType } from './union_type';
export { URIOptions, URIType } from './uri_type';

View file

@ -0,0 +1,144 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { schema } from '..';
test('is required by default', () => {
expect(() => schema.uri().validate(undefined)).toThrowErrorMatchingSnapshot();
});
test('returns value for valid URI as per RFC3986', () => {
const uriSchema = schema.uri();
expect(uriSchema.validate('http://tools.ietf.org/html/rfc3986')).toBe(
'http://tools.ietf.org/html/rfc3986'
);
expect(uriSchema.validate('udp://3domain.local')).toBe('udp://3domain.local');
expect(uriSchema.validate('urn:elastic:kibana')).toBe('urn:elastic:kibana');
expect(uriSchema.validate('ftp://ftp.ietf.org/rfc/rfc3986.txt')).toBe(
'ftp://ftp.ietf.org/rfc/rfc3986.txt'
);
expect(uriSchema.validate('mailto:Platform.Kibana@elastic.co')).toBe(
'mailto:Platform.Kibana@elastic.co'
);
expect(uriSchema.validate('tel:+500-111-222-333')).toBe('tel:+500-111-222-333');
expect(uriSchema.validate('file:///kibana.log')).toBe('file:///kibana.log');
expect(uriSchema.validate('http://elastic@localhost:9200')).toBe('http://elastic@localhost:9200');
expect(uriSchema.validate('http://elastic:changeme@localhost:9200')).toBe(
'http://elastic:changeme@localhost:9200'
);
expect(uriSchema.validate('ldap://[2001:db8::7]/c=GB?objectClass?one')).toBe(
'ldap://[2001:db8::7]/c=GB?objectClass?one'
);
const uriWithMaxAllowedLength = `http://${'a'.repeat(255)}`;
expect(uriSchema.validate(uriWithMaxAllowedLength)).toBe(uriWithMaxAllowedLength);
});
test('returns error when value is not a URI', () => {
const uriSchema = schema.uri();
expect(() => uriSchema.validate('3domain.local')).toThrowErrorMatchingSnapshot();
expect(() =>
uriSchema.validate('http://8010:0:0:0:9:500:300C:200A')
).toThrowErrorMatchingSnapshot();
expect(() => uriSchema.validate('-')).toThrowErrorMatchingSnapshot();
expect(() =>
uriSchema.validate('https://example.com?baz[]=foo&baz[]=bar')
).toThrowErrorMatchingSnapshot();
const tooLongUri = `http://${'a'.repeat(256)}`;
expect(() => uriSchema.validate(tooLongUri)).toThrowErrorMatchingSnapshot();
});
describe('#scheme', () => {
test('returns value when URI has required scheme', () => {
const uriSchema = schema.uri({ scheme: ['http', 'https'] });
expect(uriSchema.validate('http://elastic.co')).toBe('http://elastic.co');
expect(uriSchema.validate('https://elastic.co')).toBe('https://elastic.co');
});
test('returns error when URI has wrong scheme', () => {
const uriSchema = schema.uri({ scheme: ['http', 'https'] });
expect(() => uriSchema.validate('ftp://elastic.co')).toThrowErrorMatchingSnapshot();
expect(() => uriSchema.validate('file:///kibana.log')).toThrowErrorMatchingSnapshot();
});
});
describe('#defaultValue', () => {
test('returns default when URI is undefined', () => {
expect(schema.uri({ defaultValue: 'http://localhost:9200' }).validate(undefined)).toBe(
'http://localhost:9200'
);
});
test('returns value when specified', () => {
expect(
schema.uri({ defaultValue: 'http://localhost:9200' }).validate('http://kibana.local')
).toBe('http://kibana.local');
});
test('returns value from context when context reference is specified', () => {
expect(
schema.uri({ defaultValue: schema.contextRef('some_uri') }).validate(undefined, {
some_uri: 'http://kibana.local',
})
).toBe('http://kibana.local');
});
});
describe('#validate', () => {
test('is called with input value', () => {
let calledWith;
const validator = (val: any) => {
calledWith = val;
};
schema.uri({ validate: validator }).validate('http://kibana.local');
expect(calledWith).toBe('http://kibana.local');
});
test('is not called with default value if no input is provided', () => {
const validate = jest.fn();
schema.uri({ validate, defaultValue: 'http://kibana.local' }).validate(undefined);
expect(validate).not.toHaveBeenCalled();
});
test('throws when returns string', () => {
const validate = () => 'validator failure';
expect(() =>
schema.uri({ validate }).validate('http://kibana.local')
).toThrowErrorMatchingSnapshot();
});
});
test('returns error when not string', () => {
expect(() => schema.uri().validate(123)).toThrowErrorMatchingSnapshot();
expect(() => schema.uri().validate([1, 2, 3])).toThrowErrorMatchingSnapshot();
expect(() => schema.uri().validate(/abc/)).toThrowErrorMatchingSnapshot();
});

View file

@ -0,0 +1,44 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import typeDetect from 'type-detect';
import { internals } from '../internals';
import { Type, TypeOptions } from './type';
export type URIOptions = TypeOptions<string> & {
scheme?: string | string[];
};
export class URIType extends Type<string> {
constructor(options: URIOptions = {}) {
super(internals.string().uri({ scheme: options.scheme }), options);
}
protected handleError(type: string, { value, scheme }: Record<string, unknown>) {
switch (type) {
case 'any.required':
case 'string.base':
return `expected value of type [string] but got [${typeDetect(value)}].`;
case 'string.uriCustomScheme':
return `expected URI with scheme [${scheme}] but got [${value}].`;
case 'string.uri':
return `value is [${value}] but it must be a valid URI (see RFC 3986).`;
}
}
}

View file

@ -0,0 +1,374 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { ElasticsearchConfig } from './elasticsearch_config';
const MockClient = jest.fn();
jest.mock('elasticsearch', () => ({
// Jest types don't include `requireActual` right now.
...(jest as any).requireActual('elasticsearch'),
Client: MockClient,
}));
const MockScopedClusterClient = jest.fn();
jest.mock('./scoped_cluster_client', () => ({
ScopedClusterClient: MockScopedClusterClient,
}));
const mockParseElasticsearchClientConfig = jest.fn();
jest.mock('./elasticsearch_client_config', () => ({
parseElasticsearchClientConfig: mockParseElasticsearchClientConfig,
}));
import { errors } from 'elasticsearch';
import { get } from 'lodash';
import { Logger } from '../logging';
import { logger } from '../logging/__mocks__';
import { ClusterClient } from './cluster_client';
afterEach(() => jest.clearAllMocks());
test('#constructor creates client with parsed config', () => {
const mockEsClientConfig = { apiVersion: 'es-client-master' };
mockParseElasticsearchClientConfig.mockReturnValue(mockEsClientConfig);
const mockEsConfig = { apiVersion: 'es-version' } as any;
const mockLogger = logger.get();
const clusterClient = new ClusterClient(mockEsConfig, mockLogger);
expect(clusterClient).toBeDefined();
expect(mockParseElasticsearchClientConfig).toHaveBeenCalledTimes(1);
expect(mockParseElasticsearchClientConfig).toHaveBeenLastCalledWith(mockEsConfig, mockLogger);
expect(MockClient).toHaveBeenCalledTimes(1);
expect(MockClient).toHaveBeenCalledWith(mockEsClientConfig);
});
describe('#callAsInternalUser', () => {
let mockEsClientInstance: {
close: jest.Mock;
ping: jest.Mock;
security: { authenticate: jest.Mock };
};
let clusterClient: ClusterClient;
beforeEach(() => {
mockEsClientInstance = {
close: jest.fn(),
ping: jest.fn(),
security: { authenticate: jest.fn() },
};
MockClient.mockImplementation(() => mockEsClientInstance);
clusterClient = new ClusterClient({ apiVersion: 'es-version' } as any, logger.get());
});
test('fails if cluster client is closed', async () => {
clusterClient.close();
await expect(
clusterClient.callAsInternalUser('ping', {})
).rejects.toThrowErrorMatchingInlineSnapshot(
`"Cluster client cannot be used after it has been closed."`
);
});
test('fails if endpoint is invalid', async () => {
await expect(
clusterClient.callAsInternalUser('pong', {})
).rejects.toThrowErrorMatchingInlineSnapshot(`"called with an invalid endpoint: pong"`);
});
test('correctly deals with top level endpoint', async () => {
const mockResponse = { data: 'ping' };
const mockParams = { param: 'ping' };
mockEsClientInstance.ping.mockImplementation(function mockCall(this: any) {
return Promise.resolve({
context: this,
response: mockResponse,
});
});
const mockResult = await clusterClient.callAsInternalUser('ping', mockParams);
expect(mockResult.response).toBe(mockResponse);
expect(mockResult.context).toBe(mockEsClientInstance);
expect(mockEsClientInstance.ping).toHaveBeenCalledTimes(1);
expect(mockEsClientInstance.ping).toHaveBeenLastCalledWith(mockParams);
});
test('correctly deals with nested endpoint', async () => {
const mockResponse = { data: 'authenticate' };
const mockParams = { param: 'authenticate' };
mockEsClientInstance.security.authenticate.mockImplementation(function mockCall(this: any) {
return Promise.resolve({
context: this,
response: mockResponse,
});
});
const mockResult = await clusterClient.callAsInternalUser('security.authenticate', mockParams);
expect(mockResult.response).toBe(mockResponse);
expect(mockResult.context).toBe(mockEsClientInstance.security);
expect(mockEsClientInstance.security.authenticate).toHaveBeenCalledTimes(1);
expect(mockEsClientInstance.security.authenticate).toHaveBeenLastCalledWith(mockParams);
});
test('does not wrap errors if `wrap401Errors` is not set', async () => {
const mockError = { message: 'some error' };
mockEsClientInstance.ping.mockRejectedValue(mockError);
await expect(
clusterClient.callAsInternalUser('ping', undefined, { wrap401Errors: false })
).rejects.toBe(mockError);
const mockAuthenticationError = { message: 'authentication error', statusCode: 401 };
mockEsClientInstance.ping.mockRejectedValue(mockAuthenticationError);
await expect(
clusterClient.callAsInternalUser('ping', undefined, { wrap401Errors: false })
).rejects.toBe(mockAuthenticationError);
});
test('wraps only 401 errors by default or when `wrap401Errors` is set', async () => {
const mockError = { message: 'some error' };
mockEsClientInstance.ping.mockRejectedValue(mockError);
await expect(clusterClient.callAsInternalUser('ping')).rejects.toBe(mockError);
await expect(
clusterClient.callAsInternalUser('ping', undefined, { wrap401Errors: true })
).rejects.toBe(mockError);
const mockAuthorizationError = { message: 'authentication error', statusCode: 403 };
mockEsClientInstance.ping.mockRejectedValue(mockAuthorizationError);
await expect(clusterClient.callAsInternalUser('ping')).rejects.toBe(mockAuthorizationError);
await expect(
clusterClient.callAsInternalUser('ping', undefined, { wrap401Errors: true })
).rejects.toBe(mockAuthorizationError);
const mockAuthenticationError = new (errors.AuthenticationException as any)(
'Authentication Exception',
{ statusCode: 401 }
);
mockEsClientInstance.ping.mockRejectedValue(mockAuthenticationError);
await expect(clusterClient.callAsInternalUser('ping')).rejects.toBe(mockAuthenticationError);
await expect(
clusterClient.callAsInternalUser('ping', undefined, { wrap401Errors: true })
).rejects.toStrictEqual(mockAuthenticationError);
});
test('does not override WWW-Authenticate if returned by Elasticsearch', async () => {
const mockAuthenticationError = new (errors.AuthenticationException as any)(
'Authentication Exception',
{ statusCode: 401 }
);
const mockAuthenticationErrorWithHeader = new (errors.AuthenticationException as any)(
'Authentication Exception',
{
body: { error: { header: { 'WWW-Authenticate': 'some custom header' } } },
statusCode: 401,
}
);
mockEsClientInstance.ping
.mockRejectedValueOnce(mockAuthenticationError)
.mockRejectedValueOnce(mockAuthenticationErrorWithHeader);
await expect(clusterClient.callAsInternalUser('ping')).rejects.toBe(mockAuthenticationError);
expect(get(mockAuthenticationError, 'output.headers.WWW-Authenticate')).toBe(
'Basic realm="Authorization Required"'
);
await expect(clusterClient.callAsInternalUser('ping')).rejects.toBe(
mockAuthenticationErrorWithHeader
);
expect(get(mockAuthenticationErrorWithHeader, 'output.headers.WWW-Authenticate')).toBe(
'some custom header'
);
});
});
describe('#asScoped', () => {
let mockEsClientInstance: { ping: jest.Mock; close: jest.Mock };
let mockScopedEsClientInstance: { ping: jest.Mock; close: jest.Mock };
let clusterClient: ClusterClient;
let mockLogger: Logger;
let mockEsConfig: ElasticsearchConfig;
beforeEach(() => {
mockEsClientInstance = { ping: jest.fn(), close: jest.fn() };
mockScopedEsClientInstance = { ping: jest.fn(), close: jest.fn() };
MockClient.mockImplementationOnce(() => mockEsClientInstance).mockImplementationOnce(
() => mockScopedEsClientInstance
);
mockLogger = logger.get();
mockEsConfig = {
apiVersion: 'es-version',
requestHeadersWhitelist: ['one', 'two'],
} as any;
clusterClient = new ClusterClient(mockEsConfig, mockLogger);
jest.clearAllMocks();
});
test('creates additional Elasticsearch client only once', () => {
const firstScopedClusterClient = clusterClient.asScoped({ headers: { one: '1' } });
expect(firstScopedClusterClient).toBeDefined();
expect(mockParseElasticsearchClientConfig).toHaveBeenCalledTimes(1);
expect(mockParseElasticsearchClientConfig).toHaveBeenLastCalledWith(mockEsConfig, mockLogger, {
auth: false,
ignoreCertAndKey: true,
});
expect(MockClient).toHaveBeenCalledTimes(1);
expect(MockClient).toHaveBeenCalledWith(
mockParseElasticsearchClientConfig.mock.results[0].value
);
jest.clearAllMocks();
const secondScopedClusterClient = clusterClient.asScoped({ headers: { two: '2' } });
expect(secondScopedClusterClient).toBeDefined();
expect(secondScopedClusterClient).not.toBe(firstScopedClusterClient);
expect(mockParseElasticsearchClientConfig).not.toHaveBeenCalled();
expect(MockClient).not.toHaveBeenCalled();
});
test('properly configures `ignoreCertAndKey` for various configurations', () => {
// Config without SSL.
clusterClient = new ClusterClient(mockEsConfig, mockLogger);
mockParseElasticsearchClientConfig.mockClear();
clusterClient.asScoped({ headers: { one: '1' } });
expect(mockParseElasticsearchClientConfig).toHaveBeenCalledTimes(1);
expect(mockParseElasticsearchClientConfig).toHaveBeenLastCalledWith(mockEsConfig, mockLogger, {
auth: false,
ignoreCertAndKey: true,
});
// Config ssl.alwaysPresentCertificate === false
mockEsConfig = { ...mockEsConfig, ssl: { alwaysPresentCertificate: false } } as any;
clusterClient = new ClusterClient(mockEsConfig, mockLogger);
mockParseElasticsearchClientConfig.mockClear();
clusterClient.asScoped({ headers: { one: '1' } });
expect(mockParseElasticsearchClientConfig).toHaveBeenCalledTimes(1);
expect(mockParseElasticsearchClientConfig).toHaveBeenLastCalledWith(mockEsConfig, mockLogger, {
auth: false,
ignoreCertAndKey: true,
});
// Config ssl.alwaysPresentCertificate === true
mockEsConfig = { ...mockEsConfig, ssl: { alwaysPresentCertificate: true } } as any;
clusterClient = new ClusterClient(mockEsConfig, mockLogger);
mockParseElasticsearchClientConfig.mockClear();
clusterClient.asScoped({ headers: { one: '1' } });
expect(mockParseElasticsearchClientConfig).toHaveBeenCalledTimes(1);
expect(mockParseElasticsearchClientConfig).toHaveBeenLastCalledWith(mockEsConfig, mockLogger, {
auth: false,
ignoreCertAndKey: false,
});
});
test('passes only filtered headers to the scoped cluster client', () => {
clusterClient.asScoped({ headers: { zero: '0', one: '1', two: '2', three: '3' } });
expect(MockScopedClusterClient).toHaveBeenCalledTimes(1);
expect(MockScopedClusterClient).toHaveBeenCalledWith(
expect.any(Function),
expect.any(Function),
{ one: '1', two: '2' }
);
});
test('both scoped and internal API caller fail if cluster client is closed', async () => {
clusterClient.asScoped({ headers: { zero: '0', one: '1', two: '2', three: '3' } });
clusterClient.close();
const [[internalAPICaller, scopedAPICaller]] = MockScopedClusterClient.mock.calls;
await expect(internalAPICaller('ping')).rejects.toThrowErrorMatchingInlineSnapshot(
`"Cluster client cannot be used after it has been closed."`
);
await expect(scopedAPICaller('ping', {})).rejects.toThrowErrorMatchingInlineSnapshot(
`"Cluster client cannot be used after it has been closed."`
);
});
});
describe('#close', () => {
let mockEsClientInstance: { close: jest.Mock };
let mockScopedEsClientInstance: { close: jest.Mock };
let clusterClient: ClusterClient;
beforeEach(() => {
mockEsClientInstance = { close: jest.fn() };
mockScopedEsClientInstance = { close: jest.fn() };
MockClient.mockImplementationOnce(() => mockEsClientInstance).mockImplementationOnce(
() => mockScopedEsClientInstance
);
clusterClient = new ClusterClient(
{ apiVersion: 'es-version', requestHeadersWhitelist: [] } as any,
logger.get()
);
});
test('closes underlying Elasticsearch client', () => {
expect(mockEsClientInstance.close).not.toHaveBeenCalled();
clusterClient.close();
expect(mockEsClientInstance.close).toHaveBeenCalledTimes(1);
});
test('closes both internal and scoped underlying Elasticsearch clients', () => {
clusterClient.asScoped({ headers: { one: '1' } });
expect(mockEsClientInstance.close).not.toHaveBeenCalled();
expect(mockScopedEsClientInstance.close).not.toHaveBeenCalled();
clusterClient.close();
expect(mockEsClientInstance.close).toHaveBeenCalledTimes(1);
expect(mockScopedEsClientInstance.close).toHaveBeenCalledTimes(1);
});
test('does not call close on already closed client', () => {
clusterClient.asScoped({ headers: { one: '1' } });
clusterClient.close();
mockEsClientInstance.close.mockClear();
mockScopedEsClientInstance.close.mockClear();
clusterClient.close();
expect(mockEsClientInstance.close).not.toHaveBeenCalled();
expect(mockScopedEsClientInstance.close).not.toHaveBeenCalled();
});
});

View file

@ -0,0 +1,196 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import Boom from 'boom';
import { Client } from 'elasticsearch';
import { get } from 'lodash';
import { filterHeaders, Headers } from '../http/router';
import { Logger } from '../logging';
import {
ElasticsearchClientConfig,
parseElasticsearchClientConfig,
} from './elasticsearch_client_config';
import { ScopedClusterClient } from './scoped_cluster_client';
/**
* The set of options that defines how the API call should be made and how the
* result should be processed.
*/
export interface CallAPIOptions {
/**
* Indicates whether `401 Unauthorized` errors returned from the Elasticsearch API
* should be wrapped into `Boom` error instances with a properly set `WWW-Authenticate`
* header that could have been returned by the API itself. If the API didn't specify one,
* then `Basic realm="Authorization Required"` is used as the `WWW-Authenticate` value.
*/
wrap401Errors: boolean;
}
/**
* Calls the Elasticsearch API endpoint with the specified parameters.
* @param client Raw Elasticsearch JS client instance to use.
* @param endpoint Name of the API endpoint to call.
* @param clientParams Parameters that will be directly passed to the
* Elasticsearch JS client.
* @param options Options that affect the way we call the API and process the result.
*/
async function callAPI(
client: Client,
endpoint: string,
clientParams: Record<string, unknown> = {},
options: CallAPIOptions = { wrap401Errors: true }
) {
const clientPath = endpoint.split('.');
const api: any = get(client, clientPath);
if (!api) {
throw new Error(`called with an invalid endpoint: ${endpoint}`);
}
const apiContext = clientPath.length === 1 ? client : get(client, clientPath.slice(0, -1));
try {
return await api.call(apiContext, clientParams);
} catch (err) {
if (!options.wrap401Errors || err.statusCode !== 401) {
throw err;
}
const boomError = Boom.boomify(err, { statusCode: err.statusCode });
const wwwAuthHeader: string = get(err, 'body.error.header[WWW-Authenticate]');
boomError.output.headers['WWW-Authenticate'] =
wwwAuthHeader || 'Basic realm="Authorization Required"';
throw boomError;
}
}
/**
* Represents an Elasticsearch cluster API client and allows calling the API on behalf
* of the internal Kibana user or of the actual user that is derived from the request
* headers (via `asScoped(...)`).
*/
export class ClusterClient {
/**
* Raw Elasticsearch JS client that acts on behalf of the Kibana internal user.
*/
private readonly client: Client;
/**
* Optional raw Elasticsearch JS client that is shared between all the scoped clients created
* from this cluster client.
*/
private scopedClient?: Client;
/**
* Indicates whether this cluster client (and all internal raw Elasticsearch JS clients) has been closed.
*/
private isClosed = false;
constructor(private readonly config: ElasticsearchClientConfig, private readonly log: Logger) {
this.client = new Client(parseElasticsearchClientConfig(config, log));
}
/**
* Calls specified {@param endpoint} with provided {@param clientParams} on behalf of the
* Kibana internal user.
* @param endpoint String descriptor of the endpoint e.g. `cluster.getSettings` or `ping`.
* @param clientParams A dictionary of parameters that will be passed directly to the Elasticsearch JS client.
* @param options Options that affect the way we call the API and process the result.
*/
public callAsInternalUser = async (
endpoint: string,
clientParams: Record<string, unknown> = {},
options?: CallAPIOptions
) => {
this.assertIsNotClosed();
return await callAPI(this.client, endpoint, clientParams, options);
};
/**
* Closes the cluster client. After that client cannot be used and one should
* create a new client instance to be able to interact with Elasticsearch API.
*/
public close() {
if (this.isClosed) {
return;
}
this.isClosed = true;
this.client.close();
if (this.scopedClient !== undefined) {
this.scopedClient.close();
}
}
/**
* Creates an instance of `ScopedClusterClient` based on the configuration of the
* current cluster client that exposes an additional `callAsCurrentUser` method
* scoped to the provided {@param req}. Consumers shouldn't worry about closing
* scoped client instances, these will be closed automatically as soon as the
* original cluster client isn't needed anymore and is closed.
* @param req Request the `ScopedClusterClient` instance will be scoped to.
*/
public asScoped(req: { headers?: Headers } = {}) {
// It'd be quite expensive to create and configure a client for every incoming
// request since it involves parsing the config, reading the SSL certificate and
// key files etc. Moreover, a scoped client needs two Elasticsearch JS clients at the same
// time: one to support `callAsInternalUser` and another one for `callAsCurrentUser`.
// To reduce that overhead we create one scoped client per cluster client and share it
// between all scoped client instances.
if (this.scopedClient === undefined) {
this.scopedClient = new Client(
parseElasticsearchClientConfig(this.config, this.log, {
auth: false,
ignoreCertAndKey: !this.config.ssl || !this.config.ssl.alwaysPresentCertificate,
})
);
}
const headers = req.headers
? filterHeaders(req.headers, this.config.requestHeadersWhitelist)
: req.headers;
return new ScopedClusterClient(this.callAsInternalUser, this.callAsCurrentUser, headers);
}
/**
* Calls specified {@param endpoint} with provided {@param clientParams} on behalf of the
* user that initiated the request to the Kibana server (identified via HTTP request headers).
* @param endpoint String descriptor of the endpoint e.g. `cluster.getSettings` or `ping`.
* @param clientParams A dictionary of parameters that will be passed directly to the Elasticsearch JS client.
* @param options Options that affect the way we call the API and process the result.
*/
private callAsCurrentUser = async (
endpoint: string,
clientParams: Record<string, unknown> = {},
options?: CallAPIOptions
) => {
this.assertIsNotClosed();
return await callAPI(this.scopedClient!, endpoint, clientParams, options);
};
private assertIsNotClosed() {
if (this.isClosed) {
throw new Error('Cluster client cannot be used after it has been closed.');
}
}
}
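
To illustrate the intended call pattern of `ClusterClient`, here is a hedged sketch (not part of the commit; the config is trimmed to a few fields and the logging test mock is reused for brevity):

import { ClusterClient } from './cluster_client';
import { logger } from '../logging/__mocks__';

async function example() {
  // The config shape is reduced to the fields this sketch needs.
  const clusterClient = new ClusterClient(
    {
      apiVersion: 'master',
      hosts: ['http://localhost:9200'],
      requestHeadersWhitelist: ['authorization'],
    } as any,
    logger.get()
  );

  // Calls run on behalf of the internal Kibana user; 401 errors are wrapped
  // into Boom errors unless `wrap401Errors` is explicitly set to `false`.
  await clusterClient.callAsInternalUser('ping');
  await clusterClient.callAsInternalUser('security.authenticate', {}, { wrap401Errors: false });

  // Only whitelisted headers of the incoming request reach the scoped client.
  const scopedClient = clusterClient.asScoped({
    headers: { authorization: 'Basic ...', cookie: 'not forwarded' },
  });

  // Closing the cluster client also closes the shared scoped Elasticsearch JS client.
  clusterClient.close();
  return scopedClient;
}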

View file

@ -0,0 +1,661 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
const mockReadFileSync = jest.fn();
jest.mock('fs', () => ({ readFileSync: mockReadFileSync }));
import { duration } from 'moment';
import { logger } from '../logging/__mocks__';
import {
ElasticsearchClientConfig,
parseElasticsearchClientConfig,
} from './elasticsearch_client_config';
afterEach(() => jest.clearAllMocks());
test('parses minimally specified config', () => {
expect(
parseElasticsearchClientConfig(
{
apiVersion: 'master',
customHeaders: { xsrf: 'something' },
logQueries: false,
sniffOnStart: false,
sniffOnConnectionFault: false,
hosts: ['http://localhost/elasticsearch'],
requestHeadersWhitelist: [],
},
logger.get()
)
).toMatchInlineSnapshot(`
Object {
"apiVersion": "master",
"hosts": Array [
Object {
"headers": Object {
"xsrf": "something",
},
"host": "localhost",
"path": "/elasticsearch",
"port": "80",
"protocol": "http:",
"query": null,
},
],
"keepAlive": true,
"log": [Function],
"sniffOnConnectionFault": false,
"sniffOnStart": false,
}
`);
});
test('parses fully specified config', () => {
mockReadFileSync.mockImplementation(path => `content-of-${path}`);
const elasticsearchConfig: ElasticsearchClientConfig = {
apiVersion: 'v7.0.0',
customHeaders: { xsrf: 'something' },
logQueries: true,
sniffOnStart: true,
sniffOnConnectionFault: true,
hosts: [
'http://localhost/elasticsearch',
'http://domain.com:1234/elasticsearch',
'https://es.local',
],
requestHeadersWhitelist: [],
username: 'elastic',
password: 'changeme',
pingTimeout: 12345,
requestTimeout: 54321,
sniffInterval: 11223344,
ssl: {
verificationMode: 'certificate',
certificateAuthorities: ['ca-path-1', 'ca-path-2'],
certificate: 'certificate-path',
key: 'key-path',
keyPassphrase: 'key-pass',
alwaysPresentCertificate: true,
},
};
const elasticsearchClientConfig = parseElasticsearchClientConfig(
elasticsearchConfig,
logger.get()
);
// Check that original references aren't used.
for (const host of elasticsearchClientConfig.hosts) {
expect(elasticsearchConfig.customHeaders).not.toBe(host.headers);
}
expect(elasticsearchConfig.ssl).not.toBe(elasticsearchClientConfig.ssl);
expect(elasticsearchClientConfig).toMatchInlineSnapshot(`
Object {
"apiVersion": "v7.0.0",
"hosts": Array [
Object {
"auth": "elastic:changeme",
"headers": Object {
"xsrf": "something",
},
"host": "localhost",
"path": "/elasticsearch",
"port": "80",
"protocol": "http:",
"query": null,
},
Object {
"auth": "elastic:changeme",
"headers": Object {
"xsrf": "something",
},
"host": "domain.com",
"path": "/elasticsearch",
"port": "1234",
"protocol": "http:",
"query": null,
},
Object {
"auth": "elastic:changeme",
"headers": Object {
"xsrf": "something",
},
"host": "es.local",
"path": "/",
"port": "443",
"protocol": "https:",
"query": null,
},
],
"keepAlive": true,
"log": [Function],
"pingTimeout": 12345,
"requestTimeout": 54321,
"sniffInterval": 11223344,
"sniffOnConnectionFault": true,
"sniffOnStart": true,
"ssl": Object {
"ca": Array [
"content-of-ca-path-1",
"content-of-ca-path-2",
],
"cert": "content-of-certificate-path",
"checkServerIdentity": [Function],
"key": "content-of-key-path",
"passphrase": "key-pass",
"rejectUnauthorized": true,
},
}
`);
});
test('parses config timeouts of moment.Duration type', () => {
expect(
parseElasticsearchClientConfig(
{
apiVersion: 'master',
customHeaders: { xsrf: 'something' },
logQueries: false,
sniffOnStart: false,
sniffOnConnectionFault: false,
pingTimeout: duration(100, 'ms'),
requestTimeout: duration(30, 's'),
sniffInterval: duration(1, 'minute'),
hosts: ['http://localhost:9200/elasticsearch'],
requestHeadersWhitelist: [],
},
logger.get()
)
).toMatchInlineSnapshot(`
Object {
"apiVersion": "master",
"hosts": Array [
Object {
"headers": Object {
"xsrf": "something",
},
"host": "localhost",
"path": "/elasticsearch",
"port": "9200",
"protocol": "http:",
"query": null,
},
],
"keepAlive": true,
"log": [Function],
"pingTimeout": 100,
"requestTimeout": 30000,
"sniffInterval": 60000,
"sniffOnConnectionFault": false,
"sniffOnStart": false,
}
`);
});
describe('#auth', () => {
test('is not set if #auth = false even if username and password are provided', () => {
expect(
parseElasticsearchClientConfig(
{
apiVersion: 'v7.0.0',
customHeaders: { xsrf: 'something' },
logQueries: true,
sniffOnStart: true,
sniffOnConnectionFault: true,
hosts: ['http://user:password@localhost/elasticsearch', 'https://es.local'],
username: 'elastic',
password: 'changeme',
requestHeadersWhitelist: [],
},
logger.get(),
{ auth: false }
)
).toMatchInlineSnapshot(`
Object {
"apiVersion": "v7.0.0",
"hosts": Array [
Object {
"headers": Object {
"xsrf": "something",
},
"host": "localhost",
"path": "/elasticsearch",
"port": "80",
"protocol": "http:",
"query": null,
},
Object {
"headers": Object {
"xsrf": "something",
},
"host": "es.local",
"path": "/",
"port": "443",
"protocol": "https:",
"query": null,
},
],
"keepAlive": true,
"log": [Function],
"sniffOnConnectionFault": true,
"sniffOnStart": true,
}
`);
});
test('is not set if username is not specified', () => {
expect(
parseElasticsearchClientConfig(
{
apiVersion: 'v7.0.0',
customHeaders: { xsrf: 'something' },
logQueries: true,
sniffOnStart: true,
sniffOnConnectionFault: true,
hosts: ['https://es.local'],
requestHeadersWhitelist: [],
password: 'changeme',
},
logger.get(),
{ auth: true }
)
).toMatchInlineSnapshot(`
Object {
"apiVersion": "v7.0.0",
"hosts": Array [
Object {
"headers": Object {
"xsrf": "something",
},
"host": "es.local",
"path": "/",
"port": "443",
"protocol": "https:",
"query": null,
},
],
"keepAlive": true,
"log": [Function],
"sniffOnConnectionFault": true,
"sniffOnStart": true,
}
`);
});
test('is not set if password is not specified', () => {
expect(
parseElasticsearchClientConfig(
{
apiVersion: 'v7.0.0',
customHeaders: { xsrf: 'something' },
logQueries: true,
sniffOnStart: true,
sniffOnConnectionFault: true,
hosts: ['https://es.local'],
requestHeadersWhitelist: [],
username: 'elastic',
},
logger.get(),
{ auth: true }
)
).toMatchInlineSnapshot(`
Object {
"apiVersion": "v7.0.0",
"hosts": Array [
Object {
"headers": Object {
"xsrf": "something",
},
"host": "es.local",
"path": "/",
"port": "443",
"protocol": "https:",
"query": null,
},
],
"keepAlive": true,
"log": [Function],
"sniffOnConnectionFault": true,
"sniffOnStart": true,
}
`);
});
});
describe('#log', () => {
test('default logger with #logQueries = false', () => {
const parsedConfig = parseElasticsearchClientConfig(
{
apiVersion: 'master',
customHeaders: { xsrf: 'something' },
logQueries: false,
sniffOnStart: false,
sniffOnConnectionFault: false,
hosts: ['http://localhost/elasticsearch'],
requestHeadersWhitelist: [],
},
logger.get()
);
const esLogger = new parsedConfig.log();
esLogger.error('some-error');
esLogger.warning('some-warning');
esLogger.trace('some-trace');
esLogger.info('some-info');
esLogger.debug('some-debug');
expect(typeof esLogger.close).toBe('function');
expect(logger.mockCollect()).toMatchInlineSnapshot(`
Object {
"debug": Array [],
"error": Array [
Array [
"some-error",
],
],
"fatal": Array [],
"info": Array [],
"log": Array [],
"trace": Array [],
"warn": Array [
Array [
"some-warning",
],
],
}
`);
});
test('default logger with #logQueries = true', () => {
const parsedConfig = parseElasticsearchClientConfig(
{
apiVersion: 'master',
customHeaders: { xsrf: 'something' },
logQueries: true,
sniffOnStart: false,
sniffOnConnectionFault: false,
hosts: ['http://localhost/elasticsearch'],
requestHeadersWhitelist: [],
},
logger.get()
);
const esLogger = new parsedConfig.log();
esLogger.error('some-error');
esLogger.warning('some-warning');
esLogger.trace('METHOD', { path: '/some-path' }, '?query=2', 'unknown', '304');
esLogger.info('some-info');
esLogger.debug('some-debug');
expect(typeof esLogger.close).toBe('function');
expect(logger.mockCollect()).toMatchInlineSnapshot(`
Object {
"debug": Array [
Array [
"304
METHOD /some-path
?query=2",
Object {
"tags": Array [
"query",
],
},
],
],
"error": Array [
Array [
"some-error",
],
],
"fatal": Array [],
"info": Array [],
"log": Array [],
"trace": Array [],
"warn": Array [
Array [
"some-warning",
],
],
}
`);
});
test('custom logger', () => {
const customLogger = jest.fn();
const parsedConfig = parseElasticsearchClientConfig(
{
apiVersion: 'master',
customHeaders: { xsrf: 'something' },
logQueries: true,
sniffOnStart: false,
sniffOnConnectionFault: false,
hosts: ['http://localhost/elasticsearch'],
requestHeadersWhitelist: [],
log: customLogger,
},
logger.get()
);
expect(parsedConfig.log).toBe(customLogger);
});
});
describe('#ssl', () => {
test('#verificationMode = none', () => {
expect(
parseElasticsearchClientConfig(
{
apiVersion: 'v7.0.0',
customHeaders: {},
logQueries: true,
sniffOnStart: true,
sniffOnConnectionFault: true,
hosts: ['https://es.local'],
requestHeadersWhitelist: [],
ssl: { verificationMode: 'none' },
},
logger.get()
)
).toMatchInlineSnapshot(`
Object {
"apiVersion": "v7.0.0",
"hosts": Array [
Object {
"headers": Object {},
"host": "es.local",
"path": "/",
"port": "443",
"protocol": "https:",
"query": null,
},
],
"keepAlive": true,
"log": [Function],
"sniffOnConnectionFault": true,
"sniffOnStart": true,
"ssl": Object {
"rejectUnauthorized": false,
},
}
`);
});
test('#verificationMode = certificate', () => {
const clientConfig = parseElasticsearchClientConfig(
{
apiVersion: 'v7.0.0',
customHeaders: {},
logQueries: true,
sniffOnStart: true,
sniffOnConnectionFault: true,
hosts: ['https://es.local'],
requestHeadersWhitelist: [],
ssl: { verificationMode: 'certificate' },
},
logger.get()
);
// `checkServerIdentity` shouldn't check hostname when verificationMode is certificate.
expect(
clientConfig.ssl!.checkServerIdentity!('right.com', { subject: { CN: 'wrong.com' } } as any)
).toBeUndefined();
expect(clientConfig).toMatchInlineSnapshot(`
Object {
"apiVersion": "v7.0.0",
"hosts": Array [
Object {
"headers": Object {},
"host": "es.local",
"path": "/",
"port": "443",
"protocol": "https:",
"query": null,
},
],
"keepAlive": true,
"log": [Function],
"sniffOnConnectionFault": true,
"sniffOnStart": true,
"ssl": Object {
"checkServerIdentity": [Function],
"rejectUnauthorized": true,
},
}
`);
});
test('#verificationMode = full', () => {
expect(
parseElasticsearchClientConfig(
{
apiVersion: 'v7.0.0',
customHeaders: {},
logQueries: true,
sniffOnStart: true,
sniffOnConnectionFault: true,
hosts: ['https://es.local'],
requestHeadersWhitelist: [],
ssl: { verificationMode: 'full' },
},
logger.get()
)
).toMatchInlineSnapshot(`
Object {
"apiVersion": "v7.0.0",
"hosts": Array [
Object {
"headers": Object {},
"host": "es.local",
"path": "/",
"port": "443",
"protocol": "https:",
"query": null,
},
],
"keepAlive": true,
"log": [Function],
"sniffOnConnectionFault": true,
"sniffOnStart": true,
"ssl": Object {
"rejectUnauthorized": true,
},
}
`);
});
test('#verificationMode is unknown', () => {
expect(() =>
parseElasticsearchClientConfig(
{
apiVersion: 'v7.0.0',
customHeaders: {},
logQueries: true,
sniffOnStart: true,
sniffOnConnectionFault: true,
hosts: ['https://es.local'],
requestHeadersWhitelist: [],
ssl: { verificationMode: 'misspelled' as any },
},
logger.get()
)
).toThrowErrorMatchingInlineSnapshot(`"Unknown ssl verificationMode: misspelled"`);
});
test('#ignoreCertAndKey = true', () => {
mockReadFileSync.mockImplementation(path => `content-of-${path}`);
expect(
parseElasticsearchClientConfig(
{
apiVersion: 'v7.0.0',
customHeaders: {},
logQueries: true,
sniffOnStart: true,
sniffOnConnectionFault: true,
hosts: ['https://es.local'],
requestHeadersWhitelist: [],
ssl: {
verificationMode: 'certificate',
certificateAuthorities: ['ca-path'],
certificate: 'certificate-path',
key: 'key-path',
keyPassphrase: 'key-pass',
alwaysPresentCertificate: true,
},
},
logger.get(),
{ ignoreCertAndKey: true }
)
).toMatchInlineSnapshot(`
Object {
"apiVersion": "v7.0.0",
"hosts": Array [
Object {
"headers": Object {},
"host": "es.local",
"path": "/",
"port": "443",
"protocol": "https:",
"query": null,
},
],
"keepAlive": true,
"log": [Function],
"sniffOnConnectionFault": true,
"sniffOnStart": true,
"ssl": Object {
"ca": Array [
"content-of-ca-path",
],
"checkServerIdentity": [Function],
"rejectUnauthorized": true,
},
}
`);
});
});

View file

@ -0,0 +1,231 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { ConfigOptions } from 'elasticsearch';
import { readFileSync } from 'fs';
import { cloneDeep } from 'lodash';
import { Duration } from 'moment';
import { checkServerIdentity } from 'tls';
import url from 'url';
import { pick } from '../../utils';
import { Logger } from '../logging';
import { ElasticsearchConfig } from './elasticsearch_config';
/**
* The config that consumers can pass to the Elasticsearch JS client is complex and includes
* not only entries from the standard `elasticsearch.*` yaml config, but also some Elasticsearch JS
* client-specific options like `keepAlive` or `plugins` (that will eventually be deprecated).
*/
export type ElasticsearchClientConfig = Pick<ConfigOptions, 'keepAlive' | 'log' | 'plugins'> &
Pick<
ElasticsearchConfig,
| 'apiVersion'
| 'customHeaders'
| 'logQueries'
| 'requestHeadersWhitelist'
| 'sniffOnStart'
| 'sniffOnConnectionFault'
| 'hosts'
| 'username'
| 'password'
> & {
pingTimeout?: ElasticsearchConfig['pingTimeout'] | ConfigOptions['pingTimeout'];
requestTimeout?: ElasticsearchConfig['requestTimeout'] | ConfigOptions['requestTimeout'];
sniffInterval?: ElasticsearchConfig['sniffInterval'] | ConfigOptions['sniffInterval'];
ssl?: Partial<ElasticsearchConfig['ssl']>;
};
/** @internal */
interface ElasticsearchClientConfigOverrides {
/**
* If set to `false`, username and password from the config won't be used
* to access the Elasticsearch API even if these are specified.
*/
auth?: boolean;
/**
* If set to `true`, `ssl.key` and `ssl.certificate` provided through config won't
* be used to connect to Elasticsearch.
*/
ignoreCertAndKey?: boolean;
}
// Original `ConfigOptions` defines `ssl: object` so we need something more specific.
/** @internal */
type ExtendedConfigOptions = ConfigOptions &
Partial<{
ssl: Partial<{
rejectUnauthorized: boolean;
checkServerIdentity: typeof checkServerIdentity;
ca: string[];
cert: string;
key: string;
passphrase: string;
}>;
}>;
/** @internal */
export function parseElasticsearchClientConfig(
config: ElasticsearchClientConfig,
log: Logger,
{ ignoreCertAndKey = false, auth = true }: ElasticsearchClientConfigOverrides = {}
) {
const esClientConfig: ExtendedConfigOptions = {
keepAlive: true,
...pick(config, [
'apiVersion',
'sniffOnStart',
'sniffOnConnectionFault',
'keepAlive',
'log',
'plugins',
]),
};
if (esClientConfig.log == null) {
esClientConfig.log = getLoggerClass(log, config.logQueries);
}
if (config.pingTimeout != null) {
esClientConfig.pingTimeout = getDurationAsMs(config.pingTimeout);
}
if (config.requestTimeout != null) {
esClientConfig.requestTimeout = getDurationAsMs(config.requestTimeout);
}
if (config.sniffInterval) {
esClientConfig.sniffInterval = getDurationAsMs(config.sniffInterval);
}
if (Array.isArray(config.hosts)) {
const needsAuth = auth !== false && config.username && config.password;
esClientConfig.hosts = config.hosts.map((nodeUrl: string) => {
const uri = url.parse(nodeUrl);
const httpsURI = uri.protocol === 'https:';
const httpURI = uri.protocol === 'http:';
const host: Record<string, unknown> = {
host: uri.hostname,
port: uri.port || (httpsURI && '443') || (httpURI && '80'),
protocol: uri.protocol,
path: uri.pathname,
query: uri.query,
headers: config.customHeaders,
};
if (needsAuth) {
host.auth = `${config.username}:${config.password}`;
}
return host;
});
}
if (config.ssl === undefined) {
return cloneDeep(esClientConfig);
}
esClientConfig.ssl = {};
const verificationMode = config.ssl.verificationMode;
switch (verificationMode) {
case 'none':
esClientConfig.ssl.rejectUnauthorized = false;
break;
case 'certificate':
esClientConfig.ssl.rejectUnauthorized = true;
// By default, Node.js checks the server identity.
esClientConfig.ssl.checkServerIdentity = () => undefined;
break;
case 'full':
esClientConfig.ssl.rejectUnauthorized = true;
break;
default:
throw new Error(`Unknown ssl verificationMode: ${verificationMode}`);
}
const readFile = (file: string) => readFileSync(file, 'utf8');
if (
config.ssl.certificateAuthorities !== undefined &&
config.ssl.certificateAuthorities.length > 0
) {
esClientConfig.ssl.ca = config.ssl.certificateAuthorities.map(readFile);
}
// Add client certificate and key if required by elasticsearch
if (!ignoreCertAndKey && config.ssl.certificate && config.ssl.key) {
esClientConfig.ssl.cert = readFile(config.ssl.certificate);
esClientConfig.ssl.key = readFile(config.ssl.key);
esClientConfig.ssl.passphrase = config.ssl.keyPassphrase;
}
// Elasticsearch JS client mutates config object, so all properties that are
// usually passed by reference should be cloned to avoid any side effects.
return cloneDeep(esClientConfig);
}
function getDurationAsMs(duration: number | Duration) {
if (typeof duration === 'number') {
return duration;
}
return duration.asMilliseconds();
}
function getLoggerClass(log: Logger, logQueries = false) {
return class ElasticsearchClientLogging {
public error(err: string | Error) {
log.error(err);
}
public warning(message: string) {
log.warn(message);
}
public trace(
method: string,
options: { path: string },
query: string,
_: unknown,
statusCode: string
) {
if (logQueries) {
log.debug(`${statusCode}\n${method} ${options.path}\n${query ? query.trim() : ''}`, {
tags: ['query'],
});
}
}
// elasticsearch-js expects the following functions to exist
public info() {
// noop
}
public debug() {
// noop
}
public close() {
// noop
}
};
}
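
As a summary of what the parser does, a sketch of a single call (illustration only, not part of the commit; the concrete values are invented):

import { duration } from 'moment';
import { logger } from '../logging/__mocks__';
import { parseElasticsearchClientConfig } from './elasticsearch_client_config';

// Durations are converted to milliseconds, credentials are folded into each host
// entry (unless `auth: false`), and SSL cert/key files are read from disk unless
// `ignoreCertAndKey: true` is passed.
const clientConfig = parseElasticsearchClientConfig(
  {
    apiVersion: 'master',
    customHeaders: {},
    logQueries: false,
    sniffOnStart: false,
    sniffOnConnectionFault: false,
    hosts: ['https://es.local:9200'],
    username: 'elastic',
    password: 'changeme',
    requestTimeout: duration(30, 's'),
    requestHeadersWhitelist: [],
    ssl: { verificationMode: 'certificate' },
  },
  logger.get(),
  { auth: false, ignoreCertAndKey: true }
);

// e.g. clientConfig.requestTimeout === 30000, clientConfig.ssl.rejectUnauthorized === true,
// and no `auth` is attached to the parsed host because of the `auth: false` override.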

View file

@ -0,0 +1,108 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { ElasticsearchConfig } from './elasticsearch_config';
test('set correct defaults', () => {
const config = new ElasticsearchConfig(ElasticsearchConfig.schema.validate({}));
expect(config).toMatchInlineSnapshot(`
ElasticsearchConfig {
"apiVersion": "master",
"customHeaders": Object {},
"healthCheckDelay": "PT2.5S",
"hosts": Array [
"http://localhost:9200",
],
"logQueries": false,
"password": undefined,
"pingTimeout": "PT30S",
"requestHeadersWhitelist": Array [
"authorization",
],
"requestTimeout": "PT30S",
"shardTimeout": "PT30S",
"sniffInterval": false,
"sniffOnConnectionFault": false,
"sniffOnStart": false,
"ssl": Object {
"alwaysPresentCertificate": true,
"certificateAuthorities": undefined,
"verificationMode": "full",
},
"username": undefined,
}
`);
});
test('#hosts accepts both string and array of strings', () => {
let config = new ElasticsearchConfig(
ElasticsearchConfig.schema.validate({ hosts: 'http://some.host:1234' })
);
expect(config.hosts).toEqual(['http://some.host:1234']);
config = new ElasticsearchConfig(
ElasticsearchConfig.schema.validate({ hosts: ['http://some.host:1234'] })
);
expect(config.hosts).toEqual(['http://some.host:1234']);
config = new ElasticsearchConfig(
ElasticsearchConfig.schema.validate({
hosts: ['http://some.host:1234', 'https://some.another.host'],
})
);
expect(config.hosts).toEqual(['http://some.host:1234', 'https://some.another.host']);
});
test('#requestHeadersWhitelist accepts both string and array of strings', () => {
let config = new ElasticsearchConfig(
ElasticsearchConfig.schema.validate({ requestHeadersWhitelist: 'token' })
);
expect(config.requestHeadersWhitelist).toEqual(['token']);
config = new ElasticsearchConfig(
ElasticsearchConfig.schema.validate({ requestHeadersWhitelist: ['token'] })
);
expect(config.requestHeadersWhitelist).toEqual(['token']);
config = new ElasticsearchConfig(
ElasticsearchConfig.schema.validate({
requestHeadersWhitelist: ['token', 'X-Forwarded-Proto'],
})
);
expect(config.requestHeadersWhitelist).toEqual(['token', 'X-Forwarded-Proto']);
});
test('#ssl.certificateAuthorities accepts both string and array of strings', () => {
let config = new ElasticsearchConfig(
ElasticsearchConfig.schema.validate({ ssl: { certificateAuthorities: 'some-path' } })
);
expect(config.ssl.certificateAuthorities).toEqual(['some-path']);
config = new ElasticsearchConfig(
ElasticsearchConfig.schema.validate({ ssl: { certificateAuthorities: ['some-path'] } })
);
expect(config.ssl.certificateAuthorities).toEqual(['some-path']);
config = new ElasticsearchConfig(
ElasticsearchConfig.schema.validate({
ssl: { certificateAuthorities: ['some-path', 'another-path'] },
})
);
expect(config.ssl.certificateAuthorities).toEqual(['some-path', 'another-path']);
});

View file

@ -0,0 +1,188 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { schema, TypeOf } from '@kbn/config-schema';
import { Duration } from 'moment';
const hostURISchema = schema.uri({ scheme: ['http', 'https'] });
export const DEFAULT_API_VERSION = 'master';
const configSchema = schema.object({
sniffOnStart: schema.boolean({ defaultValue: false }),
sniffInterval: schema.oneOf([schema.duration(), schema.literal(false)], { defaultValue: false }),
sniffOnConnectionFault: schema.boolean({ defaultValue: false }),
hosts: schema.oneOf([hostURISchema, schema.arrayOf(hostURISchema, { minSize: 1 })], {
defaultValue: 'http://localhost:9200',
}),
preserveHost: schema.boolean({ defaultValue: true }),
username: schema.maybe(schema.string()),
password: schema.maybe(schema.string()),
requestHeadersWhitelist: schema.oneOf([schema.string(), schema.arrayOf(schema.string())], {
defaultValue: ['authorization'],
}),
customHeaders: schema.recordOf(schema.string(), schema.string(), { defaultValue: {} }),
shardTimeout: schema.duration({ defaultValue: '30s' }),
requestTimeout: schema.duration({ defaultValue: '30s' }),
pingTimeout: schema.duration({ defaultValue: schema.siblingRef('requestTimeout') }),
startupTimeout: schema.duration({ defaultValue: '5s' }),
logQueries: schema.boolean({ defaultValue: false }),
ssl: schema.object({
verificationMode: schema.oneOf(
[schema.literal('none'), schema.literal('certificate'), schema.literal('full')],
{ defaultValue: 'full' }
),
certificateAuthorities: schema.maybe(
schema.oneOf([schema.string(), schema.arrayOf(schema.string(), { minSize: 1 })])
),
certificate: schema.maybe(schema.string()),
key: schema.maybe(schema.string()),
keyPassphrase: schema.maybe(schema.string()),
alwaysPresentCertificate: schema.boolean({ defaultValue: true }),
}),
apiVersion: schema.string({ defaultValue: DEFAULT_API_VERSION }),
healthCheck: schema.object({ delay: schema.duration({ defaultValue: 2500 }) }),
});
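// Illustration only (not part of the change itself): with no overrides, `pingTimeout`
// resolves via `siblingRef` to the `requestTimeout` default of 30s, and the numeric
// `healthCheck.delay` default of 2500 is interpreted as a 2.5 second duration.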
type SslConfigSchema = TypeOf<typeof configSchema>['ssl'];
export class ElasticsearchConfig {
public static schema = configSchema;
/**
 * The interval between health check requests Kibana sends to Elasticsearch.
*/
public readonly healthCheckDelay: Duration;
/**
 * Version of Elasticsearch (6.7, 7.1 or `master`) the client will be connecting to.
*/
public readonly apiVersion: string;
/**
* Specifies whether all queries to the client should be logged (status code,
* method, query etc.).
*/
public readonly logQueries: boolean;
/**
* Hosts that the client will connect to. If sniffing is enabled, this list will
* be used as seeds to discover the rest of your cluster.
*/
public readonly hosts: string[];
/**
 * List of Kibana client-side headers to send to Elasticsearch when the request-scoped
 * cluster client is used. If this is an empty array then *no* client-side headers
 * will be sent.
*/
public readonly requestHeadersWhitelist: string[];
/**
 * Timeout after which the PING HTTP request will be aborted and retried.
*/
public readonly pingTimeout: Duration;
/**
 * Timeout after which the HTTP request will be aborted and retried.
*/
public readonly requestTimeout: Duration;
/**
* Timeout for Elasticsearch to wait for responses from shards. Set to 0 to disable.
*/
public readonly shardTimeout: Duration;
/**
* Specifies whether the client should attempt to detect the rest of the cluster
* when it is first instantiated.
*/
public readonly sniffOnStart: boolean;
/**
 * Interval at which to perform a sniff operation to make sure the list of nodes is complete.
* If `false` then sniffing is disabled.
*/
public readonly sniffInterval: false | Duration;
/**
* Specifies whether the client should immediately sniff for a more current list
* of nodes when a connection dies.
*/
public readonly sniffOnConnectionFault: boolean;
/**
* If Elasticsearch is protected with basic authentication, this setting provides
* the username that the Kibana server uses to perform its administrative functions.
*/
public readonly username?: string;
/**
* If Elasticsearch is protected with basic authentication, this setting provides
* the password that the Kibana server uses to perform its administrative functions.
*/
public readonly password?: string;
/**
 * Set of settings that configure the SSL connection between Kibana and Elasticsearch.
 * These are required when `xpack.ssl.verification_mode` in Elasticsearch is set to
 * either `certificate` or `full`.
*/
public readonly ssl: Pick<
SslConfigSchema,
Exclude<keyof SslConfigSchema, 'certificateAuthorities'>
> & { certificateAuthorities?: string[] };
/**
* Header names and values to send to Elasticsearch with every request. These
* headers cannot be overwritten by client-side headers and aren't affected by
* `requestHeadersWhitelist` configuration.
*/
public readonly customHeaders: TypeOf<typeof configSchema>['customHeaders'];
constructor(config: TypeOf<typeof configSchema>) {
this.apiVersion = config.apiVersion;
this.logQueries = config.logQueries;
this.hosts = Array.isArray(config.hosts) ? config.hosts : [config.hosts];
this.requestHeadersWhitelist = Array.isArray(config.requestHeadersWhitelist)
? config.requestHeadersWhitelist
: [config.requestHeadersWhitelist];
this.pingTimeout = config.pingTimeout;
this.requestTimeout = config.requestTimeout;
this.shardTimeout = config.shardTimeout;
this.sniffOnStart = config.sniffOnStart;
this.sniffOnConnectionFault = config.sniffOnConnectionFault;
this.sniffInterval = config.sniffInterval;
this.healthCheckDelay = config.healthCheck.delay;
this.username = config.username;
this.password = config.password;
this.customHeaders = config.customHeaders;
const certificateAuthorities = Array.isArray(config.ssl.certificateAuthorities)
? config.ssl.certificateAuthorities
: typeof config.ssl.certificateAuthorities === 'string'
? [config.ssl.certificateAuthorities]
: undefined;
this.ssl = {
...config.ssl,
certificateAuthorities,
};
}
}
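A minimal usage sketch, mirroring the tests above (the host value is illustrative), showing how the class normalizes single-string settings into arrays:

const rawConfig = ElasticsearchConfig.schema.validate({ hosts: 'http://some.host:1234' });
const config = new ElasticsearchConfig(rawConfig);
config.hosts; // ['http://some.host:1234'] (single strings are wrapped into arrays)
config.requestHeadersWhitelist; // ['authorization'] (the schema default)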

View file

@ -0,0 +1,133 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { first } from 'rxjs/operators';
const MockClusterClient = jest.fn();
jest.mock('./cluster_client', () => ({ ClusterClient: MockClusterClient }));
import { BehaviorSubject, combineLatest } from 'rxjs';
import { CoreContext } from '../../types';
import { Config, ConfigService, Env, ObjectToConfigAdapter } from '../config';
import { getEnvOptions } from '../config/__mocks__/env';
import { logger } from '../logging/__mocks__';
import { ElasticsearchConfig } from './elasticsearch_config';
import { ElasticsearchService } from './elasticsearch_service';
let elasticsearchService: ElasticsearchService;
let configService: ConfigService;
let env: Env;
let coreContext: CoreContext;
beforeEach(() => {
env = Env.createDefault(getEnvOptions());
configService = new ConfigService(
new BehaviorSubject<Config>(
new ObjectToConfigAdapter({
elasticsearch: { hosts: ['http://1.2.3.4'], username: 'jest' },
})
),
env,
logger
);
coreContext = { env, logger, configService };
elasticsearchService = new ElasticsearchService(coreContext);
});
afterEach(() => jest.clearAllMocks());
describe('#start', () => {
test('returns legacy Elasticsearch config as a part of the contract', async () => {
const startContract = await elasticsearchService.start();
await expect(startContract.legacy.config$.pipe(first()).toPromise()).resolves.toBeInstanceOf(
ElasticsearchConfig
);
});
test('returns data and admin client observables as a part of the contract', async () => {
const mockAdminClusterClientInstance = { close: jest.fn() };
const mockDataClusterClientInstance = { close: jest.fn() };
MockClusterClient.mockImplementationOnce(
() => mockAdminClusterClientInstance
).mockImplementationOnce(() => mockDataClusterClientInstance);
const startContract = await elasticsearchService.start();
const [esConfig, adminClient, dataClient] = await combineLatest(
startContract.legacy.config$,
startContract.adminClient$,
startContract.dataClient$
)
.pipe(first())
.toPromise();
expect(adminClient).toBe(mockAdminClusterClientInstance);
expect(dataClient).toBe(mockDataClusterClientInstance);
expect(MockClusterClient).toHaveBeenCalledTimes(2);
expect(MockClusterClient).toHaveBeenNthCalledWith(
1,
esConfig,
expect.objectContaining({ context: ['elasticsearch', 'admin'] })
);
expect(MockClusterClient).toHaveBeenNthCalledWith(
2,
esConfig,
expect.objectContaining({ context: ['elasticsearch', 'data'] })
);
expect(mockAdminClusterClientInstance.close).not.toHaveBeenCalled();
expect(mockDataClusterClientInstance.close).not.toHaveBeenCalled();
});
test('returns `createClient` as a part of the contract', async () => {
const startContract = await elasticsearchService.start();
const mockClusterClientInstance = { close: jest.fn() };
MockClusterClient.mockImplementation(() => mockClusterClientInstance);
const mockConfig = { logQueries: true };
const clusterClient = startContract.createClient('some-custom-type', mockConfig as any);
expect(clusterClient).toBe(mockClusterClientInstance);
expect(MockClusterClient).toHaveBeenCalledWith(
mockConfig,
expect.objectContaining({ context: ['elasticsearch', 'some-custom-type'] })
);
});
});
describe('#stop', () => {
test('stops both admin and data clients', async () => {
const mockAdminClusterClientInstance = { close: jest.fn() };
const mockDataClusterClientInstance = { close: jest.fn() };
MockClusterClient.mockImplementationOnce(
() => mockAdminClusterClientInstance
).mockImplementationOnce(() => mockDataClusterClientInstance);
await elasticsearchService.start();
await elasticsearchService.stop();
expect(mockAdminClusterClientInstance.close).toHaveBeenCalledTimes(1);
expect(mockDataClusterClientInstance.close).toHaveBeenCalledTimes(1);
});
});

View file

@ -0,0 +1,118 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { ConnectableObservable, Observable, Subscription } from 'rxjs';
import { filter, map, publishReplay, switchMap } from 'rxjs/operators';
import { CoreContext, CoreService } from '../../types';
import { Logger } from '../logging';
import { ClusterClient } from './cluster_client';
import { ElasticsearchClientConfig } from './elasticsearch_client_config';
import { ElasticsearchConfig } from './elasticsearch_config';
interface CoreClusterClients {
config: ElasticsearchConfig;
adminClient: ClusterClient;
dataClient: ClusterClient;
}
export interface ElasticsearchServiceStart {
// Required only for backward compatibility (BWC) with the legacy Kibana.
readonly legacy: {
readonly config$: Observable<ElasticsearchConfig>;
};
readonly createClient: (type: string, config: ElasticsearchClientConfig) => ClusterClient;
readonly adminClient$: Observable<ClusterClient>;
readonly dataClient$: Observable<ClusterClient>;
}
/** @internal */
export class ElasticsearchService implements CoreService<ElasticsearchServiceStart> {
private readonly log: Logger;
private subscription?: Subscription;
constructor(private readonly coreContext: CoreContext) {
this.log = coreContext.logger.get('elasticsearch-service');
}
public async start(): Promise<ElasticsearchServiceStart> {
this.log.debug('Starting elasticsearch service');
const clients$ = this.coreContext.configService
.atPath('elasticsearch', ElasticsearchConfig)
.pipe(
filter(() => {
if (this.subscription !== undefined) {
this.log.error('Clients cannot be changed after they are created');
return false;
}
return true;
}),
switchMap(
config =>
new Observable<CoreClusterClients>(subscriber => {
this.log.debug(`Creating elasticsearch clients`);
const coreClients = {
config,
adminClient: this.createClusterClient('admin', config),
dataClient: this.createClusterClient('data', config),
};
subscriber.next(coreClients);
return () => {
this.log.debug(`Closing elasticsearch clients`);
coreClients.adminClient.close();
coreClients.dataClient.close();
};
})
),
publishReplay(1)
) as ConnectableObservable<CoreClusterClients>;
this.subscription = clients$.connect();
return {
legacy: { config$: clients$.pipe(map(clients => clients.config)) },
adminClient$: clients$.pipe(map(clients => clients.adminClient)),
dataClient$: clients$.pipe(map(clients => clients.dataClient)),
createClient: (type: string, clientConfig: ElasticsearchClientConfig) => {
return this.createClusterClient(type, clientConfig);
},
};
}
public async stop() {
this.log.debug('Stopping elasticsearch service');
if (this.subscription !== undefined) {
this.subscription.unsubscribe();
this.subscription = undefined;
}
}
private createClusterClient(type: string, config: ElasticsearchClientConfig) {
return new ClusterClient(config, this.coreContext.logger.get('elasticsearch', type));
}
}
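A hedged sketch of how a consumer might use the start contract; `coreContext` and `customConfig` are assumed to be supplied by the caller, and the `first` operator mirrors the pattern used in the service tests above:

import { first } from 'rxjs/operators';

async function startElasticsearch(coreContext: CoreContext, customConfig: ElasticsearchClientConfig) {
  const elasticsearchService = new ElasticsearchService(coreContext);
  const { adminClient$, createClient } = await elasticsearchService.start();
  // Both client observables replay the latest instances created from the `elasticsearch` config path.
  const adminClient = await adminClient$.pipe(first()).toPromise();
  // Custom clients get their own logger context, here ['elasticsearch', 'monitoring'].
  const monitoringClient = createClient('monitoring', customConfig);
  return { adminClient, monitoringClient };
}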

View file

@ -17,19 +17,17 @@
* under the License.
*/
import _ from 'lodash';
export { ElasticsearchServiceStart } from './elasticsearch_service';
export { CallAPIOptions, ClusterClient } from './cluster_client';
export default function (originalHeaders, headersToKeep) {
const normalizeHeader = function (header) {
if (!header) {
return '';
}
header = header.toString();
return header.trim().toLowerCase();
};
import { CoreContext } from '../../types';
import { ElasticsearchService } from './elasticsearch_service';
// Normalize list of headers we want to allow in upstream request
const headersToKeepNormalized = headersToKeep.map(normalizeHeader);
/** @internal */
export class ElasticsearchModule {
public readonly service: ElasticsearchService;
return _.pick(originalHeaders, headersToKeepNormalized);
constructor(coreContext: CoreContext) {
this.service = new ElasticsearchService(coreContext);
}
}

View file

@ -0,0 +1,169 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { ScopedClusterClient } from './scoped_cluster_client';
let internalAPICaller: jest.Mock;
let scopedAPICaller: jest.Mock;
let clusterClient: ScopedClusterClient;
beforeEach(() => {
internalAPICaller = jest.fn();
scopedAPICaller = jest.fn();
clusterClient = new ScopedClusterClient(internalAPICaller, scopedAPICaller, { one: '1' });
});
afterEach(() => jest.clearAllMocks());
describe('#callAsInternalUser', () => {
test('properly forwards arguments to the API caller and results back from it', async () => {
const mockResponse = { data: 'response' };
internalAPICaller.mockResolvedValue(mockResponse);
await expect(clusterClient.callAsInternalUser('ping')).resolves.toBe(mockResponse);
expect(internalAPICaller).toHaveBeenCalledTimes(1);
expect(internalAPICaller).toHaveBeenCalledWith('ping', {}, undefined);
internalAPICaller.mockClear();
await expect(
clusterClient.callAsInternalUser('security.authenticate', { some: 'some' })
).resolves.toBe(mockResponse);
expect(internalAPICaller).toHaveBeenCalledTimes(1);
expect(internalAPICaller).toHaveBeenCalledWith(
'security.authenticate',
{ some: 'some' },
undefined
);
internalAPICaller.mockClear();
await expect(
clusterClient.callAsInternalUser('ping', undefined, { wrap401Errors: true })
).resolves.toBe(mockResponse);
expect(internalAPICaller).toHaveBeenCalledTimes(1);
expect(internalAPICaller).toHaveBeenCalledWith('ping', {}, { wrap401Errors: true });
internalAPICaller.mockClear();
await expect(
clusterClient.callAsInternalUser(
'security.authenticate',
{ some: 'some' },
{ wrap401Errors: true }
)
).resolves.toBe(mockResponse);
expect(internalAPICaller).toHaveBeenCalledTimes(1);
expect(internalAPICaller).toHaveBeenCalledWith(
'security.authenticate',
{ some: 'some' },
{ wrap401Errors: true }
);
expect(scopedAPICaller).not.toHaveBeenCalled();
});
test('properly forwards errors returned by the API caller', async () => {
const mockErrorResponse = new Error('some-error');
internalAPICaller.mockRejectedValue(mockErrorResponse);
await expect(clusterClient.callAsInternalUser('ping')).rejects.toBe(mockErrorResponse);
expect(scopedAPICaller).not.toHaveBeenCalled();
});
});
describe('#callAsCurrentUser', () => {
test('properly forwards arguments to the API caller and results back from it', async () => {
const mockResponse = { data: 'response' };
scopedAPICaller.mockResolvedValue(mockResponse);
await expect(clusterClient.callAsCurrentUser('ping')).resolves.toBe(mockResponse);
expect(scopedAPICaller).toHaveBeenCalledTimes(1);
expect(scopedAPICaller).toHaveBeenCalledWith('ping', { headers: { one: '1' } }, undefined);
scopedAPICaller.mockClear();
await expect(
clusterClient.callAsCurrentUser('security.authenticate', { some: 'some' })
).resolves.toBe(mockResponse);
expect(scopedAPICaller).toHaveBeenCalledTimes(1);
expect(scopedAPICaller).toHaveBeenCalledWith(
'security.authenticate',
{ some: 'some', headers: { one: '1' } },
undefined
);
scopedAPICaller.mockClear();
await expect(
clusterClient.callAsCurrentUser('ping', undefined, { wrap401Errors: true })
).resolves.toBe(mockResponse);
expect(scopedAPICaller).toHaveBeenCalledTimes(1);
expect(scopedAPICaller).toHaveBeenCalledWith(
'ping',
{ headers: { one: '1' } },
{ wrap401Errors: true }
);
scopedAPICaller.mockClear();
await expect(
clusterClient.callAsCurrentUser(
'security.authenticate',
{ some: 'some', headers: { one: '1' } },
{ wrap401Errors: true }
)
).resolves.toBe(mockResponse);
expect(scopedAPICaller).toHaveBeenCalledTimes(1);
expect(scopedAPICaller).toHaveBeenCalledWith(
'security.authenticate',
{ some: 'some', headers: { one: '1' } },
{ wrap401Errors: true }
);
expect(internalAPICaller).not.toHaveBeenCalled();
});
test('properly forwards errors returned by the API caller', async () => {
const mockErrorResponse = new Error('some-error');
scopedAPICaller.mockRejectedValue(mockErrorResponse);
await expect(clusterClient.callAsCurrentUser('ping')).rejects.toBe(mockErrorResponse);
expect(internalAPICaller).not.toHaveBeenCalled();
});
test('does not attach headers to the client params if they are not available', async () => {
const mockResponse = { data: 'response' };
scopedAPICaller.mockResolvedValue(mockResponse);
const clusterClientWithoutHeaders = new ScopedClusterClient(internalAPICaller, scopedAPICaller);
await expect(clusterClientWithoutHeaders.callAsCurrentUser('ping')).resolves.toBe(mockResponse);
expect(scopedAPICaller).toHaveBeenCalledTimes(1);
expect(scopedAPICaller).toHaveBeenCalledWith('ping', {}, undefined);
scopedAPICaller.mockClear();
await expect(
clusterClientWithoutHeaders.callAsCurrentUser('security.authenticate', { some: 'some' })
).resolves.toBe(mockResponse);
expect(scopedAPICaller).toHaveBeenCalledTimes(1);
expect(scopedAPICaller).toHaveBeenCalledWith(
'security.authenticate',
{ some: 'some' },
undefined
);
expect(internalAPICaller).not.toHaveBeenCalled();
});
});

View file

@ -0,0 +1,75 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { Headers } from '../http/router';
import { CallAPIOptions } from './cluster_client';
type APICaller = (
endpoint: string,
clientParams: Record<string, unknown>,
options?: CallAPIOptions
) => Promise<unknown>;
/**
 * Serves the same purpose as the "normal" `ClusterClient` but exposes an additional
 * `callAsCurrentUser` method that doesn't use the credentials of the Kibana internal
 * user (as `callAsInternalUser` does) to request the Elasticsearch API, but rather
 * passes the HTTP headers extracted from the current user request to the API.
*/
export class ScopedClusterClient {
constructor(
private readonly internalAPICaller: APICaller,
private readonly scopedAPICaller: APICaller,
private readonly headers?: Headers
) {}
/**
* Calls specified {@param endpoint} with provided {@param clientParams} on behalf of the
* Kibana internal user.
* @param endpoint String descriptor of the endpoint e.g. `cluster.getSettings` or `ping`.
* @param clientParams A dictionary of parameters that will be passed directly to the Elasticsearch JS client.
* @param options Options that affect the way we call the API and process the result.
*/
public callAsInternalUser(
endpoint: string,
clientParams: Record<string, unknown> = {},
options?: CallAPIOptions
) {
return this.internalAPICaller(endpoint, clientParams, options);
}
/**
* Calls specified {@param endpoint} with provided {@param clientParams} on behalf of the
 * user that initiated the request to the Kibana server (via HTTP request headers).
* @param endpoint String descriptor of the endpoint e.g. `cluster.getSettings` or `ping`.
* @param clientParams A dictionary of parameters that will be passed directly to the Elasticsearch JS client.
* @param options Options that affect the way we call the API and process the result.
*/
public callAsCurrentUser(
endpoint: string,
clientParams: Record<string, unknown> = {},
options?: CallAPIOptions
) {
if (this.headers !== undefined) {
clientParams.headers = this.headers;
}
return this.scopedAPICaller(endpoint, clientParams, options);
}
}
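A small usage sketch based on the unit tests for this class; the API callers and the header value are illustrative:

async function scopedClientExample(internalAPICaller: APICaller, scopedAPICaller: APICaller) {
  const client = new ScopedClusterClient(internalAPICaller, scopedAPICaller, {
    // Headers extracted from the incoming Kibana request (illustrative value).
    authorization: 'Basic dXNlcjpwYXNz',
  });

  // Performed with the Kibana internal user; request headers are not attached.
  await client.callAsInternalUser('ping');

  // Forwards the request headers, so the call is made on behalf of the current user.
  await client.callAsCurrentUser('security.authenticate', { some: 'param' });
}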

View file

@ -19,9 +19,7 @@
import { pick } from '../../../utils';
export interface Headers {
[key: string]: string | string[] | undefined;
}
export type Headers = Record<string, string | string[] | undefined>;
const normalizeHeaderField = (field: string) => field.trim().toLowerCase();

View file

@ -17,5 +17,6 @@
* under the License.
*/
export { Headers, filterHeaders } from './headers';
export { Router } from './router';
export { KibanaRequest } from './request';

View file

@ -27,6 +27,11 @@ jest.mock('./plugins/plugins_service', () => ({
PluginsService: jest.fn(() => mockPluginsService),
}));
const mockElasticsearchService = { start: jest.fn(), stop: jest.fn() };
jest.mock('./elasticsearch/elasticsearch_service', () => ({
ElasticsearchService: jest.fn(() => mockElasticsearchService),
}));
const mockLegacyService = { start: jest.fn(), stop: jest.fn() };
jest.mock('./legacy/legacy_service', () => ({
LegacyService: jest.fn(() => mockLegacyService),
@ -50,6 +55,8 @@ afterEach(() => {
mockConfigService.atPath.mockReset();
mockHttpService.start.mockReset();
mockHttpService.stop.mockReset();
mockElasticsearchService.start.mockReset();
mockElasticsearchService.stop.mockReset();
mockPluginsService.start.mockReset();
mockPluginsService.stop.mockReset();
mockLegacyService.start.mockReset();
@ -60,21 +67,32 @@ test('starts services on "start"', async () => {
const mockHttpServiceStart = { something: true };
mockHttpService.start.mockReturnValue(Promise.resolve(mockHttpServiceStart));
const mockElasticsearchServiceStart = { adminClient$: {} };
mockElasticsearchService.start.mockResolvedValue(mockElasticsearchServiceStart);
const mockPluginsServiceStart = new Map([['some-plugin', 'some-value']]);
mockPluginsService.start.mockReturnValue(Promise.resolve(mockPluginsServiceStart));
const server = new Server(mockConfigService as any, logger, env);
expect(mockHttpService.start).not.toHaveBeenCalled();
expect(mockElasticsearchService.start).not.toHaveBeenCalled();
expect(mockPluginsService.start).not.toHaveBeenCalled();
expect(mockLegacyService.start).not.toHaveBeenCalled();
await server.start();
expect(mockHttpService.start).toHaveBeenCalledTimes(1);
expect(mockElasticsearchService.start).toHaveBeenCalledTimes(1);
expect(mockPluginsService.start).toHaveBeenCalledTimes(1);
expect(mockPluginsService.start).toHaveBeenCalledWith({
elasticsearch: mockElasticsearchServiceStart,
});
expect(mockLegacyService.start).toHaveBeenCalledTimes(1);
expect(mockLegacyService.start).toHaveBeenCalledWith({
elasticsearch: mockElasticsearchServiceStart,
http: mockHttpServiceStart,
plugins: mockPluginsServiceStart,
});
@ -127,12 +145,14 @@ test('stops services on "stop"', async () => {
await server.start();
expect(mockHttpService.stop).not.toHaveBeenCalled();
expect(mockElasticsearchService.stop).not.toHaveBeenCalled();
expect(mockPluginsService.stop).not.toHaveBeenCalled();
expect(mockLegacyService.stop).not.toHaveBeenCalled();
await server.stop();
expect(mockHttpService.stop).toHaveBeenCalledTimes(1);
expect(mockElasticsearchService.stop).toHaveBeenCalledTimes(1);
expect(mockPluginsService.stop).toHaveBeenCalledTimes(1);
expect(mockLegacyService.stop).toHaveBeenCalledTimes(1);
});

View file

@ -23,11 +23,13 @@ export { bootstrap } from './bootstrap';
import { first } from 'rxjs/operators';
import { ConfigService, Env } from './config';
import { ElasticsearchModule } from './elasticsearch';
import { HttpConfig, HttpModule, HttpServerInfo } from './http';
import { LegacyCompatModule } from './legacy';
import { Logger, LoggerFactory } from './logging';
export class Server {
private readonly elasticsearch: ElasticsearchModule;
private readonly http: HttpModule;
private readonly plugins: PluginsModule;
private readonly legacy: LegacyCompatModule;
@ -41,6 +43,7 @@ export class Server {
const core = { env, configService, logger };
this.plugins = new PluginsModule(core);
this.legacy = new LegacyCompatModule(core);
this.elasticsearch = new ElasticsearchModule(core);
}
public async start() {
@ -56,9 +59,14 @@ export class Server {
httpStart = await this.http.service.start();
}
const pluginsStart = await this.plugins.service.start();
const elasticsearchServiceStart = await this.elasticsearch.service.start();
const pluginsStart = await this.plugins.service.start({
elasticsearch: elasticsearchServiceStart,
});
await this.legacy.service.start({
elasticsearch: elasticsearchServiceStart,
http: httpStart,
plugins: pluginsStart,
});
@ -69,6 +77,7 @@ export class Server {
await this.legacy.service.stop();
await this.plugins.service.stop();
await this.elasticsearch.service.stop();
await this.http.service.stop();
}
}

View file

@ -29,6 +29,9 @@ Array [
},
},
"log": Object {
"context": Array [
"server",
],
"debug": [MockFunction] {
"calls": Array [
Array [

View file

@ -30,6 +30,7 @@ import MockClusterManager from '../../../cli/cluster/cluster_manager';
import KbnServer from '../../../legacy/server/kbn_server';
import { Config, ConfigService, Env, ObjectToConfigAdapter } from '../config';
import { getEnvOptions } from '../config/__mocks__/env';
import { ElasticsearchServiceStart } from '../elasticsearch';
import { logger } from '../logging/__mocks__';
import { PluginsServiceStart } from '../plugins/plugins_service';
import { LegacyPlatformProxy } from './legacy_platform_proxy';
@ -41,13 +42,18 @@ let legacyService: LegacyService;
let configService: jest.Mocked<ConfigService>;
let env: Env;
let config$: BehaviorSubject<Config>;
let startDeps: { http: any; plugins: PluginsServiceStart };
let startDeps: {
elasticsearch: ElasticsearchServiceStart;
http: any;
plugins: PluginsServiceStart;
};
beforeEach(() => {
env = Env.createDefault(getEnvOptions());
MockKbnServer.prototype.ready = jest.fn().mockReturnValue(Promise.resolve());
startDeps = {
elasticsearch: { legacy: {} } as any,
http: {
server: { listener: { addListener: jest.fn() }, route: jest.fn() },
options: { someOption: 'foo', someAnotherOption: 'bar' },
@ -57,6 +63,7 @@ beforeEach(() => {
config$ = new BehaviorSubject<Config>(
new ObjectToConfigAdapter({
elasticsearch: { hosts: ['http://127.0.0.1'] },
server: { autoListen: true },
})
);
@ -148,6 +155,7 @@ describe('once LegacyService is started with connection info', () => {
expect(MockKbnServer).toHaveBeenCalledWith(
{ server: { autoListen: true } },
{
elasticsearch: startDeps.elasticsearch,
serverOptions: {
listener: expect.any(LegacyPlatformProxy),
someAnotherOption: 'bar',
@ -172,6 +180,7 @@ describe('once LegacyService is started with connection info', () => {
expect(MockKbnServer).toHaveBeenCalledWith(
{ server: { autoListen: true } },
{
elasticsearch: startDeps.elasticsearch,
serverOptions: {
listener: expect.any(LegacyPlatformProxy),
someAnotherOption: 'bar',
@ -276,7 +285,13 @@ describe('once LegacyService is started with connection info', () => {
});
describe('once LegacyService is started without connection info', () => {
beforeEach(async () => await legacyService.start({ plugins: startDeps.plugins }));
beforeEach(
async () =>
await legacyService.start({
elasticsearch: startDeps.elasticsearch,
plugins: startDeps.plugins,
})
);
test('creates legacy kbnServer with `autoListen: false`.', () => {
expect(startDeps.http.server.route).not.toHaveBeenCalled();
@ -284,6 +299,7 @@ describe('once LegacyService is started without connection info', () => {
expect(MockKbnServer).toHaveBeenCalledWith(
{ server: { autoListen: true } },
{
elasticsearch: startDeps.elasticsearch,
serverOptions: { autoListen: false },
handledConfigPaths: ['foo.bar'],
plugins: startDeps.plugins,
@ -324,7 +340,10 @@ describe('once LegacyService is started in `devClusterMaster` mode', () => {
configService,
});
await devClusterLegacyService.start({ plugins: new Map() });
await devClusterLegacyService.start({
elasticsearch: startDeps.elasticsearch,
plugins: new Map(),
});
expect(MockClusterManager.create.mock.calls).toMatchSnapshot(
'cluster manager without base path proxy'
@ -343,7 +362,10 @@ describe('once LegacyService is started in `devClusterMaster` mode', () => {
configService,
});
await devClusterLegacyService.start({ plugins: new Map() });
await devClusterLegacyService.start({
elasticsearch: startDeps.elasticsearch,
plugins: new Map(),
});
expect(MockClusterManager.create.mock.calls).toMatchSnapshot(
'cluster manager with base path proxy'

View file

@ -23,6 +23,7 @@ import { first, map, mergeMap, publishReplay, tap } from 'rxjs/operators';
import { CoreContext, CoreService } from '../../types';
import { Config } from '../config';
import { DevConfig } from '../dev';
import { ElasticsearchServiceStart } from '../elasticsearch';
import { BasePathProxyServer, HttpConfig, HttpServiceStart } from '../http';
import { Logger } from '../logging';
import { PluginsServiceStart } from '../plugins/plugins_service';
@ -36,10 +37,23 @@ interface LegacyKbnServer {
}
interface Deps {
elasticsearch: ElasticsearchServiceStart;
http?: HttpServiceStart;
plugins: PluginsServiceStart;
}
function getLegacyRawConfig(config: Config) {
const rawConfig = config.toRaw();
// Elasticsearch config is solely handled by the core; the legacy platform
// shouldn't have direct access to it.
if (rawConfig.elasticsearch !== undefined) {
delete rawConfig.elasticsearch;
}
return rawConfig;
}
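// Illustration with hypothetical values: a raw config of
// { elasticsearch: { hosts: ['http://localhost:9200'] }, server: { port: 5601 } }
// reaches the legacy platform as { server: { port: 5601 } } only.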
/** @internal */
export class LegacyService implements CoreService {
private readonly log: Logger;
@ -111,28 +125,29 @@ export class LegacyService implements CoreService {
require('../../../cli/cluster/cluster_manager').create(
this.coreContext.env.cliArgs,
config.toRaw(),
getLegacyRawConfig(config),
await basePathProxy$.toPromise()
);
}
private async createKbnServer(config: Config, deps: Deps) {
private async createKbnServer(config: Config, { elasticsearch, http, plugins }: Deps) {
const KbnServer = require('../../../legacy/server/kbn_server');
const kbnServer: LegacyKbnServer = new KbnServer(config.toRaw(), {
const kbnServer: LegacyKbnServer = new KbnServer(getLegacyRawConfig(config), {
// If core HTTP service is run we'll receive internal server reference and
// options that were used to create that server so that we can properly
// bridge with the "legacy" Kibana. If server isn't run (e.g. if process is
// managed by ClusterManager or optimizer) then we won't have that info,
// so we can't start "legacy" server either.
serverOptions:
deps.http !== undefined
http !== undefined
? {
...deps.http.options,
listener: this.setupProxyListener(deps.http.server),
...http.options,
listener: this.setupProxyListener(http.server),
}
: { autoListen: false },
handledConfigPaths: await this.coreContext.configService.getUsedPaths(),
plugins: deps.plugins,
elasticsearch,
plugins,
});
// The kbnWorkerType check is necessary to prevent the repl

View file

@ -51,7 +51,10 @@ const mockCollect = () => ({
});
export const logger = {
get: jest.fn(() => mockLog),
get: jest.fn((...context) => ({
context,
...mockLog,
})),
mockClear,
mockCollect,
mockLog,

View file

@ -22,6 +22,7 @@ import { BehaviorSubject } from 'rxjs';
import { CoreContext } from '../../types';
import { Config, ConfigService, Env, ObjectToConfigAdapter } from '../config';
import { getEnvOptions } from '../config/__mocks__/env';
import { ElasticsearchServiceStart } from '../elasticsearch';
import { logger } from '../logging/__mocks__';
import { Plugin, PluginManifest } from './plugin';
import { createPluginInitializerContext, createPluginStartContext } from './plugin_context';
@ -56,8 +57,10 @@ function createPluginManifest(manifestProps: Partial<PluginManifest> = {}): Plug
let configService: ConfigService;
let env: Env;
let coreContext: CoreContext;
let startDeps: { elasticsearch: ElasticsearchServiceStart };
beforeEach(() => {
env = Env.createDefault(getEnvOptions());
startDeps = { elasticsearch: { adminClient$: {}, dataClient$: {} } as any };
configService = new ConfigService(
new BehaviorSubject<Config>(new ObjectToConfigAdapter({ plugins: { initialize: true } })),
@ -96,7 +99,7 @@ test('`start` fails if `plugin` initializer is not exported', async () => {
);
await expect(
plugin.start(createPluginStartContext(coreContext, plugin), {})
plugin.start(createPluginStartContext(coreContext, startDeps, plugin), {})
).rejects.toMatchInlineSnapshot(
`[Error: Plugin "some-plugin-id" does not export "plugin" definition (plugin-without-initializer-path).]`
);
@ -111,7 +114,7 @@ test('`start` fails if plugin initializer is not a function', async () => {
);
await expect(
plugin.start(createPluginStartContext(coreContext, plugin), {})
plugin.start(createPluginStartContext(coreContext, startDeps, plugin), {})
).rejects.toMatchInlineSnapshot(
`[Error: Definition of plugin "some-plugin-id" should be a function (plugin-with-wrong-initializer-path).]`
);
@ -128,7 +131,7 @@ test('`start` fails if initializer does not return object', async () => {
mockPluginInitializer.mockReturnValue(null);
await expect(
plugin.start(createPluginStartContext(coreContext, plugin), {})
plugin.start(createPluginStartContext(coreContext, startDeps, plugin), {})
).rejects.toMatchInlineSnapshot(
`[Error: Initializer for plugin "some-plugin-id" is expected to return plugin instance, but returned "null".]`
);
@ -146,7 +149,7 @@ test('`start` fails if object returned from initializer does not define `start`
mockPluginInitializer.mockReturnValue(mockPluginInstance);
await expect(
plugin.start(createPluginStartContext(coreContext, plugin), {})
plugin.start(createPluginStartContext(coreContext, startDeps, plugin), {})
).rejects.toMatchInlineSnapshot(
`[Error: Instance of plugin "some-plugin-id" does not define "start" function.]`
);
@ -160,7 +163,7 @@ test('`start` initializes plugin and calls appropriate lifecycle hook', async ()
const mockPluginInstance = { start: jest.fn().mockResolvedValue({ contract: 'yes' }) };
mockPluginInitializer.mockReturnValue(mockPluginInstance);
const startContext = createPluginStartContext(coreContext, plugin);
const startContext = createPluginStartContext(coreContext, startDeps, plugin);
const startDependencies = { 'some-required-dep': { contract: 'no' } };
await expect(plugin.start(startContext, startDependencies)).resolves.toEqual({ contract: 'yes' });
@ -197,7 +200,7 @@ test('`stop` does nothing if plugin does not define `stop` function', async () =
);
mockPluginInitializer.mockReturnValue({ start: jest.fn() });
await plugin.start(createPluginStartContext(coreContext, plugin), {});
await plugin.start(createPluginStartContext(coreContext, startDeps, plugin), {});
await expect(plugin.stop()).resolves.toBeUndefined();
});
@ -212,7 +215,7 @@ test('`stop` calls `stop` defined by the plugin instance', async () => {
const mockPluginInstance = { start: jest.fn(), stop: jest.fn() };
mockPluginInitializer.mockReturnValue(mockPluginInstance);
await plugin.start(createPluginStartContext(coreContext, plugin), {});
await plugin.start(createPluginStartContext(coreContext, startDeps, plugin), {});
await expect(plugin.stop()).resolves.toBeUndefined();
expect(mockPluginInstance.stop).toHaveBeenCalledTimes(1);

View file

@ -21,9 +21,14 @@ import { Type } from '@kbn/config-schema';
import { Observable } from 'rxjs';
import { CoreContext } from '../../types';
import { ConfigWithSchema, EnvironmentMode } from '../config';
import { ClusterClient } from '../elasticsearch';
import { LoggerFactory } from '../logging';
import { Plugin, PluginManifest } from './plugin';
import { PluginsServiceStartDeps } from './plugins_service';
/**
 * Context that's available to plugins during the initialization stage.
*/
export interface PluginInitializerContext {
env: { mode: EnvironmentMode };
logger: LoggerFactory;
@ -37,8 +42,15 @@ export interface PluginInitializerContext {
};
}
// tslint:disable no-empty-interface
export interface PluginStartContext {}
/**
 * Context passed to the plugin's `start` method.
*/
export interface PluginStartContext {
elasticsearch: {
adminClient$: Observable<ClusterClient>;
dataClient$: Observable<ClusterClient>;
};
}
/**
* This returns a facade for `CoreContext` that will be exposed to the plugin initializer.
@ -104,11 +116,18 @@ export function createPluginInitializerContext(
*
* @param coreContext Kibana core context
* @param plugin The plugin we're building these values for.
 * @param deps Dependencies that the Plugins service gets during start.
* @internal
*/
export function createPluginStartContext<TPlugin, TPluginDependencies>(
coreContext: CoreContext,
deps: PluginsServiceStartDeps,
plugin: Plugin<TPlugin, TPluginDependencies>
): PluginStartContext {
return {};
return {
elasticsearch: {
adminClient$: deps.elasticsearch.adminClient$,
dataClient$: deps.elasticsearch.dataClient$,
},
};
}
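A hedged sketch of how a plugin could consume the clients exposed through `PluginStartContext`; the plugin class and the `first` import are illustrative and not part of this change:

import { first } from 'rxjs/operators';

class ExamplePlugin {
  public async start(core: PluginStartContext) {
    // Wait for the latest data-scoped cluster client created by the Elasticsearch service.
    const dataClient = await core.elasticsearch.dataClient$.pipe(first()).toPromise();
    return { dataClient };
  }

  public stop() {}
}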

View file

@ -17,6 +17,8 @@
* under the License.
*/
import { ElasticsearchServiceStart } from '../elasticsearch';
const mockPackage = new Proxy({ raw: {} as any }, { get: (obj, prop) => obj.raw[prop] });
jest.mock('../../../legacy/utils/package_json', () => ({ pkg: mockPackage }));
@ -42,6 +44,7 @@ let pluginsService: PluginsService;
let configService: ConfigService;
let env: Env;
let mockPluginSystem: jest.Mocked<PluginsSystem>;
let startDeps: { elasticsearch: ElasticsearchServiceStart };
beforeEach(() => {
mockPackage.raw = {
branch: 'feature-v1',
@ -54,6 +57,7 @@ beforeEach(() => {
};
env = Env.createDefault(getEnvOptions());
startDeps = { elasticsearch: { legacy: {} } as any };
configService = new ConfigService(
new BehaviorSubject<Config>(new ObjectToConfigAdapter({ plugins: { initialize: true } })),
@ -75,7 +79,7 @@ test('`start` throws if plugin has an invalid manifest', async () => {
plugin$: from([]),
});
await expect(pluginsService.start()).rejects.toMatchInlineSnapshot(`
await expect(pluginsService.start(startDeps)).rejects.toMatchInlineSnapshot(`
[Error: Failed to initialize plugins:
Invalid JSON (invalid-manifest, path-1)]
`);
@ -96,7 +100,7 @@ test('`start` throws if plugin required Kibana version is incompatible with the
plugin$: from([]),
});
await expect(pluginsService.start()).rejects.toMatchInlineSnapshot(`
await expect(pluginsService.start(startDeps)).rejects.toMatchInlineSnapshot(`
[Error: Failed to initialize plugins:
Incompatible version (incompatible-version, path-3)]
`);
@ -144,7 +148,7 @@ test('`start` throws if discovered plugins with conflicting names', async () =>
]),
});
await expect(pluginsService.start()).rejects.toMatchInlineSnapshot(
await expect(pluginsService.start(startDeps)).rejects.toMatchInlineSnapshot(
`[Error: Plugin with id "conflicting-id" is already registered!]`
);
@ -221,9 +225,10 @@ test('`start` properly detects plugins that should be disabled.', async () => {
]),
});
expect(await pluginsService.start()).toBeInstanceOf(Map);
expect(await pluginsService.start(startDeps)).toBeInstanceOf(Map);
expect(mockPluginSystem.addPlugin).not.toHaveBeenCalled();
expect(mockPluginSystem.startPlugins).toHaveBeenCalledTimes(1);
expect(mockPluginSystem.startPlugins).toHaveBeenCalledWith(startDeps);
expect(logger.mockCollect().info).toMatchInlineSnapshot(`
Array [
@ -286,7 +291,7 @@ test('`start` properly invokes `discover` and ignores non-critical errors.', asy
const pluginsStart = new Map();
mockPluginSystem.startPlugins.mockResolvedValue(pluginsStart);
const start = await pluginsService.start();
const start = await pluginsService.start(startDeps);
expect(start).toBe(pluginsStart);
expect(mockPluginSystem.addPlugin).toHaveBeenCalledTimes(2);

View file

@ -20,6 +20,7 @@
import { Observable } from 'rxjs';
import { filter, first, mergeMap, tap, toArray } from 'rxjs/operators';
import { CoreContext, CoreService } from '../../types';
import { ElasticsearchServiceStart } from '../elasticsearch';
import { Logger } from '../logging';
import { discover, PluginDiscoveryError, PluginDiscoveryErrorType } from './discovery';
import { Plugin, PluginName } from './plugin';
@ -29,6 +30,11 @@ import { PluginsSystem } from './plugins_system';
/** @internal */
export type PluginsServiceStart = Map<PluginName, unknown>;
/** @internal */
export interface PluginsServiceStartDeps {
elasticsearch: ElasticsearchServiceStart;
}
/** @internal */
export class PluginsService implements CoreService<PluginsServiceStart> {
private readonly log: Logger;
@ -39,7 +45,7 @@ export class PluginsService implements CoreService<PluginsServiceStart> {
this.pluginsSystem = new PluginsSystem(coreContext);
}
public async start() {
public async start(deps: PluginsServiceStartDeps) {
this.log.debug('Starting plugins service');
const config = await this.coreContext.configService
@ -56,7 +62,7 @@ export class PluginsService implements CoreService<PluginsServiceStart> {
return new Map();
}
return await this.pluginsSystem.startPlugins();
return await this.pluginsSystem.startPlugins(deps);
}
public async stop() {

View file

@ -27,6 +27,7 @@ jest.mock('./plugin_context', () => ({
import { BehaviorSubject } from 'rxjs';
import { Config, ConfigService, Env, ObjectToConfigAdapter } from '../config';
import { getEnvOptions } from '../config/__mocks__/env';
import { ElasticsearchServiceStart } from '../elasticsearch';
import { logger } from '../logging/__mocks__';
import { Plugin, PluginName } from './plugin';
import { PluginsSystem } from './plugins_system';
@ -59,8 +60,10 @@ let pluginsSystem: PluginsSystem;
let configService: ConfigService;
let env: Env;
let coreContext: CoreContext;
let startDeps: { elasticsearch: ElasticsearchServiceStart };
beforeEach(() => {
env = Env.createDefault(getEnvOptions());
startDeps = { elasticsearch: { legacy: {} } as any };
configService = new ConfigService(
new BehaviorSubject<Config>(new ObjectToConfigAdapter({ plugins: { initialize: true } })),
@ -78,7 +81,7 @@ afterEach(() => {
});
test('can be started even without plugins', async () => {
const pluginsStart = await pluginsSystem.startPlugins();
const pluginsStart = await pluginsSystem.startPlugins(startDeps);
expect(pluginsStart).toBeInstanceOf(Map);
expect(pluginsStart.size).toBe(0);
@ -87,7 +90,7 @@ test('can be started even without plugins', async () => {
test('`startPlugins` throws plugin has missing required dependency', async () => {
pluginsSystem.addPlugin(createPlugin('some-id', { required: ['missing-dep'] }));
await expect(pluginsSystem.startPlugins()).rejects.toMatchInlineSnapshot(
await expect(pluginsSystem.startPlugins(startDeps)).rejects.toMatchInlineSnapshot(
`[Error: Topological ordering of plugins did not complete, these edges could not be ordered: [["some-id",{}]]]`
);
});
@ -97,7 +100,7 @@ test('`startPlugins` throws if plugins have circular required dependency', async
pluginsSystem.addPlugin(createPlugin('depends-on-1', { required: ['depends-on-2'] }));
pluginsSystem.addPlugin(createPlugin('depends-on-2', { required: ['depends-on-1'] }));
await expect(pluginsSystem.startPlugins()).rejects.toMatchInlineSnapshot(
await expect(pluginsSystem.startPlugins(startDeps)).rejects.toMatchInlineSnapshot(
`[Error: Topological ordering of plugins did not complete, these edges could not be ordered: [["depends-on-1",{}],["depends-on-2",{}]]]`
);
});
@ -107,7 +110,7 @@ test('`startPlugins` throws if plugins have circular optional dependency', async
pluginsSystem.addPlugin(createPlugin('depends-on-1', { optional: ['depends-on-2'] }));
pluginsSystem.addPlugin(createPlugin('depends-on-2', { optional: ['depends-on-1'] }));
await expect(pluginsSystem.startPlugins()).rejects.toMatchInlineSnapshot(
await expect(pluginsSystem.startPlugins(startDeps)).rejects.toMatchInlineSnapshot(
`[Error: Topological ordering of plugins did not complete, these edges could not be ordered: [["depends-on-1",{}],["depends-on-2",{}]]]`
);
});
@ -118,7 +121,7 @@ test('`startPlugins` ignores missing optional dependency', async () => {
pluginsSystem.addPlugin(plugin);
expect([...(await pluginsSystem.startPlugins())]).toMatchInlineSnapshot(`
expect([...(await pluginsSystem.startPlugins(startDeps))]).toMatchInlineSnapshot(`
Array [
Array [
"some-id",
@ -153,9 +156,11 @@ test('`startPlugins` correctly orders plugins and returns exposed values', async
pluginsSystem.addPlugin(plugin);
});
mockCreatePluginStartContext.mockImplementation((_, plugin) => startContextMap.get(plugin.name));
mockCreatePluginStartContext.mockImplementation((context, deps, plugin) =>
startContextMap.get(plugin.name)
);
expect([...(await pluginsSystem.startPlugins())]).toMatchInlineSnapshot(`
expect([...(await pluginsSystem.startPlugins(startDeps))]).toMatchInlineSnapshot(`
Array [
Array [
"order-0",
@ -181,7 +186,7 @@ Array [
`);
for (const [plugin, deps] of plugins) {
expect(mockCreatePluginStartContext).toHaveBeenCalledWith(coreContext, plugin);
expect(mockCreatePluginStartContext).toHaveBeenCalledWith(coreContext, startDeps, plugin);
expect(plugin.start).toHaveBeenCalledTimes(1);
expect(plugin.start).toHaveBeenCalledWith(startContextMap.get(plugin.name), deps);
}
@ -198,7 +203,7 @@ test('`startPlugins` only starts plugins that have server side', async () => {
pluginsSystem.addPlugin(plugin);
});
expect([...(await pluginsSystem.startPlugins())]).toMatchInlineSnapshot(`
expect([...(await pluginsSystem.startPlugins(startDeps))]).toMatchInlineSnapshot(`
Array [
Array [
"order-1",
@ -211,9 +216,17 @@ Array [
]
`);
expect(mockCreatePluginStartContext).toHaveBeenCalledWith(coreContext, firstPluginToRun);
expect(mockCreatePluginStartContext).toHaveBeenCalledWith(
coreContext,
startDeps,
firstPluginToRun
);
expect(mockCreatePluginStartContext).not.toHaveBeenCalledWith(coreContext, secondPluginNotToRun);
expect(mockCreatePluginStartContext).toHaveBeenCalledWith(coreContext, thirdPluginToRun);
expect(mockCreatePluginStartContext).toHaveBeenCalledWith(
coreContext,
startDeps,
thirdPluginToRun
);
expect(firstPluginToRun.start).toHaveBeenCalledTimes(1);
expect(secondPluginNotToRun.start).not.toHaveBeenCalled();

View file

@ -21,6 +21,7 @@ import { CoreContext } from '../../types';
import { Logger } from '../logging';
import { Plugin, PluginName } from './plugin';
import { createPluginStartContext } from './plugin_context';
import { PluginsServiceStartDeps } from './plugins_service';
/** @internal */
export class PluginsSystem {
@ -36,7 +37,7 @@ export class PluginsSystem {
this.plugins.set(plugin.name, plugin);
}
public async startPlugins() {
public async startPlugins(deps: PluginsServiceStartDeps) {
const exposedValues = new Map<PluginName, unknown>();
if (this.plugins.size === 0) {
return exposedValues;
@ -67,7 +68,7 @@ export class PluginsSystem {
exposedValues.set(
pluginName,
await plugin.start(
createPluginStartContext(this.coreContext, plugin),
createPluginStartContext(this.coreContext, deps, plugin),
exposedDependencyValues
)
);

View file

@ -17,17 +17,18 @@
* under the License.
*/
export function pick<T extends { [k: string]: any }, K extends keyof T>(
export function pick<T extends Record<string, unknown>, K extends keyof T>(
obj: T,
keys: K[]
): Pick<T, K> {
const newObj = keys.reduce(
(acc, val) => {
acc[val] = obj[val];
return keys.reduce(
(acc, key) => {
if (obj.hasOwnProperty(key)) {
acc[key] = obj[key];
}
return acc;
},
{} as { [k: string]: any }
{} as Pick<T, K>
);
return newObj as Pick<T, K>;
}
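// For example, pick({ a: 1, b: 2, c: 3 }, ['a', 'c']) returns { a: 1, c: 3 }; keys that
// are not own properties of the source object are skipped by the hasOwnProperty guard.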

View file

@ -18,13 +18,14 @@
*/
import Boom from 'boom';
import { first } from 'rxjs/operators';
import { resolve, join, sep } from 'path';
import url from 'url';
import { has, isEmpty, head } from 'lodash';
import { has, isEmpty, head, pick } from 'lodash';
import { resolveApi } from './api_server/server';
import { addExtensionSpecFilePath } from './api_server/spec';
import setHeaders from '../elasticsearch/lib/set_headers';
import { setHeaders } from './server/set_headers';
import {
ProxyConfigCollection,
@ -32,10 +33,26 @@ import {
createProxyRoute
} from './server';
function filterHeaders(originalHeaders, headersToKeep) {
const normalizeHeader = function (header) {
if (!header) {
return '';
}
header = header.toString();
return header.trim().toLowerCase();
};
// Normalize list of headers we want to allow in upstream request
const headersToKeepNormalized = headersToKeep.map(normalizeHeader);
return pick(originalHeaders, headersToKeepNormalized);
}
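// For example (Node lowercases incoming header names):
// filterHeaders({ authorization: 'Basic abc', cookie: 'x' }, ['Authorization'])
// returns { authorization: 'Basic abc' }.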
export default function (kibana) {
const modules = resolve(__dirname, 'public/webpackShims/');
const src = resolve(__dirname, 'public/src/');
let defaultVars;
const apps = [];
return new kibana.Plugin({
id: 'console',
@ -79,24 +96,29 @@ export default function (kibana) {
];
},
init: function (server, options) {
async init(server, options) {
server.expose('addExtensionSpecFilePath', addExtensionSpecFilePath);
if (options.ssl && options.ssl.verify) {
throw new Error('sense.ssl.verify is no longer supported.');
}
const config = server.config();
const { filterHeaders } = server.plugins.elasticsearch;
const legacyEsConfig = await server.core.elasticsearch.legacy.config$.pipe(first()).toPromise();
const proxyConfigCollection = new ProxyConfigCollection(options.proxyConfig);
const proxyPathFilters = options.proxyFilter.map(str => new RegExp(str));
defaultVars = {
elasticsearchUrl: url.format(
Object.assign(url.parse(head(legacyEsConfig.hosts)), { auth: false })
)
};
server.route(createProxyRoute({
baseUrl: head(config.get('elasticsearch.hosts')),
baseUrl: head(legacyEsConfig.hosts),
pathFilters: proxyPathFilters,
getConfigForReq(req, uri) {
const whitelist = config.get('elasticsearch.requestHeadersWhitelist');
const filteredHeaders = filterHeaders(req.headers, whitelist);
const headers = setHeaders(filteredHeaders, config.get('elasticsearch.customHeaders'));
const filteredHeaders = filterHeaders(req.headers, legacyEsConfig.requestHeadersWhitelist);
const headers = setHeaders(filteredHeaders, legacyEsConfig.customHeaders);
if (!isEmpty(config.get('console.proxyConfig'))) {
return {
@ -106,7 +128,7 @@ export default function (kibana) {
}
return {
...getElasticsearchProxyConfig(server),
...getElasticsearchProxyConfig(legacyEsConfig),
headers,
};
}
@ -132,20 +154,7 @@ export default function (kibana) {
devTools: ['plugins/console/console'],
styleSheetPaths: resolve(__dirname, 'public/index.scss'),
injectDefaultVars(server) {
return {
elasticsearchUrl: url.format(
Object.assign(
url.parse(
head(
server.config().get('elasticsearch.hosts')
)
),
{ auth: false }
)
)
};
},
injectDefaultVars: () => defaultVars,
noParse: [
join(modules, 'ace' + sep),

View file

@ -18,77 +18,77 @@
*/
import expect from 'expect.js';
import moment from 'moment';
import fs from 'fs';
import { promisify } from 'bluebird';
import { getElasticsearchProxyConfig } from '../elasticsearch_proxy_config';
import https from 'https';
import http from 'http';
import sinon from 'sinon';
const readFileAsync = promisify(fs.readFile, fs);
const getDefaultElasticsearchConfig = () => {
return {
hosts: ['http://localhost:9200', 'http://192.168.1.1:1234'],
requestTimeout: moment.duration(30000),
ssl: { certificateAuthorities: [], verificationMode: 'full' },
};
};
describe('plugins/console', function () {
describe('#getElasticsearchProxyConfig', function () {
let server;
beforeEach(function () {
const stub = sinon.stub();
server = {
config() {
return {
get: stub
};
}
};
server.config().get.withArgs('elasticsearch.hosts').returns(['http://localhost:9200']);
server.config().get.withArgs('elasticsearch.ssl.verificationMode').returns('full');
});
const setElasticsearchConfig = (key, value) => {
server.config().get.withArgs(`elasticsearch.${key}`).returns(value);
};
it('sets timeout', function () {
const value = 1000;
setElasticsearchConfig('requestTimeout', value);
const proxyConfig = getElasticsearchProxyConfig(server);
const proxyConfig = getElasticsearchProxyConfig({
...getDefaultElasticsearchConfig(),
requestTimeout: moment.duration(value),
});
expect(proxyConfig.timeout).to.be(value);
});
it(`uses https.Agent when url's protocol is https`, function () {
setElasticsearchConfig('hosts', ['https://localhost:9200']);
const { agent } = getElasticsearchProxyConfig(server);
const { agent } = getElasticsearchProxyConfig({
...getDefaultElasticsearchConfig(),
hosts: ['https://localhost:9200'],
});
expect(agent).to.be.a(https.Agent);
});
it(`uses http.Agent when url's protocol is http`, function () {
setElasticsearchConfig('hosts', ['http://localhost:9200']);
const { agent } = getElasticsearchProxyConfig(server);
const { agent } = getElasticsearchProxyConfig(getDefaultElasticsearchConfig());
expect(agent).to.be.a(http.Agent);
});
describe('ssl', function () {
let config;
beforeEach(function () {
setElasticsearchConfig('hosts', ['https://localhost:9200']);
config = {
...getDefaultElasticsearchConfig(),
hosts: ['https://localhost:9200'],
};
});
it('sets rejectUnauthorized to false when verificationMode is none', function () {
setElasticsearchConfig('ssl.verificationMode', 'none');
const { agent } = getElasticsearchProxyConfig(server);
const { agent } = getElasticsearchProxyConfig({
...config,
ssl: { ...config.ssl, verificationMode: 'none' }
});
expect(agent.options.rejectUnauthorized).to.be(false);
});
it('sets rejectUnauthorized to true when verificationMode is certificate', function () {
setElasticsearchConfig('ssl.verificationMode', 'certificate');
const { agent } = getElasticsearchProxyConfig(server);
const { agent } = getElasticsearchProxyConfig({
...config,
ssl: { ...config.ssl, verificationMode: 'certificate' }
});
expect(agent.options.rejectUnauthorized).to.be(true);
});
it('sets checkServerIdentity to not check hostname when verificationMode is certificate', function () {
setElasticsearchConfig('ssl.verificationMode', 'certificate');
const { agent } = getElasticsearchProxyConfig(server);
const { agent } = getElasticsearchProxyConfig({
...config,
ssl: { ...config.ssl, verificationMode: 'certificate' }
});
const cert = {
subject: {
@ -102,44 +102,60 @@ describe('plugins/console', function () {
});
it('sets rejectUnauthorized to true when verificationMode is full', function () {
setElasticsearchConfig('ssl.verificationMode', 'full');
const { agent } = getElasticsearchProxyConfig(server);
const { agent } = getElasticsearchProxyConfig({
...config,
ssl: { ...config.ssl, verificationMode: 'full' }
});
expect(agent.options.rejectUnauthorized).to.be(true);
});
it(`doesn't set checkServerIdentity when verificationMode is full`, function () {
setElasticsearchConfig('ssl.verificationMode', 'full');
const { agent } = getElasticsearchProxyConfig(server);
const { agent } = getElasticsearchProxyConfig({
...config,
ssl: { ...config.ssl, verificationMode: 'full' }
});
expect(agent.options.checkServerIdentity).to.be(undefined);
});
it(`sets ca when certificateAuthorities are specified`, function () {
setElasticsearchConfig('ssl.certificateAuthorities', [__dirname + '/fixtures/ca.crt']);
const { agent } = getElasticsearchProxyConfig({
...config,
ssl: { ...config.ssl, certificateAuthorities: [__dirname + '/fixtures/ca.crt'] }
});
const { agent } = getElasticsearchProxyConfig(server);
expect(agent.options.ca).to.contain('test ca certificate\n');
});
describe('when alwaysPresentCertificate is false', () => {
it(`doesn't set cert and key when certificate and key paths are specified`, function () {
setElasticsearchConfig('ssl.alwaysPresentCertificate', false);
setElasticsearchConfig('ssl.certificate', __dirname + '/fixtures/cert.crt');
setElasticsearchConfig('ssl.key', __dirname + '/fixtures/cert.key');
const { agent } = getElasticsearchProxyConfig({
...config,
ssl: {
...config.ssl,
alwaysPresentCertificate: false,
certificate: __dirname + '/fixtures/cert.crt',
key: __dirname + '/fixtures/cert.key',
}
});
const { agent } = getElasticsearchProxyConfig(server);
expect(agent.options.cert).to.be(undefined);
expect(agent.options.key).to.be(undefined);
});
it(`doesn't set passphrase when certificate, key and keyPassphrase are specified`, function () {
setElasticsearchConfig('ssl.alwaysPresentCertificate', false);
setElasticsearchConfig('ssl.certificate', __dirname + '/fixtures/cert.crt');
setElasticsearchConfig('ssl.key', __dirname + '/fixtures/cert.key');
setElasticsearchConfig('ssl.keyPassphrase', 'secret');
const { agent } = getElasticsearchProxyConfig({
...config,
ssl: {
...config.ssl,
alwaysPresentCertificate: false,
certificate: __dirname + '/fixtures/cert.crt',
key: __dirname + '/fixtures/cert.key',
keyPassphrase: 'secret',
}
});
const { agent } = getElasticsearchProxyConfig(server);
expect(agent.options.passphrase).to.be(undefined);
});
});
@ -148,43 +164,64 @@ describe('plugins/console', function () {
it(`sets cert and key when certificate and key paths are specified`, async function () {
const certificatePath = __dirname + '/fixtures/cert.crt';
const keyPath = __dirname + '/fixtures/cert.key';
setElasticsearchConfig('ssl.alwaysPresentCertificate', true);
setElasticsearchConfig('ssl.certificate', certificatePath);
setElasticsearchConfig('ssl.key', keyPath);
const { agent } = getElasticsearchProxyConfig(server);
const { agent } = getElasticsearchProxyConfig({
...config,
ssl: {
...config.ssl,
alwaysPresentCertificate: true,
certificate: certificatePath,
key: keyPath,
}
});
expect(agent.options.cert).to.be(await readFileAsync(certificatePath, 'utf8'));
expect(agent.options.key).to.be(await readFileAsync(keyPath, 'utf8'));
});
it(`sets passphrase when certificate, key and keyPassphrase are specified`, function () {
setElasticsearchConfig('ssl.alwaysPresentCertificate', true);
setElasticsearchConfig('ssl.certificate', __dirname + '/fixtures/cert.crt');
setElasticsearchConfig('ssl.key', __dirname + '/fixtures/cert.key');
setElasticsearchConfig('ssl.keyPassphrase', 'secret');
const { agent } = getElasticsearchProxyConfig({
...config,
ssl: {
...config.ssl,
alwaysPresentCertificate: true,
certificate: __dirname + '/fixtures/cert.crt',
key: __dirname + '/fixtures/cert.key',
keyPassphrase: 'secret',
}
});
const { agent } = getElasticsearchProxyConfig(server);
expect(agent.options.passphrase).to.be('secret');
});
it(`doesn't set cert when only certificate path is specified`, async function () {
const certificatePath = __dirname + '/fixtures/cert.crt';
setElasticsearchConfig('ssl.alwaysPresentCertificate', true);
setElasticsearchConfig('ssl.certificate', certificatePath);
setElasticsearchConfig('ssl.key', undefined);
const { agent } = getElasticsearchProxyConfig({
...config,
ssl: {
...config.ssl,
alwaysPresentCertificate: true,
certificate: certificatePath,
key: undefined,
}
});
const { agent } = getElasticsearchProxyConfig(server);
expect(agent.options.cert).to.be(undefined);
expect(agent.options.key).to.be(undefined);
});
it(`doesn't set key when only key path is specified`, async function () {
const keyPath = __dirname + '/fixtures/cert.key';
setElasticsearchConfig('ssl.alwaysPresentCertificate', true);
setElasticsearchConfig('ssl.certificate', undefined);
setElasticsearchConfig('ssl.key', keyPath);
const { agent } = getElasticsearchProxyConfig({
...config,
ssl: {
...config.ssl,
alwaysPresentCertificate: true,
certificate: undefined,
key: keyPath,
}
});
const { agent } = getElasticsearchProxyConfig(server);
expect(agent.options.cert).to.be(undefined);
expect(agent.options.key).to.be(undefined);
});

View file

@ -0,0 +1,55 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import expect from 'expect.js';
import { setHeaders } from '../set_headers';
describe('#set_headers', function () {
it('throws if not given an object as the first argument', function () {
const fn = () => setHeaders(null, {});
expect(fn).to.throwError();
});
it('throws if not given an object as the second argument', function () {
const fn = () => setHeaders({}, null);
expect(fn).to.throwError();
});
it('returns a new object', function () {
const originalHeaders = {};
const newHeaders = {};
const returnedHeaders = setHeaders(originalHeaders, newHeaders);
expect(returnedHeaders).not.to.be(originalHeaders);
expect(returnedHeaders).not.to.be(newHeaders);
});
it('returns object with newHeaders merged with originalHeaders', function () {
const originalHeaders = { foo: 'bar' };
const newHeaders = { one: 'two' };
const returnedHeaders = setHeaders(originalHeaders, newHeaders);
expect(returnedHeaders).to.eql({ foo: 'bar', one: 'two' });
});
it('returns object where newHeaders takes precedence for any matching keys', function () {
const originalHeaders = { foo: 'bar' };
const newHeaders = { one: 'two', foo: 'notbar' };
const returnedHeaders = setHeaders(originalHeaders, newHeaders);
expect(returnedHeaders).to.eql({ foo: 'notbar', one: 'two' });
});
});
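A small usage sketch, assuming only what the tests above show: setHeaders is now a named export that returns a new merged object in which the second argument wins on conflicting keys. The header values are illustrative.
import { setHeaders } from '../set_headers';

const merged = setHeaders({ authorization: 'Basic Zm9vOmJhcg==' }, { 'x-custom': 'console' });
// => { authorization: 'Basic Zm9vOmJhcg==', 'x-custom': 'console' }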

View file

@ -25,16 +25,13 @@ import url from 'url';
const readFile = (file) => readFileSync(file, 'utf8');
const createAgent = (server) => {
const config = server.config();
const target = url.parse(
_.head(config.get('elasticsearch.hosts'))
);
const createAgent = (legacyConfig) => {
const target = url.parse(_.head(legacyConfig.hosts));
if (!/^https/.test(target.protocol)) return new http.Agent();
const agentOptions = {};
const verificationMode = config.get('elasticsearch.ssl.verificationMode');
const verificationMode = legacyConfig.ssl && legacyConfig.ssl.verificationMode;
switch (verificationMode) {
case 'none':
agentOptions.rejectUnauthorized = false;
@ -52,26 +49,27 @@ const createAgent = (server) => {
throw new Error(`Unknown ssl verificationMode: ${verificationMode}`);
}
if (_.size(config.get('elasticsearch.ssl.certificateAuthorities'))) {
agentOptions.ca = config.get('elasticsearch.ssl.certificateAuthorities').map(readFile);
if (legacyConfig.ssl && legacyConfig.ssl.certificateAuthorities.length > 0) {
agentOptions.ca = legacyConfig.ssl.certificateAuthorities.map(readFile);
}
if (
config.get('elasticsearch.ssl.alwaysPresentCertificate') &&
config.get('elasticsearch.ssl.certificate') &&
config.get('elasticsearch.ssl.key')
legacyConfig.ssl &&
legacyConfig.ssl.alwaysPresentCertificate &&
legacyConfig.ssl.certificate &&
legacyConfig.ssl.key
) {
agentOptions.cert = readFile(config.get('elasticsearch.ssl.certificate'));
agentOptions.key = readFile(config.get('elasticsearch.ssl.key'));
agentOptions.passphrase = config.get('elasticsearch.ssl.keyPassphrase');
agentOptions.cert = readFile(legacyConfig.ssl.certificate);
agentOptions.key = readFile(legacyConfig.ssl.key);
agentOptions.passphrase = legacyConfig.ssl.keyPassphrase;
}
return new https.Agent(agentOptions);
};
export const getElasticsearchProxyConfig = (server) => {
export const getElasticsearchProxyConfig = (legacyConfig) => {
return {
timeout: server.config().get('elasticsearch.requestTimeout'),
agent: createAgent(server)
timeout: legacyConfig.requestTimeout.asMilliseconds(),
agent: createAgent(legacyConfig)
};
};
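A minimal sketch, not part of the commit, summarizing how ssl.verificationMode maps to https.Agent options in createAgent above (the middle of the switch is elided by the hunk); the helper name is hypothetical.
function agentOptionsForVerificationMode(verificationMode: string) {
  switch (verificationMode) {
    case 'none':
      // trust any certificate
      return { rejectUnauthorized: false };
    case 'certificate':
      // verify the certificate chain, but skip hostname verification
      return { rejectUnauthorized: true, checkServerIdentity: () => undefined };
    case 'full':
      // verify both the certificate chain and the hostname
      return { rejectUnauthorized: true };
    default:
      throw new Error(`Unknown ssl verificationMode: ${verificationMode}`);
  }
}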

View file

@ -19,7 +19,7 @@
import { isPlainObject } from 'lodash';
export default function setHeaders(originalHeaders, newHeaders) {
export function setHeaders(originalHeaders, newHeaders) {
if (!isPlainObject(originalHeaders)) {
throw new Error(`Expected originalHeaders to be an object, but ${typeof originalHeaders} given`);
}

View file

@ -165,15 +165,6 @@ interface RequestHeaders {
[name: string]: string;
}
interface ElasticsearchClientLogging {
error(err: Error): void;
warning(message: string): void;
trace(method: string, options: { path: string }, query?: string, statusCode?: number): void;
info(): void;
debug(): void;
close(): void;
}
interface AssistantAPIClientParams extends GenericParams {
path: '/_migration/assistance';
method: 'GET';
@ -531,9 +522,7 @@ export interface CallCluster {
}
export interface ElasticsearchPlugin {
ElasticsearchClientLogging: ElasticsearchClientLogging;
getCluster(name: string): Cluster;
createCluster(name: string, config: ClusterConfig): Cluster;
filterHeaders(originalHeaders: RequestHeaders, headersToKeep: string[]): void;
waitUntilReady(): Promise<void>;
}

View file

@ -17,80 +17,91 @@
* under the License.
*/
import { combineLatest } from 'rxjs';
import { first, map } from 'rxjs/operators';
import healthCheck from './lib/health_check';
import { createDataCluster } from './lib/create_data_cluster';
import { createAdminCluster } from './lib/create_admin_cluster';
import { clientLogger } from './lib/client_logger';
import { createClusters } from './lib/create_clusters';
import { Cluster } from './lib/cluster';
import { createProxy } from './lib/create_proxy';
import filterHeaders from './lib/filter_headers';
import { DEFAULT_API_VERSION } from './lib/default_api_version';
const DEFAULT_REQUEST_HEADERS = ['authorization'];
export default function (kibana) {
let defaultVars;
return new kibana.Plugin({
require: ['kibana'],
config(Joi) {
const sslSchema = Joi.object({
verificationMode: Joi.string().valid('none', 'certificate', 'full').default('full'),
certificateAuthorities: Joi.array().single().items(Joi.string()),
certificate: Joi.string(),
key: Joi.string(),
keyPassphrase: Joi.string(),
alwaysPresentCertificate: Joi.boolean().default(false),
}).default();
return Joi.object({
enabled: Joi.boolean().default(true),
sniffOnStart: Joi.boolean().default(false),
sniffInterval: Joi.number().allow(false).default(false),
sniffOnConnectionFault: Joi.boolean().default(false),
hosts: Joi.array().items(Joi.string().uri({ scheme: ['http', 'https'] })).single().default('http://localhost:9200'),
preserveHost: Joi.boolean().default(true),
username: Joi.string(),
password: Joi.string(),
shardTimeout: Joi.number().default(30000),
requestTimeout: Joi.number().default(30000),
requestHeadersWhitelist: Joi.array().items().single().default(DEFAULT_REQUEST_HEADERS),
customHeaders: Joi.object().default({}),
pingTimeout: Joi.number().default(Joi.ref('requestTimeout')),
startupTimeout: Joi.number().default(5000),
logQueries: Joi.boolean().default(false),
ssl: sslSchema,
apiVersion: Joi.string().default(DEFAULT_API_VERSION),
healthCheck: Joi.object({
delay: Joi.number().default(2500)
}).default(),
}).default();
},
uiExports: { injectDefaultVars: () => defaultVars },
uiExports: {
injectDefaultVars(server, options) {
return {
esRequestTimeout: options.requestTimeout,
esShardTimeout: options.shardTimeout,
esApiVersion: options.apiVersion,
};
}
},
async init(server) {
// All methods that the ES plugin exposes are synchronous, so we take the first
// value from each observable here, which lets us create cluster clients and
// return synchronously afterwards.
const [esConfig, adminCluster, dataCluster] = await combineLatest(
server.core.elasticsearch.legacy.config$,
server.core.elasticsearch.adminClient$,
server.core.elasticsearch.dataClient$
).pipe(
first(),
map(([config, adminClusterClient, dataClusterClient]) => [
config,
new Cluster(adminClusterClient),
new Cluster(dataClusterClient)
])
).toPromise();
init(server) {
const clusters = createClusters(server);
defaultVars = {
esRequestTimeout: esConfig.requestTimeout.asMilliseconds(),
esShardTimeout: esConfig.shardTimeout.asMilliseconds(),
esApiVersion: esConfig.apiVersion,
};
server.expose('getCluster', clusters.get);
server.expose('createCluster', clusters.create);
const clusters = new Map();
server.expose('getCluster', (name) => {
if (name === 'admin') {
return adminCluster;
}
server.expose('filterHeaders', filterHeaders);
server.expose('ElasticsearchClientLogging', clientLogger(server));
if (name === 'data') {
return dataCluster;
}
createDataCluster(server);
createAdminCluster(server);
return clusters.get(name);
});
server.expose('createCluster', (name, clientConfig = {}) => {
// NOTE: Because the `admin` and `data` clients provided by the core are not stored
// in the `clusters` map, it's implicitly possible to create custom `data` and
// `admin` clients here. This is allowed intentionally to support the custom `admin`
// cluster client created by the x-pack/monitoring bulk uploader. We should forbid
// that as soon as the monitoring bulk uploader is refactored, see
// https://github.com/elastic/kibana/issues/31934.
if (clusters.has(name)) {
throw new Error(`cluster '${name}' already exists`);
}
// We fill all the missing properties in the `clientConfig` using the default
// Elasticsearch config so that we don't depend on default values set and
// controlled by the underlying Elasticsearch JS client.
const cluster = new Cluster(server.core.elasticsearch.createClient(name, {
...esConfig,
...clientConfig,
}));
clusters.set(name, cluster);
return cluster;
});
server.events.on('stop', () => {
for (const cluster of clusters.values()) {
cluster.close();
}
clusters.clear();
});
createProxy(server);
// Set up the health check service and start it.
const { start, waitUntilReady } = healthCheck(this, server);
const { start, waitUntilReady } = healthCheck(this, server, esConfig.healthCheckDelay.asMilliseconds());
server.expose('waitUntilReady', waitUntilReady);
start();
}
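A hedged sketch of how a downstream legacy plugin can consume the API re-exposed by this init; it assumes it runs inside that plugin's async init with access to the Hapi `server`, and the 'monitoring' cluster name and client config overrides are illustrative only.
import moment from 'moment';

const { elasticsearch } = server.plugins;

// Block until the health check reports Elasticsearch as available.
await elasticsearch.waitUntilReady();

// Core-backed clusters are returned directly.
const adminCluster = elasticsearch.getCluster('admin');
await adminCluster.callWithInternalUser('ping');

// Custom clusters are created through the new Elasticsearch service and
// closed automatically when the server stops.
const customCluster = elasticsearch.createCluster('monitoring', {
  requestTimeout: moment.duration(60000),
});
await customCluster.callWithInternalUser('cluster.health');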

View file

@ -1,166 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import expect from 'expect.js';
import { Cluster } from '../cluster';
import sinon from 'sinon';
import { errors as esErrors } from 'elasticsearch';
import { set, partial } from 'lodash';
describe('plugins/elasticsearch', function () {
describe('cluster', function () {
let cluster;
const config = {
url: 'http://localhost:9200',
ssl: { verificationMode: 'full' },
requestHeadersWhitelist: [ 'authorization' ]
};
beforeEach(() => {
cluster = new Cluster(config);
});
it('persists the config', () => {
expect(cluster._config).to.eql(config);
});
it('exposes error definitions', () => {
expect(cluster.errors).to.be(esErrors);
});
it('closes the clients', () => {
cluster._client.close = sinon.spy();
cluster._noAuthClient.close = sinon.spy();
cluster.close();
sinon.assert.calledOnce(cluster._client.close);
sinon.assert.calledOnce(cluster._noAuthClient.close);
});
it('closes clients created with createClient', () => {
const client = cluster.createClient();
sinon.stub(client, 'close');
cluster.close();
sinon.assert.calledOnce(client.close);
});
it('protects the config from changes', () => {
const localRequestHeadersWhitelist = cluster.getRequestHeadersWhitelist();
expect(localRequestHeadersWhitelist.length).to.not.equal(config.requestHeadersWhitelist);
});
describe('callWithInternalUser', () => {
let client;
beforeEach(() => {
client = cluster._client = sinon.stub();
set(client, 'nodes.info', sinon.stub().returns(Promise.resolve()));
});
it('should return a function', () => {
expect(cluster.callWithInternalUser).to.be.a('function');
});
it('throws an error for an invalid endpoint', () => {
const fn = partial(cluster.callWithInternalUser, 'foo');
expect(fn).to.throwException(/called with an invalid endpoint: foo/);
});
it('calls the client with params', () => {
const params = { foo: 'Foo' };
cluster.callWithInternalUser('nodes.info', params);
sinon.assert.calledOnce(client.nodes.info);
expect(client.nodes.info.getCall(0).args[0]).to.eql(params);
});
});
describe('callWithRequest', () => {
let client;
beforeEach(() => {
client = cluster._noAuthClient = sinon.stub();
set(client, 'nodes.info', sinon.stub().returns(Promise.resolve()));
});
it('should return a function', () => {
expect(cluster.callWithRequest).to.be.a('function');
});
it('throws an error for an invalid endpoint', () => {
const fn = partial(cluster.callWithRequest, {}, 'foo');
expect(fn).to.throwException(/called with an invalid endpoint: foo/);
});
it('calls the client with params', () => {
const params = { foo: 'Foo' };
cluster.callWithRequest({}, 'nodes.info', params);
sinon.assert.calledOnce(client.nodes.info);
expect(client.nodes.info.getCall(0).args[0]).to.eql(params);
});
it('passes only whitelisted headers', () => {
const headers = { authorization: 'Basic TEST' };
const request = {
headers: {
...headers,
foo: 'Foo'
}
};
cluster.callWithRequest(request, 'nodes.info');
sinon.assert.calledOnce(client.nodes.info);
expect(client.nodes.info.getCall(0).args[0]).to.eql({
headers: headers
});
});
describe('wrap401Errors', () => {
let handler;
let error;
beforeEach(() => {
error = new Error('Authentication required');
error.statusCode = 401;
handler = sinon.stub();
});
it('ensures WWW-Authenticate header', async () => {
set(client, 'mock.401', sinon.stub().returns(Promise.reject(error)));
await cluster.callWithRequest({}, 'mock.401', {}, { wrap401Errors: true }).catch(handler);
sinon.assert.calledOnce(handler);
expect(handler.getCall(0).args[0].output.headers['WWW-Authenticate']).to.eql('Basic realm="Authorization Required"');
});
it('persists WWW-Authenticate header', async () => {
set(error, 'body.error.header[WWW-Authenticate]', 'Basic realm="Test"');
set(client, 'mock.401', sinon.stub().returns(Promise.reject(error)));
await cluster.callWithRequest({}, 'mock.401', {}, { wrap401Errors: true }).catch(handler);
sinon.assert.calledOnce(handler);
expect(handler.getCall(0).args[0].output.headers['WWW-Authenticate']).to.eql('Basic realm="Test"');
});
});
});
});
});

View file

@ -1,75 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import expect from 'expect.js';
import sinon from 'sinon';
import { set, get, partial } from 'lodash';
import { createAdminCluster } from '../create_admin_cluster';
describe('plugins/elasticsearch', function () {
describe('create_admin_cluster', function () {
let cluster;
let server;
beforeEach(() => {
const config = {
elasticsearch: {
url: 'http://localhost:9200',
logQueries: true
}
};
server = sinon.spy();
cluster = {
close: sinon.spy()
};
set(server, 'plugins.elasticsearch.createCluster', sinon.mock().returns(cluster));
set(server, 'on', sinon.spy());
server.config = () => {
return { get: partial(get, config) };
};
createAdminCluster(server);
});
it('creates the cluster', () => {
const { createCluster } = server.plugins.elasticsearch;
sinon.assert.calledOnce(createCluster);
expect(createCluster.getCall(0).args[0]).to.eql('admin');
expect(createCluster.getCall(0).args[1].url).to.eql('http://localhost:9200');
});
it('sets client logger for cluster options', () => {
const { createCluster } = server.plugins.elasticsearch;
const firstCall = createCluster.getCall(0);
const Log = firstCall.args[1].log;
const logger = new Log;
sinon.assert.calledOnce(createCluster);
expect(firstCall.args[0]).to.eql('admin');
expect(firstCall.args[1].url).to.eql('http://localhost:9200');
expect(logger.tags).to.eql(['admin']);
expect(logger.logQueries).to.eql(true);
});
});
});

View file

@ -1,57 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import expect from 'expect.js';
import createAgent from '../create_agent';
import https from 'https';
import http from 'http';
describe('plugins/elasticsearch', function () {
describe('lib/create_agent', function () {
it(`uses http.Agent when url's protocol is http`, function () {
const config = {
url: 'http://localhost:9200'
};
const agent = createAgent(config);
expect(agent).to.be.a(http.Agent);
});
it(`throws an Error when url's protocol is https and ssl.verificationMode isn't set`, function () {
const config = {
url: 'https://localhost:9200'
};
expect(createAgent).withArgs(config).to.throwException();
});
it(`uses https.Agent when url's protocol is https and ssl.verificationMode is full`, function () {
const config = {
url: 'https://localhost:9200',
ssl: {
verificationMode: 'full'
}
};
const agent = createAgent(config);
expect(agent).to.be.a(https.Agent);
});
});
});

View file

@ -1,101 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import expect from 'expect.js';
import { createClusters } from '../create_clusters';
import sinon from 'sinon';
import { partial } from 'lodash';
import Hapi from 'hapi';
import * as ClusterNS from '../cluster';
describe('plugins/elasticsearch', function () {
describe('createClusters', function () {
let clusters;
let server;
beforeEach(() => {
server = {
plugins: {
elasticsearch: {}
},
expose: sinon.mock(),
events: {
on: sinon.stub(),
}
};
clusters = createClusters(server);
});
describe('createCluster', () => {
let cluster;
const config = {
url: 'http://localhost:9200',
ssl: {
verificationMode: 'none'
}
};
beforeEach(() => {
cluster = clusters.create('admin', config);
});
it('returns a cluster', () => {
expect(cluster).to.be.a(ClusterNS.Cluster);
});
it('persists the cluster', () => {
expect(clusters.get('admin')).to.be.a(ClusterNS.Cluster);
});
it('throws if cluster already exists', () => {
const fn = partial(clusters.create, 'admin', config);
expect(fn).to.throwException(/cluster \'admin\' already exists/);
});
});
});
describe('server stop', () => {
const sandbox = sinon.createSandbox();
beforeEach(() => {
sandbox.stub(ClusterNS, 'Cluster').callsFake(function () {
this.stub = true;
this.close = sinon.stub();
});
});
after(() => {
sandbox.restore();
});
it('closes all clusters', async () => {
const server = new Hapi.Server();
const clusters = createClusters(server);
const cluster = clusters.create('name', { config: true });
expect(cluster).to.have.property('stub', true);
sinon.assert.notCalled(cluster.close);
await server.start();
sinon.assert.notCalled(cluster.close);
await server.stop();
sinon.assert.calledOnce(cluster.close);
});
});
});

View file

@ -1,78 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import expect from 'expect.js';
import sinon from 'sinon';
import { set, get, partial } from 'lodash';
import { createDataCluster } from '../create_data_cluster';
describe('plugins/elasticsearch', function () {
describe('create_data_cluster', function () {
let cluster;
let server;
let config;
beforeEach(() => {
config = {
elasticsearch: {
url: 'http://localhost:9200',
logQueries: true
}
};
server = sinon.spy();
cluster = {
close: sinon.spy()
};
set(server, 'plugins.elasticsearch.createCluster', sinon.mock().returns(cluster));
set(server, 'on', sinon.spy());
server.config = () => {
return { get: partial(get, config) };
};
});
it('creates the cluster with elasticsearch config', () => {
createDataCluster(server);
const { createCluster } = server.plugins.elasticsearch;
sinon.assert.calledOnce(createCluster);
expect(createCluster.getCall(0).args[0]).to.eql('data');
expect(createCluster.getCall(0).args[1].url).to.eql('http://localhost:9200');
});
it('sets client logger for cluster options', () => {
createDataCluster(server);
const { createCluster } = server.plugins.elasticsearch;
const firstCall = createCluster.getCall(0);
const Log = firstCall.args[1].log;
const logger = new Log;
sinon.assert.calledOnce(createCluster);
expect(firstCall.args[0]).to.eql('data');
expect(firstCall.args[1].url).to.eql('http://localhost:9200');
expect(logger.tags).to.eql(['data']);
expect(logger.logQueries).to.eql(true);
});
});
});

View file

@ -1 +0,0 @@
test ca certificate

View file

@ -27,7 +27,6 @@ import healthCheck from '../health_check';
import kibanaVersion from '../kibana_version';
const esPort = 9220;
const esUrl = `http://elastic:changement@localhost:9220`;
describe('plugins/elasticsearch', () => {
describe('lib/health_check', function () {
@ -58,7 +57,7 @@ describe('plugins/elasticsearch', () => {
}
};
cluster = { callWithInternalUser: sinon.stub() };
cluster = { callWithInternalUser: sinon.stub(), errors: { NoConnections } };
cluster.callWithInternalUser.withArgs('index', sinon.match.any).returns(Promise.resolve());
cluster.callWithInternalUser.withArgs('mget', sinon.match.any).returns(Promise.resolve({ ok: true }));
cluster.callWithInternalUser.withArgs('get', sinon.match.any).returns(Promise.resolve({ found: false }));
@ -73,19 +72,11 @@ describe('plugins/elasticsearch', () => {
}
}));
// setup the config().get()/.set() stubs
const get = sinon.stub();
get.withArgs('elasticsearch.hosts').returns([esUrl]);
get.withArgs('kibana.index').returns('.my-kibana');
get.withArgs('pkg.version').returns('1.0.0');
const set = sinon.stub();
// Setup the server mock
server = {
logWithMetadata: sinon.stub(),
info: { port: 5601 },
config: function () { return { get, set }; },
config: () => ({ get: sinon.stub() }),
plugins: {
elasticsearch: {
getCluster: sinon.stub().returns(cluster)
@ -94,7 +85,7 @@ describe('plugins/elasticsearch', () => {
ext: sinon.stub()
};
health = healthCheck(plugin, server);
health = healthCheck(plugin, server, 0);
});
afterEach(() => sandbox.restore());

View file

@ -1,151 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import expect from 'expect.js';
import { parseConfig } from '../parse_config';
describe('plugins/elasticsearch', function () {
describe('lib/parse_config', function () {
describe('ssl', function () {
let serverConfig;
beforeEach(function () {
serverConfig = {
hosts: ['https://localhost:9200'],
ssl: {
verificationMode: 'full'
}
};
});
it('throws an Exception when verificationMode is undefined', function () {
delete serverConfig.ssl.verificationMode;
expect(parseConfig).withArgs(serverConfig).to.throwException();
});
it('sets rejectUnauthorized to false when verificationMode is none', function () {
serverConfig.ssl.verificationMode = 'none';
const config = parseConfig(serverConfig);
expect(config.ssl.rejectUnauthorized).to.be(false);
});
it('sets rejectUnauthorized to true when verificationMode is certificate', function () {
serverConfig.ssl.verificationMode = 'certificate';
const config = parseConfig(serverConfig);
expect(config.ssl.rejectUnauthorized).to.be(true);
});
it('sets checkServerIdentity to not check hostname when verificationMode is certificate', function () {
serverConfig.ssl.verificationMode = 'certificate';
const config = parseConfig(serverConfig);
const cert = {
subject: {
CN: 'wrong.com'
}
};
expect(config.ssl.checkServerIdentity).withArgs('right.com', cert).to.not.throwException();
const result = config.ssl.checkServerIdentity('right.com', cert);
expect(result).to.be(undefined);
});
it('sets rejectUnauthorized to true when verificationMode is full', function () {
serverConfig.ssl.verificationMode = 'full';
const config = parseConfig(serverConfig);
expect(config.ssl.rejectUnauthorized).to.be(true);
});
it(`doesn't set checkServerIdentity when verificationMode is full`, function () {
serverConfig.ssl.verificationMode = 'full';
const config = parseConfig(serverConfig);
expect(config.ssl.checkServerIdentity).to.be(undefined);
});
it(`sets ca when certificateAuthorities are specified`, function () {
serverConfig.ssl.certificateAuthorities = [__dirname + '/fixtures/ca.crt'];
const config = parseConfig(serverConfig);
expect(config.ssl.ca).to.contain('test ca certificate\n');
});
it(`by default sets cert and key when certificate and key paths are specified`, function () {
serverConfig.ssl.certificate = __dirname + '/fixtures/cert.crt';
serverConfig.ssl.key = __dirname + '/fixtures/cert.key';
const config = parseConfig(serverConfig);
expect(config.ssl.cert).to.be('test certificate\n');
expect(config.ssl.key).to.be('test key\n');
});
it(`by default sets passphrase when certificate, key and keyPassphrase are specified`, function () {
serverConfig.ssl.certificate = __dirname + '/fixtures/cert.crt';
serverConfig.ssl.key = __dirname + '/fixtures/cert.key';
serverConfig.ssl.keyPassphrase = 'secret';
const config = parseConfig(serverConfig);
expect(config.ssl.passphrase).to.be('secret');
});
it(`doesn't set cert and key when ignoreCertAndKey is true`, function () {
serverConfig.ssl.certificate = __dirname + '/fixtures/cert.crt';
serverConfig.ssl.key = __dirname + '/fixtures/cert.key';
const config = parseConfig(serverConfig, { ignoreCertAndKey: true });
expect(config.ssl.cert).to.be(undefined);
expect(config.ssl.key).to.be(undefined);
});
it(`by default sets passphrase when ignoreCertAndKey is true`, function () {
serverConfig.ssl.certificate = __dirname + '/fixtures/cert.crt';
serverConfig.ssl.key = __dirname + '/fixtures/cert.key';
serverConfig.ssl.keyPassphrase = 'secret';
const config = parseConfig(serverConfig, { ignoreCertAndKey: true });
expect(config.ssl.passphrase).to.be(undefined);
});
describe('port', () => {
it('uses the specified port', () => {
const config1 = parseConfig(serverConfig).hosts[0];
expect(config1.port).to.be('9200');
serverConfig.hosts = ['https://localhost:555'];
const config2 = parseConfig(serverConfig).hosts[0];
expect(config2.port).to.be('555');
});
it('uses port 80 if http and no specified port', () => {
serverConfig.hosts = ['http://localhost'];
const config2 = parseConfig(serverConfig).hosts[0];
expect(config2.port).to.be('80');
});
it ('uses port 443 if https and no specified port', () => {
serverConfig.hosts = ['https://localhost'];
const config2 = parseConfig(serverConfig).hosts[0];
expect(config2.port).to.be('443');
});
});
});
});
});

View file

@ -1,57 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import expect from 'expect.js';
import setHeaders from '../set_headers';
describe('plugins/elasticsearch', function () {
describe('lib/set_headers', function () {
it('throws if not given an object as the first argument', function () {
const fn = () => setHeaders(null, {});
expect(fn).to.throwError();
});
it('throws if not given an object as the second argument', function () {
const fn = () => setHeaders({}, null);
expect(fn).to.throwError();
});
it('returns a new object', function () {
const originalHeaders = {};
const newHeaders = {};
const returnedHeaders = setHeaders(originalHeaders, newHeaders);
expect(returnedHeaders).not.to.be(originalHeaders);
expect(returnedHeaders).not.to.be(newHeaders);
});
it('returns object with newHeaders merged with originalHeaders', function () {
const originalHeaders = { foo: 'bar' };
const newHeaders = { one: 'two' };
const returnedHeaders = setHeaders(originalHeaders, newHeaders);
expect(returnedHeaders).to.eql({ foo: 'bar', one: 'two' });
});
it('returns object where newHeaders takes precedence for any matching keys', function () {
const originalHeaders = { foo: 'bar' };
const newHeaders = { one: 'two', foo: 'notbar' };
const returnedHeaders = setHeaders(originalHeaders, newHeaders);
expect(returnedHeaders).to.eql({ foo: 'notbar', one: 'two' });
});
});
});

View file

@ -1,58 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
export function clientLogger(server) {
return class ElasticsearchClientLogging {
// additional tags to differentiate connection
tags = [];
logQueries = false;
error(err) {
server.log(['error', 'elasticsearch'].concat(this.tags), err);
}
warning(message) {
server.log(['warning', 'elasticsearch'].concat(this.tags), message);
}
trace(method, options, query, _response, statusCode) {
/* Check if query logging is enabled
* It requires Kibana to be configured with verbose logging turned on. */
if (this.logQueries) {
const methodAndPath = `${method} ${options.path}`;
const queryDsl = query ? query.trim() : '';
server.log(['elasticsearch', 'query', 'debug'].concat(this.tags), [
statusCode,
methodAndPath,
queryDsl
].join('\n'));
}
}
// elasticsearch-js expects the following functions to exist
info() {}
debug() {}
close() {}
};
}

View file

@ -1,142 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import elasticsearch from 'elasticsearch';
import { get, set, isEmpty, cloneDeep, pick } from 'lodash';
import toPath from 'lodash/internal/toPath';
import Boom from 'boom';
import filterHeaders from './filter_headers';
import { parseConfig } from './parse_config';
export class Cluster {
constructor(config) {
this._config = {
...config
};
this.errors = elasticsearch.errors;
this._clients = new Set();
this._client = this.createClient();
this._noAuthClient = this.createClient(
{ auth: false },
{ ignoreCertAndKey: !this.getSsl().alwaysPresentCertificate }
);
return this;
}
callWithRequest = (req = {}, endpoint, clientParams = {}, options = {}) => {
if (req.headers) {
const filteredHeaders = filterHeaders(req.headers, this.getRequestHeadersWhitelist());
set(clientParams, 'headers', filteredHeaders);
}
return callAPI(this._noAuthClient, endpoint, clientParams, options);
}
callWithInternalUser = (endpoint, clientParams = {}, options = {}) => {
return callAPI(this._client, endpoint, clientParams, options);
}
getRequestHeadersWhitelist = () => getClonedProperty(this._config, 'requestHeadersWhitelist');
getCustomHeaders = () => getClonedProperty(this._config, 'customHeaders');
getRequestTimeout = () => getClonedProperty(this._config, 'requestTimeout');
getHosts = () => getClonedProperty(this._config, 'hosts');
getSsl = () => getClonedProperty(this._config, 'ssl');
getClient = () => this._client;
close() {
for (const client of this._clients) {
client.close();
}
this._clients.clear();
}
createClient = (configOverrides, parseOptions) => {
const config = {
...this._getClientConfig(),
...configOverrides
};
const client = new elasticsearch.Client(parseConfig(config, parseOptions));
this._clients.add(client);
return client;
}
_getClientConfig = () => {
return getClonedProperties(this._config, [
'hosts',
'ssl',
'username',
'password',
'customHeaders',
'plugins',
'apiVersion',
'keepAlive',
'pingTimeout',
'requestTimeout',
'sniffOnStart',
'sniffInterval',
'sniffOnConnectionFault',
'log'
]);
}
}
function callAPI(client, endpoint, clientParams = {}, options = {}) {
const wrap401Errors = options.wrap401Errors !== false;
const clientPath = toPath(endpoint);
const api = get(client, clientPath);
let apiContext = get(client, clientPath.slice(0, -1));
if (isEmpty(apiContext)) {
apiContext = client;
}
if (!api) {
throw new Error(`called with an invalid endpoint: ${endpoint}`);
}
return api.call(apiContext, clientParams).catch((err) => {
if (!wrap401Errors || err.statusCode !== 401) {
return Promise.reject(err);
}
const boomError = Boom.boomify(err, { statusCode: err.statusCode });
const wwwAuthHeader = get(err, 'body.error.header[WWW-Authenticate]');
boomError.output.headers['WWW-Authenticate'] = wwwAuthHeader || 'Basic realm="Authorization Required"';
throw boomError;
});
}
function getClonedProperties(config, paths) {
return cloneDeep(paths ? pick(config, paths) : config);
}
function getClonedProperty(config, path) {
return cloneDeep(path ? get(config, path) : config);
}

View file

@ -0,0 +1,50 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { errors } from 'elasticsearch';
import { CallAPIOptions, ClusterClient } from 'kibana';
export class Cluster {
public readonly errors = errors;
constructor(private readonly clusterClient: ClusterClient) {}
public callWithRequest = async (
req: { headers?: Record<string, string> } = {},
endpoint: string,
clientParams?: Record<string, unknown>,
options?: CallAPIOptions
) => {
return await this.clusterClient
.asScoped(req)
.callAsCurrentUser(endpoint, clientParams, options);
};
public callWithInternalUser = async (
endpoint: string,
clientParams?: Record<string, unknown>,
options?: CallAPIOptions
) => {
return await this.clusterClient.callAsInternalUser(endpoint, clientParams, options);
};
public close() {
this.clusterClient.close();
}
}
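A brief usage sketch of the new TypeScript Cluster wrapper; it assumes `coreClusterClient` is a ClusterClient obtained from the core Elasticsearch service (for example via adminClient$), and the request object is illustrative.
const cluster = new Cluster(coreClusterClient);

const request = { headers: { authorization: 'Basic Zm9vOmJhcg==' } };

// Scoped call: the incoming request's headers are forwarded according to the
// configured whitelist.
const searchResponse = await cluster.callWithRequest(request, 'search', { index: '.kibana' });

// Unscoped call made on behalf of the internal Kibana user.
const pong = await cluster.callWithInternalUser('ping');

cluster.close();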

View file

@ -1,38 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { clientLogger } from './client_logger';
export function createAdminCluster(server) {
const config = server.config();
const ElasticsearchClientLogging = clientLogger(server);
class AdminClientLogging extends ElasticsearchClientLogging {
tags = ['admin'];
logQueries = config.get('elasticsearch.logQueries');
}
server.plugins.elasticsearch.createCluster(
'admin',
{
log: AdminClientLogging,
...config.get('elasticsearch')
}
);
}

View file

@ -1,34 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import url from 'url';
import { get } from 'lodash';
import http from 'http';
import https from 'https';
import { parseConfig } from './parse_config';
export default function (config) {
const target = url.parse(get(config, 'url'));
if (!/^https/.test(target.protocol)) return new http.Agent();
const ignoreCertAndKey = !get(config, 'ssl.alwaysPresentCertificate');
return new https.Agent(parseConfig(config, { ignoreCertAndKey }).ssl);
}

View file

@ -1,49 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { Cluster } from './cluster';
export function createClusters(server) {
const clusters = new Map();
server.events.on('stop', () => {
for (const [name, cluster] of clusters) {
cluster.close();
clusters.delete(name);
}
});
return {
get(name) {
return clusters.get(name);
},
create(name, config) {
const cluster = new Cluster(config);
if (clusters.has(name)) {
throw new Error(`cluster '${name}' already exists`);
}
clusters.set(name, cluster);
return cluster;
}
};
}

View file

@ -1,42 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { clientLogger } from './client_logger';
export function createDataCluster(server) {
const config = server.config();
const ElasticsearchClientLogging = clientLogger(server);
class DataClientLogging extends ElasticsearchClientLogging {
tags = ['data'];
logQueries = getConfig().logQueries;
}
function getConfig() {
return config.get('elasticsearch');
}
server.plugins.elasticsearch.createCluster(
'data',
{
log: DataClientLogging,
...getConfig()
}
);
}

View file

@ -18,23 +18,20 @@
*/
import Promise from 'bluebird';
import elasticsearch from 'elasticsearch';
import kibanaVersion from './kibana_version';
import { ensureEsVersion } from './ensure_es_version';
const NoConnections = elasticsearch.errors.NoConnections;
export default function (plugin, server) {
const config = server.config();
const callAdminAsKibanaUser = server.plugins.elasticsearch.getCluster('admin').callWithInternalUser;
const REQUEST_DELAY = config.get('elasticsearch.healthCheck.delay');
export default function (plugin, server, requestDelay) {
const adminCluster = server.plugins.elasticsearch.getCluster('admin');
const NoConnections = adminCluster.errors.NoConnections;
const callAdminAsKibanaUser = adminCluster.callWithInternalUser;
plugin.status.yellow('Waiting for Elasticsearch');
function waitForPong(callWithInternalUser) {
return callWithInternalUser('ping').catch(function (err) {
if (!(err instanceof NoConnections)) throw err;
plugin.status.red(`Unable to connect to Elasticsearch.`);
return Promise.delay(REQUEST_DELAY).then(waitForPong.bind(null, callWithInternalUser));
return Promise.delay(requestDelay).then(waitForPong.bind(null, callWithInternalUser));
});
}
@ -47,7 +44,7 @@ export default function (plugin, server) {
function waitForEsVersion() {
return ensureEsVersion(server, kibanaVersion.get()).catch(err => {
plugin.status.red(err);
return Promise.delay(REQUEST_DELAY).then(waitForEsVersion);
return Promise.delay(requestDelay).then(waitForEsVersion);
});
}
@ -81,7 +78,7 @@ export default function (plugin, server) {
}
function startorRestartChecking() {
scheduleCheck(stopChecking() ? REQUEST_DELAY : 1);
scheduleCheck(stopChecking() ? requestDelay : 1);
}
function stopChecking() {

View file

@ -1,65 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { defaults, omit, trimLeft, trimRight } from 'lodash';
import { parse as parseUrl, format as formatUrl } from 'url';
import filterHeaders from './filter_headers';
import setHeaders from './set_headers';
export default function mapUri(cluster, proxyPrefix) {
function joinPaths(pathA, pathB) {
return trimRight(pathA, '/') + '/' + trimLeft(pathB, '/');
}
return function (request) {
const {
protocol: esUrlProtocol,
slashes: esUrlHasSlashes,
auth: esUrlAuth,
hostname: esUrlHostname,
port: esUrlPort,
pathname: esUrlBasePath,
query: esUrlQuery
} = parseUrl(cluster.getUrl(), true);
// copy most url components directly from elasticsearch.hosts
const mappedUrlComponents = {
protocol: esUrlProtocol,
slashes: esUrlHasSlashes,
auth: esUrlAuth,
hostname: esUrlHostname,
port: esUrlPort
};
// pathname
const reqSubPath = request.path.replace(proxyPrefix, '');
mappedUrlComponents.pathname = joinPaths(esUrlBasePath, reqSubPath);
// querystring
const mappedQuery = defaults(omit(request.query, '_'), esUrlQuery);
if (Object.keys(mappedQuery).length) {
mappedUrlComponents.query = mappedQuery;
}
const filteredHeaders = filterHeaders(request.headers, cluster.getRequestHeadersWhitelist());
const mappedHeaders = setHeaders(filteredHeaders, cluster.getCustomHeaders());
const mappedUrl = formatUrl(mappedUrlComponents);
return { uri: mappedUrl, headers: mappedHeaders };
};
}

View file

@ -1,99 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import util from 'util';
import url from 'url';
import { get, noop, size, pick } from 'lodash';
import { readFileSync } from 'fs';
import Bluebird from 'bluebird';
const readFile = (file) => readFileSync(file, 'utf8');
export function parseConfig(serverConfig = {}, { ignoreCertAndKey = false } = {}) {
const config = {
keepAlive: true,
...pick(serverConfig, [
'plugins', 'apiVersion', 'keepAlive', 'pingTimeout',
'requestTimeout', 'log', 'logQueries', 'sniffOnStart',
'sniffInterval', 'sniffOnConnectionFault', 'hosts'
])
};
const mapHost = nodeUrl => {
const uri = url.parse(nodeUrl);
const httpsURI = uri.protocol === 'https:';
const httpURI = uri.protocol === 'http:';
const protocolPort = httpsURI && '443' || httpURI && '80';
return {
host: uri.hostname,
port: uri.port || protocolPort,
protocol: uri.protocol,
path: uri.pathname,
query: uri.query,
headers: serverConfig.customHeaders
};
};
if (serverConfig.hosts) {
config.hosts = serverConfig.hosts.map(mapHost);
}
// Auth
if (serverConfig.auth !== false && serverConfig.username && serverConfig.password) {
config.hosts.forEach(host => {
host.auth = util.format('%s:%s', serverConfig.username, serverConfig.password);
});
}
// SSL
config.ssl = {};
const verificationMode = get(serverConfig, 'ssl.verificationMode');
switch (verificationMode) {
case 'none':
config.ssl.rejectUnauthorized = false;
break;
case 'certificate':
config.ssl.rejectUnauthorized = true;
// by default, Node.js checks the server identity
config.ssl.checkServerIdentity = noop;
break;
case 'full':
config.ssl.rejectUnauthorized = true;
break;
default:
throw new Error(`Unknown ssl verificationMode: ${verificationMode}`);
}
if (size(get(serverConfig, 'ssl.certificateAuthorities'))) {
config.ssl.ca = serverConfig.ssl.certificateAuthorities.map(readFile);
}
// Add client certificate and key if required by elasticsearch
if (!ignoreCertAndKey && get(serverConfig, 'ssl.certificate') && get(serverConfig, 'ssl.key')) {
config.ssl.cert = readFile(serverConfig.ssl.certificate);
config.ssl.key = readFile(serverConfig.ssl.key);
config.ssl.passphrase = serverConfig.ssl.keyPassphrase;
}
config.defer = () => Bluebird.defer();
return config;
}

View file

@ -33,8 +33,6 @@ export default async function (server /*options*/) {
return {
kbnIndex: config.get('kibana.index'),
esShardTimeout: config.get('elasticsearch.shardTimeout'),
esApiVersion: config.get('elasticsearch.apiVersion'),
serverFunctions: registries.serverFunctions.toArray(),
basePath,
reportingBrowserType,

View file

@ -17,23 +17,23 @@
* under the License.
*/
import sinon from 'sinon';
import moment from 'moment';
import { of } from 'rxjs';
import { expect } from 'chai';
import { getEsShardTimeout } from '../../helpers/get_es_shard_timeout';
describe('getEsShardTimeout', () => {
it('should return the elasticsearch.shardTimeout', () => {
const getConfig = sinon.spy(() => '30000');
it('should return the elasticsearch.shardTimeout', async () => {
const req = {
server: {
config: () => ({
get: getConfig
})
core: {
elasticsearch: { legacy: { config$: of({ shardTimeout: moment.duration(12345) }) } }
}
}
};
const timeout = getEsShardTimeout(req);
expect(timeout).to.equal('30000');
expect(getConfig.called).to.equal(true);
const timeout = await getEsShardTimeout(req);
expect(timeout).to.equal(12345);
});
});

View file

@ -22,7 +22,7 @@ import { getIndexPatternObject } from '../helpers/get_index_pattern';
export async function getAnnotationRequestParams(req, panel, annotation, esQueryConfig, capabilities) {
const bodies = [];
const esShardTimeout = getEsShardTimeout(req);
const esShardTimeout = await getEsShardTimeout(req);
const indexPattern = annotation.index_pattern;
const { indexPatternObject, indexPatternString } = await getIndexPatternObject(req, indexPattern);
const request = buildAnnotationRequest(req, panel, annotation, esQueryConfig, indexPatternObject, capabilities);

View file

@ -16,7 +16,11 @@
* specific language governing permissions and limitations
* under the License.
*/
import { first, map } from 'rxjs/operators';
export function getEsShardTimeout(req) {
return req.server.config().get('elasticsearch.shardTimeout');
export async function getEsShardTimeout(req) {
return await req.server.core.elasticsearch.legacy.config$.pipe(
first(),
map(config => config.shardTimeout.asMilliseconds())
).toPromise();
}
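The legacy config$ observable never completes on its own, so first() is what lets toPromise() resolve with the latest config; a standalone sketch of the same pattern (the of(...) source below is only a stand-in for the real config$):
import { of } from 'rxjs';
import { first, map } from 'rxjs/operators';
import moment from 'moment';
// Stand-in for req.server.core.elasticsearch.legacy.config$
const config$ = of({ shardTimeout: moment.duration(30000) });
config$
  .pipe(first(), map(config => config.shardTimeout.asMilliseconds()))
  .toPromise()
  .then(ms => console.log(ms)); // logs 30000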

View file

@ -25,7 +25,7 @@ export async function getSeriesRequestParams(req, panel, series, esQueryConfig,
const indexPattern = series.override_index_pattern && series.series_index_pattern || panel.index_pattern;
const { indexPatternObject, indexPatternString } = await getIndexPatternObject(req, indexPattern);
const request = buildRequestBody(req, panel, series, esQueryConfig, indexPatternObject, capabilities);
const esShardTimeout = getEsShardTimeout(req);
const esShardTimeout = await getEsShardTimeout(req);
if (capabilities.batchRequestsSupport) {
bodies.push({

View file

@ -56,8 +56,6 @@ export default function (server) {
const config = server.config();
return {
kbnIndex: config.get('kibana.index'),
esShardTimeout: config.get('elasticsearch.shardTimeout'),
esApiVersion: config.get('elasticsearch.apiVersion')
};
});
}

View file

@ -198,27 +198,16 @@ describe(filename, () => {
});
describe('timeouts', () => {
let sandbox;
beforeEach(() => {
sandbox = sinon.createSandbox();
});
afterEach(() => {
sandbox.restore();
});
it('sets the timeout on the request', () => {
config.index = 'beer';
const request = fn(config, tlConfig, emptyScriptedFields);
const request = fn(config, tlConfig, emptyScriptedFields, 30000);
expect(request.timeout).to.equal('30000ms');
});
it('sets no timeout if elasticsearch.shardTimeout is set to 0', () => {
sandbox.stub(tlConfig.server.config(), 'get').withArgs('elasticsearch.shardTimeout').returns(0);
config.index = 'beer';
const request = fn(config, tlConfig, emptyScriptedFields);
const request = fn(config, tlConfig, emptyScriptedFields, 0);
expect(request).to.not.have.property('timeout');
});

View file

@ -18,23 +18,12 @@
*/
import moment from 'moment';
import { of } from 'rxjs';
import sinon from 'sinon';
import timelionDefaults from '../../../lib/get_namespaced_settings';
import esResponse from './es_response';
export default function () {
const config = {
get(key) {
switch (key) {
case 'elasticsearch.shardTimeout':
return 30000;
default:
throw new Error(`unexpected config ${key}`);
}
}
};
const functions = require('../../../lib/load_functions')('series_functions');
const server = {
plugins: {
@ -52,7 +41,11 @@ export default function () {
})
}
},
config: () => config,
core: {
elasticsearch: {
legacy: { config$: of({ shardTimeout: moment.duration(30000) }) }
}
},
};
const tlConfig = require('../../../handlers/lib/tl_config.js')({

View file

@ -17,6 +17,7 @@
* under the License.
*/
import { first, map } from 'rxjs/operators';
import { i18n } from '@kbn/i18n';
import _ from 'lodash';
import Datasource from '../../lib/classes/datasource';
@ -126,7 +127,12 @@ export default new Datasource('es', {
});
}
const body = buildRequest(config, tlConfig, scriptedFields);
const esShardTimeout = await tlConfig.server.core.elasticsearch.legacy.config$.pipe(
first(),
map(config => config.shardTimeout.asMilliseconds())
).toPromise();
const body = buildRequest(config, tlConfig, scriptedFields, esShardTimeout);
const { callWithRequest } = tlConfig.server.plugins.elasticsearch.getCluster('data');
const resp = await callWithRequest(tlConfig.request, 'search', body);

View file

@ -21,7 +21,7 @@ import _ from 'lodash';
import { buildAggBody } from './agg_body';
import createDateAgg from './create_date_agg';
export default function buildRequest(config, tlConfig, scriptedFields) {
export default function buildRequest(config, tlConfig, scriptedFields, timeout) {
const bool = { must: [] };
@ -78,7 +78,6 @@ export default function buildRequest(config, tlConfig, scriptedFields) {
}
};
const timeout = tlConfig.server.config().get('elasticsearch.shardTimeout');
if (timeout) {
request.timeout = `${timeout}ms`;
}

View file

@ -17,4 +17,14 @@
* under the License.
*/
export const DEFAULT_API_VERSION = 'master';
import { Server } from 'hapi';
import KbnServer from '../kbn_server';
/**
* Exposes `kbnServer.core` through Hapi API.
* @param kbnServer KbnServer singleton instance.
* @param server Hapi server instance to expose `core` on.
*/
export function coreMixin(kbnServer: KbnServer, server: Server) {
server.decorate('server', 'core', kbnServer.core);
}
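Once coreMixin has run, legacy code can reach new-platform services through the decorated server; a hedged sketch of a legacy route handler doing so (route path and helper name are illustrative, not part of this commit):
import { first } from 'rxjs/operators';
function registerShardTimeoutRoute(server) {
  server.route({
    method: 'GET',
    path: '/api/sketch/es-shard-timeout',
    async handler(req) {
      // Same access pattern as getEsShardTimeout earlier in this diff.
      const config = await req.server.core.elasticsearch.legacy.config$
        .pipe(first())
        .toPromise();
      return { shardTimeoutMs: config.shardTimeout.asMilliseconds() };
    },
  });
}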

View file

@ -55,6 +55,7 @@ declare module 'hapi' {
type KbnMixinFunc = (kbnServer: KbnServer, server: Server, config: any) => Promise<any> | void;
export default class KbnServer {
public readonly core: any;
public server: Server;
public inject: Server['inject'];

View file

@ -24,6 +24,7 @@ import { Config } from './config';
import loggingConfiguration from './logging/configuration';
import configSetupMixin from './config/setup';
import httpMixin from './http';
import { coreMixin } from './core';
import { loggingMixin } from './logging';
import warningsMixin from './warnings';
import { usageMixin } from './usage';
@ -64,6 +65,8 @@ export default class KbnServer {
// sets this.server
httpMixin,
coreMixin,
// adds methods for extending this.server
serverExtensionsMixin,
loggingMixin,

View file

@ -3,5 +3,6 @@
"version": "0.0.1",
"kibanaVersion": "kibana",
"configPath": ["core", "testbed"],
"ui": true
"server": true,
"ui": false
}

View file

@ -17,8 +17,9 @@
* under the License.
*/
import { map } from 'rxjs/operators';
import { Logger, PluginInitializerContext, PluginName, PluginStartContext } from '../../../../';
import { map, mergeMap } from 'rxjs/operators';
import { Logger, PluginInitializerContext, PluginName, PluginStartContext } from 'kibana';
import { TestBedConfig } from './config';
class Plugin {
@ -42,6 +43,9 @@ class Plugin {
return `Some exposed data derived from config: ${config.secret}`;
})
),
pingElasticsearch$: startContext.elasticsearch.adminClient$.pipe(
mergeMap(client => client.callAsInternalUser('ping'))
),
};
}
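A hypothetical consumer of the contract returned above could subscribe to pingElasticsearch$; the helper below is illustrative only, and testbedContract stands for whatever object the testbed plugin's start() hands to its dependents:
function logElasticsearchPing(testbedContract) {
  // Subscribing pipes each emitted admin client through the ping call wired above.
  return testbedContract.pingElasticsearch$.subscribe({
    next: pong => console.log('Elasticsearch ping succeeded', pong),
    error: err => console.error('Elasticsearch ping failed', err),
  });
}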

View file

@ -60,5 +60,6 @@
* option TypeScript will not emit declarations for this code.
*/
export { CallAPIOptions, ClusterClient } from './core/server/elasticsearch';
export { Logger, LoggerFactory } from './core/server/logging';
export { PluginInitializerContext, PluginName, PluginStartContext } from './core/server/plugins';

View file

@ -21,7 +21,7 @@ import { format as formatUrl } from 'url';
import elasticsearch from 'elasticsearch';
import { DEFAULT_API_VERSION } from '../../../src/legacy/core_plugins/elasticsearch/lib/default_api_version';
import { DEFAULT_API_VERSION } from '../../../src/core/server/elasticsearch/elasticsearch_config';
export function EsProvider({ getService }) {
const config = getService('config');

View file

@ -30,8 +30,6 @@ export default async function(server /*options*/) {
return {
...kibanaVars,
kbnIndex: config.get('kibana.index'),
esShardTimeout: config.get('elasticsearch.shardTimeout'),
esApiVersion: config.get('elasticsearch.apiVersion'),
serverFunctions: serverFunctions.toArray(),
basePath,
reportingBrowserType,

View file

@ -8,10 +8,7 @@ import { once } from 'lodash';
import { elasticsearchJsPlugin } from '../../client/elasticsearch_ccr';
const callWithRequest = once(server => {
const config = {
plugins: [ elasticsearchJsPlugin ],
...server.config().get('elasticsearch')
};
const config = { plugins: [ elasticsearchJsPlugin ], };
const cluster = server.plugins.elasticsearch.createCluster('ccr', config);
return cluster.callWithRequest;
});

View file

@ -45,8 +45,6 @@ export function graph(kibana) {
server.injectUiAppVars('graph', () => {
const config = server.config();
return {
esApiVersion: config.get('elasticsearch.apiVersion'),
esShardTimeout: config.get('elasticsearch.shardTimeout'),
graphSavePolicy: config.get('xpack.graph.savePolicy'),
canEditDrillDownUrls: config.get('xpack.graph.canEditDrillDownUrls')
};

View file

@ -7,8 +7,7 @@
import { once } from 'lodash';
const callWithRequest = once((server) => {
const pipeline = server.config().get('elasticsearch');
const cluster = server.plugins.elasticsearch.createCluster('logstash', pipeline);
const cluster = server.plugins.elasticsearch.createCluster('logstash');
return cluster.callWithRequest;
});

View file

@ -10,10 +10,7 @@ import { once } from 'lodash';
import { elasticsearchJsPlugin } from './elasticsearch_ml';
const callWithRequest = once((server) => {
const config = {
plugins: [ elasticsearchJsPlugin ],
...server.config().get('elasticsearch')
};
const config = { plugins: [ elasticsearchJsPlugin ] };
const cluster = server.plugins.elasticsearch.createCluster('ml', config);
return cluster.callWithRequest;

View file

@ -9,7 +9,7 @@ import { i18n } from '@kbn/i18n';
/**
* Helper string to add as a tag in every logging call
*/
export const LOGGING_TAG = 'monitoring-ui';
export const LOGGING_TAG = 'monitoring';
/**
* Helper string to add as a tag in every logging call related to Kibana monitoring
*/

View file

@ -85,8 +85,6 @@ export const init = (monitoringPlugin, server) => {
maxBucketSize: config.get('xpack.monitoring.max_bucket_size'),
minIntervalSeconds: config.get('xpack.monitoring.min_interval_seconds'),
kbnIndex: config.get('kibana.index'),
esApiVersion: config.get('elasticsearch.apiVersion'),
esShardTimeout: config.get('elasticsearch.shardTimeout'),
showLicenseExpiration: config.get('xpack.monitoring.show_license_expiration'),
showCgroupMetricsElasticsearch: config.get('xpack.monitoring.ui.container.elasticsearch.enabled'),
showCgroupMetricsLogstash: config.get('xpack.monitoring.ui.container.logstash.enabled') // Note, not currently used, but see https://github.com/elastic/x-pack-kibana/issues/1559 part 2

View file

@ -24,15 +24,6 @@ function getMockServerFromConnectionUrl(monitoringClusterUrl) {
}
}
},
elasticsearch: {
hosts: ['http://localhost:9200'],
username: 'user-internal-test',
password: 'p@ssw0rd!-internal-test',
ssl: {},
customHeaders: {
'x-custom-headers-test': 'connection-production'
}
}
};
const config = () => {
@ -50,7 +41,6 @@ function getMockServerFromConnectionUrl(monitoringClusterUrl) {
config: sinon.stub().returns(server.elasticsearch)
}),
createCluster: sinon.stub(),
ElasticsearchClientLogging: noop
}
},
events: {
@ -69,7 +59,7 @@ describe('Instantiate Client', () => {
exposeClient(server);
expect(server.log.getCall(0).args).to.eql([
[ 'monitoring-ui', 'es-client' ],
[ 'monitoring', 'es-client' ],
'config sourced from: production cluster'
]);
});
@ -79,14 +69,14 @@ describe('Instantiate Client', () => {
exposeClient(server);
expect(server.log.getCall(0).args).to.eql([
[ 'monitoring-ui', 'es-client' ],
[ 'monitoring', 'es-client' ],
'config sourced from: monitoring cluster'
]);
});
});
describe('Custom Headers Configuration', () => {
it('Adds xpack.monitoring.elasticsearch.customHeaders if connected to production cluster', () => {
it('Does not add xpack.monitoring.elasticsearch.customHeaders if connected to production cluster', () => {
const server = getMockServerFromConnectionUrl(null); // pass null for URL to create the client using prod config
exposeClient(server);
@ -96,9 +86,7 @@ describe('Instantiate Client', () => {
sinon.assert.calledOnce(createCluster);
expect(createClusterCall.args[0]).to.be('monitoring');
expect(createClusterCall.args[1].customHeaders).to.eql(
{ 'x-custom-headers-test': 'connection-production' }
);
expect(createClusterCall.args[1].customHeaders).to.eql(undefined);
});
it('Adds xpack.monitoring.elasticsearch.customHeaders if connected to monitoring cluster', () => {
@ -128,7 +116,7 @@ describe('Instantiate Client', () => {
sinon.assert.calledOnce(createCluster);
expect(createClusterCall.args[0]).to.be('monitoring');
expect(createClientOptions.hosts[0]).to.eql('http://localhost:9200');
expect(createClientOptions.hosts).to.eql(undefined);
});
});

View file

@ -15,33 +15,24 @@ import { LOGGING_TAG } from '../../common/constants';
*/
export function exposeClient(server) {
const Logger = server.plugins.elasticsearch.ElasticsearchClientLogging;
const logQueries = Boolean(server.config().get('xpack.monitoring.elasticsearch.logQueries'));
const monitoringEsConfig = server.config().get('xpack.monitoring.elasticsearch');
class MonitoringClientLogging extends Logger {
constructor() {
super();
this.tags = [LOGGING_TAG];
this.logQueries = logQueries;
}
}
let config = {
...server.config().get('xpack.monitoring.elasticsearch')
};
let configSource = 'monitoring';
if (!Boolean(config.hosts && config.hosts.length)) {
config = server.config().get('elasticsearch');
let config;
let configSource;
if (!Boolean(monitoringEsConfig.hosts && monitoringEsConfig.hosts.length)) {
config = {};
configSource = 'production';
} else {
config = { ...monitoringEsConfig };
configSource = 'monitoring';
}
config.log = MonitoringClientLogging;
config.plugins = [monitoringBulk];
const cluster = server.plugins.elasticsearch.createCluster('monitoring', {
...config,
plugins: [monitoringBulk],
logQueries: Boolean(monitoringEsConfig.logQueries),
});
const esPlugin = server.plugins.elasticsearch;
const cluster = esPlugin.createCluster('monitoring', config);
server.events.on('stop', bindKey(cluster, 'close'));
server.log([LOGGING_TAG, 'es-client'], `config sourced from: ${configSource} cluster`);
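The config-source selection above reduces to a small decision, sketched here with a hypothetical helper name (not part of this commit): empty xpack.monitoring.elasticsearch.hosts means the monitoring client is built against the production cluster, anything else selects the dedicated monitoring cluster.
function pickMonitoringConfig(monitoringEsConfig) {
  if (!(monitoringEsConfig.hosts && monitoringEsConfig.hosts.length)) {
    // No dedicated monitoring hosts configured: reuse the production cluster settings.
    return { config: {}, configSource: 'production' };
  }
  return { config: { ...monitoringEsConfig }, configSource: 'monitoring' };
}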

View file

@ -32,20 +32,18 @@ describe('BulkUploader', () => {
describe('registers a collector set and runs lifecycle events', () => {
let server;
beforeEach(() => {
const cluster = {
callWithInternalUser: sinon.stub().withArgs('monitoring.bulk').callsFake(() => {
return new Promise(resolve => setTimeout(resolve, CHECK_DELAY + 1));
}),
};
server = {
log: sinon.spy(),
plugins: {
elasticsearch: {
getCluster: () => ({
createClient: () => ({
monitoring: {
bulk: function () {
return new Promise(resolve => setTimeout(resolve, CHECK_DELAY + 1));
}
},
}),
callWithInternalUser: sinon.spy(), // this tests internal collection and bulk upload, not HTTP API
}),
createCluster: () => cluster,
getCluster: () => cluster,
},
},
usage: {},
@ -74,15 +72,15 @@ describe('BulkUploader', () => {
const loggingCalls = server.log.getCalls();
expect(loggingCalls.length).to.be.greaterThan(2); // should be 3-5: start, fetch, skip, fetch, skip
expect(loggingCalls[0].args).to.eql([
['info', 'monitoring-ui', 'kibana-monitoring'],
['info', 'monitoring', 'kibana-monitoring'],
'Starting monitoring stats collection',
]);
expect(loggingCalls[1].args).to.eql([
['debug', 'monitoring-ui', 'kibana-monitoring'],
['debug', 'monitoring', 'kibana-monitoring'],
'Skipping bulk uploading of an empty stats payload',
]);
expect(loggingCalls[loggingCalls.length - 1].args).to.eql([
['info', 'monitoring-ui', 'kibana-monitoring'],
['info', 'monitoring', 'kibana-monitoring'],
'Monitoring stats collection is stopped',
]);
@ -112,11 +110,11 @@ describe('BulkUploader', () => {
// the last 2 logs as the call takes longer than this timeout (see the above mock)
expect(loggingCalls.length).to.be(4);
expect(loggingCalls[0].args).to.eql([
['info', 'monitoring-ui', 'kibana-monitoring'],
['info', 'monitoring', 'kibana-monitoring'],
'Starting monitoring stats collection',
]);
expect(loggingCalls[1].args).to.eql([
['debug', 'monitoring-ui', 'kibana-monitoring'],
['debug', 'monitoring', 'kibana-monitoring'],
'Uploading bulk stats payload to the local cluster',
]);

View file

@ -48,7 +48,7 @@ export class BulkUploader {
warn: message => server.log(['warning', ...LOGGING_TAGS], message)
};
this._client = server.plugins.elasticsearch.getCluster('admin').createClient({
this._cluster = server.plugins.elasticsearch.createCluster('admin', {
plugins: [monitoringBulk],
});
@ -121,7 +121,7 @@ export class BulkUploader {
}
_onPayload(payload) {
return sendBulkPayload(this._client, this._interval, payload);
return sendBulkPayload(this._cluster, this._interval, payload);
}
/*

View file

@ -10,8 +10,8 @@ import { KIBANA_SYSTEM_ID } from '../../../../xpack_main/common/constants';
/*
* Send the Kibana usage data to the ES Monitoring Bulk endpoint
*/
export function sendBulkPayload(client, interval, payload) {
return client.monitoring.bulk({
export function sendBulkPayload(cluster, interval, payload) {
return cluster.callWithInternalUser('monitoring.bulk', {
system_id: KIBANA_SYSTEM_ID,
system_api_version: MONITORING_SYSTEM_API_VERSION,
interval: interval + 'ms',
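A rough usage sketch of the new signature (illustrative only; it assumes sendBulkPayload and the monitoringBulk client plugin are imported as in BulkUploader above, and that payload is the bulk body gathered by the usage collectors):
async function uploadOnce(server, payload) {
  // An 'admin' cluster with the monitoring.bulk API mixed in, as BulkUploader now creates.
  const cluster = server.plugins.elasticsearch.createCluster('admin', {
    plugins: [monitoringBulk],
  });
  // The 10000 ms interval is only reported to the monitoring bulk endpoint; value is a placeholder.
  return sendBulkPayload(cluster, 10000, payload);
}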

View file

@ -59,7 +59,7 @@ describe('Flag Supported Clusters', () => {
]);
sinon.assert.calledWith(
logStub,
['debug', 'monitoring-ui', 'supported-clusters'],
['debug', 'monitoring', 'supported-clusters'],
'Found all non-basic cluster licenses. All clusters will be supported.'
);
});
@ -95,12 +95,12 @@ describe('Flag Supported Clusters', () => {
]);
sinon.assert.calledWith(
logStub,
['debug', 'monitoring-ui', 'supported-clusters'],
['debug', 'monitoring', 'supported-clusters'],
'Detected all clusters in monitoring data have basic license. Checking for supported admin cluster UUID for Kibana kibana-1234.'
);
sinon.assert.calledWith(
logStub,
['debug', 'monitoring-ui', 'supported-clusters'],
['debug', 'monitoring', 'supported-clusters'],
'Found basic license admin cluster UUID for Monitoring UI support: supported_cluster_uuid.'
);
});
@ -137,7 +137,7 @@ describe('Flag Supported Clusters', () => {
]);
sinon.assert.calledWith(
logStub,
['debug', 'monitoring-ui', 'supported-clusters'],
['debug', 'monitoring', 'supported-clusters'],
'Found some basic license clusters in monitoring data. Only non-basic will be supported.'
);
});
@ -177,7 +177,7 @@ describe('Flag Supported Clusters', () => {
]);
sinon.assert.calledWith(
logStub,
['debug', 'monitoring-ui', 'supported-clusters'],
['debug', 'monitoring', 'supported-clusters'],
'Found basic license admin cluster UUID for Monitoring UI support: supported_cluster_uuid.'
);
});
@ -212,7 +212,7 @@ describe('Flag Supported Clusters', () => {
]);
sinon.assert.calledWith(
logStub,
['debug', 'monitoring-ui', 'supported-clusters'],
['debug', 'monitoring', 'supported-clusters'],
'Found some basic license clusters in monitoring data. Only non-basic will be supported.'
);
});
@ -248,7 +248,7 @@ describe('Flag Supported Clusters', () => {
]);
sinon.assert.calledWith(
logStub,
['debug', 'monitoring-ui', 'supported-clusters'],
['debug', 'monitoring', 'supported-clusters'],
'Found all non-basic cluster licenses. All clusters will be supported.'
);
});
@ -271,7 +271,7 @@ describe('Flag Supported Clusters', () => {
]);
sinon.assert.calledWith(
logStub,
['debug', 'monitoring-ui', 'supported-clusters'],
['debug', 'monitoring', 'supported-clusters'],
'Found single cluster in monitoring data.'
);
});
@ -288,7 +288,7 @@ describe('Flag Supported Clusters', () => {
]);
sinon.assert.calledWith(
logStub,
['debug', 'monitoring-ui', 'supported-clusters'],
['debug', 'monitoring', 'supported-clusters'],
'Found single cluster in monitoring data.'
);
});
@ -306,7 +306,7 @@ describe('Flag Supported Clusters', () => {
]);
sinon.assert.calledWith(
logStub,
['debug', 'monitoring-ui', 'supported-clusters'],
['debug', 'monitoring', 'supported-clusters'],
'Found single cluster in monitoring data.'
);
});
@ -324,7 +324,7 @@ describe('Flag Supported Clusters', () => {
]);
sinon.assert.calledWith(
logStub,
['debug', 'monitoring-ui', 'supported-clusters'],
['debug', 'monitoring', 'supported-clusters'],
'Found single cluster in monitoring data.'
);
});

View file

@ -16,7 +16,6 @@ function createQueueFn(server) {
const queueConfig = server.config().get('xpack.reporting.queue');
const index = server.config().get('xpack.reporting.index');
const createWorkers = createWorkersFactory(server);
const { getClient } = server.plugins.elasticsearch.getCluster('admin');
const logger = createTaggedLogger(server, ['reporting', 'esqueue']);
const queueOptions = {
@ -24,7 +23,7 @@ function createQueueFn(server) {
interval: queueConfig.indexInterval,
timeout: queueConfig.timeout,
dateSeparator: dateSeparator,
client: getClient(),
client: server.plugins.elasticsearch.getCluster('admin'),
logger,
};

View file

@ -3,105 +3,108 @@ import elasticsearch from 'elasticsearch';
import { constants } from '../../constants';
export function ClientMock() {
this.indices = {
create: () => Promise.resolve({ acknowledged: true }),
exists: () => Promise.resolve(false),
refresh: () => Promise.resolve(),
this.callWithInternalUser = (endpoint, params = {}, ...rest) => {
if (endpoint === 'indices.create') {
return Promise.resolve({ acknowledged: true });
}
if (endpoint === 'indices.exists') {
return Promise.resolve(false);
}
if (endpoint === 'index') {
const shardCount = 2;
return Promise.resolve({
_index: params.index || 'index',
_type: params.type || constants.DEFAULT_SETTING_DOCTYPE,
_id: params.id || uniqueId('testDoc'),
_seq_no: 1,
_primary_term: 1,
_shards: { total: shardCount, successful: shardCount, failed: 0 },
created: true
});
}
if (endpoint === 'get') {
if (params === elasticsearch.errors.NotFound) return elasticsearch.errors.NotFound;
const _source = {
jobtype: 'jobtype',
created_by: false,
payload: {
id: 'sample-job-1',
now: 'Mon Apr 25 2016 14:13:04 GMT-0700 (MST)'
},
priority: 10,
timeout: 10000,
created_at: '2016-04-25T21:13:04.738Z',
attempts: 0,
max_attempts: 3,
status: 'pending',
...(rest[0] || {})
};
return Promise.resolve({
_index: params.index || 'index',
_type: params.type || constants.DEFAULT_SETTING_DOCTYPE,
_id: params.id || 'AVRPRLnlp7Ur1SZXfT-T',
_seq_no: params._seq_no || 1,
_primary_term: params._primary_term || 1,
found: true,
_source: _source
});
}
if (endpoint === 'search') {
const [count = 5, source = {}] = rest;
const hits = times(count, () => {
return {
_index: params.index || 'index',
_type: params.type || constants.DEFAULT_SETTING_DOCTYPE,
_id: uniqueId('documentId'),
_seq_no: random(1, 5),
_primary_term: random(1, 5),
_score: null,
_source: {
created_at: new Date().toString(),
number: random(0, count, true),
...source
}
};
});
return Promise.resolve({
took: random(0, 10),
timed_out: false,
_shards: {
total: 5,
successful: 5,
failed: 0
},
hits: {
total: count,
max_score: null,
hits: hits
}
});
}
if (endpoint === 'update') {
const shardCount = 2;
return Promise.resolve({
_index: params.index || 'index',
_type: params.type || constants.DEFAULT_SETTING_DOCTYPE,
_id: params.id || uniqueId('testDoc'),
_seq_no: params.if_seq_no + 1 || 2,
_primary_term: params.if_primary_term + 1 || 2,
_shards: { total: shardCount, successful: shardCount, failed: 0 },
created: true
});
}
return Promise.resolve();
};
this.transport = {};
}
ClientMock.prototype.index = function (params = {}) {
const shardCount = 2;
return Promise.resolve({
_index: params.index || 'index',
_type: params.type || constants.DEFAULT_SETTING_DOCTYPE,
_id: params.id || uniqueId('testDoc'),
_seq_no: 1,
_primary_term: 1,
_shards: { total: shardCount, successful: shardCount, failed: 0 },
created: true
});
};
ClientMock.prototype.ping = function () {
return Promise.resolve();
};
ClientMock.prototype.get = function (params = {}, source = {}) {
if (params === elasticsearch.errors.NotFound) return elasticsearch.errors.NotFound;
const _source = {
jobtype: 'jobtype',
created_by: false,
payload: {
id: 'sample-job-1',
now: 'Mon Apr 25 2016 14:13:04 GMT-0700 (MST)'
},
priority: 10,
timeout: 10000,
created_at: '2016-04-25T21:13:04.738Z',
attempts: 0,
max_attempts: 3,
status: 'pending',
...source
};
return Promise.resolve({
_index: params.index || 'index',
_type: params.type || constants.DEFAULT_SETTING_DOCTYPE,
_id: params.id || 'AVRPRLnlp7Ur1SZXfT-T',
_seq_no: params._seq_no || 1,
_primary_term: params._primary_term || 1,
found: true,
_source: _source
});
};
ClientMock.prototype.search = function (params = {}, count = 5, source = {}) {
const hits = times(count, () => {
return {
_index: params.index || 'index',
_type: params.type || constants.DEFAULT_SETTING_DOCTYPE,
_id: uniqueId('documentId'),
_seq_no: random(1, 5),
_primary_term: random(1, 5),
_score: null,
_source: {
created_at: new Date().toString(),
number: random(0, count, true),
...source
}
};
});
return Promise.resolve({
took: random(0, 10),
timed_out: false,
_shards: {
total: 5,
successful: 5,
failed: 0
},
hits: {
total: count,
max_score: null,
hits: hits
}
});
};
ClientMock.prototype.update = function (params = {}) {
const shardCount = 2;
return Promise.resolve({
_index: params.index || 'index',
_type: params.type || constants.DEFAULT_SETTING_DOCTYPE,
_id: params.id || uniqueId('testDoc'),
_seq_no: params.if_seq_no + 1 || 2,
_primary_term: params.if_primary_term + 1 || 2,
_shards: { total: shardCount, successful: shardCount, failed: 0 },
created: true
});
};
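The esqueue test updates that follow all lean on the same sinon pattern; a condensed, runnable sketch of it (the endpoint name and payload are illustrative):
import sinon from 'sinon';
import { ClientMock } from '../fixtures/elasticsearch';
const client = new ClientMock();
// Spy on the single callWithInternalUser dispatcher instead of individual
// client methods, and scope expectations to one endpoint with withArgs.
const indexSpy = sinon.spy(client, 'callWithInternalUser').withArgs('index');
// A call the code under test would normally make:
client.callWithInternalUser('index', { index: 'esqueue', body: { status: 'pending' } });
sinon.assert.calledOnce(indexSpy);
const params = indexSpy.getCall(0).args[1]; // args[0] is the endpoint, args[1] the params
console.log(params.index); // 'esqueue'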

View file

@ -1,29 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import expect from 'expect.js';
import proxyquire from 'proxyquire';
import { ClientMock } from '../fixtures/elasticsearch';
const { createClient, isClient } = proxyquire.noPreserveCache()('../../helpers/create_client', {
'elasticsearch': { Client: ClientMock }
});
describe('Create client helper', function () {
it('should have a client', function () {
const options = {
host: 'http://localhost:9200'
};
const client = createClient(options);
expect(isClient(client)).to.be.ok();
});
it('should use passed in instance', function () {
const clientInstance = new ClientMock();
const client = createClient(clientInstance);
expect(client).to.equal(clientInstance);
});
});

View file

@ -18,7 +18,7 @@ describe('Create Index', function () {
beforeEach(function () {
client = new ClientMock();
createSpy = sinon.spy(client.indices, 'create');
createSpy = sinon.spy(client, 'callWithInternalUser').withArgs('indices.create');
});
it('should return true', function () {
@ -37,7 +37,7 @@ describe('Create Index', function () {
return result
.then(function () {
const payload = createSpy.getCall(0).args[0];
const payload = createSpy.getCall(0).args[1];
sinon.assert.callCount(createSpy, 1);
expect(payload).to.have.property('index', indexName);
expect(payload).to.have.property('body');
@ -57,7 +57,7 @@ describe('Create Index', function () {
return result
.then(function () {
const payload = createSpy.getCall(0).args[0];
const payload = createSpy.getCall(0).args[1];
sinon.assert.callCount(createSpy, 1);
expect(payload).to.have.property('index', indexName);
expect(payload).to.have.property('body');
@ -83,7 +83,7 @@ describe('Create Index', function () {
return result
.then(function () {
const payload = createSpy.getCall(0).args[0];
const payload = createSpy.getCall(0).args[1];
sinon.assert.callCount(createSpy, 1);
expect(payload).to.have.property('index', indexName);
expect(payload).to.have.property('body');
@ -102,8 +102,10 @@ describe('Create Index', function () {
beforeEach(function () {
client = new ClientMock();
sinon.stub(client.indices, 'exists').callsFake(() => Promise.resolve(true));
createSpy = sinon.spy(client.indices, 'create');
sinon.stub(client, 'callWithInternalUser')
.withArgs('indices.exists')
.callsFake(() => Promise.resolve(true));
createSpy = client.callWithInternalUser.withArgs('indices.create');
});
it('should return true', function () {

View file

@ -36,27 +36,11 @@ describe('Esqueue class', function () {
const init = () => new Esqueue();
expect(init).to.throwException(/must.+specify.+index/i);
});
it('should throw with an invalid host', function () {
const init = () => new Esqueue('esqueue', {
client: { host: 'nope://nope' }
});
expect(init).to.throwException(/invalid.+protocol/i);
});
it('should throw with invalid hosts', function () {
const init = () => new Esqueue('esqueue', {
client: { hosts: [{ host: 'localhost', protocol: 'nope' }] }
});
expect(init).to.throwException(/invalid.+protocol/i);
});
});
describe('Queue construction', function () {
it('should ping the ES server', function () {
const pingSpy = sinon.spy(client, 'ping');
const pingSpy = sinon.spy(client, 'callWithInternalUser').withArgs('ping');
new Esqueue('esqueue', { client });
sinon.assert.calledOnce(pingSpy);
});

View file

@ -25,7 +25,7 @@ const defaultCreatedBy = false;
function validateDoc(spy) {
sinon.assert.callCount(spy, 1);
const spyCall = spy.getCall(0);
return spyCall.args[0];
return spyCall.args[1];
}
describe('Job Class', function () {
@ -70,10 +70,11 @@ describe('Job Class', function () {
});
describe('construction', function () {
let indexSpy;
beforeEach(function () {
type = 'type1';
payload = { id: '123' };
sinon.spy(client, 'index');
indexSpy = sinon.spy(client, 'callWithInternalUser').withArgs('index');
});
it('should create the target index', function () {
@ -90,7 +91,7 @@ describe('Job Class', function () {
it('should index the payload', function () {
const job = new Job(mockQueue, index, type, payload);
return job.ready.then(() => {
const indexArgs = validateDoc(client.index);
const indexArgs = validateDoc(indexSpy);
expect(indexArgs).to.have.property('index', index);
expect(indexArgs).to.have.property('type', constants.DEFAULT_SETTING_DOCTYPE);
expect(indexArgs).to.have.property('body');
@ -101,7 +102,7 @@ describe('Job Class', function () {
it('should index the job type', function () {
const job = new Job(mockQueue, index, type, payload);
return job.ready.then(() => {
const indexArgs = validateDoc(client.index);
const indexArgs = validateDoc(indexSpy);
expect(indexArgs).to.have.property('index', index);
expect(indexArgs).to.have.property('type', constants.DEFAULT_SETTING_DOCTYPE);
expect(indexArgs).to.have.property('body');
@ -112,19 +113,19 @@ describe('Job Class', function () {
it('should set event creation time', function () {
const job = new Job(mockQueue, index, type, payload);
return job.ready.then(() => {
const indexArgs = validateDoc(client.index);
const indexArgs = validateDoc(indexSpy);
expect(indexArgs.body).to.have.property('created_at');
});
});
it('should refresh the index', function () {
const refreshSpy = sinon.spy(client.indices, 'refresh');
const refreshSpy = client.callWithInternalUser.withArgs('indices.refresh');
const job = new Job(mockQueue, index, type, payload);
return job.ready.then(() => {
sinon.assert.calledOnce(refreshSpy);
const spyCall = refreshSpy.getCall(0);
expect(spyCall.args[0]).to.have.property('index', index);
expect(spyCall.args[1]).to.have.property('index', index);
});
});
@ -163,8 +164,10 @@ describe('Job Class', function () {
it('should emit error on client index failure', function (done) {
const errMsg = 'test document index failure';
client.index.restore();
sinon.stub(client, 'index').callsFake(() => Promise.reject(new Error(errMsg)));
client.callWithInternalUser.restore();
sinon.stub(client, 'callWithInternalUser')
.withArgs('index')
.callsFake(() => Promise.reject(new Error(errMsg)));
const job = new Job(mockQueue, index, type, payload);
job.once(constants.EVENT_JOB_CREATE_ERROR, (err) => {
@ -206,16 +209,17 @@ describe('Job Class', function () {
});
describe('default values', function () {
let indexSpy;
beforeEach(function () {
type = 'type1';
payload = { id: '123' };
sinon.spy(client, 'index');
indexSpy = sinon.spy(client, 'callWithInternalUser').withArgs('index');
});
it('should set attempt count to 0', function () {
const job = new Job(mockQueue, index, type, payload);
return job.ready.then(() => {
const indexArgs = validateDoc(client.index);
const indexArgs = validateDoc(indexSpy);
expect(indexArgs.body).to.have.property('attempts', 0);
});
});
@ -223,7 +227,7 @@ describe('Job Class', function () {
it('should index default created_by value', function () {
const job = new Job(mockQueue, index, type, payload);
return job.ready.then(() => {
const indexArgs = validateDoc(client.index);
const indexArgs = validateDoc(indexSpy);
expect(indexArgs.body).to.have.property('created_by', defaultCreatedBy);
});
});
@ -232,7 +236,7 @@ describe('Job Class', function () {
const now = new Date().getTime();
const job = new Job(mockQueue, index, type, payload);
return job.ready.then(() => {
const indexArgs = validateDoc(client.index);
const indexArgs = validateDoc(indexSpy);
expect(indexArgs.body).to.have.property('process_expiration');
expect(indexArgs.body.process_expiration.getTime()).to.be.lessThan(now);
});
@ -241,7 +245,7 @@ describe('Job Class', function () {
it('should set status as pending', function () {
const job = new Job(mockQueue, index, type, payload);
return job.ready.then(() => {
const indexArgs = validateDoc(client.index);
const indexArgs = validateDoc(indexSpy);
expect(indexArgs.body).to.have.property('status', constants.JOB_STATUS_PENDING);
});
});
@ -249,7 +253,7 @@ describe('Job Class', function () {
it('should have a default priority of 10', function () {
const job = new Job(mockQueue, index, type, payload, options);
return job.ready.then(() => {
const indexArgs = validateDoc(client.index);
const indexArgs = validateDoc(indexSpy);
expect(indexArgs.body).to.have.property('priority', defaultPriority);
});
});
@ -257,13 +261,14 @@ describe('Job Class', function () {
it('should set a browser type', function () {
const job = new Job(mockQueue, index, type, payload);
return job.ready.then(() => {
const indexArgs = validateDoc(client.index);
const indexArgs = validateDoc(indexSpy);
expect(indexArgs.body).to.have.property('browser_type');
});
});
});
describe('option passing', function () {
let indexSpy;
beforeEach(function () {
type = 'type1';
payload = { id: '123' };
@ -274,7 +279,7 @@ describe('Job Class', function () {
authorization: 'Basic cXdlcnR5'
}
};
sinon.spy(client, 'index');
indexSpy = sinon.spy(client, 'callWithInternalUser').withArgs('index');
});
it('should index the created_by value', function () {
@ -284,7 +289,7 @@ describe('Job Class', function () {
...options
});
return job.ready.then(() => {
const indexArgs = validateDoc(client.index);
const indexArgs = validateDoc(indexSpy);
expect(indexArgs.body).to.have.property('created_by', createdBy);
});
});
@ -292,7 +297,7 @@ describe('Job Class', function () {
it('should index timeout value from options', function () {
const job = new Job(mockQueue, index, type, payload, options);
return job.ready.then(() => {
const indexArgs = validateDoc(client.index);
const indexArgs = validateDoc(indexSpy);
expect(indexArgs.body).to.have.property('timeout', options.timeout);
});
});
@ -300,7 +305,7 @@ describe('Job Class', function () {
it('should set max attempt count', function () {
const job = new Job(mockQueue, index, type, payload, options);
return job.ready.then(() => {
const indexArgs = validateDoc(client.index);
const indexArgs = validateDoc(indexSpy);
expect(indexArgs.body).to.have.property('max_attempts', options.max_attempts);
});
});
@ -308,7 +313,7 @@ describe('Job Class', function () {
it('should add headers to the request params', function () {
const job = new Job(mockQueue, index, type, payload, options);
return job.ready.then(() => {
const indexArgs = validateDoc(client.index);
const indexArgs = validateDoc(indexSpy);
expect(indexArgs).to.have.property('headers', options.headers);
});
});
@ -316,7 +321,7 @@ describe('Job Class', function () {
it(`should use upper priority of ${maxPriority}`, function () {
const job = new Job(mockQueue, index, type, payload, { priority: maxPriority * 2 });
return job.ready.then(() => {
const indexArgs = validateDoc(client.index);
const indexArgs = validateDoc(indexSpy);
expect(indexArgs.body).to.have.property('priority', maxPriority);
});
});
@ -324,51 +329,12 @@ describe('Job Class', function () {
it(`should use lower priority of ${minPriority}`, function () {
const job = new Job(mockQueue, index, type, payload, { priority: minPriority * 2 });
return job.ready.then(() => {
const indexArgs = validateDoc(client.index);
const indexArgs = validateDoc(indexSpy);
expect(indexArgs.body).to.have.property('priority', minPriority);
});
});
});
describe('custom client', function () {
let newClient;
let job;
beforeEach(function () {
sinon.spy(client, 'index');
newClient = new ClientMock();
sinon.spy(newClient, 'index');
job = new Job(mockQueue, index, type, payload, {
client: newClient,
...options
});
});
it('should create the target index', function () {
return job.ready.then(() => {
sinon.assert.calledOnce(createIndexMock);
const args = createIndexMock.getCall(0).args;
expect(args[0]).to.equal(newClient);
expect(args[1]).to.equal(index);
expect(args[2]).to.equal(constants.DEFAULT_SETTING_DOCTYPE);
});
});
it('should index the payload', function () {
return job.ready.then(() => {
sinon.assert.callCount(client.index, 0);
sinon.assert.callCount(newClient.index, 1);
const newDoc = newClient.index.getCall(0).args[0];
expect(newDoc).to.have.property('index', index);
expect(newDoc).to.have.property('type', constants.DEFAULT_SETTING_DOCTYPE);
expect(newDoc).to.have.property('body');
expect(newDoc.body).to.have.property('payload', payload);
});
});
});
describe('get method', function () {
beforeEach(function () {
type = 'type2';
@ -401,9 +367,11 @@ describe('Job Class', function () {
};
const job = new Job(mockQueue, index, type, payload, optionals);
return Promise.resolve(client.get({}, optionals))
return Promise.resolve(client.callWithInternalUser('get', {}, optionals))
.then((doc) => {
sinon.stub(client, 'get').returns(Promise.resolve(doc));
sinon.stub(client, 'callWithInternalUser')
.withArgs('get')
.returns(Promise.resolve(doc));
})
.then(() => {
return job.get()

View file

@ -102,7 +102,6 @@ describe('Worker class', function () {
worker = new Worker(mockQueue, jobtype, workerFn, defaultWorkerOptions);
expect(worker).to.have.property('id');
expect(worker).to.have.property('queue', mockQueue);
expect(worker).to.have.property('client', client);
expect(worker).to.have.property('jobtype', jobtype);
expect(worker).to.have.property('workerFn', workerFn);
expect(worker).to.have.property('checkSize');
@ -118,14 +117,6 @@ describe('Worker class', function () {
expect(worker.id).to.not.equal(worker2.id);
});
it('should use custom client', function () {
const newClient = new ClientMock();
worker = new Worker(mockQueue, 'test', noop, { ...defaultWorkerOptions, client: newClient });
expect(worker).to.have.property('queue', mockQueue);
expect(worker).to.have.property('client', newClient);
expect(worker.client).to.not.equal(client);
});
});
describe('event emitting', function () {
@ -281,15 +272,14 @@ describe('Worker class', function () {
function getSearchParams(jobtype = 'test', params = {}) {
worker = new Worker(mockQueue, jobtype, noop, { ...defaultWorkerOptions, ...params });
worker._getPendingJobs();
return searchStub.firstCall.args[0];
return searchStub.firstCall.args[1];
}
describe('error handling', function () {
beforeEach(() => {
});
it('should pass search errors', function (done) {
searchStub = sinon.stub(mockQueue.client, 'search').callsFake(() => Promise.reject());
searchStub = sinon.stub(mockQueue.client, 'callWithInternalUser')
.withArgs('search')
.callsFake(() => Promise.reject());
worker = new Worker(mockQueue, 'test', noop, defaultWorkerOptions);
worker._getPendingJobs()
.then(() => done(new Error('should not resolve')))
@ -301,9 +291,9 @@ describe('Worker class', function () {
describe('missing index', function () {
it('should swallow error', function (done) {
searchStub = sinon.stub(mockQueue.client, 'search').callsFake(() => Promise.reject({
status: 404
}));
searchStub = sinon.stub(mockQueue.client, 'callWithInternalUser')
.withArgs('search')
.callsFake(() => Promise.reject({ status: 404 }));
worker = new Worker(mockQueue, 'test', noop, defaultWorkerOptions);
worker._getPendingJobs()
.then(() => { done(); })
@ -311,9 +301,9 @@ describe('Worker class', function () {
});
it('should return an empty array', function (done) {
searchStub = sinon.stub(mockQueue.client, 'search').callsFake(() => Promise.reject({
status: 404
}));
searchStub = sinon.stub(mockQueue.client, 'callWithInternalUser')
.withArgs('search')
.callsFake(() => Promise.reject({ status: 404 }));
worker = new Worker(mockQueue, 'test', noop, defaultWorkerOptions);
worker._getPendingJobs()
.then((res) => {
@ -333,7 +323,8 @@ describe('Worker class', function () {
describe('query parameters', function () {
beforeEach(() => {
searchStub = sinon.stub(mockQueue.client, 'search').callsFake(() => Promise.resolve({ hits: { hits: [] } }));
searchStub = sinon.stub(mockQueue.client, 'callWithInternalUser')
.callsFake(() => Promise.resolve({ hits: { hits: [] } }));
});
it('should query by default doctype', function () {
@ -353,7 +344,9 @@ describe('Worker class', function () {
const jobtype = 'test_jobtype';
beforeEach(() => {
searchStub = sinon.stub(mockQueue.client, 'search').callsFake(() => Promise.resolve({ hits: { hits: [] } }));
searchStub = sinon.stub(mockQueue.client, 'callWithInternalUser')
.withArgs('search')
.callsFake(() => Promise.resolve({ hits: { hits: [] } }));
anchorMoment = moment(anchor);
clock = sinon.useFakeTimers(anchorMoment.valueOf());
});
@ -433,11 +426,11 @@ describe('Worker class', function () {
type: 'test',
id: 12345,
};
return mockQueue.client.get(params)
return mockQueue.client.callWithInternalUser('get', params)
.then((jobDoc) => {
job = jobDoc;
worker = new Worker(mockQueue, 'test', noop, defaultWorkerOptions);
updateSpy = sinon.spy(mockQueue.client, 'update');
updateSpy = sinon.spy(mockQueue.client, 'callWithInternalUser').withArgs('update');
});
});
@ -447,7 +440,7 @@ describe('Worker class', function () {
it('should use seqNo and primaryTerm on update', function () {
worker._claimJob(job);
const query = updateSpy.firstCall.args[0];
const query = updateSpy.firstCall.args[1];
expect(query).to.have.property('index', job._index);
expect(query).to.have.property('type', job._type);
expect(query).to.have.property('id', job._id);
@ -457,19 +450,19 @@ describe('Worker class', function () {
it('should increment the job attempts', function () {
worker._claimJob(job);
const doc = updateSpy.firstCall.args[0].body.doc;
const doc = updateSpy.firstCall.args[1].body.doc;
expect(doc).to.have.property('attempts', job._source.attempts + 1);
});
it('should update the job status', function () {
worker._claimJob(job);
const doc = updateSpy.firstCall.args[0].body.doc;
const doc = updateSpy.firstCall.args[1].body.doc;
expect(doc).to.have.property('status', constants.JOB_STATUS_PROCESSING);
});
it('should set job expiration time', function () {
worker._claimJob(job);
const doc = updateSpy.firstCall.args[0].body.doc;
const doc = updateSpy.firstCall.args[1].body.doc;
const expiration = anchorMoment.add(defaults.timeout).toISOString();
expect(doc).to.have.property('process_expiration', expiration);
});
@ -501,8 +494,10 @@ describe('Worker class', function () {
});
it('should reject the promise on conflict errors', function () {
mockQueue.client.update.restore();
sinon.stub(mockQueue.client, 'update').returns(Promise.reject({ statusCode: 409 }));
mockQueue.client.callWithInternalUser.restore();
sinon.stub(mockQueue.client, 'callWithInternalUser')
.withArgs('update')
.returns(Promise.reject({ statusCode: 409 }));
return worker._claimJob(job)
.catch(err => {
expect(err).to.eql({ statusCode: 409 });
@ -510,8 +505,10 @@ describe('Worker class', function () {
});
it('should reject the promise on other errors', function () {
mockQueue.client.update.restore();
sinon.stub(mockQueue.client, 'update').returns(Promise.reject({ statusCode: 401 }));
mockQueue.client.callWithInternalUser.restore();
sinon.stub(mockQueue.client, 'callWithInternalUser')
.withArgs('update')
.returns(Promise.reject({ statusCode: 401 }));
return worker._claimJob(job)
.catch(err => {
expect(err).to.eql({ statusCode: 401 });
@ -545,11 +542,13 @@ describe('Worker class', function () {
});
afterEach(() => {
mockQueue.client.update.restore();
mockQueue.client.callWithInternalUser.restore();
});
it('should emit for errors from claiming job', function (done) {
sinon.stub(mockQueue.client, 'update').returns(Promise.reject({ statusCode: 401 }));
sinon.stub(mockQueue.client, 'callWithInternalUser')
.withArgs('update')
.returns(Promise.reject({ statusCode: 401 }));
worker.once(constants.EVENT_WORKER_JOB_CLAIM_ERROR, function (err) {
try {
@ -567,7 +566,9 @@ describe('Worker class', function () {
});
it('should reject the promise if an error claiming the job', function () {
sinon.stub(mockQueue.client, 'update').returns(Promise.reject({ statusCode: 409 }));
sinon.stub(mockQueue.client, 'callWithInternalUser')
.withArgs('update')
.returns(Promise.reject({ statusCode: 409 }));
return worker._claimPendingJobs(getMockJobs())
.catch(err => {
expect(err).to.eql({ statusCode: 409 });
@ -575,7 +576,9 @@ describe('Worker class', function () {
});
it('should get the pending job', function () {
sinon.stub(mockQueue.client, 'update').returns(Promise.resolve({ test: 'cool' }));
sinon.stub(mockQueue.client, 'callWithInternalUser')
.withArgs('update')
.returns(Promise.resolve({ test: 'cool' }));
sinon.stub(worker, '_performJob').callsFake(identity);
return worker._claimPendingJobs(getMockJobs())
.then(claimedJob => {
@ -597,11 +600,11 @@ describe('Worker class', function () {
anchorMoment = moment(anchor);
clock = sinon.useFakeTimers(anchorMoment.valueOf());
return mockQueue.client.get()
return mockQueue.client.callWithInternalUser('get')
.then((jobDoc) => {
job = jobDoc;
worker = new Worker(mockQueue, 'test', noop, defaultWorkerOptions);
updateSpy = sinon.spy(mockQueue.client, 'update');
updateSpy = sinon.spy(mockQueue.client, 'callWithInternalUser').withArgs('update');
});
});
@ -611,7 +614,7 @@ describe('Worker class', function () {
it('should use _seq_no and _primary_term on update', function () {
worker._failJob(job);
const query = updateSpy.firstCall.args[0];
const query = updateSpy.firstCall.args[1];
expect(query).to.have.property('index', job._index);
expect(query).to.have.property('type', job._type);
expect(query).to.have.property('id', job._id);
@ -621,28 +624,32 @@ describe('Worker class', function () {
it('should set status to failed', function () {
worker._failJob(job);
const doc = updateSpy.firstCall.args[0].body.doc;
const doc = updateSpy.firstCall.args[1].body.doc;
expect(doc).to.have.property('status', constants.JOB_STATUS_FAILED);
});
it('should append error message if supplied', function () {
const msg = 'test message';
worker._failJob(job, msg);
const doc = updateSpy.firstCall.args[0].body.doc;
const doc = updateSpy.firstCall.args[1].body.doc;
expect(doc).to.have.property('output');
expect(doc.output).to.have.property('content', msg);
});
it('should return true on conflict errors', function () {
mockQueue.client.update.restore();
sinon.stub(mockQueue.client, 'update').returns(Promise.reject({ statusCode: 409 }));
mockQueue.client.callWithInternalUser.restore();
sinon.stub(mockQueue.client, 'callWithInternalUser')
.withArgs('update')
.returns(Promise.reject({ statusCode: 409 }));
return worker._failJob(job)
.then((res) => expect(res).to.equal(true));
});
it('should return false on other document update errors', function () {
mockQueue.client.update.restore();
sinon.stub(mockQueue.client, 'update').returns(Promise.reject({ statusCode: 401 }));
mockQueue.client.callWithInternalUser.restore();
sinon.stub(mockQueue.client, 'callWithInternalUser')
.withArgs('update')
.returns(Promise.reject({ statusCode: 401 }));
return worker._failJob(job)
.then((res) => expect(res).to.equal(false));
});
@ -653,7 +660,7 @@ describe('Worker class', function () {
clock.tick(100);
worker._failJob(job, msg);
const doc = updateSpy.firstCall.args[0].body.doc;
const doc = updateSpy.firstCall.args[1].body.doc;
expect(doc).to.have.property('output');
expect(doc).to.have.property('status', constants.JOB_STATUS_FAILED);
expect(doc).to.have.property('completed_at');
@ -677,8 +684,10 @@ describe('Worker class', function () {
});
it('should emit on other document update errors', function (done) {
mockQueue.client.update.restore();
sinon.stub(mockQueue.client, 'update').returns(Promise.reject({ statusCode: 401 }));
mockQueue.client.callWithInternalUser.restore();
sinon.stub(mockQueue.client, 'callWithInternalUser')
.withArgs('update')
.returns(Promise.reject({ statusCode: 401 }));
worker.on(constants.EVENT_WORKER_FAIL_UPDATE_ERROR, function (err) {
try {
@ -705,10 +714,10 @@ describe('Worker class', function () {
value: random(0, 100, true)
};
return mockQueue.client.get({}, { payload })
return mockQueue.client.callWithInternalUser('get', {}, { payload })
.then((jobDoc) => {
job = jobDoc;
updateSpy = sinon.spy(mockQueue.client, 'update');
updateSpy = sinon.spy(mockQueue.client, 'callWithInternalUser').withArgs('update');
});
});
@ -733,7 +742,7 @@ describe('Worker class', function () {
return worker._performJob(job)
.then(() => {
sinon.assert.calledOnce(updateSpy);
const query = updateSpy.firstCall.args[0];
const query = updateSpy.firstCall.args[1];
expect(query).to.have.property('index', job._index);
expect(query).to.have.property('type', job._type);
expect(query).to.have.property('id', job._id);
@ -758,7 +767,7 @@ describe('Worker class', function () {
return worker._performJob(job)
.then(() => {
sinon.assert.calledOnce(updateSpy);
const doc = updateSpy.firstCall.args[0].body.doc;
const doc = updateSpy.firstCall.args[1].body.doc;
expect(doc).to.have.property('status', constants.JOB_STATUS_COMPLETED);
expect(doc).to.have.property('completed_at');
const completedTimestamp = moment(doc.completed_at).valueOf();
@ -880,7 +889,9 @@ describe('Worker class', function () {
}
};
sinon.stub(mockQueue.client, 'update').returns(Promise.reject({ statusCode: 413 }));
sinon.stub(mockQueue.client, 'callWithInternalUser')
.withArgs('update')
.returns(Promise.reject({ statusCode: 413 }));
const workerFn = function (jobPayload) {
return new Promise(function (resolve) {
@ -899,7 +910,9 @@ describe('Worker class', function () {
describe('search failure', function () {
it('causes _processPendingJobs to reject the Promise', function () {
sinon.stub(mockQueue.client, 'search').returns(Promise.reject(new Error('test error')));
sinon.stub(mockQueue.client, 'callWithInternalUser')
.withArgs('search')
.returns(Promise.reject(new Error('test error')));
worker = new Worker(mockQueue, 'test', noop, defaultWorkerOptions);
return worker._processPendingJobs()
.then(() => {
@ -998,7 +1011,9 @@ describe('Worker class', function () {
};
beforeEach(function () {
sinon.stub(mockQueue.client, 'search').callsFake(() => Promise.resolve({ hits: { hits: [] } }));
sinon.stub(mockQueue.client, 'callWithInternalUser')
.withArgs('search')
.callsFake(() => Promise.resolve({ hits: { hits: [] } }));
});
describe('workerFn rejects promise', function () {

View file

@ -1,24 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import elasticsearch from 'elasticsearch';
export function createClient(options) {
let client;
if (isClient(options)) {
client = options;
} else {
client = new elasticsearch.Client(options);
}
return client;
}
export function isClient(client) {
// if there's a transport property, assume it's a client instance
return !!client.transport;
}

Some files were not shown because too many files have changed in this diff.