Merge remote-tracking branch 'origin/master' into feature/merge-code

This commit is contained in:
Fuyao Zhao 2019-03-13 09:49:01 -07:00
commit 4963be3d37
127 changed files with 2547 additions and 1083 deletions

View file

@ -20,7 +20,7 @@ NOTE: You cannot access these endpoints via the Console in Kibana.
* <<saved-objects-api-delete>>
* <<saved-objects-api-export>>
* <<saved-objects-api-import>>
* <<saved-objects-api-resolve-import-conflicts>>
* <<saved-objects-api-resolve-import-errors>>
include::saved-objects/get.asciidoc[]
include::saved-objects/bulk_get.asciidoc[]
@ -31,4 +31,4 @@ include::saved-objects/update.asciidoc[]
include::saved-objects/delete.asciidoc[]
include::saved-objects/export.asciidoc[]
include::saved-objects/import.asciidoc[]
include::saved-objects/resolve_import_conflicts.asciidoc[]
include::saved-objects/resolve_import_errors.asciidoc[]

View file

@ -1,15 +1,15 @@
[[saved-objects-api-resolve-import-conflicts]]
=== Resolve Import Conflicts
[[saved-objects-api-resolve-import-errors]]
=== Resolve Import Errors
experimental[This functionality is *experimental* and may be changed or removed completely in a future release.]
The resolve import conflicts API enables you to resolve conflicts given by the import API by either overwriting specific saved objects or changing references to a newly created object.
The resolve import errors API enables you to resolve errors given by the import API by either overwriting specific saved objects or changing references to a newly created object.
Note: You cannot access this endpoint via the Console in Kibana.
==== Request
`POST /api/saved_objects/_resolve_import_conflicts`
`POST /api/saved_objects/_resolve_import_errors`
==== Request body
@ -35,12 +35,12 @@ In the scenario the import wasn't successful a top level `errors` array will con
==== Examples
The following example resolves conflicts for an index pattern and dashboard but indicates to skip the index pattern.
The following example resolves errors for an index pattern and dashboard but indicates to skip the index pattern.
This will cause the index pattern to not be in the system and the dashboard to overwrite the existing saved object.
[source,js]
--------------------------------------------------
POST api/saved_objects/_resolve_import_conflicts
POST api/saved_objects/_resolve_import_errors
Content-Type: multipart/form-data; boundary=EXAMPLE
--EXAMPLE
Content-Disposition: form-data; name="file"; filename="export.ndjson"
@ -71,12 +71,12 @@ containing a JSON structure similar to the following example:
}
--------------------------------------------------
The following example resolves conflicts for a visualization and dashboard but indicates
The following example resolves errors for a visualization and dashboard but indicates
to replace the dashboard references to another visualization.
[source,js]
--------------------------------------------------
POST api/saved_objects/_resolve_import_conflicts
POST api/saved_objects/_resolve_import_errors
Content-Type: multipart/form-data; boundary=EXAMPLE
--EXAMPLE
Content-Disposition: form-data; name="file"; filename="export.ndjson"

View file

@ -785,7 +785,8 @@ function discoverController(
const tabifiedData = tabifyAggResponse($scope.vis.aggs, merged);
$scope.searchSource.rawResponse = merged;
Promise
.resolve(responseHandler(tabifiedData, buildVislibDimensions($scope.vis, $scope.timeRange)))
.resolve(buildVislibDimensions($scope.vis, { timeRange: $scope.timeRange, searchSource: $scope.searchSource }))
.then(resp => responseHandler(tabifiedData, resp))
.then(resp => {
visualizeHandler.render({
as: 'visualization',

Binary file not shown.

After

Width:  |  Height:  |  Size: 726 KiB

View file

@ -236,17 +236,12 @@ export async function resolveSavedObjects(savedObjects, overwriteAll, services,
importedObjectCount++;
}
} catch (error) {
if (error instanceof SavedObjectNotFound) {
if (error.savedObjectType === 'search') {
failedImports.push({ obj, error });
}
if (error.savedObjectType === 'index-pattern') {
if (obj.savedSearchId) {
conflictedSavedObjectsLinkedToSavedSearches.push(obj);
} else {
conflictedIndexPatterns.push({ obj, doc: otherDoc });
}
}
const isIndexPatternNotFound = error instanceof SavedObjectNotFound &&
error.savedObjectType === 'index-pattern';
if (isIndexPatternNotFound && obj.savedSearchId) {
conflictedSavedObjectsLinkedToSavedSearches.push(obj);
} else if (isIndexPatternNotFound) {
conflictedIndexPatterns.push({ obj, doc: otherDoc });
} else {
failedImports.push({ obj, error });
}

View file

@ -45,18 +45,21 @@ export function natsLogsSpecProvider(server, context) {
}),
euiIconType: 'logoNats',
artifacts: {
application: {
label: i18n.translate('kbn.server.tutorials.natsLogs.artifacts.application.label', {
defaultMessage: 'Discover',
}),
path: '/app/kibana#/discover'
},
dashboards: [],
dashboards: [
{
id: 'Filebeat-nats-overview-ecs',
linkLabel: i18n.translate('kbn.server.tutorials.natsLogs.artifacts.dashboards.linkLabel', {
defaultMessage: 'Nats logs dashboard',
}),
isOverview: true
}
],
exportedFields: {
documentationUrl: '{config.docs.beats.filebeat}/exported-fields-nats.html'
}
},
completionTimeMinutes: 10,
previewImagePath: '/plugins/kibana/home/tutorial_resources/nats_logs/screenshot.png',
onPrem: onPremInstructions(moduleName, platforms, geoipRequired, uaRequired, context),
elasticCloud: cloudInstructions(moduleName, platforms),
onPremElasticCloud: onPremCloudInstructions(moduleName, platforms)

View file

@ -0,0 +1,21 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
// Public surface of the mappings module: the lib helper functions plus the
// mapping-related type definitions, re-exported one concern per line.
export { getTypes } from './lib';
export { getProperty } from './lib';
export { getRootProperties } from './lib';
export { getRootPropertiesObjects } from './lib';
export { FieldMapping, MappingMeta, MappingProperties, IndexMapping } from './types';

View file

@ -17,6 +17,7 @@
* under the License.
*/
import { FieldMapping, IndexMapping } from '../types';
import { getProperty } from './get_property';
const MAPPINGS = {
@ -24,12 +25,12 @@ const MAPPINGS = {
foo: {
properties: {
name: {
type: 'text'
type: 'text',
},
description: {
type: 'text'
}
}
type: 'text',
},
},
},
bar: {
properties: {
@ -37,16 +38,16 @@ const MAPPINGS = {
type: 'text',
fields: {
box: {
type: 'keyword'
}
}
}
}
}
}
type: 'keyword',
},
},
},
},
},
},
};
function runTest(key, mapping) {
function runTest(key: string | string[], mapping: IndexMapping | FieldMapping) {
expect(typeof key === 'string' || Array.isArray(key)).toBeTruthy();
expect(typeof mapping).toBe('object');

View file

@ -18,40 +18,27 @@
*/
import toPath from 'lodash/internal/toPath';
import { CoreFieldMapping, FieldMapping, IndexMapping } from '../types';
/**
* Recursively read properties from the mapping object of type "object"
* until the `path` is resolved.
* @param {EsObjectMapping} mapping
* @param {Array<string>} path
* @return {Objects|undefined}
*/
function getPropertyMappingFromObjectMapping(mapping, path) {
const props = mapping && (mapping.properties || mapping.fields);
function getPropertyMappingFromObjectMapping(
mapping: IndexMapping | FieldMapping,
path: string[]
): FieldMapping | undefined {
const props =
(mapping && (mapping as IndexMapping).properties) ||
(mapping && (mapping as CoreFieldMapping).fields);
if (!props) {
return undefined;
}
if (path.length > 1) {
return getPropertyMappingFromObjectMapping(
props[path[0]],
path.slice(1)
);
return getPropertyMappingFromObjectMapping(props[path[0]], path.slice(1));
} else {
return props[path[0]];
}
}
/**
* Get the mapping for a specific property within the root type of the EsMappingsDsl.
* @param {EsMappingsDsl} mappings
* @param {string|Array<string>} path
* @return {Object|undefined}
*/
export function getProperty(mappings, path) {
return getPropertyMappingFromObjectMapping(
mappings,
toPath(path)
);
export function getProperty(mappings: IndexMapping | FieldMapping, path: string | string[]) {
return getPropertyMappingFromObjectMapping(mappings, toPath(path));
}

View file

@ -17,6 +17,8 @@
* under the License.
*/
import { IndexMapping } from '../types';
/**
* Get the property mappings for the root type in the EsMappingsDsl
*
@ -31,14 +33,11 @@
*
* This data can be found at `{indexName}.mappings.{typeName}.properties`
* in the es indices.get() response.
*
* @param {EsMappingsDsl} mapping
* @return {EsPropertyMappings}
*/
export function getRootProperties(mapping) {
if (mapping.type !== 'object' && !mapping.properties) {
export function getRootProperties(mapping: IndexMapping) {
if (!mapping.properties) {
throw new TypeError('Unable to get property names non-object root mapping');
}
return mapping.properties || {};
return mapping.properties;
}

View file

@ -23,16 +23,16 @@ test(`returns single object with properties`, () => {
const mappings = {
properties: {
foo: {
properties: {}
}
}
properties: {},
},
},
};
const result = getRootPropertiesObjects(mappings);
expect(result).toEqual({
foo: {
properties: {}
}
properties: {},
},
});
});
@ -40,16 +40,16 @@ test(`returns single object with type === 'object'`, () => {
const mappings = {
properties: {
foo: {
type: 'object'
}
}
type: 'object',
},
},
};
const result = getRootPropertiesObjects(mappings);
expect(result).toEqual({
foo: {
type: 'object'
}
type: 'object',
},
});
});
@ -57,22 +57,22 @@ test(`returns two objects with properties`, () => {
const mappings = {
properties: {
foo: {
properties: {}
properties: {},
},
bar: {
properties: {}
}
}
properties: {},
},
},
};
const result = getRootPropertiesObjects(mappings);
expect(result).toEqual({
foo: {
properties: {}
properties: {},
},
bar: {
properties: {}
}
properties: {},
},
});
});
@ -80,22 +80,22 @@ test(`returns two objects with type === 'object'`, () => {
const mappings = {
properties: {
foo: {
type: 'object'
type: 'object',
},
bar: {
type: 'object'
}
}
type: 'object',
},
},
};
const result = getRootPropertiesObjects(mappings);
expect(result).toEqual({
foo: {
type: 'object'
type: 'object',
},
bar: {
type: 'object'
}
type: 'object',
},
});
});
@ -103,9 +103,9 @@ test(`excludes objects without properties and type of keyword`, () => {
const mappings = {
properties: {
foo: {
type: 'keyword'
}
}
type: 'keyword',
},
},
};
const result = getRootPropertiesObjects(mappings);
@ -116,12 +116,12 @@ test(`excludes two objects without properties and type of keyword`, () => {
const mappings = {
properties: {
foo: {
type: 'keyword'
type: 'keyword',
},
bar: {
type: 'keyword'
}
}
type: 'keyword',
},
},
};
const result = getRootPropertiesObjects(mappings);
@ -132,19 +132,19 @@ test(`includes one object with properties and excludes one object without proper
const mappings = {
properties: {
foo: {
properties: {}
properties: {},
},
bar: {
type: 'keyword'
}
}
type: 'keyword',
},
},
};
const result = getRootPropertiesObjects(mappings);
expect(result).toEqual({
foo: {
properties: {}
}
properties: {},
},
});
});
@ -152,19 +152,19 @@ test(`includes one object with type === 'object' and excludes one object without
const mappings = {
properties: {
foo: {
type: 'object'
type: 'object',
},
bar: {
type: 'keyword'
}
}
type: 'keyword',
},
},
};
const result = getRootPropertiesObjects(mappings);
expect(result).toEqual({
foo: {
type: 'object'
}
type: 'object',
},
});
});

View file

@ -17,6 +17,7 @@
* under the License.
*/
import { ComplexFieldMapping, IndexMapping, MappingProperties } from '../types';
import { getRootProperties } from './get_root_properties';
/**
@ -34,20 +35,21 @@ import { getRootProperties } from './get_root_properties';
* @return {EsPropertyMappings}
*/
const blacklist = [
'migrationVersion',
'references',
];
const blacklist = ['migrationVersion', 'references'];
export function getRootPropertiesObjects(mappings) {
export function getRootPropertiesObjects(mappings: IndexMapping) {
const rootProperties = getRootProperties(mappings);
return Object.entries(rootProperties).reduce((acc, [key, value]) => {
// we consider the existence of the properties or type of object to designate that this is an object datatype
if (!blacklist.includes(key) && (value.properties || value.type === 'object')) {
acc[key] = value;
}
return acc;
}, {});
return Object.entries(rootProperties).reduce(
(acc, [key, value]) => {
// we consider the existence of the properties or type of object to designate that this is an object datatype
if (
!blacklist.includes(key) &&
((value as ComplexFieldMapping).properties || value.type === 'object')
) {
acc[key] = value;
}
return acc;
},
{} as MappingProperties
);
}

View file

@ -17,12 +17,11 @@
* under the License.
*/
import { IndexMapping } from '../types';
/**
* Get the names of the types defined in the EsMappingsDsl
*
* @param {EsMappingsDsl} mappings
* @return {Array<string>}
*/
export function getTypes(mappings) {
export function getTypes(mappings: IndexMapping) {
return Object.keys(mappings).filter(type => type !== '_default_');
}

View file

@ -0,0 +1,58 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
// A "leaf" field mapping: a concrete datatype, optionally with sub-fields
// (multi-fields), each of which has its own datatype.
// FieldMapping isn't 1:1 with the options available,
// modify as needed.
export interface CoreFieldMapping {
  type: string;
  fields?: {
    [subfield: string]: {
      type: string;
    };
  };
}

// An "object-like" field mapping: a container of nested properties rather
// than (or in addition to) a scalar datatype.
// FieldMapping isn't 1:1 with the options available,
// modify as needed.
export interface ComplexFieldMapping {
  dynamic?: string;
  type?: string;
  properties: MappingProperties;
}

// A field is either a leaf datatype or a nested object of more fields.
export type FieldMapping = CoreFieldMapping | ComplexFieldMapping;

// Field name -> mapping for every property under a mapping node.
export interface MappingProperties {
  [field: string]: FieldMapping;
}

export interface MappingMeta {
  // A dictionary of key -> md5 hash (e.g. 'dashboard': '24234qdfa3aefa3wa')
  // with each key being a root-level mapping property, and each value being
  // the md5 hash of that mapping's value when the index was created.
  migrationMappingPropertyHashes?: { [k: string]: string };
}

// The root mapping document for an index: its top-level properties plus
// optional dynamic-mapping mode and _meta bookkeeping.
// IndexMapping isn't 1:1 with the options available,
// modify as needed.
export interface IndexMapping {
  dynamic?: string;
  properties: MappingProperties;
  _meta?: MappingMeta;
}

View file

@ -18,4 +18,4 @@
*/
export { importSavedObjects } from './import_saved_objects';
export { resolveImportConflicts } from './resolve_import_conflicts';
export { resolveImportErrors } from './resolve_import_errors';

View file

@ -19,9 +19,9 @@
import { Readable } from 'stream';
import { SavedObject } from '../service';
import { resolveImportConflicts } from './resolve_import_conflicts';
import { resolveImportErrors } from './resolve_import_errors';
describe('resolveImportConflicts()', () => {
describe('resolveImportErrors()', () => {
const savedObjects: SavedObject[] = [
{
id: '1',
@ -85,7 +85,7 @@ describe('resolveImportConflicts()', () => {
savedObjectsClient.bulkCreate.mockResolvedValue({
saved_objects: savedObjects,
});
const result = await resolveImportConflicts({
const result = await resolveImportErrors({
readStream,
objectLimit: 4,
skips: [],
@ -112,7 +112,7 @@ Object {
savedObjectsClient.bulkCreate.mockResolvedValue({
saved_objects: savedObjects,
});
const result = await resolveImportConflicts({
const result = await resolveImportErrors({
readStream,
objectLimit: 4,
skips: [
@ -150,7 +150,7 @@ Object {
savedObjectsClient.bulkCreate.mockResolvedValue({
saved_objects: savedObjects,
});
const result = await resolveImportConflicts({
const result = await resolveImportErrors({
readStream,
objectLimit: 4,
skips: [],
@ -206,7 +206,7 @@ Object {
savedObjectsClient.bulkCreate.mockResolvedValue({
saved_objects: savedObjects,
});
const result = await resolveImportConflicts({
const result = await resolveImportErrors({
readStream,
objectLimit: 4,
skips: [],

View file

@ -23,7 +23,7 @@ import { collectSavedObjects } from './collect_saved_objects';
import { createObjectsFilter } from './create_objects_filter';
import { CustomError, extractErrors } from './extract_errors';
interface ResolveImportConflictsOptions {
interface ResolveImportErrorsOptions {
readStream: Readable;
objectLimit: number;
savedObjectsClient: SavedObjectsClient;
@ -48,14 +48,14 @@ interface ImportResponse {
errors?: CustomError[];
}
export async function resolveImportConflicts({
export async function resolveImportErrors({
readStream,
objectLimit,
skips,
overwrites,
savedObjectsClient,
replaceReferences,
}: ResolveImportConflictsOptions): Promise<ImportResponse> {
}: ResolveImportErrorsOptions): Promise<ImportResponse> {
let errors: CustomError[] = [];
const filter = createObjectsFilter(skips, overwrites, replaceReferences);
const objectsToResolve = await collectSavedObjects(readStream, objectLimit, filter);

View file

@ -17,8 +17,8 @@
* under the License.
*/
import { IndexMapping } from './../../../mappings';
import { buildActiveMappings, diffMappings } from './build_active_mappings';
import { IndexMapping } from './call_cluster';
describe('buildActiveMappings', () => {
test('combines all mappings and includes core mappings', () => {
@ -39,7 +39,7 @@ describe('buildActiveMappings', () => {
});
test('disallows mappings with leading underscore', () => {
const properties = { _hm: 'You shall not pass!' };
const properties = { _hm: { type: 'keyword' } };
expect(() => buildActiveMappings({ properties })).toThrow(
/Invalid mapping \"_hm\"\. Mappings cannot start with _/
@ -48,9 +48,9 @@ describe('buildActiveMappings', () => {
test('generated hashes are stable', () => {
const properties = {
aaa: { a: '...', b: '...', c: new Date('2019-01-02'), d: [{ hello: 'world' }] },
bbb: { c: new Date('2019-01-02'), d: [{ hello: 'world' }], a: '...', b: '...' },
ccc: { c: new Date('2020-01-02'), d: [{ hello: 'world' }], a: '...', b: '...' },
aaa: { type: 'keyword', fields: { a: { type: 'keyword' }, b: { type: 'text' } } },
bbb: { fields: { b: { type: 'text' }, a: { type: 'keyword' } }, type: 'keyword' },
ccc: { fields: { b: { type: 'text' }, a: { type: 'text' } }, type: 'keyword' },
};
const mappings = buildActiveMappings({ properties });
@ -108,7 +108,7 @@ describe('diffMappings', () => {
},
dynamic: 'strict',
properties: {
foo: 'bar',
foo: { type: 'keyword' },
},
};
const expected: IndexMapping = {
@ -117,7 +117,7 @@ describe('diffMappings', () => {
},
dynamic: 'strict',
properties: {
foo: 'baz',
foo: { type: 'text' },
},
};

View file

@ -23,7 +23,7 @@
import crypto from 'crypto';
import _ from 'lodash';
import { IndexMapping, MappingProperties } from './call_cluster';
import { IndexMapping, MappingProperties } from './../../../mappings';
/**
* Creates an index mapping with the core properties required by saved object

View file

@ -23,6 +23,8 @@
* functionality contained here.
*/
import { IndexMapping } from '../../../mappings';
export interface CallCluster {
(path: 'bulk', opts: { body: object[] }): Promise<BulkResult>;
(path: 'count', opts: CountOpts): Promise<{ count: number; _shards: ShardsInfo }>;
@ -185,20 +187,3 @@ export interface IndexInfo {
export interface IndicesInfo {
[index: string]: IndexInfo;
}
export interface MappingProperties {
[type: string]: any;
}
export interface MappingMeta {
// A dictionary of key -> md5 hash (e.g. 'dashboard': '24234qdfa3aefa3wa')
// with each key being a root-level mapping property, and each value being
// the md5 hash of that mapping's value when the index was created.
migrationMappingPropertyHashes?: { [k: string]: string };
}
export interface IndexMapping {
dynamic: string;
properties: MappingProperties;
_meta?: MappingMeta;
}

View file

@ -220,7 +220,7 @@ describe('ElasticIndex', () => {
expect(arg.body).toEqual({
mappings: {
dynamic: 'strict',
properties: { foo: 'bar' },
properties: { foo: { type: 'keyword' } },
},
settings: { auto_expand_replicas: '0-1', number_of_shards: 1 },
});
@ -264,7 +264,7 @@ describe('ElasticIndex', () => {
indexName: '.ze-index',
mappings: {
dynamic: 'strict',
properties: { foo: 'bar' },
properties: { foo: { type: 'keyword' } },
},
};
await Index.convertToAlias(callCluster as any, info, '.muchacha', 10);
@ -286,7 +286,7 @@ describe('ElasticIndex', () => {
expect(arg.body).toEqual({
mappings: {
dynamic: 'strict',
properties: { foo: 'bar' },
properties: { foo: { type: 'keyword' } },
},
settings: { auto_expand_replicas: '0-1', number_of_shards: 1 },
});
@ -323,7 +323,7 @@ describe('ElasticIndex', () => {
indexName: '.ze-index',
mappings: {
dynamic: 'strict',
properties: { foo: 'bar' },
properties: { foo: { type: 'keyword' } },
},
};
await expect(Index.convertToAlias(callCluster as any, info, '.muchacha', 10)).rejects.toThrow(

View file

@ -23,15 +23,9 @@
*/
import _ from 'lodash';
import { IndexMapping } from '../../../mappings';
import { MigrationVersion } from '../../serialization';
import {
AliasAction,
CallCluster,
IndexMapping,
NotFound,
RawDoc,
ShardsInfo,
} from './call_cluster';
import { AliasAction, CallCluster, NotFound, RawDoc, ShardsInfo } from './call_cluster';
// @ts-ignore untyped dependency
import { getTypes } from '../../../mappings';

View file

@ -20,6 +20,6 @@
export { DocumentMigrator } from './document_migrator';
export { IndexMigrator } from './index_migrator';
export { buildActiveMappings } from './build_active_mappings';
export { CallCluster, MappingProperties } from './call_cluster';
export { CallCluster } from './call_cluster';
export { LogFn } from './migration_logger';
export { MigrationResult } from './migration_coordinator';

View file

@ -25,8 +25,9 @@
*/
import { SavedObjectsSerializer } from '../../serialization';
import { MappingProperties } from './../../../mappings';
import { buildActiveMappings } from './build_active_mappings';
import { CallCluster, MappingProperties } from './call_cluster';
import { CallCluster } from './call_cluster';
import { VersionedTransformer } from './document_migrator';
import { fetchInfo, FullIndexInfo } from './elastic_index';
import { LogFn, Logger, MigrationLogger } from './migration_logger';

View file

@ -23,10 +23,11 @@
*/
import { once } from 'lodash';
import { MappingProperties } from '../../../mappings';
import { SavedObjectsSchema, SavedObjectsSchemaDefinition } from '../../schema';
import { RawSavedObjectDoc, SavedObjectsSerializer } from '../../serialization';
import { docValidator } from '../../validation';
import { buildActiveMappings, CallCluster, IndexMigrator, LogFn, MappingProperties } from '../core';
import { buildActiveMappings, CallCluster, IndexMigrator, LogFn } from '../core';
import { DocumentMigrator, VersionedTransformer } from '../core/document_migrator';
export interface KbnServer {

View file

@ -24,6 +24,6 @@ export { createDeleteRoute } from './delete';
export { createFindRoute } from './find';
export { createGetRoute } from './get';
export { createImportRoute } from './import';
export { createResolveImportConflictsRoute } from './resolve_import_conflicts';
export { createResolveImportErrorsRoute } from './resolve_import_errors';
export { createUpdateRoute } from './update';
export { createExportRoute } from './export';

View file

@ -19,9 +19,9 @@
import Hapi from 'hapi';
import { createMockServer } from './_mock_server';
import { createResolveImportConflictsRoute } from './resolve_import_conflicts';
import { createResolveImportErrorsRoute } from './resolve_import_errors';
describe('POST /api/saved_objects/_resolve_import_conflicts', () => {
describe('POST /api/saved_objects/_resolve_import_errors', () => {
let server: Hapi.Server;
const savedObjectsClient = {
errors: {} as any,
@ -53,13 +53,13 @@ describe('POST /api/saved_objects/_resolve_import_conflicts', () => {
},
};
server.route(createResolveImportConflictsRoute(prereqs, server));
server.route(createResolveImportErrorsRoute(prereqs, server));
});
test('formats successful response', async () => {
const request = {
method: 'POST',
url: '/api/saved_objects/_resolve_import_conflicts',
url: '/api/saved_objects/_resolve_import_errors',
payload: [
'--BOUNDARY',
'Content-Disposition: form-data; name="file"; filename="export.ndjson"',
@ -83,7 +83,7 @@ describe('POST /api/saved_objects/_resolve_import_conflicts', () => {
// NOTE: changes to this scenario should be reflected in the docs
const request = {
method: 'POST',
url: '/api/saved_objects/_resolve_import_conflicts',
url: '/api/saved_objects/_resolve_import_errors',
payload: [
'--EXAMPLE',
'Content-Disposition: form-data; name="file"; filename="export.ndjson"',
@ -152,7 +152,7 @@ describe('POST /api/saved_objects/_resolve_import_conflicts', () => {
// NOTE: changes to this scenario should be reflected in the docs
const request = {
method: 'POST',
url: '/api/saved_objects/_resolve_import_conflicts',
url: '/api/saved_objects/_resolve_import_errors',
payload: [
'--EXAMPLE',
'Content-Disposition: form-data; name="file"; filename="export.ndjson"',

View file

@ -23,7 +23,7 @@ import Joi from 'joi';
import { extname } from 'path';
import { Readable } from 'stream';
import { SavedObjectsClient } from '../';
import { resolveImportConflicts } from '../import';
import { resolveImportErrors } from '../import';
import { Prerequisites } from './types';
interface HapiReadableStream extends Readable {
@ -54,8 +54,8 @@ interface ImportRequest extends Hapi.Request {
};
}
export const createResolveImportConflictsRoute = (prereqs: Prerequisites, server: Hapi.Server) => ({
path: '/api/saved_objects/_resolve_import_conflicts',
export const createResolveImportErrorsRoute = (prereqs: Prerequisites, server: Hapi.Server) => ({
path: '/api/saved_objects/_resolve_import_errors',
method: 'POST',
config: {
pre: [prereqs.getSavedObjectsClient],
@ -102,7 +102,7 @@ export const createResolveImportConflictsRoute = (prereqs: Prerequisites, server
if (fileExtension !== '.ndjson') {
return Boom.badRequest(`Invalid file extension ${fileExtension}`);
}
return await resolveImportConflicts({
return await resolveImportErrors({
savedObjectsClient,
readStream: request.payload.file,
objectLimit: request.server.config().get('savedObjects.maxImportExportSize'),

View file

@ -33,7 +33,7 @@ import {
createUpdateRoute,
createExportRoute,
createImportRoute,
createResolveImportConflictsRoute,
createResolveImportErrorsRoute,
} from './routes';
export function savedObjectsMixin(kbnServer, server) {
@ -66,7 +66,7 @@ export function savedObjectsMixin(kbnServer, server) {
server.route(createUpdateRoute(prereqs));
server.route(createExportRoute(prereqs, server));
server.route(createImportRoute(prereqs, server));
server.route(createResolveImportConflictsRoute(prereqs, server));
server.route(createResolveImportErrorsRoute(prereqs, server));
const schema = new SavedObjectsSchema(kbnServer.uiExports.savedObjectSchemas);
const serializer = new SavedObjectsSerializer(schema);

View file

@ -142,10 +142,10 @@ describe('Saved Objects Mixin', () => {
savedObjectsMixin(mockKbnServer, mockServer);
expect(mockServer.route).toHaveBeenCalledWith(expect.objectContaining({ path: '/api/saved_objects/_import', method: 'POST' }));
});
it('should add POST /api/saved_objects/_resolve_import_conflicts', () => {
it('should add POST /api/saved_objects/_resolve_import_errors', () => {
savedObjectsMixin(mockKbnServer, mockServer);
expect(mockServer.route)
.toHaveBeenCalledWith(expect.objectContaining({ path: '/api/saved_objects/_resolve_import_conflicts', method: 'POST' }));
.toHaveBeenCalledWith(expect.objectContaining({ path: '/api/saved_objects/_resolve_import_errors', method: 'POST' }));
});
});

View file

@ -56,7 +56,7 @@ describe('initXAxis', function () {
});
it('makes the chart ordered if the agg is ordered', function () {
chart.aspects.x[0].params.date = true;
chart.aspects.x[0].params.interval = 10;
initXAxis(chart, table);
expect(chart)
@ -106,7 +106,6 @@ describe('initXAxis', function () {
});
it('reads the interval param from the x agg', function () {
chart.aspects.x[0].params.date = true;
chart.aspects.x[0].params.interval = 10;
initXAxis(chart, table);
expect(chart)

View file

@ -20,15 +20,16 @@
import { uniq } from 'lodash';
export function initXAxis(chart, table) {
const x = chart.aspects.x[0];
chart.xAxisOrderedValues = x.accessor === -1
? [x.params.defaultValue]
: uniq(table.rows.map(r => r[x.accessor]));
chart.xAxisFormat = x.format;
chart.xAxisLabel = x.title;
if (x.params.date) {
const { format, title, params, accessor } = chart.aspects.x[0];
chart.xAxisOrderedValues = accessor === -1
? [params.defaultValue]
: uniq(table.rows.map(r => r[accessor]));
chart.xAxisFormat = format;
chart.xAxisLabel = title;
if (params.interval) {
chart.ordered = {
interval: x.params.interval
interval: params.interval
};
}
}

View file

@ -32,8 +32,7 @@
}
.ace_indent-guide {
background: none;
border-right: 1px solid $euiColorMediumShade;
background: linear-gradient(to left, $euiColorMediumShade 0%, $euiColorMediumShade 1px, transparent 1px, transparent 100%);
}
.ace_search {

View file

@ -92,7 +92,7 @@ describe('Vislib Dispatch Class Test Suite', function () {
});
});
// test the addHoverEvent, addClickEvent, addBrushEvent methods by
// test the addHoverEvent, addClickEvent methods by
// checking that they return function which bind the events expected
function checkBoundAddMethod(name, event) {
describe(name + ' method', function () {
@ -119,7 +119,6 @@ describe('Vislib Dispatch Class Test Suite', function () {
checkBoundAddMethod('addHoverEvent', 'mouseover');
checkBoundAddMethod('addMouseoutEvent', 'mouseout');
checkBoundAddMethod('addClickEvent', 'click');
checkBoundAddMethod('addBrushEvent', 'mousedown');
describe('addMousePointer method', function () {
it('should be a function', function () {

View file

@ -244,33 +244,9 @@ export function VislibLibDispatchProvider(Private, config) {
addBrushEvent(svg) {
if (!this.isBrushable()) return;
const self = this;
const xScale = this.handler.categoryAxes[0].getScale();
const brush = this.createBrush(xScale, svg);
this.createBrush(xScale, svg);
function simulateClickWithBrushEnabled(d, i) {
if (!validBrushClick(d3.event)) return;
if (isQuantitativeScale(xScale)) {
const bar = d3.select(this);
const startX = d3.mouse(svg.node());
const startXInv = xScale.invert(startX[0]);
// Reset the brush value
brush.extent([startXInv, startXInv]);
// Magic!
// Need to call brush on svg to see brush when brushing
// while on top of bars.
// Need to call brush on bar to allow the click event to be registered
svg.call(brush);
bar.call(brush);
} else {
self.emit('click', self.eventResponse(d, i));
}
}
return this.addEvent('mousedown', simulateClickWithBrushEnabled);
}
/**
@ -378,22 +354,6 @@ export function VislibLibDispatchProvider(Private, config) {
}
}
/**
* Determine if d3.Scale is quantitative
*
* @param element {d3.Scale}
* @method isQuantitativeScale
* @returns {boolean}
*/
function isQuantitativeScale(scale) {
//Invert is a method that only exists on quantitative scales
if (scale.invert) {
return true;
} else {
return false;
}
}
function validBrushClick(event) {
return event.button === 0;
}

View file

@ -105,8 +105,7 @@ export function VislibVisualizationsPointSeriesProvider(Private) {
addEvents(svg) {
const isBrushable = this.events.isBrushable();
if (isBrushable) {
const brush = this.events.addBrushEvent(svg);
return svg.call(brush);
this.events.addBrushEvent(svg);
}
}

View file

@ -24,7 +24,7 @@ export class PipelineDataLoader {
constructor(private readonly vis: Vis) {}
public async fetch(params: RequestHandlerParams): Promise<any> {
this.vis.pipelineExpression = buildPipeline(this.vis, params);
this.vis.pipelineExpression = await buildPipeline(this.vis, params);
return await runPipeline(
this.vis.pipelineExpression,

View file

@ -333,8 +333,11 @@ const buildVisConfig: BuildVisConfigFunction = {
},
};
export const buildVislibDimensions = (vis: any, timeRange?: any) => {
const schemas = getSchemas(vis, timeRange);
export const buildVislibDimensions = async (
vis: any,
params: { searchSource: any; timeRange?: any }
) => {
const schemas = getSchemas(vis, params.timeRange);
const dimensions = {
x: schemas.segment ? schemas.segment[0] : null,
y: schemas.metric,
@ -351,6 +354,12 @@ export const buildVislibDimensions = (vis: any, timeRange?: any) => {
dimensions.x.params.interval = xAgg.buckets.getInterval().asMilliseconds();
dimensions.x.params.format = xAgg.buckets.getScaledDateFormat();
dimensions.x.params.bounds = xAgg.buckets.getBounds();
} else if (xAgg.type.name === 'histogram') {
const intervalParam = xAgg.type.params.byName.interval;
const output = { params: {} as any };
await intervalParam.modifyAggConfigOnSearchRequestStart(xAgg, params.searchSource);
intervalParam.write(xAgg, output);
dimensions.x.params.interval = output.params.interval;
}
}
@ -359,7 +368,10 @@ export const buildVislibDimensions = (vis: any, timeRange?: any) => {
// If not using the expression pipeline (i.e. visualize_data_loader), we need a mechanism to
// take a Vis object and decorate it with the necessary params (dimensions, bucket, metric, etc)
export const getVisParams = (vis: Vis, params: { timeRange?: any }) => {
export const getVisParams = async (
vis: Vis,
params: { searchSource: SearchSource; timeRange?: any }
) => {
const schemas = getSchemas(vis, params.timeRange);
let visConfig = cloneDeep(vis.params);
if (buildVisConfig[vis.type.name]) {
@ -368,12 +380,12 @@ export const getVisParams = (vis: Vis, params: { timeRange?: any }) => {
...buildVisConfig[vis.type.name](schemas, visConfig),
};
} else if (vislibCharts.includes(vis.type.name)) {
visConfig.dimensions = buildVislibDimensions(vis, params.timeRange);
visConfig.dimensions = await buildVislibDimensions(vis, params);
}
return visConfig;
};
export const buildPipeline = (
export const buildPipeline = async (
vis: Vis,
params: { searchSource: SearchSource; timeRange?: any }
) => {
@ -411,7 +423,7 @@ export const buildPipeline = (
pipeline += buildPipelineVisFunction[vis.type.name](visState, schemas, uiState);
} else if (vislibCharts.includes(vis.type.name)) {
const visConfig = visState.params;
visConfig.dimensions = buildVislibDimensions(vis, params.timeRange);
visConfig.dimensions = await buildVislibDimensions(vis, params);
pipeline += `vislib ${prepareJson('visConfig', visState.params)}`;
} else {

View file

@ -68,7 +68,10 @@ export class VisualizeDataLoader {
public async fetch(params: RequestHandlerParams): Promise<VisResponseData | void> {
// add necessary params to vis object (dimensions, bucket, metric, etc)
const visParams = getVisParams(this.vis, { timeRange: params.timeRange });
const visParams = await getVisParams(this.vis, {
searchSource: params.searchSource,
timeRange: params.timeRange,
});
// searchSource is only there for courier request handler
const requestHandlerResponse = await this.requestHandler({

View file

@ -27,7 +27,7 @@ export default function ({ loadTestFile }) {
loadTestFile(require.resolve('./find'));
loadTestFile(require.resolve('./get'));
loadTestFile(require.resolve('./import'));
loadTestFile(require.resolve('./resolve_import_conflicts'));
loadTestFile(require.resolve('./resolve_import_errors'));
loadTestFile(require.resolve('./update'));
loadTestFile(require.resolve('./migrations'));
});

View file

@ -24,14 +24,14 @@ export default function ({ getService }) {
const supertest = getService('supertest');
const esArchiver = getService('esArchiver');
describe('resolve_import_conflicts', () => {
describe('resolve_import_errors', () => {
describe('without kibana index', () => {
// Cleanup data that got created in import
after(() => esArchiver.unload('saved_objects/basic'));
it('should return 200 and import nothing when empty parameters are passed in', async () => {
await supertest
.post('/api/saved_objects/_resolve_import_conflicts')
.post('/api/saved_objects/_resolve_import_errors')
.attach('file', join(__dirname, '../../fixtures/import.ndjson'))
.expect(200)
.then((resp) => {
@ -44,7 +44,7 @@ export default function ({ getService }) {
it('should return 200 and import everything when overwrite parameters contains all objects', async () => {
await supertest
.post('/api/saved_objects/_resolve_import_conflicts')
.post('/api/saved_objects/_resolve_import_errors')
.field('overwrites', JSON.stringify([
{
type: 'index-pattern',
@ -71,7 +71,7 @@ export default function ({ getService }) {
it('should return 400 when no file passed in', async () => {
await supertest
.post('/api/saved_objects/_resolve_import_conflicts')
.post('/api/saved_objects/_resolve_import_errors')
.field('skips', '[]')
.expect(400)
.then((resp) => {
@ -100,7 +100,7 @@ export default function ({ getService }) {
]
};
await supertest
.post('/api/saved_objects/_resolve_import_conflicts')
.post('/api/saved_objects/_resolve_import_errors')
.field('replaceReferences', JSON.stringify(
[
{
@ -138,7 +138,7 @@ export default function ({ getService }) {
fileChunks.push(`{"type":"visualization","id":"${i}","attributes":{},"references":[]}`);
}
await supertest
.post('/api/saved_objects/_resolve_import_conflicts')
.post('/api/saved_objects/_resolve_import_errors')
.attach('file', Buffer.from(fileChunks.join('\n'), 'utf8'), 'export.ndjson')
.expect(400)
.then((resp) => {
@ -158,7 +158,7 @@ export default function ({ getService }) {
it('should return 200 when skipping all the records', async () => {
await supertest
.post('/api/saved_objects/_resolve_import_conflicts')
.post('/api/saved_objects/_resolve_import_errors')
.field('skips', JSON.stringify(
[
{
@ -184,7 +184,7 @@ export default function ({ getService }) {
it('should return 200 when manually overwriting each object', async () => {
await supertest
.post('/api/saved_objects/_resolve_import_conflicts')
.post('/api/saved_objects/_resolve_import_errors')
.field('overwrites', JSON.stringify(
[
{
@ -210,7 +210,7 @@ export default function ({ getService }) {
it('should return 200 with only one record when overwriting 1 and skipping 1', async () => {
await supertest
.post('/api/saved_objects/_resolve_import_conflicts')
.post('/api/saved_objects/_resolve_import_errors')
.field('overwrites', JSON.stringify(
[
{

View file

@ -17,9 +17,8 @@
* under the License.
*/
export {
getTypes,
getProperty,
getRootProperties,
getRootPropertiesObjects,
} from './lib';
declare module 'lodash/internal/toPath' {
function toPath(value: string | string[]): string[]
export = toPath;
}

View file

@ -147,7 +147,6 @@
"typescript": "^3.3.3333",
"vinyl-fs": "^3.0.2",
"xml-crypto": "^0.10.1",
"xml2js": "^0.4.19",
"yargs": "4.8.1"
},
"dependencies": {
@ -322,6 +321,7 @@
"vscode-jsonrpc": "^3.6.2",
"vscode-languageserver": "^4.2.1",
"vscode-languageserver-types": "^3.10.0",
"xml2js": "^0.4.19",
"xregexp": "3.2.0"
},
"engines": {

View file

@ -4,7 +4,15 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { EuiBadge, EuiPanel, EuiSpacer, EuiText, EuiTitle } from '@elastic/eui';
import {
EuiBadge,
EuiFlexGroup,
EuiFlexItem,
EuiPanel,
EuiSpacer,
EuiText,
EuiTitle
} from '@elastic/eui';
import theme from '@elastic/eui/dist/eui_theme_light.json';
import { i18n } from '@kbn/i18n';
import { Location } from 'history';
@ -15,13 +23,7 @@ import { idx } from 'x-pack/plugins/apm/common/idx';
import { ErrorDistributionRequest } from '../../../store/reactReduxRequest/errorDistribution';
import { ErrorGroupDetailsRequest } from '../../../store/reactReduxRequest/errorGroup';
import { IUrlParams } from '../../../store/urlParams';
import {
fontFamilyCode,
fontSizes,
px,
unit,
units
} from '../../../style/variables';
import { fontFamilyCode, fontSizes, px, units } from '../../../style/variables';
// @ts-ignore
import { FilterBar } from '../../shared/FilterBar';
import { DetailView } from './DetailView';
@ -31,11 +33,6 @@ const Titles = styled.div`
margin-bottom: ${px(units.plus)};
`;
const UnhandledBadge = styled(EuiBadge)`
margin-left: ${px(unit)};
margin-top: -${px(units.half - 1)};
`;
const Label = styled.div`
margin-bottom: ${px(units.quarter)};
font-size: ${fontSizes.small};
@ -85,26 +82,35 @@ export function ErrorGroupDetailsView({ urlParams, location }: Props) {
return (
<div>
<EuiTitle>
<span>
{i18n.translate('xpack.apm.errorGroupDetails.errorGroupTitle', {
defaultMessage: 'Error group {errorGroupId}',
values: {
errorGroupId: getShortGroupId(urlParams.errorGroupId)
}
})}
{isUnhandled && (
<UnhandledBadge color="warning">
<EuiFlexGroup alignItems="center">
<EuiFlexItem grow={false}>
<EuiTitle>
<h1>
{i18n.translate(
'xpack.apm.errorGroupDetails.errorGroupTitle',
{
defaultMessage: 'Error group {errorGroupId}',
values: {
errorGroupId: getShortGroupId(urlParams.errorGroupId)
}
}
)}
</h1>
</EuiTitle>
</EuiFlexItem>
{isUnhandled && (
<EuiFlexItem grow={false}>
<EuiBadge color="warning">
{i18n.translate(
'xpack.apm.errorGroupDetails.unhandledLabel',
{
defaultMessage: 'Unhandled'
}
)}
</UnhandledBadge>
)}
</span>
</EuiTitle>
</EuiBadge>
</EuiFlexItem>
)}
</EuiFlexGroup>
<EuiSpacer size="m" />

View file

@ -49,12 +49,8 @@ class DatePickerComponent extends React.Component<Props> {
public componentDidUpdate(prevProps: Props) {
const currentParams = this.getParamsFromSearch(this.props.location.search);
const previousParams = this.getParamsFromSearch(prevProps.location.search);
if (
currentParams.rangeFrom !== previousParams.rangeFrom ||
currentParams.rangeTo !== previousParams.rangeTo
) {
this.dispatchTimeRangeUpdate();
}
this.dispatchTimeRangeUpdate();
if (
currentParams.refreshPaused !== previousParams.refreshPaused ||

View file

@ -38,7 +38,6 @@ type PromiseResolver = (value?: {} | PromiseLike<{}> | undefined) => void;
// @ts-ignore
chrome.setRootTemplate(template);
chrome.disableAutoAngularUrlEncodingFix();
const store = configureStore();
const checkForRoot = (resolve: PromiseResolver) => {
const ready = !!document.getElementById(REACT_APP_ROOT_ID);

View file

@ -7,7 +7,7 @@
import expect from 'expect.js';
import { mapColumn } from '../mapColumn';
import { functionWrapper } from '../../../../__tests__/helpers/function_wrapper';
import { testTable } from './fixtures/test_tables';
import { testTable, emptyTable } from './fixtures/test_tables';
const pricePlusTwo = datatable => Promise.resolve(datatable.rows[0].price + 2);
@ -42,6 +42,16 @@ describe('mapColumn', () => {
});
});
it('adds a column to empty tables', () => {
return fn(emptyTable, { name: 'name', expression: pricePlusTwo }).then(result => {
expect(result.type).to.be('datatable');
expect(result.columns).to.have.length(1);
expect(result.columns[0])
.to.have.property('name', 'name')
.and.to.have.property('type', 'null');
});
});
describe('expression', () => {
it('maps null values to the new column', () => {
return fn(testTable, { name: 'empty' }).then(result => {

View file

@ -7,7 +7,7 @@
import expect from 'expect.js';
import { staticColumn } from '../staticColumn';
import { functionWrapper } from '../../../../__tests__/helpers/function_wrapper';
import { testTable } from './fixtures/test_tables';
import { testTable, emptyTable } from './fixtures/test_tables';
describe('staticColumn', () => {
const fn = functionWrapper(staticColumn);
@ -37,4 +37,12 @@ describe('staticColumn', () => {
expect(result.columns).to.eql([...testTable.columns, { name: 'empty', type: 'null' }]);
expect(result.rows.every(row => row.empty === null)).to.be(true);
});
it('adds a column to empty tables', () => {
const result = fn(emptyTable, { name: 'empty', value: 1 });
expect(result.type).to.be('datatable');
expect(result.columns).to.eql([{ name: 'empty', type: 'number' }]);
expect(result.rows.length).to.be(0);
});
});

View file

@ -47,7 +47,7 @@ export const mapColumn = () => ({
return Promise.all(rowPromises).then(rows => {
const existingColumnIndex = columns.findIndex(({ name }) => name === args.name);
const type = getType(rows[0][args.name]);
const type = rows.length ? getType(rows[0][args.name]) : 'null';
const newColumn = { name: args.name, type };
if (existingColumnIndex === -1) {
columns.push(newColumn);

View file

@ -29,7 +29,7 @@ export const staticColumn = () => ({
},
fn: (context, args) => {
const rows = context.rows.map(row => ({ ...row, [args.name]: args.value }));
const type = getType(rows[0][args.name]);
const type = getType(args.value);
const columns = [...context.columns];
const existingColumnIndex = columns.findIndex(({ name }) => name === args.name);
const newColumn = { name: args.name, type };

View file

@ -5,14 +5,13 @@
*/
import { routes } from './server/routes';
import { commonFunctions } from './common/functions';
import { registerCanvasUsageCollector } from './server/usage';
import { functions } from './canvas_plugin_src/functions/server';
import { loadSampleData } from './server/sample_data';
export default async function(server /*options*/) {
const { serverFunctions } = server.plugins.interpreter.register({
serverFunctions: commonFunctions.concat(functions),
serverFunctions: functions,
});
server.injectUiAppVars('canvas', async () => {

View file

@ -25,7 +25,6 @@ import { tagSpecs } from '../../../canvas_plugin_src/uis/tags';
import { functions as browserFunctions } from '../../../canvas_plugin_src/functions/browser';
import { functions as commonPluginFunctions } from '../../../canvas_plugin_src/functions/common';
import { templateSpecs } from '../../../canvas_plugin_src/templates';
import { commonFunctions } from '../../../common/functions';
import { clientFunctions } from '../../functions';
import {
@ -67,10 +66,7 @@ register(registries, {
viewUIs: viewSpecs,
datasourceUIs: datasourceSpecs,
argumentUIs: argSpecs,
browserFunctions: browserFunctions
.concat(commonFunctions)
.concat(clientFunctions)
.concat(commonPluginFunctions),
browserFunctions: browserFunctions.concat(clientFunctions).concat(commonPluginFunctions),
templates: templateSpecs,
tagUIs: tagSpecs,
});

View file

@ -92,10 +92,10 @@ export class WorkpadExport extends React.PureComponent {
}}
>
<EuiButton
aria-label="Copy to clipboard"
iconType="copy"
size="s"
style={{ width: '100%' }}
aria-label="Alternatively, you can generate a PDF from a script or with Watcher by using this URL. Hit Enter to copy the URL to clipboard"
>
Copy POST URL
</EuiButton>

View file

@ -7,5 +7,6 @@
import { asset } from './asset';
import { filters } from './filters';
import { timelion } from './timelion';
import { to } from './to';
export const clientFunctions = [asset, filters, timelion];
export const clientFunctions = [asset, filters, timelion, to];

View file

@ -5,6 +5,7 @@
*/
import { castProvider } from '@kbn/interpreter/common';
import { registries } from '@kbn/interpreter/public';
export const to = () => ({
name: 'to',
@ -19,11 +20,11 @@ export const to = () => ({
multi: true,
},
},
fn: (context, args, { types }) => {
fn: (context, args) => {
if (!args.type) {
throw new Error('Must specify a casting type');
}
return castProvider(types)(context, args.type);
return castProvider(registries.types.toJS())(context, args.type);
},
});

View file

@ -588,6 +588,50 @@ export namespace FlyoutItemQuery {
};
}
export namespace LogSummary {
export type Variables = {
sourceId?: string | null;
start: number;
end: number;
bucketSize: number;
filterQuery?: string | null;
};
export type Query = {
__typename?: 'Query';
source: Source;
};
export type Source = {
__typename?: 'InfraSource';
id: string;
logSummaryBetween: LogSummaryBetween;
};
export type LogSummaryBetween = {
__typename?: 'InfraLogSummaryInterval';
start?: number | null;
end?: number | null;
buckets: Buckets[];
};
export type Buckets = {
__typename?: 'InfraLogSummaryBucket';
start: number;
end: number;
entriesCount: number;
};
}
export namespace MetadataQuery {
export type Variables = {
sourceId: string;
@ -866,50 +910,6 @@ export namespace LogEntries {
};
}
export namespace LogSummary {
export type Variables = {
sourceId?: string | null;
start: number;
end: number;
bucketSize: number;
filterQuery?: string | null;
};
export type Query = {
__typename?: 'Query';
source: Source;
};
export type Source = {
__typename?: 'InfraSource';
id: string;
logSummaryBetween: LogSummaryBetween;
};
export type LogSummaryBetween = {
__typename?: 'InfraLogSummaryInterval';
start?: number | null;
end?: number | null;
buckets: Buckets[];
};
export type Buckets = {
__typename?: 'InfraLogSummaryBucket';
start: number;
end: number;
entriesCount: number;
};
}
export namespace SourceFields {
export type Fragment = {
__typename?: 'InfraSource';

View file

@ -21,6 +21,7 @@ import { I18nContext } from 'ui/i18n';
import { InfraFrontendLibs } from '../lib/lib';
import { PageRouter } from '../routes';
import { createStore } from '../store';
import { ApolloClientContext } from '../utils/apollo_context';
export async function startApp(libs: InfraFrontendLibs) {
const history = createHashHistory();
@ -37,14 +38,16 @@ export async function startApp(libs: InfraFrontendLibs) {
<ConstateProvider devtools>
<ReduxStoreProvider store={store}>
<ApolloProvider client={libs.apolloClient}>
<ThemeProvider
theme={() => ({
eui: libs.framework.darkMode ? euiDarkVars : euiLightVars,
darkMode: libs.framework.darkMode,
})}
>
<PageRouter history={history} />
</ThemeProvider>
<ApolloClientContext.Provider value={libs.apolloClient}>
<ThemeProvider
theme={() => ({
eui: libs.framework.darkMode ? euiDarkVars : euiLightVars,
darkMode: libs.framework.darkMode,
})}
>
<PageRouter history={history} />
</ThemeProvider>
</ApolloClientContext.Provider>
</ApolloProvider>
</ReduxStoreProvider>
</ConstateProvider>

View file

@ -24,15 +24,6 @@ interface LogMinimapProps {
start: number;
} | null;
jumpToTarget: (params: LogEntryTime) => any;
reportVisibleInterval: (
params: {
start: number;
end: number;
bucketsOnPage: number;
pagesBeforeStart: number;
pagesAfterEnd: number;
}
) => any;
intervalSize: number;
summaryBuckets: SummaryBucket[];
// searchSummaryBuckets?: SearchSummaryBucket[];
@ -70,37 +61,6 @@ export class LogMinimap extends React.Component<LogMinimapProps> {
return ((time - minTime) * height) / intervalSize;
};
public updateVisibleInterval = () => {
const { summaryBuckets, intervalSize } = this.props;
const [minTime, maxTime] = this.getYScale().domain();
const firstBucket = summaryBuckets[0];
const lastBucket = summaryBuckets[summaryBuckets.length - 1];
const pagesBeforeStart = firstBucket ? (minTime - firstBucket.start) / intervalSize : 0;
const pagesAfterEnd = lastBucket ? (lastBucket.end - maxTime) / intervalSize : 0;
const bucketsOnPage = firstBucket
? (maxTime - minTime) / (firstBucket.end - firstBucket.start)
: 0;
this.props.reportVisibleInterval({
end: Math.ceil(maxTime),
start: Math.floor(minTime),
bucketsOnPage,
pagesBeforeStart,
pagesAfterEnd,
});
};
public componentDidUpdate(prevProps: LogMinimapProps) {
const hasNewTarget = prevProps.target !== this.props.target;
const hasNewIntervalSize = prevProps.intervalSize !== this.props.intervalSize;
if (hasNewTarget || hasNewIntervalSize) {
this.updateVisibleInterval();
}
}
public render() {
const {
className,

View file

@ -137,24 +137,7 @@ export const WaffleGroupByControls = injectI18n(
.filter(o => o != null)
// In this map the `o && o.field` is totally unnecessary but Typescript is
// too stupid to realize that the filter above prevents the next map from being null
.map(o => (
<EuiBadge
key={o && o.field}
iconType="cross"
iconOnClick={this.handleRemove((o && o.field) || '')}
iconOnClickAriaLabel={intl.formatMessage(
{
id: 'xpack.infra.waffle.removeGroupingItemAriaLabel',
defaultMessage: 'Remove {groupingItem} grouping',
},
{
groupingItem: o && o.text,
}
)}
>
{o && o.text}
</EuiBadge>
))
.map(o => <EuiBadge key={o && o.field}>{o && o.text}</EuiBadge>)
) : (
<FormattedMessage id="xpack.infra.waffle.groupByAllTitle" defaultMessage="All" />
);

View file

@ -0,0 +1,8 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
export * from './with_summary';
export * from './log_summary';

View file

@ -0,0 +1,274 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import React from 'react';
import { mountHook } from 'test_utils/enzyme_helpers';
import { ApolloClientContext } from '../../../utils/apollo_context';
import { useLogSummary } from './log_summary';
describe('useLogSummary hook', () => {
it('provides an empty list of buckets by default', () => {
const mockApolloClient = {
query: jest.fn(),
};
const { getLastHookValue } = mountHook(
() => useLogSummary('SOURCE_ID', null, 1000, null),
createMockApolloProvider(mockApolloClient)
);
expect(getLastHookValue().buckets).toEqual([]);
});
/**
* This is skipped until `act` can deal with async operations, see comment
* below.
*
* The test cases below this are a temporary alternative until the
* shortcomings of the `act` function have been overcome.
*/
it.skip('queries for new summary buckets when the source id changes', async () => {
const firstMockResponse = createMockResponse([{ start: 99000, end: 101000, entriesCount: 1 }]);
const secondMockResponse = createMockResponse([{ start: 99000, end: 101000, entriesCount: 2 }]);
const mockApolloClient = {
query: jest
.fn()
.mockResolvedValueOnce(firstMockResponse)
.mockResolvedValueOnce(secondMockResponse),
};
const { act, getLastHookValue } = mountHook(
({ sourceId }) => useLogSummary(sourceId, 100000, 1000, null),
createMockApolloProvider(mockApolloClient),
{ sourceId: 'INITIAL_SOURCE_ID' }
);
expect(mockApolloClient.query).toHaveBeenCalledTimes(1);
expect(mockApolloClient.query).toHaveBeenLastCalledWith(
expect.objectContaining({
variables: expect.objectContaining({
sourceId: 'INITIAL_SOURCE_ID',
}),
})
);
expect(getLastHookValue().buckets).toEqual(
firstMockResponse.data.source.logSummaryBetween.buckets
);
// DOESN'T WORK YET until https://github.com/facebook/react/pull/14853 has been merged
await act(async (_, setArgs) => {
setArgs({ sourceId: 'CHANGED_SOURCE_ID' });
// wait for the promise queue to be processed
await mockApolloClient.query();
});
expect(mockApolloClient.query).toHaveBeenCalledTimes(2);
expect(mockApolloClient.query).toHaveBeenLastCalledWith(
expect.objectContaining({
variables: expect.objectContaining({
sourceId: 'CHANGED_SOURCE_ID',
}),
})
);
expect(getLastHookValue().buckets).toEqual(
secondMockResponse.data.source.logSummaryBetween.buckets
);
});
/**
* The following test cases use a bad workaround to avoid the problems
* exhibited by the skipped test case above. Instead of a real Promise we
* fake a synchronously resolving promise-like return value to avoid any
* async behavior.
*
* They should be rewritten to the cleaner async/await style shown in the
* test case above once `act` is capable of dealing with it.
*/
it('queries for new summary buckets when the source id changes', () => {
const firstMockResponse = createMockResponse([{ start: 99000, end: 101000, entriesCount: 1 }]);
const secondMockResponse = createMockResponse([{ start: 99000, end: 101000, entriesCount: 2 }]);
const mockApolloClient = {
query: jest
.fn()
.mockReturnValueOnce(createSyncMockPromise(firstMockResponse))
.mockReturnValueOnce(createSyncMockPromise(secondMockResponse)),
};
const { act, getLastHookValue } = mountHook(
({ sourceId }) => useLogSummary(sourceId, 100000, 1000, null),
createMockApolloProvider(mockApolloClient),
{ sourceId: 'INITIAL_SOURCE_ID' }
);
expect(mockApolloClient.query).toHaveBeenCalledTimes(1);
expect(mockApolloClient.query).toHaveBeenLastCalledWith(
expect.objectContaining({
variables: expect.objectContaining({
sourceId: 'INITIAL_SOURCE_ID',
}),
})
);
expect(getLastHookValue().buckets).toEqual(
firstMockResponse.data.source.logSummaryBetween.buckets
);
act((_, setArgs) => {
setArgs({ sourceId: 'CHANGED_SOURCE_ID' });
});
expect(mockApolloClient.query).toHaveBeenCalledTimes(2);
expect(mockApolloClient.query).toHaveBeenLastCalledWith(
expect.objectContaining({
variables: expect.objectContaining({
sourceId: 'CHANGED_SOURCE_ID',
}),
})
);
expect(getLastHookValue().buckets).toEqual(
secondMockResponse.data.source.logSummaryBetween.buckets
);
});
it('queries for new summary buckets when the filter query changes', () => {
const firstMockResponse = createMockResponse([{ start: 99000, end: 101000, entriesCount: 1 }]);
const secondMockResponse = createMockResponse([{ start: 99000, end: 101000, entriesCount: 2 }]);
const mockApolloClient = {
query: jest
.fn()
.mockReturnValueOnce(createSyncMockPromise(firstMockResponse))
.mockReturnValueOnce(createSyncMockPromise(secondMockResponse)),
};
const { act, getLastHookValue } = mountHook(
({ filterQuery }) => useLogSummary('SOURCE_ID', 100000, 1000, filterQuery),
createMockApolloProvider(mockApolloClient),
{ filterQuery: 'INITIAL_FILTER_QUERY' }
);
expect(mockApolloClient.query).toHaveBeenCalledTimes(1);
expect(mockApolloClient.query).toHaveBeenLastCalledWith(
expect.objectContaining({
variables: expect.objectContaining({
filterQuery: 'INITIAL_FILTER_QUERY',
}),
})
);
expect(getLastHookValue().buckets).toEqual(
firstMockResponse.data.source.logSummaryBetween.buckets
);
act((_, setArgs) => {
setArgs({ filterQuery: 'CHANGED_FILTER_QUERY' });
});
expect(mockApolloClient.query).toHaveBeenCalledTimes(2);
expect(mockApolloClient.query).toHaveBeenLastCalledWith(
expect.objectContaining({
variables: expect.objectContaining({
filterQuery: 'CHANGED_FILTER_QUERY',
}),
})
);
expect(getLastHookValue().buckets).toEqual(
secondMockResponse.data.source.logSummaryBetween.buckets
);
});
it('queries for new summary buckets when the midpoint time changes', () => {
const mockApolloClient = {
query: jest
.fn()
.mockReturnValueOnce(createSyncMockPromise(createMockResponse([])))
.mockReturnValueOnce(createSyncMockPromise(createMockResponse([]))),
};
const { act } = mountHook(
({ midpointTime }) => useLogSummary('SOURCE_ID', midpointTime, 1000, null),
createMockApolloProvider(mockApolloClient),
{ midpointTime: 100000 }
);
expect(mockApolloClient.query).toHaveBeenCalledTimes(1);
expect(mockApolloClient.query).toHaveBeenLastCalledWith(
expect.objectContaining({
variables: expect.objectContaining({
start: 98500,
end: 101500,
}),
})
);
act((_, setArgs) => {
setArgs({ midpointTime: 200000 });
});
expect(mockApolloClient.query).toHaveBeenCalledTimes(2);
expect(mockApolloClient.query).toHaveBeenLastCalledWith(
expect.objectContaining({
variables: expect.objectContaining({
start: 198500,
end: 201500,
}),
})
);
});
it('queries for new summary buckets when the interval size changes', () => {
const mockApolloClient = {
query: jest
.fn()
.mockReturnValueOnce(createSyncMockPromise(createMockResponse([])))
.mockReturnValueOnce(createSyncMockPromise(createMockResponse([]))),
};
const { act } = mountHook(
({ intervalSize }) => useLogSummary('SOURCE_ID', 100000, intervalSize, null),
createMockApolloProvider(mockApolloClient),
{ intervalSize: 1000 }
);
expect(mockApolloClient.query).toHaveBeenCalledTimes(1);
expect(mockApolloClient.query).toHaveBeenLastCalledWith(
expect.objectContaining({
variables: expect.objectContaining({
bucketSize: 10,
start: 98500,
end: 101500,
}),
})
);
act((_, setArgs) => {
setArgs({ intervalSize: 2000 });
});
expect(mockApolloClient.query).toHaveBeenCalledTimes(2);
expect(mockApolloClient.query).toHaveBeenLastCalledWith(
expect.objectContaining({
variables: expect.objectContaining({
bucketSize: 20,
start: 97000,
end: 103000,
}),
})
);
});
});
const createMockApolloProvider = (mockClient: any): React.FunctionComponent => ({ children }) => (
<ApolloClientContext.Provider value={mockClient}>{children}</ApolloClientContext.Provider>
);
const createMockResponse = (
buckets: Array<{ start: number; end: number; entriesCount: number }>
) => ({ data: { source: { logSummaryBetween: { buckets } } } });
const createSyncMockPromise = <Value extends any>(value: Value) => ({
then: (callback: (value: Value) => any) => callback(value),
});

View file

@ -0,0 +1,72 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { useMemo, useState } from 'react';
import { LogSummary as LogSummaryQuery } from '../../../graphql/types';
import { useApolloClient } from '../../../utils/apollo_context';
import { useCancellableEffect } from '../../../utils/cancellable_effect';
import { logSummaryQuery } from './log_summary.gql_query';
const LOAD_BUCKETS_PER_PAGE = 100;
export type LogSummaryBetween = LogSummaryQuery.Query['source']['logSummaryBetween'];
export type LogSummaryBuckets = LogSummaryBetween['buckets'];
export const useLogSummary = (
sourceId: string,
midpointTime: number | null,
intervalSize: number,
filterQuery: string | null
) => {
const [logSummaryBetween, setLogSummaryBetween] = useState<LogSummaryBetween>({ buckets: [] });
const apolloClient = useApolloClient();
const [bufferStart, bufferEnd] = useMemo(
() => {
if (midpointTime === null || intervalSize <= 0) {
return [null, null];
}
const halfIntervalSize = intervalSize / 2;
return [
(Math.floor((midpointTime - halfIntervalSize) / intervalSize) - 0.5) * intervalSize,
(Math.ceil((midpointTime + halfIntervalSize) / intervalSize) + 0.5) * intervalSize,
];
},
[midpointTime, intervalSize]
);
useCancellableEffect(
getIsCancelled => {
if (!apolloClient || bufferStart === null || bufferEnd === null) {
return;
}
apolloClient
.query<LogSummaryQuery.Query, LogSummaryQuery.Variables>({
fetchPolicy: 'no-cache',
query: logSummaryQuery,
variables: {
filterQuery,
sourceId,
start: bufferStart,
end: bufferEnd,
bucketSize: intervalSize / LOAD_BUCKETS_PER_PAGE,
},
})
.then(response => {
if (!getIsCancelled()) {
setLogSummaryBetween(response.data.source.logSummaryBetween);
}
});
},
[apolloClient, sourceId, filterQuery, bufferStart, bufferEnd, intervalSize]
);
return {
buckets: logSummaryBetween.buckets,
};
};

View file

@ -0,0 +1,34 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { useContext } from 'react';
import { connect } from 'react-redux';
import { logFilterSelectors, logPositionSelectors, State } from '../../../store';
import { RendererFunction } from '../../../utils/typed_react';
import { LogViewConfiguration } from '../log_view_configuration';
import { LogSummaryBuckets, useLogSummary } from './log_summary';
// Render-prop container that wires Redux-held log position and filter state
// into the `useLogSummary` hook and passes the resulting histogram buckets
// to its children.
export const WithSummary = connect((state: State) => ({
  // Midpoint of the currently visible log interval (or the jump target).
  visibleMidpointTime: logPositionSelectors.selectVisibleMidpointOrTargetTime(state),
  // Current log filter serialized to JSON for the GraphQL query.
  filterQuery: logFilterSelectors.selectLogFilterQueryAsJson(state),
}))(
  ({
    children,
    filterQuery,
    visibleMidpointTime,
  }: {
    children: RendererFunction<{ buckets: LogSummaryBuckets }>;
    filterQuery: string | null;
    visibleMidpointTime: number | null;
  }) => {
    // The minimap interval size is shared app-wide via context.
    const { intervalSize } = useContext(LogViewConfiguration.Context);
    // NOTE(review): the source id is hard-coded to 'default' — confirm
    // multi-source support is not required by callers.
    const { buckets } = useLogSummary('default', visibleMidpointTime, intervalSize, filterQuery);
    return children({ buckets });
  }
);

View file

@ -29,7 +29,6 @@ export const withLogPosition = connect(
jumpToTargetPosition: logPositionActions.jumpToTargetPosition,
jumpToTargetPositionTime: logPositionActions.jumpToTargetPositionTime,
reportVisiblePositions: logPositionActions.reportVisiblePositions,
reportVisibleSummary: logPositionActions.reportVisibleSummary,
startLiveStreaming: logPositionActions.startAutoReload,
stopLiveStreaming: logPositionActions.stopAutoReload,
})

View file

@ -1,22 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { connect } from 'react-redux';
import { logSummaryActions, logSummarySelectors, State } from '../../store';
import { asChildFunctionRenderer } from '../../utils/typed_react';
import { bindPlainActionCreators } from '../../utils/typed_redux';
// HOC connecting the Redux-held summary buckets and the load action.
// (Removed in this commit in favor of the hook-based log_summary container.)
export const withSummary = connect(
  (state: State) => ({
    buckets: logSummarySelectors.selectSummaryBuckets(state),
  }),
  bindPlainActionCreators({
    load: logSummaryActions.loadSummary,
  })
);

// Child-function renderer wrapper so consumers can use a render prop.
export const WithSummary = asChildFunctionRenderer(withSummary);

View file

@ -588,6 +588,50 @@ export namespace FlyoutItemQuery {
};
}
// Generated GraphQL operation types for the LogSummary query.
// NOTE(review): this namespace mirrors generated code — keep edits to
// comments only; regeneration will overwrite the file.
export namespace LogSummary {
  // Variables accepted by the logSummaryBetween query.
  export type Variables = {
    sourceId?: string | null;
    start: number;
    end: number;
    bucketSize: number;
    filterQuery?: string | null;
  };

  export type Query = {
    __typename?: 'Query';
    source: Source;
  };

  export type Source = {
    __typename?: 'InfraSource';
    id: string;
    logSummaryBetween: LogSummaryBetween;
  };

  // The interval actually covered by the returned buckets.
  export type LogSummaryBetween = {
    __typename?: 'InfraLogSummaryInterval';
    start?: number | null;
    end?: number | null;
    buckets: Buckets[];
  };

  // One fixed-width histogram bucket of log entry counts.
  export type Buckets = {
    __typename?: 'InfraLogSummaryBucket';
    start: number;
    end: number;
    entriesCount: number;
  };
}
export namespace MetadataQuery {
export type Variables = {
sourceId: string;
@ -866,50 +910,6 @@ export namespace LogEntries {
};
}
export namespace LogSummary {
export type Variables = {
sourceId?: string | null;
start: number;
end: number;
bucketSize: number;
filterQuery?: string | null;
};
export type Query = {
__typename?: 'Query';
source: Source;
};
export type Source = {
__typename?: 'InfraSource';
id: string;
logSummaryBetween: LogSummaryBetween;
};
export type LogSummaryBetween = {
__typename?: 'InfraLogSummaryInterval';
start?: number | null;
end?: number | null;
buckets: Buckets[];
};
export type Buckets = {
__typename?: 'InfraLogSummaryBucket';
start: number;
end: number;
entriesCount: number;
};
}
export namespace SourceFields {
export type Fragment = {
__typename?: 'InfraSource';

View file

@ -11,10 +11,10 @@ import { AutoSizer } from '../../components/auto_sizer';
import { LogMinimap } from '../../components/logging/log_minimap';
import { ScrollableLogTextStreamView } from '../../components/logging/log_text_stream';
import { PageContent } from '../../components/page';
import { WithSummary } from '../../containers/logs/log_summary';
import { LogViewConfiguration } from '../../containers/logs/log_view_configuration';
import { WithLogPosition } from '../../containers/logs/with_log_position';
import { WithStreamItems } from '../../containers/logs/with_stream_items';
import { WithSummary } from '../../containers/logs/with_summary';
interface Props {
setFlyoutItem: (id: string) => void;
@ -79,19 +79,13 @@ export const LogsPageContent: React.FunctionComponent<Props> = ({ showFlyout, se
<WithSummary>
{({ buckets }) => (
<WithLogPosition>
{({
jumpToTargetPosition,
reportVisibleSummary,
visibleMidpointTime,
visibleTimeInterval,
}) => (
{({ jumpToTargetPosition, visibleMidpointTime, visibleTimeInterval }) => (
<LogMinimap
height={height}
width={width}
highlightedInterval={visibleTimeInterval}
intervalSize={intervalSize}
jumpToTarget={jumpToTargetPosition}
reportVisibleInterval={reportVisibleSummary}
summaryBuckets={buckets}
target={visibleMidpointTime}
/>

View file

@ -13,4 +13,4 @@ export {
waffleOptionsActions,
flyoutOptionsActions,
} from './local';
export { logEntriesActions, logSummaryActions } from './remote';
export { logEntriesActions } from './remote';

View file

@ -30,18 +30,6 @@ export const reportVisiblePositions = actionCreator<ReportVisiblePositionsPayloa
'REPORT_VISIBLE_POSITIONS'
);
// Payload describing which part of the summary minimap is on screen.
// (Removed in this commit along with the epic-driven summary loading.)
export interface ReportVisibleSummaryPayload {
  start: number;
  end: number;
  bucketsOnPage: number;
  pagesBeforeStart: number;
  pagesAfterEnd: number;
}

// Action fired when the visible summary interval changes.
export const reportVisibleSummary = actionCreator<ReportVisibleSummaryPayload>(
  'REPORT_VISIBLE_SUMMARY'
);
export const startAutoReload = actionCreator<number>('START_AUTO_RELOAD');
export const stopAutoReload = actionCreator('STOP_AUTO_RELOAD');

View file

@ -11,7 +11,6 @@ import { TimeKey } from '../../../../common/time';
import {
jumpToTargetPosition,
reportVisiblePositions,
reportVisibleSummary,
startAutoReload,
stopAutoReload,
} from './actions';
@ -37,10 +36,6 @@ export interface LogPositionState {
middleKey: TimeKey | null;
endKey: TimeKey | null;
};
visibleSummary: {
start: number | null;
end: number | null;
};
}
export const initialLogPositionState: LogPositionState = {
@ -53,10 +48,6 @@ export const initialLogPositionState: LogPositionState = {
middleKey: null,
startKey: null,
},
visibleSummary: {
start: null,
end: null,
},
};
const targetPositionReducer = reducerWithInitialState(initialLogPositionState.targetPosition).case(
@ -83,17 +74,8 @@ const visiblePositionReducer = reducerWithInitialState(
startKey,
}));
const visibleSummaryReducer = reducerWithInitialState(initialLogPositionState.visibleSummary).case(
reportVisibleSummary,
(state, { start, end }) => ({
start,
end,
})
);
export const logPositionReducer = combineReducers<LogPositionState>({
targetPosition: targetPositionReducer,
updatePolicy: targetPositionUpdatePolicyReducer,
visiblePositions: visiblePositionReducer,
visibleSummary: visibleSummaryReducer,
});

View file

@ -52,5 +52,3 @@ export const selectVisibleTimeInterval = createSelector(
}
: null
);
export const selectVisibleSummary = (state: LogPositionState) => state.visibleSummary;

View file

@ -5,4 +5,3 @@
*/
export { logEntriesActions } from './log_entries';
export { logSummaryActions } from './log_summary';

View file

@ -4,10 +4,6 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { combineEpics } from 'redux-observable';
import { createLogEntriesEpic } from './log_entries';
import { createLogSummaryEpic } from './log_summary';
export const createRemoteEpic = <State>() =>
combineEpics(createLogEntriesEpic<State>(), createLogSummaryEpic<State>());
export const createRemoteEpic = <State>() => createLogEntriesEpic<State>();

View file

@ -1,99 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { Action } from 'redux';
import { combineEpics, Epic, EpicWithState } from 'redux-observable';
import { merge } from 'rxjs';
import { exhaustMap, filter, map, withLatestFrom } from 'rxjs/operators';
import { logFilterActions, logPositionActions } from '../..';
import { loadSummary } from './actions';
import { loadSummaryEpic } from './operations/load';
// Number of histogram buckets requested per summary "page".
const LOAD_BUCKETS_PER_PAGE = 100;
// Reload thresholds: reload when fewer buckets than this are on screen …
const MINIMUM_BUCKETS_PER_PAGE = 90;
// … or when less than this fraction of a page is buffered on either side.
const MINIMUM_BUFFER_PAGES = 0.5;

// Selectors the summary epic needs from the host store.
interface ManageSummaryDependencies<State> {
  selectLogFilterQueryAsJson: (state: State) => string | null;
  selectVisibleLogSummary: (
    state: State
  ) => {
    start: number | null;
    end: number | null;
  };
}

// Combines the effect epic (decides *when* to load) with the generic
// graphql-operation epic that performs the actual load.
export const createLogSummaryEpic = <State>() =>
  combineEpics(createSummaryEffectsEpic<State>(), loadSummaryEpic as EpicWithState<
    typeof loadSummaryEpic,
    State
  >);
// Epic that dispatches `loadSummary` whenever the visible interval drifts
// outside the buffered range or the log filter changes.
export const createSummaryEffectsEpic = <State>(): Epic<
  Action,
  Action,
  State,
  ManageSummaryDependencies<State>
> => (action$, state$, { selectLogFilterQueryAsJson, selectVisibleLogSummary }) => {
  // Latest serialized filter query.
  const filterQuery$ = state$.pipe(map(selectLogFilterQueryAsJson));
  // Latest buffered load window derived from the visible summary interval.
  const summaryInterval$ = state$.pipe(
    map(selectVisibleLogSummary),
    map(({ start, end }) => (start && end ? getLoadParameters(start, end) : null)),
    filter(isNotNull)
  );

  // Reload when too few buckets are visible or the buffer has worn thin.
  const shouldLoadBetweenNewInterval$ = action$.pipe(
    filter(logPositionActions.reportVisibleSummary.match),
    filter(
      ({ payload: { bucketsOnPage, pagesBeforeStart, pagesAfterEnd } }) =>
        bucketsOnPage < MINIMUM_BUCKETS_PER_PAGE ||
        pagesBeforeStart < MINIMUM_BUFFER_PAGES ||
        pagesAfterEnd < MINIMUM_BUFFER_PAGES
    ),
    map(({ payload: { start, end } }) => getLoadParameters(start, end))
  );

  // Reload with the new filter whenever a filter query is applied.
  const shouldLoadWithNewFilter$ = action$.pipe(
    filter(logFilterActions.applyLogFilterQuery.match),
    withLatestFrom(filterQuery$, (filterQuery, filterQueryString) => filterQueryString)
  );

  return merge(
    shouldLoadBetweenNewInterval$.pipe(
      withLatestFrom(filterQuery$),
      // exhaustMap: ignore new triggers while a load is already in flight.
      exhaustMap(([{ start, end, bucketSize }, filterQuery]) => [
        loadSummary({
          start,
          end,
          sourceId: 'default',
          bucketSize,
          filterQuery,
        }),
      ])
    ),
    shouldLoadWithNewFilter$.pipe(
      withLatestFrom(summaryInterval$),
      exhaustMap(([filterQuery, { start, end, bucketSize }]) => [
        loadSummary({
          start,
          end,
          sourceId: 'default',
          bucketSize: (end - start) / LOAD_BUCKETS_PER_PAGE,
          filterQuery,
        }),
      ])
    )
  );
};
// Derive the fetch window for a summary request: widen the visible
// [start, end] interval by one interval length on each side, and size the
// buckets so one page worth of buckets spans the visible interval.
const getLoadParameters = (start: number, end: number) => {
  const intervalLength = end - start;
  return {
    start: start - intervalLength,
    end: end + intervalLength,
    bucketSize: intervalLength / LOAD_BUCKETS_PER_PAGE,
  };
};
// Type guard narrowing `T | null` down to `T`.
function isNotNull<T>(value: T | null): value is T {
  return value !== null;
}

View file

@ -1,13 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import * as logSummaryActions from './actions';
import * as logSummarySelectors from './selectors';
export { logSummaryActions, logSummarySelectors };
export * from './epic';
export * from './reducer';
export { initialLogSummaryState, LogSummaryState } from './state';

View file

@ -1,30 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { LogSummary as LogSummaryQuery } from '../../../../graphql/types';
import {
createGraphqlOperationActionCreators,
createGraphqlOperationReducer,
createGraphqlQueryEpic,
} from '../../../../utils/remote_state/remote_graphql_state';
import { initialLogSummaryState } from '../state';
import { logSummaryQuery } from './log_summary.gql_query';
const operationKey = 'load';
export const loadSummaryActionCreators = createGraphqlOperationActionCreators<
LogSummaryQuery.Query,
LogSummaryQuery.Variables
>('log_summary', operationKey);
export const loadSummaryReducer = createGraphqlOperationReducer(
operationKey,
initialLogSummaryState,
loadSummaryActionCreators,
(state, action) => action.payload.result.data.source.logSummaryBetween
);
export const loadSummaryEpic = createGraphqlQueryEpic(logSummaryQuery, loadSummaryActionCreators);

View file

@ -1,15 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import reduceReducers from 'reduce-reducers';
import { Reducer } from 'redux';
import { loadSummaryReducer } from './operations/load';
import { LogSummaryState } from './state';
// Reducer for the remote log summary slice; currently only the `load`
// operation reducer is combined (loadMore was never wired up).
// NOTE(review): the `as Reducer<…>` cast papers over reduce-reducers'
// loose typing — confirm it stays sound if more reducers are added.
export const logSummaryReducer = reduceReducers(
  loadSummaryReducer /*, loadMoreSummaryReducer*/
) as Reducer<LogSummaryState>;

View file

@ -1,17 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { createSelector } from 'reselect';
import { createGraphqlStateSelectors } from '../../../utils/remote_state/remote_graphql_state';
import { LogSummaryRemoteState } from './state';
const summaryGraphlStateSelectors = createGraphqlStateSelectors<LogSummaryRemoteState>();
// Memoized selector for the loaded summary buckets; yields an empty list
// while no data has arrived yet.
export const selectSummaryBuckets = createSelector(
  summaryGraphlStateSelectors.selectData,
  data => (data ? data.buckets : [])
);

View file

@ -1,18 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { LogSummary as LogSummaryQuery } from '../../../graphql/types';
import {
createGraphqlInitialState,
GraphqlState,
} from '../../../utils/remote_state/remote_graphql_state';
export type LogSummaryRemoteState = LogSummaryQuery.LogSummaryBetween;
export type LogSummaryState = GraphqlState<LogSummaryRemoteState>;
export const initialLogSummaryState: LogSummaryState = createGraphqlInitialState<
LogSummaryRemoteState
>();

View file

@ -6,19 +6,15 @@
import { combineReducers } from 'redux';
import { initialLogEntriesState, logEntriesReducer, LogEntriesState } from './log_entries';
import { initialLogSummaryState, logSummaryReducer, LogSummaryState } from './log_summary';
export interface RemoteState {
logEntries: LogEntriesState;
logSummary: LogSummaryState;
}
export const initialRemoteState = {
logEntries: initialLogEntriesState,
logSummary: initialLogSummaryState,
};
export const remoteReducer = combineReducers<RemoteState>({
logEntries: logEntriesReducer,
logSummary: logSummaryReducer,
});

View file

@ -6,15 +6,9 @@
import { globalizeSelectors } from '../../utils/typed_redux';
import { logEntriesSelectors as innerLogEntriesSelectors } from './log_entries';
import { logSummarySelectors as innerLogSummarySelectors } from './log_summary';
import { RemoteState } from './reducer';
export const logEntriesSelectors = globalizeSelectors(
(state: RemoteState) => state.logEntries,
innerLogEntriesSelectors
);
export const logSummarySelectors = globalizeSelectors(
(state: RemoteState) => state.logSummary,
innerLogSummarySelectors
);

View file

@ -18,10 +18,7 @@ import {
waffleTimeSelectors as localWaffleTimeSelectors,
} from './local';
import { State } from './reducer';
import {
logEntriesSelectors as remoteLogEntriesSelectors,
logSummarySelectors as remoteLogSummarySelectors,
} from './remote';
import { logEntriesSelectors as remoteLogEntriesSelectors } from './remote';
/**
* local selectors
@ -44,7 +41,6 @@ export const flyoutOptionsSelectors = globalizeSelectors(selectLocal, localFlyou
const selectRemote = (state: State) => state.remote;
export const logEntriesSelectors = globalizeSelectors(selectRemote, remoteLogEntriesSelectors);
export const logSummarySelectors = globalizeSelectors(selectRemote, remoteLogSummarySelectors);
/**
* shared selectors

View file

@ -48,7 +48,6 @@ export function createStore({ apolloClient, observableApi }: StoreDependencies)
selectLogFilterQueryAsJson: logFilterSelectors.selectLogFilterQueryAsJson,
selectLogTargetPosition: logPositionSelectors.selectTargetPosition,
selectVisibleLogMidpointOrTarget: logPositionSelectors.selectVisibleMidpointOrTarget,
selectVisibleLogSummary: logPositionSelectors.selectVisibleSummary,
selectWaffleTimeUpdatePolicyInterval: waffleTimeSelectors.selectTimeUpdatePolicyInterval,
selectMetricTimeUpdatePolicyInterval: metricTimeSelectors.selectTimeUpdatePolicyInterval,
selectMetricRangeFromTimeRange: metricTimeSelectors.selectRangeFromTimeRange,

View file

@ -0,0 +1,19 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { ApolloClient } from 'apollo-client';
import { createContext, useContext } from 'react';
/**
* This is a temporary provider and hook for use with hooks until react-apollo
* has upgraded to the new-style `createContext` api.
*/
export const ApolloClientContext = createContext<ApolloClient<{}> | undefined>(undefined);
export const useApolloClient = () => {
return useContext(ApolloClientContext);
};

View file

@ -0,0 +1,31 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { DependencyList, useEffect } from 'react';
/**
 * Create a one-shot cancellation token: `isCancelled` starts out false and
 * flips to true (permanently) once `cancel` is invoked. `cancel` closes over
 * the token itself, so it stays safe to call even when detached.
 */
export const createCancellationSignal = () => {
  const signal = {
    isCancelled: false,
    cancel(): void {
      signal.isCancelled = true;
    },
  };
  return signal;
};
/**
 * Like `useEffect`, but hands the effect a `getIsCancelled` function that
 * turns true once the effect is cleaned up (deps changed or unmount), so
 * async work can avoid applying stale results.
 *
 * @param effect  effect body; poll the provided callback before using results
 * @param deps    standard `useEffect` dependency list
 */
export const useCancellableEffect = (
  effect: (isCancelled: () => boolean) => void,
  deps?: DependencyList
): void => {
  useEffect(() => {
    const cancellationSignal = createCancellationSignal();
    effect(() => cancellationSignal.isCancelled);
    // Cleanup marks the signal cancelled for any still-pending async work.
    return cancellationSignal.cancel;
  }, deps);
};

View file

@ -4,13 +4,6 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { i18n } from '@kbn/i18n';
import { LICENSE_TYPE_BASIC } from '../../../../common/constants';
export const PLUGIN = {
ID: 'license_management',
NAME: i18n.translate('xpack.licenseMgmt.managementSectionDisplayName', {
defaultMessage: 'License Management',
}),
MINIMUM_LICENSE_REQUIRED: LICENSE_TYPE_BASIC,
};

View file

@ -8,7 +8,6 @@ import { resolve } from 'path';
import { PLUGIN } from './common/constants';
import { registerLicenseRoute, registerStartTrialRoutes, registerStartBasicRoute } from './server/routes/api/license/';
import { createRouter } from '../../server/lib/create_router';
import { registerLicenseChecker } from '../../server/lib/register_license_checker';
export function licenseManagement(kibana) {
return new kibana.Plugin({
@ -25,7 +24,6 @@ export function licenseManagement(kibana) {
init: (server) => {
const xpackInfo = server.plugins.xpack_main.info;
const router = createRouter(server, PLUGIN.ID, '/api/license');
registerLicenseChecker(server, PLUGIN.ID, PLUGIN.NAME, PLUGIN.MINIMUM_LICENSE_REQUIRED);
registerLicenseRoute(router, xpackInfo);
registerStartTrialRoutes(router, xpackInfo);
registerStartBasicRoute(router, xpackInfo);

View file

@ -1,165 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import React, { Fragment } from 'react';
import {
EuiFieldText,
EuiFormRow,
} from '@elastic/eui';
import { AbstractTMSSource } from './tms_source';
import { TileLayer } from '../tile_layer';
import { i18n } from '@kbn/i18n';
import { getDataSourceLabel, getUrlLabel } from '../../../../common/i18n_getters';
// Map source backed by an OGC Web Map Service endpoint; renders raster
// tiles via the WMS GetMap operation through a TileLayer.
export class WMSSource extends AbstractTMSSource {

  static type = 'WMS';
  static title = i18n.translate('xpack.maps.source.wmsTitle', {
    defaultMessage: 'Web Map Service'
  });
  static description = i18n.translate('xpack.maps.source.wmsDescription', {
    defaultMessage: 'Maps from OGC Standard WMS'
  });
  static icon = 'grid';

  // Build the serializable descriptor persisted with the map saved object.
  static createDescriptor({ serviceUrl, layers, styles }) {
    return {
      type: WMSSource.type,
      serviceUrl: serviceUrl,
      layers: layers,
      styles: styles
    };
  }

  // Editor UI for creating a new WMS source; previews on every change.
  static renderEditor({ onPreviewSource, inspectorAdapters }) {
    const previewWMS = (options) => {
      const sourceDescriptor = WMSSource.createDescriptor(options);
      const source = new WMSSource(sourceDescriptor, inspectorAdapters);
      onPreviewSource(source);
    };
    return (<WMSEditor previewWMS={previewWMS} />);
  }

  // Read-only properties shown in the layer details panel.
  async getImmutableProperties() {
    return [
      { label: getDataSourceLabel(), value: WMSSource.title },
      { label: getUrlLabel(), value: this._descriptor.serviceUrl },
      { label: i18n.translate('xpack.maps.source.wms.layersLabel', {
        defaultMessage: 'Layers'
      }), value: this._descriptor.layers },
      { label: i18n.translate('xpack.maps.source.wms.stylesLabel', {
        defaultMessage: 'Styles'
      }), value: this._descriptor.styles },
    ];
  }

  _createDefaultLayerDescriptor(options) {
    return TileLayer.createDescriptor({
      sourceDescriptor: this._descriptor,
      ...options
    });
  }

  createDefaultLayer(options) {
    return new TileLayer({
      layerDescriptor: this._createDefaultLayerDescriptor(options),
      source: this
    });
  }

  async getDisplayName() {
    return this._descriptor.serviceUrl;
  }

  // Mapbox-style raster tile URL template issuing the WMS GetMap request.
  getUrlTemplate() {
    const styles = this._descriptor.styles || '';
    // eslint-disable-next-line max-len
    return `${this._descriptor.serviceUrl}?bbox={bbox-epsg-3857}&format=image/png&service=WMS&version=1.1.1&request=GetMap&srs=EPSG:3857&transparent=true&width=256&height=256&layers=${this._descriptor.layers}&styles=${styles}`;
  }
}
class WMSEditor extends React.Component {
state = {
serviceUrl: '',
layers: '',
styles: ''
}
_previewIfPossible() {
if (this.state.serviceUrl && this.state.layers) {
//todo: should really debounce this so we don't get a ton of changes during typing
this.props.previewWMS({
serviceUrl: this.state.serviceUrl,
layers: this.state.layers,
styles: this.state.styles
});
}
}
async _handleServiceUrlChange(e) {
await this.setState({
serviceUrl: e.target.value
});
this._previewIfPossible();
}
async _handleLayersChange(e) {
await this.setState({
layers: e.target.value
});
this._previewIfPossible();
}
async _handleStylesChange(e) {
await this.setState({
styles: e.target.value
});
this._previewIfPossible();
}
render() {
return (
<Fragment>
<EuiFormRow label="Url">
<EuiFieldText
value={this.state.serviceUrl}
onChange={(e) => this._handleServiceUrlChange(e)}
/>
</EuiFormRow>
<EuiFormRow
label={i18n.translate('xpack.maps.source.wms.layersLabel', {
defaultMessage: 'Layers'
})}
helpText={i18n.translate('xpack.maps.source.wms.layersHelpText', {
defaultMessage: 'use comma separated list of layer names'
})}
>
<EuiFieldText
onChange={(e) => this._handleLayersChange(e)}
/>
</EuiFormRow>
<EuiFormRow
label={i18n.translate('xpack.maps.source.wms.stylesLabel', {
defaultMessage: 'Styles'
})}
helpText={i18n.translate('xpack.maps.source.wms.stylesHelpText', {
defaultMessage: 'use comma separated list of style names'
})}
>
<EuiFieldText
onChange={(e) => this._handleStylesChange(e)}
/>
</EuiFormRow>
</Fragment>
);
}
}

View file

@ -4,6 +4,4 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { to } from './to';
export const commonFunctions = [to];
export { WMSSource } from './wms_source';

View file

@ -0,0 +1,127 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import _ from 'lodash';
import { parseString } from 'xml2js';
import fetch from 'node-fetch';
// Minimal WMS client: fetches a service's GetCapabilities document and
// flattens its Layer/Style hierarchy into select-box friendly options.
export class WmsClient {
  constructor({ serviceUrl }) {
    this._serviceUrl = serviceUrl;
  }

  // Isolated so unit tests can stub out network access.
  async _fetch(url) {
    return fetch(url);
  }

  // Fetch and XML-parse the capabilities document.
  // Throws when the service responds with a 4xx/5xx status or the XML is
  // malformed.
  async _fetchCapabilities() {
    const resp = await this._fetch(`${this._serviceUrl}?version=1.1.1&request=GetCapabilities&service=WMS`);
    if (resp.status >= 400) {
      // BUG FIX: previously read `this.state.serviceUrl`, but a plain class
      // has no `state` — that threw "Cannot read property 'serviceUrl' of
      // undefined" instead of the intended error message.
      throw new Error(`Unable to access ${this._serviceUrl}`);
    }
    const body = await resp.text();

    // parseString is callback-based; adapt it to a promise.
    const parsePromise = new Promise((resolve, reject) => {
      parseString(body, (error, result) => {
        if (error) {
          reject(error);
        } else {
          resolve(result);
        }
      });
    });
    return await parsePromise;
  }

  // Returns { layers, styles } — each a (possibly grouped) list of
  // { label, value } options extracted from the capabilities document.
  async getCapabilities() {
    const rawCapabilities = await this._fetchCapabilities();

    const { layers, styles } = reduceLayers([], _.get(rawCapabilities, 'WMT_MS_Capabilities.Capability[0].Layer', []));

    return {
      layers: groupCapabilities(layers),
      styles: groupCapabilities(styles)
    };
  }
}
// Depth-first flatten of the capabilities Layer hierarchy.
// `path` is the chain of ancestor layer titles; returns accumulated
// { layers, styles } option lists where each option records its full path.
function reduceLayers(path, layers) {
  const emptyCapabilities = {
    layers: [],
    styles: [],
  };
  // Option whose `path` includes all ancestor titles plus its own title.
  function createOption(optionPath, optionTitle, optionName) {
    return {
      path: [...optionPath, optionTitle],
      value: optionName
    };
  }
  return layers.reduce((accumulatedCapabilities, layer) => {
    // Layer is hierarchical, continue traversing
    if (layer.Layer) {
      const hierarchicalCapabilities = reduceLayers([...path, layer.Title[0]], layer.Layer);
      return {
        layers: [...accumulatedCapabilities.layers, ...hierarchicalCapabilities.layers],
        styles: [...accumulatedCapabilities.styles, ...hierarchicalCapabilities.styles]
      };
    }

    // Leaf layer: record its first <Style> (if present) and the layer itself.
    // NOTE(review): only Style[0] is considered; additional styles on the
    // same layer are dropped — confirm that is intentional.
    const updatedStyles = [...accumulatedCapabilities.styles];
    if (_.has(layer, 'Style[0]')) {
      updatedStyles.push(createOption(
        path,
        _.get(layer, 'Style[0].Title[0]'),
        _.get(layer, 'Style[0].Name[0]')
      ));
    }
    // NOTE(review): leaves are assumed to carry Title[0] and Name[0]; a leaf
    // without <Name> yields an undefined option value — verify the schema.
    return {
      layers: [
        ...accumulatedCapabilities.layers,
        createOption(path, layer.Title[0], layer.Name[0])
      ],
      styles: updatedStyles
    };
  }, emptyCapabilities);
}
// Avoid filling select box option label with text that is all the same
// Create a single group from common parts of Layer hierarchy
// Avoid filling select box option labels with text that is all the same:
// factor the longest common path prefix out into a single option group.
// Takes a list of { path: string[], value } and returns EuiComboBox-style
// options (flat, or one group when a common prefix exists).
function groupCapabilities(list) {
  if (list.length === 0) {
    return [];
  }

  // Find the longest path prefix shared by every entry.
  let rootCommonPath = list[0].path;
  for (let listIndex = 1; listIndex < list.length; listIndex++) {
    if (rootCommonPath.length === 0) {
      // No commonality in root path, nothing left to verify
      break;
    }
    const path = list[listIndex].path;
    for (let pathIndex = 0; pathIndex < path.length && pathIndex < rootCommonPath.length; pathIndex++) {
      if (rootCommonPath[pathIndex] !== path[pathIndex]) {
        // truncate root common path at location of divergence
        rootCommonPath = rootCommonPath.slice(0, pathIndex);
        break;
      }
    }
  }

  // No common prefix, or a single entry (grouping would leave an empty
  // member label): return a flat list with fully qualified labels.
  if (rootCommonPath.length === 0 || list.length === 1) {
    return list.map(({ path, value }) => {
      return { label: path.join(' - '), value };
    });
  }

  return [{
    label: rootCommonPath.join(' - '),
    options: list.map(({ path, value }) => {
      // BUG FIX: use the non-mutating `slice` instead of `splice`, which
      // destructively truncated the caller's `path` arrays as a side effect.
      return { label: path.slice(rootCommonPath.length).join(' - '), value };
    })
  }];
}

View file

@ -0,0 +1,198 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { WmsClient } from './wms_client';
describe('getCapabilities', () => {
it('Should extract flat Layer elements', async () => {
const wmsClient = new WmsClient({ serviceUrl: 'myWMSUrl' });
wmsClient._fetch = () => {
return {
status: 200,
text: () => {
return `
<WMT_MS_Capabilities version="1.1.1">
<Capability>
<Layer>
<Title>layer1</Title>
<Name>1</Name>
<Style>
<Name>default</Name>
<Title>defaultStyle</Title>
</Style>
</Layer>
<Layer>
<Title>layer2</Title>
<Name>2</Name>
<Style>
<Name>fancy</Name>
<Title>fancyStyle</Title>
</Style>
</Layer>
</Capability>
</WMT_MS_Capabilities>
`;
}
};
};
const capabilities = await wmsClient.getCapabilities();
expect(capabilities.layers).toEqual([
{ label: 'layer1', value: '1' },
{ label: 'layer2', value: '2' }
]);
expect(capabilities.styles).toEqual([
{ label: 'defaultStyle', value: 'default' },
{ label: 'fancyStyle', value: 'fancy' }
]);
});
// Good example of Layer hierarchy in the wild can be found at
// https://idpgis.ncep.noaa.gov/arcgis/services/NWS_Forecasts_Guidance_Warnings/NDFD_temp/MapServer/WMSServer
it('Should extract hierarchical Layer elements', async () => {
const wmsClient = new WmsClient({ serviceUrl: 'myWMSUrl' });
wmsClient._fetch = () => {
return {
status: 200,
text: () => {
return `
<WMT_MS_Capabilities version="1.1.1">
<Capability>
<Layer>
<Title><![CDATA[hierarchyLevel1PathA]]></Title>
<Layer>
<Title>hierarchyLevel2</Title>
<Layer>
<Title>layer1</Title>
<Name>1</Name>
<Style>
<Name>default</Name>
<Title>defaultStyle</Title>
</Style>
</Layer>
<Layer>
<Title>layer2</Title>
<Name>2</Name>
</Layer>
</Layer>
</Layer>
<Layer>
<Title>hierarchyLevel1PathB</Title>
<Layer>
<Title>layer3</Title>
<Name>3</Name>
<Style>
<Name>fancy</Name>
<Title>fancyStyle</Title>
</Style>
</Layer>
</Layer>
</Capability>
</WMT_MS_Capabilities>
`;
}
};
};
const capabilities = await wmsClient.getCapabilities();
expect(capabilities.layers).toEqual([
{ label: 'hierarchyLevel1PathA - hierarchyLevel2 - layer1', value: '1' },
{ label: 'hierarchyLevel1PathA - hierarchyLevel2 - layer2', value: '2' },
{ label: 'hierarchyLevel1PathB - layer3', value: '3' }
]);
expect(capabilities.styles).toEqual([
{ label: 'hierarchyLevel1PathA - hierarchyLevel2 - defaultStyle', value: 'default' },
{ label: 'hierarchyLevel1PathB - fancyStyle', value: 'fancy' }
]);
});
it('Should create group from common parts of Layer hierarchy', async () => {
const wmsClient = new WmsClient({ serviceUrl: 'myWMSUrl' });
wmsClient._fetch = () => {
return {
status: 200,
text: () => {
return `
<WMT_MS_Capabilities version="1.1.1">
<Capability>
<Layer>
<Title>hierarchyLevel1PathA</Title>
<Layer>
<Title>hierarchyLevel2</Title>
<Layer>
<Title>layer1</Title>
<Name>1</Name>
<Style>
<Name>default</Name>
<Title>defaultStyle</Title>
</Style>
</Layer>
</Layer>
</Layer>
<Layer>
<Title>hierarchyLevel1PathA</Title>
<Layer>
<Title>hierarchyLevel2</Title>
<Layer>
<Title>layer2</Title>
<Name>2</Name>
<Style>
<Name>fancy</Name>
<Title>fancyStyle</Title>
</Style>
</Layer>
</Layer>
</Layer>
</Capability>
</WMT_MS_Capabilities>
`;
}
};
};
const capabilities = await wmsClient.getCapabilities();
expect(capabilities.layers).toEqual([
{
label: 'hierarchyLevel1PathA - hierarchyLevel2',
options: [
{ label: 'layer1', value: '1' },
{ label: 'layer2', value: '2' },
]
}
]);
expect(capabilities.styles).toEqual([
{
label: 'hierarchyLevel1PathA - hierarchyLevel2',
options: [
{ label: 'defaultStyle', value: 'default' },
{ label: 'fancyStyle', value: 'fancy' },
]
}
]);
});
it('Should create not group common hierarchy when there is only a single layer', async () => {
const wmsClient = new WmsClient({ serviceUrl: 'myWMSUrl' });
wmsClient._fetch = () => {
return {
status: 200,
text: () => {
return `
<WMT_MS_Capabilities version="1.1.1">
<Capability>
<Layer>
<Title>layer1</Title>
<Name>1</Name>
</Layer>
</Capability>
</WMT_MS_Capabilities>
`;
}
};
};
const capabilities = await wmsClient.getCapabilities();
expect(capabilities.layers).toEqual([
{ label: 'layer1', value: '1' },
]);
});
});

View file

@ -0,0 +1,250 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import React, { Component, Fragment } from 'react';
import { i18n } from '@kbn/i18n';
import { FormattedMessage } from '@kbn/i18n/react';
import {
EuiButton,
EuiCallOut,
EuiComboBox,
EuiFieldText,
EuiFormRow,
EuiForm,
EuiSpacer,
} from '@elastic/eui';
import { WmsClient } from './wms_client';
const LAYERS_LABEL = i18n.translate('xpack.maps.source.wms.layersLabel', {
defaultMessage: 'Layers'
});
const STYLES_LABEL = i18n.translate('xpack.maps.source.wms.stylesLabel', {
defaultMessage: 'Styles'
});
/**
 * Form for configuring a WMS (Web Map Service) source.
 *
 * Flow: the user enters a service URL, optionally loads the service's
 * GetCapabilities metadata to populate layer/style combo boxes, and the
 * component calls `props.previewWMS(sourceConfig)` whenever enough state
 * exists to preview (`sourceConfig` is null when the form is incomplete).
 */
export class WMSCreateSourceEditor extends Component {

  state = {
    serviceUrl: '',
    layers: '',   // comma separated list of layer names sent to the WMS server
    styles: '',   // comma separated list of style names sent to the WMS server
    isLoadingCapabilities: false,
    getCapabilitiesError: null,
    hasAttemptedToLoadCapabilities: false,
    layerOptions: [],          // EuiComboBox options from GetCapabilities
    styleOptions: [],
    selectedLayerOptions: [],
    selectedStyleOptions: [],
  }

  componentDidMount() {
    // Guard flag so async GetCapabilities results are not applied via
    // setState after the component has unmounted.
    this._isMounted = true;
  }

  componentWillUnmount() {
    this._isMounted = false;
  }

  _previewIfPossible() {
    const {
      serviceUrl,
      layers,
      styles
    } = this.state;

    // A preview needs at least a URL and one layer; styles are optional.
    // Passing null clears any existing preview.
    const sourceConfig = (serviceUrl && layers)
      ? { serviceUrl, layers, styles }
      : null;
    this.props.previewWMS(sourceConfig);
  }

  // Fetches GetCapabilities from the configured service URL and fills the
  // layer/style combo-box options. On failure the error message is stored so
  // the UI can fall back to free-text layer/style inputs.
  _loadCapabilities = async () => {
    if (!this.state.serviceUrl) {
      return;
    }

    this.setState({
      hasAttemptedToLoadCapabilities: true,
      isLoadingCapabilities: true,
      getCapabilitiesError: null,
    });

    const wmsClient = new WmsClient({ serviceUrl: this.state.serviceUrl });

    let capabilities;
    try {
      capabilities = await wmsClient.getCapabilities();
    } catch (error) {
      // Only touch state if still mounted (request may outlive the component).
      if (this._isMounted) {
        this.setState({
          isLoadingCapabilities: false,
          getCapabilitiesError: error.message
        });
      }
      return;
    }

    if (!this._isMounted) {
      return;
    }

    this.setState({
      isLoadingCapabilities: false,
      layerOptions: capabilities.layers,
      styleOptions: capabilities.styles
    });
  }

  _handleServiceUrlChange = (e) => {
    // Changing the URL invalidates everything derived from the previous
    // service, so reset capabilities state and selections before previewing.
    this.setState({
      serviceUrl: e.target.value,
      hasAttemptedToLoadCapabilities: false,
      layerOptions: [],
      styleOptions: [],
      selectedLayerOptions: [],
      selectedStyleOptions: [],
      layers: '',
      styles: '',
    }, this._previewIfPossible);
  }

  // Free-text fallback handler (used when capabilities could not be loaded).
  _handleLayersChange = (e) => {
    this.setState({ layers: e.target.value }, this._previewIfPossible);
  }

  // Combo-box handler: keeps the selected options for the UI and flattens
  // their values into the comma separated `layers` string the WMS needs.
  _handleLayerOptionsChange = (selectedOptions) => {
    this.setState({
      selectedLayerOptions: selectedOptions,
      layers: selectedOptions.map(selectedOption => {
        return selectedOption.value;
      }).join(',')
    }, this._previewIfPossible);
  }

  // Free-text fallback handler for styles.
  _handleStylesChange = (e) => {
    this.setState({ styles: e.target.value }, this._previewIfPossible);
  }

  _handleStyleOptionsChange = (selectedOptions) => {
    this.setState({
      selectedStyleOptions: selectedOptions,
      styles: selectedOptions.map(selectedOption => {
        return selectedOption.value;
      }).join(',')
    }, this._previewIfPossible);
  }

  // Renders nothing until a capabilities load has been attempted; renders
  // free-text inputs when the load failed or returned no layers, otherwise
  // combo boxes populated from the capabilities document.
  _renderLayerAndStyleInputs() {
    if (!this.state.hasAttemptedToLoadCapabilities || this.state.isLoadingCapabilities) {
      return null;
    }

    if (this.state.getCapabilitiesError || this.state.layerOptions.length === 0) {
      return (
        <Fragment>
          <EuiCallOut
            title={i18n.translate('xpack.maps.source.wms.getCapabilitiesErrorCalloutTitle', {
              defaultMessage: 'Unable to load service metadata'
            })}
            color="warning"
          >
            <p>{this.state.getCapabilitiesError}</p>
          </EuiCallOut>

          <EuiFormRow
            label={LAYERS_LABEL}
            helpText={i18n.translate('xpack.maps.source.wms.layersHelpText', {
              defaultMessage: 'use comma separated list of layer names'
            })}
          >
            <EuiFieldText
              onChange={this._handleLayersChange}
            />
          </EuiFormRow>
          <EuiFormRow
            label={STYLES_LABEL}
            helpText={i18n.translate('xpack.maps.source.wms.stylesHelpText', {
              defaultMessage: 'use comma separated list of style names'
            })}
          >
            <EuiFieldText
              onChange={this._handleStylesChange}
            />
          </EuiFormRow>
        </Fragment>
      );
    }

    return (
      <Fragment>
        <EuiFormRow
          label={LAYERS_LABEL}
        >
          <EuiComboBox
            options={this.state.layerOptions}
            selectedOptions={this.state.selectedLayerOptions}
            onChange={this._handleLayerOptionsChange}
          />
        </EuiFormRow>
        <EuiFormRow
          label={STYLES_LABEL}
        >
          <EuiComboBox
            options={this.state.styleOptions}
            selectedOptions={this.state.selectedStyleOptions}
            onChange={this._handleStyleOptionsChange}
          />
        </EuiFormRow>
      </Fragment>
    );
  }

  // Shown until capabilities have been attempted once; disabled without a URL.
  _renderGetCapabilitiesButton() {
    if (!this.state.isLoadingCapabilities && this.state.hasAttemptedToLoadCapabilities) {
      return null;
    }

    return (
      <Fragment>
        <EuiButton
          onClick={this._loadCapabilities}
          isDisabled={!this.state.serviceUrl}
          isLoading={this.state.isLoadingCapabilities}
        >
          <FormattedMessage
            id="xpack.maps.source.wms.getCapabilitiesButtonText"
            defaultMessage="Load capabilities"
          />
        </EuiButton>
        <EuiSpacer size="m" />
      </Fragment>
    );
  }

  render() {
    return (
      <EuiForm>
        <EuiFormRow
          label={i18n.translate('xpack.maps.source.wms.urlLabel', {
            defaultMessage: 'Url'
          })}
        >
          <EuiFieldText
            value={this.state.serviceUrl}
            onChange={this._handleServiceUrlChange}
          />
        </EuiFormRow>

        {this._renderGetCapabilitiesButton()}
        {this._renderLayerAndStyleInputs()}

      </EuiForm>
    );
  }
}

View file

@ -0,0 +1,91 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import React from 'react';
import { AbstractTMSSource } from '../tms_source';
import { TileLayer } from '../../tile_layer';
import { WMSCreateSourceEditor } from './wms_create_source_editor';
import { i18n } from '@kbn/i18n';
import { getDataSourceLabel, getUrlLabel } from '../../../../../common/i18n_getters';
export class WMSSource extends AbstractTMSSource {
static type = 'WMS';
static title = i18n.translate('xpack.maps.source.wmsTitle', {
defaultMessage: 'Web Map Service'
});
static description = i18n.translate('xpack.maps.source.wmsDescription', {
defaultMessage: 'Maps from OGC Standard WMS'
});
static icon = 'grid';
static createDescriptor({ serviceUrl, layers, styles }) {
return {
type: WMSSource.type,
serviceUrl: serviceUrl,
layers: layers,
styles: styles
};
}
static renderEditor({ onPreviewSource, inspectorAdapters }) {
const previewWMS = (sourceConfig) => {
if (!sourceConfig) {
onPreviewSource(null);
return;
}
const sourceDescriptor = WMSSource.createDescriptor(sourceConfig);
const source = new WMSSource(sourceDescriptor, inspectorAdapters);
onPreviewSource(source);
};
return (<WMSCreateSourceEditor previewWMS={previewWMS} />);
}
async getImmutableProperties() {
return [
{ label: getDataSourceLabel(), value: WMSSource.title },
{ label: getUrlLabel(), value: this._descriptor.serviceUrl },
{
label: i18n.translate('xpack.maps.source.wms.layersLabel', {
defaultMessage: 'Layers'
}),
value: this._descriptor.layers
},
{
label: i18n.translate('xpack.maps.source.wms.stylesLabel', {
defaultMessage: 'Styles'
}),
value: this._descriptor.styles
},
];
}
_createDefaultLayerDescriptor(options) {
return TileLayer.createDescriptor({
sourceDescriptor: this._descriptor,
...options
});
}
createDefaultLayer(options) {
return new TileLayer({
layerDescriptor: this._createDefaultLayerDescriptor(options),
source: this
});
}
async getDisplayName() {
return this._descriptor.serviceUrl;
}
getUrlTemplate() {
const styles = this._descriptor.styles || '';
// eslint-disable-next-line max-len
return `${this._descriptor.serviceUrl}?bbox={bbox-epsg-3857}&format=image/png&service=WMS&version=1.1.1&request=GetMap&srs=EPSG:3857&transparent=true&width=256&height=256&layers=${this._descriptor.layers}&styles=${styles}`;
}
}

View file

@ -16,6 +16,21 @@ const EMPTY_FEATURE_COLLECTION = {
features: []
};
const CLOSED_SHAPE_MB_FILTER = [
'any',
['==', ['geometry-type'], 'Polygon'],
['==', ['geometry-type'], 'MultiPolygon']
];
const ALL_SHAPE_MB_FILTER = [
'any',
['==', ['geometry-type'], 'Polygon'],
['==', ['geometry-type'], 'MultiPolygon'],
['==', ['geometry-type'], 'LineString'],
['==', ['geometry-type'], 'MultiLineString']
];
export class VectorLayer extends AbstractLayer {
static type = 'VECTOR';
@ -422,13 +437,7 @@ export class VectorLayer extends AbstractLayer {
source: sourceId,
paint: {}
});
mbMap.setFilter(fillLayerId, [
'any',
['==', ['geometry-type'], 'Polygon'],
['==', ['geometry-type'], 'MultiPolygon'],
['==', ['geometry-type'], 'LineString'],
['==', ['geometry-type'], 'MultiLineString']
]);
mbMap.setFilter(fillLayerId, CLOSED_SHAPE_MB_FILTER);
}
if (!mbMap.getLayer(lineLayerId)) {
mbMap.addLayer({
@ -437,13 +446,7 @@ export class VectorLayer extends AbstractLayer {
source: sourceId,
paint: {}
});
mbMap.setFilter(lineLayerId, [
'any',
['==', ['geometry-type'], 'Polygon'],
['==', ['geometry-type'], 'MultiPolygon'],
['==', ['geometry-type'], 'LineString'],
['==', ['geometry-type'], 'MultiLineString']
]);
mbMap.setFilter(lineLayerId, ALL_SHAPE_MB_FILTER);
}
this._style.setMBPaintProperties({
alpha: this.getAlpha(),

View file

@ -100,6 +100,11 @@ export const reporting = (kibana) => {
otherwise: Joi.default(true),
}),
chromium: Joi.object({
inspect: Joi.boolean().when('$dev', {
is: false,
then: Joi.valid(false),
else: Joi.default(false),
}),
disableSandbox: Joi.boolean().default(await getDefaultChromiumSandboxDisabled()),
proxy: Joi.object({
enabled: Joi.boolean().default(false),

View file

@ -4,6 +4,7 @@
* you may not use this file except in compliance with the Elastic License.
*/
import open from 'opn';
import * as Chrome from 'puppeteer-core';
import { parse as parseUrl } from 'url';
import {
@ -19,6 +20,7 @@ import {
export interface ChromiumDriverOptions {
logger: Logger;
inspect: boolean;
}
interface WaitForSelectorOpts {
@ -30,10 +32,12 @@ const WAIT_FOR_DELAY_MS: number = 100;
export class HeadlessChromiumDriver {
private readonly page: Chrome.Page;
private readonly logger: Logger;
private readonly inspect: boolean;
constructor(page: Chrome.Page, { logger }: ChromiumDriverOptions) {
constructor(page: Chrome.Page, { logger, inspect }: ChromiumDriverOptions) {
this.page = page;
this.logger = logger.clone(['headless-chromium-driver']);
this.inspect = inspect;
}
public async open(
@ -61,6 +65,11 @@ export class HeadlessChromiumDriver {
});
await this.page.goto(url, { waitUntil: 'domcontentloaded' });
if (this.inspect) {
await this.launchDebugger();
}
await this.waitForSelector(waitForSelector);
}
@ -135,6 +144,27 @@ export class HeadlessChromiumDriver {
});
}
  /**
   * Pauses page script execution and opens the Chromium DevTools inspector
   * for this page in the user's browser (dev-only debugging aid).
   */
  private async launchDebugger() {
    // In order to pause on execution we have to reach more deeply into Chromium's Devtools Protocol,
    // and more specifically, for the page being used. _client is per-page, and puppeteer doesn't expose
    // a page's client in their api, so we have to reach into internals to get this behavior.
    // Finally, in order to get the inspector running, we have to know the page's internal ID (again, private)
    // in order to construct the final debugging URL.

    // Enable the CDP Debugger domain, then pause so execution halts until a
    // developer attaches and resumes from the inspector UI.
    // @ts-ignore
    await this.page._client.send('Debugger.enable');
    // @ts-ignore
    await this.page._client.send('Debugger.pause');
    // @ts-ignore
    const targetId = this.page._target._targetId;
    const wsEndpoint = this.page.browser().wsEndpoint();
    const { port } = parseUrl(wsEndpoint);
    // `open` (opn) launches the default browser pointed at the per-page
    // inspector URL derived from the browser's websocket debug endpoint.
    open(
      `http://localhost:${port}/devtools/inspector.html?ws=localhost:${port}/devtools/page/${targetId}`
    );
  }
private _shouldUseCustomHeaders(conditions: ConditionalHeadersConditions, url: string) {
const { hostname, protocol, port, pathname } = parseUrl(url);

View file

@ -114,6 +114,7 @@ export class HeadlessChromiumDriverFactory {
new HeadlessChromiumDriver(page, {
maxScreenshotDimension: this.browserConfig.maxScreenshotDimension,
logger: this.logger,
inspect: this.browserConfig.inspect,
})
);

View file

@ -18,7 +18,7 @@ function getAllFetchParams(searchRequests, Promise) {
});
}
async function serializeAllFetchParams(fetchParams, searchRequests) {
function serializeAllFetchParams(fetchParams, searchRequests) {
const searchRequestsWithFetchParams = [];
const failedSearchRequests = [];

Some files were not shown because too many files have changed in this diff Show more