Mirror of https://github.com/elastic/kibana.git (synced 2025-04-23 17:28:26 -04:00)
Created more robust defaults and removed support for overriding them

Commit b4ef144ae0 (parent 6b1d0c8c78)
9 changed files with 189 additions and 145 deletions
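In short: the POST index-pattern route no longer fills in per-field defaults inline, and caller-supplied mapping properties no longer influence the generated Elasticsearch template. A new init_default_field_props helper applies one fixed set of defaults per field class (string vs. everything else) and adds a non-analyzed .raw sibling for each string field, while create_mappings_from_pattern_fields now emits a fixed ES mapping per type. A minimal sketch of the new defaulting behavior, read off the diff below; the field names and the require path are illustrative:

const initDefaultFieldProps = require('./init_default_field_props');

const withDefaults = initDefaultFieldProps([
  {name: 'agent', type: 'string'},
  {name: 'bytes', type: 'number'}
]);
// => [
//   {name: 'agent',     type: 'string', indexed: true, analyzed: true,  doc_values: false, scripted: false, count: 0},
//   {name: 'agent.raw', type: 'string', indexed: true, analyzed: false, doc_values: true,  scripted: false, count: 0},
//   {name: 'bytes',     type: 'number', indexed: true, analyzed: false, doc_values: true,  scripted: false, count: 0}
// ]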
@@ -10,15 +10,19 @@ describe('createMappingsFromPatternFields', function () {
     testFields = [
       {
         'name': 'ip',
-        'type': 'ip',
-        'count': 2,
-        'scripted': false
+        'type': 'ip'
       },
       {
         'name': 'geo.coordinates',
-        'type': 'geo_point',
-        'count': 0,
-        'scripted': false
+        'type': 'geo_point'
       },
+      {
+        'name': 'agent',
+        'type': 'string'
+      },
+      {
+        'name': 'bytes',
+        'type': 'number'
+      }
     ];
   });
@@ -35,27 +39,28 @@ describe('createMappingsFromPatternFields', function () {
     expect(_.isEqual(testFields, testFieldClone)).to.be.ok();
   });

-  it('should remove kibana properties that are not valid for ES field mappings', function () {
-    const mappings = createMappingsFromPatternFields(testFields);
-    expect(mappings.ip).to.not.have.property('name');
-    expect(mappings.ip).to.not.have.property('count');
-    expect(mappings.ip).to.not.have.property('scripted');
-    expect(mappings.ip).to.not.have.property('indexed');
-    expect(mappings.ip).to.not.have.property('analyzed');
-  });
-
-  it('should set doc_values and indexed status based on the relevant kibana properties if they exist', function () {
-    testFields[0].indexed = true;
-    testFields[0].analyzed = false;
-    testFields[0].doc_values = true;
-    let mappings = createMappingsFromPatternFields(testFields);
-
-    expect(mappings.ip).to.have.property('doc_values', true);
-    expect(mappings.ip).to.have.property('index', 'not_analyzed');
-
-    testFields[0].analyzed = true;
-    mappings = createMappingsFromPatternFields(testFields);
-    expect(mappings.ip).to.have.property('index', 'analyzed');
+  it('should set the same default mapping for all non-strings', function () {
+    let mappings = createMappingsFromPatternFields(testFields);
+
+    _.forEach(mappings, function (mapping) {
+      if (mapping.type !== 'string') {
+        expect(_.isEqual(mapping, {
+          type: mapping.type,
+          index: 'not_analyzed',
+          doc_values: true
+        })).to.be.ok();
+      }
+    });
+  });
+
+  it('should give strings a multi-field mapping', function () {
+    let mappings = createMappingsFromPatternFields(testFields);
+
+    _.forEach(mappings, function (mapping) {
+      if (mapping.type === 'string') {
+        expect(mapping).to.have.property('fields');
+      }
+    });
   });

   it('should handle nested fields', function () {

@@ -66,4 +71,11 @@ describe('createMappingsFromPatternFields', function () {
     expect(mappings.geo.properties).to.have.property('coordinates');
     expect(_.isEqual(mappings.geo.properties.coordinates, {type: 'geo_point'})).to.be.ok();
   });
+
+  it('should map all number fields as an ES double', function () {
+    let mappings = createMappingsFromPatternFields(testFields);
+
+    expect(mappings).to.have.property('bytes');
+    expect(mappings.bytes).to.have.property('type', 'double');
+  });
 });
New file: tests for initDefaultFieldProps

@@ -0,0 +1,74 @@
+const initDefaultFieldProps = require('../init_default_field_props');
+const expect = require('expect.js');
+const _ = require('lodash');
+let fields;
+
+const testData = [
+  {
+    'name': 'ip',
+    'type': 'ip'
+  }, {
+    'name': '@timestamp',
+    'type': 'date'
+  }, {
+    'name': 'agent',
+    'type': 'string'
+  }, {
+    'name': 'bytes',
+    'type': 'number'
+  },
+  {
+    'name': 'geo.coordinates',
+    'type': 'geo_point'
+  }
+];
+
+describe('initDefaultFieldProps', function () {
+
+  beforeEach(function () {
+    fields = _.cloneDeep(testData);
+  });
+
+  it('should throw an error if no argument is passed or the argument is not an array', function () {
+    expect(initDefaultFieldProps).to.throwException(/requires an array argument/);
+    expect(initDefaultFieldProps).withArgs({}).to.throwException(/requires an array argument/);
+  });
+
+  it('should set the same defaults for everything but strings', function () {
+    const results = initDefaultFieldProps(fields);
+    _.forEach(results, function (field) {
+      if (field.type !== 'string') {
+        expect(field).to.have.property('indexed', true);
+        expect(field).to.have.property('analyzed', false);
+        expect(field).to.have.property('doc_values', true);
+        expect(field).to.have.property('scripted', false);
+        expect(field).to.have.property('count', 0);
+      }
+    });
+  });
+
+  it('should make string fields analyzed', function () {
+    const results = initDefaultFieldProps(fields);
+    _.forEach(results, function (field) {
+      if (field.type === 'string' && !_.contains(field.name, 'raw')) {
+        expect(field).to.have.property('indexed', true);
+        expect(field).to.have.property('analyzed', true);
+        expect(field).to.have.property('doc_values', false);
+        expect(field).to.have.property('scripted', false);
+        expect(field).to.have.property('count', 0);
+      }
+    });
+  });
+
+  it('should create an extra raw non-analyzed field for strings', function () {
+    const results = initDefaultFieldProps(fields);
+    const rawField = _.find(results, function (field) {
+      return _.contains(field.name, 'raw');
+    });
+    expect(rawField).to.have.property('indexed', true);
+    expect(rawField).to.have.property('analyzed', false);
+    expect(rawField).to.have.property('doc_values', true);
+    expect(rawField).to.have.property('scripted', false);
+    expect(rawField).to.have.property('count', 0);
+  });
+});
Deleted file: the castMappingType lookup table

@@ -1,31 +0,0 @@
-const types = {
-  string: {type: 'string', group: 'base'},
-  date: {type: 'date', group: 'base'},
-  boolean: {type: 'boolean', group: 'base'},
-  float: {type: 'number', group: 'number'},
-  double: {type: 'number', group: 'number'},
-  integer: {type: 'number', group: 'number'},
-  long: {type: 'number', group: 'number'},
-  short: {type: 'number', group: 'number'},
-  byte: {type: 'number', group: 'number'},
-  token_count: {type: 'number', group: 'number'},
-  geo_point: {type: 'geo_point', group: 'geo'},
-  geo_shape: {type: 'geo_shape', group: 'geo'},
-  ip: {type: 'ip', group: 'other'},
-  attachment: {type: 'attachment', group: 'other'},
-  murmur3: {type: 'murmur3', group: 'hash'},
-  conflict: {type: 'conflict', group: 'other'}
-};
-
-/**
- * Based on _cast_mapping_type from ui/index_patterns
- * Accepts a mapping type, and converts it into its js equivalent
- * @param {String} type - the type from the mapping's 'type' field
- * @return {String} - the most specific type that we care for
- */
-module.exports = function castMappingType(name) {
-  if (!name) return 'unknown';
-
-  var match = types[name];
-  return match ? match.type : 'string';
-};
@@ -9,24 +9,26 @@ module.exports = function createMappingsFromPatternFields(fields) {
   const mappings = {};

   _.forEach(fields, function (field) {
-    const mapping = _.cloneDeep(field);
+    let mapping;

-    delete mapping.name;
-    delete mapping.count;
-    delete mapping.scripted;
-    delete mapping.indexed;
-    delete mapping.analyzed;
-
-    if (field.indexed === false) {
-      mapping.index = 'no';
+    if (field.type === 'string') {
+      mapping = {
+        type: 'string',
+        index: 'analyzed',
+        omit_norms: true,
+        fielddata: {format: 'disabled'},
+        fields: {
+          raw: {type: 'string', index: 'not_analyzed', doc_values: true, ignore_above: 256}
+        }
+      };
     }
     else {
-      if (field.analyzed === false) {
-        mapping.index = 'not_analyzed';
-      }
-      else if (field.analyzed === true) {
-        mapping.index = 'analyzed';
-      }
+      const fieldType = field.type === 'number' ? 'double' : field.type;
+      mapping = {
+        type: fieldType,
+        index: 'not_analyzed',
+        doc_values: true
+      };
     }

     _.set(mappings, field.name.replace('.', '.properties.'), mapping);
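For reference, a sketch of what the rewritten function now returns for a two-field input; the field names and require path are illustrative, with the behavior read off the diff above:

const createMappingsFromPatternFields = require('./create_mappings_from_pattern_fields');

const mappings = createMappingsFromPatternFields([
  {name: 'agent', type: 'string'},
  {name: 'bytes', type: 'number'}
]);
// mappings.agent => the one fixed multi-field string mapping:
//   {type: 'string', index: 'analyzed', omit_norms: true, fielddata: {format: 'disabled'},
//    fields: {raw: {type: 'string', index: 'not_analyzed', doc_values: true, ignore_above: 256}}}
// mappings.bytes => {type: 'double', index: 'not_analyzed', doc_values: true}
// Dotted names still nest via _.set, so {name: 'geo.coordinates', ...}
// lands at mappings.geo.properties.coordinates.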
src/plugins/kibana/server/lib/init_default_field_props.js (new file, 45 lines)

@@ -0,0 +1,45 @@
+const _ = require('lodash');
+
+module.exports = function initDefaultFieldProps(fields) {
+  if (fields === undefined || !_.isArray(fields)) {
+    throw new Error('requires an array argument');
+  }
+
+  const results = [];
+
+  _.forEach(fields, function (field) {
+    const newField = _.cloneDeep(field);
+    results.push(newField);
+
+    if (newField.type === 'string') {
+      _.defaults(newField, {
+        indexed: true,
+        analyzed: true,
+        doc_values: false,
+        scripted: false,
+        count: 0
+      });
+
+      results.push({
+        name: newField.name + '.raw',
+        type: 'string',
+        indexed: true,
+        analyzed: false,
+        doc_values: true,
+        scripted: false,
+        count: 0
+      });
+    }
+    else {
+      _.defaults(newField, {
+        indexed: true,
+        analyzed: false,
+        doc_values: true,
+        scripted: false,
+        count: 0
+      });
+    }
+  });
+
+  return results;
+};
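One subtlety: _.defaults only fills in keys that are absent, so a property the caller does send (say count: 5) still survives into the stored field metadata; what the commit removes is any influence of those properties on the generated ES mappings, which are now fixed per type. A quick illustration of the lodash semantics, using a hypothetical 'status' field:

const _ = require('lodash');

const field = {name: 'status', type: 'string', count: 5};
// _.defaults mutates its first argument, assigning only the keys that are missing:
_.defaults(field, {indexed: true, analyzed: true, doc_values: false, scripted: false, count: 0});
// field.count is still 5; indexed, analyzed, doc_values, and scripted were absent and got the defaults.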
@@ -5,7 +5,7 @@ const indexPatternSchema = require('../../../lib/schemas/resources/index_pattern
 const handleESError = require('../../../lib/handle_es_error');
 const { convertToCamelCase } = require('../../../lib/case_conversion');
 const createMappingsFromPatternFields = require('../../../lib/create_mappings_from_pattern_fields');
-const castMappingType = require('../../../lib/cast_mapping_type');
+const initDefaultFieldProps = require('../../../lib/init_default_field_props');

 module.exports = function registerPost(server) {
   server.route({

@@ -22,31 +22,8 @@ module.exports = function registerPost(server) {
     const indexPatternId = requestDocument.data.id;
     const indexPattern = convertToCamelCase(requestDocument.data.attributes);

-    _.forEach(indexPattern.fields, function (field) {
-      if (field.scripted) {
-        _.defaults(field, {
-          indexed: false,
-          analyzed: false,
-          doc_values: false,
-          count: 0
-        });
-      }
-      else {
-        _.defaults(field, {
-          indexed: true,
-          analyzed: false,
-          doc_values: true,
-          scripted: false,
-          count: 0
-        });
-      }
-    });
-
-    _.forEach(indexPattern.fields, function (field) {
-      field.type = castMappingType(field.type);
-    });
-    const mappings = createMappingsFromPatternFields(indexPattern.fields);
+    const mappings = createMappingsFromPatternFields(_.reject(indexPattern.fields, 'scripted'));
+    indexPattern.fields = initDefaultFieldProps(indexPattern.fields);

     indexPattern.fields = JSON.stringify(indexPattern.fields);
     indexPattern.fieldFormatMap = JSON.stringify(indexPattern.fieldFormatMap);
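Note the ordering in the new handler: the ES mappings are built from the raw field list before initDefaultFieldProps runs, so the synthetic .raw fields never receive top-level mappings of their own; they exist only inside each string's multi-field mapping and in the stored Kibana metadata. A condensed sketch of the resulting flow, using the names from the diff above:

// Scripted fields never reach the ES template; everything else gets a fixed per-type mapping.
const mappings = createMappingsFromPatternFields(_.reject(indexPattern.fields, 'scripted'));
// The stored Kibana field list then gets uniform defaults plus a .raw sibling per string field.
indexPattern.fields = initDefaultFieldProps(indexPattern.fields);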
@@ -79,7 +56,9 @@ module.exports = function registerPost(server) {
           match: '*',
           match_mapping_type: 'string',
           mapping: {
-            type: 'string', index: 'analyzed', omit_norms: true,
+            type: 'string',
+            index: 'analyzed',
+            omit_norms: true,
             fielddata: {format: 'disabled'},
             fields: {
               raw: {type: 'string', index: 'not_analyzed', doc_values: true, ignore_above: 256}
@@ -52,8 +52,6 @@ module.exports = function (grunt) {
       args: [
         '--server.port=' + uiConfig.servers.kibana.port,
         '--env.name=development',
-        //remember to remove this
-        '--server.xsrf.disableProtection=true',
         '--elasticsearch.url=' + format(uiConfig.servers.elasticsearch),
         '--logging.json=false'
       ]
@@ -53,7 +53,7 @@ define(function (require) {
       });
     });

-    bdd.it('should provide defaults for optional field properties that need to be initialized and cast types', function createTemplate() {
+    bdd.it('should provide defaults for field properties', function createTemplate() {
       return request.post('/kibana/index_patterns')
       .send(createTestData().indexPattern)
       .expect(201)

@@ -74,11 +74,6 @@ define(function (require) {
         expect(fields[1].indexed).to.be(true);
         expect(fields[1].analyzed).to.be(false);
         expect(fields[1].doc_values).to.be(true);
-
-        // API should cast Java types to JS before storing the Kibana index pattern.
-        // bytes was created as a long and cast to number
-        expect(fields[3].name).to.be('bytes');
-        expect(fields[3].type).to.be('number');
       });
     });
   });

@@ -93,10 +88,11 @@ define(function (require) {
       .then(function (template) {
         var mappings = template['kibana-logstash-*'].mappings._default_.properties;
         expect(mappings).to.be.ok();
-        expect(_.isEqual(mappings.ip, {index: 'not_analyzed', type: 'ip', doc_values: false})).to.be.ok();
+        expect(_.isEqual(mappings.ip, {index: 'not_analyzed', type: 'ip', doc_values: true})).to.be.ok();
         expect(_.isEqual(mappings['@timestamp'], {index: 'not_analyzed', type: 'date', doc_values: true})).to.be.ok();
         expect(_.isEqual(mappings.agent, {index: 'analyzed', type: 'string', doc_values: false})).to.be.ok();
-        expect(_.isEqual(mappings.bytes, {index: 'not_analyzed', type: 'long', doc_values: true})).to.be.ok();
+        expect(_.isEqual(mappings.bytes, {index: 'not_analyzed', type: 'double', doc_values: true})).to.be.ok();

+        // object fields are mapped as such, with individual mappings for each of their properties
         expect(_.isEqual(mappings.geo, {
           properties: {
             coordinates: {

@@ -106,36 +102,13 @@ define(function (require) {
             }
           }
         })).to.be.ok();
+
+        // strings should be mapped as multi fields
+        expect(mappings.agent).to.have.property('fields');
       });
     });
   });

-  bdd.it('scripted fields should not get added to the template', function createTemplate() {
-    var testData = createTestData().indexPattern;
-    testData.data.attributes.fields.push({
-      'name': 'Double Bytes',
-      'type': 'number',
-      'scripted': true,
-      'script': 'doc[\'bytes\'].value * 2',
-      'lang': 'expression',
-      'indexed': false,
-      'analyzed': false,
-      'doc_values': false
-    });
-
-    return request.post('/kibana/index_patterns')
-    .send(testData)
-    .expect(201)
-    .then(function () {
-      return scenarioManager.client.indices.getTemplate({name: 'kibana-logstash-*'})
-      .then(function (template) {
-        var mappings = template['kibana-logstash-*'].mappings._default_.properties;
-        expect(mappings).to.be.ok();
-        expect(mappings).to.not.have.property('Double Bytes');
-      });
-    });
-  });
-
   bdd.it('should return 409 conflict when a pattern with the given ID already exists', function patternConflict() {
     return request.post('/kibana/index_patterns')
     .send(createTestData().indexPattern)
@@ -9,24 +9,16 @@ module.exports = function createTestData() {
       'time_field_name': '@timestamp',
       'fields': [{
         'name': 'ip',
-        'type': 'ip',
-        'count': 2,
-        'scripted': false,
-        'indexed': true,
-        'analyzed': false,
-        'doc_values': false
+        'type': 'ip'
       }, {
         'name': '@timestamp',
         'type': 'date'
       }, {
         'name': 'agent',
-        'type': 'string',
-        'indexed': true,
-        'analyzed': true,
-        'doc_values': false
+        'type': 'string'
       }, {
         'name': 'bytes',
-        'type': 'long'
+        'type': 'number'
       },
       {
         'name': 'geo.coordinates',