Merge pull request #8524 from elastic/jasper/backport/8135/5.0

[backport] PR #8135 to 5.0 - Remove pipelines from ingest endpoint, and import csv wizard
Court Ewing 2016-10-03 16:50:18 -04:00 committed by GitHub
commit 234f54792a
8 changed files with 10 additions and 159 deletions

View file

@@ -27,8 +27,7 @@ modules.get('apps/management')
location: 'Add Data'
});
const usePipeline = !_.isEmpty(_.get(this.results, 'pipeline.processors'));
ingest.uploadCSV(this.results.file, this.results.indexPattern.id, this.results.parseOptions.delimiter, usePipeline)
ingest.uploadCSV(this.results.file, this.results.indexPattern.id, this.results.parseOptions.delimiter)
.then(
(res) => {
this.created = 0;
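
The wizard now hands the upload exactly three arguments: the file, the index pattern id, and the CSV delimiter; it no longer inspects this.results.pipeline at all. A minimal caller-side sketch, assuming an injected IngestProvider instance named ingest; the file contents, pattern id, and delimiter are placeholders, not values from this commit:

// Hypothetical caller-side sketch; values are placeholders.
const file = new Blob(['foo,bar\n1,2'], { type: 'text/csv' });
ingest.uploadCSV(file, 'logstash-*', ',')
  .then((res) => console.log('upload finished', res))
  .catch((err) => console.error('upload failed', err.message));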

View file

@@ -3,7 +3,6 @@ import template from 'plugins/kibana/management/sections/indices/filebeat/direct
import IngestProvider from 'ui/ingest';
import 'plugins/kibana/management/sections/indices/add_data_steps/pattern_review_step';
import 'plugins/kibana/management/sections/indices/add_data_steps/paste_samples_step';
import 'plugins/kibana/management/sections/indices/add_data_steps/pipeline_setup';
import 'plugins/kibana/management/sections/indices/add_data_steps/install_filebeat_step';
import '../../styles/_add_data_wizard.less';

View file

@@ -1,8 +1,6 @@
import Joi from 'joi';
import indexPatternSchema from './index_pattern_schema';
import pipelineSchema from './pipeline_schema';
module.exports = Joi.object({
index_pattern: indexPatternSchema.required(),
pipeline: pipelineSchema
index_pattern: indexPatternSchema.required()
});
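
With pipelineSchema dropped, POST /api/kibana/ingest validates nothing but the index pattern. An illustrative request body the narrowed schema would accept; the field entries are placeholders, and any other properties indexPatternSchema may require are omitted:

// Hypothetical payload; only index_pattern is part of the request body now.
const payload = {
  index_pattern: {
    id: 'logstash-*',
    fields: [
      { name: 'geo.coordinates', type: 'geo_point' }
    ]
  }
};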

View file

@@ -6,7 +6,6 @@ import createMappingsFromPatternFields from '../../../lib/create_mappings_from_p
import initDefaultFieldProps from '../../../lib/init_default_field_props';
import {ingestToPattern, patternToIngest} from '../../../../common/lib/convert_pattern_and_ingest_name';
import { keysToCamelCaseShallow } from '../../../../common/lib/case_conversion';
import ingestPipelineApiKibanaToEsConverter from '../../../lib/converters/ingest_pipeline_api_kibana_to_es_converter';
export function registerPost(server) {
const kibanaIndex = server.config().get('kibana.index');
@@ -25,7 +24,7 @@ export function registerPost(server) {
},
(patternDeletionError) => {
throw new Error(
`index-pattern ${indexPatternId} created successfully but index template or pipeline
`index-pattern ${indexPatternId} created successfully but index template
creation failed. Failed to rollback index-pattern creation, must delete manually.
${patternDeletionError.toString()}
${rootError.toString()}`
@@ -34,27 +33,6 @@ export function registerPost(server) {
);
}
function templateRollback(rootError, templateName, boundCallWithRequest) {
const deleteParams = {
name: templateName
};
return boundCallWithRequest('indices.deleteTemplate', deleteParams)
.then(
() => {
throw rootError;
},
(templateDeletionError) => {
throw new Error(
`index template ${templateName} created successfully but pipeline
creation failed. Failed to rollback template creation, must delete manually.
${templateDeletionError.toString()}
${rootError.toString()}`
);
}
);
}
server.route({
path: '/api/kibana/ingest',
method: 'POST',
@@ -71,7 +49,6 @@ export function registerPost(server) {
const indexPattern = keysToCamelCaseShallow(requestDocument.index_pattern);
const indexPatternId = indexPattern.id;
const ingestConfigName = patternToIngest(indexPatternId);
const shouldCreatePipeline = !_.isEmpty(requestDocument.pipeline);
delete indexPattern.id;
const mappings = createMappingsFromPatternFields(indexPattern.fields);
@@ -81,8 +58,6 @@ export function registerPost(server) {
indexPattern.fields = JSON.stringify(indexPattern.fields);
indexPattern.fieldFormatMap = JSON.stringify(indexPattern.fieldFormatMap);
const pipeline = ingestPipelineApiKibanaToEsConverter(requestDocument.pipeline);
// Set up call with request params
const patternCreateParams = {
index: kibanaIndex,
@@ -105,13 +80,6 @@ export function registerPost(server) {
}
};
const pipelineParams = {
path: `/_ingest/pipeline/${ingestConfigName}`,
method: 'PUT',
body: pipeline
};
return boundCallWithRequest('indices.exists', {index: indexPatternId})
.then((matchingIndices) => {
if (matchingIndices) {
@@ -122,15 +90,6 @@ export function registerPost(server) {
.then(() => {
return boundCallWithRequest('indices.putTemplate', templateParams)
.catch((templateError) => {return patternRollback(templateError, indexPatternId, boundCallWithRequest);});
})
.then((templateResponse) => {
if (!shouldCreatePipeline) {
return templateResponse;
}
return boundCallWithRequest('transport.request', pipelineParams)
.catch((pipelineError) => {return templateRollback(pipelineError, ingestConfigName, boundCallWithRequest);})
.catch((templateRollbackError) => {return patternRollback(templateRollbackError, indexPatternId, boundCallWithRequest);});
});
})
.then(
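
With templateRollback and the pipeline PUT removed, error handling in this route reduces to a single rollback: if template creation fails after the index-pattern document has been written, patternRollback deletes the pattern and rethrows. A condensed sketch of the surviving chain, with the existence check and pattern-creation steps elided; names are taken from the diff above:

// Condensed sketch; intermediate steps are elided.
return boundCallWithRequest('indices.exists', { index: indexPatternId })
  .then((matchingIndices) => {
    // ...handle matching indices and create the index-pattern document...
    return boundCallWithRequest('indices.putTemplate', templateParams)
      .catch((templateError) => patternRollback(templateError, indexPatternId, boundCallWithRequest));
  });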

View file

@@ -122,7 +122,7 @@ describe('Ingest Service', function () {
it('POSTs to the kibana _data endpoint with the correct params and the file attached as multipart/form-data', function () {
$httpBackend
.expectPOST('/api/kibana/foo/_data?csv_delimiter=;&pipeline=true', function (data) {
.expectPOST('/api/kibana/foo/_data?csv_delimiter=;', function (data) {
// The assertions we can do here are limited because of poor browser support for FormData methods
return data instanceof FormData;
})
@@ -130,18 +130,18 @@ describe('Ingest Service', function () {
const file = new Blob(['foo,bar'], {type : 'text/csv'});
ingest.uploadCSV(file, 'foo', ';', true);
ingest.uploadCSV(file, 'foo', ';');
$httpBackend.flush();
});
it('Returns error from the data API if there is one', function (done) {
$httpBackend
.expectPOST('/api/kibana/foo/_data?csv_delimiter=;&pipeline=true')
.expectPOST('/api/kibana/foo/_data?csv_delimiter=;')
.respond(404);
const file = new Blob(['foo,bar'], {type : 'text/csv'});
ingest.uploadCSV(file, 'foo', ';', true)
ingest.uploadCSV(file, 'foo', ';')
.then(
() => {
throw new Error('expected an error response');
@@ -156,35 +156,4 @@ describe('Ingest Service', function () {
});
});
describe('getProcessors', () => {
it('Calls the processors GET endpoint of the ingest API', function () {
$httpBackend
.expectGET('/api/kibana/ingest/processors')
.respond('ok');
ingest.getProcessors();
$httpBackend.flush();
});
it('Throws user-friendly error when there is an error in the request', function (done) {
$httpBackend
.when('GET', '/api/kibana/ingest/processors')
.respond(404);
ingest.getProcessors()
.then(
() => {
throw new Error('expected an error response');
},
(error) => {
expect(error.message).to.be('Error fetching enabled processors');
done();
});
$httpBackend.flush();
});
});
});

View file

@@ -9,7 +9,7 @@ export default function IngestProvider($rootScope, $http, config, $q, Private, i
const ingestAPIPrefix = chrome.addBasePath('/api/kibana/ingest');
const refreshKibanaIndex = Private(RefreshKibanaIndexProvider);
this.save = function (indexPattern, pipeline) {
this.save = function (indexPattern) {
if (_.isEmpty(indexPattern)) {
throw new Error('index pattern is required');
}
@@ -17,9 +17,6 @@ export default function IngestProvider($rootScope, $http, config, $q, Private, i
const payload = {
index_pattern: keysToSnakeCaseShallow(indexPattern)
};
if (!_.isEmpty(pipeline)) {
payload.pipeline = _.map(pipeline, processor => keysToSnakeCaseShallow(processor));
}
return $http.post(`${ingestAPIPrefix}`, payload)
.then(() => {
@@ -44,39 +41,7 @@ export default function IngestProvider($rootScope, $http, config, $q, Private, i
});
};
this.simulate = function (pipeline) {
function pack(pipeline) {
const result = keysToSnakeCaseShallow(pipeline);
result.processors = _.map(result.processors, processor => keysToSnakeCaseShallow(processor));
return result;
}
function unpack(response) {
const data = response.data.map(result => keysToCamelCaseShallow(result));
return data;
}
return $http.post(`${ingestAPIPrefix}/simulate`, pack(pipeline))
.then(unpack)
.catch(err => {
return $q.reject(new Error('Error simulating pipeline'));
});
};
this.getProcessors = function () {
function unpack(response) {
return response.data;
}
return $http.get(`${ingestAPIPrefix}/processors`)
.then(unpack)
.catch(err => {
return $q.reject(new Error('Error fetching enabled processors'));
});
};
this.uploadCSV = function (file, indexPattern, delimiter, pipeline) {
this.uploadCSV = function (file, indexPattern, delimiter) {
if (_.isUndefined(file)) {
throw new Error('file is required');
}
@@ -91,9 +56,6 @@ export default function IngestProvider($rootScope, $http, config, $q, Private, i
if (!_.isUndefined(delimiter)) {
params.csv_delimiter = delimiter;
}
if (!_.isUndefined(pipeline)) {
params.pipeline = pipeline;
}
return $http.post(chrome.addBasePath(`/api/kibana/${indexPattern}/_data`), formData, {
params: params,
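
On the wire, the provider's two remaining write paths are now: save() POSTs { index_pattern: ... } to /api/kibana/ingest, and uploadCSV() POSTs multipart form data to /api/kibana/<indexPattern>/_data with only csv_delimiter as a query parameter. A hypothetical usage sketch of save(); the pattern object is a placeholder and extra index-pattern properties are omitted:

// Hypothetical usage of the slimmed-down provider.
const pattern = { id: 'logstash-*', fields: [{ name: '@timestamp', type: 'date' }] };
ingest.save(pattern)                        // POST /api/kibana/ingest with { index_pattern: ... }
  .then(() => console.log('pattern saved'))
  .catch((err) => console.error(err.message));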

View file

@@ -43,11 +43,6 @@ define(function (require) {
// Fields must have a name and type
request.post('/kibana/ingest')
.send(_.set(createTestData(), 'index_pattern.fields', [{count: 0}]))
.expect(400),
// should validate pipeline processors
request.post('/kibana/ingest')
.send(_.set(createTestData(), 'pipeline[0]', {bad: 'processor'}))
.expect(400)
]);
});
@@ -165,30 +160,6 @@ define(function (require) {
});
});
bdd.it('should create a pipeline if one is included in the request', function () {
return request.post('/kibana/ingest')
.send(createTestData())
.expect(204)
.then(function () {
return scenarioManager.client.transport.request({
path: '_ingest/pipeline/kibana-logstash-*',
method: 'GET'
})
.then(function (body) {
expect(body).to.have.property('kibana-logstash-*');
});
});
});
bdd.it('pipeline should be optional', function optionalPipeline() {
const payload = createTestData();
delete payload.pipeline;
return request.post('/kibana/ingest')
.send(payload)
.expect(204);
});
bdd.it('should return 409 conflict when a pattern with the given ID already exists', function patternConflict() {
return request.post('/kibana/ingest')
.send(createTestData())

View file

@@ -23,12 +23,6 @@ module.exports = function createTestData() {
'type': 'geo_point'
}
]
},
pipeline: [{
processor_id: 'processor1',
type_id: 'set',
target_field: 'foo',
value: 'bar'
}]
}
};
};