#7273 ingest to logstash DSL migration: gsub

Fixes #7311
Armin 2017-06-05 09:09:51 +02:00 committed by Armin Braun
parent 2a67f3e15e
commit 3e579a7785
8 changed files with 113 additions and 2 deletions

View file

@@ -0,0 +1,30 @@
package org.logstash.ingest;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import javax.script.Invocable;
import javax.script.ScriptEngine;
import javax.script.ScriptException;

public final class Gsub {

    private Gsub() {
        // Utility Wrapper for JS Script.
    }

    public static void main(final String... args) throws ScriptException, NoSuchMethodException {
        try {
            final ScriptEngine engine = JsUtil.engine();
            Files.write(Paths.get(args[1]), ((String) ((Invocable) engine).invokeFunction(
                "ingest_to_logstash_gsub",
                new String(
                    Files.readAllBytes(Paths.get(args[0])), StandardCharsets.UTF_8
                )
            )).getBytes(StandardCharsets.UTF_8));
        } catch (final IOException ex) {
            throw new IllegalStateException(ex);
        }
    }
}
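
Note: the wrapper reads the Ingest JSON named by args[0] and writes the converted Logstash config to args[1]. A minimal sketch of driving it programmatically, assuming the ingest-converter classes are on the classpath; both file names are hypothetical examples:

import org.logstash.ingest.Gsub;

public final class GsubExample {
    public static void main(final String... args) throws Exception {
        // First argument: source Ingest JSON; second argument: target Logstash conf.
        // File names here are hypothetical examples.
        Gsub.main("ingestGsubSimple.json", "logstashGsubSimple.conf");
    }
}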

View file

@@ -12,7 +12,9 @@ final class JsUtil {
     /**
      * Script names used by the converter in correct load order.
      */
-    private static final String[] SCRIPTS = {"shared", "date", "grok", "geoip", "pipeline", "convert"};
+    private static final String[] SCRIPTS =
+        {"shared", "date", "grok", "geoip", "gsub", "pipeline", "convert"};

     private JsUtil() {
         // Utility Class
View file

@@ -0,0 +1,29 @@
var IngestGsub = {
    has_gsub: function (processor) {
        return !!processor["gsub"];
    },
    gsub_hash: function (processor) {
        var gsub_data = processor["gsub"];
        return IngestConverter.create_field(
            "gsub",
            "[\n" + [IngestConverter.dots_to_square_brackets(gsub_data["field"]),
                gsub_data["pattern"], gsub_data["replacement"]].map(IngestConverter.quote_string)
                .join(", ") + "\n]"
        );
    }
};

/**
 * Converts Ingest JSON to LS mutate gsub filter.
 */
function ingest_to_logstash_gsub(json) {

    function map_processor(processor) {
        return IngestConverter.filter_hash(
            IngestConverter.create_hash("mutate", IngestGsub.gsub_hash(processor))
        );
    }

    return IngestConverter.filters_to_file(JSON.parse(json)["processors"].map(map_processor));
}
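
The JS side is only reachable through the Nashorn engine that JsUtil builds. A minimal sketch of exercising ingest_to_logstash_gsub directly, assuming shared.js (which defines IngestConverter) and gsub.js are available on disk under these hypothetical paths:

import java.io.FileReader;
import javax.script.Invocable;
import javax.script.ScriptEngine;
import javax.script.ScriptEngineManager;

public final class GsubScriptExample {
    public static void main(final String... args) throws Exception {
        final ScriptEngine engine = new ScriptEngineManager().getEngineByName("nashorn");
        // shared.js defines IngestConverter, which gsub.js depends on, so load order matters.
        engine.eval(new FileReader("shared.js"));
        engine.eval(new FileReader("gsub.js"));
        final String json =
            "{\"processors\":[{\"gsub\":{\"field\":\"field1\",\"pattern\":\"\\\\.\",\"replacement\":\"_\"}}]}";
        // Prints a Logstash filter block containing a mutate gsub section.
        System.out.println(((Invocable) engine).invokeFunction("ingest_to_logstash_gsub", json));
    }
}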

View file

@@ -24,7 +24,12 @@ function ingest_pipeline_to_logstash(json) {
         if (IngestConvert.has_convert(processor)) {
             filter_blocks.push(
                 IngestConverter.create_hash("mutate", IngestConvert.convert_hash(processor))
-            )
+            );
         }
+        if (IngestGsub.has_gsub(processor)) {
+            filter_blocks.push(
+                IngestConverter.create_hash("mutate", IngestGsub.gsub_hash(processor))
+            );
+        }
         return IngestConverter.join_hash_fields(filter_blocks);
     }
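
With this change, an ingest pipeline mixing convert and gsub processors yields one mutate block per processor. A minimal sketch of driving the combined converter, assuming the Pipeline wrapper in the same package (exercised by PipelineTest below) exposes the same main(input, output) contract as Gsub; file names are hypothetical:

import org.logstash.ingest.Pipeline;

public final class PipelineExample {
    public static void main(final String... args) throws Exception {
        // Converts a full ingest pipeline definition; each processor the converter
        // supports (date, grok, geoip, convert, gsub) becomes its own filter block.
        Pipeline.main("ingestMixedPipeline.json", "logstashMixedPipeline.conf");
    }
}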

View file

@@ -0,0 +1,25 @@
package org.logstash.ingest;

import java.util.Collections;
import org.junit.Test;

import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.runners.Parameterized.Parameters;

public final class GsubTest extends IngestTest {

    @Parameters
    public static Iterable<String> data() {
        return Collections.singletonList("GsubSimple");
    }

    @Test
    public void convertsGsubCorrectly() throws Exception {
        final String result = getResultPath(temp);
        Gsub.main(resourcePath(String.format("ingest%s.json", testCase)), result);
        assertThat(
            utf8File(result), is(utf8File(resourcePath(String.format("logstash%s.conf", testCase))))
        );
    }
}

View file

@@ -19,6 +19,7 @@ public final class PipelineTest extends IngestTest {
         DateTest.data().forEach(cases::add);
         GrokTest.data().forEach(cases::add);
         ConvertTest.data().forEach(cases::add);
+        GsubTest.data().forEach(cases::add);
         return cases;
     }

View file

@@ -0,0 +1,12 @@
{
  "description": "ExampleGsub",
  "processors": [
    {
      "gsub": {
        "field": "field1",
        "pattern": "\\.",
        "replacement": "_"
      }
    }
  ]
}

View file

@@ -0,0 +1,7 @@
filter {
   mutate {
      gsub => [
         "field1", "\.", "_"
      ]
   }
}