Java plugin API

Fixes #10216
Dan Hermann 2018-12-17 13:56:02 -06:00
parent dc174b2802
commit e89501e47e
66 changed files with 4430 additions and 367 deletions

View file

@ -136,7 +136,11 @@ dependencies {
compile group: 'com.google.guava', name: 'guava', version: '22.0'
// Do not upgrade this, later versions require GPL licensed code in javac-shaded that is
// Apache2 incompatible
compile 'com.google.googlejavaformat:google-java-format:1.1'
compile('com.google.googlejavaformat:google-java-format:1.1') {
exclude group: 'com.google.guava', module: 'guava'
}
compile 'org.javassist:javassist:3.22.0-GA'
compile 'com.google.guava:guava:20.0'
testCompile 'org.apache.logging.log4j:log4j-core:2.9.1:tests'
testCompile 'junit:junit:4.12'
testCompile 'net.javacrumbs.json-unit:json-unit:1.9.0'

View file

@ -136,7 +136,7 @@ module LogStash; module Config; module AST
events.each{|e| block.call(e)}
end
if @generated_objects[:#{name}].respond_to?(:flush)
if !@generated_objects[:#{name}].nil? && @generated_objects[:#{name}].has_flush
@periodic_flushers << @generated_objects[:#{name}_flush] if @generated_objects[:#{name}].periodic_flush
@shutdown_flushers << @generated_objects[:#{name}_flush]
end

View file

@ -1,69 +1,2 @@
# encoding: utf-8
#
module LogStash
class FilterDelegator
extend Forwardable
DELEGATED_METHODS = [
:register,
:close,
:threadsafe?,
:do_close,
:do_stop,
:periodic_flush,
:reloadable?
]
def_delegators :@filter, *DELEGATED_METHODS
attr_reader :id
def initialize(filter, id)
@klass = filter.class
@id = id
@filter = filter
# Scope the metrics to the plugin
namespaced_metric = filter.metric
@metric_events = namespaced_metric.namespace(:events)
@metric_events_in = @metric_events.counter(:in)
@metric_events_out = @metric_events.counter(:out)
@metric_events_time = @metric_events.counter(:duration_in_millis)
namespaced_metric.gauge(:name, config_name)
# Not all filters do buffering
define_flush_method if @filter.respond_to?(:flush)
end
def config_name
@klass.config_name
end
def multi_filter(events)
@metric_events_in.increment(events.size)
start_time = java.lang.System.nano_time
new_events = @filter.multi_filter(events)
@metric_events_time.increment((java.lang.System.nano_time - start_time) / 1_000_000)
# There is no guarantee in the context of a filter
# that EVENTS_IN == EVENTS_OUT; see the aggregate and
# split filters
c = new_events.count { |event| !event.cancelled? }
@metric_events_out.increment(c) if c > 0
new_events
end
private
def define_flush_method
define_singleton_method(:flush) do |options = {}|
# we also need to trace the number of events
# coming from a specific filter.
new_events = @filter.flush(options)
# Filter plugins that do buffering or spooling of events, like
# `Logstash-filter-aggregates`, can return `nil` and will flush on the next flush tick.
@metric_events_out.increment(new_events.size) if new_events && new_events.size > 0
new_events
end
end
end
end
# The contents of this file have been ported to Java. It is included for compatibility
# with plugins that directly include it.

View file

@ -25,6 +25,8 @@ module LogStash; class JavaPipeline < JavaBasePipeline
@worker_threads = []
@java_inputs_controller = org.logstash.execution.InputsController.new(lir_execution.javaInputs)
@drain_queue = settings.get_value("queue.drain") || settings.get("queue.type") == "memory"
@events_filtered = java.util.concurrent.atomic.LongAdder.new
@ -241,6 +243,7 @@ module LogStash; class JavaPipeline < JavaBasePipeline
def wait_inputs
@input_threads.each(&:join)
@java_inputs_controller.awaitStop
end
def start_inputs
@ -259,6 +262,7 @@ module LogStash; class JavaPipeline < JavaBasePipeline
# then after all input plugins are successfully registered, start them
inputs.each { |input| start_input(input) }
@java_inputs_controller.startInputs(self)
end
def start_input(plugin)
@ -324,6 +328,7 @@ module LogStash; class JavaPipeline < JavaBasePipeline
def stop_inputs
@logger.debug("Closing inputs", default_logging_keys)
inputs.each(&:do_stop)
@java_inputs_controller.stopInputs
@logger.debug("Closed inputs", default_logging_keys)
end

View file

@ -262,11 +262,14 @@ module LogStash module Plugins
# @param name [String] plugin name
# @return [Boolean] true if klass is a valid plugin for name
def is_a_plugin?(klass, name)
klass.ancestors.include?(LogStash::Plugin) && klass.respond_to?(:config_name) && klass.config_name == name
(klass.class == Java::JavaClass && klass.simple_name.downcase == name.gsub('_','')) ||
(klass.ancestors.include?(LogStash::Plugin) && klass.respond_to?(:config_name) && klass.config_name == name)
end
def add_plugin(type, name, klass)
if !exists?(type, name)
if klass.respond_to?("javaClass", true)
@registry[key_for(type, name)] = PluginSpecification.new(type, name, klass.javaClass)
elsif !exists?(type, name)
specification_klass = type == :universal ? UniversalPluginSpecification : PluginSpecification
@registry[key_for(type, name)] = specification_klass.new(type, name, klass)
else

View file

@ -51,7 +51,7 @@ describe LogStash::FilterDelegator do
end
it "defines a flush method" do
expect(subject.respond_to?(:flush)).to be_truthy
expect(subject.has_flush).to be_truthy
end
context "when the flush return events" do
@ -128,7 +128,7 @@ describe LogStash::FilterDelegator do
end
it "doesnt define a flush method" do
expect(subject.respond_to?(:flush)).to be_falsey
expect(subject.has_flush).to be_falsey
end
it "increments the in/out of the metric" do
@ -145,14 +145,4 @@ describe LogStash::FilterDelegator do
end
end
context "delegate methods to the original plugin" do
# I am not testing the behavior of these methods
# this is done in the plugin tests. I just want to make sure
# the proxy delegates the methods.
LogStash::FilterDelegator::DELEGATED_METHODS.each do |method|
it "delegate method: `#{method}` to the filter" do
expect(subject.respond_to?(method))
end
end
end
end

View file

@ -6,7 +6,7 @@ require "support/shared_contexts"
java_import org.logstash.RubyUtil
describe LogStash::JavaFilterDelegator do
describe LogStash::FilterDelegator do
class MockGauge
def increment(_)
@ -182,14 +182,4 @@ describe LogStash::JavaFilterDelegator do
end
end
context "delegate methods to the original plugin" do
# I am not testing the behavior of these methods
# this is done in the plugin tests. I just want to make sure
# the proxy delegates the methods.
LogStash::FilterDelegator::DELEGATED_METHODS.each do |method|
it "delegate method: `#{method}` to the filter" do
expect(subject.respond_to?(method))
end
end
end
end

View file

@ -0,0 +1,106 @@
package co.elastic.logstash.api;
import java.nio.file.Path;
import java.util.Collection;
import java.util.Map;
/**
* Configuration for Logstash Java plugins.
*/
public final class Configuration {
private final Map<String, Object> rawSettings;
/**
* @param raw Configuration Settings Map. Values are serialized.
*/
public Configuration(final Map<String, Object> raw) {
this.rawSettings = raw;
}
@SuppressWarnings("unchecked")
public <T> T get(final PluginConfigSpec<T> configSpec) {
if (rawSettings.containsKey(configSpec.name())) {
Object o = rawSettings.get(configSpec.name());
if (configSpec.type().isAssignableFrom(o.getClass())) {
return (T) o;
} else {
throw new IllegalStateException(
String.format("Setting value for '%s' of type '%s' incompatible with defined type of '%s'",
configSpec.name(), o.getClass(), configSpec.type()));
}
} else {
return configSpec.defaultValue();
}
}
public Object getRawValue(final PluginConfigSpec<?> configSpec) {
return rawSettings.get(configSpec.name());
}
public boolean contains(final PluginConfigSpec<?> configSpec) {
return rawSettings.containsKey(configSpec.name());
}
public Collection<String> allKeys() {
return rawSettings.keySet();
}
public static PluginConfigSpec<String> stringSetting(final String name) {
return new PluginConfigSpec<>(
name, String.class, null, false, false
);
}
public static PluginConfigSpec<String> stringSetting(final String name, final String defaultValue) {
return new PluginConfigSpec<>(
name, String.class, defaultValue, false, false
);
}
public static PluginConfigSpec<String> requiredStringSetting(final String name) {
return new PluginConfigSpec<>(name, String.class, null, false, true);
}
public static PluginConfigSpec<Long> numSetting(final String name) {
return new PluginConfigSpec<>(
name, Long.class, null, false, false
);
}
public static PluginConfigSpec<Long> numSetting(final String name, final long defaultValue) {
return new PluginConfigSpec<>(
name, Long.class, defaultValue, false, false
);
}
public static PluginConfigSpec<Path> pathSetting(final String name) {
return new PluginConfigSpec<>(name, Path.class, null, false, false);
}
public static PluginConfigSpec<Boolean> booleanSetting(final String name) {
return new PluginConfigSpec<>(name, Boolean.class, null, false, false);
}
@SuppressWarnings("unchecked")
public static PluginConfigSpec<Map<String, String>> hashSetting(final String name) {
return new PluginConfigSpec(name, Map.class, null, false, false);
}
@SuppressWarnings("unchecked")
public static <T> PluginConfigSpec<Map<String, T>> requiredFlatHashSetting(
final String name, Class<T> type) {
//TODO: enforce subtype
return new PluginConfigSpec(
name, Map.class, null, false, true
);
}
@SuppressWarnings("unchecked")
public static PluginConfigSpec<Map<String, Configuration>> requiredNestedHashSetting(
final String name, final Collection<PluginConfigSpec<?>> spec) {
return new PluginConfigSpec(
name, Map.class, null, false, true, spec
);
}
}
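
For illustration, a plugin might declare and read its settings through this class as follows. This is a minimal sketch; the setting names and values are hypothetical and not part of this commit:

import co.elastic.logstash.api.Configuration;
import co.elastic.logstash.api.PluginConfigSpec;
import java.util.HashMap;
import java.util.Map;

class ConfigurationSketch {
    // Hypothetical settings: "host" falls back to its default, "port" is supplied.
    static final PluginConfigSpec<String> HOST = Configuration.stringSetting("host", "localhost");
    static final PluginConfigSpec<Long> PORT = Configuration.numSetting("port", 5044);

    public static void main(String[] args) {
        Map<String, Object> raw = new HashMap<>();
        raw.put("port", 9600L); // as parsed from the pipeline definition
        Configuration config = new Configuration(raw);
        String host = config.get(HOST); // "localhost": key absent, default returned
        long port = config.get(PORT);   // 9600: typed lookup via the spec
    }
}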

View file

@ -0,0 +1,13 @@
package co.elastic.logstash.api;
import org.logstash.common.io.DeadLetterQueueWriter;
/**
* Holds Logstash Environment.
*/
public final class Context {
public DeadLetterQueueWriter dlqWriter() {
return null;
}
}

View file

@ -0,0 +1,16 @@
package co.elastic.logstash.api;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
* Logstash plugin annotation for finding plugins on the classpath and setting their name as used
* in the configuration syntax.
*/
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
public @interface LogstashPlugin {
String name();
}

View file

@ -0,0 +1,8 @@
package co.elastic.logstash.api;
import java.util.Collection;
public interface Plugin {
Collection<PluginConfigSpec<?>> configSchema();
}
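
Putting the annotation and the base interface together, a skeleton plugin might look like the following sketch (class and plugin names are hypothetical):

import co.elastic.logstash.api.LogstashPlugin;
import co.elastic.logstash.api.Plugin;
import co.elastic.logstash.api.PluginConfigSpec;
import java.util.Collection;
import java.util.Collections;

// Discovered on the classpath via @LogstashPlugin; referenced in a pipeline as "java_example".
@LogstashPlugin(name = "java_example")
public class JavaExample implements Plugin {
    @Override
    public Collection<PluginConfigSpec<?>> configSchema() {
        return Collections.emptyList(); // no plugin-specific settings in this sketch
    }
}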

View file

@ -0,0 +1,64 @@
package co.elastic.logstash.api;
import java.util.Collection;
import java.util.Collections;
import java.util.Map;
public final class PluginConfigSpec<T> {
private final String name;
private final Class<T> type;
private final boolean deprecated;
private final boolean required;
private final T defaultValue;
private final Collection<PluginConfigSpec<?>> children;
public PluginConfigSpec(final String name, final Class<T> type,
final T defaultValue, final boolean deprecated, final boolean required) {
this(name, type, defaultValue, deprecated, required, Collections.emptyList());
}
public PluginConfigSpec(final String name, final Class<T> type,
final T defaultValue, final boolean deprecated, final boolean required,
final Collection<PluginConfigSpec<?>> children) {
this.name = name;
this.type = type;
this.defaultValue = defaultValue;
this.deprecated = deprecated;
this.required = required;
if (!children.isEmpty() && !Map.class.isAssignableFrom(type)) {
throw new IllegalArgumentException("Only map type settings can have defined children.");
}
this.children = children;
}
public Collection<PluginConfigSpec<?>> children() {
return children;
}
public boolean deprecated() {
return this.deprecated;
}
public boolean required() {
return this.required;
}
public T defaultValue() {
return this.defaultValue;
}
public String name() {
return name;
}
public Class<T> type() {
return type;
}
}
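
Since only map-typed specs may declare children, a nested setting would be built through the Configuration factories, for example as in this sketch (setting names hypothetical):

import co.elastic.logstash.api.Configuration;
import co.elastic.logstash.api.PluginConfigSpec;
import java.util.Arrays;
import java.util.Collection;
import java.util.Map;

class NestedSpecSketch {
    // Child specs describing the keys allowed inside the nested map.
    static final Collection<PluginConfigSpec<?>> TLS_CHILDREN = Arrays.asList(
        Configuration.stringSetting("certificate"),
        Configuration.stringSetting("key"));

    // A required map setting named "tls" whose contents are described by the children.
    static final PluginConfigSpec<Map<String, Configuration>> TLS =
        Configuration.requiredNestedHashSetting("tls", TLS_CHILDREN);
}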

View file

@ -0,0 +1,107 @@
package co.elastic.logstash.api;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
public final class PluginHelper {
public static final PluginConfigSpec<Map<String, String>> ADD_FIELD_CONFIG =
Configuration.hashSetting("add_field");
//public static final PluginConfigSpec<Array> ADD_TAG_CONFIG =
// Configuration.arraySetting("add_tag");
public static final PluginConfigSpec<String> CODEC_CONFIG =
Configuration.stringSetting("codec");
public static final PluginConfigSpec<Boolean> ENABLE_METRIC_CONFIG =
Configuration.booleanSetting("enable_metric");
public static final PluginConfigSpec<String> ID_CONFIG =
Configuration.stringSetting("id");
public static final PluginConfigSpec<Boolean> PERIODIC_FLUSH_CONFIG =
Configuration.booleanSetting("periodic_flush");
//public static final PluginConfigSpec<Array> REMOVE_FIELD_CONFIG =
// Configuration.arraySetting("remove_field");
//public static final PluginConfigSpec<Array> REMOVE_TAG_CONFIG =
// Configuration.arraySetting("remove_tag");
//public static final PluginConfigSpec<Array> TAGS_CONFIG =
// Configuration.arraySetting("tags");
public static final PluginConfigSpec<String> TYPE_CONFIG =
Configuration.stringSetting("type");
/**
* Returns a list of the options that are common to all input plugins.
*/
public static Collection<PluginConfigSpec<?>> commonInputOptions() {
return commonInputOptions(Collections.EMPTY_LIST);
}
/**
* Combines the provided list of options with the options that are common to all input plugins,
* ignoring any already present in the provided list. This allows plugins to override
* defaults and other values on the common config options.
*/
public static Collection<PluginConfigSpec<?>> commonInputOptions(Collection<PluginConfigSpec<?>> options) {
return combineOptions(options, Arrays.asList(ADD_FIELD_CONFIG, ENABLE_METRIC_CONFIG,
CODEC_CONFIG, ID_CONFIG, /*TAGS_CONFIG,*/ TYPE_CONFIG));
}
/**
* Returns a list of the options that are common to all output plugins.
*/
public static Collection<PluginConfigSpec<?>> commonOutputOptions() {
return commonOutputOptions(Collections.EMPTY_LIST);
}
/**
* Combines the provided list of options with the options that are common to all output plugins,
* ignoring any already present in the provided list. This allows plugins to override
* defaults and other values on the common config options.
*/
public static Collection<PluginConfigSpec<?>> commonOutputOptions(Collection<PluginConfigSpec<?>> options) {
return combineOptions(options, Arrays.asList(ENABLE_METRIC_CONFIG, CODEC_CONFIG, ID_CONFIG));
}
/**
* Returns a list of the options that are common to all filter plugins.
*/
public static Collection<PluginConfigSpec<?>> commonFilterOptions() {
return commonFilterOptions(Collections.EMPTY_LIST);
}
/**
* Combines the provided list of options with the options that are common to all filter plugins,
* ignoring any already present in the provided list. This allows plugins to override
* defaults and other values on the common config options.
*/
public static Collection<PluginConfigSpec<?>> commonFilterOptions(Collection<PluginConfigSpec<?>> options) {
return combineOptions(options, Arrays.asList(ADD_FIELD_CONFIG, /*ADD_TAG_CONFIG,*/
ENABLE_METRIC_CONFIG, ID_CONFIG, PERIODIC_FLUSH_CONFIG /*, REMOVE_FIELD_CONFIG,
REMOVE_TAG_CONFIG*/));
}
private static Collection<PluginConfigSpec<?>> combineOptions(
Collection<PluginConfigSpec<?>> providedOptions,
Collection<PluginConfigSpec<?>> commonOptions) {
List<PluginConfigSpec<?>> options = new ArrayList<>();
options.addAll(providedOptions);
for (PluginConfigSpec pcs : commonOptions) {
if (!options.contains(pcs)) {
options.add(pcs);
}
}
return options;
}
}
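
For illustration, a filter plugin could layer one custom option on top of the common set like this (the "field" option is hypothetical); combineOptions skips any common option already supplied:

import co.elastic.logstash.api.Configuration;
import co.elastic.logstash.api.PluginConfigSpec;
import co.elastic.logstash.api.PluginHelper;
import java.util.Arrays;
import java.util.Collection;

class SchemaSketch {
    // Hypothetical plugin-specific option with a default.
    static final PluginConfigSpec<String> FIELD = Configuration.stringSetting("field", "message");

    static Collection<PluginConfigSpec<?>> configSchema() {
        // "field" plus the common add_field, enable_metric, id, and periodic_flush options.
        return PluginHelper.commonFilterOptions(Arrays.asList(FIELD));
    }
}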

View file

@ -0,0 +1,48 @@
package co.elastic.logstash.api.v0;
import co.elastic.logstash.api.Plugin;
import org.logstash.Event;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.util.Map;
import java.util.function.Consumer;
public interface Codec extends Plugin {
/**
* Decodes events from the specified {@link ByteBuffer} and passes them to the provided
* {@link Consumer}.
*
* <li>The client (typically an {@link Input}) must provide a {@link ByteBuffer} that
* is ready for reading, with {@link ByteBuffer#position} indicating the next
* position to read and {@link ByteBuffer#limit} indicating the first byte in the
* buffer that is not safe to read.</li>
*
* <li>Implementations of {@link Codec} must ensure that {@link ByteBuffer#position}
* reflects the last-read position before returning control.</li>
*
* <li>The client is then responsible for returning the buffer
* to write mode via either {@link ByteBuffer#clear} or {@link ByteBuffer#compact} before
* resuming writes.</li>
*
* @param buffer Input buffer from which events will be decoded.
* @param eventConsumer Consumer to which decoded events will be passed.
*/
void decode(ByteBuffer buffer, Consumer<Map<String, Object>> eventConsumer);
/**
* Decodes all remaining events from the specified {@link ByteBuffer} along with any internal
* state that may remain after previous calls to {@link #decode(ByteBuffer, Consumer)}.
* @param buffer Input buffer from which events will be decoded.
* @param eventConsumer Consumer to which decoded events will be passed.
*/
void flush(ByteBuffer buffer, Consumer<Map<String, Object>> eventConsumer);
/**
* Encodes an {@link Event} and writes it to the specified {@link OutputStream}.
* @param event The event to encode.
* @param output The stream to which the encoded event should be written.
*/
void encode(Event event, OutputStream output);
}
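
As an illustrative sketch of the buffer contract described above, an input might drive a codec like this (the read loop and names are hypothetical, not part of this commit):

import co.elastic.logstash.api.v0.Codec;
import org.logstash.execution.queue.QueueWriter;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.ReadableByteChannel;

class CodecDriverSketch {
    // Fills the buffer in write mode, flips it for the codec, then compacts so
    // undecoded trailing bytes survive into the next read.
    static void drive(ReadableByteChannel channel, Codec codec, QueueWriter queue) throws IOException {
        ByteBuffer buffer = ByteBuffer.allocate(64 * 1024);
        while (channel.read(buffer) != -1) {
            buffer.flip();                     // ready for reading by the codec
            codec.decode(buffer, queue::push); // position advances to the last-read byte
            buffer.compact();                  // back to write mode, remainder preserved
        }
        buffer.flip();
        codec.flush(buffer, queue::push);      // drain buffered state at end of stream
    }
}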

View file

@ -0,0 +1,15 @@
package co.elastic.logstash.api.v0;
import co.elastic.logstash.api.Plugin;
import org.logstash.Event;
import java.util.Collection;
/**
* A Logstash Filter.
*/
public interface Filter extends Plugin {
Collection<Event> filter(Collection<Event> events);
}
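
A minimal filter against this interface might look like the following sketch (plugin and class names hypothetical):

import co.elastic.logstash.api.LogstashPlugin;
import co.elastic.logstash.api.PluginConfigSpec;
import co.elastic.logstash.api.PluginHelper;
import co.elastic.logstash.api.v0.Filter;
import org.logstash.Event;
import java.util.Collection;

@LogstashPlugin(name = "java_uppercase")
public class JavaUppercase implements Filter {
    @Override
    public Collection<Event> filter(Collection<Event> events) {
        // Uppercase the "message" field when present; other events pass through unchanged.
        for (Event e : events) {
            Object msg = e.getField("message");
            if (msg instanceof String) {
                e.setField("message", ((String) msg).toUpperCase());
            }
        }
        return events;
    }

    @Override
    public Collection<PluginConfigSpec<?>> configSchema() {
        return PluginHelper.commonFilterOptions();
    }
}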

View file

@ -0,0 +1,30 @@
package co.elastic.logstash.api.v0;
import co.elastic.logstash.api.Plugin;
import org.logstash.execution.queue.QueueWriter;
/**
* A Logstash Pipeline Input pushes to a {@link QueueWriter}.
*/
public interface Input extends Plugin {
/**
* Start pushing {@link org.logstash.Event}s to the given {@link QueueWriter}.
* @param writer Queue Writer to push to
*/
void start(QueueWriter writer);
/**
* Stop the input.
* Stopping happens asynchronously; use {@link #awaitStop()} to make sure that the input has
* finished.
*/
void stop();
/**
* Blocks until the input execution has finished.
* @throws InterruptedException On Interrupt
*/
void awaitStop() throws InterruptedException;
}
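
For illustration, a trivial input under these lifecycle rules might look like this (names and the event shape are hypothetical):

import co.elastic.logstash.api.LogstashPlugin;
import co.elastic.logstash.api.PluginConfigSpec;
import co.elastic.logstash.api.PluginHelper;
import co.elastic.logstash.api.v0.Input;
import org.logstash.execution.queue.QueueWriter;
import java.util.Collection;
import java.util.Collections;
import java.util.concurrent.CountDownLatch;

@LogstashPlugin(name = "java_counter")
public class JavaCounter implements Input {
    private final CountDownLatch done = new CountDownLatch(1);
    private volatile boolean stopped;

    @Override
    public void start(QueueWriter writer) {
        try {
            for (long n = 0; !stopped && n < 1000; n++) {
                writer.push(Collections.singletonMap("count", (Object) n));
            }
        } finally {
            done.countDown(); // lets awaitStop() return
        }
    }

    @Override
    public void stop() {
        stopped = true; // asynchronous; pair with awaitStop()
    }

    @Override
    public void awaitStop() throws InterruptedException {
        done.await();
    }

    @Override
    public Collection<PluginConfigSpec<?>> configSchema() {
        return PluginHelper.commonInputOptions();
    }
}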

View file

@ -0,0 +1,23 @@
package co.elastic.logstash.api.v0;
import co.elastic.logstash.api.Plugin;
import org.logstash.Event;
import java.util.Collection;
/**
* A Logstash Pipeline Output.
*/
public interface Output extends Plugin {
/**
* Outputs Collection of {@link Event}.
* @param events Events to Output
*/
void output(Collection<Event> events);
void stop();
void awaitStop() throws InterruptedException;
}
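
And a minimal output sketch against this interface (again hypothetical, printing events to stdout):

import co.elastic.logstash.api.LogstashPlugin;
import co.elastic.logstash.api.PluginConfigSpec;
import co.elastic.logstash.api.PluginHelper;
import co.elastic.logstash.api.v0.Output;
import org.logstash.Event;
import java.util.Collection;
import java.util.concurrent.CountDownLatch;

@LogstashPlugin(name = "java_stdout_sketch")
public class JavaStdoutSketch implements Output {
    private final CountDownLatch done = new CountDownLatch(1);

    @Override
    public void output(Collection<Event> events) {
        events.forEach(System.out::println); // one event per line
    }

    @Override
    public void stop() {
        done.countDown();
    }

    @Override
    public void awaitStop() throws InterruptedException {
        done.await();
    }

    @Override
    public Collection<PluginConfigSpec<?>> configSchema() {
        return PluginHelper.commonOutputOptions();
    }
}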

View file

@ -1,6 +1,5 @@
package org.logstash;
import java.util.stream.Stream;
import org.jruby.NativeException;
import org.jruby.Ruby;
import org.jruby.RubyClass;
@ -13,8 +12,10 @@ import org.logstash.ackedqueue.ext.JRubyAckedQueueExt;
import org.logstash.ackedqueue.ext.JRubyWrappedAckedQueueExt;
import org.logstash.common.AbstractDeadLetterQueueWriterExt;
import org.logstash.common.BufferedTokenizerExt;
import org.logstash.config.ir.compiler.AbstractFilterDelegatorExt;
import org.logstash.config.ir.compiler.AbstractOutputDelegatorExt;
import org.logstash.config.ir.compiler.FilterDelegatorExt;
import org.logstash.config.ir.compiler.JavaFilterDelegatorExt;
import org.logstash.config.ir.compiler.JavaOutputDelegatorExt;
import org.logstash.config.ir.compiler.OutputDelegatorExt;
import org.logstash.config.ir.compiler.OutputStrategyExt;
@ -52,6 +53,8 @@ import org.logstash.plugins.HooksRegistryExt;
import org.logstash.plugins.PluginFactoryExt;
import org.logstash.plugins.UniversalPluginExt;
import java.util.stream.Stream;
/**
* Utilities around interaction with the {@link Ruby} runtime.
*/
@ -98,10 +101,14 @@ public final class RubyUtil {
public static final RubyClass ABSTRACT_OUTPUT_DELEGATOR_CLASS;
public static final RubyClass ABSTRACT_FILTER_DELEGATOR_CLASS;
public static final RubyClass RUBY_OUTPUT_DELEGATOR_CLASS;
public static final RubyClass JAVA_OUTPUT_DELEGATOR_CLASS;
public static final RubyClass JAVA_FILTER_DELEGATOR_CLASS;
public static final RubyClass FILTER_DELEGATOR_CLASS;
public static final RubyClass OUTPUT_STRATEGY_REGISTRY;
@ -200,6 +207,8 @@ public final class RubyUtil {
public static final RubyClass JAVA_PIPELINE_CLASS;
public static final RubyClass JAVA_INPUT_WRAPPER_CLASS;
/**
* Logstash Ruby Module.
*/
@ -417,8 +426,18 @@ public final class RubyUtil {
ABSTRACT_OUTPUT_DELEGATOR_CLASS, JavaOutputDelegatorExt::new,
JavaOutputDelegatorExt.class
);
ABSTRACT_FILTER_DELEGATOR_CLASS = LOGSTASH_MODULE.defineClassUnder(
"AbstractFilterDelegator", RUBY.getObject(),
ObjectAllocator.NOT_ALLOCATABLE_ALLOCATOR
);
ABSTRACT_FILTER_DELEGATOR_CLASS.defineAnnotatedMethods(AbstractFilterDelegatorExt.class);
JAVA_FILTER_DELEGATOR_CLASS = setupLogstashClass(
ABSTRACT_FILTER_DELEGATOR_CLASS, JavaFilterDelegatorExt::new,
JavaFilterDelegatorExt.class
);
FILTER_DELEGATOR_CLASS = setupLogstashClass(
FilterDelegatorExt::new, FilterDelegatorExt.class
ABSTRACT_FILTER_DELEGATOR_CLASS, FilterDelegatorExt::new,
FilterDelegatorExt.class
);
final RubyModule loggingModule = LOGSTASH_MODULE.defineOrGetModuleUnder("Logging");
LOGGER = loggingModule.defineClassUnder("Logger", RUBY.getObject(), LoggerExt::new);
@ -433,6 +452,8 @@ public final class RubyUtil {
JAVA_PIPELINE_CLASS = setupLogstashClass(
ABSTRACT_PIPELINE_CLASS, JavaBasePipelineExt::new, JavaBasePipelineExt.class
);
JAVA_INPUT_WRAPPER_CLASS = setupLogstashClass(PluginFactoryExt.JavaInputWrapperExt::new,
PluginFactoryExt.JavaInputWrapperExt.class);
final RubyModule json = LOGSTASH_MODULE.defineOrGetModuleUnder("Json");
final RubyClass stdErr = RUBY.getStandardError();
LOGSTASH_ERROR = LOGSTASH_MODULE.defineClassUnder(

View file

@ -105,6 +105,10 @@ public final class JRubyAckedQueueExt extends RubyObject {
}
}
public void write(Event event) throws IOException {
this.queue.write(event);
}
@JRubyMethod(name = "read_batch", required = 2)
public IRubyObject ruby_read_batch(ThreadContext context, IRubyObject limit,
IRubyObject timeout) {

View file

@ -1,5 +1,34 @@
package org.logstash.config.ir;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.jruby.RubyHash;
import org.jruby.runtime.builtin.IRubyObject;
import org.logstash.RubyUtil;
import org.logstash.Rubyfier;
import org.logstash.common.SourceWithMetadata;
import org.logstash.config.ir.compiler.AbstractFilterDelegatorExt;
import org.logstash.config.ir.compiler.AbstractOutputDelegatorExt;
import org.logstash.config.ir.compiler.ComputeStepSyntaxElement;
import org.logstash.config.ir.compiler.Dataset;
import org.logstash.config.ir.compiler.DatasetCompiler;
import org.logstash.config.ir.compiler.EventCondition;
import org.logstash.config.ir.compiler.RubyIntegration;
import org.logstash.config.ir.compiler.SplitDataset;
import org.logstash.config.ir.graph.IfVertex;
import org.logstash.config.ir.graph.PluginVertex;
import org.logstash.config.ir.graph.Vertex;
import org.logstash.config.ir.imperative.PluginStatement;
import co.elastic.logstash.api.v0.Input;
import co.elastic.logstash.api.Configuration;
import co.elastic.logstash.api.Context;
import org.logstash.plugins.PluginFactoryExt;
import org.logstash.plugins.discovery.PluginRegistry;
import org.logstash.ext.JrubyEventExtLibrary;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
@ -8,26 +37,6 @@ import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.jruby.RubyHash;
import org.jruby.runtime.builtin.IRubyObject;
import org.logstash.RubyUtil;
import org.logstash.Rubyfier;
import org.logstash.common.SourceWithMetadata;
import org.logstash.config.ir.compiler.AbstractOutputDelegatorExt;
import org.logstash.config.ir.compiler.ComputeStepSyntaxElement;
import org.logstash.config.ir.compiler.Dataset;
import org.logstash.config.ir.compiler.DatasetCompiler;
import org.logstash.config.ir.compiler.EventCondition;
import org.logstash.config.ir.compiler.FilterDelegatorExt;
import org.logstash.config.ir.compiler.RubyIntegration;
import org.logstash.config.ir.compiler.SplitDataset;
import org.logstash.config.ir.graph.IfVertex;
import org.logstash.config.ir.graph.PluginVertex;
import org.logstash.config.ir.graph.Vertex;
import org.logstash.config.ir.imperative.PluginStatement;
import org.logstash.ext.JrubyEventExtLibrary;
/**
* <h3>Compiled Logstash Pipeline Configuration.</h3>
@ -50,10 +59,15 @@ public final class CompiledPipeline {
*/
private final Collection<IRubyObject> inputs;
/**
* Configured Java Inputs.
*/
private final Collection<Input> javaInputs = new ArrayList<>();
/**
* Configured Filters, indexed by their ID as returned by {@link PluginVertex#getId()}.
*/
private final Map<String, FilterDelegatorExt> filters;
private final Map<String, AbstractFilterDelegatorExt> filters;
/**
* Configured outputs.
@ -83,7 +97,7 @@ public final class CompiledPipeline {
return Collections.unmodifiableCollection(outputs.values());
}
public Collection<FilterDelegatorExt> filters() {
public Collection<AbstractFilterDelegatorExt> filters() {
return Collections.unmodifiableCollection(filters.values());
}
@ -91,6 +105,10 @@ public final class CompiledPipeline {
return inputs;
}
public Collection<Input> javaInputs() {
return javaInputs;
}
/**
* This method contains the actual compilation of the {@link Dataset} representing the
* underlying pipeline from the Queue to the outputs.
@ -101,7 +119,7 @@ public final class CompiledPipeline {
}
/**
* Sets up all Ruby outputs learnt from {@link PipelineIR}.
* Sets up all outputs learned from {@link PipelineIR}.
*/
private Map<String, AbstractOutputDelegatorExt> setupOutputs() {
final Collection<PluginVertex> outs = pipelineIR.getOutputPluginVertices();
@ -110,8 +128,8 @@ public final class CompiledPipeline {
final PluginDefinition def = v.getPluginDefinition();
final SourceWithMetadata source = v.getSourceWithMetadata();
res.put(v.getId(), pluginFactory.buildOutput(
RubyUtil.RUBY.newString(def.getName()), RubyUtil.RUBY.newFixnum(source.getLine()),
RubyUtil.RUBY.newFixnum(source.getColumn()), convertArgs(def)
RubyUtil.RUBY.newString(def.getName()), RubyUtil.RUBY.newFixnum(source.getLine()),
RubyUtil.RUBY.newFixnum(source.getColumn()), convertArgs(def), def.getArguments()
));
});
return res;
@ -120,12 +138,17 @@ public final class CompiledPipeline {
/**
* Sets up all Ruby filters learnt from {@link PipelineIR}.
*/
private Map<String, FilterDelegatorExt> setupFilters() {
private Map<String, AbstractFilterDelegatorExt> setupFilters() {
final Collection<PluginVertex> filterPlugins = pipelineIR.getFilterPluginVertices();
final Map<String, FilterDelegatorExt> res =
new HashMap<>(filterPlugins.size(), 1.0F);
for (final PluginVertex plugin : filterPlugins) {
res.put(plugin.getId(), buildFilter(plugin));
final Map<String, AbstractFilterDelegatorExt> res = new HashMap<>(filterPlugins.size(), 1.0F);
for (final PluginVertex vertex : filterPlugins) {
final PluginDefinition def = vertex.getPluginDefinition();
final SourceWithMetadata source = vertex.getSourceWithMetadata();
res.put(vertex.getId(), pluginFactory.buildFilter(
RubyUtil.RUBY.newString(def.getName()), RubyUtil.RUBY.newFixnum(source.getLine()),
RubyUtil.RUBY.newFixnum(source.getColumn()), convertArgs(def), def.getArguments()
));
}
return res;
}
@ -138,11 +161,26 @@ public final class CompiledPipeline {
final Collection<IRubyObject> nodes = new HashSet<>(vertices.size());
vertices.forEach(v -> {
final PluginDefinition def = v.getPluginDefinition();
final SourceWithMetadata source = v.getSourceWithMetadata();
nodes.add(pluginFactory.buildInput(
RubyUtil.RUBY.newString(def.getName()), RubyUtil.RUBY.newFixnum(source.getLine()),
RubyUtil.RUBY.newFixnum(source.getColumn()), convertArgs(def)
));
final Class<Input> cls = PluginRegistry.getInputClass(def.getName());
if (cls != null) {
try {
final Constructor<Input> ctor = cls.getConstructor(Configuration.class, Context.class);
javaInputs.add(ctor.newInstance(new Configuration(def.getArguments()), new Context()));
} catch (NoSuchMethodException | IllegalAccessException | InstantiationException | InvocationTargetException ex) {
throw new IllegalStateException(ex);
}
} else {
final SourceWithMetadata source = v.getSourceWithMetadata();
IRubyObject o = pluginFactory.buildInput(
RubyUtil.RUBY.newString(def.getName()), RubyUtil.RUBY.newFixnum(source.getLine()),
RubyUtil.RUBY.newFixnum(source.getColumn()), convertArgs(def), def.getArguments());
if (o instanceof PluginFactoryExt.JavaInputWrapperExt) {
javaInputs.add(((PluginFactoryExt.JavaInputWrapperExt)o).getInput());
} else {
nodes.add(o);
}
}
});
return nodes;
}
@ -164,7 +202,8 @@ public final class CompiledPipeline {
final PluginDefinition codec = ((PluginStatement) value).getPluginDefinition();
toput = pluginFactory.buildCodec(
RubyUtil.RUBY.newString(codec.getName()),
Rubyfier.deep(RubyUtil.RUBY, codec.getArguments())
Rubyfier.deep(RubyUtil.RUBY, codec.getArguments()),
def.getArguments()
);
} else {
toput = value;
@ -175,23 +214,9 @@ public final class CompiledPipeline {
}
/**
* Compiles a {@link FilterDelegatorExt} from a given {@link PluginVertex}.
* @param vertex Filter {@link PluginVertex}
* @return Compiled {@link FilterDelegatorExt}
*/
private FilterDelegatorExt buildFilter(final PluginVertex vertex) {
final PluginDefinition def = vertex.getPluginDefinition();
final SourceWithMetadata source = vertex.getSourceWithMetadata();
return pluginFactory.buildFilter(
RubyUtil.RUBY.newString(def.getName()), RubyUtil.RUBY.newFixnum(source.getLine()),
RubyUtil.RUBY.newFixnum(source.getColumn()), convertArgs(def)
);
}
/**
* Checks if a certain {@link Vertex} represents a {@link FilterDelegatorExt}.
* Checks if a certain {@link Vertex} represents an {@link AbstractFilterDelegatorExt}.
* @param vertex Vertex to check
* @return True iff {@link Vertex} represents a {@link FilterDelegatorExt}
* @return True iff {@link Vertex} represents an {@link AbstractFilterDelegatorExt}
*/
private boolean isFilter(final Vertex vertex) {
return filters.containsKey(vertex.getId());

View file

@ -0,0 +1,167 @@
package org.logstash.config.ir.compiler;
import org.jruby.Ruby;
import org.jruby.RubyArray;
import org.jruby.RubyClass;
import org.jruby.RubyHash;
import org.jruby.RubyObject;
import org.jruby.RubyString;
import org.jruby.anno.JRubyClass;
import org.jruby.anno.JRubyMethod;
import org.jruby.runtime.ThreadContext;
import org.jruby.runtime.builtin.IRubyObject;
import org.logstash.RubyUtil;
import org.logstash.execution.WorkerLoop;
import org.logstash.ext.JrubyEventExtLibrary;
import org.logstash.instrument.metrics.AbstractNamespacedMetricExt;
import org.logstash.instrument.metrics.MetricKeys;
import org.logstash.instrument.metrics.counter.LongCounter;
import java.util.Collection;
import java.util.concurrent.TimeUnit;
@JRubyClass(name = "AbstractFilterDelegator")
public abstract class AbstractFilterDelegatorExt extends RubyObject {
private static final long serialVersionUID = 1L;
protected AbstractNamespacedMetricExt metricEvents;
protected RubyString id;
protected LongCounter eventMetricOut;
protected LongCounter eventMetricIn;
protected LongCounter eventMetricTime;
public AbstractFilterDelegatorExt(final Ruby runtime, final RubyClass metaClass) {
super(runtime, metaClass);
}
protected void initMetrics(final String id, final AbstractNamespacedMetricExt namespacedMetric) {
final ThreadContext context = RubyUtil.RUBY.getCurrentContext();
this.id = RubyString.newString(context.runtime, id);
synchronized(namespacedMetric.getMetric()) {
metricEvents = namespacedMetric.namespace(context, MetricKeys.EVENTS_KEY);
eventMetricOut = LongCounter.fromRubyBase(metricEvents, MetricKeys.OUT_KEY);
eventMetricIn = LongCounter.fromRubyBase(metricEvents, MetricKeys.IN_KEY);
eventMetricTime = LongCounter.fromRubyBase(metricEvents, MetricKeys.DURATION_IN_MILLIS_KEY);
namespacedMetric.gauge(context, MetricKeys.NAME_KEY, configName(context));
}
}
@JRubyMethod
public IRubyObject register(final ThreadContext context) {
doRegister(context);
return context.nil;
}
protected abstract void doRegister(final ThreadContext context);
@JRubyMethod
public IRubyObject close(final ThreadContext context) {
return closeImpl(context);
}
protected abstract IRubyObject closeImpl(final ThreadContext context);
@JRubyMethod(name = "do_close")
public IRubyObject doClose(final ThreadContext context) {
return doCloseImpl(context);
}
protected abstract IRubyObject doCloseImpl(final ThreadContext context);
@JRubyMethod(name = "do_stop")
public IRubyObject doStop(final ThreadContext context) {
return doStopImpl(context);
}
protected abstract IRubyObject doStopImpl(final ThreadContext context);
@JRubyMethod(name = "reloadable?")
public IRubyObject isReloadable(final ThreadContext context) {
return reloadable(context);
}
protected abstract IRubyObject reloadable(final ThreadContext context);
@JRubyMethod(name = "threadsafe?")
public IRubyObject concurrency(final ThreadContext context) {
return getConcurrency(context);
}
protected abstract IRubyObject getConcurrency(final ThreadContext context);
@JRubyMethod(name = "config_name")
public IRubyObject configName(final ThreadContext context) {
return getConfigName(context);
}
protected abstract IRubyObject getConfigName(ThreadContext context);
@JRubyMethod(name = "id")
public IRubyObject getId() {
return id;
}
@JRubyMethod(name = "multi_filter")
@SuppressWarnings("unchecked")
public RubyArray multiFilter(final IRubyObject input) {
RubyArray batch = (RubyArray) input;
eventMetricIn.increment((long) batch.size());
final long start = System.nanoTime();
final RubyArray result = doMultiFilter(batch);
eventMetricTime.increment(TimeUnit.MILLISECONDS.convert(System.nanoTime() - start, TimeUnit.NANOSECONDS));
int count = 0;
for (final JrubyEventExtLibrary.RubyEvent event : (Collection<JrubyEventExtLibrary.RubyEvent>) result) {
if (!event.getEvent().isCancelled()) {
++count;
}
}
eventMetricOut.increment((long) count);
return result;
}
protected abstract RubyArray doMultiFilter(final RubyArray batch);
@JRubyMethod(name = "flush")
public RubyArray flush(final IRubyObject input) {
RubyHash options = (RubyHash) input;
final ThreadContext context = WorkerLoop.THREAD_CONTEXT.get();
final IRubyObject newEvents = doFlush(context, options);
final RubyArray result;
if (newEvents.isNil()) {
result = RubyArray.newEmptyArray(context.runtime);
} else {
result = (RubyArray) newEvents;
eventMetricOut.increment((long) result.size());
}
return result;
}
@JRubyMethod(name = "has_flush")
public IRubyObject hasFlush(ThreadContext context) {
return hasFlush() ? context.tru : context.fals;
}
@JRubyMethod(name = "periodic_flush")
public IRubyObject hasPeriodicFlush(ThreadContext context) {
return periodicFlush() ? context.tru : context.fals;
}
protected abstract IRubyObject doFlush(final ThreadContext context, final RubyHash options);
public boolean hasFlush() {
return getHasFlush();
}
protected abstract boolean getHasFlush();
public boolean periodicFlush() {
return getPeriodicFlush();
}
protected abstract boolean getPeriodicFlush();
}

View file

@ -95,9 +95,7 @@ public abstract class AbstractOutputDelegatorExt extends RubyObject {
eventMetricIn.increment((long) count);
final long start = System.nanoTime();
doOutput(batch);
eventMetricTime.increment(
TimeUnit.MILLISECONDS.convert(System.nanoTime() - start, TimeUnit.NANOSECONDS)
);
eventMetricTime.increment(TimeUnit.MILLISECONDS.convert(System.nanoTime() - start, TimeUnit.NANOSECONDS));
eventMetricOut.increment((long) count);
return this;
}
@ -109,14 +107,10 @@ public abstract class AbstractOutputDelegatorExt extends RubyObject {
synchronized (metric) {
namespacedMetric = metric.namespace(context, context.runtime.newSymbol(id));
metricEvents = namespacedMetric.namespace(context, MetricKeys.EVENTS_KEY);
namespacedMetric.gauge(
context, MetricKeys.NAME_KEY, configName(context)
);
namespacedMetric.gauge(context, MetricKeys.NAME_KEY, configName(context));
eventMetricOut = LongCounter.fromRubyBase(metricEvents, MetricKeys.OUT_KEY);
eventMetricIn = LongCounter.fromRubyBase(metricEvents, MetricKeys.IN_KEY);
eventMetricTime = LongCounter.fromRubyBase(
metricEvents, MetricKeys.DURATION_IN_MILLIS_KEY
);
eventMetricTime = LongCounter.fromRubyBase(metricEvents, MetricKeys.DURATION_IN_MILLIS_KEY);
}
}

View file

@ -85,7 +85,7 @@ public final class DatasetCompiler {
* @return Dataset representing the filter plugin
*/
public static ComputeStepSyntaxElement<Dataset> filterDataset(final Collection<Dataset> parents,
final FilterDelegatorExt plugin) {
final AbstractFilterDelegatorExt plugin) {
final ClassFields fields = new ClassFields();
final ValueSyntaxElement outputBuffer = fields.add(new ArrayList<>());
final Closure clear = Closure.wrap();
@ -192,7 +192,7 @@ public final class DatasetCompiler {
private static Closure filterBody(final ValueSyntaxElement outputBuffer,
final ValueSyntaxElement inputBuffer, final ClassFields fields,
final FilterDelegatorExt plugin) {
final AbstractFilterDelegatorExt plugin) {
final ValueSyntaxElement filterField = fields.add(plugin);
final Closure body = Closure.wrap(
buffer(outputBuffer, filterField.call("multiFilter", inputBuffer))

View file

@ -1,13 +1,10 @@
package org.logstash.config.ir.compiler;
import com.google.common.annotations.VisibleForTesting;
import java.util.Collection;
import java.util.concurrent.TimeUnit;
import org.jruby.Ruby;
import org.jruby.RubyArray;
import org.jruby.RubyClass;
import org.jruby.RubyHash;
import org.jruby.RubyObject;
import org.jruby.RubyString;
import org.jruby.anno.JRubyClass;
import org.jruby.anno.JRubyMethod;
@ -16,51 +13,32 @@ import org.jruby.runtime.ThreadContext;
import org.jruby.runtime.builtin.IRubyObject;
import org.logstash.RubyUtil;
import org.logstash.execution.WorkerLoop;
import org.logstash.ext.JrubyEventExtLibrary;
import org.logstash.instrument.metrics.AbstractNamespacedMetricExt;
import org.logstash.instrument.metrics.MetricKeys;
import org.logstash.instrument.metrics.counter.LongCounter;
@JRubyClass(name = "JavaFilterDelegator")
public final class FilterDelegatorExt extends RubyObject {
private static final String FILTER_METHOD_NAME = "multi_filter";
@JRubyClass(name = "FilterDelegator")
public final class FilterDelegatorExt extends AbstractFilterDelegatorExt {
private static final long serialVersionUID = 1L;
private static final String FILTER_METHOD_NAME = "multi_filter";
private RubyClass filterClass;
private IRubyObject filter;
private AbstractNamespacedMetricExt metricEvents;
private RubyString id;
private LongCounter eventMetricOut;
private LongCounter eventMetricIn;
private DynamicMethod filterMethod;
private LongCounter eventMetricTime;
private boolean flushes;
@JRubyMethod
public IRubyObject initialize(final ThreadContext context, final IRubyObject filter,
final IRubyObject id) {
@JRubyMethod(name="initialize")
public IRubyObject initialize(final ThreadContext context, final IRubyObject filter, final IRubyObject id) {
this.id = (RubyString) id;
this.filter = filter;
filterClass = filter.getSingletonClass().getRealClass();
filterMethod = filterClass.searchMethod(FILTER_METHOD_NAME);
final AbstractNamespacedMetricExt namespacedMetric = (AbstractNamespacedMetricExt) filter.callMethod(context, "metric");
synchronized(namespacedMetric.getMetric()) {
metricEvents = namespacedMetric.namespace(context, MetricKeys.EVENTS_KEY);
eventMetricOut = LongCounter.fromRubyBase(metricEvents, MetricKeys.OUT_KEY);
eventMetricIn = LongCounter.fromRubyBase(metricEvents, MetricKeys.IN_KEY);
eventMetricTime = LongCounter.fromRubyBase(metricEvents, MetricKeys.DURATION_IN_MILLIS_KEY);
namespacedMetric.gauge(context, MetricKeys.NAME_KEY, configName(context));
}
initMetrics(this.id.asJavaString(), namespacedMetric);
flushes = filter.respondsTo("flush");
return this;
}
@ -80,84 +58,59 @@ public final class FilterDelegatorExt extends RubyObject {
super(runtime, metaClass);
}
@JRubyMethod
public IRubyObject register(final ThreadContext context) {
return filter.callMethod(context, "register");
@Override
protected void doRegister(final ThreadContext context) {
filter.callMethod(context, "register");
}
@JRubyMethod
public IRubyObject close(final ThreadContext context) {
@Override
protected IRubyObject closeImpl(final ThreadContext context) {
return filter.callMethod(context, "close");
}
@JRubyMethod(name = "do_close")
public IRubyObject doClose(final ThreadContext context) {
@Override
protected IRubyObject doCloseImpl(final ThreadContext context) {
return filter.callMethod(context, "do_close");
}
@JRubyMethod(name = "do_stop")
public IRubyObject doStop(final ThreadContext context) {
@Override
protected IRubyObject doStopImpl(final ThreadContext context) {
return filter.callMethod(context, "do_stop");
}
@JRubyMethod(name = "reloadable?")
public IRubyObject isReloadable(final ThreadContext context) {
@Override
protected IRubyObject reloadable(final ThreadContext context) {
return filter.callMethod(context, "reloadable?");
}
@JRubyMethod(name = "threadsafe?")
public IRubyObject concurrency(final ThreadContext context) {
@Override
protected IRubyObject getConcurrency(final ThreadContext context) {
return filter.callMethod(context, "threadsafe?");
}
@JRubyMethod(name = "config_name")
public IRubyObject configName(final ThreadContext context) {
@Override
protected IRubyObject getConfigName(final ThreadContext context) {
return filterClass.callMethod(context, "config_name");
}
@JRubyMethod(name = "id")
public IRubyObject getId() {
return id;
@Override
protected RubyArray doMultiFilter(final RubyArray batch) {
return (RubyArray) filterMethod.call(
WorkerLoop.THREAD_CONTEXT.get(), filter, filterClass, FILTER_METHOD_NAME, batch);
}
@SuppressWarnings("unchecked")
public RubyArray multiFilter(final RubyArray batch) {
eventMetricIn.increment((long) batch.size());
final long start = System.nanoTime();
final RubyArray result = (RubyArray) filterMethod.call(
WorkerLoop.THREAD_CONTEXT.get(), filter, filterClass, FILTER_METHOD_NAME, batch
);
eventMetricTime.increment(
TimeUnit.MILLISECONDS.convert(System.nanoTime() - start, TimeUnit.NANOSECONDS)
);
int count = 0;
for (final JrubyEventExtLibrary.RubyEvent event : (Collection<JrubyEventExtLibrary.RubyEvent>) result) {
if (!event.getEvent().isCancelled()) {
++count;
}
}
eventMetricOut.increment((long) count);
return result;
@Override
protected IRubyObject doFlush(final ThreadContext context, final RubyHash options) {
return filter.callMethod(context, "flush", options);
}
public RubyArray flush(final RubyHash options) {
final ThreadContext context = WorkerLoop.THREAD_CONTEXT.get();
final IRubyObject newEvents = filter.callMethod(context, "flush", options);
final RubyArray result;
if (newEvents.isNil()) {
result = RubyArray.newEmptyArray(context.runtime);
} else {
result = (RubyArray) newEvents;
eventMetricOut.increment((long) result.size());
}
return result;
}
public boolean hasFlush() {
@Override
protected boolean getHasFlush() {
return flushes;
}
public boolean periodicFlush() {
@Override
protected boolean getPeriodicFlush() {
return filter.callMethod(RubyUtil.RUBY.getCurrentContext(), "periodic_flush").isTrue();
}
}

View file

@ -0,0 +1,108 @@
package org.logstash.config.ir.compiler;
import org.jruby.Ruby;
import org.jruby.RubyArray;
import org.jruby.RubyClass;
import org.jruby.RubyHash;
import org.jruby.RubyString;
import org.jruby.RubySymbol;
import org.jruby.anno.JRubyClass;
import org.jruby.runtime.ThreadContext;
import org.jruby.runtime.builtin.IRubyObject;
import org.logstash.Event;
import org.logstash.RubyUtil;
import co.elastic.logstash.api.v0.Filter;
import org.logstash.ext.JrubyEventExtLibrary;
import org.logstash.instrument.metrics.AbstractNamespacedMetricExt;
import java.util.Collection;
import java.util.List;
import java.util.stream.Collectors;
@JRubyClass(name = "JavaFilterDelegator")
public class JavaFilterDelegatorExt extends AbstractFilterDelegatorExt {
private static final RubySymbol CONCURRENCY = RubyUtil.RUBY.newSymbol("java");
private RubyString configName;
private Filter filter;
public JavaFilterDelegatorExt(final Ruby runtime, final RubyClass metaClass) {
super(runtime, metaClass);
}
public static JavaFilterDelegatorExt create(final String configName, final String id,
final AbstractNamespacedMetricExt metric,
final Filter filter) {
final JavaFilterDelegatorExt instance =
new JavaFilterDelegatorExt(RubyUtil.RUBY, RubyUtil.JAVA_FILTER_DELEGATOR_CLASS);
instance.configName = RubyUtil.RUBY.newString(configName);
instance.initMetrics(id, metric);
instance.filter = filter;
return instance;
}
@SuppressWarnings("unchecked")
@Override
protected RubyArray doMultiFilter(final RubyArray batch) {
List<Event> inputEvents = (List<Event>)batch.stream()
.map(x -> ((JrubyEventExtLibrary.RubyEvent)x).getEvent())
.collect(Collectors.toList());
Collection<Event> outputEvents = filter.filter(inputEvents);
RubyArray newBatch = RubyArray.newArray(RubyUtil.RUBY, outputEvents.size());
for (Event outputEvent : outputEvents) {
newBatch.add(JrubyEventExtLibrary.RubyEvent.newRubyEvent(RubyUtil.RUBY, outputEvent));
}
return newBatch;
}
@Override
protected void doRegister(ThreadContext context) {}
@Override
protected IRubyObject doFlush(final ThreadContext context, final RubyHash options) {
// add flush() to Java filter API?
return context.nil;
}
@Override
protected IRubyObject closeImpl(final ThreadContext context) {
return context.nil;
}
@Override
protected IRubyObject doCloseImpl(final ThreadContext context) {
return context.nil;
}
@Override
protected IRubyObject doStopImpl(final ThreadContext context) {
return context.nil;
}
@Override
protected IRubyObject reloadable(final ThreadContext context) {
return context.tru;
}
@Override
protected IRubyObject getConcurrency(final ThreadContext context) {
return CONCURRENCY;
}
@Override
protected IRubyObject getConfigName(final ThreadContext context) {
return configName;
}
@Override
protected boolean getHasFlush() {
return false;
}
@Override
protected boolean getPeriodicFlush() {
return false;
}
}

View file

@ -1,7 +1,10 @@
package org.logstash.config.ir.compiler;
import java.util.Collection;
import java.util.List;
import java.util.function.Consumer;
import java.util.stream.Collectors;
import org.jruby.Ruby;
import org.jruby.RubyClass;
import org.jruby.RubyString;
@ -9,7 +12,9 @@ import org.jruby.RubySymbol;
import org.jruby.anno.JRubyClass;
import org.jruby.runtime.ThreadContext;
import org.jruby.runtime.builtin.IRubyObject;
import org.logstash.Event;
import org.logstash.RubyUtil;
import co.elastic.logstash.api.v0.Output;
import org.logstash.ext.JrubyEventExtLibrary;
import org.logstash.instrument.metrics.AbstractMetricExt;
@ -26,6 +31,8 @@ public final class JavaOutputDelegatorExt extends AbstractOutputDelegatorExt {
private Runnable registerAction;
private Output output;
public JavaOutputDelegatorExt(final Ruby runtime, final RubyClass metaClass) {
super(runtime, metaClass);
}
@ -36,14 +43,41 @@ public final class JavaOutputDelegatorExt extends AbstractOutputDelegatorExt {
final Runnable closeAction, final Runnable registerAction) {
final JavaOutputDelegatorExt instance =
new JavaOutputDelegatorExt(RubyUtil.RUBY, RubyUtil.JAVA_OUTPUT_DELEGATOR_CLASS);
instance.initMetrics(id, metric);
instance.configName = RubyUtil.RUBY.newString(configName);
instance.initMetrics(id, metric);
instance.outputFunction = outputFunction;
instance.closeAction = closeAction;
instance.registerAction = registerAction;
return instance;
}
public static JavaOutputDelegatorExt create(final String configName, final String id,
final AbstractMetricExt metric,
final Output output) {
final JavaOutputDelegatorExt instance =
new JavaOutputDelegatorExt(RubyUtil.RUBY, RubyUtil.JAVA_OUTPUT_DELEGATOR_CLASS);
instance.configName = RubyUtil.RUBY.newString(configName);
instance.initMetrics(id, metric);
instance.output = output;
instance.outputFunction = instance::outputRubyEvents;
instance.closeAction = instance::outputClose;
instance.registerAction = instance::outputRegister;
return instance;
}
void outputRubyEvents(Collection<JrubyEventExtLibrary.RubyEvent> e) {
List<Event> events = e.stream().map(JrubyEventExtLibrary.RubyEvent::getEvent).collect(Collectors.toList());
output.output(events);
}
void outputClose() {
output.stop();
}
void outputRegister() {
}
@Override
protected IRubyObject getConfigName(final ThreadContext context) {
return configName;

View file

@ -0,0 +1,67 @@
package org.logstash.config.ir.compiler;
import org.jruby.RubyInteger;
import org.jruby.RubyString;
import org.jruby.runtime.builtin.IRubyObject;
import co.elastic.logstash.api.Configuration;
import co.elastic.logstash.api.Context;
import co.elastic.logstash.api.v0.Filter;
import co.elastic.logstash.api.v0.Input;
import java.util.Map;
/**
* Factory that can instantiate Java plugins as well as Ruby plugins.
*/
public interface PluginFactory extends RubyIntegration.PluginFactory {
Input buildInput(String name, String id, Configuration configuration, Context context);
Filter buildFilter(
String name, String id, Configuration configuration, Context context
);
final class Default implements PluginFactory {
private final RubyIntegration.PluginFactory rubyFactory;
public Default(final RubyIntegration.PluginFactory rubyFactory) {
this.rubyFactory = rubyFactory;
}
@Override
public Input buildInput(final String name, final String id, final Configuration configuration, final Context context) {
return null;
}
@Override
public Filter buildFilter(final String name, final String id, final Configuration configuration, final Context context) {
return null;
}
@Override
public IRubyObject buildInput(final RubyString name, final RubyInteger line, final RubyInteger column,
final IRubyObject args, Map<String, Object> pluginArgs) {
return rubyFactory.buildInput(name, line, column, args, pluginArgs);
}
@Override
public AbstractOutputDelegatorExt buildOutput(final RubyString name, final RubyInteger line,
final RubyInteger column, final IRubyObject args,
final Map<String, Object> pluginArgs) {
return rubyFactory.buildOutput(name, line, column, args, pluginArgs);
}
@Override
public AbstractFilterDelegatorExt buildFilter(final RubyString name, final RubyInteger line,
final RubyInteger column, final IRubyObject args,
final Map<String, Object> pluginArgs) {
return rubyFactory.buildFilter(name, line, column, args, pluginArgs);
}
@Override
public IRubyObject buildCodec(final RubyString name, final IRubyObject args, Map<String, Object> pluginArgs) {
return rubyFactory.buildCodec(name, args, pluginArgs);
}
}
}

View file

@ -4,6 +4,8 @@ import org.jruby.RubyInteger;
import org.jruby.RubyString;
import org.jruby.runtime.builtin.IRubyObject;
import java.util.Map;
/**
* This class holds interfaces implemented by Ruby concrete classes.
*/
@ -19,14 +21,14 @@ public final class RubyIntegration {
public interface PluginFactory {
IRubyObject buildInput(RubyString name, RubyInteger line, RubyInteger column,
IRubyObject args);
IRubyObject args, Map<String, Object> pluginArgs);
AbstractOutputDelegatorExt buildOutput(RubyString name, RubyInteger line, RubyInteger column,
IRubyObject args);
IRubyObject args, Map<String, Object> pluginArgs);
FilterDelegatorExt buildFilter(RubyString name, RubyInteger line, RubyInteger column,
IRubyObject args);
AbstractFilterDelegatorExt buildFilter(RubyString name, RubyInteger line, RubyInteger column, IRubyObject args,
Map<String, Object> pluginArgs);
IRubyObject buildCodec(RubyString name, IRubyObject args);
IRubyObject buildCodec(RubyString name, IRubyObject args, Map<String, Object> pluginArgs);
}
}

View file

@ -0,0 +1,47 @@
package org.logstash.execution;
import co.elastic.logstash.api.v0.Input;
import java.util.ArrayList;
import java.util.Collection;
/**
* Provides a single point of control for a set of Java inputs.
*/
public class InputsController {
private final Collection<Input> inputs;
private ArrayList<Thread> threads = new ArrayList<>();
public InputsController(final Collection<Input> inputs) {
this.inputs = inputs;
}
public void startInputs(final JavaBasePipelineExt provider) {
int inputCounter = 0;
for (Input input : inputs) {
String pluginName = input.getClass().getName(); // TODO: get annotated plugin name
Thread t = new Thread(() -> input.start(provider.getQueueWriter(pluginName)));
t.setName("input_" + (inputCounter++) + "_" + pluginName);
threads.add(t);
t.start();
}
}
public void stopInputs() {
for (Input input : inputs) {
input.stop();
}
}
public void awaitStop() {
// trivial implementation
for (Input input : inputs) {
try {
input.awaitStop();
} catch (InterruptedException e) {
// do nothing
}
}
}
}

View file

@ -17,6 +17,8 @@ import org.jruby.runtime.builtin.IRubyObject;
import org.logstash.RubyUtil;
import org.logstash.common.IncompleteSourceWithMetadataException;
import org.logstash.config.ir.CompiledPipeline;
import org.logstash.execution.queue.QueueWriter;
import org.logstash.ext.JRubyWrappedWriteClientExt;
import org.logstash.plugins.PluginFactoryExt;
@JRubyClass(name = "JavaBasePipeline")
@ -108,4 +110,14 @@ public final class JavaBasePipelineExt extends AbstractPipelineExt {
return result;
}
public QueueWriter getQueueWriter(final String inputName) {
return new JRubyWrappedWriteClientExt(RubyUtil.RUBY, RubyUtil.WRAPPED_WRITE_CLIENT_CLASS)
.initialize(
RubyUtil.RUBY.getCurrentContext(),
new IRubyObject[]{
inputQueueClient(), pipelineId().convertToString().intern(),
metric(), RubyUtil.RUBY.newSymbol(inputName)
}
);
}
}

View file

@ -1,6 +1,5 @@
package org.logstash.execution;
import java.util.Collection;
import org.jruby.Ruby;
import org.jruby.RubyArray;
import org.jruby.RubyBasicObject;
@ -14,7 +13,9 @@ import org.jruby.runtime.Block;
import org.jruby.runtime.ThreadContext;
import org.jruby.runtime.builtin.IRubyObject;
import org.logstash.RubyUtil;
import org.logstash.config.ir.compiler.OutputDelegatorExt;
import org.logstash.config.ir.compiler.AbstractOutputDelegatorExt;
import java.util.Collection;
@JRubyClass(name = "PipelineReporter")
public final class PipelineReporterExt extends RubyBasicObject {
@ -163,7 +164,7 @@ public final class PipelineReporterExt extends RubyBasicObject {
outputIterable = (Iterable<IRubyObject>) outputs.toJava(Iterable.class);
}
outputIterable.forEach(output -> {
final OutputDelegatorExt delegator = (OutputDelegatorExt) output;
final AbstractOutputDelegatorExt delegator = (AbstractOutputDelegatorExt) output;
final RubyHash hash = RubyHash.newHash(context.runtime);
hash.op_aset(context, TYPE_KEY, delegator.configName(context));
hash.op_aset(context, ID_KEY, delegator.getId());

View file

@ -0,0 +1,26 @@
package org.logstash.execution.queue;
import java.util.Map;
import java.util.concurrent.BlockingQueue;
import org.logstash.Event;
import org.logstash.RubyUtil;
import org.logstash.ext.JrubyEventExtLibrary;
public final class LegacyMemoryQueueWriter implements QueueWriter {
private final BlockingQueue<JrubyEventExtLibrary.RubyEvent> queue;
public LegacyMemoryQueueWriter(final BlockingQueue<JrubyEventExtLibrary.RubyEvent> queue) {
this.queue = queue;
}
@Override
public void push(final Map<String, Object> event) {
try {
queue.put(JrubyEventExtLibrary.RubyEvent.newRubyEvent(RubyUtil.RUBY, new Event(event)));
} catch (final InterruptedException ex) {
throw new IllegalStateException(ex);
}
}
}

View file

@ -0,0 +1,16 @@
package org.logstash.execution.queue;
import java.util.Map;
/**
* Writes to the Queue.
*/
public interface QueueWriter {
/**
* Pushes a single event to the Queue, blocking indefinitely if the Queue is not ready for a
* write.
* @param event Logstash Event Data
*/
void push(Map<String, Object> event);
}

View file

@ -8,9 +8,10 @@ import org.jruby.anno.JRubyClass;
import org.jruby.anno.JRubyMethod;
import org.jruby.runtime.ThreadContext;
import org.jruby.runtime.builtin.IRubyObject;
import org.logstash.execution.queue.QueueWriter;
@JRubyClass(name = "AbstractQueueWriteClient")
public abstract class JRubyAbstractQueueWriteClientExt extends RubyBasicObject {
public abstract class JRubyAbstractQueueWriteClientExt extends RubyBasicObject implements QueueWriter {
protected JRubyAbstractQueueWriteClientExt(final Ruby runtime, final RubyClass metaClass) {
super(runtime, metaClass);

View file

@ -1,6 +1,7 @@
package org.logstash.ext;
import java.util.Collection;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import org.jruby.Ruby;
import org.jruby.RubyArray;
@ -12,13 +13,14 @@ import org.jruby.anno.JRubyMethod;
import org.jruby.runtime.ThreadContext;
import org.jruby.runtime.builtin.IRubyObject;
import org.logstash.RubyUtil;
import org.logstash.execution.queue.QueueWriter;
import org.logstash.instrument.metrics.AbstractMetricExt;
import org.logstash.instrument.metrics.AbstractNamespacedMetricExt;
import org.logstash.instrument.metrics.MetricKeys;
import org.logstash.instrument.metrics.counter.LongCounter;
@JRubyClass(name = "WrappedWriteClient")
public final class JRubyWrappedWriteClientExt extends RubyObject {
public final class JRubyWrappedWriteClientExt extends RubyObject implements QueueWriter {
private static final RubySymbol PUSH_DURATION_KEY =
RubyUtil.RUBY.newSymbol("queue_push_duration_in_millis");
@ -39,7 +41,8 @@ public final class JRubyWrappedWriteClientExt extends RubyObject {
}
@JRubyMethod(required = 4)
public IRubyObject initialize(final ThreadContext context, final IRubyObject[] args) {
public JRubyWrappedWriteClientExt initialize(final ThreadContext context,
final IRubyObject[] args) {
return initialize((JRubyAbstractQueueWriteClientExt) args[0], args[1].asJavaString(),
(AbstractMetricExt) args[2], args[3]);
}
@ -131,4 +134,12 @@ public final class JRubyWrappedWriteClientExt extends RubyObject {
}
return RubyUtil.RUBY.newArray(res);
}
@Override
public void push(Map<String, Object> event) {
final long start = System.nanoTime();
incrementCounters(1L);
writeClient.push(event);
incrementTimers(start);
}
}

View file

@ -1,6 +1,8 @@
package org.logstash.ext;
import java.io.IOException;
import java.util.Collection;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
import org.jruby.Ruby;
import org.jruby.RubyClass;
@ -8,6 +10,7 @@ import org.jruby.anno.JRubyClass;
import org.jruby.anno.JRubyMethod;
import org.jruby.runtime.ThreadContext;
import org.jruby.runtime.builtin.IRubyObject;
import org.logstash.Event;
import org.logstash.RubyUtil;
import org.logstash.ackedqueue.ext.JRubyAckedQueueExt;
@ -69,4 +72,14 @@ public final class JrubyAckedWriteClientExt extends JRubyAbstractQueueWriteClien
throw new IllegalStateException("Tried to write to a closed queue.");
}
}
@Override
public void push(Map<String, Object> event) {
try {
queue.write(new Event(event));
} catch (IOException e) {
throw new IllegalStateException(e);
}
}
}

View file

@ -1,11 +1,13 @@
package org.logstash.ext;
import java.util.Collection;
import java.util.Map;
import java.util.concurrent.BlockingQueue;
import org.jruby.Ruby;
import org.jruby.RubyClass;
import org.jruby.anno.JRubyClass;
import org.jruby.runtime.ThreadContext;
import org.logstash.Event;
import org.logstash.RubyUtil;
import org.logstash.common.LsQueueUtils;
@ -44,4 +46,13 @@ public final class JrubyMemoryWriteClientExt extends JRubyAbstractQueueWriteClie
LsQueueUtils.addAll(queue, batch);
return this;
}
@Override
public void push(Map<String, Object> event) {
try {
queue.put(JrubyEventExtLibrary.RubyEvent.newRubyEvent(RubyUtil.RUBY, new Event(event)));
} catch (InterruptedException e) {
throw new IllegalStateException(e);
}
}
}

View file

@ -1,18 +1,13 @@
package org.logstash.plugins;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Locale;
import java.util.Map;
import java.util.UUID;
import co.elastic.logstash.api.v0.Input;
import org.jruby.Ruby;
import org.jruby.RubyArray;
import org.jruby.RubyBasicObject;
import org.jruby.RubyClass;
import org.jruby.RubyHash;
import org.jruby.RubyInteger;
import org.jruby.RubyObject;
import org.jruby.RubyString;
import org.jruby.RubySymbol;
import org.jruby.anno.JRubyClass;
@ -21,18 +16,35 @@ import org.jruby.runtime.ThreadContext;
import org.jruby.runtime.builtin.IRubyObject;
import org.logstash.RubyUtil;
import org.logstash.config.ir.PipelineIR;
import org.logstash.config.ir.compiler.AbstractFilterDelegatorExt;
import org.logstash.config.ir.compiler.AbstractOutputDelegatorExt;
import org.logstash.config.ir.compiler.FilterDelegatorExt;
import org.logstash.config.ir.compiler.JavaFilterDelegatorExt;
import org.logstash.config.ir.compiler.JavaOutputDelegatorExt;
import org.logstash.config.ir.compiler.OutputDelegatorExt;
import org.logstash.config.ir.compiler.OutputStrategyExt;
import org.logstash.config.ir.compiler.RubyIntegration;
import org.logstash.config.ir.graph.Vertex;
import org.logstash.execution.ExecutionContextExt;
import co.elastic.logstash.api.Configuration;
import co.elastic.logstash.api.Context;
import co.elastic.logstash.api.v0.Filter;
import co.elastic.logstash.api.v0.Output;
import org.logstash.instrument.metrics.AbstractMetricExt;
import org.logstash.instrument.metrics.AbstractNamespacedMetricExt;
import org.logstash.instrument.metrics.MetricKeys;
import org.logstash.instrument.metrics.NullMetricExt;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Locale;
import java.util.Map;
import java.util.UUID;
public final class PluginFactoryExt {
@JRubyClass(name = "PluginFactory")
@ -53,16 +65,17 @@ public final class PluginFactoryExt {
@JRubyMethod(name = "filter_delegator", meta = true, required = 5)
public static IRubyObject filterDelegator(final ThreadContext context,
final IRubyObject recv, final IRubyObject[] args) {
final IRubyObject recv, final IRubyObject[] args) {
final RubyHash arguments = (RubyHash) args[2];
final IRubyObject filterInstance = args[1].callMethod(context, "new", arguments);
final RubyString id = (RubyString) arguments.op_aref(context, ID_KEY);
filterInstance.callMethod(
context, "metric=",
((AbstractMetricExt) args[3]).namespace(context, id.intern19())
context, "metric=",
((AbstractMetricExt) args[3]).namespace(context, id.intern19())
);
filterInstance.callMethod(context, "execution_context=", args[4]);
return args[0].callMethod(context, "new", new IRubyObject[]{filterInstance, id});
return new FilterDelegatorExt(context.runtime, RubyUtil.FILTER_DELEGATOR_CLASS)
.initialize(context, filterInstance, id);
}
public Plugins(final Ruby runtime, final RubyClass metaClass) {
@ -71,17 +84,17 @@ public final class PluginFactoryExt {
@JRubyMethod(required = 4)
public PluginFactoryExt.Plugins initialize(final ThreadContext context,
final IRubyObject[] args) {
final IRubyObject[] args) {
return init(
(PipelineIR) args[0].toJava(PipelineIR.class),
(PluginFactoryExt.Metrics) args[1], (PluginFactoryExt.ExecutionContext) args[2],
(RubyClass) args[3]
(PipelineIR) args[0].toJava(PipelineIR.class),
(PluginFactoryExt.Metrics) args[1], (PluginFactoryExt.ExecutionContext) args[2],
(RubyClass) args[3]
);
}
public PluginFactoryExt.Plugins init(final PipelineIR lir,
final PluginFactoryExt.Metrics metrics,
final PluginFactoryExt.ExecutionContext executionContext, final RubyClass filterClass) {
public PluginFactoryExt.Plugins init(final PipelineIR lir, final PluginFactoryExt.Metrics metrics,
final PluginFactoryExt.ExecutionContext executionContext,
final RubyClass filterClass) {
this.lir = lir;
this.metrics = metrics;
this.executionContext = executionContext;
@ -91,143 +104,148 @@ public final class PluginFactoryExt {
@SuppressWarnings("unchecked")
@Override
public IRubyObject buildInput(final RubyString name, final RubyInteger line,
final RubyInteger column, final IRubyObject args) {
public IRubyObject buildInput(final RubyString name, final RubyInteger line, final RubyInteger column,
final IRubyObject args, Map<String, Object> pluginArgs) {
return plugin(
RubyUtil.RUBY.getCurrentContext(), PluginLookup.PluginType.INPUT,
name.asJavaString(), line.getIntValue(), column.getIntValue(),
(Map<String, IRubyObject>) args
RubyUtil.RUBY.getCurrentContext(), PluginLookup.PluginType.INPUT,
name.asJavaString(), line.getIntValue(), column.getIntValue(),
(Map<String, IRubyObject>) args, pluginArgs
);
}
@JRubyMethod(required = 4)
public IRubyObject buildInput(final ThreadContext context, final IRubyObject[] args) {
return buildInput(
(RubyString) args[0], args[1].convertToInteger(), args[2].convertToInteger(),
args[3]
(RubyString) args[0], args[1].convertToInteger(), args[2].convertToInteger(),
args[3], null
);
}
@SuppressWarnings("unchecked")
@Override
public AbstractOutputDelegatorExt buildOutput(final RubyString name, final RubyInteger line,
final RubyInteger column, final IRubyObject args) {
return (OutputDelegatorExt) plugin(
RubyUtil.RUBY.getCurrentContext(), PluginLookup.PluginType.OUTPUT,
name.asJavaString(), line.getIntValue(), column.getIntValue(),
(Map<String, IRubyObject>) args
final RubyInteger column, final IRubyObject args,
Map<String, Object> pluginArgs) {
return (AbstractOutputDelegatorExt) plugin(
RubyUtil.RUBY.getCurrentContext(), PluginLookup.PluginType.OUTPUT,
name.asJavaString(), line.getIntValue(), column.getIntValue(),
(Map<String, IRubyObject>) args, pluginArgs
);
}
@JRubyMethod(required = 4)
public AbstractOutputDelegatorExt buildOutput(final ThreadContext context,
final IRubyObject[] args) {
final IRubyObject[] args) {
return buildOutput(
(RubyString) args[0], args[1].convertToInteger(), args[2].convertToInteger(), args[3]
(RubyString) args[0], args[1].convertToInteger(), args[2].convertToInteger(), args[3], null
);
}
@SuppressWarnings("unchecked")
@Override
public FilterDelegatorExt buildFilter(final RubyString name, final RubyInteger line,
final RubyInteger column, final IRubyObject args) {
return (FilterDelegatorExt) plugin(
RubyUtil.RUBY.getCurrentContext(), PluginLookup.PluginType.FILTER,
name.asJavaString(), line.getIntValue(), column.getIntValue(),
(Map<String, IRubyObject>) args
public AbstractFilterDelegatorExt buildFilter(final RubyString name, final RubyInteger line,
final RubyInteger column, final IRubyObject args,
Map<String, Object> pluginArgs) {
return (AbstractFilterDelegatorExt) plugin(
RubyUtil.RUBY.getCurrentContext(), PluginLookup.PluginType.FILTER,
name.asJavaString(), line.getIntValue(), column.getIntValue(),
(Map<String, IRubyObject>) args, pluginArgs
);
}
@JRubyMethod(required = 4)
public IRubyObject buildFilter(final ThreadContext context, final IRubyObject[] args) {
return buildFilter(
(RubyString) args[0], args[1].convertToInteger(), args[2].convertToInteger(),
args[3]
(RubyString) args[0], args[1].convertToInteger(), args[2].convertToInteger(),
args[3], null
);
}
@SuppressWarnings("unchecked")
@Override
public IRubyObject buildCodec(final RubyString name, final IRubyObject args) {
public IRubyObject buildCodec(final RubyString name, final IRubyObject args, Map<String, Object> pluginArgs) {
return plugin(
RubyUtil.RUBY.getCurrentContext(), PluginLookup.PluginType.CODEC,
name.asJavaString(), 0, 0, (Map<String, IRubyObject>) args
RubyUtil.RUBY.getCurrentContext(), PluginLookup.PluginType.CODEC,
name.asJavaString(), 0, 0, (Map<String, IRubyObject>) args, pluginArgs
);
}
@JRubyMethod(required = 4)
public IRubyObject buildCodec(final ThreadContext context, final IRubyObject[] args) {
return buildCodec((RubyString) args[0], args[1]);
return buildCodec((RubyString) args[0], args[1], null);
}
@SuppressWarnings("unchecked")
@JRubyMethod(required = 4, optional = 1)
public IRubyObject plugin(final ThreadContext context, final IRubyObject[] args) {
return plugin(
context,
PluginLookup.PluginType.valueOf(args[0].asJavaString().toUpperCase(Locale.ENGLISH)),
args[1].asJavaString(),
args[2].convertToInteger().getIntValue(),
args[3].convertToInteger().getIntValue(),
args.length > 4 ? (Map<String, IRubyObject>) args[4] : new HashMap<>()
context,
PluginLookup.PluginType.valueOf(args[0].asJavaString().toUpperCase(Locale.ENGLISH)),
args[1].asJavaString(),
args[2].convertToInteger().getIntValue(),
args[3].convertToInteger().getIntValue(),
args.length > 4 ? (Map<String, IRubyObject>) args[4] : new HashMap<>(),
null
);
}
private IRubyObject plugin(final ThreadContext context,
final PluginLookup.PluginType type, final String name, final int line, final int column,
final Map<String, IRubyObject> args) {
@SuppressWarnings("unchecked")
private IRubyObject plugin(final ThreadContext context, final PluginLookup.PluginType type, final String name,
final int line, final int column, final Map<String, IRubyObject> args,
Map<String, Object> pluginArgs) {
final String id;
if (type == PluginLookup.PluginType.CODEC) {
id = UUID.randomUUID().toString();
} else {
id = lir.getGraph().vertices().filter(
v -> v.getSourceWithMetadata() != null
&& v.getSourceWithMetadata().getLine() == line
&& v.getSourceWithMetadata().getColumn() == column
v -> v.getSourceWithMetadata() != null
&& v.getSourceWithMetadata().getLine() == line
&& v.getSourceWithMetadata().getColumn() == column
).findFirst().map(Vertex::getId).orElse(null);
}
if (id == null) {
throw context.runtime.newRaiseException(
RubyUtil.CONFIGURATION_ERROR_CLASS,
String.format(
"Could not determine ID for %s/%s", type.rubyLabel().asJavaString(), name
)
RubyUtil.CONFIGURATION_ERROR_CLASS,
String.format(
"Could not determine ID for %s/%s", type.rubyLabel().asJavaString(), name
)
);
}
if (pluginsById.contains(id)) {
throw context.runtime.newRaiseException(
RubyUtil.CONFIGURATION_ERROR_CLASS,
String.format("Two plugins have the id '%s', please fix this conflict", id)
RubyUtil.CONFIGURATION_ERROR_CLASS,
String.format("Two plugins have the id '%s', please fix this conflict", id)
);
}
pluginsById.add(id);
final AbstractNamespacedMetricExt typeScopedMetric =
metrics.create(context, type.rubyLabel());
final AbstractNamespacedMetricExt typeScopedMetric = metrics.create(context, type.rubyLabel());
final PluginLookup.PluginClass pluginClass = PluginLookup.lookup(type, name);
if (pluginClass.language() == PluginLookup.PluginLanguage.RUBY) {
final Map<String, Object> newArgs = new HashMap<>(args);
newArgs.put("id", id);
final RubyClass klass = (RubyClass) pluginClass.klass();
final ExecutionContextExt executionCntx = executionContext.create(
context, RubyUtil.RUBY.newString(id), klass.callMethod(context, "config_name")
context, RubyUtil.RUBY.newString(id), klass.callMethod(context, "config_name")
);
final RubyHash rubyArgs = RubyHash.newHash(context.runtime);
rubyArgs.putAll(newArgs);
if (type == PluginLookup.PluginType.OUTPUT) {
return new OutputDelegatorExt(context.runtime, RubyUtil.RUBY_OUTPUT_DELEGATOR_CLASS).initialize(
context,
new IRubyObject[]{
klass, typeScopedMetric, executionCntx,
OutputStrategyExt.OutputStrategyRegistryExt.instance(context, null),
rubyArgs
}
context,
new IRubyObject[]{
klass, typeScopedMetric, executionCntx,
OutputStrategyExt.OutputStrategyRegistryExt.instance(context, null),
rubyArgs
}
);
} else if (type == PluginLookup.PluginType.FILTER) {
return filterDelegator(
context, null,
new IRubyObject[]{
filterClass, klass, rubyArgs, typeScopedMetric, executionCntx
}
context, null,
new IRubyObject[]{
filterClass, klass, rubyArgs, typeScopedMetric, executionCntx
}
);
} else {
final IRubyObject pluginInstance = klass.callMethod(context, "new", rubyArgs);
@ -238,11 +256,84 @@ public final class PluginFactoryExt {
return pluginInstance;
}
} else {
return context.nil;
if (type == PluginLookup.PluginType.OUTPUT) {
final Class<Output> cls = (Class<Output>) pluginClass.klass();
Output output = null;
if (cls != null) {
try {
final Constructor<Output> ctor = cls.getConstructor(Configuration.class, Context.class);
output = ctor.newInstance(new Configuration(pluginArgs), new Context());
} catch (NoSuchMethodException | IllegalAccessException | InstantiationException | InvocationTargetException ex) {
throw new IllegalStateException(ex);
}
}
if (output != null) {
return JavaOutputDelegatorExt.create(name, id, typeScopedMetric, output);
} else {
throw new IllegalStateException("Unable to instantiate output: " + pluginClass);
}
} else if (type == PluginLookup.PluginType.FILTER) {
final Class<Filter> cls = (Class<Filter>) pluginClass.klass();
Filter filter = null;
if (cls != null) {
try {
final Constructor<Filter> ctor = cls.getConstructor(Configuration.class, Context.class);
filter = ctor.newInstance(new Configuration(pluginArgs), new Context());
} catch (NoSuchMethodException | IllegalAccessException | InstantiationException | InvocationTargetException ex) {
throw new IllegalStateException(ex);
}
}
if (filter != null) {
return JavaFilterDelegatorExt.create(name, id, typeScopedMetric, filter);
} else {
throw new IllegalStateException("Unable to instantiate filter: " + pluginClass);
}
} else if (type == PluginLookup.PluginType.INPUT) {
final Class<Input> cls = (Class<Input>) pluginClass.klass();
Input input = null;
if (cls != null) {
try {
final Constructor<Input> ctor = cls.getConstructor(Configuration.class, Context.class);
input = ctor.newInstance(new Configuration(pluginArgs), new Context());
} catch (NoSuchMethodException | IllegalAccessException | InstantiationException | InvocationTargetException ex) {
throw new IllegalStateException(ex);
}
}
if (input != null) {
return JavaInputWrapperExt.create(context, input);
} else {
throw new IllegalStateException("Unable to instantiate input: " + pluginClass);
}
} else {
throw new IllegalStateException("Unable to create plugin: " + pluginClass.toReadableString());
}
}
}
}
@JRubyClass(name = "JavaInputWrapper")
public static final class JavaInputWrapperExt extends RubyObject {
private Input input;
public JavaInputWrapperExt(Ruby runtime, RubyClass metaClass) {
super(runtime, metaClass);
}
public static JavaInputWrapperExt create(ThreadContext context, Input input) {
JavaInputWrapperExt inputWrapper = new JavaInputWrapperExt(context.runtime, RubyUtil.JAVA_INPUT_WRAPPER_CLASS);
inputWrapper.input = input;
return inputWrapper;
}
public Input getInput() {
return input;
}
}
@JRubyClass(name = "ExecutionContextFactory")
public static final class ExecutionContext extends RubyBasicObject {

View file

@ -1,8 +1,11 @@
package org.logstash.plugins;
import org.jruby.RubyClass;
import org.jruby.RubyString;
import org.jruby.javasupport.JavaClass;
import org.jruby.runtime.builtin.IRubyObject;
import org.logstash.RubyUtil;
import org.logstash.plugins.discovery.PluginRegistry;
/**
* Java Implementation of the plugin that is implemented by wrapping the Ruby
@ -11,37 +14,65 @@ import org.logstash.RubyUtil;
public final class PluginLookup {
private static final IRubyObject RUBY_REGISTRY = RubyUtil.RUBY.executeScript(
"require 'logstash/plugins/registry'\nrequire 'logstash/plugin'\nLogStash::Plugin",
""
"require 'logstash/plugins/registry'\nrequire 'logstash/plugin'\nLogStash::Plugin",
""
);
private PluginLookup() {
// Utility Class
}
public static PluginLookup.PluginClass lookup(final PluginLookup.PluginType type,
final String name) {
return new PluginLookup.PluginClass() {
@Override
public PluginLookup.PluginLanguage language() {
return PluginLookup.PluginLanguage.RUBY;
}
public static PluginLookup.PluginClass lookup(final PluginLookup.PluginType type, final String name) {
Class javaClass = PluginRegistry.getPluginClass(type, name);
if (javaClass != null) {
return new PluginLookup.PluginClass() {
@Override
public Object klass() {
return RUBY_REGISTRY.callMethod(
RubyUtil.RUBY.getCurrentContext(), "lookup",
new IRubyObject[]{type.rubyLabel(), RubyUtil.RUBY.newString(name)}
);
}
};
@Override
public PluginLookup.PluginLanguage language() {
return PluginLookup.PluginLanguage.JAVA;
}
@Override
public Object klass() {
return javaClass;
}
};
} else {
Object klass =
RUBY_REGISTRY.callMethod(
RubyUtil.RUBY.getCurrentContext(), "lookup",
new IRubyObject[]{type.rubyLabel(), RubyUtil.RUBY.newString(name)});
PluginLanguage language = klass instanceof RubyClass
? PluginLanguage.RUBY
: PluginLanguage.JAVA;
Object resolvedClass = klass instanceof JavaClass
? ((JavaClass) klass).javaClass()
: klass;
return new PluginLookup.PluginClass() {
@Override
public PluginLookup.PluginLanguage language() {
return language;
}
@Override
public Object klass() {
return resolvedClass;
}
};
}
}
public interface PluginClass {
PluginLookup.PluginLanguage language();
Object klass();
default String toReadableString() {
return String.format("Plugin class [%s], language [%s]", klass(), language());
}
}
public enum PluginLanguage {

View file

@ -0,0 +1,128 @@
package org.logstash.plugins.codecs;
import org.logstash.Event;
import org.logstash.StringInterpolation;
import co.elastic.logstash.api.v0.Codec;
import co.elastic.logstash.api.Configuration;
import co.elastic.logstash.api.LogstashPlugin;
import co.elastic.logstash.api.Context;
import co.elastic.logstash.api.PluginHelper;
import co.elastic.logstash.api.PluginConfigSpec;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.CharBuffer;
import java.nio.charset.CharacterCodingException;
import java.nio.charset.Charset;
import java.nio.charset.CharsetDecoder;
import java.nio.charset.CoderResult;
import java.nio.charset.CodingErrorAction;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.function.Consumer;
@LogstashPlugin(name = "java-line")
public class Line implements Codec {
public static final String DEFAULT_DELIMITER = "\n";
private static final PluginConfigSpec<String> CHARSET_CONFIG =
Configuration.stringSetting("charset", "UTF-8");
private static final PluginConfigSpec<String> DELIMITER_CONFIG =
Configuration.stringSetting("delimiter", DEFAULT_DELIMITER);
private static final PluginConfigSpec<String> FORMAT_CONFIG =
Configuration.stringSetting("format");
static final String MESSAGE_FIELD = "message";
private final String delimiter;
private final Charset charset;
private String format = null;
private final CharBuffer charBuffer = ByteBuffer.allocateDirect(64 * 1024).asCharBuffer();
private final CharsetDecoder decoder;
private String remainder = "";
public Line(final Configuration configuration, final Context context) {
delimiter = configuration.get(DELIMITER_CONFIG);
charset = Charset.forName(configuration.get(CHARSET_CONFIG));
format = configuration.get(FORMAT_CONFIG);
decoder = charset.newDecoder();
decoder.onMalformedInput(CodingErrorAction.IGNORE);
}
@Override
public void decode(ByteBuffer buffer, Consumer<Map<String, Object>> eventConsumer) {
int bufferPosition = buffer.position();
CoderResult result = decoder.decode(buffer, charBuffer, false);
charBuffer.flip();
String s = (remainder == null ? "" : remainder) + charBuffer.toString();
charBuffer.clear();
if (s.endsWith(delimiter)) {
// strip trailing delimiter, if any, to match Ruby implementation
s = s.substring(0, s.length() - delimiter.length());
// clear the buffered remainder (it was folded into s and is emitted below) so it is not re-emitted on the next call
remainder = "";
} else {
int lastIndex = s.lastIndexOf(delimiter);
if (lastIndex == -1) {
buffer.position(bufferPosition);
s = "";
} else {
remainder = s.substring(lastIndex + delimiter.length(), s.length());
s = s.substring(0, lastIndex);
}
}
if (s.length() > 0) {
String[] lines = s.split(delimiter, 0);
for (int k = 0; k < lines.length; k++) {
eventConsumer.accept(simpleMap(lines[k]));
}
}
}
@Override
public void flush(ByteBuffer buffer, Consumer<Map<String, Object>> eventConsumer) {
if (remainder.length() > 0 || buffer.position() != buffer.limit()) {
try {
String remainder = this.remainder + charset.newDecoder().decode(buffer).toString();
String[] lines = remainder.split(delimiter, 0);
for (int k = 0; k < lines.length; k++) {
eventConsumer.accept(simpleMap(lines[k]));
}
} catch (CharacterCodingException e) {
throw new IllegalStateException(e);
}
}
}
private static Map<String, Object> simpleMap(String message) {
HashMap<String, Object> simpleMap = new HashMap<>();
simpleMap.put(MESSAGE_FIELD, message);
return simpleMap;
}
@Override
public void encode(Event event, OutputStream output) {
try {
String outputString = (format == null
? event.toJson()
: StringInterpolation.evaluate(event, format))
+ delimiter;
output.write(outputString.getBytes(charset));
} catch (IOException e) {
throw new IllegalStateException(e);
}
}
@Override
public Collection<PluginConfigSpec<?>> configSchema() {
return PluginHelper.commonInputOptions(
Arrays.asList(CHARSET_CONFIG, DELIMITER_CONFIG, FORMAT_CONFIG));
}
}
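
A minimal sketch of the decode/flush contract above, assuming the default UTF-8 charset and "\n" delimiter (the empty settings map is illustrative):

Line line = new Line(new Configuration(Collections.emptyMap()), new Context());
List<Map<String, Object>> events = new ArrayList<>();
ByteBuffer input = ByteBuffer.wrap("foo\nbar\nbaz".getBytes(StandardCharsets.UTF_8));
line.decode(input, events::add); // emits "foo" and "bar"; "baz" is buffered as the remainder
line.flush(input, events::add); // emits the trailing "baz"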

View file

@ -0,0 +1,84 @@
package org.logstash.plugins.discovery;
import com.google.common.base.Predicate;
import com.google.common.base.Predicates;
import com.google.common.collect.Multimap;
/**
* Base class for metadata scanners; holds the shared configuration, result store, and result filter.
*/
@SuppressWarnings({"RawUseOfParameterizedType", "unchecked"})
public abstract class AbstractScanner implements Scanner {
private Configuration configuration;
private Multimap<String, String> store;
private Predicate<String> resultFilter = Predicates.alwaysTrue(); //accept all by default
public boolean acceptsInput(String file) {
return getMetadataAdapter().acceptsInput(file);
}
public Object scan(Vfs.File file, Object classObject) {
if (classObject == null) {
try {
classObject = configuration.getMetadataAdapter().getOfCreateClassObject(file);
} catch (Exception e) {
throw new ReflectionsException("could not create class object from file " + file.getRelativePath(), e);
}
}
scan(classObject);
return classObject;
}
public abstract void scan(Object cls);
//
public Configuration getConfiguration() {
return configuration;
}
public void setConfiguration(final Configuration configuration) {
this.configuration = configuration;
}
public Multimap<String, String> getStore() {
return store;
}
public void setStore(final Multimap<String, String> store) {
this.store = store;
}
public Predicate<String> getResultFilter() {
return resultFilter;
}
public void setResultFilter(Predicate<String> resultFilter) {
this.resultFilter = resultFilter;
}
@Override
public Scanner filterResultsBy(Predicate<String> filter) {
this.setResultFilter(filter);
return this;
}
public boolean acceptResult(final String fqn) {
return fqn != null && resultFilter.apply(fqn);
}
protected MetadataAdapter getMetadataAdapter() {
return configuration.getMetadataAdapter();
}
//
@Override
public boolean equals(Object o) {
return this == o || o != null && getClass() == o.getClass();
}
@Override
public int hashCode() {
return getClass().hashCode();
}
}

View file

@ -0,0 +1,194 @@
package org.logstash.plugins.discovery;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Enumeration;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
/**
* Helper methods for working with the classpath.
*/
public abstract class ClasspathHelper {
/**
* Gets the current thread context class loader.
* {@code Thread.currentThread().getContextClassLoader()}.
*
* @return the context class loader, may be null
*/
public static ClassLoader contextClassLoader() {
return Thread.currentThread().getContextClassLoader();
}
/**
* Gets the class loader of this library.
* {@code Reflections.class.getClassLoader()}.
*
* @return the static library class loader, may be null
*/
public static ClassLoader staticClassLoader() {
return Reflections.class.getClassLoader();
}
/**
* Returns an array of class loaders initialized from the specified array.
* <p>
* If the input is null or empty, it defaults to both {@link #contextClassLoader()} and {@link #staticClassLoader()}
*
* @return the array of class loaders, not null
*/
public static ClassLoader[] classLoaders(ClassLoader... classLoaders) {
if (classLoaders != null && classLoaders.length != 0) {
return classLoaders;
} else {
ClassLoader contextClassLoader = contextClassLoader();
ClassLoader staticClassLoader = staticClassLoader();
if (contextClassLoader == null) {
return new ClassLoader[]{};
}
if (staticClassLoader != null && contextClassLoader != staticClassLoader) {
return new ClassLoader[]{contextClassLoader, staticClassLoader};
}
return new ClassLoader[]{contextClassLoader};
}
}
public static Collection<URL> forPackage(String name, ClassLoader... classLoaders) {
return forResource(resourceName(name), classLoaders);
}
/**
* Returns a distinct collection of URLs based on a resource.
* <p>
* This searches for the resource name, using {@link ClassLoader#getResources(String)}.
* For example, {@code forResource("test.properties")} effectively returns URLs from the
* classpath containing files of that name.
* <p>
* If the optional {@link ClassLoader}s are not specified, then both {@link #contextClassLoader()}
* and {@link #staticClassLoader()} are used for {@link ClassLoader#getResources(String)}.
* <p>
* The returned URLs retain the order of the given {@code classLoaders}.
*
* @return the collection of URLs, not null
*/
public static Collection<URL> forResource(String resourceName, ClassLoader... classLoaders) {
final List<URL> result = new ArrayList<>();
final ClassLoader[] loaders = classLoaders(classLoaders);
for (ClassLoader classLoader : loaders) {
try {
final Enumeration<URL> urls = classLoader.getResources(resourceName);
while (urls.hasMoreElements()) {
final URL url = urls.nextElement();
int index = url.toExternalForm().lastIndexOf(resourceName);
if (index != -1) {
// Add old url as contextUrl to support exotic url handlers
result.add(new URL(url, url.toExternalForm().substring(0, index)));
} else {
result.add(url);
}
}
} catch (IOException e) {
// ignored: this class loader could not enumerate the resource; try the next one
}
}
return distinctUrls(result);
}
/**
* Returns the URL that contains a {@code Class}.
* <p>
* This searches for the class using {@link ClassLoader#getResource(String)}.
* <p>
* If the optional {@link ClassLoader}s are not specified, then both {@link #contextClassLoader()}
* and {@link #staticClassLoader()} are used for {@link ClassLoader#getResource(String)}.
*
* @return the URL containing the class, null if not found
*/
public static URL forClass(Class<?> aClass, ClassLoader... classLoaders) {
final ClassLoader[] loaders = classLoaders(classLoaders);
final String resourceName = aClass.getName().replace(".", "/") + ".class";
for (ClassLoader classLoader : loaders) {
try {
final URL url = classLoader.getResource(resourceName);
if (url != null) {
final String normalizedUrl = url.toExternalForm().substring(0, url.toExternalForm().lastIndexOf(aClass.getPackage().getName().replace(".", "/")));
return new URL(normalizedUrl);
}
} catch (MalformedURLException e) {
// ignored: fall through and try the next class loader
}
}
return null;
}
/**
* Returns a distinct collection of URLs based on URLs derived from class loaders.
* <p>
* This finds the URLs using {@link URLClassLoader#getURLs()} using both
* {@link #contextClassLoader()} and {@link #staticClassLoader()}.
* <p>
* The returned URLs retain the order of the given {@code classLoaders}.
*
* @return the collection of URLs, not null
*/
public static Collection<URL> forClassLoader() {
return forClassLoader(classLoaders());
}
/**
* Returns a distinct collection of URLs based on URLs derived from class loaders.
* <p>
* This finds the URLs using {@link URLClassLoader#getURLs()} using the specified
* class loader, searching up the parent hierarchy.
* <p>
* If the optional {@link ClassLoader}s are not specified, then both {@link #contextClassLoader()}
* and {@link #staticClassLoader()} are used.
* <p>
* The returned URLs retain the order of the given {@code classLoaders}.
*
* @return the collection of URLs, not null
*/
public static Collection<URL> forClassLoader(ClassLoader... classLoaders) {
final Collection<URL> result = new ArrayList<>();
final ClassLoader[] loaders = classLoaders(classLoaders);
for (ClassLoader classLoader : loaders) {
while (classLoader != null) {
if (classLoader instanceof URLClassLoader) {
URL[] urls = ((URLClassLoader) classLoader).getURLs();
if (urls != null) {
result.addAll(Arrays.asList(urls));
}
}
classLoader = classLoader.getParent();
}
}
return distinctUrls(result);
}
private static String resourceName(String name) {
if (name != null) {
String resourceName = name.replace(".", "/");
resourceName = resourceName.replace("\\", "/");
if (resourceName.startsWith("/")) {
resourceName = resourceName.substring(1);
}
return resourceName;
}
return null;
}
//http://michaelscharf.blogspot.co.il/2006/11/javaneturlequals-and-hashcode-make.html
private static Collection<URL> distinctUrls(Collection<URL> urls) {
Map<String, URL> distinct = new LinkedHashMap<>(urls.size());
for (URL url : urls) {
distinct.put(url.toExternalForm(), url);
}
return distinct.values();
}
}
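
For example (a sketch; the package name is illustrative), the URLs that the plugin discovery scan walks come from calls like:

Collection<URL> packageUrls = ClasspathHelper.forPackage("org.logstash.plugins");
Collection<URL> allUrls = ClasspathHelper.forClassLoader(); // every URLClassLoader root on the classpath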

View file

@ -0,0 +1,41 @@
package org.logstash.plugins.discovery;
import com.google.common.base.Predicate;
import java.net.URL;
import java.util.Set;
import java.util.concurrent.ExecutorService;
public interface Configuration {
/**
* the scanner instances used for scanning different metadata
*/
Set<Scanner> getScanners();
/**
* the urls to be scanned
*/
Set<URL> getUrls();
/**
* the metadata adapter used to fetch metadata from classes
*/
@SuppressWarnings("RawUseOfParameterizedType")
MetadataAdapter getMetadataAdapter();
/**
* The fully qualified name filter used to decide which types are scanned.
*/
Predicate<String> getInputsFilter();
/**
* Executor service used to scan files; if null, scanning is done in a simple for loop.
*/
ExecutorService getExecutorService();
/**
* Class loaders that may be used for resolving methods/fields.
*/
ClassLoader[] getClassLoaders();
boolean shouldExpandSuperTypes();
}

View file

@ -0,0 +1,229 @@
package org.logstash.plugins.discovery;
import com.google.common.base.Predicate;
import com.google.common.collect.Lists;
import com.google.common.collect.ObjectArrays;
import com.google.common.collect.Sets;
import java.net.URL;
import java.util.Collection;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ExecutorService;
public final class ConfigurationBuilder implements Configuration {
private final Set<Scanner> scanners;
private Set<URL> urls;
/*lazy*/ protected MetadataAdapter metadataAdapter;
private Predicate<String> inputsFilter;
private ExecutorService executorService;
private ClassLoader[] classLoaders;
private boolean expandSuperTypes = true;
public ConfigurationBuilder() {
scanners = Sets.newHashSet(new TypeAnnotationsScanner(), new SubTypesScanner());
urls = Sets.newHashSet();
}
@SuppressWarnings("unchecked")
public static ConfigurationBuilder build(final Object... params) {
ConfigurationBuilder builder = new ConfigurationBuilder();
//flatten
List<Object> parameters = Lists.newArrayList();
if (params != null) {
for (Object param : params) {
if (param != null) {
if (param.getClass().isArray()) {
for (Object p : (Object[]) param)
if (p != null) {
parameters.add(p);
}
} else if (param instanceof Iterable) {
for (Object p : (Iterable) param)
if (p != null) {
parameters.add(p);
}
} else {
parameters.add(param);
}
}
}
}
List<ClassLoader> loaders = Lists.newArrayList();
for (Object param : parameters)
if (param instanceof ClassLoader) {
loaders.add((ClassLoader) param);
}
ClassLoader[] classLoaders = loaders.isEmpty() ? null : loaders.toArray(new ClassLoader[loaders.size()]);
FilterBuilder filter = new FilterBuilder();
List<Scanner> scanners = Lists.newArrayList();
for (Object param : parameters) {
if (param instanceof String) {
builder.addUrls(ClasspathHelper.forPackage((String) param, classLoaders));
filter.includePackage((String) param);
} else if (param instanceof Class) {
if (Scanner.class.isAssignableFrom((Class) param)) {
try {
builder.addScanners((Scanner) ((Class) param).newInstance());
} catch (Exception e) { /*fallback*/ }
}
builder.addUrls(ClasspathHelper.forClass((Class) param, classLoaders));
filter.includePackage((Class) param);
} else if (param instanceof Scanner) {
scanners.add((Scanner) param);
} else if (param instanceof URL) {
builder.addUrls((URL) param);
} else if (param instanceof ClassLoader) { /* already handled above */ } else if (param instanceof Predicate) {
filter.add((Predicate<String>) param);
} else if (param instanceof ExecutorService) {
builder.setExecutorService((ExecutorService) param);
}
}
if (builder.getUrls().isEmpty()) {
if (classLoaders != null) {
builder.addUrls(ClasspathHelper.forClassLoader(classLoaders)); //default urls getResources("")
} else {
builder.addUrls(ClasspathHelper.forClassLoader()); //default urls getResources("")
}
}
builder.filterInputsBy(filter);
if (!scanners.isEmpty()) {
builder.setScanners(scanners.toArray(new Scanner[scanners.size()]));
}
if (!loaders.isEmpty()) {
builder.addClassLoaders(loaders);
}
return builder;
}
@Override
public Set<Scanner> getScanners() {
return scanners;
}
/**
* Sets the scanner instances used for scanning different metadata.
*/
public ConfigurationBuilder setScanners(final Scanner... scanners) {
this.scanners.clear();
return addScanners(scanners);
}
/**
* Adds scanner instances used for scanning different metadata.
*/
public ConfigurationBuilder addScanners(final Scanner... scanners) {
this.scanners.addAll(Sets.newHashSet(scanners));
return this;
}
@Override
public Set<URL> getUrls() {
return urls;
}
/**
* Adds URLs to be scanned.
* <p>Use the {@link ClasspathHelper} convenience methods to get the relevant URLs.
*/
public ConfigurationBuilder addUrls(final Collection<URL> urls) {
this.urls.addAll(urls);
return this;
}
/**
* Adds URLs to be scanned.
* <p>Use the {@link ClasspathHelper} convenience methods to get the relevant URLs.
*/
public ConfigurationBuilder addUrls(final URL... urls) {
this.urls.addAll(Sets.newHashSet(urls));
return this;
}
/**
* Returns the metadata adapter.
* If the javassist library exists on the classpath, this method returns a {@link JavassistAdapter}; otherwise it defaults to a {@link JavaReflectionAdapter}.
* <p>The {@link JavassistAdapter} is preferred in terms of performance and class loading.
*/
@Override
public MetadataAdapter getMetadataAdapter() {
if (metadataAdapter != null) {
return metadataAdapter;
} else {
try {
return metadataAdapter = new JavassistAdapter();
} catch (Throwable e) {
return metadataAdapter = new JavaReflectionAdapter();
}
}
}
@Override
public Predicate<String> getInputsFilter() {
return inputsFilter;
}
/**
* Sets the input filter for all resources to be scanned.
* <p>Supply a {@link Predicate} or use the {@link FilterBuilder}.
*/
public ConfigurationBuilder filterInputsBy(Predicate<String> inputsFilter) {
this.inputsFilter = inputsFilter;
return this;
}
@Override
public ExecutorService getExecutorService() {
return executorService;
}
/**
* Sets the executor service used for scanning.
*/
public ConfigurationBuilder setExecutorService(ExecutorService executorService) {
this.executorService = executorService;
return this;
}
/**
* Gets the class loaders; they may be used for scanning or resolving methods/fields.
*/
@Override
public ClassLoader[] getClassLoaders() {
return classLoaders;
}
@Override
public boolean shouldExpandSuperTypes() {
return expandSuperTypes;
}
/**
* Adds class loaders; they may be used for resolving methods/fields.
*/
public ConfigurationBuilder addClassLoaders(ClassLoader... classLoaders) {
this.classLoaders = this.classLoaders == null ? classLoaders : ObjectArrays.concat(this.classLoaders, classLoaders, ClassLoader.class);
return this;
}
/**
* Adds class loaders; they may be used for resolving methods/fields.
*/
public ConfigurationBuilder addClassLoaders(Collection<ClassLoader> classLoaders) {
return addClassLoaders(classLoaders.toArray(new ClassLoader[classLoaders.size()]));
}
}
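
For reference, the varargs build(...) entry point above is roughly equivalent to this explicit form (a sketch; the package name is illustrative):

Configuration config = new ConfigurationBuilder()
.addUrls(ClasspathHelper.forPackage("org.logstash.plugins"))
.filterInputsBy(new FilterBuilder().includePackage("org.logstash.plugins"))
.setScanners(new TypeAnnotationsScanner(), new SubTypesScanner());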

View file

@ -0,0 +1,129 @@
package org.logstash.plugins.discovery;
import com.google.common.base.Joiner;
import com.google.common.base.Predicate;
import com.google.common.collect.Lists;
import java.util.List;
import java.util.regex.Pattern;
public class FilterBuilder implements Predicate<String> {
private final List<Predicate<String>> chain;
public FilterBuilder() {
chain = Lists.newArrayList();
}
/**
* exclude a regular expression
*/
public FilterBuilder exclude(final String regex) {
add(new FilterBuilder.Exclude(regex));
return this;
}
/**
* add a Predicate to the chain of predicates
*/
public FilterBuilder add(Predicate<String> filter) {
chain.add(filter);
return this;
}
/**
* include a package of a given class
*/
public FilterBuilder includePackage(final Class<?> aClass) {
return add(new FilterBuilder.Include(packageNameRegex(aClass)));
}
/**
* include packages of given prefixes
*/
public FilterBuilder includePackage(final String... prefixes) {
for (String prefix : prefixes) {
add(new FilterBuilder.Include(prefix(prefix)));
}
return this;
}
private static String packageNameRegex(Class<?> aClass) {
return prefix(aClass.getPackage().getName() + ".");
}
public static String prefix(String qualifiedName) {
return qualifiedName.replace(".", "\\.") + ".*";
}
@Override
public String toString() {
return Joiner.on(", ").join(chain);
}
public boolean apply(String regex) {
boolean accept = chain == null || chain.isEmpty() || chain.get(0) instanceof FilterBuilder.Exclude;
if (chain != null) {
for (Predicate<String> filter : chain) {
if (accept && filter instanceof FilterBuilder.Include) {
continue;
} //skip if this filter won't change
if (!accept && filter instanceof FilterBuilder.Exclude) {
continue;
}
accept = filter.apply(regex);
if (!accept && filter instanceof FilterBuilder.Exclude) {
break;
} //break on first exclusion
}
}
return accept;
}
public abstract static class Matcher implements Predicate<String> {
final Pattern pattern;
public Matcher(final String regex) {
pattern = Pattern.compile(regex);
}
public abstract boolean apply(String regex);
@Override
public String toString() {
return pattern.pattern();
}
}
public static class Include extends FilterBuilder.Matcher {
public Include(final String patternString) {
super(patternString);
}
@Override
public boolean apply(final String regex) {
return pattern.matcher(regex).matches();
}
@Override
public String toString() {
return "+" + super.toString();
}
}
public static class Exclude extends FilterBuilder.Matcher {
public Exclude(final String patternString) {
super(patternString);
}
@Override
public boolean apply(final String regex) {
return !pattern.matcher(regex).matches();
}
@Override
public String toString() {
return "-" + super.toString();
}
}
}
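
A usage sketch of the chain semantics in apply() above: includes admit matching names, excludes veto them, and evaluation stops at the first exclusion that rejects (package names are illustrative):

FilterBuilder filter = new FilterBuilder()
.includePackage("org.logstash.plugins")
.exclude("org\\.logstash\\.plugins\\.discovery\\..*");
filter.apply("org.logstash.plugins.codecs.Line"); // true: included and not excluded
filter.apply("org.logstash.plugins.discovery.Store"); // false: rejected by the exclude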

View file

@ -0,0 +1,70 @@
package org.logstash.plugins.discovery;
import java.lang.annotation.Annotation;
import java.util.ArrayList;
import java.util.List;
/** Metadata adapter backed by plain java.lang.reflect; it loads classes to read their metadata. */
public final class JavaReflectionAdapter implements MetadataAdapter<Class> {
public List<String> getClassAnnotationNames(Class aClass) {
return getAnnotationNames(aClass.getDeclaredAnnotations());
}
public Class getOfCreateClassObject(Vfs.File file) {
return getOfCreateClassObject(file, null);
}
public Class getOfCreateClassObject(Vfs.File file, ClassLoader... loaders) {
String name = file.getRelativePath().replace("/", ".").replace(".class", "");
return ReflectionUtils.forName(name, loaders);
}
public String getClassName(Class cls) {
return cls.getName();
}
public String getSuperclassName(Class cls) {
Class superclass = cls.getSuperclass();
return superclass != null ? superclass.getName() : "";
}
public List<String> getInterfacesNames(Class cls) {
Class[] classes = cls.getInterfaces();
List<String> names = new ArrayList<>(classes != null ? classes.length : 0);
if (classes != null) {
for (Class cls1 : classes) names.add(cls1.getName());
}
return names;
}
public boolean acceptsInput(String file) {
return file.endsWith(".class");
}
//
private List<String> getAnnotationNames(Annotation[] annotations) {
List<String> names = new ArrayList<>(annotations.length);
for (Annotation annotation : annotations) {
names.add(annotation.annotationType().getName());
}
return names;
}
public static String getName(Class type) {
if (type.isArray()) {
try {
Class cl = type;
int dim = 0;
while (cl.isArray()) {
dim++;
cl = cl.getComponentType();
}
return cl.getName() + Utils.repeat("[]", dim);
} catch (Throwable e) {
// ignored: fall through and return the raw type name
}
}
return type.getName();
}
}

View file

@ -0,0 +1,76 @@
package org.logstash.plugins.discovery;
import com.google.common.collect.Lists;
import java.io.BufferedInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Arrays;
import java.util.List;
import javassist.bytecode.AnnotationsAttribute;
import javassist.bytecode.ClassFile;
import javassist.bytecode.annotation.Annotation;
/**
* Metadata adapter that reads class metadata via javassist, without loading the classes.
*/
public class JavassistAdapter implements MetadataAdapter<ClassFile> {
/**
* Setting this to false results in returning only runtime-visible annotations ({@link java.lang.annotation.RetentionPolicy#RUNTIME}) from the relevant methods here.
*/
public static boolean includeInvisibleTag = true;
public List<String> getClassAnnotationNames(final ClassFile aClass) {
return getAnnotationNames((AnnotationsAttribute) aClass.getAttribute(AnnotationsAttribute.visibleTag),
includeInvisibleTag ? (AnnotationsAttribute) aClass.getAttribute(AnnotationsAttribute.invisibleTag) : null);
}
public ClassFile getOfCreateClassObject(final Vfs.File file) {
InputStream inputStream = null;
try {
inputStream = file.openInputStream();
DataInputStream dis = new DataInputStream(new BufferedInputStream(inputStream));
return new ClassFile(dis);
} catch (IOException e) {
throw new ReflectionsException("could not create class file from " + file.getName(), e);
} finally {
Utils.close(inputStream);
}
}
//
public String getClassName(final ClassFile cls) {
return cls.getName();
}
public String getSuperclassName(final ClassFile cls) {
return cls.getSuperclass();
}
public List<String> getInterfacesNames(final ClassFile cls) {
return Arrays.asList(cls.getInterfaces());
}
public boolean acceptsInput(String file) {
return file.endsWith(".class");
}
//
private List<String> getAnnotationNames(final AnnotationsAttribute... annotationsAttributes) {
List<String> result = Lists.newArrayList();
if (annotationsAttributes != null) {
for (AnnotationsAttribute annotationsAttribute : annotationsAttributes) {
if (annotationsAttribute != null) {
for (Annotation annotation : annotationsAttribute.getAnnotations()) {
result.add(annotation.getTypeName());
}
}
}
}
return result;
}
}

View file

@ -0,0 +1,23 @@
package org.logstash.plugins.discovery;
import java.util.List;
/**
* Abstraction over a source of class metadata, such as a javassist ClassFile or a java.lang.Class.
*/
public interface MetadataAdapter<C> {
//
String getClassName(final C cls);
String getSuperclassName(final C cls);
List<String> getInterfacesNames(final C cls);
List<String> getClassAnnotationNames(final C aClass);
C getOfCreateClassObject(Vfs.File file) throws Exception;
boolean acceptsInput(String file);
}

View file

@ -0,0 +1,110 @@
package org.logstash.plugins.discovery;
import org.logstash.plugins.PluginLookup;
import co.elastic.logstash.api.v0.Codec;
import co.elastic.logstash.api.Configuration;
import co.elastic.logstash.api.Context;
import co.elastic.logstash.api.v0.Filter;
import co.elastic.logstash.api.v0.Input;
import co.elastic.logstash.api.LogstashPlugin;
import co.elastic.logstash.api.v0.Output;
import java.lang.annotation.Annotation;
import java.lang.reflect.Constructor;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
/**
* Registry for built-in Java plugins (not installed via logstash-plugin)
*/
public final class PluginRegistry {
private static final Map<String, Class<Input>> INPUTS = new HashMap<>();
private static final Map<String, Class<Filter>> FILTERS = new HashMap<>();
private static final Map<String, Class<Output>> OUTPUTS = new HashMap<>();
private static final Map<String, Class<Codec>> CODECS = new HashMap<>();
static {
discoverPlugins();
}
private PluginRegistry() {} // utility class
@SuppressWarnings("unchecked")
private static void discoverPlugins() {
Reflections reflections = new Reflections("");
Set<Class<?>> annotated = reflections.getTypesAnnotatedWith(LogstashPlugin.class);
for (final Class<?> cls : annotated) {
for (final Annotation annotation : cls.getAnnotations()) {
if (annotation instanceof LogstashPlugin) {
String name = ((LogstashPlugin) annotation).name();
if (Filter.class.isAssignableFrom(cls)) {
FILTERS.put(name, (Class<Filter>) cls);
}
if (Output.class.isAssignableFrom(cls)) {
OUTPUTS.put(name, (Class<Output>) cls);
}
if (Input.class.isAssignableFrom(cls)) {
INPUTS.put(name, (Class<Input>) cls);
}
if (Codec.class.isAssignableFrom(cls)) {
CODECS.put(name, (Class<Codec>) cls);
}
break;
}
}
}
}
public static Class<?> getPluginClass(PluginLookup.PluginType pluginType, String pluginName) {
if (pluginType == PluginLookup.PluginType.FILTER) {
return getFilterClass(pluginName);
}
if (pluginType == PluginLookup.PluginType.OUTPUT) {
return getOutputClass(pluginName);
}
if (pluginType == PluginLookup.PluginType.INPUT) {
return getInputClass(pluginName);
}
if (pluginType == PluginLookup.PluginType.CODEC) {
return getCodecClass(pluginName);
}
throw new IllegalStateException("Unknown plugin type: " + pluginType);
}
public static Class<Input> getInputClass(String name) {
return INPUTS.get(name);
}
public static Class<Filter> getFilterClass(String name) {
return FILTERS.get(name);
}
public static Class<Codec> getCodecClass(String name) {
return CODECS.get(name);
}
public static Class<Output> getOutputClass(String name) {
return OUTPUTS.get(name);
}
public static Codec getCodec(String name, Configuration configuration, Context context) {
if (name != null && CODECS.containsKey(name)) {
return instantiateCodec(CODECS.get(name), configuration, context);
}
return null;
}
private static Codec instantiateCodec(Class clazz, Configuration configuration, Context context) {
try {
Constructor<Codec> constructor = clazz.getConstructor(Configuration.class, Context.class);
return constructor.newInstance(configuration, context);
} catch (Exception e) {
throw new IllegalStateException("Unable to instantiate codec", e);
}
}
}
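
A usage sketch (the empty settings map is illustrative), resolving and instantiating the built-in java-line codec by name:

Codec codec = PluginRegistry.getCodec("java-line", new Configuration(Collections.emptyMap()), new Context());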

View file

@ -0,0 +1,116 @@
package org.logstash.plugins.discovery;
import com.google.common.collect.Lists;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
@SuppressWarnings("unchecked")
public abstract class ReflectionUtils {
public static boolean includeObject;
/**
* get the immediate supertype and interfaces of the given {@code type}
*/
public static Set<Class<?>> getSuperTypes(Class<?> type) {
Set<Class<?>> result = new LinkedHashSet<>();
Class<?> superclass = type.getSuperclass();
Class<?>[] interfaces = type.getInterfaces();
if (superclass != null && (includeObject || !superclass.equals(Object.class))) {
result.add(superclass);
}
if (interfaces != null && interfaces.length > 0) {
result.addAll(Arrays.asList(interfaces));
}
return result;
}
//predicates
public static Class<?> forName(String typeName, ClassLoader... classLoaders) {
if (getPrimitiveNames().contains(typeName)) {
return getPrimitiveTypes().get(getPrimitiveNames().indexOf(typeName));
} else {
String type;
if (typeName.contains("[")) {
int i = typeName.indexOf("[");
type = typeName.substring(0, i);
String array = typeName.substring(i).replace("]", "");
if (getPrimitiveNames().contains(type)) {
type = getPrimitiveDescriptors().get(getPrimitiveNames().indexOf(type));
} else {
type = "L" + type + ";";
}
type = array + type;
} else {
type = typeName;
}
List<ReflectionsException> reflectionsExceptions = Lists.newArrayList();
for (ClassLoader classLoader : ClasspathHelper.classLoaders(classLoaders)) {
if (type.contains("[")) {
try {
return Class.forName(type, false, classLoader);
} catch (Throwable e) {
reflectionsExceptions.add(new ReflectionsException("could not get type for name " + typeName, e));
}
}
try {
return classLoader.loadClass(type);
} catch (Throwable e) {
reflectionsExceptions.add(new ReflectionsException("could not get type for name " + typeName, e));
}
}
return null;
}
}
/**
* Tries to resolve all given string representations of types to a list of Java types.
*/
public static <T> List<Class<? extends T>> forNames(final Iterable<String> classes, ClassLoader... classLoaders) {
List<Class<? extends T>> result = new ArrayList<>();
for (String className : classes) {
Class<?> type = forName(className, classLoaders);
if (type != null) {
result.add((Class<? extends T>) type);
}
}
return result;
}
//
private static List<String> primitiveNames;
private static List<Class> primitiveTypes;
private static List<String> primitiveDescriptors;
private static void initPrimitives() {
if (primitiveNames == null) {
primitiveNames = Lists.newArrayList("boolean", "char", "byte", "short", "int", "long", "float", "double", "void");
primitiveTypes = Lists.newArrayList(boolean.class, char.class, byte.class, short.class, int.class, long.class, float.class, double.class, void.class);
primitiveDescriptors = Lists.newArrayList("Z", "C", "B", "S", "I", "J", "F", "D", "V");
}
}
private static List<String> getPrimitiveNames() {
initPrimitives();
return primitiveNames;
}
private static List<Class> getPrimitiveTypes() {
initPrimitives();
return primitiveTypes;
}
private static List<String> getPrimitiveDescriptors() {
initPrimitives();
return primitiveDescriptors;
}
}

View file

@ -0,0 +1,155 @@
package org.logstash.plugins.discovery;
import com.google.common.base.Predicate;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Multimap;
import com.google.common.collect.Sets;
import java.lang.annotation.Annotation;
import java.lang.annotation.Inherited;
import java.net.URL;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
public class Reflections {
protected final Configuration configuration;
protected Store store;
public Reflections(final Configuration configuration) {
this.configuration = configuration;
store = new Store(configuration);
if (configuration.getScanners() != null && !configuration.getScanners().isEmpty()) {
//inject to scanners
for (Scanner scanner : configuration.getScanners()) {
scanner.setConfiguration(configuration);
scanner.setStore(store.getOrCreate(scanner.getClass().getSimpleName()));
}
scan();
if (configuration.shouldExpandSuperTypes()) {
expandSuperTypes();
}
}
}
public Reflections(final String prefix, final Scanner... scanners) {
this((Object) prefix, scanners);
}
public Reflections(final Object... params) {
this(ConfigurationBuilder.build(params));
}
//
protected void scan() {
if (configuration.getUrls() == null || configuration.getUrls().isEmpty()) {
return;
}
ExecutorService executorService = configuration.getExecutorService();
List<Future<?>> futures = Lists.newArrayList();
for (final URL url : configuration.getUrls()) {
try {
if (executorService != null) {
futures.add(executorService.submit(() -> scan(url)));
} else {
scan(url);
}
} catch (ReflectionsException e) {
// ignored: skip URLs that cannot be scanned
}
}
if (executorService != null) {
for (Future future : futures) {
try {
future.get();
} catch (Exception e) {
throw new RuntimeException(e);
}
}
}
if (executorService != null) {
executorService.shutdown();
}
}
protected void scan(URL url) {
Vfs.Dir dir = Vfs.fromURL(url);
try {
for (final Vfs.File file : dir.getFiles()) {
// scan if inputs filter accepts file relative path or fqn
Predicate<String> inputsFilter = configuration.getInputsFilter();
String path = file.getRelativePath();
String fqn = path.replace('/', '.');
if (inputsFilter == null || inputsFilter.apply(path) || inputsFilter.apply(fqn)) {
Object classObject = null;
for (Scanner scanner : configuration.getScanners()) {
try {
if (scanner.acceptsInput(path) || scanner.acceptResult(fqn)) {
classObject = scanner.scan(file, classObject);
}
} catch (Exception e) {
// ignored: a failing scanner must not abort the scan of other files
}
}
}
}
} finally {
dir.close();
}
}
public void expandSuperTypes() {
if (store.keySet().contains(index(SubTypesScanner.class))) {
Multimap<String, String> mmap = store.get(index(SubTypesScanner.class));
Sets.SetView<String> keys = Sets.difference(mmap.keySet(), Sets.newHashSet(mmap.values()));
Multimap<String, String> expand = HashMultimap.create();
for (String key : keys) {
final Class<?> type = ReflectionUtils.forName(key);
if (type != null) {
expandSupertypes(expand, key, type);
}
}
mmap.putAll(expand);
}
}
private void expandSupertypes(Multimap<String, String> mmap, String key, Class<?> type) {
for (Class<?> supertype : ReflectionUtils.getSuperTypes(type)) {
if (mmap.put(supertype.getName(), key)) {
expandSupertypes(mmap, supertype.getName(), supertype);
}
}
}
public Set<Class<?>> getTypesAnnotatedWith(final Class<? extends Annotation> annotation) {
return getTypesAnnotatedWith(annotation, false);
}
public Set<Class<?>> getTypesAnnotatedWith(final Class<? extends Annotation> annotation, boolean honorInherited) {
Iterable<String> annotated = store.get(index(TypeAnnotationsScanner.class), annotation.getName());
Iterable<String> classes = getAllAnnotated(annotated, annotation.isAnnotationPresent(Inherited.class), honorInherited);
return Sets.newHashSet(Iterables.concat(ReflectionUtils.forNames(annotated, loaders()), ReflectionUtils.forNames(classes, loaders())));
}
protected Iterable<String> getAllAnnotated(Iterable<String> annotated, boolean inherited, boolean honorInherited) {
Iterable<String> subTypes = Iterables.concat(annotated, store.getAll(index(TypeAnnotationsScanner.class), annotated));
return Iterables.concat(subTypes, store.getAll(index(SubTypesScanner.class), subTypes));
}
private static String index(Class<? extends Scanner> scannerClass) {
return scannerClass.getSimpleName();
}
private ClassLoader[] loaders() {
return configuration.getClassLoaders();
}
}
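For orientation, a minimal usage sketch of this vendored Reflections copy (not part of the commit; it assumes ConfigurationBuilder's defaults install TypeAnnotationsScanner and SubTypesScanner as in upstream Reflections):
package org.logstash.plugins.discovery;
import co.elastic.logstash.api.LogstashPlugin;
import java.util.Set;
// Hypothetical driver: find every @LogstashPlugin-annotated type under a package prefix.
public class DiscoverySketch {
    public static void main(final String[] args) {
        final Reflections reflections = new Reflections("org.logstash.plugins");
        final Set<Class<?>> plugins = reflections.getTypesAnnotatedWith(LogstashPlugin.class);
        for (final Class<?> plugin : plugins) {
            System.out.println(plugin.getName());
        }
    }
}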

View file

@@ -0,0 +1,14 @@
package org.logstash.plugins.discovery;
public class ReflectionsException extends RuntimeException {
public ReflectionsException(String message) {
super(message);
}
public ReflectionsException(String message, Throwable cause) {
super(message, cause);
}
}

View file

@@ -0,0 +1,24 @@
package org.logstash.plugins.discovery;
import com.google.common.base.Predicate;
import com.google.common.collect.Multimap;
/**
 * A metadata scanner: decides which inputs it accepts, scans them, and records
 * results into its store.
 */
public interface Scanner {
void setConfiguration(Configuration configuration);
Multimap<String, String> getStore();
void setStore(Multimap<String, String> store);
Scanner filterResultsBy(Predicate<String> filter);
boolean acceptsInput(String file);
Object scan(Vfs.File file, Object classObject);
boolean acceptResult(String fqn);
}

View file

@@ -0,0 +1,115 @@
package org.logstash.plugins.discovery;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Multimap;
import com.google.common.collect.Multimaps;
import com.google.common.collect.SetMultimap;
import com.google.common.collect.Sets;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
public final class Store {
private transient boolean concurrent;
private final Map<String, Multimap<String, String>> storeMap;
//used via reflection
@SuppressWarnings("UnusedDeclaration")
protected Store() {
storeMap = new HashMap<>();
concurrent = false;
}
public Store(Configuration configuration) {
storeMap = new HashMap<>();
concurrent = configuration.getExecutorService() != null;
}
/**
* return all indices
*/
public Set<String> keySet() {
return storeMap.keySet();
}
/**
* get or create the multimap object for the given {@code index}
*/
public Multimap<String, String> getOrCreate(String index) {
Multimap<String, String> mmap = storeMap.get(index);
if (mmap == null) {
SetMultimap<String, String> multimap =
Multimaps.newSetMultimap(new HashMap<>(),
() -> Sets.newSetFromMap(new ConcurrentHashMap<>()));
mmap = concurrent ? Multimaps.synchronizedSetMultimap(multimap) : multimap;
storeMap.put(index, mmap);
}
return mmap;
}
public Multimap<String, String> get(String index) {
Multimap<String, String> mmap = storeMap.get(index);
if (mmap == null) {
throw new ReflectionsException("Scanner " + index + " was not configured");
}
return mmap;
}
/**
* get the values stored for the given {@code index} and {@code keys}
*/
public Iterable<String> get(String index, String... keys) {
return get(index, Arrays.asList(keys));
}
/**
* get the values stored for the given {@code index} and {@code keys}
*/
public Iterable<String> get(String index, Iterable<String> keys) {
Multimap<String, String> mmap = get(index);
IterableChain<String> result = new IterableChain<>();
for (String key : keys) {
result.addAll(mmap.get(key));
}
return result;
}
/**
* recursively get the values stored for the given {@code index} and {@code keys}, including keys
*/
private Iterable<String> getAllIncluding(String index, Iterable<String> keys, IterableChain<String> result) {
result.addAll(keys);
for (String key : keys) {
Iterable<String> values = get(index, key);
if (values.iterator().hasNext()) {
getAllIncluding(index, values, result);
}
}
return result;
}
/**
* recursively get the values stored for the given {@code index} and {@code keys}, not including keys
*/
public Iterable<String> getAll(String index, Iterable<String> keys) {
return getAllIncluding(index, get(index, keys), new IterableChain<>());
}
private static class IterableChain<T> implements Iterable<T> {
private final List<Iterable<T>> chain = Lists.newArrayList();
private void addAll(Iterable<T> iterable) {
chain.add(iterable);
}
public Iterator<T> iterator() {
return Iterables.concat(chain).iterator();
}
}
}
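A minimal sketch of the transitive lookup above (illustrative, not part of the commit; it lives in the same package so the protected no-arg constructor is visible, and com.example.MyList is a made-up name):
package org.logstash.plugins.discovery;
import java.util.Collections;
public class StoreSketch {
    public static void main(final String[] args) {
        final Store store = new Store();
        // simulate what SubTypesScanner would record
        store.getOrCreate("SubTypesScanner").put("java.util.List", "java.util.ArrayList");
        store.getOrCreate("SubTypesScanner").put("java.util.ArrayList", "com.example.MyList");
        // getAll walks the index recursively: List -> ArrayList -> MyList
        for (final String type : store.getAll("SubTypesScanner", Collections.singleton("java.util.List"))) {
            System.out.println(type); // prints java.util.ArrayList, then com.example.MyList
        }
    }
}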

View file

@@ -0,0 +1,36 @@
package org.logstash.plugins.discovery;
import java.util.List;
/** scans for superclass and interfaces of a class, allowing a reverse lookup for subtypes */
public class SubTypesScanner extends AbstractScanner {
    /** Creates a new SubTypesScanner that excludes direct Object subtypes. */
public SubTypesScanner() {
this(true); //exclude direct Object subtypes by default
}
    /** Creates a new SubTypesScanner.
     * @param excludeObjectClass if false, include direct {@link Object} subtypes in results. */
public SubTypesScanner(boolean excludeObjectClass) {
if (excludeObjectClass) {
filterResultsBy(new FilterBuilder().exclude(Object.class.getName())); //exclude direct Object subtypes
}
}
@SuppressWarnings({"unchecked"})
public void scan(final Object cls) {
String className = getMetadataAdapter().getClassName(cls);
String superclass = getMetadataAdapter().getSuperclassName(cls);
if (acceptResult(superclass)) {
getStore().put(superclass, className);
}
for (String anInterface : (List<String>) getMetadataAdapter().getInterfacesNames(cls)) {
if (acceptResult(anInterface)) {
getStore().put(anInterface, className);
}
}
}
}
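To make the store shape concrete (illustrative names, not from the commit):
// given: class Foo extends Bar implements Baz {}
// SubTypesScanner records reverse-lookup entries:
//   "Bar" -> "Foo"
//   "Baz" -> "Foo"
// direct Object subtypes are dropped by the default FilterBuilder exclusion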

View file

@@ -0,0 +1,26 @@
package org.logstash.plugins.discovery;
import java.lang.annotation.Inherited;
import java.util.List;
/**
 * scans a class's runtime-visible annotations, i.e. those declared with
 * {@code @Retention(RetentionPolicy.RUNTIME)}
 */
@SuppressWarnings("unchecked")
public class TypeAnnotationsScanner extends AbstractScanner {
@Override
public void scan(final Object cls) {
final String className = getMetadataAdapter().getClassName(cls);
for (String annotationType : (List<String>) getMetadataAdapter().getClassAnnotationNames(cls)) {
if (acceptResult(annotationType) ||
annotationType.equals(Inherited.class.getName())) { //as an exception, accept Inherited as well
getStore().put(annotationType, className);
}
}
}
}
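Correspondingly (illustrative, not from the commit):
// given: @LogstashPlugin(name = "example") public class ExamplePlugin {}
// TypeAnnotationsScanner records:
//   "co.elastic.logstash.api.LogstashPlugin" -> "com.example.ExamplePlugin"
// java.lang.annotation.Inherited is always recorded so that inherited
// annotations can be resolved during lookup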

View file

@@ -0,0 +1,82 @@
package org.logstash.plugins.discovery;
import com.google.common.base.Joiner;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
* a garbage can of convenient methods
*/
public abstract class Utils {
public static String repeat(String string, int times) {
StringBuilder sb = new StringBuilder();
for (int i = 0; i < times; i++) {
sb.append(string);
}
return sb.toString();
}
/**
* isEmpty compatible with Java 5
*/
public static boolean isEmpty(String s) {
return s == null || s.length() == 0;
}
public static boolean isEmpty(Object[] objects) {
return objects == null || objects.length == 0;
}
public static void close(InputStream closeable) {
try {
if (closeable != null) {
closeable.close();
}
        } catch (IOException e) {
            // best-effort close; ignore failures
        }
}
public static String name(Class type) {
if (!type.isArray()) {
return type.getName();
} else {
int dim = 0;
while (type.isArray()) {
dim++;
type = type.getComponentType();
}
return type.getName() + repeat("[]", dim);
}
}
public static List<String> names(Iterable<Class<?>> types) {
List<String> result = new ArrayList<>();
for (Class<?> type : types) result.add(name(type));
return result;
}
public static List<String> names(Class<?>... types) {
return names(Arrays.asList(types));
}
public static String name(Constructor constructor) {
return constructor.getName() + "." + "<init>" + "(" + Joiner.on(", ").join(names(constructor.getParameterTypes())) + ")";
}
public static String name(Method method) {
return method.getDeclaringClass().getName() + "." + method.getName() + "(" + Joiner.on(", ").join(names(method.getParameterTypes())) + ")";
}
public static String name(Field field) {
return field.getDeclaringClass().getName() + "." + field.getName();
}
}
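Expected renderings of the name helpers, for orientation (illustrative values, not from the commit):
// Utils.name(String[][].class) -> "java.lang.String[][]"
// Utils.name(int[].class)      -> "int[]"
// Utils.name(method)           -> "com.example.Owner.run(java.lang.String, int)"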

View file

@@ -0,0 +1,630 @@
package org.logstash.plugins.discovery;
import com.google.common.base.Predicate;
import com.google.common.collect.AbstractIterator;
import com.google.common.collect.Lists;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.net.JarURLConnection;
import java.net.MalformedURLException;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLConnection;
import java.net.URLDecoder;
import java.util.Collections;
import java.util.Enumeration;
import java.util.List;
import java.util.Stack;
import java.util.jar.JarFile;
import java.util.jar.JarInputStream;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.zip.ZipEntry;
public abstract class Vfs {
private static List<Vfs.UrlType> defaultUrlTypes = Lists.newArrayList(Vfs.DefaultUrlTypes.values());
/**
* an abstract vfs dir
*/
public interface Dir {
String getPath();
Iterable<Vfs.File> getFiles();
void close();
}
/**
* an abstract vfs file
*/
public interface File {
String getName();
String getRelativePath();
InputStream openInputStream() throws IOException;
}
/**
* a matcher and factory for a url
*/
public interface UrlType {
boolean matches(URL url) throws Exception;
Vfs.Dir createDir(URL url) throws Exception;
}
/**
* tries to create a Dir from the given url, using the defaultUrlTypes
*/
public static Vfs.Dir fromURL(final URL url) {
return fromURL(url, defaultUrlTypes);
}
/**
* tries to create a Dir from the given url, using the given urlTypes
*/
public static Vfs.Dir fromURL(final URL url, final List<Vfs.UrlType> urlTypes) {
for (final Vfs.UrlType type : urlTypes) {
try {
if (type.matches(url)) {
final Vfs.Dir dir = type.createDir(url);
if (dir != null) {
return dir;
}
}
            } catch (final Throwable e) {
                // this UrlType cannot handle the URL; try the next one
            }
}
throw new ReflectionsException("could not create Vfs.Dir from url, no matching UrlType was found [" + url.toExternalForm() + "]\n" +
"either use fromURL(final URL url, final List<UrlType> urlTypes) or " +
"use the static setDefaultURLTypes(final List<UrlType> urlTypes) or addDefaultURLTypes(UrlType urlType) " +
"with your specialized UrlType.");
}
/**
* try to get {@link Vfs.File} from url
*/
public static java.io.File getFile(final URL url) {
java.io.File file;
String path;
try {
path = url.toURI().getSchemeSpecificPart();
if ((file = new java.io.File(path)).exists()) {
return file;
}
        } catch (final URISyntaxException e) {
            // fall through to the URL-decoding strategies below
        }
try {
path = URLDecoder.decode(url.getPath(), "UTF-8");
if (path.contains(".jar!")) {
path = path.substring(0, path.lastIndexOf(".jar!") + ".jar".length());
}
if ((file = new java.io.File(path)).exists()) {
return file;
}
        } catch (final UnsupportedEncodingException e) {
            // fall through to the external-form parsing below
        }
try {
path = url.toExternalForm();
if (path.startsWith("jar:")) {
path = path.substring("jar:".length());
}
if (path.startsWith("wsjar:")) {
path = path.substring("wsjar:".length());
}
if (path.startsWith("file:")) {
path = path.substring("file:".length());
}
if (path.contains(".jar!")) {
path = path.substring(0, path.indexOf(".jar!") + ".jar".length());
}
if ((file = new java.io.File(path)).exists()) {
return file;
}
path = path.replace("%20", " ");
if ((file = new java.io.File(path)).exists()) {
return file;
}
        } catch (final Exception e) {
            // no strategy matched; return null below
        }
return null;
}
private static boolean hasJarFileInPath(final URL url) {
return url.toExternalForm().matches(".*\\.jar(\\!.*|$)");
}
public enum DefaultUrlTypes implements Vfs.UrlType {
jarFile {
@Override
public boolean matches(final URL url) {
return url.getProtocol().equals("file") && hasJarFileInPath(url);
}
@Override
public Vfs.Dir createDir(final URL url) throws Exception {
return new Vfs.ZipDir(new JarFile(getFile(url)));
}
},
jarUrl {
@Override
public boolean matches(final URL url) {
return "jar".equals(url.getProtocol()) || "zip".equals(url.getProtocol()) || "wsjar".equals(url.getProtocol());
}
@Override
public Vfs.Dir createDir(final URL url) throws Exception {
try {
final URLConnection urlConnection = url.openConnection();
if (urlConnection instanceof JarURLConnection) {
return new Vfs.ZipDir(((JarURLConnection) urlConnection).getJarFile());
}
} catch (final Throwable e) { /*fallback*/ }
final java.io.File file = getFile(url);
if (file != null) {
return new Vfs.ZipDir(new JarFile(file));
}
return null;
}
},
directory {
@Override
public boolean matches(final URL url) {
if (url.getProtocol().equals("file") && !hasJarFileInPath(url)) {
final java.io.File file = getFile(url);
return file != null && file.isDirectory();
} else {
return false;
}
}
@Override
public Vfs.Dir createDir(final URL url) {
return new Vfs.SystemDir(getFile(url));
}
},
jboss_vfs {
@Override
public boolean matches(final URL url) {
return url.getProtocol().equals("vfs");
}
@Override
public Vfs.Dir createDir(final URL url) throws Exception {
final Object content = url.openConnection().getContent();
final Class<?> virtualFile = ClasspathHelper.contextClassLoader().loadClass("org.jboss.vfs.VirtualFile");
final java.io.File physicalFile = (java.io.File) virtualFile.getMethod("getPhysicalFile").invoke(content);
final String name = (String) virtualFile.getMethod("getName").invoke(content);
java.io.File file = new java.io.File(physicalFile.getParentFile(), name);
if (!file.exists() || !file.canRead()) {
file = physicalFile;
}
return file.isDirectory() ? new Vfs.SystemDir(file) : new Vfs.ZipDir(new JarFile(file));
}
},
jboss_vfsfile {
@Override
public boolean matches(final URL url) {
return "vfszip".equals(url.getProtocol()) || "vfsfile".equals(url.getProtocol());
}
@Override
public Vfs.Dir createDir(final URL url) {
return new Vfs.UrlTypeVFS().createDir(url);
}
},
bundle {
@Override
public boolean matches(final URL url) {
return url.getProtocol().startsWith("bundle");
}
@Override
public Vfs.Dir createDir(final URL url) throws Exception {
return fromURL((URL) ClasspathHelper.contextClassLoader().
loadClass("org.eclipse.core.runtime.FileLocator").getMethod("resolve", URL.class).invoke(null, url));
}
},
jarInputStream {
@Override
public boolean matches(final URL url) {
return url.toExternalForm().contains(".jar");
}
@Override
public Vfs.Dir createDir(final URL url) {
return new Vfs.JarInputDir(url);
}
}
}
private static final class JarInputDir implements Vfs.Dir {
private final URL url;
JarInputStream jarInputStream;
long cursor;
long nextCursor;
public JarInputDir(final URL url) {
this.url = url;
}
@Override
public String getPath() {
return url.getPath();
}
@Override
public Iterable<Vfs.File> getFiles() {
return () -> new AbstractIterator<Vfs.File>() {
{
try {
jarInputStream = new JarInputStream(url.openConnection().getInputStream());
} catch (final Exception e) {
throw new ReflectionsException("Could not open url connection", e);
}
}
@Override
protected Vfs.File computeNext() {
while (true) {
try {
final ZipEntry entry = jarInputStream.getNextJarEntry();
if (entry == null) {
return endOfData();
}
long size = entry.getSize();
if (size < 0) {
                            size = 0xFFFFFFFFL + size; // JDK-6916399
}
nextCursor += size;
if (!entry.isDirectory()) {
return new Vfs.JarInputFile(entry, Vfs.JarInputDir.this, cursor, nextCursor);
}
} catch (final IOException e) {
throw new ReflectionsException("could not get next zip entry", e);
}
}
}
};
}
@Override
public void close() {
Utils.close(jarInputStream);
}
}
public static class JarInputFile implements Vfs.File {
private final ZipEntry entry;
private final Vfs.JarInputDir jarInputDir;
private final long fromIndex;
private final long endIndex;
public JarInputFile(final ZipEntry entry, final Vfs.JarInputDir jarInputDir, final long cursor, final long nextCursor) {
this.entry = entry;
this.jarInputDir = jarInputDir;
fromIndex = cursor;
endIndex = nextCursor;
}
@Override
public String getName() {
final String name = entry.getName();
return name.substring(name.lastIndexOf("/") + 1);
}
@Override
public String getRelativePath() {
return entry.getName();
}
@Override
public InputStream openInputStream() {
return new InputStream() {
@Override
public int read() throws IOException {
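                    // serve bytes only while the shared JarInputStream cursor is
                    // within this entry's [fromIndex, endIndex] window; outside
                    // that window, report end-of-stream for this file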
if (jarInputDir.cursor >= fromIndex && jarInputDir.cursor <= endIndex) {
final int read = jarInputDir.jarInputStream.read();
jarInputDir.cursor++;
return read;
} else {
return -1;
}
}
};
}
}
public static final class ZipDir implements Vfs.Dir {
final java.util.zip.ZipFile jarFile;
public ZipDir(final JarFile jarFile) {
this.jarFile = jarFile;
}
@Override
public String getPath() {
return jarFile.getName();
}
@Override
public Iterable<Vfs.File> getFiles() {
return () -> new AbstractIterator<Vfs.File>() {
final Enumeration<? extends ZipEntry> entries = jarFile.entries();
@Override
protected Vfs.File computeNext() {
while (entries.hasMoreElements()) {
final ZipEntry entry = entries.nextElement();
if (!entry.isDirectory()) {
return new Vfs.ZipFile(Vfs.ZipDir.this, entry);
}
}
return endOfData();
}
};
}
@Override
public void close() {
try {
jarFile.close();
            } catch (final IOException e) {
                // ignore failure to close the zip
            }
}
@Override
public String toString() {
return jarFile.getName();
}
}
public static final class ZipFile implements Vfs.File {
private final Vfs.ZipDir root;
private final ZipEntry entry;
public ZipFile(final Vfs.ZipDir root, final ZipEntry entry) {
this.root = root;
this.entry = entry;
}
@Override
public String getName() {
final String name = entry.getName();
return name.substring(name.lastIndexOf("/") + 1);
}
@Override
public String getRelativePath() {
return entry.getName();
}
@Override
public InputStream openInputStream() throws IOException {
return root.jarFile.getInputStream(entry);
}
@Override
public String toString() {
return root.getPath() + "!" + java.io.File.separatorChar + entry.toString();
}
}
public static final class SystemDir implements Vfs.Dir {
private final java.io.File file;
public SystemDir(final java.io.File file) {
if (file != null && (!file.isDirectory() || !file.canRead())) {
throw new RuntimeException("cannot use dir " + file);
}
this.file = file;
}
@Override
public String getPath() {
if (file == null) {
return "/NO-SUCH-DIRECTORY/";
}
return file.getPath().replace("\\", "/");
}
@Override
public Iterable<Vfs.File> getFiles() {
if (file == null || !file.exists()) {
return Collections.emptyList();
}
return () -> new AbstractIterator<Vfs.File>() {
final Stack<java.io.File> stack = new Stack<>();
{
stack.addAll(listFiles(file));
}
@Override
protected Vfs.File computeNext() {
while (!stack.isEmpty()) {
final java.io.File file = stack.pop();
if (file.isDirectory()) {
stack.addAll(listFiles(file));
} else {
return new Vfs.SystemFile(Vfs.SystemDir.this, file);
}
}
return endOfData();
}
};
}
private static List<java.io.File> listFiles(final java.io.File file) {
final java.io.File[] files = file.listFiles();
if (files != null) {
return Lists.newArrayList(files);
} else {
return Lists.newArrayList();
}
}
@Override
public void close() {
}
@Override
public String toString() {
return getPath();
}
}
private static final class UrlTypeVFS implements Vfs.UrlType {
public static final String[] REPLACE_EXTENSION = {".ear/", ".jar/", ".war/", ".sar/", ".har/", ".par/"};
final String VFSZIP = "vfszip";
final String VFSFILE = "vfsfile";
@Override
public boolean matches(final URL url) {
return VFSZIP.equals(url.getProtocol()) || VFSFILE.equals(url.getProtocol());
}
@Override
public Vfs.Dir createDir(final URL url) {
try {
final URL adaptedUrl = adaptURL(url);
return new Vfs.ZipDir(new JarFile(adaptedUrl.getFile()));
} catch (final Exception e) {
try {
return new Vfs.ZipDir(new JarFile(url.getFile()));
                } catch (final IOException e1) {
                    // fall through and return null
                }
}
return null;
}
public URL adaptURL(final URL url) throws MalformedURLException {
if (VFSZIP.equals(url.getProtocol())) {
return replaceZipSeparators(url.getPath(), realFile);
} else if (VFSFILE.equals(url.getProtocol())) {
return new URL(url.toString().replace(VFSFILE, "file"));
} else {
return url;
}
}
URL replaceZipSeparators(final String path, final Predicate<java.io.File> acceptFile)
throws MalformedURLException {
int pos = 0;
while (pos != -1) {
                pos = findFirstMatchOfDeployableExtension(path, pos);
if (pos > 0) {
final java.io.File file = new java.io.File(path.substring(0, pos - 1));
if (acceptFile.apply(file)) {
return replaceZipSeparatorStartingFrom(path, pos);
}
}
}
throw new ReflectionsException("Unable to identify the real zip file in path '" + path + "'.");
}
        int findFirstMatchOfDeployableExtension(final String path, final int pos) {
final Pattern p = Pattern.compile("\\.[ejprw]ar/");
final Matcher m = p.matcher(path);
if (m.find(pos)) {
return m.end();
} else {
return -1;
}
}
Predicate<java.io.File> realFile = file -> file.exists() && file.isFile();
URL replaceZipSeparatorStartingFrom(final String path, final int pos)
throws MalformedURLException {
final String zipFile = path.substring(0, pos - 1);
String zipPath = path.substring(pos);
int numSubs = 1;
for (final String ext : REPLACE_EXTENSION) {
while (zipPath.contains(ext)) {
zipPath = zipPath.replace(ext, ext.substring(0, 4) + "!");
numSubs++;
}
}
String prefix = "";
for (int i = 0; i < numSubs; i++) {
prefix += "zip:";
}
if (zipPath.trim().length() == 0) {
return new URL(prefix + "/" + zipFile);
} else {
return new URL(prefix + "/" + zipFile + "!" + zipPath);
}
}
}
private static final class SystemFile implements Vfs.File {
private final Vfs.SystemDir root;
private final java.io.File file;
public SystemFile(final Vfs.SystemDir root, final java.io.File file) {
this.root = root;
this.file = file;
}
@Override
public String getName() {
return file.getName();
}
@Override
public String getRelativePath() {
final String filepath = file.getPath().replace("\\", "/");
if (filepath.startsWith(root.getPath())) {
return filepath.substring(root.getPath().length() + 1);
}
return null; //should not get here
}
@Override
public InputStream openInputStream() {
try {
return new FileInputStream(file);
} catch (final FileNotFoundException e) {
throw new RuntimeException(e);
}
}
@Override
public String toString() {
return file.toString();
}
}
}
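A minimal sketch of driving the Vfs abstraction directly (not part of the commit; the jar path is hypothetical):
package org.logstash.plugins.discovery;
import java.net.URL;
public class VfsSketch {
    public static void main(final String[] args) throws Exception {
        final URL jarUrl = new URL("file:/tmp/example.jar"); // matched by DefaultUrlTypes.jarFile
        final Vfs.Dir dir = Vfs.fromURL(jarUrl);
        try {
            for (final Vfs.File file : dir.getFiles()) {
                System.out.println(file.getRelativePath());
            }
        } finally {
            dir.close();
        }
    }
}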

View file

@@ -0,0 +1,128 @@
package org.logstash.plugins.inputs;
import co.elastic.logstash.api.v0.Codec;
import co.elastic.logstash.api.Configuration;
import co.elastic.logstash.api.Context;
import co.elastic.logstash.api.v0.Input;
import co.elastic.logstash.api.LogstashPlugin;
import co.elastic.logstash.api.PluginConfigSpec;
import co.elastic.logstash.api.PluginHelper;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.logstash.execution.queue.QueueWriter;
import org.logstash.plugins.discovery.PluginRegistry;
import java.io.FileDescriptor;
import java.io.FileInputStream;
import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.nio.ByteBuffer;
import java.nio.channels.AsynchronousCloseException;
import java.nio.channels.FileChannel;
import java.util.Collection;
import java.util.Collections;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.LongAdder;
import java.util.function.Consumer;
@LogstashPlugin(name = "java-stdin")
public class Stdin implements Input, Consumer<Map<String, Object>> {
private static final Logger logger = LogManager.getLogger(Stdin.class);
public static final PluginConfigSpec<String> CODEC_CONFIG =
Configuration.stringSetting("codec", "java-line");
private static final int BUFFER_SIZE = 64 * 1024;
private final LongAdder eventCounter = new LongAdder();
private String hostname;
private Codec codec;
private volatile boolean stopRequested = false;
private final CountDownLatch isStopped = new CountDownLatch(1);
private FileChannel input;
private QueueWriter writer;
/**
 * Required constructor signature taking a {@link Configuration} and a {@link Context}.
*
* @param configuration Logstash Configuration
* @param context Logstash Context
*/
public Stdin(final Configuration configuration, final Context context) {
this(configuration, context, new FileInputStream(FileDescriptor.in).getChannel());
}
Stdin(final Configuration configuration, final Context context, FileChannel inputChannel) {
try {
hostname = InetAddress.getLocalHost().getHostName();
} catch (UnknownHostException e) {
hostname = "[unknownHost]";
}
String codecName = configuration.get(CODEC_CONFIG);
codec = PluginRegistry.getCodec(codecName, configuration, context);
if (codec == null) {
throw new IllegalStateException(String.format("Unable to obtain codec '%a'", codecName));
}
input = inputChannel;
}
@Override
public void start(QueueWriter writer) {
this.writer = writer;
final ByteBuffer buffer = ByteBuffer.allocateDirect(BUFFER_SIZE);
try {
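            // standard read/flip/decode/compact cycle: flip exposes the bytes just
            // read, the codec consumes complete events, and compact carries any
            // partial trailing bytes over to the next read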
while (!stopRequested && (input.read(buffer) > -1)) {
buffer.flip();
codec.decode(buffer, this);
buffer.compact();
}
} catch (AsynchronousCloseException e2) {
// do nothing -- this happens when stop is called during a pending read
logger.warn("Stop request interrupted pending read");
} catch (IOException e) {
stopRequested = true;
logger.error("Stopping stdin after read error", e);
throw new IllegalStateException(e);
} finally {
try {
input.close();
} catch (IOException e) {
// do nothing
}
buffer.flip();
codec.flush(buffer, this);
isStopped.countDown();
}
}
@Override
public void accept(Map<String, Object> event) {
event.putIfAbsent("hostname", hostname);
writer.push(event);
eventCounter.increment();
}
@Override
public void stop() {
stopRequested = true;
try {
input.close(); // interrupts any pending reads
} catch (IOException e) {
// do nothing
}
}
@Override
public void awaitStop() throws InterruptedException {
isStopped.await();
}
@Override
public Collection<PluginConfigSpec<?>> configSchema() {
return PluginHelper.commonInputOptions(Collections.singletonList(CODEC_CONFIG));
}
}

View file

@@ -0,0 +1,67 @@
package org.logstash.plugins.outputs;
import co.elastic.logstash.api.v0.Codec;
import org.logstash.Event;
import co.elastic.logstash.api.LogstashPlugin;
import co.elastic.logstash.api.Configuration;
import co.elastic.logstash.api.Context;
import co.elastic.logstash.api.v0.Output;
import co.elastic.logstash.api.PluginConfigSpec;
import org.logstash.plugins.discovery.PluginRegistry;
import java.io.OutputStream;
import java.util.Collection;
import java.util.Collections;
import java.util.concurrent.CountDownLatch;
@LogstashPlugin(name = "java-stdout")
public class Stdout implements Output {
public static final PluginConfigSpec<String> CODEC_CONFIG =
Configuration.stringSetting("codec", "java-line");
private Codec codec;
private OutputStream outputStream;
private final CountDownLatch done = new CountDownLatch(1);
/**
 * Required constructor signature taking a {@link Configuration} and a {@link Context}.
*
* @param configuration Logstash Configuration
* @param context Logstash Context
*/
public Stdout(final Configuration configuration, final Context context) {
this(configuration, context, System.out);
}
Stdout(final Configuration configuration, final Context context, OutputStream targetStream) {
this.outputStream = targetStream;
String codecName = configuration.get(CODEC_CONFIG);
codec = PluginRegistry.getCodec(codecName, configuration, context);
if (codec == null) {
throw new IllegalStateException(String.format("Unable to obtain codec '%a'", codecName));
}
}
@Override
public void output(final Collection<Event> events) {
for (Event e : events) {
codec.encode(e, outputStream);
}
}
@Override
public void stop() {
done.countDown();
}
@Override
public void awaitStop() throws InterruptedException {
done.await();
}
@Override
public Collection<PluginConfigSpec<?>> configSchema() {
return Collections.singletonList(CODEC_CONFIG);
}
}

View file

@@ -0,0 +1,94 @@
package co.elastic.logstash.api;
import org.junit.Assert;
import org.junit.Test;
import java.util.HashMap;
import java.util.Map;
public class ConfigurationTest {
private String stringKey = "string", numberKey = "number", booleanKey = "boolean";
private String stringValue = "stringValue";
private long longValue = 42L;
private boolean booleanValue = true;
private Configuration getTestConfiguration() {
Map<String, Object> configValues = new HashMap<>();
configValues.put(stringKey, stringValue);
configValues.put(numberKey, longValue);
configValues.put(booleanKey, booleanValue);
return new Configuration(configValues);
}
@Test
public void testConfiguration() {
Configuration config = getTestConfiguration();
PluginConfigSpec<String> stringConfig = new PluginConfigSpec<>(stringKey, String.class, "", false, false);
PluginConfigSpec<Long> numberConfig = new PluginConfigSpec<>(numberKey, Long.class, 0L, false, false);
PluginConfigSpec<Boolean> booleanConfig = new PluginConfigSpec<>(booleanKey, Boolean.class, false, false, false);
Assert.assertEquals(stringValue, config.get(stringConfig));
Assert.assertEquals(longValue, (long) config.get(numberConfig));
Assert.assertEquals(booleanValue, config.get(booleanConfig));
}
@Test
public void testDefaultValues() {
Configuration unsetConfig = new Configuration(new HashMap<>());
String defaultStringValue = "defaultStringValue";
long defaultLongValue = 43L;
boolean defaultBooleanValue = false;
PluginConfigSpec<String> stringConfig = new PluginConfigSpec<>(stringKey, String.class, defaultStringValue, false, false);
PluginConfigSpec<Long> numberConfig = new PluginConfigSpec<>(numberKey, Long.class, defaultLongValue, false, false);
PluginConfigSpec<Boolean> booleanConfig = new PluginConfigSpec<>(booleanKey, Boolean.class, defaultBooleanValue, false, false);
Assert.assertEquals(defaultStringValue, unsetConfig.get(stringConfig));
Assert.assertEquals(defaultLongValue, (long) unsetConfig.get(numberConfig));
Assert.assertEquals(defaultBooleanValue, unsetConfig.get(booleanConfig));
Configuration config = getTestConfiguration();
Assert.assertNotEquals(defaultStringValue, config.get(stringConfig));
Assert.assertNotEquals(defaultLongValue, (long) config.get(numberConfig));
Assert.assertNotEquals(defaultBooleanValue, config.get(booleanConfig));
}
@Test
public void testBrokenConfig() {
Configuration config = getTestConfiguration();
PluginConfigSpec<Long> brokenLongConfig = new PluginConfigSpec<>(stringKey, Long.class, 0L, false, false);
PluginConfigSpec<Boolean> brokenBooleanConfig = new PluginConfigSpec<>(numberKey, Boolean.class, false, false, false);
PluginConfigSpec<String> brokenStringConfig = new PluginConfigSpec<>(booleanKey, String.class, "", false, false);
try {
Long l = config.get(brokenLongConfig);
Assert.fail("Did not catch invalid config value type");
} catch (IllegalStateException e1) {
Assert.assertTrue(e1.getMessage().contains("incompatible with defined type"));
} catch (Exception e2) {
Assert.fail("Did not throw correct exception for invalid config value type");
}
try {
Boolean b = config.get(brokenBooleanConfig);
Assert.fail("Did not catch invalid config value type");
} catch (IllegalStateException e1) {
Assert.assertTrue(e1.getMessage().contains("incompatible with defined type"));
} catch (Exception e2) {
Assert.fail("Did not throw correct exception for invalid config value type");
}
try {
String s = config.get(brokenStringConfig);
Assert.fail("Did not catch invalid config value type");
} catch (IllegalStateException e1) {
Assert.assertTrue(e1.getMessage().contains("incompatible with defined type"));
} catch (Exception e2) {
Assert.fail("Did not throw correct exception for invalid config value type");
}
}
}

View file

@@ -1,6 +1,16 @@
package org.logstash.config.ir;
import com.google.common.base.Strings;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.LinkedTransferQueue;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Consumer;
import java.util.function.Supplier;
import org.hamcrest.CoreMatchers;
import org.hamcrest.MatcherAssert;
import org.jruby.RubyInteger;
@@ -14,21 +24,15 @@ import org.logstash.ConvertedMap;
import org.logstash.Event;
import org.logstash.RubyUtil;
import org.logstash.common.IncompleteSourceWithMetadataException;
import org.logstash.config.ir.compiler.AbstractFilterDelegatorExt;
import org.logstash.config.ir.compiler.AbstractOutputDelegatorExt;
import org.logstash.config.ir.compiler.FilterDelegatorExt;
import org.logstash.config.ir.compiler.RubyIntegration;
import org.logstash.config.ir.compiler.PluginFactory;
import org.logstash.ext.JrubyEventExtLibrary;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.LinkedTransferQueue;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Consumer;
import java.util.function.Supplier;
import co.elastic.logstash.api.Configuration;
import co.elastic.logstash.api.v0.Filter;
import co.elastic.logstash.api.v0.Input;
import co.elastic.logstash.api.Context;
/**
* Tests for {@link CompiledPipeline}.
@@ -413,9 +417,9 @@ public final class CompiledPipelineTest extends RubyEnvTestCase {
}
/**
* Configurable Mock {@link RubyIntegration.PluginFactory}
* Configurable Mock {@link PluginFactory}
*/
private static final class MockPluginFactory implements RubyIntegration.PluginFactory {
private static final class MockPluginFactory implements PluginFactory {
private final Map<String, Supplier<IRubyObject>> inputs;
@@ -434,26 +438,27 @@ public final class CompiledPipelineTest extends RubyEnvTestCase {
@Override
public IRubyObject buildInput(final RubyString name, final RubyInteger line,
final RubyInteger column, final IRubyObject args) {
final RubyInteger column, final IRubyObject args, Map<String, Object> pluginArgs) {
return setupPlugin(name, inputs);
}
@Override
public AbstractOutputDelegatorExt buildOutput(final RubyString name, final RubyInteger line,
final RubyInteger column, final IRubyObject args) {
final RubyInteger column, final IRubyObject args, Map<String, Object> pluginArgs) {
return PipelineTestUtil.buildOutput(setupPlugin(name, outputs));
}
@Override
public FilterDelegatorExt buildFilter(final RubyString name, final RubyInteger line,
final RubyInteger column, final IRubyObject args) {
public AbstractFilterDelegatorExt buildFilter(final RubyString name, final RubyInteger line,
final RubyInteger column, final IRubyObject args,
Map<String, Object> pluginArgs) {
return new FilterDelegatorExt(
RubyUtil.RUBY, RubyUtil.RUBY_OUTPUT_DELEGATOR_CLASS)
RubyUtil.RUBY, RubyUtil.FILTER_DELEGATOR_CLASS)
.initForTesting(setupPlugin(name, filters));
}
@Override
public IRubyObject buildCodec(final RubyString name, final IRubyObject args) {
public IRubyObject buildCodec(final RubyString name, final IRubyObject args, Map<String, Object> pluginArgs) {
throw new IllegalStateException("No codec setup expected in this test.");
}
@@ -467,5 +472,16 @@
}
return suppliers.get(name.asJavaString()).get();
}
@Override
public Input buildInput(final String name, final String id, final Configuration configuration, final Context context) {
return null;
}
@Override
public Filter buildFilter(final String name, final String id,
final Configuration configuration, final Context context) {
return null;
}
}
}

View file

@@ -0,0 +1,15 @@
package org.logstash.config.ir.compiler;
import org.junit.Test;
/**
* Tests for {@link PluginFactory.Default}.
*/
public final class PluginFactoryTest {
@Test
public void testBuildJavaFilter() throws Exception {
}
}

View file

@@ -0,0 +1,312 @@
package org.logstash.plugins.codecs;
import org.junit.Test;
import org.logstash.Event;
import co.elastic.logstash.api.Configuration;
import java.io.ByteArrayOutputStream;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Consumer;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
public class LineTest {
@Test
public void testSimpleDecode() {
String input = "abc";
testDecode(null, null, input, 0, 1, new String[]{input});
}
@Test
public void testDecodeDefaultDelimiter() {
String[] inputStrings = {"foo", "bar", "baz"};
String input = String.join(Line.DEFAULT_DELIMITER, inputStrings);
testDecode(null, null, input, inputStrings.length - 1, 1, inputStrings);
}
@Test
public void testDecodeCustomDelimiter() {
String delimiter = "z";
String[] inputStrings = {"foo", "bar", "bat"};
String input = String.join(delimiter, inputStrings);
testDecode(delimiter, null, input, inputStrings.length - 1, 1, inputStrings);
}
@Test
public void testDecodeWithTrailingDelimiter() {
String delimiter = "\n";
String[] inputs = {"foo", "bar", "baz"};
String input = String.join(delimiter, inputs) + delimiter;
testDecode(null, null, input, inputs.length, 0, inputs);
}
@Test
public void testSuccessiveDecodesWithTrailingDelimiter() {
// setup inputs
String delimiter = "\n";
String[] inputs = {"foo", "bar", "baz"};
String input = String.join(delimiter, inputs) + delimiter;
byte[] inputBytes = input.getBytes();
TestEventConsumer eventConsumer = new TestEventConsumer();
TestEventConsumer flushedEvents = new TestEventConsumer();
Line line = getLineCodec(null, null);
// first call to decode
ByteBuffer buffer = ByteBuffer.allocate(inputBytes.length * 3);
buffer.put(inputBytes);
buffer.flip();
line.decode(buffer, eventConsumer);
assertEquals(inputs.length, eventConsumer.events.size());
compareMessages(inputs, eventConsumer.events, flushedEvents.events);
        // second call to decode
eventConsumer.events.clear();
buffer.compact();
buffer.put(inputBytes);
buffer.flip();
line.decode(buffer, eventConsumer);
assertEquals(inputs.length, eventConsumer.events.size());
compareMessages(inputs, eventConsumer.events, flushedEvents.events);
buffer.compact();
buffer.flip();
line.flush(buffer, flushedEvents);
assertEquals(0, flushedEvents.events.size());
}
@Test
public void testDecodeOnDelimiterOnly() {
String delimiter = "z";
String input = "z";
testDecode(delimiter, null, input, 0, 0, new String[]{""});
}
@Test
public void testDecodeWithMulticharDelimiter() {
String delimiter = "xyz";
String[] inputs = {"a", "b", "c"};
String input = String.join(delimiter, inputs);
testDecode(delimiter, null, input, inputs.length - 1, 1, inputs);
}
@Test
public void testDecodeWithMulticharTrailingDelimiter() {
String delimiter = "xyz";
String[] inputs = {"foo", "bar", "baz"};
String input = String.join(delimiter, inputs) + delimiter;
testDecode(delimiter, null, input, inputs.length, 0, inputs);
}
@Test
public void testDecodeWithUtf8() {
String input = "München 安装中文输入法";
testDecode(null, null, input + Line.DEFAULT_DELIMITER, 1, 0, new String[]{input});
}
@Test
    public void testDecodeAcrossMultibyteCharBoundary() {
        final int BUFFER_SIZE = 12;
        int lastPos = 0;
        TestEventConsumer eventConsumer = new TestEventConsumer();
        String input = "安安安\n安安安\n安安安";
        byte[] bytes = input.getBytes();
        assertTrue(bytes.length > input.length());
        ByteBuffer b1 = ByteBuffer.allocate(BUFFER_SIZE);
        b1.put(bytes, lastPos, 12);
        b1.flip();
        Line line = getLineCodec(null, null);
        // first round: the buffer ends mid-character
        line.decode(b1, eventConsumer);
        b1.compact();
        int remaining = b1.remaining();
        lastPos += BUFFER_SIZE;
        b1.put(bytes, lastPos, remaining);
        b1.flip();
        // second round: the split character is now complete
        line.decode(b1, eventConsumer);
        b1.compact();
        remaining = b1.remaining();
        lastPos += remaining;
        b1.put(bytes, lastPos, bytes.length - lastPos);
        b1.flip();
        // final round plus a flush for the undelimited tail
        line.decode(b1, eventConsumer);
        b1.compact();
        b1.flip();
        line.flush(b1, eventConsumer);
    }
@Test
public void testFlush() {
String[] inputs = {"The", "quick", "brown", "fox", "jumps"};
String input = String.join(Line.DEFAULT_DELIMITER, inputs);
testDecode(null, null, input, inputs.length - 1, 1, inputs);
}
private void testDecode(String delimiter, String charset, String inputString, Integer expectedPreflushEvents, Integer expectedFlushEvents, String[] expectedMessages) {
Line line = getLineCodec(delimiter, charset);
byte[] inputBytes = inputString.getBytes();
TestEventConsumer eventConsumer = new TestEventConsumer();
ByteBuffer inputBuffer = ByteBuffer.wrap(inputBytes, 0, inputBytes.length);
line.decode(inputBuffer, eventConsumer);
if (expectedPreflushEvents != null) {
assertEquals(expectedPreflushEvents.intValue(), eventConsumer.events.size());
}
inputBuffer.compact();
inputBuffer.flip();
TestEventConsumer flushConsumer = new TestEventConsumer();
line.flush(inputBuffer, flushConsumer);
if (expectedFlushEvents != null) {
assertEquals(expectedFlushEvents.intValue(), flushConsumer.events.size());
}
compareMessages(expectedMessages, eventConsumer.events, flushConsumer.events);
}
private static void compareMessages(String[] expectedMessages, List<Map<String, Object>> events, List<Map<String, Object>> flushedEvents) {
if (expectedMessages != null) {
for (int k = 0; k < events.size(); k++) {
assertEquals(expectedMessages[k], events.get(k).get(Line.MESSAGE_FIELD));
}
for (int k = events.size(); k < (events.size() + flushedEvents.size()); k++) {
assertEquals(expectedMessages[k], flushedEvents.get(k - events.size()).get(Line.MESSAGE_FIELD));
}
}
}
private static Line getLineCodec(String delimiter, String charset) {
Map<String, Object> config = new HashMap<>();
if (delimiter != null) {
config.put("delimiter", delimiter);
}
if (charset != null) {
config.put("charset", charset);
}
return new Line(new Configuration(config), null);
}
@Test
public void testDecodeWithCharset() throws Exception {
TestEventConsumer flushConsumer = new TestEventConsumer();
// decode with cp-1252
Line cp1252decoder = new Line(new Configuration(Collections.singletonMap("charset", "cp1252")), null);
byte[] rightSingleQuoteInCp1252 = {(byte) 0x92};
ByteBuffer b1 = ByteBuffer.wrap(rightSingleQuoteInCp1252);
cp1252decoder.decode(b1, flushConsumer);
assertEquals(0, flushConsumer.events.size());
cp1252decoder.flush(b1, flushConsumer);
assertEquals(1, flushConsumer.events.size());
String fromCp1252 = (String) flushConsumer.events.get(0).get(Line.MESSAGE_FIELD);
// decode with UTF-8
flushConsumer.events.clear();
        Line utf8decoder = new Line(new Configuration(Collections.emptyMap()), null);
byte[] rightSingleQuoteInUtf8 = {(byte) 0xE2, (byte) 0x80, (byte) 0x99};
ByteBuffer b2 = ByteBuffer.wrap(rightSingleQuoteInUtf8);
utf8decoder.decode(b2, flushConsumer);
assertEquals(0, flushConsumer.events.size());
utf8decoder.flush(b2, flushConsumer);
assertEquals(1, flushConsumer.events.size());
String fromUtf8 = (String) flushConsumer.events.get(0).get(Line.MESSAGE_FIELD);
assertEquals(fromCp1252, fromUtf8);
}
@Test
public void testEncode() {
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
Line line = new Line(new Configuration(Collections.emptyMap()), null);
Event e = new Event();
e.setField("myfield1", "myvalue1");
e.setField("myfield2", 42L);
line.encode(e, outputStream);
e.setField("myfield1", "myvalue2");
e.setField("myfield2", 43L);
line.encode(e, outputStream);
String delimiter = Line.DEFAULT_DELIMITER;
String resultingString = outputStream.toString();
// first delimiter should occur at the halfway point of the string
assertEquals(resultingString.indexOf(delimiter), (resultingString.length() / 2) - delimiter.length());
// second delimiter should occur at end of string
assertEquals(resultingString.lastIndexOf(delimiter), resultingString.length() - delimiter.length());
}
@Test
public void testEncodeWithCustomDelimiter() {
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
String delimiter = "xyz";
Line line = new Line(new Configuration(Collections.singletonMap("delimiter", delimiter)), null);
Event e = new Event();
e.setField("myfield1", "myvalue1");
e.setField("myfield2", 42L);
line.encode(e, outputStream);
e.setField("myfield1", "myvalue2");
e.setField("myfield2", 43L);
line.encode(e, outputStream);
String resultingString = outputStream.toString();
// first delimiter should occur at the halfway point of the string
assertEquals(resultingString.indexOf(delimiter), (resultingString.length() / 2) - delimiter.length());
// second delimiter should occur at end of string
assertEquals(resultingString.lastIndexOf(delimiter), resultingString.length() - delimiter.length());
}
@Test
public void testEncodeWithFormat() {
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
Line line = new Line(new Configuration(Collections.singletonMap("format", "%{host}-%{message}")), null);
String message = "Hello world";
String host = "test";
String expectedOutput = host + "-" + message + Line.DEFAULT_DELIMITER;
Event e = new Event();
e.setField("message", message);
e.setField("host", host);
line.encode(e, outputStream);
String resultingString = outputStream.toString();
assertEquals(expectedOutput, resultingString);
}
}
class TestEventConsumer implements Consumer<Map<String, Object>> {
List<Map<String, Object>> events = new ArrayList<>();
@Override
public void accept(Map<String, Object> stringObjectMap) {
events.add(stringObjectMap);
}
}

View file

@@ -0,0 +1,93 @@
package org.logstash.plugins.inputs;
import org.junit.Test;
import co.elastic.logstash.api.Configuration;
import org.logstash.plugins.codecs.Line;
import org.logstash.execution.queue.QueueWriter;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
public class StdinTest {
@Test
public void testSimpleEvent() throws IOException {
String testInput = "foo" + Line.DEFAULT_DELIMITER;
TestQueueWriter queueWriter = testStdin(testInput.getBytes());
assertEquals(1, queueWriter.getEvents().size());
}
@Test
public void testEvents() throws IOException {
String testInput = "foo" + Line.DEFAULT_DELIMITER + "bar" + Line.DEFAULT_DELIMITER + "baz" + Line.DEFAULT_DELIMITER;
TestQueueWriter queueWriter = testStdin(testInput.getBytes());
assertEquals(3, queueWriter.getEvents().size());
}
@Test
public void testUtf8Events() throws IOException {
String[] inputs = {"München1", "安装中文输入法", "München3"};
String testInput = String.join(Line.DEFAULT_DELIMITER, inputs) + Line.DEFAULT_DELIMITER;
TestQueueWriter queueWriter = testStdin(testInput.getBytes());
List<Map<String, Object>> events = queueWriter.getEvents();
assertEquals(3, events.size());
for (int k = 0; k < inputs.length; k++) {
assertEquals(inputs[k], events.get(k).get("message"));
}
}
private static TestQueueWriter testStdin(byte[] input) throws IOException {
TestQueueWriter queueWriter = new TestQueueWriter();
try (FileChannel inChannel = getTestFileChannel(input)) {
            Stdin stdin = new Stdin(new Configuration(Collections.emptyMap()), null, inChannel);
Thread t = new Thread(() -> stdin.start(queueWriter));
t.start();
try {
Thread.sleep(50);
stdin.awaitStop();
} catch (InterruptedException e) {
fail("Stdin.awaitStop failed with exception: " + e);
}
}
return queueWriter;
}
private static FileChannel getTestFileChannel(byte[] testBytes) throws IOException {
Path tempFile = Files.createTempFile("StdinTest","");
RandomAccessFile raf = new RandomAccessFile(tempFile.toString(), "rw");
FileChannel fc = raf.getChannel();
fc.write(ByteBuffer.wrap(testBytes));
fc.position(0);
return fc;
}
}
class TestQueueWriter implements QueueWriter {
private List<Map<String, Object>> events = new ArrayList<>();
@Override
public void push(Map<String, Object> event) {
synchronized (this) {
events.add(event);
}
}
public List<Map<String, Object>> getEvents() {
return events;
}
}

View file

@@ -0,0 +1,66 @@
package org.logstash.plugins.outputs;
import co.elastic.logstash.api.Configuration;
import com.fasterxml.jackson.core.JsonProcessingException;
import org.junit.Test;
import org.logstash.Event;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
public class StdoutTest {
private static boolean streamWasClosed = false;
/**
* Verifies that the stdout output is reloadable because it does not close the underlying
* output stream which, outside of test cases, is always {@link java.lang.System#out}.
*/
@Test
public void testUnderlyingStreamIsNotClosed() {
OutputStream dummyOutputStream = new ByteArrayOutputStream(0) {
@Override
public void close() throws IOException {
streamWasClosed = true;
super.close();
}
};
        Stdout stdout = new Stdout(new Configuration(Collections.emptyMap()), null, dummyOutputStream);
stdout.output(getTestEvents());
stdout.stop();
assertFalse(streamWasClosed);
}
@Test
public void testEvents() throws JsonProcessingException {
StringBuilder expectedOutput = new StringBuilder();
Collection<Event> testEvents = getTestEvents();
for (Event e : testEvents) {
            expectedOutput.append(e.toJson()).append(String.format("%n"));
}
OutputStream dummyOutputStream = new ByteArrayOutputStream(0);
        Stdout stdout = new Stdout(new Configuration(Collections.emptyMap()), null, dummyOutputStream);
stdout.output(testEvents);
stdout.stop();
assertEquals(expectedOutput.toString(), dummyOutputStream.toString());
}
private static Collection<Event> getTestEvents() {
Event e1 = new Event();
e1.setField("myField", "event1");
Event e2 = new Event();
e2.setField("myField", "event2");
Event e3 = new Event();
e3.setField("myField", "event3");
return Arrays.asList(e1, e2, e3);
}
}