LIR UI (Read Only) (#6241)

* Read-only Java IR
* Consistent ID generation as opposed to UUIDs

Fixes #6708
This commit is contained in:
Andrew Cholakian 2016-11-22 16:19:33 -06:00
parent 2974763840
commit 76711a4785
86 changed files with 9568 additions and 62 deletions
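For illustration, a rough JRuby sketch of the hashing that the Util and Hashable classes added below use for stable identity (the config fragment is a made-up example): a SHA-256 digest of a component's own source always yields the same value for the same config, whereas a UUID changes on every run.

require "securerandom"

fragment = "generator { count => 1 }"        # hypothetical plugin source text
org.logstash.common.Util.digest(fragment)    # same SHA-256 hex string on every run
SecureRandom.uuid                            # a different value on every run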

88
apache_stdout.conf Normal file
View file

@ -0,0 +1,88 @@
input {
file {
id => "logfileRead"
start_position => beginning
ignore_older => 0
path => "/Users/andrewvc/projects/ls_apache_materials/apache_access_logs"
}
stdin {id => logStdin}
}
filter {
grok {
id => "apacheCommonLog"
match => {
"message" => '%{IPORHOST:clientip} %{USER:ident} %{USER:auth} \[%{HTTPDATE:timestamp}\] "%{WORD:verb} %{DATA:request} HTTP/%{NUMBER:httpversion}" %{NUMBER:response:int} (?:-|%{NUMBER:bytes:int}) %{QS:referrer} %{QS:agent}'
}
}
geoip {
id => "clientGeo"
source => clientip
target => geoip
}
useragent {
id => "clientUA"
source => agent
target => useragent
}
date {
id => "clientDate"
match => [ "timestamp", "dd/MMM/YYYY:HH:mm:ss Z" ]
locale => en
}
if [geoip][country_code2] == "US" {
mutate {
id => "addUsRegion"
add_field => { "aws-region" => "us-east-1" }
}
} else if [geoip][country_code2] == "CA" {
if [referrer] =~ /google/ {
sleep {
id => "pointlessSleep"
time => 0.001
}
}
} else {
mutate {
id => addOtherRegion
add_field => { "aws-region" => "eu-central-1" }
}
}
if [request] =~ /(?i)\.(png|jpg|gif)$/ {
grok {
id => grokImage
match => {
request => "%{(?i)\.(png|jpg|gif)$:extension}"
}
add_tag => ["image"]
}
mutate {
id => addCanadianRegion
add_field => { "aws-region" => "ca-central-1" }
}
} else if [request] =~ /articles/ {
mutate {
id => tagArticle
add_tag => ["article"]
}
}
}
output {
elasticsearch {
id => "mainEs"
index => "%{@type}-"
}
if [geoip][country_code2] != "US" {
stdout { id => "linuxStdout" codec => json_lines }
}
}

0
logstash-core/.lock Normal file
View file

View file

@ -403,6 +403,7 @@ class LogStash::Agent
if !t.alive?
return false
elsif pipeline.running?
dispatcher.fire(:pipeline_started, pipeline)
return true
else
sleep 0.01
@ -416,6 +417,7 @@ class LogStash::Agent
@logger.warn("stopping pipeline", :id => id)
pipeline.shutdown { LogStash::ShutdownWatcher.start(pipeline) }
@pipelines[id].thread.join
dispatcher.fire(:pipeline_stopped, pipeline)
end
def start_pipelines

View file

@ -0,0 +1,38 @@
require 'logstash/util/loggable'
require 'logstash/compiler/lscl/lscl_grammar'
java_import org.logstash.config.ir.Pipeline
java_import org.logstash.config.ir.graph.Graph;
java_import org.logstash.config.ir.graph.PluginVertex;
module LogStash; class Compiler
include ::LogStash::Util::Loggable
def self.compile_pipeline(config_str, source_file=nil)
graph_sections = self.compile_graph(config_str, source_file)
pipeline = org.logstash.config.ir.Pipeline.new(
graph_sections[:input],
graph_sections[:filter],
graph_sections[:output]
)
end
def self.compile_ast(config_str, source_file=nil)
grammar = LogStashCompilerLSCLGrammarParser.new
config = grammar.parse(config_str)
if config.nil?
raise ConfigurationError, grammar.failure_reason
end
config
end
def self.compile_imperative(config_str, source_file=nil)
compile_ast(config_str, source_file).compile(source_file)
end
def self.compile_graph(config_str, source_file=nil)
Hash[compile_imperative(config_str, source_file).map {|section,icompiled| [section, icompiled.toGraph]}]
end
end; end
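For context, a rough usage sketch of this module from a JRuby console (the config string and file name are made-up examples; assumes logstash-core and its Java IR classes are on the load path):

require "logstash/compiler"

config = "input { generator {} } filter { mutate {} } output { stdout {} }"

pipeline = LogStash::Compiler.compile_pipeline(config, "example.conf")
pipeline.uniqueHash   # content-derived hash of the compiled pipeline

graphs = LogStash::Compiler.compile_graph(config, "example.conf")
graphs.keys           # => [:input, :filter, :output], one Graph per section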

View file

@ -0,0 +1,566 @@
# encoding: utf-8
require 'logstash/errors'
require "treetop"
require "logstash/compiler/treetop_monkeypatches"
java_import org.logstash.config.ir.DSL
java_import org.logstash.config.ir.SourceMetadata
module LogStashCompilerLSCLGrammar; module LogStash; module Compiler; module LSCL; module AST
# Helpers for parsing LSCL files
module Helpers
def source_meta
line, column = line_and_column
org.logstash.config.ir.SourceMetadata.new(source_file, line, column, self.text_value)
end
def source_file=(value)
set_meta(:source_file, value)
end
def source_file
get_meta(:source_file)
end
def compose(*statements)
compose_for(section_type.to_sym).call(source_meta, *statements)
end
def compose_for(section_sym)
if section_sym == :filter
jdsl.method(:iComposeSequence)
else
jdsl.method(:iComposeParallel)
end
end
def line_and_column
start = self.interval.first
[self.input.line_of(start), self.input.column_of(start)]
end
def empty_source_meta()
org.logstash.config.ir.SourceMetadata.new()
end
def jdsl
org.logstash.config.ir.DSL
end
def self.jdsl
org.logstash.config.ir.DSL
end
AND_METHOD = jdsl.method(:eAnd)
OR_METHOD = jdsl.method(:eOr)
end
class Node < Treetop::Runtime::SyntaxNode
include Helpers
def section_type
if recursive_select_parent(Plugin).any?
return "codec"
else
section = recursive_select_parent(PluginSection)
return section.first.plugin_type.text_value
end
end
end
class Config < Node
include Helpers
def compile(source_file=nil)
# There is no way to move vars across nodes in treetop :(
self.source_file = source_file
sections = recursive_select(PluginSection)
section_map = {
:input => [],
:filter => [],
:output => []
}
sections.each do |section|
section_name = section.plugin_type.text_value.to_sym
section_expr = section.expr
raise "Unknown section name #{section_name}!" if ![:input, :output, :filter].include?(section_name)
::Array[section_expr].each do |se|
section_map[section_name].concat se
end
end
section_map.keys.each do |key|
section_map[key] = compose_for(key).call(empty_source_meta, *section_map[key])
end
section_map
end
end
class Comment < Node; end
class Whitespace < Node; end
class PluginSection < Node
def expr
recursive_select(Branch, Plugin).map(&:expr)
end
end
class Plugins < Node; end
class Plugin < Node
def expr
jdsl.iPlugin(source_meta, plugin_type_enum, self.plugin_name, self.expr_attributes)
end
def plugin_type_enum
case section_type
when "input"
Java::OrgLogstashConfigIr::PluginDefinition::Type::INPUT
when "codec"
Java::OrgLogstashConfigIr::PluginDefinition::Type::CODEC
when "filter"
Java::OrgLogstashConfigIr::PluginDefinition::Type::FILTER
when "output"
Java::OrgLogstashConfigIr::PluginDefinition::Type::OUTPUT
end
end
def plugin_name
return name.text_value
end
def expr_attributes
# Turn attributes into a hash map
self.attributes.recursive_select(Attribute).map(&:expr).map {|k,v|
if v.java_kind_of?(Java::OrgLogstashConfigIrExpression::ValueExpression)
[k, v.get]
else
[k,v]
end
}.reduce({}) do |hash,kv|
k,v = kv
hash[k] = v
hash
end
end
end
class Name < Node
def expr
return text_value
end
end
class Attribute < Node
def expr
[name.text_value, value.expr]
end
end
class RValue < Node; end
class Value < RValue; end
class Bareword < Value
def expr
jdsl.eValue(source_meta, text_value)
end
end
class String < Value
def expr
jdsl.eValue(source_meta, text_value[1...-1])
end
end
class RegExp < Value
def expr
# Strip the slashes off
jdsl.eRegex(text_value[1..-2])
end
end
class Number < Value
def expr
jdsl.eValue(source_meta, text_value.include?(".") ?
text_value.to_f :
text_value.to_i)
end
end
class Array < Value
def expr
jdsl.eValue(source_meta, recursive_select(Value).map(&:expr).map(&:get))
end
end
class Hash < Value
def validate!
duplicate_values = find_duplicate_keys
if duplicate_values.size > 0
raise ConfigurationError.new(
I18n.t("logstash.runner.configuration.invalid_plugin_settings_duplicate_keys",
:keys => duplicate_values.join(', '),
:line => input.line_of(interval.first),
:column => input.column_of(interval.first),
:byte => interval.first + 1,
:after => input[0..interval.first]
)
)
end
end
def find_duplicate_keys
values = recursive_select(HashEntry).collect { |hash_entry| hash_entry.name.text_value }
values.find_all { |v| values.count(v) > 1 }.uniq
end
def expr
validate!
::Hash[recursive_select(HashEntry).map(&:expr)]
end
end
class HashEntries < Node; end
class HashEntry < Node
def expr
return [name.expr.get, value.expr.get()]
end
end
class Branch < Node
def expr
# Build this stuff as s-expressions for convenience at first
# This will turn if/elsif/else blocks into nested if/else trees
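# e.g. `if A { x } else if B { y } else { z }` first becomes
#   [:if, A, [:compose, x], [:if, B, [:compose, y], [:compose, z]]]
# which javaify_sexpr below converts into nested IfStatements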
exprs = []
else_stack = [] # For turning if / elsif / else into nested ifs
self.recursive_select(Plugin, If, Elsif, Else).each do |node|
if node.is_a?(If)
exprs << :if
exprs << expr_cond(node)
exprs << expr_body(node)
elsif node.is_a?(Elsif)
condition = expr_cond(node)
body = expr_body(node)
else_stack << [:if, condition, body]
elsif node.is_a?(Else)
body = expr_body(node)
if else_stack.size >= 1
else_stack.last << body
else
exprs << body
end
end
end
else_stack.reverse.each_cons(2) do |cons|
later,earlier = cons
earlier << later
end
exprs << else_stack.first
# Then convert to the imperative java IR
javaify_sexpr(exprs)
end
def javaify_sexpr(sexpr)
return nil if sexpr.nil?
head = sexpr.first
tail = sexpr[1..-1]
if head == :if
condition, t_branch, f_branch = tail
java_t_branch = t_branch && javaify_sexpr(t_branch)
java_f_branch = f_branch && javaify_sexpr(f_branch)
if java_t_branch || java_f_branch
jdsl.iIf(condition, java_t_branch || jdsl.noop, java_f_branch || jdsl.noop)
else
jdsl.noop()
end
elsif head == :compose
tail && tail.size > 0 ? compose(*tail) : jdsl.noop
else
raise "Unknown expression #{sexpr}!"
end
end
def expr_cond(node)
node.elements.find {|e| e.is_a?(Condition)}.expr
end
def expr_body(node)
[:compose, *node.recursive_select(Plugin, Branch).map(&:expr)]
end
end
# Branch covers all these
class BranchEntry < Node; end
class If < BranchEntry; end
class Elsif < BranchEntry; end
class Else < BranchEntry; end
class Condition < Node
include Helpers
def expr
first_element = elements.first
rest_elements = elements.size > 1 ? elements[1].recursive_select(BooleanOperator, Expression, SelectorElement) : []
all_elements = [first_element, *rest_elements]
if all_elements.size == 1
elem = all_elements.first
if elem.is_a?(Selector)
eventValue = elem.recursive_select(SelectorElement).first.expr
jdsl.eTruthy(source_meta, eventValue)
elsif elem.is_a?(RegexpExpression)
elem.expr
else
join_conditions(all_elements)
end
else
join_conditions(all_elements)
end
end
def precedence(op)
# Higher numbers bind tighter: 'and' takes precedence over 'or'
case op
when AND_METHOD
2
when OR_METHOD
1
else
raise ArgumentError, "Unexpected operator #{op}"
end
end
# Converts an sexpr of :and or :or to the java imperative IR
def jconvert(sexpr)
raise "jconvert cannot handle nils!" if sexpr.nil?
if sexpr.java_kind_of?(Java::OrgLogstashConfigIrExpression::Expression)
return sexpr
end
op, left, right = sexpr
left_c = jconvert(left)
right_c = jconvert(right)
case op
when :and
return jdsl.eAnd(left_c, right_c)
when :or
return jdsl.eOr(left_c, right_c)
else
raise "Unknown op #{op}"
end
end
def join_conditions(all_elements)
# Use Dijkstra's shunting yard algorithm
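# 'and' binds tighter than 'or', so "2 > 1 and 1 < 2 or 3 < 2" groups as
# eOr(eAnd(eGt(2, 1), eLt(1, 2)), eLt(3, 2)): operands go straight to the
# output queue, while an incoming operator first pops any queued operator
# of higher precedence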
out = []
operators = []
all_elements.each do |e|
e_exp = e.expr
if e.is_a?(BooleanOperator)
if operators.last && precedence(operators.last) > precedence(e_exp)
out << operators.pop
end
operators << e_exp
else
out << e_exp
end
end
operators.reverse.each {|o| out << o}
stack = []
out.each do |e|
if e.is_a?(Symbol)
rval, lval = stack.pop, stack.pop
stack << jconvert([e, lval, rval])
elsif e.nil?
raise "Nil expr encountered! This should not happen!"
else
stack << e
end
end
stack_to_expr(stack)
end
def stack_to_expr(stack)
raise "Got an empty stack! This should not happen!" if stack.empty?
stack = stack.reverse # We need to work the stack in reverse order
working_stack = []
while elem = stack.pop
if elem.is_a?(::Method)
right, left = working_stack.pop, working_stack.pop
working_stack << elem.call(left, right)
else
working_stack << elem
end
end
raise "Invariant violated! Stack size > 1" if working_stack.size > 1
working_stack.first
end
end
module Expression
def expr
# If we have a more specific type (like a Negative expression) use that
if defined?(super)
return super
end
exprs = self.recursive_select(Condition, Selector).map(&:expr)
raise "Exprs should only have one part!" if exprs.size != 1
exprs.first
end
end
module NegativeExpression
include Helpers
def expr
exprs = self.recursive_select(Condition, Selector).map(&:expr)
raise "Negative exprs should only have one part!" if exprs.size != 1
jdsl.eNot(source_meta, exprs.first)
end
end
module ComparisonExpression
include Helpers
def expr
lval, comparison_method, rval = self.recursive_select(Selector, Expression, ComparisonOperator, Number, String).map(&:expr)
comparison_method.call(source_meta, lval, rval)
end
end
module InExpression
include Helpers
def expr
item, list = recursive_select(RValue)
jdsl.eIn(source_meta, item.expr, list.expr)
end
end
module NotInExpression
include Helpers
def expr
item, list = recursive_select(RValue)
jdsl.eNot(source_meta, jdsl.eIn(item.expr, list.expr))
end
end
# Not implemented because no one uses this
class MethodCall < Node; end
class RegexpExpression < Node
def expr
selector, operator_method, regexp = recursive_select(
Selector,
LogStash::Compiler::LSCL::AST::RegExpOperator,
LogStash::Compiler::LSCL::AST::RegExp,
LogStash::Compiler::LSCL::AST::String # Strings work as rvalues! :p
).map(&:expr)
# Handle string rvalues, they just get turned into regexps
# Maybe we really shouldn't handle these anymore...
if regexp.class == org.logstash.config.ir.expression.ValueExpression
regexp = jdsl.eRegex(regexp.get)
end
raise "Expected a selector in #{text_value}!" unless selector
raise "Expected a regexp in #{text_value}!" unless regexp
operator_method.call(source_meta, selector, regexp);
end
end
module BranchOrPlugin; end
module ComparisonOperator
include Helpers
def expr
case self.text_value
when "=="
jdsl.method(:eEq)
when "!="
jdsl.method(:eNeq)
when ">"
jdsl.method(:eGt)
when "<"
jdsl.method(:eLt)
when ">="
jdsl.method(:eGte)
when "<="
jdsl.method(:eLte)
else
raise "Unknown operator #{self.text_value}"
end
end
end
module RegExpOperator
include Helpers
def expr
if self.text_value == '!~'
jdsl.method(:eRegexNeq)
elsif self.text_value == '=~'
jdsl.method(:eRegexEq)
else
raise "Unknown regex operator #{self.text_value}"
end
end
end
module BooleanOperator
include Helpers
def expr
case self.text_value
when "and"
AND_METHOD
when "or"
OR_METHOD
else
raise "Unknown operator #{self.text_value}"
end
end
end
class Selector < RValue
def expr
jdsl.eEventValue(source_meta, text_value)
end
end
class SelectorElement < Node;
def expr
jdsl.eEventValue(source_meta, text_value)
end
end
end; end; end; end; end;

File diff suppressed because it is too large

View file

@ -0,0 +1,241 @@
require "treetop"
require "logstash/compiler/lscl.rb"
grammar LogStashCompilerLSCLGrammar
rule config
_ plugin_section _ (_ plugin_section)* _ <LogStash::Compiler::LSCL::AST::Config>
end
rule comment
(whitespace? "#" [^\r\n]* "\r"? "\n")+ <LogStash::Compiler::LSCL::AST::Comment>
end
rule _
(comment / whitespace)* <LogStash::Compiler::LSCL::AST::Whitespace>
end
rule whitespace
[ \t\r\n]+ <LogStash::Compiler::LSCL::AST::Whitespace>
end
rule plugin_section
plugin_type _ "{"
_ (branch_or_plugin _)*
"}"
<LogStash::Compiler::LSCL::AST::PluginSection>
end
rule branch_or_plugin
branch / plugin
end
rule plugin_type
("input" / "filter" / "output")
end
rule plugins
(plugin (_ plugin)*)?
<LogStash::Compiler::LSCL::AST::Plugins>
end
rule plugin
name _ "{"
_
attributes:( attribute (whitespace _ attribute)*)?
_
"}"
<LogStash::Compiler::LSCL::AST::Plugin>
end
rule name
(
([A-Za-z0-9_-]+ <LogStash::Compiler::LSCL::AST::Name>)
/ string
)
end
rule attribute
name _ "=>" _ value
<LogStash::Compiler::LSCL::AST::Attribute>
end
rule value
plugin / bareword / string / number / array / hash
end
rule array_value
bareword / string / number / array / hash
end
rule bareword
[A-Za-z_] [A-Za-z0-9_]+
<LogStash::Compiler::LSCL::AST::Bareword>
end
rule double_quoted_string
( '"' ( '\"' / !'"' . )* '"' <LogStash::Compiler::LSCL::AST::String>)
end
rule single_quoted_string
( "'" ( "\\'" / !"'" . )* "'" <LogStash::Compiler::LSCL::AST::String>)
end
rule string
double_quoted_string / single_quoted_string
end
rule regexp
( '/' ( '\/' / !'/' . )* '/' <LogStash::Compiler::LSCL::AST::RegExp>)
end
rule number
"-"? [0-9]+ ("." [0-9]*)?
<LogStash::Compiler::LSCL::AST::Number>
end
rule array
"["
_
(
value (_ "," _ value)*
)?
_
"]"
<LogStash::Compiler::LSCL::AST::Array>
end
rule hash
"{"
_
hashentries?
_
"}"
<LogStash::Compiler::LSCL::AST::Hash>
end
rule hashentries
hashentry (whitespace hashentry)*
<LogStash::Compiler::LSCL::AST::HashEntries>
end
rule hashentry
name:(number / bareword / string) _ "=>" _ value
<LogStash::Compiler::LSCL::AST::HashEntry>
end
# Conditions
rule branch
if (_ else_if)* (_ else)?
<LogStash::Compiler::LSCL::AST::Branch>
end
rule if
"if" _ condition _ "{" _ (branch_or_plugin _)* "}"
<LogStash::Compiler::LSCL::AST::If>
end
rule else_if
"else" _ "if" _ condition _ "{" _ ( branch_or_plugin _)* "}"
<LogStash::Compiler::LSCL::AST::Elsif>
end
rule else
"else" _ "{" _ (branch_or_plugin _)* "}"
<LogStash::Compiler::LSCL::AST::Else>
end
rule condition
expression (_ boolean_operator _ expression)*
<LogStash::Compiler::LSCL::AST::Condition>
end
rule expression
(
("(" _ condition _ ")")
/ negative_expression
/ in_expression
/ not_in_expression
/ compare_expression
/ regexp_expression
/ rvalue
) <LogStash::Compiler::LSCL::AST::Expression>
end
rule negative_expression
(
("!" _ "(" _ condition _ ")")
/ ("!" _ selector)
) <LogStash::Compiler::LSCL::AST::NegativeExpression>
end
rule in_expression
rvalue _ in_operator _ rvalue
<LogStash::Compiler::LSCL::AST::InExpression>
end
rule not_in_expression
rvalue _ not_in_operator _ rvalue
<LogStash::Compiler::LSCL::AST::NotInExpression>
end
rule in_operator
"in"
end
rule not_in_operator
"not " _ "in"
end
rule rvalue
string / number / selector / array / method_call / regexp
end
rule method_call
method _ "(" _
(
rvalue ( _ "," _ rvalue )*
)?
_ ")"
<LogStash::Compiler::LSCL::AST::MethodCall>
end
rule method
bareword
end
rule compare_expression
rvalue _ compare_operator _ rvalue
<LogStash::Compiler::LSCL::AST::ComparisonExpression>
end
rule compare_operator
("==" / "!=" / "<=" / ">=" / "<" / ">")
<LogStash::Compiler::LSCL::AST::ComparisonOperator>
end
rule regexp_expression
rvalue _ regexp_operator _ (string / regexp)
<LogStash::Compiler::LSCL::AST::RegexpExpression>
end
rule regexp_operator
("=~" / "!~") <LogStash::Compiler::LSCL::AST::RegExpOperator>
end
rule boolean_operator
("and" / "or" / "xor" / "nand")
<LogStash::Compiler::LSCL::AST::BooleanOperator>
end
rule selector
selector_element+
<LogStash::Compiler::LSCL::AST::Selector>
end
rule selector_element
"[" [^\],]+ "]"
<LogStash::Compiler::LSCL::AST::SelectorElement>
end
end

View file

@ -0,0 +1,92 @@
class Treetop::Runtime::SyntaxNode
def get_meta(key)
@ast_metadata ||= {}
return @ast_metadata[key] if @ast_metadata[key]
return self.parent.get_meta(key) if self.parent
nil
end
def set_meta(key, value)
@ast_metadata ||= {}
@ast_metadata[key] = value
end
def compile
return "" if elements.nil?
return elements.collect(&:compile).reject(&:empty?).join("")
end
# Traverse the syntax tree recursively.
# The order should respect the order of the configuration file as it is read
# and written by humans (and the order in which it is parsed).
def recurse(e, depth=0, &block)
r = block.call(e, depth)
e.elements.each { |e| recurse(e, depth + 1, &block) } if r && e.elements
nil
end
def recursive_inject(results=[], &block)
if !elements.nil?
elements.each do |element|
if block.call(element)
results << element
else
element.recursive_inject(results, &block)
end
end
end
return results
end
# When Treetop parses the configuration file
# it generates a tree; that tree contains
# a few `Empty` nodes to represent the literal spaces/tabs or newlines in the file.
# Some of these nodes will point to our concrete classes.
# To fetch specific types of objects we need to follow each branch
# and ignore the empty nodes.
def recursive_select(*klasses)
return recursive_inject { |e| klasses.any? {|k| e.is_a?(k)} }
end
def recursive_inject_parent(results=[], &block)
if !parent.nil?
if block.call(parent)
results << parent
else
parent.recursive_inject_parent(results, &block)
end
end
return results
end
def recursive_select_parent(results=[], klass)
return recursive_inject_parent(results) { |e| e.is_a?(klass) }
end
# Monkeypatch Treetop::Runtime::SyntaxNode's inspect method to skip
# any Whitespace or SyntaxNodes with no children.
def _inspect(indent="")
em = extension_modules
interesting_methods = methods-[em.last ? em.last.methods : nil]-self.class.instance_methods
im = interesting_methods.size > 0 ? " (#{interesting_methods.join(",")})" : ""
tv = text_value
tv = "...#{tv[-20..-1]}" if tv.size > 20
indent +
self.class.to_s.sub(/.*:/,'') +
em.map{|m| "+"+m.to_s.sub(/.*:/,'')}*"" +
" offset=#{interval.first}" +
", #{tv.inspect}" +
im +
(elements && elements.size > 0 ?
":" +
(elements.select { |e| !e.is_a?(LogStash::Config::AST::Whitespace) && e.elements && e.elements.size > 0 }||[]).map{|e|
begin
"\n"+e.inspect(indent+" ")
rescue # Defend against inspect not taking a parameter
"\n"+indent+" "+e.inspect
end
}.join("") :
""
)
end
end
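As a rough illustration of these helpers (a sketch with a made-up config string; assumes a JRuby session with logstash-core loaded):

require "logstash/compiler"

grammar = LogStashCompilerLSCLGrammarParser.new
ast = grammar.parse("input { generator {} stdin {} }")
ast.recursive_select(LogStashCompilerLSCLGrammar::LogStash::Compiler::LSCL::AST::Plugin).size  # => 2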

View file

@ -2,61 +2,7 @@
require 'logstash/errors'
require "treetop"
class Treetop::Runtime::SyntaxNode
def compile
return "" if elements.nil?
return elements.collect(&:compile).reject(&:empty?).join("")
end
# Traverse the syntax tree recursively.
# The order should respect the order of the configuration file as it is read
# and written by humans (and the order in which it is parsed).
def recurse(e, depth=0, &block)
r = block.call(e, depth)
e.elements.each { |e| recurse(e, depth + 1, &block) } if r && e.elements
nil
end
def recursive_inject(results=[], &block)
if !elements.nil?
elements.each do |element|
if block.call(element)
results << element
else
element.recursive_inject(results, &block)
end
end
end
return results
end
# When Treetop parses the configuration file
# it will generate a tree, the generated tree will contain
# a few `Empty` nodes to represent the actual space/tab or newline in the file.
# Some of theses node will point to our concrete class.
# To fetch a specific types of object we need to follow each branch
# and ignore the empty nodes.
def recursive_select(klass)
return recursive_inject { |e| e.is_a?(klass) }
end
def recursive_inject_parent(results=[], &block)
if !parent.nil?
if block.call(parent)
results << parent
else
parent.recursive_inject_parent(results, &block)
end
end
return results
end
def recursive_select_parent(results=[], klass)
return recursive_inject_parent(results) { |e| e.is_a?(klass) }
end
end
require "logstash/compiler/treetop_monkeypatches"
module LogStash; module Config; module AST

View file

@ -222,7 +222,7 @@ module LogStash module Instrument
key_candidates = extract_filter_keys(key_paths.shift)
key_candidates.each do |key_candidate|
raise MetricNotFound, "For path: #{key_candidate}" if map[key_candidate].nil?
raise MetricNotFound, "For path: #{key_candidate}. Map keys: #{map.keys}" if map[key_candidate].nil?
if key_paths.empty? # End of the user requested path
if map[key_candidate].is_a?(Concurrent::Map)

View file

@ -21,16 +21,20 @@ require "logstash/instrument/wrapped_write_client"
require "logstash/output_delegator"
require "logstash/filter_delegator"
require "logstash/queue_factory"
require 'logstash/compiler'
module LogStash; class BasePipeline
include LogStash::Util::Loggable
attr_reader :config_str, :config_hash, :inputs, :filters, :outputs, :pipeline_id
attr_reader :config_str, :config_hash, :inputs, :filters, :outputs, :pipeline_id, :lir
def initialize(config_str, settings = SETTINGS)
@logger = self.logger
@config_str = config_str
@config_hash = Digest::SHA1.hexdigest(@config_str)
@lir = compile_lir
# Every time #plugin is invoked this is incremented to give each plugin
# a unique id when auto-generating plugin ids
@plugin_counter ||= 0
@ -63,6 +67,10 @@ module LogStash; class BasePipeline
raise e
end
end
def compile_lir
LogStash::Compiler.compile_pipeline(self.config_str)
end
def plugin(plugin_type, name, *args)
@plugin_counter += 1
@ -166,6 +174,8 @@ module LogStash; class Pipeline < BasePipeline
@running = Concurrent::AtomicBoolean.new(false)
@flushing = Concurrent::AtomicReference.new(false)
end # def initialize
def ready?
@ready.value
@ -539,7 +549,6 @@ module LogStash; class Pipeline < BasePipeline
end
end
# Calculate the uptime in milliseconds
#
# @return [Fixnum] Uptime in milliseconds, 0 if the pipeline is not started
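With the lir changes above, the compiled Java IR is reachable from any pipeline; a rough sketch (made-up config string; assumes a full Logstash install where the referenced plugins resolve):

pipeline = LogStash::BasePipeline.new("input { generator {} } output { stdout {} }")
pipeline.lir              # => the org.logstash.config.ir.Pipeline built by LogStash::Compiler
pipeline.lir.uniqueHash   # stable across runs for the same config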

View file

@ -0,0 +1,583 @@
require "spec_helper"
require "logstash/compiler"
java_import Java::OrgLogstashConfigIr::DSL
describe LogStash::Compiler do
def j
Java::OrgLogstashConfigIr::DSL
end
# Static import of these useful enums
INPUT = Java::OrgLogstashConfigIr::PluginDefinition::Type::INPUT
FILTER = Java::OrgLogstashConfigIr::PluginDefinition::Type::FILTER
OUTPUT = Java::OrgLogstashConfigIr::PluginDefinition::Type::OUTPUT
CODEC = Java::OrgLogstashConfigIr::PluginDefinition::Type::CODEC
describe "compiling to Pipeline" do
subject(:source_file) { "fake_sourcefile" }
subject(:compiled) { described_class.compile_pipeline(source, source_file) }
describe "complex configs" do
shared_examples_for "compilable LSCL files" do |path|
describe "parsing #{path}" do
let(:source) { File.read(path) }
it "should compile" do
expect(compiled).to be_java_kind_of(Java::OrgLogstashConfigIr::Pipeline)
end
it "should have a hash" do
expect(compiled.uniqueHash)
end
end
end
Dir.glob(File.join(SUPPORT_DIR, "lscl_configs", "*.conf")).each do |path|
it_should_behave_like "compilable LSCL files", path
end
end
end
describe "compiling imperative" do
let(:source_file) { "fake_sourcefile" }
subject(:compiled) { described_class.compile_imperative(source, source_file) }
describe "an empty file" do
let(:source) { "input {} output {}" }
it "should have an empty input block" do
expect(compiled[:input]).to ir_eql(j.noop)
end
it "should have an empty filter block" do
expect(compiled[:filter]).to ir_eql(j.noop)
end
it "should have an empty output block" do
expect(compiled[:output]).to ir_eql(j.noop)
end
end
describe "SourceMetadata" do
let(:source) { "input { generator {} } output { }" }
it "should attach correct source text for components" do
expect(compiled[:input].get_meta.getSourceText).to eql("generator {}")
end
end
context "plugins" do
subject(:c_plugin) { compiled[:input] }
let(:source) { "input { #{plugin_source} } " }
describe "a simple plugin" do
let(:plugin_source) { "generator {}" }
it "should contain the plugin" do
expect(c_plugin).to ir_eql(j.iPlugin(INPUT, "generator"))
end
end
describe "a plugin with mixed parameter types" do
let(:plugin_source) { "generator { aarg => [1] hasharg => {foo => bar} iarg => 123 farg => 123.123 sarg => 'hello'}" }
let(:expected_plugin_args) do
{
"aarg" => [1],
"hasharg" => {"foo" => "bar"},
"iarg" => 123,
"farg" => 123.123,
"sarg" => 'hello'
}
end
it "should contain the plugin" do
expect(c_plugin).to ir_eql(j.iPlugin(INPUT, "generator", expected_plugin_args))
end
end
end
context "inputs" do
subject(:input) { compiled[:input] }
describe "a single input" do
let(:source) { "input { generator {} }" }
it "should contain the single input" do
expect(input).to ir_eql(j.iPlugin(INPUT, "generator"))
end
end
describe "two inputs" do
let(:source) { "input { generator { count => 1 } generator { count => 2 } } output { }" }
it "should contain both inputs" do
expect(input).to ir_eql(j.iComposeParallel(
j.iPlugin(INPUT, "generator", {"count" => 1}),
j.iPlugin(INPUT, "generator", {"count" => 2})
))
end
end
end
shared_examples_for "complex grammar" do |section|
let(:section_name_enum) {
case section
when :input
INPUT
when :filter
FILTER
when :output
OUTPUT
else
raise "Unknown section"
end
}
let(:section) { section }
let (:compiled_section) { compiled[section] }
def splugin(*args)
j.iPlugin(section_name_enum, *args)
end
def compose(*statements)
if section == :filter
j.iComposeSequence(*statements)
else
j.iComposeParallel(*statements)
end
end
describe "multiple section declarations" do
let(:source) do
<<-EOS
#{section} {
aplugin { count => 1 }
}
#{section} {
aplugin { count => 2 }
}
EOS
end
it "should contain both section declarations, in order" do
expect(compiled_section).to ir_eql(compose(
splugin("aplugin", {"count" => 1}),
splugin("aplugin", {"count" => 2})
))
end
end
describe "two plugins" do
let(:source) do
# We care about line/column for this test, hence the indentation
<<-EOS
#{section} {
aplugin { count => 1 }
aplugin { count => 2 }
}
EOS
end
it "should contain both" do
expect(compiled_section).to ir_eql(compose(
splugin("aplugin", {"count" => 1}),
splugin("aplugin", {"count" => 2})
))
end
it "should attach source_metadata with correct info to the statements" do
meta = compiled_section.statements.first.meta
expect(meta.getSourceText).to eql("aplugin { count => 1 }")
expect(meta.getSourceLine).to eql(2)
expect(meta.getSourceColumn).to eql(13)
expect(meta.getSourceFile).to eql(source_file)
expect(compiled_section.statements.first.meta)
expect(compiled_section)
end
end
describe "if conditions" do
describe "conditional expressions" do
let(:source) { "#{section} { if (#{expression}) { aplugin {} } }" }
let(:c_expression) { compiled_section.getBooleanExpression }
describe "logical expressions" do
describe "simple and" do
let(:expression) { "2 > 1 and 1 < 2" }
it "should compile correctly" do
expect(c_expression).to ir_eql(
j.eAnd(
j.eGt(j.eValue(2), j.eValue(1)),
j.eLt(j.eValue(1), j.eValue(2))
))
end
end
describe "'in' array" do
let(:expression) { "'foo' in ['foo', 'bar']" }
it "should compile correctly" do
expect(c_expression).to ir_eql(
j.eIn(
j.eValue('foo'),
j.eValue(['foo', 'bar'])
))
end
end
describe "'not in' array" do
let(:expression) { "'foo' not in ['foo', 'bar']" }
it "should compile correctly" do
expect(c_expression).to ir_eql(
j.eNot(
j.eIn(
j.eValue('foo'),
j.eValue(['foo', 'bar'])
)))
end
end
describe "'not'" do
let(:expression) { "!(1 > 2)" }
it "should compile correctly" do
expect(c_expression).to ir_eql(j.eNot(j.eGt(j.eValue(1), j.eValue(2))))
end
end
describe "and or precedence" do
let(:expression) { "2 > 1 and 1 < 2 or 3 < 2" }
it "should compile correctly" do
expect(c_expression).to ir_eql(
j.eOr(
j.eAnd(
j.eGt(j.eValue(2), j.eValue(1)),
j.eLt(j.eValue(1), j.eValue(2))
),
j.eLt(j.eValue(3), j.eValue(2))
)
)
end
describe "multiple or" do
let(:expression) { "2 > 1 or 1 < 2 or 3 < 2" }
it "should compile correctly" do
expect(c_expression).to ir_eql(
j.eOr(
j.eGt(j.eValue(2), j.eValue(1)),
j.eOr(
j.eLt(j.eValue(1), j.eValue(2)),
j.eLt(j.eValue(3), j.eValue(2))
)
)
)
end
end
describe "a complex expression" do
let(:expression) { "1 > 2 and 3 > 4 or 6 > 7 and 8 > 9" }
# false and false or true and true
it "should compile correctly" do
expect(c_expression).to ir_eql(
j.eOr(
j.eAnd(
j.eGt(j.eValue(1), j.eValue(2)),
j.eGt(j.eValue(3), j.eValue(4))
),
j.eAnd(
j.eGt(j.eValue(6), j.eValue(7)),
j.eGt(j.eValue(8), j.eValue(9))
)
)
)
end
end
describe "a complex nested expression" do
let(:expression) { "1 > 2 and (1 > 2 and 3 > 4 or 6 > 7 and 8 > 9) or 6 > 7 and 8 > 9" }
# false and false or true and true
it "should compile correctly" do
expect(c_expression).to ir_eql(
j.eOr(
j.eAnd(
j.eGt(j.eValue(1), j.eValue(2)),
j.eOr(
j.eAnd(
j.eGt(j.eValue(1), j.eValue(2)),
j.eGt(j.eValue(3), j.eValue(4))
),
j.eAnd(
j.eGt(j.eValue(6), j.eValue(7)),
j.eGt(j.eValue(8), j.eValue(9))
)
)
),
j.eAnd(
j.eGt(j.eValue(6), j.eValue(7)),
j.eGt(j.eValue(8), j.eValue(9))
)
)
)
end
end
end
end
describe "comparisons" do
describe "field not null" do
let(:expression) { "[foo]"}
it "should compile correctly" do
expect(c_expression).to ir_eql(j.eTruthy(j.eEventValue("[foo]")))
end
end
describe "'=='" do
let(:expression) { "[foo] == 5"}
it "should compile correctly" do
expect(c_expression).to ir_eql(j.eEq(j.eEventValue("[foo]"), j.eValue(5.to_java)))
end
end
describe "'!='" do
let(:expression) { "[foo] != 5"}
it "should compile correctly" do
expect(c_expression).to ir_eql(j.eNeq(j.eEventValue("[foo]"), j.eValue(5.to_java)))
end
end
describe "'>'" do
let(:expression) { "[foo] > 5"}
it "should compile correctly" do
expect(c_expression).to ir_eql(j.eGt(j.eEventValue("[foo]"), j.eValue(5.to_java)))
end
end
describe "'<'" do
let(:expression) { "[foo] < 5"}
it "should compile correctly" do
expect(c_expression).to ir_eql(j.eLt(j.eEventValue("[foo]"), j.eValue(5.to_java)))
end
end
describe "'>='" do
let(:expression) { "[foo] >= 5"}
it "should compile correctly" do
expect(c_expression).to ir_eql(j.eGte(j.eEventValue("[foo]"), j.eValue(5.to_java)))
end
end
describe "'<='" do
let(:expression) { "[foo] <= 5"}
it "should compile correctly" do
expect(c_expression).to ir_eql(j.eLte(j.eEventValue("[foo]"), j.eValue(5.to_java)))
end
end
describe "'=~'" do
let(:expression) { "[foo] =~ /^abc$/"}
it "should compile correctly" do
expect(c_expression).to ir_eql(j.eRegexEq(j.eEventValue("[foo]"), j.eRegex('^abc$')))
end
# Believe it or not, "\.\." is a valid regexp!
describe "when given a quoted regexp" do
let(:expression) { '[foo] =~ "\\.\\."' }
it "should compile correctly" do
expect(c_expression).to ir_eql(j.eRegexEq(j.eEventValue("[foo]"), j.eRegex('\\.\\.')))
end
end
end
describe "'!~'" do
let(:expression) { "[foo] !~ /^abc$/"}
it "should compile correctly" do
expect(c_expression).to ir_eql(j.eRegexNeq(j.eEventValue("[foo]"), j.eRegex('^abc$')))
end
end
end
end
describe "only true branch" do
let (:source) { "#{section} { if [foo] == [bar] { grok {} } }" }
it "should compile correctly" do
expect(compiled_section).to ir_eql(j.iIf(
j.eEq(j.eEventValue("[foo]"), j.eEventValue("[bar]")),
splugin("grok")
)
)
end
end
describe "only false branch" do
let (:source) { "#{section} { if [foo] == [bar] { } else { fplugin {} } }" }
it "should compile correctly" do
expect(compiled_section).to ir_eql(j.iIf(
j.eEq(j.eEventValue("[foo]"), j.eEventValue("[bar]")),
j.noop,
splugin("fplugin"),
)
)
end
end
describe "empty if statement" do
let (:source) { "#{section} { if [foo] == [bar] { } }" }
it "should compile correctly" do
expect(compiled_section).to ir_eql(j.iIf(
j.eEq(j.eEventValue("[foo]"), j.eEventValue("[bar]")),
j.noop,
j.noop
)
)
end
end
describe "if else" do
let (:source) { "#{section} { if [foo] == [bar] { tplugin {} } else { fplugin {} } }" }
it "should compile correctly" do
expect(compiled_section).to ir_eql(j.iIf(
j.eEq(j.eEventValue("[foo]"), j.eEventValue("[bar]")),
splugin("tplugin"),
splugin("fplugin")
)
)
end
end
describe "if elsif else" do
let (:source) { "#{section} { if [foo] == [bar] { tplugin {} } else if [bar] == [baz] { eifplugin {} } else { fplugin {} } }" }
it "should compile correctly" do
expect(compiled_section).to ir_eql(j.iIf(
j.eEq(j.eEventValue("[foo]"), j.eEventValue("[bar]")),
splugin("tplugin"),
j.iIf(
j.eEq(j.eEventValue("[bar]"), j.eEventValue("[baz]")),
splugin("eifplugin"),
splugin("fplugin")
)
)
)
end
end
describe "if elsif elsif else" do
let (:source) do
<<-EOS
#{section} {
if [foo] == [bar] { tplugin {} }
else if [bar] == [baz] { eifplugin {} }
else if [baz] == [bot] { eeifplugin {} }
else { fplugin {} }
}
EOS
end
it "should compile correctly" do
expect(compiled_section).to ir_eql(j.iIf(
j.eEq(j.eEventValue("[foo]"), j.eEventValue("[bar]")),
splugin("tplugin"),
j.iIf(
j.eEq(j.eEventValue("[bar]"), j.eEventValue("[baz]")),
splugin("eifplugin"),
j.iIf(
j.eEq(j.eEventValue("[baz]"), j.eEventValue("[bot]")),
splugin("eeifplugin"),
splugin("fplugin")
)
)
)
)
end
describe "nested ifs" do
let (:source) do
<<-EOS
#{section} {
if [foo] == [bar] {
if [bar] == [baz] { aplugin {} }
} else {
if [bar] == [baz] { bplugin {} }
else if [baz] == [bot] { cplugin {} }
else { dplugin {} }
}
}
EOS
end
it "should compile correctly" do
expect(compiled_section).to ir_eql(j.iIf(
j.eEq(j.eEventValue("[foo]"), j.eEventValue("[bar]")),
j.iIf(j.eEq(j.eEventValue("[bar]"), j.eEventValue("[baz]")),
splugin("aplugin"),
j.noop
),
j.iIf(
j.eEq(j.eEventValue("[bar]"), j.eEventValue("[baz]")),
splugin("bplugin"),
j.iIf(
j.eEq(j.eEventValue("[baz]"), j.eEventValue("[bot]")),
splugin("cplugin"),
splugin("dplugin")
)
)
)
)
end
end
end
end
end
context "filters" do
subject(:filter) { compiled[:filter] }
describe "a single filter" do
let(:source) { "input { } filter { grok {} } output { }" }
it "should contain the single input" do
expect(filter).to ir_eql(j.iPlugin(FILTER, "grok"))
end
end
it_should_behave_like "complex grammar", :filter
end
context "outputs" do
subject(:output) { compiled[:output] }
describe "a single output" do
let(:source) { "input { } output { stdout {} }" }
it "should contain the single input" do
expect(output).to ir_eql(j.iPlugin(OUTPUT, "stdout"))
end
end
it_should_behave_like "complex grammar", :output
end
end
end

View file

@ -14,3 +14,17 @@ def clear_data_dir
FileUtils.rm_rf(File.join(data_path, f))
end
end
RSpec::Matchers.define :ir_eql do |expected|
match do |actual|
next unless expected.java_kind_of?(org.logstash.config.ir.SourceComponent) && actual.java_kind_of?(org.logstash.config.ir.SourceComponent)
expected.sourceComponentEquals(actual)
end
failure_message do |actual|
"actual value \n#{actual.to_s}\nis not .sourceComponentEquals to the expected value: \n#{expected.to_s}\n"
end
end
SUPPORT_DIR = Pathname.new(::File.join(::File.dirname(__FILE__), "support"))

View file

@ -0,0 +1,38 @@
package org.logstash.common;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
/**
* Created by andrewvc on 12/23/16.
*/
public class Util {
// Modified from http://stackoverflow.com/a/11009612/11105
public static MessageDigest defaultMessageDigest() {
try {
return MessageDigest.getInstance("SHA-256");
} catch (NoSuchAlgorithmException e) {
throw new RuntimeException(e);
}
}
public static String digest(String base) {
MessageDigest digest = defaultMessageDigest();
byte[] hash = digest.digest(base.getBytes(StandardCharsets.UTF_8));
return bytesToHexString(hash);
}
public static String bytesToHexString(byte[] bytes) {
StringBuilder hexString = new StringBuilder();
for (byte aHash : bytes) {
String hex = Integer.toHexString(0xff & aHash);
if (hex.length() == 1) hexString.append('0');
hexString.append(hex);
}
return hexString.toString();
}
}

View file

@ -0,0 +1,26 @@
package org.logstash.config.ir;
/**
* Created by andrewvc on 9/6/16.
*
* This class is useful to inherit from for things that need to be source components
* since it handles storage of the meta property for you and reduces a lot of boilerplate.
*
*/
public abstract class BaseSourceComponent implements SourceComponent {
private final SourceMetadata meta;
public BaseSourceComponent(SourceMetadata meta) {
this.meta = meta;
}
public SourceMetadata getMeta() {
return meta;
}
public abstract boolean sourceComponentEquals(SourceComponent sourceComponent);
public String toString(int indent) {
return "toString(int indent) should be implemented for " + this.getClass().getName();
}
}

View file

@ -0,0 +1,296 @@
package org.logstash.config.ir;
import org.logstash.config.ir.expression.*;
import org.logstash.config.ir.expression.binary.*;
import org.logstash.config.ir.expression.unary.Not;
import org.logstash.config.ir.expression.unary.Truthy;
import org.logstash.config.ir.graph.Graph;
import org.logstash.config.ir.graph.IfVertex;
import org.logstash.config.ir.graph.PluginVertex;
import org.logstash.config.ir.imperative.*;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
/**
* Created by andrewvc on 9/15/16.
*/
public class DSL {
public static EventValueExpression eEventValue(SourceMetadata meta, String fieldName) {
return new EventValueExpression(meta, fieldName);
}
public static EventValueExpression eEventValue(String fieldName) {
return eEventValue(new SourceMetadata(), fieldName);
}
public static ValueExpression eValue(SourceMetadata meta, Object value) throws InvalidIRException {
return new ValueExpression(meta, value);
}
public static ValueExpression eValue(Object value) throws InvalidIRException {
return eValue(new SourceMetadata(), value);
}
public static ValueExpression eRegex(SourceMetadata meta, String pattern) throws InvalidIRException {
return new RegexValueExpression(meta, pattern);
}
public static ValueExpression eRegex(String pattern) throws InvalidIRException {
return eRegex(new SourceMetadata(), pattern);
}
public static ValueExpression eValue(long value) {
try {
return eValue(new SourceMetadata(), value);
} catch (InvalidIRException e) {
e.printStackTrace(); // Can't happen with an int
return null;
}
}
public static ValueExpression eValue(double value) {
try {
return eValue(new SourceMetadata(), value);
} catch (InvalidIRException e) {
e.printStackTrace(); // Can't happen with a double
return null;
}
}
public static Gt eGt(SourceMetadata meta, Expression left, Expression right) throws InvalidIRException {
return new Gt(meta, left, right);
}
public static Gt eGt(Expression left, Expression right) throws InvalidIRException {
return new Gt(null, left, right);
}
public static Gte eGte(SourceMetadata meta, Expression left, Expression right) throws InvalidIRException {
return new Gte(meta, left, right);
}
public static Gte eGte(Expression left, Expression right) throws InvalidIRException {
return new Gte(null, left, right);
}
public static Lt eLt(SourceMetadata meta, Expression left, Expression right) throws InvalidIRException {
return new Lt(meta, left, right);
}
public static Lt eLt(Expression left, Expression right) throws InvalidIRException {
return new Lt(null, left, right);
}
public static Lte eLte(SourceMetadata meta, Expression left, Expression right) throws InvalidIRException {
return new Lte(meta, left, right);
}
public static Lte eLte(Expression left, Expression right) throws InvalidIRException {
return new Lte(null, left, right);
}
public static Eq eEq(SourceMetadata meta, Expression left, Expression right) throws InvalidIRException {
return new Eq(meta, left, right);
}
public static Eq eEq(Expression left, Expression right) throws InvalidIRException {
return new Eq(null, left, right);
}
public static And eAnd(SourceMetadata meta, Expression left, Expression right) throws InvalidIRException {
return new And(meta, left, right);
}
public static And eAnd(Expression left, Expression right) throws InvalidIRException {
return new And(null, left, right);
}
public static Or eOr(SourceMetadata meta, Expression left, Expression right) throws InvalidIRException {
return new Or(meta, left, right);
}
public static Or eOr(Expression left, Expression right) throws InvalidIRException {
return new Or(null, left, right);
}
public static RegexEq eRegexEq(SourceMetadata meta, Expression left, ValueExpression right) throws InvalidIRException {
return new RegexEq(meta, left, right);
}
public static RegexEq eRegexEq(Expression left, ValueExpression right) throws InvalidIRException {
return new RegexEq(null, left, right);
}
public static Expression eRegexNeq(SourceMetadata meta, Expression left, ValueExpression right) throws InvalidIRException {
return eNot(eRegexEq(meta, left, right));
}
public static Expression eRegexNeq(Expression left, ValueExpression right) throws InvalidIRException {
return eNot(eRegexEq(left, right));
}
public static Neq eNeq(SourceMetadata meta, Expression left, Expression right) throws InvalidIRException {
return new Neq(meta, left, right);
}
public static Neq eNeq(Expression left, Expression right) throws InvalidIRException {
return new Neq(null, left, right);
}
public static In eIn(SourceMetadata meta, Expression left, Expression right) throws InvalidIRException {
return new In(meta, left, right);
}
public static In eIn(Expression left, Expression right) throws InvalidIRException {
return new In(null, left, right);
}
public static Not eNot(SourceMetadata meta, Expression expr) throws InvalidIRException {
return new Not(meta, expr);
}
public static Not eNot(Expression expr) throws InvalidIRException {
return new Not(null, expr);
}
public static BooleanExpression eTruthy(SourceMetadata meta, Expression expr) throws InvalidIRException {
if (expr instanceof BooleanExpression) {
return (BooleanExpression) expr;
}
return new Truthy(meta, expr);
}
public static BooleanExpression eTruthy(Expression expr) throws InvalidIRException {
return eTruthy(null, expr);
}
public static Statement iCompose(ComposedStatement.IFactory factory, SourceMetadata meta, Statement... statements) throws InvalidIRException {
if (statements.length == 0 ) {
return new NoopStatement(meta);
} else if (statements.length == 1 ) {
return statements[0];
} else {
return factory.make(meta, Arrays.asList(statements));
}
}
public static Statement iComposeSequence(SourceMetadata meta, Statement... statements) throws InvalidIRException {
return iCompose(ComposedSequenceStatement::new, meta, statements);
}
public static Statement iComposeSequence(Statement... statements) throws InvalidIRException {
return iComposeSequence(null, statements);
}
public static Statement iComposeParallel(SourceMetadata meta, Statement... statements) throws InvalidIRException {
return iCompose(ComposedParallelStatement::new, meta, statements);
}
public static Statement iComposeParallel(Statement... statements) throws InvalidIRException {
return iComposeParallel(null, statements);
}
public static NoopStatement noop(SourceMetadata meta) {
return new NoopStatement(meta);
}
public static NoopStatement noop() {
return new NoopStatement(new SourceMetadata());
}
public static PluginStatement iPlugin(SourceMetadata meta, PluginDefinition.Type pluginType, String pluginName, Map<String, Object> pluginArguments) {
return new PluginStatement(meta, new PluginDefinition(pluginType, pluginName, pluginArguments));
}
public static PluginStatement iPlugin(PluginDefinition.Type type, String pluginName, Map<String, Object> pluginArguments) {
return iPlugin(new SourceMetadata(), type, pluginName, pluginArguments);
}
public static PluginStatement iPlugin(PluginDefinition.Type type, String pluginName, MapBuilder<String, Object> argBuilder) {
return iPlugin(type, pluginName, argBuilder.build());
}
public static PluginStatement iPlugin(PluginDefinition.Type type, String pluginName, String id) {
return iPlugin(type, pluginName, argumentBuilder().put("id", id).build());
}
public static PluginStatement iPlugin(PluginDefinition.Type type, String pluginName) {
return iPlugin(type, pluginName, pargs());
}
public static IfStatement iIf(SourceMetadata meta,
Expression condition,
Statement ifTrue,
Statement ifFalse) throws InvalidIRException {
BooleanExpression booleanExpression = eTruthy(meta, condition);
return new IfStatement(meta, booleanExpression, ifTrue, ifFalse);
}
public static IfStatement iIf(Expression condition,
Statement ifTrue,
Statement ifFalse) throws InvalidIRException {
return iIf(new SourceMetadata(), condition, ifTrue, ifFalse);
}
public static IfStatement iIf(Expression condition,
Statement ifTrue) throws InvalidIRException {
return iIf(new SourceMetadata(), condition, ifTrue, noop());
}
public static class MapBuilder<K,V> {
private final HashMap<K, V> map;
public MapBuilder() {
this.map = new HashMap<>();
}
public MapBuilder<K, V> put(K k, V v) {
map.put(k, v);
return this;
}
public Map<K, V> build() {
return map;
}
}
static <K,V> MapBuilder<K,V> mapBuilder() {
return new MapBuilder<>();
}
public static MapBuilder<String, Object> argumentBuilder() {
return mapBuilder();
}
public static MapBuilder<String, Object> pargs() {
return argumentBuilder();
}
public static Graph graph() {
return new Graph();
}
public static PluginVertex gPlugin(SourceMetadata sourceMetadata, PluginDefinition.Type pluginType, String pluginName, Map<String, Object> pluginArgs) {
return new PluginVertex(sourceMetadata, new PluginDefinition(pluginType, pluginName, pluginArgs));
}
public static PluginVertex gPlugin(PluginDefinition.Type type, String pluginName, Map<String, Object> pluginArgs) {
return gPlugin(new SourceMetadata(), type, pluginName, pluginArgs);
}
public static PluginVertex gPlugin(PluginDefinition.Type type, String pluginName, String id) {
return gPlugin(type, pluginName, argumentBuilder().put("id", id).build());
}
public static PluginVertex gPlugin(PluginDefinition.Type type, String pluginName) {
return gPlugin(new SourceMetadata(), type, pluginName, new HashMap<>());
}
public static IfVertex gIf(SourceMetadata meta, BooleanExpression expression) {
return new IfVertex(meta, expression);
}
public static IfVertex gIf(BooleanExpression expression) {
return new IfVertex(new SourceMetadata(), expression);
}
}

View file

@ -0,0 +1,14 @@
package org.logstash.config.ir;
import org.logstash.common.Util;
/**
* Created by andrewvc on 12/23/16.
*/
public interface Hashable {
String hashSource();
default String uniqueHash() {
return Util.digest(this.hashSource());
}
}

View file

@ -0,0 +1,16 @@
package org.logstash.config.ir;
import org.logstash.config.ir.graph.algorithms.TopologicalSort;
/**
* Created by andrewvc on 9/6/16.
*/
public class InvalidIRException extends Exception {
public InvalidIRException(String s) {
super(s);
}
public InvalidIRException(String s, Exception e) {
super(s,e);
}
}

View file

@ -0,0 +1,102 @@
package org.logstash.config.ir;
import org.logstash.config.ir.graph.Graph;
import org.logstash.config.ir.graph.PluginVertex;
import org.logstash.config.ir.graph.QueueVertex;
import org.logstash.config.ir.graph.Vertex;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
* Created by andrewvc on 9/20/16.
*/
public class Pipeline implements Hashable {
public Graph getGraph() {
return graph;
}
public QueueVertex getQueue() {
return queue;
}
//public QueueVertex getFilterOut() {
// return filterOut;
//}
private final Graph graph;
private final QueueVertex queue;
//private final QueueVertex filterOut;
public Pipeline(Graph inputSection, Graph filterSection, Graph outputSection) throws InvalidIRException {
// Validate all incoming graphs, we can't turn an invalid graph into a Pipeline!
inputSection.validate();
filterSection.validate();
outputSection.validate();
Graph tempGraph = inputSection.copy(); // The input section forms our roots, so we can import it wholesale
// Connect all the input vertices out to the queue
queue = new QueueVertex();
tempGraph = tempGraph.chain(queue);
// Now we connect the queue to the root of the filter section
tempGraph = tempGraph.chain(filterSection);
// Finally, connect the filter out node to all the outputs
this.graph = tempGraph.chain(outputSection);
}
public List<Vertex> getPostQueue() throws InvalidIRException {
return graph.getSortedVerticesAfter(queue);
}
public List<PluginVertex> getInputPluginVertices() {
return getPluginVertices(PluginDefinition.Type.INPUT);
}
public List<PluginVertex> getFilterPluginVertices() {
return getPluginVertices(PluginDefinition.Type.FILTER);
}
public List<PluginVertex> getOutputPluginVertices() {
return getPluginVertices(PluginDefinition.Type.OUTPUT);
}
@Override
public String toString() {
String summary = String.format("[Pipeline] Inputs: %d Filters: %d Outputs %d",
getInputPluginVertices().size(),
getFilterPluginVertices().size(),
getOutputPluginVertices().size());
return summary + "\n" + graph.toString();
}
// Return plugin vertices by type
public Stream<PluginVertex> pluginVertices(PluginDefinition.Type type) {
return pluginVertices()
.filter(v -> v.getPluginDefinition().getType().equals(type));
}
// Return plugin vertices by type
public List<PluginVertex> getPluginVertices(PluginDefinition.Type type) {
return pluginVertices(type).collect(Collectors.toList());
}
public List<PluginVertex> getPluginVertices() {
return pluginVertices().collect(Collectors.toList());
}
public Stream<PluginVertex> pluginVertices() {
return graph.vertices()
.filter(v -> v instanceof PluginVertex)
.map(v -> (PluginVertex) v);
}
@Override
public String hashSource() {
return this.graph.uniqueHash();
}
}
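A rough JRuby sketch of how the three per-section graphs feed this constructor, mirroring LogStash::Compiler.compile_pipeline (the config string is a made-up example):

require "logstash/compiler"

graphs = LogStash::Compiler.compile_graph(
  "input { generator {} } filter { mutate {} } output { stdout {} }", "example.conf")
pipeline = org.logstash.config.ir.Pipeline.new(graphs[:input], graphs[:filter], graphs[:output])
pipeline.getInputPluginVertices.size   # => 1
pipeline.getPostQueue                  # sorted vertices downstream of the queue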

View file

@ -0,0 +1,102 @@
package org.logstash.config.ir;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.HashSet;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
/**
* Created by andrewvc on 9/20/16.
*/
public class PluginDefinition implements SourceComponent, Hashable {
private static ObjectMapper om = new ObjectMapper();
@Override
public String hashSource() {
try {
String serializedArgs = om.writeValueAsString(this.getArguments());
return this.getClass().getCanonicalName() + "|" +
this.getType().toString() + "|" +
this.getName() + "|" +
serializedArgs;
} catch (JsonProcessingException e) {
throw new IllegalArgumentException("Could not serialize plugin args as JSON", e);
}
}
public enum Type {
INPUT,
FILTER,
OUTPUT,
CODEC
}
private final Type type;
private final String name;
private final Map<String,Object> arguments;
public Type getType() {
return type;
}
public String getName() {
return name;
}
public Map<String, Object> getArguments() {
return arguments;
}
public PluginDefinition(Type type, String name, Map<String, Object> arguments) {
this.type = type;
this.name = name;
this.arguments = arguments;
}
public String toString() {
return type.toString().toLowerCase() + "-" + name + arguments;
}
public int hashCode() {
return Objects.hash(type, name, arguments);
}
@Override
public boolean equals(Object o) {
if (o == null) return false;
if (o instanceof PluginDefinition) {
PluginDefinition oPlugin = (PluginDefinition) o;
return type.equals(oPlugin.type) && name.equals(oPlugin.name) && arguments.equals(oPlugin.arguments);
}
return false;
}
@Override
public boolean sourceComponentEquals(SourceComponent o) {
if (o == null) return false;
if (o instanceof PluginDefinition) {
PluginDefinition oPluginDefinition = (PluginDefinition) o;
Set<String> allArgs = new HashSet<>();
allArgs.addAll(getArguments().keySet());
allArgs.addAll(oPluginDefinition.getArguments().keySet());
// Compare all arguments except the unique id
boolean argsMatch = allArgs.stream().
filter(k -> !k.equals("id")).
allMatch(k -> Objects.equals(getArguments().get(k), oPluginDefinition.getArguments().get(k)));
return argsMatch && type.equals(oPluginDefinition.type) && name.equals(oPluginDefinition.name);
}
return false;
}
@Override
public SourceMetadata getMeta() {
return null;
}
}
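A small JRuby sketch of the id-insensitive comparison above (argument values are made up):

pd = org.logstash.config.ir.PluginDefinition
a  = pd.new(pd::Type::FILTER, "grok", {"match" => "%{NUMBER:n}", "id" => "one"})
b  = pd.new(pd::Type::FILTER, "grok", {"match" => "%{NUMBER:n}", "id" => "two"})
a.sourceComponentEquals(b)   # => true,  "id" is excluded from the comparison
a.equals(b)                  # => false, strict equality compares every argument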

View file

@ -0,0 +1,9 @@
package org.logstash.config.ir;
/**
* Created by andrewvc on 9/16/16.
*/
public interface SourceComponent {
boolean sourceComponentEquals(SourceComponent sourceComponent);
SourceMetadata getMeta();
}

View file

@ -0,0 +1,52 @@
package org.logstash.config.ir;
import java.util.Objects;
/**
* Created by andrewvc on 9/6/16.
*/
public class SourceMetadata {
private final String sourceFile;
public String getSourceFile() {
return sourceFile;
}
public Integer getSourceLine() {
return sourceLine;
}
public Integer getSourceColumn() {
return sourceColumn;
}
public String getSourceText() {
return sourceText;
}
private final Integer sourceLine;
private final Integer sourceColumn;
private final String sourceText;
public SourceMetadata(String sourceFile, Integer sourceLine, Integer sourceChar, String sourceText) {
this.sourceFile = sourceFile;
this.sourceLine = sourceLine;
this.sourceColumn = sourceChar;
this.sourceText = sourceText;
}
public SourceMetadata() {
this.sourceFile = null;
this.sourceLine = null;
this.sourceColumn = null;
this.sourceText = null;
}
public int hashCode() {
return Objects.hash(this.sourceFile, this.sourceLine, this.sourceColumn, this.sourceText);
}
public String toString() {
return sourceFile + ":" + sourceLine + ":" + sourceColumn + ":```\n" + sourceText + "\n```";
}
}

View file

@ -0,0 +1,53 @@
package org.logstash.config.ir.expression;
import org.logstash.config.ir.SourceComponent;
import org.logstash.config.ir.InvalidIRException;
import org.logstash.config.ir.SourceMetadata;
/**
* Created by andrewvc on 9/6/16.
*/
public abstract class BinaryBooleanExpression extends BooleanExpression {
@Override
public boolean sourceComponentEquals(SourceComponent sourceComponent) {
if (sourceComponent == null) return false;
if (this == sourceComponent) return true;
if (this.getClass().equals(sourceComponent.getClass())) {
BinaryBooleanExpression other = (BinaryBooleanExpression) sourceComponent;
return (this.getLeft().sourceComponentEquals(other.getLeft()) &&
this.getRight().sourceComponentEquals(other.getRight()));
}
return false;
}
private final Expression left;
private final Expression right;
public Expression getRight() {
return right;
}
public Expression getLeft() {
return left;
}
public BinaryBooleanExpression(SourceMetadata meta,
Expression left,
Expression right) throws InvalidIRException {
super(meta);
this.left = left;
this.right = right;
}
public abstract String rubyOperator();
@Override
public String toRubyString() {
return "(" + getLeft().toRubyString() + rubyOperator() + getRight().toRubyString() + ")";
}
@Override
public String hashSource() {
return this.getClass().getCanonicalName() + "[" + getLeft().hashSource() + "|" + getRight().hashSource() + "]";
}
}

View file

@ -0,0 +1,12 @@
package org.logstash.config.ir.expression;
import org.logstash.config.ir.SourceMetadata;
/**
* Created by andrewvc on 9/14/16.
*/
public abstract class BooleanExpression extends Expression {
public BooleanExpression(SourceMetadata meta) {
super(meta);
}
}

View file

@ -0,0 +1,46 @@
package org.logstash.config.ir.expression;
import org.logstash.config.ir.SourceComponent;
import org.logstash.config.ir.SourceMetadata;
/**
* Created by andrewvc on 9/13/16.
*/
public class EventValueExpression extends Expression {
private final String fieldName;
public EventValueExpression(SourceMetadata meta, String fieldName) {
super(meta);
this.fieldName = fieldName;
}
public String getFieldName() {
return fieldName;
}
@Override
public boolean sourceComponentEquals(SourceComponent sourceComponent) {
if (sourceComponent == null) return false;
if (this == sourceComponent) return true;
if (sourceComponent instanceof EventValueExpression) {
EventValueExpression other = (EventValueExpression) sourceComponent;
return (this.getFieldName().equals(other.getFieldName()));
}
return false;
}
@Override
public String toString() {
return "event.get('" + fieldName + "')";
}
@Override
public String toRubyString() {
return "event.getField('" + fieldName + "')";
}
@Override
public String hashSource() {
return this.getClass().getCanonicalName() + "|" + fieldName;
}
}

View file

@ -0,0 +1,48 @@
package org.logstash.config.ir.expression;
import org.jruby.RubyInstanceConfig;
import org.jruby.embed.AttributeName;
import org.jruby.embed.ScriptingContainer;
import org.logstash.config.ir.Hashable;
import org.logstash.config.ir.BaseSourceComponent;
import org.logstash.config.ir.SourceMetadata;
/*
* [foo] == "foostr" eAnd [bar] > 10
* eAnd(eEq(eventValueExpr("foo"), value("foostr")), eGt(eEventValue("bar"), value(10)))
*
* if [foo]
* notnull(eEventValue("foo"))
* Created by andrewvc on 9/6/16.
*/
public abstract class Expression extends BaseSourceComponent implements Hashable {
private Object compiled;
private ScriptingContainer container;
public Expression(SourceMetadata meta) {
super(meta);
}
public boolean eval() {
return true;
}
public void compile() {
container = new ScriptingContainer();
container.setCompileMode(RubyInstanceConfig.CompileMode.JIT);
container.setAttribute(AttributeName.SHARING_VARIABLES, false);
container.runScriptlet("def start(event)\n" + this.toString() + "\nend");
}
@Override
public String toString(int indent) {
return toString();
}
@Override
public String toString() {
return toRubyString();
}
public abstract String toRubyString();
}
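To make the class comment above concrete, here is a minimal sketch of building the expression tree for [foo] == "foostr" and [bar] > 10 with the concrete expression classes added later in this commit (And, Eq, Gt). The wrapper class and its name are illustrative only and not part of the commit:

import org.logstash.config.ir.InvalidIRException;
import org.logstash.config.ir.SourceMetadata;
import org.logstash.config.ir.expression.BooleanExpression;
import org.logstash.config.ir.expression.EventValueExpression;
import org.logstash.config.ir.expression.ValueExpression;
import org.logstash.config.ir.expression.binary.And;
import org.logstash.config.ir.expression.binary.Eq;
import org.logstash.config.ir.expression.binary.Gt;

public class ExpressionSketch {
    public static void main(String[] args) throws InvalidIRException {
        SourceMetadata meta = new SourceMetadata();
        // [foo] == "foostr"
        Eq fooEq = new Eq(meta, new EventValueExpression(meta, "foo"), new ValueExpression(meta, "foostr"));
        // [bar] > 10
        Gt barGt = new Gt(meta, new EventValueExpression(meta, "bar"), new ValueExpression(meta, 10));
        BooleanExpression condition = new And(meta, fooEq, barGt);
        // Prints ((event.getField('foo')=='foostr')&&(event.getField('bar')>10))
        System.out.println(condition.toRubyString());
    }
}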

View file

@ -0,0 +1,53 @@
package org.logstash.config.ir.expression;
import org.joni.Option;
import org.joni.Regex;
import org.logstash.config.ir.SourceComponent;
import org.logstash.config.ir.InvalidIRException;
import org.logstash.config.ir.SourceMetadata;
/**
* Created by andrewvc on 9/15/16.
*/
public class RegexValueExpression extends ValueExpression {
private final Regex regex;
public RegexValueExpression(SourceMetadata meta, Object value) throws InvalidIRException {
super(meta, value);
if (!(value instanceof String)) {
throw new InvalidIRException("Regex value expressions can only take strings!");
}
byte[] patternBytes = getSource().getBytes();
this.regex = new Regex(patternBytes, 0, patternBytes.length, Option.NONE);
}
@Override
public Object get() {
return this.regex;
}
public String getSource() {
return (String) value;
}
@Override
public String toString() {
return this.value.toString();
}
@Override
public boolean sourceComponentEquals(SourceComponent other) {
if (other == null) return false;
if (other instanceof RegexValueExpression) {
return (((RegexValueExpression) other).getSource().equals(getSource()));
}
return false;
}
@Override
public String toRubyString() {
return (String) value;
}
}

View file

@ -0,0 +1,27 @@
package org.logstash.config.ir.expression;
import org.logstash.config.ir.InvalidIRException;
import org.logstash.config.ir.SourceMetadata;
/**
* Created by andrewvc on 9/13/16.
*/
public abstract class UnaryBooleanExpression extends BooleanExpression {
private final Expression expression;
public Expression getExpression() {
return expression;
}
public UnaryBooleanExpression(SourceMetadata meta,
Expression expression) throws InvalidIRException {
super(meta);
if (expression == null) throw new InvalidIRException("Unary expressions cannot operate on null!");
this.expression = expression;
}
@Override
public String hashSource() {
return this.getClass().getCanonicalName() + "[" + this.expression.hashSource() + "]";
}
}

View file

@ -0,0 +1,73 @@
package org.logstash.config.ir.expression;
import org.logstash.config.ir.SourceComponent;
import org.logstash.config.ir.InvalidIRException;
import org.logstash.config.ir.SourceMetadata;
import java.math.BigDecimal;
import java.util.List;
/**
* Created by andrewvc on 9/13/16.
*/
public class ValueExpression extends Expression {
protected final Object value;
public ValueExpression(SourceMetadata meta, Object value) throws InvalidIRException {
super(meta);
if (!(value == null ||
value instanceof Short ||
value instanceof Long ||
value instanceof Integer ||
value instanceof Float ||
value instanceof Double ||
value instanceof BigDecimal ||
value instanceof String ||
value instanceof List ||
value instanceof java.time.Instant
)) {
// This *should* be caught by the treetop grammar, but we need this case just in case there's a bug
// somewhere
throw new InvalidIRException("Invalid eValue " + value + " with class " + value.getClass().getName());
}
this.value = value;
}
public Object get() {
return value;
}
@Override
public boolean sourceComponentEquals(SourceComponent sourceComponent) {
if (sourceComponent == null) return false;
if (this == sourceComponent) return true;
if (sourceComponent instanceof ValueExpression) {
ValueExpression other = (ValueExpression) sourceComponent;
if (this.get() == null) {
return (other.get() == null);
} else {
return (this.get().equals(other.get()));
}
}
return false;
}
@Override
public String toRubyString() {
if (value == null) {
return "null";
}
if (value instanceof String) {
return "'" + get() + "'";
}
return get().toString();
}
@Override
public String hashSource() {
return this.getClass().getCanonicalName() + "|" + value;
}
}

View file

@ -0,0 +1,20 @@
package org.logstash.config.ir.expression.binary;
import org.logstash.config.ir.InvalidIRException;
import org.logstash.config.ir.SourceMetadata;
import org.logstash.config.ir.expression.BinaryBooleanExpression;
import org.logstash.config.ir.expression.Expression;
/**
* Created by andrewvc on 9/21/16.
*/
public class And extends BinaryBooleanExpression {
public And(SourceMetadata meta, Expression left, Expression right) throws InvalidIRException {
super(meta, left, right);
}
@Override
public String rubyOperator() {
return "&&";
}
}

View file

@ -0,0 +1,20 @@
package org.logstash.config.ir.expression.binary;
import org.logstash.config.ir.InvalidIRException;
import org.logstash.config.ir.SourceMetadata;
import org.logstash.config.ir.expression.BinaryBooleanExpression;
import org.logstash.config.ir.expression.Expression;
/**
* Created by andrewvc on 9/21/16.
*/
public class Eq extends BinaryBooleanExpression {
public Eq(SourceMetadata meta, Expression left, Expression right) throws InvalidIRException {
super(meta, left, right);
}
@Override
public String rubyOperator() {
return "==";
}
}

View file

@ -0,0 +1,20 @@
package org.logstash.config.ir.expression.binary;
import org.logstash.config.ir.InvalidIRException;
import org.logstash.config.ir.SourceMetadata;
import org.logstash.config.ir.expression.BinaryBooleanExpression;
import org.logstash.config.ir.expression.Expression;
/**
* Created by andrewvc on 9/21/16.
*/
public class Gt extends BinaryBooleanExpression {
public Gt(SourceMetadata meta, Expression left, Expression right) throws InvalidIRException {
super(meta, left, right);
}
@Override
public String rubyOperator() {
return ">";
}
}

View file

@ -0,0 +1,20 @@
package org.logstash.config.ir.expression.binary;
import org.logstash.config.ir.InvalidIRException;
import org.logstash.config.ir.SourceMetadata;
import org.logstash.config.ir.expression.BinaryBooleanExpression;
import org.logstash.config.ir.expression.Expression;
/**
* Created by andrewvc on 9/21/16.
*/
public class Gte extends BinaryBooleanExpression {
public Gte(SourceMetadata meta, Expression left, Expression right) throws InvalidIRException {
super(meta, left, right);
}
@Override
public String rubyOperator() {
return ">=";
}
}

View file

@ -0,0 +1,20 @@
package org.logstash.config.ir.expression.binary;
import org.logstash.config.ir.InvalidIRException;
import org.logstash.config.ir.SourceMetadata;
import org.logstash.config.ir.expression.BinaryBooleanExpression;
import org.logstash.config.ir.expression.Expression;
/**
* Created by andrewvc on 9/21/16.
*/
public class In extends BinaryBooleanExpression {
public In(SourceMetadata meta, Expression left, Expression right) throws InvalidIRException {
super(meta, left, right);
}
@Override
public String rubyOperator() {
return ".include?";
}
}

View file

@ -0,0 +1,20 @@
package org.logstash.config.ir.expression.binary;
import org.logstash.config.ir.InvalidIRException;
import org.logstash.config.ir.SourceMetadata;
import org.logstash.config.ir.expression.BinaryBooleanExpression;
import org.logstash.config.ir.expression.Expression;
/**
* Created by andrewvc on 9/21/16.
*/
public class Lt extends BinaryBooleanExpression {
public Lt(SourceMetadata meta, Expression left, Expression right) throws InvalidIRException {
super(meta, left, right);
}
@Override
public String rubyOperator() {
return "<";
}
}

View file

@ -0,0 +1,20 @@
package org.logstash.config.ir.expression.binary;
import org.logstash.config.ir.InvalidIRException;
import org.logstash.config.ir.SourceMetadata;
import org.logstash.config.ir.expression.BinaryBooleanExpression;
import org.logstash.config.ir.expression.Expression;
/**
* Created by andrewvc on 9/21/16.
*/
public class Lte extends BinaryBooleanExpression {
public Lte(SourceMetadata meta, Expression left, Expression right) throws InvalidIRException {
super(meta, left, right);
}
@Override
public String rubyOperator() {
return "<=";
}
}

View file

@ -0,0 +1,20 @@
package org.logstash.config.ir.expression.binary;
import org.logstash.config.ir.InvalidIRException;
import org.logstash.config.ir.SourceMetadata;
import org.logstash.config.ir.expression.BinaryBooleanExpression;
import org.logstash.config.ir.expression.Expression;
/**
* Created by andrewvc on 9/21/16.
*/
public class Neq extends BinaryBooleanExpression {
public Neq(SourceMetadata meta, Expression left, Expression right) throws InvalidIRException {
super(meta, left, right);
}
@Override
public String rubyOperator() {
return "!=";
}
}

View file

@ -0,0 +1,20 @@
package org.logstash.config.ir.expression.binary;
import org.logstash.config.ir.InvalidIRException;
import org.logstash.config.ir.SourceMetadata;
import org.logstash.config.ir.expression.BinaryBooleanExpression;
import org.logstash.config.ir.expression.Expression;
/**
* Created by andrewvc on 9/21/16.
*/
public class Or extends BinaryBooleanExpression {
public Or(SourceMetadata meta, Expression left, Expression right) throws InvalidIRException {
super(meta, left, right);
}
@Override
public String rubyOperator() {
return "||";
}
}

View file

@ -0,0 +1,25 @@
package org.logstash.config.ir.expression.binary;
import org.logstash.config.ir.InvalidIRException;
import org.logstash.config.ir.SourceMetadata;
import org.logstash.config.ir.expression.BinaryBooleanExpression;
import org.logstash.config.ir.expression.Expression;
import org.logstash.config.ir.expression.RegexValueExpression;
/**
* Created by andrewvc on 9/21/16.
*/
public class RegexEq extends BinaryBooleanExpression {
public RegexEq(SourceMetadata meta, Expression left, Expression right) throws InvalidIRException {
super(meta, left, right);
if (!(right instanceof RegexValueExpression)) {
throw new InvalidIRException("You must use a regexp operator with a regexp rval!" + right);
}
}
@Override
public String rubyOperator() {
return "=~";
}
}
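A short sketch of the constraint enforced by this constructor: the right-hand side of =~ must be a RegexValueExpression, and anything else is rejected at construction time. The class name below is illustrative only:

import org.logstash.config.ir.InvalidIRException;
import org.logstash.config.ir.SourceMetadata;
import org.logstash.config.ir.expression.EventValueExpression;
import org.logstash.config.ir.expression.RegexValueExpression;
import org.logstash.config.ir.expression.ValueExpression;
import org.logstash.config.ir.expression.binary.RegexEq;

public class RegexEqSketch {
    public static void main(String[] args) throws InvalidIRException {
        SourceMetadata meta = new SourceMetadata();
        EventValueExpression message = new EventValueExpression(meta, "message");

        // Valid: the rval is a RegexValueExpression
        RegexEq ok = new RegexEq(meta, message, new RegexValueExpression(meta, "^ERROR"));
        System.out.println(ok.toRubyString()); // (event.getField('message')=~^ERROR)

        try {
            // Invalid: a plain string ValueExpression is rejected at construction time
            new RegexEq(meta, message, new ValueExpression(meta, "^ERROR"));
        } catch (InvalidIRException e) {
            System.out.println("rejected: " + e.getMessage());
        }
    }
}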

View file

@ -0,0 +1,28 @@
package org.logstash.config.ir.expression.unary;
import org.logstash.config.ir.SourceComponent;
import org.logstash.config.ir.InvalidIRException;
import org.logstash.config.ir.SourceMetadata;
import org.logstash.config.ir.expression.Expression;
import org.logstash.config.ir.expression.UnaryBooleanExpression;
/**
* Created by andrewvc on 9/21/16.
*/
public class Not extends UnaryBooleanExpression {
public Not(SourceMetadata meta, Expression expression) throws InvalidIRException {
super(meta, expression);
}
@Override
public String toRubyString() {
return "!(" + getExpression().toRubyString() + ")";
}
@Override
public boolean sourceComponentEquals(SourceComponent sourceComponent) {
return sourceComponent != null &&
(sourceComponent instanceof Not &&
((Not) sourceComponent).getExpression().sourceComponentEquals(getExpression()));
}
}

View file

@ -0,0 +1,28 @@
package org.logstash.config.ir.expression.unary;
import org.logstash.config.ir.SourceComponent;
import org.logstash.config.ir.InvalidIRException;
import org.logstash.config.ir.SourceMetadata;
import org.logstash.config.ir.expression.Expression;
import org.logstash.config.ir.expression.UnaryBooleanExpression;
/**
* Created by andrewvc on 9/21/16.
*/
public class Truthy extends UnaryBooleanExpression {
public Truthy(SourceMetadata meta, Expression expression) throws InvalidIRException {
super(meta, expression);
}
@Override
public String toRubyString() {
return "(" + this.getExpression() + ")";
}
@Override
public boolean sourceComponentEquals(SourceComponent sourceComponent) {
return sourceComponent != null &&
sourceComponent instanceof Truthy &&
((Truthy) sourceComponent).getExpression().sourceComponentEquals(this.getExpression());
}
}

View file

@ -0,0 +1,85 @@
package org.logstash.config.ir.graph;
import org.logstash.common.Util;
import org.logstash.config.ir.SourceComponent;
import org.logstash.config.ir.InvalidIRException;
/**
* Created by andrewvc on 9/15/16.
*/
public class BooleanEdge extends Edge {
public static class BooleanEdgeFactory extends EdgeFactory {
public Boolean getEdgeType() {
return edgeType;
}
private final Boolean edgeType;
public BooleanEdgeFactory(Boolean edgeType) {
this.edgeType = edgeType;
}
public BooleanEdge make(Vertex from, Vertex to) throws InvalidIRException {
return new BooleanEdge(edgeType, from, to);
}
public boolean equals(Object other) {
if (other == null) return false;
if (other instanceof BooleanEdgeFactory) {
return ((BooleanEdgeFactory) other).getEdgeType().equals(edgeType);
}
return false;
}
public String toString() {
return "BooleanEdge.BooleanEdgeFactory[" + edgeType + "]";
}
}
public static BooleanEdge.BooleanEdgeFactory trueFactory = new BooleanEdge.BooleanEdgeFactory(true);
public static BooleanEdge.BooleanEdgeFactory falseFactory = new BooleanEdge.BooleanEdgeFactory(false);
private final Boolean edgeType;
public Boolean getEdgeType() {
return edgeType;
}
public BooleanEdge(Boolean edgeType, Vertex from, Vertex to) throws InvalidIRException {
super(from, to);
this.edgeType = edgeType;
}
@Override
public String individualHashSource() {
return this.getClass().getCanonicalName() + "|" + this.getEdgeType() + "|";
}
@Override
public String getId() {
return Util.digest(this.getFrom().getId() + "[" + this.getEdgeType() + "]->" + this.getTo().getId());
}
public String toString() {
return getFrom() + " -|" + this.edgeType + "|-> " + getTo();
}
@Override
public boolean sourceComponentEquals(SourceComponent sourceComponent) {
if (sourceComponent == null) return false;
if (sourceComponent == this) return true;
if (sourceComponent instanceof BooleanEdge) {
BooleanEdge otherE = (BooleanEdge) sourceComponent;
return this.getFrom().sourceComponentEquals(otherE.getFrom()) &&
this.getTo().sourceComponentEquals(otherE.getTo()) &&
this.getEdgeType().equals(otherE.getEdgeType());
}
return false;
}
@Override
public BooleanEdge copy(Vertex from, Vertex to) throws InvalidIRException {
return new BooleanEdge(this.edgeType, from, to);
}
}

View file

@ -0,0 +1,99 @@
package org.logstash.config.ir.graph;
import org.logstash.config.ir.SourceComponent;
import org.logstash.config.ir.InvalidIRException;
import org.logstash.config.ir.SourceMetadata;
import java.util.stream.Stream;
/**
* Created by andrewvc on 9/15/16.
*/
public abstract class Edge implements SourceComponent {
private Graph graph;
public void setGraph(Graph graph) {
if (this.graph == graph) {
return;
} else if (this.graph == null) {
this.graph = graph;
} else {
throw new IllegalArgumentException("Attempted to set graph for edge that already has one!" + this);
}
}
public abstract Edge copy(Vertex from, Vertex to) throws InvalidIRException;
public static abstract class EdgeFactory {
public abstract Edge make(Vertex from, Vertex to) throws InvalidIRException;
}
private final Vertex from;
private final Vertex to;
public Stream<Edge> ancestors() {
// Without all the distinct calls this can be slow
return Stream.concat(this.from.incomingEdges(), this.from.incomingEdges().flatMap(Edge::ancestors).distinct()).distinct();
}
public Stream<Edge> descendants() {
// Without all the distinct calls this can be slow
return Stream.concat(this.to.outgoingEdges(), this.to.outgoingEdges().flatMap(Edge::descendants).distinct()).distinct();
}
public Stream<Edge> lineage() {
return Stream.concat(Stream.concat(ancestors(), Stream.of(this)), descendants());
}
public Edge(Vertex from, Vertex to) throws InvalidIRException {
this.from = from;
this.to = to;
if (this.from == this.to) {
throw new InvalidIRException("Cannot create a cyclic vertex! " + to);
}
if (!this.from.acceptsOutgoingEdge(this)) {
throw new Vertex.InvalidEdgeTypeException(String.format("Edge %s is not a valid outgoing edge for vertex %s", this, this.from));
}
if (!this.to.acceptsIncomingEdge(this)) {
throw new Vertex.InvalidEdgeTypeException(String.format("Edge %s is not a valid incoming edge for vertex %s", this, this.to));
}
}
public Vertex getTo() {
return to;
}
public Vertex getFrom() {
return from;
}
public String toString() {
return getFrom() + " -> " + getTo();
}
@Override
public boolean sourceComponentEquals(SourceComponent sourceComponent) {
if (sourceComponent == null) return false;
if (sourceComponent == this) return true;
if (sourceComponent.getClass() == this.getClass()) {
Edge otherE = (Edge) sourceComponent;
return this.getFrom().sourceComponentEquals(otherE.getFrom()) &&
this.getTo().sourceComponentEquals(otherE.getTo());
}
return false;
}
public abstract String individualHashSource();
public abstract String getId();
@Override
public SourceMetadata getMeta() {
return null;
}
}

View file

@ -0,0 +1,434 @@
package org.logstash.config.ir.graph;
import org.logstash.common.Util;
import org.logstash.config.ir.Hashable;
import org.logstash.config.ir.SourceComponent;
import org.logstash.config.ir.InvalidIRException;
import org.logstash.config.ir.SourceMetadata;
import org.logstash.config.ir.graph.algorithms.BreadthFirst;
import org.logstash.config.ir.graph.algorithms.GraphDiff;
import org.logstash.config.ir.graph.algorithms.TopologicalSort;
import java.security.MessageDigest;
import java.util.*;
import java.util.function.BiFunction;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
* Created by andrewvc on 9/15/16.
*/
public class Graph implements SourceComponent, Hashable {
public final Set<Vertex> vertices = new HashSet<>();
private final Set<Edge> edges = new HashSet<>();
private Map<Vertex, Integer> vertexRanks = new HashMap<>();
private final Map<Vertex,Set<Edge>> outgoingEdgeLookup = new HashMap<>();
private final Map<Vertex,Set<Edge>> incomingEdgeLookup = new HashMap<>();
private List<Vertex> sortedVertices;
// Builds a graph that has the specified vertices and edges
// Note that this does *not* validate the result
public Graph(Collection<Vertex> vertices, Collection<Edge> edges) throws InvalidIRException {
for (Vertex vertex : vertices) { this.addVertex(vertex, false); }
for (Edge edge : edges) { this.addEdge(edge, false); }
this.refresh();
}
public Graph() {}
public static Graph empty() {
return new Graph();
}
public void addVertex(Vertex v) throws InvalidIRException {
addVertex(v, true);
}
private void addVertex(Vertex v, boolean doRefresh) throws InvalidIRException {
// A vertex may only belong to one graph at a time; use importVertex to copy one in from another graph
if (v.getGraph() != null && v.getGraph() != this) {
throw new InvalidIRException("Attempted to add vertex already belonging to a graph!");
}
v.setGraph(this);
this.vertices.add(v);
if (doRefresh) this.refresh();
}
// Takes an arbitrary vertex from any graph and brings it into this one.
// It may have to copy it. The actual vertex that gets used is returned
public Vertex importVertex(Vertex v) throws InvalidIRException {
if (v.getGraph() != this) {
if (v.getGraph() == null) {
this.addVertex(v);
return v;
} else {
Vertex copy = v.copy();
this.addVertex(copy);
return copy;
}
} else {
return v;
}
}
public Vertex getVertexById(String id) {
return this.vertices().filter(v -> v.getId().equals(id)).findAny().orElse(null);
}
// Use chainVertices instead
private Graph addEdge(Edge e) throws InvalidIRException {
return addEdge(e, true);
}
private Graph addEdge(Edge e, boolean doRefresh) throws InvalidIRException {
if (!(this.getVertices().contains(e.getFrom()) && this.getVertices().contains(e.getTo()))) {
throw new InvalidIRException("Attempted to add edge referencing vertices not in this graph!");
}
this.edges.add(e);
BiFunction<Vertex, Set<Edge>, Set<Edge>> lookupComputeFunction = (vertex, edgeSet) -> {
if (edgeSet == null) edgeSet = new HashSet<>();
edgeSet.add(e);
return edgeSet;
};
this.outgoingEdgeLookup.compute(e.getFrom(), lookupComputeFunction);
this.incomingEdgeLookup.compute(e.getTo(), lookupComputeFunction);
e.setGraph(this);
if (doRefresh) this.refresh();
return this;
}
protected Collection<Edge> getOutgoingEdges(Vertex v) {
return this.outgoingEdgeLookup.getOrDefault(v, Collections.emptySet());
}
protected Collection<Edge> getIncomingEdges(Vertex v) {
return this.incomingEdgeLookup.getOrDefault(v, Collections.emptySet());
}
// Returns a copy of this graph
public Graph copy() throws InvalidIRException {
return Graph.combine(this).graph;
}
// Returns a new graph that is the union of all provided graphs.
// If a single graph is passed in this will return a copy of it
public static GraphCombinationResult combine(Graph... graphs) throws InvalidIRException {
Map<Vertex, Vertex> oldToNewVertices = new HashMap<>();
Map<Edge,Edge> oldToNewEdges = new HashMap<>();
for (Graph graph : graphs) {
graph.vertices().forEach(v -> oldToNewVertices.put(v, v.copy()));
for (Edge e : graph.getEdges()) {
Edge copy = e.copy(oldToNewVertices.get(e.getFrom()), oldToNewVertices.get(e.getTo()));
oldToNewEdges.put(e, copy);
}
}
Graph newGraph = new Graph(oldToNewVertices.values(), oldToNewEdges.values());
return new GraphCombinationResult(newGraph, oldToNewVertices, oldToNewEdges);
}
public static final class GraphCombinationResult {
public final Graph graph;
public final Map<Vertex, Vertex> oldToNewVertices;
public final Map<Edge, Edge> oldToNewEdges;
GraphCombinationResult(Graph graph, Map<Vertex, Vertex> oldToNewVertices, Map<Edge, Edge> oldToNewEdges) {
this.graph = graph;
this.oldToNewVertices = oldToNewVertices;
this.oldToNewEdges = oldToNewEdges;
}
}
/*
Return a copy of this graph with the other graph's nodes merged in, connecting this graph's leaves to
the other graph's roots
*/
public Graph chain(Graph otherGraph) throws InvalidIRException {
if (otherGraph.getVertices().size() == 0) return this.copy();
if (this.isEmpty()) return otherGraph.copy();
GraphCombinationResult combineResult = Graph.combine(this, otherGraph);
// Build these lists here since we do mutate the graph in place later
// This isn't strictly necessary, but makes things less confusing
Collection<Vertex> fromLeaves = allLeaves().map(combineResult.oldToNewVertices::get).collect(Collectors.toSet());
Collection<Vertex> toRoots = otherGraph.roots().map(combineResult.oldToNewVertices::get).collect(Collectors.toSet());
return combineResult.graph.chain(fromLeaves, toRoots);
}
public Graph chain(Vertex... otherVertex) throws InvalidIRException {
chain(this.getAllLeaves(), Arrays.asList(otherVertex));
return this;
}
// This does *not* return a copy for performance reasons
private Graph chain(Collection<Vertex> fromLeaves, Collection<Vertex> toVertices) throws InvalidIRException {
for (Vertex leaf : fromLeaves) {
for (Edge.EdgeFactory unusedEf : leaf.getUnusedOutgoingEdgeFactories()) {
for (Vertex toVertex : toVertices) {
this.chainVertices(unusedEf, leaf, toVertex);
}
}
}
return this;
}
public Collection<Edge> chainVerticesById(String... vertexIds) throws InvalidIRException {
return chainVerticesById(PlainEdge.factory, vertexIds);
}
public Collection<Edge> chainVerticesById(Edge.EdgeFactory edgeFactory, String... vertexIds) throws InvalidIRException {
Vertex[] argVertices = new Vertex[vertexIds.length];
for (int i = 0; i < vertexIds.length; i ++) {
String id = vertexIds[i];
Vertex v = getVertexById(id);
if (v==null) throw new InvalidIRException("Could not chain vertex, id not found in graph: " + id + "\n" + this);
argVertices[i] = v;
}
return chainVertices(edgeFactory, argVertices);
}
// Will not validate the graph after running!
// You must invoke validate() on the graph yourself
// after calling this method
public Collection<Edge> chainVerticesUnsafe(Edge.EdgeFactory edgeFactory, Vertex... argVertices) throws InvalidIRException {
List<Vertex> importedVertices = new ArrayList<>(argVertices.length);
for (Vertex va : argVertices) {
importedVertices.add(this.importVertex(va));
}
List<Edge> newEdges = new ArrayList<>();
for (int i = 0; i < importedVertices.size()-1; i++) {
Vertex from = importedVertices.get(i);
Vertex to = importedVertices.get(i+1);
this.addVertex(from, false);
this.addVertex(to, false);
Edge edge = edgeFactory.make(from, to);
newEdges.add(edge);
this.addEdge(edge, false);
}
refresh();
return newEdges;
}
public Collection<Edge> chainVertices(Edge.EdgeFactory edgeFactory, Vertex... argVertices) throws InvalidIRException {
Collection<Edge> edges = chainVerticesUnsafe(edgeFactory, argVertices);
validate();
return edges;
}
public Edge chainVertices(Vertex a, Vertex b) throws InvalidIRException {
return chainVertices(PlainEdge.factory, a, b).stream().findFirst().get();
}
public Collection<Edge> chainVertices(Vertex... vertices) throws InvalidIRException {
return chainVertices(PlainEdge.factory, vertices);
}
public Collection<Edge> chainVertices(boolean bool, Vertex... vertices) throws InvalidIRException {
Edge.EdgeFactory factory = bool ? BooleanEdge.trueFactory : BooleanEdge.falseFactory;
return chainVertices(factory, vertices);
}
// Many of the operations we perform involve modifying one graph by adding vertices/edges
// from another. This method ensures that all the vertices/edges we know about have been pulled into
// this graph. Methods in this class that add or remove externally provided vertices/edges
// should call this method to ensure that the rest of the graph these items depend on is pulled
// in.
public void refresh() throws InvalidIRException {
this.calculateRanks();
this.calculateTopologicalSort();
}
private void calculateTopologicalSort() throws InvalidIRException {
try {
this.sortedVertices = TopologicalSort.sortVertices(this);
} catch (TopologicalSort.UnexpectedGraphCycleError unexpectedGraphCycleError) {
throw new InvalidIRException("Graph is not a dag!", unexpectedGraphCycleError);
}
}
private void calculateRanks() {
vertexRanks = BreadthFirst.breadthFirst(this.getRoots()).vertexDistances;
}
public Integer rank(Vertex vertex) {
Integer rank = vertexRanks.get(vertex);
// This should never happen
if (rank == null) throw new RuntimeException("Attempted to get rank from vertex where it is not yet calculated: " + this);
return rank;
}
public Map<String, List<Vertex>> verticesByHash() {
return this.vertices().collect(Collectors.groupingBy(Vertex::uniqueHash));
}
public void validate() throws InvalidIRException {
if (this.isEmpty()) return;
if (this.getVertices().stream().noneMatch(Vertex::isLeaf)) {
throw new InvalidIRException("Graph has no leaf vertices!\n" + this.toString());
}
List<List<Vertex>> duplicates = verticesByHash().values().stream().filter((group) -> group.size() > 1).collect(Collectors.toList());
if (!duplicates.isEmpty()) {
Stream<String> errorMessageGroups = duplicates.stream().
map((group) -> group.stream().map(Object::toString).collect(Collectors.joining("===")));
String joinedErrorMessageGroups = errorMessageGroups.collect(Collectors.joining("\n---\n"));
throw new InvalidIRException("Some nodes on the graph are fully redundant!\n" + this + "|" + joinedErrorMessageGroups);
}
}
public Stream<Vertex> roots() {
return vertices.stream().filter(Vertex::isRoot);
}
public Collection<Vertex> getRoots() {
return roots().collect(Collectors.toList());
}
// Vertices which are at least partial leaves, in that they support outgoing
// edge types that do not yet have an edge attached
public Stream<Vertex> allLeaves() {
return vertices.stream().filter(Vertex::isPartialLeaf);
}
// Get all leaves whether partial or not
public Collection<Vertex> getAllLeaves() {
return allLeaves().collect(Collectors.toList());
}
public Stream<Vertex> leaves() {
return vertices.stream().filter(Vertex::isLeaf);
}
public Collection<Vertex> getLeaves() {
return leaves().collect(Collectors.toList());
}
public Set<Vertex> getVertices() {
return vertices;
}
public Set<Edge> getEdges() {
return edges;
}
public String toString() {
Stream<Edge> edgesToFormat;
try {
edgesToFormat = sortedEdges();
} catch (InvalidIRException e) {
// Even if this isn't a valid graph we still need to print it
edgesToFormat = edges.stream();
}
String edgelessVerticesStr;
if (this.isolatedVertices().count() > 0) {
edgelessVerticesStr = "\n== Vertices Without Edges ==\n" +
this.isolatedVertices().map(Vertex::toString).collect(Collectors.joining("\n"));
} else {
edgelessVerticesStr = "";
}
return "**GRAPH**\n" +
"Vertices: " + this.vertices.size()+ " Edges: " + this.edges().count() + "\n" +
"----------------------" +
edgesToFormat.map(Edge::toString).collect(Collectors.joining("\n")) +
edgelessVerticesStr +
"\n**GRAPH**";
}
public Stream<Vertex> isolatedVertices() {
return this.getVertices().stream().filter(v -> v.getOutgoingEdges().isEmpty() && v.getIncomingEdges().isEmpty());
}
public List<Vertex> getSortedVertices() {
return this.sortedVertices;
}
public Stream<Edge> sortedEdges() throws InvalidIRException {
return getSortedVertices().stream().
flatMap(Vertex::outgoingEdges);
}
public List<Vertex> getSortedVerticesBefore(Vertex end) throws InvalidIRException {
return getSortedVerticesBetween(null, end);
}
public List<Vertex> getSortedVerticesAfter(Vertex start) throws InvalidIRException {
return getSortedVerticesBetween(start, null);
}
public List<Vertex> getSortedVerticesBetween(Vertex start, Vertex end) throws InvalidIRException {
List<Vertex> sortedVertices = getSortedVertices();
int startIndex = start == null ? 0 : sortedVertices.indexOf(start);
int endIndex = end == null ? sortedVertices.size() : sortedVertices.indexOf(end);
return sortedVertices.subList(startIndex+1, endIndex);
}
@Override
public boolean sourceComponentEquals(SourceComponent sourceComponent) {
if (sourceComponent == this) return true;
if (sourceComponent instanceof Graph) {
Graph otherGraph = (Graph) sourceComponent;
GraphDiff.DiffResult diff = GraphDiff.diff(this, otherGraph);
return diff.isIdentical();
}
return false;
}
// returns true if this graph has a .sourceComponentEquals equivalent edge
public boolean hasEquivalentEdge(Edge otherE) {
return this.getEdges().stream().anyMatch(e -> e.sourceComponentEquals(otherE));
}
public boolean hasEquivalentVertex(Vertex otherV) {
return this.getVertices().stream().anyMatch(v -> v.sourceComponentEquals(otherV));
}
@Override
public SourceMetadata getMeta() {
return null;
}
public boolean isEmpty() {
return (this.getVertices().size() == 0);
}
public Stream<Vertex> vertices() {
return this.vertices.stream();
}
public Stream<Edge> edges() {
return this.edges.stream();
}
@Override
public String hashSource() {
MessageDigest lineageDigest = Util.defaultMessageDigest();
List<byte[]> sources = this.vertices.stream().parallel().map(Vertex::uniqueHash).sorted().map(String::getBytes).collect(Collectors.toList());
sources.forEach(lineageDigest::update);
return Util.bytesToHexString(lineageDigest.digest());
}
}
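A minimal sketch of how a graph gets built with chainVertices, using two IfVertex instances from this commit as stand-ins; the wrapper class name is illustrative only:

import org.logstash.config.ir.InvalidIRException;
import org.logstash.config.ir.SourceMetadata;
import org.logstash.config.ir.expression.EventValueExpression;
import org.logstash.config.ir.expression.unary.Truthy;
import org.logstash.config.ir.graph.Graph;
import org.logstash.config.ir.graph.IfVertex;

public class GraphChainSketch {
    public static void main(String[] args) throws InvalidIRException {
        SourceMetadata meta = new SourceMetadata();
        IfVertex outer = new IfVertex(meta, new Truthy(meta, new EventValueExpression(meta, "foo")));
        IfVertex inner = new IfVertex(meta, new Truthy(meta, new EventValueExpression(meta, "bar")));

        Graph g = Graph.empty();
        // Imports both vertices, connects them with a true BooleanEdge and validates the result
        g.chainVertices(true, outer, inner);

        System.out.println(g);             // human readable dump of the edges
        System.out.println(g.getRoots());  // the vertex with no incoming edges (outer)
        System.out.println(g.getLeaves()); // the vertex with no outgoing edges (inner)
    }
}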

View file

@ -0,0 +1,103 @@
package org.logstash.config.ir.graph;
import org.logstash.config.ir.SourceComponent;
import org.logstash.config.ir.SourceMetadata;
import org.logstash.config.ir.expression.BooleanExpression;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.stream.Collectors;
/**
* Created by andrewvc on 9/15/16.
*/
public class IfVertex extends Vertex {
public BooleanExpression getBooleanExpression() {
return booleanExpression;
}
private final BooleanExpression booleanExpression;
public IfVertex(SourceMetadata meta, BooleanExpression booleanExpression) {
super(meta);
this.booleanExpression = booleanExpression;
}
public String toString() {
return "[if " + booleanExpression.toString(0) + "]";
}
@Override
public boolean sourceComponentEquals(SourceComponent other) {
if (other == null) return false;
if (other == this) return true;
if (other instanceof IfVertex) {
IfVertex otherV = (IfVertex) other;
// We don't check the id because we're comparing functional equality, not
// identity
return otherV.booleanExpression.sourceComponentEquals(this.booleanExpression);
}
return false;
}
// An IfVertex has no real metadata in and of itself, but its expression does!
@Override
public SourceMetadata getMeta() {
return null;
}
public boolean hasEdgeType(boolean type) {
for (Edge e : getOutgoingEdges()) {
BooleanEdge bEdge = (BooleanEdge) e; // There should only be boolean edges here!
if (bEdge.getEdgeType() == type) return true;
}
return false;
}
public Collection<Edge.EdgeFactory> getUnusedOutgoingEdgeFactories() {
List<Edge.EdgeFactory> l = new LinkedList<>();
if (!hasEdgeType(true)) l.add(BooleanEdge.trueFactory);
if (!hasEdgeType(false)) l.add(BooleanEdge.falseFactory);
return l;
}
public boolean acceptsOutgoingEdge(Edge e) {
return (e instanceof BooleanEdge);
}
@Override
public String getId() {
return this.uniqueHash();
}
public Collection<BooleanEdge> getOutgoingBooleanEdges() {
// Wish there was a way to do this as a Java cast without an operation
return getOutgoingEdges().stream().map(e -> (BooleanEdge) e).collect(Collectors.toList());
}
public Collection<BooleanEdge> getOutgoingBooleanEdgesByType(Boolean edgeType) {
return getOutgoingBooleanEdges().stream().filter(e -> e.getEdgeType().equals(edgeType)).collect(Collectors.toList());
}
// The most easily readable version of this for a human.
// If the original source is available we use that, otherwise we serialize the expression
public String humanReadableExpression() {
String sourceText = this.booleanExpression.getMeta() != null ? this.booleanExpression.getMeta().getSourceText() : null;
if (sourceText != null) {
return sourceText;
} else {
return this.getBooleanExpression().toRubyString();
}
}
@Override
public IfVertex copy() {
return new IfVertex(getMeta(),getBooleanExpression());
}
@Override
public String individualHashSource() {
return this.getClass().getCanonicalName() + "{" + this.booleanExpression.hashSource() + "}";
}
}
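For illustration, a small helper (hypothetical name) showing how the boolean-edge accessors above might be used to inspect the true branch of a condition:

import org.logstash.config.ir.graph.BooleanEdge;
import org.logstash.config.ir.graph.IfVertex;

public class IfVertexSketch {
    // Prints the condition and the vertices reached when it evaluates to true
    static void printTrueBranch(IfVertex ifVertex) {
        System.out.println("if " + ifVertex.humanReadableExpression());
        for (BooleanEdge edge : ifVertex.getOutgoingBooleanEdgesByType(true)) {
            System.out.println("  true -> " + edge.getTo());
        }
    }
}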

View file

@ -0,0 +1,37 @@
package org.logstash.config.ir.graph;
import org.logstash.common.Util;
import org.logstash.config.ir.InvalidIRException;
/**
* Created by andrewvc on 9/19/16.
*/
public class PlainEdge extends Edge {
public static class PlainEdgeFactory extends Edge.EdgeFactory {
@Override
public PlainEdge make(Vertex from, Vertex to) throws InvalidIRException {
return new PlainEdge(from, to);
}
}
public static PlainEdgeFactory factory = new PlainEdgeFactory();
@Override
public String individualHashSource() {
return this.getClass().getCanonicalName();
}
@Override
public String getId() {
return Util.digest(this.getFrom().getId() + "->" + this.getTo().getId());
}
public PlainEdge(Vertex from, Vertex to) throws InvalidIRException {
super(from, to);
}
@Override
public PlainEdge copy(Vertex from, Vertex to) throws InvalidIRException {
return new PlainEdge(from, to);
}
}

View file

@ -0,0 +1,85 @@
package org.logstash.config.ir.graph;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.logstash.common.Util;
import org.logstash.config.ir.SourceComponent;
import org.logstash.config.ir.PluginDefinition;
import org.logstash.config.ir.SourceMetadata;
/**
* Created by andrewvc on 9/15/16.
*/
public class PluginVertex extends Vertex {
private final SourceMetadata meta;
private final String id;
private final PluginDefinition pluginDefinition;
public String getId() {
if (id != null) return id;
if (this.getGraph() == null) {
throw new RuntimeException("Attempted to get ID from PluginVertex before attaching it to a graph!");
}
return this.uniqueHash();
}
public PluginDefinition getPluginDefinition() {
return pluginDefinition;
}
@Override
public SourceMetadata getMeta() {
return meta;
}
public PluginVertex(SourceMetadata meta, PluginDefinition pluginDefinition) {
super(meta);
this.meta = meta;
this.pluginDefinition = pluginDefinition;
Object argId = this.pluginDefinition.getArguments().get("id");
this.id = argId != null ? argId.toString() : null;
}
public String toString() {
return "P[" + pluginDefinition + "|" + this.getMeta() + "]";
}
@Override
public String individualHashSource() {
ObjectMapper objectMapper = new ObjectMapper();
try {
return Util.digest(this.getClass().getCanonicalName() + "|" +
(this.id != null ? this.id : "NOID") + "|" +
this.pluginDefinition.getName() + "|" +
this.pluginDefinition.getType().toString() + "|" +
objectMapper.writeValueAsString(this.pluginDefinition.getArguments()));
} catch (JsonProcessingException e) {
// This is basically impossible given the constrained values in the plugin definition
throw new RuntimeException(e);
}
}
public String individualHash() {
return Util.digest(individualHashSource());
}
@Override
public PluginVertex copy() {
return new PluginVertex(meta, getPluginDefinition());
}
@Override
public boolean sourceComponentEquals(SourceComponent other) {
if (other == null) return false;
if (other == this) return true;
if (other instanceof PluginVertex) {
PluginVertex otherV = (PluginVertex) other;
// We don't test ID equality because we're testing
// semantics, and ids have nothing to do with that
return otherV.getPluginDefinition().sourceComponentEquals(this.getPluginDefinition());
}
return false;
}
}

View file

@ -0,0 +1,44 @@
package org.logstash.config.ir.graph;
import org.logstash.config.ir.SourceComponent;
import org.logstash.config.ir.SourceMetadata;
/**
* Created by andrewvc on 9/15/16.
*/
public class QueueVertex extends Vertex {
public QueueVertex() {
super(null);
}
@Override
public String getId() {
return "__QUEUE__";
}
@Override
public String individualHashSource() {
return this.getClass().getCanonicalName();
}
public String toString() {
return this.getId();
}
@Override
public QueueVertex copy() {
return new QueueVertex();
}
@Override
public boolean sourceComponentEquals(SourceComponent other) {
if (other == null) return false;
return other instanceof QueueVertex;
}
// Special vertices really have no metadata
@Override
public SourceMetadata getMeta() {
return null;
}
}

View file

@ -0,0 +1,201 @@
package org.logstash.config.ir.graph;
import org.logstash.common.Util;
import org.logstash.config.ir.Hashable;
import org.logstash.config.ir.SourceComponent;
import org.logstash.config.ir.InvalidIRException;
import org.logstash.config.ir.SourceMetadata;
import org.logstash.config.ir.graph.algorithms.DepthFirst;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.util.Collection;
import java.util.Collections;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
* Created by andrewvc on 9/15/16.
*/
public abstract class Vertex implements SourceComponent, Hashable {
private final SourceMetadata sourceMetadata;
private Graph graph;
public Vertex() {
this.sourceMetadata = null;
}
public Vertex(SourceMetadata sourceMetadata) {
this.sourceMetadata = sourceMetadata;
}
public abstract Vertex copy();
public static class InvalidEdgeTypeException extends InvalidIRException {
public InvalidEdgeTypeException(String s) {
super(s);
}
}
public Graph getGraph() {
return this.graph;
}
public void setGraph(Graph graph) {
if (this.graph == graph) {
return;
} else if (this.graph == null) {
this.graph = graph;
} else {
throw new IllegalArgumentException("Cannot set graph property on Vertex that is already assigned to an existing graph!");
}
}
public boolean isRoot() {
return getIncomingEdges().isEmpty();
}
public boolean isLeaf() {
return getOutgoingEdges().isEmpty();
}
public boolean hasIncomingEdges() {
return !getIncomingEdges().isEmpty();
}
public boolean hasOutgoingEdges() {
return !getOutgoingEdges().isEmpty();
}
public Collection<Edge> getIncomingEdges() {
return incomingEdges().collect(Collectors.toSet());
}
public Collection<Edge> getOutgoingEdges() {
return outgoingEdges().collect(Collectors.toSet());
}
public Collection<Vertex> getOutgoingVertices() {
return outgoingVertices().collect(Collectors.toList());
}
public Stream<Vertex> outgoingVertices() {
return outgoingEdges().map(Edge::getTo);
}
public Collection<Vertex> getIncomingVertices() {
return incomingVertices().collect(Collectors.toList());
}
public Stream<Vertex> incomingVertices() {
return incomingEdges().map(Edge::getFrom);
}
public Stream<Edge> incomingEdges() {
return this.getGraph().getIncomingEdges(this).stream();
}
public Stream<Edge> outgoingEdges() {
return this.getGraph().getOutgoingEdges(this).stream();
}
public Stream<Vertex> ancestors() {
return DepthFirst.reverseDepthFirst(this).filter(v -> v != this);
}
public Stream<Vertex> roots() {
return ancestors().filter(Vertex::isRoot);
}
public Stream<Vertex> descendants() {
return DepthFirst.depthFirst(this).filter(v -> v != this);
}
public Stream<Vertex> lineage() {
return Stream.concat(Stream.concat(ancestors(), Stream.of(this)), descendants());
}
// Rank is the shortest distance to a root for this vertex
public int rank() {
return this.graph.rank(this);
}
@Override
public String uniqueHash() {
// Sort the lineage to ensure consistency. We prepend each item with a lexicographically sortable
// encoding of its rank (using hex notation) so that the sort order is identical to the traversal order.
// This is required since there may be individually identical components in different locations in the graph.
// It is, however, illegal to have functionally identical vertices, that is to say two vertices with the same
// contents that have the same lineage.
MessageDigest lineageDigest = Util.defaultMessageDigest();
lineageDigest.update(hashPrefix().getBytes());
// The lineage can be quite long and we want to avoid the quadratic complexity of string concatenation
// Thus, in this case there's no real way to get the hash source, we just hash as we go.
lineage().
map(Vertex::contextualHashSource).
sorted().
forEachOrdered(v -> {
byte[] bytes = v.getBytes(StandardCharsets.UTF_8);
lineageDigest.update(bytes);
});
String digest = Util.bytesToHexString(lineageDigest.digest());
return digest;
}
@Override
public String hashSource() {
// In this case the source can be quite large, so we never actually use this function.
return this.uniqueHash();
}
public String hashPrefix() {
return String.format("Vertex[%08x]=", this.rank()) + this.individualHashSource() + "|";
}
public String contextualHashSource() {
// This string must be lexicographically sortable, hence the rank-based prefix at the front. It also must have the individualHashSource
// repeated after the prefix for the case of a graph with two nodes at the same rank, same contents, but different lineages
StringBuilder result = new StringBuilder();
result.append(hashPrefix());
result.append(individualHashSource());
result.append("I:");
this.incomingEdges().map(Edge::individualHashSource).sorted().forEachOrdered(result::append);
result.append("O:");
this.outgoingEdges().map(Edge::individualHashSource).sorted().forEachOrdered(result::append);
return result.toString();
}
public abstract String individualHashSource();
// Can be overridden in subclasses to define multiple
// expected Edge classes this Vertex can take.
// If any EdgeFactory instances are returned this Vertex is considered
// a partial leaf.
public Collection<Edge.EdgeFactory> getUnusedOutgoingEdgeFactories() {
if (!this.hasOutgoingEdges()) {
return Collections.singletonList(new PlainEdge.PlainEdgeFactory());
}
return Collections.emptyList();
}
public boolean isPartialLeaf() {
return getUnusedOutgoingEdgeFactories().size() > 0;
}
public boolean acceptsIncomingEdge(Edge e) {
return true;
}
public boolean acceptsOutgoingEdge(Edge e) {
return true;
}
public abstract String getId();
}

View file

@ -0,0 +1,64 @@
package org.logstash.config.ir.graph.algorithms;
import org.logstash.config.ir.graph.Vertex;
import java.util.*;
import java.util.function.Consumer;
import java.util.stream.Stream;
/**
* Created by andrewvc on 1/5/17.
*
*/
public class BreadthFirst {
public static BfsResult breadthFirst(Collection<Vertex> roots) {
return breadthFirst(roots, false, null);
}
/* This isn't as pretty as the DFS search with its streaminess, but for our current uses we only really
* care about using this to get the calculated vertexDistances, so that's fine. */
public static BfsResult breadthFirst(Collection<Vertex> roots,
boolean reverse,
Consumer<Map.Entry<Vertex, Integer>> consumer
) {
Map<Vertex, Integer> vertexDistances = new HashMap<>();
Map<Vertex, Vertex> vertexParents = new HashMap<>();
Deque<Vertex> queue = new ArrayDeque<>(roots);
roots.forEach(v -> vertexDistances.put(v, 0));
while (!queue.isEmpty()) {
Vertex currentVertex = queue.removeFirst();
Integer currentDistance = vertexDistances.get(currentVertex);
if (consumer != null) {
consumer.accept(new AbstractMap.SimpleImmutableEntry<>(currentVertex, currentDistance));
}
Stream<Vertex> nextVertices = reverse ? currentVertex.incomingVertices() : currentVertex.outgoingVertices();
nextVertices.forEach(nextVertex -> {
if (vertexDistances.get(nextVertex) == null) {
vertexDistances.put(nextVertex, currentDistance+1);
vertexParents.put(nextVertex, currentVertex);
queue.addLast(nextVertex); // addLast keeps FIFO order; push() would make this depth-first and break the distance calculation
}
});
}
return new BfsResult(vertexDistances, vertexParents);
}
public static final class BfsResult {
public final Map<Vertex, Integer> vertexDistances;
private final Map<Vertex, Vertex> vertexParents;
BfsResult(Map<Vertex, Integer> vertexDistances, Map<Vertex,Vertex> vertexParents) {
this.vertexDistances = vertexDistances;
this.vertexParents = vertexParents;
}
public Collection<Vertex> getVertices() {
return vertexDistances.keySet();
}
}
}
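A quick sketch (hypothetical class name) of reading the distances computed above, which is what Graph#rank relies on:

import java.util.Map;
import org.logstash.config.ir.graph.Graph;
import org.logstash.config.ir.graph.Vertex;
import org.logstash.config.ir.graph.algorithms.BreadthFirst;

public class BreadthFirstSketch {
    // Prints each vertex with its shortest hop count from any of the graph's roots
    static void printDistances(Graph g) {
        BreadthFirst.BfsResult result = BreadthFirst.breadthFirst(g.getRoots());
        for (Map.Entry<Vertex, Integer> entry : result.vertexDistances.entrySet()) {
            System.out.println(entry.getKey() + " is " + entry.getValue() + " hop(s) from a root");
        }
    }
}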

View file

@ -0,0 +1,71 @@
package org.logstash.config.ir.graph.algorithms;
import org.logstash.config.ir.graph.Graph;
import org.logstash.config.ir.graph.Vertex;
import java.util.*;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
/**
* Created by andrewvc on 1/5/17.
*/
public class DepthFirst {
public static Stream<Vertex> depthFirst(Graph g) {
return depthFirst(g.getRoots());
}
public static Stream<Vertex> reverseDepthFirst(Graph g) {
return reverseDepthFirst(g.getLeaves());
}
public static Stream<Vertex> depthFirst(Vertex v) {
return depthFirst(Collections.singleton(v));
}
public static Stream<Vertex> reverseDepthFirst(Vertex v) {
return reverseDepthFirst(Collections.singleton(v));
}
public static Stream<Vertex> depthFirst(Collection<Vertex> v) {
return streamify(new Traversal(v, false));
}
public static Stream<Vertex> reverseDepthFirst(Collection<Vertex> v) {
return streamify(new Traversal(v, true));
}
private static Stream<Vertex> streamify(Traversal t) {
return StreamSupport.stream(Spliterators.spliteratorUnknownSize(t, Spliterator.DISTINCT),false);
}
public static class Traversal implements Iterator<Vertex> {
private final Set<Vertex> visited = new HashSet<>();
private final Deque<Vertex> pending;
private final boolean reverse;
Traversal(Collection<Vertex> initialVertices, boolean reverse) {
this.reverse = reverse;
this.pending = new ArrayDeque<>(initialVertices);
}
@Override
public boolean hasNext() {
return !pending.isEmpty();
}
@Override
public Vertex next() {
Vertex current = pending.removeFirst();
this.visited.add(current);
Stream<Vertex> next = reverse ? current.incomingVertices() : current.outgoingVertices();
next.forEach(v -> {
if (!visited.contains(v)) {
this.pending.add(v);
}
});
return current;
}
}
}
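And a corresponding sketch for the depth-first traversal, again with an illustrative class name:

import java.util.stream.Collectors;
import org.logstash.config.ir.graph.Graph;
import org.logstash.config.ir.graph.Vertex;
import org.logstash.config.ir.graph.algorithms.DepthFirst;

public class DepthFirstSketch {
    // Joins the graph's vertices in depth-first order starting from its roots
    static String dfsOrder(Graph g) {
        return DepthFirst.depthFirst(g).map(Vertex::toString).collect(Collectors.joining(" -> "));
    }
}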

View file

@ -0,0 +1,100 @@
package org.logstash.config.ir.graph.algorithms;
import org.logstash.config.ir.graph.Edge;
import org.logstash.config.ir.graph.Graph;
import org.logstash.config.ir.graph.Vertex;
import java.util.Collection;
import java.util.List;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
* Created by andrewvc on 1/5/17.
*/
public class GraphDiff {
public static DiffResult diff(Graph left, Graph right) {
List<Edge> removedEdges = left.edges().filter(e -> !right.hasEquivalentEdge(e)).collect(Collectors.toList());
List<Vertex> removedVertices = left.vertices().filter(v -> !right.hasEquivalentVertex(v)).collect(Collectors.toList());
List<Edge> addedEdges = right.edges().filter(e -> !left.hasEquivalentEdge(e)).collect(Collectors.toList());
List<Vertex> addedVertices = right.vertices().filter(v -> !left.hasEquivalentVertex(v)).collect(Collectors.toList());
return new DiffResult(removedEdges, addedEdges, removedVertices, addedVertices);
}
public static class DiffResult {
private final List<Vertex> removedVertices;
private final List<Vertex> addedVertices;
private final Collection<Edge> removedEdges;
private final Collection<Edge> addedEdges;
public Collection<Edge> getRemovedEdges() {
return removedEdges;
}
public Collection<Edge> getAddedEdges() {
return addedEdges;
}
public Collection<Vertex> getRemovedVertices() {
return removedVertices;
}
public Collection<Vertex> getAddedVertices() {
return addedVertices;
}
public DiffResult(Collection<Edge> removedEdges, Collection<Edge> addedEdges, List<Vertex> removedVertices, List<Vertex> addedVertices) {
this.removedEdges = removedEdges;
this.addedEdges = addedEdges;
this.removedVertices = removedVertices;
this.addedVertices = addedVertices;
}
public String summary() {
String template = "(-%d,+%d Edges | -%d,+%d Vertices)";
return String.format(template, removedEdges.size(), addedEdges.size(), removedVertices.size(), addedVertices.size());
}
public boolean hasSameEdges() {
return addedEdges.isEmpty() && removedEdges.isEmpty();
}
public boolean hasSameVertices() {
return addedVertices.isEmpty() && removedVertices.isEmpty();
}
public boolean isIdentical() {
return hasSameEdges() && hasSameVertices();
}
public String toString() {
if (isIdentical()) return "Identical Graphs";
StringBuilder output = new StringBuilder();
output.append(this.summary());
if (!hasSameEdges()) {
output.append("\n");
output.append(detailedDiffFor("Edges", removedEdges, addedEdges));
}
if (!hasSameVertices()) {
output.append("\n");
output.append(detailedDiffFor("Vertices", removedVertices, addedVertices));
}
output.append("\n");
return output.toString();
}
private String detailedDiffFor(String name, Collection<?> removed, Collection<?> added) {
return (name + " GraphDiff: " + "\n") +
"--------------------------\n" +
Stream.concat(removed.stream().map(c -> "-" + c.toString()),
added.stream().map(c -> "+" + c.toString())).
map(Object::toString).
collect(Collectors.joining("\n")) +
"\n--------------------------";
}
}
}
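A sketch of how the diff result might be consumed, for example to decide whether a recompiled pipeline actually changed; the class name is illustrative:

import org.logstash.config.ir.graph.Graph;
import org.logstash.config.ir.graph.algorithms.GraphDiff;

public class GraphDiffSketch {
    // Compares two graphs by sourceComponentEquals semantics rather than object identity
    static void report(Graph before, Graph after) {
        GraphDiff.DiffResult diff = GraphDiff.diff(before, after);
        if (diff.isIdentical()) {
            System.out.println("Graphs are semantically identical");
        } else {
            System.out.println(diff.summary()); // e.g. (-1,+2 Edges | -0,+1 Vertices)
        }
    }
}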

View file

@ -0,0 +1,106 @@
package org.logstash.config.ir.graph.algorithms;
import org.logstash.config.ir.graph.Vertex;
import java.util.*;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
* Created by andrewvc on 1/5/17.
* This class isn't currently used anywhere. I wrote it for some code that has since been removed; however, I'm sure it will be
* useful shortly, so we should hold onto it for a while.
*/
public class ShortestPath {
public static class InvalidShortestPathArguments extends Exception {
private final Collection<Vertex> invalidVertices;
public InvalidShortestPathArguments(Collection<Vertex> invalidVertices) {
super();
this.invalidVertices = invalidVertices;
}
@Override
public String getMessage() {
String verticesMessage = invalidVertices.stream().map(Vertex::toString).collect(Collectors.joining(", "));
return "Attempted to determine path for vertex that is not in the search space!" + verticesMessage;
}
}
public static List<Vertex> shortestPath(Vertex from, Vertex to) throws InvalidShortestPathArguments {
return shortestPath(from, Collections.singleton(to)).get(to);
}
public static Map<Vertex, List<Vertex>> shortestPath(Vertex from, Collection<Vertex> to) throws InvalidShortestPathArguments {
return shortestPath(from, to, false);
}
// Finds the shortest paths to the specified vertices traversing edges backward using Dijkstra's algorithm.
// The items in `to` must be ancestors of the `from` vertex!
public static Map<Vertex, List<Vertex>> shortestReversePath(Vertex from, Collection<Vertex> to) throws InvalidShortestPathArguments {
return shortestPath(from, to, true);
}
// Finds the shortest paths to the specified vertices using Dijkstra's algorithm.
// The items in `to` must be descendants of the `from` vertex (or ancestors when reverseSearch is true)!
public static Map<Vertex, List<Vertex>> shortestPath(Vertex from, Collection<Vertex> to, boolean reverseSearch) throws InvalidShortestPathArguments {
Map<Vertex, Integer> vertexDistances = new HashMap<>();
Map<Vertex, Vertex> vertexPathPrevious = new HashMap<>();
List<Vertex> pending = new ArrayList<>();
Stream<Vertex> searchSpace = reverseSearch ? from.ancestors() : from.descendants();
searchSpace.forEach((vertex) -> {
pending.add(vertex);
// Max value is an unknown distance
// using this is more convenient and concise than null in later code
vertexDistances.put(vertex, Integer.MAX_VALUE);
});
pending.add(from);
vertexDistances.put(from, 0);
Collection<Vertex> invalidVertices = to.stream().filter(v -> !pending.contains(v)).collect(Collectors.toList());
if (!invalidVertices.isEmpty()) {
throw new InvalidShortestPathArguments(invalidVertices);
}
while (!pending.isEmpty()) {
Vertex current = pending.stream().min(Comparator.comparing(vertexDistances::get)).get();
int currentDistance = vertexDistances.get(current);
pending.remove(current);
Stream<Vertex> toProcess = reverseSearch ? current.incomingVertices() : current.outgoingVertices();
toProcess.forEach((v) -> {
Integer curDistance = vertexDistances.get(v);
int altDistance = currentDistance + 1; // Fixed cost per edge of 1
if (altDistance < curDistance) {
vertexDistances.put(v, altDistance);
vertexPathPrevious.put(v, current);
}
});
}
Map<Vertex, List<Vertex>> result = new HashMap<>(to.size());
for (Vertex toVertex : to) {
int toVertexDistance = vertexDistances.get(toVertex);
List<Vertex> path = new ArrayList<>(toVertexDistance+1);
Vertex pathCurrentVertex = toVertex;
while (pathCurrentVertex != from) {
path.add(pathCurrentVertex);
pathCurrentVertex = vertexPathPrevious.get(pathCurrentVertex);
}
path.add(from);
Collections.reverse(path);
result.put(toVertex, path);
}
return result;
}
}
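A sketch of calling the single-target overload; `to` must be reachable from `from`, and the method throws when it is not in the search space. The class name is illustrative, and the throws clause is kept broad for brevity:

import java.util.List;
import java.util.stream.Collectors;
import org.logstash.config.ir.graph.Vertex;
import org.logstash.config.ir.graph.algorithms.ShortestPath;

public class ShortestPathSketch {
    // Prints the vertices on a shortest path from `from` to `to`, inclusive of both endpoints
    static void printPath(Vertex from, Vertex to) throws Exception {
        List<Vertex> path = ShortestPath.shortestPath(from, to);
        System.out.println(path.stream().map(Vertex::toString).collect(Collectors.joining(" -> ")));
    }
}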

View file

@ -0,0 +1,51 @@
package org.logstash.config.ir.graph.algorithms;
import org.logstash.config.ir.graph.Edge;
import org.logstash.config.ir.graph.Graph;
import org.logstash.config.ir.graph.Vertex;
import java.util.*;
/**
* Created by andrewvc on 1/7/17.
*/
public class TopologicalSort {
public static class UnexpectedGraphCycleError extends Exception {
UnexpectedGraphCycleError(Graph g) {
super("Graph has cycles, is not a DAG! " + g);
}
}
// Uses Kahn's algorithm to do a topological sort and detect cycles
public static List<Vertex> sortVertices(Graph g) throws UnexpectedGraphCycleError {
if (g.getEdges().size() == 0) return new ArrayList<>(g.getVertices());
List<Vertex> sorted = new ArrayList<>(g.getVertices().size());
Deque<Vertex> pending = new LinkedList<>();
pending.addAll(g.getRoots());
Set<Edge> traversedEdges = new HashSet<>();
while (!pending.isEmpty()) {
Vertex currentVertex = pending.removeFirst();
sorted.add(currentVertex);
currentVertex.getOutgoingEdges().forEach(edge -> {
traversedEdges.add(edge);
Vertex toVertex = edge.getTo();
if (toVertex.getIncomingEdges().stream().allMatch(traversedEdges::contains)) {
pending.add(toVertex);
}
});
}
// Check for cycles
if (!g.edges().allMatch(traversedEdges::contains)) { // a cycle leaves at least one edge untraversed
throw new UnexpectedGraphCycleError(g);
}
return sorted;
}
}
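Finally, a sketch of using the sort to get an execution-like ordering of a pipeline graph; the class name is illustrative:

import java.util.List;
import java.util.stream.Collectors;
import org.logstash.config.ir.graph.Graph;
import org.logstash.config.ir.graph.Vertex;
import org.logstash.config.ir.graph.algorithms.TopologicalSort;

public class TopologicalSortSketch {
    // Prints vertex ids in an order where every edge points from an earlier entry to a later one
    static void printOrder(Graph g) throws TopologicalSort.UnexpectedGraphCycleError {
        List<Vertex> sorted = TopologicalSort.sortVertices(g);
        System.out.println(sorted.stream().map(Vertex::getId).collect(Collectors.joining(" -> ")));
    }
}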

View file

@ -0,0 +1,32 @@
package org.logstash.config.ir.imperative;
import org.logstash.config.ir.InvalidIRException;
import org.logstash.config.ir.SourceMetadata;
import org.logstash.config.ir.graph.Graph;
import java.util.List;
/**
* Created by andrewvc on 9/22/16.
*/
public class ComposedParallelStatement extends ComposedStatement {
public ComposedParallelStatement(SourceMetadata meta, List<Statement> statements) throws InvalidIRException {
super(meta, statements);
}
@Override
protected String composeTypeString() {
return "composed-parallel";
}
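// Each child statement becomes its own subgraph; Graph.combine merges them side by side without
// chaining them together, so the composed statements form independent parallel branches.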
@Override
public Graph toGraph() throws InvalidIRException {
Graph g = Graph.empty();
for (Statement s : getStatements()) {
g = Graph.combine(g, s.toGraph()).graph;
}
return g;
}
}

View file

@ -0,0 +1,33 @@
package org.logstash.config.ir.imperative;
import org.logstash.config.ir.InvalidIRException;
import org.logstash.config.ir.SourceMetadata;
import org.logstash.config.ir.graph.Graph;
import java.util.List;
/**
* Created by andrewvc on 9/22/16.
*/
public class ComposedSequenceStatement extends ComposedStatement {
public ComposedSequenceStatement(SourceMetadata meta, List<Statement> statements) throws InvalidIRException {
super(meta, statements);
}
@Override
protected String composeTypeString() {
return "do-sequence";
}
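// Each child statement's subgraph is chained onto the leaves of the graph built so far, so the
// composed statements execute strictly in order.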
@Override
public Graph toGraph() throws InvalidIRException {
Graph g = Graph.empty();
for (Statement statement : getStatements()) {
Graph sg = statement.toGraph();
g = g.chain(sg);
}
return g;
}
}

View file

@ -0,0 +1,65 @@
package org.logstash.config.ir.imperative;
import org.logstash.config.ir.SourceComponent;
import org.logstash.config.ir.InvalidIRException;
import org.logstash.config.ir.SourceMetadata;
import java.util.List;
import java.util.stream.Collectors;
/**
* Created by andrewvc on 9/6/16.
*/
public abstract class ComposedStatement extends Statement {
public interface IFactory {
ComposedStatement make(SourceMetadata meta, List<Statement> statements) throws InvalidIRException;
}
private final List<Statement> statements;
public ComposedStatement(SourceMetadata meta, List<Statement> statements) throws InvalidIRException {
super(meta);
if (statements == null || statements.stream().anyMatch(s -> s == null)) {
throw new InvalidIRException("Null statements are not allowed in a statement list");
}
this.statements = statements;
}
public List<Statement> getStatements() {
return this.statements;
}
public int size() {
return getStatements().size();
}
@Override
public boolean sourceComponentEquals(SourceComponent sourceComponent) {
if (sourceComponent == null) return false;
if (this == sourceComponent) return true;
if (sourceComponent.getClass().equals(this.getClass())) {
ComposedStatement other = (ComposedStatement) sourceComponent;
if (this.size() != other.size()) {
return false;
}
for (int i = 0; i < size(); i++) {
Statement s = this.getStatements().get(i);
Statement os = other.getStatements().get(i);
if (!(s.sourceComponentEquals(os))) return false;
}
return true;
}
return false;
}
@Override
public String toString(int indent) {
return indentPadding(indent) + "(" + this.composeTypeString() + "\n" +
getStatements().stream().
map(s -> s.toString(indent+2)).
collect(Collectors.joining("\n")) +
"\n" + indentPadding(indent) + ")";
}
protected abstract String composeTypeString();
}

View file

@ -0,0 +1,105 @@
package org.logstash.config.ir.imperative;
import org.logstash.config.ir.SourceComponent;
import org.logstash.config.ir.InvalidIRException;
import org.logstash.config.ir.SourceMetadata;
import org.logstash.config.ir.expression.BooleanExpression;
import org.logstash.config.ir.graph.BooleanEdge;
import org.logstash.config.ir.graph.Graph;
import org.logstash.config.ir.graph.IfVertex;
import org.logstash.config.ir.graph.Vertex;
import java.util.Collection;
import java.util.stream.Collectors;
/**
* Created by andrewvc on 9/6/16.
*
* Represents an imperative conditional statement, e.g.:
*   if 5 {
*     ...
*   }
*/
public class IfStatement extends Statement {
private final BooleanExpression booleanExpression;
private final Statement trueStatement;
private final Statement falseStatement;
public BooleanExpression getBooleanExpression() {
return booleanExpression;
}
public Statement getTrueStatement() {
return trueStatement;
}
public Statement getFalseStatement() {
return falseStatement;
}
public IfStatement(SourceMetadata meta,
BooleanExpression booleanExpression,
Statement trueStatement,
Statement falseStatement
) throws InvalidIRException {
super(meta);
if (booleanExpression == null) throw new InvalidIRException("Boolean expression must not be null!");
if (trueStatement == null) throw new InvalidIRException("If Statement needs true statement!");
if (falseStatement == null) throw new InvalidIRException("If Statement needs false statement!");
this.booleanExpression = booleanExpression;
this.trueStatement = trueStatement;
this.falseStatement = falseStatement;
}
@Override
public boolean sourceComponentEquals(SourceComponent sourceComponent) {
if (sourceComponent == null) return false;
if (sourceComponent == this) return true;
if (sourceComponent instanceof IfStatement) {
IfStatement other = (IfStatement) sourceComponent;
return (this.booleanExpression.sourceComponentEquals(other.getBooleanExpression()) &&
this.trueStatement.sourceComponentEquals(other.trueStatement) &&
this.falseStatement.sourceComponentEquals(other.falseStatement));
}
return false;
}
@Override
public String toString(int indent) {
return indentPadding(indent) +
"(if " + booleanExpression.toString(0) +
"\n" +
this.trueStatement +
"\n" +
this.falseStatement +
")";
}
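// toGraph combines the true and false subgraphs, adds an IfVertex for the boolean expression, and
// wires that vertex to each subgraph's roots with true/false BooleanEdges:
//
//   (IfVertex) --true-->  roots of trueStatement.toGraph()
//   (IfVertex) --false--> roots of falseStatement.toGraph()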
@Override
public Graph toGraph() throws InvalidIRException {
Graph trueGraph = getTrueStatement().toGraph();
Graph falseGraph = getFalseStatement().toGraph();
Graph.GraphCombinationResult combination = Graph.combine(trueGraph, falseGraph);
Graph newGraph = combination.graph;
Collection<Vertex> trueRoots = trueGraph.roots().map(combination.oldToNewVertices::get).collect(Collectors.toList());
Collection<Vertex> falseRoots = falseGraph.roots().map(combination.oldToNewVertices::get).collect(Collectors.toList());
IfVertex ifVertex = new IfVertex(this.getMeta(), this.booleanExpression);
newGraph.addVertex(ifVertex);
for (Vertex v : trueRoots) {
newGraph.chainVerticesUnsafe(BooleanEdge.trueFactory, ifVertex, v);
}
for (Vertex v : falseRoots) {
newGraph.chainVerticesUnsafe(BooleanEdge.falseFactory, ifVertex, v);
}
return newGraph;
}
}

View file

@ -0,0 +1,33 @@
package org.logstash.config.ir.imperative;
import org.logstash.config.ir.SourceComponent;
import org.logstash.config.ir.SourceMetadata;
import org.logstash.config.ir.graph.Graph;
/**
* Created by andrewvc on 9/15/16.
*/
public class NoopStatement extends Statement {
public NoopStatement(SourceMetadata meta) {
super(meta);
}
@Override
public boolean sourceComponentEquals(SourceComponent sourceComponent) {
if (sourceComponent == null) return false;
if (sourceComponent instanceof NoopStatement) return true;
return false;
}
@Override
public String toString(int indent) {
return indentPadding(indent) + "(Noop)";
}
@Override
public Graph toGraph() {
return Graph.empty();
}
}

View file

@ -0,0 +1,45 @@
package org.logstash.config.ir.imperative;
import org.logstash.config.ir.SourceComponent;
import org.logstash.config.ir.InvalidIRException;
import org.logstash.config.ir.PluginDefinition;
import org.logstash.config.ir.SourceMetadata;
import org.logstash.config.ir.graph.Graph;
import org.logstash.config.ir.graph.PluginVertex;
import org.logstash.config.ir.graph.Vertex;
/**
* Created by andrewvc on 9/6/16.
*/
public class PluginStatement extends Statement {
private final PluginDefinition pluginDefinition;
public PluginStatement(SourceMetadata meta, PluginDefinition pluginDefinition) {
super(meta);
this.pluginDefinition = pluginDefinition;
}
@Override
public boolean sourceComponentEquals(SourceComponent sourceComponent) {
if (sourceComponent == null) return false;
if (sourceComponent == this) return true;
if (sourceComponent instanceof PluginStatement) {
PluginStatement other = (PluginStatement) sourceComponent;
return this.pluginDefinition.equals(other.pluginDefinition);
}
return false;
}
@Override
public String toString(int indent) {
return indentPadding(indent) + this.pluginDefinition;
}
@Override
public Graph toGraph() throws InvalidIRException {
Vertex pluginVertex = new PluginVertex(getMeta(), pluginDefinition);
Graph g = Graph.empty();
g.addVertex(pluginVertex);
return g;
}
}

View file

@ -0,0 +1,27 @@
package org.logstash.config.ir.imperative;
import org.logstash.config.ir.InvalidIRException;
import org.logstash.config.ir.BaseSourceComponent;
import org.logstash.config.ir.SourceMetadata;
import org.logstash.config.ir.graph.Graph;
/**
* Created by andrewvc on 9/6/16.
*/
public abstract class Statement extends BaseSourceComponent {
public Statement(SourceMetadata meta) {
super(meta);
}
public abstract Graph toGraph() throws InvalidIRException;
public String toString() {
return toString(2);
}
public abstract String toString(int indent);
public String indentPadding(int length) {
return String.format("%" + length + "s", "");
}
}

View file

@ -0,0 +1,139 @@
package org.logstash.config.ir;
import org.hamcrest.MatcherAssert;
import org.logstash.config.ir.expression.BooleanExpression;
import org.logstash.config.ir.expression.ValueExpression;
import org.logstash.config.ir.expression.unary.Truthy;
import org.logstash.config.ir.graph.Edge;
import org.logstash.config.ir.graph.Graph;
import org.logstash.config.ir.graph.Vertex;
import org.logstash.config.ir.graph.algorithms.GraphDiff;
import java.util.HashMap;
import java.util.Objects;
import java.util.UUID;
import static org.logstash.config.ir.DSL.*;
import static org.logstash.config.ir.PluginDefinition.Type.*;
/**
* Created by andrewvc on 9/19/16.
*/
public class IRHelpers {
public static void assertSyntaxEquals(SourceComponent left, SourceComponent right) {
String message = String.format("Expected '%s' to equal '%s'", left, right);
MatcherAssert.assertThat(message, left.sourceComponentEquals(right));
}
public static void assertSyntaxEquals(Graph left, Graph right) {
String message = String.format("Expected \n'%s'\n to equal \n'%s'\n%s", left, right, GraphDiff.diff(left, right));
MatcherAssert.assertThat(message, left.sourceComponentEquals(right));
}
public static Vertex createTestVertex() {
return createTestVertex(UUID.randomUUID().toString());
}
public static Vertex createTestVertex(String id) {
return new TestVertex(id);
}
static class TestVertex extends Vertex {
private String id;
public TestVertex(String id) {
this.id = id;
}
@Override
public Vertex copy() {
return new TestVertex(id);
}
@Override
public String individualHashSource() {
return "TVertex" + "|" + id;
}
@Override
public String getId() {
return this.id;
}
public String toString() {
return "TestVertex-" + id;
}
@Override
public boolean sourceComponentEquals(SourceComponent other) {
if (other == null) return false;
if (other instanceof TestVertex) {
return Objects.equals(getId(), ((TestVertex) other).getId());
}
return false;
}
@Override
public SourceMetadata getMeta() {
return null;
}
}
public static Edge createTestEdge() throws InvalidIRException {
Vertex v1 = createTestVertex();
Vertex v2 = createTestVertex();
return new TestEdge(v1,v2);
}
public static Edge createTestEdge(Vertex from, Vertex to) throws InvalidIRException {
return new TestEdge(from, to);
}
public static final class TestEdge extends Edge {
TestEdge(Vertex from, Vertex to) throws InvalidIRException {
super(from, to);
}
@Override
public Edge copy(Vertex from, Vertex to) throws InvalidIRException {
return new TestEdge(from, to);
}
@Override
public String individualHashSource() {
return "TEdge";
}
@Override
public String getId() {
return individualHashSource();
}
}
public static BooleanExpression createTestExpression() throws InvalidIRException {
return new Truthy(null, new ValueExpression(null, 1));
}
public static SourceMetadata testMetadata() {
return new SourceMetadata("/fake/file", 1, 2, "<fakesource>");
}
public static PluginDefinition testPluginDefinition() {
return new PluginDefinition(PluginDefinition.Type.FILTER, "testDefinition", new HashMap<String, Object>());
}
public static Pipeline samplePipeline() throws InvalidIRException {
Graph inputSection = iComposeParallel(iPlugin(INPUT, "generator"), iPlugin(INPUT, "stdin")).toGraph();
Graph filterSection = iIf(eEq(eEventValue("[foo]"), eEventValue("[bar]")),
iPlugin(FILTER, "grok"),
iPlugin(FILTER, "kv")).toGraph();
Graph outputSection = iIf(eGt(eEventValue("[baz]"), eValue(1000)),
iComposeParallel(
iPlugin(OUTPUT, "s3"),
iPlugin(OUTPUT, "elasticsearch")),
iPlugin(OUTPUT, "stdout")).toGraph();
return new Pipeline(inputSection, filterSection, outputSection);
}
}

View file

@ -0,0 +1,32 @@
package org.logstash.config.ir;
import org.junit.Test;
import org.logstash.config.ir.graph.Graph;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.logstash.config.ir.DSL.*;
import static org.logstash.config.ir.PluginDefinition.Type.*;
/**
* Created by andrewvc on 9/20/16.
*/
public class PipelineTest {
@Test
public void testPipelineCreation() throws InvalidIRException {
Graph inputSection = iComposeParallel(iPlugin(INPUT, "generator"), iPlugin(INPUT, "stdin")).toGraph();
Graph filterSection = iIf(eEq(eEventValue("[foo]"), eEventValue("[bar]")),
iPlugin(FILTER, "grok"),
iPlugin(FILTER, "kv")).toGraph();
Graph outputSection = iIf(eGt(eEventValue("[baz]"), eValue(1000)),
iComposeParallel(
iPlugin(OUTPUT, "s3"),
iPlugin(OUTPUT, "elasticsearch")),
iPlugin(OUTPUT, "stdout")).toGraph();
Pipeline pipeline = new Pipeline(inputSection, filterSection, outputSection);
assertEquals(2, pipeline.getInputPluginVertices().size());
assertEquals(2, pipeline.getFilterPluginVertices().size());
assertEquals(3, pipeline.getOutputPluginVertices().size());
}
}

View file

@ -0,0 +1,35 @@
package org.logstash.config.ir.graph;
import org.junit.experimental.theories.DataPoint;
import org.junit.experimental.theories.Theories;
import org.junit.experimental.theories.Theory;
import org.junit.runner.RunWith;
import org.logstash.config.ir.InvalidIRException;
import static org.hamcrest.CoreMatchers.*;
import static org.junit.Assert.assertThat;
import static org.logstash.config.ir.IRHelpers.*;
/**
* Created by andrewvc on 11/21/16.
*/
@RunWith(Theories.class)
public class BooleanEdgeTest {
@DataPoint
public static Boolean TRUE = true;
@DataPoint
public static Boolean FALSE = false;
@Theory
public void testBasicBooleanEdgeProperties(Boolean edgeType) throws InvalidIRException {
BooleanEdge be = new BooleanEdge(edgeType, createTestVertex(), createTestVertex());
assertThat(be.getEdgeType(), is(edgeType));
}
@Theory
public void testFactoryCreation(Boolean edgeType) throws InvalidIRException {
BooleanEdge.BooleanEdgeFactory factory = new BooleanEdge.BooleanEdgeFactory(edgeType);
BooleanEdge be = factory.make(createTestVertex(), createTestVertex());
assertThat(be.getEdgeType(), is(edgeType));
}
}

View file

@ -0,0 +1,21 @@
package org.logstash.config.ir.graph;
import org.junit.Test;
import org.logstash.config.ir.IRHelpers;
import org.logstash.config.ir.InvalidIRException;
import static org.junit.Assert.*;
import static org.hamcrest.CoreMatchers.*;
/**
* Created by andrewvc on 11/21/16.
*/
public class EdgeTest {
@Test
public void testBasicEdge() throws InvalidIRException {
Edge e = IRHelpers.createTestEdge();
assertThat("From is edge", e.getFrom(), notNullValue());
assertThat("To is edge", e.getTo(), notNullValue());
}
}

View file

@ -0,0 +1,175 @@
package org.logstash.config.ir.graph;
import org.junit.Test;
import org.logstash.config.ir.DSL;
import org.logstash.config.ir.IRHelpers;
import org.logstash.config.ir.InvalidIRException;
import org.logstash.config.ir.PluginDefinition;
import org.logstash.config.ir.imperative.IfStatement;
import java.util.Collection;
import java.util.Collections;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.*;
import static org.logstash.config.ir.IRHelpers.createTestExpression;
import static org.logstash.config.ir.IRHelpers.createTestVertex;
/**
* Created by andrewvc on 11/18/16.
*/
public class GraphTest {
@Test
public void testGraphBasics() throws InvalidIRException {
Graph g = Graph.empty();
Vertex v1 = IRHelpers.createTestVertex();
Vertex v2 = IRHelpers.createTestVertex();
g.chainVertices(v1, v2);
Edge e = v1.outgoingEdges().findFirst().get();
assertEquals("Connects vertex edges correctly", v1.getOutgoingEdges(), v2.getIncomingEdges());
assertEquals("Has one edge", g.getEdges(), Collections.singleton(e));
assertTrue("Has v1", g.getVertices().contains(v1));
assertTrue("Has v2", g.getVertices().contains(v2));
}
// Expect an Invalid IR Exception from the cycle
@Test(expected = org.logstash.config.ir.InvalidIRException.class)
public void testGraphCycleDetection() throws InvalidIRException {
Graph g = Graph.empty();
Vertex v1 = IRHelpers.createTestVertex();
Vertex v2 = IRHelpers.createTestVertex();
g.chainVertices(v1, v2);
g.chainVertices(v2, v1);
}
@Test
public void chaining() throws InvalidIRException {
Graph fromGraph = Graph.empty();
fromGraph.chainVertices(createTestVertex("fromV1"), createTestVertex("fromV2"));
Graph toGraph = Graph.empty();
toGraph.chainVertices(createTestVertex("toV1"), createTestVertex("toV2"));
Graph result = fromGraph.chain(toGraph);
assertEquals(3, result.getEdges().size());
assertVerticesConnected(result, "fromV2", "toV1");
}
@Test
public void chainingIntoMultipleRoots() throws InvalidIRException {
Graph fromGraph = Graph.empty();
fromGraph.chainVertices(createTestVertex("fromV1"), createTestVertex("fromV2"));
Graph toGraph = Graph.empty();
toGraph.chainVertices(createTestVertex("toV1"), createTestVertex("toV2"));
toGraph.addVertex(createTestVertex("toV3"));
Graph result = fromGraph.chain(toGraph);
assertEquals(4, result.getEdges().size());
assertVerticesConnected(result, "fromV2", "toV1");
assertVerticesConnected(result, "fromV2", "toV3");
}
@Test
public void simpleConsistencyTest() throws InvalidIRException {
Graph g1 = Graph.empty();
g1.addVertex(createTestVertex("a"));
Graph g2 = Graph.empty();
g2.addVertex(createTestVertex("a"));
assertEquals(g1.uniqueHash(), g2.uniqueHash());
}
@Test
public void complexConsistencyTest() throws InvalidIRException {
Graph g1 = IRHelpers.samplePipeline().getGraph();
Graph g2 = IRHelpers.samplePipeline().getGraph();
assertEquals(g1.uniqueHash(), g2.uniqueHash());
}
@Test
public void testThreading() throws InvalidIRException {
Graph graph = Graph.empty();
Vertex v1 = IRHelpers.createTestVertex();
Vertex v2 = IRHelpers.createTestVertex();
graph.chainVertices(v1, v2);
assertVerticesConnected(v1, v2);
Edge v1Edge = v1.outgoingEdges().findFirst().get();
Edge v2Edge = v2.incomingEdges().findFirst().get();
assertThat(v1Edge, is(v2Edge));
assertThat(v1Edge, instanceOf(PlainEdge.class));
}
@Test
public void testThreadingMulti() throws InvalidIRException {
Graph graph = Graph.empty();
Vertex v1 = IRHelpers.createTestVertex();
Vertex v2 = IRHelpers.createTestVertex();
Vertex v3 = IRHelpers.createTestVertex();
Collection<Edge> multiEdges = graph.chainVertices(v1, v2, v3);
assertThat(v1.getOutgoingVertices(), is(Collections.singletonList(v2)));
assertThat(v2.getIncomingVertices(), is(Collections.singletonList(v1)));
assertThat(v2.getOutgoingVertices(), is(Collections.singletonList(v3)));
assertThat(v3.getIncomingVertices(), is(Collections.singletonList(v2)));
}
@Test
public void testThreadingTyped() throws InvalidIRException {
Graph graph = Graph.empty();
Vertex if1 = new IfVertex(null, createTestExpression());
Vertex condT = IRHelpers.createTestVertex();
Edge tEdge = graph.chainVertices(BooleanEdge.trueFactory, if1, condT).stream().findFirst().get();
assertThat(tEdge, instanceOf(BooleanEdge.class));
BooleanEdge tBooleanEdge = (BooleanEdge) tEdge;
assertThat(tBooleanEdge.getEdgeType(), is(true));
}
@Test
public void copyTest() throws InvalidIRException {
Graph left = Graph.empty();
left.addVertex(createTestVertex("t1"));
Graph right = left.copy();
Vertex lv = left.getVertexById("t1");
Vertex rv = right.getVertexById("t1");
assertTrue(lv.sourceComponentEquals(rv));
assertTrue(rv.sourceComponentEquals(lv));
}
@Test
public void uniqueHashingOfSimilarLeaves() throws InvalidIRException {
// the initial implementation didn't handle this well, so we'll leave it here as a tricky test
IfStatement imperative = DSL.iIf(
DSL.eTruthy(DSL.eValue("1")),
DSL.iPlugin(PluginDefinition.Type.FILTER, "drop"),
DSL.iIf(
DSL.eTruthy(DSL.eValue("2")),
DSL.iPlugin(PluginDefinition.Type.FILTER, "drop"),
DSL.iIf(
DSL.eTruthy(DSL.eValue("3")),
DSL.iPlugin(PluginDefinition.Type.FILTER, "drop")
)
)
);
Graph g = imperative.toGraph();
g.validate();
}
private void assertVerticesConnected(Graph graph, String fromId, String toId) {
Vertex from = graph.getVertexById(fromId);
assertNotNull(from);
Vertex to = graph.getVertexById(toId);
assertNotNull(to);
assertVerticesConnected(from, to);
}
public void assertVerticesConnected(Vertex from, Vertex to) {
assertTrue("No connection: " + from + " -> " + to, from.getOutgoingVertices().contains(to));
assertTrue("No reverse connection: " + from + " -> " + to, to.getIncomingVertices().contains(from));
}
}

View file

@ -0,0 +1,66 @@
package org.logstash.config.ir.graph;
import org.junit.Test;
import org.logstash.config.ir.InvalidIRException;
import static org.hamcrest.CoreMatchers.*;
import static org.junit.Assert.assertThat;
import static org.logstash.config.ir.IRHelpers.*;
/**
* Created by andrewvc on 11/22/16.
*/
public class IfVertexTest {
@Test
public void testIfVertexCreation() throws InvalidIRException {
testIfVertex();
}
@Test(expected = Vertex.InvalidEdgeTypeException.class)
public void testDoesNotAcceptNonBooleanEdges() throws InvalidIRException {
Graph graph = Graph.empty();
IfVertex ifV = testIfVertex();
Vertex otherV = createTestVertex();
graph.chainVertices(PlainEdge.factory, ifV, otherV);
}
@Test
public void testEdgeTypeHandling() throws InvalidIRException {
Graph graph = Graph.empty();
IfVertex ifV = testIfVertex();
graph.addVertex(ifV);
Vertex trueV = createTestVertex();
graph.addVertex(trueV);
assertThat(ifV.hasEdgeType(true), is(false));
assertThat(ifV.hasEdgeType(false), is(false));
assertThat(ifV.getUnusedOutgoingEdgeFactories().size(), is(2));
graph.chainVertices(BooleanEdge.trueFactory, ifV, trueV);
assertThat(ifV.hasEdgeType(true), is(true));
assertThat(ifV.hasEdgeType(false), is(false));
assertThat(ifV.getUnusedOutgoingEdgeFactories().size(), is(1));
assertThat(
ifV.getUnusedOutgoingEdgeFactories().stream().findFirst().get(),
is(BooleanEdge.falseFactory)
);
Vertex falseV = createTestVertex();
graph.chainVertices(BooleanEdge.falseFactory, ifV, falseV);
assertThat(ifV.hasEdgeType(false), is(true));
assertThat(ifV.getUnusedOutgoingEdgeFactories().isEmpty(), is(true));
BooleanEdge trueEdge = ifV.getOutgoingBooleanEdgesByType(true).stream().findAny().get();
BooleanEdge falseEdge = ifV.getOutgoingBooleanEdgesByType(false).stream().findAny().get();
assertThat(trueEdge.getEdgeType(), is(true));
assertThat(falseEdge.getEdgeType(), is(false));
}
public IfVertex testIfVertex() throws InvalidIRException {
return new IfVertex(testMetadata(), createTestExpression());
}
}

View file

@ -0,0 +1,21 @@
package org.logstash.config.ir.graph;
import org.junit.Test;
import org.logstash.config.ir.IRHelpers;
import org.logstash.config.ir.InvalidIRException;
/**
* Created by andrewvc on 11/22/16.
*/
public class PlainEdgeTest {
@Test
public void creationDoesNotRaiseException() throws InvalidIRException {
new PlainEdge(IRHelpers.createTestVertex(), IRHelpers.createTestVertex());
}
@Test
public void testFactoryCreationDoesNotRaiseException() throws InvalidIRException {
PlainEdge.PlainEdgeFactory factory = new PlainEdge.PlainEdgeFactory();
factory.make(IRHelpers.createTestVertex(), IRHelpers.createTestVertex());
}
}

View file

@ -0,0 +1,38 @@
package org.logstash.config.ir.graph;
import org.junit.Test;
import org.logstash.config.ir.InvalidIRException;
import org.logstash.config.ir.PluginDefinition;
import java.util.HashMap;
import java.util.Map;
import static org.hamcrest.CoreMatchers.*;
import static org.junit.Assert.assertThat;
import static org.logstash.config.ir.IRHelpers.*;
/**
* Created by andrewvc on 11/22/16.
*/
public class PluginVertexTest {
@Test
public void testConstructionIdHandlingWhenNoExplicitId() throws InvalidIRException {
PluginDefinition pluginDefinition = testPluginDefinition();
PluginVertex pluginVertex = new PluginVertex(testMetadata(), pluginDefinition);
Graph graph = Graph.empty();
graph.addVertex(pluginVertex);
assertThat(pluginVertex.getId(), notNullValue());
}
@Test
public void testConstructionIdHandlingWhenExplicitId() {
String customId = "mycustomid";
Map<String, Object> pluginArguments = new HashMap<>();
pluginArguments.put("id", customId);
PluginDefinition pluginDefinition = new PluginDefinition(PluginDefinition.Type.FILTER, "myPlugin", pluginArguments);
PluginVertex pluginVertex = new PluginVertex(testMetadata(), pluginDefinition);
assertThat(pluginVertex.getId(), is(customId));
assertThat(pluginVertex.getPluginDefinition().getArguments().get("id"), is(customId));
}
}

View file

@ -0,0 +1,13 @@
package org.logstash.config.ir.graph;
import org.junit.Test;
/**
* Created by andrewvc on 11/22/16.
*/
public class QueueVertexTest {
@Test
public void testConstruction() {
new QueueVertex();
}
}

View file

@ -0,0 +1,65 @@
package org.logstash.config.ir.graph;
import org.junit.Test;
import org.logstash.config.ir.IRHelpers;
import org.logstash.config.ir.InvalidIRException;
import static org.junit.Assert.*;
/**
* Created by andrewvc on 11/21/16.
*/
public class VertexTest {
@Test
public void testVertexBasics() throws InvalidIRException {
Graph graph = Graph.empty();
Vertex v1 = IRHelpers.createTestVertex();
Vertex v2 = IRHelpers.createTestVertex();
graph.chainVertices(v1, v2);
assertTrue("v1 has v2 as an outgoing vertex", v1.outgoingVertices().anyMatch(v2::equals));
assertTrue("v2 has v1 as an incoming vertex", v2.incomingVertices().anyMatch(v1::equals));
}
@Test
public void testIsLeafAndRoot() throws InvalidIRException {
Graph graph = Graph.empty();
Vertex v = IRHelpers.createTestVertex();
graph.addVertex(v);
// Nodes should be leaves and roots if they are isolated
assertTrue(v.isLeaf());
assertTrue(v.isRoot());
Vertex otherV = IRHelpers.createTestVertex();
graph.chainVertices(v, otherV);
assertFalse(v.isLeaf());
assertTrue(v.isRoot());
assertTrue(otherV.isLeaf());
assertFalse(otherV.isRoot());
}
@Test
public void testPartialLeafOnUnconnectedVertex() throws InvalidIRException {
Graph g = Graph.empty();
Vertex v = IRHelpers.createTestVertex();
g.addVertex(v);
assertEquals(1, v.getUnusedOutgoingEdgeFactories().size());
assertTrue(v.isPartialLeaf());
}
@Test
public void testPartialLeafOnConnectedVertex() throws InvalidIRException {
Vertex v = IRHelpers.createTestVertex();
Vertex otherV = IRHelpers.createTestVertex();
Graph graph = Graph.empty();
graph.chainVertices(v, otherV);
assertEquals(0, v.getUnusedOutgoingEdgeFactories().size());
assertFalse(v.isPartialLeaf());
}
}

View file

@ -0,0 +1,32 @@
package org.logstash.config.ir.graph.algorithms;
import org.junit.Test;
import org.logstash.config.ir.IRHelpers;
import org.logstash.config.ir.InvalidIRException;
import org.logstash.config.ir.graph.Graph;
import java.util.concurrent.atomic.AtomicInteger;
import static junit.framework.TestCase.assertEquals;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;
/**
* Created by andrewvc on 1/5/17.
*/
public class BreadthFirstTest {
@Test
public void testBFSBasic() throws InvalidIRException {
Graph g = Graph.empty();
g.chainVertices(IRHelpers.createTestVertex(), IRHelpers.createTestVertex(), IRHelpers.createTestVertex());
// We don't *really* need threadsafety for the count,
// but since we're using a lambda we need something that's final
final AtomicInteger visitCount = new AtomicInteger();
BreadthFirst.BfsResult res = BreadthFirst.breadthFirst(g.getRoots(), false, (v -> visitCount.incrementAndGet()));
assertEquals("It should visit each node once", 3, visitCount.get());
assertThat(res.getVertices(), is(g.getVertices()));
}
}

View file

@ -0,0 +1,28 @@
package org.logstash.config.ir.graph.algorithms;
import org.junit.Test;
import org.logstash.config.ir.IRHelpers;
import org.logstash.config.ir.InvalidIRException;
import org.logstash.config.ir.graph.Graph;
import org.logstash.config.ir.graph.Vertex;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
import static junit.framework.TestCase.assertEquals;
/**
* Created by andrewvc on 1/5/17.
*/
public class DepthFirstTest {
@Test
public void testDFSBasic() throws InvalidIRException {
Graph g = Graph.empty();
g.chainVertices(IRHelpers.createTestVertex(), IRHelpers.createTestVertex(), IRHelpers.createTestVertex());
final AtomicInteger visitCount = new AtomicInteger();
final List<Vertex> visited = new ArrayList<>();
DepthFirst.depthFirst(g).forEach(v -> visitCount.incrementAndGet());
assertEquals("It should visit each node once", 3, visitCount.get());
}
}

View file

@ -0,0 +1,66 @@
package org.logstash.config.ir.graph.algorithms;
import org.junit.Test;
import org.logstash.config.ir.InvalidIRException;
import org.logstash.config.ir.graph.Edge;
import org.logstash.config.ir.graph.Graph;
import org.logstash.config.ir.graph.Vertex;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.logstash.config.ir.IRHelpers.createTestVertex;
/**
* Created by andrewvc on 1/5/17.
*/
public class GraphDiffTest {
@Test
public void testIdenticalGraphs() throws InvalidIRException {
Graph left = simpleGraph();
Graph right = simpleGraph();
GraphDiff.DiffResult result = GraphDiff.diff(left, right);
assertTrue(result.isIdentical());
assertTrue(result.getAddedEdges().isEmpty());
assertTrue(result.getRemovedEdges().isEmpty());
assertTrue(result.getAddedVertices().isEmpty());
assertTrue(result.getRemovedVertices().isEmpty());
}
@Test
public void testDifferentSimpleGraphs() throws InvalidIRException {
Graph left = simpleGraph();
Graph right = left.copy();
Vertex new1 = createTestVertex("new1");
right.addVertex(new1);
right.chainVerticesById("t3", "new1");
GraphDiff.DiffResult result = GraphDiff.diff(left, right);
assertFalse(result.isIdentical());
assertThat(result.getAddedVertices().size(), is(1));
assertThat(result.getAddedVertices().stream().findAny().get().getId(), is("new1"));
assertThat(result.getAddedEdges().size(), is(1));
Edge expectedEdge = new1.incomingEdges().findAny().get();
assertTrue(result.getAddedEdges().stream().findAny().get().sourceComponentEquals(expectedEdge));
assertTrue(result.getRemovedVertices().isEmpty());
assertTrue(result.getRemovedEdges().isEmpty());
}
public Graph simpleGraph() throws InvalidIRException {
Graph graph = Graph.empty();
graph.addVertex(createTestVertex("t1"));
graph.addVertex(createTestVertex("t2"));
graph.addVertex(createTestVertex("t3"));
graph.chainVerticesById("t1", "t2", "t3");
graph.chainVerticesById("t1", "t3");
return graph;
}
}

View file

@ -0,0 +1,38 @@
package org.logstash.config.ir.graph.algorithms;
import org.junit.Test;
import org.logstash.config.ir.InvalidIRException;
import org.logstash.config.ir.graph.Graph;
import org.logstash.config.ir.graph.Vertex;
import java.util.Arrays;
import java.util.List;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.logstash.config.ir.IRHelpers.createTestVertex;
/**
* Created by andrewvc on 1/5/17.
*/
public class ShortestPathTest {
@Test
public void testShortestPathBasic() throws InvalidIRException, ShortestPath.InvalidShortestPathArguments {
Graph g = Graph.empty();
Vertex v1 = createTestVertex("v1");
g.addVertex(v1);
Vertex v2 = createTestVertex("v2");
g.addVertex(v2);
Vertex v3 = createTestVertex("v3");
g.addVertex(v3);
Vertex v4 = createTestVertex("v4");
g.addVertex(v4);
g.chainVertices(v1, v2, v3, v4);
g.chainVertices(v2, v4);
List<Vertex> path = ShortestPath.shortestPath(v1, v4);
List<Vertex> expected = Arrays.asList(v1,v2,v4);
assertThat(path, is(expected));
}
}

View file

@ -0,0 +1,46 @@
package org.logstash.config.ir.graph.algorithms;
import org.junit.Test;
import org.logstash.config.ir.IRHelpers;
import org.logstash.config.ir.InvalidIRException;
import org.logstash.config.ir.graph.Graph;
import org.logstash.config.ir.graph.Vertex;
import java.util.Arrays;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.AnyOf.anyOf;
import static org.hamcrest.core.Is.is;
/**
* Created by andrewvc on 1/7/17.
*/
public class TopologicalSortTest {
@Test(expected = InvalidIRException.class)
public void testGraphCycleDetection() throws InvalidIRException {
Graph g = Graph.empty();
Vertex v1 = IRHelpers.createTestVertex();
Vertex v2 = IRHelpers.createTestVertex();
Vertex v3 = IRHelpers.createTestVertex();
g.chainVertices(v1, v2);
g.chainVertices(v2, v3);
g.chainVertices(v2, v1);
}
@Test
public void testSortOrder() throws InvalidIRException, TopologicalSort.UnexpectedGraphCycleError {
Graph g = Graph.empty();
Vertex v1 = IRHelpers.createTestVertex();
Vertex v2 = IRHelpers.createTestVertex();
Vertex v3 = IRHelpers.createTestVertex();
Vertex v4 = IRHelpers.createTestVertex();
g.chainVertices(v3, v1, v2);
g.chainVertices(v4, v1, v2);
assertThat(TopologicalSort.sortVertices(g),
anyOf(
is(Arrays.asList(v3,v4,v1,v2)),
is(Arrays.asList(v4,v3,v1,v2))
));
}
}

View file

@ -0,0 +1,58 @@
package org.logstash.config.ir.imperative;
import org.junit.Test;
import org.logstash.config.ir.BaseSourceComponent;
import org.logstash.config.ir.InvalidIRException;
import static org.logstash.config.ir.DSL.*;
import static org.logstash.config.ir.IRHelpers.assertSyntaxEquals;
import static org.logstash.config.ir.PluginDefinition.Type.*;
/**
* Created by andrewvc on 9/13/16.
*/
public class DSLTest {
@Test
public void testDSLOnePluginEquality() {
assertSyntaxEquals(iPlugin(FILTER, "foo"), iPlugin(FILTER, "foo"));
}
@Test
public void testComposedPluginEquality() throws InvalidIRException {
assertSyntaxEquals(composedPlugins(), composedPlugins());
}
@Test
public void testDSLComplexEquality() throws InvalidIRException {
assertSyntaxEquals(complexExpression(), complexExpression());
}
@Test
public void testComposeSingle() throws InvalidIRException {
assertSyntaxEquals(iPlugin(FILTER, "grok"), iComposeSequence(iPlugin(FILTER, "grok")));
}
@Test
public void testComposeMulti() throws InvalidIRException {
Statement composed = iComposeSequence(iPlugin(FILTER, "grok"), iPlugin(FILTER, "foo"));
assertSyntaxEquals(iComposeSequence(iPlugin(FILTER, "grok"), iPlugin(FILTER, "foo")), composed);
}
public BaseSourceComponent composedPlugins() throws InvalidIRException {
return iComposeSequence(iPlugin(FILTER, "json"), iPlugin(FILTER, "stuff"));
}
public BaseSourceComponent complexExpression() throws InvalidIRException {
return iComposeSequence(
iPlugin(FILTER, "grok"),
iPlugin(FILTER, "kv"),
iIf(eAnd(eTruthy(eValue(5L)), eTruthy(eValue(null))),
iPlugin(FILTER, "grok"),
iComposeSequence(iPlugin(FILTER, "json"), iPlugin(FILTER, "stuff"))
)
);
}
}

View file

@ -0,0 +1,166 @@
package org.logstash.config.ir.imperative;
import org.junit.Test;
import org.logstash.config.ir.InvalidIRException;
import org.logstash.config.ir.graph.Graph;
import org.logstash.config.ir.graph.IfVertex;
import org.logstash.config.ir.graph.PluginVertex;
import static org.logstash.config.ir.DSL.*;
import static org.logstash.config.ir.IRHelpers.assertSyntaxEquals;
import static org.logstash.config.ir.PluginDefinition.Type.*;
/**
* Created by andrewvc on 9/15/16.
*/
public class ImperativeToGraphtest {
@Test
public void convertSimpleExpression() throws InvalidIRException {
Graph imperative = iComposeSequence(iPlugin(FILTER, "json"), iPlugin(FILTER, "stuff")).toGraph();
imperative.validate(); // Verify this is a valid graph
Graph regular = Graph.empty();
regular.chainVertices(gPlugin(FILTER, "json"), gPlugin(FILTER, "stuff"));
assertSyntaxEquals(imperative, regular);
}
@Test
public void testIdsDontAffectSourceComponentEquality() throws InvalidIRException {
Graph imperative = iComposeSequence(iPlugin(FILTER, "json", "oneid"), iPlugin(FILTER, "stuff", "anotherid")).toGraph();
imperative.validate(); // Verify this is a valid graph
Graph regular = Graph.empty();
regular.chainVertices(gPlugin(FILTER, "json", "someotherid"), gPlugin(FILTER, "stuff", "graphid"));
assertSyntaxEquals(imperative, regular);
}
@Test
public void convertComplexExpression() throws InvalidIRException {
Graph imperative = iComposeSequence(
iPlugin(FILTER, "p1"),
iPlugin(FILTER, "p2"),
iIf(eAnd(eTruthy(eValue(5L)), eTruthy(eValue(null))),
iPlugin(FILTER, "p3"),
iComposeSequence(iPlugin(FILTER, "p4"), iPlugin(FILTER, "p5"))
)
).toGraph();
imperative.validate(); // Verify this is a valid graph
PluginVertex p1 = gPlugin(FILTER, "p1");
PluginVertex p2 = gPlugin(FILTER, "p2");
PluginVertex p3 = gPlugin(FILTER, "p3");
PluginVertex p4 = gPlugin(FILTER, "p4");
PluginVertex p5 = gPlugin(FILTER, "p5");
IfVertex testIf = gIf(eAnd(eTruthy(eValue(5L)), eTruthy(eValue(null))));
Graph expected = Graph.empty();
expected.chainVertices(p1,p2,testIf);
expected.chainVertices(true, testIf, p3);
expected.chainVertices(false, testIf, p4);
expected.chainVertices(p4, p5);
assertSyntaxEquals(expected, imperative);
}
// This test has an imperative grammar with nested ifs and dangling
// partial leaves. This makes sure they all wire-up right
@Test
public void deepDanglingPartialLeaves() throws InvalidIRException {
Graph imperative = iComposeSequence(
iPlugin(FILTER, "p0"),
iIf(eTruthy(eValue(1)),
iPlugin(FILTER, "p1"),
iIf(eTruthy(eValue(3)),
iPlugin(FILTER, "p5"))
),
iIf(eTruthy(eValue(2)),
iPlugin(FILTER, "p3"),
iPlugin(FILTER, "p4")
),
iPlugin(FILTER, "pLast")
).toGraph();
imperative.validate(); // Verify this is a valid graph
IfVertex if1 = gIf(eTruthy(eValue(1)));
IfVertex if2 = gIf(eTruthy(eValue(2)));
IfVertex if3 = gIf(eTruthy(eValue(3)));
PluginVertex p0 = gPlugin(FILTER, "p0");
PluginVertex p1 = gPlugin(FILTER, "p1");
PluginVertex p2 = gPlugin(FILTER, "p2");
PluginVertex p3 = gPlugin(FILTER, "p3");
PluginVertex p4 = gPlugin(FILTER, "p4");
PluginVertex p5 = gPlugin(FILTER, "p5");
PluginVertex pLast = gPlugin(FILTER, "pLast");
Graph expected = Graph.empty();
expected.chainVertices(p0, if1);
expected.chainVertices(true, if1, p1);
expected.chainVertices(false, if1, if3);
expected.chainVertices(true, if3, p5);
expected.chainVertices(false, if3, if2);
expected.chainVertices(p5, if2);
expected.chainVertices(p1, if2);
expected.chainVertices(true, if2, p3);
expected.chainVertices(false, if2, p4);
expected.chainVertices(p3, pLast);
expected.chainVertices(p4,pLast);
assertSyntaxEquals(imperative, expected);
}
// This is a good test for what the filter block will do, where there
// will be a composed set of ifs potentially, all of which must terminate at a
// single node
@Test
public void convertComplexExpressionWithTerminal() throws InvalidIRException {
Graph imperative = iComposeSequence(
iPlugin(FILTER, "p1"),
iIf(eTruthy(eValue(1)),
iComposeSequence(
iIf(eTruthy(eValue(2)), noop(), iPlugin(FILTER, "p2")),
iIf(eTruthy(eValue(3)), iPlugin(FILTER, "p3"), noop())
),
iComposeSequence(
iIf(eTruthy(eValue(4)), iPlugin(FILTER, "p4")),
iPlugin(FILTER, "p5")
)
),
iPlugin(FILTER, "terminal")
).toGraph();
imperative.validate(); // Verify this is a valid graph
PluginVertex p1 = gPlugin(FILTER,"p1");
PluginVertex p2 = gPlugin(FILTER, "p2");
PluginVertex p3 = gPlugin(FILTER, "p3");
PluginVertex p4 = gPlugin(FILTER, "p4");
PluginVertex p5 = gPlugin(FILTER, "p5");
PluginVertex terminal = gPlugin(FILTER, "terminal");
IfVertex if1 = gIf(eTruthy(eValue(1)));
IfVertex if2 = gIf(eTruthy(eValue(2)));
IfVertex if3 = gIf(eTruthy(eValue(3)));
IfVertex if4 = gIf(eTruthy(eValue(4)));
Graph expected = Graph.empty();
expected.chainVertices(p1, if1);
expected.chainVertices(true, if1, if2);
expected.chainVertices(false, if1, if4);
expected.chainVertices(true, if2, if3);
expected.chainVertices(false, if2, p2);
expected.chainVertices(p2, if3);
expected.chainVertices(true, if3, p3);
expected.chainVertices(false, if3, terminal);
expected.chainVertices(p3, terminal);
expected.chainVertices(true, if4, p4);
expected.chainVertices(false, if4, p5);
expected.chainVertices(p4, p5);
expected.chainVertices(p5, terminal);
assertSyntaxEquals(imperative, expected);
}
}

View file

@ -10,10 +10,17 @@ namespace "compile" do
desc "Compile the config grammar"
task "grammar" => "logstash-core/lib/logstash/config/grammar.rb"
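# Runs a shell command and raises if it exits non-zero; `system` returns false/nil on failure and
# `$?` carries the resulting Process::Status.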
def safe_system(*args)
if !system(*args)
status = $?
raise "Got exit status #{status.exitstatus} attempting to execute #{args.inspect}!"
end
end
task "logstash-core-java" do
puts("Building logstash-core using gradle")
system("./gradlew", "jar")
safe_system("./gradlew", "jar")
end
desc "Build everything"

View file

@ -10,8 +10,6 @@ require "flores/rspec"
require "flores/random"
require "pathname"
SUPPORT_DIR = Pathname.new(::File.join(::File.dirname(__FILE__), "support"))
class JSONIOThingy < IO
def initialize; end
def flush; end

View file

@ -0,0 +1,39 @@
input {
stdin { }
}
filter {
grok {
match => {
"message" => '%{IPORHOST:clientip} %{USER:ident} %{USER:auth} \[%{HTTPDATE:timestamp}\] "%{WORD:verb} %{DATA:request} HTTP/%{NUMBER:httpversion}" %{NUMBER:response:int} (?:-|%{NUMBER:bytes:int}) %{QS:referrer} %{QS:agent}'
}
}
date {
match => [ "timestamp", "dd/MMM/YYYY:HH:mm:ss Z" ]
locale => en
}
geoip {
source => "clientip"
}
useragent {
source => "agent"
target => "useragent"
}
}
output {
stdout {
codec => dots {}
}
elasticsearch {
index => "apache_elastic_example"
template => "./apache_template.json"
template_name => "apache_elastic_example"
template_overwrite => true
}
}

View file

@ -0,0 +1,42 @@
input {
stdin {
codec => json
}
}
filter {
date {
match => ["time", "dd/MMM/YYYY:HH:mm:ss Z" ]
locale => en
}
geoip {
source => "remote_ip"
target => "geoip"
}
useragent {
source => "agent"
target => "user_agent"
}
grok {
match => [ "request" , "%{WORD:request_action} %{DATA:request1} HTTP/%{NUMBER:http_version}" ]
}
}
output {
stdout {
codec => dots {}
}
elasticsearch {
index => "nginx_json_elastic_stack_example"
document_type => "logs"
template => "./nginx_json_template.json"
template_name => "nginx_json_elastic_stack_example"
template_overwrite => true
}
}

View file

@ -0,0 +1,39 @@
input {
stdin { }
}
filter {
grok {
match => {
"message" => '%{IPORHOST:remote_ip} - %{DATA:user_name} \[%{HTTPDATE:time}\] "%{WORD:request_action} %{DATA:request} HTTP/%{NUMBER:http_version}" %{NUMBER:response} %{NUMBER:bytes} "%{DATA:referrer}" "%{DATA:agent}"'
}
}
date {
match => [ "time", "dd/MMM/YYYY:HH:mm:ss Z" ]
locale => en
}
geoip {
source => "remote_ip"
target => "geoip"
}
useragent {
source => "agent"
target => "user_agent"
}
}
output {
stdout {
codec => dots {}
}
elasticsearch {
index => "nginx_elastic_stack_example"
document_type => "logs"
template => "./nginx_template.json"
template_name => "nginx_elastic_stack_example"
template_overwrite => true
}
}

View file

@ -0,0 +1,156 @@
input {
stdin { }
}
filter {
csv {
columns => ["date","time","borough","zip_code","latitude","longitude","location","on_street_name","cross_street_name","off_street_name","number_of_persons_injured","number_of_persons_killed","number_of_pedestrians_injured","number_of_pedestrians_killed","number_of_cyclist_injured","number_of_cyclist_killed","number_of_motorist_injured","number_of_motorist_killed","contributing_factor_vehicle_1","contributing_factor_vehicle_2","contributing_factor_vehicle_3","contributing_factor_vehicle_4","contributing_factor_vehicle_5","unique_key","vehicle_type_code_1","vehicle_type_code_2","vehicle_type_code_3","vehicle_type_code_4","vehicle_type_code_5"]
}
# Drop the first (header) row in the file
if ([date] == "DATE") {
drop { }
}
# Combine latitude and longitude into single coords field
if [latitude] and [longitude] {
mutate {
add_field => {
"coords" => "%{longitude}"
"tmplat" => "%{latitude}"
}
}
mutate {
merge => ["coords", "tmplat"]
}
mutate {
# Convert our new array of strings back to float
convert => [ "coords", "float" ]
# Delete our temporary latitude field
remove_field => [ "tmplat" ]
}
}
if [on_street_name] and [cross_street_name] {
ruby {
# create new intersection field that combines cross street & on street names
code => "event.set('intersection',[event.get('on_street_name'), event.get('cross_street_name')].sort.join('--'))"
}
}
# Merge date and time into datetime
mutate {
add_field => {
"datetime" => "%{date} %{time}"
"contributing_factor_vehicle" => "%{contributing_factor_vehicle_1}"
"vehicle_type" => "%{vehicle_type_code_1}"
}
# convert to integer type
convert => ["number_of_persons_injured","integer","number_of_persons_killed","integer","number_of_pedestrians_injured","integer","number_of_pedestrians_killed","integer","number_of_cyclist_injured","integer","number_of_cyclist_killed","integer","number_of_motorist_injured","integer","number_of_motorist_killed","integer"]
strip => ["on_street_name", "cross_street_name"]
}
if ![number_of_persons_killed]
{
mutate {
add_field => {"number_of_persons_killed" => "0"}
}
}
if ![number_of_persons_injured]
{
mutate {
add_field => {"number_of_persons_injured" => "0"}
}
}
ruby {
# Get total number of persons impacted
code => "event.set('number_persons_impacted',event.get('number_of_persons_killed') + event.get('number_of_persons_injured'))"
}
# Combine contributing_factor_vehicle_X (X=1,2,3,4,5) fields into a single field
if [contributing_factor_vehicle_2] and "Unspecified" != [contributing_factor_vehicle_2] and [contributing_factor_vehicle_2] not in [contributing_factor_vehicle] {
mutate {
merge => ["contributing_factor_vehicle", "contributing_factor_vehicle_2"]
}
}
if [contributing_factor_vehicle_3] and "Unspecified" != [contributing_factor_vehicle_3] and [contributing_factor_vehicle_3] not in [contributing_factor_vehicle] {
mutate {
merge => ["contributing_factor_vehicle", "contributing_factor_vehicle_3"]
}
}
if [contributing_factor_vehicle_4] and "Unspecified" != [contributing_factor_vehicle_4] and [contributing_factor_vehicle_4] not in [contributing_factor_vehicle] {
mutate {
merge => ["contributing_factor_vehicle", "contributing_factor_vehicle_4"]
}
}
if [contributing_factor_vehicle_5] and "Unspecified" != [contributing_factor_vehicle_5] and [contributing_factor_vehicle_5] not in [contributing_factor_vehicle] {
mutate {
merge => ["contributing_factor_vehicle", "contributing_factor_vehicle_5"]
}
}
# Combine vehicle_type_code_X (X=1,2,3,4,5) fields into a single field
if [vehicle_type_code_2] and "Unspecified" != [vehicle_type_code_2] and [vehicle_type_code_2] not in [vehicle_type] {
mutate {
merge => ["vehicle_type", "vehicle_type_code_2"]
}
}
if [vehicle_type_code_3] and "Unspecified" != [vehicle_type_code_3] and [vehicle_type_code_3] not in [vehicle_type] {
mutate {
merge => ["vehicle_type", "vehicle_type_code_3"]
}
}
if [vehicle_type_code_4] and "Unspecified" != [vehicle_type_code_4] and [vehicle_type_code_4] not in [vehicle_type] {
mutate {
merge => ["vehicle_type", "vehicle_type_code_4"]
}
}
if [vehicle_type_code_5] and "Unspecified" != [vehicle_type_code_5] and [vehicle_type_code_5] not in [vehicle_type] {
mutate {
merge => ["vehicle_type", "vehicle_type_code_5"]
}
}
# Map @timestamp (event timestamp) to datetime
date {
match => [ "datetime", "MM/dd/YY HH:mm", "MM/dd/YY H:mm"]
timezone => "EST"
}
# Grab hour of day from time
grok {
match => { "time" => "%{DATA:hour_of_day}:%{GREEDYDATA}" }
}
mutate {
convert => ["hour_of_day", "integer"]
}
# Remove extra fields
mutate {
remove_field => ["datetime", "contributing_factor_vehicle_1", "contributing_factor_vehicle_2", "contributing_factor_vehicle_3", "contributing_factor_vehicle_4", "contributing_factor_vehicle_5","vehicle_type_code_1", "vehicle_type_code_2", "vehicle_type_code_3", "vehicle_type_code_4", "vehicle_type_code_5"]
}
}
output {
#stdout {codec => rubydebug}
stdout { codec => dots }
elasticsearch {
index => "nyc_visionzero"
template => "./nyc_collision_template.json"
template_name => "nyc_visionzero"
template_overwrite => true
}
}

View file

@ -0,0 +1,26 @@
input {
twitter {
consumer_key => "INSERT YOUR CONSUMER KEY"
consumer_secret => "INSERT YOUR CONSUMER SECRET"
oauth_token => "INSERT YOUR ACCESS TOKEN"
oauth_token_secret => "INSERT YOUR ACCESS TOKEN SECRET"
keywords => [ "thor", "spiderman", "wolverine", "ironman", "hulk"]
full_tweet => true
}
}
filter { }
output {
stdout {
codec => dots
}
elasticsearch {
hosts => "localhost:9200"
index => "twitter_elastic_example"
document_type => "tweets"
template => "./twitter_template.json"
template_name => "twitter_elastic_example"
template_overwrite => true
}
}

View file

@ -0,0 +1,30 @@
input {
stdin {
codec => json_lines {}
}
}
filter {
if !([transactionDate] == "")
{
date {
match => [ "transactionDate", "MMddyyyy", "MM/dd/yyyy"]
}
}
mutate {
convert => ["transactionAmount", "float"]
replace => { "type" => "%{recordType}" }
}
}
output {
#stdout { codec => rubydebug }
stdout { codec => dots }
elasticsearch {
hosts => "localhost:9200"
index => "usfec_%{recordType}"
template => "usfec_template.json"
template_name => "usfec"
template_overwrite => true
}
}