Mirror of https://github.com/elastic/logstash.git, synced 2025-04-24 22:57:16 -04:00
add Java Collection delete, & and | support. refactored Java interfaces equivalence. specs & more java_integration specs
- add Java Collection delete support with specs & more java_integration specs
- split a spec
- refactored for better specs
- intersection specs
- Ruby intersection on Java collections, refactored to use Java interfaces
- specs for remove_tag from events from json input
- refactor usage of subject
- added Java Collection union with Ruby array and specs
- refactored specs to also test for hash from deserialized json for JrJackson & Java Collections
- typo and comments
- solves #2261
This commit is contained in: parent 058e9967a7, commit e599284e62.
4 changed files with 652 additions and 424 deletions.
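For context, issue #2261 concerns events produced by JSON-deserializing inputs: JrJackson can hand back java.util collections instead of Ruby arrays and hashes, so filters that mutate event["tags"] stop behaving like they do with plain Ruby data. A hypothetical sketch of the mismatch, assuming a plain JRuby session without the patches below (values are illustrative):

    require "java"

    # A tags list deserialized into a java.util.ArrayList is not a Ruby Array:
    tags = Java::JavaUtil::ArrayList.new(["t1", "goaway", "t3"])

    tags.is_a?(Array)  # => false before this commit's patches
    Array === tags     # => false, so `case tags when Array ...` does not match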
@@ -6,27 +6,11 @@ require "java"
 # not test for is_a?(Array) or is_a?(Hash) and we do not want to include tests for
 # both classes everywhere. see LogStash::JSon.
 
-class Java::JavaUtil::ArrayList
-  # have ArrayList objects report is_a?(Array) == true
-  def is_a?(clazz)
-    return true if clazz == Array
-    super
-  end
-end
-
-class Java::JavaUtil::LinkedHashMap
-  # have LinkedHashMap objects report is_a?(Array) == true
-  def is_a?(clazz)
-    return true if clazz == Hash
-    super
-  end
-end
-
 class Array
   # enable class equivalence between Array and ArrayList
   # so that ArrayList will work with case o when Array ...
   def self.===(other)
-    return true if other.is_a?(Java::JavaUtil::ArrayList)
+    return true if other.is_a?(Java::JavaUtil::Collection)
     super
   end
 end
@@ -35,7 +19,44 @@ class Hash
   # enable class equivalence between Hash and LinkedHashMap
   # so that LinkedHashMap will work with case o when Hash ...
   def self.===(other)
-    return true if other.is_a?(Java::JavaUtil::LinkedHashMap)
+    return true if other.is_a?(Java::JavaUtil::Map)
     super
   end
 end
+
+module java::util::Map
+  # have Map objects like LinkedHashMap objects report is_a?(Array) == true
+  def is_a?(clazz)
+    return true if clazz == Hash
+    super
+  end
+end
+
+module java::util::Collection
+  # have Collections objects like ArrayList report is_a?(Array) == true
+  def is_a?(clazz)
+    return true if clazz == Array
+    super
+  end
+
+  # support the Ruby Array delete method on a Java Collection
+  def delete(o)
+    self.removeAll([o]) ? o : block_given? ? yield : nil
+  end
+
+  # support the Ruby intersection method on Java Collection
+  def &(other)
+    # transform self into a LinkedHashSet to remove duplicates and preserve order as defined by the Ruby Array intersection contract
+    duped = Java::JavaUtil::LinkedHashSet.new(self)
+    duped.retainAll(other)
+    duped
+  end
+
+  # support the Ruby union method on Java Collection
+  def |(other)
+    # transform self into a LinkedHashSet to remove duplicates and preserve order as defined by the Ruby Array union contract
+    duped = Java::JavaUtil::LinkedHashSet.new(self)
+    duped.addAll(other)
+    duped
+  end
+end
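With these patches loaded, Java collections coming out of JrJackson can be treated much like Ruby arrays. A minimal usage sketch, assuming a JRuby session with logstash/java_integration required (return values are indicative, derived from the code above):

    require "logstash/java_integration"

    list = Java::JavaUtil::ArrayList.new(["foo", "bar", "foo"])

    list.is_a?(Array)            # => true, via the java::util::Collection patch
    Array === list               # => true, so `case list when Array ...` matches

    list.delete("foo")           # => "foo", removes every occurrence (removeAll)
    list.to_a                    # => ["bar"]

    (list & ["bar", "qux"]).to_a # => ["bar"], intersection, order preserved
    (list | ["baz", "bar"]).to_a # => ["bar", "baz"], union without duplicates

Note that & and | return a Java LinkedHashSet rather than a Ruby Array, which is why the specs below call to_a before comparing.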
@@ -2,9 +2,422 @@
 require "spec_helper"
 
 describe LogStash::Event do
-  subject do
-    LogStash::Event.new(
-      "@timestamp" => Time.iso8601("2013-01-01T00:00:00.000Z"),
+  shared_examples "all event tests" do
+    context "[]=" do
+      it "should raise an exception if you attempt to set @timestamp to a value type other than a Time object" do
+        expect{subject["@timestamp"] = "crash!"}.to raise_error(TypeError)
+      end
+
+      it "should assign simple fields" do
+        expect(subject["foo"]).to be_nil
+        expect(subject["foo"] = "bar").to eq("bar")
+        expect(subject["foo"]).to eq("bar")
+      end
+
+      it "should overwrite simple fields" do
+        expect(subject["foo"]).to be_nil
+        expect(subject["foo"] = "bar").to eq("bar")
+        expect(subject["foo"]).to eq("bar")
+
+        expect(subject["foo"] = "baz").to eq("baz")
+        expect(subject["foo"]).to eq("baz")
+      end
+
+      it "should assign deep fields" do
+        expect(subject["[foo][bar]"]).to be_nil
+        expect(subject["[foo][bar]"] = "baz").to eq("baz")
+        expect(subject["[foo][bar]"]).to eq("baz")
+      end
+
+      it "should overwrite deep fields" do
+        expect(subject["[foo][bar]"]).to be_nil
+        expect(subject["[foo][bar]"] = "baz").to eq("baz")
+        expect(subject["[foo][bar]"]).to eq("baz")
+
+        expect(subject["[foo][bar]"] = "zab").to eq("zab")
+        expect(subject["[foo][bar]"]).to eq("zab")
+      end
+    end
+
+    context "#sprintf" do
+      it "should report a unix timestamp for %{+%s}" do
+        expect(subject.sprintf("%{+%s}")).to eq("1356998400")
+      end
+
+      it "should raise error when formatting %{+%s} when @timestamp field is missing" do
+        str = "hello-%{+%s}"
+        subj = subject.clone
+        subj.remove("[@timestamp]")
+        expect{ subj.sprintf(str) }.to raise_error(LogStash::Error)
+      end
+
+      it "should report a time with %{+format} syntax", :if => RUBY_ENGINE == "jruby" do
+        expect(subject.sprintf("%{+YYYY}")).to eq("2013")
+        expect(subject.sprintf("%{+MM}")).to eq("01")
+        expect(subject.sprintf("%{+HH}")).to eq("00")
+      end
+
+      it "should raise error with %{+format} syntax when @timestamp field is missing", :if => RUBY_ENGINE == "jruby" do
+        str = "logstash-%{+YYYY}"
+        subj = subject.clone
+        subj.remove("[@timestamp]")
+        expect{ subj.sprintf(str) }.to raise_error(LogStash::Error)
+      end
+
+      it "should report fields with %{field} syntax" do
+        expect(subject.sprintf("%{type}")).to eq("sprintf")
+        expect(subject.sprintf("%{message}")).to eq(subject["message"])
+      end
+
+      it "should print deep fields" do
+        expect(subject.sprintf("%{[j][k1]}")).to eq("v")
+        expect(subject.sprintf("%{[j][k2][0]}")).to eq("w")
+      end
+
+      it "should be able to take a non-string for the format" do
+        expect(subject.sprintf(2)).to eq("2")
+      end
+
+      it "should allow to use the metadata when calling #sprintf" do
+        expect(subject.sprintf("super-%{[@metadata][fancy]}")).to eq("super-pants")
+      end
+
+      it "should allow to use nested hash from the metadata field" do
+        expect(subject.sprintf("%{[@metadata][have-to-go][deeper]}")).to eq("inception")
+      end
+    end
+
+    context "#[]" do
+      it "should fetch data" do
+        expect(subject["type"]).to eq("sprintf")
+      end
+      it "should fetch fields" do
+        expect(subject["a"]).to eq("b")
+        expect(subject['c']['d']).to eq("f")
+      end
+      it "should fetch deep fields" do
+        expect(subject["[j][k1]"]).to eq("v")
+        expect(subject["[c][d]"]).to eq("f")
+        expect(subject['[f][g][h]']).to eq("i")
+        expect(subject['[j][k3][4]']).to eq("m")
+        expect(subject['[j][5]']).to eq(7)
+      end
+
+      it "should be fast?", :performance => true do
+        count = 1000000
+        2.times do
+          start = Time.now
+          count.times { subject["[j][k1]"] }
+          duration = Time.now - start
+          puts "event #[] rate: #{"%02.0f/sec" % (count / duration)}, elapsed: #{duration}s"
+        end
+      end
+    end
+
+    context "#include?" do
+      it "should include existing fields" do
+        expect(subject.include?("c")).to be_true
+        expect(subject.include?("[c][d]")).to be_true
+      end
+
+      it "should include field with nil value" do
+        expect(subject.include?("nilfield")).to be_true
+      end
+
+      it "should include @metadata field" do
+        expect(subject.include?("@metadata")).to be_true
+      end
+
+      it "should include field within @metadata" do
+        expect(subject.include?("[@metadata][fancy]")).to be_true
+      end
+
+      it "should not include non-existing fields" do
+        expect(subject.include?("doesnotexist")).to be_false
+        expect(subject.include?("[j][doesnotexist]")).to be_false
+      end
+    end
+
+    context "#overwrite" do
+      it "should swap data with new content" do
+        new_event = LogStash::Event.new(
+          "type" => "new",
+          "message" => "foo bar",
+        )
+        subject.overwrite(new_event)
+
+        expect(subject["message"]).to eq("foo bar")
+        expect(subject["type"]).to eq("new")
+
+        ["tags", "source", "a", "c", "f", "j"].each do |field|
+          expect(subject[field]).to be_nil
+        end
+      end
+    end
+
+    context "#append" do
+      it "should append strings to an array" do
+        subject.append(LogStash::Event.new("message" => "another thing"))
+        expect(subject["message"]).to eq([ "hello world", "another thing" ])
+      end
+
+      it "should concatenate tags" do
+        subject.append(LogStash::Event.new("tags" => [ "tag2" ]))
+        # added to_a for when array is a Java Collection when produced from json input
+        # TODO: we have to find a better way to handle this in tests. maybe override
+        # rspec eq or == to do an explicit to_a when comparing arrays?
+        expect(subject["tags"].to_a).to eq([ "tag1", "tag2" ])
+      end
+
+      context "when event field is nil" do
+        it "should add single value as string" do
+          subject.append(LogStash::Event.new({"field1" => "append1"}))
+          expect(subject[ "field1" ]).to eq("append1")
+        end
+        it "should add multi values as array" do
+          subject.append(LogStash::Event.new({"field1" => [ "append1","append2" ]}))
+          expect(subject[ "field1" ]).to eq([ "append1","append2" ])
+        end
+      end
+
+      context "when event field is a string" do
+        before { subject[ "field1" ] = "original1" }
+
+        it "should append string to values, if different from current" do
+          subject.append(LogStash::Event.new({"field1" => "append1"}))
+          expect(subject[ "field1" ]).to eq([ "original1", "append1" ])
+        end
+        it "should not change value, if appended value is equal current" do
+          subject.append(LogStash::Event.new({"field1" => "original1"}))
+          expect(subject[ "field1" ]).to eq("original1")
+        end
+        it "should concatenate values in an array" do
+          subject.append(LogStash::Event.new({"field1" => [ "append1" ]}))
+          expect(subject[ "field1" ]).to eq([ "original1", "append1" ])
+        end
+        it "should join array, removing duplicates" do
+          subject.append(LogStash::Event.new({"field1" => [ "append1","original1" ]}))
+          expect(subject[ "field1" ]).to eq([ "original1", "append1" ])
+        end
+      end
+      context "when event field is an array" do
+        before { subject[ "field1" ] = [ "original1", "original2" ] }
+
+        it "should append string values to array, if not present in array" do
+          subject.append(LogStash::Event.new({"field1" => "append1"}))
+          expect(subject[ "field1" ]).to eq([ "original1", "original2", "append1" ])
+        end
+        it "should not append string values, if the array already contains it" do
+          subject.append(LogStash::Event.new({"field1" => "original1"}))
+          expect(subject[ "field1" ]).to eq([ "original1", "original2" ])
+        end
+        it "should join array, removing duplicates" do
+          subject.append(LogStash::Event.new({"field1" => [ "append1","original1" ]}))
+          expect(subject[ "field1" ]).to eq([ "original1", "original2", "append1" ])
+        end
+      end
+    end
+
+    it "timestamp parsing speed", :performance => true do
+      warmup = 10000
+      count = 1000000
+
+      data = { "@timestamp" => "2013-12-21T07:25:06.605Z" }
+      event = LogStash::Event.new(data)
+      expect(event["@timestamp"]).to be_a(LogStash::Timestamp)
+
+      duration = 0
+      [warmup, count].each do |i|
+        start = Time.now
+        i.times do
+          data = { "@timestamp" => "2013-12-21T07:25:06.605Z" }
+          LogStash::Event.new(data.clone)
+        end
+        duration = Time.now - start
+      end
+      puts "event @timestamp parse rate: #{"%02.0f/sec" % (count / duration)}, elapsed: #{duration}s"
+    end
+
+    context "acceptable @timestamp formats" do
+      subject { LogStash::Event.new }
+
+      formats = [
+        "YYYY-MM-dd'T'HH:mm:ss.SSSZ",
+        "YYYY-MM-dd'T'HH:mm:ss.SSSSSSZ",
+        "YYYY-MM-dd'T'HH:mm:ss.SSS",
+        "YYYY-MM-dd'T'HH:mm:ss",
+        "YYYY-MM-dd'T'HH:mm:ssZ",
+      ]
+      formats.each do |format|
+        it "includes #{format}" do
+          time = subject.sprintf("%{+#{format}}")
+          begin
+            LogStash::Event.new("@timestamp" => time)
+          rescue => e
+            raise StandardError, "Time '#{time}' was rejected. #{e.class}: #{e.to_s}"
+          end
+        end
+      end
+
+      context "from LOGSTASH-1738" do
+        it "does not error" do
+          LogStash::Event.new("@timestamp" => "2013-12-29T23:12:52.371240+02:00")
+        end
+      end
+
+      context "from LOGSTASH-1732" do
+        it "does not error" do
+          LogStash::Event.new("@timestamp" => "2013-12-27T11:07:25+00:00")
+        end
+      end
+    end
+
+    context "timestamp initialization" do
+      let(:logger) { double("logger") }
+
+      it "should coerce timestamp" do
+        t = Time.iso8601("2014-06-12T00:12:17.114Z")
+        expect(LogStash::Timestamp).to receive(:coerce).exactly(3).times.and_call_original
+        expect(LogStash::Event.new("@timestamp" => t).timestamp.to_i).to eq(t.to_i)
+        expect(LogStash::Event.new("@timestamp" => LogStash::Timestamp.new(t)).timestamp.to_i).to eq(t.to_i)
+        expect(LogStash::Event.new("@timestamp" => "2014-06-12T00:12:17.114Z").timestamp.to_i).to eq(t.to_i)
+      end
+
+      it "should assign current time when no timestamp" do
+        ts = LogStash::Timestamp.now
+        expect(LogStash::Timestamp).to receive(:now).and_return(ts)
+        expect(LogStash::Event.new({}).timestamp.to_i).to eq(ts.to_i)
+      end
+
+      it "should tag and warn for invalid value" do
+        ts = LogStash::Timestamp.now
+        expect(LogStash::Timestamp).to receive(:now).twice.and_return(ts)
+        expect(LogStash::Event::LOGGER).to receive(:warn).twice
+
+        event = LogStash::Event.new("@timestamp" => :foo)
+        expect(event.timestamp.to_i).to eq(ts.to_i)
+        expect(event["tags"]).to eq([LogStash::Event::TIMESTAMP_FAILURE_TAG])
+        expect(event[LogStash::Event::TIMESTAMP_FAILURE_FIELD]).to eq(:foo)
+
+        event = LogStash::Event.new("@timestamp" => 666)
+        expect(event.timestamp.to_i).to eq(ts.to_i)
+        expect(event["tags"]).to eq([LogStash::Event::TIMESTAMP_FAILURE_TAG])
+        expect(event[LogStash::Event::TIMESTAMP_FAILURE_FIELD]).to eq(666)
+      end
+
+      it "should tag and warn for invalid string format" do
+        ts = LogStash::Timestamp.now
+        expect(LogStash::Timestamp).to receive(:now).and_return(ts)
+        expect(LogStash::Event::LOGGER).to receive(:warn)
+
+        event = LogStash::Event.new("@timestamp" => "foo")
+        expect(event.timestamp.to_i).to eq(ts.to_i)
+        expect(event["tags"]).to eq([LogStash::Event::TIMESTAMP_FAILURE_TAG])
+        expect(event[LogStash::Event::TIMESTAMP_FAILURE_FIELD]).to eq("foo")
+      end
+    end
+
+    context "to_json" do
+      it "should support to_json" do
+        new_event = LogStash::Event.new(
+          "@timestamp" => Time.iso8601("2014-09-23T19:26:15.832Z"),
+          "message" => "foo bar",
+        )
+        json = new_event.to_json
+
+        expect(json).to eq( "{\"@timestamp\":\"2014-09-23T19:26:15.832Z\",\"message\":\"foo bar\",\"@version\":\"1\"}")
+      end
+
+      it "should support to_json and ignore arguments" do
+        new_event = LogStash::Event.new(
+          "@timestamp" => Time.iso8601("2014-09-23T19:26:15.832Z"),
+          "message" => "foo bar",
+        )
+        json = new_event.to_json(:foo => 1, :bar => "baz")
+
+        expect(json).to eq( "{\"@timestamp\":\"2014-09-23T19:26:15.832Z\",\"message\":\"foo bar\",\"@version\":\"1\"}")
+      end
+    end
+
+    context "metadata" do
+      context "with existing metadata" do
+        subject { LogStash::Event.new("hello" => "world", "@metadata" => { "fancy" => "pants" }) }
+
+        it "should not include metadata in to_hash" do
+          expect(subject.to_hash.keys).not_to include("@metadata")
+
+          # 'hello', '@timestamp', and '@version'
+          expect(subject.to_hash.keys.count).to eq(3)
+        end
+
+        it "should still allow normal field access" do
+          expect(subject["hello"]).to eq("world")
+        end
+      end
+
+      context "with set metadata" do
+        let(:fieldref) { "[@metadata][foo][bar]" }
+        let(:value) { "bar" }
+        subject { LogStash::Event.new("normal" => "normal") }
+        before do
+          # Verify the test is configured correctly.
+          expect(fieldref).to start_with("[@metadata]")
+
+          # Set it.
+          subject[fieldref] = value
+        end
+
+        it "should still allow normal field access" do
+          expect(subject["normal"]).to eq("normal")
+        end
+
+        it "should allow getting" do
+          expect(subject[fieldref]).to eq(value)
+        end
+
+        it "should be hidden from .to_json" do
+          require "json"
+          obj = JSON.parse(subject.to_json)
+          expect(obj).not_to include("@metadata")
+        end
+
+        it "should be hidden from .to_hash" do
+          expect(subject.to_hash).not_to include("@metadata")
+        end
+
+        it "should be accessible through #to_hash_with_metadata" do
+          obj = subject.to_hash_with_metadata
+          expect(obj).to include("@metadata")
+          expect(obj["@metadata"]["foo"]["bar"]).to eq(value)
+        end
+      end
+
+      context "with no metadata" do
+        subject { LogStash::Event.new("foo" => "bar") }
+        it "should have no metadata" do
+          expect(subject["@metadata"]).to be_empty
+        end
+        it "should still allow normal field access" do
+          expect(subject["foo"]).to eq("bar")
+        end
+      end
+    end
+
+    context "signal events" do
+      it "should define the shutdown event" do
+        # the SHUTDOWN and FLUSH constants are part of the plugin API contract
+        # if they are changed, all plugins must be updated
+        expect(LogStash::SHUTDOWN).to be_a(LogStash::ShutdownEvent)
+        expect(LogStash::FLUSH).to be_a(LogStash::FlushEvent)
+      end
+    end
+  end
+
+  let(:event_hash) do
+    {
+      "@timestamp" => "2013-01-01T00:00:00.000Z",
       "type" => "sprintf",
      "message" => "hello world",
      "tags" => [ "tag1" ],
@@ -24,414 +437,20 @@ describe LogStash::Event do
      },
      "nilfield" => nil,
      "@metadata" => { "fancy" => "pants", "have-to-go" => { "deeper" => "inception" } }
-    )
+    }
   end
 
-  context "[]=" do
-  [... roughly 400 further lines removed: the former top-level "[]=", "#sprintf", "#[]", "#include?", "#overwrite", "#append", timestamp, "to_json", "metadata" and "signal events" specs, identical to the shared examples above apart from the old tags assertion not calling to_a ...]
-  end
+  describe "using normal hash input" do
+    it_behaves_like "all event tests" do
+      subject{LogStash::Event.new(event_hash)}
+    end
+  end
+
+  describe "using hash input from deserialized json" do
+    # this is to test the case when JrJackson deserialises Json and produces
+    # native Java Collections objects for efficiency
+    it_behaves_like "all event tests" do
+      subject{LogStash::Event.new(LogStash::Json.load(LogStash::Json.dump(event_hash)))}
+    end
+  end
 end
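The two describe blocks run the same shared examples against an event built from a plain Ruby hash and against one built from the same hash after a JSON round trip, because JrJackson may hand back java.util collections rather than Ruby arrays and hashes. A small sketch of the difference, assuming a JRuby session with the LogStash libraries on the load path (the exact Java class depends on the JrJackson version in use):

    require "logstash/event"
    require "logstash/json"

    hash  = { "tags" => [ "tag1" ] }
    plain = LogStash::Event.new(hash)
    json  = LogStash::Event.new(LogStash::Json.load(LogStash::Json.dump(hash)))

    plain["tags"].class       # => Array
    json["tags"].class        # may be a Java collection such as Java::JavaUtil::ArrayList
    json["tags"].is_a?(Array) # => true once logstash/java_integration is loaded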
@@ -1,5 +1,6 @@
 # encoding: utf-8
 require "spec_helper"
+require "logstash/json"
 
 # use a dummy NOOP filter to test Filters::Base
 class LogStash::Filters::NOOP < LogStash::Filters::Base
@@ -196,9 +197,21 @@ describe LogStash::Filters::NOOP do
     insist { subject["tags"] } == ["t1"]
   end
 
+  # also test from Json deserialized data to test the handling of native Java collections by JrJackson
+  # see https://github.com/elastic/logstash/issues/2261
+  sample(LogStash::Json.load("{\"type\":\"noop\", \"tags\":[\"t1\", \"t2\", \"t3\"]}")) do
+    insist { subject["tags"] } == ["t1"]
+  end
+
   sample("type" => "noop", "tags" => ["t1", "t2"]) do
     insist { subject["tags"] } == ["t1"]
   end
+
+  # also test from Json deserialized data to test the handling of native Java collections by JrJackson
+  # see https://github.com/elastic/logstash/issues/2261
+  sample(LogStash::Json.load("{\"type\":\"noop\", \"tags\":[\"t1\", \"t2\"]}")) do
+    insist { subject["tags"] } == ["t1"]
+  end
 end
 
 describe "remove_tag with dynamic value" do
@@ -215,6 +228,12 @@ describe LogStash::Filters::NOOP do
   sample("type" => "noop", "tags" => ["t1", "goaway", "t3"], "blackhole" => "goaway") do
     insist { subject["tags"] } == ["t1", "t3"]
   end
+
+  # also test from Json deserialized data to test the handling of native Java collections by JrJackson
+  # see https://github.com/elastic/logstash/issues/2261
+  sample(LogStash::Json.load("{\"type\":\"noop\", \"tags\":[\"t1\", \"goaway\", \"t3\"], \"blackhole\":\"goaway\"}")) do
+    insist { subject["tags"] } == ["t1", "t3"]
+  end
 end
 
 describe "remove_field" do
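These new samples exercise the scenario behind issue #2261: remove_tag has to delete from a tags collection that was deserialized by JrJackson as a Java list rather than a Ruby Array. A hedged sketch of the underlying operation, assuming LogStash::Json returns a Java collection for the array (it may return a plain Ruby Array depending on the JrJackson version):

    require "logstash/java_integration"
    require "logstash/json"

    event_data = LogStash::Json.load('{"type":"noop", "tags":["t1", "goaway", "t3"]}')
    tags = event_data["tags"]

    tags.delete("goaway")  # Ruby Array#delete semantics via the Collection patch
    tags.to_a              # => ["t1", "t3"]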
spec/lib/logstash/java_integration_spec.rb (new file, 169 lines)
@@ -0,0 +1,169 @@
+# encoding: utf-8
+require "spec_helper"
+require "logstash/java_integration"
+
+describe "Java integration" do
+
+  context "type equivalence" do
+
+    # here we test for both is_a? and case/when usage of the Java types
+    # because these are the specific use-cases in our code and the expected
+    # behaviour.
+
+    context "Java::JavaUtil::ArrayList" do
+
+      it "should report to be a Ruby Array" do
+        expect(Java::JavaUtil::ArrayList.new.is_a?(Array)).to be_true
+      end
+
+      it "should be class equivalent to Ruby Array" do
+        expect do
+          case Java::JavaUtil::ArrayList.new
+          when Array
+            true
+          else
+            raise
+          end
+        end.not_to raise_error
+
+        expect(Array === Java::JavaUtil::ArrayList.new).to be_true
+      end
+    end
+
+    context "Java::JavaUtil::LinkedHashMap" do
+      it "should report to be a Ruby Hash" do
+        expect(Java::JavaUtil::LinkedHashMap.new.is_a?(Hash)).to be_true
+      end
+
+      it "should be class equivalent to Ruby Hash" do
+        expect do
+          case Java::JavaUtil::LinkedHashMap.new
+          when Hash
+            true
+          else
+            raise
+          end
+        end.not_to raise_error
+
+        expect(Hash === Java::JavaUtil::LinkedHashMap.new).to be_true
+      end
+    end
+  end
+
+  context "Java::JavaUtil::Collection" do
+    subject{Java::JavaUtil::ArrayList.new(initial_array)}
+
+    context "when deleting a unique instance" do
+      let(:initial_array) {["foo", "bar"]}
+
+      it "should return the deleted object" do
+        expect(subject.delete("foo")).to eq("foo")
+      end
+
+      it "should remove the object to delete" do
+        expect{subject.delete("foo")}.to change{subject.to_a}.from(initial_array).to(["bar"])
+      end
+    end
+
+    context "when deleting multiple instances" do
+      let(:initial_array) {["foo", "bar", "foo"]}
+
+      it "should return the last deleted object" do
+        expect(subject.delete("foo")).to eq("foo")
+      end
+
+      it "should remove all the objects to delete" do
+        expect{subject.delete("foo")}.to change{subject.to_a}.from(initial_array).to(["bar"])
+      end
+    end
+
+    context "when deleting non existing object" do
+      let(:initial_array) {["foo", "bar", "foo"]}
+
+      it "should return nil" do
+        expect(subject.delete("baz")).to be_nil
+      end
+
+      it "should not change the collection" do
+        expect{subject.delete("baz")}.to_not change{subject.to_a}
+      end
+
+      it "should yield to block when given" do
+        expect(subject.delete("baz"){"foobar"}).to eq("foobar")
+      end
+    end
+
+    context "when deleting on empty collection" do
+      let(:initial_array) {[]}
+
+      it "should return nil" do
+        expect(subject.delete("baz")).to be_nil
+      end
+
+      it "should not change the collection" do
+        expect{subject.delete("baz")}.to_not change{subject.to_a}
+      end
+    end
+
+    context "when intersecting with a Ruby Array" do
+
+      context "using string collection with duplicates and single result" do
+        let(:initial_array) {["foo", "bar", "foo"]}
+
+        it "should not change original collection" do
+          expect{subject & ["foo"]}.to_not change{subject.to_a}
+        end
+
+        it "should return a new array containing elements common to the two arrays, excluding any duplicate" do
+          expect((subject & ["foo"]).to_a).to eq(["foo"])
+        end
+      end
+
+      context "using string collection with duplicates and multiple results" do
+        let(:original) {["foo", "bar", "foo", "baz"]}
+        let(:target) {["baz", "foo"]}
+        let(:result) {["foo", "baz"]}
+
+        it "should return a new array containing elements common to the two arrays, excluding any duplicate and preserve order from the original array" do
+          # this is the Ruby contract
+          expect(original & target).to eq(result)
+
+          # this should work the same
+          expect((Java::JavaUtil::ArrayList.new(original) & target).to_a).to eq(result)
+        end
+      end
+
+      context "Ruby doc examples" do
+        it "should return a new array containing elements common to the two arrays, excluding any duplicate" do
+          expect(Java::JavaUtil::ArrayList.new(([1, 1, 3, 5]) & [1, 2, 3]).to_a).to eq([1, 3])
+          expect(Java::JavaUtil::ArrayList.new((['a', 'b', 'b', 'z']) & ['a', 'b', 'c']).to_a).to eq(['a', 'b'])
+        end
+      end
+    end
+
+    context "when unioning with a Ruby Array" do
+
+      context "using string collection with duplicates" do
+        let(:initial_array) {["foo", "bar", "foo"]}
+
+        it "should not change original collection" do
+          expect{subject | ["bar", "baz"]}.to_not change{subject.to_a}
+        end
+
+        it "should return a new array by joining excluding any duplicates and preserving the order from the original array" do
+          expect((subject | ["bar", "baz"]).to_a).to eq(["foo", "bar", "baz"])
+        end
+
+        it "should remove duplicates when joining empty array" do
+          expect((subject | []).to_a).to eq(["foo", "bar"])
+        end
+      end
+
+      context "Ruby doc examples" do
+        it "should return a new array containing elements common to the two arrays, excluding any duplicate" do
+          expect(Java::JavaUtil::ArrayList.new((["a", "b", "c"]) | ["c", "d", "a"]).to_a).to eq(["a", "b", "c", "d"])
+        end
+      end
+    end
+  end
+end
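One behaviour the new spec pins down is the block form of delete, which mirrors Ruby's Array#delete: the block supplies the return value when nothing was removed. A short sketch, assuming logstash/java_integration is loaded:

    require "logstash/java_integration"

    list = Java::JavaUtil::ArrayList.new(["foo", "bar"])
    list.delete("baz") { "not found" }  # => "not found", collection unchanged
    list.delete("foo")                  # => "foo", collection is now ["bar"]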