mirror of https://github.com/elastic/logstash.git (synced 2025-04-24 14:47:19 -04:00)

- date filter specs now passing against event v1

commit 48e23f7ac6
parent 8085178a8b

4 changed files with 65 additions and 66 deletions
@@ -39,10 +39,18 @@ module LogStash::EventV1
     @data["@version"] = "1" if !@data.include?("@version")
   end # def initialize
 
+  # Add class methods on inclusion.
   public
-  def self.from_json(json)
-    return self.new(JSON.parse(json))
-  end # def self.from_json
+  def self.included(klass)
+    klass.extend(ClassMethods)
+  end # def included
+
+  module ClassMethods
+    public
+    def from_json(json)
+      return self.new(JSON.parse(json))
+    end # def from_json
+  end
 
   public
   def cancel
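A minimal standalone sketch of why the hunk above moves from_json into a ClassMethods module hooked up via self.included: a `def self.x` on a mixin never becomes a class method of the class that includes it, while `klass.extend(ClassMethods)` does. The EventV1Like and StubEvent names below are illustrative stand-ins, not the real logstash classes.

    require "json"

    module EventV1Like
      def self.included(klass)
        klass.extend(ClassMethods)   # copy the module's methods onto the including class
      end

      module ClassMethods
        def from_json(json)
          new(JSON.parse(json))      # here `self` is the including class, so `new` builds an instance
        end
      end
    end

    class StubEvent
      include EventV1Like

      attr_reader :data

      def initialize(data)
        @data = data
      end
    end

    p StubEvent.from_json('{"@version":"1"}').data   # => {"@version"=>"1"}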
@@ -20,7 +20,10 @@ require "logstash/time_addon"
 # set in the event. For example, with file input, the timestamp is set to the
 # time of each read.
 class LogStash::Filters::Date < LogStash::Filters::Base
-  JavaException = java.lang.Exception if RUBY_ENGINE == "jruby"
+  if RUBY_ENGINE == "jruby"
+    JavaException = java.lang.Exception
+    UTC = org.joda.time.DateTimeZone.forID("UTC")
+  end
 
   config_name "date"
   plugin_status "stable"
@@ -184,8 +187,6 @@ class LogStash::Filters::Date < LogStash::Filters::Base
   def filter(event)
-    @logger.debug? && @logger.debug("Date filter: received event", :type => event.type)
     return unless filter?(event)
-    now = Time.now
 
     @parsers.each do |field, fieldparsers|
       @logger.debug? && @logger.debug("Date filter looking for field",
                                       :type => event.type, :field => field)
@@ -224,7 +225,7 @@ class LogStash::Filters::Date < LogStash::Filters::Base
         missing.each do |t|
           case t
           when "y"
-            time = time.withYear(now.year)
+            time = time.withYear(Time.now.year)
           when "S"
             # TODO(sissel): Old behavior was to default to fractional sec == 0
             #time.setMillisOfSecond(now.usec / 1000)
@@ -238,10 +239,13 @@ class LogStash::Filters::Date < LogStash::Filters::Base
           end
         end
         #@logger.info :JodaTime => time.to_s
-        time = time.withZone(org.joda.time.DateTimeZone.forID("UTC"))
-        event.timestamp = time.to_s
-        #event.timestamp = LogStash::Time.to_iso8601(time)
-        @logger.debug? && @logger.debug("Date parsing done", :value => value, :timestamp => event.timestamp)
+        time = time.withZone(UTC)
+        event["@timestamp"] = Time.utc(
+          time.getYear, time.getMonthOfYear, time.getDayOfMonth,
+          time.getHourOfDay, time.getMinuteOfHour, time.getSecondOfMinute,
+          time.getMillisOfSecond * 1000
+        )
+        @logger.debug? && @logger.debug("Date parsing done", :value => value, :timestamp => event["@timestamp"])
       rescue StandardError, JavaException => e
         @logger.warn("Failed parsing date from field", :field => field,
                      :value => value, :exception => e)
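For reference, a plain-Ruby sketch of the conversion the added lines perform once the Joda DateTime has been moved to UTC: Time.utc takes calendar fields plus microseconds as its seventh argument, so Joda's getMillisOfSecond has to be multiplied by 1000. The literal values below are stand-ins for the Joda accessor results.

    require "time"

    # Stand-ins for the Joda accessors, for the instant 2001-11-06T20:45:45.123Z.
    year, month, day = 2001, 11, 6
    hour, minute, second = 20, 45, 45
    millis_of_second = 123

    timestamp = Time.utc(year, month, day, hour, minute, second,
                         millis_of_second * 1000)   # 7th argument is microseconds

    puts timestamp.iso8601(3)   # => "2001-11-06T20:45:45.123Z"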
@@ -119,7 +119,8 @@ class LogStash::Inputs::Base < LogStash::Plugin
         # JSON must be valid UTF-8, and many inputs come from ruby IO
         # instances, which almost all default to ASCII-8BIT. Force UTF-8
         event = LogStash::Event.from_json(raw.force_encoding("UTF-8"))
-        event.tags += @tags
+        event["tags"] ||= []
+        event["tags"] += @tags
         if @message_format
           event.message ||= event.sprintf(@message_format)
         end
@@ -128,19 +129,17 @@ class LogStash::Inputs::Base < LogStash::Plugin
         # plain text and try to do the best we can with it?
         @logger.info? and @logger.info("Trouble parsing json input, falling " \
                                        "back to plain text", :input => raw,
-                                       :source => source, :exception => e)
+                                       :source => source, :exception => e, :stack => e.backtrace)
         event.message = raw
-        event.tags << "_jsonparsefailure"
-      end
-
-      if event.source == "unknown"
-        event.source = source
+        event["tags"] ||= []
+        event["tags"] << "_jsonparsefailure"
       end
     when "msgpack_event"
       begin
         # Msgpack does not care about UTF-8
         event = LogStash::Event.new(MessagePack.unpack(raw))
-        event.tags += @tags
+        event["tags"] ||= []
+        event["tags"] |= @tags
         if @message_format
           event.message ||= event.sprintf(@message_format)
         end
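The tag handling above swaps the old event.tags accessor for direct hash access, so the field has to be initialized before it is extended. A small sketch, on a bare hash rather than a real event, of why the `||= []` guard matters and how the two operators used in the json and msgpack branches differ (`+=` concatenates and can duplicate, `|=` is a set union):

    event = {}                     # pretend event hash with no "tags" key yet

    event["tags"] ||= []           # without this, the next line would call + on nil and raise
    event["tags"] += ["apache"]    # += appends, so a repeated tag would be added twice
    event["tags"] |= ["apache"]    # |= is a union, so an already-present tag is not added again

    p event["tags"]                # => ["apache"]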
@@ -150,7 +149,8 @@ class LogStash::Inputs::Base < LogStash::Plugin
         @logger.warn("Trouble parsing msgpack input, falling back to plain text",
                      :input => raw, :source => source, :exception => e)
         event.message = raw
-        event.tags << "_msgpackparsefailure"
+        event["tags"] ||= []
+        event["tags"] << "_msgpackparsefailure"
       end
 
       if event.source == "unknown"
@@ -31,11 +31,16 @@ describe LogStash::Filters::Date do
       "2001-11-06T20:45:45.123-0000" => "2001-11-06T20:45:45.123Z",
       "2001-12-07T23:54:54.123Z" => "2001-12-07T23:54:54.123Z",
     }
+
     times.each do |input, output|
-      sample({"@fields" => {"mydate" => input}}) do
-        insist { subject["mydate"] } == input
-        insist { subject.timestamp } == output
-        insist { subject["@timestamp"] } == output
+      sample("mydate" => input) do
+        begin
+          insist { subject["mydate"] } == input
+          insist { subject["@timestamp"] } == Time.iso8601(output).utc
+        rescue
+          require "pry"; binding.pry
+          raise
+        end
       end
     end # times.each
   end
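These specs now compare @timestamp against a real Time value rather than an ISO8601 string, since the v1 event stores the timestamp as a Time object. A rough sketch of what such an expectation boils down to, using plain Ruby in place of the insist assertion:

    require "time"

    output   = "2001-11-06T20:45:45.123Z"
    expected = Time.iso8601(output).utc          # parse the expected string into a UTC Time

    # roughly what `insist { subject["@timestamp"] } == expected` asserts:
    actual = Time.utc(2001, 11, 6, 20, 45, 45, 123_000)
    raise "mismatch" unless actual == expected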
@@ -57,10 +62,9 @@ describe LogStash::Filters::Date do
       "Nov 24 01:29:01 -0800" => "#{year}-11-24T09:29:01.000Z",
     }
     times.each do |input, output|
-      sample({"@fields" => {"mydate" => input}}) do
+      sample("mydate" => input) do
         insist { subject["mydate"] } == input
-        insist { subject.timestamp } == output
-        insist { subject["@timestamp"] } == output
+        insist { subject["@timestamp"] } == Time.iso8601(output).utc
       end
     end # times.each
   end
@@ -83,10 +87,9 @@ describe LogStash::Filters::Date do
       1000000000 => "2001-09-09T01:46:40.000Z"
     }
     times.each do |input, output|
-      sample({"@fields" => {"mydate" => input}}) do
+      sample("mydate" => input) do
         insist { subject["mydate"] } == input
-        insist { subject.timestamp } == output
-        insist { subject["@timestamp"] } == output
+        insist { subject["@timestamp"] } == Time.iso8601(output).utc
       end
     end # times.each
   end
@@ -100,8 +103,9 @@ describe LogStash::Filters::Date do
       }
     CONFIG
 
-    sample({"@fields" => {"mydate" => "1350414944.123456"}}) do
-      insist { subject.timestamp } == "2012-10-16T19:15:44.123Z"
+    sample("mydate" => "1350414944.123456") do
+      # Joda time only supports milliseconds :\
+      insist { subject.timestamp } == Time.iso8601("2012-10-16T12:15:44.123-07:00").utc
     end
   end
 
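The "Joda time only supports milliseconds" comment is why the expected value stops at .123 even though the input carries microseconds. A rough plain-Ruby sketch of that truncation, assuming the UNIX-epoch parsing reduces to a millisecond conversion (this is not the filter's actual parser):

    require "time"

    value  = "1350414944.123456"
    millis = (value.to_f * 1000).to_i             # 1350414944123 -- the .456 microseconds are dropped
    parsed = Time.at(millis / 1000, (millis % 1000) * 1000).utc

    puts parsed.iso8601(3)                                             # => "2012-10-16T19:15:44.123Z"
    puts parsed == Time.iso8601("2012-10-16T12:15:44.123-07:00").utc   # => true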
@@ -125,10 +129,9 @@ describe LogStash::Filters::Date do
       1000000000123 => "2001-09-09T01:46:40.123Z"
     }
     times.each do |input, output|
-      sample({"@fields" => {"mydate" => input}}) do
+      sample("mydate" => input) do
         insist { subject["mydate"] } == input
-        insist { subject.timestamp } == output
-        insist { subject["@timestamp"] } == output
+        insist { subject["@timestamp"] } == Time.iso8601(output)
       end
     end # times.each
   end
@@ -138,8 +141,8 @@ describe LogStash::Filters::Date do
       input {
         generator {
           lines => [
-            '{ "@fields": { "mydate": "this will not parse" } }',
-            '{ "@fields": { } }'
+            '{ "mydate": "this will not parse" }',
+            '{ }'
           ]
           format => json_event
           type => foo
@@ -165,20 +168,20 @@ describe LogStash::Filters::Date do
     config <<-'CONFIG'
       filter {
         date {
-          t => TAI64N
+          match => [ t, TAI64N ]
        }
      }
    CONFIG
 
     # Try without leading "@"
-    sample({ "@fields" => { "t" => "4000000050d506482dbdf024" } }) do
-      insist { subject.timestamp } == "2012-12-22T01:00:46.767Z"
+    sample("t" => "4000000050d506482dbdf024") do
+      insist { subject.timestamp } == Time.iso8601("2012-12-22T01:00:46.767Z").utc
     end
 
     # Should still parse successfully if it's a full tai64n time (with leading
     # '@')
-    sample({ "@fields" => { "t" => "@4000000050d506482dbdf024" } }) do
-      insist { subject.timestamp } == "2012-12-22T01:00:46.767Z"
+    sample("t" => "@4000000050d506482dbdf024") do
+      insist { subject.timestamp } == Time.iso8601("2012-12-22T01:00:46.767Z").utc
     end
   end
 
@@ -193,10 +196,9 @@ describe LogStash::Filters::Date do
 
     time = "2001-09-09T01:46:40.000Z"
 
-    sample({"@fields" => {"mydate" => time}}) do
+    sample("mydate" => time) do
       insist { subject["mydate"] } == time
-      insist { subject.timestamp } == time
-      insist { subject["@timestamp"] } == time
+      insist { subject["@timestamp"] } == Time.iso8601(time).utc
     end
   end
 
@@ -204,27 +206,13 @@ describe LogStash::Filters::Date do
     config <<-CONFIG
       filter {
         date {
-          match => [ "data.deep", "ISO8601" ]
+          match => [ "[data][deep]", "ISO8601" ]
        }
      }
    CONFIG
 
-    sample({ "@fields" => { "data" => { "deep" => "2013-01-01T00:00:00.000Z" } } }) do
-      insist { subject["@timestamp"] } == "2013-01-01T00:00:00.000Z"
-    end
-  end
-
-  describe "support deep field access" do
-    config <<-CONFIG
-      filter {
-        date {
-          match => [ "data\\.deep", "ISO8601" ]
-        }
-      }
-    CONFIG
-
-    sample({ "@fields" => { "data.deep" => "2013-01-01T00:00:00.000Z" } }) do
-      insist { subject["@timestamp"] } == "2013-01-01T00:00:00.000Z"
+    sample("data" => { "deep" => "2013-01-01T00:00:00.000Z" }) do
+      insist { subject["@timestamp"] } == Time.iso8601("2013-01-01T00:00:00.000Z").utc
     end
   end
 
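The match option above switches from a dotted path ("data.deep") to the bracketed field-reference syntax ("[data][deep]"). A purely illustrative sketch of how such a reference resolves against a nested event hash; the real lookup lives in logstash's field-reference code, and fetch_field below is a hypothetical helper:

    # Illustrative only: walk a "[a][b]" style reference through nested hashes.
    def fetch_field(event_hash, reference)
      keys = reference.scan(/\[([^\]]+)\]/).flatten   # "[data][deep]" => ["data", "deep"]
      keys.reduce(event_hash) { |node, key| node && node[key] }
    end

    event = { "data" => { "deep" => "2013-01-01T00:00:00.000Z" } }
    p fetch_field(event, "[data][deep]")   # => "2013-01-01T00:00:00.000Z"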
@@ -237,7 +225,7 @@ describe LogStash::Filters::Date do
       }
     CONFIG
 
-    sample({ "@fields" => { "thedate" => "2013/Apr/21" } }) do
+    sample("thedate" => "2013/Apr/21") do
       insist { subject["@timestamp"] } != "2013-04-21T00:00:00.000Z"
     end
   end
@@ -258,10 +246,9 @@ describe LogStash::Filters::Date do
       "2013 Jun 24 01:29:01" => "2013-06-24T08:29:01.000Z",
     }
     times.each do |input, output|
-      sample({"@fields" => {"mydate" => input}}) do
+      sample("mydate" => input) do
         insist { subject["mydate"] } == input
-        insist { subject.timestamp } == output
-        insist { subject["@timestamp"] } == output
+        insist { subject["@timestamp"] } == Time.iso8601(output).utc
       end
     end # times.each
   end