mirror of https://github.com/elastic/logstash.git
synced 2025-04-24 06:37:19 -04:00
- Start using our own logger.
- Add LogStash::Time::to_iso8601 to convert DateTime objects to iso8601 format.
- Add a 'date' filter for using a specific field as the real timestamp for the
  event. Example filters:

      - date:
          linux-syslog:
            date: %b %e %H:%M:%S
          apache-access:
            timestamp: "%d/%b/%Y:%H:%M:%S %Z"

  Syntax is: <tag>: <field>: <format>
  Supported format is 'strftime'.
This commit is contained in:
parent bd65c610b4
commit 8cb4676420

5 changed files with 82 additions and 29 deletions
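The commit message says the supported format is 'strftime'. As a quick illustration (not part of the commit), here is how the two example formats above behave with Ruby's DateTime.strptime, which the new date filter below relies on; the sample log strings are made up:

    require "date"

    # linux-syslog style; the format has no year, so strptime falls back to a default year.
    puts DateTime.strptime("Oct 27 01:48:46", "%b %e %H:%M:%S")

    # apache-access style, including the timezone offset.
    puts DateTime.strptime("27/Oct/2010:01:48:46 -0700", "%d/%b/%Y:%H:%M:%S %Z")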
lib/logstash/agent.rb

@@ -6,16 +6,12 @@ require "logstash/outputs"
require "logstash/filters"
require "logstash/logging"

# TODO(sissel): Make our own logger.
require "logger"

# Collect logs, ship them out.
class LogStash::Agent
  attr_reader :config
  include LogStash::Logging

  def initialize(config)
    init_logging
    @logger = LogStash::Logger.new(STDERR)

    @config = config
    @outputs = []

@@ -62,16 +58,10 @@ class LogStash::Agent

    if @config.include?("filters")
      filters = @config["filters"]
      filters.each do |value|
        # If value is an array, then "filters" is a hash.
        if filters.is_a?(Hash)
          name, filterconfig = value
        else
          name = value
          filterconfig = {}
        end
        @logger.debug("Using filter #{name}")
        filter = LogStash::Filters.from_name(name, filterconfig)
      filters.collect { |x| x.to_a[0] }.each do |filter|
        name, value = filter
        @logger.debug("Using filter #{name} => #{value.inspect}")
        filter = LogStash::Filters.from_name(name, value)
        filter.register
        @filters << filter
      end # each filter
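A small sketch (not part of the commit) of what the agent's new filters.collect { |x| x.to_a[0] } line does: it turns a YAML-style list of single-entry hashes into [name, config] pairs so each filter can be looked up by name. The config literal below is assumed, modeled on the commit message:

    filters = [
      { "date" => { "linux-syslog" => { "date" => "%b %e %H:%M:%S" } } },
    ]

    filters.collect { |x| x.to_a[0] }.each do |name, value|
      puts "Using filter #{name} => #{value.inspect}"
      # The agent then calls LogStash::Filters.from_name(name, value),
      # registers the filter, and appends it to @filters.
    end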
lib/logstash/filters/date.rb (new file, 57 lines)

@@ -0,0 +1,57 @@
require "logstash/namespace"
require "logstash/time"
require "logstash/logging"

class LogStash::Filters::Date
  # The 'date' filter will take a value from your event and use it as the
  # event timestamp. This is useful for parsing logs generated on remote
  # servers or for importing old logs.
  #
  # The config looks like this:
  #
  # filters:
  #   date:
  #     tagname1:
  #       <fieldname>: <format>
  #     tagname2:
  #       <fieldname>: <format>
  def initialize(config = {})
    @config = config
    @tags = Hash.new { |h,k| h[k] = [] }
    @logger = LogStash::Logger.new(STDERR)
  end # def initialize

  def register
    @config.each do |tag, tagconfig|
      @tags[tag] << tagconfig
    end # @config.each
  end # def register

  def filter(event)
    return unless event.include?("tags")
    event["tags"].each do |tag|
      next unless @tags.include?(tag)
      @tags[tag].each do |tagconfig|
        tagconfig.each do |field, format|
          #if event.include?(field) or (event["fields"].include?(field) rescue false)
            #value = (event[field] or event["fields"][field])
          if (event["fields"].include?(field) rescue false)
            fieldvalue = event["fields"][field]
            #fieldvalue = [fieldvalue] if fieldvalue.is_a?(String)
            @logger.info fieldvalue
            fieldvalue.each do |value|
              #value = event["fields"][field]
              begin
                time = DateTime.strptime(value, format)
                event["timestamp"] = LogStash::Time.to_iso8601(time)
                @logger.debug "Parsed #{value.inspect} as #{event["timestamp"]}"
              rescue => e
                @logger.warn "Failed parsing date #{value.inspect} from field #{field} with format #{format.inspect}. Exception: #{e}"
              end
            end # fieldvalue.each
          end # if this event has a field we expect to be a timestamp
        end # tagconfig.each
      end # @tags[tag].each
    end # event["tags"].each
  end # def filter
end # class LogStash::Filters::Date
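A hedged usage sketch (not part of the commit) for the new filter, using the event shape its code implies: a hash with a "tags" array and a "fields" hash whose values are arrays. It assumes this repo's lib/ directory is on the load path:

    require "date"
    require "logstash/filters/date"

    filter = LogStash::Filters::Date.new(
      "apache-access" => { "timestamp" => "%d/%b/%Y:%H:%M:%S %Z" }
    )
    filter.register

    event = {
      "tags"   => ["apache-access"],
      "fields" => { "timestamp" => ["27/Oct/2010:01:48:46 -0700"] },
    }
    filter.filter(event)
    puts event["timestamp"]   # the filter overwrites this with an ISO8601 string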
lib/logstash/filters/grok.rb

@@ -61,8 +61,5 @@ class LogStash::Filters::Grok
    else
      event["PARSEFAILURE"] = 1
    end

    # TODO(sissel): Flatten single-entry arrays into a single value?
    return event
  end
end # class LogStash::Filters::Grok
lib/logstash/logger.rb

@@ -1,5 +1,6 @@
require "logstash/namespace"
require "logger"
require "ap"

class LogStash::Logger < Logger
  def initialize(*args)

@@ -9,12 +10,13 @@ class LogStash::Logger < Logger
  end
end

class LogStash::Logger::Formatter #< Logger::Formatter
class LogStash::Logger::Formatter < Logger::Formatter
  # [:call, "INFO", Wed Oct 27 01:48:46 -0700 2010, nil, {"hello"=>12345}]
  def call(level, timestamp, object)
    # TODO(sissel): implement
  end
end
  def call(level, timestamp, progname, object)
    #TODO(sissel): implement
    super(level, timestamp, progname, object.awesome_inspect)
  end
end # class LogStash::Logger::Formatter

#a =Logger.new(STDOUT)
#a.formatter = LogStash::Logger::Formatter.new
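A sketch (not part of the commit) of the logger the agent now constructs. Whether non-string objects actually go through LogStash::Logger::Formatter (and therefore awesome_inspect) depends on the initialize body hidden between the two hunks, so treat the comment on the last call as an assumption; the 'ap' gem and this repo's lib/ are assumed to be available:

    require "logstash/logger"

    logger = LogStash::Logger.new(STDERR)
    logger.info("agent starting")
    logger.debug({ "hello" => 12345 })   # non-string objects are rendered by the formatter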
lib/logstash/time.rb

@@ -7,14 +7,21 @@
# >> LogStash::Time.now.utc.to_iso8601
# => "2010-10-17 07:25:26.788704Z"
module LogStash; class Time < ::Time
  ISO8601 = "%Y-%m-%dT%H:%M:%S"

  # Return a string that is this time in ISO8601 format.
  def to_iso8601
    if self.utc?
      tz = "Z"
    else
      tz = self.strftime("%z")
    end
    tz = self.utc? ? "Z" : self.strftime("%z")
    # zero-pad tv_usec so the time string is sortable.
    return "%s.%06d%s" % [self.strftime("%Y-%m-%dT%H:%M:%S"), self.tv_usec, tz]
    return "%s.%06d%s" % [self.strftime(ISO8601), self.tv_usec, tz]
  end

  def self.to_iso8601(obj)
    if obj.is_a?(DateTime)
      tz = obj.offset == 0 ? "Z" : obj.strftime("%z")
      return "%s.%06d%s" % [obj.strftime(ISO8601), obj.sec_fraction.to_f, tz]
    else
      raise "Can't convert object of type #{obj.class} (#{obj}) to iso8601."
    end
  end
end; end # class LogStash::Time
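The DateTime branch of the new class method is the one the date filter exercises. A sketch (not part of the commit), assuming this repo's lib/ is on the load path:

    require "date"
    require "logstash/time"

    dt = DateTime.strptime("27/Oct/2010:01:48:46 -0700", "%d/%b/%Y:%H:%M:%S %Z")
    puts LogStash::Time.to_iso8601(dt)
    # => "2010-10-27T01:48:46.000000-0700"  ("Z" is used only when the offset is 0)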