Merge branch 'LOGSTASH-238' of github.com:logstash/logstash

Conflicts:
	Gemfile
	Gemfile.lock
This commit is contained in:
Jordan Sissel 2011-10-16 00:33:08 -07:00
commit 52a3f5c315
23 changed files with 190 additions and 252 deletions

View file

@ -4,7 +4,7 @@ def jruby?
return RUBY_ENGINE == "jruby"
end
gem "cabin" # for logging, Apache 2 license
gem "cabin", "0.1.3" # for logging. apache 2 license
gem "bunny" # for amqp support, MIT-style license
gem "uuidtools" # for naming amqp queues, License ???
gem "filewatch", "~> 0.3.0" # for file tailing, BSD License

View file

@ -5,7 +5,7 @@ GEM
bouncy-castle-java (1.5.0146.1)
bson (1.4.0)
bson (1.4.0-java)
bunny (0.7.6)
bunny (0.7.8)
cabin (0.1.3)
json
filewatch (0.3.0)
@ -15,7 +15,7 @@ GEM
gmetric (0.1.3)
haml (3.1.3)
jls-grok (0.9.0)
jruby-elasticsearch (0.0.11)
jruby-elasticsearch (0.0.12)
jruby-openssl (0.7.4)
bouncy-castle-java
json (1.6.1)
@ -25,13 +25,16 @@ GEM
rack (>= 1.0.0)
mongo (1.4.0)
bson (= 1.4.0)
rack (1.3.3)
rack (1.3.4)
rack-protection (1.1.4)
rack
rake (0.9.2)
redis (2.2.2)
sass (3.1.7)
sinatra (1.2.6)
rack (~> 1.1)
tilt (>= 1.2.2, < 2.0)
sass (3.1.10)
sinatra (1.3.1)
rack (~> 1.3, >= 1.3.4)
rack-protection (~> 1.1, >= 1.1.2)
tilt (~> 1.3, >= 1.3.3)
statsd-ruby (0.3.0)
stomp (1.1.9)
tilt (1.3.3)
@ -45,7 +48,7 @@ PLATFORMS
DEPENDENCIES
awesome_print
bunny
cabin
cabin (= 0.1.3)
filewatch (~> 0.3.0)
gelf
gelfd (~> 0.1.0)

View file

@ -135,7 +135,7 @@ class LogStash::Agent
# These are 'unknown' flags that begin --<plugin>-flag
# Put any plugin paths into the ruby library path for requiring later.
@plugin_paths.each do |p|
@logger.debug("Adding #{p.inspect} to ruby load path")
@logger.debug("Adding to ruby load path", :path => p)
$:.unshift p
end
@ -158,16 +158,17 @@ class LogStash::Agent
%w{inputs outputs filters}.each do |component|
@plugin_paths.each do |path|
plugin = File.join(path, component, name) + ".rb"
@logger.debug("Flag #{arg} found; trying to load #{plugin}")
@logger.debug("Plugin flag found; trying to load it",
:flag => arg, :plugin => plugin)
if File.file?(plugin)
@logger.info("Loading plugin #{plugin}")
@logger.info("Loading plugin", :plugin => plugin)
require plugin
[LogStash::Inputs, LogStash::Filters, LogStash::Outputs].each do |c|
# If we get flag --foo-bar, check for LogStash::Inputs::Foo
# and add any options to our option parser.
klass_name = name.capitalize
if c.const_defined?(klass_name)
@logger.debug("Found plugin class #{c}::#{klass_name})")
@logger.debug("Found plugin class", :class => "#{c}::#{klass_name}")
klass = c.const_get(klass_name)
# See LogStash::Config::Mixin::DSL#options
klass.options(@opts)
@ -187,7 +188,7 @@ class LogStash::Agent
begin
remainder = @opts.parse(args)
rescue OptionParser::InvalidOption => e
@logger.info e
@logger.info("Invalid option", :exception => e)
raise e
end
@ -197,22 +198,22 @@ class LogStash::Agent
private
def configure
if @config_path && @config_string
@logger.fatal "Can't use -f and -e at the same time"
@logger.fatal("Can't use -f and -e at the same time")
raise "Configuration problem"
elsif (@config_path.nil? || @config_path.empty?) && @config_string.nil?
@logger.fatal "No config file given. (missing -f or --config flag?)"
@logger.fatal @opts.help
@logger.fatal("No config file given. (missing -f or --config flag?)")
@logger.fatal(@opts.help)
raise "Configuration problem"
end
#if @config_path and !File.exist?(@config_path)
if @config_path and Dir.glob(@config_path).length == 0
@logger.fatal "Config file '#{@config_path}' does not exist."
@logger.fatal("Config file does not exist.", :path => @config_path)
raise "Configuration problem"
end
if @daemonize
@logger.fatal "Can't daemonize, no support yet in JRuby."
@logger.fatal("Can't daemonize, no support yet in JRuby.")
raise "Can't daemonize, no fork in JRuby."
end
@ -227,14 +228,14 @@ class LogStash::Agent
end
if @verbose >= 3 # Uber debugging.
@logger.level = Logger::DEBUG
@logger.level = :debug
$DEBUG = true
elsif @verbose == 2 # logstash debug logs
@logger.level = Logger::DEBUG
@logger.level = :debug
elsif @verbose == 1 # logstash info logs
@logger.level = Logger::INFO
@logger.level = :info
else # Default log level
@logger.level = Logger::WARN
@logger.level = :warn
end
end # def configure
@ -243,7 +244,8 @@ class LogStash::Agent
# Support directory of config files.
# https://logstash.jira.com/browse/LOGSTASH-106
if File.directory?(@config_path)
@logger.debug("Loading '#{@config_path}' as directory")
@logger.debug("Config path is a directory, scanning files",
:path => @config_path)
paths = Dir.glob(File.join(@config_path, "*")).sort
else
# Get a list of files matching a glob. If the user specified a single
@ -330,7 +332,7 @@ class LogStash::Agent
private
def start_input(input)
@logger.debug(["Starting input", input])
@logger.debug("Starting input", :plugin => input)
# inputs should write directly to output queue if there are no filters.
input_target = @filters.length > 0 ? @filter_queue : @output_queue
@plugins[input] = Thread.new(input, input_target) do |*args|
@ -340,7 +342,7 @@ class LogStash::Agent
private
def start_output(output)
@logger.debug(["Starting output", output])
@logger.debug("Starting output", :plugin => output)
queue = SizedQueue.new(10)
@output_queue.add_queue(queue)
@output_plugin_queues[output] = queue
@ -462,9 +464,9 @@ class LogStash::Agent
finished_queue = Queue.new
# Tell everything to shutdown.
@logger.debug(plugins.keys.collect(&:to_s))
@logger.debug("Plugins to shutdown", :plugins => plugins.keys.collect(&:to_s))
plugins.each do |p, thread|
@logger.debug("Telling to shutdown: #{p.to_s}")
@logger.debug("Sending shutdown to: #{p.to_s}", :plugin => p)
p.shutdown(finished_queue)
end
@ -474,7 +476,7 @@ class LogStash::Agent
while remaining.size > 0
if (Time.now > force_shutdown_time)
@logger.warn("Time to quit, even if some plugins aren't finished yet.")
@logger.warn("Stuck plugins? #{remaining.map(&:first).join(", ")}")
@logger.warn("Stuck plugins?", :remaining => remaining.map(&:first))
break
end
@ -485,9 +487,9 @@ class LogStash::Agent
sleep(1)
else
remaining = plugins.select { |p, thread| plugin.running? }
@logger.debug("#{p.to_s} finished, waiting on " \
"#{remaining.size} plugins; " \
"#{remaining.map(&:first).join(", ")}")
@logger.debug("Plugin #{p.to_s} finished, waiting on the rest.",
:count => remaining.size,
:remaining => remaining.map(&:first))
end
end # while remaining.size > 0
end
@ -506,7 +508,7 @@ class LogStash::Agent
config = read_config
reloaded_inputs, reloaded_filters, reloaded_outputs = parse_config(config)
rescue Exception => e
@logger.error "Aborting reload due to bad configuration: #{e}"
@logger.error("Aborting reload due to bad configuration", :exception => e)
return
end
@ -526,7 +528,7 @@ class LogStash::Agent
obsolete_plugins[p] = @plugins[p]
@plugins.delete(p)
else
@logger.warn("Couldn't find input plugin to stop: #{p}")
@logger.warn("Couldn't find input plugin to stop", :plugin => p)
end
end
@ -536,7 +538,7 @@ class LogStash::Agent
@plugins.delete(p)
@output_queue.remove_queue(@output_plugin_queues[p])
else
@logger.warn("Couldn't find output plugin to stop: #{p}")
@logger.warn("Couldn't find output plugin to stop", :plugin => p)
end
end
@ -548,7 +550,7 @@ class LogStash::Agent
deleted_filters.each {|f| obsolete_plugins[f] = nil}
if obsolete_plugins.size > 0
@logger.info("Stopping removed plugins:\n\t" + obsolete_plugins.keys.join("\n\t"))
@logger.info("Stopping removed plugins:", :plugins => obsolete_plugins.keys)
shutdown_plugins(obsolete_plugins)
end
# require 'pry'; binding.pry()
@ -556,7 +558,7 @@ class LogStash::Agent
# Start up filters
if new_filters.size > 0 || deleted_filters.size > 0
if new_filters.size > 0
@logger.info("Starting new filters: #{new_filters.join(', ')}")
@logger.info("Starting new filters", :plugins => new_filters)
new_filters.each do |f|
f.logger = @logger
f.register
@ -569,13 +571,13 @@ class LogStash::Agent
end
if new_inputs.size > 0
@logger.info("Starting new inputs: #{new_inputs.join(', ')}")
@logger.info("Starting new inputs", :plugins => new_inputs)
new_inputs.each do |p|
start_input(p)
end
end
if new_outputs.size > 0
@logger.info("Starting new outputs: #{new_outputs.join(', ')}")
@logger.info("Starting new outputs", :plugins => new_outputs)
new_inputs.each do |p|
start_output(p)
end
@ -637,7 +639,7 @@ class LogStash::Agent
LogStash::Util::set_thread_name("input|#{input.to_s}")
input.logger = @logger
input.register
@logger.info("Input #{input.to_s} registered")
@logger.info("Input registered", :plugin => input)
@ready_queue << input
done = false
@ -646,12 +648,11 @@ class LogStash::Agent
input.run(queue)
done = true
rescue => e
@logger.warn(["Input #{input.to_s} thread exception", e])
@logger.debug(["Input #{input.to_s} thread exception backtrace",
e.backtrace])
@logger.error("Restarting input #{input.to_s} due to exception")
@logger.warn("Input thread exception", :plugin => input,
:exception => e, :backtrace => e.backtrace)
@logger.error("Restarting input due to exception", :plugin => input)
sleep(1)
retry # This jumps to the top of this proc (to the start of 'do')
retry # This jumps to the top of the 'begin'
end
end
@ -668,7 +669,7 @@ class LogStash::Agent
def run_filter(filterworker, index, output_queue)
LogStash::Util::set_thread_name("filter|worker|#{index}")
filterworker.run
@logger.warn("Filter worker ##{index} shutting down")
@logger.warn("Filter worker shutting down", :index => index)
# If we get here, the plugin finished, check if we need to shutdown.
shutdown_if_none_running(LogStash::FilterWorker, output_queue) unless @reloading
@ -679,26 +680,25 @@ class LogStash::Agent
LogStash::Util::set_thread_name("output|#{output.to_s}")
output.logger = @logger
output.register
@logger.info("Output #{output.to_s} registered")
@logger.info("Output registered", :plugin => output)
@ready_queue << output
# TODO(sissel): We need a 'reset' or 'restart' method to call on errors
begin
while event = queue.pop do
@logger.debug("Sending event to #{output.to_s}")
@logger.debug("Sending event", :target => output)
output.handle(event)
end
rescue Exception => e
@logger.warn(["Output #{output.to_s} thread exception", e])
@logger.debug(["Output #{output.to_s} thread exception backtrace",
e.backtrace])
@logger.warn("Output thread exception", :plugin => output,
:exception => e, :backtrace => e.backtrace)
# TODO(sissel): should we abort after too many failures?
sleep(1)
retry
end # begin/rescue
@logger.warn("Output #{input.to_s} shutting down")
@logger.warn("Output shutting down", :plugin => output)
# If we get here, the plugin finished, check if we need to shutdown.
shutdown_if_none_running(LogStash::Outputs::Base) unless @reloading
@ -714,7 +714,8 @@ class LogStash::Agent
remaining = @plugins.count do |plugin, thread|
plugin.is_a?(pluginclass) and plugin.running?
end
@logger.debug("#{pluginclass} still running: #{remaining}")
@logger.debug("Plugins still running", :type => pluginclass,
:remaining => remaining)
if remaining == 0
@logger.debug("All #{pluginclass} finished. Shutting down.")

View file

@ -11,7 +11,7 @@ class LogStash::Config::File
def initialize(path=nil, string=nil)
@path = path
@string = string
@logger = Logger.new(STDERR)
@logger = LogStash::Logger.new(STDERR)
if (path.nil? and string.nil?) or (!path.nil? and !string.nil?)
raise "Must give path or string, not both or neither"

View file

@ -44,7 +44,7 @@ module LogStash::Config::Mixin
# Validation will modify the values inside params if necessary.
# For example: converting a string to a number, etc.
if !self.class.validate(params)
@logger.error "Config validation failed."
@logger.error("Config validation failed.")
exit 1
end
@ -53,7 +53,7 @@ module LogStash::Config::Mixin
opts = self.class.get_config[name]
if opts && opts[:deprecated]
@logger.warn("Deprecated config item #{name.inspect} set " +
"in #{self.class.name}")
"in #{self.class.name}", :name => name, :plugin => self)
end
end

View file

@ -63,11 +63,12 @@ class LogStash::Filters::Base < LogStash::Plugin
(@add_field or {}).each do |field, value|
event[field] ||= []
event[field] << event.sprintf(value)
@logger.debug("filters/#{self.class.name}: adding #{value} to field #{field}")
@logger.debug("filters/#{self.class.name}: adding value to field",
:field => field, :value => value)
end
(@add_tag or []).each do |tag|
@logger.debug("filters/#{self.class.name}: adding tag #{tag}")
@logger.debug("filters/#{self.class.name}: adding tag", :tag => tag)
event.tags << event.sprintf(tag)
#event.tags |= [ event.sprintf(tag) ]
end

View file

@ -94,7 +94,8 @@ class LogStash::Filters::Date < LogStash::Filters::Base
missing = DATEPATTERNS.reject { |p| format.include?(p) }
end
@logger.debug "Adding type #{@type} with date config: #{field} => #{format}"
@logger.debug("Adding type with date config", :type => @type,
:field => field, :format => format)
@parsers[field] << {
:parser => parser.withOffsetParsed,
:missing => missing
@ -105,13 +106,13 @@ class LogStash::Filters::Date < LogStash::Filters::Base
public
def filter(event)
@logger.debug "DATE FILTER: received event of type #{event.type}"
@logger.debug("Date filter: received event", :type => event.type)
return unless event.type == @type
now = Time.now
@parsers.each do |field, fieldparsers|
@logger.debug "DATE FILTER: type #{event.type}, looking for field #{field.inspect}"
@logger.debug("Date filter: type #{event.type}, looking for field #{field.inspect}",
:type => event.type, :field => field)
# TODO(sissel): check event.message, too.
next unless event.fields.member?(field)
@ -165,12 +166,14 @@ class LogStash::Filters::Date < LogStash::Filters::Base
time = time.withZone(org.joda.time.DateTimeZone.forID("UTC"))
event.timestamp = time.to_s
#event.timestamp = LogStash::Time.to_iso8601(time)
@logger.debug "Parsed #{value.inspect} as #{event.timestamp}"
@logger.debug("Date parsing done", :value => value, :timestamp => event.timestamp)
rescue => e
@logger.warn "Failed parsing date #{value.inspect} from field #{field}: #{e}"
@logger.debug(["Backtrace", e.backtrace])
@logger.warn("Failed parsing date from field", :field => field,
:value => value, :exception => e,
:backtrace => e.backtrace)
# Raising here will bubble all the way up and cause an exit.
# TODO(sissel): Maybe we shouldn't raise?
# TODO(sissel): What do we do on a failure? Tag it like grok does?
#raise e
end # begin
end # fieldvalue.each

View file

@ -43,23 +43,25 @@ class LogStash::Filters::Grep < LogStash::Filters::Base
re = Regexp.new(pattern)
@patterns[field] << re
@logger.debug(["grep: #{@type}/#{field}", pattern, re])
@logger.debug("Registered grep", :type => @type, :field => field,
:pattern => pattern, :regexp => re)
end # @match.merge.each
end # def register
public
def filter(event)
if event.type != @type
@logger.debug("grep: skipping type #{event.type} from #{event.source}")
@logger.debug("grep: skipping non-matching event type", :type =>
event.type, :wanted_type => @type, :event => event)
return
end
@logger.debug(["Running grep filter", event.to_hash, config])
@logger.debug("Running grep filter", :event => event, :config => config)
matches = 0
@patterns.each do |field, regexes|
if !event[field]
@logger.debug(["Skipping match object, field not present", field,
event, event[field]])
@logger.debug("Skipping match object, field not present",
:field => field, :event => event)
next
end
@ -77,13 +79,13 @@ class LogStash::Filters::Grep < LogStash::Filters::Base
(event[field].is_a?(Array) ? event[field] : [event[field]]).each do |value|
if @negate
@logger.debug(["want negate match", re, value])
@logger.debug("negate match", :regexp => re, :value => value)
next if re.match(value)
@logger.debug(["grep not-matched (negate requsted)", { field => value }])
@logger.debug("grep not-matched (negate requested)", field => value)
else
@logger.debug(["want match", re, value])
@logger.debug("want match", :regexp => re, :value => value)
next unless re.match(value)
@logger.debug(["grep matched", { field => value }])
@logger.debug("grep matched", field => value)
end
match_count += 1
break
@ -92,10 +94,9 @@ class LogStash::Filters::Grep < LogStash::Filters::Base
if match_count == match_want
matches += 1
@logger.debug("matched all fields (#{match_count})")
@logger.debug("matched all fields", :count => match_count)
else
@logger.debug("match block failed " \
"(#{match_count}/#{match_want} matches)")
@logger.debug("match failed", :count => match_count, :wanted => match_want)
end # match["match"].each
end # @patterns.each
@ -111,6 +112,6 @@ class LogStash::Filters::Grep < LogStash::Filters::Base
return
end
@logger.debug(["Event after grep filter", event.to_hash])
@logger.debug("Event after grep filter", :event => event)
end # def filter
end # class LogStash::Filters::Grep

View file

@ -107,13 +107,13 @@ class LogStash::Filters::Grok < LogStash::Filters::Base
@patternfiles = []
@patterns_dir += @@patterns_path.to_a
@logger.info("Grok patterns path: #{@patterns_dir.join(":")}")
@logger.info("Grok patterns path", :patterns_dir => @patterns_dir)
@patterns_dir.each do |path|
# Can't read relative paths from jars, try to normalize away '../'
while path =~ /file:\/.*\.jar!.*\/\.\.\//
# replace /foo/bar/../baz => /foo/baz
path = path.gsub(/[^\/]+\/\.\.\//, "")
@logger.debug "In-jar path to read: #{path}"
@logger.debug("In-jar path to read", :path => path)
end
if File.directory?(path)
@ -121,14 +121,14 @@ class LogStash::Filters::Grok < LogStash::Filters::Base
end
Dir.glob(path).each do |file|
@logger.info("Grok loading patterns from #{file}")
@logger.info("Grok loading patterns from file", :path => file)
@patternfiles << file
end
end
@patterns = Hash.new { |h,k| h[k] = [] }
@logger.info(:match => @match)
@logger.info("Match data", :match => @match)
# TODO(sissel): Hash.merge actually overrides, not merges arrays.
# Work around it by implementing our own?
@ -144,9 +144,9 @@ class LogStash::Filters::Grok < LogStash::Filters::Base
@patterns[field] = Grok::Pile.new
add_patterns_from_files(@patternfiles, @patterns[field])
end
@logger.info(["Grok compile", { :field => field, :patterns => patterns }])
@logger.info("Grok compile", :field => field, :patterns => patterns)
patterns.each do |pattern|
@logger.debug(["regexp: #{@type}/#{field}", pattern])
@logger.debug("regexp: #{@type}/#{field}", :pattern => pattern)
@patterns[field].compile(pattern)
end
end # @config.each
@ -158,26 +158,23 @@ class LogStash::Filters::Grok < LogStash::Filters::Base
matched = false
# Only filter events we are configured for
if event.type != @type
return
end
if @type != event.type
@logger.debug("Skipping grok for event type=#{event.type} (wanted '#{@type}')")
@logger.debug("Skipping grok for event with wrong type",
:type => event.type, :wanted_type => @type)
return
end
@logger.debug(["Running grok filter", event])
@logger.debug("Running grok filter", :event => event)
done = false
@patterns.each do |field, pile|
break if done
if !event[field]
@logger.debug(["Skipping match object, field not present", field,
event, event[field]])
@logger.debug("Skipping match object, field not present",
:field => field, :event => event)
next
end
@logger.debug(["Trying pattern for type #{event.type}", { :pile => pile, :field => field }])
@logger.debug("Trying pattern", :pile => pile, :field => field)
(event[field].is_a?(Array) ? event[field] : [event[field]]).each do |fieldvalue|
grok, match = pile.match(fieldvalue)
next unless match
@ -207,13 +204,13 @@ class LogStash::Filters::Grok < LogStash::Filters::Base
if fieldvalue == value and field == "@message"
# Skip patterns that match the entire message
@logger.debug("Skipping capture '#{key}' since it matches the whole line.")
@logger.debug("Skipping capture since it matches the whole line.", :field => key)
next
end
if @named_captures_only && key =~ /^[A-Z]+/
@logger.debug("Skipping capture '#{key}' since it is not a named " \
"capture and named_captures_only is true.")
@logger.debug("Skipping capture since it is not a named " \
"capture and named_captures_only is true.", :field => key)
next
end
@ -240,7 +237,7 @@ class LogStash::Filters::Grok < LogStash::Filters::Base
event.tags << "_grokparsefailure"
end
@logger.debug(["Event now: ", event.to_hash])
@logger.debug("Event now: ", :event => event)
end # def filter
private
@ -259,8 +256,8 @@ class LogStash::Filters::Grok < LogStash::Filters::Base
# the end. I don't know if this is a bug or intentional, but we need
# to chomp it.
name, pattern = line.chomp.split(/\s+/, 2)
@logger.debug "Adding pattern '#{name}' from file #{path}"
@logger.debug name => pattern
@logger.debug("Adding pattern from file", :name => name,
:pattern => pattern, :path => path)
pile.add_pattern(name, pattern)
end
else

View file

@ -82,14 +82,13 @@ class LogStash::Filters::Multiline < LogStash::Filters::Base
public
def register
@logger.debug "Setting type #{@type.inspect} to the config #{@config.inspect}"
begin
@pattern = Regexp.new(@pattern)
rescue RegexpError => e
@logger.fatal(["Invalid pattern for multiline filter on type '#{@type}'",
@pattern, e])
@logger.fatal("Invalid pattern for multiline filter",
:pattern => @pattern, :exception => e, :backtrace => e.backtrace)
end
@logger.debug("Registered multiline plugin", :type => @type, :config => @config)
end # def register
public
@ -100,7 +99,8 @@ class LogStash::Filters::Multiline < LogStash::Filters::Base
key = [event.source, event.type]
pending = @pending[key]
@logger.debug(["Reg: ", @pattern, event.message, { :match => match, :negate => @negate }])
@logger.debug("Multiline", :pattern => @pattern, :message => event.message,
:match => match, :negate => @negate)
# Add negate option
match = (match and !@negate) || (!match and @negate)
@ -152,7 +152,8 @@ class LogStash::Filters::Multiline < LogStash::Filters::Base
end
end # if/else match
else
@logger.warn(["Unknown multiline 'what' value.", { :what => @what }])
# TODO(sissel): Make this part of the 'register' method.
@logger.warn("Unknown multiline 'what' value.", :what => @what)
end # case @what
if !event.cancelled?

View file

@ -30,8 +30,8 @@ class LogStash::Filters::Mutate < LogStash::Filters::Base
# TODO(sissel): Validate conversion requests if provided.
@convert.nil? or @convert.each do |field, type|
if !valid_conversions.include?(type)
@logger.error(["Invalid conversion type",
{ "type" => type, "expected one of" => valid_types }])
@logger.error("Invalid conversion type",
"type" => type, "expected one of" => valid_types)
# TODO(sissel): It's 2011, man, let's actually make like.. a proper
# 'configuration broken' exception
raise "Bad configuration, aborting."
@ -84,8 +84,8 @@ class LogStash::Filters::Mutate < LogStash::Filters::Base
# calls convert_{string,integer,float} depending on type requested.
converter = method("convert_" + type)
if original.is_a?(Hash)
@logger.debug(["I don't know how to type convert a hash, skipping",
{ "field" => field, "value" => original }])
@logger.debug("I don't know how to type convert a hash, skipping",
:field => field, :value => original)
next
elsif original.is_a?(Array)
value = original.map { |v| converter.call(v) }

View file

@ -48,7 +48,7 @@ class LogStash::Filters::Split < LogStash::Filters::Base
next if value.empty?
event_split = event.clone
@logger.debug(["Split event", { :value => value, :field => @field }])
@logger.debug("Split event", :value => value, :field => @field)
event_split[@field] = value
filter_matched(event_split)

View file

@ -58,15 +58,14 @@ class LogStash::FilterWorker < LogStash::Plugin
events << newevent
end
if event.cancelled?
@logger.debug({:message => "Event cancelled",
:event => event,
:filter => filter.class,
})
@logger.debug("Event cancelled", :event => event,
:filter => filter.class)
break
end
end # @filters.each
@logger.debug(["Event finished filtering", { :event => event, :thread => Thread.current[:name] }])
@logger.debug("Event finished filtering", :event => event,
:thread => Thread.current[:name])
@output_queue.push(event) unless event.cancelled?
end # events.each
end # def filter

View file

@ -81,11 +81,11 @@ class LogStash::Inputs::Base < LogStash::Plugin
fields = JSON.parse(raw)
fields.each { |k, v| event[k] = v }
rescue => e
@logger.warn({:message => "Trouble parsing json input",
:input => raw,
:source => source,
})
@logger.debug(["Backtrace", e.backtrace])
# TODO(sissel): Instead of dropping the event, should we treat it as
# plain text and try to do the best we can with it?
@logger.warn("Trouble parsing json input", :input => raw,
:source => source, :exception => e,
:backtrace => e.backtrace)
return nil
end
@ -98,11 +98,11 @@ class LogStash::Inputs::Base < LogStash::Plugin
begin
event = LogStash::Event.from_json(raw)
rescue => e
@logger.warn({:message => "Trouble parsing json_event input",
:input => raw,
:source => source,
})
@logger.debug(["Backtrace", e.backtrace])
# TODO(sissel): Instead of dropping the event, should we treat it as
# plain text and try to do the best we can with it?
@logger.warn("Trouble parsing json input", :input => raw,
:source => source, :exception => e,
:backtrace => e.backtrace)
return nil
end
else

View file

@ -27,15 +27,15 @@ class LogStash::Inputs::Exec < LogStash::Inputs::Base
public
def register
@logger.info(["Registering Exec Input", {:type => @type,
:command => @command, :interval => @interval}])
@logger.info("Registering Exec Input", :type => @type,
:command => @command, :interval => @interval)
end # def register
public
def run(queue)
loop do
start = Time.now
@logger.info(["Running exec", { :command => @command }]) if @debug
@logger.info("Running exec", :command => @command) if @debug
out = IO.popen(@command)
# out.read will block until the process finishes.
e = to_event(out.read, "exec://#{Socket.gethostname}/")
@ -44,17 +44,17 @@ class LogStash::Inputs::Exec < LogStash::Inputs::Base
duration = Time.now - start
if @debug
@logger.info(["Command completed",
{ :command => @command, :duration => duration } ])
@logger.info("Command completed", :command => @command,
:duration => duration)
end
# Sleep for the remainder of the interval, or 0 if the duration ran
# longer than the interval.
sleeptime = [0, @interval - duration].max
if sleeptime == 0
@logger.warn(["Execution ran longer than the interval. Skipping sleep...",
{ :command => @command, :duration => duration,
:interval => @interval }])
@logger.warn("Execution ran longer than the interval. Skipping sleep.",
:command => @command, :duration => duration,
:interval => @interval)
else
sleep(sleeptime)
end

View file

@ -47,7 +47,7 @@ class LogStash::Inputs::File < LogStash::Inputs::Base
def register
require "filewatch/tail"
LogStash::Util::set_thread_name("input|file|#{path.join(":")}")
@logger.info("Registering file input for #{path.join(":")}")
@logger.info("Registering file input", :path => @path)
end # def register
public
@ -67,7 +67,7 @@ class LogStash::Inputs::File < LogStash::Inputs::Base
tail.subscribe do |path, line|
source = "file://#{hostname}/#{path}"
@logger.debug({:path => path, :line => line})
@logger.debug("Received line", :path => path, :line => line)
e = to_event(line, source)
if e
queue << e

View file

@ -56,8 +56,7 @@ class LogStash::Inputs::Gelf < LogStash::Inputs::Base
begin
udp_listener(output_queue)
rescue => e
@logger.warn("gelf listener died: #{$!}")
@logger.debug(["Backtrace", e.backtrace])
@logger.warn("gelf listener died", :exception => e, :backtrace => e.backtrace)
sleep(5)
retry
end # begin
@ -66,7 +65,7 @@ class LogStash::Inputs::Gelf < LogStash::Inputs::Base
private
def udp_listener(output_queue)
@logger.info("Starting gelf listener on #{@host}:#{@port}")
@logger.info("Starting gelf listener", :address => "#{@host}:#{@port}")
if @udp
@udp.close_read

View file

@ -73,7 +73,7 @@ class LogStash::Inputs::Redis < LogStash::Inputs::Base
end
# end TODO
@logger.info "Registering redis #{identity}"
@logger.info("Registering redis", :identity => identity)
end # def register
# A string used to identify a redis instance in log messages
@ -101,8 +101,8 @@ class LogStash::Inputs::Redis < LogStash::Inputs::Base
event = to_event msg, identity
output_queue << event if event
rescue => e # parse or event creation error
@logger.error(["Failed to create event with '#{msg}'", e])
@logger.debug(["Backtrace", e.backtrace])
@logger.error("Failed to create event", :message => msg, :exception => e,
:backtrace => e.backtrace)
end
end
@ -115,16 +115,16 @@ class LogStash::Inputs::Redis < LogStash::Inputs::Base
private
def channel_listener(redis, output_queue)
redis.subscribe @key do |on|
on.subscribe do |ch, count|
@logger.info "Subscribed to #{ch} (#{count})"
on.subscribe do |channel, count|
@logger.info("Subscribed", :channel => channel, :count => count)
end
on.message do |ch, message|
on.message do |channel, message|
queue_event message, output_queue
end
on.unsubscribe do |ch, count|
@logger.info "Unsubscribed from #{ch} (#{count})"
on.unsubscribe do |channel, count|
@logger.info("Unsubscribed", :channel => channel, :count => count)
end
end
end
@ -132,16 +132,16 @@ class LogStash::Inputs::Redis < LogStash::Inputs::Base
private
def pattern_channel_listener(redis, output_queue)
redis.psubscribe @key do |on|
on.psubscribe do |ch, count|
@logger.info "Subscribed to #{ch} (#{count})"
on.psubscribe do |channel, count|
@logger.info("Subscribed", :channel => channel, :count => count)
end
on.pmessage do |ch, event, message|
queue_event message, output_queue
end
on.punsubscribe do |ch, count|
@logger.info "Unsubscribed from #{ch} (#{count})"
on.punsubscribe do |channel, count|
@logger.info("Unsubscribed", :channel => channel, :count => count)
end
end
end
@ -155,7 +155,8 @@ class LogStash::Inputs::Redis < LogStash::Inputs::Base
@redis ||= connect
self.send listener, @redis, output_queue
rescue => e # redis error
@logger.warn(["Failed to get event from redis #{@name}. ", e])
@logger.warn("Failed to get event from redis", :name => @name,
:exception => e, :backtrace => e.backtrace)
raise e
end
end # loop

View file

@ -61,8 +61,9 @@ class LogStash::Inputs::Syslog < LogStash::Inputs::Base
udp_listener(output_queue)
rescue => e
break if @shutdown_requested
@logger.warn("syslog udp listener died: #{$!}")
@logger.debug(["Backtrace", e.backtrace])
@logger.warn("syslog udp listener died",
:address => "#{@host}:#{@port}", :exception => e,
:backtrace => e.backtrace)
sleep(5)
retry
end # begin
@ -75,8 +76,9 @@ class LogStash::Inputs::Syslog < LogStash::Inputs::Base
tcp_listener(output_queue)
rescue => e
break if @shutdown_requested
@logger.warn("syslog tcp listener died: #{$!}")
@logger.debug(["Backtrace", e.backtrace])
@logger.warn("syslog tcp listener died",
:address => "#{@host}:#{@port}", :exception => e,
:backtrace => e.backtrace)
sleep(5)
retry
end # begin
@ -85,7 +87,7 @@ class LogStash::Inputs::Syslog < LogStash::Inputs::Base
private
def udp_listener(output_queue)
@logger.info("Starting syslog udp listener on #{@host}:#{@port}")
@logger.info("Starting syslog udp listener", :address => "#{@host}:#{@port}")
if @udp
@udp.close_read
@ -111,7 +113,7 @@ class LogStash::Inputs::Syslog < LogStash::Inputs::Base
private
def tcp_listener(output_queue)
@logger.info("Starting syslog tcp listener on #{@host}:#{@port}")
@logger.info("Starting syslog tcp listener", :address => "#{@host}:#{@port}")
@tcp = TCPServer.new(@host, @port)
@tcp_clients = []
@ -120,7 +122,7 @@ class LogStash::Inputs::Syslog < LogStash::Inputs::Base
@tcp_clients << client
Thread.new(client) do |client|
ip, port = client.peeraddr[3], client.peeraddr[1]
@logger.warn("got connection from #{ip}:#{port}")
@logger.info("new connection", :client => "#{ip}:#{port}")
LogStash::Util::set_thread_name("input|syslog|tcp|#{ip}:#{port}}")
if ip.include?(":") # ipv6
source = "syslog://[#{ip}]/"
@ -192,7 +194,7 @@ class LogStash::Inputs::Syslog < LogStash::Inputs::Base
@date_filter.filter(event)
else
@logger.info(["NOT SYSLOG", event.message])
@logger.info("NOT SYSLOG", :message => event.message)
url = "syslog://#{Socket.gethostname}/" if url == "syslog://127.0.0.1/"
# RFC3164 says unknown messages get pri=13

View file

@ -41,7 +41,7 @@ class LogStash::Inputs::Tcp < LogStash::Inputs::Base
public
def register
if server?
@logger.info("Starting tcp input listener on #{@host}:#{@port}")
@logger.info("Starting tcp input listener", :address => "#{@host}:#{@port}")
@server_socket = TCPServer.new(@host, @port)
end
end # def register
@ -67,10 +67,11 @@ class LogStash::Inputs::Tcp < LogStash::Inputs::Base
end
end # loop do
rescue => e
@logger.debug(["Closing connection with #{socket.peer}", $!])
@logger.debug(["Backtrace", e.backtrace])
@logger.debug("Closing connection", :client => socket.peer,
:exception => e, :backtrace => e.backtrace)
rescue Timeout::Error
@logger.debug("Closing connection with #{socket.peer} after read timeout")
@logger.debug("Closing connection after read timeout",
:client => socket.peer)
end # begin
begin
@ -95,7 +96,8 @@ class LogStash::Inputs::Tcp < LogStash::Inputs::Base
# monkeypatch a 'peer' method onto the socket.
s.instance_eval { class << self; include SocketPeer end }
@logger.debug("Accepted connection from #{s.peer} on #{@host}:#{@port}")
@logger.debug("Accepted connection", :client => s.peer,
:server => "#{@host}:#{@port}")
handle_socket(s, output_queue, "tcp://#{@host}:#{@port}/client/#{s.peer}")
end # Thread.start
end # loop
@ -103,7 +105,7 @@ class LogStash::Inputs::Tcp < LogStash::Inputs::Base
loop do
client_socket = TCPSocket.new(@host, @port)
client_socket.instance_eval { class << self; include SocketPeer end }
@logger.debug("Opened connection to #{client_socket.peer}")
@logger.debug("Opened connection", :client => "#{client_socket.peer}")
handle_socket(client_socket, output_queue, "tcp://#{client_socket.peer}/server")
end # loop
end

View file

@ -41,9 +41,9 @@ class LogStash::Inputs::Twitter < LogStash::Inputs::Base
#stream = TweetStream::Client.new(@user, @password.value)
#stream.track(*@keywords) do |status|
track(*@keywords) do |status|
@logger.debug :status => status
@logger.debug("twitter keyword track status", :status => status)
#@logger.debug("Got twitter status from @#{status[:user][:screen_name]}")
@logger.info("Got twitter status from @#{status["user"]["screen_name"]}")
@logger.info("Got twitter status", :user => status["user"]["screen_name"])
e = to_event(status["text"], "http://twitter.com/#{status["user"]["screen_name"]}/status/#{status["id"]}")
next unless e
@ -92,13 +92,13 @@ class LogStash::Inputs::Twitter < LogStash::Inputs::Base
response.read_body do |chunk|
#@logger.info("Twitter: #{chunk.inspect}")
buffer.extract(chunk).each do |line|
@logger.info("Twitter line: #{line.inspect}")
@logger.info("Twitter line", :line => line)
begin
status = JSON.parse(line)
yield status
rescue => e
@logger.error e
@logger.debug(["Backtrace", e.backtrace])
@logger.error("Error parsing json from twitter", :exception => e,
:backtrace => e.backtrace);
end
end # buffer.extract
end # response.read_body

View file

@ -1,91 +1,19 @@
require "logstash/namespace"
require "cabin"
require "logger"
class LogStash::Logger < Logger
# Try to load awesome_print, if it fails, log it later
# but otherwise we will continue to operate as normal.
begin
require "ap"
@@have_awesome_print = true
rescue LoadError => e
@@have_awesome_print = false
@@notify_awesome_print_load_failed = e
end
class LogStash::Logger < Cabin::Channel
public
def initialize(*args)
super(*args)
@formatter = LogStash::Logger::Formatter.new
super()
# Set default loglevel to WARN unless $DEBUG is set (run with 'ruby -d')
self.level = $DEBUG ? Logger::DEBUG: Logger::INFO
@level = $DEBUG ? :debug : :info
if ENV["LOGSTASH_DEBUG"]
self.level = Logger::DEBUG
self.level = :debug
end
@formatter.progname = self.progname = File.basename($0)
# Conditional support for awesome_print
if !@@have_awesome_print && @@notify_awesome_print_load_failed
debug [ "awesome_print not found, falling back to Object#inspect." \
"If you want prettier log output, run 'gem install "\
"awesome_print'",
{ :exception => @@notify_awesome_print_load_failed }]
# Only show this once.
@@notify_awesome_print_load_failed = nil
end
#self[:program] = File.basename($0)
subscribe(::Logger.new(*args))
end # def initialize
public
def level=(level)
super(level)
@formatter.level = level
end # def level=
end # class LogStash::Logger
# Implement a custom Logger::Formatter that uses awesome_inspect on non-strings.
class LogStash::Logger::Formatter < Logger::Formatter
attr_accessor :level
attr_accessor :progname
public
def call(severity, timestamp, who, object)
# override progname to be the caller if the log level threshold is DEBUG
# We only do this if the logger level is DEBUG because inspecting the
# stack and doing extra string manipulation can have performance impacts
# under high logging rates.
if @level == Logger::DEBUG
# callstack inspection, include our caller
# turn this: "/usr/lib/ruby/1.8/irb/workspace.rb:52:in `irb_binding'"
# into this: ["/usr/lib/ruby/1.8/irb/workspace.rb", "52", "irb_binding"]
#
# caller[3] is actually who invoked the Logger#<type>
# This only works if you use the severity methods
path, line, method = caller[3].split(/(?::in `|:|')/)
# Trim RUBYLIB path from 'file' if we can
#whence = $:.select { |p| path.start_with?(p) }[0]
whence = $:.detect { |p| path.start_with?(p) }
if !whence
# We get here if the path is not in $:
file = path
else
file = path[whence.length + 1..-1]
end
who = "#{file}:#{line}##{method}"
end
# Log like normal if we got a string.
if object.is_a?(String)
super(severity, timestamp, who, object)
else
# If we logged an object, use .awesome_inspect (or just .inspect)
# to stringify it for higher sanity logging.
if object.respond_to?(:awesome_inspect)
super(severity, timestamp, who, object.awesome_inspect)
else
super(severity, timestamp, who, object.inspect)
end
end
end # def call
end # class LogStash::Logger::Formatter

View file

@ -32,7 +32,7 @@ class LogStash::Plugin
# If you need to take special efforts to shutdown (like waiting for
# an operation to complete, etc)
teardown
@logger.info("Got shutdown signal for #{self}")
@logger.info("Received shutdown signal", :plugin => self)
@shutdown_queue = queue
if @plugin_state == :finished
@ -47,12 +47,12 @@ class LogStash::Plugin
public
def finished
if @shutdown_queue
@logger.info("Sending shutdown event to agent queue. (plugin #{to_s})")
@logger.info("Sending shutdown event to agent queue", :plugin => self)
@shutdown_queue << self
end
if @plugin_state != :finished
@logger.info("Plugin #{to_s} is finished")
@logger.info("Plugin is finished", :plugin => self)
@plugin_state = :finished
end
end # def finished