Merge branch 'master' of github.com:logstash/logstash

This commit is contained in:
Jordan Sissel 2011-05-20 12:23:26 -07:00
commit 27784a4c5a
7 changed files with 49 additions and 16 deletions

View file

@@ -1,5 +1,11 @@
1.0.10 (????)
...
- Fix tcp input bug (LOGSTASH-88) that would drop connections.
- Grok patterns_dir (filter config) and --grok-patterns-dir (cmdline opt)
are now working.
- GELF output now properly sends extra fields from the log event (prefixed
with a "_") and sets timestamp to seconds-since-epoch (millisecond
precision and time zone information is lost, but this is the format GELF
asks for).
1.0.9 (May 18, 2011)
- Fix crash bug caused by refactoring that left 'break' calls in code

View file

@@ -1,4 +1,4 @@
#!/usr/bin/env ruby
#!/usr/bin/env jruby
$: << File.dirname($0) + "/../lib"
$: << File.dirname($0) + "/../test"

View file

@@ -0,0 +1,20 @@
# Useful config for testing grok expressions (update "pattern" below)
input {
stdin {
type => test
}
}
filter {
grok {
type => "test"
pattern => "%{SYSLOGLINE}"
}
}
output {
stdout {
debug => true
}
}

View file

@@ -46,6 +46,11 @@ class LogStash::Event
def timestamp; @data["@timestamp"]; end # def timestamp
def timestamp=(val); @data["@timestamp"] = val; end # def timestamp=
def unix_timestamp
time = @@date_parser.parseDateTime(timestamp)
return time.getMillis.to_f / 1000
end
public
def source; @data["@source"]; end # def source
def source=(val)

View file

@@ -1,5 +1,6 @@
require "logstash/filters/base"
require "logstash/namespace"
require "set"
# Parse arbitrary text and structure it.
# Grok is currently the best way in logstash to parse crappy unstructured log
@@ -50,20 +51,17 @@ class LogStash::Filters::Grok < LogStash::Filters::Base
# requested in: googlecode/issue/26
config :drop_if_match, :validate => :boolean, :default => false
class << self
attr_accessor :patterns_dir
end
# Detect if we are running from a jarfile, pick the right path.
@@patterns_path ||= Set.new
if __FILE__ =~ /file:\/.*\.jar!.*/
self.patterns_dir = ["#{File.dirname(__FILE__)}/../../patterns/*"]
@@patterns_path += ["#{File.dirname(__FILE__)}/../../patterns/*"]
else
self.patterns_dir = ["#{File.dirname(__FILE__)}/../../../patterns/*"]
@@patterns_path += ["#{File.dirname(__FILE__)}/../../../patterns/*"]
end
# This flag becomes "--grok-patterns-path"
flag("--patterns-path PATH", "Colon-delimited path of patterns to load") do |val|
@patterns_dir += val.split(":")
@@patterns_path += val.split(":")
end
@@grokpiles = Hash.new { |h, k| h[k] = [] }
@@ -75,12 +73,14 @@ class LogStash::Filters::Grok < LogStash::Filters::Base
require "grok" # rubygem 'jls-grok'
@pile = Grok::Pile.new
@logger.info("Grok patterns paths: #{self.class.patterns_dir.inspect}")
self.class.patterns_dir.each do |path|
@patterns_dir ||= []
@patterns_dir += @@patterns_path.to_a
@logger.info("Grok patterns path: #{@patterns_dir.join(":")}")
@patterns_dir.each do |path|
# Can't read relative paths from jars, try to normalize away '../'
while path =~ /file:\/.*\.jar!.*\/\.\.\//
# replace /foo/bar/../baz => /foo/baz
path.gsub!(/[^\/]+\/\.\.\//, "")
path = path.gsub(/[^\/]+\/\.\.\//, "")
@logger.debug "In-jar path to read: #{path}"
end

View file

@@ -50,11 +50,11 @@ class LogStash::Inputs::Tcp < LogStash::Inputs::Base
"@tags" => @tags.clone,
})
e.source = "tcp://#{@host}:#{@port}/client/#{peer}"
@logger.debug(["Received message from #{peer}"], e)
@logger.debug(["Received message from #{peer}", e])
output_queue << e
end # loop do
rescue
@logger.debug("Closing connection with #{peer}")
@logger.debug(["Closing connection with #{peer}", $!])
rescue Timeout::Error
@logger.debug("Closing connection with #{peer} after read timeout")
end # begin

View file

@@ -69,15 +69,17 @@ class LogStash::Outputs::Gelf < LogStash::Outputs::Base
event.fields.each do |name, value|
next if value == nil or value.empty?
m["#{name}"] = value
name = "_id" if name == "id" # "_id" is reserved, so use "__id"
m["_#{name}"] = (value.length == 1) ? value.first : value
end
# Allow 'INFO' 'I' or number. for 'level'
level = event.sprintf(@level.to_s)
m["level"] = (@level_map[level.downcase] || level).to_i
m["facility"] = event.sprintf(@facility)
m["timestamp"] = event.timestamp
m["timestamp"] = event.unix_timestamp.to_i
@logger.debug(["Sending GELF event", m])
@gelf.notify!(m)
end # def receive
end # class LogStash::Outputs::Gelf