Mirror of https://github.com/elastic/logstash.git

Commit a9f1a98634: Merge branch 'ruby1.9'
23 changed files with 292 additions and 269 deletions
Gemfile (3 changed lines)

@@ -3,13 +3,14 @@ source :rubygems
gem "bunny" # for amqp support, MIT-style license
gem "uuidtools" # for naming amqp queues, License ???
gem "filewatch", "~> 0.2.5" # for file tailing, BSD License
gem "jls-grok", "~> 0.4.7" # for grok filter, BSD License
gem "jls-grok", "0.9.0" # for grok filter, BSD License
gem "jruby-elasticsearch", "~> 0.0.10" # BSD License
gem "stomp" # for stomp protocol, Apache 2.0 License
gem "json" # Ruby license
gem "awesome_print" # MIT License
gem "jruby-openssl" # For enabling SSL support, CPL/GPL 2.0

gem "minitest" # License: Ruby
gem "rack" # License: MIT
gem "mizuno" # License: Apache 2.0
gem "sinatra" # License: MIT-style

Gemfile.lock

@@ -12,12 +12,12 @@ GEM
json
gmetric (0.1.3)
haml (3.1.2)
jls-grok (0.4.7)
ffi (>= 0.6.3)
jls-grok (0.9.0)
jruby-elasticsearch (0.0.10)
jruby-openssl (0.7.4)
bouncy-castle-java
json (1.5.3-java)
minitest (2.5.1)
mizuno (0.4.0)
rack (>= 1.0.0)
mongo (1.3.1)

@@ -44,10 +44,11 @@ DEPENDENCIES
gelf
gmetric (~> 0.1.3)
haml
jls-grok (~> 0.4.7)
jls-grok (= 0.9.0)
jruby-elasticsearch (~> 0.0.10)
jruby-openssl
json
minitest
mizuno
mongo
rack
Makefile (38 changed lines)

@@ -3,13 +3,17 @@ VERSION=$(shell ruby -r./VERSION -e 'puts LOGSTASH_VERSION')
JRUBY_VERSION=1.6.4
JRUBY_URL=http://repository.codehaus.org/org/jruby/jruby-complete/$(JRUBY_VERSION)
JRUBY=vendor/jar/jruby-complete-$(JRUBY_VERSION).jar
JRUBYC=java -Djruby.compat.version=RUBY1_9 -jar $(PWD)/$(JRUBY) -S jrubyc
ELASTICSEARCH_VERSION=0.17.6
ELASTICSEARCH_URL=http://github.com/downloads/elasticsearch/elasticsearch
ELASTICSEARCH=vendor/jar/elasticsearch-$(ELASTICSEARCH_VERSION)

PLUGIN_FILES=$(shell git ls-files | egrep '^lib/logstash/(inputs|outputs|filters)/' | egrep -v '/base.rb$$')

default: compile
default: jar

debug:
echo $(JRUBY)

# Compile config grammar (ragel -> ruby)
.PHONY: compile-grammar

@@ -28,8 +32,8 @@ compile: compile-grammar compile-runner | build/ruby

.PHONY: compile-runner
compile-runner: build/ruby/logstash/runner.class
build/ruby/logstash/runner.class: lib/logstash/runner.rb | build/ruby
(cd lib; jrubyc -t ../build/ruby logstash/runner.rb)
build/ruby/logstash/runner.class: lib/logstash/runner.rb | build/ruby $(JRUBY)
(cd lib; JRUBY_OPTS=--1.9 $(JRUBYC) -t ../build/ruby logstash/runner.rb)

# TODO(sissel): Stop using cpio for this
.PHONY: copy-ruby-files

@@ -46,10 +50,26 @@ vendor:
vendor/jar: | vendor
mkdir $@

.PHONY: vendor-jruby
vendor-jruby: $(JRUBY)
$(JRUBY): | vendor/jar
wget -O $@ $(JRUBY_URL)/$(shell basename $@)
.PHONY: build-jruby
build-jruby: $(JRUBY)

$(JRUBY): build/jruby/jruby-1.6.4/lib/jruby-complete.jar | vendor/jar
cp $< $@

build/jruby: build
mkdir -p $@

build/jruby/jruby-1.6.4/lib/jruby-complete.jar: build/jruby/jruby-$(JRUBY_VERSION)
# Patch that, yo.
sed -i -e 's/jruby.default.ruby.version=.*/jruby.default.ruby.version=1.9/' $</default.build.properties
(cd $<; ant jar-jruby-complete)

build/jruby/jruby-$(JRUBY_VERSION): build/jruby/jruby-src-$(JRUBY_VERSION).tar.gz
tar -C build/jruby/ -zxf $<
# Build jruby from source targeted at 1.9

build/jruby/jruby-src-$(JRUBY_VERSION).tar.gz: | build/jruby
wget -O $@ http://jruby.org.s3.amazonaws.com/downloads/$(JRUBY_VERSION)/jruby-src-$(JRUBY_VERSION).tar.gz

vendor/jar/elasticsearch-$(ELASTICSEARCH_VERSION).tar.gz: | vendor/jar
@# --no-check-certificate is for github and wget not supporting wildcard

@@ -92,7 +112,7 @@ build/ruby: | build
# TODO(sissel): Skip sigar?
# Run this one always? Hmm..
.PHONY: build/monolith
build/monolith: vendor-elasticsearch vendor-jruby vendor-gems | build
build/monolith: $(ELASTICSEARCH) $(JRUBY) vendor-gems | build
build/monolith: compile copy-ruby-files
-mkdir -p $@
@# Unpack all the 3rdparty jars and any jars in gems

@@ -100,7 +120,7 @@ build/monolith: compile copy-ruby-files
| (cd $@; xargs -tn1 jar xf)
@# Purge any extra files we don't need in META-INF (like manifests and
@# TODO(sissel): Simplify this.
-rm -f $@/META-INF/{INDEX.LIST,MANIFEST.MF,ECLIPSEF.RSA,ECLIPSEF.SF}
-rm -f $@/META-INF/{INDEX.LIST,MANIFEST.MF,ECLIPSEF.RSA,ECLIPSEF.SF,BCKEY.SF,BCKEY.DSA,NOTICE{,.txt},LICENSE{,.txt}}

# Learned how to do pack gems up into the jar mostly from here:
# http://blog.nicksieger.com/articles/2009/01/10/jruby-1-1-6-gems-in-a-jar
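Editor's note: the new build-jruby targets above build a jruby-complete jar from source with default.build.properties patched so the interpreter defaults to Ruby 1.9. A minimal sketch of checking that result (not part of this commit; it assumes a `java` binary on PATH and the jar path used by the Makefile):

    # check_jruby_19.rb -- hypothetical helper, not included in this commit.
    # Runs the vendored jruby-complete jar and prints the Ruby level it reports.
    jar = "vendor/jar/jruby-complete-1.6.4.jar" # path taken from the Makefile above
    abort("#{jar} not built yet; run 'make build-jruby' first") unless File.exist?(jar)

    # RUBY_VERSION should report a 1.9 level once the patched jar is in place.
    version = `java -jar #{jar} -e 'puts RUBY_VERSION'`.strip
    puts "vendored JRuby reports Ruby #{version}"
    abort("expected a 1.9-level JRuby") unless version.start_with?("1.9")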
Rakefile (10 changed lines)

@@ -1,5 +1,5 @@
require "tempfile"
require "ftools" # fails in 1.9.2
require "fileutils"

require File.join(File.dirname(__FILE__), "VERSION") # For LOGSTASH_VERSION

@@ -131,12 +131,6 @@ namespace :package do
monolith_deps = [ "vendor:jruby", "vendor:gems", "vendor:elasticsearch", "compile" ]

namespace :monolith do
task :tar => monolith_deps do
paths = %w{ bin CHANGELOG CONTRIBUTORS etc examples Gemfile Gemfile.lock
INSTALL lib LICENSE patterns Rakefile README.md STYLE.md test
TODO USAGE vendor/bundle vendor/jar }
sh "tar -zcf logstash-monolithic-someversion.tar.gz #{paths.join(" ")}"
end # package:monolith:tar

task :jar => monolith_deps do
builddir = "build/monolith-jar"

@@ -162,7 +156,7 @@ namespace :package do
target = File.join(builddir, file.gsub("build/ruby/", ""))
mkdir_p File.dirname(target)
puts "=> Copying #{file} => #{target}"
File.copy(file, target)
FileUtils.copy(file, target)
end

# Purge any extra files we don't need in META-INF (like manifests and
@@ -1 +1 @@
LOGSTASH_VERSION = "1.0.17"
LOGSTASH_VERSION = "1.0.18pre1-ruby19"
@@ -348,7 +348,11 @@ class LogStash::Agent

# Create N filter-worker threads
if @filters.length > 0
1.times do |n|
@filters.each do |filter|
filter.logger = @logger
filter.register
end
2.times do |n|
# TODO(sissel): facter this out into a 'filterworker' that accepts
# 'shutdown'
# Start a filter worker
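Editor's note: the hunk above registers each filter once in the agent and then starts two filter-worker threads instead of one. A minimal sketch of that shape, using placeholder classes rather than the real LogStash::Agent and LogStash::FilterWorker:

    require "thread"

    # Stand-ins for configured plugins and events; purely illustrative.
    DummyEvent = Struct.new(:message) do
      def cancelled?; false; end
    end

    class DummyFilter
      def register; end      # one-time setup (compile patterns, etc.)
      def filter(event); end # mutate or cancel the event in place
    end

    filters      = [DummyFilter.new]
    input_queue  = Queue.new
    output_queue = Queue.new

    # Register each filter exactly once, before any worker thread starts.
    filters.each(&:register)

    # Then spawn a small fixed pool of workers that only run events through filters.
    workers = Array.new(2) do
      Thread.new do
        while (event = input_queue.pop)
          break if event == :shutdown
          filters.each { |f| f.filter(event) }
          output_queue.push(event) unless event.cancelled?
        end
      end
    end

    input_queue.push(DummyEvent.new("hello"))
    2.times { input_queue.push(:shutdown) }
    workers.each(&:join)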
@@ -96,7 +96,7 @@ class LogStash::Filters::Grok < LogStash::Filters::Base
public
def register
gem "jls-grok", ">=0.4.3"
require "grok" # rubygem 'jls-grok'
require "grok-pure" # rubygem 'jls-grok'

@patternfiles = []
@patterns_dir += @@patterns_path.to_a

@@ -134,6 +134,8 @@ class LogStash::Filters::Grok < LogStash::Filters::Base
next if ["add_tag", "add_field", "type", "match", "patterns_dir",
"drop_if_match", "named_captures_only", "pattern",
"break_on_match" ].include?(field)
patterns = [patterns] if patterns.is_a?(String)

if !@patterns.include?(field)
@patterns[field] = Grok::Pile.new
add_patterns_from_files(@patternfiles, @patterns[field])
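Editor's note: the filter now loads the pure-Ruby grok implementation from the pinned jls-grok 0.9.0 gem (require "grok-pure") instead of the FFI-backed require "grok". A standalone sketch of that API, assuming the gem is installed and a pattern file exists at the path shown (both assumptions, not part of this commit):

    require "rubygems"
    gem "jls-grok", "0.9.0"  # same pin as the Gemfile above
    require "grok-pure"      # pure-Ruby Grok class from jls-grok

    grok = Grok.new
    grok.add_patterns_from_file("patterns/grok-patterns") # assumed checkout-relative path
    grok.compile("%{SYSLOGBASE}")

    match = grok.match("Dec 23 14:30:01 louis sshd[22222]: subsystem request for sftp")
    if match
      # each_capture yields "PATTERN:name" keys and the matched text
      match.each_capture { |name, value| puts "#{name} => #{value}" }
    end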
@@ -15,10 +15,10 @@ class LogStash::FilterWorker < LogStash::Plugin

def run
# for each thread.
@filters.each do |filter|
filter.logger = @logger
filter.register
end
#@filters.each do |filter|
#filter.logger = @logger
#filter.register
#end

while event = @input_queue.pop
if event == LogStash::SHUTDOWN

@@ -59,7 +59,7 @@ class LogStash::FilterWorker < LogStash::Plugin
end
end # @filters.each

@logger.debug(["Event finished filtering", event])
@logger.debug(["Event finished filtering", { :event => event, :thread => Thread.current[:name] }])
@output_queue.push(event) unless event.cancelled?
end # events.each
end # def filter
@@ -9,7 +9,7 @@ require "logstash/outputs/base"
# uses for it's client. Currently we use elasticsearch 0.17.6
#
# You can learn more about elasticseasrch at <http://elasticsearch.org>
class LogStash::Outputs::Elasticsearch < LogStash::Outputs::Base
class LogStash::Outputs::ElasticSearch < LogStash::Outputs::Base

config_name "elasticsearch"
@@ -2,11 +2,17 @@ require "rubygems"
$: << File.join(File.dirname(__FILE__), "..")
$: << File.join(File.dirname(__FILE__), "..", "..", "test")
require "logstash/namespace"
require "java"

class LogStash::Runner
def main(args)
$: << File.join(File.dirname(__FILE__), "../")

if args.empty?
$stderr.puts "No arguments given."
java.lang.System.exit(1)
end

@runners = []
while !args.empty?
#p :args => args
@@ -65,9 +65,10 @@ class LogStash::Test
return true
end

def run_tests
def run_tests(args)
require "logstash_test_runner"
return Test::Unit::AutoRunner.run
return MiniTest::Unit.new.run(args)
#return Test::Unit::AutoRunner.run
end # def run_tests

def run(args)

@@ -81,7 +82,7 @@ class LogStash::Test
@success = false
end

if !run_tests
if !run_tests(args)
puts "Test suite failed."
@success = false
end
@@ -32,7 +32,7 @@ Gem::Specification.new do |spec|
spec.add_dependency "bunny" # for amqp support
spec.add_dependency "uuidtools" # for naming amqp queues
spec.add_dependency "filewatch", "~> 0.2.3" # for file tailing
spec.add_dependency "jls-grok", "~> 0.4.7" # for grok filter
spec.add_dependency "jls-grok", "~> 0.5.2" # for grok filter
spec.add_dependency "jruby-elasticsearch", "~> 0.0.7"
spec.add_dependency "stomp" # for stomp protocol
spec.add_dependency "json"
@@ -7,12 +7,11 @@ BASE16NUM (?<![0-9A-Fa-f])(?:[+-]?(?:0x)?(?:[0-9A-Fa-f]+))
BASE16FLOAT \b(?<![0-9A-Fa-f.])(?:[+-]?(?:0x)?(?:(?:[0-9A-Fa-f]+(?:\.[0-9A-Fa-f]*)?)|(?:\.[0-9A-Fa-f]+)))\b

POSINT \b(?:[0-9]+)\b
TWODIGITINT [0-9]{2}
WORD \b\w+\b
NOTSPACE \S+
DATA .*?
GREEDYDATA .*
QUOTEDSTRING (?:(?<!\\)(?:"(?:\\.|[^\\"])*"|(?:'(?:\\.|[^\\'])*')|(?:`(?:\\.|[^\\`])*`)))
QUOTEDSTRING (?:(?<!\\\\)(?:"(?:\\\\.|[^\\\\"])*"|(?:'(?:\\\\.|[^\\\\'])*')|(?:`(?:\\\\.|[^\\\\`])*`)))

# Networking
MAC (?:%{CISCOMAC}|%{WINDOWSMAC}|%{COMMONMAC})

@@ -27,21 +26,21 @@ HOSTPORT (?:%{IPORHOST=~/\./}:%{POSINT})

# paths
PATH (?:%{UNIXPATH}|%{WINPATH})
UNIXPATH (?<![\w\\/])(?:/(?:[\w_%!$@:.,-]+|\\.)*)+
UNIXPATH (?:/(?:[\w_%!$@:.,-]+|\\.)*)+
#UNIXPATH (?<![\w\/])(?:/[^\/\s?*]*)+
LINUXTTY (?:/dev/pts/%{POSINT})
BSDTTY (?:/dev/tty[pq][a-z0-9])
TTY (?:%{BSDTTY}|%{LINUXTTY})
WINPATH (?:[A-Za-z]+:|\\)(?:\\[^\\?*]*)+
WINPATH (?:[A-Za-z]+:|\\\\)(?:\\\\[^\\\\?*]*)+
URIPROTO [A-Za-z]+(\+[A-Za-z+]+)?
URIHOST %{IPORHOST}(?::%{POSINT})?
URIHOST %{IPORHOST}(?::%{POSINT:port})?
# uripath comes loosely from RFC1738, but mostly from what Firefox
# doesn't turn into %XX
URIPATH (?:/[A-Za-z0-9$.+!*'(),~:#%_-]*)+
#URIPARAM \?(?:[A-Za-z0-9]+(?:=(?:[^&]*))?(?:&(?:[A-Za-z0-9]+(?:=(?:[^&]*))?)?)*)?
URIPARAM \?[A-Za-z0-9$.+!*'(),~#%&/=:;_-]*
URIPATHPARAM %{URIPATH}(?:%{URIPARAM})?
URI %{URIPROTO}://(?:%{USER}(?::[^@]*)?@)?(?:%{URIHOST})?(?:$|%{URIPATHPARAM})
URI %{URIPROTO}://(?:%{USER}(?::[^@]*)?@)?(?:%{URIHOST})?(?:%{URIPATHPARAM})?

# Months: January, Feb, 3, 03, 12, December
MONTH \b(?:Jan(?:uary)?|Feb(?:ruary)?|Mar(?:ch)?|Apr(?:il)?|May|Jun(?:e)?|Jul(?:y)?|Aug(?:ust)?|Sep(?:tember)?|Oct(?:ober)?|Nov(?:ember)?|Dec(?:ember)?)\b

@@ -73,7 +72,7 @@ DATE %{DATE_US}|%{DATE_EU}
DATESTAMP %{DATE}[- ]%{TIME}
TZ (?:[PMCE][SD]T)
DATESTAMP_RFC822 %{DAY} %{MONTH} %{MONTHDAY} %{YEAR} %{TIME} %{TZ}
DATESTAMP_OTHER %{DAY} %{MONTH} %{MONTHDAY} %{TIME} (?:%{TZ} )?%{YEAR}
DATESTAMP_OTHER %{DAY} %{MONTH} %{MONTHDAY} %{TIME} %{TZ} %{YEAR}

# Syslog Dates: Month Day HH:MM:SS
SYSLOGTIMESTAMP %{MONTH} +%{MONTHDAY} %{TIME}

@@ -88,11 +87,4 @@ QS %{QUOTEDSTRING}

# Log formats
SYSLOGBASE %{SYSLOGTIMESTAMP:timestamp} (?:%{SYSLOGFACILITY} )?%{SYSLOGHOST:logsource} %{SYSLOGPROG}:
COMBINEDAPACHELOG %{IPORHOST:clientip} %{USER:ident} %{USER:auth} \[%{HTTPDATE:timestamp}\] "%{WORD:verb} %{URIPATHPARAM:request} HTTP/%{NUMBER:httpversion}" %{NUMBER:response} (?:%{NUMBER:bytes}|-) (?:"%{URI:referrer}"|%{QUOTEDSTRING:referrer}|"-") %{QS:agent}

#
# Custom formats
# Add additional custom patterns below
DATESTAMP_RAILS %{DAY} %{MONTH} %{MONTHDAY} %{TIME} (?:%{INT:ZONE} )?%{YEAR}
DATESTAMP_MYSQL %{TWODIGITINT:year}%{TWODIGITINT:month}%{TWODIGITINT:day}\s+%{TIME}

COMBINEDAPACHELOG %{IPORHOST:clientip} %{USER:ident} %{USER:auth} \[%{HTTPDATE:timestamp}\] "%{WORD:verb} %{URIPATHPARAM:request} HTTP/%{NUMBER:httpversion}" %{NUMBER:response} (?:%{NUMBER:bytes}|-) "(?:%{URI:referrer}|-)" %{QS:agent}
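Editor's note: the new TWODIGITINT and DATESTAMP_MYSQL entries above capture MySQL-style "YYMMDD HH:MM:SS" stamps into year/month/day fields. A small sketch of exercising them with jls-grok; the gem, the pattern-file path, and the sample line are assumptions, not part of this commit:

    require "rubygems"
    require "grok-pure" # jls-grok, as pinned in the Gemfile above

    grok = Grok.new
    grok.add_patterns_from_file("patterns/grok-patterns") # assumed checkout-relative path
    grok.compile("%{DATESTAMP_MYSQL}")

    m = grok.match("111023 14:30:01  1 Connect  root@localhost on")
    if m
      m.each_capture do |name, value|
        puts "#{name} => #{value}" # expect the year/month/day and TIME captures
      end
    end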
src/net/logstash/logstash.java (new file, 34 lines)

@@ -0,0 +1,34 @@
/* This is the runner for logstash when it is packed up in a jar file.
* It exists to work around http://jira.codehaus.org/browse/JRUBY-6015
*/
package net.logstash;
import org.jruby.embed.ScriptingContainer;
import org.jruby.embed.PathType;
import org.jruby.CompatVersion;
import java.io.InputStream;

public class logstash {
private ScriptingContainer container;

public static void main(String[] args) {
// Malkovich malkovich? Malkovich!
logstash logstash = new logstash();
logstash.run(args);
} /* void main */


public logstash() {
this.container = new ScriptingContainer();
this.container.setCompatVersion(CompatVersion.RUBY1_9);
}

public void run(String[] args) {
final String script_path = "logstash/runner.rb";
ClassLoader loader = this.getClass().getClassLoader();
InputStream script = loader.getResourceAsStream(script_path);
//container.runScriptlet(PathType.RELATIVE, "logstash/runner.rb");
this.container.setArgv(args);
this.container.runScriptlet(script, script_path);
}
}
@@ -1,8 +1,6 @@
require "rubygems"
$:.unshift File.dirname(__FILE__) + "/../../../lib"
$:.unshift File.dirname(__FILE__) + "/../../"
require File.join(File.dirname(__FILE__), "..", "minitest")

require "test/unit"
require "logstash"
require "logstash/loadlibs"
require "logstash/filters"

@@ -10,15 +8,11 @@ require "logstash/filters/date"
require "logstash/event"
require "timeout"

class TestFilterDate < Test::Unit::TestCase

describe LogStash::Filters::Date do
# These tests assume a given timezone.
def setup
before do
ENV["TZ"] = "PST8PDT"
end

def test_name(name)
@typename = name.gsub(/[ ]/, "_")
@typename = "foozle"
end

def config(cfg)

@@ -33,8 +27,7 @@ class TestFilterDate < Test::Unit::TestCase
@filter.register
end

def test_iso8601
test_name "iso8601"
test "ISO8601 date parsing" do
config "field1" => "ISO8601"

times = {

@@ -63,11 +56,10 @@ class TestFilterDate < Test::Unit::TestCase
@filter.filter(event)
assert_equal(output, event.timestamp,
"Time '#{input}' should parse to '#{output}' but got '#{event.timestamp}'")
end
end # def test_iso8601
end # times.each
end # testing ISO8601

def test_formats
test_name "format test"
test "parsing with java SimpleDateFormat syntax" do
#config "field1" => "%b %e %H:%M:%S"
config "field1" => "MMM dd HH:mm:ss"


@@ -85,10 +77,9 @@ class TestFilterDate < Test::Unit::TestCase
@filter.filter(event)
assert_equal(output, event.timestamp)
end
end # test_formats
end # SimpleDateFormat tests

def test_speed
test_name "speed test"
test "performance" do
config "field1" => "MMM dd HH:mm:ss"
iterations = 50000


@@ -101,10 +92,10 @@ class TestFilterDate < Test::Unit::TestCase
event = LogStash::Event.new
event.type = @typename
event.fields["field1"] = input
check_interval = 1500
check_interval = 997
max_duration = 10
Timeout.timeout(max_duration * 2) do
1.upto(50000).each do |i|
Timeout.timeout(max_duration) do
1.upto(iterations).each do |i|
@filter.filter(event)
if i % check_interval == 0
assert_equal(event.timestamp, output)

@@ -115,5 +106,5 @@ class TestFilterDate < Test::Unit::TestCase
duration = Time.now - start
puts "filters/date speed test; #{iterations} iterations: #{duration} seconds (#{iterations / duration} per sec)"
assert(duration < 10, "Should be able to do #{iterations} date parses in less than #{max_duration} seconds, got #{duration} seconds")
end # test_formats
end
end # performance test
end # describe LogStash::Filters::Date
@@ -1,28 +1,23 @@
require "rubygems"
$:.unshift File.dirname(__FILE__) + "/../../../lib"
$:.unshift File.dirname(__FILE__) + "/../../"
require File.join(File.dirname(__FILE__), "..", "minitest")

require "test/unit"
require "logstash/loadlibs"
require "logstash"
require "logstash/filters"
require "logstash/filters/grep"
require "logstash/event"

class TestFilterGrep < Test::Unit::TestCase
def setup
describe LogStash::Filters::Grep do
before do
@filter = LogStash::Filters.from_name("grep", {})
end # def setup

def test_name(name)
@typename = name
end # def test_name
@typename = "grepper"
end

def config(cfg)
cfg["type"] = @typename
cfg.each_key do |key|
if cfg[key].is_a?(String)
cfg[key] = cfg[key].to_a
cfg[key] = [cfg[key]]
end
end

@@ -30,8 +25,7 @@ class TestFilterGrep < Test::Unit::TestCase
@filter.register
end # def config

def test_single_match
test_name "single_match"
test "single grep match" do
config "str" => "test"

event = LogStash::Event.new

@@ -39,10 +33,9 @@ class TestFilterGrep < Test::Unit::TestCase
event["str"] = "test: this should not be dropped"
@filter.filter(event)
assert_equal(false, event.cancelled?)
end # def test_single_match
end # testing a single match

def test_single_match_drop
test_name "single_match_dropp"
test "single match failure cancels the event" do
config "str" => "test"

event = LogStash::Event.new

@@ -50,10 +43,9 @@ class TestFilterGrep < Test::Unit::TestCase
event["str"] = "foo: this should be dropped"
@filter.filter(event)
assert_equal(true, event.cancelled?)
end # def test_single_match_drop
end

def test_multiple_match
test_name "multiple_match"
test "multiple match conditions" do
config "str" => "test", "bar" => "baz"

event = LogStash::Event.new

@@ -64,8 +56,7 @@ class TestFilterGrep < Test::Unit::TestCase
assert_equal(false, event.cancelled?)
end # test_multiple_match

def test_multiple_match_drop
test_name "multiple_match_drop"
test "multiple match conditions should cancel on failure" do
config "str" => "test", "bar" => "baz"

event = LogStash::Event.new

@@ -76,8 +67,7 @@ class TestFilterGrep < Test::Unit::TestCase
assert_equal(true, event.cancelled?)
end # test_multiple_match_drop

def test_single_match_regexp
test_name "single_match_regexp"
test "single condition with regexp syntax" do
config "str" => "(?i)test.*foo"

event = LogStash::Event.new

@@ -85,10 +75,9 @@ class TestFilterGrep < Test::Unit::TestCase
event["str"] = "TeST regexp match FoO"
@filter.filter(event)
assert_equal(false, event.cancelled?)
end # def test_single_match_regexp
end

def test_single_match_regexp_drop
test_name "single_match_regexp_drop"
test "single condition with regexp syntax cancels on failure" do
config "str" => "test.*foo"

event = LogStash::Event.new

@@ -98,8 +87,7 @@ class TestFilterGrep < Test::Unit::TestCase
assert_equal(true, event.cancelled?)
end # def test_single_match_regexp_drop

def test_add_fields
test_name "add_field"
test "adding fields on success" do
config "str" => "test",
"add_field" => ["new_field", "new_value"]


@@ -110,8 +98,7 @@ class TestFilterGrep < Test::Unit::TestCase
assert_equal(["new_value"], event["new_field"])
end # def test_add_fields

def test_add_fields_with_format
test_name "add_field_with_format"
test "adding fields with a sprintf value" do
config "str" => "test",
"add_field" => ["new_field", "%{@type}"]


@@ -123,7 +110,6 @@ class TestFilterGrep < Test::Unit::TestCase
end # def test_add_fields_with_format

def __DISABLED_FOR_NOW_test_add_fields_multiple_match
test_name "add_fields_multiple_match"
#config "match" => {"str" => "test"},
#"add_fields" => {"new_field" => "new_value"}},
#"match" => {"str" => ".*"},

@@ -137,8 +123,7 @@ class TestFilterGrep < Test::Unit::TestCase
assert_equal(["new_value", "new_value_2"], event["new_field"])
end # def test_add_fields_multiple_match

def test_add_tags
test_name "add_tags"
test "add tags" do
config "str" => "test",
"add_tag" => ["new_tag"]


@@ -150,8 +135,7 @@ class TestFilterGrep < Test::Unit::TestCase
assert_equal(["tag", "new_tag"], event.tags)
end # def test_add_tags

def test_add_tags_with_format
test_name "add_tags_with_format"
test "add tags with sprintf value" do
config "str" => "test",
"add_tag" => ["%{str}"]
@@ -1,43 +1,33 @@
require "rubygems"
$:.unshift File.dirname(__FILE__) + "/../../../lib"
$:.unshift File.dirname(__FILE__) + "/../../"
require File.join(File.dirname(__FILE__), "..", "minitest")

require "test/unit"
require "logstash"
require "logstash/loadlibs"
require "logstash/filters"
require "logstash/filters/grok"
require "logstash/event"

class TestFilterGrok < Test::Unit::TestCase

def setup
@typename = nil
describe LogStash::Filters::Grok do
before do
@typename = "groktest"
@filter = nil
end

def test_name(name)
@typename = name.gsub(/[ ]/, "_")
end

def config(cfg)
cfg["type"] = @typename
cfg.each_key do |key|
if cfg[key].is_a?(String)
cfg[key] = cfg[key].to_a
cfg[key] = [cfg[key]]
end
end

p :config => cfg
p :filter => @filter
#p :config => cfg
#p :filter => @filter
@filter = LogStash::Filters::Grok.new(cfg)

@filter.register
#p :newfilter => @filter
end

def test_grok_normal
test_name "groknormal"
test "normal grok" do
config "pattern" => [ "%{SYSLOGLINE}" ]

event = LogStash::Event.new

@@ -54,19 +44,27 @@ class TestFilterGrok < Test::Unit::TestCase

@filter.filter(event)
assert_equal(event.fields["logsource"], [logsource],
"Expected field 'logsource' to be [#{logsource.inspect}], is #{event.fields["logsource"].inspect}")
"Expected field 'logsource' to be [#{logsource.inspect}], " \
"is #{event.fields["logsource"].inspect}")

assert_equal(event.fields["timestamp"], [timestamp], "Expected field 'timestamp' to be [#{timestamp.inspect}], is #{event.fields["timestamp"].inspect}")
assert_equal(event.fields["timestamp"], [timestamp],
"Expected field 'timestamp' to be [#{timestamp.inspect}], " \
"is #{event.fields["timestamp"].inspect}")

assert_equal(event.fields["message"], [message], "Expected field 'message' to be ['#{message.inspect}'], is #{event.fields["message"].inspect}")
assert_equal(event.fields["message"], [message],
"Expected field 'message' to be ['#{message.inspect}'], " \
"is #{event.fields["message"].inspect}")

assert_equal(event.fields["program"], [program], "Expected field 'program' to be ['#{program.inspect}'], is #{event.fields["program"].inspect}")
assert_equal(event.fields["program"], [program],
"Expected field 'program' to be ['#{program.inspect}'], " \
"is #{event.fields["program"].inspect}")

assert_equal(event.fields["pid"], [pid], "Expected field 'pid' to be ['#{pid.inspect}'], is #{event.fields["pid"].inspect}")
end # def test_grok_normal
assert_equal(event.fields["pid"], [pid],
"Expected field 'pid' to be ['#{pid.inspect}'], " \
"is #{event.fields["pid"].inspect}")
end # test normal

def test_grok_multiple_message
test_name "groknormal"
test "parsing an event with multiple messages (array of strings)" do
config "pattern" => [ "(?:hello|world) %{NUMBER}" ]

event = LogStash::Event.new

@@ -75,16 +73,15 @@ class TestFilterGrok < Test::Unit::TestCase

@filter.filter(event)
assert_equal(event.fields["NUMBER"].sort, ["12345", "23456"])
end # def test_grok_multiple_message
end # parsing event with multiple messages

def test_speed
test_name "grokspeed"
test "performance" do
config "pattern" => [ "%{SYSLOGLINE}" ]
puts "Doing performance test"

iterations = 5000
iterations = 50000

start = Time.now

event = LogStash::Event.new
event.type = @typename


@@ -96,20 +93,22 @@ class TestFilterGrok < Test::Unit::TestCase

event.message = "#{timestamp} #{logsource} #{program}[#{pid}]: #{message}"

check_interval = 1500
check_interval = 997
1.upto(iterations).each do |i|
event.fields.clear
@filter.filter(event)
end

duration = Time.now - start
max_duration = 10
puts "filters/grok speed test; #{iterations} iterations: #{duration} seconds (#{"%.3f" % (iterations / duration)} per sec)"
assert(duration < max_duration, "Should be able to do #{iterations} grok parses in less than #{max_duration} seconds, got #{duration} seconds")
end # test_formats
max_duration = 20
puts "filters/grok speed test; #{iterations} iterations: #{duration} " \
"seconds (#{"%.3f" % (iterations / duration)} per sec)"
assert(duration < max_duration,
"Should be able to do #{iterations} grok parses in less " \
"than #{max_duration} seconds, got #{duration} seconds")
end # performance test

def test_grok_type_hinting_int
test_name "groktypehinting_int"
test "grok pattern type coercion to integer" do
config "pattern" => [ "%{NUMBER:foo:int}" ]

event = LogStash::Event.new

@@ -125,10 +124,9 @@ class TestFilterGrok < Test::Unit::TestCase
assert_equal([expect], event.fields["foo"],
"Expected field 'foo' to be [#{expect.inspect}], is " \
"#{event.fields["expect"].inspect}")
end # def test_grok_type_hinting_int
end # test int type coercion

def test_grok_type_hinting_float
test_name "groktypehinting_float"
test "pattern type coercion to float" do
config "pattern" => [ "%{NUMBER:foo:float}" ]

event = LogStash::Event.new

@@ -138,12 +136,15 @@ class TestFilterGrok < Test::Unit::TestCase
event.message = "#{expect}"

@filter.filter(event)
assert_equal(expect.class, event.fields["foo"].first.class, "Expected field 'foo' to be of type #{expect.class.name} but got #{event.fields["foo"].first.class.name}")
assert_equal([expect], event.fields["foo"], "Expected field 'foo' to be [#{expect.inspect}], is #{event.fields["foo"].inspect}")
end # def test_grok_type_hinting_float
assert_equal(expect.class, event.fields["foo"].first.class,
"Expected field 'foo' to be of type #{expect.class.name} " \
"but got #{event.fields["foo"].first.class.name}")
assert_equal([expect], event.fields["foo"],
"Expected field 'foo' to be [#{expect.inspect}], " \
"is #{event.fields["foo"].inspect}")
end # test float coercion

def test_grok_inline_define
test_name "grok_inline_define"
test "in-line pattern definitions" do
config "pattern" => [ "%{FIZZLE=\\d+}" ]

event = LogStash::Event.new

@@ -153,12 +154,15 @@ class TestFilterGrok < Test::Unit::TestCase
event.message = "hello #{expect}"

@filter.filter(event)
assert_equal(expect.class, event.fields["FIZZLE"].first.class, "Expected field 'FIZZLE' to be of type #{expect.class.name} but got #{event.fields["FIZZLE"].first.class.name}")
assert_equal([expect], event.fields["FIZZLE"], "Expected field 'FIZZLE' to be [#{expect.inspect}], is #{event.fields["FIZZLE"].inspect}")
end # def test_grok_type_hinting_float
assert_equal(expect.class, event.fields["FIZZLE"].first.class,
"Expected field 'FIZZLE' to be of type #{expect.class.name} " \
"but got #{event.fields["FIZZLE"].first.class.name}")
assert_equal([expect], event.fields["FIZZLE"],
"Expected field 'FIZZLE' to be [#{expect.inspect}], " \
"is #{event.fields["FIZZLE"].inspect}")
end # test in-line definitions

def test_grok_field_name_attribute
test_name "grok_field_name_attribute"
test "processing fields other than the @message" do
config "rum" => [ "%{FIZZLE=\\d+}" ]

event = LogStash::Event.new

@@ -168,12 +172,15 @@ class TestFilterGrok < Test::Unit::TestCase
event.fields["rum"] = "hello #{expect}"

@filter.filter(event)
assert_equal(expect.class, event.fields["FIZZLE"].first.class, "Expected field 'FIZZLE' to be of type #{expect.class.name} but got #{event.fields["FIZZLE"].first.class.name}")
assert_equal([expect], event.fields["FIZZLE"], "Expected field 'FIZZLE' to be [#{expect.inspect}], is #{event.fields["FIZZLE"].inspect}")
end # def test_grok_field_name_attribute
assert_equal(expect.class, event.fields["FIZZLE"].first.class,
"Expected field 'FIZZLE' to be of type #{expect.class.name}, " \
"but got #{event.fields["FIZZLE"].first.class.name}")
assert_equal([expect], event.fields["FIZZLE"],
"Expected field 'FIZZLE' to be [#{expect.inspect}], " \
"is #{event.fields["FIZZLE"].inspect}")
end # test processing custom fields

def test_grok_field_name_and_pattern_coexisting
test_name "grok_field_name_attribute"
test "parsing custom fields and default @message" do
config "rum" => [ "%{FIZZLE=\\d+}" ], "pattern" => "%{WORD}", "break_on_match" => "false"

event = LogStash::Event.new

@@ -190,14 +197,12 @@ class TestFilterGrok < Test::Unit::TestCase
assert_equal([expect], event.fields["FIZZLE"],
"Expected field 'FIZZLE' to be [#{expect.inspect}], is " \
"#{event.fields["FIZZLE"].inspect}")

assert_equal(["something"], event.fields["WORD"],
"Expected field 'WORD' to be ['something'], is " \
"#{event.fields["WORD"].inspect}")
end # def test_grok_field_name_attribute

def test_add_fields
test_name "add_field"
test "adding fields on match" do
config "str" => "test",
"add_field" => ["new_field", "new_value"]


@@ -206,10 +211,9 @@ class TestFilterGrok < Test::Unit::TestCase
event["str"] = "test"
@filter.filter(event)
assert_equal(["new_value"], event["new_field"])
end # def test_add_fields
end # adding fields on match

def test_add_fields_does_not_occur_if_match_failed
test_name "add_field"
test "should not add fields if match fails" do
config "str" => "test",
"add_field" => ["new_field", "new_value"]


@@ -219,6 +223,5 @@ class TestFilterGrok < Test::Unit::TestCase
@filter.filter(event)
assert_equal(nil, event["new_field"],
"Grok should not add fields on failed matches")
end # def test_add_fields

end # class TestFilterGrok
end # should not add fields if match fails
end # tests for LogStash::Filters::Grok
@@ -1,21 +1,17 @@
require "rubygems"
$:.unshift File.dirname(__FILE__) + "/../../../lib"
$:.unshift File.dirname(__FILE__) + "/../../"
require "test/unit"
require File.join(File.dirname(__FILE__), "..", "minitest")

require "logstash"
require "logstash/filters"
require "logstash/filters/multiline"
require "logstash/event"

class TestFilterMultiline < Test::Unit::TestCase
def setup
@filter = LogStash::Filters.from_name("multiline", {})
end

def test_name(name)
@typename = name
describe LogStash::Filters::Multiline do
before do
@typename = "multiline-test"
end

# TODO(sissel): Refactor this into a reusable method.
def config(cfg)
cfg["type"] = @typename
cfg.each_key do |key|

@@ -28,8 +24,7 @@ class TestFilterMultiline < Test::Unit::TestCase
@filter.register
end

def test_with_next
test_name "with next"
test "using 'next' mode" do
config "pattern" => "\\.\\.\\.$", "what" => "next"

inputs = [

@@ -63,10 +58,9 @@ class TestFilterMultiline < Test::Unit::TestCase
expected_outputs.zip(outputs).each do |expected, actual|
assert_equal(expected, actual)
end
end # def test_with_next
end # test with what => 'next'

def test_with_previous
test_name "with previous"
test "using 'previous' mode" do
config "pattern" => "^\\s", "what" => "previous"

inputs = [

@@ -108,11 +102,10 @@ class TestFilterMultiline < Test::Unit::TestCase
expected_outputs.zip(outputs).each do |expected, actual|
assert_equal(expected, actual)
end
end
end # test using 'previous'

def test_with_negate_true
test "with negate => true" do
@logger = LogStash::Logger.new(STDERR)
test_name "with negate true"
config "pattern" => "^\\S", "what" => "previous", "negate" => "true"

inputs = [

@@ -153,11 +146,10 @@ class TestFilterMultiline < Test::Unit::TestCase
expected_outputs.zip(outputs).each do |expected, actual|
assert_equal(expected, actual)
end
end
end # negate tests

def test_with_negate_false
test "with negate => 'false'" do
@logger = LogStash::Logger.new(STDERR)
test_name "with negate true"
config "pattern" => "^\\s", "what" => "previous", "negate" => "false"

inputs = [

@@ -198,5 +190,5 @@ class TestFilterMultiline < Test::Unit::TestCase
expected_outputs.zip(outputs).each do |expected, actual|
assert_equal(expected, actual)
end
end
end
end # negate false
end # tests for LogStash::Filters::Multiline
test/logstash/minitest.rb (new file, 22 lines)

@@ -0,0 +1,22 @@
require "rubygems"
$:.unshift File.dirname(__FILE__) + "/../../lib"
$:.unshift File.dirname(__FILE__) + "/../"

require "minitest/spec"
require "logstash"

# Autorun if the caller script is the name of the process.
# Sort of like 'if $0 == __FILE__' but works with require()
parent = caller.collect {
|c| c.gsub(/:[0-9]+(:.*)$/, "")
}.find { |c| c != __FILE__ }
require "minitest/autorun" if parent == $0

# I don't really like monkeypatching, but whatever, this is probably better
# than overriding the 'describe' method.
class MiniTest::Spec
class << self
# 'it' sounds wrong, call it 'test'
alias :test :it
end
end
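Editor's note: the helper above aliases MiniTest::Spec's it to test and auto-runs when the spec file is the entry point. A minimal spec written against it might look like this (hypothetical file under test/logstash/, mirroring the converted tests elsewhere in this commit):

    require "rubygems"
    $:.unshift File.dirname(__FILE__) + "/../../lib"
    require File.join(File.dirname(__FILE__), "minitest") # the helper added above

    require "logstash/event"

    describe LogStash::Event do
      before do
        @event = LogStash::Event.new
        @event.message = "hello world"
      end

      # 'test' is the alias for 'it' provided by the helper.
      test "keeps the message it was given" do
        assert_equal("hello world", @event.message)
      end
    end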
@@ -1,6 +1,5 @@
require "rubygems"
$:.unshift File.dirname(__FILE__) + "/../../../lib"
$:.unshift File.dirname(__FILE__) + "/../../"
require File.join(File.dirname(__FILE__), "..", "minitest")

require "logstash/loadlibs"
require "logstash/testcase"

@@ -11,40 +10,23 @@ require "logstash/search/elasticsearch"
require "logstash/search/query"

require "tmpdir"
#require "spoon" # rubygem 'spoon' - implements posix_spawn via FFI

class TestOutputElasticSearch < Test::Unit::TestCase
ELASTICSEARCH_VERSION = "0.16.0"

def setup
# TODO(sissel): elasticsearch somehow picks the old pwd when doing this,
# not sure why
#@tmpdir = Dir.mktmpdir
#puts "Using tempdir: #{@tmpdir}"
#@pwd = Dir.pwd
#Dir.chdir(@tmpdir)
#puts "Dir: #{Dir.pwd}"
describe LogStash::Outputs::ElasticSearch do
before do
FileUtils.rm_r("data") if File.exists?("data")
@output = LogStash::Outputs::Elasticsearch.new({
@output = LogStash::Outputs::ElasticSearch.new({
"type" => ["foo"],
"embedded" => ["true"],
})
@output.register
end # def setup
end # before

def teardown
after do
@output.teardown
FileUtils.rm_r("data") if File.exists?("data")
#Dir.chdir(@pwd)
#if @tmpdir !~ /^\/tmp/
#$stderr.puts("Tempdir is '#{@tmpdir}' - not in /tmp, I won't " \
#"remove in case it's not safe.")
#else
#FileUtils.rm_r(@tmpdir)
#end
end # def teardown
end # after

def test_elasticsearch_basic
test "elasticsearch basic output" do
events = []
myfile = File.basename(__FILE__)
1.upto(5).each do |i|

@@ -90,13 +72,5 @@ class TestOutputElasticSearch < Test::Unit::TestCase

sleep 0.2
end # while tries > 0
end # def test_elasticsearch_basic
end # class TestOutputElasticSearch

#class TestOutputElasticSearch0_15_1 < TestOutputElasticSearch
#ELASTICSEARCH_VERSION = self.name[/[0-9_]+/].gsub("_", ".")
#end # class TestOutputElasticSearch0_15_1

#class TestOutputElasticSearch0_13_1 < TestOutputElasticSearch
#ELASTICSEARCH_VERSION = self.name[/[0-9_]+/].gsub("_", ".")
#end # class TestOutputElasticSearch0_13_1
end # test_elasticsearch_basic
end # testing for LogStash::Outputs::ElasticSearch
@@ -1,30 +1,25 @@
require "rubygems"
$:.unshift File.dirname(__FILE__) + "/../../lib"
$:.unshift File.dirname(__FILE__) + "/../"

require "test/unit"
require "logstash"
require File.join(File.dirname(__FILE__), "minitest")
require "logstash/event"

class TestEvent < Test::Unit::TestCase
def test_name(name)
@typename = name
describe LogStash::Event do
before do
@event = LogStash::Event.new
@event.type = "sprintf"
@event.message = "hello world"
@event.source = "/home/foo"
end

def test_sprintf
test_name "sprintf"
event = LogStash::Event.new
event.type = @typename
event.message = "hello world"
event.source = "/home/foo"
event["test"] = "test"

test "sprintf method should work" do
@event["test"] = "test"
["@type", "@message", "@source", "test"].each do |name|
assert_equal(event[name], event.sprintf("%{#{name}}"))
assert_equal(@event[name], @event.sprintf("%{#{name}}"))
assert_equal("hello " + @event[name] + " world",
@event.sprintf("hello %{#{name}} world"))
end
end

event.fields["foo"] = ["one", "two", "three"]

assert_equal(event.fields["foo"].join(","), event.sprintf("%{foo}"))
end # def test_sprintf
end # TestEvent
test "sprintf should join array fields by comma" do
@event.fields["foo"] = ["one", "two", "three"]
assert_equal(@event.fields["foo"].join(","), @event.sprintf("%{foo}"))
end # sprintf testing
end # describe LogStash::Event
@@ -1,15 +1,24 @@
require "rubygems"
require "test/unit"
$:.unshift File.dirname(__FILE__) + "/../../lib"
$:.unshift File.dirname(__FILE__) + "/../"

class SyntaxCheckTest < Test::Unit::TestCase
def setup
@dir = File.dirname(__FILE__)
end
require "minitest/spec"
require "minitest/autorun" if $0 == __FILE__

def test_ruby_syntax
Dir["#{@dir}/../**/*.rb"].each do |path|
output = %x{ruby -c #{path} 2>&1}
assert_equal(0, $?.exitstatus, "Syntax error for #{path}: #{output}")
end
end
end
describe "syntax check" do
source = File.join(File.dirname(__FILE__), "..", "..", "lib", "logstash", "**", "*.rb")

Dir.glob(source).each do |path|
it "must load #{path} without syntax errors" do
# We could use 'load' here but that implies a bunch more than just syntax
# checking. Most especially it will fail if we try to use java libraries
# not currently in the classpath.
#begin
#load path
#rescue LoadError => e
#flunk("Error loading #{path}: #{e.inspect}")
#end
assert(system("ruby", "-c", path), "Error parsing #{path}")
end # syntax check a file
end # find all ruby files
end # syntax check
@@ -25,5 +25,3 @@ use "logstash/outputs/test_elasticsearch"
skip "logstash/inputs/test_file"
skip "logstash/inputs/test_syslog"
skip "logstash/inputs/test_stomp"

#Test::Unit::AutoRunner.run