[RSPEC] Refactoring step 1

- Move helper functions in own modules and extend Rspec
- Refactor files into correct naming and paths
- Modify files to use new spec_helper and helpers
- Pin rspec to 2.14.x

Fixes #1758
This commit is contained in:
Richard Pijnenburg 2014-09-16 14:05:07 +00:00 committed by Jordan Sissel
parent f13a0cc0b0
commit 88a7ae3283
85 changed files with 244 additions and 298 deletions

View file

@ -111,8 +111,7 @@ class LogStash::Runner
require "rspec"
spec_path = File.expand_path(File.join(File.dirname(__FILE__), "/../../spec"))
$LOAD_PATH << spec_path
require "test_utils"
all_specs = Dir.glob(File.join(spec_path, "/**/*.rb"))
all_specs = Dir.glob(File.join(spec_path, "/**/*_spec.rb"))
rspec = LogStash::RSpecsRunner.new(args.empty? ? all_specs : args)
return rspec.run
end,

View file

@ -102,7 +102,7 @@ Gem::Specification.new do |gem|
gem.add_runtime_dependency "spoon" #(Apache 2.0 license)
gem.add_runtime_dependency "mocha" #(MIT license)
gem.add_runtime_dependency "shoulda" #(MIT license)
gem.add_runtime_dependency "rspec" #(MIT license)
gem.add_runtime_dependency "rspec", "~> 2.14.0" #(MIT license)
gem.add_runtime_dependency "insist", "1.0.0" #(Apache 2.0 license)
gem.add_runtime_dependency "rumbster" # For faking smtp in email tests (Apache 2.0 license)

View file

@ -1,4 +1,4 @@
require "test_utils"
require "spec_helper"
module ConditionalFanciness
def description
@ -47,7 +47,6 @@ describe "conditionals in output" do
end
describe "conditionals in filter" do
extend LogStash::RSpec
extend ConditionalFanciness
describe "simple" do

View file

@ -1,10 +1,9 @@
# encoding: utf-8
require "test_utils"
require "spec_helper"
require "logstash/filters/anonymize"
describe LogStash::Filters::Anonymize do
extend LogStash::RSpec
describe "anonymize ipaddress with IPV4_NETWORK algorithm" do
# The logstash config goes here.

View file

@ -1,11 +1,10 @@
# encoding: utf-8
require "test_utils"
require "spec_helper"
require "logstash/filters/checksum"
require 'openssl'
describe LogStash::Filters::Checksum do
extend LogStash::RSpec
LogStash::Filters::Checksum::ALGORITHMS.each do |alg|
describe "#{alg} checksum with single field" do

View file

@ -1,8 +1,7 @@
require "test_utils"
require "spec_helper"
require "logstash/filters/clone"
describe LogStash::Filters::Clone do
extend LogStash::RSpec
describe "all defaults" do
type "original"

View file

@ -1,10 +1,9 @@
# encoding: utf-8
require "test_utils"
require "spec_helper"
require "logstash/filters/csv"
describe LogStash::Filters::CSV do
extend LogStash::RSpec
describe "all defaults" do
# The logstash config goes here.

View file

@ -1,9 +1,8 @@
require "test_utils"
require "spec_helper"
require "logstash/filters/date"
puts "Skipping date performance tests because this ruby is not jruby" if RUBY_ENGINE != "jruby"
RUBY_ENGINE == "jruby" and describe LogStash::Filters::Date do
extend LogStash::RSpec
describe "giving an invalid match config, raise a configuration error" do
config <<-CONFIG

View file

@ -1,11 +1,9 @@
# encoding: utf-8
require "test_utils"
require "spec_helper"
require "logstash/filters/dns"
require "resolv"
describe LogStash::Filters::DNS do
extend LogStash::RSpec
before(:all) do
begin

View file

@ -1,8 +1,7 @@
require "test_utils"
require "spec_helper"
require "logstash/filters/drop"
describe LogStash::Filters::Drop do
extend LogStash::RSpec
describe "drop the event" do
config <<-CONFIG

View file

@ -1,10 +1,8 @@
# encoding: utf-8
require "test_utils"
require "spec_helper"
require "logstash/filters/fingerprint"
describe LogStash::Filters::Fingerprint do
extend LogStash::RSpec
describe "fingerprint ipaddress with IPV4_NETWORK method" do
config <<-CONFIG

View file

@ -1,8 +1,8 @@
require "test_utils"
require "spec_helper"
require "logstash/filters/geoip"
describe LogStash::Filters::GeoIP do
extend LogStash::RSpec
describe "defaults" do
config <<-CONFIG
filter {

View file

@ -1,5 +1,5 @@
# encoding: utf-8
require "test_utils"
require "spec_helper"
# Test suite for the grok patterns defined in patterns/java
# For each pattern:
@ -7,7 +7,6 @@ require "test_utils"
# - a sample is considered invalid i.e. "should NOT match" where message != result
#
describe "java grok pattern" do
extend LogStash::RSpec
describe "JAVACLASS" do
config <<-CONFIG

View file

@ -1,10 +1,8 @@
# encoding: utf-8
require "test_utils"
require "spec_helper"
require "logstash/filters/grok"
describe LogStash::Filters::Grok do
extend LogStash::RSpec
describe "simple syslog line" do
# The logstash config goes here.
@ -629,4 +627,22 @@ describe LogStash::Filters::Grok do
end
end
describe "grok with unicode" do
config <<-CONFIG
filter {
grok {
#pattern => "<%{POSINT:syslog_pri}>%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{PROG:syslog_program}(?:\[%{POSINT:syslog_pid}\])?: %{GREEDYDATA:syslog_message}"
pattern => "<%{POSINT:syslog_pri}>%{SPACE}%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{PROG:syslog_program}(:?)(?:\\[%{GREEDYDATA:syslog_pid}\\])?(:?) %{GREEDYDATA:syslog_message}"
}
}
CONFIG
sample "<22>Jan 4 07:50:46 mailmaster postfix/policy-spf[9454]: : SPF permerror (Junk encountered in record 'v=spf1 mx a:mail.domain.no ip4:192.168.0.4 <20>all'): Envelope-from: email@domain.no" do
insist { subject["tags"] }.nil?
insist { subject["syslog_pri"] } == "22"
insist { subject["syslog_program"] } == "postfix/policy-spf"
end
end
end

View file

@ -1,9 +1,8 @@
require "test_utils"
require "spec_helper"
require "logstash/filters/json"
require "logstash/timestamp"
describe LogStash::Filters::Json do
extend LogStash::RSpec
describe "parse message into the event" do
config <<-CONFIG

View file

@ -1,8 +1,7 @@
require "test_utils"
require "spec_helper"
require "logstash/filters/kv"
describe LogStash::Filters::KV do
extend LogStash::RSpec
describe "defaults" do
# The logstash config goes here.

View file

@ -1,3 +1,4 @@
require "spec_helper"
require "logstash/filters/metrics"
describe LogStash::Filters::Metrics do

View file

@ -1,12 +1,10 @@
# encoding: utf-8
require "test_utils"
require "spec_helper"
require "logstash/filters/multiline"
describe LogStash::Filters::Multiline do
extend LogStash::RSpec
describe "simple multiline" do
config <<-CONFIG
filter {

View file

@ -1,10 +1,9 @@
# encoding: utf-8
require "test_utils"
require "spec_helper"
require "logstash/filters/mutate"
describe LogStash::Filters::Mutate do
extend LogStash::RSpec
context "config validation" do
describe "invalid convert type should raise a configuration error" do

View file

@ -1,9 +1,8 @@
require "test_utils"
require "spec_helper"
require "logstash/filters/noop"
#NOOP filter is perfect for testing Filters::Base features with minimal overhead
describe LogStash::Filters::NOOP do
extend LogStash::RSpec
describe "adding multiple value to one field" do
config <<-CONFIG

View file

@ -1,10 +1,8 @@
# encoding: utf-8
require "test_utils"
require "spec_helper"
require "logstash/filters/split"
describe LogStash::Filters::Split do
extend LogStash::RSpec
describe "all defaults" do
config <<-CONFIG

View file

@ -1,9 +1,8 @@
require "test_utils"
require "spec_helper"
require "logstash/filters/spool"
#NOOP filter is perfect for testing Filters::Base features with minimal overhead
describe LogStash::Filters::Spool do
extend LogStash::RSpec
# spool test are really flush tests. spool does nothing more than waiting for flush to be called.

View file

@ -1,8 +1,7 @@
require "test_utils"
require "spec_helper"
require "logstash/filters/throttle"
describe LogStash::Filters::Throttle do
extend LogStash::RSpec
describe "no before_count" do
config <<-CONFIG

View file

@ -1,10 +1,9 @@
# encoding: utf-8
require "test_utils"
require "spec_helper"
require "logstash/filters/urldecode"
describe LogStash::Filters::Urldecode do
extend LogStash::RSpec
describe "urldecode of correct urlencoded data" do
# The logstash config goes here.

View file

@ -1,10 +1,9 @@
# encoding: utf-8
require "test_utils"
require "spec_helper"
require "logstash/filters/useragent"
describe LogStash::Filters::UserAgent do
extend LogStash::RSpec
describe "defaults" do
config <<-CONFIG
@ -25,7 +24,7 @@ describe LogStash::Filters::UserAgent do
end
end
describe "" do
describe "Without target field" do
config <<-CONFIG
filter {
useragent {

View file

@ -1,9 +1,8 @@
# encoding: utf-8
require "test_utils"
require "spec_helper"
require "logstash/filters/xml"
describe LogStash::Filters::Xml do
extend LogStash::RSpec
describe "parse standard xml (Deprecated checks)" do
config <<-CONFIG

View file

@ -1,5 +1,5 @@
# encoding: utf-8
require "test_utils"
require "spec_helper"
describe "LogStash::Inputs::Base#fix_streaming_codecs" do
it "should carry the charset setting along when switching" do

View file

@ -1,9 +1,9 @@
require "test_utils"
require "spec_helper"
require "socket"
require "tempfile"
describe "inputs/collectd", :socket => true do
extend LogStash::RSpec
udp_sock = UDPSocket.new(Socket::AF_INET)
describe "parses a normal packet" do

View file

@ -1,8 +1,8 @@
require "test_utils"
require "spec_helper"
require "logstash/inputs/elasticsearch"
describe "inputs/elasticsearch" do
extend LogStash::RSpec
search_response = <<-RESPONSE
{

View file

@ -1,10 +1,10 @@
# encoding: utf-8
require "test_utils"
require "spec_helper"
require "tempfile"
describe "inputs/file" do
extend LogStash::RSpec
describe "starts at the end of an existing file" do
tmp_file = Tempfile.new('logstash-spec-input-file')

View file

@ -1,8 +1,8 @@
require "test_utils"
require "spec_helper"
require "gelf"
describe "inputs/gelf" do
extend LogStash::RSpec
describe "reads chunked gelf messages " do
port = 12209

View file

@ -1,7 +1,7 @@
require "test_utils"
require "spec_helper"
describe "inputs/generator" do
extend LogStash::RSpec
context "performance", :performance => true do
event_count = 100000 + rand(50000)

View file

@ -7,7 +7,7 @@ require 'logstash/inputs/kafka'
require 'logstash/errors'
describe LogStash::Inputs::Kafka do
extend LogStash::RSpec
let (:kafka_config) {{"topic_id" => "test"}}

View file

@ -1,9 +1,9 @@
# encoding: utf-8
require "test_utils"
require "spec_helper"
require "tempfile"
describe "inputs/pipe" do
extend LogStash::RSpec
describe "echo" do
event_count = 1

View file

@ -1,4 +1,4 @@
require "test_utils"
require "spec_helper"
require "redis"
def populate(key, event_count)
@ -23,7 +23,7 @@ def process(pipeline, queue, event_count)
end # process
describe "inputs/redis", :redis => true do
extend LogStash::RSpec
describe "read events from a list" do
key = 10.times.collect { rand(10).to_s }.join("")

View file

@ -1,5 +1,5 @@
# encoding: utf-8
require "test_utils"
require "spec_helper"
require "socket"
require "logstash/inputs/stdin"

View file

@ -1,11 +1,11 @@
# coding: utf-8
require "test_utils"
require "spec_helper"
require "socket"
require "logstash/inputs/syslog"
require "logstash/event"
describe "inputs/syslog" do
extend LogStash::RSpec
it "should properly handle priority, severity and facilities", :socket => true do
port = 5511

View file

@ -1,12 +1,12 @@
# encoding: utf-8
require "test_utils"
require "spec_helper"
require "socket"
require "timeout"
require "logstash/json"
require "logstash/inputs/tcp"
describe LogStash::Inputs::Tcp do
extend LogStash::RSpec
context "codec (PR #1372)" do
it "switches from plain to line" do
@ -205,7 +205,7 @@ describe LogStash::Inputs::Tcp do
describe "one message per connection" do
event_count = 10
port = 5515
port = 5516
config <<-CONFIG
input {
tcp {
@ -224,7 +224,7 @@ describe LogStash::Inputs::Tcp do
socket.flush
socket.close
end
# wait till all events have been processed
Timeout.timeout(1) {sleep 0.1 while queue.size < event_count}
@ -239,7 +239,7 @@ describe LogStash::Inputs::Tcp do
describe "connection threads are cleaned up when connection is closed" do
event_count = 10
port = 5515
port = 5517
config <<-CONFIG
input {
tcp {

77
spec/logstash_helpers.rb Normal file
View file

@ -0,0 +1,77 @@
require "logstash/agent"
require "logstash/pipeline"
require "logstash/event"
module LogStashHelper
def config(configstr)
let(:config) { configstr }
end # def config
def type(default_type)
let(:default_type) { default_type }
end
def tags(*tags)
let(:default_tags) { tags }
puts "Setting default tags: #{@default_tags}"
end
def sample(sample_event, &block)
name = sample_event.is_a?(String) ? sample_event : LogStash::Json.dump(sample_event)
name = name[0..50] + "..." if name.length > 50
describe "\"#{name}\"" do
let(:pipeline) { LogStash::Pipeline.new(config) }
let(:event) do
sample_event = [sample_event] unless sample_event.is_a?(Array)
next sample_event.collect do |e|
e = { "message" => e } if e.is_a?(String)
next LogStash::Event.new(e)
end
end
let(:results) do
results = []
pipeline.instance_eval { @filters.each(&:register) }
event.each do |e|
pipeline.filter(e) {|new_event| results << new_event }
end
pipeline.flush_filters(:final => true) do |e|
results << e unless e.cancelled?
end
results
end
subject { results.length > 1 ? results: results.first }
it("when processed", &block)
end
end # def sample
def input(&block)
it "inputs" do
pipeline = LogStash::Pipeline.new(config)
queue = Queue.new
pipeline.instance_eval do
@output_func = lambda { |event| queue << event }
end
block.call(pipeline, queue)
pipeline.shutdown
end
end # def input
def agent(&block)
it("agent(#{caller[0].gsub(/ .*/, "")}) runs") do
pipeline = LogStash::Pipeline.new(config)
pipeline.run
block.call
end
end # def agent
end # module LogStash

View file

@ -0,0 +1,18 @@
require "spec_helper"
require "logstash/plugin"
require "logstash/json"
describe "outputs/cloudwatch" do
output = LogStash::Plugin.lookup("output", "cloudwatch").new
it "should register" do
expect {output.register}.to_not raise_error
end
it "should respond correctly to a receive call" do
event = LogStash::Event.new
expect { output.receive(event) }.to_not raise_error
end
end

View file

@ -1,10 +1,10 @@
require "csv"
require "tempfile"
require "test_utils"
require "spec_helper"
require "logstash/outputs/csv"
describe LogStash::Outputs::CSV do
extend LogStash::RSpec
describe "Write a single field to a csv file" do
tmpfile = Tempfile.new('logstash-spec-output-csv')

View file

@ -1,8 +1,8 @@
require "test_utils"
require "spec_helper"
require "logstash/json"
describe "outputs/elasticsearch_http", :elasticsearch => true do
extend LogStash::RSpec
describe "ship lots of events w/ default index_type" do
# Generate a random index name

View file

@ -1,10 +1,10 @@
require "test_utils"
require "spec_helper"
require "ftw"
require "logstash/plugin"
require "logstash/json"
describe "outputs/elasticsearch" do
extend LogStash::RSpec
it "should register" do
output = LogStash::Plugin.lookup("output", "elasticsearch").new("embedded" => "false", "protocol" => "transport", "manage_template" => "false")

View file

@ -1,9 +1,9 @@
require "test_utils"
require "spec_helper"
require "rumbster"
require "message_observers"
describe "outputs/email", :broken => true do
extend LogStash::RSpec
@@port=2525
let (:rumbster) { Rumbster.new(@@port) }

View file

@ -1,10 +1,10 @@
require "test_utils"
require "spec_helper"
require "logstash/outputs/file"
require "logstash/json"
require "tempfile"
describe LogStash::Outputs::File do
extend LogStash::RSpec
describe "ship lots of events to a file" do
event_count = 10000 + rand(500)

View file

@ -1,9 +1,9 @@
require "test_utils"
require "spec_helper"
require "logstash/outputs/graphite"
require "mocha/api"
describe LogStash::Outputs::Graphite, :socket => true do
extend LogStash::RSpec
describe "defaults should include all metrics" do
port = 4939

View file

@ -1,10 +1,10 @@
require "test_utils"
require "spec_helper"
require "logstash/outputs/redis"
require "logstash/json"
require "redis"
describe LogStash::Outputs::Redis, :redis => true do
extend LogStash::RSpec
describe "ship lots of events to a list" do
key = 10.times.collect { rand(10).to_s }.join("")

View file

@ -1,10 +1,10 @@
require "test_utils"
require "spec_helper"
require "logstash/outputs/statsd"
require "mocha/api"
require "socket"
describe LogStash::Outputs::Statsd do
extend LogStash::RSpec
port = 4399
udp_server = UDPSocket.new
udp_server.bind("127.0.0.1", port)

48
spec/spec_helper.rb Normal file
View file

@ -0,0 +1,48 @@
require "logstash/logging"
require 'logstash_helpers'
require "insist"
if ENV['COVERAGE']
require 'simplecov'
require 'coveralls'
SimpleCov.formatter = SimpleCov::Formatter::MultiFormatter[
SimpleCov::Formatter::HTMLFormatter,
Coveralls::SimpleCov::Formatter
]
SimpleCov.start do
add_filter 'spec/'
add_filter 'vendor/'
end
end
$TESTING = true
if RUBY_VERSION < "1.9.2"
$stderr.puts "Ruby 1.9.2 or later is required. (You are running: " + RUBY_VERSION + ")"
raise LoadError
end
$logger = LogStash::Logger.new(STDOUT)
if ENV["TEST_DEBUG"]
$logger.level = :debug
else
$logger.level = :error
end
puts("Using Accessor#strict_set for specs")
# mokey path LogStash::Event to use strict_set in tests
# ugly, I know, but this avoids adding conditionals in performance critical section
class LogStash::Event
def []=(str, value)
if str == TIMESTAMP && !value.is_a?(LogStash::Timestamp)
raise TypeError, "The field '@timestamp' must be a LogStash::Timestamp, not a #{value.class} (#{value})"
end
@accessors.strict_set(str, value)
end # def []=
end
RSpec.configure do |config|
config.extend LogStashHelper
config.filter_run_excluding :redis => true, :socket => true, :performance => true, :elasticsearch => true, :broken => true, :export_cypher => true
end

View file

@ -1,21 +0,0 @@
# This spec covers the question here:
# https://logstash.jira.com/browse/LOGSTASH-733
require "test_utils"
describe "LOGSTASH-733" do
extend LogStash::RSpec
describe "pipe-delimited fields" do
config <<-CONFIG
filter {
kv { field_split => "|" }
}
CONFIG
sample "field1=test|field2=another test|field3=test3" do
insist { subject["field1"] } == "test"
insist { subject["field2"] } == "another test"
insist { subject["field3"] } == "test3"
end
end
end

View file

@ -1,25 +0,0 @@
# encoding: utf-8
# This spec covers the question here:
# https://logstash.jira.com/browse/LOGSTASH-820
require "test_utils"
describe "LOGSTASH-820" do
extend LogStash::RSpec
describe "grok with unicode" do
config <<-CONFIG
filter {
grok {
#pattern => "<%{POSINT:syslog_pri}>%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{PROG:syslog_program}(?:\[%{POSINT:syslog_pid}\])?: %{GREEDYDATA:syslog_message}"
pattern => "<%{POSINT:syslog_pri}>%{SPACE}%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{PROG:syslog_program}(:?)(?:\\[%{GREEDYDATA:syslog_pid}\\])?(:?) %{GREEDYDATA:syslog_message}"
}
}
CONFIG
sample "<22>Jan 4 07:50:46 mailmaster postfix/policy-spf[9454]: : SPF permerror (Junk encountered in record 'v=spf1 mx a:mail.domain.no ip4:192.168.0.4 <20>all'): Envelope-from: email@domain.no" do
insist { subject["tags"] }.nil?
insist { subject["syslog_pri"] } == "22"
insist { subject["syslog_program"] } == "postfix/policy-spf"
end
end
end

View file

@ -1,7 +1,6 @@
require "test_utils"
require "spec_helper"
describe "..." do
extend LogStash::RSpec
describe "Akamai Grok pattern" do
config <<-'CONFIG'
filter {

View file

@ -1,7 +1,6 @@
require "test_utils"
require "spec_helper"
describe "http dates", :if => RUBY_ENGINE == "jruby" do
extend LogStash::RSpec
config <<-'CONFIG'
filter {

View file

@ -3,10 +3,10 @@
# This spec covers the question here:
# https://github.com/logstash/logstash/pull/375
require "test_utils"
require "spec_helper"
describe "pull #375" do
extend LogStash::RSpec
describe "kv after grok" do
config <<-CONFIG
filter {

View file

@ -1,143 +0,0 @@
# encoding: utf-8
require "logstash/json"
require "logstash/timestamp"
if ENV['COVERAGE']
require 'simplecov'
require 'coveralls'
SimpleCov.formatter = SimpleCov::Formatter::MultiFormatter[
SimpleCov::Formatter::HTMLFormatter,
Coveralls::SimpleCov::Formatter
]
SimpleCov.start do
add_filter 'spec/'
add_filter 'vendor/'
end
end
require "insist"
require "logstash/agent"
require "logstash/pipeline"
require "logstash/event"
require "logstash/logging"
require "insist"
require "stud/try"
$TESTING = true
if RUBY_VERSION < "1.9.2"
$stderr.puts "Ruby 1.9.2 or later is required. (You are running: " + RUBY_VERSION + ")"
$stderr.puts "Options for fixing this: "
$stderr.puts " * If doing 'ruby bin/logstash ...' add --1.9 flag to 'ruby'"
$stderr.puts " * If doing 'java -jar ... ' add -Djruby.compat.version=RUBY1_9 to java flags"
raise LoadError
end
$logger = LogStash::Logger.new(STDOUT)
if ENV["TEST_DEBUG"]
$logger.level = :debug
else
$logger.level = :error
end
puts("Using Accessor#strict_set for specs")
# mokey path LogStash::Event to use strict_set in tests
# ugly, I know, but this avoids adding conditionals in performance critical section
class LogStash::Event
def []=(str, value)
if str == TIMESTAMP && !value.is_a?(LogStash::Timestamp)
raise TypeError, "The field '@timestamp' must be a LogStash::Timestamp, not a #{value.class} (#{value})"
end
@accessors.strict_set(str, value)
end # def []=
end
RSpec.configure do |config|
config.filter_run_excluding :redis => true, :socket => true, :performance => true, :elasticsearch => true, :broken => true, :export_cypher => true
end
module LogStash
module RSpec
def config(configstr)
let(:config) { configstr }
end # def config
def type(default_type)
let(:default_type) { default_type }
end
def tags(*tags)
let(:default_tags) { tags }
puts "Setting default tags: #{@default_tags}"
end
def sample(sample_event, &block)
name = sample_event.is_a?(String) ? sample_event : LogStash::Json.dump(sample_event)
name = name[0..50] + "..." if name.length > 50
describe "\"#{name}\"" do
extend LogStash::RSpec
let(:pipeline) { LogStash::Pipeline.new(config) }
let(:event) do
sample_event = [sample_event] unless sample_event.is_a?(Array)
next sample_event.collect do |e|
e = { "message" => e } if e.is_a?(String)
next LogStash::Event.new(e)
end
end
let(:results) do
results = []
pipeline.instance_eval { @filters.each(&:register) }
event.each do |e|
pipeline.filter(e) {|new_event| results << new_event }
end
pipeline.flush_filters(:final => true) do |e|
results << e unless e.cancelled?
end
results
end
subject { results.length > 1 ? results: results.first }
it("when processed", &block)
end
end # def sample
def input(&block)
it "inputs" do
pipeline = LogStash::Pipeline.new(config)
queue = Queue.new
pipeline.instance_eval do
@output_func = lambda { |event| queue << event }
end
block.call(pipeline, queue)
pipeline.shutdown
end
end # def input
def agent(&block)
require "logstash/pipeline"
it("agent(#{caller[0].gsub(/ .*/, "")}) runs") do
pipeline = LogStash::Pipeline.new(config)
pipeline.run
block.call
end
end # def agent
end # module RSpec
end # module LogStash
class Shiftback
def initialize(&block)
@block = block
end
def <<(event)
@block.call(event)
end
end # class Shiftback

View file

@ -1,6 +1,6 @@
# encoding: utf-8
require "test_utils"
require "spec_helper"
require "logstash/util/accessors"
describe LogStash::Util::Accessors, :if => true do

View file

@ -1,6 +1,6 @@
# encoding: utf-8
require "test_utils"
require "spec_helper"
require "logstash/util/charset"
describe LogStash::Util::Charset do
@ -29,7 +29,8 @@ describe LogStash::Util::Charset do
["foo \xED\xB9\x81\xC3", "bar \xAD"].each do |data|
insist { data.encoding.name } == "UTF-8"
insist { data.valid_encoding? } == false
logger.should_receive(:warn).twice
expect(logger).to receive(:warn).exactly(2).times
#logger.should_receive(:warn).twice
insist { subject.convert(data) } == data.inspect[1..-2]
insist { subject.convert(data).encoding.name } == "UTF-8"
end

View file

@ -1,4 +1,4 @@
require "test_utils"
require "spec_helper"
require "logstash/util/fieldreference"
describe LogStash::Util::FieldReference, :if => true do

View file

@ -42,20 +42,22 @@ describe LogStash::Json do
context "jruby serialize" do
it "should respond to dump and serialize object" do
expect(JrJackson::Json).to receive(:dump).with(string).and_call_original
expect(JrJackson::Json).to receive(:dump).with(string).and_call_original
expect(LogStash::Json.dump(string)).to eql(json_string)
end
it "should call JrJackson::Raw.generate for Hash" do
expect(JrJackson::Raw).to receive(:generate).with(hash).and_call_original
#expect(JrJackson::Raw).to receive(:generate).with(hash).and_call_original
expect(LogStash::Json.dump(hash)).to eql(json_hash)
end
it "should call JrJackson::Raw.generate for Array" do
expect(JrJackson::Raw).to receive(:generate).with(array).and_call_original
#expect(JrJackson::Raw).to receive(:generate).with(array).and_call_original
expect(LogStash::Json.dump(array)).to eql(json_array)
end
end
else
### MRI specific