- copy tests from elasticsearch_http to test type settings

This commit is contained in:
Jordan Sissel 2013-10-03 11:57:05 -07:00
parent 1729db4f89
commit faf6023dea

View file

@@ -4,31 +4,29 @@ require "ftw"
describe "outputs/elasticsearch" do
extend LogStash::RSpec
describe "ship lots of events" do
describe "ship lots of events w/ default index_type" do
# Generate a random index name
index = 10.times.collect { rand(10).to_s }.join("")
type = 10.times.collect { rand(10).to_s }.join("")
# Write about 10000 events. Add jitter to increase likeliness of finding
# boundary-related bugs.
event_count = 10000 + rand(500)
embedded_http_port = rand(20000) + 10000
flush_size = rand(200) + 1
config <<-CONFIG
input {
generator {
message => "hello world"
count => #{event_count}
type => "generator"
type => "#{type}"
}
}
output {
elasticsearch {
embedded => true
embedded_http_port => #{embedded_http_port}
cluster => "#{index}"
host => "127.0.0.1"
index => "#{index}"
index_type => "testing"
flush_size => #{flush_size}
}
}
CONFIG
@@ -39,16 +37,125 @@ describe "outputs/elasticsearch" do
#
# We try multiple times to allow final agent flushes as well as allowing
# elasticsearch to finish processing everything.
ftw = FTW::Agent.new
ftw.post!("http://localhost:9200/#{index}/_flush")
# Wait until all events are available.
Stud::try(10.times) do
ftw = FTW::Agent.new
data = ""
response = ftw.get!("http://127.0.0.1:#{embedded_http_port}/#{index}/_count?q=*")
response = ftw.get!("http://127.0.0.1:9200/#{index}/_count?q=*")
response.read_body { |chunk| data << chunk }
count = JSON.parse(data)["count"]
result = JSON.parse(data)
count = result["count"]
insist { count } == event_count
end
puts "Rate: #{event_count / @duration}/sec"
response = ftw.get!("http://127.0.0.1:9200/#{index}/_search?q=*&size=1000")
data = ""
response.read_body { |chunk| data << chunk }
result = JSON.parse(data)
result["hits"]["hits"].each do |doc|
# With no 'index_type' set, the document type should be the type
# set on the input
insist { doc["_type"] } == type
insist { doc["_index"] } == index
insist { doc["_source"]["message"] } == "hello world"
end
end
end
describe "testing index_type" do
describe "no type value" do
# Generate a random index name
index = 10.times.collect { rand(10).to_s }.join("")
event_count = 100 + rand(100)
flush_size = rand(200) + 1
config <<-CONFIG
input {
generator {
message => "hello world"
count => #{event_count}
}
}
output {
elasticsearch {
host => "127.0.0.1"
index => "#{index}"
flush_size => #{flush_size}
}
}
CONFIG
agent do
ftw = FTW::Agent.new
ftw.post!("http://localhost:9200/#{index}/_flush")
# Wait until all events are available.
Stud::try(10.times) do
data = ""
response = ftw.get!("http://127.0.0.1:9200/#{index}/_count?q=*")
response.read_body { |chunk| data << chunk }
result = JSON.parse(data)
count = result["count"]
insist { count } == event_count
end
response = ftw.get!("http://127.0.0.1:9200/#{index}/_search?q=*&size=1000")
data = ""
response.read_body { |chunk| data << chunk }
result = JSON.parse(data)
result["hits"]["hits"].each do |doc|
insist { doc["_type"] } == "logs"
end
end
end
describe "default event type value" do
# Generate a random index name
index = 10.times.collect { rand(10).to_s }.join("")
event_count = 100 + rand(100)
flush_size = rand(200) + 1
config <<-CONFIG
input {
generator {
message => "hello world"
count => #{event_count}
type => "generated"
}
}
output {
elasticsearch {
host => "127.0.0.1"
index => "#{index}"
flush_size => #{flush_size}
}
}
CONFIG
agent do
ftw = FTW::Agent.new
ftw.post!("http://localhost:9200/#{index}/_flush")
# Wait until all events are available.
Stud::try(10.times) do
data = ""
response = ftw.get!("http://127.0.0.1:9200/#{index}/_count?q=*")
response.read_body { |chunk| data << chunk }
result = JSON.parse(data)
count = result["count"]
insist { count } == event_count
end
response = ftw.get!("http://127.0.0.1:9200/#{index}/_search?q=*&size=1000")
data = ""
response.read_body { |chunk| data << chunk }
result = JSON.parse(data)
result["hits"]["hits"].each do |doc|
insist { doc["_type"] } == "generated"
end
end
end
end
end