Mirror of https://github.com/elastic/logstash.git (synced 2025-04-19 04:15:23 -04:00)
Rubocop: Enable most SpaceInside cops (#15201)
Enabled:
* SpaceInsideArrayLiteralBrackets
* SpaceInsideParens
* SpaceInsidePercentLiteralDelimiters
* SpaceInsideStringInterpolation
* Add enforced style for SpaceInsideStringInterpolation

Enabled without offenses:
* SpaceInsideArrayPercentLiteral
* Layout/SpaceInsideRangeLiteral
* Layout/SpaceInsideReferenceBrackets
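The diff below is mechanical: it deletes the padding spaces that the newly enabled cops forbid. As a rough illustration (a minimal Ruby sketch, not code from this commit; `sum` is a hypothetical helper), the no_space styles turn the first block into the second:

# Offenses under the newly enabled SpaceInside cops:
list  = [ 1, 2, 3 ]                # Layout/SpaceInsideArrayLiteralBrackets
total = sum( list )                # Layout/SpaceInsideParens
words = %w{ foo bar }              # Layout/SpaceInsidePercentLiteralDelimiters
label = "size: #{ list.size }"     # Layout/SpaceInsideStringInterpolation

# The same code in the enforced no_space style:
list  = [1, 2, 3]
total = sum(list)
words = %w{foo bar}
label = "size: #{list.size}"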
parent 0f8695593e
commit cf67cb1377
59 changed files with 185 additions and 181 deletions
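Changes of this shape are the kind RuboCop can apply itself; a representative invocation (assuming a standard RuboCop setup, not a command recorded in this commit) would be:

bundle exec rubocop -a --only Layout/SpaceInsideParens,Layout/SpaceInsideArrayLiteralBrackets

where -a autocorrects the offenses and --only restricts the run to the named cops.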
.rubocop.yml (32 changes)
@@ -89,6 +89,24 @@ Layout/SpaceBeforeFirstArg:
   Enabled: true
 Layout/SpaceBeforeSemicolon:
   Enabled: true
+Layout/SpaceInsideArrayLiteralBrackets:
+  Enabled: true
+  EnforcedStyle: no_space
+  EnforcedStyleForEmptyBrackets: no_space
+Layout/SpaceInsideArrayPercentLiteral:
+  Enabled: true # no offenses
+Layout/SpaceInsideParens:
+  Enabled: true
+  EnforcedStyle: no_space
+Layout/SpaceInsidePercentLiteralDelimiters:
+  Enabled: true
+Layout/SpaceInsideRangeLiteral:
+  Enabled: true # no offenses
+Layout/SpaceInsideReferenceBrackets:
+  Enabled: true # no offenses
+Layout/SpaceInsideStringInterpolation:
+  Enabled: true
+  EnforcedStyle: no_space
 
 ##### Need review #####
 Layout/AccessModifierIndentation:
@@ -213,21 +231,7 @@ Layout/SpaceAfterNot:
   Enabled: false
 Layout/SpaceInLambdaLiteral:
   Enabled: false
-Layout/SpaceInsideArrayLiteralBrackets:
-  Enabled: false
-Layout/SpaceInsideArrayPercentLiteral:
-  Enabled: false
 Layout/SpaceInsideBlockBraces:
   Enabled: false
 Layout/SpaceInsideHashLiteralBraces:
   Enabled: false
-Layout/SpaceInsideParens:
-  Enabled: false
-Layout/SpaceInsidePercentLiteralDelimiters:
-  Enabled: false
-Layout/SpaceInsideRangeLiteral:
-  Enabled: false
-Layout/SpaceInsideReferenceBrackets:
-  Enabled: false
-Layout/SpaceInsideStringInterpolation:
-  Enabled: false
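Net effect of the two .rubocop.yml hunks above: the SpaceInside cops leave the disabled "##### Need review #####" block and join the enabled section, several with an explicit style, e.g.:

Layout/SpaceInsideStringInterpolation:
  Enabled: true
  EnforcedStyle: no_space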
@@ -17,12 +17,12 @@
 
 require "jar_dependencies"
 
-def require_jar( *args )
+def require_jar(*args)
   return nil unless Jars.require?
-  result = Jars.require_jar( *args )
+  result = Jars.require_jar(*args)
   if result.is_a? String
     # JAR_DEBUG=1 will now show theses
-    Jars.debug { "--- jar coordinate #{args[0..-2].join( ':' )} already loaded with version #{result} - omit version #{args[-1]}" }
+    Jars.debug { "--- jar coordinate #{args[0..-2].join(':')} already loaded with version #{result} - omit version #{args[-1]}" }
     Jars.debug { " try to load from #{caller.join("\n\t")}" }
     return false
   end
@@ -22,7 +22,7 @@ require "logstash/environment"
 
 # Bundler + gemspec already setup $LOAD_PATH << '.../lib'
 # but since we load specs from 2 locations we need to hook up these:
-[ LogStash::Environment::LOGSTASH_HOME, LogStash::Environment::LOGSTASH_CORE ].each do |path|
+[LogStash::Environment::LOGSTASH_HOME, LogStash::Environment::LOGSTASH_CORE].each do |path|
   spec_path = File.join(path, "spec")
   $LOAD_PATH.unshift(spec_path) unless $LOAD_PATH.include?(spec_path)
 end
@@ -24,7 +24,7 @@ require "pathname"
 
 class LogStash::PluginManager::Generate < LogStash::PluginManager::Command
 
-  TYPES = [ "input", "filter", "output", "codec" ]
+  TYPES = ["input", "filter", "output", "codec"]
 
   option "--type", "TYPE", "Type of the plugin {input, filter, codec, output}s", :required => true
   option "--name", "PLUGIN", "Name of the new plugin", :required => true
@@ -58,7 +58,7 @@ class LogStash::PluginManager::Generate < LogStash::PluginManager::Command
 
   def transform_r(source, target)
     Dir.entries(source).each do |entry|
-      next if [ ".", ".." ].include?(entry)
+      next if [".", ".."].include?(entry)
       source_entry = File.join(source, entry)
       target_entry = File.join(target, entry)
 
@@ -98,8 +98,8 @@ class LogStash::PluginManager::Generate < LogStash::PluginManager::Command
 
   def get_git_info
     git = OpenStruct.new
-    git.author = %x{ git config --get user.name }.strip rescue "your_username"
-    git.email = %x{ git config --get user.email }.strip rescue "your_username@example.com"
+    git.author = %x{git config --get user.name}.strip rescue "your_username"
+    git.email = %x{git config --get user.email}.strip rescue "your_username@example.com"
     git
   end
 
@@ -41,7 +41,7 @@ class LogStash::PluginManager::Pack < LogStash::PluginManager::PackCommand
   def delete_target_file?
     return true if overwrite?
     puts("File #{target_file} exist, do you want to overwrite it? (Y/N)")
-    ( "y" == STDIN.gets.strip.downcase ? true : false)
+    ("y" == STDIN.gets.strip.downcase ? true : false)
   end
 
   def validate_target_file
@@ -41,7 +41,7 @@ class LogStash::PluginManager::Unpack < LogStash::PluginManager::PackCommand
     cache_location = LogStash::Environment::CACHE_PATH
     if File.exist?(cache_location)
       puts("Directory #{cache_location} is going to be overwritten, do you want to continue? (Y/N)")
-      override = ( "y" == STDIN.gets.strip.downcase ? true : false)
+      override = ("y" == STDIN.gets.strip.downcase ? true : false)
       if override
         FileUtils.rm_rf(cache_location)
       else
@@ -29,7 +29,7 @@ class HotThreadsReport
 
   def to_s
     hash = to_hash[:hot_threads]
-    report = "#{I18n.t("logstash.web_api.hot_threads.title", :hostname => hash[:hostname], :time => hash[:time], :top_count => @thread_dump.top_count )} \n"
+    report = "#{I18n.t("logstash.web_api.hot_threads.title", :hostname => hash[:hostname], :time => hash[:time], :top_count => @thread_dump.top_count)} \n"
     report << '=' * STRING_SEPARATOR_LENGTH
     report << "\n"
     hash[:threads].each do |thread|
@@ -49,14 +49,14 @@ module LogStash
               :vertices => as_boolean(params.fetch("vertices", false))}
       payload = node.pipeline(pipeline_id, opts)
       halt(404) if payload.empty?
-      respond_with(:pipelines => { pipeline_id => payload } )
+      respond_with(:pipelines => { pipeline_id => payload })
     end
 
     get "/pipelines" do
       opts = {:graph => as_boolean(params.fetch("graph", false)),
               :vertices => as_boolean(params.fetch("vertices", false))}
       payload = node.pipelines(opts)
-      respond_with(:pipelines => payload )
+      respond_with(:pipelines => payload)
     end
 
     get "/?:filter?" do
@@ -80,7 +80,7 @@ module LogStash module Config
       duplicate_ids = find_duplicate_ids(pipeline_configs)
 
       if duplicate_ids.any?
-        logger.debug("Fetching pipelines with duplicate ids", duplicate_ids.each { |k, v| v.collect(&:pipeline_id) } )
+        logger.debug("Fetching pipelines with duplicate ids", duplicate_ids.each { |k, v| v.collect(&:pipeline_id) })
         return FailedFetch.new("Found duplicate ids in your source: #{duplicate_ids.keys.sort.join(", ")}")
       end
 
@@ -26,7 +26,7 @@ require 'fileutils'
 require 'securerandom'
 
 class LogStash::DependencyReport < Clamp::Command
-  option [ "--csv" ], "OUTPUT_PATH", "The path to write the dependency report in csv format.",
+  option ["--csv"], "OUTPUT_PATH", "The path to write the dependency report in csv format.",
     :required => true, :attribute_name => :output_path
 
   OTHER_DEPENDENCIES = [
@@ -40,7 +40,7 @@ class LogStash::DependencyReport < Clamp::Command
     tmp_dir = java.lang.System.getProperty("java.io.tmpdir")
     ruby_output_path = File.join(tmp_dir, SecureRandom.uuid)
     # Write a CSV with just the ruby stuff
-    CSV.open(ruby_output_path, "wb", :headers => [ "name", "version", "url", "license", "copyright", "sourceURL" ], :write_headers => true) do |csv|
+    CSV.open(ruby_output_path, "wb", :headers => ["name", "version", "url", "license", "copyright", "sourceURL"], :write_headers => true) do |csv|
       puts "Finding gem dependencies"
       gems.each { |d| csv << d }
       puts "Finding gem embedded java/jar dependencies"
@@ -56,7 +56,7 @@ module LogStash class ElasticsearchClient
       if password.is_a?(LogStash::Util::Password)
         password = password.value
       end
-      @client_args[:transport_options] = { :headers => { "Authorization" => 'Basic ' + Base64.encode64( "#{username}:#{password}" ).chomp } }
+      @client_args[:transport_options] = { :headers => { "Authorization" => 'Basic ' + Base64.encode64("#{username}:#{password}").chomp } }
     end
 
     @client = Elasticsearch::Client.new(@client_args)
@@ -98,7 +98,7 @@ module LogStash module Modules class KibanaClient
     if password.is_a?(LogStash::Util::Password)
       password = password.value
     end
-    @http_options[:headers]['Authorization'] = 'Basic ' + Base64.encode64( "#{username}:#{password}" ).chomp
+    @http_options[:headers]['Authorization'] = 'Basic ' + Base64.encode64("#{username}:#{password}").chomp
   end
 
   # e.g. {"name":"Elastics-MacBook-Pro.local","version":{"number":"6.0.0-beta1","build_hash":"41e69","build_number":15613,"build_snapshot":true}..}
@@ -170,7 +170,7 @@ module LogStash module Plugins
         logger.debug("Executing hooks", :name => plugin_context.name, :type => plugin_context.type, :hooks_file => plugin_context.hooks_file)
         plugin_context.execute_hooks!
       rescue => e
-        logger.error("error occured when loading plugins hooks file", :name => plugin_context.name, :type => plugin_context.type, :exception => e.message, :stacktrace => e.backtrace )
+        logger.error("error occured when loading plugins hooks file", :name => plugin_context.name, :type => plugin_context.type, :exception => e.message, :stacktrace => e.backtrace)
       end
     end
   end
 end
@@ -20,7 +20,7 @@ java_import 'org.logstash.instrument.reports.ThreadsReport'
 module LogStash
   module Util
     class ThreadDump
-      SKIPPED_THREADS = [ "Finalizer", "Reference Handler", "Signal Dispatcher" ].freeze
+      SKIPPED_THREADS = ["Finalizer", "Reference Handler", "Signal Dispatcher"].freeze
       THREADS_COUNT_DEFAULT = 10.freeze
       IGNORE_IDLE_THREADS_DEFAULT = true.freeze
 
@@ -198,7 +198,7 @@ describe "conditionals in filter" do
   CONFIG
 
   sample_one("foo" => 123, "bar" => 123) do
-    expect(subject.get("tags") ).to include("woot")
+    expect(subject.get("tags")).to include("woot")
   end
 end
 
@@ -248,7 +248,7 @@ describe "conditionals in filter" do
   }
   CONFIG
 
-  sample_one("foo" => "foo", "somelist" => [ "one", "two" ], "foobar" => "foobar", "greeting" => "hello world", "tags" => [ "fancypantsy" ]) do
+  sample_one("foo" => "foo", "somelist" => ["one", "two"], "foobar" => "foobar", "greeting" => "hello world", "tags" => ["fancypantsy"]) do
     # verify the original exists
     expect(subject.get("tags")).to include("fancypantsy")
 
@@ -263,8 +263,8 @@ describe "conditionals in filter" do
 
   describe "operators" do
     conditional "[message] == 'sample'" do
-      sample_one("sample") { expect(subject.get("tags") ).to include("success") }
-      sample_one("different") { expect(subject.get("tags") ).to include("failure") }
+      sample_one("sample") { expect(subject.get("tags")).to include("success") }
+      sample_one("different") { expect(subject.get("tags")).to include("failure") }
     end
 
     conditional "'sample' == [message]" do
@@ -281,30 +281,30 @@ describe "conditionals in filter" do
     end
 
     conditional "[message] != 'sample'" do
-      sample_one("sample") { expect(subject.get("tags") ).to include("failure") }
-      sample_one("different") { expect(subject.get("tags") ).to include("success") }
+      sample_one("sample") { expect(subject.get("tags")).to include("failure") }
+      sample_one("different") { expect(subject.get("tags")).to include("success") }
     end
 
     conditional "[message] < 'sample'" do
-      sample_one("apple") { expect(subject.get("tags") ).to include("success") }
-      sample_one("zebra") { expect(subject.get("tags") ).to include("failure") }
+      sample_one("apple") { expect(subject.get("tags")).to include("success") }
+      sample_one("zebra") { expect(subject.get("tags")).to include("failure") }
     end
 
     conditional "[message] > 'sample'" do
-      sample_one("zebra") { expect(subject.get("tags") ).to include("success") }
-      sample_one("apple") { expect(subject.get("tags") ).to include("failure") }
+      sample_one("zebra") { expect(subject.get("tags")).to include("success") }
+      sample_one("apple") { expect(subject.get("tags")).to include("failure") }
     end
 
     conditional "[message] <= 'sample'" do
-      sample_one("apple") { expect(subject.get("tags") ).to include("success") }
-      sample_one("zebra") { expect(subject.get("tags") ).to include("failure") }
-      sample_one("sample") { expect(subject.get("tags") ).to include("success") }
+      sample_one("apple") { expect(subject.get("tags")).to include("success") }
+      sample_one("zebra") { expect(subject.get("tags")).to include("failure") }
+      sample_one("sample") { expect(subject.get("tags")).to include("success") }
     end
 
     conditional "[message] >= 'sample'" do
-      sample_one("zebra") { expect(subject.get("tags") ).to include("success") }
-      sample_one("sample") { expect(subject.get("tags") ).to include("success") }
-      sample_one("apple") { expect(subject.get("tags") ).to include("failure") }
+      sample_one("zebra") { expect(subject.get("tags")).to include("success") }
+      sample_one("sample") { expect(subject.get("tags")).to include("success") }
+      sample_one("apple") { expect(subject.get("tags")).to include("failure") }
     end
 
     conditional "[message] == 5" do
@@ -357,9 +357,9 @@ describe "conditionals in filter" do
     end
 
     conditional "[message] =~ /sample/" do
-      sample_one("apple") { expect(subject.get("tags") ).to include("failure") }
-      sample_one("sample") { expect(subject.get("tags") ).to include("success") }
-      sample_one("some sample") { expect(subject.get("tags") ).to include("success") }
+      sample_one("apple") { expect(subject.get("tags")).to include("failure") }
+      sample_one("sample") { expect(subject.get("tags")).to include("success") }
+      sample_one("some sample") { expect(subject.get("tags")).to include("success") }
     end
 
     conditional "[message] !~ /sample/" do
@@ -34,7 +34,7 @@ describe LogStash::Api::Commands::DefaultMetadata do
 
   before :all do
     registerIfNot(LogStash::Setting::Boolean.new("xpack.monitoring.enabled", false))
-    registerIfNot(LogStash::Setting::ArrayCoercible.new("xpack.monitoring.elasticsearch.hosts", String, [ "http://localhost:9200" ] ))
+    registerIfNot(LogStash::Setting::ArrayCoercible.new("xpack.monitoring.elasticsearch.hosts", String, ["http://localhost:9200"]))
     registerIfNot(LogStash::Setting::NullableString.new("xpack.monitoring.elasticsearch.username", "logstash_TEST system"))
     registerIfNot(LogStash::Setting::NullableString.new("xpack.monitoring.elasticsearch.username", "logstash_TEST system"))
   end
@@ -525,7 +525,7 @@ describe LogStash::Config::Mixin do
         plugin_class.new(
           "oneString" => "${notExistingVar:foo}",
           "oneBoolean" => "${notExistingVar:true}",
-          "oneArray" => [ "first array value", "${notExistingVar:foo}", "${notExistingVar:}", "${notExistingVar: }", "${notExistingVar:foo bar}" ],
+          "oneArray" => ["first array value", "${notExistingVar:foo}", "${notExistingVar:}", "${notExistingVar: }", "${notExistingVar:foo bar}"],
           "oneHash" => { "key" => "${notExistingVar:foo}" }
         )
       end
@@ -556,7 +556,7 @@ describe LogStash::Config::Mixin do
         plugin_class.new(
           "oneString" => "${FunString:foo}",
           "oneBoolean" => "${FunBool:false}",
-          "oneArray" => [ "first array value", "${FunString:foo}" ],
+          "oneArray" => ["first array value", "${FunString:foo}"],
           "oneHash" => { "key1" => "${FunString:foo}", "key2" => "${FunString} is ${FunBool}", "key3" => "${FunBool:false} or ${funbool:false}" },
           "nestedHash" => { "level1" => { "key1" => "http://${FunString}:8080/blah.txt" } },
           "nestedArray" => { "level1" => [{ "key1" => "http://${FunString}:8080/blah.txt" }, { "key2" => "http://${FunString}:8080/foo.txt" }] },
@@ -568,7 +568,7 @@ describe LogStash::Config::Mixin do
         skip("This test fails on Windows, tracked in https://github.com/elastic/logstash/issues/10454")
         expect(subject.oneString).to(be == "fancy")
         expect(subject.oneBoolean).to(be_truthy)
-        expect(subject.oneArray).to(be == [ "first array value", "fancy" ])
+        expect(subject.oneArray).to(be == ["first array value", "fancy"])
         expect(subject.oneHash).to(be == { "key1" => "fancy", "key2" => "fancy is true", "key3" => "true or false" })
         expect(subject.nestedHash).to(be == { "level1" => { "key1" => "http://fancy:8080/blah.txt" } })
         expect(subject.nestedArray).to(be == { "level1" => [{ "key1" => "http://fancy:8080/blah.txt" }, { "key2" => "http://fancy:8080/foo.txt" }] })
@@ -319,7 +319,7 @@ describe LogStash::Config::Source::Local do
 
   context "when only the `config.string` is set" do
     let(:settings) do
-      mock_settings( "config.string" => filter_block)
+      mock_settings("config.string" => filter_block)
     end
 
     it "returns a config" do
@@ -330,7 +330,7 @@ describe LogStash::Config::Source::Local do
   context "when only the `path.config` is set" do
     let(:config_file) { temporary_file(input_block) }
     let(:settings) do
-      mock_settings( "path.config" => config_file)
+      mock_settings("path.config" => config_file)
     end
 
     it "returns a config" do
@@ -358,7 +358,7 @@ describe LogStash::Config::Source::Local do
     end
 
     let(:settings) do
-      mock_settings( "path.config" => remote_url)
+      mock_settings("path.config" => remote_url)
     end
 
     it "returns a config" do
@@ -389,7 +389,7 @@ describe LogStash::Config::Source::Local do
       file.close # we need to flush the write
       path
     end
-    let(:settings) { mock_settings( "path.config" => config_path) }
+    let(:settings) { mock_settings("path.config" => config_path) }
 
    it "doesn't add anything" do
      expect(subject.pipeline_configs.first.config_string).not_to include(LogStash::Config::Defaults.output, LogStash::Config::Defaults.input)
@@ -397,7 +397,7 @@ describe LogStash::Config::Source::Local do
   end
 
   context "when the input block is missing" do
-    let(:settings) { mock_settings( "config.string" => "#{filter_block} #{output_block}") }
+    let(:settings) { mock_settings("config.string" => "#{filter_block} #{output_block}") }
 
    it "add stdin input" do
      expect(subject.pipeline_configs.first.config_string).to include(LogStash::Config::Defaults.input)
@@ -405,7 +405,7 @@ describe LogStash::Config::Source::Local do
   end
 
   context "when the output block is missing" do
-    let(:settings) { mock_settings( "config.string" => "#{input_block} #{filter_block}") }
+    let(:settings) { mock_settings("config.string" => "#{input_block} #{filter_block}") }
 
    it "add stdout output" do
      expect(subject.pipeline_configs.first.config_string).to include(LogStash::Config::Defaults.output)
@@ -413,7 +413,7 @@ describe LogStash::Config::Source::Local do
   end
 
   context "when both the output block and input block are missing" do
-    let(:settings) { mock_settings( "config.string" => "#{filter_block}") }
+    let(:settings) { mock_settings("config.string" => "#{filter_block}") }
 
    it "add stdin and output" do
      expect(subject.pipeline_configs.first.config_string).to include(LogStash::Config::Defaults.output, LogStash::Config::Defaults.input)
@@ -421,7 +421,7 @@ describe LogStash::Config::Source::Local do
   end
 
   context "when it has an input and an output" do
-    let(:settings) { mock_settings( "config.string" => "#{input_block} #{filter_block} #{output_block}") }
+    let(:settings) { mock_settings("config.string" => "#{input_block} #{filter_block} #{output_block}") }
 
    it "doesn't add anything" do
      expect(subject.pipeline_configs.first.config_string).not_to include(LogStash::Config::Defaults.output, LogStash::Config::Defaults.input)
@@ -336,8 +336,8 @@ describe LogStash::Event do
 
     it "should allow to pass a block that acts as an event factory" do
       events = LogStash::Event.from_json(source_json) { |data| LogStash::Event.new(data).tap { |e| e.set('answer', 42) } }
-      expect( events.size ).to eql 1
-      expect( events.first.get('answer') ).to eql 42
+      expect(events.size).to eql 1
+      expect(events.first.get('answer')).to eql 42
     end
   end
 
@@ -18,7 +18,7 @@
 require "logstash/instrument/metric_store"
 
 describe LogStash::Instrument::MetricStore do
-  let(:namespaces) { [ :root, :pipelines, :pipeline_01 ] }
+  let(:namespaces) { [:root, :pipelines, :pipeline_01] }
   let(:key) { :events_in }
   let(:counter) { LogStash::Instrument::MetricType::Counter.new(namespaces, key) }
 
@@ -22,18 +22,18 @@ describe LogStash::Modules::CLIParser do
   subject { LogStash::Modules::CLIParser.new(module_names, module_variables) }
   let(:logger) { double("logger") }
   let(:module_name) { "foo" }
-  let(:module_names) { [ module_name, "bar" ] }
+  let(:module_names) { [module_name, "bar"] }
   let(:proto_key_value) { "var.input.stdin.type=example" }
   let(:proto_mod_vars) { module_name + "." + proto_key_value }
-  let(:module_variables) { [ proto_mod_vars ] }
+  let(:module_variables) { [proto_mod_vars] }
   let(:expected_output) { { "name" => module_name, "var.input.stdin.type" => "example" } }
 
   describe ".parse_modules" do
     let(:module1) { "module1" }
     let(:module2) { "module2" }
     let(:csv_modules) { "#{module1},#{module2}" }
-    let(:list_with_csv) { [ module_name, csv_modules ] }
-    let(:post_parse) { [ module_name, module1, module2 ] }
+    let(:list_with_csv) { [module_name, csv_modules] }
+    let(:post_parse) { [module_name, module1, module2] }
 
     context "when it receives an array without a csv entry" do
       it "return the array unaltered" do
@@ -54,7 +54,7 @@ describe LogStash::Modules::CLIParser do
     end
 
     context "when it receives an array with a bad csv entry" do
-      let(:bad_modules) { [ "-Minvalid", module1 ] }
+      let(:bad_modules) { ["-Minvalid", module1] }
      it "raise a LogStash::ConfigLoadingError exception" do
        expect { subject.parse_modules(bad_modules) }.to raise_error LogStash::ConfigLoadingError
      end
@@ -126,7 +126,7 @@ describe LogStash::Modules::CLIParser do
 
   describe ".parse_it" do
     context "when it receives a valid module_list and module_variable_list" do
-      let(:module_names) { [ module_name ]}
+      let(:module_names) { [module_name]}
       it "@output is array of hashes with the module name and associated variables as key value pairs" do
         expect(subject.output).to eq([expected_output])
       end
@@ -63,7 +63,7 @@ describe LogStash::PersistedQueueConfigValidator do
     before do
       # create a 2MB file
      ::File.open(page_file, 'wb') do |f|
-        f.write( SecureRandom.random_bytes( 2**21 ) )
+        f.write(SecureRandom.random_bytes(2**21))
      end
    end
 
@@ -164,13 +164,13 @@ describe LogStash::PersistedQueueConfigValidator do
 
      it "gives true when add a new pipeline " do
        pq_config_validator.instance_variable_set(:@last_check_pass, true)
-       pq_config_validator.instance_variable_set(:@last_check_pipeline_configs, pipeline_configs )
+       pq_config_validator.instance_variable_set(:@last_check_pipeline_configs, pipeline_configs)
        expect(pq_config_validator.cache_check_fail?(pipeline_configs2)).to be_truthy
      end
 
      it "gives false when remove a old pipeline" do
        pq_config_validator.instance_variable_set(:@last_check_pass, true)
-       pq_config_validator.instance_variable_set(:@last_check_pipeline_configs, pipeline_configs2 )
+       pq_config_validator.instance_variable_set(:@last_check_pipeline_configs, pipeline_configs2)
        expect(pq_config_validator.cache_check_fail?(pipeline_configs)).to be_falsey
      end
    end
@@ -178,7 +178,7 @@ describe LogStash::PersistedQueueConfigValidator do
    context("last check fail") do
      it "gives true" do
        pq_config_validator.instance_variable_set(:@last_check_pass, false)
-       pq_config_validator.instance_variable_set(:@last_check_pipeline_configs, pipeline_configs )
+       pq_config_validator.instance_variable_set(:@last_check_pipeline_configs, pipeline_configs)
        expect(pq_config_validator.cache_check_fail?(pipeline_configs)).to be_truthy
      end
    end
@@ -186,7 +186,7 @@ describe LogStash::PersistedQueueConfigValidator do
    context("no update and last check pass") do
      it "gives false" do
        pq_config_validator.instance_variable_set(:@last_check_pass, true)
-       pq_config_validator.instance_variable_set(:@last_check_pipeline_configs, pipeline_configs )
+       pq_config_validator.instance_variable_set(:@last_check_pipeline_configs, pipeline_configs)
        expect(pq_config_validator.cache_check_fail?(pipeline_configs)).to be_falsey
      end
    end
@@ -22,7 +22,7 @@ describe ::LogStash::Plugins::Builtin::Pipeline do
   let(:input_options) { { "address" => address }}
   let(:output_options) { { "send_to" => [address] }}
 
-  let(:execution_context) { double("execution_context" )}
+  let(:execution_context) { double("execution_context")}
   let(:agent) { double("agent") }
   let(:pipeline_bus) { org.logstash.plugins.pipeline.PipelineBus.new }
 
@@ -38,9 +38,9 @@ describe LogStash::Plugins::EventFactorySupport do
 
   shared_examples 'an event factory' do
     it 'returns an event' do
-      expect( event_factory.new_event ).to be_a LogStash::Event
-      expect( event = event_factory.new_event('foo' => 'bar') ).to be_a LogStash::Event
-      expect( event.get('foo') ).to eql 'bar'
+      expect(event_factory.new_event).to be_a LogStash::Event
+      expect(event = event_factory.new_event('foo' => 'bar')).to be_a LogStash::Event
+      expect(event.get('foo')).to eql 'bar'
     end
   end
 
@@ -50,7 +50,7 @@ describe LogStash::Plugins::EventFactorySupport do
     it_behaves_like 'an event factory'
 
     it 'memoizes the factory instance' do
-      expect( event_factory ).to be plugin.send(:event_factory)
+      expect(event_factory).to be plugin.send(:event_factory)
     end
   end
 
@@ -76,29 +76,29 @@ describe LogStash::Plugins::EventFactorySupport do
     end
 
     it 'memoizes the factory instance' do
-      expect( targeted_event_factory ).to be plugin.send(:targeted_event_factory)
+      expect(targeted_event_factory).to be plugin.send(:targeted_event_factory)
     end
 
     it 'uses the basic event factory (no target specified)' do
-      expect( targeted_event_factory ).to be plugin.send(:event_factory)
+      expect(targeted_event_factory).to be plugin.send(:event_factory)
     end
 
     context 'with target' do
       let(:options) { super().merge('target' => '[the][baz]') }
 
       it 'returns an event' do
-        expect( targeted_event_factory.new_event ).to be_a LogStash::Event
-        expect( event = targeted_event_factory.new_event('foo' => 'bar') ).to be_a LogStash::Event
-        expect( event.include?('foo') ).to be false
-        expect( event.get('[the][baz][foo]') ).to eql 'bar'
+        expect(targeted_event_factory.new_event).to be_a LogStash::Event
+        expect(event = targeted_event_factory.new_event('foo' => 'bar')).to be_a LogStash::Event
+        expect(event.include?('foo')).to be false
+        expect(event.get('[the][baz][foo]')).to eql 'bar'
       end
 
       it 'memoizes the factory instance' do
-        expect( targeted_event_factory ).to be plugin.send(:targeted_event_factory)
+        expect(targeted_event_factory).to be plugin.send(:targeted_event_factory)
       end
 
       it 'uses a different factory from the basic one' do
-        expect( targeted_event_factory ).not_to be plugin.send(:event_factory)
+        expect(targeted_event_factory).not_to be plugin.send(:event_factory)
      end
    end
 
@@ -109,9 +109,9 @@ describe LogStash::Plugins::EventFactorySupport do
 
       it 'works' do
         events = LogStash::Event.from_json(json) { |data| targeted_event_factory.new_event(data) }
-        expect( events.size ).to eql 2
-        expect( events[0].get('[internal]') ).to eql 'foo' => 'bar'
-        expect( events[1].get('[internal]') ).to eql 'baz' => { 'a' => 1 }
+        expect(events.size).to eql 2
+        expect(events[0].get('[internal]')).to eql 'foo' => 'bar'
+        expect(events[1].get('[internal]')).to eql 'baz' => { 'a' => 1 }
       end
     end
   end
@@ -20,7 +20,7 @@ require "logstash/settings"
 
 describe LogStash::Setting::ArrayCoercible do
   subject { described_class.new("option", element_class, value) }
-  let(:value) { [ ] }
+  let(:value) { [] }
   let(:element_class) { Object }
 
   context "when given a non array value" do
@@ -76,7 +76,7 @@ describe LogStash::Setting::ArrayCoercible do
       end
     end
     context "and the other also the same value in an array" do
-      let(:value_2) { [ "a string" ] }
+      let(:value_2) { ["a string"] }
       it "should be equal" do
         expect(setting_1).to be == setting_2
       end
@@ -88,7 +88,7 @@ describe LogStash::Setting::ArrayCoercible do
      end
    end
    context "and the other a different value in an array" do
-      let(:value_2) { [ "a different string" ] }
+      let(:value_2) { ["a different string"] }
      it "should be equal" do
        expect(setting_1).to_not be == setting_2
      end
@@ -96,9 +96,9 @@ describe LogStash::Setting::ArrayCoercible do
    end
 
    context "where one was given a value in an array" do
-     let(:value_1) { [ "a string"] }
+     let(:value_1) { ["a string"] }
     context "and the other the same value in an array" do
-       let(:value_2) { [ "a string" ] }
+       let(:value_2) { ["a string"] }
       it "should be equal" do
         expect(setting_1).to be == setting_2
       end
@@ -110,7 +110,7 @@ describe LogStash::Setting::ArrayCoercible do
       end
     end
     context "and the other a different value in an array" do
-      let(:value_2) { [ "a different string" ] }
+      let(:value_2) { ["a different string"] }
       it "should be equal" do
         expect(setting_1).to_not be == setting_2
       end
@@ -56,7 +56,7 @@ describe LogStash::Setting::Bytes do
     end
 
     context "which is not a valid byte unit" do
-      values = [ "hello world", "1234", "", "-__-" ]
+      values = ["hello world", "1234", "", "-__-"]
       values.each do |value|
         it "should fail" do
           expect { subject.set(value) }.to raise_error
@@ -33,7 +33,7 @@ describe LogStash::Settings do
       expect { subject.register(numeric_setting) }.to raise_error
     end
     it "registered? should return true" do
-      expect( subject.registered?(numeric_setting_name)).to be_truthy
+      expect(subject.registered?(numeric_setting_name)).to be_truthy
     end
   end
   context "if setting hasn't been registered" do
@@ -41,7 +41,7 @@ describe LogStash::Settings do
       expect { subject.register(numeric_setting) }.to_not raise_error
     end
     it "registered? should return false" do
-      expect( subject.registered?(numeric_setting_name)).to be_falsey
+      expect(subject.registered?(numeric_setting_name)).to be_falsey
     end
   end
 end
@@ -68,7 +68,7 @@ describe LogStash::StateResolver do
     end
 
     context "when the pipeline config contains a new one and the existing" do
-      let(:pipeline_configs) { [mock_pipeline_config(:hello_world), main_pipeline_config ] }
+      let(:pipeline_configs) { [mock_pipeline_config(:hello_world), main_pipeline_config] }
 
       it "creates the new one and keep the other one" do
         expect(subject.resolve(pipelines, pipeline_configs)).to have_actions(
@@ -186,7 +186,7 @@ describe LogStash::StateResolver do
     end
 
     context "when pipeline config contains a new one and the existing" do
-      let(:pipeline_configs) { [mock_pipeline_config(:hello_world), main_pipeline_config ] }
+      let(:pipeline_configs) { [mock_pipeline_config(:hello_world), main_pipeline_config] }
 
       it "creates the new one and keep the other one stop" do
         expect(subject.resolve(pipelines, pipeline_configs)).to have_actions([:Create, :hello_world])
@@ -163,7 +163,7 @@ describe "LogStash::Util::Accessors", :if => class_exists do
     str = "[hello][0]"
     data = {"hello" => ["foo", "bar"]}
     accessors = LogStash::Util::Accessors.new(data)
-    expect(accessors.set(str, "world") ).to eq("world")
+    expect(accessors.set(str, "world")).to eq("world")
     expect(data).to eq({"hello" => ["world", "bar"]})
   end
 
@@ -175,7 +175,7 @@ describe "LogStash::Util::Accessors", :if => class_exists do
   end
 
   it "should retrieve array item containing hash" do
-    data = { "hello" => { "world" => [ { "a" => 123 }, { "b" => 345 } ], "bar" => "baz" } }
+    data = { "hello" => { "world" => [{ "a" => 123 }, { "b" => 345 }], "bar" => "baz" } }
     accessors = LogStash::Util::Accessors.new(data)
     expect(accessors.get("[hello][world][0][a]")).to eq(data["hello"]["world"][0]["a"])
     expect(accessors.get("[hello][world][1][b]")).to eq(data["hello"]["world"][1]["b"])
@@ -86,8 +86,8 @@ describe LogStash::Util do
   describe ".get_thread_id" do
     it "returns native identifier" do
       thread_id = LogStash::Util.get_thread_id(Thread.current)
-      expect( thread_id ).to be_a Integer
-      expect( thread_id ).to eq(java.lang.Thread.currentThread.getId)
+      expect(thread_id).to be_a Integer
+      expect(thread_id).to eq(java.lang.Thread.currentThread.getId)
     end
   end
 end
@@ -44,7 +44,7 @@ experimental = (ENV['LS_QA_EXPERIMENTAL_OS'].to_s.downcase || "false") == "true"
 config = PlatformConfig.new
 LOGSTASH_LATEST_VERSION = config.latest
 
-default_vagrant_boxes = ( platform == 'all' ? config.platforms : config.filter_type(platform, {"experimental" => experimental}) )
+default_vagrant_boxes = (platform == 'all' ? config.platforms : config.filter_type(platform, {"experimental" => experimental}))
 
 selected_boxes = if ENV.include?('LS_VAGRANT_HOST') then
   config.platforms.select { |p| p.name == ENV['LS_VAGRANT_HOST'] }
@@ -26,7 +26,7 @@ class TestSettings
   FIXTURES_DIR = File.join(INTEG_TESTS_DIR, "fixtures")
 
   def initialize(test_file_path)
-    test_name = File.basename(test_file_path, ".*" )
+    test_name = File.basename(test_file_path, ".*")
     @tests_settings_file = File.join(FIXTURES_DIR, "#{test_name}.yml")
     # Global suite settings
     @suite_settings = YAML.load(ERB.new(File.new(SUITE_SETTINGS_FILE).read).result)
@@ -4,7 +4,7 @@ Gem::Specification.new do |s|
   s.licenses = ['Apache License (2.0)']
   s.summary = "Tests LS binary"
   s.description = "This is a Logstash integration test helper gem"
-  s.authors = [ "Elastic"]
+  s.authors = ["Elastic"]
   s.email = 'info@elastic.co'
   s.homepage = "http://www.elastic.co/guide/en/logstash/current/index.html"
 
@@ -299,7 +299,7 @@ class LogstashService < Service
   end
 
   def run(*args)
-    run_cmd [ @logstash_bin, *args ]
+    run_cmd [@logstash_bin, *args]
   end
 
   class PluginCli
@@ -51,8 +51,8 @@ describe "uncaught exception" do
     expect(@logstash.exit_code).to be 120
 
     log_file = "#{logs_dir}/logstash-plain.log"
-    expect( File.exist?(log_file) ).to be true
-    expect( File.read(log_file) ).to match /\[FATAL\]\[org.logstash.Logstash.*?java.lang.AssertionError: a fatal error/m
+    expect(File.exist?(log_file)).to be true
+    expect(File.read(log_file)).to match /\[FATAL\]\[org.logstash.Logstash.*?java.lang.AssertionError: a fatal error/m
   end
 
   it "logs unexpected exception (from Java thread)" do
@@ -64,8 +64,8 @@ describe "uncaught exception" do
     expect(@logstash.exit_code).to be 0 # normal exit
 
     log_file = "#{logs_dir}/logstash-plain.log"
-    expect( File.exist?(log_file) ).to be true
-    expect( File.read(log_file) ).to match /\[ERROR\]\[org.logstash.Logstash.*?uncaught exception \(in thread .*?java.io.EOFException: unexpected/m
+    expect(File.exist?(log_file)).to be true
+    expect(File.read(log_file)).to match /\[ERROR\]\[org.logstash.Logstash.*?uncaught exception \(in thread .*?java.io.EOFException: unexpected/m
   end
 
   def spawn_logstash_and_wait_for_exit!(config, timeout)
@@ -102,7 +102,7 @@ describe "Test Monitoring API" do
   end
 
   context "when a drop filter is in the pipeline" do
-    let(:config) { @fixture.config("dropping_events", { :port => tcp_port } ) }
+    let(:config) { @fixture.config("dropping_events", { :port => tcp_port }) }
 
     it 'expose the correct output counter' do
       try(max_retry) do
@@ -118,7 +118,7 @@ describe "Test Monitoring API" do
   end
 
   context "when a clone filter is in the pipeline" do
-    let(:config) { @fixture.config("cloning_events", { :port => tcp_port } ) }
+    let(:config) { @fixture.config("cloning_events", { :port => tcp_port }) }
 
     it 'expose the correct output counter' do
       try(max_retry) do
@@ -105,7 +105,7 @@ namespace "artifact" do
 
   def oss_exclude_paths
     return @oss_excludes if @oss_excludes
-    @oss_excludes = default_exclude_paths + [ "x-pack/**/*" ]
+    @oss_excludes = default_exclude_paths + ["x-pack/**/*"]
   end
 
   def files(exclude_paths = default_exclude_paths)
@@ -388,7 +388,7 @@ namespace "artifact" do
       "build_snapshot" => SNAPSHOT_BUILD
     }
 
-    metadata = [ "# encoding: utf-8", "BUILD_INFO = #{build_info}" ]
+    metadata = ["# encoding: utf-8", "BUILD_INFO = #{build_info}"]
     IO.write(BUILD_METADATA_FILE.path, metadata.join("\n"))
   end
 
@@ -15,4 +15,4 @@
 # specific language governing permissions and limitations
 # under the License.
 
-task "bootstrap" => [ "vendor:all", "compile:all" ]
+task "bootstrap" => ["vendor:all", "compile:all"]
@@ -84,4 +84,4 @@ namespace "test" do
   task "install-default" => ["bootstrap", "plugin:install-default", "plugin:install-development-dependencies"]
 end
 
-task "test" => [ "test:core" ]
+task "test" => ["test:core"]
@@ -24,7 +24,7 @@ describe "Project licenses" do
       # Expected licenses are Apache License 2.0, BSD license, MIT license and the ruby one,
       # this not exclude that this list change in the feature.
       ##
-      Regexp.union([ /mit/,
+      Regexp.union([/mit/,
                     /apache*/,
                     /bsd/,
                     /artistic 2.*/,
@@ -32,7 +32,7 @@ describe "Project licenses" do
                     /lgpl/,
                     /epl/,
                     /elastic/i
                    ])
     }
 
     ##
@@ -21,7 +21,7 @@ describe LogStash::PluginManager::InstallStrategyFactory do
   subject { described_class }
 
   context "when the plugins args is valid" do
-    let(:plugins_args) { [ "logstash-pack-mega" ] }
+    let(:plugins_args) { ["logstash-pack-mega"] }
 
     it "returns the first matched strategy" do
       success = double("urifetch success")
@ -39,7 +39,7 @@ describe LogStash::PluginManager::Update do
|
||||||
|
|
||||||
context "when skipping validation" do
|
context "when skipping validation" do
|
||||||
let(:cmd) { LogStash::PluginManager::Update.new("update") }
|
let(:cmd) { LogStash::PluginManager::Update.new("update") }
|
||||||
let(:plugin) { OpenStruct.new(:name => "dummy", :options => {} ) }
|
let(:plugin) { OpenStruct.new(:name => "dummy", :options => {}) }
|
||||||
|
|
||||||
before(:each) do
|
before(:each) do
|
||||||
expect(cmd.gemfile).to receive(:find).with(plugin).and_return(plugin)
|
expect(cmd.gemfile).to receive(:find).with(plugin).and_return(plugin)
|
||||||
|
|
|
@@ -24,7 +24,7 @@ describe LogStash::PluginManager do
 let(:plugin_name) { "logstash-output-elasticsearch" }

 let(:version_data) do
-[ { "authors" => "Elastic", "built_at" => "2015-08-11T00:00:00.000Z", "description" => "Output events to elasticsearch",
+[{ "authors" => "Elastic", "built_at" => "2015-08-11T00:00:00.000Z", "description" => "Output events to elasticsearch",
 "downloads_count" => 1638, "metadata" => {"logstash_group" => "output", "logstash_plugin" => "true"}, "number" => "2.0.0.pre",
 "summary" => "Logstash Output to Elasticsearch", "platform" => "java", "ruby_version" => ">= 0", "prerelease" => true,
 "licenses" => ["apache-2.0"], "requirements" => [], "sha" => "194b27099c13605a882a3669e2363fdecccaab1de48dd44b0cda648dd5516799"},

@@ -35,7 +35,7 @@ describe LogStash::PluginManager do
 { "authors" => "Elastic", "built_at" => "2015-08-09T00:00:00.000Z", "description" => "Output events to elasticsearch",
 "downloads_count" => 1638, "metadata" => {"logstash_group" => "output", "logstash_plugin" => "true"}, "number" => "1.0.4",
 "summary" => "Logstash Output to Elasticsearch", "platform" => "java", "ruby_version" => ">= 0", "prerelease" => false,
-"licenses" => ["apache-2.0"], "requirements" => [], "sha" => "194b27099c13605a882a3669e2363fdecccaab1de48dd44b0cda648dd5516799"} ]
+"licenses" => ["apache-2.0"], "requirements" => [], "sha" => "194b27099c13605a882a3669e2363fdecccaab1de48dd44b0cda648dd5516799"}]
 end

 before(:each) do
@@ -66,7 +66,7 @@ describe LogStash::Util::Zip do
 end

 let(:zip_file) do
-[ "foo", "bar", "zoo" ].inject([]) do |acc, name|
+["foo", "bar", "zoo"].inject([]) do |acc, name|
 acc << OpenStruct.new(:name => name)
 acc
 end

@@ -134,7 +134,7 @@ describe LogStash::Util::Zip do
 end

 let(:dir_files) do
-[ "foo", "bar", "zoo" ]
+["foo", "bar", "zoo"]
 end

 let(:zip_file) { Class.new }

@@ -163,7 +163,7 @@ describe LogStash::Util::Tar do
 let(:gzip_file) { Class.new }

 let(:tar_file) do
-[ "foo", "bar", "zoo" ].inject([]) do |acc, name|
+["foo", "bar", "zoo"].inject([]) do |acc, name|
 acc << OpenStruct.new(:full_name => name)
 acc
 end

@@ -189,7 +189,7 @@ describe LogStash::Util::Tar do
 end

 let(:dir_files) do
-[ "foo", "bar", "zoo" ]
+["foo", "bar", "zoo"]
 end

 let(:tar_file) { Class.new }
@@ -88,7 +88,7 @@ module LogStash module Docgen
 g.fetch
 g.merge("origin/main")
 else
-g = Git.clone(repository, path, :depth => 1 )
+g = Git.clone(repository, path, :depth => 1)
 end
 end

@@ -79,10 +79,10 @@ module LogStash module Docgen
 CANONICAL_NAME_PREFIX = "logstash"
 GLOBAL_BLOCKLIST = ["enable_metric", "id"]
 BLOCKLIST = {
-"input" => GLOBAL_BLOCKLIST + [ "type", "debug", "format", "charset", "message_format", "codec", "tags", "add_field"],
+"input" => GLOBAL_BLOCKLIST + ["type", "debug", "format", "charset", "message_format", "codec", "tags", "add_field"],
 "codec" => GLOBAL_BLOCKLIST,
-"output" => GLOBAL_BLOCKLIST + [ "type", "tags", "exclude_tags", "codec", "workers" ],
-"filter" => GLOBAL_BLOCKLIST + ["type", "tags", "add_tag", "remove_tag", "add_field", "remove_field", "periodic_flush" ]
+"output" => GLOBAL_BLOCKLIST + ["type", "tags", "exclude_tags", "codec", "workers"],
+"filter" => GLOBAL_BLOCKLIST + ["type", "tags", "add_tag", "remove_tag", "add_field", "remove_field", "periodic_flush"]
 }

 attr_accessor :description, :config_name, :section, :name, :default_plugin, :gemspec

@@ -38,12 +38,12 @@ module LogStash::Docgen

 def initialize(context)
 @rules = [
-[ COMMENT_RE, :parse_comment ],
-[ CLASS_DEFINITION_RE, :parse_class_description ],
-[ NEW_CLASS_DEFINITION_RE_ML, :parse_new_class_description ],
-[ CONFIG_OPTION_RE, :parse_config ],
-[ CONFIG_NAME_RE, :parse_config_name ],
-[ RESET_BUFFER_RE, :reset_buffer ]
+[COMMENT_RE, :parse_comment],
+[CLASS_DEFINITION_RE, :parse_class_description],
+[NEW_CLASS_DEFINITION_RE_ML, :parse_new_class_description],
+[CONFIG_OPTION_RE, :parse_config],
+[CONFIG_NAME_RE, :parse_config_name],
+[RESET_BUFFER_RE, :reset_buffer]
 ]

 @context = context
@@ -31,7 +31,7 @@ module Paquet

 uri = URI.parse(source)

-http = Net::HTTP.new(uri.host, uri.port, )
+http = Net::HTTP.new(uri.host, uri.port,)
 http.use_ssl = uri.scheme == HTTPS_SCHEME

 response = http.get(uri.path)
@@ -26,7 +26,7 @@ module LogStash
 settings.register(LogStash::Setting::ArrayCoercible.new("xpack.management.pipeline.id", String, ["main"]))
 settings.register(LogStash::Setting::NullableString.new("xpack.management.elasticsearch.username", "logstash_system"))
 settings.register(LogStash::Setting::NullableString.new("xpack.management.elasticsearch.password"))
-settings.register(LogStash::Setting::ArrayCoercible.new("xpack.management.elasticsearch.hosts", String, [ "https://localhost:9200" ] ))
+settings.register(LogStash::Setting::ArrayCoercible.new("xpack.management.elasticsearch.hosts", String, ["https://localhost:9200"]))
 settings.register(LogStash::Setting::NullableString.new("xpack.management.elasticsearch.cloud_id"))
 settings.register(LogStash::Setting::NullableString.new("xpack.management.elasticsearch.cloud_auth"))
 settings.register(LogStash::Setting::NullableString.new("xpack.management.elasticsearch.api_key"))

@@ -260,7 +260,7 @@ module LogStash
 private
 def register_monitoring_settings(settings, prefix = "")
 settings.register(LogStash::Setting::Boolean.new("#{prefix}monitoring.enabled", false))
-settings.register(LogStash::Setting::ArrayCoercible.new("#{prefix}monitoring.elasticsearch.hosts", String, [ "http://localhost:9200" ] ))
+settings.register(LogStash::Setting::ArrayCoercible.new("#{prefix}monitoring.elasticsearch.hosts", String, ["http://localhost:9200"]))
 settings.register(LogStash::Setting::TimeValue.new("#{prefix}monitoring.collection.interval", "10s"))
 settings.register(LogStash::Setting::TimeValue.new("#{prefix}monitoring.collection.timeout_interval", "10m"))
 settings.register(LogStash::Setting::NullableString.new("#{prefix}monitoring.elasticsearch.username", "logstash_system"))
@@ -53,7 +53,7 @@ module LogStash
 ssl_key = bound_scope.setting("var.input.tcp.ssl_key", "")
 lines.push("ssl_key => '#{ssl_key}'") unless ssl_key.empty?

-lines.push("ssl_key_passphrase => '#{ bound_scope.setting("var.input.tcp.ssl_key_passphrase", "")}'")
+lines.push("ssl_key_passphrase => '#{bound_scope.setting("var.input.tcp.ssl_key_passphrase", "")}'")

 certs_array_as_string = bound_scope.array_to_string(
 bound_scope.get_setting(LogStash::Setting::SplittableStringArray.new("var.input.tcp.ssl_extra_chain_certs", String, []))
@@ -69,8 +69,8 @@ describe "Read configuration from elasticsearch" do
 end

 it "reloads the configuration when its different from the running pipeline" do
-[ File.join(Stud::Temporary.directory, "hello.log"),
-File.join(Stud::Temporary.directory, "whole-new-file.log") ].each do |temporary_file|
+[File.join(Stud::Temporary.directory, "hello.log"),
+File.join(Stud::Temporary.directory, "whole-new-file.log")].each do |temporary_file|
 new_config = "input { generator { count => 10000 }} output { file { path => '#{temporary_file}' } }"

 expect(File.exist?(temporary_file)).to be_falsey

@@ -158,7 +158,7 @@ def logstash_with_empty_default(cmd, options = {}, default_settings = {})

 logstash_yaml = File.join(temporary_settings, "logstash.yml")
 IO.write(logstash_yaml, YAML::dump(default_settings.merge(options.fetch(:settings, {}))))
-FileUtils.cp(File.join(get_logstash_path, "config", "log4j2.properties"), File.join(temporary_settings, "log4j2.properties") )
+FileUtils.cp(File.join(get_logstash_path, "config", "log4j2.properties"), File.join(temporary_settings, "log4j2.properties"))

 puts "Running logstash with #{cmd} in #{get_logstash_path} with settings #{options.inspect}"
 Belzebuth.run(cmd, {:directory => get_logstash_path }.merge(options.fetch(:belzebuth, { })))
@@ -227,17 +227,17 @@ describe LogStash::Filters::Geoip do
 it 'sets up periodic task when download triggered' do
 db_manager.send :trigger_download
 download_task = db_manager.instance_variable_get(:@download_task)
-expect( download_task ).to_not be nil
-expect( download_task.running? ).to be true
-expect( download_task.execution_interval ).to eq 86_400
+expect(download_task).to_not be nil
+expect(download_task.running?).to be true
+expect(download_task.execution_interval).to eq 86_400
 end

 it 'executes download job after interval passes' do
 db_manager.instance_variable_set(:@download_interval, 1.5)
 db_manager.send :trigger_download
 download_task = db_manager.instance_variable_get(:@download_task)
-expect( download_task.running? ).to be true
-expect( db_manager ).to receive :database_update_check
+expect(download_task.running?).to be true
+expect(db_manager).to receive :database_update_check
 sleep 2.0 # wait for task execution
 end
 end

@@ -396,7 +396,7 @@ describe LogStash::Filters::Geoip do

 eula_db_dirname = get_dir_path("foo")
 FileUtils.mkdir_p(eula_db_dirname)
-rewrite_temp_metadata(metadata_path, [ ["City", "1620246514", "", "foo", true],
+rewrite_temp_metadata(metadata_path, [["City", "1620246514", "", "foo", true],
 ["ASN", "1620246514", "", "foo", true]])

 path = db_manager.subscribe_database_path(CITY, nil, mock_geoip_plugin)
@@ -198,8 +198,8 @@ describe LogStash::Filters::Geoip do

 context "reset md5" do
 it "should reset md5 to empty string only" do
-rewrite_temp_metadata(temp_metadata_path, [ ["ASN", "1620246514", "SOME MD5", "1620246514", true],
-["City", "1620246514", "SOME MD5", "1620246514", true] ])
+rewrite_temp_metadata(temp_metadata_path, [["ASN", "1620246514", "SOME MD5", "1620246514", true],
+["City", "1620246514", "SOME MD5", "1620246514", true]])

 dbm.reset_md5(database_type)
 row = dbm.get_metadata(database_type).last

@@ -212,8 +212,8 @@ describe LogStash::Filters::Geoip do
 context "dirnames" do
 it "should reset md5 to empty string only" do
 write_temp_metadata(temp_metadata_path, city2_metadata)
-rewrite_temp_metadata(temp_metadata_path, [ ["ASN", "1620246514", "SOME MD5", "CC", true],
-city2_metadata ])
+rewrite_temp_metadata(temp_metadata_path, [["ASN", "1620246514", "SOME MD5", "CC", true],
+city2_metadata])

 dirnames = dbm.dirnames
 expect(dirnames).to match_array([second_dirname, "CC"])
@@ -23,15 +23,15 @@ module GeoipHelper
 end

 def default_city_db_path
-::File.join(get_data_dir_path, "CC", default_city_db_name )
+::File.join(get_data_dir_path, "CC", default_city_db_name)
 end

 def default_city_gz_path
-::File.join(get_data_dir_path, "CC", "GeoLite2-City.tgz" )
+::File.join(get_data_dir_path, "CC", "GeoLite2-City.tgz")
 end

 def default_asn_db_path
-::File.join(get_data_dir_path, "CC", default_asn_db_name )
+::File.join(get_data_dir_path, "CC", default_asn_db_name)
 end

 def metadata_path

@@ -47,11 +47,11 @@ module GeoipHelper
 end

 def second_city_db_path
-::File.join(get_data_dir_path, second_dirname, default_city_db_name )
+::File.join(get_data_dir_path, second_dirname, default_city_db_name)
 end

 def second_asn_db_path
-::File.join(get_data_dir_path, second_dirname, default_asn_db_name )
+::File.join(get_data_dir_path, second_dirname, default_asn_db_name)
 end

 def second_dirname
@@ -124,7 +124,7 @@ describe LogStash::LicenseChecker::LicenseManager do
 context 'when the type changes' do
 let(:new_type) { 'basic' }
 let(:second_license) do
-{ 'license' => license['license'].merge( { 'type' => new_type })}
+{ 'license' => license['license'].merge({ 'type' => new_type })}
 end

 it 'updates observers' do

@@ -139,7 +139,7 @@ describe LogStash::LicenseChecker::LicenseManager do
 context 'when the status changes' do
 let(:new_status) { 'expired' }
 let(:second_license) do
-{ 'license' => license['license'].merge( { 'status' => new_status })}
+{ 'license' => license['license'].merge({ 'status' => new_status })}
 end
 it 'updates observers' do
 expect(license_reader).to receive(:fetch_xpack_info).and_return LogStash::LicenseChecker::XPackInfo.from_es_response(second_license)
@@ -25,7 +25,7 @@ describe LogStash::LicenseChecker::LicenseReader do
 let(:settings) do
 {
 "xpack.monitoring.enabled" => true,
-"xpack.monitoring.elasticsearch.hosts" => [ elasticsearch_url],
+"xpack.monitoring.elasticsearch.hosts" => [elasticsearch_url],
 "xpack.monitoring.elasticsearch.username" => elasticsearch_username,
 "xpack.monitoring.elasticsearch.password" => elasticsearch_password,
 }

@@ -125,10 +125,10 @@ describe LogStash::LicenseChecker::LicenseReader do
 end

 it "builds ES client" do
-expect( subject.client.options[:hosts].size ).to eql 1
-expect( subject.client.options[:hosts][0].to_s ).to eql elasticsearch_url # URI#to_s
-expect( subject.client.options ).to include(:user => elasticsearch_username, :password => elasticsearch_password)
-expect( subject.client.client_settings[:headers] ).to include(product_origin_header)
+expect(subject.client.options[:hosts].size).to eql 1
+expect(subject.client.options[:hosts][0].to_s).to eql elasticsearch_url # URI#to_s
+expect(subject.client.options).to include(:user => elasticsearch_username, :password => elasticsearch_password)
+expect(subject.client.client_settings[:headers]).to include(product_origin_header)
 end

 context 'with cloud_id' do

@@ -148,10 +148,10 @@ describe LogStash::LicenseChecker::LicenseReader do
 end

 it "builds ES client" do
-expect( subject.client.options[:hosts].size ).to eql 1
-expect( subject.client.options[:hosts][0].to_s ).to eql 'https://e1e631201fb64d55a75f431eb6349589.westeurope.azure.elastic-cloud.com:9243'
-expect( subject.client.options ).to include(:user => 'elastic', :password => 'LnWMLeK3EQPTf3G3F1IBdFvO')
-expect( subject.client.client_settings[:headers] ).to include(product_origin_header)
+expect(subject.client.options[:hosts].size).to eql 1
+expect(subject.client.options[:hosts][0].to_s).to eql 'https://e1e631201fb64d55a75f431eb6349589.westeurope.azure.elastic-cloud.com:9243'
+expect(subject.client.options).to include(:user => 'elastic', :password => 'LnWMLeK3EQPTf3G3F1IBdFvO')
+expect(subject.client.client_settings[:headers]).to include(product_origin_header)
 end
 end

@@ -166,8 +166,8 @@ describe LogStash::LicenseChecker::LicenseReader do
 end

 it "builds ES client" do
-expect( subject.client.client_settings[:headers] ).to include("Authorization" => "ApiKey Zm9vOmJhcg==")
-expect( subject.client.client_settings[:headers] ).to include(product_origin_header)
+expect(subject.client.client_settings[:headers]).to include("Authorization" => "ApiKey Zm9vOmJhcg==")
+expect(subject.client.client_settings[:headers]).to include(product_origin_header)
 end
 end
 end
@@ -23,7 +23,7 @@ describe LogStash::Inputs::TimerTaskLogger do
 let(:exception) { Concurrent::TimeoutError.new }

 it "logs the exception in debug mode" do
-expect(subject.logger).to receive(:debug).with(/metric shipper/, hash_including(:exception => exception.class, :message => exception.message ))
+expect(subject.logger).to receive(:debug).with(/metric shipper/, hash_including(:exception => exception.class, :message => exception.message))
 subject.update(run_at, result, exception)
 end
 end

@@ -32,7 +32,7 @@ describe LogStash::Inputs::TimerTaskLogger do
 let(:exception) { ArgumentError.new }

 it "logs the exception in debug mode" do
-expect(subject.logger).to receive(:error).with(/metric shipper/, hash_including(:exception => exception.class, :message => exception.message ))
+expect(subject.logger).to receive(:error).with(/metric shipper/, hash_including(:exception => exception.class, :message => exception.message))
 subject.update(run_at, result, exception)
 end
 end