Rubocop: Enable most SpaceInside cops (#15201)

Enabled:
* Layout/SpaceInsideArrayLiteralBrackets
* Layout/SpaceInsideParens
* Layout/SpaceInsidePercentLiteralDelimiters
* Layout/SpaceInsideStringInterpolation (with an enforced style added)

Enabled without offenses:
* Layout/SpaceInsideArrayPercentLiteral
* Layout/SpaceInsideRangeLiteral
* Layout/SpaceInsideReferenceBrackets
This commit is contained in:
Andres Rodriguez 2023-07-20 09:49:46 -04:00 committed by GitHub
parent 0f8695593e
commit cf67cb1377
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
59 changed files with 185 additions and 181 deletions

View file

@ -89,6 +89,24 @@ Layout/SpaceBeforeFirstArg:
Enabled: true
Layout/SpaceBeforeSemicolon:
Enabled: true
Layout/SpaceInsideArrayLiteralBrackets:
Enabled: true
EnforcedStyle: no_space
EnforcedStyleForEmptyBrackets: no_space
Layout/SpaceInsideArrayPercentLiteral:
Enabled: true # no offenses
Layout/SpaceInsideParens:
Enabled: true
EnforcedStyle: no_space
Layout/SpaceInsidePercentLiteralDelimiters:
Enabled: true
Layout/SpaceInsideRangeLiteral:
Enabled: true # no offenses
Layout/SpaceInsideReferenceBrackets:
Enabled: true # no offenses
Layout/SpaceInsideStringInterpolation:
Enabled: true
EnforcedStyle: no_space
##### Need review #####
Layout/AccessModifierIndentation:
@ -213,21 +231,7 @@ Layout/SpaceAfterNot:
Enabled: false
Layout/SpaceInLambdaLiteral:
Enabled: false
Layout/SpaceInsideArrayLiteralBrackets:
Enabled: false
Layout/SpaceInsideArrayPercentLiteral:
Enabled: false
Layout/SpaceInsideBlockBraces:
Enabled: false
Layout/SpaceInsideHashLiteralBraces:
Enabled: false
Layout/SpaceInsideParens:
Enabled: false
Layout/SpaceInsidePercentLiteralDelimiters:
Enabled: false
Layout/SpaceInsideRangeLiteral:
Enabled: false
Layout/SpaceInsideReferenceBrackets:
Enabled: false
Layout/SpaceInsideStringInterpolation:
Enabled: false

View file

@ -17,12 +17,12 @@
require "jar_dependencies"
def require_jar( *args )
def require_jar(*args)
return nil unless Jars.require?
result = Jars.require_jar( *args )
result = Jars.require_jar(*args)
if result.is_a? String
# JAR_DEBUG=1 will now show theses
Jars.debug { "--- jar coordinate #{args[0..-2].join( ':' )} already loaded with version #{result} - omit version #{args[-1]}" }
Jars.debug { "--- jar coordinate #{args[0..-2].join(':')} already loaded with version #{result} - omit version #{args[-1]}" }
Jars.debug { " try to load from #{caller.join("\n\t")}" }
return false
end

View file

@ -22,7 +22,7 @@ require "logstash/environment"
# Bundler + gemspec already setup $LOAD_PATH << '.../lib'
# but since we load specs from 2 locations we need to hook up these:
[ LogStash::Environment::LOGSTASH_HOME, LogStash::Environment::LOGSTASH_CORE ].each do |path|
[LogStash::Environment::LOGSTASH_HOME, LogStash::Environment::LOGSTASH_CORE].each do |path|
spec_path = File.join(path, "spec")
$LOAD_PATH.unshift(spec_path) unless $LOAD_PATH.include?(spec_path)
end

View file

@ -24,7 +24,7 @@ require "pathname"
class LogStash::PluginManager::Generate < LogStash::PluginManager::Command
TYPES = [ "input", "filter", "output", "codec" ]
TYPES = ["input", "filter", "output", "codec"]
option "--type", "TYPE", "Type of the plugin {input, filter, codec, output}s", :required => true
option "--name", "PLUGIN", "Name of the new plugin", :required => true
@ -58,7 +58,7 @@ class LogStash::PluginManager::Generate < LogStash::PluginManager::Command
def transform_r(source, target)
Dir.entries(source).each do |entry|
next if [ ".", ".." ].include?(entry)
next if [".", ".."].include?(entry)
source_entry = File.join(source, entry)
target_entry = File.join(target, entry)
@ -98,8 +98,8 @@ class LogStash::PluginManager::Generate < LogStash::PluginManager::Command
def get_git_info
git = OpenStruct.new
git.author = %x{ git config --get user.name }.strip rescue "your_username"
git.email = %x{ git config --get user.email }.strip rescue "your_username@example.com"
git.author = %x{git config --get user.name}.strip rescue "your_username"
git.email = %x{git config --get user.email}.strip rescue "your_username@example.com"
git
end

View file

@ -41,7 +41,7 @@ class LogStash::PluginManager::Pack < LogStash::PluginManager::PackCommand
def delete_target_file?
return true if overwrite?
puts("File #{target_file} exist, do you want to overwrite it? (Y/N)")
( "y" == STDIN.gets.strip.downcase ? true : false)
("y" == STDIN.gets.strip.downcase ? true : false)
end
def validate_target_file

View file

@ -41,7 +41,7 @@ class LogStash::PluginManager::Unpack < LogStash::PluginManager::PackCommand
cache_location = LogStash::Environment::CACHE_PATH
if File.exist?(cache_location)
puts("Directory #{cache_location} is going to be overwritten, do you want to continue? (Y/N)")
override = ( "y" == STDIN.gets.strip.downcase ? true : false)
override = ("y" == STDIN.gets.strip.downcase ? true : false)
if override
FileUtils.rm_rf(cache_location)
else

View file

@ -29,7 +29,7 @@ class HotThreadsReport
def to_s
hash = to_hash[:hot_threads]
report = "#{I18n.t("logstash.web_api.hot_threads.title", :hostname => hash[:hostname], :time => hash[:time], :top_count => @thread_dump.top_count )} \n"
report = "#{I18n.t("logstash.web_api.hot_threads.title", :hostname => hash[:hostname], :time => hash[:time], :top_count => @thread_dump.top_count)} \n"
report << '=' * STRING_SEPARATOR_LENGTH
report << "\n"
hash[:threads].each do |thread|

View file

@ -49,14 +49,14 @@ module LogStash
:vertices => as_boolean(params.fetch("vertices", false))}
payload = node.pipeline(pipeline_id, opts)
halt(404) if payload.empty?
respond_with(:pipelines => { pipeline_id => payload } )
respond_with(:pipelines => { pipeline_id => payload })
end
get "/pipelines" do
opts = {:graph => as_boolean(params.fetch("graph", false)),
:vertices => as_boolean(params.fetch("vertices", false))}
payload = node.pipelines(opts)
respond_with(:pipelines => payload )
respond_with(:pipelines => payload)
end
get "/?:filter?" do

View file

@ -80,7 +80,7 @@ module LogStash module Config
duplicate_ids = find_duplicate_ids(pipeline_configs)
if duplicate_ids.any?
logger.debug("Fetching pipelines with duplicate ids", duplicate_ids.each { |k, v| v.collect(&:pipeline_id) } )
logger.debug("Fetching pipelines with duplicate ids", duplicate_ids.each { |k, v| v.collect(&:pipeline_id) })
return FailedFetch.new("Found duplicate ids in your source: #{duplicate_ids.keys.sort.join(", ")}")
end

View file

@ -26,7 +26,7 @@ require 'fileutils'
require 'securerandom'
class LogStash::DependencyReport < Clamp::Command
option [ "--csv" ], "OUTPUT_PATH", "The path to write the dependency report in csv format.",
option ["--csv"], "OUTPUT_PATH", "The path to write the dependency report in csv format.",
:required => true, :attribute_name => :output_path
OTHER_DEPENDENCIES = [
@ -40,7 +40,7 @@ class LogStash::DependencyReport < Clamp::Command
tmp_dir = java.lang.System.getProperty("java.io.tmpdir")
ruby_output_path = File.join(tmp_dir, SecureRandom.uuid)
# Write a CSV with just the ruby stuff
CSV.open(ruby_output_path, "wb", :headers => [ "name", "version", "url", "license", "copyright", "sourceURL" ], :write_headers => true) do |csv|
CSV.open(ruby_output_path, "wb", :headers => ["name", "version", "url", "license", "copyright", "sourceURL"], :write_headers => true) do |csv|
puts "Finding gem dependencies"
gems.each { |d| csv << d }
puts "Finding gem embedded java/jar dependencies"

View file

@ -56,7 +56,7 @@ module LogStash class ElasticsearchClient
if password.is_a?(LogStash::Util::Password)
password = password.value
end
@client_args[:transport_options] = { :headers => { "Authorization" => 'Basic ' + Base64.encode64( "#{username}:#{password}" ).chomp } }
@client_args[:transport_options] = { :headers => { "Authorization" => 'Basic ' + Base64.encode64("#{username}:#{password}").chomp } }
end
@client = Elasticsearch::Client.new(@client_args)

View file

@ -98,7 +98,7 @@ module LogStash module Modules class KibanaClient
if password.is_a?(LogStash::Util::Password)
password = password.value
end
@http_options[:headers]['Authorization'] = 'Basic ' + Base64.encode64( "#{username}:#{password}" ).chomp
@http_options[:headers]['Authorization'] = 'Basic ' + Base64.encode64("#{username}:#{password}").chomp
end
# e.g. {"name":"Elastics-MacBook-Pro.local","version":{"number":"6.0.0-beta1","build_hash":"41e69","build_number":15613,"build_snapshot":true}..}

View file

@ -170,7 +170,7 @@ module LogStash module Plugins
logger.debug("Executing hooks", :name => plugin_context.name, :type => plugin_context.type, :hooks_file => plugin_context.hooks_file)
plugin_context.execute_hooks!
rescue => e
logger.error("error occured when loading plugins hooks file", :name => plugin_context.name, :type => plugin_context.type, :exception => e.message, :stacktrace => e.backtrace )
logger.error("error occured when loading plugins hooks file", :name => plugin_context.name, :type => plugin_context.type, :exception => e.message, :stacktrace => e.backtrace)
end
end
end

View file

@ -20,7 +20,7 @@ java_import 'org.logstash.instrument.reports.ThreadsReport'
module LogStash
module Util
class ThreadDump
SKIPPED_THREADS = [ "Finalizer", "Reference Handler", "Signal Dispatcher" ].freeze
SKIPPED_THREADS = ["Finalizer", "Reference Handler", "Signal Dispatcher"].freeze
THREADS_COUNT_DEFAULT = 10.freeze
IGNORE_IDLE_THREADS_DEFAULT = true.freeze

View file

@ -198,7 +198,7 @@ describe "conditionals in filter" do
CONFIG
sample_one("foo" => 123, "bar" => 123) do
expect(subject.get("tags") ).to include("woot")
expect(subject.get("tags")).to include("woot")
end
end
@ -248,7 +248,7 @@ describe "conditionals in filter" do
}
CONFIG
sample_one("foo" => "foo", "somelist" => [ "one", "two" ], "foobar" => "foobar", "greeting" => "hello world", "tags" => [ "fancypantsy" ]) do
sample_one("foo" => "foo", "somelist" => ["one", "two"], "foobar" => "foobar", "greeting" => "hello world", "tags" => ["fancypantsy"]) do
# verify the original exists
expect(subject.get("tags")).to include("fancypantsy")
@ -263,8 +263,8 @@ describe "conditionals in filter" do
describe "operators" do
conditional "[message] == 'sample'" do
sample_one("sample") { expect(subject.get("tags") ).to include("success") }
sample_one("different") { expect(subject.get("tags") ).to include("failure") }
sample_one("sample") { expect(subject.get("tags")).to include("success") }
sample_one("different") { expect(subject.get("tags")).to include("failure") }
end
conditional "'sample' == [message]" do
@ -281,30 +281,30 @@ describe "conditionals in filter" do
end
conditional "[message] != 'sample'" do
sample_one("sample") { expect(subject.get("tags") ).to include("failure") }
sample_one("different") { expect(subject.get("tags") ).to include("success") }
sample_one("sample") { expect(subject.get("tags")).to include("failure") }
sample_one("different") { expect(subject.get("tags")).to include("success") }
end
conditional "[message] < 'sample'" do
sample_one("apple") { expect(subject.get("tags") ).to include("success") }
sample_one("zebra") { expect(subject.get("tags") ).to include("failure") }
sample_one("apple") { expect(subject.get("tags")).to include("success") }
sample_one("zebra") { expect(subject.get("tags")).to include("failure") }
end
conditional "[message] > 'sample'" do
sample_one("zebra") { expect(subject.get("tags") ).to include("success") }
sample_one("apple") { expect(subject.get("tags") ).to include("failure") }
sample_one("zebra") { expect(subject.get("tags")).to include("success") }
sample_one("apple") { expect(subject.get("tags")).to include("failure") }
end
conditional "[message] <= 'sample'" do
sample_one("apple") { expect(subject.get("tags") ).to include("success") }
sample_one("zebra") { expect(subject.get("tags") ).to include("failure") }
sample_one("sample") { expect(subject.get("tags") ).to include("success") }
sample_one("apple") { expect(subject.get("tags")).to include("success") }
sample_one("zebra") { expect(subject.get("tags")).to include("failure") }
sample_one("sample") { expect(subject.get("tags")).to include("success") }
end
conditional "[message] >= 'sample'" do
sample_one("zebra") { expect(subject.get("tags") ).to include("success") }
sample_one("sample") { expect(subject.get("tags") ).to include("success") }
sample_one("apple") { expect(subject.get("tags") ).to include("failure") }
sample_one("zebra") { expect(subject.get("tags")).to include("success") }
sample_one("sample") { expect(subject.get("tags")).to include("success") }
sample_one("apple") { expect(subject.get("tags")).to include("failure") }
end
conditional "[message] == 5" do
@ -357,9 +357,9 @@ describe "conditionals in filter" do
end
conditional "[message] =~ /sample/" do
sample_one("apple") { expect(subject.get("tags") ).to include("failure") }
sample_one("sample") { expect(subject.get("tags") ).to include("success") }
sample_one("some sample") { expect(subject.get("tags") ).to include("success") }
sample_one("apple") { expect(subject.get("tags")).to include("failure") }
sample_one("sample") { expect(subject.get("tags")).to include("success") }
sample_one("some sample") { expect(subject.get("tags")).to include("success") }
end
conditional "[message] !~ /sample/" do

View file

@ -34,7 +34,7 @@ describe LogStash::Api::Commands::DefaultMetadata do
before :all do
registerIfNot(LogStash::Setting::Boolean.new("xpack.monitoring.enabled", false))
registerIfNot(LogStash::Setting::ArrayCoercible.new("xpack.monitoring.elasticsearch.hosts", String, [ "http://localhost:9200" ] ))
registerIfNot(LogStash::Setting::ArrayCoercible.new("xpack.monitoring.elasticsearch.hosts", String, ["http://localhost:9200"]))
registerIfNot(LogStash::Setting::NullableString.new("xpack.monitoring.elasticsearch.username", "logstash_TEST system"))
registerIfNot(LogStash::Setting::NullableString.new("xpack.monitoring.elasticsearch.username", "logstash_TEST system"))
end

View file

@ -525,7 +525,7 @@ describe LogStash::Config::Mixin do
plugin_class.new(
"oneString" => "${notExistingVar:foo}",
"oneBoolean" => "${notExistingVar:true}",
"oneArray" => [ "first array value", "${notExistingVar:foo}", "${notExistingVar:}", "${notExistingVar: }", "${notExistingVar:foo bar}" ],
"oneArray" => ["first array value", "${notExistingVar:foo}", "${notExistingVar:}", "${notExistingVar: }", "${notExistingVar:foo bar}"],
"oneHash" => { "key" => "${notExistingVar:foo}" }
)
end
@ -556,7 +556,7 @@ describe LogStash::Config::Mixin do
plugin_class.new(
"oneString" => "${FunString:foo}",
"oneBoolean" => "${FunBool:false}",
"oneArray" => [ "first array value", "${FunString:foo}" ],
"oneArray" => ["first array value", "${FunString:foo}"],
"oneHash" => { "key1" => "${FunString:foo}", "key2" => "${FunString} is ${FunBool}", "key3" => "${FunBool:false} or ${funbool:false}" },
"nestedHash" => { "level1" => { "key1" => "http://${FunString}:8080/blah.txt" } },
"nestedArray" => { "level1" => [{ "key1" => "http://${FunString}:8080/blah.txt" }, { "key2" => "http://${FunString}:8080/foo.txt" }] },
@ -568,7 +568,7 @@ describe LogStash::Config::Mixin do
skip("This test fails on Windows, tracked in https://github.com/elastic/logstash/issues/10454")
expect(subject.oneString).to(be == "fancy")
expect(subject.oneBoolean).to(be_truthy)
expect(subject.oneArray).to(be == [ "first array value", "fancy" ])
expect(subject.oneArray).to(be == ["first array value", "fancy"])
expect(subject.oneHash).to(be == { "key1" => "fancy", "key2" => "fancy is true", "key3" => "true or false" })
expect(subject.nestedHash).to(be == { "level1" => { "key1" => "http://fancy:8080/blah.txt" } })
expect(subject.nestedArray).to(be == { "level1" => [{ "key1" => "http://fancy:8080/blah.txt" }, { "key2" => "http://fancy:8080/foo.txt" }] })

View file

@ -319,7 +319,7 @@ describe LogStash::Config::Source::Local do
context "when only the `config.string` is set" do
let(:settings) do
mock_settings( "config.string" => filter_block)
mock_settings("config.string" => filter_block)
end
it "returns a config" do
@ -330,7 +330,7 @@ describe LogStash::Config::Source::Local do
context "when only the `path.config` is set" do
let(:config_file) { temporary_file(input_block) }
let(:settings) do
mock_settings( "path.config" => config_file)
mock_settings("path.config" => config_file)
end
it "returns a config" do
@ -358,7 +358,7 @@ describe LogStash::Config::Source::Local do
end
let(:settings) do
mock_settings( "path.config" => remote_url)
mock_settings("path.config" => remote_url)
end
it "returns a config" do
@ -389,7 +389,7 @@ describe LogStash::Config::Source::Local do
file.close # we need to flush the write
path
end
let(:settings) { mock_settings( "path.config" => config_path) }
let(:settings) { mock_settings("path.config" => config_path) }
it "doesn't add anything" do
expect(subject.pipeline_configs.first.config_string).not_to include(LogStash::Config::Defaults.output, LogStash::Config::Defaults.input)
@ -397,7 +397,7 @@ describe LogStash::Config::Source::Local do
end
context "when the input block is missing" do
let(:settings) { mock_settings( "config.string" => "#{filter_block} #{output_block}") }
let(:settings) { mock_settings("config.string" => "#{filter_block} #{output_block}") }
it "add stdin input" do
expect(subject.pipeline_configs.first.config_string).to include(LogStash::Config::Defaults.input)
@ -405,7 +405,7 @@ describe LogStash::Config::Source::Local do
end
context "when the output block is missing" do
let(:settings) { mock_settings( "config.string" => "#{input_block} #{filter_block}") }
let(:settings) { mock_settings("config.string" => "#{input_block} #{filter_block}") }
it "add stdout output" do
expect(subject.pipeline_configs.first.config_string).to include(LogStash::Config::Defaults.output)
@ -413,7 +413,7 @@ describe LogStash::Config::Source::Local do
end
context "when both the output block and input block are missing" do
let(:settings) { mock_settings( "config.string" => "#{filter_block}") }
let(:settings) { mock_settings("config.string" => "#{filter_block}") }
it "add stdin and output" do
expect(subject.pipeline_configs.first.config_string).to include(LogStash::Config::Defaults.output, LogStash::Config::Defaults.input)
@ -421,7 +421,7 @@ describe LogStash::Config::Source::Local do
end
context "when it has an input and an output" do
let(:settings) { mock_settings( "config.string" => "#{input_block} #{filter_block} #{output_block}") }
let(:settings) { mock_settings("config.string" => "#{input_block} #{filter_block} #{output_block}") }
it "doesn't add anything" do
expect(subject.pipeline_configs.first.config_string).not_to include(LogStash::Config::Defaults.output, LogStash::Config::Defaults.input)

View file

@ -336,8 +336,8 @@ describe LogStash::Event do
it "should allow to pass a block that acts as an event factory" do
events = LogStash::Event.from_json(source_json) { |data| LogStash::Event.new(data).tap { |e| e.set('answer', 42) } }
expect( events.size ).to eql 1
expect( events.first.get('answer') ).to eql 42
expect(events.size).to eql 1
expect(events.first.get('answer')).to eql 42
end
end

View file

@ -18,7 +18,7 @@
require "logstash/instrument/metric_store"
describe LogStash::Instrument::MetricStore do
let(:namespaces) { [ :root, :pipelines, :pipeline_01 ] }
let(:namespaces) { [:root, :pipelines, :pipeline_01] }
let(:key) { :events_in }
let(:counter) { LogStash::Instrument::MetricType::Counter.new(namespaces, key) }

View file

@ -22,18 +22,18 @@ describe LogStash::Modules::CLIParser do
subject { LogStash::Modules::CLIParser.new(module_names, module_variables) }
let(:logger) { double("logger") }
let(:module_name) { "foo" }
let(:module_names) { [ module_name, "bar" ] }
let(:module_names) { [module_name, "bar"] }
let(:proto_key_value) { "var.input.stdin.type=example" }
let(:proto_mod_vars) { module_name + "." + proto_key_value }
let(:module_variables) { [ proto_mod_vars ] }
let(:module_variables) { [proto_mod_vars] }
let(:expected_output) { { "name" => module_name, "var.input.stdin.type" => "example" } }
describe ".parse_modules" do
let(:module1) { "module1" }
let(:module2) { "module2" }
let(:csv_modules) { "#{module1},#{module2}" }
let(:list_with_csv) { [ module_name, csv_modules ] }
let(:post_parse) { [ module_name, module1, module2 ] }
let(:list_with_csv) { [module_name, csv_modules] }
let(:post_parse) { [module_name, module1, module2] }
context "when it receives an array without a csv entry" do
it "return the array unaltered" do
@ -54,7 +54,7 @@ describe LogStash::Modules::CLIParser do
end
context "when it receives an array with a bad csv entry" do
let(:bad_modules) { [ "-Minvalid", module1 ] }
let(:bad_modules) { ["-Minvalid", module1] }
it "raise a LogStash::ConfigLoadingError exception" do
expect { subject.parse_modules(bad_modules) }.to raise_error LogStash::ConfigLoadingError
end
@ -126,7 +126,7 @@ describe LogStash::Modules::CLIParser do
describe ".parse_it" do
context "when it receives a valid module_list and module_variable_list" do
let(:module_names) { [ module_name ]}
let(:module_names) { [module_name]}
it "@output is array of hashes with the module name and associated variables as key value pairs" do
expect(subject.output).to eq([expected_output])
end

View file

@ -63,7 +63,7 @@ describe LogStash::PersistedQueueConfigValidator do
before do
# create a 2MB file
::File.open(page_file, 'wb') do |f|
f.write( SecureRandom.random_bytes( 2**21 ) )
f.write(SecureRandom.random_bytes(2**21))
end
end
@ -164,13 +164,13 @@ describe LogStash::PersistedQueueConfigValidator do
it "gives true when add a new pipeline " do
pq_config_validator.instance_variable_set(:@last_check_pass, true)
pq_config_validator.instance_variable_set(:@last_check_pipeline_configs, pipeline_configs )
pq_config_validator.instance_variable_set(:@last_check_pipeline_configs, pipeline_configs)
expect(pq_config_validator.cache_check_fail?(pipeline_configs2)).to be_truthy
end
it "gives false when remove a old pipeline" do
pq_config_validator.instance_variable_set(:@last_check_pass, true)
pq_config_validator.instance_variable_set(:@last_check_pipeline_configs, pipeline_configs2 )
pq_config_validator.instance_variable_set(:@last_check_pipeline_configs, pipeline_configs2)
expect(pq_config_validator.cache_check_fail?(pipeline_configs)).to be_falsey
end
end
@ -178,7 +178,7 @@ describe LogStash::PersistedQueueConfigValidator do
context("last check fail") do
it "gives true" do
pq_config_validator.instance_variable_set(:@last_check_pass, false)
pq_config_validator.instance_variable_set(:@last_check_pipeline_configs, pipeline_configs )
pq_config_validator.instance_variable_set(:@last_check_pipeline_configs, pipeline_configs)
expect(pq_config_validator.cache_check_fail?(pipeline_configs)).to be_truthy
end
end
@ -186,7 +186,7 @@ describe LogStash::PersistedQueueConfigValidator do
context("no update and last check pass") do
it "gives false" do
pq_config_validator.instance_variable_set(:@last_check_pass, true)
pq_config_validator.instance_variable_set(:@last_check_pipeline_configs, pipeline_configs )
pq_config_validator.instance_variable_set(:@last_check_pipeline_configs, pipeline_configs)
expect(pq_config_validator.cache_check_fail?(pipeline_configs)).to be_falsey
end
end

View file

@ -22,7 +22,7 @@ describe ::LogStash::Plugins::Builtin::Pipeline do
let(:input_options) { { "address" => address }}
let(:output_options) { { "send_to" => [address] }}
let(:execution_context) { double("execution_context" )}
let(:execution_context) { double("execution_context")}
let(:agent) { double("agent") }
let(:pipeline_bus) { org.logstash.plugins.pipeline.PipelineBus.new }

View file

@ -38,9 +38,9 @@ describe LogStash::Plugins::EventFactorySupport do
shared_examples 'an event factory' do
it 'returns an event' do
expect( event_factory.new_event ).to be_a LogStash::Event
expect( event = event_factory.new_event('foo' => 'bar') ).to be_a LogStash::Event
expect( event.get('foo') ).to eql 'bar'
expect(event_factory.new_event).to be_a LogStash::Event
expect(event = event_factory.new_event('foo' => 'bar')).to be_a LogStash::Event
expect(event.get('foo')).to eql 'bar'
end
end
@ -50,7 +50,7 @@ describe LogStash::Plugins::EventFactorySupport do
it_behaves_like 'an event factory'
it 'memoizes the factory instance' do
expect( event_factory ).to be plugin.send(:event_factory)
expect(event_factory).to be plugin.send(:event_factory)
end
end
@ -76,29 +76,29 @@ describe LogStash::Plugins::EventFactorySupport do
end
it 'memoizes the factory instance' do
expect( targeted_event_factory ).to be plugin.send(:targeted_event_factory)
expect(targeted_event_factory).to be plugin.send(:targeted_event_factory)
end
it 'uses the basic event factory (no target specified)' do
expect( targeted_event_factory ).to be plugin.send(:event_factory)
expect(targeted_event_factory).to be plugin.send(:event_factory)
end
context 'with target' do
let(:options) { super().merge('target' => '[the][baz]') }
it 'returns an event' do
expect( targeted_event_factory.new_event ).to be_a LogStash::Event
expect( event = targeted_event_factory.new_event('foo' => 'bar') ).to be_a LogStash::Event
expect( event.include?('foo') ).to be false
expect( event.get('[the][baz][foo]') ).to eql 'bar'
expect(targeted_event_factory.new_event).to be_a LogStash::Event
expect(event = targeted_event_factory.new_event('foo' => 'bar')).to be_a LogStash::Event
expect(event.include?('foo')).to be false
expect(event.get('[the][baz][foo]')).to eql 'bar'
end
it 'memoizes the factory instance' do
expect( targeted_event_factory ).to be plugin.send(:targeted_event_factory)
expect(targeted_event_factory).to be plugin.send(:targeted_event_factory)
end
it 'uses a different factory from the basic one' do
expect( targeted_event_factory ).not_to be plugin.send(:event_factory)
expect(targeted_event_factory).not_to be plugin.send(:event_factory)
end
end
@ -109,9 +109,9 @@ describe LogStash::Plugins::EventFactorySupport do
it 'works' do
events = LogStash::Event.from_json(json) { |data| targeted_event_factory.new_event(data) }
expect( events.size ).to eql 2
expect( events[0].get('[internal]') ).to eql 'foo' => 'bar'
expect( events[1].get('[internal]') ).to eql 'baz' => { 'a' => 1 }
expect(events.size).to eql 2
expect(events[0].get('[internal]')).to eql 'foo' => 'bar'
expect(events[1].get('[internal]')).to eql 'baz' => { 'a' => 1 }
end
end
end

View file

@ -20,7 +20,7 @@ require "logstash/settings"
describe LogStash::Setting::ArrayCoercible do
subject { described_class.new("option", element_class, value) }
let(:value) { [ ] }
let(:value) { [] }
let(:element_class) { Object }
context "when given a non array value" do
@ -76,7 +76,7 @@ describe LogStash::Setting::ArrayCoercible do
end
end
context "and the other also the same value in an array" do
let(:value_2) { [ "a string" ] }
let(:value_2) { ["a string"] }
it "should be equal" do
expect(setting_1).to be == setting_2
end
@ -88,7 +88,7 @@ describe LogStash::Setting::ArrayCoercible do
end
end
context "and the other a different value in an array" do
let(:value_2) { [ "a different string" ] }
let(:value_2) { ["a different string"] }
it "should be equal" do
expect(setting_1).to_not be == setting_2
end
@ -96,9 +96,9 @@ describe LogStash::Setting::ArrayCoercible do
end
context "where one was given a value in an array" do
let(:value_1) { [ "a string"] }
let(:value_1) { ["a string"] }
context "and the other the same value in an array" do
let(:value_2) { [ "a string" ] }
let(:value_2) { ["a string"] }
it "should be equal" do
expect(setting_1).to be == setting_2
end
@ -110,7 +110,7 @@ describe LogStash::Setting::ArrayCoercible do
end
end
context "and the other a different value in an array" do
let(:value_2) { [ "a different string" ] }
let(:value_2) { ["a different string"] }
it "should be equal" do
expect(setting_1).to_not be == setting_2
end

View file

@ -56,7 +56,7 @@ describe LogStash::Setting::Bytes do
end
context "which is not a valid byte unit" do
values = [ "hello world", "1234", "", "-__-" ]
values = ["hello world", "1234", "", "-__-"]
values.each do |value|
it "should fail" do
expect { subject.set(value) }.to raise_error

View file

@ -33,7 +33,7 @@ describe LogStash::Settings do
expect { subject.register(numeric_setting) }.to raise_error
end
it "registered? should return true" do
expect( subject.registered?(numeric_setting_name)).to be_truthy
expect(subject.registered?(numeric_setting_name)).to be_truthy
end
end
context "if setting hasn't been registered" do
@ -41,7 +41,7 @@ describe LogStash::Settings do
expect { subject.register(numeric_setting) }.to_not raise_error
end
it "registered? should return false" do
expect( subject.registered?(numeric_setting_name)).to be_falsey
expect(subject.registered?(numeric_setting_name)).to be_falsey
end
end
end

View file

@ -68,7 +68,7 @@ describe LogStash::StateResolver do
end
context "when the pipeline config contains a new one and the existing" do
let(:pipeline_configs) { [mock_pipeline_config(:hello_world), main_pipeline_config ] }
let(:pipeline_configs) { [mock_pipeline_config(:hello_world), main_pipeline_config] }
it "creates the new one and keep the other one" do
expect(subject.resolve(pipelines, pipeline_configs)).to have_actions(
@ -186,7 +186,7 @@ describe LogStash::StateResolver do
end
context "when pipeline config contains a new one and the existing" do
let(:pipeline_configs) { [mock_pipeline_config(:hello_world), main_pipeline_config ] }
let(:pipeline_configs) { [mock_pipeline_config(:hello_world), main_pipeline_config] }
it "creates the new one and keep the other one stop" do
expect(subject.resolve(pipelines, pipeline_configs)).to have_actions([:Create, :hello_world])

View file

@ -163,7 +163,7 @@ describe "LogStash::Util::Accessors", :if => class_exists do
str = "[hello][0]"
data = {"hello" => ["foo", "bar"]}
accessors = LogStash::Util::Accessors.new(data)
expect(accessors.set(str, "world") ).to eq("world")
expect(accessors.set(str, "world")).to eq("world")
expect(data).to eq({"hello" => ["world", "bar"]})
end
@ -175,7 +175,7 @@ describe "LogStash::Util::Accessors", :if => class_exists do
end
it "should retrieve array item containing hash" do
data = { "hello" => { "world" => [ { "a" => 123 }, { "b" => 345 } ], "bar" => "baz" } }
data = { "hello" => { "world" => [{ "a" => 123 }, { "b" => 345 }], "bar" => "baz" } }
accessors = LogStash::Util::Accessors.new(data)
expect(accessors.get("[hello][world][0][a]")).to eq(data["hello"]["world"][0]["a"])
expect(accessors.get("[hello][world][1][b]")).to eq(data["hello"]["world"][1]["b"])

View file

@ -86,8 +86,8 @@ describe LogStash::Util do
describe ".get_thread_id" do
it "returns native identifier" do
thread_id = LogStash::Util.get_thread_id(Thread.current)
expect( thread_id ).to be_a Integer
expect( thread_id ).to eq(java.lang.Thread.currentThread.getId)
expect(thread_id).to be_a Integer
expect(thread_id).to eq(java.lang.Thread.currentThread.getId)
end
end
end

View file

@ -44,7 +44,7 @@ experimental = (ENV['LS_QA_EXPERIMENTAL_OS'].to_s.downcase || "false") == "true"
config = PlatformConfig.new
LOGSTASH_LATEST_VERSION = config.latest
default_vagrant_boxes = ( platform == 'all' ? config.platforms : config.filter_type(platform, {"experimental" => experimental}) )
default_vagrant_boxes = (platform == 'all' ? config.platforms : config.filter_type(platform, {"experimental" => experimental}))
selected_boxes = if ENV.include?('LS_VAGRANT_HOST') then
config.platforms.select { |p| p.name == ENV['LS_VAGRANT_HOST'] }

View file

@ -26,7 +26,7 @@ class TestSettings
FIXTURES_DIR = File.join(INTEG_TESTS_DIR, "fixtures")
def initialize(test_file_path)
test_name = File.basename(test_file_path, ".*" )
test_name = File.basename(test_file_path, ".*")
@tests_settings_file = File.join(FIXTURES_DIR, "#{test_name}.yml")
# Global suite settings
@suite_settings = YAML.load(ERB.new(File.new(SUITE_SETTINGS_FILE).read).result)

View file

@ -4,7 +4,7 @@ Gem::Specification.new do |s|
s.licenses = ['Apache License (2.0)']
s.summary = "Tests LS binary"
s.description = "This is a Logstash integration test helper gem"
s.authors = [ "Elastic"]
s.authors = ["Elastic"]
s.email = 'info@elastic.co'
s.homepage = "http://www.elastic.co/guide/en/logstash/current/index.html"

View file

@ -299,7 +299,7 @@ class LogstashService < Service
end
def run(*args)
run_cmd [ @logstash_bin, *args ]
run_cmd [@logstash_bin, *args]
end
class PluginCli

View file

@ -51,8 +51,8 @@ describe "uncaught exception" do
expect(@logstash.exit_code).to be 120
log_file = "#{logs_dir}/logstash-plain.log"
expect( File.exist?(log_file) ).to be true
expect( File.read(log_file) ).to match /\[FATAL\]\[org.logstash.Logstash.*?java.lang.AssertionError: a fatal error/m
expect(File.exist?(log_file)).to be true
expect(File.read(log_file)).to match /\[FATAL\]\[org.logstash.Logstash.*?java.lang.AssertionError: a fatal error/m
end
it "logs unexpected exception (from Java thread)" do
@ -64,8 +64,8 @@ describe "uncaught exception" do
expect(@logstash.exit_code).to be 0 # normal exit
log_file = "#{logs_dir}/logstash-plain.log"
expect( File.exist?(log_file) ).to be true
expect( File.read(log_file) ).to match /\[ERROR\]\[org.logstash.Logstash.*?uncaught exception \(in thread .*?java.io.EOFException: unexpected/m
expect(File.exist?(log_file)).to be true
expect(File.read(log_file)).to match /\[ERROR\]\[org.logstash.Logstash.*?uncaught exception \(in thread .*?java.io.EOFException: unexpected/m
end
def spawn_logstash_and_wait_for_exit!(config, timeout)

View file

@ -102,7 +102,7 @@ describe "Test Monitoring API" do
end
context "when a drop filter is in the pipeline" do
let(:config) { @fixture.config("dropping_events", { :port => tcp_port } ) }
let(:config) { @fixture.config("dropping_events", { :port => tcp_port }) }
it 'expose the correct output counter' do
try(max_retry) do
@ -118,7 +118,7 @@ describe "Test Monitoring API" do
end
context "when a clone filter is in the pipeline" do
let(:config) { @fixture.config("cloning_events", { :port => tcp_port } ) }
let(:config) { @fixture.config("cloning_events", { :port => tcp_port }) }
it 'expose the correct output counter' do
try(max_retry) do

View file

@ -105,7 +105,7 @@ namespace "artifact" do
def oss_exclude_paths
return @oss_excludes if @oss_excludes
@oss_excludes = default_exclude_paths + [ "x-pack/**/*" ]
@oss_excludes = default_exclude_paths + ["x-pack/**/*"]
end
def files(exclude_paths = default_exclude_paths)
@ -388,7 +388,7 @@ namespace "artifact" do
"build_snapshot" => SNAPSHOT_BUILD
}
metadata = [ "# encoding: utf-8", "BUILD_INFO = #{build_info}" ]
metadata = ["# encoding: utf-8", "BUILD_INFO = #{build_info}"]
IO.write(BUILD_METADATA_FILE.path, metadata.join("\n"))
end

View file

@ -15,4 +15,4 @@
# specific language governing permissions and limitations
# under the License.
task "bootstrap" => [ "vendor:all", "compile:all" ]
task "bootstrap" => ["vendor:all", "compile:all"]

View file

@ -84,4 +84,4 @@ namespace "test" do
task "install-default" => ["bootstrap", "plugin:install-default", "plugin:install-development-dependencies"]
end
task "test" => [ "test:core" ]
task "test" => ["test:core"]

View file

@ -24,7 +24,7 @@ describe "Project licenses" do
# Expected licenses are Apache License 2.0, BSD license, MIT license and the ruby one,
# this not exclude that this list change in the feature.
##
Regexp.union([ /mit/,
Regexp.union([/mit/,
/apache*/,
/bsd/,
/artistic 2.*/,
@ -32,7 +32,7 @@ describe "Project licenses" do
/lgpl/,
/epl/,
/elastic/i
])
])
}
##

View file

@ -21,7 +21,7 @@ describe LogStash::PluginManager::InstallStrategyFactory do
subject { described_class }
context "when the plugins args is valid" do
let(:plugins_args) { [ "logstash-pack-mega" ] }
let(:plugins_args) { ["logstash-pack-mega"] }
it "returns the first matched strategy" do
success = double("urifetch success")

View file

@ -39,7 +39,7 @@ describe LogStash::PluginManager::Update do
context "when skipping validation" do
let(:cmd) { LogStash::PluginManager::Update.new("update") }
let(:plugin) { OpenStruct.new(:name => "dummy", :options => {} ) }
let(:plugin) { OpenStruct.new(:name => "dummy", :options => {}) }
before(:each) do
expect(cmd.gemfile).to receive(:find).with(plugin).and_return(plugin)

View file

@ -24,7 +24,7 @@ describe LogStash::PluginManager do
let(:plugin_name) { "logstash-output-elasticsearch" }
let(:version_data) do
[ { "authors" => "Elastic", "built_at" => "2015-08-11T00:00:00.000Z", "description" => "Output events to elasticsearch",
[{ "authors" => "Elastic", "built_at" => "2015-08-11T00:00:00.000Z", "description" => "Output events to elasticsearch",
"downloads_count" => 1638, "metadata" => {"logstash_group" => "output", "logstash_plugin" => "true"}, "number" => "2.0.0.pre",
"summary" => "Logstash Output to Elasticsearch", "platform" => "java", "ruby_version" => ">= 0", "prerelease" => true,
"licenses" => ["apache-2.0"], "requirements" => [], "sha" => "194b27099c13605a882a3669e2363fdecccaab1de48dd44b0cda648dd5516799"},
@ -35,7 +35,7 @@ describe LogStash::PluginManager do
{ "authors" => "Elastic", "built_at" => "2015-08-09T00:00:00.000Z", "description" => "Output events to elasticsearch",
"downloads_count" => 1638, "metadata" => {"logstash_group" => "output", "logstash_plugin" => "true"}, "number" => "1.0.4",
"summary" => "Logstash Output to Elasticsearch", "platform" => "java", "ruby_version" => ">= 0", "prerelease" => false,
"licenses" => ["apache-2.0"], "requirements" => [], "sha" => "194b27099c13605a882a3669e2363fdecccaab1de48dd44b0cda648dd5516799"} ]
"licenses" => ["apache-2.0"], "requirements" => [], "sha" => "194b27099c13605a882a3669e2363fdecccaab1de48dd44b0cda648dd5516799"}]
end
before(:each) do

View file

@ -66,7 +66,7 @@ describe LogStash::Util::Zip do
end
let(:zip_file) do
[ "foo", "bar", "zoo" ].inject([]) do |acc, name|
["foo", "bar", "zoo"].inject([]) do |acc, name|
acc << OpenStruct.new(:name => name)
acc
end
@ -134,7 +134,7 @@ describe LogStash::Util::Zip do
end
let(:dir_files) do
[ "foo", "bar", "zoo" ]
["foo", "bar", "zoo"]
end
let(:zip_file) { Class.new }
@ -163,7 +163,7 @@ describe LogStash::Util::Tar do
let(:gzip_file) { Class.new }
let(:tar_file) do
[ "foo", "bar", "zoo" ].inject([]) do |acc, name|
["foo", "bar", "zoo"].inject([]) do |acc, name|
acc << OpenStruct.new(:full_name => name)
acc
end
@ -189,7 +189,7 @@ describe LogStash::Util::Tar do
end
let(:dir_files) do
[ "foo", "bar", "zoo" ]
["foo", "bar", "zoo"]
end
let(:tar_file) { Class.new }

View file

@ -88,7 +88,7 @@ module LogStash module Docgen
g.fetch
g.merge("origin/main")
else
g = Git.clone(repository, path, :depth => 1 )
g = Git.clone(repository, path, :depth => 1)
end
end

View file

@ -79,10 +79,10 @@ module LogStash module Docgen
CANONICAL_NAME_PREFIX = "logstash"
GLOBAL_BLOCKLIST = ["enable_metric", "id"]
BLOCKLIST = {
"input" => GLOBAL_BLOCKLIST + [ "type", "debug", "format", "charset", "message_format", "codec", "tags", "add_field"],
"input" => GLOBAL_BLOCKLIST + ["type", "debug", "format", "charset", "message_format", "codec", "tags", "add_field"],
"codec" => GLOBAL_BLOCKLIST,
"output" => GLOBAL_BLOCKLIST + [ "type", "tags", "exclude_tags", "codec", "workers" ],
"filter" => GLOBAL_BLOCKLIST + ["type", "tags", "add_tag", "remove_tag", "add_field", "remove_field", "periodic_flush" ]
"output" => GLOBAL_BLOCKLIST + ["type", "tags", "exclude_tags", "codec", "workers"],
"filter" => GLOBAL_BLOCKLIST + ["type", "tags", "add_tag", "remove_tag", "add_field", "remove_field", "periodic_flush"]
}
attr_accessor :description, :config_name, :section, :name, :default_plugin, :gemspec

View file

@ -38,12 +38,12 @@ module LogStash::Docgen
def initialize(context)
@rules = [
[ COMMENT_RE, :parse_comment ],
[ CLASS_DEFINITION_RE, :parse_class_description ],
[ NEW_CLASS_DEFINITION_RE_ML, :parse_new_class_description ],
[ CONFIG_OPTION_RE, :parse_config ],
[ CONFIG_NAME_RE, :parse_config_name ],
[ RESET_BUFFER_RE, :reset_buffer ]
[COMMENT_RE, :parse_comment],
[CLASS_DEFINITION_RE, :parse_class_description],
[NEW_CLASS_DEFINITION_RE_ML, :parse_new_class_description],
[CONFIG_OPTION_RE, :parse_config],
[CONFIG_NAME_RE, :parse_config_name],
[RESET_BUFFER_RE, :reset_buffer]
]
@context = context

View file

@ -31,7 +31,7 @@ module Paquet
uri = URI.parse(source)
http = Net::HTTP.new(uri.host, uri.port, )
http = Net::HTTP.new(uri.host, uri.port,)
http.use_ssl = uri.scheme == HTTPS_SCHEME
response = http.get(uri.path)

View file

@ -26,7 +26,7 @@ module LogStash
settings.register(LogStash::Setting::ArrayCoercible.new("xpack.management.pipeline.id", String, ["main"]))
settings.register(LogStash::Setting::NullableString.new("xpack.management.elasticsearch.username", "logstash_system"))
settings.register(LogStash::Setting::NullableString.new("xpack.management.elasticsearch.password"))
settings.register(LogStash::Setting::ArrayCoercible.new("xpack.management.elasticsearch.hosts", String, [ "https://localhost:9200" ] ))
settings.register(LogStash::Setting::ArrayCoercible.new("xpack.management.elasticsearch.hosts", String, ["https://localhost:9200"]))
settings.register(LogStash::Setting::NullableString.new("xpack.management.elasticsearch.cloud_id"))
settings.register(LogStash::Setting::NullableString.new("xpack.management.elasticsearch.cloud_auth"))
settings.register(LogStash::Setting::NullableString.new("xpack.management.elasticsearch.api_key"))

View file

@ -260,7 +260,7 @@ module LogStash
private
def register_monitoring_settings(settings, prefix = "")
settings.register(LogStash::Setting::Boolean.new("#{prefix}monitoring.enabled", false))
settings.register(LogStash::Setting::ArrayCoercible.new("#{prefix}monitoring.elasticsearch.hosts", String, [ "http://localhost:9200" ] ))
settings.register(LogStash::Setting::ArrayCoercible.new("#{prefix}monitoring.elasticsearch.hosts", String, ["http://localhost:9200"]))
settings.register(LogStash::Setting::TimeValue.new("#{prefix}monitoring.collection.interval", "10s"))
settings.register(LogStash::Setting::TimeValue.new("#{prefix}monitoring.collection.timeout_interval", "10m"))
settings.register(LogStash::Setting::NullableString.new("#{prefix}monitoring.elasticsearch.username", "logstash_system"))

View file

@ -53,7 +53,7 @@ module LogStash
ssl_key = bound_scope.setting("var.input.tcp.ssl_key", "")
lines.push("ssl_key => '#{ssl_key}'") unless ssl_key.empty?
lines.push("ssl_key_passphrase => '#{ bound_scope.setting("var.input.tcp.ssl_key_passphrase", "")}'")
lines.push("ssl_key_passphrase => '#{bound_scope.setting("var.input.tcp.ssl_key_passphrase", "")}'")
certs_array_as_string = bound_scope.array_to_string(
bound_scope.get_setting(LogStash::Setting::SplittableStringArray.new("var.input.tcp.ssl_extra_chain_certs", String, []))

View file

@ -69,8 +69,8 @@ describe "Read configuration from elasticsearch" do
end
it "reloads the configuration when its different from the running pipeline" do
[ File.join(Stud::Temporary.directory, "hello.log"),
File.join(Stud::Temporary.directory, "whole-new-file.log") ].each do |temporary_file|
[File.join(Stud::Temporary.directory, "hello.log"),
File.join(Stud::Temporary.directory, "whole-new-file.log")].each do |temporary_file|
new_config = "input { generator { count => 10000 }} output { file { path => '#{temporary_file}' } }"
expect(File.exist?(temporary_file)).to be_falsey

View file

@ -158,7 +158,7 @@ def logstash_with_empty_default(cmd, options = {}, default_settings = {})
logstash_yaml = File.join(temporary_settings, "logstash.yml")
IO.write(logstash_yaml, YAML::dump(default_settings.merge(options.fetch(:settings, {}))))
FileUtils.cp(File.join(get_logstash_path, "config", "log4j2.properties"), File.join(temporary_settings, "log4j2.properties") )
FileUtils.cp(File.join(get_logstash_path, "config", "log4j2.properties"), File.join(temporary_settings, "log4j2.properties"))
puts "Running logstash with #{cmd} in #{get_logstash_path} with settings #{options.inspect}"
Belzebuth.run(cmd, {:directory => get_logstash_path }.merge(options.fetch(:belzebuth, { })))

View file

@ -227,17 +227,17 @@ describe LogStash::Filters::Geoip do
it 'sets up periodic task when download triggered' do
db_manager.send :trigger_download
download_task = db_manager.instance_variable_get(:@download_task)
expect( download_task ).to_not be nil
expect( download_task.running? ).to be true
expect( download_task.execution_interval ).to eq 86_400
expect(download_task).to_not be nil
expect(download_task.running?).to be true
expect(download_task.execution_interval).to eq 86_400
end
it 'executes download job after interval passes' do
db_manager.instance_variable_set(:@download_interval, 1.5)
db_manager.send :trigger_download
download_task = db_manager.instance_variable_get(:@download_task)
expect( download_task.running? ).to be true
expect( db_manager ).to receive :database_update_check
expect(download_task.running?).to be true
expect(db_manager).to receive :database_update_check
sleep 2.0 # wait for task execution
end
end
@ -396,7 +396,7 @@ describe LogStash::Filters::Geoip do
eula_db_dirname = get_dir_path("foo")
FileUtils.mkdir_p(eula_db_dirname)
rewrite_temp_metadata(metadata_path, [ ["City", "1620246514", "", "foo", true],
rewrite_temp_metadata(metadata_path, [["City", "1620246514", "", "foo", true],
["ASN", "1620246514", "", "foo", true]])
path = db_manager.subscribe_database_path(CITY, nil, mock_geoip_plugin)

View file

@ -198,8 +198,8 @@ describe LogStash::Filters::Geoip do
context "reset md5" do
it "should reset md5 to empty string only" do
rewrite_temp_metadata(temp_metadata_path, [ ["ASN", "1620246514", "SOME MD5", "1620246514", true],
["City", "1620246514", "SOME MD5", "1620246514", true] ])
rewrite_temp_metadata(temp_metadata_path, [["ASN", "1620246514", "SOME MD5", "1620246514", true],
["City", "1620246514", "SOME MD5", "1620246514", true]])
dbm.reset_md5(database_type)
row = dbm.get_metadata(database_type).last
@ -212,8 +212,8 @@ describe LogStash::Filters::Geoip do
context "dirnames" do
it "should reset md5 to empty string only" do
write_temp_metadata(temp_metadata_path, city2_metadata)
rewrite_temp_metadata(temp_metadata_path, [ ["ASN", "1620246514", "SOME MD5", "CC", true],
city2_metadata ])
rewrite_temp_metadata(temp_metadata_path, [["ASN", "1620246514", "SOME MD5", "CC", true],
city2_metadata])
dirnames = dbm.dirnames
expect(dirnames).to match_array([second_dirname, "CC"])

View file

@ -23,15 +23,15 @@ module GeoipHelper
end
def default_city_db_path
::File.join(get_data_dir_path, "CC", default_city_db_name )
::File.join(get_data_dir_path, "CC", default_city_db_name)
end
def default_city_gz_path
::File.join(get_data_dir_path, "CC", "GeoLite2-City.tgz" )
::File.join(get_data_dir_path, "CC", "GeoLite2-City.tgz")
end
def default_asn_db_path
::File.join(get_data_dir_path, "CC", default_asn_db_name )
::File.join(get_data_dir_path, "CC", default_asn_db_name)
end
def metadata_path
@ -47,11 +47,11 @@ module GeoipHelper
end
def second_city_db_path
::File.join(get_data_dir_path, second_dirname, default_city_db_name )
::File.join(get_data_dir_path, second_dirname, default_city_db_name)
end
def second_asn_db_path
::File.join(get_data_dir_path, second_dirname, default_asn_db_name )
::File.join(get_data_dir_path, second_dirname, default_asn_db_name)
end
def second_dirname

View file

@ -124,7 +124,7 @@ describe LogStash::LicenseChecker::LicenseManager do
context 'when the type changes' do
let(:new_type) { 'basic' }
let(:second_license) do
{ 'license' => license['license'].merge( { 'type' => new_type })}
{ 'license' => license['license'].merge({ 'type' => new_type })}
end
it 'updates observers' do
@ -139,7 +139,7 @@ describe LogStash::LicenseChecker::LicenseManager do
context 'when the status changes' do
let(:new_status) { 'expired' }
let(:second_license) do
{ 'license' => license['license'].merge( { 'status' => new_status })}
{ 'license' => license['license'].merge({ 'status' => new_status })}
end
it 'updates observers' do
expect(license_reader).to receive(:fetch_xpack_info).and_return LogStash::LicenseChecker::XPackInfo.from_es_response(second_license)

View file

@ -25,7 +25,7 @@ describe LogStash::LicenseChecker::LicenseReader do
let(:settings) do
{
"xpack.monitoring.enabled" => true,
"xpack.monitoring.elasticsearch.hosts" => [ elasticsearch_url],
"xpack.monitoring.elasticsearch.hosts" => [elasticsearch_url],
"xpack.monitoring.elasticsearch.username" => elasticsearch_username,
"xpack.monitoring.elasticsearch.password" => elasticsearch_password,
}
@ -125,10 +125,10 @@ describe LogStash::LicenseChecker::LicenseReader do
end
it "builds ES client" do
expect( subject.client.options[:hosts].size ).to eql 1
expect( subject.client.options[:hosts][0].to_s ).to eql elasticsearch_url # URI#to_s
expect( subject.client.options ).to include(:user => elasticsearch_username, :password => elasticsearch_password)
expect( subject.client.client_settings[:headers] ).to include(product_origin_header)
expect(subject.client.options[:hosts].size).to eql 1
expect(subject.client.options[:hosts][0].to_s).to eql elasticsearch_url # URI#to_s
expect(subject.client.options).to include(:user => elasticsearch_username, :password => elasticsearch_password)
expect(subject.client.client_settings[:headers]).to include(product_origin_header)
end
context 'with cloud_id' do
@ -148,10 +148,10 @@ describe LogStash::LicenseChecker::LicenseReader do
end
it "builds ES client" do
expect( subject.client.options[:hosts].size ).to eql 1
expect( subject.client.options[:hosts][0].to_s ).to eql 'https://e1e631201fb64d55a75f431eb6349589.westeurope.azure.elastic-cloud.com:9243'
expect( subject.client.options ).to include(:user => 'elastic', :password => 'LnWMLeK3EQPTf3G3F1IBdFvO')
expect( subject.client.client_settings[:headers] ).to include(product_origin_header)
expect(subject.client.options[:hosts].size).to eql 1
expect(subject.client.options[:hosts][0].to_s).to eql 'https://e1e631201fb64d55a75f431eb6349589.westeurope.azure.elastic-cloud.com:9243'
expect(subject.client.options).to include(:user => 'elastic', :password => 'LnWMLeK3EQPTf3G3F1IBdFvO')
expect(subject.client.client_settings[:headers]).to include(product_origin_header)
end
end
@ -166,8 +166,8 @@ describe LogStash::LicenseChecker::LicenseReader do
end
it "builds ES client" do
expect( subject.client.client_settings[:headers] ).to include("Authorization" => "ApiKey Zm9vOmJhcg==")
expect( subject.client.client_settings[:headers] ).to include(product_origin_header)
expect(subject.client.client_settings[:headers]).to include("Authorization" => "ApiKey Zm9vOmJhcg==")
expect(subject.client.client_settings[:headers]).to include(product_origin_header)
end
end
end

View file

@ -23,7 +23,7 @@ describe LogStash::Inputs::TimerTaskLogger do
let(:exception) { Concurrent::TimeoutError.new }
it "logs the exception in debug mode" do
expect(subject.logger).to receive(:debug).with(/metric shipper/, hash_including(:exception => exception.class, :message => exception.message ))
expect(subject.logger).to receive(:debug).with(/metric shipper/, hash_including(:exception => exception.class, :message => exception.message))
subject.update(run_at, result, exception)
end
end
@ -32,7 +32,7 @@ describe LogStash::Inputs::TimerTaskLogger do
let(:exception) { ArgumentError.new }
it "logs the exception in debug mode" do
expect(subject.logger).to receive(:error).with(/metric shipper/, hash_including(:exception => exception.class, :message => exception.message ))
expect(subject.logger).to receive(:error).with(/metric shipper/, hash_including(:exception => exception.class, :message => exception.message))
subject.update(run_at, result, exception)
end
end