Rubocop: Enable SpaceBefore cops (#15197)

Enables the following cops:

 * Layout/SpaceBeforeBlockBraces
 * Layout/SpaceBeforeBrackets
 * Layout/SpaceBeforeComma
 * Layout/SpaceBeforeComment
 * Layout/SpaceBeforeFirstArg
 * Layout/SpaceBeforeSemicolon
This commit is contained in:
Andres Rodriguez 2023-07-18 22:32:17 -04:00 committed by GitHub
parent 4255a8fd1c
commit 2165d43e1a
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
57 changed files with 147 additions and 147 deletions

View file

@ -77,6 +77,18 @@ Layout/SpaceAroundMethodCallOperator:
Enabled: true Enabled: true
Layout/SpaceAroundOperators: Layout/SpaceAroundOperators:
Enabled: true Enabled: true
Layout/SpaceBeforeBlockBraces:
Enabled: true
Layout/SpaceBeforeBrackets:
Enabled: true
Layout/SpaceBeforeComma:
Enabled: true
Layout/SpaceBeforeComment:
Enabled: true
Layout/SpaceBeforeFirstArg:
Enabled: true
Layout/SpaceBeforeSemicolon:
Enabled: true
##### Need review ##### ##### Need review #####
Layout/AccessModifierIndentation: Layout/AccessModifierIndentation:
@ -199,18 +211,6 @@ Layout/SpaceAfterMethodName:
Enabled: false Enabled: false
Layout/SpaceAfterNot: Layout/SpaceAfterNot:
Enabled: false Enabled: false
Layout/SpaceBeforeBlockBraces:
Enabled: false
Layout/SpaceBeforeBrackets:
Enabled: false
Layout/SpaceBeforeComma:
Enabled: false
Layout/SpaceBeforeComment:
Enabled: false
Layout/SpaceBeforeFirstArg:
Enabled: false
Layout/SpaceBeforeSemicolon:
Enabled: false
Layout/SpaceInLambdaLiteral: Layout/SpaceInLambdaLiteral:
Enabled: false Enabled: false
Layout/SpaceInsideArrayLiteralBrackets: Layout/SpaceInsideArrayLiteralBrackets:

View file

@ -115,7 +115,7 @@ module LogStash
end end
def to_s def to_s
[sources_to_s, gemspec_to_s, gems_to_s].select{|s| !s.empty?}.join("\n") + "\n" [sources_to_s, gemspec_to_s, gems_to_s].select {|s| !s.empty?}.join("\n") + "\n"
end end
# @return [Gem] found gem or nil if not found # @return [Gem] found gem or nil if not found
@ -180,21 +180,21 @@ module LogStash
def sources_to_s def sources_to_s
return "" if @sources.empty? return "" if @sources.empty?
@sources.map{|source| "source #{source.inspect}"}.join("\n") @sources.map {|source| "source #{source.inspect}"}.join("\n")
end end
def gems_to_s def gems_to_s
return "" if @gems.empty? return "" if @gems.empty?
@gems.map do |gem| @gems.map do |gem|
requirements = gem.requirements.empty? ? nil : gem.requirements.map{|r| r.inspect}.join(", ") requirements = gem.requirements.empty? ? nil : gem.requirements.map {|r| r.inspect}.join(", ")
options = gem.options.empty? ? nil : gem.options.map{|k, v| "#{k.inspect} => #{v.inspect}"}.join(", ") options = gem.options.empty? ? nil : gem.options.map {|k, v| "#{k.inspect} => #{v.inspect}"}.join(", ")
"gem " + [gem.name.inspect, requirements, options].compact.join(", ") "gem " + [gem.name.inspect, requirements, options].compact.join(", ")
end.join("\n") end.join("\n")
end end
def gemspec_to_s def gemspec_to_s
return "" if @gemspec.empty? return "" if @gemspec.empty?
options = @gemspec.map{|k, v| "#{k.inspect} => #{v.inspect}"}.join(", ") options = @gemspec.map {|k, v| "#{k.inspect} => #{v.inspect}"}.join(", ")
"gemspec #{options}" "gemspec #{options}"
end end
end end
@ -237,7 +237,7 @@ module LogStash
def initialize(name, requirements = [], options = {}) def initialize(name, requirements = [], options = {})
@name = name @name = name
@requirements = requirements.map{|r| r.to_s.strip}.select{|r| !r.empty?} @requirements = requirements.map {|r| r.to_s.strip}.select {|r| !r.empty?}
@options = options @options = options
end end

View file

@ -36,7 +36,7 @@ class LogStash::PluginManager::List < LogStash::PluginManager::Command
installed_plugin_names = filtered_specs.collect {|spec| spec.name} installed_plugin_names = filtered_specs.collect {|spec| spec.name}
filtered_specs.sort_by{|spec| spec.name}.each do |spec| filtered_specs.sort_by {|spec| spec.name}.each do |spec|
line = "#{spec.name}" line = "#{spec.name}"
line += " (#{spec.version})" if verbose? line += " (#{spec.version})" if verbose?
puts(line) puts(line)
@ -63,9 +63,9 @@ class LogStash::PluginManager::List < LogStash::PluginManager::Command
specs = LogStash::PluginManager.find_plugins_gem_specs specs = LogStash::PluginManager.find_plugins_gem_specs
# apply filters # apply filters
specs = specs.select{|spec| gemfile.find(spec.name)} if installed? specs = specs.select {|spec| gemfile.find(spec.name)} if installed?
specs = specs.select{|spec| spec_matches_search?(spec) } if plugin specs = specs.select {|spec| spec_matches_search?(spec) } if plugin
specs = specs.select{|spec| spec.metadata['logstash_group'] == group} if group specs = specs.select {|spec| spec.metadata['logstash_group'] == group} if group
specs specs
end end

View file

@ -102,7 +102,7 @@ class LogStash::PluginManager::Update < LogStash::PluginManager::Command
# create list of plugins to update # create list of plugins to update
def plugins_to_update(previous_gem_specs_map) def plugins_to_update(previous_gem_specs_map)
if update_all? if update_all?
previous_gem_specs_map.values.map{|spec| spec.name} previous_gem_specs_map.values.map {|spec| spec.name}
else else
# If the plugin isn't available in the gemspec or in # If the plugin isn't available in the gemspec or in
# the gemfile defined with a local path, we assume the plugin is not # the gemfile defined with a local path, we assume the plugin is not
@ -153,7 +153,7 @@ class LogStash::PluginManager::Update < LogStash::PluginManager::Command
def find_latest_gem_specs def find_latest_gem_specs
LogStash::PluginManager.all_installed_plugins_gem_specs(gemfile).inject({}) do |result, spec| LogStash::PluginManager.all_installed_plugins_gem_specs(gemfile).inject({}) do |result, spec|
previous = result[spec.name.downcase] previous = result[spec.name.downcase]
result[spec.name.downcase] = previous ? [previous, spec].max_by{|s| s.version} : spec result[spec.name.downcase] = previous ? [previous, spec].max_by {|s| s.version} : spec
result result
end end
end end

View file

@ -124,7 +124,7 @@ module LogStash::PluginManager
# @return [Array<Gem::Specification>] all local logstash plugin gem specs # @return [Array<Gem::Specification>] all local logstash plugin gem specs
def self.find_plugins_gem_specs(name = nil) def self.find_plugins_gem_specs(name = nil)
specs = name ? ::Gem::Specification.find_all_by_name(name) : ::Gem::Specification.find_all specs = name ? ::Gem::Specification.find_all_by_name(name) : ::Gem::Specification.find_all
specs.select{|spec| logstash_plugin_gem_spec?(spec)} specs.select {|spec| logstash_plugin_gem_spec?(spec)}
end end
# list of all locally installed plugins specs specified in the Gemfile. # list of all locally installed plugins specs specified in the Gemfile.
@ -135,7 +135,7 @@ module LogStash::PluginManager
def self.all_installed_plugins_gem_specs(gemfile) def self.all_installed_plugins_gem_specs(gemfile)
# we start from the installed gemspecs so we can verify the metadata for valid logstash plugin # we start from the installed gemspecs so we can verify the metadata for valid logstash plugin
# then filter out those not included in the Gemfile # then filter out those not included in the Gemfile
find_plugins_gem_specs.select{|spec| !!gemfile.find(spec.name)} find_plugins_gem_specs.select {|spec| !!gemfile.find(spec.name)}
end end
# @param plugin [String] plugin name # @param plugin [String] plugin name

View file

@ -55,7 +55,7 @@ module LogStash
:config_reload_interval, :config_reload_interval,
:dead_letter_queue_enabled, :dead_letter_queue_enabled,
:dead_letter_queue_path, :dead_letter_queue_path,
).reject{|_, v| v.nil?} ).reject {|_, v| v.nil?}
if options.fetch(:graph, false) if options.fetch(:graph, false)
extended_stats = extract_metrics([:stats, :pipelines, pipeline_id.to_sym, :config], :graph) extended_stats = extract_metrics([:stats, :pipelines, pipeline_id.to_sym, :config], :graph)
decorated_vertices = extended_stats[:graph]["graph"]["vertices"].map { |vertex| decorate_with_cluster_uuids(vertex) } decorated_vertices = extended_stats[:graph]["graph"]["vertices"].map { |vertex| decorate_with_cluster_uuids(vertex) }

View file

@ -37,7 +37,7 @@ module LogStash
end end
def find_plugins_gem_specs def find_plugins_gem_specs
@specs ||= ::Gem::Specification.find_all.select{|spec| logstash_plugin_gem_spec?(spec)} @specs ||= ::Gem::Specification.find_all.select {|spec| logstash_plugin_gem_spec?(spec)}
end end
def logstash_plugin_gem_spec?(spec) def logstash_plugin_gem_spec?(spec)

View file

@ -93,13 +93,13 @@ class Treetop::Runtime::SyntaxNode
indent + indent +
self.class.to_s.sub(/.*:/, '') + self.class.to_s.sub(/.*:/, '') +
em.map{|m| "+" + m.to_s.sub(/.*:/, '')} * "" + em.map {|m| "+" + m.to_s.sub(/.*:/, '')} * "" +
" offset=#{interval.first}" + " offset=#{interval.first}" +
", #{tv.inspect}" + ", #{tv.inspect}" +
im + im +
(elements && elements.size > 0 ? (elements && elements.size > 0 ?
":" + ":" +
(elements.select { |e| !e.is_a?(LogStash::Config::AST::Whitespace) && e.elements && e.elements.size > 0 } || []).map{|e| (elements.select { |e| !e.is_a?(LogStash::Config::AST::Whitespace) && e.elements && e.elements.size > 0 } || []).map {|e|
begin begin
"\n" + e.inspect(indent + " ") "\n" + e.inspect(indent + " ")
rescue # Defend against inspect not taking a parameter rescue # Defend against inspect not taking a parameter

View file

@ -570,13 +570,13 @@ class Treetop::Runtime::SyntaxNode
indent + indent +
self.class.to_s.sub(/.*:/, '') + self.class.to_s.sub(/.*:/, '') +
em.map{|m| "+" + m.to_s.sub(/.*:/, '')} * "" + em.map {|m| "+" + m.to_s.sub(/.*:/, '')} * "" +
" offset=#{interval.first}" + " offset=#{interval.first}" +
", #{tv.inspect}" + ", #{tv.inspect}" +
im + im +
(elements && elements.size > 0 ? (elements && elements.size > 0 ?
":" + ":" +
(elements.select { |e| !e.is_a?(LogStash::Config::AST::Whitespace) && e.elements && e.elements.size > 0 } || []).map{|e| (elements.select { |e| !e.is_a?(LogStash::Config::AST::Whitespace) && e.elements && e.elements.size > 0 } || []).map {|e|
begin begin
"\n" + e.inspect(indent + " ") "\n" + e.inspect(indent + " ")
rescue # Defend against inspect not taking a parameter rescue # Defend against inspect not taking a parameter

View file

@ -173,7 +173,7 @@ class LogStash::Filters::Base < LogStash::Plugin
events.each do |event| events.each do |event|
unless event.cancelled? unless event.cancelled?
result << event result << event
do_filter(event){|new_event| result << new_event} do_filter(event) {|new_event| result << new_event}
end end
end end
result result

View file

@ -63,7 +63,7 @@ module LogStash module Instrument module PeriodicPoller
def cgroup_available? def cgroup_available?
# don't cache to ivar, in case the files are mounted after logstash starts?? # don't cache to ivar, in case the files are mounted after logstash starts??
CRITICAL_PATHS.all?{|path| ::File.exist?(path)} CRITICAL_PATHS.all? {|path| ::File.exist?(path)}
end end
def controller_groups def controller_groups

View file

@ -290,7 +290,7 @@ module LogStash; class JavaPipeline < AbstractPipeline
.map(&:value) .map(&:value)
workers_init_elapsed = Time.now - workers_init_start workers_init_elapsed = Time.now - workers_init_start
fail("Some worker(s) were not correctly initialized") if worker_loops.any?{|v| v.nil?} fail("Some worker(s) were not correctly initialized") if worker_loops.any? {|v| v.nil?}
@logger.info("Pipeline Java execution initialization time", "seconds" => workers_init_elapsed.round(2)) @logger.info("Pipeline Java execution initialization time", "seconds" => workers_init_elapsed.round(2))
@ -475,7 +475,7 @@ module LogStash; class JavaPipeline < AbstractPipeline
@shutdownRequested.set(true) @shutdownRequested.set(true)
@worker_threads.each do |t| @worker_threads.each do |t|
@logger.debug("Shutdown waiting for worker thread" , default_logging_keys(:thread => t.inspect)) @logger.debug("Shutdown waiting for worker thread", default_logging_keys(:thread => t.inspect))
t.join t.join
end end

View file

@ -32,7 +32,7 @@ module LogStash module Modules module SettingsMerger
# union will also coalesce identical hashes # union will also coalesce identical hashes
# this "|" operator is provided to Java List by RubyJavaIntegration # this "|" operator is provided to Java List by RubyJavaIntegration
union_of_settings = (cli_settings | yml_settings) union_of_settings = (cli_settings | yml_settings)
grouped_by_name = union_of_settings.group_by{|e| e["name"]} grouped_by_name = union_of_settings.group_by {|e| e["name"]}
grouped_by_name.each do |_, array| grouped_by_name.each do |_, array|
if array.size == 2 if array.size == 2
merged << array.last.merge(array.first) merged << array.last.merge(array.first)

View file

@ -162,13 +162,13 @@ class LogStash::Runner < Clamp::StrictCommand
:default => LogStash::SETTINGS.get_default('pipeline.ecs_compatibility') :default => LogStash::SETTINGS.get_default('pipeline.ecs_compatibility')
# Data Path Setting # Data Path Setting
option ["--path.data"] , "PATH", option ["--path.data"], "PATH",
I18n.t("logstash.runner.flag.datapath"), I18n.t("logstash.runner.flag.datapath"),
:attribute_name => "path.data", :attribute_name => "path.data",
:default => LogStash::SETTINGS.get_default("path.data") :default => LogStash::SETTINGS.get_default("path.data")
# Plugins Settings # Plugins Settings
option ["-p", "--path.plugins"] , "PATH", option ["-p", "--path.plugins"], "PATH",
I18n.t("logstash.runner.flag.pluginpath"), I18n.t("logstash.runner.flag.pluginpath"),
:multivalued => true, :attribute_name => "path.plugins", :multivalued => true, :attribute_name => "path.plugins",
:default => LogStash::SETTINGS.get_default("path.plugins") :default => LogStash::SETTINGS.get_default("path.plugins")

View file

@ -169,9 +169,9 @@ module LogStash::Util
def self.normalize(o) def self.normalize(o)
case o case o
when Java::JavaUtil::LinkedHashMap when Java::JavaUtil::LinkedHashMap
o.inject({}){|r, (k, v)| r[k] = normalize(v); r} o.inject({}) {|r, (k, v)| r[k] = normalize(v); r}
when Java::JavaUtil::ArrayList when Java::JavaUtil::ArrayList
o.map{|i| normalize(i)} o.map {|i| normalize(i)}
else else
o o
end end
@ -180,9 +180,9 @@ module LogStash::Util
def self.stringify_symbols(o) def self.stringify_symbols(o)
case o case o
when Hash when Hash
o.inject({}){|r, (k, v)| r[k.is_a?(Symbol) ? k.to_s : k] = stringify_symbols(v); r} o.inject({}) {|r, (k, v)| r[k.is_a?(Symbol) ? k.to_s : k] = stringify_symbols(v); r}
when Array when Array
o.map{|i| stringify_symbols(i)} o.map {|i| stringify_symbols(i)}
when Symbol when Symbol
o.to_s o.to_s
else else

View file

@ -114,7 +114,7 @@ describe LogStash::WrappedAckedQueue, :stress_test => true do
sleep 0.1 sleep 0.1
expect { queue.close }.not_to raise_error expect { queue.close }.not_to raise_error
sleep 0.1 sleep 0.1
files = Dir.glob(path + '/*').map{|f| f.sub("#{path}/", '')} files = Dir.glob(path + '/*').map {|f| f.sub("#{path}/", '')}
if files.count != 2 if files.count != 2
output_strings << "File count after close mismatch expected: 2 got: #{files.count}" output_strings << "File count after close mismatch expected: 2 got: #{files.count}"
output_strings.concat files output_strings.concat files
@ -123,7 +123,7 @@ describe LogStash::WrappedAckedQueue, :stress_test => true do
queue.close queue.close
if output_strings.any? if output_strings.any?
output_strings << __memoized.reject{|k, v| reject_memo_keys.include?(k)}.inspect output_strings << __memoized.reject {|k, v| reject_memo_keys.include?(k)}.inspect
end end
expect(output_strings).to eq([]) expect(output_strings).to eq([])

View file

@ -193,7 +193,7 @@ describe LogStash::Agent do
end end
it "increment the pipeline successes" do it "increment the pipeline successes" do
expect{ subject.converge_state_and_update }.to change { mval(:stats, :pipelines, pipeline_name, :reloads, :successes) }.by(1) expect { subject.converge_state_and_update }.to change { mval(:stats, :pipelines, pipeline_name, :reloads, :successes) }.by(1)
end end
it "record the `last_success_timestamp`" do it "record the `last_success_timestamp`" do
@ -213,7 +213,7 @@ describe LogStash::Agent do
end end
it "increment the pipeline failures" do it "increment the pipeline failures" do
expect{ subject.converge_state_and_update }.to change { mval(:stats, :pipelines, pipeline_name, :reloads, :failures) }.by(1) expect { subject.converge_state_and_update }.to change { mval(:stats, :pipelines, pipeline_name, :reloads, :failures) }.by(1)
end end
end end
end end
@ -252,8 +252,8 @@ describe LogStash::Agent do
# since the pipeline is async, it can actually take some time to have metrics recordings # since the pipeline is async, it can actually take some time to have metrics recordings
# so we try a few times # so we try a few times
try(20) do try(20) do
expect { mhash(:stats, :pipelines, :main, :events) }.not_to raise_error , "Events pipeline stats should exist" expect { mhash(:stats, :pipelines, :main, :events) }.not_to raise_error, "Events pipeline stats should exist"
expect { mhash(:stats, :pipelines, :main, :flow) }.not_to raise_error , "Events pipeline stats should exist" expect { mhash(:stats, :pipelines, :main, :flow) }.not_to raise_error, "Events pipeline stats should exist"
expect { mhash(:stats, :pipelines, :main, :plugins) }.not_to raise_error, "Plugins pipeline stats should exist" expect { mhash(:stats, :pipelines, :main, :plugins) }.not_to raise_error, "Plugins pipeline stats should exist"
end end

View file

@ -57,7 +57,7 @@ describe LogStash::Environment do
after(:each) { $LOAD_PATH.delete(path) } after(:each) { $LOAD_PATH.delete(path) }
it "should add the path to $LOAD_PATH" do it "should add the path to $LOAD_PATH" do
expect{subject.add_plugin_path(path)}.to change{$LOAD_PATH.size}.by(1) expect {subject.add_plugin_path(path)}.to change {$LOAD_PATH.size}.by(1)
expect($LOAD_PATH).to include(path) expect($LOAD_PATH).to include(path)
end end
end end

View file

@ -323,14 +323,14 @@ describe LogStash::Event do
end end
it "should consistently handle nil" do it "should consistently handle nil" do
expect{LogStash::Event.from_json(nil)}.to raise_error(TypeError) expect {LogStash::Event.from_json(nil)}.to raise_error(TypeError)
expect{LogStash::Event.new(LogStash::Json.load(nil))}.to raise_error # java.lang.ClassCastException expect {LogStash::Event.new(LogStash::Json.load(nil))}.to raise_error # java.lang.ClassCastException
end end
it "should consistently handle bare string" do it "should consistently handle bare string" do
bare_strings.each do |s| bare_strings.each do |s|
expect{LogStash::Event.from_json(s)}.to raise_error LogStash::Json::ParserError expect {LogStash::Event.from_json(s)}.to raise_error LogStash::Json::ParserError
expect{LogStash::Event.new(LogStash::Json.load(s))}.to raise_error LogStash::Json::ParserError expect {LogStash::Event.new(LogStash::Json.load(s))}.to raise_error LogStash::Json::ParserError
end end
end end

View file

@ -55,7 +55,7 @@ describe LogStash::FilterDelegator do
config_name "super_plugin" config_name "super_plugin"
config :host, :validate => :string config :host, :validate => :string
def register; end def register; end
def flush(options = {}); @events ; end def flush(options = {}); @events; end
def filter(event) def filter(event)
@events ||= [] @events ||= []
@ -102,7 +102,7 @@ describe LogStash::FilterDelegator do
config_name "super_plugin" config_name "super_plugin"
config :host, :validate => :string config :host, :validate => :string
def register; end def register; end
def flush(options = {}); @events ; end def flush(options = {}); @events; end
# naive split filter implementation # naive split filter implementation
def filter(event) def filter(event)

View file

@ -36,8 +36,8 @@ describe LogStash::Filters::Base do
subject {LogStash::Filters::Base.new({})} subject {LogStash::Filters::Base.new({})}
it "should provide method interfaces to override" do it "should provide method interfaces to override" do
expect{subject.register}.to raise_error(RuntimeError) expect {subject.register}.to raise_error(RuntimeError)
expect{subject.filter(:foo)}.to raise_error(RuntimeError) expect {subject.filter(:foo)}.to raise_error(RuntimeError)
end end
it "should provide class public API" do it "should provide class public API" do
@ -47,8 +47,8 @@ describe LogStash::Filters::Base do
end end
context "multi_filter" do context "multi_filter" do
let(:event1){LogStash::Event.new} let(:event1) {LogStash::Event.new}
let(:event2){LogStash::Event.new} let(:event2) {LogStash::Event.new}
it "should multi_filter without new events" do it "should multi_filter without new events" do
allow(subject).to receive(:filter) do |event, &block| allow(subject).to receive(:filter) do |event, &block|

View file

@ -26,7 +26,7 @@ describe LogStash::Instrument::MetricType::Counter do
describe "#increment" do describe "#increment" do
it "increment the counter" do it "increment the counter" do
expect{ subject.increment }.to change { subject.value }.by(1) expect { subject.increment }.to change { subject.value }.by(1)
end end
end end

View file

@ -79,7 +79,7 @@ describe LogStash::FilterDelegator do
config_name "super_plugin" config_name "super_plugin"
config :host, :validate => :string config :host, :validate => :string
def register; end def register; end
def flush(options = {}); @events ; end def flush(options = {}); @events; end
def filter(event) def filter(event)
@events ||= [] @events ||= []
@ -139,7 +139,7 @@ describe LogStash::FilterDelegator do
config_name "super_plugin" config_name "super_plugin"
config :host, :validate => :string config :host, :validate => :string
def register; end def register; end
def flush(options = {}); @events ; end def flush(options = {}); @events; end
# naive split filter implementation # naive split filter implementation
def filter(event) def filter(event)

View file

@ -65,8 +65,8 @@ describe "Java integration" do
context "Java::JavaUtil::Map" do context "Java::JavaUtil::Map" do
# this is to test the Java 8 Map interface change for the merge method # this is to test the Java 8 Map interface change for the merge method
let(:merger){{:a => 1, :b => 2}} let(:merger) {{:a => 1, :b => 2}}
let(:mergee){{:b => 3, :c => 4}} let(:mergee) {{:b => 3, :c => 4}}
shared_examples "map merge" do shared_examples "map merge" do
it "should support merging" do it "should support merging" do
@ -74,25 +74,25 @@ describe "Java integration" do
end end
it "should return a new hash and not change original hash" do it "should return a new hash and not change original hash" do
expect{subject.merge(mergee)}.to_not change{subject} expect {subject.merge(mergee)}.to_not change {subject}
end end
end end
context "with Java::JavaUtil::LinkedHashMap" do context "with Java::JavaUtil::LinkedHashMap" do
it_behaves_like "map merge" do it_behaves_like "map merge" do
subject{Java::JavaUtil::LinkedHashMap.new(merger)} subject {Java::JavaUtil::LinkedHashMap.new(merger)}
end end
end end
context "with Java::JavaUtil::HashMap" do context "with Java::JavaUtil::HashMap" do
it_behaves_like "map merge" do it_behaves_like "map merge" do
subject{Java::JavaUtil::HashMap.new(merger)} subject {Java::JavaUtil::HashMap.new(merger)}
end end
end end
end end
context "Java::JavaUtil::Collection" do context "Java::JavaUtil::Collection" do
subject{Java::JavaUtil::ArrayList.new(initial_array)} subject {Java::JavaUtil::ArrayList.new(initial_array)}
context "when inspecting a list" do context "when inspecting a list" do
let(:items) { [:a, {:b => :c}] } let(:items) { [:a, {:b => :c}] }
@ -130,7 +130,7 @@ describe "Java integration" do
end end
it "should remove the object to delete" do it "should remove the object to delete" do
expect{subject.delete("foo")}.to change{subject.to_a}.from(initial_array).to(["bar"]) expect {subject.delete("foo")}.to change {subject.to_a}.from(initial_array).to(["bar"])
end end
end end
@ -142,7 +142,7 @@ describe "Java integration" do
end end
it "should remove all the objects to delete" do it "should remove all the objects to delete" do
expect{subject.delete("foo")}.to change{subject.to_a}.from(initial_array).to(["bar"]) expect {subject.delete("foo")}.to change {subject.to_a}.from(initial_array).to(["bar"])
end end
end end
@ -154,11 +154,11 @@ describe "Java integration" do
end end
it "should not change the collection" do it "should not change the collection" do
expect{subject.delete("baz")}.to_not change{subject.to_a} expect {subject.delete("baz")}.to_not change {subject.to_a}
end end
it "should yield to block when given" do it "should yield to block when given" do
expect(subject.delete("baz"){"foobar"}).to eq("foobar") expect(subject.delete("baz") {"foobar"}).to eq("foobar")
end end
end end
@ -170,7 +170,7 @@ describe "Java integration" do
end end
it "should not change the collection" do it "should not change the collection" do
expect{subject.delete("baz")}.to_not change{subject.to_a} expect {subject.delete("baz")}.to_not change {subject.to_a}
end end
end end
@ -179,7 +179,7 @@ describe "Java integration" do
let(:initial_array) {["foo", "bar", "foo"]} let(:initial_array) {["foo", "bar", "foo"]}
it "should not change original collection" do it "should not change original collection" do
expect{subject & ["foo"]}.to_not change{subject.to_a} expect {subject & ["foo"]}.to_not change {subject.to_a}
end end
it "should return a new array containing elements common to the two arrays, excluding any duplicate" do it "should return a new array containing elements common to the two arrays, excluding any duplicate" do
@ -214,7 +214,7 @@ describe "Java integration" do
let(:initial_array) {["foo", "bar", "foo"]} let(:initial_array) {["foo", "bar", "foo"]}
it "should not change original collection" do it "should not change original collection" do
expect{subject | ["bar", "baz"]}.to_not change{subject.to_a} expect {subject | ["bar", "baz"]}.to_not change {subject.to_a}
end end
it "should return a new array by joining excluding any duplicates and preserving the order from the original array" do it "should return a new array by joining excluding any duplicates and preserving the order from the original array" do

View file

@ -277,7 +277,7 @@ describe LogStash::JavaPipeline do
sleep 0.01 until pipeline.stopped? sleep 0.01 until pipeline.stopped?
end end
pipeline.shutdown pipeline.shutdown
expect(output.events.map{|e| e.get("message")}).to include("END") expect(output.events.map {|e| e.get("message")}).to include("END")
expect(output.events.size).to eq(2) expect(output.events.size).to eq(2)
expect(output.events[0].get("tags")).to eq(["notdropped"]) expect(output.events[0].get("tags")).to eq(["notdropped"])
expect(output.events[1].get("tags")).to eq(["notdropped"]) expect(output.events[1].get("tags")).to eq(["notdropped"])
@ -432,7 +432,7 @@ describe LogStash::JavaPipeline do
# when the pipeline has exited, no input threads should be alive # when the pipeline has exited, no input threads should be alive
wait(5).for {subject.input_threads.any?(&:alive?)}.to be_falsey wait(5).for {subject.input_threads.any?(&:alive?)}.to be_falsey
expect{dummyinput.push_once}.to raise_error(/Tried to write to a closed queue/) expect {dummyinput.push_once}.to raise_error(/Tried to write to a closed queue/)
end end
end end
end end
@ -684,7 +684,7 @@ describe LogStash::JavaPipeline do
CONFIG CONFIG
sample_one(["a", "1", "b", "2", "c", "3"]) do sample_one(["a", "1", "b", "2", "c", "3"]) do
expect(subject.map{|e| e.get("message")}).to eq(["a", "1", "b", "2", "c", "3"]) expect(subject.map {|e| e.get("message")}).to eq(["a", "1", "b", "2", "c", "3"])
end end
end end
@ -699,7 +699,7 @@ describe LogStash::JavaPipeline do
let(:pipeline) { mock_java_pipeline_from_string(config, settings) } let(:pipeline) { mock_java_pipeline_from_string(config, settings) }
it "should raise error" do it "should raise error" do
expect{pipeline.run}.to raise_error(RuntimeError, /pipeline\.ordered/) expect {pipeline.run}.to raise_error(RuntimeError, /pipeline\.ordered/)
pipeline.close pipeline.close
end end
end end
@ -723,7 +723,7 @@ describe LogStash::JavaPipeline do
CONFIG CONFIG
sample_one(["a", "1", "b", "2", "c", "3"]) do sample_one(["a", "1", "b", "2", "c", "3"]) do
expect(subject.map{|e| e.get("message")}).to eq(["a", "1", "b", "2", "c", "3"]) expect(subject.map {|e| e.get("message")}).to eq(["a", "1", "b", "2", "c", "3"])
end end
end end
@ -758,7 +758,7 @@ describe LogStash::JavaPipeline do
CONFIG CONFIG
sample_one(["a", "1", "b", "2", "c", "3"]) do sample_one(["a", "1", "b", "2", "c", "3"]) do
expect(subject.map{|e| e.get("message")}).to eq(["1", "2", "3", "a", "b", "c"]) expect(subject.map {|e| e.get("message")}).to eq(["1", "2", "3", "a", "b", "c"])
end end
end end
@ -781,7 +781,7 @@ describe LogStash::JavaPipeline do
CONFIG CONFIG
sample_one(["a", "1", "b", "2", "c", "3"]) do sample_one(["a", "1", "b", "2", "c", "3"]) do
expect(subject.map{|e| e.get("message")}).to eq(["1", "2", "3", "a", "b", "c"]) expect(subject.map {|e| e.get("message")}).to eq(["1", "2", "3", "a", "b", "c"])
end end
end end
end end

View file

@ -104,7 +104,7 @@ describe "LogStash::Json" do
end end
it "should raise Json::ParserError on invalid json" do it "should raise Json::ParserError on invalid json" do
expect{LogStash::Json.load("abc")}.to raise_error LogStash::Json::ParserError expect {LogStash::Json.load("abc")}.to raise_error LogStash::Json::ParserError
end end
it "should return nil on empty string" do it "should return nil on empty string" do

View file

@ -56,7 +56,7 @@ describe LogStash::Timestamp do
end end
it "should raise on invalid string coerce" do it "should raise on invalid string coerce" do
expect{LogStash::Timestamp.coerce("foobar")}.to raise_error LogStash::TimestampParserError expect {LogStash::Timestamp.coerce("foobar")}.to raise_error LogStash::TimestampParserError
end end
it "should return nil on invalid object coerce" do it "should return nil on invalid object coerce" do
@ -203,11 +203,11 @@ describe LogStash::Timestamp do
context "with illegal parameters" do context "with illegal parameters" do
it "should raise exception on nil input" do it "should raise exception on nil input" do
expect{LogStash::Timestamp.at(nil)}.to raise_error expect {LogStash::Timestamp.at(nil)}.to raise_error
end end
it "should raise exception on invalid input type" do it "should raise exception on invalid input type" do
expect{LogStash::Timestamp.at(:foo)}.to raise_error expect {LogStash::Timestamp.at(:foo)}.to raise_error
end end
end end
end end

View file

@ -37,7 +37,7 @@ module LogStash module Modules
context "when supplied with conflicting scheme data" do context "when supplied with conflicting scheme data" do
let(:settings) { {"var.kibana.scheme" => "http", "var.kibana.host" => kibana_host} } let(:settings) { {"var.kibana.scheme" => "http", "var.kibana.host" => kibana_host} }
it "a new instance will throw an error" do it "a new instance will throw an error" do
expect{described_class.new(settings, test_client)}.to raise_error(ArgumentError, /Detected differing Kibana host schemes as sourced from var\.kibana\.host: 'https' and var\.kibana\.scheme: 'http'/) expect {described_class.new(settings, test_client)}.to raise_error(ArgumentError, /Detected differing Kibana host schemes as sourced from var\.kibana\.host: 'https' and var\.kibana\.scheme: 'http'/)
end end
end end
@ -45,7 +45,7 @@ module LogStash module Modules
["httpd", "ftp", "telnet"].each do |uri_scheme| ["httpd", "ftp", "telnet"].each do |uri_scheme|
it "a new instance will throw an error" do it "a new instance will throw an error" do
re = /Kibana host scheme given is invalid, given value: '#{uri_scheme}' - acceptable values: 'http', 'https'/ re = /Kibana host scheme given is invalid, given value: '#{uri_scheme}' - acceptable values: 'http', 'https'/
expect{described_class.new({"var.kibana.scheme" => uri_scheme}, test_client)}.to raise_error(ArgumentError, re) expect {described_class.new({"var.kibana.scheme" => uri_scheme}, test_client)}.to raise_error(ArgumentError, re)
end end
end end
end end

View file

@ -148,7 +148,7 @@ ERB
expect(resource2.import_path).to eq("api/kibana/dashboards/import") expect(resource2.import_path).to eq("api/kibana/dashboards/import")
expect(resource2.content).to be_a(Array) expect(resource2.content).to be_a(Array)
expect(resource2.content.size).to eq(5) expect(resource2.content.size).to eq(5)
expect(resource2.content.map{|o| o.class}.uniq).to eq([LogStash::Modules::KibanaResource]) expect(resource2.content.map {|o| o.class}.uniq).to eq([LogStash::Modules::KibanaResource])
test_object = resource2.content[0] test_object = resource2.content[0]
expect(test_object.content_id).to eq("foo-*") expect(test_object.content_id).to eq("foo-*")

View file

@ -127,7 +127,7 @@ describe LogStash::Modules::SettingsMerger do
context "when only cloud.auth is supplied" do context "when only cloud.auth is supplied" do
let(:ls_settings) { SubstituteSettingsForRSpec.new({"cloud.auth" => cloud_auth}) } let(:ls_settings) { SubstituteSettingsForRSpec.new({"cloud.auth" => cloud_auth}) }
it "should raise an error" do it "should raise an error" do
expect{ described_class.merge_cloud_settings(mod_settings, ls_settings) }.to raise_exception(ArgumentError) expect { described_class.merge_cloud_settings(mod_settings, ls_settings) }.to raise_exception(ArgumentError)
end end
end end

View file

@ -63,7 +63,7 @@ describe "LogStash::Outputs::Base#new" do
params = { "dummy_option" => "potatoes", "codec" => "json", "workers" => 2 } params = { "dummy_option" => "potatoes", "codec" => "json", "workers" => 2 }
worker_params = params.dup; worker_params["workers"] = 1 worker_params = params.dup; worker_params["workers"] = 1
expect{ subject }.not_to raise_error expect { subject }.not_to raise_error
end end
it "should set concurrency correctly" do it "should set concurrency correctly" do

View file

@ -58,7 +58,7 @@ describe LogStash::DelegatingLogWriter do
context "#error" do context "#error" do
it "should log an :error message and raise LogStash::UnrecoverablePumaError" do it "should log an :error message and raise LogStash::UnrecoverablePumaError" do
expect(logger).to receive(:error).with(text) expect(logger).to receive(:error).with(text)
expect{ subject.send(:error, text) }.to raise_error(LogStash::UnrecoverablePumaError, text) expect { subject.send(:error, text) }.to raise_error(LogStash::UnrecoverablePumaError, text)
end end
end end

View file

@ -68,7 +68,7 @@ describe LogStash::PipelineAction::Create do
end end
end end
context "when the pipeline doesn't start" do context "when the pipeline doesn't start" do
context "with a syntax error" do context "with a syntax error" do
let(:pipeline_config) { mock_pipeline_config(:main, "input { dummyblockinginput { id => '123' } } output { stdout ") } # bad syntax let(:pipeline_config) { mock_pipeline_config(:main, "input { dummyblockinginput { id => '123' } } output { stdout ") } # bad syntax

View file

@ -56,7 +56,7 @@ describe LogStash::Plugins::CATrustedFingerprintSupport do
allow(plugin_class).to receive(:logger).and_return(logger_stub) allow(plugin_class).to receive(:logger).and_return(logger_stub)
end end
it 'logs helpfully and raises an exception' do it 'logs helpfully and raises an exception' do
expect{plugin}.to raise_exception(LogStash::ConfigurationError) expect {plugin}.to raise_exception(LogStash::ConfigurationError)
expect(logger_stub).to have_received(:error).with(a_string_including "Expected a hex-encoded SHA-256 fingerprint") expect(logger_stub).to have_received(:error).with(a_string_including "Expected a hex-encoded SHA-256 fingerprint")
end end
end end

View file

@ -120,11 +120,11 @@ describe LogStash::Runner do
it "should fail with single invalid dir path" do it "should fail with single invalid dir path" do
expect(LogStash::Environment).not_to receive(:add_plugin_path) expect(LogStash::Environment).not_to receive(:add_plugin_path)
expect{subject.configure_plugin_paths(invalid_directory)}.to raise_error(Clamp::UsageError) expect {subject.configure_plugin_paths(invalid_directory)}.to raise_error(Clamp::UsageError)
end end
it "should add multiple valid dir path to the environment" do it "should add multiple valid dir path to the environment" do
multiple_paths.each{|path| expect(LogStash::Environment).to receive(:add_plugin_path).with(path)} multiple_paths.each {|path| expect(LogStash::Environment).to receive(:add_plugin_path).with(path)}
subject.configure_plugin_paths(multiple_paths) subject.configure_plugin_paths(multiple_paths)
end end
end end

View file

@ -120,7 +120,7 @@ describe "LogStash::Util::Accessors", :if => class_exists do
data = {} data = {}
accessors = LogStash::Util::Accessors.new(data) accessors = LogStash::Util::Accessors.new(data)
expect(accessors.get(str)).to be_nil expect(accessors.get(str)).to be_nil
expect(data).to be_empty expect(data).to be_empty
expect(accessors.set(str, "foo")).to eq("foo") expect(accessors.set(str, "foo")).to eq("foo")
expect(data).to eq({ "hello" => {"world" => "foo"} }) expect(data).to eq({ "hello" => {"world" => "foo"} })
end end

View file

@ -60,7 +60,7 @@ describe LogStash::Util::Charset do
["foobar", "foobar"], ["foobar", "foobar"],
["\xE0 Montr\xE9al", "à Montréal"], ["\xE0 Montr\xE9al", "à Montréal"],
] ]
samples.map{|(a, b)| [a.force_encoding("ISO-8859-1"), b]}.each do |(a, b)| samples.map {|(a, b)| [a.force_encoding("ISO-8859-1"), b]}.each do |(a, b)|
expect(a.encoding.name).to eq("ISO-8859-1") expect(a.encoding.name).to eq("ISO-8859-1")
expect(b.encoding.name).to eq("UTF-8") expect(b.encoding.name).to eq("UTF-8")
expect(a.valid_encoding?).to eq(true) expect(a.valid_encoding?).to eq(true)
@ -78,7 +78,7 @@ describe LogStash::Util::Charset do
["\xE0 Montr\xE9al", "<EFBFBD> Montr<74>al"], ["\xE0 Montr\xE9al", "<EFBFBD> Montr<74>al"],
["\xCE\xBA\xCF\x8C\xCF\x83\xCE\xBC\xCE\xB5", "<EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD>"], ["\xCE\xBA\xCF\x8C\xCF\x83\xCE\xBC\xCE\xB5", "<EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD>"],
] ]
samples.map{|(a, b)| [a.force_encoding("ASCII-8BIT"), b]}.each do |(a, b)| samples.map {|(a, b)| [a.force_encoding("ASCII-8BIT"), b]}.each do |(a, b)|
expect(a.encoding.name).to eq("ASCII-8BIT") expect(a.encoding.name).to eq("ASCII-8BIT")
expect(b.encoding.name).to eq("UTF-8") expect(b.encoding.name).to eq("UTF-8")
expect(subject.convert(a).encoding.name).to eq("UTF-8") expect(subject.convert(a).encoding.name).to eq("UTF-8")

View file

@ -24,7 +24,7 @@ describe LogStash::Util::CloudSettingId do
describe "when given unacceptable input" do describe "when given unacceptable input" do
it "a nil input does not raise an exception" do it "a nil input does not raise an exception" do
expect{described_class.new(nil)}.not_to raise_exception expect {described_class.new(nil)}.not_to raise_exception
end end
it "when given a nil input, the accessors are all nil" do it "when given a nil input, the accessors are all nil" do
cloud_id = described_class.new(nil) cloud_id = described_class.new(nil)
@ -41,7 +41,7 @@ describe LogStash::Util::CloudSettingId do
let(:raw) {%w(first second)} let(:raw) {%w(first second)}
let(:input) { described_class.cloud_id_encode(*raw) } let(:input) { described_class.cloud_id_encode(*raw) }
it "raises an error" do it "raises an error" do
expect{subject}.to raise_exception(ArgumentError, "Cloud Id, after decoding, is invalid. Format: '<segment1>$<segment2>$<segment3>'. Received: \"#{raw[0]}$#{raw[1]}\".") expect {subject}.to raise_exception(ArgumentError, "Cloud Id, after decoding, is invalid. Format: '<segment1>$<segment2>$<segment3>'. Received: \"#{raw[0]}$#{raw[1]}\".")
end end
end end
@ -49,7 +49,7 @@ describe LogStash::Util::CloudSettingId do
let(:raw) {["first", "", "third"]} let(:raw) {["first", "", "third"]}
let(:input) { described_class.cloud_id_encode(*raw) } let(:input) { described_class.cloud_id_encode(*raw) }
it "raises an error" do it "raises an error" do
expect{subject}.to raise_exception(ArgumentError, "Cloud Id, after decoding, is invalid. Format: '<segment1>$<segment2>$<segment3>'. Received: \"#{raw[0]}$#{raw[1]}$#{raw[2]}\".") expect {subject}.to raise_exception(ArgumentError, "Cloud Id, after decoding, is invalid. Format: '<segment1>$<segment2>$<segment3>'. Received: \"#{raw[0]}$#{raw[1]}$#{raw[2]}\".")
end end
end end
@ -57,7 +57,7 @@ describe LogStash::Util::CloudSettingId do
let(:raw) {%w(us-east-1.aws.found.io undefined my-kibana)} let(:raw) {%w(us-east-1.aws.found.io undefined my-kibana)}
let(:input) { described_class.cloud_id_encode(*raw) } let(:input) { described_class.cloud_id_encode(*raw) }
it "raises an error" do it "raises an error" do
expect{subject}.to raise_exception(ArgumentError, "Cloud Id, after decoding, elasticsearch segment is 'undefined', literally.") expect {subject}.to raise_exception(ArgumentError, "Cloud Id, after decoding, elasticsearch segment is 'undefined', literally.")
end end
end end
@ -65,7 +65,7 @@ describe LogStash::Util::CloudSettingId do
let(:raw) {%w(us-east-1.aws.found.io my-elastic-cluster undefined)} let(:raw) {%w(us-east-1.aws.found.io my-elastic-cluster undefined)}
let(:input) { described_class.cloud_id_encode(*raw) } let(:input) { described_class.cloud_id_encode(*raw) }
it "raises an error" do it "raises an error" do
expect{subject}.to raise_exception(ArgumentError, "Cloud Id, after decoding, the kibana segment is 'undefined', literally. You may need to enable Kibana in the Cloud UI.") expect {subject}.to raise_exception(ArgumentError, "Cloud Id, after decoding, the kibana segment is 'undefined', literally. You may need to enable Kibana in the Cloud UI.")
end end
end end
end end

View file

@ -72,7 +72,7 @@ module LogStash module Util
context 'malformed uris via string' do context 'malformed uris via string' do
MALFORMED_URIS.each do |arg| MALFORMED_URIS.each do |arg|
it "#{arg}: should raise an error" do it "#{arg}: should raise an error" do
expect{LogStash::Util::SafeURI.new(arg)}.to raise_error(ArgumentError) expect {LogStash::Util::SafeURI.new(arg)}.to raise_error(ArgumentError)
end end
end end
end end
@ -81,7 +81,7 @@ module LogStash module Util
MALFORMED_URIS.each do |arg| MALFORMED_URIS.each do |arg|
it "#{arg}: should raise an error" do it "#{arg}: should raise an error" do
java_uri = java.net.URI.new(arg) java_uri = java.net.URI.new(arg)
expect{LogStash::Util::SafeURI.new(java_uri)}.to raise_error(ArgumentError) expect {LogStash::Util::SafeURI.new(java_uri)}.to raise_error(ArgumentError)
end end
end end
end end
@ -90,7 +90,7 @@ module LogStash module Util
MALFORMED_URIS.each do |arg| MALFORMED_URIS.each do |arg|
it "#{arg}: should raise an error" do it "#{arg}: should raise an error" do
ruby_uri = URI.parse(arg) ruby_uri = URI.parse(arg)
expect{LogStash::Util::SafeURI.new(ruby_uri)}.to raise_error(ArgumentError) expect {LogStash::Util::SafeURI.new(ruby_uri)}.to raise_error(ArgumentError)
end end
end end
end end

View file

@ -230,14 +230,14 @@ describe LogStash::IOWrappedLogger do
end end
it "responds to sync=(v)" do it "responds to sync=(v)" do
expect{ subject.sync = true }.not_to raise_error expect { subject.sync = true }.not_to raise_error
end end
it "responds to sync" do it "responds to sync" do
expect{ subject.sync }.not_to raise_error expect { subject.sync }.not_to raise_error
end end
it "responds to flush" do it "responds to flush" do
expect{ subject.flush }.not_to raise_error expect { subject.flush }.not_to raise_error
end end
end end

View file

@ -19,8 +19,8 @@ def qualified_version
end end
def find_image(flavor) def find_image(flavor)
Docker::Image.all.detect{ Docker::Image.all.detect {
|image| image.info['RepoTags'].detect{ |image| image.info['RepoTags'].detect {
|tag| tag == "docker.elastic.co/logstash/logstash-#{flavor}:#{qualified_version}" |tag| tag == "docker.elastic.co/logstash/logstash-#{flavor}:#{qualified_version}"
}} }}
end end
@ -96,7 +96,7 @@ def get_plugin_info(container, type, id, pipeline = 'main')
puts "Unable to find plugins from #{pipeline_info}, when looking for #{type} plugins in #{pipeline}" puts "Unable to find plugins from #{pipeline_info}, when looking for #{type} plugins in #{pipeline}"
return nil return nil
end end
all_plugins.find{|plugin| plugin['id'] == id} all_plugins.find {|plugin| plugin['id'] == id}
end end
def logstash_available?(container) def logstash_available?(container)

View file

@ -48,7 +48,7 @@ describe "Test Logstash instance" do
let(:num_retries) { 50 } let(:num_retries) { 50 }
let(:config1) { config_to_temp_file(@fixture.config("root", { :port => port1, :random_file => file_config1 })) } let(:config1) { config_to_temp_file(@fixture.config("root", { :port => port1, :random_file => file_config1 })) }
let(:config2) { config_to_temp_file(@fixture.config("root", { :port => port2 , :random_file => file_config2 })) } let(:config2) { config_to_temp_file(@fixture.config("root", { :port => port2, :random_file => file_config2 })) }
let(:config3) { config_to_temp_file(@fixture.config("root", { :port => port3, :random_file => file_config3 })) } let(:config3) { config_to_temp_file(@fixture.config("root", { :port => port3, :random_file => file_config3 })) }
let(:port1) { random_port } let(:port1) { random_port }
let(:port2) { random_port } let(:port2) { random_port }

View file

@ -56,7 +56,7 @@ describe "Test Logstash Pipeline id" do
"pipeline.separate_logs" => false "pipeline.separate_logs" => false
} }
IO.write(@ls.application_settings_file, settings.to_yaml) IO.write(@ls.application_settings_file, settings.to_yaml)
@ls.spawn_logstash("-w", "1" , "-e", config) @ls.spawn_logstash("-w", "1", "-e", config)
wait_logstash_process_terminate wait_logstash_process_terminate
deprecation_log_file = "#{temp_dir}/logstash-deprecation.log" deprecation_log_file = "#{temp_dir}/logstash-deprecation.log"

View file

@ -82,7 +82,7 @@ describe "Ruby codec when used in" do
it "should encode correctly to file and don't log any ERROR" do it "should encode correctly to file and don't log any ERROR" do
logstash_service.env_variables = {'PATH_TO_OUT' => out_capture.path} logstash_service.env_variables = {'PATH_TO_OUT' => out_capture.path}
logstash_service.spawn_logstash("-w", "1" , "-e", config) logstash_service.spawn_logstash("-w", "1", "-e", config)
logstash_service.wait_for_logstash logstash_service.wait_for_logstash
logstash_service.wait_for_rest_api logstash_service.wait_for_rest_api
@ -105,7 +105,7 @@ describe "Ruby codec when used in" do
let(:config) { @fixture.config("output_encode") } let(:config) { @fixture.config("output_encode") }
it "should encode correctly without any ERROR log" do it "should encode correctly without any ERROR log" do
logstash_service.spawn_logstash("-w", "1" , "-e", config) logstash_service.spawn_logstash("-w", "1", "-e", config)
logstash_service.wait_for_logstash logstash_service.wait_for_logstash
logstash_service.wait_for_rest_api logstash_service.wait_for_rest_api

View file

@ -94,7 +94,7 @@ describe "Test Monitoring API" do
let(:logstash_service) { @fixture.get_service("logstash") } let(:logstash_service) { @fixture.get_service("logstash") }
before(:each) do before(:each) do
logstash_service.spawn_logstash("-w", "1" , "-e", config) logstash_service.spawn_logstash("-w", "1", "-e", config)
logstash_service.wait_for_logstash logstash_service.wait_for_logstash
wait_for_port(tcp_port, 60) wait_for_port(tcp_port, 60)

View file

@ -56,7 +56,7 @@ describe "Test Logstash Pipeline id" do
"pipeline.id" => pipeline_name "pipeline.id" => pipeline_name
} }
IO.write(@ls.application_settings_file, settings.to_yaml) IO.write(@ls.application_settings_file, settings.to_yaml)
@ls.spawn_logstash("-w", "1" , "-e", config) @ls.spawn_logstash("-w", "1", "-e", config)
wait_logstash_process_terminate(@ls) wait_logstash_process_terminate(@ls)
plainlog_file = "#{temp_dir}/logstash-plain.log" plainlog_file = "#{temp_dir}/logstash-plain.log"
expect(File.exist?(plainlog_file)).to be true expect(File.exist?(plainlog_file)).to be true
@ -70,7 +70,7 @@ describe "Test Logstash Pipeline id" do
"pipeline.id" => pipeline_name "pipeline.id" => pipeline_name
} }
IO.write(@ls.application_settings_file, settings.to_yaml) IO.write(@ls.application_settings_file, settings.to_yaml)
@ls.spawn_logstash("-w", "1" , "-e", config) @ls.spawn_logstash("-w", "1", "-e", config)
wait_logstash_process_terminate(@ls) wait_logstash_process_terminate(@ls)
plainlog_file = "#{temp_dir}/logstash-plain.log" plainlog_file = "#{temp_dir}/logstash-plain.log"
expect(File.exist?(plainlog_file)).to be true expect(File.exist?(plainlog_file)).to be true
@ -99,7 +99,7 @@ describe "Test Logstash Pipeline id" do
"pipeline.separate_logs" => true "pipeline.separate_logs" => true
} }
IO.write(@ls.application_settings_file, settings.to_yaml) IO.write(@ls.application_settings_file, settings.to_yaml)
@ls.spawn_logstash("-w", "1" , "-e", config) @ls.spawn_logstash("-w", "1", "-e", config)
wait_logstash_process_terminate(@ls) wait_logstash_process_terminate(@ls)
pipeline_log_file = "#{temp_dir}/pipeline_#{pipeline_name}.log" pipeline_log_file = "#{temp_dir}/pipeline_#{pipeline_name}.log"
@ -130,7 +130,7 @@ describe "Test Logstash Pipeline id" do
expect(log_definition).to match(/appender\.rolling\.filePattern\s*=\s*.*\/logstash-plain-%d{yyyy-MM-dd}\.log/) expect(log_definition).to match(/appender\.rolling\.filePattern\s*=\s*.*\/logstash-plain-%d{yyyy-MM-dd}\.log/)
FileUtils.cp("fixtures/logs_rollover/log4j2.properties", temp_dir) FileUtils.cp("fixtures/logs_rollover/log4j2.properties", temp_dir)
@ls.spawn_logstash("--path.settings", temp_dir, "-w", "1" , "-e", config) @ls.spawn_logstash("--path.settings", temp_dir, "-w", "1", "-e", config)
wait_logstash_process_terminate(@ls) wait_logstash_process_terminate(@ls)
logstash_logs = Dir.glob("logstash-plain*.log", base: temp_dir) logstash_logs = Dir.glob("logstash-plain*.log", base: temp_dir)
@ -158,7 +158,7 @@ describe "Test Logstash Pipeline id" do
expect(log_definition).to match(/appender\.routing\.pipeline\.policy\.size\s*=\s*1KB/) expect(log_definition).to match(/appender\.routing\.pipeline\.policy\.size\s*=\s*1KB/)
FileUtils.cp("fixtures/logs_rollover/log4j2.properties", temp_dir) FileUtils.cp("fixtures/logs_rollover/log4j2.properties", temp_dir)
@ls.spawn_logstash("--path.settings", temp_dir, "-w", "1" , "-e", config) @ls.spawn_logstash("--path.settings", temp_dir, "-w", "1", "-e", config)
wait_logstash_process_terminate(@ls) wait_logstash_process_terminate(@ls)
pipeline_logs = Dir.glob("pipeline*.log", base: temp_dir) pipeline_logs = Dir.glob("pipeline*.log", base: temp_dir)
@ -173,7 +173,7 @@ describe "Test Logstash Pipeline id" do
"pipeline.separate_logs" => false "pipeline.separate_logs" => false
} }
IO.write(@ls.application_settings_file, settings.to_yaml) IO.write(@ls.application_settings_file, settings.to_yaml)
@ls.spawn_logstash("-w", "1" , "-e", config) @ls.spawn_logstash("-w", "1", "-e", config)
wait_logstash_process_terminate(@ls) wait_logstash_process_terminate(@ls)
pipeline_log_file = "#{temp_dir}/pipeline_#{pipeline_name}.log" pipeline_log_file = "#{temp_dir}/pipeline_#{pipeline_name}.log"

View file

@ -53,7 +53,7 @@ describe "Test Logstash Pipeline id" do
"log.level" => "debug" "log.level" => "debug"
} }
IO.write(@ls.application_settings_file, settings.to_yaml) IO.write(@ls.application_settings_file, settings.to_yaml)
@ls.spawn_logstash("-w", "1" , "-e", config) @ls.spawn_logstash("-w", "1", "-e", config)
wait_logstash_process_terminate() wait_logstash_process_terminate()
plainlog_file = "#{temp_dir}/logstash-plain.log" plainlog_file = "#{temp_dir}/logstash-plain.log"
expect(File.exist?(plainlog_file)).to be true expect(File.exist?(plainlog_file)).to be true

View file

@ -53,7 +53,7 @@ describe "Test Logstash Slowlog" do
"slowlog.threshold.warn" => "500ms" "slowlog.threshold.warn" => "500ms"
} }
IO.write(@ls.application_settings_file, settings.to_yaml) IO.write(@ls.application_settings_file, settings.to_yaml)
@ls.spawn_logstash("-w", "1" , "-e", config) @ls.spawn_logstash("-w", "1", "-e", config)
@ls.wait_for_logstash @ls.wait_for_logstash
sleep 2 until @ls.exited? sleep 2 until @ls.exited?
slowlog_file = "#{temp_dir}/logstash-slowlog-plain.log" slowlog_file = "#{temp_dir}/logstash-slowlog-plain.log"

View file

@ -90,7 +90,7 @@ class PlatformConfig
def select_names_for(platform, options = {}) def select_names_for(platform, options = {})
filter_options = { "experimental" => options.fetch("experimental", false) } filter_options = { "experimental" => options.fetch("experimental", false) }
filter_type(platform, filter_options).map{ |p| p.name } filter_type(platform, filter_options).map { |p| p.name }
end end
def types def types

View file

@ -100,7 +100,7 @@ module ServiceTester
end end
def download(from, to) def download(from, to)
client.download(from, to , host) client.download(from, to, host)
end end
def replace_in_gemfile(pattern, replace) def replace_in_gemfile(pattern, replace)

View file

@ -407,7 +407,7 @@ namespace "artifact" do
gem_line_regex = /^\s*gem\s+["']logstash-core["'](?:\s*,\s*["'][^"^']+["'])?(?:\s*,\s*:path\s*=>\s*["']([^"^']+)["'])?/i gem_line_regex = /^\s*gem\s+["']logstash-core["'](?:\s*,\s*["'][^"^']+["'])?(?:\s*,\s*:path\s*=>\s*["']([^"^']+)["'])?/i
lines = File.readlines("Gemfile") lines = File.readlines("Gemfile")
matches = lines.select{|line| line[gem_line_regex]} matches = lines.select {|line| line[gem_line_regex]}
abort("ERROR: Gemfile format error, need a single logstash-core gem specification") if matches.size != 1 abort("ERROR: Gemfile format error, need a single logstash-core gem specification") if matches.size != 1
path = matches.first[gem_line_regex, 1] path = matches.first[gem_line_regex, 1]
@ -427,7 +427,7 @@ namespace "artifact" do
gem_line_regex = /^\s*gem\s+["']logstash-core-plugin-api["'](?:\s*,\s*["'][^"^']+["'])?(?:\s*,\s*:path\s*=>\s*["']([^"^']+)["'])?/i gem_line_regex = /^\s*gem\s+["']logstash-core-plugin-api["'](?:\s*,\s*["'][^"^']+["'])?(?:\s*,\s*:path\s*=>\s*["']([^"^']+)["'])?/i
lines = File.readlines("Gemfile") lines = File.readlines("Gemfile")
matches = lines.select{|line| line[gem_line_regex]} matches = lines.select {|line| line[gem_line_regex]}
abort("ERROR: Gemfile format error, need a single logstash-core-plugin-api gem specification") if matches.size != 1 abort("ERROR: Gemfile format error, need a single logstash-core-plugin-api gem specification") if matches.size != 1
path = matches.first[gem_line_regex, 1] path = matches.first[gem_line_regex, 1]

View file

@ -58,7 +58,7 @@ describe "logstash Gemfile Manager" do
gemspec "boom" gemspec "boom"
END END
expect{LogStash::Gemfile.new(StringIO.new(file)).load}.to raise_error(LogStash::GemfileError) expect {LogStash::Gemfile.new(StringIO.new(file)).load}.to raise_error(LogStash::GemfileError)
end end
it "should add gems" do it "should add gems" do
@ -79,7 +79,7 @@ describe "logstash Gemfile Manager" do
gem "foo" gem "foo"
END END
expect{LogStash::Gemfile.new(StringIO.new(file)).load}.to raise_error(LogStash::GemfileError) expect {LogStash::Gemfile.new(StringIO.new(file)).load}.to raise_error(LogStash::GemfileError)
end end
it "should add gems with only name" do it "should add gems with only name" do

View file

@ -24,8 +24,8 @@ module LogStash
fetch_xpack_info fetch_xpack_info
if @executor.nil? if @executor.nil?
@executor = Executors.new_single_thread_scheduled_executor{ |runnable| create_daemon_thread (runnable)} @executor = Executors.new_single_thread_scheduled_executor { |runnable| create_daemon_thread (runnable)}
@executor.schedule_at_fixed_rate(Proc.new{fetch_xpack_info}, refresh_period, refresh_period, refresh_unit) @executor.schedule_at_fixed_rate(Proc.new {fetch_xpack_info}, refresh_period, refresh_period, refresh_unit)
end end
end end

View file

@ -33,6 +33,6 @@ describe "Monitoring is disabled" do
let(:monitoring_index) { ".monitoring-logstash-2-*" } let(:monitoring_index) { ".monitoring-logstash-2-*" }
it "doesn't record any metrics" do it "doesn't record any metrics" do
expect(elasticsearch_client.search(:index => monitoring_index)["hits"]["total"]["value"]).to eq(0) expect(elasticsearch_client.search(:index => monitoring_index)["hits"]["total"]["value"]).to eq(0)
end end
end end

View file

@ -253,7 +253,7 @@ describe LogStash::ConfigManagement::ElasticsearchSource do
it "#fetch_config should raise error" do it "#fetch_config should raise error" do
expect(mock_client).to receive(:get).with("#{described_class::SYSTEM_INDICES_API_PATH}/").and_return(elasticsearch_8_err_response.clone) expect(mock_client).to receive(:get).with("#{described_class::SYSTEM_INDICES_API_PATH}/").and_return(elasticsearch_8_err_response.clone)
expect{ subject.fetch_config(es_version_8_2, ["apache", "nginx"], mock_client) }.to raise_error(LogStash::ConfigManagement::ElasticsearchSource::RemoteConfigError) expect { subject.fetch_config(es_version_8_2, ["apache", "nginx"], mock_client) }.to raise_error(LogStash::ConfigManagement::ElasticsearchSource::RemoteConfigError)
end end
describe "wildcard" do describe "wildcard" do
@ -353,12 +353,12 @@ describe LogStash::ConfigManagement::ElasticsearchSource do
it "#fetch_config should raise error" do it "#fetch_config should raise error" do
expect(mock_client).to receive(:post).with("#{described_class::PIPELINE_INDEX}/_mget", {}, "{\"docs\":[{\"_id\":\"#{pipeline_id}\"},{\"_id\":\"#{another_pipeline_id}\"}]}").and_return(elasticsearch_7_9_err_response) expect(mock_client).to receive(:post).with("#{described_class::PIPELINE_INDEX}/_mget", {}, "{\"docs\":[{\"_id\":\"#{pipeline_id}\"},{\"_id\":\"#{another_pipeline_id}\"}]}").and_return(elasticsearch_7_9_err_response)
expect(mock_logger).to receive(:warn).never expect(mock_logger).to receive(:warn).never
expect{ subject.fetch_config(empty_es_version, [pipeline_id, another_pipeline_id], mock_client) }.to raise_error(LogStash::ConfigManagement::ElasticsearchSource::RemoteConfigError) expect { subject.fetch_config(empty_es_version, [pipeline_id, another_pipeline_id], mock_client) }.to raise_error(LogStash::ConfigManagement::ElasticsearchSource::RemoteConfigError)
end end
it "#fetch_config should raise error when response is empty" do it "#fetch_config should raise error when response is empty" do
expect(mock_client).to receive(:post).with("#{described_class::PIPELINE_INDEX}/_mget", {}, "{\"docs\":[{\"_id\":\"#{pipeline_id}\"},{\"_id\":\"#{another_pipeline_id}\"}]}").and_return(LogStash::Json.load("{}")) expect(mock_client).to receive(:post).with("#{described_class::PIPELINE_INDEX}/_mget", {}, "{\"docs\":[{\"_id\":\"#{pipeline_id}\"},{\"_id\":\"#{another_pipeline_id}\"}]}").and_return(LogStash::Json.load("{}"))
expect{ subject.fetch_config(empty_es_version, [pipeline_id, another_pipeline_id], mock_client) }.to raise_error(LogStash::ConfigManagement::ElasticsearchSource::RemoteConfigError) expect { subject.fetch_config(empty_es_version, [pipeline_id, another_pipeline_id], mock_client) }.to raise_error(LogStash::ConfigManagement::ElasticsearchSource::RemoteConfigError)
end end
it "#fetch_config should log unmatched pipeline id" do it "#fetch_config should log unmatched pipeline id" do
@ -557,7 +557,7 @@ describe LogStash::ConfigManagement::ElasticsearchSource do
end end
it 'should raise an error' do it 'should raise an error' do
expect{subject.pipeline_configs}.to raise_error(LogStash::LicenseChecker::LicenseError) expect {subject.pipeline_configs}.to raise_error(LogStash::LicenseChecker::LicenseError)
end end
end end
@ -568,7 +568,7 @@ describe LogStash::ConfigManagement::ElasticsearchSource do
end end
it 'should raise an error' do it 'should raise an error' do
expect{subject.pipeline_configs}.to raise_error(LogStash::LicenseChecker::LicenseError) expect {subject.pipeline_configs}.to raise_error(LogStash::LicenseChecker::LicenseError)
end end
end end
@ -616,7 +616,7 @@ describe LogStash::ConfigManagement::ElasticsearchSource do
let(:license_type) { 'basic' } let(:license_type) { 'basic' }
it 'should raise an error' do it 'should raise an error' do
expect{subject.pipeline_configs}.to raise_error(LogStash::LicenseChecker::LicenseError) expect {subject.pipeline_configs}.to raise_error(LogStash::LicenseChecker::LicenseError)
end end
end end
@ -697,7 +697,7 @@ describe LogStash::ConfigManagement::ElasticsearchSource do
end end
context "when any error returned from elasticsearch [#{es_version}]" do context "when any error returned from elasticsearch [#{es_version}]" do
let(:elasticsearch_8_response){"{\"error\" : \"no handler found for uri [/_logstash/pipelines?pretty] and method [GET]\"}" } let(:elasticsearch_8_response) {"{\"error\" : \"no handler found for uri [/_logstash/pipelines?pretty] and method [GET]\"}" }
let(:elasticsearch_7_9_response) { '{ "error":{"root_cause":[{"type":"illegal_argument_exception","reason":"No endpoint or operation is available at [testing_ph]"}],"type":"illegal_argument_exception","reason":"No endpoint or operation is available at [testing_ph]"},"status":400}' } let(:elasticsearch_7_9_response) { '{ "error":{"root_cause":[{"type":"illegal_argument_exception","reason":"No endpoint or operation is available at [testing_ph]"}],"type":"illegal_argument_exception","reason":"No endpoint or operation is available at [testing_ph]"},"status":400}' }
before do before do
@ -769,7 +769,7 @@ describe LogStash::ConfigManagement::ElasticsearchSource do
it "responses with an error" do it "responses with an error" do
allow(mock_client).to receive(:get).with("/").and_return(elasticsearch_8_err_response) allow(mock_client).to receive(:get).with("/").and_return(elasticsearch_8_err_response)
expect{ subject.get_es_version }.to raise_error(LogStash::ConfigManagement::ElasticsearchSource::RemoteConfigError) expect { subject.get_es_version }.to raise_error(LogStash::ConfigManagement::ElasticsearchSource::RemoteConfigError)
end end
end end

View file

@ -109,7 +109,7 @@ describe LogStash::Filters::Geoip do
it "should raise error if md5 does not match" do it "should raise error if md5 does not match" do
allow(Down).to receive(:download) allow(Down).to receive(:download)
expect{ download_manager.send(:download_database, database_type, dirname, db_info) }.to raise_error /wrong checksum/ expect { download_manager.send(:download_database, database_type, dirname, db_info) }.to raise_error /wrong checksum/
end end
it "should download file and return zip path" do it "should download file and return zip path" do
@ -161,7 +161,7 @@ describe LogStash::Filters::Geoip do
end end
it "should raise error if file is invalid" do it "should raise error if file is invalid" do
expect{ download_manager.send(:assert_database!, "Gemfile") }.to raise_error /failed to load database/ expect { download_manager.send(:assert_database!, "Gemfile") }.to raise_error /failed to load database/
end end
it "should pass validation" do it "should pass validation" do

View file

@ -9,7 +9,7 @@ describe "ArcSight module" do
let(:logstash_config_class) { LogStash::Modules::LogStashConfig } let(:logstash_config_class) { LogStash::Modules::LogStashConfig }
let(:module_name) { "arcsight" } let(:module_name) { "arcsight" }
let(:module_path) { ::File.join(LogStash::Environment::LOGSTASH_HOME, "x-pack", "modules", module_name, "configuration") } let(:module_path) { ::File.join(LogStash::Environment::LOGSTASH_HOME, "x-pack", "modules", module_name, "configuration") }
let(:mod) { instance_double("arcsight", :directory => module_path , :module_name => module_name) } let(:mod) { instance_double("arcsight", :directory => module_path, :module_name => module_name) }
let(:settings) { {} } let(:settings) { {} }
subject { logstash_config_class.new(mod, settings) } subject { logstash_config_class.new(mod, settings) }