Rubocop: Enable various EmptyLine cops (#15194)

Disabled:
 * Layout/EmptyLineAfterGuardClause
 * Layout/EmptyLineAfterMultilineCondition
 * Layout/EmptyLinesAroundAccessModifier

Enabled:
 * Layout/EmptyLineAfterMagicComment
 * Layout/EmptyLineBetweenDefs
 * Layout/EmptyLines
 * Layout/EmptyLinesAroundArguments
 * Layout/EmptyLinesAroundAttributeAccessor
 * Layout/EmptyLinesAroundBeginBody
 * Layout/EmptyLinesAroundBlockBody
 * Layout/EmptyLinesAroundExceptionHandlingKeywords
 * Layout/EmptyLinesAroundMethodBody
 * Layout/EmptyLinesAroundModuleBody
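
For context, a minimal before/after sketch of the kind of offense the newly enabled cops flag and autocorrect. The snippet is hypothetical and is not taken from this diff:

  # Before: offenses annotated with the cop that reports them
  module Example

    def first                 # blank line above: Layout/EmptyLinesAroundModuleBody
      :one

    end                       # blank line above: Layout/EmptyLinesAroundMethodBody
    def second                # no blank line between defs: Layout/EmptyLineBetweenDefs
      value = :two


      value                   # two consecutive blank lines above: Layout/EmptyLines
    end

  end

  # After autocorrect:
  module Example
    def first
      :one
    end

    def second
      value = :two

      value
    end
  end

Running something like "bundle exec rubocop -a" with this configuration produces the corrected form; the exact invocation used for this commit is not recorded in the message.
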
Andres Rodriguez 2023-07-18 16:49:16 -04:00 committed by GitHub
parent d95a0bba74
commit acd87a69e7
212 changed files with 92 additions and 447 deletions

View file

@ -30,7 +30,33 @@ Lint/BigDecimalNew:
Enabled: true
#################### Layout ###########################
##### Enabled Cops #####
##### Enabled/Disabled Cops #####
Layout/EmptyLineAfterGuardClause:
Enabled: false
Layout/EmptyLineAfterMagicComment:
Enabled: true
Layout/EmptyLineAfterMultilineCondition:
Enabled: false
Layout/EmptyLineBetweenDefs:
Enabled: true
Layout/EmptyLines:
Enabled: true
Layout/EmptyLinesAroundAccessModifier:
Enabled: false
Layout/EmptyLinesAroundArguments:
Enabled: true
Layout/EmptyLinesAroundAttributeAccessor:
Enabled: true
Layout/EmptyLinesAroundBeginBody:
Enabled: true
Layout/EmptyLinesAroundBlockBody:
Enabled: true
Layout/EmptyLinesAroundExceptionHandlingKeywords:
Enabled: true
Layout/EmptyLinesAroundMethodBody:
Enabled: true
Layout/EmptyLinesAroundModuleBody:
Enabled: true
Layout/TrailingWhitespace:
Enabled: true
Layout/TrailingEmptyLines:
@ -77,34 +103,8 @@ Layout/ElseAlignment:
Enabled: false
Layout/EmptyComment:
Enabled: false
Layout/EmptyLineAfterGuardClause:
Enabled: false
Layout/EmptyLineAfterMagicComment:
Enabled: false
Layout/EmptyLineAfterMultilineCondition:
Enabled: false
Layout/EmptyLineBetweenDefs:
Enabled: false
Layout/EmptyLines:
Enabled: false
Layout/EmptyLinesAroundAccessModifier:
Enabled: false
Layout/EmptyLinesAroundArguments:
Enabled: false
Layout/EmptyLinesAroundAttributeAccessor:
Enabled: false
Layout/EmptyLinesAroundBeginBody:
Enabled: false
Layout/EmptyLinesAroundBlockBody:
Enabled: false
Layout/EmptyLinesAroundClassBody:
Enabled: false
Layout/EmptyLinesAroundExceptionHandlingKeywords:
Enabled: false
Layout/EmptyLinesAroundMethodBody:
Enabled: false
Layout/EmptyLinesAroundModuleBody:
Enabled: false
Layout/EndAlignment:
Enabled: false
Layout/EndOfLine:
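
If one of the newly enabled cops proves too strict for a given style, it can be tuned here rather than disabled outright. As a hypothetical example (not part of this change), Layout/EmptyLineBetweenDefs accepts an AllowAdjacentOneLineDefs option that permits one-line defs to sit directly next to each other:

  Layout/EmptyLineBetweenDefs:
    Enabled: true
    AllowAdjacentOneLineDefs: true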

View file

@ -104,7 +104,6 @@ class Plugin
end
end
# reason could be a symbol, describing the phase that broke:
# :unit_test, :gem_build, :gem_install
FailureDetail = Struct.new(:plugin_name, :reason)
@ -267,5 +266,3 @@ if failed_plugins
else
puts "NO ERROR ON PLUGINS!"
end

View file

@ -143,7 +143,6 @@ module LogStash
"BUNDLE_GEMFILE" => LogStash::Environment::GEMFILE_PATH,
"BUNDLE_SILENCE_ROOT_WARNING" => "true",
"BUNDLE_WITHOUT" => options[:without].join(":")}) do
if !debug?
# Will deal with transient network errors
execute_bundler_with_retry(options)
@ -311,6 +310,5 @@ module LogStash
ensure
$stdout = old_stdout
end
end
end

View file

@ -72,7 +72,6 @@ module LogStash
def pattern_path(path)
return ::File.join(LOGSTASH_HOME, "patterns", path)
end
end
end

View file

@ -29,6 +29,7 @@ module LogStash
require_relative "patches/gems"
Gems.versions(plugin)
end
# Take a gem package and extract it to a specific target
# @param [String] Gem file, this must be a path
# @param [String, String] Return a Gem::Package and the installed path
@ -51,6 +52,5 @@ module LogStash
return [package, target_path]
end
end
end

View file

@ -22,12 +22,10 @@ require "zlib"
require "stud/temporary"
module LogStash
class CompressError < StandardError; end
module Util
module Zip
extend self
# Extract a zip file into a destination directory.
@ -61,7 +59,6 @@ module LogStash
end
module Tar
extend self
# Extract a tar.gz file into a destination directory.

View file

@ -26,7 +26,6 @@ require "pluginmanager/gemfile"
require "rubygems/specification"
require "pathname"
# This class cannot be in the logstash namespace, because of the way the DSL
# class interact with the other libraries
module Bundler

View file

@ -47,7 +47,6 @@ module Bundler
def uninstall!(gem_name)
unfreeze_gemfile do
dependencies_from = dependants_gems(gem_name)
if dependencies_from.size > 0

View file

@ -21,6 +21,7 @@ module LogStash module PluginManager
class UnpackablePluginError < PluginManagerError; end
class FileNotFoundError < PluginManagerError; end
class InvalidPackError < PluginManagerError; end
class InstallError < PluginManagerError
attr_reader :original_exception

View file

@ -23,7 +23,6 @@ require "fileutils"
require "stud/temporary"
require "jar-dependencies"
# This is a bit of a hack, to make sure that all of our call pass to a specific proxies.
# We do this before any jar-dependences check is done, meaning we have to silence him.
module Jars

View file

@ -19,7 +19,6 @@ require "rubygems/package"
require "yaml"
module LogStash::PluginManager
def self.load_aliases_definitions(path = File.expand_path('plugin_aliases.yml', __dir__))
content = IO.read(path)
@ -58,7 +57,6 @@ module LogStash::PluginManager
# @option options [Array<String>] :rubygems_source Gem sources to lookup for the verification
# @return [Boolean] true if valid logstash plugin gem name & version or a .gem file
def self.logstash_plugin?(plugin, version = nil, options={})
if plugin_file?(plugin)
begin
return logstash_plugin_gem_spec?(plugin_file_spec(plugin))
@ -174,7 +172,6 @@ module LogStash::PluginManager
# @param plugin_list [Array] array of [plugin name, version] tuples
# @return [Array] array of [plugin name, version, ...] tuples when duplicate names have been merged and non duplicate version requirements added
def self.merge_duplicates(plugin_list)
# quick & dirty naive dedup for now
# TODO: properly merge versions requirements
plugin_list.uniq(&:first)

View file

@ -16,11 +16,9 @@
# under the License.
module LogStash; module PluginManager
# Centralised messaging about installing and removing x-pack, which is no longer a plugin, but
# part of the distribution.
module XPackInterceptor
module Install
extend self

View file

@ -1,4 +1,5 @@
# -*- encoding: utf-8 -*-
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)

View file

@ -488,7 +488,6 @@ class LogStash::Agent
resolve_actions_and_converge_state([]) # We stop all the pipeline, so we converge to a empty state
end
def setting(key)
@settings.get(key)
end

View file

@ -22,7 +22,6 @@ require "logstash/api/commands/stats"
require "logstash/api/commands/node"
require "logstash/api/commands/default_metadata"
module LogStash
module Api
class CommandFactory

View file

@ -15,7 +15,6 @@
# specific language governing permissions and limitations
# under the License.
require "logstash/plugin"
# This is the base class for logstash codecs.

View file

@ -87,6 +87,7 @@ module LogStashCompilerLSCLGrammar; module LogStash; module Compiler; module LSC
end
class Plugins < Node; end
class Plugin < Node
def expr
expr_attributes = self.map_expr_attributes
@ -337,7 +338,6 @@ module LogStashCompilerLSCLGrammar; module LogStash; module Compiler; module LSC
all_elements = [first_element, *rest_elements]
res = if all_elements.size == 1
elem = all_elements.first
if elem.is_a?(Selector)

View file

@ -1,6 +1,5 @@
# Autogenerated from a Treetop grammar. Edits may be lost.
require "treetop/runtime"
require "logstash/compiler/lscl.rb"
@ -284,7 +283,6 @@ module LogStashCompilerLSCLGrammar
def cs2
elements[3]
end
end
def _nt_plugin_section
@ -470,7 +468,6 @@ module LogStashCompilerLSCLGrammar
def plugin
elements[0]
end
end
def _nt_plugins
@ -549,7 +546,6 @@ module LogStashCompilerLSCLGrammar
def attribute
elements[0]
end
end
module Plugin2
@ -572,7 +568,6 @@ module LogStashCompilerLSCLGrammar
def cs3
elements[5]
end
end
def _nt_plugin
@ -1479,7 +1474,6 @@ module LogStashCompilerLSCLGrammar
def value
elements[0]
end
end
module Array2
@ -1490,7 +1484,6 @@ module LogStashCompilerLSCLGrammar
def cs2
elements[3]
end
end
def _nt_array
@ -1610,7 +1603,6 @@ module LogStashCompilerLSCLGrammar
def cs2
elements[3]
end
end
def _nt_hash
@ -1687,7 +1679,6 @@ module LogStashCompilerLSCLGrammar
def hashentry
elements[0]
end
end
def _nt_hashentries
@ -1854,7 +1845,6 @@ module LogStashCompilerLSCLGrammar
def if
elements[0]
end
end
def _nt_branch
@ -1958,7 +1948,6 @@ module LogStashCompilerLSCLGrammar
def cs3
elements[5]
end
end
def _nt_if
@ -2086,7 +2075,6 @@ module LogStashCompilerLSCLGrammar
def cs4
elements[7]
end
end
def _nt_else_if
@ -2216,7 +2204,6 @@ module LogStashCompilerLSCLGrammar
def cs2
elements[3]
end
end
def _nt_else
@ -2328,7 +2315,6 @@ module LogStashCompilerLSCLGrammar
def expression
elements[0]
end
end
def _nt_condition
@ -2404,7 +2390,6 @@ module LogStashCompilerLSCLGrammar
def cs2
elements[3]
end
end
def _nt_expression
@ -2529,7 +2514,6 @@ module LogStashCompilerLSCLGrammar
def cs3
elements[5]
end
end
module NegativeExpression1
@ -2808,7 +2792,6 @@ module LogStashCompilerLSCLGrammar
def cs
elements[1]
end
end
def _nt_not_in_operator
@ -2932,7 +2915,6 @@ module LogStashCompilerLSCLGrammar
def rvalue
elements[0]
end
end
module MethodCall2
@ -2951,7 +2933,6 @@ module LogStashCompilerLSCLGrammar
def cs3
elements[5]
end
end
def _nt_method_call
@ -3268,7 +3249,6 @@ module LogStashCompilerLSCLGrammar
def cs2
elements[3]
end
end
def _nt_regexp_expression
@ -3547,7 +3527,6 @@ module LogStashCompilerLSCLGrammar
r0
end
end
class LogStashCompilerLSCLGrammarParser < Treetop::Runtime::CompiledParser

View file

@ -92,7 +92,6 @@ module LogStash; module Config; module AST
set_meta(PROCESS_ESCAPE_SEQUENCES, val)
end
def compile
LogStash::Config::AST.exclusive { do_compile }
end
@ -156,6 +155,7 @@ module LogStash; module Config; module AST
class Comment < Node; end
class Whitespace < Node; end
class PluginSection < Node
# Global plugin numbering for the janky instance variable naming we use
# like @filter_<name>_1
@ -168,8 +168,6 @@ module LogStash; module Config; module AST
generate_variables
code = []
@variables.each do |plugin, name|
code << <<-CODE
@generated_objects[:#{name}] = #{plugin.compile_initializer}
@#{plugin.plugin_type}s << @generated_objects[:#{name}]
@ -228,6 +226,7 @@ module LogStash; module Config; module AST
end
class Plugins < Node; end
class Plugin < Node
def plugin_type
if recursive_select_parent(Plugin).any?
@ -324,11 +323,13 @@ module LogStash; module Config; module AST
return text_value.inspect
end
end
class Attribute < Node
def compile
return %Q(#{name.compile} => #{value.compile})
end
end
class RValue < Node; end
class Value < RValue; end
@ -343,6 +344,7 @@ module LogStash; module Config; module AST
return Unicode.wrap(text_value)
end
end
class String < Value
def compile
if get_meta(PROCESS_ESCAPE_SEQUENCES)
@ -352,21 +354,25 @@ module LogStash; module Config; module AST
end
end
end
class RegExp < Value
def compile
return "Regexp.new(" + Unicode.wrap(text_value[1...-1]) + ")"
end
end
class Number < Value
def compile
return text_value
end
end
class Array < Value
def compile
return "[" << recursive_select(Value).collect(&:compile).reject(&:empty?).join(", ") << "]"
end
end
class Hash < Value
def validate!
duplicate_values = find_duplicate_keys
@ -408,7 +414,6 @@ module LogStash; module Config; module AST
class Branch < Node
def compile
# this construct is non obvious. we need to loop through each event and apply the conditional.
# each branch of a conditional will contain a construct (a filter for example) that also loops through
# the events variable so we have to initialize it to [event] for the branch code.
@ -454,6 +459,7 @@ module LogStash; module Config; module AST
<< children.collect(&:compile).map { |s| s.split("\n", -1).map { |l| " " + l }.join("\n") }.join("") << "\n"
end
end
class Elsif < BranchEntry
def compile
children = recursive_inject { |e| e.is_a?(Branch) || e.is_a?(Plugin) }
@ -461,6 +467,7 @@ module LogStash; module Config; module AST
<< children.collect(&:compile).map { |s| s.split("\n", -1).map { |l| " " + l }.join("\n") }.join("") << "\n"
end
end
class Else < BranchEntry
def compile
children = recursive_inject { |e| e.is_a?(Branch) || e.is_a?(Plugin) }
@ -529,26 +536,28 @@ module LogStash; module Config; module AST
return " #{text_value} "
end
end
module RegExpOperator
def compile
return " #{text_value} "
end
end
module BooleanOperator
def compile
return " #{text_value} "
end
end
class Selector < RValue
def compile
return "event.get(#{text_value.inspect})"
end
end
class SelectorElement < Node; end
end; end; end
# Monkeypatch Treetop::Runtime::SyntaxNode's inspect method to skip
# any Whitespace or SyntaxNodes with no children.
class Treetop::Runtime::SyntaxNode

View file

@ -18,7 +18,6 @@
require "logstash/config/defaults"
module LogStash module Config module CpuCoreStrategy
extend self
def maximum

View file

@ -18,7 +18,6 @@
require "concurrent"
module LogStash module Config module Defaults
extend self
def input

View file

@ -1,6 +1,5 @@
# Autogenerated from a Treetop grammar. Edits may be lost.
require "treetop/runtime"
require "logstash/config/config_ast"
@ -308,7 +307,6 @@ module LogStashConfig
def cs2
elements[3]
end
end
def _nt_plugin_section
@ -494,7 +492,6 @@ module LogStashConfig
def plugin
elements[0]
end
end
def _nt_plugins
@ -573,7 +570,6 @@ module LogStashConfig
def attribute
elements[0]
end
end
module Plugin2
@ -596,7 +592,6 @@ module LogStashConfig
def cs3
elements[5]
end
end
def _nt_plugin
@ -1503,7 +1498,6 @@ module LogStashConfig
def value
elements[0]
end
end
module Array2
@ -1514,7 +1508,6 @@ module LogStashConfig
def cs2
elements[3]
end
end
def _nt_array
@ -1634,7 +1627,6 @@ module LogStashConfig
def cs2
elements[3]
end
end
def _nt_hash
@ -1711,7 +1703,6 @@ module LogStashConfig
def hashentry
elements[0]
end
end
def _nt_hashentries
@ -1878,7 +1869,6 @@ module LogStashConfig
def if
elements[0]
end
end
def _nt_branch
@ -1982,7 +1972,6 @@ module LogStashConfig
def cs3
elements[5]
end
end
def _nt_if
@ -2110,7 +2099,6 @@ module LogStashConfig
def cs4
elements[7]
end
end
def _nt_else_if
@ -2240,7 +2228,6 @@ module LogStashConfig
def cs2
elements[3]
end
end
def _nt_else
@ -2352,7 +2339,6 @@ module LogStashConfig
def expression
elements[0]
end
end
def _nt_condition
@ -2428,7 +2414,6 @@ module LogStashConfig
def cs2
elements[3]
end
end
def _nt_expression
@ -2553,7 +2538,6 @@ module LogStashConfig
def cs3
elements[5]
end
end
module NegativeExpression1
@ -2832,7 +2816,6 @@ module LogStashConfig
def cs
elements[1]
end
end
def _nt_not_in_operator
@ -2956,7 +2939,6 @@ module LogStashConfig
def rvalue
elements[0]
end
end
module MethodCall2
@ -2975,7 +2957,6 @@ module LogStashConfig
def cs3
elements[5]
end
end
def _nt_method_call
@ -3292,7 +3273,6 @@ module LogStashConfig
def cs2
elements[3]
end
end
def _nt_regexp_expression
@ -3571,7 +3551,6 @@ module LogStashConfig
r0
end
end
class LogStashConfigParser < Treetop::Runtime::CompiledParser

View file

@ -47,7 +47,6 @@ LogStash::Environment.load_locale!
# }
#
module LogStash::Config::Mixin
include LogStash::Util::SubstitutionVariables
include LogStash::Util::Loggable
@ -166,7 +165,6 @@ module LogStash::Config::Mixin
end # def config_init
module DSL
include LogStash::Util::SubstitutionVariables
attr_accessor :flags

View file

@ -81,7 +81,6 @@ module LogStash module Config
next
end
alt_name = "module-#{module_name}"
pipeline_id = alt_name
module_settings.set("pipeline.id", pipeline_id)

View file

@ -80,7 +80,6 @@ class LogStash::DependencyReport < Clamp::Command
# Look at META-INF/MANIFEST.MF for any jars in each gem
# Note any important details.
Gem::Specification.select { |g| g.requirements && g.requirements.any? { |r| r =~ /^jar / } }.collect do |gem|
# Where is the gem installed
root = gem.full_gem_path

View file

@ -27,6 +27,7 @@ module LogStash class ElasticsearchClient
# duplicated here from Elasticsearch::Transport::Transport::Response
# to create a normalised response across different client IMPL
attr_reader :status, :body, :headers
def initialize(status, body, headers={})
@status, @body, @headers = status, body, headers
@body = body.force_encoding('UTF-8') if body.respond_to?(:force_encoding)

View file

@ -113,8 +113,6 @@ module LogStash
# post_process
].each {|setting| SETTINGS.register(setting) }
# Compute the default queue path based on `path.data`
default_queue_file_path = ::File.join(SETTINGS.get("path.data"), "queue")
SETTINGS.register Setting::WritableDirectory.new("path.queue", default_queue_file_path)

View file

@ -15,7 +15,6 @@
# specific language governing permissions and limitations
# under the License.
require "logstash/plugin"
require "logstash/config/mixin"
require "logstash/util/decorators"
@ -160,7 +159,6 @@ class LogStash::Filters::Base < LogStash::Plugin
@slow_logger.on_event("event processing time", @original_params, event, java.lang.System.nanoTime - time)
end
# in 1.5.0 multi_filter is meant to be used in the generated filter function in LogStash::Config::AST::Plugin only
# and is temporary until we refactor the filter method interface to accept events list and return events list,
# just list in multi_filter see https://github.com/elastic/logstash/issues/2872.

View file

@ -15,7 +15,6 @@
# specific language governing permissions and limitations
# under the License.
require "logstash/plugin"
require "logstash/config/mixin"
require "logstash/codecs/base"

View file

@ -66,7 +66,6 @@ module LogStash module Instrument
# to the provided default_value_generator block will be stored.
# @return [Metric] the value as it exists in the tree after this operation
def fetch_or_store(namespaces, key, default_value = nil)
# We first check in the `@fast_lookup` store to see if we have already see that metrics before,
# This give us a `o(1)` access, which is faster than searching through the structured
# data store (Which is a `o(n)` operation where `n` is the number of element in the namespace and
@ -134,7 +133,6 @@ module LogStash module Instrument
key_paths.reduce(get(*key_paths)) {|acc, p| acc[p]}
end
# Return a hash including the values of the keys given at the path given
#
# Example Usage:

View file

@ -22,7 +22,6 @@ module LogStash module Instrument module MetricType
def initialize(namespaces, key)
super(key.to_s)
end
def execute(action, value = 1)

View file

@ -22,6 +22,7 @@ module LogStash module Instrument module PeriodicPoller
include LogStash::Util::Loggable
class Override
attr_reader :key, :value
def initialize(key)
@key = key
@value = java.lang.System.getProperty(@key)
@ -89,6 +90,7 @@ module LogStash module Instrument module PeriodicPoller
module ControllerResource
attr_reader :base_path, :override, :offset_path
def implemented?
true
end
@ -102,6 +104,7 @@ module LogStash module Instrument module PeriodicPoller
@procs[:read_int] = lambda {|path| IO.readlines(path).first.to_i }
@procs[:read_lines] = lambda {|path| IO.readlines(path) }
end
def call_if_file_exists(call_key, file, not_found_value)
path = ::File.join(@base_path, @offset_path, file)
if ::File.exist?(path)
@ -125,6 +128,7 @@ module LogStash module Instrument module PeriodicPoller
def initialize(original_path)
common_initialize(CPUACCT_DIR, "ls.cgroup.cpuacct.path.override", original_path)
end
def to_hash
{:control_group => offset_path, :usage_nanos => cpuacct_usage}
end
@ -140,6 +144,7 @@ module LogStash module Instrument module PeriodicPoller
def initialize(original_path)
common_initialize(CPU_DIR, "ls.cgroup.cpu.path.override", original_path)
end
def to_hash
{
:control_group => offset_path,
@ -152,9 +157,11 @@ module LogStash module Instrument module PeriodicPoller
def cfs_period_us
call_if_file_exists(:read_int, "cpu.cfs_period_us", -1)
end
def cfs_quota_us
call_if_file_exists(:read_int, "cpu.cfs_quota_us", -1)
end
def build_cpu_stats_hash
stats = CpuStats.new
lines = call_if_file_exists(:read_lines, "cpu.stat", [])
@ -165,9 +172,11 @@ module LogStash module Instrument module PeriodicPoller
class UnimplementedResource
attr_reader :controller, :original_path
def initialize(controller, original_path)
@controller, @original_path = controller, original_path
end
def implemented?
false
end
@ -179,6 +188,7 @@ module LogStash module Instrument module PeriodicPoller
@number_of_times_throttled = -1
@time_throttled_nanos = -1
end
def update(lines)
lines.each do |line|
fields = line.split(/\s+/)
@ -190,6 +200,7 @@ module LogStash module Instrument module PeriodicPoller
end
end
end
def to_hash
{
:number_of_elapsed_periods => @number_of_elapsed_periods,

View file

@ -32,7 +32,6 @@ java_import 'javax.naming.directory.Attribute'
java_import 'org.logstash.instrument.reports.MemoryReport'
java_import 'org.logstash.instrument.reports.ProcessReport'
module LogStash module Instrument module PeriodicPoller
class JVM < Base
class GarbageCollectorName
@ -118,7 +117,6 @@ module LogStash module Instrument module PeriodicPoller
metric.gauge(cpu_path, :total_in_millis, cpu_metrics["total_in_millis"])
metric.gauge(path + [:mem], :total_virtual_in_bytes, process_metrics["mem"]["total_virtual_in_bytes"])
end
def collect_load_average

View file

@ -645,5 +645,4 @@ module LogStash; class JavaPipeline < AbstractPipeline
def inputs_running?
@input_threads.any?(&:alive?)
end
end; end

View file

@ -27,7 +27,6 @@ module LogStash
# options[:symbolize_keys] ? JrJackson::Raw.parse_sym(data) : JrJackson::Raw.parse_raw(data)
JrJackson::Ruby.parse(data, options)
rescue JrJackson::ParseError => e
raise LogStash::Json::ParserError.new(e.message)
end
@ -38,13 +37,11 @@ module LogStash
# also look for Java::JavaUtil::ArrayList, see TODO submit issue
# o.is_a?(Enumerable) ? JrJackson::Raw.generate(o) : JrJackson::Json.dump(o)
JrJackson::Base.generate(o, options)
rescue => e
raise LogStash::Json::GeneratorError.new(e.message)
end
alias_method :load, "jruby_load".to_sym
alias_method :dump, "jruby_dump".to_sym
end
end

View file

@ -19,6 +19,7 @@ module LogStash module Modules class CLIParser
include LogStash::Util::Loggable
attr_reader :output
def initialize(module_names, module_variables)
@output = []
# The #compact here catches instances when module_variables may be nil or

View file

@ -46,5 +46,4 @@ module LogStash module Modules class ElasticsearchImporter
response = @client.head(path)
response.status >= 200 && response.status < 300
end
end end end # class LogStash::Modules::Importer

View file

@ -28,6 +28,7 @@ module LogStash module Modules class KibanaClient
class Response
# to create a custom response with body as an Object (Hash or Array)
attr_reader :status, :body, :headers
def initialize(status, body, headers={})
@status, @body, @headers = status, body, headers
@body = body.is_a?(String) ? LogStash::Json.load(body) : body
@ -130,7 +131,6 @@ module LogStash module Modules class KibanaClient
# content will be converted to a json string
def post(relative_path, content, headers = nil)
body = content.is_a?(String) ? content : LogStash::Json.dump(content)
options = {:body => body}.merge(headers || @http_options)
safely(:post, relative_path, options)

View file

@ -22,6 +22,7 @@ module LogStash module Modules class KibanaSettings < KibanaBase
class Setting
attr_reader :name, :value
def initialize(name, value)
@name, @value = name, value
end

View file

@ -66,5 +66,4 @@ module LogStash module Modules class Scaffold
def is_enabled?(settings)
true
end
end end end # class LogStash::Modules::Scaffold

View file

@ -15,7 +15,6 @@
# specific language governing permissions and limitations
# under the License.
require "logstash/plugin"
require "logstash/config/mixin"
require "concurrent/atomic/atomic_fixnum"

View file

@ -44,6 +44,7 @@ if LogStash::Environment.windows?
alias_method :orig_peeraddr, :peeraddr
include JRubyBug2558SocketPeerAddrBugFix
end
class UDPSocket
alias_method :orig_peeraddr, :peeraddr
include JRubyBug2558SocketPeerAddrBugFix

View file

@ -33,7 +33,6 @@ module Clamp
end
module Option
module Declaration
def deprecated_option(switches, type, description, opts = {})
Option::Definition.new(switches, type, description, opts).tap do |option|
@ -45,7 +44,6 @@ module Clamp
end
module StrictDeclaration
include Clamp::Attribute::Declaration
include LogStash::Util::Loggable

View file

@ -18,7 +18,6 @@
require "logstash/pipeline_action/base"
require "logstash/java_pipeline"
module LogStash module PipelineAction
class Create < Base
include LogStash::Util::Loggable

View file

@ -122,9 +122,9 @@ module LogStash
end
end
class PipelinesRegistry
attr_reader :states
include LogStash::Util::Loggable
def initialize

View file

@ -36,7 +36,6 @@ module ::LogStash; module Plugins; module Builtin; module Pipeline; class Input
end
# add address to the plugin stats
metric.gauge(:address, address)
end
def run(queue)
@ -85,5 +84,4 @@ module ::LogStash; module Plugins; module Builtin; module Pipeline; class Input
def isRunning
@running.get
end
end; end; end; end; end

View file

@ -3,7 +3,6 @@ require 'logstash/util/thread_safe_attributes'
module LogStash
module Plugins
module EventFactorySupport
extend LogStash::Util::ThreadSafeAttributes
# The event_factory method is effectively final and should not be re-defined by plugins.
@ -62,7 +61,6 @@ module LogStash
end
private_constant :TargetedEventFactory
end
end
end

View file

@ -368,7 +368,6 @@ class LogStash::Runner < Clamp::StrictCommand
configure_plugin_paths(setting("path.plugins"))
@settings.format_settings.each {|line| logger.debug(line) }
# Check for absence of any configuration
@ -441,7 +440,6 @@ class LogStash::Runner < Clamp::StrictCommand
org.apache.logging.log4j.LogManager.shutdown
agent_return
rescue org.logstash.LockException => e
logger.fatal(I18n.t("logstash.runner.locked-data-path", :path => setting("path.data")))
return 1

View file

@ -24,7 +24,6 @@ require "logstash/util/time_value"
require "i18n"
module LogStash
class Settings
include LogStash::Util::SubstitutionVariables
@ -77,7 +76,6 @@ module LogStash
"queue.type",
]
def initialize
@settings = {}
# Theses settings were loaded from the yaml file

View file

@ -74,7 +74,6 @@ module LogStash::Util
}
end
# Merge hash 'src' into 'dst' nondestructively
#
# Duplicate keys will become array values
@ -156,7 +155,6 @@ module LogStash::Util
return dst
end # def hash_merge_many
# normalize method definition based on platform.
# normalize is used to convert an object create through
# json deserialization from JrJackson in :raw mode to pure Ruby
@ -234,5 +232,4 @@ module LogStash::Util
value.respond_to?(:empty?) ? value.empty? : !value
end
end
end # module LogStash::Util

View file

@ -18,7 +18,6 @@
require "logstash/util"
module LogStash::Util
# Decorators provides common manipulation on the event data.
module Decorators
include LogStash::Util::Loggable
@ -64,7 +63,5 @@ module LogStash::Util
event.set("tags", tags)
end
end # module LogStash::Util::Decorators
end # module LogStash::Util

View file

@ -26,7 +26,6 @@ class LogStash::Util::SafeURI
extend Forwardable
attr_reader :uri
public

View file

@ -30,7 +30,6 @@ require "logstash/environment"
# The methods should be called in the above order before the settings are ready to be used.
########################
module LogStash::Util::SettingsHelper
# The `path.settings` and `path.logs` can not be defined in "logstash/environment" since the `Environment::LOGSTASH_HOME` doesn't
# exist unless launched via "bootstrap/environment"
def self.pre_process
@ -93,5 +92,4 @@ module LogStash::Util::SettingsHelper
# I know, double negative
!(["--help", "-h"] & args).empty?
end
end

View file

@ -21,7 +21,6 @@ require_relative 'lazy_singleton'
require_relative 'password'
module ::LogStash::Util::SubstitutionVariables
include LogStash::Util::Loggable
SUBSTITUTION_PLACEHOLDER_REGEX = /\${(?<name>[a-zA-Z_.][a-zA-Z0-9_.]*)(:(?<default>[^}]*))?}/

View file

@ -19,7 +19,6 @@ module LogStash
module Util
# @api internal
module ThreadSafeAttributes
# Thread-safe lazy initialized attribute with a given (variable) name.
def lazy_init_attr(attribute, variable: "@#{attribute}".to_sym, &block)
raise ArgumentError.new("invalid attribute name: #{attribute}") unless attribute.match? /^[_A-Za-z]\w*$/
@ -38,7 +37,6 @@ module LogStash
end
end
end
end
end
end

View file

@ -1,4 +1,5 @@
# encoding: utf-8
module LogStash::Util::UnicodeTrimmer
# The largest possible unicode chars are 4 bytes
# http://stackoverflow.com/questions/9533258/what-is-the-maximum-number-of-bytes-for-a-utf-8-encoded-character

View file

@ -19,7 +19,6 @@ require "logstash/util"
# This class exists to format the settings for default worker threads
module LogStash module Util class WorkerThreadsDefaultPrinter
def initialize(settings)
@setting = settings.fetch('pipeline.workers', 0)
@default = settings.fetch('default-pipeline-workers', 0)
@ -39,5 +38,4 @@ module LogStash module Util class WorkerThreadsDefaultPrinter
return if @default == 0
collector.push "Default pipeline workers: #{@default}"
end
end end end

View file

@ -47,6 +47,7 @@ describe "conditionals in output" do
def register
end
def multi_receive(events)
end
end
@ -366,7 +367,6 @@ describe "conditionals in filter" do
sample_one("sample") { expect(subject.get("tags")).to include("failure") }
sample_one("some sample") { expect(subject.get("tags")).to include("failure") }
end
end
describe "negated expressions" do
@ -413,7 +413,6 @@ describe "conditionals in filter" do
sample_one("sample") { expect(subject.get("tags")).not_to include("failure") }
sample_one("some sample") { expect(subject.get("tags")).not_to include("failure") }
end
end
describe "value as an expression" do

View file

@ -15,7 +15,6 @@
# specific language governing permissions and limitations
# under the License.
require "logstash/instrument/namespaced_metric"
describe LogStash::WrappedAckedQueue, :stress_test => true do

View file

@ -64,11 +64,9 @@ describe LogStash::Agent do
end
context "system pipeline" do
let(:system_pipeline_config) { mock_pipeline_config(:system_pipeline, "input { dummyblockinginput { } } output { null {} }", { "pipeline.system" => true }) }
context "when we have a finite pipeline and a system pipeline running" do
let(:finite_pipeline_config) { mock_pipeline_config(:main, "input { generator { count => 1000 } } output { null {} }") }
let(:source_loader) do

View file

@ -81,7 +81,6 @@ describe LogStash::Agent do
expect(mval(:stats, :flow, :queue_backpressure)).to be_a_kind_of(java.util.Map)
expect(mval(:stats, :flow, :worker_concurrency)).to be_a_kind_of(java.util.Map)
end
end
context "when we try to start one pipeline" do

View file

@ -28,9 +28,7 @@ require 'timeout'
java_import org.logstash.Timestamp
describe LogStash::Agent do
shared_examples "all Agent tests" do
let(:agent_settings) { mock_settings({}) }
let(:agent_args) { {} }
let(:pipeline_settings) { agent_settings.clone }
@ -194,7 +192,6 @@ describe LogStash::Agent do
t.join
end
end
end
context "when calling reload_state!" do
context "with a pipeline with auto reloading turned off" do
@ -286,7 +283,6 @@ describe LogStash::Agent do
end
context "referenced environment variable does not exist" do
it "does not converge the pipeline" do
expect(subject.converge_state_and_update).not_to be_a_successful_converge
end
@ -393,7 +389,6 @@ describe LogStash::Agent do
end
context "metrics after config reloading" do
let(:initial_generator_threshold) { 1000 }
let(:original_config_output) { Stud::Temporary.pathname }
let(:new_config_output) { Stud::Temporary.pathname }

View file

@ -24,7 +24,6 @@ describe LogStash::Api::Modules::Logging do
include_context "api setup"
describe "#logging" do
context "when setting a logger's log level" do
it "should return a positive acknowledgement on success" do
put '/', '{"logger.logstash": "ERROR"}'

View file

@ -25,7 +25,6 @@ describe LogStash::Api::Modules::Node do
include_examples "not found"
describe "#hot threads" do
before(:all) do
get "/hot_threads"
end
@ -39,7 +38,6 @@ describe LogStash::Api::Modules::Node do
end
context "#threads count" do
before(:all) do
get "/hot_threads?threads=5"
end
@ -63,7 +61,6 @@ describe LogStash::Api::Modules::Node do
end
context "broken params in URL" do
before(:all) do
get "/hot_threads?human=?threads=5"
end
@ -80,7 +77,6 @@ describe LogStash::Api::Modules::Node do
"/hot_threads?human=1",
"/hot_threads?human=t",
].each do |path|
before(:all) do
get path
end

View file

@ -61,7 +61,6 @@ describe LogStash::Api::Modules::Plugins do
end
context "#values" do
let(:payload) { LogStash::Json.load(last_response.body) }
it "return totals of plugins" do

View file

@ -22,6 +22,7 @@ DATA_DOUBLE = "data".freeze
# use a dummy NOOP output to test Outputs::Base
class LogStash::Codecs::NOOPAsync < LogStash::Codecs::Base
attr_reader :last_result
config_name "noop_async"
def encode(event)
@ -31,6 +32,7 @@ end
class LogStash::Codecs::NOOPSync < LogStash::Codecs::Base
attr_reader :last_result
config_name "noop_sync"
def encode_sync(event)
@ -40,6 +42,7 @@ end
class LogStash::Codecs::NOOPMulti < LogStash::Codecs::Base
attr_reader :last_result
config_name "noop_multi"
def encode_sync(event)

View file

@ -298,7 +298,6 @@ describe LogStash::Compiler do
it "should merge the values of the duplicate keys into an array" do
expect(c_plugin).to ir_eql(j.iPlugin(rand_meta, FILTER, "grok", expected_plugin_args))
end
end
describe "a filter plugin that has nested Hash directives" do
@ -428,7 +427,6 @@ describe LogStash::Compiler do
EOS
end
it "should contain both section declarations, in order" do
expect(compiled_section).to ir_eql(compose(
splugin("aplugin", {"count" => 1}),

View file

@ -102,7 +102,6 @@ describe LogStashConfigParser do
end
end
context "invalid configuration" do
it "rejects duplicate hash key" do
parser = LogStashConfigParser.new
@ -223,6 +222,7 @@ describe LogStashConfigParser do
@code = @config.compile
eval(@code)
end
def plugin(*args); end
def line_to_source(*args); end
end
@ -235,7 +235,6 @@ describe LogStashConfigParser do
output { output1 { } }"
}
it "should create a pipeline with both sections" do
generated_objects = pipeline_klass.new(config_string, settings).instance_variable_get("@generated_objects")
filters = generated_objects.keys.map(&:to_s).select {|obj_name| obj_name.match(/^filter.+?_\d+$/) }
@ -250,7 +249,6 @@ describe LogStashConfigParser do
output { output1 { } }"
}
it "should create a pipeline with both sections" do
generated_objects = pipeline_klass.new(config_string, settings).instance_variable_get("@generated_objects")
outputs = generated_objects.keys.map(&:to_s).select {|obj_name| obj_name.match(/^output.+?_\d+$/) }
@ -277,6 +275,7 @@ describe LogStashConfigParser do
@code = @config.compile
eval(@code)
end
def plugin(*args); end
def line_to_source(*args); end
end

View file

@ -19,7 +19,6 @@ require "spec_helper"
require "logstash/config/cpu_core_strategy"
describe LogStash::Config::CpuCoreStrategy do
before do
allow(LogStash::Config::Defaults).to receive(:cpu_cores).and_return(cores)
end
@ -135,5 +134,4 @@ describe LogStash::Config::CpuCoreStrategy do
expect(described_class.max_minus_two).to eq(1)
end
end
end

View file

@ -24,7 +24,6 @@ describe LogStash::ConvergeResult do
subject { described_class.new(expected_actions_count) }
context "When the action was executed" do
it "returns the time of execution" do
expect(LogStash::ConvergeResult::FailedAction.new("testing").executed_at.class).to eq(LogStash::Timestamp)

View file

@ -19,9 +19,7 @@ require "spec_helper"
require "logstash/environment"
describe LogStash::Environment do
context "when loading jars dependencies" do
let(:default_jars_location) { File.join("vendor", "jar-dependencies") }
let(:default_runtime_location) { File.join(default_jars_location, "runtime-jars", "*.jar") }
let(:default_test_location) { File.join(default_jars_location, "test-jars", "*.jar") }
@ -37,7 +35,6 @@ describe LogStash::Environment do
end
context "when loading a jar file" do
let(:dummy_jar_file) { File.join(default_jars_location, "runtime-jars", "elasticsearch.jar") }
it "requires the jar files if there are jars to load" do
@ -65,7 +62,6 @@ describe LogStash::Environment do
end
end
describe "OS detection" do
windows_host_os = %w(bccwin cygwin mingw mswin wince)
linux_host_os = %w(linux)

View file

@ -247,7 +247,6 @@ describe LogStash::Event do
end
end
# TODO(talevy): migrate tests to Java. no reason to test logging logic in ruby when it is being
# done in java land.
@ -343,7 +342,6 @@ describe LogStash::Event do
end
context "initialize" do
it "should accept Ruby Hash" do
e = LogStash::Event.new({"foo" => 1, TIMESTAMP => "2015-05-28T23:02:05.350Z"})
expect(e.get("foo")).to eq(1)

View file

@ -21,7 +21,6 @@ require "logstash/filter_delegator"
require "support/shared_contexts"
describe LogStash::FilterDelegator do
include_context "execution_context"
let(:filter_id) { "my-filter" }
@ -57,6 +56,7 @@ describe LogStash::FilterDelegator do
config :host, :validate => :string
def register; end
def flush(options = {}); @events ; end
def filter(event)
@events ||= []
@events << event
@ -85,7 +85,6 @@ describe LogStash::FilterDelegator do
end
context "when the filter buffer events" do
it "has incremented :in" do
subject.multi_filter(events)
expect(collector.snapshot_metric.metric_store.get_with_path("/null")[:null]["my-filter".to_sym][:events][:in].value).to eq(events.size)
@ -132,6 +131,7 @@ describe LogStash::FilterDelegator do
config_name "super_plugin"
config :host, :validate => :string
def register; end
def filter(event)
event
end
@ -159,5 +159,4 @@ describe LogStash::FilterDelegator do
expect(subject.config_name).to eq("super_plugin")
end
end
end

View file

@ -382,7 +382,6 @@ describe LogStash::Filters::NOOP do
sample_one("type" => "noop", "go" => "away", "tags" => "blackhole") do
expect(subject.get("[tags]")).to eq("blackhole")
end
end
describe "when metrics are disabled" do

View file

@ -35,5 +35,4 @@ describe LogStash::Instrument::MetricType::Counter do
expect(LogStash::Json.dump(subject)).to eq("0")
end
end
end

View file

@ -41,6 +41,4 @@ describe LogStash::Instrument::MetricType::Gauge do
expect(LogStash::Json.dump(subject)).to eq("\"#{value}\"")
end
end
end

View file

@ -43,5 +43,4 @@ describe LogStash::Instrument::NamespacedNullMetric do
expect(subject.namespace_name).to eq([namespace])
expect(new_namespace.namespace_name).to eq([:root, :wally])
end
end

View file

@ -20,7 +20,6 @@ require_relative "../../support/matchers"
require "spec_helper"
describe LogStash::Instrument::NullMetric do
let(:key) { "test" }
subject { LogStash::Instrument::NullMetric.new(nil) }

View file

@ -44,7 +44,6 @@ describe LogStash::Instrument::PeriodicPoller::JVM::GarbageCollectorName do
end
end
describe LogStash::Instrument::PeriodicPoller::JVM do
let(:metric) { LogStash::Instrument::Metric.new(LogStash::Instrument::Collector.new) }
let(:options) { {} }

View file

@ -15,7 +15,6 @@
# specific language governing permissions and limitations
# under the License.
require_relative "../../support/mocks_classes"
require "spec_helper"
require "java"
@ -139,5 +138,4 @@ describe LogStash::WrappedWriteClient do
include_examples "queue tests"
end
end

View file

@ -22,7 +22,6 @@ require "support/shared_contexts"
java_import org.logstash.RubyUtil
describe LogStash::FilterDelegator do
class MockGauge
def increment(_)
end
@ -81,6 +80,7 @@ describe LogStash::FilterDelegator do
config :host, :validate => :string
def register; end
def flush(options = {}); @events ; end
def filter(event)
@events ||= []
@events << event
@ -116,7 +116,6 @@ describe LogStash::FilterDelegator do
end
context "when the filter buffer events" do
it "has incremented :in" do
subject.to_java.multiFilter(events)
expect(
@ -167,6 +166,7 @@ describe LogStash::FilterDelegator do
config_name "super_plugin"
config :host, :validate => :string
def register; end
def filter(event)
event
end
@ -196,5 +196,4 @@ describe LogStash::FilterDelegator do
expect(subject.config_name).to eq("super_plugin")
end
end
end

View file

@ -18,15 +18,12 @@
require "spec_helper"
describe "Java integration" do
context "type equivalence" do
# here we test for both is_a? and case/when usage of the Java types
# because these are the specific use-cases in our code and the expected
# behaviour.
context "Java::JavaUtil::ArrayList" do
it "should report to be a Ruby Array" do
expect(Java::JavaUtil::ArrayList.new.is_a?(Array)).to eq(true)
end
@ -178,7 +175,6 @@ describe "Java integration" do
end
context "when intersecting with a Ruby Array" do
context "using string collection with duplicates and single result" do
let(:initial_array) {["foo", "bar", "foo"]}
@ -214,7 +210,6 @@ describe "Java integration" do
end
context "when unioning with a Ruby Array" do
context "using string collection with duplicates" do
let(:initial_array) {["foo", "bar", "foo"]}
@ -271,7 +266,6 @@ describe "Java integration" do
context "Enumerable implementation" do
context "Java Map interface should report key with nil value as included" do
it "should support include? method" do
expect(Java::JavaUtil::LinkedHashMap.new({"foo" => nil}).include?("foo")).to eq(true)
end
@ -290,7 +284,6 @@ describe "Java integration" do
end
context "Java Map interface should report key with a value as included" do
it "should support include? method" do
expect(Java::JavaUtil::LinkedHashMap.new({"foo" => 1}).include?("foo")).to eq(true)
end
@ -309,7 +302,6 @@ describe "Java integration" do
end
context "Java Map interface should report non existing key as not included" do
it "should support include? method" do
expect(Java::JavaUtil::LinkedHashMap.new({"foo" => 1})).not_to include("bar")
end

View file

@ -155,12 +155,15 @@ class DummyFlushingFilter < LogStash::Filters::Base
def register() end
def filter(event) end
def periodic_flush
true
end
def flush(options)
[::LogStash::Event.new("message" => "dummy_flush")]
end
def close() end
end
@ -544,7 +547,6 @@ describe LogStash::JavaPipeline do
allow(LogStash::Plugin).to receive(:lookup).with("output", "dummyoutput").and_return(::LogStash::Outputs::DummyOutput)
end
let(:test_config_without_output_workers) {
<<-eos
input {
@ -784,7 +786,6 @@ describe LogStash::JavaPipeline do
end
end
describe "max inflight warning" do
let(:config) { "input { dummyinput {} } output { dummyoutput {} }" }
let(:batch_size) { 1 }
@ -984,7 +985,6 @@ describe LogStash::JavaPipeline do
pipeline.shutdown
end
end
it_behaves_like 'it flushes correctly'
@ -1315,7 +1315,6 @@ describe LogStash::JavaPipeline do
collect_stats
expect(collected_stats).to be_nil
end
end
context 'when dlq is enabled' do

View file

@ -21,7 +21,6 @@ require "logstash/environment"
require "logstash/util"
describe "LogStash::Json" do
let(:hash) {{"a" => 1}}
let(:json_hash) {"{\"a\":1}"}
@ -73,7 +72,6 @@ describe "LogStash::Json" do
end
context "pretty print" do
let(:hash) { { "foo" => "bar", :zoo => 2 } }
it "should serialize with pretty print" do

View file

@ -19,7 +19,6 @@ require "spec_helper"
require "logstash/modules/cli_parser"
describe LogStash::Modules::CLIParser do
subject { LogStash::Modules::CLIParser.new(module_names, module_variables) }
let(:logger) { double("logger") }
let(:module_name) { "foo" }
@ -140,5 +139,4 @@ describe LogStash::Modules::CLIParser do
end
end
end
end

View file

@ -23,6 +23,7 @@ module LogStash module Modules
def http(method, endpoint, options)
self
end
def call
KibanaTestResponse.new(200, '{"version":{"number":"1.2.3","build_snapshot":false}}', {})
end

View file

@ -46,7 +46,6 @@ describe LogStash::Modules::SettingsMerger do
end
describe "#merge_kibana_auth" do
before do
described_class.merge_kibana_auth!(mod_settings)
end
@ -72,7 +71,6 @@ describe LogStash::Modules::SettingsMerger do
expect(mod_settings["var.kibana.password"]).to eq("stott")
end
end
end
describe "#merge_cloud_settings" do

View file

@ -22,7 +22,6 @@ require 'securerandom'
require_relative '../support/helpers'
describe LogStash::PersistedQueueConfigValidator do
context("when persisted queues are enabled") do
let(:input_block) { "input { generator {} }" }
let(:config_path) { temporary_file(input_block) }
@ -212,6 +211,5 @@ describe LogStash::PersistedQueueConfigValidator do
end
end
end
end
end

View file

@ -45,7 +45,6 @@ describe LogStash::PipelineAction::Create do
expect(subject.pipeline_id).to eq(:main)
end
context "when we have really short lived pipeline" do
let(:pipeline_config) { mock_pipeline_config(:main, "input { generator { count => 1 } } output { null {} }") }

View file

@ -115,5 +115,4 @@ describe LogStash::JavaPipeline do
subject.shutdown
end
end
end

View file

@ -19,7 +19,6 @@ require "spec_helper"
require "logstash/pipelines_registry"
describe LogStash::PipelinesRegistry do
let(:pipeline_id) { "test".to_sym }
let(:pipeline) { double("Pipeline") }
let(:logger) { double("Logger") }
@ -109,7 +108,6 @@ describe LogStash::PipelinesRegistry do
let (:pipeline2) { double("pipeline2") }
it "should create a loading state before calling the create block" do
# create a thread which calls create_pipeline and wait in the create
# block so we can controle the pipeline initialization phase
t = Thread.new do
@ -198,7 +196,6 @@ describe LogStash::PipelinesRegistry do
# 2nd call: finished_execution? is true
expect(subject.running_pipelines).to be_empty
queue = Queue.new # threadsafe queue
in_block = Concurrent::AtomicBoolean.new(false)
@ -343,6 +340,5 @@ describe LogStash::PipelinesRegistry do
expect(subject.running_user_defined_pipelines).to be_empty
end
end
end
end

View file

@ -191,7 +191,6 @@ describe LogStash::Plugin do
config :foo_tag, :validate => :string, :default => "bar"
end
].each do |klass|
it "subclass #{klass.name} does not modify params" do
klass.new(args)
expect(args).to be_empty
@ -199,7 +198,6 @@ describe LogStash::Plugin do
end
context "codec initialization" do
class LogStash::Codecs::Noop < LogStash::Codecs::Base
config_name "noop"
@ -212,7 +210,6 @@ describe LogStash::Plugin do
expect_any_instance_of(LogStash::Codecs::Noop).to receive(:register).once
LogStash::Plugin.new(args)
end
end
end
@ -304,8 +301,6 @@ describe LogStash::Plugin do
'when there is not ID configured for the plugin' => {},
'when a user provide an ID for the plugin' => { 'id' => 'ABC' },
}.each do |desc, config_override|
context(desc) do
let(:config) { super().merge(config_override) }
@ -428,7 +423,6 @@ describe LogStash::Plugin do
end
end
end
end
describe "deprecation logger" do

View file

@ -120,7 +120,6 @@ describe ::LogStash::Plugins::Builtin::Pipeline do
)[:filter]
expect(event_metrics[:send_to].value).to eq([address])
end
end
after(:each) do

View file

@ -9,7 +9,6 @@ require 'logstash/outputs/base'
java_import "org.logstash.util.CATrustedFingerprintTrustStrategy"
describe LogStash::Plugins::CATrustedFingerprintSupport do
let(:ca_trusted_fingerprint_support) { described_class }
[
@ -18,9 +17,7 @@ describe LogStash::Plugins::CATrustedFingerprintSupport do
LogStash::Codecs::Base,
LogStash::Outputs::Base
].each do |base_class|
context "that inherits from `#{base_class}`" do
let(:plugin_base_class) { base_class }
subject(:plugin_class) do
@ -39,7 +36,6 @@ describe LogStash::Plugins::CATrustedFingerprintSupport do
let(:plugin) { plugin_class.new(options) }
context '#initialize' do
shared_examples 'normalizes fingerprints' do
context '#ca_trusted_fingerprint' do
it "normalizes to an array of capital hex fingerprints" do
@ -109,5 +105,4 @@ describe LogStash::Plugins::CATrustedFingerprintSupport do
end
end
end
end

View file

@ -8,7 +8,6 @@ require 'logstash/codecs/base'
require 'logstash/outputs/base'
describe LogStash::Plugins::EventFactorySupport do
let(:event_factory_support) { described_class }
[
@ -17,9 +16,7 @@ describe LogStash::Plugins::EventFactorySupport do
LogStash::Codecs::Base,
LogStash::Outputs::Base
].each do |base_class|
context "that inherits from `#{base_class}`" do
let(:plugin_base_class) { base_class }
subject(:plugin_class) do
@ -40,17 +37,14 @@ describe LogStash::Plugins::EventFactorySupport do
let(:plugin) { plugin_class.new(options) }
shared_examples 'an event factory' do
it 'returns an event' do
expect( event_factory.new_event ).to be_a LogStash::Event
expect( event = event_factory.new_event('foo' => 'bar') ).to be_a LogStash::Event
expect( event.get('foo') ).to eql 'bar'
end
end
describe 'event_factory' do
subject(:event_factory) { plugin.send(:event_factory) }
it_behaves_like 'an event factory'
@ -58,19 +52,15 @@ describe LogStash::Plugins::EventFactorySupport do
it 'memoizes the factory instance' do
expect( event_factory ).to be plugin.send(:event_factory)
end
end
describe 'targeted_event_factory (no config :target option)' do
it 'raises an error' do
expect { plugin.send(:targeted_event_factory) }.to raise_error(ArgumentError, /target/)
end
end
describe 'targeted_event_factory' do
subject(:plugin_class) do
Class.new(plugin_base_class) do
config_name 'sample'
@ -94,7 +84,6 @@ describe LogStash::Plugins::EventFactorySupport do
end
context 'with target' do
let(:options) { super().merge('target' => '[the][baz]') }
it 'returns an event' do
@ -111,11 +100,9 @@ describe LogStash::Plugins::EventFactorySupport do
it 'uses a different factory from the basic one' do
expect( targeted_event_factory ).not_to be plugin.send(:event_factory)
end
end
context 'from_json (integration)' do
let(:json) { '[ {"foo": "bar"}, { "baz": { "a": 1 } } ]' }
let(:options) { super().merge('target' => 'internal') }
@ -126,11 +113,8 @@ describe LogStash::Plugins::EventFactorySupport do
expect( events[0].get('[internal]') ).to eql 'foo' => 'bar'
expect( events[1].get('[internal]') ).to eql 'baz' => { 'a' => 1 }
end
end
end
end
end
end

View file

@ -26,7 +26,6 @@ class LogStash::Inputs::Dummy < LogStash::Inputs::Base
def register; end
end
class LogStash::Inputs::NewPlugin < LogStash::Inputs::Base
config_name "new_plugin"

View file

@ -31,7 +31,6 @@ require_relative "../support/helpers"
require_relative "../support/matchers"
describe LogStash::Runner do
subject(:runner) { LogStash::Runner }
let(:logger) { double("logger") }
let(:agent) { double("agent") }
@ -87,7 +86,6 @@ describe LogStash::Runner do
subject { LogStash::Runner.new("") }
context "when -e is given" do
let(:args) { ["-e", "input {} output {}"] }
let(:agent) { double("agent") }
@ -134,7 +132,6 @@ describe LogStash::Runner do
context "--auto-reload" do
subject { LogStash::Runner.new("") }
context "when -e is given" do
let(:args) { ["-r", "-e", "input {} output {}"] }
it "should exit immediately" do
@ -212,8 +209,6 @@ describe LogStash::Runner do
expect(settings.get("path.queue")).to eq(queue_override_path)
end
args = ["--path.data", test_data_path, "-e", pipeline_string]
subject.run("bin/logstash", args)
end

View file

@ -37,7 +37,6 @@ describe LogStash::Setting::Bytes do
subject { described_class.new("a byte value", default, false) }
describe "#set" do
# Hard-coded test just to make sure at least one known case is working
context "when given '10mb'" do
it "returns 10485760" do

View file

@ -19,7 +19,6 @@ require "logstash/settings"
require "spec_helper"
describe LogStash::Setting::PortRange do
context "When the value is an Integer" do
subject { LogStash::Setting::PortRange.new("mynewtest", 9000) }
@ -65,7 +64,6 @@ describe LogStash::Setting::PortRange do
expect { subject }.to raise_error
end
it "raises an exception on update" do
expect { LogStash::Setting::PortRange.new("mynewtest", 10000).set("dsfnsdknfksdnfjksdnfjns") }.to raise_error
end
@ -74,12 +72,10 @@ describe LogStash::Setting::PortRange do
context "when the value is an unknown type" do
subject { LogStash::Setting::PortRange.new("mynewtest", 0.1) }
it "raises an argument error" do
expect { subject }.to raise_error
end
it "raises an exception on update" do
expect { LogStash::Setting::PortRange.new("mynewtest", 10000).set(0.1) }.to raise_error
end

View file

@ -19,7 +19,6 @@ require "spec_helper"
require "logstash/settings"
describe LogStash::Setting::SettingWithDeprecatedAlias do
let(:canonical_setting_name) { "canonical.setting" }
let(:deprecated_setting_name) { "legacy.setting" }
@ -56,7 +55,6 @@ describe LogStash::Setting::SettingWithDeprecatedAlias do
end
context "when only the deprecated alias is set" do
let(:value) { "crusty_value" }
before(:each) do
@ -78,7 +76,6 @@ describe LogStash::Setting::SettingWithDeprecatedAlias do
end
context 'using a boolean setting' do
let(:value) { true }
let(:default_value) { false }
@ -93,7 +90,6 @@ describe LogStash::Setting::SettingWithDeprecatedAlias do
it 'validates deprecated alias' do
expect { settings.get_setting(canonical_setting_name).deprecated_alias.validate_value }.to_not raise_error
end
end
end

Some files were not shown because too many files have changed in this diff.