Migrate Logstash to Log4j2 Logging (#5651)

Migrate to use Log4j2 for Logstash logging
Tal Levy 2016-08-25 10:30:22 -07:00 committed by GitHub
parent 6de1c83c4a
commit 1b2f7a7668
62 changed files with 792 additions and 516 deletions

config/log4j2.properties (new file, 38 lines added)
View file

@ -0,0 +1,38 @@
status = error
name = LogstashPropertiesConfig
appender.console.type = Console
appender.console.name = STDOUT
# Pattern Logging
#
appender.console.layout.type = PatternLayout
appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %m%n
# JSON Logging
#
# appender.console.layout.type = JSONLayout
# appender.console.layout.compact = true
# appender.console.layout.eventEol = true
# Rolling File Appender
#
#property.filename = /tmp/logstash/logstash.log
#
# appender.rolling.type = RollingFile
# appender.rolling.name = RollingFile
# appender.rolling.fileName = ${filename}
# appender.rolling.filePattern = /tmp/logstash/logstash-%d{MM-dd-yy-HH-mm-ss}-%i.log.gz
# appender.rolling.policies.type = Policies
# appender.rolling.policies.time.type = TimeBasedTriggeringPolicy
# appender.rolling.policies.time.interval = 2
# appender.rolling.policies.time.modulate = true
# appender.rolling.policies.size.type = SizeBasedTriggeringPolicy
# appender.rolling.policies.size.size=100MB
# appender.rolling.strategy.type = DefaultRolloverStrategy
# appender.rolling.strategy.max = 5
# appender.rolling.layout.type = PatternLayout
# appender.rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %m%n
rootLogger.level = error
rootLogger.appenderRef.stdout.ref = STDOUT
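For reference, any code that obtains a logger through the Log4j2 API is routed through this configuration, and the console appender's pattern above renders each event as `[timestamp][level][logger name] message`. A minimal Java sketch (class name and messages are illustrative only, not part of this commit):

[source,java]
----------------------------------
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

public class PatternExample {
    // The logger name defaults to the fully qualified class name,
    // which fills the %-25c column of the console pattern above.
    private static final Logger logger = LogManager.getLogger(PatternExample.class);

    public static void main(String[] args) {
        // With rootLogger.level = error, only the error line is emitted,
        // roughly as: [2016-08-25T10:30:22,123][ERROR][PatternExample           ] boom
        logger.error("boom");
        logger.info("suppressed unless the level is lowered to info or below");
    }
}
----------------------------------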

View file

@ -99,13 +99,14 @@
# ------------ Debugging Settings --------------
#
# Options for log.level:
# * warn => warn (default)
# * quiet => error
# * verbose => info
# * debug => debug
# * fatal
# * error
# * warn (default)
# * info
# * debug
# * trace
#
# log.level: warn
# log.format: plain (or 'json')
# path.log:
#
# ------------ Other Settings --------------

View file

@ -45,17 +45,17 @@ added[5.0.0-alpha3, Command-line flags have dots instead of dashes in their name
*`--log.level LEVEL`*::
Valid levels are:
* `fatal`: log very severe error messages that will usually be followed by the application aborting
* `error`: log errors
* `warn`: log warnings
* `quiet`: log errors
* `verbose`: log verbose info (for users)
* `info`: log verbose info (for users)
* `debug`: log debugging info (for developers)
* `trace`: finer-grained messages beyond debug
*`--log.format FORMAT`*::
Set to "json" to log in JSON format, or "plain" (default) to use `Object#.inspect`.
*`--path.settings SETTINGS_DIR`*::
Set the directory containing the `logstash.yml` <<logstash-settings-file,settings file>>.
Set the directory containing the `logstash.yml` <<logstash-settings-file,settings file>> as well
as the log4j logging configuration.
*`--node.name NAME`*::
Set a descriptive name for the node. If no value is specified, defaults to the machine's hostname.

View file

@ -875,7 +875,7 @@ time.
**Version messaging from Logstash**
If you start Logstash with the `--log.level verbose` flag, you will see messages like
If you start Logstash with the `--log.level info` flag, you will see messages like
these to indicate the relative maturity indicated by the plugin version number:
** **0.1.x**

View file

@ -16,7 +16,7 @@ available in your deployment:
[source,shell]
----------------------------------
bin/logstash-plugin list <1>
bin/logstash-plugin list --verbose <2>
bin/logstash-plugin list --info <2>
bin/logstash-plugin list '*namefragment*' <3>
bin/logstash-plugin list --group output <4>
----------------------------------

View file

@ -1,6 +1,6 @@
#Sat Jun 11 09:10:51 BST 2016
#Wed Jun 29 13:06:17 PDT 2016
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-2.8-all.zip
distributionUrl=https\://services.gradle.org/distributions/gradle-2.13-bin.zip

View file

@ -6,12 +6,30 @@
##
##############################################################################
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS=""
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
ls=`ls -ld "$PRG"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
PRG="$link"
else
PRG=`dirname "$PRG"`"/$link"
fi
done
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null
APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS=""
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"
@ -30,6 +48,7 @@ die ( ) {
cygwin=false
msys=false
darwin=false
nonstop=false
case "`uname`" in
CYGWIN* )
cygwin=true
@ -40,26 +59,11 @@ case "`uname`" in
MINGW* )
msys=true
;;
NONSTOP* )
nonstop=true
;;
esac
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
ls=`ls -ld "$PRG"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
PRG="$link"
else
PRG=`dirname "$PRG"`"/$link"
fi
done
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
@ -85,7 +89,7 @@ location of your Java installation."
fi
# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then
if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
MAX_FD_LIMIT=`ulimit -H -n`
if [ $? -eq 0 ] ; then
if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then

View file

@ -8,14 +8,14 @@
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS=
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS=
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
@ -46,7 +46,7 @@ echo location of your Java installation.
goto fail
:init
@rem Get command-line arguments, handling Windowz variants
@rem Get command-line arguments, handling Windows variants
if not "%OS%" == "Windows_NT" goto win9xME_args
if "%@eval[2+2]" == "4" goto 4NT_args

View file

@ -94,6 +94,7 @@ idea {
dependencies {
compile 'com.fasterxml.jackson.core:jackson-core:2.7.3'
compile 'com.fasterxml.jackson.core:jackson-databind:2.7.3'
compile 'org.apache.logging.log4j:log4j-api:2.6.2'
provided 'org.jruby:jruby-core:1.7.25'
testCompile 'junit:junit:4.12'
testCompile 'net.javacrumbs.json-unit:json-unit:1.9.0'

View file

@ -3,7 +3,6 @@
require "logstash/namespace"
require "logstash/json"
require "logstash/string_interpolation"
require "cabin"
# transient pipeline events for normal in-flow signaling as opposed to
# flow altering exceptions. for now having base classes is adequate and

View file

@ -194,62 +194,27 @@ describe LogStash::Event do
end
# noop logger used to test the injectable logger in Event
# this implementation is not complete because only the warn
# method is used in Event.
module DummyLogger
def self.warn(message)
# do nothing
end
end
# TODO(talevy): migrate tests to Java. no reason to test logging logic in ruby when it is being
# done in java land.
context "logger" do
# context "logger" do
let(:logger) { double("Logger") }
after(:each) { LogStash::Event.logger = LogStash::Event::DEFAULT_LOGGER }
# let(:logger) { double("Logger") }
# the following 2 specs are using both a real module (DummyLogger)
# and a mock. both tests are needed to make sure the implementation
# supports both types of objects.
# before(:each) do
# allow(LogStash::Event).to receive(:logger).and_return(logger)
# end
it "should set logger using a module" do
LogStash::Event.logger = DummyLogger
expect(DummyLogger).to receive(:warn).once
LogStash::Event.new(TIMESTAMP => "invalid timestamp")
end
# it "should set logger using a module" do
# expect(logger).to receive(:warn).once
# LogStash::Event.new(TIMESTAMP => "invalid timestamp")
# end
it "should set logger using a mock" do
LogStash::Event.logger = logger
expect(logger).to receive(:warn).once
LogStash::Event.new(TIMESTAMP => "invalid timestamp")
end
it "should unset logger" do
# first set
LogStash::Event.logger = logger
expect(logger).to receive(:warn).once
LogStash::Event.new(TIMESTAMP => "invalid timestamp")
# then unset
LogStash::Event.logger = LogStash::Event::DEFAULT_LOGGER
expect(logger).to receive(:warn).never
# this will produce a log line in stdout by the Java Event
LogStash::Event.new(TIMESTAMP => "ignore this log")
end
it "should warn on parsing error" do
LogStash::Event.logger = logger
expect(logger).to receive(:warn).once.with(/^Error parsing/)
LogStash::Event.new(TIMESTAMP => "invalid timestamp")
end
it "should warn on invalid timestamp object" do
LogStash::Event.logger = logger
expect(logger).to receive(:warn).once.with(/^Unrecognized/)
LogStash::Event.new(TIMESTAMP => Array.new)
end
end
# it "should warn on invalid timestamp object" do
# expect(logger).to receive(:warn).once.with(/^Unrecognized/)
# LogStash::Event.new(TIMESTAMP => Array.new)
# end
# end
context "to_hash" do
let (:source_hash) { {"a" => 1, "b" => [1, 2, 3, {"h" => 1, "i" => "baz"}], "c" => {"d" => "foo", "e" => "bar", "f" => [4, 5, "six"]}} }

View file

@ -6,6 +6,8 @@ import com.logstash.bivalues.StringBiValue;
import com.logstash.bivalues.TimeBiValue;
import com.logstash.bivalues.TimestampBiValue;
import com.logstash.ext.JrubyTimestampExtLibrary;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.joda.time.DateTime;
import org.jruby.RubySymbol;
@ -35,13 +37,9 @@ public class Event implements Cloneable, Serializable {
public static final String VERSION = "@version";
public static final String VERSION_ONE = "1";
private static final Logger DEFAULT_LOGGER = new StdioLogger();
private static final Logger logger = LogManager.getLogger(Event.class);
private static final ObjectMapper mapper = new ObjectMapper();
// logger is static since once set there is no point in changing it at runtime
// for other reasons than in tests/specs.
private transient static Logger logger = DEFAULT_LOGGER;
public Event()
{
this.metadata = new HashMap<String, Object>();
@ -283,10 +281,10 @@ public class Event implements Cloneable, Serializable {
} else if (o instanceof RubySymbol) {
return new Timestamp(((RubySymbol) o).asJavaString());
} else {
Event.logger.warn("Unrecognized " + TIMESTAMP + " value type=" + o.getClass().toString());
logger.warn("Unrecognized " + TIMESTAMP + " value type=" + o.getClass().toString());
}
} catch (IllegalArgumentException e) {
Event.logger.warn("Error parsing " + TIMESTAMP + " string value=" + o.toString());
logger.warn("Error parsing " + TIMESTAMP + " string value=" + o.toString());
}
tag(TIMESTAMP_FAILURE_TAG);
@ -306,10 +304,4 @@ public class Event implements Cloneable, Serializable {
tags.add(tag);
}
}
// Event.logger is static since once set there is no point in changing it at runtime
// for other reasons than in tests/specs.
public static void setLogger(Logger logger) {
Event.logger = logger;
}
}

View file

@ -1,13 +0,0 @@
package com.logstash;
// minimalist Logger interface to wire a logger callback in the Event class
// for now only warn is defined because this is the only method that's required
// in the Event class.
// TODO: (colin) generalize this
public interface Logger {
// TODO: (colin) complete interface beyond warn when needed
void warn(String message);
}

View file

@ -1,10 +0,0 @@
package com.logstash;
public class StdioLogger implements Logger {
// TODO: (colin) complete implementation beyond warn when needed
public void warn(String message) {
System.out.println(message);
}
}

View file

@ -38,7 +38,6 @@ public class JrubyEventExtLibrary implements Library {
clazz.setConstant("TIMESTAMP", runtime.newString(Event.TIMESTAMP));
clazz.setConstant("TIMESTAMP_FAILURE_TAG", runtime.newString(Event.TIMESTAMP_FAILURE_TAG));
clazz.setConstant("TIMESTAMP_FAILURE_FIELD", runtime.newString(Event.TIMESTAMP_FAILURE_FIELD));
clazz.setConstant("DEFAULT_LOGGER", runtime.getModule("Cabin").getClass("Channel").callMethod("get", runtime.getModule("LogStash")));
clazz.setConstant("VERSION", runtime.newString(Event.VERSION));
clazz.setConstant("VERSION_ONE", runtime.newString(Event.VERSION_ONE));
clazz.defineAnnotatedMethods(RubyEvent.class);
@ -58,24 +57,9 @@ public class JrubyEventExtLibrary implements Library {
}
}
public static class ProxyLogger implements Logger {
private RubyObject logger;
public ProxyLogger(RubyObject logger) {
this.logger = logger;
}
// TODO: (colin) complete implementation beyond warn when needed
public void warn(String message) {
logger.callMethod("warn", RubyString.newString(logger.getRuntime(), message));
}
}
@JRubyClass(name = "Event", parent = "Object")
public static class RubyEvent extends RubyObject {
private Event event;
private static RubyObject logger;
public RubyEvent(Ruby runtime, RubyClass klass) {
super(runtime, klass);
@ -314,14 +298,5 @@ public class JrubyEventExtLibrary implements Library {
this.event.setTimestamp(((JrubyTimestampExtLibrary.RubyTimestamp)value).getTimestamp());
return value;
}
// set a new logger for all Event instances
// there is no point in changing it at runtime for other reasons than in tests/specs.
@JRubyMethod(name = "logger=", required = 1, meta = true)
public static IRubyObject ruby_set_logger(ThreadContext context, IRubyObject recv, IRubyObject value)
{
Event.setLogger(new ProxyLogger((RubyObject)value));
return value;
}
}
}

View file

@ -391,12 +391,6 @@ describe LogStash::Event do
end
context "timestamp initialization" do
let(:logger_mock) { double("logger") }
after(:each) do
LogStash::Event.logger = LogStash::Event::DEFAULT_LOGGER
end
it "should coerce timestamp" do
t = Time.iso8601("2014-06-12T00:12:17.114Z")
expect(LogStash::Event.new("@timestamp" => t).timestamp.to_i).to eq(t.to_i)
@ -421,10 +415,6 @@ describe LogStash::Event do
end
it "should warn for invalid value" do
LogStash::Event.logger = logger_mock
expect(logger_mock).to receive(:warn).twice
LogStash::Event.new("@timestamp" => :foo)
LogStash::Event.new("@timestamp" => 666)
end
@ -437,9 +427,6 @@ describe LogStash::Event do
end
it "should warn for invalid string format" do
LogStash::Event.logger = logger_mock
expect(logger_mock).to receive(:warn)
LogStash::Event.new("@timestamp" => "foo")
end
end

View file

@ -0,0 +1,75 @@
import java.nio.file.Files
import static java.nio.file.StandardCopyOption.REPLACE_EXISTING
apply plugin: 'java'
apply plugin: 'idea'
group = 'org.logstash'
version = '5.0.0-alpha6'
description = """Logstash Core Java"""
sourceCompatibility = 1.8
targetCompatibility = 1.8
configurations {
provided
}
project.sourceSets {
main.compileClasspath += project.configurations.provided
main.runtimeClasspath += project.configurations.provided
test.compileClasspath += project.configurations.provided
test.runtimeClasspath += project.configurations.provided
}
project.javadoc.classpath += project.configurations.provided
repositories {
mavenCentral()
}
dependencies {
runtime 'org.apache.logging.log4j:log4j-1.2-api:2.6.2'
compile 'org.apache.logging.log4j:log4j-api:2.6.2'
compile 'org.apache.logging.log4j:log4j-core:2.6.2'
compile 'com.fasterxml.jackson.core:jackson-core:2.7.4'
compile 'com.fasterxml.jackson.core:jackson-databind:2.7.4'
testCompile 'org.apache.logging.log4j:log4j-core:2.6.2:tests'
testCompile 'org.apache.logging.log4j:log4j-api:2.6.2:tests'
testCompile 'junit:junit:4.12'
provided 'org.jruby:jruby-core:1.7.25'
}
idea {
module {
scopes.PROVIDED.plus += [project.configurations.provided]
}
}
task generateGemJarRequiresFile << {
File jars_file = file('lib/jars.rb')
jars_file.newWriter().withWriter { w ->
w << "require \'jar_dependencies\'\n"
configurations.runtime.allDependencies.each {
w << "require_jar(\'${it.group}\', \'${it.name}\', \'${it.version}\')\n"
}
w << "require_jar(\'${project.group}\', \'${project.name}\', \'${project.version}\')\n"
}
}
task vendor << {
String vendorPathPrefix = "vendor/jars"
configurations.runtime.allDependencies.each { dep ->
File f = configurations.runtime.filter { it.absolutePath.contains("${dep.group}/${dep.name}/${dep.version}") }.singleFile
String groupPath = dep.group.replaceAll('\\.', '/')
File newJarFile = file("${vendorPathPrefix}/${groupPath}/${dep.name}/${dep.version}/${dep.name}-${dep.version}.jar")
newJarFile.mkdirs()
Files.copy(f.toPath(), newJarFile.toPath(), REPLACE_EXISTING)
}
String projectGroupPath = project.group.replaceAll('\\.', '/')
File projectJarFile = file("${vendorPathPrefix}/${projectGroupPath}/${project.name}/${project.version}/${project.name}-${project.version}.jar")
projectJarFile.mkdirs()
Files.copy(file("$buildDir/libs/${project.name}-${project.version}.jar").toPath(), projectJarFile.toPath(), REPLACE_EXISTING)
}
vendor.dependsOn(jar, generateGemJarRequiresFile)

View file

@ -0,0 +1,7 @@
require 'jar_dependencies'
require_jar('org.apache.logging.log4j', 'log4j-1.2-api', '2.6.2')
require_jar('org.apache.logging.log4j', 'log4j-api', '2.6.2')
require_jar('org.apache.logging.log4j', 'log4j-core', '2.6.2')
require_jar('com.fasterxml.jackson.core', 'jackson-core', '2.7.4')
require_jar('com.fasterxml.jackson.core', 'jackson-databind', '2.7.4')
require_jar('org.logstash', 'logstash-core', '5.0.0-alpha6')

View file

@ -18,6 +18,7 @@ require "securerandom"
LogStash::Environment.load_locale!
class LogStash::Agent
include LogStash::Util::Loggable
STARTED_AT = Time.now.freeze
attr_reader :metric, :node_name, :pipelines, :settings, :webserver
@ -28,10 +29,9 @@ class LogStash::Agent
# :node_name [String] - identifier for the agent
# :auto_reload [Boolean] - enable reloading of pipelines
# :reload_interval [Integer] - reload pipelines every X seconds
# :logger [Cabin::Channel] - logger instance
def initialize(settings = LogStash::SETTINGS)
@logger = self.class.logger
@settings = settings
@logger = Cabin::Channel.get(LogStash)
@auto_reload = setting("config.reload.automatic")
@pipelines = {}

View file

@ -10,18 +10,17 @@ require "logstash/api/modules/stats"
module LogStash
module Api
module RackApp
# Cabin is not compatible with CommonLogger, and this gives us more control anyway
METADATA_FIELDS = [:request_method, :path_info, :query_string, :http_version, :http_accept].freeze
def self.log_metadata(status, env)
METADATA_FIELDS.reduce({:status => status}) do |acc, field|
acc[field] = env[field.to_s.upcase]
acc
end
end
end
class ApiLogger
LOG_MESSAGE = "API HTTP Request".freeze
def initialize(app, logger)
@app = app
@logger = logger
@ -30,24 +29,24 @@ module LogStash
def call(env)
res = @app.call(env)
status, headers, body = res
if fatal_error?(status)
@logger.warn? && @logger.warn(LOG_MESSAGE, RackApp.log_metadata(status, env))
else
@logger.info? && @logger.info(LOG_MESSAGE, RackApp.log_metadata(status, env))
@logger.warn? && @logger.warn(LOG_MESSAGE, RackApp.log_metadata(status, env))
else
@logger.info? && @logger.info(LOG_MESSAGE, RackApp.log_metadata(status, env))
end
res
res
end
def fatal_error?(status)
status >= 500 && status < 600
end
end
class ApiErrorHandler
LOG_MESSAGE = "Internal API server error".freeze
def initialize(app, logger)
@app = app
@logger = logger
@ -65,21 +64,21 @@ module LogStash
})
@logger.error(LOG_MESSAGE, body)
[500,
{'Content-Type' => 'application/json'},
[LogStash::Json.dump(body)]
]
end
end
def self.app(logger, agent, environment)
namespaces = rack_namespaces(agent)
Rack::Builder.new do
# Custom logger object. Rack CommonLogger does not work with cabin
use ApiLogger, logger
# In test env we want errors to propagate up the chain
# so we get easy to understand test failures.
# In production / dev we don't want a bad API endpoint
@ -87,7 +86,7 @@ module LogStash
if environment != "test"
use ApiErrorHandler, logger
end
run LogStash::Api::Modules::Root.new(nil, agent)
namespaces.each_pair do |namespace, app|
map(namespace) do

View file

@ -123,7 +123,7 @@ module LogStash; module Config; module AST
definitions << "define_singleton_method :#{type}_func do |event|"
definitions << " targeted_outputs = []" if type == "output"
definitions << " events = [event]" if type == "filter"
definitions << " @logger.debug? && @logger.debug(\"#{type} received\", :event => event.to_hash)"
definitions << " @logger.debug? && @logger.debug(\"#{type} received\", \"event\" => event.to_hash)"
sections.select { |s| s.plugin_type.text_value == type }.each do |s|
definitions << s.compile.split("\n", -1).map { |e| " #{e}" }
@ -174,7 +174,7 @@ module LogStash; module Config; module AST
return if events.nil? || events.empty?
@logger.debug? && @logger.debug(\"Flushing\", :plugin => @generated_objects[:#{name}], :events => events)
@logger.debug? && @logger.debug(\"Flushing\", :plugin => @generated_objects[:#{name}], :events => events.map { |x| x.to_hash })
#{plugin.compile_starting_here.gsub(/^/, " ")}

View file

@ -8,11 +8,10 @@ require "logger"
class LogStash::Config::File
include Enumerable
attr_accessor :logger
include LogStash::Util::Loggable
public
def initialize(text)
@logger = Cabin::Channel.get(LogStash)
@text = text
@config = parse(text)
end # def initialize

View file

@ -66,7 +66,7 @@ module LogStash::Config::Mixin
if opts && opts[:deprecated]
extra = opts[:deprecated].is_a?(String) ? opts[:deprecated] : ""
extra.gsub!("%PLUGIN%", self.class.config_name)
@logger.warn("You are using a deprecated config setting " +
self.logger.warn("You are using a deprecated config setting " +
"#{name.inspect} set in #{self.class.config_name}. " +
"Deprecated settings will continue to work, " +
"but are scheduled for removal from logstash " +
@ -141,7 +141,7 @@ module LogStash::Config::Mixin
next if key[0, 1] == "@"
# Set this key as an instance variable only if it doesn't start with an '@'
@logger.debug("config #{self.class.name}/@#{key} = #{value.inspect}")
self.logger.debug("config #{self.class.name}/@#{key} = #{value.inspect}")
instance_variable_set("@#{key}", value)
end
@ -198,8 +198,7 @@ module LogStash::Config::Mixin
# Deprecated: Declare the version of the plugin
# inside the gemspec.
def milestone(m = nil)
@logger = Cabin::Channel.get(LogStash)
@logger.debug(I18n.t('logstash.plugin.deprecated_milestone', :plugin => config_name))
self.logger.debug(I18n.t('logstash.plugin.deprecated_milestone', :plugin => config_name))
end
# Define a new configuration setting
@ -262,7 +261,6 @@ module LogStash::Config::Mixin
def validate(params)
@plugin_name = config_name
@plugin_type = ancestors.find { |a| a.name =~ /::Base$/ }.config_name
@logger = Cabin::Channel.get(LogStash)
is_valid = true
print_version_notice
@ -282,12 +280,12 @@ module LogStash::Config::Mixin
if plugin_version < PLUGIN_VERSION_1_0_0
if plugin_version < PLUGIN_VERSION_0_9_0
@logger.info(I18n.t("logstash.plugin.version.0-1-x",
self.logger.info(I18n.t("logstash.plugin.version.0-1-x",
:type => @plugin_type,
:name => @config_name,
:LOGSTASH_VERSION => LOGSTASH_VERSION))
else
@logger.info(I18n.t("logstash.plugin.version.0-9-x",
self.logger.info(I18n.t("logstash.plugin.version.0-9-x",
:type => @plugin_type,
:name => @config_name,
:LOGSTASH_VERSION => LOGSTASH_VERSION))
@ -297,7 +295,7 @@ module LogStash::Config::Mixin
# If we cannot find a version in the currently installed gems we
# will display this message. This could happen in the test, if you
# create an anonymous class to test a plugin.
@logger.warn(I18n.t("logstash.plugin.no_version",
self.logger.warn(I18n.t("logstash.plugin.no_version",
:type => @plugin_type,
:name => @config_name,
:LOGSTASH_VERSION => LOGSTASH_VERSION))
@ -321,7 +319,7 @@ module LogStash::Config::Mixin
if invalid_params.size > 0
invalid_params.each do |name|
@logger.error("Unknown setting '#{name}' for #{@plugin_name}")
self.logger.error("Unknown setting '#{name}' for #{@plugin_name}")
end
return false
end # if invalid_params.size > 0
@ -348,7 +346,7 @@ module LogStash::Config::Mixin
value = params[config_key]
if value.nil? || (config[:list] && Array(value).empty?)
@logger.error(I18n.t("logstash.runner.configuration.setting_missing",
self.logger.error(I18n.t("logstash.runner.configuration.setting_missing",
:setting => config_key, :plugin => @plugin_name,
:type => @plugin_type))
is_valid = false
@ -396,7 +394,7 @@ module LogStash::Config::Mixin
# Used for converting values in the config to proper objects.
params[key] = processed_value
else
@logger.error(I18n.t("logstash.runner.configuration.setting_invalid",
self.logger.error(I18n.t("logstash.runner.configuration.setting_invalid",
:plugin => @plugin_name, :type => @plugin_type,
:setting => key, :value => value.inspect,
:value_type => config_settings[:validate],

View file

@ -1,5 +1,6 @@
# encoding: utf-8
require "logstash/errors"
require "logstash/java_integration"
require "logstash/config/cpu_core_strategy"
require "logstash/settings"
require "socket"
@ -32,7 +33,7 @@ module LogStash
Setting.new("path.plugins", Array, []),
Setting::String.new("interactive", nil, false),
Setting::Boolean.new("config.debug", false),
Setting::String.new("log.level", "warn", true, ["quiet", "verbose", "warn", "debug"]),
Setting::String.new("log.level", "warn", true, ["fatal", "error", "warn", "debug", "info", "trace"]),
Setting::Boolean.new("version", false),
Setting::Boolean.new("help", false),
Setting::String.new("path.log", nil, false),

View file

@ -7,6 +7,7 @@ require "logstash/config/mixin"
require "logstash/util/decorators"
class LogStash::Filters::Base < LogStash::Plugin
include LogStash::Util::Loggable
include LogStash::Config::Mixin
config_name "filter"

View file

@ -9,6 +9,7 @@ require "logstash/util/decorators"
# This is the base class for Logstash inputs.
class LogStash::Inputs::Base < LogStash::Plugin
include LogStash::Util::Loggable
include LogStash::Config::Mixin
config_name "input"

View file

@ -1,5 +1,6 @@
# encoding: utf-8
require "java"
require "jars"
# this is mainly for usage with JrJackson json parsing in :raw mode which generates
# Java::JavaUtil::ArrayList and Java::JavaUtil::LinkedHashMap native objects for speed.

View file

@ -1,91 +1,3 @@
# encoding: utf-8
require "logstash/logging/logger"
require "logstash/namespace"
require "cabin"
require "logger"
class LogStash::Logger
attr_accessor :target
public
def initialize(*args)
super()
#self[:program] = File.basename($0)
#subscribe(::Logger.new(*args))
@target = args[0]
@channel = Cabin::Channel.get(LogStash)
# lame hack until cabin's smart enough not to doubley-subscribe something.
# without this subscription count check, running the test suite
# causes Cabin to subscribe to STDOUT maaaaaany times.
subscriptions = @channel.instance_eval { @subscribers.count }
@channel.subscribe(@target) unless subscriptions > 0
# Set default loglevel to WARN unless $DEBUG is set (run with 'ruby -d')
@level = $DEBUG ? :debug : :warn
if ENV["LOGSTASH_DEBUG"]
@level = :debug
end
# Direct metrics elsewhere.
@channel.metrics.channel = Cabin::Channel.new
end # def initialize
# Delegation
def level=(value) @channel.level = value; end
def debug(*args); @channel.debug(*args); end
def debug?(*args); @channel.debug?(*args); end
def info(*args); @channel.info(*args); end
def info?(*args); @channel.info?(*args); end
def warn(*args); @channel.warn(*args); end
def warn?(*args); @channel.warn?(*args); end
def error(*args); @channel.error(*args); end
def error?(*args); @channel.error?(*args); end
def fatal(*args); @channel.fatal(*args); end
def fatal?(*args); @channel.fatal?(*args); end
def self.setup_log4j(logger)
require "java"
properties = java.util.Properties.new
log4j_level = "WARN"
case logger.level
when :debug
log4j_level = "DEBUG"
when :info
log4j_level = "INFO"
when :warn
log4j_level = "WARN"
end # case level
properties.setProperty("log4j.rootLogger", "#{log4j_level},logstash")
# TODO(sissel): This is a shitty hack to work around the fact that
# LogStash::Logger isn't used anymore. We should fix that.
target = logger.instance_eval { @subscribers }.values.first.instance_eval { @io }
case target
when STDOUT
properties.setProperty("log4j.appender.logstash",
"org.apache.log4j.ConsoleAppender")
properties.setProperty("log4j.appender.logstash.Target", "System.out")
when STDERR
properties.setProperty("log4j.appender.logstash",
"org.apache.log4j.ConsoleAppender")
properties.setProperty("log4j.appender.logstash.Target", "System.err")
when target.is_a?(File)
properties.setProperty("log4j.appender.logstash",
"org.apache.log4j.FileAppender")
properties.setProperty("log4j.appender.logstash.File", target.path)
else
properties.setProperty("log4j.appender.logstash", "org.apache.log4j.varia.NullAppender")
end # case target
properties.setProperty("log4j.appender.logstash.layout",
"org.apache.log4j.PatternLayout")
properties.setProperty("log4j.appender.logstash.layout.conversionPattern",
"log4j, [%d{yyyy-MM-dd}T%d{HH:mm:ss.SSS}] %5p: %c: %m%n")
org.apache.log4j.LogManager.resetConfiguration
org.apache.log4j.PropertyConfigurator.configure(properties)
logger.debug("log4j java properties setup", :log4j_level => log4j_level)
end
end # class LogStash::Logger

View file

@ -3,7 +3,7 @@ require "logstash/namespace"
require "logstash/logging"
require "logstash/json"
module LogStash; class Logging; class JSON
module LogStash; module Logging; class JSON
def initialize(io)
raise ArgumentError, "Expected IO, got #{io.class.name}" unless io.is_a?(IO)

View file

@ -0,0 +1,72 @@
require "logstash/java_integration"
module LogStash
module Logging
class Logger
java_import org.apache.logging.log4j.Level
java_import org.apache.logging.log4j.LogManager
java_import org.apache.logging.log4j.core.config.Configurator
def initialize(name)
@logger = LogManager.getLogger(name)
end
def debug?
@logger.is_debug_enabled
end
def info?
@logger.is_info_enabled
end
def error?
@logger.is_error_enabled
end
def warn?
@logger.is_warn_enabled
end
def fatal?
@logger.is_fatal_enabled
end
def trace?
@logger.is_trace_enabled
end
def debug(message, data = {})
@logger.debug(message, data)
end
def warn(message, data = {})
@logger.warn(message, data)
end
def info(message, data = {})
@logger.info(message, data)
end
def error(message, data = {})
@logger.error(message, data)
end
def fatal(message, data = {})
@logger.fatal(message, data)
end
def trace(message, data = {})
@logger.trace(message, data)
end
# Point logging at a specific path.
def self.configure_logging(level, path = LogManager::ROOT_LOGGER_NAME)
Configurator.setLevel(path, Level.toLevel(level))
end # def configure_logging
def self.initialize(config_location)
Configurator.initialize(nil, config_location)
end
end
end
end
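For context, the two class-level helpers above are thin wrappers around the Log4j2 `Configurator`. A rough Java sketch of what they delegate to (the config path and logger name below are hypothetical examples, not values from this commit):

[source,java]
----------------------------------
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.core.config.Configurator;

public class ConfiguratorSketch {
    public static void main(String[] args) {
        // What Logger.initialize(config_location) delegates to:
        // load the Log4j2 configuration from an explicit location.
        Configurator.initialize(null, "/etc/logstash/log4j2.properties");

        // What Logger.configure_logging(level, path) delegates to:
        // adjust the level of a named logger (or the root logger) at runtime.
        Configurator.setLevel("logstash.runner", Level.DEBUG);
    }
}
----------------------------------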

View file

@ -8,6 +8,7 @@ require "logstash/util/wrapped_synchronous_queue"
require "concurrent/atomic/atomic_fixnum"
class LogStash::Outputs::Base < LogStash::Plugin
include LogStash::Util::Loggable
include LogStash::Config::Mixin
config_name "output"

View file

@ -29,6 +29,7 @@ module Clamp
module StrictDeclaration
include Clamp::Attribute::Declaration
include LogStash::Util::Loggable
# Instead of letting Clamp set up accessors for the options
# we're going to tightly control them through
@ -53,8 +54,7 @@ module Clamp
def define_deprecated_writer_for(option, opts, &block)
define_method(option.write_method) do |value|
logger = Cabin::Channel.get(LogStash)
logger.warn "DEPRECATION WARNING: The flag #{option.switches} has been deprecated, please use \"--#{opts[:new_flag]}=#{opts[:new_value]}\" instead."
self.class.logger.warn "DEPRECATION WARNING: The flag #{option.switches} has been deprecated, please use \"--#{opts[:new_flag]}=#{opts[:new_value]}\" instead."
LogStash::SETTINGS.set(opts[:new_flag], opts[:new_value])
end
end

View file

@ -20,6 +20,8 @@ require "logstash/output_delegator"
require "logstash/filter_delegator"
module LogStash; class Pipeline
include LogStash::Util::Loggable
attr_reader :inputs,
:filters,
:outputs,
@ -28,7 +30,6 @@ module LogStash; class Pipeline
:events_filtered,
:reporter,
:pipeline_id,
:logger,
:started_at,
:thread,
:config_str,
@ -45,13 +46,12 @@ module LogStash; class Pipeline
]
def initialize(config_str, settings = SETTINGS, namespaced_metric = nil)
@logger = self.logger
@config_str = config_str
@config_hash = Digest::SHA1.hexdigest(@config_str)
# Every time #plugin is invoked this is incremented to give each plugin
# a unique id when auto-generating plugin ids
@plugin_counter ||= 0
@logger = Cabin::Channel.get(LogStash)
@plugin_counter ||= 0
@settings = settings
@pipeline_id = @settings.get_value("pipeline.id") || self.object_id
@reporter = PipelineReporter.new(@logger, self)
@ -82,8 +82,8 @@ module LogStash; class Pipeline
# The config code is hard to represent as a log message...
# So just print it.
if @settings.get_value("config.debug") && logger.debug?
logger.debug("Compiled pipeline code", :code => code)
if @settings.get_value("config.debug") && @logger.debug?
@logger.debug("Compiled pipeline code", :code => code)
end
begin
@ -152,7 +152,7 @@ module LogStash; class Pipeline
start_workers
@logger.log("Pipeline #{@pipeline_id} started")
@logger.info("Pipeline #{@pipeline_id} started")
# Block until all inputs have stopped
# Generally this happens if SIGINT is sent and `shutdown` is called from an external thread
@ -167,7 +167,7 @@ module LogStash; class Pipeline
shutdown_flusher
shutdown_workers
@logger.log("Pipeline #{@pipeline_id} has been shutdown")
@logger.info("Pipeline #{@pipeline_id} has been shutdown")
# exit code
return 0

View file

@ -3,14 +3,13 @@ require "logstash/namespace"
require "logstash/logging"
require "logstash/config/mixin"
require "logstash/instrument/null_metric"
require "cabin"
require "concurrent"
require "securerandom"
require "logstash/plugins/registry"
class LogStash::Plugin
include LogStash::Util::Loggable
attr_accessor :params
attr_accessor :logger
NL = "\n"
@ -45,12 +44,12 @@ class LogStash::Plugin
self.class.name == other.class.name && @params == other.params
end
def initialize(params=nil)
def initialize(params=nil)
@logger = self.logger
@params = LogStash::Util.deep_clone(params)
# The id should always be defined normally, but in tests that might not be the case
# In the future we may make this more strict in the Plugin API
@params["id"] ||= "#{self.class.config_name}_#{SecureRandom.uuid}"
@logger = Cabin::Channel.get(LogStash)
end
# Return a uniq ID for this plugin configuration, by default
@ -161,10 +160,4 @@ class LogStash::Plugin
def self.is_a_plugin?(klass, name)
klass.ancestors.include?(LogStash::Plugin) && klass.respond_to?(:config_name) && klass.config_name == name
end
# @return [Cabin::Channel] logger channel for class methods
def self.logger
@logger ||= Cabin::Channel.get(LogStash)
end
end # class LogStash::Plugin

View file

@ -1,9 +1,11 @@
# encoding: utf-8
require 'singleton'
require "rubygems/package"
require "logstash/util/loggable"
module LogStash
class Registry
include LogStash::Util::Loggable
##
# Placeholder class for registered plugins
@ -45,7 +47,7 @@ module LogStash
def initialize
@registry = {}
@logger = Cabin::Channel.get(LogStash)
@logger = self.logger
end
def lookup(type, plugin_name, &block)

View file

@ -4,7 +4,6 @@ Encoding.default_external = Encoding::UTF_8
$DEBUGLIST = (ENV["DEBUG"] || "").split(",")
require "clamp"
require "cabin"
require "net/http"
require "logstash/environment"
@ -19,9 +18,10 @@ require "logstash/settings"
require "logstash/version"
class LogStash::Runner < Clamp::StrictCommand
include LogStash::Util::Loggable
# The `path.settings` need to be defined in the runner instead of the `logstash-core/lib/logstash/environment.rb`
# because the `Environment::LOGSTASH_HOME` doesn't exist in the context of the `logstash-core` gem.
#
#
# See issue https://github.com/elastic/logstash/issues/5361
LogStash::SETTINGS.register(LogStash::Setting::String.new("path.settings", ::File.join(LogStash::Environment::LOGSTASH_HOME, "config")))
@ -82,7 +82,8 @@ class LogStash::Runner < Clamp::StrictCommand
:attribute_name => "path.log"
option "--log.level", "LEVEL", I18n.t("logstash.runner.flag.log_level"),
:default => LogStash::SETTINGS.get_default("log.level")
:default => LogStash::SETTINGS.get_default("log.level"),
:attribute_name => "log.level"
option "--config.debug", :flag,
I18n.t("logstash.runner.flag.config_debug"),
@ -135,7 +136,7 @@ class LogStash::Runner < Clamp::StrictCommand
### DEPRECATED FLAGS ###
deprecated_option ["--verbose"], :flag,
I18n.t("logstash.runner.flag.verbose"),
:new_flag => "log.level", :new_value => "verbose"
:new_flag => "log.level", :new_value => "info"
deprecated_option ["--debug"], :flag,
I18n.t("logstash.runner.flag.debug"),
@ -143,12 +144,11 @@ class LogStash::Runner < Clamp::StrictCommand
deprecated_option ["--quiet"], :flag,
I18n.t("logstash.runner.flag.quiet"),
:new_flag => "log.level", :new_value => "quiet"
:new_flag => "log.level", :new_value => "error"
attr_reader :agent
def initialize(*args)
@logger = Cabin::Channel.get(LogStash)
@settings = LogStash::SETTINGS
super(*args)
end
@ -163,13 +163,20 @@ class LogStash::Runner < Clamp::StrictCommand
rescue => e
# abort unless we're just looking for the help
if (["--help", "-h"] & args).empty?
@logger.subscribe(STDOUT)
@logger.warn("Logstash has a new settings file which defines start up time settings. This file is typically located in $LS_HOME/config or /etc/logstash. If you installed Logstash through a package and are starting it manually please specify the location to this settings file by passing in \"--path.settings=/path/..\" in the command line options")
@logger.fatal("Failed to load settings file from \"path.settings\". Aborting...", "path.settings" => LogStash::SETTINGS.get("path.settings"), "exception" => e.class, "message" => e.message)
$stderr.puts "INFO: Logstash has a new settings file which defines start up time settings. This file is typically located in $LS_HOME/config or /etc/logstash. If you installed Logstash through a package and are starting it manually please specify the location to this settings file by passing in \"--path.settings=/path/..\" in the command line options"
$stderr.puts "ERROR: Failed to load settings file from \"path.settings\". Aborting... path.setting=#{LogStash::SETTINGS.get("path.settings")}, exception=#{e.class}, message=>#{e.message}"
return 1
end
end
# Configure Logstash logging facility, this needs to be done before everything else to
# make sure the logger has the correct settings and the log level is correctly defined.
# TODO(talevy): cleanly support `path.logs` setting in log4j
unless java.lang.System.getProperty("log4j.configurationFile")
log4j_config_location = setting("path.settings") + "/log4j2.properties"
LogStash::Logging::Logger::initialize(log4j_config_location)
end
super(*[args])
end
@ -177,12 +184,12 @@ class LogStash::Runner < Clamp::StrictCommand
require "logstash/util"
require "logstash/util/java_version"
require "stud/task"
require "cabin" # gem 'cabin'
require "logstash/logging/json"
# Configure Logstash logging facility, this need to be done before everything else to
# make sure the logger has the correct settings and the log level is correctly defined.
configure_logging(setting("path.log"), setting("log.level"))
LogStash::Logging::Logger::configure_logging(setting("log.level"))
if setting("config.debug") && logger.debug?
logger.warn("--config.debug was specified, but log.level was not set to \'debug\'! No config info will be logged.")
end
LogStash::Util::set_thread_name(self.class.name)
@ -199,7 +206,6 @@ class LogStash::Runner < Clamp::StrictCommand
end
LogStash::ShutdownWatcher.unsafe_shutdown = setting("pipeline.unsafe_shutdown")
LogStash::ShutdownWatcher.logger = @logger
configure_plugin_paths(setting("path.plugins"))
@ -210,7 +216,7 @@ class LogStash::Runner < Clamp::StrictCommand
return start_shell(setting("interactive"), binding) if setting("interactive")
@settings.format_settings.each {|line| @logger.info(line) }
@settings.format_settings.each {|line| logger.info(line) }
if setting("config.string").nil? && setting("path.config").nil?
fail(I18n.t("logstash.runner.missing-configuration"))
@ -222,14 +228,14 @@ class LogStash::Runner < Clamp::StrictCommand
end
if setting("config.test_and_exit")
config_loader = LogStash::Config::Loader.new(@logger)
config_loader = LogStash::Config::Loader.new(logger)
config_str = config_loader.format_config(setting("path.config"), setting("config.string"))
begin
LogStash::Pipeline.new(config_str)
@logger.terminal "Configuration OK"
puts "Configuration OK"
return 0
rescue => e
@logger.fatal I18n.t("logstash.runner.invalid-configuration", :error => e.message)
logger.fatal I18n.t("logstash.runner.invalid-configuration", :error => e.message)
return 1
end
end
@ -252,6 +258,9 @@ class LogStash::Runner < Clamp::StrictCommand
@agent.shutdown
# flush any outstanding log messages during shutdown
org.apache.logging.log4j.LogManager.shutdown
agent_return
rescue Clamp::UsageError => e
@ -259,7 +268,7 @@ class LogStash::Runner < Clamp::StrictCommand
show_short_help
return 1
rescue => e
@logger.fatal(I18n.t("oops"), :error => e, :backtrace => e.backtrace)
logger.fatal(I18n.t("oops"), :error => e, :backtrace => e.backtrace)
return 1
ensure
Stud::untrap("INT", sigint_id) unless sigint_id.nil?
@ -271,10 +280,10 @@ class LogStash::Runner < Clamp::StrictCommand
def show_version
show_version_logstash
if @logger.debug? || @logger.info?
if logger.is_info_enabled
show_version_ruby
show_version_java if LogStash::Environment.jruby?
show_gems if @logger.debug?
show_gems if logger.debug?
end
end # def show_version
@ -312,55 +321,6 @@ class LogStash::Runner < Clamp::StrictCommand
LogStash::Agent.new(*args)
end
# Point logging at a specific path.
def configure_logging(path, level)
@logger = Cabin::Channel.get(LogStash)
# Set with the -v (or -vv...) flag
case level
when "quiet"
@logger.level = :error
when "verbose"
@logger.level = :info
when "debug"
@logger.level = :debug
else
@logger.level = :warn
end
if path
# TODO(sissel): Implement file output/rotation in Cabin.
# TODO(sissel): Catch exceptions, report sane errors.
begin
@log_fd.close if @log_fd
@log_fd = File.new(path, "a")
rescue => e
fail(I18n.t("logstash.runner.configuration.log_file_failed",
:path => path, :error => e))
end
if setting("log.format") == "json"
@logger.subscribe(LogStash::Logging::JSON.new(STDOUT), :level => :fatal)
@logger.subscribe(LogStash::Logging::JSON.new(@log_fd))
else
@logger.subscribe(STDOUT, :level => :fatal)
@logger.subscribe(@log_fd)
end
@logger.terminal "Sending logstash logs to #{path}."
else
if setting("log.format") == "json"
@logger.subscribe(LogStash::Logging::JSON.new(STDOUT))
else
@logger.subscribe(STDOUT)
end
end
if setting("config.debug") && @logger.level != :debug
@logger.warn("--config.debug was specified, but log.level was not set to \'debug\'! No config info will be logged.")
end
# TODO(sissel): redirect stdout/stderr to the log as well
# http://jira.codehaus.org/browse/JRUBY-7003
end # def configure_logging
# Emit a failure message and abort.
def fail(message)
@ -389,14 +349,14 @@ class LogStash::Runner < Clamp::StrictCommand
def trap_sighup
Stud::trap("HUP") do
@logger.warn(I18n.t("logstash.agent.sighup"))
logger.warn(I18n.t("logstash.agent.sighup"))
@agent.reload_state!
end
end
def trap_sigterm
Stud::trap("TERM") do
@logger.warn(I18n.t("logstash.agent.sigterm"))
logger.warn(I18n.t("logstash.agent.sigterm"))
@agent_task.stop!
end
end
@ -404,11 +364,11 @@ class LogStash::Runner < Clamp::StrictCommand
def trap_sigint
Stud::trap("INT") do
if @interrupted_once
@logger.fatal(I18n.t("logstash.agent.forced_sigint"))
logger.fatal(I18n.t("logstash.agent.forced_sigint"))
exit
else
@logger.warn(I18n.t("logstash.agent.sigint"))
Thread.new(@logger) {|logger| sleep 5; logger.warn(I18n.t("logstash.agent.slow_shutdown")) }
logger.warn(I18n.t("logstash.agent.sigint"))
Thread.new(logger) {|lg| sleep 5; lg.warn(I18n.t("logstash.agent.slow_shutdown")) }
@interrupted_once = true
@agent_task.stop!
end

View file

@ -2,6 +2,7 @@
module LogStash
class ShutdownWatcher
include LogStash::Util::Loggable
CHECK_EVERY = 1 # second
REPORT_EVERY = 5 # checks
@ -25,14 +26,6 @@ module LogStash
@unsafe_shutdown
end
def self.logger=(logger)
@logger = logger
end
def self.logger
@logger ||= Cabin::Channel.get(LogStash)
end
def self.start(pipeline, cycle_period=CHECK_EVERY, report_every=REPORT_EVERY, abort_threshold=ABORT_AFTER)
controller = self.new(pipeline, cycle_period, report_every, abort_threshold)
Thread.new(controller) { |controller| controller.start }

View file

@ -6,10 +6,9 @@ module LogStash::Util
# Decorators provides common manipulation on the event data.
module Decorators
include LogStash::Util::Loggable
extend self
@logger = Cabin::Channel.get(LogStash)
# fields is a hash of field => value
# where both `field` and `value` can use sprintf syntax.
def add_fields(fields,event, pluginname)
@ -28,7 +27,7 @@ module LogStash::Util
else
event.set(field, v)
end
@logger.debug? and @logger.debug("#{pluginname}: adding value to field", :field => field, :value => value)
self.logger.debug? and self.logger.debug("#{pluginname}: adding value to field", "field" => field, "value" => value)
end
end
end
@ -37,7 +36,7 @@ module LogStash::Util
def add_tags(tags, event, pluginname)
tags.each do |tag|
tag = event.sprintf(tag)
@logger.debug? and @logger.debug("#{pluginname}: adding tag", :tag => tag)
self.logger.debug? and self.logger.debug("#{pluginname}: adding tag", "tag" => tag)
# note below that the tags array field needs to be updated then reassigned to the event.
# this is important because a construct like event["tags"] << tag will not work
# in the current Java event implementation. see https://github.com/elastic/logstash/issues/4140

View file

@ -1,11 +1,6 @@
# encoding: utf-8
require 'cabin'
module LogStash::Util::JavaVersion
def self.logger
@logger ||= Cabin::Channel.get(LogStash)
end
# Return the current java version string. Returns nil if this is a non-java platform (e.g. MRI).
def self.version
return nil unless LogStash::Environment.jruby?

View file

@ -1,29 +1,19 @@
# encoding: utf-8
require "logstash/logging/logger"
require "logstash/namespace"
require "cabin"
module LogStash module Util
module Loggable
class << self
def logger=(new_logger)
@logger = new_logger
def self.included(klass)
def klass.logger
ruby_name = self.name || self.class.name || self.class.to_s
log4j_name = ruby_name.gsub('::', '.').downcase
@logger ||= LogStash::Logging::Logger.new(log4j_name)
end
def logger
@logger ||= Cabin::Channel.get(LogStash)
self.class.logger
end
end
def self.included(base)
class << base
def logger
Loggable.logger
end
end
end
def logger
Loggable.logger
end
end
end; end

View file

@ -86,10 +86,10 @@ en:
runner:
short-help: |-
usage:
bin/logstash -f CONFIG_PATH [-t] [-r] [--quiet|verbose|debug] [-w COUNT] [-l LOG]
bin/logstash -e CONFIG_STR [-t] [--quiet|verbose|debug] [-w COUNT] [-l LOG]
bin/logstash -i SHELL [--quiet|verbose|debug]
bin/logstash -V [--verbose|debug]
bin/logstash -f CONFIG_PATH [-t] [-r] [--log.level fatal|error|warn|info|debug|trace] [-w COUNT] [-l LOG]
bin/logstash -e CONFIG_STR [-t] [--log.level fatal|error|warn|info|debug|trace] [-w COUNT] [-l LOG]
bin/logstash -i SHELL [--log.level fatal|error|warn|info|debug|trace]
bin/logstash -V [--log.level fatal|error|warn|info|debug|trace]
bin/logstash --help
invalid-configuration: >-
The given configuration is invalid. Reason: %{error}
@ -228,10 +228,12 @@ en:
and NAME is the name of the plugin.
log_level: |+
Set the log level for logstash. Possible values are:
- quiet => :error
- verbose => :info
- debug => :debug
- warn => :warn
- fatal
- error
- warn
- info
- debug
- trace
unsafe_shutdown: |+
Force logstash to exit during shutdown even
if there are still inflight events in memory.

View file

@ -14,12 +14,11 @@ Gem::Specification.new do |gem|
gem.files = Dir.glob(["logstash-core.gemspec", "lib/**/*.rb", "spec/**/*.rb", "locales/*", "lib/logstash/api/init.ru"])
gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
gem.name = "logstash-core"
gem.require_paths = ["lib"]
gem.require_paths = ["lib", "vendor/jars"]
gem.version = LOGSTASH_CORE_VERSION
gem.add_runtime_dependency "logstash-core-event-java", "5.0.0-alpha6"
gem.add_runtime_dependency "cabin", "~> 0.8.0" #(Apache 2.0 license)
gem.add_runtime_dependency "pry", "~> 0.10.1" #(Ruby license)
gem.add_runtime_dependency "stud", "~> 0.0.19" #(Apache 2.0 license)
gem.add_runtime_dependency "clamp", "~> 0.6.5" #(MIT license) for command line args/flags
@ -59,4 +58,6 @@ Gem::Specification.new do |gem|
# https://github.com/rubinius/rubinius/issues/2632#issuecomment-26954565
gem.add_runtime_dependency "racc"
end
gem.add_runtime_dependency 'jar-dependencies', '~> 0.3.4'
end

View file

@ -6,7 +6,7 @@ describe LogStash::Api::RackApp do
class DummyApp
class RaisedError < StandardError; end
def call(env)
case env["PATH_INFO"]
when "/good-page"
@ -21,7 +21,7 @@ describe LogStash::Api::RackApp do
end
end
let(:logger) { Cabin::Channel.get }
let(:logger) { double("logger") }
describe LogStash::Api::RackApp::ApiErrorHandler do
let(:app) do
@ -49,7 +49,7 @@ describe LogStash::Api::RackApp do
allow(logger).to receive(:error).with(any_args)
get "/raise-error"
end
it "should return a 500 error" do
expect(last_response.status).to eql(500)
end
@ -74,13 +74,15 @@ describe LogStash::Api::RackApp do
run DummyApp.new
end
end
it "should log good requests as info" do
expect(logger).to receive(:info?).and_return(true)
expect(logger).to receive(:info).with(LogStash::Api::RackApp::ApiLogger::LOG_MESSAGE, anything).once
get "/good-page"
end
it "should log 5xx requests as warnings" do
expect(logger).to receive(:warn?).and_return(true)
expect(logger).to receive(:warn).with(LogStash::Api::RackApp::ApiLogger::LOG_MESSAGE, anything).once
get "/service-unavailable"
end

View file

@ -3,7 +3,9 @@ require "spec_helper"
require "logstash/config/loader"
describe LogStash::Config::Loader do
subject { described_class.new(Cabin::Channel.get) }
let(:logger) { double("logger") }
subject { described_class.new(logger) }
context "when local" do
before { expect(subject).to receive(:local_config).with(path) }

View file

@ -4,6 +4,7 @@ require "logstash/pipeline"
require "logstash/pipeline_reporter"
class DummyOutput < LogStash::Outputs::Base
config_name "dummyoutput"
milestone 2

View file

@ -80,7 +80,7 @@ class DummySafeFilter < LogStash::Filters::Base
end
class TestPipeline < LogStash::Pipeline
attr_reader :outputs, :settings, :logger
attr_reader :outputs, :settings
end
describe LogStash::Pipeline do
@ -130,7 +130,7 @@ describe LogStash::Pipeline do
let(:logger) { double("pipeline logger").as_null_object }
before do
expect(Cabin::Channel).to receive(:get).with(LogStash).and_return(logger).at_least(:once)
expect(TestPipeline).to receive(:logger).and_return(logger)
allow(logger).to receive(:debug?).and_return(true)
end

View file

@ -76,7 +76,7 @@ describe LogStash::Plugin do
.with(plugin_name)
.and_return(double(:version => Gem::Version.new('1.0.0')))
expect_any_instance_of(Cabin::Channel).not_to receive(:info)
expect_any_instance_of(LogStash::Logging::Logger).not_to receive(:info)
subject.validate({})
end
@ -85,7 +85,7 @@ describe LogStash::Plugin do
.with(plugin_name)
.and_return(double(:version => Gem::Version.new('0.9.1')))
expect_any_instance_of(Cabin::Channel).to receive(:info)
expect_any_instance_of(LogStash::Logging::Logger).to receive(:info)
.with(/Using version 0.9.x/)
subject.validate({})
@ -96,7 +96,7 @@ describe LogStash::Plugin do
.with(plugin_name)
.and_return(double(:version => Gem::Version.new('0.1.1')))
expect_any_instance_of(Cabin::Channel).to receive(:info)
expect_any_instance_of(LogStash::Logging::Logger).to receive(:info)
.with(/Using version 0.1.x/)
subject.validate({})
end
@ -110,7 +110,7 @@ describe LogStash::Plugin do
.with(plugin_name)
.and_return(double(:version => Gem::Version.new('0.1.1')))
expect_any_instance_of(Cabin::Channel).to receive(:info)
expect_any_instance_of(LogStash::Logging::Logger).to receive(:info)
.once
.with(/Using version 0.1.x/)
@ -119,7 +119,7 @@ describe LogStash::Plugin do
end
it "warns the user if we can't find a defined version" do
expect_any_instance_of(Cabin::Channel).to receive(:warn)
expect_any_instance_of(LogStash::Logging::Logger).to receive(:warn)
.once
.with(/plugin doesn't have a version/)
@ -128,7 +128,7 @@ describe LogStash::Plugin do
it 'logs a warning if the plugin use the milestone option' do
expect_any_instance_of(Cabin::Channel).to receive(:debug)
expect_any_instance_of(LogStash::Logging::Logger).to receive(:debug)
.with(/stromae plugin is using the 'milestone' method/)
class LogStash::Filters::Stromae < LogStash::Filters::Base
@ -278,6 +278,7 @@ describe LogStash::Plugin do
[LogStash::Inputs::Base, LogStash::Filters::Base, LogStash::Outputs::Base].each do |base|
let(:plugin) do
Class.new(base) do
#include LogStash::Util::Loggable
config_name "testing"
def register

View file

@ -15,13 +15,20 @@ end
describe LogStash::Runner do
subject { LogStash::Runner }
let(:channel) { Cabin::Channel.new }
let(:logger) { double("logger") }
before :each do
allow(Cabin::Channel).to receive(:get).with(LogStash).and_return(channel)
allow(channel).to receive(:subscribe).with(any_args)
allow(channel).to receive(:log) {}
allow(LogStash::ShutdownWatcher).to receive(:logger).and_return(channel)
allow(LogStash::Runner).to receive(:logger).and_return(logger)
allow(logger).to receive(:debug?).and_return(true)
allow(logger).to receive(:subscribe).with(any_args)
allow(logger).to receive(:log) {}
allow(logger).to receive(:info) {}
allow(logger).to receive(:fatal) {}
allow(logger).to receive(:warn) {}
allow(LogStash::ShutdownWatcher).to receive(:logger).and_return(logger)
allow(LogStash::Logging::Logger).to receive(:configure_logging) do |level, path|
allow(logger).to receive(:level).and_return(level.to_sym)
end
end
after :each do
@ -29,7 +36,6 @@ describe LogStash::Runner do
end
after :all do
LogStash::ShutdownWatcher.logger = nil
end
describe "argument precedence" do
@ -56,16 +62,13 @@ describe LogStash::Runner do
describe "argument parsing" do
subject { LogStash::Runner.new("") }
before :each do
allow(Cabin::Channel.get(LogStash)).to receive(:terminal)
end
context "when -e is given" do
let(:args) { ["-e", "input {} output {}"] }
let(:agent) { double("agent") }
before do
allow(agent).to receive(:logger=).with(anything)
allow(agent).to receive(:shutdown)
allow(agent).to receive(:register_pipeline)
end
@ -131,32 +134,6 @@ describe LogStash::Runner do
end
end
describe "--log.format=json" do
subject { LogStash::Runner.new("") }
let(:logfile) { Stud::Temporary.file }
let(:args) { [ "--log.format", "json", "-l", logfile.path, "-e", "input {} output{}" ] }
after do
logfile.close
File.unlink(logfile.path)
end
before do
expect(channel).to receive(:subscribe).with(kind_of(LogStash::Logging::JSON)).and_call_original
subject.run(args)
# Log file should have stuff in it.
expect(logfile.stat.size).to be > 0
end
it "should log in valid json. One object per line." do
logfile.each_line do |line|
expect(line).not_to be_empty
expect { JSON.parse(line) }.not_to raise_error
end
end
end
describe "--config.test_and_exit" do
subject { LogStash::Runner.new("") }
let(:args) { ["-t", "-e", pipeline_string] }
@ -164,7 +141,6 @@ describe LogStash::Runner do
context "with a good configuration" do
let(:pipeline_string) { "input { } filter { } output { }" }
it "should exit successfuly" do
expect(channel).to receive(:terminal)
expect(subject.run(args)).to eq(0)
end
end
@ -172,7 +148,7 @@ describe LogStash::Runner do
context "with a bad configuration" do
let(:pipeline_string) { "rlwekjhrewlqrkjh" }
it "should fail by returning a bad exit code" do
expect(channel).to receive(:fatal)
expect(logger).to receive(:fatal)
expect(subject.run(args)).to eq(1)
end
end
@ -303,35 +279,35 @@ describe LogStash::Runner do
it "should set log level to warn" do
args = ["--version"]
subject.run("bin/logstash", args)
expect(channel.level).to eq(:warn)
expect(logger.level).to eq(:warn)
end
end
context "when setting to debug" do
it "should set log level to debug" do
args = ["--log.level", "debug", "--version"]
subject.run("bin/logstash", args)
expect(channel.level).to eq(:debug)
expect(logger.level).to eq(:debug)
end
end
context "when setting to verbose" do
it "should set log level to info" do
args = ["--log.level", "verbose", "--version"]
args = ["--log.level", "info", "--version"]
subject.run("bin/logstash", args)
expect(channel.level).to eq(:info)
expect(logger.level).to eq(:info)
end
end
context "when setting to quiet" do
it "should set log level to error" do
args = ["--log.level", "quiet", "--version"]
args = ["--log.level", "error", "--version"]
subject.run("bin/logstash", args)
expect(channel.level).to eq(:error)
expect(logger.level).to eq(:error)
end
end
context "deprecated flags" do
context "when using --quiet" do
it "should warn about the deprecated flag" do
expect(channel).to receive(:warn).with(/DEPRECATION WARNING/)
expect(logger).to receive(:warn).with(/DEPRECATION WARNING/)
args = ["--quiet", "--version"]
subject.run("bin/logstash", args)
end
@ -339,12 +315,12 @@ describe LogStash::Runner do
it "should still set the log level accordingly" do
args = ["--quiet", "--version"]
subject.run("bin/logstash", args)
expect(channel.level).to eq(:error)
expect(logger.level).to eq(:error)
end
end
context "when using --debug" do
it "should warn about the deprecated flag" do
expect(channel).to receive(:warn).with(/DEPRECATION WARNING/)
expect(logger).to receive(:warn).with(/DEPRECATION WARNING/)
args = ["--debug", "--version"]
subject.run("bin/logstash", args)
end
@ -352,12 +328,12 @@ describe LogStash::Runner do
it "should still set the log level accordingly" do
args = ["--debug", "--version"]
subject.run("bin/logstash", args)
expect(channel.level).to eq(:debug)
expect(logger.level).to eq(:debug)
end
end
context "when using --verbose" do
it "should warn about the deprecated flag" do
expect(channel).to receive(:warn).with(/DEPRECATION WARNING/)
expect(logger).to receive(:warn).with(/DEPRECATION WARNING/)
args = ["--verbose", "--version"]
subject.run("bin/logstash", args)
end
@ -365,7 +341,7 @@ describe LogStash::Runner do
it "should still set the log level accordingly" do
args = ["--verbose", "--version"]
subject.run("bin/logstash", args)
expect(channel.level).to eq(:info)
expect(logger.level).to eq(:info)
end
end
end

View file

@ -3,8 +3,6 @@ require "spec_helper"
require "logstash/shutdown_watcher"
describe LogStash::ShutdownWatcher do
let(:channel) { Cabin::Channel.new }
let(:check_every) { 0.01 }
let(:check_threshold) { 100 }
subject { LogStash::ShutdownWatcher.new(pipeline, check_every) }
@ -14,8 +12,6 @@ describe LogStash::ShutdownWatcher do
report_count = 0
before :each do
LogStash::ShutdownWatcher.logger = channel
allow(pipeline).to receive(:reporter).and_return(reporter)
allow(pipeline).to receive(:thread).and_return(Thread.current)
allow(reporter).to receive(:snapshot).and_return(reporter_snapshot)

View file

@ -0,0 +1,37 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.logstash.log;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.Marker;
import org.apache.logging.log4j.core.config.Property;
import org.apache.logging.log4j.core.impl.Log4jLogEvent;
import org.apache.logging.log4j.message.Message;
import java.util.List;
@JsonSerialize(using = CustomLogEventSerializer.class)
public class CustomLogEvent extends Log4jLogEvent {
public CustomLogEvent(final String loggerName, final Marker marker, final String loggerFQCN, final Level level,
final Message message, final List<Property> properties, final Throwable t) {
super(loggerName, marker, loggerFQCN, level, message, properties, t);
}
}

View file

@ -0,0 +1,48 @@
package org.logstash.log;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.SerializerProvider;
import java.io.IOException;
import java.util.Map;
public class CustomLogEventSerializer extends JsonSerializer<CustomLogEvent> {
@Override
public void serialize(CustomLogEvent event, JsonGenerator generator, SerializerProvider provider) throws IOException {
generator.writeStartObject();
generator.writeObjectField("level", event.getLevel());
generator.writeObjectField("loggerName", event.getLoggerName());
generator.writeObjectField("timeMillis", event.getTimeMillis());
generator.writeObjectField("thread", event.getThreadName());
generator.writeFieldName("logEvent");
generator.writeStartObject();
if (event.getMessage() instanceof StructuredMessage) {
StructuredMessage message = (StructuredMessage) event.getMessage();
generator.writeStringField("message", message.getMessage());
if (message.getParams() != null) {
for (Map.Entry<Object, Object> entry : message.getParams().entrySet()) {
Object value = entry.getValue();
try {
generator.writeObjectField(entry.getKey().toString(), value);
} catch (JsonMappingException e) {
generator.writeObjectField(entry.getKey().toString(), value.toString());
}
}
}
} else {
generator.writeStringField("message", event.getMessage().getFormattedMessage());
}
generator.writeEndObject();
generator.writeEndObject();
}
@Override
public Class<CustomLogEvent> handledType() {
return CustomLogEvent.class;
}
}
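For illustration only (not part of this commit): a minimal sketch of the line this serializer produces when an appender uses JSONLayout-style output and the custom factories registered below are in effect. The package, class, logger name, and URL here are made up.

package org.logstash.log.examples;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

import java.util.Collections;

public class SerializerSketch {
    public static void main(String[] args) {
        Logger logger = LogManager.getLogger("example");
        // A single Map argument becomes a StructuredMessage, so its entries are
        // written as sibling fields of "message" inside the "logEvent" object.
        logger.warn("fetch failed", Collections.singletonMap("url", "http://localhost:9600"));
        // Expected output, approximately (one JSON object per line):
        // {"level":"WARN","loggerName":"example","timeMillis":1472142622000,"thread":"main",
        //  "logEvent":{"message":"fetch failed","url":"http://localhost:9600"}}
    }
}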

View file

@ -0,0 +1,17 @@
package org.logstash.log;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.Marker;
import org.apache.logging.log4j.core.LogEvent;
import org.apache.logging.log4j.core.config.Property;
import org.apache.logging.log4j.core.impl.LogEventFactory;
import org.apache.logging.log4j.message.Message;
import java.util.List;
public class LogstashLogEventFactory implements LogEventFactory {
@Override
public LogEvent createEvent(String loggerName, Marker marker, String fqcn, Level level, Message data, List<Property> properties, Throwable t) {
return new CustomLogEvent(loggerName, marker, fqcn, level, data, properties, t);
}
}

View file

@ -0,0 +1,33 @@
package org.logstash.log;
import org.apache.logging.log4j.message.Message;
import org.apache.logging.log4j.message.MessageFactory;
import org.apache.logging.log4j.message.ObjectMessage;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.message.SimpleMessage;
import java.util.Map;
public final class LogstashMessageFactory implements MessageFactory {
public static final LogstashMessageFactory INSTANCE = new LogstashMessageFactory();
@Override
public Message newMessage(Object message) {
return new ObjectMessage(message);
}
@Override
public Message newMessage(String message) {
return new SimpleMessage(message);
}
@Override
public Message newMessage(String message, Object... params) {
if (params.length == 1 && params[0] instanceof Map) {
return new StructuredMessage(message, params);
} else {
return new ParameterizedMessage(message, params);
}
}
}
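A hedged sketch of the routing above (the package and class names in the example are invented): a single Map argument yields a StructuredMessage, while anything else falls back to Log4j2's usual {} substitution.

package org.logstash.log.examples;

import org.apache.logging.log4j.message.Message;
import org.logstash.log.LogstashMessageFactory;

import java.util.Collections;

public class MessageFactorySketch {
    public static void main(String[] args) {
        LogstashMessageFactory factory = LogstashMessageFactory.INSTANCE;

        // One Map argument: entries are preserved as structured key/value pairs.
        Message structured = factory.newMessage("pipeline started",
                Collections.singletonMap("pipeline.id", "main"));
        System.out.println(structured.getClass().getSimpleName()); // StructuredMessage

        // Any other argument list: standard parameterized formatting.
        Message plain = factory.newMessage("my name is: {}", "foo");
        System.out.println(plain.getFormattedMessage()); // my name is: foo
    }
}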

View file

@ -0,0 +1,76 @@
package org.logstash.log;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import org.apache.logging.log4j.message.Message;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
@JsonSerialize(using = CustomLogEventSerializer.class)
public class StructuredMessage implements Message {
private final String message;
private final Map<Object, Object> params;
@SuppressWarnings("unchecked")
public StructuredMessage(String message) {
this(message, (Map) null);
}
@SuppressWarnings("unchecked")
public StructuredMessage(String message, Object[] params) {
final Map<Object, Object> paramsMap;
if (params.length == 1 && params[0] instanceof Map) {
paramsMap = (Map) params[0];
} else {
paramsMap = new HashMap<>();
try {
for (int i = 0; i < params.length; i += 2) {
paramsMap.put(params[i].toString(), params[i + 1]);
}
} catch (IndexOutOfBoundsException e) {
throw new IllegalArgumentException("must log key-value pairs");
}
}
this.message = message;
this.params = paramsMap;
}
public StructuredMessage(String message, Map<Object, Object> params) {
this.message = message;
this.params = params;
}
public String getMessage() {
return message;
}
public Map<Object, Object> getParams() {
return params;
}
@Override
public Object[] getParameters() {
return params.values().toArray();
}
@Override
public String getFormattedMessage() {
String formatted = message;
if (params != null && !params.isEmpty()) {
formatted += " " + params;
}
return formatted;
}
@Override
public String getFormat() {
return null;
}
@Override
public Throwable getThrowable() {
return null;
}
}
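A small, illustrative sketch of how a StructuredMessage renders on the plain-text (PatternLayout) path; the class name and values are invented for the example.

package org.logstash.log.examples;

import org.logstash.log.StructuredMessage;

import java.util.HashMap;
import java.util.Map;

public class StructuredMessageSketch {
    public static void main(String[] args) {
        Map<Object, Object> params = new HashMap<>();
        params.put("foo", "bar");

        // An explicit parameter map is appended after the message text.
        StructuredMessage withMap = new StructuredMessage("complex message", params);
        System.out.println(withMap.getFormattedMessage()); // complex message {foo=bar}

        // Flat key/value pairs are folded into a map by the Object[] constructor.
        StructuredMessage withPairs = new StructuredMessage("took", new Object[]{"millis", 42});
        System.out.println(withPairs.getFormattedMessage()); // took {millis=42}
    }
}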

View file

@ -0,0 +1,2 @@
Log4jLogEventFactory=org.logstash.log.LogstashLogEventFactory
log4j2.messageFactory=org.logstash.log.LogstashMessageFactory
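These two classpath properties register the custom event and message factories with Log4j2 at startup. A minimal, illustrative way to confirm the wiring (assuming the properties file above is on the classpath; the class name below is made up):

package org.logstash.log.examples;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

public class RegistrationSketch {
    public static void main(String[] args) {
        Logger logger = LogManager.getLogger(RegistrationSketch.class);
        // Should print org.logstash.log.LogstashMessageFactory once the
        // log4j2.component.properties file above is picked up.
        System.out.println(logger.getMessageFactory().getClass().getName());
    }
}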

View file

@ -0,0 +1,112 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.logstash.log;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.junit.LoggerContextRule;
import org.apache.logging.log4j.test.appender.ListAppender;
import org.junit.ClassRule;
import org.junit.Test;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static junit.framework.TestCase.assertEquals;
public class CustomLogEventTests {
private static final ObjectMapper mapper = new ObjectMapper();
private static final String CONFIG = "log4j2-test1.xml";
private ListAppender appender;
@ClassRule
public static LoggerContextRule CTX = new LoggerContextRule(CONFIG);
@Test
public void testPatternLayout() {
appender = CTX.getListAppender("EventLogger").clear();
Logger logger = LogManager.getLogger("EventLogger");
logger.info("simple message");
logger.warn("complex message", Collections.singletonMap("foo", "bar"));
logger.error("my name is: {}", "foo");
logger.error("here is a map: {}. ok?", Collections.singletonMap(2, 5));
logger.warn("ignored params {}", 4, 6);
List<String> messages = appender.getMessages();
assertEquals(5, messages.size());
assertEquals("[INFO][EventLogger] simple message", messages.get(0));
assertEquals("[WARN][EventLogger] complex message {foo=bar}", messages.get(1));
assertEquals("[ERROR][EventLogger] my name is: foo", messages.get(2));
assertEquals("[ERROR][EventLogger] here is a map: {}. ok? {2=5}", messages.get(3));
assertEquals("[WARN][EventLogger] ignored params 4", messages.get(4));
}
@Test
@SuppressWarnings("unchecked")
public void testJSONLayout() throws Exception {
appender = CTX.getListAppender("JSONEventLogger").clear();
Logger logger = LogManager.getLogger("JSONEventLogger");
logger.info("simple message");
logger.warn("complex message", Collections.singletonMap("foo", "bar"));
logger.error("my name is: {}", "foo");
logger.error("here is a map: {}", Collections.singletonMap(2, 5));
logger.warn("ignored params {}", 4, 6, 8);
List<String> messages = appender.getMessages();
Map<String, Object> firstMessage = mapper.readValue(messages.get(0), Map.class);
assertEquals(5, firstMessage.size());
assertEquals("INFO", firstMessage.get("level"));
assertEquals("JSONEventLogger", firstMessage.get("loggerName"));
assertEquals("main", firstMessage.get("thread"));
assertEquals(Collections.singletonMap("message", "simple message"), firstMessage.get("logEvent"));
Map<String, Object> secondMessage = mapper.readValue(messages.get(1), Map.class);
assertEquals(5, secondMessage.size());
assertEquals("WARN", secondMessage.get("level"));
assertEquals("JSONEventLogger", secondMessage.get("loggerName"));
assertEquals("main", secondMessage.get("thread"));
Map<String, Object> logEvent = new HashMap<>();
logEvent.put("message", "complex message");
logEvent.put("foo", "bar");
assertEquals(logEvent, secondMessage.get("logEvent"));
Map<String, Object> thirdMessage = mapper.readValue(messages.get(2), Map.class);
assertEquals(5, thirdMessage.size());
logEvent = Collections.singletonMap("message", "my name is: foo");
assertEquals(logEvent, thirdMessage.get("logEvent"));
Map<String, Object> fourthMessage = mapper.readValue(messages.get(3), Map.class);
assertEquals(5, fourthMessage.size());
logEvent = new HashMap<>();
logEvent.put("message", "here is a map: {}");
logEvent.put("2", 5);
assertEquals(logEvent, fourthMessage.get("logEvent"));
Map<String, Object> fifthMessage = mapper.readValue(messages.get(4), Map.class);
assertEquals(5, fifthMessage.size());
logEvent = Collections.singletonMap("message", "ignored params 4");
assertEquals(logEvent, fifthMessage.get("logEvent"));
}
}

View file

@ -0,0 +1,11 @@
status = error
name = LogstashPropertiesConfig
appender.list.type = List
appender.list.name = List
#appender.list.layout.type = JSONLayout
#appender.list.layout.compact = true
#appender.list.layout.eventEol = true
rootLogger.level = info
rootLogger.appenderRef.stdout.ref = List

View file

@ -0,0 +1,31 @@
<?xml version="1.0" encoding="UTF-8"?>
<Configuration status="OFF" name="LoggerTest">
<properties>
<property name="filename">target/test.log</property>
</properties>
<ThresholdFilter level="trace"/>
<Appenders>
<List name="EventLogger">
<PatternLayout pattern="[%p][%c] %m"/>
</List>
<List name="JSONEventLogger">
<JSONLayout compact="true" eventEol="true" />
</List>
</Appenders>
<Loggers>
<Logger name="EventLogger" level="debug" additivity="false">
<AppenderRef ref="EventLogger"/>
</Logger>
<Logger name="JSONEventLogger" level="debug" additivity="false">
<AppenderRef ref="JSONEventLogger"/>
</Logger>
<Root level="trace">
<AppenderRef ref="EventLogger"/>
</Root>
</Loggers>
</Configuration>

View file

@ -19,6 +19,7 @@ namespace "artifact" do
"lib/systeminstall/**/*",
"logstash-core/lib/**/*",
"logstash-core/locales/**/*",
"logstash-core/vendor/**/*",
"logstash-core/*.gemspec",
"logstash-core-event-java/lib/**/*",
"logstash-core-event-java/*.gemspec",

View file

@ -11,11 +11,16 @@ namespace "compile" do
task "grammar" => "logstash-core/lib/logstash/config/grammar.rb"
task "logstash-core-java" do
puts("Building logstash-core using gradle")
system("./gradlew", "vendor", "-p", "./logstash-core")
end
task "logstash-core-event-java" do
puts("Building logstash-core-event-java using gradle")
system("logstash-core-event-java/gradlew", "jar", "-p", "./logstash-core-event-java")
system("./gradlew", "jar", "-p", "./logstash-core-event-java")
end
desc "Build everything"
task "all" => ["grammar", "logstash-core-event-java"]
task "all" => ["grammar", "logstash-core-java", "logstash-core-event-java"]
end

View file

@ -5,7 +5,6 @@ require_relative 'coverage_helper'
CoverageHelper.eager_load if ENV['COVERAGE']
require "logstash/devutils/rspec/spec_helper"
require "logstash/logging/json"
require "flores/rspec"
require "flores/random"
@ -22,21 +21,6 @@ end
RSpec.configure do |c|
Flores::RSpec.configure(c)
c.before do
# Force Cabin to always have a JSON subscriber. The main purpose of this
# is to catch crashes in json serialization for our logs. JSONIOThingy
# exists to validate that what LogStash::Logging::JSON emits is always
# valid JSON.
jsonvalidator = JSONIOThingy.new
allow(Cabin::Channel).to receive(:new).and_wrap_original do |m, *args|
logger = m.call(*args)
logger.level = :debug
logger.subscribe(LogStash::Logging::JSON.new(jsonvalidator))
logger
end
end
end
def installed_plugins