- consolidate to one logstashd that forks parsers & indexers
- use logger for output
- stop handling QuitRequests
- re-up broadcast interval to 30 (was @ 5 for debugging)
- add debug output (logstashd -d ...)
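
As a quick orientation, the new daemon takes exactly one config file plus the flags defined in bin/logstashd below; a usage sketch (the config path is a placeholder, and -d/-p/-I/-P come straight from the OptionParser block in the diff):

    # debug logging, one indexer and one parser (the defaults)
    logstashd -d etc/logstash.yaml

    # three parsers, no indexer on this host
    logstashd -p 3 -I etc/logstash.yaml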
parent e8159ef438
commit d4459ebf46

9 changed files with 180 additions and 138 deletions
@@ -1,50 +0,0 @@
-#!/usr/bin/env ruby
-
-require 'rubygems'
-require 'lib/net/servers/parser'
-
-def main(args)
-  if args.length != 1
-    $stderr.puts "Usage: #{$0} configfile"
-    return 1
-  end
-
-  Thread::abort_on_exception = true
-
-  if ENV.has_key?("PROFILE")
-    require 'ruby-prof'
-    RubyProf.start
-  end
-
-  s = LogStash::Net::Servers::Parser.new(args[0])
-  s.run
-
-  if ENV.has_key?("PROFILE")
-    result = RubyProf.stop
-    printer = RubyProf::FlatPrinter.new(result)
-    printer.print(STDOUT, 0)
-  end
-
-  return 0
-end
-
-procs = ENV["PROCS"].to_i
-procs ||= 1
-
-if procs > 1
-  children = []
-  1.upto(procs) do |c|
-    pid = fork do
-      exit main(ARGV)
-    end
-    children << pid
-  end
-
-  while children.length > 0
-    pid = Process.wait(children[0], 0)
-    children.delete(pid)
-    $stderr.puts "pid #{pid} died"
-  end
-else
-  exit main(ARGV)
-end
bin/logstashd (new executable file, 121 lines)
@@ -0,0 +1,121 @@
+#!/usr/bin/env ruby
+
+$: << File.join(File.dirname(__FILE__), "..")
+
+require 'rubygems'
+require 'lib/net/servers/indexer'
+require 'lib/net/servers/parser'
+require 'logger'
+require 'optparse'
+
+$progname = $0.split(File::SEPARATOR).last
+$version = "0.3"
+$logger = Logger.new(STDOUT)
+$logger.level = Logger::INFO
+$logger.progname = $progname
+$logger.datetime_format = "%Y-%m-%d %H:%M:%S"
+
+def main(args)
+  Thread::abort_on_exception = true
+
+  options = parse_options(args)
+  children = {}
+
+  if options[:indexer]
+    pid = fork do
+      indexer = LogStash::Net::Servers::Indexer.new(options[:config],
+                                                    $logger)
+      indexer.run
+      exit(0)
+    end
+    $logger.info "starting indexer (pid #{pid})"
+    children[pid] = :indexer
+  end
+
+  if options[:parsers] > 0
+    1.upto(options[:parsers]) do |i|
+      pid = fork do
+        parser = LogStash::Net::Servers::Parser.new(options[:config],
+                                                    $logger)
+        parser.run
+        exit(0)
+      end
+      $logger.info "starting parser #{i}/#{options[:parsers]} (pid #{pid})"
+      children[pid] = :parser
+    end
+  end
+
+  while children.keys.length > 0
+    pid = Process.wait(children.keys[0], 0)
+    $logger.warn "pid #{pid} died (#{children[pid]})"
+    children.delete(pid)
+  end
+
+  return 0
+end
+
+def parse_options(args)
+  options = {:indexer => true,
+             :parsers => 1,
+             :parserset => false,
+             :config => nil,
+            }
+
+  opts = OptionParser.new do |opts|
+    opts.banner = "Usage: logstashd [options] configfile"
+    opts.version = $version
+
+    opts.on("-d", "--debug", "Enable debug output") do |x|
+      $logger.level = Logger::DEBUG
+    end
+
+    opts.on("-I", "--disable-indexer",
+            "Disable indexer (default enabled)") do |x|
+      options[:indexer] = false
+    end
+
+    opts.on("-p", "--parsers COUNT", Integer,
+            "Number of parsers to run (default 1)") do |x|
+      raise(ArgumentError, "parser count must be >=0") if x < 0
+      options[:parsers] = x
+      if options[:parserset]
+        $stderr.puts "can only specify -p N or -P once"
+        exit(1)
+      end
+      options[:parserset] = true
+    end
+
+    opts.on("-P", "--disable-parser", "Disable parser") do |x|
+      options[:parsers] = 0
+      if options[:parserset]
+        $stderr.puts "can only specify -p N or -P once"
+        exit(1)
+      end
+      options[:parserset] = true
+    end
+
+    opts.on("-h", "--help", "Show this help message") do |x|
+      puts opts
+      exit(0)
+    end
+  end
+
+  begin
+    opts.parse!(args)
+  rescue
+    $stderr.puts "#{$progname}: #{$!}"
+    $stderr.puts opts
+    exit(1)
+  end
+
+  if ARGV.length != 1
+    $stderr.puts "#{$progname}: must specify exactly one config file"
+    $stderr.puts opts
+    exit(1)
+  end
+  options[:config] = args.shift
+
+  return options
+end
+
+exit main(ARGV)
@@ -1,30 +0,0 @@
-#!/usr/bin/env ruby
-
-require 'rubygems'
-require 'lib/net/servers/indexer'
-
-def main(args)
-  if args.length != 1
-    $stderr.puts "Usage: #{$0} configfile"
-    return 1
-  end
-  Thread::abort_on_exception = true
-
-  if ENV.has_key?("PROFILE")
-    require 'ruby-prof'
-    RubyProf.start
-  end
-
-  s = LogStash::Net::Servers::Indexer.new(args[0])
-  s.run
-
-  if ENV.has_key?("PROFILE")
-    result = RubyProf.stop
-    printer = RubyProf::FlatPrinter.new(result)
-    printer.print(STDOUT, 0)
-  end
-
-  return 0
-end
-
-exit main(ARGV)
@@ -1,6 +1,6 @@
 #!/usr/bin/ruby
 #
-require 'rubygems'
+require "rubygems"
 require "socket"
 require "lib/net/message"
 require "lib/net/client"
@@ -25,15 +25,14 @@ class SearchClient < LogStash::Net::MessageClient
   attr_reader :responding
   attr_reader :results
 
-  def initialize(opts={})
-    #@log_type = opts[:log_type]
-    #@query = opts[:query]
+  def initialize(config_file)
     @indexers = Array.new
     @responding = Array.new
    @hits = 0
     @results = []
     @result_mutex = Mutex.new
-    super(opts)
+    config = YAML::load(File.open(config_file).read)
+    super(config, "search")
     start
   end
 
@@ -87,8 +86,11 @@ class SearchClient < LogStash::Net::MessageClient
 end
 
 def main(args)
-  client = SearchClient.new()
-  client.search(args[0], args[1])
+  if ARGV.length != 3
+    $stderr.puts "Usage: search configfile log_type query"
+  end
+  client = SearchClient.new(args[0])
+  client.search(args[1], args[2])
 
   # Wait for the client to decide it's done.
   client.run
@@ -1,11 +1,18 @@
 require 'rubygems'
 require 'lib/net/socket'
 require 'lib/net/messages/ping.rb'
+require 'logger'
 require 'stomp'
 require 'uuid'
 
 module LogStash; module Net
   class MessageClient < MessageSocket
+    def initialize(config, progname)
+      logger = Logger.new(STDOUT)
+      logger.progname = progname
+      logger.datetime_format = "%Y-%m-%d %H:%M:%S"
+      super(config, logger)
+    end
     # Nothing, yet.
   end # class MessageClient
 end; end # module LogStash::Net
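
With MessageClient now building its own Logger and delegating to MessageSocket with a config hash, callers construct clients from a parsed config plus a program name. A minimal sketch of the new calling convention (the YAML load mirrors the SearchClient hunk above; the config path is a placeholder):

    require 'rubygems'
    require 'yaml'
    require 'lib/net/client'

    # Hypothetical caller: parse a logstash config file and open a client
    # whose log output is tagged with the given program name.
    config = YAML::load(File.open("etc/logstash.yaml").read)
    client = LogStash::Net::MessageClient.new(config, "search")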
@@ -9,7 +9,6 @@ require 'lib/net/messages/logkeys'
 require 'lib/net/messages/logtypes'
 require 'lib/net/messages/search'
 require 'lib/net/messages/searchhits'
-require 'lib/net/messages/quit'
 require 'lib/net/messages/ping'
 require 'lib/config/indexer.rb'
 require 'ferret'
@@ -18,12 +17,15 @@ require 'pp'
 
 module LogStash; module Net; module Servers
   class Indexer < LogStash::Net::MessageServer
-    BROADCAST_INTERVAL = 5
+    BROADCAST_INTERVAL = 30
     SYNC_DELAY = 10
 
-    def initialize(configfile)
+    def initialize(configfile, logger)
+      @logger = logger
+      @logger.progname = "indexer"
       @config = LogStash::Config::IndexerConfig.new(configfile)
-      super() # PASSARGS
+      super(@config, @logger)
       @indexes = Hash.new
       @lines = Hash.new { |h,k| h[k] = 0 }
       @indexcount = 0
@@ -35,11 +37,6 @@ module LogStash; module Net; module Servers
       @qps = Hash.new
     end
 
-    def QuitRequestHandler(request)
-      $stderr.puts "Got quit message, exiting..."
-      close
-    end
-
     def IndexEventRequestHandler(request)
       response = LogStash::Net::Messages::IndexEventResponse.new
       response.id = request.id
@@ -47,21 +44,17 @@ module LogStash; module Net; module Servers
 
       if @indexcount % 100 == 0
         duration = (Time.now.to_f - @starttime.to_f)
-        puts "rate: %.2f/sec" % (@indexcount / duration)
+        @logger.debug "rate: %.2f/sec" % (@indexcount / duration)
       end
 
       log_type = request.log_type
 
-      if not @indexes.member?(log_type)
-        @indexes[log_type] = @config.logs[log_type].get_index
-      end
-
-      #puts request.log_data.inspect
-      #puts @indexes[log_type].class
+      @indexes[log_type] ||= @config.logs[log_type].get_index
       @indexes[log_type] << request.log_data
     end
 
     def PingRequestHandler(request)
+      @logger.debug "received PingRequest (#{request.pingdata})"
       response = LogStash::Net::Messages::PingResponse.new
       response.id = request.id
       response.pingdata = request.pingdata
@@ -69,14 +62,18 @@ module LogStash; module Net; module Servers
     end
 
     def LogTypesRequestHandler(request)
+      @logger.debug "received LogTypesRequest"
       response = LogStash::Net::Messages::LogTypesResponse.new
+      response.id = request.id
       response.types = @config.logs.types
       yield response
     end
 
     def LogKeysRequestHandler(request)
+      @logger.debug "received LogKeysRequest"
       reader, search, qp = get_ferret(request.log_type)
       response = LogStash::Net::Messages::LogKeysResponse.new
+      response.id = request.id
       response.keys = reader.fields
       response.log_type = request.log_type
       yield response
@@ -95,13 +92,15 @@ module LogStash; module Net; module Servers
     end
 
     def SearchRequestHandler(request)
-      puts "Search for #{request.query.inspect} in #{request.log_type}"
+      @logger.debug "received SearchRequest (#{request.query.inspect} in " \
+                    "#{request.log_type})"
       response = LogStash::Net::Messages::SearchResponse.new
       response.id = request.id
       response.indexer_id = @id
 
       if @config.logs[request.log_type].nil?
-        $stderr.puts "invalid log type: #{request.log_type}"
+        @logger.warn "SearchRequest received for invalid log_type: " \
+                     "#{request.log_type}"
         response.results = []
         response.finished = true
         yield response
@@ -152,25 +151,22 @@ module LogStash; module Net; module Servers
       response.results = []
       response.finished = true
       yield response
-      puts "Search done."
     end # def SearchRequestHandler
 
     def SearchHitsRequestHandler(request)
-      puts "Search for hits on #{request.query.inspect}"
+      @logger.debug "received SearchHitsRequest (#{request.query.inspect} in " \
+                    "#{request.log_type})"
       response = LogStash::Net::Messages::SearchHitsResponse.new
       response.id = request.id
       if @config.logs[request.log_type].nil?
-        puts "invalid log type: #{request.log_type}"
+        @logger.warn "SearchHitsRequest received for invalid log_type: " \
+                     "#{request.log_type}"
         response.hits = 0
         yield response
        return
       end
 
-      reader = Ferret::Index::IndexReader.new(@config.logs[request.log_type].index_dir)
-      search = Ferret::Search::Searcher.new(reader)
-      qp = Ferret::QueryParser.new(:fields => reader.fields,
-                                   :tokenized_fields => reader.tokenized_fields,
-                                   :or_default => false)
+      reader, search, qp = get_ferret(request.log_type)
       query = qp.parse(request.query)
       offset = (request.offset or 0)
 
@@ -182,12 +178,14 @@ module LogStash; module Net; module Servers
     end # def SearchHitsRequestHandler
 
     def BroadcastMessageHandler(request)
+      @logger.debug "received BroadcastMessage (from #{request.queue})"
       @indexers_mutex.synchronize do
         @indexers[request.queue] = Time.now
       end
     end
 
     def DirectoryRequestHandler(request)
+      @logger.debug "received DirectoryRequest"
      response = LogStash::Net::Messages::DirectoryResponse.new
       response.id = request.id
       response.indexers = @indexers.keys
@@ -210,7 +208,6 @@ module LogStash; module Net; module Servers
         # answering directory requests.
 
         sleep(BROADCAST_INTERVAL + 5)
-        puts "Subscribing to logstash-directory"
         subscribe("logstash-directory")
       end
       super
@@ -222,9 +219,9 @@ module LogStash; module Net; module Servers
       if Time.now > synctime
         @indexes.each do |log_type, index|
           # TODO: only run flush if we need to
-          puts "Time's up. Syncing #{log_type}"
+          @logger.debug "Forcing a sync of #{log_type}"
           index.flush
-          break;
+          break
         end
 
         synctime = Time.now + SYNC_DELAY
@@ -243,7 +240,7 @@ module LogStash; module Net; module Servers
       cutoff = Time.now - (BROADCAST_INTERVAL * 2)
       @indexers.each do |queue, heartbeat|
         next if heartbeat > cutoff
-        $stderr.puts "dropping indexer #{queue}, last heartbeat at " \
+        @logger.warn "dropping indexer #{queue}, last heartbeat at " \
                      "#{Time.at(heartbeat)}"
         @indexers.delete(queue)
       end
@@ -5,7 +5,6 @@ require 'lib/net/message'
 require 'lib/net/messages/indexevent'
 require 'lib/net/messages/search'
 require 'lib/net/messages/searchhits'
-require 'lib/net/messages/quit'
 require 'lib/net/messages/ping'
 require 'lib/config/indexer.rb'
 require 'ferret'
@@ -16,34 +15,32 @@ module LogStash; module Net; module Servers
   class Parser < LogStash::Net::MessageServer
     SYNCDELAY = 10
 
-    def initialize(configfile)
+    def initialize(configfile, logger)
       @config = LogStash::Config::IndexerConfig.new(configfile)
-      super()
-      @indexes = Hash.new
+      @logger = logger
+      @logger.progname = "parser"
+      super(@config, @logger)
       @lines = Hash.new { |h,k| h[k] = 0 }
       @indexcount = 0
       @starttime = Time.now
     end
 
-    def QuitRequestHandler(request)
-      puts "Got quit message, exiting..."
-      close
-    end
-
     def IndexEventRequestHandler(request)
+      @logger.debug "received IndexEventRequest (for type " \
+                    "#{request.log_type}): #{request.log_data}"
       response = LogStash::Net::Messages::IndexEventResponse.new
       response.id = request.id
       @indexcount += 1
 
       if @indexcount % 100 == 0
         duration = (Time.now.to_f - @starttime.to_f)
-        puts "rate: %.2f/sec" % (@indexcount / duration)
+        @logger.debug "rate: %.2f/sec" % (@indexcount / duration)
       end
 
       log_type = request.log_type
       entry = @config.logs[log_type].parse_entry(request.log_data)
       if !entry
-        puts "Failed parsing line: #{request.log_data}"
+        @logger.warn "Failed parsing line: #{request.log_data}"
         response.code = 1
         response.error = "Entry was #{entry.inspect} (log parsing failed)"
         entry = {
@@ -53,20 +50,15 @@ module LogStash; module Net; module Servers
       else
         response.code = 0
       end
 
-      if not @indexes.member?(log_type)
-        @indexes[log_type] = @config.logs[log_type].get_index
-      end
-
       entry["@LOG_TYPE"] = log_type
 
       # Now we have a hash for the log data, send it to the indexer
       request.log_data = entry
       sendmsg("logstash-index", request)
-      #@indexes[log_type] << entry
     end
 
     def PingRequestHandler(request)
+      @logger.debug "received PingRequest (#{request.pingdata})"
       response = LogStash::Net::Messages::PingResponse.new
       response.id = request.id
       response.pingdata = request.pingdata
@@ -38,8 +38,9 @@ module LogStash; module Net
   class MessageSocket
     MAXBUF = 30
 
-    def initialize(username='', password='', host='localhost', port=61613)
+    def initialize(config, logger)
       @id = UUID::generate
+      @config, @logger = config, logger
       @want_queues = []
       @queues = []
       @want_topics = []
@@ -54,8 +55,10 @@ module LogStash; module Net
     def start_amqp
       @amqpthread = Thread.new do
         # Create connection to AMQP, and in turn, the main EventMachine loop.
+        # TODO: use @config
         AMQP.start(:host => "localhost") do
           @mq = MQ.new
+          @logger.info "Subscribing to main queue #{@id}"
           mq_q = @mq.queue(@id, :auto_delete => true)
           mq_q.subscribe(:ack =>true) { |hdr, msg| handle_message(hdr, msg) }
           handle_new_subscriptions
@@ -126,7 +129,7 @@ module LogStash; module Net
     def handle_new_subscriptions
       todo = @want_queues - @queues
       todo.each do |queue|
-        puts "Subscribing to queue #{queue}"
+        @logger.info "Subscribing to queue #{queue}"
         mq_q = @mq.queue(queue)
         mq_q.subscribe(:ack =>true) { |hdr, msg| handle_message(hdr, msg) }
         @queues << queue
@@ -134,7 +137,7 @@ module LogStash; module Net
 
       todo = @want_topics - @topics
       todo.each do |topic|
-        puts "Subscribing to topic #{topic}"
+        @logger.info "Subscribing to topic #{topic}"
         exchange = @mq.topic("amq.topic")
         mq_q = @mq.queue("#{@id}-#{topic}",
                          :exclusive => true,
@@ -1,5 +1,5 @@
 ---
-server: "localhost:61613"
+mqhost: "localhost"
 sources:
   /var/log/messages: linux-syslog
   #/var/log/secure: linux-syslog
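
This is the single config file that logstashd (and the search client) now take as their one argument; under the new format the broker is named by host only rather than a host:port server string. A complete example using just the keys visible in this hunk (anything beyond mqhost and sources would be an assumption):

    ---
    mqhost: "localhost"
    sources:
      /var/log/messages: linux-syslog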