mirror of
https://github.com/elastic/logstash.git
synced 2025-04-24 22:57:16 -04:00
- consolidate to one logstashd that forks parsers & indexers
- use logger for output - stop handling QuitRequests - re-up broadcast interval to 30 (was @ 5 for debugging) - add debug output (logstashd -d ...)
This commit is contained in:
parent
e8159ef438
commit
d4459ebf46
9 changed files with 180 additions and 138 deletions
|
@ -1,50 +0,0 @@
|
|||
#!/usr/bin/env ruby
# Launcher for the logstash parser server. Set PROFILE in the environment to
# run under ruby-prof and dump a flat profile on exit; set PROCS to fork
# multiple worker processes.

require 'rubygems'
require 'lib/net/servers/parser'

# Run one parser server against the given config file.
# Returns the process exit status (0 on success, 1 on usage error).
def main(args)
  if args.length != 1
    $stderr.puts "Usage: #{$0} configfile"
    return 1
  end

  Thread::abort_on_exception = true

  if ENV.has_key?("PROFILE")
    require 'ruby-prof'
    RubyProf.start
  end

  s = LogStash::Net::Servers::Parser.new(args[0])
  s.run

  if ENV.has_key?("PROFILE")
    result = RubyProf.stop
    printer = RubyProf::FlatPrinter.new(result)
    printer.print(STDOUT, 0)
  end

  return 0
end

# BUG FIX: the old `procs = ENV["PROCS"].to_i; procs ||= 1` defaulted to 0,
# not 1 — to_i never returns nil, so the ||= was dead code. Default before
# converting instead.
procs = (ENV["PROCS"] || 1).to_i

if procs > 1
  children = []
  1.upto(procs) do |c|
    pid = fork do
      exit main(ARGV)
    end
    children << pid
  end

  until children.empty?
    # Wait for ANY child. Waiting on children[0] specifically (as before)
    # left other dead workers unreaped (zombies) until that pid exited.
    pid = Process.wait
    children.delete(pid)
    $stderr.puts "pid #{pid} died"
  end
else
  exit main(ARGV)
end
|
121
bin/logstashd
Executable file
121
bin/logstashd
Executable file
|
@ -0,0 +1,121 @@
|
|||
#!/usr/bin/env ruby
# logstashd: single launcher that forks the indexer and parser servers.

# Make the project root (the parent of this bin/ directory) requireable.
$: << File.join(File.dirname(__FILE__), "..")

require 'rubygems'
require 'lib/net/servers/indexer'
require 'lib/net/servers/parser'
require 'logger'
require 'optparse'

# Program-wide identity and shared logger; forked children inherit $logger.
$progname = $0.split(File::SEPARATOR).last
$version = "0.3"
$logger = Logger.new(STDOUT)
$logger.level = Logger::INFO   # raised to DEBUG by the -d flag
$logger.progname = $progname
$logger.datetime_format = "%Y-%m-%d %H:%M:%S"
|
||||
|
||||
# Fork the configured servers (at most one indexer, N parsers), then
# supervise: block until every child has exited, logging each death.
#
# args - command-line arguments (see parse_options).
# Returns the process exit status (always 0).
def main(args)
  Thread::abort_on_exception = true

  options = parse_options(args)
  children = {}   # pid => :indexer | :parser, for the death log below

  if options[:indexer]
    pid = fork do
      indexer = LogStash::Net::Servers::Indexer.new(options[:config],
                                                    $logger)
      indexer.run
      exit(0)
    end
    $logger.info "starting indexer (pid #{pid})"
    children[pid] = :indexer
  end

  if options[:parsers] > 0
    1.upto(options[:parsers]) do |i|
      pid = fork do
        parser = LogStash::Net::Servers::Parser.new(options[:config],
                                                    $logger)
        parser.run
        exit(0)
      end
      $logger.info "starting parser #{i}/#{options[:parsers]} (pid #{pid})"
      children[pid] = :parser
    end
  end

  while children.keys.length > 0
    # BUG FIX: wait for ANY child. The old Process.wait(children.keys[0], 0)
    # reaped only that one pid, so any other child that died first became a
    # zombie and its death went unlogged until the watched pid exited.
    pid = Process.wait
    $logger.warn "pid #{pid} died (#{children[pid]})"
    children.delete(pid)
  end

  return 0
end
|
||||
|
||||
# Parse command-line arguments for logstashd.
#
# args - the argument array to parse (consumed destructively).
#
# Returns an options hash:
#   :indexer   - run the indexer child? (default true; -I disables)
#   :parsers   - number of parser children (default 1; -p N sets, -P zeroes)
#   :parserset - guard so -p and -P may only be given once between them
#   :config    - path to the config file (the one required positional arg)
#
# Exits the process on usage errors (status 1) or --help (status 0).
def parse_options(args)
  options = {:indexer => true,
             :parsers => 1,
             :parserset => false,
             :config => nil,
  }

  opts = OptionParser.new do |opts|
    opts.banner = "Usage: logstashd [options] configfile"
    opts.version = $version

    opts.on("-d", "--debug", "Enable debug output") do |x|
      $logger.level = Logger::DEBUG
    end

    opts.on("-I", "--disable-indexer",
            "Disable indexer (default enabled)") do |x|
      options[:indexer] = false
    end

    opts.on("-p", "--parsers COUNT", Integer,
            "Number of parsers to run (default 1)") do |x|
      raise(ArgumentError, "parser count must be >=0") if x < 0
      if options[:parserset]
        $stderr.puts "can only specify -p N or -P once"
        exit(1)
      end
      options[:parsers] = x
      options[:parserset] = true
    end

    opts.on("-P", "--disable-parser", "Disable parser") do |x|
      if options[:parserset]
        $stderr.puts "can only specify -p N or -P once"
        exit(1)
      end
      options[:parsers] = 0
      options[:parserset] = true
    end

    opts.on("-h", "--help", "Show this help message") do |x|
      puts opts
      exit(0)
    end
  end

  begin
    opts.parse!(args)
  rescue
    $stderr.puts "#{$progname}: #{$!}"
    $stderr.puts opts
    exit(1)
  end

  # BUG FIX: this previously tested ARGV.length, which only worked when the
  # caller happened to pass ARGV itself. Check the args we were given —
  # after parse! they should hold exactly the config file.
  if args.length != 1
    $stderr.puts "#{$progname}: must specify exactly one config file"
    $stderr.puts opts
    exit(1)
  end
  options[:config] = args.shift

  return options
end
|
||||
|
||||
# Entry point: propagate main's return value as the process exit status.
exit main(ARGV)
|
|
@ -1,30 +0,0 @@
|
|||
#!/usr/bin/env ruby
# Launcher for the logstash indexer server. Set PROFILE in the environment
# to run under ruby-prof and dump a flat profile on exit.

require 'rubygems'
require 'lib/net/servers/indexer'

# Run one indexer server against the given config file.
# Returns the process exit status (0 on success, 1 on usage error).
def main(args)
  if args.length != 1
    $stderr.puts "Usage: #{$0} configfile"
    return 1
  end
  Thread::abort_on_exception = true

  if ENV.has_key?("PROFILE")
    require 'ruby-prof'
    RubyProf.start
  end

  s = LogStash::Net::Servers::Indexer.new(args[0])
  s.run

  # Only reached once s.run returns; print the profile if we collected one.
  if ENV.has_key?("PROFILE")
    result = RubyProf.stop
    printer = RubyProf::FlatPrinter.new(result)
    printer.print(STDOUT, 0)
  end

  return 0
end

exit main(ARGV)
|
|
@ -1,6 +1,6 @@
|
|||
#!/usr/bin/ruby
|
||||
#
|
||||
require 'rubygems'
|
||||
require "rubygems"
|
||||
require "socket"
|
||||
require "lib/net/message"
|
||||
require "lib/net/client"
|
||||
|
@ -25,15 +25,14 @@ class SearchClient < LogStash::Net::MessageClient
|
|||
attr_reader :responding
|
||||
attr_reader :results
|
||||
|
||||
def initialize(opts={})
|
||||
#@log_type = opts[:log_type]
|
||||
#@query = opts[:query]
|
||||
def initialize(config_file)
|
||||
@indexers = Array.new
|
||||
@responding = Array.new
|
||||
@hits = 0
|
||||
@results = []
|
||||
@result_mutex = Mutex.new
|
||||
super(opts)
|
||||
config = YAML::load(File.open(config_file).read)
|
||||
super(config, "search")
|
||||
start
|
||||
end
|
||||
|
||||
|
@ -87,8 +86,11 @@ class SearchClient < LogStash::Net::MessageClient
|
|||
end
|
||||
|
||||
def main(args)
|
||||
client = SearchClient.new()
|
||||
client.search(args[0], args[1])
|
||||
if ARGV.length != 3
|
||||
$stderr.puts "Usage: search configfile log_type query"
|
||||
end
|
||||
client = SearchClient.new(args[0])
|
||||
client.search(args[1], args[2])
|
||||
|
||||
# Wait for the client to decide it's done.
|
||||
client.run
|
||||
|
|
|
@ -1,11 +1,18 @@
|
|||
require 'rubygems'
|
||||
require 'lib/net/socket'
|
||||
require 'lib/net/messages/ping.rb'
|
||||
require 'logger'
|
||||
require 'stomp'
|
||||
require 'uuid'
|
||||
|
||||
module LogStash; module Net
  # Client-side message endpoint: a MessageSocket that builds its own
  # stdout Logger tagged with the caller-supplied program name.
  class MessageClient < MessageSocket
    # config   - connection configuration, passed through to MessageSocket
    # progname - label used as the logger's progname
    def initialize(config, progname)
      logger = Logger.new(STDOUT)
      logger.progname = progname
      logger.datetime_format = "%Y-%m-%d %H:%M:%S"
      super(config, logger)
    end
    # Nothing, yet.
  end # class MessageClient
end; end # module LogStash::Net
|
||||
|
|
|
@ -9,7 +9,6 @@ require 'lib/net/messages/logkeys'
|
|||
require 'lib/net/messages/logtypes'
|
||||
require 'lib/net/messages/search'
|
||||
require 'lib/net/messages/searchhits'
|
||||
require 'lib/net/messages/quit'
|
||||
require 'lib/net/messages/ping'
|
||||
require 'lib/config/indexer.rb'
|
||||
require 'ferret'
|
||||
|
@ -18,12 +17,15 @@ require 'pp'
|
|||
|
||||
module LogStash; module Net; module Servers
|
||||
class Indexer < LogStash::Net::MessageServer
|
||||
BROADCAST_INTERVAL = 5
|
||||
BROADCAST_INTERVAL = 30
|
||||
SYNC_DELAY = 10
|
||||
|
||||
def initialize(configfile)
|
||||
def initialize(configfile, logger)
|
||||
@logger = logger
|
||||
@logger.progname = "indexer"
|
||||
@config = LogStash::Config::IndexerConfig.new(configfile)
|
||||
super() # PASSARGS
|
||||
|
||||
super(@config, @logger)
|
||||
@indexes = Hash.new
|
||||
@lines = Hash.new { |h,k| h[k] = 0 }
|
||||
@indexcount = 0
|
||||
|
@ -35,11 +37,6 @@ module LogStash; module Net; module Servers
|
|||
@qps = Hash.new
|
||||
end
|
||||
|
||||
def QuitRequestHandler(request)
|
||||
$stderr.puts "Got quit message, exiting..."
|
||||
close
|
||||
end
|
||||
|
||||
def IndexEventRequestHandler(request)
|
||||
response = LogStash::Net::Messages::IndexEventResponse.new
|
||||
response.id = request.id
|
||||
|
@ -47,21 +44,17 @@ module LogStash; module Net; module Servers
|
|||
|
||||
if @indexcount % 100 == 0
|
||||
duration = (Time.now.to_f - @starttime.to_f)
|
||||
puts "rate: %.2f/sec" % (@indexcount / duration)
|
||||
@logger.debug "rate: %.2f/sec" % (@indexcount / duration)
|
||||
end
|
||||
|
||||
log_type = request.log_type
|
||||
|
||||
if not @indexes.member?(log_type)
|
||||
@indexes[log_type] = @config.logs[log_type].get_index
|
||||
end
|
||||
|
||||
#puts request.log_data.inspect
|
||||
#puts @indexes[log_type].class
|
||||
@indexes[log_type] ||= @config.logs[log_type].get_index
|
||||
@indexes[log_type] << request.log_data
|
||||
end
|
||||
|
||||
def PingRequestHandler(request)
|
||||
@logger.debug "received PingRequest (#{request.pingdata})"
|
||||
response = LogStash::Net::Messages::PingResponse.new
|
||||
response.id = request.id
|
||||
response.pingdata = request.pingdata
|
||||
|
@ -69,14 +62,18 @@ module LogStash; module Net; module Servers
|
|||
end
|
||||
|
||||
def LogTypesRequestHandler(request)
|
||||
@logger.debug "received LogTypesRequest"
|
||||
response = LogStash::Net::Messages::LogTypesResponse.new
|
||||
response.id = request.id
|
||||
response.types = @config.logs.types
|
||||
yield response
|
||||
end
|
||||
|
||||
def LogKeysRequestHandler(request)
|
||||
@logger.debug "received LogKeysRequest"
|
||||
reader, search, qp = get_ferret(request.log_type)
|
||||
response = LogStash::Net::Messages::LogKeysResponse.new
|
||||
response.id = request.id
|
||||
response.keys = reader.fields
|
||||
response.log_type = request.log_type
|
||||
yield response
|
||||
|
@ -95,13 +92,15 @@ module LogStash; module Net; module Servers
|
|||
end
|
||||
|
||||
def SearchRequestHandler(request)
|
||||
puts "Search for #{request.query.inspect} in #{request.log_type}"
|
||||
@logger.debug "received SearchRequest (#{request.query.inspect} in " \
|
||||
"#{request.log_type})"
|
||||
response = LogStash::Net::Messages::SearchResponse.new
|
||||
response.id = request.id
|
||||
response.indexer_id = @id
|
||||
|
||||
if @config.logs[request.log_type].nil?
|
||||
$stderr.puts "invalid log type: #{request.log_type}"
|
||||
@logger.warn "SearchRequest received for invalid log_type: " \
|
||||
"#{request.log_type}"
|
||||
response.results = []
|
||||
response.finished = true
|
||||
yield response
|
||||
|
@ -152,25 +151,22 @@ module LogStash; module Net; module Servers
|
|||
response.results = []
|
||||
response.finished = true
|
||||
yield response
|
||||
puts "Search done."
|
||||
end # def SearchRequestHandler
|
||||
|
||||
def SearchHitsRequestHandler(request)
|
||||
puts "Search for hits on #{request.query.inspect}"
|
||||
@logger.debug "received SearchHitsRequest (#{request.query.inspect} in " \
|
||||
"#{request.log_type})"
|
||||
response = LogStash::Net::Messages::SearchHitsResponse.new
|
||||
response.id = request.id
|
||||
if @config.logs[request.log_type].nil?
|
||||
puts "invalid log type: #{request.log_type}"
|
||||
@logger.warn "SearchHitsRequest received for invalid log_type: " \
|
||||
"#{request.log_type}"
|
||||
response.hits = 0
|
||||
yield response
|
||||
return
|
||||
end
|
||||
|
||||
reader = Ferret::Index::IndexReader.new(@config.logs[request.log_type].index_dir)
|
||||
search = Ferret::Search::Searcher.new(reader)
|
||||
qp = Ferret::QueryParser.new(:fields => reader.fields,
|
||||
:tokenized_fields => reader.tokenized_fields,
|
||||
:or_default => false)
|
||||
reader, search, qp = get_ferret(request.log_type)
|
||||
query = qp.parse(request.query)
|
||||
offset = (request.offset or 0)
|
||||
|
||||
|
@ -182,12 +178,14 @@ module LogStash; module Net; module Servers
|
|||
end # def SearchHitsRequestHandler
|
||||
|
||||
def BroadcastMessageHandler(request)
|
||||
@logger.debug "received BroadcastMessage (from #{request.queue})"
|
||||
@indexers_mutex.synchronize do
|
||||
@indexers[request.queue] = Time.now
|
||||
end
|
||||
end
|
||||
|
||||
def DirectoryRequestHandler(request)
|
||||
@logger.debug "received DirectoryRequest"
|
||||
response = LogStash::Net::Messages::DirectoryResponse.new
|
||||
response.id = request.id
|
||||
response.indexers = @indexers.keys
|
||||
|
@ -210,7 +208,6 @@ module LogStash; module Net; module Servers
|
|||
# answering directory requests.
|
||||
|
||||
sleep(BROADCAST_INTERVAL + 5)
|
||||
puts "Subscribing to logstash-directory"
|
||||
subscribe("logstash-directory")
|
||||
end
|
||||
super
|
||||
|
@ -222,9 +219,9 @@ module LogStash; module Net; module Servers
|
|||
if Time.now > synctime
|
||||
@indexes.each do |log_type, index|
|
||||
# TODO: only run flush if we need to
|
||||
puts "Time's up. Syncing #{log_type}"
|
||||
@logger.debug "Forcing a sync of #{log_type}"
|
||||
index.flush
|
||||
break;
|
||||
break
|
||||
end
|
||||
|
||||
synctime = Time.now + SYNC_DELAY
|
||||
|
@ -243,7 +240,7 @@ module LogStash; module Net; module Servers
|
|||
cutoff = Time.now - (BROADCAST_INTERVAL * 2)
|
||||
@indexers.each do |queue, heartbeat|
|
||||
next if heartbeat > cutoff
|
||||
$stderr.puts "dropping indexer #{queue}, last heartbeat at " \
|
||||
@logger.warn "dropping indexer #{queue}, last heartbeat at " \
|
||||
"#{Time.at(heartbeat)}"
|
||||
@indexers.delete(queue)
|
||||
end
|
||||
|
|
|
@ -5,7 +5,6 @@ require 'lib/net/message'
|
|||
require 'lib/net/messages/indexevent'
|
||||
require 'lib/net/messages/search'
|
||||
require 'lib/net/messages/searchhits'
|
||||
require 'lib/net/messages/quit'
|
||||
require 'lib/net/messages/ping'
|
||||
require 'lib/config/indexer.rb'
|
||||
require 'ferret'
|
||||
|
@ -16,34 +15,32 @@ module LogStash; module Net; module Servers
|
|||
class Parser < LogStash::Net::MessageServer
|
||||
SYNCDELAY = 10
|
||||
|
||||
def initialize(configfile)
|
||||
def initialize(configfile, logger)
|
||||
@config = LogStash::Config::IndexerConfig.new(configfile)
|
||||
super()
|
||||
@indexes = Hash.new
|
||||
@logger = logger
|
||||
@logger.progname = "parser"
|
||||
super(@config, @logger)
|
||||
@lines = Hash.new { |h,k| h[k] = 0 }
|
||||
@indexcount = 0
|
||||
@starttime = Time.now
|
||||
end
|
||||
|
||||
def QuitRequestHandler(request)
|
||||
puts "Got quit message, exiting..."
|
||||
close
|
||||
end
|
||||
|
||||
def IndexEventRequestHandler(request)
|
||||
@logger.debug "received IndexEventRequest (for type " \
|
||||
"#{request.log_type}): #{request.log_data}"
|
||||
response = LogStash::Net::Messages::IndexEventResponse.new
|
||||
response.id = request.id
|
||||
@indexcount += 1
|
||||
|
||||
if @indexcount % 100 == 0
|
||||
duration = (Time.now.to_f - @starttime.to_f)
|
||||
puts "rate: %.2f/sec" % (@indexcount / duration)
|
||||
@logger.debug "rate: %.2f/sec" % (@indexcount / duration)
|
||||
end
|
||||
|
||||
log_type = request.log_type
|
||||
entry = @config.logs[log_type].parse_entry(request.log_data)
|
||||
if !entry
|
||||
puts "Failed parsing line: #{request.log_data}"
|
||||
@logger.warn "Failed parsing line: #{request.log_data}"
|
||||
response.code = 1
|
||||
response.error = "Entry was #{entry.inspect} (log parsing failed)"
|
||||
entry = {
|
||||
|
@ -53,20 +50,15 @@ module LogStash; module Net; module Servers
|
|||
else
|
||||
response.code = 0
|
||||
end
|
||||
|
||||
if not @indexes.member?(log_type)
|
||||
@indexes[log_type] = @config.logs[log_type].get_index
|
||||
end
|
||||
|
||||
entry["@LOG_TYPE"] = log_type
|
||||
|
||||
# Now we have a hash for the log data, send it to the indexer
|
||||
request.log_data = entry
|
||||
sendmsg("logstash-index", request)
|
||||
#@indexes[log_type] << entry
|
||||
end
|
||||
|
||||
def PingRequestHandler(request)
|
||||
@logger.debug "received PingRequest (#{request.pingdata})"
|
||||
response = LogStash::Net::Messages::PingResponse.new
|
||||
response.id = request.id
|
||||
response.pingdata = request.pingdata
|
||||
|
|
|
@ -38,8 +38,9 @@ module LogStash; module Net
|
|||
class MessageSocket
|
||||
MAXBUF = 30
|
||||
|
||||
def initialize(username='', password='', host='localhost', port=61613)
|
||||
def initialize(config, logger)
|
||||
@id = UUID::generate
|
||||
@config, @logger = config, logger
|
||||
@want_queues = []
|
||||
@queues = []
|
||||
@want_topics = []
|
||||
|
@ -54,8 +55,10 @@ module LogStash; module Net
|
|||
def start_amqp
|
||||
@amqpthread = Thread.new do
|
||||
# Create connection to AMQP, and in turn, the main EventMachine loop.
|
||||
# TODO: use @config
|
||||
AMQP.start(:host => "localhost") do
|
||||
@mq = MQ.new
|
||||
@logger.info "Subscribing to main queue #{@id}"
|
||||
mq_q = @mq.queue(@id, :auto_delete => true)
|
||||
mq_q.subscribe(:ack =>true) { |hdr, msg| handle_message(hdr, msg) }
|
||||
handle_new_subscriptions
|
||||
|
@ -126,7 +129,7 @@ module LogStash; module Net
|
|||
def handle_new_subscriptions
|
||||
todo = @want_queues - @queues
|
||||
todo.each do |queue|
|
||||
puts "Subscribing to queue #{queue}"
|
||||
@logger.info "Subscribing to queue #{queue}"
|
||||
mq_q = @mq.queue(queue)
|
||||
mq_q.subscribe(:ack =>true) { |hdr, msg| handle_message(hdr, msg) }
|
||||
@queues << queue
|
||||
|
@ -134,7 +137,7 @@ module LogStash; module Net
|
|||
|
||||
todo = @want_topics - @topics
|
||||
todo.each do |topic|
|
||||
puts "Subscribing to topic #{topic}"
|
||||
@logger.info "Subscribing to topic #{topic}"
|
||||
exchange = @mq.topic("amq.topic")
|
||||
mq_q = @mq.queue("#{@id}-#{topic}",
|
||||
:exclusive => true,
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
---
|
||||
server: "localhost:61613"
|
||||
mqhost: "localhost"
|
||||
sources:
|
||||
/var/log/messages: linux-syslog
|
||||
#/var/log/secure: linux-syslog
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue