mirror of
https://github.com/elastic/logstash.git
synced 2025-04-24 14:47:19 -04:00
- Update the web interface to use the new search api
- Add offset and total attributes to LogStash::Search::Result - Added -b/--backend flag to logstash-web for specifying the url of the backend. Defaults to elasticsearch://localhost:9200 Still missing facets/graphs, but it's progress.
This commit is contained in:
parent
7b0aef841b
commit
31644cb5a2
5 changed files with 109 additions and 41 deletions
|
@ -9,7 +9,7 @@ require "logstash/search/result"
|
|||
|
||||
class LogStash::Search::ElasticSearch < LogStash::Search::Base
|
||||
public
|
||||
def initialize(settings)
|
||||
def initialize(settings={})
|
||||
@host = (settings[:host] || "localhost")
|
||||
@port = (settings[:port] || 9200).to_i
|
||||
@logger = LogStash::Logger.new(STDOUT)
|
||||
|
@ -17,6 +17,7 @@ class LogStash::Search::ElasticSearch < LogStash::Search::Base
|
|||
|
||||
public
|
||||
def search(query)
|
||||
raise "No block given for search call." if !block_given?
|
||||
if query.is_a?(String)
|
||||
query = LogStash::Search::Query.parse(query)
|
||||
end
|
||||
|
@ -49,7 +50,7 @@ class LogStash::Search::ElasticSearch < LogStash::Search::Base
|
|||
|
||||
@logger.info(["Got search results",
|
||||
{ :query => query.query_string, :duration => data["duration"],
|
||||
:data => data }])
|
||||
:results => data["hits"]["hits"].size }])
|
||||
if req.response_header.status != 200
|
||||
result.error_message = data["error"] || req.inspect
|
||||
@error = data["error"] || req.inspect
|
||||
|
@ -59,6 +60,11 @@ class LogStash::Search::ElasticSearch < LogStash::Search::Base
|
|||
data["hits"]["hits"].each do |hit|
|
||||
result.events << LogStash::Event.new(hit["_source"])
|
||||
end
|
||||
|
||||
# Total hits this search could find if not limited
|
||||
result.total = data["hits"]["total"]
|
||||
result.offset = query.offset
|
||||
|
||||
yield result
|
||||
end
|
||||
|
||||
|
|
|
@ -2,9 +2,19 @@ require "logstash/namespace"
|
|||
require "logstash/logging"
|
||||
|
||||
class LogStash::Search::Result
|
||||
# Array of LogStash::Event of results
|
||||
attr_accessor :events
|
||||
|
||||
# How long this query took, in seconds (or fractions of).
|
||||
attr_accessor :duration
|
||||
|
||||
# Offset in search
|
||||
attr_accessor :offset
|
||||
|
||||
# Total records matched by this query, regardless of offset/count in query.
|
||||
attr_accessor :total
|
||||
|
||||
# Error message, if any.
|
||||
attr_accessor :error_message
|
||||
|
||||
def initialize(settings={})
|
||||
|
|
|
@ -155,8 +155,8 @@
|
|||
|
||||
/* TODO(sissel): recurse through the data */
|
||||
var fields = new Array();
|
||||
for (var i in data._source["@fields"]) {
|
||||
var value = data._source["@fields"][i]
|
||||
for (var i in data["@fields"]) {
|
||||
var value = data["@fields"][i]
|
||||
if (/^[, ]*$/.test(value)) {
|
||||
continue; /* Skip empty data fields */
|
||||
}
|
||||
|
@ -166,9 +166,9 @@
|
|||
fields.push( { type: "field", field: i, value: value })
|
||||
}
|
||||
|
||||
for (var i in data._source) {
|
||||
for (var i in data) {
|
||||
if (i == "@fields") continue;
|
||||
var value = data._source[i]
|
||||
var value = data[i]
|
||||
if (!(value instanceof Array)) {
|
||||
value = [value];
|
||||
}
|
||||
|
|
|
@ -6,13 +6,15 @@ $:.unshift(File.dirname(__FILE__))
|
|||
|
||||
require "eventmachine"
|
||||
require "json"
|
||||
require "lib/elasticsearch"
|
||||
require "logstash/search/elasticsearch"
|
||||
require "logstash/search/query"
|
||||
require "logstash/namespace"
|
||||
require "rack"
|
||||
require "rubygems"
|
||||
require "sinatra/async"
|
||||
|
||||
class EventMachine::ConnectionError < RuntimeError; end
|
||||
module LogStash::Web; end
|
||||
|
||||
class LogStash::Web::Server < Sinatra::Base
|
||||
register Sinatra::Async
|
||||
|
@ -20,7 +22,19 @@ class LogStash::Web::Server < Sinatra::Base
|
|||
set :logging, true
|
||||
set :public, "#{File.dirname(__FILE__)}/public"
|
||||
set :views, "#{File.dirname(__FILE__)}/views"
|
||||
elasticsearch = LogStash::Web::ElasticSearch.new
|
||||
|
||||
use Rack::CommonLogger
|
||||
#use Rack::ShowExceptions
|
||||
|
||||
def initialize(settings={})
|
||||
super
|
||||
# TODO(sissel): Support alternate backends
|
||||
backend_url = URI.parse(settings.backend_url)
|
||||
@backend = LogStash::Search::ElasticSearch.new(
|
||||
:host => backend_url.host,
|
||||
:port => backend_url.port
|
||||
)
|
||||
end
|
||||
|
||||
aget '/style.css' do
|
||||
headers "Content-Type" => "text/css; charset=utf8"
|
||||
|
@ -32,8 +46,11 @@ class LogStash::Web::Server < Sinatra::Base
|
|||
end # '/'
|
||||
|
||||
aget '/search' do
|
||||
result_callback = proc do
|
||||
result_callback = proc do |results|
|
||||
status 500 if @error
|
||||
@results = results
|
||||
|
||||
p :got => results
|
||||
|
||||
params[:format] ||= "html"
|
||||
case params[:format]
|
||||
|
@ -48,6 +65,7 @@ class LogStash::Web::Server < Sinatra::Base
|
|||
body erb :"search/results.txt", :layout => false
|
||||
when "json"
|
||||
headers({"Content-Type" => "text/plain" })
|
||||
# TODO(sissel): issue/30 - needs refactoring here.
|
||||
hits = @hits.collect { |h| h["_source"] }
|
||||
response = {
|
||||
"hits" => hits,
|
||||
|
@ -63,19 +81,26 @@ class LogStash::Web::Server < Sinatra::Base
|
|||
# have javascript enabled, we need to show the results in
|
||||
# case a user doesn't have javascript.
|
||||
if params[:q] and params[:q] != ""
|
||||
elasticsearch.search(params) do |results|
|
||||
@results = results
|
||||
@hits = (@results["hits"]["hits"] rescue [])
|
||||
query = LogStash::Search::Query.new(
|
||||
:query_string => params[:q],
|
||||
:offset => params[:offset],
|
||||
:count => params[:count]
|
||||
)
|
||||
|
||||
@backend.search(query) do |results|
|
||||
p :got => results
|
||||
begin
|
||||
result_callback.call
|
||||
result_callback.call results
|
||||
rescue => e
|
||||
puts e
|
||||
p :exception => e
|
||||
end
|
||||
end # elasticsearch.search
|
||||
end # @backend.search
|
||||
else
|
||||
#@error = "No query given."
|
||||
@hits = []
|
||||
result_callback.call
|
||||
results = LogStash::Search::Result.new(
|
||||
:events => [],
|
||||
:error_mesage => "No query given"
|
||||
)
|
||||
result_callback.call results
|
||||
end
|
||||
end # aget '/search'
|
||||
|
||||
|
@ -83,23 +108,34 @@ class LogStash::Web::Server < Sinatra::Base
|
|||
headers({"Content-Type" => "text/html" })
|
||||
count = params["count"] = (params["count"] or 50).to_i
|
||||
offset = params["offset"] = (params["offset"] or 0).to_i
|
||||
elasticsearch.search(params) do |results|
|
||||
|
||||
query = LogStash::Search::Query.new(
|
||||
:query_string => params[:q],
|
||||
:offset => offset,
|
||||
:count => count
|
||||
)
|
||||
|
||||
@backend.search(query) do |results|
|
||||
@results = results
|
||||
if @results.include?("error")
|
||||
if @results.error?
|
||||
body haml :"search/error", :layout => !request.xhr?
|
||||
next
|
||||
end
|
||||
|
||||
@hits = (@results["hits"]["hits"] rescue [])
|
||||
@total = (@results["hits"]["total"] rescue 0)
|
||||
@graphpoints = []
|
||||
begin
|
||||
@results["facets"]["by_hour"]["entries"].each do |entry|
|
||||
@graphpoints << [entry["key"], entry["count"]]
|
||||
end
|
||||
rescue => e
|
||||
puts e
|
||||
end
|
||||
@events = @results.events
|
||||
@total = (@results.total rescue 0)
|
||||
count = @results.events.size
|
||||
|
||||
# TODO(sissel): move this to a facet query
|
||||
#@graphpoints = []
|
||||
#begin
|
||||
#@results["facets"]["by_hour"]["entries"].each do |entry|
|
||||
#@graphpoints << [entry["key"], entry["count"]]
|
||||
#end
|
||||
#rescue => e
|
||||
#p :exception => e
|
||||
#puts e.backtrace.join("\n")
|
||||
#end
|
||||
|
||||
if count and offset
|
||||
if @total > (count + offset)
|
||||
|
@ -132,16 +168,22 @@ class LogStash::Web::Server < Sinatra::Base
|
|||
end
|
||||
|
||||
body haml :"search/ajax", :layout => !request.xhr?
|
||||
end # elasticsearch.search
|
||||
end # @backend.search
|
||||
end # apost '/search/ajax'
|
||||
|
||||
aget '/*' do
|
||||
status 404 if @error
|
||||
body "Invalid path."
|
||||
end # aget /*
|
||||
end # class LogStash::Web::Server
|
||||
|
||||
require "optparse"
|
||||
Settings = Struct.new(:daemonize, :logfile, :address, :port)
|
||||
Settings = Struct.new(:daemonize, :logfile, :address, :port, :backend_url)
|
||||
settings = Settings.new
|
||||
|
||||
settings.address = "0.0.0.0"
|
||||
settings.port = 9292
|
||||
settings.address = "0.0.0.0"
|
||||
settings.port = 9292
|
||||
settings.backend_url = "elasticsearch://localhost:9200/"
|
||||
|
||||
progname = File.basename($0)
|
||||
|
||||
|
@ -163,6 +205,11 @@ opts = OptionParser.new do |opts|
|
|||
opts.on("-p", "--port PORT", "Port on which to start webserver. Default is 9292.") do |port|
|
||||
settings.port = port.to_i
|
||||
end
|
||||
|
||||
opts.on("-b", "--backend URL",
|
||||
"The backend URL to use. Default is elasticserach://localhost:9200/") do |url|
|
||||
settings.backend_url = url
|
||||
end
|
||||
end
|
||||
|
||||
opts.parse!
|
||||
|
@ -189,5 +236,10 @@ end
|
|||
Rack::Handler::Thin.run(
|
||||
Rack::CommonLogger.new( \
|
||||
Rack::ShowExceptions.new( \
|
||||
LogStash::Web::Server.new)),
|
||||
LogStash::Web::Server.new(settings))),
|
||||
:Port => settings.port, :Host => settings.address)
|
||||
#Rack::Handler::Thin.run(
|
||||
#LogStash::Web::Server.new(settings),
|
||||
#:Port => settings.port,
|
||||
#:Host => settings.address
|
||||
#)
|
||||
|
|
|
@ -12,7 +12,7 @@
|
|||
- if @total and @result_start and @result_end
|
||||
%small
|
||||
%strong
|
||||
Results #{@result_start} - #{@result_end} of #{@total}
|
||||
Results #{@result_start} - #{@result_end} of #{@results.total}
|
||||
|
|
||||
- if @first_href
|
||||
%a.pager{ :href => @first_href } first
|
||||
|
@ -29,7 +29,7 @@
|
|||
|
|
||||
%a.pager{ :href => @last_href }
|
||||
last
|
||||
- if @hits.length == 0
|
||||
- if @results.events.length == 0
|
||||
- if !params[:q]
|
||||
/ We default to a '+2 days' in the future to capture 'today at 00:00'
|
||||
/ plus tomorrow, inclusive, in case you are 23 hours behind the international
|
||||
|
@ -42,8 +42,8 @@
|
|||
%tr
|
||||
%th timestamp
|
||||
%th event
|
||||
- @hits.reverse.each do |hit|
|
||||
- @results.events.reverse.each do |event|
|
||||
%tr.event
|
||||
%td.timestamp&= hit["_source"]["@timestamp"]
|
||||
%td.message{ :"data-full" => hit.to_json }
|
||||
%pre&= hit["_source"]["@message"]
|
||||
%td.timestamp&= event.timestamp
|
||||
%td.message{ :"data-full" => event.to_json }
|
||||
%pre&= event.message
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue