Mirror of https://github.com/elastic/logstash.git
Fix search's graph

commit 2c299fae25 (parent 1cb56a94d4)
3 changed files with 54 additions and 35 deletions
@@ -15,17 +15,21 @@ class Search < Application
    params[:offset] = (params[:offset] ? params[:offset].to_i : 0) rescue 0
    params[:limit] = (params[:limit] ? params[:limit].to_i : 100) rescue 100

    q[:from] = params[:offset]
    q[:size] = params[:limit]
    q[:log_type] = params[:log_type]
    q[:base] = "logstash"
    q[:q] = params[:q]
    options = {}
    options[:from] = params[:offset]
    options[:size] = params[:limit]
    options[:sort] = "@DATE"
    q[:query_string] = {
      :default_field => "@LINE",
      :query => params[:q]
    }

    options[:query] = q

    search = ElasticSearch.new("localhost:9200")

    Timeout.timeout(10) do
      #@hits, @results = $search.search(params)
      results = search.query(q)
      results = search.query(options)
      @hits = results.hits
      @results = results.results
      @graphdata = _graphpoints(search, q)
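With this change the search action builds an Elasticsearch request body in the options hash instead of passing flat URI parameters. A rough sketch of what that hash ends up holding, assuming the default offset and limit and an illustrative search string (only the keys set by the new lines are shown):

# Sketch only; the values are illustrative, not part of the commit.
options = {
  :from => 0,                        # params[:offset] default
  :size => 100,                      # params[:limit] default
  :sort => "@DATE",
  :query => {
    :query_string => {
      :default_field => "@LINE",
      :query => "progname:etl-cron"  # params[:q], illustrative
    }
  }
}
# ElasticSearch#_get serializes this with to_json and sends it as the request body.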
@@ -47,28 +51,28 @@ class Search < Application
    @points = []
    # correct for timezone date offset
    Timeout.timeout(20) do
      queries = {}
      queries = []
      while starttime + day > curtime
        endtime = curtime + increment - 1
        querygen = "@DATE:[#{curtime} #{endtime}] AND (#{orig_query})"
        puts "Query: #{querygen}"
        queries[querygen] = {
          :time => curtime,
          :query => querygen,
        querygen = [query.clone]
        querygen << {
          :range => {
            "@DATE" => {
              :from => curtime,
              :to => endtime,
            }
          }
        }

        queries << { :bool => { :must => querygen } }
        curtime += increment
      end

      queries.each do |genquery, data|
        hitq = query.clone
        hitq[:q] = genquery
        count = search.count(hitq)
        queries[genquery][:hits] = count
      end

      @data = Hash.new
      queries.each do |query, entry|
        @data[entry[:time].to_i * 1000] = entry[:hits]
      queries.each do |genquery|
        count = search.count(genquery)
        puts count
        @data[genquery[:bool][:must][1][:range]["@DATE"][:from].to_i * 1000] = count
      end
      @data = @data.to_a
    end
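The new loop builds one bool/must query per time bucket (the user's query_string clause plus an @DATE range), counts the hits for each bucket, and keys @data by the bucket's start time multiplied by 1000 (epoch milliseconds) before converting it to an array of pairs for the graph. A sketch of the resulting structure, with illustrative timestamps and counts:

# Sketch only; the numbers are illustrative.
@data = {
  1272164175000 => 12,   # bucket starting at @DATE 1272164175 had 12 hits
  1272164235000 => 3,
}
@data = @data.to_a        # => [[1272164175000, 12], [1272164235000, 3]]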
@@ -39,7 +39,9 @@
<% end %>

<pre>
<%=h @results.collect { |v| v["_source"]["@LINE"] }.join("\n") %>
<% @results.each do |result| -%>
<%=h result["_source"]["@DATE"] %> | <%=h result["_source"]["@LINE"] %>
<% end -%>
</pre>

<script id="source">
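Instead of one joined block of @LINE values, the template now emits a date/line pair per hit, which renders roughly like this (values are illustrative):

1272164175 | Apr 25 03:36:15 frontend1 etl-cron[1234]: run finished
1272164235 | Apr 25 03:37:15 frontend1 etl-cron[1240]: run started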
@@ -2,6 +2,7 @@
require "rubygems"
require "uri"
require "json"
require "ap"
require "logger"
require "httpclient"

@@ -12,27 +13,27 @@ class ElasticSearch
    @logger = Logger.new(STDERR)
  end

  def _get(query, what)
    index = URI.escape("#{query[:base]}/#{query[:log_type]}")
    uri = "http://#{@host}/#{index}/_#{what}?"
    params = query.collect { |k,v| "#{URI.escape(k.to_s)}=#{URI.escape(v.to_s)}" }.join("&")
    uri += "#{params}"
  def _get(params, what, path = "")
    path.gsub!(/\/+$/, "")
    uri = URI.escape("http://#{@host}#{path}/_#{what}")
    @logger.info("URL for #{what}: #{uri}")
    response = @http.get(uri)
    @logger.info("Body: #{params.to_json}");
    # ElasticSearch uses "GET" with body, so we can't call .get() here.
    response = @http.request(:get, uri, query = nil, body = params.to_json)

    if response.status != 200
      p JSON.parse(response.content)
      ap JSON.parse(response.content)
      raise "Search failure (http code #{response.code})"
    end
    return JSON.parse(response.content)
  end

  def query(query)
    return ElasticSearch::SearchResults.new(_get(query, "search"))
  def query(query, path = "")
    return ElasticSearch::SearchResults.new(_get(query, "search", path))
  end # def query

  def count( query)
    return _get(query, "count")["count"]
  def count(query, path = "")
    return _get(query, "count", path)["count"]
  end
end
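The main change in _get is that the search/count parameters now travel as a JSON body on the GET request, using HTTPClient's generic request call instead of get (the same call shown in the diff above). A minimal standalone sketch of that pattern, with an illustrative host and query:

# Sketch only; host, path and query are illustrative.
require "rubygems"
require "json"
require "httpclient"

http = HTTPClient.new
body = {
  :query => {
    :query_string => { :default_field => "@LINE", :query => "progname:etl-cron" }
  }
}
# Positional arguments after the URI are the query params (none here) and the request body.
response = http.request(:get, "http://localhost:9200/_search", nil, body.to_json)
puts response.status
puts JSON.parse(response.content).inspect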
@@ -49,5 +50,17 @@ end
if __FILE__ == $0
  require "ap"
  es = ElasticSearch.new("localhost:9200")
  ap es.query(:base => "logstash", :log_type => "linux-syslog", :q => "progname:etl-cron").results
  #ap es.query( { :query => { :field => { :progname => "etl-cron"} } }).results
  #ap es.query( { :query => { :field => { :@DATE => 1272164175} } }).results
  ap es.query(
    { :query =>
      {"bool" => {"must" => [{"query_string" => {"query" => ARGV[0],"default_field" => "@LINE"}},{"range" => {"@DATE" => {"to" => Time.now.to_i,"from" => Time.now.to_i - 600}}}]}}
    }
  )
end
#:bool => { :must => [
#{ :range => { :@DATE => { :from => 1272164175, :to => 1272164176,} } },
##{ :field => { :progname => "etl-cron" } },
#{ :query_string => { :query => "progname:etl-cron", :default_field => "@LINE" } },
#] },
#}
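For readability, the dense one-line bool query in the demo has the same structure as this spread-out form (no change in content; ARGV[0] is the search string passed on the command line):

ap es.query(
  :query => {
    "bool" => {
      "must" => [
        { "query_string" => { "query" => ARGV[0], "default_field" => "@LINE" } },
        { "range" => { "@DATE" => { "from" => Time.now.to_i - 600, "to" => Time.now.to_i } } }
      ]
    }
  }
)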