Mirror of https://github.com/elastic/logstash.git (synced 2025-04-24 06:37:19 -04:00)
Refactor MetricStore#extract_metrics to handle every possible situation that makes sense
Fixes #5381
This commit is contained in:
parent 86f776fafa
commit 4c38288d31

2 changed files with 90 additions and 23 deletions
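For orientation before the diff: a sketch of the call shape this refactor is meant to support, assembled from the doc comment and specs below. The receiver name (store) and the metric values are illustrative, not part of the commit.

    store.extract_metrics(
      [:jvm, :process],                      # base path into the metric store
      :open_file_descriptors,                # plain scalar key
      [:cpu, [:total_in_millis, :percent]],  # one sub-path, two leaf keys
      [:pipelines, [:one, :two], :size]      # one leaf key under two sub-paths
    )
    # => {
    #   :open_file_descriptors => 123,
    #   :cpu => { :total_in_millis => 456, :percent => 789 },
    #   :pipelines => {
    #     :one => { :size => 90210 },
    #     :two => { :size => 8675309 }
    #   }
    # }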
@@ -118,6 +118,7 @@ module LogStash module Instrument
     #   [:jvm, :process],
     #   :open_file_descriptors,
     #   [:cpu, [:total_in_millis, :percent]]
+    #   [:pipelines, [:one, :two], :size]
     # )
     #
     # Returns:
@@ -125,33 +126,38 @@ module LogStash module Instrument
     # {
     #   :open_file_descriptors => 123
     #   :cpu => { :total_in_millis => 456, :percent => 789 }
     #   :pipelines => {
     #     :one => {:size => 90210},
     #     :two => {:size => 8675309}
     #   }
     # }
     def extract_metrics(path, *keys)
-      metrics = get_shallow(*path)
-
       keys.reduce({}) do |acc,k|
-        # Get the value of this key, recursing as needed
-        # to reach deeply nested paths
-        v = if k.is_a?(Array)
-              # We have a nested hash, time to recurse
-              res = extract_metrics(path + k[0..-2], *k.last)
-              # We're only going one level deep into the array in this frame,
-              # so make the key that one. Otherwise we get the full path
-              # as an array as the key, which makes no sense.
-              k = k.first
-              res
-            else # Scalar value
-              metrics[k]
-            end
+        # Simplify 1-length keys
+        k = k.first if k.is_a?(Array) && k.size == 1

-        if v.is_a?(Hash)
-          # This is a nested structure, simple assignment
-          acc[k] = v
-        else
-          # This is a Metric object, so we need to extract its value.
-          # If the metric didn't exist it might be nil, but we still want its key
-          # to exist with a nil value.
-          acc[k] = v ? v.value : nil; acc
-        end
+        # If we have array values here we need to recurse.
+        # There are two levels of looping here: one for the paths we might pass in,
+        # one for the upcoming keys we might pass in.
+        if k.is_a?(Array)
+          # We need to build up future calls to extract_metrics,
+          # which means building up the path and keys arguments.
+          # We need a nested loop here to execute all permutations of these, in case we hit
+          # something like [[:a,:b],[:c,:d]], which produces 4 different metrics.
+          next_paths = Array(k.first)
+          next_keys = Array(k[1])
+          rest = k[2..-1]
+          next_paths.each do |next_path|
+            # If there is already a hash at this location, use it so we don't overwrite it
+            np_hash = acc[next_path] || {}
+
+            acc[next_path] = next_keys.reduce(np_hash) do |a,next_key|
+              a.merge! extract_metrics(path + [next_path], [next_key, *rest])
+            end
+          end
+        else # Scalar value
+          res = get_shallow(*path)[k]
+          acc[k] = res ? res.value : nil
+        end

         acc
       end
     end
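To make the recursion concrete: when a key is itself an array of paths, next_paths fans out and the method recurses once per path. A minimal walk-through, with values taken from the spec assertions below:

    r = subject.extract_metrics(
      [:node, :sashimi, :pipelines],
      [[:pipeline01, :pipeline02], :processed_events_out]
    )
    # Inside the Array branch: next_paths = [:pipeline01, :pipeline02],
    # next_keys = [:processed_events_out], rest = [] -- one recursive call per path.
    # => { :pipeline01 => { :processed_events_out => 1 },
    #      :pipeline02 => { :processed_events_out => 1 } }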
@@ -142,6 +142,67 @@ describe LogStash::Instrument::MetricStore do
     end
   end
 
+  describe "get_shallow" do
+    it "should retrieve a path as a single value" do
+      r = subject.get_shallow(:node, :sashimi, :pipelines, :pipeline01, :processed_events_in)
+      expect(r.value).to eql(1)
+    end
+  end
+
+  describe "extract_metrics" do
+    it "should retrieve non-nested values correctly" do
+      r = subject.extract_metrics(
+        [:node, :sashimi, :pipelines, :pipeline01],
+        :processed_events_in,
+        :processed_events_out,
+      )
+      expect(r[:processed_events_in]).to eql(1)
+      expect(r[:processed_events_out]).to eql(1)
+    end
+
+    it "should retrieve nested values correctly alongside non-nested ones" do
+      r = subject.extract_metrics(
+        [:node, :sashimi, :pipelines, :pipeline01],
+        :processed_events_in,
+        [:plugins, :"logstash-output-elasticsearch", :event_in]
+      )
+      expect(r[:processed_events_in]).to eql(1)
+      expect(r[:plugins][:"logstash-output-elasticsearch"][:event_in]).to eql(1)
+    end
+
+    it "should retrieve multiple nested keys at a given location" do
+      r = subject.extract_metrics(
+        [:node, :sashimi, :pipelines],
+        [:pipeline01, [:processed_events_in, :processed_events_out]]
+      )
+
+      expect(r[:pipeline01][:processed_events_in]).to eql(1)
+      expect(r[:pipeline01][:processed_events_out]).to eql(1)
+    end
+
+    it "should retrieve a single key nested in multiple places" do
+      r = subject.extract_metrics(
+        [:node, :sashimi, :pipelines],
+        [[:pipeline01, :pipeline02], :processed_events_out]
+      )
+
+      expect(r[:pipeline01][:processed_events_out]).to eql(1)
+      expect(r[:pipeline02][:processed_events_out]).to eql(1)
+    end
+
+    it "should handle overlapping paths" do
+      r = subject.extract_metrics(
+        [:node, :sashimi, :pipelines],
+        [:pipeline01, :processed_events_in],
+        [[:pipeline01, :pipeline02], :processed_events_out]
+      )
+
+      expect(r[:pipeline01][:processed_events_in]).to eql(1)
+      expect(r[:pipeline01][:processed_events_out]).to eql(1)
+      expect(r[:pipeline02][:processed_events_out]).to eql(1)
+    end
+  end
+
   describe "#each" do
     it "retrieves all the metric" do
       expect(subject.each.size).to eq(metric_events.size)
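The full permutation case mentioned in the implementation comment ([[:a,:b],[:c,:d]]) is not exercised by these specs; a hypothetical sketch of its shape, with made-up namespaces and values:

    store.extract_metrics(path, [[:a, :b], [:c, :d]])
    # next_paths = [:a, :b], next_keys = [:c, :d] -- 2 x 2 = 4 leaf metrics:
    # => { :a => { :c => 1, :d => 2 },
    #      :b => { :c => 3, :d => 4 } }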