Mirror of https://github.com/elastic/logstash.git
Add GC stats to the API
This PR introduces GC stats in the form of `collection_time` and `collection_count`: it takes the configured garbage collectors and categorizes them into old and young. Example output:

```
gc: {
  collectors: {
    young: {
      collection_time_in_millis: 22101,
      collection_count: 5452
    },
    old: {
      collection_time_in_millis: 57,
      collection_count: 1
    }
  }
}
```

Fixes: #5730
Fixes #5788
This commit is contained in:
parent: cf3a424609
commit: 5d4a3aeeb6

5 changed files with 99 additions and 22 deletions
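Once a Logstash instance with this change is running, the new section can be read back over the monitoring API. The sketch below is illustrative only: it assumes the HTTP API listens on the default `localhost:9600` and serves node stats under `/_node/stats`, both of which depend on version and configuration and are not part of this diff.

```ruby
# Illustrative only: fetch node stats and print the gc section added by this
# commit. Host, port, and endpoint path are assumptions, not part of the diff.
require "net/http"
require "json"

stats = JSON.parse(Net::HTTP.get(URI("http://localhost:9600/_node/stats")))
puts JSON.pretty_generate(stats["jvm"]["gc"]["collectors"])
```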
@@ -14,7 +14,8 @@ module LogStash
          :count,
          :peak_count
        ),
-       :mem => memory
+       :mem => memory,
+       :gc => gc
      }
    end

@@ -59,6 +60,10 @@ module LogStash
        }
      end

+    def gc
+      service.get_shallow(:jvm, :gc)
+    end
+
     def hot_threads(options={})
       HotThreadsReport.new(self, options)
     end
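The new `gc` reader just pulls the `[:jvm, :gc]` subtree out of the metric store. Based on the gauge paths written by the poller further down and the example output in the commit message, the returned structure should look roughly like the sketch below; in the real store the leaf values are metric objects rather than plain integers.

```ruby
# Rough sketch of what service.get_shallow(:jvm, :gc) is expected to resolve to;
# the numbers are taken from the commit message example.
{
  :collectors => {
    :young => { :collection_count => 5452, :collection_time_in_millis => 22101 },
    :old   => { :collection_count => 1,    :collection_time_in_millis => 57 }
  }
}
```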
@@ -12,7 +12,7 @@ module LogStash
        payload = {
          :jvm => jvm_payload,
          :process => process_payload,
-         :pipeline => pipeline_payload
+         :pipeline => pipeline_payload,
        }
        respond_with(payload, {:filter => params["filter"]})
      end
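`respond_with` receives the full payload plus an optional `:filter` taken from the request parameters. That helper is not part of this diff; the snippet below is only a hypothetical illustration of the general idea of narrowing a payload to requested top-level sections, not Logstash's actual implementation.

```ruby
# Hypothetical illustration of filtering a stats payload by top-level key.
def filter_payload(payload, filter)
  return payload if filter.nil? || filter.strip.empty?
  wanted = filter.split(",").map { |key| key.strip.to_sym }
  payload.select { |key, _| wanted.include?(key) }
end

filter_payload({ :jvm => {}, :process => {}, :pipeline => {} }, "jvm,process")
# => { :jvm => {}, :process => {} }
```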
@@ -1,18 +1,36 @@
 # encoding: utf-8
 require "logstash/instrument/periodic_poller/base"
-require 'jrmonitor'
+require "jrmonitor"
+require "set"

 java_import 'java.lang.management.ManagementFactory'
 java_import 'java.lang.management.OperatingSystemMXBean'
 java_import 'java.lang.management.GarbageCollectorMXBean'
 java_import 'com.sun.management.UnixOperatingSystemMXBean'
 java_import 'javax.management.MBeanServer'
 java_import 'javax.management.ObjectName'
 java_import 'javax.management.AttributeList'
 java_import 'javax.naming.directory.Attribute'

 module LogStash module Instrument module PeriodicPoller
   class JVM < Base
+    class GarbageCollectorName
+      YOUNG_GC_NAMES = Set.new(["Copy", "PS Scavenge", "ParNew", "G1 Young Generation"])
+      OLD_GC_NAMES = Set.new(["MarkSweepCompact", "PS MarkSweep", "ConcurrentMarkSweep", "G1 Old Generation"])
+
+      YOUNG = :young
+      OLD = :old
+
+      def self.get(gc_name)
+        if YOUNG_GC_NAMES.include?(gc_name)
+          YOUNG
+        elsif(OLD_GC_NAMES.include?(gc_name))
+          OLD
+        end
+      end
+    end
+
     attr_reader :metric
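The mapping is purely name-based: well-known young-generation collectors resolve to `:young`, old-generation ones to `:old`, and anything unrecognized falls through to `nil`, which the poller logs as an error in the next hunk. A small illustration of the expected behaviour, assuming the class is loadable via the poller's require path:

```ruby
# Illustration of GarbageCollectorName.get; the require path is an assumption.
require "logstash/instrument/periodic_poller/jvm"

mapper = LogStash::Instrument::PeriodicPoller::JVM::GarbageCollectorName
mapper.get("PS Scavenge")         # => :young
mapper.get("ConcurrentMarkSweep") # => :old
mapper.get("Shenandoah")          # => nil (not in either list at this point)
```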
@@ -22,31 +40,46 @@ module LogStash module Instrument module PeriodicPoller
     end

     def collect
       raw = JRMonitor.memory.generate
       collect_heap_metrics(raw)
       collect_non_heap_metrics(raw)
       collect_pools_metrics(raw)
       collect_threads_metrics
       collect_process_metrics
+      collect_gc_stats
     end

     private

+    def collect_gc_stats
+      garbage_collectors = ManagementFactory.getGarbageCollectorMXBeans()
+
+      garbage_collectors.each do |collector|
+        name = GarbageCollectorName.get(collector.getName())
+        if name.nil?
+          logger.error("Unknown garbage collector name", :name => name)
+        else
+          metric.gauge([:jvm, :gc, :collectors, name], :collection_count, collector.getCollectionCount())
+          metric.gauge([:jvm, :gc, :collectors, name], :collection_time_in_millis, collector.getCollectionTime())
+        end
+      end
+    end
+
     def collect_threads_metrics
       threads = JRMonitor.threads.generate

       current = threads.count
       if @peak_threads.nil? || @peak_threads < current
         @peak_threads = current
       end

       metric.gauge([:jvm, :threads], :count, threads.count)
       metric.gauge([:jvm, :threads], :peak_count, @peak_threads)
     end

     def collect_process_metrics
       process_metrics = JRMonitor.process.generate

       path = [:jvm, :process]
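`collect_gc_stats` leans entirely on the standard JMX garbage-collector MXBeans, which JRuby can call directly. A standalone sketch of that underlying call, runnable in a plain JRuby session without Logstash:

```ruby
# Standalone JRuby sketch of the JMX call used above (JRuby only).
java_import "java.lang.management.ManagementFactory"

ManagementFactory.getGarbageCollectorMXBeans.each do |collector|
  puts format("%s: collections=%d, time_ms=%d",
              collector.getName,
              collector.getCollectionCount,
              collector.getCollectionTime)
end
```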
@@ -91,6 +124,7 @@ module LogStash module Instrument module PeriodicPoller
       end
     end

     def build_pools_metrics(data)
       heap = data["heap"]
       old = {}
@@ -129,9 +163,8 @@ module LogStash module Instrument module PeriodicPoller
         :committed_in_bytes => 0,
         :max_in_bytes => 0,
         :peak_used_in_bytes => 0,
-        :peak_max_in_bytes => 0
         :peak_max_in_bytes => 0
       }
     end

   end
 end; end; end
@@ -16,6 +16,18 @@ describe LogStash::Api::Modules::NodeStats do
         "count"=>Numeric,
         "peak_count"=>Numeric
       },
+      "gc" => {
+        "collectors" => {
+          "young" => {
+            "collection_count" => Numeric,
+            "collection_time_in_millis" => Numeric
+          },
+          "old" => {
+            "collection_count" => Numeric,
+            "collection_time_in_millis" => Numeric
+          }
+        }
+      },
       "mem" => {
         "heap_used_in_bytes" => Numeric,
         "heap_used_percent" => Numeric,
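The spec's expected hash pairs each JSON key with the Ruby class its value must be an instance of. The snippet below is only a sketch of that kind of structural check, not the matcher the spec actually uses:

```ruby
# Sketch of the structural check implied by the spec's expected-value hash:
# every leaf class must match the corresponding value in the payload.
def matches_schema?(schema, payload)
  schema.all? do |key, expected|
    value = payload[key]
    if expected.is_a?(Hash)
      value.is_a?(Hash) && matches_schema?(expected, value)
    else
      value.is_a?(expected)
    end
  end
end

schema  = { "gc" => { "collectors" => { "young" => { "collection_count" => Numeric } } } }
payload = { "gc" => { "collectors" => { "young" => { "collection_count" => 5452 } } } }
matches_schema?(schema, payload) # => true
```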
@@ -3,18 +3,41 @@ require "spec_helper"
 require "logstash/instrument/periodic_poller/jvm"
 require "logstash/instrument/collector"

+describe LogStash::Instrument::PeriodicPoller::JVM::GarbageCollectorName do
+  subject { LogStash::Instrument::PeriodicPoller::JVM::GarbageCollectorName }
+
+  context "when the gc is of young type" do
+    LogStash::Instrument::PeriodicPoller::JVM::GarbageCollectorName::YOUNG_GC_NAMES.each do |name|
+      it "returns young for #{name}" do
+        expect(subject.get(name)).to eq(:young)
+      end
+    end
+  end
+
+  context "when the gc is of old type" do
+    LogStash::Instrument::PeriodicPoller::JVM::GarbageCollectorName::OLD_GC_NAMES.each do |name|
+      it "returns old for #{name}" do
+        expect(subject.get(name)).to eq(:old)
+      end
+    end
+  end
+
+  it "returns `nil` when we dont know the gc name" do
+    expect(subject.get("UNKNOWN GC")).to be_nil
+  end
+end
+
 describe LogStash::Instrument::PeriodicPoller::JVM do
   let(:metric) { LogStash::Instrument::Metric.new(LogStash::Instrument::Collector.new) }
   let(:options) { {} }
   subject(:jvm) { described_class.new(metric, options) }

   it "should initialize cleanly" do
     expect { jvm }.not_to raise_error
   end

   describe "collections" do
     subject(:collection) { jvm.collect }

     it "should run cleanly" do
       expect { collection }.not_to raise_error
     end
@@ -22,21 +45,25 @@ describe LogStash::Instrument::PeriodicPoller::JVM do
   describe "metrics" do
     before(:each) { jvm.collect }
     let(:snapshot_store) { metric.collector.snapshot_metric.metric_store }
-    subject(:jvm_metrics) { snapshot_store.get_shallow(:jvm, :process) }
+    subject(:jvm_metrics) { snapshot_store.get_shallow(:jvm) }

     # Make looking up metric paths easy when given varargs of keys
     # e.g. mval(:parent, :child)
     def mval(*metric_path)
       metric_path.reduce(jvm_metrics) {|acc,k| acc[k]}.value
     end
-    end

     [
-      :max_file_descriptors,
-      :open_file_descriptors,
-      :peak_open_file_descriptors,
-      [:mem, :total_virtual_in_bytes],
-      [:cpu, :total_in_millis],
-      [:cpu, :percent]
+      [:process, :max_file_descriptors],
+      [:process, :open_file_descriptors],
+      [:process, :peak_open_file_descriptors],
+      [:process, :mem, :total_virtual_in_bytes],
+      [:process, :cpu, :total_in_millis],
+      [:process, :cpu, :percent],
+      [:gc, :collectors, :young, :collection_count],
+      [:gc, :collectors, :young, :collection_time_in_millis],
+      [:gc, :collectors, :old, :collection_count],
+      [:gc, :collectors, :old, :collection_time_in_millis]
     ].each do |path|
       path = Array(path)
       it "should have a value for #{path} that is Numeric" do
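The `mval` helper walks the nested snapshot with `reduce` and then unwraps the metric object at the leaf. A plain-Ruby illustration of that lookup, with integers standing in for the store's metric objects (so the trailing `.value` call is omitted):

```ruby
# Plain-Ruby illustration of the reduce-based path lookup used by mval.
jvm_metrics = {
  :gc => { :collectors => { :young => { :collection_count => 5452 } } }
}

path = [:gc, :collectors, :young, :collection_count]
path.reduce(jvm_metrics) { |acc, key| acc[key] }
# => 5452
```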