Remove witness classes (#10240)

Dan Hermann 2019-01-01 15:03:56 -06:00 committed by GitHub
parent 5a0e9a1d2e
commit 134818e69e
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
29 changed files with 0 additions and 4243 deletions


@@ -1,105 +0,0 @@
package org.logstash.instrument.witness;
import com.fasterxml.jackson.core.JsonGenerator;
import org.logstash.instrument.metrics.Metric;
import org.logstash.instrument.metrics.gauge.GaugeMetric;
import org.logstash.instrument.metrics.gauge.RubyTimeStampGauge;
import java.io.IOException;
/**
* Similar to the {@link java.util.function.Consumer} functional interface, this is expected to operate via side effects. Differs from {@link java.util.function.Consumer} in that
* it is more strictly typed and allows for a checked {@link IOException}.
*
* @param <T> The type of {@link GaugeMetric} to serialize
*/
@FunctionalInterface
public interface MetricSerializer<T extends Metric<?>> {
/**
* Performs this operation on the given argument.
*
* @param t the input argument
* @throws IOException On failure to serialize
*/
void serialize(T t) throws IOException;
/**
* Helper class to create a functional fluent API.
* Usage example: {@code MetricSerializer.Get.numberSerializer(gen).serialize(metric);}
*/
class Get {
/**
* Proper way to serialize a {@link Number} type metric to JSON
*
* @param gen The {@link JsonGenerator} used to generate JSON
* @return the {@link MetricSerializer} which is the function used to serialize the metric
*/
public static MetricSerializer<Metric<Number>> numberSerializer(JsonGenerator gen) {
return m -> {
if (m != null) {
Number value = m.getValue();
gen.writeObjectField(m.getName(), value == null ? 0 : value);
}
};
}
/**
* Proper way to serialize a {@link Long} type metric to JSON
*
* @param gen The {@link JsonGenerator} used to generate JSON
* @return the {@link MetricSerializer} which is the function used to serialize the metric
*/
public static MetricSerializer<Metric<Long>> longSerializer(JsonGenerator gen) {
return m -> {
if (m != null) {
Long value = m.getValue();
gen.writeNumberField(m.getName(), value == null ? 0 : value);
}
};
}
/**
* Proper way to serialize a {@link Boolean} type metric to JSON
*
* @param gen The {@link JsonGenerator} used to generate JSON
* @return the {@link MetricSerializer} which is the function used to serialize the metric
*/
public static MetricSerializer<Metric<Boolean>> booleanSerializer(JsonGenerator gen) {
return m -> {
if (m != null) {
Boolean value = m.getValue();
gen.writeBooleanField(m.getName(), value == null ? false : value);
}
};
}
/**
* Proper way to serialize a {@link String} type metric to JSON
*
* @param gen The {@link JsonGenerator} used to generate JSON
* @return the {@link MetricSerializer} which is the function used to serialize the metric
*/
public static MetricSerializer<Metric<String>> stringSerializer(JsonGenerator gen) {
return m -> {
if (m != null) {
gen.writeStringField(m.getName(), m.getValue());
}
};
}
/**
* Proper way to serialize a {@link RubyTimeStampGauge} type metric to JSON, emitting a {@code null} JSON value if the timestamp is missing
*
* @param gen The {@link JsonGenerator} used to generate JSON
* @return the {@link MetricSerializer} which is the function used to serialize the metric
*/
public static MetricSerializer<RubyTimeStampGauge> timestampSerializer(JsonGenerator gen) {
return m -> {
if (m != null) {
gen.writeStringField(m.getName(), m.getValue() != null ? m.getValue().toString() : null);
}
};
}
}
}
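Below is a minimal usage sketch (not part of this commit; the example class name and gauge value are illustrative) showing how one of the serializers above is obtained and applied to a metric, assuming the NumberGauge class used elsewhere in this commit:

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import org.logstash.instrument.metrics.gauge.NumberGauge;
import org.logstash.instrument.witness.MetricSerializer;
import java.io.StringWriter;

public class MetricSerializerExample {
    public static void main(String[] args) throws Exception {
        NumberGauge batchSize = new NumberGauge("batch_size");
        batchSize.set(125L);
        StringWriter sw = new StringWriter();
        try (JsonGenerator gen = new JsonFactory().createGenerator(sw)) {
            gen.writeStartObject();
            // serialize() writes the metric's name/value pair as a field of the current JSON object
            MetricSerializer.Get.numberSerializer(gen).serialize(batchSize);
            gen.writeEndObject();
        }
        System.out.println(sw); // expected: {"batch_size":125}
    }
}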


@@ -1,45 +0,0 @@
package org.logstash.instrument.witness;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializerProvider;
import java.io.IOException;
import java.io.StringWriter;
/**
* A Witness that can be serialized as JSON. A Witness is an abstraction over the {@link org.logstash.instrument.metrics.Metric}s that watches/witnesses what is happening inside
* of Logstash.
*/
public interface SerializableWitness {
/**
* Generates the corresponding JSON for the witness.
*
* @param gen The {@link JsonGenerator} used to generate the JSON
* @param provider The {@link SerializerProvider} that may be used to assist with the JSON generation.
* @throws IOException if any errors occur in JSON generation
*/
void genJson(final JsonGenerator gen, SerializerProvider provider) throws IOException;
/**
* Helper method to return the Witness as a JSON string.
*
* @return A {@link String} whose content is the JSON representation for the witness.
* @throws IOException if any errors occur in JSON generation
*/
default String asJson() throws IOException {
JsonFactory jsonFactory = new JsonFactory();
try (StringWriter sw = new StringWriter();
JsonGenerator gen = jsonFactory.createGenerator(sw)) {
ObjectMapper mapper = new ObjectMapper(jsonFactory);
gen.writeStartObject();
genJson(gen, mapper.getSerializerProvider());
gen.writeEndObject();
gen.flush();
sw.flush();
return sw.toString();
}
}
}
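A hedged sketch (not from this commit; the witness class and field names are hypothetical) of what implementing the interface looks like, relying on the default asJson() to wrap the fields in a single JSON object:

import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.SerializerProvider;
import org.logstash.instrument.witness.SerializableWitness;
import java.io.IOException;

public class UptimeWitness implements SerializableWitness {
    private long uptimeMillis;

    public void uptime(long millis) {
        this.uptimeMillis = millis;
    }

    @Override
    public void genJson(JsonGenerator gen, SerializerProvider provider) throws IOException {
        // asJson() has already opened the surrounding object, so only fields are written here
        gen.writeNumberField("uptime_in_millis", uptimeMillis);
    }

    public static void main(String[] args) throws IOException {
        UptimeWitness witness = new UptimeWitness();
        witness.uptime(1000L);
        System.out.println(witness.asJson()); // {"uptime_in_millis":1000}
    }
}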


@@ -1,166 +0,0 @@
package org.logstash.instrument.witness;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.fasterxml.jackson.databind.ser.std.StdSerializer;
import org.logstash.instrument.witness.pipeline.EventsWitness;
import org.logstash.instrument.witness.pipeline.PipelineWitness;
import org.logstash.instrument.witness.pipeline.PipelinesWitness;
import org.logstash.instrument.witness.pipeline.ReloadWitness;
import org.logstash.instrument.witness.process.ProcessWitness;
import org.logstash.instrument.witness.schedule.WitnessScheduler;
import java.io.IOException;
import java.util.Arrays;
/**
* <p>Primary entry point for the Witness subsystem. The Witness subsystem is an abstraction over the {@link org.logstash.instrument.metrics.Metric}s that watches/witnesses what
* is happening inside Logstash. </p>
* <p>Usage example to increment the events counter for the foo input in the main pipeline:
* {@code Witness.instance().pipeline("main").inputs("foo").events().in(1);}
* </p>
* <p>A Witness may be forgetful, which means it may expose a {@code forget()} method to reset the underlying metrics back to their initial state.</p>
* <p>A Witness may also be a snitch, which means it may expose a {@code snitch()} method to retrieve the underlying metric values without JSON serialization.</p>
* <p>All Witnesses are capable of serializing their underlying metrics as JSON.</p>
*/
@JsonSerialize(using = Witness.Serializer.class)
public final class Witness implements SerializableWitness {
private final ReloadWitness reloadWitness;
private final EventsWitness eventsWitness;
private final PipelinesWitness pipelinesWitness;
private final ProcessWitness processWitness;
private final WitnessScheduler processWitnessScheduler;
private static Witness instance;
/**
* Constructor. Consumers should use {@link #instance()} method to obtain an instance of this class.
* <p>THIS IS ONLY TO BE USED BY THE RUBY AGENT</p>
*/
public Witness() {
this.reloadWitness = new ReloadWitness();
this.eventsWitness = new EventsWitness();
this.pipelinesWitness = new PipelinesWitness();
this.processWitness = new ProcessWitness();
this.processWitnessScheduler = new WitnessScheduler(processWitness);
}
/**
* This is a dirty hack since the {@link Witness} needs to mirror the Ruby agent's lifecycle which, at least for testing, can mean more than 1 instance per JVM, but only 1
* active instance at any time. Exposing this allows Ruby to create the instance for use in its agent constructor, then set it here for all to use as a singleton.
* <p>THIS IS ONLY TO BE USED BY THE RUBY AGENT</p>
*
* @param newInstance The instance of the {@link Witness} to use as the singleton instance that mirrors the agent's lifecycle.
*/
public static synchronized void setInstance(Witness newInstance) {
//Ruby agent restart
if (instance != null) {
instance.processWitnessScheduler.shutdown();
}
instance = newInstance;
if (instance != null) {
instance.processWitnessScheduler.schedule();
}
}
/**
* Obtain the singleton instance of the {@link Witness}
*
* @return the singleton instance of the {@link Witness}
* @throws IllegalStateException if attempted to be used before being set.
*/
public static Witness instance() {
if (instance == null) {
throw new IllegalStateException("The stats witness instance must be set before it is used. Called from: " + Arrays.toString(new Throwable().getStackTrace()));
}
return instance;
}
public EventsWitness events() {
return eventsWitness;
}
/**
* Obtain a reference to the associated reload witness.
*
* @return The associated {@link ReloadWitness}
*/
public ReloadWitness reloads() {
return reloadWitness;
}
/**
* Obtain a reference to the associated pipelines witness. Consumers may use {@link #pipeline(String)} as a shortcut to this method.
*
* @return The associated {@link PipelinesWitness}
*/
public PipelinesWitness pipelines() {
return pipelinesWitness;
}
/**
* Obtain a reference to the associated process witness.
*
* @return The associated {@link ProcessWitness}
*/
public ProcessWitness process() {
return processWitness;
}
/**
* Shortcut method for {@link PipelinesWitness#pipeline(String)}
*
* @param name The name of the pipeline witness to retrieve.
* @return the associated {@link PipelineWitness} for the given name
*/
public PipelineWitness pipeline(String name) {
return pipelinesWitness.pipeline(name);
}
@Override
public void genJson(JsonGenerator gen, SerializerProvider provider) throws IOException {
Serializer.innerSerialize(this, gen, provider);
}
/**
* The Jackson serializer.
*/
public static final class Serializer extends StdSerializer<Witness> {
private static final long serialVersionUID = 1L;
/**
* Default constructor - required for Jackson
*/
public Serializer() {
this(Witness.class);
}
/**
* Constructor
*
* @param t the type to serialize
*/
private Serializer(Class<Witness> t) {
super(t);
}
@Override
public void serialize(Witness witness, JsonGenerator gen, SerializerProvider provider) throws IOException {
gen.writeStartObject();
innerSerialize(witness, gen, provider);
gen.writeEndObject();
}
static void innerSerialize(Witness witness, JsonGenerator gen, SerializerProvider provider) throws IOException {
witness.process().genJson(gen, provider);
witness.events().genJson(gen, provider);
witness.reloads().genJson(gen, provider);
witness.pipelinesWitness.genJson(gen, provider);
}
}
}
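A minimal usage sketch of the singleton lifecycle described above (not part of this commit; the example class, pipeline name, and plugin id are illustrative):

import org.logstash.instrument.witness.Witness;

public class WitnessUsageExample {
    public static void main(String[] args) throws Exception {
        Witness witness = new Witness();   // normally constructed only by the Ruby agent
        Witness.setInstance(witness);      // installs the singleton and schedules the process witness

        // Increment the "in" events counter for the "foo" input of the "main" pipeline.
        Witness.instance().pipeline("main").inputs("foo").events().in(1);

        // The whole metric tree can be rendered through the SerializableWitness default method.
        System.out.println(Witness.instance().asJson());

        Witness.setInstance(null);         // shuts the process witness scheduler back down
    }
}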


@@ -1,248 +0,0 @@
package org.logstash.instrument.witness.configuration;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.fasterxml.jackson.databind.ser.std.StdSerializer;
import org.logstash.instrument.metrics.Metric;
import org.logstash.instrument.metrics.gauge.BooleanGauge;
import org.logstash.instrument.metrics.gauge.NumberGauge;
import org.logstash.instrument.metrics.gauge.TextGauge;
import org.logstash.instrument.witness.MetricSerializer;
import org.logstash.instrument.witness.SerializableWitness;
import java.io.IOException;
/**
* The witness for configuration.
*/
@JsonSerialize(using = ConfigWitness.Serializer.class)
public final class ConfigWitness implements SerializableWitness {
private final BooleanGauge deadLetterQueueEnabled;
private final BooleanGauge configReloadAutomatic;
private final NumberGauge batchSize;
private final NumberGauge workers;
private final NumberGauge batchDelay;
private final NumberGauge configReloadInterval;
private final TextGauge deadLetterQueuePath;
private final Snitch snitch;
private static final String KEY = "config";
/**
* Constructor.
*/
public ConfigWitness() {
deadLetterQueueEnabled = new BooleanGauge("dead_letter_queue_enabled");
configReloadAutomatic = new BooleanGauge("config_reload_automatic");
batchSize = new NumberGauge("batch_size");
workers = new NumberGauge("workers");
batchDelay = new NumberGauge("batch_delay");
configReloadInterval = new NumberGauge("config_reload_interval");
deadLetterQueuePath = new TextGauge("dead_letter_queue_path");
snitch = new Snitch(this);
}
/**
* Sets the configured batch delay
*
* @param delay the configured batch delay
*/
public void batchDelay(long delay) {
batchDelay.set(delay);
}
/**
* Sets the configured batch size
*
* @param size the configured batch size
*/
public void batchSize(long size) {
batchSize.set(size);
}
/**
* Flag to determine if the configuration is configured for auto reload
*
* @param isAuto true if the config is set to reload automatically, false otherwise
*/
public void configReloadAutomatic(boolean isAuto) {
configReloadAutomatic.set(isAuto);
}
/**
* The configured reload interval
*
* @param interval the interval between reloads
*/
public void configReloadInterval(long interval) {
configReloadInterval.set(interval);
}
/**
* Flag to determine if the dead letter queue is configured to be enabled.
*
* @param enabled true if enabled, false otherwise
*/
public void deadLetterQueueEnabled(boolean enabled) {
deadLetterQueueEnabled.set(enabled);
}
/**
* The configured path for the dead letter queue.
*
* @param path the path used by the dead letter queue
*/
public void deadLetterQueuePath(String path) {
deadLetterQueuePath.set(path);
}
/**
* The number of configured workers
*
* @param workers the number of configured workers
*/
public void workers(long workers) {
this.workers.set(workers);
}
/**
* Get a reference to associated snitch to get discrete metric values.
*
* @return the associated {@link Snitch}
*/
public Snitch snitch() {
return this.snitch;
}
@Override
public void genJson(JsonGenerator gen, SerializerProvider provider) throws IOException {
Serializer.innerSerialize(this, gen);
}
/**
* The Jackson serializer.
*/
public static final class Serializer extends StdSerializer<ConfigWitness> {
private static final long serialVersionUID = 1L;
/**
* Default constructor - required for Jackson
*/
public Serializer() {
this(ConfigWitness.class);
}
/**
* Constructor
*
* @param t the type to serialize
*/
private Serializer(Class<ConfigWitness> t) {
super(t);
}
@Override
public void serialize(ConfigWitness witness, JsonGenerator gen, SerializerProvider provider) throws IOException {
gen.writeStartObject();
innerSerialize(witness, gen);
gen.writeEndObject();
}
static void innerSerialize(ConfigWitness witness, JsonGenerator gen) throws IOException {
gen.writeObjectFieldStart(KEY);
MetricSerializer<Metric<Number>> numberSerializer = MetricSerializer.Get.numberSerializer(gen);
MetricSerializer<Metric<Boolean>> booleanSerializer = MetricSerializer.Get.booleanSerializer(gen);
MetricSerializer<Metric<String>> stringSerializer = MetricSerializer.Get.stringSerializer(gen);
numberSerializer.serialize(witness.batchSize);
numberSerializer.serialize(witness.workers);
numberSerializer.serialize(witness.batchDelay);
numberSerializer.serialize(witness.configReloadInterval);
booleanSerializer.serialize(witness.configReloadAutomatic);
booleanSerializer.serialize(witness.deadLetterQueueEnabled);
stringSerializer.serialize(witness.deadLetterQueuePath);
gen.writeEndObject();
}
}
/**
* The snitch for the configuration. Used to retrieve discrete metric values.
*/
public static final class Snitch {
private final ConfigWitness witness;
private Snitch(ConfigWitness witness) {
this.witness = witness;
}
/**
* Gets the configured batch delay
*
* @return the batch delay. May be {@code null}
*/
public Number batchDelay() {
return witness.batchDelay.getValue();
}
/**
* Gets the configured batch size
*
* @return the batch size. May be {@code null}
*/
public Number batchSize() {
return witness.batchSize.getValue();
}
/**
* Gets whether automatic config reload is configured
*
* @return true if configured for automatic reload, false otherwise
*/
public boolean configReloadAutomatic() {
Boolean reload = witness.configReloadAutomatic.getValue();
return reload == null ? false : reload;
}
/**
* Gets the configured reload interval
*
* @return the configured reload interval. May be {@code null}
*/
public Number configReloadInterval() {
return witness.configReloadInterval.getValue();
}
/**
* Gets if the dead letter queue is configured to be enabled
*
* @return true if the dead letter queue is configured to be enabled, false otherwise
*/
public boolean deadLetterQueueEnabled() {
Boolean enabled = witness.deadLetterQueueEnabled.getValue();
return enabled == null ? false : enabled;
}
/**
* Gets the path where the dead letter queue is configured to write.
*
* @return the configured path for the dead letter queue. May be {@code null}
*/
public String deadLetterQueuePath() {
return witness.deadLetterQueuePath.getValue();
}
/**
* Gets the number of configured workers
*
* @return the configured number of workers. May be {@code null}
*/
public Number workers() {
return witness.workers.getValue();
}
}
}
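A short sketch (illustrative values, not from this commit) of the setter/snitch/JSON round trip for the configuration witness:

import org.logstash.instrument.witness.configuration.ConfigWitness;

public class ConfigWitnessExample {
    public static void main(String[] args) throws Exception {
        ConfigWitness config = new ConfigWitness();
        config.batchSize(125);
        config.workers(4);
        config.batchDelay(50);
        config.deadLetterQueueEnabled(true);

        System.out.println(config.snitch().batchSize());              // 125
        System.out.println(config.snitch().deadLetterQueueEnabled()); // true
        System.out.println(config.asJson()); // {"config":{"batch_size":125,"workers":4,...}}
    }
}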


@@ -1,113 +0,0 @@
package org.logstash.instrument.witness.pipeline;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.fasterxml.jackson.databind.ser.std.StdSerializer;
import org.logstash.instrument.metrics.Metric;
import org.logstash.instrument.metrics.gauge.NumberGauge;
import org.logstash.instrument.witness.MetricSerializer;
import org.logstash.instrument.witness.SerializableWitness;
import java.io.IOException;
/**
* Witness for the Dead Letter Queue
*/
@JsonSerialize(using = DeadLetterQueueWitness.Serializer.class)
public class DeadLetterQueueWitness implements SerializableWitness {
private static final String KEY = "dead_letter_queue";
private final Snitch snitch;
private final NumberGauge queueSizeInBytes;
/**
* Constructor
*/
public DeadLetterQueueWitness() {
queueSizeInBytes = new NumberGauge("queue_size_in_bytes");
snitch = new Snitch(this);
}
/**
* Set the dead letter queue size, represented in bytes
*
* @param size the byte size of the queue
*/
public void queueSizeInBytes(long size) {
queueSizeInBytes.set(size);
}
/**
* Get a reference to associated snitch to get discrete metric values.
*
* @return the associated {@link Snitch}
*/
public Snitch snitch() {
return this.snitch;
}
@Override
public void genJson(JsonGenerator gen, SerializerProvider provider) throws IOException {
Serializer.innerSerialize(this, gen);
}
/**
* The Jackson serializer.
*/
public static final class Serializer extends StdSerializer<DeadLetterQueueWitness> {
private static final long serialVersionUID = 1L;
/**
* Default constructor - required for Jackson
*/
public Serializer() {
this(DeadLetterQueueWitness.class);
}
/**
* Constructor
*
* @param t the type to serialize
*/
protected Serializer(Class<DeadLetterQueueWitness> t) {
super(t);
}
@Override
public void serialize(DeadLetterQueueWitness witness, JsonGenerator gen, SerializerProvider provider) throws IOException {
gen.writeStartObject();
innerSerialize(witness, gen);
gen.writeEndObject();
}
static void innerSerialize(DeadLetterQueueWitness witness, JsonGenerator gen) throws IOException {
gen.writeObjectFieldStart(KEY);
MetricSerializer<Metric<Number>> numberSerializer = MetricSerializer.Get.numberSerializer(gen);
numberSerializer.serialize(witness.queueSizeInBytes);
gen.writeEndObject();
}
}
/**
* The snitch for the dead letter queue. Used to retrieve discrete metric values.
*/
public static class Snitch {
private final DeadLetterQueueWitness witness;
private Snitch(DeadLetterQueueWitness witness) {
this.witness = witness;
}
/**
* Gets the queue size in bytes
*
* @return the queue size in bytes. May be {@code null}
*/
public Number queueSizeInBytes() {
return witness.queueSizeInBytes.getValue();
}
}
}
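A small sketch (the value is illustrative) of recording the dead letter queue size and reading it back:

import org.logstash.instrument.witness.pipeline.DeadLetterQueueWitness;

public class DeadLetterQueueWitnessExample {
    public static void main(String[] args) throws Exception {
        DeadLetterQueueWitness dlq = new DeadLetterQueueWitness();
        dlq.queueSizeInBytes(1024);
        System.out.println(dlq.snitch().queueSizeInBytes()); // 1024
        System.out.println(dlq.asJson()); // {"dead_letter_queue":{"queue_size_in_bytes":1024}}
    }
}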


@@ -1,152 +0,0 @@
package org.logstash.instrument.witness.pipeline;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.fasterxml.jackson.databind.ser.std.StdSerializer;
import org.logstash.instrument.metrics.Metric;
import org.logstash.instrument.metrics.gauge.TextGauge;
import org.logstash.instrument.witness.MetricSerializer;
import org.logstash.instrument.witness.SerializableWitness;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PrintStream;
/**
* Witness for errors.
*/
@JsonSerialize(using = ErrorWitness.Serializer.class)
public class ErrorWitness implements SerializableWitness {
private final TextGauge message;
private final TextGauge backtrace;
private final Snitch snitch;
private static final String KEY = "last_error";
public ErrorWitness() {
message = new TextGauge("message");
backtrace = new TextGauge("backtrace");
snitch = new Snitch(this);
}
/**
* Stacktrace as a {@link String}
*
* @param stackTrace The stack trace already formatted for output.
*/
public void backtrace(String stackTrace) {
this.backtrace.set(stackTrace);
}
/**
* The message of the error.
*
* @param message human readable error message.
*/
public void message(String message) {
this.message.set(message);
}
/**
* Get a reference to associated snitch to get discrete metric values.
*
* @return the associated {@link Snitch}
*/
public Snitch snitch() {
return this.snitch;
}
/**
* Stacktrace for Java.
*
* @param throwable The Java {@link Throwable} that contains the stacktrace to output
*/
public void backtrace(Throwable throwable) {
try (ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
PrintStream printStream = new PrintStream(byteArrayOutputStream)) {
throwable.printStackTrace(printStream);
String backtrace = byteArrayOutputStream.toString("UTF-8");
this.backtrace.set(backtrace);
} catch (IOException e) {
//A checked exception from close() on a ByteArrayOutputStream is simply annoying since close() is an empty method; this catch block will never execute.
throw new IllegalStateException("Unknown error", e);
}
}
@Override
public void genJson(JsonGenerator gen, SerializerProvider provider) throws IOException {
Serializer.innerSerialize(this, gen);
}
/**
* The Jackson serializer.
*/
public static final class Serializer extends StdSerializer<ErrorWitness> {
private static final long serialVersionUID = 1L;
/**
* Default constructor - required for Jackson
*/
public Serializer() {
this(ErrorWitness.class);
}
/**
* Constructor
*
* @param t the type to serialize
*/
protected Serializer(Class<ErrorWitness> t) {
super(t);
}
@Override
public void serialize(ErrorWitness witness, JsonGenerator gen, SerializerProvider provider) throws IOException {
gen.writeStartObject();
innerSerialize(witness, gen);
gen.writeEndObject();
}
static void innerSerialize(ErrorWitness witness, JsonGenerator gen) throws IOException {
gen.writeObjectFieldStart(KEY);
MetricSerializer<Metric<String>> stringSerializer = MetricSerializer.Get.stringSerializer(gen);
stringSerializer.serialize(witness.message);
stringSerializer.serialize(witness.backtrace);
gen.writeEndObject();
}
}
/**
* The snitch for the errors. Used to retrieve discrete metric values.
*/
public static final class Snitch {
private final ErrorWitness witness;
private Snitch(ErrorWitness witness) {
this.witness = witness;
}
/**
* Gets the error message
*
* @return the error message. May be {@code null}
*/
public String message() {
return witness.message.getValue();
}
/**
* Gets the error stack/back trace
*
* @return the backtrace as a String. May be {@code null}
*/
public String backtrace() {
return witness.backtrace.getValue();
}
}
}
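A sketch (the exception and message are illustrative) of capturing a Java error in the witness, using the Throwable overload of backtrace():

import org.logstash.instrument.witness.pipeline.ErrorWitness;

public class ErrorWitnessExample {
    public static void main(String[] args) throws Exception {
        ErrorWitness lastError = new ErrorWitness();
        try {
            throw new IllegalArgumentException("boom");
        } catch (IllegalArgumentException e) {
            lastError.message(e.getMessage());
            lastError.backtrace(e); // formats the stack trace via printStackTrace
        }
        System.out.println(lastError.snitch().message()); // boom
        System.out.println(lastError.asJson()); // {"last_error":{"message":"boom","backtrace":"java.lang..."}}
    }
}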


@@ -1,232 +0,0 @@
package org.logstash.instrument.witness.pipeline;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.fasterxml.jackson.databind.ser.std.StdSerializer;
import org.logstash.instrument.metrics.Metric;
import org.logstash.instrument.metrics.counter.LongCounter;
import org.logstash.instrument.witness.MetricSerializer;
import org.logstash.instrument.witness.SerializableWitness;
import java.io.IOException;
/**
* Witness for events.
*/
@JsonSerialize(using = EventsWitness.Serializer.class)
public final class EventsWitness implements SerializableWitness {
private LongCounter filtered;
private LongCounter out;
private LongCounter in;
private LongCounter duration;
private LongCounter queuePushDuration;
private static final String KEY = "events";
private final Snitch snitch;
/**
* Constructor.
*/
public EventsWitness() {
filtered = new LongCounter("filtered");
out = new LongCounter("out");
in = new LongCounter("in");
duration = new LongCounter("duration_in_millis");
queuePushDuration = new LongCounter("queue_push_duration_in_millis");
snitch = new Snitch(this);
}
/**
* Add to the existing duration
*
* @param durationToAdd the amount to add to the existing duration.
*/
public void duration(long durationToAdd) {
duration.increment(durationToAdd);
}
/**
* increment the filtered count by 1
*/
public void filtered() {
filtered.increment();
}
/**
* increment the filtered count
*
* @param count the count to increment by
*/
public void filtered(long count) {
filtered.increment(count);
}
/**
* Forgets all information related to this witness.
*/
public void forgetAll() {
filtered.reset();
out.reset();
in.reset();
duration.reset();
queuePushDuration.reset();
}
/**
* increment the in count by 1
*/
public void in() {
in.increment();
}
/**
* increment the in count
*
* @param count the number to increment by
*/
public void in(long count) {
in.increment(count);
}
/**
* increment the out count by 1
*/
public void out() {
out.increment();
}
/**
* increment the out count
*
* @param count the number by which to increment
*/
public void out(long count) {
out.increment(count);
}
/**
* Get a reference to associated snitch to get discrete metric values.
*
* @return the associated {@link Snitch}
*/
public Snitch snitch() {
return snitch;
}
/**
* Add to the existing queue push duration
*
* @param durationToAdd the duration to add
*/
public void queuePushDuration(long durationToAdd) {
queuePushDuration.increment(durationToAdd);
}
@Override
public void genJson(final JsonGenerator gen, SerializerProvider provider) throws IOException {
Serializer.innerSerialize(this, gen);
}
/**
* The Jackson serializer.
*/
public static final class Serializer extends StdSerializer<EventsWitness> {
private static final long serialVersionUID = 1L;
/**
* Default constructor - required for Jackson
*/
public Serializer() {
this(EventsWitness.class);
}
/**
* Constructor
*
* @param t the type to serialize
*/
private Serializer(Class<EventsWitness> t) {
super(t);
}
@Override
public void serialize(EventsWitness witness, JsonGenerator gen, SerializerProvider provider) throws IOException {
gen.writeStartObject();
innerSerialize(witness, gen);
gen.writeEndObject();
}
static void innerSerialize(EventsWitness witness, JsonGenerator gen) throws IOException {
gen.writeObjectFieldStart(KEY);
MetricSerializer<Metric<Long>> longSerializer = MetricSerializer.Get.longSerializer(gen);
longSerializer.serialize(witness.duration);
longSerializer.serialize(witness.in);
longSerializer.serialize(witness.out);
longSerializer.serialize(witness.filtered);
longSerializer.serialize(witness.queuePushDuration);
gen.writeEndObject();
}
}
/**
* The snitch for the {@link EventsWitness}. Allows reading discrete metric values.
*/
public static final class Snitch {
private final EventsWitness witness;
private Snitch(EventsWitness witness) {
this.witness = witness;
}
/**
* Gets the duration of the events.
*
* @return the events duration.
*/
public long duration() {
return witness.duration.getValue();
}
/**
* Gets the filtered events count.
*
* @return the count of the filtered events.
*/
public long filtered() {
return witness.filtered.getValue();
}
/**
* Gets the in events count.
*
* @return the count of the events in.
*/
public long in() {
return witness.in.getValue();
}
/**
* Gets the out events count.
*
* @return the count of the events out.
*/
public long out() {
return witness.out.getValue();
}
/**
* Gets the duration of the queue push
*
* @return the queue push duration.
*/
public long queuePushDuration() {
return witness.queuePushDuration.getValue();
}
}
}
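A sketch (illustrative counts) of the event counters above for a single hypothetical batch:

import org.logstash.instrument.witness.pipeline.EventsWitness;

public class EventsWitnessExample {
    public static void main(String[] args) throws Exception {
        EventsWitness events = new EventsWitness();
        events.in(10);        // 10 events entered the pipeline
        events.filtered(10);  // all of them passed the filter stage
        events.out(10);       // and were emitted by the outputs
        events.duration(250); // total processing time in milliseconds

        System.out.println(events.snitch().out()); // 10
        System.out.println(events.asJson());       // {"events":{"duration_in_millis":250,"in":10,...}}
        events.forgetAll();                        // resets every counter back to zero
    }
}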


@@ -1,186 +0,0 @@
package org.logstash.instrument.witness.pipeline;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.fasterxml.jackson.databind.ser.std.StdSerializer;
import org.logstash.instrument.witness.configuration.ConfigWitness;
import org.logstash.instrument.witness.SerializableWitness;
import java.io.IOException;
/**
* A single pipeline witness.
*/
@JsonSerialize(using = PipelineWitness.Serializer.class)
public final class PipelineWitness implements SerializableWitness {
private final ReloadWitness reloadWitness;
private final EventsWitness eventsWitness;
private final ConfigWitness configWitness;
private final PluginsWitness pluginsWitness;
private final QueueWitness queueWitness;
private final DeadLetterQueueWitness deadLetterQueueWitness;
private final String KEY;
/**
* Constructor.
*
* @param pipelineName The uniquely identifying name of the pipeline.
*/
public PipelineWitness(String pipelineName) {
this.KEY = pipelineName;
this.reloadWitness = new ReloadWitness();
this.eventsWitness = new EventsWitness();
this.configWitness = new ConfigWitness();
this.pluginsWitness = new PluginsWitness();
this.queueWitness = new QueueWitness();
this.deadLetterQueueWitness = new DeadLetterQueueWitness();
}
/**
* Get a reference to associated config witness
*
* @return the associated {@link ConfigWitness}
*/
public ConfigWitness config() {
return configWitness;
}
/**
* Get a reference to the associated dead letter queue witness
* @return The associated {@link DeadLetterQueueWitness}
*/
public DeadLetterQueueWitness dlq() {
return deadLetterQueueWitness;
}
/**
* Get a reference to associated events witness
*
* @return the associated {@link EventsWitness}
*/
public EventsWitness events() {
return eventsWitness;
}
/**
* Gets the {@link PluginWitness} for the given id, creates the associated {@link PluginWitness} if needed
*
* @param id the id of the filter
* @return the associated {@link PluginWitness} (for method chaining)
*/
public PluginWitness filters(String id) {
return pluginsWitness.filters(id);
}
/**
* Forgets all events for this witness.
*/
public void forgetEvents() {
events().forgetAll();
}
/**
* Forgets all plugins for this witness.
*/
public void forgetPlugins() {
plugins().forgetAll();
}
/**
* Gets the {@link PluginWitness} for the given id, creates the associated {@link PluginWitness} if needed
*
* @param id the id of the input
* @return the associated {@link PluginWitness} (for method chaining)
*/
public PluginWitness inputs(String id) {
return pluginsWitness.inputs(id);
}
/**
* Gets the {@link PluginWitness} for the given id, creates the associated {@link PluginWitness} if needed
*
* @param id the id of the output
* @return the associated {@link PluginWitness} (for method chaining)
*/
public PluginWitness outputs(String id) {
return pluginsWitness.outputs(id);
}
/**
* Get a reference to associated plugins witness
*
* @return the associated {@link PluginsWitness}
*/
public PluginsWitness plugins() {
return pluginsWitness;
}
/**
* Get a reference to associated reload witness
*
* @return the associated {@link ReloadWitness}
*/
public ReloadWitness reloads() {
return reloadWitness;
}
/**
* Get a reference to associated queue witness
*
* @return the associated {@link QueueWitness}
*/
public QueueWitness queue() {
return queueWitness;
}
@Override
public void genJson(JsonGenerator gen, SerializerProvider provider) throws IOException {
Serializer.innerSerialize(this, gen, provider);
}
/**
* The Jackson serializer.
*/
public static final class Serializer extends StdSerializer<PipelineWitness> {
private static final long serialVersionUID = 1L;
/**
* Default constructor - required for Jackson
*/
public Serializer() {
this(PipelineWitness.class);
}
/**
* Constructor
*
* @param t the type to serialize
*/
private Serializer(Class<PipelineWitness> t) {
super(t);
}
@Override
public void serialize(PipelineWitness witness, JsonGenerator gen, SerializerProvider provider) throws IOException {
gen.writeStartObject();
innerSerialize(witness, gen, provider);
gen.writeEndObject();
}
static void innerSerialize(PipelineWitness witness, JsonGenerator gen,
SerializerProvider provider) throws IOException {
gen.writeObjectFieldStart(witness.KEY);
witness.events().genJson(gen, provider);
witness.plugins().genJson(gen, provider);
witness.reloads().genJson(gen, provider);
witness.queue().genJson(gen, provider);
if (witness.config().snitch().deadLetterQueueEnabled()) {
witness.dlq().genJson(gen, provider);
}
gen.writeEndObject();
}
}
}
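A sketch (the pipeline name and plugin id are illustrative) of a per-pipeline witness aggregating the config, plugin, and event witnesses above:

import org.logstash.instrument.witness.pipeline.PipelineWitness;

public class PipelineWitnessExample {
    public static void main(String[] args) throws Exception {
        PipelineWitness main = new PipelineWitness("main");
        main.config().batchSize(125);
        main.inputs("beats-input").events().in(5); // plugin-level event counters
        main.events().in(5);                       // pipeline-level event counters
        System.out.println(main.asJson()); // {"main":{"events":{...},"plugins":{...},"reloads":{...},"queue":{...}}}
    }
}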


@@ -1,85 +0,0 @@
package org.logstash.instrument.witness.pipeline;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.fasterxml.jackson.databind.ser.std.StdSerializer;
import org.logstash.instrument.witness.SerializableWitness;
import java.io.IOException;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
* Witness for the set of pipelines.
*/
@JsonSerialize(using = PipelinesWitness.Serializer.class)
public final class PipelinesWitness implements SerializableWitness {
private final Map<String, PipelineWitness> pipelines;
private static final String KEY = "pipelines";
/**
* Constructor.
*/
public PipelinesWitness() {
this.pipelines = new ConcurrentHashMap<>();
}
/**
* Get a uniquely named pipeline witness. If one does not exist, it will be created.
*
* @param name The name of the pipeline.
* @return the {@link PipelineWitness} identified by the given name.
*/
public PipelineWitness pipeline(String name) {
return pipelines.computeIfAbsent(name, k -> new PipelineWitness(k));
}
@Override
public void genJson(JsonGenerator gen, SerializerProvider provider) throws IOException {
Serializer.innerSerialize(this, gen, provider);
}
/**
* The Jackson serializer.
*/
public static final class Serializer extends StdSerializer<PipelinesWitness> {
private static final long serialVersionUID = 1L;
/**
* Default constructor - required for Jackson
*/
public Serializer() {
this(PipelinesWitness.class);
}
/**
* Constructor
*
* @param t the type to serialize
*/
protected Serializer(Class<PipelinesWitness> t) {
super(t);
}
@Override
public void serialize(PipelinesWitness witness, JsonGenerator gen, SerializerProvider provider) throws IOException {
gen.writeStartObject();
innerSerialize(witness, gen, provider);
gen.writeEndObject();
}
static void innerSerialize(PipelinesWitness witness, JsonGenerator gen,
SerializerProvider provider) throws IOException {
gen.writeObjectFieldStart(KEY);
for (Map.Entry<String, PipelineWitness> entry : witness.pipelines.entrySet()) {
entry.getValue().genJson(gen, provider);
}
gen.writeEndObject();
}
}
}
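A sketch showing that pipeline witnesses are created lazily and reused by name (the name is illustrative):

import org.logstash.instrument.witness.pipeline.PipelinesWitness;

public class PipelinesWitnessExample {
    public static void main(String[] args) throws Exception {
        PipelinesWitness pipelines = new PipelinesWitness();
        pipelines.pipeline("main").events().in(1); // creates the "main" witness on first use
        pipelines.pipeline("main").events().in(1); // reuses the same instance
        System.out.println(pipelines.asJson());    // {"pipelines":{"main":{...}}}
    }
}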


@@ -1,319 +0,0 @@
package org.logstash.instrument.witness.pipeline;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.fasterxml.jackson.databind.ser.std.StdSerializer;
import org.jruby.RubySymbol;
import org.logstash.instrument.metrics.Metric;
import org.logstash.instrument.metrics.counter.CounterMetric;
import org.logstash.instrument.metrics.counter.LongCounter;
import org.logstash.instrument.metrics.gauge.GaugeMetric;
import org.logstash.instrument.metrics.gauge.LazyDelegatingGauge;
import org.logstash.instrument.metrics.gauge.TextGauge;
import org.logstash.instrument.witness.MetricSerializer;
import org.logstash.instrument.witness.SerializableWitness;
import java.io.IOException;
import java.util.Collections;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
* Witness for a single plugin.
*/
@JsonSerialize(using = PluginWitness.Serializer.class)
public class PluginWitness implements SerializableWitness {
private final EventsWitness eventsWitness;
private final PluginWitness.CustomWitness customWitness;
private final TextGauge id;
private final TextGauge name;
private final Snitch snitch;
/**
* Constructor.
*
* @param id The unique identifier for this plugin.
*/
public PluginWitness(String id) {
eventsWitness = new EventsWitness();
customWitness = new PluginWitness.CustomWitness();
this.id = new TextGauge("id", id);
this.name = new TextGauge("name");
this.snitch = new Snitch(this);
}
/**
* Get a reference to the associated events witness.
*
* @return the associated {@link EventsWitness}
*/
public EventsWitness events() {
return eventsWitness;
}
/**
* Sets the name of this plugin.
*
* @param name the name of this plugin.
* @return an instance of this witness (to allow method chaining)
*/
public PluginWitness name(String name) {
this.name.set(name);
return this;
}
/**
* Get a reference to the associated custom witness
*
* @return the {@link PluginWitness.CustomWitness}
*/
public PluginWitness.CustomWitness custom() {
return this.customWitness;
}
/**
* Get a reference to associated snitch to get discrete metric values.
*
* @return the associated {@link Snitch}
*/
public Snitch snitch() {
return snitch;
}
@Override
public void genJson(JsonGenerator gen, SerializerProvider provider) throws IOException {
Serializer.innerSerialize(this, gen, provider);
}
/**
* The Jackson JSON serializer.
*/
public static final class Serializer extends StdSerializer<PluginWitness> {
private static final long serialVersionUID = 1L;
/**
* Default constructor - required for Jackson
*/
public Serializer() {
this(PluginWitness.class);
}
/**
* Constructor
*
* @param t the type to serialize
*/
protected Serializer(Class<PluginWitness> t) {
super(t);
}
@Override
public void serialize(PluginWitness witness, JsonGenerator gen, SerializerProvider provider) throws IOException {
gen.writeStartObject();
innerSerialize(witness, gen, provider);
gen.writeEndObject();
}
static void innerSerialize(PluginWitness witness, JsonGenerator gen,
SerializerProvider provider) throws IOException {
MetricSerializer<Metric<String>> stringSerializer = MetricSerializer.Get.stringSerializer(gen);
MetricSerializer<Metric<Long>> longSerializer = MetricSerializer.Get.longSerializer(gen);
stringSerializer.serialize(witness.id);
witness.events().genJson(gen, provider);
stringSerializer.serialize(witness.name);
for (GaugeMetric<Object, Object> gauge : witness.customWitness.gauges.values()) {
gen.writeObjectField(gauge.getName(), gauge.getValue());
}
for (CounterMetric<Long> counter : witness.customWitness.counters.values()) {
longSerializer.serialize(counter);
}
}
}
/**
* A custom witness that we can hand off to plugins so they can contribute to the metrics
*/
public static final class CustomWitness {
private final Snitch snitch;
/**
* private Constructor - not for external instantiation
*/
private CustomWitness() {
this.snitch = new Snitch(this);
}
private final Map<String, GaugeMetric<Object, Object>> gauges = new ConcurrentHashMap<>();
private final Map<String, CounterMetric<Long>> counters = new ConcurrentHashMap<>();
/**
* Set the gauge value
*
* @param key the {@link RubySymbol} for the key of this gauge. Note - internally this will be converted to a {@link String}
* @param value The value of the gauge. Any {@link Object} type is allowed, but unless it is a text or numeric type there are no guarantees of proper serialization.
*/
public void gauge(RubySymbol key, Object value) {
gauge(key.asJavaString(), value);
}
/**
* Set the gauge value
*
* @param key the {@link String} key of this gauge
* @param value The value of the gauge. Any {@link Object} type is allowed, but unless it is a text or numeric type there are no guarantees of proper serialization.
*/
public void gauge(String key, Object value) {
GaugeMetric<Object, Object> gauge = gauges.get(key);
if (gauge != null) {
gauge.set(value);
} else {
gauge = new LazyDelegatingGauge(key, value);
gauges.put(key, gauge);
}
}
/**
* Increments the underlying counter for this {@link RubySymbol} by 1.
*
* @param key the {@link RubySymbol} key of the counter to increment. Note - internally this will be converted to a {@link String}
*/
public void increment(RubySymbol key) {
increment(key.asJavaString());
}
/**
* Increments the underlying counter for this key by 1.
*
* @param key the {@link String} key of the counter to increment
*/
public void increment(String key) {
increment(key, 1);
}
/**
* Increments the underlying counter for this {@link RubySymbol} by the given value.
*
* @param key the {@link RubySymbol} key of the counter to increment. Note - internally this will be converted to a {@link String}
* @param by the amount to increment by
*/
public void increment(RubySymbol key, long by) {
increment(key.asJavaString(), by);
}
/**
* Increments the underlying counter for this key by the given value.
*
* @param key the {@link String} key of the counter to increment
* @param by the amount to increment by
*/
public void increment(String key, long by) {
CounterMetric<Long> counter = counters.get(key);
if (counter != null) {
counter.increment(by);
} else {
counter = new LongCounter(key);
counter.increment(by);
counters.put(key, counter);
}
}
/**
* Get a reference to associated snitch to get discrete metric values.
*
* @return the associated {@link Snitch}
*/
public Snitch snitch() {
return snitch;
}
/**
* Snitch for a plugin. Provides discrete metric values.
*/
public static final class Snitch {
private final PluginWitness.CustomWitness witness;
/**
* Constructor
*
* @param witness the witness
*/
private Snitch(PluginWitness.CustomWitness witness) {
this.witness = witness;
}
/**
* Get the underlying {@link GaugeMetric}. May call {@link GaugeMetric#getType()} to get the underlying type.
*
* @param key the key/name of the {@link GaugeMetric}.
* @return the {@link GaugeMetric} May return {@code null}
*/
public GaugeMetric gauge(String key) {
return witness.gauges.get(key);
}
/**
* Gets the full set of custom {@link GaugeMetric}
*
* @return the map of all of the {@link GaugeMetric}, keyed by the associated {@link GaugeMetric} key/name
*/
public Map<String, GaugeMetric<?, ?>> gauges() {
return Collections.unmodifiableMap(witness.gauges);
}
/**
* Get the custom Counter. May call {@link CounterMetric#getType()} to get the underlying type.
*
* @param key the key/name of the {@link CounterMetric}
* @return the {@link CounterMetric} for the given key. May return {@code null}
*/
public CounterMetric<?> counter(String key) {
return witness.counters.get(key);
}
/**
* Gets the full set of the custom {@link CounterMetric}
*
* @return the map of all of the {@link CounterMetric}, keyed by the associated {@link CounterMetric} key/name
*/
public Map<String, CounterMetric<?>> counters() {
return Collections.unmodifiableMap(witness.counters);
}
}
}
/**
* Snitch for a plugin. Provides discrete metric values.
*/
public static final class Snitch {
private final PluginWitness witness;
private Snitch(PluginWitness witness) {
this.witness = witness;
}
/**
* Gets the id for this plugin.
*
* @return the id
*/
public String id() {
return witness.id.getValue();
}
/**
* Gets the name of this plugin
*
* @return the name
*/
public String name() {
return witness.name.getValue();
}
}
}
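A sketch (plugin id, name, and custom metric keys are illustrative) of the per-plugin metrics, including the custom gauges and counters a plugin can contribute:

import org.logstash.instrument.witness.pipeline.PluginWitness;

public class PluginWitnessExample {
    public static void main(String[] args) throws Exception {
        PluginWitness plugin = new PluginWitness("my-grok-filter-id").name("grok");
        plugin.events().in(3);
        plugin.custom().gauge("patterns_loaded", 12L); // arbitrary plugin-supplied gauge
        plugin.custom().increment("matches", 3);       // plugin-supplied counter

        System.out.println(plugin.snitch().name());                      // grok
        System.out.println(plugin.custom().snitch().counter("matches")); // the underlying LongCounter
        System.out.println(plugin.asJson());
    }
}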


@@ -1,155 +0,0 @@
package org.logstash.instrument.witness.pipeline;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.fasterxml.jackson.databind.ser.std.StdSerializer;
import org.logstash.instrument.witness.SerializableWitness;
import java.io.IOException;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
* A Witness for the set of plugins.
*/
@JsonSerialize(using = PluginsWitness.Serializer.class)
public class PluginsWitness implements SerializableWitness {
private final Map<String, PluginWitness> inputs;
private final Map<String, PluginWitness> outputs;
private final Map<String, PluginWitness> filters;
private final Map<String, PluginWitness> codecs;
private static final String KEY = "plugins";
/**
* Constructor.
*/
public PluginsWitness() {
this.inputs = new ConcurrentHashMap<>();
this.outputs = new ConcurrentHashMap<>();
this.filters = new ConcurrentHashMap<>();
this.codecs = new ConcurrentHashMap<>();
}
/**
* Gets the {@link PluginWitness} for the given id, creates the associated {@link PluginWitness} if needed
*
* @param id the id of the input
* @return the associated {@link PluginWitness} (for method chaining)
*/
public PluginWitness inputs(String id) {
return getPlugin(inputs, id);
}
/**
* Gets the {@link PluginWitness} for the given id, creates the associated {@link PluginWitness} if needed
*
* @param id the id of the output
* @return the associated {@link PluginWitness} (for method chaining)
*/
public PluginWitness outputs(String id) {
return getPlugin(outputs, id);
}
/**
* Gets the {@link PluginWitness} for the given id, creates the associated {@link PluginWitness} if needed
*
* @param id the id of the filter
* @return the associated {@link PluginWitness} (for method chaining)
*/
public PluginWitness filters(String id) {
return getPlugin(filters, id);
}
/**
* Gets the {@link PluginWitness} for the given id, creates the associated {@link PluginWitness} if needed
*
* @param id the id of the codec
* @return the associated {@link PluginWitness} (for method chaining)
*/
public PluginWitness codecs(String id) {
return getPlugin(codecs, id);
}
/**
* Forgets all information related to the plugins.
*/
public void forgetAll() {
inputs.clear();
outputs.clear();
filters.clear();
codecs.clear();
}
/**
* Gets or creates the {@link PluginWitness}
*
* @param plugin the map of the plugin type.
* @param id the id of the plugin
* @return existing or new {@link PluginWitness}
*/
private static PluginWitness getPlugin(Map<String, PluginWitness> plugin, String id) {
return plugin.computeIfAbsent(id, PluginWitness::new);
}
@Override
public void genJson(JsonGenerator gen, SerializerProvider provider) throws IOException {
PluginsWitness.Serializer.innerSerialize(this, gen, provider);
}
/**
* The Jackson serializer.
*/
public static final class Serializer extends StdSerializer<PluginsWitness> {
private static final long serialVersionUID = 1L;
/**
* Default constructor - required for Jackson
*/
public Serializer() {
this(PluginsWitness.class);
}
/**
* Constructor
*
* @param t the type to serialize
*/
private Serializer(Class<PluginsWitness> t) {
super(t);
}
@Override
public void serialize(PluginsWitness witness, JsonGenerator gen, SerializerProvider provider) throws IOException {
gen.writeStartObject();
innerSerialize(witness, gen, provider);
gen.writeEndObject();
}
static void innerSerialize(PluginsWitness witness, JsonGenerator gen,
SerializerProvider provider) throws IOException {
gen.writeObjectFieldStart(KEY);
serializePlugins("inputs", witness.inputs, gen, provider);
serializePlugins("filters", witness.filters, gen, provider);
serializePlugins("outputs", witness.outputs, gen, provider);
//codecs are not serialized
gen.writeEndObject();
}
private static void serializePlugins(String key, Map<String, PluginWitness> plugin,
JsonGenerator gen, SerializerProvider provider) throws IOException {
gen.writeArrayFieldStart(key);
for (Map.Entry<String, PluginWitness> entry : plugin.entrySet()) {
gen.writeStartObject();
entry.getValue().genJson(gen, provider);
gen.writeEndObject();
}
gen.writeEndArray();
}
}
}
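A sketch (plugin ids are illustrative) of grouping plugin witnesses by type; each witness is created lazily by id:

import org.logstash.instrument.witness.pipeline.PluginsWitness;

public class PluginsWitnessExample {
    public static void main(String[] args) throws Exception {
        PluginsWitness plugins = new PluginsWitness();
        plugins.inputs("beats").events().in(2);
        plugins.filters("grok").events().filtered(2);
        plugins.outputs("es").events().out(2);
        System.out.println(plugins.asJson()); // {"plugins":{"inputs":[...],"filters":[...],"outputs":[...]}}
        plugins.forgetAll();                  // drops every plugin witness
    }
}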


@@ -1,387 +0,0 @@
package org.logstash.instrument.witness.pipeline;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.fasterxml.jackson.databind.ser.std.StdSerializer;
import org.logstash.instrument.metrics.Metric;
import org.logstash.instrument.metrics.gauge.NumberGauge;
import org.logstash.instrument.metrics.gauge.TextGauge;
import org.logstash.instrument.witness.MetricSerializer;
import org.logstash.instrument.witness.SerializableWitness;
import java.io.IOException;
/**
* Witness for the queue.
*/
@JsonSerialize(using = QueueWitness.Serializer.class)
public final class QueueWitness implements SerializableWitness {
private final TextGauge type;
private final NumberGauge events; // note this is NOT an EventsWitness
private final Snitch snitch;
private final CapacityWitness capacity;
private final DataWitness data;
private static final String KEY = "queue";
private static final Serializer SERIALIZER = new Serializer();
/**
* Constructor.
*/
public QueueWitness() {
type = new TextGauge("type");
events = new NumberGauge("events");
snitch = new Snitch(this);
capacity = new CapacityWitness();
data = new DataWitness();
}
/**
* The number of events currently in the queue.
*
* @param count the count of events currently in the queue
*/
public void events(long count) {
events.set(count);
}
/**
* Get the capacity witness for this queue.
*
* @return the associated {@link CapacityWitness}
*/
public CapacityWitness capacity() {
return capacity;
}
/**
* Get the data witness for this queue.
*
* @return the associated {@link DataWitness}
*/
public DataWitness data() {
return data;
}
/**
* Get a reference to associated snitch to get discrete metric values.
*
* @return the associated {@link Snitch}
*/
public Snitch snitch() {
return snitch;
}
/**
* Sets the type of the queue.
*
* @param type The type of the queue.
*/
public void type(String type) {
this.type.set(type);
}
@Override
public void genJson(JsonGenerator gen, SerializerProvider provider) throws IOException {
Serializer.innerSerialize(this, gen);
}
/**
* Inner witness for the queue capacity
*/
public static class CapacityWitness {
private final NumberGauge queueSizeInBytes;
private final NumberGauge pageCapacityInBytes;
private final NumberGauge maxQueueSizeInBytes;
private final NumberGauge maxUnreadEvents;
private final Snitch snitch;
private final static String KEY = "capacity";
private CapacityWitness() {
queueSizeInBytes = new NumberGauge("queue_size_in_bytes");
pageCapacityInBytes = new NumberGauge("page_capacity_in_bytes");
maxQueueSizeInBytes = new NumberGauge("max_queue_size_in_bytes");
maxUnreadEvents = new NumberGauge("max_unread_events");
snitch = new Snitch(this);
}
/**
* Set the queue size for this queue, represented in bytes
*
* @param size the byte size of this queue
*/
public void queueSizeInBytes(long size) {
queueSizeInBytes.set(size);
}
/**
* Set the page capacity for this queue, represented in bytes.
*
* @param capacity the byte capacity of this queue.
*/
public void pageCapacityInBytes(long capacity) {
pageCapacityInBytes.set(capacity);
}
/**
* Set the max queue size, represented in bytes.
*
* @param max the max queue size of this queue.
*/
public void maxQueueSizeInBytes(long max) {
maxQueueSizeInBytes.set(max);
}
/**
* Set the max unread events count.
*
* @param max the max unread events.
*/
public void maxUnreadEvents(long max) {
maxUnreadEvents.set(max);
}
/**
* Get a reference to associated snitch to get discrete metric values.
*
* @return the associated {@link Snitch}
*/
public Snitch snitch() {
return snitch;
}
/**
* Snitch for queue capacity. Provides discrete metric values.
*/
public static class Snitch {
private final CapacityWitness witness;
private Snitch(CapacityWitness witness) {
this.witness = witness;
}
/**
* Gets the queue size in bytes
*
* @return the queue size in bytes. May be {@code null}
*/
public Number queueSizeInBytes() {
return witness.queueSizeInBytes.getValue();
}
/**
* Gets the page queue capacity in bytes.
*
* @return the page queue capacity.
*/
public Number pageCapacityInBytes() {
return witness.pageCapacityInBytes.getValue();
}
/**
* Gets the max queue size in bytes.
*
* @return the max queue size.
*/
public Number maxQueueSizeInBytes() {
return witness.maxQueueSizeInBytes.getValue();
}
/**
* Get the max unread events from this queue.
*
* @return the max unread events.
*/
public Number maxUnreadEvents() {
return witness.maxUnreadEvents.getValue();
}
}
}
/**
* Inner witness for the queue data
*/
public static class DataWitness {
private final TextGauge path;
private final NumberGauge freeSpaceInBytes;
private final TextGauge storageType;
private final Snitch snitch;
private final static String KEY = "data";
private DataWitness() {
path = new TextGauge("path");
freeSpaceInBytes = new NumberGauge("free_space_in_bytes");
storageType = new TextGauge("storage_type");
snitch = new Snitch(this);
}
/**
* Set the free space for this queue, represented in bytes
*
* @param space the free byte size for this queue
*/
public void freeSpaceInBytes(long space) {
freeSpaceInBytes.set(space);
}
/**
* Set the path for this persistent queue.
*
* @param path the path to the persistent queue
*/
public void path(String path) {
this.path.set(path);
}
/**
* Set the storage type for this queue.
*
* @param storageType the storage type for this queue.
*/
public void storageType(String storageType) {
this.storageType.set(storageType);
}
/**
* Get a reference to associated snitch to get discrete metric values.
*
* @return the associate {@link Snitch}
*/
public Snitch snitch() {
return snitch;
}
/**
* Snitch for queue data. Provides discrete metric values.
*/
public static class Snitch {
private final DataWitness witness;
private Snitch(DataWitness witness) {
this.witness = witness;
}
/**
* Gets the path of this persistent queue.
*
* @return the path to the persistent queue. May be {@code null}
*/
public String path() {
return witness.path.getValue();
}
/**
* Gets the free space of the queue in bytes.
*
* @return the free space of the queue
*/
public Number freeSpaceInBytes() {
return witness.freeSpaceInBytes.getValue();
}
/**
* Gets the storage type of the queue.
*
* @return the storage type.
*/
public String storageType() {
return witness.storageType.getValue();
}
}
}
/**
* The Jackson serializer.
*/
public static final class Serializer extends StdSerializer<QueueWitness> {
private static final long serialVersionUID = 1L;
/**
* Default constructor - required for Jackson
*/
public Serializer() {
this(QueueWitness.class);
}
/**
* Constructor
*
* @param t the type to serialize
*/
protected Serializer(Class<QueueWitness> t) {
super(t);
}
@Override
public void serialize(QueueWitness witness, JsonGenerator gen, SerializerProvider provider) throws IOException {
gen.writeStartObject();
innerSerialize(witness, gen);
gen.writeEndObject();
}
static void innerSerialize(QueueWitness witness, JsonGenerator gen) throws IOException {
gen.writeObjectFieldStart(KEY);
MetricSerializer<Metric<Number>> numberSerializer = MetricSerializer.Get.numberSerializer(gen);
MetricSerializer<Metric<String>> stringSerializer = MetricSerializer.Get.stringSerializer(gen);
stringSerializer.serialize(witness.type);
if ("persisted".equals(witness.type.getValue())) {
numberSerializer.serialize(witness.events);
//capacity
gen.writeObjectFieldStart(CapacityWitness.KEY);
numberSerializer.serialize(witness.capacity.queueSizeInBytes);
numberSerializer.serialize(witness.capacity.pageCapacityInBytes);
numberSerializer.serialize(witness.capacity.maxQueueSizeInBytes);
numberSerializer.serialize(witness.capacity.maxUnreadEvents);
gen.writeEndObject();
//data
gen.writeObjectFieldStart(DataWitness.KEY);
stringSerializer.serialize(witness.data.path);
numberSerializer.serialize(witness.data.freeSpaceInBytes);
stringSerializer.serialize(witness.data.storageType);
gen.writeEndObject();
}
gen.writeEndObject();
}
}
/**
* Snitch for queue. Provides discrete metric values.
*/
public static final class Snitch {
private final QueueWitness witness;
private Snitch(QueueWitness witness) {
this.witness = witness;
}
/**
* Gets the type of queue
*
* @return the queue type. May be {@code null}
*/
public String type() {
return witness.type.getValue();
}
/**
* Gets the number of events currently in the queue
*
* @return the count of events in the queue. May be {@code null}
*/
public Number events() {
return witness.events.getValue();
}
}
}
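A sketch (values and path are illustrative) of the queue witness; note that the capacity and data sub-objects are only serialized when the queue type is "persisted":

import org.logstash.instrument.witness.pipeline.QueueWitness;

public class QueueWitnessExample {
    public static void main(String[] args) throws Exception {
        QueueWitness queue = new QueueWitness();
        queue.type("persisted");
        queue.events(42);
        queue.capacity().maxQueueSizeInBytes(1073741824L);
        queue.data().path("/var/lib/logstash/queue");

        System.out.println(queue.snitch().events()); // 42
        System.out.println(queue.asJson()); // {"queue":{"type":"persisted","events":42,"capacity":{...},"data":{...}}}
    }
}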


@@ -1,217 +0,0 @@
package org.logstash.instrument.witness.pipeline;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.fasterxml.jackson.databind.ser.std.StdSerializer;
import org.logstash.Timestamp;
import org.logstash.ext.JrubyTimestampExtLibrary;
import org.logstash.instrument.metrics.Metric;
import org.logstash.instrument.metrics.counter.LongCounter;
import org.logstash.instrument.metrics.gauge.RubyTimeStampGauge;
import org.logstash.instrument.witness.MetricSerializer;
import org.logstash.instrument.witness.SerializableWitness;
import java.io.IOException;
/**
* A witness to record reloads.
*/
@JsonSerialize(using = ReloadWitness.Serializer.class)
public final class ReloadWitness implements SerializableWitness {
private final LongCounter success;
private final LongCounter failure;
private final ErrorWitness lastError;
private final RubyTimeStampGauge lastSuccessTimestamp;
private final RubyTimeStampGauge lastFailureTimestamp;
private final Snitch snitch;
private static final String KEY = "reloads";
/**
* Constructor.
*/
public ReloadWitness() {
success = new LongCounter("successes");
failure = new LongCounter("failures");
lastError = new ErrorWitness();
lastSuccessTimestamp = new RubyTimeStampGauge("last_success_timestamp");
lastFailureTimestamp = new RubyTimeStampGauge("last_failure_timestamp");
snitch = new Snitch(this);
}
/**
* Obtain a reference to the associated error witness.
*
* @return the associated {@link ErrorWitness}
*/
public ErrorWitness error() {
return lastError;
}
/**
* Record a single failure
*/
public void failure() {
failure.increment();
}
/**
* Record a failure
*
* @param count the number of failures
*/
public void failures(long count) {
failure.increment(count);
}
/**
* Record a single success
*/
public void success() {
success.increment();
}
/**
* Record a success
*
* @param count the number of successes
*/
public void successes(long count) {
success.increment(count);
}
/**
* Get a reference to the associated snitch to get discrete metric values.
*
* @return the associated {@link Snitch}
*/
public Snitch snitch() {
return snitch;
}
/**
* Set the last success timestamp.
*
* @param timestamp the {@link JrubyTimestampExtLibrary.RubyTimestamp} to set
* @deprecated
*/
@Deprecated
public void lastSuccessTimestamp(JrubyTimestampExtLibrary.RubyTimestamp timestamp) {
lastSuccessTimestamp.set(timestamp);
}
/**
* Set the last failure timestamp.
*
* @param timestamp the {@link JrubyTimestampExtLibrary.RubyTimestamp} to set
* @deprecated
*/
@Deprecated
public void lastFailureTimestamp(JrubyTimestampExtLibrary.RubyTimestamp timestamp) {
lastFailureTimestamp.set(timestamp);
}
@Override
public void genJson(JsonGenerator gen, SerializerProvider provider) throws IOException {
Serializer.innerSerialize(this, gen, provider);
}
/**
* The Jackson serializer.
*/
public static final class Serializer extends StdSerializer<ReloadWitness> {
private static final long serialVersionUID = 1L;
/**
* Default constructor - required for Jackson
*/
public Serializer() {
this(ReloadWitness.class);
}
/**
* Constructor
*
* @param t the type to serialize
*/
protected Serializer(Class<ReloadWitness> t) {
super(t);
}
@Override
public void serialize(ReloadWitness witness, JsonGenerator gen, SerializerProvider provider) throws IOException {
gen.writeStartObject();
innerSerialize(witness, gen, provider);
gen.writeEndObject();
}
static void innerSerialize(ReloadWitness witness, JsonGenerator gen,
SerializerProvider provider) throws IOException {
gen.writeObjectFieldStart(ReloadWitness.KEY);
witness.lastError.genJson(gen, provider);
MetricSerializer<Metric<Long>> longSerializer = MetricSerializer.Get.longSerializer(gen);
MetricSerializer<RubyTimeStampGauge> timestampSerializer = MetricSerializer.Get.timestampSerializer(gen);
longSerializer.serialize(witness.success);
timestampSerializer.serialize(witness.lastSuccessTimestamp);
timestampSerializer.serialize(witness.lastFailureTimestamp);
longSerializer.serialize(witness.failure);
gen.writeEndObject();
}
}
/**
* The Reload snitch. Provides a means to get discrete metric values.
*/
public static class Snitch {
private final ReloadWitness witness;
Snitch(ReloadWitness witness) {
this.witness = witness;
}
/**
* Get the number of successful reloads
*
* @return the count of successful reloads
*/
public long successes() {
return witness.success.getValue();
}
/**
* Get the number of failed reloads
*
* @return the count of failed reloads
*/
public long failures() {
return witness.failure.getValue();
}
/**
* Gets the timestamp of the last successful reload
*
* @return {@link Timestamp} of the last successful reload
* @deprecated
*/
@Deprecated
public Timestamp lastSuccessTimestamp() {
return witness.lastSuccessTimestamp.getValue();
}
/**
* Gets the timestamp for the last failed reload
*
* @return {@link Timestamp} of the last failed reload
* @deprecated
*/
@Deprecated
public Timestamp lastFailureTimestamp() {
return witness.lastFailureTimestamp.getValue();
}
}
}
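A small usage sketch of the removed ReloadWitness (the ReloadWitnessSketch class name is hypothetical; every call below appears in the API above or in ReloadWitnessTest):
package org.logstash.instrument.witness.pipeline;
public class ReloadWitnessSketch {
public static void main(String[] args) throws Exception {
ReloadWitness reloads = new ReloadWitness();
reloads.success();                                // one successful reload
reloads.failures(2);                              // two failed reloads at once
reloads.error().message("config parse failed");   // record the last error
long ok = reloads.snitch().successes();           // 1
long bad = reloads.snitch().failures();           // 2
//JSON view, e.g. {"reloads":{"last_error":{...},"successes":1,...,"failures":2}}
System.out.println(ok + "/" + bad + " -> " + reloads.asJson());
}
}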

View file

@ -1,242 +0,0 @@
package org.logstash.instrument.witness.process;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.fasterxml.jackson.databind.ser.std.StdSerializer;
import com.sun.management.UnixOperatingSystemMXBean;
import org.logstash.instrument.metrics.Metric;
import org.logstash.instrument.metrics.gauge.NumberGauge;
import org.logstash.instrument.witness.MetricSerializer;
import org.logstash.instrument.witness.SerializableWitness;
import org.logstash.instrument.witness.schedule.ScheduledWitness;
import java.io.IOException;
import java.lang.management.ManagementFactory;
import java.lang.management.OperatingSystemMXBean;
import java.util.concurrent.TimeUnit;
/**
* A scheduled witness for process metrics
*/
@JsonSerialize(using = ProcessWitness.Serializer.class)
public class ProcessWitness implements SerializableWitness, ScheduledWitness {
private static final OperatingSystemMXBean osMxBean;
private static final String KEY = "process";
public static final boolean isUnix;
private static final UnixOperatingSystemMXBean unixOsBean;
private final NumberGauge openFileDescriptors;
private final NumberGauge peakOpenFileDescriptors;
private final NumberGauge maxFileDescriptors;
private final Cpu cpu;
private final Memory memory;
private final Snitch snitch;
static {
osMxBean = ManagementFactory.getOperatingSystemMXBean();
isUnix = osMxBean instanceof UnixOperatingSystemMXBean;
unixOsBean = isUnix ? (UnixOperatingSystemMXBean) osMxBean : null;
}
/**
* Constructor
*/
public ProcessWitness() {
this.openFileDescriptors = new NumberGauge("open_file_descriptors", -1);
this.maxFileDescriptors = new NumberGauge("max_file_descriptors", -1);
this.peakOpenFileDescriptors = new NumberGauge("peak_open_file_descriptors", -1);
this.cpu = new Cpu();
this.memory = new Memory();
this.snitch = new Snitch(this);
}
@Override
public void refresh() {
if (isUnix) {
long currentOpen = unixOsBean.getOpenFileDescriptorCount();
openFileDescriptors.set(currentOpen);
if (peakOpenFileDescriptors.getValue() == null || peakOpenFileDescriptors.getValue().longValue() < currentOpen) {
peakOpenFileDescriptors.set(currentOpen);
}
maxFileDescriptors.set(unixOsBean.getMaxFileDescriptorCount());
//cpu and memory also read from the Unix-only MX bean, so only refresh them when it is available
cpu.refresh();
memory.refresh();
}
}
/**
* Get a reference to the associated snitch to get discrete metric values.
*
* @return the associated {@link Snitch}
*/
public Snitch snitch() {
return snitch;
}
/**
* An inner witness for the process / cpu metrics
*/
public class Cpu implements ScheduledWitness {
private static final String KEY = "cpu";
private final NumberGauge cpuProcessPercent;
private final NumberGauge cpuTotalInMillis;
private Cpu() {
this.cpuProcessPercent = new NumberGauge("percent", -1);
this.cpuTotalInMillis = new NumberGauge("total_in_millis", -1);
}
@Override
public void refresh() {
cpuProcessPercent.set(scaleLoadToPercent(unixOsBean.getProcessCpuLoad()));
cpuTotalInMillis.set(TimeUnit.MILLISECONDS.convert(unixOsBean.getProcessCpuTime(), TimeUnit.NANOSECONDS));
}
}
/**
* An inner witness for the process / memory metrics
*/
public class Memory implements ScheduledWitness {
private static final String KEY = "mem";
private final NumberGauge memTotalVirtualInBytes;
private Memory() {
memTotalVirtualInBytes = new NumberGauge("total_virtual_in_bytes", -1);
}
@Override
public void refresh() {
memTotalVirtualInBytes.set(unixOsBean.getCommittedVirtualMemorySize());
}
}
@Override
public void genJson(JsonGenerator gen, SerializerProvider provider) throws IOException {
Serializer.innerSerialize(this, gen);
}
/**
* The Jackson serializer.
*/
public static final class Serializer extends StdSerializer<ProcessWitness> {
/**
* Default constructor - required for Jackson
*/
public Serializer() {
this(ProcessWitness.class);
}
/**
* Constructor
*
* @param t the type to serialize
*/
protected Serializer(Class<ProcessWitness> t) {
super(t);
}
@Override
public void serialize(ProcessWitness witness, JsonGenerator gen, SerializerProvider provider) throws IOException {
gen.writeStartObject();
innerSerialize(witness, gen);
gen.writeEndObject();
}
static void innerSerialize(ProcessWitness witness, JsonGenerator gen) throws IOException {
MetricSerializer<Metric<Number>> numberSerializer = MetricSerializer.Get.numberSerializer(gen);
gen.writeObjectFieldStart(KEY);
numberSerializer.serialize(witness.openFileDescriptors);
numberSerializer.serialize(witness.peakOpenFileDescriptors);
numberSerializer.serialize(witness.maxFileDescriptors);
//memory
gen.writeObjectFieldStart(Memory.KEY);
numberSerializer.serialize(witness.memory.memTotalVirtualInBytes);
gen.writeEndObject();
//cpu
gen.writeObjectFieldStart(Cpu.KEY);
numberSerializer.serialize(witness.cpu.cpuTotalInMillis);
numberSerializer.serialize(witness.cpu.cpuProcessPercent);
//TODO: jake load average
gen.writeEndObject();
gen.writeEndObject();
}
}
/**
* The Process snitch. Provides a means to get discrete metric values.
*/
public static final class Snitch {
private final ProcessWitness witness;
private Snitch(ProcessWitness witness) {
this.witness = witness;
}
/**
* Get the number of open file descriptors for this process
*
* @return the open file descriptors
*/
public long openFileDescriptors() {
return witness.openFileDescriptors.getValue().longValue();
}
/**
* Get the max file descriptors for this process
*
* @return the max file descriptors
*/
public long maxFileDescriptors() {
return witness.maxFileDescriptors.getValue().longValue();
}
/**
* Get the high water mark of open file descriptors for this process
*
* @return the high water mark / peak of open file descriptors seen
*/
public long peakOpenFileDescriptors() {
return witness.peakOpenFileDescriptors.getValue().longValue();
}
/**
* Get the cpu percent for this process
*
* @return the cpu percent
*/
public short cpuProcessPercent() {
return witness.cpu.cpuProcessPercent.getValue().shortValue();
}
/**
* Get the total time of the cpu in milliseconds for this process
*
* @return the cpu total in milliseconds
*/
public long cpuTotalInMillis() {
return witness.cpu.cpuTotalInMillis.getValue().longValue();
}
/**
* Get the committed (virtual) memory for this process
*
* @return the committed memory
*/
public long memTotalVirtualInBytes() {
return witness.memory.memTotalVirtualInBytes.getValue().longValue();
}
}
private short scaleLoadToPercent(double load) {
if (isUnix && load >= 0) {
return Double.valueOf(Math.floor(load * 100)).shortValue();
} else {
return (short) -1;
}
}
}
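A hypothetical sketch of how ProcessWitness would be refreshed and read (the ProcessWitnessSketch class name is invented; values stay at -1 until refresh() runs on a Unix-like JVM, per the defaults above):
package org.logstash.instrument.witness.process;
public class ProcessWitnessSketch {
public static void main(String[] args) throws Exception {
ProcessWitness process = new ProcessWitness();
if (ProcessWitness.isUnix) {
process.refresh();                        // pull current values from the OS MX bean
}
ProcessWitness.Snitch snitch = process.snitch();
System.out.println("open fds: " + snitch.openFileDescriptors());
System.out.println("cpu %:    " + snitch.cpuProcessPercent());
System.out.println("vmem:     " + snitch.memTotalVirtualInBytes());
System.out.println(process.asJson());         // {"process":{...}}
}
}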

View file

@ -1,34 +0,0 @@
package org.logstash.instrument.witness.schedule;
import java.time.Duration;
/**
* A witness that is self-populating on a given schedule.
*/
public interface ScheduledWitness {
/**
* The duration between updates for this witness
*
* @return the {@link Duration} between scheduled updates. For example, {@link Duration#ofMinutes(long)} with a value of 5 would schedule this implementation to
* self-populate every 5 minutes. Defaults to every 10 seconds. Note: implementations may not allow schedules faster than every 1 second.
*/
default Duration every() {
//note - the system property is only an escape hatch in case this proves to cause performance issues. Do not document this system property; it is not part of the contract.
return Duration.ofSeconds(Long.parseLong(System.getProperty("witness.scheduled.duration.in.seconds", "10")));
}
/**
* Get the name to set for the thread on which this is scheduled. This is useful for debugging purposes. Defaults to the simple class name plus "-thread".
*
* @return The name for the scheduled thread.
*/
default String threadName() {
return getClass().getSimpleName() + "-thread";
}
/**
* Updates the underlying metrics on the given schedule.
*/
void refresh();
}
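As an illustration only (not from the removed sources), an implementer just provides refresh() and may override every(); the DiskSpaceWitness class and its metric are invented for this sketch:
package org.logstash.instrument.witness.schedule;
import java.io.File;
import java.time.Duration;
public class DiskSpaceWitness implements ScheduledWitness {
private volatile long freeBytes = -1;
@Override
public void refresh() {
//hypothetical self-populating logic; any metric collection goes here
freeBytes = new File("/").getFreeSpace();
}
@Override
public Duration every() {
return Duration.ofSeconds(30); // refresh every 30 seconds instead of the 10 second default
}
}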

View file

@ -1,80 +0,0 @@
package org.logstash.instrument.witness.schedule;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
/**
* Schedules {@link ScheduledWitness} to refresh themselves on an interval.
*/
public class WitnessScheduler {
private final ScheduledWitness witness;
private final ScheduledExecutorService executorService;
private static final Logger LOGGER = LogManager.getLogger(WitnessScheduler.class);
/**
* Constructor
*
* @param witness the {@link ScheduledWitness} to schedule
*/
public WitnessScheduler(ScheduledWitness witness) {
this.witness = witness;
this.executorService = Executors.newScheduledThreadPool(1, ((Runnable r) -> {
Thread t = new Thread(r);
//Allow this thread to simply die when the JVM dies
t.setDaemon(true);
//Set the name
t.setName(witness.threadName());
return t;
}));
}
/**
* Schedules the witness to refresh on the provided schedule. Note: this implementation does not allow refreshes faster than every 1 second.
*/
public void schedule() {
executorService.scheduleAtFixedRate(new RefreshRunnable(), 0, witness.every().getSeconds(), TimeUnit.SECONDS);
}
/**
* Shuts down the underlying executor service. Since these are daemon threads, this is not absolutely necessary.
*/
public void shutdown(){
executorService.shutdown();
try {
if(!executorService.awaitTermination(5, TimeUnit.SECONDS)){
executorService.shutdownNow();
}
} catch (InterruptedException e) {
throw new IllegalStateException("Witness should be scheduled from the main thread, and the main thread does not expect to be interrupted", e);
}
}
/**
* Runnable that won't cancel the scheduled task when refresh throws an exception, and throttles the log message.
*/
class RefreshRunnable implements Runnable {
long lastLogged = 0;
@Override
public void run() {
try {
witness.refresh();
} catch (Exception e) {
long now = System.currentTimeMillis();
//throttle to only log the warning if it hasn't been logged in the past 120 seconds; this ensures at least 1 log message and logging for intermittent issues,
// but keeps the log file from flooding when a repeating error occurs on every schedule
if (lastLogged == 0 || now - lastLogged > 120_000) {
LOGGER.warn("Can not fully refresh the metrics for the " + witness.getClass().getSimpleName(), e);
}
lastLogged = now;
}
}
}
}
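And a matching sketch of how such a witness would be driven by WitnessScheduler (DiskSpaceWitness is the hypothetical implementer from the previous sketch; the sleep is only there to let a few refreshes happen):
package org.logstash.instrument.witness.schedule;
public class WitnessSchedulerSketch {
public static void main(String[] args) throws InterruptedException {
ScheduledWitness witness = new DiskSpaceWitness();
WitnessScheduler scheduler = new WitnessScheduler(witness);
scheduler.schedule();      // refresh() runs on the witness's own daemon thread
Thread.sleep(60_000);      // let a couple of refreshes run
scheduler.shutdown();      // stop the executor; daemon threads would die with the JVM anyway
}
}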

View file

@ -1,93 +0,0 @@
package org.logstash.instrument.witness;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.Before;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
/**
* Unit tests for {@link Witness}
*/
public class WitnessTest {
private Witness witness;
@Before
public void setup() {
Witness.setInstance(null);
}
@Test
public void testInstance() {
witness = new Witness();
Witness.setInstance(witness);
assertThat(Witness.instance()).isEqualTo(witness);
}
@Test(expected = IllegalStateException.class)
public void testNoInstanceError() {
Witness.instance();
}
@Test
public void testNotNull() {
witness = new Witness();
Witness.setInstance(witness);
assertThat(witness.events()).isNotNull();
assertThat(witness.reloads()).isNotNull();
assertThat(witness.pipelines()).isNotNull();
assertThat(witness.pipeline("foo")).isNotNull();
}
@Test
public void testAsJson() throws Exception {
witness = new Witness();
ObjectMapper mapper = new ObjectMapper();
assertThat(mapper.writeValueAsString(witness)).isEqualTo(witness.asJson());
}
@Test
public void testSerializeEmpty() throws Exception {
witness = new Witness();
String json = witness.asJson();
//empty pipelines
assertThat(json).isEqualTo("{\"process\":{\"open_file_descriptors\":-1,\"peak_open_file_descriptors\":-1,\"max_file_descriptors\":-1," +
"\"mem\":{\"total_virtual_in_bytes\":-1},\"cpu\":{\"total_in_millis\":-1,\"percent\":-1}},\"events\":{\"duration_in_millis\":0,\"in\":0,\"out\":0,\"filtered\":0," +
"\"queue_push_duration_in_millis\":0},\"reloads\":{\"last_error\":{\"message\":null,\"backtrace\":null},\"successes\":0,\"last_success_timestamp\":null," +
"\"last_failure_timestamp\":null,\"failures\":0},\"pipelines\":{}}");
}
@Test
public void testSerializeEvents() throws Exception {
witness = new Witness();
witness.events().in(99);
String json = witness.asJson();
assertThat(json).contains("\"in\":99");
witness.events().forgetAll();
json = witness.asJson();
assertThat(json).doesNotContain("99");
}
@Test
public void testSerializePipelines() throws Exception {
witness = new Witness();
witness.pipeline("foo").events().in(98);
witness.pipeline("foo").inputs("bar").events().in(99);
String json = witness.asJson();
assertThat(json).contains("\"pipelines\":{\"foo\"");
//pipeline events
assertThat(json).contains("foo").contains("in").contains(":98");
//plugin events
assertThat(json).contains("\"in\":99");
//forget events
witness.pipeline("foo").forgetEvents();
json = witness.asJson();
assertThat(json).doesNotContain("98");
//forget plugins
witness.pipeline("foo").forgetPlugins();
json = witness.asJson();
assertThat(json).doesNotContain("99");
//pipelines still there
assertThat(json).contains("\"pipelines\":{\"foo\"");
}
}

View file

@ -1,143 +0,0 @@
package org.logstash.instrument.witness.configuration;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.Before;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
/**
* Unit tests for {@link ConfigWitness}
*/
public class ConfigWitnessTest {
private ConfigWitness witness;
@Before
public void setup() {
witness = new ConfigWitness();
}
@Test
public void testBatchDelay() {
assertThat(witness.snitch().batchDelay()).isNull();
witness.batchDelay(99);
assertThat(witness.snitch().batchDelay()).isEqualTo(99l);
}
@Test
public void testBatchSize() {
assertThat(witness.snitch().batchSize()).isNull();
witness.batchSize(98);
assertThat(witness.snitch().batchSize()).isEqualTo(98l);
}
@Test
public void testConfigReloadAutomatic() {
assertThat(witness.snitch().configReloadAutomatic()).isFalse();
witness.configReloadAutomatic(true);
assertThat(witness.snitch().configReloadAutomatic()).isTrue();
witness.configReloadAutomatic(false);
assertThat(witness.snitch().configReloadAutomatic()).isFalse();
}
@Test
public void testConfigReloadInterval() {
assertThat(witness.snitch().configReloadInterval()).isNull();
witness.configReloadInterval(97);
assertThat(witness.snitch().configReloadInterval()).isEqualTo(97l);
}
@Test
public void testDeadLetterQueueEnabled() {
assertThat(witness.snitch().deadLetterQueueEnabled()).isFalse();
witness.deadLetterQueueEnabled(true);
assertThat(witness.snitch().deadLetterQueueEnabled()).isTrue();
witness.deadLetterQueueEnabled(false);
assertThat(witness.snitch().deadLetterQueueEnabled()).isFalse();
}
@Test
public void testDeadLetterQueuePath() {
assertThat(witness.snitch().deadLetterQueuePath()).isNull();
witness.deadLetterQueuePath("/var/dlq");
assertThat(witness.snitch().deadLetterQueuePath()).isEqualTo("/var/dlq");
}
@Test
public void testWorkers() {
assertThat(witness.snitch().workers()).isNull();
witness.workers(96);
assertThat(witness.snitch().workers()).isEqualTo(96l);
}
@Test
public void testAsJson() throws Exception {
ObjectMapper mapper = new ObjectMapper();
assertThat(mapper.writeValueAsString(witness)).isEqualTo(witness.asJson()).contains("config");
}
@Test
public void testSerializeEmpty() throws Exception {
String json = witness.asJson();
assertThat(json).isEqualTo("{\"config\":{\"batch_size\":0,\"workers\":0,\"batch_delay\":0,\"config_reload_interval\":0,\"config_reload_automatic\":false," +
"\"dead_letter_queue_enabled\":false,\"dead_letter_queue_path\":null}}");
}
@Test
public void testSerializeBatchSize() throws Exception {
witness.batchSize(999);
String json = witness.asJson();
assertThat(json).contains("999");
}
@Test
public void testSerializeWorkersSize() throws Exception {
witness.workers(888);
String json = witness.asJson();
assertThat(json).isEqualTo("{\"config\":{\"batch_size\":0,\"workers\":888,\"batch_delay\":0,\"config_reload_interval\":0,\"config_reload_automatic\":false," +
"\"dead_letter_queue_enabled\":false,\"dead_letter_queue_path\":null}}");
}
@Test
public void testSerializeBatchDelay() throws Exception {
witness.batchDelay(777);
String json = witness.asJson();
assertThat(json).isEqualTo("{\"config\":{\"batch_size\":0,\"workers\":0,\"batch_delay\":777,\"config_reload_interval\":0,\"config_reload_automatic\":false," +
"\"dead_letter_queue_enabled\":false,\"dead_letter_queue_path\":null}}");
}
@Test
public void testSerializeAutoConfigReload() throws Exception {
witness.configReloadAutomatic(true);
String json = witness.asJson();
assertThat(json).isEqualTo("{\"config\":{\"batch_size\":0,\"workers\":0,\"batch_delay\":0,\"config_reload_interval\":0,\"config_reload_automatic\":true," +
"\"dead_letter_queue_enabled\":false,\"dead_letter_queue_path\":null}}");
}
@Test
public void testSerializeReloadInterval() throws Exception {
witness.configReloadInterval(666);
String json = witness.asJson();
assertThat(json).isEqualTo("{\"config\":{\"batch_size\":0,\"workers\":0,\"batch_delay\":0,\"config_reload_interval\":666,\"config_reload_automatic\":false," +
"\"dead_letter_queue_enabled\":false,\"dead_letter_queue_path\":null}}");
}
@Test
public void testSerializeEnableDeadLetterQueue() throws Exception {
witness.deadLetterQueueEnabled(true);
String json = witness.asJson();
assertThat(json).isEqualTo("{\"config\":{\"batch_size\":0,\"workers\":0,\"batch_delay\":0,\"config_reload_interval\":0,\"config_reload_automatic\":false," +
"\"dead_letter_queue_enabled\":true,\"dead_letter_queue_path\":null}}");
}
@Test
public void testSerializeEnableDeadLetterPath() throws Exception {
witness.deadLetterQueuePath("/var/dlq");
String json = witness.asJson();
assertThat(json).isEqualTo("{\"config\":{\"batch_size\":0,\"workers\":0,\"batch_delay\":0,\"config_reload_interval\":0,\"config_reload_automatic\":false," +
"\"dead_letter_queue_enabled\":false,\"dead_letter_queue_path\":\"/var/dlq\"}}");
}
}

View file

@ -1,47 +0,0 @@
package org.logstash.instrument.witness.pipeline;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.Before;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
/**
* Unit tests for {@link DeadLetterQueueWitness}
*/
public class DeadLetterQueueWitnessTest {
private DeadLetterQueueWitness witness;
@Before
public void setup() {
witness = new DeadLetterQueueWitness();
}
@Test
public void queueSizeInBytes() {
assertThat(witness.snitch().queueSizeInBytes()).isNull();
witness.queueSizeInBytes(99);
assertThat(witness.snitch().queueSizeInBytes()).isEqualTo(99l);
}
@Test
public void testAsJson() throws Exception {
ObjectMapper mapper = new ObjectMapper();
assertThat(mapper.writeValueAsString(witness)).isEqualTo(witness.asJson());
}
@Test
public void testSerializeEmpty() throws Exception {
String json = witness.asJson();
assertThat(json).isEqualTo("{\"dead_letter_queue\":{\"queue_size_in_bytes\":0}}");
}
@Test
public void testSerializeQueueSize() throws Exception {
witness.queueSizeInBytes(98);
String json = witness.asJson();
assertThat(json).isEqualTo("{\"dead_letter_queue\":{\"queue_size_in_bytes\":98}}");
}
}

View file

@ -1,70 +0,0 @@
package org.logstash.instrument.witness.pipeline;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.Before;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
/**
* Unit tests for {@link ErrorWitness}
*/
public class ErrorWitnessTest {
private ErrorWitness witness;
@Before
public void setup() {
witness = new ErrorWitness();
}
@Test
public void backtrace() {
//as String
witness.backtrace("foo");
assertThat(witness.snitch().backtrace()).isEqualTo("foo");
//as Exception
RuntimeException exception = new RuntimeException("foobar");
witness.backtrace(exception);
for (StackTraceElement element : exception.getStackTrace()) {
assertThat(witness.snitch().backtrace()).contains(element.toString());
}
}
@Test
public void message() {
witness.message("baz");
assertThat(witness.snitch().message()).isEqualTo("baz");
}
@Test
public void testAsJson() throws Exception {
ObjectMapper mapper = new ObjectMapper();
assertThat(mapper.writeValueAsString(witness)).isEqualTo(witness.asJson()).contains("last_error");
}
@Test
public void testSerializeEmpty() throws Exception {
String json = witness.asJson();
assertThat(json).isEqualTo("{\"last_error\":{\"message\":null,\"backtrace\":null}}");
}
@Test
public void testSerializeMessage() throws Exception {
witness.message("whoops");
String json = witness.asJson();
assertThat(json).isEqualTo("{\"last_error\":{\"message\":\"whoops\",\"backtrace\":null}}");
}
@Test
public void testSerializeBackTrace() throws Exception {
witness.backtrace("ruby, backtrace");
String json = witness.asJson();
assertThat(json).contains("ruby").contains("backtrace");
witness.backtrace(new RuntimeException("Uh oh!"));
json = witness.asJson();
assertThat(json).contains("Uh oh!").contains("ErrorWitnessTest");
}
}

View file

@ -1,147 +0,0 @@
package org.logstash.instrument.witness.pipeline;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.Before;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
/**
* Unit tests for {@link EventsWitness}
*/
public class EventsWitnessTest {
private EventsWitness witness;
@Before
public void setup() {
witness = new EventsWitness();
}
@Test
public void testDuration() {
witness.duration(99);
assertThat(witness.snitch().duration()).isEqualTo(99);
witness.duration(1);
assertThat(witness.snitch().duration()).isEqualTo(100);
}
@Test
public void testFiltered() {
witness.filtered(88);
assertThat(witness.snitch().filtered()).isEqualTo(88);
witness.filtered();
assertThat(witness.snitch().filtered()).isEqualTo(89);
}
@Test
public void testForget() {
witness.duration(99);
witness.filtered(88);
witness.in(66);
witness.out(55);
witness.queuePushDuration(44);
assertThat(witness.snitch().duration()).isEqualTo(99);
assertThat(witness.snitch().in()).isEqualTo(66);
assertThat(witness.snitch().out()).isEqualTo(55);
assertThat(witness.snitch().queuePushDuration()).isEqualTo(44);
witness.forgetAll();
assertThat(witness.snitch().duration()).isEqualTo(0);
assertThat(witness.snitch().in()).isEqualTo(0);
assertThat(witness.snitch().out()).isEqualTo(0);
assertThat(witness.snitch().queuePushDuration()).isEqualTo(0);
}
@Test
public void testIn() {
witness.in(66);
assertThat(witness.snitch().in()).isEqualTo(66);
witness.in();
assertThat(witness.snitch().in()).isEqualTo(67);
}
@Test
public void testOut() {
witness.out(55);
assertThat(witness.snitch().out()).isEqualTo(55);
witness.out();
assertThat(witness.snitch().out()).isEqualTo(56);
}
@Test
public void testQueuePushDuration() {
witness.queuePushDuration(44);
assertThat(witness.snitch().queuePushDuration()).isEqualTo(44);
witness.queuePushDuration(1);
assertThat(witness.snitch().queuePushDuration()).isEqualTo(45);
}
@Test
public void testAsJson() throws Exception {
ObjectMapper mapper = new ObjectMapper();
//empty
assertThat(mapper.writeValueAsString(witness)).isEqualTo(witness.asJson());
//dirty
witness.in(1);
assertThat(mapper.writeValueAsString(witness)).isEqualTo(witness.asJson()).contains("events");
}
@Test
public void testSerializeEmpty() throws Exception {
assertThat(witness.asJson()).isNotEmpty();
}
@Test
public void testSerializeDuration() throws Exception {
witness.duration(999);
String json = witness.asJson();
assertThat(json).contains("999");
witness.forgetAll();
json = witness.asJson();
assertThat(json).doesNotContain("999");
}
@Test
public void testSerializeIn() throws Exception {
witness.in(888);
String json = witness.asJson();
assertThat(json).contains("888");
witness.forgetAll();
json = witness.asJson();
assertThat(json).doesNotContain("888");
}
@Test
public void testSerializeFiltered() throws Exception {
witness.filtered(777);
String json = witness.asJson();
assertThat(json).contains("777");
witness.forgetAll();
json = witness.asJson();
assertThat(json).doesNotContain("777");
}
@Test
public void testSerializeOut() throws Exception {
witness.out(666);
String json = witness.asJson();
assertThat(json).contains("666");
witness.forgetAll();
json = witness.asJson();
assertThat(json).doesNotContain("666");
}
@Test
public void testSerializeQueueDuration() throws Exception {
witness.queuePushDuration(555);
String json = witness.asJson();
assertThat(json).contains("555");
witness.forgetAll();
json = witness.asJson();
assertThat(json).doesNotContain("555");
}
}

View file

@ -1,131 +0,0 @@
package org.logstash.instrument.witness.pipeline;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.Before;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
/**
* Unit tests for {@link PipelineWitness}
*/
public class PipelineWitnessTest {
private PipelineWitness witness;
@Before
public void setup() {
witness = new PipelineWitness("default");
}
@Test
public void testNotNull() {
assertThat(witness.inputs("123")).isNotNull();
assertThat(witness.filters("456")).isNotNull();
assertThat(witness.outputs("789")).isNotNull();
assertThat(witness.events()).isNotNull();
assertThat(witness.plugins()).isNotNull();
assertThat(witness.queue()).isNotNull();
assertThat(witness.config()).isNotNull();
assertThat(witness.reloads()).isNotNull();
}
@Test
public void testForget() {
witness.inputs("123").events().in(99);
witness.filters("456").events().in(98);
witness.outputs("789").events().in(97);
assertThat(witness.inputs("123").events().snitch().in()).isEqualTo(99);
assertThat(witness.filters("456").events().snitch().in()).isEqualTo(98);
assertThat(witness.outputs("789").events().snitch().in()).isEqualTo(97);
witness.events().in(99);
witness.events().filtered(98);
witness.events().out(97);
assertThat(witness.events().snitch().in()).isEqualTo(99);
assertThat(witness.events().snitch().filtered()).isEqualTo(98);
assertThat(witness.events().snitch().out()).isEqualTo(97);
witness.queue().type("memory");
witness.forgetPlugins();
witness.forgetEvents();
assertThat(witness.inputs("123").events().snitch().in())
.isEqualTo(witness.filters("456").events().snitch().in())
.isEqualTo(witness.outputs("789").events().snitch().in())
.isEqualTo(witness.events().snitch().in())
.isEqualTo(witness.events().snitch().filtered())
.isEqualTo(witness.events().snitch().out())
.isEqualTo(0);
assertThat(witness.queue().snitch().type()).isEqualTo("memory");
}
@Test
public void testAsJson() throws Exception {
ObjectMapper mapper = new ObjectMapper();
assertThat(mapper.writeValueAsString(witness)).isEqualTo(witness.asJson());
}
@Test
public void testSerializeEmpty() throws Exception {
String json = witness.asJson();
assertThat(json).isEqualTo("{\"default\":{\"events\":{\"duration_in_millis\":0,\"in\":0,\"out\":0,\"filtered\":0,\"queue_push_duration_in_millis\":0}," +
"\"plugins\":{\"inputs\":[],\"filters\":[],\"outputs\":[]},\"reloads\":{\"last_error\":{\"message\":null,\"backtrace\":null},\"successes\":0," +
"\"last_success_timestamp\":null,\"last_failure_timestamp\":null,\"failures\":0},\"queue\":{\"type\":null}}}");
}
@Test
public void testSerializeEvents() throws Exception {
witness.events().in(99);
String json = witness.asJson();
assertThat(json).contains("99");
witness.forgetEvents();
json = witness.asJson();
//events are forgotten
assertThat(json).doesNotContain("99");
}
@Test
public void testSerializePlugins() throws Exception {
witness.inputs("aaa");
witness.filters("bbb");
witness.outputs("ccc");
String json = witness.asJson();
assertThat(json).contains("aaa").contains("bbb").contains("ccc");
witness.forgetPlugins();
json = witness.asJson();
//plugins are forgotten
assertThat(json).doesNotContain("aaa").doesNotContain("bbb").doesNotContain("ccc");
}
@Test
public void testSerializeReloads() throws Exception {
witness.reloads().successes(98);
String json = witness.asJson();
assertThat(json).contains("98");
}
@Test
public void testSerializeQueue() throws Exception {
witness.queue().type("quantum");
String json = witness.asJson();
assertThat(json).contains("quantum");
}
/**
* Only serialize the DeadLetterQueue if enabled
* @throws Exception if an Exception is thrown.
*/
@Test
public void testSerializeDeadLetterQueue() throws Exception {
witness.config().deadLetterQueueEnabled(false);
String json = witness.asJson();
assertThat(json).doesNotContain("dead_letter_queue");
witness.config().deadLetterQueueEnabled(true);
json = witness.asJson();
assertThat(json).contains("\"dead_letter_queue\":{\"queue_size_in_bytes\":0}");
}
}

View file

@ -1,51 +0,0 @@
package org.logstash.instrument.witness.pipeline;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.Before;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
/**
* Unit tests for {@link PipelinesWitness}
*/
public class PipelinesWitnessTest {
private PipelinesWitness witness;
@Before
public void setup() {
witness = new PipelinesWitness();
}
@Test
public void testPipeline() {
//once to create
assertThat(witness.pipeline("default")).isNotNull();
//again to assert it can pull from the map
assertThat(witness.pipeline("default")).isNotNull();
}
@Test
public void testAsJson() throws Exception {
ObjectMapper mapper = new ObjectMapper();
assertThat(mapper.writeValueAsString(witness)).isEqualTo(witness.asJson()).contains("pipelines");
}
@Test
public void testSerializeEmpty() throws Exception {
String json = witness.asJson();
assertThat(json).isEqualTo("{\"pipelines\":{}}");
}
@Test
public void testSerializePipelines() throws Exception {
witness.pipeline("aaa");
witness.pipeline("bbb");
witness.pipeline("ccc");
String json = witness.asJson();
assertThat(json).contains("aaa").contains("bbb").contains("ccc");
}
}

View file

@ -1,169 +0,0 @@
package org.logstash.instrument.witness.pipeline;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.jruby.RubySymbol;
import org.junit.Before;
import org.junit.Test;
import org.logstash.RubyUtil;
import org.logstash.instrument.metrics.MetricType;
import java.io.IOException;
import java.math.BigDecimal;
import java.net.URI;
import java.util.UUID;
import static org.assertj.core.api.Assertions.assertThat;
/**
* Unit tests for {@link PluginWitness}
*/
public class PluginWitnessTest {
private PluginWitness witness;
@Before
public void setup() {
witness = new PluginWitness("123");
assertThat(witness.snitch().id()).isEqualTo("123");
}
@Test
public void testName() {
assertThat(witness.name("abc")).isEqualTo(witness);
assertThat(witness.snitch().name()).isEqualTo("abc");
}
@Test
public void testCustomGauge() {
witness.custom().gauge("a", "foo");
witness.custom().gauge("b", 1);
witness.custom().gauge("c", true);
witness.custom().gauge("d", URI.create("unknown"));
assertThat(witness.custom().snitch().gauges().size()).isEqualTo(4);
assertThat(witness.custom().snitch().gauge("a").getValue()).isEqualTo("foo");
assertThat(witness.custom().snitch().gauge("a").getType()).isEqualTo(MetricType.GAUGE_TEXT);
assertThat(witness.custom().snitch().gauge("b").getValue()).isEqualTo(1);
assertThat(witness.custom().snitch().gauge("b").getType()).isEqualTo(MetricType.GAUGE_NUMBER);
assertThat(witness.custom().snitch().gauge("c").getValue()).isEqualTo(Boolean.TRUE);
assertThat(witness.custom().snitch().gauge("c").getType()).isEqualTo(MetricType.GAUGE_BOOLEAN);
assertThat(witness.custom().snitch().gauge("d").getValue()).isEqualTo(URI.create("unknown"));
assertThat(witness.custom().snitch().gauge("d").getType()).isEqualTo(MetricType.GAUGE_UNKNOWN);
}
@Test
public void testCustomCounter(){
witness.custom().increment("foo");
witness.custom().increment("bar");
assertThat(witness.custom().snitch().counters().size()).isEqualTo(2);
assertThat(witness.custom().snitch().counters().values().stream().allMatch(v -> MetricType.COUNTER_LONG.equals(v.getType()))).isTrue();
assertThat(witness.custom().snitch().counter("foo").getValue()).isEqualTo(1l);
assertThat(witness.custom().snitch().counter("bar").getValue()).isEqualTo(1l);
witness.custom().increment("foo");
witness.custom().increment("foo");
witness.custom().increment("bar");
assertThat(witness.custom().snitch().counter("foo").getValue()).isEqualTo(3l);
assertThat(witness.custom().snitch().counter("bar").getValue()).isEqualTo(2l);
}
@Test
public void testRubySymbol() throws IOException {
RubySymbol symbol = RubySymbol.newSymbol(RubyUtil.RUBY, "mysymbol");
witness.custom().increment(symbol);
witness.custom().increment(symbol, 99);
assertThat(witness.custom().snitch().counter("mysymbol").getValue()).isEqualTo(100l);
witness.custom().gauge(symbol, "blah");
assertThat(witness.custom().snitch().gauge("mysymbol").getValue()).isEqualTo("blah");
witness.custom().gauge(symbol, "blah2");
assertThat(witness.custom().snitch().gauge("mysymbol").getValue()).isEqualTo("blah2");
}
@Test
public void testCustomNotSet(){
assertThat(witness.custom().snitch().counter("nothing")).isNull();
assertThat(witness.custom().snitch().gauge("nothing")).isNull();
assertThat(witness.custom().snitch().gauges()).isEmpty();
assertThat(witness.custom().snitch().counters()).isEmpty();
}
@Test
public void testEvents() {
assertThat(witness.events()).isNotNull();
}
@Test
public void testAsJson() throws Exception {
ObjectMapper mapper = new ObjectMapper();
assertThat(mapper.writeValueAsString(witness)).isEqualTo(witness.asJson());
}
@Test
public void testSerializationEmpty() throws Exception {
String json = witness.asJson();
assertThat(json).isEqualTo("{\"id\":\"123\",\"events\":{\"duration_in_millis\":0,\"in\":0,\"out\":0,\"filtered\":0,\"queue_push_duration_in_millis\":0},\"name\":null}");
}
@Test
public void testSerializationName() throws Exception {
witness.name("abc");
String json = witness.asJson();
assertThat(json).isEqualTo("{\"id\":\"123\",\"events\":{\"duration_in_millis\":0,\"in\":0,\"out\":0,\"filtered\":0,\"queue_push_duration_in_millis\":0},\"name\":\"abc\"}");
}
@Test
public void testSerializationEvents() throws Exception {
witness.events().in();
String json = witness.asJson();
assertThat(json).isEqualTo("{\"id\":\"123\",\"events\":{\"duration_in_millis\":0,\"in\":1,\"out\":0,\"filtered\":0,\"queue_push_duration_in_millis\":0},\"name\":null}");
}
@Test
public void testSerializationCustomCounter() throws Exception {
witness.custom().increment("a");
witness.custom().increment("a");
witness.custom().increment("b");
String json = witness.asJson();
assertThat(json).isEqualTo("{\"id\":\"123\",\"events\":{\"duration_in_millis\":0,\"in\":0,\"out\":0,\"filtered\":0,\"queue_push_duration_in_millis\":0},\"name\":null," +
"\"a\":2,\"b\":1}");
}
@Test
public void testSerializationCustomGauge() throws Exception {
witness.custom().gauge("a", "foo");
witness.custom().gauge("b", 1);
witness.custom().gauge("c", true);
String json = witness.asJson();
assertThat(json).isEqualTo("{\"id\":\"123\",\"events\":{\"duration_in_millis\":0,\"in\":0,\"out\":0,\"filtered\":0,\"queue_push_duration_in_millis\":0},\"name\":null," +
"\"a\":\"foo\",\"b\":1,\"c\":true}");
}
@Test
public void testSerializationCustomGaugeNumericTypes() throws Exception {
short a = 1;
int b = 1;
float c = 1;
double d = 1;
BigDecimal e = new BigDecimal(1);
witness.custom().gauge("a", a);
witness.custom().gauge("b", b);
witness.custom().gauge("c", c);
witness.custom().gauge("d", d);
witness.custom().gauge("e", e);
String json = witness.asJson();
assertThat(json).isEqualTo("{\"id\":\"123\",\"events\":{\"duration_in_millis\":0,\"in\":0,\"out\":0,\"filtered\":0,\"queue_push_duration_in_millis\":0},\"name\":null," +
"\"a\":1,\"b\":1,\"c\":1.0,\"d\":1.0,\"e\":1}");
}
@Test(expected = IllegalStateException.class)
public void testSerializationUnknownCustomGauge() throws Exception {
//There are no default Jackson serializers for UUID
witness.custom().gauge("a", UUID.randomUUID());
witness.asJson();
}
}

View file

@ -1,96 +0,0 @@
package org.logstash.instrument.witness.pipeline;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.Before;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
/**
* Unit tests for {@link PluginsWitness}
*/
public class PluginsWitnessTest {
private PluginsWitness witness;
@Before
public void setup(){
witness = new PluginsWitness();
}
@Test
public void testForget(){
witness.inputs("1").events().in(99);
assertThat(witness.inputs("1").events().snitch().in()).isEqualTo(99);
witness.filters("1").events().in(98);
assertThat(witness.filters("1").events().snitch().in()).isEqualTo(98);
witness.outputs("1").events().in(97);
assertThat(witness.outputs("1").events().snitch().in()).isEqualTo(97);
witness.codecs("1").events().in(96);
assertThat(witness.codecs("1").events().snitch().in()).isEqualTo(96);
witness.forgetAll();
assertThat(witness.inputs("1").events().snitch().in()).isEqualTo(0);
assertThat(witness.filters("1").events().snitch().filtered()).isEqualTo(0);
assertThat(witness.outputs("1").events().snitch().in()).isEqualTo(0);
assertThat(witness.codecs("1").events().snitch().in()).isEqualTo(0);
}
@Test
public void testAsJson() throws Exception {
ObjectMapper mapper = new ObjectMapper();
assertThat(mapper.writeValueAsString(witness)).isEqualTo(witness.asJson());
}
@Test
public void testSerializeEmpty() throws Exception{
String json = witness.asJson();
assertThat(json).isEqualTo("{\"plugins\":{\"inputs\":[],\"filters\":[],\"outputs\":[]}}");
}
@Test
public void testSerializeInput() throws Exception{
witness.inputs("foo");
String json = witness.asJson();
assertThat(json).isEqualTo("{\"plugins\":{\"inputs\":[{\"id\":\"foo\",\"events\":{\"duration_in_millis\":0,\"in\":0,\"out\":0,\"filtered\":0," +
"\"queue_push_duration_in_millis\":0},\"name\":null}],\"filters\":[],\"outputs\":[]}}");
witness.forgetAll();
json = witness.asJson();
assertThat(json).isEqualTo("{\"plugins\":{\"inputs\":[],\"filters\":[],\"outputs\":[]}}");
}
@Test
public void testSerializeFilters() throws Exception{
witness.filters("foo");
String json = witness.asJson();
assertThat(json).isEqualTo("{\"plugins\":{\"inputs\":[],\"filters\":[{\"id\":\"foo\",\"events\":{\"duration_in_millis\":0,\"in\":0,\"out\":0,\"filtered\":0," +
"\"queue_push_duration_in_millis\":0},\"name\":null}],\"outputs\":[]}}");
witness.forgetAll();
json = witness.asJson();
assertThat(json).isEqualTo("{\"plugins\":{\"inputs\":[],\"filters\":[],\"outputs\":[]}}");
}
@Test
public void testSerializeOutputs() throws Exception{
witness.outputs("foo");
String json = witness.asJson();
assertThat(json).isEqualTo("{\"plugins\":{\"inputs\":[],\"filters\":[],\"outputs\":[{\"id\":\"foo\",\"events\":{\"duration_in_millis\":0,\"in\":0,\"out\":0," +
"\"filtered\":0,\"queue_push_duration_in_millis\":0},\"name\":null}]}}");
witness.forgetAll();
json = witness.asJson();
assertThat(json).isEqualTo("{\"plugins\":{\"inputs\":[],\"filters\":[],\"outputs\":[]}}");
}
@Test
public void testSerializeCodecs() throws Exception{
witness.codecs("foo");
String json = witness.asJson();
//codecs are not currently serialized.
assertThat(json).isEqualTo("{\"plugins\":{\"inputs\":[],\"filters\":[],\"outputs\":[]}}");
witness.forgetAll();
json = witness.asJson();
assertThat(json).isEqualTo("{\"plugins\":{\"inputs\":[],\"filters\":[],\"outputs\":[]}}");
}
}

View file

@ -1,174 +0,0 @@
package org.logstash.instrument.witness.pipeline;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.Before;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
/**
* Unit tests for {@link QueueWitness}
*/
public class QueueWitnessTest {
private QueueWitness witness;
@Before
public void setup() {
witness = new QueueWitness();
}
@Test
public void testType() {
witness.type("memory");
assertThat(witness.snitch().type()).isEqualTo("memory");
}
@Test
public void testEvents() {
assertThat(witness.snitch().events()).isNull();
witness.events(101);
assertThat(witness.snitch().events()).isEqualTo(101l);
}
@Test
public void testQueueSizeInBytes(){
witness.capacity().queueSizeInBytes(99);
assertThat(witness.capacity().snitch().queueSizeInBytes()).isEqualTo(99l);
}
@Test
public void testPageCapacityInBytes(){
witness.capacity().pageCapacityInBytes(98);
assertThat(witness.capacity().snitch().pageCapacityInBytes()).isEqualTo(98l);
}
@Test
public void testMaxQueueSizeInBytes(){
witness.capacity().maxQueueSizeInBytes(97);
assertThat(witness.capacity().snitch().maxQueueSizeInBytes()).isEqualTo(97l);
}
@Test
public void testMaxUnreadEvents(){
witness.capacity().maxUnreadEvents(96);
assertThat(witness.capacity().snitch().maxUnreadEvents()).isEqualTo(96l);
}
@Test
public void testPath(){
witness.data().path("/var/ls/q");
assertThat(witness.data().snitch().path()).isEqualTo("/var/ls/q");
}
@Test
public void testFreeSpace(){
witness.data().freeSpaceInBytes(77);
assertThat(witness.data().snitch().freeSpaceInBytes()).isEqualTo(77l);
}
@Test
public void testStorageType(){
witness.data().storageType("ext4");
assertThat(witness.data().snitch().storageType()).isEqualTo("ext4");
}
@Test
public void testAsJson() throws Exception {
ObjectMapper mapper = new ObjectMapper();
assertThat(mapper.writeValueAsString(witness)).isEqualTo(witness.asJson());
}
@Test
public void testSerializeEmpty() throws Exception {
String json = witness.asJson();
assertThat(json).isEqualTo("{\"queue\":{\"type\":null}}");
}
@Test
public void testSerializeMemoryType() throws Exception {
witness.type("memory");
String json = witness.asJson();
assertThat(json).isEqualTo("{\"queue\":{\"type\":\"memory\"}}");
}
@Test
public void testSerializePersistedType() throws Exception {
witness.type("persisted");
String json = witness.asJson();
assertThat(json).isEqualTo("{\"queue\":{\"type\":\"persisted\",\"events\":0,\"capacity\":{\"queue_size_in_bytes\":0,\"page_capacity_in_bytes\":0," +
"\"max_queue_size_in_bytes\":0,\"max_unread_events\":0},\"data\":{\"path\":null,\"free_space_in_bytes\":0,\"storage_type\":null}}}");
}
@Test
public void testSerializeQueueSize() throws Exception {
witness.type("persisted");
witness.capacity().queueSizeInBytes(88);
String json = witness.asJson();
assertThat(json).isEqualTo("{\"queue\":{\"type\":\"persisted\",\"events\":0,\"capacity\":{\"queue_size_in_bytes\":88,\"page_capacity_in_bytes\":0," +
"\"max_queue_size_in_bytes\":0,\"max_unread_events\":0},\"data\":{\"path\":null,\"free_space_in_bytes\":0,\"storage_type\":null}}}");
}
@Test
public void testSerializeQueuePageCapacity() throws Exception {
witness.type("persisted");
witness.capacity().pageCapacityInBytes(87);
String json = witness.asJson();
assertThat(json).isEqualTo("{\"queue\":{\"type\":\"persisted\",\"events\":0,\"capacity\":{\"queue_size_in_bytes\":0,\"page_capacity_in_bytes\":87," +
"\"max_queue_size_in_bytes\":0,\"max_unread_events\":0},\"data\":{\"path\":null,\"free_space_in_bytes\":0,\"storage_type\":null}}}");
}
@Test
public void testSerializeMaxQueueSize() throws Exception {
witness.type("persisted");
witness.capacity().maxUnreadEvents(86);
String json = witness.asJson();
assertThat(json).isEqualTo("{\"queue\":{\"type\":\"persisted\",\"events\":0,\"capacity\":{\"queue_size_in_bytes\":0,\"page_capacity_in_bytes\":0," +
"\"max_queue_size_in_bytes\":0,\"max_unread_events\":86},\"data\":{\"path\":null,\"free_space_in_bytes\":0,\"storage_type\":null}}}");
}
@Test
public void testSerializeMaxUnreadEvents() throws Exception {
witness.type("persisted");
witness.capacity().maxUnreadEvents(85);
String json = witness.asJson();
assertThat(json).isEqualTo("{\"queue\":{\"type\":\"persisted\",\"events\":0,\"capacity\":{\"queue_size_in_bytes\":0,\"page_capacity_in_bytes\":0," +
"\"max_queue_size_in_bytes\":0,\"max_unread_events\":85},\"data\":{\"path\":null,\"free_space_in_bytes\":0,\"storage_type\":null}}}");
}
@Test
public void testSerializePath() throws Exception{
witness.type("persisted");
witness.data().path("/var/ls/q2");
String json = witness.asJson();
assertThat(json).isEqualTo("{\"queue\":{\"type\":\"persisted\",\"events\":0,\"capacity\":{\"queue_size_in_bytes\":0,\"page_capacity_in_bytes\":0," +
"\"max_queue_size_in_bytes\":0,\"max_unread_events\":0},\"data\":{\"path\":\"/var/ls/q2\",\"free_space_in_bytes\":0,\"storage_type\":null}}}");
}
@Test
public void testSerializeFreeSpace() throws Exception{
witness.type("persisted");
witness.data().freeSpaceInBytes(66);
String json = witness.asJson();
assertThat(json).isEqualTo("{\"queue\":{\"type\":\"persisted\",\"events\":0,\"capacity\":{\"queue_size_in_bytes\":0,\"page_capacity_in_bytes\":0," +
"\"max_queue_size_in_bytes\":0,\"max_unread_events\":0},\"data\":{\"path\":null,\"free_space_in_bytes\":66,\"storage_type\":null}}}");
}
@Test
public void testSerializeStorageType() throws Exception{
witness.type("persisted");
witness.data().storageType("xfs");
String json = witness.asJson();
assertThat(json).isEqualTo("{\"queue\":{\"type\":\"persisted\",\"events\":0,\"capacity\":{\"queue_size_in_bytes\":0,\"page_capacity_in_bytes\":0," +
"\"max_queue_size_in_bytes\":0,\"max_unread_events\":0},\"data\":{\"path\":null,\"free_space_in_bytes\":0,\"storage_type\":\"xfs\"}}}");
}
@Test
public void testSerializeEvents() throws Exception{
witness.type("persisted");
witness.events(102);
String json = witness.asJson();
assertThat(json).isEqualTo("{\"queue\":{\"type\":\"persisted\",\"events\":102,\"capacity\":{\"queue_size_in_bytes\":0,\"page_capacity_in_bytes\":0," +
"\"max_queue_size_in_bytes\":0,\"max_unread_events\":0},\"data\":{\"path\":null,\"free_space_in_bytes\":0,\"storage_type\":null}}}");
}
}

View file

@ -1,100 +0,0 @@
package org.logstash.instrument.witness.pipeline;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.Before;
import org.junit.Test;
import org.logstash.RubyUtil;
import org.logstash.Timestamp;
import org.logstash.ext.JrubyTimestampExtLibrary;
import static org.assertj.core.api.Assertions.assertThat;
/**
* Unit tests for {@link ReloadWitness}
*/
public class ReloadWitnessTest {
private ReloadWitness witness;
private static final Timestamp TIMESTAMP = new Timestamp();
private static final JrubyTimestampExtLibrary.RubyTimestamp RUBY_TIMESTAMP =
JrubyTimestampExtLibrary.RubyTimestamp.newRubyTimestamp(
RubyUtil.RUBY, TIMESTAMP
);
@Before
public void setup() {
witness = new ReloadWitness();
}
@Test
public void testSuccess() {
witness.success();
witness.lastSuccessTimestamp(RUBY_TIMESTAMP);
assertThat(witness.snitch().successes()).isEqualTo(1);
assertThat(witness.snitch().lastSuccessTimestamp()).isEqualTo(TIMESTAMP);
witness.successes(99);
assertThat(witness.snitch().successes()).isEqualTo(100);
}
@Test
public void testFailure() {
witness.failure();
witness.lastFailureTimestamp(RUBY_TIMESTAMP);
assertThat(witness.snitch().failures()).isEqualTo(1);
assertThat(witness.snitch().lastFailureTimestamp()).isEqualTo(TIMESTAMP);
witness.failures(99);
assertThat(witness.snitch().failures()).isEqualTo(100);
}
@Test
public void testError() {
witness.error().message("foo");
assertThat(witness.error().snitch().message()).isEqualTo("foo");
}
@Test
public void testAsJson() throws Exception {
ObjectMapper mapper = new ObjectMapper();
assertThat(mapper.writeValueAsString(witness)).isEqualTo(witness.asJson());
}
@Test
public void testSerializeEmpty() throws Exception {
String json = witness.asJson();
assertThat(json).isEqualTo("{\"reloads\":{\"last_error\":{\"message\":null,\"backtrace\":null},\"successes\":0,\"last_success_timestamp\":null," +
"\"last_failure_timestamp\":null,\"failures\":0}}");
}
@Test
public void testSerializeSuccess() throws Exception {
witness.success();
witness.lastSuccessTimestamp(RUBY_TIMESTAMP);
String json = witness.asJson();
assertThat(json).isEqualTo("{\"reloads\":{\"last_error\":{\"message\":null,\"backtrace\":null},\"successes\":1,\"last_success_timestamp\":\""
+ TIMESTAMP.toString() + "\",\"last_failure_timestamp\":null,\"failures\":0}}");
}
@Test
public void testSerializeFailure() throws Exception {
witness.failure();
witness.lastFailureTimestamp(RUBY_TIMESTAMP);
String json = witness.asJson();
assertThat(json).isEqualTo(
"{\"reloads\":{\"last_error\":{\"message\":null,\"backtrace\":null},\"successes\":0,\"last_success_timestamp\":null," +
"\"last_failure_timestamp\":\"" + TIMESTAMP.toString() + "\",\"failures\":1}}"
);
}
@Test
public void testSerializeError() throws Exception{
witness.error().message("foo");
witness.error().backtrace("bar");
String json = witness.asJson();
assertThat(json).contains("foo");
assertThat(json).contains("bar");
}
}

View file

@ -1,118 +0,0 @@
package org.logstash.instrument.witness.process;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.Before;
import org.junit.Test;
import java.security.MessageDigest;
import java.time.Instant;
import java.util.UUID;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assume.assumeTrue;
/**
* Unit tests for {@link ProcessWitness}
*/
public class ProcessWitnessTest {
private ProcessWitness witness;
@Before
public void setup(){
witness = new ProcessWitness();
}
@Test
public void testInitialState(){
ProcessWitness.Snitch snitch = witness.snitch();
assertThat(snitch.cpuProcessPercent()).isEqualTo((short) -1);
assertThat(snitch.cpuTotalInMillis()).isEqualTo(-1);
assertThat(snitch.maxFileDescriptors()).isEqualTo(-1);
assertThat(snitch.memTotalVirtualInBytes()).isEqualTo(-1);
assertThat(snitch.openFileDescriptors()).isEqualTo(-1);
assertThat(snitch.peakOpenFileDescriptors()).isEqualTo(-1);
}
@Test
public void testRefresh(){
ProcessWitness.Snitch snitch = witness.snitch();
assumeTrue(ProcessWitness.isUnix);
witness.refresh();
assertThat(snitch.cpuProcessPercent()).isGreaterThanOrEqualTo((short) 0);
assertThat(snitch.cpuTotalInMillis()).isGreaterThan(0);
assertThat(snitch.maxFileDescriptors()).isGreaterThan(0);
assertThat(snitch.memTotalVirtualInBytes()).isGreaterThan(0);
assertThat(snitch.openFileDescriptors()).isGreaterThan(0);
assertThat(snitch.peakOpenFileDescriptors()).isGreaterThan(0);
}
@Test
public void testRefreshChanges() throws InterruptedException {
ProcessWitness.Snitch snitch = witness.snitch();
assumeTrue(ProcessWitness.isUnix);
witness.refresh();
short before = snitch.cpuProcessPercent();
ScheduledExecutorService refresh = Executors.newSingleThreadScheduledExecutor();
refresh.scheduleAtFixedRate(() -> witness.refresh(), 0 , 100, TimeUnit.MILLISECONDS);
//Add some arbitrary CPU load
ExecutorService cpuLoad = Executors.newSingleThreadExecutor();
cpuLoad.execute(() -> {
while(true){
try {
MessageDigest md = MessageDigest.getInstance("SHA-1");
md.update(UUID.randomUUID().toString().getBytes());
md.digest();
if(Thread.currentThread().isInterrupted()){
break;
}
} catch (Exception e) {
//do nothing
}
}
});
//we only care that the value changes, give the load some time to change it
boolean pass = false;
Instant end = Instant.now().plusSeconds(10);
do {
Thread.sleep(100);
if (before != snitch.cpuProcessPercent()) {
pass = true;
break;
}
} while (end.isAfter(Instant.now()));
assertThat(pass).isTrue();
refresh.shutdownNow();
cpuLoad.shutdownNow();
}
@Test
public void testAsJson() throws Exception {
ObjectMapper mapper = new ObjectMapper();
assertThat(mapper.writeValueAsString(witness)).isEqualTo(witness.asJson());
}
@Test
public void testSerializeEmpty() throws Exception {
String json = witness.asJson();
assertThat(json).isEqualTo("{\"process\":{\"open_file_descriptors\":-1,\"peak_open_file_descriptors\":-1,\"max_file_descriptors\":-1," +
"\"mem\":{\"total_virtual_in_bytes\":-1},\"cpu\":{\"total_in_millis\":-1,\"percent\":-1}}}");
}
@Test
public void testSerializePopulated() throws Exception {
assumeTrue(ProcessWitness.isUnix);
String emptyJson = witness.asJson();
witness.refresh();
String populatedJson = witness.asJson();
assertThat(emptyJson).isNotEqualTo(populatedJson);
assertThat(populatedJson).doesNotContain("-1");
}
}

View file

@ -1,138 +0,0 @@
package org.logstash.instrument.witness.schedule;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.core.Appender;
import org.apache.logging.log4j.core.ErrorHandler;
import org.apache.logging.log4j.core.LogEvent;
import org.apache.logging.log4j.core.LoggerContext;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import java.time.Duration;
import java.util.stream.Collectors;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.*;
/**
* Unit tests for {@link WitnessScheduler}
*/
@RunWith(MockitoJUnitRunner.class)
public class WitnessSchedulerTest {
private Witness1 witness1;
private Witness2 witness2;
private Witness3 witness3;
@Mock
Appender appender;
@Mock
ErrorHandler errorHandler;
@Before
public void setup() {
witness1 = new Witness1();
witness2 = new Witness2();
witness3 = new Witness3();
when(appender.getName()).thenReturn("junit");
when(appender.getHandler()).thenReturn(errorHandler);
when(appender.isStarted()).thenReturn(true);
LoggerContext.getContext(false).getLogger(WitnessScheduler.class.getName()).addAppender(appender);
LoggerContext.getContext(false).getLogger(WitnessScheduler.class.getName()).setLevel(Level.WARN);
}
@After
public void tearDown() {
LoggerContext.getContext(false).getLogger(WitnessScheduler.class.getName()).removeAppender(appender);
}
@Test
public void testSchedule() throws InterruptedException {
WitnessScheduler witness1Scheduler = new WitnessScheduler(witness1);
witness1Scheduler.schedule();
new WitnessScheduler(witness2).schedule();
new WitnessScheduler(witness3).schedule();
//Give some time for the schedules to run.
Thread.sleep(15000);
assertThat(witness1.counter).isBetween(15, 60);
assertThat(witness2.counter).isBetween(3, 10);
//this tests that an exception thrown does not kill the scheduler
assertThat(witness3.counter).isBetween(15, 60);
assertThat(Thread.getAllStackTraces().keySet().stream().map(t -> t.getName()).collect(Collectors.toSet())).contains("Witness1-thread").contains("Witness2-thread")
.contains("Witness3-thread");
ArgumentCaptor<LogEvent> argument = ArgumentCaptor.forClass(LogEvent.class);
//tests that Witness3 is the only error and that it only gets logged once
verify(appender).append(argument.capture());
assertThat(argument.getAllValues().stream().filter(a -> a.getMessage().toString().equals("Can not fully refresh the metrics for the Witness3")).count()).isEqualTo(1);
//shutdown 1 of the schedulers
witness1Scheduler.shutdown();
int count1 = witness1.counter;
int count2 = witness2.counter;
int count3 = witness3.counter;
Thread.sleep(10000);
//witness 1 has been stopped but the others keep on truckin
assertThat(count1).isEqualTo(witness1.counter);
assertThat(count2).isLessThan(witness2.counter);
assertThat(count3).isLessThan(witness3.counter);
assertThat(Thread.getAllStackTraces().keySet().stream().map(t -> t.getName()).collect(Collectors.toSet())).doesNotContain("Witness1-thread");
}
class Witness1 implements ScheduledWitness {
int counter = 0;
@Override
public void refresh() {
counter++;
}
@Override
public Duration every() {
return Duration.ofSeconds(1);
}
}
class Witness2 implements ScheduledWitness {
int counter = 0;
@Override
public void refresh() {
counter++;
}
@Override
public Duration every() {
return Duration.ofSeconds(5);
}
}
class Witness3 implements ScheduledWitness {
int counter = 0;
@Override
public void refresh() {
counter++;
throw new RuntimeException();
}
@Override
public Duration every() {
return Duration.ofSeconds(1);
}
}
}