logstash/etc/logstash-elasticsearch-rabbitmq-river.yaml
Pete Fritchman ec2ae60d83 add support for streaming logs to elasticsearch via a "rabbitmq river".
We have to PUT a JSON config to the elasticsearch HTTP API to tell it
where to look; then we use our amqp output to send bulk-data-style
index messages to elasticsearch. The ES side does the bulk queueing for us.
2010-11-01 01:46:20 +00:00
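The one-time river registration described above might look like the sketch below. This is an illustration under assumptions, not code from this repo: the river name "logstash" and the exact field layout follow the rabbitmq river plugin's conventions as commonly documented.

    import json
    import urllib.request

    # PUT the river config to elasticsearch's HTTP API so it starts
    # consuming from the RabbitMQ queue. The river name "logstash" and
    # the field names below are assumptions, not from this repo.
    meta = {
        "type": "rabbitmq",
        "rabbitmq": {
            "host": "127.0.0.1",
            "user": "guest",
            "pass": "guest",
            "vhost": "/",
            "queue": "es",
        },
    }
    req = urllib.request.Request(
        "http://localhost:9200/_river/logstash/_meta",
        data=json.dumps(meta).encode("utf-8"),
        headers={"Content-Type": "application/json"},
        method="PUT",
    )
    urllib.request.urlopen(req)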

---
# This is a sample logstash config. (The code is still changing rapidly,
# so this format may change later.)
inputs:
  # Give a list of inputs. Tag them for easy query/filter later.
  linux-syslog: # this is the 'linux-syslog' type
    - /var/log/messages # watch /var/log/messages (uses eventmachine-tail)
    - /var/log/kern.log
    - /var/log/auth.log
    - /var/log/user.log
  apache-access: # similar, but a different type.
    - /var/log/apache2/access.log
    - /b/access
  apache-error:
    - /var/log/apache2/error.log
filters:
- grok:
    linux-syslog: # for logs of type 'linux-syslog'
      patterns:
        - "%{SYSLOGLINE}" # quoted: a plain YAML scalar may not start with '%'
    apache-access: # for logs of type 'apache-access'
      patterns:
        - "%{COMBINEDAPACHELOG}"
- date:
    linux-syslog: # for logs of type 'linux-syslog'
      # Look for a field 'timestamp' with this format and parse it for the
      # event timestamp. The field comes from the SYSLOGLINE pattern.
      timestamp: "%b %e %H:%M:%S"
    apache-access:
      timestamp: "%d/%b/%Y:%H:%M:%S %Z"
outputs:
- stdout:///
- "elasticsearch://localhost:9200/logs/all?method=river&type=rabbitmq&host=127.0.0.1&user=guest&pass=guest&vhost=/&queue=es"
# But we could write to mongodb, too.
# - mongodb://localhost/parsedlogs
# And also write to an AMQP topic
# - amqp://localhost/topic/parsedlogs
# Write to stdout ... etc.
# - stdout:///
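
For reference, a bulk-data-style message published to the "es" queue would look roughly like the sketch below; the event field names (@type, @message) are illustrative assumptions, not taken from the logstash source.

    import json

    # Bulk format: an action line naming the target index/type, then the
    # event document, each newline-terminated.
    action = {"index": {"_index": "logs", "_type": "all"}}
    event = {"@type": "linux-syslog",
             "@message": "Nov  1 01:46:20 myhost sshd[123]: Accepted publickey"}
    payload = json.dumps(action) + "\n" + json.dumps(event) + "\n"
    # e.g. with pika: channel.basic_publish(exchange="", routing_key="es",
    #                                       body=payload)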