elastic/logstash (mirror of https://github.com/elastic/logstash.git)
commit f6eccf081b (parent d38e117246)
1 changed file with 4 additions and 4 deletions
|
@@ -155,17 +155,17 @@ Here is an example of the forked path configuration.
   queue.type: persisted
   config.string: |
     input { beats { port => 5044 } }
-    output { pipeline { send_to => [es, http] } }
+    output { pipeline { send_to => [internal-es, partner-s3] } }
 - pipeline.id: buffered-es
   queue.type: persisted
   config.string: |
-    input { pipeline { address => partner } }
+    input { pipeline { address => internal-es } }
     # Index the full event
     output { elasticsearch { } }
 - pipeline.id: partner
   queue.type: persisted
   config.string: |
-    input { pipeline { address => s3 } }
+    input { pipeline { address => partner-s3 } }
     filter {
       # Remove the sensitive data
       mutate { remove_field => 'sensitive-data' }
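For reference, the forked path example reads as follows once the addresses line up. This is a sketch reconstructed from the hunk's context: the first pipeline's id (`intake` below) and the `partner` pipeline's closing s3 output are not visible in the diff, so both are assumptions.

# config/pipelines.yml: forked path pattern (reconstruction, see caveats above)
- pipeline.id: intake          # id assumed, not shown in the hunk
  queue.type: persisted
  config.string: |
    input { beats { port => 5044 } }
    # Every event is cloned to both downstream virtual addresses
    output { pipeline { send_to => [internal-es, partner-s3] } }
- pipeline.id: buffered-es
  queue.type: persisted
  config.string: |
    input { pipeline { address => internal-es } }
    # Index the full event
    output { elasticsearch { } }
- pipeline.id: partner
  queue.type: persisted
  config.string: |
    input { pipeline { address => partner-s3 } }
    filter {
      # Remove the sensitive data
      mutate { remove_field => 'sensitive-data' }
    }
    output { s3 { } }          # assumed: ship the redacted copy to the partner

The rename is the substance of the fix: `send_to` names and downstream `address` values must match exactly. Before this commit the intake pipeline sent to `es` and `http` while the consumers listened on `partner` and `s3`, so no virtual address was ever connected.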
@@ -194,7 +194,7 @@ Here is an example of the collector pattern.
 - pipeline.id: partner
   # This common pipeline enforces the same logic whether data comes from Kafka or Beats
   config.string: |
-    input { pipeline { address => commonOu } }
+    input { pipeline { address => commonOut } }
     filter {
       # Always remove sensitive data from all input sources
       mutate { remove_field => 'sensitive-data' }
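With the one-character typo fixed, the common pipeline actually listens on the `commonOut` address that upstream pipelines target; before, anything sent to `commonOut` had no listener. Below is a sketch of the full collector layout, assuming Beats and Kafka upstreams as the in-diff comment suggests; the `beats` and `kafka` pipeline ids, the kafka input settings, and the final elasticsearch output are all assumptions.

# config/pipelines.yml: collector pattern (sketch, see assumptions above)
- pipeline.id: beats           # assumed upstream
  config.string: |
    input { beats { port => 5044 } }
    output { pipeline { send_to => [commonOut] } }
- pipeline.id: kafka           # assumed upstream
  config.string: |
    input { kafka { bootstrap_servers => "localhost:9092" topics => ["intake"] } }
    output { pipeline { send_to => [commonOut] } }
- pipeline.id: partner
  # This common pipeline enforces the same logic whether data comes from Kafka or Beats
  config.string: |
    input { pipeline { address => commonOut } }
    filter {
      # Always remove sensitive data from all input sources
      mutate { remove_field => 'sensitive-data' }
    }
    output { elasticsearch { } }   # assumed sink

Multiple upstreams can name the same virtual address in `send_to`, which is what makes the collector pattern work: the shared filter logic lives in one place instead of being copied into each source pipeline.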