[docs] Fix indentation problems in processor examples (elastic#18115)
dedemorton committed May 4, 2020
1 parent 6856b25 commit 6b2ca2d
Showing 57 changed files with 1,303 additions and 1,298 deletions.
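The commit applies one consistent indentation style to the commented-out processor examples in the reference configs. For reference, a minimal sketch of the layout the corrected snippets follow once uncommented (removing only the leading `#`); the `include_fields`/`drop_fields` values are taken from the first example in the diff below:

```yaml
processors:
  - include_fields:        # keep only the listed fields
      fields: ["cpu"]
  - drop_fields:           # then drop these sub-fields
      fields: ["cpu.user", "cpu.system"]
```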
150 changes: 75 additions & 75 deletions auditbeat/auditbeat.reference.yml
@@ -243,151 +243,151 @@ auditbeat.modules:
# values:
#
#processors:
#- include_fields:
# fields: ["cpu"]
#- drop_fields:
# fields: ["cpu.user", "cpu.system"]
# - include_fields:
# fields: ["cpu"]
# - drop_fields:
# fields: ["cpu.user", "cpu.system"]
#
# The following example drops the events that have the HTTP response code 200:
#
#processors:
#- drop_event:
# when:
# equals:
# http.code: 200
# - drop_event:
# when:
# equals:
# http.code: 200
#
# The following example renames the field a to b:
#
#processors:
#- rename:
# fields:
# - from: "a"
# to: "b"
# - rename:
# fields:
# - from: "a"
# to: "b"
#
# The following example tokenizes the string into fields:
#
#processors:
#- dissect:
# tokenizer: "%{key1} - %{key2}"
# field: "message"
# target_prefix: "dissect"
# - dissect:
# tokenizer: "%{key1} - %{key2}"
# field: "message"
# target_prefix: "dissect"
#
# The following example enriches each event with metadata from the cloud
# provider about the host machine. It works on EC2, GCE, DigitalOcean,
# Tencent Cloud, and Alibaba Cloud.
#
#processors:
#- add_cloud_metadata: ~
# - add_cloud_metadata: ~
#
# The following example enriches each event with the machine's local time zone
# offset from UTC.
#
#processors:
#- add_locale:
# format: offset
# - add_locale:
# format: offset
#
# The following example enriches each event with docker metadata. It matches
# the given fields to an existing container id and adds info from that container:
#
#processors:
#- add_docker_metadata:
# host: "unix:///var/run/docker.sock"
# match_fields: ["system.process.cgroup.id"]
# match_pids: ["process.pid", "process.ppid"]
# match_source: true
# match_source_index: 4
# match_short_id: false
# cleanup_timeout: 60
# labels.dedot: false
# # To connect to Docker over TLS you must specify a client and CA certificate.
# #ssl:
# # certificate_authority: "/etc/pki/root/ca.pem"
# # certificate: "/etc/pki/client/cert.pem"
# # key: "/etc/pki/client/cert.key"
# - add_docker_metadata:
# host: "unix:///var/run/docker.sock"
# match_fields: ["system.process.cgroup.id"]
# match_pids: ["process.pid", "process.ppid"]
# match_source: true
# match_source_index: 4
# match_short_id: false
# cleanup_timeout: 60
# labels.dedot: false
# # To connect to Docker over TLS you must specify a client and CA certificate.
# #ssl:
# # certificate_authority: "/etc/pki/root/ca.pem"
# # certificate: "/etc/pki/client/cert.pem"
# # key: "/etc/pki/client/cert.key"
#
# The following example enriches each event with docker metadata. It matches
# the container id from the log path available in the `source` field (by default
# it expects the path to be /var/lib/docker/containers/*/*.log).
#
#processors:
#- add_docker_metadata: ~
# - add_docker_metadata: ~
#
# The following example enriches each event with host metadata.
#
#processors:
#- add_host_metadata: ~
# - add_host_metadata: ~
#
# The following example enriches each event with process metadata using
# process IDs included in the event.
#
#processors:
#- add_process_metadata:
# match_pids: ["system.process.ppid"]
# target: system.process.parent
# - add_process_metadata:
# match_pids: ["system.process.ppid"]
# target: system.process.parent
#
# The following example decodes fields containing JSON strings
# and replaces the strings with valid JSON objects.
#
#processors:
#- decode_json_fields:
# fields: ["field1", "field2", ...]
# process_array: false
# max_depth: 1
# target: ""
# overwrite_keys: false
# - decode_json_fields:
# fields: ["field1", "field2", ...]
# process_array: false
# max_depth: 1
# target: ""
# overwrite_keys: false
#
#processors:
#- decompress_gzip_field:
# from: "field1"
# to: "field2"
# ignore_missing: false
# fail_on_error: true
# - decompress_gzip_field:
# from: "field1"
# to: "field2"
# ignore_missing: false
# fail_on_error: true
#
# The following example copies the value of message to message_copied
#
#processors:
#- copy_fields:
# fields:
# - copy_fields:
# fields:
# - from: message
# to: message_copied
# fail_on_error: true
# ignore_missing: false
# fail_on_error: true
# ignore_missing: false
#
# The following example truncates the value of message to 1024 bytes
#
#processors:
#- truncate_fields:
# fields:
# - message
# max_bytes: 1024
# fail_on_error: false
# ignore_missing: true
# - truncate_fields:
# fields:
# - message
# max_bytes: 1024
# fail_on_error: false
# ignore_missing: true
#
# The following example preserves the raw message under event.original
#
#processors:
#- copy_fields:
# fields:
# - copy_fields:
# fields:
# - from: message
# to: event.original
# fail_on_error: false
# ignore_missing: true
#- truncate_fields:
# fields:
# - event.original
# max_bytes: 1024
# fail_on_error: false
# ignore_missing: true
# fail_on_error: false
# ignore_missing: true
# - truncate_fields:
# fields:
# - event.original
# max_bytes: 1024
# fail_on_error: false
# ignore_missing: true
#
# The following example URL-decodes the value of field1 to field2
#
#processors:
#- urldecode:
# fields:
# - from: "field1"
# to: "field2"
# ignore_missing: false
# fail_on_error: true
# - urldecode:
# fields:
# - from: "field1"
# to: "field2"
# ignore_missing: false
# fail_on_error: true

#============================= Elastic Cloud ==================================

12 changes: 6 additions & 6 deletions filebeat/docs/filebeat-filtering.asciidoc
@@ -37,21 +37,21 @@ The following configuration drops all the DEBUG messages.
[source,yaml]
-----------------------------------------------------
processors:
- drop_event:
when:
- drop_event:
when:
regexp:
message: "^DBG:"
message: "^DBG:"
-----------------------------------------------------

To drop all the log messages coming from a certain log file:

[source,yaml]
----------------
processors:
- drop_event:
when:
- drop_event:
when:
contains:
source: "test"
source: "test"
----------------

[float]
6 changes: 3 additions & 3 deletions filebeat/docs/kubernetes-default-indexers-matchers.asciidoc
@@ -8,7 +8,7 @@ configuration:
[source,yaml]
-------------------------------------------------------------------------------
processors:
- add_kubernetes_metadata:
default_indexers.enabled: false
default_matchers.enabled: false
- add_kubernetes_metadata:
default_indexers.enabled: false
default_matchers.enabled: false
-------------------------------------------------------------------------------
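With the defaults disabled as in the snippet above, custom indexers and matchers are normally supplied in the same processor block. A minimal sketch, assuming the `ip_port` indexer and the `fields` matcher; the `lookup_fields` value here is only illustrative:

```yaml
processors:
  - add_kubernetes_metadata:
      default_indexers.enabled: false
      default_matchers.enabled: false
      # Custom indexer: index pods by their IP and port.
      indexers:
        - ip_port:
      # Custom matcher: look up the pod using a field from the event.
      matchers:
        - fields:
            lookup_fields: ["metricset.host"]
```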
