Improve ECS categorization field mappings in postgresql module
- convert pipeline to yml
- event.kind
- event.category
- event.type
- related.user

Closes elastic#16177
leehinman committed Apr 22, 2020
1 parent 757e7d1 commit 2745250
Showing 10 changed files with 1,235 additions and 52 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.next.asciidoc
@@ -268,6 +268,7 @@ https://github.com/elastic/beats/compare/v7.0.0-alpha2...master[Check the HEAD d
- Enhance `elasticsearch/slowlog` fileset to handle ECS-compatible logs emitted by Elasticsearch. {issue}17715[17715] {pull}17729[17729]
- Improve ECS categorization field mappings in misp module. {issue}16026[16026] {pull}17344[17344]
- Added Unix stream socket support as an input source and a syslog input source. {pull}17492[17492]
- Improve ECS categorization field mappings in postgresql module. {issue}16177[16177] {pull}17914[17914]

*Heartbeat*

49 changes: 0 additions & 49 deletions filebeat/module/postgresql/log/ingest/pipeline.json

This file was deleted.

57 changes: 57 additions & 0 deletions filebeat/module/postgresql/log/ingest/pipeline.yml
@@ -0,0 +1,57 @@
description: Pipeline for parsing PostgreSQL logs.
processors:
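# Parse the raw PostgreSQL line: timestamp, process id, optional core id, optional
# user@database, log level, optional error code, and either a slow-query duration
# plus statement or the remaining free-form message.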
- grok:
    field: message
    ignore_missing: true
    patterns:
    - '^%{DATETIME:postgresql.log.timestamp} \[%{NUMBER:process.pid:long}(-%{BASE16FLOAT:postgresql.log.core_id:long})?\]
      ((\[%{USERNAME:user.name}\]@\[%{POSTGRESQL_DB_NAME:postgresql.log.database}\]|%{USERNAME:user.name}@%{POSTGRESQL_DB_NAME:postgresql.log.database})
      )?%{WORD:log.level}: (?:%{NUMBER:postgresql.log.error.code:long}|%{SPACE})(duration:
      %{NUMBER:temp.duration:float} ms %{POSTGRESQL_QUERY_STEP}: %{GREEDYDATA:postgresql.log.query}|:
      %{GREEDYDATA:message}|%{GREEDYDATA:message})'
    pattern_definitions:
      DATETIME: '[-0-9]+ %{TIME} %{WORD:event.timezone}'
      GREEDYDATA: |-
        (.|
        | )*
      POSTGRESQL_DB_NAME: '[a-zA-Z0-9_]+[a-zA-Z0-9_\$]*'
      POSTGRESQL_QUERY_STEP: '%{WORD:postgresql.log.query_step}(?: <unnamed>| %{WORD:postgresql.log.query_name})?'
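# Parse the extracted timestamp (including its timezone abbreviation) into @timestamp.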
- date:
    field: postgresql.log.timestamp
    target_field: '@timestamp'
    formats:
    - yyyy-MM-dd HH:mm:ss.SSS zz
    - yyyy-MM-dd HH:mm:ss zz
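# Convert the slow-query duration from milliseconds to nanoseconds for ECS
# event.duration (e.g. 37.118 ms becomes 37118000 ns); skipped when no duration was parsed.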
- script:
    lang: painless
    source: ctx.event.duration = Math.round(ctx.temp.duration * params.scale)
    params:
      scale: 1000000
    if: ctx.temp?.duration != null
- remove:
    field: temp.duration
    ignore_missing: true
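# ECS categorization: every parsed line is an event in the database category,
# typed as info by default.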
- set:
    field: event.kind
    value: event
- append:
    field: event.category
    value:
      - database
- append:
    field: event.type
    value:
      - info
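# Additionally type the event as an error when PostgreSQL reported an error code
# of 02000 or higher.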
- append:
    field: event.type
    value:
      - error
    if: "ctx?.postgresql?.log?.error?.code != null && ctx.postgresql.log.error.code >= 02000"
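# Copy the extracted user name into related.user so events can be pivoted on by user.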
- append:
    field: related.user
    value: "{{user.name}}"
    if: "ctx?.user?.name != null"
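# If any processor fails, keep the document and record the failure reason in error.message.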
on_failure:
- set:
    field: error.message
    value: '{{ _ingest.on_failure_message }}'
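
A quick way to sanity-check the converted pipeline is the Elasticsearch Simulate Pipeline API. The request below is only an illustrative sketch, not part of the commit: the processors array stands in for the pipeline above translated to JSON, and the sample log line (PID, user, database, query) is invented, so real server output may differ in whitespace and log_line_prefix settings.

POST _ingest/pipeline/_simulate
{
  "pipeline": {
    "description": "Pipeline for parsing PostgreSQL logs.",
    "processors": [ ... the processors from pipeline.yml, translated to JSON ... ]
  },
  "docs": [
    {
      "_source": {
        "message": "2020-04-22 13:24:50.567 UTC [1234] postgres@clients LOG: duration: 37.118 ms statement: SELECT * FROM users;"
      }
    }
  ]
}

For a line like that, the simulated document should come back with fields along the lines of user.name: postgres, postgresql.log.database: clients, event.duration: 37118000, event.kind: event, event.category: [database], event.type: [info], and related.user: [postgres].
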
2 changes: 1 addition & 1 deletion filebeat/module/postgresql/log/manifest.yml
@@ -9,5 +9,5 @@ var:
    os.windows:
      - "c:/Program Files/PostgreSQL/*/logs/*.log*"

-ingest_pipeline: ingest/pipeline.json
+ingest_pipeline: ingest/pipeline.yml
input: config/log.yml