diff --git a/CHANGELOG.next.asciidoc b/CHANGELOG.next.asciidoc
index bcd1c714553..87e3616944c 100644
--- a/CHANGELOG.next.asciidoc
+++ b/CHANGELOG.next.asciidoc
@@ -123,6 +123,7 @@ https://github.com/elastic/beats/compare/v7.0.0-alpha2...master[Check the HEAD d
 - Fix bug in `httpjson` that prevented `first_event` getting updated. {pull}26407[26407]
 - Fix bug in the Syslog input that misparsed rfc5424 days starting with 0. {pull}26419[26419]
 - Do not close filestream harvester if an unexpected error is returned when close.on_state_change.* is enabled. {pull}26411[26411]
+- Fix Elasticsearch compatibility for modules that use `copy_from` in `set` processors. {issue}26629[26629]
 
 *Filebeat*
 
diff --git a/filebeat/module/elasticsearch/audit/ingest/pipeline-json.yml b/filebeat/module/elasticsearch/audit/ingest/pipeline-json.yml
index e658a2836e0..359af0ab196 100644
--- a/filebeat/module/elasticsearch/audit/ingest/pipeline-json.yml
+++ b/filebeat/module/elasticsearch/audit/ingest/pipeline-json.yml
@@ -156,7 +156,7 @@ processors:
     path: elasticsearch.audit
 - set:
     field: http.request.id
-    copy_from: elasticsearch.audit.request.id
+    value: '{{{elasticsearch.audit.request.id}}}'
     ignore_empty_value: true
 - dot_expander:
     field: cluster.name
diff --git a/filebeat/module/nginx/ingress_controller/ingest/pipeline.yml b/filebeat/module/nginx/ingress_controller/ingest/pipeline.yml
index 64fd7567ba1..281e34d9e34 100644
--- a/filebeat/module/nginx/ingress_controller/ingest/pipeline.yml
+++ b/filebeat/module/nginx/ingress_controller/ingest/pipeline.yml
@@ -63,7 +63,7 @@ processors:
     value: ""
 - set:
     field: http.request.id
-    copy_from: nginx.ingress_controller.http.request.id
+    value: '{{{nginx.ingress_controller.http.request.id}}}'
    ignore_empty_value: true
     ignore_failure: true
 - script:
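
The recurring change in this diff swaps the `set` processor's `copy_from` option for a Mustache-templated `value`, because `copy_from` is only understood by newer Elasticsearch releases (the compatibility problem tracked in the changelog entry above). A minimal sketch of the two forms, reusing the field names from the elasticsearch module hunk; the triple braces keep the rendered value from being HTML-escaped, and, unlike `copy_from`, a templated `value` always renders to a string:

```yaml
# Requires a recent Elasticsearch that supports copy_from:
- set:
    field: http.request.id
    copy_from: elasticsearch.audit.request.id
    ignore_empty_value: true

# Backwards-compatible equivalent for scalar fields:
- set:
    field: http.request.id
    value: '{{{elasticsearch.audit.request.id}}}'
    ignore_empty_value: true
```
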
diff --git a/x-pack/filebeat/module/cyberarkpas/audit/ingest/pipeline.yml b/x-pack/filebeat/module/cyberarkpas/audit/ingest/pipeline.yml
index 32e9c221f2e..7b5a9454a5c 100644
--- a/x-pack/filebeat/module/cyberarkpas/audit/ingest/pipeline.yml
+++ b/x-pack/filebeat/module/cyberarkpas/audit/ingest/pipeline.yml
@@ -914,10 +914,27 @@ processors:
             value: "success"
           - set: event.reason
             from: cyberarkpas.audit.reason
+      on_failure:
+        - append:
+            field: error.message
+            value: 'Failed to enrich based on ID #{{{ event.code }}}: {{{_ingest.on_failure_message}}}'
       source: >
-        def clone(def val) {
-          return val instanceof List? new ArrayList(val) : val;
+        def clone(def ref) {
+          if (ref == null) return ref;
+          if (ref instanceof Map) {
+            ref = ref.entrySet().stream().collect(
+              Collectors.toMap(
+                e -> e.getKey(),
+                e -> clone(e.getValue())
+              )
+            );
+          } else if (ref instanceof List) {
+            ref = ref.stream().map(e -> clone(e)).collect(
+              Collectors.toList()
+            );
+          }
+          return ref;
         }
         def read_field(def map, String name) {
           if (map == null || !(map instanceof Map)) return null;
@@ -925,29 +942,31 @@ processors:
           return pos == -1? map[name]
                           : read_field(map[name.substring(0, pos)], name.substring(pos+1));
         }
+        boolean set_field(Map map, String name, def value) {
+          int pos = name.indexOf(".");
+          if (pos == -1) {
+            map[name] = clone(value);
+            return true;
+          }
+          String key = name.substring(0, pos),
+                 path = name.substring(pos+1);
+          if (!map.containsKey(key)) {
+            map[key] = new HashMap();
+          }
+          map = map[key];
+          return map instanceof Map? set_field(map, path, value)
+                                   : false;
+        }
         String msgID = ctx.event?.code;
         def actions = params.get(msgID);
         if (actions == null) return;
-        List values = new ArrayList();
         for (def item : actions) {
           def val = item.value;
           if (val == null && (val = read_field(ctx, item.from)) == null || val == "") continue;
-          values.add([
-            "to": item.set,
-            "value": clone(val)
-          ]);
+          if (!set_field(ctx, item.set, val)) {
+            throw new Exception("Failed to set field " + item.set);
+          }
         }
-        if (!values.isEmpty()) ctx._tmp["values"] = values;
-
-  - foreach:
-      field: _tmp.values
-      ignore_missing: true
-      processor:
-        set:
-          field: '{{{_ingest._value.to}}}'
-          copy_from: '_ingest._value.value'
-          ignore_empty_value: true
-          override: true
 
 #
 # Force event.outcome: unknown in case it gets a value other than one of the allowed.
@@ -994,7 +1013,7 @@ processors:
       on_failure:
         - set:
             field: source.domain
-            copy_from: source.address
+            value: '{{{source.address}}}'
   - convert:
       field: destination.address
      target_field: destination.ip
@@ -1003,7 +1022,7 @@ processors:
       on_failure:
         - set:
             field: destination.domain
-            copy_from: destination.address
+            value: '{{{destination.address}}}'
 
 #
 # Populate related.ip
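
Because a templated `value` stringifies whatever it renders, it only substitutes for `copy_from` on scalar fields; structured values still need the Painless deep copy above, which is why the `foreach`/`copy_from` pair was folded into the script's `clone()`/`set_field()` helpers. A hypothetical illustration of the pitfall (the source field here is invented for the example):

```yaml
# If cyberarkpas.audit.extra_details were an object, this would store a
# flattened string such as "{key=value}" rather than a copy of the object:
- set:
    field: event.reason
    value: '{{{cyberarkpas.audit.extra_details}}}'
```
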
diff --git a/x-pack/filebeat/module/panw/panos/ingest/pipeline.yml b/x-pack/filebeat/module/panw/panos/ingest/pipeline.yml
index e0bbd9b8c25..a6509d7f2a6 100644
--- a/x-pack/filebeat/module/panw/panos/ingest/pipeline.yml
+++ b/x-pack/filebeat/module/panw/panos/ingest/pipeline.yml
@@ -611,6 +611,131 @@ processors:
         - server.nat.ip
         - server.nat.port
       if: 'ctx?.destination?.nat?.ip == "0.0.0.0" && ctx?.destination?.nat?.port == 0'
+
+  - set:
+      field: rule.name
+      value: "{{panw.panos.ruleset}}"
+      ignore_empty_value: true
+
+  # Set url and file values
+  - rename:
+      if: 'ctx?.panw?.panos?.sub_type != "url"'
+      field: url.original
+      target_field: file.name
+      ignore_missing: true
+
+  - grok:
+      field: url.original
+      patterns:
+        - '(%{ANY:url.scheme}\:\/\/)?(%{USERNAME:url.username}(\:%{PASSWORD:url.password})?\@)?%{DOMAIN:url.domain}(\:%{POSINT:url.port})?(%{PATH:url.path})?(\?%{QUERY:url.query})?(\#%{ANY:url.fragment})?'
+      ignore_missing: true
+      pattern_definitions:
+        USERNAME: '[^\:]*'
+        PASSWORD: '[^@]*'
+        DOMAIN: '[^\/\?#\:]*'
+        PATH: '[^\?#]*'
+        QUERY: '[^#]*'
+        ANY: '.*'
+      if: 'ctx?.url?.original != null && ctx?.url?.original != "-/" && ctx?.url?.original != ""'
+
+  - grok:
+      field: url.path
+      patterns:
+        - '%{FILENAME}((?:\.%{ANY})*(\.%{ANY:url.extension}))?'
+      ignore_missing: true
+      pattern_definitions:
+        FILENAME: '[^\.]+'
+        ANY: '.*'
+      if: 'ctx?.url?.path != null && ctx?.url?.path != ""'
+
+  - grok:
+      field: file.name
+      patterns:
+        - '%{FILENAME}((?:\.%{ANY})*(\.%{ANY:file.extension}))?'
+      ignore_missing: true
+      pattern_definitions:
+        FILENAME: '[^\.]+'
+        ANY: '.*'
+      if: 'ctx?.file?.name != null && ctx?.file?.name != ""'
+
+  - script:
+      lang: painless
+      description: Copy source.user to user
+      source: >
+        def clone(def ref) {
+          if (ref == null) return ref;
+          if (ref instanceof Map) {
+            ref = ref.entrySet().stream().collect(
+              Collectors.toMap(
+                e -> e.getKey(),
+                e -> clone(e.getValue())
+              )
+            );
+          } else if (ref instanceof List) {
+            ref = ref.stream().map(e -> clone(e)).collect(
+              Collectors.toList()
+            );
+          }
+          return ref;
+        }
+        def u = ctx?.source?.user;
+        if (u != null) {
+          ctx["user"] = clone(u);
+        }
+
+  - append:
+      field: related.user
+      allow_duplicates: false
+      value: "{{client.user.name}}"
+      if: "ctx?.client?.user?.name != null"
+
+  - append:
+      field: related.user
+      allow_duplicates: false
+      value: "{{source.user.name}}"
+      if: "ctx?.source?.user?.name != null"
+
+  - append:
+      field: related.user
+      allow_duplicates: false
+      value: "{{server.user.name}}"
+      if: "ctx?.server?.user?.name != null"
+
+  - append:
+      field: related.user
+      allow_duplicates: false
+      value: "{{destination.user.name}}"
+      if: "ctx?.destination?.user?.name != null"
+
+  - append:
+      field: related.user
+      allow_duplicates: false
+      value: "{{url.username}}"
+      if: "ctx?.url?.username != null && ctx?.url?.username != ''"
+
+  - append:
+      field: related.hash
+      allow_duplicates: false
+      value: "{{panw.panos.file.hash}}"
+      if: "ctx?.panw?.panos?.file?.hash != null"
+
+  - append:
+      field: related.hosts
+      allow_duplicates: false
+      value: "{{observer.hostname}}"
+      if: "ctx?.observer?.hostname != null && ctx.observer?.hostname != ''"
+
+  - append:
+      field: related.hosts
+      allow_duplicates: false
+      value: "{{host.name}}"
+      if: "ctx?.host?.name != null && ctx.host?.name != ''"
+
+  - append:
+      field: related.hosts
+      allow_duplicates: false
+      value: "{{url.domain}}"
+      if: "ctx?.url?.domain != null && ctx.url?.domain != ''"
 
 on_failure:
   - set:
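
The grok chain above decomposes a URL into its ECS components with custom pattern definitions rather than `uri_parts`. A rough, hypothetical walk-through of what those patterns extract (input value invented):

```yaml
# url.original: "https://user@www.example.com:8080/a/b.txt?q=1#frag"
# would yield approximately:
#   url.scheme: https            url.username: user
#   url.domain: www.example.com  url.port: 8080
#   url.path: /a/b.txt           url.query: q=1     url.fragment: frag
# and the follow-up grok on url.path then adds url.extension: txt.
```
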
diff --git a/x-pack/filebeat/module/threatintel/abuseurl/ingest/pipeline.yml b/x-pack/filebeat/module/threatintel/abuseurl/ingest/pipeline.yml
index ed2ebeda10d..6b6b37be9a5 100644
--- a/x-pack/filebeat/module/threatintel/abuseurl/ingest/pipeline.yml
+++ b/x-pack/filebeat/module/threatintel/abuseurl/ingest/pipeline.yml
@@ -51,11 +51,10 @@ processors:
     target_field: threatintel.indicator.url
     keep_original: true
     remove_if_successful: true
-- rename:
-    field: threatintel.abuseurl.url
-    target_field: threatintel.indicator.url.full
-    ignore_missing: true
-    if: ctx?.threatintel?.indicator?.url?.original == null && ctx?.threatintel?.abuseurl?.url != null
+- set:
+    field: threatintel.indicator.url.full
+    value: '{{{threatintel.indicator.url.original}}}'
+    ignore_empty_value: true
 - rename:
     field: threatintel.abuseurl.host
     target_field: threatintel.indicator.domain
diff --git a/x-pack/filebeat/module/threatintel/anomali/ingest/pipeline.yml b/x-pack/filebeat/module/threatintel/anomali/ingest/pipeline.yml
index 239cbc608f5..7e95f2f64fb 100644
--- a/x-pack/filebeat/module/threatintel/anomali/ingest/pipeline.yml
+++ b/x-pack/filebeat/module/threatintel/anomali/ingest/pipeline.yml
@@ -82,11 +82,10 @@ processors:
     keep_original: true
     remove_if_successful: true
     if: ctx?.threatintel?.indicator?.type == 'url'
-- rename:
-    field: _tmp.threatvalue
-    target_field: threatintel.indicator.url.full
-    ignore_missing: true
-    if: ctx?.threatintel?.indicator?.type == 'url' && ctx?.threatintel?.indicator?.url?.original == null
+- set:
+    field: threatintel.indicator.url.full
+    value: '{{{threatintel.indicator.url.original}}}'
+    ignore_empty_value: true
 - rename:
     field: _tmp.threatvalue
     target_field: threatintel.indicator.email.address
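
In the threatintel pipelines, the replacement leans on `uri_parts`, whose `keep_original: true` option stores the unparsed input at `<target_field>.original`; the templated `set` then mirrors that back into `url.full`. The resulting processor pair (the `uri_parts` source field below is illustrative):

```yaml
- uri_parts:
    field: threatintel.abuseurl.url
    target_field: threatintel.indicator.url
    keep_original: true
    remove_if_successful: true
- set:
    field: threatintel.indicator.url.full
    value: '{{{threatintel.indicator.url.original}}}'
    ignore_empty_value: true
```
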
diff --git a/x-pack/filebeat/module/threatintel/anomalithreatstream/ingest/pipeline.yml b/x-pack/filebeat/module/threatintel/anomalithreatstream/ingest/pipeline.yml
new file mode 100644
index 00000000000..be753edb153
--- /dev/null
+++ b/x-pack/filebeat/module/threatintel/anomalithreatstream/ingest/pipeline.yml
@@ -0,0 +1,401 @@
+description: Pipeline for parsing Anomali ThreatStream
+processors:
+#
+# Safeguard against feeding the pipeline with documents other
+# than the ones generated by Filebeat's http_endpoint input.
+#
+- fail:
+    if: 'ctx.json == null || !(ctx.json instanceof Map)'
+    message: 'missing json object in input document'
+
+#
+# Set basic ECS fields.
+#
+- set:
+    field: event.ingested
+    value: '{{{ _ingest.timestamp }}}'
+- set:
+    field: event.kind
+    value: enrichment
+- set:
+    field: event.category
+    value: threat
+- set:
+    field: event.type
+    value: indicator
+
+#
+# Map itype field to STIX 2.0 Cyber Observable values (threatintel.indicator.type).
+#
+- script:
+    lang: painless
+    if: 'ctx.json.itype != null'
+    description: >
+      Map itype field to STIX 2.0 Cyber Observable values (threatintel.indicator.type).
+    params:
+      actor_ip: ipv4-addr
+      adware_domain: domain-name
+      anon_proxy: ipv4-addr
+      anon_vpn: ipv4-addr
+      apt_domain: domain-name
+      apt_email: email-addr
+      apt_ip: ipv4-addr
+      apt_md5: file
+      apt_subject: email
+      apt_ua: url
+      apt_url: url
+      bot_ip: ipv4-addr
+      brute_ip: ipv4-addr
+      c2_domain: domain-name
+      c2_ip: ipv4-addr
+      c2_url: url
+      comm_proxy_domain: domain-name
+      comm_proxy_ip: ipv4-addr
+      compromised_domain: domain-name
+      compromised_ip: ipv4-addr
+      compromised_url: url
+      crypto_hash: file
+      crypto_ip: ipv4-addr
+      crypto_pool: domain-name
+      crypto_url: url
+      crypto_wallet: file
+      ddos_ip: ipv4-addr
+      disposable_email_domain: domain-name
+      dyn_dns: domain-name
+      exfil_domain: domain-name
+      exfil_ip: ipv4-addr
+      exfil_url: url
+      exploit_domain: domain-name
+      exploit_ip: ipv4-addr
+      exploit_url: url
+      free_email_domain: domain-name
+      geolocation_url: url
+      hack_tool: file
+      i2p_ip: ipv4-addr
+      ipcheck_url: url
+      mal_domain: domain-name
+      mal_email: email-addr
+      mal_ip: ipv4-addr
+      mal_md5: file
+      mal_sslcert_sh1: x509-certificate
+      mal_sslcert_sha1: x509-certificate
+      mal_ua: url
+      mal_url: url
+      p2pcnc: ipv4-addr
+      parked_domain: domain-name
+      parked_ip: ipv4-addr
+      parked_url: url
+      pastesite_url: url
+      phish_domain: domain-name
+      phish_email: email-addr
+      phish_ip: ipv4-addr
+      phish_url: url
+      proxy_ip: ipv4-addr
+      scan_ip: ipv4-addr
+      sinkhole_domain: domain-name
+      sinkhole_ip: ipv4-addr
+      spam_domain: domain-name
+      spam_email: email-addr
+      spam_ip: ipv4-addr
+      spam_url: url
+      speedtest_url: url
+      ssh_ip: ipv4-addr
+      suppress: suppress
+      suspicious_domain: domain-name
+      suspicious_email: email-addr
+      suspicious_ip: ipv4-addr
+      suspicious_reg_email: email-addr
+      suspicious_url: url
+      tor_ip: ipv4-addr
+      torrent_tracker_url: url
+      vpn_domain: domain-name
+      vps_ip: ipv4-addr
+      whois_bulk_reg_email: email-addr
+      whois_privacy_domain: domain-name
+      whois_privacy_email: email-addr
+    source: >
+      String mapping = params[ctx.json.itype];
+      if (mapping != null) {
+        ctx["threatintel_indicator_type"] = mapping;
+      }
+    on_failure:
+      - append:
+          field: error.message
+          value: 'Unable to determine indicator type from "{{{ json.itype }}}": {{{ _ingest.on_failure_message }}}'
+
+- rename:
+    field: threatintel_indicator_type
+    target_field: threatintel.indicator.type
+    ignore_missing: true
+
+#
+# Detect ipv6 for ipv4-addr types.
+#
+- set:
+    field: threatintel.indicator.type
+    value: ipv6-addr
+    if: 'ctx.threatintel?.indicator?.type == "ipv4-addr" && ctx.json.srcip != null && ctx.json.srcip.contains(":")'
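+
+# Spot-check of the mapping above (hypothetical input): a document with
+# json.itype "c2_domain" comes out with threatintel.indicator.type
+# "domain-name"; an itype missing from the params table leaves the field
+# unset, and an ipv4-addr result is upgraded to ipv6-addr when srcip
+# contains a colon.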
+
+#
+# Map first and last seen dates.
+#
+- date:
+    field: json.date_first
+    target_field: threatintel.indicator.first_seen
+    formats:
+      - ISO8601
+    if: 'ctx.json.date_first != null'
+    on_failure:
+      - append:
+          field: error.message
+          value: 'Error parsing date_first field value "{{{ json.date_first }}}": {{{ _ingest.on_failure_message }}}'
+
+- date:
+    field: json.date_last
+    target_field: threatintel.indicator.last_seen
+    formats:
+      - ISO8601
+    if: 'ctx.json.date_last != null'
+    on_failure:
+      - append:
+          field: error.message
+          value: 'Error parsing date_last field value "{{{ json.date_last }}}": {{{ _ingest.on_failure_message }}}'
+
+#
+# Map IP geolocation fields.
+#
+- convert:
+    field: json.lat
+    target_field: threatintel.indicator.geo.location.lat
+    type: double
+    if: 'ctx.json.lat != null && ctx.json.lon != null'
+    on_failure:
+      - append:
+          field: error.message
+          value: 'Cannot convert lat field "{{{ json.lat }}}" to double: {{{ _ingest.on_failure_message }}}'
+- convert:
+    field: json.lon
+    target_field: threatintel.indicator.geo.location.lon
+    type: double
+    if: 'ctx.json.lat != null && ctx.json.lon != null'
+    on_failure:
+      - append:
+          field: error.message
+          value: 'Cannot convert lon field "{{{ json.lon }}}" to double: {{{ _ingest.on_failure_message }}}'
+
+#
+# Map classification field to Traffic Light Protocol (TLP).
+# Currently:
+#   public  => White ("Disclosure is not limited.")
+#   private => Amber ("Limited disclosure, restricted to participants’ organizations.")
+#
+- set:
+    field: threatintel.indicator.marking.tlp
+    value: Amber
+    if: 'ctx.json.classification == "private"'
+- set:
+    field: threatintel.indicator.marking.tlp
+    value: White
+    if: 'ctx.json.classification == "public"'
+
+#
+# Convert confidence field (-1..100) to ECS confidence (0..10).
+#
+- script:
+    lang: painless
+    description: >
+      Normalize confidence level.
+    source: >
+      def value = ctx.json.confidence;
+      if (value == null || value < 0.0 || value > 100.0) return;
+      ctx["threatintel_indicator_confidence"] = (long)Math.round((double)value / 10.0);
+    on_failure:
+      - append:
+          field: error.message
+          value: 'failed to normalize confidence value `{{{ json.confidence }}}`: {{{ _ingest.on_failure_message }}}'
+
+- rename:
+    field: threatintel_indicator_confidence
+    target_field: threatintel.indicator.confidence
+    ignore_missing: true
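+
+# Worked example (hypothetical values): json.confidence 85 yields
+# threatintel.indicator.confidence 9, since round(85 / 10.0) == 9; an
+# out-of-range value such as -1 is left unmapped.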
+
+#
+# Convert asn field.
+#
+- convert:
+    field: json.asn
+    target_field: threatintel.indicator.as.number
+    type: long
+    ignore_missing: true
+    on_failure:
+      - append:
+          field: error.message
+          value: 'Cannot convert asn field `{{{ json.asn }}}` to long: {{{ _ingest.on_failure_message }}}'
+
+- rename:
+    field: json.org
+    target_field: threatintel.indicator.as.organization.name
+    ignore_missing: true
+
+- rename:
+    field: json.domain
+    target_field: threatintel.indicator.domain
+    ignore_missing: true
+
+- rename:
+    field: json.email
+    target_field: threatintel.indicator.email.address
+    ignore_missing: true
+
+- rename:
+    field: json.srcip
+    target_field: threatintel.indicator.ip
+    ignore_missing: true
+
+- uri_parts:
+    field: json.url
+    target_field: threatintel.indicator.url
+    keep_original: true
+    remove_if_successful: true
+    if: 'ctx.json.url != null'
+    on_failure:
+      - append:
+          field: error.message
+          value: 'Cannot parse url field `{{{ json.url }}}`: {{{ _ingest.on_failure_message }}}'
+
+- set:
+    field: threatintel.indicator.url.full
+    value: '{{{threatintel.indicator.url.original}}}'
+    ignore_empty_value: true
+
+- rename:
+    field: json.country
+    target_field: threatintel.indicator.geo.country_iso_code
+    ignore_missing: true
+
+#
+# md5 field can actually contain different kinds of hash.
+# Map to file.hash.* depending on hash length.
+#
+- rename:
+    field: json.md5
+    target_field: threatintel.indicator.file.hash.md5
+    if: 'ctx.json.md5 != null && ctx.json.md5.length() == 32'
+
+- rename:
+    field: json.md5
+    target_field: threatintel.indicator.file.hash.sha1
+    if: 'ctx.json.md5 != null && ctx.json.md5.length() == 40'
+
+- rename:
+    field: json.md5
+    target_field: threatintel.indicator.file.hash.sha256
+    if: 'ctx.json.md5 != null && ctx.json.md5.length() == 64'
+
+- rename:
+    field: json.md5
+    target_field: threatintel.indicator.file.hash.sha512
+    if: 'ctx.json.md5 != null && ctx.json.md5.length() == 128'
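+
+# Spot-check (hypothetical value): a 32-character hash such as
+# "d41d8cd98f00b204e9800998ecf8427e" is renamed to
+# threatintel.indicator.file.hash.md5; 40, 64 and 128 characters map to
+# sha1, sha256 and sha512, and any other length leaves json.md5 in place.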
+
+- rename:
+    field: json.source
+    target_field: threatintel.indicator.provider
+    ignore_missing: true
+
+#
+# Map field severity to event severity as follows:
+#   low       => 3
+#   medium    => 5
+#   high      => 7
+#   very-high => 9
+#
+- set:
+    field: event.severity
+    value: 3
+    if: 'ctx.json.severity == "low"'
+
+- set:
+    field: event.severity
+    value: 5
+    if: 'ctx.json.severity == "medium"'
+
+- set:
+    field: event.severity
+    value: 7
+    if: 'ctx.json.severity == "high"'
+
+- set:
+    field: event.severity
+    value: 9
+    if: 'ctx.json.severity == "very-high"'
+
+#
+# Field trusted_circle_ids is a comma-separated string
+# that can contain leading and trailing separators (i.e. ",123,").
+# Need a script processor as the split processor doesn't support
+# removing non-trailing separators.
+#
+- script:
+    lang: painless
+    if: 'ctx.json.trusted_circle_ids != null && ctx.json.trusted_circle_ids instanceof String'
+    description: >
+      Convert trusted_circle_ids from CSV to an array.
+    source: >
+      def lst =
+        Stream.of(ctx.json.trusted_circle_ids.splitOnToken(","))
+              .filter(s -> !s.isEmpty())
+              .toArray(String[]::new);
+      if (lst.length > 0) {
+        ctx.json.trusted_circle_ids = lst;
+      } else {
+        ctx.json.remove('trusted_circle_ids');
+      }
+    on_failure:
+      - append:
+          field: error.message
+          value: 'unable to split trusted_circle_ids "{{{ json.trusted_circle_ids }}}": {{{ _ingest.on_failure_message }}}'
+
+#
+# Split detail field and append each component to ECS tags field.
+#
+- split:
+    field: json.detail
+    separator: '(?
diff --git a/x-pack/filebeat/module/threatintel/otx/ingest/pipeline.yml b/x-pack/filebeat/module/threatintel/otx/ingest/pipeline.yml
index a4a16035111..fa47a5ca52a 100644
--- a/x-pack/filebeat/module/threatintel/otx/ingest/pipeline.yml
+++ b/x-pack/filebeat/module/threatintel/otx/ingest/pipeline.yml
@@ -85,16 +85,11 @@ processors:
     keep_original: true
     remove_if_successful: true
     if: ctx?.threatintel?.indicator?.type == 'url'
-- rename:
-    field: threatintel.otx.indicator
-    target_field: threatintel.indicator.url.full
-    ignore_missing: true
-    if: "ctx?.threatintel?.otx?.type == 'URL'"
-- rename:
-    field: threatintel.otx.indicator
-    target_field: threatintel.indicator.url.path
-    ignore_missing: true
-    if: "ctx?.threatintel?.otx?.type == 'URI'"
+- set:
+    field: threatintel.indicator.url.full
+    value: '{{{threatintel.indicator.url.original}}}'
+    ignore_empty_value: true
+    if: "ctx?.threatintel?.otx?.type == 'URL'"
 
 ## Email indicator operations
 - set:
diff --git a/x-pack/filebeat/module/threatintel/recordedfuture/ingest/pipeline.yml b/x-pack/filebeat/module/threatintel/recordedfuture/ingest/pipeline.yml
new file mode 100644
index 00000000000..71a261a12af
--- /dev/null
+++ b/x-pack/filebeat/module/threatintel/recordedfuture/ingest/pipeline.yml
@@ -0,0 +1,236 @@
+description: Pipeline for parsing Recorded Future threat intel.
+processors:
+  #
+  # Safeguard against feeding the pipeline with documents other
+  # than the ones generated by Filebeat's httpjson input.
+  #
+  - fail:
+      if: 'ctx.json == null || !(ctx.json instanceof Map)'
+      message: 'missing json object in input document'
+
+  #
+  # Set basic ECS fields.
+  #
+  - set:
+      field: event.ingested
+      value: '{{{ _ingest.timestamp }}}'
+  - set:
+      field: event.kind
+      value: enrichment
+  - set:
+      field: event.category
+      value: threat
+  - set:
+      field: event.type
+      value: indicator
+
+  #
+  # Map entity.type field to STIX 2.0 Cyber Observable values (threatintel.indicator.type).
+  #
+  - script:
+      lang: painless
+      if: 'ctx.json.entity?.type != null'
+      description: >
+        Map entity.type field to STIX 2.0 Cyber Observable values (threatintel.indicator.type).
+      params:
+        IpAddress: ipv4-addr
+        InternetDomainName: domain-name
+        Hash: file
+        URL: url
+      source: >
+        String mapping = params[ctx.json.entity.type];
+        if (mapping != null) {
+          ctx["threatintel_indicator_type"] = mapping;
+        }
+      on_failure:
+        - append:
+            field: error.message
+            value: 'Unable to determine indicator type from "{{{ json.entity.type }}}": {{{ _ingest.on_failure_message }}}'
+
+  - rename:
+      field: threatintel_indicator_type
+      target_field: threatintel.indicator.type
+      ignore_missing: true
+
+  #
+  # Detect ipv6 for ipv4-addr types.
+  #
+  - set:
+      field: threatintel.indicator.type
+      value: ipv6-addr
+      if: 'ctx.threatintel?.indicator?.type == "ipv4-addr" && ctx.json.entity.name != null && ctx.json.entity.name.contains(":")'
+
+  #
+  # Map first and last seen dates.
+  #
+  - date:
+      field: json.timestamps.firstSeen
+      target_field: threatintel.indicator.first_seen
+      formats:
+        - ISO8601
+      if: 'ctx.json.timestamps?.firstSeen != null'
+      on_failure:
+        - append:
+            field: error.message
+            value: 'Error parsing firstSeen field value "{{{ json.timestamps.firstSeen }}}": {{{ _ingest.on_failure_message }}}'
+  - date:
+      field: json.timestamps.lastSeen
+      target_field: threatintel.indicator.last_seen
+      formats:
+        - ISO8601
+      if: 'ctx.json.timestamps?.lastSeen != null'
+      on_failure:
+        - append:
+            field: error.message
+            value: 'Error parsing lastSeen field value "{{{ json.timestamps.lastSeen }}}": {{{ _ingest.on_failure_message }}}'
+
+  #
+  # Map location fields.
+  #
+  - rename:
+      field: json.location.location.city
+      target_field: threatintel.indicator.geo.city_name
+      ignore_missing: true
+  - rename:
+      field: json.location.location.continent
+      target_field: threatintel.indicator.geo.continent_name
+      ignore_missing: true
+  - rename:
+      field: json.location.location.country
+      target_field: threatintel.indicator.geo.country_name
+      ignore_missing: true
+  - grok:
+      field: json.location.asn
+      patterns:
+        - '^(?:[Aa][Ss])?%{NUMBER:threatintel.indicator.as.number:long}$'
+      ignore_missing: true
+      on_failure:
+        - append:
+            field: error.message
+            value: 'Cannot parse asn field `{{{ json.location.asn }}}`: {{{ _ingest.on_failure_message }}}'
+
+  - rename:
+      field: json.location.organization
+      target_field: threatintel.indicator.as.organization.name
+      ignore_missing: true
+
+  - set:
+      field: event.reference
+      value: '{{{ json.intelCard }}}'
+      ignore_empty_value: true
+
+  - set:
+      field: json.ip_range
+      value: '{{{json.entity.name}}}'
+      if: 'ctx.json.entity?.type == "IpAddress" && ctx.json.entity.name != null && ctx.json.entity.name.contains("/")'
+  - set:
+      field: json.ip_range
+      value: '{{{ json.entity.name }}}/32'
+      if: 'ctx.threatintel?.indicator?.type == "ipv4-addr" && ctx.json.entity.name != null && !ctx.json.entity.name.contains("/")'
+  - set:
+      field: json.ip_range
+      value: '{{{ json.entity.name }}}/128'
+      if: 'ctx.threatintel?.indicator?.type == "ipv6-addr" && ctx.json.entity.name != null && !ctx.json.entity.name.contains("/")'
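+
+  # Spot-check (hypothetical values): entity.name "198.51.100.7" becomes
+  # json.ip_range "198.51.100.7/32", "2001:db8::1" becomes "2001:db8::1/128",
+  # and a value already in CIDR form such as "198.51.100.0/24" is kept as-is.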
+
+  - rename:
+      field: json.entity.name
+      target_field: threatintel.indicator.ip
+      if: 'ctx.json.entity?.type == "IpAddress" && ctx.json.entity.name != null && !ctx.json.entity.name.contains("/")'
+
+  - rename:
+      field: json.entity.name
+      target_field: threatintel.indicator.domain
+      ignore_missing: true
+      if: 'ctx.threatintel?.indicator?.type == "domain-name"'
+
+  - uri_parts:
+      field: json.entity.name
+      target_field: threatintel.indicator.url
+      keep_original: true
+      remove_if_successful: true
+      if: 'ctx.threatintel?.indicator?.type == "url"'
+      on_failure:
+        - append:
+            field: error.message
+            value: 'Cannot parse url field `{{{ json.entity.name }}}`: {{{ _ingest.on_failure_message }}}'
+
+  # At this point fileHashes may exist if the "fileHashes" field is requested.
+  - append:
+      field: json.fileHashes
+      value: '{{{ json.entity.name }}}'
+      allow_duplicates: false
+      if: 'ctx.threatintel?.indicator?.type == "file"'
+
+  - remove:
+      field: json.entity.name
+      if: 'ctx.threatintel?.indicator?.type == "file"'
+
+  - script:
+      lang: painless
+      description: >
+        Map file hashes.
+      if: 'ctx.json.fileHashes != null'
+      params:
+        '4': crc32
+        '32': md5
+        '40': sha1
+        '64': sha256
+        '128': sha512
+      source: >
+        def hashes = new HashMap();
+        for (def hash : ctx.json.fileHashes) {
+          def algo = params[String.valueOf(hash.length())];
+          if (algo != null) {
+            hashes[algo] = hash;
+          }
+        }
+        ctx["_hashes"] = hashes;
+      on_failure:
+        - append:
+            field: error.message
+            value: 'Failed to map fileHashes field: {{ _ingest.on_failure_message }}'
+
+  - rename:
+      field: _hashes
+      target_field: threatintel.indicator.file.hash
+      ignore_missing: true
+
+  #
+  # Map risk.score to event.risk_score.
+  #
+  - convert:
+      field: json.risk.score
+      target_field: event.risk_score
+      ignore_missing: true
+      type: float
+      on_failure:
+        - append:
+            field: error.message
+            value: 'Risk score `{{{ json.risk.score }}}` cannot be converted to float: {{ _ingest.on_failure_message }}'
+
+  #
+  # Remove fields converted to an ECS field.
+  #
+  - remove:
+      field:
+        - json.timestamps
+        - json.location
+        - json.fileHashes
+        - message
+      ignore_missing: true
+
+  #
+  # Save fields without an ECS mapping under `threatintel.recordedfuture`.
+  #
+  - rename:
+      field: json
+      target_field: threatintel.recordedfuture
+
+on_failure:
+  - append:
+      field: error.message
+      value: '{{ _ingest.on_failure_message }}'
diff --git a/x-pack/filebeat/module/zoom/webhook/ingest/meeting.yml b/x-pack/filebeat/module/zoom/webhook/ingest/meeting.yml
index 9291add3593..7fcdb81b5b2 100644
--- a/x-pack/filebeat/module/zoom/webhook/ingest/meeting.yml
+++ b/x-pack/filebeat/module/zoom/webhook/ingest/meeting.yml
@@ -44,8 +44,17 @@ processors:
     field: zoom.registrant.join_url
     target_field: url.full
     ignore_missing: true
     if: ctx?.url?.full == null
+- uri_parts:
+    field: url.original
+    ignore_failure: true
+    if: ctx?.url?.original != null
+- set:
+    field: url.full
+    value: '{{{url.original}}}'
+    ignore_failure: true
+    if: ctx?.url?.original != null
 
 #
 # Set user.* from participant, if any.
 #
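
The zoom change follows the same shape as the threatintel ones: when a prior step leaves `url.original` populated, `uri_parts` re-derives the component fields and the templated `set` mirrors the value into `url.full`. A hypothetical spot-check:

```yaml
# Given url.original: "https://zoom.us/j/5551112222" (invented value),
# uri_parts fills url.scheme, url.domain and url.path, and the set
# processor ends up with url.full: "https://zoom.us/j/5551112222".
```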