Skip to content

Commit

Permalink
Merge pull request #624 from airbnb/jacknaglieri-req-subkeys-fix
Browse files Browse the repository at this point in the history
Required Sub-key Bug Fix, Community Rules Update, and More
  • Loading branch information
jacknagz authored Mar 8, 2018
2 parents 3c70334 + 793d3c6 commit e8151bf
Show file tree
Hide file tree
Showing 29 changed files with 147 additions and 165 deletions.
5 changes: 4 additions & 1 deletion conf/outputs.json
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,11 @@
"aws-lambda": {
"sample-lambda": "function-name:qualifier"
},
"aws-firehose": {
"alerts": "<prefix>_streamalert_alert_delivery"
},
"aws-s3": {
"sample-bucket": "sample.bucket.name"
"bucket": "aws-s3-bucket"
},
"komand": [
"sample-integration"
Expand Down
18 changes: 8 additions & 10 deletions docs/source/rules.rst
Original file line number Diff line number Diff line change
Expand Up @@ -129,9 +129,11 @@ An alert can be sent to multiple destinations.
req_subkeys
~~~~~~~~~~~

``req_subkeys`` is an optional argument which defines required sub-keys that must exist in the incoming record in order for it to be evaluated.
``req_subkeys`` is an optional argument which defines sub-keys that must exist in the incoming record in order for it to be evaluated.

This feature should be avoided, but it is useful if you defined a loose schema to trade flexibility for safety; see `Schemas <conf-schemas.html#json-example-osquery>`_.
Each defined sub-key must also have a non-zero value in order for the rule to evaluate the log.

This feature should be used if your logs have a loosely defined schema, in order to avoid a ``KeyError`` in rules.

Examples:

Expand All @@ -143,15 +145,11 @@ Examples:
@rule(logs=['osquery:differential'],
outputs=['pagerduty', 'aws-s3'],
req_subkeys={'columns':['address', 'hostnames']})
...
# The 'columns' key must contain
# sub-keys of 'port' and 'protocol'
def osquery_host_check(rec):
# If all logs did not have the 'address' sub-key, this rule would
# throw a KeyError. Using req_subkeys avoids this.
return rec['columns']['address'] == '127.0.0.1'
@rule(logs=['osquery:differential'],
outputs=['pagerduty', 'aws-s3'],
req_subkeys={'columns':['port', 'protocol']})
...
context
~~~~~~~~~~~
Expand Down
15 changes: 15 additions & 0 deletions helpers/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
"""
from fnmatch import fnmatch
import logging
import json
import time

from netaddr import IPAddress, IPNetwork
Expand Down Expand Up @@ -217,3 +218,17 @@ def data_has_value_from_substring_list(data, needle_list):
return False

return any(needle in data for needle in needle_list)

def safe_json_loads(data):
    """Safely load a JSON string, returning an empty dict on failure

    Args:
        data (str): A JSON string to deserialize

    Returns:
        The deserialized JSON value (typically a dict for object payloads),
        or an empty dict if ``data`` is not valid JSON or not a string.
    """
    try:
        return json.loads(data)
    except (ValueError, TypeError):
        # ValueError: malformed JSON; TypeError: non-string input (e.g. None),
        # which the original code let propagate despite the "safe" contract
        return {}
8 changes: 3 additions & 5 deletions rules/community/cloudtrail/cloudtrail_critical_api_calls.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,11 +6,9 @@
disable = StreamRules.disable()


@rule(logs=['cloudtrail:events'],
matchers=[],
outputs=['aws-s3:sample-bucket',
'pagerduty:sample-integration',
'slack:sample-channel'])
@rule(
logs=['cloudtrail:events'],
outputs=['aws-firehose:alerts'])
def cloudtrail_critical_api_calls(rec):
"""
author: airbnb_csirt
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,11 +6,9 @@
disable = StreamRules.disable()


@rule(logs=['cloudtrail:events'],
matchers=[],
outputs=['aws-s3:sample-bucket',
'pagerduty:sample-integration',
'slack:sample-channel'])
@rule(
logs=['cloudtrail:events'],
outputs=['aws-firehose:alerts'])
def cloudtrail_mfa_policy_abuse_attempt(rec):
"""
author: Scott Piper of Summit Route in collaboration with Duo Security
Expand Down Expand Up @@ -70,8 +68,8 @@ def cloudtrail_mfa_policy_abuse_attempt(rec):
# - 'AccessDenied'
# - 'EntityAlreadyExists': Can't create another MFA device with the same name.
# - 'LimitExceeded': Can't enable a second MFA device for the same user.
if ('errorCode' in rec and
in_set(rec['eventName'], {'CreateVirtualMFADevice', 'EnableMFADevice'})):
if ('errorCode' in rec
and in_set(rec['eventName'], {'CreateVirtualMFADevice', 'EnableMFADevice'})):
return True

return False
Original file line number Diff line number Diff line change
Expand Up @@ -3,12 +3,13 @@

rule = StreamRules.rule

@rule(logs=['cloudwatch:events'],
matchers=[],
outputs=['aws-s3:sample-bucket',
'pagerduty:sample-integration',
'slack:sample-channel'],
req_subkeys={'detail': ['eventName', 'requestParameters']})

@rule(
logs=['cloudwatch:events'],
outputs=['aws-firehose:alerts'],
req_subkeys={
'detail': ['eventName', 'requestParameters']
})
def cloudtrail_network_acl_ingress_anywhere(rec):
"""
author: @mimeframe
Expand All @@ -22,8 +23,6 @@ def cloudtrail_network_acl_ingress_anywhere(rec):

req_params = rec['detail']['requestParameters']

return (
req_params['cidrBlock'] == '0.0.0.0/0' and
req_params['ruleAction'] == 'allow' and
req_params['egress'] is False
)
return (req_params['cidrBlock'] == '0.0.0.0/0'
and req_params['ruleAction'] == 'allow'
and req_params['egress'] is False)
12 changes: 6 additions & 6 deletions rules/community/cloudtrail/cloudtrail_put_bucket_acl.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,12 +4,12 @@
rule = StreamRules.rule


@rule(logs=['cloudwatch:events'],
matchers=[],
outputs=['aws-s3:sample-bucket',
'pagerduty:sample-integration',
'slack:sample-channel'],
req_subkeys={'detail': ['requestParameters', 'eventName']})
@rule(
logs=['cloudwatch:events'],
outputs=['aws-firehose:alerts'],
req_subkeys={
'detail': ['requestParameters', 'eventName']
})
def cloudtrail_put_bucket_acl(rec):
"""
author: airbnb_csirt
Expand Down
21 changes: 10 additions & 11 deletions rules/community/cloudtrail/cloudtrail_put_object_acl_public.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,11 +4,13 @@

rule = StreamRules.rule

@rule(logs=['cloudwatch:events'],
outputs=['aws-s3:sample-bucket',
'pagerduty:sample-integration',
'slack:sample-channel'],
req_subkeys={'detail': ['requestParameters']})

@rule(
logs=['cloudwatch:events'],
outputs=['aws-firehose:alerts'],
req_subkeys={
'detail': ['requestParameters']
})
def cloudtrail_put_object_acl_public(rec):
"""
author: @mimeframe
Expand All @@ -25,16 +27,13 @@ def cloudtrail_put_object_acl_public(rec):
}

# s3 buckets that are expected to have public objects
public_buckets = {
'example-bucket-to-ignore'
}
public_buckets = {'example-bucket-to-ignore'}

request_params = rec['detail']['requestParameters']
return (
rec['detail']['eventName'] == 'PutObjectAcl' and
# note: substring is used because it can exist as:
# "http://acs.amazonaws.com/groups/global/AllUsers" or
# "uri=http://acs.amazonaws.com/groups/global/AllUsers"
data_has_value_from_substring_list(request_params, public_acls) and
not in_set(request_params.get('bucketName'), public_buckets)
)
data_has_value_from_substring_list(request_params, public_acls)
and not in_set(request_params.get('bucketName'), public_buckets))
20 changes: 9 additions & 11 deletions rules/community/cloudtrail/cloudtrail_root_account_usage.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,12 +4,12 @@
rule = StreamRules.rule


@rule(logs=['cloudwatch:events'],
matchers=[],
outputs=['aws-s3:sample-bucket',
'pagerduty:sample-integration',
'slack:sample-channel'],
req_subkeys={'detail': ['userIdentity', 'eventType']})
@rule(
logs=['cloudwatch:events'],
outputs=['aws-firehose:alerts'],
req_subkeys={
'detail': ['userIdentity', 'eventType']
})
def cloudtrail_root_account_usage(rec):
"""
author: airbnb_csirt
Expand All @@ -22,8 +22,6 @@ def cloudtrail_root_account_usage(rec):
(b) ping the individual to determine if intentional and/or legitimate
"""
# reference_1 contains details on logic below
return (
rec['detail']['userIdentity']['type'] == 'Root' and
rec['detail']['userIdentity'].get('invokedBy') is None and
rec['detail']['eventType'] != 'AwsServiceEvent'
)
return (rec['detail']['userIdentity']['type'] == 'Root'
and rec['detail']['userIdentity'].get('invokedBy') is None
and rec['detail']['eventType'] != 'AwsServiceEvent')
Original file line number Diff line number Diff line change
Expand Up @@ -4,12 +4,13 @@

rule = StreamRules.rule

@rule(logs=['cloudwatch:events'],
matchers=[],
outputs=['aws-s3:sample-bucket',
'pagerduty:sample-integration',
'slack:sample-channel'],
req_subkeys={'detail': ['eventName', 'requestParameters']})

@rule(
logs=['cloudwatch:events'],
outputs=['aws-firehose:alerts'],
req_subkeys={
'detail': ['eventName', 'requestParameters']
})
def cloudtrail_security_group_ingress_anywhere(rec):
"""
author: @mimeframe, @ryandeivert
Expand Down
Original file line number Diff line number Diff line change
@@ -1,20 +1,16 @@
"""Alert when a DUO bypass code is artisanly crafted and not auto-generated."""
import json
from helpers.base import safe_json_loads
from stream_alert.rule_processor.rules_engine import StreamRules

rule = StreamRules.rule

@rule(logs=['duo:administrator'],
outputs=['aws-s3:sample-bucket',
'pagerduty:sample-integration',
'slack:sample-channel'])

@rule(logs=['duo:administrator'], outputs=['aws-firehose:alerts'])
def duo_bypass_code_create_non_auto_generated(rec):
    """
    author: @mimeframe
    description: Alert when a DUO bypass code is artisanly crafted and not auto-generated.
    reference: https://duo.com/docs/administration-users#generating-a-bypass-code
    """
    # 'description' is a JSON-encoded string; safe_json_loads yields {} on bad
    # input, so a missing/garbled payload falls through to no alert.
    # 'is False' requires an explicit false flag, not merely an absent key.
    return (rec['action'] == 'bypass_create'
            and safe_json_loads(rec['description']).get('auto_generated') is False)
Original file line number Diff line number Diff line change
@@ -1,20 +1,16 @@
"""Alert when a DUO bypass code is created that is non-expiring."""
import json
from helpers.base import safe_json_loads
from stream_alert.rule_processor.rules_engine import StreamRules

rule = StreamRules.rule

@rule(logs=['duo:administrator'],
outputs=['aws-s3:sample-bucket',
'pagerduty:sample-integration',
'slack:sample-channel'])

@rule(logs=['duo:administrator'], outputs=['aws-firehose:alerts'])
def duo_bypass_code_create_non_expiring(rec):
    """
    author: @mimeframe
    description: Alert when a DUO bypass code is created that is non-expiring.
    reference: https://duo.com/docs/administration-users#generating-a-bypass-code
    """
    # 'description' is a JSON-encoded string; safe_json_loads yields {} on bad
    # input. A missing 'valid_secs' key also reads as None here — assumed
    # equivalent to "non-expiring" per the DUO admin log format (see reference).
    return (rec['action'] == 'bypass_create'
            and safe_json_loads(rec['description']).get('valid_secs') is None)
Original file line number Diff line number Diff line change
@@ -1,20 +1,16 @@
"""Alert when a DUO bypass code is created that has unlimited use."""
import json
from helpers.base import safe_json_loads
from stream_alert.rule_processor.rules_engine import StreamRules

rule = StreamRules.rule

@rule(logs=['duo:administrator'],
outputs=['aws-s3:sample-bucket',
'pagerduty:sample-integration',
'slack:sample-channel'])

@rule(logs=['duo:administrator'], outputs=['aws-firehose:alerts'])
def duo_bypass_code_create_unlimited_use(rec):
    """
    author: @mimeframe
    description: Alert when a DUO bypass code is created that has unlimited use.
    reference: https://duo.com/docs/administration-users#generating-a-bypass-code
    """
    # 'description' is a JSON-encoded string; safe_json_loads yields {} on bad
    # input. A missing 'remaining_uses' key also reads as None here — assumed
    # equivalent to "unlimited use" per the DUO admin log format (see reference).
    return (rec['action'] == 'bypass_create'
            and safe_json_loads(rec['description']).get('remaining_uses') is None)
Original file line number Diff line number Diff line change
Expand Up @@ -3,10 +3,8 @@

rule = StreamRules.rule

@rule(logs=['duo:authentication'],
outputs=['aws-s3:sample-bucket',
'pagerduty:sample-integration',
'slack:sample-channel'])

@rule(logs=['duo:authentication'], outputs=['aws-firehose:alerts'])
def duo_anonymous_ip_failure(rec):
"""
author: airbnb_csirt
Expand Down
6 changes: 2 additions & 4 deletions rules/community/duo_authentication/duo_fraud.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,10 +3,8 @@

rule = StreamRules.rule

@rule(logs=['duo:authentication'],
outputs=['aws-s3:sample-bucket',
'pagerduty:sample-integration',
'slack:sample-channel'])

@rule(logs=['duo:authentication'], outputs=['aws-firehose:alerts'])
def duo_fraud(rec):
"""
author: airbnb_csirt
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,10 +3,8 @@

rule = StreamRules.rule

@rule(logs=['ghe:general'],
outputs=['aws-s3:sample-bucket',
'pagerduty:sample-integration',
'slack:sample-channel'])

@rule(logs=['ghe:general'], outputs=['aws-firehose:alerts'])
def github_disable_dismiss_stale_pull_request_approvals(rec):
"""
author: @mimeframe
Expand Down
6 changes: 2 additions & 4 deletions rules/community/github/github_disable_protect_this_branch.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,10 +3,8 @@

rule = StreamRules.rule

@rule(logs=['ghe:general'],
outputs=['aws-s3:sample-bucket',
'pagerduty:sample-integration',
'slack:sample-channel'])

@rule(logs=['ghe:general'], outputs=['aws-firehose:alerts'])
def github_disable_protect_this_branch(rec):
"""
author: @mimeframe
Expand Down
Loading

0 comments on commit e8151bf

Please sign in to comment.