A collection of small fixes (#623)
austinbyers authored and Austin Byers committed Mar 12, 2018
1 parent fd1199f commit 0746fd9
Showing 17 changed files with 100 additions and 101 deletions.
4 changes: 1 addition & 3 deletions conf/lambda.json
@@ -17,7 +17,6 @@
"source_bucket": "PREFIX_GOES_HERE.streamalert.source",
"source_current_hash": "<auto_generated>",
"source_object_key": "<auto_generated>",
"third_party_libraries": [],
"timeout": 60,
"vpc_config": {
"security_group_ids": [],
@@ -37,7 +36,6 @@
"handler": "app_integrations.main.handler",
"source_bucket": "PREFIX_GOES_HERE.streamalert.source",
"source_current_hash": "<auto_generated>",
"source_object_key": "<auto_generated>",
"third_party_libraries": []
"source_object_key": "<auto_generated>"
}
}
7 changes: 7 additions & 0 deletions conf/sources.json
@@ -16,6 +16,13 @@
]
}
},
"sns": {
"prefix_cluster_sample_topic": {
"logs": [
"binaryalert"
]
}
},
"stream_alert_app": {
"prefix_cluster_box_admin_events_sm-app-name_app": {
"logs": [
Empty file.
15 changes: 15 additions & 0 deletions rules/community/binaryalert/binaryalert_yara_match.py
@@ -0,0 +1,15 @@
"""Alert on destructive AWS API calls."""
from stream_alert.rule_processor.rules_engine import StreamRules

rule = StreamRules.rule


@rule(logs=['binaryalert'],
outputs=['aws-sns:sample-topic'])
def binaryalert_yara_match(rec):
"""
author: Austin Byers (Airbnb CSIRT)
description: BinaryAlert found a binary matching a YARA rule
reference: https://binaryalert.io
"""
return rec['NumMatchedRules'] > 0
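
As a quick local sanity check of the new rule's condition, the comparison it performs can be mirrored outside the StreamRules machinery; the helper below is hypothetical and exists only to illustrate the logic, it is not part of this commit.

def _would_alert(record):
    """Hypothetical standalone mirror of the rule's condition, for illustration only."""
    return record.get('NumMatchedRules', 0) > 0

assert _would_alert({'NumMatchedRules': 3})
assert not _would_alert({'NumMatchedRules': 0})
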
1 change: 1 addition & 0 deletions stream_alert/alert_processor/main.py
@@ -13,6 +13,7 @@
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import # Suppresses RuntimeWarning import error in Lambda
from collections import OrderedDict
import json

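
A rough sketch of what this __future__ import buys (Python 2 semantics assumed; the same line is added to the other Lambda entry points below): with absolute_import in effect, a bare import inside the package always resolves to the top-level module rather than a package-relative one, which is the plausible reason the RuntimeWarning disappears when Lambda loads the handler.

from __future__ import absolute_import  # the line this commit adds

import json  # with absolute_import this is unambiguously the standard library module

print(json.dumps({'lambda_import': 'ok'}))
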
1 change: 1 addition & 0 deletions stream_alert/athena_partition_refresh/main.py
@@ -13,6 +13,7 @@
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import # Suppresses RuntimeWarning import error in Lambda
from collections import defaultdict
from datetime import datetime
import json
1 change: 1 addition & 0 deletions stream_alert/rule_processor/main.py
@@ -13,6 +13,7 @@
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import # Suppresses RuntimeWarning import error in Lambda
import importlib
import os

1 change: 1 addition & 0 deletions stream_alert/threat_intel_downloader/main.py
@@ -13,6 +13,7 @@
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import # Suppresses RuntimeWarning import error in Lambda
import json
import os

12 changes: 8 additions & 4 deletions stream_alert_cli/helpers.py
@@ -29,11 +29,13 @@
from botocore.exceptions import ClientError
from moto import (
mock_cloudwatch,
mock_kms,
mock_dynamodb2,
mock_kinesis,
mock_kms,
mock_lambda,
mock_s3,
mock_dynamodb2,
mock_sns,
mock_sqs
)

from stream_alert_cli.logger import LOGGER_CLI
@@ -522,10 +524,12 @@ def wrap(func):
"""Wrap the returned function with or without mocks"""
if context.mocked:
@mock_cloudwatch
@mock_kinesis
@mock_kms
@mock_lambda
@mock_s3
@mock_kms
@mock_kinesis
@mock_sns
@mock_sqs
def mocked(options, context):
"""This function is now mocked using moto mock decorators to
override any boto3 calls. Wrapping this function here allows
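
A minimal, self-contained sketch of what the two new mocks provide (not part of this commit, and assuming a moto version that still exposes per-service decorators): with mock_sns and mock_sqs applied, the boto3 calls made by the test helpers hit moto's in-memory backends instead of AWS.

import boto3
from moto import mock_sns, mock_sqs

@mock_sns
@mock_sqs
def _demo():
    # Both clients talk to moto's fake backends while the decorators are active
    topic = boto3.client('sns', region_name='us-east-1').create_topic(Name='sample-topic')
    queue = boto3.client('sqs', region_name='us-east-1').create_queue(QueueName='sample-queue')
    return topic['TopicArn'], queue['QueueUrl']

print(_demo())
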
2 changes: 1 addition & 1 deletion stream_alert_cli/manage_lambda/package.py
@@ -231,7 +231,7 @@ def _resolve_third_party(self, temp_package_path):

# Add any custom libs needed by rules, etc
third_party_libs.update(
set(self.config['lambda'][self.config_key]['third_party_libraries']))
set(self.config['lambda'][self.config_key].get('third_party_libraries', [])))

# Return a default of True here if no libraries to install
if not third_party_libs:
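
Switching from a direct key lookup to .get() means Lambda configs that omit "third_party_libraries" (as conf/lambda.json now does for the app config) no longer raise a KeyError when the deployment package is built. A toy illustration, with an invented config key:

config = {'lambda': {'some_function_config': {'handler': 'app_integrations.main.handler'}}}

# Old behaviour: config['lambda']['some_function_config']['third_party_libraries'] -> KeyError
libs = set(config['lambda']['some_function_config'].get('third_party_libraries', []))
print(libs)  # set() -- nothing extra to bundle
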
1 change: 0 additions & 1 deletion stream_alert_cli/terraform/threat_intel_downloader.py
@@ -55,7 +55,6 @@ def generate_threat_intel_downloader(config):
['expiration_ts', 'itype', 'source', 'type', 'value']),
'ioc_filters': ti_downloader_config.get('ioc_filters', ['crowdstrike', '@airbnb.com']),
'ioc_types': ti_downloader_config.get('ioc_types', ['domain', 'ip', 'md5']),
'autoscale': ti_downloader_config.get('autoscale', False),
'max_read_capacity': ti_downloader_config.get('max_read_capacity', '5'),
'min_read_capacity': ti_downloader_config.get('min_read_capacity', '5'),
'target_utilization': ti_downloader_config.get('target_utilization', '70')
9 changes: 9 additions & 0 deletions stream_alert_cli/test.py
@@ -772,6 +772,15 @@ def setup_outputs(self, alert):
lambda_function = parts[-1]
helpers.create_lambda_function(lambda_function,
self.region)

elif service == 'aws-sns':
topic_name = self.outputs_config[service][descriptor]
boto3.client('sns', region_name=self.region).create_topic(Name=topic_name)

elif service == 'aws-sqs':
queue_name = self.outputs_config[service][descriptor]
boto3.client('sqs', region_name=self.region).create_queue(QueueName=queue_name)

elif service == 'komand':
output_name = '{}/{}'.format(service, descriptor)
creds = {'komand_auth_token': '00000000-0000-0000-0000-000000000000',
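
For context, a rough sketch of how these new branches resolve a rule output such as 'aws-sns:sample-topic' into the resource name that gets created during test setup; the outputs_config contents below are invented for the example.

outputs_config = {'aws-sns': {'sample-topic': 'prefix_streamalert_sample_topic'},
                  'aws-sqs': {'sample-queue': 'prefix_streamalert_sample_queue'}}

service, descriptor = 'aws-sns:sample-topic'.split(':')
topic_name = outputs_config[service][descriptor]
print(topic_name)  # the topic the mocked boto3 client would create
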
3 changes: 0 additions & 3 deletions terraform/modules/tf_stream_alert_globals/main.tf
@@ -19,19 +19,16 @@ resource "aws_dynamodb_table" "alerts_table" {
name = "RuleName"
type = "S"
}

attribute {
name = "Timestamp"
type = "S"
}

// Enable expiration time while testing Dynamo table for alerts
// TODO: Remove TTL once Alert Merger is implemented
ttl {
attribute_name = "TTL"
enabled = true
}

tags {
Name = "StreamAlert"
}
83 changes: 2 additions & 81 deletions terraform/modules/tf_threat_intel_downloader/dynamodb.tf
@@ -26,95 +26,16 @@ resource "aws_dynamodb_table" "threat_intel_ioc" {
}
}

// IAM Role: Application autoscaling role
resource "aws_iam_role" "stream_alert_dynamodb_appautoscaling" {
count = "${var.autoscale ? 1 : 0}"
name = "${var.prefix}_streamalert_dynamodb_appautoscaling"
assume_role_policy = "${data.aws_iam_policy_document.appautoscaling_assume_role_policy.json}"
}

// IAM Policy Doc: Generic Application Autoscaling AssumeRole
data "aws_iam_policy_document" "appautoscaling_assume_role_policy" {
count = "${var.autoscale ? 1 : 0}"

statement {
effect = "Allow"
actions = ["sts:AssumeRole"]

principals {
type = "Service"
identifiers = ["application-autoscaling.amazonaws.com"]
}
}
}

// IAM Role Policy: Allow the appautoscaling IAM role to autoscale the DynamoDB table
resource "aws_iam_role_policy" "appautoscaling_update_table" {
count = "${var.autoscale ? 1 : 0}"
name = "DynamoDBAppAutoscaleUpdateTablePolicy"
role = "${aws_iam_role.stream_alert_dynamodb_appautoscaling.id}"
policy = "${data.aws_iam_policy_document.appautoscaling_update_table.json}"
}

// IAM Policy Doc: Allow autoscaling IAM role to send alarm to CloudWatch
// and change table settings for autoscaling.
// This policy allows the role to change table settings
data "aws_iam_policy_document" "appautoscaling_update_table" {
count = "${var.autoscale ? 1 : 0}"

statement {
effect = "Allow"

actions = [
"dynamodb:DescribeTable",
"dynamodb:UpdateTable",
]

resources = [
"${aws_dynamodb_table.threat_intel_ioc.arn}",
]
}
}

// IAM Role Policy: Allow the appautoscaling IAM role to autoscale the DynamoDB table
resource "aws_iam_role_policy" "appautoscaling_cloudwatch_alarms" {
count = "${var.autoscale ? 1 : 0}"
name = "DynamoDBAppAutoscaleCloudWatchAlarmsPolicy"
role = "${aws_iam_role.stream_alert_dynamodb_appautoscaling.id}"
policy = "${data.aws_iam_policy_document.appautoscaling_cloudwatch_alarms.json}"
}

// IAM Policy Doc: This policy allows the role to send alarms to CloudWatch.
data "aws_iam_policy_document" "appautoscaling_cloudwatch_alarms" {
count = "${var.autoscale ? 1 : 0}"

statement {
effect = "Allow"

actions = [
"cloudwatch:PutMetricAlarm",
"cloudwatch:DescribeAlarms",
"cloudwatch:GetMetricStatistics",
"cloudwatch:SetAlarmState",
"cloudwatch:DeleteAlarms",
]

resources = ["*"]
}
}

resource "aws_appautoscaling_target" "dynamodb_table_read_target" {
count = "${var.autoscale ? 1 : 0}"
max_capacity = "${var.max_read_capacity}"
min_capacity = "${var.min_read_capacity}"
resource_id = "table/${var.prefix}_streamalert_threat_intel_downloader"
role_arn = "${aws_iam_role.stream_alert_dynamodb_appautoscaling.arn}"
resource_id = "table/${aws_dynamodb_table.threat_intel_ioc.name}"
role_arn = "arn:aws:iam::${var.account_id}:role/aws-service-role/dynamodb.application-autoscaling.amazonaws.com/AWSServiceRoleForApplicationAutoScaling_DynamoDBTable"
scalable_dimension = "dynamodb:table:ReadCapacityUnits"
service_namespace = "dynamodb"
}

resource "aws_appautoscaling_policy" "dynamodb_table_read_policy" {
count = "${var.autoscale ? 1 : 0}"
name = "DynamoDBReadCapacityUtilization:${aws_appautoscaling_target.dynamodb_table_read_target.resource_id}"
policy_type = "TargetTrackingScaling"
resource_id = "${aws_appautoscaling_target.dynamodb_table_read_target.resource_id}"
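
The rewritten autoscaling target drops the custom IAM role and policies in favor of the DynamoDB service-linked role for Application Auto Scaling. As a hedged sketch of the equivalent API call (table name and capacities are placeholders, and real credentials would be needed to run it):

import boto3

autoscaling = boto3.client('application-autoscaling', region_name='us-east-1')
autoscaling.register_scalable_target(
    ServiceNamespace='dynamodb',
    ResourceId='table/prefix_streamalert_threat_intel_downloader',  # placeholder table name
    ScalableDimension='dynamodb:table:ReadCapacityUnits',
    MinCapacity=5,
    MaxCapacity=5,
    # RoleARN omitted: DynamoDB then uses the service-linked role referenced above
)
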
4 changes: 0 additions & 4 deletions terraform/modules/tf_threat_intel_downloader/variables.tf
@@ -71,10 +71,6 @@ variable "log_retention" {
default = 14
}

variable "autoscale" {
default = false
}

variable "max_read_capacity" {
default = 5
}
50 changes: 50 additions & 0 deletions tests/integration/rules/binaryalert/binaryalert_yara_match.json
@@ -0,0 +1,50 @@
{
"records": [
{
"data": {
"FileInfo": {
"MD5": "...",
"S3LastModified": "...",
"S3Location": "...",
"S3Metadata": {},
"SHA256": "..."
},
"MatchedRules": {
"Rule1": {
"MatchedStrings": [
"$eicar_regex"
],
"Meta": {
"author": "Austin Byers (Airbnb CSIRT)",
"description": "This is a standard AV test, intended to check whether BinaryAlert is working correctly.",
"reference": "http://www.eicar.org/86-0-Intended-use.html"
},
"RuleFile": "eicar.yar",
"RuleName": "eicar_av_test",
"RuleTags": []
}
},
"NumMatchedRules": "1"
},
"description": "All YARA matches from BinaryAlert trigger an alert",
"log": "binaryalert",
"service": "sns",
"source": "prefix_cluster_sample_topic",
"trigger_rules": [
"binaryalert_yara_match"
]
},
{
"data": {
"FileInfo": {},
"MatchedRules": {},
"NumMatchedRules": "0"
},
"description": "No alerts triggered if no YARA rules were matched",
"log": "binaryalert",
"service": "sns",
"source": "prefix_cluster_sample_topic",
"trigger_rules": []
}
]
}
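
A rough way to sanity-check the fixture above locally (assuming the binaryalert log schema casts NumMatchedRules to an integer before rules are evaluated; the file path is the one added in this commit):

import json

with open('tests/integration/rules/binaryalert/binaryalert_yara_match.json') as fixture:
    records = json.load(fixture)['records']

for record in records:
    matched = int(record['data']['NumMatchedRules'])  # schema-style cast, assumed
    should_alert = 'binaryalert_yara_match' in record['trigger_rules']
    assert (matched > 0) == should_alert
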
7 changes: 3 additions & 4 deletions tests/unit/stream_alert_athena_partition_refresh/test_main.py
@@ -175,14 +175,13 @@ def test_handler_no_received_messages(
@patch('stream_alert.athena_partition_refresh.main.LOGGER')
@patch('stream_alert.athena_partition_refresh.main._load_config',
return_value=CONFIG_DATA)
@patch('stream_alert.athena_partition_refresh.main.'
'StreamAlertSQSClient.unique_s3_buckets_and_keys',
return_value={})
@patch('stream_alert.athena_partition_refresh.main.StreamAlertSQSClient')
@mock_sqs
def test_handler_no_unique_buckets(self, _, mock_config, mock_logging):
def test_handler_no_unique_buckets(self, mock_sqs_client, mock_config, mock_logging):
"""Athena - Handler - No Unique Buckets"""
test_sqs_client = TestStreamAlertSQSClient()
test_sqs_client.setup()
mock_sqs_client.return_value.unique_s3_buckets_and_keys = lambda: {}

handler(None, None)

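
The reworked test patches the whole StreamAlertSQSClient class and stubs only the method the handler touches. A standalone sketch of that pattern (using unittest.mock here for convenience; the codebase itself uses the mock backport):

from unittest.mock import MagicMock

mock_client_cls = MagicMock()
mock_client_cls.return_value.unique_s3_buckets_and_keys = lambda: {}

instance = mock_client_cls()  # what the handler would construct
assert instance.unique_s3_buckets_and_keys() == {}
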
