JSON Templates Tests, S3 Events Support, and Moar Unit Tests #188

Merged: 6 commits, Jun 20, 2017
2 changes: 0 additions & 2 deletions stream_alert_cli/config.py
@@ -18,8 +18,6 @@
import os
import sys

from collections import defaultdict

from stream_alert_cli.logger import LOGGER_CLI


141 changes: 108 additions & 33 deletions stream_alert_cli/helpers.py
@@ -13,10 +13,13 @@
See the License for the specific language governing permissions and
limitations under the License.
'''
import base64
import json
import os
import random
import subprocess
import zipfile
import zlib

from StringIO import StringIO

@@ -25,42 +28,113 @@
from stream_alert_cli.logger import LOGGER_CLI


class CLIHelpers(object):
"""Common helpers between StreamAlert CLI classes"""
@classmethod
def run_command(cls, runner_args, **kwargs):
"""Helper function to run commands with error handling.
DIR_TEMPLATES = 'test/integration/templates'

Args:
runner_args (list): Commands to run via subprocess
kwargs:
cwd (string): A path to execute commands from
error_message (string): Message to show if command fails
quiet (boolean): Whether to show command output or hide it

"""
default_error_message = "An error occurred while running: {}".format(
' '.join(runner_args)
)
error_message = kwargs.get('error_message', default_error_message)
def run_command(runner_args, **kwargs):
"""Helper function to run commands with error handling.

default_cwd = 'terraform'
cwd = kwargs.get('cwd', default_cwd)
Args:
runner_args (list): Commands to run via subprocess
kwargs:
cwd (string): A path to execute commands from
error_message (string): Message to show if command fails
quiet (boolean): Whether to show command output or hide it

"""
default_error_message = "An error occurred while running: {}".format(
' '.join(runner_args)
)
error_message = kwargs.get('error_message', default_error_message)
Contributor:
Seems like error_message is unused :hmm:

Contributor Author:
good catch

Contributor:
Looks like this little fella is still hangin around

Contributor Author:
it's being used now.. check below


stdout_option = None
if kwargs.get('quiet'):
stdout_option = open(os.devnull, 'w')
default_cwd = 'terraform'
cwd = kwargs.get('cwd', default_cwd)

try:
subprocess.check_call(runner_args, stdout=stdout_option, cwd=cwd)
except subprocess.CalledProcessError as e:
LOGGER_CLI.error('Return Code %s - %s', e.returncode, e.cmd)
return False
stdout_option = None
if kwargs.get('quiet'):
stdout_option = open(os.devnull, 'w')

try:
subprocess.check_call(runner_args, stdout=stdout_option, cwd=cwd)
except subprocess.CalledProcessError as err:
LOGGER_CLI.error('%s\n%s', error_message, err.cmd)
return False

return True
return True
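
For context, a minimal sketch of how the new module-level run_command might be invoked from other CLI code; the terraform arguments, working directory, and error message below are illustrative, not taken from this PR:

# Hypothetical caller, not part of this diff. Assumes the module above is
# importable as stream_alert_cli.helpers.
from stream_alert_cli import helpers

# Run `terraform plan` from the default 'terraform' working directory,
# hiding stdout; run_command returns False (and logs) on a non-zero exit.
success = helpers.run_command(
    ['terraform', 'plan'],
    quiet=True,
    error_message='An error occurred while planning infrastructure'
)
if not success:
    raise SystemExit(1)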


def _create_lambda_function(function_name, region):
def format_lambda_test_record(test_record):
"""Create a properly formatted Kinesis, S3, or SNS record.

Supports a dictionary or string based data record. Reads in
event templates from the test/integration/templates folder.

Args:
test_record: Test record metadata dict with the following structure:
data - string or dict of the raw data
description - a string describing the test that is being performed
trigger - bool of if the record should produce an alert
source - which stream/s3 bucket originated the data
service - which aws service originated the data
compress (optional) - if the payload needs to be gzip compressed or not

Returns:
dict in the format of the specific service
"""
service = test_record['service']
source = test_record['source']
compress = test_record.get('compress')

data_type = type(test_record['data'])
if data_type == dict:
data = json.dumps(test_record['data'])
elif data_type in (unicode, str):
data = test_record['data']
else:
LOGGER_CLI.info('Invalid data type: %s', data_type)
return

# Get the template file for this particular service
template_path = os.path.join(DIR_TEMPLATES, '{}.json'.format(service))
with open(template_path, 'r') as service_template:
try:
template = json.load(service_template)
except ValueError as err:
LOGGER_CLI.error('Error loading %s.json: %s', service, err)
return

if service == 's3':
# Set the S3 object key to a random value for testing
test_record['key'] = ('{:032X}'.format(random.randrange(16**32)))
Contributor:
import random is missing after migrating this function to helpers

Contributor Author:
interesting, when testing it locally, it must have never made it into this if body

Contributor:
I actually just checked to see why this passed integration tests - none of the rules we have in the integration tests are configured with "service": "s3". There are also none from kinesis configured to use compress, so that's also why that's not failing.

We should either add more encompassing integration tests or we should (need to) add cli unit tests that would have caught this.

template['s3']['object']['key'] = test_record['key']
template['s3']['object']['size'] = len(data)
template['s3']['bucket']['arn'] = 'arn:aws:s3:::{}'.format(source)
template['s3']['bucket']['name'] = source

# Create the mocked s3 object in the designated bucket with the random key
put_mock_s3_object(source, test_record['key'], data, 'us-east-1')

elif service == 'kinesis':
if compress:
kinesis_data = base64.b64encode(zlib.compress(data))
else:
kinesis_data = base64.b64encode(data)

template['kinesis']['data'] = kinesis_data
template['eventSourceARN'] = 'arn:aws:kinesis:us-east-1:111222333:stream/{}'.format(
source)

elif service == 'sns':
template['Sns']['Message'] = data
template['EventSubscriptionArn'] = 'arn:aws:sns:us-east-1:111222333:{}'.format(
source)
else:
LOGGER_CLI.info('Invalid service %s', service)

return template
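
To make the expected test_record shape concrete, and as a starting point for the CLI unit tests discussed in the review comments above, here is a rough sketch that exercises the S3 branch; the bucket name, record contents, and use of moto's mock_s3 are assumptions, not code from this PR, and it also assumes the test runs from the repo root so the test/integration/templates path resolves:

# Hypothetical unit test, not part of this diff. Assumes a moto release that
# still exposes mock_s3 so put_mock_s3_object never touches real AWS.
import json

import boto3
from moto import mock_s3

from stream_alert_cli.helpers import format_lambda_test_record


@mock_s3
def test_format_s3_test_record():
    # The bucket must exist before put_mock_s3_object writes the object
    boto3.client('s3', region_name='us-east-1').create_bucket(
        Bucket='unit-testing.streamalerts')

    test_record = {
        'service': 's3',
        'source': 'unit-testing.streamalerts',
        'trigger': True,
        'description': 'example S3 test record',
        'data': {'username': 'test-user', 'action': 'login'}
    }

    template = format_lambda_test_record(test_record)

    # The helper fills in the bucket fields and a randomized 32-char object key
    assert template['s3']['bucket']['name'] == 'unit-testing.streamalerts'
    assert template['s3']['object']['size'] == len(json.dumps(test_record['data']))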


def create_lambda_function(function_name, region):
"""Helper function to create mock lambda function"""
boto3.client('lambda', region_name=region).create_function(
FunctionName=function_name,
@@ -76,7 +150,8 @@ def _create_lambda_function(function_name, region):
}
)

def _encrypt_with_kms(data, region, alias):

def encrypt_with_kms(data, region, alias):
kms_client = boto3.client('kms', region_name=region)
response = kms_client.encrypt(KeyId=alias,
Plaintext=data)
@@ -99,16 +174,16 @@ def handler(event, context):
return package_output.read()


def _put_mock_creds(output_name, creds, bucket, region, alias):
def put_mock_creds(output_name, creds, bucket, region, alias):
"""Helper function to mock encrypt creds and put on s3"""
creds_string = json.dumps(creds)

enc_creds = _encrypt_with_kms(creds_string, region, alias)
enc_creds = encrypt_with_kms(creds_string, region, alias)

_put_mock_s3_object(bucket, output_name, enc_creds, region)
put_mock_s3_object(bucket, output_name, enc_creds, region)


def _put_mock_s3_object(bucket, key, data, region):
def put_mock_s3_object(bucket, key, data, region):
"""Create a mock AWS S3 object for testing

Args:
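Since these renamed helpers are now public, a rough sketch of how they might be composed in a test to stage encrypted output credentials; the bucket name, descriptor, moto decorators, and webhook URL are all assumptions, not code from this PR:

# Hypothetical test setup, not part of this diff. Assumes a moto release
# exposing mock_s3 and mock_kms.
import boto3
from moto import mock_kms, mock_s3

from stream_alert_cli import helpers


@mock_s3
@mock_kms
def setup_mock_output_creds():
    region = 'us-east-1'
    bucket = 'example-prefix.streamalert.secrets'
    boto3.client('s3', region_name=region).create_bucket(Bucket=bucket)

    # Create a throwaway KMS key; the raw key id is passed where the real code
    # would use an alias, since alias resolution varies across moto versions
    key_id = boto3.client('kms', region_name=region).create_key()['KeyMetadata']['KeyId']

    creds = {'url': 'https://hooks.example.com/services/T000/B000/XXXX'}
    helpers.put_mock_creds('slack/unit-test-channel', creds, bucket, region, key_id)
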
23 changes: 17 additions & 6 deletions stream_alert_cli/outputs.py
@@ -24,6 +24,7 @@

OUTPUTS_CONFIG = 'outputs.json'


def load_outputs_config(conf_dir='conf'):
"""Load the outputs configuration file from disk
@@ -37,10 +38,13 @@ def load_outputs_config(conf_dir='conf'):
try:
values = json.load(outputs)
except ValueError:
LOGGER_CLI.exception('the %s file could not be loaded into json', OUTPUTS_CONFIG)
LOGGER_CLI.exception(
'the %s file could not be loaded into json',
OUTPUTS_CONFIG)

return values


def write_outputs_config(data, conf_dir='conf'):
"""Write the outputs configuration file back to disk
@@ -56,6 +60,7 @@ def write_outputs_config(data, conf_dir='conf'):
sort_keys=True
))


def load_config(props, service):
"""Gets the outputs config from disk and checks if the output already exists
@@ -72,6 +77,7 @@ def load_config(props, service):

return config


def encrypt_and_push_creds_to_s3(region, bucket, key, props):
"""Construct a dictionary of the credentials we want to encrypt and send to s3
@@ -93,6 +99,7 @@ def encrypt_and_push_creds_to_s3(region, bucket, key, props):
enc_creds = kms_encrypt(region, creds_json)
return send_creds_to_s3(region, bucket, key, enc_creds)


def kms_encrypt(region, data):
"""Encrypt data with AWS KMS.
@@ -111,6 +118,7 @@ def kms_encrypt(region, data):
except ClientError:
LOGGER_CLI.exception('an error occurred during credential encryption')


def send_creds_to_s3(region, bucket, key, blob_data):
"""Put the encrypted credential blob for this service and destination in s3
@@ -131,13 +139,15 @@ def send_creds_to_s3(region, bucket, key, blob_data):

return True
except ClientError as err:
LOGGER_CLI.error('An error occurred while sending credentials to S3 for key [%s]: '
'%s [%s]',
key,
err.response['Error']['Message'],
err.response['Error']['BucketName'])
LOGGER_CLI.error(
'An error occurred while sending credentials to S3 for key [%s]: '
'%s [%s]',
key,
err.response['Error']['Message'],
err.response['Error']['BucketName'])
return False


def check_output_exists(config, props, service):
"""Determine if this service and destination combo has already been created
@@ -156,6 +166,7 @@ def check_output_exists(config, props, service):

return True


def update_outputs_config(config, updated_config, service):
"""Updates and writes the outputs config back to disk
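For readers new to this module, the encrypt-then-upload flow that encrypt_and_push_creds_to_s3 wraps (kms_encrypt followed by send_creds_to_s3) is roughly equivalent to the boto3 calls below; the alias, bucket, and object key are placeholders, not values from this PR:

# Illustrative boto3 equivalent, not part of this diff.
import json

import boto3

region = 'us-east-1'
creds_json = json.dumps({'url': 'https://hooks.example.com/services/XXXX'})

# kms_encrypt: encrypt the serialized credentials with a KMS key or alias
ciphertext = boto3.client('kms', region_name=region).encrypt(
    KeyId='alias/example_secrets_key',
    Plaintext=creds_json)['CiphertextBlob']

# send_creds_to_s3: store the encrypted blob under the service/descriptor key
boto3.client('s3', region_name=region).put_object(
    Body=ciphertext,
    Bucket='example-prefix.streamalert.secrets',
    Key='slack/example-channel')
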
2 changes: 1 addition & 1 deletion stream_alert_cli/package.py
@@ -172,7 +172,7 @@ def _resolve_third_party(self, temp_package_path):
third_party_libs = self.config['lambda'][self.config_key]['third_party_libraries']
if third_party_libs:
LOGGER_CLI.info(
'Installing third-party libraries: {}'.format(', '.join(third_party_libs)))
'Installing third-party libraries: %s', ', '.join(third_party_libs))
pip_command = ['install']
pip_command.extend(third_party_libs)
pip_command.extend(['--upgrade', '--target', temp_package_path])
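The one-line change above swaps eager str.format interpolation for the logging module's lazy %-style arguments; a small illustration of the pattern (the library list here is made up):

# Illustrative only. Passing format args to the logger defers %-interpolation
# until the record is actually emitted and satisfies linters such as pylint's
# logging-format-interpolation check.
from stream_alert_cli.logger import LOGGER_CLI

third_party_libs = ['mock', 'moto']
LOGGER_CLI.info('Installing third-party libraries: %s', ', '.join(third_party_libs))
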
14 changes: 8 additions & 6 deletions stream_alert_cli/runner.py
@@ -23,7 +23,7 @@

from stream_alert_cli.package import RuleProcessorPackage, AlertProcessorPackage
from stream_alert_cli.test import stream_alert_test
from stream_alert_cli.helpers import CLIHelpers
from stream_alert_cli import helpers
from stream_alert_cli.config import CLIConfig
from stream_alert_cli.logger import LOGGER_CLI
from stream_alert_cli.version import LambdaVersion
@@ -199,7 +199,7 @@ def terraform_handler(options):

def run_command(args=None, **kwargs):
"""Alias to CLI Helpers.run_command"""
return CLIHelpers.run_command(args, **kwargs)
return helpers.run_command(args, **kwargs)


def continue_prompt():
@@ -463,13 +463,15 @@ def configure_output(options):

# Encrypt the creds and push them to S3
# then update the local output configuration with properties
if config_outputs.encrypt_and_push_creds_to_s3(region, secrets_bucket, secrets_key, props):
if config_outputs.encrypt_and_push_creds_to_s3(
region, secrets_bucket, secrets_key, props):
updated_config = output.format_output_config(config, props)
config_outputs.update_outputs_config(config, updated_config, service)

LOGGER_CLI.info('Successfully saved \'%s\' output configuration for service \'%s\'',
props['descriptor'].value,
options.service)
LOGGER_CLI.info(
'Successfully saved \'%s\' output configuration for service \'%s\'',
props['descriptor'].value,
options.service)
else:
LOGGER_CLI.error('An error occurred while saving \'%s\' '
'output configuration for service \'%s\'',