[linting] pylint and autopep8 on cli + tests
Jack Naglieri committed Jun 20, 2017
1 parent 7c7d1dc commit aba4870
Showing 10 changed files with 101 additions and 70 deletions.
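The commit pairs two tools: autopep8 mechanically fixes PEP 8 layout (line length, blank lines between top-level definitions), while pylint flags correctness and convention issues (unused imports, logging-format mistakes, cross-module use of underscore-prefixed helpers). The exact invocations aren't recorded in the commit; a typical pass over these paths would look something like autopep8 --in-place --recursive stream_alert_cli/ followed by pylint stream_alert_cli.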
2 changes: 0 additions & 2 deletions stream_alert_cli/config.py
@@ -18,8 +18,6 @@
 import os
 import sys
 
-from collections import defaultdict
-
 from stream_alert_cli.logger import LOGGER_CLI
 
 
23 changes: 12 additions & 11 deletions stream_alert_cli/helpers.py
@@ -30,6 +30,7 @@
 
 DIR_TEMPLATES = 'test/integration/templates'
 
+
 def run_command(runner_args, **kwargs):
     """Helper function to run commands with error handling.
@@ -56,8 +57,7 @@ def run_command(runner_args, **kwargs):
     try:
         subprocess.check_call(runner_args, stdout=stdout_option, cwd=cwd)
     except subprocess.CalledProcessError as err:
-        LOGGER_CLI.error('%s\n%s', error_message)
-        LOGGER_CLI.error(err.cmd)
+        LOGGER_CLI.error('%s\n%s', error_message, err.cmd)
         return False
 
     return True
@@ -112,7 +112,7 @@ def format_lambda_test_record(test_record):
         template['s3']['bucket']['name'] = source
 
         # Create the mocked s3 object in the designated bucket with the random key
-        _put_mock_s3_object(source, test_record['key'], data, 'us-east-1')
+        put_mock_s3_object(source, test_record['key'], data, 'us-east-1')
 
     elif service == 'kinesis':
         if compress:
@@ -134,7 +134,7 @@ def format_lambda_test_record(test_record):
     return template
 
 
-def _create_lambda_function(function_name, region):
+def create_lambda_function(function_name, region):
     """Helper function to create mock lambda function"""
     boto3.client('lambda', region_name=region).create_function(
         FunctionName=function_name,
@@ -146,19 +146,20 @@ def _create_lambda_function(function_name, region):
         MemorySize=128,
         Publish=True,
         Code={
-            'ZipFile': _make_lambda_package()
+            'ZipFile': make_lambda_package()
         }
     )
 
-def _encrypt_with_kms(data, region, alias):
+
+def encrypt_with_kms(data, region, alias):
     kms_client = boto3.client('kms', region_name=region)
     response = kms_client.encrypt(KeyId=alias,
                                   Plaintext=data)
 
     return response['CiphertextBlob']
 
 
-def _make_lambda_package():
+def make_lambda_package():
     """Helper function to create mock lambda package"""
     mock_lambda_function = """
 def handler(event, context):
@@ -173,16 +174,16 @@ def handler(event, context):
     return package_output.read()
 
 
-def _put_mock_creds(output_name, creds, bucket, region, alias):
+def put_mock_creds(output_name, creds, bucket, region, alias):
     """Helper function to mock encrypt creds and put on s3"""
     creds_string = json.dumps(creds)
 
-    enc_creds = _encrypt_with_kms(creds_string, region, alias)
+    enc_creds = encrypt_with_kms(creds_string, region, alias)
 
-    _put_mock_s3_object(bucket, output_name, enc_creds, region)
+    put_mock_s3_object(bucket, output_name, enc_creds, region)
 
 
-def _put_mock_s3_object(bucket, key, data, region):
+def put_mock_s3_object(bucket, key, data, region):
     """Create a mock AWS S3 object for testing
 
     Args:
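Two patterns recur in helpers.py. The underscore prefixes come off the module's helpers because test.py calls them from outside the module, which pylint reports as protected-access (W0212). The run_command change also fixes a genuine bug: the old code passed two %s placeholders to LOGGER_CLI.error with only one argument. A minimal sketch of the lazy %-style call pylint expects (logger name illustrative):

import logging

logger = logging.getLogger('StreamAlertCLI')  # illustrative name

# Lazy %-interpolation: placeholders are merged with the arguments only
# when a handler actually emits the record, and pylint can verify that
# the placeholder count matches the argument count.
logger.error('%s\n%s', 'failed to run command', ['terraform', 'plan'])

# The pre-fix call supplied two placeholders but one argument:
#     logger.error('%s\n%s', error_message)
# and fails at render time with 'not enough arguments for format string'.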
23 changes: 17 additions & 6 deletions stream_alert_cli/outputs.py
@@ -24,6 +24,7 @@
 
 OUTPUTS_CONFIG = 'outputs.json'
 
+
 def load_outputs_config(conf_dir='conf'):
     """Load the outputs configuration file from disk
@@ -37,10 +38,13 @@ def load_outputs_config(conf_dir='conf'):
     try:
         values = json.load(outputs)
     except ValueError:
-        LOGGER_CLI.exception('the %s file could not be loaded into json', OUTPUTS_CONFIG)
+        LOGGER_CLI.exception(
+            'the %s file could not be loaded into json',
+            OUTPUTS_CONFIG)
 
     return values
 
+
 def write_outputs_config(data, conf_dir='conf'):
     """Write the outputs configuration file back to disk
@@ -56,6 +60,7 @@ def write_outputs_config(data, conf_dir='conf'):
             sort_keys=True
         ))
 
+
 def load_config(props, service):
     """Gets the outputs config from disk and checks if the output already exists
@@ -72,6 +77,7 @@ def load_config(props, service):
 
     return config
 
+
 def encrypt_and_push_creds_to_s3(region, bucket, key, props):
     """Construct a dictionary of the credentials we want to encrypt and send to s3
@@ -93,6 +99,7 @@ def encrypt_and_push_creds_to_s3(region, bucket, key, props):
     enc_creds = kms_encrypt(region, creds_json)
     return send_creds_to_s3(region, bucket, key, enc_creds)
 
+
 def kms_encrypt(region, data):
     """Encrypt data with AWS KMS.
@@ -111,6 +118,7 @@ def kms_encrypt(region, data):
     except ClientError:
         LOGGER_CLI.exception('an error occurred during credential encryption')
 
+
 def send_creds_to_s3(region, bucket, key, blob_data):
     """Put the encrypted credential blob for this service and destination in s3
@@ -131,13 +139,15 @@ def send_creds_to_s3(region, bucket, key, blob_data):
 
         return True
     except ClientError as err:
-        LOGGER_CLI.error('An error occurred while sending credentials to S3 for key [%s]: '
-                         '%s [%s]',
-                         key,
-                         err.response['Error']['Message'],
-                         err.response['Error']['BucketName'])
+        LOGGER_CLI.error(
+            'An error occurred while sending credentials to S3 for key [%s]: '
+            '%s [%s]',
+            key,
+            err.response['Error']['Message'],
+            err.response['Error']['BucketName'])
         return False
 
+
 def check_output_exists(config, props, service):
     """Determine if this service and destination combo has already been created
@@ -156,6 +166,7 @@ def check_output_exists(config, props, service):
 
     return True
 
+
 def update_outputs_config(config, updated_config, service):
     """Updates and writes the outputs config back to disk
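The outputs.py changes are pure autopep8 housekeeping: two blank lines before each top-level def (E302) and over-long logger calls rewrapped with hanging indents (E501); behavior is unchanged. For orientation, a minimal usage sketch of the module's entry point, assuming the default conf/ directory:

from stream_alert_cli.outputs import load_outputs_config

# load_outputs_config reads conf/outputs.json; the JSON maps each
# service (slack, pagerduty, aws-s3, ...) to its configured outputs
config = load_outputs_config()
print(config.get('slack'))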
2 changes: 1 addition & 1 deletion stream_alert_cli/package.py
@@ -172,7 +172,7 @@ def _resolve_third_party(self, temp_package_path):
         third_party_libs = self.config['lambda'][self.config_key]['third_party_libraries']
         if third_party_libs:
             LOGGER_CLI.info(
-                'Installing third-party libraries: {}'.format(', '.join(third_party_libs)))
+                'Installing third-party libraries: %s', ', '.join(third_party_libs))
             pip_command = ['install']
             pip_command.extend(third_party_libs)
             pip_command.extend(['--upgrade', '--target', temp_package_path])
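This one-line change is what pylint's logging-format-interpolation check (W1202) asks for: with %-style arguments the join still runs eagerly, but the final message is only built if a handler emits the record, and the format string stays constant for log aggregation. A sketch under those assumptions:

import logging

logging.basicConfig(level=logging.WARNING)
log = logging.getLogger('stream_alert_cli')  # illustrative logger

libs = ['mock', 'moto', 'boto3']
# INFO is below the WARNING threshold, so the '%s' merge never happens;
# with str.format() the full message would be built regardless.
log.info('Installing third-party libraries: %s', ', '.join(libs))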
10 changes: 6 additions & 4 deletions stream_alert_cli/runner.py
@@ -463,13 +463,15 @@ def configure_output(options):
 
     # Encrypt the creds and push them to S3
     # then update the local output configuration with properties
-    if config_outputs.encrypt_and_push_creds_to_s3(region, secrets_bucket, secrets_key, props):
+    if config_outputs.encrypt_and_push_creds_to_s3(
+            region, secrets_bucket, secrets_key, props):
         updated_config = output.format_output_config(config, props)
         config_outputs.update_outputs_config(config, updated_config, service)
 
-        LOGGER_CLI.info('Successfully saved \'%s\' output configuration for service \'%s\'',
-                        props['descriptor'].value,
-                        options.service)
+        LOGGER_CLI.info(
+            'Successfully saved \'%s\' output configuration for service \'%s\'',
+            props['descriptor'].value,
+            options.service)
     else:
         LOGGER_CLI.error('An error occurred while saving \'%s\' '
                          'output configuration for service \'%s\'',
39 changes: 31 additions & 8 deletions stream_alert_cli/terraform_generate.py
@@ -33,6 +33,19 @@ def infinitedict():
 
 
 def generate_s3_bucket(**kwargs):
+    """Generate an S3 Bucket dict
+
+    Keyword Args:
+        bucket [string]: The name of the bucket
+        acl [string]: The S3 bucket ACL
+        logging_bucket [string]: The S3 bucket to send access logs to
+        force_destroy [bool]: To enable or disable force destroy of the bucket
+        versioning [bool]: To enable or disable S3 object versioning
+        lifecycle_rule [dict]: The S3 bucket lifecycle rule
+
+    Returns:
+        [dict] S3 bucket Terraform dict to be used in clusters/main.tf
+    """
     bucket_name = kwargs.get('bucket')
     acl = kwargs.get('acl', 'private')
     logging_bucket = kwargs.get('logging')
@@ -41,14 +54,16 @@ def generate_s3_bucket(**kwargs):
         'target_prefix': '{}/'.format(bucket_name)
     }
     force_destroy = kwargs.get('force_destroy', False)
-    versioning = kwargs.get('versioning', {'enabled': True})
+    versioning = kwargs.get('versioning', True)
     lifecycle_rule = kwargs.get('lifecycle_rule')
 
     bucket = {
         'bucket': bucket_name,
         'acl': acl,
         'force_destroy': force_destroy,
-        'versioning': versioning,
+        'versioning': {
+            'enabled': versioning
+        },
         'logging': logging
     }
 
@@ -59,6 +74,15 @@ def generate_s3_bucket(**kwargs):
 
 
 def generate_main(**kwargs):
+    """Generate the main.tf Terraform dict
+
+    Keyword Args:
+        init [string]: If Terraform is running in the init phase or not
+        config [CLIConfig]: The loaded CLI config
+
+    Returns:
+        [dict] main.tf Terraform dict
+    """
     init = kwargs.get('init')
     config = kwargs.get('config')
 
@@ -304,7 +328,8 @@ def generate_kinesis_events(cluster_name, cluster_dict, config):
         a given cluster.
         config [dict]: The loaded config from the 'conf/' directory
     """
-    kinesis_events_enabled = bool(config['clusters'][cluster_name]['modules']['kinesis_events']['enabled'])
+    kinesis_events_enabled = bool(
+        config['clusters'][cluster_name]['modules']['kinesis_events']['enabled'])
     # Kinesis events module
     cluster_dict['module']['kinesis_events_{}'.format(cluster_name)] = {
         'source': 'modules/tf_stream_alert_kinesis_events',
@@ -381,7 +406,9 @@ def generate_s3_events(cluster_name, cluster_dict, config):
         's3_bucket_id': s3_bucket_id,
         's3_bucket_arn': 'arn:aws:s3:::{}'.format(s3_bucket_id)}
     else:
-        LOGGER_CLI.error('Config Error: Missing S3 bucket in %s s3_events module', cluster_name)
+        LOGGER_CLI.error(
+            'Config Error: Missing S3 bucket in %s s3_events module',
+            cluster_name)
         sys.exit(1)
 
 
@@ -396,10 +423,6 @@ def generate_cluster(**kwargs):
     cluster_name = kwargs.get('cluster_name')
 
     account = config['global']['account']
-    prefix = account['prefix']
-    logging_bucket = '{}.streamalert.s3-logging'.format(
-        config['global']['account']['prefix'])
-    account_id = account['aws_account_id']
 
     modules = config['clusters'][cluster_name]['modules']
     cluster_dict = infinitedict()
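Besides the new docstrings, generate_s3_bucket's versioning kwarg changes shape: callers now pass a plain bool and the function builds the {'enabled': ...} block Terraform expects, and generate_cluster drops locals pylint flagged as unused (W0612). A small sketch of the new contract, with illustrative bucket names:

from stream_alert_cli.terraform_generate import generate_s3_bucket

bucket = generate_s3_bucket(
    bucket='example.streamalert.terraform.state',  # illustrative name
    logging='example.streamalert.s3-logging',      # access-log target
    versioning=True)

# The bool is wrapped into the structure Terraform expects
assert bucket['versioning'] == {'enabled': True}
assert bucket['acl'] == 'private'  # the default ACL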
26 changes: 9 additions & 17 deletions stream_alert_cli/test.py
@@ -17,10 +17,9 @@
 import json
 import logging
 import os
-import random
 import re
 import sys
-import time
+import zlib
 
 from mock import Mock, patch
 
@@ -29,12 +28,6 @@
 
 from stream_alert.alert_processor import main as StreamOutput
 from stream_alert.rule_processor.handler import StreamAlert
-from stream_alert_cli.helpers import (
-    _create_lambda_function,
-    _put_mock_creds,
-    _put_mock_s3_object
-)
-
 from stream_alert_cli import helpers
 from stream_alert_cli.logger import LOGGER_CLI, LOGGER_SA
 from stream_alert_cli.outputs import load_outputs_config
@@ -197,7 +190,6 @@ def check_keys(self, rule_name, test_record):
             record_keys.difference_update(key_diff)
             self.rules_fail_pass_warn[2].append((rule_name, message))
 
-
         return record_keys.issubset(required_keys | optional_keys)
 
     @staticmethod
@@ -388,21 +380,21 @@ def setup_outputs(self, outputs, url_mock):
                 boto3.client('s3', region_name='us-east-1').create_bucket(Bucket=bucket)
             elif service == 'aws-lambda':
                 function = self.outputs_config[service][descriptor]
-                _create_lambda_function(function, 'us-east-1')
+                helpers.create_lambda_function(function, 'us-east-1')
             elif service == 'pagerduty':
                 output_name = ('/').join([service, descriptor])
                 creds = {'service_key': '247b97499078a015cc6c586bc0a92de6'}
-                _put_mock_creds(output_name, creds, self.secrets_bucket,
-                                'us-east-1', self.kms_alias)
+                helpers.put_mock_creds(output_name, creds, self.secrets_bucket,
+                                       'us-east-1', self.kms_alias)
 
                 # Set the patched urlopen.getcode return value to 200
                 url_mock.return_value.getcode.return_value = 200
             elif service == 'phantom':
                 output_name = ('/').join([service, descriptor])
                 creds = {'ph_auth_token': '6c586bc047b9749a92de29078a015cc6',
                          'url': 'phantom.foo.bar'}
-                _put_mock_creds(output_name, creds, self.secrets_bucket,
-                                'us-east-1', self.kms_alias)
+                helpers.put_mock_creds(output_name, creds, self.secrets_bucket,
+                                       'us-east-1', self.kms_alias)
 
                 # Set the patched urlopen.getcode return value to 200
                 url_mock.return_value.getcode.return_value = 200
@@ -411,8 +403,8 @@ def setup_outputs(self, outputs, url_mock):
             elif service == 'slack':
                 output_name = ('/').join([service, descriptor])
                 creds = {'url': 'https://api.slack.com/web-hook-key'}
-                _put_mock_creds(output_name, creds, self.secrets_bucket,
-                                'us-east-1', self.kms_alias)
+                helpers.put_mock_creds(output_name, creds, self.secrets_bucket,
+                                       'us-east-1', self.kms_alias)
 
                 # Set the patched urlopen.getcode return value to 200
                 url_mock.return_value.getcode.return_value = 200
@@ -494,4 +486,4 @@ def stream_alert_test(options):
     AlertProcessorTester.report_output_summary()
 
     if not (rp_status and ap_status):
-        os._exit(1)
+        sys.exit(1)
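The final change is more than cosmetic: os._exit(1) terminates the interpreter immediately, skipping finally blocks, atexit handlers, and any teardown a test runner registered, whereas sys.exit(1) raises SystemExit and unwinds normally while still reporting a failing exit code. A small illustration:

import sys

def run_tests():
    # sys.exit raises SystemExit rather than killing the process outright
    sys.exit(1)

try:
    run_tests()
except SystemExit as err:
    print('exit status: {}'.format(err.code))  # -> exit status: 1
finally:
    print('teardown runs')  # with os._exit(1) this line never prints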