Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

CB Audit Schema, Fix Typos #631

Merged
merged 2 commits into from
Mar 14, 2018
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 8 additions & 0 deletions conf/logs.json
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,14 @@
]
}
},
"carbonblack:audit": {
"schema": {
"cb_server": "string",
"message": "string",
"type": "string"
},
"parser": "json"
},
"carbonblack:alert.status.updated": {
"schema": {
"alert_resolution": "string",
Expand Down
2 changes: 1 addition & 1 deletion stream_alert/rule_processor/firehose.py
Original file line number Diff line number Diff line change
Expand Up @@ -189,7 +189,7 @@ def firehose_request_wrapper():
record_batch_size)
return

# Error handle if failures occured in PutRecordBatch after
# Error handle if failures occurred in PutRecordBatch after
# several backoff attempts
if resp.get('FailedPutCount') > 0:
failed_records = [failed
Expand Down
2 changes: 1 addition & 1 deletion stream_alert/rule_processor/payload.py
Original file line number Diff line number Diff line change
Expand Up @@ -254,7 +254,7 @@ def _get_object(self):
try:
return self._download_object(region, bucket, key)
except IOError:
LOGGER.exception('[S3Payload] The following error occured while downloading')
LOGGER.exception('[S3Payload] The following error occurred while downloading')
return

@staticmethod
Expand Down
4 changes: 2 additions & 2 deletions stream_alert/rule_processor/threat_intel.py
Original file line number Diff line number Diff line change
Expand Up @@ -276,7 +276,7 @@ def _process_ioc(self, ioc_collections):

query_result = []

query_error_msg = 'An error occured while quering dynamodb table. Error is: %s'
query_error_msg = 'An error occurred while querying dynamodb table. Error is: %s'
try:
result, unprocesed_keys = self._query(query_values)
query_result.extend(result)
Expand All @@ -292,7 +292,7 @@ def _process_ioc(self, ioc_collections):
if unprocesed_keys:
deserializer = self._deserialize(unprocesed_keys[self._table]['Keys'])
query_values = [elem[PRIMARY_KEY] for elem in deserializer]
query_error_msg = 'An error occured while processing unprocesed_keys. Error is: %s'
query_error_msg = 'An error occurred while processing unprocesed_keys. Error is: %s'
try:
result, _ = self._query(query_values)
query_result.extend(result)
Expand Down
4 changes: 2 additions & 2 deletions stream_alert_cli/athena/handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -91,7 +91,7 @@ def rebuild_partitions(athena_client, options, config):
partition_success, partitions = athena_client.run_athena_query(
query='SHOW PARTITIONS {}'.format(sanitized_table_name), database='streamalert')
if not partition_success:
LOGGER_CLI.error('An error occured when loading partitions for %s',
LOGGER_CLI.error('An error occurred when loading partitions for %s',
sanitized_table_name)
return

Expand All @@ -102,7 +102,7 @@ def rebuild_partitions(athena_client, options, config):
drop_success, _ = athena_client.run_athena_query(
query='DROP TABLE {}'.format(sanitized_table_name), database='streamalert')
if not drop_success:
LOGGER_CLI.error('An error occured when dropping the %s table', sanitized_table_name)
LOGGER_CLI.error('An error occurred when dropping the %s table', sanitized_table_name)
return

LOGGER_CLI.info('Dropped table %s', sanitized_table_name)
Expand Down
2 changes: 1 addition & 1 deletion stream_alert_cli/terraform/handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -84,7 +84,7 @@ def terraform_handler(options, config):
'aws_kms_alias.stream_alert_secrets'
]
if not tf_runner(targets=init_targets):
LOGGER_CLI.error('An error occured while running StreamAlert init')
LOGGER_CLI.error('An error occurred while running StreamAlert init')
sys.exit(1)

# generate the main.tf with remote state enabled
Expand Down
2 changes: 1 addition & 1 deletion stream_alert_cli/test.py
Original file line number Diff line number Diff line change
Expand Up @@ -996,7 +996,7 @@ def run_tests(options, context):
alert_proc_tester.all_tests_passed):
sys.exit(1)

# If there are any log records in the memory buffer, then errors occured somewhere
# If there are any log records in the memory buffer, then errors occurred somewhere
if log_mem_hanlder.buffer:
# Release the MemoryHandler so we can do some other logging now
logging.getLogger().removeHandler(log_mem_hanlder)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -439,7 +439,7 @@ def test_process_ioc_with_clienterror(self, log_mock, mock_client):
StreamIoc(value='1.1.1.2', ioc_type='ip')
]
threat_intel._process_ioc(ioc_collections)
log_mock.assert_called_with('An error occured while quering dynamodb table. Error is: %s',
log_mock.assert_called_with('An error occurred while querying dynamodb table. Error is: %s',
{'Error': {'Code': 400, 'Message': 'raising test exception'}})

@patch('boto3.client')
Expand Down