diff --git a/conf/logs.json b/conf/logs.json
index a8f88e756..d56527926 100644
--- a/conf/logs.json
+++ b/conf/logs.json
@@ -27,6 +27,14 @@
       ]
     }
   },
+  "carbonblack:audit": {
+    "schema": {
+      "cb_server": "string",
+      "message": "string",
+      "type": "string"
+    },
+    "parser": "json"
+  },
   "carbonblack:alert.status.updated": {
     "schema": {
       "alert_resolution": "string",
diff --git a/stream_alert/rule_processor/firehose.py b/stream_alert/rule_processor/firehose.py
index a8d5402d1..f628cbeec 100644
--- a/stream_alert/rule_processor/firehose.py
+++ b/stream_alert/rule_processor/firehose.py
@@ -189,7 +189,7 @@ def firehose_request_wrapper():
                          record_batch_size)
             return
 
-        # Error handle if failures occured in PutRecordBatch after
+        # Handle errors if failures occurred in PutRecordBatch after
         # several backoff attempts
         if resp.get('FailedPutCount') > 0:
             failed_records = [failed
diff --git a/stream_alert/rule_processor/payload.py b/stream_alert/rule_processor/payload.py
index 8fbbbf0b9..e127b837a 100644
--- a/stream_alert/rule_processor/payload.py
+++ b/stream_alert/rule_processor/payload.py
@@ -254,7 +254,7 @@ def _get_object(self):
         try:
             return self._download_object(region, bucket, key)
         except IOError:
-            LOGGER.exception('[S3Payload] The following error occured while downloading')
+            LOGGER.exception('[S3Payload] The following error occurred while downloading')
             return
 
     @staticmethod
diff --git a/stream_alert/rule_processor/threat_intel.py b/stream_alert/rule_processor/threat_intel.py
index 40064308f..db83927c9 100644
--- a/stream_alert/rule_processor/threat_intel.py
+++ b/stream_alert/rule_processor/threat_intel.py
@@ -276,7 +276,7 @@ def _process_ioc(self, ioc_collections):
 
         query_result = []
 
-        query_error_msg = 'An error occured while quering dynamodb table. Error is: %s'
+        query_error_msg = 'An error occurred while querying dynamodb table. Error is: %s'
         try:
             result, unprocesed_keys = self._query(query_values)
             query_result.extend(result)
@@ -292,7 +292,7 @@ def _process_ioc(self, ioc_collections):
         if unprocesed_keys:
             deserializer = self._deserialize(unprocesed_keys[self._table]['Keys'])
             query_values = [elem[PRIMARY_KEY] for elem in deserializer]
-            query_error_msg = 'An error occured while processing unprocesed_keys. Error is: %s'
+            query_error_msg = 'An error occurred while processing unprocesed_keys. Error is: %s'
             try:
                 result, _ = self._query(query_values)
                 query_result.extend(result)
diff --git a/stream_alert_cli/athena/handler.py b/stream_alert_cli/athena/handler.py
index 7e95a852a..c913ea84b 100644
--- a/stream_alert_cli/athena/handler.py
+++ b/stream_alert_cli/athena/handler.py
@@ -91,7 +91,7 @@ def rebuild_partitions(athena_client, options, config):
     partition_success, partitions = athena_client.run_athena_query(
         query='SHOW PARTITIONS {}'.format(sanitized_table_name), database='streamalert')
     if not partition_success:
-        LOGGER_CLI.error('An error occured when loading partitions for %s',
+        LOGGER_CLI.error('An error occurred when loading partitions for %s',
                          sanitized_table_name)
         return
 
@@ -102,7 +102,7 @@ def rebuild_partitions(athena_client, options, config):
     drop_success, _ = athena_client.run_athena_query(
         query='DROP TABLE {}'.format(sanitized_table_name), database='streamalert')
     if not drop_success:
-        LOGGER_CLI.error('An error occured when dropping the %s table', sanitized_table_name)
+        LOGGER_CLI.error('An error occurred when dropping the %s table', sanitized_table_name)
         return
 
     LOGGER_CLI.info('Dropped table %s', sanitized_table_name)
diff --git a/stream_alert_cli/terraform/handler.py b/stream_alert_cli/terraform/handler.py
index 47eead2c0..e740ee5b8 100644
--- a/stream_alert_cli/terraform/handler.py
+++ b/stream_alert_cli/terraform/handler.py
@@ -84,7 +84,7 @@ def terraform_handler(options, config):
             'aws_kms_alias.stream_alert_secrets'
         ]
         if not tf_runner(targets=init_targets):
-            LOGGER_CLI.error('An error occured while running StreamAlert init')
+            LOGGER_CLI.error('An error occurred while running StreamAlert init')
             sys.exit(1)
 
         # generate the main.tf with remote state enabled
diff --git a/stream_alert_cli/test.py b/stream_alert_cli/test.py
index ce7c830c9..d6f8c4829 100644
--- a/stream_alert_cli/test.py
+++ b/stream_alert_cli/test.py
@@ -996,7 +996,7 @@ def run_tests(options, context):
             alert_proc_tester.all_tests_passed):
         sys.exit(1)
 
-    # If there are any log records in the memory buffer, then errors occured somewhere
+    # If there are any log records in the memory buffer, then errors occurred somewhere
     if log_mem_hanlder.buffer:
         # Release the MemoryHandler so we can do some other logging now
         logging.getLogger().removeHandler(log_mem_hanlder)
diff --git a/tests/unit/stream_alert_rule_processor/test_threat_intel.py b/tests/unit/stream_alert_rule_processor/test_threat_intel.py
index a08db3734..a349f494e 100644
--- a/tests/unit/stream_alert_rule_processor/test_threat_intel.py
+++ b/tests/unit/stream_alert_rule_processor/test_threat_intel.py
@@ -439,7 +439,7 @@ def test_process_ioc_with_clienterror(self, log_mock, mock_client):
             StreamIoc(value='1.1.1.2', ioc_type='ip')
         ]
         threat_intel._process_ioc(ioc_collections)
-        log_mock.assert_called_with('An error occured while quering dynamodb table. Error is: %s',
+        log_mock.assert_called_with('An error occurred while querying dynamodb table. Error is: %s',
                                     {'Error': {'Code': 400, 'Message': 'raising test exception'}})
 
     @patch('boto3.client')
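
Note: beyond the typo fixes, this patch also registers a new carbonblack:audit log
type in conf/logs.json, a flat JSON schema with three string fields handled by the
json parser. A record shaped like the following (field values are hypothetical,
shown only to illustrate the declared schema) would classify against it:

    {
        "cb_server": "cbserver",
        "type": "audit.banning.blacklist",
        "message": "Updated banned hash list"
    }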