
Commit

[typos] fix occured => occurred
Jack Naglieri committed Mar 14, 2018
1 parent 3c9a183 commit 6663c77
Showing 7 changed files with 9 additions and 9 deletions.
2 changes: 1 addition & 1 deletion stream_alert/rule_processor/firehose.py
@@ -189,7 +189,7 @@ def firehose_request_wrapper():
record_batch_size)
return

-# Error handle if failures occured in PutRecordBatch after
+# Error handle if failures occurred in PutRecordBatch after
# several backoff attempts
if resp.get('FailedPutCount') > 0:
failed_records = [failed
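For context, the hunk above sits in the Firehose batch-delivery error path. A minimal sketch of that pattern with boto3, where the stream name and record payloads are placeholders rather than StreamAlert's actual values:

import json

import boto3

firehose = boto3.client('firehose')

records = [{'Data': json.dumps({'event_id': i}) + '\n'} for i in range(5)]
resp = firehose.put_record_batch(
    DeliveryStreamName='example_delivery_stream',
    Records=records,
)

# PutRecordBatch can partially fail even after retries; FailedPutCount
# reports how many records were rejected, and RequestResponses lines up
# with the input order, so failed records can be collected for a resend.
if resp.get('FailedPutCount', 0) > 0:
    failed_records = [
        record
        for record, result in zip(records, resp['RequestResponses'])
        if result.get('ErrorCode')
    ]
    print('{} records failed and should be retried'.format(len(failed_records)))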
2 changes: 1 addition & 1 deletion stream_alert/rule_processor/payload.py
@@ -254,7 +254,7 @@ def _get_object(self):
try:
return self._download_object(region, bucket, key)
except IOError:
-LOGGER.exception('[S3Payload] The following error occured while downloading')
+LOGGER.exception('[S3Payload] The following error occurred while downloading')
return

@staticmethod
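The payload hunk wraps an S3 download in a try/except and logs the traceback with LOGGER.exception. A minimal sketch of that pattern, with a placeholder bucket, key, and logger name:

import logging

import boto3

LOGGER = logging.getLogger('sketch')

def download_object(bucket, key, local_path='/tmp/downloaded_object'):
    """Download an S3 object, logging the full traceback on failure."""
    try:
        boto3.client('s3').download_file(bucket, key, local_path)
        return local_path
    except IOError:
        # exception() records the message plus the active traceback,
        # which a plain error() call would not include.
        LOGGER.exception('The following error occurred while downloading')
        return None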
4 changes: 2 additions & 2 deletions stream_alert/rule_processor/threat_intel.py
@@ -276,7 +276,7 @@ def _process_ioc(self, ioc_collections):

query_result = []

-query_error_msg = 'An error occured while quering dynamodb table. Error is: %s'
+query_error_msg = 'An error occurred while quering dynamodb table. Error is: %s'
try:
result, unprocesed_keys = self._query(query_values)
query_result.extend(result)
@@ -292,7 +292,7 @@ def _process_ioc(self, ioc_collections):
if unprocesed_keys:
deserializer = self._deserialize(unprocesed_keys[self._table]['Keys'])
query_values = [elem[PRIMARY_KEY] for elem in deserializer]
-query_error_msg = 'An error occured while processing unprocesed_keys. Error is: %s'
+query_error_msg = 'An error occurred while processing unprocesed_keys. Error is: %s'
try:
result, _ = self._query(query_values)
query_result.extend(result)
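The threat_intel hunk re-queries whatever _query left unprocessed. A minimal sketch of the underlying DynamoDB BatchGetItem pattern; the table name and key attribute are placeholders, and _query itself presumably wraps a similar call:

import boto3

dynamodb = boto3.client('dynamodb')
TABLE = 'example_ioc_table'
PRIMARY_KEY = 'ioc_value'

def query(values):
    """Return (items, unprocessed_keys) for a batch of primary-key values."""
    response = dynamodb.batch_get_item(
        RequestItems={
            TABLE: {
                'Keys': [{PRIMARY_KEY: {'S': value}} for value in values]
            }
        }
    )
    # DynamoDB may return some keys unprocessed (throttling, size limits);
    # callers are expected to re-issue those keys, which is what the second
    # try block in the hunk above does.
    return (
        response.get('Responses', {}).get(TABLE, []),
        response.get('UnprocessedKeys', {}),
    )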
4 changes: 2 additions & 2 deletions stream_alert_cli/athena/handler.py
@@ -91,7 +91,7 @@ def rebuild_partitions(athena_client, options, config):
partition_success, partitions = athena_client.run_athena_query(
query='SHOW PARTITIONS {}'.format(sanitized_table_name), database='streamalert')
if not partition_success:
-LOGGER_CLI.error('An error occured when loading partitions for %s',
+LOGGER_CLI.error('An error occurred when loading partitions for %s',
sanitized_table_name)
return

@@ -102,7 +102,7 @@ def rebuild_partitions(athena_client, options, config):
drop_success, _ = athena_client.run_athena_query(
query='DROP TABLE {}'.format(sanitized_table_name), database='streamalert')
if not drop_success:
-LOGGER_CLI.error('An error occured when dropping the %s table', sanitized_table_name)
+LOGGER_CLI.error('An error occurred when dropping the %s table', sanitized_table_name)
return

LOGGER_CLI.info('Dropped table %s', sanitized_table_name)
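rebuild_partitions drives Athena with SHOW PARTITIONS and DROP TABLE statements and bails out when a query fails. A minimal sketch of issuing such a statement with boto3 and checking for success; the result bucket and table name are placeholders, and run_athena_query in the CLI presumably wraps similar calls:

import time

import boto3

athena = boto3.client('athena')

def run_query(query, database='streamalert',
              output='s3://example-athena-results/'):
    """Run a single Athena statement and return (success, execution_id)."""
    execution_id = athena.start_query_execution(
        QueryString=query,
        QueryExecutionContext={'Database': database},
        ResultConfiguration={'OutputLocation': output},
    )['QueryExecutionId']

    # Poll until Athena reports a terminal state for the query.
    while True:
        state = athena.get_query_execution(
            QueryExecutionId=execution_id
        )['QueryExecution']['Status']['State']
        if state in ('SUCCEEDED', 'FAILED', 'CANCELLED'):
            return state == 'SUCCEEDED', execution_id
        time.sleep(2)

success, _ = run_query('SHOW PARTITIONS example_table')
if not success:
    print('An error occurred when loading partitions for example_table')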
2 changes: 1 addition & 1 deletion stream_alert_cli/terraform/handler.py
@@ -84,7 +84,7 @@ def terraform_handler(options, config):
'aws_kms_alias.stream_alert_secrets'
]
if not tf_runner(targets=init_targets):
-LOGGER_CLI.error('An error occured while running StreamAlert init')
+LOGGER_CLI.error('An error occurred while running StreamAlert init')
sys.exit(1)

# generate the main.tf with remote state enabled
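terraform_handler runs a targeted init pass and exits non-zero on failure. A minimal sketch of a targeted Terraform run via subprocess; tf_runner's actual implementation may differ, and only the resource target is taken from the hunk above:

import subprocess
import sys

def run_targeted_apply(targets):
    """Apply only the given Terraform resources; return True on success."""
    command = ['terraform', 'apply', '-auto-approve']
    command.extend('-target={}'.format(target) for target in targets)
    return subprocess.call(command) == 0

if not run_targeted_apply(['aws_kms_alias.stream_alert_secrets']):
    print('An error occurred while running StreamAlert init')
    sys.exit(1)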
2 changes: 1 addition & 1 deletion stream_alert_cli/test.py
@@ -996,7 +996,7 @@ def run_tests(options, context):
alert_proc_tester.all_tests_passed):
sys.exit(1)

-# If there are any log records in the memory buffer, then errors occured somewhere
+# If there are any log records in the memory buffer, then errors occurred somewhere
if log_mem_hanlder.buffer:
# Release the MemoryHandler so we can do some other logging now
logging.getLogger().removeHandler(log_mem_hanlder)
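run_tests treats any record left in the memory handler's buffer as a failed run. A minimal sketch of that pattern with logging.handlers.MemoryHandler; the capacity and logger setup are illustrative only:

import logging
import logging.handlers
import sys

# Buffer ERROR-and-above records instead of emitting them immediately.
mem_handler = logging.handlers.MemoryHandler(capacity=1024,
                                             flushLevel=logging.CRITICAL)
mem_handler.setLevel(logging.ERROR)
logging.getLogger().addHandler(mem_handler)

# ... run the tests, which may log errors into the buffer ...

if mem_handler.buffer:
    # Detach the handler so follow-up logging is emitted normally, then
    # treat any buffered record as evidence that errors occurred somewhere.
    logging.getLogger().removeHandler(mem_handler)
    logging.error('%d error(s) were logged during the run', len(mem_handler.buffer))
    sys.exit(1)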
@@ -439,7 +439,7 @@ def test_process_ioc_with_clienterror(self, log_mock, mock_client):
StreamIoc(value='1.1.1.2', ioc_type='ip')
]
threat_intel._process_ioc(ioc_collections)
-log_mock.assert_called_with('An error occured while quering dynamodb table. Error is: %s',
+log_mock.assert_called_with('An error occurred while quering dynamodb table. Error is: %s',
{'Error': {'Code': 400, 'Message': 'raising test exception'}})

@patch('boto3.client')
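The unit test above patches the module logger and asserts on the exact logging call. A minimal sketch of that mock pattern; the logger, message, and function under test are placeholders rather than the project's real ones:

import logging
from unittest.mock import patch

LOGGER = logging.getLogger('sketch')

def process():
    """Stand-in for the code under test, which logs a query failure."""
    LOGGER.error('An error occurred while querying the table. Error is: %s',
                 {'Error': {'Code': 400, 'Message': 'raising test exception'}})

@patch.object(LOGGER, 'error')
def test_process_logs_error(log_mock):
    process()
    log_mock.assert_called_with(
        'An error occurred while querying the table. Error is: %s',
        {'Error': {'Code': 400, 'Message': 'raising test exception'}})

test_process_logs_error()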
