Merge pull request #1438 from GSA/main
11/26/2024 Production Deploy
stvnrlly authored Nov 26, 2024
2 parents 9cdd3b2 + 76e63ac commit cffaeb9
Showing 3 changed files with 13 additions and 11 deletions.
2 changes: 2 additions & 0 deletions .github/workflows/checks.yml
@@ -90,6 +90,8 @@ jobs:
      - uses: pypa/gh-action-pip-audit@v1.0.8
        with:
          inputs: requirements.txt
+          ignore-vulns: |
+            PYSEC-2022-43162

  static-scan:
    runs-on: ubuntu-latest
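Note on the checks.yml change: it tells the pypa/gh-action-pip-audit step to skip one known advisory (PYSEC-2022-43162) while still failing the build on anything else the audit finds. A rough sketch of reproducing the same suppressed audit locally, assuming pip-audit's command-line -r and --ignore-vuln options:

import subprocess

# Run the audit against the same requirements file, ignoring the single advisory
# that the workflow now lists under ignore-vulns. Illustrative only; the workflow
# itself calls the GitHub Action rather than the CLI.
result = subprocess.run(
    ["pip-audit", "-r", "requirements.txt", "--ignore-vuln", "PYSEC-2022-43162"],
    capture_output=True,
    text=True,
)
print(result.stdout or result.stderr)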
20 changes: 10 additions & 10 deletions app/aws/s3.py
@@ -23,7 +23,7 @@


def set_job_cache(key, value):
-    current_app.logger.info(f"Setting {key} in the job_cache.")
+    current_app.logger.debug(f"Setting {key} in the job_cache.")
    job_cache = current_app.config["job_cache"]
    job_cache[key] = (value, time.time() + 8 * 24 * 60 * 60)

@@ -34,14 +34,14 @@ def get_job_cache(key):
    if ret is None:
        current_app.logger.warning(f"Could not find {key} in the job_cache.")
    else:
-        current_app.logger.info(f"Got {key} from job_cache.")
+        current_app.logger.debug(f"Got {key} from job_cache.")
    return ret


def len_job_cache():
    job_cache = current_app.config["job_cache"]
    ret = len(job_cache)
-    current_app.logger.info(f"Length of job_cache is {ret}")
+    current_app.logger.debug(f"Length of job_cache is {ret}")
    return ret


@@ -53,7 +53,7 @@ def clean_cache():
        if expiry_time < current_time:
            keys_to_delete.append(key)

-    current_app.logger.info(
+    current_app.logger.debug(
        f"Deleting the following keys from the job_cache: {keys_to_delete}"
    )
    for key in keys_to_delete:
@@ -139,7 +139,7 @@ def cleanup_old_s3_objects():

        try:
            remove_csv_object(obj["Key"])
-            current_app.logger.info(
+            current_app.logger.debug(
                f"#delete-old-s3-objects Deleted: {obj['LastModified']} {obj['Key']}"
            )
        except botocore.exceptions.ClientError:
@@ -287,7 +287,7 @@ def file_exists(file_location):


def get_job_location(service_id, job_id):
-    current_app.logger.info(
+    current_app.logger.debug(
        f"#s3-partitioning NEW JOB_LOCATION: {NEW_FILE_LOCATION_STRUCTURE.format(service_id, job_id)}"
    )
    return (
@@ -305,7 +305,7 @@ def get_old_job_location(service_id, job_id):
    but it will take a few days where we have to support both formats.
    Remove this when everything works with the NEW_FILE_LOCATION_STRUCTURE.
    """
-    current_app.logger.info(
+    current_app.logger.debug(
        f"#s3-partitioning OLD JOB LOCATION: {FILE_LOCATION_STRUCTURE.format(service_id, job_id)}"
    )
    return (
@@ -445,7 +445,7 @@ def extract_personalisation(job):
def get_phone_number_from_s3(service_id, job_id, job_row_number):
    job = get_job_cache(job_id)
    if job is None:
-        current_app.logger.info(f"job {job_id} was not in the cache")
+        current_app.logger.debug(f"job {job_id} was not in the cache")
        job = get_job_from_s3(service_id, job_id)
        # Even if it is None, put it here to avoid KeyErrors
        set_job_cache(job_id, job)
@@ -479,7 +479,7 @@ def get_personalisation_from_s3(service_id, job_id, job_row_number):
    # So this is a little recycling mechanism to reduce the number of downloads.
    job = get_job_cache(job_id)
    if job is None:
-        current_app.logger.info(f"job {job_id} was not in the cache")
+        current_app.logger.debug(f"job {job_id} was not in the cache")
        job = get_job_from_s3(service_id, job_id)
        # Even if it is None, put it here to avoid KeyErrors
        set_job_cache(job_id, job)
@@ -503,7 +503,7 @@ def get_personalisation_from_s3(service_id, job_id, job_row_number):


def get_job_metadata_from_s3(service_id, job_id):
-    current_app.logger.info(
+    current_app.logger.debug(
        f"#s3-partitioning CALLING GET_JOB_METADATA with {service_id}, {job_id}"
    )
    obj = get_s3_object(*get_job_location(service_id, job_id))
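Note on the s3.py changes: they only demote cache bookkeeping logs from info to debug; the underlying job_cache is a plain dict of (value, expiry) tuples with an eight-day TTL. A minimal, standalone sketch of that pattern, with names mirroring the diff (the real helpers read the dict from current_app.config["job_cache"] and log via Flask's current_app.logger):

import logging
import time

logger = logging.getLogger(__name__)
job_cache = {}
EIGHT_DAYS = 8 * 24 * 60 * 60  # entries expire after eight days, as in set_job_cache


def set_job_cache(key, value):
    logger.debug(f"Setting {key} in the job_cache.")
    job_cache[key] = (value, time.time() + EIGHT_DAYS)


def get_job_cache(key):
    ret = job_cache.get(key)
    if ret is None:
        logger.warning(f"Could not find {key} in the job_cache.")
    else:
        logger.debug(f"Got {key} from job_cache.")
    return ret


def clean_cache():
    current_time = time.time()
    keys_to_delete = []
    for key, (_, expiry_time) in job_cache.items():
        if expiry_time < current_time:
            keys_to_delete.append(key)
    logger.debug(f"Deleting the following keys from the job_cache: {keys_to_delete}")
    for key in keys_to_delete:
        del job_cache[key]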
2 changes: 1 addition & 1 deletion app/config.py
@@ -166,7 +166,7 @@ class Config(object):
    current_minute = (datetime.now().minute + 1) % 60

    CELERY = {
-        "worker_max_tasks_per_child": 200,
+        "worker_max_tasks_per_child": 500,
        "broker_url": REDIS_URL,
        "broker_transport_options": {
            "visibility_timeout": 310,
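Note on the config.py change: worker_max_tasks_per_child tells Celery to replace each worker child process after it has executed that many tasks, a common guard against gradual memory growth; raising it from 200 to 500 makes those restarts less frequent. A minimal sketch of the same setting applied to a standalone Celery app, with an illustrative app name and broker URL:

from celery import Celery

# App name and broker URL are hypothetical, for illustration only.
app = Celery("notifications", broker="redis://localhost:6379/0")

# Matches the value this deploy sets in Config.CELERY: each worker child process
# is recycled after 500 tasks instead of 200.
app.conf.worker_max_tasks_per_child = 500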
