Skip to content

Commit

Permalink
Add local fallback to APP_NAME; add comments to cron tasks.
Browse files Browse the repository at this point in the history
Send message to slack when found modified cases.
Change string values from single quotes to double quotes
  • Loading branch information
fec-jli committed Sep 26, 2021
1 parent 4a92a43 commit 7c60a48
Show file tree
Hide file tree
Showing 3 changed files with 73 additions and 64 deletions.
69 changes: 35 additions & 34 deletions webservices/tasks/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,31 +8,33 @@

# Feature and dev are sharing the same RDS box so we only want dev to update
schedule = {}
if env.app.get("space_name", "unknown-space").lower() != "feature":
    # Celery beat schedule: cron hours below are UTC; EST equivalents noted per task.
    schedule = {
        # Refresh materialized views everyday at 5am(EST).
        "refresh_materialized_views": {
            "task": "webservices.tasks.refresh.refresh_materialized_views",
            "schedule": crontab(minute=0, hour=9),
        },
        # Reload DAILY_MODIFIED_STARTING_AO at 9pm(EST) except Sunday.
        "reload_all_aos_daily_except_sunday": {
            "task": "webservices.tasks.legal_docs.reload_all_aos_when_change",
            "schedule": crontab(minute=0, hour=1, day_of_week="mon,tue,wed,thu,fri,sat"),
        },
        # Reload All AOs every Sunday at 9pm(EST).
        "reload_all_aos_every_sunday": {
            "task": "webservices.tasks.legal_docs.reload_all_aos",
            "schedule": crontab(minute=0, hour=1, day_of_week="sun"),
        },
        # Reload RECENTLY_MODIFIED_CASES and RECENTLY_MODIFIED_STARTING_AO
        # every 5 minutes during 6am-7pm(EST).
        "refresh_legal_docs": {
            "task": "webservices.tasks.legal_docs.refresh_most_recent_legal_doc",
            "schedule": crontab(minute="*/5", hour="10-23"),
        },
        # Snapshot Elasticsearch at 12am(EST) in Sunday.
        "backup_elasticsearch_every_sunday": {
            "task": "webservices.tasks.legal_docs.create_es_backup",
            "schedule": crontab(minute=0, hour=4, day_of_week="sun"),
        },
    }
Expand All @@ -41,33 +43,32 @@
def redis_url():
    """
    Retrieve the URL needed to connect to a Redis instance, depending on environment.
    When running in a cloud.gov environment, retrieve the uri credential for the
    "aws-elasticache-redis" service; otherwise fall back to the FEC_REDIS_URL
    credential or a localhost default.
    """
    # Is the app running in a cloud.gov environment
    if env.space is not None:
        redis_env = env.get_service(label="aws-elasticache-redis")
        redis_url = redis_env.credentials.get("uri")

        return redis_url

    return env.get_credential("FEC_REDIS_URL", "redis://localhost:6379/0")


# Celery application instance shared by all webservices background tasks.
app = celery.Celery("openfec")
app.conf.update(
broker_url=redis_url(),
broker_use_ssl={
'ssl_cert_reqs': ssl.CERT_NONE,
"ssl_cert_reqs": ssl.CERT_NONE,
},
redis_backend_use_ssl={
'ssl_cert_reqs': ssl.CERT_NONE,
"ssl_cert_reqs": ssl.CERT_NONE,
},
imports=(
'webservices.tasks.refresh',
'webservices.tasks.download',
'webservices.tasks.legal_docs',
"webservices.tasks.refresh",
"webservices.tasks.download",
"webservices.tasks.legal_docs",
),
beat_schedule=schedule,
broker_connection_timeout=30, # in seconds
Expand All @@ -76,10 +77,10 @@ def redis_url():
)

# celery_once configuration: uses Redis (over SSL) to hold task locks so a
# task decorated with QueueOnce cannot be queued twice concurrently.
app.conf.ONCE = {
    "backend": "celery_once.backends.Redis",
    "settings": {
        "url": redis_url() + "?ssl=true",
        "default_timeout": 60 * 60,  # lock expires after one hour
    },
}

Expand Down
66 changes: 37 additions & 29 deletions webservices/tasks/legal_docs.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,80 +41,88 @@
# Slack channel that receives task status notifications.
SLACK_BOTS = "#bots"


@app.task(once={"graceful": True}, base=QueueOnce)
def refresh_most_recent_legal_doc():
    # Refresh most recently (within 8 hours) modified legal docs:
    # advisory opinions first, then cases (MUR/AF/ADR).
    with db.engine.connect() as conn:
        refresh_most_recent_aos(conn)
        refresh_most_recent_cases(conn)


@app.task(once={"graceful": True}, base=QueueOnce)
def reload_all_aos_when_change():
    """
    Reload all AOs if there were any new or modified AOs found for the past
    24 hour period; post the outcome (reloaded or skipped) to Slack.
    """
    with db.engine.connect() as conn:
        row = conn.execute(DAILY_MODIFIED_STARTING_AO).first()
        if row:
            logger.info(" AO found %s modified at %s", row["ao_no"], row["pg_date"])
            logger.info(" Daily (%s) reload of all AOs starting", datetime.date.today().strftime("%A"))
            load_advisory_opinions()
            logger.info(" Daily (%s) reload of all AOs completed", datetime.date.today().strftime("%A"))
            slack_message = "Daily reload of all AOs completed in {0} space".format(get_app_name())
            utils.post_to_slack(slack_message, SLACK_BOTS)
        else:
            logger.info(" No daily (%s) modified AOs found", datetime.date.today().strftime("%A"))
            slack_message = \
                "No modified AOs found for the day - Reload of all AOs skipped in {0} space".format(get_app_name())
            utils.post_to_slack(slack_message, SLACK_BOTS)


@app.task(once={"graceful": True}, base=QueueOnce)
def reload_all_aos():
    """Unconditionally reload every advisory opinion (weekly job); notify Slack."""
    logger.info(" Weekly (%s) reload of all AOs starting", datetime.date.today().strftime("%A"))
    load_advisory_opinions()
    logger.info(" Weekly (%s) reload of all AOs completed", datetime.date.today().strftime("%A"))
    slack_message = "Weekly reload of all AOs completed in {0} space".format(get_app_name())
    utils.post_to_slack(slack_message, SLACK_BOTS)


@app.task(once={"graceful": True}, base=QueueOnce)
def create_es_backup():
    """Take the weekly Elasticsearch snapshot; report success or failure to Slack."""
    try:
        logger.info(" Weekly (%s) elasticsearch backup starting", datetime.date.today().strftime("%A"))
        create_es_snapshot()
        logger.info(" Weekly (%s) elasticsearch backup completed", datetime.date.today().strftime("%A"))
        slack_message = "Weekly elasticsearch backup completed in {0} space".format(get_app_name())
        utils.post_to_slack(slack_message, SLACK_BOTS)
    except Exception as error:
        # Snapshot failures must not crash the beat worker; log and alert instead.
        logger.exception(error)
        slack_message = "*ERROR* elasticsearch backup failed for {0}. Check logs.".format(get_app_name())
        utils.post_to_slack(slack_message, SLACK_BOTS)


def refresh_most_recent_aos(conn):
    """Reload the most recently modified AO (if any) found by
    RECENTLY_MODIFIED_STARTING_AO on the given DB connection."""
    row = conn.execute(RECENTLY_MODIFIED_STARTING_AO).first()
    if row:
        logger.info(" AO %s found modified at %s", row["ao_no"], row["pg_date"])
        load_advisory_opinions(row["ao_no"])
    else:
        logger.info(" No modified AOs found")


def refresh_most_recent_cases(conn):
    """Reload recently modified cases (MUR/AF/ADR) and post a summary to Slack.

    Published cases are (re)loaded; unpublished ones are counted as deleted.
    """
    logger.info(" Checking for modified cases(MUR/AF/ADR)...")
    rs = conn.execute(RECENTLY_MODIFIED_CASES)
    slack_message = ""
    if rs.returns_rows:
        load_count = 0
        deleted_case_count = 0
        for row in rs:
            logger.info(" %s %s found modified at %s", row["case_type"], row["case_no"], row["pg_date"])
            load_cases(row["case_type"], row["case_no"])
            if row["published_flg"]:
                load_count += 1
                logger.info(" Total of %d case(s) loaded...", load_count)
                slack_message = slack_message + str(row["case_type"]) + " " + str(row["case_no"]) + " found modified at " + str(row["pg_date"])
            else:
                deleted_case_count += 1
                logger.info(" Total of %d case(s) unpublished...", deleted_case_count)
                # Bug fix: previous code assigned a tuple here
                # (`slack_message = "...", str(count)`), which made the later
                # `slack_message + " in {0} space"` raise TypeError. Append a
                # formatted string instead, preserving any loaded-case text.
                slack_message = slack_message + " Total of %d case(s) unpublished..." % deleted_case_count
    else:
        logger.info(" No modified cases found")
    if slack_message:
        slack_message = slack_message + " in {0} space".format(get_app_name())
        utils.post_to_slack(slack_message, SLACK_BOTS)
2 changes: 1 addition & 1 deletion webservices/tasks/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,4 +47,4 @@ def get_json_data(response):


def get_app_name():
    """Return the APP_NAME credential, or a local-development fallback.

    Looks up the credential once (the previous version called
    env.get_credential twice per invocation).
    """
    app_name = env.get_credential("APP_NAME")
    return app_name if app_name is not None else "fec | api | local"

0 comments on commit 7c60a48

Please sign in to comment.