Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fix/update docs #7

Merged
merged 16 commits into from
Jul 12, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .dockerignore
Original file line number Diff line number Diff line change
Expand Up @@ -151,4 +151,4 @@ deploy/eb/
storage/

# Misc
./caddy/certs
caddy/certs
2 changes: 1 addition & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -42,5 +42,5 @@ staticfiles/
.mypy-dep/

# Misc
./caddy/certs
caddy/certs
storage/
13 changes: 10 additions & 3 deletions common/management/commands/setup_sentry_cron_monitor.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
from django.conf import settings
from django.core.management.base import BaseCommand

from risk_module.sentry import SentryMonitor
from risk_module.sentry import SentryMonitor, SentryMonitorConfig

logger = logging.getLogger(__name__)

Expand Down Expand Up @@ -37,8 +37,10 @@ def handle(self, *args, **options):

SENTRY_INGEST = f"https://{parsed_url.hostname}"

for cronjob in SentryMonitor.choices:
job, schedule = cronjob
for sentry_monitor in SentryMonitor:
job = sentry_monitor.value
schedule = sentry_monitor.label

SENTRY_CRONS = f"{SENTRY_INGEST}/api/{project_id}/cron/{job}/{api_key}/"

payload = {
Expand All @@ -47,6 +49,11 @@ def handle(self, *args, **options):
"type": "crontab",
"value": str(schedule),
},
"checkin_margin": SentryMonitorConfig.get_checkin_margin(sentry_monitor),
"max_runtime": SentryMonitorConfig.get_max_runtime(sentry_monitor),
"failure_issue_threshold": SentryMonitorConfig.get_failure_issue_threshold(sentry_monitor),
"recovery_threshold": SentryMonitorConfig.get_recovery_threshold(sentry_monitor),
"tz": settings.TIME_ZONE, # timezone
},
"environment": settings.RISK_ENVIRONMENT,
"status": "ok",
Expand Down
17 changes: 17 additions & 0 deletions common/utils.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
import requests


def logging_context(context) -> dict:
    """Wrap *context* in the shape expected by a logger's ``extra`` kwarg."""
    return dict(context=context)


def logging_response_context(response: requests.Response) -> dict:
    """Build a logging ``extra`` context summarising an HTTP *response*.

    Captures the request URL plus the raw body and status code, wrapped via
    ``logging_context`` so all log records share the same shape.
    """
    response_summary = {
        "content": response.content,
        "status_code": response.status_code,
    }
    return logging_context({"url": response.url, "response": response_summary})
21 changes: 18 additions & 3 deletions docker-compose.yml
Original file line number Diff line number Diff line change
@@ -1,17 +1,32 @@
x-server: &base_server_setup
image: ghcr.io/ifrcgo/go-risk-module-api:latest
build: .
env_file:
- .env
environment:
RISK_ENVIRONMENT: ${RISK_ENVIRONMENT:-development}
RISK_API_FQDN: ${RISK_API_FQDN:-localhost}
DJANGO_DEBUG: ${DJANGO_DEBUG:-True}
DJANGO_SECRET_KEY: ${DJANGO_SECRET_KEY:?err}
DJANGO_SECRET_KEY: ${DJANGO_SECRET_KEY?}
# Database
DATABASE_NAME: ${DATABASE_NAME:-postgres}
DATABASE_USER: ${DATABASE_USER:-postgres}
DATABASE_PASSWORD: ${DATABASE_PASSWORD:-postgres}
DATABASE_HOST: ${DATABASE_HOST:-db}
DATABASE_PORT: ${DATABASE_PORT:-5432}
# Redis
CELERY_REDIS_URL: ${CELERY_REDIS_URL:-redis://redis:6379/0}
CACHE_REDIS_URL: ${CELERY_REDIS_URL:-redis://redis:6379/1}
# PDC
PDC_USERNAME: ${PDC_USERNAME?error}
PDC_PASSWORD: ${PDC_PASSWORD?error}
PDC_ACCESS_TOKEN: ${PDC_ACCESS_TOKEN?error}
# Meteoswiss
METEOSWISS_S3_ENDPOINT_URL: ${METEOSWISS_S3_ENDPOINT_URL?error}
METEOSWISS_S3_BUCKET: ${METEOSWISS_S3_BUCKET?error}
METEOSWISS_S3_ACCESS_KEY: ${METEOSWISS_S3_ACCESS_KEY?error}
METEOSWISS_S3_SECRET_KEY: ${METEOSWISS_S3_SECRET_KEY?error}
env_file:
# NOTE: Pass additional environment using .env
- .env
volumes:
- .:/code
- ipython_data_local:/root/.ipython/profile_default # persist ipython data, including ipython history
Expand Down
13 changes: 13 additions & 0 deletions gh-docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,8 @@ services:
CI: "true"
# https://github.com/pytest-dev/pytest/issues/7443
PYTEST_ADDOPTS: "--color=yes"
RISK_ENVIRONMENT: ${RISK_ENVIRONMENT:-CI}
RISK_API_FQDN: https://go-test.com
DJANGO_DEBUG: "true"
DJANGO_SECRET_KEY: "XXXXXXXXXXXXXXXXXXXXXXXXXX"
# From db.environment
Expand All @@ -37,6 +39,17 @@ services:
DATABASE_HOST: db
# From redis
CELERY_REDIS_URL: redis://redis:6379/0
CACHE_REDIS_URL: redis://redis:6379/1
# Dummy credentials
# -- PDC
PDC_USERNAME: username
PDC_PASSWORD: password
PDC_ACCESS_TOKEN: token
# -- Meteoswiss
METEOSWISS_S3_ENDPOINT_URL: https://test-endpoint.s3.com
METEOSWISS_S3_BUCKET: test-bucket
METEOSWISS_S3_ACCESS_KEY: access-key
METEOSWISS_S3_SECRET_KEY: secret-key
volumes:
- ./coverage/:/code/coverage/
- ./ci-share/:/ci-share/
Expand Down
22 changes: 14 additions & 8 deletions imminent/management/commands/check_pdc_status.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,11 @@
import logging
import datetime

from django.core.management.base import BaseCommand
from django.utils import timezone
from sentry_sdk.crons import monitor

from imminent.models import Pdc
from risk_module.sentry import SentryMonitor


logger = logging.getLogger()
Expand All @@ -12,11 +14,15 @@
class Command(BaseCommand):
help = "Import Hazard Exposure Data"

@monitor(monitor_slug=SentryMonitor.CHECK_PDC_STATUS)
def handle(self, *args, **options):
now = datetime.datetime.now()
today_date = now.date()
pdcs = Pdc.objects.filter(status=Pdc.Status.ACTIVE)
for pdc in pdcs:
if pdc.end_date and pdc.end_date < today_date:
pdc.status = Pdc.Status.EXPIRED
pdc.save(update_fields=["status"])
today_date = timezone.now().date()
resp = (
Pdc.objects.filter(
status=Pdc.Status.ACTIVE,
end_date__lt=today_date,
).update(
status=Pdc.Status.EXPIRED,
)
)
print(f'Updated: {resp}')
12 changes: 10 additions & 2 deletions imminent/management/commands/create_adam_events.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import logging
import urllib3
import json
import pytz
from datetime import datetime

from django.core.management.base import BaseCommand
Expand All @@ -12,6 +13,14 @@
logger = logging.getLogger()


def get_timezone_aware_datetime(iso_format_datetime) -> datetime:
    """Parse an ISO-8601 string and guarantee the result is timezone-aware.

    Naive timestamps are assumed to be UTC (NOTE(review): presumably the
    upstream ADAM feed publishes UTC — confirm) and have UTC attached;
    aware timestamps pass through unchanged.
    """
    # stdlib UTC instead of pytz.UTC: identical offset behaviour, and pytz
    # is deprecated in favour of datetime.timezone / zoneinfo.
    from datetime import timezone

    parsed = datetime.fromisoformat(iso_format_datetime)
    if parsed.tzinfo is None:
        parsed = parsed.replace(tzinfo=timezone.utc)
    return parsed


# TODO: Confirm if this is used or superseded by create_adam_exposure?
class Command(BaseCommand):
help = "Import ADAM Event Data"

Expand All @@ -32,7 +41,6 @@ def handle(self, *args, **options):
url = "https://x8qclqysv7.execute-api.eu-west-1.amazonaws.com/dev/events/feed"
response = http.request("GET", url)
data = response.data
print(data)
values = json.loads(data)
for data in values:
if data["eventType"] in ["Earthquake", "Flood", "Tropical Storm"]:
Expand All @@ -41,6 +49,6 @@ def handle(self, *args, **options):
"hazard_type": self.map_hazard_type(data["eventType"]),
"country": Country.objects.filter(iso3=data["eventISO3"].lower()).first(),
"event_id": data["guid"].split("_")[0] if data["eventType"] == "Tropical Storm" else data["guid"],
"publish_date": data["pubDate"],
"publish_date": get_timezone_aware_datetime(data["pubDate"]),
}
Adam.objects.create(**adam)
83 changes: 69 additions & 14 deletions imminent/management/commands/create_adam_exposure.py
Original file line number Diff line number Diff line change
@@ -1,27 +1,59 @@
import logging
import urllib3
import json
import pytz
from datetime import datetime

from django.core.management.base import BaseCommand
from sentry_sdk.crons import monitor

from risk_module.sentry import SentryMonitor
from common.models import Country, HazardType
from common.utils import logging_response_context
from imminent.models import Adam


logger = logging.getLogger(__name__)


def get_timezone_aware_datetime(iso_format_datetime) -> datetime:
    """Parse an ISO-8601 string and guarantee the result is timezone-aware.

    Naive timestamps are assumed to be UTC (NOTE(review): presumably the
    upstream ADAM feed publishes UTC — confirm) and have UTC attached;
    aware timestamps pass through unchanged.
    """
    # stdlib UTC instead of pytz.UTC: identical offset behaviour, and pytz
    # is deprecated in favour of datetime.timezone / zoneinfo.
    from datetime import timezone

    parsed = datetime.fromisoformat(iso_format_datetime)
    if parsed.tzinfo is None:
        parsed = parsed.replace(tzinfo=timezone.utc)
    return parsed


class Command(BaseCommand):
help = "Import ADAM Exposure Data"

def parse_datetime(self, date):
    """Convert an ISO-like timestamp ("YYYY-MM-DDTHH:MM:SS") to "YYYY-MM-DD".

    Bug fix: the previous format string ``"%Y-%m-%dT%HH:MM::SS"`` was invalid
    — after ``%H`` it required a literal ``H``, and minutes/seconds were the
    literals ``MM``/``SS`` rather than ``%M``/``%S`` — so ``strptime`` raised
    ``ValueError`` for any real timestamp. Use the correct directives.
    """
    return datetime.strptime(date, "%Y-%m-%dT%H:%M:%S").strftime("%Y-%m-%d")

def handle(self, *args, **kwargs):
http = urllib3.PoolManager()
@staticmethod
def is_response_valid(response, response_data) -> bool:
if (
response.status != 200 or
(
isinstance(response_data, dict) and
"features" not in response_data
)
):
return False
return True

def process_earthquakes(self, http):
earthquake_url = "https://x8qclqysv7.execute-api.eu-west-1.amazonaws.com/dev/events/earthquakes/"
response = http.request("GET", earthquake_url)
data = response.data
eathquake_values = json.loads(data)
for earthquake_event in eathquake_values["features"]:
response_data = json.loads(response.data)

if not self.is_response_valid(response, response_data):
logger.error(
"Error querying Adam Earthquakes data",
extra=logging_response_context(response),
)
return

for earthquake_event in response_data["features"]:
geojson = {
"type": "Feature",
"geometry": earthquake_event["geometry"],
Expand All @@ -45,17 +77,25 @@ def handle(self, *args, **kwargs):
"country": Country.objects.filter(iso3=props["iso3"].lower()).last(),
"title": props["title"],
"hazard_type": HazardType.EARTHQUAKE,
"publish_date": props["published_at"],
"publish_date": get_timezone_aware_datetime(props["published_at"]),
"event_id": props["event_id"],
}
)
Adam.objects.get_or_create(**data)

def process_floods(self, http):
flood_url = "https://x8qclqysv7.execute-api.eu-west-1.amazonaws.com/dev/events/floods/"
response = http.request("GET", flood_url)
data = response.data
flood_values = json.loads(data)
for flood_event in flood_values["features"]:
response_data = json.loads(response.data)

if not self.is_response_valid(response, response_data):
logger.error(
"Error querying Adam Floods data",
extra=logging_response_context(response),
)
return

for flood_event in response_data["features"]:
geojson = {
"type": "Feature",
"geometry": flood_event["geometry"],
Expand All @@ -71,17 +111,25 @@ def handle(self, *args, **kwargs):
"country": Country.objects.filter(iso3=props["iso3"].lower()).last(),
"title": None,
"hazard_type": HazardType.FLOOD,
"publish_date": props["effective_date"],
"publish_date": get_timezone_aware_datetime(props["effective_date"]),
"event_id": props["eventid"],
}
)
Adam.objects.get_or_create(**data)

def process_cyclones(self, http):
cyclone_url = "https://x8qclqysv7.execute-api.eu-west-1.amazonaws.com/dev/events/cyclones/"
response = http.request("GET", cyclone_url)
data = response.data
cyclone_values = json.loads(data)
for cyclone_event in cyclone_values["features"]:
response_data = json.loads(response.data)

if not self.is_response_valid(response, response_data):
logger.error(
"Error querying Adam Cyclones data",
extra=logging_response_context(response),
)
return

for cyclone_event in response_data["features"]:
data = {
"geojson": cyclone_event["geometry"],
"event_details": cyclone_event["properties"],
Expand All @@ -96,8 +144,15 @@ def handle(self, *args, **kwargs):
"country": Country.objects.filter(name__icontains=country.strip()).last(),
"title": props["title"],
"hazard_type": HazardType.CYCLONE,
"publish_date": props["published_at"],
"publish_date": get_timezone_aware_datetime(props["published_at"]),
"event_id": props["event_id"],
}
)
Adam.objects.get_or_create(**data)

@monitor(monitor_slug=SentryMonitor.CREATE_ADAM_EXPOSURE)
def handle(self, *args, **kwargs):
http = urllib3.PoolManager()
self.process_earthquakes(http)
self.process_floods(http)
self.process_cyclones(http)
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,9 @@ def handle(self, *args, **kwargs):
country1_df = []
if len(country1_df) > 0:
country1_speed = country1_df.iloc[:, 4:].values.tolist()
speed_60 = None
speed_90 = None
speed_120 = None
if len(country1_speed[0]) == 4:
speed_60 = country1_speed[0][0]
speed_90 = country1_speed[0][1]
Expand Down
20 changes: 13 additions & 7 deletions imminent/management/commands/create_pdc_daily.py
Original file line number Diff line number Diff line change
@@ -1,13 +1,16 @@
import requests
import logging
import datetime
import os

from django.core.management.base import BaseCommand
from django.conf import settings
from django.utils import timezone
from sentry_sdk.crons import monitor

from imminent.models import Pdc
from risk_module.sentry import SentryMonitor
from common.models import HazardType
from common.utils import logging_response_context
from imminent.models import Pdc


logger = logging.getLogger()
Expand All @@ -20,17 +23,20 @@ def parse_timestamp(self, timestamp):
# NOTE: all timestamp are in millisecond and with timezone `utc`
return timezone.make_aware(datetime.datetime.utcfromtimestamp(int(timestamp) / 1000))

@monitor(monitor_slug=SentryMonitor.CREATE_PDC_DAILY)
def handle(self, *args, **options):
# NOTE: Use the search hazard api for the information download
# make sure to use filter the data
access_token = os.environ.get("PDC_ACCESS_TOKEN")
url = "https://sentry.pdc.org/hp_srv/services/hazards/t/json/get_active_hazards"
headers = {"Authorization": "Bearer {}".format(access_token)}
headers = {"Authorization": "Bearer {}".format(settings.PDC_ACCESS_TOKEN)}
response = requests.get(url, headers=headers)
if response.status_code != 200:
error_log = f"Error querying PDC data at {url}"
logger.error(error_log)
logger.error(response.content)
logger.error(
"Error querying PDC data",
extra=logging_response_context(response),
)
# TODO return?

response_data = response.json()
for data in response_data:
# NOTE: Filter the active hazard only
Expand Down
Loading
Loading