From 35045b5d3cb368d4ea6d7eeca32a1459997523e1 Mon Sep 17 00:00:00 2001 From: thenav56 Date: Sat, 10 Aug 2024 17:58:47 +0545 Subject: [PATCH] Remove duplicate codes --- .../management/commands/create_pdc_daily.py | 191 +++---------- .../management/commands/create_pdc_data.py | 255 ++++-------------- .../commands/create_pdc_intensity.py | 64 +++-- 3 files changed, 132 insertions(+), 378 deletions(-) diff --git a/imminent/management/commands/create_pdc_daily.py b/imminent/management/commands/create_pdc_daily.py index e258df2..9c1bcd6 100644 --- a/imminent/management/commands/create_pdc_daily.py +++ b/imminent/management/commands/create_pdc_daily.py @@ -1,10 +1,8 @@ -import datetime import logging import requests from django.conf import settings from django.core.management.base import BaseCommand -from django.utils import timezone from sentry_sdk.crons import monitor from common.models import HazardType @@ -12,18 +10,45 @@ from imminent.models import Pdc from risk_module.sentry import SentryMonitor +from .create_pdc_data import Command as CreatePdcDataCommand + logger = logging.getLogger(__name__) class Command(BaseCommand): help = "Import Active Hazards" - def parse_timestamp(self, timestamp): - # NOTE: all timestamp are in millisecond and with timezone `utc` - return timezone.make_aware(datetime.datetime.utcfromtimestamp(int(timestamp) / 1000)) + def save_pdc_data(self, hazard_type: HazardType, data): + pdc_updated_at = CreatePdcDataCommand.parse_timestamp(data["last_Update"]) + + # XXX: This was only done for WILDFIRE before?? + existing_qs = Pdc.objects.filter( + uuid=data["uuid"], + hazard_type=hazard_type, + pdc_updated_at=pdc_updated_at, + ) + if existing_qs.exists(): + return + + pdc_data = { + "hazard_id": data["hazard_ID"], + "hazard_name": data["hazard_Name"], + "latitude": data["latitude"], + "longitude": data["longitude"], + "description": data["description"], + "hazard_type": hazard_type, + "uuid": data["uuid"], + "start_date": CreatePdcDataCommand.parse_timestamp(data["start_Date"]), + "end_date": CreatePdcDataCommand.parse_timestamp(data["end_Date"]), + "status": Pdc.Status.ACTIVE, + "pdc_created_at": CreatePdcDataCommand.parse_timestamp(data["create_Date"]), + "pdc_updated_at": pdc_updated_at, + # XXX: Severity was not saved here compare to create_pdc_data + } + Pdc.objects.get_or_create(**pdc_data) @monitor(monitor_slug=SentryMonitor.CREATE_PDC_DAILY) - def handle(self, *args, **options): + def handle(self, **_): # NOTE: Use the search hazard api for the information download # make sure to use filter the data url = "https://sentry.pdc.org/hp_srv/services/hazards/t/json/get_active_hazards" @@ -34,157 +59,17 @@ def handle(self, *args, **options): "Error querying PDC data", extra=logging_response_context(response), ) - # TODO return? 
+ return response_data = response.json() for data in response_data: # NOTE: Filter the active hazard only # Update the hazard if it has expired - hazard_type = data["type_ID"] hazard_status = data["status"] + if hazard_status == "E": - pdcs = Pdc.objects.filter(uuid=data["uuid"]) - for pdc in pdcs: - pdc.status = Pdc.Status.EXPIRED - pdc.save(update_fields=["status"]) - if hazard_status == "A": - if hazard_type == "FLOOD": - hazard_type = HazardType.FLOOD - data = { - "hazard_id": data["hazard_ID"], - "hazard_name": data["hazard_Name"], - "latitude": data["latitude"], - "longitude": data["longitude"], - "description": data["description"], - "hazard_type": hazard_type, - "uuid": data["uuid"], - "start_date": self.parse_timestamp(data["start_Date"]), - "end_date": self.parse_timestamp(data["end_Date"]), - "status": Pdc.Status.ACTIVE, - "pdc_created_at": self.parse_timestamp(data["create_Date"]), - "pdc_updated_at": self.parse_timestamp(data["last_Update"]), - } - Pdc.objects.get_or_create(**data) - elif hazard_type == "CYCLONE": - hazard_type = HazardType.CYCLONE - data = { - "hazard_id": data["hazard_ID"], - "hazard_name": data["hazard_Name"], - "latitude": data["latitude"], - "longitude": data["longitude"], - "description": data["description"], - "hazard_type": hazard_type, - "uuid": data["uuid"], - "start_date": self.parse_timestamp(data["start_Date"]), - "end_date": self.parse_timestamp(data["end_Date"]), - "status": Pdc.Status.ACTIVE, - "pdc_created_at": self.parse_timestamp(data["create_Date"]), - "pdc_updated_at": self.parse_timestamp(data["last_Update"]), - } - Pdc.objects.get_or_create(**data) - elif hazard_type == "STORM": - hazard_type = HazardType.STORM - data = { - "hazard_id": data["hazard_ID"], - "hazard_name": data["hazard_Name"], - "latitude": data["latitude"], - "longitude": data["longitude"], - "description": data["description"], - "hazard_type": hazard_type, - "uuid": data["uuid"], - "start_date": self.parse_timestamp(data["start_Date"]), - "end_date": self.parse_timestamp(data["end_Date"]), - "status": Pdc.Status.ACTIVE, - "pdc_created_at": self.parse_timestamp(data["create_Date"]), - "pdc_updated_at": self.parse_timestamp(data["last_Update"]), - } - Pdc.objects.get_or_create(**data) - elif hazard_type == "DROUGHT": - hazard_type = HazardType.DROUGHT - data = { - "hazard_id": data["hazard_ID"], - "hazard_name": data["hazard_Name"], - "latitude": data["latitude"], - "longitude": data["longitude"], - "description": data["description"], - "hazard_type": hazard_type, - "uuid": data["uuid"], - "start_date": self.parse_timestamp(data["start_Date"]), - "end_date": self.parse_timestamp(data["end_Date"]), - "status": Pdc.Status.ACTIVE, - "pdc_created_at": self.parse_timestamp(data["create_Date"]), - "pdc_updated_at": self.parse_timestamp(data["last_Update"]), - } - Pdc.objects.get_or_create(**data) - elif hazard_type == "WIND": - hazard_type = HazardType.WIND - data = { - "hazard_id": data["hazard_ID"], - "hazard_name": data["hazard_Name"], - "latitude": data["latitude"], - "longitude": data["longitude"], - "description": data["description"], - "hazard_type": hazard_type, - "uuid": data["uuid"], - "start_date": self.parse_timestamp(data["start_Date"]), - "end_date": self.parse_timestamp(data["end_Date"]), - "status": Pdc.Status.ACTIVE, - "pdc_created_at": self.parse_timestamp(data["create_Date"]), - "pdc_updated_at": self.parse_timestamp(data["last_Update"]), - } - Pdc.objects.get_or_create(**data) - elif hazard_type == "TSUNAMI": - hazard_type = HazardType.TSUNAMI - data = { - 
"hazard_id": data["hazard_ID"], - "hazard_name": data["hazard_Name"], - "latitude": data["latitude"], - "longitude": data["longitude"], - "description": data["description"], - "hazard_type": hazard_type, - "uuid": data["uuid"], - "start_date": self.parse_timestamp(data["start_Date"]), - "end_date": self.parse_timestamp(data["end_Date"]), - "status": Pdc.Status.ACTIVE, - "pdc_created_at": self.parse_timestamp(data["create_Date"]), - "pdc_updated_at": self.parse_timestamp(data["last_Update"]), - } - Pdc.objects.get_or_create(**data) - elif hazard_type == "EARTHQUAKE": - hazard_type = HazardType.EARTHQUAKE - data = { - "hazard_id": data["hazard_ID"], - "hazard_name": data["hazard_Name"], - "latitude": data["latitude"], - "longitude": data["longitude"], - "description": data["description"], - "hazard_type": hazard_type, - "uuid": data["uuid"], - "start_date": self.parse_timestamp(data["start_Date"]), - "end_date": self.parse_timestamp(data["end_Date"]), - "status": Pdc.Status.ACTIVE, - "pdc_created_at": self.parse_timestamp(data["create_Date"]), - "pdc_updated_at": self.parse_timestamp(data["last_Update"]), - } - Pdc.objects.get_or_create(**data) - elif hazard_type == "WILDFIRE": - hazard_type = HazardType.WILDFIRE - pdc_updated_at = self.parse_timestamp(data["last_Update"]) - if Pdc.objects.filter(uuid=data["uuid"], hazard_type=hazard_type, pdc_updated_at=pdc_updated_at).exists(): - continue - else: - data = { - "hazard_id": data["hazard_ID"], - "hazard_name": data["hazard_Name"], - "latitude": data["latitude"], - "longitude": data["longitude"], - "description": data["description"], - "hazard_type": hazard_type, - "uuid": data["uuid"], - "start_date": self.parse_timestamp(data["start_Date"]), - "end_date": self.parse_timestamp(data["end_Date"]), - "status": Pdc.Status.ACTIVE, - "pdc_created_at": self.parse_timestamp(data["create_Date"]), - "pdc_updated_at": self.parse_timestamp(data["last_Update"]), - } - Pdc.objects.get_or_create(**data) + Pdc.objects.filter(uuid=data["uuid"]).update(status=Pdc.Status.EXPIRED) + + elif hazard_status == "A": + if hazard_type := CreatePdcDataCommand.HAZARD_TYPE_MAP.get(data["type_ID"].upper()): + self.save_pdc_data(hazard_type, data) diff --git a/imminent/management/commands/create_pdc_data.py b/imminent/management/commands/create_pdc_data.py index af523c0..5dc7f29 100644 --- a/imminent/management/commands/create_pdc_data.py +++ b/imminent/management/commands/create_pdc_data.py @@ -18,23 +18,62 @@ class Command(BaseCommand): help = "Import Active Hazards" - def parse_timestamp(self, timestamp): + SEVERITY_MAP = { + "WARNING": Pdc.Severity.WARNING, + "WATCH": Pdc.Severity.WATCH, + "ADVISORY": Pdc.Severity.ADVISORY, + "INFORMATION": Pdc.Severity.INFORMATION, + } + + HAZARD_TYPE_MAP = { + "FLOOD": HazardType.FLOOD, + "CYCLONE": HazardType.CYCLONE, + "STORM": HazardType.STORM, + "DROUGHT": HazardType.DROUGHT, + "WIND": HazardType.WIND, + "TSUNAMI": HazardType.TSUNAMI, + "EARTHQUAKE": HazardType.EARTHQUAKE, + "WILDFIRE": HazardType.WILDFIRE, + } + + @staticmethod + def parse_timestamp(timestamp): # NOTE: all timestamp are in millisecond and with timezone `utc` - return timezone.make_aware(datetime.datetime.utcfromtimestamp(int(timestamp) / 1000)) + return timezone.make_aware( + # FIXME: Using deprecated function + datetime.datetime.utcfromtimestamp(int(timestamp) / 1000) + ) - def parse_severity(self, severity): - if severity == "WARNING": - severity = Pdc.Severity.WARNING - elif severity == "WATCH": - severity = Pdc.Severity.WATCH - elif severity == "ADVISORY": - 
severity = Pdc.Severity.ADVISORY - elif severity == "INFORMATION": - severity = Pdc.Severity.INFORMATION - return severity + def save_pdc_data(self, hazard_type: HazardType, data): + pdc_updated_at = self.parse_timestamp(data["last_Update"]) + + existing_qs = Pdc.objects.filter( + uuid=data["uuid"], + hazard_type=hazard_type, + pdc_updated_at=pdc_updated_at, + ) + if existing_qs.exists(): + return + + pdc_data = { + "hazard_id": data["hazard_ID"], + "hazard_name": data["hazard_Name"], + "latitude": data["latitude"], + "longitude": data["longitude"], + "description": data["description"], + "hazard_type": hazard_type, + "uuid": data["uuid"], + "start_date": self.parse_timestamp(data["start_Date"]), + "end_date": self.parse_timestamp(data["end_Date"]), + "status": Pdc.Status.ACTIVE, + "pdc_created_at": self.parse_timestamp(data["create_Date"]), + "pdc_updated_at": pdc_updated_at, + "severity": self.SEVERITY_MAP.get(data["severity_ID"].upper()), + } + Pdc.objects.get_or_create(**pdc_data) @monitor(monitor_slug=SentryMonitor.CREATE_PDC_DATA) - def handle(self, *args, **options): + def handle(self, **_): # NOTE: Use the search hazard api for the information download # make sure to use filter the data url = "https://sentry.pdc.org/hp_srv/services/hazards/t/json/search_hazard" @@ -54,193 +93,17 @@ def handle(self, *args, **options): "Error querying PDC data", extra=logging_response_context(response), ) - # TODO: return? + return response_data = response.json() for data in response_data: # NOTE: Filter the active hazard only # Update the hazard if it has expired - hazard_type = data["type_ID"] hazard_status = data["status"] + if hazard_status == "E": - pdcs = Pdc.objects.filter(uuid=data["uuid"]) - for pdc in pdcs: - pdc.status = Pdc.Status.EXPIRED - pdc.save(update_fields=["status"]) - if hazard_status == "A": - if hazard_type == "FLOOD": - hazard_type = HazardType.FLOOD - pdc_updated_at = self.parse_timestamp(data["last_Update"]) - if Pdc.objects.filter(uuid=data["uuid"], hazard_type=hazard_type, pdc_updated_at=pdc_updated_at).exists(): - continue - else: - data = { - "hazard_id": data["hazard_ID"], - "hazard_name": data["hazard_Name"], - "latitude": data["latitude"], - "longitude": data["longitude"], - "description": data["description"], - "hazard_type": hazard_type, - "uuid": data["uuid"], - "start_date": self.parse_timestamp(data["start_Date"]), - "end_date": self.parse_timestamp(data["end_Date"]), - "status": Pdc.Status.ACTIVE, - "pdc_created_at": self.parse_timestamp(data["create_Date"]), - "pdc_updated_at": pdc_updated_at, - "severity": self.parse_severity(data["severity_ID"]), - } - Pdc.objects.get_or_create(**data) - elif hazard_type == "CYCLONE": - hazard_type = HazardType.CYCLONE - pdc_updated_at = self.parse_timestamp(data["last_Update"]) - if Pdc.objects.filter(uuid=data["uuid"], hazard_type=hazard_type, pdc_updated_at=pdc_updated_at).exists(): - continue - else: - data = { - "hazard_id": data["hazard_ID"], - "hazard_name": data["hazard_Name"], - "latitude": data["latitude"], - "longitude": data["longitude"], - "description": data["description"], - "hazard_type": hazard_type, - "uuid": data["uuid"], - "start_date": self.parse_timestamp(data["start_Date"]), - "end_date": self.parse_timestamp(data["end_Date"]), - "status": Pdc.Status.ACTIVE, - "pdc_created_at": self.parse_timestamp(data["create_Date"]), - "pdc_updated_at": self.parse_timestamp(data["last_Update"]), - "severity": self.parse_severity(data["severity_ID"]), - } - Pdc.objects.get_or_create(**data) - elif hazard_type == 
"STORM": - hazard_type = HazardType.STORM - pdc_updated_at = self.parse_timestamp(data["last_Update"]) - if Pdc.objects.filter(uuid=data["uuid"], hazard_type=hazard_type, pdc_updated_at=pdc_updated_at).exists(): - continue - else: - data = { - "hazard_id": data["hazard_ID"], - "hazard_name": data["hazard_Name"], - "latitude": data["latitude"], - "longitude": data["longitude"], - "description": data["description"], - "hazard_type": hazard_type, - "uuid": data["uuid"], - "start_date": self.parse_timestamp(data["start_Date"]), - "end_date": self.parse_timestamp(data["end_Date"]), - "status": Pdc.Status.ACTIVE, - "pdc_created_at": self.parse_timestamp(data["create_Date"]), - "pdc_updated_at": self.parse_timestamp(data["last_Update"]), - "severity": self.parse_severity(data["severity_ID"]), - } - Pdc.objects.get_or_create(**data) - elif hazard_type == "DROUGHT": - hazard_type = HazardType.DROUGHT - pdc_updated_at = self.parse_timestamp(data["last_Update"]) - if Pdc.objects.filter(uuid=data["uuid"], hazard_type=hazard_type, pdc_updated_at=pdc_updated_at).exists(): - continue - else: - data = { - "hazard_id": data["hazard_ID"], - "hazard_name": data["hazard_Name"], - "latitude": data["latitude"], - "longitude": data["longitude"], - "description": data["description"], - "hazard_type": hazard_type, - "uuid": data["uuid"], - "start_date": self.parse_timestamp(data["start_Date"]), - "end_date": self.parse_timestamp(data["end_Date"]), - "status": Pdc.Status.ACTIVE, - "pdc_created_at": self.parse_timestamp(data["create_Date"]), - "pdc_updated_at": self.parse_timestamp(data["last_Update"]), - "severity": self.parse_severity(data["severity_ID"]), - } - Pdc.objects.get_or_create(**data) - elif hazard_type == "WIND": - hazard_type = HazardType.WIND - pdc_updated_at = self.parse_timestamp(data["last_Update"]) - if Pdc.objects.filter(uuid=data["uuid"], hazard_type=hazard_type, pdc_updated_at=pdc_updated_at).exists(): - continue - else: - data = { - "hazard_id": data["hazard_ID"], - "hazard_name": data["hazard_Name"], - "latitude": data["latitude"], - "longitude": data["longitude"], - "description": data["description"], - "hazard_type": hazard_type, - "uuid": data["uuid"], - "start_date": self.parse_timestamp(data["start_Date"]), - "end_date": self.parse_timestamp(data["end_Date"]), - "status": Pdc.Status.ACTIVE, - "pdc_created_at": self.parse_timestamp(data["create_Date"]), - "pdc_updated_at": self.parse_timestamp(data["last_Update"]), - "severity": self.parse_severity(data["severity_ID"]), - } - Pdc.objects.get_or_create(**data) - elif hazard_type == "TSUNAMI": - hazard_type = HazardType.TSUNAMI - pdc_updated_at = self.parse_timestamp(data["last_Update"]) - if Pdc.objects.filter(uuid=data["uuid"], hazard_type=hazard_type, pdc_updated_at=pdc_updated_at).exists(): - continue - else: - data = { - "hazard_id": data["hazard_ID"], - "hazard_name": data["hazard_Name"], - "latitude": data["latitude"], - "longitude": data["longitude"], - "description": data["description"], - "hazard_type": hazard_type, - "uuid": data["uuid"], - "start_date": self.parse_timestamp(data["start_Date"]), - "end_date": self.parse_timestamp(data["end_Date"]), - "status": Pdc.Status.ACTIVE, - "pdc_created_at": self.parse_timestamp(data["create_Date"]), - "pdc_updated_at": self.parse_timestamp(data["last_Update"]), - "severity": self.parse_severity(data["severity_ID"]), - } - Pdc.objects.get_or_create(**data) - elif hazard_type == "EARTHQUAKE": - hazard_type = HazardType.EARTHQUAKE - pdc_updated_at = self.parse_timestamp(data["last_Update"]) 
- if Pdc.objects.filter(uuid=data["uuid"], hazard_type=hazard_type, pdc_updated_at=pdc_updated_at).exists(): - continue - else: - data = { - "hazard_id": data["hazard_ID"], - "hazard_name": data["hazard_Name"], - "latitude": data["latitude"], - "longitude": data["longitude"], - "description": data["description"], - "hazard_type": hazard_type, - "uuid": data["uuid"], - "start_date": self.parse_timestamp(data["start_Date"]), - "end_date": self.parse_timestamp(data["end_Date"]), - "status": Pdc.Status.ACTIVE, - "pdc_created_at": self.parse_timestamp(data["create_Date"]), - "pdc_updated_at": self.parse_timestamp(data["last_Update"]), - "severity": self.parse_severity(data["severity_ID"]), - } - Pdc.objects.get_or_create(**data) - elif hazard_type == "WILDFIRE": - hazard_type = HazardType.WILDFIRE - pdc_updated_at = self.parse_timestamp(data["last_Update"]) - if Pdc.objects.filter(uuid=data["uuid"], hazard_type=hazard_type, pdc_updated_at=pdc_updated_at).exists(): - continue - else: - data = { - "hazard_id": data["hazard_ID"], - "hazard_name": data["hazard_Name"], - "latitude": data["latitude"], - "longitude": data["longitude"], - "description": data["description"], - "hazard_type": hazard_type, - "uuid": data["uuid"], - "start_date": self.parse_timestamp(data["start_Date"]), - "end_date": self.parse_timestamp(data["end_Date"]), - "status": Pdc.Status.ACTIVE, - "pdc_created_at": self.parse_timestamp(data["create_Date"]), - "pdc_updated_at": self.parse_timestamp(data["last_Update"]), - "severity": self.parse_severity(data["severity_ID"]), - } - Pdc.objects.get_or_create(**data) + Pdc.objects.filter(uuid=data["uuid"]).update(status=Pdc.Status.EXPIRED) + + elif hazard_status == "A": + if hazard_type := self.HAZARD_TYPE_MAP.get(data["type_ID"].upper()): + self.save_pdc_data(hazard_type, data) diff --git a/imminent/management/commands/create_pdc_intensity.py b/imminent/management/commands/create_pdc_intensity.py index 20cbe5d..02bd64a 100644 --- a/imminent/management/commands/create_pdc_intensity.py +++ b/imminent/management/commands/create_pdc_intensity.py @@ -1,5 +1,6 @@ +import datetime + import requests -from django.conf import settings from django.core.management.base import BaseCommand from sentry_sdk.crons import monitor @@ -7,40 +8,45 @@ from imminent.models import Pdc from risk_module.sentry import SentryMonitor +from .create_pdc_polygon import ARC_GIS_DEFAULT_PARAMS +from .create_pdc_polygon import Command as CreatePdcPolygonCommand + class Command(BaseCommand): - help = "Import polygon from `uuid` from pdc arch-gis" + help = "Import polygon from `uuid` from pdc arc-gis" @monitor(monitor_slug=SentryMonitor.CREATE_PDC_INTENSITY) - def handle(self, *args, **kwargs): + def handle(self, **_): # get all the uuids and use them to query to the - # arch-gis server of pdc + # arc-gis server of pdc # filtering only cyclone since they only have track of disaster path uuids = Pdc.objects.filter(status=Pdc.Status.ACTIVE, hazard_type=HazardType.CYCLONE).values_list("uuid", flat=True) + session = requests.Session() + token_expires = None + for uuid in uuids: - session = requests.Session() - login_url = "https://partners.pdc.org/arcgis/tokens/generateToken" - - data = { - "f": "json", - "username": settings.PDC_USERNAME, - "password": settings.PDC_PASSWORD, - "referer": "https://www.arcgis.com", - } - - login_response = session.post(login_url, data=data, allow_redirects=True) - access_token = login_response.json()["token"] - session.headers.update( - { - "Authorization": f"Bearer {access_token}", - } + # 
Attach auth token + if token_expires is None or datetime.datetime.now() >= token_expires: + access_token, token_expires = CreatePdcPolygonCommand.get_access_token(session) + session.headers.update( + { + "Authorization": f"Bearer {access_token}", + } + ) + + # Fetch + arc_gis_url = "https://partners.pdc.org/arcgis/rest/services/partners/pdc_active_hazards_partners/MapServer/9/query" + arc_response = session.post( + url=arc_gis_url, + data={ + **ARC_GIS_DEFAULT_PARAMS, + "where": f"uuid='{uuid}'", + "outFields": "forecast_date_time,wind_speed_mph,severity,storm_name,track_heading", + }, + ) + + # Save to database + response_data = arc_response.json() + Pdc.objects.filter(uuid=uuid).update( + storm_position_geojson=response_data["features"], ) - arch_gis_url = f"https://partners.pdc.org/arcgis/rest/services/partners/pdc_active_hazards_partners/MapServer/9/query?where=uuid%3D%27{uuid}%27&text=&objectIds=&time=&geometry=&geometryType=esriGeometryEnvelope&inSR=&spatialRel=esriSpatialRelIntersects&relationParam=&outFields=forecast_date_time%2Cwind_speed_mph%2Cseverity%2Cstorm_name%2Ctrack_heading&returnGeometry=true&returnTrueCurves=false&maxAllowableOffset=&geometryPrecision=&outSR=&having=&returnIdsOnly=false&returnCountOnly=false&orderByFields=&groupByFieldsForStatistics=&outStatistics=&returnZ=false&returnM=false&gdbVersion=&historicMoment=&returnDistinctValues=false&resultOffset=&resultRecordCount=&queryByDistance=&returnExtentOnly=false&datumTransformation=¶meterValues=&rangeValues=&quantizationParameters=&featureEncoding=esriDefault&f=geojson" # noqa: E501 - arch_response = session.get(url=arch_gis_url) - response_data = arch_response.json() - update_data = [] - for data in response_data["features"]: - update_data.append(data) - for pdc in Pdc.objects.filter(uuid=uuid): - pdc.storm_position_geojson = update_data - pdc.save(update_fields=["storm_position_geojson"])
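
Note for reviewers (not part of the patch): the reworked create_pdc_intensity.py imports ARC_GIS_DEFAULT_PARAMS and Command.get_access_token from imminent/management/commands/create_pdc_polygon.py, which this diff does not touch. The sketch below shows the interface being assumed, inferred only from the call sites above and from the token-login block removed in this patch; it is a minimal sketch, not the actual module, and in particular the name of the expiry field and its conversion are assumptions.

# create_pdc_polygon.py -- assumed interface, for review context only
import datetime

import requests
from django.conf import settings
from django.core.management.base import BaseCommand

# Baseline ArcGIS REST query parameters; callers such as create_pdc_intensity.py
# override "where" and "outFields" per request. Exact contents are assumed.
ARC_GIS_DEFAULT_PARAMS = {
    "f": "geojson",
    "returnGeometry": "true",
    "geometryType": "esriGeometryEnvelope",
    "spatialRel": "esriSpatialRelIntersects",
    "featureEncoding": "esriDefault",
}


class Command(BaseCommand):  # imported above as CreatePdcPolygonCommand
    # (the polygon-import handle() itself is omitted here)

    @staticmethod
    def get_access_token(session: requests.Session) -> tuple[str, datetime.datetime]:
        """Log in to the PDC ArcGIS server and return (token, expiry).

        Mirrors the login block removed from create_pdc_intensity.py. The
        "expires" field (epoch milliseconds) and its conversion to a naive
        local datetime are assumptions, based on how the result is compared
        with datetime.datetime.now() at the call site.
        """
        login_response = session.post(
            "https://partners.pdc.org/arcgis/tokens/generateToken",
            data={
                "f": "json",
                "username": settings.PDC_USERNAME,
                "password": settings.PDC_PASSWORD,
                "referer": "https://www.arcgis.com",
            },
            allow_redirects=True,
        )
        payload = login_response.json()
        token_expires = datetime.datetime.fromtimestamp(int(payload["expires"]) / 1000)
        return payload["token"], token_expires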