From d759a4a20051a85e7fde47e65e7107b6b73c48aa Mon Sep 17 00:00:00 2001 From: kshitijrajsharma Date: Mon, 16 Sep 2024 09:20:54 +0545 Subject: [PATCH 1/5] refactor(hdx-cron): refactors hdx table to cron table --- API/cron.py | 217 +++++++++++++++++++++++++++++++++ API/data/{hdx.sql => cron.sql} | 4 +- API/data/tables.sql | 6 +- API/hdx.py | 214 -------------------------------- API/main.py | 4 +- src/app.py | 130 ++++++++++---------- src/config.py | 2 +- 7 files changed, 290 insertions(+), 287 deletions(-) create mode 100644 API/cron.py rename API/data/{hdx.sql => cron.sql} (99%) delete mode 100644 API/hdx.py diff --git a/API/cron.py b/API/cron.py new file mode 100644 index 00000000..404e97b4 --- /dev/null +++ b/API/cron.py @@ -0,0 +1,217 @@ +# Standard library imports +from typing import Dict, List + +# Third party imports +from fastapi import APIRouter, Depends, HTTPException, Query, Request +from fastapi_versioning import version + +# Reader imports +from src.app import Cron +from src.config import LIMITER as limiter +from src.config import RATE_LIMIT_PER_MIN + +from .auth import AuthUser, admin_required, staff_required + +# from src.validation.models import DynamicCategoriesModel + + +router = APIRouter(prefix="/cron", tags=["Cron"]) + + +@router.post("/", response_model=dict) +@limiter.limit(f"{RATE_LIMIT_PER_MIN}/minute") +@version(1) +async def create_cron( + request: Request, cron_data: dict, user_data: AuthUser = Depends(staff_required) +): + """ + Create a new Cron entry. + + Args: + request (Request): The request object. + cron_data (dict): Data for creating the cron entry. + user_data (AuthUser): User authentication data. + + Returns: + dict: Result of the cron creation process. + """ + cron_instance = Cron() + return cron_instance.create_cron(cron_data) + + +@router.get("/", response_model=List[dict]) +@limiter.limit(f"{RATE_LIMIT_PER_MIN}/minute") +@version(1) +async def read_cron_list( + request: Request, + skip: int = 0, + limit: int = 10, +): + """ + Retrieve a list of Cron entries based on provided filters. + + Args: + request (Request): The request object. + skip (int): Number of entries to skip. + limit (int): Maximum number of entries to retrieve. + + Returns: + List[dict]: List of Cron entries. + """ + cron_instance = Cron() + filters = {} + for key, values in request.query_params.items(): + if key not in ["skip", "limit"]: + if key in ["iso3", "id", "queue", "meta", "cron_upload", "cid"]: + filters[f"{key} = %s"] = values + continue + filters[f"dataset->>'{key}' = %s"] = values + try: + cron_list = cron_instance.get_cron_list_with_filters(skip, limit, filters) + except Exception as ex: + raise HTTPException(status_code=422, detail="Couldn't process query") + return cron_list + + +@router.get("/search/", response_model=List[dict]) +@limiter.limit(f"{RATE_LIMIT_PER_MIN}/minute") +@version(1) +async def search_cron( + request: Request, + dataset_title: str = Query( + ..., description="The title of the dataset to search for." + ), + skip: int = Query(0, description="Number of entries to skip."), + limit: int = Query(10, description="Maximum number of entries to retrieve."), +): + """ + Search for Cron entries by dataset title. + + Args: + request (Request): The request object. + dataset_title (str): The title of the dataset to search for. + skip (int): Number of entries to skip. + limit (int): Maximum number of entries to retrieve. + + Returns: + List[dict]: List of Cron entries matching the dataset title. 
+ """ + cron_instance = Cron() + cron_list = cron_instance.search_cron_by_dataset_title(dataset_title, skip, limit) + return cron_list + + +@router.get("/{cron_id}", response_model=dict) +@limiter.limit(f"{RATE_LIMIT_PER_MIN}/minute") +@version(1) +async def read_cron(request: Request, cron_id: int): + """ + Retrieve a specific cron entry by its ID. + + Args: + request (Request): The request object. + cron_id (int): ID of the cron entry to retrieve. + + Returns: + dict: Details of the requested cron entry. + + Raises: + HTTPException: If the cron entry is not found. + """ + cron_instance = Cron() + cron = cron_instance.get_cron_by_id(cron_id) + if cron: + return cron + raise HTTPException(status_code=404, detail="cron not found") + + +@router.put("/{cron_id}", response_model=dict) +@limiter.limit(f"{RATE_LIMIT_PER_MIN}/minute") +@version(1) +async def update_cron( + request: Request, + cron_id: int, + cron_data: dict, + user_data: AuthUser = Depends(staff_required), +): + """ + Update an existing cron entry. + + Args: + request (Request): The request object. + cron_id (int): ID of the cron entry to update. + cron_data (dict): Data for updating the cron entry. + user_data (AuthUser): User authentication data. + + Returns: + dict: Result of the cron update process. + + Raises: + HTTPException: If the cron entry is not found. + """ + cron_instance = Cron() + existing_cron = cron_instance.get_cron_by_id(cron_id) + if not existing_cron: + raise HTTPException(status_code=404, detail="cron not found") + cron_instance_update = Cron() + return cron_instance_update.update_cron(cron_id, cron_data) + + +@router.patch("/{cron_id}", response_model=Dict) +@limiter.limit(f"{RATE_LIMIT_PER_MIN}/minute") +@version(1) +async def patch_cron( + request: Request, + cron_id: int, + cron_data: Dict, + user_data: AuthUser = Depends(staff_required), +): + """ + Partially update an existing cron entry. + + Args: + request (Request): The request object. + cron_id (int): ID of the cron entry to update. + cron_data (Dict): Data for partially updating the cron entry. + user_data (AuthUser): User authentication data. + + Returns: + Dict: Result of the cron update process. + + Raises: + HTTPException: If the cron entry is not found. + """ + cron_instance = Cron() + existing_cron = cron_instance.get_cron_by_id(cron_id) + if not existing_cron: + raise HTTPException(status_code=404, detail="cron not found") + patch_instance = Cron() + return patch_instance.patch_cron(cron_id, cron_data) + + +@router.delete("/{cron_id}", response_model=dict) +@limiter.limit(f"{RATE_LIMIT_PER_MIN}/minute") +@version(1) +async def delete_cron( + request: Request, cron_id: int, user_data: AuthUser = Depends(admin_required) +): + """ + Delete an existing cron entry. + + Args: + request (Request): The request object. + cron_id (int): ID of the cron entry to delete. + user_data (AuthUser): User authentication data. + + Returns: + dict: Result of the cron deletion process. + + Raises: + HTTPException: If the cron entry is not found. 
+ """ + cron_instance = Cron() + existing_cron = cron_instance.get_cron_by_id(cron_id) + if not existing_cron: + raise HTTPException(status_code=404, detail="cron not found") + + return cron_instance.delete_cron(cron_id) diff --git a/API/data/hdx.sql b/API/data/cron.sql similarity index 99% rename from API/data/hdx.sql rename to API/data/cron.sql index b9c05074..f5200494 100644 --- a/API/data/hdx.sql +++ b/API/data/cron.sql @@ -1,4 +1,4 @@ -CREATE TABLE if not exists public.hdx ( +CREATE TABLE if not exists public.cron ( id SERIAL PRIMARY KEY, iso3 VARCHAR(3) NULL, cid INT NULL, @@ -9,7 +9,7 @@ CREATE TABLE if not exists public.hdx ( categories JSONB NULL, geometry public.geometry(MultiPolygon, 4326) NULL ); -CREATE INDEX if not exists hdx_dataset_idx ON public.hdx (dataset); +CREATE INDEX if not exists cron_dataset_idx ON public.cron (dataset); INSERT INTO public.hdx (iso3,cid,hdx_upload,dataset,queue,meta,categories,geometry) VALUES ('AFG',168,true,'{"dataset_title": "Afghanistan", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_afg", "dataset_locations": ["afg"]}','raw_ondemand',false,NULL,NULL), diff --git a/API/data/tables.sql b/API/data/tables.sql index 64759462..63c77594 100644 --- a/API/data/tables.sql +++ b/API/data/tables.sql @@ -14,7 +14,7 @@ CREATE table if not exists public.users ( CONSTRAINT valid_role CHECK ((role = ANY (ARRAY[1, 2, 3]))) ); -CREATE TABLE if not exists public.hdx ( +CREATE TABLE if not exists public.cron ( id SERIAL PRIMARY KEY, iso3 VARCHAR(3) NULL, cid INT NULL, @@ -25,5 +25,5 @@ CREATE TABLE if not exists public.hdx ( categories JSONB NULL, geometry public.geometry(MultiPolygon, 4326) NULL ); -CREATE INDEX if not exists hdx_dataset_idx ON public.hdx (dataset); -CREATE UNIQUE INDEX if not exists unique_dataset_prefix_idx ON public.hdx ((dataset->>'dataset_prefix')); +CREATE INDEX if not exists cron_dataset_idx ON public.cron (dataset); +CREATE UNIQUE INDEX if not exists unique_dataset_prefix_idx ON public.cron ((dataset->>'dataset_prefix')); diff --git a/API/hdx.py b/API/hdx.py deleted file mode 100644 index b5d89a20..00000000 --- a/API/hdx.py +++ /dev/null @@ -1,214 +0,0 @@ -from typing import Dict, List - -from fastapi import APIRouter, Depends, HTTPException, Query, Request -from fastapi_versioning import version - -from src.app import HDX -from src.config import LIMITER as limiter -from src.config import RATE_LIMIT_PER_MIN - -from .auth import AuthUser, admin_required, staff_required - -# from src.validation.models import DynamicCategoriesModel - - -router = APIRouter(prefix="/hdx", tags=["HDX"]) - - -@router.post("/", response_model=dict) -@limiter.limit(f"{RATE_LIMIT_PER_MIN}/minute") -@version(1) -async def create_hdx( - request: Request, hdx_data: dict, user_data: AuthUser = Depends(staff_required) -): - """ - Create a new HDX entry. - - Args: - request (Request): The request object. - hdx_data (dict): Data for creating the HDX entry. - user_data (AuthUser): User authentication data. - - Returns: - dict: Result of the HDX creation process. - """ - hdx_instance = HDX() - return hdx_instance.create_hdx(hdx_data) - - -@router.get("/", response_model=List[dict]) -@limiter.limit(f"{RATE_LIMIT_PER_MIN}/minute") -@version(1) -async def read_hdx_list( - request: Request, - skip: int = 0, - limit: int = 10, -): - """ - Retrieve a list of HDX entries based on provided filters. - - Args: - request (Request): The request object. - skip (int): Number of entries to skip. - limit (int): Maximum number of entries to retrieve. 
- - Returns: - List[dict]: List of HDX entries. - """ - hdx_instance = HDX() - filters = {} - for key, values in request.query_params.items(): - if key not in ["skip", "limit"]: - if key in ["iso3", "id", "queue", "meta", "hdx_upload", "cid"]: - filters[f"{key} = %s"] = values - continue - filters[f"dataset->>'{key}' = %s"] = values - try: - hdx_list = hdx_instance.get_hdx_list_with_filters(skip, limit, filters) - except Exception as ex: - raise HTTPException(status_code=422, detail="Couldn't process query") - return hdx_list - - -@router.get("/search/", response_model=List[dict]) -@limiter.limit(f"{RATE_LIMIT_PER_MIN}/minute") -@version(1) -async def search_hdx( - request: Request, - dataset_title: str = Query( - ..., description="The title of the dataset to search for." - ), - skip: int = Query(0, description="Number of entries to skip."), - limit: int = Query(10, description="Maximum number of entries to retrieve."), -): - """ - Search for HDX entries by dataset title. - - Args: - request (Request): The request object. - dataset_title (str): The title of the dataset to search for. - skip (int): Number of entries to skip. - limit (int): Maximum number of entries to retrieve. - - Returns: - List[dict]: List of HDX entries matching the dataset title. - """ - hdx_instance = HDX() - hdx_list = hdx_instance.search_hdx_by_dataset_title(dataset_title, skip, limit) - return hdx_list - - -@router.get("/{hdx_id}", response_model=dict) -@limiter.limit(f"{RATE_LIMIT_PER_MIN}/minute") -@version(1) -async def read_hdx(request: Request, hdx_id: int): - """ - Retrieve a specific HDX entry by its ID. - - Args: - request (Request): The request object. - hdx_id (int): ID of the HDX entry to retrieve. - - Returns: - dict: Details of the requested HDX entry. - - Raises: - HTTPException: If the HDX entry is not found. - """ - hdx_instance = HDX() - hdx = hdx_instance.get_hdx_by_id(hdx_id) - if hdx: - return hdx - raise HTTPException(status_code=404, detail="HDX not found") - - -@router.put("/{hdx_id}", response_model=dict) -@limiter.limit(f"{RATE_LIMIT_PER_MIN}/minute") -@version(1) -async def update_hdx( - request: Request, - hdx_id: int, - hdx_data: dict, - user_data: AuthUser = Depends(staff_required), -): - """ - Update an existing HDX entry. - - Args: - request (Request): The request object. - hdx_id (int): ID of the HDX entry to update. - hdx_data (dict): Data for updating the HDX entry. - user_data (AuthUser): User authentication data. - - Returns: - dict: Result of the HDX update process. - - Raises: - HTTPException: If the HDX entry is not found. - """ - hdx_instance = HDX() - existing_hdx = hdx_instance.get_hdx_by_id(hdx_id) - if not existing_hdx: - raise HTTPException(status_code=404, detail="HDX not found") - hdx_instance_update = HDX() - return hdx_instance_update.update_hdx(hdx_id, hdx_data) - - -@router.patch("/{hdx_id}", response_model=Dict) -@limiter.limit(f"{RATE_LIMIT_PER_MIN}/minute") -@version(1) -async def patch_hdx( - request: Request, - hdx_id: int, - hdx_data: Dict, - user_data: AuthUser = Depends(staff_required), -): - """ - Partially update an existing HDX entry. - - Args: - request (Request): The request object. - hdx_id (int): ID of the HDX entry to update. - hdx_data (Dict): Data for partially updating the HDX entry. - user_data (AuthUser): User authentication data. - - Returns: - Dict: Result of the HDX update process. - - Raises: - HTTPException: If the HDX entry is not found. 
- """ - hdx_instance = HDX() - existing_hdx = hdx_instance.get_hdx_by_id(hdx_id) - if not existing_hdx: - raise HTTPException(status_code=404, detail="HDX not found") - patch_instance = HDX() - return patch_instance.patch_hdx(hdx_id, hdx_data) - - -@router.delete("/{hdx_id}", response_model=dict) -@limiter.limit(f"{RATE_LIMIT_PER_MIN}/minute") -@version(1) -async def delete_hdx( - request: Request, hdx_id: int, user_data: AuthUser = Depends(admin_required) -): - """ - Delete an existing HDX entry. - - Args: - request (Request): The request object. - hdx_id (int): ID of the HDX entry to delete. - user_data (AuthUser): User authentication data. - - Returns: - dict: Result of the HDX deletion process. - - Raises: - HTTPException: If the HDX entry is not found. - """ - hdx_instance = HDX() - existing_hdx = hdx_instance.get_hdx_by_id(hdx_id) - if not existing_hdx: - raise HTTPException(status_code=404, detail="HDX not found") - - return hdx_instance.delete_hdx(hdx_id) diff --git a/API/main.py b/API/main.py index e46b0b81..19ee141b 100644 --- a/API/main.py +++ b/API/main.py @@ -61,7 +61,7 @@ from .download_metrics import router as metrics_router if ENABLE_HDX_EXPORTS: - from .hdx import router as hdx_router + from .cron import router as cron_router if SENTRY_DSN: # Third party imports @@ -96,7 +96,7 @@ if ENABLE_METRICS_APIS: app.include_router(metrics_router) if ENABLE_HDX_EXPORTS: - app.include_router(hdx_router) + app.include_router(cron_router) if USE_S3_TO_UPLOAD: app.include_router(s3_router) diff --git a/src/app.py b/src/app.py index 5128fe88..46e74a92 100644 --- a/src/app.py +++ b/src/app.py @@ -1667,11 +1667,11 @@ def resource_to_hdx(self, uploaded_resources, dataset_config, category): resource["uploaded_to_hdx"] = True else: non_hdx_resources.append(resource) - category_name, hdx_dataset_info = uploader.upload_dataset( + category_name, cron_dataset_info = uploader.upload_dataset( self.params.meta and USE_S3_TO_UPLOAD ) - hdx_dataset_info["resources"].extend(non_hdx_resources) - return {category_name: hdx_dataset_info} + cron_dataset_info["resources"].extend(non_hdx_resources) + return {category_name: cron_dataset_info} def clean_resources(self): """ @@ -1972,28 +1972,28 @@ def init_dataset(self): self.dataset.add_tag(tag) -class HDX: +class Cron: def __init__(self) -> None: """ - Initializes an instance of the HDX class, connecting to the database. + Initializes an instance of the Cron class, connecting to the database. """ dbdict = get_db_connection_params() self.d_b = Database(dbdict) self.con, self.cur = self.d_b.connect() - def create_hdx(self, hdx_data): + def create_cron(self, cron_data): """ - Create a new HDX entry in the database. + Create a new Cron entry in the database. Args: - hdx_data (dict): Data for creating the HDX entry. + cron_data (dict): Data for creating the Cron entry. Returns: - dict: Result of the HDX creation process. + dict: Result of the cron creation process. 
""" insert_query = sql.SQL( """ - INSERT INTO public.hdx (iso3, hdx_upload, dataset, queue, meta, categories, geometry) + INSERT INTO public.cron (iso3, hdx_upload, dataset, queue, meta, categories, geometry) VALUES (%s, %s, %s, %s, %s, %s, %s) RETURNING * """ @@ -2001,13 +2001,13 @@ def create_hdx(self, hdx_data): self.cur.execute( insert_query, ( - hdx_data.get("iso3", None), - hdx_data.get("hdx_upload", True), - json.dumps(hdx_data.get("dataset")), - hdx_data.get("queue", "raw_ondemand"), - hdx_data.get("meta", False), - json.dumps(hdx_data.get("categories", {})), - json.dumps(hdx_data.get("geometry")), + cron_data.get("iso3", None), + cron_data.get("hdx_upload", True), + json.dumps(cron_data.get("dataset")), + cron_data.get("queue", "raw_ondemand"), + cron_data.get("meta", False), + json.dumps(cron_data.get("categories", {})), + json.dumps(cron_data.get("geometry")), ), ) self.con.commit() @@ -2017,11 +2017,11 @@ def create_hdx(self, hdx_data): return {"create": True} raise HTTPException(status_code=500, detail="Insert failed") - def get_hdx_list_with_filters( + def get_cron_list_with_filters( self, skip: int = 0, limit: int = 10, filters: dict = {} ): """ - Retrieve a list of HDX entries based on provided filters. + Retrieve a list of Cron entries based on provided filters. Args: skip (int): Number of entries to skip. @@ -2029,7 +2029,7 @@ def get_hdx_list_with_filters( filters (dict): Filtering criteria. Returns: - List[dict]: List of HDX entries. + List[dict]: List of Cron entries. """ filter_conditions = [] filter_values = [] @@ -2042,7 +2042,7 @@ def get_hdx_list_with_filters( select_query = sql.SQL( f""" - SELECT ST_AsGeoJSON(c.*) FROM public.hdx c + SELECT ST_AsGeoJSON(c.*) FROM public.cron c {"WHERE " + where_clause if where_clause else ""} OFFSET %s LIMIT %s """ @@ -2054,11 +2054,11 @@ def get_hdx_list_with_filters( self.d_b.close_conn() return [orjson.loads(item[0]) for item in result] - def search_hdx_by_dataset_title( + def search_cron_by_dataset_title( self, dataset_title: str, skip: int = 0, limit: int = 10 ): """ - Search for HDX entries by dataset title. + Search for Cron entries by dataset title. Args: dataset_title (str): The title of the dataset to search for. @@ -2066,11 +2066,11 @@ def search_hdx_by_dataset_title( limit (int): Maximum number of entries to retrieve. Returns: - List[dict]: List of HDX entries matching the dataset title. + List[dict]: List of Cron entries matching the dataset title. """ search_query = sql.SQL( """ - SELECT ST_AsGeoJSON(c.*) FROM public.hdx c + SELECT ST_AsGeoJSON(c.*) FROM public.cron c WHERE c.dataset->>'dataset_title' ILIKE %s OFFSET %s LIMIT %s """ @@ -2080,49 +2080,49 @@ def search_hdx_by_dataset_title( self.d_b.close_conn() return [orjson.loads(item[0]) for item in result] - def get_hdx_by_id(self, hdx_id: int): + def get_cron_by_id(self, cron_id: int): """ - Retrieve a specific HDX entry by its ID. + Retrieve a specific Cron entry by its ID. Args: - hdx_id (int): ID of the HDX entry to retrieve. + cron_id (int): ID of the Cron entry to retrieve. Returns: - dict: Details of the requested HDX entry. + dict: Details of the requested Cron entry. Raises: - HTTPException: If the HDX entry is not found. + HTTPException: If the Cron entry is not found. 
""" select_query = sql.SQL( """ - SELECT ST_AsGeoJSON(c.*) FROM public.hdx c + SELECT ST_AsGeoJSON(c.*) FROM public.cron c WHERE id = %s """ ) - self.cur.execute(select_query, (hdx_id,)) + self.cur.execute(select_query, (cron_id,)) result = self.cur.fetchone() self.d_b.close_conn() if result: return orjson.loads(result[0]) raise HTTPException(status_code=404, detail="Item not found") - def update_hdx(self, hdx_id: int, hdx_data): + def update_cron(self, cron_id: int, cron_data): """ - Update an existing HDX entry in the database. + Update an existing Cron entry in the database. Args: - hdx_id (int): ID of the HDX entry to update. - hdx_data (dict): Data for updating the HDX entry. + cron_id (int): ID of the Cron entry to update. + cron_data (dict): Data for updating the Cron entry. Returns: - dict: Result of the HDX update process. + dict: Result of the Cron update process. Raises: - HTTPException: If the HDX entry is not found. + HTTPException: If the Cron entry is not found. """ update_query = sql.SQL( """ - UPDATE public.hdx + UPDATE public.cron SET iso3 = %s, hdx_upload = %s, dataset = %s, queue = %s, meta = %s, categories = %s, geometry = %s WHERE id = %s RETURNING * @@ -2131,14 +2131,14 @@ def update_hdx(self, hdx_id: int, hdx_data): self.cur.execute( update_query, ( - hdx_data.get("iso3", None), - hdx_data.get("hdx_upload", True), - json.dumps(hdx_data.get("dataset")), - hdx_data.get("queue", "raw_ondemand"), - hdx_data.get("meta", False), - json.dumps(hdx_data.get("categories", {})), - json.dumps(hdx_data.get("geometry")), - hdx_id, + cron_data.get("iso3", None), + cron_data.get("hdx_upload", True), + json.dumps(cron_data.get("dataset")), + cron_data.get("queue", "raw_ondemand"), + cron_data.get("meta", False), + json.dumps(cron_data.get("categories", {})), + json.dumps(cron_data.get("geometry")), + cron_id, ), ) self.con.commit() @@ -2148,36 +2148,36 @@ def update_hdx(self, hdx_id: int, hdx_data): return {"update": True} raise HTTPException(status_code=404, detail="Item not found") - def patch_hdx(self, hdx_id: int, hdx_data: dict): + def patch_cron(self, cron_id: int, cron_data: dict): """ - Partially update an existing HDX entry in the database. + Partially update an existing Cron entry in the database. Args: - hdx_id (int): ID of the HDX entry to update. - hdx_data (dict): Data for partially updating the HDX entry. + cron_id (int): ID of the Cron entry to update. + cron_data (dict): Data for partially updating the Cron entry. Returns: - dict: Result of the HDX update process. + dict: Result of the Cron update process. Raises: - HTTPException: If the HDX entry is not found. + HTTPException: If the Cron entry is not found. 
""" - if not hdx_data: + if not cron_data: raise ValueError("No data provided for update") set_clauses = [] params = [] - for field, value in hdx_data.items(): + for field, value in cron_data.items(): set_clauses.append(sql.SQL("{} = %s").format(sql.Identifier(field))) if isinstance(value, dict): params.append(json.dumps(value)) else: params.append(value) - query = sql.SQL("UPDATE public.hdx SET {} WHERE id = %s RETURNING *").format( + query = sql.SQL("UPDATE public.cron SET {} WHERE id = %s RETURNING *").format( sql.SQL(", ").join(set_clauses) ) - params.append(hdx_id) + params.append(cron_id) self.cur.execute(query, tuple(params)) self.con.commit() @@ -2188,33 +2188,33 @@ def patch_hdx(self, hdx_id: int, hdx_data: dict): return {"update": True} raise HTTPException(status_code=404, detail="Item not found") - def delete_hdx(self, hdx_id: int): + def delete_cron(self, cron_id: int): """ - Delete an existing HDX entry from the database. + Delete an existing Cron entry from the database. Args: - hdx_id (int): ID of the HDX entry to delete. + cron_id (int): ID of the Cron entry to delete. Returns: - dict: Result of the HDX deletion process. + dict: Result of the Cron deletion process. Raises: - HTTPException: If the HDX entry is not found. + HTTPException: If the Cron entry is not found. """ delete_query = sql.SQL( """ - DELETE FROM public.hdx + DELETE FROM public.cron WHERE id = %s RETURNING * """ ) - self.cur.execute(delete_query, (hdx_id,)) + self.cur.execute(delete_query, (cron_id,)) self.con.commit() result = self.cur.fetchone() self.d_b.close_conn() if result: return dict(result[0]) - raise HTTPException(status_code=404, detail="HDX item not found") + raise HTTPException(status_code=404, detail="Cron item not found") class DownloadMetrics: diff --git a/src/config.py b/src/config.py index 78bb17e8..29f90bba 100644 --- a/src/config.py +++ b/src/config.py @@ -292,7 +292,7 @@ def not_raises(func, *args, **kwargs): hdx_credentials = os.environ["REMOTE_HDX"] except KeyError: - logger.debug("EnvVar: REMOTE_HDX not supplied; Falling back to other means") + # logger.debug("EnvVar: REMOTE_HDX not supplied; Falling back to other means") HDX_SITE = os.environ.get("HDX_SITE") or config.get( "HDX", "HDX_SITE", fallback="demo" ) From 28d73065ea058ece2dd204b896591a022129f092 Mon Sep 17 00:00:00 2001 From: kshitijrajsharma Date: Mon, 16 Sep 2024 09:23:23 +0545 Subject: [PATCH 2/5] ci(unittest): refactor syntax insertion --- .github/workflows/Unit-Test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/Unit-Test.yml b/.github/workflows/Unit-Test.yml index 51ed16d7..2b5994c7 100644 --- a/.github/workflows/Unit-Test.yml +++ b/.github/workflows/Unit-Test.yml @@ -57,7 +57,7 @@ jobs: psql -U postgres -h localhost -p 5434 raw < tests/fixtures/pokhara.sql psql -U postgres -h localhost -p 5434 raw < backend/sql/countries.sql psql -U postgres -h localhost -p 5434 raw < API/data/tables.sql - psql -U postgres -h localhost -p 5434 raw < API/data/hdx.sql + psql -U postgres -h localhost -p 5434 raw < API/data/cron.sql - name: Install gdal run: sudo apt-get update && sudo apt-get -y install gdal-bin python3-gdal && sudo apt-get -y autoremove && sudo apt-get clean From 3b4fe290998f5052aa3d61bb30c3c5f77ddb9748 Mon Sep 17 00:00:00 2001 From: kshitijrajsharma Date: Mon, 16 Sep 2024 09:40:14 +0545 Subject: [PATCH 3/5] fix(hdx-iso-submit): fixes bug on iso submit for custom exports --- src/query_builder/builder.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git 
a/src/query_builder/builder.py b/src/query_builder/builder.py
index 356d6298..2df4fd13 100644
--- a/src/query_builder/builder.py
+++ b/src/query_builder/builder.py
@@ -35,7 +35,6 @@
 {criteria}
 """
 HDX_MARKDOWN = """
-OpenStreetMap exports for use in GIS applications.
 {filter_str}
 Features may have these attributes:
 
@@ -887,7 +886,7 @@ def get_country_from_iso(iso3):
     query = f"""SELECT b.cid::int as fid, b.dataset->>'dataset_title' as dataset_title, b.dataset->>'dataset_prefix' as dataset_prefix, b.dataset->>'dataset_locations' as locations
     FROM
-        hdx b
+        cron b
     WHERE
         LOWER(iso3) = '{iso3}'
     """

From ad6f8ddfcf3bbc1e9b10356abbdbfbae1e8078ea Mon Sep 17 00:00:00 2001
From: kshitijrajsharma
Date: Mon, 16 Sep 2024 14:13:26 +0545
Subject: [PATCH 4/5] Improve error text

---
 src/app.py    | 4 ++--
 src/config.py | 4 ++--
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/src/app.py b/src/app.py
index 46e74a92..fb5e82d5 100644
--- a/src/app.py
+++ b/src/app.py
@@ -1265,7 +1265,7 @@ def __init__(self, params, uid=None):
         cur.execute(query)
         result = cur.fetchall()
         if not result:
-            raise HTTPException(status_code=404, detail="Invalid iso3 code")
+            raise HTTPException(status_code=404, detail="iso3 code not found in db")
         result = result[0]
         (
             self.cid,
@@ -1685,7 +1685,7 @@ def clean_resources(self):
 
     def process_custom_categories(self):
         """
-        Processes HDX tags and executes category processing in parallel.
+        Processes Custom tags and executes category processing in parallel.
 
         Returns:
         - Dictionary containing the processed dataset information.
diff --git a/src/config.py b/src/config.py
index 29f90bba..3f6ab649 100644
--- a/src/config.py
+++ b/src/config.py
@@ -378,7 +378,7 @@ def get_db_connection_params() -> dict:
         db_credentials = os.environ["REMOTE_DB"]
     except KeyError:
-        logger.debug("EnvVar: REMOTE_DB not supplied; Falling back to other means")
+        # logger.debug("EnvVar: REMOTE_DB not supplied; Falling back to other means")
 
         connection_params = dict(
             host=os.environ.get("PGHOST") or config.get("DB", "PGHOST"),
@@ -439,7 +439,7 @@ def get_oauth_credentials() -> tuple:
     try:
         oauth2_credentials = os.environ["REMOTE_OAUTH"]
     except KeyError:
-        logger.debug("EnvVar: REMOTE_OAUTH not supplied; Falling back to other means")
+        # logger.debug("EnvVar: REMOTE_OAUTH not supplied; Falling back to other means")
 
         client_id = os.environ.get("OSM_CLIENT_ID") or config.get(
             "OAUTH", "OSM_CLIENT_ID"

From 6b24e624165b0e44ca547b9afa126a54b1874d36 Mon Sep 17 00:00:00 2001
From: kshitijrajsharma
Date: Mon, 16 Sep 2024 14:30:30 +0545
Subject: [PATCH 5/5] ci(syntaxci): fixed syntax ci issue

---
 API/data/cron.sql | 50 +++++++++++++++++++++++------------------------
 1 file changed, 25 insertions(+), 25 deletions(-)

diff --git a/API/data/cron.sql b/API/data/cron.sql
index f5200494..3248a447 100644
--- a/API/data/cron.sql
+++ b/API/data/cron.sql
@@ -11,7 +11,7 @@ CREATE TABLE if not exists public.cron (
 );
 CREATE INDEX if not exists cron_dataset_idx ON public.cron (dataset);
 
-INSERT INTO public.hdx (iso3,cid,hdx_upload,dataset,queue,meta,categories,geometry) VALUES
+INSERT INTO public.cron (iso3,cid,hdx_upload,dataset,queue,meta,categories,geometry) VALUES
 ('AFG',168,true,'{"dataset_title": "Afghanistan", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_afg", "dataset_locations": ["afg"]}','raw_ondemand',false,NULL,NULL),
 ('AND',108,true,'{"dataset_title": "Andorra", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_and", "dataset_locations":
["and"]}','raw_ondemand',false,NULL,NULL), ('ALB',170,true,'{"dataset_title": "Albania", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_alb", "dataset_locations": ["alb"]}','raw_ondemand',false,NULL,NULL), @@ -21,7 +21,7 @@ INSERT INTO public.hdx (iso3,cid,hdx_upload,dataset,queue,meta,categories,geomet ('ATA',66,true,'{"dataset_title": "Antarctica", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_ata", "dataset_locations": ["ata"]}','raw_ondemand',false,NULL,NULL), ('ATG',57,true,'{"dataset_title": "Antigua and Barbuda", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_atg", "dataset_locations": ["atg"]}','raw_ondemand',false,NULL,NULL), ('AZE',166,true,'{"dataset_title": "Azerbaijan", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_aze", "dataset_locations": ["aze"]}','raw_ondemand',false,NULL,NULL); -INSERT INTO public.hdx (iso3,cid,hdx_upload,dataset,queue,meta,categories,geometry) VALUES +INSERT INTO public.cron (iso3,cid,hdx_upload,dataset,queue,meta,categories,geometry) VALUES ('BHS',4,true,'{"dataset_title": "Bahamas", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_bhs", "dataset_locations": ["bhs"]}','raw_ondemand',false,NULL,NULL), ('BHR',2,true,'{"dataset_title": "Bahrain", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_bhr", "dataset_locations": ["bhr"]}','raw_ondemand',false,NULL,NULL), ('VGB',42,true,'{"dataset_title": "British Virgin Islands", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_vgb", "dataset_locations": ["vgb"]}','raw_ondemand',false,NULL,NULL), @@ -32,7 +32,7 @@ INSERT INTO public.hdx (iso3,cid,hdx_upload,dataset,queue,meta,categories,geomet ('ARM',174,true,'{"dataset_title": "Armenia", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_arm", "dataset_locations": ["arm"]}','raw_ondemand',false,NULL,NULL), ('ABW',68,true,'{"dataset_title": "Aruba", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_abw", "dataset_locations": ["abw"]}','raw_ondemand',false,NULL,NULL), ('AUS',56,true,'{"dataset_title": "Australia", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_aus", "dataset_locations": ["aus"]}','raw_ondemand',false,NULL,NULL); -INSERT INTO public.hdx (iso3,cid,hdx_upload,dataset,queue,meta,categories,geometry) VALUES +INSERT INTO public.cron (iso3,cid,hdx_upload,dataset,queue,meta,categories,geometry) VALUES ('CMR',91,true,'{"dataset_title": "Cameroon", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_cmr", "dataset_locations": ["cmr"]}','raw_ondemand',false,NULL,NULL), ('AUT',176,true,'{"dataset_title": "Austria", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_aut", "dataset_locations": ["aut"]}','raw_ondemand',false,NULL,NULL), ('PSE',107,true,'{"dataset_title": "State of Palestine", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_pse", "dataset_locations": ["pse"]}','raw_ondemand',false,NULL,NULL), @@ -43,7 +43,7 @@ INSERT INTO public.hdx (iso3,cid,hdx_upload,dataset,queue,meta,categories,geomet ('BRB',69,true,'{"dataset_title": "Barbados", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_brb", "dataset_locations": ["brb"]}','raw_ondemand',false,NULL,NULL), 
('GIN',39,true,'{"dataset_title": "Guinea", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_gin", "dataset_locations": ["gin"]}','raw_ondemand',false,NULL,NULL), ('BLR',180,true,'{"dataset_title": "Belarus", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_blr", "dataset_locations": ["blr"]}','raw_ondemand',false,NULL,NULL); -INSERT INTO public.hdx (iso3,cid,hdx_upload,dataset,queue,meta,categories,geometry) VALUES +INSERT INTO public.cron (iso3,cid,hdx_upload,dataset,queue,meta,categories,geometry) VALUES ('GNB',188,true,'{"dataset_title": "Guinea Bissau", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_gnb", "dataset_locations": ["gnb"]}','raw_ondemand',false,NULL,NULL), ('TTO',12,true,'{"dataset_title": "Trinidad and Tobago", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_tto", "dataset_locations": ["tto"]}','raw_ondemand',false,NULL,NULL), ('BEL',182,true,'{"dataset_title": "Belgium", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_bel", "dataset_locations": ["bel"]}','raw_ondemand',false,NULL,NULL), @@ -54,7 +54,7 @@ INSERT INTO public.hdx (iso3,cid,hdx_upload,dataset,queue,meta,categories,geomet ('IOT',30,true,'{"dataset_title": "British Indian Ocean Territory", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_iot", "dataset_locations": ["iot"]}','raw_ondemand',false,NULL,NULL), ('BIH',187,true,'{"dataset_title": "Bosnia and Herzegovina", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_bih", "dataset_locations": ["bih"]}','raw_ondemand',false,NULL,NULL), ('PER',156,true,'{"dataset_title": "Peru", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_per", "dataset_locations": ["per"]}','raw_ondemand',false,NULL,NULL); -INSERT INTO public.hdx (iso3,cid,hdx_upload,dataset,queue,meta,categories,geometry) VALUES +INSERT INTO public.cron (iso3,cid,hdx_upload,dataset,queue,meta,categories,geometry) VALUES ('BWA',189,true,'{"dataset_title": "Botswana", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_bwa", "dataset_locations": ["bwa"]}','raw_ondemand',false,NULL,NULL), ('BVT',38,true,'{"dataset_title": "Bouvet Island", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_bvt", "dataset_locations": ["bvt"]}','raw_ondemand',false,NULL,NULL), ('PHL',183,true,'{"dataset_title": "Philippines", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_phl", "dataset_locations": ["phl"]}','raw_ondemand',false,NULL,NULL), @@ -65,7 +65,7 @@ INSERT INTO public.hdx (iso3,cid,hdx_upload,dataset,queue,meta,categories,geomet ('BGR',191,true,'{"dataset_title": "Bulgaria", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_bgr", "dataset_locations": ["bgr"]}','raw_ondemand',false,NULL,NULL), ('CHL',217,true,'{"dataset_title": "Chile", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_chl", "dataset_locations": ["chl"]}','raw_ondemand',false,NULL,NULL), ('VCT',19,true,'{"dataset_title": "Saint Vincent And The Grenadines", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_vct", "dataset_locations": ["vct"]}','raw_ondemand',false,NULL,NULL); -INSERT INTO public.hdx (iso3,cid,hdx_upload,dataset,queue,meta,categories,geometry) VALUES +INSERT INTO public.cron 
(iso3,cid,hdx_upload,dataset,queue,meta,categories,geometry) VALUES ('WSM',13,true,'{"dataset_title": "Samoa", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_wsm", "dataset_locations": ["wsm"]}','raw_ondemand',false,NULL,NULL), ('BFA',195,true,'{"dataset_title": "Burkina Faso", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_bfa", "dataset_locations": ["bfa"]}','raw_ondemand',false,NULL,NULL), ('DOM',164,true,'{"dataset_title": "Dominican Republic", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_dom", "dataset_locations": ["dom"]}','raw_ondemand',false,NULL,NULL), @@ -76,7 +76,7 @@ INSERT INTO public.hdx (iso3,cid,hdx_upload,dataset,queue,meta,categories,geomet ('CYM',34,true,'{"dataset_title": "Cayman Islands", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_cym", "dataset_locations": ["cym"]}','raw_ondemand',false,NULL,NULL), ('CAF',106,true,'{"dataset_title": "Central African Republic", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_caf", "dataset_locations": ["caf"]}','raw_ondemand',false,NULL,NULL), ('GUF',65,true,'{"dataset_title": "French Guiana", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_guf", "dataset_locations": ["guf"]}','raw_ondemand',false,NULL,NULL); -INSERT INTO public.hdx (iso3,cid,hdx_upload,dataset,queue,meta,categories,geometry) VALUES +INSERT INTO public.cron (iso3,cid,hdx_upload,dataset,queue,meta,categories,geometry) VALUES ('CHN',63,true,'{"dataset_title": "China", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_chn", "dataset_locations": ["chn"]}','raw_ondemand',false,NULL,NULL), ('PYF',10,true,'{"dataset_title": "French Polynesia", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_pyf", "dataset_locations": ["pyf"]}','raw_ondemand',false,NULL,NULL), ('HKG',76,true,'{"dataset_title": "China, Hong Kong Special Administrative Region", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_hkg", "dataset_locations": ["hkg"]}','raw_ondemand',false,NULL,NULL), @@ -87,7 +87,7 @@ INSERT INTO public.hdx (iso3,cid,hdx_upload,dataset,queue,meta,categories,geomet ('COG',131,true,'{"dataset_title": "Congo", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_cog", "dataset_locations": ["cog"]}','raw_ondemand',false,NULL,NULL), ('COK',109,true,'{"dataset_title": "Cook Islands", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_cok", "dataset_locations": ["cok"]}','raw_ondemand',false,NULL,NULL), ('CRI',196,true,'{"dataset_title": "Costa Rica", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_cri", "dataset_locations": ["cri"]}','raw_ondemand',false,NULL,NULL); -INSERT INTO public.hdx (iso3,cid,hdx_upload,dataset,queue,meta,categories,geometry) VALUES +INSERT INTO public.cron (iso3,cid,hdx_upload,dataset,queue,meta,categories,geometry) VALUES ('CIV',221,true,'{"dataset_title": "Côte d''Ivoire", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_civ", "dataset_locations": ["civ"]}','raw_ondemand',false,NULL,NULL), ('HRV',67,true,'{"dataset_title": "Croatia", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_hrv", "dataset_locations": ["hrv"]}','raw_ondemand',false,NULL,NULL), ('CUB',149,true,'{"dataset_title": 
"Cuba", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_cub", "dataset_locations": ["cub"]}','raw_ondemand',false,NULL,NULL), @@ -98,7 +98,7 @@ INSERT INTO public.hdx (iso3,cid,hdx_upload,dataset,queue,meta,categories,geomet ('COD',89,true,'{"dataset_title": "Democratic Republic of the Congo", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_cod", "dataset_locations": ["cod"]}','raw_ondemand',false,NULL,NULL), ('DNK',16,true,'{"dataset_title": "Denmark", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_dnk", "dataset_locations": ["dnk"]}','raw_ondemand',false,NULL,NULL), ('DJI',48,true,'{"dataset_title": "Djibouti", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_dji", "dataset_locations": ["dji"]}','raw_ondemand',false,NULL,NULL); -INSERT INTO public.hdx (iso3,cid,hdx_upload,dataset,queue,meta,categories,geometry) VALUES +INSERT INTO public.cron (iso3,cid,hdx_upload,dataset,queue,meta,categories,geometry) VALUES ('DMA',33,true,'{"dataset_title": "Dominica", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_dma", "dataset_locations": ["dma"]}','raw_ondemand',false,NULL,NULL), ('GMB',207,true,'{"dataset_title": "Gambia", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_gmb", "dataset_locations": ["gmb"]}','raw_ondemand',false,NULL,NULL), ('ECU',200,true,'{"dataset_title": "Ecuador", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_ecu", "dataset_locations": ["ecu"]}','raw_ondemand',false,NULL,NULL), @@ -109,7 +109,7 @@ INSERT INTO public.hdx (iso3,cid,hdx_upload,dataset,queue,meta,categories,geomet ('GEO',1,true,'{"dataset_title": "Georgia", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_geo", "dataset_locations": ["geo"]}','raw_ondemand',false,NULL,NULL), ('DEU',50,true,'{"dataset_title": "Germany", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_deu", "dataset_locations": ["deu"]}','raw_ondemand',false,NULL,NULL), ('EST',201,true,'{"dataset_title": "Estonia", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_est", "dataset_locations": ["est"]}','raw_ondemand',false,NULL,NULL); -INSERT INTO public.hdx (iso3,cid,hdx_upload,dataset,queue,meta,categories,geometry) VALUES +INSERT INTO public.cron (iso3,cid,hdx_upload,dataset,queue,meta,categories,geometry) VALUES ('SWZ',144,true,'{"dataset_title": "Eswatini", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_swz", "dataset_locations": ["swz"]}','raw_ondemand',false,NULL,NULL), ('ETH',75,true,'{"dataset_title": "Ethiopia", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_eth", "dataset_locations": ["eth"]}','raw_ondemand',false,NULL,NULL), ('FLK',165,true,'{"dataset_title": "Falkland Islands ( Malvinas)", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_flk", "dataset_locations": ["flk"]}','raw_ondemand',false,NULL,NULL), @@ -120,7 +120,7 @@ INSERT INTO public.hdx (iso3,cid,hdx_upload,dataset,queue,meta,categories,geomet ('HTI',210,true,'{"dataset_title": "Haiti", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_hti", "dataset_locations": ["hti"]}','raw_ondemand',false,NULL,NULL), ('VAT',55,true,'{"dataset_title": "Holy See (Vatican City State)", "dataset_folder": "ISO3", 
"update_frequency": "monthly", "dataset_prefix": "hotosm_vat", "dataset_locations": ["vat"]}','raw_ondemand',false,NULL,NULL), ('HND',169,true,'{"dataset_title": "Honduras", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_hnd", "dataset_locations": ["hnd"]}','raw_ondemand',false,NULL,NULL); -INSERT INTO public.hdx (iso3,cid,hdx_upload,dataset,queue,meta,categories,geometry) VALUES +INSERT INTO public.cron (iso3,cid,hdx_upload,dataset,queue,meta,categories,geometry) VALUES ('HUN',14,true,'{"dataset_title": "Hungary", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_hun", "dataset_locations": ["hun"]}','raw_ondemand',false,NULL,NULL), ('FRA',226,true,'{"dataset_title": "France", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_fra", "dataset_locations": ["fra"]}','raw_ondemand',false,NULL,NULL), ('GRC',202,true,'{"dataset_title": "Greece", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_grc", "dataset_locations": ["grc"]}','raw_ondemand',false,NULL,NULL), @@ -131,7 +131,7 @@ INSERT INTO public.hdx (iso3,cid,hdx_upload,dataset,queue,meta,categories,geomet ('IND',7,true,'{"dataset_title": "India", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_ind", "dataset_locations": ["ind"]}','raw_ondemand',false,NULL,NULL), ('GTM',203,true,'{"dataset_title": "Guatemala", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_gtm", "dataset_locations": ["gtm"]}','raw_ondemand',false,NULL,NULL), ('GGY',83,true,'{"dataset_title": "Guernsey", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_ggy", "dataset_locations": ["ggy"]}','raw_ondemand',false,NULL,NULL); -INSERT INTO public.hdx (iso3,cid,hdx_upload,dataset,queue,meta,categories,geometry) VALUES +INSERT INTO public.cron (iso3,cid,hdx_upload,dataset,queue,meta,categories,geometry) VALUES ('GUY',94,true,'{"dataset_title": "Guyana", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_guy", "dataset_locations": ["guy"]}','raw_ondemand',false,NULL,NULL), ('IRN',204,true,'{"dataset_title": "Iran", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_irn", "dataset_locations": ["irn"]}','raw_ondemand',false,NULL,NULL), ('ITA',123,true,'{"dataset_title": "Italy", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_ita", "dataset_locations": ["ita"]}','raw_ondemand',false,NULL,NULL), @@ -142,7 +142,7 @@ INSERT INTO public.hdx (iso3,cid,hdx_upload,dataset,queue,meta,categories,geomet ('IMN',125,true,'{"dataset_title": "Isle Of Man", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_imn", "dataset_locations": ["imn"]}','raw_ondemand',false,NULL,NULL), ('ISR',185,true,'{"dataset_title": "Israel", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_isr", "dataset_locations": ["isr"]}','raw_ondemand',false,NULL,NULL), ('JEY',84,true,'{"dataset_title": "Jersey", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_jey", "dataset_locations": ["jey"]}','raw_ondemand',false,NULL,NULL); -INSERT INTO public.hdx (iso3,cid,hdx_upload,dataset,queue,meta,categories,geometry) VALUES +INSERT INTO public.cron (iso3,cid,hdx_upload,dataset,queue,meta,categories,geometry) VALUES ('JOR',239,true,'{"dataset_title": "Jordan", "dataset_folder": "ISO3", "update_frequency": 
"monthly", "dataset_prefix": "hotosm_jor", "dataset_locations": ["jor"]}','raw_ondemand',false,NULL,NULL), ('KEN',141,true,'{"dataset_title": "Kenya", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_ken", "dataset_locations": ["ken"]}','raw_ondemand',false,NULL,NULL), ('KIR',46,true,'{"dataset_title": "Kiribati", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_kir", "dataset_locations": ["kir"]}','raw_ondemand',false,NULL,NULL), @@ -153,7 +153,7 @@ INSERT INTO public.hdx (iso3,cid,hdx_upload,dataset,queue,meta,categories,geomet ('LVA',175,true,'{"dataset_title": "Latvia", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_lva", "dataset_locations": ["lva"]}','raw_ondemand',false,NULL,NULL), ('LAO',78,true,'{"dataset_title": "Laos", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_lao", "dataset_locations": ["lao"]}','raw_ondemand',false,NULL,NULL), ('LBY',153,true,'{"dataset_title": "Libya", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_lby", "dataset_locations": ["lby"]}','raw_ondemand',false,NULL,NULL); -INSERT INTO public.hdx (iso3,cid,hdx_upload,dataset,queue,meta,categories,geometry) VALUES +INSERT INTO public.cron (iso3,cid,hdx_upload,dataset,queue,meta,categories,geometry) VALUES ('LBN',208,true,'{"dataset_title": "Lebanon", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_lbn", "dataset_locations": ["lbn"]}','raw_ondemand',false,NULL,NULL), ('LSO',192,true,'{"dataset_title": "Lesotho", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_lso", "dataset_locations": ["lso"]}','raw_ondemand',false,NULL,NULL), ('LBR',148,true,'{"dataset_title": "Liberia", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_lbr", "dataset_locations": ["lbr"]}','raw_ondemand',false,NULL,NULL), @@ -164,7 +164,7 @@ INSERT INTO public.hdx (iso3,cid,hdx_upload,dataset,queue,meta,categories,geomet ('MNE',214,true,'{"dataset_title": "Montenegro", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_mne", "dataset_locations": ["mne"]}','raw_ondemand',false,NULL,NULL), ('MWI',211,true,'{"dataset_title": "Malawi", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_mwi", "dataset_locations": ["mwi"]}','raw_ondemand',false,NULL,NULL), ('NZL',129,true,'{"dataset_title": "New Zealand", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_nzl", "dataset_locations": ["nzl"]}','raw_ondemand',false,NULL,NULL); -INSERT INTO public.hdx (iso3,cid,hdx_upload,dataset,queue,meta,categories,geometry) VALUES +INSERT INTO public.cron (iso3,cid,hdx_upload,dataset,queue,meta,categories,geometry) VALUES ('MYS',230,true,'{"dataset_title": "Malaysia", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_mys", "dataset_locations": ["mys"]}','raw_ondemand',false,NULL,NULL), ('MDV',23,true,'{"dataset_title": "Maldives", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_mdv", "dataset_locations": ["mdv"]}','raw_ondemand',false,NULL,NULL), ('MLI',158,true,'{"dataset_title": "Mali", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_mli", "dataset_locations": ["mli"]}','raw_ondemand',false,NULL,NULL), @@ -175,7 +175,7 @@ INSERT INTO public.hdx 
(iso3,cid,hdx_upload,dataset,queue,meta,categories,geomet
 ('MUS',64,true,'{"dataset_title": "Mauritius", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_mus", "dataset_locations": ["mus"]}','raw_ondemand',false,NULL,NULL),
 ('MYT',31,true,'{"dataset_title": "Mayotte", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_myt", "dataset_locations": ["myt"]}','raw_ondemand',false,NULL,NULL),
 ('MEX',212,true,'{"dataset_title": "Mexico", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_mex", "dataset_locations": ["mex"]}','raw_ondemand',false,NULL,NULL);
-INSERT INTO public.hdx (iso3,cid,hdx_upload,dataset,queue,meta,categories,geometry) VALUES
+INSERT INTO public.cron (iso3,cid,hdx_upload,dataset,queue,meta,categories,geometry) VALUES
 ('FSM',47,true,'{"dataset_title": "Micronesia", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_fsm", "dataset_locations": ["fsm"]}','raw_ondemand',false,NULL,NULL),
 ('MDA',186,true,'{"dataset_title": "Moldova", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_mda", "dataset_locations": ["mda"]}','raw_ondemand',false,NULL,NULL),
 ('MCO',213,true,'{"dataset_title": "Monaco", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_mco", "dataset_locations": ["mco"]}','raw_ondemand',false,NULL,NULL),
@@ -186,7 +186,7 @@ INSERT INTO public.hdx (iso3,cid,hdx_upload,dataset,queue,meta,categories,geomet
 ('MOZ',215,true,'{"dataset_title": "Mozambique", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_moz", "dataset_locations": ["moz"]}','raw_ondemand',false,NULL,NULL),
 ('NER',90,true,'{"dataset_title": "Niger", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_ner", "dataset_locations": ["ner"]}','raw_ondemand',false,NULL,NULL),
 ('PNG',133,true,'{"dataset_title": "Papua New Guinea", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_png", "dataset_locations": ["png"]}','raw_ondemand',false,NULL,NULL);
-INSERT INTO public.hdx (iso3,cid,hdx_upload,dataset,queue,meta,categories,geometry) VALUES
+INSERT INTO public.cron (iso3,cid,hdx_upload,dataset,queue,meta,categories,geometry) VALUES
 ('MMR',86,true,'{"dataset_title": "Myanmar", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_mmr", "dataset_locations": ["mmr"]}','raw_ondemand',false,NULL,NULL),
 ('NAM',115,true,'{"dataset_title": "Namibia", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_nam", "dataset_locations": ["nam"]}','raw_ondemand',false,NULL,NULL),
 ('NRU',32,true,'{"dataset_title": "Nauru", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_nru", "dataset_locations": ["nru"]}','raw_ondemand',false,NULL,NULL),
@@ -197,7 +197,7 @@ INSERT INTO public.hdx (iso3,cid,hdx_upload,dataset,queue,meta,categories,geomet
 ('NCL',120,true,'{"dataset_title": "New Caledonia", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_ncl", "dataset_locations": ["ncl"]}','raw_ondemand',false,NULL,NULL),
 ('NGA',151,true,'{"dataset_title": "Nigeria", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_nga", "dataset_locations": ["nga"]}','raw_ondemand',false,NULL,NULL),
 ('NIU',17,true,'{"dataset_title": "Niue", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_niu", "dataset_locations": ["niu"]}','raw_ondemand',false,NULL,NULL);
-INSERT INTO public.hdx (iso3,cid,hdx_upload,dataset,queue,meta,categories,geometry) VALUES
+INSERT INTO public.cron (iso3,cid,hdx_upload,dataset,queue,meta,categories,geometry) VALUES
 ('PAN',155,true,'{"dataset_title": "Panama", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_pan", "dataset_locations": ["pan"]}','raw_ondemand',false,NULL,NULL),
 ('MKD',206,true,'{"dataset_title": "North Macedonia", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_mkd", "dataset_locations": ["mkd"]}','raw_ondemand',false,NULL,NULL),
 ('NOR',154,true,'{"dataset_title": "Norway", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_nor", "dataset_locations": ["nor"]}','raw_ondemand',false,NULL,NULL),
@@ -208,7 +208,7 @@ INSERT INTO public.hdx (iso3,cid,hdx_upload,dataset,queue,meta,categories,geomet
 ('POL',157,true,'{"dataset_title": "Poland", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_pol", "dataset_locations": ["pol"]}','raw_ondemand',false,NULL,NULL),
 ('PRT',161,true,'{"dataset_title": "Portugal", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_prt", "dataset_locations": ["prt"]}','raw_ondemand',false,NULL,NULL),
 ('PRI',103,true,'{"dataset_title": "Puerto Rico", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_pri", "dataset_locations": ["pri"]}','raw_ondemand',false,NULL,NULL);
-INSERT INTO public.hdx (iso3,cid,hdx_upload,dataset,queue,meta,categories,geometry) VALUES
+INSERT INTO public.cron (iso3,cid,hdx_upload,dataset,queue,meta,categories,geometry) VALUES
 ('ROU',177,true,'{"dataset_title": "Romania", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_rou", "dataset_locations": ["rou"]}','raw_ondemand',false,NULL,NULL),
 ('RUS',126,true,'{"dataset_title": "Russia", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_rus", "dataset_locations": ["rus"]}','raw_ondemand',false,NULL,NULL),
 ('RWA',181,true,'{"dataset_title": "Rwanda", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_rwa", "dataset_locations": ["rwa"]}','raw_ondemand',false,NULL,NULL),
@@ -219,7 +219,7 @@ INSERT INTO public.hdx (iso3,cid,hdx_upload,dataset,queue,meta,categories,geomet
 (NULL,18,true,'{"dataset_title": "Saint Martin and Sint Maarten", "dataset_folder": "ISO3", "update_frequency": "disabled", "dataset_prefix": "hotosm_maf_sxm", "dataset_locations": ["maf", "sxm"]}','raw_ondemand',false,NULL,NULL),
 ('SMR',105,true,'{"dataset_title": "San Marino", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_smr", "dataset_locations": ["smr"]}','raw_ondemand',false,NULL,NULL),
 ('STP',51,true,'{"dataset_title": "Sao Tome And Principe", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_stp", "dataset_locations": ["stp"]}','raw_ondemand',false,NULL,NULL);
-INSERT INTO public.hdx (iso3,cid,hdx_upload,dataset,queue,meta,categories,geometry) VALUES
+INSERT INTO public.cron (iso3,cid,hdx_upload,dataset,queue,meta,categories,geometry) VALUES
 ('SLE',134,true,'{"dataset_title": "Sierra Leone", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_sle", "dataset_locations": ["sle"]}','raw_ondemand',false,NULL,NULL),
 ('SAU',236,true,'{"dataset_title": "Saudi Arabia", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_sau", "dataset_locations": ["sau"]}','raw_ondemand',false,NULL,NULL),
 ('SGP',222,true,'{"dataset_title": "Singapore", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_sgp", "dataset_locations": ["sgp"]}','raw_ondemand',false,NULL,NULL),
@@ -230,7 +230,7 @@ INSERT INTO public.hdx (iso3,cid,hdx_upload,dataset,queue,meta,categories,geomet
 ('ESP',5,true,'{"dataset_title": "Spain", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_esp", "dataset_locations": ["esp"]}','raw_ondemand',false,NULL,NULL),
 ('LKA',130,true,'{"dataset_title": "Sri Lanka", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_lka", "dataset_locations": ["lka"]}','raw_ondemand',false,NULL,NULL),
 ('SRB',218,true,'{"dataset_title": "Serbia", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_srb", "dataset_locations": ["srb"]}','raw_ondemand',false,NULL,NULL);
-INSERT INTO public.hdx (iso3,cid,hdx_upload,dataset,queue,meta,categories,geometry) VALUES
+INSERT INTO public.cron (iso3,cid,hdx_upload,dataset,queue,meta,categories,geometry) VALUES
 ('SYC',219,true,'{"dataset_title": "Seychelles", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_syc", "dataset_locations": ["syc"]}','raw_ondemand',false,NULL,NULL),
 ('SVK',220,true,'{"dataset_title": "Slovakia", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_svk", "dataset_locations": ["svk"]}','raw_ondemand',false,NULL,NULL),
 ('SVN',137,true,'{"dataset_title": "Slovenia", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_svn", "dataset_locations": ["svn"]}','raw_ondemand',false,NULL,NULL),
@@ -241,7 +241,7 @@ INSERT INTO public.hdx (iso3,cid,hdx_upload,dataset,queue,meta,categories,geomet
 ('KOR',136,true,'{"dataset_title": "South Korea", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_kor", "dataset_locations": ["kor"]}','raw_ondemand',false,NULL,NULL),
 ('SJM',111,true,'{"dataset_title": "Svalbard and Jan Mayen Islands", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_sjm", "dataset_locations": ["sjm"]}','raw_ondemand',false,NULL,NULL),
 ('SWE',124,true,'{"dataset_title": "Sweden", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_swe", "dataset_locations": ["swe"]}','raw_ondemand',false,NULL,NULL);
-INSERT INTO public.hdx (iso3,cid,hdx_upload,dataset,queue,meta,categories,geometry) VALUES
+INSERT INTO public.cron (iso3,cid,hdx_upload,dataset,queue,meta,categories,geometry) VALUES
 ('CHE',21,true,'{"dataset_title": "Switzerland", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_che", "dataset_locations": ["che"]}','raw_ondemand',false,NULL,NULL),
 ('TUN',102,true,'{"dataset_title": "Tunisia", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_tun", "dataset_locations": ["tun"]}','raw_ondemand',false,NULL,NULL),
 ('SYR',237,true,'{"dataset_title": "Syria", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_syr", "dataset_locations": ["syr"]}','raw_ondemand',false,NULL,NULL),
@@ -252,7 +252,7 @@ INSERT INTO public.hdx (iso3,cid,hdx_upload,dataset,queue,meta,categories,geomet
 ('TZA',145,true,'{"dataset_title": "Tanzania", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_tza", "dataset_locations": ["tza"]}','raw_ondemand',false,NULL,NULL),
 ('THA',20,true,'{"dataset_title": "Thailand", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_tha", "dataset_locations": ["tha"]}','raw_ondemand',false,NULL,NULL),
 ('TLS',96,true,'{"dataset_title": "Timor-Leste", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_tls", "dataset_locations": ["tls"]}','raw_ondemand',false,NULL,NULL);
-INSERT INTO public.hdx (iso3,cid,hdx_upload,dataset,queue,meta,categories,geometry) VALUES
+INSERT INTO public.cron (iso3,cid,hdx_upload,dataset,queue,meta,categories,geometry) VALUES
 ('TUR',163,true,'{"dataset_title": "Turkey", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_tur", "dataset_locations": ["tur"]}','raw_ondemand',false,NULL,NULL),
 ('TKM',147,true,'{"dataset_title": "Turkmenistan", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_tkm", "dataset_locations": ["tkm"]}','raw_ondemand',false,NULL,NULL),
 ('TCA',85,true,'{"dataset_title": "Turks and Caicos Islands", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_tca", "dataset_locations": ["tca"]}','raw_ondemand',false,NULL,NULL),
@@ -263,7 +263,7 @@ INSERT INTO public.hdx (iso3,cid,hdx_upload,dataset,queue,meta,categories,geomet
 ('ARE',224,true,'{"dataset_title": "United Arab Emirates", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_are", "dataset_locations": ["are"]}','raw_ondemand',false,NULL,NULL),
 ('GBR',80,true,'{"dataset_title": "United Kingdom", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_gbr", "dataset_locations": ["gbr"]}','raw_ondemand',false,NULL,NULL),
 ('USA',235,true,'{"dataset_title": "United States", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_usa", "dataset_locations": ["usa"]}','raw_ondemand',false,NULL,NULL);
-INSERT INTO public.hdx (iso3,cid,hdx_upload,dataset,queue,meta,categories,geometry) VALUES
+INSERT INTO public.cron (iso3,cid,hdx_upload,dataset,queue,meta,categories,geometry) VALUES
 ('VIR',113,true,'{"dataset_title": "United States Virgin Islands", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_vir", "dataset_locations": ["vir"]}','raw_ondemand',false,NULL,NULL),
 ('UZB',232,true,'{"dataset_title": "Uzbekistan", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_uzb", "dataset_locations": ["uzb"]}','raw_ondemand',false,NULL,NULL),
 ('VUT',25,true,'{"dataset_title": "Vanuatu", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_vut", "dataset_locations": ["vut"]}','raw_ondemand',false,NULL,NULL),
@@ -274,4 +274,4 @@ INSERT INTO public.hdx (iso3,cid,hdx_upload,dataset,queue,meta,categories,geomet
 ('ZMB',184,true,'{"dataset_title": "Zambia", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_zmb", "dataset_locations": ["zmb"]}','raw_ondemand',false,NULL,NULL),
 ('ZWE',190,true,'{"dataset_title": "Zimbabwe", "dataset_folder": "ISO3", "update_frequency": "monthly", "dataset_prefix": "hotosm_zwe", "dataset_locations": ["zwe"]}','raw_ondemand',false,NULL,NULL);
 
-CREATE UNIQUE INDEX if not exists unique_dataset_prefix_idx ON public.hdx ((dataset->>'dataset_prefix'));
+CREATE UNIQUE INDEX if not exists unique_dataset_prefix_idx ON public.cron ((dataset->>'dataset_prefix'));