Skip to content

Commit

Permalink
Merge d178ce9 into d9ded15
Browse files Browse the repository at this point in the history
  • Loading branch information
wphyojpl authored Dec 16, 2024
2 parents d9ded15 + d178ce9 commit 7f5e186
Show file tree
Hide file tree
Showing 4 changed files with 107 additions and 1 deletion.
44 changes: 44 additions & 0 deletions cumulus_lambda_functions/cumulus_wrapper/query_granules.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ def __init__(self, cumulus_base: str, cumulus_token: str):
super().__init__(cumulus_base, cumulus_token)
self._conditions.append('status=completed')
self._item_transformer = ItemTransformer()
self.__collection_id = None

def with_filter(self, filter_key, filter_values: list):
if len(filter_values) < 1:
Expand All @@ -34,6 +35,7 @@ def with_filter(self, filter_key, filter_values: list):

def with_collection_id(self, collection_id: str):
    """Restrict the query to one collection and remember its ID for entry-level calls.

    The ID is cached on the instance so later per-granule operations (e.g. delete)
    can build collection-scoped paths. Returns self for fluent chaining.
    """
    self.__collection_id = collection_id
    self._conditions.append(f'{self.__collection_id_key}={collection_id}')
    return self

def with_bbox(self):
Expand Down Expand Up @@ -130,6 +132,48 @@ def query_direct_to_private_api(self, private_api_prefix: str, transform=True):
return {'server_error': f'error while invoking:{str(e)}'}
return {'results': stac_list}

def delete_entry(self, private_api_prefix: str, granule_id: str):
    """Delete a single granule record via the private Cumulus API.

    Invokes the Lambda-proxied private API with an API-Gateway-style DELETE
    payload against /granules/<collection_id>/<granule_id> and validates the
    response body.

    :param private_api_prefix: prefix of the private Cumulus API Lambda to invoke
    :param granule_id: ID of the granule to delete
    :return: {} on success; {'server_error': ...} or {'client_error': ...} on failure
    """
    payload = {
        'httpMethod': 'DELETE',
        'resource': '/{proxy+}',
        # NOTE(review): assumes with_collection_id() was called first; otherwise
        # self.__collection_id is None and the path is malformed - confirm callers.
        'path': f'/{self.__granules_key}/{self.__collection_id}/{granule_id}',
        # Re-encode the accumulated 'key=value' condition strings as query
        # parameters (values containing '=' would break this split - none do today).
        'queryStringParameters': {**{k[0]: k[1] for k in [k1.split('=') for k1 in self._conditions]}},
        'headers': {
            'Content-Type': 'application/json',
        },
    }
    LOGGER.debug(f'payload: {payload}')
    try:
        # Response is an API-Gateway-proxy-shaped dict, e.g.:
        # {'statusCode': 200, 'body': '{...}', 'headers': {...}, 'isBase64Encoded': False}
        query_result = self._invoke_api(payload, private_api_prefix)
        if query_result['statusCode'] >= 500:
            # BUG FIX: was query_result.statusCode - attribute access on a dict
            # raised AttributeError inside the f-string, masking the real error.
            LOGGER.error(f'server error status code: {query_result["statusCode"]}. details: {query_result}')
            return {'server_error': query_result}
        if query_result['statusCode'] >= 400:
            # BUG FIX: same dict-attribute-access bug as the 5xx branch above.
            LOGGER.error(f'client error status code: {query_result["statusCode"]}. details: {query_result}')
            return {'client_error': query_result}
        query_result = json.loads(query_result['body'])
        LOGGER.info(f'json query_result: {query_result}')
        # On success Cumulus returns: {"detail": "Record deleted"}
        if 'detail' not in query_result:
            LOGGER.error(f'missing key: detail. invalid response json: {query_result}')
            return {'server_error': f'missing key: detail. invalid response json: {query_result}'}
        if query_result['detail'] != 'Record deleted':
            LOGGER.error(f'Wrong Message: {query_result}')
            return {'server_error': f'Wrong Message: {query_result}'}
    except Exception as e:
        LOGGER.exception('error while invoking')
        return {'server_error': f'error while invoking:{str(e)}'}
    return {}

def query(self, transform=True):
conditions_str = '&'.join(self._conditions)
LOGGER.info(f'cumulus_base: {self.cumulus_base}')
Expand Down
15 changes: 15 additions & 0 deletions cumulus_lambda_functions/lib/uds_db/granules_db_index.py
Original file line number Diff line number Diff line change
Expand Up @@ -201,6 +201,21 @@ def get_entry(self, tenant: str, tenant_venue: str, doc_id: str, ):
raise ValueError(f"no such granule: {doc_id}")
return result

def delete_entry(self, tenant: str, tenant_venue: str, doc_id: str, ):
    """Delete a granule document from the tenant's granules index.

    Queries the document first so the matched records can be returned to the
    caller (downstream code uses them to remove the associated S3 assets),
    then removes it with a delete-by-query.

    :param tenant: tenant name used to build the index alias
    :param tenant_venue: tenant venue used to build the index alias
    :param doc_id: granule document ID
    :return: the pre-deletion ES query result for doc_id (includes hits)
    :raises ValueError: if the granule does not exist or the deletion fails
    """
    read_alias_name = f'{DBConstants.granules_read_alias_prefix}_{tenant}_{tenant_venue}'.lower().strip()
    result = self.__es.query({
        'size': 9999,
        'query': {'term': {'_id': doc_id}}
    }, read_alias_name)
    # BUG FIX: the original only checked `result is None`, but an ES search for
    # a missing ID returns a dict with empty hits, so the intended "no such
    # granule" error never fired and the caller silently deleted nothing.
    if result is None or len(result.get('hits', {}).get('hits', [])) < 1:
        raise ValueError(f"no such granule: {doc_id}")
    # NOTE(review): update_entry uses the write alias; deleting through the
    # read alias works only if it resolves to writable indices - confirm.
    delete_result = self.__es.delete_by_query({
        'query': {'term': {'_id': doc_id}}
    }, read_alias_name)
    if delete_result is None:
        # BUG FIX: this path previously reused the misleading "no such granule"
        # message even though the document was found and the delete itself failed.
        raise ValueError(f"failed to delete granule: {doc_id}")
    return result

def update_entry(self, tenant: str, tenant_venue: str, json_body: dict, doc_id: str, ):
write_alias_name = f'{DBConstants.granules_write_alias_prefix}_{tenant}_{tenant_venue}'.lower().strip()
json_body['event_time'] = TimeUtils.get_current_unix_milli()
Expand Down
47 changes: 47 additions & 0 deletions cumulus_lambda_functions/uds_api/granules_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,11 @@
import os
from typing import Union

from mdps_ds_lib.lib.aws.aws_s3 import AwsS3
from pystac import Item

from cumulus_lambda_functions.cumulus_wrapper.query_granules import GranulesQuery

from cumulus_lambda_functions.daac_archiver.daac_archiver_logic import DaacArchiverLogic
from cumulus_lambda_functions.uds_api.dapa.daac_archive_crud import DaacArchiveCrud, DaacDeleteModel, DaacAddModel, \
DaacUpdateModel
Expand Down Expand Up @@ -239,6 +244,48 @@ async def get_single_granule_dapa(request: Request, collection_id: str, granule_
raise HTTPException(status_code=500, detail=str(e))
return granules_result

@router.delete("/{collection_id}/items/{granule_id}")
@router.delete("/{collection_id}/items/{granule_id}/")
async def delete_single_granule_dapa(request: Request, collection_id: str, granule_id: str):
    """Delete a granule end-to-end: Cumulus record, ES document(s), and S3 assets.

    BUG FIX: renamed from get_single_granule_dapa, which duplicated the GET
    handler's name and shadowed it at module level.

    :param collection_id: UDS collection identifier (tenant/venue encoded)
    :param granule_id: granule to delete
    :return: {} on success; raises HTTPException(403/500) otherwise
    """
    authorizer: UDSAuthorizorAbstract = UDSAuthorizerFactory() \
        .get_instance(UDSAuthorizerFactory.cognito,
                      es_url=os.getenv('ES_URL'),
                      es_port=int(os.getenv('ES_PORT', '443'))
                      )
    auth_info = FastApiUtils.get_authorization_info(request)
    collection_identifier = UdsCollections.decode_identifier(collection_id)
    # TODO(review): this destructive endpoint only checks READ permission;
    # confirm whether a write/delete permission constant should be required.
    if not authorizer.is_authorized_for_collection(DBConstants.read, collection_id,
                                                   auth_info['ldap_groups'],
                                                   collection_identifier.tenant,
                                                   collection_identifier.venue):
        LOGGER.debug(f'user: {auth_info["username"]} is not authorized for {collection_id}')
        raise HTTPException(status_code=403, detail=json.dumps({
            'message': 'not authorized to execute this action'
        }))
    try:
        LOGGER.debug(f'deleting granule: {granule_id}')
        cumulus_lambda_prefix = os.getenv('CUMULUS_LAMBDA_PREFIX')
        cumulus = GranulesQuery('https://na/dev', 'NA')
        cumulus.with_collection_id(collection_id)
        cumulus_delete_result = cumulus.delete_entry(cumulus_lambda_prefix, granule_id)  # TODO not sure it is correct granule ID
        LOGGER.debug(f'cumulus_delete_result: {cumulus_delete_result}')
        # NOTE(review): error dicts returned by cumulus.delete_entry are logged
        # but not acted on; ES + S3 deletion proceeds regardless - confirm intended.
        es_delete_result = GranulesDbIndex().delete_entry(collection_identifier.tenant,
                                                          collection_identifier.venue,
                                                          granule_id
                                                          )
        LOGGER.debug(f'es_delete_result: {es_delete_result}')
        deleted_items = [Item.from_dict(k['_source']) for k in es_delete_result['hits']['hits']]
        s3 = AwsS3()
        for each_granule in deleted_items:
            # Collect every asset href so all backing S3 objects are removed.
            s3_urls = [v.href for v in each_granule.assets.values()]
            LOGGER.debug(f'deleting S3 for {each_granule.id} - s3_urls: {s3_urls}')
            delete_result = s3.delete_multiple(s3_urls=s3_urls)
            LOGGER.debug(f'delete_result for {each_granule.id} - delete_result: {delete_result}')
    except Exception as e:
        # BUG FIX: log previously said 'failed during get_granules_dapa' (wrong handler).
        LOGGER.exception('failed during delete_single_granule_dapa')
        raise HTTPException(status_code=500, detail=str(e))
    return {}


@router.put("/{collection_id}/archive/{granule_id}")
@router.put("/{collection_id}/archive/{granule_id}/")
Expand Down
2 changes: 1 addition & 1 deletion requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ jsonschema==4.23.0
jsonschema-specifications==2023.12.1
lark==0.12.0
mangum==0.18.0
mdps-ds-lib==1.1.1
mdps-ds-lib==1.1.1.dev000200
pydantic==2.9.2
pydantic_core==2.23.4
pygeofilter==0.2.4
Expand Down

0 comments on commit 7f5e186

Please sign in to comment.