Commit

Merge 919b757 into d9ded15
wphyojpl authored Dec 16, 2024
2 parents d9ded15 + 919b757 commit 60994cb
Showing 5 changed files with 180 additions and 1 deletion.
44 changes: 44 additions & 0 deletions cumulus_lambda_functions/cumulus_wrapper/query_granules.py
@@ -21,6 +21,7 @@ def __init__(self, cumulus_base: str, cumulus_token: str):
        super().__init__(cumulus_base, cumulus_token)
        self._conditions.append('status=completed')
        self._item_transformer = ItemTransformer()
        self.__collection_id = None

    def with_filter(self, filter_key, filter_values: list):
        if len(filter_values) < 1:
@@ -34,6 +35,7 @@ def with_filter(self, filter_key, filter_values: list):

    def with_collection_id(self, collection_id: str):
        self._conditions.append(f'{self.__collection_id_key}={collection_id}')
        self.__collection_id = collection_id
        return self

    def with_bbox(self):
@@ -130,6 +132,48 @@ def query_direct_to_private_api(self, private_api_prefix: str, transform=True):
            return {'server_error': f'error while invoking:{str(e)}'}
        return {'results': stac_list}

    def delete_entry(self, private_api_prefix: str, granule_id: str):
        payload = {
            'httpMethod': 'DELETE',
            'resource': '/{proxy+}',
            'path': f'/{self.__granules_key}/{self.__collection_id}/{granule_id}',
            'queryStringParameters': {**{k[0]: k[1] for k in [k1.split('=') for k1 in self._conditions]}},
            # 'queryStringParameters': {'limit': '30'},
            'headers': {
                'Content-Type': 'application/json',
            },
            # 'body': json.dumps({"action": "removeFromCmr"})
        }
        LOGGER.debug(f'payload: {payload}')
        try:
            query_result = self._invoke_api(payload, private_api_prefix)
            """
            {'statusCode': 200, 'body': '{"meta":{"name":"cumulus-api","stack":"am-uds-dev-cumulus","table":"granule","limit":3,"page":1,"count":0},"results":[]}', 'headers': {'x-powered-by': 'Express', 'access-control-allow-origin': '*', 'strict-transport-security': 'max-age=31536000; includeSubDomains', 'content-type': 'application/json; charset=utf-8', 'content-length': '120', 'etag': 'W/"78-YdHqDNIH4LuOJMR39jGNA/23yOQ"', 'date': 'Tue, 07 Jun 2022 22:30:44 GMT', 'connection': 'close'}, 'isBase64Encoded': False}
            """
            if query_result['statusCode'] >= 500:
                LOGGER.error(f'server error status code: {query_result["statusCode"]}. details: {query_result}')
                return {'server_error': query_result}
            if query_result['statusCode'] >= 400:
                LOGGER.error(f'client error status code: {query_result["statusCode"]}. details: {query_result}')
                return {'client_error': query_result}
            query_result = json.loads(query_result['body'])
            LOGGER.info(f'json query_result: {query_result}')
            """
            {
                "detail": "Record deleted"
            }
            """
            if 'detail' not in query_result:
                LOGGER.error(f'missing key: detail. invalid response json: {query_result}')
                return {'server_error': f'missing key: detail. invalid response json: {query_result}'}
            if query_result['detail'] != 'Record deleted':
                LOGGER.error(f'Wrong Message: {query_result}')
                return {'server_error': f'Wrong Message: {query_result}'}
        except Exception as e:
            LOGGER.exception('error while invoking')
            return {'server_error': f'error while invoking:{str(e)}'}
        return {}

    def query(self, transform=True):
        conditions_str = '&'.join(self._conditions)
        LOGGER.info(f'cumulus_base: {self.cumulus_base}')
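
For reference, a minimal usage sketch of the new delete_entry call, mirroring how granules_api.py below wires it up. The base URL and token are the same placeholders used in the endpoint code, since the request is routed through the Cumulus private API Lambda identified by CUMULUS_LAMBDA_PREFIX; the collection and granule ids here are purely illustrative. Note that with_collection_id both records the id for the DELETE path and appends it as a query condition, and the conditions list is folded into queryStringParameters by splitting each 'key=value' entry.

import os

from cumulus_lambda_functions.cumulus_wrapper.query_granules import GranulesQuery

# base URL and token are placeholders; the private-API path only needs the Lambda prefix
cumulus = GranulesQuery('https://na/dev', 'NA')
cumulus.with_collection_id('URN:NASA:UNITY:TENANT:DEV:SAMPLE___001')  # illustrative collection id
result = cumulus.delete_entry(os.getenv('CUMULUS_LAMBDA_PREFIX'), 'URN:NASA:UNITY:TENANT:DEV:SAMPLE___001:granule01')  # illustrative granule id
# {} on success; a 'client_error' or 'server_error' key carries the failure details
print(result)
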
17 changes: 17 additions & 0 deletions cumulus_lambda_functions/lib/uds_db/granules_db_index.py
@@ -201,6 +201,23 @@ def get_entry(self, tenant: str, tenant_venue: str, doc_id: str, ):
            raise ValueError(f"no such granule: {doc_id}")
        return result

    def delete_entry(self, tenant: str, tenant_venue: str, doc_id: str, ):
        read_alias_name = f'{DBConstants.granules_read_alias_prefix}_{tenant}_{tenant_venue}'.lower().strip()
        result = self.__es.query({
            'size': 9999,
            'query': {'term': {'_id': doc_id}}
        }, read_alias_name)
        if result is None:
            raise ValueError(f"no such granule: {doc_id}")
        for each_granule in result['hits']['hits']:
            delete_result = self.__es.delete_by_query({
                'query': {'term': {'_id': each_granule['_id']}}
            }, each_granule['_index'])
            LOGGER.debug(f'delete_result: {delete_result}')
            if delete_result is None:
                raise ValueError(f"error deleting {each_granule}")
        return result

    def update_entry(self, tenant: str, tenant_venue: str, json_body: dict, doc_id: str, ):
        write_alias_name = f'{DBConstants.granules_write_alias_prefix}_{tenant}_{tenant_venue}'.lower().strip()
        json_body['event_time'] = TimeUtils.get_current_unix_milli()
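
A minimal usage sketch of the new GranulesDbIndex.delete_entry: it resolves the tenant/venue read alias, finds every index that holds the document id, issues a delete_by_query per hit, and returns the original search result so callers can see what was removed. The tenant, venue, and granule id below are the illustrative values from the integration test added in this commit.

from cumulus_lambda_functions.lib.uds_db.granules_db_index import GranulesDbIndex

deleted = GranulesDbIndex().delete_entry(
    tenant='UDS_LOCAL_TEST_3',
    tenant_venue='DEV',
    doc_id='URN:NASA:UNITY:UDS_LOCAL_TEST_3:DEV:AAA-01___001:test_file01',
)
# documents that matched the id across the alias, as returned before deletion
print([hit['_id'] for hit in deleted['hits']['hits']])
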
50 changes: 50 additions & 0 deletions cumulus_lambda_functions/uds_api/granules_api.py
@@ -2,6 +2,11 @@
import os
from typing import Union

from mdps_ds_lib.lib.aws.aws_s3 import AwsS3
from pystac import Item

from cumulus_lambda_functions.cumulus_wrapper.query_granules import GranulesQuery

from cumulus_lambda_functions.daac_archiver.daac_archiver_logic import DaacArchiverLogic
from cumulus_lambda_functions.uds_api.dapa.daac_archive_crud import DaacArchiveCrud, DaacDeleteModel, DaacAddModel, \
    DaacUpdateModel
@@ -239,6 +244,51 @@ async def get_single_granule_dapa(request: Request, collection_id: str, granule_
        raise HTTPException(status_code=500, detail=str(e))
    return granules_result

@router.delete("/{collection_id}/items/{granule_id}")
@router.delete("/{collection_id}/items/{granule_id}/")
async def delete_single_granule_dapa(request: Request, collection_id: str, granule_id: str, delete_files: bool = True):
    authorizer: UDSAuthorizorAbstract = UDSAuthorizerFactory() \
        .get_instance(UDSAuthorizerFactory.cognito,
                      es_url=os.getenv('ES_URL'),
                      es_port=int(os.getenv('ES_PORT', '443'))
                      )
    auth_info = FastApiUtils.get_authorization_info(request)
    collection_identifier = UdsCollections.decode_identifier(collection_id)
    if not authorizer.is_authorized_for_collection(DBConstants.read, collection_id,
                                                   auth_info['ldap_groups'],
                                                   collection_identifier.tenant,
                                                   collection_identifier.venue):
        LOGGER.debug(f'user: {auth_info["username"]} is not authorized for {collection_id}')
        raise HTTPException(status_code=403, detail=json.dumps({
            'message': 'not authorized to execute this action'
        }))
    try:
        LOGGER.debug(f'deleting granule: {granule_id}')
        cumulus_lambda_prefix = os.getenv('CUMULUS_LAMBDA_PREFIX')
        cumulus = GranulesQuery('https://na/dev', 'NA')
        cumulus.with_collection_id(collection_id)
        cumulus_delete_result = cumulus.delete_entry(cumulus_lambda_prefix, granule_id)  # TODO not sure it is correct granule ID
        LOGGER.debug(f'cumulus_delete_result: {cumulus_delete_result}')
        es_delete_result = GranulesDbIndex().delete_entry(collection_identifier.tenant,
                                                          collection_identifier.venue,
                                                          granule_id
                                                          )
        LOGGER.debug(f'es_delete_result: {es_delete_result}')
        es_delete_result = [Item.from_dict(k['_source']) for k in es_delete_result['hits']['hits']]
        if delete_files is False:
            LOGGER.debug('Not deleting files as it is set to false in the request')
            return {}
        s3 = AwsS3()
        for each_granule in es_delete_result:
            s3_urls = [v.href for k, v in each_granule.assets.items()]
            LOGGER.debug(f'deleting S3 for {each_granule.id} - s3_urls: {s3_urls}')
            delete_result = s3.delete_multiple(s3_urls=s3_urls)
            LOGGER.debug(f'delete_result for {each_granule.id} - delete_result: {delete_result}')
    except Exception as e:
        LOGGER.exception('failed during delete_single_granule_dapa')
        raise HTTPException(status_code=500, detail=str(e))
    return {}


@router.put("/{collection_id}/archive/{granule_id}")
@router.put("/{collection_id}/archive/{granule_id}/")
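
A hedged sketch of calling the new DELETE endpoint from a client, following the pattern in tests/integration_tests/test_granules_deletion.py; the host, stage, and token are placeholders. delete_files defaults to True, so passing delete_files=false removes the Cumulus and Elasticsearch records while leaving the granule's S3 assets in place.

import requests

collection_id = 'URN:NASA:UNITY:UDS_LOCAL_TEST_3:DEV:AAA-01___001'
granule_id = f'{collection_id}:test_file01'
delete_url = f'https://<unity-host>/<stage>/collections/{collection_id}/items/{granule_id}'  # placeholder host/stage

response = requests.delete(
    url=delete_url,
    headers={'Authorization': 'Bearer <cognito-token>'},  # placeholder Cognito bearer token
    params={'delete_files': 'false'},  # optional; omit to also delete the S3 files
)
print(response.status_code, response.text)
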
2 changes: 1 addition & 1 deletion requirements.txt
@@ -15,7 +15,7 @@ jsonschema==4.23.0
jsonschema-specifications==2023.12.1
lark==0.12.0
mangum==0.18.0
mdps-ds-lib==1.1.1
mdps-ds-lib==1.1.1.dev000200
pydantic==2.9.2
pydantic_core==2.23.4
pygeofilter==0.2.4
68 changes: 68 additions & 0 deletions tests/integration_tests/test_granules_deletion.py
@@ -0,0 +1,68 @@
import base64
import json
import os
from unittest import TestCase

import requests
from dotenv import load_dotenv
from mdps_ds_lib.lib.cognito_login.cognito_login import CognitoLogin


class TestGranulesDeletion(TestCase):
    def setUp(self) -> None:
        super().setUp()
        load_dotenv()
        self._url_prefix = f'{os.environ.get("UNITY_URL")}/{os.environ.get("UNITY_STAGE", "sbx-uds-dapa")}'
        self.cognito_login = CognitoLogin() \
            .with_client_id(os.environ.get('CLIENT_ID', '')) \
            .with_cognito_url(os.environ.get('COGNITO_URL', '')) \
            .with_verify_ssl(False) \
            .start(base64.standard_b64decode(os.environ.get('USERNAME')).decode(),
                   base64.standard_b64decode(os.environ.get('PASSWORD')).decode())
        self.bearer_token = self.cognito_login.token
        self.stage = os.environ.get("UNITY_URL").split('/')[-1]
        self.uds_url = f'{os.environ.get("UNITY_URL")}/{os.environ.get("UNITY_STAGE", "sbx-uds-dapa")}/'
        self.custom_metadata_body = {
            'tag': {'type': 'keyword'},
            'c_data1': {'type': 'long'},
            'c_data2': {'type': 'boolean'},
            'c_data3': {'type': 'keyword'},
        }

        self.tenant = 'UDS_LOCAL_TEST_3'  # 'uds_local_test' # 'uds_sandbox'
        self.tenant_venue = 'DEV'  # 'DEV1' # 'dev'
        self.collection_name = 'AAA'  # 'uds_collection' # 'sbx_collection'
        self.collection_version = '24.03.20.14.40'.replace('.', '')  # '2402011200'
        return

    def test_query_all(self):
        collection_id = f'URN:NASA:UNITY:{self.tenant}:{self.tenant_venue}:{self.collection_name}-01___001'
        granule_id = f'{collection_id}:test_file01'
        post_url = f'{self.uds_url}collections/{collection_id}/items/'  # MCP Dev
        headers = {
            'Authorization': f'Bearer {self.bearer_token}',
        }
        print(post_url)
        query_result = requests.get(url=post_url,
                                    headers=headers,
                                    )
        self.assertEqual(query_result.status_code, 200, f'wrong status code. {query_result.text}')
        response_json = json.loads(query_result.text)
        print(json.dumps(response_json, indent=4))
        return

    def test_delete_01(self):
        collection_id = f'URN:NASA:UNITY:{self.tenant}:{self.tenant_venue}:{self.collection_name}-01___001'
        granule_id = f'{collection_id}:test_file01'
        post_url = f'{self.uds_url}collections/{collection_id}/items/{granule_id}'  # MCP Dev
        headers = {
            'Authorization': f'Bearer {self.bearer_token}',
        }
        print(post_url)
        query_result = requests.delete(url=post_url,
                                       headers=headers,
                                       )
        self.assertEqual(query_result.status_code, 200, f'wrong status code. {query_result.text}')
        response_json = json.loads(query_result.text)
        print(json.dumps(response_json, indent=4))
        return
