Skip to content

Commit

Permalink
invisible files
Browse files Browse the repository at this point in the history
  • Loading branch information
sofiaqiriazi committed Jul 28, 2022
1 parent c333215 commit 3799877
Show file tree
Hide file tree
Showing 87 changed files with 119,327 additions and 0 deletions.
Binary file added East and Horn of Africa, .docx
Binary file not shown.
Binary file added Finding-the-Nexus/.DS_Store
Binary file not shown.
202 changes: 202 additions & 0 deletions Finding-the-Nexus/Bruno_parser/example_m2m.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,202 @@
# =============================================================================
# USGS/EROS Inventory Service Example
# Python - JSON API
#
# Script Last Modified: 6/17/2020
# Note: This example does not include any error handling!
# Any request can throw an error, which can be found in the errorCode property of
# the response (errorCode, errorMessage, and data properties are included in all responses).
# These types of checks could be done by writing a wrapper similar to the sendRequest function below
# Usage: python download_data.py -u username -p password
# =============================================================================

import json
import requests
import sys
import time
import argparse

# send http request
def sendRequest(url, data, apiKey = None):
    """POST a JSON payload to an M2M endpoint and return its 'data' field.

    Parameters
    ----------
    url : str
        Full endpoint URL (service URL plus route name, e.g. ``.../login``).
    data : dict or None
        Request payload; serialized to JSON before sending.
    apiKey : str, optional
        M2M API token. When given it is sent in the ``X-Auth-Token`` header;
        the ``login`` call itself is made without a token.

    Exits the process (mirroring the original example's behavior) on any
    transport error, non-success HTTP status, or when the service reports
    an in-band ``errorCode``.
    """
    json_data = json.dumps(data)

    # Only attach the auth header once we actually have a key.
    if apiKey is None:
        response = requests.post(url, json_data)
    else:
        headers = {'X-Auth-Token': apiKey}
        response = requests.post(url, json_data, headers = headers)

    try:
        httpStatusCode = response.status_code
        # Check the transport-level status BEFORE parsing the body; the
        # original parsed first, so an HTML error page would surface as a
        # confusing JSON decode error instead of the real HTTP failure.
        # (The original also tested `response == None` after already reading
        # response.status_code - requests.post never returns None, so that
        # dead check is dropped.)
        if httpStatusCode == 404:
            print("404 Not Found")
            sys.exit()
        elif httpStatusCode == 401:
            print("401 Unauthorized")
            sys.exit()
        elif httpStatusCode == 400:
            print("Error Code", httpStatusCode)
            sys.exit()
        output = json.loads(response.text)
        # Service-level failures come back in-band as errorCode/errorMessage.
        if output['errorCode'] is not None:
            print(output['errorCode'], "- ", output['errorMessage'])
            sys.exit()
    except Exception as e:
        # sys.exit() raises SystemExit (a BaseException), so it is NOT
        # swallowed here; only genuine parse/transport errors land in this
        # branch.
        print(e)
        sys.exit()
    finally:
        # Always release the connection - the original leaked it on the
        # sys.exit() paths inside the try block.
        response.close()

    return output['data']


if __name__ == '__main__':
    # NOTE :: Passing credentials over a command line argument is not considered secure
    #        and is used only for the purpose of being example - credential parameters
    #        should be gathered in a more secure way for production usage
    # Define the command line arguments

    # user input
    parser = argparse.ArgumentParser()
    parser.add_argument('-u', '--username', required=True, help='Username')
    parser.add_argument('-p', '--password', required=True, help='Password')

    args = parser.parse_args()

    username = args.username
    password = args.password

    print("\nRunning Scripts...\n")

    serviceUrl = "https://m2m.cr.usgs.gov/api/api/json/stable/"

    # login: exchanges credentials for a short-lived API key used by all
    # subsequent requests.
    payload = {'username' : username, 'password' : password}

    apiKey = sendRequest(serviceUrl + "login", payload)

    print("API Key: " + apiKey + "\n")

    datasetName = "gls_all"

    # Minimum-bounding-rectangle spatial filter. For an 'mbr' filter,
    # lowerLeft is the south-west corner and upperRight the north-east
    # corner. BUGFIX: the original had the longitudes swapped (lowerLeft at
    # -120 but upperRight at -140, placing the "upper right" corner WEST of
    # the "lower left"), which describes an invalid rectangle.
    spatialFilter = {'filterType' : "mbr",
                     'lowerLeft' : {'latitude' : 30, 'longitude' : -140},
                     'upperRight' : { 'latitude' : 40, 'longitude' : -120}}

    temporalFilter = {'start' : '2000-12-10', 'end' : '2005-12-10'}

    payload = {'datasetName' : datasetName,
               'spatialFilter' : spatialFilter,
               'temporalFilter' : temporalFilter}

    print("Searching datasets...\n")
    datasets = sendRequest(serviceUrl + "dataset-search", payload, apiKey)

    print("Found ", len(datasets), " datasets\n")

    # download datasets
    for dataset in datasets:

        # Because I've ran this before I know that I want GLS_ALL, I don't want to download anything I don't
        # want so we will skip any other datasets that might be found, logging it in case I want to look into
        # downloading that data in the future.
        if dataset['datasetAlias'] != datasetName:
            print("Found dataset " + dataset['collectionName'] + " but skipping it.\n")
            continue

        # I don't want to limit my results, but using the dataset-filters request, you can
        # find additional filters

        acquisitionFilter = {"end": "2005-12-10",
                             "start": "2000-12-10" }

        # maxResults is kept small (2) so this example stays quick.
        payload = {'datasetName' : dataset['datasetAlias'],
                   'maxResults' : 2,
                   'startingNumber' : 1,
                   'sceneFilter' : {
                       'spatialFilter' : spatialFilter,
                       'acquisitionFilter' : acquisitionFilter}}

        # Now I need to run a scene search to find data to download
        print("Searching scenes...\n\n")

        scenes = sendRequest(serviceUrl + "scene-search", payload, apiKey)

        # Did we find anything?
        if scenes['recordsReturned'] > 0:
            # Aggregate a list of scene ids
            sceneIds = []
            for result in scenes['results']:
                # Add this scene to the list I would like to download
                sceneIds.append(result['entityId'])

            # Find the download options for these scenes
            # NOTE :: Remember the scene list cannot exceed 50,000 items!
            payload = {'datasetName' : dataset['datasetAlias'], 'entityIds' : sceneIds}

            downloadOptions = sendRequest(serviceUrl + "download-options", payload, apiKey)

            # Aggregate a list of available products
            downloads = []
            for product in downloadOptions:
                # Make sure the product is available for this scene
                if product['available'] == True:
                    downloads.append({'entityId' : product['entityId'],
                                      'productId' : product['id']})

            # Did we find products?
            if downloads:
                requestedDownloadsCount = len(downloads)
                # set a label for the download request
                label = "download-sample"
                payload = {'downloads' : downloads,
                           'label' : label}
                # Call the download to get the direct download urls
                requestResults = sendRequest(serviceUrl + "download-request", payload, apiKey)

                # PreparingDownloads has a valid link that can be used but data may not be immediately available
                # Call the download-retrieve method to get download that is available for immediate download
                if requestResults['preparingDownloads'] is not None and len(requestResults['preparingDownloads']) > 0:
                    payload = {'label' : label}
                    moreDownloadUrls = sendRequest(serviceUrl + "download-retrieve", payload, apiKey)

                    downloadIds = []

                    for download in moreDownloadUrls['available']:
                        downloadIds.append(download['downloadId'])
                        print("DOWNLOAD: " + download['url'])

                    for download in moreDownloadUrls['requested']:
                        downloadIds.append(download['downloadId'])
                        print("DOWNLOAD: " + download['url'])

                    # Didn't get all of the requested downloads, call the download-retrieve method again probably after 30 seconds
                    while len(downloadIds) < requestedDownloadsCount:
                        preparingDownloads = requestedDownloadsCount - len(downloadIds)
                        print("\n", preparingDownloads, "downloads are not available. Waiting for 30 seconds.\n")
                        time.sleep(30)
                        print("Trying to retrieve data\n")
                        moreDownloadUrls = sendRequest(serviceUrl + "download-retrieve", payload, apiKey)
                        for download in moreDownloadUrls['available']:
                            # Guard against duplicates across polling rounds.
                            if download['downloadId'] not in downloadIds:
                                downloadIds.append(download['downloadId'])
                                print("DOWNLOAD: " + download['url'])

                else:
                    # Get all available downloads
                    for download in requestResults['availableDownloads']:
                        # TODO :: Implement a downloading routine
                        print("DOWNLOAD: " + download['url'])
                print("\nAll downloads are available to download.\n")
        else:
            print("Search found no results.\n")

    # Logout so the API Key cannot be used anymore
    endpoint = "logout"
    if sendRequest(serviceUrl + endpoint, None, apiKey) is None:
        print("Logged Out\n\n")
    else:
        print("Logout Failed\n\n")
Loading

0 comments on commit 3799877

Please sign in to comment.