Skip to content

Commit

Permalink
Merge pull request #2528 from ASFHyP3/add-ruff
Browse files Browse the repository at this point in the history
Switch to `ruff` from `flake8` for Static Analysis
  • Loading branch information
jtherrmann authored Dec 17, 2024
2 parents ea769ef + 837eed4 commit 1053a91
Show file tree
Hide file tree
Showing 45 changed files with 626 additions and 698 deletions.
15 changes: 3 additions & 12 deletions .github/workflows/static-analysis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -3,18 +3,9 @@ name: Static code analysis
on: push

jobs:
flake8:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4.2.2
- uses: actions/setup-python@v5
with:
python-version: 3.9
- run: |
python -m pip install --upgrade pip
make install
- run: make flake8
call-ruff-workflow:
# Docs: https://github.com/ASFHyP3/actions
uses: ASFHyP3/actions/.github/workflows/reusable-ruff.yml@v0.12.0

cfn-lint:
runs-on: ubuntu-latest
Expand Down
3 changes: 3 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -138,3 +138,6 @@ dmypy.json

# Pyre type checker
.pyre/

# vim
*.swp
5 changes: 5 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,11 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [9.1.1]

### Changed
- The [`static-analysis`](.github/workflows/static-analysis.yml) GitHub Actions workflow now uses `ruff` rather than `flake8` for linting.

## [9.1.0]

### Added
Expand Down
6 changes: 3 additions & 3 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -44,10 +44,10 @@ cost_profile ?= DEFAULT
render:
@echo rendering $(files) for API $(api_name) and security environment $(security_environment); python apps/render_cf.py -j $(files) -e $(compute_env_file) -s $(security_environment) -n $(api_name) -c $(cost_profile)

static: flake8 openapi-validate cfn-lint
static: ruff openapi-validate cfn-lint

flake8:
flake8 --ignore=E731 --max-line-length=120 --import-order-style=pycharm --statistics --application-import-names hyp3_api,get_files,handle_batch_event,set_batch_overrides,check_processing_time,start_execution_manager,start_execution_worker,disable_private_dns,update_db,upload_log,dynamo,lambda_logging,scale_cluster apps tests lib
ruff:
ruff check . && ruff format --diff .

openapi-validate: render
openapi-spec-validator apps/api/src/hyp3_api/api-spec/openapi-spec.yml
Expand Down
1 change: 1 addition & 0 deletions apps/api/src/hyp3_api/__init__.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
from flask import Flask


app = Flask(__name__, template_folder='ui/swagger/', static_folder='ui/swagger/', static_url_path='/ui/')
CMR_URL = 'https://cmr.earthdata.nasa.gov/search/granules.json'

Expand Down
3 changes: 2 additions & 1 deletion apps/api/src/hyp3_api/__main__.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
from hyp3_api import app

if __name__ == "__main__":

if __name__ == '__main__':
app.run(port=8080)
12 changes: 4 additions & 8 deletions apps/api/src/hyp3_api/handlers.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,12 +10,7 @@


def problem_format(status, message):
response = jsonify({
'status': status,
'detail': message,
'title': responses[status],
'type': 'about:blank'
})
response = jsonify({'status': status, 'detail': message, 'title': responses[status], 'type': 'about:blank'})
response.headers['Content-Type'] = 'application/problem+json'
response.status_code = status
return response
Expand Down Expand Up @@ -49,8 +44,9 @@ def get_jobs(user, start=None, end=None, status_code=None, name=None, job_type=N
payload = {'jobs': jobs}
if last_evaluated_key is not None:
next_token = util.serialize(last_evaluated_key)
payload['next'] = util.build_next_url(request.url, next_token, request.headers.get('X-Forwarded-Host'),
request.root_path)
payload['next'] = util.build_next_url(
request.url, next_token, request.headers.get('X-Forwarded-Host'), request.root_path
)
return payload


Expand Down
17 changes: 8 additions & 9 deletions apps/api/src/hyp3_api/routes.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
from hyp3_api import app, auth, handlers
from hyp3_api.openapi import get_spec_yaml


api_spec_file = Path(__file__).parent / 'api-spec' / 'openapi-spec.yml'
api_spec_dict = get_spec_yaml(api_spec_file)
api_spec = OpenAPI.from_dict(api_spec_dict)
Expand All @@ -26,14 +27,9 @@

@app.before_request
def check_system_available():
if environ['SYSTEM_AVAILABLE'] != "true":
if environ['SYSTEM_AVAILABLE'] != 'true':
message = 'HyP3 is currently unavailable. Please try again later.'
error = {
'detail': message,
'status': 503,
'title': 'Service Unavailable',
'type': 'about:blank'
}
error = {'detail': message, 'status': 503, 'title': 'Service Unavailable', 'type': 'about:blank'}
return make_response(jsonify(error), 503)


Expand Down Expand Up @@ -71,8 +67,11 @@ def render_ui():

@app.errorhandler(404)
def error404(_):
return handlers.problem_format(404, 'The requested URL was not found on the server.'
' If you entered the URL manually please check your spelling and try again.')
return handlers.problem_format(
404,
'The requested URL was not found on the server.'
' If you entered the URL manually please check your spelling and try again.',
)


class CustomEncoder(json.JSONEncoder):
Expand Down
14 changes: 6 additions & 8 deletions apps/api/src/hyp3_api/validation.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@
from hyp3_api import CMR_URL
from hyp3_api.util import get_granules


DEM_COVERAGE = None


Expand Down Expand Up @@ -57,8 +58,9 @@ def get_cmr_metadata(granules):
granules = [
{
'name': entry.get('producer_granule_id', entry.get('title')),
'polygon': Polygon(format_points(entry['polygons'][0][0]))
} for entry in response.json()['feed']['entry']
'polygon': Polygon(format_points(entry['polygons'][0][0])),
}
for entry in response.json()['feed']['entry']
]
return granules

Expand Down Expand Up @@ -93,9 +95,7 @@ def check_same_burst_ids(job, _):
)
for i in range(len(ref_ids)):
if ref_ids[i] != sec_ids[i]:
raise GranuleValidationError(
f'Burst IDs do not match for {refs[i]} and {secs[i]}.'
)
raise GranuleValidationError(f'Burst IDs do not match for {refs[i]} and {secs[i]}.')
if len(set(ref_ids)) != len(ref_ids):
duplicate_pair_id = next(ref_id for ref_id in ref_ids if ref_ids.count(ref_id) > 1)
raise GranuleValidationError(
Expand Down Expand Up @@ -174,9 +174,7 @@ def check_granules_intersecting_bounds(job, granule_metadata):
if not bbox.intersection(bounds):
bad_granules.append(granule['name'])
if bad_granules:
raise GranuleValidationError(
f'The following granules do not intersect the provided bounds: {bad_granules}.'
)
raise GranuleValidationError(f'The following granules do not intersect the provided bounds: {bad_granules}.')


def check_same_relative_orbits(job, granule_metadata):
Expand Down
2 changes: 1 addition & 1 deletion apps/disable-private-dns/src/disable_private_dns.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ def set_private_dns_disabled(endpoint_id):
response = CLIENT.modify_vpc_endpoint(VpcEndpointId=endpoint_id, PrivateDnsEnabled=False)
# https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/ec2/client/modify_vpc_endpoint.html
assert response['Return'] is True, response
print(f"Private DNS disabled for VPC Endpoint: {endpoint_id}.")
print(f'Private DNS disabled for VPC Endpoint: {endpoint_id}.')


def disable_private_dns(vpc_id, endpoint_name):
Expand Down
41 changes: 24 additions & 17 deletions apps/get-files/src/get_files.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@

import boto3


S3_CLIENT = boto3.client('s3')


Expand Down Expand Up @@ -40,12 +41,16 @@ def visible_product(product_path: Union[str, Path]) -> bool:


def get_products(files):
return [{
'url': item['download_url'],
'size': item['size'],
'filename': item['filename'],
's3': item['s3'],
} for item in files if item['file_type'] == 'product' and visible_product(item['filename'])]
return [
{
'url': item['download_url'],
'size': item['size'],
'filename': item['filename'],
's3': item['s3'],
}
for item in files
if item['file_type'] == 'product' and visible_product(item['filename'])
]


def get_file_urls_by_type(file_list, file_type):
Expand All @@ -61,16 +66,18 @@ def organize_files(files_dict, bucket):
for item in files_dict:
download_url = get_download_url(bucket, item['Key'])
file_type = get_object_file_type(bucket, item['Key'])
all_files.append({
'download_url': download_url,
'file_type': file_type,
'size': item['Size'],
'filename': basename(item['Key']),
's3': {
'bucket': bucket,
'key': item['Key'],
},
})
all_files.append(
{
'download_url': download_url,
'file_type': file_type,
'size': item['Size'],
'filename': basename(item['Key']),
's3': {
'bucket': bucket,
'key': item['Key'],
},
}
)
if expiration is None and file_type in ['product', 'log']:
expiration = get_expiration_time(bucket, item['Key'])

Expand All @@ -79,7 +86,7 @@ def organize_files(files_dict, bucket):
'browse_images': get_file_urls_by_type(all_files, 'browse'),
'thumbnail_images': get_file_urls_by_type(all_files, 'thumbnail'),
'logs': get_file_urls_by_type(all_files, 'log'),
'expiration_time': expiration
'expiration_time': expiration,
}


Expand Down
42 changes: 12 additions & 30 deletions apps/render_cf.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,9 +37,7 @@ def get_state_for_job_step(step: dict, index: int, next_state_name: str, job_spe
{
'Catch': [
{
'ErrorEquals': [
'States.ALL'
],
'ErrorEquals': ['States.ALL'],
'ResultPath': f'$.results.processing_results.step_{index}',
'Next': 'PROCESSING_FAILED',
},
Expand Down Expand Up @@ -72,8 +70,8 @@ def get_map_state(job_spec: dict, step: dict) -> dict:
'StartAt': submit_job_state_name,
'States': {
submit_job_state_name: submit_job_state,
}
}
},
},
}


Expand All @@ -98,29 +96,16 @@ def get_batch_submit_job_state(job_spec: dict, step: dict, filter_batch_params=F
'SchedulingPriorityOverride.$': '$.priority',
parameters_key: batch_job_parameters,
'ContainerOverrides.$': '$.container_overrides',
'RetryStrategy': {
'Attempts': 3
},
'RetryStrategy': {'Attempts': 3},
},
'ResultSelector': {
'StartedAt.$': '$.StartedAt',
'StoppedAt.$': '$.StoppedAt',
},
'Retry': [
{
'ErrorEquals': [
'Batch.ServerException',
'Batch.AWSBatchException'
],
'MaxAttempts': 2
},
{
'ErrorEquals': [
'States.ALL'
],
'MaxAttempts': 0
}
]
{'ErrorEquals': ['Batch.ServerException', 'Batch.AWSBatchException'], 'MaxAttempts': 2},
{'ErrorEquals': ['States.ALL'], 'MaxAttempts': 0},
],
}


Expand Down Expand Up @@ -151,11 +136,7 @@ def get_batch_job_parameters(job_spec: dict, step: dict, map_item: str = None) -

def get_batch_param_names_for_job_step(step: dict) -> set[str]:
ref_prefix = 'Ref::'
return {
arg.removeprefix(ref_prefix)
for arg in step['command']
if arg.startswith(ref_prefix)
}
return {arg.removeprefix(ref_prefix) for arg in step['command'] if arg.startswith(ref_prefix)}


def render_templates(job_types: dict, compute_envs: dict, security_environment: str, api_name: str):
Expand All @@ -170,7 +151,7 @@ def render_templates(job_types: dict, compute_envs: dict, security_environment:
keep_trailing_newline=True,
)

for template_file in Path('.').glob('**/*.j2'):
for template_file in Path().glob('**/*.j2'):
template = env.get_template(str(template_file))

output = template.render(
Expand Down Expand Up @@ -217,7 +198,8 @@ def render_batch_params_by_job_type(job_types: dict) -> None:
def render_default_params_by_job_type(job_types: dict) -> None:
default_params_by_job_type = {
job_type: {
key: value['api_schema']['default'] for key, value in job_spec['parameters'].items()
key: value['api_schema']['default']
for key, value in job_spec['parameters'].items()
if key not in job_spec['required_parameters']
}
for job_type, job_spec in job_types.items()
Expand Down Expand Up @@ -255,7 +237,7 @@ def validate_job_spec(job_type: str, job_spec: dict) -> None:
if actual_param_fields != expected_param_fields:
raise ValueError(
f"parameter '{param_name}' for {job_type} has fields {actual_param_fields} "
f"but should have {expected_param_fields}"
f'but should have {expected_param_fields}'
)


Expand Down
Loading

0 comments on commit 1053a91

Please sign in to comment.