
Commit

Merge pull request #685 from ASFHyP3/develop
Release v0.3.6
AndrewPlayer3 authored Dec 17, 2024
2 parents 1e26542 + 5b0ea88 commit 5508e80
Showing 22 changed files with 275 additions and 217 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/changelog.yml
@@ -13,4 +13,4 @@ on:
 
 jobs:
   call-changelog-check-workflow:
-    uses: ASFHyP3/actions/.github/workflows/reusable-changelog-check.yml@v0.11.2
+    uses: ASFHyP3/actions/.github/workflows/reusable-changelog-check.yml@v0.12.0
2 changes: 1 addition & 1 deletion .github/workflows/create-jira-issue.yml
@@ -6,7 +6,7 @@ on:
 
 jobs:
   call-create-jira-issue-workflow:
-    uses: ASFHyP3/actions/.github/workflows/reusable-create-jira-issue.yml@v0.11.2
+    uses: ASFHyP3/actions/.github/workflows/reusable-create-jira-issue.yml@v0.12.0
     secrets:
       JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }}
       JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }}
2 changes: 1 addition & 1 deletion .github/workflows/deploy-stac-prod.yml
@@ -31,6 +31,6 @@ jobs:
 
   call-bump-version-workflow:
     needs: deploy-stac-api
-    uses: ASFHyP3/actions/.github/workflows/reusable-bump-version.yml@v0.11.2
+    uses: ASFHyP3/actions/.github/workflows/reusable-bump-version.yml@v0.12.0
     secrets:
       USER_TOKEN: ${{ secrets.TOOLS_BOT_PAK }}
2 changes: 1 addition & 1 deletion .github/workflows/labeled-pr.yml
@@ -12,4 +12,4 @@ on:
 
 jobs:
   call-labeled-pr-check-workflow:
-    uses: ASFHyP3/actions/.github/workflows/reusable-labeled-pr-check.yml@v0.11.2
+    uses: ASFHyP3/actions/.github/workflows/reusable-labeled-pr-check.yml@v0.12.0
2 changes: 1 addition & 1 deletion .github/workflows/release.yml
@@ -7,7 +7,7 @@ on:
 
 jobs:
   call-release-workflow:
-    uses: ASFHyP3/actions/.github/workflows/reusable-release.yml@v0.11.2
+    uses: ASFHyP3/actions/.github/workflows/reusable-release.yml@v0.12.0
     with:
       release_prefix: ASF STAC
     secrets:
12 changes: 3 additions & 9 deletions .github/workflows/static-analysis.yml
@@ -3,15 +3,9 @@ name: Static code analysis
 on: push
 
 jobs:
-  flake8:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-python@v5
-        with:
-          python-version: 3.9
-      - run: make install
-      - run: make flake8
+  call-ruff-workflow:
+    # Docs: https://github.com/ASFHyP3/actions
+    uses: ASFHyP3/actions/.github/workflows/reusable-ruff.yml@v0.12.0
 
   cfn-lint:
     runs-on: ubuntu-latest
2 changes: 1 addition & 1 deletion .github/workflows/tests.yml
@@ -12,7 +12,7 @@ jobs:
     steps:
       - uses: actions/checkout@v4
 
-      - uses: mamba-org/setup-micromamba@v1
+      - uses: mamba-org/setup-micromamba@v2
         with:
           environment-file: environment.yml
 
5 changes: 4 additions & 1 deletion CHANGELOG.md
@@ -4,6 +4,10 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
+## [0.3.6]
+### Changed
+- The [`static-analysis`](.github/workflows/static-analysis.yml) Github Actions workflow now uses `ruff` rather than `flake8` for linting.
+
 ## [0.3.5]
 ### Changed
 - Dependency upgrades.
@@ -12,7 +16,6 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 ### Changed
 - HAND license changed to CC0 from CCBy 4.0 in `collections/glo-30-hand/glo-30-hand.json` to match NASA data publishing guidelines.
 
-
 ## [0.3.3]
 ### Changed
 - Updated GitHub actions to use `setup-micromamba` instead of `setup-miniconda`
5 changes: 0 additions & 5 deletions Makefile
@@ -58,11 +58,6 @@ run-api:
 test:
     PYTHONPATH=${PWD}/collections/sentinel-1-global-coherence/:${PWD}/collections/glo-30-hand/ python -m pytest tests/
 
-static: flake8 cfn-lint
-
-flake8:
-    flake8 --max-line-length=120
-
 cfn-lint:
     # Ignore "W1011 Use dynamic references over parameters for secrets" because we store secrets
     # using GitHub Secrets.
17 changes: 10 additions & 7 deletions apps/api/src/api.py
@@ -1,11 +1,14 @@
 import os
 
-os.environ['ENABLED_EXTENSIONS'] = ','.join([
-    'query',
-    'sort',
-    'fields',
-    'pagination',
-    'context',
-])
+
+os.environ['ENABLED_EXTENSIONS'] = ','.join(
+    [
+        'query',
+        'sort',
+        'fields',
+        'pagination',
+        'context',
+    ]
+)
 
 from stac_fastapi.pgstac.app import handler  # noqa: F401, E402
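For context, the environment variable is assigned before the `stac_fastapi` import because, as far as can be inferred from this file, the extension list is read from the environment when the app module is loaded; that is also why the import sits below module-level code and carries the `E402` suppression. A condensed sketch of the same pattern:

import os

# Set the configuration first: the downstream app appears to read
# ENABLED_EXTENSIONS at import time, so importing before assigning
# would silently fall back to its defaults.
os.environ['ENABLED_EXTENSIONS'] = ','.join(['query', 'sort', 'fields', 'pagination', 'context'])

from stac_fastapi.pgstac.app import handler  # noqa: F401, E402  (intentionally after the assignment)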
11 changes: 6 additions & 5 deletions collections/glo-30-hand/create_hand_items.py
@@ -3,11 +3,13 @@
 from datetime import datetime, timezone
 from pathlib import Path, PurePath
 
-import asf_stac_util
 import boto3
 from osgeo import gdal
 from shapely import geometry
 
+import asf_stac_util
+
+
 gdal.SetConfigOption('GDAL_DISABLE_READDIR_ON_OPEN', 'EMPTY_DIR')
 
 s3 = boto3.client('s3')
@@ -67,7 +69,7 @@ def create_stac_item(s3_key: str, s3_url: str, gdal_info_output: dict) -> dict:
                'href': get_dem_url(item_id),
                'type': 'image/tiff; application=geotiff',
                'title': 'GLO-30 Public Copernicus Digital Elevation Model GeoTIFF'
-                        ' used as input to create this HAND GeoTIFF',
+               ' used as input to create this HAND GeoTIFF',
                'rel': 'related',
            },
        ],
@@ -77,8 +79,7 @@ def parse_args() -> argparse.Namespace:
 def parse_args() -> argparse.Namespace:
     parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
     parser.add_argument('s3_objects', type=Path, help='Path to a text file containing the list of S3 objects')
-    parser.add_argument('-o', '--output-file', type=Path, help='Path for the output file',
-                        default='glo-30-hand.ndjson')
+    parser.add_argument('-o', '--output-file', type=Path, help='Path for the output file', default='glo-30-hand.ndjson')
     parser.add_argument('-n', '--number-of-items', type=int, help='Number of items to create')
     return parser.parse_args()
 
@@ -87,7 +88,7 @@ def main():
     args = parse_args()
 
     with args.s3_objects.open() as f:
-        s3_keys = f.read().splitlines()[:args.number_of_items]
+        s3_keys = f.read().splitlines()[: args.number_of_items]
 
     s3_url = get_s3_url()
     write_stac_items(s3_keys, s3_url, args.output_file)
@@ -1,14 +1,15 @@
 import argparse
 import urllib.parse
 from dataclasses import dataclass
-
 from datetime import datetime, timezone
 from pathlib import Path, PurePath
 
-import asf_stac_util
 import boto3
 from shapely import geometry
 
+import asf_stac_util
+
+
 s3 = boto3.client('s3')
 
 # TODO verify the start and end datetime values for each season
@@ -164,8 +165,9 @@ def bounding_box_from_tile(tile: str) -> geometry.Polygon:
 def parse_args() -> argparse.Namespace:
     parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
     parser.add_argument('s3_objects', type=Path, help='Path to a text file containing the list of S3 objects')
-    parser.add_argument('-o', '--output-file', type=Path, help='Path for the output file',
-                        default='sentinel-1-global-coherence.ndjson')
+    parser.add_argument(
+        '-o', '--output-file', type=Path, help='Path for the output file', default='sentinel-1-global-coherence.ndjson'
+    )
     parser.add_argument('-n', '--number-of-items', type=int, help='Number of items to create')
     return parser.parse_args()
 
@@ -174,7 +176,7 @@ def main():
     args = parse_args()
 
     with args.s3_objects.open() as f:
-        s3_keys = f.read().splitlines()[:args.number_of_items]
+        s3_keys = f.read().splitlines()[: args.number_of_items]
 
     s3_url = get_s3_url()
     write_stac_items(s3_keys, s3_url, args.output_file)
1 change: 1 addition & 0 deletions convert_collections_to_ndjson.py
@@ -2,6 +2,7 @@
 import json
 from pathlib import Path
 
+
 parser = argparse.ArgumentParser()
 parser.add_argument('--output-file', type=Path, default='collections.ndjson')
 parser.add_argument('collections', type=Path, nargs='+')
1 change: 1 addition & 0 deletions lib/asf-stac-util/setup.py
@@ -1,5 +1,6 @@
 from setuptools import find_packages, setup
 
+
 setup(
     name='asf-stac-util',
     license='BSD',
35 changes: 35 additions & 0 deletions pyproject.toml
@@ -0,0 +1,35 @@
+[project]
+requires-python = "==3.9"
+
+[tool.ruff]
+line-length = 120
+# The directories to consider when resolving first- vs. third-party imports.
+# See: https://docs.astral.sh/ruff/settings/#src
+src = [
+    "**/src",
+    "lib/*",
+    "tests",
+]
+
+[tool.ruff.format]
+indent-style = "space"
+quote-style = "single"
+
+[tool.ruff.lint]
+extend-select = [
+    "I",   # isort: https://docs.astral.sh/ruff/rules/#isort-i
+    "UP",  # pyupgrade: https://docs.astral.sh/ruff/rules/#pyupgrade-up
+
+    # TODO: Uncomment the following extensions and address their warnings:
+    # "D",   # pydocstyle: https://docs.astral.sh/ruff/rules/#pydocstyle-d
+    # "ANN", # annotations: https://docs.astral.sh/ruff/rules/#flake8-annotations-ann
+
+    "PTH", # use-pathlib-pth: https://docs.astral.sh/ruff/rules/#flake8-use-pathlib-pth
+]
+
+[tool.ruff.lint.pydocstyle]
+convention = "google"
+
+[tool.ruff.lint.isort]
+case-sensitive = true
+lines-after-imports = 2
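For reference, a minimal sketch of the import layout these settings produce — standard library, third-party, and first-party groups separated by blank lines, with `asf_stac_util` treated as first-party via the `src` globs and two blank lines after the import block — which matches the reordering applied to the collection scripts above. The `write_item` helper is hypothetical, included only to show the spacing; `jsonify_stac_item` is the utility exercised in the tests below.

from pathlib import Path  # standard library group

import boto3  # third-party group

import asf_stac_util  # first-party group: matched by the "lib/*" src glob


s3 = boto3.client('s3')  # module-level code begins after two blank lines (lines-after-imports = 2)


def write_item(path: Path, item: dict) -> None:
    # Hypothetical helper for illustration; jsonify_stac_item serializes a STAC item to a JSON string.
    path.write_text(asf_stac_util.jsonify_stac_item(item))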
7 changes: 2 additions & 5 deletions requirements-apps-api.txt
@@ -1,5 +1,2 @@
-mangum==0.17.0
-stac-fastapi.api==2.5.5.post1
-stac-fastapi.extensions==2.5.5.post1
-stac-fastapi.pgstac==2.5.0
-stac-fastapi.types==2.5.5.post1
+mangum==0.19.0
+stac-fastapi.pgstac==3.0.1
2 changes: 1 addition & 1 deletion requirements-run-codebuild.txt
@@ -1 +1 @@
-boto3==1.34.140
+boto3==1.35.76
16 changes: 8 additions & 8 deletions requirements.txt
@@ -1,13 +1,13 @@
 -r requirements-apps-api.txt
 -r requirements-run-codebuild.txt
 ./lib/asf-stac-util/
-boto3==1.34.140
-cfn-lint==1.5.0
-flake8==7.1.0
-pypgstac[psycopg]==0.7.10
+boto3==1.35.76
+cfn-lint==1.22.0
+ruff
+pypgstac[psycopg]==0.8.6
 pystac==1.10.1
-pytest==8.2.2
+pytest==8.3.4
 requests==2.32.3
-shapely==2.0.4
-tqdm==4.66.4
-uvicorn==0.30.1
+shapely==2.0.6
+tqdm==4.67.1
+uvicorn==0.32.1
1 change: 1 addition & 0 deletions run_codebuild.py
@@ -4,6 +4,7 @@
 
 import boto3
 
+
 CLIENT = boto3.client('codebuild')
 
 
8 changes: 4 additions & 4 deletions tests/test_asf_stac_util.py
@@ -17,7 +17,7 @@ def test_jsonify_stac_item():
             'datetime_field': datetime(2022, 11, 30, 12, tzinfo=timezone.utc),
         }
     ) == (
-            '{"str_field": "foo", "int_field": 5, "float_field": 3.1, "bool_field": true, "null_field": null, '
-            '"dict_field": {"str_field": "bar"}, "list_field": [[1, 2], [3, 4]], "tuple_field": [[1, 2], [3, 4]], '
-            '"datetime_field": "2022-11-30T12:00:00Z"}'
-        )
+        '{"str_field": "foo", "int_field": 5, "float_field": 3.1, "bool_field": true, "null_field": null, '
+        '"dict_field": {"str_field": "bar"}, "list_field": [[1, 2], [3, 4]], "tuple_field": [[1, 2], [3, 4]], '
+        '"datetime_field": "2022-11-30T12:00:00Z"}'
+    )