Skip to content

Commit

Permalink
update postgis version for tests, fix tests
Browse files Browse the repository at this point in the history
  • Loading branch information
Ariana Barzinpour committed Apr 15, 2024
1 parent aeec345 commit 3e04d3a
Show file tree
Hide file tree
Showing 4 changed files with 57 additions and 40 deletions.
16 changes: 12 additions & 4 deletions cubedash/_stac.py
Original file line number Diff line number Diff line change
Expand Up @@ -468,6 +468,7 @@ def _format(val: str) -> dict[str, str]:
return {"field": val[1:], "direction": "desc"}
if val.startswith("+"):
return {"field": val[1:], "direction": "asc"}
# default is ascending
return {"field": val.strip(), "direction": "asc"}

if isinstance(arg, str):
Expand All @@ -476,7 +477,8 @@ def _format(val: str) -> dict[str, str]:
if isinstance(arg[0], str):
return [_format(a) for a in arg]
if isinstance(arg[0], dict):
return arg
for a in arg:
a["field"] = _remove_prefixes(a["field"])

return arg

Expand Down Expand Up @@ -557,9 +559,9 @@ def _handle_search_request(
"Only 'id', 'collection', and Item properties can be used to sort results.",
)

filter_lang = request.args.get("filter-lang", default=None)
filter_lang = request_args.get("filter-lang", default=None, type=str)
filter_cql = request_args.get("filter", default=None, type=_filter_arg)
filter_crs = request.args.get("filter-crs", default=None)
filter_crs = request_args.get("filter-crs", default=None)
if filter_crs and filter_crs != "https://www.opengis.net/def/crs/OGC/1.3/CRS84":
abort(
400,
Expand Down Expand Up @@ -793,7 +795,13 @@ def search_stac_items(
)
if include_total_count:
count_matching = _model.STORE.get_count(
product_names=product_names, time=time, bbox=bbox, dataset_ids=dataset_ids
product_names=product_names,
time=time,
bbox=bbox,
intersects=intersects,
dataset_ids=dataset_ids,
filter_lang=filter_lang,
filter_cql=filter_cql,
)
extra_properties["numberMatched"] = count_matching
extra_properties["context"]["matched"] = count_matching
Expand Down
37 changes: 21 additions & 16 deletions cubedash/summary/_stores.py
Original file line number Diff line number Diff line change
Expand Up @@ -1247,6 +1247,7 @@ def _get_field_exprs(
.where(ODC_DATASET_TYPE.c.id == DATASET_SPATIAL.c.dataset_type_ref)
.scalar_subquery()
)
field_exprs["datetime"] = DATASET_SPATIAL.c.center_time
geom = func.ST_Transform(DATASET_SPATIAL.c.footprint, 4326)
field_exprs["geometry"] = geom
field_exprs["bbox"] = func.Box2D(geom).cast(String)
Expand Down Expand Up @@ -1278,20 +1279,17 @@ def _add_order_to_query(
) -> Select:
order_clauses = []
for s in sortby:
try:
field = field_exprs.get(s.get("field"))
# is there any way to check if sortable?
if field is not None:
asc = s.get("direction") == "asc"
if asc:
order_clauses.append(field)
else:
order_clauses.append(field.desc())
except AttributeError: # there is no field by that name, ignore
# the spec does not specify a handling directive for unspecified fields,
# so we've chosen to ignore them to be in line with the other extensions
continue

field = field_exprs.get(s.get("field"))
# is there any way to check if sortable?
if field is not None:
asc = s.get("direction") == "asc"
if asc:
order_clauses.append(field.asc())
else:
order_clauses.append(field.desc())
# there is no field by that name, ignore
# the spec does not specify a handling directive for unspecified fields,
# so we've chosen to ignore them to be in line with the other extensions
query = query.order_by(*order_clauses)
return query

Expand Down Expand Up @@ -1354,7 +1352,14 @@ def get_count(
"""
Do the base select query to get the count of matching datasets.
"""
query: Select = select([func.count()]).select_from(DATASET_SPATIAL)
if filter_cql:  # to account for the possibility of 'collection' in the filter
query: Select = select([func.count()]).select_from(
DATASET_SPATIAL.join(
ODC_DATASET, onclause=ODC_DATASET.c.id == DATASET_SPATIAL.c.id
)
)
else:
query: Select = select([func.count()]).select_from(DATASET_SPATIAL)

query = self._add_fields_to_query(
query,
Expand Down Expand Up @@ -1457,7 +1462,7 @@ def search_items(
"Only full-dataset searches can be sorted by recently added"
)
query = query.order_by(ODC_DATASET.c.added.desc())
else: # order was provided as a sortby query
elif order: # order was provided as a sortby query
query = self._add_order_to_query(query, field_exprs, order)

query = query.limit(limit).offset(
Expand Down
2 changes: 1 addition & 1 deletion cubedash/testutils/database.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ def postgresql_server():
else:
client = docker.from_env()
container = client.containers.run(
"postgis/postgis:14-3.3-alpine",
"postgis/postgis:16-3.4",
auto_remove=True,
remove=True,
detach=True,
Expand Down
42 changes: 23 additions & 19 deletions integration_tests/test_stac.py
Original file line number Diff line number Diff line change
Expand Up @@ -524,7 +524,7 @@ def expect_404(url: str, message_contains: str = None):
"/collections/ls7_nbar_scene/items"
"?datetime=2000-01-01/2000-01-01&bbox=-48.206,-14.195,-45.067,-12.272",
"/stac/collections/ls7_nbar_scene/items"
"?datetime=2000-01-01/2000-01-01&bbox=-48.206,-14.195,-45.067,-12.272",
"?datetime=2000-01-01%2F2000-01-01&bbox=-48.206,-14.195,-45.067,-12.272",
),
(
"/collections/ls7_nbar_scene/items/0c5b625e-5432-4911-9f7d-f6b894e27f3c",
Expand Down Expand Up @@ -1440,7 +1440,7 @@ def test_stac_sortby_extension(stac_client: FlaskClient):
rv: Response = stac_client.get(
"/stac/search?collection=ga_ls8c_ard_3&limit=5&sortby=id,-datetime,foo"
)
assert rv.json == doc
assert rv.json["features"] == doc["features"]

# sorting across pages
next_link = _get_next_href(doc)
Expand All @@ -1449,7 +1449,6 @@ def test_stac_sortby_extension(stac_client: FlaskClient):
last_item = doc["features"][-1]
next_item = rv.json["features"][0]
assert last_item["id"] < next_item["id"]
assert last_item["properties"]["datetime"] > next_item["properties"]["datetime"]


def test_stac_filter_extension(stac_client: FlaskClient):
Expand All @@ -1462,7 +1461,7 @@ def test_stac_filter_extension(stac_client: FlaskClient):
},
{
"op": ">=",
"args": [{"property": "cloud_cover"}, float(2)],
"args": [{"property": "eo:cloud_cover"}, float(2)],
},
],
}
Expand All @@ -1481,16 +1480,32 @@ def test_stac_filter_extension(stac_client: FlaskClient):
)
assert rv.status_code == 200
features = rv.json.get("features")
assert len(features) == 2
assert len(features) == rv.json.get("numberMatched") == 2
ids = [f["id"] for f in features]
assert "fc792b3b-a685-4c0f-9cf6-f5257f042c64" in ids
assert "192276c6-8fa4-46a9-8bc6-e04e157974b9" in ids

# test cql2-text
filter_text = "collection='ga_ls8c_ard_3' AND view:sun_azimuth > 5"
filter_text = "collection='ga_ls8c_ard_3' AND dataset_maturity <> 'final' AND cloud_cover >= 2"
rv: Response = stac_client.get(f"/stac/search?filter={filter_text}")
features = rv.json.get("features")
assert len(features) == 9
assert rv.json.get("numberMatched") == 2

filter_text = "view:sun_azimuth < 40 AND dataset_maturity = 'final'"
rv: Response = stac_client.get(
f"/stac/search?collections=ga_ls8c_ard_3&filter={filter_text}"
)
assert rv.json.get("numberMatched") == 4

# test invalid property name treated as null
rv: Response = stac_client.get(
"/stac/search?filter=item.collection='ga_ls8c_ard_3' AND properties.foo > 2"
)
assert rv.json.get("numberMatched") == 0

rv: Response = stac_client.get(
"/stac/search?filter=collection='ga_ls8c_ard_3' AND foo IS NULL"
)
assert rv.json.get("numberMatched") == 21

# test lang mismatch
rv: Response = stac_client.post(
Expand All @@ -1509,17 +1524,6 @@ def test_stac_filter_extension(stac_client: FlaskClient):
)
assert rv.status_code == 400

# test invalid property name treated as null
rv: Response = stac_client.get(
"/stac/search?filter=item.collection='ga_ls8c_ard_3' AND properties.foo != 2"
)
assert len(rv.json.get("features")) == 0

rv: Response = stac_client.get(
"/stac/search?filter=collection='ga_ls8c_ard_3' AND foo IS NULL"
)
assert (len(rv.json.get("features"))) == 21

# filter-crs invalid value
rv: Response = stac_client.post(
"/stac/search",
Expand Down

0 comments on commit 3e04d3a

Please sign in to comment.