Skip to content
Merged
Show file tree
Hide file tree
Changes from 4 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,8 @@ services:
build:
context: .
dockerfile: Dockerfile.tests
volumes:
- .:/app
environment:
- ENVIRONMENT=local
- DB_MIN_CONN_SIZE=1
Expand Down
6 changes: 3 additions & 3 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,9 +10,9 @@
"orjson",
"pydantic",
"stac_pydantic==3.1.*",
"stac-fastapi.api~=3.0.2",
"stac-fastapi.extensions~=3.0.2",
"stac-fastapi.types~=3.0.2",
"stac-fastapi.api~=3.0.3",
"stac-fastapi.extensions~=3.0.3",
"stac-fastapi.types~=3.0.3",
"asyncpg",
"buildpg",
"brotli_asgi",
Expand Down
4 changes: 4 additions & 0 deletions stac_fastapi/pgstac/app.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@
from stac_fastapi.extensions.core import (
FieldsExtension,
FilterExtension,
OffsetPaginationExtension,
SortExtension,
TokenPaginationExtension,
TransactionExtension,
Expand Down Expand Up @@ -55,6 +56,9 @@
"sort": SortExtension(),
"fields": FieldsExtension(),
"filter": FilterExtension(client=FiltersClient()),
# NOTE: there is no conformance class for the Pagination extension
# so `CollectionSearchExtension.from_extensions` will raise a warning
"pagination": OffsetPaginationExtension(),
}

enabled_extensions = (
Expand Down
28 changes: 18 additions & 10 deletions stac_fastapi/pgstac/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@
from stac_fastapi.pgstac.config import Settings
from stac_fastapi.pgstac.models.links import (
CollectionLinks,
CollectionSearchPagingLinks,
ItemCollectionLinks,
ItemLinks,
PagingLinks,
Expand All @@ -46,8 +47,8 @@ async def all_collections( # noqa: C901
bbox: Optional[BBox] = None,
datetime: Optional[DateTimeType] = None,
limit: Optional[int] = None,
offset: Optional[int] = None,
query: Optional[str] = None,
token: Optional[str] = None,
fields: Optional[List[str]] = None,
sortby: Optional[str] = None,
filter: Optional[str] = None,
Expand All @@ -68,7 +69,7 @@ async def all_collections( # noqa: C901
base_args = {
"bbox": bbox,
"limit": limit,
"token": token,
"offset": offset,
"query": orjson.loads(unquote_plus(query)) if query else query,
}

Expand All @@ -90,12 +91,16 @@ async def all_collections( # noqa: C901
)
collections_result: Collections = await conn.fetchval(q, *p)

next: Optional[str] = None
prev: Optional[str] = None

next_link: Optional[Dict[str, Any]] = None
prev_link: Optional[Dict[str, Any]] = None
if links := collections_result.get("links"):
next = collections_result["links"].pop("next")
prev = collections_result["links"].pop("prev")
next_link = None
prev_link = None
for link in links:
if link["rel"] == "next":
next_link = link
elif link["rel"] == "prev":
prev_link = link

linked_collections: List[Collection] = []
collections = collections_result["collections"]
Expand All @@ -120,10 +125,13 @@ async def all_collections( # noqa: C901

linked_collections.append(coll)

links = await PagingLinks(
if not collections:
next_link = None

links = await CollectionSearchPagingLinks(
request=request,
next=next,
prev=prev,
next=next_link,
prev=prev_link,
).get_links()

return Collections(
Expand Down
53 changes: 53 additions & 0 deletions stac_fastapi/pgstac/models/links.py
Original file line number Diff line number Diff line change
Expand Up @@ -173,6 +173,59 @@ def link_prev(self) -> Optional[Dict[str, Any]]:
return None


@attr.s
class CollectionSearchPagingLinks(BaseLinks):
    """Create pagination links for collection-search results.

    ``next``/``prev`` are the raw pgstac paging link objects (if any);
    only their ``"body"`` payload — the query parameters for the target
    page — is used to build the hrefs.
    """

    next: Optional[Dict[str, Any]] = attr.ib(kw_only=True, default=None)
    prev: Optional[Dict[str, Any]] = attr.ib(kw_only=True, default=None)

    def _page_link(
        self, link: Optional[Dict[str, Any]], rel: str
    ) -> Optional[Dict[str, Any]]:
        """Build one GET paging link from a pgstac link object.

        Returns ``None`` when *link* is absent, the request is not a GET,
        or the resulting href would point back at the current page.
        """
        if link is None:
            return None

        method = self.request.method
        if method != "GET":
            return None

        # offset equal to its default value (0) carries no information —
        # drop it so the generated href stays canonical
        if link["body"].get("offset", -1) == 0:
            link["body"].pop("offset")

        href = merge_params(self.url, link["body"])

        # a paging link identical to the current URL is useless; skip it
        if href == self.url:
            return None

        return {
            "rel": rel,
            "type": MimeTypes.geojson.value,
            "method": method,
            "href": href,
        }

    def link_next(self) -> Optional[Dict[str, Any]]:
        """Create link for next page."""
        return self._page_link(self.next, Relations.next.value)

    def link_prev(self) -> Optional[Dict[str, Any]]:
        """Create link for previous page."""
        return self._page_link(self.prev, Relations.previous.value)


@attr.s
class CollectionLinksBase(BaseLinks):
"""Create inferred links specific to collections."""
Expand Down
9 changes: 6 additions & 3 deletions tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@
CollectionSearchExtension,
FieldsExtension,
FilterExtension,
OffsetPaginationExtension,
SortExtension,
TokenPaginationExtension,
TransactionExtension,
Expand Down Expand Up @@ -140,10 +141,12 @@ def api_client(request, database):
SortExtension(),
FieldsExtension(),
FilterExtension(client=FiltersClient()),
OffsetPaginationExtension(),
]
collection_search_extension = CollectionSearchExtension.from_extensions(
collection_extensions
)
with pytest.warns(UserWarning):
collection_search_extension = CollectionSearchExtension.from_extensions(
collection_extensions
)

items_get_request_model = create_request_model(
model_name="ItemCollectionUri",
Expand Down
130 changes: 130 additions & 0 deletions tests/resources/test_collection.py
Original file line number Diff line number Diff line change
Expand Up @@ -303,3 +303,133 @@ async def test_get_collections_search(
"/collections",
)
assert len(resp.json()["collections"]) == 2


@pytest.mark.asyncio
async def test_get_collections_search_limit_offset(
    app_client, load_test_collection, load_test2_collection
):
    """Exercise limit/offset paging behavior of GET /collections."""

    def unpack(resp):
        # Pull collections + links out of a response body.
        body = resp.json()
        return body["collections"], body["links"]

    def rels(links):
        # Set of link relation types present in a links array.
        return {link["rel"] for link in links}

    def find_link(links, rel):
        # First link with the given relation type.
        return [link for link in links if link["rel"] == rel][0]

    # No paging params: both collections, only root/self links.
    resp = await app_client.get("/collections")
    cols, links = unpack(resp)
    assert len(cols) == 2
    assert len(links) == 2
    assert rels(links) == {"root", "self"}

    ###################
    # limit should be positive
    resp = await app_client.get("/collections", params={"limit": 0})
    assert resp.status_code == 400

    ###################
    # limit=1, should have a `next` link
    resp = await app_client.get(
        "/collections",
        params={"limit": 1},
    )
    cols, links = unpack(resp)
    assert len(cols) == 1
    assert cols[0]["id"] == load_test_collection["id"]
    assert len(links) == 3
    assert rels(links) == {"root", "self", "next"}
    assert find_link(links, "next")["href"].endswith("?limit=1&offset=1")

    ###################
    # limit=2, there should not be a next link
    resp = await app_client.get(
        "/collections",
        params={"limit": 2},
    )
    cols, links = unpack(resp)
    assert len(cols) == 2
    assert cols[0]["id"] == load_test_collection["id"]
    assert cols[1]["id"] == load_test2_collection.id
    # TODO: check with pgstac
    # assert len(links) == 2
    # assert rels(links) == {"root", "self"}

    ###################
    # limit=3, there should not be a next/previous link
    resp = await app_client.get(
        "/collections",
        params={"limit": 3},
    )
    cols, links = unpack(resp)
    assert len(cols) == 2
    assert cols[0]["id"] == load_test_collection["id"]
    assert cols[1]["id"] == load_test2_collection.id
    assert len(links) == 2
    assert rels(links) == {"root", "self"}

    ###################
    # offset=3, because there are 2 collections, we should not have `next` or `prev` links
    resp = await app_client.get(
        "/collections",
        params={"offset": 3},
    )
    cols, links = unpack(resp)
    assert len(cols) == 0
    assert len(links) == 2
    assert rels(links) == {"root", "self"}

    ###################
    # offset=3,limit=1
    resp = await app_client.get(
        "/collections",
        params={"limit": 1, "offset": 3},
    )
    cols, links = unpack(resp)
    assert len(cols) == 0
    assert len(links) == 3
    assert rels(links) == {"root", "self", "previous"}
    assert find_link(links, "previous")["href"].endswith("?limit=1&offset=2")

    # ###################
    # # offset=3,limit=2
    # resp = await app_client.get(
    #     "/collections",
    #     params={"limit": 2, "offset": 3},
    # )
    # cols, links = unpack(resp)
    # assert len(cols) == 0
    # assert len(links) == 3
    # assert rels(links) == {"root", "self", "previous"}
    # assert find_link(links, "previous")["href"].endswith("?limit=1&offset=2")

    ###################
    # offset=1, should have a `previous` link
    resp = await app_client.get(
        "/collections",
        params={"offset": 1},
    )
    cols, links = unpack(resp)
    assert len(cols) == 1
    assert cols[0]["id"] == load_test2_collection.id
    # TODO: Check with pgstac
    # assert len(links) == 3
    # assert rels(links) == {"root", "self", "previous"}
    # assert find_link(links, "previous")["href"].endswith("?offset=0")

    ###################
    # offset=0, should not have next/previous link
    resp = await app_client.get(
        "/collections",
        params={"offset": 0},
    )
    cols, links = unpack(resp)
    assert len(cols) == 2
    # TODO: Check with pgstac
    # assert len(links) == 2
    # assert rels(links) == {"root", "self"}
Loading