Skip to content

Commit 84b4c36

Browse files
authored
♻️ Generic pagination and API model fixtures (ITISFoundation#2682)
1 parent 40dba4e commit 84b4c36

File tree

21 files changed

+2889
-16388
lines changed

21 files changed

+2889
-16388
lines changed

packages/service-library/src/servicelib/rest_pagination_utils.py renamed to packages/models-library/src/models_library/rest_pagination.py

Lines changed: 12 additions & 35 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,4 @@
1-
from math import ceil
2-
from typing import Any, List, Optional
1+
from typing import Generic, List, Optional, TypeVar
32

43
from pydantic import (
54
AnyHttpUrl,
@@ -10,7 +9,7 @@
109
PositiveInt,
1110
validator,
1211
)
13-
from yarl import URL
12+
from pydantic.generics import GenericModel
1413

1514
DEFAULT_NUMBER_OF_ITEMS_PER_PAGE = 20
1615

@@ -112,10 +111,17 @@ class Config:
112111
extra = Extra.forbid
113112

114113

115-
class PageResponseLimitOffset(BaseModel):
114+
ItemT = TypeVar("ItemT")
115+
116+
117+
class Page(GenericModel, Generic[ItemT]):
118+
"""
119+
Paginated response model of ItemTs
120+
"""
121+
116122
meta: PageMetaInfoLimitOffset = Field(alias="_meta")
117123
links: PageLinks = Field(alias="_links")
118-
data: List[Any]
124+
data: List[ItemT]
119125

120126
@validator("data", pre=True)
121127
@classmethod
@@ -136,41 +142,12 @@ def check_data_compatible_with_meta(cls, v, values):
136142
)
137143
return v
138144

139-
@classmethod
140-
def paginate_data(
141-
cls,
142-
data: List[Any],
143-
request_url: URL,
144-
total: int,
145-
limit: int,
146-
offset: int,
147-
) -> "PageResponseLimitOffset":
148-
last_page = ceil(total / limit) - 1
149-
150-
return PageResponseLimitOffset(
151-
_meta=PageMetaInfoLimitOffset(
152-
total=total, count=len(data), limit=limit, offset=offset
153-
),
154-
_links=PageLinks(
155-
self=f"{request_url.update_query({'offset': offset, 'limit': limit})}",
156-
first=f"{request_url.update_query({'offset': 0, 'limit': limit})}",
157-
prev=f"{request_url.update_query({'offset': max(offset - limit, 0), 'limit': limit})}"
158-
if offset > 0
159-
else None,
160-
next=f"{request_url.update_query({'offset': min(offset + limit, last_page * limit), 'limit': limit})}"
161-
if offset < (last_page * limit)
162-
else None,
163-
last=f"{request_url.update_query({'offset': last_page * limit, 'limit': limit})}",
164-
),
165-
data=data,
166-
)
167-
168145
class Config:
169146
extra = Extra.forbid
170147

171148
schema_extra = {
172149
"examples": [
173-
# first page
150+
# first page Page[str]
174151
{
175152
"_meta": {"total": 7, "count": 4, "limit": 4, "offset": 0},
176153
"_links": {
Lines changed: 89 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,89 @@
1+
from math import ceil
2+
from typing import Any, Dict, List, Protocol, TypedDict, Union, runtime_checkable
3+
4+
from .rest_pagination import PageLinks, PageMetaInfoLimitOffset
5+
6+
# NOTE: In this repo we use two type of URL-like data structures:
7+
# - from yarl (aiohttp-style) and
8+
# - from starlette (fastapi-style)
9+
#
10+
# Here we define protocols to avoid including starlette or yarl in this library's requirements
11+
# and a helper function below that can handle both protocols at runtime
12+
#
13+
14+
15+
@runtime_checkable
16+
class _YarlURL(Protocol):
17+
def update_query(self, query) -> "_YarlURL":
18+
...
19+
20+
21+
class _StarletteURL(Protocol):
22+
# SEE starlette.data_structures.URL
23+
# in https://github.com/encode/starlette/blob/master/starlette/datastructures.py#L130
24+
25+
def replace_query_params(self, **kwargs: Any) -> "_StarletteURL":
26+
...
27+
28+
29+
_URLType = Union[_YarlURL, _StarletteURL]
30+
31+
32+
def _replace_query(url: _URLType, query: Dict[str, Any]):
33+
"""This helper function ensures query replacement works with both"""
34+
if isinstance(url, _YarlURL):
35+
new_url = url.update_query(query)
36+
else:
37+
new_url = url.replace_query_params(**query)
38+
return f"{new_url}"
39+
40+
41+
class PageDict(TypedDict):
    """Dict layout of a paginated response, ready to feed to ``Page[ItemT].parse_obj``.

    Keys use the wire aliases (``_meta``/``_links``) expected by the Page model.
    """

    _meta: Any
    _links: Any
    data: List[Any]
45+
46+
47+
def paginate_data(
    chunk: List[Any],
    *,
    request_url: _URLType,
    total: int,
    limit: int,
    offset: int,
) -> PageDict:
    """Builds page-like objects to feed to Page[ItemT] pydantic model class

    :param chunk: the items of the current page (at most ``limit`` of them)
    :param request_url: URL of the current request (yarl or starlette flavor)
    :param total: total number of items across all pages
    :param limit: maximum number of items per page
    :param offset: index of the first item of this page

    Usage:

        obj: PageDict = paginate_data( ... )
        model = Page[MyModelItem].parse_obj(obj)

    raises ValidationError
    """
    # NOTE(review): assumes limit > 0 (limit == 0 raises ZeroDivisionError) — confirm callers validate
    last_page = ceil(total / limit) - 1

    return PageDict(
        _meta=PageMetaInfoLimitOffset(
            total=total, count=len(chunk), limit=limit, offset=offset
        ),
        _links=PageLinks(
            self=_replace_query(request_url, {"offset": offset, "limit": limit}),
            first=_replace_query(request_url, {"offset": 0, "limit": limit}),
            # no 'prev' link on the first page
            prev=_replace_query(
                request_url, {"offset": max(offset - limit, 0), "limit": limit}
            )
            if offset > 0
            else None,
            # no 'next' link on the last page
            next=_replace_query(
                request_url,
                {"offset": min(offset + limit, last_page * limit), "limit": limit},
            )
            if offset < (last_page * limit)
            else None,
            last=_replace_query(
                request_url, {"offset": last_page * limit, "limit": limit}
            ),
        ),
        data=chunk,
    )
Lines changed: 50 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,50 @@
1+
from copy import deepcopy
2+
3+
import pytest
4+
from models_library.rest_pagination import Page, PageMetaInfoLimitOffset
5+
from pydantic.main import BaseModel
6+
7+
8+
@pytest.mark.parametrize("cls_model", [Page[str], PageMetaInfoLimitOffset])
def test_page_response_limit_offset_models(cls_model: BaseModel):
    """Every example published in the model's schema_extra must validate."""
    examples = cls_model.Config.schema_extra["examples"]

    for index, example in enumerate(examples):
        print(f"{index:-^10}:\n", example)

        model_instance = cls_model(**example)
        assert model_instance
17+
18+
19+
def test_invalid_offset():
    """An offset at/past the total number of items must be rejected."""
    with pytest.raises(ValueError):
        PageMetaInfoLimitOffset(limit=6, total=5, offset=5, count=2)
22+
23+
24+
@pytest.mark.parametrize(
    "count, offset",
    [
        pytest.param(7, 0, id="count bigger than limit"),
        pytest.param(6, 0, id="count bigger than total"),
        pytest.param(5, 1, id="count + offset bigger than total"),
    ],
)
def test_invalid_count(count: int, offset: int):
    """Inconsistent count/offset/limit/total combinations must be rejected."""
    with pytest.raises(ValueError):
        PageMetaInfoLimitOffset(limit=6, total=5, offset=offset, count=count)
35+
36+
37+
def test_data_size_does_not_fit_count():
    """If len(data) disagrees with _meta.count, validation must fail."""
    example = deepcopy(Page[str].Config.schema_extra["examples"][0])
    example["_meta"]["count"] = len(example["data"]) - 1
    with pytest.raises(ValueError):
        Page[str](**example)
42+
43+
44+
def test_empty_data_is_converted_to_list():
    """data=None is normalized to an empty list by the model's pre-validator."""
    example = deepcopy(Page[str].Config.schema_extra["examples"][0])
    example["data"] = None
    example["_meta"]["count"] = 0
    model_instance = Page[str](**example)
    assert model_instance
    assert model_instance.data == []
Lines changed: 170 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,170 @@
1+
import pytest
2+
from models_library.rest_pagination import Page, PageLinks, PageMetaInfoLimitOffset
3+
from models_library.rest_pagination_utils import PageDict, paginate_data
4+
from yarl import URL
5+
6+
7+
def _expected_links(
    base_url: str, *, offset, prev_offset, next_offset, last_offset, limit
) -> PageLinks:
    """Builds the PageLinks expected when paginating a request whose original
    query was 'some=1&random=4&query=true' (pre-existing params are preserved)."""

    def _url(page_offset: int) -> str:
        return str(
            URL(base_url).with_query(
                f"some=1&random=4&query=true&offset={page_offset}&limit={limit}"
            )
        )

    return PageLinks(
        self=_url(offset),
        first=_url(0),
        prev=_url(prev_offset) if prev_offset is not None else None,
        next=_url(next_offset) if next_offset is not None else None,
        last=_url(last_offset),
    )


@pytest.mark.parametrize(
    "base_url",
    [
        "http://site.com",
        "http://site.com/",
        "http://some/random/url.com",
        "http://some/random/url.com/",
        "http://s.s.s.s.subsite.site.com",
        "http://s.s.s.s.subsite.site.com/",
        "http://10.0.0.1.nip.io/",
        "http://10.0.0.1.nip.io:8091/",
        "http://10.0.0.1.nip.io",
        "http://10.0.0.1.nip.io:8091",
    ],
)
def test_paginating_data(base_url):
    """Walks all pages of a 29-item collection (limit 9) and checks meta/links
    produced by paginate_data at every step."""
    # create random data
    total_number_of_items = 29
    limit = 9
    data_chunk = list(range(limit))
    request_url = URL(f"{base_url}?some=1&random=4&query=true")

    number_of_chunks = total_number_of_items // limit + 1
    last_chunk_size = total_number_of_items % limit
    last_chunk_offset = (number_of_chunks - 1) * len(data_chunk)

    # first "call"
    offset = 0
    data_obj: PageDict = paginate_data(
        data_chunk,
        total=total_number_of_items,
        limit=limit,
        offset=offset,
        request_url=request_url,
    )
    assert data_obj

    model_instance = Page[int].parse_obj(data_obj)
    assert model_instance
    assert model_instance.meta == PageMetaInfoLimitOffset(
        total=total_number_of_items, count=len(data_chunk), limit=limit, offset=offset
    )
    assert model_instance.links == _expected_links(
        base_url,
        offset=offset,
        prev_offset=None,  # first page has no 'prev'
        next_offset=offset + limit,
        last_offset=last_chunk_offset,
        limit=limit,
    )

    # next "call"s: follow the 'next' link through the middle pages
    for _ in range(1, number_of_chunks - 1):
        offset += len(data_chunk)
        assert model_instance.links.next is not None

        data_obj = paginate_data(
            data_chunk,
            request_url=URL(model_instance.links.next),
            total=total_number_of_items,
            limit=limit,
            offset=offset,
        )

        model_instance = Page[int].parse_obj(data_obj)
        assert model_instance
        assert model_instance.meta == PageMetaInfoLimitOffset(
            total=total_number_of_items,
            count=len(data_chunk),
            limit=limit,
            offset=offset,
        )
        assert model_instance.links == _expected_links(
            base_url,
            offset=offset,
            prev_offset=offset - limit,
            next_offset=offset + limit,
            last_offset=last_chunk_offset,
            limit=limit,
        )

    # last "call": a partial chunk, and no 'next' link
    offset += len(data_chunk)
    data_chunk = data_chunk[:last_chunk_size]

    assert offset == last_chunk_offset

    assert model_instance.links.next is not None
    data_obj = paginate_data(
        data_chunk,
        request_url=URL(model_instance.links.next),
        total=total_number_of_items,
        limit=limit,
        offset=offset,
    )
    assert data_obj

    model_instance = Page[int].parse_obj(data_obj)
    assert model_instance

    assert model_instance.meta == PageMetaInfoLimitOffset(
        total=total_number_of_items,
        count=len(data_chunk),
        limit=limit,
        offset=offset,
    )
    assert model_instance.links == _expected_links(
        base_url,
        offset=offset,
        prev_offset=last_chunk_offset - limit,
        next_offset=None,  # last page has no 'next'
        last_offset=last_chunk_offset,
        limit=limit,
    )

0 commit comments

Comments
 (0)