Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
140 changes: 2 additions & 138 deletions src/posit/connect/_api.py
Original file line number Diff line number Diff line change
@@ -1,13 +1,10 @@
# TODO-barret-future; Piecemeal migrate everything to leverage `ApiDictEndpoint` and `ApiListEndpoint` classes.
# TODO-barret-future; Piecemeal migrate everything to leverage `ApiDictEndpoint`
# TODO-barret-future; Merge any trailing behavior of `Active` or `ActiveList` into the new classes.

from __future__ import annotations

import itertools
import posixpath
from abc import ABC, abstractmethod
from collections.abc import Mapping
from typing import TYPE_CHECKING, Any, Generator, Generic, Optional, TypeVar, cast, overload
from typing import TYPE_CHECKING, Any, Optional, cast

from ._api_call import ApiCallMixin, get_api
from ._json import Jsonifiable, JsonifiableDict, ResponseAttrs
Expand Down Expand Up @@ -143,136 +140,3 @@ def __init__(
super().__init__(attrs)
self._ctx = ctx
self._path = path


T = TypeVar("T", bound="ReadOnlyDict")
"""A type variable bound to `ReadOnlyDict` (the record type produced by an endpoint)."""


class ApiListEndpoint(ApiCallMixin, Generic[T], ABC):
    """A HTTP GET endpoint that can fetch a collection."""

    def __init__(self, *, ctx: Context, path: str, uid_key: str = "guid") -> None:
        """A sequence abstraction for any HTTP GET endpoint that returns a collection.

        Parameters
        ----------
        ctx : Context
            The context object containing the session and URL for API interactions.
        path : str
            The HTTP path component for the collection endpoint.
        uid_key : str, optional
            The field name that uniquely identifies an instance of T, by default "guid".
        """
        super().__init__()
        self._ctx = ctx
        self._path = path
        self._uid_key = uid_key

    @abstractmethod
    def _create_instance(self, path: str, /, **kwargs: Any) -> T:
        """Create an instance of 'T' from a single API result."""
        raise NotImplementedError()

    def fetch(self) -> Generator[T, None, None]:
        """Fetch the collection.

        Fetches the collection directly from Connect. This operation does not
        affect any cache state.

        Yields
        ------
        T
            One instance per record returned by the endpoint.
        """
        results: Jsonifiable = self._get_api()
        results_list = cast("list[JsonifiableDict]", results)
        for result in results_list:
            yield self._to_instance(result)

    def __iter__(self) -> Generator[T, None, None]:
        return self.fetch()

    def _to_instance(self, result: dict) -> T:
        """Convert a raw API result dict into an instance of T."""
        uid = result[self._uid_key]
        # Each record lives at `<collection path>/<uid>`.
        path = posixpath.join(self._path, uid)
        return self._create_instance(path, **result)

    @overload
    def __getitem__(self, index: int) -> T: ...

    @overload
    def __getitem__(self, index: slice) -> Generator[T, None, None]: ...

    def __getitem__(self, index: int | slice) -> T | Generator[T, None, None]:
        # BUG FIX: the previous implementation used `yield` inside this method,
        # which made Python treat the *entire* method as a generator. Integer
        # indexing therefore returned an (empty-looking) generator instead of
        # T, and its `return` statement was never observed by callers.
        # Returning an iterator for slices and a single value for ints matches
        # the declared overloads.
        if isinstance(index, slice):
            # Lazy: results are fetched only as the returned iterator is consumed.
            # Note: `islice` does not support negative indices (same limitation
            # as the original implementation).
            return itertools.islice(self.fetch(), index.start, index.stop, index.step)
        try:
            return next(itertools.islice(self.fetch(), index, index + 1))
        except StopIteration:
            raise IndexError(f"list index out of range: {index}") from None

    def __str__(self) -> str:
        return self.__repr__()

    def __repr__(self) -> str:
        # e.g. "Jobs - 123 items - v1/jobs". Counting requires fetching the
        # whole collection, so this performs a network round trip.
        # (Previously this returned `repr(f"...")`, which wrapped the text in
        # spurious quote characters.)
        return f"{self.__class__.__name__} - {len(list(self.fetch()))} items - {self._path}"

    def find(self, uid: str) -> T | None:
        """
        Find a record by its unique identifier.

        Fetches the record from Connect by its identifier.

        Parameters
        ----------
        uid : str
            The unique identifier of the record.

        Returns
        -------
        :
            A single instance of T.

        Notes
        -----
        Despite the `T | None` annotation (kept for interface compatibility),
        this implementation never returns `None`; a missing record surfaces as
        an HTTP error raised by the underlying API call.
        """
        result: Jsonifiable = self._get_api(uid)
        result_obj = cast("JsonifiableDict", result)

        return self._to_instance(result_obj)

    def find_by(self, **conditions: Any) -> T | None:
        """
        Find the first record matching the specified conditions.

        There is no implied ordering, so if order matters, you should specify it yourself.

        Parameters
        ----------
        **conditions : Any
            Key/value pairs that must all be present (with equal values) in a
            record for it to match.

        Returns
        -------
        T
            The first record matching the conditions, or `None` if no match is found.
        """
        conditions_items = conditions.items()

        # `dict.items()` views support `>=` as a superset test, so a record
        # matches when every condition key/value pair appears in it.
        # `next(..., None)` stops fetching as soon as a match is found.
        return next(
            (result for result in self.fetch() if result.items() >= conditions_items),
            None,
        )
21 changes: 6 additions & 15 deletions src/posit/connect/content.py
Original file line number Diff line number Diff line change
Expand Up @@ -295,8 +295,7 @@ def create_repository(
def delete(self) -> None:
"""Delete the content item."""
path = f"v1/content/{self['guid']}"
url = self._ctx.url + path
self._ctx.session.delete(url)
self._ctx.client.delete(path)

def deploy(self) -> tasks.Task:
"""Deploy the content.
Expand All @@ -315,8 +314,7 @@ def deploy(self) -> tasks.Task:
None
"""
path = f"v1/content/{self['guid']}/deploy"
url = self._ctx.url + path
response = self._ctx.session.post(url, json={"bundle_id": None})
response = self._ctx.client.post(path, json={"bundle_id": None})
result = response.json()
ts = tasks.Tasks(self.params)
return ts.get(result["task_id"])
Expand Down Expand Up @@ -442,8 +440,7 @@ def update(
-------
None
"""
url = self._ctx.url + f"v1/content/{self['guid']}"
response = self._ctx.session.patch(url, json=attrs)
response = self._ctx.client.patch(f"v1/content/{self['guid']}", json=attrs)
super().update(**response.json())

# Relationships
Expand Down Expand Up @@ -619,9 +616,7 @@ def create(
-------
ContentItem
"""
path = "v1/content"
url = self._ctx.url + path
response = self._ctx.session.post(url, json=attrs)
response = self._ctx.client.post("v1/content", json=attrs)
return ContentItem(self._ctx, **response.json())

@overload
Expand Down Expand Up @@ -707,9 +702,7 @@ def find(self, include: Optional[str | list[Any]] = None, **conditions) -> List[
if self.owner_guid:
conditions["owner_guid"] = self.owner_guid

path = "v1/content"
url = self._ctx.url + path
response = self._ctx.session.get(url, params=conditions)
response = self._ctx.client.get("v1/content", params=conditions)
return [
ContentItem(
self._ctx,
Expand Down Expand Up @@ -880,7 +873,5 @@ def get(self, guid: str) -> ContentItem:
-------
ContentItem
"""
path = f"v1/content/{guid}"
url = self._ctx.url + path
response = self._ctx.session.get(url)
response = self._ctx.client.get(f"v1/content/{guid}")
return ContentItem(self._ctx, **response.json())
38 changes: 15 additions & 23 deletions src/posit/connect/groups.py
Original file line number Diff line number Diff line change
Expand Up @@ -62,9 +62,7 @@ def delete(self) -> None:
group.delete()
```
"""
path = f"v1/groups/{self['guid']}"
url = self._ctx.url + path
self._ctx.session.delete(url)
self._ctx.client.delete(f"v1/groups/{self['guid']}")


class GroupMembers(Resources):
Expand Down Expand Up @@ -129,9 +127,10 @@ def add(self, user: Optional[User] = None, /, *, user_guid: Optional[str] = None
if not user_guid:
raise ValueError("`user_guid=` should not be empty.")

path = f"v1/groups/{self._group_guid}/members"
url = self._ctx.url + path
self._ctx.session.post(url, json={"user_guid": user_guid})
self._ctx.client.post(
f"v1/groups/{self._group_guid}/members",
json={"user_guid": user_guid},
)

@overload
def delete(self, user: User, /) -> None: ...
Expand Down Expand Up @@ -189,9 +188,7 @@ def delete(self, user: Optional[User] = None, /, *, user_guid: Optional[str] = N
if not user_guid:
raise ValueError("`user_guid=` should not be empty.")

path = f"v1/groups/{self._group_guid}/members/{user_guid}"
url = self._ctx.url + path
self._ctx.session.delete(url)
self._ctx.client.delete(f"v1/groups/{self._group_guid}/members/{user_guid}")

def find(self) -> list[User]:
"""Find group members.
Expand Down Expand Up @@ -222,8 +219,7 @@ def find(self) -> list[User]:
from .users import User

path = f"v1/groups/{self._group_guid}/members"
url = self._ctx.url + path
paginator = Paginator(self._ctx.session, url)
paginator = Paginator(self._ctx, path)
member_dicts = paginator.fetch_results()

# For each member in the group
Expand Down Expand Up @@ -254,9 +250,10 @@ def count(self) -> int:
--------
* https://docs.posit.co/connect/api/#get-/v1/groups/-group_guid-/members
"""
path = f"v1/groups/{self._group_guid}/members"
url = self._ctx.url + path
response = self._ctx.session.get(url, params={"page_size": 1})
response = self._ctx.client.get(
f"v1/groups/{self._group_guid}/members",
params={"page_size": 1},
)
result = response.json()
return result["total"]

Expand Down Expand Up @@ -307,9 +304,7 @@ def create(self, **kwargs) -> Group:
-------
Group
"""
path = "v1/groups"
url = self._ctx.url + path
response = self._ctx.session.post(url, json=kwargs)
response = self._ctx.client.post("v1/groups", json=kwargs)
return Group(self._ctx, **response.json())

@overload
Expand Down Expand Up @@ -339,8 +334,7 @@ def find(self, **kwargs):
* https://docs.posit.co/connect/api/#get-/v1/groups
"""
path = "v1/groups"
url = self._ctx.url + path
paginator = Paginator(self._ctx.session, url, params=kwargs)
paginator = Paginator(self._ctx, path, params=kwargs)
results = paginator.fetch_results()
return [
Group(
Expand Down Expand Up @@ -377,8 +371,7 @@ def find_one(self, **kwargs) -> Group | None:
* https://docs.posit.co/connect/api/#get-/v1/groups
"""
path = "v1/groups"
url = self._ctx.url + path
paginator = Paginator(self._ctx.session, url, params=kwargs)
paginator = Paginator(self._ctx, path, params=kwargs)
pages = paginator.fetch_pages()
results = (result for page in pages for result in page.results)
groups = (
Expand All @@ -405,8 +398,7 @@ def get(self, guid: str) -> Group:
--------
* https://docs.posit.co/connect/api/#get-/v1/groups
"""
url = self._ctx.url + f"v1/groups/{guid}"
response = self._ctx.session.get(url)
response = self._ctx.client.get(f"v1/groups/{guid}")
return Group(
self._ctx,
**response.json(),
Expand Down
16 changes: 8 additions & 8 deletions src/posit/connect/paginator.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
from typing import TYPE_CHECKING, Generator, List

if TYPE_CHECKING:
import requests
from .context import Context

# The maximum page size supported by the API.
_MAX_PAGE_SIZE = 500
Expand Down Expand Up @@ -43,15 +43,15 @@ class Paginator:

def __init__(
self,
session: requests.Session,
url: str,
ctx: Context,
path: str,
params: dict | None = None,
) -> None:
if params is None:
params = {}
self.session = session
self.url = url
self.params = params
self._ctx = ctx
self._path = path
self._params = params

def fetch_results(self) -> List[dict]:
"""
Expand Down Expand Up @@ -106,9 +106,9 @@ def fetch_page(self, page_number: int) -> Page:

"""
params = {
**self.params,
**self._params,
"page_number": page_number,
"page_size": _MAX_PAGE_SIZE,
}
response = self.session.get(self.url, params=params)
response = self._ctx.client.get(self._path, params=params)
return Page(**response.json())
3 changes: 1 addition & 2 deletions src/posit/connect/resources.py
Original file line number Diff line number Diff line change
Expand Up @@ -167,8 +167,7 @@ def find_by(self, **conditions) -> Any | None:

class _PaginatedResourceSequence(_ResourceSequence):
def fetch(self, **conditions):
url = self._ctx.url + self._path
paginator = Paginator(self._ctx.session, url, dict(**conditions))
paginator = Paginator(self._ctx, self._path, dict(**conditions))
for page in paginator.fetch_pages():
resources = []
results = page.results
Expand Down
Loading
Loading