diff --git a/literalai/__init__.py b/literalai/__init__.py
index 7d668dd..bb17064 100644
--- a/literalai/__init__.py
+++ b/literalai/__init__.py
@@ -1,6 +1,8 @@
from literalai.client import AsyncLiteralClient, LiteralClient
from literalai.evaluation.dataset import Dataset
from literalai.evaluation.dataset_item import DatasetItem
+from literalai.evaluation.dataset_experiment import DatasetExperiment, DatasetExperimentItem
+from literalai.prompt_engineering.prompt import Prompt
from literalai.my_types import * # noqa
from literalai.observability.generation import (
BaseGeneration,
@@ -27,5 +29,8 @@
"Dataset",
"Attachment",
"DatasetItem",
+ "DatasetExperiment",
+ "DatasetExperimentItem",
+ "Prompt",
"__version__",
]
diff --git a/literalai/api/README.md b/literalai/api/README.md
new file mode 100644
index 0000000..f94fc94
--- /dev/null
+++ b/literalai/api/README.md
@@ -0,0 +1,7 @@
+# Literal AI API
+
+This module contains the APIs used to interact directly with the Literal AI platform.
+
+`BaseLiteralAPI` declares all the method prototypes and is the source of truth for documentation.
+
+`LiteralAPI` and `AsyncLiteralAPI` inherit from `BaseLiteralAPI`; they are the synchronous and asynchronous APIs, respectively.
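+
+A minimal usage sketch (assuming a valid API key; the value below is a placeholder):
+
+```python
+import asyncio
+
+from literalai import AsyncLiteralClient, LiteralClient
+
+# Synchronous access: LiteralClient exposes a LiteralAPI instance under `.api`.
+client = LiteralClient(api_key="your_api_key_here")
+print(client.api.get_users(first=5))
+
+# Asynchronous access: AsyncLiteralClient exposes an AsyncLiteralAPI instance
+# with the same methods, awaited.
+async_client = AsyncLiteralClient(api_key="your_api_key_here")
+print(asyncio.run(async_client.api.get_users(first=5)))
+```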
diff --git a/literalai/api/__init__.py b/literalai/api/__init__.py
index b771092..39f0033 100644
--- a/literalai/api/__init__.py
+++ b/literalai/api/__init__.py
@@ -1,2628 +1,4 @@
-import logging
-import os
-import uuid
-from typing import (
- TYPE_CHECKING,
- Any,
- Callable,
- Dict,
- List,
- Literal,
- Optional,
- TypeVar,
- Union,
- cast,
-)
+from literalai.api.synchronous import LiteralAPI
+from literalai.api.asynchronous import AsyncLiteralAPI
-from typing_extensions import deprecated
-
-from literalai.api.attachment_helpers import (
- AttachmentUpload,
- create_attachment_helper,
- delete_attachment_helper,
- get_attachment_helper,
- update_attachment_helper,
-)
-from literalai.api.dataset_helpers import (
- add_generation_to_dataset_helper,
- add_step_to_dataset_helper,
- create_dataset_helper,
- create_dataset_item_helper,
- create_experiment_helper,
- create_experiment_item_helper,
- delete_dataset_helper,
- delete_dataset_item_helper,
- get_dataset_helper,
- get_dataset_item_helper,
- update_dataset_helper,
-)
-from literalai.api.generation_helpers import (
- create_generation_helper,
- get_generations_helper,
-)
-from literalai.api.prompt_helpers import (
- PromptRollout,
- create_prompt_helper,
- create_prompt_lineage_helper,
- create_prompt_variant_helper,
- get_prompt_ab_testing_helper,
- get_prompt_helper,
- get_prompt_lineage_helper,
- update_prompt_ab_testing_helper,
-)
-from literalai.api.score_helpers import (
- ScoreUpdate,
- check_scores_finite,
- create_score_helper,
- create_scores_query_builder,
- delete_score_helper,
- get_scores_helper,
- update_score_helper,
-)
-from literalai.api.step_helpers import (
- create_step_helper,
- delete_step_helper,
- get_step_helper,
- get_steps_helper,
- send_steps_helper,
- update_step_helper,
-)
-from literalai.api.thread_helpers import (
- create_thread_helper,
- delete_thread_helper,
- get_thread_helper,
- get_threads_helper,
- list_threads_helper,
- update_thread_helper,
- upsert_thread_helper,
-)
-from literalai.api.user_helpers import (
- create_user_helper,
- delete_user_helper,
- get_user_helper,
- get_users_helper,
- update_user_helper,
-)
-from literalai.context import active_steps_var, active_thread_var
-from literalai.evaluation.dataset import Dataset, DatasetType
-from literalai.evaluation.dataset_experiment import (
- DatasetExperiment,
- DatasetExperimentItem,
-)
-from literalai.observability.filter import (
- generations_filters,
- generations_order_by,
- scores_filters,
- scores_order_by,
- steps_filters,
- steps_order_by,
- threads_filters,
- threads_order_by,
- users_filters,
-)
-from literalai.prompt_engineering.prompt import Prompt, ProviderSettings
-
-if TYPE_CHECKING:
- from typing import Tuple # noqa: F401
-
-import httpx
-
-from literalai.my_types import Environment, PaginatedResponse
-from literalai.observability.generation import (
- ChatGeneration,
- CompletionGeneration,
- GenerationMessage,
-)
-from literalai.observability.step import (
- Attachment,
- Score,
- ScoreDict,
- ScoreType,
- Step,
- StepDict,
- StepType,
-)
-
-logger = logging.getLogger(__name__)
-
-
-def _prepare_variables(variables: Dict[str, Any]) -> Dict[str, Any]:
- """
- Recursively checks and converts bytes objects in variables.
- """
-
- def handle_bytes(item):
- if isinstance(item, bytes):
- return "STRIPPED_BINARY_DATA"
- elif isinstance(item, dict):
- return {k: handle_bytes(v) for k, v in item.items()}
- elif isinstance(item, list):
- return [handle_bytes(i) for i in item]
- elif isinstance(item, tuple):
- return tuple(handle_bytes(i) for i in item)
- return item
-
- return handle_bytes(variables)
-
-
-class BaseLiteralAPI:
- def __init__(
- self,
- api_key: Optional[str] = None,
- url: Optional[str] = None,
- environment: Optional[Environment] = None,
- ):
- if url and url[-1] == "/":
- url = url[:-1]
-
- if api_key is None:
- raise Exception("LITERAL_API_KEY not set")
- if url is None:
- raise Exception("LITERAL_API_URL not set")
-
- self.api_key = api_key
- self.url = url
-
- if environment:
- os.environ["LITERAL_ENV"] = environment
-
- self.graphql_endpoint = self.url + "/api/graphql"
- self.rest_endpoint = self.url + "/api"
-
- @property
- def headers(self):
- from literalai.version import __version__
-
- h = {
- "Content-Type": "application/json",
- "x-api-key": self.api_key,
- "x-client-name": "py-literal-client",
- "x-client-version": __version__,
- }
-
- if env := os.getenv("LITERAL_ENV"):
- h["x-env"] = env
-
- return h
-
-
-class LiteralAPI(BaseLiteralAPI):
- """
- ```python
- from literalai import LiteralClient
- # Initialize the client
- literalai_client = LiteralClient(api_key="your_api_key_here")
- # Access the API's methods
- print(literalai_client.api)
- ```
- """
-
- R = TypeVar("R")
-
- def make_gql_call(
- self, description: str, query: str, variables: Dict[str, Any]
- ) -> Dict:
- def raise_error(error):
- logger.error(f"Failed to {description}: {error}")
- raise Exception(error)
-
- variables = _prepare_variables(variables)
- with httpx.Client(follow_redirects=True) as client:
- response = client.post(
- self.graphql_endpoint,
- json={"query": query, "variables": variables},
- headers=self.headers,
- timeout=10,
- )
-
- try:
- response.raise_for_status()
- except httpx.HTTPStatusError:
- raise_error(f"Failed to {description}: {response.text}")
-
- try:
- json = response.json()
- except ValueError as e:
- raise_error(
- f"""Failed to parse JSON response: {
- e}, content: {response.content!r}"""
- )
-
- if json.get("errors"):
- raise_error(json["errors"])
-
- if json.get("data"):
- if isinstance(json["data"], dict):
- for key, value in json["data"].items():
- if value and value.get("ok") is False:
- raise_error(
- f"""Failed to {description}: {
- value.get('message')}"""
- )
-
- return json
-
- # This should not be reached, exceptions should be thrown beforehand
- # Added because of mypy
- raise Exception("Unknown error")
-
- def make_rest_call(self, subpath: str, body: Dict[str, Any]) -> Dict:
- with httpx.Client(follow_redirects=True) as client:
- response = client.post(
- self.rest_endpoint + subpath,
- json=body,
- headers=self.headers,
- timeout=20,
- )
-
- try:
- response.raise_for_status()
- except httpx.HTTPStatusError:
- message = f"Failed to call {subpath}: {response.text}"
- logger.error(message)
- raise Exception(message)
-
- try:
- return response.json()
- except ValueError as e:
- raise ValueError(
- f"""Failed to parse JSON response: {
- e}, content: {response.content!r}"""
- )
-
- def gql_helper(
- self,
- query: str,
- description: str,
- variables: Dict,
- process_response: Callable[..., R],
- ) -> R:
- response = self.make_gql_call(description, query, variables)
- return process_response(response)
-
- # User API
-
- def get_users(
- self,
- first: Optional[int] = None,
- after: Optional[str] = None,
- before: Optional[str] = None,
- filters: Optional[users_filters] = None,
- ):
- """
- Retrieves a list of users based on pagination and optional filters.
-
- Args:
- first (Optional[int]): The number of users to retrieve.
- after (Optional[str]): A cursor for use in pagination, fetching records after this cursor.
- before (Optional[str]): A cursor for use in pagination, fetching records before this cursor.
- filters (Optional[users_filters]): Filters to apply to the user query.
-
- Returns:
- Dict: A dictionary containing the queried user data.
- """
- return self.gql_helper(*get_users_helper(first, after, before, filters))
-
- def get_user(self, id: Optional[str] = None, identifier: Optional[str] = None):
- """
- Retrieves a user based on the provided ID or identifier.
-
- Args:
- id (Optional[str]): The unique ID of the user.
- identifier (Optional[str]): A unique identifier for the user, such as a username or email.
-
- Returns:
- The user data as returned by the GraphQL helper function.
- """
- return self.gql_helper(*get_user_helper(id, identifier))
-
- def create_user(self, identifier: str, metadata: Optional[Dict] = None):
- """
- Creates a new user with the specified identifier and optional metadata.
-
- Args:
- identifier (str): A unique identifier for the user, such as a username or email.
- metadata (Optional[Dict]): Additional data associated with the user.
-
- Returns:
- The result of the GraphQL call to create a user.
- """
- return self.gql_helper(*create_user_helper(identifier, metadata))
-
- def update_user(
- self, id: str, identifier: Optional[str] = None, metadata: Optional[Dict] = None
- ):
- """
- Updates an existing user identified by the given ID, with optional new identifier and metadata.
-
- Args:
- id (str): The unique ID of the user to update.
- identifier (Optional[str]): A new identifier for the user, such as a username or email.
- metadata (Optional[Dict]): New or updated metadata for the user.
-
- Returns:
- The result of the GraphQL call to update the user.
- """
- return self.gql_helper(*update_user_helper(id, identifier, metadata))
-
- def delete_user(self, id: str):
- """
- Deletes a user identified by the given ID.
-
- Args:
- id (str): The unique ID of the user to delete.
-
- Returns:
- The result of the GraphQL call to delete the user.
- """
- return self.gql_helper(*delete_user_helper(id))
-
- def get_or_create_user(self, identifier: str, metadata: Optional[Dict] = None):
- """
- Retrieves a user by their identifier, or creates a new user if they do not exist.
-
- Args:
- identifier (str): The identifier of the user to retrieve or create.
- metadata (Optional[Dict]): Metadata to associate with the user if they are created.
-
- Returns:
- The existing or newly created user data.
- """
- user = self.get_user(identifier=identifier)
- if user:
- return user
-
- return self.create_user(identifier, metadata)
-
- # Thread API
-
- def get_threads(
- self,
- first: Optional[int] = None,
- after: Optional[str] = None,
- before: Optional[str] = None,
- filters: Optional[threads_filters] = None,
- order_by: Optional[threads_order_by] = None,
- step_types_to_keep: Optional[List[StepType]] = None,
- ):
- """
- Fetches a list of threads based on pagination and optional filters.
-
- Args:
- first (Optional[int]): Number of threads to fetch.
- after (Optional[str]): Cursor for pagination, fetch threads after this cursor.
- before (Optional[str]): Cursor for pagination, fetch threads before this cursor.
- filters (Optional[threads_filters]): Filters to apply on the threads query.
- order_by (Optional[threads_order_by]): Order by clause for threads.
- step_types_to_keep (Optional[List[StepType]]): If set, only steps of the corresponding types
- will be returned.
-
- Returns:
- A list of threads that match the criteria.
- """
- return self.gql_helper(
- *get_threads_helper(
- first, after, before, filters, order_by, step_types_to_keep
- )
- )
-
- def list_threads(
- self,
- first: Optional[int] = None,
- after: Optional[str] = None,
- before: Optional[str] = None,
- filters: Optional[threads_filters] = None,
- order_by: Optional[threads_order_by] = None,
- ):
- """
- Lists threads based on pagination and optional filters, similar to get_threads but may include additional processing.
-
- Args:
- first (Optional[int]): Number of threads to list.
- after (Optional[str]): Cursor for pagination, list threads after this cursor.
- before (Optional[str]): Cursor for pagination, list threads before this cursor.
- filters (Optional[threads_filters]): Filters to apply on the threads listing.
- order_by (Optional[threads_order_by]): Order by clause for threads.
-
- Returns:
- A list of threads that match the criteria.
- """
- return self.gql_helper(
- *list_threads_helper(first, after, before, filters, order_by)
- )
-
- def get_thread(self, id: str):
- """
- Retrieves a single thread by its ID.
-
- Args:
- id (str): The unique identifier of the thread.
-
- Returns:
- The thread corresponding to the provided ID.
- """
- return self.gql_helper(*get_thread_helper(id))
-
- def create_thread(
- self,
- name: Optional[str] = None,
- metadata: Optional[Dict] = None,
- participant_id: Optional[str] = None,
- tags: Optional[List[str]] = None,
- ):
- """
- Creates a new thread with the specified details.
-
- Args:
- name (Optional[str]): Name of the thread.
- metadata (Optional[Dict]): Metadata associated with the thread.
- participant_id (Optional[str]): Identifier for the participant.
- tags (Optional[List[str]]): List of tags associated with the thread.
-
- Returns:
- The newly created thread.
- """
- return self.gql_helper(
- *create_thread_helper(name, metadata, participant_id, tags)
- )
-
- def upsert_thread(
- self,
- id: str,
- name: Optional[str] = None,
- metadata: Optional[Dict] = None,
- participant_id: Optional[str] = None,
- tags: Optional[List[str]] = None,
- ):
- """
- Updates an existing thread or creates a new one if it does not exist.
-
- Args:
- id (str): The unique identifier of the thread.
- name (Optional[str]): Name of the thread.
- metadata (Optional[Dict]): Metadata associated with the thread.
- participant_id (Optional[str]): Identifier for the participant.
- tags (Optional[List[str]]): List of tags associated with the thread.
-
- Returns:
- The updated or newly created thread.
- """
- return self.gql_helper(
- *upsert_thread_helper(id, name, metadata, participant_id, tags)
- )
-
- def update_thread(
- self,
- id: str,
- name: Optional[str] = None,
- metadata: Optional[Dict] = None,
- participant_id: Optional[str] = None,
- tags: Optional[List[str]] = None,
- ):
- """
- Updates the specified details of an existing thread.
-
- Args:
- id (str): The unique identifier of the thread to update.
- name (Optional[str]): New name of the thread.
- metadata (Optional[Dict]): New metadata for the thread.
- participant_id (Optional[str]): New identifier for the participant.
- tags (Optional[List[str]]): New list of tags for the thread.
-
- Returns:
- The updated thread.
- """
- return self.gql_helper(
- *update_thread_helper(id, name, metadata, participant_id, tags)
- )
-
- def delete_thread(self, id: str):
- """
- Deletes a thread identified by its ID.
-
- Args:
- id (str): The unique identifier of the thread to delete.
-
- Returns:
- The result of the deletion operation.
- """
- return self.gql_helper(*delete_thread_helper(id))
-
- # Score API
-
- def get_scores(
- self,
- first: Optional[int] = None,
- after: Optional[str] = None,
- before: Optional[str] = None,
- filters: Optional[scores_filters] = None,
- order_by: Optional[scores_order_by] = None,
- ):
- return self.gql_helper(
- *get_scores_helper(first, after, before, filters, order_by)
- )
-
- def create_scores(self, scores: List[ScoreDict]):
- check_scores_finite(scores)
-
- query = create_scores_query_builder(scores)
- variables = {}
- for id, score in enumerate(scores):
- for k, v in score.items():
- variables[f"{k}_{id}"] = v
-
- def process_response(response):
- return [x for x in response["data"].values()]
-
- return self.gql_helper(query, "create scores", variables, process_response)
-
- def create_score(
- self,
- name: str,
- value: float,
- type: ScoreType,
- step_id: Optional[str] = None,
- generation_id: Optional[str] = None,
- dataset_experiment_item_id: Optional[str] = None,
- comment: Optional[str] = None,
- tags: Optional[List[str]] = None,
- ):
- """
- Creates a single score in the database.
-
- Args:
- name (str): The name of the score.
- value (float): The numerical value of the score.
- type (ScoreType): The type of the score.
- step_id (Optional[str]): The ID of the step associated with the score.
- generation_id (Optional[str]): The ID of the generation associated with the score.
- dataset_experiment_item_id (Optional[str]): The ID of the dataset experiment item associated with the score.
- comment (Optional[str]): An optional comment about the score.
- tags (Optional[List[str]]): Optional tags associated with the score.
-
- Returns:
- The created Score object.
- """
- if generation_id:
- logger.warning(
- "generation_id is deprecated and will be removed in a future version, please use step_id instead"
- )
- check_scores_finite([{"name": name, "value": value}])
-
- return self.gql_helper(
- *create_score_helper(
- name,
- value,
- type,
- step_id,
- dataset_experiment_item_id,
- comment,
- tags,
- )
- )
-
- def update_score(
- self,
- id: str,
- update_params: ScoreUpdate,
- ):
- """
- Updates a score identified by its ID with new parameters.
-
- Args:
- id (str): The unique identifier of the score to update.
- update_params (ScoreUpdate): A dictionary of parameters to update in the score.
-
- Returns:
- The result of the update operation.
- """
- return self.gql_helper(*update_score_helper(id, update_params))
-
- def delete_score(self, id: str):
- """
- Deletes a score identified by its ID.
-
- Args:
- id (str): The unique identifier of the score to delete.
-
- Returns:
- The result of the deletion operation.
- """
- return self.gql_helper(*delete_score_helper(id))
-
- # Attachment API
-
- def upload_file(
- self,
- content: Union[bytes, str],
- thread_id: Optional[str] = None,
- mime: Optional[str] = "application/octet-stream",
- ) -> Dict:
- """
- Uploads a file to the server.
-
- Args:
- content (Union[bytes, str]): The content of the file to upload.
- thread_id (Optional[str]): The ID of the thread associated with the file.
- mime (Optional[str]): The MIME type of the file. Defaults to 'application/octet-stream'.
-
- Returns:
- Dict: A dictionary containing the object key and URL of the uploaded file, or None values if the upload fails.
- """
- id = str(uuid.uuid4())
- body = {"fileName": id, "contentType": mime}
- if thread_id:
- body["threadId"] = thread_id
-
- path = "/api/upload/file"
-
- with httpx.Client(follow_redirects=True) as client:
- response = client.post(
- f"{self.url}{path}",
- json=body,
- headers=self.headers,
- )
- if response.status_code >= 400:
- reason = response.text
- logger.error(f"Failed to sign upload url: {reason}")
- return {"object_key": None, "url": None}
- json_res = response.json()
- method = "put" if "put" in json_res else "post"
- request_dict: Dict[str, Any] = json_res.get(method, {})
- url: Optional[str] = request_dict.get("url")
-
- if not url:
- raise Exception("Invalid server response")
- headers: Optional[Dict] = request_dict.get("headers")
- fields: Dict = request_dict.get("fields", {})
- object_key: Optional[str] = fields.get("key")
- upload_type: Literal["raw", "multipart"] = cast(
- Literal["raw", "multipart"], request_dict.get("uploadType", "multipart")
- )
- signed_url: Optional[str] = json_res.get("signedUrl")
-
- # Prepare form data
- form_data = (
- {}
- ) # type: Dict[str, Union[Tuple[Union[str, None], Any], Tuple[Union[str, None], Any, Any]]]
- for field_name, field_value in fields.items():
- form_data[field_name] = (None, field_value)
-
- # Add file to the form_data
- # Note: The content_type parameter is not needed here, as the correct MIME type should be set
- # in the 'Content-Type' field from upload_details
- form_data["file"] = (id, content, mime)
-
- with httpx.Client(follow_redirects=True) as client:
- if upload_type == "raw":
- upload_response = client.request(
- url=url,
- headers=headers,
- method=method,
- data=content, # type: ignore
- )
- else:
- upload_response = client.request(
- url=url,
- headers=headers,
- method=method,
- files=form_data,
- ) # type: ignore
- try:
- upload_response.raise_for_status()
- return {"object_key": object_key, "url": signed_url}
- except Exception as e:
- logger.error(f"Failed to upload file: {str(e)}")
- return {"object_key": None, "url": None}
-
- def create_attachment(
- self,
- thread_id: Optional[str] = None,
- step_id: Optional[str] = None,
- id: Optional[str] = None,
- metadata: Optional[Dict] = None,
- mime: Optional[str] = None,
- name: Optional[str] = None,
- object_key: Optional[str] = None,
- url: Optional[str] = None,
- content: Optional[Union[bytes, str]] = None,
- path: Optional[str] = None,
- ) -> "Attachment":
- """
- Creates an attachment associated with a thread and step, potentially uploading file content.
-
- Args:
- thread_id (str): The ID of the thread to which the attachment is linked.
- step_id (str): The ID of the step to which the attachment is linked.
- id (Optional[str]): The ID of the attachment, if updating an existing one.
- metadata (Optional[Dict]): Metadata associated with the attachment.
- mime (Optional[str]): MIME type of the file, if content is provided.
- name (Optional[str]): Name of the attachment.
- object_key (Optional[str]): Object key of the uploaded file, if already known.
- url (Optional[str]): URL of the uploaded file, if already known.
- content (Optional[Union[bytes, str]]): File content to upload.
- path (Optional[str]): Path where the file should be stored.
-
- Returns:
- literalai.observability.step.Attachment: The created or updated attachment object.
- """
- if not thread_id:
- if active_thread := active_thread_var.get(None):
- thread_id = active_thread.id
-
- if not step_id:
- if active_steps := active_steps_var.get([]):
- step_id = active_steps[-1].id
- else:
- raise Exception("No step_id provided and no active step found.")
-
- (
- query,
- description,
- variables,
- content,
- process_response,
- ) = create_attachment_helper(
- thread_id=thread_id,
- step_id=step_id,
- id=id,
- metadata=metadata,
- mime=mime,
- name=name,
- object_key=object_key,
- url=url,
- content=content,
- path=path,
- )
-
- if content:
- uploaded = self.upload_file(content=content, thread_id=thread_id, mime=mime)
-
- if uploaded["object_key"] is None or uploaded["url"] is None:
- raise Exception("Failed to upload file")
-
- object_key = uploaded["object_key"]
- if object_key:
- variables["objectKey"] = object_key
- else:
- variables["url"] = uploaded["url"]
-
- response = self.make_gql_call(description, query, variables)
- return process_response(response)
-
- def update_attachment(self, id: str, update_params: AttachmentUpload):
- """
- Updates an existing attachment with new parameters.
-
- Args:
- id (str): The unique identifier of the attachment to update.
- update_params (AttachmentUpload): The parameters to update in the attachment.
-
- Returns:
- The result of the update operation.
- """
- return self.gql_helper(*update_attachment_helper(id, update_params))
-
- def get_attachment(self, id: str):
- """
- Retrieves an attachment by its ID.
-
- Args:
- id (str): The unique identifier of the attachment to retrieve.
-
- Returns:
- The attachment data as returned by the GraphQL helper function.
- """
- return self.gql_helper(*get_attachment_helper(id))
-
- def delete_attachment(self, id: str):
- """
- Deletes an attachment identified by its ID.
-
- Args:
- id (str): The unique identifier of the attachment to delete.
-
- Returns:
- The result of the deletion operation.
- """
- return self.gql_helper(*delete_attachment_helper(id))
-
- # Step API
-
- def create_step(
- self,
- thread_id: Optional[str] = None,
- type: Optional[StepType] = "undefined",
- start_time: Optional[str] = None,
- end_time: Optional[str] = None,
- input: Optional[Dict] = None,
- output: Optional[Dict] = None,
- metadata: Optional[Dict] = None,
- parent_id: Optional[str] = None,
- name: Optional[str] = None,
- tags: Optional[List[str]] = None,
- root_run_id: Optional[str] = None,
- ):
- """
- Creates a new step with the specified parameters.
-
- Args:
- thread_id (Optional[str]): The ID of the thread this step is associated with.
- type (Optional[StepType]): The type of the step, defaults to "undefined".
- start_time (Optional[str]): The start time of the step.
- end_time (Optional[str]): The end time of the step.
- input (Optional[Dict]): Input data for the step.
- output (Optional[Dict]): Output data from the step.
- metadata (Optional[Dict]): Metadata associated with the step.
- parent_id (Optional[str]): The ID of the parent step, if any.
- name (Optional[str]): The name of the step.
- tags (Optional[List[str]]): Tags associated with the step.
- root_run_id (Optional[str]): The ID of the root run, if any.
-
- Returns:
- The result of the GraphQL helper function for creating a step.
- """
- return self.gql_helper(
- *create_step_helper(
- thread_id=thread_id,
- type=type,
- start_time=start_time,
- end_time=end_time,
- input=input,
- output=output,
- metadata=metadata,
- parent_id=parent_id,
- name=name,
- tags=tags,
- root_run_id=root_run_id,
- )
- )
-
- def update_step(
- self,
- id: str,
- type: Optional[StepType] = None,
- input: Optional[str] = None,
- output: Optional[str] = None,
- metadata: Optional[Dict] = None,
- name: Optional[str] = None,
- tags: Optional[List[str]] = None,
- start_time: Optional[str] = None,
- end_time: Optional[str] = None,
- parent_id: Optional[str] = None,
- ):
- """
- Updates an existing step identified by its ID with new parameters.
-
- Args:
- id (str): The unique identifier of the step to update.
- type (Optional[StepType]): The type of the step.
- input (Optional[str]): Input data for the step.
- output (Optional[str]): Output data from the step.
- metadata (Optional[Dict]): Metadata associated with the step.
- name (Optional[str]): The name of the step.
- tags (Optional[List[str]]): Tags associated with the step.
- start_time (Optional[str]): The start time of the step.
- end_time (Optional[str]): The end time of the step.
- parent_id (Optional[str]): The ID of the parent step, if any.
-
- Returns:
- The result of the GraphQL helper function for updating a step.
- """
- return self.gql_helper(
- *update_step_helper(
- id=id,
- type=type,
- input=input,
- output=output,
- metadata=metadata,
- name=name,
- tags=tags,
- start_time=start_time,
- end_time=end_time,
- parent_id=parent_id,
- )
- )
-
- def get_steps(
- self,
- first: Optional[int] = None,
- after: Optional[str] = None,
- before: Optional[str] = None,
- filters: Optional[steps_filters] = None,
- order_by: Optional[steps_order_by] = None,
- ) -> PaginatedResponse[Step]:
- """
- Fetches a list of steps based on pagination and optional filters.
-
- Args:
- first (Optional[int]): Number of steps to fetch.
- after (Optional[str]): Cursor for pagination, fetch steps after this cursor.
- before (Optional[str]): Cursor for pagination, fetch steps before this cursor.
- filters (Optional[steps_filters]): Filters to apply on the steps query.
- order_by (Optional[steps_order_by]): Order by clause for steps.
-
- Returns:
- A list of steps that match the criteria.
- """
- return self.gql_helper(
- *get_steps_helper(first, after, before, filters, order_by)
- )
-
- def get_step(
- self,
- id: str,
- ):
- """
- Retrieves a step by its ID.
-
- Args:
- id (str): The unique identifier of the step to retrieve.
-
- Returns:
- The step data as returned by the GraphQL helper function.
- """
- return self.gql_helper(*get_step_helper(id=id))
-
- def delete_step(
- self,
- id: str,
- ):
- """
- Deletes a step identified by its ID.
-
- Args:
- id (str): The unique identifier of the step to delete.
-
- Returns:
- The result of the deletion operation.
- """
- return self.gql_helper(*delete_step_helper(id=id))
-
- def send_steps(self, steps: List[Union[StepDict, "Step"]]):
- """
- Sends a list of steps to be processed.
-
- Args:
- steps (List[Union[StepDict, Step]]): A list of steps or step dictionaries to send.
-
- Returns:
- The result of the GraphQL helper function for sending steps.
- """
- return self.gql_helper(*send_steps_helper(steps=steps))
-
- # Generation API
-
- def get_generations(
- self,
- first: Optional[int] = None,
- after: Optional[str] = None,
- before: Optional[str] = None,
- filters: Optional[generations_filters] = None,
- order_by: Optional[generations_order_by] = None,
- ):
- """
- Fetches a list of generations based on pagination and optional filters.
-
- Args:
- first (Optional[int]): The number of generations to retrieve.
- after (Optional[str]): A cursor for use in pagination, fetching records after this cursor.
- before (Optional[str]): A cursor for use in pagination, fetching records before this cursor.
- filters (Optional[generations_filters]): Filters to apply to the generations query.
- order_by (Optional[generations_order_by]): Order by clause for generations.
-
- Returns:
- A list of generations that match the criteria.
- """
- return self.gql_helper(
- *get_generations_helper(first, after, before, filters, order_by)
- )
-
- def create_generation(
- self, generation: Union[ChatGeneration, CompletionGeneration]
- ):
- """
- Creates a new generation, either a chat or completion type.
-
- ```py
- from literalai.observability.generation import ChatGeneration
- from literalai import LiteralClient
-
- literalai_client = LiteralClient(api_key="your_api_key_here")
-
- example_generation = ChatGeneration(
- messages=[
- {
- "role": "user",
- "content": "Hello, how can I help you today?"
- },
- ],
- message_completion={
- "role": "assistant",
- "content": "Sure, I can help with that. What do you need to know?"
- },
- model="gpt-4o-mini",
- provider="OpenAI"
- )
-
- literalai_client.api.create_generation(example_generation)
- ```
-
- Args:
- generation (Union[ChatGeneration, CompletionGeneration]): The generation data to create.
-
- Returns:
- The result of the creation operation.
- """
- return self.gql_helper(*create_generation_helper(generation))
-
- # Dataset API
-
- def create_dataset(
- self,
- name: str,
- description: Optional[str] = None,
- metadata: Optional[Dict] = None,
- type: DatasetType = "key_value",
- ):
- """
- Creates a new dataset with the specified properties.
-
- Args:
- name (str): The name of the dataset.
- description (Optional[str]): A description of the dataset.
- metadata (Optional[Dict]): Additional metadata for the dataset.
- type (DatasetType): The type of the dataset, defaults to "key_value".
-
- Returns:
- The result of the dataset creation operation.
- """
- return self.gql_helper(
- *create_dataset_helper(self, name, description, metadata, type)
- )
-
- def get_dataset(
- self, id: Optional[str] = None, name: Optional[str] = None
- ) -> Optional[Dataset]:
- """
- Retrieves a dataset by its ID or name.
-
- Args:
- id (Optional[str]): The unique identifier of the dataset.
- name (Optional[str]): The name of the dataset.
-
- Returns:
- The dataset data as returned by the REST helper function.
- """
- subpath, _, variables, process_response = get_dataset_helper(
- self, id=id, name=name
- )
- response = self.make_rest_call(subpath, variables)
- return process_response(response)
-
- def update_dataset(
- self,
- id: str,
- name: Optional[str] = None,
- description: Optional[str] = None,
- metadata: Optional[Dict] = None,
- ):
- """
- Updates an existing dataset identified by its ID with new properties.
-
- Args:
- id (str): The unique identifier of the dataset to update.
- name (Optional[str]): A new name for the dataset.
- description (Optional[str]): A new description for the dataset.
- metadata (Optional[Dict]): New or updated metadata for the dataset.
-
- Returns:
- The result of the dataset update operation.
- """
- return self.gql_helper(
- *update_dataset_helper(self, id, name, description, metadata)
- )
-
- def delete_dataset(self, id: str):
- """
- Deletes a dataset identified by its ID.
-
- Args:
- id (str): The unique identifier of the dataset to delete.
-
- Returns:
- The result of the deletion operation.
- """
- return self.gql_helper(*delete_dataset_helper(self, id))
-
- # Dataset Experiment APIs
-
- def create_experiment(
- self,
- name: str,
- dataset_id: Optional[str] = None,
- prompt_variant_id: Optional[str] = None,
- params: Optional[Dict] = None,
- ) -> "DatasetExperiment":
- """
- Creates a new experiment associated with a specific dataset.
-
- Args:
- name (str): The name of the experiment.
- dataset_id (Optional[str]): The unique identifier of the dataset.
- prompt_variant_id (Optional[str]): The identifier of the prompt variant to associate to the experiment.
- params (Optional[Dict]): Additional parameters for the experiment.
-
- Returns:
- DatasetExperiment: The newly created experiment object.
- """
- return self.gql_helper(
- *create_experiment_helper(
- api=self,
- name=name,
- dataset_id=dataset_id,
- prompt_variant_id=prompt_variant_id,
- params=params,
- )
- )
-
- def create_experiment_item(
- self, experiment_item: DatasetExperimentItem
- ) -> DatasetExperimentItem:
- """
- Creates an experiment item within an existing experiment.
-
- Args:
- experiment_item (DatasetExperimentItem): The experiment item to be created, containing all necessary data.
-
- Returns:
- DatasetExperimentItem: The newly created experiment item with scores attached.
- """
- # Create the dataset experiment item
- result = self.gql_helper(
- *create_experiment_item_helper(
- dataset_experiment_id=experiment_item.dataset_experiment_id,
- dataset_item_id=experiment_item.dataset_item_id,
- experiment_run_id=experiment_item.experiment_run_id,
- input=experiment_item.input,
- output=experiment_item.output,
- )
- )
-
- for score in experiment_item.scores:
- score["datasetExperimentItemId"] = result.id
-
- # Create the scores and add to experiment item.
- result.scores = self.create_scores(experiment_item.scores)
-
- return result
-
- # Dataset Item API
-
- def create_dataset_item(
- self,
- dataset_id: str,
- input: Dict,
- expected_output: Optional[Dict] = None,
- metadata: Optional[Dict] = None,
- ):
- """
- Creates a new dataset item with the specified properties.
-
- Args:
- dataset_id (str): The unique identifier of the dataset.
- input (Dict): The input data for the dataset item.
- expected_output (Optional[Dict]): The expected output data for the dataset item.
- metadata (Optional[Dict]): Additional metadata for the dataset item.
-
- Returns:
- Dict: The result of the dataset item creation operation.
- """
- return self.gql_helper(
- *create_dataset_item_helper(dataset_id, input, expected_output, metadata)
- )
-
- def get_dataset_item(self, id: str):
- """
- Retrieves a dataset item by its unique identifier.
-
- Args:
- id (str): The unique identifier of the dataset item to retrieve.
-
- Returns:
- Dict: The dataset item data.
- """
- return self.gql_helper(*get_dataset_item_helper(id))
-
- def delete_dataset_item(self, id: str):
- """
- Deletes a dataset item by its unique identifier.
-
- Args:
- id (str): The unique identifier of the dataset item to delete.
-
- Returns:
- Dict: The result of the dataset item deletion operation.
- """
- return self.gql_helper(*delete_dataset_item_helper(id))
-
- def add_step_to_dataset(
- self, dataset_id: str, step_id: str, metadata: Optional[Dict] = None
- ):
- """
- Adds a step to a dataset.
-
- Args:
- dataset_id (str): The unique identifier of the dataset.
- step_id (str): The unique identifier of the step to add.
- metadata (Optional[Dict]): Additional metadata for the step being added.
-
- Returns:
- Dict: The result of adding the step to the dataset.
- """
- return self.gql_helper(
- *add_step_to_dataset_helper(dataset_id, step_id, metadata)
- )
-
- def add_generation_to_dataset(
- self, dataset_id: str, generation_id: str, metadata: Optional[Dict] = None
- ):
- """
- Adds a generation to a dataset.
-
- Args:
- dataset_id (str): The unique identifier of the dataset.
- generation_id (str): The unique identifier of the generation to add.
- metadata (Optional[Dict]): Additional metadata for the generation being added.
-
- Returns:
- Dict: The result of adding the generation to the dataset.
- """
- return self.gql_helper(
- *add_generation_to_dataset_helper(dataset_id, generation_id, metadata)
- )
-
- # Prompt API
-
- def get_or_create_prompt_lineage(
- self, name: str, description: Optional[str] = None
- ):
- """
- Creates a prompt lineage with the specified name and optional description.
- If the prompt lineage with that name already exists, it is returned.
-
- Args:
- name (str): The name of the prompt lineage.
- description (Optional[str]): An optional description of the prompt lineage.
-
- Returns:
- Dict: The result of the prompt lineage creation operation.
- """
- return self.gql_helper(*create_prompt_lineage_helper(name, description))
-
- @deprecated('Please use "get_or_create_prompt_lineage" instead.')
- def create_prompt_lineage(self, name: str, description: Optional[str] = None):
- return self.get_or_create_prompt_lineage(name, description)
-
- def get_or_create_prompt(
- self,
- name: str,
- template_messages: List[GenerationMessage],
- settings: Optional[ProviderSettings] = None,
- tools: Optional[List[Dict]] = None,
- ) -> Prompt:
- """
- A `Prompt` is fully defined by its `name`, `template_messages`, `settings` and tools.
- If a prompt already exists for the given arguments, it is returned.
- Otherwise, a new prompt is created.
-
- Args:
- name (str): The name of the prompt to retrieve or create.
- template_messages (List[GenerationMessage]): A list of template messages for the prompt.
- settings (Optional[Dict]): Optional settings for the prompt.
- tools (Optional[List[Dict]]): Optional tool options for the model
-
- Returns:
- Prompt: The prompt that was retrieved or created.
- """
- lineage = self.get_or_create_prompt_lineage(name)
- lineage_id = lineage["id"]
- return self.gql_helper(
- *create_prompt_helper(self, lineage_id, template_messages, settings, tools)
- )
-
- @deprecated('Please use "get_or_create_prompt" instead.')
- def create_prompt(
- self,
- name: str,
- template_messages: List[GenerationMessage],
- settings: Optional[ProviderSettings] = None,
- ) -> Prompt:
- return self.get_or_create_prompt(name, template_messages, settings)
-
- def get_prompt(
- self,
- id: Optional[str] = None,
- name: Optional[str] = None,
- version: Optional[int] = None,
- ) -> Prompt:
- """
- Gets a prompt either by:
- - `id`
- - or `name` and (optional) `version`
-
- Either the `id` or the `name` must be provided.
- If both are provided, the `id` is used.
-
- Args:
- id (str): The unique identifier of the prompt to retrieve.
- name (str): The name of the prompt to retrieve.
- version (Optional[int]): The version number of the prompt to retrieve.
-
- Returns:
- Prompt: The prompt with the given identifier or name.
- """
- if id:
- return self.gql_helper(*get_prompt_helper(self, id=id))
- elif name:
- return self.gql_helper(*get_prompt_helper(self, name=name, version=version))
- else:
- raise ValueError("Either the `id` or the `name` must be provided.")
-
- def create_prompt_variant(
- self,
- name: str,
- template_messages: List[GenerationMessage],
- settings: Optional[ProviderSettings] = None,
- tools: Optional[List[Dict]] = None,
- ) -> Optional[str]:
- """
- Creates a prompt variation for an experiment.
- This variation is not an official version until manually saved.
-
- Args:
- name (str): The name of the prompt to retrieve or create.
- template_messages (List[GenerationMessage]): A list of template messages for the prompt.
- settings (Optional[Dict]): Optional settings for the prompt.
- tools (Optional[List[Dict]]): Optional tool options for the model
-
- Returns:
- prompt_variant_id: The prompt variant id to link with the experiment.
- """
- lineage = self.gql_helper(*get_prompt_lineage_helper(name))
- lineage_id = lineage["id"] if lineage else None
- return self.gql_helper(
- *create_prompt_variant_helper(
- lineage_id, template_messages, settings, tools
- )
- )
-
- def get_prompt_ab_testing(self, name: str) -> List[PromptRollout]:
- """
- Get the A/B testing configuration for a prompt lineage.
-
- Args:
- name (str): The name of the prompt lineage.
- Returns:
- List[PromptRollout]
- """
- return self.gql_helper(*get_prompt_ab_testing_helper(name=name))
-
- def update_prompt_ab_testing(
- self, name: str, rollouts: List[PromptRollout]
- ) -> Dict:
- """
- Update the A/B testing configuration for a prompt lineage.
-
- Args:
- name (str): The name of the prompt lineage.
- rollouts (List[PromptRollout]): The percentage rollout for each prompt version.
-
- Returns:
- Dict
- """
- return self.gql_helper(
- *update_prompt_ab_testing_helper(name=name, rollouts=rollouts)
- )
-
- # Misc API
- def get_my_project_id(self):
- """
- Retrieves the projectId associated with the API key.
-
- Returns:
- The projectId associated with the API key.
- """
- response = self.make_rest_call("/my-project", {})
- return response["projectId"]
-
-
-class AsyncLiteralAPI(BaseLiteralAPI):
- """
- ```python
- from literalai import AsyncLiteralClient
- # Initialize the client
- async_literalai_client = AsyncLiteralClient(api_key="your_api_key_here")
- # Access the API's methods
- print(async_literalai_client.api)
- ```
- """
-
- R = TypeVar("R")
-
- async def make_gql_call(
- self, description: str, query: str, variables: Dict[str, Any]
- ) -> Dict:
- def raise_error(error):
- logger.error(f"Failed to {description}: {error}")
- raise Exception(error)
-
- variables = _prepare_variables(variables)
-
- async with httpx.AsyncClient(follow_redirects=True) as client:
- response = await client.post(
- self.graphql_endpoint,
- json={"query": query, "variables": variables},
- headers=self.headers,
- timeout=10,
- )
-
- try:
- response.raise_for_status()
- except httpx.HTTPStatusError:
- raise_error(f"Failed to {description}: {response.text}")
-
- try:
- json = response.json()
- except ValueError as e:
- raise_error(
- f"""Failed to parse JSON response: {
- e}, content: {response.content!r}"""
- )
-
- if json.get("errors"):
- raise_error(json["errors"])
-
- if json.get("data"):
- if isinstance(json["data"], dict):
- for key, value in json["data"].items():
- if value and value.get("ok") is False:
- raise_error(
- f"""Failed to {description}: {
- value.get('message')}"""
- )
-
- return json
-
- # This should not be reached, exceptions should be thrown beforehand
- # Added because of mypy
- raise Exception("Unknown error")
-
- async def make_rest_call(self, subpath: str, body: Dict[str, Any]) -> Dict:
- async with httpx.AsyncClient(follow_redirects=True) as client:
- response = await client.post(
- self.rest_endpoint + subpath,
- json=body,
- headers=self.headers,
- timeout=20,
- )
-
- try:
- response.raise_for_status()
- except httpx.HTTPStatusError:
- message = f"Failed to call {subpath}: {response.text}"
- logger.error(message)
- raise Exception(message)
-
- try:
- return response.json()
- except ValueError as e:
- raise ValueError(
- f"""Failed to parse JSON response: {
- e}, content: {response.content!r}"""
- )
-
- async def gql_helper(
- self,
- query: str,
- description: str,
- variables: Dict,
- process_response: Callable[..., R],
- ) -> R:
- response = await self.make_gql_call(description, query, variables)
- return process_response(response)
-
- async def get_users(
- self,
- first: Optional[int] = None,
- after: Optional[str] = None,
- before: Optional[str] = None,
- filters: Optional[users_filters] = None,
- ):
- """
- Asynchronously fetches a list of users based on pagination and optional filters.
-
- Args:
- first (Optional[int]): The number of users to retrieve.
- after (Optional[str]): A cursor for use in pagination, fetching records after this cursor.
- before (Optional[str]): A cursor for use in pagination, fetching records before this cursor.
- filters (Optional[users_filters]): Filters to apply to the user query.
-
- Returns:
- The result of the GraphQL helper function for fetching users.
- """
- return await self.gql_helper(*get_users_helper(first, after, before, filters))
-
- # User API
-
- async def get_user(
- self, id: Optional[str] = None, identifier: Optional[str] = None
- ):
- """
- Asynchronously retrieves a user by ID or identifier.
-
- Args:
- id (Optional[str]): The unique identifier of the user to retrieve.
- identifier (Optional[str]): An alternative identifier for the user.
-
- Returns:
- The result of the GraphQL helper function for fetching a user.
- """
- return await self.gql_helper(*get_user_helper(id, identifier))
-
- async def create_user(self, identifier: str, metadata: Optional[Dict] = None):
- """
- Asynchronously creates a new user with the specified identifier and optional metadata.
-
- Args:
- identifier (str): The identifier for the new user.
- metadata (Optional[Dict]): Additional metadata for the user.
-
- Returns:
- The result of the GraphQL helper function for creating a user.
- """
- return await self.gql_helper(*create_user_helper(identifier, metadata))
-
- async def update_user(
- self, id: str, identifier: Optional[str] = None, metadata: Optional[Dict] = None
- ):
- """
- Asynchronously updates an existing user identified by ID with new identifier and/or metadata.
-
- Args:
- id (str): The unique identifier of the user to update.
- identifier (Optional[str]): New identifier for the user.
- metadata (Optional[Dict]): New metadata for the user.
-
- Returns:
- The result of the GraphQL helper function for updating a user.
- """
- return await self.gql_helper(*update_user_helper(id, identifier, metadata))
-
- async def delete_user(self, id: str):
- """
- Asynchronously deletes a user identified by ID.
-
- Args:
- id (str): The unique identifier of the user to delete.
-
- Returns:
- The result of the GraphQL helper function for deleting a user.
- """
- return await self.gql_helper(*delete_user_helper(id))
-
- async def get_or_create_user(
- self, identifier: str, metadata: Optional[Dict] = None
- ):
- """
- Asynchronously retrieves a user by identifier or creates a new one if it does not exist.
-
- Args:
- identifier (str): The identifier of the user to retrieve or create.
- metadata (Optional[Dict]): Metadata for the user if creation is necessary.
-
- Returns:
- The existing or newly created user.
- """
- user = await self.get_user(identifier=identifier)
- if user:
- return user
-
- return await self.create_user(identifier, metadata)
-
- # Thread API
-
- async def get_threads(
- self,
- first: Optional[int] = None,
- after: Optional[str] = None,
- before: Optional[str] = None,
- filters: Optional[threads_filters] = None,
- order_by: Optional[threads_order_by] = None,
- step_types_to_keep: Optional[List[StepType]] = None,
- ):
- """
- Asynchronously fetches a list of threads based on pagination and optional filters and ordering.
-
- Args:
- first (Optional[int]): The number of threads to retrieve.
- after (Optional[str]): A cursor for use in pagination, fetching records after this cursor.
- before (Optional[str]): A cursor for use in pagination, fetching records before this cursor.
- filters (Optional[threads_filters]): Filters to apply to the thread query.
- order_by (Optional[threads_order_by]): Ordering criteria for the threads.
- step_types_to_keep (Optional[List[StepType]]): If set, only steps of the corresponding types
- will be returned.
-
- Returns:
- The result of the GraphQL helper function for fetching threads.
- """
- return await self.gql_helper(
- *get_threads_helper(
- first, after, before, filters, order_by, step_types_to_keep
- )
- )
-
- async def list_threads(
- self,
- first: Optional[int] = None,
- after: Optional[str] = None,
- before: Optional[str] = None,
- filters: Optional[threads_filters] = None,
- order_by: Optional[threads_order_by] = None,
- ):
- """
- Asynchronously lists threads based on pagination and optional filters and ordering, similar to `get_threads`.
-
- Args:
- first (Optional[int]): The number of threads to list.
- after (Optional[str]): A cursor for use in pagination, fetching records after this cursor.
- before (Optional[str]): A cursor for use in pagination, fetching records before this cursor.
- filters (Optional[threads_filters]): Filters to apply to the thread query.
- order_by (Optional[threads_order_by]): Ordering criteria for the threads.
-
- Returns:
- The result of the GraphQL helper function for listing threads.
- """
- return await self.gql_helper(
- *list_threads_helper(first, after, before, filters, order_by)
- )
-
- async def get_thread(self, id: str):
- """
- Asynchronously retrieves a thread by its ID.
-
- Args:
- id (str): The unique identifier of the thread to retrieve.
-
- Returns:
- The result of the GraphQL helper function for fetching a thread.
- """
- return await self.gql_helper(*get_thread_helper(id))
-
- async def create_thread(
- self,
- name: Optional[str] = None,
- metadata: Optional[Dict] = None,
- participant_id: Optional[str] = None,
- tags: Optional[List[str]] = None,
- ):
- """
- Asynchronously creates a new thread with specified details.
-
- Args:
- name (Optional[str]): The name of the thread.
- metadata (Optional[Dict]): Metadata associated with the thread.
- participant_id (Optional[str]): Identifier for the participant associated with the thread.
- tags (Optional[List[str]]): Tags associated with the thread.
-
- Returns:
- The result of the GraphQL helper function for creating a thread.
- """
- return await self.gql_helper(
- *create_thread_helper(name, metadata, participant_id, tags)
- )
-
- async def upsert_thread(
- self,
- id: str,
- name: Optional[str] = None,
- metadata: Optional[Dict] = None,
- participant_id: Optional[str] = None,
- tags: Optional[List[str]] = None,
- ):
- """
- Asynchronously updates or inserts a thread based on the provided ID.
-
- Args:
- id (str): The unique identifier of the thread to upsert.
- name (Optional[str]): The name of the thread.
- metadata (Optional[Dict]): Metadata associated with the thread.
- participant_id (Optional[str]): Identifier for the participant associated with the thread.
- tags (Optional[List[str]]): Tags associated with the thread.
-
- Returns:
- The result of the GraphQL helper function for upserting a thread.
- """
- return await self.gql_helper(
- *upsert_thread_helper(id, name, metadata, participant_id, tags)
- )
-
- async def update_thread(
- self,
- id: str,
- name: Optional[str] = None,
- metadata: Optional[Dict] = None,
- participant_id: Optional[str] = None,
- tags: Optional[List[str]] = None,
- ):
- """
- Asynchronously updates an existing thread identified by ID with new details.
-
- Args:
- id (str): The unique identifier of the thread to update.
- name (Optional[str]): New name of the thread.
- metadata (Optional[Dict]): New metadata for the thread.
- participant_id (Optional[str]): New identifier for the participant.
- tags (Optional[List[str]]): New list of tags for the thread.
-
- Returns:
- The result of the GraphQL helper function for updating a thread.
- """
- return await self.gql_helper(
- *update_thread_helper(id, name, metadata, participant_id, tags)
- )
-
- async def delete_thread(self, id: str):
- """
- Asynchronously deletes a thread identified by its ID.
-
- Args:
- id (str): The unique identifier of the thread to delete.
-
- Returns:
- The result of the GraphQL helper function for deleting a thread.
- """
- return await self.gql_helper(*delete_thread_helper(id))
-
- # Score API
-
- async def get_scores(
- self,
- first: Optional[int] = None,
- after: Optional[str] = None,
- before: Optional[str] = None,
- filters: Optional[scores_filters] = None,
- order_by: Optional[scores_order_by] = None,
- ):
- """
- Asynchronously fetches scores based on pagination and optional filters.
-
- Args:
- first (Optional[int]): The number of scores to retrieve.
- after (Optional[str]): A cursor for use in pagination, fetching records after this cursor.
- before (Optional[str]): A cursor for use in pagination, fetching records before this cursor.
- filters (Optional[scores_filters]): Filters to apply to the scores query.
- order_by (Optional[scores_order_by]): Ordering options for the scores.
-
- Returns:
- The result of the GraphQL helper function for fetching scores.
- """
- return await self.gql_helper(
- *get_scores_helper(first, after, before, filters, order_by)
- )
-
- async def create_scores(self, scores: List[ScoreDict]):
- """
- Asynchronously creates multiple scores.
-
- Args:
- scores (List[literalai.observability.step.ScoreDict]): A list of dictionaries representing the scores to be created.
-
- Returns:
- The result of the GraphQL helper function for creating scores.
- """
- check_scores_finite(scores)
-
- query = create_scores_query_builder(scores)
- variables = {}
-
- for id, score in enumerate(scores):
- for k, v in score.items():
- variables[f"{k}_{id}"] = v
-
- def process_response(response):
- return [Score.from_dict(x) for x in response["data"].values()]
-
- return await self.gql_helper(
- query, "create scores", variables, process_response
- )
-
- async def create_score(
- self,
- name: str,
- value: float,
- type: ScoreType,
- step_id: Optional[str] = None,
- generation_id: Optional[str] = None,
- dataset_experiment_item_id: Optional[str] = None,
- comment: Optional[str] = None,
- tags: Optional[List[str]] = None,
- ):
- """
- Asynchronously creates a single score.
-
- Args:
- name (str): The name of the score.
- value (float): The numerical value of the score.
- type (ScoreType): The type of the score.
- step_id (Optional[str]): The ID of the step associated with the score.
- generation_id: Deprecated - use step_id
- dataset_experiment_item_id (Optional[str]): The ID of the dataset experiment item associated with the score.
- comment (Optional[str]): A comment associated with the score.
- tags (Optional[List[str]]): A list of tags associated with the score.
-
- Returns:
- The result of the GraphQL helper function for creating a score.
- """
- if generation_id:
- logger.warning(
- "generation_id is deprecated and will be removed in a future version, please use step_id instead"
- )
- check_scores_finite([{"name": name, "value": value}])
-
- return await self.gql_helper(
- *create_score_helper(
- name,
- value,
- type,
- step_id,
- dataset_experiment_item_id,
- comment,
- tags,
- )
- )
-
- async def update_score(
- self,
- id: str,
- update_params: ScoreUpdate,
- ):
- """
- Asynchronously updates a score identified by its ID.
-
- Args:
- id (str): The unique identifier of the score to update.
- update_params (ScoreUpdate): A dictionary of parameters to update.
-
- Returns:
- The result of the GraphQL helper function for updating a score.
- """
- return await self.gql_helper(*update_score_helper(id, update_params))
-
- async def delete_score(self, id: str):
- """
- Asynchronously deletes a score identified by its ID.
-
- Args:
- id (str): The unique identifier of the score to delete.
-
- Returns:
- The result of the GraphQL helper function for deleting a score.
- """
- return await self.gql_helper(*delete_score_helper(id))
-
- # Attachment API
-
- async def upload_file(
- self,
- content: Union[bytes, str],
- thread_id: Optional[str] = None,
- mime: Optional[str] = "application/octet-stream",
- ) -> Dict:
- """
- Asynchronously uploads a file to the server.
-
- Args:
- content (Union[bytes, str]): The content of the file to upload.
- thread_id (str): The ID of the thread associated with the file.
- mime (Optional[str]): The MIME type of the file.
-
- Returns:
- A dictionary containing the object key and URL of the uploaded file.
- """
- id = str(uuid.uuid4())
- body = {"fileName": id, "contentType": mime, "threadId": thread_id}
-
- path = "/api/upload/file"
-
- async with httpx.AsyncClient(follow_redirects=True) as client:
- response = await client.post(
- f"{self.url}{path}",
- json=body,
- headers=self.headers,
- )
- if response.status_code >= 400:
- reason = response.text
- logger.error(f"Failed to sign upload url: {reason}")
- return {"object_key": None, "url": None}
- json_res = response.json()
- method = "put" if "put" in json_res else "post"
- request_dict: Dict[str, Any] = json_res.get(method, {})
- url: Optional[str] = request_dict.get("url")
-
- if not url:
- raise Exception("Invalid server response")
- headers: Optional[Dict] = request_dict.get("headers")
- fields: Dict = request_dict.get("fields", {})
- object_key: Optional[str] = fields.get("key")
- upload_type: Literal["raw", "multipart"] = cast(
- Literal["raw", "multipart"], request_dict.get("uploadType", "multipart")
- )
- signed_url: Optional[str] = json_res.get("signedUrl")
-
- # Prepare form data
- form_data = (
- {}
- ) # type: Dict[str, Union[Tuple[Union[str, None], Any], Tuple[Union[str, None], Any, Any]]]
- for field_name, field_value in fields.items():
- form_data[field_name] = (None, field_value)
-
- # Add file to the form_data
- # Note: The content_type parameter is not needed here, as the correct MIME type should be set
- # in the 'Content-Type' field from upload_details
- form_data["file"] = (id, content, mime)
-
- async with httpx.AsyncClient(follow_redirects=True) as client:
- if upload_type == "raw":
- upload_response = await client.request(
- url=url,
- headers=headers,
- method=method,
- data=content, # type: ignore
- )
- else:
- upload_response = await client.request(
- url=url,
- headers=headers,
- method=method,
- files=form_data,
- ) # type: ignore
- try:
- upload_response.raise_for_status()
- return {"object_key": object_key, "url": signed_url}
- except Exception as e:
- logger.error(f"Failed to upload file: {str(e)}")
- return {"object_key": None, "url": None}
-
- async def create_attachment(
- self,
- thread_id: str,
- step_id: str,
- id: Optional[str] = None,
- metadata: Optional[Dict] = None,
- mime: Optional[str] = None,
- name: Optional[str] = None,
- object_key: Optional[str] = None,
- url: Optional[str] = None,
- content: Optional[Union[bytes, str]] = None,
- path: Optional[str] = None,
- ) -> "Attachment":
- """
- Asynchronously creates an attachment and uploads it if content is provided.
-
- Args:
- thread_id (str): The ID of the thread associated with the attachment.
- step_id (str): The ID of the step associated with the attachment.
- id (Optional[str]): An optional unique identifier for the attachment.
- metadata (Optional[Dict]): Optional metadata for the attachment.
- mime (Optional[str]): The MIME type of the attachment.
- name (Optional[str]): The name of the attachment.
- object_key (Optional[str]): The object key for the attachment if already uploaded.
- url (Optional[str]): The URL of the attachment if already uploaded.
- content (Optional[Union[bytes, str]]): The content of the attachment to upload.
- path (Optional[str]): The file path of the attachment if it is to be uploaded from a local file.
-
- Returns:
- The attachment object created after the upload and creation process.
- """
- (
- query,
- description,
- variables,
- content,
- process_response,
- ) = create_attachment_helper(
- thread_id=thread_id,
- step_id=step_id,
- id=id,
- metadata=metadata,
- mime=mime,
- name=name,
- object_key=object_key,
- url=url,
- content=content,
- path=path,
- )
-
- if content:
- uploaded = await self.upload_file(
- content=content, thread_id=thread_id, mime=mime
- )
-
- if uploaded["object_key"] is None or uploaded["url"] is None:
- raise Exception("Failed to upload file")
-
- object_key = uploaded["object_key"]
- if object_key:
- variables["objectKey"] = object_key
- else:
- variables["url"] = uploaded["url"]
-
- response = await self.make_gql_call(description, query, variables)
- return process_response(response)
-
- async def update_attachment(self, id: str, update_params: AttachmentUpload):
- """
- Asynchronously updates an attachment identified by its ID.
-
- Args:
- id (str): The unique identifier of the attachment to update.
- update_params (AttachmentUpload): A dictionary of parameters to update the attachment.
-
- Returns:
- The result of the GraphQL helper function for updating an attachment.
- """
- return await self.gql_helper(*update_attachment_helper(id, update_params))
-
- async def get_attachment(self, id: str):
- """
- Asynchronously retrieves an attachment by its ID.
-
- Args:
- id (str): The unique identifier of the attachment to retrieve.
-
- Returns:
- The result of the GraphQL helper function for fetching an attachment.
- """
- return await self.gql_helper(*get_attachment_helper(id))
-
- async def delete_attachment(self, id: str):
- """
- Asynchronously deletes an attachment identified by its ID.
-
- Args:
- id (str): The unique identifier of the attachment to delete.
-
- Returns:
- The result of the GraphQL helper function for deleting an attachment.
- """
- return await self.gql_helper(*delete_attachment_helper(id))
-
- # Step API
-
- async def create_step(
- self,
- thread_id: Optional[str] = None,
- type: Optional[StepType] = "undefined",
- start_time: Optional[str] = None,
- end_time: Optional[str] = None,
- input: Optional[Dict] = None,
- output: Optional[Dict] = None,
- metadata: Optional[Dict] = None,
- parent_id: Optional[str] = None,
- name: Optional[str] = None,
- tags: Optional[List[str]] = None,
- root_run_id: Optional[str] = None,
- ):
- """
- Asynchronously creates a new step with the specified parameters.
-
- Args:
- thread_id (Optional[str]): The ID of the thread associated with the step.
- type (Optional[StepType]): The type of the step, defaults to "undefined".
- start_time (Optional[str]): The start time of the step.
- end_time (Optional[str]): The end time of the step.
- input (Optional[Dict]): Input data for the step.
- output (Optional[Dict]): Output data from the step.
- metadata (Optional[Dict]): Metadata associated with the step.
- parent_id (Optional[str]): The ID of the parent step, if any.
- name (Optional[str]): The name of the step.
- tags (Optional[List[str]]): Tags associated with the step.
- root_run_id (Optional[str]): The ID of the root run, if any.
-
- Returns:
- The result of the GraphQL helper function for creating a step.
- """
- return await self.gql_helper(
- *create_step_helper(
- thread_id=thread_id,
- type=type,
- start_time=start_time,
- end_time=end_time,
- input=input,
- output=output,
- metadata=metadata,
- parent_id=parent_id,
- name=name,
- tags=tags,
- root_run_id=root_run_id,
- )
- )
-
- async def update_step(
- self,
- id: str,
- type: Optional[StepType] = None,
- input: Optional[str] = None,
- output: Optional[str] = None,
- metadata: Optional[Dict] = None,
- name: Optional[str] = None,
- tags: Optional[List[str]] = None,
- start_time: Optional[str] = None,
- end_time: Optional[str] = None,
- parent_id: Optional[str] = None,
- ):
- """
- Asynchronously updates an existing step identified by its ID with new parameters.
-
- Args:
- id (str): The unique identifier of the step to update.
- type (Optional[StepType]): The type of the step.
- input (Optional[str]): Input data for the step.
- output (Optional[str]): Output data from the step.
- metadata (Optional[Dict]): Metadata associated with the step.
- name (Optional[str]): The name of the step.
- tags (Optional[List[str]]): Tags associated with the step.
- start_time (Optional[str]): The start time of the step.
- end_time (Optional[str]): The end time of the step.
- parent_id (Optional[str]): The ID of the parent step, if any.
-
- Returns:
- The result of the GraphQL helper function for updating a step.
- """
- return await self.gql_helper(
- *update_step_helper(
- id=id,
- type=type,
- input=input,
- output=output,
- metadata=metadata,
- name=name,
- tags=tags,
- start_time=start_time,
- end_time=end_time,
- parent_id=parent_id,
- )
- )
-
- async def get_steps(
- self,
- first: Optional[int] = None,
- after: Optional[str] = None,
- before: Optional[str] = None,
- filters: Optional[steps_filters] = None,
- order_by: Optional[steps_order_by] = None,
- ) -> PaginatedResponse[Step]:
- return await self.gql_helper(
- *get_steps_helper(first, after, before, filters, order_by)
- )
-
- get_steps.__doc__ = LiteralAPI.get_steps.__doc__
-
- async def get_step(
- self,
- id: str,
- ):
- """
- Asynchronously retrieves a step by its ID.
-
- Args:
- id (str): The unique identifier of the step to retrieve.
-
- Returns:
- The result of the GraphQL helper function for fetching a step.
- """
- return await self.gql_helper(*get_step_helper(id=id))
-
- async def delete_step(
- self,
- id: str,
- ):
- """
- Asynchronously deletes a step identified by its ID.
-
- Args:
- id (str): The unique identifier of the step to delete.
-
- Returns:
- The result of the GraphQL helper function for deleting a step.
- """
- return await self.gql_helper(*delete_step_helper(id=id))
-
- async def send_steps(self, steps: List[Union[StepDict, "Step"]]):
- """
- Asynchronously sends a list of steps to be processed.
-
- Args:
- steps (List[Union[StepDict, Step]]): A list of steps or step dictionaries to send.
-
- Returns:
- The result of the GraphQL helper function for sending steps.
- """
- return await self.gql_helper(*send_steps_helper(steps=steps))
-
- # Generation API
-
- async def get_generations(
- self,
- first: Optional[int] = None,
- after: Optional[str] = None,
- before: Optional[str] = None,
- filters: Optional[generations_filters] = None,
- order_by: Optional[generations_order_by] = None,
- ):
- """
- Asynchronously fetches a list of generations based on pagination and optional filters.
-
- Args:
- first (Optional[int]): The number of generations to retrieve.
- after (Optional[str]): A cursor for use in pagination, fetching records after this cursor.
- before (Optional[str]): A cursor for use in pagination, fetching records before this cursor.
- filters (Optional[generations_filters]): Filters to apply to the generations query.
- order_by (Optional[generations_order_by]): Ordering options for the generations.
-
- Returns:
- The result of the GraphQL helper function for fetching generations.
- """
- return await self.gql_helper(
- *get_generations_helper(first, after, before, filters, order_by)
- )
-
- async def create_generation(
- self, generation: Union[ChatGeneration, CompletionGeneration]
- ):
- """
- Asynchronously creates a new generation with the specified details.
-
- Args:
- generation (Union[ChatGeneration, CompletionGeneration]): The generation data to create.
-
- Returns:
- The result of the GraphQL helper function for creating a generation.
- """
- return await self.gql_helper(*create_generation_helper(generation))
-
- # Dataset API
-
- async def create_dataset(
- self,
- name: str,
- description: Optional[str] = None,
- metadata: Optional[Dict] = None,
- type: DatasetType = "key_value",
- ):
- """
- Asynchronously creates a new dataset with the specified details.
-
- Args:
- name (str): The name of the dataset.
- description (Optional[str]): A description of the dataset.
- metadata (Optional[Dict]): Metadata associated with the dataset.
- type (DatasetType): The type of the dataset, defaults to "key_value".
-
- Returns:
- The result of the GraphQL helper function for creating a dataset.
- """
- sync_api = LiteralAPI(self.api_key, self.url)
- return await self.gql_helper(
- *create_dataset_helper(sync_api, name, description, metadata, type)
- )
-
- async def get_dataset(self, id: Optional[str] = None, name: Optional[str] = None):
- """
- Asynchronously retrieves a dataset by its ID or name.
-
- Args:
- id (Optional[str]): The unique identifier of the dataset to retrieve.
- name (Optional[str]): The name of the dataset to retrieve.
-
- Returns:
- The processed response from the REST API call.
- """
- sync_api = LiteralAPI(self.api_key, self.url)
- subpath, _, variables, process_response = get_dataset_helper(
- sync_api, id=id, name=name
- )
- response = await self.make_rest_call(subpath, variables)
- return process_response(response)
-
- async def update_dataset(
- self,
- id: str,
- name: Optional[str] = None,
- description: Optional[str] = None,
- metadata: Optional[Dict] = None,
- ):
- """
- Asynchronously updates an existing dataset identified by its ID with new details.
-
- Args:
- id (str): The unique identifier of the dataset to update.
- name (Optional[str]): The new name of the dataset.
- description (Optional[str]): A new description for the dataset.
- metadata (Optional[Dict]): New metadata for the dataset.
-
- Returns:
- The result of the GraphQL helper function for updating a dataset.
- """
- sync_api = LiteralAPI(self.api_key, self.url)
- return await self.gql_helper(
- *update_dataset_helper(sync_api, id, name, description, metadata)
- )
-
- async def delete_dataset(self, id: str):
- """
- Asynchronously deletes a dataset identified by its ID.
-
- Args:
- id (str): The unique identifier of the dataset to delete.
-
- Returns:
- The result of the GraphQL helper function for deleting a dataset.
- """
- sync_api = LiteralAPI(self.api_key, self.url)
- return await self.gql_helper(*delete_dataset_helper(sync_api, id))
-
- # Dataset Experiment APIs
-
- async def create_experiment(
- self,
- name: str,
- dataset_id: Optional[str] = None,
- prompt_variant_id: Optional[str] = None,
- params: Optional[Dict] = None,
- ) -> "DatasetExperiment":
- sync_api = LiteralAPI(self.api_key, self.url)
-
- return await self.gql_helper(
- *create_experiment_helper(
- api=sync_api,
- name=name,
- dataset_id=dataset_id,
- prompt_variant_id=prompt_variant_id,
- params=params,
- )
- )
-
- create_experiment.__doc__ = LiteralAPI.create_experiment.__doc__
-
- async def create_experiment_item(
- self, experiment_item: DatasetExperimentItem
- ) -> DatasetExperimentItem:
- """
- Asynchronously creates an item within an experiment.
-
- Args:
- experiment_item (DatasetExperimentItem): The experiment item to be created.
-
- Returns:
- DatasetExperimentItem: The created experiment item with updated scores.
- """
- check_scores_finite(experiment_item.scores)
-
- # Create the dataset experiment item
- result = await self.gql_helper(
- *create_experiment_item_helper(
- dataset_experiment_id=experiment_item.dataset_experiment_id,
- dataset_item_id=experiment_item.dataset_item_id,
- experiment_run_id=experiment_item.experiment_run_id,
- input=experiment_item.input,
- output=experiment_item.output,
- )
- )
-
- for score in experiment_item.scores:
- score["datasetExperimentItemId"] = result.id
-
- # Create the scores and add to experiment item.
- result.scores = await self.create_scores(experiment_item.scores)
-
- return result
-
- # DatasetItem API
-
- async def create_dataset_item(
- self,
- dataset_id: str,
- input: Dict,
- expected_output: Optional[Dict] = None,
- metadata: Optional[Dict] = None,
- ):
- """
- Asynchronously creates a dataset item.
-
- Args:
- dataset_id (str): The unique identifier of the dataset.
- input (Dict): The input data for the dataset item.
- expected_output (Optional[Dict]): The expected output data for the dataset item.
- metadata (Optional[Dict]): Additional metadata for the dataset item.
-
- Returns:
- The result of the GraphQL helper function for creating a dataset item.
- """
- return await self.gql_helper(
- *create_dataset_item_helper(dataset_id, input, expected_output, metadata)
- )
-
- async def get_dataset_item(self, id: str):
- """
- Asynchronously retrieves a dataset item by its ID.
-
- Args:
- id (str): The unique identifier of the dataset item.
-
- Returns:
- The result of the GraphQL helper function for fetching a dataset item.
- """
- return await self.gql_helper(*get_dataset_item_helper(id))
-
- async def delete_dataset_item(self, id: str):
- """
- Asynchronously deletes a dataset item by its ID.
-
- Args:
- id (str): The unique identifier of the dataset item to delete.
-
- Returns:
- The result of the GraphQL helper function for deleting a dataset item.
- """
- return await self.gql_helper(*delete_dataset_item_helper(id))
-
- async def add_step_to_dataset(
- self, dataset_id: str, step_id: str, metadata: Optional[Dict] = None
- ):
- """
- Asynchronously adds a step to a dataset.
-
- Args:
- dataset_id (str): The unique identifier of the dataset.
- step_id (str): The unique identifier of the step to add.
- metadata (Optional[Dict]): Additional metadata for the step being added.
-
- Returns:
- The result of the GraphQL helper function for adding a step to a dataset.
- """
- return await self.gql_helper(
- *add_step_to_dataset_helper(dataset_id, step_id, metadata)
- )
-
- async def add_generation_to_dataset(
- self, dataset_id: str, generation_id: str, metadata: Optional[Dict] = None
- ):
- """
- Asynchronously adds a generation to a dataset.
-
- Args:
- dataset_id (str): The unique identifier of the dataset.
- generation_id (str): The unique identifier of the generation to add.
- metadata (Optional[Dict]): Additional metadata for the generation being added.
-
- Returns:
- The result of the GraphQL helper function for adding a generation to a dataset.
- """
- return await self.gql_helper(
- *add_generation_to_dataset_helper(dataset_id, generation_id, metadata)
- )
-
- # Prompt API
-
- async def get_or_create_prompt_lineage(
- self, name: str, description: Optional[str] = None
- ):
- return await self.gql_helper(*create_prompt_lineage_helper(name, description))
-
- get_or_create_prompt_lineage.__doc__ = (
- LiteralAPI.get_or_create_prompt_lineage.__doc__
- )
-
- @deprecated('Please use "get_or_create_prompt_lineage" instead.')
- async def create_prompt_lineage(self, name: str, description: Optional[str] = None):
- return await self.get_or_create_prompt_lineage(name, description)
-
- async def get_or_create_prompt(
- self,
- name: str,
- template_messages: List[GenerationMessage],
- settings: Optional[ProviderSettings] = None,
- tools: Optional[List[Dict]] = None,
- ) -> Prompt:
- lineage = await self.get_or_create_prompt_lineage(name)
- lineage_id = lineage["id"]
-
- sync_api = LiteralAPI(self.api_key, self.url)
- return await self.gql_helper(
- *create_prompt_helper(
- sync_api, lineage_id, template_messages, settings, tools
- )
- )
-
- get_or_create_prompt.__doc__ = LiteralAPI.get_or_create_prompt.__doc__
-
- @deprecated('Please use "get_or_create_prompt" instead.')
- async def create_prompt(
- self,
- name: str,
- template_messages: List[GenerationMessage],
- settings: Optional[ProviderSettings] = None,
- ):
- return await self.get_or_create_prompt(name, template_messages, settings)
-
- async def create_prompt_variant(
- self,
- name: str,
- template_messages: List[GenerationMessage],
- settings: Optional[ProviderSettings] = None,
- tools: Optional[List[Dict]] = None,
- ) -> Optional[str]:
- """
- Creates a prompt variation for an experiment.
- This variation is not an official version until manually saved.
-
- Args:
- name (str): The name of the prompt to retrieve or create.
- template_messages (List[GenerationMessage]): A list of template messages for the prompt.
- settings (Optional[Dict]): Optional settings for the prompt.
- tools (Optional[List[Dict]]): Optional tool options for the model
-
- Returns:
- prompt_variant_id: The prompt variant id to link with the experiment.
- """
- lineage = await self.gql_helper(*get_prompt_lineage_helper(name))
- lineage_id = lineage["id"] if lineage else None
- return await self.gql_helper(
- *create_prompt_variant_helper(
- lineage_id, template_messages, settings, tools
- )
- )
-
- create_prompt_variant.__doc__ = LiteralAPI.create_prompt_variant.__doc__
-
- async def get_prompt(
- self,
- id: Optional[str] = None,
- name: Optional[str] = None,
- version: Optional[int] = None,
- ) -> Prompt:
- sync_api = LiteralAPI(self.api_key, self.url)
- if id:
- return await self.gql_helper(*get_prompt_helper(sync_api, id=id))
- elif name:
- return await self.gql_helper(
- *get_prompt_helper(sync_api, name=name, version=version)
- )
- else:
- raise ValueError("Either the `id` or the `name` must be provided.")
-
- get_prompt.__doc__ = LiteralAPI.get_prompt.__doc__
-
- async def update_prompt_ab_testing(
- self, name: str, rollouts: List[PromptRollout]
- ) -> Dict:
- return await self.gql_helper(
- *update_prompt_ab_testing_helper(name=name, rollouts=rollouts)
- )
-
- update_prompt_ab_testing.__doc__ = LiteralAPI.update_prompt_ab_testing.__doc__
-
- async def get_prompt_ab_testing(self, name: str) -> List[PromptRollout]:
- return await self.gql_helper(*get_prompt_ab_testing_helper(name=name))
-
- get_prompt_ab_testing.__doc__ = LiteralAPI.get_prompt_ab_testing.__doc__
-
- # Misc API
-
- async def get_my_project_id(self):
- response = await self.make_rest_call("/my-project", {})
- return response["projectId"]
-
- get_my_project_id.__doc__ = LiteralAPI.get_my_project_id.__doc__
+__all__ = ["LiteralAPI", "AsyncLiteralAPI"]
diff --git a/literalai/api/asynchronous.py b/literalai/api/asynchronous.py
new file mode 100644
index 0000000..8d832dd
--- /dev/null
+++ b/literalai/api/asynchronous.py
@@ -0,0 +1,868 @@
+import logging
+import uuid
+
+from typing_extensions import deprecated
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Callable,
+ Dict,
+ List,
+ Literal,
+ Optional,
+ TypeVar,
+ Union,
+ cast,
+)
+
+from literalai.api.base import BaseLiteralAPI, prepare_variables
+
+from literalai.api.helpers.attachment_helpers import (
+ AttachmentUpload,
+ create_attachment_helper,
+ delete_attachment_helper,
+ get_attachment_helper,
+ update_attachment_helper,
+)
+from literalai.api.helpers.dataset_helpers import (
+ add_generation_to_dataset_helper,
+ add_step_to_dataset_helper,
+ create_dataset_helper,
+ create_dataset_item_helper,
+ create_experiment_helper,
+ create_experiment_item_helper,
+ delete_dataset_helper,
+ delete_dataset_item_helper,
+ get_dataset_helper,
+ get_dataset_item_helper,
+ update_dataset_helper,
+)
+from literalai.api.helpers.generation_helpers import (
+ create_generation_helper,
+ get_generations_helper,
+)
+from literalai.api.helpers.prompt_helpers import (
+ PromptRollout,
+ create_prompt_helper,
+ create_prompt_lineage_helper,
+ create_prompt_variant_helper,
+ get_prompt_ab_testing_helper,
+ get_prompt_helper,
+ get_prompt_lineage_helper,
+ update_prompt_ab_testing_helper,
+)
+from literalai.api.helpers.score_helpers import (
+ ScoreUpdate,
+ check_scores_finite,
+ create_score_helper,
+ create_scores_query_builder,
+ delete_score_helper,
+ get_scores_helper,
+ update_score_helper,
+)
+from literalai.api.helpers.step_helpers import (
+ create_step_helper,
+ delete_step_helper,
+ get_step_helper,
+ get_steps_helper,
+ send_steps_helper,
+ update_step_helper,
+)
+from literalai.api.helpers.thread_helpers import (
+ create_thread_helper,
+ delete_thread_helper,
+ get_thread_helper,
+ get_threads_helper,
+ list_threads_helper,
+ update_thread_helper,
+ upsert_thread_helper,
+)
+from literalai.api.helpers.user_helpers import (
+ create_user_helper,
+ delete_user_helper,
+ get_user_helper,
+ get_users_helper,
+ update_user_helper,
+)
+from literalai.api.synchronous import LiteralAPI
+from literalai.context import active_steps_var, active_thread_var
+from literalai.evaluation.dataset import Dataset, DatasetType
+from literalai.evaluation.dataset_experiment import (
+ DatasetExperiment,
+ DatasetExperimentItem,
+)
+from literalai.evaluation.dataset_item import DatasetItem
+from literalai.observability.filter import (
+ generations_filters,
+ generations_order_by,
+ scores_filters,
+ scores_order_by,
+ steps_filters,
+ steps_order_by,
+ threads_filters,
+ threads_order_by,
+ users_filters,
+)
+from literalai.observability.thread import Thread
+from literalai.prompt_engineering.prompt import Prompt, ProviderSettings
+
+if TYPE_CHECKING:
+ from typing import Tuple # noqa: F401
+
+import httpx
+
+from literalai.my_types import PaginatedResponse, User
+from literalai.observability.generation import (
+ BaseGeneration,
+ ChatGeneration,
+ CompletionGeneration,
+ GenerationMessage,
+)
+from literalai.observability.step import (
+ Attachment,
+ Score,
+ ScoreDict,
+ ScoreType,
+ Step,
+ StepDict,
+ StepType,
+)
+
+logger = logging.getLogger(__name__)
+
+
+class AsyncLiteralAPI(BaseLiteralAPI):
+ """
+ ```python
+ from literalai import AsyncLiteralClient
+ # Initialize the client
+ async_literalai_client = AsyncLiteralClient(api_key="your_api_key_here")
+ # Access the API's methods
+ print(async_literalai_client.api)
+ ```
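+
+    A minimal awaited-call sketch (illustrative only; `create_thread` is one of the
+    methods defined below):
+
+    ```python
+    import asyncio
+
+    async def main():
+        thread = await async_literalai_client.api.create_thread(name="demo")
+        print(thread.id)
+
+    asyncio.run(main())
+    ```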
+ """
+
+ R = TypeVar("R")
+
+ async def make_gql_call(
+ self, description: str, query: str, variables: Dict[str, Any]
+ ) -> Dict:
+ def raise_error(error):
+ logger.error(f"Failed to {description}: {error}")
+ raise Exception(error)
+
+ variables = prepare_variables(variables)
+
+ async with httpx.AsyncClient(follow_redirects=True) as client:
+ response = await client.post(
+ self.graphql_endpoint,
+ json={"query": query, "variables": variables},
+ headers=self.headers,
+ timeout=10,
+ )
+
+ try:
+ response.raise_for_status()
+ except httpx.HTTPStatusError:
+ raise_error(f"Failed to {description}: {response.text}")
+
+ try:
+ json = response.json()
+ except ValueError as e:
+            raise_error(
+                f"Failed to parse JSON response: {e}, content: {response.content!r}"
+            )
+
+ if json.get("errors"):
+ raise_error(json["errors"])
+
+ if json.get("data"):
+ if isinstance(json["data"], dict):
+ for _, value in json["data"].items():
+ if value and value.get("ok") is False:
+ raise_error(
+ f"""Failed to {description}: {
+ value.get('message')}"""
+ )
+
+ return json
+
+ async def make_rest_call(self, subpath: str, body: Dict[str, Any]) -> Dict:
+ async with httpx.AsyncClient(follow_redirects=True) as client:
+ response = await client.post(
+ self.rest_endpoint + subpath,
+ json=body,
+ headers=self.headers,
+ timeout=20,
+ )
+
+ try:
+ response.raise_for_status()
+ except httpx.HTTPStatusError:
+ message = f"Failed to call {subpath}: {response.text}"
+ logger.error(message)
+ raise Exception(message)
+
+ try:
+ return response.json()
+ except ValueError as e:
+            raise ValueError(
+                f"Failed to parse JSON response: {e}, content: {response.content!r}"
+            )
+
+ async def gql_helper(
+ self,
+ query: str,
+ description: str,
+ variables: Dict,
+ process_response: Callable[..., R],
+ ) -> R:
+ response = await self.make_gql_call(description, query, variables)
+ return process_response(response)
+
+ ##################################################################################
+ # User APIs #
+ ##################################################################################
+
+ async def get_users(
+ self,
+ first: Optional[int] = None,
+ after: Optional[str] = None,
+ before: Optional[str] = None,
+ filters: Optional[users_filters] = None,
+ ) -> PaginatedResponse["User"]:
+ return await self.gql_helper(*get_users_helper(first, after, before, filters))
+
+ async def get_user(
+ self, id: Optional[str] = None, identifier: Optional[str] = None
+ ) -> "User":
+ return await self.gql_helper(*get_user_helper(id, identifier))
+
+    async def create_user(
+        self, identifier: str, metadata: Optional[Dict] = None
+    ) -> "User":
+ return await self.gql_helper(*create_user_helper(identifier, metadata))
+
+ async def update_user(
+ self, id: str, identifier: Optional[str] = None, metadata: Optional[Dict] = None
+ ) -> "User":
+ return await self.gql_helper(*update_user_helper(id, identifier, metadata))
+
+ async def delete_user(self, id: str) -> Dict:
+ return await self.gql_helper(*delete_user_helper(id))
+
+ async def get_or_create_user(
+ self, identifier: str, metadata: Optional[Dict] = None
+ ) -> "User":
+ user = await self.get_user(identifier=identifier)
+ if user:
+ return user
+
+ return await self.create_user(identifier, metadata)
+
+ ##################################################################################
+ # Thread APIs #
+ ##################################################################################
+
+ async def get_threads(
+ self,
+ first: Optional[int] = None,
+ after: Optional[str] = None,
+ before: Optional[str] = None,
+ filters: Optional[threads_filters] = None,
+ order_by: Optional[threads_order_by] = None,
+ step_types_to_keep: Optional[List[StepType]] = None,
+ ) -> PaginatedResponse["Thread"]:
+ return await self.gql_helper(
+ *get_threads_helper(
+ first, after, before, filters, order_by, step_types_to_keep
+ )
+ )
+
+ async def list_threads(
+ self,
+ first: Optional[int] = None,
+ after: Optional[str] = None,
+ before: Optional[str] = None,
+ filters: Optional[threads_filters] = None,
+ order_by: Optional[threads_order_by] = None,
+ ) -> PaginatedResponse["Thread"]:
+ return await self.gql_helper(
+ *list_threads_helper(first, after, before, filters, order_by)
+ )
+
+ async def get_thread(self, id: str) -> "Thread":
+ return await self.gql_helper(*get_thread_helper(id))
+
+ async def create_thread(
+ self,
+ name: Optional[str] = None,
+ metadata: Optional[Dict] = None,
+ participant_id: Optional[str] = None,
+ tags: Optional[List[str]] = None,
+ ) -> "Thread":
+ return await self.gql_helper(
+ *create_thread_helper(name, metadata, participant_id, tags)
+ )
+
+ async def upsert_thread(
+ self,
+ id: str,
+ name: Optional[str] = None,
+ metadata: Optional[Dict] = None,
+ participant_id: Optional[str] = None,
+ tags: Optional[List[str]] = None,
+ ) -> "Thread":
+ return await self.gql_helper(
+ *upsert_thread_helper(id, name, metadata, participant_id, tags)
+ )
+
+ async def update_thread(
+ self,
+ id: str,
+ name: Optional[str] = None,
+ metadata: Optional[Dict] = None,
+ participant_id: Optional[str] = None,
+ tags: Optional[List[str]] = None,
+ ) -> "Thread":
+ return await self.gql_helper(
+ *update_thread_helper(id, name, metadata, participant_id, tags)
+ )
+
+ async def delete_thread(self, id: str) -> bool:
+ return await self.gql_helper(*delete_thread_helper(id))
+
+ ##################################################################################
+ # Score APIs #
+ ##################################################################################
+
+ async def get_scores(
+ self,
+ first: Optional[int] = None,
+ after: Optional[str] = None,
+ before: Optional[str] = None,
+ filters: Optional[scores_filters] = None,
+ order_by: Optional[scores_order_by] = None,
+ ) -> PaginatedResponse["Score"]:
+ return await self.gql_helper(
+ *get_scores_helper(first, after, before, filters, order_by)
+ )
+
+ async def create_scores(self, scores: List[ScoreDict]) -> List["Score"]:
+ check_scores_finite(scores)
+
+ query = create_scores_query_builder(scores)
+ variables = {}
+
+ for id, score in enumerate(scores):
+ for k, v in score.items():
+ variables[f"{k}_{id}"] = v
+
+ def process_response(response):
+ return [Score.from_dict(x) for x in response["data"].values()]
+
+ return await self.gql_helper(
+ query, "create scores", variables, process_response
+ )
+
+ async def create_score(
+ self,
+ name: str,
+ value: float,
+ type: ScoreType,
+ step_id: Optional[str] = None,
+ generation_id: Optional[str] = None,
+ dataset_experiment_item_id: Optional[str] = None,
+ comment: Optional[str] = None,
+ tags: Optional[List[str]] = None,
+ ) -> "Score":
+ if generation_id:
+ logger.warning(
+ "generation_id is deprecated and will be removed in a future version, please use step_id instead"
+ )
+ check_scores_finite([{"name": name, "value": value}])
+
+ return await self.gql_helper(
+ *create_score_helper(
+ name,
+ value,
+ type,
+ step_id,
+ dataset_experiment_item_id,
+ comment,
+ tags,
+ )
+ )
+
+ async def update_score(
+ self,
+ id: str,
+ update_params: ScoreUpdate,
+ ) -> "Score":
+ return await self.gql_helper(*update_score_helper(id, update_params))
+
+ async def delete_score(self, id: str) -> Dict:
+ return await self.gql_helper(*delete_score_helper(id))
+
+ ##################################################################################
+ # Attachment APIs #
+ ##################################################################################
+
+ async def upload_file(
+ self,
+ content: Union[bytes, str],
+ thread_id: Optional[str] = None,
+ mime: Optional[str] = "application/octet-stream",
+ ) -> Dict:
+ id = str(uuid.uuid4())
+ body = {"fileName": id, "contentType": mime, "threadId": thread_id}
+
+ path = "/api/upload/file"
+
+ async with httpx.AsyncClient(follow_redirects=True) as client:
+ response = await client.post(
+ f"{self.url}{path}",
+ json=body,
+ headers=self.headers,
+ )
+ if response.status_code >= 400:
+ reason = response.text
+ logger.error(f"Failed to sign upload url: {reason}")
+ return {"object_key": None, "url": None}
+ json_res = response.json()
+ method = "put" if "put" in json_res else "post"
+ request_dict: Dict[str, Any] = json_res.get(method, {})
+ url: Optional[str] = request_dict.get("url")
+
+ if not url:
+ raise Exception("Invalid server response")
+ headers: Optional[Dict] = request_dict.get("headers")
+ fields: Dict = request_dict.get("fields", {})
+ object_key: Optional[str] = fields.get("key")
+ upload_type: Literal["raw", "multipart"] = cast(
+ Literal["raw", "multipart"], request_dict.get("uploadType", "multipart")
+ )
+ signed_url: Optional[str] = json_res.get("signedUrl")
+
+ # Prepare form data
+ form_data = (
+ {}
+ ) # type: Dict[str, Union[Tuple[Union[str, None], Any], Tuple[Union[str, None], Any, Any]]]
+ for field_name, field_value in fields.items():
+ form_data[field_name] = (None, field_value)
+
+ # Add file to the form_data
+ # Note: The content_type parameter is not needed here, as the correct MIME type should be set
+ # in the 'Content-Type' field from upload_details
+ form_data["file"] = (id, content, mime)
+
+ async with httpx.AsyncClient(follow_redirects=True) as client:
+ if upload_type == "raw":
+ upload_response = await client.request(
+ url=url,
+ headers=headers,
+ method=method,
+ data=content, # type: ignore
+ )
+ else:
+ upload_response = await client.request(
+ url=url,
+ headers=headers,
+ method=method,
+ files=form_data,
+ ) # type: ignore
+ try:
+ upload_response.raise_for_status()
+ return {"object_key": object_key, "url": signed_url}
+ except Exception as e:
+ logger.error(f"Failed to upload file: {str(e)}")
+ return {"object_key": None, "url": None}
+
+ async def create_attachment(
+ self,
+ thread_id: Optional[str] = None,
+ step_id: Optional[str] = None,
+ id: Optional[str] = None,
+ metadata: Optional[Dict] = None,
+ mime: Optional[str] = None,
+ name: Optional[str] = None,
+ object_key: Optional[str] = None,
+ url: Optional[str] = None,
+ content: Optional[Union[bytes, str]] = None,
+ path: Optional[str] = None,
+ ) -> "Attachment":
+ if not thread_id:
+ if active_thread := active_thread_var.get(None):
+ thread_id = active_thread.id
+
+ if not step_id:
+ if active_steps := active_steps_var.get([]):
+ step_id = active_steps[-1].id
+ else:
+ raise Exception("No step_id provided and no active step found.")
+
+ (
+ query,
+ description,
+ variables,
+ content,
+ process_response,
+ ) = create_attachment_helper(
+ thread_id=thread_id,
+ step_id=step_id,
+ id=id,
+ metadata=metadata,
+ mime=mime,
+ name=name,
+ object_key=object_key,
+ url=url,
+ content=content,
+ path=path,
+ )
+
+ if content:
+ uploaded = await self.upload_file(
+ content=content, thread_id=thread_id, mime=mime
+ )
+
+ if uploaded["object_key"] is None or uploaded["url"] is None:
+ raise Exception("Failed to upload file")
+
+ object_key = uploaded["object_key"]
+ if object_key:
+ variables["objectKey"] = object_key
+ else:
+ variables["url"] = uploaded["url"]
+
+ response = await self.make_gql_call(description, query, variables)
+ return process_response(response)
+
+    async def update_attachment(
+        self, id: str, update_params: AttachmentUpload
+    ) -> "Attachment":
+ return await self.gql_helper(*update_attachment_helper(id, update_params))
+
+ async def get_attachment(self, id: str) -> Optional["Attachment"]:
+ return await self.gql_helper(*get_attachment_helper(id))
+
+ async def delete_attachment(self, id: str) -> Dict:
+ return await self.gql_helper(*delete_attachment_helper(id))
+
+ ##################################################################################
+ # Step APIs #
+ ##################################################################################
+
+ async def create_step(
+ self,
+ thread_id: Optional[str] = None,
+ type: Optional[StepType] = "undefined",
+ start_time: Optional[str] = None,
+ end_time: Optional[str] = None,
+ input: Optional[Dict] = None,
+ output: Optional[Dict] = None,
+ metadata: Optional[Dict] = None,
+ parent_id: Optional[str] = None,
+ name: Optional[str] = None,
+ tags: Optional[List[str]] = None,
+ root_run_id: Optional[str] = None,
+ ) -> "Step":
+ return await self.gql_helper(
+ *create_step_helper(
+ thread_id=thread_id,
+ type=type,
+ start_time=start_time,
+ end_time=end_time,
+ input=input,
+ output=output,
+ metadata=metadata,
+ parent_id=parent_id,
+ name=name,
+ tags=tags,
+ root_run_id=root_run_id,
+ )
+ )
+
+ async def update_step(
+ self,
+ id: str,
+ type: Optional[StepType] = None,
+ input: Optional[str] = None,
+ output: Optional[str] = None,
+ metadata: Optional[Dict] = None,
+ name: Optional[str] = None,
+ tags: Optional[List[str]] = None,
+ start_time: Optional[str] = None,
+ end_time: Optional[str] = None,
+ parent_id: Optional[str] = None,
+ ) -> "Step":
+ return await self.gql_helper(
+ *update_step_helper(
+ id=id,
+ type=type,
+ input=input,
+ output=output,
+ metadata=metadata,
+ name=name,
+ tags=tags,
+ start_time=start_time,
+ end_time=end_time,
+ parent_id=parent_id,
+ )
+ )
+
+ async def get_steps(
+ self,
+ first: Optional[int] = None,
+ after: Optional[str] = None,
+ before: Optional[str] = None,
+ filters: Optional[steps_filters] = None,
+ order_by: Optional[steps_order_by] = None,
+ ) -> PaginatedResponse["Step"]:
+ return await self.gql_helper(
+ *get_steps_helper(first, after, before, filters, order_by)
+ )
+
+ async def get_step(
+ self,
+ id: str,
+ ) -> Optional["Step"]:
+ return await self.gql_helper(*get_step_helper(id=id))
+
+ async def delete_step(
+ self,
+ id: str,
+ ) -> bool:
+ return await self.gql_helper(*delete_step_helper(id=id))
+
+ async def send_steps(self, steps: List[Union["StepDict", "Step"]]):
+ return await self.gql_helper(*send_steps_helper(steps=steps))
+
+ ##################################################################################
+ # Generation APIs #
+ ##################################################################################
+
+ async def get_generations(
+ self,
+ first: Optional[int] = None,
+ after: Optional[str] = None,
+ before: Optional[str] = None,
+ filters: Optional[generations_filters] = None,
+ order_by: Optional[generations_order_by] = None,
+ ) -> PaginatedResponse["BaseGeneration"]:
+ return await self.gql_helper(
+ *get_generations_helper(first, after, before, filters, order_by)
+ )
+
+ async def create_generation(
+ self, generation: Union["ChatGeneration", "CompletionGeneration"]
+ ) -> Union["ChatGeneration", "CompletionGeneration"]:
+ return await self.gql_helper(*create_generation_helper(generation))
+
+ ##################################################################################
+ # Dataset APIs #
+ ##################################################################################
+
+ async def create_dataset(
+ self,
+ name: str,
+ description: Optional[str] = None,
+ metadata: Optional[Dict] = None,
+ type: DatasetType = "key_value",
+ ) -> "Dataset":
+ sync_api = LiteralAPI(self.api_key, self.url)
+ return await self.gql_helper(
+ *create_dataset_helper(sync_api, name, description, metadata, type)
+ )
+
+    async def get_dataset(
+        self, id: Optional[str] = None, name: Optional[str] = None
+    ) -> "Dataset":
+ sync_api = LiteralAPI(self.api_key, self.url)
+ subpath, _, variables, process_response = get_dataset_helper(
+ sync_api, id=id, name=name
+ )
+ response = await self.make_rest_call(subpath, variables)
+ return process_response(response)
+
+ async def update_dataset(
+ self,
+ id: str,
+ name: Optional[str] = None,
+ description: Optional[str] = None,
+ metadata: Optional[Dict] = None,
+ ) -> "Dataset":
+ sync_api = LiteralAPI(self.api_key, self.url)
+ return await self.gql_helper(
+ *update_dataset_helper(sync_api, id, name, description, metadata)
+ )
+
+ async def delete_dataset(self, id: str) -> "Dataset":
+ sync_api = LiteralAPI(self.api_key, self.url)
+ return await self.gql_helper(*delete_dataset_helper(sync_api, id))
+
+ ##################################################################################
+ # Experiment APIs #
+ ##################################################################################
+
+ async def create_experiment(
+ self,
+ name: str,
+ dataset_id: Optional[str] = None,
+ prompt_variant_id: Optional[str] = None,
+ params: Optional[Dict] = None,
+ ) -> "DatasetExperiment":
+ sync_api = LiteralAPI(self.api_key, self.url)
+
+ return await self.gql_helper(
+ *create_experiment_helper(
+ api=sync_api,
+ name=name,
+ dataset_id=dataset_id,
+ prompt_variant_id=prompt_variant_id,
+ params=params,
+ )
+ )
+
+ async def create_experiment_item(
+ self, experiment_item: "DatasetExperimentItem"
+ ) -> "DatasetExperimentItem":
+ check_scores_finite(experiment_item.scores)
+
+ # Create the dataset experiment item
+ result = await self.gql_helper(
+ *create_experiment_item_helper(
+ dataset_experiment_id=experiment_item.dataset_experiment_id,
+ dataset_item_id=experiment_item.dataset_item_id,
+ experiment_run_id=experiment_item.experiment_run_id,
+ input=experiment_item.input,
+ output=experiment_item.output,
+ )
+ )
+
+ for score in experiment_item.scores:
+ score["datasetExperimentItemId"] = result.id
+
+ # Create the scores and add to experiment item.
+ result.scores = await self.create_scores(experiment_item.scores)
+
+ return result
+
+ ##################################################################################
+ # DatasetItem APIs #
+ ##################################################################################
+
+ async def create_dataset_item(
+ self,
+ dataset_id: str,
+ input: Dict,
+ expected_output: Optional[Dict] = None,
+ metadata: Optional[Dict] = None,
+ ) -> "DatasetItem":
+ return await self.gql_helper(
+ *create_dataset_item_helper(dataset_id, input, expected_output, metadata)
+ )
+
+ async def get_dataset_item(self, id: str) -> "DatasetItem":
+ return await self.gql_helper(*get_dataset_item_helper(id))
+
+ async def delete_dataset_item(self, id: str) -> "DatasetItem":
+ return await self.gql_helper(*delete_dataset_item_helper(id))
+
+ async def add_step_to_dataset(
+ self, dataset_id: str, step_id: str, metadata: Optional[Dict] = None
+ ) -> "DatasetItem":
+ return await self.gql_helper(
+ *add_step_to_dataset_helper(dataset_id, step_id, metadata)
+ )
+
+ async def add_generation_to_dataset(
+ self, dataset_id: str, generation_id: str, metadata: Optional[Dict] = None
+ ) -> "DatasetItem":
+ return await self.gql_helper(
+ *add_generation_to_dataset_helper(dataset_id, generation_id, metadata)
+ )
+
+ ##################################################################################
+ # Prompt APIs #
+ ##################################################################################
+
+ async def get_or_create_prompt_lineage(
+ self, name: str, description: Optional[str] = None
+ ) -> Dict:
+ return await self.gql_helper(*create_prompt_lineage_helper(name, description))
+
+ @deprecated('Please use "get_or_create_prompt_lineage" instead.')
+    async def create_prompt_lineage(
+        self, name: str, description: Optional[str] = None
+    ) -> Dict:
+ return await self.get_or_create_prompt_lineage(name, description)
+
+ async def get_or_create_prompt(
+ self,
+ name: str,
+ template_messages: List[GenerationMessage],
+ settings: Optional[ProviderSettings] = None,
+ tools: Optional[List[Dict]] = None,
+ ) -> "Prompt":
+ lineage = await self.get_or_create_prompt_lineage(name)
+ lineage_id = lineage["id"]
+
+ sync_api = LiteralAPI(self.api_key, self.url)
+ return await self.gql_helper(
+ *create_prompt_helper(
+ sync_api, lineage_id, template_messages, settings, tools
+ )
+ )
+
+ @deprecated("Please use `get_or_create_prompt` instead.")
+ async def create_prompt(
+ self,
+ name: str,
+ template_messages: List[GenerationMessage],
+ settings: Optional[ProviderSettings] = None,
+ ) -> "Prompt":
+ return await self.get_or_create_prompt(name, template_messages, settings)
+
+ async def create_prompt_variant(
+ self,
+ name: str,
+ template_messages: List[GenerationMessage],
+ settings: Optional[ProviderSettings] = None,
+ tools: Optional[List[Dict]] = None,
+ ) -> Optional[str]:
+ lineage = await self.gql_helper(*get_prompt_lineage_helper(name))
+ lineage_id = lineage["id"] if lineage else None
+ return await self.gql_helper(
+ *create_prompt_variant_helper(
+ lineage_id, template_messages, settings, tools
+ )
+ )
+
+ async def get_prompt(
+ self,
+ id: Optional[str] = None,
+ name: Optional[str] = None,
+ version: Optional[int] = None,
+ ) -> "Prompt":
+ sync_api = LiteralAPI(self.api_key, self.url)
+ if id:
+ return await self.gql_helper(*get_prompt_helper(sync_api, id=id))
+ elif name:
+ return await self.gql_helper(
+ *get_prompt_helper(sync_api, name=name, version=version)
+ )
+ else:
+ raise ValueError("Either the `id` or the `name` must be provided.")
+
+ async def update_prompt_ab_testing(
+ self, name: str, rollouts: List["PromptRollout"]
+ ) -> Dict:
+ return await self.gql_helper(
+ *update_prompt_ab_testing_helper(name=name, rollouts=rollouts)
+ )
+
+ async def get_prompt_ab_testing(self, name: str) -> List["PromptRollout"]:
+ return await self.gql_helper(*get_prompt_ab_testing_helper(name=name))
+
+ ##################################################################################
+ # Misc APIs #
+ ##################################################################################
+
+ async def get_my_project_id(self) -> str:
+ response = await self.make_rest_call("/my-project", {})
+ return response["projectId"]
diff --git a/literalai/api/base.py b/literalai/api/base.py
new file mode 100644
index 0000000..b011e05
--- /dev/null
+++ b/literalai/api/base.py
@@ -0,0 +1,1088 @@
+import os
+
+from abc import ABC, abstractmethod
+from typing import (
+ Any,
+ Dict,
+ List,
+ Optional,
+ Union,
+)
+
+from typing_extensions import deprecated
+
+from literalai.my_types import Environment
+
+from literalai.evaluation.dataset import DatasetType
+from literalai.evaluation.dataset_experiment import DatasetExperimentItem
+from literalai.api.helpers.attachment_helpers import AttachmentUpload
+from literalai.api.helpers.score_helpers import ScoreUpdate
+
+from literalai.observability.filter import (
+ generations_filters,
+ generations_order_by,
+ scores_filters,
+ scores_order_by,
+ steps_filters,
+ steps_order_by,
+ threads_filters,
+ threads_order_by,
+ users_filters,
+)
+from literalai.prompt_engineering.prompt import ProviderSettings
+
+from literalai.api.helpers.prompt_helpers import PromptRollout
+
+from literalai.observability.generation import (
+ ChatGeneration,
+ CompletionGeneration,
+ GenerationMessage,
+)
+from literalai.observability.step import (
+ ScoreDict,
+ ScoreType,
+ Step,
+ StepDict,
+ StepType,
+)
+
+
+def prepare_variables(variables: Dict[str, Any]) -> Dict[str, Any]:
+ """
+ Recursively checks and converts bytes objects in variables.
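+
+    Example (illustrative):
+
+    ```python
+    prepare_variables({"content": b"raw bytes", "name": "logo"})
+    # -> {"content": "STRIPPED_BINARY_DATA", "name": "logo"}
+    ```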
+ """
+
+ def handle_bytes(item):
+ if isinstance(item, bytes):
+ return "STRIPPED_BINARY_DATA"
+ elif isinstance(item, dict):
+ return {k: handle_bytes(v) for k, v in item.items()}
+ elif isinstance(item, list):
+ return [handle_bytes(i) for i in item]
+ elif isinstance(item, tuple):
+ return tuple(handle_bytes(i) for i in item)
+ return item
+
+ return handle_bytes(variables)
+
+
+class BaseLiteralAPI(ABC):
+ def __init__(
+ self,
+ api_key: Optional[str] = None,
+ url: Optional[str] = None,
+ environment: Optional[Environment] = None,
+ ):
+ if url and url[-1] == "/":
+ url = url[:-1]
+
+ if api_key is None:
+ raise Exception("LITERAL_API_KEY not set")
+ if url is None:
+ raise Exception("LITERAL_API_URL not set")
+
+ self.api_key = api_key
+ self.url = url
+
+ if environment:
+ os.environ["LITERAL_ENV"] = environment
+
+ self.graphql_endpoint = self.url + "/api/graphql"
+ self.rest_endpoint = self.url + "/api"
+
+ @property
+ def headers(self):
+ from literalai.version import __version__
+
+ h = {
+ "Content-Type": "application/json",
+ "x-api-key": self.api_key,
+ "x-client-name": "py-literal-client",
+ "x-client-version": __version__,
+ }
+
+ if env := os.getenv("LITERAL_ENV"):
+ h["x-env"] = env
+
+ return h
+
+ @abstractmethod
+ def get_users(
+ self,
+ first: Optional[int] = None,
+ after: Optional[str] = None,
+ before: Optional[str] = None,
+ filters: Optional[users_filters] = None,
+ ):
+ """
+ Retrieves a list of users based on pagination and optional filters.
+
+ Args:
+ first (Optional[int]): The number of users to retrieve.
+ after (Optional[str]): A cursor for use in pagination, fetching records after this cursor.
+ before (Optional[str]): A cursor for use in pagination, fetching records before this cursor.
+ filters (Optional[users_filters]): Filters to apply to the user query.
+
+ Returns:
+ `PaginatedResponse[User]`: A paginated response containing the queried user data.
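+
+        Example (a sketch; assumes a configured `LiteralClient` named `literalai_client`
+        and that the paginated response exposes its items on `data`):
+
+        ```python
+        page = literalai_client.api.get_users(first=10)
+        for user in page.data:
+            print(user.identifier)
+        ```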
+ """
+ pass
+
+ @abstractmethod
+ def get_user(self, id: Optional[str] = None, identifier: Optional[str] = None):
+ """
+ Retrieves a user based on the provided ID or identifier.
+
+ Args:
+ id (Optional[str]): The unique ID of the user.
+ identifier (Optional[str]): A unique identifier for the user, such as a username or email.
+
+ Returns:
+ `User`: The user with requested id or identifier.
+ """
+ pass
+
+ @abstractmethod
+ def create_user(self, identifier: str, metadata: Optional[Dict] = None):
+ """
+ Creates a new user with the specified identifier and optional metadata.
+
+ Args:
+ identifier (str): A unique identifier for the user, such as a username or email.
+ metadata (Optional[Dict]): Additional data associated with the user.
+
+ Returns:
+ `User`: The created user object.
+ """
+ pass
+
+ @abstractmethod
+ def update_user(
+ self, id: str, identifier: Optional[str] = None, metadata: Optional[Dict] = None
+ ):
+ """
+ Updates an existing user identified by the given ID, with optional new identifier and metadata.
+
+ Args:
+ id (str): The unique ID of the user to update.
+ identifier (Optional[str]): A new identifier for the user, such as a username or email.
+ metadata (Optional[Dict]): New or updated metadata for the user.
+
+ Returns:
+ `User`: The updated user object.
+ """
+ pass
+
+ @abstractmethod
+ def delete_user(self, id: str):
+ """
+ Deletes a user identified by the given ID.
+
+ Args:
+ id (str): The unique ID of the user to delete.
+
+ Returns:
+ Dict: The deleted user as a dict.
+ """
+ pass
+
+ @abstractmethod
+ def get_or_create_user(self, identifier: str, metadata: Optional[Dict] = None):
+ """
+ Retrieves a user by their identifier, or creates a new user if it does not exist.
+
+ Args:
+ identifier (str): The identifier of the user to retrieve or create.
+ metadata (Optional[Dict]): Metadata to associate with the user if they are created.
+
+ Returns:
+ `User`: The existing or newly created user.
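+
+        Example (a sketch; identifier and metadata are placeholder values):
+
+        ```python
+        user = literalai_client.api.get_or_create_user(
+            identifier="john.doe", metadata={"plan": "pro"}
+        )
+        ```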
+ """
+ pass
+
+ @abstractmethod
+ def get_threads(
+ self,
+ first: Optional[int] = None,
+ after: Optional[str] = None,
+ before: Optional[str] = None,
+ filters: Optional[threads_filters] = None,
+ order_by: Optional[threads_order_by] = None,
+ step_types_to_keep: Optional[List[StepType]] = None,
+ ):
+ """
+ Fetches a list of threads based on pagination and optional filters.
+
+ Args:
+ first (Optional[int]): Number of threads to fetch.
+ after (Optional[str]): Cursor for pagination, fetch threads after this cursor.
+ before (Optional[str]): Cursor for pagination, fetch threads before this cursor.
+ filters (Optional[threads_filters]): Filters to apply on the threads query.
+ order_by (Optional[threads_order_by]): Order by clause for threads.
+            step_types_to_keep (Optional[List[StepType]]): If set, only steps of the
+                listed types are returned for each thread.
+
+ Returns:
+ `PaginatedResponse[Thread]`: A paginated response containing the queried thread data.
+ """
+ pass
+
+ @abstractmethod
+ def list_threads(
+ self,
+ first: Optional[int] = None,
+ after: Optional[str] = None,
+ before: Optional[str] = None,
+ filters: Optional[threads_filters] = None,
+ order_by: Optional[threads_order_by] = None,
+ ):
+ """
+        Lists threads based on pagination and optional filters. Unlike `get_threads`,
+        the returned threads do not include their steps.
+
+ Args:
+ first (Optional[int]): Number of threads to list.
+ after (Optional[str]): Cursor for pagination, list threads after this cursor.
+ before (Optional[str]): Cursor for pagination, list threads before this cursor.
+ filters (Optional[threads_filters]): Filters to apply on the threads listing.
+ order_by (Optional[threads_order_by]): Order by clause for threads.
+
+ Returns:
+ `PaginatedResponse[Thread]`: A paginated response containing the queried thread data.
+ """
+ pass
+
+ @abstractmethod
+ def get_thread(self, id: str):
+ """
+ Retrieves a single thread by its ID.
+
+ Args:
+ id (str): The unique identifier of the thread.
+
+ Returns:
+ `Thread`: The thread corresponding to the provided ID.
+ """
+ pass
+
+ @abstractmethod
+ def create_thread(
+ self,
+ name: Optional[str] = None,
+ metadata: Optional[Dict] = None,
+ participant_id: Optional[str] = None,
+ tags: Optional[List[str]] = None,
+ ):
+ """
+ Creates a `Thread` with the specified details.
+
+ Args:
+ name (Optional[str]): Name of the thread.
+ metadata (Optional[Dict]): Metadata associated with the thread.
+ participant_id (Optional[str]): Identifier for the participant.
+ tags (Optional[List[str]]): List of tags associated with the thread.
+
+ Returns:
+ `Thread`: The created thread.
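+
+        Example (a sketch; the participant id refers to a hypothetical existing user):
+
+        ```python
+        thread = literalai_client.api.create_thread(
+            name="Onboarding session",
+            participant_id=user.id,
+            tags=["onboarding"],
+        )
+        ```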
+ """
+ pass
+
+ @abstractmethod
+ def upsert_thread(
+ self,
+ id: str,
+ name: Optional[str] = None,
+ metadata: Optional[Dict] = None,
+ participant_id: Optional[str] = None,
+ tags: Optional[List[str]] = None,
+ ):
+ """
+ Updates an existing thread or creates a new one if it does not exist.
+
+ Args:
+ id (str): The unique identifier of the thread.
+ name (Optional[str]): Name of the thread.
+ metadata (Optional[Dict]): Metadata associated with the thread.
+ participant_id (Optional[str]): Identifier for the participant.
+ tags (Optional[List[str]]): List of tags associated with the thread.
+
+ Returns:
+ `Thread`: The updated or newly created thread.
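+
+        Example (a sketch; `my_stable_id` is an id generated and stored by the caller):
+
+        ```python
+        thread = literalai_client.api.upsert_thread(
+            id=my_stable_id, name="Support chat", tags=["support"]
+        )
+        ```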
+ """
+ pass
+
+ @abstractmethod
+ def update_thread(
+ self,
+ id: str,
+ name: Optional[str] = None,
+ metadata: Optional[Dict] = None,
+ participant_id: Optional[str] = None,
+ tags: Optional[List[str]] = None,
+ ):
+ """
+ Updates the specified details of an existing thread.
+
+ Args:
+ id (str): The unique identifier of the thread to update.
+ name (Optional[str]): New name of the thread.
+ metadata (Optional[Dict]): New metadata for the thread.
+ participant_id (Optional[str]): New identifier for the participant.
+ tags (Optional[List[str]]): New list of tags for the thread.
+
+ Returns:
+ `Thread`: The updated thread.
+ """
+ pass
+
+ @abstractmethod
+ def delete_thread(self, id: str):
+ """
+ Deletes a thread identified by its ID.
+
+ Args:
+ id (str): The unique identifier of the thread to delete.
+
+ Returns:
+ `bool`: True if the thread was deleted, False otherwise.
+ """
+ pass
+
+ @abstractmethod
+ def get_scores(
+ self,
+ first: Optional[int] = None,
+ after: Optional[str] = None,
+ before: Optional[str] = None,
+ filters: Optional[scores_filters] = None,
+ order_by: Optional[scores_order_by] = None,
+ ):
+ """
+ Fetches scores based on pagination and optional filters.
+
+ Args:
+ first (Optional[int]): The number of scores to retrieve.
+ after (Optional[str]): A cursor for use in pagination, fetching records after this cursor.
+ before (Optional[str]): A cursor for use in pagination, fetching records before this cursor.
+ filters (Optional[scores_filters]): Filters to apply to the scores query.
+ order_by (Optional[scores_order_by]): Ordering options for the scores.
+
+ Returns:
+ `PaginatedResponse[Score]`: A paginated response containing the queried scores.
+ """
+ pass
+
+ @abstractmethod
+ def create_scores(self, scores: List[ScoreDict]):
+ """
+ Creates multiple scores.
+
+ Args:
+ scores (List[ScoreDict]): A list of dictionaries representing the scores to be created.
+
+ Returns:
+            `List[Score]`: The created scores.
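+
+        Example (a sketch; assumes camelCase `ScoreDict` keys such as `stepId`, that
+        `"AI"` is a valid `ScoreType`, and that `step` is an existing step):
+
+        ```python
+        scores = literalai_client.api.create_scores(
+            [
+                {"name": "accuracy", "type": "AI", "value": 0.9, "stepId": step.id},
+                {"name": "helpfulness", "type": "AI", "value": 0.7, "stepId": step.id},
+            ]
+        )
+        ```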
+ """
+ pass
+
+ @abstractmethod
+ def create_score(
+ self,
+ name: str,
+ value: float,
+ type: ScoreType,
+ step_id: Optional[str] = None,
+ generation_id: Optional[str] = None,
+ dataset_experiment_item_id: Optional[str] = None,
+ comment: Optional[str] = None,
+ tags: Optional[List[str]] = None,
+ ):
+ """
+ Creates a single score in the database.
+
+ Args:
+ name (str): The name of the score.
+ value (float): The numerical value of the score.
+ type (ScoreType): The type of the score.
+ step_id (Optional[str]): The ID of the step associated with the score.
+            generation_id (Optional[str]): Deprecated, use step_id instead.
+ dataset_experiment_item_id (Optional[str]): The ID of the dataset experiment item associated with the score.
+ comment (Optional[str]): An optional comment about the score.
+ tags (Optional[List[str]]): Optional tags associated with the score.
+
+ Returns:
+ `Score`: The created score.
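+
+        Example (a sketch; assumes `"HUMAN"` is a valid `ScoreType` and `step` is an
+        existing step):
+
+        ```python
+        score = literalai_client.api.create_score(
+            name="user-feedback",
+            value=1.0,
+            type="HUMAN",
+            step_id=step.id,
+            comment="Thumbs up",
+        )
+        ```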
+ """
+ pass
+
+ @abstractmethod
+ def update_score(
+ self,
+ id: str,
+ update_params: ScoreUpdate,
+ ):
+ """
+ Updates a score identified by its ID with new parameters.
+
+ Args:
+ id (str): The unique identifier of the score to update.
+ update_params (ScoreUpdate): A dictionary of parameters to update in the score.
+
+ Returns:
+ `Score`: The updated score.
+ """
+ pass
+
+ @abstractmethod
+ def delete_score(self, id: str):
+ """
+ Deletes a score by ID.
+
+ Args:
+ id (str): ID of score to delete.
+
+ Returns:
+ Dict: The deleted `Score` as a dict.
+ """
+ pass
+
+ @abstractmethod
+ def upload_file(
+ self,
+ content: Union[bytes, str],
+ thread_id: Optional[str] = None,
+ mime: Optional[str] = "application/octet-stream",
+ ):
+ """
+ Uploads a file to the server.
+
+ Args:
+ content (Union[bytes, str]): The content of the file to upload.
+ thread_id (Optional[str]): The ID of the thread associated with the file.
+ mime (Optional[str]): The MIME type of the file. Defaults to 'application/octet-stream'.
+
+ Returns:
+ Dict: A dictionary containing the object key and URL of the uploaded file, or None values if the upload fails.
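+
+ Example — a minimal sketch assuming a client initialized as `literalai_client`, a hypothetical local file and a hypothetical thread id:
+ ```py
+ with open("report.pdf", "rb") as f:
+     uploaded = literalai_client.api.upload_file(
+         content=f.read(),
+         thread_id="thread-uuid",
+         mime="application/pdf",
+     )
+ print(uploaded["object_key"], uploaded["url"])
+ ```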
+ """
+ pass
+
+ @abstractmethod
+ def create_attachment(
+ self,
+ thread_id: Optional[str] = None,
+ step_id: Optional[str] = None,
+ id: Optional[str] = None,
+ metadata: Optional[Dict] = None,
+ mime: Optional[str] = None,
+ name: Optional[str] = None,
+ object_key: Optional[str] = None,
+ url: Optional[str] = None,
+ content: Optional[Union[bytes, str]] = None,
+ path: Optional[str] = None,
+ ):
+ """
+ Creates an attachment associated with a thread and step, potentially uploading file content.
+
+ Args:
+ thread_id (Optional[str]): The ID of the thread to which the attachment is linked. Defaults to the active thread, if any.
+ step_id (Optional[str]): The ID of the step to which the attachment is linked. Defaults to the active step; an error is raised if neither is provided nor active.
+ id (Optional[str]): The ID of the attachment, if updating an existing one.
+ metadata (Optional[Dict]): Metadata associated with the attachment.
+ mime (Optional[str]): MIME type of the file, if content is provided.
+ name (Optional[str]): Name of the attachment.
+ object_key (Optional[str]): Object key of the uploaded file, if already known.
+ url (Optional[str]): URL of the uploaded file, if already known.
+ content (Optional[Union[bytes, str]]): File content to upload.
+ path (Optional[str]): Path where the file should be stored.
+
+ Returns:
+ `Attachment`: The created attachment.
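+
+ Example — a minimal sketch assuming a client initialized as `literalai_client` and hypothetical thread and step ids:
+ ```py
+ attachment = literalai_client.api.create_attachment(
+     thread_id="thread-uuid",
+     step_id="step-uuid",
+     name="report.pdf",
+     mime="application/pdf",
+     content=open("report.pdf", "rb").read(),
+ )
+ ```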
+ """
+ pass
+
+ @abstractmethod
+ def update_attachment(self, id: str, update_params: AttachmentUpload):
+ """
+ Updates an existing attachment with new parameters.
+
+ Args:
+ id (str): The unique identifier of the attachment to update.
+ update_params (AttachmentUpload): The parameters to update in the attachment.
+
+ Returns:
+ `Attachment`: The updated attachment.
+ """
+ pass
+
+ @abstractmethod
+ def get_attachment(self, id: str):
+ """
+ Retrieves an attachment by ID.
+
+ Args:
+ id (str): ID of the attachment to retrieve.
+
+ Returns:
+ `Optional[Attachment]`: The attachment with the requested ID, or None if it does not exist.
+ """
+ pass
+
+ @abstractmethod
+ def delete_attachment(self, id: str):
+ """
+ Deletes an attachment identified by ID.
+
+ Args:
+ id (str): The unique identifier of the attachment to delete.
+
+ Returns:
+ `Attachment`: The deleted attachment.
+ """
+ pass
+
+ @abstractmethod
+ def create_step(
+ self,
+ thread_id: Optional[str] = None,
+ type: Optional[StepType] = "undefined",
+ start_time: Optional[str] = None,
+ end_time: Optional[str] = None,
+ input: Optional[Dict] = None,
+ output: Optional[Dict] = None,
+ metadata: Optional[Dict] = None,
+ parent_id: Optional[str] = None,
+ name: Optional[str] = None,
+ tags: Optional[List[str]] = None,
+ root_run_id: Optional[str] = None,
+ ):
+ """
+ Creates a new step with the specified parameters.
+
+ Args:
+ thread_id (Optional[str]): The ID of the thread this step is associated with.
+ type (Optional[StepType]): The type of the step, defaults to "undefined".
+ start_time (Optional[str]): The start time of the step.
+ end_time (Optional[str]): The end time of the step.
+ input (Optional[Dict]): Input data for the step.
+ output (Optional[Dict]): Output data from the step.
+ metadata (Optional[Dict]): Metadata associated with the step.
+ parent_id (Optional[str]): The ID of the parent step, if any.
+ name (Optional[str]): The name of the step.
+ tags (Optional[List[str]]): Tags associated with the step.
+ root_run_id (Optional[str]): The ID of the root run, if any.
+
+ Returns:
+ `Step`: The created step.
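+
+ Example — a minimal sketch assuming a client initialized as `literalai_client` and a hypothetical thread id:
+ ```py
+ step = literalai_client.api.create_step(
+     thread_id="thread-uuid",
+     type="llm",
+     name="answer-question",
+     input={"content": "What is the capital of France?"},
+     output={"content": "Paris."},
+ )
+ ```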
+ """
+ pass
+
+ @abstractmethod
+ def update_step(
+ self,
+ id: str,
+ type: Optional[StepType] = None,
+ input: Optional[str] = None,
+ output: Optional[str] = None,
+ metadata: Optional[Dict] = None,
+ name: Optional[str] = None,
+ tags: Optional[List[str]] = None,
+ start_time: Optional[str] = None,
+ end_time: Optional[str] = None,
+ parent_id: Optional[str] = None,
+ ):
+ """
+ Updates an existing step identified by its ID with new parameters.
+
+ Args:
+ id (str): The unique identifier of the step to update.
+ type (Optional[StepType]): The type of the step.
+ input (Optional[str]): Input data for the step.
+ output (Optional[str]): Output data from the step.
+ metadata (Optional[Dict]): Metadata associated with the step.
+ name (Optional[str]): The name of the step.
+ tags (Optional[List[str]]): Tags associated with the step.
+ start_time (Optional[str]): The start time of the step.
+ end_time (Optional[str]): The end time of the step.
+ parent_id (Optional[str]): The ID of the parent step, if any.
+
+ Returns:
+ `Step`: The updated step.
+ """
+ pass
+
+ @abstractmethod
+ def get_steps(
+ self,
+ first: Optional[int] = None,
+ after: Optional[str] = None,
+ before: Optional[str] = None,
+ filters: Optional[steps_filters] = None,
+ order_by: Optional[steps_order_by] = None,
+ ):
+ """
+ Fetches a list of steps based on pagination and optional filters.
+
+ Args:
+ first (Optional[int]): Number of steps to fetch.
+ after (Optional[str]): Cursor for pagination, fetch steps after this cursor.
+ before (Optional[str]): Cursor for pagination, fetch steps before this cursor.
+ filters (Optional[steps_filters]): Filters to apply on the steps query.
+ order_by (Optional[steps_order_by]): Order by clause for steps.
+
+ Returns:
+ `PaginatedResponse[Step]`: The list of steps matching the criteria.
+ """
+ pass
+
+ @abstractmethod
+ def get_step(
+ self,
+ id: str,
+ ):
+ """
+ Retrieves a step by its ID.
+
+ Args:
+ id (str): The unique identifier of the step to retrieve.
+
+ Returns:
+ `Optional[Step]`: The step with the requested ID, or None if it does not exist.
+ """
+ pass
+
+ @abstractmethod
+ def delete_step(
+ self,
+ id: str,
+ ):
+ """
+ Deletes a step identified by its ID.
+
+ Args:
+ id (str): The unique identifier of the step to delete.
+
+ Returns:
+ `bool`: True if the step was deleted successfully, False otherwise.
+ """
+ pass
+
+ @abstractmethod
+ def send_steps(self, steps: List[Union[StepDict, "Step"]]):
+ """
+ Sends a list of steps to process.
+ Step ingestion happens asynchronously if you configured a cache. See [Cache Configuration](https://docs.literalai.com/self-hosting/deployment#4-cache-configuration-optional).
+
+ Args:
+ steps (List[Union[StepDict, Step]]): A list of steps or step dictionaries to send.
+
+ Returns:
+ `Dict`: Dictionary with keys "ok" (boolean) and "message" (string).
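+
+ Example — a minimal sketch assuming a client initialized as `literalai_client`; the camelCase `StepDict` keys shown here are an assumption based on the SDK's other typed dictionaries:
+ ```py
+ literalai_client.api.send_steps(
+     steps=[
+         {
+             "id": "step-uuid",
+             "threadId": "thread-uuid",
+             "type": "llm",
+             "name": "answer-question",
+         },
+     ]
+ )
+ ```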
+ """
+ pass
+
+ @abstractmethod
+ def get_generations(
+ self,
+ first: Optional[int] = None,
+ after: Optional[str] = None,
+ before: Optional[str] = None,
+ filters: Optional[generations_filters] = None,
+ order_by: Optional[generations_order_by] = None,
+ ):
+ """
+ Fetches a list of generations based on pagination and optional filters.
+
+ Args:
+ first (Optional[int]): The number of generations to retrieve.
+ after (Optional[str]): A cursor for use in pagination, fetching records after this cursor.
+ before (Optional[str]): A cursor for use in pagination, fetching records before this cursor.
+ filters (Optional[generations_filters]): Filters to apply to the generations query.
+ order_by (Optional[generations_order_by]): Order by clause for generations.
+
+ Returns:
+ `PaginatedResponse[BaseGeneration]`: A paginated response containing the generations that match the criteria.
+ """
+ pass
+
+ @abstractmethod
+ def create_generation(
+ self, generation: Union[ChatGeneration, CompletionGeneration]
+ ):
+ """
+ Creates a new generation, either a chat or completion type.
+
+ ```py
+ from literalai.observability.generation import ChatGeneration
+ from literalai import LiteralClient
+
+ literalai_client = LiteralClient(api_key="lsk-***")
+
+ example_generation = ChatGeneration(
+ messages=[
+ {
+ "role": "user",
+ "content": "Hello, how can I help you today?"
+ },
+ ],
+ message_completion={
+ "role": "assistant",
+ "content": "Sure, I can help with that. What do you need to know?"
+ },
+ model="gpt-4o-mini",
+ provider="OpenAI"
+ )
+
+ literalai_client.api.create_generation(example_generation)
+ ```
+
+ Args:
+ generation (Union[ChatGeneration, CompletionGeneration]): The generation data to create.
+
+ Returns:
+ `Union[ChatGeneration, CompletionGeneration]`: The created generation, either a chat or completion type.
+ """
+ pass
+
+ @abstractmethod
+ def create_dataset(
+ self,
+ name: str,
+ description: Optional[str] = None,
+ metadata: Optional[Dict] = None,
+ type: DatasetType = "key_value",
+ ):
+ """
+ Creates a new dataset with the specified properties.
+
+ Args:
+ name (str): The name of the dataset.
+ description (Optional[str]): A description of the dataset.
+ metadata (Optional[Dict]): Additional metadata for the dataset.
+ type (DatasetType): The type of the dataset, defaults to "key_value".
+
+ Returns:
+ `Dataset`: The created dataset, empty initially.
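+
+ Example — a minimal sketch assuming a client initialized as `literalai_client`:
+ ```py
+ dataset = literalai_client.api.create_dataset(
+     name="faq-eval",
+     description="Questions used for regression testing",
+ )
+ item = literalai_client.api.create_dataset_item(
+     dataset_id=dataset.id,
+     input={"content": "What is the capital of France?"},
+     expected_output={"content": "Paris."},
+ )
+ ```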
+ """
+ pass
+
+ @abstractmethod
+ def get_dataset(
+ self, id: Optional[str] = None, name: Optional[str] = None
+ ):
+ """
+ Retrieves a dataset by its ID or name.
+
+ Args:
+ id (Optional[str]): The unique identifier of the dataset.
+ name (Optional[str]): The name of the dataset.
+
+ Returns:
+ `Optional[Dataset]`: The dataset with the given ID or name, or None if it does not exist.
+ """
+ pass
+
+ @abstractmethod
+ def update_dataset(
+ self,
+ id: str,
+ name: Optional[str] = None,
+ description: Optional[str] = None,
+ metadata: Optional[Dict] = None,
+ ):
+ """
+ Updates an existing dataset identified by its ID with new properties.
+
+ Args:
+ id (str): The unique identifier of the dataset to update.
+ name (Optional[str]): A new name for the dataset.
+ description (Optional[str]): A new description for the dataset.
+ metadata (Optional[Dict]): New or updated metadata for the dataset.
+
+ Returns:
+ `Dataset`: The dataset with updated properties.
+ """
+ pass
+
+ @abstractmethod
+ def delete_dataset(self, id: str):
+ """
+ Deletes a dataset identified by its ID.
+
+ Args:
+ id (str): The unique identifier of the dataset to delete.
+
+ Returns:
+ `Dataset`: The deleted dataset.
+ """
+ pass
+
+ @abstractmethod
+ def create_experiment(
+ self,
+ name: str,
+ dataset_id: Optional[str] = None,
+ prompt_variant_id: Optional[str] = None,
+ params: Optional[Dict] = None,
+ ):
+ """
+ Creates a new experiment associated with a specific dataset.
+
+ Args:
+ name (str): The name of the experiment.
+ dataset_id (Optional[str]): The unique identifier of the dataset.
+ prompt_variant_id (Optional[str]): The identifier of the prompt variant to associate with the experiment.
+ params (Optional[Dict]): Additional parameters for the experiment.
+
+ Returns:
+ DatasetExperiment: The newly created experiment object.
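+
+ Example — a minimal sketch assuming a client initialized as `literalai_client` and a hypothetical dataset id:
+ ```py
+ experiment = literalai_client.api.create_experiment(
+     name="gpt-4o-mini baseline",
+     dataset_id="dataset-uuid",
+     params={"temperature": 0.2},
+ )
+ ```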
+ """
+ pass
+
+ @abstractmethod
+ def create_experiment_item(
+ self, experiment_item: DatasetExperimentItem
+ ):
+ """
+ Creates an experiment item within an existing experiment.
+
+ Args:
+ experiment_item (DatasetExperimentItem): The experiment item to be created, containing all necessary data.
+
+ Returns:
+ DatasetExperimentItem: The newly created experiment item with scores attached.
+ """
+ pass
+
+ @abstractmethod
+ def create_dataset_item(
+ self,
+ dataset_id: str,
+ input: Dict,
+ expected_output: Optional[Dict] = None,
+ metadata: Optional[Dict] = None,
+ ):
+ """
+ Creates an item in a dataset with the specified properties.
+
+ Args:
+ dataset_id (str): The unique identifier of the dataset.
+ input (Dict): The input data for the dataset item.
+ expected_output (Optional[Dict]): The expected output data for the dataset item.
+ metadata (Optional[Dict]): Additional metadata for the dataset item.
+
+ Returns:
+ DatasetItem: The created dataset item.
+ """
+ pass
+
+ @abstractmethod
+ def get_dataset_item(self, id: str):
+ """
+ Retrieves a dataset item by ID.
+
+ Args:
+ id (str): ID of the `DatasetItem` to retrieve.
+
+ Returns:
+ `Optional[DatasetItem]`: The dataset item, or None if it does not exist.
+ """
+ pass
+
+ @abstractmethod
+ def delete_dataset_item(self, id: str):
+ """
+ Deletes a dataset item by ID.
+
+ Args:
+ id (str): ID of the dataset item to delete.
+
+ Returns:
+ `DatasetItem`: The deleted item.
+ """
+ pass
+
+ @abstractmethod
+ def add_step_to_dataset(
+ self, dataset_id: str, step_id: str, metadata: Optional[Dict] = None
+ ):
+ """
+ Adds a step to a dataset.
+
+ Args:
+ dataset_id (str): The unique identifier of the dataset.
+ step_id (str): The unique identifier of the step to add.
+ metadata (Optional[Dict]): Additional metadata related to the step to add.
+
+ Returns:
+ `DatasetItem`: The created dataset item.
+ """
+ pass
+
+ @abstractmethod
+ def add_generation_to_dataset(
+ self, dataset_id: str, generation_id: str, metadata: Optional[Dict] = None
+ ):
+ """
+ Adds a generation to a dataset.
+
+ Args:
+ dataset_id (str): The unique identifier of the dataset.
+ generation_id (str): The unique identifier of the generation to add.
+ metadata (Optional[Dict]): Additional metadata related to the generation to add.
+
+ Returns:
+ `DatasetItem`: The created dataset item.
+ """
+ pass
+
+ @abstractmethod
+ @deprecated("Use get_or_create_prompt_lineage instead")
+ def create_prompt_lineage(self, name: str, description: Optional[str] = None):
+ """
+ Deprecated. Please use `get_or_create_prompt_lineage` instead.
+ """
+ pass
+
+ @abstractmethod
+ def get_or_create_prompt_lineage(
+ self, name: str, description: Optional[str] = None
+ ):
+ """
+ Creates a prompt lineage with the specified name and optional description.
+ If the prompt lineage with that name already exists, it is returned.
+
+ Args:
+ name (str): The name of the prompt lineage.
+ description (Optional[str]): An optional description of the prompt lineage.
+
+ Returns:
+ `Dict`: The prompt lineage data as a dictionary.
+ """
+ pass
+
+ @abstractmethod
+ @deprecated("Please use `get_or_create_prompt` instead.")
+ def create_prompt(
+ self,
+ name: str,
+ template_messages: List[GenerationMessage],
+ settings: Optional[ProviderSettings] = None,
+ ):
+ """
+ Deprecated. Please use `get_or_create_prompt` instead.
+ """
+ pass
+
+ @abstractmethod
+ def get_or_create_prompt(
+ self,
+ name: str,
+ template_messages: List[GenerationMessage],
+ settings: Optional[ProviderSettings] = None,
+ tools: Optional[List[Dict]] = None,
+ ):
+ """
+ A `Prompt` is fully defined by its `name`, `template_messages`, `settings` and `tools`.
+ If a prompt already exists for the given arguments, it is returned.
+ Otherwise, a new prompt is created.
+
+ Args:
+ name (str): The name of the prompt to retrieve or create.
+ template_messages (List[GenerationMessage]): A list of template messages for the prompt.
+ settings (Optional[ProviderSettings]): Optional settings for the prompt.
+ tools (Optional[List[Dict]]): Optional tools for the model.
+
+ Returns:
+ Prompt: The prompt that was retrieved or created.
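+
+ Example — a minimal sketch assuming a client initialized as `literalai_client`; the `{{question}}` placeholder and the settings keys are illustrative:
+ ```py
+ prompt = literalai_client.api.get_or_create_prompt(
+     name="faq-answerer",
+     template_messages=[
+         {"role": "system", "content": "You are a concise assistant."},
+         {"role": "user", "content": "{{question}}"},
+     ],
+     settings={"provider": "openai", "model": "gpt-4o-mini", "temperature": 0.2},
+ )
+ ```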
+ """
+ pass
+
+ @abstractmethod
+ def get_prompt(
+ self,
+ id: Optional[str] = None,
+ name: Optional[str] = None,
+ version: Optional[int] = None,
+ ):
+ """
+ Gets a prompt either by:
+ - `id`
+ - or `name` and (optional) `version`
+
+ Either the `id` or the `name` must be provided.
+ If both are provided, the `id` is used.
+
+ Args:
+ id (Optional[str]): The unique identifier of the prompt to retrieve.
+ name (Optional[str]): The name of the prompt to retrieve.
+ version (Optional[int]): The version number of the prompt to retrieve.
+
+ Returns:
+ Prompt: The prompt with the given identifier or name.
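+
+ Example — a minimal sketch assuming a client initialized as `literalai_client` and an existing prompt lineage named "faq-answerer":
+ ```py
+ prompt = literalai_client.api.get_prompt(name="faq-answerer")
+ messages = prompt.format_messages(question="What is the capital of France?")
+ ```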
+ """
+ pass
+
+ @abstractmethod
+ def create_prompt_variant(
+ self,
+ name: str,
+ template_messages: List[GenerationMessage],
+ settings: Optional[ProviderSettings] = None,
+ tools: Optional[List[Dict]] = None,
+ ):
+ """
+ Creates a prompt variant to use as part of an experiment.
+ This variant is not an official Prompt version until manually saved.
+
+ Args:
+ name (str): Name of the variant to create.
+ template_messages (List[GenerationMessage]): A list of template messages for the prompt.
+ settings (Optional[ProviderSettings]): Optional settings for the prompt.
+ tools (Optional[List[Dict]]): Optional tools for the model.
+
+ Returns:
+ `Optional[str]`: The ID of the created prompt variant, which you can link to an experiment.
+ """
+ pass
+
+ @abstractmethod
+ def get_prompt_ab_testing(self, name: str):
+ """
+ Get the A/B testing configuration for a prompt lineage.
+
+ Args:
+ name (str): The name of the prompt lineage.
+
+ Returns:
+ List[PromptRollout]: The rollout percentage configured for each prompt version.
+ """
+ pass
+
+ @abstractmethod
+ def update_prompt_ab_testing(
+ self, name: str, rollouts: List[PromptRollout]
+ ):
+ """
+ Update the A/B testing configuration for a prompt lineage.
+
+ Args:
+ name (str): The name of the prompt lineage.
+ rollouts (List[PromptRollout]): The percentage rollout for each prompt version.
+
+ Returns:
+ `Dict`: The updated A/B testing configuration.
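+
+ Example — a minimal sketch assuming a client initialized as `literalai_client` and that each `PromptRollout` maps a prompt `version` to a `rollout` percentage:
+ ```py
+ literalai_client.api.update_prompt_ab_testing(
+     name="faq-answerer",
+     rollouts=[
+         {"version": 1, "rollout": 80},
+         {"version": 2, "rollout": 20},
+     ],
+ )
+ ```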
+ """
+ pass
+
+ @abstractmethod
+ def get_my_project_id(self):
+ """
+ Retrieves the project ID associated with the API key.
+
+ Returns:
+ `str`: The project ID for the current API key.
+ """
+ pass
diff --git a/literalai/api/helpers/__init__.py b/literalai/api/helpers/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/literalai/api/attachment_helpers.py b/literalai/api/helpers/attachment_helpers.py
similarity index 98%
rename from literalai/api/attachment_helpers.py
rename to literalai/api/helpers/attachment_helpers.py
index 870f4e4..cb02772 100644
--- a/literalai/api/attachment_helpers.py
+++ b/literalai/api/helpers/attachment_helpers.py
@@ -3,7 +3,7 @@
from literalai.observability.step import Attachment
-from literalai.api import gql
+from literalai.api.helpers import gql
def create_attachment_helper(
diff --git a/literalai/api/dataset_helpers.py b/literalai/api/helpers/dataset_helpers.py
similarity index 99%
rename from literalai/api/dataset_helpers.py
rename to literalai/api/helpers/dataset_helpers.py
index 5d2ff46..91048f6 100644
--- a/literalai/api/dataset_helpers.py
+++ b/literalai/api/helpers/dataset_helpers.py
@@ -1,6 +1,6 @@
from typing import TYPE_CHECKING, Dict, Optional
-from literalai.api import gql
+from literalai.api.helpers import gql
from literalai.evaluation.dataset import Dataset, DatasetType
from literalai.evaluation.dataset_experiment import (
DatasetExperiment,
diff --git a/literalai/api/generation_helpers.py b/literalai/api/helpers/generation_helpers.py
similarity index 97%
rename from literalai/api/generation_helpers.py
rename to literalai/api/helpers/generation_helpers.py
index f91e30e..0a287d6 100644
--- a/literalai/api/generation_helpers.py
+++ b/literalai/api/helpers/generation_helpers.py
@@ -6,7 +6,7 @@
)
from literalai.observability.generation import BaseGeneration, CompletionGeneration, ChatGeneration
-from literalai.api import gql
+from literalai.api.helpers import gql
def get_generations_helper(
diff --git a/literalai/api/gql.py b/literalai/api/helpers/gql.py
similarity index 100%
rename from literalai/api/gql.py
rename to literalai/api/helpers/gql.py
diff --git a/literalai/api/prompt_helpers.py b/literalai/api/helpers/prompt_helpers.py
similarity index 97%
rename from literalai/api/prompt_helpers.py
rename to literalai/api/helpers/prompt_helpers.py
index 3377f77..14f2613 100644
--- a/literalai/api/prompt_helpers.py
+++ b/literalai/api/helpers/prompt_helpers.py
@@ -6,7 +6,7 @@
if TYPE_CHECKING:
from literalai.api import LiteralAPI
-from literalai.api import gql
+from literalai.api.helpers import gql
def create_prompt_lineage_helper(name: str, description: Optional[str] = None):
@@ -86,7 +86,7 @@ def create_prompt_variant_helper(
"tools": tools,
}
- def process_response(response):
+ def process_response(response) -> Optional[str]:
variant = response["data"]["createPromptExperiment"]
return variant["id"] if variant else None
diff --git a/literalai/api/score_helpers.py b/literalai/api/helpers/score_helpers.py
similarity index 99%
rename from literalai/api/score_helpers.py
rename to literalai/api/helpers/score_helpers.py
index 6d0d456..ee6bd9d 100644
--- a/literalai/api/score_helpers.py
+++ b/literalai/api/helpers/score_helpers.py
@@ -5,7 +5,7 @@
from literalai.my_types import PaginatedResponse
from literalai.observability.step import ScoreType, ScoreDict, Score
-from literalai.api import gql
+from literalai.api.helpers import gql
def get_scores_helper(
diff --git a/literalai/api/step_helpers.py b/literalai/api/helpers/step_helpers.py
similarity index 99%
rename from literalai/api/step_helpers.py
rename to literalai/api/helpers/step_helpers.py
index ea667e6..9d7f8c1 100644
--- a/literalai/api/step_helpers.py
+++ b/literalai/api/helpers/step_helpers.py
@@ -4,7 +4,7 @@
from literalai.my_types import PaginatedResponse
from literalai.observability.step import Step, StepDict, StepType
-from literalai.api import gql
+from literalai.api.helpers import gql
def create_step_helper(
diff --git a/literalai/api/thread_helpers.py b/literalai/api/helpers/thread_helpers.py
similarity index 99%
rename from literalai/api/thread_helpers.py
rename to literalai/api/helpers/thread_helpers.py
index 0d35b6d..f4c3b59 100644
--- a/literalai/api/thread_helpers.py
+++ b/literalai/api/helpers/thread_helpers.py
@@ -5,7 +5,7 @@
from literalai.observability.step import StepType
from literalai.observability.thread import Thread
-from literalai.api import gql
+from literalai.api.helpers import gql
def get_threads_helper(
diff --git a/literalai/api/user_helpers.py b/literalai/api/helpers/user_helpers.py
similarity index 96%
rename from literalai/api/user_helpers.py
rename to literalai/api/helpers/user_helpers.py
index 06abf6e..ad533d2 100644
--- a/literalai/api/user_helpers.py
+++ b/literalai/api/helpers/user_helpers.py
@@ -3,7 +3,7 @@
from literalai.observability.filter import users_filters
from literalai.my_types import PaginatedResponse, User
-from literalai.api import gql
+from literalai.api.helpers import gql
def get_users_helper(
@@ -36,7 +36,7 @@ def process_response(response):
def create_user_helper(identifier: str, metadata: Optional[Dict] = None):
variables = {"identifier": identifier, "metadata": metadata}
- def process_response(response):
+ def process_response(response) -> User:
return User.from_dict(response["data"]["createParticipant"])
description = "create user"
diff --git a/literalai/api/synchronous.py b/literalai/api/synchronous.py
new file mode 100644
index 0000000..3266624
--- /dev/null
+++ b/literalai/api/synchronous.py
@@ -0,0 +1,846 @@
+import logging
+import uuid
+
+from typing_extensions import deprecated
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Callable,
+ Dict,
+ List,
+ Literal,
+ Optional,
+ TypeVar,
+ Union,
+ cast,
+)
+
+from literalai.api.base import BaseLiteralAPI, prepare_variables
+
+from literalai.api.helpers.attachment_helpers import (
+ AttachmentUpload,
+ create_attachment_helper,
+ delete_attachment_helper,
+ get_attachment_helper,
+ update_attachment_helper,
+)
+from literalai.api.helpers.dataset_helpers import (
+ add_generation_to_dataset_helper,
+ add_step_to_dataset_helper,
+ create_dataset_helper,
+ create_dataset_item_helper,
+ create_experiment_helper,
+ create_experiment_item_helper,
+ delete_dataset_helper,
+ delete_dataset_item_helper,
+ get_dataset_helper,
+ get_dataset_item_helper,
+ update_dataset_helper,
+)
+from literalai.api.helpers.generation_helpers import (
+ create_generation_helper,
+ get_generations_helper,
+)
+from literalai.api.helpers.prompt_helpers import (
+ PromptRollout,
+ create_prompt_helper,
+ create_prompt_lineage_helper,
+ create_prompt_variant_helper,
+ get_prompt_ab_testing_helper,
+ get_prompt_helper,
+ get_prompt_lineage_helper,
+ update_prompt_ab_testing_helper,
+)
+from literalai.api.helpers.score_helpers import (
+ ScoreUpdate,
+ check_scores_finite,
+ create_score_helper,
+ create_scores_query_builder,
+ delete_score_helper,
+ get_scores_helper,
+ update_score_helper,
+)
+from literalai.api.helpers.step_helpers import (
+ create_step_helper,
+ delete_step_helper,
+ get_step_helper,
+ get_steps_helper,
+ send_steps_helper,
+ update_step_helper,
+)
+from literalai.api.helpers.thread_helpers import (
+ create_thread_helper,
+ delete_thread_helper,
+ get_thread_helper,
+ get_threads_helper,
+ list_threads_helper,
+ update_thread_helper,
+ upsert_thread_helper,
+)
+from literalai.api.helpers.user_helpers import (
+ create_user_helper,
+ delete_user_helper,
+ get_user_helper,
+ get_users_helper,
+ update_user_helper,
+)
+from literalai.context import active_steps_var, active_thread_var
+from literalai.evaluation.dataset import Dataset, DatasetType
+from literalai.evaluation.dataset_experiment import (
+ DatasetExperiment,
+ DatasetExperimentItem,
+)
+from literalai.evaluation.dataset_item import DatasetItem
+from literalai.observability.filter import (
+ generations_filters,
+ generations_order_by,
+ scores_filters,
+ scores_order_by,
+ steps_filters,
+ steps_order_by,
+ threads_filters,
+ threads_order_by,
+ users_filters,
+)
+from literalai.observability.thread import Thread
+from literalai.prompt_engineering.prompt import Prompt, ProviderSettings
+
+if TYPE_CHECKING:
+ from typing import Tuple # noqa: F401
+
+import httpx
+
+from literalai.my_types import PaginatedResponse, User
+from literalai.observability.generation import (
+ BaseGeneration,
+ ChatGeneration,
+ CompletionGeneration,
+ GenerationMessage,
+)
+from literalai.observability.step import (
+ Attachment,
+ Score,
+ ScoreDict,
+ ScoreType,
+ Step,
+ StepDict,
+ StepType,
+)
+
+logger = logging.getLogger(__name__)
+
+
+class LiteralAPI(BaseLiteralAPI):
+ """
+ ```python
+ from literalai import LiteralClient
+ # Initialize the client
+ literalai_client = LiteralClient(api_key="your_api_key_here")
+ # Access the API's methods
+ print(literalai_client.api)
+ ```
+ """
+
+ R = TypeVar("R")
+
+ def make_gql_call(
+ self, description: str, query: str, variables: Dict[str, Any]
+ ) -> Dict:
+ def raise_error(error):
+ logger.error(f"Failed to {description}: {error}")
+ raise Exception(error)
+
+ variables = prepare_variables(variables)
+ with httpx.Client(follow_redirects=True) as client:
+ response = client.post(
+ self.graphql_endpoint,
+ json={"query": query, "variables": variables},
+ headers=self.headers,
+ timeout=10,
+ )
+
+ try:
+ response.raise_for_status()
+ except httpx.HTTPStatusError:
+ raise_error(f"Failed to {description}: {response.text}")
+
+ try:
+ json = response.json()
+ except ValueError as e:
+ raise_error(
+ f"""Failed to parse JSON response: {
+ e}, content: {response.content!r}"""
+ )
+
+ if json.get("errors"):
+ raise_error(json["errors"])
+
+ if json.get("data"):
+ if isinstance(json["data"], dict):
+ for _, value in json["data"].items():
+ if value and value.get("ok") is False:
+ raise_error(
+ f"""Failed to {description}: {
+ value.get('message')}"""
+ )
+
+ return json
+
+ def make_rest_call(self, subpath: str, body: Dict[str, Any]) -> Dict:
+ with httpx.Client(follow_redirects=True) as client:
+ response = client.post(
+ self.rest_endpoint + subpath,
+ json=body,
+ headers=self.headers,
+ timeout=20,
+ )
+
+ try:
+ response.raise_for_status()
+ except httpx.HTTPStatusError:
+ message = f"Failed to call {subpath}: {response.text}"
+ logger.error(message)
+ raise Exception(message)
+
+ try:
+ return response.json()
+ except ValueError as e:
+ raise ValueError(
+ f"""Failed to parse JSON response: {
+ e}, content: {response.content!r}"""
+ )
+
+ def gql_helper(
+ self,
+ query: str,
+ description: str,
+ variables: Dict,
+ process_response: Callable[..., R],
+ ) -> R:
+ response = self.make_gql_call(description, query, variables)
+ return process_response(response)
+
+ ##################################################################################
+ # User APIs #
+ ##################################################################################
+
+ def get_users(
+ self,
+ first: Optional[int] = None,
+ after: Optional[str] = None,
+ before: Optional[str] = None,
+ filters: Optional[users_filters] = None,
+ ) -> PaginatedResponse["User"]:
+ return self.gql_helper(*get_users_helper(first, after, before, filters))
+
+ def get_user(self, id: Optional[str] = None, identifier: Optional[str] = None) -> "User":
+ return self.gql_helper(*get_user_helper(id, identifier))
+
+ def create_user(self, identifier: str, metadata: Optional[Dict] = None) -> "User":
+ return self.gql_helper(*create_user_helper(identifier, metadata))
+
+ def update_user(
+ self, id: str, identifier: Optional[str] = None, metadata: Optional[Dict] = None
+ ) -> "User":
+ return self.gql_helper(*update_user_helper(id, identifier, metadata))
+
+ def delete_user(self, id: str) -> Dict:
+ return self.gql_helper(*delete_user_helper(id))
+
+ def get_or_create_user(self, identifier: str, metadata: Optional[Dict] = None) -> "User":
+ user = self.get_user(identifier=identifier)
+ if user:
+ return user
+
+ return self.create_user(identifier, metadata)
+
+ ##################################################################################
+ # Thread APIs #
+ ##################################################################################
+
+ def get_threads(
+ self,
+ first: Optional[int] = None,
+ after: Optional[str] = None,
+ before: Optional[str] = None,
+ filters: Optional[threads_filters] = None,
+ order_by: Optional[threads_order_by] = None,
+ step_types_to_keep: Optional[List[StepType]] = None,
+ ) -> PaginatedResponse["Thread"]:
+ return self.gql_helper(
+ *get_threads_helper(
+ first, after, before, filters, order_by, step_types_to_keep
+ )
+ )
+
+ def list_threads(
+ self,
+ first: Optional[int] = None,
+ after: Optional[str] = None,
+ before: Optional[str] = None,
+ filters: Optional[threads_filters] = None,
+ order_by: Optional[threads_order_by] = None,
+ ) -> PaginatedResponse["Thread"]:
+ return self.gql_helper(
+ *list_threads_helper(first, after, before, filters, order_by)
+ )
+
+ def get_thread(self, id: str) -> "Thread":
+ return self.gql_helper(*get_thread_helper(id))
+
+ def create_thread(
+ self,
+ name: Optional[str] = None,
+ metadata: Optional[Dict] = None,
+ participant_id: Optional[str] = None,
+ tags: Optional[List[str]] = None,
+ ) -> "Thread":
+ return self.gql_helper(
+ *create_thread_helper(name, metadata, participant_id, tags)
+ )
+
+ def upsert_thread(
+ self,
+ id: str,
+ name: Optional[str] = None,
+ metadata: Optional[Dict] = None,
+ participant_id: Optional[str] = None,
+ tags: Optional[List[str]] = None,
+ ) -> "Thread":
+ return self.gql_helper(
+ *upsert_thread_helper(id, name, metadata, participant_id, tags)
+ )
+
+ def update_thread(
+ self,
+ id: str,
+ name: Optional[str] = None,
+ metadata: Optional[Dict] = None,
+ participant_id: Optional[str] = None,
+ tags: Optional[List[str]] = None,
+ ) -> "Thread":
+ return self.gql_helper(
+ *update_thread_helper(id, name, metadata, participant_id, tags)
+ )
+
+ def delete_thread(self, id: str) -> bool:
+ return self.gql_helper(*delete_thread_helper(id))
+
+ ##################################################################################
+ # Score APIs #
+ ##################################################################################
+
+ def get_scores(
+ self,
+ first: Optional[int] = None,
+ after: Optional[str] = None,
+ before: Optional[str] = None,
+ filters: Optional[scores_filters] = None,
+ order_by: Optional[scores_order_by] = None,
+ ) -> PaginatedResponse["Score"]:
+ return self.gql_helper(
+ *get_scores_helper(first, after, before, filters, order_by)
+ )
+
+ def create_scores(self, scores: List["ScoreDict"]):
+ check_scores_finite(scores)
+
+ query = create_scores_query_builder(scores)
+ variables = {}
+ for id, score in enumerate(scores):
+ for k, v in score.items():
+ variables[f"{k}_{id}"] = v
+
+ def process_response(response):
+ return [x for x in response["data"].values()]
+
+ return self.gql_helper(query, "create scores", variables, process_response)
+
+ def create_score(
+ self,
+ name: str,
+ value: float,
+ type: ScoreType,
+ step_id: Optional[str] = None,
+ generation_id: Optional[str] = None,
+ dataset_experiment_item_id: Optional[str] = None,
+ comment: Optional[str] = None,
+ tags: Optional[List[str]] = None,
+ ) -> "Score":
+ if generation_id:
+ logger.warning(
+ "generation_id is deprecated and will be removed in a future version, please use step_id instead"
+ )
+ check_scores_finite([{"name": name, "value": value}])
+
+ return self.gql_helper(
+ *create_score_helper(
+ name,
+ value,
+ type,
+ step_id,
+ dataset_experiment_item_id,
+ comment,
+ tags,
+ )
+ )
+
+ def update_score(
+ self,
+ id: str,
+ update_params: ScoreUpdate,
+ ) -> "Score":
+ return self.gql_helper(*update_score_helper(id, update_params))
+
+ def delete_score(self, id: str) -> Dict:
+ return self.gql_helper(*delete_score_helper(id))
+
+ ##################################################################################
+ # Attachment APIs #
+ ##################################################################################
+
+ def upload_file(
+ self,
+ content: Union[bytes, str],
+ thread_id: Optional[str] = None,
+ mime: Optional[str] = "application/octet-stream",
+ ) -> Dict:
+ id = str(uuid.uuid4())
+ body = {"fileName": id, "contentType": mime}
+ if thread_id:
+ body["threadId"] = thread_id
+
+ path = "/api/upload/file"
+
+ with httpx.Client(follow_redirects=True) as client:
+ response = client.post(
+ f"{self.url}{path}",
+ json=body,
+ headers=self.headers,
+ )
+ if response.status_code >= 400:
+ reason = response.text
+ logger.error(f"Failed to sign upload url: {reason}")
+ return {"object_key": None, "url": None}
+ json_res = response.json()
+ method = "put" if "put" in json_res else "post"
+ request_dict: Dict[str, Any] = json_res.get(method, {})
+ url: Optional[str] = request_dict.get("url")
+
+ if not url:
+ raise Exception("Invalid server response")
+ headers: Optional[Dict] = request_dict.get("headers")
+ fields: Dict = request_dict.get("fields", {})
+ object_key: Optional[str] = fields.get("key")
+ upload_type: Literal["raw", "multipart"] = cast(
+ Literal["raw", "multipart"], request_dict.get("uploadType", "multipart")
+ )
+ signed_url: Optional[str] = json_res.get("signedUrl")
+
+ # Prepare form data
+ form_data = (
+ {}
+ ) # type: Dict[str, Union[Tuple[Union[str, None], Any], Tuple[Union[str, None], Any, Any]]]
+ for field_name, field_value in fields.items():
+ form_data[field_name] = (None, field_value)
+
+ # Add the file itself to the form data, passing its MIME type explicitly
+ # alongside the fields returned by the presigned upload request.
+ form_data["file"] = (id, content, mime)
+
+ with httpx.Client(follow_redirects=True) as client:
+ if upload_type == "raw":
+ upload_response = client.request(
+ url=url,
+ headers=headers,
+ method=method,
+ data=content, # type: ignore
+ )
+ else:
+ upload_response = client.request(
+ url=url,
+ headers=headers,
+ method=method,
+ files=form_data,
+ ) # type: ignore
+ try:
+ upload_response.raise_for_status()
+ return {"object_key": object_key, "url": signed_url}
+ except Exception as e:
+ logger.error(f"Failed to upload file: {str(e)}")
+ return {"object_key": None, "url": None}
+
+ def create_attachment(
+ self,
+ thread_id: Optional[str] = None,
+ step_id: Optional[str] = None,
+ id: Optional[str] = None,
+ metadata: Optional[Dict] = None,
+ mime: Optional[str] = None,
+ name: Optional[str] = None,
+ object_key: Optional[str] = None,
+ url: Optional[str] = None,
+ content: Optional[Union[bytes, str]] = None,
+ path: Optional[str] = None,
+ ) -> "Attachment":
+ if not thread_id:
+ if active_thread := active_thread_var.get(None):
+ thread_id = active_thread.id
+
+ if not step_id:
+ if active_steps := active_steps_var.get([]):
+ step_id = active_steps[-1].id
+ else:
+ raise Exception("No step_id provided and no active step found.")
+
+ (
+ query,
+ description,
+ variables,
+ content,
+ process_response,
+ ) = create_attachment_helper(
+ thread_id=thread_id,
+ step_id=step_id,
+ id=id,
+ metadata=metadata,
+ mime=mime,
+ name=name,
+ object_key=object_key,
+ url=url,
+ content=content,
+ path=path,
+ )
+
+ if content:
+ uploaded = self.upload_file(content=content, thread_id=thread_id, mime=mime)
+
+ if uploaded["object_key"] is None or uploaded["url"] is None:
+ raise Exception("Failed to upload file")
+
+ object_key = uploaded["object_key"]
+ if object_key:
+ variables["objectKey"] = object_key
+ else:
+ variables["url"] = uploaded["url"]
+
+ response = self.make_gql_call(description, query, variables)
+ return process_response(response)
+
+ def update_attachment(self, id: str, update_params: AttachmentUpload) -> "Attachment":
+ return self.gql_helper(*update_attachment_helper(id, update_params))
+
+ def get_attachment(self, id: str) -> Optional["Attachment"]:
+ return self.gql_helper(*get_attachment_helper(id))
+
+ def delete_attachment(self, id: str) -> Dict:
+ return self.gql_helper(*delete_attachment_helper(id))
+
+ ##################################################################################
+ # Step APIs #
+ ##################################################################################
+
+ def create_step(
+ self,
+ thread_id: Optional[str] = None,
+ type: Optional[StepType] = "undefined",
+ start_time: Optional[str] = None,
+ end_time: Optional[str] = None,
+ input: Optional[Dict] = None,
+ output: Optional[Dict] = None,
+ metadata: Optional[Dict] = None,
+ parent_id: Optional[str] = None,
+ name: Optional[str] = None,
+ tags: Optional[List[str]] = None,
+ root_run_id: Optional[str] = None,
+ ) -> Step:
+ return self.gql_helper(
+ *create_step_helper(
+ thread_id=thread_id,
+ type=type,
+ start_time=start_time,
+ end_time=end_time,
+ input=input,
+ output=output,
+ metadata=metadata,
+ parent_id=parent_id,
+ name=name,
+ tags=tags,
+ root_run_id=root_run_id,
+ )
+ )
+
+ def update_step(
+ self,
+ id: str,
+ type: Optional[StepType] = None,
+ input: Optional[str] = None,
+ output: Optional[str] = None,
+ metadata: Optional[Dict] = None,
+ name: Optional[str] = None,
+ tags: Optional[List[str]] = None,
+ start_time: Optional[str] = None,
+ end_time: Optional[str] = None,
+ parent_id: Optional[str] = None,
+ ) -> "Step":
+ return self.gql_helper(
+ *update_step_helper(
+ id=id,
+ type=type,
+ input=input,
+ output=output,
+ metadata=metadata,
+ name=name,
+ tags=tags,
+ start_time=start_time,
+ end_time=end_time,
+ parent_id=parent_id,
+ )
+ )
+
+ def get_steps(
+ self,
+ first: Optional[int] = None,
+ after: Optional[str] = None,
+ before: Optional[str] = None,
+ filters: Optional[steps_filters] = None,
+ order_by: Optional[steps_order_by] = None,
+ ) -> PaginatedResponse["Step"]:
+ return self.gql_helper(
+ *get_steps_helper(first, after, before, filters, order_by)
+ )
+
+ def get_step(
+ self,
+ id: str,
+ ) -> Optional["Step"]:
+ return self.gql_helper(*get_step_helper(id=id))
+
+ def delete_step(
+ self,
+ id: str,
+ ) -> bool:
+ return self.gql_helper(*delete_step_helper(id=id))
+
+ def send_steps(self, steps: List[Union["StepDict", "Step"]]):
+ return self.gql_helper(*send_steps_helper(steps=steps))
+
+ ##################################################################################
+ # Generation APIs #
+ ##################################################################################
+
+ def get_generations(
+ self,
+ first: Optional[int] = None,
+ after: Optional[str] = None,
+ before: Optional[str] = None,
+ filters: Optional[generations_filters] = None,
+ order_by: Optional[generations_order_by] = None,
+ ) -> PaginatedResponse["BaseGeneration"]:
+ return self.gql_helper(
+ *get_generations_helper(first, after, before, filters, order_by)
+ )
+
+ def create_generation(
+ self, generation: Union["ChatGeneration", "CompletionGeneration"]
+ ) -> Union["ChatGeneration", "CompletionGeneration"]:
+ return self.gql_helper(*create_generation_helper(generation))
+
+ ##################################################################################
+ # Dataset APIs #
+ ##################################################################################
+
+ def create_dataset(
+ self,
+ name: str,
+ description: Optional[str] = None,
+ metadata: Optional[Dict] = None,
+ type: DatasetType = "key_value",
+ ) -> "Dataset":
+ return self.gql_helper(
+ *create_dataset_helper(self, name, description, metadata, type)
+ )
+
+ def get_dataset(
+ self, id: Optional[str] = None, name: Optional[str] = None
+ ) -> Optional["Dataset"]:
+ subpath, _, variables, process_response = get_dataset_helper(
+ self, id=id, name=name
+ )
+ response = self.make_rest_call(subpath, variables)
+ return process_response(response)
+
+ def update_dataset(
+ self,
+ id: str,
+ name: Optional[str] = None,
+ description: Optional[str] = None,
+ metadata: Optional[Dict] = None,
+ ) -> "Dataset":
+ return self.gql_helper(
+ *update_dataset_helper(self, id, name, description, metadata)
+ )
+
+ def delete_dataset(self, id: str) -> "Dataset":
+ return self.gql_helper(*delete_dataset_helper(self, id))
+
+ ##################################################################################
+ # Experiment APIs #
+ ##################################################################################
+
+ def create_experiment(
+ self,
+ name: str,
+ dataset_id: Optional[str] = None,
+ prompt_variant_id: Optional[str] = None,
+ params: Optional[Dict] = None,
+ ) -> "DatasetExperiment":
+ return self.gql_helper(
+ *create_experiment_helper(
+ api=self,
+ name=name,
+ dataset_id=dataset_id,
+ prompt_variant_id=prompt_variant_id,
+ params=params,
+ )
+ )
+
+ def create_experiment_item(
+ self, experiment_item: DatasetExperimentItem
+ ) -> "DatasetExperimentItem":
+ # Create the dataset experiment item
+ result = self.gql_helper(
+ *create_experiment_item_helper(
+ dataset_experiment_id=experiment_item.dataset_experiment_id,
+ dataset_item_id=experiment_item.dataset_item_id,
+ experiment_run_id=experiment_item.experiment_run_id,
+ input=experiment_item.input,
+ output=experiment_item.output,
+ )
+ )
+
+ for score in experiment_item.scores:
+ score["datasetExperimentItemId"] = result.id
+
+ # Create the scores and add to experiment item.
+ result.scores = self.create_scores(experiment_item.scores)
+
+ return result
+
+ ##################################################################################
+ # Dataset Item APIs #
+ ##################################################################################
+
+ def create_dataset_item(
+ self,
+ dataset_id: str,
+ input: Dict,
+ expected_output: Optional[Dict] = None,
+ metadata: Optional[Dict] = None,
+ ) -> "DatasetItem":
+ return self.gql_helper(
+ *create_dataset_item_helper(dataset_id, input, expected_output, metadata)
+ )
+
+ def get_dataset_item(self, id: str) -> Optional["DatasetItem"]:
+ return self.gql_helper(*get_dataset_item_helper(id))
+
+ def delete_dataset_item(self, id: str) -> "DatasetItem":
+ return self.gql_helper(*delete_dataset_item_helper(id))
+
+ def add_step_to_dataset(
+ self, dataset_id: str, step_id: str, metadata: Optional[Dict] = None
+ ) -> "DatasetItem":
+ return self.gql_helper(
+ *add_step_to_dataset_helper(dataset_id, step_id, metadata)
+ )
+
+ def add_generation_to_dataset(
+ self, dataset_id: str, generation_id: str, metadata: Optional[Dict] = None
+ ) -> "DatasetItem":
+ return self.gql_helper(
+ *add_generation_to_dataset_helper(dataset_id, generation_id, metadata)
+ )
+
+ ##################################################################################
+ # Prompt APIs #
+ ##################################################################################
+
+ def get_or_create_prompt_lineage(
+ self, name: str, description: Optional[str] = None
+ ) -> Dict:
+ return self.gql_helper(*create_prompt_lineage_helper(name, description))
+
+ @deprecated("Use get_or_create_prompt_lineage instead")
+ def create_prompt_lineage(self, name: str, description: Optional[str] = None) -> Dict:
+ return self.get_or_create_prompt_lineage(name, description)
+
+ def get_or_create_prompt(
+ self,
+ name: str,
+ template_messages: List[GenerationMessage],
+ settings: Optional[ProviderSettings] = None,
+ tools: Optional[List[Dict]] = None,
+ ) -> "Prompt":
+ lineage = self.get_or_create_prompt_lineage(name)
+ lineage_id = lineage["id"]
+ return self.gql_helper(
+ *create_prompt_helper(self, lineage_id, template_messages, settings, tools)
+ )
+
+ @deprecated("Please use `get_or_create_prompt` instead.")
+ def create_prompt(
+ self,
+ name: str,
+ template_messages: List[GenerationMessage],
+ settings: Optional[ProviderSettings] = None,
+ ) -> "Prompt":
+ return self.get_or_create_prompt(name, template_messages, settings)
+
+ def get_prompt(
+ self,
+ id: Optional[str] = None,
+ name: Optional[str] = None,
+ version: Optional[int] = None,
+ ) -> "Prompt":
+ if id:
+ return self.gql_helper(*get_prompt_helper(self, id=id))
+ elif name:
+ return self.gql_helper(*get_prompt_helper(self, name=name, version=version))
+ else:
+ raise ValueError("Either the `id` or the `name` must be provided.")
+
+ def create_prompt_variant(
+ self,
+ name: str,
+ template_messages: List[GenerationMessage],
+ settings: Optional[ProviderSettings] = None,
+ tools: Optional[List[Dict]] = None,
+ ) -> Optional[str]:
+ lineage = self.gql_helper(*get_prompt_lineage_helper(name))
+ lineage_id = lineage["id"] if lineage else None
+ return self.gql_helper(
+ *create_prompt_variant_helper(
+ lineage_id, template_messages, settings, tools
+ )
+ )
+
+ def get_prompt_ab_testing(self, name: str) -> List["PromptRollout"]:
+ return self.gql_helper(*get_prompt_ab_testing_helper(name=name))
+
+ def update_prompt_ab_testing(
+ self, name: str, rollouts: List["PromptRollout"]
+ ) -> Dict:
+ return self.gql_helper(
+ *update_prompt_ab_testing_helper(name=name, rollouts=rollouts)
+ )
+
+ ##################################################################################
+ # Misc APIs #
+ ##################################################################################
+
+ def get_my_project_id(self) -> str:
+ response = self.make_rest_call("/my-project", {})
+ return response["projectId"]
diff --git a/literalai/evaluation/dataset.py b/literalai/evaluation/dataset.py
index 6fc7b52..61789d6 100644
--- a/literalai/evaluation/dataset.py
+++ b/literalai/evaluation/dataset.py
@@ -26,6 +26,9 @@ class DatasetDict(TypedDict, total=False):
@dataclass(repr=False)
class Dataset(Utils):
+ """
+ A dataset of items, each item representing an ideal scenario to run experiments on.
+ """
api: "LiteralAPI"
id: str
created_at: str
@@ -69,6 +72,9 @@ def update(
description: Optional[str] = None,
metadata: Optional[Dict] = None,
):
+ """
+ Update the dataset with the given name, description and metadata.
+ """
updated_dataset = self.api.update_dataset(
self.id, name=name, description=description, metadata=metadata
)
@@ -77,6 +83,9 @@ def update(
self.metadata = updated_dataset.metadata
def delete(self):
+ """
+ Deletes the dataset.
+ """
self.api.delete_dataset(self.id)
def create_item(
@@ -86,11 +95,15 @@ def create_item(
metadata: Optional[Dict] = None,
) -> DatasetItem:
"""
- Create a new dataset item and add it to this dataset.
- :param input: The input data for the dataset item.
- :param expected_output: The output data for the dataset item (optional).
- :param metadata: Metadata for the dataset item (optional).
- :return: The created DatasetItem instance.
+ Creates a new dataset item and adds it to this dataset.
+
+ Args:
+ input: The input data for the dataset item.
+ expected_output: The output data for the dataset item (optional).
+ metadata: Metadata for the dataset item (optional).
+
+ Returns:
+ `DatasetItem`: The created DatasetItem instance.
"""
dataset_item = self.api.create_dataset_item(
self.id, input, expected_output, metadata
@@ -108,10 +121,14 @@ def create_experiment(
) -> DatasetExperiment:
"""
Creates a new dataset experiment based on this dataset.
- :param name: The name of the experiment .
- :param prompt_variant_id: The Prompt variant ID to experiment on.
- :param params: The params used on the experiment.
- :return: The created DatasetExperiment instance.
+
+ Args:
+ name: The name of the experiment.
+ prompt_variant_id: The Prompt variant ID to experiment on.
+ params: The params used on the experiment.
+
+ Returns:
+ `DatasetExperiment`: The created DatasetExperiment instance.
"""
experiment = self.api.create_experiment(
name=name,
@@ -123,9 +140,10 @@ def create_experiment(
def delete_item(self, item_id: str):
"""
- Delete a dataset item from this dataset.
- :param api: An instance of the DatasetAPI to make the call.
- :param item_id: The ID of the dataset item to delete.
+ Deletes a dataset item from this dataset.
+
+ Args:
+ item_id: The ID of the dataset item to delete.
"""
self.api.delete_dataset_item(item_id)
if self.items is not None:
@@ -134,9 +152,13 @@ def delete_item(self, item_id: str):
def add_step(self, step_id: str, metadata: Optional[Dict] = None) -> DatasetItem:
"""
Create a new dataset item based on a step and add it to this dataset.
- :param step_id: The id of the step to add to the dataset.
- :param metadata: Metadata for the dataset item (optional).
- :return: The created DatasetItem instance.
+
+ Args:
+ step_id: The id of the step to add to the dataset.
+ metadata: Metadata for the dataset item (optional).
+
+ Returns:
+ `DatasetItem`: The created DatasetItem instance.
"""
if self.type == "generation":
raise ValueError("Cannot add a step to a generation dataset")
@@ -152,9 +174,13 @@ def add_generation(
) -> DatasetItem:
"""
Create a new dataset item based on a generation and add it to this dataset.
- :param generation_id: The id of the generation to add to the dataset.
- :param metadata: Metadata for the dataset item (optional).
- :return: The created DatasetItem instance.
+
+ Args:
+ generation_id: The id of the generation to add to the dataset.
+ metadata: Metadata for the dataset item (optional).
+
+ Returns:
+ `DatasetItem`: The created DatasetItem instance.
"""
dataset_item = self.api.add_generation_to_dataset(
self.id, generation_id, metadata
diff --git a/literalai/evaluation/dataset_experiment.py b/literalai/evaluation/dataset_experiment.py
index cacd193..e428036 100644
--- a/literalai/evaluation/dataset_experiment.py
+++ b/literalai/evaluation/dataset_experiment.py
@@ -21,6 +21,9 @@ class DatasetExperimentItemDict(TypedDict, total=False):
@dataclass(repr=False)
class DatasetExperimentItem(Utils):
+ """
+ An item of a `DatasetExperiment`: it may be linked to a `DatasetItem`.
+ """
id: str
dataset_experiment_id: str
dataset_item_id: Optional[str]
@@ -65,6 +68,9 @@ class DatasetExperimentDict(TypedDict, total=False):
@dataclass(repr=False)
class DatasetExperiment(Utils):
+ """
+ An experiment, optionally linked to a `Dataset`.
+ """
api: "LiteralAPI"
id: str
created_at: str
@@ -75,6 +81,9 @@ class DatasetExperiment(Utils):
items: List[DatasetExperimentItem] = field(default_factory=lambda: [])
def log(self, item_dict: DatasetExperimentItemDict) -> DatasetExperimentItem:
+ """
+ Logs an item to the dataset experiment.
+ """
experiment_run_id = active_experiment_item_run_id_var.get()
dataset_experiment_item = DatasetExperimentItem.from_dict(
{
diff --git a/literalai/evaluation/dataset_item.py b/literalai/evaluation/dataset_item.py
index 9ec1601..a994b19 100644
--- a/literalai/evaluation/dataset_item.py
+++ b/literalai/evaluation/dataset_item.py
@@ -16,6 +16,9 @@ class DatasetItemDict(TypedDict, total=False):
@dataclass(repr=False)
class DatasetItem(Utils):
+ """
+ A `Dataset` item, containing `input`, `expectedOutput` and `metadata`.
+ """
id: str
created_at: str
dataset_id: str
@@ -25,6 +28,9 @@ class DatasetItem(Utils):
intermediary_steps: Optional[List[Dict]] = None
def to_dict(self):
+ """
+ Dumps the contents of the object into a dictionary.
+ """
return {
"id": self.id,
"createdAt": self.created_at,
@@ -37,6 +43,9 @@ def to_dict(self):
@classmethod
def from_dict(cls, dataset_item: DatasetItemDict) -> "DatasetItem":
+ """
+ Builds a `DatasetItem` object from a dictionary.
+ """
return cls(
id=dataset_item.get("id", ""),
created_at=dataset_item.get("createdAt", ""),
diff --git a/literalai/instrumentation/llamaindex/__init__.py b/literalai/instrumentation/llamaindex/__init__.py
index dd0a272..5379f09 100644
--- a/literalai/instrumentation/llamaindex/__init__.py
+++ b/literalai/instrumentation/llamaindex/__init__.py
@@ -7,6 +7,9 @@
is_llamaindex_instrumented = False
def instrument_llamaindex(client: "LiteralClient"):
+ """
+ Instruments LlamaIndex to automatically send logs to Literal AI.
+ """
global is_llamaindex_instrumented
if is_llamaindex_instrumented:
return
diff --git a/literalai/instrumentation/mistralai.py b/literalai/instrumentation/mistralai.py
index 31966d1..881256f 100644
--- a/literalai/instrumentation/mistralai.py
+++ b/literalai/instrumentation/mistralai.py
@@ -98,6 +98,9 @@
def instrument_mistralai(client: "LiteralClient", on_new_generation=None):
+ """
+ Instruments all Mistral AI LLM calls to automatically send logs to Literal AI.
+ """
global is_mistralai_instrumented
if is_mistralai_instrumented:
return
diff --git a/literalai/instrumentation/openai.py b/literalai/instrumentation/openai.py
index 6130d9e..a3117ec 100644
--- a/literalai/instrumentation/openai.py
+++ b/literalai/instrumentation/openai.py
@@ -60,6 +60,9 @@
def instrument_openai(client: "LiteralClient", on_new_generation=None):
+ """
+ Instruments all OpenAI LLM calls to automatically send logs to Literal AI.
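+
+ A minimal usage sketch, calling the module-level function directly:
+ ```py
+ from literalai import LiteralClient
+ from literalai.instrumentation.openai import instrument_openai
+
+ literalai_client = LiteralClient(api_key="lsk-***")
+ instrument_openai(literalai_client)
+ # Subsequent OpenAI LLM calls are now logged to Literal AI.
+ ```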
+ """
global is_openai_instrumented
if is_openai_instrumented:
return
diff --git a/literalai/observability/step.py b/literalai/observability/step.py
index d582e2f..8a3ee89 100644
--- a/literalai/observability/step.py
+++ b/literalai/observability/step.py
@@ -68,6 +68,16 @@ class AttachmentDict(TypedDict, total=False):
@dataclass(repr=False)
class Score(Utils):
+ """
+ A score captures information about the quality of a step/experiment item.
+ It can be one of the following types:
+ - HUMAN: to capture human feedback
+ - CODE: to capture the result of a code execution (deterministic)
+ - AI: to capture the result of an AI model or a hybrid approach including code as well (non-deterministic)
+
+ Learn more about scores [here](https://docs.literalai.com/guides/settings/scoring#score-schemas).
+ """
+
name: str
type: ScoreType
value: float
@@ -116,6 +126,10 @@ def from_dict(cls, score_dict: ScoreDict) -> "Score":
@dataclass(repr=False)
class Attachment(Utils):
+ """
+ An attachment is an object that can be associated with a step.
+ It can be an image, a file, a video, etc.
+ """
step_id: Optional[str] = None
thread_id: Optional[str] = None
id: Optional[str] = Field(default_factory=lambda: str(uuid.uuid4()))
@@ -276,77 +290,31 @@ def my_step():
## Step parameters
-
- The id of the thread
-
-
-
- The id of the step. If not provided, a random uuid will be generated. Use
- custom ones to match your own system. Step ids must be unique across your
- project.
-
-
-
- The name of the step (automatically set to the function name if using the
- decorator)
-
-
-
- The type of the step. A Step can be one of the following types:
-
- - `run`: A generic step
- - `tool`: A step that runs a tool
- - `llm`: A step that runs a language model
- - `embedding`: A step that runs an embedding model
- - `retrieval`: A step that retrieves documents
- - `rerank`: A step that reranks documents
- - `undefined`: An undefined step
-
-
-
-
- Metadata associated with the step. This enables you to add custom fields to
- your steps.
-
-
-
- The id of the parent step. This enables you to create nested steps.
-
-
-
- The start time of the step.
-
-
-
- The end time of the step.
-
-
-
- The server-side creation time of the step.
-
-
-
- A dictionary symbolizing an input.
- Prefer using `content` key to store a message.
-
-
-
- A dictionary symbolizing an output.
- Prefer using `content` key to store a message.
-
-
-
- The tags of the step. This is a complimentary field to the metadata field. It
- enables you to add custom tags to your steps.
-
-
-
- The generation object associated with the step.
-
-
-
- The attachments associated with the step.
-
+ Attributes:
+ **thread_id** (Optional[str]): The id of the thread.
+ **id** (Optional[str]): The id of the step. If not provided, a random uuid will be generated. Use
+ custom ones to match your own system. Step ids must be unique across your
+ project.
+ **name** (Optional[str]): The name of the step (automatically set to the function name if using the
+ decorator).
+ **type** (Optional[StepType]): The type of the step. A Step can be one of the following types:
+ - `run`: A generic step
+ - `tool`: A step that runs a tool
+ - `llm`: A step that runs a language model
+ - `embedding`: A step that runs an embedding model
+ - `retrieval`: A step that retrieves documents
+ - `rerank`: A step that reranks documents
+ - `undefined`: An undefined step
+ **metadata** (Optional[Dict]): Metadata associated with the step. This enables you to add custom fields to your steps.
+ **parent_id** (Optional[str]): The id of the parent step. This enables you to create nested steps.
+ **start_time** (Optional[str]): The start time of the step.
+ **end_time** (Optional[str]): The end time of the step.
+ **created_at** (Optional[str]): The server-side creation time of the step.
+ **input** (Optional[Dict]): A dictionary symbolizing an input. Prefer using `content` key to store a message.
+ **output** (Optional[Dict]): A dictionary symbolizing an output. Prefer using `content` key to store a message.
+ **tags** (Optional[List[str]]): The tags of the step. This is a complementary field to the metadata field. It enables you to add custom tags to your steps.
+ **generation** (Optional[Union[ChatGeneration, CompletionGeneration]]): The generation object associated with the step.
+ **attachments** (Optional[List[Attachment]]): The attachments associated with the step.
"""
id: str
diff --git a/literalai/prompt_engineering/prompt.py b/literalai/prompt_engineering/prompt.py
index 510b408..00e5bea 100644
--- a/literalai/prompt_engineering/prompt.py
+++ b/literalai/prompt_engineering/prompt.py
@@ -62,6 +62,29 @@ class PromptDict(TypedDict, total=False):
@dataclass(repr=False)
class Prompt(Utils):
+ """
+ Represents a version of a prompt template with variables, tools and settings.
+
+ Attributes:
+     template_messages (List[GenerationMessage]): The messages that make up the prompt.
+         Messages can be of type `text` or `image` and can reference variables.
+     variables (List[PromptVariable]): Variables exposed in the prompt.
+     tools (Optional[List[Dict]]): Tools the LLM can pick from.
+     settings (ProviderSettings): LLM provider settings.
+
+ Methods:
+     format_messages(**kwargs: Any): Formats the prompt's template messages with the
+         given variables. Variables may be passed as a dictionary or as keyword arguments;
+         keyword arguments take precedence over variables passed as a dictionary.
+ """
+
api: "LiteralAPI"
id: str
created_at: str
@@ -129,7 +152,6 @@ def format_messages(self, **kwargs: Any) -> List[Any]:
Args:
variables (Optional[Dict[str, Any]]): Optional variables to resolve in the template messages.
-
Returns:
List[Any]: List of formatted chat completion messages.
"""
@@ -160,9 +182,15 @@ def format_messages(self, **kwargs: Any) -> List[Any]:
@deprecated('Please use "format_messages" instead')
def format(self, variables: Optional[Dict[str, Any]] = None) -> List[Any]:
+ """
+ Deprecated. Please use `format_messages` instead.
+ """
return self.format_messages(**(variables or {}))
def to_langchain_chat_prompt_template(self, additional_messages=[]):
+ """
+ Converts a Literal AI prompt to a LangChain prompt template format.
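+
+ A minimal usage sketch, assuming a prompt already fetched with `get_prompt`:
+ ```py
+ chat_prompt = prompt.to_langchain_chat_prompt_template()
+ # chat_prompt can then be used like any LangChain chat prompt template.
+ ```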
+ """
try:
version("langchain")
except Exception: