diff --git a/.gitignore b/.gitignore
index bec600f..0f2c2f2 100644
--- a/.gitignore
+++ b/.gitignore
@@ -172,3 +172,4 @@ cython_debug/
.python-version
uv.lock
_proc
+experiments
diff --git a/ragas_annotator/backends/__init__.py b/nbs/.notest
similarity index 100%
rename from ragas_annotator/backends/__init__.py
rename to nbs/.notest
diff --git a/nbs/backends/factory.ipynb b/nbs/backends/factory.ipynb
index c9d5595..388bf14 100644
--- a/nbs/backends/factory.ipynb
+++ b/nbs/backends/factory.ipynb
@@ -24,13 +24,11 @@
"metadata": {},
"outputs": [],
"source": [
- "# | export\n",
+ "#| export\n",
"import typing as t\n",
"import os\n",
"\n",
- "from notion_client import Client as NotionClient\n",
- "from ragas_annotator.backends.mock_notion import MockNotionClient\n",
- "from ragas_annotator.backends.notion_backend import NotionBackend"
+ "from ragas_experimental.backends.ragas_api_client import RagasApiClient"
]
},
{
@@ -39,204 +36,37 @@
"metadata": {},
"outputs": [],
"source": [
- "# | export\n",
- "class NotionClientFactory:\n",
- " \"\"\"Factory for creating Notion client instances.\"\"\"\n",
+ "#| export\n",
+ "class RagasApiClientFactory:\n",
+ " \"\"\"Factory for creating Ragas API client instances.\"\"\"\n",
"\n",
" @staticmethod\n",
" def create(\n",
- " use_mock: bool = False,\n",
- " api_key: t.Optional[str] = None,\n",
- " initialize_project: bool = False,\n",
- " root_page_id: t.Optional[str] = None,\n",
- " ) -> t.Union[NotionClient, MockNotionClient]:\n",
- " \"\"\"Create a Notion client.\n",
+ " app_token: t.Optional[str] = None,\n",
+ " base_url: t.Optional[str] = None,\n",
+ " ) -> RagasApiClient:\n",
+ " \"\"\"Create a Ragas API client.\n",
"\n",
" Args:\n",
- " use_mock: If True, create a mock client\n",
- " api_key: Notion API key (only used for real client)\n",
- " initialize_project: If True and using mock, initialize project structure\n",
- " root_page_id: Required if initialize_project is True\n",
+ "        app_token: The app token for the Ragas API\n",
+ " base_url: The base URL for the Ragas API\n",
"\n",
" Returns:\n",
- " Union[NotionClient, MockNotionClient]: A real or mock client\n",
+ " RagasApiClient: A Ragas API client instance\n",
" \"\"\"\n",
- " if use_mock:\n",
- " client = MockNotionClient()\n",
+ " if app_token is None:\n",
+ " app_token = os.getenv(\"RAGAS_APP_TOKEN\")\n",
"\n",
- " # Optionally initialize project structure\n",
- " if initialize_project and root_page_id:\n",
- " # Create root page if it doesn't exist in the mock client\n",
- " if root_page_id not in client._pages:\n",
- " # Create root page\n",
- " root_page = {\n",
- " \"id\": root_page_id,\n",
- " \"object\": \"page\",\n",
- " \"created_time\": client._get_timestamp(),\n",
- " \"last_edited_time\": client._get_timestamp(),\n",
- " \"archived\": False,\n",
- " \"properties\": {\n",
- " \"title\": {\n",
- " \"type\": \"title\",\n",
- " \"title\": [\n",
- " {\n",
- " \"plain_text\": \"Root Page\",\n",
- " \"type\": \"text\",\n",
- " \"text\": {\"content\": \"Root Page\"},\n",
- " }\n",
- " ],\n",
- " }\n",
- " },\n",
- " }\n",
- " client.add_page(root_page)\n",
+ " if app_token is None:\n",
+ "            raise ValueError(\"RAGAS_APP_TOKEN environment variable is not set\")\n",
"\n",
- " # Create required sub-pages\n",
- " for page_name in [\"Datasets\", \"Experiments\", \"Comparisons\"]:\n",
- " # Create page ID\n",
- " page_id = client._create_id()\n",
+ " if base_url is None:\n",
+ " base_url = os.getenv(\"RAGAS_API_BASE_URL\")\n",
"\n",
- " # Create page\n",
- " page = {\n",
- " \"id\": page_id,\n",
- " \"object\": \"page\",\n",
- " \"created_time\": client._get_timestamp(),\n",
- " \"last_edited_time\": client._get_timestamp(),\n",
- " \"archived\": False,\n",
- " \"properties\": {\n",
- " \"title\": {\n",
- " \"type\": \"title\",\n",
- " \"title\": [\n",
- " {\n",
- " \"plain_text\": page_name,\n",
- " \"type\": \"text\",\n",
- " \"text\": {\"content\": page_name},\n",
- " }\n",
- " ],\n",
- " }\n",
- " },\n",
- " \"parent\": {\"type\": \"page_id\", \"page_id\": root_page_id},\n",
- " }\n",
- " client.add_page(page)\n",
+ " if base_url is None:\n",
+ " base_url = \"https://api.dev.app.ragas.io\"\n",
"\n",
- " # Add child block to root\n",
- " child_block = {\n",
- " \"id\": client._create_id(),\n",
- " \"object\": \"block\",\n",
- " \"type\": \"child_page\",\n",
- " \"created_time\": client._get_timestamp(),\n",
- " \"last_edited_time\": client._get_timestamp(),\n",
- " \"child_page\": {\"title\": page_name},\n",
- " }\n",
- "\n",
- " client.add_children(root_page_id, [child_block])\n",
- "\n",
- " return client\n",
- " else:\n",
- " # For real client, use provided API key or environment variable\n",
- " if api_key is None:\n",
- " api_key = os.getenv(\"NOTION_API_KEY\")\n",
- "\n",
- " if api_key is None:\n",
- " raise ValueError(\n",
- " \"api_key must be provided or set as NOTION_API_KEY environment variable\"\n",
- " )\n",
- "\n",
- " return NotionClient(auth=api_key)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [
- {
- "data": {
- "text/plain": [
- "MockNotionClient(num_pages=0, num_databases=0, num_blocks=0)"
- ]
- },
- "execution_count": null,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "# create the mock notion client\n",
- "mock_notion_client = NotionClientFactory.create(use_mock=True)\n",
- "mock_notion_client"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "the `initialize_project` adds the project pages too for you."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [
- {
- "data": {
- "text/plain": [
- "MockNotionClient(num_pages=4, num_databases=0, num_blocks=0)"
- ]
- },
- "execution_count": null,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "mock_notion_client = NotionClientFactory.create(\n",
- " use_mock=True, initialize_project=True, root_page_id=\"your_root_page_id\"\n",
- ")\n",
- "mock_notion_client"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | export\n",
- "class NotionBackendFactory:\n",
- " \"\"\"Factory for creating NotionBackend instances.\"\"\"\n",
- "\n",
- " @staticmethod\n",
- " def create(\n",
- " root_page_id: str,\n",
- " use_mock: bool = False,\n",
- " api_key: t.Optional[str] = None,\n",
- " initialize_project: bool = False,\n",
- " notion_client: t.Optional[t.Union[NotionClient, MockNotionClient]] = None,\n",
- " ) -> NotionBackend:\n",
- " \"\"\"Create a NotionBackend instance.\n",
- "\n",
- " Args:\n",
- " root_page_id: The ID of the root page\n",
- " use_mock: If True, create a backend with a mock client\n",
- " api_key: Notion API key (only used for real client)\n",
- " initialize_project: If True and using mock, initialize project structure\n",
- " notion_client: Optional pre-configured Notion client\n",
- "\n",
- " Returns:\n",
- " NotionBackend: A backend instance with either real or mock client\n",
- " \"\"\"\n",
- " # Use provided client or create one\n",
- " if notion_client is None:\n",
- " notion_client = NotionClientFactory.create(\n",
- " use_mock=use_mock,\n",
- " api_key=api_key,\n",
- " initialize_project=initialize_project,\n",
- " root_page_id=root_page_id,\n",
- " )\n",
- "\n",
- " # Create and return the backend\n",
- " return NotionBackend(root_page_id=root_page_id, notion_client=notion_client)"
+ " return RagasApiClient(app_token=app_token, base_url=base_url)\n"
]
}
],
diff --git a/nbs/backends/mock_notion_client.ipynb b/nbs/backends/mock_notion_client.ipynb
deleted file mode 100644
index 1c26c2f..0000000
--- a/nbs/backends/mock_notion_client.ipynb
+++ /dev/null
@@ -1,328 +0,0 @@
-{
- "cells": [
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "# `MockNotionClient`\n",
- "\n",
- "> Helps with testing `ragas_annotator` better."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | default_exp backends.mock_notion"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | export\n",
- "import typing as t\n",
- "import uuid\n",
- "from copy import deepcopy\n",
- "from datetime import datetime\n",
- "\n",
- "from ragas_annotator.exceptions import NotFoundError"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | export\n",
- "class MockPagesAPI:\n",
- " \"\"\"Mock implementation of notion_client.Client.pages\"\"\"\n",
- "\n",
- " def __init__(self, client):\n",
- " self.client = client\n",
- "\n",
- " def create(self, parent, properties, **kwargs):\n",
- " \"\"\"Create a new page.\"\"\"\n",
- " page_id = self.client._create_id()\n",
- "\n",
- " # Create the page object\n",
- " page = {\n",
- " \"id\": page_id,\n",
- " \"object\": \"page\",\n",
- " \"created_time\": self.client._get_timestamp(),\n",
- " \"last_edited_time\": self.client._get_timestamp(),\n",
- " \"archived\": False,\n",
- " \"properties\": deepcopy(properties),\n",
- " \"parent\": deepcopy(parent),\n",
- " }\n",
- "\n",
- " # Add page to storage\n",
- " self.client._pages[page_id] = page\n",
- "\n",
- " # Add child reference to parent\n",
- " parent_type = parent.get(\"type\")\n",
- " parent_id = parent.get(f\"{parent_type}_id\")\n",
- "\n",
- " if parent_id:\n",
- " child_block = {\n",
- " \"id\": self.client._create_id(),\n",
- " \"object\": \"block\",\n",
- " \"type\": \"child_page\",\n",
- " \"created_time\": self.client._get_timestamp(),\n",
- " \"last_edited_time\": self.client._get_timestamp(),\n",
- " \"child_page\": {\"title\": self._extract_title(properties)},\n",
- " }\n",
- "\n",
- " if parent_id not in self.client._children:\n",
- " self.client._children[parent_id] = []\n",
- "\n",
- " self.client._children[parent_id].append(child_block)\n",
- "\n",
- " return deepcopy(page)\n",
- "\n",
- " def retrieve(self, page_id):\n",
- " \"\"\"Retrieve a page by ID.\"\"\"\n",
- " if page_id not in self.client._pages:\n",
- " raise NotFoundError(f\"Page {page_id} not found\")\n",
- "\n",
- " return deepcopy(self.client._pages[page_id])\n",
- "\n",
- " def update(self, page_id, properties=None, archived=None, **kwargs):\n",
- " \"\"\"Update a page.\"\"\"\n",
- " if page_id not in self.client._pages:\n",
- " raise NotFoundError(f\"Page {page_id} not found\")\n",
- "\n",
- " page = self.client._pages[page_id]\n",
- "\n",
- " if properties:\n",
- " # Update properties\n",
- " for key, value in properties.items():\n",
- " page[\"properties\"][key] = deepcopy(value)\n",
- "\n",
- " if archived is not None:\n",
- " page[\"archived\"] = archived\n",
- "\n",
- " page[\"last_edited_time\"] = self.client._get_timestamp()\n",
- "\n",
- " return deepcopy(page)\n",
- "\n",
- " def _extract_title(self, properties):\n",
- " \"\"\"Extract page title from properties.\"\"\"\n",
- " for prop in properties.values():\n",
- " if prop.get(\"type\") == \"title\" and prop.get(\"title\"):\n",
- " for text_obj in prop[\"title\"]:\n",
- " if text_obj.get(\"type\") == \"text\" and \"content\" in text_obj.get(\n",
- " \"text\", {}\n",
- " ):\n",
- " return text_obj[\"text\"][\"content\"]\n",
- " return \"Untitled\""
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | export\n",
- "class MockDatabasesAPI:\n",
- " \"\"\"Mock implementation of notion_client.Client.databases\"\"\"\n",
- "\n",
- " def __init__(self, client):\n",
- " self.client = client\n",
- "\n",
- " def create(self, parent, title, properties, **kwargs):\n",
- " \"\"\"Create a new database.\"\"\"\n",
- " database_id = self.client._create_id()\n",
- "\n",
- " # Create database object\n",
- " database = {\n",
- " \"id\": database_id,\n",
- " \"object\": \"database\",\n",
- " \"created_time\": self.client._get_timestamp(),\n",
- " \"last_edited_time\": self.client._get_timestamp(),\n",
- " \"title\": deepcopy(title),\n",
- " \"properties\": deepcopy(properties),\n",
- " \"parent\": deepcopy(parent),\n",
- " }\n",
- "\n",
- " # Add database to storage\n",
- " self.client._databases[database_id] = database\n",
- "\n",
- " # Add child reference to parent\n",
- " parent_type = parent.get(\"type\")\n",
- " parent_id = parent.get(f\"{parent_type}_id\")\n",
- "\n",
- " if parent_id:\n",
- " child_block = {\n",
- " \"id\": self.client._create_id(),\n",
- " \"object\": \"block\",\n",
- " \"type\": \"child_database\",\n",
- " \"created_time\": self.client._get_timestamp(),\n",
- " \"last_edited_time\": self.client._get_timestamp(),\n",
- " \"child_database\": {\"title\": self._extract_title(title)},\n",
- " }\n",
- "\n",
- " if parent_id not in self.client._children:\n",
- " self.client._children[parent_id] = []\n",
- "\n",
- " self.client._children[parent_id].append(child_block)\n",
- "\n",
- " return deepcopy(database)\n",
- "\n",
- " def retrieve(self, database_id):\n",
- " \"\"\"Retrieve a database by ID.\"\"\"\n",
- " if database_id not in self.client._databases:\n",
- " raise NotFoundError(f\"Database {database_id} not found\")\n",
- "\n",
- " return deepcopy(self.client._databases[database_id])\n",
- "\n",
- " def query(\n",
- " self,\n",
- " database_id,\n",
- " filter=None,\n",
- " sorts=None,\n",
- " start_cursor=None,\n",
- " page_size=100,\n",
- " **kwargs,\n",
- " ):\n",
- " \"\"\"Query a database.\"\"\"\n",
- " if database_id not in self.client._databases:\n",
- " raise NotFoundError(f\"Database {database_id} not found\")\n",
- "\n",
- " # Get all pages in the database\n",
- " results = []\n",
- " for page_id, page in self.client._pages.items():\n",
- " parent = page.get(\"parent\", {})\n",
- " if (\n",
- " parent.get(\"type\") == \"database_id\"\n",
- " and parent.get(\"database_id\") == database_id\n",
- " ):\n",
- " results.append(deepcopy(page))\n",
- "\n",
- " # TODO: Implement filtering, sorting, and pagination if needed\n",
- "\n",
- " return {\"results\": results, \"has_more\": False, \"next_cursor\": None}\n",
- "\n",
- " def _extract_title(self, title):\n",
- " \"\"\"Extract database title from title array.\"\"\"\n",
- " for text_obj in title:\n",
- " if text_obj.get(\"type\") == \"text\" and \"content\" in text_obj.get(\"text\", {}):\n",
- " return text_obj[\"text\"][\"content\"]\n",
- " return \"Untitled\""
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | export\n",
- "class MockBlocksAPI:\n",
- " \"\"\"Mock implementation of notion_client.Client.blocks\"\"\"\n",
- "\n",
- " def __init__(self, client):\n",
- " self.client = client\n",
- " self.children = MockBlockChildrenAPI(client)\n",
- "\n",
- " def retrieve(self, block_id):\n",
- " \"\"\"Retrieve a block by ID.\"\"\"\n",
- " if block_id not in self.client._blocks:\n",
- " raise NotFoundError(f\"Block {block_id} not found\")\n",
- "\n",
- " return deepcopy(self.client._blocks[block_id])\n",
- "\n",
- "\n",
- "class MockBlockChildrenAPI:\n",
- " \"\"\"Mock implementation of notion_client.Client.blocks.children\"\"\"\n",
- "\n",
- " def __init__(self, client):\n",
- " self.client = client\n",
- "\n",
- " def list(self, block_id, start_cursor=None, page_size=100):\n",
- " \"\"\"List children of a block.\"\"\"\n",
- " children = self.client._children.get(block_id, [])\n",
- "\n",
- " # TODO: Implement pagination if needed\n",
- "\n",
- " return {\"results\": deepcopy(children), \"has_more\": False, \"next_cursor\": None}"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | export\n",
- "class MockNotionClient:\n",
- " \"\"\"Mock implementation of notion_client.Client for testing.\"\"\"\n",
- "\n",
- " def __init__(self, auth=None):\n",
- " \"\"\"Initialize the mock client with in-memory storage.\n",
- "\n",
- " Args:\n",
- " auth: Ignored in mock implementation\n",
- " \"\"\"\n",
- " # In-memory storage\n",
- " self._pages = {} # page_id -> page object\n",
- " self._databases = {} # database_id -> database object\n",
- " self._blocks = {} # block_id -> block object\n",
- " self._children = {} # parent_id -> list of child blocks\n",
- "\n",
- " # Create API namespaces to match real client\n",
- " self.pages = MockPagesAPI(self)\n",
- " self.databases = MockDatabasesAPI(self)\n",
- " self.blocks = MockBlocksAPI(self)\n",
- "\n",
- " def _get_timestamp(self):\n",
- " \"\"\"Generate a timestamp in Notion API format.\"\"\"\n",
- " return datetime.utcnow().isoformat() + \"Z\"\n",
- "\n",
- " def _create_id(self):\n",
- " \"\"\"Generate a random ID in Notion format.\"\"\"\n",
- " return str(uuid.uuid4()).replace(\"-\", \"\")\n",
- "\n",
- " def add_page(self, page_data):\n",
- " \"\"\"Add a page to the mock storage.\"\"\"\n",
- " self._pages[page_data[\"id\"]] = deepcopy(page_data)\n",
- "\n",
- " def add_database(self, database_data):\n",
- " \"\"\"Add a database to the mock storage.\"\"\"\n",
- " self._databases[database_data[\"id\"]] = deepcopy(database_data)\n",
- "\n",
- " def add_block(self, block_data):\n",
- " \"\"\"Add a block to the mock storage.\"\"\"\n",
- " self._blocks[block_data[\"id\"]] = deepcopy(block_data)\n",
- "\n",
- " def add_children(self, parent_id, children):\n",
- " \"\"\"Add children to a parent.\"\"\"\n",
- " if parent_id not in self._children:\n",
- " self._children[parent_id] = []\n",
- " self._children[parent_id].extend(deepcopy(children))\n",
- "\n",
- " def __str__(self):\n",
- " return \"MockNotionClient(num_pages={}, num_databases={}, num_blocks={})\".format(\n",
- " len(self._pages), len(self._databases), len(self._blocks)\n",
- " )\n",
- "\n",
- " __repr__ = __str__"
- ]
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "python3",
- "language": "python",
- "name": "python3"
- }
- },
- "nbformat": 4,
- "nbformat_minor": 2
-}
diff --git a/nbs/backends/notion.ipynb b/nbs/backends/notion.ipynb
deleted file mode 100644
index b6da42b..0000000
--- a/nbs/backends/notion.ipynb
+++ /dev/null
@@ -1,637 +0,0 @@
-{
- "cells": [
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "# `NotionBackend`\n",
- "\n",
- "> `Project` uses this backend to interact with the Notion API."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | default_exp backends.notion_backend"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | hide\n",
- "from nbdev.showdoc import *"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | export\n",
- "import typing as t\n",
- "import os\n",
- "from datetime import datetime\n",
- "import uuid\n",
- "\n",
- "from notion_client import Client as NotionClient\n",
- "from fastcore.utils import patch_to, patch\n",
- "\n",
- "from ragas_annotator.exceptions import DuplicateError, NotFoundError"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | export\n",
- "class NotionBackend:\n",
- " \"\"\"A backend for interacting with the Notion API\"\"\"\n",
- "\n",
- " def __init__(\n",
- " self, root_page_id: str, notion_client: t.Optional[NotionClient] = None\n",
- " ):\n",
- " self.root_page_id = root_page_id\n",
- " if notion_client is None:\n",
- " self.client = NotionClient(auth=os.getenv(\"NOTION_API_KEY\"))\n",
- " else:\n",
- " self.client = notion_client\n",
- "\n",
- " def __repr__(self):\n",
- " return f\"NotionBackend(root_page_id={self.root_page_id})\"\n",
- "\n",
- " def validate_project_structure(self, root_page_id):\n",
- " \"\"\"\n",
- " Validate the project structure by checking if the root page exists and has the correct sub-pages.\n",
- " Structure is as follows:\n",
- " - Root Page\n",
- " - Datasets\n",
- " - Experiments\n",
- " - Comparisons\n",
- " \"\"\"\n",
- " # Check if root page exists\n",
- " if not self.page_exists(root_page_id):\n",
- " return False\n",
- "\n",
- " # Search for required sub-pages under root\n",
- " required_pages = {\"Datasets\", \"Experiments\", \"Comparisons\"}\n",
- " found_pages = set()\n",
- "\n",
- " # Search for child pages\n",
- " children = self.client.blocks.children.list(root_page_id)\n",
- " for block in children[\"results\"]:\n",
- " if block[\"type\"] == \"child_page\":\n",
- " found_pages.add(block[\"child_page\"][\"title\"])\n",
- "\n",
- " # Verify all required pages exist\n",
- " return required_pages.issubset(found_pages)\n",
- "\n",
- " def create_new_page(self, parent_page_id, page_name) -> str:\n",
- " \"\"\"\n",
- " Create a new page inside the given parent page and return the page id.\n",
- "\n",
- " Args:\n",
- " parent_page_id (str): The ID of the parent page\n",
- " page_name (str): The title for the new page\n",
- "\n",
- " Returns:\n",
- " str: The ID of the newly created page\n",
- "\n",
- " Raises:\n",
- " ValueError: If the parent page does not exist\n",
- " \"\"\"\n",
- " # First check if parent page exists\n",
- " if not self.page_exists(parent_page_id):\n",
- " raise ValueError(f\"Parent page {parent_page_id} does not exist\")\n",
- "\n",
- " # Create a new child page\n",
- " response = self.client.pages.create(\n",
- " parent={\"type\": \"page_id\", \"page_id\": parent_page_id},\n",
- " properties={\"title\": [{\"type\": \"text\", \"text\": {\"content\": page_name}}]},\n",
- " )\n",
- "\n",
- " # Return the ID of the newly created page\n",
- " return response[\"id\"]\n",
- "\n",
- " def page_exists(self, page_id):\n",
- " \"\"\"Check if a page exists by attempting to retrieve it.\"\"\"\n",
- " try:\n",
- " self.client.pages.retrieve(page_id)\n",
- " return True\n",
- " except:\n",
- " return False\n",
- "\n",
- " def create_new_database(\n",
- " self, parent_page_id: str, title: str, properties: dict\n",
- " ) -> str:\n",
- " \"\"\"Create a new database inside the given parent page.\n",
- "\n",
- " Args:\n",
- " parent_page_id (str): The ID of the parent page\n",
- " title (str): The title for the new database\n",
- " properties (dict): The database properties definition\n",
- "\n",
- " Returns:\n",
- " str: The ID of the newly created database\n",
- " \"\"\"\n",
- " response = self.client.databases.create(\n",
- " parent={\"type\": \"page_id\", \"page_id\": parent_page_id},\n",
- " title=[{\"type\": \"text\", \"text\": {\"content\": title}}],\n",
- " properties=properties,\n",
- " )\n",
- " return response[\"id\"]"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "Lets create a new project in Notion with the `NotionBackend` class. In order to get the root_page_id you can click on the \"share\" button on the page you want to use as the root page. This will give a link like this:\n",
- "\n",
- "```\n",
- "https://www.notion.so/ragas/nbdev_notion_annotator-1b05d9bf94ff8092b52ae8d676e6abf2?pvs=4\n",
- "```\n",
- "\n",
- "The page id is the uuid after the last dash (here it is `1b05d9bf94ff8092b52ae8d676e6abf2`)."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [
- {
- "data": {
- "text/plain": [
- "NotionBackend(root_page_id=your_root_page_id)"
- ]
- },
- "execution_count": null,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "root_page_id = \"your_root_page_id\"\n",
- "\n",
- "notion_backend = NotionBackend(root_page_id)\n",
- "notion_backend"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | hide\n",
- "\n",
- "# uncomment if you want to test this on the notion page \"nbdev_notion_annotator\"\n",
- "# root_page_id = \"1b05d9bf94ff8092b52ae8d676e6abf2\"\n",
- "# notion_backend = NotionBackend(root_page_id)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | hide\n",
- "from unittest.mock import MagicMock"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | hide\n",
- "\n",
- "# patch\n",
- "mocked_notion_client = MagicMock()\n",
- "mocked_notion_client.blocks.children.list.return_value = {\n",
- " \"results\": [\n",
- " # {\"type\": \"child_page\", \"child_page\": {\"title\": \"Datasets\"}},\n",
- " # {\"type\": \"child_page\", \"child_page\": {\"title\": \"Experiments\"}},\n",
- " # {\"type\": \"child_page\", \"child_page\": {\"title\": \"Comparisons\"}}\n",
- " ]\n",
- "}\n",
- "notion_backend = NotionBackend(root_page_id, mocked_notion_client)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [
- {
- "data": {
- "text/plain": [
- "False"
- ]
- },
- "execution_count": null,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "notion_backend.validate_project_structure(root_page_id)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | hide\n",
- "# | test\n",
- "\n",
- "# mock page creation\n",
- "mocked_notion_client.pages.create.return_value = {\"id\": \"1234567890\"}"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# create the structure inside the project root page\n",
- "pages = [\"Datasets\", \"Experiments\", \"Comparisons\"]\n",
- "for page in pages:\n",
- " notion_backend.create_new_page(root_page_id, page)"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "Now you should have the following structure for your project:\n",
- "\n",
- "```\n",
- " (Root Project Page)\n",
- " ├── Datasets\n",
- " ├── Experiments\n",
- " └─ Comparisons\n",
- "```\n",
- "\n",
- "and `notion_backend.validate_project_structure(root_page_id)` should return `True`"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | hide\n",
- "# | test\n",
- "\n",
- "# now that we have the structure, lets return the correct structure\n",
- "mocked_notion_client.blocks.children.list.return_value = {\n",
- " \"results\": [\n",
- " {\"type\": \"child_page\", \"child_page\": {\"title\": \"Datasets\"}},\n",
- " {\"type\": \"child_page\", \"child_page\": {\"title\": \"Experiments\"}},\n",
- " {\"type\": \"child_page\", \"child_page\": {\"title\": \"Comparisons\"}},\n",
- " ]\n",
- "}"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [
- {
- "data": {
- "text/plain": [
- "True"
- ]
- },
- "execution_count": null,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "notion_backend.validate_project_structure(root_page_id)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | export\n",
- "@t.overload\n",
- "def get_page_id(\n",
- " self, parent_id: str, page_name: str, return_multiple: t.Literal[False] = False\n",
- ") -> str: ...\n",
- "@t.overload\n",
- "def get_page_id(\n",
- " self, parent_id: str, page_name: str, return_multiple: t.Literal[True]\n",
- ") -> t.List[str]: ...\n",
- "@patch_to(NotionBackend)\n",
- "def get_page_id(\n",
- " self, parent_id: str, page_name: str, return_multiple: bool = False\n",
- ") -> t.Union[str, t.List[str]]:\n",
- " \"\"\"Get page ID(s) by name under a parent page.\n",
- "\n",
- " Args:\n",
- " parent_id (str): The ID of the parent page to search under\n",
- " page_name (str): The title of the page to find\n",
- " return_multiple (bool): If True, returns all matching page IDs\n",
- "\n",
- " Returns:\n",
- " Union[str, List[str]]: Single page ID or list of page IDs\n",
- "\n",
- " Raises:\n",
- " DuplicateError: If return_multiple is False and multiple pages found\n",
- " ValueError: If no pages found\n",
- " \"\"\"\n",
- " matching_pages = []\n",
- " next_cursor = None\n",
- "\n",
- " while True:\n",
- " # Get page of results, using cursor if we have one\n",
- " response = self.client.blocks.children.list(parent_id, start_cursor=next_cursor)\n",
- "\n",
- " # Check each block in current page\n",
- " for block in response[\"results\"]:\n",
- " if (\n",
- " block[\"type\"] == \"child_page\"\n",
- " and block[\"child_page\"][\"title\"] == page_name\n",
- " ):\n",
- " matching_pages.append(block[\"id\"])\n",
- "\n",
- " # Check if there are more results\n",
- " if not response.get(\"has_more\", False):\n",
- " break\n",
- "\n",
- " next_cursor = response.get(\"next_cursor\")\n",
- "\n",
- " if not matching_pages:\n",
- " raise NotFoundError(f\"No page found with name '{page_name}'\")\n",
- "\n",
- " if return_multiple:\n",
- " return matching_pages\n",
- " else:\n",
- " if len(matching_pages) > 1:\n",
- " raise DuplicateError(f\"Multiple pages found with name '{page_name}'\")\n",
- " return matching_pages[0]"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | export\n",
- "@t.overload\n",
- "def get_database_id(\n",
- " self, parent_page_id: str, name: str, return_multiple: t.Literal[False] = False\n",
- ") -> str: ...\n",
- "\n",
- "\n",
- "@t.overload\n",
- "def get_database_id(\n",
- " self, parent_page_id: str, name: str, return_multiple: t.Literal[True]\n",
- ") -> t.List[str]: ...\n",
- "\n",
- "\n",
- "@patch_to(NotionBackend)\n",
- "def get_database_id(\n",
- " self, parent_page_id: str, name: str, return_multiple: bool = False\n",
- ") -> t.Union[str, t.List[str]]:\n",
- " \"\"\"Get the database ID(s) by name under a parent page.\n",
- "\n",
- " Args:\n",
- " parent_page_id (str): The ID of the parent page to search under\n",
- " name (str): The name of the database to find\n",
- " return_multiple (bool): If True, returns all matching database IDs\n",
- "\n",
- " Returns:\n",
- " Union[str, List[str]]: Single database ID or list of database IDs\n",
- "\n",
- " Raises:\n",
- " NotFoundError: If no database found with given name\n",
- " DuplicateError: If return_multiple is False and multiple databases found\n",
- " \"\"\"\n",
- " matching_databases = []\n",
- " next_cursor = None\n",
- "\n",
- " while True:\n",
- " response = self.client.blocks.children.list(\n",
- " parent_page_id, start_cursor=next_cursor\n",
- " )\n",
- "\n",
- " for block in response[\"results\"]:\n",
- " if block[\"type\"] == \"child_database\":\n",
- " database = self.client.databases.retrieve(database_id=block[\"id\"])\n",
- " if database[\"title\"][0][\"plain_text\"].lower() == name.lower():\n",
- " matching_databases.append(block[\"id\"])\n",
- "\n",
- " if not response.get(\"has_more\", False):\n",
- " break\n",
- "\n",
- " next_cursor = response.get(\"next_cursor\")\n",
- "\n",
- " if not matching_databases:\n",
- " raise NotFoundError(f\"No database found with name '{name}'\")\n",
- "\n",
- " if return_multiple:\n",
- " return matching_databases\n",
- " else:\n",
- " if len(matching_databases) > 1:\n",
- " raise DuplicateError(f\"Multiple databases found with name '{name}'\")\n",
- " return matching_databases[0]"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | export\n",
- "@patch\n",
- "def create_page_in_database(\n",
- " self: NotionBackend,\n",
- " database_id: str,\n",
- " properties: dict,\n",
- " parent: t.Optional[dict] = None,\n",
- ") -> dict:\n",
- " \"\"\"Create a new page in a database.\n",
- "\n",
- " Args:\n",
- " database_id: The ID of the database to create the page in\n",
- " properties: The page properties\n",
- " parent: Optional parent object (defaults to database parent)\n",
- "\n",
- " Returns:\n",
- " dict: The created page object\n",
- " \"\"\"\n",
- " if parent is None:\n",
- " parent = {\"type\": \"database_id\", \"database_id\": database_id}\n",
- "\n",
- " # Remove any unique_id properties as they cannot be updated directly\n",
- " filtered_properties = {\n",
- " k: v\n",
- " for k, v in properties.items()\n",
- " if not (isinstance(v, dict) and v.get(\"type\") == \"unique_id\")\n",
- " }\n",
- "\n",
- " response = self.client.pages.create(parent=parent, properties=filtered_properties)\n",
- "\n",
- " return response"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | export\n",
- "@patch\n",
- "def get_database(self: NotionBackend, database_id: str) -> dict:\n",
- " \"\"\"Get a database by ID.\n",
- "\n",
- " Args:\n",
- " database_id: The ID of the database to retrieve\n",
- "\n",
- " Returns:\n",
- " dict: The database object\n",
- " \"\"\"\n",
- " return self.client.databases.retrieve(database_id=database_id)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | export\n",
- "@patch\n",
- "def query_database(\n",
- " self: NotionBackend,\n",
- " database_id: str,\n",
- " filter: t.Optional[dict] = None,\n",
- " sorts: t.Optional[t.List[dict]] = None,\n",
- " archived: bool = False,\n",
- ") -> dict:\n",
- " \"\"\"Query a database with optional filtering and sorting.\n",
- "\n",
- " Args:\n",
- " database_id: The ID of the database to query\n",
- " filter: Optional filter conditions\n",
- " sorts: Optional sort conditions\n",
- " archived: If True, include archived pages. If False, only return non-archived pages\n",
- "\n",
- " Returns:\n",
- " dict: Query response containing all results\n",
- " \"\"\"\n",
- " query_params = {\n",
- " \"database_id\": database_id,\n",
- " \"page_size\": 100, # Maximum allowed by Notion API\n",
- " }\n",
- "\n",
- " if filter:\n",
- " query_params[\"filter\"] = filter\n",
- " if sorts:\n",
- " query_params[\"sorts\"] = sorts\n",
- "\n",
- " # Initialize results\n",
- " all_results = []\n",
- " has_more = True\n",
- " next_cursor = None\n",
- "\n",
- " # Fetch all pages\n",
- " while has_more:\n",
- " if next_cursor:\n",
- " query_params[\"start_cursor\"] = next_cursor\n",
- "\n",
- " response = self.client.databases.query(**query_params)\n",
- "\n",
- " # Filter results based on archived status\n",
- " filtered_results = [\n",
- " page\n",
- " for page in response[\"results\"]\n",
- " if page.get(\"archived\", False) == archived\n",
- " ]\n",
- " all_results.extend(filtered_results)\n",
- "\n",
- " has_more = response.get(\"has_more\", False)\n",
- " next_cursor = response.get(\"next_cursor\")\n",
- "\n",
- " # Return combined results\n",
- " return {\"results\": all_results, \"has_more\": False, \"next_cursor\": None}"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | export\n",
- "@patch\n",
- "def update_page(\n",
- " self: NotionBackend,\n",
- " page_id: str,\n",
- " properties: t.Optional[t.Dict[str, t.Any]] = None,\n",
- " archived: bool = False,\n",
- ") -> dict:\n",
- " \"\"\"Update a page's properties and/or archive status.\n",
- "\n",
- " Args:\n",
- " page_id: The ID of the page to update\n",
- " properties: Optional properties to update\n",
- " archived: Whether to archive the page\n",
- "\n",
- " Returns:\n",
- " dict: The updated page object\n",
- " \"\"\"\n",
- " update_params = {\"page_id\": page_id}\n",
- "\n",
- " if properties:\n",
- " # Remove any unique_id properties as they cannot be updated directly\n",
- " filtered_properties = {\n",
- " k: v\n",
- " for k, v in properties.items()\n",
- " if not (isinstance(v, dict) and v.get(\"type\") == \"unique_id\")\n",
- " }\n",
- " update_params[\"properties\"] = filtered_properties\n",
- "\n",
- " if archived:\n",
- " update_params[\"archived\"] = True # type: ignore\n",
- "\n",
- " return self.client.pages.update(**update_params)"
- ]
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "python3",
- "language": "python",
- "name": "python3"
- }
- },
- "nbformat": 4,
- "nbformat_minor": 2
-}
diff --git a/nbs/backends/ragas_api_client.ipynb b/nbs/backends/ragas_api_client.ipynb
index 0157579..33e9f9f 100644
--- a/nbs/backends/ragas_api_client.ipynb
+++ b/nbs/backends/ragas_api_client.ipynb
@@ -9,6 +9,13 @@
"> Python client to api.ragas.io"
]
},
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": []
+ },
{
"cell_type": "code",
"execution_count": null,
@@ -37,12 +44,8 @@
"#| export\n",
"import httpx\n",
"import asyncio\n",
- "import functools\n",
"import typing as t\n",
- "import inspect\n",
"from pydantic import BaseModel, Field\n",
- "from enum import StrEnum\n",
- "import uuid\n",
"from fastcore.utils import patch"
]
},
@@ -701,7 +704,17 @@
"outputs": [],
"source": [
"#| export\n",
- "class ColumnType(StrEnum):\n",
+ "from enum import Enum"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "#| export\n",
+ "class ColumnType(str, Enum):\n",
" NUMBER = \"number\"\n",
" TEXT = \"text\"\n",
" LONG_TEXT = \"longText\"\n",
@@ -1374,6 +1387,63 @@
"nano_id"
]
},
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "#| export\n",
+ "import uuid\n",
+ "import string"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "#| export\n",
+ "def create_nano_id(size=12):\n",
+ " # Define characters to use (alphanumeric)\n",
+ " alphabet = string.ascii_letters + string.digits\n",
+ " \n",
+ " # Generate UUID and convert to int\n",
+ " uuid_int = uuid.uuid4().int\n",
+ " \n",
+ " # Convert to base62\n",
+ " result = \"\"\n",
+ " while uuid_int:\n",
+ " uuid_int, remainder = divmod(uuid_int, len(alphabet))\n",
+ " result = alphabet[remainder] + result\n",
+ " \n",
+    "    # Truncate to the desired length (no padding needed: a UUID in base62 is ~22 chars)\n",
+ " return result[:size]"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "'Anvz5k9geU7T'"
+ ]
+ },
+ "execution_count": null,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "# Usage\n",
+ "nano_id = create_nano_id() # e.g., \"8dK9cNw3mP5x\"\n",
+ "nano_id"
+ ]
+ },
{
"cell_type": "code",
"execution_count": null,
diff --git a/nbs/dataset.ipynb b/nbs/dataset.ipynb
index 545c331..9ad3005 100644
--- a/nbs/dataset.ipynb
+++ b/nbs/dataset.ipynb
@@ -41,8 +41,9 @@
"\n",
"from fastcore.utils import patch\n",
"\n",
- "from ragas_annotator.model.notion_model import NotionModel\n",
- "from ragas_annotator.backends.notion_backend import NotionBackend"
+ "from ragas_experimental.model.pydantic_model import ExtendedPydanticBaseModel as BaseModel\n",
+ "from ragas_experimental.utils import create_nano_id, async_to_sync\n",
+ "from ragas_experimental.backends.ragas_api_client import RagasApiClient"
]
},
{
@@ -52,71 +53,91 @@
"outputs": [],
"source": [
"# | export\n",
- "NotionModelType = t.TypeVar(\"NotionModelType\", bound=NotionModel)\n",
+ "BaseModelType = t.TypeVar(\"BaseModelType\", bound=BaseModel)\n",
"\n",
- "\n",
- "class Dataset(t.Generic[NotionModelType]):\n",
- " \"\"\"A list-like interface for managing NotionModel instances in a Notion database.\"\"\"\n",
+ "class Dataset(t.Generic[BaseModelType]):\n",
+ " \"\"\"A list-like interface for managing dataset entries with backend synchronization.\n",
+ " \n",
+ " This class behaves like a Python list while synchronizing operations with the\n",
+ " Ragas backend API.\n",
+ " \"\"\"\n",
"\n",
" def __init__(\n",
" self,\n",
" name: str,\n",
- " model: t.Type[NotionModel],\n",
- " database_id: str,\n",
- " notion_backend: NotionBackend,\n",
+ " model: t.Type[BaseModel],\n",
+ " project_id: str,\n",
+ " dataset_id: str,\n",
+ " ragas_api_client: RagasApiClient,\n",
" ):\n",
" self.name = name\n",
" self.model = model\n",
- " self.database_id = database_id\n",
- " self._notion_backend = notion_backend\n",
- " self._entries: t.List[NotionModelType] = []\n",
+ " self.project_id = project_id\n",
+ " self.dataset_id = dataset_id\n",
+ " self._ragas_api_client = ragas_api_client\n",
+ " self._entries: t.List[BaseModelType] = []\n",
+ "\n",
+ " # Initialize column mapping if it doesn't exist yet\n",
+ " if not hasattr(self.model, \"__column_mapping__\"):\n",
+ " self.model.__column_mapping__ = {}\n",
+ " \n",
+ " # Get column mappings from API and update the model's mapping\n",
+ " column_id_map = self._get_column_id_map(dataset_id=dataset_id)\n",
+ " \n",
+ " # Update the model's column mapping with the values from the API\n",
+ " for field_name, column_id in column_id_map.items():\n",
+ " self.model.__column_mapping__[field_name] = column_id\n",
+ "\n",
+ " def _get_column_id_map(self: \"Dataset\", dataset_id: str) -> dict:\n",
+ " \"\"\"Get a map of column name to column id\"\"\"\n",
+ " sync_func = async_to_sync(self._ragas_api_client.list_dataset_columns)\n",
+ " columns = sync_func(project_id=self.project_id, dataset_id=dataset_id)\n",
+ " column_id_map = {column[\"name\"]: column[\"id\"] for column in columns[\"items\"]}\n",
+ "\n",
+ " # add the column id map to the model, selectively overwriting existing column mapping\n",
+ " for field in self.model.__column_mapping__.keys():\n",
+ " if field in column_id_map:\n",
+ " self.model.__column_mapping__[field] = column_id_map[field]\n",
+ " return column_id_map\n",
"\n",
" def __getitem__(\n",
" self, key: t.Union[int, slice]\n",
- " ) -> t.Union[NotionModelType, \"Dataset[NotionModelType]\"]:\n",
+ " ) -> t.Union[BaseModelType, \"Dataset[BaseModelType]\"]:\n",
" \"\"\"Get an entry by index or slice.\"\"\"\n",
" if isinstance(key, slice):\n",
" new_dataset = type(self)(\n",
" name=self.name,\n",
" model=self.model,\n",
- " database_id=self.database_id,\n",
- " notion_backend=self._notion_backend,\n",
+ " project_id=self.project_id,\n",
+ " dataset_id=self.dataset_id,\n",
+ " ragas_api_client=self._ragas_api_client,\n",
" )\n",
" new_dataset._entries = self._entries[key]\n",
" return new_dataset\n",
" else:\n",
" return self._entries[key]\n",
"\n",
- " def __setitem__(self, index: int, entry: NotionModelType) -> None:\n",
- " \"\"\"Update an entry at the given index and sync to Notion.\"\"\"\n",
+ " def __setitem__(self, index: int, entry: BaseModelType) -> None:\n",
+ " \"\"\"Update an entry at the given index and sync to backend.\"\"\"\n",
" if not isinstance(entry, self.model):\n",
" raise TypeError(f\"Entry must be an instance of {self.model.__name__}\")\n",
"\n",
- " # Get existing entry to get Notion page ID\n",
+ " # Get existing entry to get its ID\n",
" existing = self._entries[index]\n",
- " if not hasattr(existing, \"_page_id\"):\n",
- " raise ValueError(\"Existing entry has no page_id\")\n",
- "\n",
- " # Update in Notion\n",
- " assert (\n",
- " existing._page_id is not None\n",
- " ) # mypy fails to infer that we check for it above\n",
- " response = self._notion_backend.update_page(\n",
- " page_id=existing._page_id, properties=entry.to_notion()[\"properties\"]\n",
- " )\n",
- "\n",
- " # Update local cache with response data\n",
- " self._entries[index] = self.model.from_notion(response)\n",
+ " \n",
+ " # Update in backend\n",
+ " self.save(entry)\n",
+ " \n",
+ " # Update local cache\n",
+ " self._entries[index] = entry\n",
"\n",
" def __repr__(self) -> str:\n",
- " return (\n",
- " f\"Dataset(name={self.name}, model={self.model.__name__}, len={len(self)})\"\n",
- " )\n",
+ " return f\"Dataset(name={self.name}, model={self.model.__name__}, len={len(self)})\"\n",
"\n",
" def __len__(self) -> int:\n",
" return len(self._entries)\n",
"\n",
- " def __iter__(self) -> t.Iterator[NotionModelType]:\n",
+ " def __iter__(self) -> t.Iterator[BaseModelType]:\n",
" return iter(self._entries)"
]
},
@@ -127,10 +148,21 @@
"outputs": [],
"source": [
"# | hide\n",
- "import ragas_annotator.model.notion_typing as nmt\n",
- "from ragas_annotator.backends.mock_notion import MockNotionClient\n",
- "from ragas_annotator.backends.factory import NotionClientFactory\n",
- "from ragas_annotator.backends.notion_backend import NotionBackend"
+ "import ragas_experimental.typing as rt\n",
+ "from ragas_experimental.backends.factory import RagasApiClientFactory"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# test model\n",
+ "class TestModel(BaseModel):\n",
+ " id: int\n",
+ " name: str\n",
+ " description: str"
]
},
{
@@ -141,7 +173,7 @@
{
"data": {
"text/plain": [
- "TestModel(name='test' description='test description')"
+ "{'id': 'id', 'name': 'name', 'description': 'description'}"
]
},
"execution_count": null,
@@ -150,45 +182,110 @@
}
],
"source": [
- "# test model\n",
- "class TestModel(NotionModel):\n",
- " id: int = nmt.ID()\n",
- " name: str = nmt.Title()\n",
- " description: str = nmt.Text()\n",
- "\n",
- "\n",
- "test_model = TestModel(name=\"test\", description=\"test description\")\n",
+ "TestModel.__column_mapping__"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "TestModel(id=0, name='test', description='test description')"
+ ]
+ },
+ "execution_count": null,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "test_model = TestModel(id=0, name=\"test\", description=\"test description\")\n",
"test_model"
]
},
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "{'id': 'id', 'name': 'name', 'description': 'description'}"
+ ]
+ },
+ "execution_count": null,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "test_model.__column_mapping__"
+ ]
+ },
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
- "# | hide\n",
- "# Set up a test environment with mock Notion client and a test database.\n",
- "# root page id\n",
- "root_page_id = \"test-root-id\"\n",
- "# Create a mock client\n",
- "mock_client = NotionClientFactory.create(\n",
- " use_mock=True, initialize_project=True, root_page_id=root_page_id\n",
- ")\n",
- "\n",
- "# Create NotionBackend with mock client\n",
- "backend = NotionBackend(root_page_id=root_page_id, notion_client=mock_client)\n",
- "\n",
- "# get the page id of the datasets page\n",
- "dataset_page_id = backend.get_page_id(parent_id=root_page_id, page_name=\"Datasets\")\n",
+ "import os"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+    "RAGAS_APP_TOKEN = \"<your-ragas-app-token>\"  # placeholder - never commit real tokens\n",
+ "RAGAS_API_BASE_URL = \"https://api.dev.app.ragas.io\"\n",
"\n",
- "# create a new database in the datasets page\n",
- "properties = {}\n",
- "for _, field in TestModel._fields.items():\n",
- " properties.update(field._to_notion_property())\n",
- "datasets_id = backend.create_new_database(\n",
- " parent_page_id=dataset_page_id, title=\"TestModel\", properties=properties\n",
- ")"
+ "os.environ[\"RAGAS_APP_TOKEN\"] = RAGAS_APP_TOKEN\n",
+ "os.environ[\"RAGAS_API_BASE_URL\"] = RAGAS_API_BASE_URL"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "ragas_api_client = RagasApiClientFactory.create()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "{'id': '0fee5330-9f6e-44a9-a85c-e3b947b697de',\n",
+ " 'name': 'TestModel',\n",
+ " 'description': None,\n",
+ " 'created_at': '2025-04-10T22:41:36.582714+00:00',\n",
+ " 'updated_at': '2025-04-10T22:41:36.582714+00:00',\n",
+ " 'version_counter': 0,\n",
+ " 'project_id': 'bbe45632-3268-43a6-9694-b020b3f5226f'}"
+ ]
+ },
+ "execution_count": null,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+    "# https://dev.app.ragas.io/dashboard/projects/bbe45632-3268-43a6-9694-b020b3f5226f/datasets/0fee5330-9f6e-44a9-a85c-e3b947b697de\n",
+ "TEST_PROJECT_ID = \"bbe45632-3268-43a6-9694-b020b3f5226f\"\n",
+ "TEST_DATASET_ID = \"0fee5330-9f6e-44a9-a85c-e3b947b697de\"\n",
+ "test_project = await ragas_api_client.get_project(project_id=TEST_PROJECT_ID)\n",
+ "test_dataset = await ragas_api_client.get_dataset(project_id=TEST_PROJECT_ID, dataset_id=TEST_DATASET_ID)\n",
+ "test_dataset"
]
},
{
@@ -198,10 +295,30 @@
"outputs": [],
"source": [
"dataset = Dataset(\n",
- " name=\"TestModel\", model=TestModel, database_id=datasets_id, notion_backend=backend\n",
+ " name=\"TestModel\", model=TestModel, project_id=TEST_PROJECT_ID, dataset_id=TEST_DATASET_ID, ragas_api_client=ragas_api_client\n",
")"
]
},
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "{'id': 'fSh3ESKGNZLf', 'description': 'ftKwfDqnbinL', 'name': 'fVnsYEnvxARh'}"
+ ]
+ },
+ "execution_count": null,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "dataset.model.__column_mapping__"
+ ]
+ },
{
"cell_type": "code",
"execution_count": null,
@@ -210,19 +327,32 @@
"source": [
"# | export\n",
"@patch\n",
- "def append(self: Dataset, entry: NotionModelType) -> None:\n",
+ "def append(self: Dataset, entry: BaseModelType) -> None:\n",
" \"\"\"Add a new entry to the dataset and sync to Notion.\"\"\"\n",
- " # if not isinstance(entry, self.model):\n",
- " # raise TypeError(f\"Entry must be an instance of {self.model.__name__}\")\n",
- "\n",
- " # Create in Notion and get response\n",
- " response = self._notion_backend.create_page_in_database(\n",
- " database_id=self.database_id, properties=entry.to_notion()[\"properties\"]\n",
+ " # Create row inside the table\n",
+ "\n",
+ " # first get the columns for the dataset\n",
+ " column_id_map = self.model.__column_mapping__\n",
+ "\n",
+ " # create the rows\n",
+ " row_dict = entry.model_dump()\n",
+ " row_id = create_nano_id()\n",
+ " row_data = {}\n",
+ " for key, value in row_dict.items():\n",
+ " if key in column_id_map:\n",
+ " row_data[column_id_map[key]] = value\n",
+ "\n",
+ " sync_func = async_to_sync(self._ragas_api_client.create_dataset_row)\n",
+ " response = sync_func(\n",
+ " project_id=self.project_id,\n",
+ " dataset_id=self.dataset_id,\n",
+ " id=row_id,\n",
+ " data=row_data,\n",
" )\n",
- "\n",
+ " # add the row id to the entry\n",
+ " entry._row_id = response[\"id\"]\n",
" # Update entry with Notion data (like ID)\n",
- " updated_entry = self.model.from_notion(response)\n",
- " self._entries.append(updated_entry)"
+ " self._entries.append(entry)"
]
},
{
@@ -233,7 +363,7 @@
{
"data": {
"text/plain": [
- "1"
+ "3"
]
},
"execution_count": null,
@@ -264,15 +394,17 @@
"source": [
"# | export\n",
"@patch\n",
- "def pop(self: Dataset, index: int = -1) -> NotionModelType:\n",
+ "def pop(self: Dataset, index: int = -1) -> BaseModelType:\n",
" \"\"\"Remove and return entry at index, sync deletion to Notion.\"\"\"\n",
" entry = self._entries[index]\n",
- " if not hasattr(entry, \"_page_id\"):\n",
- " raise ValueError(\"Entry has no page_id\")\n",
+ " # get the row id\n",
+ " row_id = entry._row_id\n",
+ " if row_id is None:\n",
+ " raise ValueError(\"Entry has no row id. This likely means it was not added or synced to the dataset.\")\n",
"\n",
- " # Archive in Notion (soft delete)\n",
- " assert entry._page_id is not None # mypy fails to infer that we check for it above\n",
- " self._notion_backend.update_page(page_id=entry._page_id, archived=True)\n",
+ " # soft delete the row\n",
+ " sync_func = async_to_sync(self._ragas_api_client.delete_dataset_row)\n",
+ " sync_func(project_id=self.project_id, dataset_id=self.dataset_id, row_id=row_id)\n",
"\n",
" # Remove from local cache\n",
" return self._entries.pop(index)"
@@ -286,7 +418,7 @@
{
"data": {
"text/plain": [
- "0"
+ "1"
]
},
"execution_count": null,
@@ -318,18 +450,37 @@
"# | export\n",
"@patch\n",
"def load(self: Dataset) -> None:\n",
- " \"\"\"Load all entries from the Notion database.\"\"\"\n",
- " # Query the database\n",
- " response = self._notion_backend.query_database(\n",
- " database_id=self.database_id, archived=False\n",
+ " \"\"\"Load all entries from the backend API.\"\"\"\n",
+ " # Get all rows\n",
+ " sync_func = async_to_sync(self._ragas_api_client.list_dataset_rows)\n",
+ " response = sync_func(\n",
+ " project_id=self.project_id,\n",
+ " dataset_id=self.dataset_id\n",
" )\n",
- "\n",
+ " \n",
+ " # Get column mapping (ID -> name)\n",
+ " column_map = {v: k for k, v in self.model.__column_mapping__.items()}\n",
+ " \n",
" # Clear existing entries\n",
" self._entries.clear()\n",
- "\n",
- " # Convert results to model instances\n",
- " for page in response.get(\"results\", []):\n",
- " entry = self.model.from_notion(page)\n",
+ " \n",
+ " # Process rows\n",
+ " for row in response.get(\"items\", []):\n",
+ " model_data = {}\n",
+ " row_id = row.get(\"id\")\n",
+ " \n",
+ " # Convert from API data format to model fields\n",
+ " for col_id, value in row.get(\"data\", {}).items():\n",
+ " if col_id in column_map:\n",
+ " field_name = column_map[col_id]\n",
+ " model_data[field_name] = value\n",
+ " \n",
+ " # Create model instance\n",
+ " entry = self.model(**model_data)\n",
+ " \n",
+ " # Store row ID for future operations\n",
+ " entry._row_id = row_id\n",
+ " \n",
" self._entries.append(entry)"
]
},
@@ -342,6 +493,39 @@
"dataset.load()"
]
},
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# | export\n",
+ "@patch\n",
+ "def load_as_dicts(self: Dataset) -> t.List[t.Dict]:\n",
+ " \"\"\"Load all entries as dictionaries.\"\"\"\n",
+ " # Get all rows\n",
+ " sync_func = async_to_sync(self._ragas_api_client.list_dataset_rows)\n",
+ " response = sync_func(\n",
+ " project_id=self.project_id,\n",
+ " dataset_id=self.dataset_id\n",
+ " )\n",
+ " \n",
+ " # Get column mapping (ID -> name)\n",
+ " column_map = {v: k for k, v in self.model.__column_mapping__.items()}\n",
+ " \n",
+ " # Convert to dicts with field names\n",
+ " result = []\n",
+ " for row in response.get(\"items\", []):\n",
+ " item_dict = {}\n",
+ " for col_id, value in row.get(\"data\", {}).items():\n",
+ " if col_id in column_map:\n",
+ " field_name = column_map[col_id]\n",
+ " item_dict[field_name] = value\n",
+ " result.append(item_dict)\n",
+ " \n",
+ " return result"
+ ]
+ },
{
"cell_type": "code",
"execution_count": null,
@@ -350,7 +534,8 @@
{
"data": {
"text/plain": [
- "3"
+ "[{'id': 0, 'name': 'test', 'description': 'test description'},\n",
+ " {'id': 0, 'name': 'test', 'description': 'test description'}]"
]
},
"execution_count": null,
@@ -359,9 +544,28 @@
}
],
"source": [
- "for i in range(3):\n",
- " dataset.append(test_model)\n",
- "len(dataset)"
+ "dataset.load_as_dicts()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# | export\n",
+ "@patch\n",
+ "def to_pandas(self: Dataset) -> \"pd.DataFrame\":\n",
+ " \"\"\"Convert dataset to pandas DataFrame.\"\"\"\n",
+ " import pandas as pd\n",
+ " \n",
+ " # Make sure we have data\n",
+ " if not self._entries:\n",
+ " self.load()\n",
+ " \n",
+ " # Convert entries to dictionaries\n",
+ " data = [entry.model_dump() for entry in self._entries]\n",
+ " return pd.DataFrame(data)"
]
},
{
@@ -371,8 +575,51 @@
"outputs": [
{
"data": {
+ "text/html": [
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ " \n",
+ " | \n",
+ " id | \n",
+ " name | \n",
+ " description | \n",
+ "
\n",
+ " \n",
+ " \n",
+ " \n",
+ " | 0 | \n",
+ " 0 | \n",
+ " test | \n",
+ " test description | \n",
+ "
\n",
+ " \n",
+ " | 1 | \n",
+ " 0 | \n",
+ " test | \n",
+ " test description | \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
"
+ ],
"text/plain": [
- "0"
+ " id name description\n",
+ "0 0 test test description\n",
+ "1 0 test test description"
]
},
"execution_count": null,
@@ -381,14 +628,7 @@
}
],
"source": [
- "# create a new instance of the dataset\n",
- "dataset = Dataset(\n",
- " name=\"TestModel\",\n",
- " model=TestModel,\n",
- " database_id=datasets_id,\n",
- " notion_backend=backend,\n",
- ")\n",
- "len(dataset)"
+ "dataset.to_pandas()"
]
},
{
@@ -397,33 +637,74 @@
"metadata": {},
"outputs": [],
"source": [
- "dataset.load()\n",
- "test_eq(len(dataset), 3)"
+ "# | export\n",
+ "@patch\n",
+ "def save(self: Dataset, item: BaseModelType) -> None:\n",
+ " \"\"\"Save changes to an item to the backend.\"\"\"\n",
+ " if not isinstance(item, self.model):\n",
+ " raise TypeError(f\"Item must be an instance of {self.model.__name__}\")\n",
+ " \n",
+ " # Get the row ID\n",
+ " row_id = None\n",
+ " if hasattr(item, \"_row_id\") and item._row_id:\n",
+ " row_id = item._row_id\n",
+ " else:\n",
+ " # Try to find it in our entries by matching\n",
+ " for i, entry in enumerate(self._entries):\n",
+ " if id(entry) == id(item): # Check if it's the same object\n",
+ " if hasattr(entry, \"_row_id\") and entry._row_id:\n",
+ " row_id = entry._row_id\n",
+ " break\n",
+ " \n",
+ " if not row_id:\n",
+ " raise ValueError(\"Cannot save: item is not from this dataset or was not properly synced\")\n",
+ " \n",
+ " # Get column mapping and prepare data\n",
+ " column_id_map = self.model.__column_mapping__\n",
+ " row_dict = item.model_dump()\n",
+ " row_data = {}\n",
+ " \n",
+ " for key, value in row_dict.items():\n",
+ " if key in column_id_map:\n",
+ " row_data[column_id_map[key]] = value\n",
+ " \n",
+ " # Update in backend\n",
+ " sync_func = async_to_sync(self._ragas_api_client.update_dataset_row)\n",
+ " response = sync_func(\n",
+ " project_id=self.project_id,\n",
+ " dataset_id=self.dataset_id,\n",
+ " row_id=row_id,\n",
+ " data=row_data,\n",
+ " )\n",
+ " \n",
+ " # Find and update in local cache if needed\n",
+ " for i, entry in enumerate(self._entries):\n",
+ " if hasattr(entry, \"_row_id\") and entry._row_id == row_id:\n",
+ " # If it's not the same object, update our copy\n",
+ " if id(entry) != id(item):\n",
+ " self._entries[i] = item\n",
+ " break"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
- "outputs": [],
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "TestModel(id=0, name='test', description='test description')"
+ ]
+ },
+ "execution_count": null,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
"source": [
- "# | export\n",
- "@patch\n",
- "def get(self: Dataset, id: int) -> t.Optional[NotionModelType]:\n",
- " \"\"\"Get an entry by ID.\"\"\"\n",
- " if not self._notion_backend:\n",
- " return None\n",
- "\n",
- " # Query the database for the specific ID\n",
- " response = self._notion_backend.query_database(\n",
- " database_id=self.database_id,\n",
- " filter={\"property\": \"id\", \"unique_id\": {\"equals\": id}},\n",
- " )\n",
- "\n",
- " if not response.get(\"results\"):\n",
- " return None\n",
- "\n",
- " return self.model.from_notion(response[\"results\"][0])"
+ "d = dataset[0]\n",
+ "d"
]
},
{
@@ -434,7 +715,7 @@
{
"data": {
"text/plain": [
- "TestModel(name='test' description='test description')"
+ "'updated name'"
]
},
"execution_count": null,
@@ -443,18 +724,30 @@
}
],
"source": [
- "test_model = dataset.get(0)\n",
- "test_model"
+ "d.name = \"updated name\"\n",
+ "dataset.save(d)\n",
+ "dataset[0].name"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
- "outputs": [],
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "[{'id': 0, 'name': 'updated name', 'description': 'test description'},\n",
+ " {'id': 0, 'name': 'test', 'description': 'test description'}]"
+ ]
+ },
+ "execution_count": null,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
"source": [
- "# | hide\n",
- "test_eq(test_model.description, \"test description\")"
+ "dataset.load_as_dicts()"
]
},
{
@@ -465,35 +758,66 @@
"source": [
"# | export\n",
"@patch\n",
- "def save(self: Dataset, item: NotionModelType) -> None:\n",
- " \"\"\"Save changes to an item to Notion.\"\"\"\n",
- " if not isinstance(item, self.model):\n",
- " raise TypeError(f\"Item must be an instance of {self.model.__name__}\")\n",
- "\n",
- " if not hasattr(item, \"_page_id\"):\n",
- " raise ValueError(\"Item has no page_id\")\n",
- "\n",
- " # Update in Notion\n",
- " assert item._page_id is not None # mypy fails to infer that we check for it above\n",
- " response = self._notion_backend.update_page(\n",
- " page_id=item._page_id, properties=item.to_notion()[\"properties\"]\n",
- " )\n",
- "\n",
- " # Update local cache\n",
- " for i, existing in enumerate(self._entries):\n",
- " if existing._page_id == item._page_id:\n",
- " self._entries[i] = self.model.from_notion(response)\n",
- " break"
+ "def get(self: Dataset, field_value: str, field_name: str = \"_row_id\") -> t.Optional[BaseModelType]:\n",
+ " \"\"\"Get an entry by field value.\n",
+ " \n",
+ " Args:\n",
+    "        field_value: The value to match\n",
+    "        field_name: The field to match against (default: \"_row_id\")\n",
+ " \n",
+ " Returns:\n",
+ " The matching model instance or None if not found\n",
+ " \"\"\"\n",
+ " # Check if we need to load entries\n",
+ " if not self._entries:\n",
+ " self.load()\n",
+ " \n",
+ " # Search in local entries first\n",
+ " for entry in self._entries:\n",
+ " if hasattr(entry, field_name) and getattr(entry, field_name) == field_value:\n",
+ " return entry\n",
+ " \n",
+ " # If not found and field is \"id\", try to get directly from API\n",
+ " if field_name == \"id\":\n",
+ " # Get column ID for field\n",
+ " if field_name not in self.model.__column_mapping__:\n",
+ " return None\n",
+ " \n",
+ " column_id = self.model.__column_mapping__[field_name]\n",
+ " \n",
+ " # Get rows with filter\n",
+ " sync_func = async_to_sync(self._ragas_api_client.list_dataset_rows)\n",
+ " response = sync_func(\n",
+ " project_id=self.project_id,\n",
+ " dataset_id=self.dataset_id,\n",
+ " # We don't have direct filter support in the API client,\n",
+ " # so this would need to be implemented there.\n",
+ " # For now, we've already checked our local cache.\n",
+ " )\n",
+ " \n",
+ " # Would parse response here if we had filtering\n",
+ " \n",
+ " return None"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
- "outputs": [],
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "'eWlNKu6F4jIX'"
+ ]
+ },
+ "execution_count": null,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
"source": [
- "test_model.description = \"updated description\"\n",
- "dataset.save(test_model)"
+ "d._row_id"
]
},
{
@@ -504,7 +828,7 @@
{
"data": {
"text/plain": [
- "TestModel(name='test' description='updated description')"
+ "TestModel(id=0, name='updated name', description='test description')"
]
},
"execution_count": null,
@@ -513,7 +837,8 @@
}
],
"source": [
- "dataset.get(0)"
+ "test_model = dataset.get(d._row_id)\n",
+ "test_model"
]
},
{
@@ -521,10 +846,7 @@
"execution_count": null,
"metadata": {},
"outputs": [],
- "source": [
- "# | hide\n",
- "test_eq(dataset.get(0).description, \"updated description\")"
- ]
+ "source": []
}
],
"metadata": {
diff --git a/nbs/utils/exceptions.ipynb b/nbs/exceptions.ipynb
similarity index 100%
rename from nbs/utils/exceptions.ipynb
rename to nbs/exceptions.ipynb
diff --git a/nbs/experiment.ipynb b/nbs/experiment.ipynb
index 975d547..e0f8afd 100644
--- a/nbs/experiment.ipynb
+++ b/nbs/experiment.ipynb
@@ -29,9 +29,9 @@
"\n",
"from fastcore.utils import patch\n",
"\n",
- "from ragas_annotator.model.notion_model import NotionModel\n",
- "from ragas_annotator.backends.notion_backend import NotionBackend\n",
- "from ragas_annotator.dataset import Dataset"
+ "from ragas_experimental.model.pydantic_model import ExtendedPydanticBaseModel as BaseModel\n",
+ "from ragas_experimental.backends.ragas_api_client import RagasApiClient\n",
+ "from ragas_experimental.dataset import Dataset"
]
},
{
@@ -45,60 +45,19 @@
" def __init__(\n",
" self,\n",
" name: str,\n",
- " model: t.Type[NotionModel],\n",
- " database_id: str,\n",
- " notion_backend: NotionBackend,\n",
+ " model: t.Type[BaseModel],\n",
+ " project_id: str,\n",
+ " experiment_id: str,\n",
+ " ragas_api_client: RagasApiClient,\n",
" ):\n",
- " super().__init__(name, model, database_id, notion_backend)\n",
+ " self.experiment_id = experiment_id\n",
+ " super().__init__(name, model, project_id, experiment_id, ragas_api_client)\n",
"\n",
" def __str__(self):\n",
" return f\"Experiment(name={self.name}, model={self.model.__name__})\"\n",
"\n",
" __repr__ = __str__"
]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | hide\n",
- "from unittest.mock import MagicMock"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [
- {
- "data": {
- "text/plain": [
- "Experiment(name=test_experiment, model=NotionModel)"
- ]
- },
- "execution_count": null,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "example_notion_backend = MagicMock(spec=NotionBackend)\n",
- "test_database_id = \"test_database_id\"\n",
- "\n",
- "test_experiment = Experiment(\n",
- " \"test_experiment\", NotionModel, test_database_id, example_notion_backend\n",
- ")\n",
- "test_experiment"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": []
}
],
"metadata": {
diff --git a/nbs/index.ipynb b/nbs/index.ipynb
index 5f09d6e..d600746 100644
--- a/nbs/index.ipynb
+++ b/nbs/index.ipynb
@@ -4,7 +4,7 @@
"cell_type": "markdown",
"metadata": {},
"source": [
- "# ragas_annotator\n",
+ "# ragas_experimental\n",
"\n",
"> Experimental Ragas Evaluation UI and Library"
]
@@ -30,20 +30,20 @@
"Install latest from the GitHub [repository][repo]:\n",
"\n",
"```sh\n",
- "$ pip install git+https://github.com/explodinggradients/ragas_annotator.git\n",
+ "$ pip install git+https://github.com/explodinggradients/ragas_experimental.git\n",
"```\n",
"\n",
"or from [pypi][pypi]\n",
"\n",
"\n",
"```sh\n",
- "$ pip install ragas_annotator\n",
+ "$ pip install ragas_experimental\n",
"```\n",
"\n",
"\n",
- "[repo]: https://github.com/explodinggradients/ragas_annotator\n",
- "[docs]: https://explodinggradients.github.io/ragas_annotator/\n",
- "[pypi]: https://pypi.org/project/ragas_annotator/"
+ "[repo]: https://github.com/explodinggradients/ragas_experimental\n",
+ "[docs]: https://explodinggradients.github.io/ragas_experimental/\n",
+ "[pypi]: https://pypi.org/project/ragas_experimental/"
]
},
{
diff --git a/nbs/init_module.ipynb b/nbs/init_module.ipynb
index be2c32f..2dccf85 100644
--- a/nbs/init_module.ipynb
+++ b/nbs/init_module.ipynb
@@ -23,13 +23,14 @@
"outputs": [],
"source": [
"# | export\n",
- "from ragas_annotator.project.core import Project\n",
- "import ragas_annotator.model.notion_typing as nmt\n",
- "from ragas_annotator.model.notion_model import NotionModel\n",
+ "from ragas_experimental.project.core import Project\n",
+ "import ragas_experimental.model.notion_typing as nmt\n",
+ "from ragas_experimental.model.notion_model import NotionModel\n",
+ "from ragas_experimental.model.pydantic_model import ExtendedPydanticBaseModel as BaseModel\n",
"\n",
"# just import to run the module\n",
- "import ragas_annotator.project.experiments\n",
- "import ragas_annotator.project.comparison"
+ "import ragas_experimental.project.experiments\n",
+ "import ragas_experimental.project.comparison"
]
},
{
@@ -39,7 +40,7 @@
"outputs": [],
"source": [
"# | export\n",
- "__all__ = [\"Project\", \"NotionModel\", \"nmt\"]"
+ "__all__ = [\"Project\", \"NotionModel\", \"nmt\", \"BaseModel\"]"
]
},
{
diff --git a/nbs/metric/base.ipynb b/nbs/metric/base.ipynb
index 6366fee..6e59eb1 100644
--- a/nbs/metric/base.ipynb
+++ b/nbs/metric/base.ipynb
@@ -24,16 +24,7 @@
"execution_count": null,
"id": "e8ccff58",
"metadata": {},
- "outputs": [
- {
- "name": "stderr",
- "output_type": "stream",
- "text": [
- "/opt/homebrew/Caskroom/miniforge/base/envs/random/lib/python3.10/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n",
- " from .autonotebook import tqdm as notebook_tqdm\n"
- ]
- }
- ],
+ "outputs": [],
"source": [
"#| export\n",
"\n",
@@ -46,15 +37,24 @@
"import string\n",
"\n",
"\n",
- "from ragas_annotator.prompt.base import Prompt\n",
- "from ragas_annotator.embedding.base import BaseEmbedding\n",
- "from ragas_annotator.metric import MetricResult\n",
- "from ragas_annotator.llm import RagasLLM\n",
- "from ragas_annotator.project.core import Project\n",
- "from ragas_annotator.model.notion_model import NotionModel\n",
- "from ragas_annotator.prompt.dynamic_few_shot import DynamicFewShotPrompt\n",
- "\n",
+ "from ragas_experimental.prompt.base import Prompt\n",
+ "from ragas_experimental.embedding.base import BaseEmbedding\n",
+ "from ragas_experimental.metric import MetricResult\n",
+ "from ragas_experimental.llm import RagasLLM\n",
+ "from ragas_experimental.model.notion_model import NotionModel\n",
+ "from ragas_experimental.prompt.dynamic_few_shot import DynamicFewShotPrompt\n",
"\n",
+ "if t.TYPE_CHECKING:\n",
+ " from ragas_experimental.project.core import Project"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "#| export\n",
"@dataclass\n",
"class Metric(ABC):\n",
" \"\"\"Base class for all metrics in the LLM evaluation library.\"\"\"\n",
@@ -127,7 +127,7 @@
" # Run all tasks concurrently and return results\n",
" return await asyncio.gather(*async_tasks)\n",
" \n",
- " def train(self,project:Project, experiment_names: t.List[str], model:NotionModel, embedding_model: BaseEmbedding,method: t.Dict[str, t.Any]):\n",
+ " def train(self,project: \"Project\", experiment_names: t.List[str], model:NotionModel, embedding_model: BaseEmbedding,method: t.Dict[str, t.Any]):\n",
" \n",
" assert isinstance(self.prompt, Prompt)\n",
" self.prompt = DynamicFewShotPrompt.from_prompt(self.prompt,embedding_model)\n",
@@ -174,7 +174,7 @@
{
"data": {
"text/plain": [
- "100"
+ "1"
]
},
"execution_count": null,
@@ -185,7 +185,7 @@
"source": [
"#| eval: false\n",
"\n",
- "from ragas_annotator.llm import ragas_llm\n",
+ "from ragas_experimental.llm import ragas_llm\n",
"from openai import OpenAI\n",
"\n",
"llm = ragas_llm(provider=\"openai\",model=\"gpt-4o\",client=OpenAI())\n",
diff --git a/nbs/metric/decorator.ipynb b/nbs/metric/decorator.ipynb
index 7f9752a..1435006 100644
--- a/nbs/metric/decorator.ipynb
+++ b/nbs/metric/decorator.ipynb
@@ -29,9 +29,9 @@
"import inspect\n",
"import asyncio\n",
"from dataclasses import dataclass\n",
- "from ragas_annotator.metric import MetricResult\n",
- "from ragas_annotator.llm import RagasLLM\n",
- "from ragas_annotator.prompt.base import Prompt\n",
+ "from ragas_experimental.metric import MetricResult\n",
+ "from ragas_experimental.llm import RagasLLM\n",
+ "from ragas_experimental.prompt.base import Prompt\n",
"\n",
"\n",
"\n",
@@ -161,10 +161,10 @@
"#| eval: false\n",
"\n",
"\n",
- "from ragas_annotator.metric import DiscreteMetric, MetricResult\n",
+ "from ragas_experimental.metric import DiscreteMetric, MetricResult\n",
"from pydantic import BaseModel\n",
"\n",
- "from ragas_annotator.llm import ragas_llm\n",
+ "from ragas_experimental.llm import ragas_llm\n",
"from openai import OpenAI\n",
"\n",
"llm = ragas_llm(provider=\"openai\",model=\"gpt-4o\",client=OpenAI())\n",
diff --git a/nbs/metric/discrete.ipynb b/nbs/metric/discrete.ipynb
index bdb8a28..6d75f62 100644
--- a/nbs/metric/discrete.ipynb
+++ b/nbs/metric/discrete.ipynb
@@ -28,8 +28,8 @@
"from dataclasses import dataclass, field\n",
"from pydantic import BaseModel, create_model\n",
"from collections import Counter\n",
- "from ragas_annotator.metric import Metric, MetricResult\n",
- "from ragas_annotator.metric.decorator import create_metric_decorator\n",
+ "from ragas_experimental.metric import Metric, MetricResult\n",
+ "from ragas_experimental.metric.decorator import create_metric_decorator\n",
"\n",
"\n",
"@dataclass\n",
@@ -90,17 +90,8 @@
"name": "stdout",
"output_type": "stream",
"text": [
- "med\n",
- "The given input \"this is my response\" is too vague to provide a comprehensive evaluation.\n",
- "\n",
- "Positives:\n",
- "1. Clear Statement: It's a straightforward indication that a response has been provided.\n",
- "\n",
- "Negatives:\n",
- "1. Lack of Context: Without context or additional information, it's impossible to assess the relevance or accuracy of the response.\n",
- "2. No Specificity: The response doesn't convey any specific information or insight related to a topic or question.\n",
- "\n",
- "If this response was intended to be part of a conversation or instruction, more detail would be required to make it highly effective. At present, it serves as a neutral statement without actionable or informative content.\n"
+ "low\n",
+ "The response is incomplete and lacks any specific information. It cannot be evaluated for helpfulness without further context or content.\n"
]
}
],
@@ -108,7 +99,7 @@
"\n",
"#| eval: false\n",
"\n",
- "from ragas_annotator.llm import ragas_llm\n",
+ "from ragas_experimental.llm import ragas_llm\n",
"from openai import OpenAI\n",
"\n",
"llm = ragas_llm(provider=\"openai\",model=\"gpt-4o\",client=OpenAI())\n",
@@ -150,11 +141,14 @@
],
"source": [
"#| eval: false\n",
- "from ragas_annotator.metric.result import MetricResult\n",
+ "from ragas_experimental.metric.result import MetricResult\n",
"\n",
- "@discrete_metric(llm=llm,\n",
+ "@discrete_metric(\n",
+ " llm=llm,\n",
" prompt=\"Evaluate if given answer is helpful\\n\\n{response}\",\n",
- " name='new_metric',values=[\"low\",\"med\",\"high\"])\n",
+ " name='new_metric',\n",
+ " values=[\"low\",\"med\",\"high\"]\n",
+ ")\n",
"def my_metric(llm,prompt,**kwargs):\n",
"\n",
" class response_model(BaseModel):\n",
diff --git a/nbs/metric/numeric.ipynb b/nbs/metric/numeric.ipynb
index e6e5681..89b3889 100644
--- a/nbs/metric/numeric.ipynb
+++ b/nbs/metric/numeric.ipynb
@@ -37,8 +37,8 @@
"import typing as t\n",
"from dataclasses import dataclass, field\n",
"from pydantic import BaseModel, create_model\n",
- "from ragas_annotator.metric import Metric, MetricResult\n",
- "from ragas_annotator.metric.decorator import create_metric_decorator\n",
+ "from ragas_experimental.metric import Metric, MetricResult\n",
+ "from ragas_experimental.metric.decorator import create_metric_decorator\n",
"\n",
"@dataclass\n",
"class NumericMetric(Metric):\n",
@@ -102,7 +102,7 @@
"\n",
"#| eval: false\n",
"\n",
- "from ragas_annotator.llm import ragas_llm\n",
+ "from ragas_experimental.llm import ragas_llm\n",
"from openai import OpenAI\n",
"\n",
"llm = ragas_llm(provider=\"openai\",model=\"gpt-4o\",client=OpenAI())\n",
@@ -147,7 +147,7 @@
"source": [
"\n",
"#| eval: false\n",
- "from ragas_annotator.metric import MetricResult\n",
+ "from ragas_experimental.metric import MetricResult\n",
"\n",
"@numeric_metric(llm=llm,\n",
" prompt=\"Evaluate if given answer is helpful\\n\\n{response}\",\n",
diff --git a/nbs/metric/ranking.ipynb b/nbs/metric/ranking.ipynb
index 1c7cd4c..a140725 100644
--- a/nbs/metric/ranking.ipynb
+++ b/nbs/metric/ranking.ipynb
@@ -37,8 +37,8 @@
"import typing as t\n",
"from dataclasses import dataclass\n",
"from pydantic import BaseModel, Field\n",
- "from ragas_annotator.metric import Metric, MetricResult\n",
- "from ragas_annotator.metric.decorator import create_metric_decorator\n",
+ "from ragas_experimental.metric import Metric, MetricResult\n",
+ "from ragas_experimental.metric.decorator import create_metric_decorator\n",
"\n",
"@dataclass\n",
"class RankingMetric(Metric):\n",
@@ -138,7 +138,7 @@
"\n",
"#| eval: false\n",
"\n",
- "from ragas_annotator.llm import ragas_llm\n",
+ "from ragas_experimental.llm import ragas_llm\n",
"from openai import OpenAI\n",
"\n",
"llm = ragas_llm(provider=\"openai\",model=\"gpt-4o\",client=OpenAI())\n",
@@ -185,7 +185,7 @@
"source": [
"#| eval: false\n",
"\n",
- "from ragas_annotator.metric import MetricResult\n",
+ "from ragas_experimental.metric import MetricResult\n",
"\n",
"@ranking_metric(\n",
" llm=llm, # Your language model instance\n",
diff --git a/nbs/metric/result.ipynb b/nbs/metric/result.ipynb
index 26149b3..ecbab85 100644
--- a/nbs/metric/result.ipynb
+++ b/nbs/metric/result.ipynb
@@ -22,7 +22,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "0f1c801a-6568-4ba4-8bbe-30bf154174fe",
+ "id": "dcc3080c",
"metadata": {},
"outputs": [],
"source": [
@@ -30,10 +30,17 @@
"\n",
"import typing as t\n",
"\n",
- "\n",
- "\n",
- "\n",
- "\n",
+ "from fastcore.utils import patch"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "0f1c801a-6568-4ba4-8bbe-30bf154174fe",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "#| export\n",
"class MetricResult:\n",
" \"\"\"Class to hold the result of a metric evaluation.\n",
" \n",
@@ -248,10 +255,109 @@
"print(list_result[1:]) # 2\n"
]
},
+ {
+ "cell_type": "markdown",
+ "id": "06ce7a1d",
+ "metadata": {},
+ "source": [
+ "now lets make it `Pydantic` compatible also"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "5d8fb818",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "#| export\n",
+ "from pydantic_core import core_schema\n",
+ "from pydantic import GetCoreSchemaHandler, ValidationInfo"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "f4c288c0",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "#| export\n",
+ "\n",
+ "@patch(cls_method=True)\n",
+ "def validate(cls: MetricResult, value: t.Any, info: ValidationInfo):\n",
+ " \"\"\"Provide compatibility with older Pydantic versions.\"\"\"\n",
+ " if isinstance(value, MetricResult):\n",
+ " return value\n",
+ " return MetricResult(result=value)\n",
+ "\n",
+ "# Add Pydantic compatibility methods\n",
+ "@patch(cls_method=True)\n",
+ "def __get_pydantic_core_schema__(\n",
+ " cls: MetricResult, \n",
+ " _source_type: t.Any, \n",
+ " _handler: GetCoreSchemaHandler\n",
+ ") -> core_schema.CoreSchema:\n",
+ " \"\"\"Generate a Pydantic core schema for MetricResult.\"\"\"\n",
+ " return core_schema.with_info_plain_validator_function(cls.validate)\n",
+ "\n",
+ "\n",
+ "@patch\n",
+ "def model_dump(self: MetricResult):\n",
+ " \"\"\"Support Pydantic's model_dump method.\"\"\"\n",
+ " return self.to_dict()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "f49739a6",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from pydantic import BaseModel\n",
+ "\n",
+ "class TestModel(BaseModel):\n",
+ " response: str\n",
+ " grade: MetricResult\n",
+ " faithfulness: MetricResult\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "6ac6b955",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "m = TestModel(response=\"test\", grade=MetricResult(result=1, reason=\"test\"), faithfulness=MetricResult(result=1, reason=\"test\"))"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "4ffe750f",
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "'test'"
+ ]
+ },
+ "execution_count": null,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "m.grade.reason"
+ ]
+ },
{
"cell_type": "code",
"execution_count": null,
- "id": "a984dde9",
+ "id": "9d32b10f",
"metadata": {},
"outputs": [],
"source": []
diff --git a/nbs/model/notion_model.ipynb b/nbs/model/notion_model.ipynb
deleted file mode 100644
index 24281e5..0000000
--- a/nbs/model/notion_model.ipynb
+++ /dev/null
@@ -1,286 +0,0 @@
-{
- "cells": [
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "# `NotionModel`\n",
- "\n",
- "> NotionModel is a class that allows you to create a model of a Notion database."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | default_exp model.notion_model"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | hide\n",
- "from fastcore.test import *"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | export\n",
- "from dataclasses import dataclass\n",
- "import typing as t\n",
- "from datetime import datetime\n",
- "\n",
- "from fastcore.utils import patch, patch_to\n",
- "\n",
- "from ragas_annotator.exceptions import ValidationError\n",
- "from ragas_annotator.model.notion_typing import Field, ID"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | export\n",
- "class NotionModelMeta(type):\n",
- " \"\"\"Metaclass for NotionModel to handle field registration.\"\"\"\n",
- "\n",
- " def __new__(mcs, name: str, bases: tuple, namespace: dict):\n",
- " _fields: t.Dict[str, Field] = {}\n",
- "\n",
- " # Collect fields from base classes\n",
- " for base in bases:\n",
- " if hasattr(base, \"_fields\"):\n",
- " _fields.update(base._fields)\n",
- "\n",
- " # Collect fields from class variables and type annotations\n",
- " for key, value in namespace.items():\n",
- " # Skip internal attributes\n",
- " if key.startswith(\"_\"):\n",
- " continue\n",
- "\n",
- " # Handle field instances directly defined in class\n",
- " if isinstance(value, Field):\n",
- " _fields[key] = value\n",
- " # Handle annotated but not instantiated fields\n",
- " elif (\n",
- " key in namespace.get(\"__annotations__\", {})\n",
- " and isinstance(value, type)\n",
- " and issubclass(value, Field)\n",
- " ):\n",
- " _fields[key] = value()\n",
- "\n",
- " namespace[\"_fields\"] = _fields\n",
- " return super().__new__(mcs, name, bases, namespace)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | export\n",
- "class NotionModel(metaclass=NotionModelMeta):\n",
- " \"\"\"Base class for Notion database models.\n",
- "\n",
- " Represents a row in a Notion database with typed fields that map to\n",
- " Notion property values.\n",
- " \"\"\"\n",
- "\n",
- " _fields: t.ClassVar[t.Dict[str, Field]]\n",
- " _created_time: t.Optional[datetime] = None\n",
- " _last_edited_time: t.Optional[datetime] = None\n",
- " _page_id: t.Optional[str] = None\n",
- "\n",
- " def __init__(self, **kwargs):\n",
- " self._values: t.Dict[str, t.Any] = {}\n",
- " self._page_id = kwargs.pop(\"page_id\", None) # Extract page_id from kwargs\n",
- " self._created_time = kwargs.pop(\"created_time\", None)\n",
- " self._last_edited_time = kwargs.pop(\"last_edited_time\", None)\n",
- "\n",
- " # Get required fields\n",
- " required_fields = {\n",
- " name\n",
- " for name, field in self._fields.items()\n",
- " if field.required and name not in kwargs\n",
- " }\n",
- "\n",
- " if required_fields:\n",
- " raise ValidationError(f\"Missing required fields: {required_fields}\")\n",
- "\n",
- " # Set values and validate\n",
- " for name, value in kwargs.items():\n",
- " if name in self._fields:\n",
- " setattr(self, name, value)\n",
- " else:\n",
- " raise ValidationError(f\"Unknown field: {name}\")\n",
- "\n",
- " def __setattr__(self, name: str, value: t.Any):\n",
- " \"\"\"Handle field validation on attribute setting.\"\"\"\n",
- " if name.startswith(\"_\"):\n",
- " super().__setattr__(name, value)\n",
- " return\n",
- "\n",
- " field = self._fields.get(name)\n",
- " if field is not None:\n",
- " value = field.validate(value)\n",
- " self._values[name] = value\n",
- " else:\n",
- " super().__setattr__(name, value)\n",
- "\n",
- " def __getattr__(self, name: str) -> t.Any:\n",
- " \"\"\"Handle field access.\"\"\"\n",
- " if name in self._values:\n",
- " return self._values[name]\n",
- " raise AttributeError(f\"'{self.__class__.__name__}' has no attribute '{name}'\")\n",
- "\n",
- " def __repr__(self) -> str:\n",
- " \"\"\"Return a string representation of the model instance.\"\"\"\n",
- " class_name = self.__class__.__name__\n",
- " parts = []\n",
- "\n",
- " # First add ID fields\n",
- " for name, field in self.__class__._fields.items():\n",
- " if isinstance(field, ID) and name in self._values:\n",
- " value = self._values[name]\n",
- " if value is not None:\n",
- " parts.append(f\"{name}={repr(value)}\")\n",
- "\n",
- " # Then add other fields in declaration order\n",
- " for name, field in self.__class__._fields.items():\n",
- " if not isinstance(field, ID) and name in self._values:\n",
- " value = self._values[name]\n",
- " if value is not None:\n",
- " if isinstance(value, str):\n",
- " parts.append(f\"{name}='{value}'\")\n",
- " else:\n",
- " parts.append(f\"{name}={repr(value)}\")\n",
- "\n",
- " return f\"{class_name}({' '.join(parts)})\""
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "from ragas_annotator.model.notion_typing import ID, Text, URL\n",
- "\n",
- "\n",
- "class TestModel(NotionModel):\n",
- " id: int = ID()\n",
- " name: str = Text()\n",
- " url: str = URL()"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [
- {
- "data": {
- "text/plain": [
- "TestModel(id=1 name='test' url='https://www.google.com')"
- ]
- },
- "execution_count": null,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "TestModel(id=1, name=\"test\", url=\"https://www.google.com\")"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | export\n",
- "@patch\n",
- "def to_notion(self: NotionModel) -> dict:\n",
- " \"\"\"Convert the model to Notion API format.\"\"\"\n",
- " properties = {}\n",
- " for name, field in self._fields.items():\n",
- " if name in self._values:\n",
- " value = self._values[name]\n",
- " if value is not None:\n",
- " properties.update(field._to_notion(value))\n",
- " return {\"properties\": properties}\n",
- "\n",
- "\n",
- "@patch_to(NotionModel, cls_method=True)\n",
- "def from_notion(cls, data: dict) -> \"NotionModel\":\n",
- " \"\"\"Create a model instance from Notion API data.\"\"\"\n",
- " values = {}\n",
- " for name, field in cls._fields.items():\n",
- " if name in data.get(\"properties\", {}):\n",
- " values[name] = field._from_notion({\"properties\": data[\"properties\"]})\n",
- "\n",
- " # Handle system properties\n",
- " if \"id\" in data:\n",
- " values[\"page_id\"] = data[\"id\"] # Set page_id from Notion's id\n",
- " if \"created_time\" in data:\n",
- " values[\"created_time\"] = datetime.fromisoformat(\n",
- " data[\"created_time\"].replace(\"Z\", \"+00:00\")\n",
- " )\n",
- " if \"last_edited_time\" in data:\n",
- " values[\"last_edited_time\"] = datetime.fromisoformat(\n",
- " data[\"last_edited_time\"].replace(\"Z\", \"+00:00\")\n",
- " )\n",
- "\n",
- " return cls(**values)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | test\n",
- "test_model = TestModel(id=1, name=\"test\", url=\"https://www.google.com\")\n",
- "test_model_as_properties = {\n",
- " \"properties\": {\n",
- " \"id\": {\"type\": \"unique_id\", \"unique_id\": 1},\n",
- " \"name\": {\"rich_text\": [{\"text\": {\"content\": \"test\"}}]},\n",
- " \"url\": {\"url\": \"https://www.google.com\"},\n",
- " }\n",
- "}\n",
- "\n",
- "test_eq(test_model.to_notion(), test_model_as_properties)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": []
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "python3",
- "language": "python",
- "name": "python3"
- }
- },
- "nbformat": 4,
- "nbformat_minor": 2
-}
diff --git a/nbs/model/notion_types.ipynb b/nbs/model/notion_types.ipynb
deleted file mode 100644
index 8ac4f7b..0000000
--- a/nbs/model/notion_types.ipynb
+++ /dev/null
@@ -1,679 +0,0 @@
-{
- "cells": [
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "# `NotionTypes`\n",
- "\n",
- "> Represents the types of Notion objects like text, number, select, multi-select, etc."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | default_exp model.notion_typing"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | export\n",
- "\n",
- "import typing as t\n",
- "from datetime import datetime\n",
- "\n",
- "from ragas_annotator.exceptions import ValidationError\n",
- "\n",
- "T = t.TypeVar(\"T\")"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | export\n",
- "\n",
- "\n",
- "class Field(t.Generic[T]):\n",
- " \"\"\"Base class for all Notion field types.\"\"\"\n",
- "\n",
- " NOTION_FIELD_TYPE = \"\"\n",
- " _type: t.Type[T]\n",
- "\n",
- " def __init__(self, required: bool = True):\n",
- " self.required = required\n",
- " self.name: str = \"\"\n",
- " super().__init__()\n",
- "\n",
- " def __set_name__(self, owner: t.Type, name: str):\n",
- " \"\"\"Set the field name when the class is created.\"\"\"\n",
- " self.name = name\n",
- " if not hasattr(owner, \"_fields\"):\n",
- " owner._fields = {}\n",
- " owner._fields[name] = self\n",
- "\n",
- " def __get__(self, instance, owner=None):\n",
- " \"\"\"Implement descriptor protocol for getting field values.\"\"\"\n",
- " if instance is None:\n",
- " return self\n",
- " return instance._values.get(self.name)\n",
- "\n",
- " def __set__(self, instance, value):\n",
- " \"\"\"Implement descriptor protocol for setting field values.\"\"\"\n",
- " if instance is None:\n",
- " return\n",
- " value = self.validate(value)\n",
- " instance._values[self.name] = value\n",
- "\n",
- " def validate(self, value: t.Any) -> t.Any:\n",
- " \"\"\"Validate the field value.\"\"\"\n",
- " if value is None and self.required:\n",
- " raise ValidationError(f\"Field {self.name} is required\")\n",
- " return value\n",
- "\n",
- " def _to_notion(self, value: t.Any) -> dict:\n",
- " \"\"\"Convert Python value to Notion format.\"\"\"\n",
- " raise NotImplementedError\n",
- "\n",
- " def _from_notion(self, data: dict) -> t.Any:\n",
- " \"\"\"Convert Notion format to Python value.\"\"\"\n",
- " raise NotImplementedError\n",
- "\n",
- " def _to_notion_property(self) -> dict:\n",
- " \"\"\"Convert field to Notion property definition format.\"\"\"\n",
- " return {self.name: {\"type\": self.NOTION_FIELD_TYPE, self.NOTION_FIELD_TYPE: {}}}"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | hide\n",
- "SAMPLE_NOTION_DATA = {\n",
- " \"title_field\": {\n",
- " \"type\": \"title\",\n",
- " \"title\": [\n",
- " {\n",
- " \"type\": \"text\",\n",
- " \"text\": {\"content\": \"What is product led growth?\", \"link\": None},\n",
- " \"plain_text\": \"What is product led growth?\",\n",
- " \"href\": None,\n",
- " }\n",
- " ],\n",
- " },\n",
- " \"text_field\": {\n",
- " \"type\": \"rich_text\",\n",
- " \"rich_text\": [\n",
- " {\n",
- " \"type\": \"text\",\n",
- " \"text\": {\n",
- " \"content\": \"Product-led Growth (PLG) is a business strategy\",\n",
- " \"link\": None,\n",
- " },\n",
- " \"plain_text\": \"Product-led Growth (PLG) is a business strategy\",\n",
- " }\n",
- " ],\n",
- " },\n",
- " \"select_field\": {\n",
- " \"type\": \"select\",\n",
- " \"select\": {\"id\": \"DJTw\", \"name\": \"elena\", \"color\": \"default\"},\n",
- " },\n",
- " \"multi_select_field\": {\n",
- " \"type\": \"multi_select\",\n",
- " \"multi_select\": [\n",
- " {\"id\": \"cf54414e\", \"name\": \"indexing_issues\", \"color\": \"orange\"},\n",
- " {\"id\": \"abc123\", \"name\": \"performance\", \"color\": \"blue\"},\n",
- " ],\n",
- " },\n",
- " \"id_field\": {\"type\": \"unique_id\", \"unique_id\": {\"prefix\": None, \"number\": 42}},\n",
- " \"url_field\": {\"type\": \"url\", \"url\": \"https://www.google.com\"},\n",
- "}"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "## `ID`\n",
- "\n",
- "Represents a unique identifier for any NotionModel object."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | export\n",
- "class ID(Field[int], int):\n",
- " \"\"\"System ID field type for integer IDs.\"\"\"\n",
- "\n",
- " NOTION_FIELD_TYPE = \"unique_id\"\n",
- " _type = int\n",
- "\n",
- " def __new__(cls, *args, **kwargs):\n",
- " return int.__new__(cls)\n",
- "\n",
- " def __init__(self, required: bool = False):\n",
- " super().__init__(required=required)\n",
- "\n",
- " def validate(self, value: t.Optional[int]) -> t.Optional[int]:\n",
- " value = super().validate(value)\n",
- " if value is not None and not isinstance(value, int):\n",
- " raise ValidationError(f\"ID must be an integer, got {type(value)}\")\n",
- " return value\n",
- "\n",
- " def _to_notion(self, value: int) -> dict:\n",
- " return {self.name: {\"type\": \"unique_id\", \"unique_id\": value}}\n",
- "\n",
- " def _from_notion(self, data: dict) -> t.Optional[int]:\n",
- " if \"properties\" in data:\n",
- " if self.name in data[\"properties\"]:\n",
- " return data[\"properties\"][self.name][\"unique_id\"][\"number\"]\n",
- " else:\n",
- " if self.name in data:\n",
- " return data[self.name][\"unique_id\"][\"number\"]\n",
- " # if not found and required, raise error\n",
- " if self.required:\n",
- " raise ValidationError(\n",
- " f\"ID field {self.name} is required but not found in the data\"\n",
- " )\n",
- " else:\n",
- " return None\n",
- "\n",
- " def _to_notion_property(self) -> dict:\n",
- " return {self.name: {\"type\": \"unique_id\", \"unique_id\": {\"prefix\": None}}}"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | hide\n",
- "import pytest\n",
- "from fastcore.test import *"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | hide\n",
- "# test to_notion\n",
- "field = ID()\n",
- "field.name = \"id\"\n",
- "result = field._to_notion(42)\n",
- "assert result == {\"id\": {\"type\": \"unique_id\", \"unique_id\": 42}}\n",
- "\n",
- "# test from_notion\n",
- "result = field._from_notion({\"id\": SAMPLE_NOTION_DATA[\"id_field\"]})\n",
- "assert result == 42\n",
- "\n",
- "# test from_notion with no id\n",
- "result = field._from_notion({})\n",
- "assert result is None\n",
- "\n",
- "# test validate\n",
- "assert field.validate(42) == 42\n",
- "assert field.validate(None) is None\n",
- "with pytest.raises(ValidationError):\n",
- " field.validate(\"not_an_integer\") # type: ignore (since we are testing the validation)"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "## `Text`\n",
- "\n",
- "Represents notion's rich text property type"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | export\n",
- "class Text(Field[str], str):\n",
- " \"\"\"Rich text property type.\"\"\"\n",
- "\n",
- " NOTION_FIELD_TYPE = \"rich_text\"\n",
- " _type = str\n",
- " CHUNK_SIZE = 2000 # Notion's character limit per rich text block\n",
- "\n",
- " def __new__(cls, *args, **kwargs):\n",
- " return str.__new__(cls)\n",
- "\n",
- " def __init__(self, required: bool = True):\n",
- " super().__init__(required=required)\n",
- "\n",
- " def _to_notion(self, value: str) -> dict:\n",
- " # Split the text into chunks of CHUNK_SIZE characters\n",
- " if not value:\n",
- " return {self.name: {self.NOTION_FIELD_TYPE: []}}\n",
- "\n",
- " chunks = [\n",
- " value[i : i + self.CHUNK_SIZE]\n",
- " for i in range(0, len(value), self.CHUNK_SIZE)\n",
- " ]\n",
- " rich_text_array = [{\"text\": {\"content\": chunk}} for chunk in chunks]\n",
- "\n",
- " return {self.name: {self.NOTION_FIELD_TYPE: rich_text_array}}\n",
- "\n",
- " def _from_notion(self, data: dict) -> t.Optional[str]:\n",
- " # Handle both direct and properties-wrapped format\n",
- " if \"properties\" in data:\n",
- " rich_text = data[\"properties\"][self.name][self.NOTION_FIELD_TYPE]\n",
- " else:\n",
- " rich_text = data[self.name][self.NOTION_FIELD_TYPE]\n",
- "\n",
- " if not rich_text:\n",
- " return None\n",
- "\n",
- " # Combine all text chunks into a single string\n",
- " return \"\".join(item[\"text\"][\"content\"] for item in rich_text if \"text\" in item)"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "#| hide\n",
- "\n",
- "what all should we test for `Text()`\n",
- "\n",
- "1. Make sure validate works for strings\n",
- "2. Make sure validate works for non-string like other python objects - we can convert them to string with python.\n",
- "3. Make sure larger texts with greater than 2000 pages are broken down and read from effectively"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | hide\n",
- "# test to_notion\n",
- "field = Text()\n",
- "field.name = \"text\"\n",
- "result = field._to_notion(\"test\")\n",
- "test_eq(result, {\"text\": {\"rich_text\": [{\"text\": {\"content\": \"test\"}}]}})\n",
- "\n",
- "# test from_notion\n",
- "result = field._from_notion({\"text\": SAMPLE_NOTION_DATA[\"text_field\"]})\n",
- "test_eq(result, \"Product-led Growth (PLG) is a business strategy\")\n",
- "\n",
- "# test validate\n",
- "test_eq(field.validate(\"test\"), \"test\")\n",
- "# test_eq(field.validate(42), \"42\")\n",
- "# test_eq(field.validate(None), None)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | hide\n",
- "# Test Text class with chunking functionality\n",
- "\n",
- "# 1. Test with short text (under 2000 chars)\n",
- "field = Text()\n",
- "field.name = \"text\"\n",
- "short_text = \"This is a short text\"\n",
- "result = field._to_notion(short_text)\n",
- "test_eq(result, {\"text\": {\"rich_text\": [{\"text\": {\"content\": short_text}}]}})\n",
- "test_eq(len(result[\"text\"][\"rich_text\"]), 1)\n",
- "\n",
- "# 2. Test with exactly 2000 chars (boundary case)\n",
- "text_2000 = \"a\" * 2000\n",
- "result = field._to_notion(text_2000)\n",
- "test_eq(len(result[\"text\"][\"rich_text\"]), 1)\n",
- "test_eq(len(result[\"text\"][\"rich_text\"][0][\"text\"][\"content\"]), 2000)\n",
- "\n",
- "# 3. Test with long text (over 2000 chars)\n",
- "long_text = \"a\" * 4500 # Should create 3 chunks\n",
- "result = field._to_notion(long_text)\n",
- "test_eq(len(result[\"text\"][\"rich_text\"]), 3)\n",
- "test_eq(len(result[\"text\"][\"rich_text\"][0][\"text\"][\"content\"]), 2000)\n",
- "test_eq(len(result[\"text\"][\"rich_text\"][1][\"text\"][\"content\"]), 2000)\n",
- "test_eq(len(result[\"text\"][\"rich_text\"][2][\"text\"][\"content\"]), 500)\n",
- "\n",
- "# 4. Test from_notion with multiple chunks\n",
- "multi_chunk_data = {\n",
- " \"text\": {\n",
- " \"rich_text\": [\n",
- " {\"text\": {\"content\": \"First chunk. \"}},\n",
- " {\"text\": {\"content\": \"Second chunk. \"}},\n",
- " {\"text\": {\"content\": \"Last chunk.\"}},\n",
- " ]\n",
- " }\n",
- "}\n",
- "result = field._from_notion(multi_chunk_data)\n",
- "test_eq(result, \"First chunk. Second chunk. Last chunk.\")\n",
- "\n",
- "# 5. Test empty text\n",
- "result = field._to_notion(\"\")\n",
- "test_eq(result, {\"text\": {\"rich_text\": []}})\n",
- "\n",
- "# 6. Test from_notion with empty rich_text\n",
- "empty_data = {\"text\": {\"rich_text\": []}}\n",
- "result = field._from_notion(empty_data)\n",
- "test_eq(result, None)"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "## `Title`"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | export\n",
- "class Title(Field[str], str):\n",
- " \"\"\"Title property type.\"\"\"\n",
- "\n",
- " NOTION_FIELD_TYPE = \"title\"\n",
- " _type = str\n",
- "\n",
- " def __new__(cls, *args, **kwargs):\n",
- " return str.__new__(cls)\n",
- "\n",
- " def __init__(self, required: bool = True):\n",
- " super().__init__(required=required)\n",
- "\n",
- " def _to_notion(self, value: str) -> dict:\n",
- " return {self.name: {self.NOTION_FIELD_TYPE: [{\"text\": {\"content\": value}}]}}\n",
- "\n",
- " def _from_notion(self, data: dict) -> t.Optional[str]:\n",
- " if \"properties\" in data:\n",
- " title = data[\"properties\"][self.name][self.NOTION_FIELD_TYPE]\n",
- " else:\n",
- " title = data[self.name][self.NOTION_FIELD_TYPE]\n",
- " if not title:\n",
- " return None\n",
- " return title[0][\"text\"][\"content\"]"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | export\n",
- "class Select(Field[str], str):\n",
- " \"\"\"Select property type.\"\"\"\n",
- "\n",
- " NOTION_FIELD_TYPE = \"select\"\n",
- " _type = str\n",
- "\n",
- " def __new__(cls, *args, **kwargs):\n",
- " return str.__new__(cls)\n",
- "\n",
- " def __init__(self, options: t.Optional[list[str]] = None, required: bool = True):\n",
- " self.options = options\n",
- " super().__init__(required=required)\n",
- "\n",
- " def validate(self, value: t.Optional[str]) -> t.Optional[str]:\n",
- " value = super().validate(value)\n",
- " if value == \"\": # Allow empty string for optional fields\n",
- " return value\n",
- " if value is not None and self.options and value not in self.options:\n",
- " raise ValidationError(\n",
- " f\"Value {value} not in allowed options: {self.options}\"\n",
- " )\n",
- " return value\n",
- "\n",
- " def _to_notion(self, value: str) -> dict:\n",
- " return {self.name: {self.NOTION_FIELD_TYPE: {\"name\": value}}}\n",
- "\n",
- " def _from_notion(self, data: dict) -> t.Optional[str]:\n",
- " if \"properties\" in data:\n",
- " select_data = data[\"properties\"][self.name][self.NOTION_FIELD_TYPE]\n",
- " else:\n",
- " select_data = data[self.name][self.NOTION_FIELD_TYPE]\n",
- " if select_data is None:\n",
- " return None\n",
- " return select_data[\"name\"]\n",
- "\n",
- " def _to_notion_property(self) -> dict:\n",
- " prop = super()._to_notion_property()\n",
- " if self.options:\n",
- " prop[self.name][\"select\"][\"options\"] = [\n",
- " {\"name\": option} for option in self.options\n",
- " ]\n",
- " return prop"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | export\n",
- "class MultiSelect(Field[list[str]], list):\n",
- " \"\"\"Multi-select property type.\"\"\"\n",
- "\n",
- " NOTION_FIELD_TYPE = \"multi_select\"\n",
- " _type = list\n",
- "\n",
- " def __new__(cls, *args, **kwargs):\n",
- " return list.__new__(cls)\n",
- "\n",
- " def __init__(self, options: t.Optional[list[str]] = None, required: bool = True):\n",
- " self.options = options\n",
- " super().__init__(required=required)\n",
- "\n",
- " def validate(self, value: t.Optional[list[str]]) -> t.Optional[list[str]]:\n",
- " value = super().validate(value)\n",
- " if value is not None and self.options:\n",
- " invalid_options = [v for v in value if v not in self.options]\n",
- " if invalid_options:\n",
- " raise ValidationError(\n",
- " f\"Values {invalid_options} not in allowed options: {self.options}\"\n",
- " )\n",
- " return value\n",
- "\n",
- " def _to_notion(self, value: list[str]) -> dict:\n",
- " return {\n",
- " self.name: {self.NOTION_FIELD_TYPE: [{\"name\": option} for option in value]}\n",
- " }\n",
- "\n",
- " def _from_notion(self, data: dict) -> list[str]:\n",
- " if \"properties\" in data:\n",
- " multi_select = data[\"properties\"][self.name][self.NOTION_FIELD_TYPE]\n",
- " else:\n",
- " multi_select = data[self.name][self.NOTION_FIELD_TYPE]\n",
- " if not multi_select:\n",
- " return []\n",
- " return [item[\"name\"] for item in multi_select]\n",
- "\n",
- " def _to_notion_property(self) -> dict:\n",
- " prop = super()._to_notion_property()\n",
- " if self.options:\n",
- " prop[self.name][\"multi_select\"][\"options\"] = [\n",
- " {\"name\": option} for option in self.options\n",
- " ]\n",
- " return prop"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | export\n",
- "class URL(Field[str], str):\n",
- " \"\"\"URL property type.\"\"\"\n",
- "\n",
- " NOTION_FIELD_TYPE = \"url\"\n",
- " _type = str\n",
- "\n",
- " def __new__(cls, *args, **kwargs):\n",
- " return str.__new__(cls)\n",
- "\n",
- " def __init__(self, required: bool = False):\n",
- " super().__init__(required=required)\n",
- "\n",
- " def validate(self, value: t.Optional[str]) -> t.Optional[str]:\n",
- " value = super().validate(value)\n",
- " if value is not None and not isinstance(value, str):\n",
- " raise ValidationError(f\"URL must be a string, got {type(value)}\")\n",
- " return value\n",
- "\n",
- " def _to_notion(self, value: str) -> dict:\n",
- " return {self.name: {self.NOTION_FIELD_TYPE: value}}\n",
- "\n",
- " def _from_notion(self, data: dict) -> t.Optional[str]:\n",
- " if \"properties\" in data:\n",
- " url = data[\"properties\"][self.name][self.NOTION_FIELD_TYPE]\n",
- " else:\n",
- " url = data[self.name][self.NOTION_FIELD_TYPE]\n",
- " return url"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "## New Types"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | export\n",
- "T = t.TypeVar(\"T\")\n",
- "\n",
- "\n",
- "class NotionFieldMeta:\n",
- " \"\"\"Base metadata class for Notion field types.\"\"\"\n",
- "\n",
- " NOTION_FIELD_TYPE: t.ClassVar[str] = \"\"\n",
- "\n",
- " def __init__(self, required: bool = True):\n",
- " self.required = required\n",
- " self.name: str = \"\" # Will be set during model initialization\n",
- "\n",
- " def __set_name__(self, owner, name: str):\n",
- " \"\"\"Set field name when used directly as class attribute.\"\"\"\n",
- " self.name = name\n",
- "\n",
- " def validate(self, value: t.Any) -> t.Any:\n",
- " \"\"\"Validate field value.\"\"\"\n",
- " if value is None and self.required:\n",
- " raise ValueError(f\"Field {self.name} is required\")\n",
- " return value\n",
- "\n",
- " def to_notion(self, value: t.Any) -> dict:\n",
- " \"\"\"Convert Python value to Notion format.\"\"\"\n",
- " raise NotImplementedError()\n",
- "\n",
- " def from_notion(self, data: dict) -> t.Any:\n",
- " \"\"\"Convert Notion format to Python value.\"\"\"\n",
- " raise NotImplementedError()\n",
- "\n",
- " def to_notion_property(self) -> dict:\n",
- " \"\"\"Convert field to Notion property definition.\"\"\"\n",
- " return {self.name: {\"type\": self.NOTION_FIELD_TYPE, self.NOTION_FIELD_TYPE: {}}}"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | export\n",
- "class TextNew(NotionFieldMeta):\n",
- " \"\"\"Rich text property type for Notion.\"\"\"\n",
- "\n",
- " NOTION_FIELD_TYPE = \"rich_text\"\n",
- " CHUNK_SIZE = 2000 # Notion's character limit per rich text block\n",
- "\n",
- " def __init__(self, required: bool = True):\n",
- " super().__init__(required=required)\n",
- "\n",
- " def to_notion(self, value: str) -> dict:\n",
- " # Split text into chunks of CHUNK_SIZE characters\n",
- " if not value:\n",
- " return {self.name: {self.NOTION_FIELD_TYPE: []}}\n",
- "\n",
- " chunks = [\n",
- " value[i : i + self.CHUNK_SIZE]\n",
- " for i in range(0, len(value), self.CHUNK_SIZE)\n",
- " ]\n",
- " rich_text_array = [{\"text\": {\"content\": chunk}} for chunk in chunks]\n",
- "\n",
- " return {self.name: {self.NOTION_FIELD_TYPE: rich_text_array}}\n",
- "\n",
- " def from_notion(self, data: dict) -> t.Optional[str]:\n",
- " # Handle both direct and properties-wrapped format\n",
- " if \"properties\" in data:\n",
- " if self.name in data[\"properties\"]:\n",
- " rich_text = data[\"properties\"][self.name][self.NOTION_FIELD_TYPE]\n",
- " else:\n",
- " return None\n",
- " else:\n",
- " if self.name in data:\n",
- " rich_text = data[self.name][self.NOTION_FIELD_TYPE]\n",
- " else:\n",
- " return None\n",
- "\n",
- " if not rich_text:\n",
- " return None\n",
- "\n",
- " # Combine all text chunks into a single string\n",
- " return \"\".join(item[\"text\"][\"content\"] for item in rich_text if \"text\" in item)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": []
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "python3",
- "language": "python",
- "name": "python3"
- }
- },
- "nbformat": 4,
- "nbformat_minor": 2
-}
diff --git a/nbs/model/pydantic_mode.ipynb b/nbs/model/pydantic_mode.ipynb
new file mode 100644
index 0000000..5e644ba
--- /dev/null
+++ b/nbs/model/pydantic_mode.ipynb
@@ -0,0 +1,182 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# Ragas `BaseModel`\n",
+ "\n",
+ "> An Extended version of Pydantics `BaseModel` for some ragas specific stuff"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "#| default_exp model.pydantic_model"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "#| export\n",
+ "import typing as t\n",
+ "\n",
+ "from pydantic import BaseModel, PrivateAttr\n",
+ "\n",
+ "from ragas_experimental.typing import FieldMeta as RagasFieldMeta"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "#| export\n",
+ "class ExtendedPydanticBaseModel(BaseModel):\n",
+ " \"\"\"Extended Pydantic BaseModel with database integration capabilities\"\"\"\n",
+ " \n",
+ " # Private attribute for storing the database row_id\n",
+ " _row_id: t.Optional[int] = PrivateAttr(default=None)\n",
+ " \n",
+ " # Class variable for storing column mapping overrides\n",
+ " __column_mapping__: t.ClassVar[t.Dict[str, str]] = {}\n",
+ " \n",
+ " def __init__(self, **data):\n",
+ " super().__init__(**data)\n",
+ " # Initialize column mapping if not already defined\n",
+ " if not self.__class__.__column_mapping__:\n",
+ " self._initialize_column_mapping()\n",
+ " \n",
+ " @classmethod\n",
+ " def _initialize_column_mapping(cls):\n",
+ " \"\"\"Initialize mapping from field names to column IDs.\"\"\"\n",
+ " for field_name, field_info in cls.model_fields.items():\n",
+ " # Check if field has Column metadata (for Pydantic v2)\n",
+ " column_id = None\n",
+ " for extra in field_info.metadata or []:\n",
+ " if isinstance(extra, RagasFieldMeta) and extra.id:\n",
+ " column_id = extra.id\n",
+ " break\n",
+ " \n",
+ " # If no Column metadata found, use field name as column ID\n",
+ " if not column_id:\n",
+ " column_id = field_name\n",
+ " \n",
+ " cls.__column_mapping__[field_name] = column_id\n",
+ " \n",
+ " @classmethod\n",
+ " def get_column_id(cls, field_name: str) -> str:\n",
+ " \"\"\"Get the column ID for a given field name.\"\"\"\n",
+ " if field_name not in cls.__column_mapping__:\n",
+ " raise ValueError(f\"No column mapping found for field {field_name}\")\n",
+ " return cls.__column_mapping__[field_name]\n",
+ " \n",
+ " @classmethod\n",
+ " def set_column_id(cls, field_name: str, column_id: str):\n",
+ " \"\"\"Set the column ID for a given field name.\"\"\"\n",
+ " if field_name not in cls.model_fields:\n",
+ " raise ValueError(f\"Field {field_name} not found in model\")\n",
+ " cls.__column_mapping__[field_name] = column_id\n",
+ " \n",
+ " def get_db_field_mapping(self) -> t.Dict[str, str]:\n",
+ " \"\"\"Get a mapping from field names to column IDs for this model.\"\"\"\n",
+ " return self.__class__.__column_mapping__\n",
+ "\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import ragas_experimental.typing as rt"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Example usage\n",
+ "class TestDataRow(ExtendedPydanticBaseModel):\n",
+ " id: t.Optional[int] = None\n",
+ " query: t.Annotated[str, rt.Text(id=\"search_query\")]\n",
+ " persona: t.List[t.Literal[\"opt1\", \"opt2\", \"opt3\"]]"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "{}"
+ ]
+ },
+ "execution_count": null,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "TestDataRow.__column_mapping__"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "t = TestDataRow(id=1, query=\"this is a test\", persona=[\"opt1\"])"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "{'id': 'id', 'query': 'search_query', 'persona': 'persona'}"
+ ]
+ },
+ "execution_count": null,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "t.__column_mapping__"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": []
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "python3",
+ "language": "python",
+ "name": "python3"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/nbs/nbdev.yml b/nbs/nbdev.yml
index f4c5e6e..f3c38c8 100644
--- a/nbs/nbdev.yml
+++ b/nbs/nbdev.yml
@@ -2,7 +2,7 @@ project:
output-dir: _docs
website:
- title: "ragas_annotator"
+ title: "ragas_experimental"
site-url: "https://explodinggradients.github.io/ragas_annotator"
description: "Experimental Ragas Evaluation UI and Library"
repo-branch: main
diff --git a/ragas_annotator/model/__init__.py b/nbs/project/.notest
similarity index 100%
rename from ragas_annotator/model/__init__.py
rename to nbs/project/.notest
diff --git a/nbs/project/comparison.ipynb b/nbs/project/comparison.ipynb
deleted file mode 100644
index e864516..0000000
--- a/nbs/project/comparison.ipynb
+++ /dev/null
@@ -1,526 +0,0 @@
-{
- "cells": [
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "# `Comparison` \n",
- "\n",
- "> Create Comparison views with different experiments"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | default_exp project.comparison"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | hide\n",
- "import pytest\n",
- "from unittest.mock import MagicMock\n",
- "from fastcore.test import *\n",
- "\n",
- "from ragas_annotator.backends.notion_backend import NotionBackend"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | export\n",
- "import typing as t\n",
- "import logging\n",
- "\n",
- "from fastcore.utils import patch\n",
- "from tqdm import tqdm\n",
- "\n",
- "from ragas_annotator.project.core import Project\n",
- "from ragas_annotator.model.notion_model import NotionModel\n",
- "import ragas_annotator.model.notion_typing as nmt\n",
- "from ragas_annotator.experiment import Experiment\n",
- "from ragas_annotator.dataset import Dataset"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | export\n",
- "logger = logging.getLogger(__name__)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | export\n",
- "# utility function to check if a model has a title property and get the name of the title property\n",
- "@t.overload\n",
- "def _get_title_property(\n",
- " model: NotionModel | t.Type[NotionModel], raise_exception: t.Literal[True] = True\n",
- ") -> str: ...\n",
- "@t.overload\n",
- "def _get_title_property(\n",
- " model: NotionModel | t.Type[NotionModel], raise_exception: t.Literal[False] = False\n",
- ") -> t.Optional[str]: ...\n",
- "def _get_title_property(\n",
- " model: NotionModel | t.Type[NotionModel], raise_exception: bool = True\n",
- ") -> t.Optional[str]:\n",
- " has_title = False\n",
- " for field in model._fields.keys():\n",
- " if isinstance(model._fields[field], nmt.Title):\n",
- " has_title = True\n",
- " title_property = field\n",
- " return title_property\n",
- "\n",
- " if not has_title:\n",
- " if raise_exception:\n",
- " raise ValueError(\"Model has no title property\")\n",
- " else:\n",
- " return None"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | hide\n",
- "class ModelWithoutTitle(NotionModel):\n",
- " id: int = nmt.ID()\n",
- " select: str = nmt.Select()\n",
- "\n",
- "\n",
- "class ModelWithTitle(ModelWithoutTitle):\n",
- " some_title: str = nmt.Title()"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | hide\n",
- "test_eq(_get_title_property(ModelWithoutTitle, raise_exception=False), None)\n",
- "pytest.raises(ValueError, _get_title_property, ModelWithoutTitle)\n",
- "test_eq(_get_title_property(ModelWithTitle), \"some_title\")"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | export\n",
- "def _validate_experiments(experiments: t.Sequence[Experiment]):\n",
- " # validate we have more than 2 experiments\n",
- " if len(experiments) < 2:\n",
- " raise ValueError(\"We need at least 2 experiments to compare\")\n",
- "\n",
- " # validate that all experiments are of the same model\n",
- " top_exp = experiments[0]\n",
- " title_property = _get_title_property(top_exp.model)\n",
- " for exp in experiments:\n",
- " if not isinstance(exp, Experiment):\n",
- " raise ValueError(\"All experiments must be of type Experiment\")\n",
- " if top_exp != exp.model:\n",
- " logger.warning(\n",
- " f\"Experiments have different models: {top_exp.model} and {exp.model}\"\n",
- " )\n",
- " if title_property != _get_title_property(exp.model):\n",
- " raise ValueError(\"All experiments must have the same title property.\")"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [
- {
- "name": "stderr",
- "output_type": "stream",
- "text": [
- "Experiments have different models: and \n",
- "Experiments have different models: and \n",
- "Experiments have different models: and \n",
- "Experiments have different models: and \n",
- "Experiments have different models: and \n",
- "Experiments have different models: and \n",
- "Experiments have different models: and \n",
- "Experiments have different models: and \n",
- "Experiments have different models: and \n"
- ]
- }
- ],
- "source": [
- "# | hide\n",
- "\n",
- "example_notion_backend = MagicMock(spec=NotionBackend)\n",
- "\n",
- "# test the validation logics\n",
- "with pytest.raises(ValueError):\n",
- " _validate_experiments(\n",
- " [\n",
- " Experiment(\n",
- " \"test_experiment_1\",\n",
- " ModelWithTitle,\n",
- " \"test_database_id\",\n",
- " example_notion_backend,\n",
- " ),\n",
- " Experiment(\n",
- " \"test_experiment_1\",\n",
- " ModelWithoutTitle,\n",
- " \"test_database_id\",\n",
- " example_notion_backend,\n",
- " ),\n",
- " Experiment(\n",
- " \"test_experiment_2\",\n",
- " ModelWithTitle,\n",
- " \"test_database_id\",\n",
- " example_notion_backend,\n",
- " ),\n",
- " ]\n",
- " )\n",
- "\n",
- "\n",
- "# with should pass\n",
- "_validate_experiments(\n",
- " [\n",
- " Experiment(\n",
- " \"test_experiment_1\",\n",
- " ModelWithTitle,\n",
- " \"test_database_id\",\n",
- " example_notion_backend,\n",
- " ),\n",
- " Experiment(\n",
- " \"test_experiment_2\",\n",
- " ModelWithTitle,\n",
- " \"test_database_id\",\n",
- " example_notion_backend,\n",
- " ),\n",
- " ]\n",
- ")\n",
- "\n",
- "\n",
- "# throw a warning if the models are different\n",
- "class DifferentTitleModel(ModelWithoutTitle):\n",
- " some_title: str = nmt.Title()\n",
- "\n",
- "\n",
- "_validate_experiments(\n",
- " [\n",
- " Experiment(\n",
- " \"test_experiment_1\",\n",
- " ModelWithTitle,\n",
- " \"test_database_id\",\n",
- " example_notion_backend,\n",
- " ),\n",
- " Experiment(\n",
- " \"test_experiment_2\",\n",
- " ModelWithTitle,\n",
- " \"test_database_id\",\n",
- " example_notion_backend,\n",
- " ),\n",
- " Experiment(\n",
- " \"test_experiment_3\",\n",
- " DifferentTitleModel,\n",
- " \"test_database_id\",\n",
- " example_notion_backend,\n",
- " ),\n",
- " ]\n",
- ")\n",
- "\n",
- "\n",
- "# throw an error if the title properties are different\n",
- "class DifferentTitleNameModel(ModelWithoutTitle):\n",
- " some_title_other: str = nmt.Title()\n",
- "\n",
- "\n",
- "with pytest.raises(ValueError):\n",
- " _validate_experiments(\n",
- " [\n",
- " Experiment(\n",
- " \"test_experiment_1\",\n",
- " ModelWithTitle,\n",
- " \"test_database_id\",\n",
- " example_notion_backend,\n",
- " ),\n",
- " Experiment(\n",
- " \"test_experiment_2\",\n",
- " DifferentTitleNameModel,\n",
- " \"test_database_id\",\n",
- " example_notion_backend,\n",
- " ),\n",
- " ]\n",
- " )"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | hide\n",
- "test_eq(_get_title_property(ModelWithTitle), \"some_title\")\n",
- "test_eq(_get_title_property(DifferentTitleNameModel), \"some_title_other\")\n",
- "with pytest.raises(ValueError):\n",
- " _get_title_property(ModelWithoutTitle)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [
- {
- "data": {
- "text/plain": [
- "[{'id_str': '1',\n",
- " 'experiment_name': 'test_experiment_1',\n",
- " 'some_title': 'test_1',\n",
- " 'select': 'test_exp_1_1'},\n",
- " {'id_str': '1',\n",
- " 'experiment_name': 'test_experiment_2',\n",
- " 'some_title': 'test_1',\n",
- " 'select': 'test_exp_2_1'},\n",
- " {'id_str': '1',\n",
- " 'experiment_name': 'test_experiment_3',\n",
- " 'some_title': 'test_1',\n",
- " 'select': 'test_exp_3_1'}]"
- ]
- },
- "execution_count": null,
- "metadata": {},
- "output_type": "execute_result"
- }
- ],
- "source": [
- "# | hide\n",
- "# a test for grouping experiments\n",
- "NUM_EXPS = 3\n",
- "# dummy experiments\n",
- "exp1 = Experiment(\n",
- " \"test_experiment_1\", ModelWithTitle, \"test_database_id\", example_notion_backend\n",
- ")\n",
- "exp2 = Experiment(\n",
- " \"test_experiment_2\", ModelWithTitle, \"test_database_id\", example_notion_backend\n",
- ")\n",
- "exp3 = Experiment(\n",
- " \"test_experiment_3\", ModelWithTitle, \"test_database_id\", example_notion_backend\n",
- ")\n",
- "# fill the experiments with dummy data\n",
- "for i in range(NUM_EXPS):\n",
- " exp1._entries.append(\n",
- " ModelWithTitle(some_title=f\"test_{i}\", id=i, select=f\"test_exp_1_{i}\")\n",
- " )\n",
- "for i in range(NUM_EXPS):\n",
- " exp2._entries.append(\n",
- " ModelWithTitle(some_title=f\"test_{i}\", id=i, select=f\"test_exp_2_{i}\")\n",
- " )\n",
- "for i in range(NUM_EXPS):\n",
- " exp3._entries.append(\n",
- " ModelWithTitle(some_title=f\"test_{i}\", id=i, select=f\"test_exp_3_{i}\")\n",
- " )\n",
- "\n",
- "\n",
- "# manually create the combined fields\n",
- "combined_experiments_fields = []\n",
- "for i in range(NUM_EXPS):\n",
- " exp1_as_field = {\n",
- " \"id_str\": str(i),\n",
- " \"experiment_name\": \"test_experiment_1\",\n",
- " \"some_title\": f\"test_{i}\",\n",
- " \"select\": f\"test_exp_1_{i}\",\n",
- " }\n",
- " exp2_as_field = exp1_as_field.copy()\n",
- " exp2_as_field[\"experiment_name\"] = \"test_experiment_2\"\n",
- " exp2_as_field[\"some_title\"] = f\"test_{i}\"\n",
- " exp2_as_field[\"select\"] = f\"test_exp_2_{i}\"\n",
- " exp3_as_field = exp1_as_field.copy()\n",
- " exp3_as_field[\"experiment_name\"] = \"test_experiment_3\"\n",
- " exp3_as_field[\"some_title\"] = f\"test_{i}\"\n",
- " exp3_as_field[\"select\"] = f\"test_exp_3_{i}\"\n",
- " combined_experiments_fields.append([exp1_as_field, exp2_as_field, exp3_as_field])\n",
- "\n",
- "combined_experiments_fields[1]"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | export\n",
- "def _model_to_dict(model: NotionModel) -> dict:\n",
- " # drop ID filed\n",
- " data = {}\n",
- " for field_name in model._fields.keys():\n",
- " if isinstance(model._fields[field_name], nmt.ID):\n",
- " continue\n",
- " data[field_name] = model.__getattribute__(field_name)\n",
- " return data"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | hide\n",
- "# test it\n",
- "for i, grouped_row in enumerate(combined_experiments_fields):\n",
- " # add the missing fields to exp1\n",
- " exp1_dict = _model_to_dict(exp1._entries[i])\n",
- " exp1_dict[\"id_str\"] = str(i)\n",
- " exp1_dict[\"experiment_name\"] = \"test_experiment_1\"\n",
- " test_eq(grouped_row[0], exp1_dict)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | export\n",
- "def _combine_experiments(experiments: t.Sequence[Experiment]):\n",
- " \"\"\"Group experiment rows by their title property value.\"\"\"\n",
- " if not experiments:\n",
- " return []\n",
- "\n",
- " title_property: str = _get_title_property(experiments[0].model)\n",
- "\n",
- " # Create a dictionary to group rows by title value\n",
- " grouped_by_title = {}\n",
- "\n",
- " # Process each experiment\n",
- " for exp in experiments:\n",
- " for row in exp:\n",
- " title_value = getattr(row, title_property)\n",
- "\n",
- " # Create key if it doesn't exist\n",
- " if title_value not in grouped_by_title:\n",
- " grouped_by_title[title_value] = []\n",
- "\n",
- " # Add this row to the appropriate group\n",
- " row_dict = _model_to_dict(row)\n",
- " row_dict[\"experiment_name\"] = exp.name\n",
- " grouped_by_title[title_value].append(row_dict)\n",
- "\n",
- " # Convert dictionary to list and add id_str\n",
- " result = []\n",
- " for i, (_, rows) in enumerate(grouped_by_title.items()):\n",
- " for row in rows:\n",
- " row[\"id_str\"] = str(i)\n",
- " result.append(rows)\n",
- "\n",
- " return result"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | hide\n",
- "# lets see if the asserts pass though\n",
- "test_eq(_combine_experiments([exp1, exp2, exp3]), combined_experiments_fields)"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "TODO:\n",
- "- leverage the `Dataset` object here to reduce duplicate code."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | export\n",
- "@patch\n",
- "def compare_experiments(\n",
- " self: Project,\n",
- " *experiments: Experiment,\n",
- "):\n",
- " _validate_experiments(experiments)\n",
- "\n",
- " # create a combined Model with all the fields of the experiments\n",
- " class CombinedModel(NotionModel):\n",
- " id_str: str = nmt.Text()\n",
- " experiment_name: str = nmt.Text()\n",
- "\n",
- " for exp in experiments:\n",
- " for field in exp.model._fields.keys():\n",
- " if field not in CombinedModel._fields:\n",
- " CombinedModel._fields[field] = exp.model._fields[field]\n",
- "\n",
- " # create a new database with the combined model\n",
- " properties = {}\n",
- " for field in CombinedModel._fields.keys():\n",
- " properties.update(CombinedModel._fields[field]._to_notion_property())\n",
- " comparison_database_id = self._notion_backend.create_new_database(\n",
- " parent_page_id=self.comparisons_page_id,\n",
- " title=f\"{' and '.join([exp.name for exp in experiments])}\",\n",
- " properties=properties,\n",
- " )\n",
- "\n",
- " # make sure all experiments are synced to upstream\n",
- " for exp in experiments:\n",
- " exp.load()\n",
- "\n",
- " # group together by title property\n",
- " grouped_experiments = _combine_experiments(experiments)\n",
- "\n",
- " # append these to database\n",
- " for grouped_row in tqdm(grouped_experiments, desc=\"Uploading to Notion\"):\n",
- " for row in grouped_row:\n",
- " combined_model_instance = CombinedModel(**row)\n",
- " self._notion_backend.create_page_in_database(\n",
- " database_id=comparison_database_id,\n",
- " properties=combined_model_instance.to_notion()[\"properties\"],\n",
- " )\n",
- " # Get the URL for the created database\n",
- " # The format for Notion URLs is: https://www.notion.so/{database_id}\n",
- " notion_url = f\"https://www.notion.so/{comparison_database_id.replace('-', '')}\"\n",
- "\n",
- " return notion_url"
- ]
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "python3",
- "language": "python",
- "name": "python3"
- }
- },
- "nbformat": 4,
- "nbformat_minor": 2
-}
diff --git a/nbs/project/core.ipynb b/nbs/project/core.ipynb
index c3da57d..c53c029 100644
--- a/nbs/project/core.ipynb
+++ b/nbs/project/core.ipynb
@@ -28,6 +28,16 @@
"from nbdev.showdoc import *"
]
},
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "\n",
+ "from ragas_experimental.model.notion_model import NotionModel"
+ ]
+ },
{
"cell_type": "code",
"execution_count": null,
@@ -37,16 +47,17 @@
"# | export\n",
"import typing as t\n",
"import os\n",
+ "import asyncio\n",
"\n",
- "from notion_client import Client as NotionClient\n",
"from fastcore.utils import patch\n",
+ "from pydantic import BaseModel\n",
"\n",
- "from ragas_annotator.backends.notion_backend import NotionBackend\n",
- "from ragas_annotator.backends.factory import NotionBackendFactory\n",
- "from ragas_annotator.model.notion_model import NotionModel\n",
- "import ragas_annotator.model.notion_typing as nmt\n",
- "from ragas_annotator.dataset import Dataset\n",
- "from ragas_annotator.experiment import Experiment"
+ "from ragas_experimental.backends.factory import RagasApiClientFactory\n",
+ "from ragas_experimental.backends.ragas_api_client import RagasApiClient\n",
+ "import ragas_experimental.typing as rt\n",
+ "from ragas_experimental.utils import async_to_sync, create_nano_id\n",
+ "from ragas_experimental.dataset import Dataset\n",
+ "from ragas_experimental.experiment import Experiment"
]
},
{
@@ -59,96 +70,44 @@
"class Project:\n",
" def __init__(\n",
" self,\n",
- " name: str,\n",
- " notion_backend: t.Optional[NotionBackend] = None,\n",
- " notion_api_key: t.Optional[str] = None,\n",
- " notion_root_page_id: t.Optional[str] = None,\n",
+ " project_id: str,\n",
+ " ragas_app_client: t.Optional[RagasApiClient] = None,\n",
" ):\n",
- " self.name = name\n",
- " self.datasets_page_id = \"\"\n",
- " self.experiments_page_id = \"\"\n",
- " self.comparisons_page_id = \"\"\n",
- "\n",
- " if notion_backend is None:\n",
- " # check that the environment variables are set\n",
- " notion_api_key = os.getenv(\"NOTION_API_KEY\") or notion_api_key\n",
- " notion_root_page_id = (\n",
- " os.getenv(\"NOTION_ROOT_PAGE_ID\") or notion_root_page_id\n",
- " )\n",
- "\n",
- " if notion_api_key is None:\n",
- " raise ValueError(\"NOTION_API_KEY is not set\")\n",
- "\n",
- " if notion_root_page_id is None:\n",
- " raise ValueError(\"NOTION_ROOT_PAGE_ID is not set\")\n",
- "\n",
- " if notion_api_key == \"TEST\":\n",
- " self._notion_backend = NotionBackendFactory.create(\n",
- " root_page_id=notion_root_page_id,\n",
- " use_mock=True,\n",
- " initialize_project=True,\n",
- " )\n",
- " else:\n",
- " self._notion_backend = NotionBackend(\n",
- " notion_client=NotionClient(auth=notion_api_key),\n",
- " root_page_id=notion_root_page_id,\n",
- " )\n",
+ " self.project_id = project_id\n",
+ " if ragas_app_client is None:\n",
+ " self._ragas_api_client = RagasApiClientFactory.create()\n",
" else:\n",
- " self._notion_backend = notion_backend\n",
+ " self._ragas_api_client = ragas_app_client\n",
"\n",
- " # initialize the project structure\n",
- " self.initialize()\n",
+ " # create the project\n",
+ " try:\n",
+ " sync_version = async_to_sync(self._ragas_api_client.get_project)\n",
+ " existing_project = sync_version(project_id=self.project_id)\n",
+ " self.project_id = existing_project[\"id\"]\n",
+ " self.name = existing_project[\"title\"]\n",
+ " self.description = existing_project[\"description\"]\n",
+ " except Exception as e:\n",
+ " raise e\n",
"\n",
- " def initialize(self):\n",
- " \"\"\"Initialize the project structure in Notion.\"\"\"\n",
- " root_page_id = self._notion_backend.root_page_id\n",
- "\n",
- " # if page doesn't exist, create it\n",
- " if not self._notion_backend.page_exists(root_page_id):\n",
- " raise ValueError(f\"Root page '{root_page_id}' does not exist\")\n",
- " # if page exists, but structure is invalid\n",
- " elif not self._notion_backend.validate_project_structure(root_page_id):\n",
- " # create the missing pages\n",
- " print(f\"Creating missing pages inside root page '{root_page_id}'\")\n",
- " self._create_project_structure(root_page_id)\n",
- " else:\n",
- " # if page exists and structure is valid, get the page ids\n",
- " # for datasets, experiments, and comparisons\n",
- " self.datasets_page_id = self._notion_backend.get_page_id(\n",
- " root_page_id, \"Datasets\"\n",
- " )\n",
- " self.experiments_page_id = self._notion_backend.get_page_id(\n",
- " root_page_id, \"Experiments\"\n",
- " )\n",
- " self.comparisons_page_id = self._notion_backend.get_page_id(\n",
- " root_page_id, \"Comparisons\"\n",
- " )\n",
+ " @classmethod\n",
+ " def create(\n",
+ " cls,\n",
+ " name: str,\n",
+ " description: str = \"\",\n",
+ " ragas_app_client: t.Optional[RagasApiClient] = None,\n",
+ " ):\n",
+ " ragas_app_client = RagasApiClientFactory.create()\n",
+ " sync_version = async_to_sync(ragas_app_client.create_project)\n",
+ " new_project = sync_version(title=name, description=description)\n",
+ " return cls(new_project[\"id\"], ragas_app_client)\n",
"\n",
- " def _create_project_structure(self, root_page_id: str):\n",
- " \"\"\"Create the basic project structure with required pages.\"\"\"\n",
- " # Create each required page\n",
- " self.datasets_page_id = self._notion_backend.create_new_page(\n",
- " root_page_id, \"Datasets\"\n",
- " )\n",
- " self.experiments_page_id = self._notion_backend.create_new_page(\n",
- " root_page_id, \"Experiments\"\n",
- " )\n",
- " self.comparisons_page_id = self._notion_backend.create_new_page(\n",
- " root_page_id, \"Comparisons\"\n",
- " )\n",
+ " def delete(self):\n",
+ " sync_version = async_to_sync(self._ragas_api_client.delete_project)\n",
+ " sync_version(project_id=self.project_id)\n",
+ " print(\"Project deleted!\")\n",
"\n",
" def __repr__(self):\n",
- " return f\"Project(name='{self.name}', root_page_id={self._notion_backend.root_page_id})\""
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# | hide\n",
- "from ragas_annotator.backends.factory import NotionBackendFactory"
+ " return f\"Project(name='{self.name}')\""
]
},
{
@@ -157,10 +116,11 @@
"metadata": {},
"outputs": [],
"source": [
- "# | hide\n",
- "notion_backend = NotionBackendFactory.create(\n",
- " root_page_id=\"your_root_page_id\", use_mock=True, initialize_project=True\n",
- ")"
+    "RAGAS_APP_TOKEN = \"<YOUR_RAGAS_APP_TOKEN>\"  # NOTE(review): never commit real tokens\n",
+ "RAGAS_API_BASE_URL = \"https://api.dev.app.ragas.io\"\n",
+ "\n",
+ "os.environ[\"RAGAS_APP_TOKEN\"] = RAGAS_APP_TOKEN\n",
+ "os.environ[\"RAGAS_API_BASE_URL\"] = RAGAS_API_BASE_URL"
]
},
{
@@ -171,7 +131,7 @@
{
"data": {
"text/plain": [
- "Project(name='My Project', root_page_id=your_root_page_id)"
+ "Project(name='Demo Project')"
]
},
"execution_count": null,
@@ -180,10 +140,19 @@
}
],
"source": [
- "project = Project(\"My Project\", notion_backend=notion_backend)\n",
+ "project = Project.create(\"Demo Project\")\n",
"project"
]
},
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "#project.delete()"
+ ]
+ },
{
"cell_type": "markdown",
"metadata": {},
@@ -192,6 +161,30 @@
"\n"
]
},
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "#| export\n",
+ "async def create_dataset_columns(project_id, dataset_id, columns, create_dataset_column_func):\n",
+ " tasks = []\n",
+ " for column in columns:\n",
+ " tasks.append(create_dataset_column_func(\n",
+ " project_id=project_id,\n",
+ " dataset_id=dataset_id,\n",
+ " id=create_nano_id(),\n",
+ " name=column[\"name\"],\n",
+ " type=column[\"type\"],\n",
+ " settings={\n",
+ " \"max_length\": 255,\n",
+ " \"is_required\": True,\n",
+ " },\n",
+ " ))\n",
+ " return await asyncio.gather(*tasks)\n"
+ ]
+ },
{
"cell_type": "code",
"execution_count": null,
@@ -201,7 +194,7 @@
"# | export\n",
"@patch\n",
"def create_dataset(\n",
- " self: Project, model: t.Type[NotionModel], name: t.Optional[str] = None\n",
+ " self: Project, model: t.Type[BaseModel], name: t.Optional[str] = None\n",
") -> Dataset:\n",
" \"\"\"Create a new dataset database.\n",
"\n",
@@ -212,34 +205,30 @@
" Returns:\n",
" Dataset: A new dataset object for managing entries\n",
" \"\"\"\n",
- " # Collect all properties from model fields\n",
- " properties = {}\n",
- " has_title = False\n",
- " for field_name, field in model._fields.items():\n",
- " properties.update(field._to_notion_property())\n",
- " if isinstance(field, nmt.Title): # Check if we have a title field\n",
- " has_title = True\n",
- "\n",
- " if not has_title:\n",
- " raise ValueError(\n",
- " \"In order to create a dataset, the model must have a nmt.Title field\"\n",
- " )\n",
- "\n",
- " # Create the database\n",
- " if self.datasets_page_id == \"\":\n",
- " raise ValueError(\"Datasets page ID is not set\")\n",
- " database_id = self._notion_backend.create_new_database(\n",
- " parent_page_id=self.datasets_page_id,\n",
- " title=name if name is not None else model.__name__,\n",
- " properties=properties,\n",
+ " # create the dataset\n",
+ " sync_version = async_to_sync(self._ragas_api_client.create_dataset)\n",
+ " dataset_info = sync_version(\n",
+ " project_id=self.project_id,\n",
+ " name=name if name is not None else model.__name__,\n",
" )\n",
"\n",
+ " # create the columns for the dataset\n",
+ " column_types = rt.ModelConverter.model_to_columns(model)\n",
+ " sync_version = async_to_sync(create_dataset_columns)\n",
+ " sync_version(\n",
+ " project_id=self.project_id,\n",
+ " dataset_id=dataset_info[\"id\"],\n",
+ " columns=column_types,\n",
+ " create_dataset_column_func=self._ragas_api_client.create_dataset_column,\n",
+ " )\n",
+ " \n",
" # Return a new Dataset instance\n",
" return Dataset(\n",
" name=name if name is not None else model.__name__,\n",
" model=model,\n",
- " database_id=database_id,\n",
- " notion_backend=self._notion_backend,\n",
+ " project_id=self.project_id,\n",
+ " dataset_id=dataset_info[\"id\"],\n",
+ " ragas_api_client=self._ragas_api_client,\n",
" )"
]
},
@@ -261,9 +250,10 @@
],
"source": [
"# create an example dataset\n",
- "class TestModel(NotionModel):\n",
- " name: str = nmt.Title()\n",
- " description: str = nmt.Text()\n",
+ "class TestModel(BaseModel):\n",
+ " id: int\n",
+ " name: str\n",
+ " description: str\n",
"\n",
"\n",
"test_dataset = project.create_dataset(TestModel)\n",
@@ -278,7 +268,7 @@
{
"data": {
"text/plain": [
- "'TestModel'"
+ "'bbe45632-3268-43a6-9694-b020b3f5226f'"
]
},
"execution_count": null,
@@ -287,7 +277,27 @@
}
],
"source": [
- "test_dataset.name"
+ "project.project_id"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "'0fee5330-9f6e-44a9-a85c-e3b947b697de'"
+ ]
+ },
+ "execution_count": null,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "test_dataset.dataset_id"
]
},
{
@@ -298,25 +308,65 @@
"source": [
"# | export\n",
"@patch\n",
- "def get_dataset(self: Project, name: str, model: t.Type[NotionModel]) -> Dataset:\n",
+ "def get_dataset(self: Project, dataset_id: str, model) -> Dataset:\n",
" \"\"\"Get an existing dataset by name.\"\"\"\n",
- " if self.datasets_page_id == \"\":\n",
- " raise ValueError(\"Datasets page ID is not set\")\n",
- "\n",
" # Search for database with given name\n",
- " database_id = self._notion_backend.get_database_id(\n",
- " parent_page_id=self.datasets_page_id, name=name, return_multiple=False\n",
+ " sync_version = async_to_sync(self._ragas_api_client.get_dataset)\n",
+ " dataset_info = sync_version(\n",
+ " project_id=self.project_id,\n",
+ " dataset_id=dataset_id\n",
" )\n",
"\n",
" # For now, return Dataset without model type\n",
" return Dataset(\n",
- " name=name,\n",
+ " name=dataset_info[\"name\"],\n",
" model=model,\n",
- " database_id=database_id,\n",
- " notion_backend=self._notion_backend,\n",
+ " project_id=self.project_id,\n",
+ " dataset_id=dataset_id,\n",
+ " ragas_api_client=self._ragas_api_client,\n",
" )"
]
},
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "Dataset(name=TestModel, model=TestModel, len=0)"
+ ]
+ },
+ "execution_count": null,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "project.get_dataset(test_dataset.dataset_id, TestModel)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "'0a7c4ecb-b313-4bb0-81c0-852c9634ce03'"
+ ]
+ },
+ "execution_count": null,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "project.project_id"
+ ]
+ },
{
"cell_type": "code",
"execution_count": null,
diff --git a/nbs/project/experiments.ipynb b/nbs/project/experiments.ipynb
index a51fb1c..891d94c 100644
--- a/nbs/project/experiments.ipynb
+++ b/nbs/project/experiments.ipynb
@@ -33,10 +33,12 @@
"\n",
"from fastcore.utils import patch\n",
"\n",
- "from ragas_annotator.project.core import Project\n",
- "from ragas_annotator.model.notion_model import NotionModel\n",
- "from ragas_annotator.experiment import Experiment\n",
- "from ragas_annotator.dataset import Dataset"
+ "from ragas_experimental.project.core import Project\n",
+ "from ragas_experimental.model.pydantic_model import ExtendedPydanticBaseModel as BaseModel\n",
+ "from ragas_experimental.utils import async_to_sync, create_nano_id\n",
+ "from ragas_experimental.dataset import Dataset, BaseModelType\n",
+ "from ragas_experimental.experiment import Experiment\n",
+ "import ragas_experimental.typing as rt"
]
},
{
@@ -48,36 +50,125 @@
"# | export\n",
"@patch\n",
"def create_experiment(\n",
- " self: Project, name: str, model: t.Type[NotionModel]\n",
+ " self: Project, name: str, model: t.Type[BaseModel]\n",
") -> Experiment:\n",
- " \"\"\"Create a new experiment view.\n",
+ " \"\"\"Create a new experiment.\n",
"\n",
" Args:\n",
" name: Name of the experiment\n",
" model: Model class defining the experiment structure\n",
"\n",
" Returns:\n",
- " ExperimentView: View for managing experiment results\n",
+ " Experiment: An experiment object for managing results\n",
" \"\"\"\n",
- " if self.experiments_page_id == \"\":\n",
- " raise ValueError(\"Experiments page ID is not set\")\n",
- "\n",
- " # Collect all properties from model fields\n",
- " properties = {}\n",
- " for field_name, field in model._fields.items():\n",
- " properties.update(field._to_notion_property())\n",
- "\n",
- " # Create the database\n",
- " database_id = self._notion_backend.create_new_database(\n",
- " parent_page_id=self.experiments_page_id, title=name, properties=properties\n",
+ " # Create the experiment\n",
+ " sync_version = async_to_sync(self._ragas_api_client.create_experiment)\n",
+ " experiment_info = sync_version(\n",
+ " project_id=self.project_id,\n",
+ " name=name,\n",
" )\n",
"\n",
+ " # Create the columns for the experiment\n",
+ " column_types = rt.ModelConverter.model_to_columns(model)\n",
+ " sync_version = async_to_sync(create_experiment_columns)\n",
+ " sync_version(\n",
+ " project_id=self.project_id,\n",
+ " experiment_id=experiment_info[\"id\"],\n",
+ " columns=column_types,\n",
+ " create_experiment_column_func=self._ragas_api_client.create_experiment_column,\n",
+ " )\n",
+ " \n",
+ " # Return a new Experiment instance\n",
" return Experiment(\n",
" name=name,\n",
" model=model,\n",
- " database_id=database_id,\n",
- " notion_backend=self._notion_backend,\n",
- " )"
+ " project_id=self.project_id,\n",
+ " experiment_id=experiment_info[\"id\"],\n",
+ " ragas_api_client=self._ragas_api_client,\n",
+ " )\n",
+ "\n",
+ "# Add this helper function similar to create_dataset_columns in core.ipynb\n",
+ "async def create_experiment_columns(project_id, experiment_id, columns, create_experiment_column_func):\n",
+ " tasks = []\n",
+ " for column in columns:\n",
+ " tasks.append(create_experiment_column_func(\n",
+ " project_id=project_id,\n",
+ " experiment_id=experiment_id,\n",
+ " id=create_nano_id(),\n",
+ " name=column[\"name\"],\n",
+ " type=column[\"type\"],\n",
+ " settings={\n",
+ " \"max_length\": 255,\n",
+ " \"is_required\": True,\n",
+ " },\n",
+ " ))\n",
+ " return await asyncio.gather(*tasks)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "Project(name='SuperMe')"
+ ]
+ },
+ "execution_count": null,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "import os\n",
+ "\n",
+    "RAGAS_APP_TOKEN = \"<YOUR_RAGAS_APP_TOKEN>\"  # NOTE(review): never commit real tokens\n",
+ "RAGAS_API_BASE_URL = \"https://api.dev.app.ragas.io\"\n",
+ "\n",
+ "os.environ[\"RAGAS_APP_TOKEN\"] = RAGAS_APP_TOKEN\n",
+ "os.environ[\"RAGAS_API_BASE_URL\"] = RAGAS_API_BASE_URL\n",
+ "\n",
+ "PROJECT_ID = \"a6ccabe0-7b8d-4866-98af-f167a36b94ff\"\n",
+ "p = Project(project_id=PROJECT_ID)\n",
+ "p"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "class TestModel(BaseModel):\n",
+ " name: str\n",
+ " description: str\n",
+ " price: float\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "Experiment(name=just name, desc, price 2, model=TestModel)"
+ ]
+ },
+ "execution_count": null,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "experiment_id = \"5d7752ab-17bf-46bc-a302-afe04ce1a763\"\n",
+ "exp = p.create_experiment(name=\"just name, desc, price 2\", model=TestModel)\n",
+ "#exp = p.create_dataset(name=\"just name and desc 2\", model=TestModel)\n",
+ "\n",
+ "exp"
]
},
{
@@ -88,24 +179,84 @@
"source": [
"# | export\n",
"@patch\n",
- "def get_experiment(self: Project, name: str, model: t.Type[NotionModel]) -> Experiment:\n",
- " \"\"\"Get an existing experiment by name.\"\"\"\n",
- " if self.experiments_page_id == \"\":\n",
- " raise ValueError(\"Experiments page ID is not set\")\n",
- "\n",
- " # Search for database with given name\n",
- " database_id = self._notion_backend.get_database_id(\n",
- " parent_page_id=self.experiments_page_id, name=name, return_multiple=False\n",
+ "def get_experiment(self: Project, experiment_id: str, model: t.Type[BaseModel]) -> Experiment:\n",
+ " \"\"\"Get an existing experiment by ID.\"\"\"\n",
+ " # Get experiment info\n",
+ " sync_version = async_to_sync(self._ragas_api_client.get_experiment)\n",
+ " experiment_info = sync_version(\n",
+ " project_id=self.project_id,\n",
+ " experiment_id=experiment_id\n",
" )\n",
"\n",
" return Experiment(\n",
- " name=name,\n",
+ " name=experiment_info[\"name\"],\n",
" model=model,\n",
- " database_id=database_id,\n",
- " notion_backend=self._notion_backend,\n",
+ " project_id=self.project_id,\n",
+ " experiment_id=experiment_id,\n",
+ " ragas_api_client=self._ragas_api_client,\n",
" )"
]
},
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "'22bbb40c-1fc0-4a09-b26a-ccc93c8bd595'"
+ ]
+ },
+ "execution_count": null,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "exp.dataset_id"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "Project(name='SuperMe')"
+ ]
+ },
+ "execution_count": null,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "p"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "Experiment(name=just name, desc, price 2, model=TestModel)"
+ ]
+ },
+ "execution_count": null,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "p.get_experiment(exp.dataset_id, TestModel)"
+ ]
+ },
{
"cell_type": "code",
"execution_count": null,
@@ -138,7 +289,7 @@
"outputs": [],
"source": [
"# | export\n",
- "from ragas_annotator.project.naming import MemorableNames"
+ "from ragas_experimental.project.naming import MemorableNames"
]
},
{
@@ -160,7 +311,7 @@
"# | export\n",
"@patch\n",
"def experiment(\n",
- " self: Project, experiment_model: t.Type[NotionModel], name_prefix: str = \"\"\n",
+ " self: Project, experiment_model, name_prefix: str = \"\"\n",
"):\n",
" \"\"\"Decorator for creating experiment functions without Langfuse integration.\n",
"\n",
@@ -183,6 +334,8 @@
" # if name is not provided, generate a memorable name\n",
" if name is None:\n",
" name = memorable_names.generate_unique_name()\n",
+ " if name_prefix:\n",
+ " name = f\"{name_prefix}-{name}\"\n",
"\n",
" # Create tasks for all items\n",
" tasks = []\n",
@@ -210,6 +363,74 @@
" return decorator"
]
},
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# create experimental test dataset\n",
+ "test_dataset = p.create_dataset(name=\"test dataset for experiment\", model=TestModel)\n",
+ "test_dataset.append(TestModel(name=\"test item 1\", description=\"test item 1 description\", price=100))\n",
+ "test_dataset.append(TestModel(name=\"test item 2\", description=\"test item 2 description\", price=200))\n",
+ "test_dataset.append(TestModel(name=\"test item 3\", description=\"test item 3 description\", price=300))"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# create experiment model\n",
+ "class TextExperimentModel(TestModel):\n",
+ " response: str\n",
+ " is_correct: t.Literal[\"yes\", \"no\"]\n",
+ "\n",
+ "# create a test experiment function\n",
+ "@p.experiment(TextExperimentModel)\n",
+ "async def test_experiment(item: TestModel):\n",
+ " print(item)\n",
+ " return TextExperimentModel(**item.model_dump(), response=\"test response\", is_correct=\"yes\")\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "100%|██████████| 3/3 [00:00<00:00, 7752.87it/s]\n"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "name='test item 2' description='test item 2 description' price=200.0\n",
+ "name='test item 1' description='test item 1 description' price=100.0\n",
+ "name='test item 3' description='test item 3 description' price=300.0\n"
+ ]
+ },
+ {
+ "data": {
+ "text/plain": [
+ "Experiment(name=keen_backus, model=TextExperimentModel)"
+ ]
+ },
+ "execution_count": null,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "# run the experiment\n",
+ "await test_experiment.run_async(test_dataset)"
+ ]
+ },
{
"cell_type": "code",
"execution_count": null,
@@ -219,7 +440,7 @@
"# | export\n",
"@patch\n",
"def langfuse_experiment(\n",
- " self: Project, experiment_model: t.Type[NotionModel], name_prefix: str = \"\"\n",
+ " self: Project, experiment_model, name_prefix: str = \"\"\n",
"):\n",
" \"\"\"Decorator for creating experiment functions with Langfuse integration.\n",
"\n",
diff --git a/nbs/prompt/dynamic_few_shot.ipynb b/nbs/prompt/dynamic_few_shot.ipynb
index 10bb417..29e7bbf 100644
--- a/nbs/prompt/dynamic_few_shot.ipynb
+++ b/nbs/prompt/dynamic_few_shot.ipynb
@@ -28,8 +28,8 @@
"import numpy as np\n",
"from abc import ABC, abstractmethod\n",
"\n",
- "from ragas_annotator.prompt.base import Prompt\n",
- "from ragas_annotator.embedding import BaseEmbedding\n",
+ "from ragas_experimental.prompt.base import Prompt\n",
+ "from ragas_experimental.embedding import BaseEmbedding\n",
"\n",
"class ExampleStore(ABC):\n",
" @abstractmethod\n",
@@ -251,8 +251,8 @@
],
"source": [
"#| eval: false\n",
- "from ragas_annotator.embedding import ragas_embedding\n",
- "from ragas_annotator.prompt import Prompt\n",
+ "from ragas_experimental.embedding import ragas_embedding\n",
+ "from ragas_experimental.prompt import Prompt\n",
"from openai import OpenAI\n",
"\n",
"embedding = ragas_embedding(provider=\"openai\", client=OpenAI(),model=\"text-embedding-3-small\")\n",
diff --git a/nbs/typing.ipynb b/nbs/typing.ipynb
new file mode 100644
index 0000000..cbab5ff
--- /dev/null
+++ b/nbs/typing.ipynb
@@ -0,0 +1,892 @@
+{
+ "cells": [
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "#| default_exp typing"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# Typing Module\n",
+ "\n",
+    "> Field Metadata for Python's `t.Annotated`."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "#| export\n",
+ "import typing as t\n",
+ "from enum import Enum\n",
+ "from pydantic import BaseModel, create_model\n",
+ "from datetime import datetime, date\n",
+ "import inspect\n",
+ "\n",
+ "from ragas_experimental.metric.result import MetricResult"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Supported Types\n",
+ "\n",
+ "Here we have the supported types and meta-types."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "#| export\n",
+ "class ColumnType(str, Enum):\n",
+ " \"\"\"Column types supported by the Ragas API.\"\"\"\n",
+ " NUMBER = \"number\"\n",
+ " TEXT = \"text\"\n",
+ " SELECT = \"select\"\n",
+ " MULTI_SELECT = \"multiSelect\"\n",
+ " CHECKBOX = \"checkbox\"\n",
+ " DATE = \"date\"\n",
+ " CUSTOM = \"custom\"\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "#| export\n",
+ "class FieldMeta:\n",
+ " \"\"\"Base metadata for field type annotations.\"\"\"\n",
+ " def __init__(self, type, required=True, id: t.Optional[str]=None, **settings):\n",
+ " self.type = type\n",
+ " self.required = required\n",
+ " self.id = id\n",
+ " self.settings = settings.copy()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "#| export\n",
+ "class Number(FieldMeta):\n",
+ " \"\"\"Number field metadata.\"\"\"\n",
+ " def __init__(self, min_value: t.Optional[float] = None, max_value: t.Optional[float] = None, required: bool = True, id: t.Optional[str]=None):\n",
+ " settings = {}\n",
+ " if min_value is not None or max_value is not None:\n",
+ " settings[\"range\"] = {}\n",
+ " if min_value is not None:\n",
+ " settings[\"range\"][\"min\"] = min_value\n",
+ " if max_value is not None:\n",
+ " settings[\"range\"][\"max\"] = max_value\n",
+ " super().__init__(ColumnType.NUMBER, required, id, **settings)\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "#| export\n",
+ "class Text(FieldMeta):\n",
+ " \"\"\"Text field metadata.\"\"\"\n",
+ " def __init__(self, max_length: int = 1000, required: bool = True, id: t.Optional[str]=None):\n",
+ " settings = {}\n",
+ " if max_length is not None:\n",
+ " settings[\"max_length\"] = max_length\n",
+ " super().__init__(ColumnType.TEXT, required, id, **settings)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "#| export\n",
+ "class Select(FieldMeta):\n",
+ " \"\"\"Select field metadata.\"\"\"\n",
+ " def __init__(self, options: t.Optional[t.List[str]] = None, required: bool = True):\n",
+ " settings = {}\n",
+ " if options:\n",
+ " settings[\"options\"] = [{\"name\": option} for option in options]\n",
+ " super().__init__(ColumnType.SELECT, required, **settings)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "#| export\n",
+ "class MultiSelect(FieldMeta):\n",
+ " \"\"\"MultiSelect field metadata.\"\"\"\n",
+ " def __init__(self, options: t.Optional[t.List[str]] = None, required: bool = True):\n",
+ " settings = {}\n",
+ " if options:\n",
+ " settings[\"options\"] = [{\"name\": option} for option in options]\n",
+ " super().__init__(ColumnType.MULTI_SELECT, required, **settings)\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "#| export\n",
+ "class Checkbox(FieldMeta):\n",
+ " \"\"\"Checkbox field metadata.\"\"\"\n",
+ " def __init__(self, required: bool = True):\n",
+ " super().__init__(ColumnType.CHECKBOX, required)\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "#| export\n",
+ "class Date(FieldMeta):\n",
+ " \"\"\"Date field metadata.\"\"\"\n",
+ " def __init__(self, include_time: bool = False, required: bool = True):\n",
+ " settings = {}\n",
+ " if include_time:\n",
+ " settings[\"include_time\"] = include_time\n",
+ " super().__init__(ColumnType.DATE, required, **settings)\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "\n",
+ "#| export\n",
+ "class Custom(FieldMeta):\n",
+ " \"\"\"Custom field metadata.\"\"\"\n",
+ " def __init__(self, custom_type: str = \"\", required: bool = True):\n",
+ " settings = {}\n",
+ " if custom_type:\n",
+ " settings[\"type\"] = custom_type\n",
+ " super().__init__(ColumnType.CUSTOM, required, **settings)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Model Converter"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "#| export\n",
+ "class ModelConverter:\n",
+ " \"\"\"Convert Pydantic models to Ragas API columns and rows.\"\"\"\n",
+ " \n",
+ " @staticmethod\n",
+ " def infer_field_type(annotation, field_info=None):\n",
+ " \"\"\"Infer field type from Python type annotation.\"\"\"\n",
+ " # Check for Annotated with our custom metadata\n",
+ " origin = t.get_origin(annotation)\n",
+ " args = t.get_args(annotation)\n",
+ " \n",
+ " # Check if this is a MetricResult type\n",
+ " if annotation is MetricResult or (hasattr(annotation, \"__origin__\") and annotation.__origin__ is MetricResult):\n",
+ " # Default to Text since we can't determine the result type statically\n",
+ " return Text()\n",
+ " \n",
+ " # If this is an Annotated field with our metadata\n",
+ " if origin is t.Annotated and len(args) > 1:\n",
+ " for arg in args[1:]:\n",
+ " if isinstance(arg, FieldMeta):\n",
+ " return arg\n",
+ " \n",
+ " # If no field metadata found, infer from the base type\n",
+ " return ModelConverter.infer_field_type(args[0], field_info)\n",
+ " \n",
+ " # Handle Optional, List, etc.\n",
+ " if origin is t.Union:\n",
+ " if type(None) in args:\n",
+ " # This is Optional[T]\n",
+ " non_none_args = [arg for arg in args if arg is not type(None)]\n",
+ " if len(non_none_args) == 1:\n",
+ " # Get the field type of the non-None arg\n",
+ " field_meta = ModelConverter.infer_field_type(non_none_args[0], field_info)\n",
+ " field_meta.required = False\n",
+ " return field_meta\n",
+ " \n",
+ " # Handle List and array types\n",
+ " # NOTE: here we are converting lists to strings, except for literal types\n",
+ " if origin is list or origin is t.List:\n",
+ " if len(args) > 0:\n",
+ " # Check if it's a list of literals\n",
+ " if t.get_origin(args[0]) is t.Literal:\n",
+ " literal_options = t.get_args(args[0])\n",
+ " return MultiSelect(options=list(literal_options))\n",
+ " # Otherwise just a regular list\n",
+ " return Text() # Default to Text for lists\n",
+ " \n",
+ " # Handle Literal\n",
+ " if origin is t.Literal:\n",
+ " return Select(options=list(args))\n",
+ " \n",
+ " # Basic type handling\n",
+ " if annotation is str:\n",
+ " return Text()\n",
+ " elif annotation is int or annotation is float:\n",
+ " return Number()\n",
+ " elif annotation is bool:\n",
+ " return Checkbox()\n",
+ " elif annotation is datetime or annotation is date:\n",
+ " return Date(include_time=annotation is datetime)\n",
+ " \n",
+ " # Default to Text for complex or unknown types\n",
+ " return Text()\n",
+ " \n",
+ " @staticmethod\n",
+ " def infer_metric_result_type(field_value):\n",
+ " \"\"\"Infer field type from a MetricResult instance.\"\"\"\n",
+ " if field_value is None:\n",
+ " return Text()\n",
+ " \n",
+ " # Infer type based on the _result type\n",
+ " result_value = field_value._result\n",
+ " \n",
+ " if isinstance(result_value, (int, float)):\n",
+ " return Number()\n",
+ " elif isinstance(result_value, bool):\n",
+ " return Checkbox()\n",
+ " elif isinstance(result_value, (list, tuple)):\n",
+ " # For ranking metrics that return lists\n",
+ " return Text()\n",
+ " else:\n",
+ " # Default to Text for string or other types\n",
+ " return Text()\n",
+ " \n",
+ " @classmethod\n",
+ " def model_to_columns(cls, model_class):\n",
+ " \"\"\"Convert a Pydantic model class to Ragas API column definitions.\"\"\"\n",
+ " columns = []\n",
+ " for field_name, field_info in model_class.model_fields.items():\n",
+ " # Get the field's type annotation\n",
+ " annotation = field_info.annotation\n",
+ " \n",
+ " # Special handling for MetricResult fields\n",
+ " if (annotation is MetricResult or \n",
+ " (hasattr(annotation, \"__origin__\") and annotation.__origin__ is MetricResult) or\n",
+ " (hasattr(field_info, \"annotation\") and str(field_info.annotation).find(\"MetricResult\") != -1)):\n",
+ " \n",
+ " # Create column for the result value\n",
+ " field_meta = cls.infer_field_type(annotation, field_info)\n",
+ " column = {\n",
+ " \"id\": field_name,\n",
+ " \"name\": field_name,\n",
+ " \"type\": field_meta.type.value,\n",
+ " \"settings\": field_meta.settings.copy(),\n",
+ " \"editable\": True\n",
+ " }\n",
+ " columns.append(column)\n",
+ " \n",
+ " # Create additional column for the reason\n",
+ " reason_column = {\n",
+ " \"id\": f\"{field_name}_reason\",\n",
+ " \"name\": f\"{field_name}_reason\",\n",
+ " \"type\": ColumnType.TEXT.value,\n",
+ " \"settings\": Text().settings.copy(),\n",
+ " \"editable\": True\n",
+ " }\n",
+ " columns.append(reason_column)\n",
+ " else:\n",
+ " # Regular field handling\n",
+ " field_meta = cls.infer_field_type(annotation, field_info)\n",
+ " \n",
+ " column = {\n",
+ " \"id\": field_name,\n",
+ " \"name\": field_name,\n",
+ " \"type\": field_meta.type.value,\n",
+ " \"settings\": field_meta.settings.copy(),\n",
+ " \"editable\": False # Non-MetricResult fields are not editable\n",
+ " }\n",
+ " \n",
+ " columns.append(column)\n",
+ " \n",
+ " return columns\n",
+ " \n",
+ " @classmethod\n",
+ " def instance_to_row(cls, instance, model_class=None):\n",
+ " \"\"\"Convert a Pydantic model instance to a Ragas API row.\"\"\"\n",
+ " if model_class is None:\n",
+ " model_class = instance.__class__\n",
+ " \n",
+ " row_cells = []\n",
+ " model_data = instance.model_dump()\n",
+ " \n",
+ " for field_name, field_info in model_class.model_fields.items():\n",
+ " if field_name in model_data:\n",
+ " value = model_data[field_name]\n",
+ " # Get the field's type annotation\n",
+ " annotation = field_info.annotation\n",
+ " \n",
+ " # Special handling for MetricResult fields\n",
+ " if isinstance(value, MetricResult):\n",
+ " # Process the result value\n",
+ " field_meta = cls.infer_metric_result_type(value)\n",
+ " processed_value = value._result\n",
+ " \n",
+ " # Add result cell\n",
+ " row_cells.append({\n",
+ " \"column_id\": field_name,\n",
+ " \"data\": processed_value\n",
+ " })\n",
+ " \n",
+ " # Add reason cell\n",
+ " row_cells.append({\n",
+ " \"column_id\": f\"{field_name}_reason\",\n",
+ " \"data\": value.reason\n",
+ " })\n",
+ " else:\n",
+ " # Regular field handling\n",
+ " field_meta = cls.infer_field_type(annotation, field_info)\n",
+ " \n",
+ " # Special handling for various types\n",
+ " if field_meta.type == ColumnType.MULTI_SELECT and isinstance(value, list):\n",
+ " # Convert list to string format accepted by API\n",
+ " processed_value = value\n",
+ " elif field_meta.type == ColumnType.DATE and isinstance(value, (datetime, date)):\n",
+ " # Format date as string\n",
+ " processed_value = value.isoformat()\n",
+ " else:\n",
+ " processed_value = value\n",
+ " \n",
+ " row_cells.append({\n",
+ " \"column_id\": field_name,\n",
+ " \"data\": processed_value\n",
+ " })\n",
+ " \n",
+ " return {\n",
+ " \"data\": row_cells\n",
+ " }\n",
+ " \n",
+ " @classmethod\n",
+ " def instances_to_rows(cls, instances, model_class=None):\n",
+ " \"\"\"Convert multiple Pydantic model instances to Ragas API rows.\"\"\"\n",
+ " if not instances:\n",
+ " return []\n",
+ " \n",
+ " if model_class is None and instances:\n",
+ " model_class = instances[0].__class__\n",
+ " \n",
+ " return [cls.instance_to_row(instance, model_class) for instance in instances]"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "#| export\n",
+ "# class ModelConverter:\n",
+ "# \"\"\"Convert Pydantic models to Ragas API columns and rows.\"\"\"\n",
+ " \n",
+ "# @staticmethod\n",
+ "# def infer_field_type(annotation, field_info=None):\n",
+ "# \"\"\"Infer field type from Python type annotation.\"\"\"\n",
+ "# # Check for Annotated with our custom metadata\n",
+ "# origin = t.get_origin(annotation)\n",
+ "# args = t.get_args(annotation)\n",
+ " \n",
+ "# # If this is an Annotated field with our metadata\n",
+ "# if origin is t.Annotated and len(args) > 1:\n",
+ "# for arg in args[1:]:\n",
+ "# if isinstance(arg, FieldMeta):\n",
+ "# return arg\n",
+ " \n",
+ "# # If no field metadata found, infer from the base type\n",
+ "# return ModelConverter.infer_field_type(args[0], field_info)\n",
+ " \n",
+ "# # Handle Optional, List, etc.\n",
+ "# if origin is t.Union:\n",
+ "# if type(None) in args:\n",
+ "# # This is Optional[T]\n",
+ "# non_none_args = [arg for arg in args if arg is not type(None)]\n",
+ "# if len(non_none_args) == 1:\n",
+ "# # Get the field type of the non-None arg\n",
+ "# field_meta = ModelConverter.infer_field_type(non_none_args[0], field_info)\n",
+ "# field_meta.required = False\n",
+ "# return field_meta\n",
+ " \n",
+ "# # Handle List and array types\n",
+ "# # NOTE: here we are converting lists to strings, except for literal types\n",
+ "# if origin is list or origin is t.List:\n",
+ "# if len(args) > 0:\n",
+ "# # Check if it's a list of literals\n",
+ "# if t.get_origin(args[0]) is t.Literal:\n",
+ "# literal_options = t.get_args(args[0])\n",
+ "# return MultiSelect(options=list(literal_options))\n",
+ "# # Otherwise just a regular list\n",
+ "# return Text() # Default to Text for lists\n",
+ " \n",
+ "# # Handle Literal\n",
+ "# if origin is t.Literal:\n",
+ "# return Select(options=list(args))\n",
+ " \n",
+ "# # Basic type handling\n",
+ "# if annotation is str:\n",
+ "# return Text()\n",
+ "# elif annotation is int or annotation is float:\n",
+ "# return Number()\n",
+ "# elif annotation is bool:\n",
+ "# return Checkbox()\n",
+ "# elif annotation is datetime or annotation is date:\n",
+ "# return Date(include_time=annotation is datetime)\n",
+ " \n",
+ "# # Default to Text for complex or unknown types\n",
+ "# return Text()\n",
+ " \n",
+ "# @classmethod\n",
+ "# def model_to_columns(cls, model_class):\n",
+ "# \"\"\"Convert a Pydantic model class to Ragas API column definitions.\"\"\"\n",
+ "# columns = []\n",
+ "# for field_name, field_info in model_class.model_fields.items():\n",
+ "# # Get the field's type annotation\n",
+ "# annotation = field_info.annotation\n",
+ " \n",
+ "# # Try to get field metadata\n",
+ "# field_meta = cls.infer_field_type(annotation, field_info)\n",
+ " \n",
+ "# # Create column definition\n",
+ "# column = {\n",
+ "# \"id\": field_name,\n",
+ "# \"name\": field_name,\n",
+ "# \"type\": field_meta.type.value,\n",
+ "# \"settings\": field_meta.settings.copy()\n",
+ "# }\n",
+ " \n",
+ "# columns.append(column)\n",
+ " \n",
+ "# return columns\n",
+ " \n",
+ "# @classmethod\n",
+ "# def instance_to_row(cls, instance, model_class=None):\n",
+ "# \"\"\"Convert a Pydantic model instance to a Ragas API row.\"\"\"\n",
+ "# if model_class is None:\n",
+ "# model_class = instance.__class__\n",
+ " \n",
+ "# row_cells = []\n",
+ "# model_data = instance.model_dump()\n",
+ " \n",
+ "# for field_name, field_info in model_class.model_fields.items():\n",
+ "# if field_name in model_data:\n",
+ "# value = model_data[field_name]\n",
+ "# # Process value based on field type\n",
+ "# annotation = field_info.annotation\n",
+ "# field_meta = cls.infer_field_type(annotation, field_info)\n",
+ " \n",
+ "# # Special handling for various types\n",
+ "# if field_meta.type == ColumnType.MULTI_SELECT and isinstance(value, list):\n",
+ "# # Convert list to string format accepted by API\n",
+ "# processed_value = value\n",
+ "# elif field_meta.type == ColumnType.DATE and isinstance(value, (datetime, date)):\n",
+ "# # Format date as string\n",
+ "# processed_value = value.isoformat()\n",
+ "# else:\n",
+ "# processed_value = value\n",
+ " \n",
+ "# row_cells.append({\n",
+ "# \"column_id\": field_name,\n",
+ "# \"data\": processed_value\n",
+ "# })\n",
+ " \n",
+ "# return {\n",
+ "# \"data\": row_cells\n",
+ "# }\n",
+ " \n",
+ "# @classmethod\n",
+ "# def instances_to_rows(cls, instances, model_class=None):\n",
+ "# \"\"\"Convert multiple Pydantic model instances to Ragas API rows.\"\"\"\n",
+ "# if not instances:\n",
+ "# return []\n",
+ " \n",
+ "# if model_class is None and instances:\n",
+ "# model_class = instances[0].__class__\n",
+ " \n",
+ "# return [cls.instance_to_row(instance, model_class) for instance in instances]"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Let's test the ModelConverter\n",
+ "# First, let's define a test model with various field types\n",
+ "class TestModel(BaseModel):\n",
+ " # Basic types\n",
+ " id: int\n",
+ " name: str\n",
+ " is_active: bool\n",
+ " created_at: datetime\n",
+ " \n",
+ " # Optional fields\n",
+ " optional_text: t.Optional[str] = None\n",
+ " \n",
+ " # Lists\n",
+ " tags: t.List[str] = []\n",
+ " \n",
+ " # Literal types\n",
+ " status: t.Literal[\"pending\", \"active\", \"completed\"] = \"pending\"\n",
+ " \n",
+ " # Annotated types with our field metadata\n",
+ " score: t.Annotated[float, Number(min_value=0, max_value=100)]\n",
+ " description: t.Annotated[str, Text(max_length=500)]\n",
+ " category: t.Annotated[t.Literal[\"A\", \"B\", \"C\"], Select(options=[\"A\", \"B\", \"C\"])]\n",
+ " features: t.Annotated[t.List[str], MultiSelect(options=[\"feature1\", \"feature2\", \"feature3\"])]\n",
+ " \n",
+ "# Now let's create some test instances\n",
+ "test_instances = [\n",
+ " TestModel(\n",
+ " id=1,\n",
+ " name=\"Test Item 1\",\n",
+ " is_active=True,\n",
+ " created_at=datetime.now(),\n",
+ " score=85.5,\n",
+ " description=\"This is a test description for item 1\",\n",
+ " category=\"A\",\n",
+ " features=[\"feature1\", \"feature3\"],\n",
+ " tags=[\"tag1\", \"tag2\"],\n",
+ " status=\"active\"\n",
+ " ),\n",
+ " TestModel(\n",
+ " id=2,\n",
+ " name=\"Test Item 2\",\n",
+ " is_active=False,\n",
+ " created_at=datetime.now(),\n",
+ " optional_text=\"This is optional\",\n",
+ " score=42.0,\n",
+ " description=\"A shorter description\",\n",
+ " category=\"B\",\n",
+ " features=[\"feature2\"],\n",
+ " status=\"completed\"\n",
+ " )\n",
+ "]\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Testing model_to_columns:\n"
+ ]
+ },
+ {
+ "ename": "ImportError",
+ "evalue": "cannot import name 'Project' from partially initialized module 'ragas_experimental.project.core' (most likely due to a circular import) (/Users/jjmachan/workspace/eglabs/ragas_experimental/ragas_experimental/project/core.py)",
+ "output_type": "error",
+ "traceback": [
+ "\u001b[31m---------------------------------------------------------------------------\u001b[39m",
+ "\u001b[31mImportError\u001b[39m Traceback (most recent call last)",
+ "\u001b[36mCell\u001b[39m\u001b[36m \u001b[39m\u001b[32mIn[15]\u001b[39m\u001b[32m, line 3\u001b[39m\n\u001b[32m 1\u001b[39m \u001b[38;5;66;03m# Test the model_to_columns method\u001b[39;00m\n\u001b[32m 2\u001b[39m \u001b[38;5;28mprint\u001b[39m(\u001b[33m\"\u001b[39m\u001b[33mTesting model_to_columns:\u001b[39m\u001b[33m\"\u001b[39m)\n\u001b[32m----> \u001b[39m\u001b[32m3\u001b[39m columns = \u001b[43mModelConverter\u001b[49m\u001b[43m.\u001b[49m\u001b[43mmodel_to_columns\u001b[49m\u001b[43m(\u001b[49m\u001b[43mTestModel\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 4\u001b[39m \u001b[38;5;28;01mfor\u001b[39;00m col \u001b[38;5;129;01min\u001b[39;00m columns:\n\u001b[32m 5\u001b[39m \u001b[38;5;28mprint\u001b[39m(\u001b[33mf\u001b[39m\u001b[33m\"\u001b[39m\u001b[33m- \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mcol[\u001b[33m'\u001b[39m\u001b[33mname\u001b[39m\u001b[33m'\u001b[39m]\u001b[38;5;132;01m}\u001b[39;00m\u001b[33m (\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mcol[\u001b[33m'\u001b[39m\u001b[33mtype\u001b[39m\u001b[33m'\u001b[39m]\u001b[38;5;132;01m}\u001b[39;00m\u001b[33m): \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mcol[\u001b[33m'\u001b[39m\u001b[33msettings\u001b[39m\u001b[33m'\u001b[39m]\u001b[38;5;132;01m}\u001b[39;00m\u001b[33m\"\u001b[39m)\n",
+ "\u001b[36mCell\u001b[39m\u001b[36m \u001b[39m\u001b[32mIn[12]\u001b[39m\u001b[32m, line 90\u001b[39m, in \u001b[36mModelConverter.model_to_columns\u001b[39m\u001b[34m(cls, model_class)\u001b[39m\n\u001b[32m 87\u001b[39m \u001b[38;5;129m@classmethod\u001b[39m\n\u001b[32m 88\u001b[39m \u001b[38;5;28;01mdef\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[34mmodel_to_columns\u001b[39m(\u001b[38;5;28mcls\u001b[39m, model_class):\n\u001b[32m 89\u001b[39m \u001b[38;5;250m \u001b[39m\u001b[33;03m\"\"\"Convert a Pydantic model class to Ragas API column definitions.\"\"\"\u001b[39;00m\n\u001b[32m---> \u001b[39m\u001b[32m90\u001b[39m \u001b[38;5;28;01mfrom\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[34;01mragas_experimental\u001b[39;00m\u001b[34;01m.\u001b[39;00m\u001b[34;01mmetric\u001b[39;00m\u001b[34;01m.\u001b[39;00m\u001b[34;01mresult\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[38;5;28;01mimport\u001b[39;00m MetricResult\n\u001b[32m 92\u001b[39m columns = []\n\u001b[32m 93\u001b[39m \u001b[38;5;28;01mfor\u001b[39;00m field_name, field_info \u001b[38;5;129;01min\u001b[39;00m model_class.model_fields.items():\n\u001b[32m 94\u001b[39m \u001b[38;5;66;03m# Get the field's type annotation\u001b[39;00m\n",
+ "\u001b[36mFile \u001b[39m\u001b[32m~/workspace/eglabs/ragas_experimental/ragas_experimental/__init__.py:8\u001b[39m\n\u001b[32m 5\u001b[39m __all__ = []\n\u001b[32m 7\u001b[39m \u001b[38;5;66;03m# %% ../nbs/init_module.ipynb 2\u001b[39;00m\n\u001b[32m----> \u001b[39m\u001b[32m8\u001b[39m \u001b[38;5;28;01mfrom\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[34;01m.\u001b[39;00m\u001b[34;01mproject\u001b[39;00m\u001b[34;01m.\u001b[39;00m\u001b[34;01mcore\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[38;5;28;01mimport\u001b[39;00m Project\n\u001b[32m 9\u001b[39m \u001b[38;5;28;01mimport\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[34;01mragas_experimental\u001b[39;00m\u001b[34;01m.\u001b[39;00m\u001b[34;01mmodel\u001b[39;00m\u001b[34;01m.\u001b[39;00m\u001b[34;01mnotion_typing\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[38;5;28;01mas\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[34;01mnmt\u001b[39;00m\n\u001b[32m 10\u001b[39m \u001b[38;5;28;01mfrom\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[34;01m.\u001b[39;00m\u001b[34;01mmodel\u001b[39;00m\u001b[34;01m.\u001b[39;00m\u001b[34;01mnotion_model\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[38;5;28;01mimport\u001b[39;00m NotionModel\n",
+ "\u001b[36mFile \u001b[39m\u001b[32m~/workspace/eglabs/ragas_experimental/ragas_experimental/project/core.py:18\u001b[39m\n\u001b[32m 16\u001b[39m \u001b[38;5;28;01mfrom\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[34;01m.\u001b[39;00m\u001b[34;01m.\u001b[39;00m\u001b[34;01mbackends\u001b[39;00m\u001b[34;01m.\u001b[39;00m\u001b[34;01mfactory\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[38;5;28;01mimport\u001b[39;00m RagasApiClientFactory\n\u001b[32m 17\u001b[39m \u001b[38;5;28;01mfrom\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[34;01m.\u001b[39;00m\u001b[34;01m.\u001b[39;00m\u001b[34;01mbackends\u001b[39;00m\u001b[34;01m.\u001b[39;00m\u001b[34;01mragas_api_client\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[38;5;28;01mimport\u001b[39;00m RagasApiClient\n\u001b[32m---> \u001b[39m\u001b[32m18\u001b[39m \u001b[38;5;28;01mimport\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[34;01mragas_experimental\u001b[39;00m\u001b[34;01m.\u001b[39;00m\u001b[34;01mtyping\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[38;5;28;01mas\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[34;01mrt\u001b[39;00m\n\u001b[32m 19\u001b[39m \u001b[38;5;28;01mfrom\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[34;01m.\u001b[39;00m\u001b[34;01m.\u001b[39;00m\u001b[34;01mutils\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[38;5;28;01mimport\u001b[39;00m async_to_sync, create_nano_id\n\u001b[32m 20\u001b[39m \u001b[38;5;28;01mfrom\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[34;01m.\u001b[39;00m\u001b[34;01m.\u001b[39;00m\u001b[34;01mdataset\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[38;5;28;01mimport\u001b[39;00m Dataset\n",
+ "\u001b[36mFile \u001b[39m\u001b[32m~/workspace/eglabs/ragas_experimental/ragas_experimental/typing.py:15\u001b[39m\n\u001b[32m 12\u001b[39m \u001b[38;5;28;01mfrom\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[34;01mdatetime\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[38;5;28;01mimport\u001b[39;00m datetime, date\n\u001b[32m 13\u001b[39m \u001b[38;5;28;01mimport\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[34;01minspect\u001b[39;00m\n\u001b[32m---> \u001b[39m\u001b[32m15\u001b[39m \u001b[38;5;28;01mfrom\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[34;01m.\u001b[39;00m\u001b[34;01mmetric\u001b[39;00m\u001b[34;01m.\u001b[39;00m\u001b[34;01mresult\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[38;5;28;01mimport\u001b[39;00m MetricResult\n\u001b[32m 17\u001b[39m \u001b[38;5;66;03m# %% ../nbs/typing.ipynb 4\u001b[39;00m\n\u001b[32m 18\u001b[39m \u001b[38;5;28;01mclass\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[34;01mColumnType\u001b[39;00m(\u001b[38;5;28mstr\u001b[39m, Enum):\n",
+ "\u001b[36mFile \u001b[39m\u001b[32m~/workspace/eglabs/ragas_experimental/ragas_experimental/metric/__init__.py:2\u001b[39m\n\u001b[32m 1\u001b[39m \u001b[38;5;28;01mfrom\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[34;01mragas_experimental\u001b[39;00m\u001b[34;01m.\u001b[39;00m\u001b[34;01mmetric\u001b[39;00m\u001b[34;01m.\u001b[39;00m\u001b[34;01mresult\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[38;5;28;01mimport\u001b[39;00m MetricResult\n\u001b[32m----> \u001b[39m\u001b[32m2\u001b[39m \u001b[38;5;28;01mfrom\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[34;01mragas_experimental\u001b[39;00m\u001b[34;01m.\u001b[39;00m\u001b[34;01mmetric\u001b[39;00m\u001b[34;01m.\u001b[39;00m\u001b[34;01mbase\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[38;5;28;01mimport\u001b[39;00m Metric\n\u001b[32m 3\u001b[39m \u001b[38;5;28;01mfrom\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[34;01mragas_experimental\u001b[39;00m\u001b[34;01m.\u001b[39;00m\u001b[34;01mmetric\u001b[39;00m\u001b[34;01m.\u001b[39;00m\u001b[34;01mdiscrete\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[38;5;28;01mimport\u001b[39;00m DiscreteMetric\n\u001b[32m 4\u001b[39m \u001b[38;5;28;01mfrom\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[34;01mragas_experimental\u001b[39;00m\u001b[34;01m.\u001b[39;00m\u001b[34;01mmetric\u001b[39;00m\u001b[34;01m.\u001b[39;00m\u001b[34;01mnumeric\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[38;5;28;01mimport\u001b[39;00m NumericMetric\n",
+ "\u001b[36mFile \u001b[39m\u001b[32m~/workspace/eglabs/ragas_experimental/ragas_experimental/metric/base.py:22\u001b[39m\n\u001b[32m 20\u001b[39m \u001b[38;5;28;01mfrom\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[34;01m.\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[38;5;28;01mimport\u001b[39;00m MetricResult\n\u001b[32m 21\u001b[39m \u001b[38;5;28;01mfrom\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[34;01m.\u001b[39;00m\u001b[34;01m.\u001b[39;00m\u001b[34;01mllm\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[38;5;28;01mimport\u001b[39;00m RagasLLM\n\u001b[32m---> \u001b[39m\u001b[32m22\u001b[39m \u001b[38;5;28;01mfrom\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[34;01m.\u001b[39;00m\u001b[34;01m.\u001b[39;00m\u001b[34;01mproject\u001b[39;00m\u001b[34;01m.\u001b[39;00m\u001b[34;01mcore\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[38;5;28;01mimport\u001b[39;00m Project\n\u001b[32m 23\u001b[39m \u001b[38;5;28;01mfrom\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[34;01m.\u001b[39;00m\u001b[34;01m.\u001b[39;00m\u001b[34;01mmodel\u001b[39;00m\u001b[34;01m.\u001b[39;00m\u001b[34;01mnotion_model\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[38;5;28;01mimport\u001b[39;00m NotionModel\n\u001b[32m 24\u001b[39m \u001b[38;5;28;01mfrom\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[34;01m.\u001b[39;00m\u001b[34;01m.\u001b[39;00m\u001b[34;01mprompt\u001b[39;00m\u001b[34;01m.\u001b[39;00m\u001b[34;01mdynamic_few_shot\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[38;5;28;01mimport\u001b[39;00m DynamicFewShotPrompt\n",
+ "\u001b[31mImportError\u001b[39m: cannot import name 'Project' from partially initialized module 'ragas_experimental.project.core' (most likely due to a circular import) (/Users/jjmachan/workspace/eglabs/ragas_experimental/ragas_experimental/project/core.py)"
+ ]
+ }
+ ],
+ "source": [
+ "# Test the model_to_columns method\n",
+ "print(\"Testing model_to_columns:\")\n",
+ "columns = ModelConverter.model_to_columns(TestModel)\n",
+ "for col in columns:\n",
+ " print(f\"- {col['name']} ({col['type']}): {col['settings']}\")\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "\n",
+ "Testing instance_to_row:\n",
+ "- id: 1\n",
+ "- name: Test Item 1\n",
+ "- is_active: True\n",
+ "- created_at: 2025-04-14T17:36:24.492518\n",
+ "- optional_text: None\n",
+ "- tags: ['tag1', 'tag2']\n",
+ "- status: active\n",
+ "- score: 85.5\n",
+ "- description: This is a test description for item 1\n",
+ "- category: A\n",
+ "- features: ['feature1', 'feature3']\n"
+ ]
+ }
+ ],
+ "source": [
+ "\n",
+ "print(\"\\nTesting instance_to_row:\")\n",
+ "# Test the instance_to_row method\n",
+ "row = ModelConverter.instance_to_row(test_instances[0])\n",
+ "for cell in row[\"data\"]:\n",
+ " print(f\"- {cell['column_id']}: {cell['data']}\")\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "\n",
+ "Testing instances_to_rows:\n",
+ "Generated 2 rows\n",
+ "\n",
+ "Testing type inference:\n",
+ "- : number (Required: True)\n",
+ "- : text (Required: True)\n",
+ "- : checkbox (Required: True)\n",
+ "- : date (Required: True)\n",
+ "- typing.Optional[str]: text (Required: False)\n",
+ "- typing.List[str]: text (Required: True)\n",
+ "- typing.Literal['a', 'b']: select (Required: True)\n",
+ " - Options: ['a', 'b']\n",
+ "- typing.List[typing.Literal['x', 'y']]: multiSelect (Required: True)\n",
+ " - Options: ['x', 'y']\n",
+ "- typing.Annotated[int, <__main__.Number object>]: number (Required: True)\n"
+ ]
+ }
+ ],
+ "source": [
+ "\n",
+ "print(\"\\nTesting instances_to_rows:\")\n",
+ "# Test the instances_to_rows method\n",
+ "rows = ModelConverter.instances_to_rows(test_instances)\n",
+ "print(f\"Generated {len(rows)} rows\")\n",
+ "\n",
+ "# Test type inference\n",
+ "print(\"\\nTesting type inference:\")\n",
+ "types = [\n",
+ " (int, \"Number\"),\n",
+ " (str, \"Text\"),\n",
+ " (bool, \"Checkbox\"),\n",
+ " (datetime, \"Date\"),\n",
+ " (t.Optional[str], \"Text (not required)\"),\n",
+ " (t.List[str], \"Text\"),\n",
+ " (t.Literal[\"a\", \"b\"], \"Select\"),\n",
+ " (t.List[t.Literal[\"x\", \"y\"]], \"MultiSelect\"),\n",
+ " (t.Annotated[int, Number(min_value=10)], \"Number with min=10\")\n",
+ "]\n",
+ "\n",
+ "for annotation, expected in types:\n",
+ " field_meta = ModelConverter.infer_field_type(annotation)\n",
+ " print(f\"- {annotation}: {field_meta.type.value} (Required: {field_meta.required})\")\n",
+ " if hasattr(field_meta, \"min_value\") and field_meta.min_value is not None:\n",
+ " print(f\" - Min value: {field_meta.min_value}\")\n",
+ " if \"options\" in field_meta.settings:\n",
+ " print(f\" - Options: {[opt['name'] for opt in field_meta.settings['options']]}\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Unit Tests"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "[{'id': 'id', 'name': 'id', 'type': 'number', 'settings': {}, 'editable': False}, {'id': 'name', 'name': 'name', 'type': 'text', 'settings': {'max_length': 1000}, 'editable': False}, {'id': 'created_at', 'name': 'created_at', 'type': 'date', 'settings': {'include_time': True}, 'editable': False}]\n"
+ ]
+ }
+ ],
+ "source": [
+ "# 1. Test the actual implementation with basic fields\n",
+ "class BasicModel(BaseModel):\n",
+ " id: int\n",
+ " name: str\n",
+ " created_at: datetime\n",
+ " \n",
+ " # No custom metadata - just test basic type inference\n",
+ "\n",
+ "# 2. Test with selective metadata where it works\n",
+ "class PartialMetadataModel(BaseModel):\n",
+ " # Only use metadata that works with your current implementation\n",
+ " status: t.Literal[\"pending\", \"active\", \"completed\"] # Test literal type\n",
+ "\n",
+ "# 3. Document current limitations\n",
+ "# print(\"Note: Currently, using complex metadata classes directly in t.Annotated causes validation issues\")\n",
+ "\n",
+ "# Test the actual implementation\n",
+ "def test_basic_type_inference():\n",
+ " \"\"\"Test basic type inference without custom metadata.\"\"\"\n",
+ " # Get column definitions\n",
+ " columns = ModelConverter.model_to_columns(BasicModel)\n",
+ " print(columns)\n",
+ " \n",
+ " # Find columns and check their properties\n",
+ " id_col = next((c for c in columns if c[\"id\"] == \"id\"), None)\n",
+ " assert id_col is not None, \"id column should exist\"\n",
+ " assert id_col[\"type\"] == ColumnType.NUMBER.value, \"id should map to NUMBER type\"\n",
+ " \n",
+ " name_col = next((c for c in columns if c[\"id\"] == \"name\"), None)\n",
+ " assert name_col is not None, \"name column should exist\"\n",
+ " assert name_col[\"type\"] == ColumnType.TEXT.value, \"name should map to TEXT type\"\n",
+ " \n",
+ " date_col = next((c for c in columns if c[\"id\"] == \"created_at\"), None)\n",
+ " assert date_col is not None, \"created_at column should exist\"\n",
+ " assert date_col[\"type\"] == ColumnType.DATE.value, \"datetime should map to DATE type\"\n",
+ "\n",
+ "# Run the tests with what actually works\n",
+ "test_basic_type_inference()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## `MetricResult`"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "class ModelWithResult(BaseModel):\n",
+ " query: str\n",
+ " response: str\n",
+ " score: MetricResult"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "[{'id': 'query',\n",
+ " 'name': 'query',\n",
+ " 'type': 'text',\n",
+ " 'settings': {'max_length': 1000},\n",
+ " 'editable': False},\n",
+ " {'id': 'response',\n",
+ " 'name': 'response',\n",
+ " 'type': 'text',\n",
+ " 'settings': {'max_length': 1000},\n",
+ " 'editable': False},\n",
+ " {'id': 'score',\n",
+ " 'name': 'score',\n",
+ " 'type': 'text',\n",
+ " 'settings': {'max_length': 1000},\n",
+ " 'editable': True},\n",
+ " {'id': 'score_reason',\n",
+ " 'name': 'score_reason',\n",
+ " 'type': 'text',\n",
+ " 'settings': {'max_length': 1000},\n",
+ " 'editable': True}]"
+ ]
+ },
+ "execution_count": null,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "columns = ModelConverter.model_to_columns(ModelWithResult)\n",
+ "columns"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "{'data': [{'column_id': 'query', 'data': 'test'},\n",
+ " {'column_id': 'response', 'data': 'test'},\n",
+ " {'column_id': 'score', 'data': 1},\n",
+ " {'column_id': 'score_reason', 'data': 'test'}]}"
+ ]
+ },
+ "execution_count": null,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "m = ModelWithResult(query=\"test\", response=\"test\", score=MetricResult(result=1, reason=\"test\"))\n",
+ "ModelConverter.instance_to_row(m)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": []
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "python3",
+ "language": "python",
+ "name": "python3"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/nbs/utils.ipynb b/nbs/utils.ipynb
new file mode 100644
index 0000000..8a2a2b8
--- /dev/null
+++ b/nbs/utils.ipynb
@@ -0,0 +1,91 @@
+{
+ "cells": [
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "#| default_exp utils"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "#| export\n",
+ "import string\n",
+ "import uuid\n",
+ "import functools\n",
+ "import asyncio"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "#| export\n",
+ "def create_nano_id(size=12):\n",
+ " # Define characters to use (alphanumeric)\n",
+ " alphabet = string.ascii_letters + string.digits\n",
+ " \n",
+ " # Generate UUID and convert to int\n",
+ " uuid_int = uuid.uuid4().int\n",
+ " \n",
+ " # Convert to base62\n",
+ " result = \"\"\n",
+ " while uuid_int:\n",
+ " uuid_int, remainder = divmod(uuid_int, len(alphabet))\n",
+ " result = alphabet[remainder] + result\n",
+ " \n",
+ " # Pad if necessary and return desired length\n",
+ " return result[:size]"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# | export\n",
+ "def async_to_sync(async_func):\n",
+ " \"\"\"Convert an async function to a sync function\"\"\"\n",
+ " @functools.wraps(async_func)\n",
+ " def sync_wrapper(*args, **kwargs):\n",
+ " try:\n",
+ " loop = asyncio.get_event_loop()\n",
+ " if loop.is_running():\n",
+ " import concurrent.futures\n",
+ " with concurrent.futures.ThreadPoolExecutor() as executor:\n",
+ " future = executor.submit(asyncio.run, async_func(*args, **kwargs))\n",
+ " return future.result()\n",
+ " else:\n",
+ " return loop.run_until_complete(async_func(*args, **kwargs))\n",
+ " except RuntimeError:\n",
+ " return asyncio.run(async_func(*args, **kwargs))\n",
+ " return sync_wrapper"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": []
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "python3",
+ "language": "python",
+ "name": "python3"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/ragas_annotator/_modidx.py b/ragas_annotator/_modidx.py
deleted file mode 100644
index 09bd5f7..0000000
--- a/ragas_annotator/_modidx.py
+++ /dev/null
@@ -1,619 +0,0 @@
-# Autogenerated by nbdev
-
-d = { 'settings': { 'branch': 'main',
- 'doc_baseurl': '/ragas_annotator',
- 'doc_host': 'https://explodinggradients.github.io',
- 'git_url': 'https://github.com/explodinggradients/ragas_annotator',
- 'lib_path': 'ragas_annotator'},
- 'syms': { 'ragas_annotator.backends.factory': { 'ragas_annotator.backends.factory.NotionBackendFactory': ( 'backends/factory.html#notionbackendfactory',
- 'ragas_annotator/backends/factory.py'),
- 'ragas_annotator.backends.factory.NotionBackendFactory.create': ( 'backends/factory.html#notionbackendfactory.create',
- 'ragas_annotator/backends/factory.py'),
- 'ragas_annotator.backends.factory.NotionClientFactory': ( 'backends/factory.html#notionclientfactory',
- 'ragas_annotator/backends/factory.py'),
- 'ragas_annotator.backends.factory.NotionClientFactory.create': ( 'backends/factory.html#notionclientfactory.create',
- 'ragas_annotator/backends/factory.py')},
- 'ragas_annotator.backends.mock_notion': { 'ragas_annotator.backends.mock_notion.MockBlockChildrenAPI': ( 'backends/mock_notion_client.html#mockblockchildrenapi',
- 'ragas_annotator/backends/mock_notion.py'),
- 'ragas_annotator.backends.mock_notion.MockBlockChildrenAPI.__init__': ( 'backends/mock_notion_client.html#mockblockchildrenapi.__init__',
- 'ragas_annotator/backends/mock_notion.py'),
- 'ragas_annotator.backends.mock_notion.MockBlockChildrenAPI.list': ( 'backends/mock_notion_client.html#mockblockchildrenapi.list',
- 'ragas_annotator/backends/mock_notion.py'),
- 'ragas_annotator.backends.mock_notion.MockBlocksAPI': ( 'backends/mock_notion_client.html#mockblocksapi',
- 'ragas_annotator/backends/mock_notion.py'),
- 'ragas_annotator.backends.mock_notion.MockBlocksAPI.__init__': ( 'backends/mock_notion_client.html#mockblocksapi.__init__',
- 'ragas_annotator/backends/mock_notion.py'),
- 'ragas_annotator.backends.mock_notion.MockBlocksAPI.retrieve': ( 'backends/mock_notion_client.html#mockblocksapi.retrieve',
- 'ragas_annotator/backends/mock_notion.py'),
- 'ragas_annotator.backends.mock_notion.MockDatabasesAPI': ( 'backends/mock_notion_client.html#mockdatabasesapi',
- 'ragas_annotator/backends/mock_notion.py'),
- 'ragas_annotator.backends.mock_notion.MockDatabasesAPI.__init__': ( 'backends/mock_notion_client.html#mockdatabasesapi.__init__',
- 'ragas_annotator/backends/mock_notion.py'),
- 'ragas_annotator.backends.mock_notion.MockDatabasesAPI._extract_title': ( 'backends/mock_notion_client.html#mockdatabasesapi._extract_title',
- 'ragas_annotator/backends/mock_notion.py'),
- 'ragas_annotator.backends.mock_notion.MockDatabasesAPI.create': ( 'backends/mock_notion_client.html#mockdatabasesapi.create',
- 'ragas_annotator/backends/mock_notion.py'),
- 'ragas_annotator.backends.mock_notion.MockDatabasesAPI.query': ( 'backends/mock_notion_client.html#mockdatabasesapi.query',
- 'ragas_annotator/backends/mock_notion.py'),
- 'ragas_annotator.backends.mock_notion.MockDatabasesAPI.retrieve': ( 'backends/mock_notion_client.html#mockdatabasesapi.retrieve',
- 'ragas_annotator/backends/mock_notion.py'),
- 'ragas_annotator.backends.mock_notion.MockNotionClient': ( 'backends/mock_notion_client.html#mocknotionclient',
- 'ragas_annotator/backends/mock_notion.py'),
- 'ragas_annotator.backends.mock_notion.MockNotionClient.__init__': ( 'backends/mock_notion_client.html#mocknotionclient.__init__',
- 'ragas_annotator/backends/mock_notion.py'),
- 'ragas_annotator.backends.mock_notion.MockNotionClient.__str__': ( 'backends/mock_notion_client.html#mocknotionclient.__str__',
- 'ragas_annotator/backends/mock_notion.py'),
- 'ragas_annotator.backends.mock_notion.MockNotionClient._create_id': ( 'backends/mock_notion_client.html#mocknotionclient._create_id',
- 'ragas_annotator/backends/mock_notion.py'),
- 'ragas_annotator.backends.mock_notion.MockNotionClient._get_timestamp': ( 'backends/mock_notion_client.html#mocknotionclient._get_timestamp',
- 'ragas_annotator/backends/mock_notion.py'),
- 'ragas_annotator.backends.mock_notion.MockNotionClient.add_block': ( 'backends/mock_notion_client.html#mocknotionclient.add_block',
- 'ragas_annotator/backends/mock_notion.py'),
- 'ragas_annotator.backends.mock_notion.MockNotionClient.add_children': ( 'backends/mock_notion_client.html#mocknotionclient.add_children',
- 'ragas_annotator/backends/mock_notion.py'),
- 'ragas_annotator.backends.mock_notion.MockNotionClient.add_database': ( 'backends/mock_notion_client.html#mocknotionclient.add_database',
- 'ragas_annotator/backends/mock_notion.py'),
- 'ragas_annotator.backends.mock_notion.MockNotionClient.add_page': ( 'backends/mock_notion_client.html#mocknotionclient.add_page',
- 'ragas_annotator/backends/mock_notion.py'),
- 'ragas_annotator.backends.mock_notion.MockPagesAPI': ( 'backends/mock_notion_client.html#mockpagesapi',
- 'ragas_annotator/backends/mock_notion.py'),
- 'ragas_annotator.backends.mock_notion.MockPagesAPI.__init__': ( 'backends/mock_notion_client.html#mockpagesapi.__init__',
- 'ragas_annotator/backends/mock_notion.py'),
- 'ragas_annotator.backends.mock_notion.MockPagesAPI._extract_title': ( 'backends/mock_notion_client.html#mockpagesapi._extract_title',
- 'ragas_annotator/backends/mock_notion.py'),
- 'ragas_annotator.backends.mock_notion.MockPagesAPI.create': ( 'backends/mock_notion_client.html#mockpagesapi.create',
- 'ragas_annotator/backends/mock_notion.py'),
- 'ragas_annotator.backends.mock_notion.MockPagesAPI.retrieve': ( 'backends/mock_notion_client.html#mockpagesapi.retrieve',
- 'ragas_annotator/backends/mock_notion.py'),
- 'ragas_annotator.backends.mock_notion.MockPagesAPI.update': ( 'backends/mock_notion_client.html#mockpagesapi.update',
- 'ragas_annotator/backends/mock_notion.py')},
- 'ragas_annotator.backends.notion_backend': { 'ragas_annotator.backends.notion_backend.NotionBackend': ( 'backends/notion.html#notionbackend',
- 'ragas_annotator/backends/notion_backend.py'),
- 'ragas_annotator.backends.notion_backend.NotionBackend.__init__': ( 'backends/notion.html#notionbackend.__init__',
- 'ragas_annotator/backends/notion_backend.py'),
- 'ragas_annotator.backends.notion_backend.NotionBackend.__repr__': ( 'backends/notion.html#notionbackend.__repr__',
- 'ragas_annotator/backends/notion_backend.py'),
- 'ragas_annotator.backends.notion_backend.NotionBackend.create_new_database': ( 'backends/notion.html#notionbackend.create_new_database',
- 'ragas_annotator/backends/notion_backend.py'),
- 'ragas_annotator.backends.notion_backend.NotionBackend.create_new_page': ( 'backends/notion.html#notionbackend.create_new_page',
- 'ragas_annotator/backends/notion_backend.py'),
- 'ragas_annotator.backends.notion_backend.NotionBackend.create_page_in_database': ( 'backends/notion.html#notionbackend.create_page_in_database',
- 'ragas_annotator/backends/notion_backend.py'),
- 'ragas_annotator.backends.notion_backend.NotionBackend.get_database': ( 'backends/notion.html#notionbackend.get_database',
- 'ragas_annotator/backends/notion_backend.py'),
- 'ragas_annotator.backends.notion_backend.NotionBackend.get_database_id': ( 'backends/notion.html#notionbackend.get_database_id',
- 'ragas_annotator/backends/notion_backend.py'),
- 'ragas_annotator.backends.notion_backend.NotionBackend.get_page_id': ( 'backends/notion.html#notionbackend.get_page_id',
- 'ragas_annotator/backends/notion_backend.py'),
- 'ragas_annotator.backends.notion_backend.NotionBackend.page_exists': ( 'backends/notion.html#notionbackend.page_exists',
- 'ragas_annotator/backends/notion_backend.py'),
- 'ragas_annotator.backends.notion_backend.NotionBackend.query_database': ( 'backends/notion.html#notionbackend.query_database',
- 'ragas_annotator/backends/notion_backend.py'),
- 'ragas_annotator.backends.notion_backend.NotionBackend.update_page': ( 'backends/notion.html#notionbackend.update_page',
- 'ragas_annotator/backends/notion_backend.py'),
- 'ragas_annotator.backends.notion_backend.NotionBackend.validate_project_structure': ( 'backends/notion.html#notionbackend.validate_project_structure',
- 'ragas_annotator/backends/notion_backend.py'),
- 'ragas_annotator.backends.notion_backend.get_database_id': ( 'backends/notion.html#get_database_id',
- 'ragas_annotator/backends/notion_backend.py'),
- 'ragas_annotator.backends.notion_backend.get_page_id': ( 'backends/notion.html#get_page_id',
- 'ragas_annotator/backends/notion_backend.py')},
- 'ragas_annotator.backends.ragas_api_client': { 'ragas_annotator.backends.ragas_api_client.Column': ( 'backends/ragas_api_client.html#column',
- 'ragas_annotator/backends/ragas_api_client.py'),
- 'ragas_annotator.backends.ragas_api_client.ColumnType': ( 'backends/ragas_api_client.html#columntype',
- 'ragas_annotator/backends/ragas_api_client.py'),
- 'ragas_annotator.backends.ragas_api_client.RagasApiClient': ( 'backends/ragas_api_client.html#ragasapiclient',
- 'ragas_annotator/backends/ragas_api_client.py'),
- 'ragas_annotator.backends.ragas_api_client.RagasApiClient.__init__': ( 'backends/ragas_api_client.html#ragasapiclient.__init__',
- 'ragas_annotator/backends/ragas_api_client.py'),
- 'ragas_annotator.backends.ragas_api_client.RagasApiClient._create_resource': ( 'backends/ragas_api_client.html#ragasapiclient._create_resource',
- 'ragas_annotator/backends/ragas_api_client.py'),
- 'ragas_annotator.backends.ragas_api_client.RagasApiClient._create_with_data': ( 'backends/ragas_api_client.html#ragasapiclient._create_with_data',
- 'ragas_annotator/backends/ragas_api_client.py'),
- 'ragas_annotator.backends.ragas_api_client.RagasApiClient._delete_resource': ( 'backends/ragas_api_client.html#ragasapiclient._delete_resource',
- 'ragas_annotator/backends/ragas_api_client.py'),
- 'ragas_annotator.backends.ragas_api_client.RagasApiClient._get_resource': ( 'backends/ragas_api_client.html#ragasapiclient._get_resource',
- 'ragas_annotator/backends/ragas_api_client.py'),
- 'ragas_annotator.backends.ragas_api_client.RagasApiClient._list_resources': ( 'backends/ragas_api_client.html#ragasapiclient._list_resources',
- 'ragas_annotator/backends/ragas_api_client.py'),
- 'ragas_annotator.backends.ragas_api_client.RagasApiClient._request': ( 'backends/ragas_api_client.html#ragasapiclient._request',
- 'ragas_annotator/backends/ragas_api_client.py'),
- 'ragas_annotator.backends.ragas_api_client.RagasApiClient._update_resource': ( 'backends/ragas_api_client.html#ragasapiclient._update_resource',
- 'ragas_annotator/backends/ragas_api_client.py'),
- 'ragas_annotator.backends.ragas_api_client.RagasApiClient.convert_raw_data': ( 'backends/ragas_api_client.html#ragasapiclient.convert_raw_data',
- 'ragas_annotator/backends/ragas_api_client.py'),
- 'ragas_annotator.backends.ragas_api_client.RagasApiClient.create_column': ( 'backends/ragas_api_client.html#ragasapiclient.create_column',
- 'ragas_annotator/backends/ragas_api_client.py'),
- 'ragas_annotator.backends.ragas_api_client.RagasApiClient.create_column_map': ( 'backends/ragas_api_client.html#ragasapiclient.create_column_map',
- 'ragas_annotator/backends/ragas_api_client.py'),
- 'ragas_annotator.backends.ragas_api_client.RagasApiClient.create_dataset': ( 'backends/ragas_api_client.html#ragasapiclient.create_dataset',
- 'ragas_annotator/backends/ragas_api_client.py'),
- 'ragas_annotator.backends.ragas_api_client.RagasApiClient.create_dataset_column': ( 'backends/ragas_api_client.html#ragasapiclient.create_dataset_column',
- 'ragas_annotator/backends/ragas_api_client.py'),
- 'ragas_annotator.backends.ragas_api_client.RagasApiClient.create_dataset_row': ( 'backends/ragas_api_client.html#ragasapiclient.create_dataset_row',
- 'ragas_annotator/backends/ragas_api_client.py'),
- 'ragas_annotator.backends.ragas_api_client.RagasApiClient.create_dataset_with_data': ( 'backends/ragas_api_client.html#ragasapiclient.create_dataset_with_data',
- 'ragas_annotator/backends/ragas_api_client.py'),
- 'ragas_annotator.backends.ragas_api_client.RagasApiClient.create_experiment': ( 'backends/ragas_api_client.html#ragasapiclient.create_experiment',
- 'ragas_annotator/backends/ragas_api_client.py'),
- 'ragas_annotator.backends.ragas_api_client.RagasApiClient.create_experiment_column': ( 'backends/ragas_api_client.html#ragasapiclient.create_experiment_column',
- 'ragas_annotator/backends/ragas_api_client.py'),
- 'ragas_annotator.backends.ragas_api_client.RagasApiClient.create_experiment_row': ( 'backends/ragas_api_client.html#ragasapiclient.create_experiment_row',
- 'ragas_annotator/backends/ragas_api_client.py'),
- 'ragas_annotator.backends.ragas_api_client.RagasApiClient.create_experiment_with_data': ( 'backends/ragas_api_client.html#ragasapiclient.create_experiment_with_data',
- 'ragas_annotator/backends/ragas_api_client.py'),
- 'ragas_annotator.backends.ragas_api_client.RagasApiClient.create_project': ( 'backends/ragas_api_client.html#ragasapiclient.create_project',
- 'ragas_annotator/backends/ragas_api_client.py'),
- 'ragas_annotator.backends.ragas_api_client.RagasApiClient.create_row': ( 'backends/ragas_api_client.html#ragasapiclient.create_row',
- 'ragas_annotator/backends/ragas_api_client.py'),
- 'ragas_annotator.backends.ragas_api_client.RagasApiClient.delete_dataset': ( 'backends/ragas_api_client.html#ragasapiclient.delete_dataset',
- 'ragas_annotator/backends/ragas_api_client.py'),
- 'ragas_annotator.backends.ragas_api_client.RagasApiClient.delete_dataset_column': ( 'backends/ragas_api_client.html#ragasapiclient.delete_dataset_column',
- 'ragas_annotator/backends/ragas_api_client.py'),
- 'ragas_annotator.backends.ragas_api_client.RagasApiClient.delete_dataset_row': ( 'backends/ragas_api_client.html#ragasapiclient.delete_dataset_row',
- 'ragas_annotator/backends/ragas_api_client.py'),
- 'ragas_annotator.backends.ragas_api_client.RagasApiClient.delete_experiment': ( 'backends/ragas_api_client.html#ragasapiclient.delete_experiment',
- 'ragas_annotator/backends/ragas_api_client.py'),
- 'ragas_annotator.backends.ragas_api_client.RagasApiClient.delete_experiment_column': ( 'backends/ragas_api_client.html#ragasapiclient.delete_experiment_column',
- 'ragas_annotator/backends/ragas_api_client.py'),
- 'ragas_annotator.backends.ragas_api_client.RagasApiClient.delete_experiment_row': ( 'backends/ragas_api_client.html#ragasapiclient.delete_experiment_row',
- 'ragas_annotator/backends/ragas_api_client.py'),
- 'ragas_annotator.backends.ragas_api_client.RagasApiClient.delete_project': ( 'backends/ragas_api_client.html#ragasapiclient.delete_project',
- 'ragas_annotator/backends/ragas_api_client.py'),
- 'ragas_annotator.backends.ragas_api_client.RagasApiClient.get_dataset': ( 'backends/ragas_api_client.html#ragasapiclient.get_dataset',
- 'ragas_annotator/backends/ragas_api_client.py'),
- 'ragas_annotator.backends.ragas_api_client.RagasApiClient.get_dataset_column': ( 'backends/ragas_api_client.html#ragasapiclient.get_dataset_column',
- 'ragas_annotator/backends/ragas_api_client.py'),
- 'ragas_annotator.backends.ragas_api_client.RagasApiClient.get_dataset_row': ( 'backends/ragas_api_client.html#ragasapiclient.get_dataset_row',
- 'ragas_annotator/backends/ragas_api_client.py'),
- 'ragas_annotator.backends.ragas_api_client.RagasApiClient.get_experiment': ( 'backends/ragas_api_client.html#ragasapiclient.get_experiment',
- 'ragas_annotator/backends/ragas_api_client.py'),
- 'ragas_annotator.backends.ragas_api_client.RagasApiClient.get_experiment_column': ( 'backends/ragas_api_client.html#ragasapiclient.get_experiment_column',
- 'ragas_annotator/backends/ragas_api_client.py'),
- 'ragas_annotator.backends.ragas_api_client.RagasApiClient.get_experiment_row': ( 'backends/ragas_api_client.html#ragasapiclient.get_experiment_row',
- 'ragas_annotator/backends/ragas_api_client.py'),
- 'ragas_annotator.backends.ragas_api_client.RagasApiClient.get_project': ( 'backends/ragas_api_client.html#ragasapiclient.get_project',
- 'ragas_annotator/backends/ragas_api_client.py'),
- 'ragas_annotator.backends.ragas_api_client.RagasApiClient.list_dataset_columns': ( 'backends/ragas_api_client.html#ragasapiclient.list_dataset_columns',
- 'ragas_annotator/backends/ragas_api_client.py'),
- 'ragas_annotator.backends.ragas_api_client.RagasApiClient.list_dataset_rows': ( 'backends/ragas_api_client.html#ragasapiclient.list_dataset_rows',
- 'ragas_annotator/backends/ragas_api_client.py'),
- 'ragas_annotator.backends.ragas_api_client.RagasApiClient.list_datasets': ( 'backends/ragas_api_client.html#ragasapiclient.list_datasets',
- 'ragas_annotator/backends/ragas_api_client.py'),
- 'ragas_annotator.backends.ragas_api_client.RagasApiClient.list_experiment_columns': ( 'backends/ragas_api_client.html#ragasapiclient.list_experiment_columns',
- 'ragas_annotator/backends/ragas_api_client.py'),
- 'ragas_annotator.backends.ragas_api_client.RagasApiClient.list_experiment_rows': ( 'backends/ragas_api_client.html#ragasapiclient.list_experiment_rows',
- 'ragas_annotator/backends/ragas_api_client.py'),
- 'ragas_annotator.backends.ragas_api_client.RagasApiClient.list_experiments': ( 'backends/ragas_api_client.html#ragasapiclient.list_experiments',
- 'ragas_annotator/backends/ragas_api_client.py'),
- 'ragas_annotator.backends.ragas_api_client.RagasApiClient.list_projects': ( 'backends/ragas_api_client.html#ragasapiclient.list_projects',
- 'ragas_annotator/backends/ragas_api_client.py'),
- 'ragas_annotator.backends.ragas_api_client.RagasApiClient.update_dataset': ( 'backends/ragas_api_client.html#ragasapiclient.update_dataset',
- 'ragas_annotator/backends/ragas_api_client.py'),
- 'ragas_annotator.backends.ragas_api_client.RagasApiClient.update_dataset_column': ( 'backends/ragas_api_client.html#ragasapiclient.update_dataset_column',
- 'ragas_annotator/backends/ragas_api_client.py'),
- 'ragas_annotator.backends.ragas_api_client.RagasApiClient.update_dataset_row': ( 'backends/ragas_api_client.html#ragasapiclient.update_dataset_row',
- 'ragas_annotator/backends/ragas_api_client.py'),
- 'ragas_annotator.backends.ragas_api_client.RagasApiClient.update_experiment': ( 'backends/ragas_api_client.html#ragasapiclient.update_experiment',
- 'ragas_annotator/backends/ragas_api_client.py'),
- 'ragas_annotator.backends.ragas_api_client.RagasApiClient.update_experiment_column': ( 'backends/ragas_api_client.html#ragasapiclient.update_experiment_column',
- 'ragas_annotator/backends/ragas_api_client.py'),
- 'ragas_annotator.backends.ragas_api_client.RagasApiClient.update_experiment_row': ( 'backends/ragas_api_client.html#ragasapiclient.update_experiment_row',
- 'ragas_annotator/backends/ragas_api_client.py'),
- 'ragas_annotator.backends.ragas_api_client.RagasApiClient.update_project': ( 'backends/ragas_api_client.html#ragasapiclient.update_project',
- 'ragas_annotator/backends/ragas_api_client.py'),
- 'ragas_annotator.backends.ragas_api_client.Row': ( 'backends/ragas_api_client.html#row',
- 'ragas_annotator/backends/ragas_api_client.py'),
- 'ragas_annotator.backends.ragas_api_client.RowCell': ( 'backends/ragas_api_client.html#rowcell',
- 'ragas_annotator/backends/ragas_api_client.py'),
- 'ragas_annotator.backends.ragas_api_client.create_nano_id': ( 'backends/ragas_api_client.html#create_nano_id',
- 'ragas_annotator/backends/ragas_api_client.py')},
- 'ragas_annotator.core': {'ragas_annotator.core.foo': ('core.html#foo', 'ragas_annotator/core.py')},
- 'ragas_annotator.dataset': { 'ragas_annotator.dataset.Dataset': ('dataset.html#dataset', 'ragas_annotator/dataset.py'),
- 'ragas_annotator.dataset.Dataset.__getitem__': ( 'dataset.html#dataset.__getitem__',
- 'ragas_annotator/dataset.py'),
- 'ragas_annotator.dataset.Dataset.__init__': ( 'dataset.html#dataset.__init__',
- 'ragas_annotator/dataset.py'),
- 'ragas_annotator.dataset.Dataset.__iter__': ( 'dataset.html#dataset.__iter__',
- 'ragas_annotator/dataset.py'),
- 'ragas_annotator.dataset.Dataset.__len__': ( 'dataset.html#dataset.__len__',
- 'ragas_annotator/dataset.py'),
- 'ragas_annotator.dataset.Dataset.__repr__': ( 'dataset.html#dataset.__repr__',
- 'ragas_annotator/dataset.py'),
- 'ragas_annotator.dataset.Dataset.__setitem__': ( 'dataset.html#dataset.__setitem__',
- 'ragas_annotator/dataset.py'),
- 'ragas_annotator.dataset.Dataset.append': ( 'dataset.html#dataset.append',
- 'ragas_annotator/dataset.py'),
- 'ragas_annotator.dataset.Dataset.get': ('dataset.html#dataset.get', 'ragas_annotator/dataset.py'),
- 'ragas_annotator.dataset.Dataset.load': ( 'dataset.html#dataset.load',
- 'ragas_annotator/dataset.py'),
- 'ragas_annotator.dataset.Dataset.pop': ('dataset.html#dataset.pop', 'ragas_annotator/dataset.py'),
- 'ragas_annotator.dataset.Dataset.save': ( 'dataset.html#dataset.save',
- 'ragas_annotator/dataset.py')},
- 'ragas_annotator.embedding.base': { 'ragas_annotator.embedding.base.BaseEmbedding': ( 'embedding/base.html#baseembedding',
- 'ragas_annotator/embedding/base.py'),
- 'ragas_annotator.embedding.base.BaseEmbedding.aembed_document': ( 'embedding/base.html#baseembedding.aembed_document',
- 'ragas_annotator/embedding/base.py'),
- 'ragas_annotator.embedding.base.BaseEmbedding.aembed_text': ( 'embedding/base.html#baseembedding.aembed_text',
- 'ragas_annotator/embedding/base.py'),
- 'ragas_annotator.embedding.base.BaseEmbedding.embed_document': ( 'embedding/base.html#baseembedding.embed_document',
- 'ragas_annotator/embedding/base.py'),
- 'ragas_annotator.embedding.base.BaseEmbedding.embed_text': ( 'embedding/base.html#baseembedding.embed_text',
- 'ragas_annotator/embedding/base.py'),
- 'ragas_annotator.embedding.base.OpenAIEmbeddings': ( 'embedding/base.html#openaiembeddings',
- 'ragas_annotator/embedding/base.py'),
- 'ragas_annotator.embedding.base.OpenAIEmbeddings.__init__': ( 'embedding/base.html#openaiembeddings.__init__',
- 'ragas_annotator/embedding/base.py'),
- 'ragas_annotator.embedding.base.OpenAIEmbeddings.aembed_document': ( 'embedding/base.html#openaiembeddings.aembed_document',
- 'ragas_annotator/embedding/base.py'),
- 'ragas_annotator.embedding.base.OpenAIEmbeddings.aembed_text': ( 'embedding/base.html#openaiembeddings.aembed_text',
- 'ragas_annotator/embedding/base.py'),
- 'ragas_annotator.embedding.base.OpenAIEmbeddings.embed_document': ( 'embedding/base.html#openaiembeddings.embed_document',
- 'ragas_annotator/embedding/base.py'),
- 'ragas_annotator.embedding.base.OpenAIEmbeddings.embed_text': ( 'embedding/base.html#openaiembeddings.embed_text',
- 'ragas_annotator/embedding/base.py'),
- 'ragas_annotator.embedding.base.ragas_embedding': ( 'embedding/base.html#ragas_embedding',
- 'ragas_annotator/embedding/base.py')},
- 'ragas_annotator.exceptions': { 'ragas_annotator.exceptions.DuplicateError': ( 'utils/exceptions.html#duplicateerror',
- 'ragas_annotator/exceptions.py'),
- 'ragas_annotator.exceptions.NotFoundError': ( 'utils/exceptions.html#notfounderror',
- 'ragas_annotator/exceptions.py'),
- 'ragas_annotator.exceptions.ValidationError': ( 'utils/exceptions.html#validationerror',
- 'ragas_annotator/exceptions.py')},
- 'ragas_annotator.experiment': { 'ragas_annotator.experiment.Experiment': ( 'experiment.html#experiment',
- 'ragas_annotator/experiment.py'),
- 'ragas_annotator.experiment.Experiment.__init__': ( 'experiment.html#experiment.__init__',
- 'ragas_annotator/experiment.py'),
- 'ragas_annotator.experiment.Experiment.__str__': ( 'experiment.html#experiment.__str__',
- 'ragas_annotator/experiment.py')},
- 'ragas_annotator.llm.llm': { 'ragas_annotator.llm.llm.RagasLLM': ('llm/llm.html#ragasllm', 'ragas_annotator/llm/llm.py'),
- 'ragas_annotator.llm.llm.RagasLLM.__init__': ( 'llm/llm.html#ragasllm.__init__',
- 'ragas_annotator/llm/llm.py'),
- 'ragas_annotator.llm.llm.RagasLLM._check_client_async': ( 'llm/llm.html#ragasllm._check_client_async',
- 'ragas_annotator/llm/llm.py'),
- 'ragas_annotator.llm.llm.RagasLLM._initialize_client': ( 'llm/llm.html#ragasllm._initialize_client',
- 'ragas_annotator/llm/llm.py'),
- 'ragas_annotator.llm.llm.RagasLLM._run_async_in_current_loop': ( 'llm/llm.html#ragasllm._run_async_in_current_loop',
- 'ragas_annotator/llm/llm.py'),
- 'ragas_annotator.llm.llm.RagasLLM.agenerate': ( 'llm/llm.html#ragasllm.agenerate',
- 'ragas_annotator/llm/llm.py'),
- 'ragas_annotator.llm.llm.RagasLLM.generate': ( 'llm/llm.html#ragasllm.generate',
- 'ragas_annotator/llm/llm.py'),
- 'ragas_annotator.llm.llm.ragas_llm': ('llm/llm.html#ragas_llm', 'ragas_annotator/llm/llm.py')},
- 'ragas_annotator.metric.base': { 'ragas_annotator.metric.base.Metric': ( 'metric/base.html#metric',
- 'ragas_annotator/metric/base.py'),
- 'ragas_annotator.metric.base.Metric.__post_init__': ( 'metric/base.html#metric.__post_init__',
- 'ragas_annotator/metric/base.py'),
- 'ragas_annotator.metric.base.Metric._ensemble': ( 'metric/base.html#metric._ensemble',
- 'ragas_annotator/metric/base.py'),
- 'ragas_annotator.metric.base.Metric._get_response_model': ( 'metric/base.html#metric._get_response_model',
- 'ragas_annotator/metric/base.py'),
- 'ragas_annotator.metric.base.Metric.abatch_score': ( 'metric/base.html#metric.abatch_score',
- 'ragas_annotator/metric/base.py'),
- 'ragas_annotator.metric.base.Metric.ascore': ( 'metric/base.html#metric.ascore',
- 'ragas_annotator/metric/base.py'),
- 'ragas_annotator.metric.base.Metric.batch_score': ( 'metric/base.html#metric.batch_score',
- 'ragas_annotator/metric/base.py'),
- 'ragas_annotator.metric.base.Metric.get_variables': ( 'metric/base.html#metric.get_variables',
- 'ragas_annotator/metric/base.py'),
- 'ragas_annotator.metric.base.Metric.score': ( 'metric/base.html#metric.score',
- 'ragas_annotator/metric/base.py'),
- 'ragas_annotator.metric.base.Metric.train': ( 'metric/base.html#metric.train',
- 'ragas_annotator/metric/base.py')},
- 'ragas_annotator.metric.decorator': { 'ragas_annotator.metric.decorator.create_metric_decorator': ( 'metric/decorator.html#create_metric_decorator',
- 'ragas_annotator/metric/decorator.py')},
- 'ragas_annotator.metric.discrete': { 'ragas_annotator.metric.discrete.DiscreteMetric': ( 'metric/discrete.html#discretemetric',
- 'ragas_annotator/metric/discrete.py'),
- 'ragas_annotator.metric.discrete.DiscreteMetric._ensemble': ( 'metric/discrete.html#discretemetric._ensemble',
- 'ragas_annotator/metric/discrete.py'),
- 'ragas_annotator.metric.discrete.DiscreteMetric._get_response_model': ( 'metric/discrete.html#discretemetric._get_response_model',
- 'ragas_annotator/metric/discrete.py')},
- 'ragas_annotator.metric.numeric': { 'ragas_annotator.metric.numeric.NumericMetric': ( 'metric/numeric.html#numericmetric',
- 'ragas_annotator/metric/numeric.py'),
- 'ragas_annotator.metric.numeric.NumericMetric._ensemble': ( 'metric/numeric.html#numericmetric._ensemble',
- 'ragas_annotator/metric/numeric.py'),
- 'ragas_annotator.metric.numeric.NumericMetric._get_response_model': ( 'metric/numeric.html#numericmetric._get_response_model',
- 'ragas_annotator/metric/numeric.py')},
- 'ragas_annotator.metric.ranking': { 'ragas_annotator.metric.ranking.RankingMetric': ( 'metric/ranking.html#rankingmetric',
- 'ragas_annotator/metric/ranking.py'),
- 'ragas_annotator.metric.ranking.RankingMetric._ensemble': ( 'metric/ranking.html#rankingmetric._ensemble',
- 'ragas_annotator/metric/ranking.py'),
- 'ragas_annotator.metric.ranking.RankingMetric._get_response_model': ( 'metric/ranking.html#rankingmetric._get_response_model',
- 'ragas_annotator/metric/ranking.py')},
- 'ragas_annotator.metric.result': { 'ragas_annotator.metric.result.MetricResult': ( 'metric/result.html#metricresult',
- 'ragas_annotator/metric/result.py'),
- 'ragas_annotator.metric.result.MetricResult.__add__': ( 'metric/result.html#metricresult.__add__',
- 'ragas_annotator/metric/result.py'),
- 'ragas_annotator.metric.result.MetricResult.__eq__': ( 'metric/result.html#metricresult.__eq__',
- 'ragas_annotator/metric/result.py'),
- 'ragas_annotator.metric.result.MetricResult.__float__': ( 'metric/result.html#metricresult.__float__',
- 'ragas_annotator/metric/result.py'),
- 'ragas_annotator.metric.result.MetricResult.__ge__': ( 'metric/result.html#metricresult.__ge__',
- 'ragas_annotator/metric/result.py'),
- 'ragas_annotator.metric.result.MetricResult.__getattr__': ( 'metric/result.html#metricresult.__getattr__',
- 'ragas_annotator/metric/result.py'),
- 'ragas_annotator.metric.result.MetricResult.__getitem__': ( 'metric/result.html#metricresult.__getitem__',
- 'ragas_annotator/metric/result.py'),
- 'ragas_annotator.metric.result.MetricResult.__gt__': ( 'metric/result.html#metricresult.__gt__',
- 'ragas_annotator/metric/result.py'),
- 'ragas_annotator.metric.result.MetricResult.__init__': ( 'metric/result.html#metricresult.__init__',
- 'ragas_annotator/metric/result.py'),
- 'ragas_annotator.metric.result.MetricResult.__int__': ( 'metric/result.html#metricresult.__int__',
- 'ragas_annotator/metric/result.py'),
- 'ragas_annotator.metric.result.MetricResult.__iter__': ( 'metric/result.html#metricresult.__iter__',
- 'ragas_annotator/metric/result.py'),
- 'ragas_annotator.metric.result.MetricResult.__le__': ( 'metric/result.html#metricresult.__le__',
- 'ragas_annotator/metric/result.py'),
- 'ragas_annotator.metric.result.MetricResult.__len__': ( 'metric/result.html#metricresult.__len__',
- 'ragas_annotator/metric/result.py'),
- 'ragas_annotator.metric.result.MetricResult.__lt__': ( 'metric/result.html#metricresult.__lt__',
- 'ragas_annotator/metric/result.py'),
- 'ragas_annotator.metric.result.MetricResult.__mul__': ( 'metric/result.html#metricresult.__mul__',
- 'ragas_annotator/metric/result.py'),
- 'ragas_annotator.metric.result.MetricResult.__radd__': ( 'metric/result.html#metricresult.__radd__',
- 'ragas_annotator/metric/result.py'),
- 'ragas_annotator.metric.result.MetricResult.__repr__': ( 'metric/result.html#metricresult.__repr__',
- 'ragas_annotator/metric/result.py'),
- 'ragas_annotator.metric.result.MetricResult.__rmul__': ( 'metric/result.html#metricresult.__rmul__',
- 'ragas_annotator/metric/result.py'),
- 'ragas_annotator.metric.result.MetricResult.__rsub__': ( 'metric/result.html#metricresult.__rsub__',
- 'ragas_annotator/metric/result.py'),
- 'ragas_annotator.metric.result.MetricResult.__rtruediv__': ( 'metric/result.html#metricresult.__rtruediv__',
- 'ragas_annotator/metric/result.py'),
- 'ragas_annotator.metric.result.MetricResult.__str__': ( 'metric/result.html#metricresult.__str__',
- 'ragas_annotator/metric/result.py'),
- 'ragas_annotator.metric.result.MetricResult.__sub__': ( 'metric/result.html#metricresult.__sub__',
- 'ragas_annotator/metric/result.py'),
- 'ragas_annotator.metric.result.MetricResult.__truediv__': ( 'metric/result.html#metricresult.__truediv__',
- 'ragas_annotator/metric/result.py'),
- 'ragas_annotator.metric.result.MetricResult.result': ( 'metric/result.html#metricresult.result',
- 'ragas_annotator/metric/result.py'),
- 'ragas_annotator.metric.result.MetricResult.to_dict': ( 'metric/result.html#metricresult.to_dict',
- 'ragas_annotator/metric/result.py')},
- 'ragas_annotator.model.notion_model': { 'ragas_annotator.model.notion_model.NotionModel': ( 'model/notion_model.html#notionmodel',
- 'ragas_annotator/model/notion_model.py'),
- 'ragas_annotator.model.notion_model.NotionModel.__getattr__': ( 'model/notion_model.html#notionmodel.__getattr__',
- 'ragas_annotator/model/notion_model.py'),
- 'ragas_annotator.model.notion_model.NotionModel.__init__': ( 'model/notion_model.html#notionmodel.__init__',
- 'ragas_annotator/model/notion_model.py'),
- 'ragas_annotator.model.notion_model.NotionModel.__repr__': ( 'model/notion_model.html#notionmodel.__repr__',
- 'ragas_annotator/model/notion_model.py'),
- 'ragas_annotator.model.notion_model.NotionModel.__setattr__': ( 'model/notion_model.html#notionmodel.__setattr__',
- 'ragas_annotator/model/notion_model.py'),
- 'ragas_annotator.model.notion_model.NotionModel.from_notion': ( 'model/notion_model.html#notionmodel.from_notion',
- 'ragas_annotator/model/notion_model.py'),
- 'ragas_annotator.model.notion_model.NotionModel.to_notion': ( 'model/notion_model.html#notionmodel.to_notion',
- 'ragas_annotator/model/notion_model.py'),
- 'ragas_annotator.model.notion_model.NotionModelMeta': ( 'model/notion_model.html#notionmodelmeta',
- 'ragas_annotator/model/notion_model.py'),
- 'ragas_annotator.model.notion_model.NotionModelMeta.__new__': ( 'model/notion_model.html#notionmodelmeta.__new__',
- 'ragas_annotator/model/notion_model.py')},
- 'ragas_annotator.model.notion_typing': { 'ragas_annotator.model.notion_typing.Field': ( 'model/notion_types.html#field',
- 'ragas_annotator/model/notion_typing.py'),
- 'ragas_annotator.model.notion_typing.Field.__get__': ( 'model/notion_types.html#field.__get__',
- 'ragas_annotator/model/notion_typing.py'),
- 'ragas_annotator.model.notion_typing.Field.__init__': ( 'model/notion_types.html#field.__init__',
- 'ragas_annotator/model/notion_typing.py'),
- 'ragas_annotator.model.notion_typing.Field.__set__': ( 'model/notion_types.html#field.__set__',
- 'ragas_annotator/model/notion_typing.py'),
- 'ragas_annotator.model.notion_typing.Field.__set_name__': ( 'model/notion_types.html#field.__set_name__',
- 'ragas_annotator/model/notion_typing.py'),
- 'ragas_annotator.model.notion_typing.Field._from_notion': ( 'model/notion_types.html#field._from_notion',
- 'ragas_annotator/model/notion_typing.py'),
- 'ragas_annotator.model.notion_typing.Field._to_notion': ( 'model/notion_types.html#field._to_notion',
- 'ragas_annotator/model/notion_typing.py'),
- 'ragas_annotator.model.notion_typing.Field._to_notion_property': ( 'model/notion_types.html#field._to_notion_property',
- 'ragas_annotator/model/notion_typing.py'),
- 'ragas_annotator.model.notion_typing.Field.validate': ( 'model/notion_types.html#field.validate',
- 'ragas_annotator/model/notion_typing.py'),
- 'ragas_annotator.model.notion_typing.ID': ( 'model/notion_types.html#id',
- 'ragas_annotator/model/notion_typing.py'),
- 'ragas_annotator.model.notion_typing.ID.__init__': ( 'model/notion_types.html#id.__init__',
- 'ragas_annotator/model/notion_typing.py'),
- 'ragas_annotator.model.notion_typing.ID.__new__': ( 'model/notion_types.html#id.__new__',
- 'ragas_annotator/model/notion_typing.py'),
- 'ragas_annotator.model.notion_typing.ID._from_notion': ( 'model/notion_types.html#id._from_notion',
- 'ragas_annotator/model/notion_typing.py'),
- 'ragas_annotator.model.notion_typing.ID._to_notion': ( 'model/notion_types.html#id._to_notion',
- 'ragas_annotator/model/notion_typing.py'),
- 'ragas_annotator.model.notion_typing.ID._to_notion_property': ( 'model/notion_types.html#id._to_notion_property',
- 'ragas_annotator/model/notion_typing.py'),
- 'ragas_annotator.model.notion_typing.ID.validate': ( 'model/notion_types.html#id.validate',
- 'ragas_annotator/model/notion_typing.py'),
- 'ragas_annotator.model.notion_typing.MultiSelect': ( 'model/notion_types.html#multiselect',
- 'ragas_annotator/model/notion_typing.py'),
- 'ragas_annotator.model.notion_typing.MultiSelect.__init__': ( 'model/notion_types.html#multiselect.__init__',
- 'ragas_annotator/model/notion_typing.py'),
- 'ragas_annotator.model.notion_typing.MultiSelect.__new__': ( 'model/notion_types.html#multiselect.__new__',
- 'ragas_annotator/model/notion_typing.py'),
- 'ragas_annotator.model.notion_typing.MultiSelect._from_notion': ( 'model/notion_types.html#multiselect._from_notion',
- 'ragas_annotator/model/notion_typing.py'),
- 'ragas_annotator.model.notion_typing.MultiSelect._to_notion': ( 'model/notion_types.html#multiselect._to_notion',
- 'ragas_annotator/model/notion_typing.py'),
- 'ragas_annotator.model.notion_typing.MultiSelect._to_notion_property': ( 'model/notion_types.html#multiselect._to_notion_property',
- 'ragas_annotator/model/notion_typing.py'),
- 'ragas_annotator.model.notion_typing.MultiSelect.validate': ( 'model/notion_types.html#multiselect.validate',
- 'ragas_annotator/model/notion_typing.py'),
- 'ragas_annotator.model.notion_typing.NotionFieldMeta': ( 'model/notion_types.html#notionfieldmeta',
- 'ragas_annotator/model/notion_typing.py'),
- 'ragas_annotator.model.notion_typing.NotionFieldMeta.__init__': ( 'model/notion_types.html#notionfieldmeta.__init__',
- 'ragas_annotator/model/notion_typing.py'),
- 'ragas_annotator.model.notion_typing.NotionFieldMeta.__set_name__': ( 'model/notion_types.html#notionfieldmeta.__set_name__',
- 'ragas_annotator/model/notion_typing.py'),
- 'ragas_annotator.model.notion_typing.NotionFieldMeta.from_notion': ( 'model/notion_types.html#notionfieldmeta.from_notion',
- 'ragas_annotator/model/notion_typing.py'),
- 'ragas_annotator.model.notion_typing.NotionFieldMeta.to_notion': ( 'model/notion_types.html#notionfieldmeta.to_notion',
- 'ragas_annotator/model/notion_typing.py'),
- 'ragas_annotator.model.notion_typing.NotionFieldMeta.to_notion_property': ( 'model/notion_types.html#notionfieldmeta.to_notion_property',
- 'ragas_annotator/model/notion_typing.py'),
- 'ragas_annotator.model.notion_typing.NotionFieldMeta.validate': ( 'model/notion_types.html#notionfieldmeta.validate',
- 'ragas_annotator/model/notion_typing.py'),
- 'ragas_annotator.model.notion_typing.Select': ( 'model/notion_types.html#select',
- 'ragas_annotator/model/notion_typing.py'),
- 'ragas_annotator.model.notion_typing.Select.__init__': ( 'model/notion_types.html#select.__init__',
- 'ragas_annotator/model/notion_typing.py'),
- 'ragas_annotator.model.notion_typing.Select.__new__': ( 'model/notion_types.html#select.__new__',
- 'ragas_annotator/model/notion_typing.py'),
- 'ragas_annotator.model.notion_typing.Select._from_notion': ( 'model/notion_types.html#select._from_notion',
- 'ragas_annotator/model/notion_typing.py'),
- 'ragas_annotator.model.notion_typing.Select._to_notion': ( 'model/notion_types.html#select._to_notion',
- 'ragas_annotator/model/notion_typing.py'),
- 'ragas_annotator.model.notion_typing.Select._to_notion_property': ( 'model/notion_types.html#select._to_notion_property',
- 'ragas_annotator/model/notion_typing.py'),
- 'ragas_annotator.model.notion_typing.Select.validate': ( 'model/notion_types.html#select.validate',
- 'ragas_annotator/model/notion_typing.py'),
- 'ragas_annotator.model.notion_typing.Text': ( 'model/notion_types.html#text',
- 'ragas_annotator/model/notion_typing.py'),
- 'ragas_annotator.model.notion_typing.Text.__init__': ( 'model/notion_types.html#text.__init__',
- 'ragas_annotator/model/notion_typing.py'),
- 'ragas_annotator.model.notion_typing.Text.__new__': ( 'model/notion_types.html#text.__new__',
- 'ragas_annotator/model/notion_typing.py'),
- 'ragas_annotator.model.notion_typing.Text._from_notion': ( 'model/notion_types.html#text._from_notion',
- 'ragas_annotator/model/notion_typing.py'),
- 'ragas_annotator.model.notion_typing.Text._to_notion': ( 'model/notion_types.html#text._to_notion',
- 'ragas_annotator/model/notion_typing.py'),
- 'ragas_annotator.model.notion_typing.TextNew': ( 'model/notion_types.html#textnew',
- 'ragas_annotator/model/notion_typing.py'),
- 'ragas_annotator.model.notion_typing.TextNew.__init__': ( 'model/notion_types.html#textnew.__init__',
- 'ragas_annotator/model/notion_typing.py'),
- 'ragas_annotator.model.notion_typing.TextNew.from_notion': ( 'model/notion_types.html#textnew.from_notion',
- 'ragas_annotator/model/notion_typing.py'),
- 'ragas_annotator.model.notion_typing.TextNew.to_notion': ( 'model/notion_types.html#textnew.to_notion',
- 'ragas_annotator/model/notion_typing.py'),
- 'ragas_annotator.model.notion_typing.Title': ( 'model/notion_types.html#title',
- 'ragas_annotator/model/notion_typing.py'),
- 'ragas_annotator.model.notion_typing.Title.__init__': ( 'model/notion_types.html#title.__init__',
- 'ragas_annotator/model/notion_typing.py'),
- 'ragas_annotator.model.notion_typing.Title.__new__': ( 'model/notion_types.html#title.__new__',
- 'ragas_annotator/model/notion_typing.py'),
- 'ragas_annotator.model.notion_typing.Title._from_notion': ( 'model/notion_types.html#title._from_notion',
- 'ragas_annotator/model/notion_typing.py'),
- 'ragas_annotator.model.notion_typing.Title._to_notion': ( 'model/notion_types.html#title._to_notion',
- 'ragas_annotator/model/notion_typing.py'),
- 'ragas_annotator.model.notion_typing.URL': ( 'model/notion_types.html#url',
- 'ragas_annotator/model/notion_typing.py'),
- 'ragas_annotator.model.notion_typing.URL.__init__': ( 'model/notion_types.html#url.__init__',
- 'ragas_annotator/model/notion_typing.py'),
- 'ragas_annotator.model.notion_typing.URL.__new__': ( 'model/notion_types.html#url.__new__',
- 'ragas_annotator/model/notion_typing.py'),
- 'ragas_annotator.model.notion_typing.URL._from_notion': ( 'model/notion_types.html#url._from_notion',
- 'ragas_annotator/model/notion_typing.py'),
- 'ragas_annotator.model.notion_typing.URL._to_notion': ( 'model/notion_types.html#url._to_notion',
- 'ragas_annotator/model/notion_typing.py'),
- 'ragas_annotator.model.notion_typing.URL.validate': ( 'model/notion_types.html#url.validate',
- 'ragas_annotator/model/notion_typing.py')},
- 'ragas_annotator.project': { 'ragas_annotator.project.ExperimentProtocol': ( 'project/experiments.html#experimentprotocol',
- 'ragas_annotator/project.py'),
- 'ragas_annotator.project.ExperimentProtocol.__call__': ( 'project/experiments.html#experimentprotocol.__call__',
- 'ragas_annotator/project.py'),
- 'ragas_annotator.project.ExperimentProtocol.run_async': ( 'project/experiments.html#experimentprotocol.run_async',
- 'ragas_annotator/project.py'),
- 'ragas_annotator.project.Project.create_experiment': ( 'project/experiments.html#project.create_experiment',
- 'ragas_annotator/project.py'),
- 'ragas_annotator.project.Project.experiment': ( 'project/experiments.html#project.experiment',
- 'ragas_annotator/project.py'),
- 'ragas_annotator.project.Project.get_experiment': ( 'project/experiments.html#project.get_experiment',
- 'ragas_annotator/project.py')},
- 'ragas_annotator.project.comparison': { 'ragas_annotator.project.comparison.Project.compare_experiments': ( 'project/comparison.html#project.compare_experiments',
- 'ragas_annotator/project/comparison.py'),
- 'ragas_annotator.project.comparison._combine_experiments': ( 'project/comparison.html#_combine_experiments',
- 'ragas_annotator/project/comparison.py'),
- 'ragas_annotator.project.comparison._get_title_property': ( 'project/comparison.html#_get_title_property',
- 'ragas_annotator/project/comparison.py'),
- 'ragas_annotator.project.comparison._model_to_dict': ( 'project/comparison.html#_model_to_dict',
- 'ragas_annotator/project/comparison.py'),
- 'ragas_annotator.project.comparison._validate_experiments': ( 'project/comparison.html#_validate_experiments',
- 'ragas_annotator/project/comparison.py')},
- 'ragas_annotator.project.core': { 'ragas_annotator.project.core.Project': ( 'project/core.html#project',
- 'ragas_annotator/project/core.py'),
- 'ragas_annotator.project.core.Project.__init__': ( 'project/core.html#project.__init__',
- 'ragas_annotator/project/core.py'),
- 'ragas_annotator.project.core.Project.__repr__': ( 'project/core.html#project.__repr__',
- 'ragas_annotator/project/core.py'),
- 'ragas_annotator.project.core.Project._create_project_structure': ( 'project/core.html#project._create_project_structure',
- 'ragas_annotator/project/core.py'),
- 'ragas_annotator.project.core.Project.create_dataset': ( 'project/core.html#project.create_dataset',
- 'ragas_annotator/project/core.py'),
- 'ragas_annotator.project.core.Project.get_dataset': ( 'project/core.html#project.get_dataset',
- 'ragas_annotator/project/core.py'),
- 'ragas_annotator.project.core.Project.initialize': ( 'project/core.html#project.initialize',
- 'ragas_annotator/project/core.py')},
- 'ragas_annotator.project.experiments': { 'ragas_annotator.project.experiments.ExperimentProtocol': ( 'project/experiments.html#experimentprotocol',
- 'ragas_annotator/project/experiments.py'),
- 'ragas_annotator.project.experiments.ExperimentProtocol.__call__': ( 'project/experiments.html#experimentprotocol.__call__',
- 'ragas_annotator/project/experiments.py'),
- 'ragas_annotator.project.experiments.ExperimentProtocol.run_async': ( 'project/experiments.html#experimentprotocol.run_async',
- 'ragas_annotator/project/experiments.py'),
- 'ragas_annotator.project.experiments.Project.create_experiment': ( 'project/experiments.html#project.create_experiment',
- 'ragas_annotator/project/experiments.py'),
- 'ragas_annotator.project.experiments.Project.experiment': ( 'project/experiments.html#project.experiment',
- 'ragas_annotator/project/experiments.py'),
- 'ragas_annotator.project.experiments.Project.get_experiment': ( 'project/experiments.html#project.get_experiment',
- 'ragas_annotator/project/experiments.py'),
- 'ragas_annotator.project.experiments.Project.langfuse_experiment': ( 'project/experiments.html#project.langfuse_experiment',
- 'ragas_annotator/project/experiments.py')},
- 'ragas_annotator.project.naming': { 'ragas_annotator.project.naming.MemorableNames': ( 'project/naming.html#memorablenames',
- 'ragas_annotator/project/naming.py'),
- 'ragas_annotator.project.naming.MemorableNames.__init__': ( 'project/naming.html#memorablenames.__init__',
- 'ragas_annotator/project/naming.py'),
- 'ragas_annotator.project.naming.MemorableNames.generate_name': ( 'project/naming.html#memorablenames.generate_name',
- 'ragas_annotator/project/naming.py'),
- 'ragas_annotator.project.naming.MemorableNames.generate_unique_name': ( 'project/naming.html#memorablenames.generate_unique_name',
- 'ragas_annotator/project/naming.py'),
- 'ragas_annotator.project.naming.MemorableNames.generate_unique_names': ( 'project/naming.html#memorablenames.generate_unique_names',
- 'ragas_annotator/project/naming.py')},
- 'ragas_annotator.prompt.base': { 'ragas_annotator.prompt.base.Prompt': ( 'prompt/base.html#prompt',
- 'ragas_annotator/prompt/base.py'),
- 'ragas_annotator.prompt.base.Prompt.__init__': ( 'prompt/base.html#prompt.__init__',
- 'ragas_annotator/prompt/base.py'),
- 'ragas_annotator.prompt.base.Prompt.__str__': ( 'prompt/base.html#prompt.__str__',
- 'ragas_annotator/prompt/base.py'),
- 'ragas_annotator.prompt.base.Prompt._format_examples': ( 'prompt/base.html#prompt._format_examples',
- 'ragas_annotator/prompt/base.py'),
- 'ragas_annotator.prompt.base.Prompt._validate_instruction': ( 'prompt/base.html#prompt._validate_instruction',
- 'ragas_annotator/prompt/base.py'),
- 'ragas_annotator.prompt.base.Prompt.add_example': ( 'prompt/base.html#prompt.add_example',
- 'ragas_annotator/prompt/base.py'),
- 'ragas_annotator.prompt.base.Prompt.format': ( 'prompt/base.html#prompt.format',
- 'ragas_annotator/prompt/base.py')},
- 'ragas_annotator.prompt.dynamic_few_shot': { 'ragas_annotator.prompt.dynamic_few_shot.DynamicFewShotPrompt': ( 'prompt/dynamic_few_shot.html#dynamicfewshotprompt',
- 'ragas_annotator/prompt/dynamic_few_shot.py'),
- 'ragas_annotator.prompt.dynamic_few_shot.DynamicFewShotPrompt.__init__': ( 'prompt/dynamic_few_shot.html#dynamicfewshotprompt.__init__',
- 'ragas_annotator/prompt/dynamic_few_shot.py'),
- 'ragas_annotator.prompt.dynamic_few_shot.DynamicFewShotPrompt.add_example': ( 'prompt/dynamic_few_shot.html#dynamicfewshotprompt.add_example',
- 'ragas_annotator/prompt/dynamic_few_shot.py'),
- 'ragas_annotator.prompt.dynamic_few_shot.DynamicFewShotPrompt.format': ( 'prompt/dynamic_few_shot.html#dynamicfewshotprompt.format',
- 'ragas_annotator/prompt/dynamic_few_shot.py'),
- 'ragas_annotator.prompt.dynamic_few_shot.DynamicFewShotPrompt.from_prompt': ( 'prompt/dynamic_few_shot.html#dynamicfewshotprompt.from_prompt',
- 'ragas_annotator/prompt/dynamic_few_shot.py'),
- 'ragas_annotator.prompt.dynamic_few_shot.ExampleStore': ( 'prompt/dynamic_few_shot.html#examplestore',
- 'ragas_annotator/prompt/dynamic_few_shot.py'),
- 'ragas_annotator.prompt.dynamic_few_shot.ExampleStore.add_example': ( 'prompt/dynamic_few_shot.html#examplestore.add_example',
- 'ragas_annotator/prompt/dynamic_few_shot.py'),
- 'ragas_annotator.prompt.dynamic_few_shot.ExampleStore.get_examples': ( 'prompt/dynamic_few_shot.html#examplestore.get_examples',
- 'ragas_annotator/prompt/dynamic_few_shot.py'),
- 'ragas_annotator.prompt.dynamic_few_shot.InMemoryExampleStore': ( 'prompt/dynamic_few_shot.html#inmemoryexamplestore',
- 'ragas_annotator/prompt/dynamic_few_shot.py'),
- 'ragas_annotator.prompt.dynamic_few_shot.InMemoryExampleStore.__init__': ( 'prompt/dynamic_few_shot.html#inmemoryexamplestore.__init__',
- 'ragas_annotator/prompt/dynamic_few_shot.py'),
- 'ragas_annotator.prompt.dynamic_few_shot.InMemoryExampleStore.__len__': ( 'prompt/dynamic_few_shot.html#inmemoryexamplestore.__len__',
- 'ragas_annotator/prompt/dynamic_few_shot.py'),
- 'ragas_annotator.prompt.dynamic_few_shot.InMemoryExampleStore._get_embedding': ( 'prompt/dynamic_few_shot.html#inmemoryexamplestore._get_embedding',
- 'ragas_annotator/prompt/dynamic_few_shot.py'),
- 'ragas_annotator.prompt.dynamic_few_shot.InMemoryExampleStore._get_nearest_examples': ( 'prompt/dynamic_few_shot.html#inmemoryexamplestore._get_nearest_examples',
- 'ragas_annotator/prompt/dynamic_few_shot.py'),
- 'ragas_annotator.prompt.dynamic_few_shot.InMemoryExampleStore.add_example': ( 'prompt/dynamic_few_shot.html#inmemoryexamplestore.add_example',
- 'ragas_annotator/prompt/dynamic_few_shot.py'),
- 'ragas_annotator.prompt.dynamic_few_shot.InMemoryExampleStore.get_examples': ( 'prompt/dynamic_few_shot.html#inmemoryexamplestore.get_examples',
- 'ragas_annotator/prompt/dynamic_few_shot.py')},
- 'ragas_annotator.tracing.langfuse': { 'ragas_annotator.tracing.langfuse.LangfuseTrace': ( 'tracing/langfuse.html#langfusetrace',
- 'ragas_annotator/tracing/langfuse.py'),
- 'ragas_annotator.tracing.langfuse.LangfuseTrace.__init__': ( 'tracing/langfuse.html#langfusetrace.__init__',
- 'ragas_annotator/tracing/langfuse.py'),
- 'ragas_annotator.tracing.langfuse.LangfuseTrace.filter': ( 'tracing/langfuse.html#langfusetrace.filter',
- 'ragas_annotator/tracing/langfuse.py'),
- 'ragas_annotator.tracing.langfuse.LangfuseTrace.get_url': ( 'tracing/langfuse.html#langfusetrace.get_url',
- 'ragas_annotator/tracing/langfuse.py'),
- 'ragas_annotator.tracing.langfuse.add_query_param': ( 'tracing/langfuse.html#add_query_param',
- 'ragas_annotator/tracing/langfuse.py'),
- 'ragas_annotator.tracing.langfuse.sync_trace': ( 'tracing/langfuse.html#sync_trace',
- 'ragas_annotator/tracing/langfuse.py')}}}
diff --git a/ragas_annotator/backends/factory.py b/ragas_annotator/backends/factory.py
deleted file mode 100644
index 715fc9e..0000000
--- a/ragas_annotator/backends/factory.py
+++ /dev/null
@@ -1,154 +0,0 @@
-"""Factory class for creating the backends or mocked backends."""
-
-# AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/backends/factory.ipynb.
-
-# %% auto 0
-__all__ = ['NotionClientFactory', 'NotionBackendFactory']
-
-# %% ../../nbs/backends/factory.ipynb 2
-import typing as t
-import os
-
-from notion_client import Client as NotionClient
-from .mock_notion import MockNotionClient
-from .notion_backend import NotionBackend
-
-# %% ../../nbs/backends/factory.ipynb 3
-class NotionClientFactory:
- """Factory for creating Notion client instances."""
-
- @staticmethod
- def create(
- use_mock: bool = False,
- api_key: t.Optional[str] = None,
- initialize_project: bool = False,
- root_page_id: t.Optional[str] = None,
- ) -> t.Union[NotionClient, MockNotionClient]:
- """Create a Notion client.
-
- Args:
- use_mock: If True, create a mock client
- api_key: Notion API key (only used for real client)
- initialize_project: If True and using mock, initialize project structure
- root_page_id: Required if initialize_project is True
-
- Returns:
- Union[NotionClient, MockNotionClient]: A real or mock client
- """
- if use_mock:
- client = MockNotionClient()
-
- # Optionally initialize project structure
- if initialize_project and root_page_id:
- # Create root page if it doesn't exist in the mock client
- if root_page_id not in client._pages:
- # Create root page
- root_page = {
- "id": root_page_id,
- "object": "page",
- "created_time": client._get_timestamp(),
- "last_edited_time": client._get_timestamp(),
- "archived": False,
- "properties": {
- "title": {
- "type": "title",
- "title": [
- {
- "plain_text": "Root Page",
- "type": "text",
- "text": {"content": "Root Page"},
- }
- ],
- }
- },
- }
- client.add_page(root_page)
-
- # Create required sub-pages
- for page_name in ["Datasets", "Experiments", "Comparisons"]:
- # Create page ID
- page_id = client._create_id()
-
- # Create page
- page = {
- "id": page_id,
- "object": "page",
- "created_time": client._get_timestamp(),
- "last_edited_time": client._get_timestamp(),
- "archived": False,
- "properties": {
- "title": {
- "type": "title",
- "title": [
- {
- "plain_text": page_name,
- "type": "text",
- "text": {"content": page_name},
- }
- ],
- }
- },
- "parent": {"type": "page_id", "page_id": root_page_id},
- }
- client.add_page(page)
-
- # Add child block to root
- child_block = {
- "id": client._create_id(),
- "object": "block",
- "type": "child_page",
- "created_time": client._get_timestamp(),
- "last_edited_time": client._get_timestamp(),
- "child_page": {"title": page_name},
- }
-
- client.add_children(root_page_id, [child_block])
-
- return client
- else:
- # For real client, use provided API key or environment variable
- if api_key is None:
- api_key = os.getenv("NOTION_API_KEY")
-
- if api_key is None:
- raise ValueError(
- "api_key must be provided or set as NOTION_API_KEY environment variable"
- )
-
- return NotionClient(auth=api_key)
-
-# %% ../../nbs/backends/factory.ipynb 7
-class NotionBackendFactory:
- """Factory for creating NotionBackend instances."""
-
- @staticmethod
- def create(
- root_page_id: str,
- use_mock: bool = False,
- api_key: t.Optional[str] = None,
- initialize_project: bool = False,
- notion_client: t.Optional[t.Union[NotionClient, MockNotionClient]] = None,
- ) -> NotionBackend:
- """Create a NotionBackend instance.
-
- Args:
- root_page_id: The ID of the root page
- use_mock: If True, create a backend with a mock client
- api_key: Notion API key (only used for real client)
- initialize_project: If True and using mock, initialize project structure
- notion_client: Optional pre-configured Notion client
-
- Returns:
- NotionBackend: A backend instance with either real or mock client
- """
- # Use provided client or create one
- if notion_client is None:
- notion_client = NotionClientFactory.create(
- use_mock=use_mock,
- api_key=api_key,
- initialize_project=initialize_project,
- root_page_id=root_page_id,
- )
-
- # Create and return the backend
- return NotionBackend(root_page_id=root_page_id, notion_client=notion_client)
diff --git a/ragas_annotator/dataset.py b/ragas_annotator/dataset.py
deleted file mode 100644
index 2900b4d..0000000
--- a/ragas_annotator/dataset.py
+++ /dev/null
@@ -1,170 +0,0 @@
-"""A python list like object that contains your evaluation data."""
-
-# AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/dataset.ipynb.
-
-# %% auto 0
-__all__ = ['NotionModelType', 'Dataset']
-
-# %% ../nbs/dataset.ipynb 3
-import typing as t
-
-from fastcore.utils import patch
-
-from .model.notion_model import NotionModel
-from .backends.notion_backend import NotionBackend
-
-# %% ../nbs/dataset.ipynb 4
-NotionModelType = t.TypeVar("NotionModelType", bound=NotionModel)
-
-
-class Dataset(t.Generic[NotionModelType]):
- """A list-like interface for managing NotionModel instances in a Notion database."""
-
- def __init__(
- self,
- name: str,
- model: t.Type[NotionModel],
- database_id: str,
- notion_backend: NotionBackend,
- ):
- self.name = name
- self.model = model
- self.database_id = database_id
- self._notion_backend = notion_backend
- self._entries: t.List[NotionModelType] = []
-
- def __getitem__(
- self, key: t.Union[int, slice]
- ) -> t.Union[NotionModelType, "Dataset[NotionModelType]"]:
- """Get an entry by index or slice."""
- if isinstance(key, slice):
- new_dataset = type(self)(
- name=self.name,
- model=self.model,
- database_id=self.database_id,
- notion_backend=self._notion_backend,
- )
- new_dataset._entries = self._entries[key]
- return new_dataset
- else:
- return self._entries[key]
-
- def __setitem__(self, index: int, entry: NotionModelType) -> None:
- """Update an entry at the given index and sync to Notion."""
- if not isinstance(entry, self.model):
- raise TypeError(f"Entry must be an instance of {self.model.__name__}")
-
- # Get existing entry to get Notion page ID
- existing = self._entries[index]
- if not hasattr(existing, "_page_id"):
- raise ValueError("Existing entry has no page_id")
-
- # Update in Notion
- assert (
- existing._page_id is not None
- ) # mypy fails to infer that we check for it above
- response = self._notion_backend.update_page(
- page_id=existing._page_id, properties=entry.to_notion()["properties"]
- )
-
- # Update local cache with response data
- self._entries[index] = self.model.from_notion(response)
-
- def __repr__(self) -> str:
- return (
- f"Dataset(name={self.name}, model={self.model.__name__}, len={len(self)})"
- )
-
- def __len__(self) -> int:
- return len(self._entries)
-
- def __iter__(self) -> t.Iterator[NotionModelType]:
- return iter(self._entries)
-
-# %% ../nbs/dataset.ipynb 9
-@patch
-def append(self: Dataset, entry: NotionModelType) -> None:
- """Add a new entry to the dataset and sync to Notion."""
- # if not isinstance(entry, self.model):
- # raise TypeError(f"Entry must be an instance of {self.model.__name__}")
-
- # Create in Notion and get response
- response = self._notion_backend.create_page_in_database(
- database_id=self.database_id, properties=entry.to_notion()["properties"]
- )
-
- # Update entry with Notion data (like ID)
- updated_entry = self.model.from_notion(response)
- self._entries.append(updated_entry)
-
-# %% ../nbs/dataset.ipynb 12
-@patch
-def pop(self: Dataset, index: int = -1) -> NotionModelType:
- """Remove and return entry at index, sync deletion to Notion."""
- entry = self._entries[index]
- if not hasattr(entry, "_page_id"):
- raise ValueError("Entry has no page_id")
-
- # Archive in Notion (soft delete)
- assert entry._page_id is not None # mypy fails to infer that we check for it above
- self._notion_backend.update_page(page_id=entry._page_id, archived=True)
-
- # Remove from local cache
- return self._entries.pop(index)
-
-# %% ../nbs/dataset.ipynb 15
-@patch
-def load(self: Dataset) -> None:
- """Load all entries from the Notion database."""
- # Query the database
- response = self._notion_backend.query_database(
- database_id=self.database_id, archived=False
- )
-
- # Clear existing entries
- self._entries.clear()
-
- # Convert results to model instances
- for page in response.get("results", []):
- entry = self.model.from_notion(page)
- self._entries.append(entry)
-
-# %% ../nbs/dataset.ipynb 20
-@patch
-def get(self: Dataset, id: int) -> t.Optional[NotionModelType]:
- """Get an entry by ID."""
- if not self._notion_backend:
- return None
-
- # Query the database for the specific ID
- response = self._notion_backend.query_database(
- database_id=self.database_id,
- filter={"property": "id", "unique_id": {"equals": id}},
- )
-
- if not response.get("results"):
- return None
-
- return self.model.from_notion(response["results"][0])
-
-# %% ../nbs/dataset.ipynb 23
-@patch
-def save(self: Dataset, item: NotionModelType) -> None:
- """Save changes to an item to Notion."""
- if not isinstance(item, self.model):
- raise TypeError(f"Item must be an instance of {self.model.__name__}")
-
- if not hasattr(item, "_page_id"):
- raise ValueError("Item has no page_id")
-
- # Update in Notion
- assert item._page_id is not None # mypy fails to infer that we check for it above
- response = self._notion_backend.update_page(
- page_id=item._page_id, properties=item.to_notion()["properties"]
- )
-
- # Update local cache
- for i, existing in enumerate(self._entries):
- if existing._page_id == item._page_id:
- self._entries[i] = self.model.from_notion(response)
- break
diff --git a/ragas_annotator/embedding/__init__.py b/ragas_annotator/embedding/__init__.py
deleted file mode 100644
index eb0ef1a..0000000
--- a/ragas_annotator/embedding/__init__.py
+++ /dev/null
@@ -1,4 +0,0 @@
-from ragas_annotator.embedding.base import BaseEmbedding
-from ragas_annotator.embedding.base import ragas_embedding
-
-__all__ = ['ragas_embedding','BaseEmbedding']
\ No newline at end of file
diff --git a/ragas_annotator/llm/__init__.py b/ragas_annotator/llm/__init__.py
deleted file mode 100644
index cea67d0..0000000
--- a/ragas_annotator/llm/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-from ragas_annotator.llm.llm import RagasLLM, ragas_llm
-
-__all__ = ["RagasLLM", "ragas_llm"]
\ No newline at end of file
diff --git a/ragas_annotator/metric/__init__.py b/ragas_annotator/metric/__init__.py
deleted file mode 100644
index 4733fc4..0000000
--- a/ragas_annotator/metric/__init__.py
+++ /dev/null
@@ -1,12 +0,0 @@
-from ragas_annotator.metric.result import MetricResult
-from ragas_annotator.metric.base import Metric
-from ragas_annotator.metric.discrete import DiscreteMetric
-from ragas_annotator.metric.numeric import NumericMetric
-from ragas_annotator.metric.ranking import RankingMetric
-
-__all__ = ['MetricResult',
- 'Metric',
- 'DiscreteMetric',
- 'NumericMetric',
- 'RankingMetric',
- ]
diff --git a/ragas_annotator/project/core.py b/ragas_annotator/project/core.py
deleted file mode 100644
index fc2dda3..0000000
--- a/ragas_annotator/project/core.py
+++ /dev/null
@@ -1,169 +0,0 @@
-"""Use this class to represent the AI project that we are working on and to interact with datasets and experiments in it."""
-
-# AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/project/core.ipynb.
-
-# %% auto 0
-__all__ = ['Project']
-
-# %% ../../nbs/project/core.ipynb 3
-import typing as t
-import os
-
-from notion_client import Client as NotionClient
-from fastcore.utils import patch
-
-from ..backends.notion_backend import NotionBackend
-from ..backends.factory import NotionBackendFactory
-from ..model.notion_model import NotionModel
-import ragas_annotator.model.notion_typing as nmt
-from ..dataset import Dataset
-from ..experiment import Experiment
-
-# %% ../../nbs/project/core.ipynb 4
-class Project:
- def __init__(
- self,
- name: str,
- notion_backend: t.Optional[NotionBackend] = None,
- notion_api_key: t.Optional[str] = None,
- notion_root_page_id: t.Optional[str] = None,
- ):
- self.name = name
- self.datasets_page_id = ""
- self.experiments_page_id = ""
- self.comparisons_page_id = ""
-
- if notion_backend is None:
- # check that the environment variables are set
- notion_api_key = os.getenv("NOTION_API_KEY") or notion_api_key
- notion_root_page_id = (
- os.getenv("NOTION_ROOT_PAGE_ID") or notion_root_page_id
- )
-
- if notion_api_key is None:
- raise ValueError("NOTION_API_KEY is not set")
-
- if notion_root_page_id is None:
- raise ValueError("NOTION_ROOT_PAGE_ID is not set")
-
- if notion_api_key == "TEST":
- self._notion_backend = NotionBackendFactory.create(
- root_page_id=notion_root_page_id,
- use_mock=True,
- initialize_project=True,
- )
- else:
- self._notion_backend = NotionBackend(
- notion_client=NotionClient(auth=notion_api_key),
- root_page_id=notion_root_page_id,
- )
- else:
- self._notion_backend = notion_backend
-
- # initialize the project structure
- self.initialize()
-
- def initialize(self):
- """Initialize the project structure in Notion."""
- root_page_id = self._notion_backend.root_page_id
-
- # if page doesn't exist, create it
- if not self._notion_backend.page_exists(root_page_id):
- raise ValueError(f"Root page '{root_page_id}' does not exist")
- # if page exists, but structure is invalid
- elif not self._notion_backend.validate_project_structure(root_page_id):
- # create the missing pages
- print(f"Creating missing pages inside root page '{root_page_id}'")
- self._create_project_structure(root_page_id)
- else:
- # if page exists and structure is valid, get the page ids
- # for datasets, experiments, and comparisons
- self.datasets_page_id = self._notion_backend.get_page_id(
- root_page_id, "Datasets"
- )
- self.experiments_page_id = self._notion_backend.get_page_id(
- root_page_id, "Experiments"
- )
- self.comparisons_page_id = self._notion_backend.get_page_id(
- root_page_id, "Comparisons"
- )
-
- def _create_project_structure(self, root_page_id: str):
- """Create the basic project structure with required pages."""
- # Create each required page
- self.datasets_page_id = self._notion_backend.create_new_page(
- root_page_id, "Datasets"
- )
- self.experiments_page_id = self._notion_backend.create_new_page(
- root_page_id, "Experiments"
- )
- self.comparisons_page_id = self._notion_backend.create_new_page(
- root_page_id, "Comparisons"
- )
-
- def __repr__(self):
- return f"Project(name='{self.name}', root_page_id={self._notion_backend.root_page_id})"
-
-# %% ../../nbs/project/core.ipynb 9
-@patch
-def create_dataset(
- self: Project, model: t.Type[NotionModel], name: t.Optional[str] = None
-) -> Dataset:
- """Create a new dataset database.
-
- Args:
- name (str): Name of the dataset
- model (NotionModel): Model class defining the database structure
-
- Returns:
- Dataset: A new dataset object for managing entries
- """
- # Collect all properties from model fields
- properties = {}
- has_title = False
- for field_name, field in model._fields.items():
- properties.update(field._to_notion_property())
- if isinstance(field, nmt.Title): # Check if we have a title field
- has_title = True
-
- if not has_title:
- raise ValueError(
- "In order to create a dataset, the model must have a nmt.Title field"
- )
-
- # Create the database
- if self.datasets_page_id == "":
- raise ValueError("Datasets page ID is not set")
- database_id = self._notion_backend.create_new_database(
- parent_page_id=self.datasets_page_id,
- title=name if name is not None else model.__name__,
- properties=properties,
- )
-
- # Return a new Dataset instance
- return Dataset(
- name=name if name is not None else model.__name__,
- model=model,
- database_id=database_id,
- notion_backend=self._notion_backend,
- )
-
-# %% ../../nbs/project/core.ipynb 12
-@patch
-def get_dataset(self: Project, name: str, model: t.Type[NotionModel]) -> Dataset:
- """Get an existing dataset by name."""
- if self.datasets_page_id == "":
- raise ValueError("Datasets page ID is not set")
-
- # Search for database with given name
- database_id = self._notion_backend.get_database_id(
- parent_page_id=self.datasets_page_id, name=name, return_multiple=False
- )
-
- # For now, return Dataset without model type
- return Dataset(
- name=name,
- model=model,
- database_id=database_id,
- notion_backend=self._notion_backend,
- )
diff --git a/ragas_annotator/prompt/__init__.py b/ragas_annotator/prompt/__init__.py
deleted file mode 100644
index a0dffbc..0000000
--- a/ragas_annotator/prompt/__init__.py
+++ /dev/null
@@ -1,5 +0,0 @@
-from ragas_annotator.prompt.base import Prompt
-from ragas_annotator.prompt.dynamic_few_shot import DynamicFewShotPrompt
-
-
-__all__ = ['Prompt', 'DynamicFewShotPrompt']
\ No newline at end of file
diff --git a/ragas_annotator/__init__.py b/ragas_experimental/__init__.py
similarity index 51%
rename from ragas_annotator/__init__.py
rename to ragas_experimental/__init__.py
index cb36591..90d803c 100644
--- a/ragas_annotator/__init__.py
+++ b/ragas_experimental/__init__.py
@@ -6,12 +6,13 @@
# %% ../nbs/init_module.ipynb 2
from .project.core import Project
-import ragas_annotator.model.notion_typing as nmt
+import ragas_experimental.model.notion_typing as nmt
from .model.notion_model import NotionModel
+from .model.pydantic_model import ExtendedPydanticBaseModel as BaseModel
# just import to run the module
-import ragas_annotator.project.experiments
-import ragas_annotator.project.comparison
+import ragas_experimental.project.experiments
+import ragas_experimental.project.comparison
# %% ../nbs/init_module.ipynb 3
-__all__ = ["Project", "NotionModel", "nmt"]
+__all__ = ["Project", "NotionModel", "nmt", "BaseModel"]
diff --git a/ragas_experimental/_modidx.py b/ragas_experimental/_modidx.py
new file mode 100644
index 0000000..ad33045
--- /dev/null
+++ b/ragas_experimental/_modidx.py
@@ -0,0 +1,690 @@
+# Autogenerated by nbdev
+
+d = { 'settings': { 'branch': 'main',
+ 'doc_baseurl': '/ragas_annotator',
+ 'doc_host': 'https://explodinggradients.github.io',
+ 'git_url': 'https://github.com/explodinggradients/ragas_annotator',
+ 'lib_path': 'ragas_experimental'},
+ 'syms': { 'ragas_experimental.backends.factory': { 'ragas_experimental.backends.factory.RagasApiClientFactory': ( 'backends/factory.html#ragasapiclientfactory',
+ 'ragas_experimental/backends/factory.py'),
+ 'ragas_experimental.backends.factory.RagasApiClientFactory.create': ( 'backends/factory.html#ragasapiclientfactory.create',
+ 'ragas_experimental/backends/factory.py')},
+ 'ragas_experimental.backends.mock_notion': { 'ragas_experimental.backends.mock_notion.MockBlockChildrenAPI': ( 'backends/mock_notion_client.html#mockblockchildrenapi',
+ 'ragas_experimental/backends/mock_notion.py'),
+ 'ragas_experimental.backends.mock_notion.MockBlockChildrenAPI.__init__': ( 'backends/mock_notion_client.html#mockblockchildrenapi.__init__',
+ 'ragas_experimental/backends/mock_notion.py'),
+ 'ragas_experimental.backends.mock_notion.MockBlockChildrenAPI.list': ( 'backends/mock_notion_client.html#mockblockchildrenapi.list',
+ 'ragas_experimental/backends/mock_notion.py'),
+ 'ragas_experimental.backends.mock_notion.MockBlocksAPI': ( 'backends/mock_notion_client.html#mockblocksapi',
+ 'ragas_experimental/backends/mock_notion.py'),
+ 'ragas_experimental.backends.mock_notion.MockBlocksAPI.__init__': ( 'backends/mock_notion_client.html#mockblocksapi.__init__',
+ 'ragas_experimental/backends/mock_notion.py'),
+ 'ragas_experimental.backends.mock_notion.MockBlocksAPI.retrieve': ( 'backends/mock_notion_client.html#mockblocksapi.retrieve',
+ 'ragas_experimental/backends/mock_notion.py'),
+ 'ragas_experimental.backends.mock_notion.MockDatabasesAPI': ( 'backends/mock_notion_client.html#mockdatabasesapi',
+ 'ragas_experimental/backends/mock_notion.py'),
+ 'ragas_experimental.backends.mock_notion.MockDatabasesAPI.__init__': ( 'backends/mock_notion_client.html#mockdatabasesapi.__init__',
+ 'ragas_experimental/backends/mock_notion.py'),
+ 'ragas_experimental.backends.mock_notion.MockDatabasesAPI._extract_title': ( 'backends/mock_notion_client.html#mockdatabasesapi._extract_title',
+ 'ragas_experimental/backends/mock_notion.py'),
+ 'ragas_experimental.backends.mock_notion.MockDatabasesAPI.create': ( 'backends/mock_notion_client.html#mockdatabasesapi.create',
+ 'ragas_experimental/backends/mock_notion.py'),
+ 'ragas_experimental.backends.mock_notion.MockDatabasesAPI.query': ( 'backends/mock_notion_client.html#mockdatabasesapi.query',
+ 'ragas_experimental/backends/mock_notion.py'),
+ 'ragas_experimental.backends.mock_notion.MockDatabasesAPI.retrieve': ( 'backends/mock_notion_client.html#mockdatabasesapi.retrieve',
+ 'ragas_experimental/backends/mock_notion.py'),
+ 'ragas_experimental.backends.mock_notion.MockNotionClient': ( 'backends/mock_notion_client.html#mocknotionclient',
+ 'ragas_experimental/backends/mock_notion.py'),
+ 'ragas_experimental.backends.mock_notion.MockNotionClient.__init__': ( 'backends/mock_notion_client.html#mocknotionclient.__init__',
+ 'ragas_experimental/backends/mock_notion.py'),
+ 'ragas_experimental.backends.mock_notion.MockNotionClient.__str__': ( 'backends/mock_notion_client.html#mocknotionclient.__str__',
+ 'ragas_experimental/backends/mock_notion.py'),
+ 'ragas_experimental.backends.mock_notion.MockNotionClient._create_id': ( 'backends/mock_notion_client.html#mocknotionclient._create_id',
+ 'ragas_experimental/backends/mock_notion.py'),
+ 'ragas_experimental.backends.mock_notion.MockNotionClient._get_timestamp': ( 'backends/mock_notion_client.html#mocknotionclient._get_timestamp',
+ 'ragas_experimental/backends/mock_notion.py'),
+ 'ragas_experimental.backends.mock_notion.MockNotionClient.add_block': ( 'backends/mock_notion_client.html#mocknotionclient.add_block',
+ 'ragas_experimental/backends/mock_notion.py'),
+ 'ragas_experimental.backends.mock_notion.MockNotionClient.add_children': ( 'backends/mock_notion_client.html#mocknotionclient.add_children',
+ 'ragas_experimental/backends/mock_notion.py'),
+ 'ragas_experimental.backends.mock_notion.MockNotionClient.add_database': ( 'backends/mock_notion_client.html#mocknotionclient.add_database',
+ 'ragas_experimental/backends/mock_notion.py'),
+ 'ragas_experimental.backends.mock_notion.MockNotionClient.add_page': ( 'backends/mock_notion_client.html#mocknotionclient.add_page',
+ 'ragas_experimental/backends/mock_notion.py'),
+ 'ragas_experimental.backends.mock_notion.MockPagesAPI': ( 'backends/mock_notion_client.html#mockpagesapi',
+ 'ragas_experimental/backends/mock_notion.py'),
+ 'ragas_experimental.backends.mock_notion.MockPagesAPI.__init__': ( 'backends/mock_notion_client.html#mockpagesapi.__init__',
+ 'ragas_experimental/backends/mock_notion.py'),
+ 'ragas_experimental.backends.mock_notion.MockPagesAPI._extract_title': ( 'backends/mock_notion_client.html#mockpagesapi._extract_title',
+ 'ragas_experimental/backends/mock_notion.py'),
+ 'ragas_experimental.backends.mock_notion.MockPagesAPI.create': ( 'backends/mock_notion_client.html#mockpagesapi.create',
+ 'ragas_experimental/backends/mock_notion.py'),
+ 'ragas_experimental.backends.mock_notion.MockPagesAPI.retrieve': ( 'backends/mock_notion_client.html#mockpagesapi.retrieve',
+ 'ragas_experimental/backends/mock_notion.py'),
+ 'ragas_experimental.backends.mock_notion.MockPagesAPI.update': ( 'backends/mock_notion_client.html#mockpagesapi.update',
+ 'ragas_experimental/backends/mock_notion.py')},
+ 'ragas_experimental.backends.notion_backend': { 'ragas_experimental.backends.notion_backend.NotionBackend': ( 'backends/notion.html#notionbackend',
+ 'ragas_experimental/backends/notion_backend.py'),
+ 'ragas_experimental.backends.notion_backend.NotionBackend.__init__': ( 'backends/notion.html#notionbackend.__init__',
+ 'ragas_experimental/backends/notion_backend.py'),
+ 'ragas_experimental.backends.notion_backend.NotionBackend.__repr__': ( 'backends/notion.html#notionbackend.__repr__',
+ 'ragas_experimental/backends/notion_backend.py'),
+ 'ragas_experimental.backends.notion_backend.NotionBackend.create_new_database': ( 'backends/notion.html#notionbackend.create_new_database',
+ 'ragas_experimental/backends/notion_backend.py'),
+ 'ragas_experimental.backends.notion_backend.NotionBackend.create_new_page': ( 'backends/notion.html#notionbackend.create_new_page',
+ 'ragas_experimental/backends/notion_backend.py'),
+ 'ragas_experimental.backends.notion_backend.NotionBackend.create_page_in_database': ( 'backends/notion.html#notionbackend.create_page_in_database',
+ 'ragas_experimental/backends/notion_backend.py'),
+ 'ragas_experimental.backends.notion_backend.NotionBackend.get_database': ( 'backends/notion.html#notionbackend.get_database',
+ 'ragas_experimental/backends/notion_backend.py'),
+ 'ragas_experimental.backends.notion_backend.NotionBackend.get_database_id': ( 'backends/notion.html#notionbackend.get_database_id',
+ 'ragas_experimental/backends/notion_backend.py'),
+ 'ragas_experimental.backends.notion_backend.NotionBackend.get_page_id': ( 'backends/notion.html#notionbackend.get_page_id',
+ 'ragas_experimental/backends/notion_backend.py'),
+ 'ragas_experimental.backends.notion_backend.NotionBackend.page_exists': ( 'backends/notion.html#notionbackend.page_exists',
+ 'ragas_experimental/backends/notion_backend.py'),
+ 'ragas_experimental.backends.notion_backend.NotionBackend.query_database': ( 'backends/notion.html#notionbackend.query_database',
+ 'ragas_experimental/backends/notion_backend.py'),
+ 'ragas_experimental.backends.notion_backend.NotionBackend.update_page': ( 'backends/notion.html#notionbackend.update_page',
+ 'ragas_experimental/backends/notion_backend.py'),
+ 'ragas_experimental.backends.notion_backend.NotionBackend.validate_project_structure': ( 'backends/notion.html#notionbackend.validate_project_structure',
+ 'ragas_experimental/backends/notion_backend.py'),
+ 'ragas_experimental.backends.notion_backend.get_database_id': ( 'backends/notion.html#get_database_id',
+ 'ragas_experimental/backends/notion_backend.py'),
+ 'ragas_experimental.backends.notion_backend.get_page_id': ( 'backends/notion.html#get_page_id',
+ 'ragas_experimental/backends/notion_backend.py')},
+ 'ragas_experimental.backends.ragas_api_client': { 'ragas_experimental.backends.ragas_api_client.Column': ( 'backends/ragas_api_client.html#column',
+ 'ragas_experimental/backends/ragas_api_client.py'),
+ 'ragas_experimental.backends.ragas_api_client.ColumnType': ( 'backends/ragas_api_client.html#columntype',
+ 'ragas_experimental/backends/ragas_api_client.py'),
+ 'ragas_experimental.backends.ragas_api_client.RagasApiClient': ( 'backends/ragas_api_client.html#ragasapiclient',
+ 'ragas_experimental/backends/ragas_api_client.py'),
+ 'ragas_experimental.backends.ragas_api_client.RagasApiClient.__init__': ( 'backends/ragas_api_client.html#ragasapiclient.__init__',
+ 'ragas_experimental/backends/ragas_api_client.py'),
+ 'ragas_experimental.backends.ragas_api_client.RagasApiClient._create_resource': ( 'backends/ragas_api_client.html#ragasapiclient._create_resource',
+ 'ragas_experimental/backends/ragas_api_client.py'),
+ 'ragas_experimental.backends.ragas_api_client.RagasApiClient._create_with_data': ( 'backends/ragas_api_client.html#ragasapiclient._create_with_data',
+ 'ragas_experimental/backends/ragas_api_client.py'),
+ 'ragas_experimental.backends.ragas_api_client.RagasApiClient._delete_resource': ( 'backends/ragas_api_client.html#ragasapiclient._delete_resource',
+ 'ragas_experimental/backends/ragas_api_client.py'),
+ 'ragas_experimental.backends.ragas_api_client.RagasApiClient._get_resource': ( 'backends/ragas_api_client.html#ragasapiclient._get_resource',
+ 'ragas_experimental/backends/ragas_api_client.py'),
+ 'ragas_experimental.backends.ragas_api_client.RagasApiClient._list_resources': ( 'backends/ragas_api_client.html#ragasapiclient._list_resources',
+ 'ragas_experimental/backends/ragas_api_client.py'),
+ 'ragas_experimental.backends.ragas_api_client.RagasApiClient._request': ( 'backends/ragas_api_client.html#ragasapiclient._request',
+ 'ragas_experimental/backends/ragas_api_client.py'),
+ 'ragas_experimental.backends.ragas_api_client.RagasApiClient._update_resource': ( 'backends/ragas_api_client.html#ragasapiclient._update_resource',
+ 'ragas_experimental/backends/ragas_api_client.py'),
+ 'ragas_experimental.backends.ragas_api_client.RagasApiClient.convert_raw_data': ( 'backends/ragas_api_client.html#ragasapiclient.convert_raw_data',
+ 'ragas_experimental/backends/ragas_api_client.py'),
+ 'ragas_experimental.backends.ragas_api_client.RagasApiClient.create_column': ( 'backends/ragas_api_client.html#ragasapiclient.create_column',
+ 'ragas_experimental/backends/ragas_api_client.py'),
+ 'ragas_experimental.backends.ragas_api_client.RagasApiClient.create_column_map': ( 'backends/ragas_api_client.html#ragasapiclient.create_column_map',
+ 'ragas_experimental/backends/ragas_api_client.py'),
+ 'ragas_experimental.backends.ragas_api_client.RagasApiClient.create_dataset': ( 'backends/ragas_api_client.html#ragasapiclient.create_dataset',
+ 'ragas_experimental/backends/ragas_api_client.py'),
+ 'ragas_experimental.backends.ragas_api_client.RagasApiClient.create_dataset_column': ( 'backends/ragas_api_client.html#ragasapiclient.create_dataset_column',
+ 'ragas_experimental/backends/ragas_api_client.py'),
+ 'ragas_experimental.backends.ragas_api_client.RagasApiClient.create_dataset_row': ( 'backends/ragas_api_client.html#ragasapiclient.create_dataset_row',
+ 'ragas_experimental/backends/ragas_api_client.py'),
+ 'ragas_experimental.backends.ragas_api_client.RagasApiClient.create_dataset_with_data': ( 'backends/ragas_api_client.html#ragasapiclient.create_dataset_with_data',
+ 'ragas_experimental/backends/ragas_api_client.py'),
+ 'ragas_experimental.backends.ragas_api_client.RagasApiClient.create_experiment': ( 'backends/ragas_api_client.html#ragasapiclient.create_experiment',
+ 'ragas_experimental/backends/ragas_api_client.py'),
+ 'ragas_experimental.backends.ragas_api_client.RagasApiClient.create_experiment_column': ( 'backends/ragas_api_client.html#ragasapiclient.create_experiment_column',
+ 'ragas_experimental/backends/ragas_api_client.py'),
+ 'ragas_experimental.backends.ragas_api_client.RagasApiClient.create_experiment_row': ( 'backends/ragas_api_client.html#ragasapiclient.create_experiment_row',
+ 'ragas_experimental/backends/ragas_api_client.py'),
+ 'ragas_experimental.backends.ragas_api_client.RagasApiClient.create_experiment_with_data': ( 'backends/ragas_api_client.html#ragasapiclient.create_experiment_with_data',
+ 'ragas_experimental/backends/ragas_api_client.py'),
+ 'ragas_experimental.backends.ragas_api_client.RagasApiClient.create_project': ( 'backends/ragas_api_client.html#ragasapiclient.create_project',
+ 'ragas_experimental/backends/ragas_api_client.py'),
+ 'ragas_experimental.backends.ragas_api_client.RagasApiClient.create_row': ( 'backends/ragas_api_client.html#ragasapiclient.create_row',
+ 'ragas_experimental/backends/ragas_api_client.py'),
+ 'ragas_experimental.backends.ragas_api_client.RagasApiClient.delete_dataset': ( 'backends/ragas_api_client.html#ragasapiclient.delete_dataset',
+ 'ragas_experimental/backends/ragas_api_client.py'),
+ 'ragas_experimental.backends.ragas_api_client.RagasApiClient.delete_dataset_column': ( 'backends/ragas_api_client.html#ragasapiclient.delete_dataset_column',
+ 'ragas_experimental/backends/ragas_api_client.py'),
+ 'ragas_experimental.backends.ragas_api_client.RagasApiClient.delete_dataset_row': ( 'backends/ragas_api_client.html#ragasapiclient.delete_dataset_row',
+ 'ragas_experimental/backends/ragas_api_client.py'),
+ 'ragas_experimental.backends.ragas_api_client.RagasApiClient.delete_experiment': ( 'backends/ragas_api_client.html#ragasapiclient.delete_experiment',
+ 'ragas_experimental/backends/ragas_api_client.py'),
+ 'ragas_experimental.backends.ragas_api_client.RagasApiClient.delete_experiment_column': ( 'backends/ragas_api_client.html#ragasapiclient.delete_experiment_column',
+ 'ragas_experimental/backends/ragas_api_client.py'),
+ 'ragas_experimental.backends.ragas_api_client.RagasApiClient.delete_experiment_row': ( 'backends/ragas_api_client.html#ragasapiclient.delete_experiment_row',
+ 'ragas_experimental/backends/ragas_api_client.py'),
+ 'ragas_experimental.backends.ragas_api_client.RagasApiClient.delete_project': ( 'backends/ragas_api_client.html#ragasapiclient.delete_project',
+ 'ragas_experimental/backends/ragas_api_client.py'),
+ 'ragas_experimental.backends.ragas_api_client.RagasApiClient.get_dataset': ( 'backends/ragas_api_client.html#ragasapiclient.get_dataset',
+ 'ragas_experimental/backends/ragas_api_client.py'),
+ 'ragas_experimental.backends.ragas_api_client.RagasApiClient.get_dataset_column': ( 'backends/ragas_api_client.html#ragasapiclient.get_dataset_column',
+ 'ragas_experimental/backends/ragas_api_client.py'),
+ 'ragas_experimental.backends.ragas_api_client.RagasApiClient.get_dataset_row': ( 'backends/ragas_api_client.html#ragasapiclient.get_dataset_row',
+ 'ragas_experimental/backends/ragas_api_client.py'),
+ 'ragas_experimental.backends.ragas_api_client.RagasApiClient.get_experiment': ( 'backends/ragas_api_client.html#ragasapiclient.get_experiment',
+ 'ragas_experimental/backends/ragas_api_client.py'),
+ 'ragas_experimental.backends.ragas_api_client.RagasApiClient.get_experiment_column': ( 'backends/ragas_api_client.html#ragasapiclient.get_experiment_column',
+ 'ragas_experimental/backends/ragas_api_client.py'),
+ 'ragas_experimental.backends.ragas_api_client.RagasApiClient.get_experiment_row': ( 'backends/ragas_api_client.html#ragasapiclient.get_experiment_row',
+ 'ragas_experimental/backends/ragas_api_client.py'),
+ 'ragas_experimental.backends.ragas_api_client.RagasApiClient.get_project': ( 'backends/ragas_api_client.html#ragasapiclient.get_project',
+ 'ragas_experimental/backends/ragas_api_client.py'),
+ 'ragas_experimental.backends.ragas_api_client.RagasApiClient.list_dataset_columns': ( 'backends/ragas_api_client.html#ragasapiclient.list_dataset_columns',
+ 'ragas_experimental/backends/ragas_api_client.py'),
+ 'ragas_experimental.backends.ragas_api_client.RagasApiClient.list_dataset_rows': ( 'backends/ragas_api_client.html#ragasapiclient.list_dataset_rows',
+ 'ragas_experimental/backends/ragas_api_client.py'),
+ 'ragas_experimental.backends.ragas_api_client.RagasApiClient.list_datasets': ( 'backends/ragas_api_client.html#ragasapiclient.list_datasets',
+ 'ragas_experimental/backends/ragas_api_client.py'),
+ 'ragas_experimental.backends.ragas_api_client.RagasApiClient.list_experiment_columns': ( 'backends/ragas_api_client.html#ragasapiclient.list_experiment_columns',
+ 'ragas_experimental/backends/ragas_api_client.py'),
+ 'ragas_experimental.backends.ragas_api_client.RagasApiClient.list_experiment_rows': ( 'backends/ragas_api_client.html#ragasapiclient.list_experiment_rows',
+ 'ragas_experimental/backends/ragas_api_client.py'),
+ 'ragas_experimental.backends.ragas_api_client.RagasApiClient.list_experiments': ( 'backends/ragas_api_client.html#ragasapiclient.list_experiments',
+ 'ragas_experimental/backends/ragas_api_client.py'),
+ 'ragas_experimental.backends.ragas_api_client.RagasApiClient.list_projects': ( 'backends/ragas_api_client.html#ragasapiclient.list_projects',
+ 'ragas_experimental/backends/ragas_api_client.py'),
+ 'ragas_experimental.backends.ragas_api_client.RagasApiClient.update_dataset': ( 'backends/ragas_api_client.html#ragasapiclient.update_dataset',
+ 'ragas_experimental/backends/ragas_api_client.py'),
+ 'ragas_experimental.backends.ragas_api_client.RagasApiClient.update_dataset_column': ( 'backends/ragas_api_client.html#ragasapiclient.update_dataset_column',
+ 'ragas_experimental/backends/ragas_api_client.py'),
+ 'ragas_experimental.backends.ragas_api_client.RagasApiClient.update_dataset_row': ( 'backends/ragas_api_client.html#ragasapiclient.update_dataset_row',
+ 'ragas_experimental/backends/ragas_api_client.py'),
+ 'ragas_experimental.backends.ragas_api_client.RagasApiClient.update_experiment': ( 'backends/ragas_api_client.html#ragasapiclient.update_experiment',
+ 'ragas_experimental/backends/ragas_api_client.py'),
+ 'ragas_experimental.backends.ragas_api_client.RagasApiClient.update_experiment_column': ( 'backends/ragas_api_client.html#ragasapiclient.update_experiment_column',
+ 'ragas_experimental/backends/ragas_api_client.py'),
+ 'ragas_experimental.backends.ragas_api_client.RagasApiClient.update_experiment_row': ( 'backends/ragas_api_client.html#ragasapiclient.update_experiment_row',
+ 'ragas_experimental/backends/ragas_api_client.py'),
+ 'ragas_experimental.backends.ragas_api_client.RagasApiClient.update_project': ( 'backends/ragas_api_client.html#ragasapiclient.update_project',
+ 'ragas_experimental/backends/ragas_api_client.py'),
+ 'ragas_experimental.backends.ragas_api_client.Row': ( 'backends/ragas_api_client.html#row',
+ 'ragas_experimental/backends/ragas_api_client.py'),
+ 'ragas_experimental.backends.ragas_api_client.RowCell': ( 'backends/ragas_api_client.html#rowcell',
+ 'ragas_experimental/backends/ragas_api_client.py'),
+ 'ragas_experimental.backends.ragas_api_client.create_nano_id': ( 'backends/ragas_api_client.html#create_nano_id',
+ 'ragas_experimental/backends/ragas_api_client.py')},
+ 'ragas_experimental.core': {'ragas_experimental.core.foo': ('core.html#foo', 'ragas_experimental/core.py')},
+ 'ragas_experimental.dataset': { 'ragas_experimental.dataset.Dataset': ('dataset.html#dataset', 'ragas_experimental/dataset.py'),
+ 'ragas_experimental.dataset.Dataset.__getitem__': ( 'dataset.html#dataset.__getitem__',
+ 'ragas_experimental/dataset.py'),
+ 'ragas_experimental.dataset.Dataset.__init__': ( 'dataset.html#dataset.__init__',
+ 'ragas_experimental/dataset.py'),
+ 'ragas_experimental.dataset.Dataset.__iter__': ( 'dataset.html#dataset.__iter__',
+ 'ragas_experimental/dataset.py'),
+ 'ragas_experimental.dataset.Dataset.__len__': ( 'dataset.html#dataset.__len__',
+ 'ragas_experimental/dataset.py'),
+ 'ragas_experimental.dataset.Dataset.__repr__': ( 'dataset.html#dataset.__repr__',
+ 'ragas_experimental/dataset.py'),
+ 'ragas_experimental.dataset.Dataset.__setitem__': ( 'dataset.html#dataset.__setitem__',
+ 'ragas_experimental/dataset.py'),
+ 'ragas_experimental.dataset.Dataset._get_column_id_map': ( 'dataset.html#dataset._get_column_id_map',
+ 'ragas_experimental/dataset.py'),
+ 'ragas_experimental.dataset.Dataset.append': ( 'dataset.html#dataset.append',
+ 'ragas_experimental/dataset.py'),
+ 'ragas_experimental.dataset.Dataset.get': ( 'dataset.html#dataset.get',
+ 'ragas_experimental/dataset.py'),
+ 'ragas_experimental.dataset.Dataset.load': ( 'dataset.html#dataset.load',
+ 'ragas_experimental/dataset.py'),
+ 'ragas_experimental.dataset.Dataset.load_as_dicts': ( 'dataset.html#dataset.load_as_dicts',
+ 'ragas_experimental/dataset.py'),
+ 'ragas_experimental.dataset.Dataset.pop': ( 'dataset.html#dataset.pop',
+ 'ragas_experimental/dataset.py'),
+ 'ragas_experimental.dataset.Dataset.save': ( 'dataset.html#dataset.save',
+ 'ragas_experimental/dataset.py'),
+ 'ragas_experimental.dataset.Dataset.to_pandas': ( 'dataset.html#dataset.to_pandas',
+ 'ragas_experimental/dataset.py')},
+ 'ragas_experimental.embedding.base': { 'ragas_experimental.embedding.base.BaseEmbedding': ( 'embedding/base.html#baseembedding',
+ 'ragas_experimental/embedding/base.py'),
+ 'ragas_experimental.embedding.base.BaseEmbedding.aembed_document': ( 'embedding/base.html#baseembedding.aembed_document',
+ 'ragas_experimental/embedding/base.py'),
+ 'ragas_experimental.embedding.base.BaseEmbedding.aembed_text': ( 'embedding/base.html#baseembedding.aembed_text',
+ 'ragas_experimental/embedding/base.py'),
+ 'ragas_experimental.embedding.base.BaseEmbedding.embed_document': ( 'embedding/base.html#baseembedding.embed_document',
+ 'ragas_experimental/embedding/base.py'),
+ 'ragas_experimental.embedding.base.BaseEmbedding.embed_text': ( 'embedding/base.html#baseembedding.embed_text',
+ 'ragas_experimental/embedding/base.py'),
+ 'ragas_experimental.embedding.base.OpenAIEmbeddings': ( 'embedding/base.html#openaiembeddings',
+ 'ragas_experimental/embedding/base.py'),
+ 'ragas_experimental.embedding.base.OpenAIEmbeddings.__init__': ( 'embedding/base.html#openaiembeddings.__init__',
+ 'ragas_experimental/embedding/base.py'),
+ 'ragas_experimental.embedding.base.OpenAIEmbeddings.aembed_document': ( 'embedding/base.html#openaiembeddings.aembed_document',
+ 'ragas_experimental/embedding/base.py'),
+ 'ragas_experimental.embedding.base.OpenAIEmbeddings.aembed_text': ( 'embedding/base.html#openaiembeddings.aembed_text',
+ 'ragas_experimental/embedding/base.py'),
+ 'ragas_experimental.embedding.base.OpenAIEmbeddings.embed_document': ( 'embedding/base.html#openaiembeddings.embed_document',
+ 'ragas_experimental/embedding/base.py'),
+ 'ragas_experimental.embedding.base.OpenAIEmbeddings.embed_text': ( 'embedding/base.html#openaiembeddings.embed_text',
+ 'ragas_experimental/embedding/base.py'),
+ 'ragas_experimental.embedding.base.ragas_embedding': ( 'embedding/base.html#ragas_embedding',
+ 'ragas_experimental/embedding/base.py')},
+ 'ragas_experimental.exceptions': { 'ragas_experimental.exceptions.DuplicateError': ( 'exceptions.html#duplicateerror',
+ 'ragas_experimental/exceptions.py'),
+ 'ragas_experimental.exceptions.NotFoundError': ( 'exceptions.html#notfounderror',
+ 'ragas_experimental/exceptions.py'),
+ 'ragas_experimental.exceptions.ValidationError': ( 'exceptions.html#validationerror',
+ 'ragas_experimental/exceptions.py')},
+ 'ragas_experimental.experiment': { 'ragas_experimental.experiment.Experiment': ( 'experiment.html#experiment',
+ 'ragas_experimental/experiment.py'),
+ 'ragas_experimental.experiment.Experiment.__init__': ( 'experiment.html#experiment.__init__',
+ 'ragas_experimental/experiment.py'),
+ 'ragas_experimental.experiment.Experiment.__str__': ( 'experiment.html#experiment.__str__',
+ 'ragas_experimental/experiment.py')},
+ 'ragas_experimental.llm.llm': { 'ragas_experimental.llm.llm.RagasLLM': ( 'llm/llm.html#ragasllm',
+ 'ragas_experimental/llm/llm.py'),
+ 'ragas_experimental.llm.llm.RagasLLM.__init__': ( 'llm/llm.html#ragasllm.__init__',
+ 'ragas_experimental/llm/llm.py'),
+ 'ragas_experimental.llm.llm.RagasLLM._check_client_async': ( 'llm/llm.html#ragasllm._check_client_async',
+ 'ragas_experimental/llm/llm.py'),
+ 'ragas_experimental.llm.llm.RagasLLM._initialize_client': ( 'llm/llm.html#ragasllm._initialize_client',
+ 'ragas_experimental/llm/llm.py'),
+ 'ragas_experimental.llm.llm.RagasLLM._run_async_in_current_loop': ( 'llm/llm.html#ragasllm._run_async_in_current_loop',
+ 'ragas_experimental/llm/llm.py'),
+ 'ragas_experimental.llm.llm.RagasLLM.agenerate': ( 'llm/llm.html#ragasllm.agenerate',
+ 'ragas_experimental/llm/llm.py'),
+ 'ragas_experimental.llm.llm.RagasLLM.generate': ( 'llm/llm.html#ragasllm.generate',
+ 'ragas_experimental/llm/llm.py'),
+ 'ragas_experimental.llm.llm.ragas_llm': ( 'llm/llm.html#ragas_llm',
+ 'ragas_experimental/llm/llm.py')},
+ 'ragas_experimental.metric.base': { 'ragas_experimental.metric.base.Metric': ( 'metric/base.html#metric',
+ 'ragas_experimental/metric/base.py'),
+ 'ragas_experimental.metric.base.Metric.__post_init__': ( 'metric/base.html#metric.__post_init__',
+ 'ragas_experimental/metric/base.py'),
+ 'ragas_experimental.metric.base.Metric._ensemble': ( 'metric/base.html#metric._ensemble',
+ 'ragas_experimental/metric/base.py'),
+ 'ragas_experimental.metric.base.Metric._get_response_model': ( 'metric/base.html#metric._get_response_model',
+ 'ragas_experimental/metric/base.py'),
+ 'ragas_experimental.metric.base.Metric.abatch_score': ( 'metric/base.html#metric.abatch_score',
+ 'ragas_experimental/metric/base.py'),
+ 'ragas_experimental.metric.base.Metric.ascore': ( 'metric/base.html#metric.ascore',
+ 'ragas_experimental/metric/base.py'),
+ 'ragas_experimental.metric.base.Metric.batch_score': ( 'metric/base.html#metric.batch_score',
+ 'ragas_experimental/metric/base.py'),
+ 'ragas_experimental.metric.base.Metric.get_variables': ( 'metric/base.html#metric.get_variables',
+ 'ragas_experimental/metric/base.py'),
+ 'ragas_experimental.metric.base.Metric.score': ( 'metric/base.html#metric.score',
+ 'ragas_experimental/metric/base.py'),
+ 'ragas_experimental.metric.base.Metric.train': ( 'metric/base.html#metric.train',
+ 'ragas_experimental/metric/base.py')},
+ 'ragas_experimental.metric.decorator': { 'ragas_experimental.metric.decorator.create_metric_decorator': ( 'metric/decorator.html#create_metric_decorator',
+ 'ragas_experimental/metric/decorator.py')},
+ 'ragas_experimental.metric.discrete': { 'ragas_experimental.metric.discrete.DiscreteMetric': ( 'metric/discrete.html#discretemetric',
+ 'ragas_experimental/metric/discrete.py'),
+ 'ragas_experimental.metric.discrete.DiscreteMetric._ensemble': ( 'metric/discrete.html#discretemetric._ensemble',
+ 'ragas_experimental/metric/discrete.py'),
+ 'ragas_experimental.metric.discrete.DiscreteMetric._get_response_model': ( 'metric/discrete.html#discretemetric._get_response_model',
+ 'ragas_experimental/metric/discrete.py')},
+ 'ragas_experimental.metric.numeric': { 'ragas_experimental.metric.numeric.NumericMetric': ( 'metric/numeric.html#numericmetric',
+ 'ragas_experimental/metric/numeric.py'),
+ 'ragas_experimental.metric.numeric.NumericMetric._ensemble': ( 'metric/numeric.html#numericmetric._ensemble',
+ 'ragas_experimental/metric/numeric.py'),
+ 'ragas_experimental.metric.numeric.NumericMetric._get_response_model': ( 'metric/numeric.html#numericmetric._get_response_model',
+ 'ragas_experimental/metric/numeric.py')},
+ 'ragas_experimental.metric.ranking': { 'ragas_experimental.metric.ranking.RankingMetric': ( 'metric/ranking.html#rankingmetric',
+ 'ragas_experimental/metric/ranking.py'),
+ 'ragas_experimental.metric.ranking.RankingMetric._ensemble': ( 'metric/ranking.html#rankingmetric._ensemble',
+ 'ragas_experimental/metric/ranking.py'),
+ 'ragas_experimental.metric.ranking.RankingMetric._get_response_model': ( 'metric/ranking.html#rankingmetric._get_response_model',
+ 'ragas_experimental/metric/ranking.py')},
+ 'ragas_experimental.metric.result': { 'ragas_experimental.metric.result.MetricResult': ( 'metric/result.html#metricresult',
+ 'ragas_experimental/metric/result.py'),
+ 'ragas_experimental.metric.result.MetricResult.__add__': ( 'metric/result.html#metricresult.__add__',
+ 'ragas_experimental/metric/result.py'),
+ 'ragas_experimental.metric.result.MetricResult.__eq__': ( 'metric/result.html#metricresult.__eq__',
+ 'ragas_experimental/metric/result.py'),
+ 'ragas_experimental.metric.result.MetricResult.__float__': ( 'metric/result.html#metricresult.__float__',
+ 'ragas_experimental/metric/result.py'),
+ 'ragas_experimental.metric.result.MetricResult.__ge__': ( 'metric/result.html#metricresult.__ge__',
+ 'ragas_experimental/metric/result.py'),
+ 'ragas_experimental.metric.result.MetricResult.__get_pydantic_core_schema__': ( 'metric/result.html#metricresult.__get_pydantic_core_schema__',
+ 'ragas_experimental/metric/result.py'),
+ 'ragas_experimental.metric.result.MetricResult.__getattr__': ( 'metric/result.html#metricresult.__getattr__',
+ 'ragas_experimental/metric/result.py'),
+ 'ragas_experimental.metric.result.MetricResult.__getitem__': ( 'metric/result.html#metricresult.__getitem__',
+ 'ragas_experimental/metric/result.py'),
+ 'ragas_experimental.metric.result.MetricResult.__gt__': ( 'metric/result.html#metricresult.__gt__',
+ 'ragas_experimental/metric/result.py'),
+ 'ragas_experimental.metric.result.MetricResult.__init__': ( 'metric/result.html#metricresult.__init__',
+ 'ragas_experimental/metric/result.py'),
+ 'ragas_experimental.metric.result.MetricResult.__int__': ( 'metric/result.html#metricresult.__int__',
+ 'ragas_experimental/metric/result.py'),
+ 'ragas_experimental.metric.result.MetricResult.__iter__': ( 'metric/result.html#metricresult.__iter__',
+ 'ragas_experimental/metric/result.py'),
+ 'ragas_experimental.metric.result.MetricResult.__le__': ( 'metric/result.html#metricresult.__le__',
+ 'ragas_experimental/metric/result.py'),
+ 'ragas_experimental.metric.result.MetricResult.__len__': ( 'metric/result.html#metricresult.__len__',
+ 'ragas_experimental/metric/result.py'),
+ 'ragas_experimental.metric.result.MetricResult.__lt__': ( 'metric/result.html#metricresult.__lt__',
+ 'ragas_experimental/metric/result.py'),
+ 'ragas_experimental.metric.result.MetricResult.__mul__': ( 'metric/result.html#metricresult.__mul__',
+ 'ragas_experimental/metric/result.py'),
+ 'ragas_experimental.metric.result.MetricResult.__radd__': ( 'metric/result.html#metricresult.__radd__',
+ 'ragas_experimental/metric/result.py'),
+ 'ragas_experimental.metric.result.MetricResult.__repr__': ( 'metric/result.html#metricresult.__repr__',
+ 'ragas_experimental/metric/result.py'),
+ 'ragas_experimental.metric.result.MetricResult.__rmul__': ( 'metric/result.html#metricresult.__rmul__',
+ 'ragas_experimental/metric/result.py'),
+ 'ragas_experimental.metric.result.MetricResult.__rsub__': ( 'metric/result.html#metricresult.__rsub__',
+ 'ragas_experimental/metric/result.py'),
+ 'ragas_experimental.metric.result.MetricResult.__rtruediv__': ( 'metric/result.html#metricresult.__rtruediv__',
+ 'ragas_experimental/metric/result.py'),
+ 'ragas_experimental.metric.result.MetricResult.__str__': ( 'metric/result.html#metricresult.__str__',
+ 'ragas_experimental/metric/result.py'),
+ 'ragas_experimental.metric.result.MetricResult.__sub__': ( 'metric/result.html#metricresult.__sub__',
+ 'ragas_experimental/metric/result.py'),
+ 'ragas_experimental.metric.result.MetricResult.__truediv__': ( 'metric/result.html#metricresult.__truediv__',
+ 'ragas_experimental/metric/result.py'),
+ 'ragas_experimental.metric.result.MetricResult.model_dump': ( 'metric/result.html#metricresult.model_dump',
+ 'ragas_experimental/metric/result.py'),
+ 'ragas_experimental.metric.result.MetricResult.result': ( 'metric/result.html#metricresult.result',
+ 'ragas_experimental/metric/result.py'),
+ 'ragas_experimental.metric.result.MetricResult.to_dict': ( 'metric/result.html#metricresult.to_dict',
+ 'ragas_experimental/metric/result.py'),
+ 'ragas_experimental.metric.result.MetricResult.validate': ( 'metric/result.html#metricresult.validate',
+ 'ragas_experimental/metric/result.py')},
+ 'ragas_experimental.model.notion_model': { 'ragas_experimental.model.notion_model.NotionModel': ( 'model/notion_model.html#notionmodel',
+ 'ragas_experimental/model/notion_model.py'),
+ 'ragas_experimental.model.notion_model.NotionModel.__getattr__': ( 'model/notion_model.html#notionmodel.__getattr__',
+ 'ragas_experimental/model/notion_model.py'),
+ 'ragas_experimental.model.notion_model.NotionModel.__init__': ( 'model/notion_model.html#notionmodel.__init__',
+ 'ragas_experimental/model/notion_model.py'),
+ 'ragas_experimental.model.notion_model.NotionModel.__repr__': ( 'model/notion_model.html#notionmodel.__repr__',
+ 'ragas_experimental/model/notion_model.py'),
+ 'ragas_experimental.model.notion_model.NotionModel.__setattr__': ( 'model/notion_model.html#notionmodel.__setattr__',
+ 'ragas_experimental/model/notion_model.py'),
+ 'ragas_experimental.model.notion_model.NotionModel.from_notion': ( 'model/notion_model.html#notionmodel.from_notion',
+ 'ragas_experimental/model/notion_model.py'),
+ 'ragas_experimental.model.notion_model.NotionModel.to_notion': ( 'model/notion_model.html#notionmodel.to_notion',
+ 'ragas_experimental/model/notion_model.py'),
+ 'ragas_experimental.model.notion_model.NotionModelMeta': ( 'model/notion_model.html#notionmodelmeta',
+ 'ragas_experimental/model/notion_model.py'),
+ 'ragas_experimental.model.notion_model.NotionModelMeta.__new__': ( 'model/notion_model.html#notionmodelmeta.__new__',
+ 'ragas_experimental/model/notion_model.py')},
+ 'ragas_experimental.model.notion_typing': { 'ragas_experimental.model.notion_typing.Field': ( 'model/notion_types.html#field',
+ 'ragas_experimental/model/notion_typing.py'),
+ 'ragas_experimental.model.notion_typing.Field.__get__': ( 'model/notion_types.html#field.__get__',
+ 'ragas_experimental/model/notion_typing.py'),
+ 'ragas_experimental.model.notion_typing.Field.__init__': ( 'model/notion_types.html#field.__init__',
+ 'ragas_experimental/model/notion_typing.py'),
+ 'ragas_experimental.model.notion_typing.Field.__set__': ( 'model/notion_types.html#field.__set__',
+ 'ragas_experimental/model/notion_typing.py'),
+ 'ragas_experimental.model.notion_typing.Field.__set_name__': ( 'model/notion_types.html#field.__set_name__',
+ 'ragas_experimental/model/notion_typing.py'),
+ 'ragas_experimental.model.notion_typing.Field._from_notion': ( 'model/notion_types.html#field._from_notion',
+ 'ragas_experimental/model/notion_typing.py'),
+ 'ragas_experimental.model.notion_typing.Field._to_notion': ( 'model/notion_types.html#field._to_notion',
+ 'ragas_experimental/model/notion_typing.py'),
+ 'ragas_experimental.model.notion_typing.Field._to_notion_property': ( 'model/notion_types.html#field._to_notion_property',
+ 'ragas_experimental/model/notion_typing.py'),
+ 'ragas_experimental.model.notion_typing.Field.validate': ( 'model/notion_types.html#field.validate',
+ 'ragas_experimental/model/notion_typing.py'),
+ 'ragas_experimental.model.notion_typing.ID': ( 'model/notion_types.html#id',
+ 'ragas_experimental/model/notion_typing.py'),
+ 'ragas_experimental.model.notion_typing.ID.__init__': ( 'model/notion_types.html#id.__init__',
+ 'ragas_experimental/model/notion_typing.py'),
+ 'ragas_experimental.model.notion_typing.ID.__new__': ( 'model/notion_types.html#id.__new__',
+ 'ragas_experimental/model/notion_typing.py'),
+ 'ragas_experimental.model.notion_typing.ID._from_notion': ( 'model/notion_types.html#id._from_notion',
+ 'ragas_experimental/model/notion_typing.py'),
+ 'ragas_experimental.model.notion_typing.ID._to_notion': ( 'model/notion_types.html#id._to_notion',
+ 'ragas_experimental/model/notion_typing.py'),
+ 'ragas_experimental.model.notion_typing.ID._to_notion_property': ( 'model/notion_types.html#id._to_notion_property',
+ 'ragas_experimental/model/notion_typing.py'),
+ 'ragas_experimental.model.notion_typing.ID.validate': ( 'model/notion_types.html#id.validate',
+ 'ragas_experimental/model/notion_typing.py'),
+ 'ragas_experimental.model.notion_typing.MultiSelect': ( 'model/notion_types.html#multiselect',
+ 'ragas_experimental/model/notion_typing.py'),
+ 'ragas_experimental.model.notion_typing.MultiSelect.__init__': ( 'model/notion_types.html#multiselect.__init__',
+ 'ragas_experimental/model/notion_typing.py'),
+ 'ragas_experimental.model.notion_typing.MultiSelect.__new__': ( 'model/notion_types.html#multiselect.__new__',
+ 'ragas_experimental/model/notion_typing.py'),
+ 'ragas_experimental.model.notion_typing.MultiSelect._from_notion': ( 'model/notion_types.html#multiselect._from_notion',
+ 'ragas_experimental/model/notion_typing.py'),
+ 'ragas_experimental.model.notion_typing.MultiSelect._to_notion': ( 'model/notion_types.html#multiselect._to_notion',
+ 'ragas_experimental/model/notion_typing.py'),
+ 'ragas_experimental.model.notion_typing.MultiSelect._to_notion_property': ( 'model/notion_types.html#multiselect._to_notion_property',
+ 'ragas_experimental/model/notion_typing.py'),
+ 'ragas_experimental.model.notion_typing.MultiSelect.validate': ( 'model/notion_types.html#multiselect.validate',
+ 'ragas_experimental/model/notion_typing.py'),
+ 'ragas_experimental.model.notion_typing.NotionFieldMeta': ( 'model/notion_types.html#notionfieldmeta',
+ 'ragas_experimental/model/notion_typing.py'),
+ 'ragas_experimental.model.notion_typing.NotionFieldMeta.__init__': ( 'model/notion_types.html#notionfieldmeta.__init__',
+ 'ragas_experimental/model/notion_typing.py'),
+ 'ragas_experimental.model.notion_typing.NotionFieldMeta.__set_name__': ( 'model/notion_types.html#notionfieldmeta.__set_name__',
+ 'ragas_experimental/model/notion_typing.py'),
+ 'ragas_experimental.model.notion_typing.NotionFieldMeta.from_notion': ( 'model/notion_types.html#notionfieldmeta.from_notion',
+ 'ragas_experimental/model/notion_typing.py'),
+ 'ragas_experimental.model.notion_typing.NotionFieldMeta.to_notion': ( 'model/notion_types.html#notionfieldmeta.to_notion',
+ 'ragas_experimental/model/notion_typing.py'),
+ 'ragas_experimental.model.notion_typing.NotionFieldMeta.to_notion_property': ( 'model/notion_types.html#notionfieldmeta.to_notion_property',
+ 'ragas_experimental/model/notion_typing.py'),
+ 'ragas_experimental.model.notion_typing.NotionFieldMeta.validate': ( 'model/notion_types.html#notionfieldmeta.validate',
+ 'ragas_experimental/model/notion_typing.py'),
+ 'ragas_experimental.model.notion_typing.Select': ( 'model/notion_types.html#select',
+ 'ragas_experimental/model/notion_typing.py'),
+ 'ragas_experimental.model.notion_typing.Select.__init__': ( 'model/notion_types.html#select.__init__',
+ 'ragas_experimental/model/notion_typing.py'),
+ 'ragas_experimental.model.notion_typing.Select.__new__': ( 'model/notion_types.html#select.__new__',
+ 'ragas_experimental/model/notion_typing.py'),
+ 'ragas_experimental.model.notion_typing.Select._from_notion': ( 'model/notion_types.html#select._from_notion',
+ 'ragas_experimental/model/notion_typing.py'),
+ 'ragas_experimental.model.notion_typing.Select._to_notion': ( 'model/notion_types.html#select._to_notion',
+ 'ragas_experimental/model/notion_typing.py'),
+ 'ragas_experimental.model.notion_typing.Select._to_notion_property': ( 'model/notion_types.html#select._to_notion_property',
+ 'ragas_experimental/model/notion_typing.py'),
+ 'ragas_experimental.model.notion_typing.Select.validate': ( 'model/notion_types.html#select.validate',
+ 'ragas_experimental/model/notion_typing.py'),
+ 'ragas_experimental.model.notion_typing.Text': ( 'model/notion_types.html#text',
+ 'ragas_experimental/model/notion_typing.py'),
+ 'ragas_experimental.model.notion_typing.Text.__init__': ( 'model/notion_types.html#text.__init__',
+ 'ragas_experimental/model/notion_typing.py'),
+ 'ragas_experimental.model.notion_typing.Text.__new__': ( 'model/notion_types.html#text.__new__',
+ 'ragas_experimental/model/notion_typing.py'),
+ 'ragas_experimental.model.notion_typing.Text._from_notion': ( 'model/notion_types.html#text._from_notion',
+ 'ragas_experimental/model/notion_typing.py'),
+ 'ragas_experimental.model.notion_typing.Text._to_notion': ( 'model/notion_types.html#text._to_notion',
+ 'ragas_experimental/model/notion_typing.py'),
+ 'ragas_experimental.model.notion_typing.TextNew': ( 'model/notion_types.html#textnew',
+ 'ragas_experimental/model/notion_typing.py'),
+ 'ragas_experimental.model.notion_typing.TextNew.__init__': ( 'model/notion_types.html#textnew.__init__',
+ 'ragas_experimental/model/notion_typing.py'),
+ 'ragas_experimental.model.notion_typing.TextNew.from_notion': ( 'model/notion_types.html#textnew.from_notion',
+ 'ragas_experimental/model/notion_typing.py'),
+ 'ragas_experimental.model.notion_typing.TextNew.to_notion': ( 'model/notion_types.html#textnew.to_notion',
+ 'ragas_experimental/model/notion_typing.py'),
+ 'ragas_experimental.model.notion_typing.Title': ( 'model/notion_types.html#title',
+ 'ragas_experimental/model/notion_typing.py'),
+ 'ragas_experimental.model.notion_typing.Title.__init__': ( 'model/notion_types.html#title.__init__',
+ 'ragas_experimental/model/notion_typing.py'),
+ 'ragas_experimental.model.notion_typing.Title.__new__': ( 'model/notion_types.html#title.__new__',
+ 'ragas_experimental/model/notion_typing.py'),
+ 'ragas_experimental.model.notion_typing.Title._from_notion': ( 'model/notion_types.html#title._from_notion',
+ 'ragas_experimental/model/notion_typing.py'),
+ 'ragas_experimental.model.notion_typing.Title._to_notion': ( 'model/notion_types.html#title._to_notion',
+ 'ragas_experimental/model/notion_typing.py'),
+ 'ragas_experimental.model.notion_typing.URL': ( 'model/notion_types.html#url',
+ 'ragas_experimental/model/notion_typing.py'),
+ 'ragas_experimental.model.notion_typing.URL.__init__': ( 'model/notion_types.html#url.__init__',
+ 'ragas_experimental/model/notion_typing.py'),
+ 'ragas_experimental.model.notion_typing.URL.__new__': ( 'model/notion_types.html#url.__new__',
+ 'ragas_experimental/model/notion_typing.py'),
+ 'ragas_experimental.model.notion_typing.URL._from_notion': ( 'model/notion_types.html#url._from_notion',
+ 'ragas_experimental/model/notion_typing.py'),
+ 'ragas_experimental.model.notion_typing.URL._to_notion': ( 'model/notion_types.html#url._to_notion',
+ 'ragas_experimental/model/notion_typing.py'),
+ 'ragas_experimental.model.notion_typing.URL.validate': ( 'model/notion_types.html#url.validate',
+ 'ragas_experimental/model/notion_typing.py')},
+ 'ragas_experimental.model.pydantic_model': { 'ragas_experimental.model.pydantic_model.ExtendedPydanticBaseModel': ( 'model/pydantic_mode.html#extendedpydanticbasemodel',
+ 'ragas_experimental/model/pydantic_model.py'),
+ 'ragas_experimental.model.pydantic_model.ExtendedPydanticBaseModel.__init__': ( 'model/pydantic_mode.html#extendedpydanticbasemodel.__init__',
+ 'ragas_experimental/model/pydantic_model.py'),
+ 'ragas_experimental.model.pydantic_model.ExtendedPydanticBaseModel._initialize_column_mapping': ( 'model/pydantic_mode.html#extendedpydanticbasemodel._initialize_column_mapping',
+ 'ragas_experimental/model/pydantic_model.py'),
+ 'ragas_experimental.model.pydantic_model.ExtendedPydanticBaseModel.get_column_id': ( 'model/pydantic_mode.html#extendedpydanticbasemodel.get_column_id',
+ 'ragas_experimental/model/pydantic_model.py'),
+ 'ragas_experimental.model.pydantic_model.ExtendedPydanticBaseModel.get_db_field_mapping': ( 'model/pydantic_mode.html#extendedpydanticbasemodel.get_db_field_mapping',
+ 'ragas_experimental/model/pydantic_model.py'),
+ 'ragas_experimental.model.pydantic_model.ExtendedPydanticBaseModel.set_column_id': ( 'model/pydantic_mode.html#extendedpydanticbasemodel.set_column_id',
+ 'ragas_experimental/model/pydantic_model.py')},
+ 'ragas_experimental.project': { 'ragas_experimental.project.ExperimentProtocol': ( 'project/experiments.html#experimentprotocol',
+ 'ragas_experimental/project.py'),
+ 'ragas_experimental.project.ExperimentProtocol.__call__': ( 'project/experiments.html#experimentprotocol.__call__',
+ 'ragas_experimental/project.py'),
+ 'ragas_experimental.project.ExperimentProtocol.run_async': ( 'project/experiments.html#experimentprotocol.run_async',
+ 'ragas_experimental/project.py'),
+ 'ragas_experimental.project.Project.create_experiment': ( 'project/experiments.html#project.create_experiment',
+ 'ragas_experimental/project.py'),
+ 'ragas_experimental.project.Project.experiment': ( 'project/experiments.html#project.experiment',
+ 'ragas_experimental/project.py'),
+ 'ragas_experimental.project.Project.get_experiment': ( 'project/experiments.html#project.get_experiment',
+ 'ragas_experimental/project.py')},
+ 'ragas_experimental.project.comparison': { 'ragas_experimental.project.comparison.Project.compare_experiments': ( 'project/comparison.html#project.compare_experiments',
+ 'ragas_experimental/project/comparison.py'),
+ 'ragas_experimental.project.comparison._combine_experiments': ( 'project/comparison.html#_combine_experiments',
+ 'ragas_experimental/project/comparison.py'),
+ 'ragas_experimental.project.comparison._get_title_property': ( 'project/comparison.html#_get_title_property',
+ 'ragas_experimental/project/comparison.py'),
+ 'ragas_experimental.project.comparison._model_to_dict': ( 'project/comparison.html#_model_to_dict',
+ 'ragas_experimental/project/comparison.py'),
+ 'ragas_experimental.project.comparison._validate_experiments': ( 'project/comparison.html#_validate_experiments',
+ 'ragas_experimental/project/comparison.py')},
+ 'ragas_experimental.project.core': { 'ragas_experimental.project.core.Project': ( 'project/core.html#project',
+ 'ragas_experimental/project/core.py'),
+ 'ragas_experimental.project.core.Project.__init__': ( 'project/core.html#project.__init__',
+ 'ragas_experimental/project/core.py'),
+ 'ragas_experimental.project.core.Project.__repr__': ( 'project/core.html#project.__repr__',
+ 'ragas_experimental/project/core.py'),
+ 'ragas_experimental.project.core.Project.create': ( 'project/core.html#project.create',
+ 'ragas_experimental/project/core.py'),
+ 'ragas_experimental.project.core.Project.create_dataset': ( 'project/core.html#project.create_dataset',
+ 'ragas_experimental/project/core.py'),
+ 'ragas_experimental.project.core.Project.delete': ( 'project/core.html#project.delete',
+ 'ragas_experimental/project/core.py'),
+ 'ragas_experimental.project.core.Project.get_dataset': ( 'project/core.html#project.get_dataset',
+ 'ragas_experimental/project/core.py'),
+ 'ragas_experimental.project.core.create_dataset_columns': ( 'project/core.html#create_dataset_columns',
+ 'ragas_experimental/project/core.py')},
+ 'ragas_experimental.project.experiments': { 'ragas_experimental.project.experiments.ExperimentProtocol': ( 'project/experiments.html#experimentprotocol',
+ 'ragas_experimental/project/experiments.py'),
+ 'ragas_experimental.project.experiments.ExperimentProtocol.__call__': ( 'project/experiments.html#experimentprotocol.__call__',
+ 'ragas_experimental/project/experiments.py'),
+ 'ragas_experimental.project.experiments.ExperimentProtocol.run_async': ( 'project/experiments.html#experimentprotocol.run_async',
+ 'ragas_experimental/project/experiments.py'),
+ 'ragas_experimental.project.experiments.Project.create_experiment': ( 'project/experiments.html#project.create_experiment',
+ 'ragas_experimental/project/experiments.py'),
+ 'ragas_experimental.project.experiments.Project.experiment': ( 'project/experiments.html#project.experiment',
+ 'ragas_experimental/project/experiments.py'),
+ 'ragas_experimental.project.experiments.Project.get_experiment': ( 'project/experiments.html#project.get_experiment',
+ 'ragas_experimental/project/experiments.py'),
+ 'ragas_experimental.project.experiments.Project.langfuse_experiment': ( 'project/experiments.html#project.langfuse_experiment',
+ 'ragas_experimental/project/experiments.py'),
+ 'ragas_experimental.project.experiments.create_experiment_columns': ( 'project/experiments.html#create_experiment_columns',
+ 'ragas_experimental/project/experiments.py')},
+ 'ragas_experimental.project.naming': { 'ragas_experimental.project.naming.MemorableNames': ( 'project/naming.html#memorablenames',
+ 'ragas_experimental/project/naming.py'),
+ 'ragas_experimental.project.naming.MemorableNames.__init__': ( 'project/naming.html#memorablenames.__init__',
+ 'ragas_experimental/project/naming.py'),
+ 'ragas_experimental.project.naming.MemorableNames.generate_name': ( 'project/naming.html#memorablenames.generate_name',
+ 'ragas_experimental/project/naming.py'),
+ 'ragas_experimental.project.naming.MemorableNames.generate_unique_name': ( 'project/naming.html#memorablenames.generate_unique_name',
+ 'ragas_experimental/project/naming.py'),
+ 'ragas_experimental.project.naming.MemorableNames.generate_unique_names': ( 'project/naming.html#memorablenames.generate_unique_names',
+ 'ragas_experimental/project/naming.py')},
+ 'ragas_experimental.prompt.base': { 'ragas_experimental.prompt.base.Prompt': ( 'prompt/base.html#prompt',
+ 'ragas_experimental/prompt/base.py'),
+ 'ragas_experimental.prompt.base.Prompt.__init__': ( 'prompt/base.html#prompt.__init__',
+ 'ragas_experimental/prompt/base.py'),
+ 'ragas_experimental.prompt.base.Prompt.__str__': ( 'prompt/base.html#prompt.__str__',
+ 'ragas_experimental/prompt/base.py'),
+ 'ragas_experimental.prompt.base.Prompt._format_examples': ( 'prompt/base.html#prompt._format_examples',
+ 'ragas_experimental/prompt/base.py'),
+ 'ragas_experimental.prompt.base.Prompt._validate_instruction': ( 'prompt/base.html#prompt._validate_instruction',
+ 'ragas_experimental/prompt/base.py'),
+ 'ragas_experimental.prompt.base.Prompt.add_example': ( 'prompt/base.html#prompt.add_example',
+ 'ragas_experimental/prompt/base.py'),
+ 'ragas_experimental.prompt.base.Prompt.format': ( 'prompt/base.html#prompt.format',
+ 'ragas_experimental/prompt/base.py')},
+ 'ragas_experimental.prompt.dynamic_few_shot': { 'ragas_experimental.prompt.dynamic_few_shot.DynamicFewShotPrompt': ( 'prompt/dynamic_few_shot.html#dynamicfewshotprompt',
+ 'ragas_experimental/prompt/dynamic_few_shot.py'),
+ 'ragas_experimental.prompt.dynamic_few_shot.DynamicFewShotPrompt.__init__': ( 'prompt/dynamic_few_shot.html#dynamicfewshotprompt.__init__',
+ 'ragas_experimental/prompt/dynamic_few_shot.py'),
+ 'ragas_experimental.prompt.dynamic_few_shot.DynamicFewShotPrompt.add_example': ( 'prompt/dynamic_few_shot.html#dynamicfewshotprompt.add_example',
+ 'ragas_experimental/prompt/dynamic_few_shot.py'),
+ 'ragas_experimental.prompt.dynamic_few_shot.DynamicFewShotPrompt.format': ( 'prompt/dynamic_few_shot.html#dynamicfewshotprompt.format',
+ 'ragas_experimental/prompt/dynamic_few_shot.py'),
+ 'ragas_experimental.prompt.dynamic_few_shot.DynamicFewShotPrompt.from_prompt': ( 'prompt/dynamic_few_shot.html#dynamicfewshotprompt.from_prompt',
+ 'ragas_experimental/prompt/dynamic_few_shot.py'),
+ 'ragas_experimental.prompt.dynamic_few_shot.ExampleStore': ( 'prompt/dynamic_few_shot.html#examplestore',
+ 'ragas_experimental/prompt/dynamic_few_shot.py'),
+ 'ragas_experimental.prompt.dynamic_few_shot.ExampleStore.add_example': ( 'prompt/dynamic_few_shot.html#examplestore.add_example',
+ 'ragas_experimental/prompt/dynamic_few_shot.py'),
+ 'ragas_experimental.prompt.dynamic_few_shot.ExampleStore.get_examples': ( 'prompt/dynamic_few_shot.html#examplestore.get_examples',
+ 'ragas_experimental/prompt/dynamic_few_shot.py'),
+ 'ragas_experimental.prompt.dynamic_few_shot.InMemoryExampleStore': ( 'prompt/dynamic_few_shot.html#inmemoryexamplestore',
+ 'ragas_experimental/prompt/dynamic_few_shot.py'),
+ 'ragas_experimental.prompt.dynamic_few_shot.InMemoryExampleStore.__init__': ( 'prompt/dynamic_few_shot.html#inmemoryexamplestore.__init__',
+ 'ragas_experimental/prompt/dynamic_few_shot.py'),
+ 'ragas_experimental.prompt.dynamic_few_shot.InMemoryExampleStore.__len__': ( 'prompt/dynamic_few_shot.html#inmemoryexamplestore.__len__',
+ 'ragas_experimental/prompt/dynamic_few_shot.py'),
+ 'ragas_experimental.prompt.dynamic_few_shot.InMemoryExampleStore._get_embedding': ( 'prompt/dynamic_few_shot.html#inmemoryexamplestore._get_embedding',
+ 'ragas_experimental/prompt/dynamic_few_shot.py'),
+ 'ragas_experimental.prompt.dynamic_few_shot.InMemoryExampleStore._get_nearest_examples': ( 'prompt/dynamic_few_shot.html#inmemoryexamplestore._get_nearest_examples',
+ 'ragas_experimental/prompt/dynamic_few_shot.py'),
+ 'ragas_experimental.prompt.dynamic_few_shot.InMemoryExampleStore.add_example': ( 'prompt/dynamic_few_shot.html#inmemoryexamplestore.add_example',
+ 'ragas_experimental/prompt/dynamic_few_shot.py'),
+ 'ragas_experimental.prompt.dynamic_few_shot.InMemoryExampleStore.get_examples': ( 'prompt/dynamic_few_shot.html#inmemoryexamplestore.get_examples',
+ 'ragas_experimental/prompt/dynamic_few_shot.py')},
+ 'ragas_experimental.tracing.langfuse': { 'ragas_experimental.tracing.langfuse.LangfuseTrace': ( 'tracing/langfuse.html#langfusetrace',
+ 'ragas_experimental/tracing/langfuse.py'),
+ 'ragas_experimental.tracing.langfuse.LangfuseTrace.__init__': ( 'tracing/langfuse.html#langfusetrace.__init__',
+ 'ragas_experimental/tracing/langfuse.py'),
+ 'ragas_experimental.tracing.langfuse.LangfuseTrace.filter': ( 'tracing/langfuse.html#langfusetrace.filter',
+ 'ragas_experimental/tracing/langfuse.py'),
+ 'ragas_experimental.tracing.langfuse.LangfuseTrace.get_url': ( 'tracing/langfuse.html#langfusetrace.get_url',
+ 'ragas_experimental/tracing/langfuse.py'),
+ 'ragas_experimental.tracing.langfuse.add_query_param': ( 'tracing/langfuse.html#add_query_param',
+ 'ragas_experimental/tracing/langfuse.py'),
+ 'ragas_experimental.tracing.langfuse.sync_trace': ( 'tracing/langfuse.html#sync_trace',
+ 'ragas_experimental/tracing/langfuse.py')},
+ 'ragas_experimental.typing': { 'ragas_experimental.typing.Checkbox': ('typing.html#checkbox', 'ragas_experimental/typing.py'),
+ 'ragas_experimental.typing.Checkbox.__init__': ( 'typing.html#checkbox.__init__',
+ 'ragas_experimental/typing.py'),
+ 'ragas_experimental.typing.ColumnType': ( 'typing.html#columntype',
+ 'ragas_experimental/typing.py'),
+ 'ragas_experimental.typing.Custom': ('typing.html#custom', 'ragas_experimental/typing.py'),
+ 'ragas_experimental.typing.Custom.__init__': ( 'typing.html#custom.__init__',
+ 'ragas_experimental/typing.py'),
+ 'ragas_experimental.typing.Date': ('typing.html#date', 'ragas_experimental/typing.py'),
+ 'ragas_experimental.typing.Date.__init__': ( 'typing.html#date.__init__',
+ 'ragas_experimental/typing.py'),
+ 'ragas_experimental.typing.FieldMeta': ('typing.html#fieldmeta', 'ragas_experimental/typing.py'),
+ 'ragas_experimental.typing.FieldMeta.__init__': ( 'typing.html#fieldmeta.__init__',
+ 'ragas_experimental/typing.py'),
+ 'ragas_experimental.typing.ModelConverter': ( 'typing.html#modelconverter',
+ 'ragas_experimental/typing.py'),
+ 'ragas_experimental.typing.ModelConverter.infer_field_type': ( 'typing.html#modelconverter.infer_field_type',
+ 'ragas_experimental/typing.py'),
+ 'ragas_experimental.typing.ModelConverter.infer_metric_result_type': ( 'typing.html#modelconverter.infer_metric_result_type',
+ 'ragas_experimental/typing.py'),
+ 'ragas_experimental.typing.ModelConverter.instance_to_row': ( 'typing.html#modelconverter.instance_to_row',
+ 'ragas_experimental/typing.py'),
+ 'ragas_experimental.typing.ModelConverter.instances_to_rows': ( 'typing.html#modelconverter.instances_to_rows',
+ 'ragas_experimental/typing.py'),
+ 'ragas_experimental.typing.ModelConverter.model_to_columns': ( 'typing.html#modelconverter.model_to_columns',
+ 'ragas_experimental/typing.py'),
+ 'ragas_experimental.typing.MultiSelect': ( 'typing.html#multiselect',
+ 'ragas_experimental/typing.py'),
+ 'ragas_experimental.typing.MultiSelect.__init__': ( 'typing.html#multiselect.__init__',
+ 'ragas_experimental/typing.py'),
+ 'ragas_experimental.typing.Number': ('typing.html#number', 'ragas_experimental/typing.py'),
+ 'ragas_experimental.typing.Number.__init__': ( 'typing.html#number.__init__',
+ 'ragas_experimental/typing.py'),
+ 'ragas_experimental.typing.Select': ('typing.html#select', 'ragas_experimental/typing.py'),
+ 'ragas_experimental.typing.Select.__init__': ( 'typing.html#select.__init__',
+ 'ragas_experimental/typing.py'),
+ 'ragas_experimental.typing.Text': ('typing.html#text', 'ragas_experimental/typing.py'),
+ 'ragas_experimental.typing.Text.__init__': ( 'typing.html#text.__init__',
+ 'ragas_experimental/typing.py')},
+ 'ragas_experimental.utils': { 'ragas_experimental.utils.async_to_sync': ( 'utils.html#async_to_sync',
+ 'ragas_experimental/utils.py'),
+ 'ragas_experimental.utils.create_nano_id': ( 'utils.html#create_nano_id',
+ 'ragas_experimental/utils.py')}}}
diff --git a/ragas_annotator/project/__init__.py b/ragas_experimental/backends/__init__.py
similarity index 100%
rename from ragas_annotator/project/__init__.py
rename to ragas_experimental/backends/__init__.py
diff --git a/ragas_experimental/backends/factory.py b/ragas_experimental/backends/factory.py
new file mode 100644
index 0000000..f3e986c
--- /dev/null
+++ b/ragas_experimental/backends/factory.py
@@ -0,0 +1,44 @@
+"""Factory class for creating the backends or mocked backends."""
+
+# AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/backends/factory.ipynb.
+
+# %% auto 0
+__all__ = ['RagasApiClientFactory']
+
+# %% ../../nbs/backends/factory.ipynb 2
+import typing as t
+import os
+from .ragas_api_client import RagasApiClient
+
+# %% ../../nbs/backends/factory.ipynb 3
+class RagasApiClientFactory:
+ """Factory for creating Ragas API client instances."""
+
+ @staticmethod
+ def create(
+ app_token: t.Optional[str] = None,
+ base_url: t.Optional[str] = None,
+ ) -> RagasApiClient:
+ """Create a Ragas API client.
+
+ Args:
+            app_token: The app token for the Ragas API
+ base_url: The base URL for the Ragas API
+
+ Returns:
+ RagasApiClient: A Ragas API client instance
+ """
+ if app_token is None:
+ app_token = os.getenv("RAGAS_APP_TOKEN")
+
+ if app_token is None:
+        raise ValueError("RAGAS_APP_TOKEN environment variable is not set")
+
+ if base_url is None:
+ base_url = os.getenv("RAGAS_API_BASE_URL")
+
+ if base_url is None:
+ base_url = "https://api.dev.app.ragas.io"
+
+ return RagasApiClient(app_token=app_token, base_url=base_url)
+
diff --git a/ragas_annotator/backends/mock_notion.py b/ragas_experimental/backends/mock_notion.py
similarity index 100%
rename from ragas_annotator/backends/mock_notion.py
rename to ragas_experimental/backends/mock_notion.py
diff --git a/ragas_annotator/backends/notion_backend.py b/ragas_experimental/backends/notion_backend.py
similarity index 100%
rename from ragas_annotator/backends/notion_backend.py
rename to ragas_experimental/backends/notion_backend.py
diff --git a/ragas_annotator/backends/ragas_api_client.py b/ragas_experimental/backends/ragas_api_client.py
similarity index 95%
rename from ragas_annotator/backends/ragas_api_client.py
rename to ragas_experimental/backends/ragas_api_client.py
index fea0081..2efe3bd 100644
--- a/ragas_annotator/backends/ragas_api_client.py
+++ b/ragas_experimental/backends/ragas_api_client.py
@@ -5,18 +5,14 @@
# %% auto 0
__all__ = ['DEFAULT_SETTINGS', 'RagasApiClient', 'ColumnType', 'create_nano_id', 'Column', 'RowCell', 'Row']
-# %% ../../nbs/backends/ragas_api_client.ipynb 3
+# %% ../../nbs/backends/ragas_api_client.ipynb 4
import httpx
import asyncio
-import functools
import typing as t
-import inspect
from pydantic import BaseModel, Field
-from enum import StrEnum
-import uuid
from fastcore.utils import patch
-# %% ../../nbs/backends/ragas_api_client.ipynb 4
+# %% ../../nbs/backends/ragas_api_client.ipynb 5
class RagasApiClient():
"""Client for the Ragas Relay API."""
@@ -88,7 +84,7 @@ async def _delete_resource(self, path):
"""Generic resource deletion."""
return await self._request("DELETE", path)
-# %% ../../nbs/backends/ragas_api_client.ipynb 5
+# %% ../../nbs/backends/ragas_api_client.ipynb 6
#---- Projects ----
@patch
async def list_projects(
@@ -150,7 +146,7 @@ async def delete_project(self: RagasApiClient, project_id: str) -> None:
await self._delete_resource(f"projects/{project_id}")
-# %% ../../nbs/backends/ragas_api_client.ipynb 12
+# %% ../../nbs/backends/ragas_api_client.ipynb 13
#---- Datasets ----
@patch
async def list_datasets(
@@ -205,7 +201,7 @@ async def delete_dataset(self: RagasApiClient, project_id: str, dataset_id: str)
"""Delete a dataset."""
await self._delete_resource(f"projects/{project_id}/datasets/{dataset_id}")
-# %% ../../nbs/backends/ragas_api_client.ipynb 19
+# %% ../../nbs/backends/ragas_api_client.ipynb 20
#---- Experiments ----
@patch
async def list_experiments(
@@ -261,8 +257,11 @@ async def delete_experiment(self: RagasApiClient, project_id: str, experiment_id
await self._delete_resource(f"projects/{project_id}/experiments/{experiment_id}")
-# %% ../../nbs/backends/ragas_api_client.ipynb 24
-class ColumnType(StrEnum):
+# %% ../../nbs/backends/ragas_api_client.ipynb 25
+from enum import Enum
+
+# %% ../../nbs/backends/ragas_api_client.ipynb 26
+class ColumnType(str, Enum):
NUMBER = "number"
TEXT = "text"
LONG_TEXT = "longText"
@@ -272,7 +271,7 @@ class ColumnType(StrEnum):
CHECKBOX = "checkbox"
CUSTOM = "custom"
-# %% ../../nbs/backends/ragas_api_client.ipynb 25
+# %% ../../nbs/backends/ragas_api_client.ipynb 27
#---- Dataset Columns ----
@patch
async def list_dataset_columns(
@@ -343,7 +342,7 @@ async def delete_dataset_column(
f"projects/{project_id}/datasets/{dataset_id}/columns/{column_id}"
)
-# %% ../../nbs/backends/ragas_api_client.ipynb 33
+# %% ../../nbs/backends/ragas_api_client.ipynb 35
#---- Dataset Rows ----
@patch
async def list_dataset_rows(
@@ -405,11 +404,11 @@ async def delete_dataset_row(
)
-# %% ../../nbs/backends/ragas_api_client.ipynb 45
+# %% ../../nbs/backends/ragas_api_client.ipynb 47
import uuid
import string
-# %% ../../nbs/backends/ragas_api_client.ipynb 46
+# %% ../../nbs/backends/ragas_api_client.ipynb 48
def create_nano_id(size=12):
# Define characters to use (alphanumeric)
alphabet = string.ascii_letters + string.digits
@@ -426,7 +425,28 @@ def create_nano_id(size=12):
# Pad if necessary and return desired length
return result[:size]
-# %% ../../nbs/backends/ragas_api_client.ipynb 48
+# %% ../../nbs/backends/ragas_api_client.ipynb 50
+import uuid
+import string
+
+# %% ../../nbs/backends/ragas_api_client.ipynb 51
+def create_nano_id(size=12):
+ # Define characters to use (alphanumeric)
+ alphabet = string.ascii_letters + string.digits
+
+ # Generate UUID and convert to int
+ uuid_int = uuid.uuid4().int
+
+ # Convert to base62
+ result = ""
+ while uuid_int:
+ uuid_int, remainder = divmod(uuid_int, len(alphabet))
+ result = alphabet[remainder] + result
+
+ # Pad if necessary and return desired length
+ return result[:size]
+
+# %% ../../nbs/backends/ragas_api_client.ipynb 53
# Default settings for columns
DEFAULT_SETTINGS = {
"is_required": False,
@@ -449,7 +469,7 @@ class Row(BaseModel):
id: str = Field(default_factory=create_nano_id)
data: t.List[RowCell] = Field(...)
-# %% ../../nbs/backends/ragas_api_client.ipynb 49
+# %% ../../nbs/backends/ragas_api_client.ipynb 54
#---- Resource With Data Helper Methods ----
@patch
async def _create_with_data(
@@ -576,7 +596,7 @@ async def create_dataset_with_data(
"dataset", project_id, name, description, columns, rows, batch_size
)
-# %% ../../nbs/backends/ragas_api_client.ipynb 55
+# %% ../../nbs/backends/ragas_api_client.ipynb 60
#---- Experiment Columns ----
@patch
async def list_experiment_columns(
@@ -707,7 +727,7 @@ async def delete_experiment_row(
f"projects/{project_id}/experiments/{experiment_id}/rows/{row_id}"
)
-# %% ../../nbs/backends/ragas_api_client.ipynb 58
+# %% ../../nbs/backends/ragas_api_client.ipynb 63
@patch
async def create_experiment_with_data(
self: RagasApiClient,
@@ -738,7 +758,7 @@ async def create_experiment_with_data(
"experiment", project_id, name, description, columns, rows, batch_size
)
-# %% ../../nbs/backends/ragas_api_client.ipynb 59
+# %% ../../nbs/backends/ragas_api_client.ipynb 64
#---- Utility Methods ----
@patch
def create_column(
diff --git a/ragas_annotator/core.py b/ragas_experimental/core.py
similarity index 100%
rename from ragas_annotator/core.py
rename to ragas_experimental/core.py
diff --git a/ragas_experimental/dataset.py b/ragas_experimental/dataset.py
new file mode 100644
index 0000000..5a70c52
--- /dev/null
+++ b/ragas_experimental/dataset.py
@@ -0,0 +1,316 @@
+"""A python list like object that contains your evaluation data."""
+
+# AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/dataset.ipynb.
+
+# %% auto 0
+__all__ = ['BaseModelType', 'Dataset']
+
+# %% ../nbs/dataset.ipynb 3
+import typing as t
+
+from fastcore.utils import patch
+
+from .model.pydantic_model import ExtendedPydanticBaseModel as BaseModel
+from .utils import create_nano_id, async_to_sync
+from .backends.ragas_api_client import RagasApiClient
+
+# %% ../nbs/dataset.ipynb 4
+BaseModelType = t.TypeVar("BaseModelType", bound=BaseModel)
+
+class Dataset(t.Generic[BaseModelType]):
+ """A list-like interface for managing dataset entries with backend synchronization.
+
+ This class behaves like a Python list while synchronizing operations with the
+ Ragas backend API.
+ """
+
+ def __init__(
+ self,
+ name: str,
+ model: t.Type[BaseModel],
+ project_id: str,
+ dataset_id: str,
+ ragas_api_client: RagasApiClient,
+ ):
+ self.name = name
+ self.model = model
+ self.project_id = project_id
+ self.dataset_id = dataset_id
+ self._ragas_api_client = ragas_api_client
+ self._entries: t.List[BaseModelType] = []
+
+ # Initialize column mapping if it doesn't exist yet
+ if not hasattr(self.model, "__column_mapping__"):
+ self.model.__column_mapping__ = {}
+
+ # Get column mappings from API and update the model's mapping
+ column_id_map = self._get_column_id_map(dataset_id=dataset_id)
+
+ # Update the model's column mapping with the values from the API
+ for field_name, column_id in column_id_map.items():
+ self.model.__column_mapping__[field_name] = column_id
+
+ def _get_column_id_map(self: "Dataset", dataset_id: str) -> dict:
+ """Get a map of column name to column id"""
+ sync_func = async_to_sync(self._ragas_api_client.list_dataset_columns)
+ columns = sync_func(project_id=self.project_id, dataset_id=dataset_id)
+ column_id_map = {column["name"]: column["id"] for column in columns["items"]}
+
+ # add the column id map to the model, selectively overwriting existing column mapping
+ for field in self.model.__column_mapping__.keys():
+ if field in column_id_map:
+ self.model.__column_mapping__[field] = column_id_map[field]
+ return column_id_map
+
+ def __getitem__(
+ self, key: t.Union[int, slice]
+ ) -> t.Union[BaseModelType, "Dataset[BaseModelType]"]:
+ """Get an entry by index or slice."""
+ if isinstance(key, slice):
+ new_dataset = type(self)(
+ name=self.name,
+ model=self.model,
+ project_id=self.project_id,
+ dataset_id=self.dataset_id,
+ ragas_api_client=self._ragas_api_client,
+ )
+ new_dataset._entries = self._entries[key]
+ return new_dataset
+ else:
+ return self._entries[key]
+
+ def __setitem__(self, index: int, entry: BaseModelType) -> None:
+ """Update an entry at the given index and sync to backend."""
+ if not isinstance(entry, self.model):
+ raise TypeError(f"Entry must be an instance of {self.model.__name__}")
+
+ # Get existing entry to get its ID
+ existing = self._entries[index]
+
+ # Update in backend
+ self.save(entry)
+
+ # Update local cache
+ self._entries[index] = entry
+
+ def __repr__(self) -> str:
+ return f"Dataset(name={self.name}, model={self.model.__name__}, len={len(self)})"
+
+ def __len__(self) -> int:
+ return len(self._entries)
+
+ def __iter__(self) -> t.Iterator[BaseModelType]:
+ return iter(self._entries)
+
+# %% ../nbs/dataset.ipynb 16
+@patch
+def append(self: Dataset, entry: BaseModelType) -> None:
+    """Add a new entry to the dataset and sync it to the backend API."""
+ # Create row inside the table
+
+ # first get the columns for the dataset
+ column_id_map = self.model.__column_mapping__
+
+ # create the rows
+ row_dict = entry.model_dump()
+ row_id = create_nano_id()
+ row_data = {}
+ for key, value in row_dict.items():
+ if key in column_id_map:
+ row_data[column_id_map[key]] = value
+
+ sync_func = async_to_sync(self._ragas_api_client.create_dataset_row)
+ response = sync_func(
+ project_id=self.project_id,
+ dataset_id=self.dataset_id,
+ id=row_id,
+ data=row_data,
+ )
+ # add the row id to the entry
+ entry._row_id = response["id"]
+    # Add the entry to the local cache
+ self._entries.append(entry)
+
+# %% ../nbs/dataset.ipynb 19
+@patch
+def pop(self: Dataset, index: int = -1) -> BaseModelType:
+    """Remove and return entry at index, sync deletion to the backend."""
+ entry = self._entries[index]
+ # get the row id
+ row_id = entry._row_id
+ if row_id is None:
+ raise ValueError("Entry has no row id. This likely means it was not added or synced to the dataset.")
+
+ # soft delete the row
+ sync_func = async_to_sync(self._ragas_api_client.delete_dataset_row)
+ sync_func(project_id=self.project_id, dataset_id=self.dataset_id, row_id=row_id)
+
+ # Remove from local cache
+ return self._entries.pop(index)
+
+# %% ../nbs/dataset.ipynb 22
+@patch
+def load(self: Dataset) -> None:
+ """Load all entries from the backend API."""
+ # Get all rows
+ sync_func = async_to_sync(self._ragas_api_client.list_dataset_rows)
+ response = sync_func(
+ project_id=self.project_id,
+ dataset_id=self.dataset_id
+ )
+
+ # Get column mapping (ID -> name)
+ column_map = {v: k for k, v in self.model.__column_mapping__.items()}
+
+ # Clear existing entries
+ self._entries.clear()
+
+ # Process rows
+ for row in response.get("items", []):
+ model_data = {}
+ row_id = row.get("id")
+
+ # Convert from API data format to model fields
+ for col_id, value in row.get("data", {}).items():
+ if col_id in column_map:
+ field_name = column_map[col_id]
+ model_data[field_name] = value
+
+ # Create model instance
+ entry = self.model(**model_data)
+
+ # Store row ID for future operations
+ entry._row_id = row_id
+
+ self._entries.append(entry)
+
+# %% ../nbs/dataset.ipynb 24
+@patch
+def load_as_dicts(self: Dataset) -> t.List[t.Dict]:
+ """Load all entries as dictionaries."""
+ # Get all rows
+ sync_func = async_to_sync(self._ragas_api_client.list_dataset_rows)
+ response = sync_func(
+ project_id=self.project_id,
+ dataset_id=self.dataset_id
+ )
+
+ # Get column mapping (ID -> name)
+ column_map = {v: k for k, v in self.model.__column_mapping__.items()}
+
+ # Convert to dicts with field names
+ result = []
+ for row in response.get("items", []):
+ item_dict = {}
+ for col_id, value in row.get("data", {}).items():
+ if col_id in column_map:
+ field_name = column_map[col_id]
+ item_dict[field_name] = value
+ result.append(item_dict)
+
+ return result
+
+# %% ../nbs/dataset.ipynb 26
+@patch
+def to_pandas(self: Dataset) -> "pd.DataFrame":
+ """Convert dataset to pandas DataFrame."""
+ import pandas as pd
+
+ # Make sure we have data
+ if not self._entries:
+ self.load()
+
+ # Convert entries to dictionaries
+ data = [entry.model_dump() for entry in self._entries]
+ return pd.DataFrame(data)
+
+# %% ../nbs/dataset.ipynb 28
+@patch
+def save(self: Dataset, item: BaseModelType) -> None:
+ """Save changes to an item to the backend."""
+ if not isinstance(item, self.model):
+ raise TypeError(f"Item must be an instance of {self.model.__name__}")
+
+ # Get the row ID
+ row_id = None
+ if hasattr(item, "_row_id") and item._row_id:
+ row_id = item._row_id
+ else:
+ # Try to find it in our entries by matching
+ for i, entry in enumerate(self._entries):
+ if id(entry) == id(item): # Check if it's the same object
+ if hasattr(entry, "_row_id") and entry._row_id:
+ row_id = entry._row_id
+ break
+
+ if not row_id:
+ raise ValueError("Cannot save: item is not from this dataset or was not properly synced")
+
+ # Get column mapping and prepare data
+ column_id_map = self.model.__column_mapping__
+ row_dict = item.model_dump()
+ row_data = {}
+
+ for key, value in row_dict.items():
+ if key in column_id_map:
+ row_data[column_id_map[key]] = value
+
+ # Update in backend
+ sync_func = async_to_sync(self._ragas_api_client.update_dataset_row)
+ response = sync_func(
+ project_id=self.project_id,
+ dataset_id=self.dataset_id,
+ row_id=row_id,
+ data=row_data,
+ )
+
+ # Find and update in local cache if needed
+ for i, entry in enumerate(self._entries):
+ if hasattr(entry, "_row_id") and entry._row_id == row_id:
+ # If it's not the same object, update our copy
+ if id(entry) != id(item):
+ self._entries[i] = item
+ break
+
+# %% ../nbs/dataset.ipynb 32
+@patch
+def get(self: Dataset, field_value: str, field_name: str = "_row_id") -> t.Optional[BaseModelType]:
+ """Get an entry by field value.
+
+ Args:
+        field_value: The value to match
+        field_name: The field to match against (default: "_row_id")
+
+ Returns:
+ The matching model instance or None if not found
+ """
+ # Check if we need to load entries
+ if not self._entries:
+ self.load()
+
+ # Search in local entries first
+ for entry in self._entries:
+ if hasattr(entry, field_name) and getattr(entry, field_name) == field_value:
+ return entry
+
+ # If not found and field is "id", try to get directly from API
+ if field_name == "id":
+ # Get column ID for field
+ if field_name not in self.model.__column_mapping__:
+ return None
+
+ column_id = self.model.__column_mapping__[field_name]
+
+ # Get rows with filter
+ sync_func = async_to_sync(self._ragas_api_client.list_dataset_rows)
+ response = sync_func(
+ project_id=self.project_id,
+ dataset_id=self.dataset_id,
+ # We don't have direct filter support in the API client,
+ # so this would need to be implemented there.
+ # For now, we've already checked our local cache.
+ )
+
+ # Would parse response here if we had filtering
+
+ return None
diff --git a/ragas_experimental/embedding/__init__.py b/ragas_experimental/embedding/__init__.py
new file mode 100644
index 0000000..4df5716
--- /dev/null
+++ b/ragas_experimental/embedding/__init__.py
@@ -0,0 +1,4 @@
+from ragas_experimental.embedding.base import BaseEmbedding
+from ragas_experimental.embedding.base import ragas_embedding
+
+__all__ = ['ragas_embedding','BaseEmbedding']
\ No newline at end of file
diff --git a/ragas_annotator/embedding/base.py b/ragas_experimental/embedding/base.py
similarity index 100%
rename from ragas_annotator/embedding/base.py
rename to ragas_experimental/embedding/base.py
diff --git a/ragas_annotator/exceptions.py b/ragas_experimental/exceptions.py
similarity index 79%
rename from ragas_annotator/exceptions.py
rename to ragas_experimental/exceptions.py
index 905f16e..051a2f3 100644
--- a/ragas_annotator/exceptions.py
+++ b/ragas_experimental/exceptions.py
@@ -1,11 +1,11 @@
"""All the exceptions specific to the `notion_annotator` project."""
-# AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/utils/exceptions.ipynb.
+# AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/exceptions.ipynb.
# %% auto 0
__all__ = ['ValidationError', 'DuplicateError', 'NotFoundError']
-# %% ../nbs/utils/exceptions.ipynb 2
+# %% ../nbs/exceptions.ipynb 2
class ValidationError(Exception):
"""Raised when field validation fails."""
diff --git a/ragas_annotator/experiment.py b/ragas_experimental/experiment.py
similarity index 58%
rename from ragas_annotator/experiment.py
rename to ragas_experimental/experiment.py
index 043e574..6cdb14a 100644
--- a/ragas_annotator/experiment.py
+++ b/ragas_experimental/experiment.py
@@ -10,8 +10,8 @@
from fastcore.utils import patch
-from .model.notion_model import NotionModel
-from .backends.notion_backend import NotionBackend
+from .model.pydantic_model import ExtendedPydanticBaseModel as BaseModel
+from .backends.ragas_api_client import RagasApiClient
from .dataset import Dataset
# %% ../nbs/experiment.ipynb 3
@@ -19,11 +19,13 @@ class Experiment(Dataset):
def __init__(
self,
name: str,
- model: t.Type[NotionModel],
- database_id: str,
- notion_backend: NotionBackend,
+ model: t.Type[BaseModel],
+ project_id: str,
+ experiment_id: str,
+ ragas_api_client: RagasApiClient,
):
- super().__init__(name, model, database_id, notion_backend)
+ self.experiment_id = experiment_id
+ super().__init__(name, model, project_id, experiment_id, ragas_api_client)
def __str__(self):
return f"Experiment(name={self.name}, model={self.model.__name__})"
diff --git a/ragas_experimental/llm/__init__.py b/ragas_experimental/llm/__init__.py
new file mode 100644
index 0000000..f3540b2
--- /dev/null
+++ b/ragas_experimental/llm/__init__.py
@@ -0,0 +1,3 @@
+from ragas_experimental.llm.llm import RagasLLM, ragas_llm
+
+__all__ = ["RagasLLM", "ragas_llm"]
\ No newline at end of file
diff --git a/ragas_annotator/llm/llm.py b/ragas_experimental/llm/llm.py
similarity index 100%
rename from ragas_annotator/llm/llm.py
rename to ragas_experimental/llm/llm.py
diff --git a/ragas_experimental/metric/__init__.py b/ragas_experimental/metric/__init__.py
new file mode 100644
index 0000000..0675201
--- /dev/null
+++ b/ragas_experimental/metric/__init__.py
@@ -0,0 +1,12 @@
+from ragas_experimental.metric.result import MetricResult
+from ragas_experimental.metric.base import Metric
+from ragas_experimental.metric.discrete import DiscreteMetric
+from ragas_experimental.metric.numeric import NumericMetric
+from ragas_experimental.metric.ranking import RankingMetric
+
+__all__ = ['MetricResult',
+ 'Metric',
+ 'DiscreteMetric',
+ 'NumericMetric',
+ 'RankingMetric',
+ ]
diff --git a/ragas_annotator/metric/base.py b/ragas_experimental/metric/base.py
similarity index 94%
rename from ragas_annotator/metric/base.py
rename to ragas_experimental/metric/base.py
index 1c64ce4..0c4d1fe 100644
--- a/ragas_annotator/metric/base.py
+++ b/ragas_experimental/metric/base.py
@@ -19,11 +19,13 @@
from ..embedding.base import BaseEmbedding
from . import MetricResult
from ..llm import RagasLLM
-from ..project.core import Project
from ..model.notion_model import NotionModel
from ..prompt.dynamic_few_shot import DynamicFewShotPrompt
+if t.TYPE_CHECKING:
+ from ragas_experimental.project.core import Project
+# %% ../../nbs/metric/base.ipynb 3
@dataclass
class Metric(ABC):
"""Base class for all metrics in the LLM evaluation library."""
@@ -96,7 +98,7 @@ async def abatch_score(self, inputs: t.List[t.Dict[str, t.Any]], reasoning: bool
# Run all tasks concurrently and return results
return await asyncio.gather(*async_tasks)
- def train(self,project:Project, experiment_names: t.List[str], model:NotionModel, embedding_model: BaseEmbedding,method: t.Dict[str, t.Any]):
+ def train(self,project: "Project", experiment_names: t.List[str], model:NotionModel, embedding_model: BaseEmbedding,method: t.Dict[str, t.Any]):
assert isinstance(self.prompt, Prompt)
self.prompt = DynamicFewShotPrompt.from_prompt(self.prompt,embedding_model)
diff --git a/ragas_annotator/metric/decorator.py b/ragas_experimental/metric/decorator.py
similarity index 100%
rename from ragas_annotator/metric/decorator.py
rename to ragas_experimental/metric/decorator.py
diff --git a/ragas_annotator/metric/discrete.py b/ragas_experimental/metric/discrete.py
similarity index 100%
rename from ragas_annotator/metric/discrete.py
rename to ragas_experimental/metric/discrete.py
diff --git a/ragas_annotator/metric/numeric.py b/ragas_experimental/metric/numeric.py
similarity index 100%
rename from ragas_annotator/metric/numeric.py
rename to ragas_experimental/metric/numeric.py
diff --git a/ragas_annotator/metric/ranking.py b/ragas_experimental/metric/ranking.py
similarity index 100%
rename from ragas_annotator/metric/ranking.py
rename to ragas_experimental/metric/ranking.py
diff --git a/ragas_annotator/metric/result.py b/ragas_experimental/metric/result.py
similarity index 87%
rename from ragas_annotator/metric/result.py
rename to ragas_experimental/metric/result.py
index a50e97e..5a6dc22 100644
--- a/ragas_annotator/metric/result.py
+++ b/ragas_experimental/metric/result.py
@@ -8,10 +8,9 @@
# %% ../../nbs/metric/result.ipynb 2
import typing as t
+from fastcore.utils import patch
-
-
-
+# %% ../../nbs/metric/result.ipynb 3
class MetricResult:
"""Class to hold the result of a metric evaluation.
@@ -176,3 +175,31 @@ def to_dict(self):
"result": self._result,
"reason": self.reason
}
+
+# %% ../../nbs/metric/result.ipynb 7
+from pydantic_core import core_schema
+from pydantic import GetCoreSchemaHandler, ValidationInfo
+
+# %% ../../nbs/metric/result.ipynb 8
+@patch(cls_method=True)
+def validate(cls: MetricResult, value: t.Any, info: ValidationInfo):
+ """Provide compatibility with older Pydantic versions."""
+ if isinstance(value, MetricResult):
+ return value
+ return MetricResult(result=value)
+
+# Add Pydantic compatibility methods
+@patch(cls_method=True)
+def __get_pydantic_core_schema__(
+ cls: MetricResult,
+ _source_type: t.Any,
+ _handler: GetCoreSchemaHandler
+) -> core_schema.CoreSchema:
+ """Generate a Pydantic core schema for MetricResult."""
+ return core_schema.with_info_plain_validator_function(cls.validate)
+
+
+@patch
+def model_dump(self: MetricResult):
+ """Support Pydantic's model_dump method."""
+ return self.to_dict()
diff --git a/ragas_annotator/tracing/__init__.py b/ragas_experimental/model/__init__.py
similarity index 100%
rename from ragas_annotator/tracing/__init__.py
rename to ragas_experimental/model/__init__.py
diff --git a/ragas_annotator/model/notion_model.py b/ragas_experimental/model/notion_model.py
similarity index 100%
rename from ragas_annotator/model/notion_model.py
rename to ragas_experimental/model/notion_model.py
diff --git a/ragas_annotator/model/notion_typing.py b/ragas_experimental/model/notion_typing.py
similarity index 100%
rename from ragas_annotator/model/notion_typing.py
rename to ragas_experimental/model/notion_typing.py
diff --git a/ragas_experimental/model/pydantic_model.py b/ragas_experimental/model/pydantic_model.py
new file mode 100644
index 0000000..c3b3eca
--- /dev/null
+++ b/ragas_experimental/model/pydantic_model.py
@@ -0,0 +1,66 @@
+"""An Extended version of Pydantics `BaseModel` for some ragas specific stuff"""
+
+# AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/model/pydantic_mode.ipynb.
+
+# %% auto 0
+__all__ = ['ExtendedPydanticBaseModel']
+
+# %% ../../nbs/model/pydantic_mode.ipynb 2
+import typing as t
+
+from pydantic import BaseModel, PrivateAttr
+
+from ..typing import FieldMeta as RagasFieldMeta
+
+# %% ../../nbs/model/pydantic_mode.ipynb 3
+class ExtendedPydanticBaseModel(BaseModel):
+ """Extended Pydantic BaseModel with database integration capabilities"""
+
+ # Private attribute for storing the database row_id
+ _row_id: t.Optional[int] = PrivateAttr(default=None)
+
+ # Class variable for storing column mapping overrides
+ __column_mapping__: t.ClassVar[t.Dict[str, str]] = {}
+
+ def __init__(self, **data):
+ super().__init__(**data)
+ # Initialize column mapping if not already defined
+ if not self.__class__.__column_mapping__:
+ self._initialize_column_mapping()
+
+ @classmethod
+ def _initialize_column_mapping(cls):
+ """Initialize mapping from field names to column IDs."""
+ for field_name, field_info in cls.model_fields.items():
+ # Check if field has Column metadata (for Pydantic v2)
+ column_id = None
+ for extra in field_info.metadata or []:
+ if isinstance(extra, RagasFieldMeta) and extra.id:
+ column_id = extra.id
+ break
+
+ # If no Column metadata found, use field name as column ID
+ if not column_id:
+ column_id = field_name
+
+ cls.__column_mapping__[field_name] = column_id
+
+ @classmethod
+ def get_column_id(cls, field_name: str) -> str:
+ """Get the column ID for a given field name."""
+ if field_name not in cls.__column_mapping__:
+ raise ValueError(f"No column mapping found for field {field_name}")
+ return cls.__column_mapping__[field_name]
+
+ @classmethod
+ def set_column_id(cls, field_name: str, column_id: str):
+ """Set the column ID for a given field name."""
+ if field_name not in cls.model_fields:
+ raise ValueError(f"Field {field_name} not found in model")
+ cls.__column_mapping__[field_name] = column_id
+
+ def get_db_field_mapping(self) -> t.Dict[str, str]:
+ """Get a mapping from field names to column IDs for this model."""
+ return self.__class__.__column_mapping__
+
+
diff --git a/ragas_annotator/project.py b/ragas_experimental/project.py
similarity index 100%
rename from ragas_annotator/project.py
rename to ragas_experimental/project.py
diff --git a/ragas_experimental/project/__init__.py b/ragas_experimental/project/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/ragas_annotator/project/comparison.py b/ragas_experimental/project/comparison.py
similarity index 99%
rename from ragas_annotator/project/comparison.py
rename to ragas_experimental/project/comparison.py
index f7b21af..0621b78 100644
--- a/ragas_annotator/project/comparison.py
+++ b/ragas_experimental/project/comparison.py
@@ -14,7 +14,7 @@
from .core import Project
from ..model.notion_model import NotionModel
-import ragas_annotator.model.notion_typing as nmt
+import ragas_experimental.model.notion_typing as nmt
from ..experiment import Experiment
from ..dataset import Dataset
diff --git a/ragas_experimental/project/core.py b/ragas_experimental/project/core.py
new file mode 100644
index 0000000..a4a1f27
--- /dev/null
+++ b/ragas_experimental/project/core.py
@@ -0,0 +1,142 @@
+"""Use this class to represent the AI project that we are working on and to interact with datasets and experiments in it."""
+
+# AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/project/core.ipynb.
+
+# %% auto 0
+__all__ = ['Project', 'create_dataset_columns']
+
+# %% ../../nbs/project/core.ipynb 4
+import typing as t
+import os
+import asyncio
+
+from fastcore.utils import patch
+from pydantic import BaseModel
+
+from ..backends.factory import RagasApiClientFactory
+from ..backends.ragas_api_client import RagasApiClient
+import ragas_experimental.typing as rt
+from ..utils import async_to_sync, create_nano_id
+from ..dataset import Dataset
+from ..experiment import Experiment
+
+# %% ../../nbs/project/core.ipynb 5
+class Project:
+ def __init__(
+ self,
+ project_id: str,
+ ragas_app_client: t.Optional[RagasApiClient] = None,
+ ):
+ self.project_id = project_id
+ if ragas_app_client is None:
+ self._ragas_api_client = RagasApiClientFactory.create()
+ else:
+ self._ragas_api_client = ragas_app_client
+
+ # create the project
+ try:
+ sync_version = async_to_sync(self._ragas_api_client.get_project)
+ existing_project = sync_version(project_id=self.project_id)
+ self.project_id = existing_project["id"]
+ self.name = existing_project["title"]
+ self.description = existing_project["description"]
+ except Exception as e:
+ raise e
+
+ @classmethod
+ def create(
+ cls,
+ name: str,
+ description: str = "",
+ ragas_app_client: t.Optional[RagasApiClient] = None,
+ ):
+ ragas_app_client = RagasApiClientFactory.create()
+ sync_version = async_to_sync(ragas_app_client.create_project)
+ new_project = sync_version(title=name, description=description)
+ return cls(new_project["id"], ragas_app_client)
+
+ def delete(self):
+ sync_version = async_to_sync(self._ragas_api_client.delete_project)
+ sync_version(project_id=self.project_id)
+ print("Project deleted!")
+
+ def __repr__(self):
+ return f"Project(name='{self.name}')"
+
+# %% ../../nbs/project/core.ipynb 10
+async def create_dataset_columns(project_id, dataset_id, columns, create_dataset_column_func):
+ tasks = []
+ for column in columns:
+ tasks.append(create_dataset_column_func(
+ project_id=project_id,
+ dataset_id=dataset_id,
+ id=create_nano_id(),
+ name=column["name"],
+ type=column["type"],
+ settings={
+ "max_length": 255,
+ "is_required": True,
+ },
+ ))
+ return await asyncio.gather(*tasks)
+
+
+# %% ../../nbs/project/core.ipynb 11
+@patch
+def create_dataset(
+ self: Project, model: t.Type[BaseModel], name: t.Optional[str] = None
+) -> Dataset:
+ """Create a new dataset database.
+
+ Args:
+ name (str): Name of the dataset
+        model (BaseModel): Model class defining the dataset structure
+
+ Returns:
+ Dataset: A new dataset object for managing entries
+ """
+ # create the dataset
+ sync_version = async_to_sync(self._ragas_api_client.create_dataset)
+ dataset_info = sync_version(
+ project_id=self.project_id,
+ name=name if name is not None else model.__name__,
+ )
+
+ # create the columns for the dataset
+ column_types = rt.ModelConverter.model_to_columns(model)
+ sync_version = async_to_sync(create_dataset_columns)
+ sync_version(
+ project_id=self.project_id,
+ dataset_id=dataset_info["id"],
+ columns=column_types,
+ create_dataset_column_func=self._ragas_api_client.create_dataset_column,
+ )
+
+ # Return a new Dataset instance
+ return Dataset(
+ name=name if name is not None else model.__name__,
+ model=model,
+ project_id=self.project_id,
+ dataset_id=dataset_info["id"],
+ ragas_api_client=self._ragas_api_client,
+ )
+
+# %% ../../nbs/project/core.ipynb 15
+@patch
+def get_dataset(self: Project, dataset_id: str, model) -> Dataset:
+    """Get an existing dataset by ID."""
+    # Fetch the dataset info from the backend by its ID
+ sync_version = async_to_sync(self._ragas_api_client.get_dataset)
+ dataset_info = sync_version(
+ project_id=self.project_id,
+ dataset_id=dataset_id
+ )
+
+ # For now, return Dataset without model type
+ return Dataset(
+ name=dataset_info["name"],
+ model=model,
+ project_id=self.project_id,
+ dataset_id=dataset_id,
+ ragas_api_client=self._ragas_api_client,
+ )
diff --git a/ragas_annotator/project/experiments.py b/ragas_experimental/project/experiments.py
similarity index 62%
rename from ragas_annotator/project/experiments.py
rename to ragas_experimental/project/experiments.py
index ecc2ffd..b39c6f0 100644
--- a/ragas_annotator/project/experiments.py
+++ b/ragas_experimental/project/experiments.py
@@ -3,7 +3,7 @@
# AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/project/experiments.ipynb.
# %% auto 0
-__all__ = ['memorable_names', 'ExperimentProtocol']
+__all__ = ['memorable_names', 'create_experiment_columns', 'ExperimentProtocol']
# %% ../../nbs/project/experiments.ipynb 2
from tqdm import tqdm
@@ -15,83 +15,108 @@
from fastcore.utils import patch
from .core import Project
-from ..model.notion_model import NotionModel
+from ..model.pydantic_model import ExtendedPydanticBaseModel as BaseModel
+from ..utils import async_to_sync, create_nano_id
+from ..dataset import Dataset, BaseModelType
from ..experiment import Experiment
-from ..dataset import Dataset
+import ragas_experimental.typing as rt
# %% ../../nbs/project/experiments.ipynb 3
@patch
def create_experiment(
- self: Project, name: str, model: t.Type[NotionModel]
+ self: Project, name: str, model: t.Type[BaseModel]
) -> Experiment:
- """Create a new experiment view.
+ """Create a new experiment.
Args:
name: Name of the experiment
model: Model class defining the experiment structure
Returns:
- ExperimentView: View for managing experiment results
+ Experiment: An experiment object for managing results
"""
- if self.experiments_page_id == "":
- raise ValueError("Experiments page ID is not set")
-
- # Collect all properties from model fields
- properties = {}
- for field_name, field in model._fields.items():
- properties.update(field._to_notion_property())
-
- # Create the database
- database_id = self._notion_backend.create_new_database(
- parent_page_id=self.experiments_page_id, title=name, properties=properties
+ # Create the experiment
+ sync_version = async_to_sync(self._ragas_api_client.create_experiment)
+ experiment_info = sync_version(
+ project_id=self.project_id,
+ name=name,
)
+ # Create the columns for the experiment
+ column_types = rt.ModelConverter.model_to_columns(model)
+ sync_version = async_to_sync(create_experiment_columns)
+ sync_version(
+ project_id=self.project_id,
+ experiment_id=experiment_info["id"],
+ columns=column_types,
+ create_experiment_column_func=self._ragas_api_client.create_experiment_column,
+ )
+
+ # Return a new Experiment instance
return Experiment(
name=name,
model=model,
- database_id=database_id,
- notion_backend=self._notion_backend,
+ project_id=self.project_id,
+ experiment_id=experiment_info["id"],
+ ragas_api_client=self._ragas_api_client,
)
-# %% ../../nbs/project/experiments.ipynb 4
+# Add this helper function similar to create_dataset_columns in core.ipynb
+async def create_experiment_columns(project_id, experiment_id, columns, create_experiment_column_func):
+ tasks = []
+ for column in columns:
+ tasks.append(create_experiment_column_func(
+ project_id=project_id,
+ experiment_id=experiment_id,
+ id=create_nano_id(),
+ name=column["name"],
+ type=column["type"],
+ settings={
+ "max_length": 255,
+ "is_required": True,
+ },
+ ))
+ return await asyncio.gather(*tasks)
+
+# %% ../../nbs/project/experiments.ipynb 7
@patch
-def get_experiment(self: Project, name: str, model: t.Type[NotionModel]) -> Experiment:
- """Get an existing experiment by name."""
- if self.experiments_page_id == "":
- raise ValueError("Experiments page ID is not set")
-
- # Search for database with given name
- database_id = self._notion_backend.get_database_id(
- parent_page_id=self.experiments_page_id, name=name, return_multiple=False
+def get_experiment(self: Project, experiment_id: str, model: t.Type[BaseModel]) -> Experiment:
+ """Get an existing experiment by ID."""
+ # Get experiment info
+ sync_version = async_to_sync(self._ragas_api_client.get_experiment)
+ experiment_info = sync_version(
+ project_id=self.project_id,
+ experiment_id=experiment_id
)
return Experiment(
- name=name,
+ name=experiment_info["name"],
model=model,
- database_id=database_id,
- notion_backend=self._notion_backend,
+ project_id=self.project_id,
+ experiment_id=experiment_id,
+ ragas_api_client=self._ragas_api_client,
)
-# %% ../../nbs/project/experiments.ipynb 5
+# %% ../../nbs/project/experiments.ipynb 11
@t.runtime_checkable
class ExperimentProtocol(t.Protocol):
async def __call__(self, *args, **kwargs): ...
async def run_async(self, name: str, dataset: Dataset): ...
-# %% ../../nbs/project/experiments.ipynb 6
+# %% ../../nbs/project/experiments.ipynb 12
# this one we have to clean up
from langfuse.decorators import observe
-# %% ../../nbs/project/experiments.ipynb 7
+# %% ../../nbs/project/experiments.ipynb 13
from .naming import MemorableNames
-# %% ../../nbs/project/experiments.ipynb 8
+# %% ../../nbs/project/experiments.ipynb 14
memorable_names = MemorableNames()
-# %% ../../nbs/project/experiments.ipynb 9
+# %% ../../nbs/project/experiments.ipynb 15
@patch
def experiment(
- self: Project, experiment_model: t.Type[NotionModel], name_prefix: str = ""
+ self: Project, experiment_model, name_prefix: str = ""
):
"""Decorator for creating experiment functions without Langfuse integration.
@@ -114,6 +139,8 @@ async def run_async(dataset: Dataset, name: t.Optional[str] = None):
# if name is not provided, generate a memorable name
if name is None:
name = memorable_names.generate_unique_name()
+ if name_prefix:
+ name = f"{name_prefix}-{name}"
# Create tasks for all items
tasks = []
@@ -140,10 +167,10 @@ async def run_async(dataset: Dataset, name: t.Optional[str] = None):
return decorator
-# %% ../../nbs/project/experiments.ipynb 10
+# %% ../../nbs/project/experiments.ipynb 19
@patch
def langfuse_experiment(
- self: Project, experiment_model: t.Type[NotionModel], name_prefix: str = ""
+ self: Project, experiment_model, name_prefix: str = ""
):
"""Decorator for creating experiment functions with Langfuse integration.
diff --git a/ragas_annotator/project/naming.py b/ragas_experimental/project/naming.py
similarity index 100%
rename from ragas_annotator/project/naming.py
rename to ragas_experimental/project/naming.py
diff --git a/ragas_experimental/prompt/__init__.py b/ragas_experimental/prompt/__init__.py
new file mode 100644
index 0000000..680fe35
--- /dev/null
+++ b/ragas_experimental/prompt/__init__.py
@@ -0,0 +1,5 @@
+from ragas_experimental.prompt.base import Prompt
+from ragas_experimental.prompt.dynamic_few_shot import DynamicFewShotPrompt
+
+
+__all__ = ['Prompt', 'DynamicFewShotPrompt']
\ No newline at end of file
diff --git a/ragas_annotator/prompt/base.py b/ragas_experimental/prompt/base.py
similarity index 100%
rename from ragas_annotator/prompt/base.py
rename to ragas_experimental/prompt/base.py
diff --git a/ragas_annotator/prompt/dynamic_few_shot.py b/ragas_experimental/prompt/dynamic_few_shot.py
similarity index 100%
rename from ragas_annotator/prompt/dynamic_few_shot.py
rename to ragas_experimental/prompt/dynamic_few_shot.py
diff --git a/ragas_experimental/tracing/__init__.py b/ragas_experimental/tracing/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/ragas_annotator/tracing/langfuse.py b/ragas_experimental/tracing/langfuse.py
similarity index 100%
rename from ragas_annotator/tracing/langfuse.py
rename to ragas_experimental/tracing/langfuse.py
diff --git a/ragas_experimental/typing.py b/ragas_experimental/typing.py
new file mode 100644
index 0000000..f537ffe
--- /dev/null
+++ b/ragas_experimental/typing.py
@@ -0,0 +1,431 @@
+"""Field Metadata for python's `t.Annotate`."""
+
+# AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/typing.ipynb.
+
+# %% auto 0
+__all__ = ['ColumnType', 'FieldMeta', 'Number', 'Text', 'Select', 'MultiSelect', 'Checkbox', 'Date', 'Custom', 'ModelConverter']
+
+# %% ../nbs/typing.ipynb 2
+import typing as t
+from enum import Enum
+from pydantic import BaseModel, create_model
+from datetime import datetime, date
+import inspect
+
+from .metric.result import MetricResult
+
+# %% ../nbs/typing.ipynb 4
class ColumnType(str, Enum):
    """Column types supported by the Ragas API.

    Values are the exact wire-format strings the API expects — note that
    MULTI_SELECT is camelCase ("multiSelect") while the rest are lowercase.
    """
    NUMBER = "number"
    TEXT = "text"
    SELECT = "select"
    MULTI_SELECT = "multiSelect"
    CHECKBOX = "checkbox"
    DATE = "date"
    CUSTOM = "custom"
+
+
+# %% ../nbs/typing.ipynb 5
class FieldMeta:
    """Base metadata for field type annotations.

    Holds the Ragas column type plus optional column settings; subclasses
    pre-fill the type and translate their keyword options into settings.
    """

    def __init__(self, type, required=True, id: t.Optional[str] = None, **settings):
        self.type = type
        self.required = required
        self.id = id
        # Store a private copy so this metadata object owns its settings.
        self.settings = dict(settings)
+
+# %% ../nbs/typing.ipynb 6
class Number(FieldMeta):
    """Number field metadata.

    Optional min/max bounds are emitted as a "range" settings entry; when
    neither bound is given, no range settings are sent at all.
    """

    def __init__(self, min_value: t.Optional[float] = None, max_value: t.Optional[float] = None, required: bool = True, id: t.Optional[str] = None):
        # Collect only the bounds that were actually provided.
        bounds = {
            key: value
            for key, value in (("min", min_value), ("max", max_value))
            if value is not None
        }
        settings = {"range": bounds} if bounds else {}
        super().__init__(ColumnType.NUMBER, required, id, **settings)
+
+
+# %% ../nbs/typing.ipynb 7
class Text(FieldMeta):
    """Text field metadata.

    Defaults to a 1000-character limit; pass max_length=None explicitly to
    omit the limit from the column settings.
    """

    def __init__(self, max_length: int = 1000, required: bool = True, id: t.Optional[str] = None):
        settings = {} if max_length is None else {"max_length": max_length}
        super().__init__(ColumnType.TEXT, required, id, **settings)
+
+# %% ../nbs/typing.ipynb 8
class Select(FieldMeta):
    """Select field metadata.

    Args:
        options: Allowed option names; each becomes a {"name": ...} entry
            in the column settings.
        required: Whether the field is required.
        id: Optional explicit column id (added for consistency with
            Number/Text; defaults to None so existing calls are unchanged).
    """
    def __init__(self, options: t.Optional[t.List[str]] = None, required: bool = True, id: t.Optional[str] = None):
        settings = {}
        if options:
            settings["options"] = [{"name": option} for option in options]
        super().__init__(ColumnType.SELECT, required, id, **settings)
+
+# %% ../nbs/typing.ipynb 9
class MultiSelect(FieldMeta):
    """MultiSelect field metadata.

    Args:
        options: Allowed option names; each becomes a {"name": ...} entry
            in the column settings.
        required: Whether the field is required.
        id: Optional explicit column id (added for consistency with
            Number/Text; defaults to None so existing calls are unchanged).
    """
    def __init__(self, options: t.Optional[t.List[str]] = None, required: bool = True, id: t.Optional[str] = None):
        settings = {}
        if options:
            settings["options"] = [{"name": option} for option in options]
        super().__init__(ColumnType.MULTI_SELECT, required, id, **settings)
+
+
+# %% ../nbs/typing.ipynb 10
class Checkbox(FieldMeta):
    """Checkbox field metadata.

    Args:
        required: Whether the field is required.
        id: Optional explicit column id (added for consistency with
            Number/Text; defaults to None so existing calls are unchanged).
    """
    def __init__(self, required: bool = True, id: t.Optional[str] = None):
        super().__init__(ColumnType.CHECKBOX, required, id)
+
+
+# %% ../nbs/typing.ipynb 11
class Date(FieldMeta):
    """Date field metadata.

    Args:
        include_time: When True, adds "include_time" to the settings so the
            column stores a datetime rather than a date.
        required: Whether the field is required.
        id: Optional explicit column id (added for consistency with
            Number/Text; defaults to None so existing calls are unchanged).
    """
    def __init__(self, include_time: bool = False, required: bool = True, id: t.Optional[str] = None):
        settings = {}
        if include_time:
            settings["include_time"] = include_time
        super().__init__(ColumnType.DATE, required, id, **settings)
+
+
+# %% ../nbs/typing.ipynb 12
class Custom(FieldMeta):
    """Custom field metadata.

    Args:
        custom_type: Backend-specific type name; stored under "type" in the
            settings only when non-empty.
        required: Whether the field is required.
        id: Optional explicit column id (added for consistency with
            Number/Text; defaults to None so existing calls are unchanged).
    """
    def __init__(self, custom_type: str = "", required: bool = True, id: t.Optional[str] = None):
        settings = {}
        if custom_type:
            settings["type"] = custom_type
        super().__init__(ColumnType.CUSTOM, required, id, **settings)
+
+# %% ../nbs/typing.ipynb 14
class ModelConverter:
    """Convert Pydantic models to Ragas API columns and rows.

    Columns are derived from a model class's field annotations (honoring our
    FieldMeta annotations inside t.Annotated); rows are derived from model
    instances via model_dump(). MetricResult fields get special treatment:
    an editable result column plus a companion "<field>_reason" text column.
    """

    @staticmethod
    def infer_field_type(annotation, field_info=None):
        """Infer field type from Python type annotation.

        Returns a FieldMeta instance (Text/Number/Select/...) describing the
        column; defaults to Text() for anything unrecognized.
        """
        # Check for Annotated with our custom metadata
        origin = t.get_origin(annotation)
        args = t.get_args(annotation)

        # Check if this is a MetricResult type
        if annotation is MetricResult or (hasattr(annotation, "__origin__") and annotation.__origin__ is MetricResult):
            # Default to Text since we can't determine the result type statically
            return Text()

        # If this is an Annotated field with our metadata
        if origin is t.Annotated and len(args) > 1:
            for arg in args[1:]:
                if isinstance(arg, FieldMeta):
                    return arg

            # If no field metadata found, infer from the base type
            return ModelConverter.infer_field_type(args[0], field_info)

        # Handle Optional, List, etc.
        # NOTE(review): PEP 604 unions (`X | None`, 3.10+) have origin
        # types.UnionType, not t.Union, and so fall through to the Text()
        # default — confirm whether that is intended.
        if origin is t.Union:
            if type(None) in args:
                # This is Optional[T]
                non_none_args = [arg for arg in args if arg is not type(None)]
                if len(non_none_args) == 1:
                    # Get the field type of the non-None arg
                    field_meta = ModelConverter.infer_field_type(non_none_args[0], field_info)
                    # NOTE(review): when T is Annotated with a FieldMeta, this
                    # mutates the shared metadata object from the annotation
                    # itself — verify no caller reuses that annotation.
                    field_meta.required = False
                    return field_meta

        # Handle List and array types
        # NOTE: here we are converting lists to strings, except for literal types
        if origin is list or origin is t.List:
            if len(args) > 0:
                # Check if it's a list of literals
                if t.get_origin(args[0]) is t.Literal:
                    literal_options = t.get_args(args[0])
                    return MultiSelect(options=list(literal_options))
                # Otherwise just a regular list
                return Text() # Default to Text for lists

        # Handle Literal
        if origin is t.Literal:
            return Select(options=list(args))

        # Basic type handling
        if annotation is str:
            return Text()
        elif annotation is int or annotation is float:
            return Number()
        elif annotation is bool:
            return Checkbox()
        elif annotation is datetime or annotation is date:
            return Date(include_time=annotation is datetime)

        # Default to Text for complex or unknown types
        return Text()

    @staticmethod
    def infer_metric_result_type(field_value):
        """Infer field type from a MetricResult instance's stored result."""
        if field_value is None:
            return Text()

        # Infer type based on the _result type
        result_value = field_value._result

        # bool must be tested before (int, float): bool is an int subclass,
        # so the previous ordering made the Checkbox branch unreachable.
        if isinstance(result_value, bool):
            return Checkbox()
        elif isinstance(result_value, (int, float)):
            return Number()
        elif isinstance(result_value, (list, tuple)):
            # For ranking metrics that return lists
            return Text()
        else:
            # Default to Text for string or other types
            return Text()

    @classmethod
    def model_to_columns(cls, model_class):
        """Convert a Pydantic model class to Ragas API column definitions.

        MetricResult fields produce two columns (result + "<name>_reason"),
        both editable; all other fields produce one non-editable column.
        """
        columns = []
        for field_name, field_info in model_class.model_fields.items():
            # Get the field's type annotation
            annotation = field_info.annotation

            # Special handling for MetricResult fields.
            # NOTE(review): the str(...).find("MetricResult") fallback is a
            # string match on the repr and may over-match (e.g. Optional or
            # container types mentioning MetricResult) — confirm intent.
            if (annotation is MetricResult or
                (hasattr(annotation, "__origin__") and annotation.__origin__ is MetricResult) or
                (hasattr(field_info, "annotation") and str(field_info.annotation).find("MetricResult") != -1)):

                # Create column for the result value
                field_meta = cls.infer_field_type(annotation, field_info)
                column = {
                    "id": field_name,
                    "name": field_name,
                    "type": field_meta.type.value,
                    "settings": field_meta.settings.copy(),
                    "editable": True
                }
                columns.append(column)

                # Create additional column for the reason
                reason_column = {
                    "id": f"{field_name}_reason",
                    "name": f"{field_name}_reason",
                    "type": ColumnType.TEXT.value,
                    "settings": Text().settings.copy(),
                    "editable": True
                }
                columns.append(reason_column)
            else:
                # Regular field handling
                field_meta = cls.infer_field_type(annotation, field_info)

                column = {
                    "id": field_name,
                    "name": field_name,
                    "type": field_meta.type.value,
                    "settings": field_meta.settings.copy(),
                    "editable": False # Non-MetricResult fields are not editable
                }

                columns.append(column)

        return columns

    @classmethod
    def instance_to_row(cls, instance, model_class=None):
        """Convert a Pydantic model instance to a Ragas API row.

        Returns {"data": [{"column_id": ..., "data": ...}, ...]}; MetricResult
        values expand into a result cell plus a "<name>_reason" cell.
        """
        if model_class is None:
            model_class = instance.__class__

        row_cells = []
        model_data = instance.model_dump()

        for field_name, field_info in model_class.model_fields.items():
            if field_name in model_data:
                value = model_data[field_name]
                # Get the field's type annotation
                annotation = field_info.annotation

                # Special handling for MetricResult fields
                if isinstance(value, MetricResult):
                    # (removed a dead `field_meta = cls.infer_metric_result_type(value)`
                    # assignment — its result was never used here)
                    processed_value = value._result

                    # Add result cell
                    row_cells.append({
                        "column_id": field_name,
                        "data": processed_value
                    })

                    # Add reason cell
                    row_cells.append({
                        "column_id": f"{field_name}_reason",
                        "data": value.reason
                    })
                else:
                    # Regular field handling
                    field_meta = cls.infer_field_type(annotation, field_info)

                    # Special handling for various types
                    if field_meta.type == ColumnType.MULTI_SELECT and isinstance(value, list):
                        # Convert list to string format accepted by API
                        processed_value = value
                    elif field_meta.type == ColumnType.DATE and isinstance(value, (datetime, date)):
                        # Format date as string
                        processed_value = value.isoformat()
                    else:
                        processed_value = value

                    row_cells.append({
                        "column_id": field_name,
                        "data": processed_value
                    })

        return {
            "data": row_cells
        }

    @classmethod
    def instances_to_rows(cls, instances, model_class=None):
        """Convert multiple Pydantic model instances to Ragas API rows.

        The model class is taken from the first instance when not given;
        an empty input yields an empty list.
        """
        if not instances:
            return []

        if model_class is None and instances:
            model_class = instances[0].__class__

        return [cls.instance_to_row(instance, model_class) for instance in instances]
+
+# %% ../nbs/typing.ipynb 15
+# class ModelConverter:
+# """Convert Pydantic models to Ragas API columns and rows."""
+
+# @staticmethod
+# def infer_field_type(annotation, field_info=None):
+# """Infer field type from Python type annotation."""
+# # Check for Annotated with our custom metadata
+# origin = t.get_origin(annotation)
+# args = t.get_args(annotation)
+
+# # If this is an Annotated field with our metadata
+# if origin is t.Annotated and len(args) > 1:
+# for arg in args[1:]:
+# if isinstance(arg, FieldMeta):
+# return arg
+
+# # If no field metadata found, infer from the base type
+# return ModelConverter.infer_field_type(args[0], field_info)
+
+# # Handle Optional, List, etc.
+# if origin is t.Union:
+# if type(None) in args:
+# # This is Optional[T]
+# non_none_args = [arg for arg in args if arg is not type(None)]
+# if len(non_none_args) == 1:
+# # Get the field type of the non-None arg
+# field_meta = ModelConverter.infer_field_type(non_none_args[0], field_info)
+# field_meta.required = False
+# return field_meta
+
+# # Handle List and array types
+# # NOTE: here we are converting lists to strings, except for literal types
+# if origin is list or origin is t.List:
+# if len(args) > 0:
+# # Check if it's a list of literals
+# if t.get_origin(args[0]) is t.Literal:
+# literal_options = t.get_args(args[0])
+# return MultiSelect(options=list(literal_options))
+# # Otherwise just a regular list
+# return Text() # Default to Text for lists
+
+# # Handle Literal
+# if origin is t.Literal:
+# return Select(options=list(args))
+
+# # Basic type handling
+# if annotation is str:
+# return Text()
+# elif annotation is int or annotation is float:
+# return Number()
+# elif annotation is bool:
+# return Checkbox()
+# elif annotation is datetime or annotation is date:
+# return Date(include_time=annotation is datetime)
+
+# # Default to Text for complex or unknown types
+# return Text()
+
+# @classmethod
+# def model_to_columns(cls, model_class):
+# """Convert a Pydantic model class to Ragas API column definitions."""
+# columns = []
+# for field_name, field_info in model_class.model_fields.items():
+# # Get the field's type annotation
+# annotation = field_info.annotation
+
+# # Try to get field metadata
+# field_meta = cls.infer_field_type(annotation, field_info)
+
+# # Create column definition
+# column = {
+# "id": field_name,
+# "name": field_name,
+# "type": field_meta.type.value,
+# "settings": field_meta.settings.copy()
+# }
+
+# columns.append(column)
+
+# return columns
+
+# @classmethod
+# def instance_to_row(cls, instance, model_class=None):
+# """Convert a Pydantic model instance to a Ragas API row."""
+# if model_class is None:
+# model_class = instance.__class__
+
+# row_cells = []
+# model_data = instance.model_dump()
+
+# for field_name, field_info in model_class.model_fields.items():
+# if field_name in model_data:
+# value = model_data[field_name]
+# # Process value based on field type
+# annotation = field_info.annotation
+# field_meta = cls.infer_field_type(annotation, field_info)
+
+# # Special handling for various types
+# if field_meta.type == ColumnType.MULTI_SELECT and isinstance(value, list):
+# # Convert list to string format accepted by API
+# processed_value = value
+# elif field_meta.type == ColumnType.DATE and isinstance(value, (datetime, date)):
+# # Format date as string
+# processed_value = value.isoformat()
+# else:
+# processed_value = value
+
+# row_cells.append({
+# "column_id": field_name,
+# "data": processed_value
+# })
+
+# return {
+# "data": row_cells
+# }
+
+# @classmethod
+# def instances_to_rows(cls, instances, model_class=None):
+# """Convert multiple Pydantic model instances to Ragas API rows."""
+# if not instances:
+# return []
+
+# if model_class is None and instances:
+# model_class = instances[0].__class__
+
+# return [cls.instance_to_row(instance, model_class) for instance in instances]
diff --git a/ragas_experimental/utils.py b/ragas_experimental/utils.py
new file mode 100644
index 0000000..d330081
--- /dev/null
+++ b/ragas_experimental/utils.py
@@ -0,0 +1,45 @@
+# AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/utils.ipynb.
+
+# %% auto 0
+__all__ = ['create_nano_id', 'async_to_sync']
+
+# %% ../nbs/utils.ipynb 1
+import string
+import uuid
+import functools
+import asyncio
+
+# %% ../nbs/utils.ipynb 2
def create_nano_id(size=12):
    """Generate a short alphanumeric id of exactly `size` characters.

    Randomness comes from uuid4 (122 random bits), encoded in base62
    ([a-zA-Z0-9]). Not cryptographically hardened — use `secrets` if these
    ids ever become security-sensitive.
    """
    # Define characters to use (alphanumeric)
    alphabet = string.ascii_letters + string.digits

    # Generate UUID and convert to int
    uuid_int = uuid.uuid4().int

    # Convert to base62
    result = ""
    while uuid_int:
        uuid_int, remainder = divmod(uuid_int, len(alphabet))
        result = alphabet[remainder] + result

    # Truncate to the desired length, and left-pad in the (rare) case the
    # base62 encoding came out shorter than `size` — the original comment
    # promised padding but never did it, so callers could get short ids.
    return result[:size].rjust(size, "0")
+
+# %% ../nbs/utils.ipynb 3
def async_to_sync(async_func):
    """Convert an async function to a blocking sync function.

    Works both from plain sync code (no running event loop) and from inside
    a running loop (e.g. Jupyter), where the coroutine is executed on a
    worker thread with its own loop so the current loop is neither blocked
    nor re-entered.
    """
    @functools.wraps(async_func)
    def sync_wrapper(*args, **kwargs):
        try:
            # Probe for a *running* loop only. The previous implementation
            # used asyncio.get_event_loop(), which is deprecated for this
            # purpose (3.10+) and could attach to a stale/closed loop.
            asyncio.get_running_loop()
        except RuntimeError:
            # No loop running in this thread: asyncio.run creates a fresh
            # loop and tears it down cleanly.
            return asyncio.run(async_func(*args, **kwargs))
        # A loop is already running here: run the coroutine to completion
        # on a separate thread instead of blocking this loop.
        import concurrent.futures
        with concurrent.futures.ThreadPoolExecutor() as executor:
            future = executor.submit(asyncio.run, async_func(*args, **kwargs))
            return future.result()
    return sync_wrapper
diff --git a/settings.ini b/settings.ini
index 1f8bd3d..07695a6 100644
--- a/settings.ini
+++ b/settings.ini
@@ -3,8 +3,9 @@
# See https://github.com/AnswerDotAI/nbdev/blob/main/settings.ini for examples.
### Python library ###
+# TODO: change to ragas_experimental
repo = ragas_annotator
-lib_name = %(repo)s
+lib_name = ragas_experimental
version = 0.0.2
min_python = 3.7
license = apache2
@@ -12,7 +13,7 @@ black_formatting = False
### nbdev ###
doc_path = _docs
-lib_path = ragas_annotator
+lib_path = ragas_experimental
nbs_path = nbs
recursive = True
tst_flags = notest
@@ -38,7 +39,7 @@ status = 3
user = explodinggradients
### Dependencies ###
-requirements = notion-client fastcore tqdm langfuse instructor pydantic numpy
+requirements = fastcore tqdm langfuse instructor pydantic numpy
dev_requirements = pytest
# console_scripts =
# conda_user =