diff --git a/examples/deep_research_api/introduction_to_deep_research_api_agents.ipynb b/examples/deep_research_api/introduction_to_deep_research_api_agents.ipynb
index 7a57f526f0..0e45a7d791 100644
--- a/examples/deep_research_api/introduction_to_deep_research_api_agents.ipynb
+++ b/examples/deep_research_api/introduction_to_deep_research_api_agents.ipynb
(350 kB)\n", + "\u001b[2K \u001b[38;2;114;156;31m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m350.4/350.4 kB\u001b[0m \u001b[31m2.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m MB/s\u001b[0m eta \u001b[36m0:00:01\u001b[0m:01\u001b[0m\n", + "\u001b[?25hInstalling collected packages: urllib3, typing-extensions, tqdm, sniffio, rpds-py, python-multipart, python-dotenv, jiter, idna, httpx-sse, h11, distro, colorama, click, charset_normalizer, certifi, attrs, annotated-types, uvicorn, typing-inspection, types-requests, requests, referencing, pydantic-core, httpcore, griffe, anyio, starlette, sse-starlette, pydantic, jsonschema-specifications, httpx, pydantic-settings, openai, jsonschema, mcp, openai-agents\n", + "Successfully installed annotated-types-0.7.0 anyio-4.9.0 attrs-25.3.0 certifi-2025.7.14 charset_normalizer-3.4.2 click-8.2.1 colorama-0.4.6 distro-1.9.0 griffe-1.9.0 h11-0.16.0 httpcore-1.0.9 httpx-0.28.1 httpx-sse-0.4.1 idna-3.10 jiter-0.10.0 jsonschema-4.25.0 jsonschema-specifications-2025.4.1 mcp-1.12.2 openai-1.97.1 openai-agents-0.2.3 pydantic-2.11.7 pydantic-core-2.33.2 pydantic-settings-2.10.1 python-dotenv-1.1.1 python-multipart-0.0.20 referencing-0.36.2 requests-2.32.4 rpds-py-0.26.0 sniffio-1.3.1 sse-starlette-3.0.2 starlette-0.47.2 tqdm-4.67.1 types-requests-2.32.4.20250611 typing-extensions-4.14.1 typing-inspection-0.4.1 urllib3-2.5.0 uvicorn-0.35.0\n", + "\n", + "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m A new release of pip is available: \u001b[0m\u001b[31;49m24.0\u001b[0m\u001b[39;49m -> \u001b[0m\u001b[32;49m25.1.1\u001b[0m\n", + "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m To update, run: \u001b[0m\u001b[32;49mpip3 install --upgrade pip\u001b[0m\n", + "Note: you may need to restart the kernel to use updated packages.\n" + ] + } + ], "source": [ "%pip install --upgrade \"openai>=1.88\" \"openai-agents>=0.0.19\"" ] @@ -59,7 +210,7 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 19, "metadata": { "id": "OWnnTNZJj0Wt" }, @@ -94,7 +245,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 20, "metadata": { "colab": { "base_uri": "https://localhost:8080/" @@ -102,7 +253,53 @@ "id": "c91rFNYWj0Wy", "outputId": "6f2e3bbe-f321-4a8e-b7df-6b6c5bade65a" }, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Researching: Research the economic impact of semaglutide on global healthcare systems.\n", + "\n", + "--- switched to agent: Research Agent ---\n", + "\n", + "--- RESEARCHING ---\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Error streaming response: Error code: 401 - {'error': {'message': 'Missing bearer or basic authentication in header', 'type': 'invalid_request_error', 'param': None, 'code': None}}\n" + ] + }, + { + "ename": "AuthenticationError", + "evalue": "Error code: 401 - {'error': {'message': 'Missing bearer or basic authentication in header', 'type': 'invalid_request_error', 'param': None, 'code': None}}", + "output_type": "error", + "traceback": [ + "\u001b[31m---------------------------------------------------------------------------\u001b[39m", + "\u001b[31mAuthenticationError\u001b[39m Traceback (most recent call last)", + "\u001b[36mCell\u001b[39m\u001b[36m \u001b[39m\u001b[32mIn[20]\u001b[39m\u001b[32m, line 34\u001b[39m\n\u001b[32m 31\u001b[39m \u001b[38;5;28;01mreturn\u001b[39;00m result_stream.final_output\n\u001b[32m 33\u001b[39m 
@@ -168,7 +168,7 @@
 },
 {
 "cell_type": "code",
- "execution_count": 5,
+ "execution_count": null,
 "metadata": {
 "id": "glkoOX6q6Ph9"
 },
@@ -260,14 +260,14 @@
 " - Streams intermediate events for transparency\n",
 " - Outputs final Research Artifact (which we later parse)\n",
 "\n",
- "![../../images/agents_dr.png](../../../images/agent_dr.png)\n",
+ "![Four-Agent Diagram](https://raw.githubusercontent.com/openai/openai-cookbook/main/images/agent_dr.png)\n",
 "\n",
 "For more insight into _how_ the MCP server is build. [See this resource.](https://cookbook.openai.com/examples/deep_research_api/how_to_build_a_deep_research_mcp_server/readme )"
 ]
 },
 {
 "cell_type": "code",
- "execution_count": 6,
+ "execution_count": null,
 "metadata": {
 "id": "y-8WVGMBj0Wz"
 },
@@ -549,7 +549,7 @@
 "provenance": []
 },
 "kernelspec": {
- "display_name": ".venv",
+ "display_name": "Python 3",
 "language": "python",
 "name": "python3"
 },
@@ -563,7 +563,7 @@
 "name": "python",
 "nbconvert_exporter": "python",
 "pygments_lexer": "ipython3",
- "version": "3.13.0"
+ "version": "3.12.3"
 }
 },
 "nbformat": 4,