|
20 | 20 | "metadata": {}, |
21 | 21 | "outputs": [], |
22 | 22 | "source": [ |
23 | | - "# Import needed modules from LightRAGfrom adalflow.core.component import Component\n", |
| 23 | + "from IPython.display import clear_output\n", |
| 24 | + "!pip install -U adalflow[openai,groq,faiss-cpu]\n", |
| 25 | + "clear_output()" |
| 26 | + ] |
| 27 | + }, |
| 28 | + { |
| 29 | + "cell_type": "code", |
| 30 | + "execution_count": null, |
| 31 | + "metadata": {}, |
| 32 | + "outputs": [], |
| 33 | + "source": [ |
| 34 | + "# Import needed modules from Adalflow\n", |
| 35 | + "import os\n", |
| 36 | + "from getpass import getpass\n", |
| 37 | + "from adalflow.core.component import Component\n", |
24 | 38 | "from adalflow.core.generator import Generator\n", |
25 | | - "from adalflow.components.memory.memory import Memory" |
| 39 | + "from adalflow.components.memory.memory import Memory\n", |
| 40 | + "from adalflow.components.model_client import OpenAIClient # Here, we use the OpenAIClient as an example, but you can use any other clients (with the corresponding API Key as needed), such as AnthropicAPIClient" |
26 | 41 | ] |
27 | 42 | }, |
28 | 43 | { |
|
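The comment on the new import line says any model client can stand in for OpenAIClient. Since the install line above also pulls in the groq extra, a rough sketch of that swap might look like the following; GroqAPIClient and the model id are assumptions about this adalflow version, not something the diff establishes.

```python
# A minimal sketch, not part of the diff: swapping the OpenAI client for Groq.
# GroqAPIClient and the model id below are assumptions about this adalflow version.
import os
from getpass import getpass

from adalflow.core.generator import Generator
from adalflow.components.model_client import GroqAPIClient  # assumed client name

if "GROQ_API_KEY" not in os.environ:
    os.environ["GROQ_API_KEY"] = getpass("Please enter your Groq API key: ")

groq_generator = Generator(
    model_client=GroqAPIClient(),
    model_kwargs={"model": "llama-3.1-8b-instant"},  # example model id; adjust as needed
)
```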
31 | 46 | "metadata": {}, |
32 | 47 | "outputs": [], |
33 | 48 | "source": [ |
34 | | - "# Here, we use the OpenAIClient as an example, but you can use any other clients (with the corresponding API Key as needed), such as AnthropicAPIClient\n", |
35 | | - "from adalflow.components.model_client import OpenAIClient\n", |
36 | | - "OPENAI_API_KEY=\"YOUR_API_KEY\" # Replace with your OpenAI API Key, or you can put it in a .env file" |
| 49 | + "# Prompt user to enter their API keys securely\n", |
| 50 | + "openai_api_key = getpass(\"Please enter your OpenAI API key: \")\n", |
| 51 | + "# Set environment variables\n", |
| 52 | + "os.environ['OPENAI_API_KEY'] = openai_api_key\n", |
| 53 | + "# Replace with your OpenAI API Key, or you can put it in a .env file" |
37 | 54 | ] |
38 | 55 | }, |
39 | 56 | { |
|
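The last comment in this hunk points at a .env-based alternative to the getpass prompt. A minimal sketch of that route, assuming python-dotenv (not installed by the notebook's pip line) and a local .env file:

```python
# A minimal sketch of the .env alternative mentioned in the comment above.
# Assumes python-dotenv is installed (pip install python-dotenv) and that a .env
# file in the working directory contains a line such as: OPENAI_API_KEY=sk-...
import os
from dotenv import load_dotenv

load_dotenv()  # reads .env into os.environ without overwriting variables already set
assert os.environ.get("OPENAI_API_KEY"), "OPENAI_API_KEY not found in the environment or .env"
```

Because load_dotenv() leaves already-set variables untouched by default, this composes safely with the getpass cell above.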
48 | 65 | " super().__init__()\n", |
49 | 66 | " self.generator = Generator(\n", |
50 | 67 | " model_client=OpenAIClient(),\n", |
51 | | - " model_kwargs={'model': 'gpt-3.5-turbo'}\n", |
| 68 | + " model_kwargs={'model': 'gpt-4o-mini'}\n", |
52 | 69 | " )\n", |
53 | 70 | " self.chat_history = Memory() # Memory to store the chat history\n", |
54 | 71 | " \n", |
|
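The hunk above only touches the ChatBot constructor (the new model id plus the Memory field). For context, below is a sketch of how the generator and memory could be combined into a chat loop; the `input_str` prompt variable, the Memory call returning formatted history, and the `add_dialog_turn` signature are assumptions about this adalflow version rather than anything shown in the diff.

```python
# A minimal sketch of a chat loop built on the constructor shown in the diff.
# Assumes the default Generator prompt exposes an "input_str" variable and that
# Memory provides add_dialog_turn(user_query=..., assistant_response=...).
from adalflow.core.component import Component
from adalflow.core.generator import Generator
from adalflow.components.memory.memory import Memory
from adalflow.components.model_client import OpenAIClient

class ChatBot(Component):
    def __init__(self):
        super().__init__()
        self.generator = Generator(
            model_client=OpenAIClient(),
            model_kwargs={"model": "gpt-4o-mini"},
        )
        self.chat_history = Memory()  # Memory to store the chat history

    def call(self) -> None:
        print("Type 'exit' to end the chat.")
        while True:
            user_input = input("You: ")
            if user_input.lower() == "exit":
                break
            history = self.chat_history()  # assumed to return the formatted past turns
            output = self.generator(
                prompt_kwargs={"input_str": f"{history}\nUser: {user_input}\nYou:"}
            )
            reply = output.data if output and output.data else output  # GeneratorOutput keeps text in .data
            self.chat_history.add_dialog_turn(
                user_query=user_input, assistant_response=reply
            )
            print(f"ChatBot: {reply}")
```

Running `ChatBot().call()` in a notebook cell would then start an interactive loop; only the constructor lines above are actually part of this change.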