|
25 | 25 | "metadata": {},
|
26 | 26 | "outputs": [],
|
27 | 27 | "source": [
|
28 | | - "%pip install -qU langchain>=0.2.8 langchain-openai langchain-anthropic langchain-google-vertexai"
| 28 | + "%pip install -qU langchain langchain-openai langchain-anthropic langchain-google-genai"
29 | 29 | ]
|
30 | 30 | },
|
31 | 31 | {
|
|
38 | 38 | },
|
39 | 39 | {
|
40 | 40 | "cell_type": "code",
|
41 | | - "execution_count": 2,
| 41 | + "execution_count": 5,
42 | 42 | "id": "79e14913-803c-4382-9009-5c6af3d75d35",
|
43 | 43 | "metadata": {
|
44 | 44 | "execution": {
|
|
49 | 49 | }
|
50 | 50 | },
|
51 | 51 | "outputs": [
|
52 | | - {
53 | | - "name": "stderr",
54 | | - "output_type": "stream",
55 | | - "text": [
56 | | - "/var/folders/4j/2rz3865x6qg07tx43146py8h0000gn/T/ipykernel_95293/571506279.py:4: LangChainBetaWarning: The function `init_chat_model` is in beta. It is actively being worked on, so the API may change.\n",
57 | | - " gpt_4o = init_chat_model(\"gpt-4o\", model_provider=\"openai\", temperature=0)\n"
58 | | - ]
59 | | - },
60 | | - {
61 | | - "name": "stdout",
62 | | - "output_type": "stream",
63 | | - "text": [
64 | | - "GPT-4o: I'm an AI created by OpenAI, and I don't have a personal name. How can I assist you today?\n",
65 | | - "\n"
66 | | - ]
67 | | - },
68 | | - {
69 | | - "name": "stdout",
70 | | - "output_type": "stream",
71 | | - "text": [
72 | | - "Claude Opus: My name is Claude. It's nice to meet you!\n",
73 | | - "\n"
74 | | - ]
75 | | - },
76 | 52 | {
|
77 | 53 | "name": "stdout",
|
78 | 54 | "output_type": "stream",
|
79 | 55 | "text": [
|
80 | | - "Gemini 1.5: I am a large language model, trained by Google. \n",
| 56 | + "GPT-4o: I’m called ChatGPT. How can I assist you today?\n",
81 | 57 | "\n",
|
82 | | - "I don't have a name like a person does. You can call me Bard if you like! 😊 \n",
| 58 | + "Claude Opus: My name is Claude. It's nice to meet you!\n",
83 | 59 | "\n",
|
| 60 | + "Gemini 2.5: I do not have a name. I am a large language model, trained by Google.\n",
84 | 61 | "\n"
|
85 | 62 | ]
|
86 | 63 | }
|
87 | 64 | ],
|
88 | 65 | "source": [
|
89 | 66 | "from langchain.chat_models import init_chat_model\n",
|
90 | 67 | "\n",
|
| 68 | + "# Don't forget to set the API key environment variables for the providers you use!\n",
| 69 | + "# For example, set them in your terminal or in a .env file:\n",
| 70 | + "# export OPENAI_API_KEY=\"your_openai_api_key\"\n",
| 71 | + "\n",
91 | 72 | "# Returns a langchain_openai.ChatOpenAI instance.\n",
|
92 | 73 | "gpt_4o = init_chat_model(\"gpt-4o\", model_provider=\"openai\", temperature=0)\n",
|
93 | 74 | "# Returns a langchain_anthropic.ChatAnthropic instance.\n",
|
|
96 | 77 | ")\n",
|
97 | | - "# Returns a langchain_google_vertexai.ChatVertexAI instance.\n",
| 78 | + "# Returns a langchain_google_genai.ChatGoogleGenerativeAI instance.\n",
|
98 | 79 | "gemini_15 = init_chat_model(\n",
|
99 | | - " \"gemini-1.5-pro\", model_provider=\"google_vertexai\", temperature=0\n",
| 80 | + " \"gemini-2.5-pro\", model_provider=\"google_genai\", temperature=0\n",
100 | 81 | ")\n",
|
101 | 82 | "\n",
|
102 | 83 | "# Since all model integrations implement the ChatModel interface, you can use them in the same way.\n",
|
103 | 84 | "print(\"GPT-4o: \" + gpt_4o.invoke(\"what's your name\").content + \"\\n\")\n",
|
104 | 85 | "print(\"Claude Opus: \" + claude_opus.invoke(\"what's your name\").content + \"\\n\")\n",
|
105 | | - "print(\"Gemini 1.5: \" + gemini_15.invoke(\"what's your name\").content + \"\\n\")"
| 86 | + "print(\"Gemini 2.5: \" + gemini_15.invoke(\"what's your name\").content + \"\\n\")"
106 | 87 | ]
|
107 | 88 | },
|
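For reviewers who would rather read the updated first cell as plain Python than as escaped JSON source, here is a minimal standalone sketch of what it does after this change. It assumes the packages from the updated install cell are present and that OPENAI_API_KEY, ANTHROPIC_API_KEY, and GOOGLE_API_KEY are set; the variable is called gemini_25 here purely for readability, while the notebook keeps the existing gemini_15 name.

```python
# Minimal sketch of the updated cell; not part of the diff itself.
from langchain.chat_models import init_chat_model

# Returns a langchain_openai.ChatOpenAI instance.
gpt_4o = init_chat_model("gpt-4o", model_provider="openai", temperature=0)
# Returns a langchain_anthropic.ChatAnthropic instance.
claude_opus = init_chat_model(
    "claude-3-opus-20240229", model_provider="anthropic", temperature=0
)
# Returns a langchain_google_genai.ChatGoogleGenerativeAI instance.
gemini_25 = init_chat_model(
    "gemini-2.5-pro", model_provider="google_genai", temperature=0
)

# Every integration implements the same chat model interface, so the calls are identical.
print("GPT-4o: " + gpt_4o.invoke("what's your name").content + "\n")
print("Claude Opus: " + claude_opus.invoke("what's your name").content + "\n")
print("Gemini 2.5: " + gemini_25.invoke("what's your name").content + "\n")
```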
108 | 89 | {
|
|
117 | 98 | },
|
118 | 99 | {
|
119 | 100 | "cell_type": "code",
|
120 | | - "execution_count": 3,
| 101 | + "execution_count": null,
121 | 102 | "id": "0378ccc6-95bc-4d50-be50-fccc193f0a71",
|
122 | 103 | "metadata": {
|
123 | 104 | "execution": {
|
|
131 | 112 | "source": [
|
132 | 113 | "gpt_4o = init_chat_model(\"gpt-4o\", temperature=0)\n",
|
133 | 114 | "claude_opus = init_chat_model(\"claude-3-opus-20240229\", temperature=0)\n",
|
134 | | - "gemini_15 = init_chat_model(\"gemini-1.5-pro\", temperature=0)"
| 115 | + "gemini_15 = init_chat_model(\"gemini-2.5-pro\", temperature=0)"
135 | 116 | ]
|
136 | 117 | },
|
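The second changed cell drops model_provider entirely and relies on init_chat_model inferring the provider from the model name, as shown in the hunk above. Since this PR swaps langchain-google-vertexai for langchain-google-genai, a quick sanity check like the sketch below can confirm which integration the inference resolves to; the expected class name is an assumption on my part, not something the diff shows.

```python
from langchain.chat_models import init_chat_model

# No model_provider argument: the provider is inferred from the model name prefix.
gemini = init_chat_model("gemini-2.5-pro", temperature=0)

# Verify which concrete Google integration was picked up,
# e.g. ChatGoogleGenerativeAI when langchain-google-genai is installed.
print(type(gemini).__name__)
```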
137 | 118 | {
|
|
146 | 127 | },
|
147 | 128 | {
|
148 | 129 | "cell_type": "code",
|
149 | | - "execution_count": 4,
| 130 | + "execution_count": 7,
150 | 131 | "id": "6c037f27-12d7-4e83-811e-4245c0e3ba58",
|
151 | 132 | "metadata": {
|
152 | 133 | "execution": {
|
|
160 | 141 | {
|
161 | 142 | "data": {
|
162 | 143 | "text/plain": [
|
163 | | - "AIMessage(content=\"I'm an AI created by OpenAI, and I don't have a personal name. How can I assist you today?\", additional_kwargs={'refusal': None}, response_metadata={'token_usage': {'completion_tokens': 23, 'prompt_tokens': 11, 'total_tokens': 34}, 'model_name': 'gpt-4o-2024-05-13', 'system_fingerprint': 'fp_25624ae3a5', 'finish_reason': 'stop', 'logprobs': None}, id='run-b41df187-4627-490d-af3c-1c96282d3eb0-0', usage_metadata={'input_tokens': 11, 'output_tokens': 23, 'total_tokens': 34})"
| 144 | + "AIMessage(content='I’m called ChatGPT. How can I assist you today?', additional_kwargs={'refusal': None}, response_metadata={'token_usage': {'completion_tokens': 13, 'prompt_tokens': 11, 'total_tokens': 24, 'completion_tokens_details': {'accepted_prediction_tokens': 0, 'audio_tokens': 0, 'reasoning_tokens': 0, 'rejected_prediction_tokens': 0}, 'prompt_tokens_details': {'audio_tokens': 0, 'cached_tokens': 0}}, 'model_name': 'gpt-4o-2024-08-06', 'system_fingerprint': 'fp_07871e2ad8', 'id': 'chatcmpl-BwCyyBpMqn96KED6zPhLm4k9SQMiQ', 'service_tier': 'default', 'finish_reason': 'stop', 'logprobs': None}, id='run--fada10c3-4128-406c-b83d-a850d16b365f-0', usage_metadata={'input_tokens': 11, 'output_tokens': 13, 'total_tokens': 24, 'input_token_details': {'audio': 0, 'cache_read': 0}, 'output_token_details': {'audio': 0, 'reasoning': 0}})"
164 | 145 | ]
|
165 | 146 | },
|
166 | | - "execution_count": 4,
| 147 | + "execution_count": 7,
167 | 148 | "metadata": {},
|
168 | 149 | "output_type": "execute_result"
|
169 | 150 | }
|
|
178 | 159 | },
|
179 | 160 | {
|
180 | 161 | "cell_type": "code",
|
181 | | - "execution_count": 5,
| 162 | + "execution_count": 8,
182 | 163 | "id": "321e3036-abd2-4e1f-bcc6-606efd036954",
|
183 | 164 | "metadata": {
|
184 | 165 | "execution": {
|
|
192 | 173 | {
|
193 | 174 | "data": {
|
194 | 175 | "text/plain": [
|
195 | | - "AIMessage(content=\"My name is Claude. It's nice to meet you!\", additional_kwargs={}, response_metadata={'id': 'msg_01Fx9P74A7syoFkwE73CdMMY', 'model': 'claude-3-5-sonnet-20240620', 'stop_reason': 'end_turn', 'stop_sequence': None, 'usage': {'input_tokens': 11, 'output_tokens': 15}}, id='run-a0fd2bbd-3b7e-46bf-8d69-a48c7e60b03c-0', usage_metadata={'input_tokens': 11, 'output_tokens': 15, 'total_tokens': 26})"
| 176 | + "AIMessage(content=\"My name is Claude. It's nice to meet you!\", additional_kwargs={}, response_metadata={'id': 'msg_01VDGrG9D6yefanbBG9zPJrc', 'model': 'claude-3-5-sonnet-20240620', 'stop_reason': 'end_turn', 'stop_sequence': None, 'usage': {'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0, 'input_tokens': 11, 'output_tokens': 15, 'server_tool_use': None, 'service_tier': 'standard'}, 'model_name': 'claude-3-5-sonnet-20240620'}, id='run--f0156087-debf-4b4b-9aaa-f3328a81ef92-0', usage_metadata={'input_tokens': 11, 'output_tokens': 15, 'total_tokens': 26, 'input_token_details': {'cache_read': 0, 'cache_creation': 0}})"
196 | 177 | ]
|
197 | 178 | },
|
198 | | - "execution_count": 5,
| 179 | + "execution_count": 8,
199 | 180 | "metadata": {},
|
200 | 181 | "output_type": "execute_result"
|
201 | 182 | }
|
|
394 | 375 | ],
|
395 | 376 | "metadata": {
|
396 | 377 | "kernelspec": {
|
397 | | - "display_name": "poetry-venv-2",
| 378 | + "display_name": "langchain",
398 | 379 | "language": "python",
|
399 | | - "name": "poetry-venv-2"
| 380 | + "name": "python3"
400 | 381 | },
|
401 | 382 | "language_info": {
|
402 | 383 | "codemirror_mode": {
|
|
408 | 389 | "name": "python",
|
409 | 390 | "nbconvert_exporter": "python",
|
410 | 391 | "pygments_lexer": "ipython3",
|
411 | | - "version": "3.11.9"
| 392 | + "version": "3.10.16"
412 | 393 | }
|
413 | 394 | },
|
414 | 395 | "nbformat": 4,
|
|