
Commit 485eaf0

sdgilley and kdestin authored

Update notebook metadata for docs (#264)

* update model and add metadata
* move metadata to code cell
* update codeowners
* chore: Run nb-clean

Co-authored-by: kdestin <[email protected]>

1 parent ccf438b

File tree: 3 files changed, +65 -20 lines

.github/CODEOWNERS

Lines changed: 16 additions & 1 deletion
```diff
@@ -5,4 +5,19 @@
 # Default owner for everything, unless overridden by a more specific rule.
 * @Azure-Samples/azure-ai-samples-maintainers
 
-scenarios/docs @Azure-Samples/azure-ai-samples-maintainers @Azure-Samples/AI-Platform-Docs
+
+#### files referenced in docs (DO NOT EDIT, except for Docs team!!!) ###################
+/scenarios/projects/basic/chat-simple.py @azure-samples/AI-Platform-Docs
+/scenarios/projects/basic/chat-template.py @azure-samples/AI-Platform-Docs
+/scenarios/rag/custom-rag-app/assets/chat_eval_data.jsonl @azure-samples/AI-Platform-Docs
+/scenarios/rag/custom-rag-app/assets/grounded_chat.prompty @azure-samples/AI-Platform-Docs
+/scenarios/rag/custom-rag-app/assets/intent_mapping.prompty @azure-samples/AI-Platform-Docs
+/scenarios/rag/custom-rag-app/assets/products.csv @azure-samples/AI-Platform-Docs
+/scenarios/rag/custom-rag-app/chat_with_products.py @azure-samples/AI-Platform-Docs
+/scenarios/rag/custom-rag-app/config.py @azure-samples/AI-Platform-Docs
+/scenarios/rag/custom-rag-app/create_search_index.py @azure-samples/AI-Platform-Docs
+/scenarios/rag/custom-rag-app/evaluate.py @azure-samples/AI-Platform-Docs
+/scenarios/rag/custom-rag-app/get_product_documents.py @azure-samples/AI-Platform-Docs
+/scenarios/rag/custom-rag-app/requirements.txt @azure-samples/AI-Platform-Docs
+/scenarios/langchain/getting-started-with-langchain-chat-models.ipynb @azure-samples/AI-Platform-Docs
+/scenarios/langchain/getting-started-with-langchain-embeddings.ipynb @azure-samples/AI-Platform-Docs
```
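For context on the CODEOWNERS rules above: GitHub evaluates patterns in file order and the last matching pattern wins, so the blanket `*` rule is overridden by the per-file entries added for the Docs team. Below is a rough Python sketch of that precedence idea, using `fnmatch` globbing as a stand-in for GitHub's own path matching; the `rules` list and the `resolve_owners` helper are hypothetical, not part of the repo.

```python
# Illustrative sketch only: approximates CODEOWNERS precedence with fnmatch-style
# globbing; GitHub's real matcher has its own path semantics.
from fnmatch import fnmatch

# (pattern, owners) pairs in file order; the last matching pattern wins.
rules = [
    ("*", ["@Azure-Samples/azure-ai-samples-maintainers"]),
    ("/scenarios/langchain/getting-started-with-langchain-chat-models.ipynb",
     ["@azure-samples/AI-Platform-Docs"]),
]

def resolve_owners(path: str) -> list[str]:
    """Return the owners of the last rule whose pattern matches `path`."""
    owners: list[str] = []
    for pattern, rule_owners in rules:
        # Treat a leading "/" as repo-root-relative; "*" matches anything.
        if pattern == "*" or fnmatch("/" + path.lstrip("/"), pattern):
            owners = rule_owners
    return owners

print(resolve_owners("scenarios/langchain/getting-started-with-langchain-chat-models.ipynb"))
# -> ['@azure-samples/AI-Platform-Docs']
```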

scenarios/langchain/getting-started-with-langchain-chat-models.ipynb

Lines changed: 37 additions & 15 deletions
```diff
@@ -26,7 +26,7 @@
 "\n",
 " 1. Create an [Azure subscription](https://azure.microsoft.com).\n",
 " 2. Create an Azure AI hub resource as explained at [How to create and manage an Azure AI Studio hub](https://learn.microsoft.com/en-us/azure/ai-studio/how-to/create-azure-ai-resource).\n",
-" 3. Deploy one model supporting the [Azure AI model inference API](https://aka.ms/azureai/modelinference). In this example we use a `Mistral-Large-2407` and a `Mistral-Small` deployment. \n",
+" 3. Deploy one model supporting the [Azure AI model inference API](https://aka.ms/azureai/modelinference). In this example we use a `mistral-medium-2505` and a `Mistral-Small` deployment. \n",
 "\n",
 " * You can follow the instructions at [Add and configure models to Azure AI model inference service](https://learn.microsoft.com/azure/ai-studio/ai-services/how-to/create-model-deployments)."
 ]
@@ -54,7 +54,9 @@
 {
 "cell_type": "code",
 "execution_count": null,
-"metadata": {},
+"metadata": {
+ "name": "create_client"
+},
 "outputs": [],
 "source": [
 "import os\n",
@@ -63,7 +65,7 @@
 "model = AzureAIChatCompletionsModel(\n",
 " endpoint=os.environ[\"AZURE_INFERENCE_ENDPOINT\"],\n",
 " credential=os.environ[\"AZURE_INFERENCE_CREDENTIAL\"],\n",
-" model=\"mistral-large-2407\",\n",
+" model=\"mistral-medium-2505\",\n",
 ")"
 ]
 },
@@ -84,7 +86,9 @@
 {
 "cell_type": "code",
 "execution_count": null,
-"metadata": {},
+"metadata": {
+ "name": "human_message"
+},
 "outputs": [],
 "source": [
 "from langchain_core.messages import HumanMessage, SystemMessage\n",
@@ -181,15 +185,17 @@
 {
 "cell_type": "code",
 "execution_count": null,
-"metadata": {},
+"metadata": {
+ "name": "create_producer_verifier"
+},
 "outputs": [],
 "source": [
 "from langchain_azure_ai.chat_models import AzureAIChatCompletionsModel\n",
 "\n",
 "producer = AzureAIChatCompletionsModel(\n",
 " endpoint=os.environ[\"AZURE_INFERENCE_ENDPOINT\"],\n",
 " credential=os.environ[\"AZURE_INFERENCE_CREDENTIAL\"],\n",
-" model=\"mistral-large-2407\",\n",
+" model=\"mistral-medium-2505\",\n",
 ")\n",
 "\n",
 "verifier = AzureAIChatCompletionsModel(\n",
@@ -209,7 +215,9 @@
 {
 "cell_type": "code",
 "execution_count": null,
-"metadata": {},
+"metadata": {
+ "name": "generate_poem"
+},
 "outputs": [],
 "source": [
 "from langchain_core.prompts import PromptTemplate\n",
@@ -242,7 +250,9 @@
 {
 "cell_type": "code",
 "execution_count": null,
-"metadata": {},
+"metadata": {
+ "name": "create_output_parser"
+},
 "outputs": [],
 "source": [
 "from langchain_core.output_parsers import StrOutputParser\n",
@@ -260,7 +270,9 @@
 {
 "cell_type": "code",
 "execution_count": null,
-"metadata": {},
+"metadata": {
+ "name": "create_chain"
+},
 "outputs": [],
 "source": [
 "chain = producer_template | producer | parser | verifier_template | verifier | parser"
@@ -276,7 +288,9 @@
 {
 "cell_type": "code",
 "execution_count": null,
-"metadata": {},
+"metadata": {
+ "name": "create_multiple_outputs_chain"
+},
 "outputs": [],
 "source": [
 "generate_poem = producer_template | producer | parser\n",
@@ -286,7 +300,9 @@
 {
 "cell_type": "code",
 "execution_count": null,
-"metadata": {},
+"metadata": {
+ "name": "create_chain_with_passthrough"
+},
 "outputs": [],
 "source": [
 "from langchain_core.runnables import RunnablePassthrough, RunnableParallel\n",
@@ -304,7 +320,9 @@
 {
 "cell_type": "code",
 "execution_count": null,
-"metadata": {},
+"metadata": {
+ "name": "invoke_chain"
+},
 "outputs": [],
 "source": [
 "chain.invoke({\"topic\": \"living in a foreign country\"})"
@@ -329,7 +347,9 @@
 {
 "cell_type": "code",
 "execution_count": null,
-"metadata": {},
+"metadata": {
+ "name": "configure_logging"
+},
 "outputs": [],
 "source": [
 "import sys\n",
@@ -363,7 +383,9 @@
 {
 "cell_type": "code",
 "execution_count": null,
-"metadata": {},
+"metadata": {
+ "name": "create_client_with_logging"
+},
 "outputs": [],
 "source": [
 "import os\n",
@@ -372,7 +394,7 @@
 "model = AzureAIChatCompletionsModel(\n",
 " endpoint=os.environ[\"AZURE_INFERENCE_ENDPOINT\"],\n",
 " credential=os.environ[\"AZURE_INFERENCE_CREDENTIAL\"],\n",
-" model=\"mistral-large-2407\",\n",
+" model=\"mistral-medium-2505\",\n",
 " client_kwargs={\"logging_enable\": True},\n",
 ")"
 ]
```
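The `"name"` metadata added to each code cell gives the docs build a stable identifier for pulling individual cells out of the notebook, rather than referencing them by position. A minimal sketch of that kind of lookup using the standard `nbformat` package is below; the `get_named_cell` helper is illustrative, not the actual docs tooling.

```python
# Minimal sketch: read a notebook and find a code cell by its metadata "name".
# Illustrative of how named cells can be referenced; not the docs build itself.
import nbformat

def get_named_cell(path: str, name: str) -> str:
    """Return the source of the first code cell whose metadata name matches."""
    nb = nbformat.read(path, as_version=4)
    for cell in nb.cells:
        if cell.cell_type == "code" and cell.metadata.get("name") == name:
            return cell.source
    raise KeyError(f"no code cell named {name!r}")

# Example: print the cell tagged "create_client" in the chat models notebook.
print(get_named_cell(
    "scenarios/langchain/getting-started-with-langchain-chat-models.ipynb",
    "create_client",
))
```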

scenarios/langchain/getting-started-with-langchain-embeddings.ipynb

Lines changed: 12 additions & 4 deletions
```diff
@@ -54,7 +54,9 @@
 {
 "cell_type": "code",
 "execution_count": null,
-"metadata": {},
+"metadata": {
+ "name": "create_embed_model_client"
+},
 "outputs": [],
 "source": [
 "import os\n",
@@ -84,7 +86,9 @@
 {
 "cell_type": "code",
 "execution_count": null,
-"metadata": {},
+"metadata": {
+ "name": "create_vector_store"
+},
 "outputs": [],
 "source": [
 "from langchain_core.vectorstores import InMemoryVectorStore\n",
@@ -102,7 +106,9 @@
 {
 "cell_type": "code",
 "execution_count": null,
-"metadata": {},
+"metadata": {
+ "name": "add_documents"
+},
 "outputs": [],
 "source": [
 "from langchain_core.documents import Document\n",
@@ -124,7 +130,9 @@
 {
 "cell_type": "code",
 "execution_count": null,
-"metadata": {},
+"metadata": {
+ "name": "search_similarity"
+},
 "outputs": [],
 "source": [
 "results = vector_store.similarity_search(query=\"thud\", k=1)\n",
```

0 commit comments