From 5134b197f234834910de4f722869268482205f0b Mon Sep 17 00:00:00 2001
From: Carson
Date: Wed, 26 Feb 2025 09:54:58 -0600
Subject: [PATCH 1/6] Encourage async in response generation

---
 .../templates/chat/llm-enterprise/aws-bedrock-anthropic/app.py | 2 +-
 shiny/templates/chat/llm-enterprise/azure-openai/app.py | 2 +-
 shiny/templates/chat/llms/anthropic/app.py | 2 +-
 shiny/templates/chat/llms/google/app.py | 2 +-
 shiny/templates/chat/llms/langchain/app.py | 2 +-
 shiny/templates/chat/llms/ollama/app.py | 2 +-
 shiny/templates/chat/llms/openai/app.py | 2 +-
 7 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/shiny/templates/chat/llm-enterprise/aws-bedrock-anthropic/app.py b/shiny/templates/chat/llm-enterprise/aws-bedrock-anthropic/app.py
index d2916f85f..c9a174a5e 100644
--- a/shiny/templates/chat/llm-enterprise/aws-bedrock-anthropic/app.py
+++ b/shiny/templates/chat/llm-enterprise/aws-bedrock-anthropic/app.py
@@ -36,5 +36,5 @@
 # Define a callback to run when the user submits a message
 @chat.on_user_submit
 async def handle_user_input(user_input: str):
-    response = chat_model.stream(user_input)
+    response = await chat_model.stream_async(user_input)
     await chat.append_message_stream(response)
diff --git a/shiny/templates/chat/llm-enterprise/azure-openai/app.py b/shiny/templates/chat/llm-enterprise/azure-openai/app.py
index d6ba133cd..0482df872 100644
--- a/shiny/templates/chat/llm-enterprise/azure-openai/app.py
+++ b/shiny/templates/chat/llm-enterprise/azure-openai/app.py
@@ -37,5 +37,5 @@
 # Define a callback to run when the user submits a message
 @chat.on_user_submit
 async def handle_user_input(user_input: str):
-    response = chat_model.stream(user_input)
+    response = await chat_model.stream_async(user_input)
     await chat.append_message_stream(response)
diff --git a/shiny/templates/chat/llms/anthropic/app.py b/shiny/templates/chat/llms/anthropic/app.py
index 585dd5a58..88d3c4b4a 100644
--- a/shiny/templates/chat/llms/anthropic/app.py
+++ b/shiny/templates/chat/llms/anthropic/app.py
@@ -37,5 +37,5 @@
 # Generate a response when the user submits a message
 @chat.on_user_submit
 async def handle_user_input(user_input: str):
-    response = chat_model.stream(user_input)
+    response = await chat_model.stream_async(user_input)
     await chat.append_message_stream(response)
diff --git a/shiny/templates/chat/llms/google/app.py b/shiny/templates/chat/llms/google/app.py
index ed5b75248..1008014d4 100644
--- a/shiny/templates/chat/llms/google/app.py
+++ b/shiny/templates/chat/llms/google/app.py
@@ -33,5 +33,5 @@
 # Generate a response when the user submits a message
 @chat.on_user_submit
 async def handle_user_input(user_input: str):
-    response = chat_model.stream(user_input)
+    response = await chat_model.stream_async(user_input)
     await chat.append_message_stream(response)
diff --git a/shiny/templates/chat/llms/langchain/app.py b/shiny/templates/chat/llms/langchain/app.py
index 1c62ce399..f30a9f9ad 100644
--- a/shiny/templates/chat/llms/langchain/app.py
+++ b/shiny/templates/chat/llms/langchain/app.py
@@ -38,5 +38,5 @@
 # Define a callback to run when the user submits a message
 @chat.on_user_submit
 async def handle_user_input(user_input: str):
-    response = chat_model.stream(user_input)
+    response = await chat_model.stream_async(user_input)
     await chat.append_message_stream(response)
diff --git a/shiny/templates/chat/llms/ollama/app.py b/shiny/templates/chat/llms/ollama/app.py
index 581050a98..9e45a634e 100644
--- a/shiny/templates/chat/llms/ollama/app.py
+++ b/shiny/templates/chat/llms/ollama/app.py
@@ -29,5 +29,5 @@
 # Generate a response when the user submits a message
 @chat.on_user_submit
 async def handle_user_input(user_input: str):
-    response = chat_model.stream(user_input)
+    response = await chat_model.stream_async(user_input)
     await chat.append_message_stream(response)
diff --git a/shiny/templates/chat/llms/openai/app.py b/shiny/templates/chat/llms/openai/app.py
index 07ff62bb2..b13a6b703 100644
--- a/shiny/templates/chat/llms/openai/app.py
+++ b/shiny/templates/chat/llms/openai/app.py
@@ -37,5 +37,5 @@
 # Generate a response when the user submits a message
 @chat.on_user_submit
 async def handle_user_input(user_input: str):
-    response = chat_model.stream(user_input)
+    response = await chat_model.stream_async(user_input)
     await chat.append_message_stream(response)

From c692cca293da1eb371235f37881edb33ffefb2f5 Mon Sep 17 00:00:00 2001
From: Carson
Date: Wed, 26 Feb 2025 09:55:42 -0600
Subject: [PATCH 2/6] Update requirements and update models

---
 .../llm-enterprise/aws-bedrock-anthropic/requirements.txt | 1 -
 .../chat/llm-enterprise/azure-openai/requirements.txt | 1 -
 shiny/templates/chat/llms/anthropic/app.py | 2 +-
 shiny/templates/chat/llms/google/app.py | 2 +-
 shiny/templates/chat/llms/google/requirements.txt | 5 ++---
 shiny/templates/chat/llms/langchain/requirements.txt | 1 -
 shiny/templates/chat/llms/ollama/requirements.txt | 1 -
 shiny/templates/chat/llms/openai/requirements.txt | 1 -
 shiny/templates/chat/llms/playground/app.py | 6 +++---
 shiny/templates/chat/llms/playground/requirements.txt | 4 ++--
 10 files changed, 9 insertions(+), 15 deletions(-)

diff --git a/shiny/templates/chat/llm-enterprise/aws-bedrock-anthropic/requirements.txt b/shiny/templates/chat/llm-enterprise/aws-bedrock-anthropic/requirements.txt
index a0d9e4048..73c49fa54 100644
--- a/shiny/templates/chat/llm-enterprise/aws-bedrock-anthropic/requirements.txt
+++ b/shiny/templates/chat/llm-enterprise/aws-bedrock-anthropic/requirements.txt
@@ -1,5 +1,4 @@
 shiny
 python-dotenv
-tokenizers
 chatlas
 anthropic[bedrock]
diff --git a/shiny/templates/chat/llm-enterprise/azure-openai/requirements.txt b/shiny/templates/chat/llm-enterprise/azure-openai/requirements.txt
index e7c42d64c..937dbb3e5 100644
--- a/shiny/templates/chat/llm-enterprise/azure-openai/requirements.txt
+++ b/shiny/templates/chat/llm-enterprise/azure-openai/requirements.txt
@@ -1,5 +1,4 @@
 shiny
 python-dotenv
-tokenizers
 chatlas
 openai
diff --git a/shiny/templates/chat/llms/anthropic/app.py b/shiny/templates/chat/llms/anthropic/app.py
index 88d3c4b4a..964428803 100644
--- a/shiny/templates/chat/llms/anthropic/app.py
+++ b/shiny/templates/chat/llms/anthropic/app.py
@@ -14,7 +14,7 @@
 load_dotenv()
 chat_model = ChatAnthropic(
     api_key=os.environ.get("ANTHROPIC_API_KEY"),
-    model="claude-3-5-sonnet-latest",
+    model="claude-3-7-sonnet-latest",
     system_prompt="You are a helpful assistant.",
 )

diff --git a/shiny/templates/chat/llms/google/app.py b/shiny/templates/chat/llms/google/app.py
index 1008014d4..0f650df8d 100644
--- a/shiny/templates/chat/llms/google/app.py
+++ b/shiny/templates/chat/llms/google/app.py
@@ -15,7 +15,7 @@
 chat_model = ChatGoogle(
     api_key=os.environ.get("GOOGLE_API_KEY"),
     system_prompt="You are a helpful assistant.",
-    model="gemini-1.5-flash",
+    model="gemini-2.0-flash",
 )

 # Set some Shiny page options
diff --git a/shiny/templates/chat/llms/google/requirements.txt b/shiny/templates/chat/llms/google/requirements.txt
index f51cd04e3..fdb4a7e80 100644
--- a/shiny/templates/chat/llms/google/requirements.txt
+++ b/shiny/templates/chat/llms/google/requirements.txt
@@ -1,5 +1,4 @@
 shiny
 python-dotenv
-tokenizers
-chatlas
-google-generativeai
+chatlas>=0.4.0
+google-genai
diff --git a/shiny/templates/chat/llms/langchain/requirements.txt b/shiny/templates/chat/llms/langchain/requirements.txt
index 0391632e3..bb1f1f4aa 100644
--- a/shiny/templates/chat/llms/langchain/requirements.txt
+++ b/shiny/templates/chat/llms/langchain/requirements.txt
@@ -1,4 +1,3 @@
 shiny
 python-dotenv
-tokenizers
 langchain-openai
diff --git a/shiny/templates/chat/llms/ollama/requirements.txt b/shiny/templates/chat/llms/ollama/requirements.txt
index 5901288be..9831b6305 100644
--- a/shiny/templates/chat/llms/ollama/requirements.txt
+++ b/shiny/templates/chat/llms/ollama/requirements.txt
@@ -1,4 +1,3 @@
 shiny
-tokenizers
 chatlas
 ollama
diff --git a/shiny/templates/chat/llms/openai/requirements.txt b/shiny/templates/chat/llms/openai/requirements.txt
index e7c42d64c..937dbb3e5 100644
--- a/shiny/templates/chat/llms/openai/requirements.txt
+++ b/shiny/templates/chat/llms/openai/requirements.txt
@@ -1,5 +1,4 @@
 shiny
 python-dotenv
-tokenizers
 chatlas
 openai
diff --git a/shiny/templates/chat/llms/playground/app.py b/shiny/templates/chat/llms/playground/app.py
index a8c3b0ebb..4f2846bce 100644
--- a/shiny/templates/chat/llms/playground/app.py
+++ b/shiny/templates/chat/llms/playground/app.py
@@ -15,13 +15,13 @@
 load_dotenv()

 models = {
-    "openai": ["gpt-4o-mini", "gpt-4o"],
     "claude": [
+        "claude-3-7-sonnet-latest",
         "claude-3-opus-latest",
-        "claude-3-5-sonnet-latest",
         "claude-3-haiku-20240307",
     ],
-    "google": ["gemini-1.5-pro-latest"],
+    "openai": ["gpt-4o-mini", "gpt-4o"],
+    "google": ["gemini-2.0-flash"],
 }

 model_choices: dict[str, dict[str, str]] = {}
diff --git a/shiny/templates/chat/llms/playground/requirements.txt b/shiny/templates/chat/llms/playground/requirements.txt
index 4cec5d5bb..c7b2458aa 100644
--- a/shiny/templates/chat/llms/playground/requirements.txt
+++ b/shiny/templates/chat/llms/playground/requirements.txt
@@ -1,6 +1,6 @@
-chatlas
+chatlas>=0.4
 openai
 anthropic
-google-generativeai
+google-genai
 python-dotenv
 shiny

From 8f1d23471c55246d7bd20f8cd60b830333f934f6 Mon Sep 17 00:00:00 2001
From: Carson
Date: Wed, 26 Feb 2025 09:56:11 -0600
Subject: [PATCH 3/6] Add template.env files

---
 .../chat/llm-enterprise/aws-bedrock-anthropic/app.py | 4 ----
 .../chat/llm-enterprise/aws-bedrock-anthropic/template.env | 6 ++++++
 .../templates/chat/llm-enterprise/azure-openai/template.env | 3 +++
 shiny/templates/chat/llms/anthropic/template.env | 3 +++
 shiny/templates/chat/llms/google/template.env | 3 +++
 shiny/templates/chat/llms/langchain/template.env | 3 +++
 shiny/templates/chat/llms/openai/template.env | 3 +++
 shiny/templates/chat/llms/playground/template.env | 5 +++++
 8 files changed, 26 insertions(+), 4 deletions(-)
 create mode 100644 shiny/templates/chat/llm-enterprise/aws-bedrock-anthropic/template.env
 create mode 100644 shiny/templates/chat/llm-enterprise/azure-openai/template.env
 create mode 100644 shiny/templates/chat/llms/anthropic/template.env
 create mode 100644 shiny/templates/chat/llms/google/template.env
 create mode 100644 shiny/templates/chat/llms/langchain/template.env
 create mode 100644 shiny/templates/chat/llms/openai/template.env
 create mode 100644 shiny/templates/chat/llms/playground/template.env

diff --git a/shiny/templates/chat/llm-enterprise/aws-bedrock-anthropic/app.py b/shiny/templates/chat/llm-enterprise/aws-bedrock-anthropic/app.py
index c9a174a5e..cf71cefe9 100644
--- a/shiny/templates/chat/llm-enterprise/aws-bedrock-anthropic/app.py
+++ b/shiny/templates/chat/llm-enterprise/aws-bedrock-anthropic/app.py
@@ -15,10 +15,6 @@
 load_dotenv()
 chat_model = ChatBedrockAnthropic(
     model="anthropic.claude-3-sonnet-20240229-v1:0",
-    # aws_secret_key=os.getenv("AWS_SECRET_KEY"),
-    # aws_access_key=os.getenv("AWS_ACCESS_KEY"),
-    # aws_region=os.getenv("AWS_REGION"),
-    # aws_account_id=os.getenv("AWS_ACCOUNT_ID"),
 )

 # Set some Shiny page options
diff --git a/shiny/templates/chat/llm-enterprise/aws-bedrock-anthropic/template.env b/shiny/templates/chat/llm-enterprise/aws-bedrock-anthropic/template.env
new file mode 100644
index 000000000..fc4cc092b
--- /dev/null
+++ b/shiny/templates/chat/llm-enterprise/aws-bedrock-anthropic/template.env
@@ -0,0 +1,6 @@
+# Once you've provided your AWS credentials, rename this file to .env
+# The load_dotenv() call in app.py will then load these env variables
+AWS_SECRET_KEY=
+AWS_ACCESS_KEY=
+AWS_REGION=
+AWS_ACCOUNT_ID=
diff --git a/shiny/templates/chat/llm-enterprise/azure-openai/template.env b/shiny/templates/chat/llm-enterprise/azure-openai/template.env
new file mode 100644
index 000000000..017e4200e
--- /dev/null
+++ b/shiny/templates/chat/llm-enterprise/azure-openai/template.env
@@ -0,0 +1,3 @@
+# Once you've provided your API key, rename this file to .env
+# The load_dotenv() call in app.py will then load this env variable
+AZURE_OPENAI_API_KEY=
diff --git a/shiny/templates/chat/llms/anthropic/template.env b/shiny/templates/chat/llms/anthropic/template.env
new file mode 100644
index 000000000..6fe7cf7f7
--- /dev/null
+++ b/shiny/templates/chat/llms/anthropic/template.env
@@ -0,0 +1,3 @@
+# Once you've provided your API key, rename this file to .env
+# The load_dotenv() call in app.py will then load this env variable
+ANTHROPIC_API_KEY=
diff --git a/shiny/templates/chat/llms/google/template.env b/shiny/templates/chat/llms/google/template.env
new file mode 100644
index 000000000..b41ee2ba5
--- /dev/null
+++ b/shiny/templates/chat/llms/google/template.env
@@ -0,0 +1,3 @@
+# Once you've provided your API key, rename this file to .env
+# The load_dotenv() call in app.py will then load this env variable
+GOOGLE_API_KEY=
diff --git a/shiny/templates/chat/llms/langchain/template.env b/shiny/templates/chat/llms/langchain/template.env
new file mode 100644
index 000000000..33b6abc3f
--- /dev/null
+++ b/shiny/templates/chat/llms/langchain/template.env
@@ -0,0 +1,3 @@
+# Once you've provided your API key, rename this file to .env
+# The load_dotenv() call in app.py will then load this env variable
+OPENAI_API_KEY=
diff --git a/shiny/templates/chat/llms/openai/template.env b/shiny/templates/chat/llms/openai/template.env
new file mode 100644
index 000000000..33b6abc3f
--- /dev/null
+++ b/shiny/templates/chat/llms/openai/template.env
@@ -0,0 +1,3 @@
+# Once you've provided your API key, rename this file to .env
+# The load_dotenv() call in app.py will then load this env variable
+OPENAI_API_KEY=
diff --git a/shiny/templates/chat/llms/playground/template.env b/shiny/templates/chat/llms/playground/template.env
new file mode 100644
index 000000000..d93f6dc7f
--- /dev/null
+++ b/shiny/templates/chat/llms/playground/template.env
@@ -0,0 +1,5 @@
+# Once you've provided your API keys, rename this file to .env
+# The load_dotenv() call in app.py will then load these env variables
+ANTHROPIC_API_KEY=
+OPENAI_API_KEY=
+GOOGLE_API_KEY=

From e5b55e1bfb0cca54835e48611999f3d4bcb1e2ed Mon Sep 17 00:00:00 2001
From: Carson
Date: Thu, 27 Feb 2025 14:20:50 -0600
Subject: [PATCH 4/6] Remove unnecessary call to markdown() in hello chat template

---
 .../chat/starters/hello/.app-core.py.swp | Bin 0 -> 12288 bytes
 shiny/templates/chat/starters/hello/app-core.py | 12 +++++-------
 shiny/templates/chat/starters/hello/app.py | 12 +++++-------
 3 files changed, 10 insertions(+), 14 deletions(-)
 create mode 100644 shiny/templates/chat/starters/hello/.app-core.py.swp

diff --git a/shiny/templates/chat/starters/hello/.app-core.py.swp b/shiny/templates/chat/starters/hello/.app-core.py.swp
new file mode 100644
index 0000000000000000000000000000000000000000..3e57fbbce34446ef922beb2c9222fde0594d13ad
GIT binary patch
literal 12288
zcmeI2JBt)S5XW0%UnnMmP|ktO!rqJz3_Lj>=ko+Xe1XWZ_U_c)G&j@TO!wTnh?<)i
nj48>Ozw_wos_I{LvTO~H-CMl@r<3D?#y%nLeSNw9
z@#@<>2agIdvQFV9RzA%AVcd|T6=ODTRwgN=-rn%(++n(|d|nSzJ6&|S($lp4_E_3N
dUWY1mp*k~Rvs&t=2#RamDVnfOofS^ZXbhZm;e*lj=*kl^2G6DVRj)p0tfcr-OiBT
GXW;R1egF5U;<2l2`~XB@E;QJc87RI``X#sE0!)AjFaajO1egF5
zU;<2l2`~XB@DCD@nGiR33Gsmx{{Nr-{{OUBh>xUqq*tWpBumnyl5~r7hIEqjWseYV
NY_a#r1PY6q_d<$q%XUL_)L0DdO=zvog)39nBPg?y7=!y4m_9u6JP>NfC(@GCcp%k
025#We?fp6hG7{lSR2#bpjLfwSW!Q7k9#^dO|sliE=
Ey3fORt}W5wSN-z=8G4Cb6NuuDsoH$gbN=yWilK
fYd6vNO!TymZR`0>d(yF@`DK$BU9p9PlS|PUU9SWKA74QfI!hXWi+KLL<=K|q9(%!
V^d_5fEfcmlGMzNZY|Eew{9uzdcG5KpIYOTU(9fGs{`Cuh-oR6{%tRKFKPTcMLwql
+ahg+3O8dgHFW}4Oa&!1h|gn(RH-7la?R>%>ImGS2cVCH#uviv(0>jO1CfVtcQt{_
ntIN+(7_N3GXbg{tinVUMdK=LxI@(vEoHTlnTd|Y0$i*pyHKT>#k>kFtuc9#Lplnj
8L9%8?^HGR8@E)NP!T$-aa)?+{JoVLi-A|Zz&N@V8sfXuL*^0LwP(D|#?k>E%)-P%
Vj4M+@(_;@r&{RGg+&-NI@e{*KOE3Tc

literal 0
HcmV?d00001

diff --git a/shiny/templates/chat/starters/hello/app-core.py b/shiny/templates/chat/starters/hello/app-core.py
index 5c088fcaf..8628b85df 100644
--- a/shiny/templates/chat/starters/hello/app-core.py
+++ b/shiny/templates/chat/starters/hello/app-core.py
@@ -7,13 +7,11 @@
 )

 # Create a welcome message
-welcome = ui.markdown(
-    """
-    Hi! This is a simple Shiny `Chat` UI. Enter a message below and I will
-    simply repeat it back to you. For more examples, see this
-    [folder of examples](https://github.com/posit-dev/py-shiny/tree/main/shiny/templates/chat).
-    """
-)
+welcome = """
+Hi! This is a simple Shiny `Chat` UI. Enter a message below and I will
+simply repeat it back to you. For more examples, see this
+[folder of examples](https://github.com/posit-dev/py-shiny/tree/main/shiny/templates/chat).
+"""


 def server(input, output, session):
diff --git a/shiny/templates/chat/starters/hello/app.py b/shiny/templates/chat/starters/hello/app.py
index 43fb202d5..e7f8cfc2c 100644
--- a/shiny/templates/chat/starters/hello/app.py
+++ b/shiny/templates/chat/starters/hello/app.py
@@ -8,13 +8,11 @@
 )

 # Create a welcome message
-welcome = ui.markdown(
-    """
-    Hi! This is a simple Shiny `Chat` UI. Enter a message below and I will
-    simply repeat it back to you. For more examples, see this
-    [folder of examples](https://github.com/posit-dev/py-shiny/tree/main/shiny/templates/chat).
-    """
-)
+welcome = """
+Hi! This is a simple Shiny `Chat` UI. Enter a message below and I will
+simply repeat it back to you. For more examples, see this
+[folder of examples](https://github.com/posit-dev/py-shiny/tree/main/shiny/templates/chat).
+"""

 # Create a chat instance
 chat = ui.Chat(

From 22037aa2307bb5feaa8730565630bff5150f9712 Mon Sep 17 00:00:00 2001
From: Carson
Date: Thu, 27 Feb 2025 14:25:48 -0600
Subject: [PATCH 5/6] Proper naming of hello chat express app file

---
 shiny/templates/chat/starters/hello/{app.py => app-express.py} | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 rename shiny/templates/chat/starters/hello/{app.py => app-express.py} (100%)

diff --git a/shiny/templates/chat/starters/hello/app.py b/shiny/templates/chat/starters/hello/app-express.py
similarity index 100%
rename from shiny/templates/chat/starters/hello/app.py
rename to shiny/templates/chat/starters/hello/app-express.py

From d3f4c93a1d8d61856048ed2d85846a78db99388b Mon Sep 17 00:00:00 2001
From: Carson
Date: Thu, 27 Feb 2025 17:23:19 -0600
Subject: [PATCH 6/6] Add next steps and follow up

---
 .../aws-bedrock-anthropic/_template.json | 16 +++++-
 .../azure-openai/_template.json | 16 +++++-
 .../chat/llms/anthropic/_template.json | 16 +++++-
 .../templates/chat/llms/google/_template.json | 16 +++++-
 .../chat/llms/langchain/_template.json | 6 +-
 .../templates/chat/llms/ollama/_template.json | 17 +++++-
 shiny/templates/chat/llms/ollama/app.py | 2 +-
 .../templates/chat/llms/openai/_template.json | 16 +++++-
 .../chat/llms/playground/_template.json | 16 +++++-
 .../chat/starters/hello/.app-core.py.swp | Bin 12288 -> 0 bytes
 .../chat/starters/hello/_template.json | 5 +-
 .../chat/starters/hello/app-express.py | 54 ++++++++++++------
 .../chat/starters/sidebar-dark/_template.json | 5 +-
 13 files changed, 158 insertions(+), 27 deletions(-)
 delete mode 100644 shiny/templates/chat/starters/hello/.app-core.py.swp

diff --git a/shiny/templates/chat/llm-enterprise/aws-bedrock-anthropic/_template.json b/shiny/templates/chat/llm-enterprise/aws-bedrock-anthropic/_template.json
index 39712d0f7..eb6e9ff7b 100644
--- a/shiny/templates/chat/llm-enterprise/aws-bedrock-anthropic/_template.json
+++ b/shiny/templates/chat/llm-enterprise/aws-bedrock-anthropic/_template.json
@@ -1,5 +1,19 @@
 {
   "type": "app",
   "id": "chat-ai-anthropic-aws",
-  "title": "Chat AI using Anthropic via AWS Bedrock"
+  "title": "Chat AI using Anthropic via AWS Bedrock",
+  "next_steps": [
+    "Put your Bedrock credentials in the `template.env` file and rename it to `.env`.",
+    "Run the app with `shiny run app.py`."
+  ],
+  "follow_up": [
+    {
+      "type": "info",
+      "text": "Need help connecting to Bedrock?"
+    },
+    {
+      "type": "action",
+      "text": "Learn more at https://posit-dev.github.io/chatlas/reference/ChatBedrockAnthropic.html"
+    }
+  ]
 }
diff --git a/shiny/templates/chat/llm-enterprise/azure-openai/_template.json b/shiny/templates/chat/llm-enterprise/azure-openai/_template.json
index 14702f9c7..9bd24ede2 100644
--- a/shiny/templates/chat/llm-enterprise/azure-openai/_template.json
+++ b/shiny/templates/chat/llm-enterprise/azure-openai/_template.json
@@ -1,5 +1,19 @@
 {
   "type": "app",
   "id": "chat-ai-azure-openai",
-  "title": "Chat AI using OpenAI via Azure"
+  "title": "Chat AI using OpenAI via Azure",
+  "next_steps": [
+    "Put your Azure credentials in the `template.env` file and rename it to `.env`.",
+    "Run the app with `shiny run app.py`."
+  ],
+  "follow_up": [
+    {
+      "type": "info",
+      "text": "Need help connecting to Azure?"
+    },
+    {
+      "type": "action",
+      "text": "Learn more at https://posit-dev.github.io/chatlas/reference/ChatAzureOpenAI.html"
+    }
+  ]
 }
diff --git a/shiny/templates/chat/llms/anthropic/_template.json b/shiny/templates/chat/llms/anthropic/_template.json
index 79e2bf257..c727ce455 100644
--- a/shiny/templates/chat/llms/anthropic/_template.json
+++ b/shiny/templates/chat/llms/anthropic/_template.json
@@ -1,5 +1,19 @@
 {
   "type": "app",
   "id": "chat-ai-anthropic",
-  "title": "Chat AI using Anthropic"
+  "title": "Chat AI using Anthropic",
+  "next_steps": [
+    "Put your Anthropic API key in the `template.env` file and rename it to `.env`.",
+    "Run the app with `shiny run app.py`."
+  ],
+  "follow_up": [
+    {
+      "type": "info",
+      "text": "Need help obtaining an API key?"
+    },
+    {
+      "type": "action",
+      "text": "Learn how to obtain one at https://posit-dev.github.io/chatlas/reference/ChatAnthropic.html"
+    }
+  ]
 }
diff --git a/shiny/templates/chat/llms/google/_template.json b/shiny/templates/chat/llms/google/_template.json
index baf30e7cd..8ee55e079 100644
--- a/shiny/templates/chat/llms/google/_template.json
+++ b/shiny/templates/chat/llms/google/_template.json
@@ -1,5 +1,19 @@
 {
   "type": "app",
   "id": "chat-ai-gemini",
-  "title": "Chat AI using Google Gemini"
+  "title": "Chat AI using Google Gemini",
+  "next_steps": [
+    "Put your Google API key in the `template.env` file and rename it to `.env`.",
+    "Run the app with `shiny run app.py`."
+  ],
+  "follow_up": [
+    {
+      "type": "info",
+      "text": "Need help obtaining an API key?"
+    },
+    {
+      "type": "action",
+      "text": "Learn how to obtain one at https://posit-dev.github.io/chatlas/reference/ChatGoogle.html"
+    }
+  ]
 }
diff --git a/shiny/templates/chat/llms/langchain/_template.json b/shiny/templates/chat/llms/langchain/_template.json
index 3ac04a285..b7731fd68 100644
--- a/shiny/templates/chat/llms/langchain/_template.json
+++ b/shiny/templates/chat/llms/langchain/_template.json
@@ -1,5 +1,9 @@
 {
   "type": "app",
   "id": "chat-ai-langchain",
-  "title": "Chat AI using LangChain"
+  "title": "Chat AI using LangChain",
+  "next_steps": [
+    "Put your OpenAI API key in the `template.env` file and rename it to `.env`.",
+    "Run the app with `shiny run app.py`."
+  ]
 }
diff --git a/shiny/templates/chat/llms/ollama/_template.json b/shiny/templates/chat/llms/ollama/_template.json
index 9a1c53ccc..fb0fb217a 100644
--- a/shiny/templates/chat/llms/ollama/_template.json
+++ b/shiny/templates/chat/llms/ollama/_template.json
@@ -1,5 +1,20 @@
 {
   "type": "app",
   "id": "chat-ai-ollama",
-  "title": "Chat AI using Ollama"
+  "title": "Chat AI using Ollama",
+  "next_steps": [
+    "If you haven't already, download the Ollama executable from https://ollama.com/",
+    "Run the executable and download the relevant model (llama3.2)",
+    "Run the app with `shiny run app.py`."
+  ],
+  "follow_up": [
+    {
+      "type": "info",
+      "text": "Need help connecting to Ollama?"
+    },
+    {
+      "type": "action",
+      "text": "Learn more at https://posit-dev.github.io/chatlas/reference/ChatOllama.html"
+    }
+  ]
 }
diff --git a/shiny/templates/chat/llms/ollama/app.py b/shiny/templates/chat/llms/ollama/app.py
index 9e45a634e..0bff056cb 100644
--- a/shiny/templates/chat/llms/ollama/app.py
+++ b/shiny/templates/chat/llms/ollama/app.py
@@ -9,7 +9,7 @@
 # ChatOllama() requires an Ollama model server to be running locally.
 # See the docs for more information on how to set up a local Ollama server.
 # https://posit-dev.github.io/chatlas/reference/ChatOllama.html
-chat_model = ChatOllama(model="llama3.1")
+chat_model = ChatOllama(model="llama3.2")

 # Set some Shiny page options
 ui.page_opts(
diff --git a/shiny/templates/chat/llms/openai/_template.json b/shiny/templates/chat/llms/openai/_template.json
index 89bfb15d3..4fcf812e8 100644
--- a/shiny/templates/chat/llms/openai/_template.json
+++ b/shiny/templates/chat/llms/openai/_template.json
@@ -1,5 +1,19 @@
 {
   "type": "app",
   "id": "chat-ai-openai",
-  "title": "Chat AI using OpenAI"
+  "title": "Chat AI using OpenAI",
+  "next_steps": [
+    "Put your OpenAI API key in the `template.env` file and rename it to `.env`.",
+    "Run the app with `shiny run app.py`."
+  ],
+  "follow_up": [
+    {
+      "type": "info",
+      "text": "Need help obtaining an API key?"
+    },
+    {
+      "type": "action",
+      "text": "Learn how to obtain one at https://posit-dev.github.io/chatlas/reference/ChatOpenAI.html"
+    }
+  ]
 }
diff --git a/shiny/templates/chat/llms/playground/_template.json b/shiny/templates/chat/llms/playground/_template.json
index b753f492c..98dbfe728 100644
--- a/shiny/templates/chat/llms/playground/_template.json
+++ b/shiny/templates/chat/llms/playground/_template.json
@@ -1,5 +1,19 @@
 {
   "type": "app",
   "id": "chat-ai-playground",
-  "title": "Chat Playground w/ OpenAI, Anthropic, and Google"
+  "title": "Chat Playground w/ OpenAI, Anthropic, and Google",
+  "next_steps": [
+    "Put your OpenAI, Anthropic, and Google API keys in the `template.env` file and rename it to `.env`.",
+    "Run the app with `shiny run app.py`."
+  ],
+  "follow_up": [
+    {
+      "type": "info",
+      "text": "Need help obtaining API keys?"
+    },
+    {
+      "type": "action",
+      "text": "Learn how to obtain them at https://posit-dev.github.io/chatlas/reference/"
+    }
+  ]
 }
diff --git a/shiny/templates/chat/starters/hello/.app-core.py.swp b/shiny/templates/chat/starters/hello/.app-core.py.swp
deleted file mode 100644
index 3e57fbbce34446ef922beb2c9222fde0594d13ad..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 12288
zcmeI2JBt)S5XW0%UnnMmP|ktO!rqJz3_Lj>=ko+Xe1XWZ_U_c)G&j@TO!wTnh?<)i
nj48>Ozw_wos_I{LvTO~H-CMl@r<3D?#y%nLeSNw9
z@#@<>2agIdvQFV9RzA%AVcd|T6=ODTRwgN=-rn%(++n(|d|nSzJ6&|S($lp4_E_3N
dUWY1mp*k~Rvs&t=2#RamDVnfOofS^ZXbhZm;e*lj=*kl^2G6DVRj)p0tfcr-OiBT
GXW;R1egF5U;<2l2`~XB@E;QJc87RI``X#sE0!)AjFaajO1egF5
zU;<2l2`~XB@DCD@nGiR33Gsmx{{Nr-{{OUBh>xUqq*tWpBumnyl5~r7hIEqjWseYV
NY_a#r1PY6q_d<$q%XUL_)L0DdO=zvog)39nBPg?y7=!y4m_9u6JP>NfC(@GCcp%k
025#We?fp6hG7{lSR2#bpjLfwSW!Q7k9#^dO|sliE=
Ey3fORt}W5wSN-z=8G4Cb6NuuDsoH$gbN=yWilK
fYd6vNO!TymZR`0>d(yF@`DK$BU9p9PlS|PUU9SWKA74QfI!hXWi+KLL<=K|q9(%!
V^d_5fEfcmlGMzNZY|Eew{9uzdcG5KpIYOTU(9fGs{`Cuh-oR6{%tRKFKPTcMLwql
+ahg+3O8dgHFW}4Oa&!1h|gn(RH-7la?R>%>ImGS2cVCH#uviv(0>jO1CfVtcQt{_
ntIN+(7_N3GXbg{tinVUMdK=LxI@(vEoHTlnTd|Y0$i*pyHKT>#k>kFtuc9#Lplnj
8L9%8?^HGR8@E)NP!T$-aa)?+{JoVLi-A|Zz&N@V8sfXuL*^0LwP(D|#?k>E%)-P%
Vj4M+@(_;@r&{RGg+&-NI@e{*KOE3Tc

diff --git a/shiny/templates/chat/starters/hello/_template.json b/shiny/templates/chat/starters/hello/_template.json
index 6cdfd1a2f..1d63a9705 100644
--- a/shiny/templates/chat/starters/hello/_template.json
+++ b/shiny/templates/chat/starters/hello/_template.json
@@ -1,5 +1,8 @@
 {
   "type": "app",
   "id": "chat-hello",
-  "title": "Hello Shiny Chat"
+  "title": "Hello Shiny Chat",
+  "next_steps": [
+    "Run the app with `shiny run app.py`."
+  ]
 }
diff --git a/shiny/templates/chat/starters/hello/app-express.py b/shiny/templates/chat/starters/hello/app-express.py
index e7f8cfc2c..02bc2156a 100644
--- a/shiny/templates/chat/starters/hello/app-express.py
+++ b/shiny/templates/chat/starters/hello/app-express.py
@@ -1,31 +1,53 @@
-from shiny.express import ui
+from shiny.express import expressify, ui

-# Set some Shiny page options
-ui.page_opts(
-    title="Hello Shiny Chat",
-    fillable=True,
-    fillable_mobile=True,
-)

-# Create a welcome message
-welcome = """
-Hi! This is a simple Shiny `Chat` UI. Enter a message below and I will
-simply repeat it back to you. For more examples, see this
-[folder of examples](https://github.com/posit-dev/py-shiny/tree/main/shiny/templates/chat).
+@expressify
+def card_suggestion(title: str, suggestion: str, img_src: str, img_alt: str):
+    with ui.card(data_suggestion=suggestion):
+        ui.card_header(title)
+        ui.img(
+            src=img_src,
+            alt=img_alt,
+            style="margin-top:auto; margin-bottom:auto;",
+        )
+
+
+@expressify
+def card_suggestions():
+    with ui.layout_column_wrap():
+        card_suggestion(
+            title="Learn Python",
+            suggestion="Teach me Python",
+            img_src="https://upload.wikimedia.org/wikipedia/commons/c/c3/Python-logo-notext.svg",
+            img_alt="Python logo",
+        )
+        card_suggestion(
+            title="Learn R",
+            suggestion="Teach me R",
+            img_src="https://upload.wikimedia.org/wikipedia/commons/1/1b/R_logo.svg",
+            img_alt="R logo",
+        )
+
+
+with ui.hold() as suggestions:
+    card_suggestions()
+
+welcome = f"""
+**Hello!** How can I help you today?
+
+Here are a couple suggestions:
+
+{suggestions[0]}
 """

-# Create a chat instance
 chat = ui.Chat(
     id="chat",
     messages=[welcome],
 )

-# Display it
 chat.ui()


-# Define a callback to run when the user submits a message
 @chat.on_user_submit
 async def handle_user_input(user_input: str):
-    # Append a response to the chat
     await chat.append_message(f"You said: {user_input}")
diff --git a/shiny/templates/chat/starters/sidebar-dark/_template.json b/shiny/templates/chat/starters/sidebar-dark/_template.json
index eb96af780..78dd6d22d 100644
--- a/shiny/templates/chat/starters/sidebar-dark/_template.json
+++ b/shiny/templates/chat/starters/sidebar-dark/_template.json
@@ -1,5 +1,8 @@
 {
   "type": "app",
   "id": "chat-sidebar-dark",
-  "title": "Chat in a sidebar with dark mode"
+  "title": "Chat in a sidebar with dark mode",
+  "next_steps": [
+    "Run the app with `shiny run app.py`."
+  ]
 }