diff --git a/shiny/templates/chat/llm-enterprise/aws-bedrock-anthropic/_template.json b/shiny/templates/chat/llm-enterprise/aws-bedrock-anthropic/_template.json index 39712d0f7..eb6e9ff7b 100644 --- a/shiny/templates/chat/llm-enterprise/aws-bedrock-anthropic/_template.json +++ b/shiny/templates/chat/llm-enterprise/aws-bedrock-anthropic/_template.json @@ -1,5 +1,19 @@ { "type": "app", "id": "chat-ai-anthropic-aws", - "title": "Chat AI using Anthropic via AWS Bedrock" + "title": "Chat AI using Anthropic via AWS Bedrock", + "next_steps": [ + "Put your Bedrock credentials in the `template.env` file and rename it to `.env`.", + "Run the app with `shiny run app.py`." + ], + "follow_up": [ + { + "type": "info", + "text": "Need help connecting to Bedrock?" + }, + { + "type": "action", + "text": "Learn more at https://posit-dev.github.io/chatlas/reference/ChatBedrockAnthropic.html" + } + ] } diff --git a/shiny/templates/chat/llm-enterprise/aws-bedrock-anthropic/app.py b/shiny/templates/chat/llm-enterprise/aws-bedrock-anthropic/app.py index d2916f85f..cf71cefe9 100644 --- a/shiny/templates/chat/llm-enterprise/aws-bedrock-anthropic/app.py +++ b/shiny/templates/chat/llm-enterprise/aws-bedrock-anthropic/app.py @@ -15,10 +15,6 @@ load_dotenv() chat_model = ChatBedrockAnthropic( model="anthropic.claude-3-sonnet-20240229-v1:0", - # aws_secret_key=os.getenv("AWS_SECRET_KEY"), - # aws_access_key=os.getenv("AWS_ACCESS_KEY"), - # aws_region=os.getenv("AWS_REGION"), - # aws_account_id=os.getenv("AWS_ACCOUNT_ID"), ) # Set some Shiny page options @@ -36,5 +32,5 @@ # Define a callback to run when the user submits a message @chat.on_user_submit async def handle_user_input(user_input: str): - response = chat_model.stream(user_input) + response = await chat_model.stream_async(user_input) await chat.append_message_stream(response) diff --git a/shiny/templates/chat/llm-enterprise/aws-bedrock-anthropic/requirements.txt b/shiny/templates/chat/llm-enterprise/aws-bedrock-anthropic/requirements.txt 
index a0d9e4048..73c49fa54 100644 --- a/shiny/templates/chat/llm-enterprise/aws-bedrock-anthropic/requirements.txt +++ b/shiny/templates/chat/llm-enterprise/aws-bedrock-anthropic/requirements.txt @@ -1,5 +1,4 @@ shiny python-dotenv -tokenizers chatlas anthropic[bedrock] diff --git a/shiny/templates/chat/llm-enterprise/aws-bedrock-anthropic/template.env b/shiny/templates/chat/llm-enterprise/aws-bedrock-anthropic/template.env new file mode 100644 index 000000000..fc4cc092b --- /dev/null +++ b/shiny/templates/chat/llm-enterprise/aws-bedrock-anthropic/template.env @@ -0,0 +1,6 @@ +# Once you have provided your credentials, rename this file to .env +# The load_dotenv() in app.py will then load these env variables +AWS_SECRET_KEY= +AWS_ACCESS_KEY= +AWS_REGION= +AWS_ACCOUNT_ID= diff --git a/shiny/templates/chat/llm-enterprise/azure-openai/_template.json b/shiny/templates/chat/llm-enterprise/azure-openai/_template.json index 14702f9c7..9bd24ede2 100644 --- a/shiny/templates/chat/llm-enterprise/azure-openai/_template.json +++ b/shiny/templates/chat/llm-enterprise/azure-openai/_template.json @@ -1,5 +1,19 @@ { "type": "app", "id": "chat-ai-azure-openai", - "title": "Chat AI using OpenAI via Azure" + "title": "Chat AI using OpenAI via Azure", + "next_steps": [ + "Put your Azure credentials in the `template.env` file and rename it to `.env`.", + "Run the app with `shiny run app.py`." + ], + "follow_up": [ + { + "type": "info", + "text": "Need help connecting to Azure?" 
+ }, + { + "type": "action", + "text": "Learn more at https://posit-dev.github.io/chatlas/reference/ChatAzureOpenAI.html" + } + ] } diff --git a/shiny/templates/chat/llm-enterprise/azure-openai/app.py b/shiny/templates/chat/llm-enterprise/azure-openai/app.py index d6ba133cd..0482df872 100644 --- a/shiny/templates/chat/llm-enterprise/azure-openai/app.py +++ b/shiny/templates/chat/llm-enterprise/azure-openai/app.py @@ -37,5 +37,5 @@ # Define a callback to run when the user submits a message @chat.on_user_submit async def handle_user_input(user_input: str): - response = chat_model.stream(user_input) + response = await chat_model.stream_async(user_input) await chat.append_message_stream(response) diff --git a/shiny/templates/chat/llm-enterprise/azure-openai/requirements.txt b/shiny/templates/chat/llm-enterprise/azure-openai/requirements.txt index e7c42d64c..937dbb3e5 100644 --- a/shiny/templates/chat/llm-enterprise/azure-openai/requirements.txt +++ b/shiny/templates/chat/llm-enterprise/azure-openai/requirements.txt @@ -1,5 +1,4 @@ shiny python-dotenv -tokenizers chatlas openai diff --git a/shiny/templates/chat/llm-enterprise/azure-openai/template.env b/shiny/templates/chat/llm-enterprise/azure-openai/template.env new file mode 100644 index 000000000..017e4200e --- /dev/null +++ b/shiny/templates/chat/llm-enterprise/azure-openai/template.env @@ -0,0 +1,3 @@ +# Once you provided your API key, rename this file to .env +# The load_dotenv() in the app.py will then load this env variable +AZURE_OPENAI_API_KEY= diff --git a/shiny/templates/chat/llms/anthropic/_template.json b/shiny/templates/chat/llms/anthropic/_template.json index 79e2bf257..c727ce455 100644 --- a/shiny/templates/chat/llms/anthropic/_template.json +++ b/shiny/templates/chat/llms/anthropic/_template.json @@ -1,5 +1,19 @@ { "type": "app", "id": "chat-ai-anthropic", - "title": "Chat AI using Anthropic" + "title": "Chat AI using Anthropic", + "next_steps": [ + "Put your Anthropic API key in the `template.env` 
file and rename it to `.env`.", + "Run the app with `shiny run app.py`." + ], + "follow_up": [ + { + "type": "info", + "text": "Need help obtaining an API key?" + }, + { + "type": "action", + "text": "Learn how to obtain one at https://posit-dev.github.io/chatlas/reference/ChatAnthropic.html" + } + ] } diff --git a/shiny/templates/chat/llms/anthropic/app.py b/shiny/templates/chat/llms/anthropic/app.py index 585dd5a58..964428803 100644 --- a/shiny/templates/chat/llms/anthropic/app.py +++ b/shiny/templates/chat/llms/anthropic/app.py @@ -14,7 +14,7 @@ load_dotenv() chat_model = ChatAnthropic( api_key=os.environ.get("ANTHROPIC_API_KEY"), - model="claude-3-5-sonnet-latest", + model="claude-3-7-sonnet-latest", system_prompt="You are a helpful assistant.", ) @@ -37,5 +37,5 @@ # Generate a response when the user submits a message @chat.on_user_submit async def handle_user_input(user_input: str): - response = chat_model.stream(user_input) + response = await chat_model.stream_async(user_input) await chat.append_message_stream(response) diff --git a/shiny/templates/chat/llms/anthropic/template.env b/shiny/templates/chat/llms/anthropic/template.env new file mode 100644 index 000000000..6fe7cf7f7 --- /dev/null +++ b/shiny/templates/chat/llms/anthropic/template.env @@ -0,0 +1,3 @@ +# Once you provided your API key, rename this file to .env +# The load_dotenv() in the app.py will then load this env variable +ANTHROPIC_API_KEY= diff --git a/shiny/templates/chat/llms/google/_template.json b/shiny/templates/chat/llms/google/_template.json index baf30e7cd..8ee55e079 100644 --- a/shiny/templates/chat/llms/google/_template.json +++ b/shiny/templates/chat/llms/google/_template.json @@ -1,5 +1,19 @@ { "type": "app", "id": "chat-ai-gemini", - "title": "Chat AI using Google Gemini" + "title": "Chat AI using Google Gemini", + "next_steps": [ + "Put your Google API key in the `template.env` file and rename it to `.env`.", + "Run the app with `shiny run app.py`." 
+ ], + "follow_up": [ + { + "type": "info", + "text": "Need help obtaining an API key?" + }, + { + "type": "action", + "text": "Learn how to obtain one at https://posit-dev.github.io/chatlas/reference/ChatGoogle.html" + } + ] } diff --git a/shiny/templates/chat/llms/google/app.py b/shiny/templates/chat/llms/google/app.py index ed5b75248..0f650df8d 100644 --- a/shiny/templates/chat/llms/google/app.py +++ b/shiny/templates/chat/llms/google/app.py @@ -15,7 +15,7 @@ chat_model = ChatGoogle( api_key=os.environ.get("GOOGLE_API_KEY"), system_prompt="You are a helpful assistant.", - model="gemini-1.5-flash", + model="gemini-2.0-flash", ) # Set some Shiny page options @@ -33,5 +33,5 @@ # Generate a response when the user submits a message @chat.on_user_submit async def handle_user_input(user_input: str): - response = chat_model.stream(user_input) + response = await chat_model.stream_async(user_input) await chat.append_message_stream(response) diff --git a/shiny/templates/chat/llms/google/requirements.txt b/shiny/templates/chat/llms/google/requirements.txt index f51cd04e3..fdb4a7e80 100644 --- a/shiny/templates/chat/llms/google/requirements.txt +++ b/shiny/templates/chat/llms/google/requirements.txt @@ -1,5 +1,4 @@ shiny python-dotenv -tokenizers -chatlas -google-generativeai +chatlas>=0.4.0 +google-genai diff --git a/shiny/templates/chat/llms/google/template.env b/shiny/templates/chat/llms/google/template.env new file mode 100644 index 000000000..b41ee2ba5 --- /dev/null +++ b/shiny/templates/chat/llms/google/template.env @@ -0,0 +1,3 @@ +# Once you provided your API key, rename this file to .env +# The load_dotenv() in the app.py will then load this env variable +GOOGLE_API_KEY= diff --git a/shiny/templates/chat/llms/langchain/_template.json b/shiny/templates/chat/llms/langchain/_template.json index 3ac04a285..b7731fd68 100644 --- a/shiny/templates/chat/llms/langchain/_template.json +++ b/shiny/templates/chat/llms/langchain/_template.json @@ -1,5 +1,9 @@ { "type": "app", 
"id": "chat-ai-langchain", - "title": "Chat AI using LangChain" + "title": "Chat AI using LangChain", + "next_steps": [ + "Put your OpenAI API key in the `template.env` file and rename it to `.env`.", + "Run the app with `shiny run app.py`." + ] } diff --git a/shiny/templates/chat/llms/langchain/app.py b/shiny/templates/chat/llms/langchain/app.py index 1c62ce399..f30a9f9ad 100644 --- a/shiny/templates/chat/llms/langchain/app.py +++ b/shiny/templates/chat/llms/langchain/app.py @@ -38,5 +38,5 @@ # Define a callback to run when the user submits a message @chat.on_user_submit async def handle_user_input(user_input: str): - response = chat_model.stream(user_input) + response = await chat_model.stream_async(user_input) await chat.append_message_stream(response) diff --git a/shiny/templates/chat/llms/langchain/requirements.txt b/shiny/templates/chat/llms/langchain/requirements.txt index 0391632e3..bb1f1f4aa 100644 --- a/shiny/templates/chat/llms/langchain/requirements.txt +++ b/shiny/templates/chat/llms/langchain/requirements.txt @@ -1,4 +1,3 @@ shiny python-dotenv -tokenizers langchain-openai diff --git a/shiny/templates/chat/llms/langchain/template.env b/shiny/templates/chat/llms/langchain/template.env new file mode 100644 index 000000000..33b6abc3f --- /dev/null +++ b/shiny/templates/chat/llms/langchain/template.env @@ -0,0 +1,3 @@ +# Once you provided your API key, rename this file to .env +# The load_dotenv() in the app.py will then load this env variable +OPENAI_API_KEY= diff --git a/shiny/templates/chat/llms/ollama/_template.json b/shiny/templates/chat/llms/ollama/_template.json index 9a1c53ccc..fb0fb217a 100644 --- a/shiny/templates/chat/llms/ollama/_template.json +++ b/shiny/templates/chat/llms/ollama/_template.json @@ -1,5 +1,20 @@ { "type": "app", "id": "chat-ai-ollama", - "title": "Chat AI using Ollama" + "title": "Chat AI using Ollama", + "next_steps": [ + "If you haven't already, download the Ollama executable from https://ollama.com/", + "Run the executable 
and download the relevant model (llama3.2)", + "Run the app with `shiny run app.py`." + ], + "follow_up": [ + { + "type": "info", + "text": "Need help connecting to Ollama?" + }, + { + "type": "action", + "text": "Learn more at https://posit-dev.github.io/chatlas/reference/ChatOllama.html" + } + ] } diff --git a/shiny/templates/chat/llms/ollama/app.py b/shiny/templates/chat/llms/ollama/app.py index 581050a98..0bff056cb 100644 --- a/shiny/templates/chat/llms/ollama/app.py +++ b/shiny/templates/chat/llms/ollama/app.py @@ -9,7 +9,7 @@ # ChatOllama() requires an Ollama model server to be running locally. # See the docs for more information on how to set up a local Ollama server. # https://posit-dev.github.io/chatlas/reference/ChatOllama.html -chat_model = ChatOllama(model="llama3.1") +chat_model = ChatOllama(model="llama3.2") # Set some Shiny page options ui.page_opts( @@ -29,5 +29,5 @@ # Generate a response when the user submits a message @chat.on_user_submit async def handle_user_input(user_input: str): - response = chat_model.stream(user_input) + response = await chat_model.stream_async(user_input) await chat.append_message_stream(response) diff --git a/shiny/templates/chat/llms/ollama/requirements.txt b/shiny/templates/chat/llms/ollama/requirements.txt index 5901288be..9831b6305 100644 --- a/shiny/templates/chat/llms/ollama/requirements.txt +++ b/shiny/templates/chat/llms/ollama/requirements.txt @@ -1,4 +1,3 @@ shiny -tokenizers chatlas ollama diff --git a/shiny/templates/chat/llms/openai/_template.json b/shiny/templates/chat/llms/openai/_template.json index 89bfb15d3..4fcf812e8 100644 --- a/shiny/templates/chat/llms/openai/_template.json +++ b/shiny/templates/chat/llms/openai/_template.json @@ -1,5 +1,19 @@ { "type": "app", "id": "chat-ai-openai", - "title": "Chat AI using OpenAI" + "title": "Chat AI using OpenAI", + "next_steps": [ + "Put your OpenAI API key in the `template.env` file and rename it to `.env`.", + "Run the app with `shiny run app.py`." 
+ ], + "follow_up": [ + { + "type": "info", + "text": "Need help obtaining an API key?" + }, + { + "type": "action", + "text": "Learn how to obtain one at https://posit-dev.github.io/chatlas/reference/ChatOpenAI.html" + } + ] } diff --git a/shiny/templates/chat/llms/openai/app.py b/shiny/templates/chat/llms/openai/app.py index 07ff62bb2..b13a6b703 100644 --- a/shiny/templates/chat/llms/openai/app.py +++ b/shiny/templates/chat/llms/openai/app.py @@ -37,5 +37,5 @@ # Generate a response when the user submits a message @chat.on_user_submit async def handle_user_input(user_input: str): - response = chat_model.stream(user_input) + response = await chat_model.stream_async(user_input) await chat.append_message_stream(response) diff --git a/shiny/templates/chat/llms/openai/requirements.txt b/shiny/templates/chat/llms/openai/requirements.txt index e7c42d64c..937dbb3e5 100644 --- a/shiny/templates/chat/llms/openai/requirements.txt +++ b/shiny/templates/chat/llms/openai/requirements.txt @@ -1,5 +1,4 @@ shiny python-dotenv -tokenizers chatlas openai diff --git a/shiny/templates/chat/llms/openai/template.env b/shiny/templates/chat/llms/openai/template.env new file mode 100644 index 000000000..33b6abc3f --- /dev/null +++ b/shiny/templates/chat/llms/openai/template.env @@ -0,0 +1,3 @@ +# Once you have provided your API key, rename this file to .env +# The load_dotenv() in app.py will then load this env variable +OPENAI_API_KEY= diff --git a/shiny/templates/chat/llms/playground/_template.json b/shiny/templates/chat/llms/playground/_template.json index b753f492c..98dbfe728 100644 --- a/shiny/templates/chat/llms/playground/_template.json +++ b/shiny/templates/chat/llms/playground/_template.json @@ -1,5 +1,19 @@ { "type": "app", "id": "chat-ai-playground", - "title": "Chat Playground w/ OpenAI, Anthropic, and Google" + "title": "Chat Playground w/ OpenAI, Anthropic, and Google", + "next_steps": [ + "Put your OpenAI, Anthropic, and Google API keys in the `template.env` file and rename it 
to `.env`.", + "Run the app with `shiny run app.py`." + ], + "follow_up": [ + { + "type": "info", + "text": "Need help obtaining API keys?" + }, + { + "type": "action", + "text": "Learn how to obtain them at https://posit-dev.github.io/chatlas/reference/" + } + ] } diff --git a/shiny/templates/chat/llms/playground/app.py b/shiny/templates/chat/llms/playground/app.py index a8c3b0ebb..4f2846bce 100644 --- a/shiny/templates/chat/llms/playground/app.py +++ b/shiny/templates/chat/llms/playground/app.py @@ -15,13 +15,13 @@ load_dotenv() models = { - "openai": ["gpt-4o-mini", "gpt-4o"], "claude": [ + "claude-3-7-sonnet-latest", "claude-3-opus-latest", - "claude-3-5-sonnet-latest", "claude-3-haiku-20240307", ], - "google": ["gemini-1.5-pro-latest"], + "openai": ["gpt-4o-mini", "gpt-4o"], + "google": ["gemini-2.0-flash"], } model_choices: dict[str, dict[str, str]] = {} diff --git a/shiny/templates/chat/llms/playground/requirements.txt b/shiny/templates/chat/llms/playground/requirements.txt index 4cec5d5bb..c7b2458aa 100644 --- a/shiny/templates/chat/llms/playground/requirements.txt +++ b/shiny/templates/chat/llms/playground/requirements.txt @@ -1,6 +1,6 @@ -chatlas +chatlas>=0.4 openai anthropic -google-generativeai +google-genai python-dotenv shiny diff --git a/shiny/templates/chat/llms/playground/template.env b/shiny/templates/chat/llms/playground/template.env new file mode 100644 index 000000000..d93f6dc7f --- /dev/null +++ b/shiny/templates/chat/llms/playground/template.env @@ -0,0 +1,5 @@ +# Once you have provided your API keys, rename this file to .env +# The load_dotenv() in app.py will then load these env variables +ANTHROPIC_API_KEY= +OPENAI_API_KEY= +GOOGLE_API_KEY= diff --git a/shiny/templates/chat/starters/hello/_template.json b/shiny/templates/chat/starters/hello/_template.json index 6cdfd1a2f..1d63a9705 100644 --- a/shiny/templates/chat/starters/hello/_template.json +++ b/shiny/templates/chat/starters/hello/_template.json @@ -1,5 +1,8 @@ { "type": "app", "id": 
"chat-hello", - "title": "Hello Shiny Chat" + "title": "Hello Shiny Chat", + "next_steps": [ + "Run the app with `shiny run app.py`." + ] } diff --git a/shiny/templates/chat/starters/hello/app-core.py b/shiny/templates/chat/starters/hello/app-core.py index 5c088fcaf..8628b85df 100644 --- a/shiny/templates/chat/starters/hello/app-core.py +++ b/shiny/templates/chat/starters/hello/app-core.py @@ -7,13 +7,11 @@ ) # Create a welcome message -welcome = ui.markdown( - """ - Hi! This is a simple Shiny `Chat` UI. Enter a message below and I will - simply repeat it back to you. For more examples, see this - [folder of examples](https://github.com/posit-dev/py-shiny/tree/main/shiny/templates/chat). - """ -) +welcome = """ +Hi! This is a simple Shiny `Chat` UI. Enter a message below and I will +simply repeat it back to you. For more examples, see this +[folder of examples](https://github.com/posit-dev/py-shiny/tree/main/shiny/templates/chat). +""" def server(input, output, session): diff --git a/shiny/templates/chat/starters/hello/app-express.py b/shiny/templates/chat/starters/hello/app-express.py new file mode 100644 index 000000000..02bc2156a --- /dev/null +++ b/shiny/templates/chat/starters/hello/app-express.py @@ -0,0 +1,53 @@ +from shiny.express import expressify, ui + + +@expressify +def card_suggestion(title: str, suggestion: str, img_src: str, img_alt: str): + with ui.card(data_suggestion=suggestion): + ui.card_header(title) + ui.img( + src=img_src, + alt=img_alt, + style="margin-top:auto; margin-bottom:auto;", + ) + + +@expressify +def card_suggestions(): + with ui.layout_column_wrap(): + card_suggestion( + title="Learn Python", + suggestion="Teach me Python", + img_src="https://upload.wikimedia.org/wikipedia/commons/c/c3/Python-logo-notext.svg", + img_alt="Python logo", + ) + card_suggestion( + title="Learn R", + suggestion="Teach me R", + img_src="https://upload.wikimedia.org/wikipedia/commons/1/1b/R_logo.svg", + img_alt="R logo", + ) + + +with ui.hold() as 
suggestions: + card_suggestions() + +welcome = f""" +**Hello!** How can I help you today? + +Here are a couple suggestions: + +{suggestions[0]} +""" + +chat = ui.Chat( + id="chat", + messages=[welcome], +) + +chat.ui() + + +@chat.on_user_submit +async def handle_user_input(user_input: str): + await chat.append_message(f"You said: {user_input}") diff --git a/shiny/templates/chat/starters/hello/app.py b/shiny/templates/chat/starters/hello/app.py deleted file mode 100644 index 43fb202d5..000000000 --- a/shiny/templates/chat/starters/hello/app.py +++ /dev/null @@ -1,33 +0,0 @@ -from shiny.express import ui - -# Set some Shiny page options -ui.page_opts( - title="Hello Shiny Chat", - fillable=True, - fillable_mobile=True, -) - -# Create a welcome message -welcome = ui.markdown( - """ - Hi! This is a simple Shiny `Chat` UI. Enter a message below and I will - simply repeat it back to you. For more examples, see this - [folder of examples](https://github.com/posit-dev/py-shiny/tree/main/shiny/templates/chat). - """ -) - -# Create a chat instance -chat = ui.Chat( - id="chat", - messages=[welcome], -) - -# Display it -chat.ui() - - -# Define a callback to run when the user submits a message -@chat.on_user_submit -async def handle_user_input(user_input: str): - # Append a response to the chat - await chat.append_message(f"You said: {user_input}") diff --git a/shiny/templates/chat/starters/sidebar-dark/_template.json b/shiny/templates/chat/starters/sidebar-dark/_template.json index eb96af780..78dd6d22d 100644 --- a/shiny/templates/chat/starters/sidebar-dark/_template.json +++ b/shiny/templates/chat/starters/sidebar-dark/_template.json @@ -1,5 +1,8 @@ { "type": "app", "id": "chat-sidebar-dark", - "title": "Chat in a sidebar with dark mode" + "title": "Chat in a sidebar with dark mode", + "next_steps": [ + "Run the app with `shiny run app.py`." + ] }