|
| 1 | +import { getCustomStaticPath } from "@/utils/getCustomStaticPath"; |
| 2 | + |
// Page metadata consumed by the docs framework: title/description for the
// page header and SEO, platforms for per-platform routing.
export const meta = {
  title: "Context",
  description: "How to pass client-side context to the LLM to help it respond.",
  platforms: ["javascript", "react-native", "angular", "nextjs", "react", "vue"]
};
| 16 | + |
// Next.js hook: generate one static route per platform listed in `meta`.
export async function getStaticPaths() {
  return getCustomStaticPath(meta.platforms);
}
| 20 | + |
// Next.js hook: expose the current platform segment and page metadata to the
// page component; breadcrumbs are suppressed for this page.
export function getStaticProps(context) {
  const { platform } = context.params;
  return {
    props: { platform, meta, showBreadcrumbs: false },
  };
}
| 30 | + |
| 31 | + |
| 32 | + |
| 33 | +For LLMs to provide high-quality answers to users' questions, they need to have the right information. Sometimes this information is contextual, based on the user or the state of the application. To allow for this, you can send `aiContext` with any user message to the LLM, which can be any unstructured or structured data that might be useful. |
| 34 | + |
| 35 | +<InlineFilter filters={["javascript","vue","angular"]}> |
| 36 | + |
| 37 | +```ts |
| 38 | +import { generateClient } from "aws-amplify/data"; |
| 39 | +import type { Schema } from "../amplify/data/resource"; |
| 40 | + |
| 41 | +const client = generateClient<Schema>({ authMode: 'userPool' }); |
| 42 | + |
| 43 | +const { data: conversation } = await client.conversations.chat.create(); |
| 44 | + |
| 45 | +conversation.sendMessage({ |
| 46 | + content: [{ text: "hello" }], |
| 47 | + // aiContext can be any shape |
| 48 | + aiContext: { |
| 49 | + username: "danny" |
| 50 | + } |
| 51 | +}) |
| 52 | +``` |
| 53 | + |
| 54 | +</InlineFilter> |
| 55 | + |
| 56 | + |
| 57 | +<InlineFilter filters={["react-native"]}> |
| 58 | + |
| 59 | +```tsx |
| 60 | +export default function Chat() { |
| 61 | + const [ |
| 62 | + { |
| 63 | + data: { messages }, |
| 64 | + isLoading, |
| 65 | + }, |
| 66 | + sendMessage, |
| 67 | + ] = useAIConversation('chat'); |
| 68 | + |
| 69 | + function handleSendMessage(message) { |
| 70 | + sendMessage({ |
| 71 | + ...message, |
| 72 | + // this can be any object that can be stringified |
| 73 | + aiContext: { |
| 74 | + currentTime: new Date().toLocaleTimeString() |
| 75 | + } |
| 76 | + }) |
| 77 | + } |
| 78 | + |
| 79 | + return ( |
| 80 | + //... |
| 81 | + ) |
| 82 | +} |
| 83 | +``` |
| 84 | + |
| 85 | +</InlineFilter> |
| 86 | + |
| 87 | + |
| 88 | +<InlineFilter filters={["react", "nextjs"]}> |
| 89 | + |
| 90 | +```tsx |
| 91 | +function Chat() { |
| 92 | + const [ |
| 93 | + { |
| 94 | + data: { messages }, |
| 95 | + isLoading, |
| 96 | + }, |
| 97 | + sendMessage, |
| 98 | + ] = useAIConversation('chat'); |
| 99 | + |
| 100 | + return ( |
| 101 | + <AIConversation |
| 102 | + messages={messages} |
| 103 | + isLoading={isLoading} |
| 104 | + handleSendMessage={sendMessage} |
| 105 | + // This will let the LLM know about the current state of this application |
| 106 | + // so it can better respond to questions |
| 107 | + aiContext={() => { |
| 108 | + return { |
| 109 | + currentTime: new Date().toLocaleTimeString(), |
| 110 | + }; |
| 111 | + }} |
| 112 | + /> |
| 113 | + ); |
| 114 | +} |
| 115 | +``` |
| 116 | + |
| 117 | + |
| 118 | +The function passed to the `aiContext` prop will be run immediately before the request is sent in order to get the most up to date information. |
| 119 | + |
| 120 | +You can use React context or other state management systems to update the data passed to `aiContext`. Using React context we can provide more information about the current state of the application: |
| 121 | + |
| 122 | +```tsx |
| 123 | +// Create a context to share state across components |
| 124 | +const DataContext = React.createContext<{ |
| 125 | + data: any; |
| 126 | + setData: (value: React.SetStateAction<any>) => void; |
| 127 | +}>({ data: {}, setData: () => {} }); |
| 128 | + |
| 129 | +// Create a component that updates the shared state |
| 130 | +function Counter() { |
| 131 | +  const { data, setData } = React.useContext(DataContext);
| 132 | + const count = data.count ?? 0; |
| 133 | + return ( |
| 134 | + <Button onClick={() => setData({ ...data, count: count + 1 })}> |
| 135 | + {count} |
| 136 | + </Button> |
| 137 | + ); |
| 138 | +} |
| 139 | + |
| 140 | +// reference shared data in aiContext |
| 141 | +function Chat() { |
| 142 | + const { data } = React.useContext(DataContext); |
| 143 | + const [ |
| 144 | + { |
| 145 | + data: { messages }, |
| 146 | + isLoading, |
| 147 | + }, |
| 148 | + sendMessage, |
| 149 | +  ] = useAIConversation('chat');
| 150 | + |
| 151 | + return ( |
| 152 | + <AIConversation |
| 153 | + messages={messages} |
| 154 | + isLoading={isLoading} |
| 155 | + handleSendMessage={sendMessage} |
| 156 | + // This will let the LLM know about the current state of this application |
| 157 | + // so it can better respond to questions |
| 158 | + aiContext={() => { |
| 159 | + return { |
| 160 | + ...data, |
| 161 | + currentTime: new Date().toLocaleTimeString(), |
| 162 | + }; |
| 163 | + }} |
| 164 | + /> |
| 165 | + ); |
| 166 | +} |
| 167 | + |
| 168 | +export default function Example() { |
| 169 | + const [data, setData] = React.useState({}); |
| 170 | + return ( |
| 171 | + <Authenticator> |
| 172 | + <DataContext.Provider value={{ data, setData }}> |
| 173 | + <Counter /> |
| 174 | + <Chat /> |
| 175 | + </DataContext.Provider> |
| 176 | + </Authenticator> |
| 177 | + ) |
| 178 | +} |
| 179 | +``` |
| 180 | + |
| 181 | + |
| 182 | +</InlineFilter> |
0 commit comments