Commit c82f81e

Invoke a model in modus quickstart (#142)
* docs: use model call in quickstart
* trunk issues
1 parent af32ac3 commit c82f81e

File tree

1 file changed (+153 -35 lines)


modus/quickstart.mdx

Lines changed: 153 additions & 35 deletions
@@ -84,31 +84,63 @@ learn how to use the basic components of a Modus app and how to run it locally.
 ```

 </Step>
-<Step title="Add a function">
+<Step title="Add a model">
+Modus also supports AI models. You can define new models in your `modus.json` file. Let's add a new meta-llama model:
+
+```json
+"models": {
+  "text-generator": {
+    "sourceModel": "meta-llama/Llama-3.2-3B-Instruct",
+    "provider": "hugging-face",
+    "connection": "hypermode"
+  }
+},
+```
+
+</Step>
+<Step title="Install the Hyp CLI and log in">
+Next, install the Hyp CLI. This allows you to access hosted models on the Hypermode platform.
+
+```sh
+npm install -g @hypermode/hyp-cli
+```
+
+You can now use the `hyp login` command to log in to the Hyp CLI.
+This links your project to the Hypermode platform, allowing you to leverage the model in your modus app.
+
+</Step>
+
+<Step title="Add a function with AI integration">
 Functions are the building blocks of your app. Let's add a function that fetches a random quote from
-the ZenQuotes connection you just created.
+the ZenQuotes connection and uses AI to generate a summary for the quote.

 <Tabs>
 <Tab title="Go">
-To add a function, create a new file in the root directory with the following code:
+Create a new file in the root directory with the following code:

 ```go quotes.go
 package main

 import (
   "errors"
   "fmt"
+  "strings"

   "github.com/hypermodeinc/modus/sdk/go/pkg/http"
+  "github.com/hypermodeinc/modus/sdk/go/pkg/models"
+  "github.com/hypermodeinc/modus/sdk/go/pkg/models/openai"
 )

 type Quote struct {
-  Quote  string `json:"q"`
-  Author string `json:"a"`
+  Quote   string `json:"q"`
+  Author  string `json:"a"`
+  Summary string `json:"summary,omitempty"`
 }

-// this function makes a request to an API that returns data in JSON format, and
-// returns an object representing the data
+const modelName = "text-generator"
+
+// this function makes a request to an API that returns data in JSON format,
+// and returns a single quote with AI-generated summary
 func GetRandomQuote() (*Quote, error) {
   request := http.NewRequest("https://zenquotes.io/api/random")
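The "Install the Hyp CLI and log in" step added above only names the `hyp login` command. A minimal sketch of that step once the CLI is installed (the interactive confirmation flow is an assumption, not spelled out in the diff):

```sh
# Authenticate and link this project to your Hypermode account.
# Exact prompts may vary; an interactive, browser-based flow is assumed here.
hyp login
```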

@@ -117,25 +149,72 @@ learn how to use the basic components of a Modus app and how to run it locally.
     return nil, err
   }
   if !response.Ok() {
-    return nil, fmt.Errorf("Failed to fetch quote. Received: %d %s", response.Status, response.StatusText)
+    return nil, fmt.Errorf("failed to fetch quote. Received: %d %s", response.Status, response.StatusText)
   }

-  // the API returns an array of quotes, but we only want the first one
+  // the API returns an array of quotes, but we only need the first one
   var quotes []Quote
   response.JSON(&quotes)
   if len(quotes) == 0 {
     return nil, errors.New("expected at least one quote in the response, but none were found")
   }
-  return &quotes[0], nil
+
+  // Get the first (and only) quote
+  quote := quotes[0]
+
+  // Generate AI summary for the quote
+  summary, err := summarizeQuote(quote.Quote, quote.Author)
+  if err != nil {
+    fmt.Printf("Warning: failed to summarize quote by %s: %v\n", quote.Author, err)
+    quote.Summary = "Summary unavailable"
+  } else {
+    quote.Summary = summary
+  }
+
+  return &quote, nil
+}
+
+// summarizeQuote uses the AI model to generate a concise summary of the quote
+func summarizeQuote(quote, author string) (string, error) {
+  model, err := models.GetModel[openai.ChatModel](modelName)
+  if err != nil {
+    return "", err
+  }
+
+  instruction := "Provide a brief, insightful summary that captures the essence and meaning of the quote in 1-2 sentences."
+  prompt := fmt.Sprintf("Quote: \"%s\" - %s", quote, author)
+
+  input, err := model.CreateInput(
+    openai.NewSystemMessage(instruction),
+    openai.NewUserMessage(prompt),
+  )
+  if err != nil {
+    return "", err
+  }
+
+  // Set temperature for consistent but creative responses
+  input.Temperature = 0.7
+
+  output, err := model.Invoke(input)
+  if err != nil {
+    return "", err
+  }
+
+  return strings.TrimSpace(output.Choices[0].Message.Content), nil
 }
 ```
 </Tab>

 <Tab title="AssemblyScript">
-To add a function, create a new file in the `assembly` directory with the following code:
+Create a new file in the `assembly` directory with the following code:

 ```ts quotes.ts
-import { http } from "@hypermode/modus-sdk-as";
+import { http, models } from "@hypermode/modus-sdk-as";
+import {
+  OpenAIChatModel,
+  SystemMessage,
+  UserMessage,
+} from "@hypermode/modus-sdk-as/models/openai/chat";

 @json
 class Quote {
@@ -144,10 +223,14 @@ learn how to use the basic components of a Modus app and how to run it locally.

   @alias("a")
   author!: string;
+
+  summary?: string;
 }

-// this function makes a request to an API that returns data in JSON format, and
-// returns an object representing the data
+const modelName: string = "text-generator";
+
+// this function makes a request to an API that returns data in JSON format,
+// and returns a single quote with AI-generated summary
 export function getRandomQuote(): Quote {
   const request = new http.Request("https://zenquotes.io/api/random");

@@ -158,8 +241,43 @@ learn how to use the basic components of a Modus app and how to run it locally.
     );
   }

-  // the API returns an array of quotes, but we only want the first one
-  return response.json<Quote[]>()[0];
+  // the API returns an array of quotes, but we only need the first one
+  const quotes = response.json<Quote[]>();
+  if (quotes.length === 0) {
+    throw new Error("Expected at least one quote in the response, but none were found");
+  }
+
+  // Get the first (and only) quote
+  const quote = quotes[0];
+
+  // Generate AI summary for the quote
+  try {
+    quote.summary = summarizeQuote(quote.quote, quote.author);
+  } catch (error) {
+    console.log(`Warning: failed to summarize quote by ${quote.author}: ${error}`);
+    quote.summary = "Summary unavailable";
+  }
+
+  return quote;
+}
+
+// summarizeQuote uses the AI model to generate a concise summary of the quote
+function summarizeQuote(quote: string, author: string): string {
+  const model = models.getModel<OpenAIChatModel>(modelName);
+
+  const instruction = "Provide a brief, insightful summary that captures the essence and meaning of the quote in 1-2 sentences.";
+  const prompt = `Quote: "${quote}" - ${author}`;
+
+  const input = model.createInput([
+    new SystemMessage(instruction),
+    new UserMessage(prompt),
+  ]);
+
+  // Set temperature for consistent but creative responses
+  input.temperature = 0.7;
+
+  const output = model.invoke(input);
+  return output.choices[0].message.content.trim();
 }
 ```

@@ -172,32 +290,26 @@ learn how to use the basic components of a Modus app and how to run it locally.

 </Tab>
 </Tabs>
-After adding your function, you can use the API Explorer interface to test the `GetRandomQuote` function.

 </Step>
-<Step title="Add a model">
-Modus also supports AI models. You can define new models in your `modus.json` file. Let's add a new meta-llama model:
-
-```json
-"models": {
-  "text-generator": {
-    "sourceModel": "meta-llama/Llama-3.2-3B-Instruct",
-    "provider": "hugging-face",
-    "connection": "hypermode"
-  }
-},
-```

-</Step>
-<Step title="Install the Hyp CLI and log in">
-Next, install the Hyp CLI. This allows you to access hosted models on the Hypermode platform.
+<Step title="Make your first AI call">
+Now that you've integrated the AI model, let's test it! After adding your function, restart your development server:

 ```sh
-npm install -g @hypermode/hyp-cli
+modus dev
 ```

-You can now use the `hyp login` command to log in to the Hyp CLI.
-This links your project to the Hypermode platform, allowing you to leverage the model in your modus app.
+Navigate to the API Explorer at `http://localhost:8686/explorer` and you'll see your `randomQuote` function available to test.
+
+When you call the function, you'll notice that the quote includes three fields:
+- `quote`: The original quote text
+- `author`: The author's name
+- `summary`: An AI-generated summary that captures the essence of the quote
+
+The AI model analyzes the quote and provides insightful context about its meaning, making your app more engaging and informative for users.
+
+Try calling the function multiple times to see how the AI generates different summaries for various quotes!

 </Step>
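The "Make your first AI call" step above exercises `randomQuote` through the API Explorer. The same call can also be sketched as a raw GraphQL request from the terminal; the `/graphql` path on the local dev server is an assumption inferred from the explorer URL shown in the diff:

```sh
# Call the randomQuote function directly against the local Modus dev server.
# Assumes the GraphQL endpoint is served at /graphql on the same port as the explorer.
curl -s http://localhost:8686/graphql \
  -H "Content-Type: application/json" \
  -d '{"query": "{ randomQuote { quote author summary } }"}'
```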

@@ -213,6 +325,12 @@ learn how to use the basic components of a Modus app and how to run it locally.

 ![local model tracing](../images/observe-functions/local-inference-history.png)

+You can now see detailed information about each AI model call, including:
+- Input prompts sent to the model
+- Generated responses
+- Performance metrics like response time
+- Token usage and costs
+
 </Step>
 </Steps>
