
The AI framework for building declarative and composable AI-powered LLM products.

-## Getting Started with @langbase/core
+## Getting Started with langbase

### Installation

-First, install the `@langbase/core` package using npm or yarn:
+First, install the `langbase` package using npm, pnpm, or yarn:

```bash
-npm install @langbase/core
+npm install langbase
```

or

```bash
-pnpm add @langbase/core
+pnpm add langbase
```

or

```bash
-yarn add @langbase/core
+yarn add langbase
```

### Usage

-To use the `generate` function from the `@langbase/core` package, follow these steps:
+You can call `generateText` for a pipe with streaming turned off, or `streamText` for a streaming pipe:

-1. **Import the `generate` function**:

-   ```typescript
-   import {generate} from '@langbase/core';
-   ```
+```ts
+import 'dotenv/config';
+import {Pipe} from 'langbase';

-2. **Set up environment variables**:
-
-   Ensure you have the following environment variables set in your `.env` file:
-
-   ```env
-   OPENAI_API_KEY=your_openai_api_key
-   ```
-
-3. **Generate a response using a prompt**:
-
-   ```typescript
-   import {generate} from '@langbase/core';
-
-   async function exampleWithPrompt() {
-     const response = await generate({
-       model: 'gpt-3.5-turbo-0125',
-       prompt: '1+1',
-     });
-
-     console.log(response); // Output: '2'
-   }
-
-   exampleWithPrompt();
-   ```
-
-4. **Generate a response using messages array**:
-
-   ```typescript
-   import {generate} from '@langbase/core';
-
-   async function exampleWithMessages() {
-     const response = await generate({
-       model: 'gpt-3.5-turbo-0125',
-       messages: [
-         {role: 'system', content: 'You are a helpful assistant.'},
-         {role: 'user', content: 'Give me 5 title ideas'},
-         {role: 'assistant', content: 'Sure, here you go … …'},
-       ],
-     });
-
-     console.log(response);
-   }
-
-   exampleWithMessages();
-   ```
-
-## API Reference
-
-### `generate`
-
-Generates a response using the specified model, prompt, or messages array.
-
-#### Signature
-
-```typescript
-async function generate(params: GenerateParams): Promise<string>;
-```
-
-#### Parameters
-
-- `params`: An object containing the following properties:
-  - `model` (string): The model to use for generating the response.
-  - `prompt` (optional string): The prompt to use for generating the response. Either `prompt` or `messages` must be provided.
-  - `messages` (optional `Message[]`): An array of message objects. Each message object should contain `role` and `content` properties. Either `prompt` or `messages` must be provided.
-
-#### Returns
-
-- A promise that resolves to a string containing the generated response.
-
-#### Example
-
-```typescript
-import {generate} from '@langbase/core';
-
-const responseFromPrompt = await generate({
-  model: 'gpt-3.5-turbo-0125',
-  prompt: '1+1',
+// STREAM: OFF
+const pipeStreamOff = new Pipe({
+  apiKey: process.env.PIPE_LESS_WORDY!,
});

-console.log(responseFromPrompt);
-
-const responseFromMessages = await generate({
-  model: 'gpt-3.5-turbo-0125',
-  messages: [
-    {role: 'system', content: 'You are a helpful assistant.'},
-    {role: 'user', content: 'Give me 5 title ideas'},
-    {role: 'assistant', content: 'Sure, here you go … …'},
-  ],
+const result = await pipeStreamOff.generateText({
+  messages: [{role: 'user', content: 'Who is Ahmad Awais?'}],
});

-console.log(responseFromMessages);
-```
-
-### `validateInput`
-
-Validates the input parameters and environment variables.
-
-#### Signature
-
-```typescript
-function validateInput(params: GenerateParams): ValidatedParams;
-```
-
-#### Parameters
-
-- `params`: An object containing the following properties:
-  - `model` (string): The model to use for generating the response.
-  - `prompt` (optional string): The prompt to use for generating the response.
-  - `messages` (optional `Message[]`): An array of message objects.
-
-#### Returns
-
-- An object containing the validated parameters and environment variables.
-
-#### Example
-
-```typescript
-const validatedParams = validateInput({
-  model: 'gpt-3.5-turbo-0125',
-  prompt: 'Hi',
-});
-```
-
-### `buildMessages`
-
-Constructs the messages array using the provided prompt or messages array.
-
-#### Signature
-
-```typescript
-function buildMessages({
-  prompt,
-  messages,
-}: {
-  prompt?: string;
-  messages?: Message[];
-}): Message[];
-```
-
-#### Parameters
-
-- `prompt` (optional string): The prompt to use for generating the response.
-- `messages` (optional `Message[]`): An array of message objects.
-
-#### Returns
-
-- An array of message objects.
-
-#### Example
-
-```typescript
-const messages = buildMessages({prompt: 'Hi'});
-```
-
-### `buildHeaders`
-
-Constructs the headers for the API request using the provided API key.
-
-#### Signature
-
-```typescript
-function buildHeaders(API_KEY: string): Record<string, string>;
-```
-
-#### Parameters
-
-- `API_KEY` (string): The API key to use for the request.
-
-#### Returns
-
-- An object containing the headers for the API request.
-
-#### Example
-
-```typescript
-const headers = buildHeaders('your-api-key');
-```
-
-### `handleResponse`
-
-Processes the API response and extracts the generated message content.
-
-#### Signature
-
-```typescript
-async function handleResponse(response: Response): Promise<string>;
-```
-
-#### Parameters
-
-- `response` (Response): The response object from the API request.
-
-#### Returns
-
-- A promise that resolves to a string containing the generated message content.
-
-#### Example
-
-```typescript
-const content = await handleResponse(response);
-```
-
-## Types
+console.log(result.completion);

-### `GenerateParams`
-
-Type definition for the parameters of the `generate` function.
-
-```typescript
-interface GenerateParams {
-  model: string;
-  prompt?: string;
-  messages?: Message[];
-}
-```
-
-### `Message`
-
-Type definition for a message object.
-
-```typescript
-interface Message {
-  role: 'system' | 'user' | 'assistant';
-  content: string;
-}
-```
-
-### `ValidatedEnv`
-
-Type definition for the validated environment variables.
-
-```typescript
-interface ValidatedEnv {
-  API_KEY: string;
-  API_URL_CHAT: string;
-}
-```
-
-## Environment Variables
-
-### `OPEN_AI_API_KEY`
-
-The API key for authenticating requests to the OpenAI API.
-
-### `OPEN_AI_API_URL_CHAT`
-
-The URL for the OpenAI API chat endpoint.
-
-## Example Usage
-
-```typescript
-import {generate} from '@langbase/core';
-
-const responseFromPrompt = await generate({
-  model: 'gpt-3.5-turbo-0125',
-  prompt: '1+1',
+// STREAM: ON
+const pipeStreaming = new Pipe({
+  apiKey: process.env.PIPE_LESS_WORDY_STREAM!,
});

-console.log(responseFromPrompt);
-
-const responseFromMessages = await generate({
-  model: 'gpt-3.5-turbo-0125',
-  messages: [
-    {role: 'system', content: 'You are a helpful assistant.'},
-    {role: 'user', content: 'Give me 5 title ideas'},
-    {role: 'assistant', content: 'Sure, here you go … …'},
-  ],
+const stream = await pipeStreaming.streamText({
+  messages: [{role: 'user', content: 'Who is Ahmad Awais?'}],
});

-console.log(responseFromMessages);
+for await (const chunk of stream) {
+  process.stdout.write(chunk.choices[0]?.delta?.content || '');
+}
```
-
-This documentation provides a comprehensive guide for getting started with the `@langbase/core` package, as well as a detailed API reference for the `generate` function and its related components.
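
The new usage example reads each pipe's API key from a `.env` file via `dotenv` (the `PIPE_LESS_WORDY` and `PIPE_LESS_WORDY_STREAM` variables above). As a minimal sketch, assuming the same `Pipe` constructor, `streamText` call, and chunk shape shown in the diff, the streamed deltas can also be collected into a single string instead of being written to `stdout`:

```ts
import 'dotenv/config';
import {Pipe} from 'langbase';

// Assumes PIPE_LESS_WORDY_STREAM holds the API key of a pipe with
// streaming turned on, exactly as in the example above.
const pipe = new Pipe({apiKey: process.env.PIPE_LESS_WORDY_STREAM!});

const stream = await pipe.streamText({
  messages: [{role: 'user', content: 'Who is Ahmad Awais?'}],
});

// Accumulate the streamed deltas into one string rather than
// writing each chunk to stdout as it arrives.
let completion = '';
for await (const chunk of stream) {
  completion += chunk.choices[0]?.delta?.content || '';
}

console.log(completion);
```

Only the destination of the deltas changes; the streaming loop itself matches the one in the README.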