---
title: Node.js Fetch
layout: learn
authors: benhalverson, LankyMoose
---

# Using the Fetch API with Undici in Node.js

## Introduction

[Undici](https://undici.nodejs.org) is an HTTP client library that powers the Fetch API in Node.js. It was written from scratch and does not rely on the built-in HTTP client in Node.js. It includes a number of features that make it a good choice for high-performance applications.
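
In Node.js 18 and later, `fetch` is available as a global and is powered by Undici under the hood. You can also install the `undici` package (`npm install undici`) and import its implementation directly, for example to pin a specific version or to use Undici-specific options. A minimal sketch (the placeholder URL is only an example endpoint):

```js
// Undici's fetch can be imported explicitly after installing the package.
import { fetch as undiciFetch } from 'undici';

// The global fetch in Node.js 18+ is provided by Undici.
const fromGlobal = await fetch('https://jsonplaceholder.typicode.com/todos/1');
console.log(fromGlobal.status); // 200

// The imported version behaves the same way.
const fromUndici = await undiciFetch('https://jsonplaceholder.typicode.com/todos/1');
console.log(fromUndici.status); // 200
```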

## Basic GET Usage

```js
async function main() {
  // Like the browser fetch API, the default method is GET
  const response = await fetch('https://jsonplaceholder.typicode.com/posts');
  const data = await response.json();
  console.log(data);
  // logs an array of posts, where each entry looks something like:
  // {
  //   userId: 1,
  //   id: 1,
  //   title: 'sunt aut facere repellat provident occaecati excepturi optio reprehenderit',
  //   body: 'quia et suscipit\n' +
  //     'suscipit recusandae consequuntur expedita et cum\n' +
  //     'reprehenderit molestiae ut ut quas totam\n' +
  //     'nostrum rerum est autem sunt rem eveniet architecto'
  // }
}

main().catch(console.error);
```
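
Before reading the body, it is usually worth checking that the request succeeded. The `Response` object exposes the same properties as in the browser, such as `ok`, `status`, and `headers`. A small sketch using the same placeholder API:

```js
async function getPost(id) {
  const response = await fetch(`https://jsonplaceholder.typicode.com/posts/${id}`);

  // response.ok is true for any 2xx status code.
  if (!response.ok) {
    throw new Error(`Request failed with status ${response.status}`);
  }

  console.log(response.headers.get('content-type')); // e.g. application/json; charset=utf-8
  return response.json();
}

getPost(1).then(console.log).catch(console.error);
```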

## Basic POST Usage

```js
// Data sent from the client to the server
const body = {
  title: 'foo',
  body: 'bar',
  userId: 1,
};

async function main() {
  const response = await fetch('https://jsonplaceholder.typicode.com/posts', {
    method: 'POST',
    headers: {
      'User-Agent': 'undici-stream-example',
      'Content-Type': 'application/json',
    },
    body: JSON.stringify(body),
  });
  const data = await response.json();
  console.log(data);
  // logs something like:
  // { title: 'foo', body: 'bar', userId: 1, id: 101 }
}

main().catch(console.error);
```
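
JSON is not the only body type `fetch` accepts. A `URLSearchParams` instance, for example, is sent form-encoded and the matching `Content-Type` header is set for you. A brief sketch, assuming the endpoint accepts form-encoded data:

```js
async function submitForm() {
  const form = new URLSearchParams({ title: 'foo', userId: '1' });

  const response = await fetch('https://jsonplaceholder.typicode.com/posts', {
    method: 'POST',
    // No Content-Type header needed: fetch sets application/x-www-form-urlencoded automatically.
    body: form,
  });

  console.log(response.status); // 201 if the resource was created
  return response.json();
}

submitForm().then(console.log).catch(console.error);
```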

## Customizing the Fetch API with Undici

Undici allows you to customize the Fetch API by providing options to the `fetch` function. For example, you can set custom headers, change the request method, and provide a request body.

The [fetch](https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API) function takes two arguments: the URL to fetch and an options object that you can use to customize the request (Undici documents the supported options in its [request options](https://undici.nodejs.org/#/docs/api/Dispatcher?id=parameter-requestoptions) reference). The function returns a [Promise](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Using_promises) that resolves to a [Response](https://undici.nodejs.org/#/docs/api/Dispatcher?id=parameter-responsedata) object. One difference between the Fetch API in the browser and the Fetch API in Node.js is that the Node.js version is not subject to CORS restrictions, since the same-origin policy is a browser security mechanism. Here is an example of how you can customize a request with Undici.
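
Besides the standard `method`, `headers`, and `body` options, Undici's `fetch` also understands a non-standard `dispatcher` option that routes the request through an `Agent` you configure, for example to adjust timeouts. The timeout values in this sketch are only illustrative:

```js
import { Agent } from 'undici';

// An Agent with stricter timeouts; the values here are only illustrative.
const agent = new Agent({
  headersTimeout: 10_000, // ms to wait for the response headers
  bodyTimeout: 10_000, // ms of inactivity allowed while reading the body
});

const response = await fetch('https://jsonplaceholder.typicode.com/posts/1', {
  method: 'GET',
  headers: { Accept: 'application/json' },
  // dispatcher is a non-standard option understood by Undici's fetch.
  dispatcher: agent,
});

console.log(response.status); // 200
console.log(await response.json());
```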

In the following example, we are sending a POST request to the Ollama API with a JSON payload. Ollama is a CLI tool that allows you to run LLMs (Large Language Models) on your local machine. You can download it [here](https://ollama.com/download).

```bash
ollama run mistral
```

This will download the `mistral` model and run it on your local machine.
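
Before wiring up a connection pool, you can check that the local Ollama server responds with a single `fetch` call. The sketch below assumes Ollama is listening on its default port, `11434`, and that the `mistral` model from the previous step is available; setting `stream: false` asks the API for one JSON object instead of a stream of chunks:

```js
const response = await fetch('http://localhost:11434/api/generate', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    model: 'mistral',
    prompt: 'Say hello in one short sentence.',
    stream: false, // return a single JSON response instead of streaming chunks
  }),
});

const data = await response.json();
console.log(data.response); // the generated text
```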

Undici's `Pool` lets you reuse connections to the same server, which can improve performance when you make many requests to the same origin. Here is an example of how you can use a pool with Undici:

```js
import { Pool } from 'undici';

const ollamaPool = new Pool('http://localhost:11434', {
  connections: 10,
});

/**
 * Stream the completion of a prompt using the Ollama API.
 * @param {string} prompt - The prompt to complete.
 * @link https://github.com/ollama/ollama/blob/main/docs/api.md
 **/
async function streamOllamaCompletion(prompt) {
  const { statusCode, body } = await ollamaPool.request({
    path: '/api/generate',
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
    },
    body: JSON.stringify({ prompt, model: 'mistral' }),
  });

  // You can read about HTTP status codes here: https://developer.mozilla.org/en-US/docs/Web/HTTP/Status
  // 200 means the request was successful.
  if (statusCode !== 200) {
    throw new Error(`Ollama request failed with status ${statusCode}`);
  }

  let partial = '';

  const decoder = new TextDecoder();
  for await (const chunk of body) {
    // Ollama streams newline-delimited JSON; log everything received so far.
    partial += decoder.decode(chunk, { stream: true });
    console.log(partial);
  }

  console.log('Streaming complete.');
}

try {
  await streamOllamaCompletion('What is recursion?');
} catch (error) {
  console.error('Error calling Ollama:', error);
} finally {
  console.log('Closing Ollama pool.');
  await ollamaPool.close();
}
```

## Streaming Responses with Undici

[Streams](https://nodejs.org/docs/v22.14.0/api/stream.html#stream) are a feature of Node.js that lets you read and write chunks of data. Undici's `stream` method pipes the response body directly into a writable stream that you supply through a factory function, which is useful for processing large responses without buffering them in an intermediate readable.

```js
import { stream } from 'undici';
import { Writable } from 'node:stream';

async function fetchGitHubRepos() {
  const url = 'https://api.github.com/users/nodejs/repos';

  await stream(
    url,
    {
      method: 'GET',
      headers: {
        'User-Agent': 'undici-stream-example',
        Accept: 'application/json',
      },
    },
    ({ statusCode }) => {
      console.log(`Response status: ${statusCode}`);

      let buffer = '';

      return new Writable({
        write(chunk, encoding, callback) {
          // The body arrives in chunks; collect them until the stream ends.
          buffer += chunk.toString();
          callback();
        },
        final(callback) {
          // The JSON is only complete once the whole body has been received.
          try {
            const json = JSON.parse(buffer);
            console.log(
              'Repository Names:',
              json.map(repo => repo.name)
            );
          } catch (error) {
            console.error('Error parsing JSON:', error);
          }

          console.log('Stream processing completed.');
          callback();
        },
      });
    }
  );
}

fetchGitHubRepos().catch(console.error);
```