Using the Fetch API with Undici in Node.js

Introduction

Undici is an HTTP client library that powers the fetch API in Node.js. It was written from scratch and does not rely on the built-in HTTP client in Node.js. It includes a number of features that make it a good choice for high-performance applications.

For information on Undici's specification compliance, see the Undici documentation.

Basic GET Usage

// Fetch a resource and log the parsed JSON body.
// Note: extraction garbling fused editor hover types into this sample; this is the clean code.
async function main() {
  // Like the browser fetch API, the default method is GET
  const response = await fetch('https://jsonplaceholder.typicode.com/posts');
  const data = await response.json();
  console.log(data);
  // returns something like:
  //   {
  //   userId: 1,
  //   id: 1,
  //   title: 'sunt aut facere repellat provident occaecati excepturi optio reprehenderit',
  //   body: 'quia et suscipit\n' +
  //     'suscipit recusandae consequuntur expedita et cum\n' +
  //     'reprehenderit molestiae ut ut quas totam\n' +
  //     'nostrum rerum est autem sunt rem eveniet architecto'
  // }
}

main().catch(console.error);

Basic POST Usage

// Data sent from the client to the server
const body = {
  title: 'foo',
  body: 'bar',
  userId: 1,
};

// Send the payload as JSON via POST and log the server's echo response.
async function main() {
  const response = await fetch('https://jsonplaceholder.typicode.com/posts', {
    method: 'POST',
    headers: {
      'User-Agent': 'undici-stream-example',
      'Content-Type': 'application/json',
    },
    body: JSON.stringify(body),
  });
  const data = await response.json();
  console.log(data);
  // returns something like:
  // { title: 'foo', body: 'bar', userId: 1, id: 101 }
}

main().catch(console.error);

Customizing the Fetch API with Undici

Undici allows you to customize the Fetch API by providing options to the fetch function. For example, you can set custom headers, set the request method, and set the request body. Here is an example of how you can customize the Fetch API with Undici:

The fetch function takes two arguments: the URL to fetch and an options object. The options object lets you customize the request (method, headers, body, and so on). The function returns a Promise that resolves to a Response object.

In the following example, we are sending a POST request to the Ollama API with a JSON payload. Ollama is a CLI tool that allows you to run LLMs (Large Language Models) on your local machine. You can download it here

ollama run mistral

This will download the mistral model and run it on your local machine.

With a pool, you can reuse connections to the same server, which can improve performance. Here is an example of how you can use a pool with Undici:

import { Pool } from 'undici';

// Reuse up to 10 connections to the local Ollama server.
const ollamaPool = new Pool('http://localhost:11434', {
  connections: 10,
});

/**
 * Stream the completion of a prompt using the Ollama API.
 * @param {string} prompt - The prompt to complete.
 * @link https://github.com/ollama/ollama/blob/main/docs/api.md
 **/
async function streamOllamaCompletion(prompt) {
  const { statusCode, body } = await ollamaPool.request({
    path: '/api/generate',
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
    },
    body: JSON.stringify({ prompt, model: 'mistral' }),
  });

  // You can read about HTTP status codes here: https://developer.mozilla.org/en-US/docs/Web/HTTP/Status
  // 200 means the request was successful.
  if (statusCode !== 200) {
    throw new Error(`Ollama request failed with status ${statusCode}`);
  }

  let partial = '';
  const decoder = new TextDecoder();
  // The response body is an async iterable of Buffer chunks;
  // decode with { stream: true } so multi-byte characters split
  // across chunk boundaries are handled correctly.
  for await (const chunk of body) {
    partial += decoder.decode(chunk, { stream: true });
    console.log(partial);
  }

  console.log('Streaming complete.');
}

try {
  await streamOllamaCompletion('What is recursion?');
} catch (error) {
  console.error('Error calling Ollama:', error);
} finally {
  console.log('Closing Ollama pool.');
  ollamaPool.close();
}

Streaming Responses with Undici

Streams are a feature in Node.js that allows you to read and write chunks of data.

import { Writable } from 'stream';
import { stream } from 'undici';

// Stream the GitHub repos list for the nodejs org, logging repository
// names as complete JSON payloads arrive.
async function fetchGitHubRepos() {
  const url = 'https://api.github.com/users/nodejs/repos';

  const { statusCode } = await stream(
    url,
    {
      method: 'GET',
      headers: {
        'User-Agent': 'undici-stream-example',
        Accept: 'application/json',
      },
    },
    // Factory that returns the Writable the response body is piped into.
    () => {
      let buffer = '';
      return new Writable({
        write(chunk, encoding, callback) {
          buffer += chunk.toString();
          try {
            // Will throw until the buffered text is a complete JSON document.
            const json = JSON.parse(buffer);
            console.log(
              'Repository Names:',
              json.map(repo => repo.name)
            );
            buffer = '';
          } catch (error) {
            console.error('Error parsing JSON:', error);
          }
          callback();
        },
        final(callback) {
          console.log('Stream processing completed.');
          callback();
        },
      });
    }
  );

  console.log(`Response status: ${statusCode}`);
}

fetchGitHubRepos().catch(console.error);