Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
90 changes: 90 additions & 0 deletions examples/create-from-files/create-from-files.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,90 @@
import ollama from 'ollama'
import path from 'path'

// Download these model weights next to this script before running:
// - https://huggingface.co/ggml-org/tinygemma3-GGUF/blob/main/tinygemma3-Q8_0.gguf
// - https://huggingface.co/ggml-org/tinygemma3-GGUF/blob/main/mmproj-tinygemma3.gguf

// NOTE(review): __dirname assumes a CommonJS build — confirm the examples
// compile as CJS; under ESM this would need import.meta.url instead.
const GGUF_1 = path.join(__dirname, './tinygemma3-Q8_0.gguf') // base model weights
const GGUF_2 = path.join(__dirname, './mmproj-tinygemma3.gguf') // multimodal projector

/**
 * Demonstrates three ways of creating a model from local files:
 * 1. streaming progress from a single-file upload,
 * 2. a non-streaming multi-file upload,
 * 3. the same flow through a manually configured Ollama client.
 *
 * Each example catches and logs its own errors so the later examples
 * still run if an earlier one fails.
 */
async function main() {
  console.log('Example: Creating a model from local files\n')

  // Example 1: Create a model from a local GGUF file with modelfile
  console.log('Example 1: Creating model from local file with modelfile')
  try {
    const response = await ollama.create({
      model: 'custom-model',
      modelfile: `
FROM ${GGUF_1}
SYSTEM "You are mario from super mario bros."
PARAMETER temperature 0.7
`,
      files: [
        {
          filepath: GGUF_1,
          // sha256 is optional - will be computed automatically if not provided
        },
      ],
      stream: true,
    })

    // With stream: true the response is an async iterator of progress updates.
    for await (const progress of response) {
      console.log(`Progress: ${progress.status}`)
    }
    console.log('Model created successfully!\n')
  } catch (error) {
    console.error('Error creating model:', error)
  }

  // Example 2: Create a model from multiple files (e.g., base model + adapter)
  console.log('Example 2: Creating model from multiple files')
  try {
    const response = await ollama.create({
      model: 'fusion-model',
      modelfile: `FROM ${GGUF_1}`,
      files: [
        {
          filepath: GGUF_1,
        },
        {
          filepath: GGUF_2,
        },
      ],
      stream: false, // Non-streaming response
    })

    console.log(`Status: ${response.status}`)
    console.log('Model created successfully!\n')
  } catch (error) {
    console.error('Error creating model with multiple files:', error)
  }

  // Example 3: Create a model using the Ollama class directly (for more control)
  console.log('Example 3: Using Ollama class directly')
  try {
    // Importing the class (instead of the default singleton) allows a custom host.
    const { Ollama } = await import('ollama')
    const ollamaClient = new Ollama({
      host: 'http://localhost:11434',
    })

    const response = await ollamaClient.create({
      model: 'nude-model',
      modelfile: `FROM ${GGUF_1}`,
      files: [
        {
          filepath: GGUF_1,
        },
      ],
      stream: false,
    })

    // stream is false here, so the result is a single response object.
    console.log(`Status: ${(response as any).status || 'completed'}`)
    console.log('Model created successfully!')
  } catch (error) {
    console.error('Error with direct client:', error)
  }
}

main().catch(console.error)
127 changes: 127 additions & 0 deletions src/fileUpload.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,127 @@
import { createHash } from 'node:crypto'
import { createReadStream, promises } from 'node:fs'
import { basename } from 'node:path'

/**
 * Computes the SHA256 hash of a file in a memory-efficient way.
 *
 * The file is read as a stream in 64KB chunks so arbitrarily large model
 * files can be hashed without loading them into memory.
 *
 * @param filepath - The path to the file
 * @returns A promise that resolves to the hex-encoded SHA256 hash
 * @throws If the file cannot be opened or read (stream errors propagate
 *         as a rejection)
 */
export async function computeFileSHA256(filepath: string): Promise<string> {
  const hash = createHash('sha256')
  // 64KB chunks keep memory usage flat even for multi-GB GGUF files.
  const stream = createReadStream(filepath, { highWaterMark: 64 * 1024 })

  // `for await` is the idiomatic replacement for the manual
  // Symbol.asyncIterator loop: it consumes the stream chunk by chunk and
  // surfaces read errors as rejections of this function's promise.
  for await (const chunk of stream) {
    hash.update(chunk)
  }

  return hash.digest('hex')
}

/**
 * Checks whether a path exists and refers to a regular file.
 *
 * @param filepath - The path to check
 * @returns A promise resolving to true only when the path can be stat'ed
 *          and is a regular file; any stat failure (missing path,
 *          permission error) yields false rather than throwing.
 */
export async function isFile(filepath: string): Promise<boolean> {
  return promises
    .stat(filepath)
    .then((info) => info.isFile())
    .catch(() => false)
}

/**
 * Uploads a file as a blob to the Ollama server, skipping the upload when
 * the server already holds a blob with the same digest.
 *
 * @param host - The Ollama server host URL
 * @param filepath - Path to the file to upload
 * @param sha256 - Hex SHA256 digest of the file (without the "sha256:" prefix)
 * @param fetchFn - The fetch function to use for HTTP requests
 * @param headers - Optional headers to include in requests
 * @returns A promise that resolves once the blob exists on the server
 * @throws Error when the server rejects the upload
 */
export async function uploadBlob(
  host: string,
  filepath: string,
  sha256: string,
  fetchFn: typeof fetch,
  headers?: Record<string, string>
): Promise<void> {
  const url = `${host}/api/blobs/sha256:${sha256}`

  // A successful HEAD means the server already has this blob. A non-OK
  // response or a network error both fall through to the upload attempt.
  const alreadyPresent = await fetchFn(url, { method: 'HEAD', headers })
    .then((res) => res.ok)
    .catch(() => false)
  if (alreadyPresent) {
    return
  }

  // Stream the file so large models are never buffered fully in memory.
  const response = await fetchFn(url, {
    method: 'POST',
    body: createReadStream(filepath) as any,
    headers: {
      ...headers,
      'Content-Type': 'application/octet-stream',
    },
    // @ts-expect-error - duplex is required for streaming bodies in Node.js fetch
    duplex: 'half',
  })

  if (response.ok) {
    return
  }

  // Prefer the server-supplied error message when the body parses as JSON.
  const fallback = `Failed to upload blob: ${response.status} ${response.statusText}`
  const message = await response
    .json()
    .then((data) => data.error || fallback)
    .catch(() => fallback)
  throw new Error(message)
}

/**
 * Creates a map of filenames to blob digests for the Ollama create request.
 *
 * @param files - Array of file objects with filepath and optional sha256
 * @param blobDigests - Array of blob digests (sha256:hash format), in the
 *                      same order as `files`
 * @returns A map of file basename to blob digest
 * @throws Error when the array lengths differ, or when two filepaths share
 *         a basename — keys are basenames, so a duplicate would silently
 *         drop one of the uploaded blobs from the request
 */
export function createBlobFileMap(
  files: Array<{ filepath: string; sha256?: string }>,
  blobDigests: string[]
): Record<string, string> {
  if (files.length !== blobDigests.length) {
    throw new Error(
      `Mismatch between number of files (${files.length}) and blob digests (${blobDigests.length})`
    )
  }

  const fileMap: Record<string, string> = {}

  for (let i = 0; i < files.length; i++) {
    const filename = basename(files[i].filepath)
    // Two distinct paths with the same final component would overwrite each
    // other in the map; fail loudly instead of losing an upload.
    if (filename in fileMap) {
      throw new Error(`Duplicate file name in upload list: ${filename}`)
    }
    fileMap[filename] = blobDigests[i]
  }

  return fileMap
}
83 changes: 80 additions & 3 deletions src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,13 @@ import { Ollama as OllamaBrowser } from './browser.js'

import type { CreateRequest, ProgressResponse } from './interfaces.js'

import {
computeFileSHA256,
isFile,
uploadBlob,
createBlobFileMap,
} from './fileUpload.js'

export class Ollama extends OllamaBrowser {
async encodeImage(image: Uint8Array | Buffer | string): Promise<string> {
if (typeof image !== 'string') {
Expand Down Expand Up @@ -48,10 +55,19 @@ export class Ollama extends OllamaBrowser {
async create(
request: CreateRequest,
): Promise<ProgressResponse | AbortableAsyncIterator<ProgressResponse>> {
// fail if request.from is a local path
// TODO: https://github.com/ollama/ollama-js/issues/191
// Handle local file uploads
// API compatibility: ollama version 0.14.1
// todo: need to test compatibility for other ollama versions as well
if (request.files !== undefined) {
if (request.files.length < 1) {
throw new Error('At least one file must be specified when using file upload')
}
return this.createFromFiles(request)
}

// Handle local path in 'from' field (legacy support with modelfile - with error)
if (request.from && await this.fileExists(resolve(request.from))) {
throw Error('Creating with a local path is not currently supported from ollama-js')
throw Error('Creating with a local path is not currently supported from ollama-js. Please use the files parameter instead.')
}

if (request.stream) {
Expand All @@ -60,6 +76,67 @@ export class Ollama extends OllamaBrowser {
return super.create(request as CreateRequest & { stream: false })
}
}

/**
* Creates a model from local files by first uploading them as blobs.
* This method handles memory-efficient streaming of large model files.
*
* @param request {CreateRequest} - The request object containing model name and files
* @returns {Promise<ProgressResponse | AbortableAsyncIterator<ProgressResponse>>} - The response object or a stream of progress responses
*/
private async createFromFiles(
request: CreateRequest,
): Promise<ProgressResponse | AbortableAsyncIterator<ProgressResponse>> {
if (!request.files || request.files.length === 0) {
throw new Error('At least one file must be specified when using file upload')
}

// Validate all files exist
await this.validateFiles(request.files)

// Upload files as blobs and get their digests
const blobDigests = await this.uploadFilesAsBlobs(request.files)

// Build the modified request with blob references
const modifiedRequest: Record<string, any> = {
...request,
files: createBlobFileMap(request.files, blobDigests),
}

return this.processStreamableRequest<ProgressResponse>('create', modifiedRequest)
}

/**
 * Validates that every listed path exists and is a regular file, throwing
 * on the first one that does not. The error message echoes the caller's
 * original (possibly relative) path for readability.
 */
private async validateFiles(files: Array<{ filepath: string; sha256?: string }>): Promise<void> {
  for (const { filepath } of files) {
    const exists = await isFile(resolve(filepath))
    if (!exists) {
      throw new Error(`File not found: ${filepath}`)
    }
  }
}

/**
 * Uploads each file as a blob and returns the digests in file order.
 *
 * Files are uploaded one at a time. Each digest comes from the caller when
 * provided (skipping the expensive hash pass over large files), otherwise
 * it is computed by streaming the file through SHA256.
 */
private async uploadFilesAsBlobs(files: Array<{ filepath: string; sha256?: string }>): Promise<string[]> {
  const digests: string[] = []

  for (const { filepath, sha256 } of files) {
    const absolutePath = resolve(filepath)
    const digest = sha256 || (await computeFileSHA256(absolutePath))

    // Push the file content to the server's blob store.
    await uploadBlob(this.config.host, absolutePath, digest, this.fetch, this.config.headers)
    digests.push(`sha256:${digest}`)
  }

  return digests
}


}

export default new Ollama()
Expand Down
8 changes: 8 additions & 0 deletions src/interfaces.ts
Original file line number Diff line number Diff line change
Expand Up @@ -142,6 +142,14 @@ export interface CreateRequest {
parameters?: Record<string, unknown>
messages?: Message[]
adapters?: Record<string, string>
// `modelfile` and `files` are used for creating models from local files https://github.com/ollama/ollama-js/issues/191
modelfile?: string
files?: CreateRequestFile[]
}

/**
 * Describes one local file to be uploaded as a blob when creating a model.
 */
export interface CreateRequestFile {
  /** Path to the local file; relative paths are resolved against the process cwd. */
  filepath: string
  /** Optional hex-encoded SHA256 of the file; computed automatically when omitted. */
  sha256?: string
}

export interface DeleteRequest {
Expand Down
Loading