update create api for v0.5.5 and deprecate create from files (#192)
Add new create API structure, remove broken file streaming logic

- Updates CreateRequest interface with new fields
- Removes broken logic for streaming model creation from local files
- Previous implementation did not properly handle file streams
- Model creation from files should be handled in a separate PR
- Updates documentation and types to reflect the new API structure (see the sketch below)
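
For orientation, a minimal sketch of the new request shape (the model names and system prompt are illustrative placeholders, not part of this commit):

```typescript
import ollama from 'ollama'

// Derive a new model from an existing base model and quantize it.
// Field names follow the updated CreateRequest interface below.
const response = await ollama.create({
  model: 'my-assistant',
  from: 'llama3.2',
  quantize: 'q4_K_M',
  system: 'You are a concise technical assistant.',
})
console.log(response.status)
```
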
BruceMacD authored Jan 13, 2025
1 parent a90f025 commit f655d63
Showing 4 changed files with 26 additions and 123 deletions.

README.md: 10 additions & 2 deletions

@@ -129,11 +129,19 @@ ollama.create(request)

- `request` `<Object>`: The request object containing create parameters.
  - `model` `<string>`: The name of the model to create.
-  - `path` `<string>`: (Optional) The path to the Modelfile of the model to create.
-  - `modelfile` `<string>`: (Optional) The content of the Modelfile to create.
+  - `from` `<string>`: The base model to derive from.
  - `stream` `<boolean>`: (Optional) When true an `AsyncGenerator` is returned.
+  - `quantize` `<string>`: Quantization precision level (`q8_0`, `q4_K_M`, etc.).
+  - `template` `<string>`: (Optional) The prompt template to use with the model.
+  - `license` `<string|string[]>`: (Optional) The license(s) associated with the model.
+  - `system` `<string>`: (Optional) The system prompt for the model.
+  - `parameters` `<Record<string, unknown>>`: (Optional) Additional model parameters as key-value pairs.
+  - `messages` `<Message[]>`: (Optional) Initial chat messages for the model.
+  - `adapters` `<Record<string, string>>`: (Optional) A key-value map of LoRA adapter configurations.
- Returns: `<ProgressResponse>`

+Note: The `files` parameter is not currently supported in `ollama-js`.
+
### delete

```javascript
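
A hedged usage sketch of the streaming path documented above, assuming a locally running Ollama server and placeholder model names:

```typescript
import ollama from 'ollama'

// With stream: true, create() resolves to an AbortableAsyncIterator
// that yields ProgressResponse events until the model is built.
const stream = await ollama.create({
  model: 'mario',
  from: 'llama3.2',
  system: 'You are Mario from Super Mario Bros.',
  stream: true,
})

for await (const progress of stream) {
  // Status strings come from the server and vary by version.
  console.log(progress.status)
}
```
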

src/browser.ts: 2 additions & 5 deletions

@@ -176,13 +176,10 @@ async encodeImage(image: Uint8Array | string): Promise<string> {
   * @returns {Promise<ProgressResponse | AbortableAsyncIterator<ProgressResponse>>} - The response object or a stream of progress responses.
   */
  async create(
-    request: CreateRequest,
+    request: CreateRequest
  ): Promise<ProgressResponse | AbortableAsyncIterator<ProgressResponse>> {
    return this.processStreamableRequest<ProgressResponse>('create', {
-      name: request.model,
-      stream: request.stream,
-      modelfile: request.modelfile,
-      quantize: request.quantize,
+      ...request
    })
  }

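
The `...request` spread above replaces per-field copying, so any field added to `CreateRequest` is forwarded to the create endpoint without further client changes. A simplified before/after illustration (not the actual implementation, which goes through `processStreamableRequest`):

```typescript
type LooseRequest = Record<string, unknown> & { model: string }

// Before: fields were copied one by one, so newly added CreateRequest
// fields (template, system, adapters, ...) would have been dropped.
function oldBody(request: LooseRequest) {
  return { name: request.model, stream: request.stream, quantize: request.quantize }
}

// After: the whole request object is forwarded unchanged.
function newBody(request: LooseRequest) {
  return { ...request }
}
```
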

src/index.ts: 6 additions & 113 deletions

@@ -1,10 +1,7 @@
import * as utils from './utils.js'
import { AbortableAsyncIterator } from './utils.js'

-import fs, { createReadStream, promises } from 'fs'
-import { dirname, join, resolve } from 'path'
-import { createHash } from 'crypto'
-import { homedir } from 'os'
+import fs, { promises } from 'fs'
+import { resolve } from 'path'
import { Ollama as OllamaBrowser } from './browser.js'

import type { CreateRequest, ProgressResponse } from './interfaces.js'

@@ -28,47 +25,6 @@ export class Ollama extends OllamaBrowser {
    return image
  }

-  /**
-   * Parse the modelfile and replace the FROM and ADAPTER commands with the corresponding blob hashes.
-   * @param modelfile {string} - The modelfile content
-   * @param mfDir {string} - The directory of the modelfile
-   * @private @internal
-   */
-  private async parseModelfile(
-    modelfile: string,
-    mfDir: string = process.cwd(),
-  ): Promise<string> {
-    const out: string[] = []
-    const lines = modelfile.split('\n')
-    for (const line of lines) {
-      const [command, args] = line.split(' ', 2)
-      if (['FROM', 'ADAPTER'].includes(command.toUpperCase())) {
-        const path = this.resolvePath(args.trim(), mfDir)
-        if (await this.fileExists(path)) {
-          out.push(`${command} @${await this.createBlob(path)}`)
-        } else {
-          out.push(`${command} ${args}`)
-        }
-      } else {
-        out.push(line)
-      }
-    }
-    return out.join('\n')
-  }
-
-  /**
-   * Resolve the path to an absolute path.
-   * @param inputPath {string} - The input path
-   * @param mfDir {string} - The directory of the modelfile
-   * @private @internal
-   */
-  private resolvePath(inputPath, mfDir) {
-    if (inputPath.startsWith('~')) {
-      return join(homedir(), inputPath.slice(1))
-    }
-    return resolve(mfDir, inputPath)
-  }
-
  /**
   * checks if a file exists
   * @param path {string} - The path to the file

@@ -84,60 +40,6 @@ export class Ollama extends OllamaBrowser {
    }
  }

-  private async createBlob(path: string): Promise<string> {
-    if (typeof ReadableStream === 'undefined') {
-      // Not all fetch implementations support streaming
-      // TODO: support non-streaming uploads
-      throw new Error('Streaming uploads are not supported in this environment.')
-    }
-
-    // Create a stream for reading the file
-    const fileStream = createReadStream(path)
-
-    // Compute the SHA256 digest
-    const sha256sum = await new Promise<string>((resolve, reject) => {
-      const hash = createHash('sha256')
-      fileStream.on('data', (data) => hash.update(data))
-      fileStream.on('end', () => resolve(hash.digest('hex')))
-      fileStream.on('error', reject)
-    })
-
-    const digest = `sha256:${sha256sum}`
-
-    try {
-      await utils.head(this.fetch, `${this.config.host}/api/blobs/${digest}`)
-    } catch (e) {
-      if (e instanceof Error && e.message.includes('404')) {
-        // Create a new readable stream for the fetch request
-        const readableStream = new ReadableStream({
-          start(controller) {
-            fileStream.on('data', (chunk) => {
-              controller.enqueue(chunk) // Enqueue the chunk directly
-            })
-
-            fileStream.on('end', () => {
-              controller.close() // Close the stream when the file ends
-            })
-
-            fileStream.on('error', (err) => {
-              controller.error(err) // Propagate errors to the stream
-            })
-          },
-        })
-
-        await utils.post(
-          this.fetch,
-          `${this.config.host}/api/blobs/${digest}`,
-          readableStream,
-        )
-      } else {
-        throw e
-      }
-    }
-
-    return digest
-  }
-
  create(
    request: CreateRequest & { stream: true },
  ): Promise<AbortableAsyncIterator<ProgressResponse>>

@@ -146,21 +48,12 @@ export class Ollama extends OllamaBrowser {
  async create(
    request: CreateRequest,
  ): Promise<ProgressResponse | AbortableAsyncIterator<ProgressResponse>> {
-    let modelfileContent = ''
-    if (request.path) {
-      modelfileContent = await promises.readFile(request.path, { encoding: 'utf8' })
-      modelfileContent = await this.parseModelfile(
-        modelfileContent,
-        dirname(request.path),
-      )
-    } else if (request.modelfile) {
-      modelfileContent = await this.parseModelfile(request.modelfile)
-    } else {
-      throw new Error('Must provide either path or modelfile to create a model')
+    // fail if request.from is a local path
+    // TODO: https://github.com/ollama/ollama-js/issues/191
+    if (request.from && await this.fileExists(resolve(request.from))) {
+      throw Error('Creating with a local path is not currently supported from ollama-js')
    }
-    request.modelfile = modelfileContent

    // check stream here so that typescript knows which overload to use
    if (request.stream) {
      return super.create(request as CreateRequest & { stream: true })
    } else {

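
Given the new guard, a sketch of what a caller sees when `from` points at a file that exists locally (the GGUF path is hypothetical):

```typescript
import ollama from 'ollama'

try {
  // Rejected client-side because `from` resolves to an existing local file.
  await ollama.create({ model: 'from-gguf', from: './models/foo.gguf' })
} catch (err) {
  // Logs: Creating with a local path is not currently supported from ollama-js
  console.error((err as Error).message)
}
```
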

src/interfaces.ts: 8 additions & 3 deletions

@@ -120,10 +120,15 @@ export interface PushRequest {

export interface CreateRequest {
  model: string
-  path?: string
-  modelfile?: string
-  quantize?: string
+  from?: string
  stream?: boolean
+  quantize?: string
+  template?: string
+  license?: string | string[]
+  system?: string
+  parameters?: Record<string, unknown>
+  messages?: Message[]
+  adapters?: Record<string, string>
}

export interface DeleteRequest {
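
A small sketch of a request object typed against the updated interface (this assumes `CreateRequest` and `Message` are re-exported from the package entry point; all values are placeholders):

```typescript
import type { CreateRequest, Message } from 'ollama'

const messages: Message[] = [{ role: 'user', content: 'Hello!' }]

const req: CreateRequest = {
  model: 'my-model',
  from: 'llama3.2',
  quantize: 'q8_0',
  template: '{{ .System }} {{ .Prompt }}',
  license: ['MIT'],
  parameters: { temperature: 0.7 },
  messages,
}
```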
