Skip to content

Commit

Permalink
add jsDocs (#88)
Browse the repository at this point in the history
- refactored return statements in some functions
- added jsDocs
  • Loading branch information
rohit1901 authored May 10, 2024
1 parent 0a5f7de commit 305eb0b
Show file tree
Hide file tree
Showing 4 changed files with 199 additions and 56 deletions.
6 changes: 6 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -128,3 +128,9 @@ dist
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.*

# IDEs
.idea

# MacOS
.DS_Store
119 changes: 93 additions & 26 deletions src/browser.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,25 +2,25 @@ import * as utils from './utils.js'
import 'whatwg-fetch'

// Type-only imports: erased at compile time, safe under isolatedModules.
// Kept alphabetized and deduplicated (the previous list repeated several names,
// which is a duplicate-identifier compile error).
import type {
  ChatRequest,
  ChatResponse,
  Config,
  CopyRequest,
  CreateRequest,
  DeleteRequest,
  EmbeddingsRequest,
  EmbeddingsResponse,
  ErrorResponse,
  Fetch,
  GenerateRequest,
  GenerateResponse,
  ListResponse,
  ProgressResponse,
  PullRequest,
  PushRequest,
  ShowRequest,
  ShowResponse,
  StatusResponse,
} from './interfaces.js'

export class Ollama {
Expand Down Expand Up @@ -50,6 +50,17 @@ export class Ollama {
this.abortController = new AbortController()
}

/**
* Processes a request to the Ollama server. If the request is streamable, it will return an
* AsyncGenerator that yields the response messages. Otherwise, it will return the response
* object.
* @param endpoint {string} - The endpoint to send the request to.
* @param request {object} - The request object to send to the endpoint.
* @protected {T | AsyncGenerator<T>} - The response object or an AsyncGenerator that yields
* response messages.
* @throws {Error} - If the response body is missing or if the response is an error.
* @returns {Promise<T | AsyncGenerator<T>>} - The response object or an AsyncGenerator that yields the streamed response.
*/
protected async processStreamableRequest<T extends object>(
endpoint: string,
request: { stream?: boolean } & Record<string, any>,
Expand Down Expand Up @@ -94,13 +105,17 @@ export class Ollama {
}
}

/**
* Encodes an image to base64 if it is a Uint8Array.
* @param image {Uint8Array | string} - The image to encode.
* @returns {Promise<string>} - The base64 encoded image.
*/
async encodeImage(image: Uint8Array | string): Promise<string> {
if (typeof image !== 'string') {
// image is Uint8Array convert it to base64
const uint8Array = new Uint8Array(image)
const numberArray = Array.from(uint8Array)
const base64String = btoa(String.fromCharCode.apply(null, numberArray))
return base64String
return btoa(String.fromCharCode.apply(null, numberArray))
}
// the string may be base64 encoded
return image
Expand All @@ -110,7 +125,12 @@ export class Ollama {
request: GenerateRequest & { stream: true },
): Promise<AsyncGenerator<GenerateResponse>>
generate(request: GenerateRequest & { stream?: false }): Promise<GenerateResponse>

/**
* Generates a response from a text prompt.
* @param request {GenerateRequest} - The request object.
* @returns {Promise<GenerateResponse | AsyncGenerator<GenerateResponse>>} - The response object or
* an AsyncGenerator that yields response messages.
*/
async generate(
request: GenerateRequest,
): Promise<GenerateResponse | AsyncGenerator<GenerateResponse>> {
Expand All @@ -122,7 +142,14 @@ export class Ollama {

chat(request: ChatRequest & { stream: true }): Promise<AsyncGenerator<ChatResponse>>
chat(request: ChatRequest & { stream?: false }): Promise<ChatResponse>

/**
* Chats with the model. The request object can contain messages with images that are either
* Uint8Arrays or base64 encoded strings. The images will be base64 encoded before sending the
* request.
* @param request {ChatRequest} - The request object.
* @returns {Promise<ChatResponse | AsyncGenerator<ChatResponse>>} - The response object or an
* AsyncGenerator that yields response messages.
*/
async chat(request: ChatRequest): Promise<ChatResponse | AsyncGenerator<ChatResponse>> {
if (request.messages) {
for (const message of request.messages) {
Expand All @@ -140,7 +167,11 @@ export class Ollama {
request: CreateRequest & { stream: true },
): Promise<AsyncGenerator<ProgressResponse>>
create(request: CreateRequest & { stream?: false }): Promise<ProgressResponse>

/**
* Creates a new model from a stream of data.
* @param request {CreateRequest} - The request object.
* @returns {Promise<ProgressResponse | AsyncGenerator<ProgressResponse>>} - The response object or a stream of progress responses.
*/
async create(
request: CreateRequest,
): Promise<ProgressResponse | AsyncGenerator<ProgressResponse>> {
Expand All @@ -154,7 +185,13 @@ export class Ollama {

pull(request: PullRequest & { stream: true }): Promise<AsyncGenerator<ProgressResponse>>
pull(request: PullRequest & { stream?: false }): Promise<ProgressResponse>

/**
* Pulls a model from the Ollama registry. The request object can contain a stream flag to indicate if the
* response should be streamed.
* @param request {PullRequest} - The request object.
* @returns {Promise<ProgressResponse | AsyncGenerator<ProgressResponse>>} - The response object or
* an AsyncGenerator that yields response messages.
*/
async pull(
request: PullRequest,
): Promise<ProgressResponse | AsyncGenerator<ProgressResponse>> {
Expand All @@ -167,7 +204,13 @@ export class Ollama {

push(request: PushRequest & { stream: true }): Promise<AsyncGenerator<ProgressResponse>>
push(request: PushRequest & { stream?: false }): Promise<ProgressResponse>

/**
* Pushes a model to the Ollama registry. The request object can contain a stream flag to indicate if the
* response should be streamed.
* @param request {PushRequest} - The request object.
* @returns {Promise<ProgressResponse | AsyncGenerator<ProgressResponse>>} - The response object or
* an AsyncGenerator that yields response messages.
*/
async push(
request: PushRequest,
): Promise<ProgressResponse | AsyncGenerator<ProgressResponse>> {
Expand All @@ -178,38 +221,62 @@ export class Ollama {
})
}

/**
 * Removes a model from the server. The request object should contain the
 * name of the model to delete.
 * @param request {DeleteRequest} - The request object naming the model.
 * @returns {Promise<StatusResponse>} - A success status once the model is removed.
 */
async delete(request: DeleteRequest): Promise<StatusResponse> {
  // The delete endpoint expects the model under the 'name' key.
  const url = `${this.config.host}/api/delete`
  await utils.del(this.fetch, url, { name: request.model })
  return { status: 'success' }
}

/**
 * Copies a model from one name to another. The request object should contain
 * the name of the model to copy and the new name.
 * @param request {CopyRequest} - The request object.
 * @returns {Promise<StatusResponse>} - A success status once the copy completes.
 */
async copy(request: CopyRequest): Promise<StatusResponse> {
  // Forward the request fields verbatim to the copy endpoint.
  const body = { ...request }
  await utils.post(this.fetch, `${this.config.host}/api/copy`, body)
  return { status: 'success' }
}

/**
 * Lists the models available on the server.
 * @returns {Promise<ListResponse>} - The response object with the model list.
 * @throws {Error} - If the response body is missing.
 */
async list(): Promise<ListResponse> {
  const response = await utils.get(this.fetch, `${this.config.host}/api/tags`)
  // Cast the parsed JSON to the typed response; shape is defined by the API.
  return (await response.json()) as ListResponse
}

/**
 * Shows the metadata of a model. The request object should contain the name
 * of the model.
 * @param request {ShowRequest} - The request object.
 * @returns {Promise<ShowResponse>} - The response object with the model metadata.
 */
async show(request: ShowRequest): Promise<ShowResponse> {
  const response = await utils.post(this.fetch, `${this.config.host}/api/show`, {
    ...request,
  })
  // Cast the parsed JSON to the typed response; shape is defined by the API.
  return (await response.json()) as ShowResponse
}

/**
 * Embeds a text prompt into a vector.
 * @param request {EmbeddingsRequest} - The request object.
 * @returns {Promise<EmbeddingsResponse>} - The response object with the embedding.
 */
async embeddings(request: EmbeddingsRequest): Promise<EmbeddingsResponse> {
  const response = await utils.post(this.fetch, `${this.config.host}/api/embeddings`, {
    ...request,
  })
  // Cast the parsed JSON to the typed response; shape is defined by the API.
  return (await response.json()) as EmbeddingsResponse
}
}

Expand Down
25 changes: 21 additions & 4 deletions src/index.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
// Deduplicated: the flattened diff left both the old and new 'fs' and 'path'
// import lines in place, which redeclares createReadStream/promises/join/etc.
// No import has been removed — only the duplicates collapsed.
import * as utils from './utils.js'
import fs, { createReadStream, promises } from 'fs'
import { dirname, join, resolve } from 'path'
import { createHash } from 'crypto'
import { homedir } from 'os'
import { Ollama as OllamaBrowser } from './browser.js'
Expand All @@ -11,8 +11,7 @@ export class Ollama extends OllamaBrowser {
async encodeImage(image: Uint8Array | Buffer | string): Promise<string> {
if (typeof image !== 'string') {
// image is Uint8Array or Buffer, convert it to base64
const result = Buffer.from(image).toString('base64')
return result
return Buffer.from(image).toString('base64')
}
try {
if (fs.existsSync(image)) {
Expand All @@ -27,6 +26,12 @@ export class Ollama extends OllamaBrowser {
return image
}

/**
* Parse the modelfile and replace the FROM and ADAPTER commands with the corresponding blob hashes.
* @param modelfile {string} - The modelfile content
* @param mfDir {string} - The directory of the modelfile
* @private @internal
*/
private async parseModelfile(
modelfile: string,
mfDir: string = process.cwd(),
Expand All @@ -49,13 +54,25 @@ export class Ollama extends OllamaBrowser {
return out.join('\n')
}

/**
* Resolve the path to an absolute path.
* @param inputPath {string} - The input path
* @param mfDir {string} - The directory of the modelfile
* @private @internal
*/
private resolvePath(inputPath, mfDir) {
if (inputPath.startsWith('~')) {
return join(homedir(), inputPath.slice(1))
}
return resolve(mfDir, inputPath)
}

/**
* checks if a file exists
* @param path {string} - The path to the file
* @private @internal
* @returns {Promise<boolean>} - Whether the file exists or not
*/
private async fileExists(path: string): Promise<boolean> {
try {
await promises.access(path)
Expand Down
Loading

0 comments on commit 305eb0b

Please sign in to comment.