Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

chore: refactoring #100

Open
wants to merge 18 commits into
base: main
Choose a base branch
from
Open
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
feat: constants
Added constants and removed hard-coded string literals for better code comprehensibility.
rohit1901 committed May 11, 2024
commit 7f069507c7f8e6815fd02235d4d527633e02cd06
2 changes: 2 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
@@ -21,7 +21,9 @@ console.log(response.message.content)
```

### Browser Usage

To use the library without node, import the browser module.

```javascript
import ollama from 'ollama/browser'
```
24 changes: 14 additions & 10 deletions src/browser.ts
Original file line number Diff line number Diff line change
@@ -22,6 +22,7 @@ import type {
ShowResponse,
StatusResponse,
} from './interfaces.js'
import { EMPTY_STRING, MESSAGES, OLLAMA_LOCAL_URL, REQUEST_CONSTANTS } from './constants'

export class Ollama {
protected readonly config: Config
@@ -30,10 +31,10 @@ export class Ollama {

constructor(config?: Partial<Config>) {
this.config = {
host: '',
host: EMPTY_STRING,
}
if (!config?.proxy) {
this.config.host = utils.formatHost(config?.host ?? 'http://127.0.0.1:11434')
this.config.host = utils.formatHost(config?.host ?? OLLAMA_LOCAL_URL)
}

this.fetch = fetch
@@ -76,7 +77,7 @@ export class Ollama {
)

if (!response.body) {
throw new Error('Missing body')
throw new Error(MESSAGES.MISSING_BODY)
}

const itr = utils.parseJSON<T | ErrorResponse>(response.body)
@@ -90,15 +91,15 @@ export class Ollama {
yield message
// message will be done in the case of chat and generate
// message will be success in the case of a progress response (pull, push, create)
if ((message as any).done || (message as any).status === 'success') {
if ((message as any).done || (message as any).status === MESSAGES.SUCCESS) {
return
}
}
throw new Error('Did not receive done or success response in stream.')
})()
} else {
const message = await itr.next()
if (!message.value.done && (message.value as any).status !== 'success') {
if (!message.value.done && (message.value as any).status !== MESSAGES.SUCCESS) {
throw new Error('Expected a completed response.')
}
return message.value
@@ -137,7 +138,10 @@ export class Ollama {
if (request.images) {
request.images = await Promise.all(request.images.map(this.encodeImage.bind(this)))
}
return this.processStreamableRequest<GenerateResponse>('generate', request)
return this.processStreamableRequest<GenerateResponse>(
REQUEST_CONSTANTS.GENERATE,
request,
)
}

chat(request: ChatRequest & { stream: true }): Promise<AsyncGenerator<ChatResponse>>
@@ -175,7 +179,7 @@ export class Ollama {
async create(
request: CreateRequest,
): Promise<ProgressResponse | AsyncGenerator<ProgressResponse>> {
return this.processStreamableRequest<ProgressResponse>('create', {
return this.processStreamableRequest<ProgressResponse>(REQUEST_CONSTANTS.CREATE, {
name: request.model,
stream: request.stream,
modelfile: request.modelfile,
@@ -213,7 +217,7 @@ export class Ollama {
async push(
request: PushRequest,
): Promise<ProgressResponse | AsyncGenerator<ProgressResponse>> {
return this.processStreamableRequest<ProgressResponse>('push', {
return this.processStreamableRequest<ProgressResponse>(REQUEST_CONSTANTS.PUSH, {
name: request.model,
stream: request.stream,
insecure: request.insecure,
@@ -230,7 +234,7 @@ export class Ollama {
await utils.del(this.fetch, `${this.config.host}/api/delete`, {
name: request.model,
})
return { status: 'success' }
return { status: MESSAGES.SUCCESS }
}

/**
@@ -241,7 +245,7 @@ export class Ollama {
*/
async copy(request: CopyRequest): Promise<StatusResponse> {
await utils.post(this.fetch, `${this.config.host}/api/copy`, { ...request })
return { status: 'success' }
return { status: MESSAGES.SUCCESS }
}

/**
31 changes: 31 additions & 0 deletions src/constants/index.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
// Shared constants extracted from inline literals across the library.
const EMPTY_STRING = ''

// User-facing / log messages and API status markers.
const MESSAGES = {
  MISSING_BODY: 'Missing body',
  // BUGFIX: must be lowercase 'success' — the Ollama API reports
  // `status: "success"`, and the pre-refactor call sites compared against
  // the lowercase literal. 'Success' would break stream-completion checks.
  SUCCESS: 'success',
  FETCHING_TEXT: 'Getting text from response',
  ERROR_FETCHING_TEXT: 'Failed to get text from error response',
  ERROR_NO_MODEL_FILE: 'Must provide either path or modelfile to create a model',
  ERROR_JSON_PARSE: 'Failed to parse error response as JSON',
} as const

// API endpoint names used by processStreamableRequest.
const REQUEST_CONSTANTS = {
  GENERATE: 'generate',
  CREATE: 'create',
  PUSH: 'push',
} as const

// Modelfile commands whose arguments reference local files that must be
// uploaded as blobs. Typed as readonly string[] so `.includes` accepts any
// string while the array itself stays immutable.
const MODEL_FILE_COMMANDS: readonly string[] = ['FROM', 'ADAPTER']

// Default host used when no host is configured.
const OLLAMA_LOCAL_URL = 'http://127.0.0.1:11434'

// Digest algorithm name, used both for createHash() and the digest prefix.
const SHA256 = 'sha256'

// Buffer/TextDecoder encoding identifiers.
const ENCODING = {
  HEX: 'hex',
  BASE64: 'base64',
  UTF8: 'utf8',
} as const

export {
  EMPTY_STRING,
  MESSAGES,
  REQUEST_CONSTANTS,
  MODEL_FILE_COMMANDS,
  OLLAMA_LOCAL_URL,
  SHA256,
  ENCODING,
}
30 changes: 19 additions & 11 deletions src/index.ts
Original file line number Diff line number Diff line change
@@ -6,18 +6,25 @@ import { homedir } from 'os'
import { Ollama as OllamaBrowser } from './browser.js'

import type { CreateRequest, ProgressResponse } from './interfaces.js'
import {
EMPTY_STRING,
ENCODING,
MESSAGES,
MODEL_FILE_COMMANDS,
SHA256,
} from './constants'

export class Ollama extends OllamaBrowser {
async encodeImage(image: Uint8Array | Buffer | string): Promise<string> {
if (typeof image !== 'string') {
// image is Uint8Array or Buffer, convert it to base64
return Buffer.from(image).toString('base64')
return Buffer.from(image).toString(ENCODING.BASE64)
}
try {
if (fs.existsSync(image)) {
// this is a filepath, read the file and convert it to base64
const fileBuffer = await promises.readFile(resolve(image))
return Buffer.from(fileBuffer).toString('base64')
return Buffer.from(fileBuffer).toString(ENCODING.BASE64)
}
} catch {
// continue
@@ -40,7 +47,7 @@ export class Ollama extends OllamaBrowser {
const lines = modelfile.split('\n')
for (const line of lines) {
const [command, args] = line.split(' ', 2)
if (['FROM', 'ADAPTER'].includes(command.toUpperCase())) {
if (MODEL_FILE_COMMANDS.includes(command.toUpperCase())) {
const path = this.resolvePath(args.trim(), mfDir)
if (await this.fileExists(path)) {
out.push(`${command} @${await this.createBlob(path)}`)
@@ -94,13 +101,13 @@ export class Ollama extends OllamaBrowser {

// Compute the SHA256 digest
const sha256sum = await new Promise<string>((resolve, reject) => {
const hash = createHash('sha256')
const hash = createHash(SHA256)
fileStream.on('data', (data) => hash.update(data))
fileStream.on('end', () => resolve(hash.digest('hex')))
fileStream.on('end', () => resolve(hash.digest(ENCODING.HEX)))
fileStream.on('error', reject)
})

const digest = `sha256:${sha256sum}`
const digest = `${SHA256}:${sha256sum}`

try {
await utils.head(this.fetch, `${this.config.host}/api/blobs/${digest}`)
@@ -144,26 +151,27 @@ export class Ollama extends OllamaBrowser {
async create(
request: CreateRequest,
): Promise<ProgressResponse | AsyncGenerator<ProgressResponse>> {
let modelfileContent = ''
let modelfileContent = EMPTY_STRING
if (request.path) {
modelfileContent = await promises.readFile(request.path, { encoding: 'utf8' })
modelfileContent = await promises.readFile(request.path, {
encoding: ENCODING.UTF8,
})
modelfileContent = await this.parseModelfile(
modelfileContent,
dirname(request.path),
)
} else if (request.modelfile) {
modelfileContent = await this.parseModelfile(request.modelfile)
} else {
throw new Error('Must provide either path or modelfile to create a model')
throw new Error(MESSAGES.ERROR_NO_MODEL_FILE)
}
request.modelfile = modelfileContent

// check stream here so that typescript knows which overload to use
if (request.stream) {
return super.create(request as CreateRequest & { stream: true })
} else {
return super.create(request as CreateRequest & { stream: false })
}
return super.create(request as CreateRequest & { stream: false })
}
}

15 changes: 8 additions & 7 deletions src/utils.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import { version } from './version.js'
import type { Fetch, ErrorResponse } from './interfaces.js'
import { EMPTY_STRING, ENCODING, MESSAGES, OLLAMA_LOCAL_URL } from './constants'

/**
* An error class for response errors.
@@ -36,15 +37,15 @@ const checkOk = async (response: Response): Promise<void> => {
errorData = (await response.json()) as ErrorResponse
message = errorData.error || message
} catch (error) {
console.log('Failed to parse error response as JSON')
console.log(MESSAGES.ERROR_JSON_PARSE)
}
} else {
try {
console.log('Getting text from response')
console.log(MESSAGES.FETCHING_TEXT)
const textResponse = await response.text()
message = textResponse || message
} catch (error) {
console.log('Failed to get text from error response')
console.log(MESSAGES.ERROR_FETCHING_TEXT)
}
}

@@ -181,8 +182,8 @@ export const del = async (
export const parseJSON = async function* <T = unknown>(
itr: ReadableStream<Uint8Array>,
): AsyncGenerator<T> {
const decoder = new TextDecoder('utf-8')
let buffer = ''
const decoder = new TextDecoder(ENCODING.UTF8)
let buffer = EMPTY_STRING

const reader = itr.getReader()

@@ -197,7 +198,7 @@ export const parseJSON = async function* <T = unknown>(

const parts = buffer.split('\n')

buffer = parts.pop() ?? ''
buffer = parts.pop() ?? EMPTY_STRING

for (const part of parts) {
try {
@@ -223,7 +224,7 @@ export const parseJSON = async function* <T = unknown>(
*/
export const formatHost = (host: string): string => {
if (!host) {
return 'http://127.0.0.1:11434'
return OLLAMA_LOCAL_URL
}

let isExplicitProtocol = host.includes('://')
6 changes: 1 addition & 5 deletions tsconfig.json
Original file line number Diff line number Diff line change
@@ -13,11 +13,7 @@
"module": "ES2022",
"outDir": "./dist",
"target": "ES6",
"lib": [
"es6",
"es2018.asyncgenerator",
"dom"
]
"lib": ["es6", "es2018.asyncgenerator", "dom"],
},

"ts-node": {