Skip to content

Commit

Permalink
fix: 兼容 Baichuan
Browse files Browse the repository at this point in the history
  • Loading branch information
liuchuanhe committed Nov 15, 2023
1 parent 09127e8 commit 1b2774d
Show file tree
Hide file tree
Showing 17 changed files with 1,605 additions and 54 deletions.
30 changes: 30 additions & 0 deletions examples/baseDemo.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
#!/usr/bin/env -S npm run tsn -T

import OpenAI from '../src';
import { ChatCompletionStream } from '../src/lib/BaseChatCompletionStream';
// Client for the test LLM engine endpoint.
// Read the API key from the environment instead of hard-coding it:
// committing a live key ("sk-...") to source control leaks the credential
// and it must be revoked. Set OPENAI_API_KEY in your shell before running.
const openai = new OpenAI({
  apiKey: process.env['OPENAI_API_KEY'],
  baseURL: 'http://llm-engine-test.cf8025269a251437fa494ea5f9c8a924c.cn-beijing.alicontainer.com',
});

/**
 * Demo: streams a chat completion from the Baichuan2 model via the
 * `baseChat` surface and logs each content delta together with the
 * accumulated snapshot as chunks arrive.
 */
async function main() {
  // Ask for the raw HTTP response so we can hand its body stream
  // to the ChatCompletionStream helper ourselves.
  const response = await openai.baseChat.completions
    .createBase({
      model_name: 'Baichuan2',
      prompt: {
        data: '写一篇100字的小说',
        from: 0,
        created_at: 12312344,
      },
      stream: true,
      history: [],
    })
    .asResponse();

  const { body } = response;
  // NOTE(review): `body` is a node-fetch body stream; the cast is needed
  // because fromReadableStream expects the DOM ReadableStream type —
  // confirm the runtime type actually satisfies the reader protocol.
  const runner = ChatCompletionStream.fromReadableStream(body as any);
  runner.on('content', (delta, snapshot) => {
    console.log(snapshot, 'yyyy');
    console.log(delta, '*****');
  });
}

// Surface failures instead of leaving an unhandled promise rejection:
// the original called main() as a floating promise.
main().catch(console.error);
33 changes: 16 additions & 17 deletions examples/demo.ts
Original file line number Diff line number Diff line change
@@ -1,28 +1,27 @@
#!/usr/bin/env -S npm run tsn -T

import OpenAI from 'openai';

// gets API Key from environment variable OPENAI_API_KEY
const openai = new OpenAI();
import OpenAI from '../src';
import { ChatCompletionStream } from '../src/lib/ChatCompletionStream';

const openai = new OpenAI({
apiKey: '96d6c41139d2efc5c25d7c0b93ad69a3', // defaults to process.env["OPENAI_API_KEY"]
baseURL: 'https://api.baichuan-ai.com/v1',
});
async function main() {
// Non-streaming:
const completion = await openai.chat.completions.create({
model: 'gpt-4',
messages: [{ role: 'user', content: 'Say this is a test' }],
});
console.log(completion.choices[0]?.message?.content);

// Streaming:
const stream = await openai.chat.completions.create({
model: 'gpt-4',
messages: [{ role: 'user', content: 'Say this is a test' }],
const stream = openai.beta.chat.completions.stream({
model: 'Baichuan2',
stream: true,
messages: [{ role: 'user', content: '写一篇200字的小学生作文' }],
});
for await (const part of stream) {
process.stdout.write(part.choices[0]?.delta?.content || '');
console.log(part.choices[0]?.delta?.content, '****');
}
process.stdout.write('\n');
}

main();

// const runner = ChatCompletionStream.fromReadableStream(stream.toReadableStream());

// runner.on('content', (delta, snapshot) => {
// console.log(delta, 'testtest');
// });
2 changes: 1 addition & 1 deletion examples/tool-call-helpers.ts
Original file line number Diff line number Diff line change
Expand Up @@ -85,7 +85,7 @@ async function main() {
console.log(runner.messages);

console.log();
console.log('final chat completion');
console.log('final chat completion');
console.dir(result, { depth: null });
}

Expand Down
8 changes: 4 additions & 4 deletions package.json
Original file line number Diff line number Diff line change
@@ -1,12 +1,12 @@
{
"name": "openai",
"version": "4.18.0",
"name": "openailchgpt",
"version": "1.0.0",
"description": "Client library for the OpenAI API",
"author": "OpenAI <[email protected]>",
"author": "",
"types": "dist/index.d.ts",
"main": "dist/index.js",
"type": "commonjs",
"repository": "github:openai/openai-node",
"repository": "github:lch000/openai-node",
"license": "Apache-2.0",
"private": false,
"sideEffects": [
Expand Down
18 changes: 14 additions & 4 deletions src/core.ts
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,7 @@ async function defaultParseResponse<T>(props: APIResponseProps): Promise<T> {

// Note: there is an invariant here that isn't represented in the type system
// that if you set `stream: true` the response type must also be `Stream<T>`
// return Stream.fromReadableStream(response, props.controller) as any;
return Stream.fromSSEResponse(response, props.controller) as any;
}

Expand Down Expand Up @@ -83,6 +84,7 @@ export class APIPromise<T> extends Promise<T> {
private responsePromise: Promise<APIResponseProps>,
private parseResponse: (props: APIResponseProps) => PromiseOrValue<T> = defaultParseResponse,
) {
console.log(responsePromise, 'responsePromise');
super((resolve) => {
// this is maybe a bit weird but this has to be a no-op to not implicitly
// parse the response body; instead .then, .catch, .finally are overridden
Expand Down Expand Up @@ -180,7 +182,6 @@ export abstract class APIClient {
this.maxRetries = validatePositiveInteger('maxRetries', maxRetries);
this.timeout = validatePositiveInteger('timeout', timeout);
this.httpAgent = httpAgent;

this.fetch = overridenFetch ?? fetch;
}

Expand Down Expand Up @@ -273,7 +274,7 @@ export abstract class APIClient {
options: FinalRequestOptions<Req>,
): { req: RequestInit; url: string; timeout: number } {
const { method, path, query, headers: headers = {} } = options;

console.log(path, 'hear path');
const body =
isMultipartBody(options.body) ? options.body.body
: options.body ? JSON.stringify(options.body, null, 2)
Expand Down Expand Up @@ -373,9 +374,15 @@ export abstract class APIClient {
if (retriesRemaining == null) {
retriesRemaining = options.maxRetries ?? this.maxRetries;
}
// let req = '',
// url = '',

// if (options.isBaseModel) {
// }
console.log(options, 'options');
// options.path = '/v1/chat';
const { req, url, timeout } = this.buildRequest(options);

// console.log(this.buildRequest(options), '解析后的options');
await this.prepareRequest(req, { url, options });

debug('request', url, options, req.headers);
Expand Down Expand Up @@ -403,6 +410,7 @@ export abstract class APIClient {
const responseHeaders = createResponseHeaders(response.headers);

if (!response.ok) {
console.log('请求没成功');
if (retriesRemaining && this.shouldRetry(response)) {
return this.retryRequest(options, retriesRemaining, responseHeaders);
}
Expand Down Expand Up @@ -729,6 +737,8 @@ export type RequestOptions<Req extends {} = Record<string, unknown> | Readable>
headers?: Headers | undefined;

maxRetries?: number;
isBaseModel?: boolean;

stream?: boolean | undefined;
timeout?: number;
httpAgent?: Agent;
Expand All @@ -754,7 +764,7 @@ const requestOptionsKeys: KeysEnum<RequestOptions> = {
httpAgent: true,
signal: true,
idempotencyKey: true,

isBaseModel: true,
__binaryResponse: true,
};

Expand Down
3 changes: 3 additions & 0 deletions src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -139,6 +139,7 @@ export class OpenAI extends Core.APIClient {

completions: API.Completions = new API.Completions(this);
chat: API.Chat = new API.Chat(this);
baseChat: API.BaseChat = new API.BaseChat(this);
edits: API.Edits = new API.Edits(this);
embeddings: API.Embeddings = new API.Embeddings(this);
files: API.Files = new API.Files(this);
Expand Down Expand Up @@ -225,6 +226,8 @@ export namespace OpenAI {
export import CompletionCreateParamsStreaming = API.CompletionCreateParamsStreaming;

export import Chat = API.Chat;
export import BaseChat = API.BaseChat;

export import ChatCompletion = API.ChatCompletion;
export import ChatCompletionAssistantMessageParam = API.ChatCompletionAssistantMessageParam;
export import ChatCompletionChunk = API.ChatCompletionChunk;
Expand Down
Loading

0 comments on commit 1b2774d

Please sign in to comment.