Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

openai适配器支持usage #839

Merged
merged 1 commit into from
Oct 22, 2024
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Update adapter.py
  • Loading branch information
Alex-TG001 committed Oct 16, 2024

Verified

This commit was created on GitHub.com and signed with GitHub’s verified signature. The key has expired.
commit 8eacab953b99621be02951db325338d6b4e624c2
43 changes: 36 additions & 7 deletions python/qianfan/extensions/openai/adapter.py
Original file line number Diff line number Diff line change
@@ -181,6 +181,12 @@ def add_if_exist(openai_key: str, qianfan_key: Optional[str] = None) -> None:
if not isinstance(response_format, str):
response_format = response_format["type"]
qianfan_request["response_format"] = response_format
if "stream_options" in openai_request:
stream_options = openai_request["stream_options"]
qianfan_request["stream"] = True
if not isinstance(stream_options, str):
stream_options = stream_options["include_usage"]
qianfan_request["stream_options"] = stream_options
return qianfan_request

def openai_chat_request_to_qianfan(
@@ -464,6 +470,8 @@ async def task(n: int) -> AsyncIterator[Tuple[int, QfResponse]]:
tasks = [task(i) for i in range(n)]
results = merge_async_iters(*tasks)
base = None
total_prompt_tokens = 0
total_completion_tokens = 0
async for i, res in results:
if base is None:
base = {
@@ -513,11 +521,28 @@ async def task(n: int) -> AsyncIterator[Tuple[int, QfResponse]]:
choices[0]["delta"]["finish_reason"] = "tool_calls"
choices[0]["delta"]["function_call"] = res["function_call"]

if res["is_end"] and "usage" in res:
total_prompt_tokens += res["usage"]["prompt_tokens"]
total_completion_tokens += res["usage"]["completion_tokens"]

yield {
"choices": choices,
**base,
}

# After the stream ends, emit a final chunk carrying the aggregated usage info
base = base or {}
if total_prompt_tokens > 0 or total_completion_tokens > 0:
yield {
"choices": [],
"usage": {
"prompt_tokens": total_prompt_tokens,
"completion_tokens": total_completion_tokens,
"total_tokens": total_prompt_tokens + total_completion_tokens,
},
**base,
}

async def _completion_stream(
self, n: int, openai_request: OpenAIRequest, qianfan_request: QianfanRequest
) -> AsyncIterator[OpenAIResponse]:
@@ -536,13 +561,17 @@ async def task(n: int) -> AsyncIterator[Tuple[int, QfResponse]]:
base = None
async for i, res in results:
if base is None:
base = {
"id": res["id"],
"created": res["created"],
"model": openai_request["model"],
"system_fingerprint": "fp_?",
"object": "text_completion",
}
base = (
{
"id": res["id"],
"created": res["created"],
"model": openai_request["model"],
"system_fingerprint": "fp_?",
"object": "text_completion",
}
if base is None
else base
)
for j in range(n):
yield {
"choices": [