Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

openai_adapter支持usage #838

Closed
wants to merge 6 commits into from
Closed
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
Update adapter.py
  • Loading branch information
Alex-TG001 committed Oct 15, 2024
commit 7274301b0ec36dc14a4638cbd2c01a6eb317426d
10 changes: 3 additions & 7 deletions python/qianfan/extensions/openai/adapter.py
Original file line number Diff line number Diff line change
@@ -124,8 +124,6 @@ def openai_base_request_to_qianfan(
Convert general arguments in OpenAI request to Qianfan request.
"""
qianfan_request = copy.deepcopy(openai_request)
print("\n\nrequest:", qianfan_request, "\n\n")

def add_if_exist(openai_key: str, qianfan_key: Optional[str] = None) -> None:
qianfan_key = openai_key if qianfan_key is None else qianfan_key
if openai_key in openai_request:
@@ -197,7 +195,6 @@ def openai_chat_request_to_qianfan(
Convert chat request in OpenAI to Qianfan request.
"""
qianfan_request = self.openai_base_request_to_qianfan(openai_request)
print("\n\nchat:", qianfan_request, "\n\n")
messages = openai_request["messages"]
if messages[0]["role"] == "system":
if not self._ignore_system:
@@ -227,7 +224,6 @@ def openai_completion_request_to_qianfan(
Convert completion request in OpenAI to Qianfan request.
"""
qianfan_request = self.openai_base_request_to_qianfan(openai_request)
print("\n\ncompletion:", qianfan_request, "\n\n")
prompt = openai_request["prompt"]
if isinstance(prompt, list):
prompt = "".join(prompt)
@@ -568,14 +564,14 @@ async def task(n: int) -> AsyncIterator[Tuple[int, QfResponse]]:
"created": res["created"],
"model": openai_request["model"],
"system_fingerprint": "fp_?",
"object": "text_completion",
}
"object": "text_completion", # or "chat.completion.chunk", depending on the case
} if base is None else base
for j in range(n):
yield {
"choices": [
{
"index": j,
"delta": {"text": ""},
"delta": {"text": ""}, # for a message stream, this would be {"role": "assistant", "content": ""}
"logprobs": None,
"finish_reason": None,
}
Loading