-
-
Notifications
You must be signed in to change notification settings - Fork 721
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
fix(instructor/patch.py, tests/openai/test_multitask.py): Correct model checking and add new tests #413
fix(instructor/patch.py, tests/openai/test_multitask.py): Correct model checking and add new tests #413
Changes from 1 commit
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -3,6 +3,7 @@ | |
import logging | ||
from collections.abc import Iterable | ||
from functools import wraps | ||
import pandas as pd | ||
from tenacity import Retrying, AsyncRetrying, stop_after_attempt, RetryError | ||
from json import JSONDecodeError | ||
from typing import ( | ||
|
@@ -213,8 +214,7 @@ def process_response( | |
|
||
# ? This really hints at the fact that we need a better way of | ||
# ? attaching usage data and the raw response to the model we return. | ||
if isinstance(response_model, IterableBase): | ||
#! If the response model is a multitask, return the tasks | ||
if isinstance(model, IterableBase): | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. The variable 'model' is not defined in the scope of this function. This will cause a NameError at runtime. Please revert the change to 'response_model'. |
||
return [task for task in model.tasks] | ||
|
||
if isinstance(response_model, ParallelBase): | ||
|
@@ -267,7 +267,7 @@ async def process_response_async( | |
|
||
# ? This really hints at the fact that we need a better way of | ||
# ? attaching usage data and the raw response to the model we return. | ||
if isinstance(response_model, IterableBase): | ||
if isinstance(model, IterableBase): | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. The variable 'model' is not defined in the scope of this function. This will cause a NameError at runtime. Please revert the change to 'response_model'. |
||
#! If the response model is a multitask, return the tasks | ||
return [task for task in model.tasks] | ||
|
||
|
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,6 +1,6 @@ | ||
[tool.poetry] | ||
name = "instructor" | ||
version = "0.5.0" | ||
version = "0.5.1" | ||
description = "structured outputs for llm" | ||
authors = ["Jason Liu <[email protected]>"] | ||
license = "MIT" | ||
|
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -22,7 +22,6 @@ def test_multi_user(model, mode, client): | |
def stream_extract(input: str) -> Iterable[User]: | ||
return client.chat.completions.create( | ||
model=model, | ||
stream=True, | ||
response_model=Users, | ||
messages=[ | ||
{ | ||
|
@@ -54,6 +53,73 @@ def stream_extract(input: str) -> Iterable[User]: | |
async def test_multi_user_tools_mode_async(model, mode, aclient): | ||
client = instructor.patch(aclient, mode=mode) | ||
|
||
async def stream_extract(input: str) -> Iterable[User]: | ||
return await client.chat.completions.create( | ||
model=model, | ||
response_model=Users, | ||
messages=[ | ||
{ | ||
"role": "user", | ||
"content": ( | ||
f"Consider the data below:\n{input}" | ||
"Correctly segment it into entitites" | ||
"Make sure the JSON is correct" | ||
), | ||
}, | ||
], | ||
max_tokens=1000, | ||
) | ||
|
||
resp = [] | ||
async for user in await stream_extract(input="Jason is 20, Sarah is 30"): | ||
resp.append(user) | ||
print(resp) | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Please remove the `print(resp)` debugging statement before merging. |
||
assert len(resp) == 2 | ||
assert resp[0].name == "Jason" | ||
assert resp[0].age == 20 | ||
assert resp[1].name == "Sarah" | ||
assert resp[1].age == 30 | ||
|
||
|
||
@pytest.mark.parametrize("model, mode", product(models, modes)) | ||
def test_multi_user_stream(model, mode, client): | ||
client = instructor.patch(client, mode=mode) | ||
|
||
def stream_extract(input: str) -> Iterable[User]: | ||
return client.chat.completions.create( | ||
model=model, | ||
stream=True, | ||
response_model=Users, | ||
messages=[ | ||
{ | ||
"role": "system", | ||
"content": "You are a perfect entity extraction system", | ||
}, | ||
{ | ||
"role": "user", | ||
"content": ( | ||
f"Consider the data below:\n{input}" | ||
"Correctly segment it into entitites" | ||
"Make sure the JSON is correct" | ||
), | ||
}, | ||
], | ||
max_tokens=1000, | ||
) | ||
|
||
resp = [user for user in stream_extract(input="Jason is 20, Sarah is 30")] | ||
assert len(resp) == 2 | ||
assert resp[0].name == "Jason" | ||
assert resp[0].age == 20 | ||
assert resp[1].name == "Sarah" | ||
assert resp[1].age == 30 | ||
|
||
|
||
@pytest.mark.asyncio | ||
@pytest.mark.parametrize("model, mode", product(models, modes)) | ||
async def test_multi_user_tools_mode_async_stream(model, mode, aclient): | ||
client = instructor.patch(aclient, mode=mode) | ||
|
||
async def stream_extract(input: str) -> Iterable[User]: | ||
return await client.chat.completions.create( | ||
model=model, | ||
|
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
The variable 'model' is not defined in the scope of this function. This will cause a NameError at runtime. Please revert the change to 'response_model'.