Skip to content

Commit

Permalink
Merge pull request #2421 from hlohaus/model
Browse files Browse the repository at this point in the history
Fix optional fields in api
  • Loading branch information
hlohaus authored Nov 25, 2024
2 parents f2849fc + 0043e04 commit a722abb
Show file tree
Hide file tree
Showing 4 changed files with 21 additions and 18 deletions.
1 change: 1 addition & 0 deletions .github/workflows/publish-workflow.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@ jobs:
python -m etc.tool.openapi
- uses: actions/upload-artifact@v4
with:
name: openapi
path: openapi.json
publish:
runs-on: ubuntu-latest
Expand Down
16 changes: 8 additions & 8 deletions g4f/api/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -91,15 +91,15 @@ def create_app_debug(g4f_api_key: str = None):
class ChatCompletionsConfig(BaseModel):
    """Request-body schema for the chat completions API endpoint.

    NOTE(review): the visible diff replaces `Field(examples=[None])` with a
    plain `None` default on every optional field — per the commit message
    ("Fix optional fields in api") this makes the fields genuinely optional
    in the generated schema. This body resolves the diff residue (both old
    and new lines were present) to the post-change state.
    """
    # Conversation history; example shows the expected role/content shape.
    messages: Messages = Field(examples=[[{"role": "system", "content": ""}, {"role": "user", "content": ""}]])
    # Empty string lets the backend pick its default model.
    model: str = Field(default="")
    provider: Optional[str] = None
    stream: bool = False
    temperature: Optional[float] = None
    max_tokens: Optional[int] = None
    stop: Union[list[str], str, None] = None
    api_key: Optional[str] = None
    web_search: Optional[bool] = None
    proxy: Optional[str] = None
    conversation_id: Optional[str] = None

class ImageGenerationConfig(BaseModel):
prompt: str
Expand Down
19 changes: 10 additions & 9 deletions g4f/client/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -152,8 +152,7 @@ async def async_iter_response(
content = filter_json(content)
yield ChatCompletion.model_construct(content, finish_reason, completion_id, int(time.time()))
finally:
if hasattr(response, 'aclose'):
await safe_aclose(response)
await safe_aclose(response)

async def async_iter_append_model_and_provider(
response: AsyncChatCompletionResponseType
Expand All @@ -167,8 +166,7 @@ async def async_iter_append_model_and_provider(
chunk.provider = last_provider.get("name")
yield chunk
finally:
if hasattr(response, 'aclose'):
await safe_aclose(response)
await safe_aclose(response)

class Client(BaseClient):
def __init__(
Expand Down Expand Up @@ -292,7 +290,7 @@ async def async_generate(
proxy = self.client.proxy

response = None
if isinstance(provider, type) and issubclass(provider, AsyncGeneratorProvider):
if hasattr(provider_handler, "create_async_generator"):
messages = [{"role": "user", "content": f"Generate a image: {prompt}"}]
async for item in provider_handler.create_async_generator(model, messages, prompt=prompt, **kwargs):
if isinstance(item, ImageResponse):
Expand Down Expand Up @@ -354,7 +352,7 @@ async def async_create_variation(
if proxy is None:
proxy = self.client.proxy

if isinstance(provider, type) and issubclass(provider, AsyncGeneratorProvider):
if hasattr(provider, "create_async_generator"):
messages = [{"role": "user", "content": "create a variation of this image"}]
generator = None
try:
Expand All @@ -364,8 +362,7 @@ async def async_create_variation(
response = chunk
break
finally:
if generator and hasattr(generator, 'aclose'):
await safe_aclose(generator)
await safe_aclose(generator)
elif hasattr(provider, 'create_variation'):
if asyncio.iscoroutinefunction(provider.create_variation):
response = await provider.create_variation(image, model=model, response_format=response_format, proxy=proxy, **kwargs)
Expand Down Expand Up @@ -454,7 +451,11 @@ def create(
)
stop = [stop] if isinstance(stop, str) else stop

response = provider.create_completion(
if hasattr(provider, "create_async_generator"):
create_handler = provider.create_async_generator
else:
create_handler = provider.create_completion
response = create_handler(
model,
messages,
stream=stream,
Expand Down
3 changes: 2 additions & 1 deletion g4f/client/helper.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,8 @@ def filter_none(**kwargs) -> dict:

async def safe_aclose(generator: AsyncGenerator) -> None:
    """Close an async generator, logging (not raising) any failure.

    Safe to call with ``None`` or with objects that lack an ``aclose()``
    method, so call sites do not need their own guards. This body resolves
    the diff residue (the removed unconditional ``await generator.aclose()``
    line was shown alongside the added guarded version) to the post-change
    state of the commit.
    """
    try:
        # Guard centrally so callers can pass plain responses or None
        # without a hasattr/None check at every call site.
        if generator and hasattr(generator, 'aclose'):
            await generator.aclose()
    except Exception as e:
        logging.warning(f"Error while closing generator: {e}")

Expand Down

0 comments on commit a722abb

Please sign in to comment.