Merge pull request #2543 from hlohaus/sun
Fix invalid escape in requests module
hlohaus authored Jan 7, 2025
2 parents b35240c + f05d069 commit d4fde1d
Showing 26 changed files with 328 additions and 230 deletions.
docs/requests.md — 41 changes: 16 additions & 25 deletions
@@ -73,7 +73,6 @@ For scenarios where you want to receive partial responses or stream data as it's
```python
import requests
import json
-from queue import Queue

def fetch_response(url, model, messages):
"""
@@ -87,7 +86,7 @@ def fetch_response(url, model, messages):
    Returns:
        requests.Response: The streamed response object.
    """
-    payload = {"model": model, "messages": messages}
+    payload = {"model": model, "messages": messages, "stream": True}
    headers = {
        "Content-Type": "application/json",
        "Accept": "text/event-stream",
@@ -99,7 +98,7 @@ def fetch_response(url, model, messages):
    )
    return response

-def process_stream(response, output_queue):
+def process_stream(response):
"""
Processes the streamed response and extracts messages.
@@ -111,37 +110,31 @@ def process_stream(response, output_queue):
        if line:
            line = line.decode("utf-8")
            if line == "data: [DONE]":
+                print("\n\nConversation completed.")
                break
            if line.startswith("data: "):
                try:
                    data = json.loads(line[6:])
-                    message = data.get("message", "")
+                    message = data.get("choices", [{}])[0].get("delta", {}).get("content")
                    if message:
-                        output_queue.put(message)
-                except json.JSONDecodeError:
+                        print(message, end="", flush=True)
+                except json.JSONDecodeError as e:
+                    print(f"Error decoding JSON: {e}")
                    continue

# Define the API endpoint
-chat_url = "http://localhost/v1/chat/completions"
+chat_url = "http://localhost:8080/v1/chat/completions"

# Define the payload
-model = "gpt-4o"
-messages = [{"role": "system", "content": "Hello, how are you?"}]
-
-# Initialize the queue to store output messages
-output_queue = Queue()
+model = ""
+messages = [{"role": "user", "content": "Hello, how are you?"}]

try:
    # Fetch the streamed response
    response = fetch_response(chat_url, model, messages)

    # Process the streamed response
-    process_stream(response, output_queue)
-
-    # Retrieve messages from the queue
-    while not output_queue.empty():
-        msg = output_queue.get()
-        print(msg)
+    process_stream(response)

except Exception as e:
    print(f"An error occurred: {e}")
@@ -150,23 +143,21 @@ except Exception as e:
**Explanation:**
- **`fetch_response` Function:**
  - Sends a POST request to the streaming chat completions endpoint with the specified model and messages.
-  - Sets the `Accept` header to `text/event-stream` to enable streaming.
+  - Sets the `stream` parameter to `true` to enable streaming.
  - Raises an exception if the request fails.

- **`process_stream` Function:**
  - Iterates over each line in the streamed response.
  - Decodes the line and checks for the termination signal `"data: [DONE]"`.
  - Parses lines that start with `"data: "` to extract the message content.
-  - Enqueues the extracted messages into `output_queue` for further processing.

- **Main Execution:**
  - Defines the API endpoint, model, and messages.
-  - Initializes a `Queue` to store incoming messages.
  - Fetches and processes the streamed response.
-  - Retrieves and prints messages from the queue.
+  - Retrieves and prints messages.
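For reference, each `data:` line in such a stream is expected to carry an OpenAI-style chunk object, which is why the code drills into `choices[0].delta.content`. A minimal sketch with a hypothetical sample line (the exact fields can vary by server):

```python
import json

# Hypothetical sample of a single SSE line from an OpenAI-compatible endpoint.
line = 'data: {"choices": [{"delta": {"content": "Hello"}, "index": 0}]}'

# Strip the "data: " prefix and extract the delta content,
# mirroring the parsing in process_stream above.
data = json.loads(line[6:])
message = data.get("choices", [{}])[0].get("delta", {}).get("content")
print(message)  # -> Hello
```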

**Usage Tips:**
-- Ensure your local server supports streaming and the `Accept` header appropriately.
+- Ensure your local server supports streaming.
- Adjust the `chat_url` if your local server runs on a different port or path.
- Use threading or asynchronous programming for handling streams in real-time applications.
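A minimal sketch of the threading tip, reusing the `fetch_response` and `process_stream` helpers defined above (endpoint and payload are the same assumptions as in the example):

```python
import threading

# Run the blocking stream reader on a worker thread so the main thread
# stays responsive; assumes fetch_response/process_stream from above.
response = fetch_response(chat_url, model, messages)
worker = threading.Thread(target=process_stream, args=(response,), daemon=True)
worker.start()

# ... the main thread is free to do other work here ...

worker.join()  # wait for the stream to finish before exiting
```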

@@ -286,7 +277,7 @@ async def fetch_response_async(url, model, messages, output_queue):
        messages (list): A list of message dictionaries.
        output_queue (Queue): A queue to store the extracted messages.
    """
-    payload = {"model": model, "messages": messages}
+    payload = {"model": model, "messages": messages, "stream": True}
    headers = {
        "Content-Type": "application/json",
        "Accept": "text/event-stream",
@@ -305,7 +296,7 @@ async def fetch_response_async(url, model, messages, output_queue):
                if decoded_line.startswith("data: "):
                    try:
                        data = json.loads(decoded_line[6:])
-                        message = data.get("message", "")
+                        message = data.get("choices", [{}])[0].get("delta", {}).get("content")
                        if message:
                            output_queue.put(message)
                    except json.JSONDecodeError:
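A rough sketch of driving `fetch_response_async` end to end; the endpoint, empty model string, and drain loop mirror the synchronous example and are assumptions, not part of this diff:

```python
import asyncio
from queue import Queue

async def main():
    output_queue = Queue()
    # Assumes fetch_response_async as defined in docs/requests.md above.
    await fetch_response_async(
        "http://localhost:8080/v1/chat/completions",
        model="",  # empty string lets the server pick its default model
        messages=[{"role": "user", "content": "Hello, how are you?"}],
        output_queue=output_queue,
    )
    # Drain the queue once the stream has completed.
    while not output_queue.empty():
        print(output_queue.get(), end="", flush=True)

asyncio.run(main())
```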
g4f/Provider/DDG.py — 2 changes: 1 addition & 1 deletion
@@ -73,7 +73,7 @@ async def create_async_generator(
"Content-Type": "application/json",
},
cookies: dict = None,
max_retries: int = 3,
max_retries: int = 0,
**kwargs
) -> AsyncResult:
if cookies is None and conversation is not None:
g4f/Provider/GizAI.py — 1 change: 0 additions & 1 deletion
@@ -6,7 +6,6 @@
from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
from .helper import format_prompt

-
class GizAI(AsyncGeneratorProvider, ProviderModelMixin):
    url = "https://app.giz.ai/assistant"
    api_endpoint = "https://app.giz.ai/api/data/users/inferenceServer.infer"
g4f/Provider/Mhystical.py — 4 changes: 3 additions & 1 deletion
@@ -18,6 +18,7 @@ class Mhystical(OpenaiAPI):
label = "Mhystical"
url = "https://mhystical.cc"
api_endpoint = "https://api.mhystical.cc/v1/completions"
login_url = "https://mhystical.cc/dashboard"
working = True
needs_auth = False
supports_stream = False # Set to False, as streaming is not specified in ChatifyAI
@@ -37,11 +38,12 @@ def create_async_generator(
        model: str,
        messages: Messages,
        stream: bool = False,
+        api_key: str = "mhystical",
        **kwargs
    ) -> AsyncResult:
        model = cls.get_model(model)
        headers = {
-            "x-api-key": "mhystical",
+            "x-api-key": api_key,
            "Content-Type": "application/json",
            "accept": "*/*",
            "cache-control": "no-cache",
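The practical effect of this change is that callers can override the previously hard-coded key, which now flows into the `x-api-key` header. A rough usage sketch, assuming g4f's client interface and that `api_key` is forwarded through to the provider:

```python
from g4f.client import Client
from g4f.Provider import Mhystical

client = Client(provider=Mhystical)
response = client.chat.completions.create(
    model="gpt-4",  # assumption: a model this provider serves
    messages=[{"role": "user", "content": "Hi"}],
    api_key="my-custom-key",  # falls back to "mhystical" if omitted
)
print(response.choices[0].message.content)
```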
g4f/Provider/PollinationsAI.py — 23 changes: 11 additions & 12 deletions
@@ -7,12 +7,12 @@
from typing import Optional
from aiohttp import ClientSession

+from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
+from ..requests.raise_for_status import raise_for_status
from ..typing import AsyncResult, Messages
from ..image import ImageResponse
-from .needs_auth.OpenaiAPI import OpenaiAPI

-class PollinationsAI(OpenaiAPI):
+class PollinationsAI(AsyncGeneratorProvider, ProviderModelMixin):
    label = "Pollinations AI"
    url = "https://pollinations.ai"

@@ -21,21 +21,21 @@ class PollinationsAI(OpenaiAPI):
    supports_stream = True
    supports_system_message = True
    supports_message_history = True

    # API endpoints base
    api_base = "https://text.pollinations.ai/openai"

    # API endpoints
    text_api_endpoint = "https://text.pollinations.ai"
    image_api_endpoint = "https://image.pollinations.ai"

    # Models configuration
    default_model = "openai"
    default_image_model = "flux"

    image_models = []
    models = []

    additional_models_image = ["midjourney", "dall-e-3"]
    additional_models_text = ["sur", "sur-mistral", "claude"]
    model_aliases = {
@@ -100,7 +100,7 @@ async def create_async_generator(
        **kwargs
    ) -> AsyncResult:
        model = cls.get_model(model)

-        # Check if models
+        # Image generation
        if model in cls.image_models:
@@ -151,7 +151,6 @@ async def _generate_image(
        if seed is None:
            seed = random.randint(0, 10000)

-
        headers = {
            'Accept': '*/*',
            'Accept-Language': 'en-US,en;q=0.9',
@@ -177,7 +176,7 @@

        async with session.head(url, proxy=proxy) as response:
            if response.status == 200:
-                image_response = ImageResponse(images=url, alt=messages[-1]["content"])
+                image_response = ImageResponse(images=url, alt=messages[-1]["content"] if prompt is None else prompt)
                yield image_response

    @classmethod
@@ -195,7 +194,7 @@ async def _generate_text(
    ) -> AsyncResult:
        if api_key is None:
            api_key = "dummy"  # Default value if api_key is not provided

        headers = {
            "accept": "*/*",
            "accept-language": "en-US,en;q=0.9",
@@ -215,7 +214,7 @@
"jsonMode": False,
"stream": stream
}

async with session.post(cls.text_api_endpoint, json=data, proxy=proxy) as response:
response.raise_for_status()
async for chunk in response.content:
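With this restructuring, requests for models in `image_models` route to `_generate_image` and everything else to `_generate_text`. A rough sketch of calling the provider's async generator directly (the model name and message here are assumptions):

```python
import asyncio
from g4f.Provider import PollinationsAI

async def main():
    # "flux" is the default image model above, so this request should
    # route to _generate_image and yield an ImageResponse with a URL.
    async for item in PollinationsAI.create_async_generator(
        model="flux",
        messages=[{"role": "user", "content": "a sunset over the sea"}],
    ):
        print(item)

asyncio.run(main())
```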
g4f/Provider/needs_auth/DeepInfra.py — 1 change: 1 addition & 0 deletions
@@ -7,6 +7,7 @@
class DeepInfra(OpenaiAPI):
    label = "DeepInfra"
    url = "https://deepinfra.com"
+    login_url = "https://deepinfra.com/dash/api_keys"
    working = True
    api_base = "https://api.deepinfra.com/v1/openai",
    needs_auth = True
g4f/Provider/needs_auth/DeepSeek.py — 14 changes: 14 additions & 0 deletions
@@ -0,0 +1,14 @@
+from __future__ import annotations
+
+from .OpenaiAPI import OpenaiAPI
+
+class DeepSeek(OpenaiAPI):
+    label = "DeepSeek"
+    url = "https://platform.deepseek.com"
+    login_url = "https://platform.deepseek.com/api_keys"
+    working = True
+    api_base = "https://api.deepseek.com"
+    needs_auth = True
+    supports_stream = True
+    supports_message_history = True
+    default_model = "deepseek-chat"
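A rough usage sketch for the new provider, assuming g4f's client interface and that the class is exported from `g4f.Provider.needs_auth` (a real key from platform.deepseek.com is required, since `needs_auth` is `True`):

```python
from g4f.client import Client
from g4f.Provider.needs_auth import DeepSeek

client = Client(provider=DeepSeek)
response = client.chat.completions.create(
    model="deepseek-chat",  # the provider's default_model
    messages=[{"role": "user", "content": "Hello!"}],
    api_key="sk-...",  # placeholder for your DeepSeek API key
)
print(response.choices[0].message.content)
```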
g4f/Provider/needs_auth/GlhfChat.py — 1 change: 1 addition & 0 deletions
@@ -5,6 +5,7 @@
class GlhfChat(OpenaiAPI):
    label = "GlhfChat"
    url = "https://glhf.chat"
+    login_url = "https://glhf.chat/users/settings/api"
    api_base = "https://glhf.chat/api/openai/v1"
    working = True
    model_aliases = {
g4f/Provider/needs_auth/OpenaiAPI.py — 2 changes: 1 addition & 1 deletion
@@ -8,7 +8,7 @@
from ...typing import Union, Optional, AsyncResult, Messages, ImagesType
from ...requests import StreamSession, raise_for_status
from ...providers.response import FinishReason, ToolCalls, Usage
-from ...errors import MissingAuthError, ResponseError
+from ...errors import MissingAuthError
from ...image import to_data_uri
from ... import debug
