fix: Update deprecated DuckDuckGo search backend from 'api' to 'auto' (#2516)

* fix: Update deprecated DuckDuckGo search backend from 'api' to 'auto'

Fixes UserWarning: 'api' backend is deprecated, using backend='auto'
- Updated default backend parameter from 'api' to 'auto' in search function
- Aligns with latest duckduckgo-search library recommendations

* Update g4f/Provider/Blackbox.py

* Fix response_format=b64_json (g4f/client/__init__.py)

* Update g4f/Provider/Blackbox2.py g4f/Provider/BlackboxCreateAgent.py

---------

Co-authored-by: kqlio67 <>
kqlio67 authored Dec 30, 2024
1 parent 9a5c337 commit 90360cc
Showing 5 changed files with 35 additions and 217 deletions.
26 changes: 18 additions & 8 deletions g4f/Provider/Blackbox.py
@@ -8,6 +8,7 @@
 import aiohttp
 import asyncio
 from pathlib import Path
+import concurrent.futures

 from ..typing import AsyncResult, Messages, ImagesType
 from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
@@ -220,15 +221,24 @@ async def create_async_generator(
         use_internal_search = web_search and model in cls.web_search_models

         if web_search and not use_internal_search:
-            def run_search():
-                return get_search_message(messages[-1]["content"])
-
-            import concurrent.futures
-            with concurrent.futures.ThreadPoolExecutor() as executor:
-                messages[-1]["content"] = await asyncio.get_event_loop().run_in_executor(
-                    executor, run_search
-                )
+            try:
+                # Create a timeout for web search
+                async def run_search():
+                    with concurrent.futures.ThreadPoolExecutor() as executor:
+                        return await asyncio.get_event_loop().run_in_executor(
+                            executor,
+                            lambda: get_search_message(messages[-1]["content"])
+                        )
+
+                # Set a timeout of 10 seconds for web search
+                search_result = await asyncio.wait_for(run_search(), timeout=10.0)
+                messages[-1]["content"] = search_result
+            except asyncio.TimeoutError:
+                debug.log("Web search timed out, proceeding with original message")
+            except Exception as e:
+                debug.log(f"Web search failed: {str(e)}, proceeding with original message")

             web_search = False

         async def process_request():
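Note: the new code offloads the blocking get_search_message call to a worker thread and bounds it with asyncio.wait_for, so a stalled search no longer hangs the generator. Below is a minimal, self-contained sketch of that pattern; slow_lookup is a hypothetical stand-in for the real search call and is not code from the repository.

import asyncio
import concurrent.futures
import time

def slow_lookup(query: str) -> str:
    """Hypothetical stand-in for a blocking call such as get_search_message."""
    time.sleep(2)  # simulate slow, network-bound work
    return f"results for {query}"

async def lookup_with_timeout(query: str, timeout: float = 10.0) -> str:
    """Run the blocking call in a worker thread and give up after `timeout` seconds."""
    async def run() -> str:
        with concurrent.futures.ThreadPoolExecutor() as executor:
            return await asyncio.get_event_loop().run_in_executor(
                executor, lambda: slow_lookup(query)
            )
    try:
        return await asyncio.wait_for(run(), timeout=timeout)
    except asyncio.TimeoutError:
        return query  # fall back to the original input, as the provider does

print(asyncio.run(lookup_with_timeout("deprecated duckduckgo backend")))

If the lookup exceeds the timeout, the original query is returned unchanged, which mirrors the provider logging the failure and continuing with the unmodified message.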
197 changes: 0 additions & 197 deletions g4f/Provider/Blackbox2.py

This file was deleted.

2 changes: 1 addition & 1 deletion g4f/Provider/BlackboxCreateAgent.py
@@ -37,7 +37,7 @@ def _get_cache_file(cls) -> Path:
         """Returns the path to the cache file."""
         dir = Path(get_cookies_dir())
         dir.mkdir(exist_ok=True)
-        return dir / 'blackbox2.json'
+        return dir / 'blackbox_create_agent.json'

     @classmethod
     def _load_cached_value(cls) -> str | None:
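The rename only changes which JSON file this provider caches its validated value in. A hypothetical, standalone sketch of that read/write pattern follows; the local directory name and the "validated_value" key are assumptions for illustration, not code from the repository.

import json
from pathlib import Path

# Illustrative cache helpers; only the file name comes from the diff above.
cache_dir = Path("./har_and_cookies")
cache_dir.mkdir(exist_ok=True)
cache_file = cache_dir / "blackbox_create_agent.json"

def save_cached_value(value: str) -> None:
    cache_file.write_text(json.dumps({"validated_value": value}))

def load_cached_value() -> str | None:
    if cache_file.exists():
        return json.loads(cache_file.read_text()).get("validated_value")
    return None

save_cached_value("example-session-id")
print(load_cached_value())  # -> example-session-id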
21 changes: 13 additions & 8 deletions g4f/client/__init__.py
@@ -5,6 +5,7 @@
 import random
 import string
 import asyncio
+import aiohttp
 import base64
 import json
 from typing import Union, AsyncIterator, Iterator, Coroutine, Optional
@@ -486,17 +487,21 @@ async def _process_image_response(
         if response_format == "url":
             # Return original URLs without saving locally
             images = [Image.model_construct(url=image, revised_prompt=response.alt) for image in response.get_list()]
+        elif response_format == "b64_json":
+            # Convert URLs directly to base64 without saving
+            async def get_b64_from_url(url: str) -> Image:
+                async with aiohttp.ClientSession() as session:
+                    async with session.get(url, proxy=proxy) as resp:
+                        if resp.status == 200:
+                            image_data = await resp.read()
+                            b64_data = base64.b64encode(image_data).decode()
+                            return Image.model_construct(b64_json=b64_data, revised_prompt=response.alt)
+            images = await asyncio.gather(*[get_b64_from_url(image) for image in response.get_list()])
         else:
             # Save locally for None (default) case
             images = await copy_images(response.get_list(), response.get("cookies"), proxy)
-            if response_format == "b64_json":
-                async def process_image_item(image_file: str) -> Image:
-                    with open(os.path.join(images_dir, os.path.basename(image_file)), "rb") as file:
-                        image_data = base64.b64encode(file.read()).decode()
-                    return Image.model_construct(b64_json=image_data, revised_prompt=response.alt)
-                images = await asyncio.gather(*[process_image_item(image) for image in images])
-            else:
-                images = [Image.model_construct(url=f"/images/{os.path.basename(image)}", revised_prompt=response.alt) for image in images]
+            images = [Image.model_construct(url=f"/images/{os.path.basename(image)}", revised_prompt=response.alt) for image in images]

         return ImagesResponse.model_construct(
             created=int(time.time()),
             data=images,
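The b64_json path now downloads each image URL and encodes the bytes directly instead of re-reading a locally saved copy. A minimal standalone sketch of that download-and-encode step is below; the helper name, sample URL, and fallback value are illustrative only.

import asyncio
import base64
import aiohttp

async def url_to_b64(url: str, proxy: str | None = None) -> str | None:
    """Fetch an image over HTTP and return its contents as a base64 string."""
    async with aiohttp.ClientSession() as session:
        async with session.get(url, proxy=proxy) as resp:
            if resp.status == 200:
                return base64.b64encode(await resp.read()).decode()
            return None  # non-200 responses yield no image data

b64 = asyncio.run(url_to_b64("https://www.python.org/static/img/python-logo.png"))
print("no image" if b64 is None else b64[:60] + "...")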
6 changes: 3 additions & 3 deletions g4f/web_search.py
@@ -102,7 +102,7 @@ async def fetch_and_scrape(session: ClientSession, url: str, max_words: int = None)
     except ClientError:
         return

-async def search(query: str, max_results: int = 5, max_words: int = 2500, backend: str = "api", add_text: bool = True, timeout: int = 5, region: str = "wt-wt") -> SearchResults:
+async def search(query: str, max_results: int = 5, max_words: int = 2500, backend: str = "auto", add_text: bool = True, timeout: int = 5, region: str = "wt-wt") -> SearchResults:
     if not has_requirements:
         raise MissingRequirementsError('Install "duckduckgo-search" and "beautifulsoup4" package | pip install -U g4f[search]')
     with DDGS() as ddgs:
@@ -113,7 +113,7 @@ async def search(query: str, max_results: int = 5, max_words: int = 2500, backend: str = "auto", add_text: bool = True, timeout: int = 5, region: str = "wt-wt") -> SearchResults:
                 safesearch="moderate",
                 timelimit="y",
                 max_results=max_results,
-                backend=backend,
+                backend=backend,  # Changed from 'api' to 'auto'
             ):
                 results.append(SearchResultEntry(
                     result["title"],
@@ -169,4 +169,4 @@ def get_search_message(prompt: str, raise_search_exceptions=False, **kwargs) -> str:
         if raise_search_exceptions:
             raise e
         debug.log(f"Couldn't do web search: {e.__class__.__name__}: {e}")
-    return prompt
\ No newline at end of file
+    return prompt
