diff --git a/g4f/Provider/needs_auth/HuggingChat.py b/g4f/Provider/needs_auth/HuggingChat.py
index dfdd957902f..dec74fe61a0 100644
--- a/g4f/Provider/needs_auth/HuggingChat.py
+++ b/g4f/Provider/needs_auth/HuggingChat.py
@@ -1,16 +1,17 @@
 from __future__ import annotations

 import json
-import requests

 try:
-    from curl_cffi.requests import Session
+    from curl_cffi.requests import Session, CurlMime
     has_curl_cffi = True
 except ImportError:
     has_curl_cffi = False
-from ...typing import CreateResult, Messages
+
+from ...typing import CreateResult, Messages, Cookies
 from ...errors import MissingRequirementsError
 from ...requests.raise_for_status import raise_for_status
+from ...cookies import get_cookies
 from ..base_provider import ProviderModelMixin, AbstractProvider
 from ..helper import format_prompt
@@ -53,127 +54,130 @@ def create_completion(
         model: str,
         messages: Messages,
         stream: bool,
+        web_search: bool = False,
+        cookies: Cookies = None,
         **kwargs
     ) -> CreateResult:
         if not has_curl_cffi:
             raise MissingRequirementsError('Install "curl_cffi" package | pip install -U curl_cffi')
         model = cls.get_model(model)

+        if cookies is None:
+            cookies = get_cookies("huggingface.co")
+
+        session = Session(cookies=cookies)
+        session.headers = {
+            'accept': '*/*',
+            'accept-language': 'en',
+            'cache-control': 'no-cache',
+            'origin': 'https://huggingface.co',
+            'pragma': 'no-cache',
+            'priority': 'u=1, i',
+            'referer': 'https://huggingface.co/chat/',
+            'sec-ch-ua': '"Not)A;Brand";v="99", "Google Chrome";v="127", "Chromium";v="127"',
+            'sec-ch-ua-mobile': '?0',
+            'sec-ch-ua-platform': '"macOS"',
+            'sec-fetch-dest': 'empty',
+            'sec-fetch-mode': 'cors',
+            'sec-fetch-site': 'same-origin',
+            'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/127.0.0.0 Safari/537.36',
+        }
+        json_data = {
+            'model': model,
+        }
+        response = session.post('https://huggingface.co/chat/conversation', json=json_data)
+        raise_for_status(response)
+
+        conversationId = response.json().get('conversationId')

-        if model in cls.models:
-            session = Session()
-            session.headers = {
-                'accept': '*/*',
-                'accept-language': 'en',
-                'cache-control': 'no-cache',
-                'origin': 'https://huggingface.co',
-                'pragma': 'no-cache',
-                'priority': 'u=1, i',
-                'referer': 'https://huggingface.co/chat/',
-                'sec-ch-ua': '"Not)A;Brand";v="99", "Google Chrome";v="127", "Chromium";v="127"',
-                'sec-ch-ua-mobile': '?0',
-                'sec-ch-ua-platform': '"macOS"',
-                'sec-fetch-dest': 'empty',
-                'sec-fetch-mode': 'cors',
-                'sec-fetch-site': 'same-origin',
-                'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/127.0.0.0 Safari/537.36',
-            }
-            json_data = {
-                'model': model,
-            }
-            response = session.post('https://huggingface.co/chat/conversation', json=json_data)
-            raise_for_status(response)
-
-            conversationId = response.json().get('conversationId')
-
-            # Get the data response and parse it properly
-            response = session.get(f'https://huggingface.co/chat/conversation/{conversationId}/__data.json?x-sveltekit-invalidated=11')
-            raise_for_status(response)
-
-            # Split the response content by newlines and parse each line as JSON
+        # Get the data response and parse it properly
+        response = session.get(f'https://huggingface.co/chat/conversation/{conversationId}/__data.json?x-sveltekit-invalidated=11')
+        raise_for_status(response)
+
+        # Split the response content by newlines and parse each line as JSON
+        try:
+            json_data = None
+            for line in response.text.split('\n'):
+                if line.strip():
+                    try:
+                        parsed = json.loads(line)
+                        if isinstance(parsed, dict) and "nodes" in parsed:
+                            json_data = parsed
+                            break
+                    except json.JSONDecodeError:
+                        continue
+
+            if not json_data:
+                raise RuntimeError("Failed to parse response data")
+
+            data: list = json_data["nodes"][1]["data"]
+            keys: list[int] = data[data[0]["messages"]]
+            message_keys: dict = data[keys[0]]
+            messageId: str = data[message_keys["id"]]
+
+        except (KeyError, IndexError, TypeError) as e:
+            raise RuntimeError(f"Failed to extract message ID: {str(e)}")
+
+        settings = {
+            "inputs": format_prompt(messages),
+            "id": messageId,
+            "is_retry": False,
+            "is_continue": False,
+            "web_search": web_search,
+            "tools": []
+        }
+
+        headers = {
+            'accept': '*/*',
+            'accept-language': 'en',
+            'cache-control': 'no-cache',
+            'origin': 'https://huggingface.co',
+            'pragma': 'no-cache',
+            'priority': 'u=1, i',
+            'referer': f'https://huggingface.co/chat/conversation/{conversationId}',
+            'sec-ch-ua': '"Not)A;Brand";v="99", "Google Chrome";v="127", "Chromium";v="127"',
+            'sec-ch-ua-mobile': '?0',
+            'sec-ch-ua-platform': '"macOS"',
+            'sec-fetch-dest': 'empty',
+            'sec-fetch-mode': 'cors',
+            'sec-fetch-site': 'same-origin',
+            'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/127.0.0.0 Safari/537.36',
+        }
+
+        data = CurlMime()
+        data.addpart('data', data=json.dumps(settings, separators=(',', ':')))
+
+        response = session.post(
+            f'https://huggingface.co/chat/conversation/{conversationId}',
+            cookies=session.cookies,
+            headers=headers,
+            multipart=data,
+            stream=True
+        )
+        raise_for_status(response)
+
+        full_response = ""
+        for line in response.iter_lines():
+            if not line:
+                continue
             try:
-                json_data = None
-                for line in response.text.split('\n'):
-                    if line.strip():
-                        try:
-                            parsed = json.loads(line)
-                            if isinstance(parsed, dict) and "nodes" in parsed:
-                                json_data = parsed
-                                break
-                        except json.JSONDecodeError:
-                            continue
-
-                if not json_data:
-                    raise RuntimeError("Failed to parse response data")
-
-                data: list = json_data["nodes"][1]["data"]
-                keys: list[int] = data[data[0]["messages"]]
-                message_keys: dict = data[keys[0]]
-                messageId: str = data[message_keys["id"]]
-
-            except (KeyError, IndexError, TypeError) as e:
-                raise RuntimeError(f"Failed to extract message ID: {str(e)}")
-
-            settings = {
-                "inputs": format_prompt(messages),
-                "id": messageId,
-                "is_retry": False,
-                "is_continue": False,
-                "web_search": False,
-                "tools": []
-            }
-
-            headers = {
-                'accept': '*/*',
-                'accept-language': 'en',
-                'cache-control': 'no-cache',
-                'origin': 'https://huggingface.co',
-                'pragma': 'no-cache',
-                'priority': 'u=1, i',
-                'referer': f'https://huggingface.co/chat/conversation/{conversationId}',
-                'sec-ch-ua': '"Not)A;Brand";v="99", "Google Chrome";v="127", "Chromium";v="127"',
-                'sec-ch-ua-mobile': '?0',
-                'sec-ch-ua-platform': '"macOS"',
-                'sec-fetch-dest': 'empty',
-                'sec-fetch-mode': 'cors',
-                'sec-fetch-site': 'same-origin',
-                'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/127.0.0.0 Safari/537.36',
-            }
-
-            files = {
-                'data': (None, json.dumps(settings, separators=(',', ':'))),
-            }
-
-            response = requests.post(
-                f'https://huggingface.co/chat/conversation/{conversationId}',
-                cookies=session.cookies,
-                headers=headers,
-                files=files,
-            )
-            raise_for_status(response)
-
-            full_response = ""
-            for line in response.iter_lines():
-                if not line:
-                    continue
-                try:
-                    line = json.loads(line)
-                except json.JSONDecodeError as e:
-                    print(f"Failed to decode JSON: {line}, error: {e}")
-                    continue
-
-                if "type" not in line:
-                    raise RuntimeError(f"Response: {line}")
-
-                elif line["type"] == "stream":
-                    token = line["token"].replace('\u0000', '')
-                    full_response += token
-                    if stream:
-                        yield token
-
-                elif line["type"] == "finalAnswer":
-                    break
+                line = json.loads(line)
+            except json.JSONDecodeError as e:
+                print(f"Failed to decode JSON: {line}, error: {e}")
+                continue

-            full_response = full_response.replace('<|im_end|', '').replace('\u0000', '').strip()
+            if "type" not in line:
+                raise RuntimeError(f"Response: {line}")
+
+            elif line["type"] == "stream":
+                token = line["token"].replace('\u0000', '')
+                full_response += token
+                if stream:
+                    yield token
+
+            elif line["type"] == "finalAnswer":
+                break
+
+        full_response = full_response.replace('<|im_end|', '').replace('\u0000', '').strip()

-            if not stream:
-                yield full_response
+        if not stream:
+            yield full_response
\ No newline at end of file
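For reference, the request flow introduced above, condensed into a standalone sketch. This is not the provider code itself: it assumes curl_cffi is installed and that `cookies` carries a valid huggingface.co session, and it omits the browser-style headers, status checks, and model validation the provider performs. The endpoint paths, the "data" form field, and the JSON event types are taken from the diff.

    import json
    from curl_cffi.requests import Session, CurlMime

    def stream_chat(prompt: str, model: str, cookies: dict, web_search: bool = False):
        session = Session(cookies=cookies)

        # 1. Create a conversation for the chosen model.
        response = session.post("https://huggingface.co/chat/conversation", json={"model": model})
        conversation_id = response.json()["conversationId"]

        # 2. Fetch the SvelteKit __data.json blob and walk it to the id of the message slot.
        data_url = f"https://huggingface.co/chat/conversation/{conversation_id}/__data.json?x-sveltekit-invalidated=11"
        json_data = None
        for line in session.get(data_url).text.split("\n"):
            if not line.strip():
                continue
            try:
                parsed = json.loads(line)
            except json.JSONDecodeError:
                continue
            if isinstance(parsed, dict) and "nodes" in parsed:
                json_data = parsed
                break
        data = json_data["nodes"][1]["data"]
        keys = data[data[0]["messages"]]
        message_id = data[data[keys[0]]["id"]]

        # 3. Send the prompt as a multipart form field named "data".
        mime = CurlMime()
        mime.addpart("data", data=json.dumps({
            "inputs": prompt,
            "id": message_id,
            "is_retry": False,
            "is_continue": False,
            "web_search": web_search,
            "tools": [],
        }, separators=(",", ":")))
        response = session.post(
            f"https://huggingface.co/chat/conversation/{conversation_id}",
            multipart=mime,
            stream=True,
        )

        # 4. The reply is a stream of JSON lines; "stream" events carry tokens.
        for raw in response.iter_lines():
            if not raw:
                continue
            event = json.loads(raw)
            if event.get("type") == "stream":
                yield event["token"].replace("\u0000", "")
            elif event.get("type") == "finalAnswer":
                break
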
print(f"Failed to decode JSON: {line}, error: {e}") - continue - - if "type" not in line: - raise RuntimeError(f"Response: {line}") - - elif line["type"] == "stream": - token = line["token"].replace('\u0000', '') - full_response += token - if stream: - yield token - - elif line["type"] == "finalAnswer": - break + line = json.loads(line) + except json.JSONDecodeError as e: + print(f"Failed to decode JSON: {line}, error: {e}") + continue - full_response = full_response.replace('<|im_end|', '').replace('\u0000', '').strip() + if "type" not in line: + raise RuntimeError(f"Response: {line}") + + elif line["type"] == "stream": + token = line["token"].replace('\u0000', '') + full_response += token + if stream: + yield token + + elif line["type"] == "finalAnswer": + break + + full_response = full_response.replace('<|im_end|', '').replace('\u0000', '').strip() - if not stream: - yield full_response + if not stream: + yield full_response \ No newline at end of file diff --git a/g4f/gui/client/static/css/style.css b/g4f/gui/client/static/css/style.css index d5546f480c4..6105223b2f8 100644 --- a/g4f/gui/client/static/css/style.css +++ b/g4f/gui/client/static/css/style.css @@ -92,6 +92,10 @@ body { height: 100vh; } +a:-webkit-any-link { + color: var(--accent); +} + .row { display: flex; gap: 10px; @@ -124,7 +128,7 @@ body { .new_version a { color: var(--colour-4); - text-decoration: underline dotted; + text-decoration: underline; } .conversations { @@ -975,11 +979,6 @@ ul { display: flex; } - -a:-webkit-any-link { - color: var(--accent); -} - .conversation .user-input textarea { font-size: 15px; width: 100%; @@ -1021,7 +1020,6 @@ a:-webkit-any-link { background-image: url('data:image/svg+xml;utf-8,'); background-repeat: no-repeat; background-position: center; - transition: background-color 200ms ease, transform 200ms ease-out } .hljs-copy-button:hover { diff --git a/g4f/gui/client/static/js/chat.v1.js b/g4f/gui/client/static/js/chat.v1.js index d1fd886d1b7..834a5de9b7f 100644 --- a/g4f/gui/client/static/js/chat.v1.js +++ b/g4f/gui/client/static/js/chat.v1.js @@ -1065,6 +1065,7 @@ async function hide_sidebar() { sidebar_button.classList.remove("rotated"); settings.classList.add("hidden"); chat.classList.remove("hidden"); + log_storage.classList.add("hidden"); if (window.location.pathname == "/menu/" || window.location.pathname == "/settings/") { history.back(); } @@ -1182,31 +1183,6 @@ const say_hello = async () => { } } -// Theme storage for recurring viewers -const storeTheme = function (theme) { - appStorage.setItem("theme", theme); -}; - -// set theme when visitor returns -const setTheme = function () { - const activeTheme = appStorage.getItem("theme"); - colorThemes.forEach((themeOption) => { - if (themeOption.id === activeTheme) { - themeOption.checked = true; - } - }); - // fallback for no :has() support - document.documentElement.className = activeTheme; -}; - -colorThemes.forEach((themeOption) => { - themeOption.addEventListener("click", () => { - storeTheme(themeOption.id); - // fallback for no :has() support - document.documentElement.className = themeOption.id; - }); -}); - function count_tokens(model, text) { if (model) { if (window.llamaTokenizer) @@ -1273,7 +1249,6 @@ window.addEventListener('pywebviewready', async function() { }); async function on_load() { - setTheme(); count_input(); if (/\/chat\/.+/.test(window.location.href)) { @@ -1290,8 +1265,7 @@ async function on_api() { if (prompt_lock) return; // If not mobile - if (!window.matchMedia("(pointer:coarse)").matches) - if 
+        if (!window.matchMedia("(pointer:coarse)").matches && evt.keyCode === 13 && !evt.shiftKey) {
                 evt.preventDefault();
                 console.log("pressed enter");
                 prompt_lock = true;
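If it helps with testing the new `web_search` and `cookies` keyword arguments end to end: extra keyword arguments passed to g4f are normally forwarded to the provider's create_completion, so something like the sketch below should exercise this code path. The import path, the kwarg forwarding, and the model id are assumptions and are not shown by this diff; the cookie value is a placeholder.

    import g4f
    from g4f.Provider.needs_auth import HuggingChat

    # Placeholder model id and cookie; substitute a model listed in HuggingChat.models
    # and a real huggingface.co session cookie.
    response = g4f.ChatCompletion.create(
        model="command-r-plus",
        messages=[{"role": "user", "content": "Hello"}],
        provider=HuggingChat,
        stream=True,
        web_search=False,                         # new kwarg added by this patch
        cookies={"hf-chat": "<session-cookie>"},  # new kwarg; defaults to get_cookies("huggingface.co")
    )
    for token in response:
        print(token, end="", flush=True)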