Skip to content

Commit

Permalink
Merge branch 'custom_api' of https://github.com/gradio-app/gradio into custom_api
Browse files Browse the repository at this point in the history
  • Loading branch information
Ali Abid committed Jan 10, 2025
2 parents 4c33559 + 01983f3 commit bce06dd
Show file tree
Hide file tree
Showing 7 changed files with 96 additions and 34 deletions.
5 changes: 5 additions & 0 deletions .changeset/some-cases-notice.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
---
"gradio": patch
---

fix:Update guide for `gr.load_chat` and allow `**kwargs`
5 changes: 5 additions & 0 deletions .changeset/tall-geese-hammer.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
---
"gradio": patch
---

fix:Support `gr.load()`-ing Gradio apps with `Blocks.load()` events
46 changes: 35 additions & 11 deletions gradio/blocks.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@
utils,
wasm_utils,
)
from gradio.blocks_events import BlocksEvents, BlocksMeta
from gradio.blocks_events import BLOCKS_EVENTS, BlocksEvents, BlocksMeta
from gradio.context import (
Context,
LocalContext,
Expand Down Expand Up @@ -1316,21 +1316,14 @@ def iterate_over_children(children_list):
)
dependency["no_target"] = True
else:
targets = [
getattr(
original_mapping[
target if isinstance(target, int) else target[0]
],
trigger if isinstance(target, int) else target[1],
)
for target in _targets
]
targets = [
EventListenerMethod(
t.__self__ if t.has_trigger else None,
t.event_name, # type: ignore
)
for t in targets
for t in Blocks.get_event_targets(
original_mapping, _targets, trigger
)
]
dependency = root_block.default_config.set_event_trigger(
targets=targets, fn=fn, **dependency
Expand Down Expand Up @@ -1434,6 +1427,11 @@ def render(self):
]
for dependency in self.fns.values():
dependency._id += dependency_offset
# Any event -- e.g. Blocks.load() -- that is triggered by this Blocks
# should now be triggered by the root Blocks instead.
for target in dependency.targets:
if target[0] == self._id:
target = (Context.root_block._id, target[1])
api_name = dependency.api_name
if isinstance(api_name, str):
api_name_ = utils.append_unique_suffix(
Expand Down Expand Up @@ -3007,3 +3005,29 @@ def get_api_info(self, all_endpoints: bool = False) -> dict[str, Any] | None:
api_info["named_endpoints"][f"/{fn.api_name}"] = dependency_info

return api_info

@staticmethod
def get_event_targets(
    original_mapping: dict[int, Block], _targets: list, trigger: str
) -> list:
    """Resolve raw config target entries into bound event attributes.

    Parameters:
        original_mapping: maps block ids from the config to Block instances.
        _targets: raw target entries; each is either a bare int block id
            (old config format) or an ``(id, event_name)`` tuple.
        trigger: event name paired with old-format integer targets.
    Returns:
        A list with one resolved event per entry in ``_targets``.
    Raises:
        ValueError: if a target id is absent from ``original_mapping`` and
            its event is not a Blocks-level event.
    """
    resolved = []
    for raw in _targets:
        # Old-format entries are bare ints paired with `trigger`; new-format
        # entries are (block_id, event_name) tuples.
        if isinstance(raw, int):
            block_id, event_name = raw, trigger
        else:
            block_id, event_name = raw[0], raw[1]
        owner = original_mapping.get(block_id)
        if owner is None:
            # Blocks events are a special case because they are not stored
            # in the blocks list in the config; fall back to the root Blocks.
            blocks_event_names = [
                e.event_name if isinstance(e, EventListener) else e
                for e in BLOCKS_EVENTS
            ]
            if event_name in blocks_event_names:
                owner = Context.root_block
            else:
                raise ValueError(
                    f"Cannot find Block with id: {block_id} but is present as a target in the config"
                )
        resolved.append(getattr(owner, event_name))
    return resolved
14 changes: 9 additions & 5 deletions gradio/external.py
Original file line number Diff line number Diff line change
Expand Up @@ -592,15 +592,17 @@ def load_chat(
*,
system_message: str | None = None,
streaming: bool = True,
**kwargs,
) -> ChatInterface:
"""
Load a chat interface from an OpenAI API chat compatible endpoint.
Parameters:
base_url: The base URL of the endpoint.
model: The model name.
token: The API token.
system_message: The system message for the conversation, if any.
base_url: The base URL of the endpoint, e.g. "http://localhost:11434/v1/"
model: The name of the model you are loading, e.g. "llama3.2"
token: The API token or a placeholder string if you are using a local model, e.g. "ollama"
system_message: The system message to use for the conversation, if any.
streaming: Whether the response should be streamed.
kwargs: Additional keyword arguments to pass into ChatInterface for customization.
"""
try:
from openai import OpenAI
Expand Down Expand Up @@ -645,4 +647,6 @@ def open_api_stream(
response += chunk.choices[0].delta.content
yield response

return ChatInterface(open_api_stream if streaming else open_api, type="messages")
return ChatInterface(
open_api_stream if streaming else open_api, type="messages", **kwargs
)
41 changes: 26 additions & 15 deletions gradio/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -520,7 +520,7 @@ def safe_deepcopy(obj: Any) -> Any:


def assert_configs_are_equivalent_besides_ids(
config1: dict, config2: dict, root_keys: tuple = ("mode",)
config1: BlocksConfigDict, config2: BlocksConfigDict, root_keys: tuple = ("mode",)
):
"""Allows you to test if two different Blocks configs produce the same demo.
Expand Down Expand Up @@ -565,20 +565,31 @@ def same_children_recursive(children1, chidren2):
if "children" in child1 or "children" in child2:
same_children_recursive(child1["children"], child2["children"])

children1 = config1["layout"]["children"]
children2 = config2["layout"]["children"]
same_children_recursive(children1, children2)

for d1, d2 in zip(config1["dependencies"], config2["dependencies"], strict=False):
for t1, t2 in zip(d1.pop("targets"), d2.pop("targets"), strict=False):
assert_same_components(t1[0], t2[0])
for i1, i2 in zip(d1.pop("inputs"), d2.pop("inputs"), strict=False):
assert_same_components(i1, i2)
for o1, o2 in zip(d1.pop("outputs"), d2.pop("outputs"), strict=False):
assert_same_components(o1, o2)

if d1 != d2:
raise ValueError(f"{d1} does not match {d2}")
if "layout" in config1:
if "layout" not in config2:
raise ValueError(
"The first config has a layout key, but the second does not"
)
children1 = config1["layout"]["children"]
children2 = config2["layout"]["children"]
same_children_recursive(children1, children2)

if "dependencies" in config1:
if "dependencies" not in config2:
raise ValueError(
"The first config has a dependencies key, but the second does not"
)
for d1, d2 in zip(
config1["dependencies"], config2["dependencies"], strict=False
):
for t1, t2 in zip(d1.pop("targets"), d2.pop("targets"), strict=False):
assert_same_components(t1[0], t2[0])
for i1, i2 in zip(d1.pop("inputs"), d2.pop("inputs"), strict=False):
assert_same_components(i1, i2)
for o1, o2 in zip(d1.pop("outputs"), d2.pop("outputs"), strict=False):
assert_same_components(o1, o2)
if d1 != d2:
raise ValueError(f"{d1} does not match {d2}")

return True

Expand Down
4 changes: 2 additions & 2 deletions guides/05_chatbots/01_creating-a-chatbot-fast.md
Original file line number Diff line number Diff line change
Expand Up @@ -21,10 +21,10 @@ If you have a chat server serving an OpenAI-API compatible endpoint (e.g. Ollama
```python
import gradio as gr

gr.load_chat("http://localhost:11434/v1/", model="llama3.2", token=None).launch()
gr.load_chat("http://localhost:11434/v1/", model="llama3.2", token="ollama").launch()
```

If not, don't worry, keep reading to see how to create an application around any chat model!
If you have your own model, keep reading to see how to create an application around any chat model in Python!

## Defining a chat function

Expand Down
15 changes: 14 additions & 1 deletion test/test_blocks.py
Original file line number Diff line number Diff line change
Expand Up @@ -92,7 +92,20 @@ def update(name):
for component in config1["components"]:
component["props"]["proxy_url"] = f"{fake_url}/"
config2 = demo2.get_config_file()
assert assert_configs_are_equivalent_besides_ids(config1, config2) # type: ignore
assert assert_configs_are_equivalent_besides_ids(config1, config2)

def test_load_from_config_with_blocks_events(self):
    # Regression test: a Blocks app whose config contains a Blocks-level
    # event (demo.load) must round-trip through from_config without error.
    space_url = "https://fake.hf.space"

    def greet():
        return "Hello"

    with gr.Blocks() as demo:
        textbox = gr.Textbox()
        demo.load(greet, None, textbox)

    serialized = demo.get_config_file()
    gr.Blocks.from_config(serialized, [greet], space_url)  # Should not raise

def test_partial_fn_in_config(self):
def greet(name, formatter):
Expand Down

0 comments on commit bce06dd

Please sign in to comment.