Use async SDK by default (AsyncYCloudML)
Throughout testing I didn't notice any performance issues. It seems AsyncYCloudML is good to use as the default option.
black-roland committed Dec 11, 2024
1 parent 40c4685 commit 6c55822
Showing 4 changed files with 5 additions and 18 deletions.
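For reference, a minimal standalone sketch of the async pattern the integration now uses unconditionally. The folder ID, API key, model name, and message payload are placeholders; the configure()/run() chain mirrors the calls in the conversation.py diff below.

import asyncio

from yandex_cloud_ml_sdk import AsyncYCloudML


async def main() -> None:
    # Placeholders: substitute a real folder ID and API key.
    sdk = AsyncYCloudML(folder_id="<folder-id>", auth="<api-key>")
    # Same completions() call shape as in conversation.py below.
    model = sdk.models.completions(model_name="yandexgpt-lite", model_version="latest")
    # configure() returns a configured model, so run() can be chained and awaited directly.
    result = await model.configure(temperature=0.5, max_tokens=1024).run(
        [{"role": "user", "text": "Hello!"}]
    )
    print(result)


asyncio.run(main())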
6 changes: 2 additions & 4 deletions custom_components/yandexgpt_conversation/__init__.py
@@ -97,10 +97,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     """Set up YandexGPT from a config entry."""
     settings = {**entry.data, **entry.options}
 
-    sdk_conf = {"folder_id": settings[CONF_FOLDER_ID], "auth": settings[CONF_API_KEY]}
-    entry.runtime_data = sdk_factory(
-        YCloudML(**sdk_conf),
-        AsyncYCloudML(**sdk_conf),
+    entry.runtime_data = AsyncYCloudML(
+        folder_id=settings[CONF_FOLDER_ID], auth=settings[CONF_API_KEY]
     )
 
     await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
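Since entry.runtime_data now stores the SDK client directly instead of a sync/async factory, a typed config-entry alias (not part of this commit, and assuming a Home Assistant core recent enough to support generic ConfigEntry) would let type checkers infer the AsyncYCloudML type at the call sites:

from homeassistant.config_entries import ConfigEntry

from yandex_cloud_ml_sdk import AsyncYCloudML

# Hypothetical alias, not present in the repository.
YandexGPTConfigEntry = ConfigEntry[AsyncYCloudML]

# conversation.py could then annotate the entry as YandexGPTConfigEntry and
# drop the explicit "client: AsyncYCloudML" annotation shown further down.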
4 changes: 0 additions & 4 deletions custom_components/yandexgpt_conversation/config_flow.py
@@ -30,7 +30,6 @@
 )
 
 from .const import (
-    CONF_ASYNC_MODE,
     CONF_CHAT_MODEL,
     CONF_FOLDER_ID,
     CONF_MAX_TOKENS,
@@ -189,9 +188,6 @@ def yandexgpt_config_option_schema(
         ): SelectSelector(
             SelectSelectorConfig(mode=SelectSelectorMode.DROPDOWN, options=models)
         ),
-        vol.Required(
-            CONF_ASYNC_MODE, default=options.get(CONF_ASYNC_MODE, False)
-        ): bool,
         vol.Optional(
             CONF_TEMPERATURE,
             description={"suggested_value": options.get(CONF_TEMPERATURE)},
1 change: 0 additions & 1 deletion custom_components/yandexgpt_conversation/const.py
@@ -15,7 +15,6 @@
 CONF_MAX_TOKENS = "max_tokens"
 CONF_TEMPERATURE = "temperature"
 CONF_CHAT_MODEL = "chat_model"
-CONF_ASYNC_MODE = "async_mode"
 
 RECOMMENDED_CHAT_MODEL = "yandexgpt-lite/latest"
 RECOMMENDED_MAX_TOKENS = 1024
12 changes: 3 additions & 9 deletions custom_components/yandexgpt_conversation/conversation.py
@@ -18,12 +18,11 @@
 from homeassistant.helpers import intent, llm, template
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
 from homeassistant.util import ulid
-from yandex_cloud_ml_sdk import AsyncYCloudML, YCloudML
+from yandex_cloud_ml_sdk import AsyncYCloudML
 from yandex_cloud_ml_sdk._models.completions.message import TextMessage
 
 from .const import (
     BASE_PROMPT_RU,
-    CONF_ASYNC_MODE,
     CONF_CHAT_MODEL,
     CONF_MAX_TOKENS,
     CONF_PROMPT,
@@ -187,8 +186,7 @@ async def async_process(
             {"messages": messages},
         )
 
-        is_async = options.get(CONF_ASYNC_MODE, False)
-        client: YCloudML | AsyncYCloudML = self.entry.runtime_data(is_async)
+        client: AsyncYCloudML = self.entry.runtime_data
         # TODO: Allow selecting model version
         model_name_ver = zip(
             ["model_name", "model_version"],
@@ -201,11 +199,7 @@
 
         try:
             model = client.models.completions(**dict(model_name_ver))
-            model.configure(**model_conf)
-            if options.get(CONF_ASYNC_MODE, False):
-                result = await model.run(messages)
-            else:
-                result = await self.hass.async_add_executor_job(model.run, messages)
+            result = await model.configure(**model_conf).run(messages)
         except Exception as err:
             LOGGER.exception(err)
 
