From ae09aa6fcf092abe7951f3f6335a15233254cf01 Mon Sep 17 00:00:00 2001
From: BioBootloader
Date: Thu, 7 Mar 2024 23:24:18 -0800
Subject: [PATCH 1/3] add SpiceClient

---
 run.py   | 11 ++++--
 spice.py | 104 ++++++++++++++++++++++++++++++-------------------
 2 files changed, 64 insertions(+), 51 deletions(-)

diff --git a/run.py b/run.py
index 272462a..2d5d3b3 100644
--- a/run.py
+++ b/run.py
@@ -1,14 +1,19 @@
-from spice import call_llm
+from spice import SpiceClient
+
+# model = "gpt-4-0125-preview"
+model = "claude-3-opus-20240229"
 
 system_message = "You are a helpful assistant."
 messages = [
     {"role": "user", "content": "list 5 random words"},
 ]
 
-for t in call_llm(system_message, messages, stream=True):
+client = SpiceClient(model=model)
+
+for t in client.call_llm(system_message, messages, stream=True):
     print(t, end="")
 
 print("\n####################\n")
 
-response = call_llm(system_message, messages, stream=False)
+response = client.call_llm(system_message, messages, stream=False)
 print(response)
diff --git a/spice.py b/spice.py
index bcb1970..9e6767e 100644
--- a/spice.py
+++ b/spice.py
@@ -4,58 +4,66 @@
 from dotenv import load_dotenv
 from openai import OpenAI
 
-load_dotenv()
-_openai_client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))
-_anthropic_client = Anthropic(
-    api_key=os.environ.get("ANTHROPIC_API_KEY"),
-)
-_openai_model = "gpt-4-0125-preview"
-_anthropic_model = "claude-3-opus-20240229"
-
 _use_anthropic = True
 
 
-def call_llm(system_message, messages, stream=False):
-    if _use_anthropic:
-        chat_completion_or_stream = _anthropic_client.messages.create(
-            max_tokens=1024,
-            system=system_message,
-            messages=messages,
-            model=_anthropic_model,
-            temperature=0.3,
-            stream=stream,
-        )
-    else:
-        _messages = [
-            {
-                "role": "system",
-                "content": system_message,
-            }
-        ] + messages
-        chat_completion_or_stream = _openai_client.chat.completions.create(
-            messages=_messages,
-            model=_openai_model,
-            temperature=0.3,
-            stream=stream,
-        )
-
-    if stream:
-        return _stream_generator(chat_completion_or_stream)
-    else:
-        if _use_anthropic:
-            response = chat_completion_or_stream.content[0].text
+class SpiceClient:
+    def __init__(self, model):
+        if model == "gpt-4-0125-preview":
+            self._provider = "openai"
+        elif model == "claude-3-opus-20240229":
+            self._provider = "anthropic"
         else:
-            response = chat_completion_or_stream.choices[0].message.content
-        return response
+            raise ValueError(f"Unknown model {model}")
+
+        self.model = model
+        load_dotenv()
+        if self._provider == "openai":
+            self._client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))
+        elif self._provider == "anthropic":
+            self._client = Anthropic(api_key=os.getenv("ANTHROPIC_API_KEY"))
 
 
-def _stream_generator(stream):
-    for chunk in stream:
-        if _use_anthropic:
-            content = ""
-            if chunk.type == "content_block_delta":
-                content = chunk.delta.text
+    def call_llm(self, system_message, messages, stream=False):
+        if self._provider == "anthropic":
+            chat_completion_or_stream = self._client.messages.create(
+                max_tokens=1024,
+                system=system_message,
+                messages=messages,
+                model=self.model,
+                temperature=0.3,
+                stream=stream,
+            )
         else:
-            content = chunk.choices[0].delta.content
-        if content is not None:
-            yield content
+            _messages = [
+                {
+                    "role": "system",
+                    "content": system_message,
+                }
+            ] + messages
+            chat_completion_or_stream = self._client.chat.completions.create(
+                messages=_messages,
+                model=self.model,
+                temperature=0.3,
+                stream=stream,
+            )
+
+        if stream:
+            return self._stream_generator(chat_completion_or_stream)
+        else:
+            if self._provider == "anthropic":
+                response = chat_completion_or_stream.content[0].text
+            else:
+                response = chat_completion_or_stream.choices[0].message.content
+            return response
+
+    def _stream_generator(self, stream):
+        for chunk in stream:
+            if self._provider == "anthropic":
+                content = ""
+                if chunk.type == "content_block_delta":
+                    content = chunk.delta.text
+            else:
+                content = chunk.choices[0].delta.content
+            if content is not None:
+                yield content

From a53ceff3afd85239a6f7a07b31ff78346806eaf8 Mon Sep 17 00:00:00 2001
From: biobootloader <128252497+biobootloader@users.noreply.github.com>
Date: Thu, 7 Mar 2024 23:27:31 -0800
Subject: [PATCH 2/3] Update spice.py

Co-authored-by: abanteai-butler[bot] <160964065+abanteai-butler[bot]@users.noreply.github.com>
---
 spice.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/spice.py b/spice.py
index 9e6767e..cf04c31 100644
--- a/spice.py
+++ b/spice.py
@@ -4,7 +4,6 @@
 from dotenv import load_dotenv
 from openai import OpenAI
 
-_use_anthropic = True
 
 
 class SpiceClient:

From 4ae3272c93b5361cb2e3de927dc0d0c03f67b969 Mon Sep 17 00:00:00 2001
From: BioBootloader
Date: Thu, 7 Mar 2024 23:29:45 -0800
Subject: [PATCH 3/3] fixes

---
 spice.py | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/spice.py b/spice.py
index cf04c31..af7e755 100644
--- a/spice.py
+++ b/spice.py
@@ -4,6 +4,7 @@
 from dotenv import load_dotenv
 from openai import OpenAI
 
+load_dotenv()
 
 
 class SpiceClient:
@@ -17,7 +18,6 @@ def __init__(self, model):
 
         self.model = model
-        load_dotenv()
         if self._provider == "openai":
             self._client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))
         elif self._provider == "anthropic":
             self._client = Anthropic(api_key=os.getenv("ANTHROPIC_API_KEY"))
@@ -64,5 +64,6 @@ def _stream_generator(self, stream):
             content = chunk.delta.text
         else:
             content = chunk.choices[0].delta.content
-        if content is not None:
-            yield content
+        if content is None:
+            content = ""
+        yield content