Support for new gpt4o and o1 models
alexeichhorn committed Dec 21, 2024
1 parent f9c69d7 commit 4a5ec8c
Showing 3 changed files with 29 additions and 4 deletions.
tests/test_openai.py (14 changes: 12 additions & 2 deletions)
@@ -26,7 +26,7 @@ def test_token_counter(self):
         ]
 
         # check if test covers all models (increase if new models are added)
-        assert len(OpenAIChatModel.__args__) == 24  # type: ignore
+        assert len(OpenAIChatModel.__args__) == 29  # type: ignore
 
         client = AsyncTypeOpenAI(api_key="mock")
 
@@ -52,12 +52,17 @@ def test_token_counter(self):
         assert client.chat.completions.num_tokens_from_messages(test_messages, model="gpt-4o") == 26
         assert client.chat.completions.num_tokens_from_messages(test_messages, model="gpt-4o-2024-05-13") == 26
         assert client.chat.completions.num_tokens_from_messages(test_messages, model="gpt-4o-2024-08-06") == 26
+        assert client.chat.completions.num_tokens_from_messages(test_messages, model="gpt-4o-2024-11-20") == 26
         assert client.chat.completions.num_tokens_from_messages(test_messages, model="gpt-4o-mini") == 26
         assert client.chat.completions.num_tokens_from_messages(test_messages, model="gpt-4o-mini-2024-07-18") == 26
+        assert client.chat.completions.num_tokens_from_messages(test_messages, model="o1") == 26
+        assert client.chat.completions.num_tokens_from_messages(test_messages, model="o1-2024-12-17") == 26
+        assert client.chat.completions.num_tokens_from_messages(test_messages, model="o1-mini") == 26
+        assert client.chat.completions.num_tokens_from_messages(test_messages, model="o1-mini-2024-09-12") == 26
 
     def test_max_token_counter(self):
         # check if test covers all models (increase if new models are added)
-        assert len(OpenAIChatModel.__args__) == 24  # type: ignore
+        assert len(OpenAIChatModel.__args__) == 29  # type: ignore
 
         client = AsyncTypeOpenAI(api_key="mock")
 
@@ -83,8 +88,13 @@ def test_max_token_counter(self):
         assert client.chat.completions.max_tokens_of_model("gpt-4o") == 128_000
         assert client.chat.completions.max_tokens_of_model("gpt-4o-2024-05-13") == 128_000
         assert client.chat.completions.max_tokens_of_model("gpt-4o-2024-08-06") == 128_000
+        assert client.chat.completions.max_tokens_of_model("gpt-4o-2024-11-20") == 128_000
         assert client.chat.completions.max_tokens_of_model("gpt-4o-mini") == 128_000
         assert client.chat.completions.max_tokens_of_model("gpt-4o-mini-2024-07-18") == 128_000
+        assert client.chat.completions.max_tokens_of_model("o1") == 128_000
+        assert client.chat.completions.max_tokens_of_model("o1-2024-12-17") == 128_000
+        assert client.chat.completions.max_tokens_of_model("o1-mini") == 128_000
+        assert client.chat.completions.max_tokens_of_model("o1-mini-2024-09-12") == 128_000
 
         # -
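For context, the two helpers exercised above can be used directly to budget a prompt before sending a request. A minimal usage sketch, not part of the diff: the import path and the message payload are assumptions, only the method names and model strings come from the tests.

```python
# Sketch only: mirrors how the tests above call the token helpers.
# The import location and the message dicts are assumed, not taken from the repo.
from typegpt.openai import AsyncTypeOpenAI  # assumed import path

client = AsyncTypeOpenAI(api_key="mock")  # counting happens locally, no API call is made

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Hello!"},
]

# Count prompt tokens for one of the newly added models ...
prompt_tokens = client.chat.completions.num_tokens_from_messages(messages, model="o1")

# ... and compare against the model's context window (128_000 per the diff below).
assert prompt_tokens < client.chat.completions.max_tokens_of_model("o1")
```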
typegpt/openai/base_chat_completion.py (14 changes: 12 additions & 2 deletions)
@@ -28,8 +28,13 @@ def max_tokens_of_model(model: OpenAIChatModel) -> int:
                 | "gpt-4o"
                 | "gpt-4o-2024-05-13"
                 | "gpt-4o-2024-08-06"
+                | "gpt-4o-2024-11-20"
                 | "gpt-4o-mini"
                 | "gpt-4o-mini-2024-07-18"
+                | "o1"
+                | "o1-2024-12-17"
+                | "o1-mini"
+                | "o1-mini-2024-09-12"
             ):
                 return 128_000
 
@@ -43,8 +43,8 @@ def num_tokens_from_messages(cls, messages: list[EncodedMessage], model: OpenAIChatModel
         try:
             encoding = tiktoken.encoding_for_model(model)
         except KeyError:
-            print("Warning: model not found. Using cl100k_base encoding.")
-            encoding = tiktoken.get_encoding("cl100k_base")
+            print("Warning: model not found. Using o200k_base encoding.")
+            encoding = tiktoken.get_encoding("o200k_base")
         if model == "gpt-3.5-turbo":
             return cls.num_tokens_from_messages(messages, model="gpt-3.5-turbo-0125")
         elif model == "gpt-3.5-turbo-16k":
@@ -74,8 +79,13 @@ def num_tokens_from_messages(cls, messages: list[EncodedMessage], model: OpenAIChatModel
             "gpt-4o",
             "gpt-4o-2024-05-13",
             "gpt-4o-2024-08-06",
+            "gpt-4o-2024-11-20",
             "gpt-4o-mini",
             "gpt-4o-mini-2024-07-18",
+            "o1",
+            "o1-2024-12-17",
+            "o1-mini",
+            "o1-mini-2024-09-12",
         ):
             tokens_per_message = 3
             tokens_per_name = 1
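Two things change in this file: the model match arms gain the five new names, and the tiktoken fallback for unknown model names moves from cl100k_base to o200k_base, the encoding used by the gpt-4o and o1 families. A standalone sketch of that fallback, assuming only the public tiktoken API (the model string is illustrative):

```python
# Sketch of the fallback behaviour shown above, isolated from the class.
import tiktoken


def encoding_for(model: str) -> tiktoken.Encoding:
    try:
        # Known names resolve to their registered tokenizer
        # (recent tiktoken releases map "gpt-4o" to o200k_base).
        return tiktoken.encoding_for_model(model)
    except KeyError:
        # Names tiktoken does not know yet fall back to o200k_base,
        # the encoding shared by the gpt-4o and o1 families,
        # instead of the older cl100k_base default.
        return tiktoken.get_encoding("o200k_base")


enc = encoding_for("o1-2024-12-17")
print(len(enc.encode("Hello!")))  # token count for a single string
```

The last hunk leaves the counting scheme itself untouched: the new models simply join the list that uses 3 overhead tokens per message and 1 per name.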
typegpt/openai/views.py (5 changes: 5 additions & 0 deletions)
@@ -24,8 +24,13 @@
     "gpt-4o",  # gpt-4o
     "gpt-4o-2024-05-13",
     "gpt-4o-2024-08-06",
+    "gpt-4o-2024-11-20",
     "gpt-4o-mini",  # gpt-4o mini
     "gpt-4o-mini-2024-07-18",
+    "o1",  # o1
+    "o1-2024-12-17",
+    "o1-mini",  # o1 mini
+    "o1-mini-2024-09-12",
 ]
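OpenAIChatModel is a typing.Literal alias, which is what lets the tests assert len(OpenAIChatModel.__args__) == 29: the literal's arguments are exactly the allowed model strings. A minimal sketch of that mechanism, using a hypothetical three-entry alias rather than the real list:

```python
# Sketch only: a stand-in for typegpt.openai.views.OpenAIChatModel, trimmed to three names.
from typing import Literal, get_args

ChatModel = Literal["gpt-4o", "o1", "o1-mini"]

# get_args() (equivalently the __args__ attribute used in the tests) enumerates
# every allowed value, so a length check fails loudly whenever a model is added
# to the alias without the token-counting tests being updated.
assert len(get_args(ChatModel)) == 3
assert "o1" in get_args(ChatModel)
```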
