Commit
chore(gemini_engine): remove deprecated models and add gemini-2.0-flash-exp
AAClause committed Feb 1, 2025
1 parent 33e95e1 commit 20277c8
Showing 1 changed file with 12 additions and 31 deletions.
43 changes: 12 additions & 31 deletions basilisk/provider_engine/gemini_engine.py
@@ -45,7 +45,19 @@ def models(self) -> list[ProviderAIModel]:
 		"""
 		Get models
 		"""
+		# See <https://ai.google.dev/gemini-api/docs/models/gemini?hl=en>
 		return [
+			ProviderAIModel(
+				id="gemini-2.0-flash-exp",
+				# Translators: This is a model description
+				description=_(
+					"Next generation features, speed, and multimodal generation for a diverse variety of tasks"
+				),
+				context_window=1048576,
+				max_output_tokens=8192,
+				vision=True,
+				default_temperature=1.0,
+			),
 			ProviderAIModel(
 				id="gemini-1.5-flash-latest",
 				# Translators: This is a model description
@@ -90,37 +102,6 @@ def models(self) -> list[ProviderAIModel]:
 				vision=True,
 				default_temperature=1.0,
 			),
-			ProviderAIModel(
-				id="gemini-1.0-pro-latest",
-				# Translators: This is a model description
-				description=_(
-					"The best model for scaling across a wide range of tasks. This is the latest model."
-				),
-				context_window=30720,
-				max_output_tokens=2048,
-				default_temperature=0.9,
-			),
-			ProviderAIModel(
-				id="gemini-1.0-pro",
-				# Translators: This is a model description
-				description=_(
-					"The best model for scaling across a wide range of tasks"
-				),
-				context_window=30720,
-				max_output_tokens=2048,
-				default_temperature=0.9,
-			),
-			ProviderAIModel(
-				id="gemini-1.0-pro-vision-latest",
-				# Translators: This is a model description
-				description=_(
-					'The best image understanding model to handle a broad range of applications'
-				),
-				context_window=12288,
-				max_output_tokens=4096,
-				vision=True,
-				default_temperature=0.4,
-			),
 		]

 	def convert_role(self, role: MessageRoleEnum) -> str:
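For reference, a minimal usage sketch (not part of this commit) of how the new gemini-2.0-flash-exp entry could be looked up in the models list shown above; the find_model helper and the engine variable are hypothetical and introduced only for illustration:

# Hypothetical sketch, not taken from commit 20277c8.
# Assumes `models` is the list[ProviderAIModel] returned by the models
# accessor shown in the diff above.
def find_model(models, model_id: str):
	"""Return the first ProviderAIModel whose id matches model_id, or None."""
	return next((m for m in models if m.id == model_id), None)

# e.g. flash_exp = find_model(engine.models, "gemini-2.0-flash-exp")
# flash_exp.context_window -> 1048576, flash_exp.max_output_tokens -> 8192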
