Skip to content

Commit

Permalink
Merge pull request #563 from boeckers/main
Browse files — browse the repository at this point in the history
Remove max_tokens from openai_chat.py
  • Loading branch information
zainhoda authored Jul 25, 2024
2 parents 3176ccc + 0a6f9c8 commit de94875
Showing 1 changed file with 0 additions and 9 deletions.
9 changes: 0 additions & 9 deletions src/vanna/openai/openai_chat.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,14 +11,10 @@ def __init__(self, client=None, config=None):

# default parameters - can be overridden using config
self.temperature = 0.7
self.max_tokens = 500

if "temperature" in config:
self.temperature = config["temperature"]

if "max_tokens" in config:
self.max_tokens = config["max_tokens"]

if "api_type" in config:
raise Exception(
"Passing api_type is now deprecated. Please pass an OpenAI client instead."
Expand Down Expand Up @@ -75,7 +71,6 @@ def submit_prompt(self, prompt, **kwargs) -> str:
response = self.client.chat.completions.create(
model=model,
messages=prompt,
max_tokens=self.max_tokens,
stop=None,
temperature=self.temperature,
)
Expand All @@ -87,7 +82,6 @@ def submit_prompt(self, prompt, **kwargs) -> str:
response = self.client.chat.completions.create(
engine=engine,
messages=prompt,
max_tokens=self.max_tokens,
stop=None,
temperature=self.temperature,
)
Expand All @@ -98,7 +92,6 @@ def submit_prompt(self, prompt, **kwargs) -> str:
response = self.client.chat.completions.create(
engine=self.config["engine"],
messages=prompt,
max_tokens=self.max_tokens,
stop=None,
temperature=self.temperature,
)
Expand All @@ -109,7 +102,6 @@ def submit_prompt(self, prompt, **kwargs) -> str:
response = self.client.chat.completions.create(
model=self.config["model"],
messages=prompt,
max_tokens=self.max_tokens,
stop=None,
temperature=self.temperature,
)
Expand All @@ -123,7 +115,6 @@ def submit_prompt(self, prompt, **kwargs) -> str:
response = self.client.chat.completions.create(
model=model,
messages=prompt,
max_tokens=self.max_tokens,
stop=None,
temperature=self.temperature,
)
Expand Down

0 comments on commit de94875

Please sign in to comment.