
Commit

run black/isort/ruff, fix issues
biobootloader committed Feb 14, 2024
1 parent e907803 commit 4d2ebf2
Showing 6 changed files with 22 additions and 28 deletions.
3 changes: 1 addition & 2 deletions scripts/fine_tune.py
@@ -1,11 +1,10 @@
 #!/usr/bin/env python
 import argparse
 import os
+import time
 
 from openai import OpenAI
 
 client = OpenAI()
-import time
-
 parser = argparse.ArgumentParser(description="Fine tuning GPT-3")
 parser.add_argument(
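Note on the hunk above: isort groups imports by origin (standard library first, then third-party, then local packages), so the stray import time that sat below client = OpenAI() moves into the standard-library block at the top of the file. A minimal sketch of the convention, using only the names visible in this hunk:

# isort ordering: standard-library imports first, third-party second,
# separated by a blank line; no imports after executable statements.
import argparse  # standard library
import os
import time

from openai import OpenAI  # third-party

client = OpenAI()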
14 changes: 6 additions & 8 deletions src/rawdog/__main__.py
@@ -1,6 +1,4 @@
 import argparse
-import os
-import platform
 import readline
 
 from rawdog import __version__
@@ -55,18 +53,18 @@ def rawdog(prompt: str, config, llm_client):
 
 
 def banner():
-    print(
-        f""" / \__
+    print(f""" / \__
 ( @\___ ┳┓┏┓┏ ┓┳┓┏┓┏┓
    / O ┣┫┣┫┃┃┃┃┃┃┃┃┓
  /  (_____/ ┛┗┛┗┗┻┛┻┛┗┛┗┛
-/_____/ U Rawdog v{__version__}"""
-    )
+/_____/ U Rawdog v{__version__}""")
 
 
 def main():
     parser = argparse.ArgumentParser(
-        description="A smart assistant that can execute Python code to help or hurt you."
+        description=(
+            "A smart assistant that can execute Python code to help or hurt you."
+        )
     )
     parser.add_argument(
         "prompt",
@@ -92,7 +90,7 @@ def main():
                 if llm_client.session_cost > 0:
                     print(f"Session cost: ${llm_client.session_cost:.4f}")
                 print("What can I do for you? (Ctrl-C to exit)")
-                prompt = input(f"> ")
+                prompt = input("> ")
                 # Save history after each command to avoid losing it in case of crash
                 readline.write_history_file(history_file)
                 print("")
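For context on the input(f"> ") change above: ruff's rule F541 flags f-strings that contain no placeholders, since the f prefix does nothing there. A small illustrative example (the greeting variable is hypothetical, not from the file):

# F541: an f-string with no {placeholders} is just a plain string,
# so the prefix is dropped.
prompt = input("> ")             # preferred
greeting = f"Hello, {prompt}!"   # f-prefix justified: it interpolates a value
print(greeting)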
3 changes: 2 additions & 1 deletion src/rawdog/execute_script.py
@@ -58,7 +58,8 @@ def execute_script(script: str, llm_client) -> str:
             module_name = llm_client.get_python_package(module)
             if (
                 input(
-                    f"Rawdog wants to use {module_name}. Install to rawdog's venv with pip? (Y/n): "
+                    f"Rawdog wants to use {module_name}. Install to rawdog's"
+                    " venv with pip? (Y/n): "
                 )
                 .strip()
                 .lower()
20 changes: 9 additions & 11 deletions src/rawdog/llm_client.py
@@ -1,6 +1,6 @@
 import json
 import os
-from textwrap import dedent, indent
+from textwrap import dedent
 from typing import Optional
 
 from litellm import completion, completion_cost
@@ -12,7 +12,6 @@
 
 
 class LLMClient:
-
     def __init__(self, config: dict):
         # In general it's hard to know if the user needs an API key or which environment variables to set
         # We do a simple check here for the default case (gpt- models from openai).
@@ -24,10 +23,11 @@ def __init__(self, config: dict):
             os.environ["OPENAI_API_KEY"] = config_api_key
         elif not env_api_key:
             print(
-                "It looks like you're using a GPT model without an API key. "
-                "You can add your API key by setting the OPENAI_API_KEY environment variable "
-                "or by adding an llm_api_key field to ~/.rawdog/config.yaml. "
-                "If this was intentional, you can ignore this message."
+                "It looks like you're using a GPT model without an API key. You can"
+                " add your API key by setting the OPENAI_API_KEY environment"
+                " variable or by adding an llm_api_key field to"
+                " ~/.rawdog/config.yaml. If this was intentional, you can ignore"
+                " this message."
             )
 
         self.conversation = [
@@ -45,15 +45,13 @@ def get_python_package(self, import_name: str):
         messages = [
             {
                 "role": "system",
-                "content": dedent(
-                    f"""\
+                "content": dedent(f"""\
                 The following python import failed: import {import_name}. \
                 Respond with only one word which is the name of the package \
                 on pypi. For instance if the import is "import numpy", you \
                 should respond with "numpy". If the import is "import PIL" \
                 you should respond with "Pillow". If you are unsure respond \
-                with the original import name."""
-                ),
+                with the original import name."""),
             }
         ]
 
@@ -119,7 +117,7 @@ def get_script(self, prompt: Optional[str] = None, stream=False):
                 log_conversation(self.conversation, metadata=metadata)
         except Exception as e:
             log["error"] = str(e)
-            print(f"Error:\n", str(log))
+            print("Error:\n", str(log))
             raise e
         finally:
             with open(rawdog_log_path, "a") as f:
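The long warning and prompt strings in this file are wrapped with implicit concatenation of adjacent string literals; Python joins them at compile time, so the printed output is unchanged while each source line stays under black's default 88-character limit. A standalone sketch (the warning variable is hypothetical and the message text is abridged from the hunk):

# Adjacent string literals are concatenated at compile time, so these two
# lines produce a single string identical to the original one-liner.
warning = (
    "It looks like you're using a GPT model without an API key. You can"
    " add your API key by setting the OPENAI_API_KEY environment variable."
)
print(warning)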
6 changes: 2 additions & 4 deletions src/rawdog/logging.py
@@ -37,12 +37,10 @@ def log_conversation(
     for function_name, function in functions.items():
         script += f"def {function_name}():\n" + indent(function, " ") + "\n\n\n"
 
-    script += dedent(
-        f"""\
+    script += dedent(f"""\
         if __name__ == "__main__":
             function_{len(functions)}()
-        """
-    )
+        """)
 
     if filename is None:
         script_filename = rawdog_dir / f"script_{timestamp}.py"
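The hunk above only collapses the dedent(...) call onto one line; textwrap.dedent still strips the common leading whitespace, so the generated __main__ footer comes out flush-left no matter how the source is indented. A minimal runnable sketch (n_functions and footer are hypothetical names, with 2 as an arbitrary example value):

from textwrap import dedent

n_functions = 2  # arbitrary example value
footer = dedent(f"""\
    if __name__ == "__main__":
        function_{n_functions}()
    """)
print(footer)  # the common 4-space margin is stripped from every line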
4 changes: 2 additions & 2 deletions src/rawdog/parsing.py
@@ -20,10 +20,10 @@ def parse_script(response: str) -> tuple[str, str]:
     script = "\n".join(script.split("\n")[1:])
     try:  # Make sure it isn't json
         script = json.loads(script)
-    except Exception as e:
+    except Exception:
         pass
     try:  # Make sure it's valid python
         ast.parse(script)
-    except SyntaxError as e:
+    except SyntaxError:
         return f"Script contains invalid Python:\n{response}", ""
     return message, script
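The two except changes above are ruff fixes for unused exception variables (rule F841): e was bound but never referenced. For reference, a self-contained sketch of the unwrap-then-validate pattern this hunk shows (check_script and its return convention are illustrative, not the module's actual API):

import ast
import json

def check_script(script: str) -> str:
    """Return the script if it is valid Python, otherwise an empty string."""
    try:  # unwrap a JSON-encoded script if one was returned
        script = json.loads(script)
    except Exception:
        pass
    try:  # reject anything that is not syntactically valid Python
        ast.parse(script)
    except SyntaxError:
        return ""
    return script

print(check_script("print('hello')"))  # -> print('hello')
print(check_script("def broken(:"))    # -> (empty string)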
