feat: anthropic system prompt clean up (instructor-ai#523)
Co-authored-by: Jason Liu <[email protected]>
Cruppelt and jxnl authored Mar 22, 2024
1 parent 6bfb699 commit 726ca86
Showing 3 changed files with 9 additions and 8 deletions.
2 changes: 2 additions & 0 deletions .gitignore
@@ -162,6 +162,8 @@ cython_debug/
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
.idea/

+.vscode/

examples/citation_with_extraction/fly.toml
my_cache_directory/
tutorials/wandb/*
6 changes: 4 additions & 2 deletions instructor/function_calls.py
@@ -1,4 +1,5 @@
from typing import Any, Dict, Optional, Type, TypeVar
+from xml.dom.minidom import parseString
from docstring_parser import parse
from functools import wraps
from pydantic import BaseModel, create_model
@@ -61,8 +62,9 @@ def openai_schema(cls) -> Dict[str, Any]:
@classmethod
@property
def anthropic_schema(cls) -> str:
-from instructor.anthropic_utils import json_to_xml, extract_xml, xml_to_model
-return json_to_xml(cls)
+from instructor.anthropic_utils import json_to_xml
+return "\n".join(line.lstrip() for line in parseString(json_to_xml(cls)).toprettyxml().splitlines()[1:])


@classmethod
def from_response(
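
The new anthropic_schema property pretty-prints the XML produced by json_to_xml, drops the XML declaration that toprettyxml() emits as its first line, and left-strips the tab indentation it adds. A minimal sketch of that formatting step, using a made-up <tool> string as a stand-in for the real json_to_xml(cls) output:

```python
# Sketch of the pretty-printing pipeline used by the new anthropic_schema
# property. raw_xml is a made-up stand-in for what json_to_xml(cls) returns.
from xml.dom.minidom import parseString

raw_xml = "<tool><tool_name>Example</tool_name><parameters><name>query</name></parameters></tool>"

# toprettyxml() prepends an XML declaration and indents with tabs;
# splitlines()[1:] drops the declaration and lstrip() removes the indentation.
pretty = parseString(raw_xml).toprettyxml()
schema = "\n".join(line.lstrip() for line in pretty.splitlines()[1:])

print(schema)
# <tool>
# <tool_name>Example</tool_name>
# <parameters>
# <name>query</name>
# </parameters>
# </tool>
```

The left-strip keeps each element on its own line without nested indentation, presumably to keep the schema compact when it is embedded in the Anthropic system prompt below.
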
9 changes: 3 additions & 6 deletions instructor/process_response.py
@@ -29,7 +29,6 @@

from instructor.mode import Mode

-
logger = logging.getLogger("instructor")

T_Model = TypeVar("T_Model", bound=BaseModel)
@@ -284,8 +283,7 @@ def handle_response_model(
elif mode == Mode.ANTHROPIC_TOOLS:
tool_descriptions = response_model.anthropic_schema
system_prompt = dedent(
-f"""
-In this environment you have access to a set of tools you can use to answer the user's question.
+f"""In this environment you have access to a set of tools you can use to answer the user's question.
You may call them like this:
<function_calls>
<invoke>
@@ -297,9 +295,8 @@ def handle_response_model(
</invoke>
</function_calls>
-Here are the tools available:\n{tool_descriptions}
-"""
-)
+Here are the tools available:""") + tool_descriptions

if "system" in new_kwargs:
new_kwargs["system"] = f"{system_prompt}\n{new_kwargs['system']}"
else:
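
The rewritten ANTHROPIC_TOOLS branch closes the f-string right after "Here are the tools available:" and appends the XML schema outside it, then prepends the whole prompt to any system message the caller already passed. A minimal sketch of that assembly, with made-up tool_descriptions and new_kwargs values standing in for response_model.anthropic_schema and the real request kwargs, and the template abbreviated where the diff collapses it:

```python
# Sketch of the new system-prompt assembly for Mode.ANTHROPIC_TOOLS.
# tool_descriptions and new_kwargs are made-up stand-ins for
# response_model.anthropic_schema and the real request kwargs.
from textwrap import dedent

tool_descriptions = "<tool_description><tool_name>ExampleTool</tool_name></tool_description>"

# The schema is concatenated after the template instead of being interpolated
# inside the f-string; "..." abbreviates the invocation-format lines that the
# diff leaves collapsed.
system_prompt = dedent(
    f"""In this environment you have access to a set of tools you can use to answer the user's question.
You may call them like this:
<function_calls>
<invoke>
...
</invoke>
</function_calls>
Here are the tools available:""") + tool_descriptions

# A caller-supplied system prompt is kept by prepending the tool instructions,
# mirroring the "system" handling shown in the hunk above.
new_kwargs = {"system": "You are a concise assistant."}
if "system" in new_kwargs:
    new_kwargs["system"] = f"{system_prompt}\n{new_kwargs['system']}"
else:
    new_kwargs["system"] = system_prompt

print(new_kwargs["system"])
```
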
