From 93e0c4b79f007a3cf75947322d980a95d44e1ce5 Mon Sep 17 00:00:00 2001
From: Zain AHMAD
Date: Sun, 26 Jan 2025 17:16:12 +0800
Subject: [PATCH] added function for bedrock tools

---
 instructor/patch.py            |  3 ---
 instructor/process_response.py | 20 +++++++++-----------
 instructor/retry.py            |  3 ---
 3 files changed, 9 insertions(+), 17 deletions(-)

diff --git a/instructor/patch.py b/instructor/patch.py
index 99d3a50d5..f4614dda7 100644
--- a/instructor/patch.py
+++ b/instructor/patch.py
@@ -131,9 +131,6 @@ def patch(  # type: ignore
 
     logger.debug(f"Patching `client.chat.completions.create` with {mode=}")
 
-    # TODO: remove this
-    print(f"instructor.patch: patching {create.__name__}")
-
     if create is not None:
         func = create
     elif client is not None:
diff --git a/instructor/process_response.py b/instructor/process_response.py
index 420e7251d..e4cd7cfa0 100644
--- a/instructor/process_response.py
+++ b/instructor/process_response.py
@@ -151,12 +151,6 @@ def process_response(
         f"Instructor Raw Response: {response}",
     )
 
-    # TODO: remove this
-    print(f"instructor.process_response.py: response_model {response_model}")
-
-    # TODO: remove this
-    print(f"instructor.process_response.py: response {response}")
-
     if response_model is None:
         logger.debug("No response model, returning response as is")
         return response
@@ -195,9 +189,6 @@
 
     model._raw_response = response
 
-    # TODO: remove this
-    print(f"instructor.process_response.py: model {model}")
-
     return model
 
 
@@ -570,8 +561,6 @@ def handle_vertexai_json(
 def handle_bedrock_json(
     response_model: type[T], new_kwargs: dict[str, Any]
 ) -> tuple[type[T], dict[str, Any]]:
-    print(f"handle_bedrock_json: response_model {response_model}")
-    print(f"handle_bedrock_json: new_kwargs {new_kwargs}")
     json_message = dedent(
         f"""
         As a genius expert, your task is to understand the content and provide
@@ -594,6 +583,14 @@
         """
     )
     system_message.append({"text": json_message})
+    new_kwargs["system"] = system_message
+
     return response_model, new_kwargs
 
 
+def handle_bedrock_tools(
+    response_model: type[T], new_kwargs: dict[str, Any]
+) -> tuple[type[T], dict[str, Any]]:
+    return response_model, new_kwargs
+
+
@@ -806,6 +803,7 @@ def handle_response_model(
         Mode.FIREWORKS_TOOLS: handle_fireworks_tools,
         Mode.WRITER_TOOLS: handle_writer_tools,
         Mode.BEDROCK_JSON: handle_bedrock_json,
+        Mode.BEDROCK_TOOLS: handle_bedrock_tools,
     }
 
     if mode in mode_handlers:
diff --git a/instructor/retry.py b/instructor/retry.py
index 4963b2ac1..e7eb6dd00 100644
--- a/instructor/retry.py
+++ b/instructor/retry.py
@@ -168,9 +168,6 @@ def retry_sync(
                     response=response, total_usage=total_usage
                 )
 
-                # TODO: remove this
-                print(f"instructor.retry.py: {response}")
-
                 return process_response(  # type: ignore
                     response=response,
                     response_model=response_model,