From 291f6b065bba8a701b6e25ea43105adbb2264652 Mon Sep 17 00:00:00 2001 From: Evan Lohn Date: Thu, 6 Feb 2025 19:50:36 -0800 Subject: [PATCH] added context type for when internet search tool is used --- backend/onyx/chat/prompt_builder/citations_prompt.py | 3 +++ backend/onyx/prompts/direct_qa_prompts.py | 4 ++-- .../internet_search/internet_search_tool.py | 4 +++- .../onyx/tools/tool_implementations/search_like_tool_utils.py | 2 ++ 4 files changed, 10 insertions(+), 3 deletions(-) diff --git a/backend/onyx/chat/prompt_builder/citations_prompt.py b/backend/onyx/chat/prompt_builder/citations_prompt.py index e28efbb4a68..0465922003a 100644 --- a/backend/onyx/chat/prompt_builder/citations_prompt.py +++ b/backend/onyx/chat/prompt_builder/citations_prompt.py @@ -140,6 +140,7 @@ def build_citations_user_message( context_docs: list[LlmDoc] | list[InferenceChunk], all_doc_useful: bool, history_message: str = "", + context_type: str = "context documents", ) -> HumanMessage: multilingual_expansion = get_multilingual_expansion() task_prompt_with_reminder = build_task_prompt_reminders( @@ -156,6 +157,7 @@ def build_citations_user_message( optional_ignore = "" if all_doc_useful else DEFAULT_IGNORE_STATEMENT user_prompt = CITATIONS_PROMPT.format( + context_type=context_type, optional_ignore_statement=optional_ignore, context_docs_str=context_docs_str, task_prompt=task_prompt_with_reminder, @@ -165,6 +167,7 @@ def build_citations_user_message( else: # if no context docs provided, assume we're in the tool calling flow user_prompt = CITATIONS_PROMPT_FOR_TOOL_CALLING.format( + context_type=context_type, task_prompt=task_prompt_with_reminder, user_query=query, history_block=history_block, diff --git a/backend/onyx/prompts/direct_qa_prompts.py b/backend/onyx/prompts/direct_qa_prompts.py index 133205dfd38..6130a210007 100644 --- a/backend/onyx/prompts/direct_qa_prompts.py +++ b/backend/onyx/prompts/direct_qa_prompts.py @@ -91,7 +91,7 @@ # similar to the chat flow, but with the option of 
including a # "conversation history" block CITATIONS_PROMPT = f""" -Refer to the following context documents when responding to me.{DEFAULT_IGNORE_STATEMENT} +Refer to the following {{context_type}} when responding to me.{DEFAULT_IGNORE_STATEMENT} CONTEXT: {GENERAL_SEP_PAT} @@ -108,7 +108,7 @@ # NOTE: need to add the extra line about "getting right to the point" since the # tool calling models from OpenAI tend to be more verbose CITATIONS_PROMPT_FOR_TOOL_CALLING = f""" -Refer to the provided context documents when responding to me.{DEFAULT_IGNORE_STATEMENT} \ +Refer to the provided {{context_type}} when responding to me.{DEFAULT_IGNORE_STATEMENT} \ You should always get right to the point, and never use extraneous language. {{history_block}}{{task_prompt}} diff --git a/backend/onyx/tools/tool_implementations/internet_search/internet_search_tool.py b/backend/onyx/tools/tool_implementations/internet_search/internet_search_tool.py index 089739384ed..0a068d8901f 100644 --- a/backend/onyx/tools/tool_implementations/internet_search/internet_search_tool.py +++ b/backend/onyx/tools/tool_implementations/internet_search/internet_search_tool.py @@ -272,11 +272,12 @@ def build_next_prompt( tool_responses: list[ToolResponse], using_tool_calling_llm: bool, ) -> AnswerPromptBuilder: return build_next_prompt_for_search_like_tool( prompt_builder=prompt_builder, tool_call_summary=tool_call_summary, tool_responses=tool_responses, using_tool_calling_llm=using_tool_calling_llm, answer_style_config=self.answer_style_config, prompt_config=self.prompt_config, + context_type="internet search results", ) diff --git a/backend/onyx/tools/tool_implementations/search_like_tool_utils.py b/backend/onyx/tools/tool_implementations/search_like_tool_utils.py index 2b307c2c260..b13b76c74f4 100644 --- a/backend/onyx/tools/tool_implementations/search_like_tool_utils.py +++ b/backend/onyx/tools/tool_implementations/search_like_tool_utils.py @@
-25,6 +25,7 @@ def build_next_prompt_for_search_like_tool( using_tool_calling_llm: bool, answer_style_config: AnswerStyleConfig, prompt_config: PromptConfig, + context_type: str = "context documents", ) -> AnswerPromptBuilder: if not using_tool_calling_llm: final_context_docs_response = next( @@ -58,6 +59,7 @@ def build_next_prompt_for_search_like_tool( else False ), history_message=prompt_builder.single_message_history or "", + context_type=context_type, ) )