Issue with running - gen-ai/Assistants/notebooks/autogen/gpt_assistant_agent.ipynb #111
Labels: bug
Solution Accelerators
This report involves the gen-ai/Assistants accelerator (the notebook named in the title).
Describe the bug
I am getting the error below when running gen-ai/Assistants/notebooks/autogen/gpt_assistant_agent.ipynb. The code block I am running is:
Code block
+++++++++++++++
assistant_config = {
    "tools": [
        {"type": "code_interpreter"}
    ],
    "tool_resources": {
        "code_interpreter": {
            "file_ids": [file.id]
        }
    }
}

excel_oai_agent = GPTAssistantAgent(
    name="excel_oai_agent",
    instructions="You are a code assistant tool that can run Python code",
    llm_config=llm_config,
    assistant_config=assistant_config,
)

user_proxy.initiate_chat(
    recipient=excel_oai_agent,
    message="What are the columns in the excel spreadsheet uploaded?"
)
++++++++++++
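For completeness, file.id comes from an earlier cell in the notebook that uploads the spreadsheet. That cell is not included in this report, so the sketch below only shows the assumed shape of the upload step (the client construction, endpoint values, and file name are placeholders, not copied from the notebook):
++++++++++++
from openai import AzureOpenAI

# Assumed client setup; the notebook may build this differently.
client = AzureOpenAI(
    azure_endpoint="<your-azure-openai-endpoint>",   # placeholder
    api_key="<your-api-key>",                        # placeholder
    api_version="<your-api-version>",                # placeholder
)

# Upload the spreadsheet for use by the code_interpreter tool;
# "assistants" is the documented purpose value for Assistants files.
file = client.files.create(
    file=open("data.xlsx", "rb"),                    # hypothetical file name
    purpose="assistants",
)
++++++++++++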
Error
+++++++++
WARNING:autogen.agentchat.contrib.gpt_assistant_agent:OpenAI client config of GPTAssistantAgent(excel_oai_agent) - model: gpt4-0125
WARNING:autogen.agentchat.contrib.gpt_assistant_agent:No matching assistant found, creating a new assistant
BadRequestError: Error code: 400 - {'error': {'message': "Unknown parameter: 'tool_resources'.", 'type': 'invalid_request_error', 'param': 'tool_resources', 'code': 'unknown_parameter'}}
BadRequestError Traceback (most recent call last)
Cell In[11], line 12
1 assistant_config = {
2 "tools": [
3 {"type": "code_interpreter"}
(...)
9 }
10 }
---> 12 excel_oai_agent = GPTAssistantAgent(
13 name="excel_oai_agent",
14 instructions="You are a code assistant tool that can run Python code",
15 llm_config=llm_config,
16 assistant_config=assistant_config,
17 )
19 user_proxy.initiate_chat(recipient=excel_oai_agent,
20 message="What are the columns in the excel spreadsheet uploaded?"
21 )
File /anaconda/envs/azureml_py310_sdkv2/lib/python3.10/site-packages/autogen/agentchat/contrib/gpt_assistant_agent.py:104, in GPTAssistantAgent.__init__(self, name, instructions, llm_config, assistant_config, overwrite_instructions, overwrite_tools, **kwargs)
100 logger.warning(
101 "No instructions were provided for new assistant. Using default instructions from AssistantAgent.DEFAULT_SYSTEM_MESSAGE."
102 )
103 instructions = AssistantAgent.DEFAULT_SYSTEM_MESSAGE
--> 104 self._openai_assistant = create_gpt_assistant(
105 self._openai_client,
106 name=name,
107 instructions=instructions,
108 model=model_name,
109 assistant_config=openai_assistant_cfg,
110 )
111 else:
112 logger.warning(
113 "Matching assistant found, using the first matching assistant: %s",
114 candidate_assistants[0].__dict__,
115 )
File /anaconda/envs/azureml_py310_sdkv2/lib/python3.10/site-packages/autogen/oai/openai_utils.py:762, in create_gpt_assistant(client, name, instructions, model, assistant_config)
759 assistant_create_kwargs["file_ids"] = assistant_config.get("file_ids", [])
761 logging.info(f"Creating assistant with config: {assistant_create_kwargs}")
--> 762 return client.beta.assistants.create(name=name, instructions=instructions, model=model, **assistant_create_kwargs)
File /anaconda/envs/azureml_py310_sdkv2/lib/python3.10/site-packages/openai/resources/beta/assistants.py:156, in Assistants.create(self, model, description, instructions, metadata, name, response_format, temperature, tool_resources, tools, top_p, extra_headers, extra_query, extra_body, timeout)
90 """
91 Create an assistant with a model and instructions.
92
(...)
153 timeout: Override the client-level default timeout for this request, in seconds
154 """
155 extra_headers = {"OpenAI-Beta": "assistants=v2", **(extra_headers or {})}
--> 156 return self._post(
157 "/assistants",
158 body=maybe_transform(
159 {
160 "model": model,
161 "description": description,
162 "instructions": instructions,
163 "metadata": metadata,
164 "name": name,
165 "response_format": response_format,
166 "temperature": temperature,
167 "tool_resources": tool_resources,
168 "tools": tools,
169 "top_p": top_p,
170 },
171 assistant_create_params.AssistantCreateParams,
172 ),
173 options=make_request_options(
174 extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
175 ),
176 cast_to=Assistant,
177 )
File /anaconda/envs/azureml_py310_sdkv2/lib/python3.10/site-packages/openai/_base_client.py:1240, in SyncAPIClient.post(self, path, cast_to, body, options, files, stream, stream_cls)
1226 def post(
1227 self,
1228 path: str,
(...)
1235 stream_cls: type[_StreamT] | None = None,
1236 ) -> ResponseT | _StreamT:
1237 opts = FinalRequestOptions.construct(
1238 method="post", url=path, json_data=body, files=to_httpx_files(files), **options
1239 )
-> 1240 return cast(ResponseT, self.request(cast_to, opts, stream=stream, stream_cls=stream_cls))
File /anaconda/envs/azureml_py310_sdkv2/lib/python3.10/site-packages/openai/_base_client.py:921, in SyncAPIClient.request(self, cast_to, options, remaining_retries, stream, stream_cls)
912 def request(
913 self,
914 cast_to: Type[ResponseT],
(...)
919 stream_cls: type[_StreamT] | None = None,
920 ) -> ResponseT | _StreamT:
--> 921 return self._request(
922 cast_to=cast_to,
923 options=options,
924 stream=stream,
925 stream_cls=stream_cls,
926 remaining_retries=remaining_retries,
927 )
File /anaconda/envs/azureml_py310_sdkv2/lib/python3.10/site-packages/openai/_base_client.py:1020, in SyncAPIClient._request(self, cast_to, options, remaining_retries, stream, stream_cls)
1017 err.response.read()
1019 log.debug("Re-raising status error")
-> 1020 raise self._make_status_error_from_response(err.response) from None
1022 return self._process_response(
1023 cast_to=cast_to,
1024 options=options,
(...)
1027 stream_cls=stream_cls,
1028 )
++++++++
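The 400 is returned by the service rather than raised locally: tool_resources is an Assistants v2 parameter, and the Azure OpenAI api-version configured in llm_config apparently does not accept it, even though the installed openai package already sends the assistants=v2 header. Two directions that may resolve it are sketched below, under assumptions that need checking against your deployment and the versions pinned in requirements.txt (the api_version string and the v1-style config are not taken from the notebook):
+++++++++
# Option 1 (assumption): use an api-version that supports Assistants v2,
# so the tool_resources parameter is accepted by the Azure endpoint.
llm_config = {
    "config_list": [
        {
            "model": "gpt4-0125",                         # Azure deployment name from this report
            "api_type": "azure",
            "base_url": "<your-azure-openai-endpoint>",   # placeholder
            "api_key": "<your-api-key>",                  # placeholder
            "api_version": "2024-05-01-preview",          # assumption: a v2-capable api-version
        }
    ]
}

# Option 2 (assumption): keep the older api-version and fall back to the
# Assistants v1 shape, which the autogen code path visible in the traceback
# (openai_utils.py around line 759) still knows how to send. This may also
# require an openai release that predates the v2-only Assistants client.
assistant_config = {
    "tools": [{"type": "code_interpreter"}],
    "file_ids": [file.id],
}
+++++++++
Which option applies depends on the openai and pyautogen versions actually installed; see the version check under Additional context below.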
To Reproduce
Steps to reproduce the behavior:
Run the code block shown above in the notebook (upload the Excel file, build assistant_config with tool_resources, then create the GPTAssistantAgent). The BadRequestError is raised as soon as GPTAssistantAgent is constructed, before initiate_chat runs.
Expected behavior
The assistant should be created successfully and the agent should answer the question about the columns in the uploaded spreadsheet.
Additional context
I installed all the packages from requirements.txt and am using the Azure gpt4-0125 model version.
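Because the failure mode depends on which openai and pyautogen releases requirements.txt resolves to, it may be worth capturing the installed versions alongside this report; a minimal check (nothing notebook-specific, just the standard version attributes):
+++++++++
import openai
import autogen

# tool_resources only exists in newer openai releases (Assistants v2),
# and autogen's assistant-creation code changed across 0.2.x versions.
print("openai:", openai.__version__)
print("autogen:", autogen.__version__)
+++++++++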