From a72e57f6886ef71e9b12baf7f17f865cb577563a Mon Sep 17 00:00:00 2001
From: Hedgehugger
Date: Wed, 17 Jul 2024 20:11:17 +0800
Subject: [PATCH 1/3] add wenxin and tongyi LLM

---
 robot/AI.py | 86 +++++++++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 86 insertions(+)

diff --git a/robot/AI.py b/robot/AI.py
index a860829e..1aabc023 100644
--- a/robot/AI.py
+++ b/robot/AI.py
@@ -416,6 +416,92 @@ def chat(self, texts, parsed):
             )
             return "抱歉,OpenAI 回答失败"
 
+class WenxinRobot(AbstractRobot):
+
+    SLUG = "wenxin"
+
+    def __init__(self, api_key, secret_key):
+        """
+        Wenxin机器人
+        """
+        super(self.__class__, self).__init__()
+        self.api_key = api_key
+        self.secret_key = secret_key
+
+    @classmethod
+    def get_config(cls):
+        return config.get("wenxin", {})
+
+    def chat(self, texts, _):
+        """
+        使用Wenxin机器人聊天
+
+        Arguments:
+        texts -- user input, typically speech, to be parsed by a module
+        """
+        msg = "".join(texts)
+        msg = utils.stripPunctuation(msg)
+        wenxinurl = f"https://aip.baidubce.com/oauth/2.0/token?client_id={self.api_key}&\
+            client_secret={self.secret_key}&grant_type=client_credentials"
+        try:
+            headers = {
+                "Content-Type": "application/json",
+                "Accept": "application/json",
+            }
+            payload = json.dumps({
+                "question": [
+                    {
+                        "role": "user",
+                        "content": msg,
+                    }
+                ]
+            })
+            response = requests.request("POST", wenxinurl, headers=headers)
+            logger.info(f"wenxin response: {response}")
+            return response.text
+
+        except Exception:
+            logger.critical("Wenxin robot failed to response for %r", msg, exc_info=True)
+            return "抱歉, Wenxin回答失败"
+
+class TongyiRobot(AbstractRobot):
+
+    SLUG = "tongyi"
+
+    def __init__(self, api_key):
+        """
+        Tongyi机器人
+        """
+        super(self.__class__, self).__init__()
+        self.api_key = api_key
+
+    @classmethod
+    def get_config(cls):
+        return config.get("tongyi", {})
+
+    def chat(self, texts, _):
+        """
+        使用Tongyi机器人聊天
+
+        Arguments:
+        texts -- user input, typically speech, to be parsed by a module
+        """
+        msg = "".join(texts)
+        msg = utils.stripPunctuation(msg)
+        msg = [{"role": "user", "content": msg}]
+        try:
+            response = dashscope.Generation.call(
+                model='qwen1.5-72b-chat',
+                messages=msg,
+                result_format='message',  # set the result to be "message" format.
+            )
+            logger.info(f"tongyi response: {response}")
+            return response['output']['choices'][0]['message']['content']
+
+        except Exception:
+            logger.critical("Tongyi robot failed to response for %r", msg, exc_info=True)
+            return "抱歉, Tongyi回答失败"
+
 def get_unknown_response():
     """

From 8ab90c36e386db8d27ccfbcfc0c572f7368dd115 Mon Sep 17 00:00:00 2001
From: Hedgehugger
Date: Thu, 18 Jul 2024 10:49:48 +0800
Subject: [PATCH 2/3] fix typo bug

---
 server/server.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/server/server.py b/server/server.py
index 6c8f8ad4..3c1414b0 100644
--- a/server/server.py
+++ b/server/server.py
@@ -187,7 +187,7 @@ def post(self):
                 utils.check_and_delete(tmpfile)
                 conversation.doConverse(
                     nfile,
-                    onSay=lambda msg, audio, plugin: self.on_resp(msg, audio, plugin),
+                    onSay=lambda msg, audio, plugin: self.onResp(msg, audio, plugin),
                     onStream=lambda data, resp_uuid: self.onStream(
                         data, resp_uuid)

From 411f852ee563c535cbf9dfc1290caead04fe521e Mon Sep 17 00:00:00 2001
From: Hedgehugger
Date: Thu, 18 Jul 2024 10:55:33 +0800
Subject: [PATCH 3/3] add annotation in AI.py to comment how to use the Tongyi api

---
 robot/AI.py | 130 +++++++++++++++++++++++++++-------------------------
 1 file changed, 67 insertions(+), 63 deletions(-)

diff --git a/robot/AI.py b/robot/AI.py
index 1aabc023..98499735 100644
--- a/robot/AI.py
+++ b/robot/AI.py
@@ -418,89 +418,93 @@ def chat(self, texts, parsed):
 
 class WenxinRobot(AbstractRobot):
 
-    SLUG = "wenxin"
+    SLUG = "wenxin"
+
+    def __init__(self, api_key, secret_key):
+        """
+        Wenxin机器人
+        """
+        super(self.__class__, self).__init__()
+        self.api_key = api_key
+        self.secret_key = secret_key
+
+    @classmethod
+    def get_config(cls):
+        return config.get("wenxin", {})
+
+    def chat(self, texts, _):
+        """
+        使用Wenxin机器人聊天
+
+        Arguments:
+        texts -- user input, typically speech, to be parsed by a module
+        """
+        msg = "".join(texts)
+        msg = utils.stripPunctuation(msg)
+        wenxinurl = f"https://aip.baidubce.com/oauth/2.0/token?client_id={self.api_key}&\
+            client_secret={self.secret_key}&grant_type=client_credentials"
+        try:
+            headers = {
+                "Content-Type": "application/json",
+                "Accept": "application/json",
+            }
+            payload = json.dumps({
+                "question": [
+                    {
+                        "role": "user",
+                        "content": msg,
+                    }
+                ]
+            })
+            response = requests.request("POST", wenxinurl, headers=headers)
+            logger.info(f"wenxin response: {response}")
+            return response.text
+
+        except Exception:
+            logger.critical("Wenxin robot failed to response for %r", msg, exc_info=True)
+            return "抱歉, Wenxin回答失败"
+
+class TongyiRobot(AbstractRobot):
+    '''
+    usage:
+    pip install dashscope
+    echo "export DASHSCOPE_API_KEY=YOUR_KEY" >> /.bashrc
+    '''
+    SLUG = "tongyi"
 
-    def __init__(self, api_key, secret_key):
+    def __init__(self, api_key):
         """
-        Wenxin机器人
+        Tongyi机器人
         """
         super(self.__class__, self).__init__()
         self.api_key = api_key
-        self.secret_key = secret_key
 
     @classmethod
     def get_config(cls):
-        return config.get("wenxin", {})
+        return config.get("tongyi", {})
 
     def chat(self, texts, _):
         """
-        使用Wenxin机器人聊天
+        使用Tongyi机器人聊天
 
         Arguments:
         texts -- user input, typically speech, to be parsed by a module
         """
         msg = "".join(texts)
         msg = utils.stripPunctuation(msg)
-        wenxinurl = f"https://aip.baidubce.com/oauth/2.0/token?client_id={self.api_key}&\
-            client_secret={self.secret_key}&grant_type=client_credentials"
+        msg = [{"role": "user", "content": msg}]
         try:
-            headers = {
-                "Content-Type": "application/json",
-                "Accept": "application/json",
-            }
-            payload = json.dumps({
-                "question": [
-                    {
-                        "role": "user",
-                        "content": msg,
-                    }
-                ]
-            })
-            response = requests.request("POST", wenxinurl, headers=headers)
-            logger.info(f"wenxin response: {response}")
-            return response.text
+            response = dashscope.Generation.call(
+                model='qwen1.5-72b-chat',
+                messages=msg,
+                result_format='message',  # set the result to be "message" format.
+            )
+            logger.info(f"tongyi response: {response}")
+            return response['output']['choices'][0]['message']['content']
 
         except Exception:
-            logger.critical("Wenxin robot failed to response for %r", msg, exc_info=True)
-            return "抱歉, Wenxin回答失败"
-
-class TongyiRobot(AbstractRobot):
-
-    SLUG = "tongyi"
-
-    def __init__(self, api_key):
-        """
-        Tongyi机器人
-        """
-        super(self.__class__, self).__init__()
-        self.api_key = api_key
-
-    @classmethod
-    def get_config(cls):
-        return config.get("tongyi", {})
-
-    def chat(self, texts, _):
-        """
-        使用Tongyi机器人聊天
-
-        Arguments:
-        texts -- user input, typically speech, to be parsed by a module
-        """
-        msg = "".join(texts)
-        msg = utils.stripPunctuation(msg)
-        msg = [{"role": "user", "content": msg}]
-        try:
-            response = dashscope.Generation.call(
-                model='qwen1.5-72b-chat',
-                messages=msg,
-                result_format='message',  # set the result to be "message" format.
-            )
-            logger.info(f"tongyi response: {response}")
-            return response['output']['choices'][0]['message']['content']
-
-        except Exception:
-            logger.critical("Tongyi robot failed to response for %r", msg, exc_info=True)
-            return "抱歉, Tongyi回答失败"
+            logger.critical("Tongyi robot failed to response for %r", msg, exc_info=True)
+            return "抱歉, Tongyi回答失败"
 
 def get_unknown_response():