From d06597dde66bc1a6ee5a6d3810f7449db79bc55e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E9=9B=81=E8=A1=8C?=
Date: Thu, 14 Sep 2023 21:30:01 +0800
Subject: [PATCH] add model release URLs.

---
 README.md | 13 ++++++++-----
 1 file changed, 8 insertions(+), 5 deletions(-)

diff --git a/README.md b/README.md
index a5e3ba53..db963d1d 100644
--- a/README.md
+++ b/README.md
@@ -4,11 +4,14 @@
 ## Model Support
 This project supports converting mainstream LLM models into MNN models for deployment and inference. The following models are currently supported:
-- chatglm-6b
-- chatglm2-6b
-- codegeex2-6b
-- Qwen-7B-Chat
-- Baichuan2-7B-Chat
+
+| Model | onnx-fp32 | mnn-int4 |
+|------|-----------|----------|
+| chatglm-6b | [onnx](https://github.com/wangzhaode/llm-export/releases/tag/chatglm-6b-onnx) | [mnn](https://github.com/wangzhaode/mnn-llm/releases/tag/chatglm-6b-mnn) |
+| chatglm2-6b | [onnx](https://github.com/wangzhaode/llm-export/releases/tag/chatglm2-6b-onnx) | [mnn](https://github.com/wangzhaode/mnn-llm/releases/tag/chatglm2-6b-mnn) |
+| codegeex2-6b | [onnx](https://github.com/wangzhaode/llm-export/releases/tag/codegeex2-6b-onnx) | [mnn](https://github.com/wangzhaode/mnn-llm/releases/tag/untagged-93eea51bfbbd01f29a5f) |
+| Qwen-7B-Chat | [onnx](https://github.com/wangzhaode/llm-export/releases/tag/qwen-7b-chat-onnx) | [mnn](https://github.com/wangzhaode/mnn-llm/releases/tag/untagged-d109db4ac537bfce7a0b) |
+| Baichuan2-7B-Chat | [onnx](https://github.com/wangzhaode/llm-export/releases/tag/baichuan2-7b-chat-onnx) | [mnn](https://github.com/wangzhaode/mnn-llm/releases/tag/untagged-6798382d6309a35e20d0) |
 
 ## Usage