Skip to content

Commit

Permalink
add max_seq_len.
Browse files — browse the repository at this point in the history
  • Loading branch information
wangzhaode committed Sep 22, 2023
1 parent d191de7 commit 582ea1a
Show file tree
Hide file tree
Showing 3 changed files with 10 additions and 3 deletions.
10 changes: 8 additions & 2 deletions .github/workflows/build-test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -70,10 +70,16 @@ jobs:
unzip $PACAGE_FILE
cd $PACAGE_DIR
./script/model_test.sh ${{ matrix.model }}
- name: windows-test
- name: windows-download
if: matrix.os == 'windows-latest'
run: |
cd workspace
7z x windows-package.zip
cd windows-package
./script/model_test.ps1 ${{ matrix.model }}
echo ${{ matrix.model }}
./script/model_download.ps1 ${{ matrix.model }}
- name: windows-test
if: matrix.os == 'windows-latest'
run: |
cd build
.\Release\cli_demo -m ..\${{ matrix.model }}
1 change: 1 addition & 0 deletions include/llm.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,7 @@ class Llm {
// gen info
int gen_seq_len_ = 0;
int all_seq_len_ = 0;
int max_seq_len_ = 256;
float load_progress_ = 0.f;
private:
// MNN Modules
Expand Down
2 changes: 1 addition & 1 deletion src/llm.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -109,7 +109,7 @@ std::string Llm::response(const std::string& query, std::ostream* os) {
int token = forward(input_ids);
std::string output_str = decode(token);
*os << output_str << std::flush;
while (true) {
while (gen_seq_len_ < max_seq_len_) {
token = forward({token});
if (is_stop(token)) {
*os << std::endl << std::flush;
Expand Down

0 comments on commit 582ea1a

Please sign in to comment.