From a7ff1971f8a34f4502a94b44fa1eec875d397b62 Mon Sep 17 00:00:00 2001
From: wangzhihong
Date: Thu, 17 Oct 2024 19:39:10 +0800
Subject: [PATCH 01/11] add gid for run

---
 lazyllm/engine/lightengine.py | 24 ++--
 .../standard_test/test_engine.py | 4 +-
 tests/basic_tests/test_engine.py | 132 +++++++++---------
 tests/charge_tests/test_engine.py | 36 ++---
 4 files changed, 99 insertions(+), 97 deletions(-)

diff --git a/lazyllm/engine/lightengine.py b/lazyllm/engine/lightengine.py
index cfacab54..afc63ece 100644
--- a/lazyllm/engine/lightengine.py
+++ b/lazyllm/engine/lightengine.py
@@ -1,5 +1,5 @@
 from .engine import Engine, Node
-from lazyllm import ActionModule, once_wrapper
+from lazyllm import once_wrapper
 from typing import List, Dict, Optional
 import uuid
@@ -34,10 +34,13 @@ def update_node(self, node):
     def start(self, nodes: List[Dict] = [], edges: List[Dict] = [], resources: List[Dict] = [],
               gid: Optional[str] = None, name: Optional[str] = None):
-        node = Node(id=gid or str(uuid.uuid4().hex), kind='Graph',
-                    name=name or str(uuid.uuid4().hex), args=dict(nodes=nodes, edges=edges, resources=resources))
-        self.graph = self.build_node(node).func
-        self.graph.start()
+        gid, name = gid or str(uuid.uuid4().hex), name or str(uuid.uuid4().hex)
+        node = Node(id=gid, kind='Graph', name=name, args=dict(nodes=nodes, edges=edges, resources=resources))
+        self.build_node(node).func.start()
+        return gid
+
+    def stop(self, id):
+        pass
     def update(self, nodes: List[Dict] = [], changed_nodes: List[Dict] = [],
                edges: List[Dict] = [], changed_resources: List[Dict] = [],
@@ -47,10 +50,9 @@ def update(self, nodes: List[Dict] = [], changed_nodes: List[Dict] = [],
                 raise NotImplementedError('Web and Api server are not allowed now')
             self.update_node(r)
         for n in changed_nodes: self.update_node(n)
-        node = Node(id=gid or str(uuid.uuid4().hex), kind='Graph',
-                    name=name or str(uuid.uuid4().hex), args=dict(nodes=nodes, edges=edges))
-        self.graph = self.update_node(node).func
-        ActionModule(self.graph).start()
+        gid, name = gid or str(uuid.uuid4().hex), name or str(uuid.uuid4().hex)
+        node = Node(id=gid, kind='Graph', name=name, args=dict(nodes=nodes, edges=edges))
+        self.update_node(node).func.start()
-    def run(self, *args, **kw):
-        return self.graph(*args, **kw)
+    def run(self, id: str, *args, **kw):
+        return self.build_node(id).func(*args, **kw)

diff --git a/tests/advanced_tests/standard_test/test_engine.py b/tests/advanced_tests/standard_test/test_engine.py
index 3a2c5b28..3d4a1bd2 100644
--- a/tests/advanced_tests/standard_test/test_engine.py
+++ b/tests/advanced_tests/standard_test/test_engine.py
@@ -48,6 +48,6 @@ def test_http(self):
         ]
         edges = [dict(iid="__start__", oid="1"), dict(iid="1", oid="__end__")]
         engine = LightEngine()
-        engine.start(nodes, edges)
-        ret = engine.run()
+        gid = engine.start(nodes, edges)
+        ret = engine.run(gid)
         assert '商汤科技' in ret['content']

diff --git a/tests/basic_tests/test_engine.py b/tests/basic_tests/test_engine.py
index 45306220..0a5d1966 100644
--- a/tests/basic_tests/test_engine.py
+++ b/tests/basic_tests/test_engine.py
@@ -22,8 +22,8 @@ def test_engine_subgraph(self):
         edges = [dict(iid='__start__', oid='2'), dict(iid='2', oid='__end__')]
         engine = LightEngine()
-        engine.start(nodes, edges, resources)
-        r = engine.run('1234')
+        gid = engine.start(nodes, edges, resources)
+        r = engine.run(gid, '1234')
         assert 'reply for You are an AI-Agent developed by LazyLLM' in r
         assert '1234' in r
@@ -32,9 +32,9 @@ def test_engine_code(self):
         edges = [dict(iid='__start__', oid='1'), dict(iid='1', oid='__end__')]
         engine = LightEngine()
-        engine.start(nodes, edges)
-        assert engine.run(1) == 2
-        assert engine.run(2) == 4
+        gid = engine.start(nodes, edges)
+        assert engine.run(gid, 1) == 2
+        assert engine.run(gid, 2) == 4
     def test_engine_switch(self):
         plus1 = dict(id='1', kind='Code', name='m1', args='def test(x: int):\n return 1 + x\n')
@@ -48,10 +48,10 @@ def test_engine_switch(self):
         nodes = [switch]
         edges = [dict(iid='__start__', oid='4'), dict(iid='4', oid='__end__')]
         engine = LightEngine()
-        engine.start(nodes, edges)
-        assert engine.run(1) == 2
-        assert engine.run(2) == 6
-        assert engine.run(3) == 9
+        gid = engine.start(nodes, edges)
+        assert engine.run(gid, 1) == 2
+        assert engine.run(gid, 2) == 6
+        assert engine.run(gid, 3) == 9
         engine.reset()
@@ -60,13 +60,13 @@ def test_engine_switch(self):
             'case2': [plus1, double],
             'case3': [square]
         }))
-        engine.start([switch], edges)
-        assert engine.run('case1', 1) == 2
-        assert engine.run('case2', 1) == 4
-        assert engine.run('case3', 1) == 1
-        assert engine.run('case1', 2) == 4
-        assert engine.run('case2', 2) == 6
-        assert engine.run('case3', 3) == 9
+        gid = engine.start([switch], edges)
+        assert engine.run(gid, 'case1', 1) == 2
+        assert engine.run(gid, 'case2', 1) == 4
+        assert engine.run(gid, 'case3', 1) == 1
+        assert engine.run(gid, 'case1', 2) == 4
+        assert engine.run(gid, 'case2', 2) == 6
+        assert engine.run(gid, 'case3', 3) == 9
     def test_engine_ifs(self):
         plus1 = dict(id='1', kind='Code', name='m1', args='def test(x: int):\n return 1 + x\n')
@@ -77,10 +77,10 @@ def test_engine_ifs(self):
         nodes = [ifs]
         edges = [dict(iid='__start__', oid='4'), dict(iid='4', oid='__end__')]
         engine = LightEngine()
-        engine.start(nodes, edges)
-        assert engine.run(1) == 4
-        assert engine.run(5) == 12
-        assert engine.run(10) == 100
+        gid = engine.start(nodes, edges)
+        assert engine.run(gid, 1) == 4
+        assert engine.run(gid, 5) == 12
+        assert engine.run(gid, 10) == 100
     def test_engine_loop(self):
         nodes = [dict(id='1', kind='Code', name='code', args='def square(x: int): return x * x')]
@@ -91,8 +91,8 @@ def test_engine_loop(self):
         edges = [dict(iid='__start__', oid='2'), dict(iid='2', oid='__end__')]
         engine = LightEngine()
-        engine.start(nodes, edges)
-        assert engine.run(2) == 16
+        gid = engine.start(nodes, edges)
+        assert engine.run(gid, 2) == 16
     def test_engine_warp(self):
         nodes = [dict(id='1', kind='Code', name='code', args='def square(x: int): return x * x')]
@@ -102,26 +102,26 @@ def test_engine_warp(self):
         edges = [dict(iid='__start__', oid='2'), dict(iid='2', oid='__end__')]
         engine = LightEngine()
-        engine.start(nodes, edges)
-        assert engine.run(2, 3, 4, 5) == (4, 9, 16, 25)
+        gid = engine.start(nodes, edges)
+        assert engine.run(gid, 2, 3, 4, 5) == (4, 9, 16, 25)
     def test_engine_formatter(self):
         nodes = [dict(id='1', kind='Formatter', name='f1', args=dict(ftype='python', rule='[:]'))]
         edges = [dict(iid='__start__', oid='1'), dict(iid='1', oid='__end__')]
         engine = LightEngine()
-        engine.start(nodes, edges)
-        assert engine.run([1, 2]) == [1, 2]
+        gid = engine.start(nodes, edges)
+        assert engine.run(gid, [1, 2]) == [1, 2]
         engine.reset()
         nodes = [dict(id='1', kind='Formatter', name='f1', args=dict(ftype='json', rule='{a, c}'))]
-        engine.start(nodes, edges)
-        assert engine.run('{"a": 1, "b": 2, "c": 3}') == dict(a=1, c=3)
+        gid = engine.start(nodes, edges)
+        assert engine.run(gid, '{"a": 1, "b": 2, "c": 3}') == dict(a=1, c=3)
         engine.reset()
         nodes = [dict(id='1', kind='Formatter', name='f1', args=dict(ftype='yaml', rule='[:]{a}'))]
-        engine.start(nodes, edges)
-        assert engine.run('- a: 1\n b: 2\n- a: 3\n d: 4\n') == [dict(a=1), dict(a=3)]
+        gid = engine.start(nodes, edges)
+        assert engine.run(gid, '- a: 1\n b: 2\n- a: 3\n d: 4\n') == [dict(a=1), dict(a=3)]
     def test_engine_edge_formatter(self):
         nodes = [dict(id='1', kind='Code', name='m1', args='def test(x: int):\n return x\n'),
@@ -133,9 +133,9 @@ def test_engine_edge_formatter(self):
                  dict(iid='3', oid='4', formatter='[b]'), dict(iid='4', oid='__end__')]
         engine = LightEngine()
-        engine.start(nodes, edges)
-        assert engine.run(1) == '1[2, 4]1'
-        assert engine.run(2) == '2[4, 8]4'
+        gid = engine.start(nodes, edges)
+        assert engine.run(gid, 1) == '1[2, 4]1'
+        assert engine.run(gid, 2) == '2[4, 8]4'
     def test_engine_edge_formatter_start(self):
         nodes = [dict(id='1', kind='Code', name='m1', args='def test(x: int): return x'),
@@ -145,19 +145,19 @@ def test_engine_edge_formatter_start(self):
                  dict(iid='1', oid='3'), dict(iid='2', oid='3'), dict(iid='3', oid='__end__')]
         engine = LightEngine()
-        engine.start(nodes, edges)
-        assert engine.run(3, 1) == 5
-        assert engine.run(5, 3, 1) == 11
+        gid = engine.start(nodes, edges)
+        assert engine.run(gid, 3, 1) == 5
+        assert engine.run(gid, 5, 3, 1) == 11
     def test_engine_join_stack(self):
         nodes = [dict(id='0', kind='Code', name='c1', args='def test(x: int): return x'),
                  dict(id='1', kind='JoinFormatter', name='join', args=dict(type='stack'))]
         edges = [dict(iid='__start__', oid='0'), dict(iid='0', oid='1'), dict(iid='1', oid='__end__')]
         engine = LightEngine()
-        engine.start(nodes, edges)
-        assert engine.run(1) == [1]
-        assert engine.run('1') == ['1']
-        assert engine.run([1]) == [[1]]
+        gid = engine.start(nodes, edges)
+        assert engine.run(gid, 1) == [1]
+        assert engine.run(gid, '1') == ['1']
+        assert engine.run(gid, [1]) == [[1]]
         engine.reset()
@@ -167,20 +167,20 @@ def test_engine_join_stack(self):
                  dict(id='3', kind='JoinFormatter', name='join', args=dict(type='stack'))]
         edges = [dict(iid='__start__', oid='0'), dict(iid='__start__', oid='1'), dict(iid='__start__', oid='2'),
                  dict(iid='0', oid='3'), dict(iid='1', oid='3'), dict(iid='2', oid='3'), dict(iid='3', oid='__end__')]
-        engine.start(nodes, edges)
-        assert engine.run(1) == [1, 2, 3]
-        assert engine.run('1') == ['1', '11', '111']
-        assert engine.run([1]) == [[1], [1, 1], [1, 1, 1]]
+        gid = engine.start(nodes, edges)
+        assert engine.run(gid, 1) == [1, 2, 3]
+        assert engine.run(gid, '1') == ['1', '11', '111']
+        assert engine.run(gid, [1]) == [[1], [1, 1], [1, 1, 1]]
     def test_engine_join_sum(self):
         nodes = [dict(id='0', kind='Code', name='c1', args='def test(x: int): return [x, 2 * x]'),
                  dict(id='1', kind='JoinFormatter', name='join', args=dict(type='sum'))]
         edges = [dict(iid='__start__', oid='0'), dict(iid='0', oid='1'), dict(iid='1', oid='__end__')]
         engine = LightEngine()
-        engine.start(nodes, edges)
-        assert engine.run(1) == 3
-        assert engine.run('1') == '111'
-        assert engine.run([1]) == [1, 1, 1]
+        gid = engine.start(nodes, edges)
+        assert engine.run(gid, 1) == 3
+        assert engine.run(gid, '1') == '111'
+        assert engine.run(gid, [1]) == [1, 1, 1]
         engine.reset()
@@ -190,10 +190,10 @@ def test_engine_join_sum(self):
                  dict(id='3', kind='JoinFormatter', name='join', args=dict(type='sum'))]
         edges = [dict(iid='__start__', oid='0'), dict(iid='__start__', oid='1'), dict(iid='__start__', oid='2'),
                  dict(iid='0', oid='3'), dict(iid='1', oid='3'), dict(iid='2', oid='3'), dict(iid='3', oid='__end__')]
-        engine.start(nodes, edges)
-        assert engine.run(1) == 6
-        assert engine.run('1') == '111111'
-        assert engine.run([1]) == [1, 1, 1, 1, 1, 1]
+        gid = engine.start(nodes, edges)
+        assert engine.run(gid, 1) == 6
+        assert engine.run(gid, '1') == '111111'
+        assert engine.run(gid, [1]) == [1, 1, 1, 1, 1, 1]
     def test_engine_join_todict(self):
         nodes = [dict(id='0', kind='Code', name='c1', args='def test(x: int): return x'),
@@ -203,10 +203,10 @@ def test_engine_join_todict(self):
         edges = [dict(iid='__start__', oid='0'), dict(iid='__start__', oid='1'), dict(iid='__start__', oid='2'),
                  dict(iid='0', oid='3'), dict(iid='1', oid='3'), dict(iid='2', oid='3'), dict(iid='3', oid='__end__')]
         engine = LightEngine()
-        engine.start(nodes, edges)
-        assert engine.run(1) == dict(a=1, b=2, c=3)
-        assert engine.run('1') == dict(a='1', b='11', c='111')
-        assert engine.run([1]) == dict(a=[1], b=[1, 1], c=[1, 1, 1])
+        gid = engine.start(nodes, edges)
+        assert engine.run(gid, 1) == dict(a=1, b=2, c=3)
+        assert engine.run(gid, '1') == dict(a='1', b='11', c='111')
+        assert engine.run(gid, [1]) == dict(a=[1], b=[1, 1], c=[1, 1, 1])
     def test_engine_join_join(self):
         nodes = [dict(id='0', kind='Code', name='c1', args='def test(x: int): return x'),
@@ -216,12 +216,12 @@ def test_engine_join_join(self):
         edges = [dict(iid='__start__', oid='0'), dict(iid='__start__', oid='1'), dict(iid='__start__', oid='2'),
                  dict(iid='0', oid='3'), dict(iid='1', oid='3'), dict(iid='2', oid='3'), dict(iid='3', oid='__end__')]
         engine = LightEngine()
-        engine.start(nodes, edges)
-        assert engine.run('1') == '111111'
+        gid = engine.start(nodes, edges)
+        assert engine.run(gid, '1') == '111111'
         changed_nodes = [dict(id='3', kind='JoinFormatter', name='join', args=dict(type='join', symbol='\n'))]
-        engine.update(nodes, changed_nodes, edges)
-        assert engine.run('1') == '1\n11\n111'
+        engine.update(nodes, changed_nodes, edges, gid=gid)
+        assert engine.run(gid, '1') == '1\n11\n111'
     def test_engine_server(self):
         nodes = [dict(id='1', kind='Code', name='m1', args='def test(x: int):\n return 2 * x\n')]
@@ -230,8 +230,8 @@ def test_engine_server(self):
                      dict(id='3', kind='web', name='w1', args=dict(port=None, title='网页', history=[], audio=False))
                      ]
         engine = LightEngine()
-        engine.start(nodes, edges, resources, gid='graph-1')
-        assert engine.run(1) == 2
+        gid = engine.start(nodes, edges, resources, gid='graph-1')
+        assert engine.run(gid, 1) == 2
         time.sleep(3)
         web = engine.build_node('graph-1').func._web
         client = Client(web.url, download_files=web.cach_path)
@@ -261,8 +261,8 @@ def test_rag(self):
                  dict(iid='4', oid='5'), dict(iid='__start__', oid='5'), dict(iid='5', oid='6'),
                  dict(iid='6', oid='__end__')]
        engine = LightEngine()
-        engine.start(nodes, edges, resources)
-        r = engine.run('何为天道?')
+        gid = engine.start(nodes, edges, resources)
+        r = engine.run(gid, '何为天道?')
         assert '观天之道,执天之行' in r or '天命之谓性,率性之谓道' in r
         # test add doc_group
@@ -277,5 +277,5 @@ def test_rag(self):
                  dict(iid='4', oid='5'), dict(iid='__start__', oid='5'), dict(iid='5', oid='6'),
                  dict(iid='6', oid='__end__')]
         engine = LightEngine()
-        engine.update(nodes + changed_nodes, changed_nodes, edges, changed_resources)
-        assert '观天之道,执天之行' in engine.run('何为天道?')
+        engine.update(nodes + changed_nodes, changed_nodes, edges, changed_resources, gid=gid)
+        assert '观天之道,执天之行' in engine.run(gid, '何为天道?')

diff --git a/tests/charge_tests/test_engine.py b/tests/charge_tests/test_engine.py
index 5fcee548..4368b50d 100644
--- a/tests/charge_tests/test_engine.py
+++ b/tests/charge_tests/test_engine.py
@@ -28,9 +28,9 @@ def test_intent_classifier(self):
                                    nodes={'music': music, 'draw': draw, 'chat': chat}))]
        edges = [dict(iid="__start__", oid="4"), dict(iid="4", oid="__end__")]
        engine = LightEngine()
-        engine.start(nodes, edges, resources)
-        assert engine.run("sing a song") == 'Music get sing a song'
-        assert engine.run("draw a hourse") == 'Draw get draw a hourse'
+        gid = engine.start(nodes, edges, resources)
+        assert engine.run(gid, "sing a song") == 'Music get sing a song'
+        assert engine.run(gid, "draw a hourse") == 'Draw get draw a hourse'
     def test_toolsforllm(self):
         resources = [
@@ -49,8 +49,8 @@ def test_toolsforllm(self):
                       args=dict(tools=['1001', '1002', '1003', '1004']))]
         edges = [dict(iid="__start__", oid="1"), dict(iid="1", oid="__end__")]
         engine = LightEngine()
-        engine.start(nodes, edges, resources)
-        assert '22' in engine.run([dict(name='get_current_weather', arguments=dict(location='Paris'))])[0]
+        gid = engine.start(nodes, edges, resources)
+        assert '22' in engine.run(gid, [dict(name='get_current_weather', arguments=dict(location='Paris'))])[0]
     def test_fc(self):
         resources = [
@@ -70,23 +70,23 @@ def test_fc(self):
                       args=dict(llm='0', tools=['1001', '1002', '1003', '1004']))]
         edges = [dict(iid="__start__", oid="1"), dict(iid="1", oid="__end__")]
         engine = LightEngine()
-        engine.start(nodes, edges, resources)
-        assert '10' in engine.run("What's the weather like today in celsius in Tokyo.")
-        assert '22' in engine.run("What will the temperature be in degrees Celsius in Paris tomorrow?")
+        gid = engine.start(nodes, edges, resources)
+        assert '10' in engine.run(gid, "What's the weather like today in celsius in Tokyo.")
+        assert '22' in engine.run(gid, "What will the temperature be in degrees Celsius in Paris tomorrow?")
         nodes = [dict(id="2", kind="FunctionCall", name="re",
                       args=dict(llm='0', tools=['1003', '1004'], algorithm='React'))]
         edges = [dict(iid="__start__", oid="2"), dict(iid="2", oid="__end__")]
         engine = LightEngine()
-        engine.start(nodes, edges, resources)
-        assert '5440' in engine.run("Calculate 20*(45+23)*4, step by step.")
+        gid = engine.start(nodes, edges, resources)
+        assert '5440' in engine.run(gid, "Calculate 20*(45+23)*4, step by step.")
         nodes = [dict(id="3", kind="FunctionCall", name="re",
                       args=dict(llm='0', tools=['1003', '1004'], algorithm='PlanAndSolve'))]
         edges = [dict(iid="__start__", oid="3"), dict(iid="3", oid="__end__")]
         engine = LightEngine()
-        engine.start(nodes, edges, resources)
-        assert '5440' in engine.run("Calculate 20*(45+23)*(1+3), step by step.")
+        gid = engine.start(nodes, edges, resources)
+        assert '5440' in engine.run(gid, "Calculate 20*(45+23)*(1+3), step by step.")
     def test_rag(self):
         prompt = ("作为国学大师,你将扮演一个人工智能国学问答助手的角色,完成一项对话任务。在这个任务中,你需要根据给定的已知国学篇章以及"
@@ -117,8 +117,8 @@ def test_rag(self):
                  dict(iid='4', oid='5'), dict(iid='__start__', oid='5'), dict(iid='5', oid='6'),
                  dict(iid='6', oid='__end__')]
         engine = LightEngine()
-        engine.start(nodes, edges, resources)
-        r = engine.run('何为天道?')
+        gid = engine.start(nodes, edges, resources)
+        r = engine.run(gid, '何为天道?')
         assert '观天之道,执天之行' in r or '天命之谓性,率性之谓道' in r
     def test_sql_call(self):
@@ -154,8 +154,8 @@ def test_sql_call(self):
         nodes = [dict(id="2", kind="SqlCall", name="sql_call", args=dict(sql_manager="0", llm="1", sql_examples=""))]
         edges = [dict(iid="__start__", oid="2"), dict(iid="2", oid="__end__")]
         engine = LightEngine()
-        engine.start(nodes, edges, resources)
-        str_answer = engine.run("员工编号是3的人来自哪个部门?")
+        gid = engine.start(nodes, edges, resources)
+        str_answer = engine.run(gid, "员工编号是3的人来自哪个部门?")
         assert "销售三部" in str_answer
         # 3. Release: delete data and table from database
@@ -178,9 +178,9 @@ def test_register_tools(self):
         edges = [dict(iid="__start__", oid="1"), dict(iid="1", oid="__end__")]
         engine = LightEngine()
         # TODO handle duplicated node id
-        engine.start(nodes, edges, resources)
+        gid = engine.start(nodes, edges, resources)
         city_name = 'Tokyo'
         unit = 'Celsius'
-        ret = engine.run(f"What is the temperature in {city_name} today in {unit}?")
+        ret = engine.run(gid, f"What is the temperature in {city_name} today in {unit}?")
         assert city_name in ret and unit in ret and '10' in ret
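Usage sketch (not part of the patch): after this first commit, `LightEngine.start()` returns the graph id and `LightEngine.run()` takes that id as its first argument. The node/edge payloads below are illustrative only, following the shapes used in the tests above.

```python
from lazyllm.engine.lightengine import LightEngine

# A one-node graph whose Code node doubles its input (illustrative payload).
nodes = [dict(id='1', kind='Code', name='double', args='def double(x: int): return 2 * x')]
edges = [dict(iid='__start__', oid='1'), dict(iid='1', oid='__end__')]

engine = LightEngine()
gid = engine.start(nodes, edges)   # start() now returns the graph id (gid)
assert engine.run(gid, 3) == 6     # run() addresses the graph by that id
```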
From 038020537e1a541d907cf52769369b5e2015b7ea Mon Sep 17 00:00:00 2001
From: wangzhihong
Date: Fri, 18 Oct 2024 14:02:23 +0800
Subject: [PATCH 02/11] add train task lists

---
 lazyllm/engine/node.py | 12 +--------
 lazyllm/module/module.py | 57 ++++++++++++++++++++++++----------------
 2 files changed, 35 insertions(+), 34 deletions(-)

diff --git a/lazyllm/engine/node.py b/lazyllm/engine/node.py
index 99d561f4..0155cf30 100644
--- a/lazyllm/engine/node.py
+++ b/lazyllm/engine/node.py
@@ -36,17 +36,7 @@ class NodeArgs(object):
     builder_argument=dict(
         trainset=NodeArgs(str),
         prompt=NodeArgs(str),
-        finetune_method=NodeArgs(str, getattr_f=partial(getattr, lazyllm.finetune)),
-        deploy_method=NodeArgs(str, 'vllm', getattr_f=partial(getattr, lazyllm.deploy))),
-    other_arguments=dict(
-        finetune_method=dict(
-            batch_size=NodeArgs(int, 16),
-            micro_batch_size=NodeArgs(int, 2),
-            num_epochs=NodeArgs(int, 3),
-            learning_rate=NodeArgs(float, 5e-4),
-            lora_r=NodeArgs(int, 8),
-            lora_alpha=NodeArgs(int, 32),
-            lora_dropout=NodeArgs(float, 0.05)))
+        deploy_method=NodeArgs(str, 'vllm', getattr_f=partial(getattr, lazyllm.deploy)))
 )
 all_nodes['OnlineLLM'] = dict(

diff --git a/lazyllm/module/module.py b/lazyllm/module/module.py
index d9532823..be1ec59d 100644
--- a/lazyllm/module/module.py
+++ b/lazyllm/module/module.py
@@ -10,7 +10,7 @@ import functools
 from datetime import datetime
 from lazyllm import ThreadPoolExecutor, FileSystemQueue
-from typing import Dict, List, Any, Union
+from typing import Dict, List, Any, Union, Optional
 import lazyllm
 from lazyllm import FlatList, Option, launchers, LOG, package, kwargs, encode_request, globals
@@ -179,7 +179,9 @@ def start(self): return self._update(mode=['server'], recursive=True)
     def restart(self): return self.start()
     def wait(self): pass
-    def stop(self): raise NotImplementedError(f'Function stop is not implemented in {self.__class__}!')
+    def stop(self):
+        for m in self._submodules:
+            m.stop()
     @property
     def options(self):
@@ -477,42 +479,52 @@ def __init__(self, base_model='', target_path='', stream=False, train=None, fine
         self._train, self._finetune, self._deploy = train, finetune, deploy
         self._stream = stream
         self._father = []
-        self._launchers = []
+        self._launchers = dict(default=dict(), manual=dict())
         self._deployer = None
         self._specific_target_path = None
     def _add_father(self, father):
         if father not in self._father: self._father.append(father)
-    def _get_args(self, arg_cls, disable=[]):
-        args = getattr(self, f'_{arg_cls}_args', dict())
+    def _get_train_or_deploy_args(self, arg_cls: str, disable: List[str] = []):
+        args = getattr(self, f'_{arg_cls}_args', dict()).copy()
         if len(set(args.keys()).intersection(set(disable))) > 0:
             raise ValueError(f'Key `{", ".join(disable)}` can not be set in '
                              '{arg_cls}_args, please pass them from Module.__init__()')
-        if 'launcher' in args:
-            args['launcher'] = args['launcher'].clone() if args['launcher'] else launchers.remote(sync=False)
-            self._launchers.append(args['launcher'])
+        args['launcher'] = args['launcher'].clone() if args.get('launcher') else launchers.remote(sync=False)
+        self._launchers['default'][arg_cls] = args['launcher']
         return args
-    def _get_train_tasks(self):
-        trainset_getf = lambda: lazyllm.package(self._trainset, None) \
-            if isinstance(self._trainset, str) else self._trainset  # noqa E731
+    def _get_train_tasks_impl(self, mode: Optional[str], **kw):
+        mode = mode or self._mode
+        assert mode in ('train', 'finetune'), 'mode must be train or finetune'
+
+        trainset_getf = (lambda: lazyllm.package(self._trainset, None)) if isinstance(
+            self._trainset, str) else self._trainset
         target_path = self._generate_target_path()
         if not os.path.exists(target_path):
             os.system(f'mkdir -p {target_path}')
-        if self._mode == 'train':
-            args = self._get_args('train', disable=['base_model', 'target_path'])
-            train = self._train(base_model=self._base_model, target_path=target_path, **args)
-        elif self._mode == 'finetune':
-            args = self._get_args('finetune', disable=['base_model', 'target_path'])
-            train = self._finetune(base_model=self._base_model, target_path=target_path, **args)
-        else:
-            raise RuntimeError('mode must be train or finetune')
+        kw = kw or self._get_train_or_deploy_args(mode, disable=['base_model', 'target_path'])
+        task = getattr(self, f'_{mode}')(base_model=self._base_model, target_path=target_path, **kw)
+        return [trainset_getf, task]
+
+    def _get_train_tasks(self):
         def after_train(real_target_path):
             self._finetuned_model_path = real_target_path
             return real_target_path
-        return Pipeline(trainset_getf, train, after_train)
+        return Pipeline(*self._get_train_tasks_impl(), after_train)
+
+    def _trian(self, name: str, ngpus: int = 1, mode: str = None, batch_size: int = 16,
+               micro_batch_size: int = 2, num_epochs: int = 3, learning_rate: float = 5e-4,
+               lora_r: int = 8, lora_alpha: int = 32, lora_dropout: float = 0.05, **kw):
+        assert name and isinstance(name, str), 'Invalid name: {name}, expect a valid string'
+        assert name not in self._launchers['manual'], 'Duplicate name: {name}'
+        self._launchers['manual'][name] = kw['launcher'] = launchers.remote(sync=False, ngpus=ngpus)
+
+        Pipeline(*self._get_train_tasks_impl(
+            mode=mode, batch_size=batch_size, micro_batch_size=micro_batch_size, num_epochs=num_epochs,
+            learning_rate=learning_rate, lora_r=lora_r, lora_alpha=lora_alpha, lora_dropout=lora_dropout, **kw))()
     def _get_all_finetuned_models(self):
         valid_paths = []
@@ -578,7 +590,7 @@ def _set_template(self, deployer):
             f._stream_url_suffix = deployer.stream_url_suffix()
     def _deploy_setter_hook(self):
-        self._deploy_args = self._get_args('deploy', disable=['target_path'])
+        self._deploy_args = self._get_train_or_deploy_args('deploy', disable=['target_path'])
         if self._deploy is not lazyllm.deploy.AutoDeploy:
             self._set_template(self._deploy)
             if url := self._deploy_args.get('url', None):
@@ -588,8 +600,7 @@ def _deploy_setter_hook(self):
                 self._get_deploy_tasks.flag.set()
     def __del__(self):
-        for launcher in self._launchers:
-            launcher.cleanup()
+        [[launcher.cleanup() for launcher in group.values()] for group in self._launchers.values()]
     def _generate_target_path(self):
         base_model_name = os.path.basename(self._base_model)

From 68fff85d6ad773e60842bf5d78b0ffbf893b60bd Mon Sep 17 00:00:00 2001
From: wangzhihong
Date: Fri, 18 Oct 2024 14:53:40 +0800
Subject: [PATCH 03/11] add train task lists

---
 lazyllm/engine/lightengine.py | 10 ++++++++--
 lazyllm/launcher.py | 2 ++
 lazyllm/module/module.py | 9 +++++++--
 3 files changed, 17 insertions(+), 4 deletions(-)

diff --git a/lazyllm/engine/lightengine.py b/lazyllm/engine/lightengine.py
index afc63ece..b293d7ab 100644
--- a/lazyllm/engine/lightengine.py
+++ b/lazyllm/engine/lightengine.py
@@ -39,8 +39,14 @@ def start(self, nodes: List[Dict] = [], edges: List[Dict] = [], resources: List[
         self.build_node(node).func.start()
         return gid
-    def stop(self, id):
-        pass
+    def stop(self, id, task_name: Optional[str] = None):
+        node = self.build_node(id)
+        if task_name:
+            assert node.kind in ('LocalLLM')
+            node.func.stop(task_name=task_name)
+        else:
+            assert node.kind in ('Graph', 'LocalLLM', 'LocalEmbedding', 'SD', 'TTS', 'STT')
+            node.func.stop()
     def update(self, nodes: List[Dict] = [], changed_nodes: List[Dict] = [],
                edges: List[Dict] = [], changed_resources: List[Dict] = [],

diff --git a/lazyllm/launcher.py b/lazyllm/launcher.py
index e4ea645b..42b3f5de 100644
--- a/lazyllm/launcher.py
+++ b/lazyllm/launcher.py
@@ -29,6 +29,8 @@ class Status(Enum):
 class LazyLLMLaunchersBase(object, metaclass=LazyLLMRegisterMetaClass):
+    Status = Status
+
     def __init__(self) -> None:
         self.status = Status.TBSubmitted
         self._id = str(uuid.uuid4().hex)

diff --git a/lazyllm/module/module.py b/lazyllm/module/module.py
index be1ec59d..31474dbd 100644
--- a/lazyllm/module/module.py
+++ b/lazyllm/module/module.py
@@ -14,6 +14,7 @@ import lazyllm
 from lazyllm import FlatList, Option, launchers, LOG, package, kwargs, encode_request, globals
+from ..launcher import LazyLLMLaunchersBase as Launcher
 from ..components.prompter import PrompterBase, ChatPrompter, EmptyPrompter
 from ..components.formatter import FormatterBase, EmptyFormatter
 from ..components.utils import ModelManager
@@ -479,7 +480,7 @@ def __init__(self, base_model='', target_path='', stream=False, train=None, fine
         self._train, self._finetune, self._deploy = train, finetune, deploy
         self._stream = stream
         self._father = []
-        self._launchers = dict(default=dict(), manual=dict())
+        self._launchers: Dict[str, Dict[str, Launcher]] = dict(default=dict(), manual=dict())
         self._deployer = None
         self._specific_target_path = None
@@ -495,7 +496,7 @@ def _get_train_or_deploy_args(self, arg_cls: str, disable: List[str] = []):
         self._launchers['default'][arg_cls] = args['launcher']
         return args
-    def _get_train_tasks_impl(self, mode: Optional[str], **kw):
+    def _get_train_tasks_impl(self, mode: Optional[str] = None, **kw):
         mode = mode or self._mode
         assert mode in ('train', 'finetune'), 'mode must be train or finetune'
@@ -661,6 +662,10 @@ def wait(self):
         for launcher in self._impl._launchers:
             launcher.wait()
+    def stop(self, task_name: Optional[str] = None):
+        launcher = self._impl._launchers['manual' if task_name else 'default'][task_name or 'deploy']
+        launcher.cleanup()
+
     # modify default value to ''
     def prompt(self, prompt=''):
         if self.base_model != '' and prompt == '' and ModelManager.get_model_type(self.base_model) != 'llm':

From c374c92b1d764ef418eeaf18897e422890e52323 Mon Sep 17 00:00:00 2001
From: wangzhihong
Date: Fri, 18 Oct 2024 15:17:08 +0800
Subject: [PATCH 04/11] add train task lists

---
 lazyllm/engine/lightengine.py | 9 +++++++--
 lazyllm/launcher.py | 5 +++++
 lazyllm/module/module.py | 9 +++++++--
 3 files changed, 19 insertions(+), 4 deletions(-)

diff --git a/lazyllm/engine/lightengine.py b/lazyllm/engine/lightengine.py
index b293d7ab..572cb748 100644
--- a/lazyllm/engine/lightengine.py
+++ b/lazyllm/engine/lightengine.py
@@ -39,8 +39,13 @@ def start(self, nodes: List[Dict] = [], edges: List[Dict] = [], resources: List[
         self.build_node(node).func.start()
         return gid
-    def stop(self, id, task_name: Optional[str] = None):
-        node = self.build_node(id)
+    def status(self, node_id: str, task_name: Optional[str] = None):
+        node = self.build_node(node_id)
+        assert node.kind in ('LocalLLM')
+        return node.func.status(task_name=task_name)
+
+    def stop(self, node_id: str, task_name: Optional[str] = None):
+        node = self.build_node(node_id)
         if task_name:
             assert node.kind in ('LocalLLM')
             node.func.stop(task_name=task_name)

diff --git a/lazyllm/launcher.py b/lazyllm/launcher.py
index 42b3f5de..c311ec06 100644
--- a/lazyllm/launcher.py
+++ b/lazyllm/launcher.py
@@ -47,6 +47,11 @@ def cleanup(self):
             LOG.info(f"killed job:{k}")
         self.all_processes.pop(self._id)
+    @property
+    def status(self):
+        assert len(self.all_processes[self._id]) == 1
+        return self.all_processes[self._id][0].status
+
     def wait(self):
         for _, v in self.all_processes[self._id]:
             v.wait()

diff --git a/lazyllm/module/module.py b/lazyllm/module/module.py
index 31474dbd..89badb4b 100644
--- a/lazyllm/module/module.py
+++ b/lazyllm/module/module.py
@@ -492,8 +492,9 @@ def _get_train_or_deploy_args(self, arg_cls: str, disable: List[str] = []):
         if len(set(args.keys()).intersection(set(disable))) > 0:
             raise ValueError(f'Key `{", ".join(disable)}` can not be set in '
                              '{arg_cls}_args, please pass them from Module.__init__()')
-        args['launcher'] = args['launcher'].clone() if args.get('launcher') else launchers.remote(sync=False)
-        self._launchers['default'][arg_cls] = args['launcher']
+        if 'url' not in args:
+            args['launcher'] = args['launcher'].clone() if args.get('launcher') else launchers.remote(sync=False)
+            self._launchers['default'][arg_cls] = args['launcher']
         return args
@@ -666,6 +667,10 @@ def stop(self, task_name: Optional[str] = None):
         launcher = self._impl._launchers['manual' if task_name else 'default'][task_name or 'deploy']
         launcher.cleanup()
+    def status(self, task_name: Optional[str] = None):
+        launcher = self._impl._launchers['manual' if task_name else 'default'][task_name or 'deploy']
+        return launcher.status
+
     # modify default value to ''
     def prompt(self, prompt=''):
         if self.base_model != '' and prompt == '' and ModelManager.get_model_type(self.base_model) != 'llm':

From 83e72bb2fb78b7e57fdd134dadd2f01ceeeab046 Mon Sep 17 00:00:00 2001
From: wangzhihong
Date: Fri, 18 Oct 2024 16:44:14 +0800
Subject: [PATCH 05/11] fix bugs

---
 lazyllm/launcher.py | 1 -
 lazyllm/module/module.py | 2 +-
 2 files changed, 1 insertion(+), 2 deletions(-)

diff --git a/lazyllm/launcher.py b/lazyllm/launcher.py
index c311ec06..e699ca38 100644
--- a/lazyllm/launcher.py
+++ b/lazyllm/launcher.py
@@ -32,7 +32,6 @@ class LazyLLMLaunchersBase(object, metaclass=LazyLLMRegisterMetaClass):
     Status = Status
     def __init__(self) -> None:
-        self.status = Status.TBSubmitted
         self._id = str(uuid.uuid4().hex)
     def makejob(self, cmd):

diff --git a/lazyllm/module/module.py b/lazyllm/module/module.py
index 89badb4b..1e9094ba 100644
--- a/lazyllm/module/module.py
+++ b/lazyllm/module/module.py
@@ -14,11 +14,11 @@ import lazyllm
 from lazyllm import FlatList, Option, launchers, LOG, package, kwargs, encode_request, globals
-from ..launcher import LazyLLMLaunchersBase as Launcher
 from ..components.prompter import PrompterBase, ChatPrompter, EmptyPrompter
 from ..components.formatter import FormatterBase, EmptyFormatter
 from ..components.utils import ModelManager
 from ..flow import FlowBase, Pipeline, Parallel
+from ..launcher import LazyLLMLaunchersBase as Launcher
 import uuid
 from ..client import get_redis, redis_client

From 252d270c883aeb288ffb5e1a261160ea616a4c70 Mon Sep 17 00:00:00 2001
From: wangzhihong
Date: Fri, 18 Oct 2024 17:52:25 +0800
Subject: [PATCH 06/11] fix bugs

---
 lazyllm/launcher.py | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/lazyllm/launcher.py b/lazyllm/launcher.py
index e699ca38..b1f106d0 100644
--- a/lazyllm/launcher.py
+++ b/lazyllm/launcher.py
@@ -45,6 +45,7 @@ def cleanup(self):
             v.stop()
             LOG.info(f"killed job:{k}")
         self.all_processes.pop(self._id)
+        self.wait()
     @property
     def status(self):
@@ -263,7 +264,7 @@ def _get_idle_gpus(self):
                 encoding='utf-8'
             )
         except Exception as e:
-            LOG.error(f"An error occurred: {e}")
+            LOG.warning(f"Get idle gpus failed: {e}, if you have no gpu-driver, ignor it.")
             return []
         lines = order_list.strip().split('\n')
@@ -322,6 +323,8 @@ def stop(self):
             cmd = f"scancel --quiet {self.jobid}"
             subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
                              encoding='utf-8', executable='/bin/bash')
+            self.jobid = None
+
         if self.ps:
             self.ps.terminate()
             self.queue = Queue()
@@ -548,6 +551,8 @@ def stop(self):
             self.output_thread_event.set()
             self.output_thread.join()
+        self.jobid = None
+
     def wait(self):
         if self.ps:
             self.ps.wait()

From 7ad2e34cdd2b11988e2bda24431015221ca40ace Mon Sep 17 00:00:00 2001
From: wangzhihong
Date: Mon, 21 Oct 2024 11:38:07 +0800
Subject: [PATCH 07/11] fix bugs

---
 lazyllm/common/common.py | 2 +-
 lazyllm/engine/lightengine.py | 23 +++++++++++++++++------
 lazyllm/launcher.py | 14 ++++++++++++--
 lazyllm/module/module.py | 1 +
 lazyllm/tools/rag/utils.py | 2 +-
 tests/basic_tests/test_engine.py | 25 +++++++++++++++++++++++++
 6 files changed, 57 insertions(+), 10 deletions(-)

diff --git a/lazyllm/common/common.py b/lazyllm/common/common.py
index b5f8df9c..1d2cd649 100644
--- a/lazyllm/common/common.py
+++ b/lazyllm/common/common.py
@@ -335,7 +335,7 @@ def __repr__(self): return repr(self._func)
     def __doc__(self, value): self._func.__doc__ = value
     @property
-    def flag(self):
+    def flag(self) -> once_flag:
         return getattr(self._instance, f'_lazyllm_{self._func.__name__}_once_flag')
     def __init__(self, func):

diff --git a/lazyllm/engine/lightengine.py b/lazyllm/engine/lightengine.py
index 572cb748..075ba4f1 100644
--- a/lazyllm/engine/lightengine.py
+++ b/lazyllm/engine/lightengine.py
@@ -1,6 +1,6 @@
 from .engine import Engine, Node
 from lazyllm import once_wrapper
-from typing import List, Dict, Optional
+from typing import List, Dict, Optional, overload
 import uuid
@@ -32,12 +32,23 @@ def update_node(self, node):
         self._nodes[node.id] = super(__class__, self).build_node(node)
         return self._nodes[node.id]
+    @overload
+    def start(self, nodes: str) -> None:
+        ...
+
+    @overload
     def start(self, nodes: List[Dict] = [], edges: List[Dict] = [], resources: List[Dict] = [],
-              gid: Optional[str] = None, name: Optional[str] = None):
-        gid, name = gid or str(uuid.uuid4().hex), name or str(uuid.uuid4().hex)
-        node = Node(id=gid, kind='Graph', name=name, args=dict(nodes=nodes, edges=edges, resources=resources))
-        self.build_node(node).func.start()
-        return gid
+              gid: Optional[str] = None, name: Optional[str] = None) -> str:
+        ...
+
+    def start(self, nodes=[], edges=[], resources=[], gid=None, name=None):
+        if isinstance(nodes, str):
+            self.build_node(nodes).func.start()
+        else:
+            gid, name = gid or str(uuid.uuid4().hex), name or str(uuid.uuid4().hex)
+            node = Node(id=gid, kind='Graph', name=name, args=dict(nodes=nodes, edges=edges, resources=resources))
+            self.build_node(node).func.start()
+            return gid
     def status(self, node_id: str, task_name: Optional[str] = None):
         node = self.build_node(node_id)

diff --git a/lazyllm/launcher.py b/lazyllm/launcher.py
index b1f106d0..d3c647dd 100644
--- a/lazyllm/launcher.py
+++ b/lazyllm/launcher.py
@@ -545,6 +545,10 @@ def stop(self):
                 stderr=subprocess.STDOUT,
                 encoding='utf-8', executable='/bin/bash')
+            with lazyllm.timeout(10):
+                while self.status not in (Status.Done, Status.Cancelled, Status.Failed):
+                    time.sleep(1)
+
         if self.ps:
             self.ps.terminate()
             self.queue = Queue()
@@ -563,18 +567,24 @@ def status(self):
         try:
             id_str = subprocess.check_output(['scontrol', f'--workspace-id={self.workspace_name}',
                                               'show', 'job', str(self.jobid)]).decode("utf-8")
+            if 'Error: get no resource from higg service' in id_str:
+                return Status.Cancelled
             id_json = json.loads(id_str)
             job_state = id_json['status_phase'].strip().lower()
             if job_state == 'running':
                 return Status.Running
-            elif job_state in ['tbsubmitted', 'suspending', 'suspended']:
+            elif job_state in ['tbsubmitted', 'suspending']:
                 return Status.TBSubmitted
             elif job_state in ['waiting', 'init', 'queueing', 'creating',
                                'restarting', 'recovering', 'starting']:
                 return Status.InQueue
+            elif job_state in ['suspended']:
+                return Status.Cancelled
             elif job_state == 'succeeded':
                 return Status.Done
-        except Exception:
+            LOG.warning(f'unexpected job_state: {job_state}')
+        except Exception as e:
+            LOG.warning(f'Error: {str(e)}')
             pass
         return Status.Failed

diff --git a/lazyllm/module/module.py b/lazyllm/module/module.py
index 1e9094ba..a8ccb21a 100644
--- a/lazyllm/module/module.py
+++ b/lazyllm/module/module.py
@@ -665,6 +665,7 @@ def wait(self):
     def stop(self, task_name: Optional[str] = None):
         launcher = self._impl._launchers['manual' if task_name else 'default'][task_name or 'deploy']
+        if not task_name: self._impl._get_deploy_tasks.flag.reset()
         launcher.cleanup()
     def status(self, task_name: Optional[str] = None):

diff --git a/lazyllm/tools/rag/utils.py b/lazyllm/tools/rag/utils.py
index 4e748b28..cb75f7d1 100644
--- a/lazyllm/tools/rag/utils.py
+++ b/lazyllm/tools/rag/utils.py
@@ -173,7 +173,7 @@ def get_status_cond_and_params(status: Union[str, List[str]],
     prefix = f'{prefix}.' if prefix else ''
     if isinstance(status, str):
         if status != DocListManager.Status.all:
-            conds.append('{prefix}status = ?')
+            conds.append(f'{prefix}status = ?')
             params.append(status)
     elif isinstance(status, (tuple, list)):
         conds.append(f'{prefix}status IN ({",".join("?" * len(status))})')

diff --git a/tests/basic_tests/test_engine.py b/tests/basic_tests/test_engine.py
index 0a5d1966..de329a65 100644
--- a/tests/basic_tests/test_engine.py
+++ b/tests/basic_tests/test_engine.py
@@ -3,6 +3,8 @@
 import time
 from gradio_client import Client
 import lazyllm
+import urllib3
+from lazyllm.common.common import TimeoutException
 class TestEngine(object):
@@ -242,6 +244,29 @@ def test_engine_server(self):
         lazyllm.launcher.cleanup()
         web.stop()
+    def test_engine_stop_and_restart():
+        resources = [dict(id='0', kind='LocalLLM', name='m1', args=dict(base_model='', deploy_method='dummy'))]
+        nodes = [dict(id='1', kind='SharedLLM', name='s1', args=dict(llm='0', prompt=None))]
+        edges = [dict(iid='__start__', oid='1'), dict(iid='1', oid='__end__')]
+
+        engine = LightEngine()
+        gid = engine.start(nodes, edges, resources)
+        r = engine.run(gid, '1234')
+        assert 'reply for You are an AI-Agent developed by LazyLLM' in r
+        assert '1234' in r
+
+        time.sleep(1)
+        engine.stop('0')
+        time.sleep(1)
+
+        with pytest.raises((TimeoutException, urllib3.exceptions.NewConnectionError, RuntimeError)):
+            with lazyllm.timeout(3):
+                engine.run(gid, '1234567')
+
+        engine.start('0')
+        r = engine.run(gid, '12345')
+        assert 'reply for You are an AI-Agent developed by LazyLLM' in r
+        assert '12345' in r
 class TestEngineRAG(object):

From 296d5ac309dcd57b39d12693f111bc982e177aed Mon Sep 17 00:00:00 2001
From: wangzhihong
Date: Mon, 21 Oct 2024 11:57:18 +0800
Subject: [PATCH 08/11] fix bugs

---
 lazyllm/launcher.py | 6 +-----
 tests/basic_tests/test_engine.py | 2 +-
 2 files changed, 2 insertions(+), 6 deletions(-)

diff --git a/lazyllm/launcher.py b/lazyllm/launcher.py
index d3c647dd..4039ce21 100644
--- a/lazyllm/launcher.py
+++ b/lazyllm/launcher.py
@@ -567,8 +567,6 @@ def status(self):
         try:
             id_str = subprocess.check_output(['scontrol', f'--workspace-id={self.workspace_name}',
                                               'show', 'job', str(self.jobid)]).decode("utf-8")
-            if 'Error: get no resource from higg service' in id_str:
-                return Status.Cancelled
             id_json = json.loads(id_str)
             job_state = id_json['status_phase'].strip().lower()
             if job_state == 'running':
@@ -582,9 +580,7 @@ def status(self):
                 return Status.Cancelled
             elif job_state == 'succeeded':
                 return Status.Done
-            LOG.warning(f'unexpected job_state: {job_state}')
-        except Exception as e:
-            LOG.warning(f'Error: {str(e)}')
+        except Exception:
             pass
         return Status.Failed

diff --git a/tests/basic_tests/test_engine.py b/tests/basic_tests/test_engine.py
index de329a65..21cb566c 100644
--- a/tests/basic_tests/test_engine.py
+++ b/tests/basic_tests/test_engine.py
@@ -244,7 +244,7 @@ def test_engine_server(self):
         lazyllm.launcher.cleanup()
         web.stop()
-    def test_engine_stop_and_restart():
+    def test_engine_stop_and_restart(self):
         resources = [dict(id='0', kind='LocalLLM', name='m1', args=dict(base_model='', deploy_method='dummy'))]
         nodes = [dict(id='1', kind='SharedLLM', name='s1', args=dict(llm='0', prompt=None))]
         edges = [dict(iid='__start__', oid='1'), dict(iid='1', oid='__end__')]

From e2a4084a5628e2a96ad0ecf777654712ec1bd932 Mon Sep 17 00:00:00 2001
From: sunxiaoye
Date: Mon, 21 Oct 2024 17:16:09 +0800
Subject: [PATCH 09/11] Fixed soc launch no stop

---
 lazyllm/launcher.py | 32 +++++++++++++++++++++++++++-----
 tests/basic_tests/test_engine.py | 1 +
 2 files changed, 28 insertions(+), 5 deletions(-)

diff --git a/lazyllm/launcher.py b/lazyllm/launcher.py
index 4039ce21..1fdca4a3 100644
--- a/lazyllm/launcher.py
+++ b/lazyllm/launcher.py
@@ -530,6 +530,30 @@ def get_jobip(self):
         else:
             raise RuntimeError("Cannot get IP.", f"JobID: {self.jobid}")
+    def _scancel_job(self, cmd, max_retries=3):
+        retries = 0
+        while retries < max_retries:
+            ps = subprocess.Popen(
+                cmd, shell=True, stdout=subprocess.PIPE,
+                stderr=subprocess.STDOUT,
+                encoding='utf-8', executable='/bin/bash')
+            try:
+                stdout, stderr = ps.communicate(timeout=3)
+                if stdout:
+                    LOG.info(stdout)
+                    if 'success scancel' in stdout:
+                        break
+                if stderr:
+                    LOG.error(stderr)
+            except subprocess.TimeoutExpired:
+                ps.kill()
+                LOG.warning(f"Command timed out, retrying... (Attempt {retries + 1}/{max_retries})")
+            except Exception as e:
+                LOG.error("Try to scancel, but meet: ", e)
+            retries += 1
+        if retries == max_retries:
+            LOG.error(f"Command failed after {max_retries} attempts.")
+
     def stop(self):
         if self.jobid:
             cmd = f"scancel --workspace-id={self.workspace_name} {self.jobid}"
@@ -540,12 +564,10 @@ def stop(self):
                     f"To delete by terminal, you can execute: `{cmd}`"
                 )
             else:
-                subprocess.Popen(
-                    cmd, shell=True, stdout=subprocess.PIPE,
-                    stderr=subprocess.STDOUT,
-                    encoding='utf-8', executable='/bin/bash')
+                self._scancel_job(cmd)
+                time.sleep(0.5)  # Avoid the execution of scancel and scontrol too close together.
-            with lazyllm.timeout(10):
+            with lazyllm.timeout(25):
                 while self.status not in (Status.Done, Status.Cancelled, Status.Failed):
                     time.sleep(1)

diff --git a/tests/basic_tests/test_engine.py b/tests/basic_tests/test_engine.py
index 21cb566c..6ff42d23 100644
--- a/tests/basic_tests/test_engine.py
+++ b/tests/basic_tests/test_engine.py
@@ -267,6 +267,7 @@ def test_engine_stop_and_restart(self):
         r = engine.run(gid, '12345')
         assert 'reply for You are an AI-Agent developed by LazyLLM' in r
         assert '12345' in r
+        engine.stop('0')
 class TestEngineRAG(object):

From 067e056016a2a94e0b3550c8447a90c63a599cbe Mon Sep 17 00:00:00 2001
From: wangzhihong
Date: Mon, 21 Oct 2024 19:38:27 +0800
Subject: [PATCH 10/11] temp

---
 tests/basic_tests/test_engine.py | 12 +++++-------
 1 file changed, 5 insertions(+), 7 deletions(-)

diff --git a/tests/basic_tests/test_engine.py b/tests/basic_tests/test_engine.py
index 388b9119..4443d7fd 100644
--- a/tests/basic_tests/test_engine.py
+++ b/tests/basic_tests/test_engine.py
@@ -161,8 +161,8 @@ def test_engine_formatter_end(self):
                  dict(iid='1', oid='__end__')]
         engine = LightEngine()
-        engine.start(nodes, edges)
-        r = engine.run(1)
+        gid = engine.start(nodes, edges)
+        r = engine.run(gid, 1)
         print(r, type(r))
         print(isinstance(r, lazyllm.package))
@@ -174,8 +174,8 @@ def test_engine_formatter_end(self):
         edges = [dict(iid='__start__', oid='1'), dict(iid='__start__', oid='2'), dict(iid='2', oid='3'),
                  dict(iid='1', oid='3'), dict(iid='3', oid='__end__', formatter='*[a, b]')]
         engine = LightEngine()
-        engine.start(nodes, edges)
-        r = engine.run(1)
+        gid = engine.start(nodes, edges)
+        r = engine.run(gid, 1)
         print(r, type(r))
         print(isinstance(r, lazyllm.package))
@@ -283,9 +283,7 @@ def test_engine_stop_and_restart(self):
         assert 'reply for You are an AI-Agent developed by LazyLLM' in r
         assert '1234' in r
-        time.sleep(1)
         engine.stop('0')
-        time.sleep(1)
         with pytest.raises((TimeoutException, urllib3.exceptions.NewConnectionError, RuntimeError)):
             with lazyllm.timeout(3):
@@ -295,7 +293,7 @@ def test_engine_stop_and_restart(self):
         r = engine.run(gid, '12345')
         assert 'reply for You are an AI-Agent developed by LazyLLM' in r
         assert '12345' in r
-        engine.stop('0')
+        engine.stop(gid)
 class TestEngineRAG(object):

From 1a98e80a6ad24bb2ebf9a9fc73e39ae559a6114a Mon Sep 17 00:00:00 2001
From: wangzhihong
Date: Mon, 21 Oct 2024 19:41:27 +0800
Subject: [PATCH 11/11] temp

---
 lazyllm/module/module.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/lazyllm/module/module.py b/lazyllm/module/module.py
index a8ccb21a..d216c499 100644
--- a/lazyllm/module/module.py
+++ b/lazyllm/module/module.py
@@ -437,6 +437,7 @@ def _get_deploy_tasks(self):
     def stop(self):
         self._launcher.cleanup()
+        self._get_deploy_tasks.flag.reset()
     def __del__(self):
         self.stop()
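Usage sketch (not part of the patches): the lifecycle APIs added across patches 03-10 let a caller stop, restart, and query a deployed resource by node id. The payloads below mirror test_engine_stop_and_restart above; the 'dummy' deploy method is the one used in those tests and is assumed to be available in this revision.

```python
from lazyllm.engine.lightengine import LightEngine

# Resource '0' is a locally deployed LLM; node '1' shares it inside a graph (illustrative payloads).
resources = [dict(id='0', kind='LocalLLM', name='m1', args=dict(base_model='', deploy_method='dummy'))]
nodes = [dict(id='1', kind='SharedLLM', name='s1', args=dict(llm='0', prompt=None))]
edges = [dict(iid='__start__', oid='1'), dict(iid='1', oid='__end__')]

engine = LightEngine()
gid = engine.start(nodes, edges, resources)
print(engine.run(gid, 'hello'))

print(engine.status('0'))   # launcher status of the deployed LocalLLM resource
engine.stop('0')            # stop a resource by node id; graphs can be stopped by gid
engine.start('0')           # start() with a node id restarts that resource
engine.stop(gid)
```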