From cf0440e16741a9482d2438ce50138e8616f66700 Mon Sep 17 00:00:00 2001 From: a76yyyy <56478790+a76yyyy@users.noreply.github.com> Date: Sat, 3 Feb 2024 13:57:01 +0800 Subject: [PATCH] =?UTF-8?q?refactor(qd):=20=F0=9F=A6=84=20=E4=BC=98?= =?UTF-8?q?=E5=8C=96=E4=BB=A3=E7=A0=81=E5=92=8C=20logger=20=E6=A0=BC?= =?UTF-8?q?=E5=BC=8F=E5=92=8C=E9=9D=99=E6=80=81=E7=B1=BB=E5=9E=8B=20(#496)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * refactor(libs&main): 🦄 优化代码和 logger 格式和静态类型 by pylint/flake8/mypy * Bugfix(logger): 🐛 修复部分日志不显示错误详情 #491 需打开 config.traceback_print 开关 * Change(libs): 📝 修改解析Url方式 * refactor(db&web): 🦄 优化代码和 logger 格式和静态类型 * refactor(web): 🦄 合并 logger 和错误栈打印 * Typo(web): ✏️ 修复输出文本显示 * Change(web): 📝 优化 for 循环调试显示 * Bugfix(web): 🐛 修复无法跳转发布项详情 * Bugfix(web): 🐛 发布模板时会在循环内修改模板 * Bugfix(web): 🐛 删除公开模板时自动取消发布请求 * Bugfix(web): 🐛 修复任务定时开关设置 * Change(config): 📝 运行自定义单用户拥有记事本最大数量配置 --- .flake8 | 4 + Pipfile | 5 + Pipfile.lock | 1078 ++++++++++++++++------------ backup.py | 16 +- chrole.py | 12 +- config.py | 158 ++-- db/__init__.py | 23 +- db/basedb.py | 108 ++- db/db_converter.py | 243 +++---- db/notepad.py | 50 +- db/pubtpl.py | 8 +- db/push_request.py | 34 +- db/redisdb.py | 44 +- db/site.py | 8 +- db/task.py | 48 +- db/tasklog.py | 16 +- db/tpl.py | 51 +- db/user.py | 98 +-- docker-compose.yml | 1 + libs/__init__.py | 3 - libs/config_utils.py | 13 + libs/convert.py | 11 +- libs/cookie_utils.py | 31 +- libs/fetcher.py | 456 ++++++------ libs/funcs.py | 265 ++++--- libs/log.py | 14 +- libs/mcrypto.py | 82 ++- libs/parse_url.py | 82 ++- libs/safe_eval.py | 150 ++-- libs/utils.py | 239 +++--- mypy.ini | 5 + qd.py | 82 ++- requirements.txt | 51 +- run.py | 31 +- web.py | 13 +- web/__init__.py | 5 - web/app.py | 42 +- web/docs/guide/deployment.md | 1 + web/docs/zh_CN/guide/deployment.md | 1 + web/handlers/__init__.py | 18 +- web/handlers/about.py | 11 +- web/handlers/base.py | 54 +- web/handlers/har.py | 228 +++--- 
web/handlers/index.py | 7 +- web/handlers/login.py | 143 ++-- web/handlers/my.py | 30 +- web/handlers/push.py | 140 ++-- web/handlers/site.py | 53 +- web/handlers/subscribe.py | 254 ++++--- web/handlers/task.py | 406 ++++++----- web/handlers/task_multi.py | 136 ++-- web/handlers/tpl.py | 94 +-- web/handlers/user.py | 603 ++++++++-------- web/handlers/util.py | 581 +++++++-------- worker.py | 222 +++--- 55 files changed, 3485 insertions(+), 3077 deletions(-) create mode 100644 .flake8 create mode 100644 libs/config_utils.py create mode 100644 mypy.ini diff --git a/.flake8 b/.flake8 new file mode 100644 index 00000000000..f687ff9488c --- /dev/null +++ b/.flake8 @@ -0,0 +1,4 @@ +[flake8] +max-line-length = 120 +ignore = E203, E266, E501, W503 +exclude = .git, __pycache__, venv, dist, build diff --git a/Pipfile b/Pipfile index cd9bcce7861..770e0fbf7eb 100644 --- a/Pipfile +++ b/Pipfile @@ -37,6 +37,11 @@ tzdata = "*" [dev-packages] #ddddocr = "*" #pycurl = {version = "*", markers="sys_platform != 'win32'"} +types-croniter = "*" +types-requests = "*" +types-python-dateutil = "*" +sqlalchemy2-stubs = "*" +types-aiofiles = "*" [requires] python_version = "3.11" diff --git a/Pipfile.lock b/Pipfile.lock index 7c95032b9f7..34d64ed8513 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "093459aff2e02fa88e8ff11dab74fe5b231c6f5513175a7c39baa82ca932c4d9" + "sha256": "c0c91a82d0dacb1a2d38ccbd60dab84cc41a7a3d49d832fe3eb47f15554b7f69" }, "pipfile-spec": 6, "requires": { @@ -27,100 +27,91 @@ "sha256:84ec2218d8419404abcb9f0c02df3f34c6e0a68ed41072acfb1cef5cbc29051a" ], "index": "tencent", + "markers": "python_version >= '3.7'", "version": "==23.2.1" }, "aiohttp": { "hashes": [ - "sha256:002f23e6ea8d3dd8d149e569fd580c999232b5fbc601c48d55398fbc2e582e8c", - "sha256:01770d8c04bd8db568abb636c1fdd4f7140b284b8b3e0b4584f070180c1e5c62", - "sha256:0912ed87fee967940aacc5306d3aa8ba3a459fcd12add0b407081fbefc931e53", - 
"sha256:0cccd1de239afa866e4ce5c789b3032442f19c261c7d8a01183fd956b1935349", - "sha256:0fa375b3d34e71ccccf172cab401cd94a72de7a8cc01847a7b3386204093bb47", - "sha256:13da35c9ceb847732bf5c6c5781dcf4780e14392e5d3b3c689f6d22f8e15ae31", - "sha256:14cd52ccf40006c7a6cd34a0f8663734e5363fd981807173faf3a017e202fec9", - "sha256:16d330b3b9db87c3883e565340d292638a878236418b23cc8b9b11a054aaa887", - "sha256:1bed815f3dc3d915c5c1e556c397c8667826fbc1b935d95b0ad680787896a358", - "sha256:1d84166673694841d8953f0a8d0c90e1087739d24632fe86b1a08819168b4566", - "sha256:1f13f60d78224f0dace220d8ab4ef1dbc37115eeeab8c06804fec11bec2bbd07", - "sha256:229852e147f44da0241954fc6cb910ba074e597f06789c867cb7fb0621e0ba7a", - "sha256:253bf92b744b3170eb4c4ca2fa58f9c4b87aeb1df42f71d4e78815e6e8b73c9e", - "sha256:255ba9d6d5ff1a382bb9a578cd563605aa69bec845680e21c44afc2670607a95", - "sha256:2817b2f66ca82ee699acd90e05c95e79bbf1dc986abb62b61ec8aaf851e81c93", - "sha256:2b8d4e166e600dcfbff51919c7a3789ff6ca8b3ecce16e1d9c96d95dd569eb4c", - "sha256:2d5b785c792802e7b275c420d84f3397668e9d49ab1cb52bd916b3b3ffcf09ad", - "sha256:3161ce82ab85acd267c8f4b14aa226047a6bee1e4e6adb74b798bd42c6ae1f80", - "sha256:33164093be11fcef3ce2571a0dccd9041c9a93fa3bde86569d7b03120d276c6f", - "sha256:39a312d0e991690ccc1a61f1e9e42daa519dcc34ad03eb6f826d94c1190190dd", - "sha256:3b2ab182fc28e7a81f6c70bfbd829045d9480063f5ab06f6e601a3eddbbd49a0", - "sha256:3c68330a59506254b556b99a91857428cab98b2f84061260a67865f7f52899f5", - "sha256:3f0e27e5b733803333bb2371249f41cf42bae8884863e8e8965ec69bebe53132", - "sha256:3f5c7ce535a1d2429a634310e308fb7d718905487257060e5d4598e29dc17f0b", - "sha256:3fd194939b1f764d6bb05490987bfe104287bbf51b8d862261ccf66f48fb4096", - "sha256:41bdc2ba359032e36c0e9de5a3bd00d6fb7ea558a6ce6b70acedf0da86458321", - "sha256:41d55fc043954cddbbd82503d9cc3f4814a40bcef30b3569bc7b5e34130718c1", - "sha256:42c89579f82e49db436b69c938ab3e1559e5a4409eb8639eb4143989bc390f2f", - "sha256:45ad816b2c8e3b60b510f30dbd37fe74fd4a772248a52bb021f6fd65dff809b6", 
- "sha256:4ac39027011414dbd3d87f7edb31680e1f430834c8cef029f11c66dad0670aa5", - "sha256:4d4cbe4ffa9d05f46a28252efc5941e0462792930caa370a6efaf491f412bc66", - "sha256:4fcf3eabd3fd1a5e6092d1242295fa37d0354b2eb2077e6eb670accad78e40e1", - "sha256:5d791245a894be071d5ab04bbb4850534261a7d4fd363b094a7b9963e8cdbd31", - "sha256:6c43ecfef7deaf0617cee936836518e7424ee12cb709883f2c9a1adda63cc460", - "sha256:6c5f938d199a6fdbdc10bbb9447496561c3a9a565b43be564648d81e1102ac22", - "sha256:6e2f9cc8e5328f829f6e1fb74a0a3a939b14e67e80832975e01929e320386b34", - "sha256:713103a8bdde61d13490adf47171a1039fd880113981e55401a0f7b42c37d071", - "sha256:71783b0b6455ac8f34b5ec99d83e686892c50498d5d00b8e56d47f41b38fbe04", - "sha256:76b36b3124f0223903609944a3c8bf28a599b2cc0ce0be60b45211c8e9be97f8", - "sha256:7bc88fc494b1f0311d67f29fee6fd636606f4697e8cc793a2d912ac5b19aa38d", - "sha256:7ee912f7e78287516df155f69da575a0ba33b02dd7c1d6614dbc9463f43066e3", - "sha256:86f20cee0f0a317c76573b627b954c412ea766d6ada1a9fcf1b805763ae7feeb", - "sha256:89341b2c19fb5eac30c341133ae2cc3544d40d9b1892749cdd25892bbc6ac951", - "sha256:8a9b5a0606faca4f6cc0d338359d6fa137104c337f489cd135bb7fbdbccb1e39", - "sha256:8d399dade330c53b4106160f75f55407e9ae7505263ea86f2ccca6bfcbdb4921", - "sha256:8e31e9db1bee8b4f407b77fd2507337a0a80665ad7b6c749d08df595d88f1cf5", - "sha256:90c72ebb7cb3a08a7f40061079817133f502a160561d0675b0a6adf231382c92", - "sha256:918810ef188f84152af6b938254911055a72e0f935b5fbc4c1a4ed0b0584aed1", - "sha256:93c15c8e48e5e7b89d5cb4613479d144fda8344e2d886cf694fd36db4cc86865", - "sha256:96603a562b546632441926cd1293cfcb5b69f0b4159e6077f7c7dbdfb686af4d", - "sha256:99c5ac4ad492b4a19fc132306cd57075c28446ec2ed970973bbf036bcda1bcc6", - "sha256:9c19b26acdd08dd239e0d3669a3dddafd600902e37881f13fbd8a53943079dbc", - "sha256:9de50a199b7710fa2904be5a4a9b51af587ab24c8e540a7243ab737b45844543", - "sha256:9e2ee0ac5a1f5c7dd3197de309adfb99ac4617ff02b0603fd1e65b07dc772e4b", - 
"sha256:a2ece4af1f3c967a4390c284797ab595a9f1bc1130ef8b01828915a05a6ae684", - "sha256:a3628b6c7b880b181a3ae0a0683698513874df63783fd89de99b7b7539e3e8a8", - "sha256:ad1407db8f2f49329729564f71685557157bfa42b48f4b93e53721a16eb813ed", - "sha256:b04691bc6601ef47c88f0255043df6f570ada1a9ebef99c34bd0b72866c217ae", - "sha256:b0cf2a4501bff9330a8a5248b4ce951851e415bdcce9dc158e76cfd55e15085c", - "sha256:b2fe42e523be344124c6c8ef32a011444e869dc5f883c591ed87f84339de5976", - "sha256:b30e963f9e0d52c28f284d554a9469af073030030cef8693106d918b2ca92f54", - "sha256:bb54c54510e47a8c7c8e63454a6acc817519337b2b78606c4e840871a3e15349", - "sha256:bd111d7fc5591ddf377a408ed9067045259ff2770f37e2d94e6478d0f3fc0c17", - "sha256:bdf70bfe5a1414ba9afb9d49f0c912dc524cf60141102f3a11143ba3d291870f", - "sha256:ca80e1b90a05a4f476547f904992ae81eda5c2c85c66ee4195bb8f9c5fb47f28", - "sha256:caf486ac1e689dda3502567eb89ffe02876546599bbf915ec94b1fa424eeffd4", - "sha256:ccc360e87341ad47c777f5723f68adbb52b37ab450c8bc3ca9ca1f3e849e5fe2", - "sha256:d25036d161c4fe2225d1abff2bd52c34ed0b1099f02c208cd34d8c05729882f0", - "sha256:d52d5dc7c6682b720280f9d9db41d36ebe4791622c842e258c9206232251ab2b", - "sha256:d67f8baed00870aa390ea2590798766256f31dc5ed3ecc737debb6e97e2ede78", - "sha256:d76e8b13161a202d14c9584590c4df4d068c9567c99506497bdd67eaedf36403", - "sha256:d95fc1bf33a9a81469aa760617b5971331cdd74370d1214f0b3109272c0e1e3c", - "sha256:de6a1c9f6803b90e20869e6b99c2c18cef5cc691363954c93cb9adeb26d9f3ae", - "sha256:e1d8cb0b56b3587c5c01de3bf2f600f186da7e7b5f7353d1bf26a8ddca57f965", - "sha256:e2a988a0c673c2e12084f5e6ba3392d76c75ddb8ebc6c7e9ead68248101cd446", - "sha256:e3f1e3f1a1751bb62b4a1b7f4e435afcdade6c17a4fd9b9d43607cebd242924a", - "sha256:e6a00ffcc173e765e200ceefb06399ba09c06db97f401f920513a10c803604ca", - "sha256:e827d48cf802de06d9c935088c2924e3c7e7533377d66b6f31ed175c1620e05e", - "sha256:ebf3fd9f141700b510d4b190094db0ce37ac6361a6806c153c161dc6c041ccda", - "sha256:ec00c3305788e04bf6d29d42e504560e159ccaf0be30c09203b468a6c1ccd3b2", 
- "sha256:ec4fd86658c6a8964d75426517dc01cbf840bbf32d055ce64a9e63a40fd7b771", - "sha256:efd2fcf7e7b9d7ab16e6b7d54205beded0a9c8566cb30f09c1abe42b4e22bdcb", - "sha256:f0f03211fd14a6a0aed2997d4b1c013d49fb7b50eeb9ffdf5e51f23cfe2c77fa", - "sha256:f628dbf3c91e12f4d6c8b3f092069567d8eb17814aebba3d7d60c149391aee3a", - "sha256:f8ef51e459eb2ad8e7a66c1d6440c808485840ad55ecc3cafefadea47d1b1ba2", - "sha256:fc37e9aef10a696a5a4474802930079ccfc14d9f9c10b4662169671ff034b7df", - "sha256:fdee8405931b0615220e5ddf8cd7edd8592c606a8e4ca2a00704883c396e4479" + "sha256:02ab6006ec3c3463b528374c4cdce86434e7b89ad355e7bf29e2f16b46c7dd6f", + "sha256:04fa38875e53eb7e354ece1607b1d2fdee2d175ea4e4d745f6ec9f751fe20c7c", + "sha256:0b0a6a36ed7e164c6df1e18ee47afbd1990ce47cb428739d6c99aaabfaf1b3af", + "sha256:0d406b01a9f5a7e232d1b0d161b40c05275ffbcbd772dc18c1d5a570961a1ca4", + "sha256:0e49b08eafa4f5707ecfb321ab9592717a319e37938e301d462f79b4e860c32a", + "sha256:0e7ba7ff228c0d9a2cd66194e90f2bca6e0abca810b786901a569c0de082f489", + "sha256:11cb254e397a82efb1805d12561e80124928e04e9c4483587ce7390b3866d213", + "sha256:11ff168d752cb41e8492817e10fb4f85828f6a0142b9726a30c27c35a1835f01", + "sha256:176df045597e674fa950bf5ae536be85699e04cea68fa3a616cf75e413737eb5", + "sha256:219a16763dc0294842188ac8a12262b5671817042b35d45e44fd0a697d8c8361", + "sha256:22698f01ff5653fe66d16ffb7658f582a0ac084d7da1323e39fd9eab326a1f26", + "sha256:237533179d9747080bcaad4d02083ce295c0d2eab3e9e8ce103411a4312991a0", + "sha256:289ba9ae8e88d0ba16062ecf02dd730b34186ea3b1e7489046fc338bdc3361c4", + "sha256:2c59e0076ea31c08553e868cec02d22191c086f00b44610f8ab7363a11a5d9d8", + "sha256:2c9376e2b09895c8ca8b95362283365eb5c03bdc8428ade80a864160605715f1", + "sha256:3135713c5562731ee18f58d3ad1bf41e1d8883eb68b363f2ffde5b2ea4b84cc7", + "sha256:3b9c7426923bb7bd66d409da46c41e3fb40f5caf679da624439b9eba92043fa6", + "sha256:3c0266cd6f005e99f3f51e583012de2778e65af6b73860038b968a0a8888487a", + 
"sha256:41473de252e1797c2d2293804e389a6d6986ef37cbb4a25208de537ae32141dd", + "sha256:4831df72b053b1eed31eb00a2e1aff6896fb4485301d4ccb208cac264b648db4", + "sha256:49f0c1b3c2842556e5de35f122fc0f0b721334ceb6e78c3719693364d4af8499", + "sha256:4b4c452d0190c5a820d3f5c0f3cd8a28ace48c54053e24da9d6041bf81113183", + "sha256:4ee8caa925aebc1e64e98432d78ea8de67b2272252b0a931d2ac3bd876ad5544", + "sha256:500f1c59906cd142d452074f3811614be04819a38ae2b3239a48b82649c08821", + "sha256:5216b6082c624b55cfe79af5d538e499cd5f5b976820eac31951fb4325974501", + "sha256:54311eb54f3a0c45efb9ed0d0a8f43d1bc6060d773f6973efd90037a51cd0a3f", + "sha256:54631fb69a6e44b2ba522f7c22a6fb2667a02fd97d636048478db2fd8c4e98fe", + "sha256:565760d6812b8d78d416c3c7cfdf5362fbe0d0d25b82fed75d0d29e18d7fc30f", + "sha256:598db66eaf2e04aa0c8900a63b0101fdc5e6b8a7ddd805c56d86efb54eb66672", + "sha256:5c4fa235d534b3547184831c624c0b7c1e262cd1de847d95085ec94c16fddcd5", + "sha256:69985d50a2b6f709412d944ffb2e97d0be154ea90600b7a921f95a87d6f108a2", + "sha256:69da0f3ed3496808e8cbc5123a866c41c12c15baaaead96d256477edf168eb57", + "sha256:6c93b7c2e52061f0925c3382d5cb8980e40f91c989563d3d32ca280069fd6a87", + "sha256:70907533db712f7aa791effb38efa96f044ce3d4e850e2d7691abd759f4f0ae0", + "sha256:81b77f868814346662c96ab36b875d7814ebf82340d3284a31681085c051320f", + "sha256:82eefaf1a996060602f3cc1112d93ba8b201dbf5d8fd9611227de2003dddb3b7", + "sha256:85c3e3c9cb1d480e0b9a64c658cd66b3cfb8e721636ab8b0e746e2d79a7a9eed", + "sha256:8a22a34bc594d9d24621091d1b91511001a7eea91d6652ea495ce06e27381f70", + "sha256:8cef8710fb849d97c533f259103f09bac167a008d7131d7b2b0e3a33269185c0", + "sha256:8d44e7bf06b0c0a70a20f9100af9fcfd7f6d9d3913e37754c12d424179b4e48f", + "sha256:8d7f98fde213f74561be1d6d3fa353656197f75d4edfbb3d94c9eb9b0fc47f5d", + "sha256:8d8e4450e7fe24d86e86b23cc209e0023177b6d59502e33807b732d2deb6975f", + "sha256:8fc49a87ac269d4529da45871e2ffb6874e87779c3d0e2ccd813c0899221239d", + "sha256:90ec72d231169b4b8d6085be13023ece8fa9b1bb495e4398d847e25218e0f431", 
+ "sha256:91c742ca59045dce7ba76cab6e223e41d2c70d79e82c284a96411f8645e2afff", + "sha256:9b05d33ff8e6b269e30a7957bd3244ffbce2a7a35a81b81c382629b80af1a8bf", + "sha256:9b05d5cbe9dafcdc733262c3a99ccf63d2f7ce02543620d2bd8db4d4f7a22f83", + "sha256:9c5857612c9813796960c00767645cb5da815af16dafb32d70c72a8390bbf690", + "sha256:a34086c5cc285be878622e0a6ab897a986a6e8bf5b67ecb377015f06ed316587", + "sha256:ab221850108a4a063c5b8a70f00dd7a1975e5a1713f87f4ab26a46e5feac5a0e", + "sha256:b796b44111f0cab6bbf66214186e44734b5baab949cb5fb56154142a92989aeb", + "sha256:b8c3a67eb87394386847d188996920f33b01b32155f0a94f36ca0e0c635bf3e3", + "sha256:bcb6532b9814ea7c5a6a3299747c49de30e84472fa72821b07f5a9818bce0f66", + "sha256:bcc0ea8d5b74a41b621ad4a13d96c36079c81628ccc0b30cfb1603e3dfa3a014", + "sha256:bea94403a21eb94c93386d559bce297381609153e418a3ffc7d6bf772f59cc35", + "sha256:bff7e2811814fa2271be95ab6e84c9436d027a0e59665de60edf44e529a42c1f", + "sha256:c72444d17777865734aa1a4d167794c34b63e5883abb90356a0364a28904e6c0", + "sha256:c7b5d5d64e2a14e35a9240b33b89389e0035e6de8dbb7ffa50d10d8b65c57449", + "sha256:c7e939f1ae428a86e4abbb9a7c4732bf4706048818dfd979e5e2839ce0159f23", + "sha256:c88a15f272a0ad3d7773cf3a37cc7b7d077cbfc8e331675cf1346e849d97a4e5", + "sha256:c9110c06eaaac7e1f5562caf481f18ccf8f6fdf4c3323feab28a93d34cc646bd", + "sha256:ca7ca5abfbfe8d39e653870fbe8d7710be7a857f8a8386fc9de1aae2e02ce7e4", + "sha256:cae4c0c2ca800c793cae07ef3d40794625471040a87e1ba392039639ad61ab5b", + "sha256:cdefe289681507187e375a5064c7599f52c40343a8701761c802c1853a504558", + "sha256:cf2a0ac0615842b849f40c4d7f304986a242f1e68286dbf3bd7a835e4f83acfd", + "sha256:cfeadf42840c1e870dc2042a232a8748e75a36b52d78968cda6736de55582766", + "sha256:d737e69d193dac7296365a6dcb73bbbf53bb760ab25a3727716bbd42022e8d7a", + "sha256:d7481f581251bb5558ba9f635db70908819caa221fc79ee52a7f58392778c636", + "sha256:df9cf74b9bc03d586fc53ba470828d7b77ce51b0582d1d0b5b2fb673c0baa32d", + 
"sha256:e1f80197f8b0b846a8d5cf7b7ec6084493950d0882cc5537fb7b96a69e3c8590", + "sha256:ecca113f19d5e74048c001934045a2b9368d77b0b17691d905af18bd1c21275e", + "sha256:ee2527134f95e106cc1653e9ac78846f3a2ec1004cf20ef4e02038035a74544d", + "sha256:f27fdaadce22f2ef950fc10dcdf8048407c3b42b73779e48a4e76b3c35bca26c", + "sha256:f694dc8a6a3112059258a725a4ebe9acac5fe62f11c77ac4dcf896edfa78ca28", + "sha256:f800164276eec54e0af5c99feb9494c295118fc10a11b997bbb1348ba1a52065", + "sha256:ffcd828e37dc219a72c9012ec44ad2e7e3066bec6ff3aaa19e7d435dbf4032ca" ], "index": "tencent", - "version": "==3.8.6" + "markers": "python_version >= '3.8'", + "version": "==3.9.1" }, "aiomysql": { "hashes": [ @@ -128,6 +119,7 @@ "sha256:b7c26da0daf23a5ec5e0b133c03d20657276e4eae9b73e040b72787f6f6ade0a" ], "index": "tencent", + "markers": "python_version >= '3.7'", "version": "==0.2.0" }, "aiosignal": { @@ -144,31 +136,24 @@ "sha256:edba222e03453e094a3ce605db1b970c4b3376264e56f32e2a4959f948d66a96" ], "index": "tencent", - "version": "==0.19.0" - }, - "async-timeout": { - "hashes": [ - "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f", - "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028" - ], "markers": "python_version >= '3.7'", - "version": "==4.0.3" + "version": "==0.19.0" }, "attrs": { "hashes": [ - "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04", - "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015" + "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30", + "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1" ], "markers": "python_version >= '3.7'", - "version": "==23.1.0" + "version": "==23.2.0" }, "certifi": { "hashes": [ - "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082", - "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9" + "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1", + 
"sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474" ], "markers": "python_version >= '3.6'", - "version": "==2023.7.22" + "version": "==2023.11.17" }, "cffi": { "hashes": [ @@ -329,7 +314,7 @@ "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956", "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357" ], - "markers": "python_version >= '3.8'", + "markers": "platform_python_implementation != 'PyPy'", "version": "==1.16.0" }, "charset-normalizer": { @@ -426,6 +411,7 @@ "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561" ], "index": "tencent", + "markers": "python_full_version >= '3.7.0'", "version": "==3.3.2" }, "colorama": { @@ -435,7 +421,6 @@ "sha256:96e0137fb3ab6b56576b4638116d77c59f3e0565f4ea081172e4721c722afa92", "sha256:bc3a1efa0b297242dcd0757e2e83d358bcd18bda77735e493aa89a634e74c9bf" ], - "index": "tencent", "markers": "sys_platform == 'win32'", "version": "==0.4.6" }, @@ -445,182 +430,211 @@ "sha256:d199b2ec3ea5e82988d1f72022433c5f9302b3b3ea9e6bfd6a1518f6ea5e700a" ], "index": "pypi", + "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==2.0.1" }, "cryptography": { "hashes": [ - "sha256:0c327cac00f082013c7c9fb6c46b7cc9fa3c288ca702c74773968173bda421bf", - "sha256:0d2a6a598847c46e3e321a7aef8af1436f11c27f1254933746304ff014664d84", - "sha256:227ec057cd32a41c6651701abc0328135e472ed450f47c2766f23267b792a88e", - "sha256:22892cc830d8b2c89ea60148227631bb96a7da0c1b722f2aac8824b1b7c0b6b8", - "sha256:392cb88b597247177172e02da6b7a63deeff1937fa6fec3bbf902ebd75d97ec7", - "sha256:3be3ca726e1572517d2bef99a818378bbcf7d7799d5372a46c79c29eb8d166c1", - "sha256:573eb7128cbca75f9157dcde974781209463ce56b5804983e11a1c462f0f4e88", - "sha256:580afc7b7216deeb87a098ef0674d6ee34ab55993140838b14c9b83312b37b86", - "sha256:5a70187954ba7292c7876734183e810b728b4f3965fbe571421cb2434d279179", - "sha256:73801ac9736741f220e20435f84ecec75ed70eda90f781a148f1bad546963d81", 
- "sha256:7d208c21e47940369accfc9e85f0de7693d9a5d843c2509b3846b2db170dfd20", - "sha256:8254962e6ba1f4d2090c44daf50a547cd5f0bf446dc658a8e5f8156cae0d8548", - "sha256:88417bff20162f635f24f849ab182b092697922088b477a7abd6664ddd82291d", - "sha256:a48e74dad1fb349f3dc1d449ed88e0017d792997a7ad2ec9587ed17405667e6d", - "sha256:b948e09fe5fb18517d99994184854ebd50b57248736fd4c720ad540560174ec5", - "sha256:c707f7afd813478e2019ae32a7c49cd932dd60ab2d2a93e796f68236b7e1fbf1", - "sha256:d38e6031e113b7421db1de0c1b1f7739564a88f1684c6b89234fbf6c11b75147", - "sha256:d3977f0e276f6f5bf245c403156673db103283266601405376f075c849a0b936", - "sha256:da6a0ff8f1016ccc7477e6339e1d50ce5f59b88905585f77193ebd5068f1e797", - "sha256:e270c04f4d9b5671ebcc792b3ba5d4488bf7c42c3c241a3748e2599776f29696", - "sha256:e886098619d3815e0ad5790c973afeee2c0e6e04b4da90b88e6bd06e2a0b1b72", - "sha256:ec3b055ff8f1dce8e6ef28f626e0972981475173d7973d63f271b29c8a2897da", - "sha256:fba1e91467c65fe64a82c689dc6cf58151158993b13eb7a7f3f4b7f395636723" + "sha256:0b7cacc142260ada944de070ce810c3e2a438963ee3deb45aa26fd2cee94c9a4", + "sha256:126e0ba3cc754b200a2fb88f67d66de0d9b9e94070c5bc548318c8dab6383cb6", + "sha256:160fa08dfa6dca9cb8ad9bd84e080c0db6414ba5ad9a7470bc60fb154f60111e", + "sha256:16b9260d04a0bfc8952b00335ff54f471309d3eb9d7e8dbfe9b0bd9e26e67881", + "sha256:25ec6e9e81de5d39f111a4114193dbd39167cc4bbd31c30471cebedc2a92c323", + "sha256:265bdc693570b895eb641410b8fc9e8ddbce723a669236162b9d9cfb70bd8d77", + "sha256:2dff7a32880a51321f5de7869ac9dde6b1fca00fc1fef89d60e93f215468e824", + "sha256:2fe16624637d6e3e765530bc55caa786ff2cbca67371d306e5d0a72e7c3d0407", + "sha256:32ea63ceeae870f1a62e87f9727359174089f7b4b01e4999750827bf10e15d60", + "sha256:351db02c1938c8e6b1fee8a78d6b15c5ccceca7a36b5ce48390479143da3b411", + "sha256:430100abed6d3652208ae1dd410c8396213baee2e01a003a4449357db7dc9e14", + "sha256:4d84673c012aa698555d4710dcfe5f8a0ad76ea9dde8ef803128cc669640a2e0", + 
"sha256:50aecd93676bcca78379604ed664c45da82bc1241ffb6f97f6b7392ed5bc6f04", + "sha256:6ac8924085ed8287545cba89dc472fc224c10cc634cdf2c3e2866fe868108e77", + "sha256:6bfd823b336fdcd8e06285ae8883d3d2624d3bdef312a0e2ef905f332f8e9302", + "sha256:727387886c9c8de927c360a396c5edcb9340d9e960cda145fca75bdafdabd24c", + "sha256:7911586fc69d06cd0ab3f874a169433db1bc2f0e40988661408ac06c4527a986", + "sha256:802d6f83233cf9696b59b09eb067e6b4d5ae40942feeb8e13b213c8fad47f1aa", + "sha256:8d7efb6bf427d2add2f40b6e1e8e476c17508fa8907234775214b153e69c2e11", + "sha256:9544492e8024f29919eac2117edd8c950165e74eb551a22c53f6fdf6ba5f4cb8", + "sha256:95d900d19a370ae36087cc728e6e7be9c964ffd8cbcb517fd1efb9c9284a6abc", + "sha256:9d61fcdf37647765086030d81872488e4cb3fafe1d2dda1d487875c3709c0a49", + "sha256:ab6b302d51fbb1dd339abc6f139a480de14d49d50f65fdc7dff782aa8631d035", + "sha256:b512f33c6ab195852595187af5440d01bb5f8dd57cb7a91e1e009a17f1b7ebca", + "sha256:cb2861a9364fa27d24832c718150fdbf9ce6781d7dc246a516435f57cfa31fe7", + "sha256:d3594947d2507d4ef7a180a7f49a6db41f75fb874c2fd0e94f36b89bfd678bf2", + "sha256:d3902c779a92151f134f68e555dd0b17c658e13429f270d8a847399b99235a3f", + "sha256:d50718dd574a49d3ef3f7ef7ece66ef281b527951eb2267ce570425459f6a404", + "sha256:e5edf189431b4d51f5c6fb4a95084a75cef6b4646c934eb6e32304fc720e1453", + "sha256:e6edc3a568667daf7d349d7e820783426ee4f1c0feab86c29bd1d6fe2755e009", + "sha256:ed1b2130f5456a09a134cc505a17fc2830a1a48ed53efd37dcc904a23d7b82fa", + "sha256:fd33f53809bb363cf126bebe7a99d97735988d9b0131a2be59fbf83e1259a5b7" ], "index": "tencent", - "version": "==41.0.5" + "markers": "python_version >= '3.7'", + "version": "==42.0.1" }, "faker": { "hashes": [ - "sha256:171b27ba106cf69e30a91ac471407c2362bd6af27738e2461dc441aeff5eed91", - "sha256:df44b68b9d231e784f4bfe616d781576cfef9f0c5d9a17671bf84dc10d7b44d6" + "sha256:4e7bb19c1955bb978bf1d4cb0cf25930e37de0e7dfbc6abd768fd4784d9a9653", + "sha256:79cbe3be786789efa05b8c75d7e0ec4efa6ecb786b8fe90eae49c2f8f9cbe31a" ], "index": "pypi", 
- "version": "==20.0.0" + "markers": "python_version >= '3.8'", + "version": "==22.5.1" }, "frozenlist": { "hashes": [ - "sha256:007df07a6e3eb3e33e9a1fe6a9db7af152bbd8a185f9aaa6ece10a3529e3e1c6", - "sha256:008eb8b31b3ea6896da16c38c1b136cb9fec9e249e77f6211d479db79a4eaf01", - "sha256:09163bdf0b2907454042edb19f887c6d33806adc71fbd54afc14908bfdc22251", - "sha256:0c7c1b47859ee2cac3846fde1c1dc0f15da6cec5a0e5c72d101e0f83dcb67ff9", - "sha256:0e5c8764c7829343d919cc2dfc587a8db01c4f70a4ebbc49abde5d4b158b007b", - "sha256:10ff5faaa22786315ef57097a279b833ecab1a0bfb07d604c9cbb1c4cdc2ed87", - "sha256:17ae5cd0f333f94f2e03aaf140bb762c64783935cc764ff9c82dff626089bebf", - "sha256:19488c57c12d4e8095a922f328df3f179c820c212940a498623ed39160bc3c2f", - "sha256:1a0848b52815006ea6596c395f87449f693dc419061cc21e970f139d466dc0a0", - "sha256:1e78fb68cf9c1a6aa4a9a12e960a5c9dfbdb89b3695197aa7064705662515de2", - "sha256:261b9f5d17cac914531331ff1b1d452125bf5daa05faf73b71d935485b0c510b", - "sha256:2b8bcf994563466db019fab287ff390fffbfdb4f905fc77bc1c1d604b1c689cc", - "sha256:38461d02d66de17455072c9ba981d35f1d2a73024bee7790ac2f9e361ef1cd0c", - "sha256:490132667476f6781b4c9458298b0c1cddf237488abd228b0b3650e5ecba7467", - "sha256:491e014f5c43656da08958808588cc6c016847b4360e327a62cb308c791bd2d9", - "sha256:515e1abc578dd3b275d6a5114030b1330ba044ffba03f94091842852f806f1c1", - "sha256:556de4430ce324c836789fa4560ca62d1591d2538b8ceb0b4f68fb7b2384a27a", - "sha256:5833593c25ac59ede40ed4de6d67eb42928cca97f26feea219f21d0ed0959b79", - "sha256:6221d84d463fb110bdd7619b69cb43878a11d51cbb9394ae3105d082d5199167", - "sha256:6918d49b1f90821e93069682c06ffde41829c346c66b721e65a5c62b4bab0300", - "sha256:6c38721585f285203e4b4132a352eb3daa19121a035f3182e08e437cface44bf", - "sha256:71932b597f9895f011f47f17d6428252fc728ba2ae6024e13c3398a087c2cdea", - "sha256:7211ef110a9194b6042449431e08c4d80c0481e5891e58d429df5899690511c2", - "sha256:764226ceef3125e53ea2cb275000e309c0aa5464d43bd72abd661e27fffc26ab", - 
"sha256:7645a8e814a3ee34a89c4a372011dcd817964ce8cb273c8ed6119d706e9613e3", - "sha256:76d4711f6f6d08551a7e9ef28c722f4a50dd0fc204c56b4bcd95c6cc05ce6fbb", - "sha256:7f4f399d28478d1f604c2ff9119907af9726aed73680e5ed1ca634d377abb087", - "sha256:88f7bc0fcca81f985f78dd0fa68d2c75abf8272b1f5c323ea4a01a4d7a614efc", - "sha256:8d0edd6b1c7fb94922bf569c9b092ee187a83f03fb1a63076e7774b60f9481a8", - "sha256:901289d524fdd571be1c7be054f48b1f88ce8dddcbdf1ec698b27d4b8b9e5d62", - "sha256:93ea75c050c5bb3d98016b4ba2497851eadf0ac154d88a67d7a6816206f6fa7f", - "sha256:981b9ab5a0a3178ff413bca62526bb784249421c24ad7381e39d67981be2c326", - "sha256:9ac08e601308e41eb533f232dbf6b7e4cea762f9f84f6357136eed926c15d12c", - "sha256:a02eb8ab2b8f200179b5f62b59757685ae9987996ae549ccf30f983f40602431", - "sha256:a0c6da9aee33ff0b1a451e867da0c1f47408112b3391dd43133838339e410963", - "sha256:a6c8097e01886188e5be3e6b14e94ab365f384736aa1fca6a0b9e35bd4a30bc7", - "sha256:aa384489fefeb62321b238e64c07ef48398fe80f9e1e6afeff22e140e0850eef", - "sha256:ad2a9eb6d9839ae241701d0918f54c51365a51407fd80f6b8289e2dfca977cc3", - "sha256:b206646d176a007466358aa21d85cd8600a415c67c9bd15403336c331a10d956", - "sha256:b826d97e4276750beca7c8f0f1a4938892697a6bcd8ec8217b3312dad6982781", - "sha256:b89ac9768b82205936771f8d2eb3ce88503b1556324c9f903e7156669f521472", - "sha256:bd7bd3b3830247580de99c99ea2a01416dfc3c34471ca1298bccabf86d0ff4dc", - "sha256:bdf1847068c362f16b353163391210269e4f0569a3c166bc6a9f74ccbfc7e839", - "sha256:c11b0746f5d946fecf750428a95f3e9ebe792c1ee3b1e96eeba145dc631a9672", - "sha256:c5374b80521d3d3f2ec5572e05adc94601985cc526fb276d0c8574a6d749f1b3", - "sha256:ca265542ca427bf97aed183c1676e2a9c66942e822b14dc6e5f42e038f92a503", - "sha256:ce31ae3e19f3c902de379cf1323d90c649425b86de7bbdf82871b8a2a0615f3d", - "sha256:ceb6ec0a10c65540421e20ebd29083c50e6d1143278746a4ef6bcf6153171eb8", - "sha256:d081f13b095d74b67d550de04df1c756831f3b83dc9881c38985834387487f1b", - "sha256:d5655a942f5f5d2c9ed93d72148226d75369b4f6952680211972a33e59b1dfdc", 
- "sha256:d5a32087d720c608f42caed0ef36d2b3ea61a9d09ee59a5142d6070da9041b8f", - "sha256:d6484756b12f40003c6128bfcc3fa9f0d49a687e171186c2d85ec82e3758c559", - "sha256:dd65632acaf0d47608190a71bfe46b209719bf2beb59507db08ccdbe712f969b", - "sha256:de343e75f40e972bae1ef6090267f8260c1446a1695e77096db6cfa25e759a95", - "sha256:e29cda763f752553fa14c68fb2195150bfab22b352572cb36c43c47bedba70eb", - "sha256:e41f3de4df3e80de75845d3e743b3f1c4c8613c3997a912dbf0229fc61a8b963", - "sha256:e66d2a64d44d50d2543405fb183a21f76b3b5fd16f130f5c99187c3fb4e64919", - "sha256:e74b0506fa5aa5598ac6a975a12aa8928cbb58e1f5ac8360792ef15de1aa848f", - "sha256:f0ed05f5079c708fe74bf9027e95125334b6978bf07fd5ab923e9e55e5fbb9d3", - "sha256:f61e2dc5ad442c52b4887f1fdc112f97caeff4d9e6ebe78879364ac59f1663e1", - "sha256:fec520865f42e5c7f050c2a79038897b1c7d1595e907a9e08e3353293ffc948e" + "sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7", + "sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98", + "sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad", + "sha256:0c250a29735d4f15321007fb02865f0e6b6a41a6b88f1f523ca1596ab5f50bd5", + "sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae", + "sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e", + "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a", + "sha256:1d0ce09d36d53bbbe566fe296965b23b961764c0bcf3ce2fa45f463745c04701", + "sha256:20b51fa3f588ff2fe658663db52a41a4f7aa6c04f6201449c6c7c476bd255c0d", + "sha256:23b2d7679b73fe0e5a4560b672a39f98dfc6f60df63823b0a9970525325b95f6", + "sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6", + "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106", + "sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75", + "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868", + 
"sha256:32453c1de775c889eb4e22f1197fe3bdfe457d16476ea407472b9442e6295f7a", + "sha256:3a670dc61eb0d0eb7080890c13de3066790f9049b47b0de04007090807c776b0", + "sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1", + "sha256:410478a0c562d1a5bcc2f7ea448359fcb050ed48b3c6f6f4f18c313a9bdb1826", + "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec", + "sha256:48f6a4533887e189dae092f1cf981f2e3885175f7a0f33c91fb5b7b682b6bab6", + "sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950", + "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19", + "sha256:55fdc093b5a3cb41d420884cdaf37a1e74c3c37a31f46e66286d9145d2063bd0", + "sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8", + "sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a", + "sha256:5a7d70357e7cee13f470c7883a063aae5fe209a493c57d86eb7f5a6f910fae09", + "sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86", + "sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c", + "sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5", + "sha256:693945278a31f2086d9bf3df0fe8254bbeaef1fe71e1351c3bd730aa7d31c41b", + "sha256:6db4667b187a6742b33afbbaf05a7bc551ffcf1ced0000a571aedbb4aa42fc7b", + "sha256:6eb73fa5426ea69ee0e012fb59cdc76a15b1283d6e32e4f8dc4482ec67d1194d", + "sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0", + "sha256:7268252af60904bf52c26173cbadc3a071cece75f873705419c8681f24d3edea", + "sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776", + "sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a", + "sha256:82e8211d69a4f4bc360ea22cd6555f8e61a1bd211d1d5d39d3d228b48c83a897", + "sha256:89aa2c2eeb20957be2d950b85974b30a01a762f3308cd02bb15e1ad632e22dc7", + "sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09", + "sha256:96ec70beabbd3b10e8bfe52616a13561e58fe84c0101dd031dc78f250d5128b9", 
+ "sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe", + "sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd", + "sha256:9d3e0c25a2350080e9319724dede4f31f43a6c9779be48021a7f4ebde8b2d742", + "sha256:a06339f38e9ed3a64e4c4e43aec7f59084033647f908e4259d279a52d3757d09", + "sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0", + "sha256:a7496bfe1da7fb1a4e1cc23bb67c58fab69311cc7d32b5a99c2007b4b2a0e932", + "sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1", + "sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a", + "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49", + "sha256:b7e3ed87d4138356775346e6845cccbe66cd9e207f3cd11d2f0b9fd13681359d", + "sha256:b7f2f9f912dca3934c1baec2e4585a674ef16fe00218d833856408c48d5beee7", + "sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480", + "sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89", + "sha256:bfa4a17e17ce9abf47a74ae02f32d014c5e9404b6d9ac7f729e01562bbee601e", + "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b", + "sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82", + "sha256:c6321c9efe29975232da3bd0af0ad216800a47e93d763ce64f291917a381b8eb", + "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068", + "sha256:c99169d4ff810155ca50b4da3b075cbde79752443117d89429595c2e8e37fed8", + "sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b", + "sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb", + "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2", + "sha256:dca69045298ce5c11fd539682cff879cc1e664c245d1c64da929813e54241d11", + "sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b", + "sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc", + 
"sha256:e6a20a581f9ce92d389a8c7d7c3dd47c81fd5d6e655c8dddf341e14aa48659d0", + "sha256:e7004be74cbb7d9f34553a5ce5fb08be14fb33bc86f332fb71cbe5216362a497", + "sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17", + "sha256:edb678da49d9f72c9f6c609fbe41a5dfb9a9282f9e6a2253d5a91e0fc382d7c0", + "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2", + "sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439", + "sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5", + "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac", + "sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825", + "sha256:fbeb989b5cc29e8daf7f976b421c220f1b8c731cbf22b9130d8815418ea45887", + "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced", + "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74" ], "markers": "python_version >= '3.8'", - "version": "==1.4.0" + "version": "==1.4.1" }, "greenlet": { "hashes": [ - "sha256:0a02d259510b3630f330c86557331a3b0e0c79dac3d166e449a39363beaae174", - "sha256:0b6f9f8ca7093fd4433472fd99b5650f8a26dcd8ba410e14094c1e44cd3ceddd", - "sha256:100f78a29707ca1525ea47388cec8a049405147719f47ebf3895e7509c6446aa", - "sha256:1757936efea16e3f03db20efd0cd50a1c86b06734f9f7338a90c4ba85ec2ad5a", - "sha256:19075157a10055759066854a973b3d1325d964d498a805bb68a1f9af4aaef8ec", - "sha256:19bbdf1cce0346ef7341705d71e2ecf6f41a35c311137f29b8a2dc2341374565", - "sha256:20107edf7c2c3644c67c12205dc60b1bb11d26b2610b276f97d666110d1b511d", - "sha256:22f79120a24aeeae2b4471c711dcf4f8c736a2bb2fabad2a67ac9a55ea72523c", - "sha256:2847e5d7beedb8d614186962c3d774d40d3374d580d2cbdab7f184580a39d234", - "sha256:28e89e232c7593d33cac35425b58950789962011cc274aa43ef8865f2e11f46d", - "sha256:329c5a2e5a0ee942f2992c5e3ff40be03e75f745f48847f118a3cfece7a28546", - "sha256:337322096d92808f76ad26061a8f5fccb22b0809bea39212cd6c406f6a7060d2", - 
"sha256:3fcc780ae8edbb1d050d920ab44790201f027d59fdbd21362340a85c79066a74", - "sha256:41bdeeb552d814bcd7fb52172b304898a35818107cc8778b5101423c9017b3de", - "sha256:4eddd98afc726f8aee1948858aed9e6feeb1758889dfd869072d4465973f6bfd", - "sha256:52e93b28db27ae7d208748f45d2db8a7b6a380e0d703f099c949d0f0d80b70e9", - "sha256:55d62807f1c5a1682075c62436702aaba941daa316e9161e4b6ccebbbf38bda3", - "sha256:5805e71e5b570d490938d55552f5a9e10f477c19400c38bf1d5190d760691846", - "sha256:599daf06ea59bfedbec564b1692b0166a0045f32b6f0933b0dd4df59a854caf2", - "sha256:60d5772e8195f4e9ebf74046a9121bbb90090f6550f81d8956a05387ba139353", - "sha256:696d8e7d82398e810f2b3622b24e87906763b6ebfd90e361e88eb85b0e554dc8", - "sha256:6e6061bf1e9565c29002e3c601cf68569c450be7fc3f7336671af7ddb4657166", - "sha256:80ac992f25d10aaebe1ee15df45ca0d7571d0f70b645c08ec68733fb7a020206", - "sha256:816bd9488a94cba78d93e1abb58000e8266fa9cc2aa9ccdd6eb0696acb24005b", - "sha256:85d2b77e7c9382f004b41d9c72c85537fac834fb141b0296942d52bf03fe4a3d", - "sha256:87c8ceb0cf8a5a51b8008b643844b7f4a8264a2c13fcbcd8a8316161725383fe", - "sha256:89ee2e967bd7ff85d84a2de09df10e021c9b38c7d91dead95b406ed6350c6997", - "sha256:8bef097455dea90ffe855286926ae02d8faa335ed8e4067326257cb571fc1445", - "sha256:8d11ebbd679e927593978aa44c10fc2092bc454b7d13fdc958d3e9d508aba7d0", - "sha256:91e6c7db42638dc45cf2e13c73be16bf83179f7859b07cfc139518941320be96", - "sha256:97e7ac860d64e2dcba5c5944cfc8fa9ea185cd84061c623536154d5a89237884", - "sha256:990066bff27c4fcf3b69382b86f4c99b3652bab2a7e685d968cd4d0cfc6f67c6", - "sha256:9fbc5b8f3dfe24784cee8ce0be3da2d8a79e46a276593db6868382d9c50d97b1", - "sha256:ac4a39d1abae48184d420aa8e5e63efd1b75c8444dd95daa3e03f6c6310e9619", - "sha256:b2c02d2ad98116e914d4f3155ffc905fd0c025d901ead3f6ed07385e19122c94", - "sha256:b2d3337dcfaa99698aa2377c81c9ca72fcd89c07e7eb62ece3f23a3fe89b2ce4", - "sha256:b489c36d1327868d207002391f662a1d163bdc8daf10ab2e5f6e41b9b96de3b1", - "sha256:b641161c302efbb860ae6b081f406839a8b7d5573f20a455539823802c655f63", 
- "sha256:b8ba29306c5de7717b5761b9ea74f9c72b9e2b834e24aa984da99cbfc70157fd", - "sha256:b9934adbd0f6e476f0ecff3c94626529f344f57b38c9a541f87098710b18af0a", - "sha256:ce85c43ae54845272f6f9cd8320d034d7a946e9773c693b27d620edec825e376", - "sha256:cf868e08690cb89360eebc73ba4be7fb461cfbc6168dd88e2fbbe6f31812cd57", - "sha256:d2905ce1df400360463c772b55d8e2518d0e488a87cdea13dd2c71dcb2a1fa16", - "sha256:d57e20ba591727da0c230ab2c3f200ac9d6d333860d85348816e1dca4cc4792e", - "sha256:d6a8c9d4f8692917a3dc7eb25a6fb337bff86909febe2f793ec1928cd97bedfc", - "sha256:d923ff276f1c1f9680d32832f8d6c040fe9306cbfb5d161b0911e9634be9ef0a", - "sha256:daa7197b43c707462f06d2c693ffdbb5991cbb8b80b5b984007de431493a319c", - "sha256:dbd4c177afb8a8d9ba348d925b0b67246147af806f0b104af4d24f144d461cd5", - "sha256:dc4d815b794fd8868c4d67602692c21bf5293a75e4b607bb92a11e821e2b859a", - "sha256:e9d21aaa84557d64209af04ff48e0ad5e28c5cca67ce43444e939579d085da72", - "sha256:ea6b8aa9e08eea388c5f7a276fabb1d4b6b9d6e4ceb12cc477c3d352001768a9", - "sha256:eabe7090db68c981fca689299c2d116400b553f4b713266b130cfc9e2aa9c5a9", - "sha256:f2f6d303f3dee132b322a14cd8765287b8f86cdc10d2cb6a6fae234ea488888e", - "sha256:f33f3258aae89da191c6ebaa3bc517c6c4cbc9b9f689e5d8452f7aedbb913fa8", - "sha256:f7bfb769f7efa0eefcd039dd19d843a4fbfbac52f1878b1da2ed5793ec9b1a65", - "sha256:f89e21afe925fcfa655965ca8ea10f24773a1791400989ff32f467badfe4a064", - "sha256:fa24255ae3c0ab67e613556375a4341af04a084bd58764731972bcbc8baeba36" + "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67", + "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6", + "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257", + "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4", + "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676", + "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61", + 
"sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc", + "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca", + "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7", + "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728", + "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305", + "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6", + "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379", + "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414", + "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04", + "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a", + "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf", + "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491", + "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559", + "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e", + "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274", + "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb", + "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b", + "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9", + "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b", + "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be", + "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506", + "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405", + "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113", + "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f", + "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5", + "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230", 
+ "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d", + "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f", + "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a", + "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e", + "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61", + "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6", + "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d", + "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71", + "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22", + "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2", + "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3", + "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067", + "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc", + "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881", + "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3", + "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e", + "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac", + "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53", + "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0", + "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b", + "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83", + "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41", + "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c", + "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf", + "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da", + 
"sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33" ], "markers": "python_version >= '3' and platform_machine == 'aarch64' or (platform_machine == 'ppc64le' or (platform_machine == 'x86_64' or (platform_machine == 'amd64' or (platform_machine == 'AMD64' or (platform_machine == 'win32' or platform_machine == 'WIN32')))))", - "version": "==3.0.1" + "version": "==3.0.3" }, "idna": { "hashes": [ - "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4", - "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2" + "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca", + "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f" ], "markers": "python_version >= '3.5'", - "version": "==3.4" + "version": "==3.6" }, "incremental": { "hashes": [ @@ -634,67 +648,77 @@ }, "jinja2": { "hashes": [ - "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852", - "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61" + "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa", + "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90" ], - "index": "tencent", - "version": "==3.1.2" + "markers": "python_version >= '3.7'", + "version": "==3.1.3" }, "markupsafe": { "hashes": [ - "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e", - "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e", - "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431", - "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686", - "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559", - "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc", - "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c", - "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0", - 
"sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4", - "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9", - "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575", - "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba", - "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d", - "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3", - "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00", - "sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155", - "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac", - "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52", - "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f", - "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8", - "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b", - "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24", - "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea", - "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198", - "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0", - "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee", - "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be", - "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2", - "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707", - "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6", - "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58", - "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779", - "sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636", - "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c", 
- "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad", - "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee", - "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc", - "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2", - "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48", - "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7", - "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e", - "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b", - "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa", - "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5", - "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e", - "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb", - "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9", - "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57", - "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc", - "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2" + "sha256:0042d6a9880b38e1dd9ff83146cc3c9c18a059b9360ceae207805567aacccc69", + "sha256:0c26f67b3fe27302d3a412b85ef696792c4a2386293c53ba683a89562f9399b0", + "sha256:0fbad3d346df8f9d72622ac71b69565e621ada2ce6572f37c2eae8dacd60385d", + "sha256:15866d7f2dc60cfdde12ebb4e75e41be862348b4728300c36cdf405e258415ec", + "sha256:1c98c33ffe20e9a489145d97070a435ea0679fddaabcafe19982fe9c971987d5", + "sha256:21e7af8091007bf4bebf4521184f4880a6acab8df0df52ef9e513d8e5db23411", + "sha256:23984d1bdae01bee794267424af55eef4dfc038dc5d1272860669b2aa025c9e3", + "sha256:31f57d64c336b8ccb1966d156932f3daa4fee74176b0fdc48ef580be774aae74", + "sha256:3583a3a3ab7958e354dc1d25be74aee6228938312ee875a22330c4dc2e41beb0", + 
"sha256:36d7626a8cca4d34216875aee5a1d3d654bb3dac201c1c003d182283e3205949", + "sha256:396549cea79e8ca4ba65525470d534e8a41070e6b3500ce2414921099cb73e8d", + "sha256:3a66c36a3864df95e4f62f9167c734b3b1192cb0851b43d7cc08040c074c6279", + "sha256:3aae9af4cac263007fd6309c64c6ab4506dd2b79382d9d19a1994f9240b8db4f", + "sha256:3ab3a886a237f6e9c9f4f7d272067e712cdb4efa774bef494dccad08f39d8ae6", + "sha256:47bb5f0142b8b64ed1399b6b60f700a580335c8e1c57f2f15587bd072012decc", + "sha256:49a3b78a5af63ec10d8604180380c13dcd870aba7928c1fe04e881d5c792dc4e", + "sha256:4df98d4a9cd6a88d6a585852f56f2155c9cdb6aec78361a19f938810aa020954", + "sha256:5045e892cfdaecc5b4c01822f353cf2c8feb88a6ec1c0adef2a2e705eef0f656", + "sha256:5244324676254697fe5c181fc762284e2c5fceeb1c4e3e7f6aca2b6f107e60dc", + "sha256:54635102ba3cf5da26eb6f96c4b8c53af8a9c0d97b64bdcb592596a6255d8518", + "sha256:54a7e1380dfece8847c71bf7e33da5d084e9b889c75eca19100ef98027bd9f56", + "sha256:55d03fea4c4e9fd0ad75dc2e7e2b6757b80c152c032ea1d1de487461d8140efc", + "sha256:698e84142f3f884114ea8cf83e7a67ca8f4ace8454e78fe960646c6c91c63bfa", + "sha256:6aa5e2e7fc9bc042ae82d8b79d795b9a62bd8f15ba1e7594e3db243f158b5565", + "sha256:7653fa39578957bc42e5ebc15cf4361d9e0ee4b702d7d5ec96cdac860953c5b4", + "sha256:765f036a3d00395a326df2835d8f86b637dbaf9832f90f5d196c3b8a7a5080cb", + "sha256:78bc995e004681246e85e28e068111a4c3f35f34e6c62da1471e844ee1446250", + "sha256:7a07f40ef8f0fbc5ef1000d0c78771f4d5ca03b4953fc162749772916b298fc4", + "sha256:8b570a1537367b52396e53325769608f2a687ec9a4363647af1cded8928af959", + "sha256:987d13fe1d23e12a66ca2073b8d2e2a75cec2ecb8eab43ff5624ba0ad42764bc", + "sha256:9896fca4a8eb246defc8b2a7ac77ef7553b638e04fbf170bff78a40fa8a91474", + "sha256:9e9e3c4020aa2dc62d5dd6743a69e399ce3de58320522948af6140ac959ab863", + "sha256:a0b838c37ba596fcbfca71651a104a611543077156cb0a26fe0c475e1f152ee8", + "sha256:a4d176cfdfde84f732c4a53109b293d05883e952bbba68b857ae446fa3119b4f", + "sha256:a76055d5cb1c23485d7ddae533229039b850db711c554a12ea64a0fd8a0129e2", 
+ "sha256:a76cd37d229fc385738bd1ce4cba2a121cf26b53864c1772694ad0ad348e509e", + "sha256:a7cc49ef48a3c7a0005a949f3c04f8baa5409d3f663a1b36f0eba9bfe2a0396e", + "sha256:abf5ebbec056817057bfafc0445916bb688a255a5146f900445d081db08cbabb", + "sha256:b0fe73bac2fed83839dbdbe6da84ae2a31c11cfc1c777a40dbd8ac8a6ed1560f", + "sha256:b6f14a9cd50c3cb100eb94b3273131c80d102e19bb20253ac7bd7336118a673a", + "sha256:b83041cda633871572f0d3c41dddd5582ad7d22f65a72eacd8d3d6d00291df26", + "sha256:b835aba863195269ea358cecc21b400276747cc977492319fd7682b8cd2c253d", + "sha256:bf1196dcc239e608605b716e7b166eb5faf4bc192f8a44b81e85251e62584bd2", + "sha256:c669391319973e49a7c6230c218a1e3044710bc1ce4c8e6eb71f7e6d43a2c131", + "sha256:c7556bafeaa0a50e2fe7dc86e0382dea349ebcad8f010d5a7dc6ba568eaaa789", + "sha256:c8f253a84dbd2c63c19590fa86a032ef3d8cc18923b8049d91bcdeeb2581fbf6", + "sha256:d18b66fe626ac412d96c2ab536306c736c66cf2a31c243a45025156cc190dc8a", + "sha256:d5291d98cd3ad9a562883468c690a2a238c4a6388ab3bd155b0c75dd55ece858", + "sha256:d5c31fe855c77cad679b302aabc42d724ed87c043b1432d457f4976add1c2c3e", + "sha256:d6e427c7378c7f1b2bef6a344c925b8b63623d3321c09a237b7cc0e77dd98ceb", + "sha256:dac1ebf6983148b45b5fa48593950f90ed6d1d26300604f321c74a9ca1609f8e", + "sha256:de8153a7aae3835484ac168a9a9bdaa0c5eee4e0bc595503c95d53b942879c84", + "sha256:e1a0d1924a5013d4f294087e00024ad25668234569289650929ab871231668e7", + "sha256:e7902211afd0af05fbadcc9a312e4cf10f27b779cf1323e78d52377ae4b72bea", + "sha256:e888ff76ceb39601c59e219f281466c6d7e66bd375b4ec1ce83bcdc68306796b", + "sha256:f06e5a9e99b7df44640767842f414ed5d7bedaaa78cd817ce04bbd6fd86e2dd6", + "sha256:f6be2d708a9d0e9b0054856f07ac7070fbe1754be40ca8525d5adccdbda8f475", + "sha256:f9917691f410a2e0897d1ef99619fd3f7dd503647c8ff2475bf90c3cf222ad74", + "sha256:fc1a75aa8f11b87910ffd98de62b29d6520b6d6e8a3de69a70ca34dea85d2a8a", + "sha256:fe8512ed897d5daf089e5bd010c3dc03bb1bdae00b35588c49b98268d4a01e00" ], "markers": "python_version >= '3.7'", - "version": "==2.1.3" + 
"version": "==2.1.4" }, "multidict": { "hashes": [ @@ -800,46 +824,51 @@ }, "pycryptodome": { "hashes": [ - "sha256:0101f647d11a1aae5a8ce4f5fad6644ae1b22bb65d05accc7d322943c69a74a6", - "sha256:04dd31d3b33a6b22ac4d432b3274588917dcf850cc0c51c84eca1d8ed6933810", - "sha256:05e33267394aad6db6595c0ce9d427fe21552f5425e116a925455e099fdf759a", - "sha256:08ce3558af5106c632baf6d331d261f02367a6bc3733086ae43c0f988fe042db", - "sha256:139ae2c6161b9dd5d829c9645d781509a810ef50ea8b657e2257c25ca20efe33", - "sha256:17940dcf274fcae4a54ec6117a9ecfe52907ed5e2e438fe712fe7ca502672ed5", - "sha256:190c53f51e988dceb60472baddce3f289fa52b0ec38fbe5fd20dd1d0f795c551", - "sha256:22e0ae7c3a7f87dcdcf302db06ab76f20e83f09a6993c160b248d58274473bfa", - "sha256:3006c44c4946583b6de24fe0632091c2653d6256b99a02a3db71ca06472ea1e4", - "sha256:45430dfaf1f421cf462c0dd824984378bef32b22669f2635cb809357dbaab405", - "sha256:506c686a1eee6c00df70010be3b8e9e78f406af4f21b23162bbb6e9bdf5427bc", - "sha256:536f676963662603f1f2e6ab01080c54d8cd20f34ec333dcb195306fa7826997", - "sha256:542f99d5026ac5f0ef391ba0602f3d11beef8e65aae135fa5b762f5ebd9d3bfb", - "sha256:560591c0777f74a5da86718f70dfc8d781734cf559773b64072bbdda44b3fc3e", - "sha256:5b1986c761258a5b4332a7f94a83f631c1ffca8747d75ab8395bf2e1b93283d9", - "sha256:61bb3ccbf4bf32ad9af32da8badc24e888ae5231c617947e0f5401077f8b091f", - "sha256:7822f36d683f9ad7bc2145b2c2045014afdbbd1d9922a6d4ce1cbd6add79a01e", - "sha256:7919ccd096584b911f2a303c593280869ce1af9bf5d36214511f5e5a1bed8c34", - "sha256:7c760c8a0479a4042111a8dd2f067d3ae4573da286c53f13cf6f5c53a5c1f631", - "sha256:829b813b8ee00d9c8aba417621b94bc0b5efd18c928923802ad5ba4cf1ec709c", - "sha256:84c3e4fffad0c4988aef0d5591be3cad4e10aa7db264c65fadbc633318d20bde", - "sha256:8999316e57abcbd8085c91bc0ef75292c8618f41ca6d2b6132250a863a77d1e7", - "sha256:8c1601e04d32087591d78e0b81e1e520e57a92796089864b20e5f18c9564b3fa", - "sha256:a0ab84755f4539db086db9ba9e9f3868d2e3610a3948cbd2a55e332ad83b01b0", - 
"sha256:a9bcd5f3794879e91970f2bbd7d899780541d3ff439d8f2112441769c9f2ccea", - "sha256:bc35d463222cdb4dbebd35e0784155c81e161b9284e567e7e933d722e533331e", - "sha256:c1cc2f2ae451a676def1a73c1ae9120cd31af25db3f381893d45f75e77be2400", - "sha256:d033947e7fd3e2ba9a031cb2d267251620964705a013c5a461fa5233cc025270", - "sha256:d04f5f623a280fbd0ab1c1d8ecbd753193ab7154f09b6161b0f857a1a676c15f", - "sha256:d49a6c715d8cceffedabb6adb7e0cbf41ae1a2ff4adaeec9432074a80627dea1", - "sha256:e249a784cc98a29c77cea9df54284a44b40cafbfae57636dd2f8775b48af2434", - "sha256:fc7a79590e2b5d08530175823a242de6790abc73638cc6dc9d2684e7be2f5e49" + "sha256:06d6de87c19f967f03b4cf9b34e538ef46e99a337e9a61a77dbe44b2cbcf0690", + "sha256:09609209ed7de61c2b560cc5c8c4fbf892f8b15b1faf7e4cbffac97db1fffda7", + "sha256:210ba1b647837bfc42dd5a813cdecb5b86193ae11a3f5d972b9a0ae2c7e9e4b4", + "sha256:2a1250b7ea809f752b68e3e6f3fd946b5939a52eaeea18c73bdab53e9ba3c2dd", + "sha256:2ab6ab0cb755154ad14e507d1df72de9897e99fd2d4922851a276ccc14f4f1a5", + "sha256:3427d9e5310af6680678f4cce149f54e0bb4af60101c7f2c16fdf878b39ccccc", + "sha256:3cd3ef3aee1079ae44afaeee13393cf68b1058f70576b11439483e34f93cf818", + "sha256:405002eafad114a2f9a930f5db65feef7b53c4784495dd8758069b89baf68eab", + "sha256:417a276aaa9cb3be91f9014e9d18d10e840a7a9b9a9be64a42f553c5b50b4d1d", + "sha256:4401564ebf37dfde45d096974c7a159b52eeabd9969135f0426907db367a652a", + "sha256:49a4c4dc60b78ec41d2afa392491d788c2e06edf48580fbfb0dd0f828af49d25", + "sha256:5601c934c498cd267640b57569e73793cb9a83506f7c73a8ec57a516f5b0b091", + "sha256:6e0e4a987d38cfc2e71b4a1b591bae4891eeabe5fa0f56154f576e26287bfdea", + "sha256:76658f0d942051d12a9bd08ca1b6b34fd762a8ee4240984f7c06ddfb55eaf15a", + "sha256:76cb39afede7055127e35a444c1c041d2e8d2f1f9c121ecef573757ba4cd2c3c", + "sha256:8d6b98d0d83d21fb757a182d52940d028564efe8147baa9ce0f38d057104ae72", + "sha256:9b3ae153c89a480a0ec402e23db8d8d84a3833b65fa4b15b81b83be9d637aab9", + "sha256:a60fedd2b37b4cb11ccb5d0399efe26db9e0dd149016c1cc6c8161974ceac2d6", 
+ "sha256:ac1c7c0624a862f2e53438a15c9259d1655325fc2ec4392e66dc46cdae24d044", + "sha256:acae12b9ede49f38eb0ef76fdec2df2e94aad85ae46ec85be3648a57f0a7db04", + "sha256:acc2614e2e5346a4a4eab6e199203034924313626f9620b7b4b38e9ad74b7e0c", + "sha256:acf6e43fa75aca2d33e93409f2dafe386fe051818ee79ee8a3e21de9caa2ac9e", + "sha256:baee115a9ba6c5d2709a1e88ffe62b73ecc044852a925dcb67713a288c4ec70f", + "sha256:c18b381553638414b38705f07d1ef0a7cf301bc78a5f9bc17a957eb19446834b", + "sha256:d29daa681517f4bc318cd8a23af87e1f2a7bad2fe361e8aa29c77d652a065de4", + "sha256:d5954acfe9e00bc83ed9f5cb082ed22c592fbbef86dc48b907238be64ead5c33", + "sha256:ec0bb1188c1d13426039af8ffcb4dbe3aad1d7680c35a62d8eaf2a529b5d3d4f", + "sha256:ec1f93feb3bb93380ab0ebf8b859e8e5678c0f010d2d78367cf6bc30bfeb148e", + "sha256:f0e6d631bae3f231d3634f91ae4da7a960f7ff87f2865b2d2b831af1dfb04e9a", + "sha256:f35d6cee81fa145333137009d9c8ba90951d7d77b67c79cbe5f03c7eb74d8fe2", + "sha256:f47888542a0633baff535a04726948e876bf1ed880fddb7c10a736fa99146ab3", + "sha256:fb3b87461fa35afa19c971b0a2b7456a7b1db7b4eba9a8424666104925b78128" ], "index": "pypi", - "version": "==3.19.0" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", + "version": "==3.20.0" }, "pymysql": { "hashes": [ "sha256:4f13a7df8bf36a51e81dd9f3605fede45a4878fe02f9236349fd82a3f0612f96", - "sha256:8969ec6d763c856f7073c4c64662882675702efcb114b4bcbb955aea3a069fa7" + "sha256:5072fb2637f8bfff0e7a15a9c02a0f4ba98f97800e12432e1d6d95936ec6d496", + "sha256:5cc02f2f60936c5d2d6122ffaff27783bd29ba7683ea45a8ab75c5083f00dc20", + "sha256:766b72e4370aba94e6266a4dbd62c51fbc6a894c38de25a41a8a01f0461a2387", + "sha256:8969ec6d763c856f7073c4c64662882675702efcb114b4bcbb955aea3a069fa7", + "sha256:aade29b861e81a3c68a9e90d43f3db257940c0208983a0128b82f1a4cef639aa" ], "markers": "python_version >= '3.7'", "version": "==1.1.0" @@ -851,6 +880,7 @@ "sha256:3f8804571ebe159c380ac6de37643bb4685970655d3bba243530d6558b799aa0" ], "index": "tencent", + "markers": 
"python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", "version": "==1.7.1" }, "python-dateutil": { @@ -874,6 +904,7 @@ "sha256:ed4802971884ae19d640775ba3b03aa2e7bd5e8fb8dfaed2decce4d0fc48391f" ], "index": "tencent", + "markers": "python_version >= '3.7'", "version": "==5.0.1" }, "requests": { @@ -882,6 +913,7 @@ "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1" ], "index": "pypi", + "markers": "python_version >= '3.7'", "version": "==2.31.0" }, "six": { @@ -897,59 +929,93 @@ "asyncio" ], "hashes": [ - "sha256:0b7dbe6369677a2bea68fe9812c6e4bbca06ebfa4b5cde257b2b0bf208709131", - "sha256:128a948bd40780667114b0297e2cc6d657b71effa942e0a368d8cc24293febb3", - "sha256:14b0cacdc8a4759a1e1bd47dc3ee3f5db997129eb091330beda1da5a0e9e5bd7", - "sha256:1fb9cb60e0f33040e4f4681e6658a7eb03b5cb4643284172f91410d8c493dace", - "sha256:273505fcad22e58cc67329cefab2e436006fc68e3c5423056ee0513e6523268a", - "sha256:2e70e0673d7d12fa6cd363453a0d22dac0d9978500aa6b46aa96e22690a55eab", - "sha256:34e1c5d9cd3e6bf3d1ce56971c62a40c06bfc02861728f368dcfec8aeedb2814", - "sha256:3b97ddf509fc21e10b09403b5219b06c5b558b27fc2453150274fa4e70707dbf", - "sha256:3f6997da81114daef9203d30aabfa6b218a577fc2bd797c795c9c88c9eb78d49", - "sha256:82dd4131d88395df7c318eeeef367ec768c2a6fe5bd69423f7720c4edb79473c", - "sha256:85292ff52ddf85a39367057c3d7968a12ee1fb84565331a36a8fead346f08796", - "sha256:8a7a66297e46f85a04d68981917c75723e377d2e0599d15fbe7a56abed5e2d75", - "sha256:8b881ac07d15fb3e4f68c5a67aa5cdaf9eb8f09eb5545aaf4b0a5f5f4659be18", - "sha256:a3257a6e09626d32b28a0c5b4f1a97bced585e319cfa90b417f9ab0f6145c33c", - "sha256:a9bddb60566dc45c57fd0a5e14dd2d9e5f106d2241e0a2dc0c1da144f9444516", - "sha256:bdb77e1789e7596b77fd48d99ec1d2108c3349abd20227eea0d48d3f8cf398d9", - "sha256:c1db0221cb26d66294f4ca18c533e427211673ab86c1fbaca8d6d9ff78654293", - "sha256:c4cb501d585aa74a0f86d0ea6263b9c5e1d1463f8f9071392477fd401bd3c7cc", - 
"sha256:d00665725063692c42badfd521d0c4392e83c6c826795d38eb88fb108e5660e5", - "sha256:d0fed0f791d78e7767c2db28d34068649dfeea027b83ed18c45a423f741425cb", - "sha256:d69738d582e3a24125f0c246ed8d712b03bd21e148268421e4a4d09c34f521a5", - "sha256:db4db3c08ffbb18582f856545f058a7a5e4ab6f17f75795ca90b3c38ee0a8ba4", - "sha256:f1fcee5a2c859eecb4ed179edac5ffbc7c84ab09a5420219078ccc6edda45436", - "sha256:f2d526aeea1bd6a442abc7c9b4b00386fd70253b80d54a0930c0a216230a35be", - "sha256:fbaf6643a604aa17e7a7afd74f665f9db882df5c297bdd86c38368f2c471f37d" + "sha256:0525c4905b4b52d8ccc3c203c9d7ab2a80329ffa077d4bacf31aefda7604dc65", + "sha256:0535d5b57d014d06ceeaeffd816bb3a6e2dddeb670222570b8c4953e2d2ea678", + "sha256:0892e7ac8bc76da499ad3ee8de8da4d7905a3110b952e2a35a940dab1ffa550e", + "sha256:0d661cff58c91726c601cc0ee626bf167b20cc4d7941c93c5f3ac28dc34ddbea", + "sha256:1980e6eb6c9be49ea8f89889989127daafc43f0b1b6843d71efab1514973cca0", + "sha256:1a09d5bd1a40d76ad90e5570530e082ddc000e1d92de495746f6257dc08f166b", + "sha256:245c67c88e63f1523e9216cad6ba3107dea2d3ee19adc359597a628afcabfbcb", + "sha256:2ad16880ccd971ac8e570550fbdef1385e094b022d6fc85ef3ce7df400dddad3", + "sha256:2be4e6294c53f2ec8ea36486b56390e3bcaa052bf3a9a47005687ccf376745d1", + "sha256:2c55040d8ea65414de7c47f1a23823cd9f3fad0dc93e6b6b728fee81230f817b", + "sha256:352df882088a55293f621328ec33b6ffca936ad7f23013b22520542e1ab6ad1b", + "sha256:3823dda635988e6744d4417e13f2e2b5fe76c4bf29dd67e95f98717e1b094cad", + "sha256:38ef80328e3fee2be0a1abe3fe9445d3a2e52a1282ba342d0dab6edf1fef4707", + "sha256:39b02b645632c5fe46b8dd30755682f629ffbb62ff317ecc14c998c21b2896ff", + "sha256:3b0cd89a7bd03f57ae58263d0f828a072d1b440c8c2949f38f3b446148321171", + "sha256:3ec7a0ed9b32afdf337172678a4a0e6419775ba4e649b66f49415615fa47efbd", + "sha256:3f0ef620ecbab46e81035cf3dedfb412a7da35340500ba470f9ce43a1e6c423b", + "sha256:50e074aea505f4427151c286955ea025f51752fa42f9939749336672e0674c81", + "sha256:55e699466106d09f028ab78d3c2e1f621b5ef2c8694598242259e4515715da7c", 
+ "sha256:5e180fff133d21a800c4f050733d59340f40d42364fcb9d14f6a67764bdc48d2", + "sha256:6cacc0b2dd7d22a918a9642fc89840a5d3cee18a0e1fe41080b1141b23b10916", + "sha256:7af40425ac535cbda129d9915edcaa002afe35d84609fd3b9d6a8c46732e02ee", + "sha256:7d8139ca0b9f93890ab899da678816518af74312bb8cd71fb721436a93a93298", + "sha256:7deeae5071930abb3669b5185abb6c33ddfd2398f87660fafdb9e6a5fb0f3f2f", + "sha256:86a22143a4001f53bf58027b044da1fb10d67b62a785fc1390b5c7f089d9838c", + "sha256:8ca484ca11c65e05639ffe80f20d45e6be81fbec7683d6c9a15cd421e6e8b340", + "sha256:8d1d7d63e5d2f4e92a39ae1e897a5d551720179bb8d1254883e7113d3826d43c", + "sha256:8e702e7489f39375601c7ea5a0bef207256828a2bc5986c65cb15cd0cf097a87", + "sha256:a055ba17f4675aadcda3005df2e28a86feb731fdcc865e1f6b4f209ed1225cba", + "sha256:a33cb3f095e7d776ec76e79d92d83117438b6153510770fcd57b9c96f9ef623d", + "sha256:a61184c7289146c8cff06b6b41807c6994c6d437278e72cf00ff7fe1c7a263d1", + "sha256:af55cc207865d641a57f7044e98b08b09220da3d1b13a46f26487cc2f898a072", + "sha256:b00cf0471888823b7a9f722c6c41eb6985cf34f077edcf62695ac4bed6ec01ee", + "sha256:b03850c290c765b87102959ea53299dc9addf76ca08a06ea98383348ae205c99", + "sha256:b97fd5bb6b7c1a64b7ac0632f7ce389b8ab362e7bd5f60654c2a418496be5d7f", + "sha256:c37bc677690fd33932182b85d37433845de612962ed080c3e4d92f758d1bd894", + "sha256:cecb66492440ae8592797dd705a0cbaa6abe0555f4fa6c5f40b078bd2740fc6b", + "sha256:d0a83afab5e062abffcdcbcc74f9d3ba37b2385294dd0927ad65fc6ebe04e054", + "sha256:d3cf56cc36d42908495760b223ca9c2c0f9f0002b4eddc994b24db5fcb86a9e4", + "sha256:e646b19f47d655261b22df9976e572f588185279970efba3d45c377127d35349", + "sha256:e7908c2025eb18394e32d65dd02d2e37e17d733cdbe7d78231c2b6d7eb20cdb9", + "sha256:e8f2df79a46e130235bc5e1bbef4de0583fb19d481eaa0bffa76e8347ea45ec6", + "sha256:eaeeb2464019765bc4340214fca1143081d49972864773f3f1e95dba5c7edc7d", + "sha256:eb18549b770351b54e1ab5da37d22bc530b8bfe2ee31e22b9ebe650640d2ef12", + 
"sha256:f2e5b6f5cf7c18df66d082604a1d9c7a2d18f7d1dbe9514a2afaccbb51cc4fc3", + "sha256:f8cafa6f885a0ff5e39efa9325195217bb47d5929ab0051636610d24aef45ade" ], - "index": "tencent", - "version": "==1.4.50" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", + "version": "==1.4.51" }, "tornado": { "hashes": [ - "sha256:1bd19ca6c16882e4d37368e0152f99c099bad93e0950ce55e71daed74045908f", - "sha256:22d3c2fa10b5793da13c807e6fc38ff49a4f6e1e3868b0a6f4164768bb8e20f5", - "sha256:502fba735c84450974fec147340016ad928d29f1e91f49be168c0a4c18181e1d", - "sha256:65ceca9500383fbdf33a98c0087cb975b2ef3bfb874cb35b8de8740cf7f41bd3", - "sha256:71a8db65160a3c55d61839b7302a9a400074c9c753040455494e2af74e2501f2", - "sha256:7ac51f42808cca9b3613f51ffe2a965c8525cb1b00b7b2d56828b8045354f76a", - "sha256:7d01abc57ea0dbb51ddfed477dfe22719d376119844e33c661d873bf9c0e4a16", - "sha256:805d507b1f588320c26f7f097108eb4023bbaa984d63176d1652e184ba24270a", - "sha256:9dc4444c0defcd3929d5c1eb5706cbe1b116e762ff3e0deca8b715d14bf6ec17", - "sha256:ceb917a50cd35882b57600709dd5421a418c29ddc852da8bcdab1f0db33406b0", - "sha256:e7d8db41c0181c80d76c982aacc442c0783a2c54d6400fe028954201a2e032fe" + "sha256:02ccefc7d8211e5a7f9e8bc3f9e5b0ad6262ba2fbb683a6443ecc804e5224ce0", + "sha256:05264e909bd44e9303e81914310a25101034cb3341c798730efcc1c4c89b4d79", + "sha256:10aeaa8006333433da48dec9fe417877f8bcc21f48dda8d661ae79da357b2a63", + "sha256:1ca9b15d7c28784223fdf60c7221210569812163ec6df008a3d332feee84f50a", + "sha256:27787de946a9cffd63ce5814c33f734c627a87072ec7eed71f7fc4417bb16263", + "sha256:3ec4bd9b4547a7acd6b2bee1bd7a4263cf02e637bc66cbb39741ec690088a7b7", + "sha256:40954904d2c9d3a2c0b76e27b854247995e3f320ba4d535be84d765c34187fd2", + "sha256:6f8a6c77900f5ae93d8b4ae1196472d0ccc2775cc1dfdc9e7727889145c45052", + "sha256:71ddfc23a0e03ef2df1c1397d859868d158c8276a0603b96cf86892bff58149f", + "sha256:72291fa6e6bc84e626589f1c29d90a5a6d593ef5ae68052ee2ef000dfd273dee", + 
"sha256:88b84956273fbd73420e6d4b8d5ccbe913c65d31351b4c004ae362eba06e1f78", + "sha256:9e2a05cf38779ede40cfa02143e682eab4e5647076ac50f360dd0355f802913f", + "sha256:a458c869ee68ec610fe047040c30638be8ad5db5b843d2168db428fc52967e28", + "sha256:ad9f48b995986c8a516cffbd70b1c178b0a4ec2d863e260b18dd36715b2fde17", + "sha256:af9243ba960f4f987a61ee72954e04d33f75a10a0ec3ad3cd73f79f1b658a904", + "sha256:c2524457d64e1284fe2c64e2c2a1f7c84cf148beff5023de7d7a4af38bcf1b36", + "sha256:e43bc2e5370a6a8e413e1e1cd0c91bedc5bd62a74a532371042a18ef19e10579", + "sha256:f0251554cdd50b4b44362f73ad5ba7126fc5b2c2895cc62b14a1c2d7ea32f212", + "sha256:f3f11f58ccc670a662cb4e92c5f73fa93bf5d17bb2901e094eecf0e3b3c62ec0", + "sha256:f7894c581ecdcf91666a0912f18ce5e757213999e183ebfc2c3fdbf4d5bd764e", + "sha256:fd03192e287fbd0899dd8f81c6fb9cbbc69194d2074b38f384cb6fa72b80e9c2", + "sha256:ff0ca6f001914f654ef1e5ae2ef1649a2d8a57a71f80089c08cc7a9324e3c0e2" ], "index": "tencent", - "version": "==6.3.3" + "markers": "python_version >= '3.8'", + "version": "==6.4" }, "tzdata": { "hashes": [ - "sha256:11ef1e08e54acb0d4f95bdb1be05da659673de4acbd21bf9c69e94cc5e907a3a", - "sha256:7e65763eef3120314099b6939b5546db7adce1e7d6f2e179e3df563c70511eda" + "sha256:aa3ace4329eeacda5b7beb7ea08ece826c28d761cda36e747cfbf97996d39bf3", + "sha256:dd54c94f294765522c77399649b4fefd95522479a664a0cec87f41bebc6148c9" ], "index": "tencent", - "version": "==2023.3" + "markers": "python_version >= '2'", + "version": "==2023.4" }, "u-msgpack-python": { "hashes": [ @@ -961,92 +1027,172 @@ }, "urllib3": { "hashes": [ - "sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84", - "sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e" + "sha256:55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3", + "sha256:df7aa8afb0148fa78488e7899b2c59b5f4ffcfa82e6c54ccb9dd37c1d7b52d54" ], - "markers": "python_version >= '3.7'", - "version": "==2.0.7" + "markers": "python_version >= '3.8'", + "version": "==2.1.0" }, 
"yarl": { "hashes": [ - "sha256:04ab9d4b9f587c06d801c2abfe9317b77cdf996c65a90d5e84ecc45010823571", - "sha256:066c163aec9d3d073dc9ffe5dd3ad05069bcb03fcaab8d221290ba99f9f69ee3", - "sha256:13414591ff516e04fcdee8dc051c13fd3db13b673c7a4cb1350e6b2ad9639ad3", - "sha256:149ddea5abf329752ea5051b61bd6c1d979e13fbf122d3a1f9f0c8be6cb6f63c", - "sha256:159d81f22d7a43e6eabc36d7194cb53f2f15f498dbbfa8edc8a3239350f59fe7", - "sha256:1b1bba902cba32cdec51fca038fd53f8beee88b77efc373968d1ed021024cc04", - "sha256:22a94666751778629f1ec4280b08eb11815783c63f52092a5953faf73be24191", - "sha256:2a96c19c52ff442a808c105901d0bdfd2e28575b3d5f82e2f5fd67e20dc5f4ea", - "sha256:2b0738fb871812722a0ac2154be1f049c6223b9f6f22eec352996b69775b36d4", - "sha256:2c315df3293cd521033533d242d15eab26583360b58f7ee5d9565f15fee1bef4", - "sha256:32f1d071b3f362c80f1a7d322bfd7b2d11e33d2adf395cc1dd4df36c9c243095", - "sha256:3458a24e4ea3fd8930e934c129b676c27452e4ebda80fbe47b56d8c6c7a63a9e", - "sha256:38a3928ae37558bc1b559f67410df446d1fbfa87318b124bf5032c31e3447b74", - "sha256:3da8a678ca8b96c8606bbb8bfacd99a12ad5dd288bc6f7979baddd62f71c63ef", - "sha256:494053246b119b041960ddcd20fd76224149cfea8ed8777b687358727911dd33", - "sha256:50f33040f3836e912ed16d212f6cc1efb3231a8a60526a407aeb66c1c1956dde", - "sha256:52a25809fcbecfc63ac9ba0c0fb586f90837f5425edfd1ec9f3372b119585e45", - "sha256:53338749febd28935d55b41bf0bcc79d634881195a39f6b2f767870b72514caf", - "sha256:5415d5a4b080dc9612b1b63cba008db84e908b95848369aa1da3686ae27b6d2b", - "sha256:5610f80cf43b6202e2c33ba3ec2ee0a2884f8f423c8f4f62906731d876ef4fac", - "sha256:566185e8ebc0898b11f8026447eacd02e46226716229cea8db37496c8cdd26e0", - "sha256:56ff08ab5df8429901ebdc5d15941b59f6253393cb5da07b4170beefcf1b2528", - "sha256:59723a029760079b7d991a401386390c4be5bfec1e7dd83e25a6a0881859e716", - "sha256:5fcd436ea16fee7d4207c045b1e340020e58a2597301cfbcfdbe5abd2356c2fb", - "sha256:61016e7d582bc46a5378ffdd02cd0314fb8ba52f40f9cf4d9a5e7dbef88dee18", - 
"sha256:63c48f6cef34e6319a74c727376e95626f84ea091f92c0250a98e53e62c77c72", - "sha256:646d663eb2232d7909e6601f1a9107e66f9791f290a1b3dc7057818fe44fc2b6", - "sha256:662e6016409828ee910f5d9602a2729a8a57d74b163c89a837de3fea050c7582", - "sha256:674ca19cbee4a82c9f54e0d1eee28116e63bc6fd1e96c43031d11cbab8b2afd5", - "sha256:6a5883464143ab3ae9ba68daae8e7c5c95b969462bbe42e2464d60e7e2698368", - "sha256:6e7221580dc1db478464cfeef9b03b95c5852cc22894e418562997df0d074ccc", - "sha256:75df5ef94c3fdc393c6b19d80e6ef1ecc9ae2f4263c09cacb178d871c02a5ba9", - "sha256:783185c75c12a017cc345015ea359cc801c3b29a2966c2655cd12b233bf5a2be", - "sha256:822b30a0f22e588b32d3120f6d41e4ed021806418b4c9f0bc3048b8c8cb3f92a", - "sha256:8288d7cd28f8119b07dd49b7230d6b4562f9b61ee9a4ab02221060d21136be80", - "sha256:82aa6264b36c50acfb2424ad5ca537a2060ab6de158a5bd2a72a032cc75b9eb8", - "sha256:832b7e711027c114d79dffb92576acd1bd2decc467dec60e1cac96912602d0e6", - "sha256:838162460b3a08987546e881a2bfa573960bb559dfa739e7800ceeec92e64417", - "sha256:83fcc480d7549ccebe9415d96d9263e2d4226798c37ebd18c930fce43dfb9574", - "sha256:84e0b1599334b1e1478db01b756e55937d4614f8654311eb26012091be109d59", - "sha256:891c0e3ec5ec881541f6c5113d8df0315ce5440e244a716b95f2525b7b9f3608", - "sha256:8c2ad583743d16ddbdf6bb14b5cd76bf43b0d0006e918809d5d4ddf7bde8dd82", - "sha256:8c56986609b057b4839968ba901944af91b8e92f1725d1a2d77cbac6972b9ed1", - "sha256:8ea48e0a2f931064469bdabca50c2f578b565fc446f302a79ba6cc0ee7f384d3", - "sha256:8ec53a0ea2a80c5cd1ab397925f94bff59222aa3cf9c6da938ce05c9ec20428d", - "sha256:95d2ecefbcf4e744ea952d073c6922e72ee650ffc79028eb1e320e732898d7e8", - "sha256:9b3152f2f5677b997ae6c804b73da05a39daa6a9e85a512e0e6823d81cdad7cc", - "sha256:9bf345c3a4f5ba7f766430f97f9cc1320786f19584acc7086491f45524a551ac", - "sha256:a60347f234c2212a9f0361955007fcf4033a75bf600a33c88a0a8e91af77c0e8", - "sha256:a74dcbfe780e62f4b5a062714576f16c2f3493a0394e555ab141bf0d746bb955", - "sha256:a83503934c6273806aed765035716216cc9ab4e0364f7f066227e1aaea90b8d0", 
- "sha256:ac9bb4c5ce3975aeac288cfcb5061ce60e0d14d92209e780c93954076c7c4367", - "sha256:aff634b15beff8902d1f918012fc2a42e0dbae6f469fce134c8a0dc51ca423bb", - "sha256:b03917871bf859a81ccb180c9a2e6c1e04d2f6a51d953e6a5cdd70c93d4e5a2a", - "sha256:b124e2a6d223b65ba8768d5706d103280914d61f5cae3afbc50fc3dfcc016623", - "sha256:b25322201585c69abc7b0e89e72790469f7dad90d26754717f3310bfe30331c2", - "sha256:b7232f8dfbd225d57340e441d8caf8652a6acd06b389ea2d3222b8bc89cbfca6", - "sha256:b8cc1863402472f16c600e3e93d542b7e7542a540f95c30afd472e8e549fc3f7", - "sha256:b9a4e67ad7b646cd6f0938c7ebfd60e481b7410f574c560e455e938d2da8e0f4", - "sha256:be6b3fdec5c62f2a67cb3f8c6dbf56bbf3f61c0f046f84645cd1ca73532ea051", - "sha256:bf74d08542c3a9ea97bb8f343d4fcbd4d8f91bba5ec9d5d7f792dbe727f88938", - "sha256:c027a6e96ef77d401d8d5a5c8d6bc478e8042f1e448272e8d9752cb0aff8b5c8", - "sha256:c0c77533b5ed4bcc38e943178ccae29b9bcf48ffd1063f5821192f23a1bd27b9", - "sha256:c1012fa63eb6c032f3ce5d2171c267992ae0c00b9e164efe4d73db818465fac3", - "sha256:c3a53ba34a636a256d767c086ceb111358876e1fb6b50dfc4d3f4951d40133d5", - "sha256:d4e2c6d555e77b37288eaf45b8f60f0737c9efa3452c6c44626a5455aeb250b9", - "sha256:de119f56f3c5f0e2fb4dee508531a32b069a5f2c6e827b272d1e0ff5ac040333", - "sha256:e65610c5792870d45d7b68c677681376fcf9cc1c289f23e8e8b39c1485384185", - "sha256:e9fdc7ac0d42bc3ea78818557fab03af6181e076a2944f43c38684b4b6bed8e3", - "sha256:ee4afac41415d52d53a9833ebae7e32b344be72835bbb589018c9e938045a560", - "sha256:f364d3480bffd3aa566e886587eaca7c8c04d74f6e8933f3f2c996b7f09bee1b", - "sha256:f3b078dbe227f79be488ffcfc7a9edb3409d018e0952cf13f15fd6512847f3f7", - "sha256:f4e2d08f07a3d7d3e12549052eb5ad3eab1c349c53ac51c209a0e5991bbada78", - "sha256:f7a3d8146575e08c29ed1cd287068e6d02f1c7bdff8970db96683b9591b86ee7" + "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51", + "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce", + 
"sha256:07574b007ee20e5c375a8fe4a0789fad26db905f9813be0f9fef5a68080de559", + "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0", + "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81", + "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc", + "sha256:18580f672e44ce1238b82f7fb87d727c4a131f3a9d33a5e0e82b793362bf18b4", + "sha256:1f23e4fe1e8794f74b6027d7cf19dc25f8b63af1483d91d595d4a07eca1fb26c", + "sha256:206a55215e6d05dbc6c98ce598a59e6fbd0c493e2de4ea6cc2f4934d5a18d130", + "sha256:23d32a2594cb5d565d358a92e151315d1b2268bc10f4610d098f96b147370136", + "sha256:26a1dc6285e03f3cc9e839a2da83bcbf31dcb0d004c72d0730e755b33466c30e", + "sha256:29e0f83f37610f173eb7e7b5562dd71467993495e568e708d99e9d1944f561ec", + "sha256:2b134fd795e2322b7684155b7855cc99409d10b2e408056db2b93b51a52accc7", + "sha256:2d47552b6e52c3319fede1b60b3de120fe83bde9b7bddad11a69fb0af7db32f1", + "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455", + "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099", + "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129", + "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10", + "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142", + "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98", + "sha256:4aa9741085f635934f3a2583e16fcf62ba835719a8b2b28fb2917bb0537c1dfa", + "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7", + "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525", + "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c", + "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9", + "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c", + "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8", + "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b", 
+ "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf", + "sha256:5a2e2433eb9344a163aced6a5f6c9222c0786e5a9e9cac2c89f0b28433f56e23", + "sha256:5aef935237d60a51a62b86249839b51345f47564208c6ee615ed2a40878dccdd", + "sha256:604f31d97fa493083ea21bd9b92c419012531c4e17ea6da0f65cacdcf5d0bd27", + "sha256:63b20738b5aac74e239622d2fe30df4fca4942a86e31bf47a81a0e94c14df94f", + "sha256:686a0c2f85f83463272ddffd4deb5e591c98aac1897d65e92319f729c320eece", + "sha256:6a962e04b8f91f8c4e5917e518d17958e3bdee71fd1d8b88cdce74dd0ebbf434", + "sha256:6ad6d10ed9b67a382b45f29ea028f92d25bc0bc1daf6c5b801b90b5aa70fb9ec", + "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff", + "sha256:6fe79f998a4052d79e1c30eeb7d6c1c1056ad33300f682465e1b4e9b5a188b78", + "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d", + "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863", + "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53", + "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31", + "sha256:824d6c50492add5da9374875ce72db7a0733b29c2394890aef23d533106e2b15", + "sha256:8397a3817d7dcdd14bb266283cd1d6fc7264a48c186b986f32e86d86d35fbac5", + "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b", + "sha256:84fc30f71689d7fc9168b92788abc977dc8cefa806909565fc2951d02f6b7d57", + "sha256:8619d6915b3b0b34420cf9b2bb6d81ef59d984cb0fde7544e9ece32b4b3043c3", + "sha256:8a854227cf581330ffa2c4824d96e52ee621dd571078a252c25e3a3b3d94a1b1", + "sha256:8be9e837ea9113676e5754b43b940b50cce76d9ed7d2461df1af39a8ee674d9f", + "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad", + "sha256:957b4774373cf6f709359e5c8c4a0af9f6d7875db657adb0feaf8d6cb3c3964c", + "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7", + "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2", + 
"sha256:a00862fb23195b6b8322f7d781b0dc1d82cb3bcac346d1e38689370cc1cc398b", + "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2", + "sha256:a6327976c7c2f4ee6816eff196e25385ccc02cb81427952414a64811037bbc8b", + "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9", + "sha256:a825ec844298c791fd28ed14ed1bffc56a98d15b8c58a20e0e08c1f5f2bea1be", + "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e", + "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984", + "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4", + "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074", + "sha256:ad4d7a90a92e528aadf4965d685c17dacff3df282db1121136c382dc0b6014d2", + "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392", + "sha256:ba6f52cbc7809cd8d74604cce9c14868306ae4aa0282016b641c661f981a6e91", + "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541", + "sha256:bef596fdaa8f26e3d66af846bbe77057237cb6e8efff8cd7cc8dff9a62278bbf", + "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572", + "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66", + "sha256:c7224cab95645c7ab53791022ae77a4509472613e839dab722a72abe5a684575", + "sha256:c74018551e31269d56fab81a728f683667e7c28c04e807ba08f8c9e3bba32f14", + "sha256:ca06675212f94e7a610e85ca36948bb8fc023e458dd6c63ef71abfd482481aa5", + "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1", + "sha256:d25039a474c4c72a5ad4b52495056f843a7ff07b632c1b92ea9043a3d9950f6e", + "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551", + "sha256:d7d7f7de27b8944f1fee2c26a88b4dabc2409d2fea7a9ed3df79b67277644e17", + "sha256:d7eeb6d22331e2fd42fce928a81c697c9ee2d51400bd1a28803965883e13cead", + "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0", + "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe", 
+ "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234", + "sha256:db8e58b9d79200c76956cefd14d5c90af54416ff5353c5bfd7cbe58818e26ef0", + "sha256:ddb2a5c08a4eaaba605340fdee8fc08e406c56617566d9643ad8bf6852778fc7", + "sha256:e0381b4ce23ff92f8170080c97678040fc5b08da85e9e292292aba67fdac6c34", + "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42", + "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385", + "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78", + "sha256:ec61d826d80fc293ed46c9dd26995921e3a82146feacd952ef0757236fc137be", + "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958", + "sha256:f3bc6af6e2b8f92eced34ef6a96ffb248e863af20ef4fde9448cc8c9b858b749", + "sha256:f7d6b36dd2e029b6bcb8a13cf19664c7b8e19ab3a58e0fefbb5b8461447ed5ec" ], "markers": "python_version >= '3.7'", - "version": "==1.9.2" + "version": "==1.9.4" } }, - "develop": {} + "develop": { + "sqlalchemy2-stubs": { + "hashes": [ + "sha256:861d722abeb12f13eacd775a9f09379b11a5a9076f469ccd4099961b95800f9e", + "sha256:b62aa46943807287550e2033dafe07564b33b6a815fbaa3c144e396f9cc53bcb" + ], + "index": "tencent", + "markers": "python_version >= '3.6'", + "version": "==0.0.2a38" + }, + "types-aiofiles": { + "hashes": [ + "sha256:7324f9a9f7200c1f4986a9e40a42b548290f707b967709f30b280e99fdacbd99", + "sha256:ef4fa3072441c58beaadbd0d07ba18e89beff49c71648dd223e2ca861f3dac53" + ], + "index": "tencent", + "markers": "python_version >= '3.8'", + "version": "==23.2.0.20240106" + }, + "types-croniter": { + "hashes": [ + "sha256:266d9ecabbc06afab7cc0cfa7f2149eb36f613ed66ddd6c9bac4edcf727e9a58", + "sha256:a5c92566d750e025ab31279029ab44b479e2e3509cd8db3784574bdab1012571" + ], + "index": "tencent", + "markers": "python_version >= '3.8'", + "version": "==2.0.0.20240106" + }, + "types-python-dateutil": { + "hashes": [ + "sha256:1f8db221c3b98e6ca02ea83a58371b22c374f42ae5bbdf186db9c9a76581459f", + 
"sha256:efbbdc54590d0f16152fa103c9879c7d4a00e82078f6e2cf01769042165acaa2" + ], + "index": "tencent", + "markers": "python_version >= '3.8'", + "version": "==2.8.19.20240106" + }, + "types-requests": { + "hashes": [ + "sha256:03a28ce1d7cd54199148e043b2079cdded22d6795d19a2c2a6791a4b2b5e2eb5", + "sha256:9592a9a4cb92d6d75d9b491a41477272b710e021011a2a3061157e2fb1f1a5d1" + ], + "index": "tencent", + "markers": "python_version >= '3.8'", + "version": "==2.31.0.20240125" + }, + "typing-extensions": { + "hashes": [ + "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783", + "sha256:56a8f7a8776ea160e59ef0af6fc3a3a03b7d42156b90e47f0241515fcec620c2", + "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd", + "sha256:cc22327e22d9b583d1565ce1ed9f5ecc22831afa743f8789a403cad849fb702b" + ], + "markers": "python_version >= '3.8'", + "version": "==4.9.0" + }, + "urllib3": { + "hashes": [ + "sha256:55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3", + "sha256:df7aa8afb0148fa78488e7899b2c59b5f4ffcfa82e6c54ccb9dd37c1d7b52d54" + ], + "markers": "python_version >= '3.8'", + "version": "==2.1.0" + } + } } diff --git a/backup.py b/backup.py index 8d2d0b1a69c..952fa01956f 100644 --- a/backup.py +++ b/backup.py @@ -4,6 +4,7 @@ # Author: Binux # http://binux.me # Created on 2014-08-09 11:39:25 +# pylint: disable=broad-exception-raised import sqlite3 @@ -127,13 +128,13 @@ def new(self, userid, maindb): `notepadid` INTEGER NOT NULL , `content` TEXT NULL ); - ''' ) + ''') # 获取数据库信息 userid = int(userid) - user = maindb.db.user.get(id=userid, fields=('id', 'email', 'email_verified', 'password', 'password_md5', 'userkey', 'nickname', 'role', 'ctime', 'mtime', 'atime', 'cip', - 'mip', 'aip', 'skey', 'barkurl', 'wxpusher', 'noticeflg', 'logtime', 'status', 'notepad', 'diypusher', 'qywx_token', 'tg_token', 'dingding_token', 'qywx_webhook', 'push_batch')) - userkey = maindb.db.user.__getuserkey(user['env']) + # user = maindb.db.user.get(id=userid, 
fields=('id', 'email', 'email_verified', 'password', 'password_md5', 'userkey', 'nickname', 'role', 'ctime', 'mtime', 'atime', 'cip', + # 'mip', 'aip', 'skey', 'barkurl', 'wxpusher', 'noticeflg', 'logtime', 'status', 'notepad', 'diypusher', 'qywx_token', 'tg_token', 'dingding_token', 'qywx_webhook', 'push_batch')) + # userkey = maindb.db.user.__getuserkey(user['env']) tpls = [] for tpl in maindb.db.tpl.list(fields=('id', 'userid', 'siteurl', 'sitename', 'banner', 'disabled', 'public', 'lock', 'fork', 'har', 'tpl', 'variables', 'interval', 'note', 'success_count', 'failed_count', 'last_success', 'ctime', 'mtime', 'atime', 'tplurl', 'updateable', '_groups', 'init_env'), limit=None): if tpl['userid'] == userid: @@ -141,16 +142,15 @@ def new(self, userid, maindb): tasks = [] tasklogs = [] for task in maindb.db.task.list(userid, fields=('id', 'tplid', 'userid', 'note', 'disabled', 'init_env', 'env', 'session', 'retry_count', 'retry_interval', 'last_success', 'success_count', - 'failed_count', 'last_failed', 'next', 'last_failed_count', 'ctime', 'mtime', 'ontimeflg', 'ontime', '_groups', 'pushsw', 'newontime'), limit=None): + 'failed_count', 'last_failed', 'next', 'last_failed_count', 'ctime', 'mtime', 'ontimeflg', 'ontime', '_groups', 'pushsw', 'newontime'), limit=None): if task['userid'] == userid: tasks.append(task) - for tasklog in maindb.db.tasklog.list(taskid = task['id'], fields=('id', "taskid", "success", "ctime", "msg")): + for tasklog in maindb.db.tasklog.list(taskid=task['id'], fields=('id', "taskid", "success", "ctime", "msg")): tasklogs.append(tasklog) c.close() conn.close() - except Exception as e: - raise Exception("backup database error") + raise Exception("backup database error") from e print("OK") diff --git a/chrole.py b/chrole.py index 9ee5c0ebfce..cff8182b57b 100644 --- a/chrole.py +++ b/chrole.py @@ -10,16 +10,20 @@ """ import asyncio +import logging import sys import db +logger = logging.getLogger(__name__) + def usage(): - print('Usage: python3 %s 
[role]' % sys.argv[0]) - print('Example: python3 %s admin@qd.today admin' % sys.argv[0]) + print(f'Usage: python3 {sys.argv[0]} [role]') + print(f'Example: python3 {sys.argv[0]} admin@qd.today admin') sys.exit(1) + async def main(): email = sys.argv[1] role = sys.argv[2] if len(sys.argv) == 3 else '' @@ -31,9 +35,9 @@ async def main(): sys.exit(1) rowcount = await userdb.mod(user['id'], role=role) if rowcount >= 1: - print("role of {} changed to {}".format(email, role or '[empty]')) + logger.info("role of %s changed to %s", email, role or '[empty]') else: - print("role of {} not changed".format(email)) + logger.warning("role of %s not changed", email) if __name__ == '__main__': diff --git a/config.py b/config.py index e24bb528ec6..02da4688f6a 100644 --- a/config.py +++ b/config.py @@ -5,22 +5,25 @@ # http://binux.me # Created on 2014-07-30 12:21:48 # Modified on 2023-06-13 18:12:35 +# pylint: disable=invalid-name, wildcard-import +# flake8: noqa: F401,F403 import hashlib import os -from distutils.util import strtobool from urllib.parse import parse_qs, urlparse +from libs.config_utils import strtobool + # QD 框架常用设置 -debug = bool(strtobool(os.getenv('QD_DEBUG','False'))) # 是否启用 QD 框架 Debug +debug = bool(strtobool(os.getenv('QD_DEBUG', 'False'))) # 是否启用 QD 框架 Debug bind = str(os.getenv('BIND', '0.0.0.0')) # 框架运行监听地址 ('0.0.0.0' 表示监听所有 IP 地址) -port = int(os.getenv('PORT', 8923)) # 监听端口 Port -multiprocess = bool(strtobool(os.getenv('MULTI_PROCESS','False'))) # 是否启用多进程模式, Windows 平台无效, 请谨慎使用 -autoreload = bool(strtobool(os.getenv('AUTO_RELOAD','False'))) # 是否启用自动热加载, `multiprocess=True` 时无效 -gzip = bool(strtobool(os.getenv('GZIP','True'))) # 是否启用 gzip +port = int(os.getenv('PORT', '8923')) # 监听端口 Port +multiprocess = bool(strtobool(os.getenv('MULTI_PROCESS', 'False'))) # 是否启用多进程模式, Windows 平台无效, 请谨慎使用 +autoreload = bool(strtobool(os.getenv('AUTO_RELOAD', 'False'))) # 是否启用自动热加载, `multiprocess=True` 时无效 +gzip = bool(strtobool(os.getenv('GZIP', 'True'))) # 是否启用 gzip 
accesslog = bool(strtobool(os.getenv('ACCESS_LOG', 'True'))) # 是否输出 Tornado access Log display_import_warning = bool(strtobool(os.getenv('DISPLAY_IMPORT_WARNING', 'True'))) # 是否显示导入模组失败或 Redis 连接失败的警告 -user0isadmin = bool(strtobool(os.getenv('USER0ISADMIN','True'))) # 是否将第一个注册用户设置为管理员 +user0isadmin = bool(strtobool(os.getenv('USER0ISADMIN', 'True'))) # 是否将第一个注册用户设置为管理员 static_url_prefix = os.getenv('STATIC_URL_PREFIX', '/static/') # 静态文件访问路径前缀, 默认为 '/static/' # 指定域名, 用于发送邮件及微信推送内链接域名显示, @@ -29,13 +32,13 @@ domain = os.getenv('DOMAIN', '') # 建议修改, 不然邮件重置密码之类的功能无效 # Cookie 及加密设置 -cookie_days = int(os.getenv('COOKIE_DAY', 5)) # Cookie 在客户端保留时间 +cookie_days = int(os.getenv('COOKIE_DAY', '5')) # Cookie 在客户端保留时间 cookie_secure_mode = bool(strtobool(os.getenv('COOKIE_SECURE_MODE', 'False'))) # Cookie 是否启用安全模式, 默认为 False, - # 启用后仅支持通过 HTTPS 访问 QD 框架, 请确保已正确配置 HTTPS 及证书 - # HTTP 访问将导致 Cookie 无法正常设置, 无法登录和使用框架功能 +# 启用后仅支持通过 HTTPS 访问 QD 框架, 请确保已正确配置 HTTPS 及证书 +# HTTP 访问将导致 Cookie 无法正常设置, 无法登录和使用框架功能 cookie_secret = hashlib.sha256(os.getenv('COOKIE_SECRET', 'binux').encode('utf-8')).digest() # Cookie 加密密钥, 强烈建议修改 -pbkdf2_iterations = int(os.getenv('PBKDF2_ITERATIONS', 400)) # pbkdf2 迭代次数 +pbkdf2_iterations = int(os.getenv('PBKDF2_ITERATIONS', '400')) # pbkdf2 迭代次数 aes_key = hashlib.sha256(os.getenv('AES_KEY', 'binux').encode('utf-8')).digest() # AES 加密密钥, 强烈建议修改 # 数据库设置 @@ -45,24 +48,28 @@ ## MySQL URL设置 mysql_url = urlparse(os.getenv('JAWSDB_MARIA_URL', '')) # 格式: mysql://用户名:密码@hostname:port/数据库名?auth_plugin= -## 数据库连接参数, 建议基于 MySQL URL 自动设置, 可选 -class mysql(object): + +class mysql: + ## 数据库连接参数, 建议基于 MySQL URL 自动设置, 可选 host = mysql_url.hostname or 'localhost' # 访问 MySQL 的 Hostname port = mysql_url.port or '3306' # MySQL 的 端口Port database = mysql_url.path[1:] or 'qd' # QD 框架的数据库名 user = mysql_url.username or 'qd' # 拥有访问 MySQL 内 QD 框架数据库权限的用户名 passwd = mysql_url.password or None # 用户名对应的密码 - auth_plugin = parse_qs(mysql_url.query).get('auth_plugin',[''])[0] # auth_plugin, 
默认为空, 可修改为'mysql_native_password','caching_sha2_password' + auth_plugin = parse_qs(mysql_url.query).get('auth_plugin', [''])[0] # auth_plugin, 默认为空, 可修改为'mysql_native_password','caching_sha2_password' ## Sqlite3 设置 -class sqlite3(object): - path = os.path.join(os.path.dirname(__file__),'config','database.db') # Sqlite3数据库文件地址 -## SQLAlchmey配置 -class sqlalchemy(object): + +class sqlite3: + path = os.path.join(os.path.dirname(__file__), 'config', 'database.db') # Sqlite3数据库文件地址 + + +class sqlalchemy: + ## SQLAlchmey配置 logging_name = os.getenv('QD_SQL_LOGGING_NAME', 'QD.sql') # SQLAlchmey 日志名称 logging_level = os.getenv('QD_SQL_LOGGING_LEVEL', 'WARNING') # SQLAlchmey 日志级别 - pool_logging_name = os.getenv('QD_SQL_POOL_LOGGING_NAME', 'QD.sql.pool')# 连接池日志名称 + pool_logging_name = os.getenv('QD_SQL_POOL_LOGGING_NAME', 'QD.sql.pool') # 连接池日志名称 pool_logging_level = os.getenv('QD_SQL_POOL_LOGGING_LEVEL', 'WARNING') # 连接池日志级别 pool_size = int(os.getenv('QD_SQL_POOL_SIZE', '10')) # 连接池大小 max_overflow = int(os.getenv('QD_SQL_MAX_OVERFLOW', '50')) # 连接池连接数量超过 pool_size 时, 最大连接数 @@ -71,65 +78,70 @@ class sqlalchemy(object): pool_timeout = int(os.getenv('QD_SQL_POOL_TIMEOUT', '60')) # 连接池获取连接超时时间, 默认为 60 秒 pool_use_lifo = bool(strtobool(os.getenv('QD_SQL_POOL_USE_LIFO', 'True'))) # 连接池是否使用 LIFO, 默认为 True + # Redis 设置 ## Redis URL设置 redis_url = urlparse(os.getenv('REDISCLOUD_URL', '')) # 格式: (redis/http)://rediscloud:密码@hostname:port -## redis 连接参数, -## 建议基于 Redis URL 自动设置, 可选 -class redis(object): + +class redis: + ## redis 连接参数, 建议基于 Redis URL 自动设置, 可选 host = redis_url.hostname or 'localhost' # 访问 Redis 的 Hostname port = redis_url.port or 6379 # Redis 的 端口Port passwd = redis_url.password or None # 访问 Redis 权限密码 - db = int(os.getenv('REDIS_DB_INDEX', 1)) # 索引 -evil = int(os.getenv('QD_EVIL', 500)) # Redis连接成功后生效, 用于登录用户或IP在1小时内 操作失败(如登录, 验证, 测试等操作)次数*相应惩罚分值 达到evil值上限后自动封禁直至下一小时周期 -evil_pass_lan_ip = bool(strtobool(os.getenv('EVIL_PASS_LAN_IP','True'))) # 是否针对本机私有IP地址用户及 Localhost_API 
请求关闭 evil 限制 + db = int(os.getenv('REDIS_DB_INDEX', '1')) # 索引 + + +evil = int(os.getenv('QD_EVIL', '500')) # Redis连接成功后生效, 用于登录用户或IP在1小时内 操作失败(如登录, 验证, 测试等操作)次数*相应惩罚分值 达到evil值上限后自动封禁直至下一小时周期 +evil_pass_lan_ip = bool(strtobool(os.getenv('EVIL_PASS_LAN_IP', 'True'))) # 是否针对本机私有IP地址用户及 Localhost_API 请求关闭 evil 限制 # 任务运行相关设置 -worker_method = str(os.getenv('WORKER_METHOD','Queue')).upper() # 任务定时执行方式, 默认为 Queue, 可选 Queue 或 Batch, Batch 模式为旧版定时任务执行方式, 性能较弱, 建议仅当定时执行失效时使用 -queue_num = int(os.getenv('QUEUE_NUM', 50)) # 定时执行任务队列最大数量, 仅在 Queue 模式下生效 -check_task_loop = int(os.getenv('CHECK_TASK_LOOP', 500)) # Worker 检查任务工作循环时间, 单位毫秒 -task_max_retry_count = int(os.getenv('TASK_MAX_RETRY_COUNT', 8)) # 任务失败默认最大重试次数, 默认为8次 -new_task_delay = int(os.getenv('NEW_TASK_DELAY', 1)) # 新建任务后准备时间, 单位为秒, 默认为1秒 -task_while_loop_timeout = int(os.getenv('TASK_WHILE_LOOP_TIMEOUT', 15*60)) # 任务运行中单个 While 循环最大运行时间, 单位为秒, 默认为15分钟 -task_request_limit = int(os.getenv('TASK_REQUEST_LIMIT', 1500)) # 任务运行中单个任务最大请求次数, 默认为 1500 次 +worker_method = str(os.getenv('WORKER_METHOD', 'Queue')).upper() # 任务定时执行方式, 默认为 Queue, 可选 Queue 或 Batch, Batch 模式为旧版定时任务执行方式, 性能较弱, 建议仅当定时执行失效时使用 +queue_num = int(os.getenv('QUEUE_NUM', '50')) # 定时执行任务队列最大数量, 仅在 Queue 模式下生效 +check_task_loop = int(os.getenv('CHECK_TASK_LOOP', '500')) # Worker 检查任务工作循环时间, 单位毫秒 +task_max_retry_count = int(os.getenv('TASK_MAX_RETRY_COUNT', '8')) # 任务失败默认最大重试次数, 默认为8次 +new_task_delay = int(os.getenv('NEW_TASK_DELAY', '1')) # 新建任务后准备时间, 单位为秒, 默认为1秒 +task_while_loop_timeout = int(os.getenv('TASK_WHILE_LOOP_TIMEOUT', '900')) # 任务运行中单个 While 循环最大运行时间, 单位为秒, 默认为15分钟 +task_request_limit = int(os.getenv('TASK_REQUEST_LIMIT', '1500')) # 任务运行中单个任务最大请求次数, 默认为 1500 次 # Tornado httpclient.HTTPRequest参数配置 -download_size_limit = int(os.getenv('DOWNLOAD_SIZE_LIMIT', 5*1024*1024)) # 允许用户单次请求下载最大值 -request_timeout = float(os.getenv('REQUEST_TIMEOUT', 30.0)) # HTTP Request 请求超时时间 -connect_timeout = float(os.getenv('CONNECT_TIMEOUT', 30.0)) # HTTP Request 连接超时时间 
-delay_max_timeout = float(os.getenv('DELAY_MAX_TIMEOUT', 29.9)) # 延时API 最大时间限制, 请小于上述 timeout 配置, 否则会报 599 错误 -unsafe_eval_timeout = float(os.getenv('UNSAFE_EVAL_TIMEOUT', 3.0)) # unsafe_eval 最大时间限制 +download_size_limit = int(os.getenv('DOWNLOAD_SIZE_LIMIT', '5242880')) # 允许用户单次请求下载最大值 +request_timeout = float(os.getenv('REQUEST_TIMEOUT', '30.0')) # HTTP Request 请求超时时间 +connect_timeout = float(os.getenv('CONNECT_TIMEOUT', '30.0')) # HTTP Request 连接超时时间 +delay_max_timeout = float(os.getenv('DELAY_MAX_TIMEOUT', '29.9')) # 延时API 最大时间限制, 请小于上述 timeout 配置, 否则会报 599 错误 +unsafe_eval_timeout = float(os.getenv('UNSAFE_EVAL_TIMEOUT', '3.0')) # unsafe_eval 最大时间限制 # PyCurl 相关设置 -use_pycurl = bool(strtobool(os.getenv('USE_PYCURL','True'))) # 是否启用 Pycurl 模组, 当环境无 PyCurl 模组时无效 +use_pycurl = bool(strtobool(os.getenv('USE_PYCURL', 'True'))) # 是否启用 Pycurl 模组, 当环境无 PyCurl 模组时无效 allow_retry = bool(strtobool(os.getenv('ALLOW_RETRY', 'True'))) # 在 Pycurl 环境下部分请求可能导致 Request 错误时, 自动修改冲突设置并重发请求 dns_server = str(os.getenv('DNS_SERVER', '')) # 通过 Curl 使用指定 DNS 进行解析(仅支持 Pycurl 环境) curl_encoding = bool(strtobool(os.getenv('CURL_ENCODING', 'True'))) # 是否允许使用 Curl 进行 Encoding 操作, - # 当 PyCurl 返回 "Error 61 Unrecognized transfer encoding." 错误, - # 且 `ALLOW_RETRY=True` 时, 本次请求禁用 Headers 中的 Content-Encoding 并重试 +# 当 PyCurl 返回 "Error 61 Unrecognized transfer encoding." 
错误, +# 且 `ALLOW_RETRY=True` 时, 本次请求禁用 Headers 中的 Content-Encoding 并重试 curl_length = bool(strtobool(os.getenv('CURL_CONTENT_LENGTH', 'True'))) # 是否允许 Curl 使用 Headers 中自定义 Content-Length 请求, - # 当PyCurl返回 "HTTP 400 Bad Request" 错误, 且 `ALLOW_RETRY=True` 时, - # 本次请求禁用 Headers 中的 Content-Length 并重试 -not_retry_code = list(map(int,os.getenv('NOT_RETRY_CODE', '301|302|303|304|305|307|400|401|403|404|405|407|408|409|410|412|415|413|414|500|501|502|503|504|599').split('|'))) - # 启用后, 当满足 PyCurl 启用, HTTPError code 不在该列表中, 任务代理为空, - # 且 `ALLOW_RETRY=True` 时, 本次请求禁用 Pycurl 并重试 +# 当PyCurl返回 "HTTP 400 Bad Request" 错误, 且 `ALLOW_RETRY=True` 时, +# 本次请求禁用 Headers 中的 Content-Length 并重试 +not_retry_code = list(map(int, os.getenv('NOT_RETRY_CODE', '301|302|303|304|305|307|400|401|403|404|405|407|408|409|410|412|415|413|414|500|501|502|503|504|599').split('|'))) +# 启用后, 当满足 PyCurl 启用, HTTPError code 不在该列表中, 任务代理为空, +# 且 `ALLOW_RETRY=True` 时, 本次请求禁用 Pycurl 并重试 empty_retry = bool(strtobool(os.getenv('EMPTY_RETRY', 'True'))) # 启用后, 当满足 PyCurl 启用, 返回 Response 为空, 任务代理为空, - # 且 `ALLOW_RETRY=True` 时, 本次请求禁用 Pycurl 并重试 +# 且 `ALLOW_RETRY=True` 时, 本次请求禁用 Pycurl 并重试 # 日志及推送设置 traceback_print = bool(strtobool(os.getenv('TRACEBACK_PRINT', 'True' if debug else 'False'))) # 是否启用在控制台日志中打印 Exception 的 TraceBack 信息 push_pic = os.getenv('PUSH_PIC_URL', 'https://gitee.com/qd-today/qd/raw/master/web/static/img/push_pic.png') # 日志推送默认图片地址 push_batch_sw = bool(strtobool(os.getenv('PUSH_BATCH_SW', 'True'))) # 是否允许开启定期推送任务日志, 默认为 True -push_batch_delta = int(os.getenv('PUSH_BATCH_DELTA', 60)) # 执行 PUSH_BATCH 的时间间隔, 单位为秒, 默认为 60s, 非全局推动 QD 任务日志间隔 +push_batch_delta = int(os.getenv('PUSH_BATCH_DELTA', '60')) # 执行 PUSH_BATCH 的时间间隔, 单位为秒, 默认为 60s, 非全局推动 QD 任务日志间隔 + + +class websocket: + # WebSocket 设置 + ping_interval = int(os.getenv('WS_PING_INTERVAL', '5')) # WebSocket ping 间隔, 单位为秒, 默认为 5s + ping_timeout = int(os.getenv('WS_PING_TIMEOUT', '30')) # WebSocket ping超时时间, 单位为秒, 默认为 30s + max_message_size = 
int(os.getenv('WS_MAX_MESSAGE_SIZE', '10485760')) # WebSocket 单次接收最大消息大小, 默认为 10MB + max_queue_size = int(os.getenv('WS_MAX_QUEUE_SIZE', '100')) # WebSocket 最大消息队列大小, 默认为 100 + max_connections_subscribe = int(os.getenv('WS_MAX_CONNECTIONS_SUBSCRIBE', '30')) # WebSocket 公共模板更新页面最大连接数, 默认为 30 -# WebSocket 设置 -class websocket(object): - ping_interval = int(os.getenv('WS_PING_INTERVAL', 5)) # WebSocket ping 间隔, 单位为秒, 默认为 5s - ping_timeout = int(os.getenv('WS_PING_TIMEOUT', 30)) # WebSocket ping超时时间, 单位为秒, 默认为 30s - max_message_size = int(os.getenv('WS_MAX_MESSAGE_SIZE', 10*1024*1024)) # WebSocket 单次接收最大消息大小, 默认为 10MB - max_queue_size = int(os.getenv('WS_MAX_QUEUE_SIZE', 100)) # WebSocket 最大消息队列大小, 默认为 100 - max_connections_subscribe = int(os.getenv('WS_MAX_CONNECTIONS_SUBSCRIBE', 30)) # WebSocket 公共模板更新页面最大连接数, 默认为 30 # 订阅加速方式或地址, 用于加速公共模板更新, 仅适用于 GitHub. # 可选 jsdelivr_cdn/jsdelivr_fastly/ghproxy/fastgit/自定义地址, 默认为: jsdelivr_cdn. @@ -144,9 +156,9 @@ class websocket(object): ## 任务级代理请在新建或修改任务时添加,任务级代理优先级大于全局代理; proxies = os.getenv('PROXIES', '').split('|') # 若希望部分地址不走代理, 请修改 `proxy_direct_mode` 及 `proxy_direct` proxy_direct_mode = os.getenv('PROXY_DIRECT_MODE', 'regexp') # 直连地址的匹配模式, 默认为 'regexp' 以过滤本地请求, 可选输入: - # 'regexp' 为正则表达式匹配模式; - # 'url' 为网址匹配模式; - # '' 空则不启用全局代理黑名单 +# 'regexp' 为正则表达式匹配模式; +# 'url' 为网址匹配模式; +# '' 空则不启用全局代理黑名单 ## 不同 `proxy_direct_mode` 模式下的直连地址匹配规则: ## `proxy_direct_mode = os.getenv('PROXY_DIRECT_MODE', 'url')` 进入网址完全匹配模式, ## 在 `proxy_direct` 名单的 url 均不通过代理请求, 以 '|' 分隔url网址, @@ -160,43 +172,45 @@ class websocket(object): ([a-z][a-z0-9+\-.]*://)? # Scheme (0(.0){3}|127(.0){2}.1|localhost|\[::([\d]+)?\]) # Domain/Hostname/IPv4/IPv6 (:[0-9]+)? 
""" # :Port - ) + ) + +# 记事本设置 +notepad_limit = int(os.getenv('NOTEPAD_LIMIT', '20')) # 单个用户拥有记事本最大数量, 默认为 20 # DdddOCR 设置 extra_onnx_name = os.getenv('EXTRA_ONNX_NAME', '').split('|') # config 目录下自定义 ONNX 文件名(不含 ".onnx" 后缀), 多个onnx文件名用"|"分隔 -extra_charsets_name = os.getenv('EXTRA_CHARSETS_NAME', '').split('|') # config 目录下自定义 ONNX 对应自定义 `charsets.json` 文件名(不含 ".json" 后缀), - # 多个 json 文件名用"|"分隔 +extra_charsets_name = os.getenv('EXTRA_CHARSETS_NAME', '').split('|') # config 目录下自定义 ONNX 对应自定义 `charsets.json` 文件名(不含 ".json" 后缀), 多个 json 文件名用"|"分隔 # 邮件发送相关配置 -mail_smtp = os.getenv('MAIL_SMTP',"") # 邮箱 SMTP 服务器 -mail_port = int(os.getenv('MAIL_PORT', 465)) # 邮箱 SMTP 服务器端口 -mail_ssl = bool(strtobool(os.getenv('MAIL_SSL','True'))) # 是否使用 SSL 加密方式收发邮件 +mail_smtp = os.getenv('MAIL_SMTP', "") # 邮箱 SMTP 服务器 +mail_port = int(os.getenv('MAIL_PORT', '465')) # 邮箱 SMTP 服务器端口 +mail_ssl = bool(strtobool(os.getenv('MAIL_SSL', 'True'))) # 是否使用 SSL 加密方式收发邮件 mail_user = os.getenv('MAIL_USER', '') # 邮箱用户名 mail_password = os.getenv('MAIL_PASSWORD', '') # 邮箱密码 mail_from = os.getenv('MAIL_FROM', mail_user) # 发送时使用的邮箱,默认与 MAIL_USER 相同 -mail_domain_https = bool(strtobool(os.getenv('ENABLE_HTTPS', None) or - os.getenv('MAIL_DOMAIN_HTTPS', 'False')))# 发送的邮件链接启用 HTTPS, 非框架自身 HTTPS 开关, 需要 HTTPS 请使用外部反向代理 +mail_domain_https = bool(strtobool(os.getenv('ENABLE_HTTPS', 'False'))) or \ + bool(strtobool(os.getenv('MAIL_DOMAIN_HTTPS', 'False'))) # ))# 发送的邮件链接启用 HTTPS, 非框架自身 HTTPS 开关, 需要 HTTPS 请使用外部反向代理 ## Mailgun 邮件发送方式配置 ## Mailgun 中 Domain 为 QD 框架域名 `domain` 的值 -mailgun_key = os.getenv('MAILGUN_KEY',"") # Mailgun Api_Key, 若不为空则优先用 Mailgun 方式发送邮件 +mailgun_key = os.getenv('MAILGUN_KEY', "") # Mailgun Api_Key, 若不为空则优先用 Mailgun 方式发送邮件 mailgun_domain = os.getenv('MAILGUN_DOMAIN', domain) # Mailgun Domain # Google Analytics 设置 ga_key = os.getenv('GA_KEY', '') # Google Analytics (GA) 密钥, 为空则不启用, - # GA 密钥格式为 G-XXXXXXXXXX, - # 如果为 UA-XXXXXXXXX-X, 请前往GA后台获取新版密钥 +# GA 密钥格式为 G-XXXXXXXXXX, +# 如果为 UA-XXXXXXXXX-X, 
请前往GA后台获取新版密钥 try: from local_config import * # 修改 `local_config.py` 文件的内容不受通过 Git 更新源码的影响 if not hasattr(mysql, 'auth_plugin'): - setattr(mysql, 'auth_plugin', parse_qs(mysql_url.query).get('auth_plugin',[''])[0]) + setattr(mysql, 'auth_plugin', parse_qs(mysql_url.query).get('auth_plugin', [''])[0]) except ImportError: pass try: - from libs.parse_url import parse_url - for index,proxy in enumerate(proxies): - if isinstance(proxy,str): + from libs.parse_url import parse_url # pylint: disable=ungrouped-imports + for index, proxy in enumerate(proxies): + if isinstance(proxy, str): proxies[index] = parse_url(proxy) except Exception as e: raise e diff --git a/db/__init__.py b/db/__init__.py index d333539e18b..8ead62530e8 100644 --- a/db/__init__.py +++ b/db/__init__.py @@ -5,21 +5,17 @@ # http://binux.me # Created on 2014-08-08 20:28:15 -import os -import sys from db.basedb import AlchemyMixin - -sys.path.append(os.path.abspath(os.path.dirname(os.path.dirname(__file__)))) -from .notepad import Notepad -from .pubtpl import Pubtpl -from .push_request import PushRequest -from .redisdb import RedisDB -from .site import Site -from .task import Task -from .tasklog import Tasklog -from .tpl import Tpl -from .user import User +from db.notepad import Notepad +from db.pubtpl import Pubtpl +from db.push_request import PushRequest +from db.redisdb import RedisDB +from db.site import Site +from db.task import Task +from db.tasklog import Tasklog +from db.tpl import Tpl +from db.user import User class DB(AlchemyMixin): @@ -33,4 +29,3 @@ def __init__(self) -> None: self.site = Site() self.pubtpl = Pubtpl() self.notepad = Notepad() - diff --git a/db/basedb.py b/db/basedb.py index f7f0fa1fd29..1291356084d 100644 --- a/db/basedb.py +++ b/db/basedb.py @@ -6,75 +6,76 @@ # Created on 2012-08-30 17:43:49 import contextlib -import logging from asyncio import current_task -from typing import Tuple +from typing import AsyncIterator, Optional, Union -from sqlalchemy import text from 
sqlalchemy.dialects.mysql import Insert -from sqlalchemy.engine import CursorResult, Result, ScalarResult +from sqlalchemy.engine import Result, Row from sqlalchemy.ext.asyncio import (AsyncSession, async_scoped_session, create_async_engine) from sqlalchemy.orm import declarative_base, sessionmaker from sqlalchemy.sql import Delete, Select, Update +from sqlalchemy.sql.elements import TextClause import config from libs.log import Log if config.db_type == 'mysql': - host=config.mysql.host - port=config.mysql.port - database=config.mysql.database - user=config.mysql.user - passwd=config.mysql.passwd - auth_plugin=config.mysql.auth_plugin + host = config.mysql.host + port = config.mysql.port + database = config.mysql.database + user = config.mysql.user + passwd = config.mysql.passwd + auth_plugin = config.mysql.auth_plugin engine_url = f"mysql+aiomysql://{user}:{passwd}@{host}:{port}/{database}?auth_plugin={auth_plugin}" engine = create_async_engine(engine_url, - logging_name = config.sqlalchemy.logging_name, - pool_size = config.sqlalchemy.pool_size, - max_overflow = config.sqlalchemy.max_overflow, - pool_logging_name = config.sqlalchemy.pool_logging_name, - pool_pre_ping = config.sqlalchemy.pool_pre_ping, - pool_recycle = config.sqlalchemy.pool_recycle, - pool_timeout = config.sqlalchemy.pool_timeout, - pool_use_lifo = config.sqlalchemy.pool_use_lifo) + logging_name=config.sqlalchemy.logging_name, + pool_size=config.sqlalchemy.pool_size, + max_overflow=config.sqlalchemy.max_overflow, + pool_logging_name=config.sqlalchemy.pool_logging_name, + pool_pre_ping=config.sqlalchemy.pool_pre_ping, + pool_recycle=config.sqlalchemy.pool_recycle, + pool_timeout=config.sqlalchemy.pool_timeout, + pool_use_lifo=config.sqlalchemy.pool_use_lifo) elif config.db_type == 'sqlite3': engine_url = f"sqlite+aiosqlite:///{config.sqlite3.path}" engine = create_async_engine(engine_url, - logging_name = config.sqlalchemy.logging_name, - pool_logging_name = config.sqlalchemy.pool_logging_name, - 
pool_pre_ping = config.sqlalchemy.pool_pre_ping, - pool_recycle = config.sqlalchemy.pool_recycle ) + logging_name=config.sqlalchemy.logging_name, + pool_logging_name=config.sqlalchemy.pool_logging_name, + pool_pre_ping=config.sqlalchemy.pool_pre_ping, + pool_recycle=config.sqlalchemy.pool_recycle) Log('aiosqlite', logger_level=config.sqlalchemy.pool_logging_level, channel_level=config.sqlalchemy.pool_logging_level).getlogger() else: raise Exception('db_type must be mysql or sqlite3') -logger_DB = Log('sqlalchemy', +logger_db = Log('sqlalchemy', logger_level=config.sqlalchemy.logging_level, channel_level=config.sqlalchemy.logging_level).getlogger() -logger_DB_Engine = Log(engine.engine.logger, - logger_level=config.sqlalchemy.logging_level, - channel_level=config.sqlalchemy.logging_level).getlogger() -if hasattr(engine.pool.logger, 'logger'): - logger_DB_POOL = Log(engine.pool.logger.logger, - logger_level=config.sqlalchemy.pool_logging_level, - channel_level=config.sqlalchemy.pool_logging_level).getlogger() -else: - logger_DB_POOL = Log(engine.pool.logger, - logger_level=config.sqlalchemy.pool_logging_level, - channel_level=config.sqlalchemy.pool_logging_level).getlogger() +logger_db_engine = Log(getattr(engine.sync_engine, 'logger', f'sqlalchemy.engine.Engine.{config.sqlalchemy.logging_name}'), + logger_level=config.sqlalchemy.logging_level, + channel_level=config.sqlalchemy.logging_level).getlogger() +if hasattr(engine.sync_engine.pool, 'logger'): + if hasattr(getattr(engine.sync_engine.pool, 'logger'), 'logger'): + logger_db_pool = Log(engine.sync_engine.pool.logger.logger, + logger_level=config.sqlalchemy.pool_logging_level, + channel_level=config.sqlalchemy.pool_logging_level).getlogger() + else: + logger_db_pool = Log(engine.sync_engine.pool.logger, + logger_level=config.sqlalchemy.pool_logging_level, + channel_level=config.sqlalchemy.pool_logging_level).getlogger() async_session = async_scoped_session(sessionmaker(engine, class_=AsyncSession, 
expire_on_commit=False), scopefunc=current_task) BaseDB = declarative_base(bind=engine, name="BaseDB") + class AlchemyMixin: @property def sql_session(self) -> AsyncSession: return async_session() @contextlib.asynccontextmanager - async def transaction(self, sql_session:AsyncSession=None): + async def transaction(self, sql_session: Optional[AsyncSession] = None) -> AsyncIterator[AsyncSession]: if sql_session is None: async with self.sql_session as sql_session: # deepcode ignore AttributeLoadOnNone: sql_session is not None @@ -86,26 +87,24 @@ async def transaction(self, sql_session:AsyncSession=None): else: yield sql_session - async def _execute(self, text:Tuple[str,text], sql_session:AsyncSession=None): + async def _execute(self, text: Union[str, TextClause], sql_session: Optional[AsyncSession] = None): async with self.transaction(sql_session) as sql_session: if isinstance(text, str): - text = text.replace(':', r'\:') + text = text.replace(':', r'\:') # 如果text原本是个字符串,则转义冒号 + text = TextClause(text) # 将其转换为TextClause对象 result = await sql_session.execute(text) return result - async def _get(self, stmt: Select, one_or_none=False, first=False, all=True, sql_session:AsyncSession=None): + async def _get(self, stmt: Select, one_or_none=False, first=False, sql_session: Optional[AsyncSession] = None): async with self.transaction(sql_session) as sql_session: result: Result = await sql_session.execute(stmt) if one_or_none: return result.scalar_one_or_none() - elif first: + if first: return result.first() - elif all: - return result.all() - else: - return result + return result.all() - async def _insert(self, instance, many=False, sql_session:AsyncSession=None): + async def _insert(self, instance, many=False, sql_session: Optional[AsyncSession] = None): async with self.transaction(sql_session) as sql_session: if many: sql_session.add_all(instance) @@ -114,27 +113,26 @@ async def _insert(self, instance, many=False, sql_session:AsyncSession=None): await sql_session.flush() 
return instance.id - async def _update(self, stmt: Update, sql_session:AsyncSession=None): + async def _update(self, stmt: Update, sql_session: Optional[AsyncSession] = None): async with self.transaction(sql_session) as sql_session: result: Result = await sql_session.execute(stmt) - return result.rowcount + return result.rowcount if hasattr(result, 'rowcount') else -1 - async def _insert_or_update(self, insert_stmt: Insert, sql_session:AsyncSession=None, **kwargs) -> int: + async def _insert_or_update(self, insert_stmt: Insert, sql_session: Optional[AsyncSession] = None, **kwargs) -> int: async with self.transaction(sql_session) as sql_session: - on_duplicate_key_stmt = insert_stmt.on_duplicate_key_update(**kwargs) - result: CursorResult = await sql_session.execute(on_duplicate_key_stmt) - return result.lastrowid + insert_stmt.on_duplicate_key_update(**kwargs) + result: Result = await sql_session.execute(insert_stmt) + return result.lastrowid if hasattr(result, 'lastrowid') else -1 - async def _delete(self, stmt: Delete, sql_session:AsyncSession=None): + async def _delete(self, stmt: Delete, sql_session: Optional[AsyncSession] = None): async with self.transaction(sql_session) as sql_session: result: Result = await sql_session.execute(stmt) - return result.rowcount + return result.rowcount if hasattr(result, 'rowcount') else -1 @staticmethod - def to_dict(result,fields=None): + def to_dict(result: Row, fields=None): if result is None: return result if fields is None: return {c.name: getattr(result[0], c.name) for c in result[0].__table__.columns} - else: - return dict(result._mapping) + return dict(result._mapping) # pylint: disable=protected-access diff --git a/db/db_converter.py b/db/db_converter.py index 3a32ac286f9..eb6631a58cd 100644 --- a/db/db_converter.py +++ b/db/db_converter.py @@ -7,28 +7,35 @@ import json import re import warnings +from typing import Optional +import aiosqlite +from jinja2.nodes import Filter, Name from sqlalchemy import update import 
config from db import DB, Site, Task, Tpl, User from libs import mcrypto as crypto +from libs.fetcher import Fetcher from libs.log import Log -logger_DB_converter = Log('QD.DB.Converter').getlogger() +logger_db_converter = Log('QD.DB.Converter').getlogger() + class DBconverter(): def __init__(self, path=config.sqlite3.path): self.path = path + self.db = None - async def ConvertNewType(self, db:DB=None, path=config.sqlite3.path): + async def convert_new_type(self, db: Optional[DB] = None): if db: self.db = db else: self.db = DB() - exec_shell = self.db._execute + exec_shell = self.db._execute # pylint: disable=protected-access + update_shell = self.db._update # pylint: disable=protected-access - async def _convert_task(group_with_underline:bool): + async def _convert_task(group_with_underline: bool): async with self.db.transaction() as sql_session: await exec_shell("ALTER TABLE `task` RENAME TO `taskold`", sql_session=sql_session) if config.db_type == 'sqlite3': @@ -59,22 +66,22 @@ async def _convert_task(group_with_underline:bool): `_groups` VARCHAR(256) NOT NULL DEFAULT 'None', `pushsw` VARCHAR(128) NOT NULL DEFAULT '{\"logen\":false,\"pushen\":true}', `newontime` VARCHAR(256) NOT NULL DEFAULT '{\"sw\":false,\"time\":\"00:10:10\",\"randsw\":false,\"tz1\":0,\"tz2\":0}' - );'''% autokey, sql_session=sql_session) + );''' % autokey, sql_session=sql_session) if group_with_underline: await exec_shell("INSERT INTO `task` SELECT `id`,`tplid`,`userid`,`disabled`,`init_env`,`env`,`session`,`retry_count`,`retry_interval`,`last_success`,`last_failed`,`success_count`,`failed_count`,`last_failed_count`,`next`,`note`,`ctime`,`mtime`,`ontimeflg`,`ontime`,`_groups`,`pushsw`,`newontime` FROM `taskold` ", sql_session=sql_session) else: await exec_shell("INSERT INTO `task` SELECT 
`id`,`tplid`,`userid`,`disabled`,`init_env`,`env`,`session`,`retry_count`,`retry_interval`,`last_success`,`last_failed`,`success_count`,`failed_count`,`last_failed_count`,`next`,`note`,`ctime`,`mtime`,`ontimeflg`,`ontime`,`groups`,`pushsw`,`newontime` FROM `taskold` ", sql_session=sql_session) await exec_shell("DROP TABLE `taskold` ", sql_session=sql_session) - async def _convert_tpl(group_with_underline:bool): + async def _convert_tpl(group_with_underline: bool): async with self.db.transaction() as sql_session: await exec_shell("ALTER TABLE `tpl` RENAME TO `tplold`", sql_session=sql_session) if config.db_type == 'sqlite3': autokey = 'AUTOINCREMENT' else: autokey = 'AUTO_INCREMENT' - await exec_shell('''CREATE TABLE IF NOT EXISTS `tpl` ( - `id` INTEGER NOT NULL PRIMARY KEY %s, + await exec_shell(f"""CREATE TABLE IF NOT EXISTS `tpl` ( + `id` INTEGER NOT NULL PRIMARY KEY {autokey}, `userid` INT UNSIGNED NULL, `siteurl` VARCHAR(256) NULL, `sitename` VARCHAR(128) NULL, @@ -97,7 +104,7 @@ async def _convert_tpl(group_with_underline:bool): `tplurl` VARCHAR(1024) NULL DEFAULT '', `updateable` INT UNSIGNED NOT NULL DEFAULT 0, `_groups` VARCHAR(256) NOT NULL DEFAULT 'None' - );'''% autokey, sql_session=sql_session) + );""", sql_session=sql_session) if group_with_underline: await exec_shell("INSERT INTO `tpl` SELECT `id`,`userid`,`siteurl`,`sitename`,`banner`,`disabled`,`public`,`lock`,`fork`,`har`,`tpl`,`variables`,`interval`,`note`,`success_count`,`failed_count`,`last_success`,`ctime`,`mtime`,`atime`,`tplurl`,`updateable`,`_groups` FROM `tplold` ", sql_session=sql_session) else: @@ -175,9 +182,9 @@ async def _convert_user(): `tg_token` VARCHAR(1024) NOT NULL DEFAULT '', `dingding_token` VARCHAR(1024) NOT NULL DEFAULT '', `push_batch` VARCHAR(1024) NOT NULL DEFAULT '{"sw":false,"time":0,"delta":86400}' - );'''% autokey) - await exec_shell('''CREATE TABLE IF NOT EXISTS `tpl` ( - `id` INTEGER NOT NULL PRIMARY KEY %s, + );''' % autokey) + await exec_shell(f"""CREATE TABLE IF 
NOT EXISTS `tpl` ( + `id` INTEGER NOT NULL PRIMARY KEY {autokey}, `userid` INT UNSIGNED NULL, `siteurl` VARCHAR(256) NULL, `sitename` VARCHAR(128) NULL, @@ -201,7 +208,7 @@ async def _convert_user(): `updateable` INT UNSIGNED NOT NULL DEFAULT 0, `_groups` VARCHAR(256) NOT NULL DEFAULT 'None', `init_env` TEXT NULL - );'''% autokey) + );""") await exec_shell('''CREATE TABLE IF NOT EXISTS `task` ( `id` INTEGER NOT NULL PRIMARY KEY %s, `tplid` INT UNSIGNED NOT NULL, @@ -226,16 +233,16 @@ async def _convert_user(): `_groups` VARCHAR(256) NOT NULL DEFAULT 'None', `pushsw` VARCHAR(128) NOT NULL DEFAULT '{"logen":false,"pushen":true}', `newontime` VARCHAR(256) NOT NULL DEFAULT '{"sw":false,"time":"00:10:10","randsw":false,"tz1":0,"tz2":0}' - );'''% autokey) - await exec_shell('''CREATE TABLE IF NOT EXISTS `tasklog` ( - `id` INTEGER NOT NULL PRIMARY KEY %s, + );''' % autokey) + await exec_shell(f'''CREATE TABLE IF NOT EXISTS `tasklog` ( + `id` INTEGER NOT NULL PRIMARY KEY {autokey}, `taskid` INT UNSIGNED NOT NULL, `success` TINYINT NOT NULL, `ctime` INT UNSIGNED NOT NULL, `msg` TEXT NULL - );'''% autokey) - await exec_shell('''CREATE TABLE IF NOT EXISTS `push_request` ( - `id` INTEGER NOT NULL PRIMARY KEY %s, + );''') + await exec_shell(f'''CREATE TABLE IF NOT EXISTS `push_request` ( + `id` INTEGER NOT NULL PRIMARY KEY {autokey}, `from_tplid` INT UNSIGNED NOT NULL, `from_userid` INT UNSIGNED NOT NULL, `to_tplid` INT UNSIGNED NULL, @@ -245,16 +252,16 @@ async def _convert_user(): `ctime` INT UNSIGNED NOT NULL, `mtime` INT UNSIGNED NOT NULL, `atime` INT UNSIGNED NOT NULL - );'''% autokey) - await exec_shell('''CREATE TABLE IF NOT EXISTS `site` ( - `id` INTEGER NOT NULL PRIMARY KEY %s, + );''') + await exec_shell(f"""CREATE TABLE IF NOT EXISTS `site` ( + `id` INTEGER NOT NULL PRIMARY KEY {autokey}, `regEn` INT UNSIGNED NOT NULL DEFAULT 1, `MustVerifyEmailEn` INT UNSIGNED NOT NULL DEFAULT 0, `logDay` INT UNSIGNED NOT NULL DEFAULT 365, `repos` TEXT NOT NULL - );'''% autokey) - 
await exec_shell('''CREATE TABLE IF NOT EXISTS `pubtpl` ( - `id` INTEGER NOT NULL PRIMARY KEY %s, + );""") + await exec_shell(f'''CREATE TABLE IF NOT EXISTS `pubtpl` ( + `id` INTEGER NOT NULL PRIMARY KEY {autokey}, `name` TEXT , `author` TEXT , `comments` TEXT , @@ -269,81 +276,79 @@ async def _convert_user(): `repoacc` TEXT, `repobranch` TEXT, `commenturl` TEXT - );'''% autokey) - await exec_shell('''CREATE TABLE IF NOT EXISTS `notepad` ( - `id` INTEGER NOT NULL PRIMARY KEY %s, + );''') + await exec_shell(f'''CREATE TABLE IF NOT EXISTS `notepad` ( + `id` INTEGER NOT NULL PRIMARY KEY {autokey}, `userid` INTEGER NOT NULL , `notepadid` INTEGER NOT NULL , `content` TEXT NULL - );'''% autokey) + );''') if config.db_type == 'sqlite3': for each in ('email', 'nickname'): - await exec_shell('''CREATE UNIQUE INDEX IF NOT EXISTS `ix_%s_%s` ON %s (%s)''' % ( - self.db.user.__tablename__, each, self.db.user.__tablename__, each)) + await exec_shell(f'''CREATE UNIQUE INDEX IF NOT EXISTS `ix_{self.db.user.__tablename__}_{each}` ON {self.db.user.__tablename__} ({each})''') else: for each in ('email', 'nickname'): try: - await exec_shell('''ALTER TABLE `%s` ADD UNIQUE INDEX `ix_%s_%s` (%s)''' % ( - self.db.user.__tablename__, self.db.user.__tablename__, each, each)) + await exec_shell(f'''ALTER TABLE `{self.db.user.__tablename__}` ADD UNIQUE INDEX `ix_{self.db.user.__tablename__}_{each}` ({each})''') except Exception as e: - logger_DB_converter.debug(e) + logger_db_converter.debug(e) try: await self.db.task.list(limit=1, fields=('retry_count',)) except Exception as e: - logger_DB_converter.debug(e) - await exec_shell("ALTER TABLE `task` ADD `retry_count` INT NOT NULL DEFAULT 8 " ) + logger_db_converter.debug(e) + await exec_shell("ALTER TABLE `task` ADD `retry_count` INT NOT NULL DEFAULT 8 ") try: await self.db.task.list(limit=1, fields=('retry_interval',)) except Exception as e: - logger_DB_converter.debug(e) - await exec_shell("ALTER TABLE `task` ADD `retry_interval` INT UNSIGNED 
NULL " ) + logger_db_converter.debug(e) + await exec_shell("ALTER TABLE `task` ADD `retry_interval` INT UNSIGNED NULL ") try: await self.db.task.list(limit=1, fields=('ontimeflg',)) except Exception as e: - logger_DB_converter.debug(e) + logger_db_converter.debug(e) await exec_shell("ALTER TABLE `task` ADD `ontimeflg` INT UNSIGNED NOT NULL DEFAULT 0 ") try: await self.db.task.list(limit=1, fields=('ontime',)) except Exception as e: - logger_DB_converter.debug(e) - await exec_shell("ALTER TABLE `task` ADD `ontime` VARCHAR(256) NOT NULL DEFAULT '00:10:00' " ) + logger_db_converter.debug(e) + await exec_shell("ALTER TABLE `task` ADD `ontime` VARCHAR(256) NOT NULL DEFAULT '00:10:00' ") try: await self.db.user.list(limit=1, fields=('skey',)) except Exception as e: - logger_DB_converter.debug(e) + logger_db_converter.debug(e) await exec_shell("ALTER TABLE `user` ADD `skey` VARBINARY(128) NOT NULL DEFAULT '' ") try: await self.db.user.list(limit=1, fields=('barkurl',)) except Exception as e: - logger_DB_converter.debug(e) - await exec_shell("ALTER TABLE `user` ADD `barkurl` VARBINARY(128) NOT NULL DEFAULT '' " ) + logger_db_converter.debug(e) + await exec_shell("ALTER TABLE `user` ADD `barkurl` VARBINARY(128) NOT NULL DEFAULT '' ") try: await self.db.user.list(limit=1, fields=('wxpusher',)) except Exception as e: - logger_DB_converter.debug(e) - await exec_shell("ALTER TABLE `user` ADD `wxpusher` VARBINARY(128) NOT NULL DEFAULT '' " ) + logger_db_converter.debug(e) + await exec_shell("ALTER TABLE `user` ADD `wxpusher` VARBINARY(128) NOT NULL DEFAULT '' ") try: await self.db.user.list(limit=1, fields=('noticeflg',)) except Exception as e: - logger_DB_converter.debug(e) - await exec_shell("ALTER TABLE `user` ADD `noticeflg` INT UNSIGNED NOT NULL DEFAULT 1 " ) + logger_db_converter.debug(e) + await exec_shell("ALTER TABLE `user` ADD `noticeflg` INT UNSIGNED NOT NULL DEFAULT 1 ") try: await self.db.user.list(limit=1, fields=('push_batch',)) except Exception as e: - 
logger_DB_converter.debug(e) - await exec_shell("ALTER TABLE `user` ADD `push_batch` VARBINARY(1024) NOT NULL DEFAULT '{\"sw\":false,\"time\":0,\"delta\":86400}' " ) + logger_db_converter.debug(e) + await exec_shell("ALTER TABLE `user` ADD `push_batch` VARBINARY(1024) NOT NULL DEFAULT '{\"sw\":false,\"time\":0,\"delta\":86400}' ") - for user in await self.db.user.list(fields=('id','push_batch')): + for user in await self.db.user.list(fields=('id', 'push_batch')): push_batch_i = json.loads(user['push_batch']) if not push_batch_i.get('delta'): push_batch_i['delta'] = 86400 @@ -352,53 +357,53 @@ async def _convert_user(): try: await self.db.tpl.list(limit=1, fields=('tplurl',)) except Exception as e: - logger_DB_converter.debug(e) - await exec_shell("ALTER TABLE `tpl` ADD `tplurl` VARCHAR(1024) NULL DEFAULT '' " ) + logger_db_converter.debug(e) + await exec_shell("ALTER TABLE `tpl` ADD `tplurl` VARCHAR(1024) NULL DEFAULT '' ") try: await self.db.tpl.list(limit=1, fields=('updateable',)) except Exception as e: - logger_DB_converter.debug(e) - await exec_shell("ALTER TABLE `tpl` ADD `updateable` INT UNSIGNED NOT NULL DEFAULT 0 " ) + logger_db_converter.debug(e) + await exec_shell("ALTER TABLE `tpl` ADD `updateable` INT UNSIGNED NOT NULL DEFAULT 0 ") try: await self.db.task.list(limit=1, fields=('pushsw',)) except Exception as e: - logger_DB_converter.debug(e) - await exec_shell("ALTER TABLE `task` ADD `pushsw` VARBINARY(128) NOT NULL DEFAULT '{\"logen\":false,\"pushen\":true}' " ) + logger_db_converter.debug(e) + await exec_shell("ALTER TABLE `task` ADD `pushsw` VARBINARY(128) NOT NULL DEFAULT '{\"logen\":false,\"pushen\":true}' ") try: await self.db.task.list(limit=1, fields=('newontime',)) except Exception as e: - logger_DB_converter.debug(e) - await exec_shell("ALTER TABLE `task` ADD `newontime` VARBINARY(256) NOT NULL DEFAULT '{\"sw\":false,\"time\":\"00:10:10\",\"randsw\":false,\"tz1\":0,\"tz2\":0 }' " ) + logger_db_converter.debug(e) + await exec_shell("ALTER 
TABLE `task` ADD `newontime` VARBINARY(256) NOT NULL DEFAULT '{\"sw\":false,\"time\":\"00:10:10\",\"randsw\":false,\"tz1\":0,\"tz2\":0 }' ") try: await self.db.user.list(limit=1, fields=('logtime',)) except Exception as e: - logger_DB_converter.debug(e) - await exec_shell("ALTER TABLE `user` ADD `logtime` VARBINARY(128) NOT NULL DEFAULT '{\"en\":false,\"time\":\"20:00:00\",\"ts\":0,\"schanEn\":false,\"WXPEn\":false}' " ) + logger_db_converter.debug(e) + await exec_shell("ALTER TABLE `user` ADD `logtime` VARBINARY(128) NOT NULL DEFAULT '{\"en\":false,\"time\":\"20:00:00\",\"ts\":0,\"schanEn\":false,\"WXPEn\":false}' ") try: await self.db.user.list(limit=1, fields=('status',)) except Exception as e: - logger_DB_converter.debug(e) - await exec_shell("ALTER TABLE `user` ADD `status` VARBINARY(1024) NOT NULL DEFAULT 'Enable' " ) + logger_db_converter.debug(e) + await exec_shell("ALTER TABLE `user` ADD `status` VARBINARY(1024) NOT NULL DEFAULT 'Enable' ") try: temp = await self.db.site.get("1", fields=('regEn',)) - if not (temp): + if not temp: raise Exception("for table site, new row will be created") except Exception as e: - logger_DB_converter.debug(e) - insert = dict(regEn = 1, repos='{"repos":[{"reponame":"default","repourl":"https://github.com/qd-today/templates","repobranch":"master","repoacc":true}], "lastupdate":0}') - await self.db.site._insert(Site(**insert)) + logger_db_converter.debug(e) + insert = dict(regEn=1, repos='{"repos":[{"reponame":"default","repourl":"https://github.com/qd-today/templates","repobranch":"master","repoacc":true}], "lastupdate":0}') + await self.db.site._insert(Site(**insert)) # pylint: disable=protected-access try: await self.db.site.get("1", fields=('MustVerifyEmailEn',)) except Exception as e: - logger_DB_converter.debug(e) - await exec_shell("ALTER TABLE `site` ADD `MustVerifyEmailEn` INT UNSIGNED NOT NULL DEFAULT 0 " ) + logger_db_converter.debug(e) + await exec_shell("ALTER TABLE `site` ADD `MustVerifyEmailEn` INT UNSIGNED NOT 
NULL DEFAULT 0 ") try: groups = await self.db.task.list(limit=1, fields=('`groups`',)) @@ -406,99 +411,99 @@ async def _convert_user(): await _convert_task(group_with_underline=False) except Exception as e: if str(e).find('has no attribute \'`groups`\'') < 0 and str(e).find('no such column') > -1: - logger_DB_converter.debug(e) + logger_db_converter.debug(e) try: await self.db.task.list(limit=1, fields=('_groups',)) except Exception as e: - logger_DB_converter.debug(e) - await exec_shell("ALTER TABLE `task` ADD `_groups` VARCHAR(256) NOT NULL DEFAULT 'None' " ) + logger_db_converter.debug(e) + await exec_shell("ALTER TABLE `task` ADD `_groups` VARCHAR(256) NOT NULL DEFAULT 'None' ") try: groups = await self.db.tpl.list(limit=1, fields=('`groups`',)) if groups: await _convert_tpl(group_with_underline=False) except Exception as e: - if str(e).find('has no attribute \'`groups`\'') < 0 and str(e).find('no such column') < 0: - logger_DB_converter.debug(e) + if str(e).find('has no attribute \'`groups`\'') < 0 and str(e).find('no such column') < 0: + logger_db_converter.debug(e) try: await self.db.tpl.list(limit=1, fields=('_groups',)) except Exception as e: - logger_DB_converter.debug(e) - await exec_shell("ALTER TABLE `tpl` ADD `_groups` VARCHAR(256) NOT NULL DEFAULT 'None' " ) + logger_db_converter.debug(e) + await exec_shell("ALTER TABLE `tpl` ADD `_groups` VARCHAR(256) NOT NULL DEFAULT 'None' ") try: tmp = await self.db.site.get("1", fields=('logDay',)) tmp = tmp['logDay'] except Exception as e: - logger_DB_converter.debug(e) + logger_db_converter.debug(e) if (str(e).find('no such column') > -1 or str(e).find('has no attribute') > -1): - await exec_shell("ALTER TABLE `site` ADD `logDay` INT UNSIGNED NOT NULL DEFAULT 365 " ) + await exec_shell("ALTER TABLE `site` ADD `logDay` INT UNSIGNED NOT NULL DEFAULT 365 ") else: if config.db_type == 'sqlite3': autokey = '' else: autokey = 'AUTO_INCREMENT' - await exec_shell('''CREATE TABLE IF NOT EXISTS `newsite` ( - `id` 
INTEGER NOT NULL PRIMARY KEY {0}, + await exec_shell(f'''CREATE TABLE IF NOT EXISTS `newsite` ( + `id` INTEGER NOT NULL PRIMARY KEY {autokey}, `regEn` INT UNSIGNED NOT NULL DEFAULT 1, `MustVerifyEmailEn` INT UNSIGNED NOT NULL DEFAULT 0, `logDay` INT UNSIGNED NOT NULL DEFAULT 365 - );'''.format(autokey)) + );''') await exec_shell('INSERT INTO `newsite` SELECT id,regEn,MustVerifyEmailEn,LogDay FROM `site`') - await exec_shell("DROP TABLE `site`" ) + await exec_shell("DROP TABLE `site`") await exec_shell('CREATE TABLE `site` as select * from `newsite`') - await exec_shell("DROP TABLE `newsite`" ) + await exec_shell("DROP TABLE `newsite`") try: await self.db.user.list(limit=1, fields=('diypusher',)) except Exception as e: - logger_DB_converter.debug(e) + logger_db_converter.debug(e) await exec_shell("ALTER TABLE `user` ADD `diypusher` VARCHAR(1024) NOT NULL DEFAULT '' ") try: await self.db.user.list(limit=1, fields=('qywx_token',)) except Exception as e: - logger_DB_converter.debug(e) + logger_db_converter.debug(e) await exec_shell("ALTER TABLE `user` ADD `qywx_token` VARCHAR(1024) NOT NULL DEFAULT '' ") try: await self.db.user.list(limit=1, fields=('qywx_webhook',)) except Exception as e: - logger_DB_converter.debug(e) + logger_db_converter.debug(e) await exec_shell("ALTER TABLE `user` ADD `qywx_webhook` VARCHAR(1024) NOT NULL DEFAULT '' ") try: await self.db.user.list(limit=1, fields=('tg_token',)) except Exception as e: - logger_DB_converter.debug(e) + logger_db_converter.debug(e) await exec_shell("ALTER TABLE `user` ADD `tg_token` VARCHAR(1024) NOT NULL DEFAULT '' ") try: await self.db.user.list(limit=1, fields=('dingding_token',)) except Exception as e: - logger_DB_converter.debug(e) + logger_db_converter.debug(e) await exec_shell("ALTER TABLE `user` ADD `dingding_token` VARCHAR(1024) NOT NULL DEFAULT '' ") if config.db_type == 'sqlite3': try: - import aiosqlite + conn = await aiosqlite.connect(f"{config.sqlite3.path}") conn.text_factory = bytes cursor = await 
conn.execute('SELECT id, password, userkey, password_md5 FROM user') for row in await cursor.fetchall(): - result = await self.db._update(update(User).where(User.id == row[0]).values(password=row[1],userkey=row[2],password_md5=row[3])) + result = await update_shell(update(User).where(User.id == row[0]).values(password=row[1], userkey=row[2], password_md5=row[3])) await cursor.close() cursor = await conn.execute('SELECT id, init_env, env, session FROM task') for row in await cursor.fetchall(): - result = await self.db._update(update(Task).where(Task.id == row[0]).values(init_env=row[1],env=row[2],session=row[3])) + result = await update_shell(update(Task).where(Task.id == row[0]).values(init_env=row[1], env=row[2], session=row[3])) await cursor.close() cursor = await conn.execute('SELECT id, har, tpl FROM tpl') for row in await cursor.fetchall(): - result = await self.db._update(update(Tpl).where(Tpl.id == row[0]).values(har=row[1],tpl=row[2])) + result = await update_shell(update(Tpl).where(Tpl.id == row[0]).values(har=row[1], tpl=row[2])) await cursor.close() await conn.close() @@ -508,19 +513,19 @@ async def _convert_user(): try: await self.db.user.list(limit=1, fields=('password_md5',)) except Exception as e: - logger_DB_converter.debug(e) + logger_db_converter.debug(e) try: await exec_shell("ALTER TABLE `user` ADD `password_md5` VARBINARY(128) NOT NULL DEFAULT '' ") except Exception as e: - logger_DB_converter.debug(e) + logger_db_converter.debug(e) try: for user in await self.db.user.list(fields=('id', 'password_md5')): - if isinstance(user['password_md5'],str) and re.match(r'^[a-z0-9]{32}$',user['password_md5']): + if isinstance(user['password_md5'], str) and re.match(r'^[a-z0-9]{32}$', user['password_md5']): password = (await self.db.user.get(user['id'], fields=('password',)))['password'] - await self.db.user.mod(user['id'],password_md5=crypto.password_hash(user['password_md5'],await self.db.user.decrypt(user['id'], password))) + await 
self.db.user.mod(user['id'], password_md5=crypto.password_hash(user['password_md5'], await self.db.user.decrypt(user['id'], password))) except Exception as e: - logger_DB_converter.debug(e) + logger_db_converter.debug(e) try: await self.db.user.list(limit=1, fields=('notepad',)) @@ -529,13 +534,13 @@ async def _convert_user(): await _convert_user() except Exception as e: if str(e).find('has no attribute \'notepad\'') < 0 and str(e).find('no such column') < 0: - logger_DB_converter.debug(e) + logger_db_converter.debug(e) raise e try: - await self.db.user.list(limit=1, fields=('cip','mip','aip')) + await self.db.user.list(limit=1, fields=('cip', 'mip', 'aip')) cip_is_num = False - for row in await self.db.user.list(fields=('id','cip')): + for row in await self.db.user.list(fields=('id', 'cip')): cip = row['cip'] if len(cip) > 16: cip = bytes() @@ -545,12 +550,12 @@ async def _convert_user(): if cip_is_num: await _convert_user() except Exception as e: - logger_DB_converter.debug(e) + logger_db_converter.debug(e) try: await self.db.site.get("1", fields=('repos',)) except Exception as e: - logger_DB_converter.debug(e) + logger_db_converter.debug(e) if config.db_type == 'sqlite3': await exec_shell('''ALTER TABLE `site` ADD `repos` TEXT NOT NULL DEFAULT '{"repos":[{"reponame":"default","repourl":"https://github.com/qd-today/templates","repobranch":"master","repoacc":true}], "lastupdate":0}' ''') else: @@ -560,9 +565,9 @@ async def _convert_user(): try: tmp = (await self.db.site.get("1", fields=('repos',)))['repos'] if tmp is None or tmp == '': - await exec_shell('''UPDATE `site` SET `repos` = '{"repos":[{"reponame":"default","repourl":"https://github.com/qd-today/templates","repobranch":"master","repoacc":true}], "lastupdate":0}' WHERE `site`.`id` = 1 ''') + await exec_shell('''UPDATE `site` SET `repos` = '{"repos":[{"reponame":"default","repourl":"https://github.com/qd-today/templates","repobranch":"master","repoacc":true}], "lastupdate":0}' WHERE `site`.`id` = 1 ''') 
except Exception as e: - logger_DB_converter.debug(e) + logger_db_converter.debug(e) try: repo = await self.db.site.get("1", fields=('repos',)) @@ -571,23 +576,22 @@ async def _convert_user(): repos = json.loads(repo['repos']) tmp = repos['repos'] result = [] - for _, j in enumerate(tmp): + for _, j in enumerate(tmp): if j['repourl'].find('qiandao-today/templates') > 0: j['repourl'] = j['repourl'].replace("qiandao-today/templates", "qd-today/templates") - pubtpls = await self.db.pubtpl.list(reponame=j['reponame'], fields=('id',),sql_session=sql_session) + pubtpls = await self.db.pubtpl.list(reponame=j['reponame'], fields=('id',), sql_session=sql_session) for pubtpl in pubtpls: - await self.db.pubtpl.delete(pubtpl['id'],sql_session=sql_session) + await self.db.pubtpl.delete(pubtpl['id'], sql_session=sql_session) result.append(j) await self.db.site.mod(1, repos=repo['repos'].replace("qiandao-today/templates", "qd-today/templates"), sql_session=sql_session) except Exception as e: - logger_DB_converter.debug(e) - pass + logger_db_converter.debug(e) try: await self.db.pubtpl.list(limit=1, fields=('commenturl',)) except Exception as e: - logger_DB_converter.debug(e) + logger_db_converter.debug(e) if config.db_type == 'sqlite3': await exec_shell('''ALTER TABLE `pubtpl` ADD `commenturl` TEXT NOT NULL DEFAULT ''; ''') else: @@ -597,27 +601,25 @@ async def _convert_user(): try: await _convert_user() except Exception as e: - logger_DB_converter.debug(e) + logger_db_converter.debug(e) try: await _convert_task(group_with_underline=True) except Exception as e: - logger_DB_converter.debug(e) + logger_db_converter.debug(e) try: # deepcode ignore change_to_is: use "is" will cause error - result = await self.db._update(update(Tpl).where(Tpl.userid == None).where(Tpl.public == 0).values(public=1)) + result = await update_shell(update(Tpl).where(Tpl.userid.is_(None)).where(Tpl.public == 0).values(public=1)) except Exception as e: - logger_DB_converter.debug(e) + 
logger_db_converter.debug(e) try: await self.db.tpl.list(limit=1, fields=('init_env',)) except Exception as e: - logger_DB_converter.debug(e) - await exec_shell("ALTER TABLE `tpl` ADD `init_env` TEXT NULL" ) - from jinja2.nodes import Filter, Name + logger_db_converter.debug(e) + await exec_shell("ALTER TABLE `tpl` ADD `init_env` TEXT NULL") - from libs.fetcher import Fetcher env = Fetcher().jinja_env async with self.db.transaction() as sql_session: tpls = await self.db.tpl.list(fields=('id', 'userid', 'tpl', 'variables', 'init_env'), sql_session=sql_session) @@ -636,10 +638,9 @@ async def _convert_user(): try: init_env[x.node.name] = x.args[0].as_const() except Exception as e: - logger_DB_converter.debug('Convert init_env error: %s' % e) + logger_db_converter.debug('Convert init_env error: %s', e, exc_info=True) except Exception as e: - logger_DB_converter.debug('Convert init_env error: %s' % e) + logger_db_converter.debug('Convert init_env error: %s', e, exc_info=True) await self.db.tpl.mod(tpl['id'], init_env=json.dumps(init_env), sql_session=sql_session) - return diff --git a/db/notepad.py b/db/notepad.py index e231ea88b5d..7ec5878e9ea 100644 --- a/db/notepad.py +++ b/db/notepad.py @@ -8,10 +8,10 @@ # sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) from sqlalchemy import Column, Integer, Text, delete, select, update -from .basedb import AlchemyMixin, BaseDB, logger_DB +from db.basedb import AlchemyMixin, BaseDB -class Notepad(BaseDB,AlchemyMixin): +class Notepad(BaseDB, AlchemyMixin): ''' Site db @@ -45,7 +45,7 @@ async def get(self, userid, notepadid, fields=None, one_or_none=False, first=Tru result = await self._get(smtm, one_or_none=one_or_none, first=first, sql_session=sql_session) if to_dict and result is not None: - return self.to_dict(result,fields) + return self.to_dict(result, fields) return result async def list(self, fields=None, limit=1000, to_dict=True, sql_session=None, **kwargs): @@ -64,41 +64,43 @@ async def 
list(self, fields=None, limit=1000, to_dict=True, sql_session=None, ** result = await self._get(smtm, sql_session=sql_session) if to_dict and result is not None: - return [self.to_dict(row,fields) for row in result] + return [self.to_dict(row, fields) for row in result] return result def delete(self, userid, notepadid, sql_session=None): return self._delete(delete(Notepad).where(Notepad.userid == userid).where(Notepad.notepadid == notepadid), sql_session=sql_session) + if __name__ == '__main__': import asyncio + async def test(): notepad = Notepad() try: async with notepad.transaction() as sql_session: - await notepad.add({'userid':1,'notepadid':1,'content':'test'}, sql_session=sql_session) - await notepad.add({'userid':1,'notepadid':2}) - await notepad.add({'userid':2,'notepadid':1}) - await notepad.add({'userid':2,'notepadid':2}) + await notepad.add({'userid': 1, 'notepadid': 1, 'content': 'test'}, sql_session=sql_session) + await notepad.add({'userid': 1, 'notepadid': 2}) + await notepad.add({'userid': 2, 'notepadid': 1}) + await notepad.add({'userid': 2, 'notepadid': 2}) except Exception as e: print(e) - notepad1 = await notepad.get(1,1) - notepad1_content = await notepad.get(1,1,fields=('content',)) + notepad1 = await notepad.get(1, 1) + notepad1_content = await notepad.get(1, 1, fields=('content',)) notepad_list = await notepad.list(userid=1) - notepad_list_content = await notepad.list(userid=1,fields=('content',)) - print('notepad1: ',notepad1) - print('notepad1_content: ',notepad1_content) - print('notepad_list: ',notepad_list) - print('notepad_list_content: ',notepad_list_content) - - await notepad.mod(1,1,content='test1') - notepad1 = await notepad.get(1,1) - print('notepad1 after mod : ',notepad1) - - await notepad.delete(1,1) - await notepad.delete(1,2) - await notepad.delete(2,1) - await notepad.delete(2,2) + notepad_list_content = await notepad.list(userid=1, fields=('content',)) + print('notepad1: ', notepad1) + print('notepad1_content: ', 
notepad1_content) + print('notepad_list: ', notepad_list) + print('notepad_list_content: ', notepad_list_content) + + await notepad.mod(1, 1, content='test1') + notepad1 = await notepad.get(1, 1) + print('notepad1 after mod : ', notepad1) + + await notepad.delete(1, 1) + await notepad.delete(1, 2) + await notepad.delete(2, 1) + await notepad.delete(2, 2) return loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) diff --git a/db/pubtpl.py b/db/pubtpl.py index 87d25ef21ef..4614d02402c 100644 --- a/db/pubtpl.py +++ b/db/pubtpl.py @@ -7,10 +7,10 @@ from sqlalchemy import Column, Integer, Text, delete, select, update -from .basedb import AlchemyMixin, BaseDB +from db.basedb import AlchemyMixin, BaseDB -class Pubtpl(BaseDB,AlchemyMixin): +class Pubtpl(BaseDB, AlchemyMixin): ''' Site db @@ -51,7 +51,7 @@ async def get(self, id, fields=None, one_or_none=False, first=True, to_dict=True result = await self._get(smtm, one_or_none=one_or_none, first=first, sql_session=sql_session) if to_dict and result is not None: - return self.to_dict(result,fields) + return self.to_dict(result, fields) return result async def list(self, fields=None, limit=1000, to_dict=True, sql_session=None, **kwargs): @@ -70,7 +70,7 @@ async def list(self, fields=None, limit=1000, to_dict=True, sql_session=None, ** result = await self._get(smtm, sql_session=sql_session) if to_dict and result is not None: - return [self.to_dict(row,fields) for row in result] + return [self.to_dict(row, fields) for row in result] return result def delete(self, id, sql_session=None): diff --git a/db/push_request.py b/db/push_request.py index 184c4e634b4..7219104d992 100644 --- a/db/push_request.py +++ b/db/push_request.py @@ -10,10 +10,10 @@ from sqlalchemy import INTEGER, Column, Integer, String, select, text, update from sqlalchemy.dialects.mysql import TINYINT -from .basedb import AlchemyMixin, BaseDB +from db.basedb import AlchemyMixin, BaseDB -class PushRequest(BaseDB,AlchemyMixin): +class PushRequest(BaseDB, 
AlchemyMixin): ''' push request db @@ -32,33 +32,33 @@ class PushRequest(BaseDB,AlchemyMixin): to_userid = Column(INTEGER) msg = Column(String(1024)) - PENDING = 0 CANCEL = 1 REFUSE = 2 ACCEPT = 3 - class NOTSET(object): pass + class NOTSET(object): + pass def add(self, from_tplid, from_userid, to_tplid, to_userid, msg='', sql_session=None): now = time.time() insert = dict( - from_tplid = from_tplid, - from_userid = from_userid, - to_tplid = to_tplid, - to_userid = to_userid, - status = PushRequest.PENDING, - msg = msg, - ctime = now, - mtime = now, - atime = now, - ) + from_tplid=from_tplid, + from_userid=from_userid, + to_tplid=to_tplid, + to_userid=to_userid, + status=PushRequest.PENDING, + msg=msg, + ctime=now, + mtime=now, + atime=now, + ) return self._insert(PushRequest(**insert), sql_session=sql_session) def mod(self, id, sql_session=None, **kwargs): for each in ('id', 'from_tplid', 'from_userid', 'to_userid', 'ctime'): - assert each not in kwargs, '%s not modifiable' % each + assert each not in kwargs, f'{each} not modifiable' kwargs['mtime'] = time.time() return self._update(update(PushRequest).where(PushRequest.id == id).values(**kwargs), sql_session=sql_session) @@ -74,7 +74,7 @@ async def get(self, id, fields=None, one_or_none=False, first=True, to_dict=True result = await self._get(smtm, one_or_none=one_or_none, first=first, sql_session=sql_session) if to_dict and result is not None: - return self.to_dict(result,fields) + return self.to_dict(result, fields) return result async def list(self, fields=None, limit=1000, to_dict=True, sql_session=None, **kwargs): @@ -93,5 +93,5 @@ async def list(self, fields=None, limit=1000, to_dict=True, sql_session=None, ** result = await self._get(smtm.order_by(PushRequest.mtime.desc()), sql_session=sql_session) if to_dict and result is not None: - return [self.to_dict(row,fields) for row in result] + return [self.to_dict(row, fields) for row in result] return result diff --git a/db/redisdb.py b/db/redisdb.py index 
02155a55fb2..d932f68055a 100644 --- a/db/redisdb.py +++ b/db/redisdb.py @@ -5,18 +5,26 @@ # http://binux.me # Created on 2014-08-08 20:40:53 +from typing import Any, Optional + import umsgpack import config from libs.log import Log from libs.utils import is_lan -logger_RedisDB = Log('QD.RedisDB').getlogger() +try: + import redis + REDIS: Optional[Any] = redis +except ImportError: + REDIS = None + +logger_redis_db = Log('QD.RedisDB').getlogger() + + class RedisDB(object): def __init__(self, host=config.redis.host, port=config.redis.port, password=config.redis.passwd, db=config.redis.db, evil=config.evil): - try: - import redis - except ImportError: + if REDIS is None: self.client = None return @@ -24,18 +32,18 @@ def __init__(self, host=config.redis.host, port=config.redis.port, password=conf try: self.client = redis.StrictRedis(host=host, port=port, password=password, db=db, socket_timeout=3, socket_connect_timeout=3) self.client.ping() - except redis.exceptions.ConnectionError as e: + except redis.ConnectionError as e: if config.display_import_warning: - logger_RedisDB.warning('Connect Redis falied: \"%s\". \nTips: This warning message is only for prompting, it will not affect running of QD framework. ',e) + logger_redis_db.warning('Connect Redis falied: \"%s\". \nTips: This warning message is only for prompting, it will not affect running of QD framework. 
', e) self.client = None def evil(self, ip, userid, cnt=None): if not self.client: return - if cnt == self.client.incrby('ip_%s' % ip, cnt): - self.client.expire('ip_%s' % ip, 3600) - if userid and cnt == self.client.incrby('user_%s' % userid, cnt): - self.client.expire('user_%s' % userid, 3600) + if cnt == self.client.incrby(f'ip_{ip}', cnt): + self.client.expire(f'ip_{ip}', 3600) + if userid and cnt == self.client.incrby(f'user_{userid}', cnt): + self.client.expire(f'user_{userid}', 3600) def is_evil(self, ip, userid=None): if not self.client: @@ -43,22 +51,24 @@ def is_evil(self, ip, userid=None): if config.evil_pass_lan_ip and is_lan(ip): return False if userid: - if int(self.client.get('user_%s' % userid) or '0') > self.evil_limit: + if int(self.client.get(f'user_{userid}') or '0') > self.evil_limit: return True - else: - return False - if int(self.client.get('ip_%s' % ip) or '0') > self.evil_limit: + return False + if int(self.client.get(f'ip_{ip}') or '0') > self.evil_limit: return True return False - def cache(self, key, _lambda, timeout=60*60): + def cache(self, key, _lambda, timeout=60 * 60): if not self.client: return _lambda() - ret = self.client.get('cache_%s' % key) + ret = self.client.get(f'cache_{key}') if ret: return umsgpack.unpackb(ret) ret = _lambda() - self.client.set('cache_%s', umsgpack.packb(ret)) + packed_ret = umsgpack.packb(ret) + self.client.set(f'cache_{key}', packed_ret) + if timeout: + self.client.expire(f'cache_{key}', timeout) return ret def close(self): diff --git a/db/site.py b/db/site.py index d87774dd41a..e1465d7aaeb 100644 --- a/db/site.py +++ b/db/site.py @@ -7,10 +7,10 @@ from sqlalchemy import INTEGER, Column, Integer, Text, select, text, update -from .basedb import AlchemyMixin, BaseDB +from db.basedb import AlchemyMixin, BaseDB -class Site(BaseDB,AlchemyMixin): +class Site(BaseDB, AlchemyMixin): ''' Site db @@ -25,7 +25,7 @@ class Site(BaseDB,AlchemyMixin): repos = Column(Text, nullable=False) def add(self, 
sql_session=None): - insert = dict(regEn = 1) + insert = dict(regEn=1) return self._insert(Site(**insert), sql_session=sql_session) def mod(self, id, sql_session=None, **kwargs): @@ -43,5 +43,5 @@ async def get(self, id, fields=None, one_or_none=False, first=True, to_dict=True result = await self._get(smtm, one_or_none=one_or_none, first=first, sql_session=sql_session) if to_dict and result is not None: - return self.to_dict(result,fields) + return self.to_dict(result, fields) return result diff --git a/db/task.py b/db/task.py index 88aea1039fc..7b38c466f31 100644 --- a/db/task.py +++ b/db/task.py @@ -6,18 +6,16 @@ # Created on 2014-08-08 19:53:09 import time -from unittest import result -from sqlalchemy import (INTEGER, VARBINARY, Column, Integer, LargeBinary, - String, delete, select, text, update) +from sqlalchemy import (INTEGER, Column, Integer, LargeBinary, String, delete, + select, text, update) from sqlalchemy.dialects.mysql import TINYINT import config +from db.basedb import AlchemyMixin, BaseDB -from .basedb import AlchemyMixin, BaseDB - -class Task(BaseDB,AlchemyMixin): +class Task(BaseDB, AlchemyMixin): ''' task db @@ -52,22 +50,22 @@ def add(self, tplid, userid, env, sql_session=None): now = time.time() insert = dict( - tplid = tplid, - userid = userid, - disabled = 0, - init_env = env, - retry_count = config.task_max_retry_count, - retry_interval = None, - last_success = None, - last_failed = None, - success_count = 0, - failed_count = 0, - next = None, - ctime = now, - mtime = now, - ontime='00:10', - ontimeflg=0, - ) + tplid=tplid, + userid=userid, + disabled=0, + init_env=env, + retry_count=config.task_max_retry_count, + retry_interval=None, + last_success=None, + last_failed=None, + success_count=0, + failed_count=0, + next=None, + ctime=now, + mtime=now, + ontime='00:10', + ontimeflg=0, + ) return self._insert(Task(**insert), sql_session=sql_session) def mod(self, id, sql_session=None, **kwargs): @@ -89,7 +87,7 @@ async def get(self, id, 
fields=None, one_or_none=False, first=True, to_dict=True result = await self._get(smtm, one_or_none=one_or_none, first=first, sql_session=sql_session) if to_dict and result is not None: - return self.to_dict(result,fields) + return self.to_dict(result, fields) return result async def list(self, userid=None, fields=None, limit=1000, to_dict=True, scan=False, scan_time=None, sql_session=None, **kwargs): @@ -111,9 +109,9 @@ async def list(self, userid=None, fields=None, limit=1000, to_dict=True, scan=Fa if limit: smtm = smtm.limit(limit) - result = await self._get(smtm,sql_session=sql_session) + result = await self._get(smtm, sql_session=sql_session) if to_dict and result is not None: - return [self.to_dict(row,fields) for row in result] + return [self.to_dict(row, fields) for row in result] return result def delete(self, id, sql_session=None): diff --git a/db/tasklog.py b/db/tasklog.py index e2c2e2ef8dd..69c85ac8353 100644 --- a/db/tasklog.py +++ b/db/tasklog.py @@ -10,10 +10,10 @@ from sqlalchemy import INTEGER, Column, Integer, Text, delete, select from sqlalchemy.dialects.mysql import TINYINT -from .basedb import AlchemyMixin, BaseDB +from db.basedb import AlchemyMixin, BaseDB -class Tasklog(BaseDB,AlchemyMixin): +class Tasklog(BaseDB, AlchemyMixin): ''' task log db @@ -31,11 +31,11 @@ def add(self, taskid, success, msg='', sql_session=None): now = time.time() insert = dict( - taskid = taskid, - success = success, - msg = msg, - ctime = now, - ) + taskid=taskid, + success=success, + msg=msg, + ctime=now, + ) return self._insert(Tasklog(**insert), sql_session=sql_session) async def list(self, fields=None, limit=1000, to_dict=True, sql_session=None, **kwargs): @@ -54,7 +54,7 @@ async def list(self, fields=None, limit=1000, to_dict=True, sql_session=None, ** result = await self._get(smtm.order_by(Tasklog.ctime.desc()), sql_session=sql_session) if to_dict and result is not None: - return [self.to_dict(row,fields) for row in result] + return [self.to_dict(row, fields) 
for row in result] return result def delete(self, id, sql_session=None): diff --git a/db/tpl.py b/db/tpl.py index 8e897beb25c..252613161e5 100644 --- a/db/tpl.py +++ b/db/tpl.py @@ -10,12 +10,11 @@ from sqlalchemy import (INTEGER, Column, Integer, String, Text, delete, select, text, update) from sqlalchemy.dialects.mysql import MEDIUMBLOB, TINYINT -from sqlalchemy.engine import Result -from .basedb import AlchemyMixin, BaseDB +from db.basedb import AlchemyMixin, BaseDB -class Tpl(BaseDB,AlchemyMixin): +class Tpl(BaseDB, AlchemyMixin): ''' tpl db @@ -52,23 +51,23 @@ def add(self, userid, har, tpl, variables, init_env, interval=None, sql_session= now = time.time() insert = dict( - userid = userid, - siteurl = None, - sitename = None, - banner = None, - disabled = 0, - public = 0, - fork = None, - har = har, - tpl = tpl, - variables = variables, - init_env = init_env, - interval = interval, - ctime = now, - mtime = now, - atime = now, - last_success = None, - ) + userid=userid, + siteurl=None, + sitename=None, + banner=None, + disabled=0, + public=0, + fork=None, + har=har, + tpl=tpl, + variables=variables, + init_env=init_env, + interval=interval, + ctime=now, + mtime=now, + atime=now, + last_success=None, + ) return self._insert(Tpl(**insert), sql_session=sql_session) def mod(self, id, sql_session=None, **kwargs): @@ -85,17 +84,17 @@ async def get(self, id, fields=None, one_or_none=False, first=True, to_dict=True result = await self._get(smtm, one_or_none=one_or_none, first=first, sql_session=sql_session) if to_dict and result is not None: - return self.to_dict(result,fields) + return self.to_dict(result, fields) return result async def incr_success(self, id, sql_session=None): - result = await self._execute(text('UPDATE tpl SET success_count=success_count+1, last_success=:last_success WHERE id=:id').\ - bindparams(id=int(id), last_success=int(time.time())), sql_session=sql_session) + result = await self._execute(text('UPDATE tpl SET success_count=success_count+1, 
last_success=:last_success WHERE id=:id'). + bindparams(id=int(id), last_success=int(time.time())), sql_session=sql_session) return result.rowcount async def incr_failed(self, id, sql_session=None): - result = await self._execute(text('UPDATE tpl SET failed_count=failed_count+1 WHERE id=:id').\ - bindparams(id=int(id)), sql_session=sql_session) + result = await self._execute(text('UPDATE tpl SET failed_count=failed_count+1 WHERE id=:id'). + bindparams(id=int(id)), sql_session=sql_session) return result.rowcount async def list(self, fields=None, limit=None, to_dict=True, sql_session=None, **kwargs): @@ -114,7 +113,7 @@ async def list(self, fields=None, limit=None, to_dict=True, sql_session=None, ** result = await self._get(smtm, sql_session=sql_session) if to_dict and result is not None: - return [self.to_dict(row,fields) for row in result] + return [self.to_dict(row, fields) for row in result] return result def delete(self, id, sql_session=None): diff --git a/db/user.py b/db/user.py index ec4c3cf980d..70ee688f22c 100644 --- a/db/user.py +++ b/db/user.py @@ -15,13 +15,12 @@ text, update) from sqlalchemy.dialects.mysql import INTEGER, TINYINT +from db.basedb import AlchemyMixin, BaseDB, config from libs import mcrypto as crypto from libs import utils -from .basedb import AlchemyMixin, BaseDB, config - -class User(BaseDB,AlchemyMixin): +class User(BaseDB, AlchemyMixin): ''' User DB @@ -56,10 +55,17 @@ class User(BaseDB,AlchemyMixin): nickname = Column(String(64), unique=True, index=True) role = Column(String(128)) - class UserDBException(Exception): pass - class NoUserException(UserDBException): pass - class DeplicateUser(UserDBException): pass - class UserNameError(UserDBException): pass + class UserDBException(Exception): + pass + + class NoUserException(UserDBException): + pass + + class DeplicateUser(UserDBException): + pass + + class UserNameError(UserDBException): + pass @staticmethod def check_nickname(nickname): @@ -73,32 +79,32 @@ async def add(self, email, 
password, ip, sql_session=None): raise self.DeplicateUser('duplicate username') now = time.time() - if isinstance(ip,str): - ipVersion = utils.isIP(ip) - ip = utils.ip2varbinary(ip,ipVersion) + if isinstance(ip, str): + ip_version = utils.is_ip(ip) + ip = utils.ip2varbinary(ip, ip_version) userkey = umsgpack.unpackb(crypto.password_hash(password))[0] hash = MD5.new() hash.update(password.encode('utf-8')) password_hash = crypto.password_hash(password) - password_md5_hash = crypto.password_hash(hash.hexdigest(),password_hash) + password_md5_hash = crypto.password_hash(hash.hexdigest(), password_hash) insert = dict( - email = email, - email_verified = 0, - password = crypto.aes_encrypt(password_hash, userkey), - userkey = crypto.aes_encrypt(userkey), - nickname = None, - role = None, - ctime = now, - mtime = now, - atime = now, - cip = ip, - mip = ip, - aip = ip, - password_md5 = password_md5_hash, - ) - await self._insert(User(**insert),sql_session=sql_session) + email=email, + email_verified=0, + password=crypto.aes_encrypt(password_hash, userkey), + userkey=crypto.aes_encrypt(userkey), + nickname=None, + role=None, + ctime=now, + mtime=now, + atime=now, + cip=ip, + mip=ip, + aip=ip, + password_md5=password_md5_hash, + ) + await self._insert(User(**insert), sql_session=sql_session) return async def challenge(self, email, password, sql_session=None): @@ -111,12 +117,12 @@ async def challenge(self, email, password, sql_session=None): return False - async def challenge_MD5(self, email, password_md5, sql_session=None): + async def challenge_md5(self, email, password_md5, sql_session=None): user = await self.get(email=email, fields=('id', 'password', 'password_md5'), sql_session=sql_session) if user is None: return False else: - if (user['password_md5'] == ''): + if user['password_md5'] == '': pass else: password_hash = await self.decrypt(user['id'], user['password'], sql_session=sql_session) @@ -135,7 +141,7 @@ async def mod(self, id, sql_session=None, **kwargs): if 
'token' in kwargs: kwargs['token'] = await self.encrypt(id, crypto.password_hash(kwargs['token']), sql_session=sql_session) - result = await self._update(update(User).where(User.id == id).values(**kwargs),sql_session=sql_session) + result = await self._update(update(User).where(User.id == id).values(**kwargs), sql_session=sql_session) return result # @utils.method_cache @@ -151,8 +157,8 @@ async def encrypt(self, id, data, sql_session=None): try: return crypto.aes_encrypt(data, userkey) - except Exception as e: - raise self.UserDBException('encrypt error') + except Exception as exc: + raise self.UserDBException('encrypt error') from exc async def decrypt(self, id, data, sql_session=None): if id: @@ -161,17 +167,17 @@ async def decrypt(self, id, data, sql_session=None): userkey = config.aes_key try: old = tmp = crypto.aes_decrypt(data, userkey) - if isinstance(tmp,dict): + if isinstance(tmp, dict): old = {} - for key,value in tmp.items(): - if isinstance(key,bytes): - key=key.decode('utf-8') - old[key]=value + for key, value in tmp.items(): + if isinstance(key, bytes): + key = key.decode('utf-8') + old[key] = value return old - except Exception as e: - raise self.UserDBException('decrypt error') + except Exception as exc: + raise self.UserDBException('decrypt error') from exc - async def get(self, id=None, email=None, fields:tuple=None, one_or_none=False, first=True, to_dict=True, sql_session=None): + async def get(self, id=None, email=None, fields=None, one_or_none=False, first=True, to_dict=True, sql_session=None): if fields is None: _fields = User else: @@ -186,10 +192,10 @@ async def get(self, id=None, email=None, fields:tuple=None, one_or_none=False, f result = await self._get(smtm, one_or_none=one_or_none, first=first, sql_session=sql_session) if to_dict and result is not None: - return self.to_dict(result,fields) + return self.to_dict(result, fields) return result - async def list(self, fields:tuple=None, limit=None, to_dict=True, sql_session=None, **kwargs): 
+ async def list(self, fields=None, limit=None, to_dict=True, sql_session=None, **kwargs): if fields is None: _fields = User else: @@ -205,31 +211,33 @@ async def list(self, fields:tuple=None, limit=None, to_dict=True, sql_session=No result = await self._get(smtm, sql_session=sql_session) if to_dict and result is not None: - return [self.to_dict(row,fields) for row in result] + return [self.to_dict(row, fields) for row in result] return result def delete(self, id, sql_session=None): return self._delete(delete(User).where(User.id == id), sql_session=sql_session) + if __name__ == '__main__': import asyncio + async def test(): user = User() try: async with user.session as sql_session: async with sql_session.begin(): - await user.add('admin1@localhost', 'admin', '127.0.0.1',sql_session=sql_session) + await user.add('admin1@localhost', 'admin', '127.0.0.1', sql_session=sql_session) await user.add('admin2@localhost', 'admin', '127.0.0.1') except User.DeplicateUser as e: print(e) await user.get(email='admin1@localhost') user1 = await user.get(email='admin1@localhost') - user2 = await user.get(email='admin2@localhost',fields=('id',)) + user2 = await user.get(email='admin2@localhost', fields=('id',)) print('user1: ', user1) print('user2: ', user2) user1_list = await user.list(email='admin1@localhost') - user2_list = await user.list(email='admin2@localhost',fields=('id','email','password')) + user2_list = await user.list(email='admin2@localhost', fields=('id', 'email', 'password')) print('user1_list: ', user1_list) print('user2_list: ', user2_list) diff --git a/docker-compose.yml b/docker-compose.yml index 3a8010c70ea..2f8a8777cba 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -77,6 +77,7 @@ services: # - PROXIES= # - PROXY_DIRECT_MODE=regexp # - PROXY_DIRECT=(?xi)\A([a-z][a-z0-9+\-.]*://)?(0(.0){3}|127(.0){2}.1|localhost|\[::([\d]+)?\])(:[0-9]+)? 
+ # - NOTEPAD_LIMIT=20 # - EXTRA_ONNX_NAME= # - EXTRA_CHARSETS_NAME= # - MAIL_SMTP= diff --git a/libs/__init__.py b/libs/__init__.py index 92299b512bc..8b137891791 100644 --- a/libs/__init__.py +++ b/libs/__init__.py @@ -1,4 +1 @@ -import os -import sys -sys.path.append(os.path.abspath(os.path.dirname(os.path.dirname(__file__)))) diff --git a/libs/config_utils.py b/libs/config_utils.py new file mode 100644 index 00000000000..91556a38a53 --- /dev/null +++ b/libs/config_utils.py @@ -0,0 +1,13 @@ +def strtobool(val: str): + """Convert a string representation of truth to true (1) or false (0). + + True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values + are 'n', 'no', 'f', 'false', 'off', and '0'. Raises ValueError if + 'val' is anything else. + """ + val = val.lower() + if val in ('y', 'yes', 't', 'true', 'on', '1'): + return 1 + if val in ('n', 'no', 'f', 'false', 'off', '0'): + return 0 + raise ValueError(f"invalid truth value {val!r}") diff --git a/libs/convert.py b/libs/convert.py index e5bbc0c7ae4..de087904a1b 100644 --- a/libs/convert.py +++ b/libs/convert.py @@ -12,6 +12,7 @@ except LookupError: HAS_SURROGATEESCAPE = False + def to_bytes(obj, encoding='utf-8', errors=None, nonstring='simplerepr'): """Make sure that a string is a byte string @@ -124,10 +125,11 @@ def to_bytes(obj, encoding='utf-8', errors=None, nonstring='simplerepr'): elif nonstring == 'strict': raise TypeError('obj must be a string type') else: - raise TypeError('Invalid value %s for to_bytes\' nonstring parameter' % nonstring) + raise TypeError(f'Invalid value {nonstring} for to_bytes\' nonstring parameter') return to_bytes(value, encoding, errors) + def to_text(obj, encoding='utf-8', errors=None, nonstring='simplerepr'): """Make sure that a string is a text string @@ -206,18 +208,19 @@ def to_text(obj, encoding='utf-8', errors=None, nonstring='simplerepr'): value = repr(obj) except UnicodeError: # Giving up - return u'' + return '' elif nonstring == 'passthru': return obj elif 
nonstring == 'empty': - return u'' + return '' elif nonstring == 'strict': raise TypeError('obj must be a string type') else: - raise TypeError('Invalid value %s for to_text\'s nonstring parameter' % nonstring) + raise TypeError(f'Invalid value {nonstring} for to_text\'s nonstring parameter') return to_text(value, encoding, errors) + if PY3: to_native = to_text else: diff --git a/libs/cookie_utils.py b/libs/cookie_utils.py index a0356d95d2d..db5d7f9b23f 100644 --- a/libs/cookie_utils.py +++ b/libs/cookie_utils.py @@ -8,30 +8,35 @@ import http.cookiejar as cookielib import time -from http.cookiejar import (_warn_unhandled_exception, parse_ns_headers, - split_header_words) +from http.cookiejar import _warn_unhandled_exception # type:ignore +from http.cookiejar import parse_ns_headers # type:ignore +from http.cookiejar import split_header_words # type:ignore from requests.cookies import (MockRequest, MockResponse, RequestsCookieJar, get_cookie_header) from tornado import httpclient import config - -from .log import Log +from libs.log import Log logger_CookieJar = Log('QD.Http.CookieJar').getlogger() + + def _debug(*args): if not config.debug: - return + return None return logger_CookieJar.debug(*args, stacklevel=2) -cookielib._debug = _debug -def dump_cookie(cookie): + +setattr(cookielib, '_debug', _debug) + + +def dump_cookie(cookie: cookielib.Cookie): result = {} for key in ('name', 'value', 'expires', 'secure', 'port', 'domain', 'path', - 'discard', 'comment', 'comment_url', 'rfc2109'): + 'discard', 'comment', 'comment_url', 'rfc2109'): result[key] = getattr(cookie, key) - result['rest'] = cookie._rest + result['rest'] = cookie._rest # type: ignore # pylint:disable=protected-access return result @@ -70,10 +75,10 @@ def make_cookies(self, response, request): rfc2965 = self._policy.rfc2965 netscape = self._policy.netscape - if ((not rfc2965_hdrs and not ns_hdrs) or - (not ns_hdrs and not rfc2965) or - (not rfc2965_hdrs and not netscape) or - (not netscape and not 
rfc2965)): + if ((not rfc2965_hdrs and not ns_hdrs) + or (not ns_hdrs and not rfc2965) + or (not rfc2965_hdrs and not netscape) + or (not netscape and not rfc2965)): return [] # no relevant cookie headers: quick exit try: diff --git a/libs/fetcher.py b/libs/fetcher.py index 6058ce32c97..798191bb213 100644 --- a/libs/fetcher.py +++ b/libs/fetcher.py @@ -4,6 +4,7 @@ # Author: Binux # http://binux.me # Created on 2014-08-06 11:55:41 +# pylint: disable=broad-exception-raised import base64 import json @@ -11,36 +12,35 @@ import re import time import traceback -import urllib import urllib.parse as urlparse from datetime import datetime from io import BytesIO -from typing import Iterable +from typing import Dict, Iterable, Tuple from jinja2.sandbox import SandboxedEnvironment as Environment -from tornado import gen, httpclient, simple_httpclient +from tornado import httpclient, simple_httpclient from tornado.escape import native_str from tornado.httputil import HTTPHeaders import config from libs import cookie_utils, utils +from libs.log import Log +from libs.safe_eval import safe_eval -from .log import Log -from .safe_eval import safe_eval - -logger_Fetcher = Log('QD.Http.Fetcher').getlogger() +logger_fetcher = Log('QD.Http.Fetcher').getlogger() if config.use_pycurl: try: - import pycurl + import pycurl # type: ignore except ImportError as e: if config.display_import_warning: - logger_Fetcher.warning('Import PyCurl module falied: \"%s\". \nTips: This warning message is only for prompting, it will not affect running of QD framework.',e) + logger_fetcher.warning('Import PyCurl module falied: \"%s\". 
\nTips: This warning message is only for prompting, it will not affect running of QD framework.', e) pycurl = None else: - pycurl = None -local_host = f'http://{config.bind}:{config.port}'.replace('0.0.0.0','localhost') + pycurl = None # pylint: disable=invalid-name +local_host = f'http://{config.bind}:{config.port}'.replace('0.0.0.0', 'localhost') NOT_RETYR_CODE = config.not_retry_code + class Fetcher(object): def __init__(self, download_size_limit=config.download_size_limit): if pycurl: @@ -52,7 +52,10 @@ def __init__(self, download_size_limit=config.download_size_limit): self.jinja_env.globals.update(utils.jinja_inner_globals) self.jinja_env.filters.update(utils.jinja_globals) - def render(self, request, env, session=[]): + def render(self, request, env, session=None): + if session is None: + session = [] + request = dict(request) if isinstance(session, cookie_utils.CookieSession): _cookies = session @@ -60,7 +63,6 @@ def render(self, request, env, session=[]): _cookies = cookie_utils.CookieSession() _cookies.from_json(session) - def _render(obj, key): if not obj.get(key): return @@ -68,8 +70,8 @@ def _render(obj, key): obj[key] = self.jinja_env.from_string(obj[key]).render(_cookies=_cookies, **env) return True except Exception as e: - log_error = 'The error occurred when rendering template {}: {} \\r\\n {}'.format(key,obj[key],repr(e)) - raise httpclient.HTTPError(500,log_error) + log_error = f'The error occurred when rendering template {key}: {obj[key]} \\r\\n {repr(e)}' + raise httpclient.HTTPError(500, log_error) _render(request, 'method') _render(request, 'url') @@ -87,7 +89,9 @@ def _render(obj, key): return request def build_request(self, obj, download_size_limit=config.download_size_limit, connect_timeout=config.connect_timeout, request_timeout=config.request_timeout, - proxy={}, CURL_ENCODING=True, CURL_CONTENT_LENGTH=True): + proxy=None, curl_encoding=True, curl_content_length=True): + if proxy is None: + proxy = {} env = obj['env'] rule = obj['rule'] 
request = self.render(obj['request'], env['variables'], env['session']) @@ -106,65 +110,65 @@ def build_request(self, obj, download_size_limit=config.download_size_limit, con data = request.get('data', '') def set_curl_callback(curl): - def size_limit(download_size, downloaded, upload_size, uploaded): + def size_limit(download_size, downloaded, upload_size, uploaded): # pylint: disable=unused-argument if download_size and download_size > download_size_limit: return 1 if downloaded > download_size_limit: return 1 return 0 if pycurl: - if not CURL_ENCODING: + if not curl_encoding: try: curl.unsetopt(pycurl.ENCODING) except Exception as e: - logger_Fetcher.debug('unsetopt pycurl.ENCODING failed: %s',e) - if not CURL_CONTENT_LENGTH: + logger_fetcher.debug('unsetopt pycurl.ENCODING failed: %s', e) + if not curl_content_length: try: if headers.get('content-length'): headers.pop('content-length') curl.setopt( - pycurl.HTTPHEADER,[ - "%s: %s" % (native_str(k), native_str(v)) + pycurl.HTTPHEADER, [ + f"{native_str(k)}: {native_str(v)}" for k, v in HTTPHeaders(headers).get_all()] ) except Exception as e: - logger_Fetcher.debug('unsetopt pycurl.CONTENT_LENGTH failed: %s',e) + logger_fetcher.debug('unsetopt pycurl.CONTENT_LENGTH failed: %s', e) if config.dns_server: - curl.setopt(pycurl.DNS_SERVERS,config.dns_server) + curl.setopt(pycurl.DNS_SERVERS, config.dns_server) curl.setopt(pycurl.NOPROGRESS, 0) curl.setopt(pycurl.PROGRESSFUNCTION, size_limit) curl.setopt(pycurl.CONNECTTIMEOUT, int(connect_timeout)) curl.setopt(pycurl.TIMEOUT, int(request_timeout)) if proxy: - if proxy.get('scheme','')=='socks5': + if proxy.get('scheme', '') == 'socks5': curl.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_SOCKS5) - elif proxy.get('scheme','')=='socks5h': + elif proxy.get('scheme', '') == 'socks5h': curl.setopt(pycurl.PROXYTYPE, pycurl.PROXYTYPE_SOCKS5_HOSTNAME) return curl req = httpclient.HTTPRequest( - url = url, - method = method, - headers = headers, - body = data, - follow_redirects = 
False, - max_redirects = 0, - decompress_response = True, - allow_nonstandard_methods = True, - allow_ipv6 = True, - prepare_curl_callback = set_curl_callback, - validate_cert=False, - connect_timeout=connect_timeout, - request_timeout=request_timeout - ) + url=url, + method=method, + headers=headers, + body=data, + follow_redirects=False, + max_redirects=0, + decompress_response=True, + allow_nonstandard_methods=True, + allow_ipv6=True, + prepare_curl_callback=set_curl_callback, + validate_cert=False, + connect_timeout=connect_timeout, + request_timeout=request_timeout + ) session = cookie_utils.CookieSession() if req.headers.get('cookie'): req.headers['Cookie'] = req.headers.pop("cookie") if req.headers.get('Cookie'): - session.update(dict(x.strip().split('=', 1) \ - for x in req.headers['Cookie'].split(';') \ - if '=' in x)) + session.update(dict(x.strip().split('=', 1) + for x in req.headers['Cookie'].split(';') + if '=' in x)) if isinstance(env['session'], cookie_utils.CookieSession): session.from_json(env['session'].to_json()) else: @@ -178,17 +182,17 @@ def size_limit(download_size, downloaded, upload_size, uploaded): if not config.proxy_direct_mode: for key in proxy: if key != 'scheme': - setattr(req, 'proxy_%s' % key, proxy[key]) + setattr(req, f'proxy_{key}', proxy[key]) elif config.proxy_direct_mode == 'regexp': if not re.compile(config.proxy_direct).search(req.url): for key in proxy: if key != 'scheme': - setattr(req, 'proxy_%s' % key, proxy[key]) + setattr(req, f'proxy_{key}', proxy[key]) elif config.proxy_direct_mode == 'url': if utils.urlmatch(req.url) not in config.proxy_direct.split('|'): for key in proxy: if key != 'scheme': - setattr(req, 'proxy_%s' % key, proxy[key]) + setattr(req, f'proxy_{key}', proxy[key]) env['session'] = session @@ -208,34 +212,34 @@ def build_headers(headers): def build_request(request): url = urlparse.urlparse(request.url) ret = dict( - method = request.method, - url = request.url, - httpVersion = 'HTTP/1.1', - headers = 
build_headers(request.headers), - queryString = [ - {'name': n, 'value': v} for n, v in\ - urlparse.parse_qsl(url.query)], - cookies = [ - {'name': n, 'value': v} for n, v in \ - urlparse.parse_qsl(request.headers.get('cookie', ''))], - headersSize = -1, - bodySize = len(request.body) if request.body else 0, - ) + method=request.method, + url=request.url, + httpVersion='HTTP/1.1', + headers=build_headers(request.headers), + queryString=[ + {'name': n, 'value': v} for n, v in + urlparse.parse_qsl(url.query)], + cookies=[ + {'name': n, 'value': v} for n, v in + urlparse.parse_qsl(request.headers.get('cookie', ''))], + headersSize=-1, + bodySize=len(request.body) if request.body else 0, + ) if request.body: - if isinstance(request.body,bytes): - request._body = request.body.decode() + if isinstance(request.body, bytes): + request._body = request.body.decode() # pylint: disable=protected-access ret['postData'] = dict( - mimeType = request.headers.get('content-type'), - text = request.body, - ) + mimeType=request.headers.get('content-type'), + text=request.body, + ) if ret['postData']['mimeType'] and 'application/x-www-form-urlencoded' in ret['postData']['mimeType']: ret['postData']['params'] = [ - {'name': n, 'value': v} for n, v in \ - urlparse.parse_qsl(request.body)] + {'name': n, 'value': v} for n, v in + urlparse.parse_qsl(request.body)] try: _ = json.dumps(ret['postData']['params']) - except UnicodeDecodeError: - logger_Fetcher.error('params encoding error') + except UnicodeDecodeError as e: + logger_fetcher.error('params encoding error: %s', e, exc_info=config.traceback_print) del ret['postData']['params'] return ret @@ -248,34 +252,34 @@ def build_response(response): if not response.headers.get('content-type'): response.headers['content-type'] = 'text/plain' if 'charset=' not in response.headers.get('content-type', ''): - response.headers['content-type'] += '; charset='+encoding + response.headers['content-type'] += '; charset=' + encoding return dict( - status 
= response.code, - statusText = response.reason, - headers = build_headers(response.headers), - cookies = cookies.to_json(), - content = dict( - size = len(response.body), - mimeType = response.headers.get('content-type'), - text = base64.b64encode(response.body).decode('ascii'), - decoded = utils.decode(response.body, response.headers), - ), - redirectURL = response.headers.get('Location'), - headersSize = -1, - bodySize = -1, - ) + status=response.code, + statusText=response.reason, + headers=build_headers(response.headers), + cookies=cookies.to_json(), + content=dict( + size=len(response.body), + mimeType=response.headers.get('content-type'), + text=base64.b64encode(response.body).decode('ascii'), + decoded=utils.decode(response.body, response.headers), + ), + redirectURL=response.headers.get('Location'), + headersSize=-1, + bodySize=-1, + ) entry = dict( - startedDateTime = datetime.now().isoformat(), - time = response.request_time, - request = build_request(request), - response = build_response(response), - cache = {}, - timings = response.time_info, - connections = "0", - pageref = "page_0", - ) + startedDateTime=datetime.now().isoformat(), + time=response.request_time, + request=build_request(request), + response=build_response(response), + cache={}, + timings=response.time_info, + connections="0", + pageref="page_0", + ) if response.body and 'image' in response.headers.get('content-type'): entry['response']['content']['decoded'] = base64.b64encode(response.body).decode('ascii') return entry @@ -285,46 +289,44 @@ def run_rule(self, response, rule, env): msg = '' content = [-1, ] + def getdata(_from): if _from == 'content': if content[0] == -1: - if response.headers and isinstance(response.headers,HTTPHeaders): + if response.headers and isinstance(response.headers, HTTPHeaders): content[0] = utils.decode(response.body, headers=response.headers) else: content[0] = utils.decode(response.body) - if ('content-type' in response.headers): + if 'content-type' in 
response.headers: if 'image' in response.headers.get('content-type'): return base64.b64encode(response.body).decode('utf8') return content[0] elif _from == 'status': - return '%s' % response.code + return f'{response.code}' elif _from.startswith('header-'): _from = _from[7:] return response.headers.get(_from, '') elif _from == 'header': try: if hasattr(response, 'headers') and isinstance(response.headers, HTTPHeaders): - return '\n'.join(['{key}: {value}'.format(key=key,value=value) for key,value in response.headers.get_all()]) - return '\n'.join(['{key}: {value}'.format(key=key,value=value) for key,value in response.headers._dict.items()]) + return '\n'.join([f'{key}: {value}' for key, value in response.headers.get_all()]) + return '\n'.join([f'{key}: {value}' for key, value in response.headers._dict.items()]) # pylint: disable=protected-access except Exception as e: - if config.traceback_print: - traceback.print_exc() - logger_Fetcher.error('Run rule failed: %s', str(e)) + logger_fetcher.error('Run rule failed: %s', str(e), exc_info=config.traceback_print) try: - return json.dumps(response.headers._dict) + return json.dumps(response.headers._dict) # pylint: disable=protected-access except Exception as e: - if config.traceback_print: - traceback.print_exc() - logger_Fetcher.error('Run rule failed: %s', str(e)) + logger_fetcher.error('Run rule failed: %s', str(e), exc_info=config.traceback_print) else: return '' - session=env['session'] + session = env['session'] if isinstance(session, cookie_utils.CookieSession): _cookies = session else: _cookies = cookie_utils.CookieSession() _cookies.from_json(session) + def _render(obj, key): if not obj.get(key): return @@ -332,9 +334,8 @@ def _render(obj, key): obj[key] = self.jinja_env.from_string(obj[key]).render(_cookies=_cookies, **env['variables']) return True except Exception as e: - log_error = 'The error occurred when rendering template {}: {} \\r\\n {}'.format(key,obj[key],repr(e)) - raise 
httpclient.HTTPError(500,log_error) - + log_error = f'The error occurred when rendering template {key}: {obj[key]} \\r\\n {repr(e)}' + raise httpclient.HTTPError(500, log_error) for r in rule.get('success_asserts') or '': _render(r, 're') @@ -342,21 +343,20 @@ def _render(obj, key): msg = '' break else: - msg = 'Fail assert: %s from success_asserts' % json.dumps(r, ensure_ascii=False) + msg = f'Fail assert: {json.dumps(r, ensure_ascii=False)} from success_asserts' else: if rule.get('success_asserts'): success = False - for r in rule.get('failed_asserts') or '': _render(r, 're') if r['re'] and re.search(r['re'], getdata(r['from'])): success = False - msg = 'Fail assert: %s from failed_asserts' % json.dumps(r, ensure_ascii=False) + msg = f'Fail assert: {json.dumps(r, ensure_ascii=False)} from failed_asserts' break if not success and msg and (response.error or (response.reason and str(response.reason) != 'OK')): - msg += ', \\r\\nResponse Error : %s' % str(response.error or response.reason) + msg += f', \\r\\nResponse Error : {response.error or response.reason}' for r in rule.get('extract_variables') or '': pattern = r['re'] @@ -367,17 +367,17 @@ def _render(obj, key): if re_m: pattern = re_m.group(1) if 'g' in re_m.group(2): - find_all = True # 全局匹配 + find_all = True # 全局匹配 if 'i' in re_m.group(2): - flags |= re.I # 使匹配对大小写不敏感 + flags |= re.I # 使匹配对大小写不敏感 if 'm' in re_m.group(2): - flags |= re.M # 多行匹配,影响 ^ 和 $ + flags |= re.M # 多行匹配,影响 ^ 和 $ if 's' in re_m.group(2): - flags |= re.S # 使 . 匹配包括换行在内的所有字符 + flags |= re.S # 使 . 匹配包括换行在内的所有字符 if 'u' in re_m.group(2): - flags |= re.U # 根据Unicode字符集解析字符。这个标志影响 \w, \W, \b, \B. + flags |= re.U # 根据Unicode字符集解析字符。这个标志影响 \w, \W, \b, \B. 
if 'x' in re_m.group(2): - pass# flags |= re.X # 该标志通过给予你更灵活的格式以便你将正则表达式写得更易于理解。暂不启用 + pass # flags |= re.X # 该标志通过给予你更灵活的格式以便你将正则表达式写得更易于理解。暂不启用 if find_all: try: @@ -402,116 +402,119 @@ def tpl2har(tpl): def build_request(en): url = urlparse.urlparse(en['request']['url']) request = dict( - method = en['request']['method'], - url = en['request']['url'], - httpVersion = 'HTTP/1.1', - headers = [ - {'name': x['name'], 'value': x['value'], 'checked': True} for x in\ - en['request'].get('headers', [])], - queryString = [ - {'name': n, 'value': v} for n, v in\ - urlparse.parse_qsl(url.query)], - cookies = [ - {'name': x['name'], 'value': x['value'], 'checked': True} for x in\ - en['request'].get('cookies', [])], - headersSize = -1, - bodySize = len(en['request'].get('data')) if en['request'].get('data') else 0, - - - ) + method=en['request']['method'], + url=en['request']['url'], + httpVersion='HTTP/1.1', + headers=[ + {'name': x['name'], 'value': x['value'], 'checked': True} for x in + en['request'].get('headers', [])], + queryString=[ + {'name': n, 'value': v} for n, v in + urlparse.parse_qsl(url.query)], + cookies=[ + {'name': x['name'], 'value': x['value'], 'checked': True} for x in + en['request'].get('cookies', [])], + headersSize=-1, + bodySize=len(en['request'].get('data')) if en['request'].get('data') else 0, + + + ) if en['request'].get('data'): request['postData'] = dict( - mimeType = en['request'].get('mimeType'), - text = en['request'].get('data'), - ) + mimeType=en['request'].get('mimeType'), + text=en['request'].get('data'), + ) if request['postData']['mimeType'] and 'application/x-www-form-urlencoded' in request['postData']['mimeType'] : - params = [{'name': x[0], 'value': x[1]} \ - for x in urlparse.parse_qsl(en['request']['data'], True)] + params = [{'name': x[0], 'value': x[1]} + for x in urlparse.parse_qsl(en['request']['data'], True)] request['postData']['params'] = params try: _ = json.dumps(request['postData']['params']) - except 
UnicodeDecodeError: - logger_Fetcher.error('params encoding error') + except UnicodeDecodeError as e: + logger_fetcher.error('params encoding error: %s', e, exc_info=config.traceback_print) del request['postData']['params'] return request entries = [] for en in tpl: entry = dict( - checked = True, - startedDateTime = datetime.now().isoformat(), - time = 1, - request = build_request(en), - response = {}, - cache = {}, - timings = {}, - connections = "0", - pageref = "page_0", - - success_asserts = en.get('rule', {}).get('success_asserts', []), - failed_asserts = en.get('rule', {}).get('failed_asserts', []), - extract_variables = en.get('rule', {}).get('extract_variables', []), - ) + checked=True, + startedDateTime=datetime.now().isoformat(), + time=1, + request=build_request(en), + response={}, + cache={}, + timings={}, + connections="0", + pageref="page_0", + + success_asserts=en.get('rule', {}).get('success_asserts', []), + failed_asserts=en.get('rule', {}).get('failed_asserts', []), + extract_variables=en.get('rule', {}).get('extract_variables', []), + ) entries.append(entry) return dict( - log = dict( - creator = dict( - name = 'binux', - version = 'QD' - ), - entries = entries, - pages = [], - version = '1.2' - ) - ) - async def build_response(self, obj, proxy={}, CURL_ENCODING=config.curl_encoding, CURL_CONTENT_LENGTH=config.curl_length, EMPTY_RETRY = config.empty_retry): + log=dict( + creator=dict( + name='binux', + version='QD' + ), + entries=entries, + pages=[], + version='1.2' + ) + ) + + async def build_response(self, obj, proxy=None, curl_encoding=config.curl_encoding, curl_content_length=config.curl_length, empty_retry=config.empty_retry): + if proxy is None: + proxy = {} try: - req, rule, env = self.build_request(obj, download_size_limit=self.download_size_limit,proxy=proxy,CURL_ENCODING=CURL_ENCODING,CURL_CONTENT_LENGTH=CURL_CONTENT_LENGTH) - response = await self.client.fetch(req) - logger_Fetcher.debug("%d %s %s %.2fms", - response.code, - 
response.request.method, - response.request.url, - 1000.0 * response.request_time) + req, rule, env = self.build_request(obj, download_size_limit=self.download_size_limit, proxy=proxy, curl_encoding=curl_encoding, curl_content_length=curl_content_length) + response = await self.client.fetch(req) + logger_fetcher.debug("%d %s %s %.2fms", + response.code, + response.request.method, + response.request.url, + 1000.0 * response.request_time) except httpclient.HTTPError as e: try: if config.allow_retry and pycurl: - if e.__dict__.get('errno','') == 61: - logger_Fetcher.warning('{} {} [Warning] {} -> Try to retry!'.format(req.method,req.url,e)) - req, rule, env = self.build_request(obj, download_size_limit=self.download_size_limit,proxy=proxy,CURL_ENCODING=False,CURL_CONTENT_LENGTH=CURL_CONTENT_LENGTH) - e.response = await self.client.fetch(req) + if e.__dict__.get('errno', '') == 61: + logger_fetcher.warning('%s %s [Warning] %s -> Try to retry!', req.method, req.url, e) + req, rule, env = self.build_request(obj, download_size_limit=self.download_size_limit, proxy=proxy, curl_encoding=False, curl_content_length=curl_content_length) + e.response = await self.client.fetch(req) elif e.code == 400 and e.message == 'Bad Request' and req and req.headers.get('content-length'): - logger_Fetcher.warning('{} {} [Warning] {} -> Try to retry!'.format(req.method,req.url,e)) - req, rule, env = self.build_request(obj, download_size_limit=self.download_size_limit,proxy=proxy,CURL_ENCODING=CURL_ENCODING,CURL_CONTENT_LENGTH=False) - e.response = await self.client.fetch(req) - elif e.code not in NOT_RETYR_CODE or (EMPTY_RETRY and not e.response): + logger_fetcher.warning('%s %s [Warning] %s -> Try to retry!', req.method, req.url, e) + req, rule, env = self.build_request(obj, download_size_limit=self.download_size_limit, proxy=proxy, curl_encoding=curl_encoding, curl_content_length=False) + e.response = await self.client.fetch(req) + elif e.code not in NOT_RETYR_CODE or (empty_retry and not 
e.response): try: - logger_Fetcher.warning('{} {} [Warning] {} -> Try to retry!'.format(req.method,req.url,e)) + logger_fetcher.warning('%s %s [Warning] %s -> Try to retry!', req.method, req.url, e) client = simple_httpclient.SimpleAsyncHTTPClient() - e.response = await client.fetch(req) - except Exception: - logger_Fetcher.error(e.message.replace('\\r\\n','\r\n') or e.response.replace('\\r\\n','\r\n') or Exception) + e.response = await client.fetch(req) + except Exception as e0: + logger_fetcher.error(e.message.replace('\\r\\n', '\r\n') or e.response.replace('\\r\\n', '\r\n') or e0, exc_info=config.traceback_print) else: try: - logger_Fetcher.warning('{} {} [Warning] {}'.format(req.method,req.url,e)) - except Exception: - logger_Fetcher.error(e.message.replace('\\r\\n','\r\n') or e.response.replace('\\r\\n','\r\n') or Exception) + logger_fetcher.warning('%s %s [Warning] %s', req.method, req.url, e) + except Exception as e0: + logger_fetcher.error(e.message.replace('\\r\\n', '\r\n') or e.response.replace('\\r\\n', '\r\n') or e0, exc_info=config.traceback_print) else: - logger_Fetcher.warning('{} {} [Warning] {}'.format(req.method,req.url,e)) + logger_fetcher.warning('%s %s [Warning] %s', req.method, req.url, e) finally: if 'req' not in locals().keys(): - tmp = {'env':obj['env'],'rule':obj['rule']} + tmp = {'env': obj['env'], 'rule': obj['rule']} tmp['request'] = {'method': 'GET', 'url': 'api://util/unicode?content=', 'headers': [], 'cookies': []} req, rule, env = self.build_request(tmp) - e.response = httpclient.HTTPResponse(request=req,code=e.code,reason=e.message,buffer=BytesIO(str(e).encode())) + e.response = httpclient.HTTPResponse(request=req, code=e.code, reason=e.message, buffer=BytesIO(str(e).encode())) if not e.response: if config.traceback_print: traceback.print_exc() - e.response = httpclient.HTTPResponse(request=req,code=e.code,reason=e.message,buffer=BytesIO(str(e).encode())) - return rule, env, e.response + e.response = 
httpclient.HTTPResponse(request=req, code=e.code, reason=e.message, buffer=BytesIO(str(e).encode())) + return rule, env, e.response # TODO # pylint: disable=return-in-finally,lost-exception return rule, env, response - async def fetch(self, obj, proxy={}, CURL_ENCODING=config.curl_encoding, CURL_CONTENT_LENGTH=config.curl_length, EMPTY_RETRY = config.empty_retry): + async def fetch(self, obj, proxy=None, curl_encoding=config.curl_encoding, curl_content_length=config.curl_length, empty_retry=config.empty_retry): """ obj = { request: { @@ -535,8 +538,10 @@ async def fetch(self, obj, proxy={}, CURL_ENCODING=config.curl_encoding, CURL_CO } } """ + if proxy is None: + proxy = {} - rule, env, response = await self.build_response(obj, proxy, CURL_ENCODING, CURL_CONTENT_LENGTH, EMPTY_RETRY) + rule, env, response = await self.build_response(obj, proxy, curl_encoding, curl_content_length, empty_retry) env['session'].extract_cookies_to_jar(response.request, response) success, msg = self.run_rule(response, rule, env) @@ -546,13 +551,13 @@ async def fetch(self, obj, proxy={}, CURL_ENCODING=config.curl_encoding, CURL_CO 'response': response, 'env': env, 'msg': msg, - } + } - FOR_START = re.compile('{%\s*for\s+(\w+)\s+in\s+(\w+|list\([\s\S]*\)|range\([\s\S]*\))\s*%}') - IF_START = re.compile('{%\s*if\s+(.+)\s*%}') - WHILE_START = re.compile('{%\s*while\s+(.+)\s*%}') - ELSE_START = re.compile('{%\s*else\s*%}') - PARSE_END = re.compile('{%\s*end(for|if|while)\s*%}') + FOR_START = re.compile(r'{%\s*for\s+(\w+)\s+in\s+(\w+|list\([\s\S]*\)|range\([\s\S]*\))\s*%}') + IF_START = re.compile(r'{%\s*if\s+(.+)\s*%}') + WHILE_START = re.compile(r'{%\s*while\s+(.+)\s*%}') + ELSE_START = re.compile(r'{%\s*else\s*%}') + PARSE_END = re.compile(r'{%\s*end(for|if|while)\s*%}') def parse(self, tpl): stmt_stack = [] @@ -600,7 +605,7 @@ def __append(entry): entry_type = stmt_stack and stmt_stack[-1]['type'] if entry_type == 'for' or entry_type == 'if' or entry_type == 'while': if m.group(1) != 
entry_type: - raise Exception('Failed at %d/%d end tag \\r\\nError: End tag should be "end%s", but "end%s"' % (i+1, len(tpl), stmt_stack[-1]['type'], m.group(1))) + raise Exception(f"Failed at {i+1}/{len(tpl)} end tag \\r\\nError: End tag should be \"end{stmt_stack[-1]['type']}\", but \"end{m.group(1)}\"") entry = stmt_stack.pop() if stmt_stack: __append(entry) @@ -620,7 +625,7 @@ def __append(entry): while stmt_stack: yield stmt_stack.pop() - async def do_fetch(self, tpl, env, proxies=config.proxies, request_limit=config.task_request_limit, tpl_length=0): + async def do_fetch(self, tpl, env, proxies=config.proxies, request_limit=config.task_request_limit, tpl_length=0) -> Tuple[Dict, int]: """ do a fetch of hole tpl """ @@ -632,7 +637,7 @@ async def do_fetch(self, tpl, env, proxies=config.proxies, request_limit=config. if tpl_length == 0 and len(tpl) > 0: tpl_length = len(tpl) for i, entry in enumerate(tpl): - entry['idx'] = i+1 + entry['idx'] = i + 1 for i, block in enumerate(self.parse(tpl)): if request_limit <= 0: @@ -649,7 +654,7 @@ async def do_fetch(self, tpl, env, proxies=config.proxies, request_limit=config. support_enum = True except Exception as e: if config.debug: - logger_Fetcher.exception(e) + logger_fetcher.exception(e) if support_enum: env['variables']['loop_length'] = str(len(_from)) env['variables']['loop_depth'] = str(int(env['variables'].get('loop_depth', '0')) + 1) @@ -685,44 +690,41 @@ async def do_fetch(self, tpl, env, proxies=config.proxies, request_limit=config. 
env['variables']['loop_index'] = str(while_idx + 1) env['variables']['loop_index0'] = str(while_idx) try: - condition = safe_eval(block['condition'],env['variables']) + condition = safe_eval(block['condition'], env['variables']) except NameError: condition = False except ValueError as e: if len(str(e)) > 20 and str(e)[:20] == ":": condition = False else: - raise Exception('Failed at %d/%d while condition, \\r\\nError: %s, \\r\\nBlock condition: %s' % - (block['idx'], tpl_length, str(e).replace("","ValueError"), block['condition'])) + str_e = str(e).replace("", "ValueError") + raise Exception(f"Failed at {block['idx']}/{tpl_length} while condition, \\r\\nError: {str_e}, \\r\\nBlock condition: {block['condition']}") from e except Exception as e: - raise Exception('Failed at %d/%d while condition, \\r\\nError: %s, \\r\\nBlock condition: %s' % - (block['idx'], tpl_length, e, block['condition'])) + raise Exception(f"Failed at {block['idx']}/{tpl_length} while condition, \\r\\nError: {e}, \\r\\nBlock condition: {block['condition']}") from e if condition: env, request_limit = await self.do_fetch(block['body'], env, proxies=[proxy], request_limit=request_limit, tpl_length=tpl_length) else: if config.debug: - logger_Fetcher.debug('while loop break, time: %ss', time.perf_counter() - start_time) + logger_fetcher.debug('while loop break, time: %ss', time.perf_counter() - start_time) break while_idx += 1 else: - raise Exception('Failed at %d/%d while end, \\r\\nError: while loop timeout, time: %ss \\r\\nBlock condition: %s' % - (block['idx'], tpl_length, time.perf_counter() - start_time , block['condition'])) + raise Exception(f"Failed at {block['idx']}/{tpl_length} while end, \\r\\nError: while loop timeout, time: {time.perf_counter() - start_time}s \\r\\nBlock condition: {block['condition']}") env['variables']['loop_depth'] = str(int(env['variables'].get('loop_depth', '0')) - 1) env['variables']['loop_depth0'] = str(int(env['variables'].get('loop_depth0', '-1')) - 1) elif 
block['type'] == 'if': try: - condition = safe_eval(block['condition'],env['variables']) + condition = safe_eval(block['condition'], env['variables']) except NameError: condition = False except ValueError as e: if len(str(e)) > 20 and str(e)[:20] == ":": condition = False else: - raise Exception('Failed at %d/%d if condition, \\r\\nError: %s, \\r\\nBlock condition: %s' % - (block['idx'], tpl_length, str(e).replace("","ValueError"), block['condition'])) + str_e = str(e).replace("", "ValueError") + raise Exception(f"Failed at {block['idx']}/{tpl_length} if condition, \\r\\nError: {str_e}, \\r\\nBlock condition: {block['condition']}") from e except Exception as e: - raise Exception('Failed at %d/%d if condition, \\r\\nError: %s, \\r\\nBlock condition: %s' % - (block['idx'], tpl_length, e, block['condition'])) + raise Exception(f"Failed at {block['idx']}/{tpl_length} if condition, \\r\\nError: {e}, \\r\\nBlock condition: {block['condition']}") from e if condition: _, request_limit = await self.do_fetch(block['true'], env, proxies=[proxy], request_limit=request_limit, tpl_length=tpl_length) else: @@ -732,17 +734,15 @@ async def do_fetch(self, tpl, env, proxies=config.proxies, request_limit=config. 
try: request_limit -= 1 result = await self.fetch(dict( - request = entry['request'], - rule = entry['rule'], - env = env, - ), proxy=proxy) + request=entry['request'], + rule=entry['rule'], + env=env, + ), proxy=proxy) env = result['env'] except Exception as e: if config.debug: - logger_Fetcher.exception(e) - raise Exception('Failed at %d/%d request, \\r\\nError: %r, \\r\\nRequest URL: %s' % ( - entry['idx'], tpl_length, e, entry['request']['url'])) + logger_fetcher.exception(e) + raise Exception(f"Failed at {entry['idx']}/{tpl_length} request, \\r\\nError: {e}, \\r\\nRequest URL: {entry['request']['url']}") from e if not result['success']: - raise Exception('Failed at %d/%d request, \\r\\n%s, \\r\\nRequest URL: %s' % ( - entry['idx'], tpl_length, result['msg'], entry['request']['url'])) + raise Exception(f"Failed at {entry['idx']}/{tpl_length} request, \\r\\n{result['msg']}, \\r\\nRequest URL: {entry['request']['url']}") return env, request_limit diff --git a/libs/funcs.py b/libs/funcs.py index 362be6c8d21..9ab1df6602a 100644 --- a/libs/funcs.py +++ b/libs/funcs.py @@ -1,7 +1,7 @@ #!/usr/bin/env python # -*- encoding: utf-8 -*- # vim: set et sw=4 ts=4 sts=4 ff=unix fenc=utf8: - +# pylint: disable=broad-exception-raised import datetime import json import os @@ -16,43 +16,43 @@ import config from db import DB from libs import utils +from libs.log import Log -from .log import Log - -logger_Funcs = Log('QD.Http.Funcs').getlogger() +logger_funcs = Log('QD.Http.Funcs').getlogger() -class pusher(object): +class Pusher: def __init__(self, db: DB, sql_session=None): self.db = db self.sql_session = sql_session async def judge_res(self, res: aiohttp.ClientResponse): - if (res.status == 200): + if res.status == 200: return "True" - else: - text = await res.text() - if text: - try: - text = await res.json() - except: - pass - raise Exception(text) - elif res.reason: - raise Exception('Reason: %s' % res.reason) - else: - raise Exception('status code: %d' % res.status) + + text 
= await res.text() + if text: + _json = {} + try: + _json = await res.json() + except Exception as e: + logger_funcs.debug(e, exc_info=config.traceback_print) + if _json: + raise Exception(_json) + raise Exception(text) + if res.reason: + raise Exception(f'Reason: {res.reason}') + raise Exception(f'status code: {res.status}') async def pusher(self, userid, pushsw, flg, title, content): sql_session = self.sql_session notice = await self.db.user.get(userid, fields=('skey', 'barkurl', 'noticeflg', 'wxpusher', 'qywx_token', 'tg_token', 'dingding_token', 'qywx_webhook', 'diypusher'), sql_session=sql_session) - if (notice['noticeflg'] & flg != 0): + if notice['noticeflg'] & flg != 0: user = await self.db.user.get(userid, fields=('id', 'email', 'email_verified', 'nickname'), sql_session=sql_session) diypusher = notice['diypusher'] - if (diypusher != ''): + if diypusher != '': diypusher = json.loads(diypusher) - self.barklink = notice['barkurl'] pusher = {} pusher["mailpushersw"] = False if ( notice['noticeflg'] & 0x80) == 0 else True @@ -73,10 +73,10 @@ async def pusher(self, userid, pushsw, flg, title, content): pusher["qywxwebhooksw"] = False if ( notice['noticeflg'] & 0x1000) == 0 else True - def nonepush(*args, **kwargs): + def nonepush(*args, **kwargs): # pylint: disable=unused-argument return - if (pushsw['pushen']): + if pushsw['pushen']: send2bark = self.send2bark if (pusher["barksw"]) else nonepush send2s = self.send2s if (pusher["schansw"]) else nonepush send2wxpusher = self.send2wxpusher if ( @@ -97,7 +97,7 @@ def nonepush(*args, **kwargs): send2s(notice['skey'], title, content), send2wxpusher( - notice['wxpusher'], title+u" "+content), + notice['wxpusher'], title + " " + content), sendmail( user['email'], title, content, sql_session=sql_session), cus_pusher_send( @@ -116,8 +116,8 @@ async def send2bark(self, barklink, title, content): r = 'False' try: link = barklink - if (link[-1] != '/'): - link = link+'/' + if link[-1] != '/': + link = link + '/' content = 
content.replace('\\r\\n', '\n') d = {"title": title, "body": content} async with aiohttp.ClientSession(conn_timeout=config.connect_timeout) as session: @@ -126,16 +126,15 @@ async def send2bark(self, barklink, title, content): except Exception as e: r = traceback.format_exc() - logger_Funcs.error('Sent to Bark error: %s', e) + logger_funcs.error('Sent to Bark error: %s', e, exc_info=config.traceback_print) return e return r - async def send2s(self, skey, title, content): + async def send2s(self, skey: str, title: str, content: str): r = 'False' - if (skey != ""): + if skey != "": try: - link = u"https://sctapi.ftqq.com/{0}.send".format( - skey.replace(".send", "")) + link = f"https://sctapi.ftqq.com/{skey.replace('.send', '')}.send" content = content.replace('\\r\\n', '\n\n') d = {'text': title, 'desp': content} async with aiohttp.ClientSession(conn_timeout=config.connect_timeout) as session: @@ -144,40 +143,38 @@ async def send2s(self, skey, title, content): except Exception as e: r = traceback.format_exc() - logger_Funcs.error('Sent to ServerChan error: %s', e) + logger_funcs.error('Sent to ServerChan error: %s', e, exc_info=config.traceback_print) return e return r async def send2tg(self, tg_token, title, content): r = 'False' tmp = tg_token.split(';') - tgToken = '' - tgUserId = '' + tg_token = '' + tg_user_id = '' if len(tmp) >= 2: - tgToken = tmp[0] - tgUserId = tmp[1] - tgHost = tmp[2] if len(tmp) >= 3 else '' + tg_token = tmp[0] + tg_user_id = tmp[1] + tg_host = tmp[2] if len(tmp) >= 3 else '' proxy = tmp[3] if len(tmp) >= 4 else '' pic = tmp[4] if len(tmp) >= 5 else '' - if tgToken and tgUserId: + if tg_token and tg_user_id: try: - token = tgToken - chat_id = tgUserId + token = tg_token + chat_id = tg_user_id # TG_BOT的token # token = os.environ.get('TG_TOKEN') # 用户的ID # chat_id = os.environ.get('TG_USERID') - if not tgHost: - link = u'https://api.telegram.org/bot{0}/sendMessage'.format( - token) + if not tg_host: + link = 
f'https://api.telegram.org/bot{token}/sendMessage' else: - if tgHost[-1] != '/': - tgHost = tgHost + '/' - if 'http://' in tgHost or 'https://' in tgHost: - link = u'{0}bot{1}/sendMessage'.format(tgHost, token) + if tg_host[-1] != '/': + tg_host = tg_host + '/' + if 'http://' in tg_host or 'https://' in tg_host: + link = f'{tg_host}bot{token}/sendMessage' else: - link = u'https://{0}bot{1}/sendMessage'.format( - tgHost, token) + link = f'https://{tg_host}bot{token}/sendMessage' picurl = config.push_pic if pic == '' else pic # 匹配标题"QD[定时]任务 {0}-{1} 成功|失败" 的 {0} 部分, 获取 hashtag @@ -190,8 +187,8 @@ async def send2tg(self, tg_token, title, content): title = ' '.join(title_sp) content = content.replace('\\r\\n', '\n') - d = {'chat_id': str(chat_id), 'text': '' + title + '' + '\n' + content + '\n' + - '------QD提醒------', 'disable_web_page_preview': 'true', 'parse_mode': 'HTML'} + d = {'chat_id': str(chat_id), 'text': '' + title + '' + '\n' + content + '\n' + + '------QD提醒------', 'disable_web_page_preview': 'true', 'parse_mode': 'HTML'} if proxy: async with aiohttp.ClientSession(conn_timeout=config.connect_timeout) as session: async with session.post(link, json=d, verify_ssl=False, proxy=proxy, timeout=config.request_timeout) as res: @@ -202,7 +199,7 @@ async def send2tg(self, tg_token, title, content): r = await self.judge_res(res) except Exception as e: r = traceback.format_exc() - logger_Funcs.error('Sent to Telegram error: %s', e) + logger_funcs.error('Sent to Telegram error: %s', e, exc_info=config.traceback_print) return e return r @@ -212,10 +209,9 @@ async def send2dingding(self, dingding_token, title, content): if len(tmp) >= 1: dingding_token = tmp[0] pic = tmp[1] if len(tmp) >= 2 else '' - if (dingding_token != ""): + if dingding_token != "": try: - link = u"https://oapi.dingtalk.com/robot/send?access_token={0}".format( - dingding_token) + link = f"https://oapi.dingtalk.com/robot/send?access_token={dingding_token}" picurl = config.push_pic if pic == '' else pic 
content = content.replace('\\r\\n', '\n\n > ') d = {"msgtype": "markdown", "markdown": { @@ -228,7 +224,7 @@ async def send2dingding(self, dingding_token, title, content): raise Exception(_json) except Exception as e: r = traceback.format_exc() - logger_Funcs.error('Sent to DingDing error: %s', e) + logger_funcs.error('Sent to DingDing error: %s', e, exc_info=config.traceback_print) return e return r @@ -257,7 +253,7 @@ async def send2wxpusher(self, wxpusher, content): raise Exception(_json) except Exception as e: r = traceback.format_exc() - logger_Funcs.error('Sent to WxPusher error: %s', e) + logger_funcs.error('Sent to WxPusher error: %s', e, exc_info=config.traceback_print) return e else: return Exception("参数不完整! ") @@ -270,60 +266,60 @@ async def cus_pusher_send(self, diypusher, t, log): log = log.replace('"', '\\"').replace('\\\\"', '\\"') curltmp = diypusher['curl'].format(log=log, t=t) - if (diypusher['headers']): + if diypusher['headers']: headerstmp = json.loads(diypusher['headers'].replace( '{log}', log).replace("{t}", t)) else: headerstmp = {} - if (diypusher['mode'] == 'POST'): - postDatatmp = diypusher['postData'].replace( + if diypusher['mode'] == 'POST': + post_data_tmp = diypusher['postData'].replace( '{log}', log).replace("{t}", t) if headerstmp: headerstmp.pop('content-type', '') headerstmp.pop('Content-Type', '') - if (diypusher['postMethod'] == 'x-www-form-urlencoded'): + if diypusher['postMethod'] == 'x-www-form-urlencoded': headerstmp['Content-Type'] = "application/x-www-form-urlencoded; charset=UTF-8" - if (postDatatmp != ''): + if post_data_tmp != '': try: - postDatatmp = json.loads(postDatatmp) - except: - if isinstance(postDatatmp, str): - postDatatmp = postDatatmp.encode('utf-8') + post_data_tmp = json.loads(post_data_tmp) + except Exception as e: + logger_funcs.debug(e, exc_info=config.traceback_print) + if isinstance(post_data_tmp, str): + post_data_tmp = post_data_tmp.encode('utf-8') async with 
aiohttp.ClientSession(headers=headerstmp, conn_timeout=config.connect_timeout) as session: - async with session.post(curltmp, data=postDatatmp, verify_ssl=False, timeout=config.request_timeout) as res: + async with session.post(curltmp, data=post_data_tmp, verify_ssl=False, timeout=config.request_timeout) as res: r = await self.judge_res(res) else: headerstmp['Content-Type'] = "application/json; charset=UTF-8" - if (postDatatmp != ''): - postDatatmp = json.loads(postDatatmp) + if post_data_tmp != '': + post_data_tmp = json.loads(post_data_tmp) async with aiohttp.ClientSession(headers=headerstmp, conn_timeout=config.connect_timeout) as session: - async with session.post(curltmp, json=postDatatmp, verify_ssl=False, timeout=config.request_timeout) as res: + async with session.post(curltmp, json=post_data_tmp, verify_ssl=False, timeout=config.request_timeout) as res: r = await self.judge_res(res) - elif (diypusher['mode'] == 'GET'): + elif diypusher['mode'] == 'GET': async with aiohttp.ClientSession(headers=headerstmp, conn_timeout=config.connect_timeout) as session: async with session.get(curltmp, verify_ssl=False, timeout=config.request_timeout) as res: r = await self.judge_res(res) else: - raise Exception(u'模式未选择') + raise Exception('模式未选择') except Exception as e: r = traceback.format_exc() - logger_Funcs.error('Sent to Cus_Pusher error: %s', e) + logger_funcs.error('Sent to Cus_Pusher error: %s', e, exc_info=config.traceback_print) return e return r # 获取Access_Token async def get_access_token(self, qywx: dict): - access_url = '{qywxProxy}cgi-bin/gettoken?corpid={id}&corpsecret={secret}'.format( - qywxProxy=qywx[u'代理'], id=qywx[u'企业ID'], secret=qywx[u'应用密钥']) + access_url = f"{qywx['代理']}cgi-bin/gettoken?corpid={qywx['企业ID']}&corpsecret={qywx['应用密钥']}" async with aiohttp.ClientSession(conn_timeout=config.connect_timeout) as session: async with session.get(access_url, verify_ssl=False, timeout=config.request_timeout) as res: get_access_token_res = await res.json() 
return get_access_token_res # 上传临时素材,返回素材id - async def get_ShortTimeMedia(self, pic_url, access_token, qywxProxy): + async def get_short_time_media(self, pic_url, access_token, qywx_proxy): async with aiohttp.ClientSession(conn_timeout=config.connect_timeout) as session: if pic_url == config.push_pic: with open(os.path.join(os.path.abspath(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))), 'web', 'static', 'img', 'push_pic.png'), 'rb') as f: @@ -331,7 +327,7 @@ async def get_ShortTimeMedia(self, pic_url, access_token, qywxProxy): else: async with session.get(pic_url, verify_ssl=False, timeout=config.request_timeout) as res: img = await res.read() - url = f'{qywxProxy}cgi-bin/media/upload?access_token={access_token}&type=image' + url = f'{qywx_proxy}cgi-bin/media/upload?access_token={access_token}&type=image' async with session.post(url, data={'image': img}, verify_ssl=False, timeout=config.request_timeout) as res: await self.judge_res(res) _json = await res.json() @@ -345,56 +341,55 @@ async def qywx_pusher_send(self, qywx_token, title: str, log: str): qywx = {} tmp = qywx_token.split(';') if len(tmp) >= 3: - qywx[u'企业ID'] = tmp[0] - qywx[u'应用ID'] = tmp[1] - qywx[u'应用密钥'] = tmp[2] - qywx[u'图片'] = tmp[3] if len(tmp) >= 4 else '' - qywx[u'代理'] = tmp[4] if len( + qywx['企业ID'] = tmp[0] + qywx['应用ID'] = tmp[1] + qywx['应用密钥'] = tmp[2] + qywx['图片'] = tmp[3] if len(tmp) >= 4 else '' + qywx['代理'] = tmp[4] if len( tmp) >= 5 else 'https://qyapi.weixin.qq.com/' else: - raise Exception(u'企业微信Pusher获取AccessToken失败或参数不完整!') + raise Exception('企业微信Pusher获取AccessToken失败或参数不完整!') - if qywx[u'代理'][-1] != '/': - qywx[u'代理'] = qywx[u'代理'] + '/' - if qywx[u'代理'][:4] != 'http': - if qywx[u'代理'] == 'qyapi.weixin.qq.com/': - qywx[u'代理'] = u'https://{0}'.format(qywx[u'代理']) + if qywx['代理'][-1] != '/': + qywx['代理'] = qywx['代理'] + '/' + if qywx['代理'][:4] != 'http': + if qywx['代理'] == 'qyapi.weixin.qq.com/': + qywx['代理'] = f"https://{qywx['代理']}" else: - qywx[u'代理'] = 
u'http://{0}'.format(qywx[u'代理']) + qywx['代理'] = f"http://{qywx['代理']}" get_access_token_res = await self.get_access_token(qywx) - pic_url = config.push_pic if qywx[u'图片'] == '' else qywx[u'图片'] + pic_url = config.push_pic if qywx['图片'] == '' else qywx['图片'] if (get_access_token_res.get('access_token', '') != '' and get_access_token_res['errmsg'] == 'ok'): access_token = get_access_token_res["access_token"] - if utils.urlMatchWithLimit(pic_url) or utils.domainMatch(pic_url.split('/')[0]): - media_id = await self.get_ShortTimeMedia(pic_url, access_token, qywx[u'代理']) + if utils.url_match_with_limit(pic_url) or utils.domain_match(pic_url.split('/')[0]): + media_id = await self.get_short_time_media(pic_url, access_token, qywx['代理']) else: media_id = pic_url - msgUrl = '{0}cgi-bin/message/send?access_token={1}'.format( - qywx[u'代理'], access_token) - postData = {"touser": "@all", - "toparty": "@all", - "totag": "@all", - "msgtype": "mpnews", - "agentid": qywx[u'应用ID'], - "mpnews": { - "articles": [ - { + msg_url = f"{qywx['代理']}cgi-bin/message/send?access_token={access_token}" + post_data = {"touser": "@all", + "toparty": "@all", + "totag": "@all", + "msgtype": "mpnews", + "agentid": qywx['应用ID'], + "mpnews": { + "articles": [ + { "title": title, "digest": log.replace("\\r\\n", "\n"), "content": log.replace("\\r\\n", "
"), "author": "QD框架", "content_source_url": config.domain, "thumb_media_id": media_id - } - ] - }, - "safe": 0, - "enable_id_trans": 0, - "enable_duplicate_check": 0, - "duplicate_check_interval": 1800 - } + } + ] + }, + "safe": 0, + "enable_id_trans": 0, + "enable_duplicate_check": 0, + "duplicate_check_interval": 1800 + } async with aiohttp.ClientSession(conn_timeout=config.connect_timeout) as session: - async with session.post(msgUrl, json=postData, verify_ssl=False, timeout=config.request_timeout) as res: + async with session.post(msg_url, json=post_data, verify_ssl=False, timeout=config.request_timeout) as res: r = await self.judge_res(res) _json = await res.json() if _json.get('errmsg', '') == 'ok' and _json.get('errcode', 0) == 0: @@ -406,7 +401,7 @@ async def qywx_pusher_send(self, qywx_token, title: str, log: str): except Exception as e: r = traceback.format_exc() - logger_Funcs.error('Sent to QYWX Pusher error: %s', e) + logger_funcs.error('Sent to QYWX Pusher error: %s', e, exc_info=config.traceback_print) return e return r @@ -416,21 +411,20 @@ async def qywx_webhook_send(self, qywx_webhook, title: str, log: str): qywx = {} tmp = qywx_webhook.split(';') if len(tmp) >= 1: - qywx[u'Webhook'] = tmp[0] + qywx['Webhook'] = tmp[0] else: - raise Exception(u'企业微信WebHook获取AccessToken失败或参数不完整!') + raise Exception('企业微信WebHook获取AccessToken失败或参数不完整!') log = log.replace("\\r\\n", "\n") - msgUrl = "https://qyapi.weixin.qq.com/cgi-bin/webhook/send?key={0}".format( - qywx[u'Webhook']) - postData = {"msgtype": "text", - "text": { - "content": f"{title}\n{log}" - } - } + msg_url = f"https://qyapi.weixin.qq.com/cgi-bin/webhook/send?key={qywx['Webhook']}" + post_data = {"msgtype": "text", + "text": { + "content": f"{title}\n{log}" + } + } async with aiohttp.ClientSession(conn_timeout=config.connect_timeout) as session: - async with session.post(msgUrl, json=postData, verify_ssl=False, timeout=config.request_timeout) as res: + async with session.post(msg_url, 
json=post_data, verify_ssl=False, timeout=config.request_timeout) as res: r = await self.judge_res(res) _json = await res.json() if _json.get('errmsg', '') == 'ok' and _json.get('errcode', 0) == 0: @@ -439,59 +433,58 @@ async def qywx_webhook_send(self, qywx_webhook, title: str, log: str): raise Exception(_json['errmsg']) except Exception as e: r = traceback.format_exc() - logger_Funcs.error('Sent to QYWX WebHook error: %s', e) + logger_funcs.error('Sent to QYWX WebHook error: %s', e, exc_info=config.traceback_print) return e return r async def sendmail(self, email, title, content: str, sql_session=None): if not config.domain: r = '请配置框架域名 domain, 以启用邮箱推送功能!' - logger_Funcs.error('Send mail error: %s', r) + logger_funcs.error('Send mail error: %s', r) return Exception(r) user = await self.db.user.get(email=email, fields=('id', 'email', 'email_verified', 'nickname'), sql_session=sql_session) if user['email'] and user['email_verified']: try: content = content.replace('\\r\\n', '\n') await utils.send_mail(to=email, - subject=u"在网站{0} {1}".format( - config.domain, title), + subject=f"在网站{config.domain} {title}", text=content, shark=True) except Exception as e: - logger_Funcs.error('Send mail error: %r', e) + logger_funcs.error('Send mail error: %r', e, exc_info=config.traceback_print) -class cal(object): +class Cal: def __init__(self): pass - def calNextTs(self, envs): + def cal_next_ts(self, envs): r = {"r": "True"} try: - if (envs['mode'] == 'ontime'): - t = '{0} {1}'.format(envs['date'], envs['time']) - elif (envs['mode'] == 'cron'): + if envs['mode'] == 'ontime': + t = f"{envs['date']} {envs['time']}" + elif envs['mode'] == 'cron': cron = croniter.croniter( envs['cron_val'], datetime.datetime.now()) t = cron.get_next(datetime.datetime).strftime( "%Y-%m-%d %H:%M:%S") else: - raise Exception(u'参数错误') + raise Exception('参数错误') d = datetime.datetime.strptime(t, "%Y-%m-%d %H:%M:%S").timetuple() ts = int(time.mktime(d)) - if ('randsw' in envs): + if 'randsw' in envs: if 
(envs['sw'] and envs['randsw']): r_ts = random.randint(int(envs['tz1']), int(envs['tz2'])) ts = ts + r_ts - if ('cron_sec' in envs): + if 'cron_sec' in envs: r_ts = 0 if (envs['cron_sec'] == '') else int(envs['cron_sec']) ts = ts + r_ts r['ts'] = ts except Exception as e: r['r'] = e - logger_Funcs.error('Calculate Next Timestamp error: %s', r['r']) + logger_funcs.error('Calculate Next Timestamp error: %s', r['r'], exc_info=config.traceback_print) return r diff --git a/libs/log.py b/libs/log.py index 90b0949c74a..07b41877d0b 100644 --- a/libs/log.py +++ b/libs/log.py @@ -14,13 +14,15 @@ from config import debug -default_level = logging.DEBUG if debug else logging.INFO +DEFAULT_LEVEL = logging.DEBUG if debug else logging.INFO -class Log(object): + +class Log: ''' 封装后的logging ''' - def __init__(self , logger = None, logger_level = default_level, log_dir_path = None, channel_level = default_level): + + def __init__(self , logger=None, logger_level=DEFAULT_LEVEL, log_dir_path=None, channel_level=DEFAULT_LEVEL): ''' 指定保存日志的文件路径,日志级别,以及调用文件 将日志存入到指定的文件中 @@ -28,9 +30,9 @@ def __init__(self , logger = None, logger_level = default_level, log_dir_path = # 创建一个logger logging.basicConfig() - if logger is None or isinstance(logger,str): + if logger is None or isinstance(logger, str): self.logger = logging.getLogger(logger) - elif isinstance(logger,logging.Logger): + elif isinstance(logger, logging.Logger): self.logger = logger self.logger.setLevel(logger_level) self.logger.propagate = False @@ -54,7 +56,7 @@ def __init__(self , logger = None, logger_level = default_level, log_dir_path = if log_dir_path: self.logger.propagate = True - self.log_name = os.path.join(log_dir_path,self.log_time + '.log') + self.log_name = os.path.join(log_dir_path, self.log_time + '.log') fh = logging.FileHandler(self.log_name, 'a', encoding='utf-8') fh.setFormatter(formatter) self.logger.addHandler(fh) diff --git a/libs/mcrypto.py b/libs/mcrypto.py index 558dfab3ad3..84fbcfbd4fd 100644 --- 
a/libs/mcrypto.py +++ b/libs/mcrypto.py @@ -4,20 +4,28 @@ # Author: Binux # http://binux.me # Created on 2014-08-07 21:01:31 +# pylint: disable=broad-exception-raised import base64 +import random +import re +import string +import sys from binascii import a2b_hex, b2a_hex +from collections import namedtuple -import umsgpack +import umsgpack # type: ignore from Crypto import Random from Crypto.Cipher import AES -from Crypto.Hash import SHA256 from Crypto.Util.Padding import pad, unpad -from pbkdf2 import PBKDF2 +from pbkdf2 import PBKDF2 # type: ignore import config +from libs.convert import to_bytes, to_text Crypto_random = Random.new() + + def password_hash(word, salt=None, iterations=config.pbkdf2_iterations): if salt is None: salt = Crypto_random.read(16) @@ -30,6 +38,7 @@ def password_hash(word, salt=None, iterations=config.pbkdf2_iterations): return umsgpack.packb([rawhash, salt, iterations]) + def aes_encrypt(word, key=config.aes_key, iv=None, output='base64', padding=True, padding_style='pkcs7', mode=AES.MODE_CBC, no_packb=False): if iv is None: iv = Crypto_random.read(16) @@ -38,7 +47,7 @@ def aes_encrypt(word, key=config.aes_key, iv=None, output='base64', padding=True word = umsgpack.packb(word) if padding: - word = pad(word,AES.block_size,padding_style) + word = pad(word, AES.block_size, padding_style) if mode in [AES.MODE_ECB, AES.MODE_CTR]: aes = AES.new(key, mode) @@ -55,6 +64,7 @@ def aes_encrypt(word, key=config.aes_key, iv=None, output='base64', padding=True return ciphertext return umsgpack.packb([ciphertext, iv]) + def aes_decrypt(word, key=config.aes_key, iv=None, input='base64', padding=True, padding_style='pkcs7', mode=AES.MODE_CBC, no_packb=False): if iv is None and not no_packb: word, iv = umsgpack.unpackb(word) @@ -76,36 +86,29 @@ def aes_decrypt(word, key=config.aes_key, iv=None, input='base64', padding=True, while word: try: return umsgpack.unpackb(word) - except umsgpack.ExtraData: + except umsgpack.ExtraData: # pylint: disable=no-member 
word = word[:-1] elif padding: - return unpad(word,AES.block_size,padding_style).decode('utf-8') + return unpad(word, AES.block_size, padding_style).decode('utf-8') -import random -import re -import string -import sys -from collections import namedtuple - -from libs.convert import to_bytes, to_text - DEFAULT_PASSWORD_LENGTH = 20 -ascii_lowercase = 'abcdefghijklmnopqrstuvwxyz' -ascii_uppercase = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' -ascii_letters = ascii_lowercase + ascii_uppercase -digits = '0123456789' -DEFAULT_PASSWORD_CHARS = to_text(ascii_letters + digits + ".,:-_", errors='strict') # characters included in auto-generated passwords +ASCII_LOWERCASE = 'abcdefghijklmnopqrstuvwxyz' +ASCII_UPPERCASE = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' +ASCII_LETTERS = ASCII_LOWERCASE + ASCII_UPPERCASE +DIGITS = '0123456789' +DEFAULT_PASSWORD_CHARS = to_text(ASCII_LETTERS + DIGITS + ".,:-_", errors='strict') # characters included in auto-generated passwords PASSLIB_E = CRYPT_E = None HAS_CRYPT = PASSLIB_AVAILABLE = False try: - import passlib - import passlib.hash - from passlib.utils.handlers import HasRawSalt, PrefixWrapper + import passlib # type: ignore + import passlib.hash # type: ignore + from passlib.utils.handlers import HasRawSalt # type: ignore + from passlib.utils.handlers import PrefixWrapper try: - from passlib.utils.binary import bcrypt64 + from passlib.utils.binary import bcrypt64 # type: ignore except ImportError: - from passlib.utils import bcrypt64 + from passlib.utils import bcrypt64 # type: ignore PASSLIB_AVAILABLE = True except Exception as e: PASSLIB_E = e @@ -125,13 +128,13 @@ def random_password(length=DEFAULT_PASSWORD_LENGTH, chars=DEFAULT_PASSWORD_CHARS letters, ascii digits, and these symbols ``.,:-_`` ''' if not isinstance(chars, str): - raise Exception('%s (%s) is not a text_type' % (chars, type(chars))) + raise Exception(f'{chars} ({type(chars)}) is not a text_type') if seed is None: random_generator = random.SystemRandom() else: random_generator = 
random.Random(seed) - return u''.join(random_generator.choice(chars) for dummy in range(length)) + return ''.join(random_generator.choice(chars) for dummy in range(length)) def random_salt(length=8): @@ -139,7 +142,7 @@ def random_salt(length=8): """ # Note passlib salt values must be pure ascii so we can't let the user # configure this - salt_chars = string.ascii_letters + string.digits + u'./' + salt_chars = string.ascii_letters + string.digits + './' return random_password(length=length, chars=salt_chars) @@ -155,6 +158,7 @@ class BaseHash(object): def __init__(self, algorithm): self.algorithm = algorithm + class CryptHash(BaseHash): def __init__(self, algorithm): super(CryptHash, self).__init__(algorithm) @@ -166,7 +170,7 @@ def __init__(self, algorithm): raise Exception("crypt.crypt not supported on Mac OS X/Darwin, install passlib python module") if algorithm not in self.algorithms: - raise Exception("crypt.crypt does not support '%s' algorithm" % self.algorithm) + raise Exception(f"crypt.crypt does not support '{self.algorithm}' algorithm") self.algo_data = self.algorithms[algorithm] def hash(self, secret, salt=None, salt_size=None, rounds=None, ident=None): @@ -202,17 +206,17 @@ def _ident(self, ident): def _hash(self, secret, salt, rounds, ident): saltstring = "" if ident: - saltstring = "$%s" % ident + saltstring = f"${ident}" if rounds: - saltstring += "$rounds=%d" % rounds + saltstring += f"$rounds={rounds}" - saltstring += "$%s" % salt + saltstring += f"${salt}" # crypt.crypt on Python < 3.9 returns None if it cannot parse saltstring # On Python >= 3.9, it throws OSError. try: - result = Crypto.crypt(secret, saltstring) + result = Crypto.crypt(secret, saltstring) # pylint: disable=no-member orig_exc = None except OSError as e: result = None @@ -222,23 +226,24 @@ def _hash(self, secret, salt, rounds, ident): # as no password at all. 
if not result: raise Exception( - "crypt.crypt does not support '%s' algorithm" % self.algorithm, + f"crypt.crypt does not support '{self.algorithm}' algorithm" , orig_exc=orig_exc, ) return result + class PasslibHash(BaseHash): def __init__(self, algorithm): super(PasslibHash, self).__init__(algorithm) if not PASSLIB_AVAILABLE: - raise Exception("passlib must be installed and usable to hash with '%s'" % algorithm, orig_exc=PASSLIB_E) + raise Exception(f"passlib must be installed and usable to hash with '{algorithm}'" , orig_exc=PASSLIB_E) try: self.crypt_algo = getattr(passlib.hash, algorithm) - except Exception: - raise Exception("passlib does not support '%s' algorithm" % algorithm) + except Exception as e: + raise Exception(f"passlib does not support '{algorithm}' algorithm") from e def hash(self, secret, salt=None, salt_size=None, rounds=None, ident=None): salt = self._clean_salt(salt) @@ -301,13 +306,13 @@ def _hash(self, secret, salt, salt_size, rounds, ident): elif hasattr(self.crypt_algo, 'encrypt'): result = self.crypt_algo.encrypt(secret, **settings) else: - raise Exception("installed passlib version %s not supported" % passlib.__version__) + raise Exception(f"installed passlib version {passlib.__version__} not supported") # passlib.hash should always return something or raise an exception. # Still ensure that there is always a result. # Otherwise an empty password might be assumed by some modules, like the user module. if not result: - raise Exception("failed to hash with algorithm '%s'" % self.algorithm) + raise Exception(f"failed to hash with algorithm '{self.algorithm}'") # Hashes from passlib.hash should be represented as ascii strings of hex # digits so this should not traceback. If it's not representable as such @@ -315,6 +320,7 @@ def _hash(self, secret, salt, salt_size, rounds, ident): # impact calling code. 
return to_text(result, errors='strict') + def passlib_or_crypt(secret, algorithm, salt=None, salt_size=None, rounds=None, ident=None): if PASSLIB_AVAILABLE: return PasslibHash(algorithm).hash(secret, salt=salt, salt_size=salt_size, rounds=rounds, ident=ident) diff --git a/libs/parse_url.py b/libs/parse_url.py index eeb9379e9e0..f240c7adb75 100644 --- a/libs/parse_url.py +++ b/libs/parse_url.py @@ -5,18 +5,84 @@ # http://www.a76yyyy.cn # Created on 2022-03-14 12:00:00 -import re +from urllib.parse import urlparse def parse_url(url): if not url: return None - result = re.match('((?P(https?|socks5h?)+)://)?((?P[^:@/]+)(:(?P[^@/]+))?@)?(?P[^:@/]+):(?P\d+)', url) - return None if not result else { - 'scheme': result.group('scheme'), - 'host': result.group('host'), - 'port': int(result.group('port')), - 'username': result.group('username'), - 'password': result.group('password'), + + result = urlparse(url) + + # 检查是否成功解析出 netloc,如果没有,则返回 None + if not result.netloc: + return None + + # href = f"{parsed_url.scheme}://{parsed_url.netloc}{parsed_url.path}" # 考虑简化处理href为基本格式 + + return { + 'scheme': result.scheme, + 'host': result.hostname, + 'port': result.port or None, + 'username': result.username, + 'password': result.password or None, 'href': str(url) } + + +# if __name__ == '__main__': +# # 测试URL列表 +# test_urls = [ +# 'http://example.com/sasa', +# 'https://user:pass@example.com/sasa', +# 'socks5h://another.example.org', +# 'socks5://noauth@proxyserver', +# 'http://127.0.0.1', +# 'https://[2001:db8::1]', +# 'ftp://ftp.example.net', + +# # 包含端口号 +# 'http://example.com:80', +# 'https://user:pass@example.com:443', +# 'socks5h://another.example.org:1080', +# 'socks5://noauth@192.168.1.1:1080', +# 'http://127.0.0.1:8080', +# 'https://[2001:db8::1]:443', + +# # 无 schema +# 'example.com', +# 'user:pass@example.com', +# '127.0.0.1', +# '[2001:db8::1]', + +# # 错误或不完整的URL示例 +# 'http://:80', +# 'http://example.com:', +# 'http:///path', +# 'http://user:@example.com', +# ] + +# # 
import re +# for url in test_urls: +# # regex_result = re.match(r'((?P(https?|socks5h?)+)://)?((?P[^:@/]+)(:(?P[^@/]+))?@)?(?P[^:@/]+):(?P\d+)', url) + +# parse_result = urlparse(url) + +# print(f"URL: {url}") + +# # if regex_result: +# # print("Regex Matched components:") +# # for key, value in regex_result.groupdict().items(): +# # if value: +# # print(f"{key}: {value}") +# # else: +# # print(f"URL '{url}' did not match the regex pattern.\n") + +# print("Parsed with urlparse:") +# print(f"Scheme: {parse_result.scheme}") +# print(f"Host: {parse_result.hostname}") +# print(f"Port: {parse_result.port}") +# print(f"Username: {parse_result.username}") +# print(f"Password: {parse_result.password}") + +# print("\n") diff --git a/libs/safe_eval.py b/libs/safe_eval.py index d86681dea7f..535638e9bd0 100644 --- a/libs/safe_eval.py +++ b/libs/safe_eval.py @@ -14,7 +14,6 @@ # - safe_eval in lp:~xrg/openobject-server/optimize-5.0 # - safe_eval in tryton http://hg.tryton.org/hgwebdir.cgi/trytond/rev/bbb5f73319ad -# import multiprocessing import ctypes import dis import functools @@ -24,11 +23,12 @@ import types from types import CodeType -from opcode import HAVE_ARGUMENT, opmap, opname +# import multiprocessing +import dateutil +from opcode import opmap, opname import config - -from .log import Log +from libs.log import Log __all__ = ['test_expr', 'safe_eval', 'const_eval'] @@ -40,10 +40,13 @@ _UNSAFE_ATTRIBUTES = ['f_builtins', 'f_globals', 'f_locals', 'gi_frame', 'gi_code', 'co_code', 'func_globals'] -def to_opcodes(opnames, _opmap=opmap): + +def to_opcodes(opnames, _opmap=opmap): # pylint: disable=dangerous-default-value for x in opnames: if x in _opmap: yield _opmap[x] + + # opcodes which absolutely positively must not be usable in safe_eval, # explicitly subtracted from all sets of valid opcodes just in case _BLACKLIST = set(to_opcodes([ @@ -60,7 +63,7 @@ def to_opcodes(opnames, _opmap=opmap): # stack manipulations 'POP_TOP', 'ROT_TWO', 'ROT_THREE', 'ROT_FOUR', 'DUP_TOP', 
'DUP_TOP_TWO', 'LOAD_CONST', - 'RETURN_VALUE', # return the result of the literal/expr evaluation + 'RETURN_VALUE', # return the result of the literal/expr evaluation # literal collections 'BUILD_LIST', 'BUILD_MAP', 'BUILD_TUPLE', 'BUILD_SET', # 3.6: literal map with constant keys https://bugs.python.org/issue27140 @@ -74,7 +77,7 @@ def to_opcodes(opnames, _opmap=opmap): # operations which are both binary and inplace, same order as in doc' _operations = [ - 'POWER', 'MULTIPLY', # 'MATRIX_MULTIPLY', # matrix operator (3.5+) + 'POWER', 'MULTIPLY', # 'MATRIX_MULTIPLY', # matrix operator (3.5+) 'FLOOR_DIVIDE', 'TRUE_DIVIDE', 'MODULO', 'ADD', 'SUBTRACT', 'LSHIFT', 'RSHIFT', 'AND', 'XOR', 'OR', ] @@ -126,9 +129,9 @@ def to_opcodes(opnames, _opmap=opmap): # replacement of POP_JUMP_IF_TRUE and POP_JUMP_IF_FALSE 'POP_JUMP_FORWARD_IF_FALSE', 'POP_JUMP_FORWARD_IF_TRUE', 'POP_JUMP_BACKWARD_IF_FALSE', 'POP_JUMP_BACKWARD_IF_TRUE', - #replacement of JUMP_ABSOLUTE + # replacement of JUMP_ABSOLUTE 'JUMP_BACKWARD', - #replacement of JUMP_IF_NOT_EXC_MATCH + # replacement of JUMP_IF_NOT_EXC_MATCH 'CHECK_EXC_MATCH', # new opcodes 'RETURN_GENERATOR', @@ -164,6 +167,7 @@ def result(self): return self.queue.get() ''' + class TerminableThread(threading.Thread): """a thread that can be stopped by forcing an exception in the execution context""" @@ -191,6 +195,7 @@ def run(self): ctypes.py_object(self.exception_cls)) self.target_thread.join(self.repeat_sec) + def timeout(sec, raise_sec=1): """ timeout decorator @@ -207,14 +212,14 @@ def wrapped_func(*args, **kwargs): err_msg = f'Function {func.__name__} timed out after {sec} seconds' if sys.platform != 'win32': - ''' - 非Windows系统, 一般对signal都有全面的支持。 - 为了实现Timeout功能, 可以通过以下几步: - 1. 选用SIGALRM信号来代表Timeout事件; - 2. 将抛出超时异常的事件与SIGALRM信号的触发绑定; - 3. 设定在给定时间后触发SIGALRM信号; - 4. 运行目标函数(如超时, 会自动被信号绑定的异常事件打断)。 - ''' + # ''' + # 非Windows系统, 一般对signal都有全面的支持。 + # 为了实现Timeout功能, 可以通过以下几步: + # 1. 选用SIGALRM信号来代表Timeout事件; + # 2. 
将抛出超时异常的事件与SIGALRM信号的触发绑定; + # 3. 设定在给定时间后触发SIGALRM信号; + # 4. 运行目标函数(如超时, 会自动被信号绑定的异常事件打断)。 + # ''' def _handle_timeout(signum, frame): raise TimeoutError(err_msg) @@ -228,19 +233,19 @@ def _handle_timeout(signum, frame): return result else: - ''' - Windows系统对signal的支持很差, 因此不能通过上述方法实现。 - 新的实现思路是:开启子线程来运行目标函数, 主线程计时, 超时后中止子线程。 - - 子线程不能向主线程返回执行结果, 但是可以和主线程共享内存。 - 因此, 我们创建result和exception两个mutable变量, 分别用来存储子线程的运行结果和异常。 - 在子线程结束后, 主线程可以直接通过这两个变量获取线程执行结果并顺利返回。 - - 子线程运行中所有的异常, 均要保留到子线程结束后, 在主线程中处理。 - 如果直接在子线程中抛出异常, timeout装饰器的使用者将无法通过try/except捕获并处理该异常。 - 因此, 子线程运行的函数完全被try/except包住, 通过mutable变量交由主线程处理。 - 如果出现FuncTimeoutError, 说明是超时所致, 在子线程内不做捕获。 - ''' + # ''' + # Windows系统对signal的支持很差, 因此不能通过上述方法实现。 + # 新的实现思路是:开启子线程来运行目标函数, 主线程计时, 超时后中止子线程。 + + # 子线程不能向主线程返回执行结果, 但是可以和主线程共享内存。 + # 因此, 我们创建result和exception两个mutable变量, 分别用来存储子线程的运行结果和异常。 + # 在子线程结束后, 主线程可以直接通过这两个变量获取线程执行结果并顺利返回。 + + # 子线程运行中所有的异常, 均要保留到子线程结束后, 在主线程中处理。 + # 如果直接在子线程中抛出异常, timeout装饰器的使用者将无法通过try/except捕获并处理该异常。 + # 因此, 子线程运行的函数完全被try/except包住, 通过mutable变量交由主线程处理。 + # 如果出现FuncTimeoutError, 说明是超时所致, 在子线程内不做捕获。 + # ''' class FuncTimeoutError(TimeoutError): def __init__(self): TimeoutError.__init__(self, err_msg) @@ -251,7 +256,7 @@ def run_func(): try: res = func(*args, **kwargs) except FuncTimeoutError: - _logger.debug(f'Function {func.__name__} timed out after {sec} seconds') + _logger.debug('Function %s timed out after %s seconds', func.__name__, sec) except Exception as e: exception.append(e) else: @@ -266,7 +271,7 @@ def run_func(): # a timeout thread keeps alive after join method, terminate and raise TimeoutError exc = type('TimeoutError', FuncTimeoutError.__bases__, dict(FuncTimeoutError.__dict__)) thread.terminate(exception_cls=FuncTimeoutError, repeat_sec=raise_sec) - raise TimeoutError(err_msg) + raise exc(err_msg) elif exception: # if exception occurs during the thread running, raise it raise exception[0] @@ -274,35 +279,36 @@ def run_func(): # if the thread successfully finished, return its 
results return result[0] - ''' - # 使用子进程方式实现超时功能 - now = time.time() - proc = RunnableProcessing(func, *args, **kwargs) - proc.start() - proc.join(sec) - if proc.is_alive(): - if force_kill: - proc.terminate() - runtime = time.time() - now - raise TimeoutException('timed out after {0} seconds'.format(runtime)) - assert proc.done() - success, result = proc.result() - if success: - return result - else: - raise result - ''' + # # 使用子进程方式实现超时功能 + # now = time.time() + # proc = RunnableProcessing(func, *args, **kwargs) + # proc.start() + # proc.join(sec) + # if proc.is_alive(): + # if force_kill: + # proc.terminate() + # runtime = time.time() - now + # raise TimeoutException('timed out after {0} seconds'.format(runtime)) + # assert proc.done() + # success, result = proc.result() + # if success: + # return result + # else: + # raise result return wrapped_func return decorator + @timeout(config.unsafe_eval_timeout) def unsafe_eval(*args, **kwargs) : - return eval(*args, **kwargs) + return eval(*args, **kwargs) # pylint: disable=eval-used + class BadCompilingInput(Exception): """ The user tried to input something which might cause compiler to slow down. """ + def check_for_pow(expr): """ Python evaluates power operator during the compile time if its on constants. You can do CPU / memory burning attack with ``2**999999999999999999999**9999999999999``. 
@@ -312,6 +318,7 @@ def check_for_pow(expr): if "**" in expr: raise BadCompilingInput("Power operation is not allowed") + def assert_no_dunder_name(code_obj, expr): """ assert_no_dunder_name(code_obj, expr) -> None Asserts that the code object does not refer to any "dunder name" @@ -329,7 +336,8 @@ def assert_no_dunder_name(code_obj, expr): """ for name in code_obj.co_names: if "__" in name or name in _UNSAFE_ATTRIBUTES: - raise NameError('Access to forbidden name %r (%r)' % (name, expr)) + raise NameError(f'Access to forbidden name {name!r} ({expr!r})') + def assert_valid_codeobj(allowed_codes, code_obj, expr): """ Asserts that the provided code object validates against the bytecode @@ -353,12 +361,13 @@ def assert_valid_codeobj(allowed_codes, code_obj, expr): # when loading /web according to line_profiler code_codes = {i.opcode for i in dis.get_instructions(code_obj)} if not allowed_codes >= code_codes: - raise ValueError("forbidden opcode(s) in %r: %s" % (expr, ', '.join(opname[x] for x in (code_codes - allowed_codes)))) + raise ValueError(f"forbidden opcode(s) in {expr!r}: {', '.join(opname[x] for x in (code_codes - allowed_codes))}") for const in code_obj.co_consts: if isinstance(const, CodeType): assert_valid_codeobj(allowed_codes, const, 'lambda') + def test_expr(expr, allowed_codes, mode="eval", filename=None): """test_expr(expression, allowed_codes[, mode[, filename]]) -> code_object Test that the expression contains only the allowed opcodes. 
@@ -377,7 +386,7 @@ def test_expr(expr, allowed_codes, mode="eval", filename=None): except (SyntaxError, TypeError, ValueError): raise except Exception as e: - raise ValueError('"%s" while compiling\n%r' % (e, expr)) + raise ValueError(f'"{e}" while compiling\n{expr!r}') from e assert_valid_codeobj(allowed_codes, code_obj, expr) return code_obj @@ -400,6 +409,7 @@ def const_eval(expr): c = test_expr(expr, _CONST_OPCODES) return unsafe_eval(c) + def expr_eval(expr): """expr_eval(expression) -> value Restricted Python expression evaluation @@ -418,6 +428,7 @@ def expr_eval(expr): c = test_expr(expr, _EXPR_OPCODES) return unsafe_eval(c) + def _import(name, globals=None, locals=None, fromlist=None, level=-1): if globals is None: globals = {} @@ -428,6 +439,8 @@ def _import(name, globals=None, locals=None, fromlist=None, level=-1): if name in _ALLOWED_MODULES: return __import__(name, globals, locals, level) raise ImportError(name) + + _BUILTINS = { '__import__': _import, 'True': True, @@ -466,6 +479,8 @@ def _import(name, globals=None, locals=None, fromlist=None, level=-1): 'zip': zip, 'Exception': Exception, } + + def safe_eval(expr, globals_dict=None, locals_dict=None, mode="eval", nocopy=False, locals_builtins=False, filename=None): """safe_eval(expression[, globals[, locals[, mode[, nocopy]]]]) -> result System-restricted Python expression evaluation @@ -516,7 +531,9 @@ def safe_eval(expr, globals_dict=None, locals_dict=None, mode="eval", nocopy=Fal except ZeroDivisionError: raise except Exception as e: - raise ValueError('%s: "%s"' % (type(e), e)) + raise ValueError(f'{type(e)}: "{e}"') from e + + def test_python_expr(expr, mode="eval"): try: test_expr(expr, _SAFE_OPCODES, mode=mode) @@ -529,7 +546,7 @@ def test_python_expr(expr, mode="eval"): 'offset': err.args[1][2], 'error_line': err.args[1][3], } - msg = "%s : %s at line %d\n%s" % (type(err).__name__, error['message'], error['lineno'], error['error_line']) + msg = f"{type(err).__name__} : {error['message']} at 
line {error['lineno']}\n{error['error_line']}" else: msg = err return msg @@ -550,7 +567,8 @@ def check_values(d): """) return d -class wrap_module: + +class WrapModule: def __init__(self, module, attributes): """Helper for wrapping a package/module to expose selected attributes :param module: the actual package/module to wrap, as returned by ``import `` @@ -565,25 +583,25 @@ def __init__(self, module, attributes): for attrib in attributes: target = getattr(module, attrib) if isinstance(target, types.ModuleType): - target = wrap_module(target, attributes[attrib]) + target = WrapModule(target, attributes[attrib]) setattr(self, attrib, target) def __repr__(self): return self._repr + # dateutil submodules are lazy so need to import them for them to "exist" -import dateutil mods = ['parser', 'relativedelta', 'rrule', 'tz'] for mod in mods: - __import__('dateutil.%s' % mod) -datetime = wrap_module(__import__('datetime'), ['date', 'datetime', 'time', 'timedelta', 'timezone', 'tzinfo', 'MAXYEAR', 'MINYEAR']) -dateutil = wrap_module(dateutil, { + __import__(f'dateutil.{mod}') +datetime = WrapModule(__import__('datetime'), ['date', 'datetime', 'time', 'timedelta', 'timezone', 'tzinfo', 'MAXYEAR', 'MINYEAR']) +dateutil = WrapModule(dateutil, { # type: ignore mod: getattr(dateutil, mod).__all__ for mod in mods }) -json = wrap_module(__import__('json'), ['loads', 'dumps']) -time = wrap_module(__import__('time'), ['time', 'strptime', 'strftime', 'sleep']) -zoneinfo = wrap_module(__import__('zoneinfo'), [ +json = WrapModule(__import__('json'), ['loads', 'dumps']) +time = WrapModule(__import__('time'), ['time', 'strptime', 'strftime', 'sleep']) +zoneinfo = WrapModule(__import__('zoneinfo'), [ 'ZoneInfo', 'available_timezones', -]) \ No newline at end of file +]) diff --git a/libs/utils.py b/libs/utils.py index d9ca416ce73..b321b0994a2 100644 --- a/libs/utils.py +++ b/libs/utils.py @@ -4,6 +4,7 @@ # Author: Binux # http://binux.me # Created on 2014-08-07 22:00:27 +# pylint: 
disable=broad-exception-raised import base64 import datetime @@ -26,64 +27,72 @@ from urllib import parse as urllib_parse import charset_normalizer -import umsgpack +import umsgpack # type: ignore from Crypto.Cipher import AES from faker import Faker from jinja2.filters import do_float, do_int from jinja2.runtime import Undefined from jinja2.utils import generate_lorem_ipsum, url_quote from requests.utils import get_encoding_from_headers -from tornado import gen, httpclient +from tornado import httpclient import config from libs.convert import to_bytes, to_native, to_text +from libs.log import Log from libs.mcrypto import aes_decrypt, aes_encrypt, passlib_or_crypt -from .log import Log - try: - from hashlib import md5 as _md5 + from hashlib import md5 as _md5 # pylint: disable=ungrouped-imports except ImportError: # Assume we're running in FIPS mode here - _md5 = None + _md5 = None # type: ignore + +logger_util = Log('QD.Http.Util').getlogger() -logger_Util = Log('QD.Http.Util').getlogger() def ip2int(addr): try: return struct.unpack("!I", socket.inet_aton(addr))[0] - except: + except Exception as e: + logger_util.debug(e, exc_info=config.traceback_print) return int(ipaddress.ip_address(addr)) -def ip2varbinary(addr:str, version:int): + +def ip2varbinary(addr: str, version: int): if version == 4: return socket.inet_aton(addr) if version == 6: - return socket.inet_pton(socket.AF_INET6,addr) + return socket.inet_pton(socket.AF_INET6, addr) + def is_lan(ip): try: return ipaddress.ip_address(ip.strip()).is_private except Exception as e: + logger_util.debug(e, exc_info=config.traceback_print) return False + def int2ip(addr): try: return socket.inet_ntoa(struct.pack("!I", addr)) - except: + except Exception as e: + logger_util.debug(e, exc_info=config.traceback_print) return str(ipaddress.ip_address(addr)) -def varbinary2ip(addr:bytes or int or str): + +def varbinary2ip(addr: bytes | int | str): if isinstance(addr, int): return int2ip(addr) if isinstance(addr, str): addr 
= addr.encode('utf-8') if len(addr) == 4: - return socket.inet_ntop(socket.AF_INET,addr) + return socket.inet_ntop(socket.AF_INET, addr) if len(addr) == 16: - return socket.inet_ntop(socket.AF_INET6,addr) + return socket.inet_ntop(socket.AF_INET6, addr) + -def isIP(addr = None): +def is_ip(addr=None): if addr: p = re.compile(r''' ((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?) # IPv4 @@ -102,17 +111,19 @@ def isIP(addr = None): return 0 return 0 + def urlmatch(url): reobj = re.compile(r"""(?xi)\A ([a-z][a-z0-9+\-.]*://)? # Scheme ([a-z0-9\-._~%]+ # domain or IPv4 host |\[[a-z0-9\-._~%!$&'()*+,;=:]+\]) # IPv6+ host (:[0-9]+)? """ # :port - ) + ) match = reobj.search(url) return match.group() -def urlMatchWithLimit(url): + +def url_match_with_limit(url): ip_middle_octet = r"(?:\.(?:1?\d{1,2}|2[0-4]\d|25[0-5]))" ip_last_octet = r"(?:\.(?:0|[1-9]\d?|1\d\d|2[0-4]\d|25[0-5]))" @@ -206,7 +217,8 @@ def urlMatchWithLimit(url): return match.group() return '' -def domainMatch(domain): + +def domain_match(domain): reobj = re.compile( r'^(?:[a-zA-Z0-9]' # First character of the domain r'(?:[a-zA-Z0-9-_]{0,61}[A-Za-z0-9])?\.)' # Sub domain + hostname @@ -232,16 +244,18 @@ def wrapper(*args, **kwargs): return wrapper + def method_cache(fn): @functools.wraps(fn) def wrapper(self, *args, **kwargs): + # pylint: disable=protected-access tmp = {} - for i in kwargs: - if i == 'sql_session': + for k, v in kwargs.items(): + if k == 'sql_session': continue - tmp[i] = kwargs[i] + tmp[k] = v if not hasattr(self, '_cache'): - self._cache = dict() + self._cache = {} key = umsgpack.packb((args, tmp)) if key not in self._cache: self._cache[key] = fn(self, *args, **kwargs) @@ -249,8 +263,10 @@ def wrapper(self, *args, **kwargs): return wrapper -#full_format=True 的时候是具体时间,full_format=False就是几秒钟几分钟几小时时间格式----此处为模糊时间格式模式 -def format_date(date, gmt_offset=time.timezone/60, relative=True, shorter=False, full_format=True): +# full_format=True 
的时候是具体时间,full_format=False就是几秒钟几分钟几小时时间格式----此处为模糊时间格式模式 + + +def format_date(date, gmt_offset=time.timezone / 60, relative=True, shorter=False, full_format=True): """Formats the given date (which should be GMT). By default, we return a relative time (e.g., "2 minutes ago"). You @@ -272,10 +288,10 @@ def format_date(date, gmt_offset=time.timezone/60, relative=True, shorter=False, local_yesterday = local_now - datetime.timedelta(hours=24) local_tomorrow = local_now + datetime.timedelta(hours=24) if date > now: - later = u"后" + later = "后" date, now = now, date else: - later = u"前" + later = "前" difference = now - date seconds = difference.seconds days = difference.days @@ -284,25 +300,25 @@ def format_date(date, gmt_offset=time.timezone/60, relative=True, shorter=False, if not full_format: if relative and days == 0: if seconds < 50: - return u"%(seconds)d 秒" % {"seconds": seconds} + later + return f"{seconds} 秒" + later if seconds < 50 * 60: minutes = round(seconds / 60.0) - return u"%(minutes)d 分钟" % {"minutes": minutes} + later + return f"{minutes} 分钟" + later hours = round(seconds / (60.0 * 60)) - return u"%(hours)d 小时" % {"hours": hours} + later + return f"{hours} 小时" + later if days == 0: format = "%(time)s" elif days == 1 and local_date.day == local_yesterday.day and \ - relative and later == u'前': - format = u"昨天" if shorter else u"昨天 %(time)s" + relative and later == '前': + format = "昨天" if shorter else "昨天 %(time)s" elif days == 1 and local_date.day == local_tomorrow.day and \ - relative and later == u'后': - format = u"明天" if shorter else u"明天 %(time)s" - #elif days < 5: - #format = "%(weekday)s" if shorter else "%(weekday)s %(time)s" + relative and later == '后': + format = "明天" if shorter else "明天 %(time)s" + # elif days < 5: + # format = "%(weekday)s" if shorter else "%(weekday)s %(time)s" elif days < 334: # 11mo, since confusing for same month last year format = "%(month_name)s-%(day)s" if shorter else \ "%(month_name)s-%(day)s %(time)s" @@ -311,7 +327,7 
@@ def format_date(date, gmt_offset=time.timezone/60, relative=True, shorter=False, format = "%(year)s-%(month_name)s-%(day)s" if shorter else \ "%(year)s-%(month_name)s-%(day)s %(time)s" - str_time = "%d:%02d:%02d" % (local_date.hour, local_date.minute, local_date.second) + str_time = f"{local_date.hour:02d}:{local_date.minute:02d}:{local_date.second:02d}" return format % { "month_name": local_date.month, @@ -321,24 +337,28 @@ def format_date(date, gmt_offset=time.timezone/60, relative=True, shorter=False, "time": str_time } + def utf8(value): if isinstance(value, str): return value.encode('utf8') return value + def conver2unicode(value, html_unescape=False): if not isinstance(value, str): try: value = value.decode() - except : + except Exception as e: + logger_util.debug(e, exc_info=config.traceback_print) value = str(value) - tmp = bytes(value,'unicode_escape').decode('utf-8').replace(r'\u',r'\\u').replace(r'\\\u',r'\\u') - tmp = bytes(tmp,'utf-8').decode('unicode_escape') - tmp = tmp.encode('utf-8').replace(b'\xc2\xa0',b'\xa0').decode('unicode_escape') + tmp = bytes(value, 'unicode_escape').decode('utf-8').replace(r'\u', r'\\u').replace(r'\\\u', r'\\u') + tmp = bytes(tmp, 'utf-8').decode('unicode_escape') + tmp = tmp.encode('utf-8').replace(b'\xc2\xa0', b'\xa0').decode('unicode_escape') if html_unescape: tmp = html.unescape(tmp) return tmp + def urlencode_with_encoding( value: Union[str, Mapping[str, Any], Iterable[Tuple[str, Any]]], encoding: str = "utf-8", @@ -374,6 +394,7 @@ def urlencode_with_encoding( f"{url_quote(k, for_qs=True)}={url_quote(v, for_qs=True)}" for k, v in items ) + def to_bool(value): ''' return a bool for the arg ''' if value is None or isinstance(value, bool): @@ -384,7 +405,8 @@ def to_bool(value): return True return False -async def send_mail(to, subject, text=None, html=None, shark=False, _from=u"QD提醒 ".format(config.domain)): + +async def send_mail(to, subject, text=None, html=None, shark=False, _from=f"QD提醒 "): if not 
config.mailgun_key: subtype = 'html' if html else 'plain' await _send_mail(to, subject, html or text or '', subtype) @@ -411,7 +433,7 @@ async def send_mail(to, subject, text=None, html=None, shark=False, _from=u"QD req = httpclient.HTTPRequest( method="POST", - url="https://api.mailgun.net/v3/%s/messages" % config.mailgun_domain, + url=f"https://api.mailgun.net/v3/{config.mailgun_domain}/messages", auth_username="api", auth_password=config.mailgun_key, body=urllib_parse.urlencode(body) @@ -419,18 +441,19 @@ async def send_mail(to, subject, text=None, html=None, shark=False, _from=u"QD res = await client.fetch(req) return res + async def _send_mail(to, subject, text=None, subtype='html'): if not config.mail_smtp: - logger_Util.info('no smtp') + logger_util.info('no smtp') return msg = MIMEText(text, _subtype=subtype, _charset='utf-8') msg['Subject'] = subject msg['From'] = config.mail_from msg['To'] = to try: - logger_Util.info('send mail to {}'.format(to)) + logger_util.info('send mail to %s', to) if config.mail_port: - if config.mail_ssl or config.mail_port in [465,587]: + if config.mail_ssl or config.mail_port in [465, 587]: s = smtplib.SMTP_SSL(config.mail_smtp, config.mail_port) else: s = smtplib.SMTP(config.mail_smtp, config.mail_port) @@ -447,7 +470,7 @@ async def _send_mail(to, subject, text=None, subtype='html'): s.sendmail(config.mail_from, to, msg.as_string()) s.close() except Exception as e: - logger_Util.error('send mail error {}'.format(str(e))) + logger_util.error('send mail error: %s', e, exc_info=config.traceback_print) return @@ -490,8 +513,9 @@ def find_encoding(content, headers=None): try: encoding = get_encodings_from_content(content) encoding = encoding and encoding[0] or None - except: - if isinstance(content,bytes): + except Exception as e: + logger_util.debug(e, exc_info=config.traceback_print) + if isinstance(content, bytes): return encoding or 'utf-8' if encoding and encoding.lower() == 'gb2312': @@ -508,7 +532,7 @@ def decode(content, 
headers=None): try: return content.decode(encoding, 'replace') except Exception as e: - logger_Util.error('utils.decode:',e) + logger_util.error('utils.decode: %s', e, exc_info=config.traceback_print) return None @@ -531,24 +555,29 @@ def secure_hash_s(value, hash_func=sha1): digest.update(value) return digest.hexdigest() + def md5string(value): - if not _md5: - raise ValueError('MD5 not available. Possibly running in FIPS mode') + if _md5 is None: + raise ValueError('MD5 not available. Possibly running in FIPS mode') return secure_hash_s(value, _md5) def get_random(min_num, max_num, unit): random_num = random.uniform(min_num, max_num) - result = "%.{0}f".format(int(unit)) % random_num + # result = "%.{0}f".format(int(unit)) % random_num + result = f"{random_num:.{int(unit)}f}" return result + def random_fliter(*args, **kwargs): try: result = get_random(*args, **kwargs) - except: + except Exception as e: + logger_util.debug(e, exc_info=config.traceback_print) result = random.choice(*args, **kwargs) return result + def randomize_list(mylist, seed=None): try: mylist = list(mylist) @@ -557,38 +586,41 @@ def randomize_list(mylist, seed=None): r.shuffle(mylist) else: random.shuffle(mylist) - except Exception: - raise + except Exception as e: + logger_util.debug(e, exc_info=config.traceback_print) + raise e return mylist def get_date_time(date=True, time=True, time_difference=0): - if isinstance(date,str): - date=int(date) - if isinstance(time,str): - time=int(time) - if isinstance(time_difference,str): + if isinstance(date, str): + date = int(date) + if isinstance(time, str): + time = int(time) + if isinstance(time_difference, str): time_difference = int(time_difference) now_date = datetime.datetime.today() + datetime.timedelta(hours=time_difference) if date: if time: - return str(now_date).split('.')[0] + return str(now_date).split('.', maxsplit=1)[0] else: return str(now_date.date()) elif time: - return str(now_date.time()).split('.')[0] + return 
str(now_date.time()).split('.', maxsplit=1)[0] else: return "" + def strftime(string_format, second=None): ''' return a date string using string. See https://docs.python.org/3/library/time.html#time.strftime for format ''' if second is not None: try: second = float(second) - except Exception: - raise Exception('Invalid value for epoch value (%s)' % second) + except Exception as e: + raise Exception(f'Invalid value for epoch value ({second})') from e return time.strftime(string_format, time.localtime(second)) + def regex_replace(value='', pattern='', replacement='', count=0, ignorecase=False, multiline=False): ''' Perform a `re.sub` returning a string ''' @@ -602,6 +634,7 @@ def regex_replace(value='', pattern='', replacement='', count=0, ignorecase=Fals _re = re.compile(pattern, flags=flags) return _re.sub(replacement, value, count) + def regex_findall(value, pattern, ignorecase=False, multiline=False): ''' Perform re.findall and return the list of matches ''' @@ -614,6 +647,7 @@ def regex_findall(value, pattern, ignorecase=False, multiline=False): flags |= re.M return str(re.findall(pattern, value, flags)) + def regex_search(value, pattern, *args, **kwargs): ''' Perform re.search and return the list of matches or a backref ''' @@ -646,6 +680,7 @@ def regex_search(value, pattern, *args, **kwargs): items.append(match.group(item)) return str(items) + def ternary(value, true_val, false_val, none_val=None): ''' value ? 
true_val : false_val ''' if (value is None or isinstance(value, Undefined)) and none_val is not None: @@ -655,12 +690,13 @@ def ternary(value, true_val, false_val, none_val=None): else: return false_val + def regex_escape(value, re_type='python'): value = to_text(value, errors='surrogate_or_strict', nonstring='simplerepr') - '''Escape all regular expressions special characters from STRING.''' + # '''Escape all regular expressions special characters from STRING.''' if re_type == 'python': return re.escape(value) - elif re_type == 'posix_basic': + if re_type == 'posix_basic': # list of BRE special chars: # https://en.wikibooks.org/wiki/Regular_Expressions/POSIX_Basic_Regular_Expressions return regex_replace(value, r'([].[^$*\\])', r'\\\1') @@ -669,15 +705,16 @@ def regex_escape(value, re_type='python'): # but different from PCRE. It's possible that re.escape would work here. # https://remram44.github.io/regex-cheatsheet/regex.html#programs elif re_type == 'posix_extended': - raise Exception('Regex type (%s) not yet implemented' % re_type) + raise Exception(f'Regex type ({re_type}) not yet implemented') else: - raise Exception('Invalid regex type (%s)' % re_type) + raise Exception(f'Invalid regex type ({re_type})') + def timestamp(type='int'): - if type=='float': + if type == 'float': return time.time() - else: - return int(time.time()) + return int(time.time()) + def add(*args): result = 0 @@ -688,9 +725,9 @@ def add(*args): result += float(i) else: return - return '{:f}'.format(result) - else: - return result + return f"{result:f}" + return result + def sub(*args): result = 0 @@ -701,9 +738,9 @@ def sub(*args): result -= float(i) else: return - return '{:f}'.format(result) - else: - return result + return f"{result:f}" + return result + def multiply(*args): result = 0 @@ -714,9 +751,9 @@ def multiply(*args): result *= float(i) else: return - return '{:f}'.format(result) - else: - return result + return f"{result:f}" + return result + def divide(*args): result = 0 @@ 
-727,28 +764,30 @@ def divide(*args): result /= float(i) else: return - return '{:f}'.format(result) - else: - return result + return f"{result:f}" + return result -def is_num(value:str=''): + +def is_num(value: str = ''): value = str(value) - if value.count('.') ==1: + if value.count('.') == 1: tmp = value.split('.') return tmp[0].lstrip('-').isdigit() and tmp[1].isdigit() else: return value.lstrip('-').isdigit() + def get_hash(value, hashtype='sha1'): try: h = hashlib.new(hashtype) except Exception as e: # hash is not supported? - raise Exception(e) + raise e h.update(to_bytes(value, errors='surrogate_or_strict')) return h.hexdigest() + def get_encrypted_password(password, hashtype='sha512', salt=None, salt_size=None, rounds=None, ident=None): passlib_mapping = { 'md5': 'md5_crypt', @@ -760,37 +799,43 @@ def get_encrypted_password(password, hashtype='sha512', salt=None, salt_size=Non hashtype = passlib_mapping.get(hashtype, hashtype) return passlib_or_crypt(password, hashtype, salt=salt, salt_size=salt_size, rounds=rounds, ident=ident) + def to_uuid(value, namespace=uuid.NAMESPACE_URL): uuid_namespace = namespace if not isinstance(uuid_namespace, uuid.UUID): try: uuid_namespace = uuid.UUID(namespace) except (AttributeError, ValueError) as e: - raise Exception("Invalid value '%s' for 'namespace': %s" % (to_native(namespace), to_native(e))) + raise Exception(f"Invalid value '{to_native(namespace)}' for 'namespace': {to_native(e)}") from e # uuid.uuid5() requires bytes on Python 2 and bytes or text or Python 3 return to_text(uuid.uuid5(uuid_namespace, to_native(value, errors='surrogate_or_strict'))) + def mandatory(value, msg=None): ''' Make a variable mandatory ''' if isinstance(value, Undefined): + # pylint: disable=protected-access if value._undefined_name is not None: - name = "'%s' " % to_text(value._undefined_name) + name = f"'{to_text(value._undefined_name)}' " else: name = '' if msg is not None: raise Exception(to_native(msg)) else: - raise 
Exception("Mandatory variable %s not defined." % name) + raise Exception(f"Mandatory variable {name} not defined.") return value + def b64encode(value, encoding='utf-8'): return to_text(base64.b64encode(to_bytes(value, encoding=encoding, errors='surrogate_or_strict'))) + def b64decode(value, encoding='utf-8'): return to_text(base64.b64decode(to_bytes(value, errors='surrogate_or_strict')), encoding=encoding) + def switch_mode(mode): mode = mode.upper() if mode == 'CBC': @@ -816,19 +861,25 @@ def switch_mode(mode): elif mode == 'EAX': return AES.MODE_EAX else: - raise Exception('Invalid AES mode: %s' % mode) + raise Exception(f'Invalid AES mode: {mode}') + -def _aes_encrypt(word:str, key:str, mode='CBC', iv:str=None, output_format='base64', padding=True, padding_style='pkcs7', no_packb=True): +def _aes_encrypt(word: str, key: str, mode='CBC', iv: str | bytes | None = None, output_format='base64', padding=True, padding_style='pkcs7', no_packb=True): if key is None: raise Exception('key is required') + if isinstance(iv, str): + iv = iv.encode("utf-8") mode = switch_mode(mode) - return aes_encrypt(word.encode("utf-8"), key.encode("utf-8"), mode=mode, iv=iv.encode("utf-8"), output=output_format, padding=padding, padding_style=padding_style, no_packb=no_packb) + return aes_encrypt(word.encode("utf-8"), key.encode("utf-8"), mode=mode, iv=iv, output=output_format, padding=padding, padding_style=padding_style, no_packb=no_packb) + -def _aes_decrypt(word:str, key:str, mode='CBC', iv:str=None, input_format='base64', padding=True, padding_style='pkcs7', no_packb=True): +def _aes_decrypt(word: str, key: str, mode='CBC', iv: str | bytes | None = None, input_format='base64', padding=True, padding_style='pkcs7', no_packb=True): if key is None: raise Exception('key is required') + if isinstance(iv, str): + iv = iv.encode("utf-8") mode = switch_mode(mode) - return aes_decrypt(word.encode("utf-8"), key.encode("utf-8"), mode=mode, iv=iv.encode("utf-8"), input=input_format, 
padding=padding, padding_style=padding_style, no_packb=no_packb) + return aes_decrypt(word.encode("utf-8"), key.encode("utf-8"), mode=mode, iv=iv, input=input_format, padding=padding, padding_style=padding_style, no_packb=no_packb) jinja_globals = { diff --git a/mypy.ini b/mypy.ini new file mode 100644 index 00000000000..3499749a640 --- /dev/null +++ b/mypy.ini @@ -0,0 +1,5 @@ +[mypy-umsgpack.*] +ignore_missing_imports = True + +[ddddocr-umsgpack.*] +ignore_missing_imports = True diff --git a/qd.py b/qd.py index 0f1107b440a..92c09f612e8 100644 --- a/qd.py +++ b/qd.py @@ -7,21 +7,38 @@ import asyncio import json +import socket import sys import config - -config.display_import_warning = False from libs.fetcher import Fetcher from libs.log import Log from run import start_server -logger_QD = Log('QD').getlogger() +config.display_import_warning = False + +logger_qd = Log('QD').getlogger() + +# 判断 端口 是否被占用 + + +def check_port(port): + s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + try: + s.connect(('127.0.0.1', port)) + s.shutdown(2) + logger_qd.debug('Port %s is used' , port) + return False + except Exception: + logger_qd.debug('Port %s is available' , port) + return True + def usage(): - print("{} tpl.har [--key=value] [env.json]".format(sys.argv[0])) + print(f"{sys.argv[0]} tpl.har [--key=value] [env.json]") sys.exit(1) + if __name__ == '__main__': if len(sys.argv) < 3: usage() @@ -30,54 +47,41 @@ def usage(): tpl_file = sys.argv[1] try: # deepcode ignore PT: tpl_file is a file - tpl = json.load(open(tpl_file,encoding='utf-8')) + tpl = json.load(open(tpl_file, encoding='utf-8')) except Exception as e: - logger_QD.error(e) + logger_qd.error(e, exc_info=config.traceback_print) usage() # load env variables = {} env = {} - env_file = None + ENV_FILE = None for each in sys.argv[2:]: if each.startswith('--'): key, value = each.split('=', 1) key = key.lstrip('--') variables[key] = value else: - env_file = each - if env_file: + ENV_FILE = each + if ENV_FILE: try: # 
deepcode ignore PT: env_file is a file - env = json.load(open(env_file,encoding='utf-8')) + env = json.load(open(ENV_FILE, encoding='utf-8')) except Exception as e: - logger_QD.error(e) + logger_qd.error(e, exc_info=config.traceback_print) usage() if 'variables' not in env or not isinstance(env['variables'], dict) \ or 'session' not in env: env = { - 'variables': env, - 'session': [], - } + 'variables': env, + 'session': [], + } env['variables'].update(variables) - # 判断 端口 是否被占用 - import re - import socket - def check_port(port): - s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - try: - s.connect(('127.0.0.1', port)) - s.shutdown(2) - logger_QD.debug('Port %s is used' % port) - return False - except: - logger_QD.debug('Port %s is available' % port) - return True - manual_start = check_port(config.port) - if manual_start: - logger_QD.info('QD service is not running on port %s' % config.port) - logger_QD.info('QD service will be started on port %s' % config.port) + MANUAL_START = check_port(config.port) + if MANUAL_START: + logger_qd.info('QD service is not running on port %s' , config.port) + logger_qd.info('QD service will be started on port %s' , config.port) # 创建新进程, 以执行 run 中的 main 异步函数 import multiprocessing p = multiprocessing.Process(target=start_server) @@ -90,24 +94,24 @@ def check_port(port): import time time.sleep(1) else: - logger_QD.info('QD service is running on port %s' % config.port) + logger_qd.info('QD service is running on port %s' , config.port) # do fetch ioloop = asyncio.new_event_loop() asyncio.set_event_loop(ioloop) - result:asyncio.Task = asyncio.ensure_future(Fetcher().do_fetch(tpl, env), loop=ioloop) - logger_QD.info('QD start to do fetch: %s' % tpl_file) + result: asyncio.Task = asyncio.ensure_future(Fetcher().do_fetch(tpl, env), loop=ioloop) + logger_qd.info('QD start to do fetch: %s', tpl_file) ioloop.run_until_complete(result) ioloop.stop() try: - result, _ = result.result() + env, _ = result.result() except Exception as e: - 
print('QD failed!', e) + logger_qd.error('QD failed: %s', e, exc_info=config.traceback_print) else: - print('QD success! Results:\n', result.get('variables', {}).get('__log__', '').replace('\\r\\n','\r\n')) + logger_qd.info('QD success! Results:\n %s', env.get('variables', {}).get('__log__', '').replace('\\r\\n', '\r\n')) - if manual_start: + if MANUAL_START: p.terminate() p.join() - logger_QD.info('QD service is ended. ') + logger_qd.info('QD service is ended. ') diff --git a/requirements.txt b/requirements.txt index 653f1b034bd..6591ab07425 100644 --- a/requirements.txt +++ b/requirements.txt @@ -7,43 +7,42 @@ -i https://mirrors.cloud.tencent.com/pypi/simple/ --extra-index-url https://pypi.org/simple -aiofiles==23.2.1 -aiohttp==3.8.6 -aiomysql==0.2.0 +aiofiles==23.2.1; python_version >= '3.7' +aiohttp==3.9.1; python_version >= '3.8' +aiomysql==0.2.0; python_version >= '3.7' aiosignal==1.3.1; python_version >= '3.7' -aiosqlite==0.19.0 -async-timeout==4.0.3; python_version >= '3.7' -attrs==23.1.0; python_version >= '3.7' -certifi==2023.7.22; python_version >= '3.6' -cffi==1.16.0; python_version >= '3.8' -charset-normalizer==3.3.2 +aiosqlite==0.19.0; python_version >= '3.7' +attrs==23.2.0; python_version >= '3.7' +certifi==2023.11.17; python_version >= '3.6' +cffi==1.16.0; platform_python_implementation != 'PyPy' +charset-normalizer==3.3.2; python_full_version >= '3.7.0' colorama==0.4.6; sys_platform == 'win32' -croniter==2.0.1 -cryptography==41.0.5 -faker==20.0.0 -frozenlist==1.4.0; python_version >= '3.8' -greenlet==3.0.1; python_version >= '3' and platform_machine == 'aarch64' or (platform_machine == 'ppc64le' or (platform_machine == 'x86_64' or (platform_machine == 'amd64' or (platform_machine == 'AMD64' or (platform_machine == 'win32' or platform_machine == 'WIN32'))))) -idna==3.4; python_version >= '3.5' +croniter==2.0.1; python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3' +cryptography==42.0.1; python_version >= '3.7' +faker==22.5.1; 
python_version >= '3.8' +frozenlist==1.4.1; python_version >= '3.8' +greenlet==3.0.3; python_version >= '3' and platform_machine == 'aarch64' or (platform_machine == 'ppc64le' or (platform_machine == 'x86_64' or (platform_machine == 'amd64' or (platform_machine == 'AMD64' or (platform_machine == 'win32' or platform_machine == 'WIN32'))))) +idna==3.6; python_version >= '3.5' incremental==22.10.0 -jinja2==3.1.2 -markupsafe==2.1.3; python_version >= '3.7' +jinja2==3.1.3; python_version >= '3.7' +markupsafe==2.1.4; python_version >= '3.7' multidict==6.0.4; python_version >= '3.7' passlib==1.7.4 pbkdf2==1.3 pycparser==2.21 -pycryptodome==3.19.0 +pycryptodome==3.20.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' pymysql==1.1.0; python_version >= '3.7' -pysocks==1.7.1 +pysocks==1.7.1; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3' python-dateutil==2.8.2; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3' pytz==2023.3.post1 -redis==5.0.1 -requests==2.31.0 +redis==5.0.1; python_version >= '3.7' +requests==2.31.0; python_version >= '3.7' six==1.16.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3' -sqlalchemy[asyncio]==1.4.50 -tornado==6.3.3 -tzdata==2023.3 +sqlalchemy[asyncio]==1.4.51; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5' +tornado==6.4; python_version >= '3.8' +tzdata==2023.4; python_version >= '2' u-msgpack-python==2.8.0 -urllib3==2.0.7; python_version >= '3.7' -yarl==1.9.2; python_version >= '3.7' +urllib3==2.1.0; python_version >= '3.8' +yarl==1.9.4; python_version >= '3.7' # ddddocr==1.4.7 # pycurl==7.45.2 diff --git a/run.py b/run.py index a9cf71f1e87..0a385025c9f 100755 --- a/run.py +++ b/run.py @@ -21,20 +21,21 @@ from web.app import Application from worker import BatchWorker, QueueWorker +if sys.getdefaultencoding() != 'utf-8': + import importlib + importlib.reload(sys) + def start_server(): - if sys.getdefaultencoding() != 
'utf-8': - import importlib - importlib.reload(sys) # init logging logger = Log().getlogger() - logger_QD = Log('QD.Run').getlogger() + logger_qd = Log('QD.Run').getlogger() if config.debug: channel = logging.StreamHandler(sys.stderr) channel.setFormatter(tornado.log.LogFormatter()) channel.setLevel(logging.WARNING) - logger_QD.addHandler(channel) + logger_qd.addHandler(channel) if not config.accesslog: tornado.log.access_log.disabled = True @@ -57,12 +58,12 @@ def start_server(): converter = db_converter.DBconverter(database) loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) - run = asyncio.ensure_future(converter.ConvertNewType(database) , loop=loop) + run = asyncio.ensure_future(converter.convert_new_type(database) , loop=loop) loop.run_until_complete(run) - default_version = json.load(open(os.path.join(os.path.dirname(__file__), 'version.json'),'r', encoding='utf-8'))['version'] - App= Application(database, default_version) - http_server = HTTPServer(App, xheaders=True) + default_version = json.load(open(os.path.join(os.path.dirname(__file__), 'version.json'), 'r', encoding='utf-8'))['version'] + app = Application(database, default_version) + http_server = HTTPServer(app, xheaders=True) http_server.bind(port, config.bind) if config.multiprocess: http_server.start(num_processes=0) @@ -78,20 +79,20 @@ def start_server(): worker = BatchWorker(database) PeriodicCallback(worker, config.check_task_loop).start() else: - raise Exception('worker_method must be Queue or Batch, please check config!') + raise RuntimeError('worker_method must be Queue or Batch, please check config!') except Exception as e: - logger.exception('worker start error!') - raise KeyboardInterrupt() + logger.exception('worker start error: %s', e) + raise KeyboardInterrupt() from e - logger_QD.info("Http Server started on %s:%s", config.bind, port) + logger_qd.info("Http Server started on %s:%s", config.bind, port) io_loop.start() except KeyboardInterrupt : - logger_QD.info("Http Server 
is being manually interrupted... ") + logger_qd.info("Http Server is being manually interrupted... ") loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) run = asyncio.ensure_future(engine.dispose() , loop=loop) loop.run_until_complete(run) - logger_QD.info("Http Server is ended. ") + logger_qd.info("Http Server is ended. ") if __name__ == "__main__": diff --git a/web.py b/web.py index 527ca1c59bf..939f95b0de2 100644 --- a/web.py +++ b/web.py @@ -5,10 +5,12 @@ # http://binux.me # Created on 2014-07-30 12:38:34 +import logging import sys from tornado.httpserver import HTTPServer from tornado.ioloop import IOLoop +from tornado.log import LogFormatter as TornadoLogFormatter import config from db import DB @@ -17,16 +19,13 @@ if __name__ == "__main__": # init logging - logger_Web = Log('QD.Web').getlogger() + logger_web = Log('QD.Web').getlogger() if not config.debug: - import logging - - import tornado.log channel = logging.StreamHandler(sys.stderr) - channel.setFormatter(tornado.log.LogFormatter()) + channel.setFormatter(TornadoLogFormatter()) channel.setLevel(logging.WARNING) - logger_Web.addHandler(channel) + logger_web.addHandler(channel) if len(sys.argv) > 2 and sys.argv[1] == '-p' and sys.argv[2].isdigit(): port = int(sys.argv[2]) @@ -37,5 +36,5 @@ http_server.bind(port, config.bind) http_server.start() - logger_Web.info("http server started on %s:%s", config.bind, port) + logger_web.info("http server started on %s:%s", config.bind, port) IOLoop.instance().start() diff --git a/web/__init__.py b/web/__init__.py index b9b067998f9..1f1f7ece3d7 100644 --- a/web/__init__.py +++ b/web/__init__.py @@ -4,8 +4,3 @@ # Author: Binux # http://binux.me # Created on 2014-07-30 12:22:47 - -import os -import sys - -sys.path.append(os.path.abspath(os.path.dirname(os.path.dirname(__file__)))) diff --git a/web/app.py b/web/app.py index 30c4212048e..dc43c52528e 100644 --- a/web/app.py +++ b/web/app.py @@ -17,31 +17,33 @@ from libs.log import Log from web.handlers import 
handlers, ui_methods, ui_modules -logger_Web = Log('QD.Web').getlogger() +logger_web = Log('QD.Web').getlogger() + + class Application(tornado.web.Application): - def __init__(self, db:DB, default_version=None): + def __init__(self, db: DB, default_version=None): settings = dict( - template_path = os.path.join(os.path.dirname(__file__), "tpl"), - static_path = os.path.join(os.path.dirname(__file__), "static"), - static_url_prefix = config.static_url_prefix, - debug = config.debug, - gzip = config.gzip, - autoreload = config.autoreload, - - cookie_secret = config.cookie_secret, - login_url = '/login', - websocket_ping_interval = config.websocket.ping_interval, - websocket_ping_timeout = config.websocket.ping_timeout, - websocket_max_message_size = config.websocket.max_message_size, - ) + template_path=os.path.join(os.path.dirname(__file__), "tpl"), + static_path=os.path.join(os.path.dirname(__file__), "static"), + static_url_prefix=config.static_url_prefix, + debug=config.debug, + gzip=config.gzip, + autoreload=config.autoreload, + + cookie_secret=config.cookie_secret, + login_url='/login', + websocket_ping_interval=config.websocket.ping_interval, + websocket_ping_timeout=config.websocket.ping_timeout, + websocket_max_message_size=config.websocket.max_message_size, + ) super(Application, self).__init__(handlers, **settings) self.jinja_env = jinja2.Environment( - loader=jinja2.FileSystemLoader(settings['template_path']), - extensions=['jinja2.ext.loopcontrols', ], - autoescape=True, - auto_reload=config.autoreload) + loader=jinja2.FileSystemLoader(settings['template_path']), + extensions=['jinja2.ext.loopcontrols', ], + autoescape=True, + auto_reload=config.autoreload) self.db = db self.version = default_version or 'Debug' @@ -53,5 +55,5 @@ def __init__(self, db:DB, default_version=None): 'format_date': utils.format_date, 'varbinary2ip': utils.varbinary2ip, 'version': self.version, - }) + }) self.jinja_env.filters.update(ui_methods) diff --git 
a/web/docs/guide/deployment.md b/web/docs/guide/deployment.md index ccb49d00cca..fce3e705813 100644 --- a/web/docs/guide/deployment.md +++ b/web/docs/guide/deployment.md @@ -176,6 +176,7 @@ python ./chrole.py your@email.address admin |NOT_RETRY_CODE|No|[See configuration for details](https://github.com/qd-today/qd/blob/master/config.py)...|[See configuration for details](https://github.com/qd-today/qd/blob/master/config.py)...| |EMPTY_RETRY|No|True|[See configuration for details](https://github.com/qd-today/qd/blob/master/config.py)...| |USER0ISADMIN|No|True|The first registered user is an administrator, False to close| +|NOTEPAD_LIMIT|No|20|The maximum number of notepads per user; the default is 20| |EXTRA_ONNX_NAME|No|""|Customize the ONNX file name in the config directory
(do not fill in the ".onnx" suffix)
Separate multiple onnx file names with "\|"| |EXTRA_CHARSETS_NAME|No|""|Custom ONNX in the config directory corresponds to the custom charsets.json file name
(do not fill in the ".json" suffix)
Multiple json file names are separated by "\|"| |WS_PING_INTERVAL|No|5|WebSocket ping interval, the default is 5 seconds| diff --git a/web/docs/zh_CN/guide/deployment.md b/web/docs/zh_CN/guide/deployment.md index 4525b57c9be..b90cec45c8d 100644 --- a/web/docs/zh_CN/guide/deployment.md +++ b/web/docs/zh_CN/guide/deployment.md @@ -176,6 +176,7 @@ CURL_CONTENT_LENGTH|否|True|是否允许Curl使用Headers中自定义Content-Le NOT_RETRY_CODE|否|[详见配置](https://github.com/qd-today/qd/blob/master/config.py)...|[详见配置](https://github.com/qd-today/qd/blob/master/config.py)... EMPTY_RETRY|否|True|[详见配置](https://github.com/qd-today/qd/blob/master/config.py)... USER0ISADMIN|否|True|第一个注册用户为管理员,False关闭 +NOTEPAD_LIMIT|否|20|单个用户拥有记事本最大数量, 默认为 20 EXTRA_ONNX_NAME|否|""|config目录下自定义ONNX文件名
(不填 ".onnx" 后缀)
多个onnx文件名用"\|"分隔 EXTRA_CHARSETS_NAME|否|""|config目录下自定义ONNX对应自定义charsets.json文件名
(不填 ".json" 后缀)
多个json文件名用"\|"分隔 WS_PING_INTERVAL|No|5|WebSocket ping间隔, 单位为秒, 默认为 5s diff --git a/web/handlers/__init__.py b/web/handlers/__init__.py index c69bbc25d3c..561cca967da 100644 --- a/web/handlers/__init__.py +++ b/web/handlers/__init__.py @@ -6,10 +6,6 @@ # Created on 2012-12-15 16:15:50 import os -import sys - -sys.path.append(os.path.abspath(os.path.dirname(os.path.dirname(os.path.dirname(__file__))))) -from . import base handlers = [] ui_modules = {} @@ -23,10 +19,10 @@ modules.append(file[:-3]) for module in modules: - module = __import__('%s.%s' % (__package__, module), fromlist = ["handlers"]) - if hasattr(module, "handlers"): - handlers.extend(module.handlers) - if hasattr(module, "ui_modules"): - ui_modules.update(module.ui_modules) - if hasattr(module, "ui_methods"): - ui_methods.update(module.ui_methods) + module_object = __import__(f'{__package__}.{module}', fromlist=["handlers"]) + if hasattr(module_object, "handlers"): + handlers.extend(module_object.handlers) + if hasattr(module_object, "ui_modules"): + ui_modules.update(module_object.ui_modules) + if hasattr(module_object, "ui_methods"): + ui_methods.update(module_object.ui_methods) diff --git a/web/handlers/about.py b/web/handlers/about.py index 454695312f6..035de792d59 100644 --- a/web/handlers/about.py +++ b/web/handlers/about.py @@ -5,15 +5,18 @@ # http://binux.me # Created on 2014-08-08 21:06:02 -from .base import * +from tornado.web import addslash + +from web.handlers.base import BaseHandler class AboutHandler(BaseHandler): - @tornado.web.addslash + @addslash async def get(self): await self.render('about.html') return + handlers = [ - ('/about/?', AboutHandler), - ] + ('/about/?', AboutHandler), +] diff --git a/web/handlers/base.py b/web/handlers/base.py index b5f7764d260..28f5d2b8549 100644 --- a/web/handlers/base.py +++ b/web/handlers/base.py @@ -5,12 +5,13 @@ # http://binux.me # Created on 2012-12-15 16:16:38 -from typing import Optional + +from typing import Any, Awaitable, Mapping, 
MutableMapping, Union import jinja2 import tornado.web import tornado.websocket -import umsgpack +import umsgpack # type: ignore from tornado.web import HTTPError import config @@ -18,17 +19,19 @@ from libs import fetcher, utils from libs.log import Log -logger_Web_Handler = Log('QD.Web.Handler').getlogger() +logger_web_handler = Log('QD.Web.Handler').getlogger() __ALL__ = ['HTTPError', 'BaseHandler', 'BaseWebSocket', 'BaseUIModule', 'logger_Web_Handler', ] + class _BaseHandler(tornado.web.RequestHandler): application_export: set[str] = set(('db', )) - db:DB + db: DB + def __getattr__(self, key): if key in self.application_export: return getattr(self.application, key) - raise AttributeError('no such attr: %s' % key) + raise AttributeError(f'no such attr: {key}') def render_string(self, template_name, **kwargs): try: @@ -37,16 +40,16 @@ def render_string(self, template_name, **kwargs): raise e namespace = dict( - static_url=self.static_url, - xsrf_token=self.xsrf_token, - - handler=self, - request=self.request, - current_user=self.current_user, - locale=self.locale, - xsrf_form_html=self.xsrf_form_html, - reverse_url=self.reverse_url - ) + static_url=self.static_url, + xsrf_token=self.xsrf_token, + + handler=self, + request=self.request, + current_user=self.current_user, + locale=self.locale, + xsrf_form_html=self.xsrf_form_html, + reverse_url=self.reverse_url + ) namespace.update(kwargs) return template.render(namespace) @@ -64,7 +67,7 @@ def ip(self): @property def ip2varbinary(self): - return utils.ip2varbinary(self.request.remote_ip,utils.isIP(self.request.remote_ip)) + return utils.ip2varbinary(self.request.remote_ip, utils.is_ip(self.request.remote_ip)) def get_current_user(self): ret = self.get_secure_cookie('user', max_age_days=config.cookie_days) @@ -72,8 +75,9 @@ def get_current_user(self): return ret user = umsgpack.unpackb(ret) try: - user['isadmin'] = 'admin' in user['role'] if user['role'] else False - except: + user['isadmin'] = 'admin' in 
user['role'] if isinstance(user, Union[Mapping, MutableMapping]) and user.get('role') else False + except Exception as e: + logger_web_handler.debug(e, exc_info=config.traceback_print) return None return user @@ -92,6 +96,10 @@ def permission(self, obj, mode='r'): return True return False + def data_received(self, chunk: bytes) -> Awaitable[None] | None: + return super().data_received(chunk) + + class BaseHandler(_BaseHandler): application_export = set(('db', 'fetcher')) fetcher: fetcher.Fetcher @@ -115,7 +123,8 @@ def check_permission(self, obj, mode='r'): raise HTTPError(401) return obj -class BaseWebSocketHandler(_BaseHandler,tornado.websocket.WebSocketHandler): + +class BaseWebSocketHandler(_BaseHandler, tornado.websocket.WebSocketHandler): def prepare(self): if config.debug: return @@ -143,5 +152,10 @@ def check_permission(self, obj, mode='r'): def get_compression_options(self): return {} + def on_message(self, message: str | bytes) -> Awaitable[None] | None: + return super().on_message(message) + + class BaseUIModule(tornado.web.UIModule): - pass + def render(self, *args: Any, **kwargs: Any) -> str: # pylint: disable=useless-parent-delegation + return super().render(*args, **kwargs) diff --git a/web/handlers/har.py b/web/handlers/har.py index bc3e71c9ca9..7b17291e6d8 100644 --- a/web/handlers/har.py +++ b/web/handlers/har.py @@ -4,26 +4,25 @@ # Author: Binux # http://binux.me # Created on 2014-08-01 10:35:08 +# pylint: disable=broad-exception-raised -import asyncio -import functools import json import re import time from io import BytesIO -from typing import Iterable +from typing import Sequence -import umsgpack -from jinja2 import Environment, meta -from jinja2.nodes import Const, Filter, Getattr, List, Name, Tuple -from tornado import gen, httpclient +import tornado +from jinja2 import meta +from jinja2.nodes import Filter, Name +from tornado import httpclient +import config from libs import json_typing, utils from libs.fetcher import Fetcher from 
libs.parse_url import parse_url from libs.safe_eval import safe_eval - -from .base import * +from web.handlers.base import BaseHandler, logger_web_handler class HAREditor(BaseHandler): @@ -33,14 +32,14 @@ async def get(self, id=None): reponame = self.get_argument("reponame", tplurl[1]) if (reponame != '') and (harname != ''): - tpl = await self.db.pubtpl.list(filename = harname, - reponame = reponame, - fields=('id', 'name', 'content', 'comments')) - if (len(tpl) > 0): + tpl = await self.db.pubtpl.list(filename=harname, + reponame=reponame, + fields=('id', 'name', 'content', 'comments')) + if len(tpl) > 0: hardata = tpl[0]['content'] harnote = tpl[0]['comments'] else: - await self.render('tpl_run_failed.html', log=u'此模板不存在') + await self.render('tpl_run_failed.html', log='此模板不存在') return else: hardata = '' @@ -54,7 +53,7 @@ async def post(self, id): async with self.db.transaction() as sql_session: tpl = self.check_permission( - await self.db.tpl.get(id, fields=('id', 'userid', 'sitename', 'siteurl', 'banner', 'note', 'interval', 'har', 'variables', 'lock', 'init_env'), sql_session=sql_session)) + await self.db.tpl.get(id, fields=('id', 'userid', 'sitename', 'siteurl', 'banner', 'note', 'interval', 'har', 'variables', 'lock', 'init_env'), sql_session=sql_session)) tpl['har'] = await self.db.user.decrypt(tpl['userid'], tpl['har'], sql_session=sql_session) tpl['variables'] = json.loads(tpl['variables']) @@ -67,102 +66,131 @@ async def post(self, id): task_envs = await self.db.user.decrypt(user['id'], task['init_env'], sql_session=sql_session) envs.update(task_envs) - #await self.db.tpl.mod(id, atime=time.time(), sql_session=sql_session) + # await self.db.tpl.mod(id, atime=time.time(), sql_session=sql_session) await self.finish(dict( - filename = tpl['sitename'] or '未命名模板', - har = tpl['har'], - env = dict((x, envs[x] if x in envs else '') for x in tpl['variables']), - setting = dict( - sitename = tpl['sitename'], - siteurl = tpl['siteurl'], - note = tpl['note'], - 
banner = tpl['banner'], - interval = tpl['interval'] or '', - ), - readonly = not tpl['userid'] or not self.permission(tpl, 'w') or tpl['lock'], - )) + filename=tpl['sitename'] or '未命名模板', + har=tpl['har'], + env=dict((x, envs[x] if x in envs else '') for x in tpl['variables']), + setting=dict( + sitename=tpl['sitename'], + siteurl=tpl['siteurl'], + note=tpl['note'], + banner=tpl['banner'], + interval=tpl['interval'] or '', + ), + readonly=not tpl['userid'] or not self.permission(tpl, 'w') or tpl['lock'], + )) + class HARTest(BaseHandler): - async def post(self): + async def post(self) -> None: self.evil(+1) try: if 'json' in self.request.headers['Content-Type']: self.request.body = self.request.body.replace(b'\xc2\xa0', b' ') except Exception as e: - logger_Web_Handler.debug('HARTest Replace error: %s' % e) + logger_web_handler.debug('HARTest Replace error: %s', e) data: json_typing.HARTest = json.loads(self.request.body) - FOR_START = re.compile('{%\s*for\s+(\w+)\s+in\s+(\w+|list\([\s\S]*\)|range\([\s\S]*\))\s*%}').match(data['request']['url']) - WHILE_START = re.compile('{%\s*while\s+([\s\S]*)\s*%}').match(data['request']['url']) - IF_START = re.compile('{%\s*if\s+(.+)\s*%}').match(data['request']['url']) - ELSE_START = re.compile('{%\s*else\s*%}').match(data['request']['url']) - PARSE_END = re.compile('{%\s*end(for|if)\s*%}').match(data['request']['url']) + FOR_START = re.compile(r'{%\s*for\s+(\w+)\s+in\s+(\w+|list\([\s\S]*\)|range\([\s\S]*\))\s*%}').match(data['request']['url']) # pylint: disable=invalid-name + WHILE_START = re.compile(r'{%\s*while\s+([\s\S]*)\s*%}').match(data['request']['url']) # pylint: disable=invalid-name + IF_START = re.compile(r'{%\s*if\s+(.+)\s*%}').match(data['request']['url']) # pylint: disable=invalid-name + ELSE_START = re.compile(r'{%\s*else\s*%}').match(data['request']['url']) # pylint: disable=invalid-name + PARSE_END = re.compile(r'{%\s*end(for|if)\s*%}').match(data['request']['url']) # pylint: disable=invalid-name if FOR_START 
or WHILE_START or IF_START or ELSE_START or PARSE_END: - tmp = {'env':data['env'],'rule':data['rule']} - tmp['request'] = {'method': 'GET', 'url': 'api://util/unicode?content=', 'headers': [], 'cookies': []} + tmp = {'env': data['env'], 'rule': data['rule']} + tmp['request'] = {'method': 'GET', 'url': 'api://util/unicode?content=', 'headers': [], 'cookies': []} # type: ignore req, rule, env = self.fetcher.build_request(tmp) if FOR_START: _target = FOR_START.group(1) _from_var = FOR_START.group(2) _from = env['variables'].get(_from_var, []) try: - if _from_var.startswith('list(') or _from_var.startswith('range('): + if _from_var.startswith(('list(', 'range(')): _from = safe_eval(_from_var, env['variables']) - if not isinstance(_from, Iterable): - raise Exception('for循环只支持可迭代类型及变量') + if not isinstance(_from, Sequence): + raise Exception('for 循环只支持可迭代类型及变量') + env['variables']['loop_index0'] = str(env['variables'].get('loop_index0', 0)) + env['variables']['loop_index'] = str(env['variables'].get('loop_index', 1)) + env['variables']['loop_first'] = str(env['variables'].get('loop_first', True)) + env['variables']['loop_last'] = str(env['variables'].get('loop_last', False)) + env['variables']['loop_length'] = str(env['variables'].get('loop_length', len(_from))) + env['variables']['loop_revindex0'] = str(env['variables'].get('loop_revindex0', len(_from) - 1)) + env['variables']['loop_revindex'] = str(env['variables'].get('loop_revindex', len(_from))) + res = f'循环内赋值变量: {_target}, 循环列表变量: {_from_var}, 循环次数: {len(_from)}, \r\n循环列表内容: {list(_from)}.' 
+ code = 200 + except NameError as e: + logger_web_handler.debug('for 循环变量错误: %s', e, exc_info=config.traceback_print) + res = f'循环变量错误: {e}' + code = 500 + except ValueError as e: + code = 500 + if str(e).startswith(":"): + e_str = str(e).replace("", "NameError") + logger_web_handler.debug('for 循环变量错误: %s', e_str, exc_info=config.traceback_print) + res = f'循环变量错误: {e_str}' + else: + e_str = str(e).replace("", "ValueError") + logger_web_handler.debug('for 循环错误: %s', e_str, exc_info=config.traceback_print) + res = f'for 循环错误: {e_str}' except Exception as e: - raise e - env['variables']['loop_index0'] = str(env['variables'].get('loop_index0', 0)) - env['variables']['loop_index'] = str(env['variables'].get('loop_index', 1)) - env['variables']['loop_first'] = str(env['variables'].get('loop_first', True)) - env['variables']['loop_last'] = str(env['variables'].get('loop_last', False)) - env['variables']['loop_length'] = str(env['variables'].get('loop_length', len(_from))) - env['variables']['loop_revindex0'] = str(env['variables'].get('loop_revindex0', len(_from)-1)) - env['variables']['loop_revindex'] = str(env['variables'].get('loop_revindex', len(_from))) - res = '循环内赋值变量: %s, 循环列表变量: %s, 循环次数: %s, \r\n循环列表内容: %s.\r\n此页面仅用于显示循环信息, 禁止在此页面提取变量' % (_target, _from_var, len(_from), str(list(_from))) - response = httpclient.HTTPResponse(request=req,code=200,reason='OK',buffer=BytesIO(str(res).encode())) + logger_web_handler.debug('for 循环错误: %s', e, exc_info=config.traceback_print) + res = f'for 循环错误: {e}' + code = 500 + res += '\r\n此页面仅用于显示循环信息, 禁止在此页面提取变量' + response = httpclient.HTTPResponse(request=req, code=code, buffer=BytesIO(str(res).encode())) elif WHILE_START: try: env['variables']['loop_index0'] = str(env['variables'].get('loop_index0', 0)) env['variables']['loop_index'] = str(env['variables'].get('loop_index', 1)) - condition = safe_eval(WHILE_START.group(1),env['variables']) + condition = safe_eval(WHILE_START.group(1), env['variables']) condition = 'while 
循环判断结果: true' if condition else 'while 循环判断结果: false' code = 200 - except NameError: + except NameError as e: + logger_web_handler.debug('while 循环判断结果: false, error: %s', e, exc_info=config.traceback_print) condition = 'while 循环判断结果: false' code = 200 except ValueError as e: if len(str(e)) > 20 and str(e)[:20] == ":": + logger_web_handler.debug('while 循环判断结果: false, error: %s', e, exc_info=config.traceback_print) condition = 'while 循环判断结果: false' code = 200 else: - condition = 'while 循环条件错误: %s\r\n条件表达式: %s' % (str(e).replace("","ValueError"), WHILE_START.group(1)) + logger_web_handler.debug('while 循环条件错误: %s, 条件表达式: %s', e, WHILE_START.group(1), exc_info=config.traceback_print) + e_str = str(e).replace("", "ValueError") + condition = f'while 循环条件错误: {e_str}\r\n条件表达式: {WHILE_START.group(1)}' code = 500 except Exception as e: - condition = 'while循环条件错误: %s\r\n条件表达式: %s' % (str(e), WHILE_START.group(1)) + logger_web_handler.debug('while 循环条件错误: %s, 条件表达式: %s', e, WHILE_START.group(1), exc_info=config.traceback_print) + condition = f'while 循环条件错误: {e}\r\n条件表达式: {WHILE_START.group(1)}' code = 500 condition += '\r\n此页面仅用于显示循环判断结果, 禁止在此页面提取变量' - response = httpclient.HTTPResponse(request=req,code=code,buffer=BytesIO(str(condition).encode())) + response = httpclient.HTTPResponse(request=req, code=code, buffer=BytesIO(str(condition).encode())) elif IF_START: try: - condition = safe_eval(IF_START.group(1),env['variables']) + condition = safe_eval(IF_START.group(1), env['variables']) condition = '判断结果: true' if condition else '判断结果: false' code = 200 - except NameError: + except NameError as e: + logger_web_handler.debug('判断结果: false, error: %s', e, exc_info=config.traceback_print) condition = False except ValueError as e: if len(str(e)) > 20 and str(e)[:20] == ":": + logger_web_handler.debug('判断结果: false, error: %s', e, exc_info=config.traceback_print) condition = False else: - condition = '判断条件错误: %s\r\n条件表达式: %s' % (str(e).replace("","ValueError"), IF_START.group(1)) + 
logger_web_handler.debug('判断条件错误: %s, 条件表达式: %s', e, IF_START.group(1), exc_info=config.traceback_print) + e_str = str(e).replace("", "ValueError") + condition = f'判断条件错误: {e_str}\r\n条件表达式: {IF_START.group(1)}' code = 500 except Exception as e: - condition = '判断条件错误: %s\r\n条件表达式: %s' % (str(e), IF_START.group(1)) + logger_web_handler.debug('判断条件错误: %s, 条件表达式: %s', e, IF_START.group(1), exc_info=config.traceback_print) + condition = f'判断条件错误: {e}\r\n条件表达式: {IF_START.group(1)}' code = 500 condition += '\r\n此页面仅用于显示判断结果, 禁止在此页面提取变量' - response = httpclient.HTTPResponse(request=req,code=code,buffer=BytesIO(str(condition).encode())) + response = httpclient.HTTPResponse(request=req, code=code, buffer=BytesIO(str(condition).encode())) else: - e = httpclient.HTTPError(405, "结束等控制语句不支持在单条请求中测试") - response = httpclient.HTTPResponse(request=req,code=e.code,reason=e.message,buffer=BytesIO(str(e).encode())) + exc = httpclient.HTTPError(405, "结束等控制语句不支持在单条请求中测试") # type: ignore + response = httpclient.HTTPResponse(request=req, code=exc.code, reason=exc.message, buffer=BytesIO(str(exc).encode())) env['session'].extract_cookies_to_jar(response.request, response) success, _ = self.fetcher.run_rule(response, rule, env) result = { @@ -171,8 +199,8 @@ async def post(self): 'env': { 'variables': env['variables'], 'session': env['session'].to_json(), - } } + } else: _proxy = parse_url(data['env']['variables'].get('_proxy', '')) if _proxy: @@ -188,18 +216,20 @@ async def post(self): ret = await self.fetcher.fetch(data) result = { - 'success': ret['success'], - 'har': self.fetcher.response2har(ret['response']), - 'env': { - 'variables': ret['env']['variables'], - 'session': ret['env']['session'].to_json(), - } - } + 'success': ret['success'], + 'har': self.fetcher.response2har(ret['response']), + 'env': { + 'variables': ret['env']['variables'], + 'session': ret['env']['session'].to_json(), + } + } await self.finish(result) + class HARSave(BaseHandler): env = Fetcher().jinja_env + 
@staticmethod def get_variables(env, tpl): variables = set() @@ -215,9 +245,10 @@ def _get(obj, key): return try: ast = env.parse(obj[key]) - except: + except Exception as e: + logger_web_handler.debug("Parse %s from env failed: %s" , obj[key], e, exc_info=config.traceback_print) return - var.update(meta.find_undeclared_variables(ast)) + var.update(meta.find_undeclared_variables(ast)) # pylint: disable=cell-var-from-loop _get(req, 'method') _get(req, 'url') @@ -242,14 +273,14 @@ async def post(self, id): try: if 'json' in self.request.headers['Content-Type']: self.request.body = self.request.body.replace(b'\xc2\xa0', b' ') - except : - logger_Web_Handler.debug('HARSave Replace error: %s' % e) + except Exception as e: + logger_web_handler.debug('HARSave Replace error: %s', e, exc_info=config.traceback_print) data = json.loads(self.request.body) async with self.db.transaction() as sql_session: har = await self.db.user.encrypt(userid, data['har'], sql_session=sql_session) tpl = await self.db.user.encrypt(userid, data['tpl'], sql_session=sql_session) - variables = list(self.get_variables(self.env,data['tpl'])) + variables = list(self.get_variables(self.env, data['tpl'])) init_env = {} try: ast = self.env.parse(data['tpl']) @@ -258,55 +289,56 @@ async def post(self, id): try: init_env[x.node.name] = x.args[0].as_const() except Exception as e: - logger_Web_Handler.debug('HARSave init_env error: %s' % e) + logger_web_handler.debug('HARSave init_env error: %s', e, exc_info=config.traceback_print) except Exception as e: - logger_Web_Handler.debug('HARSave ast error: %s' % e) + logger_web_handler.debug('HARSave ast error: %s', e, exc_info=config.traceback_print) variables = json.dumps(variables) init_env = json.dumps(init_env) - groupName = 'None' + group_name = 'None' if id: _tmp = self.check_permission(await self.db.tpl.get(id, fields=('id', 'userid', 'lock'), sql_session=sql_session), 'w') if not _tmp['userid']: self.set_status(403) - await self.finish(u'公开模板不允许编辑') + 
await self.finish('公开模板不允许编辑') return if _tmp['lock']: self.set_status(403) - await self.finish(u'模板已锁定') + await self.finish('模板已锁定') return await self.db.tpl.mod(id, har=har, tpl=tpl, variables=variables, init_env=init_env, sql_session=sql_session) - groupName = (await self.db.tpl.get(id, fields=('_groups',), sql_session=sql_session))['_groups'] + group_name = (await self.db.tpl.get(id, fields=('_groups',), sql_session=sql_session))['_groups'] else: try: id = await self.db.tpl.add(userid, har, tpl, variables, init_env=init_env, sql_session=sql_session) except Exception as e: if "max_allowed_packet" in str(e): - raise Exception('har大小超过MySQL max_allowed_packet 限制; \n'+str(e)) + raise Exception('har大小超过MySQL max_allowed_packet 限制; \n' + str(e)) from e if not id: raise Exception('create tpl error') setting = data.get('setting', {}) await self.db.tpl.mod(id, - tplurl = '{0}|{1}'.format(harname, reponame), - sitename=setting.get('sitename'), - siteurl=setting.get('siteurl'), - note=setting.get('note'), - interval=setting.get('interval') or None, - mtime=time.time(), - updateable=0, - _groups=groupName, - sql_session=sql_session) + tplurl=f'{harname}|{reponame}', + sitename=setting.get('sitename'), + siteurl=setting.get('siteurl'), + note=setting.get('note'), + interval=setting.get('interval') or None, + mtime=time.time(), + updateable=0, + _groups=group_name, + sql_session=sql_session) await self.finish({ 'id': id - }) + }) + handlers = [ - (r'/tpl/(\d+)/edit', HAREditor), - (r'/tpl/(\d+)/save', HARSave), + (r'/tpl/(\d+)/edit', HAREditor), + (r'/tpl/(\d+)/save', HARSave), - (r'/har/edit', HAREditor), - (r'/har/save/?(\d+)?', HARSave), + (r'/har/edit', HAREditor), + (r'/har/save/?(\d+)?', HARSave), - (r'/har/test', HARTest), - ] + (r'/har/test', HARTest), +] diff --git a/web/handlers/index.py b/web/handlers/index.py index 3d7453aeb9e..c39ebeb747e 100644 --- a/web/handlers/index.py +++ b/web/handlers/index.py @@ -7,7 +7,7 @@ import json -from .base import * +from 
web.handlers.base import BaseHandler class IndexHandlers(BaseHandler): @@ -35,6 +35,7 @@ async def get(self): return await self.render('index.html', tpls=tpls, tplid=tplid, tpl=tpl, variables=variables, init_env=json.loads(tpl['init_env'])) + handlers = [ - ('/', IndexHandlers), - ] + ('/', IndexHandlers), +] diff --git a/web/handlers/login.py b/web/handlers/login.py index d384f601162..8f76d346715 100644 --- a/web/handlers/login.py +++ b/web/handlers/login.py @@ -6,67 +6,65 @@ # Created on 2014-08-08 20:38:51 import base64 -import re import time import umsgpack from Crypto.Hash import MD5 from tornado import gen -from tornado.ioloop import IOLoop import config from libs import mcrypto as crypto from libs import utils - -from .base import * +from web.handlers.base import BaseHandler, logger_web_handler class ForbiddenHandler(BaseHandler): async def get(self): return await self.render('Forbidden.html') + class LoginHandler(BaseHandler): async def get(self): if (self.current_user) and (await self.db.user.get(self.current_user['id'], fields=('id',))): self.redirect('/my/') return - regFlg = False if (await self.db.site.get(1, fields=('regEn',)))['regEn'] == 0 else True + reg_flg = False if (await self.db.site.get(1, fields=('regEn',)))['regEn'] == 0 else True - return await self.render('login.html', regFlg=regFlg) + return await self.render('login.html', regFlg=reg_flg) async def post(self): email = self.get_argument('email') password = self.get_argument('password') async with self.db.transaction() as sql_session: siteconfig = await self.db.site.get(1, fields=('MustVerifyEmailEn',), sql_session=sql_session) - regFlg = False if (await self.db.site.get(1, fields=('regEn',), sql_session=sql_session))['regEn'] == 0 else True + reg_flg = False if (await self.db.site.get(1, fields=('regEn',), sql_session=sql_session))['regEn'] == 0 else True if not email or not password: - await self.render('login.html', password_error=u'请输入用户名和密码', email=email, regFlg=regFlg) + await 
self.render('login.html', password_error='请输入用户名和密码', email=email, regFlg=reg_flg) return user_try = await self.db.user.get(email=email, fields=('id', 'role', 'status'), sql_session=sql_session) - if (user_try): + if user_try: if (user_try['status'] != 'Enable') and (user_try['role'] != 'admin'): - await self.render('login.html', password_error=u'账号已被禁用,请联系管理员', email=email, regFlg=regFlg) + await self.render('login.html', password_error='账号已被禁用,请联系管理员', email=email, regFlg=reg_flg) return else: - await self.render('login.html', password_error=u'不存在此邮箱或密码错误', email=email, regFlg=regFlg) + await self.render('login.html', password_error='不存在此邮箱或密码错误', email=email, regFlg=reg_flg) return if await self.db.user.challenge(email, password, sql_session=sql_session): user = await self.db.user.get(email=email, fields=('id', 'email', 'nickname', 'role', 'email_verified'), sql_session=sql_session) if not user: - await self.render('login.html', password_error=u'不存在此邮箱或密码错误', email=email, regFlg=regFlg) + await self.render('login.html', password_error='不存在此邮箱或密码错误', email=email, regFlg=reg_flg) return if (siteconfig['MustVerifyEmailEn'] != 0) and (user['email_verified'] == 0): - await self.render('login.html', password_error=u'未验证邮箱,请点击注册重新验证邮箱', email=email, regFlg=regFlg) + await self.render('login.html', password_error='未验证邮箱,请点击注册重新验证邮箱', email=email, regFlg=reg_flg) return setcookie = dict( - expires_days=config.cookie_days, - httponly=True, - ) + expires_days=config.cookie_days, + httponly=True, + ) if config.cookie_secure_mode: setcookie['secure'] = True self.set_secure_cookie('user', umsgpack.packb(user), **setcookie) @@ -76,98 +74,101 @@ async def post(self): user = await self.db.user.get(email=email, fields=('id', 'password', 'password_md5'), sql_session=sql_session) hash = MD5.new() hash.update(password.encode('utf-8')) - tmp = crypto.password_hash(hash.hexdigest(),await self.db.user.decrypt(user['id'], user['password'], sql_session=sql_session)) - if 
(user['password_md5'] != tmp): + tmp = crypto.password_hash(hash.hexdigest(), await self.db.user.decrypt(user['id'], user['password'], sql_session=sql_session)) + if user['password_md5'] != tmp: await self.db.user.mod(user['id'], password_md5=tmp, sql_session=sql_session) else: self.evil(+5) - await self.render('login.html', password_error=u'不存在此邮箱或密码错误', email=email, regFlg=regFlg) + await self.render('login.html', password_error='不存在此邮箱或密码错误', email=email, regFlg=reg_flg) return if user: self.redirect('/my/') + class LogoutHandler(BaseHandler): def get(self): self.clear_all_cookies() self.redirect('/') + class RegisterHandler(BaseHandler): async def get(self): if self.current_user: self.redirect('/my/') return - regFlg = False if (await self.db.site.get(1, fields=('regEn',)))['regEn'] == 0 else True - return await self.render('register.html', regFlg=regFlg) + reg_flg = False if (await self.db.site.get(1, fields=('regEn',)))['regEn'] == 0 else True + return await self.render('register.html', regFlg=reg_flg) async def post(self): async with self.db.transaction() as sql_session: siteconfig = await self.db.site.get(1, fields=('regEn', 'MustVerifyEmailEn'), sql_session=sql_session) - regEn = siteconfig['regEn'] - regFlg = False if regEn == 0 else True - MustVerifyEmailEn = siteconfig['MustVerifyEmailEn'] + reg_en = siteconfig['regEn'] + reg_flg = False if reg_en == 0 else True + must_verify_email_en = siteconfig['MustVerifyEmailEn'] email = self.get_argument('email') password = self.get_argument('password') if not email: - await self.render('register.html', email_error=u'请输入邮箱', regFlg=regFlg) + await self.render('register.html', email_error='请输入邮箱', regFlg=reg_flg) return if email.count('@') != 1 or email.count('.') == 0: - await self.render('register.html', email_error=u'邮箱格式不正确', regFlg=regFlg) + await self.render('register.html', email_error='邮箱格式不正确', regFlg=reg_flg) return if len(password) < 6: - await self.render('register.html', password_error=u'密码需要大于6位', 
email=email, regFlg=regFlg) + await self.render('register.html', password_error='密码需要大于6位', email=email, regFlg=reg_flg) return - user = await self.db.user.get(email = email, fields=('id', 'email', 'email_verified', 'nickname', 'role'), sql_session=sql_session) + user = await self.db.user.get(email=email, fields=('id', 'email', 'email_verified', 'nickname', 'role'), sql_session=sql_session) if user is None: - if (regEn == 1): + if reg_en == 1: self.evil(+5) try: await self.db.user.add(email=email, password=password, ip=self.ip2varbinary, sql_session=sql_session) except self.db.user.DeplicateUser as e: + logger_web_handler.error("email地址 %s 已注册, error: %s", email, e, exc_info=config.traceback_print) self.evil(+3) - await self.render('register.html', email_error=u'email地址已注册', regFlg=regFlg) + await self.render('register.html', email_error='email地址已注册', regFlg=reg_flg) return user = await self.db.user.get(email=email, fields=('id', 'email', 'nickname', 'role'), sql_session=sql_session) await self.db.notepad.add(dict(userid=user['id'], notepadid=1), sql_session=sql_session) setcookie = dict( - expires_days=config.cookie_days, - httponly=True, - ) + expires_days=config.cookie_days, + httponly=True, + ) if config.cookie_secure_mode: setcookie['secure'] = True self.set_secure_cookie('user', umsgpack.packb(user), **setcookie) usertmp = await self.db.user.list(sql_session=sql_session, fields=('id', 'email', 'nickname', 'role', 'email_verified')) if len(usertmp) == 1 and config.user0isadmin: - if (usertmp[0]['email'] == email): + if usertmp[0]['email'] == email: await self.db.user.mod(usertmp[0]['id'], role='admin', sql_session=sql_session) if siteconfig['MustVerifyEmailEn'] == 1: if not config.domain: - await self.render('register.html', email_error=u'请联系 QD 框架管理员配置框架域名 domain, 以启用邮箱验证功能!', regFlg=regFlg) + await self.render('register.html', email_error='请联系 QD 框架管理员配置框架域名 domain, 以启用邮箱验证功能!', regFlg=reg_flg) else: - await self.render('register.html', 
email_error=u'请验证邮箱后再登陆', regFlg=regFlg) + await self.render('register.html', email_error='请验证邮箱后再登陆', regFlg=reg_flg) if config.domain: await self.send_mail(user, sql_session=sql_session) else: - logger_Web_Handler.warning('请配置框架域名 domain, 以启用邮箱验证功能!') + logger_web_handler.warning('请配置框架域名 domain, 以启用邮箱验证功能!') else: - await self.render('register.html', email_error=u'管理员关闭注册', regFlg=regFlg) + await self.render('register.html', email_error='管理员关闭注册', regFlg=reg_flg) return else: - if (MustVerifyEmailEn == 1): - if (user['email_verified'] != 1): + if must_verify_email_en == 1: + if user['email_verified'] != 1: if not config.domain: - await self.render('register.html', email_error=u'请联系 QD 框架管理员配置框架域名 domain, 以启用邮箱验证功能!', regFlg=regFlg) + await self.render('register.html', email_error='请联系 QD 框架管理员配置框架域名 domain, 以启用邮箱验证功能!', regFlg=reg_flg) return - await self.render('register.html', email_error=u'email地址未验证, 邮件已发送, 请验证邮件后登陆') + await self.render('register.html', email_error='email地址未验证, 邮件已发送, 请验证邮件后登陆') await self.send_mail(user, sql_session=sql_session) else: - await self.render('register.html', email_error=u'email地址已注册', regFlg=regFlg) + await self.render('register.html', email_error='email地址已注册', regFlg=reg_flg) else: - await self.render('register.html', email_error=u'email地址已注册', regFlg=regFlg) + await self.render('register.html', email_error='email地址已注册', regFlg=reg_flg) return if user: self.redirect('/my/') @@ -177,7 +178,7 @@ async def send_mail(self, user, sql_session=None): verified_code = await self.db.user.encrypt(user['id'], verified_code, sql_session=sql_session) verified_code = await self.db.user.encrypt(0, [user['id'], verified_code], sql_session=sql_session) verified_code = base64.b64encode(verified_code).decode() - await gen.convert_yielded(utils.send_mail(to=user['email'], subject=u"欢迎注册 QD 平台", html=u""" + await gen.convert_yielded(utils.send_mail(to=user['email'], subject="欢迎注册 QD 平台", html="""

 [QD平台]  {http}://{domain}

点击以下链接验证邮箱,当您的定时任务执行失败的时候,会自动给您发送通知邮件。

@@ -194,9 +195,10 @@ async def send_mail(self, user, sql_session=None): return + class VerifyHandler(BaseHandler): async def get(self, code): - userid=None + userid = None try: async with self.db.transaction() as sql_session: verified_code = base64.b64decode(code) @@ -209,14 +211,14 @@ async def get(self, code): assert user['email'] == email await self.db.user.mod(userid, - email_verified=True, - mtime=time.time(), - sql_session=sql_session - ) + email_verified=True, + mtime=time.time(), + sql_session=sql_session + ) await self.finish('验证成功') except Exception as e: self.evil(+5) - logger_Web_Handler.error('UserID: %s verify email failed! Reason: %s', userid or '-1', e) + logger_web_handler.error('UserID: %s verify email failed! Reason: %s', userid or '-1', e, exc_info=config.traceback_print) self.set_status(400) await self.finish('验证失败') @@ -236,7 +238,7 @@ async def get(self, code): assert time.time() - time_time < 60 * 60 except Exception as e: self.evil(+10) - logger_Web_Handler.error('%r',e) + logger_web_handler.error('%r', e, exc_info=config.traceback_print) self.set_status(400) await self.finish('Bad Request') return @@ -253,22 +255,22 @@ async def post(self, code): email = self.get_argument('email') if not email: return await self.render('password_reset_email.html', - email_error=u'请输入邮箱') + email_error='请输入邮箱') if email.count('@') != 1 or email.count('.') == 0: return await self.render('password_reset_email.html', - email_error=u'邮箱格式不正确') + email_error='邮箱格式不正确') user = await self.db.user.get(email=email, fields=('id', 'email', 'mtime', 'nickname', 'role')) await self.finish("如果用户存在,会将发送密码重置邮件到您的邮箱,请注意查收。(如果您没有收到过激活邮件,可能无法也无法收到密码重置邮件)") if user: - logger_Web_Handler.info('password reset: userid=%(id)s email=%(email)s', user) + logger_web_handler.info('password reset: userid=%(id)s email=%(email)s', user) await self.send_mail(user) return else: password = self.get_argument('password') if len(password) < 6: - return await self.render('password_reset.html', 
password_error=u'密码需要大于6位') + return await self.render('password_reset.html', password_error='密码需要大于6位') async with self.db.transaction() as sql_session: try: @@ -281,17 +283,17 @@ async def post(self, code): assert time.time() - time_time < 60 * 60 except Exception as e: self.evil(+10) - logger_Web_Handler.error('%r',e) + logger_web_handler.error('%r', e, exc_info=config.traceback_print) self.set_status(400) await self.finish('Bad Request') return await self.db.user.mod(userid, - password=password, - mtime=time.time(), - sql_session=sql_session - ) - return self.finish("""密码重置成功! 请点击此处返回登录页面。""".format(http='https' if config.mail_domain_https else 'http', domain=config.domain)) + password=password, + mtime=time.time(), + sql_session=sql_session + ) + return self.finish(f'密码重置成功! 请点击此处返回登录页面。') async def send_mail(self, user): verified_code = [user['mtime'], time.time()] @@ -299,7 +301,7 @@ async def send_mail(self, user): verified_code = await self.db.user.encrypt(0, [user['id'], verified_code]) verified_code = base64.b64encode(verified_code).decode() - await gen.convert_yielded(utils.send_mail(to=user['email'], subject=u"QD平台(%s) 密码重置" % (config.domain), html=u""" + await gen.convert_yielded(utils.send_mail(to=user['email'], subject=f"QD平台({config.domain}) 密码重置", html="""

 [QD平台]  {http}://{domain}

@@ -318,11 +320,12 @@ async def send_mail(self, user): return + handlers = [ - ('/login', LoginHandler), - ('/logout', LogoutHandler), - ('/register', RegisterHandler), - ('/verify/(.*)', VerifyHandler), - ('/password_reset/?(.*)', PasswordResetHandler), - ('/forbidden', ForbiddenHandler), - ] + ('/login', LoginHandler), + ('/logout', LogoutHandler), + ('/register', RegisterHandler), + ('/verify/(.*)', VerifyHandler), + ('/password_reset/?(.*)', PasswordResetHandler), + ('/forbidden', ForbiddenHandler), +] diff --git a/web/handlers/my.py b/web/handlers/my.py index 8e5d649a65a..31d7c477850 100644 --- a/web/handlers/my.py +++ b/web/handlers/my.py @@ -7,7 +7,9 @@ import time -from .base import * +from tornado.web import addslash, authenticated + +from web.handlers.base import BaseHandler def my_status(task): @@ -21,9 +23,10 @@ def my_status(task): return u'正在准备执行任务' return u'正常' + class MyHandler(BaseHandler): - @tornado.web.addslash - @tornado.web.authenticated + @addslash + @authenticated async def get(self): user = self.current_user adminflg = False @@ -36,7 +39,7 @@ async def get(self): tasks = await self.db.task.list(user['id'], fields=('id', 'tplid', 'note', 'disabled', 'last_success', 'success_count', 'failed_count', 'last_failed', 'next', 'last_failed_count', 'ctime', '_groups'), limit=None) for task in tasks: - tpl = await self.db.tpl.get(task['tplid'], fields=('id', 'userid', 'sitename', 'siteurl', 'banner', 'note') ) + tpl = await self.db.tpl.get(task['tplid'], fields=('id', 'userid', 'sitename', 'siteurl', 'banner', 'note')) task['tpl'] = tpl _groups = [] @@ -50,16 +53,17 @@ async def get(self): tplgroups = [] for tpl in tpls: temp = tpl['_groups'] - if (temp not in tplgroups): + if (temp not in tplgroups): tplgroups.append(temp) - await self.render('my.html', tpls=tpls, tasks=tasks, my_status=my_status, userid=user['id'], taskgroups=_groups, tplgroups=tplgroups, adminflg=adminflg) + await self.render('my.html', tpls=tpls, tasks=tasks, my_status=my_status, 
userid=user['id'], taskgroups=_groups, tplgroups=tplgroups, adminflg=adminflg) else: return self.redirect('/login') + class CheckUpdateHandler(BaseHandler): - @tornado.web.addslash - @tornado.web.authenticated + @addslash + @authenticated async def get(self): user = self.current_user async with self.db.transaction() as sql_session: @@ -70,13 +74,13 @@ async def get(self): hjson[f'{h["filename"]}|{h["reponame"]}'] = h for tpl in tpls: - if tpl["tplurl"] in hjson and hjson[tpl["tplurl"]]["update"] and tpl['mtime'] < time.mktime(time.strptime(hjson[tpl["tplurl"]]['date'],"%Y-%m-%d %H:%M:%S")): + if tpl["tplurl"] in hjson and hjson[tpl["tplurl"]]["update"] and tpl['mtime'] < time.mktime(time.strptime(hjson[tpl["tplurl"]]['date'], "%Y-%m-%d %H:%M:%S")): await self.db.tpl.mod(tpl["id"], updateable=1, sql_session=sql_session) self.redirect('/my/') -handlers = [ - ('/my/?', MyHandler), - ('/my/checkupdate/?', CheckUpdateHandler), - ] +handlers = [ + ('/my/?', MyHandler), + ('/my/checkupdate/?', CheckUpdateHandler), +] diff --git a/web/handlers/push.py b/web/handlers/push.py index f8b9d9fd325..73a04eb3f38 100644 --- a/web/handlers/push.py +++ b/web/handlers/push.py @@ -7,15 +7,14 @@ import json import time -from datetime import datetime -from multiprocessing.connection import wait -from urllib.parse import urlparse -from .base import * +from tornado.web import HTTPError, authenticated + +from web.handlers.base import BaseHandler class PushListHandler(BaseHandler): - @tornado.web.authenticated + @authenticated async def get(self, status=None): user = self.current_user isadmin = user['isadmin'] @@ -23,20 +22,20 @@ async def get(self, status=None): async def get_user(userid): if not userid: return dict( - nickname = u'公开', - email = None, - email_verified = True, - ) + nickname='公开', + email=None, + email_verified=True, + ) if isadmin: user = await self.db.user.get(userid, fields=('id', 'nickname', 'email', 'email_verified')) else: user = await self.db.user.get(userid, 
fields=('id', 'nickname')) if not user: return dict( - nickname = u'公开', - email = None, - email_verified = False, - ) + nickname='公开', + email=None, + email_verified=False, + ) return user async def get_tpl(tplid): @@ -56,27 +55,28 @@ async def join(pr): _f = {} if status is not None: _f['status'] = status - for each in await self.db.push_request.list(from_userid = user['id'], **_f): + for each in await self.db.push_request.list(from_userid=user['id'], **_f): pushs.append(await join(each)) if isadmin: - for each in await self.db.push_request.list(from_userid = None, **_f): + for each in await self.db.push_request.list(from_userid=None, **_f): pushs.append(await join(each)) pulls = [] - for each in await self.db.push_request.list(to_userid = user['id'], **_f): + for each in await self.db.push_request.list(to_userid=user['id'], **_f): pulls.append(await join(each)) if isadmin: - for each in await self.db.push_request.list(to_userid = None, **_f): + for each in await self.db.push_request.list(to_userid=None, **_f): pulls.append(await join(each)) await self.render('push_list.html', pushs=pushs, pulls=pulls) + class PushActionHandler(BaseHandler): - @tornado.web.authenticated + @authenticated async def post(self, prid, action): user = self.current_user async with self.db.transaction() as sql_session: - pr = await self.db.push_request.get(prid,sql_session=sql_session) + pr = await self.db.push_request.get(prid, sql_session=sql_session) if not pr: raise HTTPError(404) @@ -113,7 +113,7 @@ async def post(self, prid, action): else: status = self.db.push_request.PENDING tpl_lock = len(list(await self.db.push_request.list(from_tplid=pr['from_tplid'], - status=status, sql_session=sql_session))) == 0 + status=status, sql_session=sql_session))) == 0 if not tpl_lock: await self.db.tpl.mod(pr['from_tplid'], lock=False, sql_session=sql_session) @@ -133,40 +133,40 @@ async def accept(self, pr, sql_session=None): if not pr['to_tplid']: tplid = await self.db.tpl.add( - userid = 
pr['to_userid'], - har = har, - tpl = tpl, - variables = tplobj['variables'], - init_env=tplobj['init_env'], - interval = tplobj['interval'], - sql_session=sql_session - ) + userid=pr['to_userid'], + har=har, + tpl=tpl, + variables=tplobj['variables'], + init_env=tplobj['init_env'], + interval=tplobj['interval'], + sql_session=sql_session + ) await self.db.tpl.mod(tplid, - public = 1, - sitename = tplobj['sitename'], - siteurl = tplobj['siteurl'], - banner = tplobj['banner'], - note = tplobj['note'], - fork = pr['from_tplid'], - sql_session=sql_session - ) + public=1, + sitename=tplobj['sitename'], + siteurl=tplobj['siteurl'], + banner=tplobj['banner'], + note=tplobj['note'], + fork=pr['from_tplid'], + sql_session=sql_session + ) else: tplid = pr['to_tplid'] await self.db.tpl.mod(tplid, - har = har, - tpl = tpl, - public = 1, - variables = tplobj['variables'], - init_env = tplobj['init_env'], - interval = tplobj['interval'], - sitename = tplobj['sitename'], - siteurl = tplobj['siteurl'], - banner = tplobj['banner'], - note = tplobj['note'], - fork = pr['from_tplid'], - mtime = time.time(), - sql_session=sql_session - ) + har=har, + tpl=tpl, + public=1, + variables=tplobj['variables'], + init_env=tplobj['init_env'], + interval=tplobj['interval'], + sitename=tplobj['sitename'], + siteurl=tplobj['siteurl'], + banner=tplobj['banner'], + note=tplobj['note'], + fork=pr['from_tplid'], + mtime=time.time(), + sql_session=sql_session + ) if tplid: await self.db.push_request.mod(pr['id'], to_tplid=tplid, status=self.db.push_request.ACCEPT, sql_session=sql_session) else: @@ -183,12 +183,13 @@ async def refuse(self, pr, sql_session=None): if reject_message: await self.db.push_request.mod(pr['id'], msg=reject_message, sql_session=sql_session) + class PushViewHandler(BaseHandler): - @tornado.web.authenticated - async def get(self, prid): + @authenticated + async def get(self, prid): # pylint: disable=unused-argument return await self.render('har/editor.html') - 
@tornado.web.authenticated + @authenticated async def post(self, prid): user = self.current_user pr = await self.db.push_request.get(prid, fields=('id', 'from_tplid', 'from_userid', 'to_tplid', 'to_userid', 'status')) @@ -225,23 +226,24 @@ async def post(self, prid): raise HTTPError(404) tpl['har'] = self.fetcher.tpl2har( - await self.db.user.decrypt(userid, tpl['tpl'])) + await self.db.user.decrypt(userid, tpl['tpl'])) tpl['variables'] = json.loads(tpl['variables']) await self.finish(dict( - filename = tpl['sitename'] or '未命名模板', - har = tpl['har'], - env = dict((x, '') for x in tpl['variables']), - setting = dict( - sitename = tpl['sitename'], - siteurl = tpl['siteurl'], - banner = tpl['banner'], - note = tpl['note'], - ), - readonly = True, - )) + filename=tpl['sitename'] or '未命名模板', + har=tpl['har'], + env=dict((x, '') for x in tpl['variables']), + setting=dict( + sitename=tpl['sitename'], + siteurl=tpl['siteurl'], + banner=tpl['banner'], + note=tpl['note'], + ), + readonly=True, + )) + handlers = [ - ('/pushs/?(\d+)?', PushListHandler), - ('/push/(\d+)/(cancel|accept|refuse)', PushActionHandler), - ('/push/(\d+)/view', PushViewHandler), - ] + (r'/pushs/?(\d+)?', PushListHandler), + (r'/push/(\d+)/(cancel|accept|refuse)', PushActionHandler), + (r'/push/(\d+)/view', PushViewHandler), +] diff --git a/web/handlers/site.py b/web/handlers/site.py index 7805b480b7a..449a61dba8a 100644 --- a/web/handlers/site.py +++ b/web/handlers/site.py @@ -4,18 +4,22 @@ # Author: Binux # http://binux.me # Created on 2014-08-09 11:39:25 +# pylint: disable=broad-exception-raised import base64 import time import traceback from tornado import gen +from tornado.web import authenticated -from .base import * +import config +from libs import utils +from web.handlers.base import BaseHandler, logger_web_handler class SiteManagerHandler(BaseHandler): - @tornado.web.authenticated + @authenticated async def get(self, userid): flg = self.get_argument("flg", '') title = self.get_argument("title", 
'') @@ -29,62 +33,62 @@ async def get(self, userid): site['regEn'] = False if site['regEn'] == 1 else True site['MustVerifyEmailEn'] = False if site['MustVerifyEmailEn'] == 0 else True - await self.render("site_manage.html", userid=userid, adminflg=adminflg, site=site, logDay=site['logDay'], flg=flg, title=title,log=log) + await self.render("site_manage.html", userid=userid, adminflg=adminflg, site=site, logDay=site['logDay'], flg=flg, title=title, log=log) return - @tornado.web.authenticated + @authenticated async def post(self, userid): try: async with self.db.transaction() as sql_session: - user = await self.db.user.get(userid, fields=('id','email', 'role', 'email_verified'), sql_session=sql_session) + user = await self.db.user.get(userid, fields=('id', 'email', 'role', 'email_verified'), sql_session=sql_session) if user and user['role'] == "admin": envs = {} for key in self.request.body_arguments: envs[key] = self.get_body_arguments(key) mail = envs['adminmail'][0] pwd = envs['adminpwd'][0] - if await self.db.user.challenge_MD5(mail, pwd, sql_session=sql_session) and (user['email'] == mail): - if ("site.regEn" in envs): + if await self.db.user.challenge_md5(mail, pwd, sql_session=sql_session) and (user['email'] == mail): + if "site.regEn" in envs: await self.db.site.mod(1, regEn=0, sql_session=sql_session) if (await self.db.site.get(1, fields=('regEn',), sql_session=sql_session))['regEn'] != 0: - raise Exception(u"关闭注册失败") + raise Exception("关闭注册失败") else: await self.db.site.mod(1, regEn=1, sql_session=sql_session) if (await self.db.site.get(1, fields=('regEn',), sql_session=sql_session))['regEn'] != 1: - raise Exception(u"开启注册失败") + raise Exception("开启注册失败") - if ("site.MustVerifyEmailEn" in envs): + if "site.MustVerifyEmailEn" in envs: if not config.domain: raise Exception('请先配置 QD 框架域名 domain, 以启用邮箱验证功能!') - if (user['email_verified'] != 0): + if user['email_verified'] != 0: await self.db.site.mod(1, MustVerifyEmailEn=1, sql_session=sql_session) if (await 
self.db.site.get(1, fields=('MustVerifyEmailEn',), sql_session=sql_session))['MustVerifyEmailEn'] != 1: - raise Exception(u"开启 强制邮箱验证 失败") + raise Exception("开启 强制邮箱验证 失败") else: await self.send_verify_mail(user) - raise Exception(u"必须验证 管理员邮箱 才能开启, 已尝试发送验证邮件, 请查收。") + raise Exception("必须验证 管理员邮箱 才能开启, 已尝试发送验证邮件, 请查收。") else: await self.db.site.mod(1, MustVerifyEmailEn=0, sql_session=sql_session) if (await self.db.site.get(1, fields=('MustVerifyEmailEn',), sql_session=sql_session))['MustVerifyEmailEn'] != 0: - raise Exception(u"关闭 强制邮箱验证 失败") + raise Exception("关闭 强制邮箱验证 失败") - if ("site.logDay" in envs): + if "site.logDay" in envs: tmp = int(envs["site.logDay"][0]) if tmp != (await self.db.site.get(1, fields=('logDay',), sql_session=sql_session))['logDay']: await self.db.site.mod(1, logDay=tmp, sql_session=sql_session) if (await self.db.site.get(1, fields=('logDay',), sql_session=sql_session))['logDay'] != tmp: - raise Exception(u"设置日志保留天数失败") + raise Exception("设置日志保留天数失败") else: - raise Exception(u"账号/密码错误") + raise Exception("账号/密码错误") else: - raise Exception(u"非管理员,不可操作") + raise Exception("非管理员,不可操作") except Exception as e: if config.traceback_print: traceback.print_exc() - if (str(e).find('get user need id or email') > -1): - e = u'请输入用户名/密码' + if str(e).find('get user need id or email') > -1: + e = '请输入用户名/密码' await self.render('utils_run_result.html', log=str(e), title='设置失败', flg='danger') - logger_Web_Handler.error('UserID: %s modify Manage_Board failed! Reason: %s', userid, str(e).replace('\\r\\n','\r\n')) + logger_web_handler.error('UserID: %s modify Manage_Board failed! 
Reason: %s', userid, str(e).replace('\\r\\n', '\r\n')) return await self.render('utils_run_result.html', title='设置成功', flg='success') return @@ -94,7 +98,7 @@ async def send_verify_mail(self, user): verified_code = await self.db.user.encrypt(user['id'], verified_code) verified_code = await self.db.user.encrypt(0, [user['id'], verified_code]) verified_code = base64.b64encode(verified_code).decode() - await gen.convert_yielded(utils.send_mail(to=user['email'], subject=u"QD平台 验证邮箱", html=u""" + await gen.convert_yielded(utils.send_mail(to=user['email'], subject="QD平台 验证邮箱", html="""

 [QD平台]  {http}://{domain}

点击以下链接验证邮箱,当您的QD失败的时候,会自动给您发送通知邮件。

@@ -111,6 +115,7 @@ async def send_verify_mail(self, user): return + handlers = [ - ('/site/(\d+)/manage', SiteManagerHandler), - ] + (r'/site/(\d+)/manage', SiteManagerHandler), +] diff --git a/web/handlers/subscribe.py b/web/handlers/subscribe.py index 351dafe3c1b..1141f69562d 100644 --- a/web/handlers/subscribe.py +++ b/web/handlers/subscribe.py @@ -4,38 +4,38 @@ # Author: Binux # http://binux.me # Created on 2014-08-08 21:06:02 +# pylint: disable=broad-exception-raised import asyncio import base64 import json import random import time -import traceback -from typing import Any, Dict +from typing import Dict from urllib.parse import quote, urlparse import aiohttp -from tornado import httputil -from tornado.web import Application +from tornado.web import addslash, authenticated -from config import proxies, domain - -from .base import * +import config +from config import domain, proxies +from web.handlers.base import (BaseHandler, BaseWebSocketHandler, + logger_web_handler) class SubscribeHandler(BaseHandler): - @tornado.web.addslash - @tornado.web.authenticated + @addslash + @authenticated async def get(self, userid): msg = '' user = self.current_user adminflg = False - if (user['id'] == int(userid)) and (user['role'] == u'admin'): + if (user['id'] == int(userid)) and (user['role'] == 'admin'): adminflg = True repos = json.loads((await self.db.site.get(1, fields=('repos',)))['repos']) try: # 如果上次更新时间大于1天则更新模板仓库 - if (int(time.time()) - int(repos['lastupdate']) > 24 * 3600): + if int(time.time()) - int(repos['lastupdate']) > 24 * 3600: tpls = await self.db.pubtpl.list() await self.render('pubtpl_wait.html', tpls=tpls, user=user, userid=user['id'], adminflg=adminflg, repos=repos['repos'], msg=msg) return @@ -44,21 +44,20 @@ async def get(self, userid): await self.render('pubtpl_subscribe.html', tpls=tpls, user=user, userid=user['id'], adminflg=adminflg, repos=repos['repos'], msg=msg) except Exception as e: - if config.traceback_print: - traceback.print_exc() user = 
self.current_user tpls = await self.db.pubtpl.list() await self.render('pubtpl_subscribe.html', tpls=tpls, user=user, userid=user['id'], adminflg=adminflg, repos=repos['repos'], msg=str(e)) - logger_Web_Handler.error('UserID: %s browse Subscribe failed! Reason: %s', userid, str(e).replace('\\r\\n','\r\n')) + logger_web_handler.error('UserID: %s browse Subscribe failed! Reason: %s', userid, str(e).replace('\\r\\n', '\r\n'), exc_info=config.traceback_print) return + class SubscribeUpdatingHandler(BaseWebSocketHandler): - users:Dict[int, BaseWebSocketHandler] = {} + users: Dict[int, BaseWebSocketHandler] = {} updating = False updating_start_time = 0 def check_origin(self, origin): - parsed_origin = urlparse(origin) + parsed_origin = urlparse(origin) return parsed_origin.netloc.endswith(domain) async def update(self, userid): @@ -78,103 +77,100 @@ async def update(self, userid): else: proxy = None # 如果上次更新时间大于1天则更新模板仓库 - if (int(time.time()) - int(repos['lastupdate']) > 24 * 3600): + if int(time.time()) - int(repos['lastupdate']) > 24 * 3600: for repo in repos['repos']: - await self.send_global_message({'code': 1000, 'message': '-----开始更新 {repo} 模板仓库-----'.format(repo=repo['reponame'])}) + await self.send_global_message({'code': 1000, 'message': f'-----开始更新 {repo["reponame"]} 模板仓库-----'}) if repo['repoacc']: if config.subscribe_accelerate_url == 'jsdelivr_cdn': - url = '{0}@{1}'.format(repo['repourl'].replace('https://github.com/', 'https://cdn.jsdelivr.net/gh/'), repo['repobranch']) + url = f"{repo['repourl'].replace('https://github.com/', 'https://cdn.jsdelivr.net/gh/')}@{repo['repobranch']}" elif config.subscribe_accelerate_url == 'jsdelivr_fastly': - url = '{0}@{1}'.format(repo['repourl'].replace('https://github.com/', 'https://fastly.jsdelivr.net/gh/'), repo['repobranch']) + url = f"{repo['repourl'].replace('https://github.com/', 'https://fastly.jsdelivr.net/gh/')}@{repo['repobranch']}" elif config.subscribe_accelerate_url == 'ghproxy': - url = 
'{0}/{1}'.format(repo['repourl'].replace('https://github.com/', 'https://ghproxy.com/https://raw.githubusercontent.com/'), repo['repobranch']) + url = f"{repo['repourl'].replace('https://github.com/', 'https://ghproxy.com/https://raw.githubusercontent.com/')}/{repo['repobranch']}" elif config.subscribe_accelerate_url == 'fastgit': - url = '{0}/{1}'.format(repo['repourl'].replace('https://github.com/', 'https://raw.fastgit.org/'), repo['repobranch']) + url = f"{repo['repourl'].replace('https://github.com/', 'https://raw.fastgit.org/')}/{repo['repobranch']}" else: if config.subscribe_accelerate_url.endswith('/'): - url = '{0}/{1}'.format(repo['repourl'].replace('https://github.com/', config.subscribe_accelerate_url), repo['repobranch']) + url = f"{repo['repourl'].replace('https://github.com/', config.subscribe_accelerate_url)}/{repo['repobranch']}" else: - url = '{0}/{1}'.format(repo['repourl'].replace('https://github.com', config.subscribe_accelerate_url), repo['repobranch']) + url = f"{repo['repourl'].replace('https://github.com', config.subscribe_accelerate_url)}/{repo['repobranch']}" else: - if (repo['repourl'].find('https://github.com/') > -1): - url = '{0}/{1}'.format(repo['repourl'].replace('https://github.com/', 'https://raw.githubusercontent.com/'), repo['repobranch']) + if repo['repourl'].find('https://github.com/') > -1: + url = f'{repo["repourl"].replace("https://github.com/", "https://raw.githubusercontent.com/")}/{repo["repobranch"]}' else: url = repo['repourl'] - await self.send_global_message({'code': 1000, 'message': '仓库地址: {url}'.format(url=url)}) + await self.send_global_message({'code': 1000, 'message': f'仓库地址: {url}'}) hfile_link = url + '/tpls_history.json' - hfile= {'har': {}} - async with aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(total=config.connect_timeout*10, connect=config.connect_timeout*5)) as session: + hfile = {'har': {}} + async with aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(total=config.connect_timeout * 10, 
connect=config.connect_timeout * 5)) as session: await asyncio.sleep(0.001) async with session.get(hfile_link, verify_ssl=False, timeout=config.request_timeout, proxy=proxy) as res: if res.status == 200: hfile = await res.json(content_type="") - logger_Web_Handler.info('200 Get repo {repo} history file success!'.format(repo=repo['reponame'])) + logger_web_handler.info('200 Get repo %s history file success!', repo["reponame"]) await self.send_global_message({'code': 1000, 'message': 'tpls_history.json 文件获取成功'}) else: - logger_Web_Handler.error('Get repo {repo} history file failed! Reason: {link} open error!'.format(repo=repo['reponame'], link=hfile_link)) - await self.send_global_message({'code': 0, 'message': 'tpls_history.json 文件获取失败, 原因: 打开链接 {link} 出错!'.format(link=hfile_link)}) - await self.send_global_message({'code': 0, 'message': 'HTTP 代码: {code}, 错误信息: {reason}'.format(code=res.status, reason=res.reason if res.reason else 'Unknown')}) + logger_web_handler.error('Get repo %s history file failed! 
Reason: %s open error!', repo["reponame"], hfile_link) + await self.send_global_message({'code': 0, 'message': f'tpls_history.json 文件获取失败, 原因: 打开链接 {hfile_link} 出错!'}) + await self.send_global_message({'code': 0, 'message': f'HTTP 代码: {res.status}, 错误信息: {res.reason if res.reason else "Unknown"}'}) fail_count += 1 continue for har in hfile['har'].values(): for k, v in repo.items(): har[k] = v - tpl = await self.db.pubtpl.list(name = har['name'], - reponame=har['reponame'], - repourl=har['repourl'], - repobranch=har['repobranch'], - fields=('id', 'name', 'version'), - sql_session=sql_session) - - if (len(tpl) > 0): - if (int(tpl[0]['version']) < int(har['version'])): - if (har['content'] == ''): - har_url = "{0}/{1}".format(url, quote(har['filename'])) + tpl = await self.db.pubtpl.list(name=har['name'], + reponame=har['reponame'], + repourl=har['repourl'], + repobranch=har['repobranch'], + fields=('id', 'name', 'version'), + sql_session=sql_session) + + if len(tpl) > 0: + if int(tpl[0]['version']) < int(har['version']): + if har['content'] == '': + har_url = f"{url}/{quote(har['filename'])}" await asyncio.sleep(0.001) async with session.get(har_url, verify_ssl=False, timeout=config.request_timeout, proxy=proxy) as har_res: if har_res.status == 200: har['content'] = base64.b64encode(await har_res.read()).decode() else: - logger_Web_Handler.error('Update {repo} public template {name} failed! Reason: {link} open error!'.format(repo=repo['reponame'], name=har['name'], link=har_url)) - await self.send_global_message({'code': 0, 'message': '模板: {name} 更新失败, 原因: 打开链接 {link} 出错!'.format(name=har['name'], link=har_url)}) - await self.send_global_message({'code': 0, 'message': 'HTTP 代码: {code}, 错误信息: {reason}'.format(code=har_res.status, reason=har_res.reason if har_res.reason else 'Unknown')}) + logger_web_handler.error('Update %s public template %s failed! 
Reason: %s open error!', repo['reponame'], har['name'], har_url) + await self.send_global_message({'code': 0, 'message': f'模板: {har["name"]} 更新失败, 原因: 打开链接 {har_url} 出错!'}) + await self.send_global_message({'code': 0, 'message': f'HTTP 代码: {har_res.status}, 错误信息: {har_res.reason if har_res.reason else "Unknown"}'}) fail_count += 1 continue har['update'] = True await self.db.pubtpl.mod(tpl[0]['id'], **har, sql_session=sql_session) - logger_Web_Handler.info('Update {repo} public template {name} success!'.format(repo=repo['reponame'], name=har['name'])) - await self.send_global_message({'code': 1000, 'message': '模板: {name} 更新成功'.format(name=har['name'])}) + logger_web_handler.info('Update %s public template %s success!', repo['reponame'], har['name']) + await self.send_global_message({'code': 1000, 'message': f'模板: {har["name"]} 更新成功'}) else: - if (har['content'] == ''): - har_url = "{0}/{1}".format(url, quote(har['filename'])) + if har['content'] == '': + har_url = f"{url}/{quote(har['filename'])}" await asyncio.sleep(0.001) async with session.get(har_url, verify_ssl=False, timeout=config.request_timeout, proxy=proxy) as har_res: if har_res.status == 200: har['content'] = base64.b64encode(await har_res.read()).decode() else: - logger_Web_Handler.error('Add {repo} public template {name} failed! Reason: {link} open error!'.format(repo=repo['reponame'], name=har['name'], link=har_url)) - await self.send_global_message({'code': 0, 'message': '模板: {name} 添加失败, 原因: 打开链接 {link} 出错!'.format(name=har['name'], link=har_url)}) - await self.send_global_message({'code': 0, 'message': 'HTTP 代码: {code}, 错误信息: {reason}'.format(code=har_res.status, reason=har_res.reason if har_res.reason else 'Unknown')}) + logger_web_handler.error('Add %s public template %s failed! 
Reason: %s open error!', repo['reponame'], har['name'], har_url) + await self.send_global_message({'code': 0, 'message': f'模板: {har["name"]} 添加失败, 原因: 打开链接 {har_url} 出错!'}) + await self.send_global_message({'code': 0, 'message': f'HTTP 代码: {har_res.status}, 错误信息: {har_res.reason if har_res.reason else "Unknown"}'}) fail_count += 1 continue await self.db.pubtpl.add(har, sql_session=sql_session) - logger_Web_Handler.info('Add {repo} public template {name} success!'.format(repo=repo['reponame'], name=har['name'])) - await self.send_global_message({'code': 1000, 'message': '模板: {name} 添加成功'.format(name=har['name'])}) - await self.send_global_message({'code': 1000, 'message': '-----更新 {repo} 模板仓库结束-----'.format(repo=repo['reponame'])}) + logger_web_handler.info('Add %s public template %s success!', repo['reponame'], har['name']) + await self.send_global_message({'code': 1000, 'message': f'模板: {har["name"]} 添加成功'}) + await self.send_global_message({'code': 1000, 'message': f'-----更新 {repo["reponame"]} 模板仓库结束-----'}) success = True except Exception as e: - if config.traceback_print: - traceback.print_exc() - - msg = str(e).replace('\\r\\n','\r\n') + msg = str(e).replace('\\r\\n', '\r\n') if msg == "": msg = e.__class__.__name__ if msg.endswith('\r\n'): msg = msg[:-2] - logger_Web_Handler.error('UserID: %s update Subscribe failed! Reason: %s', userid, msg) - await self.send_global_message({'code': 0, 'message': '更新失败, 原因: {msg}'.format(msg=msg)}) + logger_web_handler.error('UserID: %s update Subscribe failed! 
Reason: %s', userid, msg, exc_info=config.traceback_print) + await self.send_global_message({'code': 0, 'message': f'更新失败, 原因: {msg}'}) try: async with self.db.transaction() as sql_session: @@ -186,12 +182,11 @@ async def update(self, userid): if fail_count == 0: await self.close_all(1000, 'Update success, please refresh your browser.') else: - await self.close_all(4001, 'Update success, but {0} templates update failed.'.format(fail_count)) + await self.close_all(4001, f'Update success, but {fail_count} templates update failed.') else: await self.close_all(4006, 'Update failed, please check failure reason.') except Exception as e: - if config.traceback_print: - traceback.print_exc() + logger_web_handler.error('UserID: %s update Subscribe or close connection failed! Reason: %s', userid, e, exc_info=config.traceback_print) SubscribeUpdatingHandler.updating = False SubscribeUpdatingHandler.updating_start_time = 0 @@ -216,9 +211,9 @@ async def send_global_message(self, message): SubscribeUpdatingHandler.users.pop(userid) await asyncio.gather(*task) - @tornado.web.addslash - @tornado.web.authenticated - async def open(self, userid): + @addslash + @authenticated + async def open(self, userid): # pylint: disable=arguments-differ,invalid-overridden-method user = self.current_user # 判断用户是否已经登录 if not user: @@ -232,7 +227,7 @@ async def open(self, userid): # 判断用户是否为管理员 adminflg = False - if (user['id'] == int(userid)) and (user['role'] == u'admin'): + if (user['id'] == int(userid)) and (user['role'] == 'admin'): adminflg = True if not adminflg and len(SubscribeUpdatingHandler.users) >= config.websocket.max_connections_subscribe: @@ -267,60 +262,60 @@ async def close_all(self, code, reason): SubscribeUpdatingHandler.users.pop(userid) SubscribeUpdatingHandler.users = {} + class SubscribeRefreshHandler(BaseHandler): - @tornado.web.authenticated + @authenticated async def get(self, userid): await self.post(userid) - @tornado.web.authenticated + @authenticated async def 
post(self, userid): try: user = self.current_user op = self.get_argument('op', '') - if (op == ''): + if op == '': raise Exception('op参数为空') async with self.db.transaction() as sql_session: - if (user['id'] == int(userid)) and (user['role'] == u'admin'): + if (user['id'] == int(userid)) and (user['role'] == 'admin'): repos = json.loads((await self.db.site.get(1, fields=('repos',), sql_session=sql_session))['repos']) repos["lastupdate"] = 0 await self.db.site.mod(1, repos=json.dumps(repos, ensure_ascii=False, indent=4), sql_session=sql_session) - if (op == 'clear'): + if op == 'clear': for pubtpl in await self.db.pubtpl.list(fields=('id',), sql_session=sql_session): await self.db.pubtpl.delete(pubtpl['id'], sql_session=sql_session) else: raise Exception('没有权限操作') except Exception as e: - if config.traceback_print: - traceback.print_exc() - await self.render('utils_run_result.html', log=str(e), title=u'设置失败', flg='danger') - logger_Web_Handler.error('UserID: %s refresh Subscribe failed! Reason: %s', userid, str(e).replace('\\r\\n','\r\n')) + logger_web_handler.error('UserID: %s refresh Subscribe failed! Reason: %s', userid, str(e).replace('\\r\\n', '\r\n'), exc_info=config.traceback_print) + await self.render('utils_run_result.html', log=str(e), title='设置失败', flg='danger') return - self.redirect('/subscribe/{0}/'.format(userid) ) + self.redirect(f'/subscribe/{int(userid)}/') return -class Subscrib_signup_repos_Handler(BaseHandler): - @tornado.web.authenticated + +class SubscribSignupReposHandler(BaseHandler): + @authenticated async def get(self, userid): user = self.current_user - if (user['id'] == int(userid)) and (user['role'] == u'admin'): + if (user['id'] == int(userid)) and (user['role'] == 'admin'): await self.render('pubtpl_register.html', userid=userid) else: - await self.render('utils_run_result.html', log='非管理员用户,不可设置', title=u'设置失败', flg='danger') - logger_Web_Handler.error('UserID: %s browse Subscrib_signup_repos failed! 
Reason: 非管理员用户,不可设置', userid) + await self.render('utils_run_result.html', log='非管理员用户,不可设置', title='设置失败', flg='danger') + logger_web_handler.error('UserID: %s browse Subscrib_signup_repos failed! Reason: 非管理员用户,不可设置', userid) return - @tornado.web.authenticated + @authenticated async def post(self, userid): try: user = self.current_user - if (user['id'] == int(userid)) and (user['role'] == u'admin'): + if (user['id'] == int(userid)) and (user['role'] == 'admin'): envs = {} for key in self.request.body_arguments: envs[key] = self.get_body_arguments(key) env = {} - for k, v in envs.items(): + for k, v in envs.items(): if (v[0] == 'false') or (v[0] == 'true'): env[k] = True if v[0] == 'true' else False else: @@ -350,107 +345,102 @@ async def post(self, userid): raise Exception('非管理员用户,不可设置') except Exception as e: - if config.traceback_print: - traceback.print_exc() - await self.render('utils_run_result.html', log=str(e), title=u'设置失败', flg='danger') - logger_Web_Handler.error('UserID: %s modify Subscribe_signup_repos failed! Reason: %s', userid, str(e).replace('\\r\\n','\r\n')) + logger_web_handler.error('UserID: %s modify Subscribe_signup_repos failed! 
Reason: %s', userid, str(e).replace('\\r\\n', '\r\n'), exc_info=config.traceback_print) + await self.render('utils_run_result.html', log=str(e), title='设置失败', flg='danger') return - await self.render('utils_run_result.html', log=u'设置成功,请关闭操作对话框或刷新页面查看', title=u'设置成功', flg='success') + await self.render('utils_run_result.html', log='设置成功,请关闭操作对话框或刷新页面查看', title='设置成功', flg='success') return + class GetReposInfoHandler(BaseHandler): - @tornado.web.authenticated + @authenticated async def post(self, userid): try: user = self.current_user - if (user['id'] == int(userid)) and (user['role'] == u'admin'): + if (user['id'] == int(userid)) and (user['role'] == 'admin'): envs = {} for key in self.request.body_arguments: envs[key] = self.get_body_arguments(key) tmp = json.loads((await self.db.site.get(1, fields=('repos',)))['repos'])['repos'] repos = [] - for repoid, selected in envs.items(): - if isinstance(selected[0],bytes): + for repoid, selected in envs.items(): + if isinstance(selected[0], bytes): selected[0] = selected[0].decode() - if (selected[0] == 'true'): + if selected[0] == 'true': repos.append(tmp[int(repoid)]) else: raise Exception('非管理员用户,不可查看') except Exception as e: - if config.traceback_print: - traceback.print_exc() - await self.render('utils_run_result.html', log=str(e), title=u'获取信息失败', flg='danger') - logger_Web_Handler.error('UserID: %s get Subscribe_Repos_Info failed! Reason: %s', userid, str(e).replace('\\r\\n','\r\n')) + logger_web_handler.error('UserID: %s get Subscribe_Repos_Info failed! 
Reason: %s', userid, str(e).replace('\\r\\n', '\r\n'), exc_info=config.traceback_print) + await self.render('utils_run_result.html', log=str(e), title='获取信息失败', flg='danger') return - await self.render('pubtpl_reposinfo.html', repos=repos) + await self.render('pubtpl_reposinfo.html', repos=repos) return -class unsubscribe_repos_Handler(BaseHandler): - @tornado.web.authenticated + +class UnsubscribeReposHandler(BaseHandler): + @authenticated async def get(self, userid): try: user = self.current_user - if (user['id'] == int(userid)) and (user['role'] == u'admin'): + if (user['id'] == int(userid)) and (user['role'] == 'admin'): await self.render('pubtpl_unsubscribe.html', user=user) else: raise Exception('非管理员用户,不可设置') return except Exception as e: - if config.traceback_print: - traceback.print_exc() - await self.render('utils_run_result.html', log=str(e), title=u'打开失败', flg='danger') - logger_Web_Handler.error('UserID: %s browse UnSubscribe_Repos failed! Reason: %s', userid, str(e).replace('\\r\\n','\r\n')) + logger_web_handler.error('UserID: %s browse UnSubscribe_Repos failed! Reason: %s', userid, str(e).replace('\\r\\n', '\r\n'), exc_info=config.traceback_print) + await self.render('utils_run_result.html', log=str(e), title='打开失败', flg='danger') return - @tornado.web.authenticated + @authenticated async def post(self, userid): try: user = self.current_user - if (user['id'] == int(userid)) and (user['role'] == u'admin'): + if (user['id'] == int(userid)) and (user['role'] == 'admin'): envs = {} for key in self.request.body_arguments: envs[key] = self.get_body_arguments(key) env = {} - for k, v in envs.items(): + for k, v in envs.items(): try: env[k] = json.loads(v[0]) - except: + except Exception as e: + logger_web_handler.debug('Deserialize failed! 
Reason: %s', str(e).replace('\\r\\n', '\r\n')) env[k] = v[0] async with self.db.transaction() as sql_session: - repos = json.loads((await self.db.site.get(1, fields=('repos',),sql_session=sql_session))['repos']) + repos = json.loads((await self.db.site.get(1, fields=('repos',), sql_session=sql_session))['repos']) tmp = repos['repos'] result = [] - for i, j in enumerate(tmp): + for i, j in enumerate(tmp): # 检查是否存在同名仓库 - if not env['selectedrepos'].get(str(i),False) : + if not env['selectedrepos'].get(str(i), False) : result.append(j) else: - pubtpls = await self.db.pubtpl.list(reponame=j['reponame'], fields=('id',),sql_session=sql_session) + pubtpls = await self.db.pubtpl.list(reponame=j['reponame'], fields=('id',), sql_session=sql_session) for pubtpl in pubtpls: - await self.db.pubtpl.delete(pubtpl['id'],sql_session=sql_session) + await self.db.pubtpl.delete(pubtpl['id'], sql_session=sql_session) repos['repos'] = result await self.db.site.mod(1, repos=json.dumps(repos, ensure_ascii=False, indent=4), sql_session=sql_session) else: raise Exception('非管理员用户,不可设置') except Exception as e: - if config.traceback_print: - traceback.print_exc() - await self.render('utils_run_result.html', log=str(e), title=u'设置失败', flg='danger') - logger_Web_Handler.error('UserID: %s unsubscribe Subscribe_Repos failed! Reason: %s', userid, str(e).replace('\\r\\n','\r\n')) + logger_web_handler.error('UserID: %s unsubscribe Subscribe_Repos failed! 
Reason: %s', userid, str(e).replace('\\r\\n', '\r\n'), exc_info=config.traceback_print) + await self.render('utils_run_result.html', log=str(e), title='设置失败', flg='danger') return - await self.render('utils_run_result.html', log=u'设置成功,请关闭操作对话框或刷新页面查看', title=u'设置成功', flg='success') + await self.render('utils_run_result.html', log='设置成功,请关闭操作对话框或刷新页面查看', title='设置成功', flg='success') return -handlers = [ - ('/subscribe/(\d+)/', SubscribeHandler), - ('/subscribe/(\d+)/updating/', SubscribeUpdatingHandler), - ('/subscribe/refresh/(\d+)/', SubscribeRefreshHandler), - ('/subscribe/signup_repos/(\d+)/', Subscrib_signup_repos_Handler), - ('/subscribe/(\d+)/get_reposinfo', GetReposInfoHandler), - ('/subscribe/unsubscribe_repos/(\d+)/', unsubscribe_repos_Handler), - ] +handlers = [ + (r'/subscribe/(\d+)/', SubscribeHandler), + (r'/subscribe/(\d+)/updating/', SubscribeUpdatingHandler), + (r'/subscribe/refresh/(\d+)/', SubscribeRefreshHandler), + (r'/subscribe/signup_repos/(\d+)/', SubscribSignupReposHandler), + (r'/subscribe/(\d+)/get_reposinfo', GetReposInfoHandler), + (r'/subscribe/unsubscribe_repos/(\d+)/', UnsubscribeReposHandler), +] diff --git a/web/handlers/task.py b/web/handlers/task.py index 8c612b9b9f2..58196bf65f0 100644 --- a/web/handlers/task.py +++ b/web/handlers/task.py @@ -4,21 +4,20 @@ # Author: Binux # http://binux.me # Created on 2014-08-09 11:39:25 +# pylint: disable=broad-exception-raised import datetime import json -import random import time -import traceback from codecs import escape_decode from tornado.iostream import StreamClosedError +from tornado.web import HTTPError, authenticated -from libs import utils -from libs.funcs import cal, pusher +import config +from libs.funcs import Cal, Pusher from libs.parse_url import parse_url - -from .base import * +from web.handlers.base import BaseHandler, logger_web_handler class TaskNewHandler(BaseHandler): @@ -31,7 +30,7 @@ async def get(self): if user: tpls += sorted(await self.db.tpl.list(userid=user['id'], 
fields=fields, limit=None), key=lambda t: -t['id']) if tpls: - tpls.append({'id': 0, 'sitename': u'-----公开模板-----'}) + tpls.append({'id': 0, 'sitename': '-----公开模板-----'}) tpls += sorted(await self.db.tpl.list(userid=None, public=1, fields=fields, limit=None), key=lambda t: -t['success_count']) if not tplid: @@ -42,7 +41,7 @@ async def get(self): if tplid: tplid = int(tplid) - tpl = self.check_permission(await self.db.tpl.get(tplid, fields=('id', 'userid', 'note', 'sitename', 'siteurl', 'variables','init_env'))) + tpl = self.check_permission(await self.db.tpl.get(tplid, fields=('id', 'userid', 'note', 'sitename', 'siteurl', 'variables', 'init_env'))) variables = json.loads(tpl['variables']) if not tpl['init_env']: tpl['init_env'] = '{}' @@ -54,14 +53,14 @@ async def get(self): if not isinstance(task['_groups'], str): task['_groups'] = str(task['_groups']) temp = task['_groups'] - if (temp not in _groups): + if temp not in _groups: _groups.append(temp) await self.render('task_new.html', tpls=tpls, tplid=tplid, tpl=tpl, variables=variables, task={}, _groups=_groups, init_env=init_env, default_retry_count=config.task_max_retry_count) else: - await self.render('utils_run_result.html', log=u'请先添加模板!', title=u'设置失败', flg='danger') + await self.render('utils_run_result.html', log='请先添加模板!', title='设置失败', flg='danger') - @tornado.web.authenticated + @authenticated async def post(self, taskid=None): user = self.current_user tplid = int(self.get_body_argument('_binux_tplid')) @@ -72,7 +71,7 @@ async def post(self, taskid=None): retry_interval = self.get_body_argument('_binux_retry_interval') async with self.db.transaction() as sql_session: - tpl = self.check_permission(await self.db.tpl.get(tplid, fields=('id', 'userid', 'interval'),sql_session=sql_session)) + tpl = self.check_permission(await self.db.tpl.get(tplid, fields=('id', 'userid', 'interval'), sql_session=sql_session)) envs = {} for key in self.request.body_arguments: envs[key] = self.get_body_arguments(key) @@ 
-89,16 +88,16 @@ async def post(self, taskid=None): env['retry_count'] = retry_count env['retry_interval'] = retry_interval - if ('New_group' in envs): - New_group = envs['New_group'][0].strip() + if 'New_group' in envs: + new_group = envs['New_group'][0].strip() - if New_group != "" : - target_group = New_group + if new_group != "" : + target_group = new_group else: - for value in envs: - if envs[value][0] == 'on': - if (value.find("group-select-") > -1): - target_group = escape_decode(value.replace("group-select-", "").strip()[2:-1], "hex-escape")[0].decode('utf-8') + for key, value in envs.items(): + if value[0] == 'on': + if key.find("group-select-") > -1: + target_group = escape_decode(key.replace("group-select-", "").strip()[2:-1], "hex-escape")[0].decode('utf-8') break else: target_group = 'None' @@ -108,7 +107,7 @@ async def post(self, taskid=None): taskid = await self.db.task.add(tplid, user['id'], env, sql_session=sql_session) if tested: - await self.db.task.mod(taskid, note=note, next=time.time() + (tpl['interval'] or 24*60*60), sql_session=sql_session) + await self.db.task.mod(taskid, note=note, next=time.time() + (tpl['interval'] or 24 * 60 * 60), sql_session=sql_session) else: await self.db.task.mod(taskid, note=note, next=time.time() + config.new_task_delay, sql_session=sql_session) else: @@ -135,22 +134,23 @@ async def post(self, taskid=None): self.redirect('/my/') -class TaskEditHandler(TaskNewHandler): - @tornado.web.authenticated + +class TaskEditHandler(BaseHandler): + @authenticated async def get(self, taskid): user = self.current_user task = self.check_permission(await self.db.task.get(taskid, fields=('id', 'userid', - 'tplid', 'disabled', 'note', 'retry_count', 'retry_interval')), 'w') + 'tplid', 'disabled', 'note', 'retry_count', 'retry_interval')), 'w') task['init_env'] = (await self.db.user.decrypt(user['id'], (await self.db.task.get(taskid, ('init_env',)))['init_env'])) tpl = self.check_permission(await self.db.tpl.get(task['tplid'], 
fields=('id', 'userid', 'note', - 'sitename', 'siteurl', 'variables'))) + 'sitename', 'siteurl', 'variables'))) variables = json.loads(tpl['variables']) init_env = [] for var in variables: value = task['init_env'][var] if var in task['init_env'] else '' - init_env.append({'name':var, 'value':value}) + init_env.append({'name': var, 'value': value}) proxy = task['init_env']['_proxy'] if '_proxy' in task['init_env'] else '' if task['retry_interval'] is None: @@ -158,31 +158,32 @@ async def get(self, taskid): await self.render('task_new.html', tpls=[tpl, ], tplid=tpl['id'], tpl=tpl, variables=variables, task=task, init_env=init_env, proxy=proxy, retry_count=task['retry_count'], retry_interval=task['retry_interval'], default_retry_count=config.task_max_retry_count, task_title="修改任务") + class TaskRunHandler(BaseHandler): - @tornado.web.authenticated + @authenticated async def post(self, taskid): self.evil(+2) start_ts = int(time.time()) user = self.current_user async with self.db.transaction() as sql_session: task = self.check_permission(await self.db.task.get(taskid, fields=('id', 'tplid', 'userid', 'init_env', - 'env', 'session', 'retry_count', 'retry_interval', 'last_success', 'last_failed', 'success_count', 'note', - 'failed_count', 'last_failed_count', 'next', 'disabled', 'ontime', 'ontimeflg', 'pushsw','newontime'), sql_session=sql_session), 'w') + 'env', 'session', 'retry_count', 'retry_interval', 'last_success', 'last_failed', 'success_count', 'note', + 'failed_count', 'last_failed_count', 'next', 'disabled', 'ontime', 'ontimeflg', 'pushsw', 'newontime'), sql_session=sql_session), 'w') tpl = self.check_permission(await self.db.tpl.get(task['tplid'], fields=('id', 'userid', 'sitename', - 'siteurl', 'tpl', 'interval', 'last_success'),sql_session=sql_session)) + 'siteurl', 'tpl', 'interval', 'last_success'), sql_session=sql_session)) fetch_tpl = await self.db.user.decrypt( - 0 if not tpl['userid'] else task['userid'], tpl['tpl'], sql_session=sql_session) + 0 if not 
tpl['userid'] else task['userid'], tpl['tpl'], sql_session=sql_session) env = dict( - variables = await self.db.user.decrypt(task['userid'], task['init_env'], sql_session=sql_session), - session = [], - ) + variables=await self.db.user.decrypt(task['userid'], task['init_env'], sql_session=sql_session), + session=[], + ) pushsw = json.loads(task['pushsw']) newontime = json.loads(task['newontime']) - pushertool = pusher(self.db, sql_session=sql_session) - caltool = cal() + pushertool = Pusher(self.db, sql_session=sql_session) + caltool = Cal() try: url = parse_url(env['variables'].get('_proxy')) @@ -198,87 +199,87 @@ async def post(self, taskid): } new_env, _ = await self.fetcher.do_fetch(fetch_tpl, env, [proxy]) except Exception as e: - logger_Web_Handler.error('taskid:%d tplid:%d failed! %.4fs \r\n%s', task['id'], task['tplid'], time.time()-start_ts, str(e).replace('\\r\\n','\r\n')) + logger_web_handler.error('taskid:%d tplid:%d failed! %.4fs \r\n%s', task['id'], task['tplid'], time.time() - start_ts, str(e).replace('\\r\\n', '\r\n'), exc_info=config.traceback_print) t = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S') - title = u"QD任务 {0}-{1} 失败".format(tpl['sitename'], task['note']) - logtmp = u"{0} \\r\\n日志:{1}".format(t, e) + title = f"QD任务 {tpl['sitename']}-{task['note']} 失败" + logtmp = f"{t} \\r\\n日志:{e}" await self.db.tasklog.add(task['id'], success=False, msg=str(e), sql_session=sql_session) await self.db.task.mod(task['id'], - last_failed=time.time(), - failed_count=task['failed_count']+1, - last_failed_count=task['last_failed_count']+1, - sql_session=sql_session - ) + last_failed=time.time(), + failed_count=task['failed_count'] + 1, + last_failed_count=task['last_failed_count'] + 1, + sql_session=sql_session + ) try: await self.finish('

运行失败

%s
' % logtmp.replace('\\r\\n', '
')) - except StreamClosedError: - if config.traceback_print: - traceback.print_exc() + except StreamClosedError as e: + logger_web_handler.error('stream closed error: %s', e, exc_info=config.traceback_print) await pushertool.pusher(user['id'], pushsw, 0x4, title, logtmp) return await self.db.tasklog.add(task['id'], success=True, msg=new_env['variables'].get('__log__'), sql_session=sql_session) - if (newontime["sw"]): - if ('mode' not in newontime): + if newontime["sw"]: + if 'mode' not in newontime: newontime['mode'] = 'ontime' - if (newontime['mode'] == 'ontime'): - newontime['date'] = (datetime.datetime.now()+datetime.timedelta(days=1)).strftime("%Y-%m-%d") - nextTime = caltool.calNextTs(newontime)['ts'] + if newontime['mode'] == 'ontime': + newontime['date'] = (datetime.datetime.now() + datetime.timedelta(days=1)).strftime("%Y-%m-%d") + next_time = caltool.cal_next_ts(newontime)['ts'] else: - nextTime = time.time() + (tpl['interval'] if tpl['interval'] else 24 * 60 * 60) + next_time = time.time() + (tpl['interval'] if tpl['interval'] else 24 * 60 * 60) await self.db.task.mod(task['id'], - disabled = False, - last_success = time.time(), - last_failed_count = 0, - success_count = task['success_count'] + 1, - mtime = time.time(), - next = nextTime, - sql_session=sql_session) + disabled=False, + last_success=time.time(), + last_failed_count=0, + success_count=task['success_count'] + 1, + mtime=time.time(), + next=next_time, + sql_session=sql_session) t = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S') - title = u"QD任务 {0}-{1} 成功".format(tpl['sitename'], task['note']) + title = f"QD任务 {tpl['sitename']}-{task['note']} 成功" logtmp = new_env['variables'].get('__log__') - logtmp = u"{0} \\r\\n日志:{1}".format(t, logtmp) + logtmp = f"{t} \\r\\n日志:{logtmp}" - await self.db.tpl.incr_success(tpl['id'],sql_session=sql_session) + await self.db.tpl.incr_success(tpl['id'], sql_session=sql_session) try: await self.finish('

运行成功

%s
' % logtmp.replace('\\r\\n', '
')) - except StreamClosedError: - if config.traceback_print: - traceback.print_exc() + except StreamClosedError as e: + logger_web_handler.error('stream closed error: %s', e, exc_info=config.traceback_print) await pushertool.pusher(user['id'], pushsw, 0x8, title, logtmp) - logDay = int((await self.db.site.get(1, fields=('logDay',),sql_session=sql_session))['logDay']) - for log in await self.db.tasklog.list(taskid = taskid, fields=('id', 'ctime'), sql_session=sql_session): - if (time.time() - log['ctime']) > (logDay * 24 * 60 * 60): + log_day = int((await self.db.site.get(1, fields=('logDay',), sql_session=sql_session))['logDay']) + for log in await self.db.tasklog.list(taskid=taskid, fields=('id', 'ctime'), sql_session=sql_session): + if (time.time() - log['ctime']) > (log_day * 24 * 60 * 60): await self.db.tasklog.delete(log['id'], sql_session=sql_session) return + class TaskLogHandler(BaseHandler): - @tornado.web.authenticated + @authenticated async def get(self, taskid): - user = self.current_user + # user = self.current_user task = self.check_permission(await self.db.task.get(taskid, fields=('id', 'tplid', 'userid', 'disabled'))) - tasklog = await self.db.tasklog.list(taskid = taskid, fields=('success', 'ctime', 'msg')) + tasklog = await self.db.tasklog.list(taskid=taskid, fields=('success', 'ctime', 'msg')) await self.render('tasklog.html', task=task, tasklog=tasklog) + class TotalLogHandler(BaseHandler): - @tornado.web.authenticated + @authenticated async def get(self, userid, days): tasks = [] days = int(days) user = self.current_user if userid == str(user['id']): for task in await self.db.task.list(userid, fields=('id', 'tplid', 'note'), limit=None): - tpl = await self.db.tpl.get(task['tplid'], fields=('id', 'userid', 'sitename', 'siteurl', 'banner', 'note') ) + tpl = await self.db.tpl.get(task['tplid'], fields=('id', 'userid', 'sitename', 'siteurl', 'banner', 'note')) task['tpl'] = tpl - for log in await self.db.tasklog.list(taskid = task['id'], 
fields=('id','success', 'ctime', 'msg')): + for log in await self.db.tasklog.list(taskid=task['id'], fields=('id', 'success', 'ctime', 'msg')): if (time.time() - log['ctime']) <= (days * 24 * 60 * 60): task['log'] = log tasks.append(task.copy()) @@ -288,198 +289,203 @@ async def get(self, userid, days): self.evil(+5) raise HTTPError(401) + class TaskLogDelHandler(BaseHandler): - @tornado.web.authenticated + @authenticated async def get(self, taskid): - user = self.current_user + # user = self.current_user async with self.db.transaction() as sql_session: - task = self.check_permission(await self.db.task.get(taskid, fields=('id', 'tplid', 'userid', 'disabled'), sql_session=sql_session)) - tasklog = await self.db.tasklog.list(taskid = taskid, fields=('id', 'success', 'ctime', 'msg'), sql_session=sql_session) + self.check_permission(await self.db.task.get(taskid, fields=('userid',), sql_session=sql_session)) + tasklog = await self.db.tasklog.list(taskid=taskid, fields=('id', 'success', 'ctime', 'msg'), sql_session=sql_session) for log in tasklog: await self.db.tasklog.delete(log['id'], sql_session=sql_session) - tasklog = await self.db.tasklog.list(taskid = taskid, fields=('id', 'success', 'ctime', 'msg'), sql_session=sql_session) + tasklog = await self.db.tasklog.list(taskid=taskid, fields=('id', 'success', 'ctime', 'msg'), sql_session=sql_session) await self.db.task.mod(taskid, - success_count=0, - failed_count=0, - sql_session=sql_session - ) + success_count=0, + failed_count=0, + sql_session=sql_session + ) - self.redirect("/task/{0}/log".format(taskid)) + self.redirect(f"/task/{int(taskid)}/log") return - @tornado.web.authenticated + @authenticated async def post(self, taskid): - user = self.current_user + # user = self.current_user envs = {} for key in self.request.body_arguments: envs[key] = self.get_body_arguments(key) body_arguments = envs day = 365 - if ('day' in body_arguments): + if 'day' in body_arguments: day = int(json.loads(body_arguments['day'][0])) 
async with self.db.transaction() as sql_session: - tasklog = await self.db.tasklog.list(taskid = taskid, fields=('id', 'success', 'ctime', 'msg'), sql_session=sql_session) + tasklog = await self.db.tasklog.list(taskid=taskid, fields=('id', 'success', 'ctime', 'msg'), sql_session=sql_session) for log in tasklog: if (time.time() - log['ctime']) > (day * 24 * 60 * 60): await self.db.tasklog.delete(log['id'], sql_session=sql_session) - tasklog = await self.db.tasklog.list(taskid = taskid, fields=('id', 'success', 'ctime', 'msg'), sql_session=sql_session) + tasklog = await self.db.tasklog.list(taskid=taskid, fields=('id', 'success', 'ctime', 'msg'), sql_session=sql_session) - self.redirect("/task/{0}/log".format(taskid)) + self.redirect(f"/task/{int(taskid)}/log") return + class TaskLogSuccessDelHandler(BaseHandler): - @tornado.web.authenticated + @authenticated async def get(self, taskid): - user = self.current_user + # user = self.current_user async with self.db.transaction() as sql_session: - task = self.check_permission(await self.db.task.get(taskid, fields=('id', 'tplid', 'userid', 'disabled'), sql_session=sql_session)) - tasklog = await self.db.tasklog.list(taskid = taskid, fields=('id', 'success', 'ctime', 'msg'), sql_session=sql_session) + self.check_permission(await self.db.task.get(taskid, fields=('userid',), sql_session=sql_session)) + tasklog = await self.db.tasklog.list(taskid=taskid, fields=('id', 'success', 'ctime', 'msg'), sql_session=sql_session) for log in tasklog: if log['success'] == 1: await self.db.tasklog.delete(log['id'], sql_session=sql_session) - tasklog = await self.db.tasklog.list(taskid = taskid, fields=('id', 'success', 'ctime', 'msg'), sql_session=sql_session) + tasklog = await self.db.tasklog.list(taskid=taskid, fields=('id', 'success', 'ctime', 'msg'), sql_session=sql_session) await self.db.task.mod(taskid, - success_count=0, - sql_session=sql_session - ) + success_count=0, + sql_session=sql_session + ) self.redirect('/my/') return + 
class TaskLogFailDelHandler(BaseHandler): - @tornado.web.authenticated + @authenticated async def get(self, taskid): - user = self.current_user + # user = self.current_user async with self.db.transaction() as sql_session: - task = self.check_permission(await self.db.task.get(taskid, fields=('id', 'tplid', 'userid', 'disabled'), sql_session=sql_session)) - tasklog = await self.db.tasklog.list(taskid = taskid, fields=('id', 'success', 'ctime', 'msg'), sql_session=sql_session) + self.check_permission(await self.db.task.get(taskid, fields=('userid',), sql_session=sql_session)) + tasklog = await self.db.tasklog.list(taskid=taskid, fields=('id', 'success', 'ctime', 'msg'), sql_session=sql_session) for log in tasklog: if log['success'] == 0: await self.db.tasklog.delete(log['id'], sql_session=sql_session) - tasklog = await self.db.tasklog.list(taskid = taskid, fields=('id', 'success', 'ctime', 'msg'), sql_session=sql_session) + tasklog = await self.db.tasklog.list(taskid=taskid, fields=('id', 'success', 'ctime', 'msg'), sql_session=sql_session) await self.db.task.mod(taskid, - failed_count=0, - sql_session=sql_session - ) + failed_count=0, + sql_session=sql_session + ) self.redirect('/my/') return + class TaskDelHandler(BaseHandler): - @tornado.web.authenticated + @authenticated async def post(self, taskid): - user = self.current_user + # user = self.current_user async with self.db.transaction() as sql_session: - task = self.check_permission(await self.db.task.get(taskid, fields=('id', 'userid', ), sql_session=sql_session), 'w') - logs = await self.db.tasklog.list(taskid = taskid, fields=('id',), sql_session=sql_session) + self.check_permission(await self.db.task.get(taskid, fields=('userid',), sql_session=sql_session), 'w') + logs = await self.db.tasklog.list(taskid=taskid, fields=('id',), sql_session=sql_session) for log in logs: await self.db.tasklog.delete(log['id'], sql_session=sql_session) - await self.db.task.delete(task['id'], sql_session=sql_session) + await 
self.db.task.delete(taskid, sql_session=sql_session) self.redirect('/my/') + class TaskDisableHandler(BaseHandler): - @tornado.web.authenticated + @authenticated async def post(self, taskid): - user = self.current_user + # user = self.current_user async with self.db.transaction() as sql_session: - task = self.check_permission(await self.db.task.get(taskid, fields=('id', 'userid', ),sql_session=sql_session), 'w') - logs = await self.db.tasklog.list(taskid = taskid, fields=('id',),sql_session=sql_session) - await self.db.task.mod(task['id'], disabled=1, sql_session=sql_session) + self.check_permission(await self.db.task.get(taskid, fields=('userid',), sql_session=sql_session), 'w') + # logs = await self.db.tasklog.list(taskid=taskid, fields=('id',), sql_session=sql_session) + await self.db.task.mod(taskid, disabled=1, sql_session=sql_session) self.redirect('/my/') -class TaskSetTimeHandler(TaskNewHandler): - @tornado.web.authenticated + +class TaskSetTimeHandler(BaseHandler): + @authenticated async def get(self, taskid): - user = self.current_user + # user = self.current_user task = self.check_permission(await self.db.task.get(taskid, fields=('id', 'userid', - 'tplid', 'disabled', 'note', 'ontime', 'ontimeflg', 'newontime')), 'w') + 'tplid', 'disabled', 'note', 'ontime', 'ontimeflg', 'newontime')), 'w') newontime = json.loads(task['newontime']) ontime = newontime - if ('mode' not in newontime): + if 'mode' not in newontime: ontime['mode'] = 'ontime' else: ontime = newontime - today_date = time.strftime("%Y-%m-%d",time.localtime()) + today_date = time.strftime("%Y-%m-%d", time.localtime()) await self.render('task_setTime.html', task=task, ontime=ontime, today_date=today_date) - @tornado.web.authenticated + @authenticated async def post(self, taskid): - log = u'设置成功' + log = '设置成功' try: envs = {} for key in self.request.body_arguments: envs[key] = self.get_body_arguments(key) - for env in envs.keys(): - if (envs[env][0] == u'true' or envs[env][0] == u'false'): - 
envs[env] = True if envs[env][0] == u'true' else False + for key, value in envs.items(): + if value[0] == 'true' or value[0] == 'false': + envs[key] = True if value[0] == 'true' else False else: - envs[env] = u'{0}'.format(envs[env][0]) + envs[key] = str(value[0]) async with self.db.transaction() as sql_session: - if (envs['sw']): - c = cal() - if ('time' in envs): - if (len(envs['time'].split(':')) < 3): + if envs['sw']: + c = Cal() + if 'time' in envs: + if len(envs['time'].split(':')) < 3: envs['time'] = envs['time'] + ':00' - tmp = c.calNextTs(envs) - if (tmp['r'] == 'True'): + tmp = c.cal_next_ts(envs) + if tmp['r'] == 'True': await self.db.task.mod(taskid, - disabled = False, - newontime = json.dumps(envs), - next = tmp['ts'], - sql_session=sql_session) + disabled=False, + newontime=json.dumps(envs), + next=tmp['ts'], + sql_session=sql_session) - log = u'设置成功,下次执行时间:{0}'.format(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(tmp['ts']))) + log = f"设置成功,下次执行时间:{time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(tmp['ts']))}" else: raise Exception(tmp['r']) else: tmp = json.loads((await self.db.task.get(taskid, fields=('newontime',), sql_session=sql_session))['newontime']) tmp['sw'] = False - await self.db.task.mod(taskid, newontime = json.dumps(tmp), sql_session=sql_session) + await self.db.task.mod(taskid, newontime=json.dumps(tmp), sql_session=sql_session) except Exception as e: - if config.traceback_print: - traceback.print_exc() - await self.render('utils_run_result.html', log=str(e), title=u'设置失败', flg='danger') - logger_Web_Handler.error('TaskID: %s set Time failed! Reason: %s', taskid, str(e).replace('\\r\\n','\r\n')) + logger_web_handler.error('TaskID: %s set Time failed! 
Reason: %s', taskid, str(e).replace('\\r\\n', '\r\n'), exc_info=config.traceback_print) + await self.render('utils_run_result.html', log=str(e), title='设置失败', flg='danger') return - await self.render('utils_run_result.html', log=log, title=u'设置成功', flg='success') + await self.render('utils_run_result.html', log=log, title='设置成功', flg='success') return -class TaskGroupHandler(TaskNewHandler): - @tornado.web.authenticated + +class TaskGroupHandler(BaseHandler): + @authenticated async def get(self, taskid): user = self.current_user - groupNow = (await self.db.task.get(taskid, fields=('_groups',)))['_groups'] + group_now = (await self.db.task.get(taskid, fields=('_groups',)))['_groups'] _groups = [] for task in await self.db.task.list(user['id'], fields=('_groups',), limit=None): if not isinstance(task['_groups'], str): task['_groups'] = str(task['_groups']) temp = task['_groups'] - if (temp not in _groups): + if temp not in _groups: _groups.append(temp) - await self.render('task_setgroup.html', taskid=taskid, _groups=_groups, groupNow=groupNow) + await self.render('task_setgroup.html', taskid=taskid, _groups=_groups, groupNow=group_now) - @tornado.web.authenticated + @authenticated async def post(self, taskid): envs = {} for key in self.request.body_arguments: envs[key] = self.get_body_arguments(key) - New_group = envs['New_group'][0].strip() + new_group = envs['New_group'][0].strip() - if New_group != "" : - target_group = New_group + if new_group != "" : + target_group = new_group else: - for value in envs: - if envs[value][0] == 'on': - target_group = escape_decode(value.strip()[2:-1],"hex-escape")[0].decode('utf-8') + for key, value in envs.items(): + if value[0] == 'on': + target_group = escape_decode(key.strip()[2:-1], "hex-escape")[0].decode('utf-8') break else: target_group = 'None' @@ -488,43 +494,44 @@ async def post(self, taskid): self.redirect('/my/') + class TasksDelHandler(BaseHandler): - @tornado.web.authenticated - async def post(self, userid): + 
@authenticated + async def post(self, userid): # pylint: disable=unused-argument try: - user = self.current_user + # user = self.current_user envs = {} for key in self.request.body_arguments: envs[key] = self.get_body_arguments(key) body_arguments = envs - if ('taskids' in body_arguments): + if 'taskids' in body_arguments: taskids = json.loads(envs['taskids'][0]) - async with self.db.transaction() as sql_session: - if (body_arguments['func'][0] == 'Del'): - for taskid in taskids: - task = self.check_permission(await self.db.task.get(taskid, fields=('id', 'userid', ),sql_session=sql_session), 'w') - logs = await self.db.tasklog.list(taskid = taskid, fields=('id',),sql_session=sql_session) - for log in logs: - await self.db.tasklog.delete(log['id'], sql_session=sql_session) - await self.db.task.delete(taskid, sql_session=sql_session) - elif (body_arguments['func'][0] == 'setGroup'): - New_group = body_arguments['groupValue'][0].strip() - if(New_group == ''): - New_group = u'None' - for taskid in taskids: - await self.db.task.mod(taskid, groups=New_group, sql_session=sql_session) - - await self.finish('

操作成功

') + async with self.db.transaction() as sql_session: + if body_arguments['func'][0] == 'Del': + for taskid in taskids: + self.check_permission(await self.db.task.get(taskid, fields=('id', 'userid', ), sql_session=sql_session), 'w') + logs = await self.db.tasklog.list(taskid=taskid, fields=('id',), sql_session=sql_session) + for log in logs: + await self.db.tasklog.delete(log['id'], sql_session=sql_session) + await self.db.task.delete(taskid, sql_session=sql_session) + elif body_arguments['func'][0] == 'setGroup': + new_group = body_arguments['groupValue'][0].strip() + if new_group == '': + new_group = 'None' + for taskid in taskids: + await self.db.task.mod(taskid, groups=new_group, sql_session=sql_session) + + await self.finish('

操作成功

') + raise Exception('taskids not found!') except Exception as e: - if config.traceback_print: - traceback.print_exc() + logger_web_handler.error('UserID: %s delete tasks failed! Reason: %s', userid, str(e).replace('\\r\\n', '\r\n'), exc_info=config.traceback_print) await self.render('tpl_run_failed.html', log=str(e)) - logger_Web_Handler.error('TaskID: %s delete failed! Reason: %s', taskid, str(e).replace('\\r\\n','\r\n')) return -class GetGroupHandler(TaskNewHandler): - @tornado.web.authenticated - async def get(self, taskid): + +class GetGroupHandler(BaseHandler): + @authenticated + async def get(self, taskid): # pylint: disable=unused-argument user = self.current_user _groups = {} for task in await self.db.task.list(user['id'], fields=('_groups',), limit=None): @@ -533,19 +540,20 @@ async def get(self, taskid): self.write(json.dumps(_groups, ensure_ascii=False, indent=4)) return + handlers = [ - ('/task/new', TaskNewHandler), - ('/task/(\d+)/edit', TaskEditHandler), - ('/task/(\d+)/settime', TaskSetTimeHandler), - ('/task/(\d+)/del', TaskDelHandler), - ('/task/(\d+)/disable', TaskDisableHandler), - ('/task/(\d+)/log', TaskLogHandler), - ('/task/(\d+)/log/total/(\d+)', TotalLogHandler), - ('/task/(\d+)/log/del', TaskLogDelHandler), - ('/task/(\d+)/log/del/Success', TaskLogSuccessDelHandler), - ('/task/(\d+)/log/del/Fail', TaskLogFailDelHandler), - ('/task/(\d+)/run', TaskRunHandler), - ('/task/(\d+)/group', TaskGroupHandler), - ('/tasks/(\d+)', TasksDelHandler), - ('/getgroups/(\d+)', GetGroupHandler), - ] + (r'/task/new', TaskNewHandler), + (r'/task/(\d+)/edit', TaskEditHandler), + (r'/task/(\d+)/settime', TaskSetTimeHandler), + (r'/task/(\d+)/del', TaskDelHandler), + (r'/task/(\d+)/disable', TaskDisableHandler), + (r'/task/(\d+)/log', TaskLogHandler), + (r'/task/(\d+)/log/total/(\d+)', TotalLogHandler), + (r'/task/(\d+)/log/del', TaskLogDelHandler), + (r'/task/(\d+)/log/del/Success', TaskLogSuccessDelHandler), + (r'/task/(\d+)/log/del/Fail', TaskLogFailDelHandler), 
+ (r'/task/(\d+)/run', TaskRunHandler), + (r'/task/(\d+)/group', TaskGroupHandler), + (r'/tasks/(\d+)', TasksDelHandler), + (r'/getgroups/(\d+)', GetGroupHandler), +] diff --git a/web/handlers/task_multi.py b/web/handlers/task_multi.py index 019742c6345..71cf70417ef 100644 --- a/web/handlers/task_multi.py +++ b/web/handlers/task_multi.py @@ -4,99 +4,95 @@ # Author: Binux # http://binux.me # Created on 2014-08-09 11:39:25 +# pylint: disable=broad-exception-raised -import datetime import json -import random import time -import traceback -import croniter +from tornado.web import authenticated -from libs.funcs import cal - -from .base import * +import config +from libs.funcs import Cal +from web.handlers.base import BaseHandler, logger_web_handler class TaskMultiOperateHandler(BaseHandler): - @tornado.web.authenticated + @authenticated async def get(self, userid): try: tasktype = '' user = self.current_user op = self.get_argument('op', '') _groups = [] - if (op != ''): + if op != '': tasktype = op - if isinstance(tasktype,bytes): + if isinstance(tasktype, bytes): tasktype = tasktype.decode() else: raise Exception('错误参数') - if (tasktype == 'setgroup'): + if tasktype == 'setgroup': for task in await self.db.task.list(user['id'], fields=('_groups',), limit=None): if not isinstance(task['_groups'], str): task['_groups'] = str(task['_groups']) temp = task['_groups'] - if (temp not in _groups): + if temp not in _groups: _groups.append(temp) except Exception as e: - if config.traceback_print: - traceback.print_exc() - await self.render('utils_run_result.html', log=str(e), title=u'打开失败', flg='danger') - logger_Web_Handler.error('UserID: %s browse Task_Multi failed! Reason: %s', userid, str(e).replace('\\r\\n','\r\n')) + logger_web_handler.error('UserID: %s browse Task_Multi failed! 
Reason: %s', userid, str(e).replace('\\r\\n', '\r\n'), exc_info=config.traceback_print) + await self.render('utils_run_result.html', log=str(e), title='打开失败', flg='danger') return await self.render('taskmulti.html', user=user, tasktype=tasktype, _groups=_groups) return - @tornado.web.authenticated + @authenticated async def post(self, userid): - user = self.current_user + # user = self.current_user try: envs = {} for key in self.request.body_arguments: envs[key] = self.get_body_arguments(key) env = {} op = self.get_argument('op', '') - if (op != ''): + if op != '': tasktype = op - if isinstance(tasktype,bytes): + if isinstance(tasktype, bytes): tasktype = tasktype.decode() else: raise Exception('错误参数') - for k, v in envs.items(): + for k, v in envs.items(): env[k] = json.loads(v[0]) if len(env['selectedtasks']) == 0: raise Exception('请选择任务') - for taskid, selected in env['selectedtasks'].items(): - if (selected): + for taskid, selected in env['selectedtasks'].items(): + if selected: async with self.db.transaction() as sql_session: - task = await self.db.task.get(taskid, fields=('id', 'note', 'tplid', 'userid'), sql_session=sql_session) - if (task): - if (task['userid']) == int(userid): - if (tasktype == 'disable'): - await self.db.task.mod(taskid, disabled = True, sql_session=sql_session) - if (tasktype == 'enable'): - await self.db.task.mod(taskid, disabled = False, sql_session=sql_session) - if (tasktype == 'delete'): - logs = await self.db.tasklog.list(taskid = taskid, fields=('id',), sql_session=sql_session) + task = await self.db.task.get(taskid, fields=('id', 'note', 'tplid', 'userid'), sql_session=sql_session) + if task: + if task['userid'] == int(userid): + if tasktype == 'disable': + await self.db.task.mod(taskid, disabled=True, sql_session=sql_session) + if tasktype == 'enable': + await self.db.task.mod(taskid, disabled=False, sql_session=sql_session) + if tasktype == 'delete': + logs = await self.db.tasklog.list(taskid=taskid, fields=('id',), 
sql_session=sql_session) for log in logs: await self.db.tasklog.delete(log['id'], sql_session=sql_session) await self.db.task.delete(taskid, sql_session=sql_session) - if (tasktype == 'setgroup'): + if tasktype == 'setgroup': group_env = env['setgroup'] - New_group = group_env['newgroup'].strip() - if New_group != "" : - target_group = New_group + new_group = group_env['newgroup'].strip() + if new_group != "" : + target_group = new_group else: target_group = group_env['checkgroupname'] or 'None' await self.db.task.mod(taskid, _groups=target_group, sql_session=sql_session) - if (tasktype == 'settime'): + if tasktype == 'settime': time_env = env['settime'] - c = cal() + c = Cal() settime_env = { 'sw': True, 'time': time_env['ontime_val'], @@ -107,67 +103,65 @@ async def post(self, userid): 'cron_val': time_env['cron_val'], } - if (time_env['randtimezone1'] != ''): + if time_env['randtimezone1'] != '': settime_env['randsw'] = True - # if (time_env['cron_sec'] != ''): + # if time_env['cron_sec'] != '': # settime_env['cron_sec'] = time_env['cron_sec'] - if (time_env['ontime_method'] == 'ontime'): - if (time_env['ontime_run_date'] == ''): + if time_env['ontime_method'] == 'ontime': + if time_env['ontime_run_date'] == '': settime_env['date'] = time.strftime("%Y-%m-%d", time.localtime()) - if (time_env['ontime_val'] == ''): + if time_env['ontime_val'] == '': settime_env['time'] = time.strftime("%H:%M:%S", time.localtime()) - if (len(settime_env['time'].split(':')) == 2): + if len(settime_env['time'].split(':')) == 2: settime_env['time'] = settime_env['time'] + ':00' - tmp = c.calNextTs(settime_env) - if (tmp['r'] == 'True'): - await self.db.task.mod(taskid, disabled = False, - newontime = json.dumps(settime_env), - next = tmp['ts'], - sql_session=sql_session) + tmp = c.cal_next_ts(settime_env) + if tmp['r'] == 'True': + await self.db.task.mod(taskid, disabled=False, + newontime=json.dumps(settime_env), + next=tmp['ts'], + sql_session=sql_session) else: - raise 
Exception(u'参数错误') + raise Exception('参数错误') else: raise Exception('用户id与任务的用户id不一致') except Exception as e: - if config.traceback_print: - traceback.print_exc() - await self.render('utils_run_result.html', log=str(e), title=u'设置失败', flg='danger') - logger_Web_Handler.error('UserID: %s set Task_Multi failed! Reason: %s', userid, str(e).replace('\\r\\n','\r\n')) + logger_web_handler.error('UserID: %s set Task_Multi failed! Reason: %s', userid, str(e).replace('\\r\\n', '\r\n'), exc_info=config.traceback_print) + await self.render('utils_run_result.html', log=str(e), title='设置失败', flg='danger') return - await self.render('utils_run_result.html', log=u'设置成功,请关闭操作对话框或刷新页面查看', title=u'设置成功', flg='success') + await self.render('utils_run_result.html', log='设置成功,请关闭操作对话框或刷新页面查看', title='设置成功', flg='success') return + class GetTasksInfoHandler(BaseHandler): - @tornado.web.authenticated + @authenticated async def post(self, userid): try: envs = {} for key in self.request.body_arguments: envs[key] = self.get_body_arguments(key) - user = self.current_user + # user = self.current_user tasks = [] - for taskid, selected in envs.items(): - if isinstance(selected[0],bytes): + for taskid, selected in envs.items(): + if isinstance(selected[0], bytes): selected[0] = selected[0].decode() - if (selected[0] == 'true'): - task = await self.db.task.get(taskid, fields=('id', 'note', 'tplid')) - if (task): + if selected[0] == 'true': + task = await self.db.task.get(taskid, fields=('id', 'note', 'tplid')) + if task: sitename = (await self.db.tpl.get(task['tplid'], fields=('sitename',)))['sitename'] task['sitename'] = sitename tasks.append(task) except Exception as e: - if config.traceback_print: - traceback.print_exc() - await self.render('utils_run_result.html', log=str(e), title=u'获取信息失败', flg='danger') - logger_Web_Handler.error('UserID: %s get Tasks_Info failed! Reason: %s', userid, str(e).replace('\\r\\n','\r\n')) + logger_web_handler.error('UserID: %s get Tasks_Info failed! 
Reason: %s', userid, str(e).replace('\\r\\n', '\r\n'), exc_info=config.traceback_print) + await self.render('utils_run_result.html', log=str(e), title='获取信息失败', flg='danger') return - await self.render('taskmulti_tasksinfo.html', tasks=tasks) + await self.render('taskmulti_tasksinfo.html', tasks=tasks) return + handlers = [ - ('/task/(\d+)/multi', TaskMultiOperateHandler), - ('/task/(\d+)/get_tasksinfo', GetTasksInfoHandler), - ] + (r'/task/(\d+)/multi', TaskMultiOperateHandler), + (r'/task/(\d+)/get_tasksinfo', GetTasksInfoHandler), +] diff --git a/web/handlers/tpl.py b/web/handlers/tpl.py index 110cd2dc01e..6fdb0172f69 100644 --- a/web/handlers/tpl.py +++ b/web/handlers/tpl.py @@ -6,33 +6,31 @@ # Created on 2014-08-09 17:52:49 import json -import traceback from codecs import escape_decode -from tornado import gen +from tornado.web import HTTPError, authenticated -from libs import utils +import config from libs.parse_url import parse_url - -from .base import * +from web.handlers.base import BaseHandler, logger_web_handler class TPLPushHandler(BaseHandler): - @tornado.web.authenticated + @authenticated async def get(self, tplid): - user = self.current_user + # user = self.current_user tpl = await self.db.tpl.get(tplid, fields=('id', 'userid', 'sitename')) if not self.permission(tpl, 'w'): self.evil(+5) - await self.finish(u'没有权限') + await self.finish('没有权限') return tpls = await self.db.tpl.list(userid=None, limit=None, fields=('id', 'sitename', 'public')) - for i in range(len(tpls)): + for i, _ in enumerate(tpls): if tpls[i]['public'] == 2: - tpls[i]['sitename'] += u' [已取消]' + tpls[i]['sitename'] += ' [已取消]' await self.render('tpl_push.html', tpl=tpl, tpls=tpls) - @tornado.web.authenticated + @authenticated async def post(self, tplid): user = self.current_user tplid = int(tplid) @@ -40,7 +38,7 @@ async def post(self, tplid): tpl = await self.db.tpl.get(tplid, fields=('id', 'userid', ), sql_session=sql_session) if not self.permission(tpl, 'w'): self.evil(+5) - await 
self.finish(u'没有权限') + await self.finish('没有权限') return to_tplid = int(self.get_argument('totpl')) @@ -52,7 +50,7 @@ async def post(self, tplid): totpl = await self.db.tpl.get(to_tplid, fields=('id', 'userid', ), sql_session=sql_session) if not totpl: self.evil(+1) - await self.finish(u'模板不存在') + await self.finish('模板不存在') return to_userid = totpl['userid'] @@ -65,7 +63,7 @@ async def post(self, tplid): class TPLVarHandler(BaseHandler): async def get(self, tplid): - user = self.current_user + # user = self.current_user tpl = await self.db.tpl.get(tplid, fields=('id', 'note', 'userid', 'sitename', 'siteurl', 'variables', 'init_env')) if not self.permission(tpl): self.evil(+5) @@ -77,11 +75,15 @@ async def get(self, tplid): class TPLDelHandler(BaseHandler): - @tornado.web.authenticated + @authenticated async def post(self, tplid): - user = self.current_user + # user = self.current_user async with self.db.transaction() as sql_session: - tpl = self.check_permission(await self.db.tpl.get(tplid, fields=('id', 'userid'), sql_session=sql_session), 'w') + tpl = self.check_permission(await self.db.tpl.get(tplid, fields=('id', 'userid', 'public'), sql_session=sql_session), 'w') + if tpl['public'] == 1: + prs = await self.db.push_request.list(to_tplid=tplid, fields=('id', ), sql_session=sql_session) + for pr in prs: + await self.db.push_request.mod(pr['id'], status=self.db.push_request.CANCEL, sql_session=sql_session) await self.db.tpl.delete(tplid, sql_session=sql_session) self.redirect('/my/') @@ -98,7 +100,7 @@ async def post(self, tplid): b'\xc2\xa0', b' ') data = json.loads(self.request.body) except Exception as e: - logger_Web_Handler.debug('TPLRunHandler post error: %s' % e) + logger_web_handler.debug('TPLRunHandler post error: %s', e, exc_info=config.traceback_print) tplid = tplid or data.get( 'tplid') or self.get_argument('_binux_tplid', None) @@ -116,10 +118,11 @@ async def post(self, tplid): if not fetch_tpl: try: fetch_tpl = json.loads(self.get_argument('tpl')) - 
except: + except Exception as e: + logger_web_handler.debug("parse json error: %s", e, exc_info=config.traceback_print) if not user: return await self.render('tpl_run_failed.html', log="请先登录!") - raise HTTPError(400) + raise HTTPError(400) from e env = data.get('env') if not env: @@ -128,8 +131,9 @@ async def post(self, tplid): variables=json.loads(self.get_argument('env')), session=[] ) - except: - raise HTTPError(400) + except Exception as e: + logger_web_handler.debug("parse json error: %s", e, exc_info=config.traceback_print) + raise HTTPError(400) from e try: url = parse_url(env['variables'].get('_binux_proxy')) @@ -147,12 +151,9 @@ async def post(self, tplid): else: result, _ = await self.fetcher.do_fetch(fetch_tpl, env, proxies=[]) except Exception as e: - if config.traceback_print: - traceback.print_exc() + logger_web_handler.error('UserID:%d tplID:%d failed! \r\n%s', user.get( + 'id', -1) or -1, int(tplid or -1), str(e).replace('\\r\\n', '\r\n'), exc_info=config.traceback_print) await self.render('tpl_run_failed.html', log=str(e)) - if user: - logger_Web_Handler.error('UserID:%d tplID:%d failed! 
\r\n%s', user.get( - 'id', -1) or -1, int(tplid or -1), str(e).replace('\\r\\n', '\r\n')) return if tpl: @@ -170,37 +171,36 @@ async def get(self): class TPLGroupHandler(BaseHandler): - @tornado.web.authenticated + @authenticated async def get(self, tplid): user = self.current_user - groupNow = (await self.db.tpl.get(tplid, fields=('_groups',)))['_groups'] - tasks = [] + group_now = (await self.db.tpl.get(tplid, fields=('_groups',)))['_groups'] + # tasks = [] _groups = [] tpls = await self.db.tpl.list(userid=user['id'], fields=('_groups',), limit=None) for tpl in tpls: temp = tpl['_groups'] - if (temp not in _groups): + if temp not in _groups: _groups.append(temp) - await self.render('tpl_setgroup.html', tplid=tplid, _groups=_groups, groupNow=groupNow) + await self.render('tpl_setgroup.html', tplid=tplid, _groups=_groups, groupNow=group_now) - @tornado.web.authenticated + @authenticated async def post(self, tplid): envs = {} for key in self.request.body_arguments: envs[key] = self.get_body_arguments(key) - New_group = envs['New_group'][0].strip() + new_group = envs['New_group'][0].strip() - if New_group != "": - target_group = New_group + if new_group != "": + target_group = new_group else: - for value in envs: - if envs[value][0] == 'on': + for key, value in envs.items(): + if value[0] == 'on': target_group = escape_decode( - value.strip()[2:-1], "hex-escape")[0].decode('utf-8') + key.strip()[2:-1], "hex-escape")[0].decode('utf-8') break - else: - target_group = 'None' + target_group = 'None' await self.db.tpl.mod(tplid, _groups=target_group) @@ -208,10 +208,10 @@ async def post(self, tplid): handlers = [ - ('/tpl/(\d+)/push', TPLPushHandler), - ('/tpl/(\d+)/var', TPLVarHandler), - ('/tpl/(\d+)/del', TPLDelHandler), - ('/tpl/?(\d+)?/run', TPLRunHandler), - ('/tpls/public', PublicTPLHandler), - ('/tpl/(\d+)/group', TPLGroupHandler), + (r'/tpl/(\d+)/push', TPLPushHandler), + (r'/tpl/(\d+)/var', TPLVarHandler), + (r'/tpl/(\d+)/del', TPLDelHandler), + 
(r'/tpl/?(\d+)?/run', TPLRunHandler), + (r'/tpls/public', PublicTPLHandler), + (r'/tpl/(\d+)/group', TPLGroupHandler), ] diff --git a/web/handlers/user.py b/web/handlers/user.py index afe170edb11..4b22ba5e837 100644 --- a/web/handlers/user.py +++ b/web/handlers/user.py @@ -4,53 +4,49 @@ # Author: Binux # http://binux.me # Created on 2014-08-09 11:39:25 +# pylint: disable=broad-exception-raised import base64 import datetime import json -import time - -from tornado import gen, iostream - -try: - import aiofiles - aio_import = True -except: - aio_import = False import os import re import sqlite3 +import time import traceback from Crypto.Hash import MD5 +from tornado import gen, iostream +from tornado.web import authenticated import config -from backup import DBnew from libs import mcrypto as crypto -from libs.funcs import pusher +from libs.funcs import Pusher +from web.handlers.base import BaseHandler, logger_web_handler -from .base import * +try: + import aiofiles + AIO_IMPORT = True +except ImportError: + AIO_IMPORT = False def tostr(s): - if isinstance(s, bytes): + if isinstance(s, (bytes, bytearray)): try: return s.decode() - except : - return s - if isinstance(s, bytearray): - try: - return s.decode() - except : + except Exception as e: + logger_web_handler.debug('decode error: %s', e, exc_info=config.traceback_print) return s return s + class UserRegPush(BaseHandler): - @tornado.web.authenticated + @authenticated async def get(self, userid): await self.render('user_register_pusher.html', userid=userid) - @tornado.web.authenticated + @authenticated async def post(self, userid): envs = {} for key in self.request.body_arguments: @@ -64,172 +60,169 @@ async def post(self, userid): dingding_token = env["dingding_token"] qywx_webhook = env["qywx_webhook"] log = "" - if ("reg" == self.get_body_argument('func')): + if "reg" == self.get_body_argument('func'): try: async with self.db.transaction() as sql_session: - if (barkurl != ""): - if (barkurl[-1] != '/'): - 
barkurl=barkurl+'/' - await self.db.user.mod(userid, barkurl = barkurl,sql_session=sql_session) - if ((await self.db.user.get(userid, fields=('barkurl',), sql_session=sql_session))["barkurl"] == barkurl): - log = u"注册 Bark 成功\r\n" + if barkurl != "": + if barkurl[-1] != '/': + barkurl = barkurl + '/' + await self.db.user.mod(userid, barkurl=barkurl, sql_session=sql_session) + if (await self.db.user.get(userid, fields=('barkurl',), sql_session=sql_session))["barkurl"] == barkurl: + log = "注册 Bark 成功\r\n" else: - log = u"注册 Bark 失败\r\n" + log = "注册 Bark 失败\r\n" else: - log = u"BarkUrl 未填写完整\r\n" + log = "BarkUrl 未填写完整\r\n" - if (skey != ""): - await self.db.user.mod(userid, skey = skey, sql_session=sql_session) - if ((await self.db.user.get(userid, fields=('skey',), sql_session=sql_session))["skey"] == skey): - log = log+u"注册 S酱 成功\r\n" + if skey != "": + await self.db.user.mod(userid, skey=skey, sql_session=sql_session) + if (await self.db.user.get(userid, fields=('skey',), sql_session=sql_session))["skey"] == skey: + log = log + "注册 S酱 成功\r\n" else: - log = log+u"注册 S酱 失败\r\n" + log = log + "注册 S酱 失败\r\n" else: - log = log+u"Sendkey 未填写完整\r\n" + log = log + "Sendkey 未填写完整\r\n" - if (wxpusher_token != ""): - await self.db.user.mod(userid, wxpusher = wxpusher_token, sql_session=sql_session) - if ((await self.db.user.get(userid, fields=('wxpusher',), sql_session=sql_session))["wxpusher"] == wxpusher_token): - log = log+u"注册 WxPusher 成功\r\n" + if wxpusher_token != "": + await self.db.user.mod(userid, wxpusher=wxpusher_token, sql_session=sql_session) + if (await self.db.user.get(userid, fields=('wxpusher',), sql_session=sql_session))["wxpusher"] == wxpusher_token: + log = log + "注册 WxPusher 成功\r\n" else: - log = log+u"注册 WxPusher 失败\r\n" + log = log + "注册 WxPusher 失败\r\n" else: - log = log+u"WxPusher 未填写完整\r\n" + log = log + "WxPusher 未填写完整\r\n" - if (qywx_token != ""): - await self.db.user.mod(userid, qywx_token = qywx_token, sql_session=sql_session) - if ((await 
self.db.user.get(userid, fields=('qywx_token',), sql_session=sql_session))["qywx_token"] == qywx_token): - log = log+u"注册 企业微信 Pusher 成功\r\n" + if qywx_token != "": + await self.db.user.mod(userid, qywx_token=qywx_token, sql_session=sql_session) + if (await self.db.user.get(userid, fields=('qywx_token',), sql_session=sql_session))["qywx_token"] == qywx_token: + log = log + "注册 企业微信 Pusher 成功\r\n" else: - log = log+u"注册 企业微信 Pusher 失败\r\n" + log = log + "注册 企业微信 Pusher 失败\r\n" else: - log = log+u"企业微信 未填写完整\r\n" + log = log + "企业微信 未填写完整\r\n" - if (tg_token != ""): - await self.db.user.mod(userid, tg_token = tg_token, sql_session=sql_session) - if ((await self.db.user.get(userid, fields=('tg_token',), sql_session=sql_session))["tg_token"] == tg_token): - log = log+u"注册 Tg Bot 成功\r\n" + if tg_token != "": + await self.db.user.mod(userid, tg_token=tg_token, sql_session=sql_session) + if (await self.db.user.get(userid, fields=('tg_token',), sql_session=sql_session))["tg_token"] == tg_token: + log = log + "注册 Tg Bot 成功\r\n" else: - log = log+u"注册 Tg Bot 失败\r\n" + log = log + "注册 Tg Bot 失败\r\n" else: - log = log+u"Tg Bot 未填写完整\r\n" + log = log + "Tg Bot 未填写完整\r\n" - if (dingding_token != ""): - await self.db.user.mod(userid, dingding_token = dingding_token, sql_session=sql_session) - if ((await self.db.user.get(userid, fields=('dingding_token',), sql_session=sql_session))["dingding_token"] == dingding_token): - log = log+u"注册 DingDing Bot 成功\r\n" + if dingding_token != "": + await self.db.user.mod(userid, dingding_token=dingding_token, sql_session=sql_session) + if (await self.db.user.get(userid, fields=('dingding_token',), sql_session=sql_session))["dingding_token"] == dingding_token: + log = log + "注册 DingDing Bot 成功\r\n" else: - log = log+u"注册 DingDing Bot 失败\r\n" + log = log + "注册 DingDing Bot 失败\r\n" else: - log = log+u"DingDing Bot 未填写完整\r\n" + log = log + "DingDing Bot 未填写完整\r\n" - if (qywx_webhook != ""): - await self.db.user.mod(userid, qywx_webhook = 
qywx_webhook, sql_session=sql_session) - if ((await self.db.user.get(userid, fields=('qywx_webhook',), sql_session=sql_session))["qywx_webhook"] == qywx_webhook): - log = log+u"注册 企业微信 Webhook 成功\r\n" + if qywx_webhook != "": + await self.db.user.mod(userid, qywx_webhook=qywx_webhook, sql_session=sql_session) + if (await self.db.user.get(userid, fields=('qywx_webhook',), sql_session=sql_session))["qywx_webhook"] == qywx_webhook: + log = log + "注册 企业微信 Webhook 成功\r\n" else: - log = log+u"注册 企业微信 Webhook 失败\r\n" + log = log + "注册 企业微信 Webhook 失败\r\n" else: - log = log+u"企业微信 Webhook 未填写完整\r\n" + log = log + "企业微信 Webhook 未填写完整\r\n" except Exception as e: - if config.traceback_print: - traceback.print_exc() + logger_web_handler.error('UserID: %s register Pusher_info failed! Reason: %s', userid or '-1', str(e), exc_info=config.traceback_print) await self.render('tpl_run_failed.html', log=str(e)) - logger_Web_Handler.error('UserID: %s register Pusher_info failed! Reason: %s', userid or '-1', str(e)) return - await self.render('utils_run_result.html', log=log, title=u'设置成功', flg='success') + await self.render('utils_run_result.html', log=log, title='设置成功', flg='success') return else: try: - f = pusher(self.db) + f = Pusher(self.db) t = datetime.datetime.now().strftime('%y-%m-%d %H:%M:%S') - if (barkurl != ""): - r = await f.send2bark(barkurl, u"正在测试Bark", u"{t} 发送测试".format(t=t)) + if barkurl != "": + r = await f.send2bark(barkurl, "正在测试Bark", f"{t} 发送测试") if r == 'True': - log = u"Bark 已推送, 请检查是否收到\r\n" + log = "Bark 已推送, 请检查是否收到\r\n" else: - log = u"Bark 推送失败, 失败原因: {}\r\n".format(r) + log = "Bark 推送失败, 失败原因: {r}\r\n" else: - log = u"BarkUrl 未填写完整\r\n" + log = "BarkUrl 未填写完整\r\n" - if (skey != ""): - r = await f.send2s(skey, u"正在测试S酱", u"{t} 发送测试".format(t=t)) + if skey != "": + r = await f.send2s(skey, "正在测试S酱", f"{t} 发送测试") if r == 'True': - log = log+u"S酱 已推送, 请检查是否收到\r\n" + log = log + "S酱 已推送, 请检查是否收到\r\n" else: - log = log+u"S酱 推送失败, 失败原因: {}\r\n".format(r) + log 
= log + f"S酱 推送失败, 失败原因: {r}\r\n" else: - log = log+u"Sendkey 未填写完整\r\n" + log = log + "Sendkey 未填写完整\r\n" - if (wxpusher_token != ""): - r = await f.send2wxpusher("{0}".format(wxpusher_token),u"{t} 发送测试".format(t=t)) + if wxpusher_token != "": + r = await f.send2wxpusher(str(wxpusher_token), f"{t} 发送测试") if r == 'True': - log = log+u"WxPusher 已推送, 请检查是否收到\r\n" + log = log + "WxPusher 已推送, 请检查是否收到\r\n" else: - log = log+u"WxPusher 推送失败, 失败原因: {}\r\n".format(r) + log = log + f"WxPusher 推送失败, 失败原因: {r}\r\n" else: - log = log+u"WxPusher 未填写完整\r\n" + log = log + "WxPusher 未填写完整\r\n" - if (qywx_token != ""): - r = await f.qywx_pusher_send(qywx_token, "正在测试企业微信 Pusher", u"{t} 发送测试".format(t=t)) + if qywx_token != "": + r = await f.qywx_pusher_send(qywx_token, "正在测试企业微信 Pusher", f"{t} 发送测试") if r == 'True': - log = log+u"企业微信 Pusher 已推送, 请检查是否收到\r\n" + log = log + "企业微信 Pusher 已推送, 请检查是否收到\r\n" else: - log = log+u"企业微信 Pusher 推送失败, 失败原因: {}\r\n".format(r) + log = log + f"企业微信 Pusher 推送失败, 失败原因: {r}\r\n" else: - log = log+u"企业微信 未填写完整\r\n" + log = log + "企业微信 未填写完整\r\n" - if (tg_token != ""): - r = await f.send2tg(tg_token, "正在测试Tg Bot", u"{t} 发送测试".format(t=t)) + if tg_token != "": + r = await f.send2tg(tg_token, "正在测试Tg Bot", f"{t} 发送测试") if r == 'True': - log = log+u"Tg Bot 已推送, 请检查是否收到\r\n" + log = log + "Tg Bot 已推送, 请检查是否收到\r\n" else: - log = log+u"Tg Bot 推送失败, 失败原因: {}\r\n".format(r) + log = log + f"Tg Bot 推送失败, 失败原因: {r}\r\n" else: - log = log+u"Tg Bot 未填写完整\r\n" + log = log + "Tg Bot 未填写完整\r\n" - if (dingding_token != ""): - r = await f.send2dingding(dingding_token, "正在测试DingDing Bot", u"{t} 发送测试".format(t=t)) + if dingding_token != "": + r = await f.send2dingding(dingding_token, "正在测试DingDing Bot", f"{t} 发送测试") if r == 'True': - log = log+u"DingDing Bot 已推送, 请检查是否收到\r\n" + log = log + "DingDing Bot 已推送, 请检查是否收到\r\n" else: - log = log+u"DingDing Bot 推送失败, 失败原因: {}\r\n".format(r) + log = log + f"DingDing Bot 推送失败, 失败原因: {r}\r\n" else: - log = log+u"DingDing Bot 
未填写完整\r\n" + log = log + "DingDing Bot 未填写完整\r\n" - if (qywx_webhook != ""): - r = await f.qywx_webhook_send(qywx_webhook, "正在测试企业微信 Webhook", u"{t} 发送测试".format(t=t)) + if qywx_webhook != "": + r = await f.qywx_webhook_send(qywx_webhook, "正在测试企业微信 Webhook", f"{t} 发送测试") if r == 'True': - log = log+u"企业微信 Webhook 已推送, 请检查是否收到\r\n" + log = log + "企业微信 Webhook 已推送, 请检查是否收到\r\n" else: - log = log+u"企业微信 Webhook 推送失败, 失败原因: {}\r\n".format(r) + log = log + f"企业微信 Webhook 推送失败, 失败原因: {r}\r\n" except Exception as e: - if config.traceback_print: - traceback.print_exc() + logger_web_handler.error('UserID: %s test Pusher_info failed! Reason: %s', userid or '-1', str(e), exc_info=config.traceback_print) await self.render('tpl_run_failed.html', log=str(e)) - logger_Web_Handler.error('UserID: %s test Pusher_info failed! Reason: %s', userid or '-1', str(e)) return - await self.render('utils_run_result.html', log=log, title=u'设置成功', flg='success') + await self.render('utils_run_result.html', log=log, title='设置成功', flg='success') return + class UserRegPushSw(BaseHandler): - @tornado.web.authenticated + @authenticated async def get(self, userid): tasks = [] for task in await self.db.task.list(userid, fields=('id', 'tplid', 'note', 'disabled', 'ctime', 'pushsw'), limit=None): - tpl = await self.db.tpl.get(task['tplid'], fields=('id', 'userid', 'sitename', 'siteurl', 'banner', 'note') ) + tpl = await self.db.tpl.get(task['tplid'], fields=('id', 'userid', 'sitename', 'siteurl', 'banner', 'note')) task['tpl'] = tpl task['pushsw'] = json.loads(task['pushsw']) tasks.append(task) - temp = await self.db.user.get(userid, fields=('noticeflg','push_batch')) + temp = await self.db.user.get(userid, fields=('noticeflg', 'push_batch')) push_batch = json.loads(temp['push_batch']) - push_batch['time']=time.strftime("%H:%M:%S",time.localtime(int(push_batch['time']))) + push_batch['time'] = time.strftime("%H:%M:%S", time.localtime(int(push_batch['time']))) temp = temp['noticeflg'] flg = {} 
flg['handpush_succ'] = False if ((temp & 0x008) == 0) else True @@ -237,44 +230,47 @@ async def get(self, userid): flg['autopush_succ'] = False if ((temp & 0x002) == 0) else True flg['autopush_fail'] = False if ((temp & 0x001) == 0) else True - flg['barksw'] = False if ((temp & 0x040) == 0) else True - flg['schansw'] = False if ((temp & 0x020) == 0) else True - flg['wxpushersw'] = False if ((temp & 0x010) == 0) else True - flg['mailpushersw'] = False if ((temp & 0x080) == 0) else True - flg['cuspushersw'] = False if ((temp & 0x100) == 0) else True - flg['qywxpushersw'] = False if ((temp & 0x200) == 0) else True - flg['tgpushersw'] = False if ((temp & 0x400) == 0) else True + flg['barksw'] = False if ((temp & 0x040) == 0) else True + flg['schansw'] = False if ((temp & 0x020) == 0) else True + flg['wxpushersw'] = False if ((temp & 0x010) == 0) else True + flg['mailpushersw'] = False if ((temp & 0x080) == 0) else True + flg['cuspushersw'] = False if ((temp & 0x100) == 0) else True + flg['qywxpushersw'] = False if ((temp & 0x200) == 0) else True + flg['tgpushersw'] = False if ((temp & 0x400) == 0) else True flg['dingdingpushersw'] = False if ((temp & 0x800) == 0) else True flg['qywxwebhooksw'] = False if ((temp & 0x1000) == 0) else True logtime = json.loads((await self.db.user.get(userid, fields=('logtime',)))['logtime']) - if 'schanEN' not in logtime:logtime['schanEN'] = False - if 'WXPEn' not in logtime:logtime['WXPEn'] = False - if 'ErrTolerateCnt' not in logtime:logtime['ErrTolerateCnt'] = 0 - + if 'schanEN' not in logtime: + logtime['schanEN'] = False + if 'WXPEn' not in logtime: + logtime['WXPEn'] = False + if 'ErrTolerateCnt' not in logtime: + logtime['ErrTolerateCnt'] = 0 await self.render('user_register_pushsw.html', userid=userid, flg=flg, tasks=tasks, logtime=logtime, push_batch=push_batch) - @tornado.web.authenticated + @authenticated async def post(self, userid): try: async with self.db.transaction() as sql_session: tasks = [] for task in await 
self.db.task.list(userid, fields=('id', 'tplid', 'note', 'disabled', 'ctime', 'pushsw'), limit=None, sql_session=sql_session): - tpl = await self.db.tpl.get(task['tplid'], fields=('id', 'userid', 'sitename', 'siteurl', 'banner', 'note'), sql_session=sql_session ) + tpl = await self.db.tpl.get(task['tplid'], fields=('id', 'userid', 'sitename', 'siteurl', 'banner', 'note'), sql_session=sql_session) task['tpl'] = tpl task['pushsw'] = json.loads(task['pushsw']) task['pushsw']["logen"] = False task['pushsw']["pushen"] = False tasks.append(task) - temp = await self.db.user.get(userid, fields=('noticeflg','push_batch'), sql_session=sql_session) + temp = await self.db.user.get(userid, fields=('noticeflg', 'push_batch'), sql_session=sql_session) envs = {} for key in self.request.body_arguments: envs[key] = self.get_body_arguments(key) env = json.loads(envs['env'][0]) logtime = json.loads((await self.db.user.get(userid, fields=('logtime',), sql_session=sql_session))['logtime']) - if 'ErrTolerateCnt' not in logtime:logtime['ErrTolerateCnt'] = 0 - if (logtime['ErrTolerateCnt'] != int(env['ErrTolerateCnt'])): + if 'ErrTolerateCnt' not in logtime: + logtime['ErrTolerateCnt'] = 0 + if logtime['ErrTolerateCnt'] != int(env['ErrTolerateCnt']): logtime['ErrTolerateCnt'] = int(env['ErrTolerateCnt']) await self.db.user.mod(userid, logtime=json.dumps(logtime), sql_session=sql_session) @@ -284,22 +280,22 @@ async def post(self, userid): else: push_batch["sw"] = False if env.get("push_batch_value"): - push_batch["time"] = time.mktime(time.strptime(time.strftime("%Y-%m-%d",time.localtime(time.time()))+env["push_batch_value"],"%Y-%m-%d%H:%M:%S")) + push_batch["time"] = time.mktime(time.strptime(time.strftime("%Y-%m-%d", time.localtime(time.time())) + env["push_batch_value"], "%Y-%m-%d%H:%M:%S")) if env.get("push_batch_delta"): push_batch["delta"] = int(env["push_batch_delta"]) else: push_batch["delta"] = 86400 await self.db.user.mod(userid, push_batch=json.dumps(push_batch), 
sql_session=sql_session) - barksw_flg = 1 if ("barksw" in env) else 0 - schansw_flg = 1 if ("schansw" in env) else 0 - wxpushersw_flg = 1 if ("wxpushersw" in env) else 0 - mailpushersw_flg = 1 if ("mailpushersw" in env) else 0 - cuspushersw_flg = 1 if ("cuspushersw" in env) else 0 - qywxpushersw_flg = 1 if ("qywxpushersw" in env) else 0 - tgpushersw_flg = 1 if ("tgpushersw" in env) else 0 - dingdingpushersw_flg = 1 if ("dingdingpushersw" in env) else 0 - qywxwebhooksw_flg = 1 if ("qywxwebhooksw" in env) else 0 + barksw_flg = 1 if ("barksw" in env) else 0 + schansw_flg = 1 if ("schansw" in env) else 0 + wxpushersw_flg = 1 if ("wxpushersw" in env) else 0 + mailpushersw_flg = 1 if ("mailpushersw" in env) else 0 + cuspushersw_flg = 1 if ("cuspushersw" in env) else 0 + qywxpushersw_flg = 1 if ("qywxpushersw" in env) else 0 + tgpushersw_flg = 1 if ("tgpushersw" in env) else 0 + dingdingpushersw_flg = 1 if ("dingdingpushersw" in env) else 0 + qywxwebhooksw_flg = 1 if ("qywxwebhooksw" in env) else 0 handpush_succ_flg = 1 if ("handpush_succ" in env) else 0 handpush_fail_flg = 1 if ("handpush_fail" in env) else 0 autopush_succ_flg = 1 if ("autopush_succ" in env) else 0 @@ -324,7 +320,7 @@ async def post(self, userid): if len(temp) > 0: taskid = int(temp[0]) for task in tasks: - if (taskid == task["id"]): + if taskid == task["id"]: task['pushsw']["pushen"] = True await self.db.user.mod(userid, noticeflg=flg, sql_session=sql_session) @@ -332,16 +328,15 @@ async def post(self, userid): await self.db.task.mod(task["id"], pushsw=json.dumps(task['pushsw']), sql_session=sql_session) except Exception as e: - if config.traceback_print: - traceback.print_exc() + logger_web_handler.error('UserID: %s modify Push_settings failed! Reason: %s', userid or '-1', str(e), exc_info=config.traceback_print) await self.render('tpl_run_failed.html', log=str(e)) - logger_Web_Handler.error('UserID: %s modify Push_settings failed! 
Reason: %s', userid or '-1', str(e)) return - await self.render('utils_run_result.html', log=u"设置完成", title=u'设置成功', flg='success') + await self.render('utils_run_result.html', log="设置完成", title='设置成功', flg='success') return + class UserManagerHandler(BaseHandler): - @tornado.web.authenticated + @authenticated async def get(self, userid): flg = self.get_argument("flg", '') title = self.get_argument("title", '') @@ -352,35 +347,35 @@ async def get(self, userid): if user and user['role'] == "admin": adminflg = True users = [] - for user in await self.db.user.list(fields=('id','status', 'role', 'ctime', 'email', 'atime', 'email_verified', 'aip')): - if (user['email_verified'] == 0): + for user in await self.db.user.list(fields=('id', 'status', 'role', 'ctime', 'email', 'atime', 'email_verified', 'aip')): + if user['email_verified'] == 0: user['email_verified'] = False else: user['email_verified'] = True users.append(user) - await self.render("user_manage.html", users=users, userid=userid, adminflg=adminflg, flg=flg, title=title,log=log) + await self.render("user_manage.html", users=users, userid=userid, adminflg=adminflg, flg=flg, title=title, log=log) return - @tornado.web.authenticated + @authenticated async def post(self, userid): try: async with self.db.transaction() as sql_session: user = await self.db.user.get(userid, fields=('role',), sql_session=sql_session) if user and user['role'] == "admin": envs = {} - for k, _ in self.request.body_arguments.items(): + for k, _ in self.request.body_arguments.items(): envs[k] = self.get_body_argument(k) mail = envs['adminmail'] pwd = envs['adminpwd'] - if await self.db.user.challenge_MD5(mail, pwd, sql_session=sql_session): - Target_users = [] + if await self.db.user.challenge_md5(mail, pwd, sql_session=sql_session): + target_users = [] for key, value in envs.items(): if value == "on": - Target_users.append(key) + target_users.append(key) - for sub_user in Target_users: - if (await self.db.user.get(sub_user, 
fields=('role',), sql_session=sql_session) != 'admin'): + for sub_user in target_users: + if await self.db.user.get(sub_user, fields=('role',), sql_session=sql_session) != 'admin': if 'banbtn' in envs: await self.db.user.mod(sub_user, status='Disable', sql_session=sql_session) for task in await self.db.task.list(sub_user, fields=('id',), limit=None, sql_session=sql_session): @@ -397,7 +392,7 @@ async def post(self, userid): if 'delbtn' in envs: for task in await self.db.task.list(sub_user, fields=('id',), limit=None, sql_session=sql_session): await self.db.task.delete(task['id'], sql_session=sql_session) - logs = await self.db.tasklog.list(taskid = task['id'], fields=('id',), sql_session=sql_session) + logs = await self.db.tasklog.list(taskid=task['id'], fields=('id',), sql_session=sql_session) for log in logs: await self.db.tasklog.delete(log['id'], sql_session=sql_session) @@ -405,25 +400,26 @@ async def post(self, userid): if tpl['userid'] == int(sub_user): await self.db.tpl.delete(tpl['id'], sql_session=sql_session) - for notepad in await self.db.notepad.list(fields=('userid','notepadid'), limit=None, userid=sub_user, sql_session=sql_session): + for notepad in await self.db.notepad.list(fields=('userid', 'notepadid'), limit=None, userid=sub_user, sql_session=sql_session): await self.db.notepad.delete(sub_user, notepad['notepadid'], sql_session=sql_session) await self.db.user.delete(sub_user, sql_session=sql_session) else: - raise Exception(u"账号/密码错误") + raise Exception("账号/密码错误") else: - raise Exception(u"非管理员,不可操作") + raise Exception("非管理员,不可操作") except Exception as e: - if (str(e).find('get user need id or email') > -1): - e = u'请输入用户名/密码' + if str(e).find('get user need id or email') > -1: + e = '请输入用户名/密码' await self.render('utils_run_result.html', log=str(e), title='设置失败', flg='danger') - logger_Web_Handler.error('UserID: %s manage User failed! Reason: %s', userid or '-1', str(e)) + logger_web_handler.error('UserID: %s manage User failed! 
Reason: %s', userid or '-1', str(e), exc_info=config.traceback_print) return await self.render('utils_run_result.html', title='操作成功', flg='success') return + class UserDBHandler(BaseHandler): - @tornado.web.authenticated + @authenticated async def get(self, userid): adminflg = False user = await self.db.user.get(userid, fields=('role',)) @@ -432,58 +428,62 @@ async def get(self, userid): await self.render("DB_manage.html", userid=userid, adminflg=adminflg) return - @tornado.web.authenticated + @authenticated async def post(self, userid): + def backup_progress(status, remaining, total): + logger_web_handler.info('Sqlite_Backup:(%s) Copied %s of %s pages...', status, total - remaining, total) + + def restore_progress(status, remaining, total): + logger_web_handler.info('Sqlite_Restore:(%s) Copied %s of %s pages...', status, total - remaining, total) try: async with self.db.transaction() as sql_session: user = await self.db.user.get(userid, fields=('role', 'email'), sql_session=sql_session) envs = {} - for k, _ in self.request.body_arguments.items(): + for k, _ in self.request.body_arguments.items(): envs[k] = self.get_body_argument(k) mail = envs['adminmail'] pwd = envs['adminpwd'] - now=datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S') + now = datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S') - if user and await self.db.user.challenge_MD5(mail, pwd, sql_session=sql_session) and (user['email'] == mail): - if ('backupbtn' in envs): + if user and await self.db.user.challenge_md5(mail, pwd, sql_session=sql_session) and (user['email'] == mail): + if 'backupbtn' in envs: if user['role'] == "admin": if config.db_type != "sqlite3": - raise Exception(u"抱歉,暂不支持通过本页面备份MySQL数据!ヘ(;´Д`ヘ)") + raise Exception("抱歉,暂不支持通过本页面备份MySQL数据!ヘ(;´Д`ヘ)") filename = config.sqlite3.path - savename = "database_{now}.db".format(now=now) - if not aio_import: - raise Exception(u"更新容器后请先重启容器!") + savename = f"database_{now}.db" + if not AIO_IMPORT: + raise Exception("更新容器后请先重启容器!") conn_src = 
sqlite3.connect(filename, check_same_thread=False) conn_target = sqlite3.connect(savename, check_same_thread=False) - def progress(status, remaining, total): - logger_Web_Handler.info(f'Sqlite_Backup: Copied {total-remaining} of {total} pages...') - conn_src.backup(conn_target, progress=progress) + + conn_src.backup(conn_target, progress=backup_progress) conn_target.commit() conn_src.close() conn_target.close() try: - self.set_header ('Content-Type', 'application/octet-stream; charset=UTF-8') - self.set_header ('Content-Disposition', ('attachment; filename='+savename).encode('utf-8')) + self.set_header('Content-Type', 'application/octet-stream; charset=UTF-8') + self.set_header('Content-Disposition', ('attachment; filename=' + savename).encode('utf-8')) content_length = os.stat(savename).st_size self.set_header("Content-Length", content_length) async with aiofiles.open(savename, 'rb') as f: - self.set_header ('Content-Type', 'application/octet-stream') - self.set_header ('Content-Disposition', ('attachment; filename='+savename).encode('utf-8')) + self.set_header('Content-Type', 'application/octet-stream') + self.set_header('Content-Disposition', ('attachment; filename=' + savename).encode('utf-8')) - chunk_size = 1024*1024*1 # 1MB + chunk_size = 1024 * 1024 * 1 # 1MB while True: chunk = await f.read(chunk_size) if not chunk: break try: - self.write(chunk) # write the chunk to response - await self.flush() # send the chunk to client - except iostream.StreamClosedError: + self.write(chunk) # write the chunk to response + await self.flush() # send the chunk to client + except iostream.StreamClosedError as e: # this means the client has closed the connection # so break the loop - raise Exception("Stream closed") + raise Exception("Stream closed") from e finally: # deleting the chunk is very important because # if many clients are downloading files at the @@ -495,17 +495,17 @@ def progress(status, remaining, total): await gen.sleep(3) os.remove(savename) else: - raise 
Exception(u"管理员才能备份数据库") + raise Exception("管理员才能备份数据库") - if ('backuptplsbtn' in envs): + if 'backuptplsbtn' in envs: tpls = [] - for tpl in await self.db.tpl.list(userid=userid, fields=('id', 'siteurl', 'sitename', 'banner', 'note','fork', '_groups', 'har', 'tpl', 'variables','init_env'), limit=None, sql_session=sql_session): + for tpl in await self.db.tpl.list(userid=userid, fields=('id', 'siteurl', 'sitename', 'banner', 'note', 'fork', '_groups', 'har', 'tpl', 'variables', 'init_env'), limit=None, sql_session=sql_session): tpl['tpl'] = await self.db.user.decrypt(userid, tpl['tpl'], sql_session=sql_session) tpl['har'] = await self.db.user.decrypt(userid, tpl['har'], sql_session=sql_session) tpls.append(tpl) tasks = [] - for task in await self.db.task.list(userid, fields=('id', 'tplid', 'retry_count', 'retry_interval','note', 'disabled', '_groups', 'init_env', 'env', 'ontimeflg', 'ontime', 'pushsw', 'newontime'), limit=None, sql_session=sql_session): + for task in await self.db.task.list(userid, fields=('id', 'tplid', 'retry_count', 'retry_interval', 'note', 'disabled', '_groups', 'init_env', 'env', 'ontimeflg', 'ontime', 'pushsw', 'newontime'), limit=None, sql_session=sql_session): task['init_env'] = await self.db.user.decrypt(userid, task['init_env'], sql_session=sql_session) task['env'] = await self.db.user.decrypt(userid, task['env'], sql_session=sql_session) if task['env'] else None tasks.append(task) @@ -513,16 +513,16 @@ def progress(status, remaining, total): backupdata = {} backupdata['tpls'] = tpls backupdata['tasks'] = tasks - savename = "{mail}_{now}.json".format(mail = user['email'], now=now) - if not aio_import: - raise Exception(u"更新容器后请先重启容器!") + savename = f"{user['email']}_{now}.json" + if not AIO_IMPORT: + raise Exception("更新容器后请先重启容器!") async with aiofiles.open(savename, 'w', encoding='utf-8') as fp: - await fp.write(json.dumps(backupdata, ensure_ascii=False, indent=4 )) + await fp.write(json.dumps(backupdata, ensure_ascii=False, indent=4)) 
fp.close() - self.set_header ('Content-Type', 'application/octet-stream; charset=UTF-8') - self.set_header ('Content-Disposition', ('attachment; filename='+savename).encode('utf-8')) + self.set_header('Content-Type', 'application/octet-stream; charset=UTF-8') + self.set_header('Content-Disposition', ('attachment; filename=' + savename).encode('utf-8')) async with aiofiles.open(savename, 'rb') as f: - chunk_size = 1024*1024*1 # 1MB + chunk_size = 1024 * 1024 * 1 # 1MB while True: data = await f.read(chunk_size) if not data: @@ -533,12 +533,12 @@ def progress(status, remaining, total): await self.finish() return - if ('recoverytplsbtn' in envs): - if ('recfile' in self.request.files): + if 'recoverytplsbtn' in envs: + if 'recfile' in self.request.files: envs['recfile'] = self.request.files['recfile'][0]['body'] if envs['recfile'][:6] == b'SQLite': if user['role'] != "admin": - raise Exception(u"管理员才能操作数据库") + raise Exception("管理员才能操作数据库") db_dir = os.path.dirname(config.sqlite3.path) db_restore = os.path.join(db_dir, 'database_restore.db') with open(db_restore, 'wb') as f: @@ -548,9 +548,8 @@ def progress(status, remaining, total): # 先备份 database.db 到 database_backup.db conn_src = sqlite3.connect(db_now, check_same_thread=False) conn_target = sqlite3.connect(db_backup, check_same_thread=False) - def progress(status, remaining, total): - logger_Web_Handler.info(f'Sqlite_Backup: Copied {total-remaining} of {total} pages...') - conn_src.backup(conn_target,progress=progress) + + conn_src.backup(conn_target, progress=backup_progress) conn_target.commit() conn_src.close() conn_target.close() @@ -558,23 +557,22 @@ def progress(status, remaining, total): # 再还原 database_restore.db 到 database.db conn_src = sqlite3.connect(db_restore, check_same_thread=False) conn_target = sqlite3.connect(db_now, check_same_thread=False) - def progress(status, remaining, total): - logger_Web_Handler.info(f'Sqlite_Restore: Copied {total-remaining} of {total} pages...') - 
conn_src.backup(conn_target,progress=progress) + + conn_src.backup(conn_target, progress=restore_progress) conn_target.commit() conn_src.close() conn_target.close() - await self.render('utils_run_result.html', log=u"恢复完成, 请务必重启QD程序或容器!!!\r\nPS: 原始 database.db 文件已备份为 database_backup.db 文件!!!\r\n如还原失败, 请手动恢复 database_backup.db 文件!!!", title=u'设置成功', flg='success') - # raise Exception(u"抱歉,暂不支持通过本页面还原SQLite3数据库文件!(╥╯^╰╥)") + await self.render('utils_run_result.html', log="恢复完成, 请务必重启QD程序或容器!!!\r\nPS: 原始 database.db 文件已备份为 database_backup.db 文件!!!\r\n如还原失败, 请手动恢复 database_backup.db 文件!!!", title='设置成功', flg='success') + # raise Exception("抱歉,暂不支持通过本页面还原SQLite3数据库文件!(╥╯^╰╥)") return else: try: tpls = json.loads(envs['recfile'])['tpls'] tasks = json.loads(envs['recfile'])['tasks'] - except: - raise Exception(u"抱歉,暂不支持通过本页面还原该备份文件!(ノ ̄▽ ̄) \\r\\n \ - 请确认该文件来自于该页面\"备份\"按钮 (๑*◡*๑)。") - ids = [] + except Exception as e: + raise Exception("抱歉,暂不支持通过本页面还原该备份文件!(ノ ̄▽ ̄) \\r\\n \ + 请确认该文件来自于该页面\"备份\"按钮 (๑*◡*๑)。") from e + # ids = [] for newtpl in tpls: userid2 = int(userid) har = await self.db.user.encrypt(userid2, newtpl['har'], sql_session=sql_session) @@ -582,177 +580,170 @@ def progress(status, remaining, total): variables = newtpl['variables'] init_env = newtpl.get('init_env', "{}") newid = await self.db.tpl.add(userid2, har, tpl, variables, init_env=init_env, sql_session=sql_session) - await self.db.tpl.mod(newid, fork = newtpl['fork'], - siteurl = newtpl['siteurl'], - sitename = newtpl['sitename'], - note = newtpl['note'], - _groups = u'备份还原', - banner = newtpl['banner'], - sql_session=sql_session - ) + await self.db.tpl.mod(newid, fork=newtpl['fork'], + siteurl=newtpl['siteurl'], + sitename=newtpl['sitename'], + note=newtpl['note'], + _groups='备份还原', + banner=newtpl['banner'], + sql_session=sql_session + ) for task in tasks: - if (task['tplid'] == newtpl['id']): + if task['tplid'] == newtpl['id']: task['tplid'] = newid for newtask in tasks: userid2 = int(userid) 
newtask['init_env'] = await self.db.user.encrypt(userid2, newtask['init_env'], sql_session=sql_session) newtask['env'] = await self.db.user.encrypt(userid2, newtask['env'], sql_session=sql_session) - newtask['retry_count'] = newtask.get('retry_count',config.task_max_retry_count) + newtask['retry_count'] = newtask.get('retry_count', config.task_max_retry_count) newtask['retry_interval'] = newtask.get('retry_interval') taskid = await self.db.task.add(newtask['tplid'], userid, newtask['env'], sql_session=sql_session) - await self.db.task.mod(taskid, disabled = newtask['disabled'], - init_env = newtask['init_env'], - session = None, - retry_count = newtask['retry_count'], - retry_interval = newtask['retry_interval'], - note = newtask['note'], - _groups = u'备份还原', - ontimeflg = newtask['ontimeflg'], - ontime = newtask['ontime'], - pushsw = newtask['pushsw'], - newontime = newtask['newontime'], - sql_session=sql_session - ) - await self.render('utils_run_result.html', log=u"设置完成", title=u'设置成功', flg='success') + await self.db.task.mod(taskid, disabled=newtask['disabled'], + init_env=newtask['init_env'], + session=None, + retry_count=newtask['retry_count'], + retry_interval=newtask['retry_interval'], + note=newtask['note'], + _groups='备份还原', + ontimeflg=newtask['ontimeflg'], + ontime=newtask['ontime'], + pushsw=newtask['pushsw'], + newontime=newtask['newontime'], + sql_session=sql_session + ) + await self.render('utils_run_result.html', log="设置完成", title='设置成功', flg='success') return else: - raise Exception(u"请上传文件") + raise Exception("请上传文件") else: - raise Exception(u"账号/密码错误") + raise Exception("账号/密码错误") except Exception as e: if config.traceback_print: traceback.print_exc() - if (str(e).find('get user need id or email') > -1): - e = u'请输入用户名/密码' + if str(e).find('get user need id or email') > -1: + e = '请输入用户名/密码' self.set_status(400) self.set_header('Error-Message', base64.b64encode(str(e).encode('utf-8'))) - await self.render('utils_run_result.html', log=str(e), 
title=u'设置失败', flg='danger') - logger_Web_Handler.error('UserID: %s backup or restore Database failed! Reason: %s', userid or '-1', str(e)) + await self.render('utils_run_result.html', log=str(e), title='设置失败', flg='danger') + logger_web_handler.error('UserID: %s backup or restore Database failed! Reason: %s', userid or '-1', str(e)) return return class UserPushShowPvar(BaseHandler): - @tornado.web.authenticated - async def post(self,userid): + @authenticated + async def post(self, userid): try: user = await self.db.user.get(userid, fields=('role', 'email')) envs = {} - for k, _ in self.request.body_arguments.items(): + for k, _ in self.request.body_arguments.items(): envs[k] = self.get_body_argument(k) mail = envs['adminmail'] pwd = envs['adminpwd'] - if await self.db.user.challenge_MD5(mail, pwd) and (user['email'] == mail): + if await self.db.user.challenge_md5(mail, pwd) and (user['email'] == mail): key = await self.db.user.get(userid, fields=("barkurl", 'skey', 'wxpusher', 'qywx_token', 'tg_token', 'dingding_token', 'qywx_webhook')) - log = u"""BarkUrl 前值:{bark}\r\nSendkey 前值:{skey}\r\nWxPusher 前值:{wxpusher}\r\n企业微信 Pusher 前值:{qywx_token}\r\nTg Bot 前值:{tg_token}\r\nDingDing Bot 前值:{dingding_token}\r\n企业微信 WebHook 前值: {qywx_webhook}""".format( - bark = key['barkurl'], - skey = key['skey'], - wxpusher = key['wxpusher'], - qywx_token = key['qywx_token'], - tg_token = key['tg_token'], - dingding_token = key['dingding_token'], - qywx_webhook = key['qywx_webhook']) - - await self.render('utils_run_result.html', log=log, title=u'设置成功', flg='success') + log = f"""BarkUrl 前值:{key['barkurl']}\r\nSendkey 前值:{key['skey']}\r\nWxPusher 前值:{key['wxpusher']}\r\n企业微信 Pusher 前值:{key['qywx_token']}\r\nTg Bot 前值:{key['tg_token']}\r\nDingDing Bot 前值:{key['dingding_token']}\r\n企业微信 WebHook 前值: {key['qywx_webhook']}""" + + await self.render('utils_run_result.html', log=log, title='设置成功', flg='success') return - else: - raise Exception(u"账号/密码错误") + raise Exception("账号/密码错误") except 
Exception as e: if config.traceback_print: traceback.print_exc() - if (str(e).find('get user need id or email') > -1): - e = u'请输入用户名/密码' + if str(e).find('get user need id or email') > -1: + e = '请输入用户名/密码' await self.render('tpl_run_failed.html', log=str(e)) - logger_Web_Handler.error('UserID: %s show Push_settings failed! Reason: %s', userid or '-1', str(e)) + logger_web_handler.error('UserID: %s show Push_settings failed! Reason: %s', userid or '-1', str(e)) return -class custom_pusher_Handler(BaseHandler): - @tornado.web.authenticated - async def get(self,userid): + +class CustomPusherHandler(BaseHandler): + @authenticated + async def get(self, userid): diypusher = (await self.db.user.get(userid, fields=('diypusher',)))['diypusher'] - diypusher = json.loads(diypusher) if (diypusher != '') else {'mode':'GET'} + diypusher = json.loads(diypusher) if (diypusher != '') else {'mode': 'GET'} await self.render('user_register_cus_pusher.html', userid=userid, diypusher=diypusher) return - @tornado.web.authenticated - async def post(self,userid): + @authenticated + async def post(self, userid): try: envs = {} - for k, _ in self.request.body_arguments.items(): + for k, _ in self.request.body_arguments.items(): envs[k] = self.get_body_argument(k) - req = pusher(self.db) + req = Pusher(self.db) log = '' now = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) - tmp = await req.cus_pusher_send(envs ,u'推送测试', now) - if ('True' == tmp): - if (envs['btn'] == 'regbtn'): + tmp = await req.cus_pusher_send(envs , '推送测试', now) + if 'True' == tmp: + if envs['btn'] == 'regbtn': await self.db.user.mod(userid, diypusher=json.dumps(envs)) else: raise Exception(tmp) - log = u'运行成功,请检查是否收到推送' + log = '运行成功,请检查是否收到推送' except Exception as e: - if (str(e).find('get user need id or email') > -1): - e = u'请输入用户名/密码' - await self.render('utils_run_result.html', log=str(e), title=u'设置失败', flg='danger') - logger_Web_Handler.error('UserID: %s register or tes Cus_Pusher failed! 
Reason: %s', userid or '-1', str(e)) if config.traceback_print: traceback.print_exc() + if str(e).find('get user need id or email') > -1: + e = '请输入用户名/密码' + await self.render('utils_run_result.html', log=str(e), title='设置失败', flg='danger') + logger_web_handler.error('UserID: %s register or tes Cus_Pusher failed! Reason: %s', userid or '-1', str(e)) return - await self.render('utils_run_result.html', log=log, title=u'设置成功', flg='success') + await self.render('utils_run_result.html', log=log, title='设置成功', flg='success') return + class UserSetNewPWDHandler(BaseHandler): - @tornado.web.authenticated - async def get(self,userid): + @authenticated + async def get(self, userid): email = (await self.db.user.get(userid, fields=('email',)))['email'] await self.render('user_setnewpwd.html', userid=userid, usermail=email) return - @tornado.web.authenticated - async def post(self,userid): + @authenticated + async def post(self, userid): try: - log = u'设置成功' + log = '设置成功' envs = {} - for k, _ in self.request.body_arguments.items(): + for k, _ in self.request.body_arguments.items(): envs[k] = self.get_body_argument(k) async with self.db.transaction() as sql_session: adminuser = await self.db.user.get(email=envs['adminmail'], fields=('role', 'email'), sql_session=sql_session) - newPWD = envs['newpwd'] - if await self.db.user.challenge_MD5(envs['adminmail'], envs['adminpwd'], sql_session=sql_session) and (adminuser['role'] == 'admin'): - if (len(newPWD) >= 6): - await self.db.user.mod(userid, password=newPWD, sql_session=sql_session) - user = await self.db.user.get(userid, fields=('email','password','password_md5'), sql_session=sql_session) + new_pwd = envs['newpwd'] + if await self.db.user.challenge_md5(envs['adminmail'], envs['adminpwd'], sql_session=sql_session) and (adminuser['role'] == 'admin'): + if len(new_pwd) >= 6: + await self.db.user.mod(userid, password=new_pwd, sql_session=sql_session) + user = await self.db.user.get(userid, fields=('email', 'password', 
'password_md5'), sql_session=sql_session) hash = MD5.new() - hash.update(newPWD.encode('utf-8')) + hash.update(new_pwd.encode('utf-8')) tmp = crypto.password_hash(hash.hexdigest(), await self.db.user.decrypt(userid, user['password'], sql_session=sql_session)) - if (user['password_md5'] != tmp): + if user['password_md5'] != tmp: await self.db.user.mod(userid, password_md5=tmp, sql_session=sql_session) - if not (await self.db.user.challenge(envs['usermail'], newPWD, sql_session=sql_session)): - raise Exception(u'修改失败') + if not await self.db.user.challenge(envs['usermail'], new_pwd, sql_session=sql_session): + raise Exception('修改失败') else: - raise Exception(u'密码长度要大于6位') + raise Exception('密码长度要大于6位') else: - raise Exception(u'管理员用户名/密码错误') + raise Exception('管理员用户名/密码错误') except Exception as e: - if config.traceback_print: - traceback.print_exc() - await self.render('utils_run_result.html', log=str(e), title=u'设置失败', flg='danger') - logger_Web_Handler.error('UserID: %s set New_Password failed! Reason: %s', userid or '-1', str(e)) + logger_web_handler.error('UserID: %s set New_Password failed! 
Reason: %s', userid or '-1', str(e), exc_info=config.traceback_print) + await self.render('utils_run_result.html', log=str(e), title='设置失败', flg='danger') return - await self.render('utils_run_result.html', log=log, title=u'设置成功', flg='success') + await self.render('utils_run_result.html', log=log, title='设置成功', flg='success') return + handlers = [ - ('/user/(\d+)/pushsw', UserRegPushSw), - ('/user/(\d+)/regpush', UserRegPush), - ('/user/(\d+)/UserPushShowPvar', UserPushShowPvar), - ('/user/(\d+)/manage', UserManagerHandler), - ('/user/(\d+)/database', UserDBHandler), - ('/util/custom/(\d+)/pusher', custom_pusher_Handler), - ('/user/(\d+)/setnewpwd', UserSetNewPWDHandler), - ] + (r'/user/(\d+)/pushsw', UserRegPushSw), + (r'/user/(\d+)/regpush', UserRegPush), + (r'/user/(\d+)/UserPushShowPvar', UserPushShowPvar), + (r'/user/(\d+)/manage', UserManagerHandler), + (r'/user/(\d+)/database', UserDBHandler), + (r'/util/custom/(\d+)/pusher', CustomPusherHandler), + (r'/user/(\d+)/setnewpwd', UserSetNewPWDHandler), +] diff --git a/web/handlers/util.py b/web/handlers/util.py index 24b094e3620..b5bce778a00 100644 --- a/web/handlers/util.py +++ b/web/handlers/util.py @@ -1,5 +1,6 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- +# pylint: disable=broad-exception-raised import base64 import datetime @@ -10,6 +11,7 @@ import time import traceback import urllib +from typing import Optional from zoneinfo import ZoneInfo import aiohttp @@ -17,20 +19,22 @@ from Crypto.Cipher import PKCS1_v1_5 from Crypto.PublicKey import RSA from tornado import gen +from tornado.web import HTTPError, authenticated +import config from config import delay_max_timeout, strtobool from libs.log import Log +from web.handlers.base import BaseHandler, logger_web_handler -from .base import * - -logger_Web_Util = Log('QD.Web.Util').getlogger() +logger_web_util = Log('QD.Web.Util').getlogger() try: - import ddddocr + import ddddocr # type: ignore except ImportError as e: if config.display_import_warning: - 
logger_Web_Util.warning('Import DdddOCR module falied: \"%s\". \nTips: This warning message is only for prompting, it will not affect running of QD framework.', e) + logger_web_util.warning('Import DdddOCR module falied: \"%s\". \nTips: This warning message is only for prompting, it will not affect running of QD framework.', e) ddddocr = None + def request_parse(req_data): '''解析请求数据并以json形式返回''' if req_data.method == 'POST': @@ -46,10 +50,8 @@ async def get(self): try: seconds = float(self.get_argument("seconds", 0)) except Exception as e: - if config.traceback_print: - traceback.print_exc() - await gen.sleep(0.0) - self.write(u'Error, delay 0.0 second.') + logger_web_handler.debug('Error, delay 0.0 second: %s', e, exc_info=config.traceback_print) + self.write('Error, delay 0.0 second.') return if seconds < 0: seconds = 0.0 @@ -57,11 +59,11 @@ async def get(self): seconds = delay_max_timeout await gen.sleep(seconds) self.write( - u'Error, limited by delay_max_timeout, delay {seconds} second.' + 'Error, limited by delay_max_timeout, delay {seconds} second.' ) return await gen.sleep(seconds) - self.write(u'delay %s second.' % seconds) + self.write(f'delay {seconds} second.') return @@ -71,9 +73,8 @@ async def get(self, seconds): try: seconds = float(seconds) except Exception as e: - if config.traceback_print: - traceback.print_exc() - self.write(u'delay %s second.' % seconds) + logger_web_handler.debug('Error, delay 0.0 second: %s', e, exc_info=config.traceback_print) + self.write('Error, delay 0.0 second.') return if seconds < 0: seconds = 0.0 @@ -81,11 +82,11 @@ async def get(self, seconds): seconds = delay_max_timeout await gen.sleep(seconds) self.write( - u'Error, limited by delay_max_timeout, delay {seconds} second.' + 'Error, limited by delay_max_timeout, delay {seconds} second.' ) return await gen.sleep(seconds) - self.write(u'delay %s second.' 
% seconds) + self.write(f'delay {seconds} second.') return @@ -95,9 +96,8 @@ async def get(self, seconds): try: seconds = float(seconds) except Exception as e: - if config.traceback_print: - traceback.print_exc() - self.write(u'delay %s second.' % seconds) + logger_web_handler.debug('Error, delay 0.0 second: %s', e, exc_info=config.traceback_print) + self.write('Error, delay 0.0 second.') return if seconds < 0: seconds = 0.0 @@ -105,18 +105,21 @@ async def get(self, seconds): seconds = delay_max_timeout await gen.sleep(seconds) self.write( - u'Error, limited by delay_max_timeout, delay {seconds} second.' + f'Error, limited by {delay_max_timeout}, delay {seconds} second.' ) return await gen.sleep(seconds) - self.write(u'delay %s second.' % seconds) + self.write(f'delay {seconds} second.') return +GMT_FORMAT = "%a, %d %b %Y %H:%M:%S GMT" + + class TimeStampHandler(BaseHandler): async def get(self): - Rtv = {} + rtv = {} try: ts = self.get_argument("ts", "") dt = self.get_argument("dt", "") @@ -125,7 +128,6 @@ async def get(self): time_format = "%Y-%m-%d %H:%M:%S" cst_tz = ZoneInfo('Asia/Shanghai') utc_tz = ZoneInfo("UTC") - GMT_FORMAT = "%a, %d %b %Y %H:%M:%S GMT" tmp = datetime.datetime.fromtimestamp if dt: @@ -133,39 +135,39 @@ async def get(self): if ts: # 用户时间戳转北京时间 - Rtv[u"完整时间戳"] = float(ts) - Rtv[u"时间戳"] = int(Rtv[u"完整时间戳"]) - Rtv[u"16位时间戳"] = int(Rtv[u"完整时间戳"] * 1000000) - Rtv[u"周"] = tmp(Rtv[u"完整时间戳"]).strftime("%w/%W") - Rtv[u"日"] = "/".join([ - tmp(Rtv[u"完整时间戳"]).strftime("%j"), - yearday(tmp(Rtv[u"完整时间戳"]).year) + rtv["完整时间戳"] = float(ts) + rtv["时间戳"] = int(rtv["完整时间戳"]) + rtv["16位时间戳"] = int(rtv["完整时间戳"] * 1000000) + rtv["周"] = tmp(rtv["完整时间戳"]).strftime("%w/%W") + rtv["日"] = "/".join([ + tmp(rtv["完整时间戳"]).strftime("%j"), + yearday(tmp(rtv["完整时间戳"]).year) ]) - Rtv[u"北京时间"] = tmp(Rtv[u"完整时间戳"], cst_tz).strftime(time_format) - Rtv[u"GMT格式"] = tmp(Rtv[u"完整时间戳"], utc_tz).strftime(GMT_FORMAT) - Rtv[u"ISO格式"] = tmp(Rtv[u"完整时间戳"], - 
utc_tz).isoformat().split("+")[0] + "Z" + rtv["北京时间"] = tmp(rtv["完整时间戳"], cst_tz).strftime(time_format) + rtv["GMT格式"] = tmp(rtv["完整时间戳"], utc_tz).strftime(GMT_FORMAT) + rtv["ISO格式"] = tmp(rtv["完整时间戳"], + utc_tz).isoformat().split("+")[0] + "Z" else: # 当前本机时间戳, 本机时间和北京时间 - Rtv[u"完整时间戳"] = time.time() - Rtv[u"时间戳"] = int(Rtv[u"完整时间戳"]) - Rtv[u"16位时间戳"] = int(Rtv[u"完整时间戳"] * 1000000) - Rtv[u"本机时间"] = tmp(Rtv[u"完整时间戳"]).strftime(time_format) - Rtv[u"周"] = tmp(Rtv[u"完整时间戳"]).strftime("%w/%W") - Rtv[u"日"] = "/".join([ - tmp(Rtv[u"完整时间戳"]).strftime("%j"), - yearday(tmp(Rtv[u"完整时间戳"]).year) + rtv["完整时间戳"] = time.time() + rtv["时间戳"] = int(rtv["完整时间戳"]) + rtv["16位时间戳"] = int(rtv["完整时间戳"] * 1000000) + rtv["本机时间"] = tmp(rtv["完整时间戳"]).strftime(time_format) + rtv["周"] = tmp(rtv["完整时间戳"]).strftime("%w/%W") + rtv["日"] = "/".join([ + tmp(rtv["完整时间戳"]).strftime("%j"), + yearday(tmp(rtv["完整时间戳"]).year) ]) - Rtv[u"北京时间"] = tmp(Rtv[u"完整时间戳"], cst_tz).strftime(time_format) - Rtv[u"GMT格式"] = tmp(Rtv[u"完整时间戳"], utc_tz).strftime(GMT_FORMAT) - Rtv[u"ISO格式"] = tmp(Rtv[u"完整时间戳"], - utc_tz).isoformat().split("+")[0] + "Z" - Rtv[u"状态"] = "200" + rtv["北京时间"] = tmp(rtv["完整时间戳"], cst_tz).strftime(time_format) + rtv["GMT格式"] = tmp(rtv["完整时间戳"], utc_tz).strftime(GMT_FORMAT) + rtv["ISO格式"] = tmp(rtv["完整时间戳"], + utc_tz).isoformat().split("+")[0] + "Z" + rtv["状态"] = "200" except Exception as e: - Rtv[u"状态"] = str(e) + rtv["状态"] = str(e) self.set_header('Content-Type', 'application/json; charset=UTF-8') - self.write(json.dumps(Rtv, ensure_ascii=False, indent=4)) + self.write(json.dumps(rtv, ensure_ascii=False, indent=4)) async def post(self): await self.get() @@ -181,7 +183,7 @@ def yearday(year): class UniCodeHandler(BaseHandler): async def get(self): - Rtv = {} + rtv = {} try: content = self.get_argument("content", "") html_unescape = self.get_argument("html_unescape", "false") @@ -192,17 +194,17 @@ async def get(self): b'\xc2\xa0', b'\xa0').decode('unicode_escape') if strtobool(html_unescape): tmp = 
html.unescape(tmp) - Rtv[u"转换后"] = tmp - Rtv[u"状态"] = "200" + rtv["转换后"] = tmp + rtv["状态"] = "200" except Exception as e: - Rtv[u"状态"] = str(e) + rtv["状态"] = str(e) self.set_header('Content-Type', 'application/json; charset=UTF-8') - self.write(json.dumps(Rtv, ensure_ascii=False, indent=4)) + self.write(json.dumps(rtv, ensure_ascii=False, indent=4)) return async def post(self): - Rtv = {} + rtv = {} try: content = self.get_argument("content", "") html_unescape = self.get_argument("html_unescape", "false") @@ -213,122 +215,122 @@ async def post(self): b'\xc2\xa0', b'\xa0').decode('unicode_escape') if strtobool(html_unescape): tmp = html.unescape(tmp) - Rtv[u"转换后"] = tmp - Rtv[u"状态"] = "200" + rtv["转换后"] = tmp + rtv["状态"] = "200" except Exception as e: - Rtv[u"状态"] = str(e) + rtv["状态"] = str(e) self.set_header('Content-Type', 'application/json; charset=UTF-8') - self.write(json.dumps(Rtv, ensure_ascii=False, indent=4)) + self.write(json.dumps(rtv, ensure_ascii=False, indent=4)) return class GB2312Handler(BaseHandler): async def get(self): - Rtv = {} + rtv = {} try: content = self.get_argument("content", "") tmp = urllib.parse.quote(content, encoding="gb2312") - Rtv[u"转换后"] = tmp - Rtv[u"状态"] = "200" + rtv["转换后"] = tmp + rtv["状态"] = "200" except Exception as e: - Rtv[u"状态"] = str(e) + rtv["状态"] = str(e) self.set_header('Content-Type', 'application/json; charset=UTF-8') - self.write(json.dumps(Rtv, ensure_ascii=False, indent=4)) + self.write(json.dumps(rtv, ensure_ascii=False, indent=4)) return async def post(self): - Rtv = {} + rtv = {} try: content = self.get_argument("content", "") tmp = urllib.parse.quote(content, encoding="gb2312") - Rtv[u"转换后"] = tmp - Rtv[u"状态"] = "200" + rtv["转换后"] = tmp + rtv["状态"] = "200" except Exception as e: - Rtv[u"状态"] = str(e) + rtv["状态"] = str(e) self.set_header('Content-Type', 'application/json; charset=UTF-8') - self.write(json.dumps(Rtv, ensure_ascii=False, indent=4)) + self.write(json.dumps(rtv, ensure_ascii=False, indent=4)) 
return class UrlDecodeHandler(BaseHandler): async def get(self): - Rtv = {} + rtv = {} try: content = self.get_argument("content", "") encoding = self.get_argument("encoding", "utf-8") unquote_plus = self.get_argument("unquote_plus", "false") if strtobool(unquote_plus): - Rtv[u"转换后"] = urllib.parse.unquote_plus(content, encoding=encoding) + rtv["转换后"] = urllib.parse.unquote_plus(content, encoding=encoding) else: - Rtv[u"转换后"] = urllib.parse.unquote(content, encoding=encoding) - Rtv[u"状态"] = "200" + rtv["转换后"] = urllib.parse.unquote(content, encoding=encoding) + rtv["状态"] = "200" except Exception as e: - Rtv[u"状态"] = str(e) + rtv["状态"] = str(e) self.set_header('Content-Type', 'application/json; charset=UTF-8') - self.write(json.dumps(Rtv, ensure_ascii=False, indent=4)) + self.write(json.dumps(rtv, ensure_ascii=False, indent=4)) return async def post(self): - Rtv = {} + rtv = {} try: content = self.get_argument("content", "") encoding = self.get_argument("encoding", "utf-8") unquote_plus = self.get_argument("unquote_plus", "false") if strtobool(unquote_plus): - Rtv[u"转换后"] = urllib.parse.unquote_plus(content, encoding=encoding) + rtv["转换后"] = urllib.parse.unquote_plus(content, encoding=encoding) else: - Rtv[u"转换后"] = urllib.parse.unquote(content, encoding=encoding) - Rtv[u"状态"] = "200" + rtv["转换后"] = urllib.parse.unquote(content, encoding=encoding) + rtv["状态"] = "200" except Exception as e: - Rtv[u"状态"] = str(e) + rtv["状态"] = str(e) self.set_header('Content-Type', 'application/json; charset=UTF-8') - self.write(json.dumps(Rtv, ensure_ascii=False, indent=4)) + self.write(json.dumps(rtv, ensure_ascii=False, indent=4)) return class UtilRegexHandler(BaseHandler): async def get(self): - Rtv = {} + rtv = {} try: data = self.get_argument("data", "") p = self.get_argument("p", "") temp = {} ds = re.findall(p, data, re.IGNORECASE) - for cnt in range(0, len(ds)): - temp[cnt + 1] = ds[cnt] - Rtv[u"数据"] = temp - Rtv[u"状态"] = "OK" + for cnt, d in enumerate(ds): + temp[cnt + 1] = 
d + rtv["数据"] = temp + rtv["状态"] = "OK" except Exception as e: - Rtv[u"状态"] = str(e) + rtv["状态"] = str(e) self.set_header('Content-Type', 'application/json; charset=UTF-8') - self.write(json.dumps(Rtv, ensure_ascii=False, indent=4)) + self.write(json.dumps(rtv, ensure_ascii=False, indent=4)) return async def post(self): - Rtv = {} + rtv = {} try: data = self.get_argument("data", "") p = self.get_argument("p", "") temp = {} ds = re.findall(p, data, re.IGNORECASE) - for cnt in range(0, len(ds)): - temp[cnt + 1] = ds[cnt] - Rtv[u"数据"] = temp - Rtv[u"状态"] = "OK" + for cnt, d in enumerate(ds): + temp[cnt + 1] = d + rtv["数据"] = temp + rtv["状态"] = "OK" except Exception as e: - Rtv[u"状态"] = str(e) + rtv["状态"] = str(e) self.set_header('Content-Type', 'application/json; charset=UTF-8') - self.write(json.dumps(Rtv, ensure_ascii=False, indent=4)) + self.write(json.dumps(rtv, ensure_ascii=False, indent=4)) return @@ -336,51 +338,51 @@ async def post(self): class UtilStrReplaceHandler(BaseHandler): async def get(self): - Rtv = {} + rtv = {} try: s = self.get_argument("s", "") p = self.get_argument("p", "") t = self.get_argument("t", "") - Rtv[u"原始字符串"] = s - Rtv[u"处理后字符串"] = re.sub(p, t, s) - Rtv[u"状态"] = "OK" + rtv["原始字符串"] = s + rtv["处理后字符串"] = re.sub(p, t, s) + rtv["状态"] = "OK" if self.get_argument("r", "") == "text": - self.write(Rtv[u"处理后字符串"]) + self.write(rtv["处理后字符串"]) return else: self.set_header('Content-Type', 'application/json; charset=UTF-8') - self.write(json.dumps(Rtv, ensure_ascii=False, indent=4)) + self.write(json.dumps(rtv, ensure_ascii=False, indent=4)) return except Exception as e: - Rtv["状态"] = str(e) + rtv["状态"] = str(e) self.set_header('Content-Type', 'application/json; charset=UTF-8') - self.write(json.dumps(Rtv, ensure_ascii=False, indent=4)) + self.write(json.dumps(rtv, ensure_ascii=False, indent=4)) return async def post(self): - Rtv = {} + rtv = {} try: s = self.get_argument("s", "") p = self.get_argument("p", "") t = self.get_argument("t", "") - 
Rtv[u"原始字符串"] = s - Rtv[u"处理后字符串"] = re.sub(p, t, s) - Rtv[u"状态"] = "OK" + rtv["原始字符串"] = s + rtv["处理后字符串"] = re.sub(p, t, s) + rtv["状态"] = "OK" if self.get_argument("r", "") == "text": - self.write(Rtv[u"处理后字符串"]) + self.write(rtv["处理后字符串"]) return else: self.set_header('Content-Type', 'application/json; charset=UTF-8') - self.write(json.dumps(Rtv, ensure_ascii=False, indent=4)) + self.write(json.dumps(rtv, ensure_ascii=False, indent=4)) return except Exception as e: - Rtv["状态"] = str(e) + rtv["状态"] = str(e) self.set_header('Content-Type', 'application/json; charset=UTF-8') - self.write(json.dumps(Rtv, ensure_ascii=False, indent=4)) + self.write(json.dumps(rtv, ensure_ascii=False, indent=4)) return @@ -391,16 +393,16 @@ async def get(self): key = self.get_argument("key", "") data = self.get_argument("data", "") func = self.get_argument("f", "encode") - if (key) and (data) and (func): + if key and data and func: lines = "" temp = key temp = re.findall("-----.*?-----", temp) - if (len(temp) == 2): + if len(temp) == 2: keytemp = key for t in temp: keytemp = keytemp.replace(t, "") - while (keytemp): + while keytemp: line = keytemp[0:63] lines = lines + line + "\n" keytemp = keytemp.replace(line, "") @@ -408,27 +410,27 @@ async def get(self): lines = temp[0] + "\n" + lines + temp[1] else: - self.write(u"证书格式错误") + self.write("证书格式错误") return cipher_rsa = PKCS1_v1_5.new(RSA.import_key(lines)) - if (func.find("encode") > -1): + if func.find("encode") > -1: crypt_text = cipher_rsa.encrypt( bytes(data, encoding="utf8")) crypt_text = base64.b64encode(crypt_text).decode('utf8') self.write(crypt_text) return - elif (func.find("decode") > -1): + elif func.find("decode") > -1: t1 = base64.b64decode(data) decrypt_text = cipher_rsa.decrypt(t1, Random.new().read) decrypt_text = decrypt_text.decode('utf8') self.write(decrypt_text) return else: - self.write(u"功能选择错误") + self.write("功能选择错误") return else: - self.write(u"参数不完整,请确认") + self.write("参数不完整,请确认") return except Exception as 
e: self.write(str(e)) @@ -439,53 +441,53 @@ async def post(self): key = self.get_argument("key", "") data = self.get_argument("data", "") func = self.get_argument("f", "encode") - if (key) and (data) and (func): + if key and data and func: lines = "" for line in key.split("\n"): - if (line.find("--") < 0): + if line.find("--") < 0: line = line.replace(" ", "+") lines = lines + line + "\n" data = data.replace(" ", "+") cipher_rsa = PKCS1_v1_5.new(RSA.import_key(lines)) - if (func.find("encode") > -1): + if func.find("encode") > -1: crypt_text = cipher_rsa.encrypt( bytes(data, encoding="utf8")) crypt_text = base64.b64encode(crypt_text).decode('utf8') self.write(crypt_text) return - elif (func.find("decode") > -1): + elif func.find("decode") > -1: decrypt_text = cipher_rsa.decrypt(base64.b64decode(data), Random.new().read) decrypt_text = decrypt_text.decode('utf8') self.write(decrypt_text) return else: - self.write(u"功能选择错误") + self.write("功能选择错误") return else: - self.write(u"参数不完整,请确认") + self.write("参数不完整,请确认") return except Exception as e: self.write(str(e)) return -class toolboxHandler(BaseHandler): +class ToolboxHandler(BaseHandler): async def get(self, userid): - self.current_user["isadmin"] or self.check_permission( - {"userid": int(userid)}, 'r') - await self.render('toolbox.html', userid=userid) + if self.current_user["isadmin"] or self.check_permission( + {"userid": int(userid)}, 'r'): + await self.render('toolbox.html', userid=userid) async def post(self, userid): try: email = self.get_argument("email", "") pwd = self.get_argument("pwd", "") f = self.get_argument("f", "") - if (email) and (pwd) and (f): + if email and pwd and f: async with self.db.transaction() as sql_session: - if await self.db.user.challenge_MD5( + if await self.db.user.challenge_md5( email, pwd, sql_session=sql_session ) or await self.db.user.challenge( email, pwd, sql_session=sql_session): @@ -500,13 +502,13 @@ async def post(self, userid): fields=('content', ), 
sql_session=sql_session))['content'] new_data = self.get_argument("data", "") - if (f.find('write') > -1): + if f.find('write') > -1: text_data = new_data await self.db.notepad.mod(userid, notepadid, content=text_data, sql_session=sql_session) - elif (f.find('append') > -1): + elif f.find('append') > -1: if text_data is not None: text_data = text_data + '\r\n' + new_data else: @@ -518,55 +520,55 @@ async def post(self, userid): self.write(text_data) return else: - raise Exception(u"账号密码错误") + raise Exception("账号密码错误") else: - raise Exception(u"参数不完整,请确认") + raise Exception("参数不完整,请确认") except Exception as e: self.write(str(e)) return -class toolbox_notepad_Handler(BaseHandler): +class ToolboxNotepadHandler(BaseHandler): - @tornado.web.authenticated + @authenticated async def get(self, userid=None, notepadid=1): if userid is None: raise HTTPError(405) - self.current_user["isadmin"] or self.check_permission( - {"userid": int(userid)}, 'r') - notepadlist = await self.db.notepad.list(fields=('notepadid', - 'content'), - limit=20, - userid=userid) - notepadlist.sort(key=lambda x: x['notepadid']) - if len(notepadlist) == 0: - if await self.db.user.get(id=userid, fields=('id', )) is not None: - await self.db.notepad.add(dict(userid=userid, notepadid=1)) - notepadlist = await self.db.notepad.list(fields=('notepadid', - 'content'), - limit=20, - userid=userid) - else: - raise HTTPError(404, - log_message=u"用户不存在或未创建记事本", - reason=u"用户不存在或未创建记事本") - if int(notepadid) == 0: - notepadid = notepadlist[-1]['notepadid'] - await self.render('toolbox-notepad.html', - notepad_id=int(notepadid), - notepad_list=notepadlist, - userid=userid) + if self.current_user["isadmin"] or self.check_permission( + {"userid": int(userid)}, 'r'): + notepadlist = await self.db.notepad.list(fields=('notepadid', + 'content'), + limit=config.notepad_limit, + userid=userid) + notepadlist.sort(key=lambda x: x['notepadid']) + if len(notepadlist) == 0: + if await self.db.user.get(id=userid, fields=('id', )) 
is not None: + await self.db.notepad.add(dict(userid=userid, notepadid=1)) + notepadlist = await self.db.notepad.list(fields=('notepadid', + 'content'), + limit=config.notepad_limit, + userid=userid) + else: + raise HTTPError(404, + log_message="用户不存在或未创建记事本", + reason="用户不存在或未创建记事本") + if int(notepadid) == 0: + notepadid = notepadlist[-1]['notepadid'] + await self.render('toolbox-notepad.html', + notepad_id=int(notepadid), + notepad_list=notepadlist, + userid=userid) return - # @tornado.web.authenticated + # @authenticated async def post(self, userid=None): try: email = self.get_argument("email", "") pwd = self.get_argument("pwd", "") f = self.get_argument("f", "") - if (email) and (pwd) and (f): + if email and pwd and f: async with self.db.transaction() as sql_session: - if await self.db.user.challenge_MD5( + if await self.db.user.challenge_md5( email, pwd, sql_session=sql_session ) or await self.db.user.challenge( email, pwd, sql_session=sql_session): @@ -586,16 +588,16 @@ async def post(self, userid=None): dict(userid=userid, notepadid=notepadid), sql_session=sql_session) else: - raise Exception(u"记事本不存在") + raise Exception("记事本不存在") text_data = notepad['content'] new_data = self.get_argument("data", "") - if (f.find('write') > -1): + if f.find('write') > -1: text_data = new_data await self.db.notepad.mod(userid, notepadid, content=text_data, sql_session=sql_session) - elif (f.find('append') > -1): + elif f.find('append') > -1: if text_data is not None: text_data = text_data + '\r\n' + new_data else: @@ -607,51 +609,51 @@ async def post(self, userid=None): self.write(text_data) return else: - raise Exception(u"账号密码错误") + raise Exception("账号密码错误") else: - raise Exception(u"参数不完整,请确认") + raise Exception("参数不完整,请确认") except Exception as e: if config.traceback_print: traceback.print_exc() - if (str(e).find('get user need id or email') > -1): - e = u'请输入用户名/密码' + if str(e).find('get user need id or email') > -1: + e = '请输入用户名/密码' self.write(str(e)) 
self.set_status(400) - logger_Web_Handler.error( + logger_web_handler.error( 'UserID: %s modify Notepad_Toolbox failed! Reason: %s', userid or '-1', str(e)) return -class toolbox_notepad_list_Handler(BaseHandler): +class ToolboxNotepadListHandler(BaseHandler): async def get(self, userid=None, notepadid=1): if userid is None: raise HTTPError(405) - self.current_user["isadmin"] or self.check_permission( - {"userid": int(userid)}, 'r') - notepadlist = await self.db.notepad.list(fields=('notepadid', - 'content'), - limit=20, - userid=userid) - notepadlist.sort(key=lambda x: x['notepadid']) - if len(notepadlist) == 0: - if await self.db.user.get(id=userid, fields=('id', )) is not None: - await self.db.notepad.add(dict(userid=userid, notepadid=1)) - notepadlist = await self.db.notepad.list(fields=('notepadid', - 'content'), - limit=20, - userid=userid) - else: - raise HTTPError(404, - log_message=u"用户不存在或未创建记事本", - reason=u"用户不存在或未创建记事本") - if int(notepadid) == 0: - notepadid = notepadlist[-1]['notepadid'] - await self.render('toolbox-notepad.html', - notepad_id=notepadid, - notepad_list=notepadlist, - userid=userid) + if self.current_user["isadmin"] or self.check_permission( + {"userid": int(userid)}, 'r'): + notepadlist = await self.db.notepad.list(fields=('notepadid', + 'content'), + limit=config.notepad_limit, + userid=userid) + notepadlist.sort(key=lambda x: x['notepadid']) + if len(notepadlist) == 0: + if await self.db.user.get(id=userid, fields=('id', )) is not None: + await self.db.notepad.add(dict(userid=userid, notepadid=1)) + notepadlist = await self.db.notepad.list(fields=('notepadid', + 'content'), + limit=config.notepad_limit, + userid=userid) + else: + raise HTTPError(404, + log_message="用户不存在或未创建记事本", + reason="用户不存在或未创建记事本") + if int(notepadid) == 0: + notepadid = notepadlist[-1]['notepadid'] + await self.render('toolbox-notepad.html', + notepad_id=notepadid, + notepad_list=notepadlist, + userid=userid) return async def post(self, userid=None): @@ -659,9 
+661,9 @@ async def post(self, userid=None): email = self.get_argument("email", "") pwd = self.get_argument("pwd", "") f = self.get_argument("f", "list") - if (email) and (pwd) and (f): + if email and pwd and f: async with self.db.transaction() as sql_session: - if await self.db.user.challenge_MD5( + if await self.db.user.challenge_md5( email, pwd, sql_session=sql_session ) or await self.db.user.challenge( email, pwd, sql_session=sql_session): @@ -676,71 +678,68 @@ async def post(self, userid=None): notepadid = int(notepadid) notepadlist = await self.db.notepad.list( fields=('notepadid', ), - limit=20, + limit=config.notepad_limit, userid=userid, sql_session=sql_session) notepadlist = [x['notepadid'] for x in notepadlist] notepadlist.sort() if len(notepadlist) == 0: - raise Exception(u"无法获取该用户记事本编号") + raise Exception("无法获取该用户记事本编号") if f.find('add') > -1: - if len(notepadlist) >= 20: - raise Exception(u"记事本数量超过上限, limit: 20") + if len(notepadlist) >= config.notepad_limit: + raise Exception(f"记事本数量超过上限, limit: {config.notepad_limit}") new_data = self.get_argument("data", '') if new_data == '': new_data = None if notepadid == -1: notepadid = notepadlist[-1] + 1 elif notepadid in notepadlist: - raise Exception(u"记事本编号已存在, id_notepad: %s" % - notepadid) + raise Exception(f"记事本编号已存在, id_notepad: {notepadid}") await self.db.notepad.add(dict(userid=userid, notepadid=notepadid, content=new_data), sql_session=sql_session) - self.write(u"添加成功, id_notepad: %s" % (notepadid)) + self.write(f"添加成功, id_notepad: {notepadid}") return elif f.find('delete') > -1: if notepadid > 0: if notepadid not in notepadlist: raise Exception( - u"记事本编号不存在, id_notepad: %s" % - notepadid) + f"记事本编号不存在, id_notepad: {notepadid}") if notepadid == 1: - raise Exception(u"默认记事本不能删除") + raise Exception("默认记事本不能删除") await self.db.notepad.delete( userid, notepadid, sql_session=sql_session) - self.write(u"删除成功, id_notepad: %s" % - (notepadid)) + self.write(f"删除成功, id_notepad: {notepadid}") return else: - 
raise Exception(u"id_notepad参数不完整, 请确认") + raise Exception("id_notepad参数不完整, 请确认") elif f.find('list') > -1: self.write(notepadlist) return else: - raise Exception(u"参数不完整, 请确认") + raise Exception("参数不完整, 请确认") else: - raise Exception(u"账号密码错误") + raise Exception("账号密码错误") else: - raise Exception(u"参数不完整, 请确认") + raise Exception("参数不完整, 请确认") except Exception as e: if config.traceback_print: traceback.print_exc() - if (str(e).find('get user need id or email') > -1): - e = u'请输入用户名/密码' + if str(e).find('get user need id or email') > -1: + e = '请输入用户名/密码' self.write(str(e)) self.set_status(400) - logger_Web_Handler.error( + logger_web_handler.error( 'UserID: %s %s Notepad_Toolbox failed! Reason: %s', userid or '-1', f, str(e)) return -class DdddOCRServer(object): +class DdddOcrServer: def __init__(self): - if ddddocr is not None and hasattr(ddddocr,"DdddOcr"): + if ddddocr is not None and hasattr(ddddocr, "DdddOcr"): self.oldocr = ddddocr.DdddOcr(old=True, show_ad=False) self.ocr = ddddocr.DdddOcr(show_ad=False) self.det = ddddocr.DdddOcr(det=True, show_ad=False) @@ -749,21 +748,21 @@ def __init__(self): if len(config.extra_onnx_name) == len( config.extra_charsets_name ) and config.extra_onnx_name[0] and config.extra_charsets_name[0]: - for i in range(len(config.extra_onnx_name)): - self.extra[config.extra_onnx_name[i]] = ddddocr.DdddOcr( + for onnx_name in config.extra_onnx_name: + self.extra[onnx_name] = ddddocr.DdddOcr( show_ad=False, import_onnx_path=os.path.join( os.path.abspath( os.path.dirname( os.path.dirname(os.path.dirname(__file__)))), - "config", f"{config.extra_onnx_name[i]}.onnx"), + "config", f"{onnx_name}.onnx"), charsets_path=os.path.join( os.path.abspath( os.path.dirname( os.path.dirname(os.path.dirname(__file__)))), - "config", f"{config.extra_charsets_name[i]}.json")) - logger_Web_Util.info( - f"成功加载自定义Onnx模型: {config.extra_onnx_name[i]}.onnx") + "config", f"{onnx_name}.json")) + logger_web_util.info( + "成功加载自定义Onnx模型: %s.onnx", onnx_name) def 
classification(self, img: bytes, old=False, extra_onnx_name=""): if extra_onnx_name: @@ -782,32 +781,36 @@ def slide_match(self, imgtarget: bytes, imgbg: bytes, comparison=False, simple_t if not simple_target: try: return self.slide.slide_match(imgtarget, imgbg) - except: - pass + except Exception as e: + logger_web_handler.debug("slide_match error: %s", e, exc_info=config.traceback_print) return self.slide.slide_match(imgtarget, imgbg, simple_target=True) + if ddddocr: - DdddOCRServer = DdddOCRServer() + DDDDOCR_SERVER: Optional[DdddOcrServer] = DdddOcrServer() else: - DdddOCRServer = None + DDDDOCR_SERVER = None + async def get_img_from_url(imgurl): async with aiohttp.ClientSession( conn_timeout=config.connect_timeout) as session: async with session.get(imgurl, - verify_ssl=False, - timeout=config.request_timeout) as res: + verify_ssl=False, + timeout=config.request_timeout) as res: content = await res.read() base64_data = base64.b64encode(content).decode() return base64.b64decode(base64_data) + async def get_img(img="", imgurl="",): if img: # 判断是否为URL if img.startswith("http"): try: return await get_img_from_url(img) - except: + except Exception as e: + logger_web_handler.debug("get_img_from_url error: %s", e, exc_info=config.traceback_print) return base64.b64decode(img) return base64.b64decode(img) elif imgurl: @@ -819,30 +822,30 @@ async def get_img(img="", imgurl="",): class DdddOcrHandler(BaseHandler): async def get(self): - Rtv = {} + rtv = {} try: - if DdddOCRServer: + if DDDDOCR_SERVER: img = self.get_argument("img", "") imgurl = self.get_argument("imgurl", "") old = bool(strtobool(self.get_argument("old", "False"))) extra_onnx_name = self.get_argument("extra_onnx_name", "") img = await get_img(img, imgurl) - Rtv[u"Result"] = DdddOCRServer.classification( + rtv["Result"] = DDDDOCR_SERVER.classification( img, old=old, extra_onnx_name=extra_onnx_name) - Rtv[u"状态"] = "OK" + rtv["状态"] = "OK" else: raise HTTPError(406) except Exception as e: - Rtv[u"状态"] = 
str(e) + rtv["状态"] = str(e) self.set_header('Content-Type', 'application/json; charset=UTF-8') - self.write(json.dumps(Rtv, ensure_ascii=False, indent=4)) + self.write(json.dumps(rtv, ensure_ascii=False, indent=4)) return async def post(self): - Rtv = {} + rtv = {} try: - if DdddOCRServer: + if DDDDOCR_SERVER: if self.request.headers.get("Content-Type", "").startswith("application/json"): body_dict = json.loads(self.request.body) @@ -857,43 +860,43 @@ async def post(self): extra_onnx_name = self.get_argument("extra_onnx_name", "") img = await get_img(img, imgurl) - Rtv[u"Result"] = DdddOCRServer.classification( + rtv["Result"] = DDDDOCR_SERVER.classification( img, old=old, extra_onnx_name=extra_onnx_name) - Rtv[u"状态"] = "OK" + rtv["状态"] = "OK" else: raise HTTPError(406) except Exception as e: - Rtv[u"状态"] = str(e) + rtv["状态"] = str(e) self.set_header('Content-Type', 'application/json; charset=UTF-8') - self.write(json.dumps(Rtv, ensure_ascii=False, indent=4)) + self.write(json.dumps(rtv, ensure_ascii=False, indent=4)) return class DdddDetHandler(BaseHandler): async def get(self): - Rtv = {} + rtv = {} try: - if DdddOCRServer: + if DDDDOCR_SERVER: img = self.get_argument("img", "") imgurl = self.get_argument("imgurl", "") img = await get_img(img, imgurl) - Rtv[u"Result"] = DdddOCRServer.detection(img) - Rtv[u"状态"] = "OK" + rtv["Result"] = DDDDOCR_SERVER.detection(img) + rtv["状态"] = "OK" else: raise HTTPError(406) except Exception as e: - Rtv[u"状态"] = str(e) + rtv["状态"] = str(e) self.set_header('Content-Type', 'application/json; charset=UTF-8') - self.write(json.dumps(Rtv, ensure_ascii=False, indent=None)) + self.write(json.dumps(rtv, ensure_ascii=False, indent=None)) return async def post(self): - Rtv = {} + rtv = {} try: - if DdddOCRServer: + if DDDDOCR_SERVER: if self.request.headers.get("Content-Type", "").startswith("application/json"): body_dict = json.loads(self.request.body) @@ -903,45 +906,45 @@ async def post(self): img = self.get_argument("img", "") imgurl 
= self.get_argument("imgurl", "") img = await get_img(img, imgurl) - Rtv[u"Result"] = DdddOCRServer.detection(img) - Rtv[u"状态"] = "OK" + rtv["Result"] = DDDDOCR_SERVER.detection(img) + rtv["状态"] = "OK" else: raise Exception(404) except Exception as e: - Rtv[u"状态"] = str(e) + rtv["状态"] = str(e) self.set_header('Content-Type', 'application/json; charset=UTF-8') - self.write(json.dumps(Rtv, ensure_ascii=False, indent=None)) + self.write(json.dumps(rtv, ensure_ascii=False, indent=None)) return class DdddSlideHandler(BaseHandler): async def get(self): - Rtv = {} + rtv = {} try: - if DdddOCRServer: + if DDDDOCR_SERVER: imgtarget = self.get_argument("imgtarget", "") imgbg = self.get_argument("imgbg", "") simple_target = bool(strtobool(self.get_argument("simple_target", "False"))) comparison = bool(strtobool(self.get_argument("comparison", "False"))) imgtarget = await get_img(imgtarget, "") imgbg = await get_img(imgbg, "") - Rtv[u"Result"] = DdddOCRServer.slide_match(imgtarget, imgbg, comparison=comparison, simple_target=simple_target) - Rtv[u"状态"] = "OK" + rtv["Result"] = DDDDOCR_SERVER.slide_match(imgtarget, imgbg, comparison=comparison, simple_target=simple_target) + rtv["状态"] = "OK" else: raise HTTPError(406) except Exception as e: - Rtv[u"状态"] = str(e) + rtv["状态"] = str(e) self.set_header('Content-Type', 'application/json; charset=UTF-8') - self.write(json.dumps(Rtv, ensure_ascii=False, indent=None)) + self.write(json.dumps(rtv, ensure_ascii=False, indent=None)) return async def post(self): - Rtv = {} + rtv = {} try: - if DdddOCRServer: + if DDDDOCR_SERVER: if self.request.headers.get("Content-Type", "").startswith("application/json"): body_dict = json.loads(self.request.body) @@ -957,37 +960,37 @@ async def post(self): imgtarget = await get_img(imgtarget, "") imgbg = await get_img(imgbg, "") - Rtv[u"Result"] = DdddOCRServer.slide_match(imgtarget, imgbg, comparison=comparison, simple_target=simple_target) - Rtv[u"状态"] = "OK" + rtv["Result"] = 
DDDDOCR_SERVER.slide_match(imgtarget, imgbg, comparison=comparison, simple_target=simple_target) + rtv["状态"] = "OK" else: raise HTTPError(406) except Exception as e: - Rtv[u"状态"] = str(e) + rtv["状态"] = str(e) self.set_header('Content-Type', 'application/json; charset=UTF-8') - self.write(json.dumps(Rtv, ensure_ascii=False, indent=None)) + self.write(json.dumps(rtv, ensure_ascii=False, indent=None)) return handlers = [ - ('/util/delay', UtilDelayParaHandler), - ('/util/delay/(\d+)', UtilDelayIntHandler), - ('/util/delay/(\d+\.\d+)', UtilDelayHandler), - ('/util/timestamp', TimeStampHandler), - ('/util/unicode', UniCodeHandler), - ('/util/urldecode', UrlDecodeHandler), - ('/util/gb2312', GB2312Handler), - ('/util/regex', UtilRegexHandler), - ('/util/string/replace', UtilStrReplaceHandler), - ('/util/rsa', UtilRSAHandler), - ('/util/toolbox/(\d+)', toolboxHandler), - ('/util/toolbox/notepad', toolbox_notepad_Handler), - ('/util/toolbox/(\d+)/notepad', toolbox_notepad_Handler), - ('/util/toolbox/(\d+)/notepad/(\d+)', toolbox_notepad_Handler), - ('/util/toolbox/notepad/list', toolbox_notepad_list_Handler), - ('/util/toolbox/(\d+)/notepad/list', toolbox_notepad_list_Handler), - ('/util/toolbox/(\d+)/notepad/list/(\d+)', toolbox_notepad_list_Handler), - ('/util/dddd/ocr', DdddOcrHandler), - ('/util/dddd/det', DdddDetHandler), - ('/util/dddd/slide', DdddSlideHandler), + (r'/util/delay', UtilDelayParaHandler), + (r'/util/delay/(\d+)', UtilDelayIntHandler), + (r'/util/delay/(\d+\.\d+)', UtilDelayHandler), + (r'/util/timestamp', TimeStampHandler), + (r'/util/unicode', UniCodeHandler), + (r'/util/urldecode', UrlDecodeHandler), + (r'/util/gb2312', GB2312Handler), + (r'/util/regex', UtilRegexHandler), + (r'/util/string/replace', UtilStrReplaceHandler), + (r'/util/rsa', UtilRSAHandler), + (r'/util/toolbox/(\d+)', ToolboxHandler), + (r'/util/toolbox/notepad', ToolboxNotepadHandler), + (r'/util/toolbox/(\d+)/notepad', ToolboxNotepadHandler), + (r'/util/toolbox/(\d+)/notepad/(\d+)', 
ToolboxNotepadHandler), + (r'/util/toolbox/notepad/list', ToolboxNotepadListHandler), + (r'/util/toolbox/(\d+)/notepad/list', ToolboxNotepadListHandler), + (r'/util/toolbox/(\d+)/notepad/list/(\d+)', ToolboxNotepadListHandler), + (r'/util/dddd/ocr', DdddOcrHandler), + (r'/util/dddd/det', DdddDetHandler), + (r'/util/dddd/slide', DdddSlideHandler), ] diff --git a/worker.py b/worker.py index 9d3098b9987..00051369763 100644 --- a/worker.py +++ b/worker.py @@ -9,95 +9,120 @@ import json import time import traceback +from typing import Dict import tornado.ioloop import tornado.log from tornado import gen +from tornado.concurrent import Future import config from db import DB from libs.fetcher import Fetcher -from libs.funcs import cal, pusher +from libs.funcs import Cal, Pusher from libs.log import Log from libs.parse_url import parse_url -logger_Worker = Log('QD.Worker').getlogger() +logger_worker = Log('QD.Worker').getlogger() -class BaseWorker(object): +class BaseWorker: def __init__(self, db: DB): self.running = False self.db = db self.fetcher = Fetcher() - async def ClearLog(self, taskid, sql_session=None): - logDay = int((await self.db.site.get(1, fields=('logDay',), sql_session=sql_session))['logDay']) - for log in await self.db.tasklog.list(taskid=taskid, fields=('id', 'ctime'), sql_session=sql_session): - if (time.time() - log['ctime']) > (logDay * 24 * 60 * 60): - await self.db.tasklog.delete(log['id'], sql_session=sql_session) + async def clear_log(self, taskid, sql_session=None): + log_day = int( + (await self.db.site.get( + 1, + fields=('logDay',), + sql_session=sql_session + ))['logDay'] + ) + for log in await self.db.tasklog.list( + taskid=taskid, + fields=('id', 'ctime'), + sql_session=sql_session + ): + if (time.time() - log['ctime']) > (log_day * 24 * 60 * 60): + await self.db.tasklog.delete( + log['id'], + sql_session=sql_session + ) async def push_batch(self): try: async with self.db.transaction() as sql_session: - userlist = await 
self.db.user.list(fields=('id', 'email', 'status', 'push_batch'), sql_session=sql_session) - pushtool = pusher(self.db, sql_session=sql_session) + userlist = await self.db.user.list( + fields=('id', 'email', 'status', 'push_batch'), + sql_session=sql_session + ) + pushtool = Pusher(self.db, sql_session=sql_session) if userlist: for user in userlist: userid = user['id'] push_batch = json.loads(user['push_batch']) - if user['status'] == "Enable" and push_batch["sw"] and isinstance(push_batch['time'], (float, int)) and time.time() >= push_batch['time']: - logger_Worker.debug( - 'User %d check push_batch task, waiting...' % userid) - title = u"QD任务日志定期推送" + if user['status'] == "Enable" and push_batch.get('sw') and isinstance(push_batch.get('time'), (float, int)) and time.time() >= push_batch['time']: # noqa: E501 + logger_worker.debug( + 'User %d check push_batch task, waiting...', + userid + ) + title = "QD任务日志定期推送" delta = push_batch.get("delta", 86400) - logtemp = "{}".format(time.strftime( - "%Y-%m-%d %H:%M:%S", time.localtime(push_batch['time']))) + logtemp = time.strftime( + "%Y-%m-%d %H:%M:%S", time.localtime(push_batch['time'])) tmpdict = {} tmp = "" numlog = 0 - task_list = await self.db.task.list(userid=userid, fields=('id', 'tplid', 'note', 'disabled', 'last_success', 'last_failed', 'pushsw'), sql_session=sql_session) + task_list = await self.db.task.list( + userid=userid, + fields=( + 'id', 'tplid', 'note', 'disabled', + 'last_success', 'last_failed', 'pushsw' + ), + sql_session=sql_session + ) for task in task_list: pushsw = json.loads(task['pushsw']) - if pushsw["pushen"] and (task["disabled"] == 0 or (task.get("last_success", 0) and task.get("last_success", 0) >= push_batch['time']-delta) or (task.get("last_failed", 0) and task.get("last_failed", 0) >= push_batch['time']-delta)): + if pushsw["pushen"] and (task["disabled"] == 0 or (task.get("last_success", 0) and task.get("last_success", 0) >= push_batch['time'] - delta) or (task.get("last_failed", 0) 
and task.get("last_failed", 0) >= push_batch['time'] - delta)): tmp0 = "" tasklog_list = await self.db.tasklog.list(taskid=task["id"], fields=('success', 'ctime', 'msg'), sql_session=sql_session) for log in tasklog_list: if (push_batch['time'] - delta) < log['ctime'] <= push_batch['time']: - tmp0 += "\\r\\n时间: {}\\r\\n日志: {}".format(time.strftime( - "%Y-%m-%d %H:%M:%S", time.localtime(log['ctime'])), log['msg']) + c_time = time.strftime( + "%Y-%m-%d %H:%M:%S", time.localtime(log['ctime'])) + tmp0 += f"\\r\\n时间: {c_time}\\r\\n日志: {log['msg']}" numlog += 1 tmplist = tmpdict.get(task['tplid'], []) if tmp0: tmplist.append( - "\\r\\n-----任务{0}-{1}-----{2}\\r\\n".format(len(tmplist)+1, task['note'], tmp0)) + f"\\r\\n-----任务{len(tmplist) + 1}-{task['note']}-----{tmp0}\\r\\n") else: tmplist.append( - "\\r\\n-----任务{0}-{1}-----\\r\\n记录期间未执行定时任务,请检查任务! \\r\\n".format(len(tmplist)+1, task['note'])) + f"\\r\\n-----任务{len(tmplist) + 1}-{task['note']}-----\\r\\n记录期间未执行定时任务,请检查任务! \\r\\n") tmpdict[task['tplid']] = tmplist - for tmpkey in tmpdict: + for tmpkey, tmpval in tmpdict.items(): tmp_sitename = await self.db.tpl.get(tmpkey, fields=('sitename',), sql_session=sql_session) if tmp_sitename: - tmp = "\\r\\n\\r\\n=====QD: {0}=====".format( - tmp_sitename['sitename']) - tmp += ''.join(tmpdict[tmpkey]) + tmp = f"\\r\\n\\r\\n=====QD: {tmp_sitename['sitename']}=====" + tmp += ''.join(tmpval) logtemp += tmp push_batch["time"] = push_batch['time'] + delta await self.db.user.mod(userid, push_batch=json.dumps(push_batch), sql_session=sql_session) if tmp and numlog: user_email = user.get('email', 'Unkown') - logger_Worker.debug( - "Start push batch log for user {}, email:{}".format(userid, user_email)) + logger_worker.debug( + "Start push batch log for user %s, email:%s", userid, user_email) await pushtool.pusher(userid, {"pushen": bool(push_batch.get("sw", False))}, 4080, title, logtemp) - logger_Worker.info( - "Success push batch log for user {}, email:{}".format(userid, user_email)) + 
logger_worker.info( + "Success push batch log for user %s, email:%s", userid, user_email) else: - logger_Worker.debug( - 'User %d does not need to perform push_batch task, stop.' % userid) + logger_worker.debug( + 'User %s does not need to perform push_batch task, stop.', userid) except Exception as e: - if config.traceback_print: - traceback.print_exc() - logger_Worker.error('Push batch task failed: {}'.format(str(e))) + logger_worker.error('Push batch task failed: %s', e, exc_info=config.traceback_print) @staticmethod def failed_count_to_time(last_failed_count, retry_count=config.task_max_retry_count, retry_interval=None, interval=None): @@ -128,7 +153,7 @@ def failed_count_to_time(last_failed_count, retry_count=config.task_max_retry_co return next @staticmethod - def fix_next_time(next: float, gmt_offset=time.timezone/60) -> float: + def fix_next_time(next: float, gmt_offset=time.timezone / 60) -> float: """ fix next time to 2:00 - 21:00 (local time), while tpl interval is unset. @@ -172,8 +197,8 @@ async def do(self, task): return False newontime = json.loads(task["newontime"]) - pushtool = pusher(self.db, sql_session=sql_session) - caltool = cal() + pushtool = Pusher(self.db, sql_session=sql_session) + caltool = Cal() logtime = json.loads(user['logtime']) pushsw = json.loads(task['pushsw']) @@ -205,17 +230,17 @@ async def do(self, task): session = await self.db.user.encrypt(task['userid'], new_env['session'].to_json() if hasattr(new_env['session'], 'to_json') else new_env['session'], sql_session=sql_session) - if (newontime['sw']): - if ('mode' not in newontime): + if newontime['sw']: + if 'mode' not in newontime: newontime['mode'] = 'ontime' - if (newontime['mode'] == 'ontime'): + if newontime['mode'] == 'ontime': newontime['date'] = (datetime.datetime.now( - )+datetime.timedelta(days=1)).strftime("%Y-%m-%d") - next = caltool.calNextTs(newontime)['ts'] + ) + datetime.timedelta(days=1)).strftime("%Y-%m-%d") + next = caltool.cal_next_ts(newontime)['ts'] else: 
next = time.time() + \ max((tpl['interval'] if tpl['interval'] - else 24 * 60 * 60), 1*60) + else 24 * 60 * 60), 1 * 60) if tpl['interval'] is None: next = self.fix_next_time(next) @@ -224,7 +249,7 @@ async def do(self, task): await self.db.task.mod(task['id'], last_success=time.time(), last_failed_count=0, - success_count=task['success_count']+1, + success_count=task['success_count'] + 1, env=variables, session=session, mtime=time.time(), @@ -233,17 +258,16 @@ async def do(self, task): await self.db.tpl.incr_success(tpl['id'], sql_session=sql_session) t = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S') - title = u"QD定时任务 {0}-{1} 成功".format( - tpl['sitename'], task['note']) + title = f"QD定时任务 {tpl['sitename']}-{task['note']} 成功" logtemp = new_env['variables'].get('__log__') - logtemp = u"{0} \\r\\n日志:{1}".format(t, logtemp) + logtemp = f"{t} \\r\\n日志:{logtemp}" await pushtool.pusher(user['id'], pushsw, 0x2, title, logtemp) - logger_Worker.info('taskid:%d tplid:%d successed! %.5fs', - task['id'], task['tplid'], time.perf_counter()-start) + logger_worker.info('taskid:%d tplid:%d successed! 
%.5fs', + task['id'], task['tplid'], time.perf_counter() - start) # delete log - await self.ClearLog(task['id'], sql_session=sql_session) - logger_Worker.info( + await self.clear_log(task['id'], sql_session=sql_session) + logger_worker.info( 'taskid:%d tplid:%d clear log.', task['id'], task['tplid']) except Exception as e: # failed feedback @@ -253,45 +277,43 @@ async def do(self, task): task['last_failed_count'], task['retry_count'], task['retry_interval'], tpl['interval']) t = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S') - title = u"QD定时任务 {0}-{1} 失败".format( - tpl['sitename'], task['note']) - content = u"{0} \\r\\n日志:{1}".format(t, str(e)) + title = f"QD定时任务 {tpl['sitename']}-{task['note']} 失败" + content = f"{t} \\r\\n日志:{e}" disabled = False if next_time_delta: next = time.time() + next_time_delta content = content + \ - u" \\r\\n下次运行时间:{0}".format(time.strftime( - "%Y-%m-%d %H:%M:%S", time.localtime(next))) - if (logtime['ErrTolerateCnt'] <= task['last_failed_count']): + f" \\r\\n下次运行时间:{time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(next))}" + if logtime['ErrTolerateCnt'] <= task['last_failed_count']: await pushtool.pusher(user['id'], pushsw, 0x1, title, content) else: disabled = True next = None - content = u" \\r\\n任务已禁用" + content = " \\r\\n任务已禁用" await pushtool.pusher(user['id'], pushsw, 0x1, title, content) await self.db.tasklog.add(task['id'], success=False, msg=str(e), sql_session=sql_session) await self.db.task.mod(task['id'], last_failed=time.time(), - failed_count=task['failed_count']+1, - last_failed_count=task['last_failed_count']+1, + failed_count=task['failed_count'] + 1, + last_failed_count=task['last_failed_count'] + 1, disabled=disabled, mtime=time.time(), next=next, sql_session=sql_session) await self.db.tpl.incr_failed(tpl['id'], sql_session=sql_session) - logger_Worker.error('taskid:%d tplid:%d failed! 
%.4fs \r\n%s', task['id'], task['tplid'], time.perf_counter( - )-start, str(e).replace('\\r\\n', '\r\n')) + logger_worker.error('taskid:%d tplid:%d failed! %.4fs \r\n%s', task['id'], task['tplid'], time.perf_counter( + ) - start, str(e).replace('\\r\\n', '\r\n')) return False return True class QueueWorker(BaseWorker): def __init__(self, db: DB): - logger_Worker.info('Queue Worker start...') - self.queue = asyncio.Queue(maxsize=config.queue_num) - self.task_lock = {} + logger_worker.info('Queue Worker start...') + self.queue: asyncio.Queue = asyncio.Queue(maxsize=config.queue_num) + self.task_lock: Dict = {} self.success = 0 self.failed = 0 super().__init__(db) @@ -305,8 +327,8 @@ async def __call__(self): while True: sleep = asyncio.sleep(config.push_batch_delta) if self.success or self.failed: - logger_Worker.info('Last %d seconds, %d task done. %d success, %d failed' % ( - config.push_batch_delta, self.success+self.failed, self.success, self.failed)) + logger_worker.info('Last %d seconds, %d task done. %d success, %d failed' , + config.push_batch_delta, self.success + self.failed, self.success, self.failed) self.success = 0 self.failed = 0 if config.push_batch_sw: @@ -314,20 +336,18 @@ async def __call__(self): await sleep async def runner(self, id): - logger_Worker.debug('Runner %d started' % id) + logger_worker.debug('Runner %d started' , id) while True: - sleep = asyncio.sleep(config.check_task_loop/1000.0) + sleep = asyncio.sleep(config.check_task_loop / 1000.0) task = await self.queue.get() - logger_Worker.debug( - 'Runner %d get task: %s, running...' % (id, task['id'])) + logger_worker.debug( + 'Runner %d get task: %s, running...' , id, task['id']) done = False try: done = await self.do(task) except Exception as e: - logger_Worker.error( - 'Runner %d get task: %s, failed! %s' % (id, task['id'], str(e))) - if config.traceback_print: - traceback.print_exc() + logger_worker.error( + 'Runner %d get task: %s, failed! 
%s' , id, task['id'], str(e), exc_info=config.traceback_print) if done: self.success += 1 self.task_lock.pop(task['id'], None) @@ -338,9 +358,9 @@ async def runner(self, id): await sleep async def producer(self): - logger_Worker.debug('Schedule Producer started') + logger_worker.debug('Schedule Producer started') while True: - sleep = asyncio.sleep(config.check_task_loop/1000.0) + sleep = asyncio.sleep(config.check_task_loop / 1000.0) try: tasks = await self.db.task.scan() unlock_tasks = 0 @@ -351,13 +371,11 @@ async def producer(self): unlock_tasks += 1 await self.queue.put(task) if unlock_tasks > 0: - logger_Worker.debug( - 'Scaned %d task, put in Queue...' % unlock_tasks) + logger_worker.debug( + 'Scaned %d task, put in Queue...', unlock_tasks) except Exception as e: - logger_Worker.error( - 'Schedule Producer get tasks failed! %s' % str(e)) - if config.traceback_print: - traceback.print_exc() + logger_worker.error( + 'Schedule Producer get tasks failed! %s', e, exc_info=config.traceback_print) await sleep # 旧版本批量任务定时执行 @@ -366,26 +384,25 @@ async def producer(self): class BatchWorker(BaseWorker): def __init__(self, db: DB): - logger_Worker.info('Batch Worker start...') + logger_worker.info('Batch Worker start...') super().__init__(db) + self.running = False def __call__(self): # self.running = tornado.ioloop.IOLoop.current().spawn_callback(self.run) # if self.running: # success, failed = self.running # if success or failed: - # logger_Worker.info('%d task done. %d success, %d failed' % (success+failed, success, failed)) - if self.running: - return - self.running = gen.convert_yielded(self.run()) + # logger_worker.info('%d task done. %d success, %d failed' % (success+failed, success, failed)) + if not self.running: + self.running = gen.convert_yielded(self.run()) - def done(future: asyncio.Future): - self.running = None + def done(future: Future): + self.running = False success, failed = future.result() if success or failed: - logger_Worker.info('%d task done. 
%d success, %d failed' % ( - success+failed, success, failed)) - return + logger_worker.info('%d task done. %d success, %d failed' , + success + failed, success, failed) self.running.add_done_callback(done) async def run(self): @@ -398,7 +415,7 @@ async def run(self): for task in tasks: running.append(asyncio.ensure_future(self.do(task))) if len(running) >= 50: - logger_Worker.debug( + logger_worker.debug( 'scaned %d task, waiting...', len(running)) result = await asyncio.gather(*running[:10]) for each in result: @@ -407,7 +424,7 @@ async def run(self): else: failed += 1 running = running[10:] - logger_Worker.debug('scaned %d task, waiting...', len(running)) + logger_worker.debug('scaned %d task, waiting...', len(running)) result = await asyncio.gather(*running) for each in result: if each: @@ -417,22 +434,21 @@ async def run(self): if config.push_batch_sw: await self.push_batch() except Exception as e: - logger_Worker.exception(e) + logger_worker.exception(e) return (success, failed) if __name__ == '__main__': - from db import DB tornado.log.enable_pretty_logging() io_loop = tornado.ioloop.IOLoop.instance() if config.worker_method.upper() == 'QUEUE': - worker = QueueWorker(DB()) - io_loop.add_callback(worker) + queue_worker = QueueWorker(DB()) + io_loop.add_callback(queue_worker) elif config.worker_method.upper() == 'BATCH': - worker = BatchWorker(DB()) - tornado.ioloop.PeriodicCallback(worker, config.check_task_loop).start() + batch_worker = BatchWorker(DB()) + tornado.ioloop.PeriodicCallback(batch_worker, config.check_task_loop).start() # worker() else: - raise Exception('Worker_method must be Queue or Batch') + raise RuntimeError('Worker_method must be Queue or Batch') io_loop.start()