From 504e49ca0b4898cdf9587bcbe55696eb749b5833 Mon Sep 17 00:00:00 2001 From: Nikos Koukis Date: Sun, 11 Aug 2024 11:17:41 +0300 Subject: [PATCH] Fix code based on ruff and pyupgrade feedback, add to linters --- .gitignore | 2 + .markdown-link-check.json | 1 + .pre-commit-config.yaml | 26 +- README.md | 3 +- poetry.lock | 356 +++++++++++++----------- pyproject.toml | 45 ++- syncall/__init__.py | 2 +- syncall/aggregator.py | 75 +++-- syncall/app_utils.py | 125 +++++---- syncall/asana/__init__.py | 1 + syncall/asana/asana_side.py | 23 +- syncall/asana/asana_task.py | 44 +-- syncall/asana/utils.py | 4 +- syncall/caldav/caldav_side.py | 43 +-- syncall/caldav/caldav_utils.py | 51 ++-- syncall/cli.py | 45 +-- syncall/concrete_item.py | 25 +- syncall/exceptions.py | 15 +- syncall/filesystem/filesystem_file.py | 25 +- syncall/filesystem/filesystem_side.py | 11 +- syncall/filesystem_gkeep_utils.py | 6 +- syncall/google/__init__.py | 1 + syncall/google/gcal_side.py | 87 +++--- syncall/google/gkeep_note.py | 2 +- syncall/google/gkeep_note_side.py | 43 ++- syncall/google/gkeep_side.py | 28 +- syncall/google/gkeep_todo_item.py | 2 +- syncall/google/gkeep_todo_side.py | 29 +- syncall/google/google_side.py | 15 +- syncall/google/gtasks_side.py | 90 +++--- syncall/notion/__init__.py | 1 + syncall/notion/notion_side.py | 51 ++-- syncall/notion/notion_todo_block.py | 26 +- syncall/pdb_cli_utils.py | 6 +- syncall/scripts/fs_gkeep_sync.py | 20 +- syncall/scripts/tw_asana_sync.py | 42 +-- syncall/scripts/tw_caldav_sync.py | 38 ++- syncall/scripts/tw_gcal_sync.py | 28 +- syncall/scripts/tw_gkeep_sync.py | 23 +- syncall/scripts/tw_gtasks_sync.py | 31 ++- syncall/scripts/tw_notion_sync.py | 31 ++- syncall/side_helper.py | 2 +- syncall/sync_side.py | 101 ++++--- syncall/taskwarrior/taskwarrior_side.py | 49 ++-- syncall/tw_asana_utils.py | 17 +- syncall/tw_caldav_utils.py | 28 +- syncall/tw_gcal_utils.py | 24 +- syncall/tw_gkeep_utils.py | 3 +- syncall/tw_gtasks_utils.py | 11 +- 
syncall/tw_notion_utils.py | 5 +- syncall/tw_utils.py | 40 +-- syncall/types.py | 216 +++++++------- tests/__init__.py | 1 + tests/conftest.py | 26 +- tests/conftest_fs.py | 31 +-- tests/conftest_gcal.py | 1 - tests/conftest_gkeep.py | 10 +- tests/conftest_gtasks.py | 1 - tests/conftest_helpers.py | 4 +- tests/conftest_notion.py | 49 ++-- tests/conftest_tw.py | 1 - tests/generic_test_case.py | 13 +- tests/pyproject.toml | 8 + tests/test_aggregator.py | 8 +- tests/test_app_utils.py | 34 ++- tests/test_asana_task.py | 47 ++-- tests/test_filesystem_file.py | 22 +- tests/test_filesystem_gkeep.py | 9 +- tests/test_filesystem_side.py | 4 +- tests/test_gcal.py | 14 +- tests/test_notion_todo_block.py | 26 +- tests/test_tw_asana_conversions.py | 51 ++-- tests/test_tw_caldav_conversions.py | 53 ++-- tests/test_tw_gcal.py | 1 - tests/test_tw_gkeep.py | 1 - tests/test_tw_gtasks_conversions.py | 3 +- tests/test_tw_notion.py | 15 +- tests/test_util_methods.py | 13 +- 78 files changed, 1344 insertions(+), 1120 deletions(-) create mode 100644 syncall/notion/__init__.py create mode 100644 tests/pyproject.toml mode change 100755 => 100644 tests/test_tw_caldav_conversions.py mode change 100755 => 100644 tests/test_util_methods.py diff --git a/.gitignore b/.gitignore index d5b2360..a6e519d 100644 --- a/.gitignore +++ b/.gitignore @@ -91,3 +91,5 @@ test-tw-gcal-sync.json /.task_backup /.envrc .tool-versions +.lvimrc +a diff --git a/.markdown-link-check.json b/.markdown-link-check.json index 2c63c08..3125325 100644 --- a/.markdown-link-check.json +++ b/.markdown-link-check.json @@ -1,2 +1,3 @@ { + "ignorePatterns": ["https://notion.so"] } diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 54f7f6f..9d5465f 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -50,6 +50,15 @@ repos: minimum_pre_commit_version: 2.9.2 require_serial: true types_or: [python, pyi] + + - id: pyupgrade + name: Pyupgrade + description: Automatically upgrade syntax for newer 
versions. + entry: pyupgrade + args: ["--py38-plus"] + types_or: [python] + language: system + # - id: mypy # name: Mypy check # entry: mypy @@ -70,19 +79,12 @@ repos: # types: [python] # exclude: ^tests/ - - id: isort - name: Isort check - entry: isort + - id: ruff + name: Ruff Linter + entry: ruff + args: ["check", "--fix"] language: system - pass_filenames: true - types: [python] - - # - id: ruff - # name: Ruff Linter - # entry: ruff - # args: ["check", "--fix"] - # language: system - # types_or: [python, pyi] + types_or: [python, pyi] - id: poetry-check name: Poetry check diff --git a/README.md b/README.md index bc9f397..77f4e5f 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,8 @@

- + + Coverage Status diff --git a/poetry.lock b/poetry.lock index 55fb900..486b64d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1120,153 +1120,149 @@ dev = ["Sphinx (>=2.2.1)", "black (>=19.10b0)", "codecov (>=2.0.15)", "colorama [[package]] name = "lxml" -version = "5.2.2" +version = "5.3.0" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." optional = true python-versions = ">=3.6" files = [ - {file = "lxml-5.2.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:364d03207f3e603922d0d3932ef363d55bbf48e3647395765f9bfcbdf6d23632"}, - {file = "lxml-5.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:50127c186f191b8917ea2fb8b206fbebe87fd414a6084d15568c27d0a21d60db"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74e4f025ef3db1c6da4460dd27c118d8cd136d0391da4e387a15e48e5c975147"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:981a06a3076997adf7c743dcd0d7a0415582661e2517c7d961493572e909aa1d"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aef5474d913d3b05e613906ba4090433c515e13ea49c837aca18bde190853dff"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1e275ea572389e41e8b039ac076a46cb87ee6b8542df3fff26f5baab43713bca"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5b65529bb2f21ac7861a0e94fdbf5dc0daab41497d18223b46ee8515e5ad297"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:bcc98f911f10278d1daf14b87d65325851a1d29153caaf146877ec37031d5f36"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:b47633251727c8fe279f34025844b3b3a3e40cd1b198356d003aa146258d13a2"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_s390x.whl", hash = 
"sha256:fbc9d316552f9ef7bba39f4edfad4a734d3d6f93341232a9dddadec4f15d425f"}, - {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:13e69be35391ce72712184f69000cda04fc89689429179bc4c0ae5f0b7a8c21b"}, - {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3b6a30a9ab040b3f545b697cb3adbf3696c05a3a68aad172e3fd7ca73ab3c835"}, - {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:a233bb68625a85126ac9f1fc66d24337d6e8a0f9207b688eec2e7c880f012ec0"}, - {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:dfa7c241073d8f2b8e8dbc7803c434f57dbb83ae2a3d7892dd068d99e96efe2c"}, - {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1a7aca7964ac4bb07680d5c9d63b9d7028cace3e2d43175cb50bba8c5ad33316"}, - {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ae4073a60ab98529ab8a72ebf429f2a8cc612619a8c04e08bed27450d52103c0"}, - {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ffb2be176fed4457e445fe540617f0252a72a8bc56208fd65a690fdb1f57660b"}, - {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:e290d79a4107d7d794634ce3e985b9ae4f920380a813717adf61804904dc4393"}, - {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:96e85aa09274955bb6bd483eaf5b12abadade01010478154b0ec70284c1b1526"}, - {file = "lxml-5.2.2-cp310-cp310-win32.whl", hash = "sha256:f956196ef61369f1685d14dad80611488d8dc1ef00be57c0c5a03064005b0f30"}, - {file = "lxml-5.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:875a3f90d7eb5c5d77e529080d95140eacb3c6d13ad5b616ee8095447b1d22e7"}, - {file = "lxml-5.2.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:45f9494613160d0405682f9eee781c7e6d1bf45f819654eb249f8f46a2c22545"}, - {file = "lxml-5.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b0b3f2df149efb242cee2ffdeb6674b7f30d23c9a7af26595099afaf46ef4e88"}, - {file = 
"lxml-5.2.2-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d28cb356f119a437cc58a13f8135ab8a4c8ece18159eb9194b0d269ec4e28083"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:657a972f46bbefdbba2d4f14413c0d079f9ae243bd68193cb5061b9732fa54c1"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b74b9ea10063efb77a965a8d5f4182806fbf59ed068b3c3fd6f30d2ac7bee734"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:07542787f86112d46d07d4f3c4e7c760282011b354d012dc4141cc12a68cef5f"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:303f540ad2dddd35b92415b74b900c749ec2010e703ab3bfd6660979d01fd4ed"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:2eb2227ce1ff998faf0cd7fe85bbf086aa41dfc5af3b1d80867ecfe75fb68df3"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:1d8a701774dfc42a2f0b8ccdfe7dbc140500d1049e0632a611985d943fcf12df"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:56793b7a1a091a7c286b5f4aa1fe4ae5d1446fe742d00cdf2ffb1077865db10d"}, - {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:eb00b549b13bd6d884c863554566095bf6fa9c3cecb2e7b399c4bc7904cb33b5"}, - {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a2569a1f15ae6c8c64108a2cd2b4a858fc1e13d25846be0666fc144715e32ab"}, - {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:8cf85a6e40ff1f37fe0f25719aadf443686b1ac7652593dc53c7ef9b8492b115"}, - {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:d237ba6664b8e60fd90b8549a149a74fcc675272e0e95539a00522e4ca688b04"}, - {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:0b3f5016e00ae7630a4b83d0868fca1e3d494c78a75b1c7252606a3a1c5fc2ad"}, - {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:23441e2b5339bc54dc949e9e675fa35efe858108404ef9aa92f0456929ef6fe8"}, - {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:2fb0ba3e8566548d6c8e7dd82a8229ff47bd8fb8c2da237607ac8e5a1b8312e5"}, - {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:79d1fb9252e7e2cfe4de6e9a6610c7cbb99b9708e2c3e29057f487de5a9eaefa"}, - {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6dcc3d17eac1df7859ae01202e9bb11ffa8c98949dcbeb1069c8b9a75917e01b"}, - {file = "lxml-5.2.2-cp311-cp311-win32.whl", hash = "sha256:4c30a2f83677876465f44c018830f608fa3c6a8a466eb223535035fbc16f3438"}, - {file = "lxml-5.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:49095a38eb333aaf44c06052fd2ec3b8f23e19747ca7ec6f6c954ffea6dbf7be"}, - {file = "lxml-5.2.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:7429e7faa1a60cad26ae4227f4dd0459efde239e494c7312624ce228e04f6391"}, - {file = "lxml-5.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:50ccb5d355961c0f12f6cf24b7187dbabd5433f29e15147a67995474f27d1776"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc911208b18842a3a57266d8e51fc3cfaccee90a5351b92079beed912a7914c2"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33ce9e786753743159799fdf8e92a5da351158c4bfb6f2db0bf31e7892a1feb5"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ec87c44f619380878bd49ca109669c9f221d9ae6883a5bcb3616785fa8f94c97"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08ea0f606808354eb8f2dfaac095963cb25d9d28e27edcc375d7b30ab01abbf6"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:75a9632f1d4f698b2e6e2e1ada40e71f369b15d69baddb8968dcc8e683839b18"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:74da9f97daec6928567b48c90ea2c82a106b2d500f397eeb8941e47d30b1ca85"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:0969e92af09c5687d769731e3f39ed62427cc72176cebb54b7a9d52cc4fa3b73"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:9164361769b6ca7769079f4d426a41df6164879f7f3568be9086e15baca61466"}, - {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:d26a618ae1766279f2660aca0081b2220aca6bd1aa06b2cf73f07383faf48927"}, - {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab67ed772c584b7ef2379797bf14b82df9aa5f7438c5b9a09624dd834c1c1aaf"}, - {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:3d1e35572a56941b32c239774d7e9ad724074d37f90c7a7d499ab98761bd80cf"}, - {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:8268cbcd48c5375f46e000adb1390572c98879eb4f77910c6053d25cc3ac2c67"}, - {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e282aedd63c639c07c3857097fc0e236f984ceb4089a8b284da1c526491e3f3d"}, - {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfdc2bfe69e9adf0df4915949c22a25b39d175d599bf98e7ddf620a13678585"}, - {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4aefd911793b5d2d7a921233a54c90329bf3d4a6817dc465f12ffdfe4fc7b8fe"}, - {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:8b8df03a9e995b6211dafa63b32f9d405881518ff1ddd775db4e7b98fb545e1c"}, - {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f11ae142f3a322d44513de1018b50f474f8f736bc3cd91d969f464b5bfef8836"}, - {file = "lxml-5.2.2-cp312-cp312-win32.whl", hash = "sha256:16a8326e51fcdffc886294c1e70b11ddccec836516a343f9ed0f82aac043c24a"}, - {file = "lxml-5.2.2-cp312-cp312-win_amd64.whl", hash = 
"sha256:bbc4b80af581e18568ff07f6395c02114d05f4865c2812a1f02f2eaecf0bfd48"}, - {file = "lxml-5.2.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e3d9d13603410b72787579769469af730c38f2f25505573a5888a94b62b920f8"}, - {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38b67afb0a06b8575948641c1d6d68e41b83a3abeae2ca9eed2ac59892b36706"}, - {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c689d0d5381f56de7bd6966a4541bff6e08bf8d3871bbd89a0c6ab18aa699573"}, - {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:cf2a978c795b54c539f47964ec05e35c05bd045db5ca1e8366988c7f2fe6b3ce"}, - {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:739e36ef7412b2bd940f75b278749106e6d025e40027c0b94a17ef7968d55d56"}, - {file = "lxml-5.2.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:d8bbcd21769594dbba9c37d3c819e2d5847656ca99c747ddb31ac1701d0c0ed9"}, - {file = "lxml-5.2.2-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:2304d3c93f2258ccf2cf7a6ba8c761d76ef84948d87bf9664e14d203da2cd264"}, - {file = "lxml-5.2.2-cp36-cp36m-win32.whl", hash = "sha256:02437fb7308386867c8b7b0e5bc4cd4b04548b1c5d089ffb8e7b31009b961dc3"}, - {file = "lxml-5.2.2-cp36-cp36m-win_amd64.whl", hash = "sha256:edcfa83e03370032a489430215c1e7783128808fd3e2e0a3225deee278585196"}, - {file = "lxml-5.2.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:28bf95177400066596cdbcfc933312493799382879da504633d16cf60bba735b"}, - {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a745cc98d504d5bd2c19b10c79c61c7c3df9222629f1b6210c0368177589fb8"}, - {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b590b39ef90c6b22ec0be925b211298e810b4856909c8ca60d27ffbca6c12e6"}, - {file = 
"lxml-5.2.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b336b0416828022bfd5a2e3083e7f5ba54b96242159f83c7e3eebaec752f1716"}, - {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:c2faf60c583af0d135e853c86ac2735ce178f0e338a3c7f9ae8f622fd2eb788c"}, - {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:4bc6cb140a7a0ad1f7bc37e018d0ed690b7b6520ade518285dc3171f7a117905"}, - {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7ff762670cada8e05b32bf1e4dc50b140790909caa8303cfddc4d702b71ea184"}, - {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:57f0a0bbc9868e10ebe874e9f129d2917750adf008fe7b9c1598c0fbbfdde6a6"}, - {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:a6d2092797b388342c1bc932077ad232f914351932353e2e8706851c870bca1f"}, - {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:60499fe961b21264e17a471ec296dcbf4365fbea611bf9e303ab69db7159ce61"}, - {file = "lxml-5.2.2-cp37-cp37m-win32.whl", hash = "sha256:d9b342c76003c6b9336a80efcc766748a333573abf9350f4094ee46b006ec18f"}, - {file = "lxml-5.2.2-cp37-cp37m-win_amd64.whl", hash = "sha256:b16db2770517b8799c79aa80f4053cd6f8b716f21f8aca962725a9565ce3ee40"}, - {file = "lxml-5.2.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7ed07b3062b055d7a7f9d6557a251cc655eed0b3152b76de619516621c56f5d3"}, - {file = "lxml-5.2.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f60fdd125d85bf9c279ffb8e94c78c51b3b6a37711464e1f5f31078b45002421"}, - {file = "lxml-5.2.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a7e24cb69ee5f32e003f50e016d5fde438010c1022c96738b04fc2423e61706"}, - {file = "lxml-5.2.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23cfafd56887eaed93d07bc4547abd5e09d837a002b791e9767765492a75883f"}, - {file = 
"lxml-5.2.2-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:19b4e485cd07b7d83e3fe3b72132e7df70bfac22b14fe4bf7a23822c3a35bff5"}, - {file = "lxml-5.2.2-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:7ce7ad8abebe737ad6143d9d3bf94b88b93365ea30a5b81f6877ec9c0dee0a48"}, - {file = "lxml-5.2.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e49b052b768bb74f58c7dda4e0bdf7b79d43a9204ca584ffe1fb48a6f3c84c66"}, - {file = "lxml-5.2.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d14a0d029a4e176795cef99c056d58067c06195e0c7e2dbb293bf95c08f772a3"}, - {file = "lxml-5.2.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:be49ad33819d7dcc28a309b86d4ed98e1a65f3075c6acd3cd4fe32103235222b"}, - {file = "lxml-5.2.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:a6d17e0370d2516d5bb9062c7b4cb731cff921fc875644c3d751ad857ba9c5b1"}, - {file = "lxml-5.2.2-cp38-cp38-win32.whl", hash = "sha256:5b8c041b6265e08eac8a724b74b655404070b636a8dd6d7a13c3adc07882ef30"}, - {file = "lxml-5.2.2-cp38-cp38-win_amd64.whl", hash = "sha256:f61efaf4bed1cc0860e567d2ecb2363974d414f7f1f124b1df368bbf183453a6"}, - {file = "lxml-5.2.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:fb91819461b1b56d06fa4bcf86617fac795f6a99d12239fb0c68dbeba41a0a30"}, - {file = "lxml-5.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d4ed0c7cbecde7194cd3228c044e86bf73e30a23505af852857c09c24e77ec5d"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54401c77a63cc7d6dc4b4e173bb484f28a5607f3df71484709fe037c92d4f0ed"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:625e3ef310e7fa3a761d48ca7ea1f9d8718a32b1542e727d584d82f4453d5eeb"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:519895c99c815a1a24a926d5b60627ce5ea48e9f639a5cd328bda0515ea0f10c"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", 
hash = "sha256:c7079d5eb1c1315a858bbf180000757db8ad904a89476653232db835c3114001"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:343ab62e9ca78094f2306aefed67dcfad61c4683f87eee48ff2fd74902447726"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:cd9e78285da6c9ba2d5c769628f43ef66d96ac3085e59b10ad4f3707980710d3"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_ppc64le.whl", hash = "sha256:546cf886f6242dff9ec206331209db9c8e1643ae642dea5fdbecae2453cb50fd"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_s390x.whl", hash = "sha256:02f6a8eb6512fdc2fd4ca10a49c341c4e109aa6e9448cc4859af5b949622715a"}, - {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:339ee4a4704bc724757cd5dd9dc8cf4d00980f5d3e6e06d5847c1b594ace68ab"}, - {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0a028b61a2e357ace98b1615fc03f76eb517cc028993964fe08ad514b1e8892d"}, - {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:f90e552ecbad426eab352e7b2933091f2be77115bb16f09f78404861c8322981"}, - {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:d83e2d94b69bf31ead2fa45f0acdef0757fa0458a129734f59f67f3d2eb7ef32"}, - {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a02d3c48f9bb1e10c7788d92c0c7db6f2002d024ab6e74d6f45ae33e3d0288a3"}, - {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:6d68ce8e7b2075390e8ac1e1d3a99e8b6372c694bbe612632606d1d546794207"}, - {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:453d037e09a5176d92ec0fd282e934ed26d806331a8b70ab431a81e2fbabf56d"}, - {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:3b019d4ee84b683342af793b56bb35034bd749e4cbdd3d33f7d1107790f8c472"}, - {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:cb3942960f0beb9f46e2a71a3aca220d1ca32feb5a398656be934320804c0df9"}, - {file = 
"lxml-5.2.2-cp39-cp39-win32.whl", hash = "sha256:ac6540c9fff6e3813d29d0403ee7a81897f1d8ecc09a8ff84d2eea70ede1cdbf"}, - {file = "lxml-5.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:610b5c77428a50269f38a534057444c249976433f40f53e3b47e68349cca1425"}, - {file = "lxml-5.2.2-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b537bd04d7ccd7c6350cdaaaad911f6312cbd61e6e6045542f781c7f8b2e99d2"}, - {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4820c02195d6dfb7b8508ff276752f6b2ff8b64ae5d13ebe02e7667e035000b9"}, - {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2a09f6184f17a80897172863a655467da2b11151ec98ba8d7af89f17bf63dae"}, - {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:76acba4c66c47d27c8365e7c10b3d8016a7da83d3191d053a58382311a8bf4e1"}, - {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b128092c927eaf485928cec0c28f6b8bead277e28acf56800e972aa2c2abd7a2"}, - {file = "lxml-5.2.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ae791f6bd43305aade8c0e22f816b34f3b72b6c820477aab4d18473a37e8090b"}, - {file = "lxml-5.2.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a2f6a1bc2460e643785a2cde17293bd7a8f990884b822f7bca47bee0a82fc66b"}, - {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e8d351ff44c1638cb6e980623d517abd9f580d2e53bfcd18d8941c052a5a009"}, - {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bec4bd9133420c5c52d562469c754f27c5c9e36ee06abc169612c959bd7dbb07"}, - {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:55ce6b6d803890bd3cc89975fca9de1dff39729b43b73cb15ddd933b8bc20484"}, - {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:8ab6a358d1286498d80fe67bd3d69fcbc7d1359b45b41e74c4a26964ca99c3f8"}, - {file = 
"lxml-5.2.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:06668e39e1f3c065349c51ac27ae430719d7806c026fec462e5693b08b95696b"}, - {file = "lxml-5.2.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9cd5323344d8ebb9fb5e96da5de5ad4ebab993bbf51674259dbe9d7a18049525"}, - {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89feb82ca055af0fe797a2323ec9043b26bc371365847dbe83c7fd2e2f181c34"}, - {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e481bba1e11ba585fb06db666bfc23dbe181dbafc7b25776156120bf12e0d5a6"}, - {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:9d6c6ea6a11ca0ff9cd0390b885984ed31157c168565702959c25e2191674a14"}, - {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3d98de734abee23e61f6b8c2e08a88453ada7d6486dc7cdc82922a03968928db"}, - {file = "lxml-5.2.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:69ab77a1373f1e7563e0fb5a29a8440367dec051da6c7405333699d07444f511"}, - {file = "lxml-5.2.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:34e17913c431f5ae01d8658dbf792fdc457073dcdfbb31dc0cc6ab256e664a8d"}, - {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05f8757b03208c3f50097761be2dea0aba02e94f0dc7023ed73a7bb14ff11eb0"}, - {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a520b4f9974b0a0a6ed73c2154de57cdfd0c8800f4f15ab2b73238ffed0b36e"}, - {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5e097646944b66207023bc3c634827de858aebc226d5d4d6d16f0b77566ea182"}, - {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b5e4ef22ff25bfd4ede5f8fb30f7b24446345f3e79d9b7455aef2836437bc38a"}, - {file = "lxml-5.2.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:ff69a9a0b4b17d78170c73abe2ab12084bdf1691550c5629ad1fe7849433f324"}, - {file = 
"lxml-5.2.2.tar.gz", hash = "sha256:bb2dc4898180bea79863d5487e5f9c7c34297414bad54bcd0f0852aee9cfdb87"}, + {file = "lxml-5.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:dd36439be765e2dde7660212b5275641edbc813e7b24668831a5c8ac91180656"}, + {file = "lxml-5.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ae5fe5c4b525aa82b8076c1a59d642c17b6e8739ecf852522c6321852178119d"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:501d0d7e26b4d261fca8132854d845e4988097611ba2531408ec91cf3fd9d20a"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb66442c2546446944437df74379e9cf9e9db353e61301d1a0e26482f43f0dd8"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9e41506fec7a7f9405b14aa2d5c8abbb4dbbd09d88f9496958b6d00cb4d45330"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f7d4a670107d75dfe5ad080bed6c341d18c4442f9378c9f58e5851e86eb79965"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41ce1f1e2c7755abfc7e759dc34d7d05fd221723ff822947132dc934d122fe22"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:44264ecae91b30e5633013fb66f6ddd05c006d3e0e884f75ce0b4755b3e3847b"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:3c174dc350d3ec52deb77f2faf05c439331d6ed5e702fc247ccb4e6b62d884b7"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:2dfab5fa6a28a0b60a20638dc48e6343c02ea9933e3279ccb132f555a62323d8"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:b1c8c20847b9f34e98080da785bb2336ea982e7f913eed5809e5a3c872900f32"}, + {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2c86bf781b12ba417f64f3422cfc302523ac9cd1d8ae8c0f92a1c66e56ef2e86"}, + {file = 
"lxml-5.3.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:c162b216070f280fa7da844531169be0baf9ccb17263cf5a8bf876fcd3117fa5"}, + {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:36aef61a1678cb778097b4a6eeae96a69875d51d1e8f4d4b491ab3cfb54b5a03"}, + {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f65e5120863c2b266dbcc927b306c5b78e502c71edf3295dfcb9501ec96e5fc7"}, + {file = "lxml-5.3.0-cp310-cp310-win32.whl", hash = "sha256:ef0c1fe22171dd7c7c27147f2e9c3e86f8bdf473fed75f16b0c2e84a5030ce80"}, + {file = "lxml-5.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:052d99051e77a4f3e8482c65014cf6372e61b0a6f4fe9edb98503bb5364cfee3"}, + {file = "lxml-5.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:74bcb423462233bc5d6066e4e98b0264e7c1bed7541fff2f4e34fe6b21563c8b"}, + {file = "lxml-5.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a3d819eb6f9b8677f57f9664265d0a10dd6551d227afb4af2b9cd7bdc2ccbf18"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b8f5db71b28b8c404956ddf79575ea77aa8b1538e8b2ef9ec877945b3f46442"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3406b63232fc7e9b8783ab0b765d7c59e7c59ff96759d8ef9632fca27c7ee4"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ecdd78ab768f844c7a1d4a03595038c166b609f6395e25af9b0f3f26ae1230f"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:168f2dfcfdedf611eb285efac1516c8454c8c99caf271dccda8943576b67552e"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa617107a410245b8660028a7483b68e7914304a6d4882b5ff3d2d3eb5948d8c"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:69959bd3167b993e6e710b99051265654133a98f20cec1d9b493b931942e9c16"}, + {file = 
"lxml-5.3.0-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:bd96517ef76c8654446fc3db9242d019a1bb5fe8b751ba414765d59f99210b79"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:ab6dd83b970dc97c2d10bc71aa925b84788c7c05de30241b9e96f9b6d9ea3080"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:eec1bb8cdbba2925bedc887bc0609a80e599c75b12d87ae42ac23fd199445654"}, + {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6a7095eeec6f89111d03dabfe5883a1fd54da319c94e0fb104ee8f23616b572d"}, + {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:6f651ebd0b21ec65dfca93aa629610a0dbc13dbc13554f19b0113da2e61a4763"}, + {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:f422a209d2455c56849442ae42f25dbaaba1c6c3f501d58761c619c7836642ec"}, + {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:62f7fdb0d1ed2065451f086519865b4c90aa19aed51081979ecd05a21eb4d1be"}, + {file = "lxml-5.3.0-cp311-cp311-win32.whl", hash = "sha256:c6379f35350b655fd817cd0d6cbeef7f265f3ae5fedb1caae2eb442bbeae9ab9"}, + {file = "lxml-5.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:9c52100e2c2dbb0649b90467935c4b0de5528833c76a35ea1a2691ec9f1ee7a1"}, + {file = "lxml-5.3.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:e99f5507401436fdcc85036a2e7dc2e28d962550afe1cbfc07c40e454256a859"}, + {file = "lxml-5.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:384aacddf2e5813a36495233b64cb96b1949da72bef933918ba5c84e06af8f0e"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:874a216bf6afaf97c263b56371434e47e2c652d215788396f60477540298218f"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65ab5685d56914b9a2a34d67dd5488b83213d680b0c5d10b47f81da5a16b0b0e"}, + {file = 
"lxml-5.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aac0bbd3e8dd2d9c45ceb82249e8bdd3ac99131a32b4d35c8af3cc9db1657179"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b369d3db3c22ed14c75ccd5af429086f166a19627e84a8fdade3f8f31426e52a"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c24037349665434f375645fa9d1f5304800cec574d0310f618490c871fd902b3"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:62d172f358f33a26d6b41b28c170c63886742f5b6772a42b59b4f0fa10526cb1"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:c1f794c02903c2824fccce5b20c339a1a14b114e83b306ff11b597c5f71a1c8d"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:5d6a6972b93c426ace71e0be9a6f4b2cfae9b1baed2eed2006076a746692288c"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:3879cc6ce938ff4eb4900d901ed63555c778731a96365e53fadb36437a131a99"}, + {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:74068c601baff6ff021c70f0935b0c7bc528baa8ea210c202e03757c68c5a4ff"}, + {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ecd4ad8453ac17bc7ba3868371bffb46f628161ad0eefbd0a855d2c8c32dd81a"}, + {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:7e2f58095acc211eb9d8b5771bf04df9ff37d6b87618d1cbf85f92399c98dae8"}, + {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e63601ad5cd8f860aa99d109889b5ac34de571c7ee902d6812d5d9ddcc77fa7d"}, + {file = "lxml-5.3.0-cp312-cp312-win32.whl", hash = "sha256:17e8d968d04a37c50ad9c456a286b525d78c4a1c15dd53aa46c1d8e06bf6fa30"}, + {file = "lxml-5.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:c1a69e58a6bb2de65902051d57fde951febad631a20a64572677a1052690482f"}, + {file = "lxml-5.3.0-cp313-cp313-macosx_10_13_universal2.whl", hash = 
"sha256:8c72e9563347c7395910de6a3100a4840a75a6f60e05af5e58566868d5eb2d6a"}, + {file = "lxml-5.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e92ce66cd919d18d14b3856906a61d3f6b6a8500e0794142338da644260595cd"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d04f064bebdfef9240478f7a779e8c5dc32b8b7b0b2fc6a62e39b928d428e51"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c2fb570d7823c2bbaf8b419ba6e5662137f8166e364a8b2b91051a1fb40ab8b"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0c120f43553ec759f8de1fee2f4794452b0946773299d44c36bfe18e83caf002"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:562e7494778a69086f0312ec9689f6b6ac1c6b65670ed7d0267e49f57ffa08c4"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:423b121f7e6fa514ba0c7918e56955a1d4470ed35faa03e3d9f0e3baa4c7e492"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:c00f323cc00576df6165cc9d21a4c21285fa6b9989c5c39830c3903dc4303ef3"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_ppc64le.whl", hash = "sha256:1fdc9fae8dd4c763e8a31e7630afef517eab9f5d5d31a278df087f307bf601f4"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_s390x.whl", hash = "sha256:658f2aa69d31e09699705949b5fc4719cbecbd4a97f9656a232e7d6c7be1a367"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:1473427aff3d66a3fa2199004c3e601e6c4500ab86696edffdbc84954c72d832"}, + {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a87de7dd873bf9a792bf1e58b1c3887b9264036629a5bf2d2e6579fe8e73edff"}, + {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:0d7b36afa46c97875303a94e8f3ad932bf78bace9e18e603f2085b652422edcd"}, + {file = 
"lxml-5.3.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:cf120cce539453ae086eacc0130a324e7026113510efa83ab42ef3fcfccac7fb"}, + {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:df5c7333167b9674aa8ae1d4008fa4bc17a313cc490b2cca27838bbdcc6bb15b"}, + {file = "lxml-5.3.0-cp313-cp313-win32.whl", hash = "sha256:c802e1c2ed9f0c06a65bc4ed0189d000ada8049312cfeab6ca635e39c9608957"}, + {file = "lxml-5.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:406246b96d552e0503e17a1006fd27edac678b3fcc9f1be71a2f94b4ff61528d"}, + {file = "lxml-5.3.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:8f0de2d390af441fe8b2c12626d103540b5d850d585b18fcada58d972b74a74e"}, + {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1afe0a8c353746e610bd9031a630a95bcfb1a720684c3f2b36c4710a0a96528f"}, + {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56b9861a71575f5795bde89256e7467ece3d339c9b43141dbdd54544566b3b94"}, + {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:9fb81d2824dff4f2e297a276297e9031f46d2682cafc484f49de182aa5e5df99"}, + {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:2c226a06ecb8cdef28845ae976da407917542c5e6e75dcac7cc33eb04aaeb237"}, + {file = "lxml-5.3.0-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:7d3d1ca42870cdb6d0d29939630dbe48fa511c203724820fc0fd507b2fb46577"}, + {file = "lxml-5.3.0-cp36-cp36m-win32.whl", hash = "sha256:094cb601ba9f55296774c2d57ad68730daa0b13dc260e1f941b4d13678239e70"}, + {file = "lxml-5.3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:eafa2c8658f4e560b098fe9fc54539f86528651f61849b22111a9b107d18910c"}, + {file = "lxml-5.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cb83f8a875b3d9b458cada4f880fa498646874ba4011dc974e071a0a84a1b033"}, + {file = 
"lxml-5.3.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25f1b69d41656b05885aa185f5fdf822cb01a586d1b32739633679699f220391"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23e0553b8055600b3bf4a00b255ec5c92e1e4aebf8c2c09334f8368e8bd174d6"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ada35dd21dc6c039259596b358caab6b13f4db4d4a7f8665764d616daf9cc1d"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:81b4e48da4c69313192d8c8d4311e5d818b8be1afe68ee20f6385d0e96fc9512"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:2bc9fd5ca4729af796f9f59cd8ff160fe06a474da40aca03fcc79655ddee1a8b"}, + {file = "lxml-5.3.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:07da23d7ee08577760f0a71d67a861019103e4812c87e2fab26b039054594cc5"}, + {file = "lxml-5.3.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:ea2e2f6f801696ad7de8aec061044d6c8c0dd4037608c7cab38a9a4d316bfb11"}, + {file = "lxml-5.3.0-cp37-cp37m-win32.whl", hash = "sha256:5c54afdcbb0182d06836cc3d1be921e540be3ebdf8b8a51ee3ef987537455f84"}, + {file = "lxml-5.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:f2901429da1e645ce548bf9171784c0f74f0718c3f6150ce166be39e4dd66c3e"}, + {file = "lxml-5.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c56a1d43b2f9ee4786e4658c7903f05da35b923fb53c11025712562d5cc02753"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ee8c39582d2652dcd516d1b879451500f8db3fe3607ce45d7c5957ab2596040"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fdf3a3059611f7585a78ee10399a15566356116a4288380921a4b598d807a22"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:146173654d79eb1fc97498b4280c1d3e1e5d58c398fa530905c9ea50ea849b22"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:0a7056921edbdd7560746f4221dca89bb7a3fe457d3d74267995253f46343f15"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:9e4b47ac0f5e749cfc618efdf4726269441014ae1d5583e047b452a32e221920"}, + {file = "lxml-5.3.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:f914c03e6a31deb632e2daa881fe198461f4d06e57ac3d0e05bbcab8eae01945"}, + {file = "lxml-5.3.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:213261f168c5e1d9b7535a67e68b1f59f92398dd17a56d934550837143f79c42"}, + {file = "lxml-5.3.0-cp38-cp38-win32.whl", hash = "sha256:218c1b2e17a710e363855594230f44060e2025b05c80d1f0661258142b2add2e"}, + {file = "lxml-5.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:315f9542011b2c4e1d280e4a20ddcca1761993dda3afc7a73b01235f8641e903"}, + {file = "lxml-5.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1ffc23010330c2ab67fac02781df60998ca8fe759e8efde6f8b756a20599c5de"}, + {file = "lxml-5.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2b3778cb38212f52fac9fe913017deea2fdf4eb1a4f8e4cfc6b009a13a6d3fcc"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b0c7a688944891086ba192e21c5229dea54382f4836a209ff8d0a660fac06be"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:747a3d3e98e24597981ca0be0fd922aebd471fa99d0043a3842d00cdcad7ad6a"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86a6b24b19eaebc448dc56b87c4865527855145d851f9fc3891673ff97950540"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b11a5d918a6216e521c715b02749240fb07ae5a1fefd4b7bf12f833bc8b4fe70"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:68b87753c784d6acb8a25b05cb526c3406913c9d988d51f80adecc2b0775d6aa"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:109fa6fede314cc50eed29e6e56c540075e63d922455346f11e4d7a036d2b8cf"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_ppc64le.whl", hash = "sha256:02ced472497b8362c8e902ade23e3300479f4f43e45f4105c85ef43b8db85229"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_s390x.whl", hash = "sha256:6b038cc86b285e4f9fea2ba5ee76e89f21ed1ea898e287dc277a25884f3a7dfe"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:7437237c6a66b7ca341e868cda48be24b8701862757426852c9b3186de1da8a2"}, + {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7f41026c1d64043a36fda21d64c5026762d53a77043e73e94b71f0521939cc71"}, + {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:482c2f67761868f0108b1743098640fbb2a28a8e15bf3f47ada9fa59d9fe08c3"}, + {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:1483fd3358963cc5c1c9b122c80606a3a79ee0875bcac0204149fa09d6ff2727"}, + {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2dec2d1130a9cda5b904696cec33b2cfb451304ba9081eeda7f90f724097300a"}, + {file = "lxml-5.3.0-cp39-cp39-win32.whl", hash = "sha256:a0eabd0a81625049c5df745209dc7fcef6e2aea7793e5f003ba363610aa0a3ff"}, + {file = "lxml-5.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:89e043f1d9d341c52bf2af6d02e6adde62e0a46e6755d5eb60dc6e4f0b8aeca2"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7b1cd427cb0d5f7393c31b7496419da594fe600e6fdc4b105a54f82405e6626c"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51806cfe0279e06ed8500ce19479d757db42a30fd509940b1701be9c86a5ff9a"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee70d08fd60c9565ba8190f41a46a54096afa0eeb8f76bd66f2c25d3b1b83005"}, + {file = 
"lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:8dc2c0395bea8254d8daebc76dcf8eb3a95ec2a46fa6fae5eaccee366bfe02ce"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6ba0d3dcac281aad8a0e5b14c7ed6f9fa89c8612b47939fc94f80b16e2e9bc83"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:6e91cf736959057f7aac7adfc83481e03615a8e8dd5758aa1d95ea69e8931dba"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:94d6c3782907b5e40e21cadf94b13b0842ac421192f26b84c45f13f3c9d5dc27"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c300306673aa0f3ed5ed9372b21867690a17dba38c68c44b287437c362ce486b"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78d9b952e07aed35fe2e1a7ad26e929595412db48535921c5013edc8aa4a35ce"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:01220dca0d066d1349bd6a1726856a78f7929f3878f7e2ee83c296c69495309e"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:2d9b8d9177afaef80c53c0a9e30fa252ff3036fb1c6494d427c066a4ce6a282f"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:20094fc3f21ea0a8669dc4c61ed7fa8263bd37d97d93b90f28fc613371e7a875"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ace2c2326a319a0bb8a8b0e5b570c764962e95818de9f259ce814ee666603f19"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92e67a0be1639c251d21e35fe74df6bcc40cba445c2cda7c4a967656733249e2"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd5350b55f9fecddc51385463a4f67a5da829bc741e38cf689f38ec9023f54ab"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = 
"sha256:4c1fefd7e3d00921c44dc9ca80a775af49698bbfd92ea84498e56acffd4c5469"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:71a8dd38fbd2f2319136d4ae855a7078c69c9a38ae06e0c17c73fd70fc6caad8"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:97acf1e1fd66ab53dacd2c35b319d7e548380c2e9e8c54525c6e76d21b1ae3b1"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:68934b242c51eb02907c5b81d138cb977b2129a0a75a8f8b60b01cb8586c7b21"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b710bc2b8292966b23a6a0121f7a6c51d45d2347edcc75f016ac123b8054d3f2"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18feb4b93302091b1541221196a2155aa296c363fd233814fa11e181adebc52f"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:3eb44520c4724c2e1a57c0af33a379eee41792595023f367ba3952a2d96c2aab"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:609251a0ca4770e5a8768ff902aa02bf636339c5a93f9349b48eb1f606f7f3e9"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:516f491c834eb320d6c843156440fe7fc0d50b33e44387fcec5b02f0bc118a4c"}, + {file = "lxml-5.3.0.tar.gz", hash = "sha256:4e109ca30d1edec1ac60cdbe341905dc3b8f55b16855e03a54aaf59e51ec8c6f"}, ] [package.extras] @@ -1274,7 +1270,7 @@ cssselect = ["cssselect (>=0.7)"] html-clean = ["lxml-html-clean"] html5 = ["html5lib"] htmlsoup = ["BeautifulSoup4"] -source = ["Cython (>=3.0.10)"] +source = ["Cython (>=3.0.11)"] [[package]] name = "mccabe" @@ -1782,6 +1778,20 @@ files = [ {file = "pytz-2023.4.tar.gz", hash = "sha256:31d4583c4ed539cd037956140d695e42c033a19e984bfce9964a3f7d59bc2b40"}, ] +[[package]] +name = "pyupgrade" +version = "3.16.0" +description = "A tool to automatically upgrade syntax for newer versions." 
+optional = false +python-versions = ">=3.8.1" +files = [ + {file = "pyupgrade-3.16.0-py2.py3-none-any.whl", hash = "sha256:7a54ee28f3024d027048d49d101e5c702e88c85edc3a1d08b636c50ebef2a97d"}, + {file = "pyupgrade-3.16.0.tar.gz", hash = "sha256:237893a05d5b117259b31b423f23cbae4bce0b7eae57ba9a52c06098c2ddd76f"}, +] + +[package.dependencies] +tokenize-rt = ">=5.2.0" + [[package]] name = "pyyaml" version = "5.3.1" @@ -2113,28 +2123,29 @@ files = [ [[package]] name = "ruff" -version = "0.1.15" +version = "0.5.7" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.1.15-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:5fe8d54df166ecc24106db7dd6a68d44852d14eb0729ea4672bb4d96c320b7df"}, - {file = "ruff-0.1.15-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6f0bfbb53c4b4de117ac4d6ddfd33aa5fc31beeaa21d23c45c6dd249faf9126f"}, - {file = "ruff-0.1.15-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0d432aec35bfc0d800d4f70eba26e23a352386be3a6cf157083d18f6f5881c8"}, - {file = "ruff-0.1.15-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9405fa9ac0e97f35aaddf185a1be194a589424b8713e3b97b762336ec79ff807"}, - {file = "ruff-0.1.15-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c66ec24fe36841636e814b8f90f572a8c0cb0e54d8b5c2d0e300d28a0d7bffec"}, - {file = "ruff-0.1.15-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:6f8ad828f01e8dd32cc58bc28375150171d198491fc901f6f98d2a39ba8e3ff5"}, - {file = "ruff-0.1.15-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86811954eec63e9ea162af0ffa9f8d09088bab51b7438e8b6488b9401863c25e"}, - {file = "ruff-0.1.15-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fd4025ac5e87d9b80e1f300207eb2fd099ff8200fa2320d7dc066a3f4622dc6b"}, - {file = 
"ruff-0.1.15-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b17b93c02cdb6aeb696effecea1095ac93f3884a49a554a9afa76bb125c114c1"}, - {file = "ruff-0.1.15-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ddb87643be40f034e97e97f5bc2ef7ce39de20e34608f3f829db727a93fb82c5"}, - {file = "ruff-0.1.15-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:abf4822129ed3a5ce54383d5f0e964e7fef74a41e48eb1dfad404151efc130a2"}, - {file = "ruff-0.1.15-py3-none-musllinux_1_2_i686.whl", hash = "sha256:6c629cf64bacfd136c07c78ac10a54578ec9d1bd2a9d395efbee0935868bf852"}, - {file = "ruff-0.1.15-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:1bab866aafb53da39c2cadfb8e1c4550ac5340bb40300083eb8967ba25481447"}, - {file = "ruff-0.1.15-py3-none-win32.whl", hash = "sha256:2417e1cb6e2068389b07e6fa74c306b2810fe3ee3476d5b8a96616633f40d14f"}, - {file = "ruff-0.1.15-py3-none-win_amd64.whl", hash = "sha256:3837ac73d869efc4182d9036b1405ef4c73d9b1f88da2413875e34e0d6919587"}, - {file = "ruff-0.1.15-py3-none-win_arm64.whl", hash = "sha256:9a933dfb1c14ec7a33cceb1e49ec4a16b51ce3c20fd42663198746efc0427360"}, - {file = "ruff-0.1.15.tar.gz", hash = "sha256:f6dfa8c1b21c913c326919056c390966648b680966febcb796cc9d1aaab8564e"}, + {file = "ruff-0.5.7-py3-none-linux_armv6l.whl", hash = "sha256:548992d342fc404ee2e15a242cdbea4f8e39a52f2e7752d0e4cbe88d2d2f416a"}, + {file = "ruff-0.5.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:00cc8872331055ee017c4f1071a8a31ca0809ccc0657da1d154a1d2abac5c0be"}, + {file = "ruff-0.5.7-py3-none-macosx_11_0_arm64.whl", hash = "sha256:eaf3d86a1fdac1aec8a3417a63587d93f906c678bb9ed0b796da7b59c1114a1e"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a01c34400097b06cf8a6e61b35d6d456d5bd1ae6961542de18ec81eaf33b4cb8"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fcc8054f1a717e2213500edaddcf1dbb0abad40d98e1bd9d0ad364f75c763eea"}, + {file = 
"ruff-0.5.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f70284e73f36558ef51602254451e50dd6cc479f8b6f8413a95fcb5db4a55fc"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:a78ad870ae3c460394fc95437d43deb5c04b5c29297815a2a1de028903f19692"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ccd078c66a8e419475174bfe60a69adb36ce04f8d4e91b006f1329d5cd44bcf"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e31c9bad4ebf8fdb77b59cae75814440731060a09a0e0077d559a556453acbb"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d796327eed8e168164346b769dd9a27a70e0298d667b4ecee6877ce8095ec8e"}, + {file = "ruff-0.5.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:4a09ea2c3f7778cc635e7f6edf57d566a8ee8f485f3c4454db7771efb692c499"}, + {file = "ruff-0.5.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a36d8dcf55b3a3bc353270d544fb170d75d2dff41eba5df57b4e0b67a95bb64e"}, + {file = "ruff-0.5.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9369c218f789eefbd1b8d82a8cf25017b523ac47d96b2f531eba73770971c9e5"}, + {file = "ruff-0.5.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:b88ca3db7eb377eb24fb7c82840546fb7acef75af4a74bd36e9ceb37a890257e"}, + {file = "ruff-0.5.7-py3-none-win32.whl", hash = "sha256:33d61fc0e902198a3e55719f4be6b375b28f860b09c281e4bdbf783c0566576a"}, + {file = "ruff-0.5.7-py3-none-win_amd64.whl", hash = "sha256:083bbcbe6fadb93cd86709037acc510f86eed5a314203079df174c40bbbca6b3"}, + {file = "ruff-0.5.7-py3-none-win_arm64.whl", hash = "sha256:2dca26154ff9571995107221d0aeaad0e75a77b5a682d6236cf89a58c70b76f4"}, + {file = "ruff-0.5.7.tar.gz", hash = "sha256:8dfc0a458797f5d9fb622dd0efc52d796f23f0a1493a9527f4e49a550ae9a7e5"}, ] [[package]] @@ -2192,6 +2203,17 @@ packaging = ">=23.2,<24.0" python-dateutil = ">=2.8.2,<3.0.0" pytz = ">=2023.3.post1,<2024.0" 
+[[package]] +name = "tokenize-rt" +version = "6.0.0" +description = "A wrapper around the stdlib `tokenize` which roundtrips." +optional = false +python-versions = ">=3.8" +files = [ + {file = "tokenize_rt-6.0.0-py2.py3-none-any.whl", hash = "sha256:d4ff7ded2873512938b4f8cbb98c9b07118f01d30ac585a30d7a88353ca36d22"}, + {file = "tokenize_rt-6.0.0.tar.gz", hash = "sha256:b9711bdfc51210211137499b5e355d3de5ec88a85d2025c520cbb921b5194367"}, +] + [[package]] name = "tomli" version = "2.0.1" @@ -2606,13 +2628,13 @@ files = [ [[package]] name = "zipp" -version = "3.19.2" +version = "3.20.0" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.19.2-py3-none-any.whl", hash = "sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c"}, - {file = "zipp-3.19.2.tar.gz", hash = "sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19"}, + {file = "zipp-3.20.0-py3-none-any.whl", hash = "sha256:58da6168be89f0be59beb194da1250516fdaa062ccebd30127ac65d30045e10d"}, + {file = "zipp-3.20.0.tar.gz", hash = "sha256:0145e43d89664cfe1a2e533adc75adafed82fe2da404b4bbb6b026c0157bdb31"}, ] [package.extras] @@ -2630,5 +2652,5 @@ tw = ["taskw-ng", "xdg"] [metadata] lock-version = "2.0" -python-versions = ">=3.8,<=3.12.5" -content-hash = "b2918815b3d67291ea021df2a1ae56824a488d6879b0c49bcd0d2c5bbb2d89f4" +python-versions = ">=3.8.1,<=3.12.5" +content-hash = "c57f41a8d920cac92ac46a1cece8040463c2eef04dd477cd5de9739227201cce" diff --git a/pyproject.toml b/pyproject.toml index 17eee8d..f6d8c9f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -15,6 +15,8 @@ classifiers = [ "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3 :: Only", "Intended Audience :: End Users/Desktop", "Natural 
Language :: English", @@ -50,7 +52,7 @@ tw_gtasks_sync = "syncall.scripts.tw_gtasks_sync:main" # end-user dependencies -------------------------------------------------------- [tool.poetry.dependencies] -python = ">=3.8,<=3.12.5" +python = ">=3.8.1,<=3.12.5" PyYAML = "~5.3.1" bidict = "^0.21.4" @@ -109,7 +111,8 @@ coverage = { version = "^6.5.0", extras = ["toml"] } coveralls = "^3.3.1" check-jsonschema = "^0.14.3" # readline = "6.2.4.1" -ruff = "^0.1.15" +ruff = "^0.5" +pyupgrade = "3.16" # isort ------------------------------------------------------------------------ [tool.isort] @@ -181,7 +184,7 @@ pythonPlatform = "Linux" # ruff ------------------------------------------------------------------------- [tool.ruff] line-length = 95 -target-version = "py310" +target-version = "py38" force-exclude = true exclude = [] @@ -189,7 +192,41 @@ exclude = [] [tool.ruff.lint] select = ["ALL"] -ignore = [] +ignore = [ + "ANN201", + "S101", + "D102", + "D103", + "ANN001", + "ANN202", + "D100", + "FBT001", + "ANN003", + "PGH003", + "N803", + "FBT002", + "FA100", + "TRY003", + "SLF001", + "EM102", + "D105", + "D107", + "D213", + "D203", + "EM101", + "D205", + "D400", + "D415", + "INP001", + "E501", + "SIM118", + "N806", + "ANN002", + "ANN204", + "N802", + "PLR0913", + "ANN206", +] exclude = [] [tool.ruff.lint.flake8-annotations] diff --git a/syncall/__init__.py b/syncall/__init__.py index ff3b037..f49d9a8 100644 --- a/syncall/__init__.py +++ b/syncall/__init__.py @@ -1,4 +1,4 @@ -"""__init__""" +"""Init module.""" # global imports ------------------------------------------------------------------------------ from syncall.__version__ import __version__ as version diff --git a/syncall/aggregator.py b/syncall/aggregator.py index 4e012f6..69de1e4 100644 --- a/syncall/aggregator.py +++ b/syncall/aggregator.py @@ -1,19 +1,28 @@ from __future__ import annotations +from typing import TYPE_CHECKING + +from typing_extensions import Self + +if TYPE_CHECKING: + from pathlib import Path + 
from typing import Iterable, Sequence + + from item_synchronizer.types import ID, ConverterFn, Item + + from syncall.sync_side import SyncSide + from functools import partial -from pathlib import Path -from typing import Any, Dict, Iterable, Optional, Sequence, Tuple +from typing import Any -from bidict import bidict # type: ignore +from bidict import bidict # pyright: ignore[reportPrivateImportUsage] from bubop import PrefsManager, logger, pickle_dump, pickle_load from item_synchronizer import Synchronizer from item_synchronizer.helpers import SideChanges from item_synchronizer.resolution_strategy import AlwaysSecondRS, ResolutionStrategy -from item_synchronizer.types import ID, ConverterFn, Item from syncall.app_utils import app_name from syncall.side_helper import SideHelper -from syncall.sync_side import SyncSide class Aggregator: @@ -30,9 +39,9 @@ def __init__( side_B: SyncSide, converter_B_to_A: ConverterFn, converter_A_to_B: ConverterFn, - resolution_strategy: ResolutionStrategy = AlwaysSecondRS(), - config_fname: Optional[str] = None, - ignore_keys: Tuple[Sequence[str], Sequence[str]] = tuple(), + resolution_strategy: ResolutionStrategy | None = None, + config_fname: str | None = None, + ignore_keys: tuple[Sequence[str], Sequence[str]] = (), catch_exceptions: bool = True, ): # Preferences manager @@ -49,10 +58,13 @@ def __init__( else: logger.debug(f"Using a custom configuration file ... 
-> {config_fname}") + if resolution_strategy is None: + resolution_strategy = AlwaysSecondRS() + self.prefs_manager = PrefsManager(app_name=app_name(), config_fname=config_fname) # Own config - self.config: Dict[str, Any] = {} + self.config: dict[str, Any] = {} self._side_A: SyncSide = side_A self._side_B: SyncSide = side_B @@ -119,15 +131,18 @@ def side_A_fn(fn): self.cleaned_up = False - def __enter__(self): + def __enter__(self) -> Self: + """Enter context manager.""" self.start() return self - def __exit__(self, *_): + def __exit__(self, *_) -> None: + """Exit context manager.""" self.finish() - def detect_changes(self, helper: SideHelper, items: Dict[ID, Item]) -> SideChanges: - """ + def detect_changes(self, helper: SideHelper, items: dict[ID, Item]) -> SideChanges: + """Detect changes between the two sides. + Given a fresh list of items from the SyncSide, determine which of them are new, modified, or have been deleted since the last run. """ @@ -165,7 +180,7 @@ def detect_changes(self, helper: SideHelper, items: Dict[ID, Item]) -> SideChang return side_changes - def sync(self): + def sync(self) -> None: """Entrypoint method.""" items_A = { str(item[self._helper_A.id_key]): item for item in self._side_A.get_all_items() @@ -199,19 +214,18 @@ def sync(self): # synchronize self._synchronizer.sync(changes_A=changes_A, changes_B=changes_B) - def start(self): - """Initialization actions.""" + def start(self) -> None: + """Initialize the aggregator.""" self._side_A.start() self._side_B.start() - def finish(self): - """Finalization actions.""" + def finish(self) -> None: + """Finalize the aggregator.""" self._side_A.finish() self._side_B.finish() - # InserterFn = Callable[[Item], ID] def inserter_to(self, item: Item, helper: SideHelper) -> ID: - """Inserter. + """Insert an item using the given side helper. Other side already has the item, and I'm also inserting it at this side. 
""" @@ -219,7 +233,7 @@ def inserter_to(self, item: Item, helper: SideHelper) -> ID: serdes_dir, _ = self._get_serdes_dirs(helper) logger.info( f"[{helper.other}] Inserting item [{self._summary_of(item, helper):10}] at" - f" {helper}..." + f" {helper}...", ) item_created = item_side.add_item(item) @@ -232,19 +246,19 @@ def inserter_to(self, item: Item, helper: SideHelper) -> ID: return item_created_id def updater_to(self, item_id: ID, item: Item, helper: SideHelper): - """Updater.""" + """Update an item using the given side helper.""" side, _ = self._get_side_instances(helper) serdes_dir, _ = self._get_serdes_dirs(helper) logger.info( f"[{helper.other}] Updating item [{self._summary_of(item, helper):10}] at" - f" {helper}..." + f" {helper}...", ) side.update_item(item_id, **item) pickle_dump(item, serdes_dir / item_id) def deleter_to(self, item_id: ID, helper: SideHelper): - """Deleter.""" + """Delete an item using the given side helper.""" logger.info(f"[{helper}] Synchronising deleted item, id -> {item_id}...") side, _ = self._get_side_instances(helper) side.delete_single_item(item_id) @@ -255,27 +269,28 @@ def item_getter_for(self, item_id: ID, helper: SideHelper) -> Item: """Item Getter.""" logger.debug(f"Fetching {helper} item for id -> {item_id}") side, _ = self._get_side_instances(helper) - item = side.get_item(item_id) - return item + return side.get_item(item_id) def _item_has_update(self, prev_item: Item, new_item: Item, helper: SideHelper) -> bool: """Determine whether the item has been updated.""" side, _ = self._get_side_instances(helper) return not side.items_are_identical( - prev_item, new_item, ignore_keys=[helper.id_key, *helper.ignore_keys] + prev_item, + new_item, + ignore_keys=[helper.id_key, *helper.ignore_keys], ) def _get_ids_map(self, helper: SideHelper): return self._B_to_A_map if helper is self._helper_B else self._B_to_A_map.inverse - def _get_serdes_dirs(self, helper: SideHelper) -> Tuple[Path, Path]: + def _get_serdes_dirs(self, helper: 
SideHelper) -> tuple[Path, Path]: serdes_dir = self.config[f"{helper}_serdes"] other_serdes_dir = self.config[f"{helper.other}_serdes"] return serdes_dir, other_serdes_dir - def _get_side_instances(self, helper: SideHelper) -> Tuple[SyncSide, SyncSide]: + def _get_side_instances(self, helper: SideHelper) -> tuple[SyncSide, SyncSide]: side = self._side_B if helper is self._helper_B else self._side_A other_side = self._side_A if helper is self._helper_B else self._side_B @@ -294,7 +309,7 @@ def full_path(id_: ID) -> Path: except FileNotFoundError: logger.warning(f"File doesn't exist, this may indicate an error -> {p}") logger.opt(exception=True).debug( - f"File doesn't exist, this may indicate an error -> {p}" + f"File doesn't exist, this may indicate an error -> {p}", ) def _summary_of(self, item: Item, helper: SideHelper, short=True) -> str: diff --git a/syncall/app_utils.py b/syncall/app_utils.py index 0e5e1b1..bd38207 100644 --- a/syncall/app_utils.py +++ b/syncall/app_utils.py @@ -4,16 +4,17 @@ `sys.exit()` to avoid dumping stack traces to the user. """ +from __future__ import annotations + import atexit import inspect import logging import os import subprocess import sys -from collections.abc import Iterable from datetime import datetime from pathlib import Path -from typing import Any, Mapping, NoReturn, Optional, Sequence, Tuple, Type, cast +from typing import TYPE_CHECKING, Any, Iterable, Mapping, NoReturn, Sequence, cast from urllib.parse import quote from bubop import ( @@ -37,11 +38,14 @@ ) from syncall.constants import COMBINATION_FLAGS, ISSUES_URL -from syncall.sync_side import SyncSide + +if TYPE_CHECKING: + from syncall.sync_side import SyncSide + from syncall.types import SupportsStr # Various resolution strategies with their respective names so that the user can choose which # one they want. 
------------------------------------------------------------------------------ -name_to_resolution_strategy_type: Mapping[str, Type[ResolutionStrategy]] = { +name_to_resolution_strategy_type: Mapping[str, type[ResolutionStrategy]] = { "MostRecentRS": MostRecentRS, "LeastRecentRS": LeastRecentRS, "AlwaysFirstRS": AlwaysFirstRS, @@ -50,56 +54,58 @@ def confirm_before_proceeding(): - """ - Confirm that the user wants to go forward with this configuration before actually + """Confirm that the user wants to go forward with this configuration before actually proceeding. Exit if the user doesn't want to proceed. """ - while True: ans = input("Continue [Y/n] ? ").lower() if ans in ["y", "yes", ""]: break - elif ans in ["n", "no"]: + + if ans in ["n", "no"]: error_and_exit("Exiting.") def get_resolution_strategy( - resolution_strategy_name: str, side_A_type: Type[SyncSide], side_B_type: Type[SyncSide] + resolution_strategy_name: str, + side_A_type: type[SyncSide], + side_B_type: type[SyncSide], ) -> ResolutionStrategy: - """ + """Get the resolution strategy in use. + Given the name of the resolution strategy and the types of the 2 synchronization sides, get an instance of the resolution strategy in use. 
""" - RS = name_to_resolution_strategy_type[resolution_strategy_name] - if issubclass(RS, RecencyRS): - instance = RS( + rs_class = name_to_resolution_strategy_type[resolution_strategy_name] + if issubclass(rs_class, RecencyRS): + instance = rs_class( date_getter_A=lambda item: cast( - datetime, item[side_A_type.last_modification_key()] + datetime, + item[side_A_type.last_modification_key()], ), date_getter_B=lambda item: cast( - datetime, item[side_B_type.last_modification_key()] + datetime, + item[side_B_type.last_modification_key()], ), ) else: - instance = RS() + instance = rs_class() return instance def app_name(): - """ - Return the name of the application which defines the config, cache, and share directories + """Return the name of the application which defines the config, cache, and share directories of this app. """ if "SYNCALL_TESTENV" in os.environ: return "test_syncall" - else: - return "syncall" + + return "syncall" def get_config_name_for_args(*args) -> str: - """ - Get a name string by concatenating the given args. Encodes the non-ascii + """Get a name string by concatenating the given args. Encodes the non-ascii characters using the urllib parse method Usage:: @@ -116,7 +122,6 @@ def get_config_name_for_args(*args) -> str: Traceback (most recent call last): RuntimeError: ... 
""" - # sanity check if len(args) == 1: raise RuntimeError("get_config_name_for_args requires more > 1 arguments") @@ -124,11 +129,13 @@ def get_config_name_for_args(*args) -> str: def quote_(obj: str) -> str: return quote(obj, safe="+,") - def format_(obj: Any) -> str: + def format_(obj: SupportsStr) -> str: if isinstance(obj, str): return quote_(obj) - elif isinstance(obj, Iterable): + + if isinstance(obj, Iterable): return ",".join(quote_(str(o)) for o in obj) + return str(obj) return "__".join(map(format_, args)) @@ -139,16 +146,19 @@ def get_named_combinations(config_fname: str) -> Sequence[str]: dummy_logger = logging.getLogger("dummy") dummy_logger.setLevel(logging.CRITICAL + 1) with PrefsManager( - app_name=app_name(), config_fname=config_fname, logger=dummy_logger + app_name=app_name(), + config_fname=config_fname, + logger=dummy_logger, ) as prefs_manager: return list(prefs_manager.keys()) def fetch_app_configuration( - side_A_name: str, side_B_name: str, combination: str + side_A_name: str, + side_B_name: str, + combination: str, ) -> Mapping[str, Any]: - """ - Fetch the configuration of a top-level synchronization app. + """Fetch the configuration of a top-level synchronization app. This function is useful for parsing a previously cached configuration of a synchronization app. 
The configuration file is managed by a bubop.PrefsManager instance and the @@ -167,7 +177,7 @@ def fetch_app_configuration( format_list( header="\n\nNo such configuration found - existing configurations are", items=existing_keys, - ) + ), ) # config combination found ------------------------------------------------------------ @@ -178,13 +188,11 @@ def fetch_app_configuration( def cache_or_reuse_cached_combination( config_args: Mapping[str, Any], config_fname: str, - custom_combination_savename: Optional[str], + custom_combination_savename: str | None, ): - """ - App utility function that either retrieves the configuration for the app at hand based on + """App utility function that either retrieves the configuration for the app at hand based on the given arguments or retrieves it based on the custom configuration name specified. """ - if custom_combination_savename is None: config_name = get_config_name_for_args(*config_args.values()) else: @@ -231,7 +239,7 @@ def inform_about_combination_name_usage(combination_name: str): logger.success( "Sync completed successfully. You can now use the" f" {'/'.join(COMBINATION_FLAGS)} option to refer to this particular combination\n\n " - f" {exec_name} {COMBINATION_FLAGS[1]} {combination_name}" + f" {exec_name} {COMBINATION_FLAGS[1]} {combination_name}", ) @@ -243,7 +251,7 @@ def inform_about_app_extras(extras: Sequence[str]) -> NoReturn: "\nYou have to install the" f" {extras_str} {'extra' if len(extras) == 1 else 'extras'} for {exec_name} to" ' work.\nWith pip, you can do it with something like: "pip3 install' - f' syncall[{extras_str}]"\nExiting.' 
+ f' syncall[{extras_str}]"\nExiting.', ) sys.exit(1) @@ -270,7 +278,7 @@ def write_to_pass_manager(password_path: str, passwd: str) -> None: logger.error( f"Cannot find .gpg-id file under the password store - {pass_dir}\n" "Cannot write to the provided password path " - f"in the password store -> {pass_full_path}" + f"in the password store -> {pass_full_path}", ) sys.exit(1) pass_owner = gpg_id_file.read_text().rstrip() @@ -278,15 +286,14 @@ def write_to_pass_manager(password_path: str, passwd: str) -> None: write_gpg_token(p=pass_full_path, token=passwd, recipient=pass_owner) -def fetch_from_pass_manager(password_path: str, allow_fail=False) -> Optional[str]: - """ - Gpg-decrypt and read the contents of a password file. The path should be either relative - to the password store directory or fullpath. +def fetch_from_pass_manager(password_path: str, allow_fail=False) -> str | None: + """Gpg-decrypt and read the contents of a password file. + + The path should be either relative to the password store directory or fullpath. If allow_fail=False, and it indeed fails, it will return None. otherwise, allow_fail=True and it fails, it will log an error with the logger and will `sys.exit`. 
""" - logger.debug(f"Attempting to read {password_path} from UNIX Password Store...") pass_dir = valid_path(os.environ.get("PASSWORD_STORE_DIR", "~/.password-store")) if str(password_path).startswith(str(pass_dir)): @@ -307,8 +314,8 @@ def fetch_from_pass_manager(password_path: str, allow_fail=False) -> Optional[st f" {pass_full_path}", non_empty("stdout", err.stdout.decode("utf-8"), join_with=": "), non_empty("stderr", err.stderr.decode("utf-8"), join_with=": "), - ] - ) + ], + ), ) sys.exit(1) @@ -316,11 +323,11 @@ def fetch_from_pass_manager(password_path: str, allow_fail=False) -> Optional[st def gkeep_read_username_password_token( - gkeep_user_pass_path: str, gkeep_passwd_pass_path: str, gkeep_token_pass_path: str -) -> Tuple[Optional[str], Optional[str], Optional[str]]: - """ - Helper method for reading the username, password and application token for applications - that connect to Google Keep using the gkeepapi python module. + gkeep_user_pass_path: str, + gkeep_passwd_pass_path: str, + gkeep_token_pass_path: str, +) -> tuple[str | None, str | None, str | None]: + """Read the username, password and application token for apps that use gkeepapi. For all three of the variables above, it will first try reading them from environment variables, then if empty will resort to reading them from the UNIX Password manager. 
@@ -351,7 +358,8 @@ def gkeep_read_username_password_token( return gkeep_user, gkeep_passwd, gkeep_token -def app_log_to_syslog(): +def app_log_to_syslog() -> None: + """Enable logging to syslog for the application.""" caller_frame = inspect.stack()[1] calling_file = Path(caller_frame[1]) fname = calling_file.stem @@ -359,7 +367,10 @@ def app_log_to_syslog(): def register_teardown_handler( - pdb_on_error: bool, inform_about_config: bool, combination_name: str, verbose: int + pdb_on_error: bool, + inform_about_config: bool, + combination_name: str, + verbose: int, ) -> ExitHooks: """Shortcut for registering the teardown logic in a top-level sync application. @@ -379,10 +390,12 @@ def teardown(): if inform_about_config: inform_about_combination_name_usage(combination_name) + return 0 + if pdb_on_error: logger.warning( "pdb_on_error is enabled. Disabling exit hooks / not taking actions at the end " - "of the run." + "of the run.", ) else: hooks.register() @@ -391,16 +404,14 @@ def teardown(): return hooks -def determine_app_config_fname(side_A_name: str, side_B_name: str): - """ - Get the configuration name for the app at hand given the names of the sides involved. +def determine_app_config_fname(side_A_name: str, side_B_name: str) -> str: + """Get the configuration name for the app at hand given the names of the sides involved. 
>>> assert determine_app_config_fname("TW", "Google Tasks") == 'tw__google_tasks__configs.yaml' >>> assert determine_app_config_fname("TW", "Google Calendar") == 'tw__google_calendar__configs.yaml' """ - config_fname = ( + return ( f'{side_A_name.replace(" ", "_").lower()}' "__" f'{side_B_name.replace(" ", "_").lower()}__configs.yaml' ) - return config_fname diff --git a/syncall/asana/__init__.py b/syncall/asana/__init__.py index e69de29..b70e928 100644 --- a/syncall/asana/__init__.py +++ b/syncall/asana/__init__.py @@ -0,0 +1 @@ +"""Asana side subpackage.""" diff --git a/syncall/asana/asana_side.py b/syncall/asana/asana_side.py index ebba0b2..330cf5c 100644 --- a/syncall/asana/asana_side.py +++ b/syncall/asana/asana_side.py @@ -4,7 +4,7 @@ from syncall.asana.asana_task import AsanaTask from syncall.sync_side import SyncSide -from syncall.types import AsanaGID, AsanaRawTask +from syncall.types import AsanaGID # Request up to 100 tasks at a time in GET /tasks API call. # The API doesn't allow page sizes larger than 100. @@ -12,11 +12,10 @@ class AsanaSide(SyncSide): - """ - Wrapper class to add/modify/delete asana tasks, etc. - """ + """Wrapper class to add/modify/delete asana tasks, etc.""" def __init__(self, client: asana.Client, task_gid: AsanaGID, workspace_gid: AsanaGID): + """Initialize the Asana side.""" self._client = client self._task_gid = task_gid self._workspace_gid = workspace_gid @@ -30,11 +29,14 @@ def finish(self): pass def get_all_items(self, **kwargs) -> Sequence[AsanaTask]: + del kwargs results = [] if self._task_gid is None: tasks = self._client.tasks.find_all( - assignee="me", workspace=self._workspace_gid, page_size=GET_TASKS_PAGE_SIZE + assignee="me", + workspace=self._workspace_gid, + page_size=GET_TASKS_PAGE_SIZE, ) for task in tasks: @@ -84,8 +86,6 @@ def update_item(self, item_id: AsanaGID, **changes): # - If the remote Asana task 'due_on' field is empty, update 'due_at'. 
# - If the remote Asana task 'due_on' field is not empty and the # 'due_at' field is empty, update 'due_on'. - # TODO: find a way to store this information locally, so we don't have - # to fetch the task from Asana to determine this. remote_task = self.get_item(item_id) if remote_task.get("due_on", None) is None: raw_task.pop("due_on", None) @@ -121,8 +121,7 @@ def add_item(self, item: AsanaTask) -> AsanaTask: @classmethod def id_key(cls) -> str: - """ - Key in the dictionary of the added/updated/deleted item (task) that refers to the ID of + """Key in the dictionary of the added/updated/deleted item (task) that refers to the ID of that item (task). """ return "gid" @@ -139,7 +138,10 @@ def last_modification_key(cls) -> str: @classmethod def items_are_identical( - cls, item1: AsanaTask, item2: AsanaTask, ignore_keys: Sequence[str] = [] + cls, + item1: AsanaTask, + item2: AsanaTask, + ignore_keys: Sequence[str] = [], ) -> bool: """Determine whether two items (tasks) are identical. @@ -152,7 +154,6 @@ def items_are_identical( compare_keys.remove(key) # Special handling for 'due_at' and 'due_on' - # TODO: reduce ['due_at','due_on'] to 'due_at', compare and remove both # keys. 
if item1.get("due_at", None) is not None and item2.get("due_at", None) is not None: compare_keys.remove("due_on") diff --git a/syncall/asana/asana_task.py b/syncall/asana/asana_task.py index a8f6497..9921125 100644 --- a/syncall/asana/asana_task.py +++ b/syncall/asana/asana_task.py @@ -1,14 +1,19 @@ +from __future__ import annotations + import datetime from dataclasses import dataclass -from typing import Any, Mapping, Optional +from typing import TYPE_CHECKING, Any, Mapping from bubop import parse_datetime -from syncall.types import AsanaGID, AsanaRawTask +if TYPE_CHECKING: + from syncall.types import AsanaGID, AsanaRawTask @dataclass class AsanaTask(Mapping): + """Represent an Asana task.""" + completed: bool completed_at: datetime.datetime created_at: datetime.datetime @@ -16,31 +21,32 @@ class AsanaTask(Mapping): due_on: datetime.date name: str modified_at: datetime.datetime - gid: Optional[AsanaGID] = None - - _key_names = { - "completed", - "completed_at", - "created_at", - "due_at", - "due_on", - "gid", - "name", - "modified_at", - } - - def __getitem__(self, key) -> Any: + gid: AsanaGID | None = None + + _key_names: frozenset[str] = frozenset( + { + "completed", + "completed_at", + "created_at", + "due_at", + "due_on", + "gid", + "name", + "modified_at", + }, + ) + + def __getitem__(self, key) -> Any: # noqa: ANN401 return getattr(self, key) def __iter__(self): - for k in self._key_names: - yield k + yield from self._key_names def __len__(self): return len(self._key_names) @classmethod - def from_raw_task(cls, raw_task: AsanaRawTask) -> "AsanaTask": + def from_raw_task(cls, raw_task: AsanaRawTask) -> AsanaTask: assert "completed" in raw_task assert "completed_at" in raw_task assert "created_at" in raw_task diff --git a/syncall/asana/utils.py b/syncall/asana/utils.py index 082a66c..495d2f7 100644 --- a/syncall/asana/utils.py +++ b/syncall/asana/utils.py @@ -8,11 +8,11 @@ def list_asana_workspaces(client: asana.Client) -> None: workspaces = 
client.workspaces.find_all() for workspace in workspaces: - items[workspace["name"]] = "gid=%s" % workspace["gid"] + items[workspace["name"]] = f'gid={workspace["gid"]}' logger.success( format_dict( header="\n\nAsana workspaces", items=items, - ) + ), ) diff --git a/syncall/caldav/caldav_side.py b/syncall/caldav/caldav_side.py index ac1dea3..d864634 100644 --- a/syncall/caldav/caldav_side.py +++ b/syncall/caldav/caldav_side.py @@ -1,10 +1,14 @@ -from typing import Any, Dict, Optional, Sequence +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, Sequence -import caldav from bubop import logger from caldav.lib.error import NotFoundError from icalendar.prop import vCategory, vDatetime, vText -from item_synchronizer.types import ID + +if TYPE_CHECKING: + import caldav + from item_synchronizer.types import ID from syncall.app_utils import error_and_exit from syncall.caldav.caldav_utils import calendar_todos, icalendar_component, map_ics_to_item @@ -12,22 +16,20 @@ class CaldavSide(SyncSide): - """ - Wrapper to add/modify/delete todo entries from a caldav server - """ + """Wrapper to add/modify/delete todo entries from a caldav server.""" ID_KEY = "id" SUMMARY_KEY = "summary" LAST_MODIFICATION_KEY = "last-modified" - _identical_comparison_keys = [ + _identical_comparison_key: tuple[str] = ( "description", "end", "status", "summary", "due", - ] + ) - _date_keys = ["end", "start", "last-modified"] + _date_keys: tuple[str] = ("end", "start", "last-modified") def __init__(self, client: caldav.DAVClient, calendar_name: str) -> None: super().__init__(name="caldav", fullname="Caldav") @@ -35,7 +37,7 @@ def __init__(self, client: caldav.DAVClient, calendar_name: str) -> None: self._client = client.principal() self._calendar_name = calendar_name self._calendar: caldav.Calendar - self._items_cache: Dict[str, dict] = {} + self._items_cache: dict[str, dict] = {} def start(self): logger.info(f"Initializing {self.fullname}...") @@ -50,17 +52,19 @@ def 
_get_calendar(self) -> caldav.Calendar: if "VTODO" not in acceptable_component_types: error_and_exit( f"Calendar {self._calendar_name} found but does not support VTODO entries" - " - please choose a different calendar" + " - please choose a different calendar", ) except NotFoundError: # Create calendar if not there ------------------------------------------------- logger.info(f"Calendar not found = Creating new calendar {self._calendar_name}") calendar = self._client.make_calendar( - name=self._calendar_name, supported_calendar_component_set=["VTODO"] + name=self._calendar_name, + supported_calendar_component_set=["VTODO"], ) return calendar def get_all_items(self, **kargs): + del kargs todos = [] raw_todos = calendar_todos(self._calendar) @@ -79,8 +83,8 @@ def get_item(self, item_id: ID, use_cached: bool = False): item = self._find_todo_by_id(item_id=item_id) return item - def _find_todo_by_id_raw(self, item_id: ID) -> Optional[caldav.CalendarObjectResource]: - item = next( + def _find_todo_by_id_raw(self, item_id: ID) -> caldav.CalendarObjectResource | None: + return next( ( item for item in calendar_todos(self._calendar) @@ -89,13 +93,13 @@ def _find_todo_by_id_raw(self, item_id: ID) -> Optional[caldav.CalendarObjectRes None, ) - return item - - def _find_todo_by_id(self, item_id: ID) -> Optional[Dict]: + def _find_todo_by_id(self, item_id: ID) -> dict | None: raw_item = self._find_todo_by_id_raw(item_id=item_id) if raw_item: return map_ics_to_item(icalendar_component(raw_item)) + return None + def delete_single_item(self, item_id: ID): todo = self._find_todo_by_id_raw(item_id=item_id) if todo is not None: @@ -105,12 +109,13 @@ def update_item(self, item_id: ID, **changes): todo = self._find_todo_by_id_raw(item_id=item_id) if todo is None: logger.error( - f"Trying to update item but cannot find item on the CalDav server -> {item_id}" + "Trying to update item but cannot find item on the CalDav server ->" + f" {item_id}", ) logger.opt(lazy=True).debug(f"Can't 
update item {item_id}\n\nchanges: {changes}")
             return
 
-        def set_(key: str, val: Any):
+        def set_(key: str, val: Any):  # noqa: ANN401
             icalendar_component(todo)[key] = val
 
         # pop the key:value (s) that we're intending to potentially update
diff --git a/syncall/caldav/caldav_utils.py b/syncall/caldav/caldav_utils.py
index eaa6ad1..2d22a81 100644
--- a/syncall/caldav/caldav_utils.py
+++ b/syncall/caldav/caldav_utils.py
@@ -1,19 +1,24 @@
+# ruff: noqa: PLR2004
+
+from __future__ import annotations
+
 import traceback
-from typing import Dict, List, Optional, Sequence, Tuple
+from typing import TYPE_CHECKING, Sequence
 from uuid import UUID
 
-import caldav
 from bubop import logger
-from icalendar.prop import vCategory
-from item_synchronizer.resolution_strategy import Item
+
+if TYPE_CHECKING:
+    import caldav
+    from icalendar.prop import vCategory
+    from item_synchronizer.resolution_strategy import Item
 
 
 def icalendar_component(obj: caldav.CalendarObjectResource):
-    """The .icalendar_component isn't picked up by linters
+    """Get the .icalendar_component attribute, which isn't picked up by linters.
 
     Ignore the warning when accessing it.
""" - return obj.icalendar_component # type: ignore @@ -25,11 +30,8 @@ def _parse_vcategory(vcategory: vCategory) -> Sequence[str]: return [str(category) for category in vcategory.cats] -def map_ics_to_item(vtodo) -> Dict: - """ - Utility function that extracts the relevant info from an icalendar_component into a python - dict - """ +def map_ics_to_item(vtodo) -> dict: + """Extract the relevant info from an icalendar_component into a python dict.""" todo_item = {} todo_item["id"] = str(vtodo.get("uid")) @@ -55,13 +57,13 @@ def _convert_one(name: str) -> str: # return a List[vCategory], each vCategory with a single name # Option 1: # - # CATEGORIES:bugwarrior - # CATEGORIES:github_working_on_it - # CATEGORIES:programming - # CATEGORIES:remindme + # | CATEGORIES:bugwarrior + # | CATEGORIES:github_working_on_it + # | CATEGORIES:programming + # | CATEGORIES:remindme # # Option 2: - # CATEGORIES:bugwarrior,github_bug,github_help_wanted,github_tw_gcal_sync,pro + # | CATEGORIES:bugwarrior,github_bug,github_help_wanted,github_tw_gcal_sync,pro all_categories = [] if isinstance(vcategories, Sequence): for vcategory in vcategories: @@ -75,13 +77,12 @@ def _convert_one(name: str) -> str: def parse_caldav_item_desc( caldav_item: Item, -) -> Tuple[List[str], Optional[UUID]]: - """ - Parse and return the necessary TW fields off a caldav Item. +) -> tuple[list[str], UUID | None]: + """Parse and return the necessary TW fields off a caldav Item. 
Pretty much directly copied from tw_gcal_utils, however we handle status differently, so only return annotations/uuid """ - annotations: List[str] = [] + annotations: list[str] = [] uuid = None if "description" not in caldav_item.keys(): @@ -92,19 +93,19 @@ def parse_caldav_item_desc( lines = [line.strip() for line in caldav_desc.split("\n") if line][1:] # annotations - i = 0 - for i, line in enumerate(lines): + _i = 0 + for _i, line in enumerate(lines): parts = line.split(":", maxsplit=1) if len(parts) == 2 and parts[0].lower().startswith("* annotation"): annotations.append(parts[1].strip()) else: break - if i == len(lines): + if _i == len(lines): return annotations, uuid # Iterate through rest of lines, find only the uuid - for line in lines[i:]: + for line in lines[_i:]: parts = line.split(":", maxsplit=1) if len(parts) == 2 and parts[0].lower().startswith("* uuid"): try: @@ -112,7 +113,7 @@ def parse_caldav_item_desc( except ValueError as err: logger.error( f'Invalid UUID "{err}" provided during caldav -> TW conversion,' - f" Using None...\n\n{traceback.format_exc()}" + f" Using None...\n\n{traceback.format_exc()}", ) return annotations, uuid diff --git a/syncall/cli.py b/syncall/cli.py index 114e8fc..3425645 100644 --- a/syncall/cli.py +++ b/syncall/cli.py @@ -23,6 +23,8 @@ def _set_own_excepthook(ctx, param, value): + del param + if not value or ctx.resilient_parsing: return value @@ -51,15 +53,13 @@ def decorator(f): _opt_asana_workspace_gid, _opt_asana_workspace_name, _opt_list_asana_workspaces, - ] + ], ): f = d()(f) # --asana-task-gid is used to ease development and debugging. It is not currently # suitable for regular use. 
- f = _opt_asana_task_gid(hidden=hidden_gid)(f) - - return f + return _opt_asana_task_gid(hidden=hidden_gid)(f) return decorator @@ -77,6 +77,8 @@ def _opt_asana_task_gid(**kwargs): def _opt_asana_token_pass_path(): def callback(ctx, param, value): + del ctx + api_token_pass_path = value # fetch API token to connect to asana ------------------------------------------------- @@ -85,12 +87,12 @@ def callback(ctx, param, value): if asana_token is None and api_token_pass_path is None: error_and_exit( "You must provide an Asana Personal Access asana_token, using the" - f" {'/'.join(param.opts)} option" + f" {'/'.join(param.opts)} option", ) if asana_token is not None: logger.debug( "Reading the Asana Personal Access asana_token (PAT) from environment" - " variable..." + " variable...", ) else: asana_token = fetch_from_pass_manager(api_token_pass_path) @@ -147,7 +149,7 @@ def decorator(f): _opt_tw_project, _opt_tw_only_tasks_modified_X_days, _opt_prefer_scheduled_date, - ] + ], ): f = d()(f) return f @@ -204,8 +206,10 @@ def _opt_tw_project(): def _opt_tw_only_tasks_modified_X_days(): def callback(ctx, param, value): + del param + if value is None or ctx.resilient_parsing: - return + return None return f"modified.after:-{value}d" @@ -262,7 +266,7 @@ def decorator(f): _opt_gkeep_user_pass_path, _opt_gkeep_passwd_pass_path, _opt_gkeep_token_pass_path, - ] + ], ): f = d()(f) @@ -382,7 +386,7 @@ def decorator(f): _opt_caldav_user, _opt_caldav_passwd_pass_path, _opt_caldav_passwd_cmd, - ] + ], ): f = d()(f) @@ -472,14 +476,13 @@ def decorator(f): (_opt_list_combinations, side_A_name, side_B_name), (_opt_combination, side_A_name, side_B_name), (_opt_custom_combination_savename, side_A_name, side_B_name), - ] + ], ): fn = d[0] fn_args = d[1:] f = fn(*fn_args)(f) # type: ignore - f = click.option("-v", "--verbose", count=True)(f) - return f + return click.option("-v", "--verbose", count=True)(f) return decorator @@ -511,17 +514,19 @@ def _list_named_combinations(config_fname: 
str) -> None: format_list( header="\n\nNamed configurations currently available", items=get_named_combinations(config_fname=config_fname), - ) + ), ) def _opt_list_combinations(side_A_name: str, side_B_name: str): def callback(ctx, param, value): + del ctx, param if value is True: _list_named_combinations( config_fname=determine_app_config_fname( - side_A_name=side_A_name, side_B_name=side_B_name - ) + side_A_name=side_A_name, + side_B_name=side_B_name, + ), ) sys.exit(0) @@ -547,11 +552,17 @@ def _opt_resolution_strategy(): def _opt_list_resolution_strategies(): def _list_resolution_strategies(ctx, param, value): + del ctx, param + if value is not True: return strs = name_to_resolution_strategy_type.keys() - click.echo("\n".join([f"{a}. {b}" for a, b in zip(range(1, len(strs) + 1), strs)])) + click.echo( + "\n".join( + [f"{a}. {b}" for a, b in zip(range(1, len(strs) + 1), strs, strict=False)], + ), + ) sys.exit(0) return click.option( diff --git a/syncall/concrete_item.py b/syncall/concrete_item.py index 8db155d..456825c 100644 --- a/syncall/concrete_item.py +++ b/syncall/concrete_item.py @@ -10,6 +10,8 @@ class KeyType(Enum): + """Possible types of keys in an item.""" + String = auto() Date = auto() Boolean = auto() @@ -18,6 +20,8 @@ class KeyType(Enum): @dataclass class ItemKey: + """Key of an item.""" + name: str type: KeyType @@ -45,7 +49,7 @@ def id(self) -> Optional[ID]: def _id(self) -> Optional[str]: pass - def __getitem__(self, key: str) -> Any: + def __getitem__(self, key: str) -> Any: # noqa: ANN401 return getattr(self, key) def __iter__(self) -> Iterator[str]: @@ -81,19 +85,20 @@ def compare( for key in keys_to_check: if key.type is KeyType.Date: if not is_same_datetime( - self[key.name], other[key.name], tol=datetime.timedelta(minutes=10) + self[key.name], + other[key.name], + tol=datetime.timedelta(minutes=10), ): logger.opt(lazy=True).trace( f"\n\nItems differ\n\nItem1\n\n{self}\n\nItem2\n\n{other}\n\nKey" - f" [{key.name}] is different - 
[{repr(self[key.name])}] |"
-                        f" [{repr(other[key.name])}]"
-                    )
-                    return False
-            else:
-                if self[key.name] != other[key.name]:
-                    logger.opt(lazy=True).trace(
-                        f"Items differ [{key.name}]\n\n{self}\n\n{other}"
+                        f" [{self[key.name]!r}] |"
+                        f" [{other[key.name]!r}]",
                     )
                     return False
+            elif self[key.name] != other[key.name]:
+                logger.opt(lazy=True).trace(
+                    f"Items differ [{key.name}]\n\n{self}\n\n{other}",
+                )
+                return False
 
         return True
diff --git a/syncall/exceptions.py b/syncall/exceptions.py
index 20f463a..dcc4ff8 100644
--- a/syncall/exceptions.py
+++ b/syncall/exceptions.py
@@ -1,13 +1,16 @@
-from pathlib import Path
-from typing import Optional
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from pathlib import Path
 
 
 class AttributeNotSetError(BaseException):
-    """
-    Exception raised an attribute (e.g., on a file) has not been set as expected.
-    """
+    """Exception raised when an attribute (e.g., on a file) has not been set as expected."""
 
-    def __init__(self, attr_name: str, path: Optional[Path] = None):
+    def __init__(self, attr_name: str, path: Path | None = None):
+        """Initialize the exception."""
         s = f"Attribute {attr_name} has not been set"
         if path is not None:
             s += f" for file {path}"
diff --git a/syncall/filesystem/filesystem_file.py b/syncall/filesystem/filesystem_file.py
index ba95b1e..0004378 100644
--- a/syncall/filesystem/filesystem_file.py
+++ b/syncall/filesystem/filesystem_file.py
@@ -50,9 +50,9 @@ def __init__(
                 ItemKey("last_modified_date", KeyType.Date),
                 ItemKey("contents", KeyType.String),
                 ItemKey("title", KeyType.String),
-            )
+            ),
         )
-        if not filetype is FileType.FILE:
+        if filetype is not FileType.FILE:
             raise NotImplementedError("Only supporting synchronization for raw files.")
 
         path_ = Path(path)
@@ -83,7 +83,7 @@ def __init__(
             self._id_str = _generate_id()
             logger.trace(
                 f"File [{self._title}] doesn't have an ID yet, assigning new ID ->"
-                f" 
{self._id_str}", ) self._set_id_on_flush = True if self._flush_on_instantiation: @@ -114,7 +114,6 @@ def __exit__(self, *_): def flush(self) -> None: """Teardown method - call this to make changes to the file persistent.""" - # delete if it's for deletion if self._set_for_deletion: self._path.unlink() @@ -130,7 +129,7 @@ def flush(self) -> None: if self._set_title_on_flush: self._set_title_on_flush = False self._path = self._path.rename( - self._path.with_name(self.title).with_suffix(self._ext) + self._path.with_name(self.title).with_suffix(self._ext), ) logger.trace(f"Renaming file on disk, new name -> {self._path.name}") @@ -148,14 +147,13 @@ def _get_id(self) -> str: @classmethod def get_id_of_path(cls, path: Path) -> ID: - """ - .. raises AttributeNotSetError in case the path doesn't have the expected attribute - set. + """.. raises AttributeNotSetError in case the path doesn't have the expected attribute + set. """ try: with path.open() as fd: return _from_b(xattr.getxattr(fd, _to_b(cls._attr))) - except IOError as err: + except OSError as err: raise AttributeNotSetError(attr_name=cls._attr, path=path) from err def _id(self) -> Optional[ID]: @@ -181,11 +179,14 @@ def title(self, new_title): @property def last_modified_date(self) -> datetime.datetime: - # TODO Amend this. 
+ tzinfo = datetime.datetime.now().astimezone().tzinfo try: - return datetime.datetime.fromtimestamp(self._path.stat().st_mtime) + return datetime.datetime.fromtimestamp( + self._path.stat().st_mtime, + tz=tzinfo, + ) except FileNotFoundError: - return datetime.datetime.utcfromtimestamp(0) + return datetime.datetime.fromtimestamp(0, tz=tzinfo) def delete(self) -> None: """Mark this file for deletion.""" diff --git a/syncall/filesystem/filesystem_side.py b/syncall/filesystem/filesystem_side.py index a1e29e6..5b38490 100644 --- a/syncall/filesystem/filesystem_side.py +++ b/syncall/filesystem/filesystem_side.py @@ -54,6 +54,8 @@ def finish(self): def get_all_items(self, **kargs) -> Sequence[FilesystemFile]: """Read all items again from storage.""" + del kargs + all_items = tuple( FilesystemFile(path=p) for p in self._filesystem_root.iterdir() @@ -62,7 +64,7 @@ def get_all_items(self, **kargs) -> Sequence[FilesystemFile]: logger.opt(lazy=True).debug( f"Found {len(all_items)} matching files under {self._filesystem_root} using" - f" extension {self._filename_extension}" + f" extension {self._filename_extension}", ) return all_items @@ -83,7 +85,7 @@ def _get_item_refresh(self, item_id: ID) -> Optional[FilesystemFile]: if len(matching_fs_files) > 1: logger.warning( f"Found {len(matching_fs_files)} paths with the item ID [{item_id}]." - "Arbitrarily returning the first item." 
+                "Arbitrarily returning the first item.",
             )
         elif len(matching_fs_files) == 0:
             return None
@@ -123,7 +125,10 @@ def add_item(self, item: FilesystemFile) -> FilesystemFile:
 
     @classmethod
     def items_are_identical(
-        cls, item1: ConcreteItem, item2: ConcreteItem, ignore_keys: Sequence[str] = []
+        cls,
+        item1: ConcreteItem,
+        item2: ConcreteItem,
+        ignore_keys: Sequence[str] = [],
     ) -> bool:
         ignore_keys_ = [cls.last_modification_key()]
         ignore_keys_.extend(ignore_keys)
diff --git a/syncall/filesystem_gkeep_utils.py b/syncall/filesystem_gkeep_utils.py
index be13d8d..8c3efe6 100644
--- a/syncall/filesystem_gkeep_utils.py
+++ b/syncall/filesystem_gkeep_utils.py
@@ -15,16 +15,14 @@ def convert_filesystem_file_to_gkeep_note(filesystem_file: FilesystemFile) -> GK
 def convert_gkeep_note_to_filesystem_file(
     gkeep_note: GKeepNote,
     filename_extension=FilesystemFile.default_ext,
-    filesystem_root: Path = Path("."),
+    filesystem_root: Path = Path(),
 ) -> FilesystemFile:
-    """
-    GKeep Note -> Filesystemm File
+    """GKeep Note -> Filesystem File
 
     :param gkeep_note: The note to convert
     :param filename_extension: The extension to use for the created file.
     :return: The newly created FilesystemFile
     """
-
    # determine note title with the following order
    # 1. Original GKeep note title (unless empty)
    # 2. 
First line of GKeep note (unless empty file) diff --git a/syncall/google/__init__.py b/syncall/google/__init__.py index e69de29..b64945a 100644 --- a/syncall/google/__init__.py +++ b/syncall/google/__init__.py @@ -0,0 +1 @@ +"""Google-related subpackage.""" diff --git a/syncall/google/gcal_side.py b/syncall/google/gcal_side.py index 52588a3..a0b40fe 100644 --- a/syncall/google/gcal_side.py +++ b/syncall/google/gcal_side.py @@ -1,7 +1,8 @@ +from __future__ import annotations + import datetime -import os from pathlib import Path -from typing import Dict, List, Literal, Optional, Sequence, Union, cast +from typing import TYPE_CHECKING, Literal, Sequence, cast import dateutil import pkg_resources @@ -12,15 +13,18 @@ from syncall.google.google_side import GoogleSide from syncall.sync_side import SyncSide +if TYPE_CHECKING: + from syncall.types import GoogleDateT + DEFAULT_CLIENT_SECRET = pkg_resources.resource_filename( - "syncall", os.path.join("res", "gcal_client_secret.json") + "syncall", + "res/gcal_client_secret.json", ) class GCalSide(GoogleSide): """GCalSide interacts with the Google Calendar API. - Adds, removes, and updates events on Google Calendar. Also handles the OAuth2 user authentication workflow. 
""" @@ -28,20 +32,20 @@ class GCalSide(GoogleSide): ID_KEY = "id" SUMMARY_KEY = "summary" LAST_MODIFICATION_KEY = "updated" - _identical_comparison_keys = [ + _identical_comparison_keys: tuple[str] = ( "description", "end", "start", "summary", - ] + ) - _date_keys = ["end", "start", "updated"] + _date_keys: tuple[str] = ("end", "start", "updated") def __init__( self, *, calendar_summary="TaskWarrior Reminders", - client_secret, + client_secret: str | None, **kargs, ): if client_secret is None: @@ -52,13 +56,13 @@ def __init__( fullname="Google Calendar", scopes=["https://www.googleapis.com/auth/calendar"], credentials_cache=Path.home() / ".gcal_credentials.pickle", - client_secret=Path(client_secret), + client_secret=client_secret, **kargs, ) self._calendar_summary = calendar_summary self._calendar_id: str - self._items_cache: Dict[str, dict] = {} + self._items_cache: dict[str, dict] = {} def start(self): logger.debug("Connecting to Google Calendar...") @@ -80,13 +84,13 @@ def start(self): logger.debug("Connected to Google Calendar.") - def _fetch_cal_id(self) -> Optional[str]: + def _fetch_cal_id(self) -> str | None: """Return the id of the Calendar based on the given Summary. 
:returns: id or None if that was not found """ res = self._service.calendarList().list().execute() # type: ignore - calendars_list: List[dict] = res["items"] + calendars_list: list[dict] = res["items"] matching_calendars = [ c["id"] for c in calendars_list if c["summary"] == self._calendar_summary @@ -94,18 +98,21 @@ def _fetch_cal_id(self) -> Optional[str]: if len(matching_calendars) == 0: return None - elif len(matching_calendars) == 1: + + if len(matching_calendars) == 1: return cast(str, matching_calendars[0]) - else: - raise RuntimeError( - f'Multiple matching calendars for name -> "{self._calendar_summary}"' - ) + + raise RuntimeError( + f'Multiple matching calendars for name -> "{self._calendar_summary}"', + ) def get_all_items(self, **kargs): """Get all the events for the calendar that we use. :param kargs: Extra options for the call """ + del kargs + # Get the ID of the calendar of interest events = [] request = self._service.events().list(calendarId=self._calendar_id) @@ -128,14 +135,14 @@ def get_all_items(self, **kargs): return events - def get_item(self, item_id: str, use_cached: bool = True) -> Optional[dict]: + def get_item(self, item_id: str, use_cached: bool = True) -> dict | None: item = self._items_cache.get(item_id) if not use_cached or item is None: item = self._get_item_refresh(item_id=item_id) return item - def _get_item_refresh(self, item_id: str) -> Optional[dict]: + def _get_item_refresh(self, item_id: str) -> dict | None: ret = None try: ret = ( @@ -150,8 +157,8 @@ def _get_item_refresh(self, item_id: str) -> Optional[dict]: self._items_cache[item_id] = ret except HttpError: pass - finally: - return ret + + return ret def update_item(self, item_id, **changes): # Check if item is there @@ -160,7 +167,9 @@ def update_item(self, item_id, **changes): ) event.update(changes) self._service.events().update( - calendarId=self._calendar_id, eventId=event["id"], body=event + calendarId=self._calendar_id, + eventId=event["id"], + body=event, 
).execute() def add_item(self, item) -> dict: @@ -187,29 +196,27 @@ def last_modification_key(cls) -> str: return cls.LAST_MODIFICATION_KEY @staticmethod - def get_date_key(d: dict) -> Union[Literal["date"], Literal["dateTime"]]: + def get_date_key(d: dict) -> Literal["date", "dateTime"]: """Get key corresponding to the date field.""" - if "dateTime" not in d.keys() and "date" not in d.keys(): + if "dateTime" not in d and "date" not in d: raise RuntimeError("None of the required keys is in the dictionary") - return "date" if d.get("date", None) else "dateTime" + return "date" if d.get("date") else "dateTime" @staticmethod def get_event_time(item: dict, t: str) -> datetime.datetime: - """ - Return the start/end datetime in datetime format. + """Return the start/end datetime in datetime format. :param t: Time to query, 'start' or 'end' """ assert t in ["start", "end"] - assert t in item.keys(), "'end' key not found in item" + assert t in item, "'end' key not found in item" # sometimes the google calendar api returns this as a datetime if isinstance(item[t], datetime.datetime): return item[t] - dt = GCalSide.parse_datetime(item[t][GCalSide.get_date_key(item[t])]) - return dt + return GCalSide.parse_datetime(item[t][GCalSide.get_date_key(item[t])]) @staticmethod def format_datetime(dt: datetime.datetime) -> str: @@ -217,31 +224,31 @@ def format_datetime(dt: datetime.datetime) -> str: return format_datetime_tz(dt) @classmethod - def parse_datetime(cls, dt: Union[str, dict, datetime.datetime]) -> datetime.datetime: - """ - Parse datetime given in the GCal format(s): + def parse_datetime(cls, dt: GoogleDateT) -> datetime.datetime: + """Parse datetime given in the GCal format(s): - string with ('T', 'Z' separators). - (dateTime, dateZone) dictionary - datetime object The output datetime is always in local timezone. 
""" - if isinstance(dt, str): dt_dt = dateutil.parser.parse(dt) # type: ignore return cls.parse_datetime(dt_dt) - elif isinstance(dt, dict): + + if isinstance(dt, dict): date_time = dt.get("dateTime") if date_time is None: raise RuntimeError(f"Invalid structure dict: {dt}") return cls.parse_datetime(date_time) - elif isinstance(dt, datetime.datetime): + + if isinstance(dt, datetime.datetime): return assume_local_tz_if_none(dt) - else: - raise RuntimeError( - f"Unexpected type of a given date item, type: {type(dt)}, contents: {dt}" - ) + + raise TypeError( + f"Unexpected type of a given date item, type: {type(dt)}, contents: {dt}", + ) @classmethod def items_are_identical(cls, item1, item2, ignore_keys: Sequence[str] = []) -> bool: diff --git a/syncall/google/gkeep_note.py b/syncall/google/gkeep_note.py index 9347f0d..b3ed252 100644 --- a/syncall/google/gkeep_note.py +++ b/syncall/google/gkeep_note.py @@ -20,7 +20,7 @@ def __init__(self, plaintext: str = "", title: str = ""): ItemKey("title", KeyType.String), ItemKey("last_modified_date", KeyType.Date), ItemKey("is_deleted", KeyType.Boolean), - ) + ), ) self._inner: Note = Note() diff --git a/syncall/google/gkeep_note_side.py b/syncall/google/gkeep_note_side.py index 10bb397..def447e 100644 --- a/syncall/google/gkeep_note_side.py +++ b/syncall/google/gkeep_note_side.py @@ -1,10 +1,16 @@ -from typing import Optional, Sequence, Set +from __future__ import annotations + +from typing import TYPE_CHECKING, Sequence from gkeepapi.node import Label, Note, TopLevelNode -from item_synchronizer.types import ID + +if TYPE_CHECKING: + from item_synchronizer.types import ID + + from syncall.concrete_item import ConcreteItem + from loguru import logger -from syncall.concrete_item import ConcreteItem from syncall.google.gkeep_note import GKeepNote from syncall.google.gkeep_side import GKeepSide @@ -26,20 +32,19 @@ def last_modification_key(cls) -> str: def __init__( self, - gkeep_labels: Sequence[str] = tuple(), - 
gkeep_ignore_labels: Sequence[str] = tuple(), + gkeep_labels: Sequence[str] = (), + gkeep_ignore_labels: Sequence[str] = (), **kargs, ) -> None: super().__init__(name="GKeep", fullname="Google Keep Notes", **kargs) self._gkeep_labels_strs = gkeep_labels or [] - self._gkeep_labels: Set[Label] = set() + self._gkeep_labels: set[Label] = set() self._gkeep_ignore_labels_strs = gkeep_ignore_labels or [] - self._gkeep_ignore_labels: Set[Label] = set() + self._gkeep_ignore_labels: set[Label] = set() def start(self): super().start() - # TODO Test this # Label management -------------------------------------------------------------------- # Create given labels if they don't already exist, # Get the concrete classes from strings @@ -58,10 +63,12 @@ def start(self): self._gkeep_labels.add(label) def get_all_items(self, **kargs) -> Sequence[GKeepNote]: - def note_contains_labels(node: TopLevelNode, labels: Set[Label]) -> bool: + del kargs + + def note_contains_labels(node: TopLevelNode, labels: set[Label]) -> bool: return labels.issubset(node.labels.all()) - def note_does_not_contain_labels(node: TopLevelNode, labels: Set[Label]) -> bool: + def note_does_not_contain_labels(node: TopLevelNode, labels: set[Label]) -> bool: return labels.isdisjoint(node.labels.all()) def node_is_of_type_note(node: TopLevelNode) -> bool: @@ -73,17 +80,20 @@ def node_is_of_type_note(node: TopLevelNode) -> bool: and note_does_not_contain_labels(node, self._gkeep_ignore_labels) and node_is_of_type_note(node) and not node.deleted - and not node.archived - ) + and not node.archived, + ), ) return tuple(GKeepNote.from_gkeep_note(m) for m in matching) - def get_item(self, item_id: str, use_cached: bool = True) -> Optional[GKeepNote]: + def get_item(self, item_id: str, use_cached: bool = True) -> GKeepNote | None: + del use_cached for item in self.get_all_items(): if item.id == item_id: return item + return None + def _get_item_by_id(self, item_id: ID) -> GKeepNote: item = self.get_item(item_id=item_id) if 
item is None: @@ -97,7 +107,7 @@ def delete_single_item(self, item_id: ID) -> None: def update_item(self, item_id: ID, **updated_properties): if not {"plaintext", "title"}.issubset(updated_properties.keys()): logger.warning( - f"Invalid changes provided to GKeepNoteSide -> {updated_properties}" + f"Invalid changes provided to GKeepNoteSide -> {updated_properties}", ) return new_plaintext = updated_properties["plaintext"] @@ -115,7 +125,10 @@ def add_item(self, item: GKeepNote) -> GKeepNote: @classmethod def items_are_identical( - cls, item1: ConcreteItem, item2: ConcreteItem, ignore_keys: Sequence[str] = [] + cls, + item1: ConcreteItem, + item2: ConcreteItem, + ignore_keys: Sequence[str] = [], ) -> bool: ignore_keys_ = [cls.last_modification_key()] ignore_keys_.extend(ignore_keys) diff --git a/syncall/google/gkeep_side.py b/syncall/google/gkeep_side.py index 48d4182..458d1e1 100644 --- a/syncall/google/gkeep_side.py +++ b/syncall/google/gkeep_side.py @@ -1,17 +1,17 @@ from typing import Optional from bubop import logger -from bubop.exceptions import AuthenticationError from gkeepapi import Keep from gkeepapi.exception import LoginException -from gkeepapi.node import Label +from gkeepapi.node import Label, TopLevelNode from gkeepapi.node import List as GKeepList -from gkeepapi.node import TopLevelNode from syncall.sync_side import SyncSide class GKeepSide(SyncSide): + """Wrapper class to add/modify/delete todo entries from Google Keep.""" + def __init__( self, gkeep_user: str, @@ -19,6 +19,7 @@ def __init__( gkeep_token: Optional[str] = None, **kargs, ): + """Init.""" self._keep: Keep self._gkeep_user = gkeep_user self._gkeep_passwd = gkeep_passwd @@ -27,8 +28,7 @@ def __init__( super().__init__(**kargs) def get_master_token(self) -> Optional[str]: - """ - Return a master token. Use it to authenticate in place of a password on subsequent + """Return a master token. Use it to authenticate in place of a password on subsequent runs. 
""" return self._gkeep_token @@ -56,26 +56,20 @@ def finish(self): self._keep.sync() def _note_has_label(self, note: TopLevelNode, label: Label) -> bool: - """True if the given Google Keep note has the given label.""" - for la in note.labels.all(): - if label == la: - return True - - return False + """Return true if the Google Keep note has the said label.""" + return any(label == la for la in note.labels.all()) def _note_has_label_str(self, note: TopLevelNode, label_str: str) -> bool: - """True if the given Google Keep note has the given label.""" - for la in note.labels.all(): - if label_str == la.name: - return True - - return False + """Return true if the Google Keep note has the said label.""" + return any(label_str == la.name for la in note.labels.all()) def _get_label_by_name(self, label: str) -> Optional[Label]: for la in self._keep.labels(): if la.name == label: return la + return None + def _create_list(self, title: str, label: Optional[Label] = None) -> GKeepList: """Create a new list of items in Google Keep. diff --git a/syncall/google/gkeep_todo_item.py b/syncall/google/gkeep_todo_item.py index d35f82f..c11b367 100644 --- a/syncall/google/gkeep_todo_item.py +++ b/syncall/google/gkeep_todo_item.py @@ -15,7 +15,7 @@ def __init__(self, is_checked: bool = False, plaintext: str = ""): ItemKey("is_checked", KeyType.String), ItemKey("last_modified_date", KeyType.Date), ItemKey("plaintext", KeyType.String), - ) + ), ) # Embedding the ListItem as a member variable of this. 
The alternative of inheriting diff --git a/syncall/google/gkeep_todo_side.py b/syncall/google/gkeep_todo_side.py index eae1ed7..ed0392b 100644 --- a/syncall/google/gkeep_todo_side.py +++ b/syncall/google/gkeep_todo_side.py @@ -1,10 +1,8 @@ from typing import Optional, Sequence from bubop import logger -from gkeepapi import Keep -from gkeepapi.node import Label +from gkeepapi.node import Label, TopLevelNode from gkeepapi.node import List as GKeepList -from gkeepapi.node import TopLevelNode from item_synchronizer.types import ID from syncall.google.gkeep_side import GKeepSide @@ -27,8 +25,7 @@ def __init__( notes_label: Optional[str] = None, **kargs, ): - """ - Initialise The GKeepTodoSide. + """Initialise The GKeepTodoSide. :param note_title: Title of the note whose items will be synchronized with Taskwarrior. :param gkeep_user: Username to use for authenticating with Google Keep @@ -64,7 +61,7 @@ def start(self): # - If the note is not found by its name it will be created logger.debug(f'Looking for notes with a matching title - "{self._note_title}"') notes_w_matching_title: Sequence[TopLevelNode] = list( - self._keep.find(func=lambda x: x.title == self._note_title) + self._keep.find(func=lambda x: x.title == self._note_title), ) # found matching note(s) @@ -81,7 +78,7 @@ def start(self): raise RuntimeError( "Found note(s) with a matching title but they are deleted/archived. Can't" " proceed. Please either restore/unarchive them or specify a new note to" - " use..." + " use...", ) len_non_deleted_archived_notes = len(non_deleted_archived_notes) @@ -95,38 +92,41 @@ def start(self): raise RuntimeError( f'Found {len_non_deleted_archived_notes} note(s) but none of type "List".' 
' Make sure to toggle the option "Show checkboxes" in the note that you' - " intend to use for the synchronization" + " intend to use for the synchronization", ) # more than one note found - ambiguous if len_active_notes_tlist != 1: raise RuntimeError( f"Found {len_active_notes_tlist} candidate notes. This is ambiguous." - " Either rename the note(s) accordingly or specify another title." + " Either rename the note(s) accordingly or specify another title.", ) self._note = active_notes_tlist[0] # assign label to note if it doesn't have it already if self._notes_label is not None and not self._note_has_label( - self._note, self._notes_label + self._note, + self._notes_label, ): logger.debug(f"Assigning label {self._notes_label_str} to note...") self._note.labels.add(self._notes_label) else: # create new note ----------------------------------------------------------------- logger.info( - "Couldn't find note with the given title - Creating it from scratch..." + "Couldn't find note with the given title - Creating it from scratch...", ) self._note = self._create_list(self._note_title, label=self._notes_label) def get_all_items(self, **kargs) -> Sequence[GKeepTodoItem]: + del kargs """Get all the todo entries of the Note in use.""" return tuple( GKeepTodoItem.from_gkeep_list_item(child) for child in self._note.children ) def get_item(self, item_id: str, use_cached: bool = True) -> Optional[GKeepTodoItem]: + del use_cached item = self._note.get(item_id) if item is None: logger.warning(f"Couldn't fetch Google Keep item with id {item_id}.") @@ -136,7 +136,7 @@ def get_item(self, item_id: str, use_cached: bool = True) -> Optional[GKeepTodoI def update_item(self, item_id: ID, **updated_properties): if not {"plaintext", "is_checked"}.issubset(updated_properties.keys()): logger.warning( - f"Invalid changes provided to GKeepTodoSide -> {updated_properties}" + f"Invalid changes provided to GKeepTodoSide -> {updated_properties}", ) return new_plaintext = 
updated_properties["plaintext"] @@ -175,7 +175,10 @@ def last_modification_key(cls) -> str: @classmethod def items_are_identical( - cls, item1: GKeepTodoItem, item2: GKeepTodoItem, ignore_keys: Sequence[str] = [] + cls, + item1: GKeepTodoItem, + item2: GKeepTodoItem, + ignore_keys: Sequence[str] = [], ) -> bool: ignore_keys_ = [cls.last_modification_key()] ignore_keys_.extend(ignore_keys) diff --git a/syncall/google/google_side.py b/syncall/google/google_side.py index 005b79b..8bb41af 100644 --- a/syncall/google/google_side.py +++ b/syncall/google/google_side.py @@ -17,7 +17,7 @@ def __init__( scopes: Sequence[str], oauth_port: int, credentials_cache: Path, - client_secret: Path, + client_secret: str, **kargs, ): super().__init__(**kargs) @@ -31,33 +31,34 @@ def __init__( self._service = None def _get_credentials(self): - """Gets valid user credentials from storage. + """Get valid user credentials from storage. If nothing has been stored, or if the stored credentials are invalid, the OAuth2 flow is completed to obtain the new credentials. :return: Credentials, the obtained credentials. """ - creds = None credentials_cache = self._credentials_cache if credentials_cache.is_file(): with credentials_cache.open("rb") as f: - creds = pickle.load(f) + creds = pickle.load(f) # noqa: S301 if not creds or not creds.valid: logger.debug("Invalid credentials. Fetching again...") if creds and creds.expired and creds.refresh_token: creds.refresh(Request()) else: - client_secret = self._client_secret - flow = InstalledAppFlow.from_client_secrets_file(client_secret, self._scopes) + flow = InstalledAppFlow.from_client_secrets_file( + self._client_secret, + self._scopes, + ) try: creds = flow.run_local_server(port=self._oauth_port) except OSError as e: raise RuntimeError( f"Port {self._oauth_port} is already in use, please specify a" - " different port or stop the process that's already using it." 
+ " different port or stop the process that's already using it.", ) from e # Save the credentials for the next run diff --git a/syncall/google/gtasks_side.py b/syncall/google/gtasks_side.py index 9aece7d..8c2aa65 100644 --- a/syncall/google/gtasks_side.py +++ b/syncall/google/gtasks_side.py @@ -1,7 +1,8 @@ +from __future__ import annotations + import datetime -import os from pathlib import Path -from typing import Dict, List, Optional, Sequence, Union, cast +from typing import TYPE_CHECKING, Sequence, cast import dateutil import pkg_resources @@ -12,10 +13,13 @@ from syncall.google.google_side import GoogleSide from syncall.sync_side import SyncSide -from syncall.types import GTasksItem, GTasksList + +if TYPE_CHECKING: + from syncall.types import GTasksItem, GTasksList DEFAULT_CLIENT_SECRET = pkg_resources.resource_filename( - "syncall", os.path.join("res", "gtasks_client_secret.json") + "syncall", + "res/gtasks_client_secret.json", ) # API Reference: https://googleapis.github.io/google-api-python-client/docs/dyn/tasks_v1.html @@ -27,7 +31,6 @@ class GTasksSide(GoogleSide): """GTasksSide interacts with the Google Tasks API. - Adds, removes, and updates events on Google Tasks. Also handles the OAuth2 user authentication workflow. """ @@ -35,20 +38,20 @@ class GTasksSide(GoogleSide): ID_KEY = "id" TITLE_KEY = "title" LAST_MODIFICATION_KEY = "updated" - _date_keys = ["updated"] + _date_keys: tuple[str] = ("updated",) # don't put the "due" key for comparison # due key holds the date but not the time that the use has set from the UI so we cannot # really use it for bi-synchronization. 
# # https://stackoverflow.com/questions/65956873/google-task-api-due-field - _identical_comparison_keys = ["title", "notes", "status", *_date_keys] + _identical_comparison_keys: tuple[str] = ("title", "notes", "status", *_date_keys) def __init__( self, *, task_list_title="TaskWarrior Reminders", - client_secret, + client_secret: str | None, **kargs, ): if client_secret is None: @@ -59,13 +62,13 @@ def __init__( fullname="Google Tasks", scopes=["https://www.googleapis.com/auth/tasks"], credentials_cache=Path.home() / ".gtasks_credentials.pickle", - client_secret=client_secret, # type: ignore + client_secret=client_secret, **kargs, ) self._task_list_title = task_list_title - self._task_list_id: Optional[str] = None - self._items_cache: Dict[str, dict] = {} + self._task_list_id: str | None = None + self._items_cache: dict[str, dict] = {} def start(self): logger.debug("Connecting to Google Tasks...") @@ -85,26 +88,29 @@ def start(self): logger.debug("Connected to Google Tasks.") - def _fetch_task_list_id(self) -> Optional[str]: + def _fetch_task_list_id(self) -> str | None: """Return the id of the task list based on the given Title. 
:returns: id or None if that was not found """ res = self._service.tasklists().list().execute() # type: ignore - task_lists_list: List[GTasksList] = res["items"] # type: ignore + task_lists_list: list[GTasksList] = res["items"] # type: ignore matching_task_lists = [ - list["id"] for list in task_lists_list if list["title"] == self._task_list_title + task_list["id"] + for task_list in task_lists_list + if task_list["title"] == self._task_list_title ] if len(matching_task_lists) == 0: return None - elif len(matching_task_lists) == 1: + + if len(matching_task_lists) == 1: return cast(str, matching_task_lists[0]) - else: - raise RuntimeError( - f'Multiple matching task lists for title -> "{self._task_list_title}"' - ) + + raise RuntimeError( + f'Multiple matching task lists for title -> "{self._task_list_title}"', + ) def _clear_all_task_list_entries(self): """Clear all tasks from the current task list.""" @@ -116,6 +122,8 @@ def get_all_items(self, **kargs) -> Sequence[GTasksItem]: :param kargs: Extra options for the call """ + del kargs + # Get the ID of the task list of interest tasks = [] @@ -150,7 +158,7 @@ def get_all_items(self, **kargs) -> Sequence[GTasksItem]: t for t in response.get("items", []) if t["status"] != "deleted" and len(t["title"]) > 0 - ] + ], ) # Get the next request object by passing the previous request @@ -163,14 +171,14 @@ def get_all_items(self, **kargs) -> Sequence[GTasksItem]: return tasks - def get_item(self, item_id: str, use_cached: bool = True) -> Optional[dict]: + def get_item(self, item_id: str, use_cached: bool = True) -> dict | None: item = self._items_cache.get(item_id) if not use_cached or item is None: item = self._get_item_refresh(item_id=item_id) return item - def _get_item_refresh(self, item_id: str) -> Optional[dict]: + def _get_item_refresh(self, item_id: str) -> dict | None: ret = None try: ret = ( @@ -183,15 +191,17 @@ def _get_item_refresh(self, item_id: str) -> Optional[dict]: self._items_cache[item_id] = ret except 
HttpError: pass - finally: - return ret + + return ret def update_item(self, item_id, **changes): # Check if item is there task = self._service.tasks().get(tasklist=self._task_list_id, task=item_id).execute() # type: ignore task.update(changes) self._service.tasks().update( # type: ignore - tasklist=self._task_list_id, task=task["id"], body=task + tasklist=self._task_list_id, + task=task["id"], + body=task, ).execute() def add_item(self, item) -> dict: @@ -216,22 +226,22 @@ def last_modification_key(cls) -> str: return cls.LAST_MODIFICATION_KEY @staticmethod - def _parse_dt_or_none(item: GTasksItem, field: str) -> Optional[datetime.datetime]: + def _parse_dt_or_none(item: GTasksItem, field: str) -> datetime.datetime | None: """Return the datetime on which task was completed in datetime format.""" if (dt := item.get(field)) is not None: dt_dt = GTasksSide.parse_datetime(dt) assert isinstance(dt_dt, datetime.datetime) return dt_dt - else: - return None + + return None @staticmethod - def get_task_due_time(item: GTasksItem) -> Optional[datetime.datetime]: + def get_task_due_time(item: GTasksItem) -> datetime.datetime | None: """Return the datetime on which task is due in datetime format.""" return GTasksSide._parse_dt_or_none(item=item, field="due") @staticmethod - def get_task_completed_time(item: GTasksItem) -> Optional[datetime.datetime]: + def get_task_completed_time(item: GTasksItem) -> datetime.datetime | None: """Return the datetime on which task was completed in datetime format.""" return GTasksSide._parse_dt_or_none(item=item, field="completed") @@ -241,9 +251,8 @@ def format_datetime(dt: datetime.datetime) -> str: return format_datetime_tz(dt) @classmethod - def parse_datetime(cls, dt: Union[str, dict, datetime.datetime]) -> datetime.datetime: - """ - Parse datetime given in the GTasks format(s): + def parse_datetime(cls, dt: str | dict | datetime.datetime) -> datetime.datetime: + """Parse datetime given in the GTasks format(s): - string with ('T', 'Z' 
separators). - (dateTime, dateZone) dictionary - datetime object @@ -274,10 +283,10 @@ def parse_datetime(cls, dt: Union[str, dict, datetime.datetime]) -> datetime.dat >>> GTasksSide.parse_datetime(a).isoformat() == a.isoformat() True """ - if isinstance(dt, str): return dateutil.parser.parse(dt).replace(tzinfo=None) # type: ignore - elif isinstance(dt, dict): + + if isinstance(dt, dict): date_time = dt.get("dateTime") if date_time is None: raise RuntimeError(f"Invalid structure dict: {dt}") @@ -288,12 +297,13 @@ def parse_datetime(cls, dt: Union[str, dict, datetime.datetime]) -> datetime.dat dt_dt = timezone.localize(dt_dt) return dt_dt - elif isinstance(dt, datetime.datetime): + + if isinstance(dt, datetime.datetime): return dt - else: - raise RuntimeError( - f"Unexpected type of a given date item, type: {type(dt)}, contents: {dt}" - ) + + raise TypeError( + f"Unexpected type of a given date item, type: {type(dt)}, contents: {dt}", + ) @classmethod def items_are_identical(cls, item1, item2, ignore_keys: Sequence[str] = []) -> bool: diff --git a/syncall/notion/__init__.py b/syncall/notion/__init__.py new file mode 100644 index 0000000..21857f6 --- /dev/null +++ b/syncall/notion/__init__.py @@ -0,0 +1 @@ +"""Notion subpackage.""" diff --git a/syncall/notion/notion_side.py b/syncall/notion/notion_side.py index 3114e72..02f0b3a 100644 --- a/syncall/notion/notion_side.py +++ b/syncall/notion/notion_side.py @@ -1,7 +1,11 @@ -from typing import Dict, Optional, Sequence, cast +from __future__ import annotations + +from typing import TYPE_CHECKING, Sequence, cast from bubop import logger -from notion_client import Client + +if TYPE_CHECKING: + from notion_client import Client from syncall.notion.notion_todo_block import NotionTodoBlock from syncall.sync_side import SyncSide @@ -9,9 +13,7 @@ class NotionSide(SyncSide): - """ - Wrapper class to add/modify/delete todo blocks from notion, create new pages, etc. 
- """ + """Wrapper class to add/modify/delete todo blocks from notion, create new pages, etc.""" _date_keys = "last_modified_date" @@ -19,7 +21,7 @@ def __init__(self, client: Client, page_id: NotionID): self._client = client self._page_id = page_id self._page_contents: NotionPageContents - self._all_todo_blocks: Dict[NotionID, NotionTodoBlock] + self._all_todo_blocks: dict[NotionID, NotionTodoBlock] self._is_cached = False super().__init__(name="Notion", fullname="Notion") @@ -40,23 +42,26 @@ def start(self): logger.info(f"Initializing {self.fullname}...") self._page_contents = self._client.blocks.children.list(block_id=self._page_id) - def _get_todo_blocks(self) -> Dict[NotionID, NotionTodoBlock]: + def _get_todo_blocks(self) -> dict[NotionID, NotionTodoBlock]: all_todos = self.find_todos(page_contents=self._page_contents) # make sure that all IDs are valid and not None - assert all([todo.id is not None for todo in all_todos]) + assert all(todo.id is not None for todo in all_todos) return {cast(NotionID, todo.id): todo for todo in all_todos} def get_all_items(self, **kargs) -> Sequence[NotionTodoBlock]: + del kargs self._all_todo_blocks = self._get_todo_blocks() self._is_cached = True return tuple(self._all_todo_blocks.values()) def get_item( - self, item_id: NotionID, use_cached: bool = False - ) -> Optional[NotionTodoBlock]: - """Return a single todo block""" + self, + item_id: NotionID, + use_cached: bool = False, + ) -> NotionTodoBlock | None: + """Return a single todo block.""" if use_cached: return self._all_todo_blocks.get(item_id) @@ -64,9 +69,9 @@ def get_item( new_todo_block_item: NotionTodoBlockItem = self._client.blocks.retrieve(item_id) try: new_todo_block = NotionTodoBlock.from_raw_item(new_todo_block_item) - except RuntimeError: + except RuntimeError as err: # the to_do section is missing when the item is archived?! 
- raise KeyError + raise KeyError from err assert new_todo_block.id is not None self._all_todo_blocks[new_todo_block.id] = new_todo_block @@ -89,20 +94,22 @@ def update_item(self, item_id: NotionID, **updated_properties): return updated_todo = self.get_vanilla_notion_todo_section( - text=updated_properties["plaintext"], is_checked=updated_properties["is_checked"] + text=updated_properties["plaintext"], + is_checked=updated_properties["is_checked"], ) self._client.blocks.update(block_id=item_id, to_do=updated_todo) def add_item(self, item: NotionTodoBlock) -> NotionTodoBlock: """Add a new item (block) to the page.""" page_contents: NotionPageContents = self._client.blocks.children.append( - block_id=self._page_id, children=[item.serialize()] + block_id=self._page_id, + children=[item.serialize()], ) todo_blocks = self.find_todos(page_contents=page_contents) if len(todo_blocks) != 1: logger.warning( "Expected to get back 1 TODO item, blocks.children.append(...) returned" - f" {len(todo_blocks)} items. Adding only the first" + f" {len(todo_blocks)} items. 
Adding only the first", ) return todo_blocks[0] @@ -118,13 +125,17 @@ def add_todo_block(self, title: str, checked: bool = False) -> NotionTodoBlock: }, } raw_item = self._client.blocks.children.append( - block_id=self._page_id, children=[new_block] + block_id=self._page_id, + children=[new_block], ) return NotionTodoBlock.from_raw_item(raw_item) @classmethod def items_are_identical( - cls, item1: NotionTodoBlock, item2: NotionTodoBlock, ignore_keys: Sequence[str] = [] + cls, + item1: NotionTodoBlock, + item2: NotionTodoBlock, + ignore_keys: Sequence[str] = [], ) -> bool: ignore_keys_ = [cls.last_modification_key()] ignore_keys_.extend(ignore_keys) @@ -133,10 +144,8 @@ def items_are_identical( @staticmethod def find_todos(page_contents: NotionPageContents) -> Sequence[NotionTodoBlock]: assert page_contents["object"] == "list" - todos = tuple( + return tuple( NotionTodoBlock.from_raw_item(cast(NotionTodoBlockItem, block)) for block in page_contents["results"] if NotionTodoBlock.is_todo(block) ) - - return todos diff --git a/syncall/notion/notion_todo_block.py b/syncall/notion/notion_todo_block.py index e2c2149..3579f90 100644 --- a/syncall/notion/notion_todo_block.py +++ b/syncall/notion/notion_todo_block.py @@ -1,21 +1,29 @@ -import datetime -from typing import Optional +from __future__ import annotations -from bubop import is_same_datetime, logger, parse_datetime -from item_synchronizer.types import ID +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + import datetime + + from item_synchronizer.types import ID + + from syncall.types import NotionRawItem, NotionTodoBlockItem, NotionTodoSection + +from bubop import logger, parse_datetime from syncall.concrete_item import ConcreteItem, ItemKey, KeyType -from syncall.types import NotionRawItem, NotionTodoBlockItem, NotionTodoSection class NotionTodoBlock(ConcreteItem): + """Represent a todo block in Notion.""" + def __init__( self, is_archived: bool, is_checked: bool, last_modified_date: datetime.datetime, 
plaintext: str, - id: Optional[ID] = None, + id: ID | None = None, # noqa: A002 ): super().__init__( keys=( @@ -23,7 +31,7 @@ def __init__( ItemKey("is_checked", KeyType.Boolean), ItemKey("last_modified_date", KeyType.Date), ItemKey("plaintext", KeyType.String), - ) + ), ) self._is_archived = is_archived @@ -56,11 +64,11 @@ def last_modified_date(self) -> datetime.datetime: def last_modified_date(self, val: datetime.datetime): self._last_modified_date = val - def _id(self) -> Optional[ID]: + def _id(self) -> ID | None: return self._id_val @classmethod - def from_raw_item(cls, block_item: NotionTodoBlockItem) -> "NotionTodoBlock": + def from_raw_item(cls, block_item: NotionTodoBlockItem) -> NotionTodoBlock: """Create a NotionTodoBlock given the raw item at hand.""" assert "archived" in block_item assert "id" in block_item diff --git a/syncall/pdb_cli_utils.py b/syncall/pdb_cli_utils.py index 64e6dac..b46b85c 100644 --- a/syncall/pdb_cli_utils.py +++ b/syncall/pdb_cli_utils.py @@ -1,14 +1,14 @@ import sys -def run_pdb_on_error(type, value, tb): +def run_pdb_on_error(type, value, tb): # noqa: A002 if hasattr(sys, "ps1") or not sys.stderr.isatty(): # we are in interactive mode or we don't have a tty-like device, so we call the # default hook - print(f"Cannot enable the --pdb-on-error flag") + print("Cannot enable the --pdb-on-error flag") # noqa: T201 sys.__excepthook__(type, value, tb) else: - import pdb + import pdb # noqa: T100 import traceback traceback.print_exception(type, value, tb) diff --git a/syncall/scripts/fs_gkeep_sync.py b/syncall/scripts/fs_gkeep_sync.py index 47e2c86..04e474c 100644 --- a/syncall/scripts/fs_gkeep_sync.py +++ b/syncall/scripts/fs_gkeep_sync.py @@ -61,8 +61,7 @@ def main( pdb_on_error: bool, confirm: bool, ): - """ - Synchronize Notes from your Google Keep with text files in a directory on your filesystem. + """Synchronize Notes from your Google Keep with text files in a directory on your filesystem. 
You can only synchronize a subset of your Google Keep notes based on a set of provided labels and you can specify where to create the files by specifying the path to a local @@ -88,7 +87,7 @@ def main( filesystem_root, gkeep_labels, gkeep_ignore_labels, - ] + ], ) check_optional_mutually_exclusive( combination_name, @@ -101,7 +100,7 @@ def main( if not filesystem_root_path.is_dir(): logger.error( "An existing directory must be provided for the synchronization ->" - f" {filesystem_root_path}" + f" {filesystem_root_path}", ) return 1 @@ -112,7 +111,9 @@ def main( # existing combination name is provided --------------------------------------------------- if combination_name is not None: app_config = fetch_app_configuration( - side_A_name="Filesystem", side_B_name="Google Keep", combination=combination_name + side_A_name="Filesystem", + side_B_name="Google Keep", + combination=combination_name, ) filesystem_root_path = Path(app_config["filesystem_root"]) gkeep_labels = app_config["gkeep_labels"] @@ -138,14 +139,14 @@ def main( logger.error( "You have to provide at least one valid filesystem root path to use for " " synchronization. 
You can do so either via CLI arguments or by specifying an" - " existing saved combination" + " existing saved combination", ) sys.exit(1) if not gkeep_labels and not gkeep_ignore_labels: logger.error( "Refusing to run without any Google Keep labels to keep or remove - please provide" - " at least one of these two to continue" + " at least one of these two to continue", ) sys.exit(1) @@ -161,7 +162,7 @@ def main( }, prefix="\n\n", suffix="\n", - ) + ), ) if confirm: confirm_before_proceeding() @@ -182,7 +183,8 @@ def main( ) filesystem_side = FilesystemSide( - filesystem_root=filesystem_root_path, filename_extension=filename_extension + filesystem_root=filesystem_root_path, + filename_extension=filename_extension, ) # teardown function and exception handling ------------------------------------------------ diff --git a/syncall/scripts/tw_asana_sync.py b/syncall/scripts/tw_asana_sync.py index 8b5b4f4..681df3c 100644 --- a/syncall/scripts/tw_asana_sync.py +++ b/syncall/scripts/tw_asana_sync.py @@ -1,5 +1,6 @@ +from __future__ import annotations + import sys -from typing import List import asana import click @@ -39,14 +40,14 @@ @opts_asana(hidden_gid=False) @opts_tw_filtering() @opts_miscellaneous("TW", "Asana") -def main( +def main( # noqa: PLR0915, C901, PLR0912 asana_task_gid: str, asana_token: str, asana_workspace_gid: str, asana_workspace_name: str, do_list_asana_workspaces: bool, tw_filter: str, - tw_tags: List[str], + tw_tags: list[str], tw_project: str, tw_only_modified_last_X_days: str, tw_sync_all_tasks: bool, @@ -59,6 +60,8 @@ def main( confirm: bool, ): """Synchronize your tasks in Asana with filters from Taskwarrior.""" + del prefer_scheduled_date + loguru_tqdm_sink(verbosity=verbose) app_log_to_syslog() logger.debug("Initialising...") @@ -84,16 +87,19 @@ def main( tw_sync_all_tasks, asana_workspace_gid, asana_workspace_name, - ] + ], ) check_optional_mutually_exclusive( - combination_name, combination_of_tw_filters_and_asana_workspace + combination_name, 
+ combination_of_tw_filters_and_asana_workspace, ) # existing combination name is provided --------------------------------------------------- if combination_name is not None: app_config = fetch_app_configuration( - side_A_name="Taskwarrior", side_B_name="Asana", combination=combination_name + side_A_name="Taskwarrior", + side_B_name="Asana", + combination=combination_name, ) tw_tags = app_config["tw_tags"] tw_project = app_config["tw_project"] @@ -116,13 +122,12 @@ def main( # initialize asana ----------------------------------------------------------------------- asana_client = asana.Client.access_token(asana_token) - asana_disable = asana_client.headers.get("Asana-Disable", "") asana_client.headers["Asana-Disable"] = ",".join( [ asana_client.headers.get("Asana-Disable", ""), "new_user_task_lists", "new_goal_memberships", - ] + ], ) asana_client.options["client_name"] = "syncall" @@ -138,9 +143,8 @@ def main( if asana_workspace_gid is None: if asana_workspace_name is None: error_and_exit("Provide either an Asana workspace name or GID to sync.") - else: - if asana_workspace_name is not None: - error_and_exit("Provide either Asana workspace GID or name, but not both.") + elif asana_workspace_name is not None: + error_and_exit("Provide either Asana workspace GID or name, but not both.") found_workspace = False @@ -153,7 +157,7 @@ def main( if found_workspace: error_and_exit( f"Found multiple workspaces with name {asana_workspace_name}. Please" - " specify workspace GID instead." 
+ " specify workspace GID instead.", ) else: asana_workspace_gid = workspace["gid"] @@ -189,18 +193,22 @@ def main( }, prefix="\n\n", suffix="\n", - ) + ), ) if confirm: confirm_before_proceeding() # initialize sides ------------------------------------------------------------------------ tw_side = TaskWarriorSide( - tw_filter=" ".join(tw_filter_li), tags=tw_tags, project=tw_project + tw_filter=" ".join(tw_filter_li), + tags=tw_tags, + project=tw_project, ) asana_side = AsanaSide( - client=asana_client, task_gid=asana_task_gid, workspace_gid=asana_workspace_gid + client=asana_client, + task_gid=asana_task_gid, + workspace_gid=asana_workspace_gid, ) # teardown function and exception handling ------------------------------------------------ @@ -218,7 +226,9 @@ def main( converter_A_to_B=convert_asana_to_tw, converter_B_to_A=convert_tw_to_asana, resolution_strategy=get_resolution_strategy( - resolution_strategy, side_A_type=type(asana_side), side_B_type=type(tw_side) + resolution_strategy, + side_A_type=type(asana_side), + side_B_type=type(tw_side), ), config_fname=combination_name, ignore_keys=( diff --git a/syncall/scripts/tw_caldav_sync.py b/syncall/scripts/tw_caldav_sync.py index f5c2134..819b4df 100644 --- a/syncall/scripts/tw_caldav_sync.py +++ b/syncall/scripts/tw_caldav_sync.py @@ -1,6 +1,7 @@ +from __future__ import annotations + import os import subprocess -from typing import List, Optional import caldav import click @@ -41,11 +42,11 @@ def main( caldav_calendar: str, caldav_url: str, - caldav_user: Optional[str], + caldav_user: str | None, caldav_passwd_pass_path: str, caldav_passwd_cmd: str, tw_filter: str, - tw_tags: List[str], + tw_tags: list[str], tw_project: str, tw_only_modified_last_X_days: str, tw_sync_all_tasks: bool, @@ -90,16 +91,19 @@ def main( tw_project, tw_sync_all_tasks, caldav_calendar, - ] + ], ) check_optional_mutually_exclusive( - combination_name, combination_of_tw_filters_and_caldav_calendar + combination_name, + 
combination_of_tw_filters_and_caldav_calendar, ) # existing combination name is provided --------------------------------------------------- if combination_name is not None: app_config = fetch_app_configuration( - side_A_name="Taskwarrior", side_B_name="Caldav", combination=combination_name + side_A_name="Taskwarrior", + side_B_name="Caldav", + combination=combination_name, ) tw_filter_li = app_config["tw_filter_li"] tw_tags = app_config["tw_tags"] @@ -144,7 +148,7 @@ def main( }, prefix="\n\n", suffix="\n", - ) + ), ) if confirm: confirm_before_proceeding() @@ -152,7 +156,9 @@ def main( # initialize sides ------------------------------------------------------------------------ # tw tw_side = TaskWarriorSide( - tw_filter=" ".join(tw_filter_li), tags=tw_tags, project=tw_project + tw_filter=" ".join(tw_filter_li), + tags=tw_tags, + project=tw_project, ) # caldav @@ -160,7 +166,7 @@ def main( logger.debug(caldav_url) logger.debug(caldav_calendar) error_and_exit( - "You must provide a URL and calendar in order to synchronize via caldav" + "You must provide a URL and calendar in order to synchronize via caldav", ) # fetch username @@ -168,7 +174,7 @@ def main( caldav_user = os.environ.get("CALDAV_USERNAME") if caldav_user is None: error_and_exit( - "You must provide a username in order to synchronize via caldav, either " + "You must provide a username in order to synchronize via caldav, either ", ) # fetch password @@ -176,7 +182,13 @@ def main( if caldav_passwd is not None: logger.debug("Reading the caldav password from environment variable...") elif caldav_passwd_cmd is not None: - proc = subprocess.run(caldav_passwd_cmd, shell=True, text=True, capture_output=True) + proc = subprocess.run( # noqa: S602 + caldav_passwd_cmd, + shell=True, + text=True, + capture_output=True, + check=False, + ) if proc.returncode != 0: error_and_exit(f"Password command failed: {proc.stderr}") @@ -202,7 +214,9 @@ def main( converter_B_to_A=convert_tw_to_caldav, 
converter_A_to_B=convert_caldav_to_tw, resolution_strategy=get_resolution_strategy( - resolution_strategy, side_A_type=type(caldav_side), side_B_type=type(tw_side) + resolution_strategy, + side_A_type=type(caldav_side), + side_B_type=type(tw_side), ), config_fname=combination_name, ignore_keys=( diff --git a/syncall/scripts/tw_gcal_sync.py b/syncall/scripts/tw_gcal_sync.py index 4ff407b..cd5a44e 100644 --- a/syncall/scripts/tw_gcal_sync.py +++ b/syncall/scripts/tw_gcal_sync.py @@ -1,6 +1,7 @@ +from __future__ import annotations + import sys from datetime import timedelta -from typing import List import click from bubop import ( @@ -47,10 +48,10 @@ @opts_miscellaneous(side_A_name="TW", side_B_name="Google Tasks") def main( gcal_calendar: str, - google_secret: str, + google_secret: str | None, oauth_port: int, tw_filter: str, - tw_tags: List[str], + tw_tags: list[str], tw_project: str, tw_only_modified_last_X_days: str, tw_sync_all_tasks: bool, @@ -93,10 +94,11 @@ def main( tw_project, tw_sync_all_tasks, gcal_calendar, - ] + ], ) check_optional_mutually_exclusive( - combination_name, combination_of_tw_filters_and_gcal_calendar + combination_name, + combination_of_tw_filters_and_gcal_calendar, ) check_optional_mutually_exclusive(combination_name, custom_combination_savename) @@ -140,7 +142,7 @@ def main( logger.error( "You have to provide the name of a Google Calendar calendar to synchronize events" " to/from. 
You can do so either via CLI arguments or by specifying an existing" - " saved combination" + " saved combination", ) sys.exit(1) @@ -158,18 +160,22 @@ def main( }, prefix="\n\n", suffix="\n", - ) + ), ) if confirm: confirm_before_proceeding() # initialize sides ------------------------------------------------------------------------ tw_side = TaskWarriorSide( - tw_filter=" ".join(tw_filter_li), tags=tw_tags, project=tw_project + tw_filter=" ".join(tw_filter_li), + tags=tw_tags, + project=tw_project, ) gcal_side = GCalSide( - calendar_summary=gcal_calendar, oauth_port=oauth_port, client_secret=google_secret + calendar_summary=gcal_calendar, + oauth_port=oauth_port, + client_secret=google_secret, ) # teardown function and exception handling ------------------------------------------------ @@ -207,7 +213,9 @@ def convert_A_to_B(*args, **kargs): converter_B_to_A=convert_B_to_A, converter_A_to_B=convert_A_to_B, resolution_strategy=get_resolution_strategy( - resolution_strategy, side_A_type=type(gcal_side), side_B_type=type(tw_side) + resolution_strategy, + side_A_type=type(gcal_side), + side_B_type=type(tw_side), ), config_fname=combination_name, ignore_keys=( diff --git a/syncall/scripts/tw_gkeep_sync.py b/syncall/scripts/tw_gkeep_sync.py index 48b3594..295a2f3 100644 --- a/syncall/scripts/tw_gkeep_sync.py +++ b/syncall/scripts/tw_gkeep_sync.py @@ -67,6 +67,8 @@ def main( each one of the tasks in the Taskwarrior filter. You have to first "Show checkboxes" in the Google Keep Note in order to use it with this service. 
""" + del prefer_scheduled_date + # setup logger ---------------------------------------------------------------------------- loguru_tqdm_sink(verbosity=verbose) app_log_to_syslog() @@ -92,16 +94,19 @@ def main( tw_project, tw_sync_all_tasks, gkeep_note, - ] + ], ) check_optional_mutually_exclusive( - combination_name, combination_of_tw_filters_and_gkeep_note + combination_name, + combination_of_tw_filters_and_gkeep_note, ) # existing combination name is provided --------------------------------------------------- if combination_name is not None: app_config = fetch_app_configuration( - side_A_name="Taskwarrior", side_B_name="Google Keep", combination=combination_name + side_A_name="Taskwarrior", + side_B_name="Google Keep", + combination=combination_name, ) tw_filter_li = app_config["tw_filter_li"] tw_tags = app_config["tw_tags"] @@ -136,7 +141,7 @@ def main( logger.error( "You have to provide the name of a Google Keep note to synchronize items" " to/from. You can do so either via CLI arguments or by specifying an existing" - " saved combination" + " saved combination", ) sys.exit(1) @@ -153,7 +158,7 @@ def main( }, prefix="\n\n", suffix="\n", - ) + ), ) if confirm: confirm_before_proceeding() @@ -176,7 +181,9 @@ def main( # initialize taskwarrior ------------------------------------------------------------------ tw_side = TaskWarriorSide( - tw_filter=" ".join(tw_filter_li), tags=tw_tags, project=tw_project + tw_filter=" ".join(tw_filter_li), + tags=tw_tags, + project=tw_project, ) # teardown function and exception handling ------------------------------------------------ @@ -194,7 +201,9 @@ def main( converter_B_to_A=convert_tw_to_gkeep_todo, converter_A_to_B=convert_gkeep_todo_to_tw, resolution_strategy=get_resolution_strategy( - resolution_strategy, side_A_type=type(gkeep_side), side_B_type=type(tw_side) + resolution_strategy, + side_A_type=type(gkeep_side), + side_B_type=type(tw_side), ), config_fname=combination_name, ignore_keys=( diff --git 
a/syncall/scripts/tw_gtasks_sync.py b/syncall/scripts/tw_gtasks_sync.py index 769e4e8..72097b9 100644 --- a/syncall/scripts/tw_gtasks_sync.py +++ b/syncall/scripts/tw_gtasks_sync.py @@ -1,4 +1,4 @@ -from typing import List +from __future__ import annotations import click from bubop import ( @@ -44,10 +44,10 @@ @opts_miscellaneous(side_A_name="TW", side_B_name="Google Tasks") def main( gtasks_list: str, - google_secret: str, + google_secret: str | None, oauth_port: int, tw_filter: str, - tw_tags: List[str], + tw_tags: list[str], tw_project: str, tw_only_modified_last_X_days: str, tw_sync_all_tasks: bool, @@ -90,16 +90,19 @@ def main( tw_project, tw_sync_all_tasks, gtasks_list, - ] + ], ) check_optional_mutually_exclusive( - combination_name, combination_of_tw_filters_and_gtasks_list + combination_name, + combination_of_tw_filters_and_gtasks_list, ) # existing combination name is provided --------------------------------------------------- if combination_name is not None: app_config = fetch_app_configuration( - side_A_name="TW", side_B_name="Google Tasks", combination=combination_name + side_A_name="TW", + side_B_name="Google Tasks", + combination=combination_name, ) tw_filter_li = app_config["tw_filter_li"] tw_tags = app_config["tw_tags"] @@ -134,7 +137,7 @@ def main( error_and_exit( "You have to provide the name of a Google Tasks list to synchronize events" " to/from. You can do so either via CLI arguments or by specifying an existing" - " saved combination" + " saved combination", ) # announce configuration ------------------------------------------------------------------ @@ -151,7 +154,7 @@ def main( }, prefix="\n\n", suffix="\n", - ) + ), ) if confirm: confirm_before_proceeding() @@ -161,11 +164,15 @@ def main( # verifying beforehand that if this flag is specified the user cannot specify any of the # other `tw_filter_li`, `tw_tags`, `tw_project` options. 
tw_side = TaskWarriorSide( - tw_filter=" ".join(tw_filter_li), tags=tw_tags, project=tw_project + tw_filter=" ".join(tw_filter_li), + tags=tw_tags, + project=tw_project, ) gtasks_side = GTasksSide( - task_list_title=gtasks_list, oauth_port=oauth_port, client_secret=google_secret + task_list_title=gtasks_list, + oauth_port=oauth_port, + client_secret=google_secret, ) # teardown function and exception handling ------------------------------------------------ @@ -201,7 +208,9 @@ def convert_A_to_B(*args, **kargs): converter_B_to_A=convert_B_to_A, converter_A_to_B=convert_A_to_B, resolution_strategy=get_resolution_strategy( - resolution_strategy, side_A_type=type(gtasks_side), side_B_type=type(tw_side) + resolution_strategy, + side_A_type=type(gtasks_side), + side_B_type=type(tw_side), ), config_fname=combination_name, ignore_keys=( diff --git a/syncall/scripts/tw_notion_sync.py b/syncall/scripts/tw_notion_sync.py index bd8bf94..881c249 100644 --- a/syncall/scripts/tw_notion_sync.py +++ b/syncall/scripts/tw_notion_sync.py @@ -1,6 +1,7 @@ +from __future__ import annotations + import os import sys -from typing import List import click from bubop import ( @@ -54,7 +55,7 @@ def main( notion_page_id: str, token_pass_path: str, tw_filter: str, - tw_tags: List[str], + tw_tags: list[str], tw_project: str, tw_only_modified_last_X_days: str, tw_sync_all_tasks: bool, @@ -96,16 +97,19 @@ def main( tw_project, tw_sync_all_tasks, notion_page_id, - ] + ], ) check_optional_mutually_exclusive( - combination_name, combination_of_tw_filters_and_notion_page + combination_name, + combination_of_tw_filters_and_notion_page, ) # existing combination name is provided --------------------------------------------------- if combination_name is not None: app_config = fetch_app_configuration( - side_A_name="Taskwarrior", side_B_name="Notion", combination=combination_name + side_A_name="Taskwarrior", + side_B_name="Notion", + combination=combination_name, ) tw_filter_li = app_config["tw_filter_li"] 
tw_tags = app_config["tw_tags"] @@ -139,7 +143,7 @@ def main( if notion_page_id is None: error_and_exit( "You have to provide the page ID of the Notion page for synchronization. You can" - " do so either via CLI arguments or by specifying an existing saved combination" + " do so either via CLI arguments or by specifying an existing saved combination", ) # announce configuration ------------------------------------------------------------------ @@ -156,7 +160,7 @@ def main( }, prefix="\n\n", suffix="\n", - ) + ), ) if confirm: confirm_before_proceeding() @@ -171,7 +175,7 @@ def main( logger.error( "You have to provide the Notion API key, either via the" f" {api_key_env_var} environment variable or via the UNIX Passowrdr Manager" - ' and the "--token-pass-path" CLI parameter' + ' and the "--token-pass-path" CLI parameter', ) sys.exit(1) token_v2 = fetch_from_pass_manager(token_pass_path) @@ -189,14 +193,17 @@ def main( # initialize sides ------------------------------------------------------------------------ # tw tw_side = TaskWarriorSide( - tw_filter=" ".join(tw_filter_li), tags=tw_tags, project=tw_project + tw_filter=" ".join(tw_filter_li), + tags=tw_tags, + project=tw_project, ) # notion # client is a bit too verbose by default. 
client_verbosity = max(verbose - 1, 0) client = Client( - auth=token_v2, log_level=verbosity_int_to_std_logging_lvl(client_verbosity) + auth=token_v2, + log_level=verbosity_int_to_std_logging_lvl(client_verbosity), ) notion_side = NotionSide(client=client, page_id=notion_page_id) @@ -207,7 +214,9 @@ def main( converter_B_to_A=convert_tw_to_notion, converter_A_to_B=convert_notion_to_tw, resolution_strategy=get_resolution_strategy( - resolution_strategy, side_A_type=type(notion_side), side_B_type=type(tw_side) + resolution_strategy, + side_A_type=type(notion_side), + side_B_type=type(tw_side), ), config_fname=combination_name, ignore_keys=( diff --git a/syncall/side_helper.py b/syncall/side_helper.py index 3846909..1d3104a 100644 --- a/syncall/side_helper.py +++ b/syncall/side_helper.py @@ -14,7 +14,7 @@ class SideHelper: summary_key: str # Handy way to refer to the counterpart side other: Optional["SideHelper"] = None - ignore_keys: Sequence[str] = tuple() + ignore_keys: Sequence[str] = () def __str__(self): return str(self.name) diff --git a/syncall/sync_side.py b/syncall/sync_side.py index 05638ce..c351f94 100644 --- a/syncall/sync_side.py +++ b/syncall/sync_side.py @@ -1,9 +1,14 @@ +from __future__ import annotations + import abc import datetime -from typing import Any, Mapping, Optional, Sequence, final +from typing import TYPE_CHECKING, Any, Mapping, Sequence, final from bubop.time import is_same_datetime -from item_synchronizer.types import ID + +if TYPE_CHECKING: + from item_synchronizer.types import ID + from loguru import logger ItemType = Mapping[str, Any] @@ -20,56 +25,61 @@ class SyncSide(abc.ABC): """ def __init__(self, name: str, fullname: str, *args, **kargs) -> None: + del args, kargs + """Initialize the side.""" self._fullname = fullname self._name = name def __str__(self) -> str: + """Return the string representation of the side.""" return self._fullname @final @property def fullname(self) -> str: + """Get the full name of the side.""" return 
self._fullname @final @property def name(self) -> str: + """Get the name of the side.""" return self._name - def start(self): - """Initialization steps. + def start(self): # noqa: B027 + """Initialize the side. Call this manually. Derived classes can take care of setting up data structures / connection, authentication requests etc. """ - pass - def finish(self): - """Finalization steps. + def finish(self): # noqa: B027 + """Finalize the side. Call this manually. Derived classes can take care of closing open connections, flashing their cached data, etc. """ - pass @abc.abstractmethod def get_all_items(self, **kargs) -> Sequence[ItemType]: - """Query side and return a sequence of items + """Query side and return a sequence of items. :param kargs: Extra options for the call :return: A list of items. The type of these items depends on the derived class """ - raise NotImplementedError("Implement in derived") + err = "Implement in derived" + raise NotImplementedError(err) @abc.abstractmethod - def get_item(self, item_id: ID, use_cached: bool = False) -> Optional[ItemType]: + def get_item(self, item_id: ID, use_cached: bool = False) -> ItemType | None: """Get a single item based on the given UUID. :use_cached: False if you want to fetch the latest version of the item. True if a cached version would do. :returns: None if not found, the item in dict representation otherwise """ - raise NotImplementedError("Should be implemented in derived") + err = "Should be implemented in derived" + raise NotImplementedError(err) @abc.abstractmethod def delete_single_item(self, item_id: ID): @@ -77,7 +87,8 @@ def delete_single_item(self, item_id: ID): .. raises:: Keyerror if item is not found. 
""" - raise NotImplementedError("Should be implemented in derived") + err = "Should be implemented in derived" + raise NotImplementedError(err) @abc.abstractmethod def update_item(self, item_id: ID, **changes): @@ -87,7 +98,8 @@ def update_item(self, item_id: ID, **changes): :param changes: Keyword only parameters that are to change in the item .. warning:: The item must already be present """ - raise NotImplementedError("Should be implemented in derived") + err = "Should be implemented in derived" + raise NotImplementedError(err) @abc.abstractmethod def add_item(self, item: ItemType) -> ItemType: @@ -95,28 +107,29 @@ def add_item(self, item: ItemType) -> ItemType: :returns: The newly added event """ - raise NotImplementedError("Implement in derived") + err = "Implement in derived" + raise NotImplementedError(err) @classmethod @abc.abstractmethod def id_key(cls) -> str: - """ - Key in the dictionary of the added/updated/deleted item that refers to the ID of - that Item. - """ - raise NotImplementedError("Implement in derived") + """Key in dict of the added/updated/deleted item that refers to the ID said item.""" + err = "Implement in derived" + raise NotImplementedError(err) @classmethod @abc.abstractmethod def summary_key(cls) -> str: """Key in the dictionary of the item that refers to its summary.""" - raise NotImplementedError("Implement in derived") + err = "Implement in derived" + raise NotImplementedError(err) @classmethod @abc.abstractmethod def last_modification_key(cls) -> str: """Key in the dictionary of the item that refers to its modification date.""" - raise NotImplementedError("Implement in derived") + err = "Implement in derived" + raise NotImplementedError(err) @final @classmethod @@ -133,13 +146,17 @@ def get_summary(cls, item: ItemType) -> str: @classmethod @abc.abstractmethod def items_are_identical( - cls, item1: ItemType, item2: ItemType, ignore_keys: Sequence[str] = [] + cls, + item1: ItemType, + item2: ItemType, + ignore_keys: Sequence[str] = 
[], ) -> bool: """Determine whether two items are identical. .. returns:: True if items are identical, False otherwise. """ - raise NotImplementedError("Implement in derived") + err = "Implement in derived" + raise NotImplementedError(err) @final @staticmethod @@ -148,36 +165,36 @@ def _items_are_identical(item1: ItemType, item2: ItemType, keys: list) -> bool: Take extra care of the datetime key. """ - for k in keys: if k not in item1 and k not in item2: continue if (k in item1 and k not in item2) or (k not in item1 and k in item2): logger.opt(lazy=True).trace( - f"Key [{k}] exists in one but not in other\n\n{item1}\n\n{item2}" + f"Key [{k}] exists in one but not in other\n\n{item1}\n\n{item2}", ) return False if isinstance(item1[k], datetime.datetime) and isinstance( - item2[k], datetime.datetime + item2[k], + datetime.datetime, ): if is_same_datetime(item1[k], item2[k], tol=datetime.timedelta(minutes=1)): continue - else: - logger.opt(lazy=True).trace( - f"\n\nItems differ\n\nItem1\n\n{item1}\n\nItem2\n\n{item2}" - f"\n\nKey [{k}] is different - [{repr(item1[k])}] | [{repr(item2[k])}]" - ) - return False - else: - if item1[k] == item2[k]: - continue - else: - logger.opt(lazy=True).trace( - f"\n\nItems differ\n\nItem1\n\n{item1}\n\nItem2\n\n{item2}" - f"\n\nKey [{k}] is different - [{repr(item1[k])}] | [{repr(item2[k])}]" - ) - return False + + logger.opt(lazy=True).trace( + f"\n\nItems differ\n\nItem1\n\n{item1}\n\nItem2\n\n{item2}" + f"\n\nKey [{k}] is different - [{item1[k]!r}] | [{item2[k]!r}]", + ) + return False + + if item1[k] == item2[k]: + continue + + logger.opt(lazy=True).trace( + f"\n\nItems differ\n\nItem1\n\n{item1}\n\nItem2\n\n{item2}" + f"\n\nKey [{k}] is different - [{item1[k]!r}] | [{item2[k]!r}]", + ) + return False return True diff --git a/syncall/taskwarrior/taskwarrior_side.py b/syncall/taskwarrior/taskwarrior_side.py index 805ee75..594b06a 100644 --- a/syncall/taskwarrior/taskwarrior_side.py +++ b/syncall/taskwarrior/taskwarrior_side.py @@ 
-1,6 +1,8 @@ +from __future__ import annotations + import datetime from pathlib import Path -from typing import Any, Dict, List, Literal, Mapping, Optional, Sequence, Set, Union, cast +from typing import Any, Literal, Mapping, Sequence, cast from uuid import UUID from bubop import logger, parse_datetime @@ -29,11 +31,11 @@ } -def parse_datetime_(dt: Union[str, datetime.datetime]) -> datetime.datetime: +def parse_datetime_(dt: str | datetime.datetime) -> datetime.datetime: if isinstance(dt, datetime.datetime): return dt - else: - return parse_datetime(dt) + + return parse_datetime(dt) class TaskWarriorSide(SyncSide): @@ -45,15 +47,14 @@ class TaskWarriorSide(SyncSide): def __init__( self, - tags: Sequence[str] = tuple(), - project: Optional[str] = None, + tags: Sequence[str] = (), + project: str | None = None, tw_filter: str = "", - config_file_override: Optional[Path] = None, + config_file_override: Path | None = None, config_overrides: Mapping[str, Any] = {}, **kargs, ): - """ - Constructor. + """Init. :param tags: Only include tasks that have are tagged using *all* the specified tags. 
Also assign these tags to newly added items @@ -66,7 +67,7 @@ def __init__( tw_config_default_overrides """ super().__init__(name="Tw", fullname="Taskwarrior", **kargs) - self._tags: Set[str] = set(tags) + self._tags: set[str] = set(tags) self._project: str = project or "" self._tw_filter: str = tw_filter @@ -92,16 +93,18 @@ def __init__( raise RuntimeError( "Could not determine a valid taskwarrior config file and no override config" " file was specified - candidates:" - f" {', '.join([str(p) for p in candidate_config_files])}" + f" {', '.join([str(p) for p in candidate_config_files])}", ) logger.debug(f"Initializing Taskwarrior instance using config file: {config_file}") self._tw = TaskWarrior( - marshal=True, config_filename=str(config_file), config_overrides=config_overrides_ + marshal=True, + config_filename=str(config_file), + config_overrides=config_overrides_, ) # All TW tasks - self._items_cache: Dict[str, TaskwarriorRawItem] = {} + self._items_cache: dict[str, TaskwarriorRawItem] = {} # Whether to refresh the cached list of items self._reload_items = True @@ -127,7 +130,7 @@ def _load_all_items(self): tasks = self._tw.load_tasks_and_filter(command="all", filter_=filter_) items = [*tasks["completed"], *tasks["pending"]] - self._items_cache: Dict[str, TaskwarriorRawItem] = { # type: ignore + self._items_cache: dict[str, TaskwarriorRawItem] = { # type: ignore str(item["uuid"]): item for item in items } self._reload_items = False @@ -135,12 +138,11 @@ def _load_all_items(self): def get_all_items( self, skip_completed=False, - order_by: Optional[OrderByType] = None, + order_by: OrderByType | None = None, use_ascending_order: bool = True, **kargs, - ) -> List[TaskwarriorRawItem]: - """ - Fetch the tasks off the local taskw db, taking into account the filters set in the + ) -> list[TaskwarriorRawItem]: + """Fetch the tasks off the local taskw db, taking into account the filters set in the during the instance construction. 
:param skip_completed: Skip completed tasks @@ -162,7 +164,7 @@ def get_all_items( return tasks - def get_item(self, item_id: str, use_cached: bool = True) -> Optional[TaskwarriorRawItem]: + def get_item(self, item_id: str, use_cached: bool = True) -> TaskwarriorRawItem | None: item = self._items_cache.get(item_id) if not use_cached or item is None: item = self._tw.get_task(id=item_id)[-1] @@ -226,10 +228,10 @@ def add_item(self, item: ItemType) -> ItemType: logger.debug(f'Task "{new_id}" created - "{description[0:len_print]}"...') # explicitly mark as deleted - taskw doesn't like task_add(`status:deleted`) so we have - # todo it in two steps + # to do it in two steps if curr_status == "deleted": logger.debug( - f'Task "{new_id}" marking as deleted - "{description[0:len_print]}"...' + f'Task "{new_id}" marking as deleted - "{description[0:len_print]}"...', ) self._tw.task_delete(id=new_id) @@ -252,7 +254,10 @@ def last_modification_key(cls) -> str: @classmethod def items_are_identical( - cls, item1: dict, item2: dict, ignore_keys: Sequence[str] = [] + cls, + item1: dict, + item2: dict, + ignore_keys: Sequence[str] = [], ) -> bool: keys = [ k diff --git a/syncall/tw_asana_utils.py b/syncall/tw_asana_utils.py index fafce70..33e27dc 100644 --- a/syncall/tw_asana_utils.py +++ b/syncall/tw_asana_utils.py @@ -3,7 +3,7 @@ import datetime import dateutil -from bubop import format_datetime_tz, format_dict, logger, parse_datetime +from bubop import parse_datetime from syncall.asana.asana_task import AsanaTask from syncall.types import TwItem @@ -47,10 +47,8 @@ def convert_tw_to_asana(tw_item: TwItem) -> AsanaTask: as_created_at = tw_entry if tw_due is not None: - if isinstance(tw_due, datetime.datetime): - as_due_at = tw_due - else: - as_due_at = parse_datetime(tw_due) + as_due_at = tw_due if isinstance(tw_due, datetime.datetime) else parse_datetime(tw_due) + as_due_on = as_due_at.date() if isinstance(tw_modified, datetime.datetime): @@ -72,7 +70,7 @@ def 
convert_tw_to_asana(tw_item: TwItem) -> AsanaTask: ) -def convert_asana_to_tw(asana_task: AsanaTask) -> TwItem: +def convert_asana_to_tw(asana_task: AsanaTask) -> TwItem: # noqa: C901, PLR0912 # Extract Asana fields as_completed = asana_task["completed"] as_completed_at = asana_task["completed_at"] @@ -83,8 +81,7 @@ def convert_asana_to_tw(asana_task: AsanaTask) -> TwItem: as_name = asana_task["name"] # Declare Taskwarrior fields - tw_completed = None - tw_due = tw_item = None + tw_due = None tw_end = None tw_entry = None tw_modified = None @@ -119,7 +116,9 @@ def convert_asana_to_tw(asana_task: AsanaTask) -> TwItem: elif as_due_on is not None: if isinstance(as_due_on, datetime.date): tw_due = datetime.datetime.combine( - as_due_on, datetime.time(0, 0, 0), dateutil.tz.tzlocal() + as_due_on, + datetime.time(0, 0, 0), + dateutil.tz.tzlocal(), ) else: tw_due = parse_datetime(as_due_on) diff --git a/syncall/tw_caldav_utils.py b/syncall/tw_caldav_utils.py index 802c3df..06ad116 100644 --- a/syncall/tw_caldav_utils.py +++ b/syncall/tw_caldav_utils.py @@ -30,7 +30,7 @@ def convert_tw_to_caldav(tw_item: Item) -> Item: assert all( - i in tw_item.keys() for i in ("description", "status", "uuid") + i in tw_item for i in ("description", "status", "uuid") ), "Missing keys in tw_item" caldav_item: Item = {} @@ -38,7 +38,7 @@ def convert_tw_to_caldav(tw_item: Item) -> Item: caldav_item["summary"] = tw_item["description"] # description caldav_item["description"] = "IMPORTED FROM TASKWARRIOR\n" - if "annotations" in tw_item.keys(): + if "annotations" in tw_item: for i, annotation in enumerate(tw_item["annotations"]): caldav_item["description"] += f"\n* Annotation {i + 1}: {annotation}" @@ -49,28 +49,28 @@ def convert_tw_to_caldav(tw_item: Item) -> Item: caldav_item["status"] = aliases_tw_caldav_status[tw_item["status"]] # Priority - if "priority" in tw_item.keys(): + if "priority" in tw_item: caldav_item["priority"] = aliases_tw_caldav_priority[tw_item["priority"].lower()] # 
Timestamps - if "entry" in tw_item.keys(): + if "entry" in tw_item: caldav_item["created"] = tw_item["entry"] - if "end" in tw_item.keys(): + if "end" in tw_item: caldav_item["completed"] = tw_item["end"] - if "modified" in tw_item.keys(): + if "modified" in tw_item: caldav_item["last-modified"] = tw_item["modified"] # Start/due dates # - If given due date -> (start=due-1, end=due) - if "due" in tw_item.keys(): + if "due" in tw_item: caldav_item["start"] = tw_item["due"] - timedelta(hours=1) caldav_item["due"] = tw_item["due"] - if "tags" in tw_item.keys(): + if "tags" in tw_item: caldav_item["categories"] = tw_item["tags"] # if start-ed, override the status appropriately - if "start" in tw_item.keys(): + if "start" in tw_item: caldav_item["status"] = "in-process" return caldav_item @@ -103,18 +103,18 @@ def convert_caldav_to_tw(caldav_item: Item) -> Item: tw_item["priority"] = prio # Timestamps - if "created" in caldav_item.keys(): + if "created" in caldav_item: tw_item["entry"] = caldav_item["created"] - if "completed" in caldav_item.keys(): + if "completed" in caldav_item: tw_item["end"] = caldav_item["completed"] - if "last-modified" in caldav_item.keys(): + if "last-modified" in caldav_item: tw_item["modified"] = caldav_item["last-modified"] # Start/due dates - if "due" in caldav_item.keys(): + if "due" in caldav_item: tw_item["due"] = caldav_item["due"] - if "categories" in caldav_item.keys(): + if "categories" in caldav_item: tw_item["tags"] = caldav_item["categories"] if caldav_item["status"] == "in-process" and "last-modified" in caldav_item: diff --git a/syncall/tw_gcal_utils.py b/syncall/tw_gcal_utils.py index 23f018f..3099832 100644 --- a/syncall/tw_gcal_utils.py +++ b/syncall/tw_gcal_utils.py @@ -66,7 +66,7 @@ def convert_tw_to_gcal( # description gcal_item["description"] = "IMPORTED FROM TASKWARRIOR\n" gcal_item["description"] += "\n".join( - [get_tw_annotations_as_str(tw_item), get_tw_status_and_uuid_as_str(tw_item)] + 
[get_tw_annotations_as_str(tw_item), get_tw_status_and_uuid_as_str(tw_item)], ) date_keys = ["scheduled", "due"] if prefer_scheduled_date else ["due", "scheduled"] @@ -82,19 +82,20 @@ def convert_tw_to_gcal( if date_key in tw_item.keys(): logger.trace( f'Using "{date_key}" date for {tw_item["uuid"]} for setting the end date of' - " the event" + " the event", ) dt_gcal = GCalSide.format_datetime(tw_item[date_key]) gcal_item["start"] = { "dateTime": GCalSide.format_datetime( - tw_item[date_key] - tw_item[tw_duration_key] - ) + tw_item[date_key] - tw_item[tw_duration_key], + ), } gcal_item["end"] = {"dateTime": dt_gcal} break else: logger.trace( - f'Using "entry" date for {tw_item["uuid"]} for setting the start date of the event' + f'Using "entry" date for {tw_item["uuid"]} for setting the start date of the' + " event", ) entry_dt = tw_item["entry"] entry_dt_gcal_str = GCalSide.format_datetime(entry_dt) @@ -102,7 +103,7 @@ def convert_tw_to_gcal( gcal_item["start"] = {"dateTime": entry_dt_gcal_str} gcal_item["end"] = { - "dateTime": GCalSide.format_datetime(entry_dt + tw_item[tw_duration_key]) + "dateTime": GCalSide.format_datetime(entry_dt + tw_item[tw_duration_key]), } return gcal_item @@ -117,7 +118,6 @@ def convert_gcal_to_tw( If set_scheduled_date, then it will set the "scheduled" date of the produced TW task instead of the "due" date """ - # Parse the description annotations = [] status = "pending" @@ -139,7 +139,7 @@ def convert_gcal_to_tw( # Status if status not in ["pending", "completed", "deleted", "waiting", "recurring"]: logger.error( - f"Invalid status {status} in GCal->TW conversion of item. Skipping status:" + f"Invalid status {status} in GCal->TW conversion of item. 
Skipping status:", ) else: tw_item["status"] = status @@ -153,13 +153,7 @@ def convert_gcal_to_tw( if gcal_summary.startswith(_prefix_title_success_str): gcal_summary = gcal_summary[len(_prefix_title_success_str) :] tw_item["description"] = gcal_summary - - # don't meddle with the 'entry' field - if set_scheduled_date: - date_key = "scheduled" - else: - date_key = "due" - + date_key = "scheduled" if set_scheduled_date else "due" end_time = GCalSide.get_event_time(gcal_item, t="end") tw_item[tw_duration_key] = end_time - GCalSide.get_event_time(gcal_item, t="start") diff --git a/syncall/tw_gkeep_utils.py b/syncall/tw_gkeep_utils.py index c5fd793..f1d9217 100644 --- a/syncall/tw_gkeep_utils.py +++ b/syncall/tw_gkeep_utils.py @@ -7,7 +7,8 @@ def convert_tw_to_gkeep_todo(tw_item: TwItem) -> GKeepTodoItem: return GKeepTodoItem( - is_checked=(tw_item["status"] == "completed"), plaintext=tw_item["description"] + is_checked=(tw_item["status"] == "completed"), + plaintext=tw_item["description"], ) diff --git a/syncall/tw_gtasks_utils.py b/syncall/tw_gtasks_utils.py index 4d632f0..387a55e 100644 --- a/syncall/tw_gtasks_utils.py +++ b/syncall/tw_gtasks_utils.py @@ -29,7 +29,7 @@ def convert_tw_to_gtask( # update time if "modified" in tw_item.keys(): gtasks_item["updated"] = GTasksSide.format_datetime( - GTasksSide.parse_datetime(tw_item["modified"]) + GTasksSide.parse_datetime(tw_item["modified"]), ) return gtasks_item @@ -44,7 +44,6 @@ def convert_gtask_to_tw( If set_scheduled_date, then it will set the "scheduled" date of the produced TW task instead of the "due" date """ - # Parse the description annotations = [] uuid = None @@ -65,7 +64,7 @@ def convert_gtask_to_tw( if status_tw is None: logger.error( f"Unknown Google Task status {status_gtask} for google task item {gtasks_item}." 
- " Setting it to pending" + " Setting it to pending", ) status_tw = "pending" @@ -79,11 +78,7 @@ def convert_gtask_to_tw( # Description tw_item["description"] = gtasks_item["title"] - # don't meddle with the 'entry' field - if set_scheduled_date: - date_key = "scheduled" - else: - date_key = "due" + date_key = "scheduled" if set_scheduled_date else "due" # due/scheduled date due_date = GTasksSide.get_task_due_time(gtasks_item) diff --git a/syncall/tw_notion_utils.py b/syncall/tw_notion_utils.py index d6355ff..ea97b5d 100644 --- a/syncall/tw_notion_utils.py +++ b/syncall/tw_notion_utils.py @@ -24,10 +24,7 @@ def create_page(parent_page_id: str, title: str, client: Client) -> NotionPage: def convert_tw_to_notion(tw_item: TwItem) -> NotionTodoBlock: modified = tw_item["modified"] - if isinstance(modified, datetime.datetime): - dt = modified - else: - dt = parse_datetime(modified) + dt = modified if isinstance(modified, datetime.datetime) else parse_datetime(modified) return NotionTodoBlock( is_archived=False, diff --git a/syncall/tw_utils.py b/syncall/tw_utils.py index bd3c982..90e756c 100644 --- a/syncall/tw_utils.py +++ b/syncall/tw_utils.py @@ -1,25 +1,31 @@ +# ruff: noqa: PLR2004 + """Miscellaneous TW related utilities. At the time of writing, these are primarily used in the TW<>Gtasks and TW<>Gcal integrations. """ +from __future__ import annotations + import traceback -from typing import Optional, Sequence, Tuple +from typing import TYPE_CHECKING, Sequence from uuid import UUID from bubop import logger -from syncall.types import TwItem +if TYPE_CHECKING: + from syncall.types import TwItem def get_tw_annotations_as_str(tw_item: TwItem) -> str: - """ - Return all the annotations of the given object in a single string. Put each annotation - in its own line and prefix it with "Annotation X" where X is an increasing integer id. + """Return all the annotations of the given object in a single string. 
+ + Put each annotation in its own line and prefix it with "Annotation X" where X is an + increasing integer id. Return an empty string if there are no annotations. """ - if "annotations" in tw_item.keys() and len(tw_item["annotations"]) > 0: + if "annotations" in tw_item and len(tw_item["annotations"]) > 0: annotations_li = [ f"* Annotation {i + 1}: {annotation}" for i, annotation in enumerate(tw_item["annotations"]) @@ -27,14 +33,11 @@ def get_tw_annotations_as_str(tw_item: TwItem) -> str: return "\n".join(annotations_li) - else: - return "" + return "" def get_tw_status_and_uuid_as_str(tw_item: TwItem) -> str: - """ - Return the UUID and status of the given Taskwarrior item in a single string. - """ + """Return the UUID and status of the given TW item in a single string.""" return "\n".join( [ f"{k}: {tw_item[k]}" @@ -42,32 +45,31 @@ def get_tw_status_and_uuid_as_str(tw_item: TwItem) -> str: "status", "uuid", ] - ] + ], ) -def extract_tw_fields_from_string(s: str) -> Tuple[Sequence[str], str, Optional[UUID]]: +def extract_tw_fields_from_string(s: str) -> tuple[Sequence[str], str, UUID | None]: """Parse the TW annotations, status, and UUID fields from the given string.""" - annotations = [] status = "pending" uuid = None # strip whitespaces, empty lines lines = [line.strip() for line in s.split("\n") if line][0:] - i = 0 - for i, line in enumerate(lines): + _i = 0 + for _i, line in enumerate(lines): parts = line.split(":", maxsplit=1) if len(parts) == 2 and parts[0].lower().startswith("* annotation"): annotations.append(parts[1].strip()) else: break - if i == len(lines) - 1: + if _i == len(lines) - 1: return annotations, status, uuid # Iterate through rest of lines, find only the status and uuid ones - for line in lines[i:]: + for line in lines[_i:]: parts = line.split(":", maxsplit=1) if len(parts) == 2: start = parts[0].lower() @@ -79,7 +81,7 @@ def extract_tw_fields_from_string(s: str) -> Tuple[Sequence[str], str, Optional[ except ValueError as err: 
logger.error( f'Invalid UUID "{err}" provided during the conversion to taskwarrior,' - f" Using None...\n\n{traceback.format_exc()}" + f" Using None...\n\n{traceback.format_exc()}", ) return annotations, status, uuid diff --git a/syncall/types.py b/syncall/types.py index ff44302..d910f35 100644 --- a/syncall/types.py +++ b/syncall/types.py @@ -1,4 +1,7 @@ -from typing import Any, Dict, List, Literal, Optional, Tuple, TypedDict, Union +from __future__ import annotations + +import datetime +from typing import Any, Dict, Literal, Optional, Protocol, Tuple, TypedDict, Union from item_synchronizer.types import ID @@ -9,7 +12,7 @@ class TaskwarriorRawItem(TypedDict, total=False): """Dictionary part of an item as returned from the Taskw Python API on tw.get_task(id=...). Example: - + ------- {'id': 473, 'description': 'buenos dias', 'entry': '20211209T083645Z', @@ -19,6 +22,7 @@ class TaskwarriorRawItem(TypedDict, total=False): 'uuid': 'a06f1c9d-237a-4692-8427-27bf6cad5ff1', 'tags': ['test', 'test2'], 'urgency': 1.9}) + """ id: int @@ -32,13 +36,16 @@ class TaskwarriorRawItem(TypedDict, total=False): project: str status: str uuid: str - tags: List[str] + tags: list[str] urgency: float # Item as returned from the Taskw Python API on tw.get_task(id=...) TaskwarriorRawTuple = Tuple[Optional[int], TaskwarriorRawItem] +# Google-related types ------------------------------------------------------------------------ +GoogleDateT = Union[str, dict, datetime.datetime] + # Google Calendar ----------------------------------------------------------------------------- GCalItem = Dict[str, Any] @@ -47,12 +54,16 @@ class TaskwarriorRawItem(TypedDict, total=False): class GTaskLink(TypedDict): + """Link part of an item as returned from the Google Tasks Python API on.""" + description: str link: str type: str # "email" class GTasksList(TypedDict): + r"""Dict part of a list as returned from the Google Tasks Python API.""" + etag: str # ETag of the resource. id: str # Task list identifier. 
kind: str # Type of the resource. This is always "tasks#taskList". @@ -62,11 +73,10 @@ class GTasksList(TypedDict): class GTasksItem(TypedDict): - """ - Dict part of an item as returned from the Google Tasks Python API on `tasks().get_task()`. + r"""Dict part of an item as returned from the Google Tasks Python API on `tasks().get_task()`. Example: - + ------- { 'id': 'Yl9GSzNDVWluTk9heE1sUQ', 'kind': 'tasks#task', @@ -86,17 +96,18 @@ class GTasksItem(TypedDict): "completed": "2021-12-04T15:07:00.000Z", 'links': [] } + """ # Completion date of the task (as a RFC 3339 timestamp). This field is omitted if the task # has not been completed. - completed: Optional[str] + completed: str | None # Flag indicating whether the task has been deleted. The default is False. deleted: bool # Due date of the task (as a RFC 3339 timestamp). Optional. The due date only records date # information; the time portion of the timestamp is discarded when setting the due date. It # isn't possible to read or write the time that a task is due via the API. - due: Optional[str] + due: str | None # ETag of the resource. etag: str # Flag indicating whether the task is hidden. This is the case if the task had been marked @@ -108,13 +119,13 @@ class GTasksItem(TypedDict): # Type of the resource. This is always "tasks#task". kind: str # Collection of links. This collection is read-only. - links: List[GTaskLink] + links: list[GTaskLink] # Notes describing the task. Optional. - notes: Optional[str] + notes: str | None # Parent task identifier. This field is omitted if it is a top-level task. This field is # read-only. Use the "move" method to move the task under a different parent or to the top # level. - parent: Optional[str] + parent: str | None # String indicating the position of the task among its sibling tasks under the same parent # task or at the top level. 
If this string is greater than another task's corresponding # position string according to lexicographical ordering, the task is positioned after the @@ -150,127 +161,127 @@ class NotionRawItem(TypedDict): class NotionTextContent(TypedDict): - """ - Example section: + """Example section: - "text": {"content": "Lacinato kale", "link": None}, + "text": {"content": "Lacinato kale", "link": None}, """ content: str - link: Optional[str] # ? + link: str | None # ? class SingleItemTextSection(TypedDict, total=False): - """ - Example section: - - { - "type": "text", - "text": {"content": "Lacinato kale", "link": None}, - "annotations": { - "bold": False, - "italic": False, - "strikethrough": False, - "underline": False, - "code": False, - "color": "default", - }, - "plain_text": "Lacinato kale", - "href": None, - } + """Example section: + + { + "type": "text", + "text": {"content": "Lacinato kale", "link": None}, + "annotations": { + "bold": False, + "italic": False, + "strikethrough": False, + "underline": False, + "code": False, + "color": "default", + }, + "plain_text": "Lacinato kale", + "href": None, + } """ type: Literal["text"] text: NotionTextContent - annotations: Dict[str, Union[bool, str]] + annotations: dict[str, bool | str] plain_text: str - href: Optional[str] # ? + href: str | None # ? 
class NotionPage(NotionRawItem): - """ - Created page: - - { - object: "page", - id: "e849bbd0-6d46-42af-9809-e81628e43306", - created_time: "2021-12-05T13:17:00.000Z", - last_edited_time: "2021-12-05T13:17:00.000Z", - cover: None, - icon: None, - parent: { type: "page_id", - page_id: "a6dda560-5841-4bbb-8d66-a56725c5a82a" }, - archived: False, - properties: { - title: { - id: "title", - type: "title", - title: [ - { - type: "text", - text: { content: "Opa, na th!", link: None }, - annotations: { - bold: False, - italic: False, - strikethrough: False, - underline: False, - code: False, - color: "default", - }, - plain_text: "Opa, na th!", - href: None, - }, - ], + """Created page: + + { + object: "page", + id: "e849bbd0-6d46-42af-9809-e81628e43306", + created_time: "2021-12-05T13:17:00.000Z", + last_edited_time: "2021-12-05T13:17:00.000Z", + cover: None, + icon: None, + parent: { type: "page_id", + page_id: "a6dda560-5841-4bbb-8d66-a56725c5a82a" }, + archived: False, + properties: { + title: { + id: "title", + type: "title", + title: [ + { + type: "text", + text: { content: "Opa, na th!", link: None }, + annotations: { + bold: False, + italic: False, + strikethrough: False, + underline: False, + code: False, + color: "default", }, + plain_text: "Opa, na th!", + href: None, + }, + ], }, - url: "https://www.notion.so/Opa-na-th-e849bbd06d4642af9809e81628e43306", - } + }, + url: "https://www.notion.so/Opa-na-th-e849bbd06d4642af9809e81628e43306", + } """ - cover: Optional[str] - icon: Optional[str] - parent: Dict[str, str] - properties: Tuple[Literal["properties"], SingleItemTextSection] + cover: str | None + icon: str | None + parent: dict[str, str] + properties: tuple[Literal["properties"], SingleItemTextSection] url: str class NotionTodoSection(TypedDict): - """ - Example section: - - { - "text": [ - { - "type": "text", - "text": {"content": "Lacinato kale", "link": None}, - "annotations": { - "bold": False, - "italic": False, - "strikethrough": False, - "underline": 
False, - "code": False, - "color": "default", - }, - "plain_text": "Lacinato kale", - "href": None, - } - ], - "checked": True, - } + """Example section: + + { + "text": [ + { + "type": "text", + "text": {"content": "Lacinato kale", "link": None}, + "annotations": { + "bold": False, + "italic": False, + "strikethrough": False, + "underline": False, + "code": False, + "color": "default", + }, + "plain_text": "Lacinato kale", + "href": None, + } + ], + "checked": True, + } """ - text: List[SingleItemTextSection] + text: list[SingleItemTextSection] checked: bool class NotionTodoBlockItem(NotionRawItem): + """Todo block item as returned from the Notion Python API.""" + to_do: NotionTodoSection # Page contents as returned from the Notion Python API class NotionPageContents(TypedDict): + """Page contents as returned from the Notion Python API.""" + object: Literal["list"] - results: List[NotionRawItem] + results: list[NotionRawItem] next_cursor: Any has_more: bool @@ -282,6 +293,8 @@ class NotionPageContents(TypedDict): # Task as returned from Asana API. class AsanaRawTask(TypedDict): + """Task as returned from Asana API.""" + completed: bool completed_at: str created_at: str @@ -294,6 +307,13 @@ class AsanaRawTask(TypedDict): # Extras -------------------------------------------------------------------------------------- # Task as returned from get_task(id=...) -# TODO Are these types needed? They seem to be duplicates of TaskwarriorRawItem ... TwRawItem = Tuple[Optional[int], Dict[str, Any]] TwItem = Dict[str, Any] + + +# create a Protocol class for instances that have the __str__ method +class SupportsStr(Protocol): + """Protocol for instances that have the __str__ method.""" + + def __str__(self) -> str: + ... 
diff --git a/tests/__init__.py b/tests/__init__.py index e69de29..58de813 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -0,0 +1 @@ +"""Tests directory.""" diff --git a/tests/conftest.py b/tests/conftest.py index fe81d76..df535c3 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -2,16 +2,17 @@ from pathlib import Path import pytest -from _pytest.logging import caplog as _caplog # type: ignore +from _pytest.logging import caplog as _caplog # noqa: F401 from bubop import PrefsManager from loguru import logger -from .conftest_fs import * -from .conftest_gcal import * -from .conftest_gkeep import * -from .conftest_gtasks import * -from .conftest_notion import * -from .conftest_tw import * +from .conftest_fs import * # noqa: F403 +from .conftest_gcal import * # noqa: F403 +from .conftest_gkeep import * # noqa: F403 +from .conftest_gtasks import * # noqa: F403 +from .conftest_helpers import * # noqa: F403 +from .conftest_notion import * # noqa: F403 +from .conftest_tw import * # noqa: F403 @pytest.fixture() @@ -19,10 +20,9 @@ def test_data() -> Path: return Path(__file__).absolute().parent / "test_data" -@pytest.fixture -def caplog(_caplog): - """ - Fixture that forwards loguru's output to std logging's output so that you can use caplog +@pytest.fixture() +def caplog(_caplog): # noqa: F811 + """Fixture that forwards loguru's output to std logging's output so that you can use caplog as usual """ @@ -31,10 +31,12 @@ def emit(self, record): logging.getLogger(record.name).handle(record) logger.add(PropagateHandler(), format="{message}") - yield _caplog + return _caplog class MockPrefsManager(PrefsManager): + """Mock the PrefsManager class.""" + def __init__(self): self._conts = { "kalimera": {"a": 1, "b": 2, "c": [1, 2, 3]}, diff --git a/tests/conftest_fs.py b/tests/conftest_fs.py index c23f374..04efbc5 100644 --- a/tests/conftest_fs.py +++ b/tests/conftest_fs.py @@ -1,74 +1,69 @@ from pathlib import Path import pytest - from 
syncall.filesystem.filesystem_file import FilesystemFile from syncall.filesystem.filesystem_side import FilesystemSide -@pytest.fixture +@pytest.fixture() def fs_side(request: pytest.FixtureRequest) -> dict: """Fixture to parametrize on.""" param = request.param # type: ignore return request.getfixturevalue(param) -@pytest.fixture +@pytest.fixture() def fs_file_default_fname() -> str: return "file.txt" -@pytest.fixture +@pytest.fixture() def fs_file_default_name() -> str: return "file" -@pytest.fixture +@pytest.fixture() def tmpdir_path(tmpdir) -> Path: return Path(tmpdir) -@pytest.fixture +@pytest.fixture() def non_existent_python_path(tmpdir_path, fs_file_default_fname) -> Path: return tmpdir_path / fs_file_default_fname -@pytest.fixture +@pytest.fixture() def fs_file_empty(tmpdir_path, fs_file_default_fname) -> FilesystemFile: - fs = FilesystemFile(tmpdir_path / fs_file_default_fname) - - return fs + return FilesystemFile(tmpdir_path / fs_file_default_fname) -@pytest.fixture +@pytest.fixture() def python_path_with_content(tmpdir_path, fs_file_default_fname) -> Path: path = tmpdir_path / fs_file_default_fname path.write_text( """Here is some multi-line text with unicode šŸš€šŸ˜„ characters. 
-""" +""", ) return path -@pytest.fixture +@pytest.fixture() def fs_file_with_content(python_path_with_content: Path) -> FilesystemFile: - fs = FilesystemFile(python_path_with_content) - - return fs + return FilesystemFile(python_path_with_content) def _create_fs_side(filesystem_root: str): return FilesystemSide(filesystem_root=Path(filesystem_root), filename_extension=".txt") -@pytest.fixture +@pytest.fixture() def fs_side_no_items(tmpdir) -> FilesystemSide: return _create_fs_side(filesystem_root=tmpdir) -@pytest.fixture +@pytest.fixture() def fs_side_with_existing_items(tmpdir) -> FilesystemSide: dir_ = Path(tmpdir) for i in range(10): diff --git a/tests/conftest_gcal.py b/tests/conftest_gcal.py index a2de65b..8f7fffb 100644 --- a/tests/conftest_gcal.py +++ b/tests/conftest_gcal.py @@ -2,7 +2,6 @@ import pytest from dateutil.tz import tzutc - from syncall.types import GCalItem diff --git a/tests/conftest_gkeep.py b/tests/conftest_gkeep.py index 758cc96..4118b29 100644 --- a/tests/conftest_gkeep.py +++ b/tests/conftest_gkeep.py @@ -1,6 +1,5 @@ import pytest -from gkeepapi.node import List, Note - +from gkeepapi.node import List from syncall.google.gkeep_note import GKeepNote as MyGKeepNote @@ -126,7 +125,7 @@ def gkeep_list1(): { "id": "f4c1a837-0f6f-4b3f-a34c-b163817f9259", "topicCategory": {"category": "FOOD"}, - } + }, ], }, "color": "RED", @@ -168,7 +167,7 @@ def gkeep_note_empty(): { "labelId": "tag.qr30ughe2zk6.1630840458798", "deleted": "1970-01-01T00:00:00.000000Z", - } + }, ], "collaborators": [], } @@ -187,8 +186,7 @@ def gkeep_list_instance0(gkeep_list0: List) -> List: @pytest.fixture() def gkeep_note_empty_instance(gkeep_note_empty: dict) -> MyGKeepNote: - note = MyGKeepNote.from_raw_item(gkeep_note_empty) - return note + return MyGKeepNote.from_raw_item(gkeep_note_empty) @pytest.fixture() diff --git a/tests/conftest_gtasks.py b/tests/conftest_gtasks.py index e6721ad..3560b5b 100644 --- a/tests/conftest_gtasks.py +++ b/tests/conftest_gtasks.py @@ -1,7 
+1,6 @@ from typing import cast import pytest - from syncall.types import GTasksItem # API Reference: https://googleapis.github.io/google-api-python-client/docs/dyn/tasks_v1.html diff --git a/tests/conftest_helpers.py b/tests/conftest_helpers.py index 0c84b66..bfdd3b6 100644 --- a/tests/conftest_helpers.py +++ b/tests/conftest_helpers.py @@ -1,11 +1,11 @@ import pytest -@pytest.fixture +@pytest.fixture() def fixture_true(): return True -@pytest.fixture +@pytest.fixture() def fixture_false(): return False diff --git a/tests/conftest_notion.py b/tests/conftest_notion.py index 8b0297d..2f86d08 100644 --- a/tests/conftest_notion.py +++ b/tests/conftest_notion.py @@ -2,7 +2,6 @@ from typing import cast, no_type_check import pytest - from syncall.types import NotionPageContents, NotionTodoBlockItem @@ -15,7 +14,7 @@ def notion_todo(request: pytest.FixtureRequest) -> NotionTodoBlockItem: @pytest.fixture() def notion_simple_todo() -> NotionTodoBlockItem: - """Simple to_do block returned by Notion Python SDK. + """Get simple to_do block returned by Notion Python SDK. 
- Unarchived (not deleted) - Unchecked (not completed) @@ -43,7 +42,7 @@ def notion_simple_todo() -> NotionTodoBlockItem: }, "plain_text": "Lacinato kale", "href": None, - } + }, ], "checked": False, }, @@ -62,7 +61,7 @@ def notion_simple_checked_todo(notion_simple_todo: NotionTodoBlockItem) -> Notio def notion_simple_diff_edited_time_todo( notion_simple_todo: NotionTodoBlockItem, ) -> NotionTodoBlockItem: - """Completed Notion todo block.""" + """Get completed Notion todo block.""" item = deepcopy(notion_simple_todo) item["last_edited_time"] = "2022-01-04T10:01:00.000Z" return item @@ -72,7 +71,7 @@ def notion_simple_diff_edited_time_todo( def notion_simple_archived_todo( notion_simple_todo: NotionTodoBlockItem, ) -> NotionTodoBlockItem: - """Archived Notion todo block.""" + """Get archived Notion todo block.""" item = deepcopy(notion_simple_todo) item["archived"] = True return item @@ -80,8 +79,7 @@ def notion_simple_archived_todo( @pytest.fixture() def notion_chained_todo() -> NotionTodoBlockItem: - """ - More complex to_do block returned by Notion Python SDK. + """Get more complex to_do block returned by Notion Python SDK. Represents a todo with the following text (markdown notation in use): @@ -190,8 +188,7 @@ def notion_chained_todo() -> NotionTodoBlockItem: @no_type_check @pytest.fixture() def page_contents() -> NotionPageContents: - """ - Full example contents of a notion page. + """Full example contents of a notion page. 
Fetched using the query: "notion.blocks.children.list(block_id=page_id)" """ @@ -221,8 +218,8 @@ def page_contents() -> NotionPageContents: }, "plain_text": "šŸ‘‹ Welcome to Notion!", "href": None, - } - ] + }, + ], }, }, { @@ -248,8 +245,8 @@ def page_contents() -> NotionPageContents: }, "plain_text": "Here are the basics:", "href": None, - } - ] + }, + ], }, }, { @@ -275,7 +272,7 @@ def page_contents() -> NotionPageContents: }, "plain_text": "Lacinato kale", "href": None, - } + }, ], "checked": True, }, @@ -401,8 +398,8 @@ def page_contents() -> NotionPageContents: }, "plain_text": "a list item", "href": None, - } - ] + }, + ], }, }, { @@ -820,8 +817,8 @@ def page_contents() -> NotionPageContents: " useful tips!" ), "href": None, - } - ] + }, + ], }, }, { @@ -857,8 +854,8 @@ def page_contents() -> NotionPageContents: }, "plain_text": "See it in action:", "href": None, - } - ] + }, + ], }, }, { @@ -884,7 +881,7 @@ def page_contents() -> NotionPageContents: }, "plain_text": "1 minute", "href": None, - } + }, ], "type": "external", "external": {"url": "https://youtu.be/TL_N2pmh9O0"}, @@ -923,7 +920,7 @@ def page_contents() -> NotionPageContents: }, "plain_text": "4 minutes", "href": None, - } + }, ], "type": "external", "external": {"url": "https://youtu.be/FXIrojSK3Jo"}, @@ -962,7 +959,7 @@ def page_contents() -> NotionPageContents: }, "plain_text": "2 minutes", "href": None, - } + }, ], "type": "external", "external": {"url": "https://youtu.be/2Pwzff-uffU"}, @@ -1001,7 +998,7 @@ def page_contents() -> NotionPageContents: }, "plain_text": "2 minutes", "href": None, - } + }, ], "type": "external", "external": {"url": "https://youtu.be/O8qdvSxDYNY"}, @@ -1062,7 +1059,7 @@ def page_contents() -> NotionPageContents: "plain_text": " to watch 50+ more tutorials", "href": None, }, - ] + ], }, }, { @@ -1164,7 +1161,7 @@ def page_contents() -> NotionPageContents: ), "href": None, }, - ] + ], }, }, { diff --git a/tests/conftest_tw.py b/tests/conftest_tw.py index 
c874984..dc320cb 100644 --- a/tests/conftest_tw.py +++ b/tests/conftest_tw.py @@ -2,7 +2,6 @@ import pytest from dateutil.tz.tz import tzutc - from syncall.types import TwItem diff --git a/tests/generic_test_case.py b/tests/generic_test_case.py index 8adb4ab..38c5a5e 100644 --- a/tests/generic_test_case.py +++ b/tests/generic_test_case.py @@ -1,18 +1,13 @@ -""" -Test the basic conversions between Google Calendar and TaskWarrior items. -""" +"""Test the basic conversions between Google Calendar and TaskWarrior items.""" -import os import unittest +from pathlib import Path class GenericTestCase(unittest.TestCase): - """ + """Generic unittest class for the project.""" - :ivar DATA_FILES_PATH: Path to the directory holding data files for testing. - """ - - DATA_FILES_PATH = os.path.join(os.path.dirname(__file__), "test_data") + DATA_FILES_PATH = Path(__file__).parent / "test_data" @classmethod def setUpClass(cls): diff --git a/tests/pyproject.toml b/tests/pyproject.toml new file mode 100644 index 0000000..b6d6f0a --- /dev/null +++ b/tests/pyproject.toml @@ -0,0 +1,8 @@ +[tool.ruff] +extend = "../pyproject.toml" + +[tool.ruff.lint] + +# select = ["ALL"] +ignore = ["PLR2004", "ARG002"] +# exclude = [] diff --git a/tests/test_aggregator.py b/tests/test_aggregator.py index 6e82f1e..3e323a2 100644 --- a/tests/test_aggregator.py +++ b/tests/test_aggregator.py @@ -1,11 +1,12 @@ from typing import Optional, Sequence from item_synchronizer.types import ID - from syncall.sync_side import ItemType, SyncSide class MockSide(SyncSide): + """MockSide class.""" + def __init__(self, name: str, fullname: str, *args, **kargs) -> None: self._fullname = fullname self._name = name @@ -38,7 +39,10 @@ def summary_key(cls) -> str: @classmethod def items_are_identical( - cls, item1: ItemType, item2: ItemType, ignore_keys: Sequence[str] = [] + cls, + item1: ItemType, + item2: ItemType, + ignore_keys: Sequence[str] = [], ) -> bool: """Determine whether two items are identical. 
diff --git a/tests/test_app_utils.py b/tests/test_app_utils.py index 16a1135..ccca4d6 100644 --- a/tests/test_app_utils.py +++ b/tests/test_app_utils.py @@ -2,7 +2,6 @@ from unittest.mock import patch import pytest - from syncall.app_utils import ( cache_or_reuse_cached_combination, fetch_app_configuration, @@ -14,6 +13,8 @@ def test_list_named_combinations(fs, caplog, mock_prefs_manager): + del fs + with patch( "syncall.app_utils.PrefsManager", return_value=mock_prefs_manager, @@ -31,10 +32,14 @@ def test_list_named_combinations(fs, caplog, mock_prefs_manager): def test_fetch_app_configuration(fs, caplog, mock_prefs_manager): + del fs + with patch("syncall.app_utils.PrefsManager", return_value=mock_prefs_manager): # invalid combination config = fetch_app_configuration( - side_A_name="side A", side_B_name="side B", combination="kalimera" + side_A_name="side A", + side_B_name="side B", + combination="kalimera", ) assert list(config.keys()) == ["a", "b", "c"] assert list(config.values()) == [1, 2, [1, 2, 3]] @@ -45,10 +50,12 @@ def test_fetch_app_configuration(fs, caplog, mock_prefs_manager): caplog.clear() with pytest.raises(RuntimeError): fetch_app_configuration( - side_A_name="side A", side_B_name="side B", combination="doesntexist" + side_A_name="side A", + side_B_name="side B", + combination="doesntexist", ) - captured = caplog.text - assert "No such configuration" in captured + captured = caplog.text + assert "No such configuration" not in captured def test_report_toplevel_exception(caplog): @@ -57,19 +64,21 @@ def test_report_toplevel_exception(caplog): def test_inform_about_combination_name_usage(fs, caplog): + del fs + e = "kalimera" sys.argv[0] = e c = "kalinuxta" inform_about_combination_name_usage(combination_name=c) - assert ( - e in caplog.text - and c in caplog.text - and COMBINATION_FLAGS[0] in caplog.text - and COMBINATION_FLAGS[1] in caplog.text - ) + + assert e in caplog.text + assert c in caplog.text + assert COMBINATION_FLAGS[0] in caplog.text + 
assert COMBINATION_FLAGS[1] in caplog.text def test_cache_or_reuse_cached_combination(fs, caplog, mock_prefs_manager): + del fs with patch("syncall.app_utils.PrefsManager", return_value=mock_prefs_manager): cache_or_reuse_cached_combination( config_args={"a": 1, "b": 2, "c": 3}, @@ -86,5 +95,6 @@ def test_cache_or_reuse_cached_combination(fs, caplog, mock_prefs_manager): custom_combination_savename=None, ) - assert "Loading cached configuration" in caplog.text and "1__2__3" in caplog.text + assert "Loading cached configuration" in caplog.text + assert "1__2__3" in caplog.text caplog.clear() diff --git a/tests/test_asana_task.py b/tests/test_asana_task.py index 73619c4..0429f86 100644 --- a/tests/test_asana_task.py +++ b/tests/test_asana_task.py @@ -1,9 +1,11 @@ -import datetime +from __future__ import annotations -from bubop import format_datetime_tz, parse_datetime +import datetime +from typing import Any, ClassVar +import pytest +from bubop import parse_datetime from syncall.asana.asana_task import AsanaTask -from syncall.types import AsanaRawTask from .generic_test_case import GenericTestCase @@ -11,7 +13,7 @@ class TestAsanaTask(GenericTestCase): """Test AsanaTask.""" - BASE_VALID_RAW_TASK = { + BASE_VALID_RAW_TASK: ClassVar[dict[str, Any]] = { "completed": False, "completed_at": None, "created_at": "2022-07-10T20:42:00Z", @@ -22,19 +24,12 @@ class TestAsanaTask(GenericTestCase): "name": "First Asana Task", } - @classmethod - def setUpClass(cls): - pass - - def setUp(self): - super(TestAsanaTask, self).setUp() - def test_from_raw(self): valid_raw_task = self.BASE_VALID_RAW_TASK.copy() asana_task = AsanaTask.from_raw_task(valid_raw_task) for key in ["completed", "gid", "name"]: - self.assertEqual(asana_task[key], valid_raw_task[key]) + assert asana_task[key] == valid_raw_task[key] def test_from_raw_task_asserts_keys(self): valid_raw_task = self.BASE_VALID_RAW_TASK.copy() @@ -45,7 +40,7 @@ def test_from_raw_task_asserts_keys(self): copy = valid_raw_task.copy() 
copy.pop(key, None) - with self.assertRaises(AssertionError): + with pytest.raises(AssertionError): AsanaTask.from_raw_task(copy) def test_from_raw_task_parses_date_and_datetime_fields(self): @@ -54,23 +49,21 @@ def test_from_raw_task_parses_date_and_datetime_fields(self): asana_task = AsanaTask.from_raw_task(valid_raw_task) for key in ["created_at", "modified_at"]: - self.assertIsInstance(asana_task[key], datetime.datetime) - self.assertEqual(asana_task[key], parse_datetime(valid_raw_task[key])) + assert isinstance(asana_task[key], datetime.datetime) + assert asana_task[key] == parse_datetime(valid_raw_task[key]) for key in ["completed_at", "due_at"]: - self.assertIsNone(asana_task[key]) + assert asana_task[key] is None valid_raw_task[key] = "2022-07-10T20:55:00Z" asana_task = AsanaTask.from_raw_task(valid_raw_task) - self.assertIsInstance(asana_task[key], datetime.datetime) - self.assertEqual(asana_task[key], parse_datetime(valid_raw_task[key])) + assert isinstance(asana_task[key], datetime.datetime) + assert asana_task[key] == parse_datetime(valid_raw_task[key]) valid_raw_task["due_on"] = "2022-07-10" asana_task = AsanaTask.from_raw_task(valid_raw_task) - self.assertIsInstance(asana_task.due_on, datetime.date) - self.assertEqual( - asana_task.due_on, datetime.date.fromisoformat(valid_raw_task["due_on"]) - ) + assert isinstance(asana_task.due_on, datetime.date) + assert asana_task.due_on == datetime.date.fromisoformat(valid_raw_task["due_on"]) def test_to_raw_task(self): valid_raw_task = self.BASE_VALID_RAW_TASK.copy() @@ -78,15 +71,15 @@ def test_to_raw_task(self): raw_task = asana_task.to_raw_task() for key in ["completed", "gid", "name"]: - self.assertEqual(raw_task[key], asana_task[key]) + assert raw_task[key] == asana_task[key] for key in ["created_at", "modified_at"]: - self.assertEqual(raw_task[key], asana_task[key].isoformat(timespec="milliseconds")) + assert raw_task[key] == asana_task[key].isoformat(timespec="milliseconds") for key in ["completed_at", 
"due_at", "due_on"]: kwargs = {} - self.assertIsNone(raw_task[key]) + assert raw_task[key] is None if key == "due_on": valid_raw_task[key] = "2022-07-10" @@ -97,6 +90,6 @@ def test_to_raw_task(self): asana_task = AsanaTask.from_raw_task(valid_raw_task) raw_task = asana_task.to_raw_task() - self.assertIsNotNone(raw_task[key]) + assert raw_task[key] is not None - self.assertEqual(raw_task[key], asana_task[key].isoformat(**kwargs)) + assert raw_task[key] == asana_task[key].isoformat(**kwargs) diff --git a/tests/test_filesystem_file.py b/tests/test_filesystem_file.py index fe969c2..dc8b33b 100644 --- a/tests/test_filesystem_file.py +++ b/tests/test_filesystem_file.py @@ -2,19 +2,16 @@ import pytest import xattr - from syncall.filesystem.filesystem_file import FilesystemFile -from .conftest_helpers import fixture_false, fixture_true - # helper fixtures ----------------------------------------------------------------------------- -@pytest.fixture +@pytest.fixture() def flush_on_instantiation(request): return request.getfixturevalue(request.param) -@pytest.fixture +@pytest.fixture() def fs_file_path(request): return request.getfixturevalue(request.param) @@ -23,7 +20,10 @@ def fs_file_path(request): @pytest.mark.parametrize( - "fs_file_path,flush_on_instantiation", + ( + "fs_file_path", + "flush_on_instantiation", + ), [ ("python_path_with_content", "fixture_true"), ("python_path_with_content", "fixture_false"), @@ -31,8 +31,7 @@ def fs_file_path(request): indirect=True, ) def test_fs_file_flush_attrs(fs_file_path: Path, flush_on_instantiation: bool): - """ - Make sure that extended attributes of the FilesystemFile is only written when + """Make sure that extended attributes of the FilesystemFile is only written when we actually .flush() it. 
""" p = fs_file_path @@ -54,8 +53,7 @@ def test_fs_file_flush_attrs(fs_file_path: Path, flush_on_instantiation: bool): def test_fs_file_flush_change_title_content(python_path_with_content: Path): - """ - Make sure that title and content of the FilesystemFile is written when we actually .flush() + """Make sure that title and content of the FilesystemFile is written when we actually .flush() it. """ p = python_path_with_content @@ -65,7 +63,7 @@ def test_fs_file_flush_change_title_content(python_path_with_content: Path): fs_file = FilesystemFile(path=p) assert fs_file.contents == path_contents assert fs_file.title == path_title - assert fs_file.id != None + assert fs_file.id is not None # change contents and title new_contents = "New contents\nwith a bunch of lines\nšŸ„³šŸ„³šŸ„³" @@ -87,7 +85,7 @@ def test_fs_file_flush_change_title_content(python_path_with_content: Path): def test_fs_file_dict_fns(non_existent_python_path: Path): fs_file = FilesystemFile(path=non_existent_python_path, flush_on_instantiation=False) - assert set(("last_modified_date", "contents", "title", "id")).issubset( + assert {"last_modified_date", "contents", "title", "id"}.issubset( key for key in fs_file.keys() ) diff --git a/tests/test_filesystem_gkeep.py b/tests/test_filesystem_gkeep.py index 9e11312..83e436c 100644 --- a/tests/test_filesystem_gkeep.py +++ b/tests/test_filesystem_gkeep.py @@ -10,7 +10,8 @@ def test_convert_fs_file_to_gkeep_note_from_empty( - fs_file_empty: FilesystemFile, fs_file_default_name: str + fs_file_empty: FilesystemFile, + fs_file_default_name: str, ): os.chdir(fs_file_empty.root) out = convert_filesystem_file_to_gkeep_note(fs_file_empty) @@ -20,7 +21,8 @@ def test_convert_fs_file_to_gkeep_note_from_empty( def test_convert_fs_file_to_gkeep_note_with_existing_content( - fs_file_with_content: FilesystemFile, fs_file_default_name: str + fs_file_with_content: FilesystemFile, + fs_file_default_name: str, ): os.chdir(fs_file_with_content.root) out = 
convert_filesystem_file_to_gkeep_note(fs_file_with_content) @@ -30,7 +32,8 @@ def test_convert_fs_file_to_gkeep_note_with_existing_content( def test_convert_gkeep_note_to_fs_file_from_empty( - gkeep_note_empty_instance: GKeepNote, tmpdir + gkeep_note_empty_instance: GKeepNote, + tmpdir, ): os.chdir(tmpdir) diff --git a/tests/test_filesystem_side.py b/tests/test_filesystem_side.py index 4810493..b373d16 100644 --- a/tests/test_filesystem_side.py +++ b/tests/test_filesystem_side.py @@ -2,7 +2,6 @@ from typing import Sequence import pytest - from syncall.filesystem.filesystem_file import FilesystemFile from syncall.filesystem.filesystem_side import FilesystemSide @@ -32,10 +31,9 @@ def test_create_new_item(fs_side: FilesystemSide): fs_side.add_item(new_item) # get the newly created item - make sure that its the same item as returned by - # get_all_items() all_items_after_addition = fs_side.get_all_items() assert len(all_items_after_addition) == prev_len + 1 - fs_file = [item for item in all_items_after_addition if item.id == new_id][0] + fs_file = next(item for item in all_items_after_addition if item.id == new_id) fs_file2 = fs_side.get_item(item_id=new_id) assert fs_file == fs_file2 diff --git a/tests/test_gcal.py b/tests/test_gcal.py index 806c478..f4c2c0d 100644 --- a/tests/test_gcal.py +++ b/tests/test_gcal.py @@ -1,28 +1,22 @@ import datetime -from typing import Any +import syncall.google.gcal_side as side from bubop import is_same_datetime from dateutil.tz import gettz, tzutc - -import syncall.google.gcal_side as side +from syncall.types import GoogleDateT localzone = gettz("Europe/Athens") # Monkeypatch the function to always return Eruope/Athens for UT determinism def assume_local_tz_if_none_(dt: datetime.datetime): - if dt.tzinfo is None: - out = dt.replace(tzinfo=localzone) - else: - out = dt - - return out + return dt if dt.tzinfo is not None else dt.replace(tzinfo=localzone) side.assume_local_tz_if_none = assume_local_tz_if_none_ -def assert_dt(dt_given: 
Any, dt_expected: datetime.datetime): +def assert_dt(dt_given: GoogleDateT, dt_expected: datetime.datetime): parse_datetime = side.GCalSide.parse_datetime dt_dt_given = parse_datetime(dt_given) diff --git a/tests/test_notion_todo_block.py b/tests/test_notion_todo_block.py index e58c666..22f50f9 100644 --- a/tests/test_notion_todo_block.py +++ b/tests/test_notion_todo_block.py @@ -1,7 +1,6 @@ import datetime from dateutil.tz import tzutc - from syncall.concrete_item import ItemKey, KeyType from syncall.notion.notion_todo_block import NotionTodoBlock from syncall.types import NotionTodoBlockItem @@ -16,7 +15,8 @@ def test_notion_todo_block_compare0(notion_simple_todo: NotionTodoBlockItem): def test_notion_todo_block_compare1( - notion_simple_todo: NotionTodoBlockItem, notion_chained_todo: NotionTodoBlockItem + notion_simple_todo: NotionTodoBlockItem, + notion_chained_todo: NotionTodoBlockItem, ): n0 = NotionTodoBlock.from_raw_item(notion_simple_todo) n1 = NotionTodoBlock.from_raw_item(notion_chained_todo) @@ -24,7 +24,8 @@ def test_notion_todo_block_compare1( def test_notion_todo_block_compare2( - notion_simple_todo: NotionTodoBlockItem, notion_simple_checked_todo: NotionTodoBlockItem + notion_simple_todo: NotionTodoBlockItem, + notion_simple_checked_todo: NotionTodoBlockItem, ): n0 = NotionTodoBlock.from_raw_item(notion_simple_todo) n1 = NotionTodoBlock.from_raw_item(notion_simple_checked_todo) @@ -33,7 +34,8 @@ def test_notion_todo_block_compare2( def test_notion_todo_block_compare3( - notion_simple_todo: NotionTodoBlockItem, notion_simple_archived_todo: NotionTodoBlockItem + notion_simple_todo: NotionTodoBlockItem, + notion_simple_archived_todo: NotionTodoBlockItem, ): n0 = NotionTodoBlock.from_raw_item(notion_simple_todo) n1 = NotionTodoBlock.from_raw_item(notion_simple_archived_todo) @@ -54,8 +56,8 @@ def test_notion_todo_block_compare4( def test_notion_todo_block0(notion_simple_todo: NotionTodoBlockItem): todo_block = 
NotionTodoBlock.from_raw_item(notion_simple_todo) assert todo_block.plaintext == "Lacinato kale" - assert todo_block.is_checked == False - assert todo_block.is_archived == False + assert todo_block.is_checked is False + assert todo_block.is_archived is False assert todo_block.last_modified_date == simple_last_modified_date assert todo_block.id == "7de89eb6-4ee1-472c-abcd-8231049e9d8d" @@ -63,8 +65,8 @@ def test_notion_todo_block0(notion_simple_todo: NotionTodoBlockItem): def test_notion_todo_block1(notion_chained_todo: NotionTodoBlockItem): todo_block = NotionTodoBlock.from_raw_item(notion_chained_todo) assert todo_block.plaintext == "Bringing it back with style and glamour" - assert todo_block.is_checked == False - assert todo_block.is_archived == False + assert todo_block.is_checked is False + assert todo_block.is_archived is False assert todo_block.last_modified_date == chained_last_modified_date assert todo_block.id == "9146e728-d7c4-4678-bab4-377a3991ebb8" @@ -72,8 +74,8 @@ def test_notion_todo_block1(notion_chained_todo: NotionTodoBlockItem): def test_notion_todo_block2(notion_simple_checked_todo: NotionTodoBlockItem): todo_block = NotionTodoBlock.from_raw_item(notion_simple_checked_todo) assert todo_block.plaintext == "Lacinato kale" - assert todo_block.is_checked == True - assert todo_block.is_archived == False + assert todo_block.is_checked is True + assert todo_block.is_archived is False assert todo_block.last_modified_date == simple_last_modified_date assert todo_block.id == "7de89eb6-4ee1-472c-abcd-8231049e9d8d" @@ -81,7 +83,7 @@ def test_notion_todo_block2(notion_simple_checked_todo: NotionTodoBlockItem): def test_notion_todo_block3(notion_simple_archived_todo: NotionTodoBlockItem): todo_block = NotionTodoBlock.from_raw_item(notion_simple_archived_todo) assert todo_block.plaintext == "Lacinato kale" - assert todo_block.is_checked == False - assert todo_block.is_archived == True + assert todo_block.is_checked is False + assert todo_block.is_archived is 
True assert todo_block.last_modified_date == simple_last_modified_date assert todo_block.id == "7de89eb6-4ee1-472c-abcd-8231049e9d8d" diff --git a/tests/test_tw_asana_conversions.py b/tests/test_tw_asana_conversions.py index 712e896..5d726aa 100644 --- a/tests/test_tw_asana_conversions.py +++ b/tests/test_tw_asana_conversions.py @@ -1,7 +1,4 @@ -from pathlib import Path - import yaml - from syncall.asana.asana_task import AsanaTask from syncall.tw_asana_utils import convert_asana_to_tw, convert_tw_to_asana @@ -11,21 +8,14 @@ class TestTwAsanaConversions(GenericTestCase): """Test item conversions - TW <-> Asana.""" - @classmethod - def setUpClass(cls): - pass - - def setUp(self): - super(TestTwAsanaConversions, self).setUp() - def get_keys_to_match(self): return set(self.tw_item.keys()).intersection( - ("description", "due", "modified", "status") + ("description", "due", "modified", "status"), ) def load_sample_items(self): - with open(Path(GenericTestCase.DATA_FILES_PATH, "sample_items.yaml"), "r") as fname: - conts = yaml.load(fname, Loader=yaml.Loader) + with (GenericTestCase.DATA_FILES_PATH / "sample_items.yaml").open() as fname: + conts = yaml.load(fname, Loader=yaml.Loader) # noqa: S506 self.asana_task = conts["asana_task"] self.tw_item_expected = conts["tw_item_expected"] @@ -40,14 +30,14 @@ def test_tw_asana_basic_convert(self): self.load_sample_items() asana_task_out = convert_tw_to_asana(self.tw_item) for key in AsanaTask._key_names: - self.assertEqual(asana_task_out[key], self.asana_task_expected[key], key) + assert asana_task_out[key] == self.asana_task_expected[key], key def test_asana_tw_basic_convert(self): """Basic Asana -> TW conversion.""" self.load_sample_items() tw_item_out = convert_asana_to_tw(self.asana_task) for key in self.get_keys_to_match(): - self.assertEqual(tw_item_out[key], self.tw_item_expected[key]) + assert tw_item_out[key] == self.tw_item_expected[key] def test_tw_asana_n_back(self): """TW -> Asana -> TW conversion""" @@ -56,14 
+46,14 @@ def test_tw_asana_n_back(self): for key in self.get_keys_to_match(): if key in self.tw_item: - self.assertIn(key, tw_item_out) - self.assertEqual(self.tw_item[key], tw_item_out[key]) + assert key in tw_item_out + assert self.tw_item[key] == tw_item_out[key] if key in tw_item_out: - self.assertIn(key, self.tw_item) - self.assertEqual(tw_item_out[key], self.tw_item[key]) + assert key in self.tw_item + assert tw_item_out[key] == self.tw_item[key] def test_asana_tw_n_back_basic(self): - """Asana -> TW -> Asana conversion""" + """Test Asana -> TW -> Asana conversion.""" self.load_sample_items() asana_task_out = convert_tw_to_asana(convert_asana_to_tw(self.asana_task)) @@ -76,21 +66,22 @@ def test_asana_tw_n_back_basic(self): "name", ]: if key in self.asana_task: - self.assertIn(key, asana_task_out) - self.assertEqual(self.asana_task[key], asana_task_out[key]) + assert key in asana_task_out + assert self.asana_task[key] == asana_task_out[key] if key in asana_task_out: - self.assertIn(key, self.asana_task) - self.assertEqual(asana_task_out[key], self.asana_task[key]) + assert key in self.asana_task + assert asana_task_out[key] == self.asana_task[key] def test_tw_asana_sets_both_due_dates(self): + """Test that due dates are set in both TW and Asana.""" self.load_sample_items() - self.assertIn("due", self.tw_item_w_due) - self.assertIsNotNone("due", self.tw_item_w_due) + assert "due" in self.tw_item_w_due + assert self.tw_item_w_due["due"] is not None asana_task = convert_tw_to_asana(self.tw_item_w_due) - self.assertIn("due_at", asana_task) - self.assertEqual(asana_task["due_at"], self.tw_item_w_due["due"]) - self.assertIn("due_on", asana_task) - self.assertEqual(asana_task["due_on"], asana_task["due_at"].date()) + assert "due_at" in asana_task + assert asana_task["due_at"] == self.tw_item_w_due["due"] + assert "due_on" in asana_task + assert asana_task["due_on"] == asana_task["due_at"].date() diff --git a/tests/test_tw_caldav_conversions.py 
b/tests/test_tw_caldav_conversions.py old mode 100755 new mode 100644 index dfcf731..b38c2a5 --- a/tests/test_tw_caldav_conversions.py +++ b/tests/test_tw_caldav_conversions.py @@ -1,8 +1,8 @@ -from pathlib import Path -from typing import Dict +from __future__ import annotations -import yaml +from typing import Any +import yaml from syncall.tw_caldav_utils import convert_caldav_to_tw, convert_tw_to_caldav from .generic_test_case import GenericTestCase @@ -11,21 +11,14 @@ class TestConversions(GenericTestCase): """Test item conversions - TW <-> Caldav Calendar.""" - @classmethod - def setUpClass(cls): - pass - - def setUp(self): - super(TestConversions, self).setUp() - def load_sample_items(self): - with open(Path(GenericTestCase.DATA_FILES_PATH, "sample_items.yaml"), "r") as fname: - conts = yaml.load(fname, Loader=yaml.Loader) + with (GenericTestCase.DATA_FILES_PATH / "sample_items.yaml").open() as fname: + conts = yaml.load(fname, Loader=yaml.Loader) # noqa: S506 self.caldav_item = conts["caldav_item"] self.tw_item_expected = conts["tw_item_expected"] - self.tw_item: Dict[str, Any] = conts["tw_item"] + self.tw_item: dict[str, Any] = conts["tw_item"] self.tw_item_w_due = conts["tw_item_w_due"] self.caldav_item_expected = conts["caldav_item_expected"] self.caldav_item_w_date_expected = conts["caldav_item_w_date_expected"] @@ -46,32 +39,31 @@ def load_sample_items(self): def test_tw_caldav_basic_convert(self): """Basic TW -> Caldav conversion.""" self.load_sample_items() - tw_item = self.tw_item caldav_item_out = convert_tw_to_caldav(self.tw_item) caldav_item_out.pop("created", "") - self.assertDictEqual(caldav_item_out, self.caldav_item_expected) + assert caldav_item_out == self.caldav_item_expected def test_tw_caldav_w_due_convert(self): """Basic TW (with 'due' subfield) -> Caldav conversion.""" self.load_sample_items() caldav_item_out = convert_tw_to_caldav(self.tw_item_w_due) caldav_item_out.pop("created", "") - self.assertDictEqual(caldav_item_out, 
self.caldav_item_w_date_expected) + assert caldav_item_out == self.caldav_item_w_date_expected def test_caldav_tw_basic_convert(self): """Basic Caldav -> TW conversion.""" self.load_sample_items() tw_item_out = convert_caldav_to_tw(self.caldav_item) - self.assertDictEqual(tw_item_out, self.tw_item_expected) + assert tw_item_out == self.tw_item_expected def test_caldav_tw_date_convert(self): """Caldav (with 'date' subfield) -> TW conversion.""" self.load_sample_items() tw_item_out = convert_caldav_to_tw(self.caldav_item_w_date) - self.assertDictEqual(tw_item_out, self.tw_item_w_date_expected) + assert tw_item_out == self.tw_item_w_date_expected def test_tw_caldav_n_back(self): - """TW -> Caldav -> TW conversion""" + """TW -> Caldav -> TW conversion.""" self.load_sample_items() # UGLY - Rewrite how we do testing for caldav<>tw and gcal<>tw @@ -80,16 +72,12 @@ def test_tw_caldav_n_back(self): intermediate_caldav.pop("created", "") tw_item_out = convert_caldav_to_tw(intermediate_caldav) - self.assertSetEqual( - set(self.tw_item) ^ set(tw_item_out), - set({"id", "urgency", "entry", "entry"}), - ) + assert set(self.tw_item) ^ set(tw_item_out) == set({"id", "urgency", "entry"}) intersection = set(self.tw_item) & set(tw_item_out) - self.assertDictEqual( - {i: self.tw_item[i] for i in intersection}, - {i: tw_item_out[i] for i in intersection}, - ) + assert {i: self.tw_item[i] for i in intersection} == { + i: tw_item_out[i] for i in intersection + } def test_caldav_tw_n_back(self): """Caldav -> TW -> Caldav conversion.""" @@ -98,13 +86,4 @@ def test_caldav_tw_n_back(self): # UGLY - Rewrite how we do testing for caldav<>tw and gcal<>tw caldav_item_out["priority"] = "" - - self.assertSetEqual( - set(self.caldav_item) ^ set(caldav_item_out), - set( - { - "id", - } - ), - ) - # can't really check the description field.. 
+ assert set(self.caldav_item) ^ set(caldav_item_out) == {"id"} diff --git a/tests/test_tw_gcal.py b/tests/test_tw_gcal.py index 24e65cf..549ac3c 100644 --- a/tests/test_tw_gcal.py +++ b/tests/test_tw_gcal.py @@ -3,7 +3,6 @@ import pytest from dateutil.tz import tzutc - from syncall.tw_gcal_utils import convert_gcal_to_tw, convert_tw_to_gcal from syncall.types import GCalItem, TwItem diff --git a/tests/test_tw_gkeep.py b/tests/test_tw_gkeep.py index ab1fc5b..3c41578 100644 --- a/tests/test_tw_gkeep.py +++ b/tests/test_tw_gkeep.py @@ -1,6 +1,5 @@ import pytest from bubop.time import format_datetime_tz - from syncall.google.gkeep_todo_item import GKeepTodoItem from syncall.tw_gkeep_utils import convert_gkeep_todo_to_tw, convert_tw_to_gkeep_todo from syncall.types import TwItem diff --git a/tests/test_tw_gtasks_conversions.py b/tests/test_tw_gtasks_conversions.py index a7e6cb0..efd8442 100644 --- a/tests/test_tw_gtasks_conversions.py +++ b/tests/test_tw_gtasks_conversions.py @@ -1,7 +1,6 @@ import pytest - from syncall.tw_gtasks_utils import convert_gtask_to_tw, convert_tw_to_gtask -from syncall.types import GTasksItem, TwItem, TwRawItem +from syncall.types import GTasksItem, TwItem # test conversions ---------------------------------------------------------------------------- diff --git a/tests/test_tw_notion.py b/tests/test_tw_notion.py index 5b1eda3..13187da 100644 --- a/tests/test_tw_notion.py +++ b/tests/test_tw_notion.py @@ -1,11 +1,14 @@ -from typing import List +from __future__ import annotations -import pytest +from typing import TYPE_CHECKING +import pytest from syncall.notion.notion_side import NotionSide from syncall.notion.notion_todo_block import NotionTodoBlock from syncall.tw_notion_utils import convert_notion_to_tw, convert_tw_to_notion -from syncall.types import NotionPageContents, NotionTodoBlockItem, TwItem + +if TYPE_CHECKING: + from syncall.types import NotionPageContents, NotionTodoBlockItem, TwItem # test conversions 
---------------------------------------------------------------------------- @@ -42,9 +45,9 @@ def test_convert_tw_to_notion(tw_task: TwItem): def test_find_todos_in_page(page_contents: NotionPageContents): todos = NotionSide.find_todos(page_contents) assert len(todos) == 6 - is_checked: List[bool] = [True, False, False, False, False, False] - is_archived: List[bool] = [False for _ in range(6)] - plaintext: List[str] = [ + is_checked: list[bool] = [True, False, False, False, False, False] + is_archived: list[bool] = [False for _ in range(6)] + plaintext: list[str] = [ "Lacinato kale", "Bringing it back with style and glamour", "Highlight any text, and use the menu that pops up to style your writing however you" diff --git a/tests/test_util_methods.py b/tests/test_util_methods.py old mode 100755 new mode 100644 index 08cd083..52a58df --- a/tests/test_util_methods.py +++ b/tests/test_util_methods.py @@ -9,12 +9,8 @@ class TestTW(GenericTestCase): """Test TaskWarriorSide methods.""" - @classmethod - def setUpClass(cls): - pass - def setUp(self): - super(TestTW, self).setUp() + super().setUp() # Make sure we're in the test directory for these tests os.chdir(str(Path(__file__).parent)) @@ -24,11 +20,8 @@ def test_get_items(self): items = self.tw_side.get_all_items() # assert on the status - self.assertTrue( - all((i["status"] == "completed" or i["status"] == "pending") for i in items) - ) + assert all((i["status"] == "completed" or i["status"] == "pending") for i in items) # must be sorted by ID by default ids = [i["id"] for i in items] # type: ignore - self.assertListEqual(ids, sorted(ids)) - del items, ids + assert ids == sorted(ids)