diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..18a8f1a --- /dev/null +++ b/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2024 Berlin Institute of Health, Manuel Holtgrewe + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..56b1c7b --- /dev/null +++ b/Makefile @@ -0,0 +1,54 @@ +.PHONY: help +help: + @echo "Usage: make [target]" + @echo "Targets:" + @echo " format Format the code" + @echo " lint Lint the code" + @echo " build Build the package" + @echo " install-e Install the package in editable mode" + @echo " help Show this help message" + @echo "" + @echo " format-isort Format the code with isort" + @echo " format-black Format the code with black" + @echo " lint-isort Lint the code with isort" + @echo " lint-black Lint the code with black" + @echo " lint-pyright Lint the code with pyright" + @echo " lint-flake8 Lint the code with flake8" + +.PHONY: format +format: format-isort format-black + +.PHONY: format-isort +format-isort: + isort --force-sort-within-sections --profile=black . + +.PHONY: format-black +format-black: + black -l 100 . + +.PHONY: lint +lint: lint-isort lint-black lint-pyright lint-flake8 + +.PHONY: lint-isort +lint-isort: + isort --force-sort-within-sections --profile=black --check . + +.PHONY: lint-black +lint-black: + black -l 100 --check . + +.PHONY: lint-pyright +lint-pyright: + pyright + +.PHONY: lint-flake8 +lint-flake8: + flake8 . + +.PHONY: build +build: + python -m pip install --upgrade build && python -m build + +.PHONY: install-e +install-e: + python -m pip install --upgrade -e . 
diff --git a/Pipfile b/Pipfile new file mode 100644 index 0000000..9707634 --- /dev/null +++ b/Pipfile @@ -0,0 +1,24 @@ +[[source]] +url = "https://pypi.org/simple" +verify_ssl = true +name = "pypi" + +[packages] +ldap3 = "~=2.9" +pydantic = "~=2.6" +typer = "*" +httpx = "*" +mechanize = "*" +rich = "*" +pydantic-settings = "*" +xattr = "*" +pyhumps = "*" + +[dev-packages] +pyright = "*" +black = "*" +hatchling = "*" +ipdb = "*" + +[requires] +python_version = "3.10" diff --git a/Pipfile.lock b/Pipfile.lock new file mode 100644 index 0000000..873158b --- /dev/null +++ b/Pipfile.lock @@ -0,0 +1,709 @@ +{ + "_meta": { + "hash": { + "sha256": "f1303f68553958f50fdb2d8d728c5ba95ac9cf7bf1a5cc296d8892a0e4a8212b" + }, + "pipfile-spec": 6, + "requires": { + "python_version": "3.10" + }, + "sources": [ + { + "name": "pypi", + "url": "https://pypi.org/simple", + "verify_ssl": true + } + ] + }, + "default": { + "annotated-types": { + "hashes": [ + "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43", + "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d" + ], + "markers": "python_version >= '3.8'", + "version": "==0.6.0" + }, + "anyio": { + "hashes": [ + "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8", + "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6" + ], + "markers": "python_version >= '3.8'", + "version": "==4.3.0" + }, + "asttokens": { + "hashes": [ + "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24", + "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0" + ], + "version": "==2.4.1" + }, + "certifi": { + "hashes": [ + "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f", + "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1" + ], + "markers": "python_version >= '3.6'", + "version": "==2024.2.2" + }, + "cffi": { + "hashes": [ + 
"sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc", + "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a", + "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417", + "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab", + "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520", + "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36", + "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743", + "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8", + "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed", + "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684", + "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56", + "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324", + "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d", + "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235", + "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e", + "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088", + "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000", + "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7", + "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e", + "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673", + "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c", + "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe", + "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2", + "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098", + "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8", + "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a", 
+ "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0", + "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b", + "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896", + "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e", + "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9", + "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2", + "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b", + "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6", + "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404", + "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f", + "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0", + "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4", + "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc", + "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936", + "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba", + "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872", + "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb", + "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614", + "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1", + "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d", + "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969", + "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b", + "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4", + "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627", + "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956", + 
"sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357" + ], + "markers": "python_version >= '3.8'", + "version": "==1.16.0" + }, + "click": { + "hashes": [ + "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28", + "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de" + ], + "markers": "python_version >= '3.7'", + "version": "==8.1.7" + }, + "decorator": { + "hashes": [ + "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330", + "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186" + ], + "markers": "python_version < '3.11' and python_version >= '3.7'", + "version": "==5.1.1" + }, + "exceptiongroup": { + "hashes": [ + "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14", + "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68" + ], + "markers": "python_version < '3.11'", + "version": "==1.2.0" + }, + "executing": { + "hashes": [ + "sha256:35afe2ce3affba8ee97f2d69927fa823b08b472b7b994e36a52a964b93d16147", + "sha256:eac49ca94516ccc753f9fb5ce82603156e590b27525a8bc32cce8ae302eb61bc" + ], + "markers": "python_version >= '3.5'", + "version": "==2.0.1" + }, + "h11": { + "hashes": [ + "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d", + "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761" + ], + "markers": "python_version >= '3.7'", + "version": "==0.14.0" + }, + "html5lib": { + "hashes": [ + "sha256:0d78f8fde1c230e99fe37986a60526d7049ed4bf8a9fadbad5f00e22e58e041d", + "sha256:b2e5b40261e20f354d198eae92afc10d750afb487ed5e50f9c4eaf07c184146f" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", + "version": "==1.1" + }, + "httpcore": { + "hashes": [ + "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61", + "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5" + ], + "markers": "python_version >= '3.8'", + 
"version": "==1.0.5" + }, + "httpx": { + "hashes": [ + "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5", + "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5" + ], + "index": "pypi", + "version": "==0.27.0" + }, + "idna": { + "hashes": [ + "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc", + "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0" + ], + "markers": "python_version >= '3.5'", + "version": "==3.7" + }, + "ipdb": { + "hashes": [ + "sha256:45529994741c4ab6d2388bfa5d7b725c2cf7fe9deffabdb8a6113aa5ed449ed4", + "sha256:e3ac6018ef05126d442af680aad863006ec19d02290561ac88b8b1c0b0cfc726" + ], + "index": "pypi", + "version": "==0.13.13" + }, + "ipython": { + "hashes": [ + "sha256:07232af52a5ba146dc3372c7bf52a0f890a23edf38d77caef8d53f9cdc2584c1", + "sha256:7468edaf4f6de3e1b912e57f66c241e6fd3c7099f2ec2136e239e142e800274d" + ], + "markers": "python_version < '3.11' and python_version >= '3.7'", + "version": "==8.23.0" + }, + "jedi": { + "hashes": [ + "sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd", + "sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0" + ], + "markers": "python_version >= '3.6'", + "version": "==0.19.1" + }, + "ldap3": { + "hashes": [ + "sha256:2bc966556fc4d4fa9f445a1c31dc484ee81d44a51ab0e2d0fd05b62cac75daa6", + "sha256:5630d1383e09ba94839e253e013f1aa1a2cf7a547628ba1265cb7b9a844b5687", + "sha256:5869596fc4948797020d3f03b7939da938778a0f9e2009f7a072ccf92b8e8d70", + "sha256:5ab7febc00689181375de40c396dcad4f2659cd260fc5e94c508b6d77c17e9d5", + "sha256:f3e7fc4718e3f09dda568b57100095e0ce58633bcabbed8667ce3f8fbaa4229f" + ], + "index": "pypi", + "version": "==2.9.1" + }, + "markdown-it-py": { + "hashes": [ + "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", + "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb" + ], + "markers": "python_version >= '3.8'", + "version": "==3.0.0" 
+ }, + "matplotlib-inline": { + "hashes": [ + "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90", + "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca" + ], + "markers": "python_version >= '3.8'", + "version": "==0.1.7" + }, + "mdurl": { + "hashes": [ + "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", + "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba" + ], + "markers": "python_version >= '3.7'", + "version": "==0.1.2" + }, + "mechanize": { + "hashes": [ + "sha256:69a5edb0962f921e8b10837368c2242d8ad049f0b91ff699ce7f601bfc431521", + "sha256:eafc3350fbcd895f4715712f0bdc7e544a1b2883e85f1987423b86bd9dd93ff7" + ], + "index": "pypi", + "version": "==0.4.9" + }, + "parso": { + "hashes": [ + "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18", + "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d" + ], + "markers": "python_version >= '3.6'", + "version": "==0.8.4" + }, + "pexpect": { + "hashes": [ + "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523", + "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f" + ], + "markers": "sys_platform != 'win32' and sys_platform != 'emscripten'", + "version": "==4.9.0" + }, + "prompt-toolkit": { + "hashes": [ + "sha256:3527b7af26106cbc65a040bcc84839a3566ec1b051bb0bfe953631e704b0ff7d", + "sha256:a11a29cb3bf0a28a387fe5122cdb649816a957cd9261dcedf8c9f1fef33eacf6" + ], + "markers": "python_full_version >= '3.7.0'", + "version": "==3.0.43" + }, + "ptyprocess": { + "hashes": [ + "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35", + "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220" + ], + "version": "==0.7.0" + }, + "pure-eval": { + "hashes": [ + "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350", + "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3" + ], + "version": "==0.2.2" 
+ }, + "pyasn1": { + "hashes": [ + "sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c", + "sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473" + ], + "markers": "python_version >= '3.8'", + "version": "==0.6.0" + }, + "pycparser": { + "hashes": [ + "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6", + "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc" + ], + "markers": "python_version >= '3.8'", + "version": "==2.22" + }, + "pydantic": { + "hashes": [ + "sha256:9dee74a271705f14f9a1567671d144a851c675b072736f0a7b2608fd9e495352", + "sha256:b5ecdd42262ca2462e2624793551e80911a1e989f462910bb81aef974b4bb383" + ], + "index": "pypi", + "version": "==2.7.0" + }, + "pydantic-core": { + "hashes": [ + "sha256:030e4f9516f9947f38179249778709a460a3adb516bf39b5eb9066fcfe43d0e6", + "sha256:09f03dfc0ef8c22622eaa8608caa4a1e189cfb83ce847045eca34f690895eccb", + "sha256:12a05db5013ec0ca4a32cc6433f53faa2a014ec364031408540ba858c2172bb0", + "sha256:14fe73881cf8e4cbdaded8ca0aa671635b597e42447fec7060d0868b52d074e6", + "sha256:1a0c3e718f4e064efde68092d9d974e39572c14e56726ecfaeebbe6544521f47", + "sha256:1be91ad664fc9245404a789d60cba1e91c26b1454ba136d2a1bf0c2ac0c0505a", + "sha256:201713f2f462e5c015b343e86e68bd8a530a4f76609b33d8f0ec65d2b921712a", + "sha256:2027493cc44c23b598cfaf200936110433d9caa84e2c6cf487a83999638a96ac", + "sha256:250ae39445cb5475e483a36b1061af1bc233de3e9ad0f4f76a71b66231b07f88", + "sha256:2533ad2883f001efa72f3d0e733fb846710c3af6dcdd544fe5bf14fa5fe2d7db", + "sha256:25595ac311f20e5324d1941909b0d12933f1fd2171075fcff763e90f43e92a0d", + "sha256:2684a94fdfd1b146ff10689c6e4e815f6a01141781c493b97342cdc5b06f4d5d", + "sha256:27f1009dc292f3b7ca77feb3571c537276b9aad5dd4efb471ac88a8bd09024e9", + "sha256:2adaeea59849ec0939af5c5d476935f2bab4b7f0335b0110f0f069a41024278e", + "sha256:2ae80f72bb7a3e397ab37b53a2b49c62cc5496412e71bc4f1277620a7ce3f52b", + 
"sha256:2d5728e93d28a3c63ee513d9ffbac9c5989de8c76e049dbcb5bfe4b923a9739d", + "sha256:2e91711e36e229978d92642bfc3546333a9127ecebb3f2761372e096395fc649", + "sha256:2fe0c1ce5b129455e43f941f7a46f61f3d3861e571f2905d55cdbb8b5c6f5e2c", + "sha256:38a5024de321d672a132b1834a66eeb7931959c59964b777e8f32dbe9523f6b1", + "sha256:3e352f0191d99fe617371096845070dee295444979efb8f27ad941227de6ad09", + "sha256:48dd883db92e92519201f2b01cafa881e5f7125666141a49ffba8b9facc072b0", + "sha256:54764c083bbe0264f0f746cefcded6cb08fbbaaf1ad1d78fb8a4c30cff999a90", + "sha256:54c7375c62190a7845091f521add19b0f026bcf6ae674bdb89f296972272e86d", + "sha256:561cf62c8a3498406495cfc49eee086ed2bb186d08bcc65812b75fda42c38294", + "sha256:56823a92075780582d1ffd4489a2e61d56fd3ebb4b40b713d63f96dd92d28144", + "sha256:582cf2cead97c9e382a7f4d3b744cf0ef1a6e815e44d3aa81af3ad98762f5a9b", + "sha256:58aca931bef83217fca7a390e0486ae327c4af9c3e941adb75f8772f8eeb03a1", + "sha256:5f7973c381283783cd1043a8c8f61ea5ce7a3a58b0369f0ee0ee975eaf2f2a1b", + "sha256:6395a4435fa26519fd96fdccb77e9d00ddae9dd6c742309bd0b5610609ad7fb2", + "sha256:63d7523cd95d2fde0d28dc42968ac731b5bb1e516cc56b93a50ab293f4daeaad", + "sha256:641a018af4fe48be57a2b3d7a1f0f5dbca07c1d00951d3d7463f0ac9dac66622", + "sha256:667880321e916a8920ef49f5d50e7983792cf59f3b6079f3c9dac2b88a311d17", + "sha256:684d840d2c9ec5de9cb397fcb3f36d5ebb6fa0d94734f9886032dd796c1ead06", + "sha256:68717c38a68e37af87c4da20e08f3e27d7e4212e99e96c3d875fbf3f4812abfc", + "sha256:6b7bbb97d82659ac8b37450c60ff2e9f97e4eb0f8a8a3645a5568b9334b08b50", + "sha256:72722ce529a76a4637a60be18bd789d8fb871e84472490ed7ddff62d5fed620d", + "sha256:73c1bc8a86a5c9e8721a088df234265317692d0b5cd9e86e975ce3bc3db62a59", + "sha256:76909849d1a6bffa5a07742294f3fa1d357dc917cb1fe7b470afbc3a7579d539", + "sha256:76b86e24039c35280ceee6dce7e62945eb93a5175d43689ba98360ab31eebc4a", + "sha256:7a5d83efc109ceddb99abd2c1316298ced2adb4570410defe766851a804fcd5b", + "sha256:80e0e57cc704a52fb1b48f16d5b2c8818da087dbee6f98d9bf19546930dc64b5", 
+ "sha256:85233abb44bc18d16e72dc05bf13848a36f363f83757541f1a97db2f8d58cfd9", + "sha256:907a4d7720abfcb1c81619863efd47c8a85d26a257a2dbebdb87c3b847df0278", + "sha256:9376d83d686ec62e8b19c0ac3bf8d28d8a5981d0df290196fb6ef24d8a26f0d6", + "sha256:94b9769ba435b598b547c762184bcfc4783d0d4c7771b04a3b45775c3589ca44", + "sha256:9a29726f91c6cb390b3c2338f0df5cd3e216ad7a938762d11c994bb37552edb0", + "sha256:9b6431559676a1079eac0f52d6d0721fb8e3c5ba43c37bc537c8c83724031feb", + "sha256:9ece8a49696669d483d206b4474c367852c44815fca23ac4e48b72b339807f80", + "sha256:a139fe9f298dc097349fb4f28c8b81cc7a202dbfba66af0e14be5cfca4ef7ce5", + "sha256:a32204489259786a923e02990249c65b0f17235073149d0033efcebe80095570", + "sha256:a3982b0a32d0a88b3907e4b0dc36809fda477f0757c59a505d4e9b455f384b8b", + "sha256:aad17e462f42ddbef5984d70c40bfc4146c322a2da79715932cd8976317054de", + "sha256:b560b72ed4816aee52783c66854d96157fd8175631f01ef58e894cc57c84f0f6", + "sha256:b6b0e4912030c6f28bcb72b9ebe4989d6dc2eebcd2a9cdc35fefc38052dd4fe8", + "sha256:baf1c7b78cddb5af00971ad5294a4583188bda1495b13760d9f03c9483bb6203", + "sha256:c0295d52b012cbe0d3059b1dba99159c3be55e632aae1999ab74ae2bd86a33d7", + "sha256:c562b49c96906b4029b5685075fe1ebd3b5cc2601dfa0b9e16c2c09d6cbce048", + "sha256:c69567ddbac186e8c0aadc1f324a60a564cfe25e43ef2ce81bcc4b8c3abffbae", + "sha256:ca71d501629d1fa50ea7fa3b08ba884fe10cefc559f5c6c8dfe9036c16e8ae89", + "sha256:ca976884ce34070799e4dfc6fbd68cb1d181db1eefe4a3a94798ddfb34b8867f", + "sha256:d0491006a6ad20507aec2be72e7831a42efc93193d2402018007ff827dc62926", + "sha256:d074b07a10c391fc5bbdcb37b2f16f20fcd9e51e10d01652ab298c0d07908ee2", + "sha256:d2ce426ee691319d4767748c8e0895cfc56593d725594e415f274059bcf3cb76", + "sha256:d4284c621f06a72ce2cb55f74ea3150113d926a6eb78ab38340c08f770eb9b4d", + "sha256:d5e6b7155b8197b329dc787356cfd2684c9d6a6b1a197f6bbf45f5555a98d411", + "sha256:d816f44a51ba5175394bc6c7879ca0bd2be560b2c9e9f3411ef3a4cbe644c2e9", + 
"sha256:dd3f79e17b56741b5177bcc36307750d50ea0698df6aa82f69c7db32d968c1c2", + "sha256:dd63cec4e26e790b70544ae5cc48d11b515b09e05fdd5eff12e3195f54b8a586", + "sha256:de9d3e8717560eb05e28739d1b35e4eac2e458553a52a301e51352a7ffc86a35", + "sha256:df4249b579e75094f7e9bb4bd28231acf55e308bf686b952f43100a5a0be394c", + "sha256:e178e5b66a06ec5bf51668ec0d4ac8cfb2bdcb553b2c207d58148340efd00143", + "sha256:e60defc3c15defb70bb38dd605ff7e0fae5f6c9c7cbfe0ad7868582cb7e844a6", + "sha256:ee2794111c188548a4547eccc73a6a8527fe2af6cf25e1a4ebda2fd01cdd2e60", + "sha256:ee7ccc7fb7e921d767f853b47814c3048c7de536663e82fbc37f5eb0d532224b", + "sha256:ee9cf33e7fe14243f5ca6977658eb7d1042caaa66847daacbd2117adb258b226", + "sha256:f0f17814c505f07806e22b28856c59ac80cee7dd0fbb152aed273e116378f519", + "sha256:f3202a429fe825b699c57892d4371c74cc3456d8d71b7f35d6028c96dfecad31", + "sha256:f7054fdc556f5421f01e39cbb767d5ec5c1139ea98c3e5b350e02e62201740c7", + "sha256:fd1a9edb9dd9d79fbeac1ea1f9a8dd527a6113b18d2e9bcc0d541d308dae639b" + ], + "markers": "python_version >= '3.8'", + "version": "==2.18.1" + }, + "pydantic-settings": { + "hashes": [ + "sha256:00b9f6a5e95553590434c0fa01ead0b216c3e10bc54ae02e37f359948643c5ed", + "sha256:0235391d26db4d2190cb9b31051c4b46882d28a51533f97440867f012d4da091" + ], + "index": "pypi", + "version": "==2.2.1" + }, + "pygments": { + "hashes": [ + "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c", + "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367" + ], + "markers": "python_version >= '3.7'", + "version": "==2.17.2" + }, + "pyhumps": { + "hashes": [ + "sha256:060e1954d9069f428232a1adda165db0b9d8dfdce1d265d36df7fbff540acfd6", + "sha256:498026258f7ee1a8e447c2e28526c0bea9407f9a59c03260aee4bd6c04d681a3" + ], + "index": "pypi", + "version": "==3.8.0" + }, + "python-dotenv": { + "hashes": [ + "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca", + "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a" + ], 
+ "markers": "python_version >= '3.8'", + "version": "==1.0.1" + }, + "rich": { + "hashes": [ + "sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222", + "sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432" + ], + "index": "pypi", + "version": "==13.7.1" + }, + "shellingham": { + "hashes": [ + "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", + "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de" + ], + "markers": "python_version >= '3.7'", + "version": "==1.5.4" + }, + "six": { + "hashes": [ + "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", + "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==1.16.0" + }, + "sniffio": { + "hashes": [ + "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", + "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc" + ], + "markers": "python_version >= '3.7'", + "version": "==1.3.1" + }, + "stack-data": { + "hashes": [ + "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9", + "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695" + ], + "version": "==0.6.3" + }, + "tomli": { + "hashes": [ + "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc", + "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f" + ], + "markers": "python_version < '3.11' and python_version >= '3.7'", + "version": "==2.0.1" + }, + "traitlets": { + "hashes": [ + "sha256:8cdd83c040dab7d1dee822678e5f5d100b514f7b72b01615b26fc5718916fdf9", + "sha256:fcdf85684a772ddeba87db2f398ce00b40ff550d1528c03c14dbf6a02003cd80" + ], + "markers": "python_version >= '3.8'", + "version": "==5.14.2" + }, + "typer": { + "hashes": [ + "sha256:070d7ca53f785acbccba8e7d28b08dcd88f79f1fbda035ade0aecec71ca5c914", + 
"sha256:49e73131481d804288ef62598d97a1ceef3058905aa536a1134f90891ba35482" + ], + "index": "pypi", + "version": "==0.12.3" + }, + "typing-extensions": { + "hashes": [ + "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0", + "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a" + ], + "markers": "python_version >= '3.8'", + "version": "==4.11.0" + }, + "wcwidth": { + "hashes": [ + "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859", + "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5" + ], + "version": "==0.2.13" + }, + "webencodings": { + "hashes": [ + "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78", + "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923" + ], + "version": "==0.5.1" + }, + "xattr": { + "hashes": [ + "sha256:00d2b415cf9d6a24112d019e721aa2a85652f7bbc9f3b9574b2d1cd8668eb491", + "sha256:0683dae7609f7280b0c89774d00b5957e6ffcb181c6019c46632b389706b77e6", + "sha256:08f61cbed52dc6f7c181455826a9ff1e375ad86f67dd9d5eb7663574abb32451", + "sha256:0a9c431b0e66516a078125e9a273251d4b8e5ba84fe644b619f2725050d688a0", + "sha256:0f06e0c1e4d06b4e0e49aaa1184b6f0e81c3758c2e8365597918054890763b53", + "sha256:1a5921ea3313cc1c57f2f53b63ea8ca9a91e48f4cc7ebec057d2447ec82c7efe", + "sha256:23705c7079b05761ff2fa778ad17396e7599c8759401abc05b312dfb3bc99f69", + "sha256:24d97f0d28f63695e3344ffdabca9fcc30c33e5c8ccc198c7524361a98d526f2", + "sha256:27272afeba8422f2a9d27e1080a9a7b807394e88cce73db9ed8d2dde3afcfb87", + "sha256:46a641ac038a9f53d2f696716147ca4dbd6a01998dc9cd4bc628801bc0df7f4d", + "sha256:47a3bdfe034b4fdb70e5941d97037405e3904accc28e10dbef6d1c9061fb6fd7", + "sha256:4cb70c16e7c3ae6ba0ab6c6835c8448c61d8caf43ea63b813af1f4dbe83dd156", + "sha256:54cb15cd94e5ef8a0ef02309f1bf973ba0e13c11e87686e983f371948cfee6af", + "sha256:6461a43b585e5f2e049b39bcbfcb6391bfef3c5118231f1b15d10bdb89ef17fe", + 
"sha256:6480589c1dac7785d1f851347a32c4a97305937bf7b488b857fe8b28a25de9e9", + "sha256:687e7d18611ef8d84a6ecd8f4d1ab6757500c1302f4c2046ce0aa3585e13da3f", + "sha256:6881b120f9a4b36ccd8a28d933bc0f6e1de67218b6ce6e66874e0280fc006844", + "sha256:6ad47d89968c9097900607457a0c89160b4771601d813e769f68263755516065", + "sha256:78b377832dd0ee408f9f121a354082c6346960f7b6b1480483ed0618b1912120", + "sha256:793c01deaadac50926c0e1481702133260c7cb5e62116762f6fe1543d07b826f", + "sha256:7a92aff66c43fa3e44cbeab7cbeee66266c91178a0f595e044bf3ce51485743b", + "sha256:7e4ca0956fd11679bb2e0c0d6b9cdc0f25470cc00d8da173bb7656cc9a9cf104", + "sha256:83652910ef6a368b77b00825ad67815e5c92bfab551a848ca66e9981d14a7519", + "sha256:9013f290387f1ac90bccbb1926555ca9aef75651271098d99217284d9e010f7c", + "sha256:918e1f83f2e8a072da2671eac710871ee5af337e9bf8554b5ce7f20cdb113186", + "sha256:96ca300c0acca4f0cddd2332bb860ef58e1465d376364f0e72a1823fdd58e90d", + "sha256:9b1664edf003153ac8d1911e83a0fc60db1b1b374ee8ac943f215f93754a1102", + "sha256:9c5a78c7558989492c4cb7242e490ffb03482437bf782967dfff114e44242343", + "sha256:9d4f71b673339aeaae1f6ea9ef8ea6c9643c8cd0df5003b9a0eaa75403e2e06c", + "sha256:9dcd5dfbcee73c7be057676ecb900cabb46c691aff4397bf48c579ffb30bb963", + "sha256:a20de1c47b5cd7b47da61799a3b34e11e5815d716299351f82a88627a43f9a96", + "sha256:afacebbc1fa519f41728f8746a92da891c7755e6745164bd0d5739face318e86", + "sha256:b0d73150f2f9655b4da01c2369eb33a294b7f9d56eccb089819eafdbeb99f896", + "sha256:b489b7916f239100956ea0b39c504f3c3a00258ba65677e4c8ba1bd0b5513446", + "sha256:b6ceb9efe0657a982ccb8b8a2efe96b690891779584c901d2f920784e5d20ae3", + "sha256:b735ac2625a4fc2c9343b19f806793db6494336338537d2911c8ee4c390dda46", + "sha256:caab2c2986c30f92301f12e9c50415d324412e8e6a739a52a603c3e6a54b3610", + "sha256:ccab735d0632fe71f7d72e72adf886f45c18b7787430467ce0070207882cfe25", + "sha256:cd11e917f5b89f2a0ad639d9875943806c6c9309a3dd02da5a3e8ef92db7bed9", + "sha256:cebcf8a303a44fbc439b68321408af7267507c0d8643229dbb107f6c132d389c", 
+ "sha256:d1059b2f726e2702c8bbf9bbf369acfc042202a4cc576c2dec6791234ad5e948", + "sha256:d1418705f253b6b6a7224b69773842cac83fcbcd12870354b6e11dd1cd54630f", + "sha256:d44e8f955218638c9ab222eed21e9bd9ab430d296caf2176fb37abe69a714e5c", + "sha256:d6eb7d5f281014cd44e2d847a9107491af1bf3087f5afeded75ed3e37ec87239", + "sha256:dab29d9288aa28e68a6f355ddfc3f0a7342b40c9012798829f3e7bd765e85c2c", + "sha256:dba4f80b9855cc98513ddf22b7ad8551bc448c70d3147799ea4f6c0b758fb466", + "sha256:dc53cab265f6e8449bd683d5ee3bc5a191e6dd940736f3de1a188e6da66b0653", + "sha256:dd43978966de3baf4aea367c99ffa102b289d6c2ea5f3d9ce34a203dc2f2ab73", + "sha256:dda2684228798e937a7c29b0e1c7ef3d70e2b85390a69b42a1c61b2039ba81de", + "sha256:ded771eaf27bb4eb3c64c0d09866460ee8801d81dc21097269cf495b3cac8657", + "sha256:e0c80bbf55339c93770fc294b4b6586b5bf8e85ec00a4c2d585c33dbd84b5006", + "sha256:e189e440bcd04ccaad0474720abee6ee64890823ec0db361fb0a4fb5e843a1bf", + "sha256:e2255f36ebf2cb2dbf772a7437ad870836b7396e60517211834cf66ce678b595", + "sha256:ef2fa0f85458736178fd3dcfeb09c3cf423f0843313e25391db2cfd1acec8888", + "sha256:f6ad2a7bd5e6cf71d4a862413234a067cf158ca0ae94a40d4b87b98b62808498", + "sha256:fa6a7af7a4ada43f15ccc58b6f9adcdbff4c36ba040013d2681e589e07ae280a", + "sha256:fecbf3b05043ed3487a28190dec3e4c4d879b2fcec0e30bafd8ec5d4b6043630", + "sha256:ff6223a854229055e803c2ad0c0ea9a6da50c6be30d92c198cf5f9f28819a921" + ], + "index": "pypi", + "version": "==1.1.0" + } + }, + "develop": { + "black": { + "hashes": [ + "sha256:1bb9ca06e556a09f7f7177bc7cb604e5ed2d2df1e9119e4f7d2f1f7071c32e5d", + "sha256:21f9407063ec71c5580b8ad975653c66508d6a9f57bd008bb8691d273705adcd", + "sha256:4396ca365a4310beef84d446ca5016f671b10f07abdba3e4e4304218d2c71d33", + "sha256:44d99dfdf37a2a00a6f7a8dcbd19edf361d056ee51093b2445de7ca09adac965", + "sha256:5cd5b4f76056cecce3e69b0d4c228326d2595f506797f40b9233424e2524c070", + "sha256:64578cf99b6b46a6301bc28bdb89f9d6f9b592b1c5837818a177c98525dbe397", + 
"sha256:64e60a7edd71fd542a10a9643bf369bfd2644de95ec71e86790b063aa02ff745", + "sha256:652e55bb722ca026299eb74e53880ee2315b181dfdd44dca98e43448620ddec1", + "sha256:6644f97a7ef6f401a150cca551a1ff97e03c25d8519ee0bbc9b0058772882665", + "sha256:6ad001a9ddd9b8dfd1b434d566be39b1cd502802c8d38bbb1ba612afda2ef436", + "sha256:71d998b73c957444fb7c52096c3843875f4b6b47a54972598741fe9a7f737fcb", + "sha256:74eb9b5420e26b42c00a3ff470dc0cd144b80a766128b1771d07643165e08d0e", + "sha256:75a2d0b4f5eb81f7eebc31f788f9830a6ce10a68c91fbe0fade34fff7a2836e6", + "sha256:7852b05d02b5b9a8c893ab95863ef8986e4dda29af80bbbda94d7aee1abf8702", + "sha256:7f2966b9b2b3b7104fca9d75b2ee856fe3fdd7ed9e47c753a4bb1a675f2caab8", + "sha256:8e5537f456a22cf5cfcb2707803431d2feeb82ab3748ade280d6ccd0b40ed2e8", + "sha256:d4e71cdebdc8efeb6deaf5f2deb28325f8614d48426bed118ecc2dcaefb9ebf3", + "sha256:dae79397f367ac8d7adb6c779813328f6d690943f64b32983e896bcccd18cbad", + "sha256:e3a3a092b8b756c643fe45f4624dbd5a389f770a4ac294cf4d0fce6af86addaf", + "sha256:eb949f56a63c5e134dfdca12091e98ffb5fd446293ebae123d10fc1abad00b9e", + "sha256:f07b69fda20578367eaebbd670ff8fc653ab181e1ff95d84497f9fa20e7d0641", + "sha256:f95cece33329dc4aa3b0e1a771c41075812e46cf3d6e3f1dfe3d91ff09826ed2" + ], + "index": "pypi", + "version": "==24.4.0" + }, + "click": { + "hashes": [ + "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28", + "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de" + ], + "markers": "python_version >= '3.7'", + "version": "==8.1.7" + }, + "hatchling": { + "hashes": [ + "sha256:9d19a37e563cf767f996d298e1a5c20f299235d0f16a944aca8ed1569cf03d68", + "sha256:cb2892c05858cbeb313ec5e7f0f253550250827e33c59aa732168ef24ff30f71" + ], + "index": "pypi", + "version": "==1.24.0" + }, + "mypy-extensions": { + "hashes": [ + "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d", + "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782" + ], + "markers": "python_version >= 
'3.5'", + "version": "==1.0.0" + }, + "nodeenv": { + "hashes": [ + "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2", + "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5, 3.6'", + "version": "==1.8.0" + }, + "packaging": { + "hashes": [ + "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5", + "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9" + ], + "markers": "python_version >= '3.7'", + "version": "==24.0" + }, + "pathspec": { + "hashes": [ + "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", + "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712" + ], + "markers": "python_version >= '3.8'", + "version": "==0.12.1" + }, + "platformdirs": { + "hashes": [ + "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068", + "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768" + ], + "markers": "python_version >= '3.8'", + "version": "==4.2.0" + }, + "pluggy": { + "hashes": [ + "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981", + "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be" + ], + "markers": "python_version >= '3.8'", + "version": "==1.4.0" + }, + "pyright": { + "hashes": [ + "sha256:0995b6a95eb11bd26f093cd5dee3d5e7258441b1b94d4a171b5dc5b79a1d4f4e", + "sha256:185524a8d52f6f14bbd3b290b92ad905f25b964dddc9e7148aad760bd35c9f60" + ], + "index": "pypi", + "version": "==1.1.358" + }, + "setuptools": { + "hashes": [ + "sha256:6c1fccdac05a97e598fb0ae3bbed5904ccb317337a51139dcd51453611bbb987", + "sha256:c636ac361bc47580504644275c9ad802c50415c7522212252c033bd15f301f32" + ], + "markers": "python_version >= '3.8'", + "version": "==69.5.1" + }, + "tomli": { + "hashes": [ + "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc", + 
"sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f" + ], + "markers": "python_version < '3.11' and python_version >= '3.7'", + "version": "==2.0.1" + }, + "trove-classifiers": { + "hashes": [ + "sha256:49f40bb6a746b72a1cba4f8d55ee8252169cda0f70802e3fd24f04b7fb25a492", + "sha256:678bd6fcc5218d72e3304e27a608acc9b91e17bd00c3f3d8c968497c843ad98b" + ], + "version": "==2024.4.10" + }, + "typing-extensions": { + "hashes": [ + "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0", + "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a" + ], + "markers": "python_version >= '3.8'", + "version": "==4.11.0" + } + } +} diff --git a/README.md b/README.md new file mode 100644 index 0000000..bce423f --- /dev/null +++ b/README.md @@ -0,0 +1 @@ +# hpc-access command line interface diff --git a/hpc_access_cli/__init__.py b/hpc_access_cli/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/hpc_access_cli/config.py b/hpc_access_cli/config.py new file mode 100644 index 0000000..3ef0ce8 --- /dev/null +++ b/hpc_access_cli/config.py @@ -0,0 +1,108 @@ +"""Configuration for hpc-access-cli.""" + +import sys +from pathlib import Path +from typing import List + +import typer +from pydantic import BaseModel, HttpUrl, SecretStr +from pydantic_settings import BaseSettings, SettingsConfigDict +from rich.console import Console + +from hpc_access_cli.models import StateOperation + +#: The rich console to use for output. +console_err = Console(file=sys.stderr) + + +class LdapSettings(BaseModel): + """Configuration of LDAP.""" + + #: The hostname of the LDAP server. + server_host: str + #: The port of the LDAP server. + server_port: int = 389 + #: The distinguished name of the user to bind to the server. + bind_dn: str + #: The password of the user to bind to the server. + bind_pw: SecretStr + #: The base DN to search for users. 
def load_settings(config_path: str) -> Settings:
    """Load the hpc-access-cli configuration from the given path.

    :param config_path: The path to the JSON configuration file.
    :return: The loaded and validated configuration.
    :raises typer.Exit: If the configuration file does not exist.
    """
    path = Path(config_path)
    if not path.exists():
        console_err.log(f"ERROR: Configuration file {config_path} does not exist.", style="red")
        raise typer.Exit(1)
    # Pydantic performs the parsing and validation of the raw JSON text.
    return Settings.model_validate_json(path.read_text())
#: Regular expression for paths below the storage mount points.  The named
#: groups mirror the key structure of ``CEPHFS_TIER_MAPPING`` — the cephfs
#: volume, the tier folder (home/work/scratch/mirrored/unmirrored), the
#: entity kind (users/groups/projects) — plus the trailing directory name.
#: NOTE(review): the original pattern was invalid regex ("(?P" without a
#: group name, angle brackets lost in transit); the group names below are
#: reconstructed — confirm them against the consumer (``fs_validation``).
RE_PATH = r"/(?P<cephfs>cephfs-[12])/(?P<folder>[^/]+)/(?P<entity>[^/]+)/(?P<name>[^/]+)"
console to use for logging. +console_err = Console(file=sys.stderr) + + +def get_extended_attribute(path: str, attr_name: str) -> str: + """Get the value of an extended attribute.""" + try: + # Get the value of the specified extended attribute + value = xattr.getxattr(path, attr_name).decode("utf-8") + return value + except OSError as e: + if os.environ.get("DEBUG", "0") == "1": + return "0" + # Handle the case when the attribute is not found + if e.errno == errno.ENODATA: + raise ValueError(f"extended attribute {attr_name} not found") from e + else: + # Re-raise the exception for other errors + raise + + +def _transform_perms(perms: str) -> str: + """Transform the permissions string.""" + perms_user = perms[1:4].replace("-", "") + if "S" in perms_user: + perms_user = f"u={perms_user.replace('S', '')},u+s" + elif "S" in perms_user: + perms_user = f"u={perms_user.replace('s', 'x')},u+s" + else: + perms_user = f"u={perms_user},u-s" + perms_group = perms[4:7].replace("-", "") + if "S" in perms_group: + perms_group = f"g={perms_group.replace('S', '')},g+s" + elif "s" in perms_group: + perms_group = f"g={perms_group.replace('s', 'x')},g+s" + perms_other = perms[7:].replace("-", "").replace("S", "").replace("s", "x") + perms_other = f"o={perms_other},o-s" + return f"{perms_user},{perms_group},{perms_other}" + + +class FsResourceManager: + """Helper class to manage resources on file system. + + Effectively, it reads/writes the well-known folders and attributes. 
+ """ + + def __init__(self, *, prefix: str = ""): + self.path_tier1_home = f"{prefix}{BASE_PATH_TIER1}/home" + self.path_tier1_work = f"{prefix}{BASE_PATH_TIER1}/work" + self.path_tier1_scratch = f"{prefix}{BASE_PATH_TIER1}/scratch" + self.path_tier2_mirrored = f"{prefix}{BASE_PATH_TIER2}/mirrored" + self.path_tier2_unmirrored = f"{prefix}{BASE_PATH_TIER2}/unmirrored" + + def load_directories(self) -> List[FsDirectory]: + """Load the directories and their sizes.""" + result = [] + for path in (self.path_tier1_home, self.path_tier1_work, self.path_tier1_scratch): + for path_obj in Path(path).glob("*/*"): + if path_obj.is_dir(): + result.append(FsDirectory.from_path(str(path_obj))) + for path in (self.path_tier2_mirrored, self.path_tier2_unmirrored): + for path_obj in Path(path).glob("*/*"): + if path_obj.is_dir(): + result.append(FsDirectory.from_path(str(path_obj))) + result.sort(key=lambda x: x.path) + return result + + def apply_fs_op(self, fs_op: FsDirectoryOp, dry_run: bool = False): + """Apply the file system operations.""" + if fs_op.operation == StateOperation.CREATE: + self._fs_op_create(fs_op.directory, dry_run) + elif fs_op.operation == StateOperation.DISABLE: + self._fs_op_disable(fs_op.directory, dry_run) + elif fs_op.operation == StateOperation.UPDATE: + self._fs_op_update(fs_op.directory, fs_op.diff, dry_run) + + def _fs_op_create(self, directory: FsDirectory, dry_run: bool): + perms = _transform_perms(directory.perms) + console_err.log(f"+ mkdir -v -m {perms} -p {directory.path}") + console_err.log( + f"+ chown -c {directory.owner_name}:{directory.group_name} {directory.path}" + ) + if not dry_run: + check_call(["mkdir", "-v", "-m", perms, "-p", directory.path]) + check_call( + ["chown", "-c", f"{directory.owner_name}:{directory.group_name}", directory.path] + ) + + def _fs_op_disable(self, directory: FsDirectory, dry_run: bool): + console_err.log(f"+ setfattr -n ceph-quota.max_files -v 0 {directory.path}") + if not dry_run: + 
check_call(["setfattr", "-n", "ceph-quota.max_files", "-v", "0", directory.path]) + + def _fs_op_update( + self, directory: FsDirectory, diff: Dict[str, None | int | str], dry_run: bool + ): + for key, value in diff.items(): + if key == "quota_bytes": + if value is None: + console_err.log(f"+ setfattr -x ceph-quota.max_bytes {directory.path}") + if not dry_run: + check_call(["setfattr", "-x", "ceph-quota.max_bytes", directory.path]) + else: + console_err.log( + f"+ setfattr -n ceph-quota.max_bytes -v {value} {directory.path}" + ) + if not dry_run: + check_call( + [ + "setfattr", + "-n", + "ceph-quota.max_bytes", + "-v", + f"{value}", + directory.path, + ] + ) + elif key == "quota_files": + if value is None: + console_err.log(f"+ setfattr -x ceph-quota.max_files {directory.path}") + if not dry_run: + check_call(["setfattr", "-x", "ceph-quota.max_files", directory.path]) + else: + console_err.log( + f"+ setfattr -n ceph-quota.max_files -v {value} {directory.path}" + ) + if not dry_run: + check_call( + [ + "setfattr", + "-n", + "ceph-quota.max_files", + "-v", + f"{value}", + directory.path, + ] + ) + elif key in ["owner_name", "owner_uid"]: + console_err.log(f"+ chown -c {value} {directory.path}") + if not dry_run: + check_call(["chown", "-c", f"{value}", directory.path]) + elif key in ["group_name", "group_gid"]: + console_err.log(f"+ chgrp -c {value} {directory.path}") + if not dry_run: + check_call(["chgrp", "-c", f"{value}", directory.path]) + elif key == "perms": + perms = _transform_perms(directory.perms) + console_err.log(f"+ chmod -c {perms} {directory.path}") + if not dry_run: + check_call(["chmod", "-c", perms, directory.path]) + else: + raise ValueError(f"I don't know how to handle fs directory diff key '{key}'") diff --git a/hpc_access_cli/ldap.py b/hpc_access_cli/ldap.py new file mode 100644 index 0000000..349b1d5 --- /dev/null +++ b/hpc_access_cli/ldap.py @@ -0,0 +1,334 @@ +"""Code for interfacing with LDAP servers.""" + +import sys +from typing import 
def attribute_as_str(attribute: ldap3.Attribute) -> Optional[str]:
    """Return the attribute's first value as ``str``, or ``None`` if it has no values."""
    return str(attribute[0]) if len(attribute) else None
    def load_users(self) -> List[LdapUser]:
        """Load ``LdapUser`` records from the LDAP server.

        Searches the configured base DN for all ``posixAccount`` entries and
        converts them into ``LdapUser`` models.

        :return: The users found below ``self.config.search_base``.
        :raises Exception: If the LDAP search itself fails.
        :raises ValueError: If a mandatory attribute (uidNumber, gidNumber,
            cn, uid, homeDirectory, loginShell) is missing on an entry.
        """
        search_filter = "(&(objectClass=posixAccount)(uid=*))"

        console_err.log(f"Searching for users with filter {search_filter}...")
        if not self.connection.search(
            search_base=self.config.search_base,
            search_filter=search_filter,
            search_scope=ldap3.SUBTREE,
            attributes=[
                "sn",
                "givenName",
                "cn",
                "uid",
                "uidNumber",
                "gidNumber",
                "homeDirectory",
                "gecos",
                "loginShell",
                "mail",
                "displayName",
                "sshPublicKey",
            ],
        ):
            raise Exception("Failed to search for users.")
        result = []
        for entry in self.connection.entries:
            # GECOS is optional; parse it into the structured model if present.
            gecos_str = attribute_as_str(entry.gecos)
            gecos = Gecos.from_string(gecos_str) if gecos_str else None
            # Numeric uid/gid are mandatory; a value of 0 is also rejected here
            # (falsy) — presumably intentional since no system accounts are
            # expected below the search base.
            uid_str = attribute_as_str(entry.uidNumber)
            uid_number = int(uid_str) if uid_str else None
            if not uid_number:
                raise ValueError(f"Missing LDAP attribute uidNumber for {entry.entry_dn}")
            gid_str = attribute_as_str(entry.gidNumber)
            gid_number = int(gid_str) if gid_str else None
            if not gid_number:
                raise ValueError(f"Missing LDAP attribute gidNumber for {entry.entry_dn}")
            cn = attribute_as_str(entry.cn)
            if not cn:
                raise ValueError(f"Missing LDAP attribute cn for {entry.entry_dn}")
            uid = attribute_as_str(entry.uid)
            if not uid:
                raise ValueError(f"Missing LDAP attribute uid for {entry.entry_dn}")
            # Surname/given name are optional (service accounts may lack them).
            sn = attribute_as_str(entry.sn)
            given_name = attribute_as_str(entry.givenName)
            home_directory = attribute_as_str(entry.homeDirectory)
            if not home_directory:
                raise ValueError(f"Missing LDAP attribute homeDirectory for {entry.entry_dn}")
            login_shell = attribute_as_str(entry.loginShell)
            if not login_shell:
                raise ValueError(f"Missing LDAP attribute loginShell for {entry.entry_dn}")
            result.append(
                LdapUser(
                    dn=entry.entry_dn,
                    cn=cn,
                    uid=uid,
                    sn=sn,
                    mail=attribute_as_str(entry.mail),
                    given_name=given_name,
                    uid_number=uid_number,
                    gid_number=gid_number,
                    home_directory=home_directory,
                    login_shell=login_shell,
                    gecos=gecos,
                    ssh_public_key=attribute_list_as_str_list(entry.sshPublicKey),
                )
            )
        return result

    def apply_user_op(self, op: LdapUserOp, dry_run: bool):
        """Apply a user operation to the LDAP server.

        Dispatches to the create/disable/update helper based on ``op.operation``.
        """
        if op.operation == StateOperation.CREATE:
            self._user_op_create(op.user, dry_run)
        elif op.operation == StateOperation.DISABLE:
            self._user_op_disable(op.user, dry_run)
        elif op.operation == StateOperation.UPDATE:
            self._user_op_update(op.user, op.diff, dry_run)

    def _user_op_create(self, user: LdapUser, dry_run: bool):
        """Create a new LDAP user entry (no-op when ``dry_run`` is set)."""
        # Mandatory POSIX attributes; sn/givenName are only written if present.
        user_data = {
            "cn": user.cn,
            "uid": user.uid,
            "uidNumber": user.uid_number,
            "homeDirectory": user.home_directory,
        }
        if user.sn:
            user_data["sn"] = user.sn
        if user.given_name:
            user_data["givenName"] = user.given_name
        console_err.log(
            f"+ create LDAP user\nDN={user.dn}\nclasses={USER_OBJ_CLASSES}\ndata={user_data}"
        )
        if not dry_run:
            self.connection.add(
                user.dn,
                USER_OBJ_CLASSES,
                user_data,
            )

    def _user_op_disable(self, user: LdapUser, dry_run: bool):
        """Disable an LDAP user by setting their login shell to the disabled shell.

        :raises Exception: If the user cannot be found or the change cannot
            be committed.
        """
        console_err.log(f"+ disable LDAP user DN: {user.dn}")
        search_params = {
            "search_base": self.config.search_base,
            "search_filter": f"(&(objectClass=posixAccount)(uid={user.uid}))",
            "search_scope": ldap3.SUBTREE,
            "attributes": [
                "objectclass",
                "uid",
                "uidNumber",
                "telephoneNumber",
                "mail",
                "displayName",
                "sshPublicKey",
                "loginShell",
                "sn",
                "givenName",
            ],
            "paged_size": 20,
            "generator": False,
        }
        if not self.connection.extend.standard.paged_search(**search_params):
            msg = f"FATAL: could not find users with search base {self.config.search_base}"
            raise Exception(msg)
        # Re-fetch as a writable entry; only the login shell is modified.
        writable = self.connection.entries[0].entry_writable()
        writable["loginShell"] = LOGIN_SHELL_DISABLED
        if not dry_run:
            if not writable.entry_commit_changes():
                raise Exception(f"Failed to disable user {user.uid}.")
        else:
            # NOTE(review): log message has a typo ("diabled") and is emitted
            # on the dry-run path although nothing was changed.
            console_err.log(f"user diabled CN: {user.cn}")
_user_op_update( + self, + user: LdapUser, + diff: Dict[str, None | int | str | List[str] | Dict[str, Any]], + dry_run: bool, + ): + search_params = { + "search_base": self.config.search_base, + "search_filter": f"(&(objectClass=posixAccount)(uid={user.uid}))", + "search_scope": ldap3.SUBTREE, + "attributes": [ + "objectclass", + "uid", + "uidNumber", + "telephoneNumber", + "mail", + "displayName", + "sshPublicKey", + "loginShell", + "sn", + "givenName", + ], + "paged_size": 20, + "generator": False, + } + if not self.connection.extend.standard.paged_search(**search_params): + msg = f"FATAL: could not find users with search base {self.config.search_base}" + raise Exception(msg) + writable = self.connection.entries[0].entry_writable() + applied_diff = {} + for key, value in diff.items(): + key = humps.camelize(key) + if key == "gecos": + gecos: Gecos = value or Gecos() # type: ignore + applied_diff[key] = Gecos.model_validate(gecos).to_string() + elif key == "sshPublicKey": + # We only support clearing this list for now which is fine as the + # SSH keys live in the upstream ADs only. 
+ applied_diff[key] = [(ldap3.MODIFY_DELETE, x) for x in writable[key]] + else: + applied_diff[key] = value or "" + writable[key] = value or "" + console_err.log(f"+ update LDAP user DN: {user.dn}, diff: {applied_diff}") + if not dry_run: + if not writable.entry_commit_changes(): + raise Exception(f"Failed to disable user {user.uid}.") + else: + console_err.log(f"upser updated DN: {user.dn}") + + def load_groups(self) -> List[LdapGroup]: + """Load group names from the LDAP server.""" + search_filter = "(&(objectClass=posixGroup)(cn=*))" + + console_err.log(f"Searching for groups with filter {search_filter}...") + if not self.connection.search( + search_base=self.config.search_base, + search_filter=search_filter, + search_scope=ldap3.SUBTREE, + attributes=[ + "cn", + "gidNumber", + "bih-groupOwnerDN", + "bih-groupDelegateDNs", + "memberUid", + "description", + ], + ): + raise Exception("Failed to search for groups.") + result = [] + for entry in self.connection.entries: + cn = attribute_as_str(entry.cn) + if not cn: + raise ValueError(f"Missing LDAP attribute cn for {entry.entry_dn}") + gid_str = attribute_as_str(entry.gidNumber) + gid_number = int(gid_str) if gid_str else None + if not gid_number: + raise ValueError(f"Missing LDAP attribute gidNumber for {entry.entry_dn}") + owner_dn = attribute_as_str(entry["bih-groupOwnerDN"]) + delegate_dns = attribute_list_as_str_list(entry["bih-groupDelegateDNs"]) + member_uids = attribute_list_as_str_list(entry.memberUid) + result.append( + LdapGroup( + dn=entry.entry_dn, + cn=cn, + gid_number=gid_number, + description=attribute_as_str(entry.description), + owner_dn=owner_dn, + delegate_dns=delegate_dns, + member_uids=member_uids, + ) + ) + return result + + def apply_group_op(self, op: LdapGroupOp, dry_run: bool): + """Apply a group operation to the LDAP server.""" + if op == StateOperation.CREATE: + self._group_op_create(op.group, dry_run) + elif op == StateOperation.DISABLE: + self._group_op_disable(op.group, dry_run) + 
elif op == StateOperation.UPDATE: + self._group_op_update(op.group, op.diff, dry_run) + + def _group_op_create(self, group: LdapGroup, dry_run: bool): + pass + + def _group_op_disable(self, group: LdapGroup, dry_run: bool): + """Disabling a group in LDAP currently is a no-op as this is applied on + the file system by setting the file count quota to 0. + """ + _, _ = group, dry_run + + def _group_op_update( + self, + group: LdapGroup, + diff: Dict[str, None | int | str | List[str] | Dict[str, Any]], + dry_run: bool, + ): + console_err.log(f"+ update LDAP group DN: {group.dn}, diff: {diff}") + search_params = { + "search_base": self.config.search_base, + "search_filter": f"(&(objectClass=gidNumber)(gidNumber={group.gid_number}))", + "search_scope": ldap3.SUBTREE, + "attributes": ["*"], + "paged_size": 20, + "generator": False, + } + if not self.connection.extend.standard.paged_search(**search_params): + msg = f"FATAL: could not find group with search base {self.config.search_base}" + raise Exception(msg) + writable = self.connection.entries[0].entry_writable() + + for key, value in diff.items(): + writable[key] = value + if not dry_run: + if not writable.entry_commit_changes(): + raise Exception(f"Failed to update DN: {group.dn}.") + else: + console_err.log(f"group updated DN: {group.dn}") diff --git a/hpc_access_cli/main.py b/hpc_access_cli/main.py new file mode 100644 index 0000000..8368aa6 --- /dev/null +++ b/hpc_access_cli/main.py @@ -0,0 +1,172 @@ +import os +import sys +from typing import List + +import mechanize +import typer +from rich.console import Console +from typing_extensions import Annotated + +from hpc_access_cli.config import load_settings +from hpc_access_cli.constants import ENTITIES, ENTITY_USERS +from hpc_access_cli.fs import FsResourceManager +from hpc_access_cli.ldap import LdapConnection +from hpc_access_cli.models import StateOperation +from hpc_access_cli.states import ( + TargetStateBuilder, + TargetStateComparison, + 
convert_to_hpcaccess_state, + deploy_hpcaccess_state, + fs_validation, + gather_hpcaccess_state, + gather_system_state, +) + +#: The typer application object to use. +app = typer.Typer() +#: The rich console to use for output. +console_err = Console(file=sys.stderr) +console_out = Console(file=sys.stdout) + + +@app.command("mailman-sync") +def mailman_sync( + config_path: Annotated[ + str, typer.Option(..., help="path to configuration file") + ] = "/etc/hpc-access-cli/config.json", + dry_run: Annotated[bool, typer.Option(..., help="perform a dry run (no changes)")] = True, +): + """obtain email addresses of active users and sync to mailman""" + settings = load_settings(config_path) + dst_state = gather_hpcaccess_state(settings.hpc_access) + emails = list(sorted(user.email for user in dst_state.hpc_users.values() if user.email)) + console_err.log(f"will update to {len(emails)} email addresses") + console_err.log("\n".join(emails)) + + console_err.log(f"Opening URL to mailman '{settings.mailman.server_url}' ...") + br = mechanize.Browser() + br.set_handle_robots(False) + br.open(str(settings.mailman.server_url)) + console_err.log(" ... filling login form") + br.select_form(nr=0) + br["adminpw"] = settings.mailman.admin_password.get_secret_value() + console_err.log(" ... submitting login form") + _ = br.submit() + console_err.log(" ... filling sync membership list form") + br.select_form(nr=0) + br["memberlist"] = "\n".join(emails) + if br.forms()[0].action != str(settings.mailman.server_url): # type: ignore + raise Exception(f"unexpected form action {br.forms()[0].action}") # type: ignore + console_err.log(" ... submitting sync membership list form") + if dry_run: + console_err.log(" ... **dry run, not submitting**") + else: + _ = br.submit() + console_err.log("... 
@app.command("state-sync")
def sync_data(
    config_path: Annotated[
        str, typer.Option(..., help="path to configuration file")
    ] = "/etc/hpc-access-cli/config.json",
    # BUG FIX: the defaults below were ``= list`` — the builtin *type*, not an
    # empty list.  Because ``list`` is truthy, ``ldap_user_ops or
    # list(StateOperation)`` then evaluated to the builtin type itself and the
    # "all operations" fallback never triggered.  The empty-list defaults are
    # read-only (never mutated), so sharing them across calls is safe.
    ldap_user_ops: Annotated[
        List[StateOperation],
        typer.Option(..., help="user operations to perform (default: all)"),
    ] = [],  # noqa: B006
    ldap_group_ops: Annotated[
        List[StateOperation],
        typer.Option(..., help="group operations to perform (default: all)"),
    ] = [],  # noqa: B006
    fs_ops: Annotated[
        List[StateOperation],
        typer.Option(..., help="file system operations to perform (default: all)"),
    ] = [],  # noqa: B006
    dry_run: Annotated[bool, typer.Option(..., help="perform a dry run (no changes)")] = True,
):
    """sync hpc-access state to HPC LDAP"""
    # An empty selection means "perform all operations of that kind".
    settings = load_settings(config_path).model_copy(
        update={
            "ldap_user_ops": ldap_user_ops or list(StateOperation),
            "ldap_group_ops": ldap_group_ops or list(StateOperation),
            "fs_ops": fs_ops or list(StateOperation),
            "dry_run": dry_run,
        }
    )
    # Gather the current system state, build the target state, and compute
    # the operations that transform the former into the latter.
    src_state = gather_system_state(settings)
    dst_builder = TargetStateBuilder(settings.hpc_access, src_state)
    dst_state = dst_builder.run()
    comparison = TargetStateComparison(settings.hpc_access, src_state, dst_state)
    operations = comparison.run()
    connection = LdapConnection(settings.ldap_hpc)
    console_err.log(f"applying LDAP group operations now, dry_run={dry_run}")
    for group_op in operations.ldap_group_ops:
        connection.apply_group_op(group_op, dry_run)
    console_err.log(f"applying LDAP user operations now, dry_run={dry_run}")
    for user_op in operations.ldap_user_ops:
        connection.apply_user_op(user_op, dry_run)
    console_err.log(f"applying file system operations now, dry_run={dry_run}")
    # In DEBUG mode operate on the sshfs-mounted copy of the file system.
    fs_mgr = FsResourceManager(
        prefix="/data/sshfs" if os.environ.get("DEBUG", "0") == "1" else ""
    )
    for fs_op in operations.fs_ops:
        fs_mgr.apply_fs_op(fs_op, dry_run)
+ d = getattr(dst_state, f"hpc_{entity}") + p = 4 - int(entity == ENTITY_USERS) + d[hpcaccess[entity][name].uuid].resources_used[resource] = fs_data.rbytes / 1024**p + + if not dry_run: + deploy_hpcaccess_state(settings.hpc_access, dst_state) + + console_err.log(f"syncing storage usage to hpc-access now, dry_run={dry_run}") + + +if __name__ == "__main__": + app() diff --git a/hpc_access_cli/models.py b/hpc_access_cli/models.py new file mode 100644 index 0000000..6b27395 --- /dev/null +++ b/hpc_access_cli/models.py @@ -0,0 +1,409 @@ +"""Pydantic models for representing records.""" + +import datetime +import enum +import grp +import os +import pwd +import stat +from typing import Any, Dict, List, Optional +from uuid import UUID + +from pydantic import BaseModel + +#: Login shell to use for disabled users. +LOGIN_SHELL_DISABLED = "/usr/sbin/nologin" + + +class FsDirectory(BaseModel): + """Information about a file system directory. + + This consists of the classic POSIX file system attributes and + additional Ceph extended attributes. + """ + + #: Absolute path to the directory. + path: str + #: The username of the owner of the directory. + owner_name: str + #: The user UID of the owner of the directory. + owner_uid: int + #: The group of the directory. + group_name: str + #: The group GID of the directory. + group_gid: int + #: The directory permissions. + perms: str + + #: The size of the directory in bytes. + rbytes: Optional[int] + #: The number of files in the directory. + rfiles: Optional[int] + #: The bytes quota. + quota_bytes: Optional[int] + #: The files quota. 
+ quota_files: Optional[int] + + @staticmethod + def from_path(path: str) -> "FsDirectory": + """Create a new instance from a path.""" + from hpc_access_cli.fs import get_extended_attribute + + # Get owner user name, owner uid, group name, group gid + uid = os.stat(path).st_uid + gid = os.stat(path).st_gid + try: + owner_name = pwd.getpwuid(uid).pw_name + except KeyError: + if os.environ.get("DEBUG", "0") == "1": + owner_name = "unknown" + else: + raise + try: + group_name = grp.getgrgid(gid).gr_name + except KeyError: + if os.environ.get("DEBUG", "0") == "1": + group_name = "unknown" + else: + raise + # Get permissions mask + mode = os.stat(path).st_mode + permissions = stat.filemode(mode) + # Get Ceph extended attributes. + rbytes = int(get_extended_attribute(path, "ceph.dir.rbytes")) + rfiles = int(get_extended_attribute(path, "ceph.dir.rfiles")) + try: + quota_bytes = int(get_extended_attribute(path, "ceph.quota.max_bytes")) + except ValueError: + # attribute missing => no quota set + quota_bytes = None + try: + quota_files = int(get_extended_attribute(path, "ceph.quota.max_files")) + except ValueError: + # attribute missing => no quota set + quota_files = None + + return FsDirectory( + path=path, + owner_name=owner_name, + owner_uid=uid, + group_name=group_name, + group_gid=gid, + perms=permissions, + rbytes=rbytes, + rfiles=rfiles, + quota_bytes=quota_bytes, + quota_files=quota_files, + ) + + +class Gecos(BaseModel): + """GECOS information about a user.""" + + #: The full name of the user. + full_name: Optional[str] = None + #: The office location of the user. + office_location: Optional[str] = None + #: The office phone number of the user. + office_phone: Optional[str] = None + #: The home phone number of the user. + home_phone: Optional[str] = None + #: The other information about the user. 
+ other: Optional[str] = None + + def to_string(self): + """Convert the GECOS information to a GECOS string.""" + return ",".join( + [ + self.full_name if self.full_name else "", + self.office_location if self.office_location else "", + self.office_phone if self.office_phone else "", + self.home_phone if self.home_phone else "", + self.other if self.other else "", + ] + ) + + @staticmethod + def from_string(gecos: str) -> "Gecos": + """Create a new instance from a GECOS string.""" + parts = gecos.split(",", 4) + if len(parts) < 5: + parts.extend([""] * (5 - len(parts))) + return Gecos( + full_name=parts[0] if parts[0] != "None" else None, + office_location=parts[1] if parts[1] != "None" else None, + office_phone=parts[2] if parts[2] != "None" else None, + home_phone=parts[3] if parts[3] != "None" else None, + other=parts[4] if parts[4] != "None" else None, + ) + + +class LdapUser(BaseModel): + """A user form the LDAP directory.""" + + #: The common name of the user. + cn: str + #: The distinguished name of the user. + dn: str + #: The username. + uid: str + #: The email address of the user. + mail: Optional[str] + #: The user's surname. + sn: Optional[str] + #: The user's given name. + given_name: Optional[str] + #: The numeric user ID. + uid_number: int + #: The primary group of the user. + gid_number: Optional[int] + #: The home directory of the user. + home_directory: str + #: The login shell of the user. + login_shell: str + #: The GECOS information of the user. + gecos: Optional[Gecos] + #: Public SSH keys. + ssh_public_key: List[str] + + +class LdapGroup(BaseModel): + """A group from the LDAP directory. + + Note that we use this both for work groups and for projects. Work groups + will have ``member_uids==[]`` as the members are added via their primary + numeric group uid. + """ + + #: The common name of the group. + cn: str + #: The distinguished name of the group. + dn: str + #: The GID number. + gid_number: int + #: Description of the group. 
+ description: Optional[str] + #: The distinguished name of the group's owner. + owner_dn: Optional[str] + #: The distinguished name of the group's delegates. + delegate_dns: List[str] + #: The member uids (== user names) of the group. + member_uids: List[str] + + +class ResourceData(BaseModel): + """A resource request/usage for a user.""" + + #: Storage on tier 1 in TB (work). + tier1_work: float = 0.0 + #: Storage on tier 1 in TB (scratch). + tier1_scratch: float = 0.0 + #: Storage on tier 2 (mirrored) in TB. + tier2_mirrored: float = 0.0 + #: Storage on tier 2 (unmirrored) in TB. + tier2_unmirrored: float = 0.0 + + +class ResourceDataUser(BaseModel): + """A resource request/usage for a user.""" + + #: Storage on tier 1 in GB (home). + tier1_home: float = 0.0 + + +class GroupFolders(BaseModel): + """Folders for a group or project.""" + + #: The work directory. + tier1_work: str + #: The scratch directory. + tier1_scratch: str + #: The mirrored directory. + tier2_mirrored: str + #: The unmirrored directory. + tier2_unmirrored: str + + +@enum.unique +class Status(enum.Enum): + """Status of a hpc user, group, or project.""" + + INITIAL = "INITIAL" + ACTIVE = "ACTIVE" + DELETED = "DELETED" + EXPIRED = "EXPIRED" + + +class HpcUser(BaseModel): + """A user as read from the hpc-access API.""" + + #: The UUID of the record. + uuid: UUID + #: The UUID of the primary ``HpcGroup``. + primary_group: Optional[UUID] + #: Description of the record. + description: Optional[str] + #: The user's email address. + email: Optional[str] + #: The full name of the user. + full_name: str + #: The first name fo the user. + first_name: Optional[str] + #: The last name of the user. + last_name: Optional[str] + #: The office phone number of the user. + phone_number: Optional[str] + #: The requested resources. + resources_requested: Optional[ResourceDataUser] + #: The used resources. + resources_used: Optional[ResourceDataUser] + #: The status of the record. 
+ status: Status + #: The POSIX UID of the user. + uid: int + #: The username of the record. + username: str + #: Point in time of user expiration. + expiration: datetime.datetime + #: The home directory. + home_directory: str + #: The login shell + login_shell: str + #: The version of the user record. + current_version: int + + +class HpcGroup(BaseModel): + """A group as read from the hpc-access API.""" + + #: The UUID of the record. + uuid: UUID + #: The owning ``HpcUser``. + owner: UUID + #: Description of the record. + description: Optional[str] + #: The delegate. + delegate: Optional[UUID] + #: The requested resources. + resources_requested: Optional[ResourceData] + #: The used resources. + resources_used: Optional[ResourceData] + #: The status of the record. + status: Status + #: The POSIX GID of the corresponding Unix group. + gid: Optional[int] + #: The name of the record. + name: str + #: The folders of the group. + folders: GroupFolders + #: Point in time of group expiration. + expiration: datetime.datetime + #: The version of the group record. + current_version: int + + +class HpcProject(BaseModel): + """A project as read from the hpc-access API.""" + + #: The UUID of the record. + uuid: UUID + #: The owning ``HpcGroup``, owner of group is owner of project. + group: Optional[UUID] + #: Description of the record. + description: Optional[str] + #: The delegate for the project. + delegate: Optional[UUID] + #: The requested resources. + resources_requested: Optional[ResourceData] + #: The used resources. + resources_used: Optional[ResourceData] + #: The status of the record. + status: Status + #: The POSIX GID of the corresponding Unix group. + gid: Optional[int] + #: The name of the record. + name: str + #: The folders of the group. + folders: GroupFolders + #: Point in time of group expiration. + expiration: datetime.datetime + #: The version of the project record. + current_version: int + #: The project's member user UUIDs. 
+ members: List[UUID] + + +class SystemState(BaseModel): + """System state retrieved from LDAP and file system.""" + + #: Mapping from LDAP username to ``LdapUser``. + ldap_users: Dict[str, LdapUser] + #: Mapping from LDAP groupname to ``LdapGroup``. + ldap_groups: Dict[str, LdapGroup] + #: Mapping from file system path to ``FsDirectory``. + fs_directories: Dict[str, FsDirectory] + + +class HpcaccessState(BaseModel): + """State as loaded from hpc-access.""" + + hpc_users: Dict[UUID, HpcUser] + hpc_groups: Dict[UUID, HpcGroup] + hpc_projects: Dict[UUID, HpcProject] + + +@enum.unique +class StateOperation(enum.Enum): + """Operation to perform on the state.""" + + #: Create a new object. + CREATE = "CREATE" + #: Update an object's attributes. + UPDATE = "UPDATE" + #: Disable access to an update; note that we will never delete + #: in scripts by design. + DISABLE = "DISABLE" + + +class FsDirectoryOp(BaseModel): + """Operation to perform on a file system directory.""" + + #: The operation to perform. + operation: StateOperation + #: The directory to operate on. + directory: FsDirectory + #: The diff to update. + diff: Dict[str, None | int | str] + + +class LdapUserOp(BaseModel): + """Operation to perform on a LDAP user.""" + + #: The operation to perform. + operation: StateOperation + #: The user to operate on. + user: LdapUser + #: The diff to update (``None`` => clear). + diff: Dict[str, None | int | str | List[str] | Dict[str, Any]] + + +class LdapGroupOp(BaseModel): + """Operation to perform on a LDAP group.""" + + #: The operation to perform. + operation: StateOperation + #: The group to operate on. + group: LdapGroup + #: The diff to update (``None`` => clear). + diff: Dict[str, None | int | str | List[str] | Dict[str, Any]] + + +class OperationsContainer(BaseModel): + """Container for all operations to perform.""" + + #: Operations to perform on LDAP users. + ldap_user_ops: List[LdapUserOp] + #: Operations to perform on LDAP groups. 
class HpcaccessClient:
    """Client for accessing the hpc-access REST API."""

    def __init__(self, settings: HpcaccessSettings):
        #: The settings to use.
        self.settings = settings
        #: Base URL of the server; used to relativize pagination links.
        self.base_url = settings.server_url
        #: The underlying HTTP client.
        self.client = httpx.Client(base_url=str(settings.server_url))

    def _auth_headers(self) -> dict:
        """Return the token authorization headers for API requests."""
        return {"Authorization": f"Token {self.settings.api_token.get_secret_value()}"}

    def _load_paged(self, url: str, model_class) -> list:
        """Load all pages of a paginated listing endpoint.

        :param url: server-relative URL of the first page
        :param model_class: pydantic model used to validate each entry
        :return: list of validated model instances
        """
        result = []
        while url:
            response = self.client.get(url, headers=self._auth_headers())
            response.raise_for_status()
            response_data = response.json()
            for entry in response_data.get("results", []):
                result.append(model_class.model_validate(entry))
            next_url = response_data.get("next")
            # The server returns absolute "next" URLs; strip the base URL so
            # the client (configured with ``base_url``) can follow them.
            url = str(next_url)[len(str(self.base_url)) :] if next_url else None
        return result

    def load_users(self) -> List[HpcUser]:
        """Load users from the hpc-access server."""
        return self._load_paged("adminsec/api/hpcuser/", HpcUser)

    def load_groups(self) -> List[HpcGroup]:
        """Load groups from the hpc-access server."""
        return self._load_paged("adminsec/api/hpcgroup/", HpcGroup)

    def load_projects(self) -> List[HpcProject]:
        """Load projects from the hpc-access server."""
        return self._load_paged("adminsec/api/hpcproject/", HpcProject)

    def _patch_resources_used(self, url: str, resources) -> None:
        """PATCH the ``resources_used`` field at the given URL.

        ``resources`` may be a plain dict (as set by the storage-usage
        sync) or a pydantic model; models are serialized before sending.
        BUGFIX: previously ``x or Model().model_dump()`` passed a truthy
        *model instance* unserialized into ``json=``, which is not
        JSON-serializable.
        """
        if hasattr(resources, "model_dump"):
            resources = resources.model_dump()
        data = {"resources_used": resources}
        response = self.client.patch(url, headers=self._auth_headers(), json=data)
        response.raise_for_status()

    def update_user_resources_used(self, user: HpcUser):
        """Update resource usage for a user."""
        self._patch_resources_used(
            f"adminsec/api/hpcuser/{user.uuid}/",
            user.resources_used or ResourceDataUser(),
        )

    def update_group_resources_used(self, group: HpcGroup):
        """Update resource usage for a group."""
        self._patch_resources_used(
            f"adminsec/api/hpcgroup/{group.uuid}/",
            group.resources_used or ResourceData(),
        )

    def update_project_resources_used(self, project: HpcProject):
        """Update resource usage for a project."""
        self._patch_resources_used(
            f"adminsec/api/hpcproject/{project.uuid}/",
            project.resources_used or ResourceData(),
        )

    def close(self):
        """Close the underlying HTTP client."""
        self.client.close()
+console_err = Console(file=sys.stderr) + + +def strip_prefix(name: str, prefix: str | None = None) -> str: + if prefix: + if name.startswith(prefix): + return name[len(prefix) :] + else: + if name.startswith(POSIX_AG_PREFIX): + return name[len(POSIX_AG_PREFIX) :] + elif name.startswith(POSIX_PROJECT_PREFIX): + return name[len(POSIX_PROJECT_PREFIX) :] + return name + + +def user_dn(user: HpcUser) -> str: + """Get the DN for the user.""" + if user.username.endswith("_m"): + return f"cn={user.full_name},{BASE_DN_MDC}" + else: + return f"cn={user.full_name},{BASE_DN_CHARITE}" + + +def gather_hpcaccess_state(settings: HpcaccessSettings) -> HpcaccessState: + """Gather the state.""" + console_err.log("Loading hpc-access users, groups, and projects...") + rest_client = HpcaccessClient(settings) + result = HpcaccessState( + hpc_users={u.uuid: u for u in rest_client.load_users()}, + hpc_groups={g.uuid: g for g in rest_client.load_groups()}, + hpc_projects={p.uuid: p for p in rest_client.load_projects()}, + ) + console_err.log(" # of users:", len(result.hpc_users)) + console_err.log(" # of groups:", len(result.hpc_groups)) + console_err.log(" # of projects:", len(result.hpc_projects)) + console_err.log("... have hpc-access data now.") + rest_client.close() + return result + + +def deploy_hpcaccess_state(settings: HpcaccessSettings, state: HpcaccessState) -> None: + """Deploy the state.""" + console_err.log("Deploying hpc-access users, groups, and projects...") + rest_client = HpcaccessClient(settings) + for u in state.hpc_users.values(): + rest_client.update_user_resources_used(u) + for g in state.hpc_groups.values(): + rest_client.update_group_resources_used(g) + for p in state.hpc_projects.values(): + rest_client.update_project_resources_used(p) + rest_client.close() + console_err.log("... deployed hpc-access data now.") + + +class TargetStateBuilder: + """ "Helper class that is capable of building the target state giving data + from hpc-access. 
+ """ + + def __init__(self, settings: HpcaccessSettings, system_state: SystemState): + #: The settings to use. + self.settings = settings + #: The current system state, used for determining next group id. + self.system_state = system_state + #: The next gid. + self.next_gid = self._get_next_gid(system_state) + console_err.log(f"Next available GID is {self.next_gid}.") + + def _get_next_gid(self, system_state: SystemState) -> int: + """Get the next available GID.""" + gids = [g.gid_number for g in system_state.ldap_groups.values()] + gids.extend([u.gid_number for u in system_state.ldap_users.values() if u.gid_number]) + return max(gids) + 1 if gids else 1000 + + def run(self) -> SystemState: + """Run the builder.""" + hpcaccess_state = gather_hpcaccess_state(self.settings) + return self._build(hpcaccess_state) + + def _build(self, hpcaccess_state: HpcaccessState) -> SystemState: + """Build the target state.""" + # IMPORANT: Note that order matters here! First, we must create + # LDAP groups so we have the Unix GIDs when users are considered. 
+ ldap_groups = self._build_ldap_groups(hpcaccess_state) + ldap_users = self._build_ldap_users(hpcaccess_state) + # build hpc-users group + ldap_groups["hpc-users"] = LdapGroup( + dn="cn=hpc-users,ou=Groups,dc=hpc,dc=bihealth,dc=org", + cn="hpc-users", + gid_number=HPC_USERS_GID, + description="users allowed to login (active+have group)", + owner_dn=None, + delegate_dns=[], + member_uids=[ + u.uid + for u in ldap_users.values() + if u.gid_number != HPC_ALUMNIS_GID and "nologin" not in u.login_shell + ], + ) + return SystemState( + ldap_users=ldap_users, + ldap_groups=ldap_groups, + fs_directories=self._build_fs_directories(hpcaccess_state), + ) + + def _build_fs_directories(self, hpcaccess_state: HpcaccessState) -> Dict[str, FsDirectory]: + """Build the file system directories from the hpc-access state.""" + result = {} + for user in hpcaccess_state.hpc_users.values(): + if user.primary_group: + hpc_group = hpcaccess_state.hpc_groups[user.primary_group] + group_name = hpc_group.name + group_gid = hpc_group.gid or HPC_ALUMNIS_GID + else: + group_name = HPC_ALUMNIS_GROUP + group_gid = HPC_ALUMNIS_GID + result[f"{BASE_PATH_TIER1}/home/users/{user.username}"] = FsDirectory( + path=f"{BASE_PATH_TIER1}/home/users/{user.username}", + owner_name=user.username, + owner_uid=user.uid, + group_name=group_name, + group_gid=group_gid, + perms="drwx--S---", + rbytes=None, + rfiles=None, + # Currently, hard-coded user quotas only. + # Note: maybe remove from HpcUser model! 
+ quota_bytes=QUOTA_HOME_BYTES, + quota_files=None, + ) + for group in hpcaccess_state.hpc_groups.values(): + if not group.gid: + console_err.log( + f"Group {group.name} has no gid, skipping.", + ) + continue + owner = hpcaccess_state.hpc_users[group.owner] + group_name = strip_prefix(group.name, prefix=POSIX_AG_PREFIX) + # Tier 1 + quota_work = (group.resources_requested or ResourceData).tier1_work + if not quota_work: + continue + quota_scratch = (group.resources_requested or ResourceData).tier1_scratch + if not quota_scratch: + continue + for volume, quota in ( + ("home", QUOTA_HOME_BYTES), + ("scratch", quota_scratch * 1024 * 1024 * 1024 * 1024), + ("work", quota_work * 1024 * 1024 * 1024 * 1024), + ): + result[f"{BASE_PATH_TIER1}/{volume}/groups/ag-{group_name}"] = FsDirectory( + path=f"{BASE_PATH_TIER1}/{volume}/groups/ag-{group_name}", + owner_name=owner.username, + owner_uid=owner.uid, + group_name=group_name, + group_gid=group.gid, + perms="drwxrwS---", + rbytes=None, + rfiles=None, + quota_bytes=None if quota is None else int(quota), + quota_files=None, + ) + # Tier 2 + for variant in ("unmirrored", "mirrored"): + if variant == "mirrored": + quota = (group.resources_requested or ResourceData).tier2_mirrored + elif variant == "unmirrored": + quota = (group.resources_requested or ResourceData).tier2_unmirrored + else: + raise ValueError("Invalid variant") + if not quota: + continue + result[f"{BASE_PATH_TIER2}/{variant}/groups/ag-{group_name}"] = FsDirectory( + path=f"{BASE_PATH_TIER2}/{variant}/groups/ag-{group_name}", + owner_name=owner.username, + owner_uid=owner.uid, + group_name=group_name, + group_gid=group.gid, + perms="drwxrwS---", + rbytes=None, + rfiles=None, + quota_bytes=None if quota is None else int(quota), + quota_files=None, + ) + for project in hpcaccess_state.hpc_projects.values(): + if not project.gid: + console_err.log( + f"Project {project.name} has no gid, skipping.", + ) + continue + if not project.group: + console_err.log( + 
f"Project {project.name} has no owning group, skipping.", + ) + continue + owning_group = hpcaccess_state.hpc_groups[project.group] + owner = hpcaccess_state.hpc_users[owning_group.owner] + project_name = strip_prefix(group.name, prefix=POSIX_PROJECT_PREFIX) + # Tier 1 + quota_work = (project.resources_requested or ResourceData).tier1_work + if not quota_work: + continue + quota_scratch = (project.resources_requested or ResourceData).tier1_scratch + if not quota_scratch: + continue + for volume, quota in ( + ("home", QUOTA_HOME_BYTES), + ("scratch", quota_scratch * 1024 * 1024 * 1024 * 1024), + ("work", quota_work * 1024 * 1024 * 1024 * 1024), + ): + result[f"{BASE_PATH_TIER1}/{volume}/projects/{project_name}"] = FsDirectory( + path=f"{BASE_PATH_TIER1}/{volume}/projects/{project_name}", + owner_name=owner.username, + owner_uid=owner.uid, + group_name=project_name, + group_gid=project.gid, + perms="drwxrwS---", + rbytes=None, + rfiles=None, + quota_bytes=None if quota is None else int(quota), + quota_files=None, + ) + # Tier 2 + for variant in ("unmirrored", "mirrored"): + if variant == "mirrored": + quota = (project.resources_requested or ResourceData).tier2_mirrored + elif variant == "unmirrored": + quota = (project.resources_requested or ResourceData).tier2_unmirrored + else: + raise ValueError("Invalid variant") + if not quota: + continue + result[f"{BASE_PATH_TIER2}/{variant}/projects/{project_name}"] = FsDirectory( + path=f"{BASE_PATH_TIER2}/{variant}/projects/{project_name}", + owner_name=owner.username, + owner_uid=owner.uid, + group_name=project_name, + group_gid=project.gid, + perms="drwxrwS---", + rbytes=None, + rfiles=None, + quota_bytes=None if quota is None else int(quota), + quota_files=None, + ) + + return result + + def _build_ldap_users(self, hpcaccess_state: HpcaccessState) -> Dict[str, LdapUser]: + """Build the LDAP users from the hpc-access state.""" + result = {} + for user in hpcaccess_state.hpc_users.values(): + gecos = Gecos( + 
full_name=user.full_name, + office_location=None, + office_phone=user.phone_number, + other=None, + ) + if user.primary_group: + hpc_group = hpcaccess_state.hpc_groups[user.primary_group] + group_gid = hpc_group.gid or HPC_ALUMNIS_GID + else: + group_gid = HPC_ALUMNIS_GID + result[user.username] = LdapUser( + dn=user_dn(user), + cn=user.full_name, + sn=user.last_name, + given_name=user.first_name, + uid=user.username, + mail=user.email, + gecos=gecos, + uid_number=user.uid, + gid_number=group_gid, + # user.home_directory + home_directory=f"{BASE_PATH_TIER1}/home/users/{user.username}", + # user.login_shell + login_shell="/usr/bin/bash", + # SSH keys are managed via upstream LDAP. + ssh_public_key=[], + ) + return result + + def _build_ldap_groups(self, state: HpcaccessState) -> Dict[str, LdapGroup]: + """Build the LDAP groups from the hpc-access state.""" + result = {} + # build for work groups + for group in state.hpc_groups.values(): + if not group.gid: + # assign new group Unix GID if necessary + group.gid = self.next_gid + self.next_gid += 1 + group_dn = f"cn={POSIX_AG_PREFIX}{group.name},{BASE_DN_GROUPS}" + owner = state.hpc_users[group.owner] + delegate = state.hpc_users[group.delegate] if group.delegate else None + group_name = f"{POSIX_AG_PREFIX}{group.name}" + result[group_name] = LdapGroup( + dn=group_dn, + cn=group_name, + gid_number=group.gid, + description=group.description, + owner_dn=user_dn(owner), + delegate_dns=[user_dn(delegate)] if delegate else [], + member_uids=[], + ) + # build for projects + for project in state.hpc_projects.values(): + if not project.gid: + # assign new project Unix GID if necessary + project.gid = self.next_gid + self.next_gid += 1 + group_dn = f"cn={POSIX_PROJECT_PREFIX}{project.name},{BASE_DN_PROJECTS}" + if project.group: + owning_group = state.hpc_groups[project.group] + owner = state.hpc_users[owning_group.owner] + owner_dn = user_dn(owner) + else: + owner_dn = None + delegate = state.hpc_users[project.delegate] if 
def gather_system_state(settings: Settings) -> SystemState:
    """Gather the system state from LDAP and file system."""
    ldap_conn = LdapConnection(settings.ldap_hpc)
    console_err.log("Loading LDAP users and groups...")
    users_by_uid = {u.uid: u for u in ldap_conn.load_users()}
    groups_by_cn = {g.cn: g for g in ldap_conn.load_groups()}
    console_err.log("Loading file system directories...")
    # In DEBUG mode the cluster file systems are mirrored below /data/sshfs.
    debug_mode = os.environ.get("DEBUG", "0") == "1"
    fs_mgr = FsResourceManager(prefix="/data/sshfs" if debug_mode else "")
    dirs_by_path = {d.path: d for d in fs_mgr.load_directories()}
    result = SystemState(
        ldap_users=users_by_uid,
        ldap_groups=groups_by_cn,
        fs_directories=dirs_by_path,
    )
    console_err.log(" # of users:", len(result.ldap_users))
    console_err.log(" # of groups:", len(result.ldap_groups))
    console_err.log(" # of directories:", len(result.fs_directories))
    console_err.log("... have system state now")
    return result
def fs_validation(fs: FsDirectory) -> tuple[str, str, str]:
    """Validate the path.

    Returns the ``(entity, folder_name, resource)`` triple for a managed
    directory or raises :class:`ValueError` when the path does not match
    the expected layout.
    """
    match = re.search(RE_PATH, fs.path)
    if match is None:
        raise ValueError(f"no match for path {fs.path}")
    tier, subdir, entity, folder_name = match.groups()

    if entity not in ENTITIES:
        raise ValueError(f"entity unknown ({'/'.join(ENTITIES)}): {entity}")

    # The folder name must match the owning user (for user homes) or the
    # prefix-stripped owning group (for group/project folders).
    if entity == "users":
        entity_name = fs.owner_name
    else:
        entity_name = strip_prefix(fs.group_name, prefix=PREFIX_MAPPING[entity])
    if entity_name != folder_name:
        raise ValueError(f"name mismatch: {entity_name} {fs.path}")

    resource = CEPHFS_TIER_MAPPING.get((tier, subdir, entity))
    if not resource:
        raise ValueError(
            f"path {fs.path} not in {['/'.join(k) for k in CEPHFS_TIER_MAPPING.keys()]}"
        )

    return entity, folder_name, resource
+ """ + # create UUID mapping from user/groupnames + user_uuids = {u.uid: uuid4() for u in system_state.ldap_users.values()} + user_by_uid = {u.uid: u for u in system_state.ldap_users.values()} + user_by_dn = {u.dn: u for u in system_state.ldap_users.values()} + group_uuids = { + g.cn: uuid4() + for g in system_state.ldap_groups.values() + if g.cn.startswith(POSIX_AG_PREFIX) or g.cn.startswith(POSIX_PROJECT_PREFIX) + } + group_by_name = {strip_prefix(g.cn): g for g in system_state.ldap_groups.values()} + group_by_gid_number = {g.gid_number: g for g in system_state.ldap_groups.values()} + group_by_owner_dn: Dict[str, LdapGroup] = {} + for g in system_state.ldap_groups.values(): + if g.owner_dn: + group_by_owner_dn[user_by_dn[g.owner_dn].dn] = g + user_quotas: Dict[str, ResourceDataUser] = {} + group_quotas: Dict[str, ResourceData] = {} + for fs_data in system_state.fs_directories.values(): + try: + entity, name, resource = fs_validation(fs_data) + except ValueError as e: + console_err.log(f"WARNING: {e}") + continue + + quota_bytes = fs_data.quota_bytes if fs_data.quota_bytes is not None else 0 + + if entity == ENTITY_USERS: + if name not in user_by_uid: + console_err.log(f"WARNING: user {name} not found") + continue + if name not in user_quotas: + user_quotas[name] = {} + user_quotas[name][resource] = quota_bytes / 1024**3 + elif entity in (ENTITY_GROUPS, ENTITY_PROJECTS): + if name not in group_by_name: + console_err.log(f"WARNING: group {name} not found") + continue + if name not in group_quotas: + group_quotas[name] = {} + group_quotas[name][resource] = quota_bytes / 1024**4 + + def build_hpcuser(u: LdapUser, quotas: Dict[str, str]) -> HpcUser: + if u.login_shell != LOGIN_SHELL_DISABLED: + status = Status.ACTIVE + expiration = datetime.datetime.now() + datetime.timedelta(days=365) + else: + status = Status.EXPIRED + expiration = datetime.datetime.now() + if u.gid_number and u.gid_number in group_by_gid_number: + primary_group = 
group_uuids.get(group_by_gid_number[u.gid_number].cn) + else: + primary_group = None + return HpcUser( + uuid=user_uuids[u.uid], + primary_group=primary_group, + description=None, + full_name=u.cn, + first_name=u.given_name, + last_name=u.sn, + email=u.mail, + phone_number=u.gecos.office_phone if u.gecos else None, + resources_requested=ResourceDataUser(**quotas), + resources_used=ResourceDataUser( + tier1_home=0, + ), + status=status, + uid=u.uid_number, + username=u.uid, + expiration=expiration, + home_directory=u.home_directory, + login_shell=u.login_shell, + current_version=1, + ) + + def build_hpcgroup(g: LdapGroup, quotas: Dict[str, str]) -> Optional[HpcGroup]: + expiration = datetime.datetime.now() + datetime.timedelta(days=365) + name = strip_prefix(g.cn, POSIX_AG_PREFIX) + if not g.owner_dn: + console_err.log(f"no owner DN for {g.cn}, skipping") + return + return HpcGroup( + uuid=group_uuids[g.cn], + name=name, + description=g.description, + owner=user_uuids[user_by_dn[g.owner_dn].uid], + delegate=user_uuids[user_by_dn[g.delegate_dns[0]].uid] if g.delegate_dns else None, + resources_requested=ResourceData(**quotas), + resources_used=ResourceData( + tier1_work=0, + tier1_scratch=0, + tier2_mirrored=0, + tier2_unmirrored=0, + ), + status=Status.ACTIVE, + gid=g.gid_number, + folders=GroupFolders( + tier1_work=f"{BASE_PATH_TIER1}/work/groups/{name}", + tier1_scratch=f"{BASE_PATH_TIER1}/scratch/groups/{name}", + tier2_mirrored=f"{BASE_PATH_TIER2}/mirrored/groups/{name}", + tier2_unmirrored=f"{BASE_PATH_TIER2}/unmirrored/groups/{name}", + ), + expiration=expiration, + current_version=1, + ) + + def build_hpcproject(p: LdapGroup, quotas: Dict[str, str]) -> Optional[HpcProject]: + expiration = datetime.datetime.now() + datetime.timedelta(days=365) + name = strip_prefix(p.cn, POSIX_PROJECT_PREFIX) + if not p.owner_dn: + console_err.log(f"no owner DN for {p.cn}, skipping") + return + members = [] + for uid in p.member_uids: + uid = uid.strip() + user = 
user_by_uid[uid] + members.append(user_uuids[user.uid]) + gid_number = user_by_dn[p.owner_dn].gid_number + if not gid_number: + group = None + else: + group = group_uuids[group_by_gid_number[gid_number].cn] + return HpcProject( + uuid=group_uuids[p.cn], + name=name, + description=g.description, + group=group, + delegate=user_uuids[user_by_dn[p.delegate_dns[0]].uid] if p.delegate_dns else None, + resources_requested=ResourceData(**quotas), + resources_used=ResourceData( + tier1_work=0, + tier1_scratch=0, + tier2_mirrored=0, + tier2_unmirrored=0, + ), + status=Status.ACTIVE, + gid=p.gid_number, + folders=GroupFolders( + tier1_work=f"{BASE_PATH_TIER1}/work/projects/{name}", + tier1_scratch=f"{BASE_PATH_TIER1}/scratch/projects/{name}", + tier2_mirrored=f"{BASE_PATH_TIER2}/mirrored/projects/{name}", + tier2_unmirrored=f"{BASE_PATH_TIER2}/unmirrored/projects/{name}", + ), + expiration=expiration, + current_version=1, + members=members, + ) + + # construct the resulting state + hpc_users = {} + for u in system_state.ldap_users.values(): + hpc_user = build_hpcuser(u, user_quotas.get(u.uid, {})) + hpc_users[hpc_user.uuid] = hpc_user + hpc_groups = {} + hpc_projects = {} + for g in system_state.ldap_groups.values(): + if g.cn.startswith(POSIX_AG_PREFIX): + hpc_group = build_hpcgroup( + g, group_quotas.get(strip_prefix(g.cn, prefix=POSIX_AG_PREFIX), {}) + ) + if hpc_group: + hpc_groups[hpc_group.uuid] = hpc_group + elif g.cn.startswith(POSIX_PROJECT_PREFIX): + hpc_project = build_hpcproject( + g, group_quotas.get(strip_prefix(g.cn, prefix=POSIX_PROJECT_PREFIX), {}) + ) + if hpc_project: + hpc_projects[hpc_project.uuid] = hpc_project + return HpcaccessState( + hpc_users=hpc_users, + hpc_groups=hpc_groups, + hpc_projects=hpc_projects, + ) + + +class TargetStateComparison: + """Helper class that compares two system states. + + Differences are handled as follows. + + - LDAP + - Missing LDAP objects are created. + - Existing but differing LDAP objects are updated. 
+ - Extra LDAP users are disabled by setting `loginShell` to `/sbin/nologin`. + - file system + - Missing directories are created. + - Existing but differing directories are updated. + - Extra directories have the owner set to ``root:root`` and the access + to them is disabled. + """ + + def __init__(self, settings: HpcaccessSettings, src: SystemState, dst: SystemState): + #: Configuration of ``hpc-access`` system to use. + self.settings = settings + #: Source state + self.src = src + #: Target state + self.dst = dst + + def run(self) -> OperationsContainer: + """Run the comparison.""" + console_err.log("Comparing source and target state...") + result = OperationsContainer( + ldap_user_ops=self._compare_ldap_users(), + ldap_group_ops=self._compare_ldap_groups(), + fs_ops=self._compare_fs_directories(), + ) + console_err.log("... have operations now.") + return result + + def _compare_ldap_users(self) -> List[LdapUserOp]: + """Compare ``LdapUser`` records between system states.""" + result = [] + extra_usernames = set(self.src.ldap_users.keys()) - set(self.dst.ldap_users.keys()) + missing_usernames = set(self.dst.ldap_users.keys()) - set(self.src.ldap_users.keys()) + common_usernames = set(self.src.ldap_users.keys()) & set(self.dst.ldap_users.keys()) + for username in extra_usernames: + user = self.src.ldap_users[username] + result.append(LdapUserOp(operation=StateOperation.DISABLE, user=user, diff={})) + for username in missing_usernames: + user = self.src.ldap_users[username] + result.append(LdapUserOp(operation=StateOperation.CREATE, user=user, diff={})) + for username in common_usernames: + src_user = self.src.ldap_users[username] + dst_user = self.dst.ldap_users[username] + src_user_dict = src_user.model_dump() + dst_user_dict = dst_user.model_dump() + all_keys = set(src_user_dict.keys()) | set(dst_user_dict.keys()) + if src_user_dict != dst_user_dict: + diff = {} + for key in all_keys: + if src_user_dict.get(key) != dst_user_dict.get(key): + diff[key] = 
dst_user_dict.get(key) + result.append(LdapUserOp(operation=StateOperation.UPDATE, user=src_user, diff=diff)) + return result + + def _compare_ldap_groups(self) -> List[LdapGroupOp]: + result = [] + extra_group_names = set(self.src.ldap_groups.keys()) - set(self.dst.ldap_groups.keys()) + missing_group_names = set(self.dst.ldap_groups.keys()) - set(self.src.ldap_groups.keys()) + common_group_names = set(self.src.ldap_groups.keys()) & set(self.dst.ldap_groups.keys()) + for name in extra_group_names: + group = self.src.ldap_groups[name] + result.append(LdapGroupOp(operation=StateOperation.DISABLE, group=group, diff={})) + for name in missing_group_names: + group = self.dst.ldap_groups[name] + result.append(LdapGroupOp(operation=StateOperation.CREATE, group=group, diff={})) + for name in common_group_names: + src_group = self.src.ldap_groups[name] + dst_group = self.dst.ldap_groups[name] + src_group_dict = src_group.model_dump() + dst_group_dict = dst_group.model_dump() + all_keys = set(src_group_dict.keys()) | set(dst_group_dict.keys()) + if src_group_dict != dst_group_dict: + diff = {} + for key in all_keys: + if src_group_dict.get(key) != dst_group_dict.get(key): + diff[key] = dst_group_dict.get(key) + result.append( + LdapGroupOp(operation=StateOperation.UPDATE, group=src_group, diff=diff) + ) + return result + + def _compare_fs_directories(self) -> List[FsDirectoryOp]: + result = [] + extra_paths = set(self.src.fs_directories.keys()) - set(self.dst.fs_directories.keys()) + missing_paths = set(self.dst.fs_directories.keys()) - set(self.src.fs_directories.keys()) + common_paths = set(self.src.fs_directories.keys()) & set(self.dst.fs_directories.keys()) + for path in extra_paths: + directory = self.src.fs_directories[path] + result.append( + FsDirectoryOp(operation=StateOperation.DISABLE, directory=directory, diff={}) + ) + for path in missing_paths: + directory = self.dst.fs_directories[path] + result.append( + FsDirectoryOp(operation=StateOperation.CREATE, 
directory=directory, diff={}) + ) + for path in common_paths: + src_directory = self.src.fs_directories[path] + dst_directory = self.dst.fs_directories[path] + src_directory_dict = src_directory.model_dump() + dst_directory_dict = dst_directory.model_dump() + if src_directory_dict != dst_directory_dict: + diff = {} + for key in ("owner_uid", "owner_gid", "perms", "quota_bytes", "quota_files"): + if src_directory_dict.get(key) != dst_directory_dict.get(key): + diff[key] = dst_directory_dict.get(key) + result.append( + FsDirectoryOp( + operation=StateOperation.UPDATE, + directory=src_directory, + diff=diff, + ) + ) + return result diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..795bb06 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,69 @@ +[project] +name = "hpc-access-cli" +version = "0.2.0" +authors = [ + { name="Manuel Holtgrewe", email="manuel.holtgrewe@bih-charite.de" }, +] +description = "CLI for hpc-access" +readme = "README.md" +requires-python = ">=3.10" +classifiers = [ + "Programming Language :: Python :: 3", + "License :: OSI Approved :: MIT License", + "Operating System :: OS Independent", +] +dependencies = [ + "annotated-types==0.6.0", + "anyio==4.2.0", + "black==24.2.0", + "certifi==2024.2.2", + "cffi==1.16.0", + "click==8.1.7", + "editables==0.5", + "exceptiongroup==1.2.0", + "h11==0.14.0", + "hatchling==1.21.1", + "html5lib==1.1", + "httpcore==1.0.3", + "httpx==0.26.0", + "idna==3.6", + "ldap3==2.9.1", + "markdown-it-py==3.0.0", + "mdurl==0.1.2", + "mechanize==0.4.9", + "mypy-extensions==1.0.0", + "nodeenv==1.8.0", + "packaging==23.2", + "pathspec==0.12.1", + "platformdirs==4.2.0", + "pluggy==1.4.0", + "pyasn1==0.5.1", + "pycparser==2.21", + "pydantic==2.6.1", + "pydantic-settings==2.1.0", + "pydantic_core==2.16.2", + "Pygments==2.17.2", + "pyhumps==3.8.0", + "pyright==1.1.350", + "python-dotenv==1.0.1", + "rich==13.7.0", + "six==1.16.0", + "sniffio==1.3.0", + "tomli==2.0.1", + "trove-classifiers==2024.1.31", + 
"typer==0.9.0", + "typing_extensions==4.9.0", + "webencodings==0.5.1", + "xattr==1.1.0", +] + +[project.scripts] +hpc-access-cli = "hpc_access_cli.main:app" + +[project.urls] +Homepage = "https://github.com/bihealth/hpc-access" +Issues = "https://github.com/bihealth/hpc-access/issues" + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build"