diff --git a/.github/actions/start-build/action.yml b/.github/actions/start-build/action.yml index ea4fa0ff66a..d1b124ef50e 100644 --- a/.github/actions/start-build/action.yml +++ b/.github/actions/start-build/action.yml @@ -27,8 +27,8 @@ runs: - name: Copy Settings shell: bash run: | - cp website/settings/local-travis.py website/settings/local.py - cp api/base/settings/local-travis.py api/base/settings/local.py + cp website/settings/local-ci.py website/settings/local.py + cp api/base/settings/local-ci.py api/base/settings/local.py mkdir -p ~/preprints touch ~/preprints/index.html - name: PIP install @@ -41,7 +41,7 @@ runs: shell: bash run: | # bumped psycopg to match requirements.txt, as otherwise build would fail - poetry run python3 -m invoke travis-addon-settings + poetry run python3 -m invoke ci-addon-settings pip uninstall uritemplate.py --yes # use yarn add --exact to match versions in yarn.lock w/o installing all deps yarn add --exact bower@^1.8.8 diff --git a/.github/workflows/test-build.yml b/.github/workflows/test-build.yml index 7fe875888d9..42f264772ac 100644 --- a/.github/workflows/test-build.yml +++ b/.github/workflows/test-build.yml @@ -52,7 +52,7 @@ jobs: - uses: actions/checkout@v2 - uses: ./.github/actions/start-build - name: Run tests - run: poetry run python3 -m invoke test-travis-addons -n 1 --junit + run: poetry run python3 -m invoke test-ci-addons -n 1 --junit - name: Upload report if: (github.event_name != 'pull_request') && (success() || failure()) # run this step even if previous step failed uses: ./.github/actions/gen-report @@ -79,7 +79,7 @@ jobs: - uses: actions/checkout@v2 - uses: ./.github/actions/start-build - name: Run tests - run: poetry run python3 -m invoke test-travis-website -n 1 --junit + run: poetry run python3 -m invoke test-ci-website -n 1 --junit - name: Upload report if: (github.event_name != 'pull_request') && (success() || failure()) # run this step even if previous step failed uses: ./.github/actions/gen-report @@ -108,7 +108,7 @@ jobs: - name: NVM & yarn install run: poetry run python3 -m invoke assets --dev - name: Run test - run: poetry run python3 -m invoke test-travis-api1-and-js -n 1 --junit + run: poetry run python3 -m invoke test-ci-api1-and-js -n 1 --junit - name: Upload report if: (github.event_name != 'pull_request') && (success() || failure()) # run this step even if previous step failed uses: ./.github/actions/gen-report @@ -135,7 +135,7 @@ jobs: - uses: actions/checkout@v2 - uses: ./.github/actions/start-build - name: Run tests - run: poetry run python3 -m invoke test-travis-api2 -n 1 --junit + run: poetry run python3 -m invoke test-ci-api2 -n 1 --junit - name: Upload report if: (github.event_name != 'pull_request') && (success() || failure()) # run this step even if previous step failed uses: ./.github/actions/gen-report @@ -163,7 +163,7 @@ jobs: - uses: actions/checkout@v2 - uses: ./.github/actions/start-build - name: Run tests - run: poetry run python3 -m invoke test-travis-api3-and-osf -n 1 --junit + run: poetry run python3 -m invoke test-ci-api3-and-osf -n 1 --junit - name: Upload report if: (github.event_name != 'pull_request') && (success() || failure()) # run this step even if previous step failed uses: ./.github/actions/gen-report diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 0d5c765066d..00000000000 --- a/.travis.yml +++ /dev/null @@ -1,185 +0,0 @@ -# Config file for automatic testing at travis-ci.org - -language: python - -python: - - "3.10" - -dist: trusty - -# TODO: uncomment when 
https://github.com/travis-ci/travis-ci/issues/8836 is resolved -# addons: -# chrome: stable - -os: linux - -cache: - yarn: true - pip: true - directories: - - $HOME/.cache - - node_modules - - website/static/vendor/bower_components - -env: - global: - - WHEELHOUSE="$HOME/.cache/wheelhouse" - - LIBXML2_DEB="libxml2-dbg_2.9.1+dfsg1-3ubuntu4.9_amd64.deb" - - POSTGRES_DEB="postgresql-9.6_9.6.3-1.pgdg12.4+1_amd64.deb" - - ELASTICSEARCH_ARCHIVE="elasticsearch-2.4.5.tar.gz" - - ELASTICSEARCH6_ARCHIVE="elasticsearch-6.3.1.tar.gz" - - LIBJEMALLOC_DEB="libjemalloc1_3.5.1-2_amd64.deb" - - LIBPCRE_DEB="libpcre3_8.31-2ubuntu2.3_amd64.deb" - # - VARNISH_DEB="varnish_4.1.0-1~trusty_amd64.deb" - - OSF_DB_PORT="54321" - # Workaround for travis bug: see https://github.com/travis-ci/travis-ci/issues/7940#issuecomment-311411559 - - BOTO_CONFIG=/dev/null - jobs: - - TEST_BUILD="addons" - - TEST_BUILD="website" - - TEST_BUILD="api1_and_js" - - TEST_BUILD="api2" - - TEST_BUILD="api3_and_osf" - -before_install: - # cache directories - - | - mkdir -p $HOME/.cache/downloads - mkdir -p $HOME/.cache/pip - mkdir -p $HOME/.cache/wheelhouse - mkdir -p $HOME/.cache/testmon - rm -rf node_modules ## TODO remove this later - # postgres - - | - cd $HOME/.cache/downloads - - if [ ! -f "$LIBXML2_DEB" ]; then - curl -SLO http://security.ubuntu.com/ubuntu/pool/main/libx/libxml2/$LIBXML2_DEB - fi - - if [ ! -f "$POSTGRES_DEB" ]; then - curl -SLO http://apt.postgresql.org/pub/repos/apt/pool/main/p/postgresql-9.6/$POSTGRES_DEB - fi - - dpkg -x $LIBXML2_DEB /tmp/libxml2 - dpkg -x $POSTGRES_DEB /tmp/postgres - - | - export LD_LIBRARY_PATH=/tmp/libxml2/usr/lib/x86_64-linux-gnu - /tmp/postgres/usr/lib/postgresql/9.6/bin/initdb /tmp/postgres/data --nosync -U postgres - sed -i -e 's/#fsync.*/fsync = off/' /tmp/postgres/data/postgresql.conf - sed -i -e 's/#synchronous_commit.*/synchronous_commit = off/' /tmp/postgres/data/postgresql.conf - sed -i -e 's/#full_page_writes.*/full_page_writes = off/' /tmp/postgres/data/postgresql.conf - /tmp/postgres/usr/lib/postgresql/9.6/bin/postgres -k /tmp -D /tmp/postgres/data -p 54321 > /dev/null & export POSTGRES_PID=$! - # elasticsearch - - | - cd $HOME/.cache/downloads - - if [ ! -f "$ELASTICSEARCH_ARCHIVE" ]; then - curl -SLO https://download.elasticsearch.org/elasticsearch/elasticsearch/$ELASTICSEARCH_ARCHIVE - fi - - if [ ! -f "$ELASTICSEARCH_ARCHIVE.sha1.txt" ]; then - curl -SLO https://download.elasticsearch.org/elasticsearch/elasticsearch/$ELASTICSEARCH_ARCHIVE.sha1.txt - fi - - sha1sum --check $ELASTICSEARCH_ARCHIVE.sha1.txt - - mkdir -p /tmp/elasticsearch - tar xzf $ELASTICSEARCH_ARCHIVE -C /tmp/elasticsearch --strip-components=1 - - /tmp/elasticsearch/bin/elasticsearch > /dev/null & export ELASTICSEARCH_PID=$! - # Wait for elasticsearch to come online - - |- - while true; do - sleep 5 - curl -sf http://localhost:9200/_cluster/health?wait_for_status=yellow - if [ $? -eq 0 ]; then - break - fi - done - - # elasticsearch6 - - | - - if [ ! -f "$ELASTICSEARCH6_ARCHIVE" ]; then - curl -SLO https://artifacts.elastic.co/downloads/elasticsearch/$ELASTICSEARCH6_ARCHIVE - fi - - if [ ! -f "$ELASTICSEARCH6_ARCHIVE.sha1.txt" ]; then - curl -SLO https://artifacts.elastic.co/downloads/elasticsearch/$ELASTICSEARCH6_ARCHIVE.sha1.txt - fi - - sha1sum --check $ELASTICSEARCH6_ARCHIVE.sha1.txt - - mkdir -p /tmp/elasticsearch6 - tar xzf $ELASTICSEARCH6_ARCHIVE -C /tmp/elasticsearch6 --strip-components=1 - - /tmp/elasticsearch6/bin/elasticsearch > /dev/null & export ELASTICSEARCH6_PID=$! 
- # Wait for elasticsearch to come online - - |- - while true; do - sleep 5 - curl -sf http://localhost:9201/_cluster/health?wait_for_status=yellow - if [ $? -eq 0 ]; then - break - fi - done - - -install: - - cd $TRAVIS_BUILD_DIR - - cp website/settings/local-travis.py website/settings/local.py - - cp api/base/settings/local-travis.py api/base/settings/local.py - - '[ -d $HOME/preprints ] || ( mkdir -p $HOME/preprints && touch $HOME/preprints/index.html )' - - - travis_retry pip install --upgrade pip - - travis_retry pip install invoke==0.13.0 - - travis_retry pip install flake8==7.0.0 --force-reinstall --upgrade - - travis_retry invoke wheelhouse --dev --addons - - - | - if [ "$TEST_BUILD" = "api1_and_js" ]; then - nvm install 8.6.0 - nvm use 8.6.0 - curl -o- -L https://yarnpkg.com/install.sh | bash - export PATH=$HOME/.yarn/bin:$PATH - travis_retry invoke assets --dev - fi - - - travis_retry invoke travis_addon_settings - # bumped psycopg to match requirements.txt, as otherwise build would fail - - travis_retry pip install psycopg2==2.9.9 --no-binary psycopg2 - - travis_retry invoke requirements --dev --addons - # Hack to fix package conflict between uritemplate and uritemplate.py (dependency of github3.py) - - pip uninstall uritemplate.py --yes - - pip install uritemplate.py==0.3.0 - -# Run Python tests (core and addon) and JS tests - -script: - - export COVERAGE=`if [ "$TRAVIS_BRANCH" == "master-w-coverage" ]; then echo "--coverage"; else echo ""; fi` - # Testmon will run for PRs, but will be disabled when merging into master or develop - - export TESTMON=`if [[ "$TRAVIS_PULL_REQUEST_BRANCH" == "" && "$TRAVIS_BRANCH" == "develop" || "$TRAVIS_PULL_REQUEST_BRANCH" == "" && "$TRAVIS_BRANCH" == "master" ]]; then echo ""; else echo "--testmon"; fi` - - export TESTMON_DATAFILE=$HOME/.cache/testmon/.testmondata_$TEST_BUILD - - invoke test_travis_$TEST_BUILD -n 1 $COVERAGE $TESTMON - -after_success: - - if [[ "$TRAVIS_BRANCH" == "master-w-coverage" ]]; then coveralls; fi - -before_cache: - # This ensures failed tests are removed from the cache so they can be re-tried. - - inv remove_failures_from_testmon --db-path=$HOME/.cache/testmon/.testmondata_$TEST_BUILD - - rm -Rf $HOME/.cache/pip/http - - rm -f $HOME/.cache/pip/log/debug.log - # exclude python requirements from github repo's - - rm -f $HOME/.cache/wheelhouse/modular_odm-*.whl - - rm -f $HOME/.cache/wheelhouse/mfr-*.whl - - rm -f $HOME/.cache/wheelhouse/responses-*.whl - - rm -f $HOME/.cache/wheelhouse/mendeley-*.whl - - rm -f $HOME/.cache/wheelhouse/feedparser-*.whl - # kill any running processes - - kill -9 $POSTGRES_PID - - kill -9 $ELASTICSEARCH_PID - - kill -9 $ELASTICSEARCH6_PID - -branches: - except: - - /^[0-9]/ diff --git a/CHANGELOG b/CHANGELOG index ea0a600ebd7..80be09be0b5 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -2,6 +2,20 @@ We follow the CalVer (https://calver.org/) versioning scheme: YY.MINOR.MICRO. 
+24.06.0 (2024-09-12) +==================== + +- Fix duplicate notifications for contributor-add failures +- Allow Read and Write contributors to view a project's draft registrations +- Change how files for withdrawn registrations are surfaced in the API +- Fix date displayed in citation for a registration +- New API endpoint /users/me/draft_preprints/ +- Add button to admin to move preprint from initial to pending +- Update language in notifications to indicate preprint resubmission +- Fix Preprint emails so they are sent as expected +- Fix ORCiD emails by sending them after changes are committed to the DB +- Remove references to TravisCI + 24.05.0 (2024-07-22) ==================== - Bump base python version from py3.6 to py3.12. diff --git a/README-docker-compose.md b/README-docker-compose.md index 9d8d6a2ba46..8e2fb5098ef 100644 --- a/README-docker-compose.md +++ b/README-docker-compose.md @@ -3,9 +3,7 @@ 1. Install the Docker Client - OSX: https://www.docker.com/products/docker#/mac - - Ubuntu - - docker: https://docs.docker.com/engine/installation/linux/ubuntulinux - - docker-compose: https://docs.docker.com/compose/install/ + - Ubuntu: https://docs.docker.com/engine/installation/linux/ubuntulinux - Windows: https://www.docker.com/products/docker#/windows 2. Grant the docker client additional resources (recommended minimums of 1 CPU, 8GB memory, 2GB swap, and 32GB disk image size) - OSX: https://docs.docker.com/docker-for-mac/#/preferences @@ -26,16 +24,21 @@ - Ubuntu - Add loopback alias - `sudo ifconfig lo:0 192.168.168.167 netmask 255.255.255.255 up` + ```bash + sudo ifconfig lo:0 192.168.168.167 netmask 255.255.255.255 up + ``` - For persistance, add to /etc/network/interfaces... Add lo:0 to auto line... - ```auto lo lo:0``` + ```bash + auto lo lo:0 + ``` Add stanza for lo:0... - ```iface lo:0 inet static - address 192.168.168.167 - netmask 255.255.255.255 - network 192.168.168.167 + ```bash + iface lo:0 inet static + address 192.168.168.167 + netmask 255.255.255.255 + network 192.168.168.167 ``` - If UFW enabled. Enable UFW forwarding. - https://docs.docker.com/engine/installation/linux/linux-postinstall/#allow-access-to-the-remote-api-through-a-firewall @@ -49,8 +52,10 @@ `sudo usermod -aG docker $USER` - In order to run OSF Preprints, raise fs.inotify.max_user_watches from default value - `echo fs.inotify.max_user_watches=131072 | sudo tee -a /etc/sysctl.conf` - `sudo sysctl -p` + ```bash + echo fs.inotify.max_user_watches=131072 | sudo tee -a /etc/sysctl.conf + sudo sysctl -p + ``` - Windows - Install Microsoft Loopback Adapter (Windows 10 follow community comments as the driver was renamed) @@ -75,19 +80,22 @@ * _NOTE: After making changes to `Environment Variables` or `Volume Mounts` you will need to recreate the container(s)._ - - `$ docker-compose up --force-recreate --no-deps preprints` + ```bash + docker compose up --force-recreate --no-deps preprints + ``` 1. Application Settings - e.g.
OSF & OSF API local.py - - `$ cp ./website/settings/local-dist.py ./website/settings/local.py` - - `$ cp ./api/base/settings/local-dist.py ./api/base/settings/local.py` - - `$ cp ./docker-compose-dist.override.yml ./docker-compose.override.yml` + ```bash + cp ./website/settings/local-dist.py ./website/settings/local.py + cp ./api/base/settings/local-dist.py ./api/base/settings/local.py + cp ./docker-compose-dist.override.yml ./docker-compose.override.yml + ``` For local tasks, (dev only) - `$ cp ./tasks/local-dist.py ./tasks/local.py` + ```bash + cp ./tasks/local-dist.py ./tasks/local.py + ``` 2. OPTIONAL (uncomment the below lines if you will use remote debugging) Environment variables (incl. remote debugging) - e.g. .docker-compose.env @@ -102,63 +110,63 @@ #### Special Instructions for Apple Chipset (M1, M2, etc.) and other ARM64 architecture - * _NOTE: The `elasticsearch`, `elasticsearch6`, and `sharejs` containers are incompatible with ARM64._ + * _NOTE: The default `elasticsearch`, `elasticsearch6`, and `sharejs` containers are incompatible with ARM64._ - - Running containers with docker-compose + - To run `elasticsearch6` on ARM64 architecture: - - Copy an ARM64-compatible configuration to `docker-compose.override.yml`: + - Copy `docker-compose-dist-arm64.override.yml` into your `docker-compose.override.yml` file - `$ cp ./docker-compose-dist-arm64.override.yml ./docker-compose.override.yml` + - Running containers with docker compose - In `webite/settings/local.py`, disable `SEARCH_ENGINE` ```python - # SEARCH_ENGINE = 'elastic' - SEARCH_ENGINE = None + # SEARCH_ENGINE = 'elastic' + SEARCH_ENGINE = None ``` - - Building the Docker image - - - If you wish to use an OSF image other than the latest `develop-arm64`: - - Build the image - ```bash - $ cd - $ git checkout - $ docker buildx build --platform linux/arm64 -t osf:-arm64 . - ``` - - In `docker-compose.override.yml`, replace any `quay.io/centerforopenscience/osf:develop-arm64` with the locally-tagged image above: - ```yml - image: osf:-arm64 - ``` - ## Application Runtime * _NOTE: Running docker containers detached (`-d`) will execute them in the background, if you would like to view/follow their console log output use the following command._ - - `$ docker-compose logs -f --tail 1000 web` + ```bash + docker compose logs -f --tail 1000 web + ``` 1. Application Environment - - `$ docker-compose up requirements mfr_requirements wb_requirements gv_requirements` + ```bash + docker compose up requirements mfr_requirements wb_requirements gv_requirements + ``` - _NOTE: When the various requirements installations are complete these containers will exit. You should only need to run these containers after pulling code that changes python requirements or if you update the python requirements._ + _NOTE: When the various requirements installations are complete these containers will exit. You should only need to run these containers after pulling code that changes python requirements or if you update the python requirements._ 2. Start Core Component Services (Detached) - - `$ docker-compose up -d elasticsearch postgres mongo rabbitmq` + + ```bash + docker compose up -d elasticsearch postgres mongo rabbitmq + ``` 3. 
Remove your existing node_modules and start the assets watcher (Detached) - - `$ rm -Rf ./node_modules` - - `$ docker-compose up -d assets` - - `$ docker-compose up -d admin_assets` + ```bash + rm -Rf ./node_modules + docker compose up -d assets + docker compose up -d admin_assets + ``` + + _NOTE: The first time the assets container is run it will take Webpack/NPM up to 15 minutes to compile resources. + When you see the BowerJS build occurring it is likely a safe time to move forward with starting the remaining + containers._ - _NOTE: The first time the assets container is run it will take Webpack/NPM up to 15 minutes to compile resources. - When you see the BowerJS build occurring it is likely a safe time to move forward with starting the remaining - containers._ 4. Start the Services (Detached) - - `$ docker-compose up -d mfr wb fakecas sharejs` + ```bash + docker compose up -d mfr wb fakecas sharejs + ``` 5. Run migrations and create preprint providers - When starting with an empty database you will need to run migrations and populate preprint providers. See the [Running arbitrary commands](#running-arbitrary-commands) section below for instructions. 6. Start the OSF Web, API Server, and Preprints (Detached) - - `$ docker-compose up -d worker web api admin preprints ember_osf_web gv` + ```bash + docker compose up -d worker web api admin preprints ember_osf_web gv + ``` 7. View the OSF at [http://localhost:5000](http://localhost:5000). @@ -167,52 +175,52 @@ - Once the requirements have all been installed, you can start the OSF in the background with ```bash - $ docker-compose up -d assets admin_assets mfr wb fakecas sharejs worker web api admin preprints ember_osf_web gv + docker compose up -d assets admin_assets mfr wb fakecas sharejs worker web api admin preprints ember_osf_web gv ``` - To view the logs for a given container: ```bash - $ docker-compose logs -f --tail 100 web + docker compose logs -f --tail 100 web ``` ### Helpful aliases - Start all containers ```bash - alias dcsa="docker-compose up -d assets admin_assets mfr wb fakecas sharejs worker elasticsearch elasticsearch6 web api admin preprints gv" + alias dcsa="docker compose up -d assets admin_assets mfr wb fakecas sharejs worker elasticsearch elasticsearch6 web api admin preprints gv" ``` - Shut down all containers ```bash - alias dchs="docker-compose down" + alias dchs="docker compose down" ``` - Attach to container logs - dcl . Ie. `dcl web` will log only the web container - ```bash - alias dcl="docker-compose logs -f --tail 100 " - ``` + ```bash + alias dcl="docker compose logs -f --tail 100 " + ``` - Run migrations (Starting a fresh database or changes to migrations) ```bash - alias dcm="docker-compose run --rm web python3 manage.py migrate" + alias dcm="docker compose run --rm web python3 manage.py migrate" ``` - Download requirements (Whenever the requirements change or first-time set-up) ```bash - alias dcreq="docker-compose up requirements mfr_requirements wb_requirements gv_requirements" + alias dcreq="docker compose up requirements mfr_requirements wb_requirements gv_requirements" ``` - Restart the containers - - `$ dcr `. Ie. `dcr web` will restart the web container + - `dcr `. Ie. `dcr web` will restart the web container ```bash - alias dcr="docker-compose restart -t 0 " + alias dcr="docker compose restart -t 0 " ``` - Start the OSF shell (Interactive python shell that allows working directly with the osf on a code level instead of a web level.) 
```bash - alias dcosfs="docker-compose run --rm web python3 manage.py osf_shell" + alias dcosfs="docker compose run --rm web python3 manage.py osf_shell" ``` - List all these commands @@ -222,35 +230,60 @@ ## Running arbitrary commands -- View logs: `$ docker-compose logs -f --tail 100 ` +- View logs: + ```bash + docker compose logs -f --tail 100 + ``` - _NOTE: CTRL-c will exit_ - Run migrations: - After creating migrations, resetting your database, or starting on a fresh install you will need to run migrations to make the needed changes to database. This command looks at the migrations on disk and compares them to the list of migrations in the `django_migrations` database table and runs any migrations that have not been run. - - `docker-compose run --rm web python3 manage.py migrate` To run `osf` migrations - - `docker-compose run --rm gv python manage.py migrate` To run `gravyvalet(gv)` migrations + - To run `osf` migrations: + ```bash + docker compose run --rm web python3 manage.py migrate + ``` + - To run `gravyvalet(gv)` migrations: + ```bash + docker compose run --rm gv python manage.py migrate + ``` - Populate institutions: - After resetting your database or with a new install you will need to populate the table of institutions. **You must have run migrations first.** - - `docker-compose run --rm web python3 -m scripts.populate_institutions -e test -a` + ```bash + docker compose run --rm web python3 -m scripts.populate_institutions -e test -a + ``` - Populate preprint, registration, and collection providers: - After resetting your database or with a new install, the required providers and subjects will be created automatically **when you run migrations.** To create more: - - `docker-compose run --rm web python3 manage.py populate_fake_providers` + ```bash + docker compose run --rm web python3 manage.py populate_fake_providers + ``` - _NOTE: In case, you encounter error with missing data, when running the `'populate_fake_providers'` command. Fix this with 'update_taxonomies' command:_ - - `docker-compose run --rm web python3 -m scripts.update_taxonomies` + ```bash + docker compose run --rm web python3 -m scripts.update_taxonomies + ``` - Populate citation styles - Needed for api v2 citation style rendering. - - `docker-compose run --rm web python3 -m scripts.parse_citation_styles` + ```bash + docker compose run --rm web python3 -m scripts.parse_citation_styles + ``` - Start ember_osf_web - Needed for quickfiles feature: - - `docker-compose up -d ember_osf_web` + ```bash + docker compose up -d ember_osf_web + ``` - OPTIONAL: Register OAuth Scopes - Needed for things such as the ember-osf dummy app - - `docker-compose run --rm web python3 -m scripts.register_oauth_scopes` + ```bash + docker compose run --rm web python3 -m scripts.register_oauth_scopes + ``` - OPTIONAL: Create migrations: - After changing a model you will need to create migrations and apply them. Migrations are python code that changes either the structure or the data of a database. This will compare the django models on disk to the database, find the differences, and create migration code to change the database. If there are no changes this command is a noop. - - `docker-compose run --rm web python3 manage.py makemigrations` + ```bash + docker compose run --rm web python3 manage.py makemigrations + ``` - OPTIONAL: Destroy and recreate an empty database: - **WARNING**: This will delete all data in your database. 
- - `docker-compose run --rm web python3 manage.py reset_db --noinput` + ```bash + docker compose run --rm web python3 manage.py reset_db --noinput + ``` ## Application Debugging @@ -260,7 +293,7 @@ If you want to debug your changes by using print statements, you'll have to have 1. Edit your container configuration in docker-compose.mfr.env or docker-compose.mfr.env to include the new environment variable by uncommenting PYTHONUNBUFFERED=0 2. If you're using a container running Python 3 you can insert the following code prior to a print statement: - ``` + ```python import functools print = functools.partial(print, flush=True) ``` @@ -278,13 +311,13 @@ You should run the `web` and/or `api` container (depending on which codebase the ```bash # Kill the already-running web container -$ docker-compose kill web +docker compose kill web # Run a web container. App logs and breakpoints will show up here. -$ docker-compose run --rm --service-ports web +docker compose run --rm --service-ports web ``` -**IMPORTANT: While attached to the running app, CTRL-c will stop the container.** To detach from the container and leave it running, **use CTRL-p CTRL-q**. Use `docker attach` to re-attach to the container, passing the *container-name* (which you can get from `docker-compose ps`), e.g. `docker attach osf_web_run_1`. +**IMPORTANT: While attached to the running app, CTRL-c will stop the container.** To detach from the container and leave it running, **use CTRL-p CTRL-q**. Use `docker attach` to re-attach to the container, passing the *container-name* (which you can get from `docker compose ps`), e.g. `docker attach osf_web_run_1`. ### Remote Debugging with PyCharm @@ -300,37 +333,47 @@ $ docker-compose run --rm --service-ports web ## Application Tests - Run All Tests - - `$ docker-compose run --rm web invoke test` - -- Run OSF Specific Tests - - `$ docker-compose run --rm web invoke test_osf` + ```bash + docker compose run --rm web python3 -m pytest + ``` - Test a Specific Module - - `$ docker-compose run --rm web invoke test_module -m tests/test_conferences.py` + ```bash + docker compose run --rm web python3 -m pytest tests/test_conferences.py + ``` - Test a Specific Class - - `docker-compose run --rm web invoke test_module -m tests/test_conferences.py::TestProvisionNode` + ```bash + docker compose run --rm web python3 -m pytest tests/test_conferences.py::TestProvisionNode + ``` - Test a Specific Method - - `$ docker-compose run --rm web invoke test_module -m tests/test_conferences.py::TestProvisionNode::test_upload` - -- Test with Specific Parameters (1 cpu, capture stdout) - - `$ docker-compose run --rm web invoke test_module -m tests/test_conferences.py::TestProvisionNode::test_upload -n 1 --params '--capture=sys'` + ```bash + docker compose run --rm web python3 -m pytest tests/test_conferences.py::TestProvisionNode::test_upload + ``` ## Managing Container State Restart a container: - - `$ docker-compose restart -t 0 assets` +```bash +docker compose restart -t 0 assets +``` Recreate a container _(useful to ensure all environment variables/volume changes are in order)_: - - `$ docker-compose up --force-recreate --no-deps assets` + ```bash + docker compose up --force-recreate --no-deps assets + ``` Delete a container _(does not remove volumes)_: - - `$ docker-compose stop -t 0 assets` - - `$ docker-compose rm assets` + ```bash + docker compose stop -t 0 assets + docker compose rm assets + ``` List containers and status: - - `$ docker-compose ps` +```bash +docker compose ps +``` ### Backing up your 
database In certain cases, you may wish to remove all docker container images, but preserve a copy of the database used by your @@ -342,11 +385,13 @@ resetting docker. To back up your database, follow the following sequence of com ([as of this writing](https://github.com/CenterForOpenScience/osf.io/blob/ce1702cbc95eb7777e5aaf650658a9966f0e6b0c/docker-compose.yml#L53), Postgres 15) 2. Start postgres locally. This must be on a different port than the one used by [docker postgres](https://github.com/CenterForOpenScience/osf.io/blob/ce1702cbc95eb7777e5aaf650658a9966f0e6b0c/docker-compose.yml#L61). Eg, `pg_ctl -D /usr/local/var/postgres start -o "-p 5433"` -3. Verify that the postgres docker container is running (`docker-compose up -d postgres`) +3. Verify that the postgres docker container is running (`docker compose up -d postgres`) 4. Tell your local (non-docker) version of postgres to connect to (and back up) data from the instance in docker - (defaults to port 5432): - `pg_dump --username postgres --compress 9 --create --clean --format d --jobs 4 --host localhost --file ~/Desktop/osf_backup osf` for osf -5. The same can be done for `grayvalet`, just replace `osf` with `gravyvalet` (this applies for all following commands related to backups) + (defaults to port 5432). For `osf` run: + ```bash + pg_dump --username postgres --compress 9 --create --clean --format d --jobs 4 --host localhost --file ~/Desktop/osf_backup osf + ``` +5. The same can be done for `gravyvalet`, just replace `osf` with `gravyvalet` (this applies for all following commands related to backups) (shorthand: `pg_dump -U postgres -Z 9 -C --c -Fd --j 4 -h localhost --f ~/Desktop/osf_backup osf`) ``` 2. Delete a persistent storage volume: **WARNING: All postgres data will be destroyed.** - - `$ docker-compose stop -t 0 postgres` - - `$ docker-compose rm postgres` - - `$ docker volume rm osfio_postgres_data_vol` +```bash +docker compose stop -t 0 postgres +docker compose rm postgres +docker volume rm osfio_postgres_data_vol +``` 3. Starting a new postgres container. ```bash -docker-compose up -d postgres +docker compose up -d postgres ``` 4. Restoring the database from the dump file into the new postgres container.
```bash @@ -376,7 +423,7 @@ instructions on dropping postgres data volumes) When ready, run the restore command from a local terminal: ```bash -$ pg_restore --username postgres --clean --dbname osf --format d --jobs 4 --host localhost ~/Desktop/osf_backup +pg_restore --username postgres --clean --dbname osf --format d --jobs 4 --host localhost ~/Desktop/osf_backup ``` (shorthand) `pg_restore -U postgres -c -d osf -Fd -j 4 -h localhost ~/Desktop/osf_backup` @@ -386,29 +433,31 @@ $ pg_restore --username postgres --clean --dbname osf --format d --jobs 4 --host Resetting the Environment: **WARNING: All volumes and containers are destroyed** - - `$ docker-compose down -v` + - `docker compose down -v` Delete a persistent storage volume: **WARNING: All postgres data will be destroyed.** - - `$ docker-compose stop -t 0 postgres` - - `$ docker-compose rm postgres` - - `$ docker volume rm osfio_postgres_data_vol` + ```bash + docker compose stop -t 0 postgres + docker compose rm postgres + docker volume rm osfio_postgres_data_vol + ``` ## Updating ```bash -$ git stash # if you have any changes that need to be stashed -$ git pull upstream develop # (replace upstream with the name of your remote) -$ git stash pop # unstash changes +git stash # if you have any changes that need to be stashed +git pull upstream develop # (replace upstream with the name of your remote) +git stash pop # unstash changes # If you get an out of space error -$ docker image prune +docker image prune # Pull latest images -$ docker-compose pull +docker compose pull # It is recommended to run requirements only for services that require update, not to wear off local SSD more than needed -$ docker-compose up requirements mfr_requirements wb_requirements gv_requirements +docker compose up requirements mfr_requirements wb_requirements gv_requirements # Run db migrations -$ docker-compose run --rm web python3 manage.py migrate +docker compose run --rm web python3 manage.py migrate ``` ## Miscellaneous @@ -425,7 +474,7 @@ The issue is that docker containers run in unprivileged mode by default. For `docker run`, you can use `--privilege=true` to give the container extended privileges. You can also add or drop capabilities by using `cap-add` and `cap-drop`. Since Docker 1.12, there is no need to add `--security-opt seccomp=unconfined` because the seccomp profile will adjust to selected capabilities. ([Reference](https://docs.docker.com/engine/reference/run/#runtime-privilege-and-linux-capabilities)) -When using `docker-compose`, set `privileged: true` for individual containers in the `docker-compose.yml`. ([Reference](https://docs.docker.com/compose/compose-file/#domainname-hostname-ipc-mac_address-privileged-read_only-shm_size-stdin_open-tty-user-working_dir)) Here is an example for WaterButler: +When using `docker compose`, set `privileged: true` for individual containers in the `docker-compose.yml`. 
([Reference](https://docs.docker.com/compose/compose-file/#domainname-hostname-ipc-mac_address-privileged-read_only-shm_size-stdin_open-tty-user-working_dir)) Here is an example for WaterButler: ```yml wb: diff --git a/addons/base/views.py b/addons/base/views.py index 6253f7bc91b..dbb3c1a3072 100644 --- a/addons/base/views.py +++ b/addons/base/views.py @@ -317,6 +317,9 @@ def get_authenticated_resource(resource_id): if resource.deleted: raise HTTPError(http_status.HTTP_410_GONE, message='Resource has been deleted.') + if getattr(resource, 'is_retracted', False): + raise HTTPError(http_status.HTTP_410_GONE, message='Resource has been retracted.') + return resource diff --git a/addons/dataverse/settings/local-travis.py b/addons/dataverse/settings/local-ci.py similarity index 100% rename from addons/dataverse/settings/local-travis.py rename to addons/dataverse/settings/local-ci.py diff --git a/admin/nodes/urls.py b/admin/nodes/urls.py index 6a918831f4c..5036b9dd06d 100644 --- a/admin/nodes/urls.py +++ b/admin/nodes/urls.py @@ -37,4 +37,5 @@ name='recalculate-node-storage'), re_path(r'^(?P[a-z0-9]+)/make_private/$', views.NodeMakePrivate.as_view(), name='make-private'), re_path(r'^(?P[a-z0-9]+)/make_public/$', views.NodeMakePublic.as_view(), name='make-public'), + re_path(r'^(?P[a-z0-9]+)/remove_notifications/$', views.NodeRemoveNotificationView.as_view(), name='node-remove-notifications'), ] diff --git a/admin/nodes/views.py b/admin/nodes/views.py index e7902956add..74b6b08feae 100644 --- a/admin/nodes/views.py +++ b/admin/nodes/views.py @@ -21,6 +21,7 @@ from admin.base.utils import change_embargo_date, validate_embargo_date from admin.base.views import GuidView from admin.base.forms import GuidForm +from admin.notifications.views import detect_duplicate_notifications, delete_selected_notifications from api.share.utils import update_share from api.caching.tasks import update_storage_usage_cache @@ -92,12 +93,30 @@ class NodeView(NodeMixin, GuidView): raise_exception = True def get_context_data(self, **kwargs): - return super().get_context_data(**{ + context = super().get_context_data(**kwargs) + node = self.get_object() + + detailed_duplicates = detect_duplicate_notifications(node_id=node.id) + + context.update({ 'SPAM_STATUS': SpamStatus, 'STORAGE_LIMITS': settings.StorageLimits, - 'node': kwargs.pop('object', self.get_object()), - }, **kwargs) + 'node': node, + 'duplicates': detailed_duplicates + }) + + return context + +class NodeRemoveNotificationView(View): + def post(self, request, *args, **kwargs): + selected_ids = request.POST.getlist('selected_notifications') + if selected_ids: + delete_selected_notifications(selected_ids) + messages.success(request, 'Selected notifications were successfully deleted.') + else: + messages.error(request, 'No notifications selected for deletion.') + return redirect('nodes:node', guid=kwargs.get('guid')) class NodeSearchView(PermissionRequiredMixin, FormView): """ Allows authorized users to search for a node by it's guid. 
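Reviewer note (not part of the diff): `NodeView` and `NodeRemoveNotificationView` above consume the two helpers added in `admin/notifications/views.py` below. Here is a minimal sketch of how they compose, assuming a configured Django/OSF environment; `prune_duplicate_subscriptions` is a hypothetical helper for illustration, not something this PR adds.

```python
# Hypothetical glue code: keep the oldest subscription in each duplicate
# group reported by detect_duplicate_notifications() and delete the rest.
from admin.notifications.views import (
    delete_selected_notifications,
    detect_duplicate_notifications,
)


def prune_duplicate_subscriptions(node):
    """Return the ids of the duplicate subscriptions that were deleted."""
    duplicates = detect_duplicate_notifications(node_id=node.id)
    seen, to_delete = set(), []
    for entry in duplicates:
        # Entries sharing an '_id' are listed consecutively, ordered by
        # 'created', so the first one seen is the oldest and is kept.
        if entry['_id'] in seen:
            to_delete.append(entry['id'])
        else:
            seen.add(entry['_id'])
    if to_delete:
        delete_selected_notifications(to_delete)
    return to_delete
```

The admin template posts the same ids as `selected_notifications`, so this mirrors what `NodeRemoveNotificationView.post` does with a user-selected subset.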
diff --git a/admin/notifications/views.py b/admin/notifications/views.py new file mode 100644 index 00000000000..7a3a13a8df8 --- /dev/null +++ b/admin/notifications/views.py @@ -0,0 +1,30 @@ +from osf.models.notifications import NotificationSubscription +from django.db.models import Count + +def delete_selected_notifications(selected_ids): + NotificationSubscription.objects.filter(id__in=selected_ids).delete() + +def detect_duplicate_notifications(node_id=None): + query = NotificationSubscription.objects.values('_id').annotate(count=Count('_id')).filter(count__gt=1) + if node_id: + query = query.filter(node_id=node_id) + + detailed_duplicates = [] + for dup in query: + notifications = NotificationSubscription.objects.filter( + _id=dup['_id'] + ).order_by('created') + + for notification in notifications: + detailed_duplicates.append({ + 'id': notification.id, + '_id': notification._id, + 'event_name': notification.event_name, + 'created': notification.created, + 'count': dup['count'], + 'email_transactional': [u._id for u in notification.email_transactional.all()], + 'email_digest': [u._id for u in notification.email_digest.all()], + 'none': [u._id for u in notification.none.all()] + }) + + return detailed_duplicates diff --git a/admin/preprints/forms.py b/admin/preprints/forms.py index 15b0ba077ea..2ea91931018 100644 --- a/admin/preprints/forms.py +++ b/admin/preprints/forms.py @@ -1,9 +1,33 @@ from django import forms from osf.models import Preprint - +from osf.utils.workflows import ReviewStates class ChangeProviderForm(forms.ModelForm): class Meta: model = Preprint fields = ('provider',) + + +class MachineStateForm(forms.ModelForm): + class Meta: + model = Preprint + fields = ('machine_state',) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + if not self.instance.is_public: + self.fields['machine_state'].widget.attrs['disabled'] = 'disabled' + else: + if self.instance.machine_state == ReviewStates.INITIAL.db_name: + self.fields['machine_state'].choices = [ + (ReviewStates.INITIAL.value, ReviewStates.INITIAL.value), + (ReviewStates.PENDING.value, ReviewStates.PENDING.value), + ] + else: + # Disabled Option you are on + self.fields['machine_state'].widget.attrs['disabled'] = 'disabled' + self.fields['machine_state'].choices = [ + (self.instance.machine_state.title(), self.instance.machine_state) + ] diff --git a/admin/preprints/urls.py b/admin/preprints/urls.py index ddbbc9c4a54..cec79891134 100644 --- a/admin/preprints/urls.py +++ b/admin/preprints/urls.py @@ -10,6 +10,8 @@ re_path(r'^known_ham$', views.PreprintKnownHamList.as_view(), name='known-ham'), re_path(r'^withdrawal_requests$', views.PreprintWithdrawalRequestList.as_view(), name='withdrawal-requests'), re_path(r'^(?P[a-z0-9]+)/$', views.PreprintView.as_view(), name='preprint'), + re_path(r'^(?P[a-z0-9]+)/change_provider/$', views.PreprintProviderChangeView.as_view(), name='preprint-provider'), + re_path(r'^(?P[a-z0-9]+)/machine_state/$', views.PreprintMachineStateView.as_view(), name='preprint-machine-state'), re_path(r'^(?P[a-z0-9]+)/reindex_share_preprint/$', views.PreprintReindexShare.as_view(), name='reindex-share-preprint'), re_path(r'^(?P[a-z0-9]+)/remove_user/(?P[a-z0-9]+)/$', views.PreprintRemoveContributorView.as_view(), diff --git a/admin/preprints/views.py b/admin/preprints/views.py index f8950c349c9..80f6da1f059 100644 --- a/admin/preprints/views.py +++ b/admin/preprints/views.py @@ -15,7 +15,7 @@ from admin.base.views import GuidView from admin.base.forms import GuidForm from 
admin.nodes.views import NodeRemoveContributorView -from admin.preprints.forms import ChangeProviderForm +from admin.preprints.forms import ChangeProviderForm, MachineStateForm from api.share.utils import update_share @@ -62,6 +62,21 @@ class PreprintView(PreprintMixin, GuidView): """ template_name = 'preprints/preprint.html' permission_required = ('osf.view_preprint', 'osf.change_preprint',) + + def get_context_data(self, **kwargs): + preprint = self.get_object() + return super().get_context_data(**{ + 'preprint': preprint, + 'SPAM_STATUS': SpamStatus, + 'change_provider_form': ChangeProviderForm(instance=preprint), + 'change_machine_state_form': MachineStateForm(instance=preprint), + }, **kwargs) + + +class PreprintProviderChangeView(PreprintMixin, GuidView): + """ Allows authorized users to view preprint info and change a preprint's provider. + """ + permission_required = ('osf.view_preprint', 'osf.change_preprint',) form_class = ChangeProviderForm def post(self, request, *args, **kwargs): @@ -79,13 +94,26 @@ def post(self, request, *args, **kwargs): return redirect(self.get_success_url()) - def get_context_data(self, **kwargs): + +class PreprintMachineStateView(PreprintMixin, GuidView): + """ Allows authorized users to view preprint info and change a preprint's machine_state. + """ + permission_required = ('osf.view_preprint', 'osf.change_preprint',) + form_class = MachineStateForm + + def post(self, request, *args, **kwargs): preprint = self.get_object() - return super().get_context_data(**{ - 'preprint': preprint, - 'SPAM_STATUS': SpamStatus, - 'form': ChangeProviderForm(instance=preprint), - }, **kwargs) + new_machine_state = request.POST.get('machine_state') + if new_machine_state and preprint.machine_state != new_machine_state: + preprint.machine_state = new_machine_state + try: + preprint.save() + except Exception as e: + messages.error(self.request, e.message) + + preprint.refresh_from_db() + + return redirect(self.get_success_url()) class PreprintSearchView(PermissionRequiredMixin, FormView): diff --git a/admin/templates/nodes/node.html b/admin/templates/nodes/node.html index 2a410881666..6ec71e2dfdc 100644 --- a/admin/templates/nodes/node.html +++ b/admin/templates/nodes/node.html @@ -104,6 +104,50 @@

{{ node.type|cut:'osf.'|title }}: {{ node.title }} + +

Duplicate Notifications

+ {% if duplicates %} +
+ {% csrf_token %} + + + + + + + + + + + + + + {% for notification in duplicates %} + + + + + + + + + + {% empty %} + + + + {% endfor %} + +
SelectEvent NameCreatedCountEmail TransactionalEmail DigestNone
{{ notification.event_name }}{{ notification.created }}{{ notification.count }}{{ notification.email_transactional|join:", " }}{{ notification.email_digest|join:", " }}{{ notification.none|join:", " }}
No duplicate notifications found!
+ +
+ {% else %} +

No duplicate notifications found.

+ {% endif %} + + diff --git a/admin/templates/preprints/machine_state.html b/admin/templates/preprints/machine_state.html new file mode 100644 index 00000000000..0d133b037bb --- /dev/null +++ b/admin/templates/preprints/machine_state.html @@ -0,0 +1,22 @@ +{% load node_extras %} + + Machine State + +

{{ preprint.machine_state }}

+

{{ preprint.state }}

+ {% if perms.osf.change_preprint %} +
+ Change preprint machine_state + +
+
+
+ {% csrf_token %} + {{ change_machine_state_form.as_p }} + +
+
+
+ {% endif %} + + \ No newline at end of file diff --git a/admin/templates/preprints/preprint.html b/admin/templates/preprints/preprint.html index 4d96190339f..0b76a65951f 100644 --- a/admin/templates/preprints/preprint.html +++ b/admin/templates/preprints/preprint.html @@ -74,10 +74,6 @@

Preprint: {{ preprint.title }} Published {{ preprint.is_published }} - - Machine State - {{ preprint.machine_state }} - {% if preprint.is_published %} Date Published @@ -104,6 +100,7 @@

Preprint: {{ preprint.title }} {% endif %} {% include "preprints/provider.html" with preprint=preprint %} + {% include "preprints/machine_state.html" with preprint=preprint %} Subjects diff --git a/admin/templates/preprints/provider.html b/admin/templates/preprints/provider.html index 4d14d1faf03..4a640b997c7 100644 --- a/admin/templates/preprints/provider.html +++ b/admin/templates/preprints/provider.html @@ -9,9 +9,9 @@
-
+ {% csrf_token %} - {{ form.as_p }} + {{ change_provider_form.as_p }}
diff --git a/admin_tests/notifications/__init__.py b/admin_tests/notifications/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/admin_tests/notifications/test_views.py b/admin_tests/notifications/test_views.py new file mode 100644 index 00000000000..08ad695edd1 --- /dev/null +++ b/admin_tests/notifications/test_views.py @@ -0,0 +1,39 @@ +import pytest +from django.test import RequestFactory +from osf.models import OSFUser, NotificationSubscription, Node +from admin.notifications.views import ( + delete_selected_notifications, + detect_duplicate_notifications, +) +from tests.base import AdminTestCase + +pytestmark = pytest.mark.django_db + +class TestNotificationFunctions(AdminTestCase): + + def setUp(self): + super().setUp() + self.user = OSFUser.objects.create(username='admin', is_staff=True) + self.node = Node.objects.create(creator=self.user, title='Test Node') + self.request_factory = RequestFactory() + + def test_delete_selected_notifications(self): + notification1 = NotificationSubscription.objects.create(user=self.user, node=self.node, event_name='event1') + notification2 = NotificationSubscription.objects.create(user=self.user, node=self.node, event_name='event2') + notification3 = NotificationSubscription.objects.create(user=self.user, node=self.node, event_name='event3') + + delete_selected_notifications([notification1.id, notification2.id]) + + assert not NotificationSubscription.objects.filter(id__in=[notification1.id, notification2.id]).exists() + assert NotificationSubscription.objects.filter(id=notification3.id).exists() + + def test_detect_duplicate_notifications(self): + NotificationSubscription.objects.create(user=self.user, node=self.node, event_name='event1') + NotificationSubscription.objects.create(user=self.user, node=self.node, event_name='event1') + NotificationSubscription.objects.create(user=self.user, node=self.node, event_name='event2') + + duplicates = detect_duplicate_notifications() + + print(f"Detected duplicates: {duplicates}") + + assert len(duplicates) == 3, f"Expected 3 duplicates, but found {len(duplicates)}" diff --git a/admin_tests/preprints/test_views.py b/admin_tests/preprints/test_views.py index 2c9d46c48a0..3d06b2c2f86 100644 --- a/admin_tests/preprints/test_views.py +++ b/admin_tests/preprints/test_views.py @@ -57,7 +57,7 @@ class TestPreprintView: @pytest.fixture() def plain_view(self): - return views.PreprintView + return views.PreprintProviderChangeView @pytest.fixture() def view(self, req, plain_view): @@ -589,3 +589,63 @@ def test_approve_reject_on_list_view(self, withdrawal_request, admin, action, fi assert original_comment == withdrawal_request.target.withdrawal_justification else: assert not withdrawal_request.target.withdrawal_justification + + +@pytest.mark.urls('admin.base.urls') +@pytest.mark.django_db +class TestPreprintMachineStateView: + + @pytest.fixture() + def preprint(self): + return PreprintFactory() + + @pytest.fixture() + def user(self): + return AuthUserFactory() + + @pytest.fixture() + def admin_user(self): + admin_user = AuthUserFactory() + admin_user.is_admin = True + admin_user.save() + return admin_user + + @pytest.fixture() + def req(self, user): + req = RequestFactory().post('/fake_path') + req.user = user + return req + + @pytest.fixture() + def admin_req(self, admin_user): + req = RequestFactory().post('/fake_path') + req.user = admin_user + return req + + def test_post_changes_machine_state(self, admin_req, preprint): + new_state = 'new_state' + admin_req.POST = {'machine_state': 
new_state} + + view = setup_view(views.PreprintMachineStateView(), admin_req, guid=preprint._id) + response = view.post(admin_req) + + preprint.refresh_from_db() + assert preprint.machine_state == new_state + assert response.status_code == 302 + + def test_post_no_change_in_machine_state(self, admin_req, preprint): + current_state = preprint.machine_state + admin_req.POST = {'machine_state': current_state} + + view = setup_view(views.PreprintMachineStateView(), admin_req, guid=preprint._id) + response = view.post(admin_req) + + preprint.refresh_from_db() + assert preprint.machine_state == current_state + assert response.status_code == 302 + + def test_no_permission_raises_error(self, req, preprint): + request = RequestFactory().post(reverse('preprints:preprint-machine-state', kwargs={'guid': preprint._id})) + request.user = req.user + with pytest.raises(PermissionDenied): + views.PreprintMachineStateView.as_view()(request, guid=preprint._id) diff --git a/api/base/settings/defaults.py b/api/base/settings/defaults.py index d74e744f787..136f7f48b6b 100644 --- a/api/base/settings/defaults.py +++ b/api/base/settings/defaults.py @@ -317,7 +317,7 @@ # django-elasticsearch-metrics ELASTICSEARCH_DSL = { 'default': { - 'hosts': os.environ.get('ELASTIC6_URI', '127.0.0.1:9201'), + 'hosts': osf_settings.ELASTIC6_URI, 'retry_on_timeout': True, }, } @@ -360,7 +360,7 @@ MAX_SIZE_OF_ES_QUERY = 10000 DEFAULT_ES_NULL_VALUE = 'N/A' -TRAVIS_ENV = False +CI_ENV = False CITATION_STYLES_REPO_URL = 'https://github.com/CenterForOpenScience/styles/archive/88e6ed31a91e9f5a480b486029cda97b535935d4.zip' DEFAULT_AUTO_FIELD = 'django.db.models.AutoField' diff --git a/api/base/settings/local-travis.py b/api/base/settings/local-ci.py similarity index 97% rename from api/base/settings/local-travis.py rename to api/base/settings/local-ci.py index 8b98f743169..159ff9a777f 100644 --- a/api/base/settings/local-travis.py +++ b/api/base/settings/local-ci.py @@ -25,4 +25,4 @@ ALLOWED_HOSTS.append('localhost') -TRAVIS_ENV = True +CI_ENV = True diff --git a/api/draft_registrations/permissions.py b/api/draft_registrations/permissions.py index 83bf44a612e..5232ee9d546 100644 --- a/api/draft_registrations/permissions.py +++ b/api/draft_registrations/permissions.py @@ -8,6 +8,8 @@ OSFUser, ) from api.nodes.permissions import ContributorDetailPermissions +from osf.utils.permissions import WRITE, ADMIN + class IsContributorOrAdminContributor(permissions.BasePermission): """ @@ -57,3 +59,34 @@ class DraftContributorDetailPermissions(ContributorDetailPermissions): def load_resource(self, context, view): return DraftRegistration.load(context['draft_id']) + + +class DraftRegistrationPermission(permissions.BasePermission): + """ + Check permissions for draft and node, Admin can create (POST) or edit (PATCH, PUT) to a DraftRegistration, but write + users can only edit them. Node permissions are inherited by the DraftRegistration when they are higher. 
+ """ + acceptable_models = (DraftRegistration, AbstractNode) + + def has_object_permission(self, request, view, obj): + auth = get_user_auth(request) + + if not auth.user: + return False + + if request.method in permissions.SAFE_METHODS: + if isinstance(obj, DraftRegistration): + return obj.can_view(auth) + elif isinstance(obj, AbstractNode): + return obj.can_view(auth) + elif request.method == 'POST': # Only Admin can create a draft registration + if isinstance(obj, DraftRegistration): + return obj.is_contributor(auth.user) and obj.has_permission(auth.user, ADMIN) + elif isinstance(obj, AbstractNode): + return obj.has_permission(auth.user, ADMIN) + else: + if isinstance(obj, DraftRegistration): + return obj.is_contributor(auth.user) and obj.has_permission(auth.user, WRITE) + elif isinstance(obj, AbstractNode): + return obj.has_permission(auth.user, WRITE) + return False diff --git a/api/draft_registrations/views.py b/api/draft_registrations/views.py index 16443195492..30c583dd94a 100644 --- a/api/draft_registrations/views.py +++ b/api/draft_registrations/views.py @@ -6,7 +6,7 @@ from api.base.pagination import DraftRegistrationContributorPagination from api.draft_registrations.permissions import ( DraftContributorDetailPermissions, - IsContributorOrAdminContributor, + DraftRegistrationPermission, IsAdminContributor, ) from api.draft_registrations.serializers import ( @@ -50,9 +50,9 @@ def check_resource_permissions(self, resource): class DraftRegistrationList(NodeDraftRegistrationsList): permission_classes = ( - IsContributorOrAdminContributor, drf_permissions.IsAuthenticatedOrReadOnly, base_permissions.TokenHasScope, + DraftRegistrationPermission, ) view_category = 'draft_registrations' @@ -70,10 +70,9 @@ def get_queryset(self): # Returns DraftRegistrations for which a user is a contributor return user.draft_registrations_active - class DraftRegistrationDetail(NodeDraftRegistrationDetail, DraftRegistrationMixin): permission_classes = ( - ContributorOrPublic, + DraftRegistrationPermission, AdminDeletePermissions, drf_permissions.IsAuthenticatedOrReadOnly, base_permissions.TokenHasScope, diff --git a/api/files/serializers.py b/api/files/serializers.py index 20287c90454..e68845c4cd1 100644 --- a/api/files/serializers.py +++ b/api/files/serializers.py @@ -445,7 +445,6 @@ def to_representation(self, value): guid = Guid.load(view.kwargs['file_id']) if guid: data['data']['id'] = guid._id - return data diff --git a/api/files/views.py b/api/files/views.py index 4a4861f31ec..5a498fa7089 100644 --- a/api/files/views.py +++ b/api/files/views.py @@ -57,6 +57,9 @@ def get_file(self, check_permissions=True): if obj.target.creator.is_disabled: raise Gone(detail='This user has been deactivated and their quickfiles are no longer available.') + if getattr(obj.target, 'is_retracted', False): + raise Gone(detail='The requested file is no longer available.') + if check_permissions: # May raise a permission denied self.check_object_permissions(self.request, obj) diff --git a/api/institutions/authentication.py b/api/institutions/authentication.py index f475db8860c..30e9d7a7502 100644 --- a/api/institutions/authentication.py +++ b/api/institutions/authentication.py @@ -25,6 +25,7 @@ from website.mails import send_mail, WELCOME_OSF4I, DUPLICATE_ACCOUNTS_OSF4I, ADD_SSO_EMAIL_OSF4I from website.settings import OSF_SUPPORT_EMAIL, DOMAIN +from website.util.metrics import institution_source_tag logger = logging.getLogger(__name__) @@ -388,6 +389,8 @@ def authenticate(self, request): sso_mail=sso_email, 
sso_department=department, ) + if is_created: + user.add_system_tag(institution_source_tag(secondary_institution._id)) # Storage region is only updated if the user is created via institutional SSO; the region will be set to the # institution's preferred one if the user's current region is not in the institution's default region list. diff --git a/api/nodes/views.py b/api/nodes/views.py index f43dbf17ad2..c19b7a2762a 100644 --- a/api/nodes/views.py +++ b/api/nodes/views.py @@ -5,7 +5,7 @@ from django.db.models import F, Max, Q, Subquery from django.utils import timezone from django.contrib.contenttypes.models import ContentType -from rest_framework import generics, permissions as drf_permissions +from rest_framework import generics, permissions as drf_permissions, exceptions from rest_framework.exceptions import PermissionDenied, ValidationError, NotFound, MethodNotAllowed, NotAuthenticated from rest_framework.response import Response from rest_framework.status import HTTP_202_ACCEPTED, HTTP_204_NO_CONTENT @@ -66,6 +66,7 @@ NodeCommentSerializer, ) from api.draft_registrations.serializers import DraftRegistrationSerializer, DraftRegistrationDetailSerializer +from api.draft_registrations.permissions import DraftRegistrationPermission from api.files.serializers import FileSerializer, OsfStorageFileSerializer from api.files import annotations as file_annotations from api.identifiers.serializers import NodeIdentifierSerializer @@ -75,7 +76,6 @@ from api.nodes.filters import NodesFilterMixin from api.nodes.permissions import ( IsAdmin, - IsAdminContributor, IsPublic, AdminOrPublic, WriteAdmin, @@ -626,7 +626,7 @@ class NodeDraftRegistrationsList(JSONAPIBaseView, generics.ListCreateAPIView, No Use DraftRegistrationsList endpoint instead. """ permission_classes = ( - IsAdminContributor, + DraftRegistrationPermission, drf_permissions.IsAuthenticatedOrReadOnly, base_permissions.TokenHasScope, ) @@ -649,8 +649,11 @@ def get_serializer_class(self): # overrides ListCreateAPIView def get_queryset(self): + user = self.request.user node = self.get_node() - return node.draft_registrations_active + if user.is_anonymous: + raise exceptions.NotAuthenticated() + return user.draft_registrations_active.filter(branched_from=node) class NodeDraftRegistrationDetail(JSONAPIBaseView, generics.RetrieveUpdateDestroyAPIView, DraftMixin): @@ -660,9 +663,9 @@ class NodeDraftRegistrationDetail(JSONAPIBaseView, generics.RetrieveUpdateDestro Use DraftRegistrationDetail endpoint instead. 
""" permission_classes = ( + DraftRegistrationPermission, drf_permissions.IsAuthenticatedOrReadOnly, base_permissions.TokenHasScope, - IsAdminContributor, ) parser_classes = (JSONAPIMultipleRelationshipsParser, JSONAPIMultipleRelationshipsParserForRegularJSON) diff --git a/api/preprints/serializers.py b/api/preprints/serializers.py index 3936292b6cb..2646b5ea413 100644 --- a/api/preprints/serializers.py +++ b/api/preprints/serializers.py @@ -447,6 +447,21 @@ def set_field(self, func, val, auth, save=False): raise exceptions.ValidationError(detail=str(e)) +class PreprintDraftSerializer(PreprintSerializer): + + def get_absolute_url(self, obj): + return absolute_reverse( + 'users:user-draft-preprints', + kwargs={ + 'preprint_id': obj._id, + 'version': self.context['request'].parser_context['kwargs']['version'], + }, + ) + + class Meta: + type_ = 'draft-preprints' + + class PreprintCreateSerializer(PreprintSerializer): # Overrides PreprintSerializer to make id nullable, adds `create` id = IDField(source='_id', required=False, allow_null=True) diff --git a/api/providers/serializers.py b/api/providers/serializers.py index 91a13bb980d..ef89388e281 100644 --- a/api/providers/serializers.py +++ b/api/providers/serializers.py @@ -330,10 +330,11 @@ def create(self, validated_data): raise ValidationError('"full_name" is required when adding a moderator via email.') user = OSFUser.create_unregistered(full_name, email=address) user.add_unclaimed_record( - provider, referrer=auth.user, - given_name=full_name, email=address, + provider, + referrer=auth.user, + given_name=full_name, + email=address, ) - user.save() claim_url = user.get_claim_url(provider._id, external=True) context['claim_url'] = claim_url else: diff --git a/api/users/serializers.py b/api/users/serializers.py index fd92914df05..5e8ca59d9cf 100644 --- a/api/users/serializers.py +++ b/api/users/serializers.py @@ -19,11 +19,10 @@ JSONAPIListField, ShowIfCurrentUser, ) -from api.base.utils import absolute_reverse, get_user_auth, is_deprecated, hashids -from api.base.utils import default_node_list_queryset +from api.base.utils import absolute_reverse, default_node_list_queryset, get_user_auth, is_deprecated, hashids from api.base.versioning import get_kebab_snake_case_field from api.nodes.serializers import NodeSerializer, RegionRelationshipField -from framework.auth.views import send_confirm_email +from framework.auth.views import send_confirm_email_async from osf.exceptions import ValidationValueError, ValidationError, BlockedEmailError from osf.models import Email, Node, OSFUser, Preprint, Registration from osf.models.provider import AbstractProviderGroupObjectPermission @@ -140,6 +139,14 @@ class UserSerializer(JSONAPISerializer): ), ) + draft_preprints = HideIfDisabled( + RelationshipField( + related_view='users:user-draft-preprints', + related_view_kwargs={'user_id': '<_id>'}, + related_meta={'count': 'get_draft_preprint_count'}, + ), + ) + emails = ShowIfCurrentUser( RelationshipField( related_view='users:user-emails', @@ -202,6 +209,11 @@ def get_preprint_count(self, obj): user_preprints_query = Preprint.objects.filter(_contributors__guids___id=obj._id).exclude(machine_state='initial') return Preprint.objects.can_view(user_preprints_query, auth_user, allow_contribs=False).count() + def get_draft_preprint_count(self, obj): + auth_user = get_user_auth(self.context['request']).user + user_preprints_query = Preprint.objects.filter(_contributors__guids___id=obj._id).filter(machine_state='initial') + return 
Preprint.objects.can_view(user_preprints_query, auth_user, allow_contribs=False).count() + def get_institutions_count(self, obj): if isinstance(obj, OSFUser): return obj.get_affiliated_institutions().count() @@ -610,7 +622,7 @@ def create(self, validated_data): token = user.add_unconfirmed_email(address) user.save() if CONFIRM_REGISTRATIONS_BY_EMAIL: - send_confirm_email(user, email=address) + send_confirm_email_async(user, email=address) user.email_last_sent = timezone.now() user.save() except ValidationError as e: diff --git a/api/users/urls.py b/api/users/urls.py index 8e273368e44..cf9bd0bb7b9 100644 --- a/api/users/urls.py +++ b/api/users/urls.py @@ -16,6 +16,7 @@ re_path(r'^(?P\w+)/nodes/$', views.UserNodes.as_view(), name=views.UserNodes.view_name), re_path(r'^(?P\w+)/groups/$', views.UserGroups.as_view(), name=views.UserGroups.view_name), re_path(r'^(?P\w+)/preprints/$', views.UserPreprints.as_view(), name=views.UserPreprints.view_name), + re_path(r'^(?P\w+)/draft_preprints/$', views.UserDraftPreprints.as_view(), name=views.UserDraftPreprints.view_name), re_path(r'^(?P\w+)/registrations/$', views.UserRegistrations.as_view(), name=views.UserRegistrations.view_name), re_path(r'^(?P\w+)/settings/$', views.UserSettings.as_view(), name=views.UserSettings.view_name), re_path(r'^(?P\w+)/quickfiles/$', views.UserQuickFiles.as_view(), name=views.UserQuickFiles.view_name), diff --git a/api/users/views.py b/api/users/views.py index 325619d517d..927b5dc2f9b 100644 --- a/api/users/views.py +++ b/api/users/views.py @@ -31,7 +31,7 @@ from api.nodes.serializers import DraftRegistrationLegacySerializer from api.nodes.utils import NodeOptimizationMixin from api.osf_groups.serializers import GroupSerializer -from api.preprints.serializers import PreprintSerializer +from api.preprints.serializers import PreprintSerializer, PreprintDraftSerializer from api.registrations import annotations as registration_annotations from api.registrations.serializers import RegistrationSerializer from api.resources import annotations as resource_annotations @@ -60,7 +60,7 @@ from django.http import JsonResponse from django.utils import timezone from framework.auth.core import get_user -from framework.auth.views import send_confirm_email +from framework.auth.views import send_confirm_email_async from framework.auth.oauth_scopes import CoreScopes, normalize_scopes from framework.auth.exceptions import ChangePasswordError from framework.utils import throttle_period_expired @@ -413,6 +413,36 @@ def get_queryset(self): return self.get_queryset_from_request() +class UserDraftPreprints(JSONAPIBaseView, generics.ListAPIView, UserMixin, PreprintFilterMixin): + """The documentation for this endpoint can be found [here](https://developer.osf.io/). 
+ """ + + permission_classes = ( + drf_permissions.IsAuthenticatedOrReadOnly, + base_permissions.TokenHasScope, + CurrentUser, + ) + + ordering = ('-created') + + required_read_scopes = [CoreScopes.USERS_READ, CoreScopes.NODE_PREPRINTS_READ] + required_write_scopes = [CoreScopes.USERS_WRITE, CoreScopes.NODE_PREPRINTS_WRITE] + + serializer_class = PreprintDraftSerializer + view_category = 'users' + view_name = 'user-draft-preprints' + + def get_default_queryset(self): + user = self.get_user() + return user.preprints.filter( + machine_state='initial', + deleted__isnull=True, + ) + + def get_queryset(self): + return self.get_queryset_from_request() + + class UserInstitutions(JSONAPIBaseView, generics.ListAPIView, UserMixin): """The documentation for this endpoint can be found [here](https://developer.osf.io/#operation/users_institutions_list). """ @@ -900,7 +930,7 @@ def get_object(self): if self.request.method == 'GET' and is_truthy(self.request.query_params.get('resend_confirmation')): if not confirmed and settings.CONFIRM_REGISTRATIONS_BY_EMAIL: if throttle_period_expired(user.email_last_sent, settings.SEND_EMAIL_THROTTLE): - send_confirm_email(user, email=address, renew=True) + send_confirm_email_async(user, email=address, renew=True) user.email_last_sent = timezone.now() user.save() diff --git a/api_tests/draft_registrations/views/test_draft_registration_detail.py b/api_tests/draft_registrations/views/test_draft_registration_detail.py index 18b00014f94..2106f87fb5a 100644 --- a/api_tests/draft_registrations/views/test_draft_registration_detail.py +++ b/api_tests/draft_registrations/views/test_draft_registration_detail.py @@ -2,10 +2,10 @@ from api.base.settings.defaults import API_BASE from api_tests.nodes.views.test_node_draft_registration_detail import ( - TestDraftRegistrationDetail, TestDraftRegistrationUpdate, TestDraftRegistrationPatch, TestDraftRegistrationDelete, + AbstractDraftRegistrationTestCase ) from osf.models import DraftNode, Node, NodeLicense, RegistrationSchema from osf.utils.permissions import ADMIN, READ, WRITE @@ -16,58 +16,34 @@ SubjectFactory, ProjectFactory, ) +from website.settings import API_DOMAIN @pytest.mark.django_db -class TestDraftRegistrationDetailEndpoint(TestDraftRegistrationDetail): +class TestDraftRegistrationDetailEndpoint(AbstractDraftRegistrationTestCase): @pytest.fixture() def url_draft_registrations(self, project_public, draft_registration): - return '/{}draft_registrations/{}/'.format( - API_BASE, draft_registration._id) - - # Overrides TestDraftRegistrationDetail - def test_admin_group_member_can_view(self, app, user, draft_registration, project_public, - schema, url_draft_registrations, group_mem): - - res = app.get(url_draft_registrations, auth=group_mem.auth, expect_errors=True) - assert res.status_code == 403 + return f'/{API_BASE}draft_registrations/{draft_registration._id}/' - def test_can_view_draft( - self, app, user_write_contrib, project_public, - user_read_contrib, user_non_contrib, - url_draft_registrations, group, group_mem): - - # test_read_only_contributor_can_view_draft - res = app.get( - url_draft_registrations, - auth=user_read_contrib.auth, - expect_errors=False) + def test_read_only_contributor_can_view_draft(self, app, user_read_contrib, url_draft_registrations): + res = app.get(url_draft_registrations, auth=user_read_contrib.auth) assert res.status_code == 200 - # test_read_write_contributor_can_view_draft - res = app.get( - url_draft_registrations, - auth=user_write_contrib.auth, - expect_errors=False) + def 
test_read_write_contributor_can_view_draft(self, app, user_write_contrib, url_draft_registrations): + res = app.get(url_draft_registrations, auth=user_write_contrib.auth) assert res.status_code == 200 - def test_cannot_view_draft( - self, app, project_public, - user_non_contrib, url_draft_registrations): - - # test_logged_in_non_contributor_cannot_view_draft - res = app.get( - url_draft_registrations, - auth=user_non_contrib.auth, - expect_errors=True) + def test_logged_in_non_contributor_cannot_view_draft(self, app, user_non_contrib, url_draft_registrations): + res = app.get(url_draft_registrations, auth=user_non_contrib.auth, expect_errors=True) assert res.status_code == 403 - # test_unauthenticated_user_cannot_view_draft + def test_unauthenticated_user_cannot_view_draft(self, app, url_draft_registrations): res = app.get(url_draft_registrations, expect_errors=True) assert res.status_code == 401 - def test_detail_view_returns_editable_fields(self, app, user, draft_registration, - url_draft_registrations, project_public): + def test_detail_view_returns_editable_fields( + self, app, user, draft_registration, url_draft_registrations, project_public + ): res = app.get(url_draft_registrations, auth=user.auth, expect_errors=True) attributes = res.json['data']['attributes'] @@ -77,7 +53,7 @@ def test_detail_view_returns_editable_fields(self, app, user, draft_registration assert attributes['category'] == project_public.category assert attributes['has_project'] - res.json['data']['links']['self'] == url_draft_registrations + assert res.json['data']['links']['self'] == f'{API_DOMAIN}{url_draft_registrations.lstrip("/")}' relationships = res.json['data']['relationships'] assert Node.load(relationships['branched_from']['data']['id']) == draft_registration.branched_from @@ -89,8 +65,7 @@ def test_detail_view_returns_editable_fields(self, app, user, draft_registration def test_detail_view_returns_editable_fields_no_specified_node(self, app, user): draft_registration = DraftRegistrationFactory(initiator=user, branched_from=None) - url = '/{}draft_registrations/{}/'.format( - API_BASE, draft_registration._id) + url = f'{API_DOMAIN}{API_BASE}draft_registrations/{draft_registration._id}/' res = app.get(url, auth=user.auth, expect_errors=True) attributes = res.json['data']['attributes'] @@ -101,7 +76,7 @@ def test_detail_view_returns_editable_fields_no_specified_node(self, app, user): assert attributes['node_license'] is None assert not attributes['has_project'] - res.json['data']['links']['self'] == url + assert res.json['data']['links']['self'] == url relationships = res.json['data']['relationships'] assert 'affiliated_institutions' in relationships @@ -112,16 +87,13 @@ def test_detail_view_returns_editable_fields_no_specified_node(self, app, user): res = app.get(draft_node_link, auth=user.auth) assert DraftNode.load(res.json['data']['id']) == draft_registration.branched_from - def test_draft_registration_perms_checked_on_draft_not_node(self, app, user, project_public, - draft_registration, url_draft_registrations): - - # Admin on node and draft + def test_admin_node_and_draft(self, app, user, project_public, draft_registration, url_draft_registrations): assert project_public.has_permission(user, ADMIN) is True assert draft_registration.has_permission(user, ADMIN) is True res = app.get(url_draft_registrations, auth=user.auth) assert res.status_code == 200 - # Admin on node but not draft + def test_admin_node_not_draft(self, app, user, project_public, draft_registration, url_draft_registrations): 
node_admin = AuthUserFactory() project_public.add_contributor(node_admin, ADMIN) assert project_public.has_permission(node_admin, ADMIN) is True @@ -129,7 +101,7 @@ def test_draft_registration_perms_checked_on_draft_not_node(self, app, user, pro res = app.get(url_draft_registrations, auth=node_admin.auth, expect_errors=True) assert res.status_code == 403 - # Admin on draft but not node + def test_admin_draft_not_node(self, app, user, project_public, draft_registration, url_draft_registrations): draft_admin = AuthUserFactory() draft_registration.add_contributor(draft_admin, ADMIN) assert project_public.has_permission(draft_admin, ADMIN) is False @@ -137,19 +109,66 @@ def test_draft_registration_perms_checked_on_draft_not_node(self, app, user, pro res = app.get(url_draft_registrations, auth=draft_admin.auth) assert res.status_code == 200 - # Overwrites TestDraftRegistrationDetail - def test_can_view_after_added( - self, app, schema, draft_registration, url_draft_registrations): - # Draft Registration permissions are no longer based on the branched from project + def test_write_node_and_draft(self, app, user, project_public, draft_registration, url_draft_registrations): + assert project_public.has_permission(user, WRITE) is True + assert draft_registration.has_permission(user, WRITE) is True + res = app.get(url_draft_registrations, auth=user.auth) + assert res.status_code == 200 + + def test_write_node_not_draft(self, app, user, project_public, draft_registration, url_draft_registrations): + node_admin = AuthUserFactory() + project_public.add_contributor(node_admin, WRITE) + assert project_public.has_permission(node_admin, WRITE) is True + assert draft_registration.has_permission(node_admin, WRITE) is False + res = app.get(url_draft_registrations, auth=node_admin.auth, expect_errors=True) + assert res.status_code == 403 + + def test_write_draft_not_node(self, app, user, project_public, draft_registration, url_draft_registrations): + draft_admin = AuthUserFactory() + draft_registration.add_contributor(draft_admin, WRITE) + assert project_public.has_permission(draft_admin, WRITE) is False + assert draft_registration.has_permission(draft_admin, WRITE) is True + res = app.get(url_draft_registrations, auth=draft_admin.auth) + assert res.status_code == 200 + + def test_read_node_and_draft(self, app, user, project_public, draft_registration, url_draft_registrations): + assert project_public.has_permission(user, READ) is True + assert draft_registration.has_permission(user, READ) is True + res = app.get(url_draft_registrations, auth=user.auth) + assert res.status_code == 200 + def test_read_node_not_draft(self, app, user, project_public, draft_registration, url_draft_registrations): + node_admin = AuthUserFactory() + project_public.add_contributor(node_admin, READ) + assert project_public.has_permission(node_admin, READ) is True + assert draft_registration.has_permission(node_admin, READ) is False + res = app.get(url_draft_registrations, auth=node_admin.auth, expect_errors=True) + assert res.status_code == 403 + + def test_read_draft_not_node(self, app, user, project_public, draft_registration, url_draft_registrations): + draft_admin = AuthUserFactory() + draft_registration.add_contributor(draft_admin, READ) + assert project_public.has_permission(draft_admin, READ) is False + assert draft_registration.has_permission(draft_admin, READ) is True + res = app.get(url_draft_registrations, auth=draft_admin.auth) + assert res.status_code == 200 + + def test_can_view_after_added(self, app, schema, 
draft_registration, url_draft_registrations): + """ + Ensure Draft Registration permissions are no longer based on the branched from project + """ user = AuthUserFactory() project = draft_registration.branched_from project.add_contributor(user, ADMIN) res = app.get(url_draft_registrations, auth=user.auth, expect_errors=True) assert res.status_code == 403 + draft_registration.add_contributor(user, ADMIN) + res = app.get(url_draft_registrations, auth=user.auth) + assert res.status_code == 200 - def test_current_permissions_field(self, app, user_read_contrib, - user_write_contrib, user, draft_registration, url_draft_registrations): + def test_current_permissions_field( + self, app, user_read_contrib, user_write_contrib, user, draft_registration, url_draft_registrations + ): res = app.get(url_draft_registrations, auth=user_read_contrib.auth, expect_errors=False) assert res.json['data']['attributes']['current_user_permissions'] == [READ] @@ -548,9 +567,8 @@ def test_write_contributor_can_update_draft( assert data['attributes']['registration_metadata'] == payload['data']['attributes']['registration_metadata'] -class TestDraftRegistrationDelete(TestDraftRegistrationDelete): +class TestDraftRegistrationDeleteDetail(TestDraftRegistrationDelete): @pytest.fixture() def url_draft_registrations(self, project_public, draft_registration): # Overrides TestDraftRegistrationDelete - return '/{}draft_registrations/{}/'.format( - API_BASE, draft_registration._id) + return f'/{API_BASE}draft_registrations/{draft_registration._id}/' diff --git a/api_tests/draft_registrations/views/test_draft_registration_list.py b/api_tests/draft_registrations/views/test_draft_registration_list.py index ba49520a174..1126af09ad3 100644 --- a/api_tests/draft_registrations/views/test_draft_registration_list.py +++ b/api_tests/draft_registrations/views/test_draft_registration_list.py @@ -2,20 +2,19 @@ import pytest from framework.auth.core import Auth -from api_tests.nodes.views.test_node_draft_registration_list import ( - TestDraftRegistrationList, - TestDraftRegistrationCreate -) +from django.utils import timezone +from api_tests.nodes.views.test_node_draft_registration_list import AbstractDraftRegistrationTestCase from api.base.settings.defaults import API_BASE from osf.migrations import ensure_invisible_and_inactive_schema -from osf.models import DraftRegistration, NodeLicense, RegistrationProvider +from osf.models import DraftRegistration, NodeLicense, RegistrationProvider, RegistrationSchema from osf_tests.factories import ( RegistrationFactory, CollectionFactory, ProjectFactory, AuthUserFactory, - InstitutionFactory + InstitutionFactory, + DraftRegistrationFactory, ) from osf.utils.permissions import READ, WRITE, ADMIN @@ -28,52 +27,143 @@ def invisible_and_inactive_schema(): @pytest.mark.django_db -class TestDraftRegistrationListNewWorkflow(TestDraftRegistrationList): +class TestDraftRegistrationListTopLevelEndpoint: + @pytest.fixture() - def url_draft_registrations(self, project_public): - return f'/{API_BASE}draft_registrations/?' 
+ def url_draft_registrations(self): + return f'/{API_BASE}draft_registrations/' - # Overrides TestDraftRegistrationList - def test_osf_group_with_admin_permissions_can_view(self): - # DraftRegistration endpoints permissions are not calculated from the node - return + @pytest.fixture() + def user(self): + return AuthUserFactory() - # Overrides TestDraftRegistrationList - def test_cannot_view_draft_list( - self, app, user_write_contrib, project_public, - user_read_contrib, user_non_contrib, draft_registration, - url_draft_registrations, group, group_mem): + @pytest.fixture() + def user_admin_contrib(self): + return AuthUserFactory() + + @pytest.fixture() + def user_write_contrib(self): + return AuthUserFactory() + + @pytest.fixture() + def user_read_contrib(self): + return AuthUserFactory() + + @pytest.fixture() + def user_non_contrib(self): + return AuthUserFactory() + + @pytest.fixture() + def group_mem(self): + return AuthUserFactory() + + @pytest.fixture() + def project(self, user): + return ProjectFactory(creator=user) + + @pytest.fixture() + def schema(self): + return RegistrationSchema.objects.get(name='Open-Ended Registration', schema_version=3) - # test_read_only_contributor_can_view_draft_list + @pytest.fixture() + def draft_registration(self, user, project, schema, user_write_contrib, user_read_contrib, user_admin_contrib): + draft = DraftRegistrationFactory( + initiator=user, + registration_schema=schema, + branched_from=project + ) + draft.add_contributor(user_read_contrib, permissions=READ) + draft.add_contributor(user_write_contrib, permissions=WRITE) + draft.add_contributor(user_admin_contrib, permissions=ADMIN) + return draft + + def test_read_only_contributor_can_view_draft_list( + self, app, user_read_contrib, draft_registration, url_draft_registrations + ): res = app.get( url_draft_registrations, - auth=user_read_contrib.auth) + auth=user_read_contrib.auth + ) assert res.status_code == 200 assert len(res.json['data']) == 1 - # test_read_write_contributor_can_view_draft_list - res = app.get( - url_draft_registrations, - auth=user_write_contrib.auth) + def test_read_write_contributor_can_view_draft_list( + self, app, user_write_contrib, draft_registration, url_draft_registrations + ): + res = app.get(url_draft_registrations, auth=user_write_contrib.auth) assert res.status_code == 200 assert len(res.json['data']) == 1 - # test_logged_in_non_contributor_can_view_draft_list - res = app.get( - url_draft_registrations, - auth=user_non_contrib.auth, - expect_errors=True) + def test_admin_can_view_draft_list( + self, app, user_admin_contrib, draft_registration, schema, url_draft_registrations + ): + res = app.get(url_draft_registrations, auth=user_admin_contrib.auth) + + assert res.status_code == 200 + data = res.json['data'] + assert len(data) == 1 + + assert schema._id in data[0]['relationships']['registration_schema']['links']['related']['href'] + assert data[0]['id'] == draft_registration._id + assert data[0]['attributes']['registration_metadata'] == {} + + def test_logged_in_non_contributor_has_empty_list( + self, app, user_non_contrib, url_draft_registrations + ): + res = app.get(url_draft_registrations, auth=user_non_contrib.auth) assert res.status_code == 200 assert len(res.json['data']) == 0 - # test_unauthenticated_user_cannot_view_draft_list + def test_unauthenticated_user_cannot_view_draft_list(self, app, url_draft_registrations): res = app.get(url_draft_registrations, expect_errors=True) assert res.status_code == 401 + def 
test_logged_in_non_contributor_cannot_view_draft_list(self, app, user_non_contrib, url_draft_registrations): + res = app.get(url_draft_registrations, auth=user_non_contrib.auth) + assert res.status_code == 200 + assert len(res.json['data']) == 0 + + def test_deleted_draft_registration_does_not_show_up_in_draft_list(self, app, user, draft_registration, url_draft_registrations): + draft_registration.deleted = timezone.now() + draft_registration.save() + res = app.get(url_draft_registrations, auth=user.auth) + assert res.status_code == 200 + assert not res.json['data'] + + def test_draft_with_registered_node_does_not_show_up_in_draft_list( + self, app, user, project, draft_registration, url_draft_registrations + ): + registration = RegistrationFactory( + project=project, + draft_registration=draft_registration + ) + draft_registration.registered_node = registration + draft_registration.save() + res = app.get(url_draft_registrations, auth=user.auth) + assert res.status_code == 200 + assert not res.json['data'] + + def test_draft_with_deleted_registered_node_shows_up_in_draft_list( + self, app, user, project, draft_registration, schema, url_draft_registrations + ): + registration = RegistrationFactory(project=project, draft_registration=draft_registration) + draft_registration.registered_node = registration + draft_registration.save() + registration.deleted = timezone.now() + registration.save() + draft_registration.deleted = None + draft_registration.save() + res = app.get(url_draft_registrations, auth=user.auth) + assert res.status_code == 200 + data = res.json['data'] + assert len(data) == 1 + assert schema._id in data[0]['relationships']['registration_schema']['links']['related']['href'] + assert data[0]['id'] == draft_registration._id + assert data[0]['attributes']['registration_metadata'] == {} + -class TestDraftRegistrationCreateWithNode(TestDraftRegistrationCreate): +class TestDraftRegistrationCreateWithNode(AbstractDraftRegistrationTestCase): - # Overrides `url_draft_registrations` in `TestDraftRegistrationCreate` @pytest.fixture() def url_draft_registrations(self, project_public): return f'/{API_BASE}draft_registrations/?' 
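
The top-level list tests added above (deleted drafts hidden, drafts with a live registration hidden, drafts whose registration was itself deleted visible again) pin down what the queryset behind the /draft_registrations/ endpoint has to exclude. A rough sketch of that filter, written only from what the tests assert — the actual model-level helper may be implemented differently:

    from django.db.models import Q

    def visible_drafts(user_drafts):
        # Sketch only: `user_drafts` is assumed to be a queryset of the user's
        # DraftRegistrations; field names mirror the attributes the tests set
        # (deleted, registered_node).
        return user_drafts.filter(
            deleted__isnull=True,  # deleted drafts never appear
        ).filter(
            # either never registered, or its registration was itself deleted
            Q(registered_node__isnull=True) | Q(registered_node__deleted__isnull=False),
        )
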
@@ -125,29 +215,35 @@ def payload_alt(self, payload, provider_alt): new_payload['data']['relationships']['provider']['data']['id'] = provider_alt._id return new_payload - # Overrides TestDraftRegistrationList - def test_cannot_create_draft_errors(self, app, user, payload_alt, project_public, url_draft_registrations): - # test_cannot_create_draft_from_a_registration + def test_cannot_create_draft_from_a_registration(self, app, user, payload_alt, project_public, url_draft_registrations): registration = RegistrationFactory( - project=project_public, creator=user) + project=project_public, + creator=user + ) payload_alt['data']['relationships']['branched_from']['data']['id'] = registration._id res = app.post_json_api( - url_draft_registrations, payload_alt, auth=user.auth, - expect_errors=True) + url_draft_registrations, + payload_alt, + auth=user.auth, + expect_errors=True + ) assert res.status_code == 404 - # test_cannot_create_draft_from_deleted_node + def test_cannot_create_draft_from_deleted_node(self, app, user, payload_alt, project_public, url_draft_registrations): project = ProjectFactory(is_public=True, creator=user) project.is_deleted = True project.save() payload_alt['data']['relationships']['branched_from']['data']['id'] = project._id res = app.post_json_api( - url_draft_registrations, payload_alt, - auth=user.auth, expect_errors=True) + url_draft_registrations, + payload_alt, + auth=user.auth, + expect_errors=True + ) assert res.status_code == 410 assert res.json['errors'][0]['detail'] == 'The requested node is no longer available.' - # test_cannot_create_draft_from_collection + def test_cannot_create_draft_from_collection(self, app, user, payload_alt, project_public, url_draft_registrations): collection = CollectionFactory(creator=user) payload_alt['data']['relationships']['branched_from']['data']['id'] = collection._id res = app.post_json_api( @@ -155,8 +251,9 @@ def test_cannot_create_draft_errors(self, app, user, payload_alt, project_public expect_errors=True) assert res.status_code == 404 - def test_draft_registration_attributes_copied_from_node(self, app, project_public, - url_draft_registrations, user, payload_alt): + def test_draft_registration_attributes_copied_from_node( + self, app, project_public, url_draft_registrations, user, payload_alt + ): write_contrib = AuthUserFactory() read_contrib = AuthUserFactory() @@ -175,8 +272,9 @@ def test_draft_registration_attributes_copied_from_node(self, app, project_publi project_public.add_contributor(write_contrib, WRITE) project_public.add_contributor(read_contrib, READ) + # Only an admin can create a DraftRegistration res = app.post_json_api(url_draft_registrations, payload_alt, auth=write_contrib.auth, expect_errors=True) - assert res.status_code == 201 + assert res.status_code == 403 res = app.post_json_api(url_draft_registrations, payload_alt, auth=read_contrib.auth, expect_errors=True) assert res.status_code == 403 @@ -196,67 +294,55 @@ def test_draft_registration_attributes_copied_from_node(self, app, project_publi assert 'subjects' in relationships assert 'contributors' in relationships - def test_cannot_create_draft( - self, app, user_write_contrib, - user_read_contrib, user_non_contrib, - project_public, payload_alt, group, - url_draft_registrations, group_mem): - - # test_write_only_contributor_cannot_create_draft + def test_write_only_contributor_cannot_create_draft( + self, app, user_write_contrib, project_public, payload_alt, url_draft_registrations + ): assert user_write_contrib in project_public.contributors.all() 
res = app.post_json_api( url_draft_registrations, payload_alt, auth=user_write_contrib.auth, - expect_errors=True) - assert res.status_code == 201 + expect_errors=True + ) + assert res.status_code == 403 - # test_read_only_contributor_cannot_create_draft + def test_read_only_contributor_cannot_create_draft( + self, app, user_write_contrib, user_read_contrib, project_public, payload_alt, url_draft_registrations + ): assert user_read_contrib in project_public.contributors.all() res = app.post_json_api( url_draft_registrations, payload_alt, auth=user_read_contrib.auth, - expect_errors=True) + expect_errors=True + ) assert res.status_code == 403 - # test_non_authenticated_user_cannot_create_draft + def test_non_authenticated_user_cannot_create_draft( + self, app, user_write_contrib, payload_alt, group, url_draft_registrations + ): res = app.post_json_api( url_draft_registrations, - payload_alt, expect_errors=True) + payload_alt, + expect_errors=True + ) assert res.status_code == 401 - # test_logged_in_non_contributor_cannot_create_draft + def test_logged_in_non_contributor_cannot_create_draft( + self, app, user_non_contrib, payload_alt, url_draft_registrations + ): + res = app.post_json_api( url_draft_registrations, payload_alt, auth=user_non_contrib.auth, - expect_errors=True) + expect_errors=True + ) assert res.status_code == 403 - # test_group_admin_cannot_create_draft - res = app.post_json_api( - url_draft_registrations, - payload_alt, - auth=group_mem.auth, - expect_errors=True) - assert res.status_code == 201 - - # test_group_write_contrib_cannot_create_draft - project_public.remove_osf_group(group) - project_public.add_osf_group(group, WRITE) - res = app.post_json_api( - url_draft_registrations, - payload_alt, - auth=group_mem.auth, - expect_errors=True) - assert res.status_code == 201 - - def test_create_project_based_draft_does_not_email_initiator( - self, app, user, url_draft_registrations, payload): - post_url = url_draft_registrations + 'embed=branched_from&embed=initiator' + def test_create_project_based_draft_does_not_email_initiator(self, app, user, url_draft_registrations, payload): with mock.patch.object(mails, 'send_mail') as mock_send_mail: - app.post_json_api(post_url, payload, auth=user.auth) + app.post_json_api(f'{url_draft_registrations}?embed=branched_from&embed=initiator', payload, auth=user.auth) assert not mock_send_mail.called @@ -320,7 +406,7 @@ def test_affiliated_institutions_are_copied_from_user(self, app, user, url_draft assert list(draft_registration.affiliated_institutions.all()) == list(user.get_affiliated_institutions()) -class TestDraftRegistrationCreateWithoutNode(TestDraftRegistrationCreate): +class TestDraftRegistrationCreateWithoutNode(AbstractDraftRegistrationTestCase): @pytest.fixture() def url_draft_registrations(self): return f'/{API_BASE}draft_registrations/?' 
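
The 201-to-403 flips in the create-with-node tests above, together with the draft-versus-node checks in the detail tests, all rest on the DraftRegistrationPermission class that this diff imports into api/nodes/views.py but never shows. A hedged sketch of the behavior those tests imply — the real class in api/draft_registrations/permissions.py may be structured differently:

    from rest_framework import permissions as drf_permissions

    from api.base.utils import get_user_auth
    from osf.models import DraftRegistration, Node
    from osf.utils.permissions import ADMIN, READ, WRITE


    class DraftRegistrationPermission(drf_permissions.BasePermission):
        # Sketch: drafts are governed by their own contributor list, not the node's.

        def has_object_permission(self, request, view, obj):
            auth = get_user_auth(request)
            if auth.user is None:
                return False
            if isinstance(obj, DraftRegistration):
                required = READ if request.method in drf_permissions.SAFE_METHODS else WRITE
                return obj.has_permission(auth.user, required)
            if isinstance(obj, Node):
                # creating a draft from an existing node stays admin-only
                return obj.has_permission(auth.user, ADMIN)
            return False
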
@@ -346,13 +432,14 @@ def test_admin_can_create_draft( assert draft.creator == user assert draft.has_permission(user, ADMIN) is True - def test_create_no_project_draft_emails_initiator( - self, app, user, url_draft_registrations, payload): - post_url = url_draft_registrations + 'embed=branched_from&embed=initiator' - + def test_create_no_project_draft_emails_initiator(self, app, user, url_draft_registrations, payload): # Intercepting the send_mail call from website.project.views.contributor.notify_added_contributor with mock.patch.object(mails, 'send_mail') as mock_send_mail: - resp = app.post_json_api(post_url, payload, auth=user.auth) + resp = app.post_json_api( + f'{url_draft_registrations}?embed=branched_from&embed=initiator', + payload, + auth=user.auth + ) assert mock_send_mail.called # Python 3.6 does not support mock.call_args.args/kwargs @@ -363,7 +450,9 @@ def test_create_no_project_draft_emails_initiator( assert mock_send_kwargs['user'] == user assert mock_send_kwargs['node'] == DraftRegistration.load(resp.json['data']['id']) - def test_create_draft_with_provider(self, app, user, url_draft_registrations, non_default_provider, payload_with_non_default_provider): + def test_create_draft_with_provider( + self, app, user, url_draft_registrations, non_default_provider, payload_with_non_default_provider + ): res = app.post_json_api(url_draft_registrations, payload_with_non_default_provider, auth=user.auth) assert res.status_code == 201 data = res.json['data'] @@ -373,14 +462,9 @@ def test_create_draft_with_provider(self, app, user, url_draft_registrations, no draft = DraftRegistration.load(data['id']) assert draft.provider == non_default_provider - # Overrides TestDraftRegistrationList - def test_cannot_create_draft( - self, app, user_write_contrib, - user_read_contrib, user_non_contrib, - project_public, payload, group, - url_draft_registrations, group_mem): - - # test_write_contrib (no node supplied, so any logged in user can create) + def test_write_contrib(self, app, user, project_public, payload, url_draft_registrations, user_write_contrib): + """(no node supplied, so any logged in user can create) + """ assert user_write_contrib in project_public.contributors.all() res = app.post_json_api( url_draft_registrations, @@ -388,7 +472,9 @@ def test_cannot_create_draft( auth=user_write_contrib.auth) assert res.status_code == 201 - # test_read_only (no node supplied, so any logged in user can create) + def test_read_only(self, app, user, url_draft_registrations, user_read_contrib, project_public, payload): + '''(no node supplied, so any logged in user can create) + ''' assert user_read_contrib in project_public.contributors.all() res = app.post_json_api( url_draft_registrations, @@ -396,24 +482,24 @@ def test_cannot_create_draft( auth=user_read_contrib.auth) assert res.status_code == 201 - # test_non_authenticated_user_cannot_create_draft + def test_non_authenticated_user_cannot_create_draft(self, app, user, url_draft_registrations, payload): res = app.post_json_api( url_draft_registrations, - payload, expect_errors=True) + payload, + expect_errors=True + ) assert res.status_code == 401 - # test_logged_in_non_contributor (no node supplied, so any logged in user can create) + def test_logged_in_non_contributor(self, app, user, url_draft_registrations, user_non_contrib, payload): + '''(no node supplied, so any logged in user can create) + ''' res = app.post_json_api( url_draft_registrations, payload, - auth=user_non_contrib.auth) + auth=user_non_contrib.auth + ) assert res.status_code == 
201 - # Overrides TestDraftRegistrationList - def test_cannot_create_draft_errors(self): - # The original test assumes a node is being passed in - return - def test_draft_registration_attributes_not_copied_from_node(self, app, project_public, url_draft_registrations, user, payload): diff --git a/api_tests/files/views/test_file_detail.py b/api_tests/files/views/test_file_detail.py index 75224022929..a80b9319dae 100644 --- a/api_tests/files/views/test_file_detail.py +++ b/api_tests/files/views/test_file_detail.py @@ -31,6 +31,9 @@ SessionStore = import_module(django_conf_settings.SESSION_ENGINE).SessionStore +from addons.base.views import get_authenticated_resource +from framework.exceptions import HTTPError + # stolen from^W^Winspired by DRF # rest_framework.fields.DateTimeField.to_representation def _dt_to_iso8601(value): @@ -639,6 +642,10 @@ def file(self, root_node, user): }).save() return file + @pytest.fixture() + def file_url(self, file): + return f'/{API_BASE}files/{file._id}/' + def test_listing(self, app, user, file): file.create_version(user, { 'object': '0683m38e', @@ -705,6 +712,67 @@ def test_load_and_property(self, app, user, file): expect_errors=True, auth=user.auth, ).status_code == 405 + def test_retracted_registration_file(self, app, user, file_url, file): + resource = RegistrationFactory(is_public=True) + retraction = resource.retract_registration( + user=resource.creator, + justification='Justification for retraction', + save=True, + moderator_initiated=False + ) + + retraction.accept() + resource.save() + resource.refresh_from_db() + + file.target = resource + file.save() + + res = app.get(file_url, auth=user.auth, expect_errors=True) + assert res.status_code == 410 + + def test_retracted_file_returns_410(self, app, user, file_url, file): + resource = RegistrationFactory(is_public=True) + retraction = resource.retract_registration( + user=resource.creator, + justification='Justification for retraction', + save=True, + moderator_initiated=False + ) + + retraction.accept() + resource.save() + resource.refresh_from_db() + + file.target = resource + file.save() + + res = app.get(file_url, auth=user.auth, expect_errors=True) + assert res.status_code == 410 + + def test_get_authenticated_resource_retracted(self): + resource = RegistrationFactory(is_public=True) + + assert resource.is_retracted is False + + retraction = resource.retract_registration( + user=resource.creator, + justification='Justification for retraction', + save=True, + moderator_initiated=False + ) + + retraction.accept() + resource.save() + resource.refresh_from_db() + + assert resource.is_retracted is True + + with pytest.raises(HTTPError) as excinfo: + get_authenticated_resource(resource._id) + + assert excinfo.value.code == 410 + @pytest.mark.django_db class TestFileTagging: @@ -916,20 +984,20 @@ def test_withdrawn_preprint_files(self, app, file_url, preprint, user, other_use # Unauthenticated res = app.get(file_url, expect_errors=True) - assert res.status_code == 401 + assert res.status_code == 410 # Noncontrib res = app.get(file_url, auth=other_user.auth, expect_errors=True) - assert res.status_code == 403 + assert res.status_code == 410 # Write contributor preprint.add_contributor(other_user, WRITE, save=True) res = app.get(file_url, auth=other_user.auth, expect_errors=True) - assert res.status_code == 403 + assert res.status_code == 410 # Admin contrib res = app.get(file_url, auth=user.auth, expect_errors=True) - assert res.status_code == 403 + assert res.status_code == 410 @pytest.mark.django_db 
class TestShowAsUnviewed: diff --git a/api_tests/institutions/views/test_institution_auth.py b/api_tests/institutions/views/test_institution_auth.py index 40742424b91..670a6ee31b4 100644 --- a/api_tests/institutions/views/test_institution_auth.py +++ b/api_tests/institutions/views/test_institution_auth.py @@ -12,7 +12,7 @@ from framework.auth import signals, Auth from framework.auth.core import get_user -from framework.auth.views import send_confirm_email +from framework.auth.views import send_confirm_email_async from osf.models import OSFUser, InstitutionAffiliation, InstitutionStorageRegion from osf.models.institution import SsoFilterCriteriaAction @@ -204,6 +204,7 @@ def test_new_user_created(self, app, url_auth_institution, institution): assert user.fullname == 'Fake User' assert user.accepted_terms_of_service is None assert institution in user.get_affiliated_institutions() + assert f'source:institution|{institution._id}' in user.system_tags def test_existing_user_found_but_not_affiliated(self, app, institution, url_auth_institution): @@ -219,6 +220,7 @@ def test_existing_user_found_but_not_affiliated(self, app, institution, url_auth user.reload() assert user.fullname == 'Foo Bar' assert institution in user.get_affiliated_institutions() + assert f'source:institution|{institution._id}' not in user.system_tags def test_user_found_and_affiliated(self, app, institution, url_auth_institution): @@ -454,7 +456,7 @@ def test_user_external_unconfirmed(self, app, institution, url_auth_institution) assert user.external_identity # Send confirm email in order to add new email verifications - send_confirm_email( + send_confirm_email_async( user, user.username, external_id_provider=external_id_provider, @@ -811,6 +813,8 @@ def test_new_user_primary_only(self, app, url_auth_institution, assert user.accepted_terms_of_service is None assert institution_primary_type_1 in user.get_affiliated_institutions() assert institution_secondary_type_1 not in user.get_affiliated_institutions() + assert f'source:institution|{institution_primary_type_1._id}' in user.system_tags + assert f'source:institution|{institution_secondary_type_1._id}' not in user.system_tags def test_new_user_primary_and_secondary(self, app, url_auth_institution, institution_primary_type_1, institution_secondary_type_1): @@ -830,6 +834,8 @@ def test_new_user_primary_and_secondary(self, app, url_auth_institution, assert user assert user.fullname == 'Fake User' assert user.accepted_terms_of_service is None + assert f'source:institution|{institution_primary_type_1._id}' in user.system_tags + assert f'source:institution|{institution_secondary_type_1._id}' in user.system_tags assert institution_primary_type_1 in user.get_affiliated_institutions() assert institution_secondary_type_1 in user.get_affiliated_institutions() @@ -1059,6 +1065,7 @@ def test_selective_sso_allowed_new_user(self, app, url_auth_institution, institu assert user.fullname == 'Fake User' assert user.accepted_terms_of_service is None assert institution_selective_type_1 in user.get_affiliated_institutions() + assert f'source:institution|{institution_selective_type_1._id}' in user.system_tags def test_selective_sso_allowed_existing_user_not_affiliated(self, app, url_auth_institution, institution_selective_type_1): @@ -1147,6 +1154,7 @@ def test_selective_sso_allowed_new_user(self, app, url_auth_institution, institu assert user.fullname == 'Fake User' assert user.accepted_terms_of_service is None assert institution_selective_type_2 in user.get_affiliated_institutions() + assert 
f'source:institution|{institution_selective_type_2._id}' in user.system_tags def test_selective_sso_allowed_existing_user_not_affiliated(self, app, url_auth_institution, institution_selective_type_2): @@ -1240,6 +1248,7 @@ def test_new_user(self, app, url_auth_institution, institution): assert affiliation.sso_mail == sso_email assert affiliation.sso_identity == sso_identity assert affiliation.sso_department == department + assert f'source:institution|{institution._id}' in user.system_tags def test_existing_user_by_both_email_and_identity(self, app, url_auth_institution, institution): diff --git a/api_tests/institutions/views/test_institution_user_metric_list.py b/api_tests/institutions/views/test_institution_user_metric_list.py index 225f876e383..dfee4d178f5 100644 --- a/api_tests/institutions/views/test_institution_user_metric_list.py +++ b/api_tests/institutions/views/test_institution_user_metric_list.py @@ -218,7 +218,7 @@ def test_filter(self, app, url, admin, populate_counts): resp = app.get(f'{url}?filter[department]=Psychology dept', auth=admin.auth) assert resp.json['data'][0]['attributes']['department'] == 'Psychology dept' - @pytest.mark.skipif(settings.TRAVIS_ENV, reason='Non-deterministic fails on travis') + @pytest.mark.skipif(settings.CI_ENV, reason='Non-deterministic fails on CI') def test_sort_and_pagination(self, app, url, user, user2, user3, admin, populate_counts, populate_more_counts, institution): resp = app.get(f'{url}?sort=user_name&page[size]=1&page=2', auth=admin.auth) assert resp.status_code == 200 @@ -229,7 +229,7 @@ def test_sort_and_pagination(self, app, url, user, user2, user3, admin, populate assert resp.json['links']['meta']['total'] == 11 assert resp.json['data'][-1]['attributes']['user_name'] == 'Zedd' - @pytest.mark.skipif(settings.TRAVIS_ENV, reason='Non-deterministic fails on travis') + @pytest.mark.skipif(settings.CI_ENV, reason='Non-deterministic fails on CI') def test_filter_and_pagination(self, app, user, user2, user3, url, admin, populate_counts, populate_more_counts, institution): resp = app.get(f'{url}?page=2', auth=admin.auth) assert resp.json['links']['meta']['total'] == 11 @@ -238,7 +238,7 @@ def test_filter_and_pagination(self, app, user, user2, user3, url, admin, popula assert resp.json['links']['meta']['total'] == 1 assert resp.json['data'][0]['attributes']['user_name'] == 'Zedd' - @pytest.mark.skipif(settings.TRAVIS_ENV, reason='Non-deterministic fails on travis') + @pytest.mark.skipif(settings.CI_ENV, reason='Non-deterministic fails on CI') def test_filter_and_sort(self, app, url, user, user2, user3, admin, user4, populate_counts, populate_na_department, institution): """ Testing for bug where sorting and filtering would throw 502. 
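
The skipif markers above assume a CI_ENV flag in website.settings standing in for the old TRAVIS_ENV. Both Travis and GitHub Actions export CI=true for every job, so a minimal sketch of how such a flag could be derived (the name and exact expression are assumptions, not part of this diff):

    import os

    # Hypothetical settings flag; CI=true is set by GitHub Actions (and was by Travis).
    CI_ENV = os.environ.get('CI', 'false').lower() == 'true'
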
diff --git a/api_tests/metrics/test_preprint_metrics.py b/api_tests/metrics/test_preprint_metrics.py index c3a24f4183e..57e31655c40 100644 --- a/api_tests/metrics/test_preprint_metrics.py +++ b/api_tests/metrics/test_preprint_metrics.py @@ -190,7 +190,7 @@ def test_preprint_list_with_metrics_fails(self, mock_timezone, app, user, base_u res = app.get(one_preprint_url, auth=other_non_admin_user.auth, expect_errors=True) assert res.status_code == 403 - @pytest.mark.skip('Return results will be entirely mocked so does not make a lot of sense to run on travis.') + @pytest.mark.skip('Return results will be entirely mocked so does not make a lot of sense to run on ci.') @mock.patch('api.metrics.utils.timezone.now') def test_preprint_with_metrics_succeeds(self, mock_timezone, app, user, base_url, preprint, other_user, preprint_no_results, metric_dates): diff --git a/api_tests/nodes/views/test_node_draft_registration_detail.py b/api_tests/nodes/views/test_node_draft_registration_detail.py index a4acf62be51..33e0a25b21a 100644 --- a/api_tests/nodes/views/test_node_draft_registration_detail.py +++ b/api_tests/nodes/views/test_node_draft_registration_detail.py @@ -9,41 +9,21 @@ AuthUserFactory, RegistrationFactory, ) -from osf.utils.permissions import WRITE, READ, ADMIN -from api_tests.nodes.views.test_node_draft_registration_list import DraftRegistrationTestCase +from osf.utils.permissions import ADMIN +from api_tests.nodes.views.test_node_draft_registration_list import AbstractDraftRegistrationTestCase +from framework.auth.core import Auth SCHEMA_VERSION = 2 @pytest.mark.django_db -class TestDraftRegistrationDetail(DraftRegistrationTestCase): - - @pytest.fixture() - def schema(self): - return RegistrationSchema.objects.get( - name='OSF-Standard Pre-Data Collection Registration', - schema_version=SCHEMA_VERSION) - - @pytest.fixture() - def draft_registration(self, user, project_public, schema): - return DraftRegistrationFactory( - initiator=user, - registration_schema=schema, - branched_from=project_public - ) - - @pytest.fixture() - def project_other(self, user): - return ProjectFactory(creator=user) +class TestDraftRegistrationDetail(AbstractDraftRegistrationTestCase): @pytest.fixture() def url_draft_registrations(self, project_public, draft_registration): - return '/{}nodes/{}/draft_registrations/{}/?{}'.format( - API_BASE, project_public._id, draft_registration._id, 'version=2.19') + return f'/{API_BASE}nodes/{project_public._id}/draft_registrations/{draft_registration._id}/?version=2.19' - def test_admin_can_view_draft( - self, app, user, draft_registration, project_public, - schema, url_draft_registrations, group_mem): + def test_node_admin_can_view_draft(self, app, user, draft_registration, schema, url_draft_registrations): res = app.get(url_draft_registrations, auth=user.auth) assert res.status_code == 200 data = res.json['data'] @@ -51,75 +31,55 @@ def test_admin_can_view_draft( assert data['id'] == draft_registration._id assert data['attributes']['registration_metadata'] == {} - def test_admin_group_member_can_view( - self, app, user, draft_registration, project_public, - schema, url_draft_registrations, group_mem): - - res = app.get(url_draft_registrations, auth=group_mem.auth) + def test_read_contributor_can_view_draft(self, app, user_read_contrib, url_draft_registrations): + """ + Note this is the Node permissions not DraftRegistration permission + """ + res = app.get(url_draft_registrations, auth=user_read_contrib.auth) assert res.status_code == 200 - def test_cannot_view_draft( - self, 
app, user_write_contrib, project_public, - user_read_contrib, user_non_contrib, - url_draft_registrations, group, group_mem): - - # test_read_only_contributor_cannot_view_draft - res = app.get( - url_draft_registrations, - auth=user_read_contrib.auth, - expect_errors=True) - assert res.status_code == 403 - - # test_read_write_contributor_cannot_view_draft - res = app.get( - url_draft_registrations, - auth=user_write_contrib.auth, - expect_errors=True) - assert res.status_code == 403 + def test_write_contributor_can_view_draft(self, app, user_write_contrib, url_draft_registrations): + """ + Note this is the Node permissions not DraftRegistration permission + """ + res = app.get(url_draft_registrations, auth=user_write_contrib.auth) + assert res.status_code == 200 - # test_logged_in_non_contributor_cannot_view_draft - res = app.get( - url_draft_registrations, - auth=user_non_contrib.auth, - expect_errors=True) - assert res.status_code == 403 + def test_logged_in_non_contributor_cannot_view_draft(self, app, user_non_contrib, url_draft_registrations): + res = app.get(url_draft_registrations, auth=user_non_contrib.auth, expect_errors=True) + assert res.status_code == 200 - # test_unauthenticated_user_cannot_view_draft + def test_unauthenticated_user_cannot_view_draft(self, app, url_draft_registrations): res = app.get(url_draft_registrations, expect_errors=True) assert res.status_code == 401 - # test_group_mem_read_cannot_view - project_public.remove_osf_group(group) - project_public.add_osf_group(group, READ) - res = app.get(url_draft_registrations, auth=group_mem.auth, expect_errors=True) - assert res.status_code == 403 - - def test_cannot_view_deleted_draft( - self, app, user, url_draft_registrations): + def test_cannot_view_deleted_draft(self, app, user, url_draft_registrations): res = app.delete_json_api(url_draft_registrations, auth=user.auth) assert res.status_code == 204 res = app.get( url_draft_registrations, auth=user.auth, - expect_errors=True) + expect_errors=True + ) assert res.status_code == 410 - def test_draft_must_be_branched_from_node_in_kwargs( - self, app, user, project_other, draft_registration): - url = '/{}nodes/{}/draft_registrations/{}/'.format( - API_BASE, project_other._id, draft_registration._id) - res = app.get(url, auth=user.auth, expect_errors=True) + def test_draft_must_be_branched_from_node_in_kwargs(self, app, user, project_other, draft_registration): + res = app.get( + f'/{API_BASE}nodes/{project_other._id}/draft_registrations/{draft_registration._id}/', + auth=user.auth, + expect_errors=True + ) assert res.status_code == 400 errors = res.json['errors'][0] assert errors['detail'] == 'This draft registration is not created from the given node.' 
def test_draft_registration_serializer_usage(self, app, user, project_public, draft_registration): # Tests the usage of DraftRegistrationDetailSerializer for version 2.20 - url_draft_registrations = '/{}nodes/{}/draft_registrations/{}/?{}'.format( - API_BASE, project_public._id, draft_registration._id, 'version=2.20') - - res = app.get(url_draft_registrations, auth=user.auth) + res = app.get( + f'/{API_BASE}nodes/{project_public._id}/draft_registrations/{draft_registration._id}/?version=2.20', + auth=user.auth + ) assert res.status_code == 200 data = res.json['data'] @@ -128,8 +88,7 @@ def test_draft_registration_serializer_usage(self, app, user, project_public, dr assert data['attributes']['description'] assert data['relationships']['affiliated_institutions'] - def test_can_view_after_added( - self, app, schema, draft_registration, url_draft_registrations): + def test_can_view_after_added(self, app, schema, draft_registration, url_draft_registrations): user = AuthUserFactory() project = draft_registration.branched_from project.add_contributor(user, ADMIN) @@ -138,21 +97,11 @@ def test_can_view_after_added( @pytest.mark.django_db -class TestDraftRegistrationUpdate(DraftRegistrationTestCase): - - @pytest.fixture() - def schema(self): - return RegistrationSchema.objects.get( - name='OSF-Standard Pre-Data Collection Registration', - schema_version=SCHEMA_VERSION) +class TestDraftRegistrationUpdate(AbstractDraftRegistrationTestCase): @pytest.fixture() - def draft_registration(self, user, project_public, schema): - return DraftRegistrationFactory( - initiator=user, - registration_schema=schema, - branched_from=project_public - ) + def url_draft_registrations(self, project_public, draft_registration): + return f'/{API_BASE}nodes/{project_public._id}/draft_registrations/{draft_registration._id}/?version=2.19' @pytest.fixture() def reg_schema(self): @@ -170,20 +119,13 @@ def draft_registration_prereg(self, user, project_public, reg_schema): ) @pytest.fixture() - def metadata_registration( - self, metadata, - draft_registration_prereg): + def metadata_registration(self, metadata, draft_registration_prereg): return metadata(draft_registration_prereg) @pytest.fixture() def project_other(self, user): return ProjectFactory(creator=user) - @pytest.fixture() - def url_draft_registrations(self, project_public, draft_registration): - return '/{}nodes/{}/draft_registrations/{}/?{}'.format( - API_BASE, project_public._id, draft_registration._id, 'version=2.19') - @pytest.fixture() def payload(self, draft_registration): return { @@ -267,12 +209,10 @@ def test_draft_must_be_branched_from_node( errors = res.json['errors'][0] assert errors['detail'] == 'This draft registration is not created from the given node.' 
- def test_cannot_update_draft( - self, app, user_write_contrib, project_public, - user_read_contrib, user_non_contrib, - payload, url_draft_registrations, group, group_mem): + def test_read_only_contributor_cannot_update_draft( + self, app, user_read_contrib, payload, url_draft_registrations, + ): - # test_read_only_contributor_cannot_update_draft res = app.put_json_api( url_draft_registrations, payload, @@ -280,37 +220,20 @@ def test_cannot_update_draft( expect_errors=True) assert res.status_code == 403 - # test_logged_in_non_contributor_cannot_update_draft + def test_logged_in_non_contributor_cannot_update_draft( + self, app, user_non_contrib, payload, url_draft_registrations, + ): res = app.put_json_api( url_draft_registrations, payload, auth=user_non_contrib.auth, - expect_errors=True) - assert res.status_code == 403 - - # test_unauthenticated_user_cannot_update_draft - res = app.put_json_api( - url_draft_registrations, - payload, expect_errors=True) - assert res.status_code == 401 - - # test_osf_group_member_admin_cannot_update_draft - res = app.put_json_api( - url_draft_registrations, - payload, expect_errors=True, - auth=group_mem.auth + expect_errors=True ) assert res.status_code == 403 - # test_osf_group_member_write_cannot_update_draft - project_public.remove_osf_group(group) - project_public.add_osf_group(group, WRITE) - res = app.put_json_api( - url_draft_registrations, - payload, expect_errors=True, - auth=group_mem.auth - ) - assert res.status_code == 403 + def test_unauthenticated_user_cannot_update_draft(self, app, payload, url_draft_registrations): + res = app.put_json_api(url_draft_registrations, payload, expect_errors=True) + assert res.status_code == 401 def test_registration_metadata_does_not_need_to_be_supplied( self, app, user, payload, url_draft_registrations): @@ -528,21 +451,7 @@ def test_multiple_choice_question_value_in_registration_responses_must_match_val @pytest.mark.django_db -class TestDraftRegistrationPatch(DraftRegistrationTestCase): - - @pytest.fixture() - def schema(self): - return RegistrationSchema.objects.get( - name='OSF-Standard Pre-Data Collection Registration', - schema_version=SCHEMA_VERSION) - - @pytest.fixture() - def draft_registration(self, user, project_public, schema): - return DraftRegistrationFactory( - initiator=user, - registration_schema=schema, - branched_from=project_public - ) +class TestDraftRegistrationPatch(AbstractDraftRegistrationTestCase): @pytest.fixture() def reg_schema(self): @@ -562,10 +471,6 @@ def draft_registration_prereg(self, user, project_public, reg_schema): def metadata_registration(self, metadata, draft_registration_prereg): return metadata(draft_registration_prereg) - @pytest.fixture() - def project_other(self, user): - return ProjectFactory(creator=user) - @pytest.fixture() def url_draft_registrations(self, project_public, draft_registration): return '/{}nodes/{}/draft_registrations/{}/?{}'.format( @@ -604,111 +509,69 @@ def test_admin_can_update_draft( assert schema._id in data['relationships']['registration_schema']['links']['related']['href'] assert data['attributes']['registration_metadata'] == payload['data']['attributes']['registration_metadata'] - def test_cannot_update_draft( - self, app, user_write_contrib, - user_read_contrib, user_non_contrib, - payload, url_draft_registrations, group_mem): - - # test_read_only_contributor_cannot_update_draft + def test_read_only_contributor_cannot_update_draft( + self, app, user_read_contrib, payload, url_draft_registrations + ): res = app.patch_json_api( 
url_draft_registrations, payload, auth=user_read_contrib.auth, - expect_errors=True) + expect_errors=True + ) assert res.status_code == 403 - # test_logged_in_non_contributor_cannot_update_draft + def test_logged_in_non_contributor_cannot_update_draft( + self, app, user_non_contrib, payload, url_draft_registrations + ): res = app.patch_json_api( url_draft_registrations, payload, auth=user_non_contrib.auth, - expect_errors=True) + expect_errors=True + ) assert res.status_code == 403 - # test_unauthenticated_user_cannot_update_draft + def test_unauthenticated_user_cannot_update_draft( + self, app, user_non_contrib, payload, url_draft_registrations + ): res = app.patch_json_api( url_draft_registrations, - payload, expect_errors=True) + payload, + expect_errors=True + ) assert res.status_code == 401 - # group admin cannot update draft - res = app.patch_json_api( - url_draft_registrations, - payload, - auth=group_mem.auth, - expect_errors=True) - assert res.status_code == 403 @pytest.mark.django_db -class TestDraftRegistrationDelete(DraftRegistrationTestCase): - - @pytest.fixture() - def schema(self): - return RegistrationSchema.objects.get( - name='OSF-Standard Pre-Data Collection Registration', - schema_version=SCHEMA_VERSION) - - @pytest.fixture() - def draft_registration(self, user, project_public, schema): - return DraftRegistrationFactory( - initiator=user, - registration_schema=schema, - branched_from=project_public - ) - - @pytest.fixture() - def project_other(self, user): - return ProjectFactory(creator=user) +class TestDraftRegistrationDelete(AbstractDraftRegistrationTestCase): @pytest.fixture() def url_draft_registrations(self, project_public, draft_registration): - return '/{}nodes/{}/draft_registrations/{}/?{}'.format( - API_BASE, project_public._id, draft_registration._id, 'version=2.19') + return f'/{API_BASE}nodes/{project_public._id}/draft_registrations/{draft_registration._id}/?version=2.19' def test_admin_can_delete_draft(self, app, user, url_draft_registrations, project_public): res = app.delete_json_api(url_draft_registrations, auth=user.auth) assert res.status_code == 204 - def test_cannot_delete_draft( - self, app, user_write_contrib, project_public, - user_read_contrib, user_non_contrib, - url_draft_registrations, group, group_mem): - - # test_read_only_contributor_cannot_delete_draft - res = app.delete_json_api( - url_draft_registrations, - auth=user_read_contrib.auth, - expect_errors=True) + def test_read_only_contributor_cannot_delete_draft(self, app, user_read_contrib, url_draft_registrations): + res = app.delete_json_api(url_draft_registrations, auth=user_read_contrib.auth, expect_errors=True) assert res.status_code == 403 - # test_read_write_contributor_cannot_delete_draft - res = app.delete_json_api( - url_draft_registrations, - auth=user_write_contrib.auth, - expect_errors=True) + def test_read_write_draft_contributor_cannot_delete_draft( + self, app, user_write_contrib, url_draft_registrations, project_public + ): + project_public.remove_contributor(user_write_contrib, Auth(user_write_contrib)) # Draft contributor only + res = app.delete_json_api(url_draft_registrations, auth=user_write_contrib.auth, expect_errors=True) assert res.status_code == 403 - # test_logged_in_non_contributor_cannot_delete_draft - res = app.delete_json_api( - url_draft_registrations, - auth=user_non_contrib.auth, - expect_errors=True) + def test_logged_in_non_contributor_cannot_delete_draft(self, app, user_non_contrib, url_draft_registrations): + res = 
app.delete_json_api(url_draft_registrations, auth=user_non_contrib.auth, expect_errors=True) assert res.status_code == 403 - # test_unauthenticated_user_cannot_delete_draft + def test_unauthenticated_user_cannot_delete_draft(self, app, url_draft_registrations): res = app.delete_json_api(url_draft_registrations, expect_errors=True) assert res.status_code == 401 - # test_group_member_admin_cannot_delete_draft - res = app.delete_json_api(url_draft_registrations, expect_errors=True, auth=group_mem.auth) - assert res.status_code == 403 - - # test_group_member_write_cannot_delete_draft - project_public.remove_osf_group(group) - project_public.add_osf_group(group, WRITE) - res = app.delete_json_api(url_draft_registrations, expect_errors=True, auth=group_mem.auth) - assert res.status_code == 403 - def test_draft_that_has_been_registered_cannot_be_deleted( self, app, user, project_public, draft_registration, url_draft_registrations): reg = RegistrationFactory(project=project_public) diff --git a/api_tests/nodes/views/test_node_draft_registration_list.py b/api_tests/nodes/views/test_node_draft_registration_list.py index 3a3c06f6c18..5e46b46b4c0 100644 --- a/api_tests/nodes/views/test_node_draft_registration_list.py +++ b/api_tests/nodes/views/test_node_draft_registration_list.py @@ -28,12 +28,16 @@ def invisible_and_inactive_schema(): @pytest.mark.django_db -class DraftRegistrationTestCase: +class AbstractDraftRegistrationTestCase: @pytest.fixture() def user(self): return AuthUserFactory() + @pytest.fixture() + def user_admin_contrib(self, user): + return AuthUserFactory() + @pytest.fixture() def user_write_contrib(self): return AuthUserFactory() @@ -55,7 +59,7 @@ def group(self, group_mem): return OSFGroupFactory(creator=group_mem) @pytest.fixture() - def project_public(self, user, user_write_contrib, user_read_contrib, group, group_mem): + def project_public(self, user, user_admin_contrib, user_write_contrib, user_read_contrib, group, group_mem): project_public = ProjectFactory(is_public=True, creator=user) project_public.add_contributor( user_write_contrib, @@ -63,11 +67,22 @@ def project_public(self, user, user_write_contrib, user_read_contrib, group, gro project_public.add_contributor( user_read_contrib, permissions=permissions.READ) + project_public.add_contributor( + user_admin_contrib, + permissions=permissions.ADMIN) project_public.save() project_public.add_osf_group(group, permissions.ADMIN) project_public.add_tag('hello', Auth(user), save=True) return project_public + @pytest.fixture() + def draft_registration(self, user, project_public, schema): + return DraftRegistrationFactory( + initiator=user, + registration_schema=schema, + branched_from=project_public + ) + @pytest.fixture() def metadata(self): def metadata(draft): @@ -90,15 +105,96 @@ def metadata(draft): return test_metadata return metadata + @pytest.fixture() + def schema(self): + return RegistrationSchema.objects.get( + name='OSF-Standard Pre-Data Collection Registration', + schema_version=SCHEMA_VERSION + ) + + @pytest.fixture() + def metaschema_open_ended(self): + return RegistrationSchema.objects.get( + name='Open-Ended Registration', + schema_version=OPEN_ENDED_SCHEMA_VERSION + ) + + @pytest.fixture() + def project_other(self, user): + return ProjectFactory(creator=user) + + @pytest.fixture() + def payload(self, metaschema_open_ended, provider): + return { + 'data': { + 'type': 'draft_registrations', + 'attributes': {}, + 'relationships': { + 'registration_schema': { + 'data': { + 'type': 'registration_schema', + 'id': 
metaschema_open_ended._id + } + }, + 'provider': { + 'data': { + 'type': 'registration-providers', + 'id': provider._id, + } + } + } + } + } + + @pytest.fixture() + def provider(self): + return RegistrationProvider.get_default() + + @pytest.fixture() + def non_default_provider(self, metaschema_open_ended): + non_default_provider = RegistrationProviderFactory() + non_default_provider.schemas.add(metaschema_open_ended) + non_default_provider.save() + return non_default_provider + + @pytest.fixture() + def payload_with_non_default_provider(self, metaschema_open_ended, non_default_provider): + return { + 'data': { + 'type': 'draft_registrations', + 'attributes': {}, + 'relationships': { + 'registration_schema': { + 'data': { + 'type': 'registration_schema', + 'id': metaschema_open_ended._id + } + }, + 'provider': { + 'data': { + 'type': 'registration-providers', + 'id': non_default_provider._id, + } + } + } + } + } + @pytest.mark.django_db -class TestDraftRegistrationList(DraftRegistrationTestCase): +class TestDraftRegistrationList(AbstractDraftRegistrationTestCase): + + @pytest.fixture() + def url_draft_registrations(self, project_public): + # Specifies version to test functionality when using DraftRegistrationLegacySerializer + return f'/{API_BASE}nodes/{project_public._id}/draft_registrations/?version=2.19' @pytest.fixture() def schema(self): return RegistrationSchema.objects.get( name='Open-Ended Registration', - schema_version=OPEN_ENDED_SCHEMA_VERSION) + schema_version=OPEN_ENDED_SCHEMA_VERSION + ) @pytest.fixture() def draft_registration(self, user, project_public, schema): @@ -108,15 +204,9 @@ def draft_registration(self, user, project_public, schema): branched_from=project_public ) - @pytest.fixture() - def url_draft_registrations(self, project_public): - # Specifies version to test functionality when using DraftRegistrationLegacySerializer - return '/{}nodes/{}/draft_registrations/?{}'.format( - API_BASE, project_public._id, 'version=2.19') - - def test_admin_can_view_draft_list( - self, app, user, draft_registration, project_public, - schema, url_draft_registrations): + def test_draft_admin_can_view_draft_list( + self, app, user, draft_registration, project_public, schema, url_draft_registrations + ): res = app.get(url_draft_registrations, auth=user.auth) assert res.status_code == 200 data = res.json['data'] @@ -126,54 +216,27 @@ def test_admin_can_view_draft_list( assert data[0]['id'] == draft_registration._id assert data[0]['attributes']['registration_metadata'] == {} - def test_osf_group_with_admin_permissions_can_view( - self, app, user, draft_registration, project_public, - schema, url_draft_registrations): - group_mem = AuthUserFactory() - group = OSFGroupFactory(creator=group_mem) - project_public.add_osf_group(group, permissions.ADMIN) - res = app.get(url_draft_registrations, auth=group_mem.auth, expect_errors=True) + def test_read_only_contributor_can_view_draft_list(self, app, user_read_contrib, url_draft_registrations): + res = app.get(url_draft_registrations, auth=user_read_contrib.auth) assert res.status_code == 200 - data = res.json['data'] - assert len(data) == 1 - assert schema._id in data[0]['relationships']['registration_schema']['links']['related']['href'] - - def test_cannot_view_draft_list( - self, app, user_write_contrib, project_public, - user_read_contrib, user_non_contrib, - url_draft_registrations, group, group_mem): - - # test_read_only_contributor_cannot_view_draft_list - res = app.get( - url_draft_registrations, - auth=user_read_contrib.auth, - 
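A minimal sketch (invented names, not part of this PR) of the fixture-inheritance pattern the refactor above relies on: fixtures declared on the shared AbstractDraftRegistrationTestCase-style base class are visible to every subclass, so concrete test classes only override what differs, such as the endpoint URL or the schema.

import pytest


class AbstractExampleDraftTestCase:
    # Shared fixture: available to every subclass below.
    @pytest.fixture()
    def payload(self):
        return {'data': {'type': 'draft_registrations', 'attributes': {}}}


class TestExampleDraftList(AbstractExampleDraftTestCase):
    # Endpoint-specific override; hypothetical URL, not an actual OSF route.
    @pytest.fixture()
    def url_draft_registrations(self):
        return '/v2/nodes/abcde/draft_registrations/?version=2.19'

    def test_payload_and_url(self, payload, url_draft_registrations):
        assert payload['data']['type'] == 'draft_registrations'
        assert url_draft_registrations.endswith('version=2.19')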
expect_errors=True) - assert res.status_code == 403 - # test_read_write_contributor_cannot_view_draft_list - res = app.get( - url_draft_registrations, - auth=user_write_contrib.auth, - expect_errors=True) - assert res.status_code == 403 + def test_read_write_contributor_can_view_draft_list(self, app, user_write_contrib, url_draft_registrations): + res = app.get(url_draft_registrations, auth=user_write_contrib.auth) + assert res.status_code == 200 - # test_logged_in_non_contributor_cannot_view_draft_list - res = app.get( - url_draft_registrations, - auth=user_non_contrib.auth, - expect_errors=True) - assert res.status_code == 403 + def test_draft_contributor_not_project_contributor_can_view_draft_list(self, app, user_non_contrib, draft_registration, project_public, url_draft_registrations): + draft_registration.add_contributor(contributor=user_non_contrib, auth=Auth(draft_registration.initiator), save=True) + assert not project_public.is_contributor(user_non_contrib) + assert draft_registration.is_contributor(user_non_contrib) + res = app.get(url_draft_registrations, auth=user_non_contrib.auth) + assert res.status_code == 200 + data = res.json['data'] + assert len(data) == 1 - # test_unauthenticated_user_cannot_view_draft_list + def test_unauthenticated_user_cannot_view_draft_list(self, app, url_draft_registrations): res = app.get(url_draft_registrations, expect_errors=True) assert res.status_code == 401 - # test_osf_group_with_read_permissions - project_public.remove_osf_group(group) - project_public.add_osf_group(group, permissions.READ) - res = app.get(url_draft_registrations, auth=group_mem.auth, expect_errors=True) - assert res.status_code == 403 - def test_deleted_draft_registration_does_not_show_up_in_draft_list( self, app, user, draft_registration, url_draft_registrations): draft_registration.deleted = timezone.now() @@ -227,24 +290,11 @@ def test_draft_registration_serializer_usage(self, app, user, project_public, dr @pytest.mark.django_db -class TestDraftRegistrationCreate(DraftRegistrationTestCase): - - @pytest.fixture() - def provider(self): - return RegistrationProvider.get_default() - - @pytest.fixture() - def non_default_provider(self, metaschema_open_ended): - non_default_provider = RegistrationProviderFactory() - non_default_provider.schemas.add(metaschema_open_ended) - non_default_provider.save() - return non_default_provider +class TestDraftRegistrationCreate(AbstractDraftRegistrationTestCase): @pytest.fixture() - def metaschema_open_ended(self): - return RegistrationSchema.objects.get( - name='Open-Ended Registration', - schema_version=OPEN_ENDED_SCHEMA_VERSION) + def url_draft_registrations(self, project_public): + return f'/{API_BASE}nodes/{project_public._id}/draft_registrations/?version=2.19' @pytest.fixture() def payload(self, metaschema_open_ended, provider): @@ -269,37 +319,7 @@ def payload(self, metaschema_open_ended, provider): } } - @pytest.fixture() - def payload_with_non_default_provider(self, metaschema_open_ended, non_default_provider): - return { - 'data': { - 'type': 'draft_registrations', - 'attributes': {}, - 'relationships': { - 'registration_schema': { - 'data': { - 'type': 'registration_schema', - 'id': metaschema_open_ended._id - } - }, - 'provider': { - 'data': { - 'type': 'registration-providers', - 'id': non_default_provider._id, - } - } - } - } - } - - @pytest.fixture() - def url_draft_registrations(self, project_public): - return '/{}nodes/{}/draft_registrations/?{}'.format( - API_BASE, project_public._id, 'version=2.19') - - def 
test_type_is_draft_registrations( - self, app, user, metaschema_open_ended, - url_draft_registrations): + def test_type_is_draft_registrations(self, app, user, metaschema_open_ended, url_draft_registrations): draft_data = { 'data': { 'type': 'nodes', @@ -322,10 +342,13 @@ def test_type_is_draft_registrations( assert res.status_code == 409 def test_admin_can_create_draft( - self, app, user, project_public, url_draft_registrations, - payload, metaschema_open_ended): - url = f'{url_draft_registrations}&embed=branched_from&embed=initiator' - res = app.post_json_api(url, payload, auth=user.auth) + self, app, user, project_public, url_draft_registrations, payload, metaschema_open_ended + ): + res = app.post_json_api( + f'{url_draft_registrations}&embed=branched_from&embed=initiator', + payload, + auth=user.auth + ) assert res.status_code == 201 data = res.json['data'] assert metaschema_open_ended._id in data['relationships']['registration_schema']['links']['related']['href'] @@ -335,13 +358,10 @@ def test_admin_can_create_draft( assert data['embeds']['branched_from']['data']['id'] == project_public._id assert data['embeds']['initiator']['data']['id'] == user._id - def test_cannot_create_draft( - self, app, user_write_contrib, - user_read_contrib, user_non_contrib, - project_public, payload, group, - url_draft_registrations, group_mem): + def test_write_only_contributor_cannot_create_draft( + self, app, user_write_contrib, project_public, payload, url_draft_registrations + ): - # test_write_only_contributor_cannot_create_draft assert user_write_contrib in project_public.contributors.all() res = app.post_json_api( url_draft_registrations, @@ -350,7 +370,9 @@ def test_cannot_create_draft( expect_errors=True) assert res.status_code == 403 - # test_read_only_contributor_cannot_create_draft + def test_read_only_contributor_cannot_create_draft( + self, app, user_read_contrib, project_public, payload, url_draft_registrations + ): assert user_read_contrib in project_public.contributors.all() res = app.post_json_api( url_draft_registrations, @@ -359,13 +381,17 @@ def test_cannot_create_draft( expect_errors=True) assert res.status_code == 403 - # test_non_authenticated_user_cannot_create_draft + def test_non_authenticated_user_cannot_create_draft(self, app, user_read_contrib, payload, url_draft_registrations): res = app.post_json_api( url_draft_registrations, - payload, expect_errors=True) + payload, + expect_errors=True + ) assert res.status_code == 401 - # test_logged_in_non_contributor_cannot_create_draft + def test_logged_in_non_contributor_cannot_create_draft( + self, app, user_non_contrib, payload, url_draft_registrations + ): res = app.post_json_api( url_draft_registrations, payload, @@ -373,24 +399,6 @@ def test_cannot_create_draft( expect_errors=True) assert res.status_code == 403 - # test_group_admin_cannot_create_draft - res = app.post_json_api( - url_draft_registrations, - payload, - auth=group_mem.auth, - expect_errors=True) - assert res.status_code == 403 - - # test_group_write_contrib_cannot_create_draft - project_public.remove_osf_group(group) - project_public.add_osf_group(group, permissions.WRITE) - res = app.post_json_api( - url_draft_registrations, - payload, - auth=group_mem.auth, - expect_errors=True) - assert res.status_code == 403 - def test_schema_validation( self, app, user, provider, non_default_provider, payload, payload_with_non_default_provider, url_draft_registrations, metaschema_open_ended): # Schema validation for a default provider without defined schemas with any schema 
is tested by `test_admin_can_create_draft` diff --git a/api_tests/registrations/views/test_registration_list.py b/api_tests/registrations/views/test_registration_list.py index b9604b83501..4eca7295f7a 100644 --- a/api_tests/registrations/views/test_registration_list.py +++ b/api_tests/registrations/views/test_registration_list.py @@ -7,7 +7,7 @@ from api.base.settings.defaults import API_BASE from api.base.versioning import CREATE_REGISTRATION_FIELD_CHANGE_VERSION -from api_tests.nodes.views.test_node_draft_registration_list import DraftRegistrationTestCase +from api_tests.nodes.views.test_node_draft_registration_list import AbstractDraftRegistrationTestCase from api_tests.subjects.mixins import SubjectsFilterMixin from api_tests.registrations.filters.test_filters import RegistrationListFilteringMixin from api_tests.utils import create_test_file @@ -585,7 +585,7 @@ def url(self): return f'/{API_BASE}registrations/' -class TestNodeRegistrationCreate(DraftRegistrationTestCase): +class TestNodeRegistrationCreate(AbstractDraftRegistrationTestCase): """ Tests for creating registration through old workflow - POST NodeRegistrationList diff --git a/api_tests/users/views/test_user_detail.py b/api_tests/users/views/test_user_detail.py index d676bb8b7af..02a616bc4c4 100644 --- a/api_tests/users/views/test_user_detail.py +++ b/api_tests/users/views/test_user_detail.py @@ -122,6 +122,16 @@ def test_preprint_relationship(self, app, user_one): href_url = user_json['relationships']['preprints']['links']['related']['href'] assert preprint_url in href_url + def test_draft_preprint_relationship(self, app, user_one): + preprint_url = f'/{API_BASE}users/{user_one._id}/draft_preprints/' + res = app.get( + f'/{API_BASE}users/{user_one._id}/', + auth=user_one + ) + user_json = res.json['data'] + href_url = user_json['relationships']['draft_preprints']['links']['related']['href'] + assert preprint_url in href_url + def test_registrations_relationship(self, app, user_one): url = f'/{API_BASE}users/{user_one._id}/' registration_url = '/{}users/{}/registrations/'.format( diff --git a/api_tests/users/views/test_user_draft_preprint.py b/api_tests/users/views/test_user_draft_preprint.py new file mode 100644 index 00000000000..7d1aca575c6 --- /dev/null +++ b/api_tests/users/views/test_user_draft_preprint.py @@ -0,0 +1,160 @@ +import pytest +from osf.utils.permissions import WRITE +from osf_tests.factories import ( + PreprintFactory, + AuthUserFactory, + ProjectFactory, + SubjectFactory, + PreprintProviderFactory, +) +from api.base.settings.defaults import API_BASE +from django.utils import timezone + +@pytest.mark.django_db +class TestPreprintDraftList: + + @pytest.fixture() + def admin(self): + return AuthUserFactory() + + @pytest.fixture() + def write_contrib(self): + return AuthUserFactory() + + @pytest.fixture() + def non_contrib(self): + return AuthUserFactory() + + @pytest.fixture() + def public_project(self, admin): + return ProjectFactory(creator=admin, is_public=True) + + @pytest.fixture() + def private_project(self, admin): + return ProjectFactory(creator=admin, is_public=False) + + @pytest.fixture() + def subject(self): + return SubjectFactory() + + @pytest.fixture() + def provider(self): + return PreprintProviderFactory() + + @pytest.fixture() + def unpublished_preprint(self, admin, provider, subject, public_project): + return PreprintFactory( + creator=admin, + provider=provider, + is_published=False, + machine_state='initial' + ) + + @pytest.fixture() + def private_preprint(self, admin, provider, subject, 
private_project, write_contrib): + preprint = PreprintFactory( + creator=admin, + provider=provider, + is_published=True, + is_public=False, + machine_state='accepted' + ) + preprint.add_contributor(write_contrib, permissions=WRITE) + preprint.save() + return preprint + + @pytest.fixture() + def published_preprint(self, admin, provider, subject, write_contrib): + preprint = PreprintFactory( + creator=admin, + provider=provider, + is_published=True, + is_public=True, + machine_state='accepted' + ) + preprint.add_contributor(write_contrib, permissions=WRITE) + return preprint + + @pytest.fixture() + def abandoned_private_preprint(self, admin, provider, subject, private_project): + return PreprintFactory( + creator=admin, + provider=provider, + project=private_project, + is_published=False, + is_public=False, + machine_state='initial' + ) + + @pytest.fixture() + def abandoned_public_preprint(self, admin, provider, subject, public_project): + return PreprintFactory( + creator=admin, + provider=provider, + project=public_project, + is_published=False, + is_public=True, + machine_state='initial' + ) + + @pytest.fixture() + def deleted_preprint(self, admin, provider, subject, public_project): + preprint = PreprintFactory( + creator=admin, + provider=provider, + project=public_project, + is_published=False, + is_public=False, + machine_state='initial', + ) + preprint.deleted = timezone.now() + preprint.save() + return preprint + + def test_gets_preprint_drafts(self, app, admin, abandoned_public_preprint, abandoned_private_preprint, published_preprint): + res = app.get( + f'/{API_BASE}users/{admin._id}/draft_preprints/', + auth=admin.auth + ) + assert res.status_code == 200 + + ids = [each['id'] for each in res.json['data']] + assert abandoned_public_preprint._id in ids + assert abandoned_private_preprint._id in ids + assert published_preprint._id not in ids + + def test_anonymous_gets_401(self, app, admin): + res = app.get( + f'/{API_BASE}users/{admin._id}/draft_preprints/', + expect_errors=True + ) + assert res.status_code == 401 + + def test_get_preprints_non_contrib_gets_403(self, app, admin, non_contrib, abandoned_public_preprint, abandoned_private_preprint): + res = app.get( + f'/{API_BASE}users/{admin._id}/draft_preprints/', + auth=non_contrib.auth, + expect_errors=True + ) + assert res.status_code == 403 + + def test_get_projects_logged_in_as_write_user(self, app, admin, write_contrib, abandoned_public_preprint): + res = app.get( + f'/{API_BASE}users/{admin._id}/draft_preprints/', + auth=write_contrib.auth, + expect_errors=True + ) + assert res.status_code == 403 + + def test_deleted_drafts_excluded(self, app, admin, abandoned_public_preprint, abandoned_private_preprint, published_preprint, deleted_preprint): + res = app.get( + f'/{API_BASE}users/{admin._id}/draft_preprints/', + auth=admin.auth + ) + assert res.status_code == 200 + + ids = [each['id'] for each in res.json['data']] + assert abandoned_public_preprint._id in ids + assert abandoned_private_preprint._id in ids + assert published_preprint._id not in ids + assert deleted_preprint._id not in ids # Make sure deleted preprints are not listed diff --git a/api_tests/users/views/test_user_draft_registration_list.py b/api_tests/users/views/test_user_draft_registration_list.py index 272a9a73d9c..1f43cc3ee33 100644 --- a/api_tests/users/views/test_user_draft_registration_list.py +++ b/api_tests/users/views/test_user_draft_registration_list.py @@ -3,7 +3,7 @@ from api.base.settings.defaults import API_BASE from api.users.views import 
UserDraftRegistrations -from api_tests.nodes.views.test_node_draft_registration_list import DraftRegistrationTestCase +from api_tests.nodes.views.test_node_draft_registration_list import AbstractDraftRegistrationTestCase from api_tests.utils import only_supports_methods from osf.models import RegistrationSchema from osf_tests.factories import ( @@ -17,7 +17,11 @@ @pytest.mark.django_db -class TestDraftRegistrationList(DraftRegistrationTestCase): +class TestUserDraftRegistrationList(AbstractDraftRegistrationTestCase): + + @pytest.fixture() + def url_draft_registrations(self, project_public): + return f'/{API_BASE}users/me/draft_registrations/' @pytest.fixture() def other_admin(self, project_public): @@ -29,7 +33,8 @@ def other_admin(self, project_public): def schema(self): return RegistrationSchema.objects.get( name='Open-Ended Registration', - schema_version=SCHEMA_VERSION) + schema_version=SCHEMA_VERSION + ) @pytest.fixture() def draft_registration(self, user, project_public, schema): @@ -39,25 +44,12 @@ def draft_registration(self, user, project_public, schema): branched_from=project_public ) - @pytest.fixture() - def url_draft_registrations(self, project_public): - return f'/{API_BASE}users/me/draft_registrations/' - def test_unacceptable_methods(self): assert only_supports_methods(UserDraftRegistrations, ['GET']) - def test_view_permissions( - self, app, user, other_admin, draft_registration, - user_write_contrib, user_read_contrib, user_non_contrib, - schema, url_draft_registrations): - res = app.get(url_draft_registrations, auth=user.auth) - assert res.status_code == 200 - data = res.json['data'] - assert len(data) == 1 - assert schema._id in data[0]['relationships']['registration_schema']['links']['related']['href'] - assert data[0]['id'] == draft_registration._id - assert data[0]['attributes']['registration_metadata'] == {} - + def test_non_contrib_view_permissions( + self, app, user, other_admin, draft_registration, schema, url_draft_registrations + ): res = app.get(url_draft_registrations, auth=user.auth) assert res.status_code == 200 data = res.json['data'] @@ -66,25 +58,33 @@ def test_view_permissions( assert data[0]['id'] == draft_registration._id assert data[0]['attributes']['registration_metadata'] == {} - # test_read_only_contributor_can_view_draft_list + def test_read_only_contributor_can_view_draft_list( + self, app, draft_registration, user_read_contrib, url_draft_registrations + ): res = app.get( url_draft_registrations, - auth=user_read_contrib.auth) + auth=user_read_contrib.auth + ) assert len(res.json['data']) == 1 - # test_read_write_contributor_can_view_draft_list + def test_read_write_contributor_can_view_draft_list( + self, app, user, other_admin, draft_registration, user_write_contrib, url_draft_registrations + ): res = app.get( url_draft_registrations, - auth=user_write_contrib.auth) + auth=user_write_contrib.auth + ) assert len(res.json['data']) == 1 - # test_logged_in_non_contributor_cannot_view_draft_list + def test_logged_in_non_contributor_cannot_view_draft_list( + self, app, user, draft_registration, user_non_contrib, url_draft_registrations + ): res = app.get( url_draft_registrations, auth=user_non_contrib.auth) assert len(res.json['data']) == 0 - # test_unauthenticated_user_cannot_view_draft_list + def test_unauthenticated_user_cannot_view_draft_list(self, app, url_draft_registrations): res = app.get(url_draft_registrations, expect_errors=True) assert res.status_code == 401 @@ -133,15 +133,15 @@ def 
test_draft_with_deleted_registered_node_shows_up_in_draft_list( assert data[0]['id'] == draft_registration._id assert data[0]['attributes']['registration_metadata'] == {} - def test_cannot_access_other_users_draft_registration( - self, app, user, other_admin, project_public, - draft_registration, schema): - url = f'/{API_BASE}users/{user._id}/draft_registrations/' - res = app.get(url, auth=other_admin.auth, expect_errors=True) + def test_cannot_access_other_users_draft_registration(self, app, user, other_admin, draft_registration, schema): + res = app.get( + f'/{API_BASE}users/{user._id}/draft_registrations/', + auth=other_admin.auth, + expect_errors=True + ) assert res.status_code == 403 - def test_can_access_own_draft_registrations_with_guid( - self, app, user, draft_registration): + def test_can_access_own_draft_registrations_with_guid(self, app, user, draft_registration): url = f'/{API_BASE}users/{user._id}/draft_registrations/' res = app.get(url, auth=user.auth, expect_errors=True) assert res.status_code == 200 diff --git a/api_tests/users/views/test_user_settings.py b/api_tests/users/views/test_user_settings.py index d62b47405e7..039401afd3c 100644 --- a/api_tests/users/views/test_user_settings.py +++ b/api_tests/users/views/test_user_settings.py @@ -215,7 +215,7 @@ def test_unconfirmed_email_included(self, app, url, payload, user_one, unconfirm assert res.status_code == 200 assert unconfirmed_address in [result['attributes']['email_address'] for result in res.json['data']] - @mock.patch('api.users.serializers.send_confirm_email') + @mock.patch('api.users.serializers.send_confirm_email_async') def test_create_new_email_current_user(self, mock_send_confirm_mail, user_one, user_two, app, url, payload): new_email = 'hhh@wwe.test' payload['data']['attributes']['email_address'] = new_email @@ -228,7 +228,7 @@ def test_create_new_email_current_user(self, mock_send_confirm_mail, user_one, u assert new_email in user_one.unconfirmed_emails assert mock_send_confirm_mail.called - @mock.patch('api.users.serializers.send_confirm_email') + @mock.patch('api.users.serializers.send_confirm_email_async') def test_create_new_email_not_current_user(self, mock_send_confirm_mail, app, url, payload, user_one, user_two): new_email = 'HHH@wwe.test' payload['data']['attributes']['email_address'] = new_email @@ -238,7 +238,7 @@ def test_create_new_email_not_current_user(self, mock_send_confirm_mail, app, ur assert new_email not in user_one.unconfirmed_emails assert not mock_send_confirm_mail.called - @mock.patch('api.users.serializers.send_confirm_email') + @mock.patch('api.users.serializers.send_confirm_email_async') def test_create_email_already_exists(self, mock_send_confirm_mail, app, url, payload, user_one): new_email = 'hello@email.test' Email.objects.create(address=new_email, user=user_one) @@ -577,28 +577,28 @@ def test_updating_verified_for_merge(self, app, user_one, user_two, payload): assert res.json['data']['attributes']['confirmed'] is True assert res.json['data']['attributes']['is_merge'] is False - @mock.patch('api.users.views.send_confirm_email') - def test_resend_confirmation_email(self, mock_send_confirm_email, app, user_one, unconfirmed_url, confirmed_url): + @mock.patch('api.users.views.send_confirm_email_async') + def test_resend_confirmation_email(self, mock_send_confirm_email_async, app, user_one, unconfirmed_url, confirmed_url): url = f'{unconfirmed_url}?resend_confirmation=True' res = app.get(url, auth=user_one.auth) assert res.status_code == 202 - assert mock_send_confirm_email.called 
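A short, self-contained sketch (toy module and names assumed, not the real api.users.serializers) of why the patch targets above move in lockstep with the rename: mock.patch has to point at the name as it is looked up, so once the serializers call send_confirm_email_async, that is the attribute the tests must patch.

import types
from unittest import mock

# Toy stand-in for a serializers module after the rename.
fake_serializers = types.ModuleType('fake_serializers')

def send_confirm_email_async(user, email):
    raise RuntimeError('would enqueue a real email')

fake_serializers.send_confirm_email_async = send_confirm_email_async

# Patching the attribute where it is looked up keeps the test offline.
with mock.patch.object(fake_serializers, 'send_confirm_email_async') as mocked:
    fake_serializers.send_confirm_email_async('user-one', 'hhh@wwe.test')
    assert mocked.called
    assert mocked.call_count == 1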
- call_count = mock_send_confirm_email.call_count + assert mock_send_confirm_email_async.called + call_count = mock_send_confirm_email_async.call_count # make sure setting false does not send confirm email url = f'{unconfirmed_url}?resend_confirmation=False' res = app.get(url, auth=user_one.auth) # should return 200 instead of 202 because nothing has been done assert res.status_code == 200 - assert mock_send_confirm_email.call_count + assert mock_send_confirm_email_async.call_count # make sure normal GET request does not re-send confirmation email res = app.get(unconfirmed_url, auth=user_one.auth) - assert mock_send_confirm_email.call_count == call_count + assert mock_send_confirm_email_async.call_count == call_count assert res.status_code == 200 # resend confirmation with confirmed email address does not send confirmation email url = f'{confirmed_url}?resend_confirmation=True' res = app.get(url, auth=user_one.auth) - assert mock_send_confirm_email.call_count == call_count + assert mock_send_confirm_email_async.call_count == call_count assert res.status_code == 200 diff --git a/conftest.py b/conftest.py index 1acfabbdbb5..2eb51df076e 100644 --- a/conftest.py +++ b/conftest.py @@ -124,7 +124,7 @@ def _test_speedups_disable(request, settings, _test_speedups): @pytest.fixture(scope='session') def setup_connections(): - connections.create_connection(hosts=['http://localhost:9201']) + connections.create_connection(hosts=[website_settings.ELASTIC6_URI]) @pytest.fixture(scope='function') diff --git a/docker-compose-dist-arm64.override.yml b/docker-compose-dist-arm64.override.yml index aad331ae1a7..cffa4bd8982 100644 --- a/docker-compose-dist-arm64.override.yml +++ b/docker-compose-dist-arm64.override.yml @@ -6,43 +6,6 @@ services: # OSF # ####### - requirements: - image: quay.io/centerforopenscience/osf:develop-arm64 + elasticsearch6: + image: quay.io/centerforopenscience/elasticsearch:es6-arm-6.3.1 platform: linux/arm64 - - assets: - image: quay.io/centerforopenscience/osf:develop-arm64 - platform: linux/arm64 - # Need to allocate tty to be able to call invoke for requirements task - tty: true - - admin_assets: - image: quay.io/centerforopenscience/osf:develop-arm64 - platform: linux/arm64 - # Need to allocate tty to be able to call invoke for requirements task - tty: true - - worker: - image: quay.io/centerforopenscience/osf:develop-arm64 - platform: linux/arm64 - # Need to allocate tty to be able to call invoke for requirements task - tty: true - - admin: - image: quay.io/centerforopenscience/osf:develop-arm64 - platform: linux/arm64 - # Need to allocate tty to be able to call invoke for requirements task - tty: true - - api: - image: quay.io/centerforopenscience/osf:develop-arm64 - platform: linux/arm64 - # Need to allocate tty to be able to call invoke for requirements task - tty: true - - web: - image: quay.io/centerforopenscience/osf:develop-arm64 - platform: linux/arm64 - # Need to allocate tty to be able to call invoke for requirements task - tty: true - diff --git a/docker-compose.yml b/docker-compose.yml index 22d0dad7676..ce4e3ea0618 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -358,7 +358,7 @@ services: - /bin/bash - -c - python -m venv /tmp/venv - && /tmp/venv/bin/pip install poetry==1.8.0 && + && /tmp/venv/bin/pip install poetry==1.8.3 && /tmp/venv/bin/poetry install --no-root --without release --compile --sync && rm -rf /python3.12/* && cp -Rf -p /usr/local/lib/python3.12 / @@ -366,7 +366,8 @@ services: environment: DJANGO_SETTINGS_MODULE: api.base.settings volumes: 
- - ./:/code:cached + - ./pyproject.toml:/code/pyproject.toml + - ./poetry.lock:/code/poetry.lock - osf_requirements_3_12_vol:/python3.12 assets: diff --git a/framework/auth/__init__.py b/framework/auth/__init__.py index 02ae8787edb..73ca8dd06a8 100644 --- a/framework/auth/__init__.py +++ b/framework/auth/__init__.py @@ -12,6 +12,7 @@ from framework.celery_tasks.handlers import enqueue_task from framework.sessions import get_session, create_session from framework.sessions.utils import remove_session +from website.util.metrics import institution_source_tag __all__ = [ @@ -153,6 +154,7 @@ def get_or_create_institutional_user(fullname, sso_email, sso_identity, primary_ # Note: Institution users are created as confirmed with a strong and random password. Users don't need the # password since they sign in via SSO. They can reset their password to enable email/password login. user = OSFUser.create_confirmed(sso_email, str(uuid.uuid4()), fullname) + user.add_system_tag(institution_source_tag(primary_institution._id)) return user, True, None, None, sso_identity diff --git a/framework/auth/views.py b/framework/auth/views.py index 1b3f5fc425c..e398a6db0a5 100644 --- a/framework/auth/views.py +++ b/framework/auth/views.py @@ -22,6 +22,7 @@ from framework.celery_tasks.handlers import enqueue_task from framework.exceptions import HTTPError from framework.flask import redirect # VOL-aware redirect +from framework.postcommit_tasks.handlers import enqueue_postcommit_task from framework.sessions.utils import remove_sessions_for_user from framework.sessions import get_session from framework.utils import throttle_period_expired @@ -800,7 +801,6 @@ def unconfirmed_email_add(auth=None): 'removed_email': json_body['address'] }, 200 - def send_confirm_email(user, email, renew=False, external_id_provider=None, external_id=None, destination=None): """ Sends `user` a confirmation to the given `email`. @@ -815,7 +815,6 @@ def send_confirm_email(user, email, renew=False, external_id_provider=None, exte :return: :raises: KeyError if user does not have a confirmation token for the given email. """ - confirmation_url = user.get_confirmation_url( email, external=True, @@ -872,6 +871,9 @@ def send_confirm_email(user, email, renew=False, external_id_provider=None, exte logo=logo if logo else settings.OSF_LOGO ) +def send_confirm_email_async(user, email, renew=False, external_id_provider=None, external_id=None, destination=None): + enqueue_postcommit_task(send_confirm_email, (user, email, renew, external_id_provider, external_id, destination), {}) + def register_user(**kwargs): """ @@ -942,7 +944,7 @@ def register_user(**kwargs): ) if settings.CONFIRM_REGISTRATIONS_BY_EMAIL: - send_confirm_email(user, email=user.username) + send_confirm_email_async(user, email=user.username) message = language.REGISTRATION_SUCCESS.format(email=user.username) return {'message': message} else: @@ -1096,7 +1098,7 @@ def external_login_email_post(): # 2. add unconfirmed email and send confirmation email user.add_unconfirmed_email(clean_email, external_identity=external_identity) user.save() - send_confirm_email( + send_confirm_email_async( user, clean_email, external_id_provider=external_id_provider, @@ -1126,7 +1128,7 @@ def external_login_email_post(): # TODO: [#OSF-6934] update social fields, verified social fields cannot be modified user.save() # 3. 
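A minimal sketch, with _demo-suffixed stand-ins rather than the real framework helpers, of the wrapper shape send_confirm_email_async introduces above: instead of sending the confirmation inline, the call is queued and only executed once the surrounding request/transaction has finished.

from functools import partial

_postcommit_queue = []

def enqueue_postcommit_task_demo(fn, args, kwargs):
    # Stand-in for a real post-commit hook: remember the call for later.
    _postcommit_queue.append(partial(fn, *args, **kwargs))

def send_confirm_email_demo(user, email):
    print(f'confirmation sent to {email} for {user}')

def send_confirm_email_async_demo(user, email):
    # Same signature as the synchronous helper; only the timing changes.
    enqueue_postcommit_task_demo(send_confirm_email_demo, (user, email), {})

send_confirm_email_async_demo('freddie', 'freddie@example.test')
assert _postcommit_queue            # nothing sent yet, only queued
for task in _postcommit_queue:      # simulate "after commit"
    task()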
send confirmation email - send_confirm_email( + send_confirm_email_async( user, user.username, external_id_provider=external_id_provider, diff --git a/osf/models/mixins.py b/osf/models/mixins.py index b205d2fbe2b..ba039cc8651 100644 --- a/osf/models/mixins.py +++ b/osf/models/mixins.py @@ -1451,22 +1451,29 @@ def add_unregistered_contributor(self, fullname, email, auth, send_email=None, # Create a new user record if you weren't passed an existing user contributor = existing_user if existing_user else OSFUser.create_unregistered(fullname=fullname, email=email) - contributor.add_unclaimed_record(self, referrer=auth.user, - given_name=fullname, email=email) try: - contributor.save() - except ValidationError: # User with same email already exists - contributor = get_user(email=email) - # Unregistered users may have multiple unclaimed records, so - # only raise error if user is registered. - if contributor.is_registered or self.is_contributor(contributor): - raise - contributor.add_unclaimed_record( - self, referrer=auth.user, given_name=fullname, email=email + self, + referrer=auth.user, + given_name=fullname, + email=email, ) - - contributor.save() + except ValidationError as e: + if 'Osf user with this Username already exists.' in e.message_dict.get('username'): + contributor = get_user(email=email) + # Unregistered users may have multiple unclaimed records, so + # only raise error if user is registered. + if contributor.is_registered or self.is_contributor(contributor): + raise + + contributor.add_unclaimed_record( + self, + referrer=auth.user, + given_name=fullname, + email=email, + ) + else: + raise e self.add_contributor( contributor, permissions=permissions, auth=auth, diff --git a/osf/models/node.py b/osf/models/node.py index d341014f336..9e342308f44 100644 --- a/osf/models/node.py +++ b/osf/models/node.py @@ -697,8 +697,11 @@ def csl(self): # formats node information into CSL format for citation parsing if doi: csl['DOI'] = doi - if self.logs.exists(): - csl['issued'] = datetime_to_csl(self.logs.latest().date) + if self.registered_date: + csl['issued'] = datetime_to_csl(self.registered_date) + else: + if self.logs.exists(): + csl['issued'] = datetime_to_csl(self.logs.latest().date) return csl diff --git a/osf/models/osf_group.py b/osf/models/osf_group.py index f93bff12353..a9a6b3b6f56 100644 --- a/osf/models/osf_group.py +++ b/osf/models/osf_group.py @@ -233,8 +233,12 @@ def add_unregistered_member(self, fullname, email, auth, role=MEMBER): raise ValueError('User already exists.') else: user = OSFUser.create_unregistered(fullname=fullname, email=email) - user.add_unclaimed_record(self, referrer=auth.user, given_name=fullname, email=email) - user.save() + user.add_unclaimed_record( + self, + referrer=auth.user, + given_name=fullname, + email=email, + ) if role == MANAGER: self.make_manager(user, auth=auth) diff --git a/osf/models/user.py b/osf/models/user.py index 517b3790cb9..29e10efa991 100644 --- a/osf/models/user.py +++ b/osf/models/user.py @@ -55,7 +55,7 @@ from website import settings as website_settings from website import filters, mails from website.project import new_bookmark_collection -from website.util.metrics import OsfSourceTags +from website.util.metrics import OsfSourceTags, unregistered_created_source_tag from importlib import import_module from osf.utils.requests import get_headers_from_request @@ -1662,6 +1662,10 @@ def add_unclaimed_record(self, claim_origin, referrer, given_name, email=None): 'email': clean_email, } self.unclaimed_records[pid] = record + + 
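A simplified, assumed stand-in (not the OSF models) for the ordering enforced just below, where the record is saved before tagging: a system-tag entry has to reference the user's primary key, which only exists after save().

class FakeTaggedUser:
    _next_pk = 1

    def __init__(self):
        self.pk = None
        self.system_tags = []

    def save(self):
        if self.pk is None:
            self.pk = FakeTaggedUser._next_pk
            FakeTaggedUser._next_pk += 1

    def add_system_tag(self, tag):
        if self.pk is None:
            raise ValueError('save() first: the tag row must reference a pk')
        self.system_tags.append(tag)

user = FakeTaggedUser()
user.save()                                  # pk assigned here
user.add_system_tag('source:unregistered_created|ref123')
assert user.system_tags == ['source:unregistered_created|ref123']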
self.save() # must save for PK to add system tags + self.add_system_tag(unregistered_created_source_tag(referrer_id)) + return record def get_unclaimed_record(self, project_id): diff --git a/osf/utils/notifications.py b/osf/utils/notifications.py index 004d0c57d30..92ea38fcf70 100644 --- a/osf/utils/notifications.py +++ b/osf/utils/notifications.py @@ -43,13 +43,19 @@ def notify_submit(resource, user, *args, **kwargs): ) -def notify_resubmit(resource, user, action, *args, **kwargs): +def notify_resubmit(resource, user, *args, **kwargs): context = get_email_template_context(resource) - reviews_signals.reviews_email.send( - creator=user, + context['referrer'] = user + context['resubmission'] = True + recipients = list(resource.contributors) + reviews_signals.reviews_email_submit.send( + recipients=recipients, context=context, - template='reviews_resubmission_confirmation', - action=action + template=mails.REVIEWS_RESUBMISSION_CONFIRMATION, + ) + reviews_signals.reviews_email_submit_moderators_notifications.send( + timestamp=timezone.now(), + context=context ) diff --git a/osf_tests/management_commands/test_reindex_es6.py b/osf_tests/management_commands/test_reindex_es6.py index 2e881b8f088..1031ba1f782 100644 --- a/osf_tests/management_commands/test_reindex_es6.py +++ b/osf_tests/management_commands/test_reindex_es6.py @@ -46,7 +46,7 @@ def url(self): return f'{settings.API_DOMAIN}_/metrics/preprints/downloads/' @pytest.mark.es - @pytest.mark.skipif(django_settings.TRAVIS_ENV, reason='Non-deterministic fails on travis') + @pytest.mark.skipif(django_settings.CI_ENV, reason='Non-deterministic fails on CI') def test_reindexing(self, app, url, preprint, user, admin, es6_client): preprint_download = PreprintDownload.record_for_preprint( preprint, diff --git a/osf_tests/test_reviewable.py b/osf_tests/test_reviewable.py index 6ded16a6df3..1d25ca4adac 100644 --- a/osf_tests/test_reviewable.py +++ b/osf_tests/test_reviewable.py @@ -4,6 +4,8 @@ from osf.models import Preprint from osf.utils.workflows import DefaultStates from osf_tests.factories import PreprintFactory, AuthUserFactory +from website import mails + @pytest.mark.django_db class TestReviewable: @@ -31,3 +33,29 @@ def test_state_changes(self, _): assert preprint.machine_state == DefaultStates.ACCEPTED.value from_db.refresh_from_db() assert from_db.machine_state == DefaultStates.ACCEPTED.value + + @mock.patch('website.reviews.listeners.mails.send_mail') + def test_reject_resubmission_sends_emails(self, send_mail): + user = AuthUserFactory() + preprint = PreprintFactory( + reviews_workflow='pre-moderation', + is_published=False + ) + assert preprint.machine_state == DefaultStates.INITIAL.value + assert not send_mail.call_count + + preprint.run_submit(user) + assert send_mail.call_count == 1 + assert preprint.machine_state == DefaultStates.PENDING.value + mail_template = send_mail.call_args[0][1] + assert mail_template == mails.REVIEWS_SUBMISSION_CONFIRMATION + + assert not user.notification_subscriptions.exists() + preprint.run_reject(user, 'comment') + assert preprint.machine_state == DefaultStates.REJECTED.value + + preprint.run_submit(user) # Resubmission alerts users and moderators + assert preprint.machine_state == DefaultStates.PENDING.value + mail_template = send_mail.call_args[0][1] + assert send_mail.call_count == 2 + assert mail_template == mails.REVIEWS_RESUBMISSION_CONFIRMATION diff --git a/osf_tests/test_user.py b/osf_tests/test_user.py index 46b89f46343..c5774823ccd 100644 --- a/osf_tests/test_user.py +++ 
b/osf_tests/test_user.py @@ -1189,6 +1189,7 @@ def test_add_unclaimed_record(self, unreg_user, unreg_moderator, email, referrer assert 'token' in data assert data['email'] == email assert data == unreg_user.get_unclaimed_record(project._primary_key) + assert f'source:unregistered_created|{referrer._id}' in unreg_user.system_tags # test_unreg_moderator data = unreg_moderator.unclaimed_records[provider._id] @@ -1197,6 +1198,7 @@ def test_add_unclaimed_record(self, unreg_user, unreg_moderator, email, referrer assert 'token' in data assert data['email'] == email assert data == unreg_moderator.get_unclaimed_record(provider._id) + assert f'source:unregistered_created|{referrer._id}' in unreg_user.system_tags def test_get_claim_url(self, unreg_user, unreg_moderator, project, provider): # test_unreg_contrib diff --git a/package.json b/package.json index e558bdbb72c..f9345f16fcf 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "OSF", - "version": "24.05.0", + "version": "24.06.0", "description": "Facilitating Open Science", "repository": "https://github.com/CenterForOpenScience/osf.io", "author": "Center for Open Science", diff --git a/poetry.lock b/poetry.lock index df2d3af5e08..4fefc57a6b2 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,103 +1,91 @@ # This file is automatically @generated by Poetry 1.8.0 and should not be changed by hand. -[[package]] -name = "aiohappyeyeballs" -version = "2.3.5" -description = "Happy Eyeballs for asyncio" -optional = false -python-versions = ">=3.8" -files = [ - {file = "aiohappyeyeballs-2.3.5-py3-none-any.whl", hash = "sha256:4d6dea59215537dbc746e93e779caea8178c866856a721c9c660d7a5a7b8be03"}, - {file = "aiohappyeyeballs-2.3.5.tar.gz", hash = "sha256:6fa48b9f1317254f122a07a131a86b71ca6946ca989ce6326fff54a99a920105"}, -] - [[package]] name = "aiohttp" -version = "3.10.1" +version = "3.9.5" description = "Async http client/server framework (asyncio)" optional = false python-versions = ">=3.8" files = [ - {file = "aiohttp-3.10.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:47b4c2412960e64d97258f40616efddaebcb34ff664c8a972119ed38fac2a62c"}, - {file = "aiohttp-3.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e7dbf637f87dd315fa1f36aaed8afa929ee2c607454fb7791e74c88a0d94da59"}, - {file = "aiohttp-3.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c8fb76214b5b739ce59e2236a6489d9dc3483649cfd6f563dbf5d8e40dbdd57d"}, - {file = "aiohttp-3.10.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c577cdcf8f92862363b3d598d971c6a84ed8f0bf824d4cc1ce70c2fb02acb4a"}, - {file = "aiohttp-3.10.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:777e23609899cb230ad2642b4bdf1008890f84968be78de29099a8a86f10b261"}, - {file = "aiohttp-3.10.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b07286a1090483799599a2f72f76ac396993da31f6e08efedb59f40876c144fa"}, - {file = "aiohttp-3.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9db600a86414a9a653e3c1c7f6a2f6a1894ab8f83d11505247bd1b90ad57157"}, - {file = "aiohttp-3.10.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01c3f1eb280008e51965a8d160a108c333136f4a39d46f516c64d2aa2e6a53f2"}, - {file = "aiohttp-3.10.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f5dd109a925fee4c9ac3f6a094900461a2712df41745f5d04782ebcbe6479ccb"}, - {file = "aiohttp-3.10.1-cp310-cp310-musllinux_1_2_i686.whl", hash = 
"sha256:8c81ff4afffef9b1186639506d70ea90888218f5ddfff03870e74ec80bb59970"}, - {file = "aiohttp-3.10.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:2a384dfbe8bfebd203b778a30a712886d147c61943675f4719b56725a8bbe803"}, - {file = "aiohttp-3.10.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:b9fb6508893dc31cfcbb8191ef35abd79751db1d6871b3e2caee83959b4d91eb"}, - {file = "aiohttp-3.10.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:88596384c3bec644a96ae46287bb646d6a23fa6014afe3799156aef42669c6bd"}, - {file = "aiohttp-3.10.1-cp310-cp310-win32.whl", hash = "sha256:68164d43c580c2e8bf8e0eb4960142919d304052ccab92be10250a3a33b53268"}, - {file = "aiohttp-3.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:d6bbe2c90c10382ca96df33b56e2060404a4f0f88673e1e84b44c8952517e5f3"}, - {file = "aiohttp-3.10.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f6979b4f20d3e557a867da9d9227de4c156fcdcb348a5848e3e6190fd7feb972"}, - {file = "aiohttp-3.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:03c0c380c83f8a8d4416224aafb88d378376d6f4cadebb56b060688251055cd4"}, - {file = "aiohttp-3.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1c2b104e81b3c3deba7e6f5bc1a9a0e9161c380530479970766a6655b8b77c7c"}, - {file = "aiohttp-3.10.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b023b68c61ab0cd48bd38416b421464a62c381e32b9dc7b4bdfa2905807452a4"}, - {file = "aiohttp-3.10.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a07c76a82390506ca0eabf57c0540cf5a60c993c442928fe4928472c4c6e5e6"}, - {file = "aiohttp-3.10.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:41d8dab8c64ded1edf117d2a64f353efa096c52b853ef461aebd49abae979f16"}, - {file = "aiohttp-3.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:615348fab1a9ef7d0960a905e83ad39051ae9cb0d2837da739b5d3a7671e497a"}, - {file = "aiohttp-3.10.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:256ee6044214ee9d66d531bb374f065ee94e60667d6bbeaa25ca111fc3997158"}, - {file = "aiohttp-3.10.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b7d5bb926805022508b7ddeaad957f1fce7a8d77532068d7bdb431056dc630cd"}, - {file = "aiohttp-3.10.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:028faf71b338f069077af6315ad54281612705d68889f5d914318cbc2aab0d50"}, - {file = "aiohttp-3.10.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:5c12310d153b27aa630750be44e79313acc4e864c421eb7d2bc6fa3429c41bf8"}, - {file = "aiohttp-3.10.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:de1a91d5faded9054957ed0a9e01b9d632109341942fc123947ced358c5d9009"}, - {file = "aiohttp-3.10.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9c186b270979fb1dee3ababe2d12fb243ed7da08b30abc83ebac3a928a4ddb15"}, - {file = "aiohttp-3.10.1-cp311-cp311-win32.whl", hash = "sha256:4a9ce70f5e00380377aac0e568abd075266ff992be2e271765f7b35d228a990c"}, - {file = "aiohttp-3.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:a77c79bac8d908d839d32c212aef2354d2246eb9deb3e2cb01ffa83fb7a6ea5d"}, - {file = "aiohttp-3.10.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:2212296cdb63b092e295c3e4b4b442e7b7eb41e8a30d0f53c16d5962efed395d"}, - {file = "aiohttp-3.10.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4dcb127ca3eb0a61205818a606393cbb60d93b7afb9accd2fd1e9081cc533144"}, - {file = "aiohttp-3.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:cb8b79a65332e1a426ccb6290ce0409e1dc16b4daac1cc5761e059127fa3d134"}, - {file = "aiohttp-3.10.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68cc24f707ed9cb961f6ee04020ca01de2c89b2811f3cf3361dc7c96a14bfbcc"}, - {file = "aiohttp-3.10.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9cb54f5725b4b37af12edf6c9e834df59258c82c15a244daa521a065fbb11717"}, - {file = "aiohttp-3.10.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:51d03e948e53b3639ce4d438f3d1d8202898ec6655cadcc09ec99229d4adc2a9"}, - {file = "aiohttp-3.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:786299d719eb5d868f161aeec56d589396b053925b7e0ce36e983d30d0a3e55c"}, - {file = "aiohttp-3.10.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abda4009a30d51d3f06f36bc7411a62b3e647fa6cc935ef667e3e3d3a7dd09b1"}, - {file = "aiohttp-3.10.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:67f7639424c313125213954e93a6229d3a1d386855d70c292a12628f600c7150"}, - {file = "aiohttp-3.10.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:8e5a26d7aac4c0d8414a347da162696eea0629fdce939ada6aedf951abb1d745"}, - {file = "aiohttp-3.10.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:120548d89f14b76a041088b582454d89389370632ee12bf39d919cc5c561d1ca"}, - {file = "aiohttp-3.10.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:f5293726943bdcea24715b121d8c4ae12581441d22623b0e6ab12d07ce85f9c4"}, - {file = "aiohttp-3.10.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1f8605e573ed6c44ec689d94544b2c4bb1390aaa723a8b5a2cc0a5a485987a68"}, - {file = "aiohttp-3.10.1-cp312-cp312-win32.whl", hash = "sha256:e7168782621be4448d90169a60c8b37e9b0926b3b79b6097bc180c0a8a119e73"}, - {file = "aiohttp-3.10.1-cp312-cp312-win_amd64.whl", hash = "sha256:8fbf8c0ded367c5c8eaf585f85ca8dd85ff4d5b73fb8fe1e6ac9e1b5e62e11f7"}, - {file = "aiohttp-3.10.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:54b7f4a20d7cc6bfa4438abbde069d417bb7a119f870975f78a2b99890226d55"}, - {file = "aiohttp-3.10.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2fa643ca990323db68911b92f3f7a0ca9ae300ae340d0235de87c523601e58d9"}, - {file = "aiohttp-3.10.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d8311d0d690487359fe2247ec5d2cac9946e70d50dced8c01ce9e72341c21151"}, - {file = "aiohttp-3.10.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:222821c60b8f6a64c5908cb43d69c0ee978a1188f6a8433d4757d39231b42cdb"}, - {file = "aiohttp-3.10.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e7b55d9ede66af7feb6de87ff277e0ccf6d51c7db74cc39337fe3a0e31b5872d"}, - {file = "aiohttp-3.10.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a95151a5567b3b00368e99e9c5334a919514f60888a6b6d2054fea5e66e527e"}, - {file = "aiohttp-3.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e9e9171d2fe6bfd9d3838a6fe63b1e91b55e0bf726c16edf265536e4eafed19"}, - {file = "aiohttp-3.10.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a57e73f9523e980f6101dc9a83adcd7ac0006ea8bf7937ca3870391c7bb4f8ff"}, - {file = "aiohttp-3.10.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:0df51a3d70a2bfbb9c921619f68d6d02591f24f10e9c76de6f3388c89ed01de6"}, - {file = "aiohttp-3.10.1-cp38-cp38-musllinux_1_2_i686.whl", hash = 
"sha256:b0de63ff0307eac3961b4af74382d30220d4813f36b7aaaf57f063a1243b4214"}, - {file = "aiohttp-3.10.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:8db9b749f589b5af8e4993623dbda6716b2b7a5fcb0fa2277bf3ce4b278c7059"}, - {file = "aiohttp-3.10.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:6b14c19172eb53b63931d3e62a9749d6519f7c121149493e6eefca055fcdb352"}, - {file = "aiohttp-3.10.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:5cd57ad998e3038aa87c38fe85c99ed728001bf5dde8eca121cadee06ee3f637"}, - {file = "aiohttp-3.10.1-cp38-cp38-win32.whl", hash = "sha256:df31641e3f02b77eb3c5fb63c0508bee0fc067cf153da0e002ebbb0db0b6d91a"}, - {file = "aiohttp-3.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:93094eba50bc2ad4c40ff4997ead1fdcd41536116f2e7d6cfec9596a8ecb3615"}, - {file = "aiohttp-3.10.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:440954ddc6b77257e67170d57b1026aa9545275c33312357472504eef7b4cc0b"}, - {file = "aiohttp-3.10.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f9f8beed277488a52ee2b459b23c4135e54d6a819eaba2e120e57311015b58e9"}, - {file = "aiohttp-3.10.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d8a8221a63602008550022aa3a4152ca357e1dde7ab3dd1da7e1925050b56863"}, - {file = "aiohttp-3.10.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a702bd3663b5cbf3916e84bf332400d24cdb18399f0877ca6b313ce6c08bfb43"}, - {file = "aiohttp-3.10.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1988b370536eb14f0ce7f3a4a5b422ab64c4e255b3f5d7752c5f583dc8c967fc"}, - {file = "aiohttp-3.10.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7ccf1f0a304352c891d124ac1a9dea59b14b2abed1704aaa7689fc90ef9c5be1"}, - {file = "aiohttp-3.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc3ea6ef2a83edad84bbdb5d96e22f587b67c68922cd7b6f9d8f24865e655bcf"}, - {file = "aiohttp-3.10.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:89b47c125ab07f0831803b88aeb12b04c564d5f07a1c1a225d4eb4d2f26e8b5e"}, - {file = "aiohttp-3.10.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:21778552ef3d44aac3278cc6f6d13a6423504fa5f09f2df34bfe489ed9ded7f5"}, - {file = "aiohttp-3.10.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:bde0693073fd5e542e46ea100aa6c1a5d36282dbdbad85b1c3365d5421490a92"}, - {file = "aiohttp-3.10.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:bf66149bb348d8e713f3a8e0b4f5b952094c2948c408e1cfef03b49e86745d60"}, - {file = "aiohttp-3.10.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:587237571a85716d6f71f60d103416c9df7d5acb55d96d3d3ced65f39bff9c0c"}, - {file = "aiohttp-3.10.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:bfe33cba6e127d0b5b417623c9aa621f0a69f304742acdca929a9fdab4593693"}, - {file = "aiohttp-3.10.1-cp39-cp39-win32.whl", hash = "sha256:9fbff00646cf8211b330690eb2fd64b23e1ce5b63a342436c1d1d6951d53d8dd"}, - {file = "aiohttp-3.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:5951c328f9ac42d7bce7a6ded535879bc9ae13032818d036749631fa27777905"}, - {file = "aiohttp-3.10.1.tar.gz", hash = "sha256:8b0d058e4e425d3b45e8ec70d49b402f4d6b21041e674798b1f91ba027c73f28"}, -] - -[package.dependencies] -aiohappyeyeballs = ">=2.3.0" + {file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fcde4c397f673fdec23e6b05ebf8d4751314fa7c24f93334bf1f1364c1c69ac7"}, + {file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d6b3f1fabe465e819aed2c421a6743d8debbde79b6a8600739300630a01bf2c"}, 
+ {file = "aiohttp-3.9.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6ae79c1bc12c34082d92bf9422764f799aee4746fd7a392db46b7fd357d4a17a"}, + {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d3ebb9e1316ec74277d19c5f482f98cc65a73ccd5430540d6d11682cd857430"}, + {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84dabd95154f43a2ea80deffec9cb44d2e301e38a0c9d331cc4aa0166fe28ae3"}, + {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c8a02fbeca6f63cb1f0475c799679057fc9268b77075ab7cf3f1c600e81dd46b"}, + {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c26959ca7b75ff768e2776d8055bf9582a6267e24556bb7f7bd29e677932be72"}, + {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:714d4e5231fed4ba2762ed489b4aec07b2b9953cf4ee31e9871caac895a839c0"}, + {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7a6a8354f1b62e15d48e04350f13e726fa08b62c3d7b8401c0a1314f02e3558"}, + {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c413016880e03e69d166efb5a1a95d40f83d5a3a648d16486592c49ffb76d0db"}, + {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ff84aeb864e0fac81f676be9f4685f0527b660f1efdc40dcede3c251ef1e867f"}, + {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ad7f2919d7dac062f24d6f5fe95d401597fbb015a25771f85e692d043c9d7832"}, + {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:702e2c7c187c1a498a4e2b03155d52658fdd6fda882d3d7fbb891a5cf108bb10"}, + {file = "aiohttp-3.9.5-cp310-cp310-win32.whl", hash = "sha256:67c3119f5ddc7261d47163ed86d760ddf0e625cd6246b4ed852e82159617b5fb"}, + {file = "aiohttp-3.9.5-cp310-cp310-win_amd64.whl", hash = "sha256:471f0ef53ccedec9995287f02caf0c068732f026455f07db3f01a46e49d76bbb"}, + {file = "aiohttp-3.9.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e0ae53e33ee7476dd3d1132f932eeb39bf6125083820049d06edcdca4381f342"}, + {file = "aiohttp-3.9.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c088c4d70d21f8ca5c0b8b5403fe84a7bc8e024161febdd4ef04575ef35d474d"}, + {file = "aiohttp-3.9.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:639d0042b7670222f33b0028de6b4e2fad6451462ce7df2af8aee37dcac55424"}, + {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f26383adb94da5e7fb388d441bf09c61e5e35f455a3217bfd790c6b6bc64b2ee"}, + {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:66331d00fb28dc90aa606d9a54304af76b335ae204d1836f65797d6fe27f1ca2"}, + {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ff550491f5492ab5ed3533e76b8567f4b37bd2995e780a1f46bca2024223233"}, + {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f22eb3a6c1080d862befa0a89c380b4dafce29dc6cd56083f630073d102eb595"}, + {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a81b1143d42b66ffc40a441379387076243ef7b51019204fd3ec36b9f69e77d6"}, + {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f64fd07515dad67f24b6ea4a66ae2876c01031de91c93075b8093f07c0a2d93d"}, + {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_i686.whl", hash 
= "sha256:93e22add827447d2e26d67c9ac0161756007f152fdc5210277d00a85f6c92323"}, + {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:55b39c8684a46e56ef8c8d24faf02de4a2b2ac60d26cee93bc595651ff545de9"}, + {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4715a9b778f4293b9f8ae7a0a7cef9829f02ff8d6277a39d7f40565c737d3771"}, + {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:afc52b8d969eff14e069a710057d15ab9ac17cd4b6753042c407dcea0e40bf75"}, + {file = "aiohttp-3.9.5-cp311-cp311-win32.whl", hash = "sha256:b3df71da99c98534be076196791adca8819761f0bf6e08e07fd7da25127150d6"}, + {file = "aiohttp-3.9.5-cp311-cp311-win_amd64.whl", hash = "sha256:88e311d98cc0bf45b62fc46c66753a83445f5ab20038bcc1b8a1cc05666f428a"}, + {file = "aiohttp-3.9.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:c7a4b7a6cf5b6eb11e109a9755fd4fda7d57395f8c575e166d363b9fc3ec4678"}, + {file = "aiohttp-3.9.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:0a158704edf0abcac8ac371fbb54044f3270bdbc93e254a82b6c82be1ef08f3c"}, + {file = "aiohttp-3.9.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d153f652a687a8e95ad367a86a61e8d53d528b0530ef382ec5aaf533140ed00f"}, + {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82a6a97d9771cb48ae16979c3a3a9a18b600a8505b1115cfe354dfb2054468b4"}, + {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60cdbd56f4cad9f69c35eaac0fbbdf1f77b0ff9456cebd4902f3dd1cf096464c"}, + {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8676e8fd73141ded15ea586de0b7cda1542960a7b9ad89b2b06428e97125d4fa"}, + {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da00da442a0e31f1c69d26d224e1efd3a1ca5bcbf210978a2ca7426dfcae9f58"}, + {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18f634d540dd099c262e9f887c8bbacc959847cfe5da7a0e2e1cf3f14dbf2daf"}, + {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:320e8618eda64e19d11bdb3bd04ccc0a816c17eaecb7e4945d01deee2a22f95f"}, + {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:2faa61a904b83142747fc6a6d7ad8fccff898c849123030f8e75d5d967fd4a81"}, + {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:8c64a6dc3fe5db7b1b4d2b5cb84c4f677768bdc340611eca673afb7cf416ef5a"}, + {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:393c7aba2b55559ef7ab791c94b44f7482a07bf7640d17b341b79081f5e5cd1a"}, + {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c671dc117c2c21a1ca10c116cfcd6e3e44da7fcde37bf83b2be485ab377b25da"}, + {file = "aiohttp-3.9.5-cp312-cp312-win32.whl", hash = "sha256:5a7ee16aab26e76add4afc45e8f8206c95d1d75540f1039b84a03c3b3800dd59"}, + {file = "aiohttp-3.9.5-cp312-cp312-win_amd64.whl", hash = "sha256:5ca51eadbd67045396bc92a4345d1790b7301c14d1848feaac1d6a6c9289e888"}, + {file = "aiohttp-3.9.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:694d828b5c41255e54bc2dddb51a9f5150b4eefa9886e38b52605a05d96566e8"}, + {file = "aiohttp-3.9.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0605cc2c0088fcaae79f01c913a38611ad09ba68ff482402d3410bf59039bfb8"}, + {file = "aiohttp-3.9.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4558e5012ee03d2638c681e156461d37b7a113fe13970d438d95d10173d25f78"}, + {file = 
"aiohttp-3.9.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dbc053ac75ccc63dc3a3cc547b98c7258ec35a215a92bd9f983e0aac95d3d5b"}, + {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4109adee842b90671f1b689901b948f347325045c15f46b39797ae1bf17019de"}, + {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6ea1a5b409a85477fd8e5ee6ad8f0e40bf2844c270955e09360418cfd09abac"}, + {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3c2890ca8c59ee683fd09adf32321a40fe1cf164e3387799efb2acebf090c11"}, + {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3916c8692dbd9d55c523374a3b8213e628424d19116ac4308e434dbf6d95bbdd"}, + {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8d1964eb7617907c792ca00b341b5ec3e01ae8c280825deadbbd678447b127e1"}, + {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d5ab8e1f6bee051a4bf6195e38a5c13e5e161cb7bad83d8854524798bd9fcd6e"}, + {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:52c27110f3862a1afbcb2af4281fc9fdc40327fa286c4625dfee247c3ba90156"}, + {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:7f64cbd44443e80094309875d4f9c71d0401e966d191c3d469cde4642bc2e031"}, + {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8b4f72fbb66279624bfe83fd5eb6aea0022dad8eec62b71e7bf63ee1caadeafe"}, + {file = "aiohttp-3.9.5-cp38-cp38-win32.whl", hash = "sha256:6380c039ec52866c06d69b5c7aad5478b24ed11696f0e72f6b807cfb261453da"}, + {file = "aiohttp-3.9.5-cp38-cp38-win_amd64.whl", hash = "sha256:da22dab31d7180f8c3ac7c7635f3bcd53808f374f6aa333fe0b0b9e14b01f91a"}, + {file = "aiohttp-3.9.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1732102949ff6087589408d76cd6dea656b93c896b011ecafff418c9661dc4ed"}, + {file = "aiohttp-3.9.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c6021d296318cb6f9414b48e6a439a7f5d1f665464da507e8ff640848ee2a58a"}, + {file = "aiohttp-3.9.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:239f975589a944eeb1bad26b8b140a59a3a320067fb3cd10b75c3092405a1372"}, + {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b7b30258348082826d274504fbc7c849959f1989d86c29bc355107accec6cfb"}, + {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd2adf5c87ff6d8b277814a28a535b59e20bfea40a101db6b3bdca7e9926bc24"}, + {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e9a3d838441bebcf5cf442700e3963f58b5c33f015341f9ea86dcd7d503c07e2"}, + {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e3a1ae66e3d0c17cf65c08968a5ee3180c5a95920ec2731f53343fac9bad106"}, + {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9c69e77370cce2d6df5d12b4e12bdcca60c47ba13d1cbbc8645dd005a20b738b"}, + {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf56238f4bbf49dab8c2dc2e6b1b68502b1e88d335bea59b3f5b9f4c001475"}, + {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d1469f228cd9ffddd396d9948b8c9cd8022b6d1bf1e40c6f25b0fb90b4f893ed"}, + {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:45731330e754f5811c314901cebdf19dd776a44b31927fa4b4dbecab9e457b0c"}, + {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:3fcb4046d2904378e3aeea1df51f697b0467f2aac55d232c87ba162709478c46"}, + {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8cf142aa6c1a751fcb364158fd710b8a9be874b81889c2bd13aa8893197455e2"}, + {file = "aiohttp-3.9.5-cp39-cp39-win32.whl", hash = "sha256:7b179eea70833c8dee51ec42f3b4097bd6370892fa93f510f76762105568cf09"}, + {file = "aiohttp-3.9.5-cp39-cp39-win_amd64.whl", hash = "sha256:38d80498e2e169bc61418ff36170e0aad0cd268da8b38a17c4cf29d254a8b3f1"}, + {file = "aiohttp-3.9.5.tar.gz", hash = "sha256:edea7d15772ceeb29db4aff55e482d4bcfb6ae160ce144f2682de02f6d693551"}, +] + +[package.dependencies] aiosignal = ">=1.1.2" attrs = ">=17.3.0" frozenlist = ">=1.1.1" @@ -105,7 +93,7 @@ multidict = ">=4.5,<7.0" yarl = ">=1.0,<2.0" [package.extras] -speedups = ["Brotli", "aiodns (>=3.2.0)", "brotlicffi"] +speedups = ["Brotli", "aiodns", "brotlicffi"] [[package]] name = "aiosignal" @@ -188,22 +176,22 @@ test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"] [[package]] name = "attrs" -version = "24.2.0" +version = "23.2.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.7" files = [ - {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, - {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, ] [package.extras] -benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] [[package]] name = "autopep8" @@ -380,13 +368,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.34.156" +version = "1.34.147" description = "Low-level, data-driven core of boto 3." 
optional = false python-versions = ">=3.8" files = [ - {file = "botocore-1.34.156-py3-none-any.whl", hash = "sha256:c48f8c8996216dfdeeb0aa6d3c0f2c7ae25234766434a2ea3e57bdc08494bdda"}, - {file = "botocore-1.34.156.tar.gz", hash = "sha256:5d1478c41ab9681e660b3322432fe09c4055759c317984b7b8d3af9557ff769a"}, + {file = "botocore-1.34.147-py3-none-any.whl", hash = "sha256:be94a2f4874b1d1705cae2bd512c475047497379651678593acb6c61c50d91de"}, + {file = "botocore-1.34.147.tar.gz", hash = "sha256:2e8f000b77e4ca345146cb2edab6403769a517b564f627bb084ab335417f3dbe"}, ] [package.dependencies] @@ -395,7 +383,7 @@ python-dateutil = ">=2.1,<3.0.0" urllib3 = {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""} [package.extras] -crt = ["awscrt (==0.21.2)"] +crt = ["awscrt (==0.20.11)"] [[package]] name = "boxsdk" @@ -522,78 +510,63 @@ files = [ [[package]] name = "cffi" -version = "1.17.0" +version = "1.16.0" description = "Foreign Function Interface for Python calling C code." optional = false python-versions = ">=3.8" files = [ - {file = "cffi-1.17.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f9338cc05451f1942d0d8203ec2c346c830f8e86469903d5126c1f0a13a2bcbb"}, - {file = "cffi-1.17.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0ce71725cacc9ebf839630772b07eeec220cbb5f03be1399e0457a1464f8e1a"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c815270206f983309915a6844fe994b2fa47e5d05c4c4cef267c3b30e34dbe42"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6bdcd415ba87846fd317bee0774e412e8792832e7805938987e4ede1d13046d"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a98748ed1a1df4ee1d6f927e151ed6c1a09d5ec21684de879c7ea6aa96f58f2"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0a048d4f6630113e54bb4b77e315e1ba32a5a31512c31a273807d0027a7e69ab"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24aa705a5f5bd3a8bcfa4d123f03413de5d86e497435693b638cbffb7d5d8a1b"}, - {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:856bf0924d24e7f93b8aee12a3a1095c34085600aa805693fb7f5d1962393206"}, - {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:4304d4416ff032ed50ad6bb87416d802e67139e31c0bde4628f36a47a3164bfa"}, - {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:331ad15c39c9fe9186ceaf87203a9ecf5ae0ba2538c9e898e3a6967e8ad3db6f"}, - {file = "cffi-1.17.0-cp310-cp310-win32.whl", hash = "sha256:669b29a9eca6146465cc574659058ed949748f0809a2582d1f1a324eb91054dc"}, - {file = "cffi-1.17.0-cp310-cp310-win_amd64.whl", hash = "sha256:48b389b1fd5144603d61d752afd7167dfd205973a43151ae5045b35793232aa2"}, - {file = "cffi-1.17.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c5d97162c196ce54af6700949ddf9409e9833ef1003b4741c2b39ef46f1d9720"}, - {file = "cffi-1.17.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5ba5c243f4004c750836f81606a9fcb7841f8874ad8f3bf204ff5e56332b72b9"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bb9333f58fc3a2296fb1d54576138d4cf5d496a2cc118422bd77835e6ae0b9cb"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:435a22d00ec7d7ea533db494da8581b05977f9c37338c80bc86314bec2619424"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1df34588123fcc88c872f5acb6f74ae59e9d182a2707097f9e28275ec26a12d"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df8bb0010fdd0a743b7542589223a2816bdde4d94bb5ad67884348fa2c1c67e8"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8b5b9712783415695663bd463990e2f00c6750562e6ad1d28e072a611c5f2a6"}, - {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ffef8fd58a36fb5f1196919638f73dd3ae0db1a878982b27a9a5a176ede4ba91"}, - {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e67d26532bfd8b7f7c05d5a766d6f437b362c1bf203a3a5ce3593a645e870b8"}, - {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:45f7cd36186db767d803b1473b3c659d57a23b5fa491ad83c6d40f2af58e4dbb"}, - {file = "cffi-1.17.0-cp311-cp311-win32.whl", hash = "sha256:a9015f5b8af1bb6837a3fcb0cdf3b874fe3385ff6274e8b7925d81ccaec3c5c9"}, - {file = "cffi-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:b50aaac7d05c2c26dfd50c3321199f019ba76bb650e346a6ef3616306eed67b0"}, - {file = "cffi-1.17.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aec510255ce690d240f7cb23d7114f6b351c733a74c279a84def763660a2c3bc"}, - {file = "cffi-1.17.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2770bb0d5e3cc0e31e7318db06efcbcdb7b31bcb1a70086d3177692a02256f59"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db9a30ec064129d605d0f1aedc93e00894b9334ec74ba9c6bdd08147434b33eb"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a47eef975d2b8b721775a0fa286f50eab535b9d56c70a6e62842134cf7841195"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f3e0992f23bbb0be00a921eae5363329253c3b86287db27092461c887b791e5e"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6107e445faf057c118d5050560695e46d272e5301feffda3c41849641222a828"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb862356ee9391dc5a0b3cbc00f416b48c1b9a52d252d898e5b7696a5f9fe150"}, - {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c1c13185b90bbd3f8b5963cd8ce7ad4ff441924c31e23c975cb150e27c2bf67a"}, - {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:17c6d6d3260c7f2d94f657e6872591fe8733872a86ed1345bda872cfc8c74885"}, - {file = "cffi-1.17.0-cp312-cp312-win32.whl", hash = "sha256:c3b8bd3133cd50f6b637bb4322822c94c5ce4bf0d724ed5ae70afce62187c492"}, - {file = "cffi-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:dca802c8db0720ce1c49cce1149ff7b06e91ba15fa84b1d59144fef1a1bc7ac2"}, - {file = "cffi-1.17.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6ce01337d23884b21c03869d2f68c5523d43174d4fc405490eb0091057943118"}, - {file = "cffi-1.17.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cab2eba3830bf4f6d91e2d6718e0e1c14a2f5ad1af68a89d24ace0c6b17cced7"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:14b9cbc8f7ac98a739558eb86fabc283d4d564dafed50216e7f7ee62d0d25377"}, - {file = 
"cffi-1.17.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b00e7bcd71caa0282cbe3c90966f738e2db91e64092a877c3ff7f19a1628fdcb"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:41f4915e09218744d8bae14759f983e466ab69b178de38066f7579892ff2a555"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4760a68cab57bfaa628938e9c2971137e05ce48e762a9cb53b76c9b569f1204"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:011aff3524d578a9412c8b3cfaa50f2c0bd78e03eb7af7aa5e0df59b158efb2f"}, - {file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:a003ac9edc22d99ae1286b0875c460351f4e101f8c9d9d2576e78d7e048f64e0"}, - {file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ef9528915df81b8f4c7612b19b8628214c65c9b7f74db2e34a646a0a2a0da2d4"}, - {file = "cffi-1.17.0-cp313-cp313-win32.whl", hash = "sha256:70d2aa9fb00cf52034feac4b913181a6e10356019b18ef89bc7c12a283bf5f5a"}, - {file = "cffi-1.17.0-cp313-cp313-win_amd64.whl", hash = "sha256:b7b6ea9e36d32582cda3465f54c4b454f62f23cb083ebc7a94e2ca6ef011c3a7"}, - {file = "cffi-1.17.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:964823b2fc77b55355999ade496c54dde161c621cb1f6eac61dc30ed1b63cd4c"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:516a405f174fd3b88829eabfe4bb296ac602d6a0f68e0d64d5ac9456194a5b7e"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dec6b307ce928e8e112a6bb9921a1cb00a0e14979bf28b98e084a4b8a742bd9b"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4094c7b464cf0a858e75cd14b03509e84789abf7b79f8537e6a72152109c76e"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2404f3de742f47cb62d023f0ba7c5a916c9c653d5b368cc966382ae4e57da401"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3aa9d43b02a0c681f0bfbc12d476d47b2b2b6a3f9287f11ee42989a268a1833c"}, - {file = "cffi-1.17.0-cp38-cp38-win32.whl", hash = "sha256:0bb15e7acf8ab35ca8b24b90af52c8b391690ef5c4aec3d31f38f0d37d2cc499"}, - {file = "cffi-1.17.0-cp38-cp38-win_amd64.whl", hash = "sha256:93a7350f6706b31f457c1457d3a3259ff9071a66f312ae64dc024f049055f72c"}, - {file = "cffi-1.17.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1a2ddbac59dc3716bc79f27906c010406155031a1c801410f1bafff17ea304d2"}, - {file = "cffi-1.17.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6327b572f5770293fc062a7ec04160e89741e8552bf1c358d1a23eba68166759"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbc183e7bef690c9abe5ea67b7b60fdbca81aa8da43468287dae7b5c046107d4"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bdc0f1f610d067c70aa3737ed06e2726fd9d6f7bfee4a351f4c40b6831f4e82"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6d872186c1617d143969defeadac5a904e6e374183e07977eedef9c07c8953bf"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0d46ee4764b88b91f16661a8befc6bfb24806d885e27436fdc292ed7e6f6d058"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6f76a90c345796c01d85e6332e81cab6d70de83b829cf1d9762d0a3da59c7932"}, - {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0e60821d312f99d3e1569202518dddf10ae547e799d75aef3bca3a2d9e8ee693"}, - {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:eb09b82377233b902d4c3fbeeb7ad731cdab579c6c6fda1f763cd779139e47c3"}, - {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:24658baf6224d8f280e827f0a50c46ad819ec8ba380a42448e24459daf809cf4"}, - {file = "cffi-1.17.0-cp39-cp39-win32.whl", hash = "sha256:0fdacad9e0d9fc23e519efd5ea24a70348305e8d7d85ecbb1a5fa66dc834e7fb"}, - {file = "cffi-1.17.0-cp39-cp39-win_amd64.whl", hash = "sha256:7cbc78dc018596315d4e7841c8c3a7ae31cc4d638c9b627f87d52e8abaaf2d29"}, - {file = "cffi-1.17.0.tar.gz", hash = "sha256:f3157624b7558b914cb039fd1af735e5e8049a87c817cc215109ad1c8779df76"}, + {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, + {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, + {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, + {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, + {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, + {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, + {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, + {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, + {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, + {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, + {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, + {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = 
"sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, + {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, + {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, + {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, ] [package.dependencies] @@ -827,83 +800,63 @@ files = [ [[package]] name = "coverage" -version = "7.6.1" +version = "7.6.0" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16"}, - {file = "coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36"}, - {file = "coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02"}, - {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc"}, - {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23"}, - {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34"}, - {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c"}, - {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959"}, - {file = "coverage-7.6.1-cp310-cp310-win32.whl", hash = 
"sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232"}, - {file = "coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0"}, - {file = "coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93"}, - {file = "coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3"}, - {file = "coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff"}, - {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d"}, - {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6"}, - {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56"}, - {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234"}, - {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133"}, - {file = "coverage-7.6.1-cp311-cp311-win32.whl", hash = "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c"}, - {file = "coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6"}, - {file = "coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778"}, - {file = "coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391"}, - {file = "coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8"}, - {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d"}, - {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca"}, - {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163"}, - {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a"}, - {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d"}, - {file = "coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5"}, - {file = "coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb"}, - {file = "coverage-7.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106"}, - {file = 
"coverage-7.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9"}, - {file = "coverage-7.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c"}, - {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a"}, - {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060"}, - {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862"}, - {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388"}, - {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155"}, - {file = "coverage-7.6.1-cp313-cp313-win32.whl", hash = "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a"}, - {file = "coverage-7.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129"}, - {file = "coverage-7.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e"}, - {file = "coverage-7.6.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962"}, - {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb"}, - {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704"}, - {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b"}, - {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f"}, - {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223"}, - {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3"}, - {file = "coverage-7.6.1-cp313-cp313t-win32.whl", hash = "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f"}, - {file = "coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657"}, - {file = "coverage-7.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6db04803b6c7291985a761004e9060b2bca08da6d04f26a7f2294b8623a0c1a0"}, - {file = "coverage-7.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f1adfc8ac319e1a348af294106bc6a8458a0f1633cc62a1446aebc30c5fa186a"}, - {file = "coverage-7.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a95324a9de9650a729239daea117df21f4b9868ce32e63f8b650ebe6cef5595b"}, - {file = 
"coverage-7.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b43c03669dc4618ec25270b06ecd3ee4fa94c7f9b3c14bae6571ca00ef98b0d3"}, - {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8929543a7192c13d177b770008bc4e8119f2e1f881d563fc6b6305d2d0ebe9de"}, - {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a09ece4a69cf399510c8ab25e0950d9cf2b42f7b3cb0374f95d2e2ff594478a6"}, - {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9054a0754de38d9dbd01a46621636689124d666bad1936d76c0341f7d71bf569"}, - {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0dbde0f4aa9a16fa4d754356a8f2e36296ff4d83994b2c9d8398aa32f222f989"}, - {file = "coverage-7.6.1-cp38-cp38-win32.whl", hash = "sha256:da511e6ad4f7323ee5702e6633085fb76c2f893aaf8ce4c51a0ba4fc07580ea7"}, - {file = "coverage-7.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:3f1156e3e8f2872197af3840d8ad307a9dd18e615dc64d9ee41696f287c57ad8"}, - {file = "coverage-7.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abd5fd0db5f4dc9289408aaf34908072f805ff7792632250dcb36dc591d24255"}, - {file = "coverage-7.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:547f45fa1a93154bd82050a7f3cddbc1a7a4dd2a9bf5cb7d06f4ae29fe94eaf8"}, - {file = "coverage-7.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645786266c8f18a931b65bfcefdbf6952dd0dea98feee39bd188607a9d307ed2"}, - {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e0b2df163b8ed01d515807af24f63de04bebcecbd6c3bfeff88385789fdf75a"}, - {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:609b06f178fe8e9f89ef676532760ec0b4deea15e9969bf754b37f7c40326dbc"}, - {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:702855feff378050ae4f741045e19a32d57d19f3e0676d589df0575008ea5004"}, - {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2bdb062ea438f22d99cba0d7829c2ef0af1d768d1e4a4f528087224c90b132cb"}, - {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9c56863d44bd1c4fe2abb8a4d6f5371d197f1ac0ebdee542f07f35895fc07f36"}, - {file = "coverage-7.6.1-cp39-cp39-win32.whl", hash = "sha256:6e2cd258d7d927d09493c8df1ce9174ad01b381d4729a9d8d4e38670ca24774c"}, - {file = "coverage-7.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:06a737c882bd26d0d6ee7269b20b12f14a8704807a01056c80bb881a4b2ce6ca"}, - {file = "coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df"}, - {file = "coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d"}, + {file = "coverage-7.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dff044f661f59dace805eedb4a7404c573b6ff0cdba4a524141bc63d7be5c7fd"}, + {file = "coverage-7.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a8659fd33ee9e6ca03950cfdcdf271d645cf681609153f218826dd9805ab585c"}, + {file = "coverage-7.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7792f0ab20df8071d669d929c75c97fecfa6bcab82c10ee4adb91c7a54055463"}, + {file = "coverage-7.6.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:d4b3cd1ca7cd73d229487fa5caca9e4bc1f0bca96526b922d61053ea751fe791"}, + {file = "coverage-7.6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7e128f85c0b419907d1f38e616c4f1e9f1d1b37a7949f44df9a73d5da5cd53c"}, + {file = "coverage-7.6.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a94925102c89247530ae1dab7dc02c690942566f22e189cbd53579b0693c0783"}, + {file = "coverage-7.6.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:dcd070b5b585b50e6617e8972f3fbbee786afca71b1936ac06257f7e178f00f6"}, + {file = "coverage-7.6.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d50a252b23b9b4dfeefc1f663c568a221092cbaded20a05a11665d0dbec9b8fb"}, + {file = "coverage-7.6.0-cp310-cp310-win32.whl", hash = "sha256:0e7b27d04131c46e6894f23a4ae186a6a2207209a05df5b6ad4caee6d54a222c"}, + {file = "coverage-7.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:54dece71673b3187c86226c3ca793c5f891f9fc3d8aa183f2e3653da18566169"}, + {file = "coverage-7.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7b525ab52ce18c57ae232ba6f7010297a87ced82a2383b1afd238849c1ff933"}, + {file = "coverage-7.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bea27c4269234e06f621f3fac3925f56ff34bc14521484b8f66a580aacc2e7d"}, + {file = "coverage-7.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed8d1d1821ba5fc88d4a4f45387b65de52382fa3ef1f0115a4f7a20cdfab0e94"}, + {file = "coverage-7.6.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01c322ef2bbe15057bc4bf132b525b7e3f7206f071799eb8aa6ad1940bcf5fb1"}, + {file = "coverage-7.6.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03cafe82c1b32b770a29fd6de923625ccac3185a54a5e66606da26d105f37dac"}, + {file = "coverage-7.6.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0d1b923fc4a40c5832be4f35a5dab0e5ff89cddf83bb4174499e02ea089daf57"}, + {file = "coverage-7.6.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4b03741e70fb811d1a9a1d75355cf391f274ed85847f4b78e35459899f57af4d"}, + {file = "coverage-7.6.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a73d18625f6a8a1cbb11eadc1d03929f9510f4131879288e3f7922097a429f63"}, + {file = "coverage-7.6.0-cp311-cp311-win32.whl", hash = "sha256:65fa405b837060db569a61ec368b74688f429b32fa47a8929a7a2f9b47183713"}, + {file = "coverage-7.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:6379688fb4cfa921ae349c76eb1a9ab26b65f32b03d46bb0eed841fd4cb6afb1"}, + {file = "coverage-7.6.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f7db0b6ae1f96ae41afe626095149ecd1b212b424626175a6633c2999eaad45b"}, + {file = "coverage-7.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bbdf9a72403110a3bdae77948b8011f644571311c2fb35ee15f0f10a8fc082e8"}, + {file = "coverage-7.6.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cc44bf0315268e253bf563f3560e6c004efe38f76db03a1558274a6e04bf5d5"}, + {file = "coverage-7.6.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da8549d17489cd52f85a9829d0e1d91059359b3c54a26f28bec2c5d369524807"}, + {file = "coverage-7.6.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0086cd4fc71b7d485ac93ca4239c8f75732c2ae3ba83f6be1c9be59d9e2c6382"}, + {file = "coverage-7.6.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:1fad32ee9b27350687035cb5fdf9145bc9cf0a094a9577d43e909948ebcfa27b"}, + {file = "coverage-7.6.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:044a0985a4f25b335882b0966625270a8d9db3d3409ddc49a4eb00b0ef5e8cee"}, + {file = "coverage-7.6.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:76d5f82213aa78098b9b964ea89de4617e70e0d43e97900c2778a50856dac605"}, + {file = "coverage-7.6.0-cp312-cp312-win32.whl", hash = "sha256:3c59105f8d58ce500f348c5b56163a4113a440dad6daa2294b5052a10db866da"}, + {file = "coverage-7.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:ca5d79cfdae420a1d52bf177de4bc2289c321d6c961ae321503b2ca59c17ae67"}, + {file = "coverage-7.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d39bd10f0ae453554798b125d2f39884290c480f56e8a02ba7a6ed552005243b"}, + {file = "coverage-7.6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:beb08e8508e53a568811016e59f3234d29c2583f6b6e28572f0954a6b4f7e03d"}, + {file = "coverage-7.6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2e16f4cd2bc4d88ba30ca2d3bbf2f21f00f382cf4e1ce3b1ddc96c634bc48ca"}, + {file = "coverage-7.6.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6616d1c9bf1e3faea78711ee42a8b972367d82ceae233ec0ac61cc7fec09fa6b"}, + {file = "coverage-7.6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad4567d6c334c46046d1c4c20024de2a1c3abc626817ae21ae3da600f5779b44"}, + {file = "coverage-7.6.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d17c6a415d68cfe1091d3296ba5749d3d8696e42c37fca5d4860c5bf7b729f03"}, + {file = "coverage-7.6.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9146579352d7b5f6412735d0f203bbd8d00113a680b66565e205bc605ef81bc6"}, + {file = "coverage-7.6.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:cdab02a0a941af190df8782aafc591ef3ad08824f97850b015c8c6a8b3877b0b"}, + {file = "coverage-7.6.0-cp38-cp38-win32.whl", hash = "sha256:df423f351b162a702c053d5dddc0fc0ef9a9e27ea3f449781ace5f906b664428"}, + {file = "coverage-7.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:f2501d60d7497fd55e391f423f965bbe9e650e9ffc3c627d5f0ac516026000b8"}, + {file = "coverage-7.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7221f9ac9dad9492cecab6f676b3eaf9185141539d5c9689d13fd6b0d7de840c"}, + {file = "coverage-7.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ddaaa91bfc4477d2871442bbf30a125e8fe6b05da8a0015507bfbf4718228ab2"}, + {file = "coverage-7.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4cbe651f3904e28f3a55d6f371203049034b4ddbce65a54527a3f189ca3b390"}, + {file = "coverage-7.6.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:831b476d79408ab6ccfadaaf199906c833f02fdb32c9ab907b1d4aa0713cfa3b"}, + {file = "coverage-7.6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46c3d091059ad0b9c59d1034de74a7f36dcfa7f6d3bde782c49deb42438f2450"}, + {file = "coverage-7.6.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4d5fae0a22dc86259dee66f2cc6c1d3e490c4a1214d7daa2a93d07491c5c04b6"}, + {file = "coverage-7.6.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:07ed352205574aad067482e53dd606926afebcb5590653121063fbf4e2175166"}, + {file = "coverage-7.6.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:49c76cdfa13015c4560702574bad67f0e15ca5a2872c6a125f6327ead2b731dd"}, + {file = "coverage-7.6.0-cp39-cp39-win32.whl", 
hash = "sha256:482855914928c8175735a2a59c8dc5806cf7d8f032e4820d52e845d1f731dca2"}, + {file = "coverage-7.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:543ef9179bc55edfd895154a51792b01c017c87af0ebaae092720152e19e42ca"}, + {file = "coverage-7.6.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:6fe885135c8a479d3e37a7aae61cbd3a0fb2deccb4dda3c25f92a49189f766d6"}, + {file = "coverage-7.6.0.tar.gz", hash = "sha256:289cc803fa1dc901f84701ac10c9ee873619320f2f9aff38794db4a4a0268d51"}, ] [package.extras] @@ -1047,13 +1000,13 @@ files = [ [[package]] name = "django" -version = "4.2.15" +version = "4.2.13" description = "A high-level Python web framework that encourages rapid development and clean, pragmatic design." optional = false python-versions = ">=3.8" files = [ - {file = "Django-4.2.15-py3-none-any.whl", hash = "sha256:61ee4a130efb8c451ef3467c67ca99fdce400fedd768634efc86a68c18d80d30"}, - {file = "Django-4.2.15.tar.gz", hash = "sha256:c77f926b81129493961e19c0e02188f8d07c112a1162df69bfab178ae447f94a"}, + {file = "Django-4.2.13-py3-none-any.whl", hash = "sha256:a17fcba2aad3fc7d46fdb23215095dbbd64e6174bf4589171e732b18b07e426a"}, + {file = "Django-4.2.13.tar.gz", hash = "sha256:837e3cf1f6c31347a1396a3f6b65688f2b4bb4a11c580dcb628b5afe527b68a5"}, ] [package.dependencies] @@ -2080,13 +2033,13 @@ testing = ["pytest"] [[package]] name = "google-resumable-media" -version = "2.7.2" +version = "2.7.1" description = "Utilities for Google Media Downloads and Resumable Uploads" optional = false python-versions = ">=3.7" files = [ - {file = "google_resumable_media-2.7.2-py2.py3-none-any.whl", hash = "sha256:3ce7551e9fe6d99e9a126101d2536612bb73486721951e9562fee0f90c6ababa"}, - {file = "google_resumable_media-2.7.2.tar.gz", hash = "sha256:5280aed4629f2b60b847b0d42f9857fd4935c11af266744df33d8074cae92fe0"}, + {file = "google-resumable-media-2.7.1.tar.gz", hash = "sha256:eae451a7b2e2cdbaaa0fd2eb00cc8a1ee5e95e16b55597359cbc3d27d7d90e33"}, + {file = "google_resumable_media-2.7.1-py2.py3-none-any.whl", hash = "sha256:103ebc4ba331ab1bfdac0250f8033627a2cd7cde09e7ccff9181e31ba4315b2c"}, ] [package.dependencies] @@ -3353,22 +3306,22 @@ testing = ["google-api-core (>=1.31.5)"] [[package]] name = "protobuf" -version = "5.27.3" +version = "5.27.2" description = "" optional = false python-versions = ">=3.8" files = [ - {file = "protobuf-5.27.3-cp310-abi3-win32.whl", hash = "sha256:dcb307cd4ef8fec0cf52cb9105a03d06fbb5275ce6d84a6ae33bc6cf84e0a07b"}, - {file = "protobuf-5.27.3-cp310-abi3-win_amd64.whl", hash = "sha256:16ddf3f8c6c41e1e803da7abea17b1793a97ef079a912e42351eabb19b2cffe7"}, - {file = "protobuf-5.27.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:68248c60d53f6168f565a8c76dc58ba4fa2ade31c2d1ebdae6d80f969cdc2d4f"}, - {file = "protobuf-5.27.3-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:b8a994fb3d1c11156e7d1e427186662b64694a62b55936b2b9348f0a7c6625ce"}, - {file = "protobuf-5.27.3-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:a55c48f2a2092d8e213bd143474df33a6ae751b781dd1d1f4d953c128a415b25"}, - {file = "protobuf-5.27.3-cp38-cp38-win32.whl", hash = "sha256:043853dcb55cc262bf2e116215ad43fa0859caab79bb0b2d31b708f128ece035"}, - {file = "protobuf-5.27.3-cp38-cp38-win_amd64.whl", hash = "sha256:c2a105c24f08b1e53d6c7ffe69cb09d0031512f0b72f812dd4005b8112dbe91e"}, - {file = "protobuf-5.27.3-cp39-cp39-win32.whl", hash = "sha256:c84eee2c71ed83704f1afbf1a85c3171eab0fd1ade3b399b3fad0884cbcca8bf"}, - {file = "protobuf-5.27.3-cp39-cp39-win_amd64.whl", hash = 
"sha256:af7c0b7cfbbb649ad26132e53faa348580f844d9ca46fd3ec7ca48a1ea5db8a1"}, - {file = "protobuf-5.27.3-py3-none-any.whl", hash = "sha256:8572c6533e544ebf6899c360e91d6bcbbee2549251643d32c52cf8a5de295ba5"}, - {file = "protobuf-5.27.3.tar.gz", hash = "sha256:82460903e640f2b7e34ee81a947fdaad89de796d324bcbc38ff5430bcdead82c"}, + {file = "protobuf-5.27.2-cp310-abi3-win32.whl", hash = "sha256:354d84fac2b0d76062e9b3221f4abbbacdfd2a4d8af36bab0474f3a0bb30ab38"}, + {file = "protobuf-5.27.2-cp310-abi3-win_amd64.whl", hash = "sha256:0e341109c609749d501986b835f667c6e1e24531096cff9d34ae411595e26505"}, + {file = "protobuf-5.27.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a109916aaac42bff84702fb5187f3edadbc7c97fc2c99c5ff81dd15dcce0d1e5"}, + {file = "protobuf-5.27.2-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:176c12b1f1c880bf7a76d9f7c75822b6a2bc3db2d28baa4d300e8ce4cde7409b"}, + {file = "protobuf-5.27.2-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:b848dbe1d57ed7c191dfc4ea64b8b004a3f9ece4bf4d0d80a367b76df20bf36e"}, + {file = "protobuf-5.27.2-cp38-cp38-win32.whl", hash = "sha256:4fadd8d83e1992eed0248bc50a4a6361dc31bcccc84388c54c86e530b7f58863"}, + {file = "protobuf-5.27.2-cp38-cp38-win_amd64.whl", hash = "sha256:610e700f02469c4a997e58e328cac6f305f649826853813177e6290416e846c6"}, + {file = "protobuf-5.27.2-cp39-cp39-win32.whl", hash = "sha256:9e8f199bf7f97bd7ecebffcae45ebf9527603549b2b562df0fbc6d4d688f14ca"}, + {file = "protobuf-5.27.2-cp39-cp39-win_amd64.whl", hash = "sha256:7fc3add9e6003e026da5fc9e59b131b8f22b428b991ccd53e2af8071687b4fce"}, + {file = "protobuf-5.27.2-py3-none-any.whl", hash = "sha256:54330f07e4949d09614707c48b06d1a22f8ffb5763c159efd5c0928326a91470"}, + {file = "protobuf-5.27.2.tar.gz", hash = "sha256:f3ecdef226b9af856075f28227ff2c90ce3a594d092c39bee5513573f25e2714"}, ] [[package]] @@ -4133,17 +4086,17 @@ networkx = ["networkx (>=2.0.0,<3.0.0)"] [[package]] name = "redis" -version = "5.0.8" +version = "5.0.7" description = "Python client for Redis database and key-value store" optional = false python-versions = ">=3.7" files = [ - {file = "redis-5.0.8-py3-none-any.whl", hash = "sha256:56134ee08ea909106090934adc36f65c9bcbbaecea5b21ba704ba6fb561f8eb4"}, - {file = "redis-5.0.8.tar.gz", hash = "sha256:0c5b10d387568dfe0698c6fad6615750c24170e548ca2deac10c649d463e9870"}, + {file = "redis-5.0.7-py3-none-any.whl", hash = "sha256:0e479e24da960c690be5d9b96d21f7b918a98c0cf49af3b6fafaa0753f93a0db"}, + {file = "redis-5.0.7.tar.gz", hash = "sha256:8f611490b93c8109b50adc317b31bfd84fff31def3475b92e7e80bf39f48175b"}, ] [package.extras] -hiredis = ["hiredis (>1.0.0)"] +hiredis = ["hiredis (>=1.0.0)"] ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)"] [[package]] @@ -4235,114 +4188,110 @@ tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asy [[package]] name = "rpds-py" -version = "0.20.0" +version = "0.19.0" description = "Python bindings to Rust's persistent data structures (rpds)" optional = false python-versions = ">=3.8" files = [ - {file = "rpds_py-0.20.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3ad0fda1635f8439cde85c700f964b23ed5fc2d28016b32b9ee5fe30da5c84e2"}, - {file = "rpds_py-0.20.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9bb4a0d90fdb03437c109a17eade42dfbf6190408f29b2744114d11586611d6f"}, - {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6377e647bbfd0a0b159fe557f2c6c602c159fc752fa316572f012fc0bf67150"}, - {file = 
"rpds_py-0.20.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb851b7df9dda52dc1415ebee12362047ce771fc36914586b2e9fcbd7d293b3e"}, - {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e0f80b739e5a8f54837be5d5c924483996b603d5502bfff79bf33da06164ee2"}, - {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a8c94dad2e45324fc74dce25e1645d4d14df9a4e54a30fa0ae8bad9a63928e3"}, - {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8e604fe73ba048c06085beaf51147eaec7df856824bfe7b98657cf436623daf"}, - {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:df3de6b7726b52966edf29663e57306b23ef775faf0ac01a3e9f4012a24a4140"}, - {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cf258ede5bc22a45c8e726b29835b9303c285ab46fc7c3a4cc770736b5304c9f"}, - {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:55fea87029cded5df854ca7e192ec7bdb7ecd1d9a3f63d5c4eb09148acf4a7ce"}, - {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ae94bd0b2f02c28e199e9bc51485d0c5601f58780636185660f86bf80c89af94"}, - {file = "rpds_py-0.20.0-cp310-none-win32.whl", hash = "sha256:28527c685f237c05445efec62426d285e47a58fb05ba0090a4340b73ecda6dee"}, - {file = "rpds_py-0.20.0-cp310-none-win_amd64.whl", hash = "sha256:238a2d5b1cad28cdc6ed15faf93a998336eb041c4e440dd7f902528b8891b399"}, - {file = "rpds_py-0.20.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ac2f4f7a98934c2ed6505aead07b979e6f999389f16b714448fb39bbaa86a489"}, - {file = "rpds_py-0.20.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:220002c1b846db9afd83371d08d239fdc865e8f8c5795bbaec20916a76db3318"}, - {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d7919548df3f25374a1f5d01fbcd38dacab338ef5f33e044744b5c36729c8db"}, - {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:758406267907b3781beee0f0edfe4a179fbd97c0be2e9b1154d7f0a1279cf8e5"}, - {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3d61339e9f84a3f0767b1995adfb171a0d00a1185192718a17af6e124728e0f5"}, - {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1259c7b3705ac0a0bd38197565a5d603218591d3f6cee6e614e380b6ba61c6f6"}, - {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c1dc0f53856b9cc9a0ccca0a7cc61d3d20a7088201c0937f3f4048c1718a209"}, - {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7e60cb630f674a31f0368ed32b2a6b4331b8350d67de53c0359992444b116dd3"}, - {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:dbe982f38565bb50cb7fb061ebf762c2f254ca3d8c20d4006878766e84266272"}, - {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:514b3293b64187172bc77c8fb0cdae26981618021053b30d8371c3a902d4d5ad"}, - {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d0a26ffe9d4dd35e4dfdd1e71f46401cff0181c75ac174711ccff0459135fa58"}, - {file = "rpds_py-0.20.0-cp311-none-win32.whl", hash = "sha256:89c19a494bf3ad08c1da49445cc5d13d8fefc265f48ee7e7556839acdacf69d0"}, - {file = "rpds_py-0.20.0-cp311-none-win_amd64.whl", hash = 
"sha256:c638144ce971df84650d3ed0096e2ae7af8e62ecbbb7b201c8935c370df00a2c"}, - {file = "rpds_py-0.20.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a84ab91cbe7aab97f7446652d0ed37d35b68a465aeef8fc41932a9d7eee2c1a6"}, - {file = "rpds_py-0.20.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:56e27147a5a4c2c21633ff8475d185734c0e4befd1c989b5b95a5d0db699b21b"}, - {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2580b0c34583b85efec8c5c5ec9edf2dfe817330cc882ee972ae650e7b5ef739"}, - {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b80d4a7900cf6b66bb9cee5c352b2d708e29e5a37fe9bf784fa97fc11504bf6c"}, - {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:50eccbf054e62a7b2209b28dc7a22d6254860209d6753e6b78cfaeb0075d7bee"}, - {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:49a8063ea4296b3a7e81a5dfb8f7b2d73f0b1c20c2af401fb0cdf22e14711a96"}, - {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea438162a9fcbee3ecf36c23e6c68237479f89f962f82dae83dc15feeceb37e4"}, - {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:18d7585c463087bddcfa74c2ba267339f14f2515158ac4db30b1f9cbdb62c8ef"}, - {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d4c7d1a051eeb39f5c9547e82ea27cbcc28338482242e3e0b7768033cb083821"}, - {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e4df1e3b3bec320790f699890d41c59d250f6beda159ea3c44c3f5bac1976940"}, - {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2cf126d33a91ee6eedc7f3197b53e87a2acdac63602c0f03a02dd69e4b138174"}, - {file = "rpds_py-0.20.0-cp312-none-win32.whl", hash = "sha256:8bc7690f7caee50b04a79bf017a8d020c1f48c2a1077ffe172abec59870f1139"}, - {file = "rpds_py-0.20.0-cp312-none-win_amd64.whl", hash = "sha256:0e13e6952ef264c40587d510ad676a988df19adea20444c2b295e536457bc585"}, - {file = "rpds_py-0.20.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:aa9a0521aeca7d4941499a73ad7d4f8ffa3d1affc50b9ea11d992cd7eff18a29"}, - {file = "rpds_py-0.20.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4a1f1d51eccb7e6c32ae89243cb352389228ea62f89cd80823ea7dd1b98e0b91"}, - {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a86a9b96070674fc88b6f9f71a97d2c1d3e5165574615d1f9168ecba4cecb24"}, - {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6c8ef2ebf76df43f5750b46851ed1cdf8f109d7787ca40035fe19fbdc1acc5a7"}, - {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b74b25f024b421d5859d156750ea9a65651793d51b76a2e9238c05c9d5f203a9"}, - {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57eb94a8c16ab08fef6404301c38318e2c5a32216bf5de453e2714c964c125c8"}, - {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1940dae14e715e2e02dfd5b0f64a52e8374a517a1e531ad9412319dc3ac7879"}, - {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d20277fd62e1b992a50c43f13fbe13277a31f8c9f70d59759c88f644d66c619f"}, - {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:06db23d43f26478303e954c34c75182356ca9aa7797d22c5345b16871ab9c45c"}, - 
{file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b2a5db5397d82fa847e4c624b0c98fe59d2d9b7cf0ce6de09e4d2e80f8f5b3f2"}, - {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5a35df9f5548fd79cb2f52d27182108c3e6641a4feb0f39067911bf2adaa3e57"}, - {file = "rpds_py-0.20.0-cp313-none-win32.whl", hash = "sha256:fd2d84f40633bc475ef2d5490b9c19543fbf18596dcb1b291e3a12ea5d722f7a"}, - {file = "rpds_py-0.20.0-cp313-none-win_amd64.whl", hash = "sha256:9bc2d153989e3216b0559251b0c260cfd168ec78b1fac33dd485750a228db5a2"}, - {file = "rpds_py-0.20.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:f2fbf7db2012d4876fb0d66b5b9ba6591197b0f165db8d99371d976546472a24"}, - {file = "rpds_py-0.20.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1e5f3cd7397c8f86c8cc72d5a791071431c108edd79872cdd96e00abd8497d29"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce9845054c13696f7af7f2b353e6b4f676dab1b4b215d7fe5e05c6f8bb06f965"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c3e130fd0ec56cb76eb49ef52faead8ff09d13f4527e9b0c400307ff72b408e1"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b16aa0107ecb512b568244ef461f27697164d9a68d8b35090e9b0c1c8b27752"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa7f429242aae2947246587d2964fad750b79e8c233a2367f71b554e9447949c"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af0fc424a5842a11e28956e69395fbbeab2c97c42253169d87e90aac2886d751"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b8c00a3b1e70c1d3891f0db1b05292747f0dbcfb49c43f9244d04c70fbc40eb8"}, - {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:40ce74fc86ee4645d0a225498d091d8bc61f39b709ebef8204cb8b5a464d3c0e"}, - {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:4fe84294c7019456e56d93e8ababdad5a329cd25975be749c3f5f558abb48253"}, - {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:338ca4539aad4ce70a656e5187a3a31c5204f261aef9f6ab50e50bcdffaf050a"}, - {file = "rpds_py-0.20.0-cp38-none-win32.whl", hash = "sha256:54b43a2b07db18314669092bb2de584524d1ef414588780261e31e85846c26a5"}, - {file = "rpds_py-0.20.0-cp38-none-win_amd64.whl", hash = "sha256:a1862d2d7ce1674cffa6d186d53ca95c6e17ed2b06b3f4c476173565c862d232"}, - {file = "rpds_py-0.20.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:3fde368e9140312b6e8b6c09fb9f8c8c2f00999d1823403ae90cc00480221b22"}, - {file = "rpds_py-0.20.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9824fb430c9cf9af743cf7aaf6707bf14323fb51ee74425c380f4c846ea70789"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11ef6ce74616342888b69878d45e9f779b95d4bd48b382a229fe624a409b72c5"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c52d3f2f82b763a24ef52f5d24358553e8403ce05f893b5347098014f2d9eff2"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d35cef91e59ebbeaa45214861874bc6f19eb35de96db73e467a8358d701a96c"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d72278a30111e5b5525c1dd96120d9e958464316f55adb030433ea905866f4de"}, - {file = 
"rpds_py-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4c29cbbba378759ac5786730d1c3cb4ec6f8ababf5c42a9ce303dc4b3d08cda"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6632f2d04f15d1bd6fe0eedd3b86d9061b836ddca4c03d5cf5c7e9e6b7c14580"}, - {file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d0b67d87bb45ed1cd020e8fbf2307d449b68abc45402fe1a4ac9e46c3c8b192b"}, - {file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ec31a99ca63bf3cd7f1a5ac9fe95c5e2d060d3c768a09bc1d16e235840861420"}, - {file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:22e6c9976e38f4d8c4a63bd8a8edac5307dffd3ee7e6026d97f3cc3a2dc02a0b"}, - {file = "rpds_py-0.20.0-cp39-none-win32.whl", hash = "sha256:569b3ea770c2717b730b61998b6c54996adee3cef69fc28d444f3e7920313cf7"}, - {file = "rpds_py-0.20.0-cp39-none-win_amd64.whl", hash = "sha256:e6900ecdd50ce0facf703f7a00df12374b74bbc8ad9fe0f6559947fb20f82364"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:617c7357272c67696fd052811e352ac54ed1d9b49ab370261a80d3b6ce385045"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9426133526f69fcaba6e42146b4e12d6bc6c839b8b555097020e2b78ce908dcc"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:deb62214c42a261cb3eb04d474f7155279c1a8a8c30ac89b7dcb1721d92c3c02"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fcaeb7b57f1a1e071ebd748984359fef83ecb026325b9d4ca847c95bc7311c92"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d454b8749b4bd70dd0a79f428731ee263fa6995f83ccb8bada706e8d1d3ff89d"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d807dc2051abe041b6649681dce568f8e10668e3c1c6543ebae58f2d7e617855"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3c20f0ddeb6e29126d45f89206b8291352b8c5b44384e78a6499d68b52ae511"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b7f19250ceef892adf27f0399b9e5afad019288e9be756d6919cb58892129f51"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:4f1ed4749a08379555cebf4650453f14452eaa9c43d0a95c49db50c18b7da075"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:dcedf0b42bcb4cfff4101d7771a10532415a6106062f005ab97d1d0ab5681c60"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:39ed0d010457a78f54090fafb5d108501b5aa5604cc22408fc1c0c77eac14344"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:bb273176be34a746bdac0b0d7e4e2c467323d13640b736c4c477881a3220a989"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f918a1a130a6dfe1d7fe0f105064141342e7dd1611f2e6a21cd2f5c8cb1cfb3e"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:f60012a73aa396be721558caa3a6fd49b3dd0033d1675c6d59c4502e870fcf0c"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d2b1ad682a3dfda2a4e8ad8572f3100f95fad98cb99faf37ff0ddfe9cbf9d03"}, - {file = 
"rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:614fdafe9f5f19c63ea02817fa4861c606a59a604a77c8cdef5aa01d28b97921"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fa518bcd7600c584bf42e6617ee8132869e877db2f76bcdc281ec6a4113a53ab"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0475242f447cc6cb8a9dd486d68b2ef7fbee84427124c232bff5f63b1fe11e5"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f90a4cd061914a60bd51c68bcb4357086991bd0bb93d8aa66a6da7701370708f"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:def7400461c3a3f26e49078302e1c1b38f6752342c77e3cf72ce91ca69fb1bc1"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:65794e4048ee837494aea3c21a28ad5fc080994dfba5b036cf84de37f7ad5074"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:faefcc78f53a88f3076b7f8be0a8f8d35133a3ecf7f3770895c25f8813460f08"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:5b4f105deeffa28bbcdff6c49b34e74903139afa690e35d2d9e3c2c2fba18cec"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:fdfc3a892927458d98f3d55428ae46b921d1f7543b89382fdb483f5640daaec8"}, - {file = "rpds_py-0.20.0.tar.gz", hash = "sha256:d72a210824facfdaf8768cf2d7ca25a042c30320b3020de2fa04640920d4e121"}, + {file = "rpds_py-0.19.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:fb37bd599f031f1a6fb9e58ec62864ccf3ad549cf14bac527dbfa97123edcca4"}, + {file = "rpds_py-0.19.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3384d278df99ec2c6acf701d067147320b864ef6727405d6470838476e44d9e8"}, + {file = "rpds_py-0.19.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e54548e0be3ac117595408fd4ca0ac9278fde89829b0b518be92863b17ff67a2"}, + {file = "rpds_py-0.19.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8eb488ef928cdbc05a27245e52de73c0d7c72a34240ef4d9893fdf65a8c1a955"}, + {file = "rpds_py-0.19.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a5da93debdfe27b2bfc69eefb592e1831d957b9535e0943a0ee8b97996de21b5"}, + {file = "rpds_py-0.19.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:79e205c70afddd41f6ee79a8656aec738492a550247a7af697d5bd1aee14f766"}, + {file = "rpds_py-0.19.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:959179efb3e4a27610e8d54d667c02a9feaa86bbabaf63efa7faa4dfa780d4f1"}, + {file = "rpds_py-0.19.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a6e605bb9edcf010f54f8b6a590dd23a4b40a8cb141255eec2a03db249bc915b"}, + {file = "rpds_py-0.19.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9133d75dc119a61d1a0ded38fb9ba40a00ef41697cc07adb6ae098c875195a3f"}, + {file = "rpds_py-0.19.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:dd36b712d35e757e28bf2f40a71e8f8a2d43c8b026d881aa0c617b450d6865c9"}, + {file = "rpds_py-0.19.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:354f3a91718489912f2e0fc331c24eaaf6a4565c080e00fbedb6015857c00582"}, + {file = "rpds_py-0.19.0-cp310-none-win32.whl", hash = "sha256:ebcbf356bf5c51afc3290e491d3722b26aaf5b6af3c1c7f6a1b757828a46e336"}, + {file = "rpds_py-0.19.0-cp310-none-win_amd64.whl", hash = 
"sha256:75a6076289b2df6c8ecb9d13ff79ae0cad1d5fb40af377a5021016d58cd691ec"}, + {file = "rpds_py-0.19.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6d45080095e585f8c5097897313def60caa2046da202cdb17a01f147fb263b81"}, + {file = "rpds_py-0.19.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c5c9581019c96f865483d031691a5ff1cc455feb4d84fc6920a5ffc48a794d8a"}, + {file = "rpds_py-0.19.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1540d807364c84516417115c38f0119dfec5ea5c0dd9a25332dea60b1d26fc4d"}, + {file = "rpds_py-0.19.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9e65489222b410f79711dc3d2d5003d2757e30874096b2008d50329ea4d0f88c"}, + {file = "rpds_py-0.19.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9da6f400eeb8c36f72ef6646ea530d6d175a4f77ff2ed8dfd6352842274c1d8b"}, + {file = "rpds_py-0.19.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:37f46bb11858717e0efa7893c0f7055c43b44c103e40e69442db5061cb26ed34"}, + {file = "rpds_py-0.19.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:071d4adc734de562bd11d43bd134330fb6249769b2f66b9310dab7460f4bf714"}, + {file = "rpds_py-0.19.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9625367c8955e4319049113ea4f8fee0c6c1145192d57946c6ffcd8fe8bf48dd"}, + {file = "rpds_py-0.19.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e19509145275d46bc4d1e16af0b57a12d227c8253655a46bbd5ec317e941279d"}, + {file = "rpds_py-0.19.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d438e4c020d8c39961deaf58f6913b1bf8832d9b6f62ec35bd93e97807e9cbc"}, + {file = "rpds_py-0.19.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:90bf55d9d139e5d127193170f38c584ed3c79e16638890d2e36f23aa1630b952"}, + {file = "rpds_py-0.19.0-cp311-none-win32.whl", hash = "sha256:8d6ad132b1bc13d05ffe5b85e7a01a3998bf3a6302ba594b28d61b8c2cf13aaf"}, + {file = "rpds_py-0.19.0-cp311-none-win_amd64.whl", hash = "sha256:7ec72df7354e6b7f6eb2a17fa6901350018c3a9ad78e48d7b2b54d0412539a67"}, + {file = "rpds_py-0.19.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:5095a7c838a8647c32aa37c3a460d2c48debff7fc26e1136aee60100a8cd8f68"}, + {file = "rpds_py-0.19.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6f2f78ef14077e08856e788fa482107aa602636c16c25bdf59c22ea525a785e9"}, + {file = "rpds_py-0.19.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7cc6cb44f8636fbf4a934ca72f3e786ba3c9f9ba4f4d74611e7da80684e48d2"}, + {file = "rpds_py-0.19.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cf902878b4af334a09de7a45badbff0389e7cf8dc2e4dcf5f07125d0b7c2656d"}, + {file = "rpds_py-0.19.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:688aa6b8aa724db1596514751ffb767766e02e5c4a87486ab36b8e1ebc1aedac"}, + {file = "rpds_py-0.19.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57dbc9167d48e355e2569346b5aa4077f29bf86389c924df25c0a8b9124461fb"}, + {file = "rpds_py-0.19.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b4cf5a9497874822341c2ebe0d5850fed392034caadc0bad134ab6822c0925b"}, + {file = "rpds_py-0.19.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8a790d235b9d39c70a466200d506bb33a98e2ee374a9b4eec7a8ac64c2c261fa"}, + {file = "rpds_py-0.19.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1d16089dfa58719c98a1c06f2daceba6d8e3fb9b5d7931af4a990a3c486241cb"}, + 
{file = "rpds_py-0.19.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:bc9128e74fe94650367fe23f37074f121b9f796cabbd2f928f13e9661837296d"}, + {file = "rpds_py-0.19.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c8f77e661ffd96ff104bebf7d0f3255b02aa5d5b28326f5408d6284c4a8b3248"}, + {file = "rpds_py-0.19.0-cp312-none-win32.whl", hash = "sha256:5f83689a38e76969327e9b682be5521d87a0c9e5a2e187d2bc6be4765f0d4600"}, + {file = "rpds_py-0.19.0-cp312-none-win_amd64.whl", hash = "sha256:06925c50f86da0596b9c3c64c3837b2481337b83ef3519e5db2701df695453a4"}, + {file = "rpds_py-0.19.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:52e466bea6f8f3a44b1234570244b1cff45150f59a4acae3fcc5fd700c2993ca"}, + {file = "rpds_py-0.19.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e21cc693045fda7f745c790cb687958161ce172ffe3c5719ca1764e752237d16"}, + {file = "rpds_py-0.19.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b31f059878eb1f5da8b2fd82480cc18bed8dcd7fb8fe68370e2e6285fa86da6"}, + {file = "rpds_py-0.19.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1dd46f309e953927dd018567d6a9e2fb84783963650171f6c5fe7e5c41fd5666"}, + {file = "rpds_py-0.19.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:34a01a4490e170376cd79258b7f755fa13b1a6c3667e872c8e35051ae857a92b"}, + {file = "rpds_py-0.19.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bcf426a8c38eb57f7bf28932e68425ba86def6e756a5b8cb4731d8e62e4e0223"}, + {file = "rpds_py-0.19.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f68eea5df6347d3f1378ce992d86b2af16ad7ff4dcb4a19ccdc23dea901b87fb"}, + {file = "rpds_py-0.19.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dab8d921b55a28287733263c0e4c7db11b3ee22aee158a4de09f13c93283c62d"}, + {file = "rpds_py-0.19.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:6fe87efd7f47266dfc42fe76dae89060038f1d9cb911f89ae7e5084148d1cc08"}, + {file = "rpds_py-0.19.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:535d4b52524a961d220875688159277f0e9eeeda0ac45e766092bfb54437543f"}, + {file = "rpds_py-0.19.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:8b1a94b8afc154fbe36978a511a1f155f9bd97664e4f1f7a374d72e180ceb0ae"}, + {file = "rpds_py-0.19.0-cp38-none-win32.whl", hash = "sha256:7c98298a15d6b90c8f6e3caa6457f4f022423caa5fa1a1ca7a5e9e512bdb77a4"}, + {file = "rpds_py-0.19.0-cp38-none-win_amd64.whl", hash = "sha256:b0da31853ab6e58a11db3205729133ce0df26e6804e93079dee095be3d681dc1"}, + {file = "rpds_py-0.19.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:5039e3cef7b3e7a060de468a4a60a60a1f31786da94c6cb054e7a3c75906111c"}, + {file = "rpds_py-0.19.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ab1932ca6cb8c7499a4d87cb21ccc0d3326f172cfb6a64021a889b591bb3045c"}, + {file = "rpds_py-0.19.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2afd2164a1e85226fcb6a1da77a5c8896c18bfe08e82e8ceced5181c42d2179"}, + {file = "rpds_py-0.19.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b1c30841f5040de47a0046c243fc1b44ddc87d1b12435a43b8edff7e7cb1e0d0"}, + {file = "rpds_py-0.19.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f757f359f30ec7dcebca662a6bd46d1098f8b9fb1fcd661a9e13f2e8ce343ba1"}, + {file = "rpds_py-0.19.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15e65395a59d2e0e96caf8ee5389ffb4604e980479c32742936ddd7ade914b22"}, + {file = 
"rpds_py-0.19.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb0f6eb3a320f24b94d177e62f4074ff438f2ad9d27e75a46221904ef21a7b05"}, + {file = "rpds_py-0.19.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b228e693a2559888790936e20f5f88b6e9f8162c681830eda303bad7517b4d5a"}, + {file = "rpds_py-0.19.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2575efaa5d949c9f4e2cdbe7d805d02122c16065bfb8d95c129372d65a291a0b"}, + {file = "rpds_py-0.19.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:5c872814b77a4e84afa293a1bee08c14daed1068b2bb1cc312edbf020bbbca2b"}, + {file = "rpds_py-0.19.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:850720e1b383df199b8433a20e02b25b72f0fded28bc03c5bd79e2ce7ef050be"}, + {file = "rpds_py-0.19.0-cp39-none-win32.whl", hash = "sha256:ce84a7efa5af9f54c0aa7692c45861c1667080814286cacb9958c07fc50294fb"}, + {file = "rpds_py-0.19.0-cp39-none-win_amd64.whl", hash = "sha256:1c26da90b8d06227d7769f34915913911222d24ce08c0ab2d60b354e2d9c7aff"}, + {file = "rpds_py-0.19.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:75969cf900d7be665ccb1622a9aba225cf386bbc9c3bcfeeab9f62b5048f4a07"}, + {file = "rpds_py-0.19.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8445f23f13339da640d1be8e44e5baf4af97e396882ebbf1692aecd67f67c479"}, + {file = "rpds_py-0.19.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5a7c1062ef8aea3eda149f08120f10795835fc1c8bc6ad948fb9652a113ca55"}, + {file = "rpds_py-0.19.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:462b0c18fbb48fdbf980914a02ee38c423a25fcc4cf40f66bacc95a2d2d73bc8"}, + {file = "rpds_py-0.19.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3208f9aea18991ac7f2b39721e947bbd752a1abbe79ad90d9b6a84a74d44409b"}, + {file = "rpds_py-0.19.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3444fe52b82f122d8a99bf66777aed6b858d392b12f4c317da19f8234db4533"}, + {file = "rpds_py-0.19.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88cb4bac7185a9f0168d38c01d7a00addece9822a52870eee26b8d5b61409213"}, + {file = "rpds_py-0.19.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6b130bd4163c93798a6b9bb96be64a7c43e1cec81126ffa7ffaa106e1fc5cef5"}, + {file = "rpds_py-0.19.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:a707b158b4410aefb6b054715545bbb21aaa5d5d0080217290131c49c2124a6e"}, + {file = "rpds_py-0.19.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:dc9ac4659456bde7c567107556ab065801622396b435a3ff213daef27b495388"}, + {file = "rpds_py-0.19.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:81ea573aa46d3b6b3d890cd3c0ad82105985e6058a4baed03cf92518081eec8c"}, + {file = "rpds_py-0.19.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3f148c3f47f7f29a79c38cc5d020edcb5ca780020fab94dbc21f9af95c463581"}, + {file = "rpds_py-0.19.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0906357f90784a66e89ae3eadc2654f36c580a7d65cf63e6a616e4aec3a81be"}, + {file = "rpds_py-0.19.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f629ecc2db6a4736b5ba95a8347b0089240d69ad14ac364f557d52ad68cf94b0"}, + {file = "rpds_py-0.19.0-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c6feacd1d178c30e5bc37184526e56740342fd2aa6371a28367bad7908d454fc"}, + {file = 
"rpds_py-0.19.0-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae8b6068ee374fdfab63689be0963333aa83b0815ead5d8648389a8ded593378"}, + {file = "rpds_py-0.19.0-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78d57546bad81e0da13263e4c9ce30e96dcbe720dbff5ada08d2600a3502e526"}, + {file = "rpds_py-0.19.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8b6683a37338818646af718c9ca2a07f89787551057fae57c4ec0446dc6224b"}, + {file = "rpds_py-0.19.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e8481b946792415adc07410420d6fc65a352b45d347b78fec45d8f8f0d7496f0"}, + {file = "rpds_py-0.19.0-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:bec35eb20792ea64c3c57891bc3ca0bedb2884fbac2c8249d9b731447ecde4fa"}, + {file = "rpds_py-0.19.0-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:aa5476c3e3a402c37779e95f7b4048db2cb5b0ed0b9d006983965e93f40fe05a"}, + {file = "rpds_py-0.19.0-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:19d02c45f2507b489fd4df7b827940f1420480b3e2e471e952af4d44a1ea8e34"}, + {file = "rpds_py-0.19.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a3e2fd14c5d49ee1da322672375963f19f32b3d5953f0615b175ff7b9d38daed"}, + {file = "rpds_py-0.19.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:93a91c2640645303e874eada51f4f33351b84b351a689d470f8108d0e0694210"}, + {file = "rpds_py-0.19.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5b9fc03bf76a94065299d4a2ecd8dfbae4ae8e2e8098bbfa6ab6413ca267709"}, + {file = "rpds_py-0.19.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5a4b07cdf3f84310c08c1de2c12ddadbb7a77568bcb16e95489f9c81074322ed"}, + {file = "rpds_py-0.19.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba0ed0dc6763d8bd6e5de5cf0d746d28e706a10b615ea382ac0ab17bb7388633"}, + {file = "rpds_py-0.19.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:474bc83233abdcf2124ed3f66230a1c8435896046caa4b0b5ab6013c640803cc"}, + {file = "rpds_py-0.19.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329c719d31362355a96b435f4653e3b4b061fcc9eba9f91dd40804ca637d914e"}, + {file = "rpds_py-0.19.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ef9101f3f7b59043a34f1dccbb385ca760467590951952d6701df0da9893ca0c"}, + {file = "rpds_py-0.19.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:0121803b0f424ee2109d6e1f27db45b166ebaa4b32ff47d6aa225642636cd834"}, + {file = "rpds_py-0.19.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:8344127403dea42f5970adccf6c5957a71a47f522171fafaf4c6ddb41b61703a"}, + {file = "rpds_py-0.19.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:443cec402ddd650bb2b885113e1dcedb22b1175c6be223b14246a714b61cd521"}, + {file = "rpds_py-0.19.0.tar.gz", hash = "sha256:4fdc9afadbeb393b4bbbad75481e0ea78e4469f2e1d713a90811700830b553a9"}, ] [[package]] @@ -4486,13 +4435,13 @@ tests = ["coverage[toml] (>=5.0.2)", "pytest"] [[package]] name = "setuptools" -version = "72.1.0" +version = "71.1.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-72.1.0-py3-none-any.whl", hash = "sha256:5a03e1860cf56bb6ef48ce186b0e557fdba433237481a9a625176c2831be15d1"}, - {file = "setuptools-72.1.0.tar.gz", hash = 
"sha256:8d243eff56d095e5817f796ede6ae32941278f542e0f941867cc05ae52b162ec"}, + {file = "setuptools-71.1.0-py3-none-any.whl", hash = "sha256:33874fdc59b3188304b2e7c80d9029097ea31627180896fb549c578ceb8a0855"}, + {file = "setuptools-71.1.0.tar.gz", hash = "sha256:032d42ee9fb536e33087fb66cac5f840eb9391ed05637b3f2a76a7c8fb477936"}, ] [package.extras] @@ -4841,13 +4790,13 @@ webtest = ">=2.0.9" [[package]] name = "werkzeug" -version = "3.0.3" +version = "3.0.1" description = "The comprehensive WSGI web application library." optional = false python-versions = ">=3.8" files = [ - {file = "werkzeug-3.0.3-py3-none-any.whl", hash = "sha256:fc9645dc43e03e4d630d23143a04a7f947a9a3b5727cd535fdfe155a17cc48c8"}, - {file = "werkzeug-3.0.3.tar.gz", hash = "sha256:097e5bfda9f0aba8da6b8545146def481d06aa7d3266e7448e2cccf67dd8bd18"}, + {file = "werkzeug-3.0.1-py3-none-any.whl", hash = "sha256:90a285dc0e42ad56b34e696398b8122ee4c681833fb35b8334a095d82c56da10"}, + {file = "werkzeug-3.0.1.tar.gz", hash = "sha256:507e811ecea72b18a404947aded4b3390e1db8f826b494d76550ef45bb3b1dcc"}, ] [package.dependencies] @@ -5010,45 +4959,47 @@ test = ["zope.testrunner"] [[package]] name = "zope-interface" -version = "7.0.1" +version = "6.4.post2" description = "Interfaces for Python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.7" files = [ - {file = "zope.interface-7.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ec4e87e6fdc511a535254daa122c20e11959ce043b4e3425494b237692a34f1c"}, - {file = "zope.interface-7.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:51d5713e8e38f2d3ec26e0dfdca398ed0c20abda2eb49ffc15a15a23eb8e5f6d"}, - {file = "zope.interface-7.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea8d51e5eb29e57d34744369cd08267637aa5a0fefc9b5d33775ab7ff2ebf2e3"}, - {file = "zope.interface-7.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:55bbcc74dc0c7ab489c315c28b61d7a1d03cf938cc99cc58092eb065f120c3a5"}, - {file = "zope.interface-7.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10ebac566dd0cec66f942dc759d46a994a2b3ba7179420f0e2130f88f8a5f400"}, - {file = "zope.interface-7.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:7039e624bcb820f77cc2ff3d1adcce531932990eee16121077eb51d9c76b6c14"}, - {file = "zope.interface-7.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:03bd5c0db82237bbc47833a8b25f1cc090646e212f86b601903d79d7e6b37031"}, - {file = "zope.interface-7.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3f52050c6a10d4a039ec6f2c58e5b3ade5cc570d16cf9d102711e6b8413c90e6"}, - {file = "zope.interface-7.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:af0b33f04677b57843d529b9257a475d2865403300b48c67654c40abac2f9f24"}, - {file = "zope.interface-7.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:696c2a381fc7876b3056711717dba5eddd07c2c9e5ccd50da54029a1293b6e43"}, - {file = "zope.interface-7.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f89a420cf5a6f2aa7849dd59e1ff0e477f562d97cf8d6a1ee03461e1eec39887"}, - {file = "zope.interface-7.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:b59deb0ddc7b431e41d720c00f99d68b52cb9bd1d5605a085dc18f502fe9c47f"}, - {file = "zope.interface-7.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:52f5253cca1b35eaeefa51abd366b87f48f8714097c99b131ba61f3fdbbb58e7"}, - {file = "zope.interface-7.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:88d108d004e0df25224de77ce349a7e73494ea2cb194031f7c9687e68a88ec9b"}, - {file = "zope.interface-7.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c203d82069ba31e1f3bc7ba530b2461ec86366cd4bfc9b95ec6ce58b1b559c34"}, - {file = "zope.interface-7.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3f3495462bc0438b76536a0e10d765b168ae636092082531b88340dc40dcd118"}, - {file = "zope.interface-7.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:192b7a792e3145ed880ff6b1a206fdb783697cfdb4915083bfca7065ec845e60"}, - {file = "zope.interface-7.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:400d06c9ec8dbcc96f56e79376297e7be07a315605c9a2208720da263d44d76f"}, - {file = "zope.interface-7.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c1dff87b30fd150c61367d0e2cdc49bb55f8b9fd2a303560bbc24b951573ae1"}, - {file = "zope.interface-7.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f749ca804648d00eda62fe1098f229b082dfca930d8bad8386e572a6eafa7525"}, - {file = "zope.interface-7.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ec212037becf6d2f705b7ed4538d56980b1e7bba237df0d8995cbbed29961dc"}, - {file = "zope.interface-7.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d33cb526efdc235a2531433fc1287fcb80d807d5b401f9b801b78bf22df560dd"}, - {file = "zope.interface-7.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b419f2144e1762ab845f20316f1df36b15431f2622ebae8a6d5f7e8e712b413c"}, - {file = "zope.interface-7.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03f1452d5d1f279184d5bdb663a3dc39902d9320eceb63276240791e849054b6"}, - {file = "zope.interface-7.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ba4b3638d014918b918aa90a9c8370bd74a03abf8fcf9deb353b3a461a59a84"}, - {file = "zope.interface-7.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc0615351221926a36a0fbcb2520fb52e0b23e8c22a43754d9cb8f21358c33c0"}, - {file = "zope.interface-7.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:ce6cbb852fb8f2f9bb7b9cdca44e2e37bce783b5f4c167ff82cb5f5128163c8f"}, - {file = "zope.interface-7.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5566fd9271c89ad03d81b0831c37d46ae5e2ed211122c998637130159a120cf1"}, - {file = "zope.interface-7.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:da0cef4d7e3f19c3bd1d71658d6900321af0492fee36ec01b550a10924cffb9c"}, - {file = "zope.interface-7.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f32ca483e6ade23c7caaee9d5ee5d550cf4146e9b68d2fb6c68bac183aa41c37"}, - {file = "zope.interface-7.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da21e7eec49252df34d426c2ee9cf0361c923026d37c24728b0fa4cc0599fd03"}, - {file = "zope.interface-7.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a8195b99e650e6f329ce4e5eb22d448bdfef0406404080812bc96e2a05674cb"}, - {file = "zope.interface-7.0.1-cp39-cp39-win_amd64.whl", 
hash = "sha256:19c829d52e921b9fe0b2c0c6a8f9a2508c49678ee1be598f87d143335b6a35dc"}, - {file = "zope.interface-7.0.1.tar.gz", hash = "sha256:f0f5fda7cbf890371a59ab1d06512da4f2c89a6ea194e595808123c863c38eff"}, + {file = "zope.interface-6.4.post2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2eccd5bef45883802848f821d940367c1d0ad588de71e5cabe3813175444202c"}, + {file = "zope.interface-6.4.post2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:762e616199f6319bb98e7f4f27d254c84c5fb1c25c908c2a9d0f92b92fb27530"}, + {file = "zope.interface-6.4.post2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ef8356f16b1a83609f7a992a6e33d792bb5eff2370712c9eaae0d02e1924341"}, + {file = "zope.interface-6.4.post2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e4fa5d34d7973e6b0efa46fe4405090f3b406f64b6290facbb19dcbf642ad6b"}, + {file = "zope.interface-6.4.post2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d22fce0b0f5715cdac082e35a9e735a1752dc8585f005d045abb1a7c20e197f9"}, + {file = "zope.interface-6.4.post2-cp310-cp310-win_amd64.whl", hash = "sha256:97e615eab34bd8477c3f34197a17ce08c648d38467489359cb9eb7394f1083f7"}, + {file = "zope.interface-6.4.post2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:599f3b07bde2627e163ce484d5497a54a0a8437779362395c6b25e68c6590ede"}, + {file = "zope.interface-6.4.post2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:136cacdde1a2c5e5bc3d0b2a1beed733f97e2dad8c2ad3c2e17116f6590a3827"}, + {file = "zope.interface-6.4.post2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47937cf2e7ed4e0e37f7851c76edeb8543ec9b0eae149b36ecd26176ff1ca874"}, + {file = "zope.interface-6.4.post2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f0a6be264afb094975b5ef55c911379d6989caa87c4e558814ec4f5125cfa2e"}, + {file = "zope.interface-6.4.post2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47654177e675bafdf4e4738ce58cdc5c6d6ee2157ac0a78a3fa460942b9d64a8"}, + {file = "zope.interface-6.4.post2-cp311-cp311-win_amd64.whl", hash = "sha256:e2fb8e8158306567a3a9a41670c1ff99d0567d7fc96fa93b7abf8b519a46b250"}, + {file = "zope.interface-6.4.post2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b912750b13d76af8aac45ddf4679535def304b2a48a07989ec736508d0bbfbde"}, + {file = "zope.interface-6.4.post2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4ac46298e0143d91e4644a27a769d1388d5d89e82ee0cf37bf2b0b001b9712a4"}, + {file = "zope.interface-6.4.post2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:86a94af4a88110ed4bb8961f5ac72edf782958e665d5bfceaab6bf388420a78b"}, + {file = "zope.interface-6.4.post2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:73f9752cf3596771c7726f7eea5b9e634ad47c6d863043589a1c3bb31325c7eb"}, + {file = "zope.interface-6.4.post2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00b5c3e9744dcdc9e84c24ed6646d5cf0cf66551347b310b3ffd70f056535854"}, + {file = "zope.interface-6.4.post2-cp312-cp312-win_amd64.whl", hash = "sha256:551db2fe892fcbefb38f6f81ffa62de11090c8119fd4e66a60f3adff70751ec7"}, + {file = "zope.interface-6.4.post2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:e96ac6b3169940a8cd57b4f2b8edcad8f5213b60efcd197d59fbe52f0accd66e"}, + {file = "zope.interface-6.4.post2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cebff2fe5dc82cb22122e4e1225e00a4a506b1a16fafa911142ee124febf2c9e"}, + {file = "zope.interface-6.4.post2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33ee982237cffaf946db365c3a6ebaa37855d8e3ca5800f6f48890209c1cfefc"}, + {file = "zope.interface-6.4.post2-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:fbf649bc77510ef2521cf797700b96167bb77838c40780da7ea3edd8b78044d1"}, + {file = "zope.interface-6.4.post2-cp37-cp37m-win_amd64.whl", hash = "sha256:4c0b208a5d6c81434bdfa0f06d9b667e5de15af84d8cae5723c3a33ba6611b82"}, + {file = "zope.interface-6.4.post2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d3fe667935e9562407c2511570dca14604a654988a13d8725667e95161d92e9b"}, + {file = "zope.interface-6.4.post2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a96e6d4074db29b152222c34d7eec2e2db2f92638d2b2b2c704f9e8db3ae0edc"}, + {file = "zope.interface-6.4.post2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:866a0f583be79f0def667a5d2c60b7b4cc68f0c0a470f227e1122691b443c934"}, + {file = "zope.interface-6.4.post2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5fe919027f29b12f7a2562ba0daf3e045cb388f844e022552a5674fcdf5d21f1"}, + {file = "zope.interface-6.4.post2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e0343a6e06d94f6b6ac52fbc75269b41dd3c57066541a6c76517f69fe67cb43"}, + {file = "zope.interface-6.4.post2-cp38-cp38-win_amd64.whl", hash = "sha256:dabb70a6e3d9c22df50e08dc55b14ca2a99da95a2d941954255ac76fd6982bc5"}, + {file = "zope.interface-6.4.post2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:706efc19f9679a1b425d6fa2b4bc770d976d0984335eaea0869bd32f627591d2"}, + {file = "zope.interface-6.4.post2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3d136e5b8821073e1a09dde3eb076ea9988e7010c54ffe4d39701adf0c303438"}, + {file = "zope.interface-6.4.post2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1730c93a38b5a18d24549bc81613223962a19d457cfda9bdc66e542f475a36f4"}, + {file = "zope.interface-6.4.post2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bc2676312cc3468a25aac001ec727168994ea3b69b48914944a44c6a0b251e79"}, + {file = "zope.interface-6.4.post2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a62fd6cd518693568e23e02f41816adedfca637f26716837681c90b36af3671"}, + {file = "zope.interface-6.4.post2-cp39-cp39-win_amd64.whl", hash = "sha256:d3f7e001328bd6466b3414215f66dde3c7c13d8025a9c160a75d7b2687090d15"}, + {file = "zope.interface-6.4.post2.tar.gz", hash = "sha256:1c207e6f6dfd5749a26f5a5fd966602d6b824ec00d2df84a7e9a924e8933654e"}, ] [package.dependencies] @@ -5062,4 +5013,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] [metadata] lock-version = "2.0" python-versions = "^3.12" -content-hash = "1de9354e338d27f87b817d801505ffbc9bc5e6a9d7cd80e06480eace40a1da11" +content-hash = "78f529cdfdc2cbc64c62d3acb53e096f6a7090ade7e3ee70a232b059061ec13d" diff --git a/pyproject.toml b/pyproject.toml index 6b51afc118d..b78ec011364 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -11,7 +11,7 @@ packages = [{include = 
"osf"}] python = "^3.12" wheel = "0.42.0" invoke = "2.2.0" -Werkzeug = "3.0.3" +Werkzeug = "3.0.1" Flask = "3.0.2" Mako = "1.3.2" Markdown = "3.5.2" @@ -55,9 +55,8 @@ oauthlib = "3.2.2" requests-oauthlib = "1.3.1" sentry-sdk = {version= "2.2.0", extras = ["django", "flask", "celery"]} django-redis = "5.4.0" - # API requirements -Django = "4.2.15" +Django = "4.2.13" djangorestframework = "3.15.1" django-cors-headers = "4.3.1" djangorestframework-bulk = "0.2.1" @@ -70,18 +69,16 @@ django-timezone-field = "6.1.0" # pin to delay untested major version bump pyjwe = "1.0.0" # Required by pyjwe and ndg-httpsclient cryptography = "42.0.5" +#rpds-py==0.18.0 jsonschema = "4.21.1" django-guardian = "2.4.0" - # Admin requirements -# fork to generate old webpack stats format so we don't have to upgrade admin's webpack django-webpack-loader = {git = "https://github.com/CenterForOpenScience/django-webpack-loader.git", rev = "af8438c2da909ec9f2188a6c07c9d2caad0f7e93"} # branch is feature/v1-webpack-stats django-sendgrid-v5 = "1.2.3" # metadata says python 3.10 not supported, but tests pass -# Analytics requirement +# Analytics requirements keen = "0.7.0" geoip2 = "4.7.0" - # OSF models django-typed-models = "0.14.0" django-storages = "1.14.3" @@ -89,29 +86,21 @@ google-cloud-storage = "2.16.0" # dependency of django-storages, hard-pin to ve django-dirtyfields = "1.9.2" django-extensions = "3.2.3" psycopg2 = "2.9.9" - # Reviews requirements transitions = "0.8.11" - # identifiers datacite = "1.1.3" - # metadata rdflib = "7.0.0" -packaging = "^24.0" - colorlog = "6.8.2" - # Metrics -# fork to pin installed version of elasticsearch-dsl django-elasticsearch-metrics = {git ="https://github.com/CenterForOpenScience/django-elasticsearch-metrics.git", rev = "f5b9312914154e213aa01731e934c593e3434269"} # branch is feature/pin-esdsl - # Impact Metrics CSV Export djangorestframework-csv = "3.0.2" gevent = "24.2.1" +packaging = "^24.0" [tool.poetry.group.dev.dependencies] -# Requirements that are used in the development environment only pytest = "7.4.4" pytest-socket = "0.7.0" pytest-xdist = "3.5.0" @@ -147,52 +136,33 @@ nplusone = "1.0.0" django-silk = "5.1.0" [tool.poetry.group.addons.dependencies] -## boa +# Requirements for the boa add-on boa-api = "0.1.14" -# Requirements for running asyncio in celery -asgiref = "3.7.2" -## box +# Requirements for running asyncio in celery, using 3.4.1 for Python 3.6 compatibility +asgiref = "3.7.2" boxsdk = "3.9.2" - -## dataverse -# new features & dependency updates +# Allow for optional timeout parameter. 
+# https://github.com/IQSS/dataverse-client-python/pull/27 dataverse = {git = "https://github.com/CenterForOpenScience/dataverse-client-python.git", rev="2b3827578048e6df3818f82381c7ea9a2395e526"} # branch is feature/dv-client-updates - -## dropbox dropbox = "11.36.2" -## github cachecontrol = "0.14.0" "github3.py" = "4.0.1" uritemplate = "4.1.1" - -## gitlab python-gitlab = "4.4.0" - -## mendeley # up-to-date with mendeley's master + add folder support and future dep updates mendeley = {git = "https://github.com/CenterForOpenScience/mendeley-python-sdk.git", rev="be8a811fa6c3b105d9f5c656cabb6b1ba855ed5b"} # branch is feature/osf-dep-updates - -## owncloud +# Requirements for the owncloud add-on pyocclient = "0.6.0" - -## s3 boto3 = "1.34.60" - -## twofactor pyotp = "2.9.0" - -## wiki -# needs pymongo, but already installed in deps -# was 4.6.3 in wiki requirements, but older version seems to work - -## zotero Pyzotero = "1.5.18" [tool.poetry.group.release.dependencies] -# Requirements to be installed on server deployments +# newrelic APM agent newrelic = "9.7.1" +# uwsgi uwsgi = "2.0.24" diff --git a/tasks/__init__.py b/tasks/__init__.py index d393d7fb1b6..180d7838126 100755 --- a/tasks/__init__.py +++ b/tasks/__init__.py @@ -470,47 +470,47 @@ def remove_failures_from_testmon(ctx, db_path=None): conn = sqlite3.connect(db_path) tests_decached = conn.execute("delete from node where result <> '{}'").rowcount - ctx.run(f'echo {tests_decached} failures purged from travis cache') + ctx.run(f'echo {tests_decached} failures purged from ci cache') @task -def travis_setup(ctx): +def ci_setup(ctx): with open('package.json') as fobj: package_json = json.load(fobj) ctx.run('npm install @centerforopenscience/list-of-licenses@{}'.format(package_json['dependencies']['@centerforopenscience/list-of-licenses']), echo=True) @task -def test_travis_addons(ctx, numprocesses=None, coverage=False, testmon=False, junit=False): +def test_ci_addons(ctx, numprocesses=None, coverage=False, testmon=False, junit=False): """ - Run half of the tests to help travis go faster. + Run half of the tests to help ci go faster. """ - #travis_setup(ctx) + #ci_setup(ctx) syntax(ctx) test_addons(ctx, numprocesses=numprocesses, coverage=coverage, testmon=testmon, junit=junit) @task -def test_travis_website(ctx, numprocesses=None, coverage=False, testmon=False, junit=False): +def test_ci_website(ctx, numprocesses=None, coverage=False, testmon=False, junit=False): """ - Run other half of the tests to help travis go faster. + Run other half of the tests to help ci go faster. 
""" - #travis_setup(ctx) + #ci_setup(ctx) test_website(ctx, numprocesses=numprocesses, coverage=coverage, testmon=testmon, junit=junit) @task -def test_travis_api1_and_js(ctx, numprocesses=None, coverage=False, testmon=False, junit=False): - #travis_setup(ctx) +def test_ci_api1_and_js(ctx, numprocesses=None, coverage=False, testmon=False, junit=False): + #ci_setup(ctx) test_api1(ctx, numprocesses=numprocesses, coverage=coverage, testmon=testmon, junit=junit) @task -def test_travis_api2(ctx, numprocesses=None, coverage=False, testmon=False, junit=False): - #travis_setup(ctx) +def test_ci_api2(ctx, numprocesses=None, coverage=False, testmon=False, junit=False): + #ci_setup(ctx) test_api2(ctx, numprocesses=numprocesses, coverage=coverage, testmon=testmon, junit=junit) @task -def test_travis_api3_and_osf(ctx, numprocesses=None, coverage=False, testmon=False, junit=False): - #travis_setup(ctx) +def test_ci_api3_and_osf(ctx, numprocesses=None, coverage=False, testmon=False, junit=False): + #ci_setup(ctx) test_api3(ctx, numprocesses=numprocesses, coverage=coverage, testmon=testmon, junit=junit) @task @@ -559,13 +559,13 @@ def addon_requirements(ctx): @task -def travis_addon_settings(ctx): +def ci_addon_settings(ctx): for directory in os.listdir(settings.ADDON_PATH): path = os.path.join(settings.ADDON_PATH, directory, 'settings') if os.path.isdir(path): try: - open(os.path.join(path, 'local-travis.py')) - ctx.run('cp {path}/local-travis.py {path}/local.py'.format(path=path)) + open(os.path.join(path, 'local-ci.py')) + ctx.run('cp {path}/local-ci.py {path}/local.py'.format(path=path)) except OSError: pass diff --git a/tests/base.py b/tests/base.py index 1d2068189b2..2c36dd801eb 100644 --- a/tests/base.py +++ b/tests/base.py @@ -150,6 +150,9 @@ def setUp(self): class SearchTestCase(unittest.TestCase): def setUp(self): + if settings.SEARCH_ENGINE is None: + return + settings.ELASTIC_INDEX = uuid.uuid1().hex settings.ELASTIC_TIMEOUT = 60 @@ -163,7 +166,8 @@ def setUp(self): def tearDown(self): super().tearDown() - + if settings.SEARCH_ENGINE is None: + return from website.search import elastic_search elastic_search.delete_index(settings.ELASTIC_INDEX) diff --git a/tests/test_views.py b/tests/test_views.py index 406ffb66ede..3d9de04fc36 100644 --- a/tests/test_views.py +++ b/tests/test_views.py @@ -3256,7 +3256,7 @@ def test_register_email_without_accepted_tos(self, _): user = OSFUser.objects.get(username=email) assert user.accepted_terms_of_service is None - @mock.patch('framework.auth.views.send_confirm_email') + @mock.patch('framework.auth.views.send_confirm_email_async') def test_register_scrubs_username(self, _): url = api_url_for('register_user') name = "Eunice O' \"Cornwallis\"" @@ -3438,8 +3438,8 @@ def test_register_after_being_invited_as_unreg_contributor(self, mock_update_sea assert new_user.check_password(password) assert new_user.fullname == real_name - @mock.patch('framework.auth.views.send_confirm_email') - def test_register_sends_user_registered_signal(self, mock_send_confirm_email): + @mock.patch('framework.auth.views.send_confirm_email_async') + def test_register_sends_user_registered_signal(self, mock_send_confirm_email_async): url = api_url_for('register_user') name, email, password = fake.name(), fake_email(), 'underpressure' with capture_signals() as mock_signals: @@ -3453,7 +3453,7 @@ def test_register_sends_user_registered_signal(self, mock_send_confirm_email): } ) assert mock_signals.signals_sent() == {auth.signals.user_registered, auth.signals.unconfirmed_user_created} - 
assert mock_send_confirm_email.called + assert mock_send_confirm_email_async.called @mock.patch('framework.auth.views.mails.send_mail') def test_resend_confirmation(self, send_mail: MagicMock): diff --git a/website/mails/mails.py b/website/mails/mails.py index 8f1c46b3310..da66ad8d083 100644 --- a/website/mails/mails.py +++ b/website/mails/mails.py @@ -459,6 +459,11 @@ def get_english_article(word): subject='Confirmation of your submission to ${provider_name}' ) +REVIEWS_RESUBMISSION_CONFIRMATION = Mail( + 'reviews_resubmission_confirmation', + subject='Confirmation of your submission to ${provider_name}' +) + ACCESS_REQUEST_SUBMITTED = Mail( 'access_request_submitted', subject='An OSF user has requested access to your ${node.project_or_component}' diff --git a/website/profile/views.py b/website/profile/views.py index 3e377157bb6..c4306b92125 100644 --- a/website/profile/views.py +++ b/website/profile/views.py @@ -14,7 +14,7 @@ from framework.auth.decorators import must_be_logged_in from framework.auth.decorators import must_be_confirmed from framework.auth.exceptions import ChangePasswordError -from framework.auth.views import send_confirm_email +from framework.auth.views import send_confirm_email_async from framework.auth.signals import ( user_account_merged, user_account_deactivated, @@ -83,7 +83,7 @@ def resend_confirmation(auth): # TODO: This setting is now named incorrectly. if settings.CONFIRM_REGISTRATIONS_BY_EMAIL: - send_confirm_email(user, email=address) + send_confirm_email_async(user, email=address) user.email_last_sent = timezone.now() user.save() @@ -166,7 +166,7 @@ def update_user(auth): if not throttle_period_expired(user.email_last_sent, settings.SEND_EMAIL_THROTTLE): raise HTTPError(http_status.HTTP_400_BAD_REQUEST, data={'message_long': 'Too many requests. Please wait a while before adding an email to your account.'}) - send_confirm_email(user, email=address) + send_confirm_email_async(user, email=address) ############ # Username # diff --git a/website/project/views/contributor.py b/website/project/views/contributor.py index 593cb57816d..bfe964b2e1e 100644 --- a/website/project/views/contributor.py +++ b/website/project/views/contributor.py @@ -188,10 +188,12 @@ def deserialize_contributors(node, user_dicts, auth, validate=False): # Add unclaimed record if necessary if not contributor.is_registered: - contributor.add_unclaimed_record(node, referrer=auth.user, + contributor.add_unclaimed_record( + node, + referrer=auth.user, given_name=fullname, - email=email) - contributor.save() + email=email, + ) contribs.append({ 'user': contributor, diff --git a/website/reviews/listeners.py b/website/reviews/listeners.py index aced08f644f..27a15c2c337 100644 --- a/website/reviews/listeners.py +++ b/website/reviews/listeners.py @@ -6,9 +6,12 @@ from website.settings import OSF_PREPRINTS_LOGO, OSF_REGISTRIES_LOGO, DOMAIN -# Handle email notifications including: update comment, accept, and reject of submission. @reviews_signals.reviews_email.connect def reviews_notification(self, creator, template, context, action): + """ + Handle email notifications including: update comment, accept, and reject of submission, but not initial submission + or resubmission. + """ # Avoid AppRegistryNotReady error from website.notifications.emails import notify_global_event recipients = list(action.target.contributors) @@ -25,9 +28,14 @@ def reviews_notification(self, creator, template, context, action): ) -# Handle email notifications for a new submission. 
@reviews_signals.reviews_email_submit.connect -def reviews_submit_notification(self, recipients, context): +def reviews_submit_notification(self, recipients, context, template=None): + """ + Handle email notifications for a new submission or a resubmission + """ + if not template: + template = mails.REVIEWS_SUBMISSION_CONFIRMATION + # Avoid AppRegistryNotReady error from website.notifications.emails import get_user_subscriptions @@ -51,15 +59,17 @@ def reviews_submit_notification(self, recipients, context): context['provider_name'] = context['reviewable'].provider.name mails.send_mail( recipient.username, - mails.REVIEWS_SUBMISSION_CONFIRMATION, + template, user=recipient, **context ) -# Handle email notifications to notify moderators of new submissions. @reviews_signals.reviews_email_submit_moderators_notifications.connect def reviews_submit_notification_moderators(self, timestamp, context): + """ + Handle email notifications to notify moderators of new submissions or resubmission. + """ # imports moved here to avoid AppRegistryNotReady error from osf.models import NotificationSubscription from website.profile.utils import get_profile_image_url @@ -87,7 +97,10 @@ def reviews_submit_notification_moderators(self, timestamp, context): context['message'] = f'submitted updates to "{resource.title}".' context['reviews_submission_url'] += f'&revisionId={revision_id}' else: - context['message'] = f'submitted "{resource.title}".' + if context.get('resubmission'): + context['message'] = f'resubmitted "{resource.title}".' + else: + context['message'] = f'submitted "{resource.title}".' # Get NotificationSubscription instance, which contains reference to all subscribers provider_subscription, created = NotificationSubscription.objects.get_or_create( diff --git a/website/settings/defaults.py b/website/settings/defaults.py index 7d6df427336..b305fe0fd88 100644 --- a/website/settings/defaults.py +++ b/website/settings/defaults.py @@ -105,6 +105,7 @@ def parent_dir(path): SEARCH_ENGINE = 'elastic' # Can be 'elastic', or None ELASTIC_URI = '127.0.0.1:9200' +ELASTIC6_URI = os.environ.get('ELASTIC6_URI', '127.0.0.1:9201') ELASTIC_TIMEOUT = 10 ELASTIC_INDEX = 'website' ELASTIC_KWARGS = { diff --git a/website/settings/local-travis.py b/website/settings/local-ci.py similarity index 100% rename from website/settings/local-travis.py rename to website/settings/local-ci.py diff --git a/website/templates/emails/reviews_resubmission_confirmation.html.mako b/website/templates/emails/reviews_resubmission_confirmation.html.mako index a1ac664992d..23ce18781ba 100644 --- a/website/templates/emails/reviews_resubmission_confirmation.html.mako +++ b/website/templates/emails/reviews_resubmission_confirmation.html.mako @@ -1,39 +1,42 @@ -## -*- coding: utf-8 -*- -
-    Hello ${recipient.fullname},
-    The ${document_type}
-    ${reviewable.title}
-    has been successfully re-submitted to ${reviewable.provider.name}.
-    ${reviewable.provider.name} has chosen to moderate their submissions using a
-    pre-moderation workflow, which means your submission is pending until accepted
-    by a moderator.
+<%inherit file="notify_base.mako"/>
+<%def name="content()">
+    Hello ${referrer.fullname},
+    The ${document_type} ${reviewable.title} has been successfully
+    resubmitted to ${reviewable.provider.name}.
+    ${reviewable.provider.name} has chosen to moderate their submissions using a pre-moderation workflow, which
+    means your submission is pending until accepted by a moderator.
     % if not no_future_emails:
     You will receive a separate notification informing you of any status changes.
     % endif
-    You will ${'not receive ' if no_future_emails else 'be automatically subscribed to '}future
-    notification emails for this ${document_type}.
-    If you have been erroneously associated with "${reviewable.title}", then you
-    may visit the ${document_type}'s "Edit" page and remove yourself as a contributor.
-    For more information about ${reviewable.provider.name}, visit
-    ${provider_url} to learn more. To learn about the
-    Open Science Framework, visit https://osf.io/.
-    For questions regarding submission criteria, please email ${provider_contact_email}
-    Sincerely,
-    Your ${reviewable.provider.name} and OSF teams
-    Center for Open Science
-    210 Ridge McIntire Road, Suite 500, Charlottesville, VA 22903
-    Privacy Policy
+    You will ${'not receive ' if no_future_emails else 'be automatically subscribed to '}future notification emails
+    for this ${document_type}.
+    If you have been erroneously associated with "${reviewable.title}", then you may visit the ${document_type}'s
+    "Edit" page and remove yourself as a contributor.
+    For more information about ${reviewable.provider.name}, visit ${provider_url} to
+    learn more. To learn about the Open Science Framework, visit https://osf.io/.
+    For questions regarding submission criteria, please email ${provider_contact_email}
+    Sincerely,
+    Your ${reviewable.provider.name} and OSF teams
+    Center for Open Science
+    210 Ridge McIntire Road, Suite 500, Charlottesville, VA 22903
+    Privacy Policy
+
+</%def>
diff --git a/website/util/metrics.py b/website/util/metrics.py
index bb40dac9915..7324a410138 100644
--- a/website/util/metrics.py
+++ b/website/util/metrics.py
@@ -28,6 +28,14 @@ def campaign_source_tag(campaign_name):
     return f'source:campaign|{campaign_name}'
 
 
+def unregistered_created_source_tag(reffer_id):
+    return f'source:unregistered_created|{reffer_id}'
+
+
+def institution_source_tag(institution_id):
+    return f'source:institution|{institution_id}'
+
+
 def provider_claimed_tag(provider_id, service=None):
     if service:
         return f'claimed:provider|{service}|{provider_id}'
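A minimal sketch of the tag strings the two new metrics helpers emit; the example ids below are invented, and only the module path and function names come from the hunk above.

    # Hypothetical usage of the helpers added in website/util/metrics.py;
    # 'abc12' and 'cos' are made-up ids for illustration only.
    from website.util.metrics import institution_source_tag, unregistered_created_source_tag

    print(unregistered_created_source_tag('abc12'))  # source:unregistered_created|abc12
    print(institution_source_tag('cos'))             # source:institution|cos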