diff --git a/.github/ISSUE_TEMPLATE/release_cleanup.md b/.github/ISSUE_TEMPLATE/release_cleanup.md index 16284f830..a5e57f0a3 100644 --- a/.github/ISSUE_TEMPLATE/release_cleanup.md +++ b/.github/ISSUE_TEMPLATE/release_cleanup.md @@ -19,13 +19,14 @@ TBA - [ ] Review code style and cleanup - [ ] Review and update docs entries -- [ ] Update `SODAR_API_DEFAULT_VERSION` and `SODAR_API_ALLOWED_VERSIONS` +- [ ] Ensure REST API versions are up to date and documented - [ ] Run `npx update-browserslist-db@latest` for Vue app - [ ] Update Vue app version with `npm version` - [ ] Update version in CHANGELOG and SODAR Release Notes doc - [ ] Update version in docs conf.py - [ ] Ensure both SODAR and SODAR Core API versioning is correct in API docs - [ ] Ensure docs can be built without errors +- [ ] Ensure `generateschema` runs without errors or warnings (until in CI) ## Notes diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 4ebb71544..f7b5cd5dc 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -7,12 +7,12 @@ jobs: strategy: matrix: python-version: - - '3.8' - '3.9' - '3.10' + - '3.11' services: postgres: - image: postgres:11 + image: postgres:16 env: POSTGRES_DB: sodar POSTGRES_USER: postgres @@ -24,6 +24,17 @@ jobs: --health-retries 10 ports: - 5432:5432 + # TODO: Remove temporary iRODS postgres server once on iRODS 4.3 + postgres-irods: + image: postgres:11 + env: + POSTGRES_USER: postgres + POSTGRES_PASSWORD: postgres + options: >- + --health-cmd "pg_isready -U postgres" + --health-interval 10s + --health-timeout 5s + --health-retries 10 redis: image: redis options: >- @@ -39,7 +50,7 @@ jobs: env: IRODS_AUTHENTICATION_SCHEME: native IRODS_DEFAULT_HASH_SCHEME: MD5 - IRODS_ICAT_DBSERVER: postgres + IRODS_ICAT_DBSERVER: postgres-irods # TODO: Use main postgres on 4.3 IRODS_ICAT_DBUSER: postgres IRODS_ICAT_DBPASS: postgres IRODS_ZONE_NAME: sodarZone @@ -64,7 +75,7 @@ jobs: uses: actions/checkout@v3 - name: Install project 
Python dependencies run: | - pip install wheel==0.40.0 + pip install wheel==0.42.0 pip install -r requirements/local.txt pip install -r requirements/test.txt - name: Setup Node.js @@ -86,14 +97,15 @@ jobs: coverage report - name: Run Vue app tests run: make test_samplesheets_vue - if: ${{ matrix.python-version == '3.8' }} + if: ${{ matrix.python-version == '3.11' }} - name: Check Python linting run: flake8 . - name: Check Python formatting run: make black arg=--check + if: ${{ matrix.python-version == '3.11' }} - name: Report coverage with Coveralls uses: coverallsapp/github-action@master with: github-token: ${{ secrets.GITHUB_TOKEN }} path-to-lcov: './coverage.lcov' - if: ${{ matrix.python-version == '3.8' }} + if: ${{ matrix.python-version == '3.11' }} diff --git a/.readthedocs.yaml b/.readthedocs.yaml index e72d09389..2c8ba6f42 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -9,7 +9,7 @@ version: 2 build: os: ubuntu-20.04 tools: - python: '3.8' + python: '3.11' # Build documentation in the docs/ directory with Sphinx sphinx: diff --git a/Makefile b/Makefile index a0996e68b..c919238a5 100644 --- a/Makefile +++ b/Makefile @@ -5,6 +5,7 @@ define USAGE= @echo -e "Usage:" @echo -e "\tmake black [arg=--] -- format python with black" @echo -e "\tmake serve [arg=sync] -- start server" +@echo -e "\tmake flake -- run flake8" @echo -e "\tmake celery -- start celery & celerybeat" @echo -e "\tmake demo -- start demo server" @echo -e "\tmake samplesheets_vue -- start samplesheet vue.js app" @@ -39,6 +40,11 @@ endif $(MANAGE) runserver 0.0.0.0:8000 --settings=config.settings.local +.PHONY: flake +flake: + flake8 . 
+ + .PHONY: celery celery: celery -A config worker -l info --beat diff --git a/config/settings/base.py b/config/settings/base.py index 940142d90..178a41c9f 100644 --- a/config/settings/base.py +++ b/config/settings/base.py @@ -8,6 +8,7 @@ https://docs.djangoproject.com/en/3.2/ref/settings/ """ +import itertools import os import re @@ -61,6 +62,7 @@ 'markupfield', # For markdown 'rest_framework', # For API views 'knox', # For token auth + 'social_django', # For OIDC authentication 'docs', # For the online user documentation/manual 'dal', # For user search combo box 'dal_select2', @@ -304,7 +306,7 @@ AUTOSLUG_SLUGIFY_FUNCTION = 'slugify.slugify' # Location of root django.contrib.admin URL, use {% url 'admin:index' %} -ADMIN_URL = r'^admin/' +ADMIN_URL = 'admin/' # Celery @@ -326,6 +328,8 @@ CELERYD_TASK_TIME_LIMIT = 5 * 60 # http://docs.celeryproject.org/en/latest/userguide/configuration.html#task-soft-time-limit CELERYD_TASK_SOFT_TIME_LIMIT = 60 +# https://docs.celeryq.dev/en/latest/userguide/configuration.html#broker-connection-retry-on-startup +CELERY_BROKER_CONNECTION_RETRY_ON_STARTUP = False CELERY_IMPORTS = [ 'landingzones.tasks_celery', 'samplesheets.tasks_celery', @@ -355,7 +359,6 @@ LDAP_ALT_DOMAINS = env.list('LDAP_ALT_DOMAINS', None, default=[]) if ENABLE_LDAP: - import itertools import ldap from django_auth_ldap.config import LDAPSearch @@ -384,8 +387,9 @@ AUTH_LDAP_USER_FILTER = env.str( 'AUTH_LDAP_USER_FILTER', '(sAMAccountName=%(user)s)' ) + AUTH_LDAP_USER_SEARCH_BASE = env.str('AUTH_LDAP_USER_SEARCH_BASE', None) AUTH_LDAP_USER_SEARCH = LDAPSearch( - env.str('AUTH_LDAP_USER_SEARCH_BASE', None), + AUTH_LDAP_USER_SEARCH_BASE, ldap.SCOPE_SUBTREE, AUTH_LDAP_USER_FILTER, ) @@ -417,8 +421,11 @@ AUTH_LDAP2_USER_FILTER = env.str( 'AUTH_LDAP2_USER_FILTER', '(sAMAccountName=%(user)s)' ) + AUTH_LDAP2_USER_SEARCH_BASE = env.str( + 'AUTH_LDAP2_USER_SEARCH_BASE', None + ) AUTH_LDAP2_USER_SEARCH = LDAPSearch( - env.str('AUTH_LDAP2_USER_SEARCH_BASE', None), + 
AUTH_LDAP2_USER_SEARCH_BASE, ldap.SCOPE_SUBTREE, AUTH_LDAP2_USER_FILTER, ) @@ -435,79 +442,40 @@ ) -# SAML configuration +# OpenID Connect (OIDC) configuration # ------------------------------------------------------------------------------ -ENABLE_SAML = env.bool('ENABLE_SAML', False) -SAML2_AUTH = { - # Required setting - # Pysaml2 Saml client settings - # See: https://pysaml2.readthedocs.io/en/latest/howto/config.html - 'SAML_CLIENT_SETTINGS': { - # Optional entity ID string to be passed in the 'Issuer' element of - # authn request, if required by the IDP. - 'entityid': env.str('SAML_CLIENT_ENTITY_ID', 'SODAR'), - 'entitybaseurl': env.str( - 'SAML_CLIENT_ENTITY_URL', 'https://localhost:8000' - ), - # The auto(dynamic) metadata configuration URL of SAML2 - 'metadata': { - 'local': [ - env.str('SAML_CLIENT_METADATA_FILE', 'metadata.xml'), - ], - }, - 'service': { - 'sp': { - 'idp': env.str( - 'SAML_CLIENT_IPD', - 'https://sso.hpc.bihealth.org/auth/realms/cubi', - ), - # Keycloak expects client signature - 'authn_requests_signed': 'true', - # Enforce POST binding which is required by keycloak - 'binding': 'urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST', - }, - }, - 'key_file': env.str('SAML_CLIENT_KEY_FILE', 'key.pem'), - 'cert_file': env.str('SAML_CLIENT_CERT_FILE', 'cert.pem'), - 'xmlsec_binary': env.str('SAML_CLIENT_XMLSEC1', '/usr/bin/xmlsec1'), - 'encryption_keypairs': [ - { - 'key_file': env.str('SAML_CLIENT_KEY_FILE', 'key.pem'), - 'cert_file': env.str('SAML_CLIENT_CERT_FILE', 'cert.pem'), - } - ], - }, - # Custom target redirect URL after the user get logged in. - # Defaults to /admin if not set. This setting will be overwritten if you - # have parameter ?next= specificed in the login URL. 
- 'DEFAULT_NEXT_URL': '/', - # # Optional settings below - # 'NEW_USER_PROFILE': { - # 'USER_GROUPS': [], # The default group name when a new user logs in - # 'ACTIVE_STATUS': True, # The default active status for new users - # 'STAFF_STATUS': True, # The staff status for new users - # 'SUPERUSER_STATUS': False, # The superuser status for new users - # }, - 'ATTRIBUTES_MAP': env.dict( - 'SAML_ATTRIBUTES_MAP', - default={ - # Change values to corresponding SAML2 userprofile attributes. - 'email': 'Email', - 'username': 'UserName', - 'first_name': 'FirstName', - 'last_name': 'LastName', - }, - ), - # 'TRIGGER': { - # 'FIND_USER': 'path.to.your.find.user.hook.method', - # 'NEW_USER': 'path.to.your.new.user.hook.method', - # 'CREATE_USER': 'path.to.your.create.user.hook.method', - # 'BEFORE_LOGIN': 'path.to.your.login.hook.method', - # }, - # Custom URL to validate incoming SAML requests against - # 'ASSERTION_URL': 'https://your.url.here', -} +ENABLE_OIDC = env.bool('ENABLE_OIDC', False) + +if ENABLE_OIDC: + AUTHENTICATION_BACKENDS = tuple( + itertools.chain( + ('social_core.backends.open_id_connect.OpenIdConnectAuth',), + AUTHENTICATION_BACKENDS, + ) + ) + TEMPLATES[0]['OPTIONS']['context_processors'] += [ + 'social_django.context_processors.backends', + 'social_django.context_processors.login_redirect', + ] + SOCIAL_AUTH_JSONFIELD_ENABLED = True + SOCIAL_AUTH_JSONFIELD_CUSTOM = 'django.db.models.JSONField' + SOCIAL_AUTH_USER_MODEL = AUTH_USER_MODEL + SOCIAL_AUTH_ADMIN_USER_SEARCH_FIELDS = [ + 'username', + 'name', + 'first_name', + 'last_name', + 'email', + ] + SOCIAL_AUTH_OIDC_OIDC_ENDPOINT = env.str( + 'SOCIAL_AUTH_OIDC_OIDC_ENDPOINT', None + ) + SOCIAL_AUTH_OIDC_KEY = env.str('SOCIAL_AUTH_OIDC_KEY', 'CHANGEME') + SOCIAL_AUTH_OIDC_SECRET = env.str('SOCIAL_AUTH_OIDC_SECRET', 'CHANGEME') + SOCIAL_AUTH_OIDC_USERNAME_KEY = env.str( + 'SOCIAL_AUTH_OIDC_USERNAME_KEY', 'username' + ) # Logging diff --git a/config/urls.py b/config/urls.py index 0ab844bf9..feee7eb8d 100644 
--- a/config/urls.py +++ b/config/urls.py @@ -1,5 +1,5 @@ from django.conf import settings -from django.conf.urls import include, url +from django.conf.urls import include from django.conf.urls.static import static from django.contrib import admin from django.contrib.auth import views as auth_views @@ -26,7 +26,7 @@ def handler500(request, *args, **argv): urlpatterns = [ path(route='', view=HomeView.as_view(), name='home'), # Django Admin, use {% url 'admin:index' %} - url(settings.ADMIN_URL, admin.site.urls), + path(settings.ADMIN_URL, admin.site.urls), # Login and logout path( route='login/', @@ -40,6 +40,8 @@ def handler500(request, *args, **argv): path('api/auth/', include('knox.urls')), # Iconify SVG icons path('icons/', include('dj_iconify.urls')), + # Social auth for OIDC support + path('social/', include('social_django.urls')), # General site apps path('alerts/adm/', include('adminalerts.urls')), path('alerts/app/', include('appalerts.urls')), diff --git a/docs_manual/source/_static/sodar_ui/timeline.png b/docs_manual/source/_static/sodar_ui/timeline.png index 7df999756..b54b1d619 100644 Binary files a/docs_manual/source/_static/sodar_ui/timeline.png and b/docs_manual/source/_static/sodar_ui/timeline.png differ diff --git a/docs_manual/source/_static/sodar_ui/user_profile.png b/docs_manual/source/_static/sodar_ui/user_profile.png index 42d00fdec..7679620fc 100644 Binary files a/docs_manual/source/_static/sodar_ui/user_profile.png and b/docs_manual/source/_static/sodar_ui/user_profile.png differ diff --git a/docs_manual/source/_static/sodar_ui/user_profile_settings.png b/docs_manual/source/_static/sodar_ui/user_profile_settings.png new file mode 100644 index 000000000..2a33e011b Binary files /dev/null and b/docs_manual/source/_static/sodar_ui/user_profile_settings.png differ diff --git a/docs_manual/source/admin_commands.rst b/docs_manual/source/admin_commands.rst index cc59b7026..5c90c9972 100644 --- a/docs_manual/source/admin_commands.rst +++ 
b/docs_manual/source/admin_commands.rst @@ -19,6 +19,9 @@ These commands originate in SODAR Core. More information can be found in the Add remote site for remote project synchronization. ``batchupdateroles`` Batch update project roles and send invites. +``checkusers`` + Check the status of LDAP users. Returns info on users whose accounts are + disabled or removed on an LDAP server. ``cleanappsettings`` Clean up unused application settings. ``deletecache`` diff --git a/docs_manual/source/admin_install.rst b/docs_manual/source/admin_install.rst index d92ce0254..cd5003a3a 100644 --- a/docs_manual/source/admin_install.rst +++ b/docs_manual/source/admin_install.rst @@ -17,26 +17,22 @@ environment. All these components are required for running the complete feature set of SODAR. However, it is also possible to run some of these outside of the Docker Compose network if e.g. you already have a separate iRODS server running. -- Essential SODAR Components +Essential SODAR Components - ``sodar-web``: The SODAR web server for main program logic and UIs. - ``sodar-celeryd-default``: Celery daemon for background jobs. - ``sodar-celerybeat``: Celery service for periodic tasks. -- Database Servers - - ``postgres``: PostgreSQL server for SODAR and iRODS databases. +Database Servers + - ``postgres``: PostgreSQL server for SODAR and iRODS databases. Minimum + supported version is v12, recommended version is v16. - ``redis``: In-memory database for Celery jobs and caching. -- iRODS Servers - - ``irods``: An iRODS iCAT server for file storage. +iRODS Servers + - ``irods``: An iRODS iCAT server for file storage. The minimum supported + version is v4.3. - ``davrods``: iRODS WebDAV server for web access and IGV/UCSC integration. -- Networking +Networking - ``traefik``: Reverse proxy for TLS/SSL routing. - ``sssd``: System Security Service Daemon for LDAP/AD authentication. -.. note:: - - Currently the sodar-docker-compose environment only supports iRODS v4.2. 
- Support for v4.3 is being worked on. iRODS v4.3 will be the default - supported version from SODAR v1.0 onwards. - Quickstart Guide ================ @@ -60,15 +56,15 @@ Prerequisites Ensure your system matches the following operating system and software requirements. -- Hardware +Hardware - ~10 GB of disk space for the Docker images -- Operating System +Operating System - A modern Linux distribution that is `supported by Docker `_. - Outgoing HTTPS connections to the internet are allowed to download data and Docker images. - Server ports 80 and 443 are open and free on the host. -- Software +Software - `Docker `_ - `Docker Compose `_ - `OpenSSL `_ @@ -279,7 +275,7 @@ production is generally recommended only for experienced SODAR admins. .. note:: - SODAR v1.0 will be upgraded to use iRODS 4.3 and Postgres v16. This version + SODAR v1.0 has been upgraded to use iRODS 4.3 and Postgres v16. This version may require special steps for upgrading an existing environment. Make sure to refer to the sodar-docker-compose README for instructions. @@ -348,7 +344,7 @@ Production Prerequisites In addition to the :ref:`general prerequisites `, we recommend the following for a production deployment of SODAR: -- Recommended Hardware +Recommended Hardware - Memory: 64 GB of RAM - CPU: 16 cores - Disk: 600+ GB of free and **fast** disk space diff --git a/docs_manual/source/api_documentation.rst b/docs_manual/source/api_documentation.rst index d3a246762..c209e2351 100644 --- a/docs_manual/source/api_documentation.rst +++ b/docs_manual/source/api_documentation.rst @@ -20,7 +20,7 @@ The API supports authentication through Knox authentication tokens as well as logging in using your SODAR username and password. Tokens are the recommended method for security purposes. -For token access, first retrieve your token using the **API Tokens** site app +For token access, first retrieve your token using the :ref:`ui_api_tokens` app on the SODAR web UI. 
Note that you can you only see the token once when creating it. @@ -34,22 +34,32 @@ follows: Versioning ---------- +.. note:: + + API versioning has had a major overhaul in SODAR v1.0. Some changes are + breaking with no backwards compatibility. Please review this part of the + document carefully and adjust your clients accordingly. + The SODAR REST API uses accept header versioning. While specifying the desired API version in your HTTP requests is optional, it is **strongly recommended**. This ensures you will get the appropriate return data and avoid running into unexpected incompatibility issues. To enable versioning, add the ``Accept`` header to your request with the -following media type and version syntax. Replace the version number with your -expected version. +appropriate media type of your API and the expected version. From SODAR v1.0 +onwards, both the media type and the version are specific for a SODAR Server or +SODAR Core application, as each provides their independent API which may +introduce new versions independent of other APIs. + +Example for the SODAR Server samplesheets API: .. code-block:: console - Accept: application/vnd.bihealth.sodar+json; version=0.15.0 + Accept: application/vnd.bihealth.sodar.samplesheets+json; version=1.0 + +For detailed media types and versioning information of each API, see the +respective application API documentation. -Specific sections of the SODAR API may require their own accept header. See the -exact header requirement in the respective documentation on each section of the -API. Model Access and Permissions ---------------------------- @@ -71,3 +81,22 @@ specified. If return data is not specified in the documentation of an API view, it will return the appropriate HTTP status code along with an optional ``detail`` JSON field upon a successfully processed request. + +Pagination +---------- + +From SODAR V1.0 onwards, list views support pagination unless otherwise +specified. 
Pagination can be enabled by providing the ``?page=x`` query string +in the API request. This will change the return data into a paginated format. +Example: + +.. code-block:: python + + { + 'count' 170, + 'next': 'api/url?page=3', + 'previous': 'api/url?page=1', + 'results': [ + # ... + ] + } diff --git a/docs_manual/source/api_examples.rst b/docs_manual/source/api_examples.rst index 544172551..53601d3b4 100644 --- a/docs_manual/source/api_examples.rst +++ b/docs_manual/source/api_examples.rst @@ -40,10 +40,12 @@ the SODAR API: # Headers for requests: # Token authorization header (required) auth_header = {'Authorization': 'token {}'.format(api_token)} - # Use core_headers for project management API endpoints - core_headers = {**auth_header, 'Accept': 'application/vnd.bihealth.sodar-core+json; version=0.13.4'} - # Use sodar_headers for sample sheet and landing zone API endpoints - sodar_headers = {**auth_header, 'Accept': 'application/vnd.bihealth.sodar+json; version=0.15.0'} + # Use project_headers for project management API endpoints + project_headers = {**auth_header, 'Accept': 'application/vnd.bihealth.sodar-core.projectroles+json; version=1.0'} + # Use the following headers for sample sheet and landing zone API endpoints + sheet_headers = {**auth_header, 'Accept': 'application/vnd.bihealth.sodar.samplesheets+json; version=1.0'} + zone_headers = {**auth_header, 'Accept': 'application/vnd.bihealth.sodar.landingzones+json; version=1.0'} + .. note:: @@ -58,7 +60,7 @@ categories and projects available to you with the following request: .. code-block:: python url = sodar_url + '/project/api/list' - projects = requests.get(url, headers=core_headers).json() + projects = requests.get(url, headers=project_headers).json() Create Project @@ -72,14 +74,15 @@ required for most subsequent operations you wish to perform on that project. 
url = sodar_url + '/project/api/create' data = {'title': 'New Project via API', 'type': 'PROJECT', 'parent': category_uuid, 'owner': user_uuid} - project = requests.post(url, data=data, headers=core_headers).json() + project = requests.post(url, data=data, headers=project_headers).json() project_uuid = project['sodar_uuid'] .. note:: - Note the use of ``core_headers`` here, as the project management API comes + Note the use of ``project_headers`` here, as the project management API comes from the `SODAR Core `_ package, which - has its own API and versioning. + has its own API and versioning. See the related + `SODAR Core API documentation `_. Assign a Member Role @@ -94,7 +97,7 @@ including its UUID for future updates. other_user_uuid = '33333333-3333-3333-3333-333333333333' url = sodar_url + '/project/api/roles/create/' + project_uuid data = {'role': 'project contributor', 'user': other_user_uuid} - response_data = requests.post(url, data=data, headers=core_headers).json() + response_data = requests.post(url, data=data, headers=project_headers).json() role_uuid = response_data.get('role_uuid') @@ -111,7 +114,7 @@ archived ISA-Tab. url = sodar_url + '/samplesheets/api/import/' + project_uuid sheet_path = '/tmp/your_isa_tab.zip' files = {'file': ('your_isa_tab.zip', open(sheet_path, 'rb'), 'application/zip')} - response = requests.post(url, files=files, headers=sodar_headers) + response = requests.post(url, files=files, headers=sheet_headers) To ensure your import was successful, you can retrieve investigation information via the API. This also returns e.g. the UUIDs for studies and assays: @@ -119,7 +122,7 @@ via the API. This also returns e.g. the UUIDs for studies and assays: .. 
code-block:: python url = sodar_url + '/samplesheets/api/investigation/retrieve/' + project_uuid - inv_info = requests.get(url, headers=sodar_headers).json() + inv_info = requests.get(url, headers=sheet_headers).json() Export Sample Sheets @@ -132,7 +135,7 @@ providing the TSV data to e.g. parsers for further editing. .. code-block:: python url = sodar_url + '/samplesheets/api/export/json/' + project_uuid - response_data = requests.get(url, headers=sodar_headers).json() + response_data = requests.get(url, headers=sheet_headers).json() print(response_data.keys()) # dict_keys(['investigation', 'studies', 'assays', 'date_modified']) @@ -167,7 +170,7 @@ the path to the sample repository collection in your project. .. code-block:: python url = sodar_url + '/samplesheets/api/irods/collections/create/' + project_uuid - response = requests.post(url, headers=sodar_headers) + response = requests.post(url, headers=sheet_headers) irods_path = response.json().get('path') The API request below initiates the process for creating a landing zone. You @@ -178,7 +181,7 @@ investigation information API endpoint as detailed above. url = sodar_url + '/landingzones/api/create/' + project_uuid data = {'assay': assay_uuid} - response = requests.post(url, data=data, headers=sodar_headers) + response = requests.post(url, data=data, headers=zone_headers) zone_uuid = response.json().get('sodar_uuid') As with most landing zone operations, the landing zone creation process is @@ -188,7 +191,7 @@ before proceeding with file uploads: .. code-block:: python url = sodar_url + '/landingzones/api/retrieve/' + zone_uuid - response_data = requests.get(url, headers=sodar_headers).json() + response_data = requests.get(url, headers=zone_headers).json() if response_data.get('status') == 'ACTIVE': pass # OK to proceed @@ -199,7 +202,7 @@ moving process as follows: .. 
code-block:: python url = sodar_url + '/landingzones/api/submit/move/' + zone_uuid - response = requests.post(url, headers=sodar_headers) + response = requests.post(url, headers=zone_headers) Once the landing zone status is returned as ``MOVED``, the landing zone files have been moved into the project sample data repository and the zone has been @@ -208,6 +211,6 @@ deleted. .. code-block:: python url = sodar_url + '/landingzones/api/retrieve/' + zone_uuid - response_data = requests.get(url, headers=sodar_headers).json() + response_data = requests.get(url, headers=zone_headers).json() if response_data.get('status') == 'MOVED': pass # Moving was successful diff --git a/docs_manual/source/api_projectroles.rst b/docs_manual/source/api_projectroles.rst index da9bd8de8..5e2017329 100644 --- a/docs_manual/source/api_projectroles.rst +++ b/docs_manual/source/api_projectroles.rst @@ -1,26 +1,60 @@ .. _api_projectroles: -Project Management API -^^^^^^^^^^^^^^^^^^^^^^ +Project Management APIs +^^^^^^^^^^^^^^^^^^^^^^^ -The REST API for project access and management operations is described in this -document. +The REST APIs for project access and management operations is described in this +document. These APIs are provided by the SODAR Core package. Thus, detailed +documentation can be found in the +`SODAR Core documentation `_. +Projectroles API +================ + +This API handles the management of projects, project members and app settings. + +Versioning +---------- + +Media Type + ``application/vnd.bihealth.sodar-core.projectroles+json`` +Current Version + ``1.0`` +Accepted Versions + ``1.0`` +Header Example + ``Accept: application/vnd.bihealth.sodar-core.projectroles+json; version=1.0`` + API Views -========= +--------- + +The projectoles API is provided by the SODAR Core package. Documentation for the +API views can be found in the +`Projectroles REST API documentation `_. -The project management API is provided by the SODAR Core package. 
The -documentation for the REST API views can be found in the -`SODAR Core Documentation `_. +Timeline API +============ + +This API can be used to query events in the +:ref:`timeline ` audit trail logs. Versioning -========== +---------- -For accept header versioning, the following media type and version are expected -in the current SODAR version: +Media Type + ``application/vnd.bihealth.sodar-core.timeline+json`` +Current Version + ``1.0`` +Accepted Versions + ``1.0`` +Header Example + ``Accept: application/vnd.bihealth.sodar-core.timeline+json; version=1.0`` -.. code-block:: console +API Views +--------- - Accept: application/vnd.bihealth.sodar-core+json; version=0.13.3 +The timeline API is provided by the SODAR Core package. Documentation for the +API views can be found in the +`Timeline REST API documentation `_. diff --git a/docs_manual/source/dev_install.rst b/docs_manual/source/dev_install.rst index 0915ad1f4..cfe4b44e5 100644 --- a/docs_manual/source/dev_install.rst +++ b/docs_manual/source/dev_install.rst @@ -21,8 +21,8 @@ System requirements for SODAR development are as follows: - Other Ubuntu versions and Linux distributions may work but are not supported. The instructions in this section assume the use of Ubuntu 20.04. -- Python 3.8, 3.9 or 3.10 - - 3.8 recommended. +- Python 3.9, 3.10 or 3.11 + - 3.11 is recommended. 
- `Docker `_ - `Docker Compose `_ - `OpenSSL `_ @@ -34,7 +34,7 @@ SODAR Docker Compose Setup In addition to the ``sodar-server`` repository, the following components are required for SODAR development: -- PostgreSQL +- PostgreSQL v12+ (v16 recommended) - Redis - Main iRODS server - Test iRODS server diff --git a/docs_manual/source/index.rst b/docs_manual/source/index.rst index a6cfd4bc6..026b09f5f 100644 --- a/docs_manual/source/index.rst +++ b/docs_manual/source/index.rst @@ -116,7 +116,7 @@ Table of Contents :name: api_docs api_documentation - Project Management API + Project Management APIs Sample Sheets API Landing Zones API iRODS Info API diff --git a/docs_manual/source/introduction.rst b/docs_manual/source/introduction.rst index 2bc1cee66..d09c4f4e8 100644 --- a/docs_manual/source/introduction.rst +++ b/docs_manual/source/introduction.rst @@ -68,8 +68,7 @@ Notable Features ================ - Accessibility - * User access via one or multiple LDAP/AD services, Single Sign-On via SAML - and/or local accounts + * User access via one or multiple LDAP/AD services and/or local accounts * Access tokens can be can be generated for REST API use * UUIDs and permanent URLs for all relevant objects in the system - iRODS Integration diff --git a/docs_manual/source/ui_project_timeline.rst b/docs_manual/source/ui_project_timeline.rst index 5a7545599..eeedf3f4f 100644 --- a/docs_manual/source/ui_project_timeline.rst +++ b/docs_manual/source/ui_project_timeline.rst @@ -9,7 +9,7 @@ and changes to the sample sheets. The activity is displayed as a list of events. .. figure:: _static/sodar_ui/timeline.png :align: center - :scale: 45% + :scale: 55% Project timeline @@ -19,17 +19,17 @@ Timestamp Time of the event's creation. This doubles as a link to a modal which displays the event status history. This can be useful information e.g. in case of asynchronous background events. -Event - Type of the event along with the SODAR application which created the event. 
User User initiating the event. Description - Description of the event. Objects included in the description have a link - displayed as a clock notation. Clicking this opens a list of all events - related to the object. The title of the object also often works as a link - to the related application. Possible extra JSON data is displayed as a link - in the right hand side of the field. The link opens a modal displaying the - JSON data. + Description of the event. The description is preceded by a badge displaying + the event type. Objects included in the description are linked to the + respective application. Objects also have a history link displayed as a + clock icon. Clicking on the icon opens a list of all events related to the + object within the project. The title of the object also often works as a + link to the related application. Possible extra JSON data is displayed as a + link in the right hand side of the field. The link opens a modal displaying + the JSON data. Status Current status of the event. diff --git a/docs_manual/source/ui_project_update.rst b/docs_manual/source/ui_project_update.rst index 24f0ab5b4..55754c13e 100644 --- a/docs_manual/source/ui_project_update.rst +++ b/docs_manual/source/ui_project_update.rst @@ -23,7 +23,8 @@ Project Metadata The following project metadata and settings are available for updating: Title - Update the project title. + Update the project title. The title must be unique within the parent + category. Parent Move the project to another category. Options only contain categories for which you have sufficient access. @@ -32,23 +33,29 @@ Description ReadMe Set an optional ReadMe document with for the project. MarkDown notation can be used. -Public Guest Access +Public guest access Enable public guest access to the project for anyone using SODAR. This should be used with caution and is generally intended for demonstration projects. 
If your SODAR server allows anonymous users, this will grant guest access to anyone browsing the site. -Notify Members of Landing Zone Uploads +Enable project on {target remote site} + Enable access to project for project members on the selected target site. + Target sites for which project owner/delegation modifying is enabled by an + administrator are listed here. Deselecting a previously selected site will + revoke access on the target site. For more information, see + `Remote Site Access documentation `_. +Notify members of landing zone uploads Send project members email updates for file uploads using landing zones. For more information, see the :ref:`Landing Zones app documentation `. -Allow Sample Sheet Editing +Allow sample sheet editing Enable or disable editability of sample sheets in the project. -Enable Sheet Synchronization +Enable sheet synchronization Enable sample sheet synchronization. For more information, see :ref:`app_samplesheets_sync`. -URL for Sheet Synchronization +URL for sheet synchronization REST API URL of remote project for sample sheet synchronization. -Token for Sheet Synchronization +Token for sheet synchronization Token string for sample sheet synchronization. IGV session genome Genome used in generating IGV session files for the project. The name needs @@ -69,9 +76,9 @@ VCF paths to omit from IGV sessions cancer and germline projects. Behaves similarly to the related BAM/CRAM setting. You need to run :guilabel:`Update Sheet Cache` in the Sample Sheet application for changes to take effect. -IP Restrict +IP restrict Restrict project access to specific IP addresses if this is set. -IP Allow List +IP allow list List of accepted IP addresses for the IP Restrict setting. 
When creating a new project, the following fields are included: diff --git a/docs_manual/source/ui_user_profile.rst b/docs_manual/source/ui_user_profile.rst index 94a72b4b3..5d54cf732 100644 --- a/docs_manual/source/ui_user_profile.rst +++ b/docs_manual/source/ui_user_profile.rst @@ -3,38 +3,76 @@ User Profile ^^^^^^^^^^^^ -The user profile screen displays information regarding your account. +The user profile screen displays information regarding your account. The user +profile displays your information and provides functionality for managing your +user details. .. figure:: _static/sodar_ui/user_profile.png :align: center - :scale: 65% + :scale: 60% User profile view + +User Settings +============= + Through the user profile, you can modify global user-specific settings for your account by clicking the :guilabel:`Update Settings` button. The following user settings are available: -Sample Sheet Table Height +Sample sheet table height Choose the maximum height of study and assay tables in the sample sheets app from a set of options. In browsing mode, table height will fit the table content if the height of content is lower than the setting. In edit mode, the chosen table height will be maintained regardless of content. -Display Template Output Directory Field +Display template output directory field Display or hide the "output directory" field in sample sheet template creation form. This can be enabled if there is need to control the output directory in sample sheet ISA-Tab exports. Defaults to false. -Display Project UUID Copying Link +Receive email for admin alerts + Receive email notifications for admin alerts where the admin has enabled + email alering. If unchecked, you can still see the alerts in the SODAR GUI. +Display project UUID copying link Enabling this will add an icon next to the project title on each project view. Clicking it will copy the project identifier (UUID) into the clipboard. 
-Additional Email - Additional email addresses for the user can be input here. If email sending - is enabled on the server, notification emails will be sent to these - addresses in addition to the default user email. Separate multiple addresses - with the semicolon character (``;``). +Receive email for project updates + Receive email notifications for category or project creation, updating, + moving and archiving. +Receive email for project membership updates + Receive email notifications for category or project membership updates and + member invitation activity. + + +.. figure:: _static/sodar_ui/user_profile_settings.png + :align: center + :scale: 60% + + User settings view + + +Additional Emails +================= + +You can configure additional emails for your user account in case you want to +receive automated emails to addresses other than your account's primary +address. The user profile view displays additional emails and provides controls +for managing these addresses. + +A new email address can be added by clicking on the :guilabel:`Add Email` +button. After creation, a verification email will be sent to the specified +address. Opening a link contained in the email will verify the email. Only +verified email addresses will receive automated emails from SODAR. + +For each email address displayed in the list, there are controls to re-send the +verification email in case of an unverified email and to delete the address. + + +Update User Details +=================== If local users are enabled on the site and you have a local SODAR account, the profile also includes the :guilabel:`Update User` button. This opens a form in which you can update your details and password. This form is **not** available -for users authenticating with an existing user account via LDAP or SAML. +for users authenticating with an existing user account via LDAP. 
diff --git a/irodsinfo/tests/test_permissions.py b/irodsinfo/tests/test_permissions.py index 12d3f246a..0436638c1 100644 --- a/irodsinfo/tests/test_permissions.py +++ b/irodsinfo/tests/test_permissions.py @@ -3,10 +3,10 @@ from django.urls import reverse # Projectroles dependency -from projectroles.tests.test_permissions import TestSiteAppPermissionBase +from projectroles.tests.test_permissions import SiteAppPermissionTestBase -class TestIrodsinfoPermissions(TestSiteAppPermissionBase): +class TestIrodsinfoPermissions(SiteAppPermissionTestBase): """Tests for irodsinfo UI view permissions""" def test_get_irods_info(self): diff --git a/irodsinfo/tests/test_permissions_api.py b/irodsinfo/tests/test_permissions_api.py index ba42883c9..03a04da1d 100644 --- a/irodsinfo/tests/test_permissions_api.py +++ b/irodsinfo/tests/test_permissions_api.py @@ -3,10 +3,10 @@ from django.urls import reverse # Projectroles dependency -from projectroles.tests.test_permissions import TestSiteAppPermissionBase +from projectroles.tests.test_permissions import SiteAppPermissionTestBase -class TestIrodsConfigRetrieveAPIView(TestSiteAppPermissionBase): +class TestIrodsConfigRetrieveAPIView(SiteAppPermissionTestBase): """Tests for irodsinfo API""" def test_get_irods_config(self): diff --git a/isatemplates/plugins.py b/isatemplates/plugins.py index 0f012cd43..a0cc4eb41 100644 --- a/isatemplates/plugins.py +++ b/isatemplates/plugins.py @@ -3,7 +3,11 @@ from django.urls import reverse # Projectroles dependency -from projectroles.plugins import SiteAppPluginPoint, BackendPluginPoint +from projectroles.plugins import ( + SiteAppPluginPoint, + BackendPluginPoint, + PluginObjectLink, +) from isatemplates.api import ISATemplateAPI from isatemplates.models import CookiecutterISATemplate @@ -43,24 +47,24 @@ class SiteAppPlugin(SiteAppPluginPoint): def get_object_link(self, model_str, uuid): """ - Return URL for referring to a object used by the app, along with a - label to be shown to the user for 
linking. + Return URL referring to an object used by the app, along with a name to + be shown to the user for linking. :param model_str: Object class (string) :param uuid: sodar_uuid of the referred object - :return: Dict or None if not found + :return: PluginObjectLink or None if not found """ obj = self.get_object(eval(model_str), uuid) if not obj: return None if obj.__class__ == CookiecutterISATemplate: - return { - 'url': reverse( + return PluginObjectLink( + url=reverse( 'isatemplates:detail', kwargs={'cookiecutterisatemplate': obj.sodar_uuid}, ), - 'label': obj.description, - } + name=obj.description, + ) def get_statistics(self): """ diff --git a/isatemplates/tests/test_permissions.py b/isatemplates/tests/test_permissions.py index d154ad952..9f17e8ece 100644 --- a/isatemplates/tests/test_permissions.py +++ b/isatemplates/tests/test_permissions.py @@ -6,7 +6,7 @@ from django.urls import reverse # Projectroles dependency -from projectroles.tests.test_permissions import TestSiteAppPermissionBase +from projectroles.tests.test_permissions import SiteAppPermissionTestBase from isatemplates.tests.test_models import ( CookiecutterISATemplateMixin, @@ -16,7 +16,7 @@ class TestISATemplatesPermissions( - CookiecutterISATemplateMixin, TestSiteAppPermissionBase + CookiecutterISATemplateMixin, SiteAppPermissionTestBase ): """Tests for isatemplates UI view permissions""" diff --git a/isatemplates/tests/test_ui.py b/isatemplates/tests/test_ui.py index 244538199..a27345277 100644 --- a/isatemplates/tests/test_ui.py +++ b/isatemplates/tests/test_ui.py @@ -12,7 +12,7 @@ from selenium.webdriver.common.by import By # Projectroles dependency -from projectroles.tests.test_ui import TestUIBase +from projectroles.tests.test_ui import UITestBase from isatemplates.models import ISA_FILE_PREFIXES from isatemplates.tests.test_models import ( @@ -29,7 +29,7 @@ BACKEND_PLUGINS_NO_TPL.remove('isatemplates_backend') -class TestISATemplateListView(CookiecutterISATemplateMixin, TestUIBase): 
+class TestISATemplateListView(CookiecutterISATemplateMixin, UITestBase): """Tests for ISATemplateListView UI""" def setUp(self): @@ -106,7 +106,7 @@ def test_get_disable_backend(self): class TestISATemplateDetailView( - CookiecutterISATemplateMixin, CookiecutterISAFileMixin, TestUIBase + CookiecutterISATemplateMixin, CookiecutterISAFileMixin, UITestBase ): """Tests for ISATemplateDetailView UI""" @@ -156,7 +156,7 @@ def test_get(self): class TestCUBIISATemplateDetailView( - CookiecutterISATemplateMixin, CookiecutterISAFileMixin, TestUIBase + CookiecutterISATemplateMixin, CookiecutterISAFileMixin, UITestBase ): """Tests for CUBIISATemplateDetailView UI""" diff --git a/isatemplates/tests/test_views.py b/isatemplates/tests/test_views.py index da6ea28a5..b2819f2d9 100644 --- a/isatemplates/tests/test_views.py +++ b/isatemplates/tests/test_views.py @@ -16,7 +16,7 @@ from test_plus.test import TestCase # Timeline dependency -from timeline.models import ProjectEvent +from timeline.models import TimelineEvent from isatemplates.forms import ( NO_JSON_MSG, @@ -250,7 +250,8 @@ def test_post_zip(self): self.assertEqual(CookiecutterISATemplate.objects.count(), 0) self.assertEqual(CookiecutterISAFile.objects.count(), 0) self.assertEqual( - ProjectEvent.objects.filter(event_name='template_create').count(), 0 + TimelineEvent.objects.filter(event_name='template_create').count(), + 0, ) data = { @@ -290,7 +291,8 @@ def test_post_zip(self): } self.assertEqual(model_to_dict(file_obj), expected) self.assertEqual( - ProjectEvent.objects.filter(event_name='template_create').count(), 1 + TimelineEvent.objects.filter(event_name='template_create').count(), + 1, ) def test_post_zip_no_json(self): @@ -615,7 +617,8 @@ def test_post(self): for f in self._get_files(): self.assertEqual(f.content, '') self.assertEqual( - ProjectEvent.objects.filter(event_name='template_update').count(), 0 + TimelineEvent.objects.filter(event_name='template_update').count(), + 0, ) data = { @@ -642,7 +645,8 @@ def 
test_post(self): with open(fp, 'rb') as f: self.assertEqual(file_obj.content, f.read().decode('utf-8')) self.assertEqual( - ProjectEvent.objects.filter(event_name='template_update').count(), 1 + TimelineEvent.objects.filter(event_name='template_update').count(), + 1, ) def test_post_no_file(self): @@ -655,7 +659,8 @@ def test_post_no_file(self): for f in self._get_files(): self.assertEqual(f.content, '') self.assertEqual( - ProjectEvent.objects.filter(event_name='template_update').count(), 0 + TimelineEvent.objects.filter(event_name='template_update').count(), + 0, ) data = { 'description': TEMPLATE_DESC_UPDATE, @@ -673,7 +678,8 @@ def test_post_no_file(self): for f in self._get_files(): self.assertEqual(f.content, '') self.assertEqual( - ProjectEvent.objects.filter(event_name='template_update').count(), 1 + TimelineEvent.objects.filter(event_name='template_update').count(), + 1, ) def test_post_no_name(self): @@ -712,7 +718,8 @@ def test_post_no_json(self): for f in self._get_files(): self.assertEqual(f.content, '') self.assertEqual( - ProjectEvent.objects.filter(event_name='template_update').count(), 0 + TimelineEvent.objects.filter(event_name='template_update').count(), + 0, ) def test_post_no_investigation(self): @@ -817,7 +824,8 @@ def test_get(self): def test_post(self): """Test POST""" self.assertEqual( - ProjectEvent.objects.filter(event_name='template_delete').count(), 0 + TimelineEvent.objects.filter(event_name='template_delete').count(), + 0, ) self.assertEqual(CookiecutterISATemplate.objects.count(), 1) self.assertEqual(CookiecutterISAFile.objects.count(), 3) @@ -827,7 +835,8 @@ def test_post(self): self.assertEqual(CookiecutterISATemplate.objects.count(), 0) self.assertEqual(CookiecutterISAFile.objects.count(), 0) self.assertEqual( - ProjectEvent.objects.filter(event_name='template_delete').count(), 1 + TimelineEvent.objects.filter(event_name='template_delete').count(), + 1, ) diff --git a/isatemplates/views.py b/isatemplates/views.py index 
71d2b12cb..bf70cc856 100644 --- a/isatemplates/views.py +++ b/isatemplates/views.py @@ -203,8 +203,9 @@ class ISATemplateDeleteView( slug_field = 'sodar_uuid' template_name = 'isatemplates/template_confirm_delete.html' - def get_success_url(self): - return self.handle_modify(self.object, 'delete') + def form_valid(self, form): + self.object.delete() + return redirect(self.handle_modify(self.object, 'delete')) class ISATemplateExportView(LoggedInPermissionMixin, View): diff --git a/landingzones/configapps/bih_proteomics_smb/urls.py b/landingzones/configapps/bih_proteomics_smb/urls.py index 8c6581432..a86e75996 100644 --- a/landingzones/configapps/bih_proteomics_smb/urls.py +++ b/landingzones/configapps/bih_proteomics_smb/urls.py @@ -1,4 +1,4 @@ -from django.conf.urls import url +from django.urls import path from . import views @@ -6,8 +6,8 @@ app_name = 'landingzones.configapps.bih_proteomics_smb' urlpatterns = [ - url( - regex=r'^(?P[0-9a-f-]+)$', + path( + route='', view=views.ZoneTicketGetView.as_view(), name='ticket_get', ) diff --git a/landingzones/plugins.py b/landingzones/plugins.py index 1c0e5e014..482a2e5e9 100644 --- a/landingzones/plugins.py +++ b/landingzones/plugins.py @@ -11,6 +11,7 @@ from projectroles.plugins import ( ProjectAppPluginPoint, ProjectModifyPluginMixin, + PluginObjectLink, get_backend_api, ) @@ -123,31 +124,30 @@ class ProjectAppPlugin( def get_object_link(self, model_str, uuid): """ - Return URL for referring to a object used by the app, along with a - label to be shown to the user for linking. + Return URL referring to an object used by the app, along with a name to + be shown to the user for linking. 
:param model_str: Object class (string) :param uuid: sodar_uuid of the referred object - :return: Dict or None if not found + :return: PluginObjectLink or None if not found """ obj = self.get_object(eval(model_str), uuid) if not obj: return None if obj.__class__ == LandingZone and obj.status != ZONE_STATUS_MOVED: - return { - 'url': reverse( + return PluginObjectLink( + url=reverse( 'landingzones:list', kwargs={'project': obj.project.sodar_uuid}, ) + '#' + str(obj.sodar_uuid), - 'label': obj.title, - } - elif obj.__class__ == Assay: - return { - 'url': obj.get_url(), - 'label': obj.get_display_name(), - } + name=obj.title, + ) + if obj.__class__ == Assay: + return PluginObjectLink( + url=obj.get_url(), name=obj.get_display_name() + ) def get_statistics(self): """ diff --git a/landingzones/tests/test_permissions.py b/landingzones/tests/test_permissions.py index 09b40926a..cba66844c 100644 --- a/landingzones/tests/test_permissions.py +++ b/landingzones/tests/test_permissions.py @@ -5,7 +5,7 @@ # Projectroles dependency from projectroles.models import SODAR_CONSTANTS -from projectroles.tests.test_permissions import TestProjectPermissionBase +from projectroles.tests.test_permissions import ProjectPermissionTestBase # Samplesheets dependency from samplesheets.tests.test_io import SampleSheetIOMixin, SHEET_DIR @@ -33,7 +33,7 @@ class LandingzonesPermissionTestBase( LandingZoneMixin, SampleSheetIOMixin, - TestProjectPermissionBase, + ProjectPermissionTestBase, ): """Base class for landingzones permissions tests""" diff --git a/landingzones/tests/test_permissions_api.py b/landingzones/tests/test_permissions_api.py index 5b8a612a3..2a6c08513 100644 --- a/landingzones/tests/test_permissions_api.py +++ b/landingzones/tests/test_permissions_api.py @@ -5,7 +5,7 @@ # Projectroles dependency from projectroles.models import SODAR_CONSTANTS -from projectroles.tests.test_permissions_api import TestProjectAPIPermissionBase +from projectroles.tests.test_permissions_api import 
ProjectAPIPermissionTestBase # Samplesheets dependency from samplesheets.tests.test_io import SampleSheetIOMixin, SHEET_DIR @@ -32,7 +32,7 @@ class ZoneAPIPermissionTestBase( LandingZoneMixin, SampleSheetIOMixin, - TestProjectAPIPermissionBase, + ProjectAPIPermissionTestBase, ): """Base class for landingzones REST API view permission tests""" diff --git a/landingzones/tests/test_ui.py b/landingzones/tests/test_ui.py index 9925feb29..56e0d927f 100644 --- a/landingzones/tests/test_ui.py +++ b/landingzones/tests/test_ui.py @@ -12,7 +12,7 @@ # Projectroles dependency from projectroles.app_settings import AppSettingAPI -from projectroles.tests.test_ui import TestUIBase +from projectroles.tests.test_ui import UITestBase # Samplesheets dependency from samplesheets.tests.test_io import SampleSheetIOMixin, SHEET_DIR @@ -36,7 +36,7 @@ class LandingZoneUITestBase( - SampleSheetIOMixin, SheetConfigMixin, LandingZoneMixin, TestUIBase + SampleSheetIOMixin, SheetConfigMixin, LandingZoneMixin, UITestBase ): """Base class for landingzones UI tests""" @@ -51,7 +51,7 @@ def _setup_investigation(self): def _assert_element(self, by, element, expected=True): """Assert element existence for an already logged in user""" - # TODO: Add this into TestUIBase (see bihealth/sodar-core#1104) + # TODO: Add this into UITestBase (see bihealth/sodar-core#1104) if expected: self.assertIsNotNone(self.selenium.find_element(by, element)) else: diff --git a/landingzones/tests/test_views_api.py b/landingzones/tests/test_views_api.py index cf068201d..5dab0c546 100644 --- a/landingzones/tests/test_views_api.py +++ b/landingzones/tests/test_views_api.py @@ -7,7 +7,7 @@ # Projectroles dependency from projectroles.models import SODAR_CONSTANTS from projectroles.plugins import get_backend_api -from projectroles.tests.test_views_api import TestAPIViewsBase +from projectroles.tests.test_views_api import APIViewTestBase # Samplesheets dependency from samplesheets.tests.test_io import SampleSheetIOMixin, SHEET_DIR 
@@ -38,7 +38,7 @@ class TestLandingZoneAPIViewsBase( - LandingZoneMixin, SampleSheetIOMixin, TestAPIViewsBase + LandingZoneMixin, SampleSheetIOMixin, APIViewTestBase ): """Base class for Landingzones API view testing""" diff --git a/landingzones/tests/test_views_taskflow.py b/landingzones/tests/test_views_taskflow.py index 0826c638a..d8ae22702 100644 --- a/landingzones/tests/test_views_taskflow.py +++ b/landingzones/tests/test_views_taskflow.py @@ -28,7 +28,7 @@ # Timeline dependency -from timeline.models import ProjectEvent +from timeline.models import TimelineEvent from landingzones.constants import ( ZONE_STATUS_CREATING, @@ -188,7 +188,7 @@ def test_create_zone(self): """Test landingzones creation with taskflow""" self.assertEqual(LandingZone.objects.count(), 0) self.assertEqual( - ProjectEvent.objects.filter(event_name='zone_create').count(), 0 + TimelineEvent.objects.filter(event_name='zone_create').count(), 0 ) self.assertEqual(len(mail.outbox), 1) @@ -210,7 +210,9 @@ def test_create_zone(self): self.assert_irods_coll(zone) for c in ZONE_BASE_COLLS: self.assert_irods_coll(zone, c, False) - tl_event = ProjectEvent.objects.filter(event_name='zone_create').first() + tl_event = TimelineEvent.objects.filter( + event_name='zone_create' + ).first() expected_extra = { 'title': zone.title, 'assay': str(zone.assay.sodar_uuid), @@ -246,7 +248,9 @@ def test_create_zone_colls(self): self.assert_zone_count(1) zone = LandingZone.objects.first() self.assert_zone_status(zone, ZONE_STATUS_ACTIVE) - tl_event = ProjectEvent.objects.filter(event_name='zone_create').first() + tl_event = TimelineEvent.objects.filter( + event_name='zone_create' + ).first() self.assertEqual(tl_event.extra_data['create_colls'], True) self.assertEqual(tl_event.extra_data['restrict_colls'], False) self.assert_irods_coll(zone) @@ -660,7 +664,7 @@ def test_move_invalid_status(self): self.assertEqual(len(self.assay_coll.data_objects), 0) self.assertEqual(len(mail.outbox), 1) self.assertEqual( - 
ProjectEvent.objects.filter(event_name='zone_move').count(), 0 + TimelineEvent.objects.filter(event_name='zone_move').count(), 0 ) self.assertEqual( AppAlert.objects.filter(alert_name='zone_move').count(), 0 @@ -678,7 +682,7 @@ def test_move_invalid_status(self): self.assertEqual(len(self.zone_coll.data_objects), 2) self.assertEqual(len(self.assay_coll.data_objects), 0) self.assertEqual(len(mail.outbox), 1) - tl_event = ProjectEvent.objects.filter(event_name='zone_move').first() + tl_event = TimelineEvent.objects.filter(event_name='zone_move').first() self.assertIsNone(tl_event) self.assertEqual( AppAlert.objects.filter(alert_name='zone_move').count(), 0 @@ -695,7 +699,7 @@ def test_move_lock_failure(self): self.assertEqual(len(self.assay_coll.data_objects), 0) self.assertEqual(len(mail.outbox), 1) self.assertEqual( - ProjectEvent.objects.filter(event_name='zone_move').count(), 0 + TimelineEvent.objects.filter(event_name='zone_move').count(), 0 ) self.assertEqual( AppAlert.objects.filter(alert_name='zone_move').count(), 0 @@ -709,8 +713,8 @@ def test_move_lock_failure(self): self.assertEqual(len(self.zone_coll.data_objects), 2) self.assertEqual(len(self.assay_coll.data_objects), 0) self.assertEqual(len(mail.outbox), 1) # TODO: Should this send email? 
- tl_event = ProjectEvent.objects.filter(event_name='zone_move').first() - self.assertIsInstance(tl_event, ProjectEvent) + tl_event = TimelineEvent.objects.filter(event_name='zone_move').first() + self.assertIsInstance(tl_event, TimelineEvent) self.assertEqual(tl_event.get_status().status_type, ZONE_STATUS_FAILED) # TODO: Create app alerts for async failures (see #1499) self.assertEqual( diff --git a/ontologyaccess/tests/test_permissions.py b/ontologyaccess/tests/test_permissions.py index 1b0acce8c..b244654f0 100644 --- a/ontologyaccess/tests/test_permissions.py +++ b/ontologyaccess/tests/test_permissions.py @@ -3,7 +3,7 @@ from django.urls import reverse # Projectroles dependency -from projectroles.tests.test_permissions import TestSiteAppPermissionBase +from projectroles.tests.test_permissions import SiteAppPermissionTestBase from ontologyaccess.models import DEFAULT_TERM_URL from ontologyaccess.tests.test_models import OBOFormatOntologyModelMixin @@ -29,7 +29,7 @@ class OntologyAccessPermissionTestBase( - OBOFormatOntologyModelMixin, TestSiteAppPermissionBase + OBOFormatOntologyModelMixin, SiteAppPermissionTestBase ): """Base class for ontologyaccess UI view permission tests""" diff --git a/requirements/base.txt b/requirements/base.txt index a77640a9c..b1bf87435 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -1,50 +1,56 @@ # Wheel # NOTE: For best results wheel should be installed separately before other deps -wheel==0.40.0 +wheel==0.42.0 # Setuptools -setuptools==67.6.0 +setuptools==70.0.0 # Django -django==3.2.25 +django==4.2.16 # Configuration -django-environ==0.10.0 +django-environ==0.11.2 # Forms -django-crispy-forms==2.0 -crispy-bootstrap4==2022.1 +django-crispy-forms==2.1 +crispy-bootstrap4==2024.1 # Models -django-model-utils==4.3.1 +django-model-utils==4.4.0 # Password storage argon2-cffi==21.3.0 # Python-PostgreSQL Database Adapter -psycopg2-binary==2.9.5 +psycopg2-binary==2.9.9 # Unicode slugification awesome-slugify==1.6.5 # Time 
zones support -pytz==2022.7.1 +pytz==2024.1 + +# SVG icon support +django-iconify==0.3 + +# OpenID Connect (OIDC) authentication support +social-auth-app-django==5.4.0 # Redis support -redis>=4.5.4, <4.6 +redis>=5.0.2, <5.1 # Profiling django-cprofile-middleware==1.0.5 -# Versioning -versioneer==0.28 - # Online documentation via django-docs -docutils==0.18.1 -Sphinx==6.2.1 # NOTE: sphinx-rtd-theme v1.2.2 forces <7 +docutils==0.20.1 +Sphinx==7.2.6 django-docs==0.3.3 -sphinx-rtd-theme==1.2.2 -sphinxcontrib-youtube==1.2.0 +sphinx-rtd-theme==2.0.0 +sphinxcontrib-youtube==1.4.1 + +# Versioning +versioneer==0.29 ##################### # SODAR Core imports @@ -54,27 +60,27 @@ sphinxcontrib-youtube==1.2.0 rules==3.3 # REST framework -djangorestframework==3.14.0 +djangorestframework==3.15.2 # Token authentication django-rest-knox==4.2.0 # Markdown field support -markdown==3.4.1 +markdown==3.5.2 django-markupfield==2.0.1 django-pagedown==2.2.1 -mistune==2.0.5 +mistune==3.0.2 # Pin to avoid issue with v3.9.5 # See issue #166 and bihealth/sodar-core#1225 -django-autocomplete-light==3.9.4 +django-autocomplete-light==3.11.0 # SODAR Core -django-sodar-core==0.13.4 -# -e git+https://github.com/bihealth/sodar-core.git@be012e5536bacf8bfbfe95e3c930324edae0309b#egg=django-sodar-core +django-sodar-core==1.0.2 +# -e git+https://github.com/bihealth/sodar-core.git@dac0069d08bb7d2d6e68a30b607eaa86e5cc4425#egg=django-sodar-core # Celery -celery==5.2.7 +celery==5.3.6 #################### # SODAR app imports diff --git a/requirements/local.txt b/requirements/local.txt index 35d2a276f..73bf77dbe 100644 --- a/requirements/local.txt +++ b/requirements/local.txt @@ -1,9 +1,14 @@ # Local development dependencies go here -r base.txt -django-extensions==3.2.1 +django-extensions==3.2.3 Werkzeug==3.0.3 -django-debug-toolbar==3.8.1 +django-debug-toolbar==4.3.0 # improved REPL ipdb==0.13.13 + +# OpenAPI support +inflection>=0.5.1, <0.6 +pyyaml>=6.0.1, <6.1 +uritemplate>=4.1.1, <4.2 diff --git 
a/requirements/test.txt b/requirements/test.txt index 896b65109..a9945a904 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,21 +1,21 @@ # Test dependencies go here. -r base.txt -flake8==6.0.0 -django-test-plus==2.2.1 -factory-boy==3.2.1 +flake8==7.0.0 +django-test-plus==2.2.3 +factory-boy==3.3.0 coverage==6.5.0 # NOTE: coveralls 3.3.1 requires <7.0 -django-coverage-plugin==3.0.0 +django-coverage-plugin==3.1.0 # pytest -pytest-django==4.5.2 -pytest-sugar==0.9.6 +pytest-django==4.8.0 +pytest-sugar==1.0.0 # Selenium for UI testing -selenium==4.8.2 +selenium==4.18.1 # BeautifulSoup for HTML testing -beautifulsoup4==4.11.2 +beautifulsoup4==4.12.3 # Black for formatting black==24.3.0 diff --git a/samplesheets/plugins.py b/samplesheets/plugins.py index 52baed0fe..1c547a048 100644 --- a/samplesheets/plugins.py +++ b/samplesheets/plugins.py @@ -20,6 +20,8 @@ from projectroles.plugins import ( ProjectAppPluginPoint, ProjectModifyPluginMixin, + PluginObjectLink, + PluginSearchResult, get_backend_api, ) from projectroles.utils import build_secret @@ -300,46 +302,44 @@ class ProjectAppPlugin( def get_object_link(self, model_str, uuid): """ - Return URL for referring to a object used by the app, along with a - label to be shown to the user for linking. + Return URL referring to an object used by the app, along with a name to + be shown to the user for linking. 
:param model_str: Object class (string) :param uuid: sodar_uuid of the referred object - :return: Dict or None if not found + :return: PluginObjectLink or None if not found """ obj = self.get_object(eval(model_str), uuid) if not obj: return None if obj.__class__ == IrodsAccessTicket: - return { - 'url': reverse( + return PluginObjectLink( + url=reverse( 'samplesheets:irods_tickets', kwargs={'project': obj.get_project().sodar_uuid}, ), - 'label': obj.get_display_name(), - } + name=obj.get_display_name(), + ) if obj.__class__ in [Investigation, Study, Assay]: - return { - 'url': obj.get_url(), - 'label': ( + return PluginObjectLink( + url=obj.get_url(), + name=( obj.title if obj.__class__ == Investigation else obj.get_display_name() ), - } + ) url_kwargs = {'project': obj.project.sodar_uuid} if obj.__class__ == ISATab: - return { - 'url': reverse('samplesheets:versions', kwargs=url_kwargs), - 'label': obj.get_full_name(), - } - elif obj.__class__ == IrodsDataRequest: - return { - 'url': reverse( - 'samplesheets:irods_requests', kwargs=url_kwargs - ), - 'label': obj.get_display_name(), - } + return PluginObjectLink( + url=reverse('samplesheets:versions', kwargs=url_kwargs), + name=obj.get_full_name(), + ) + if obj.__class__ == IrodsDataRequest: + return PluginObjectLink( + url=reverse( + 'samplesheets:irods_requests', kwargs=url_kwargs + ), + name=obj.get_display_name(), + ) @classmethod def _get_search_materials(cls, search_terms, user, keywords, item_types): @@ -449,32 +449,34 @@ def search(self, search_terms, user, search_type=None, keywords=None): :param user: User object for user initiating the search :param search_type: String :param keywords: List (optional) - :return: Dict + :return: List of PluginSearchResult objects """ irods_backend = get_backend_api('omics_irods') - results = {} + ret = [] # Materials if not search_type or search_type in MATERIAL_SEARCH_TYPES: item_types = ['SOURCE', 'SAMPLE'] if search_type in MATERIAL_SEARCH_TYPES: item_types = [search_type.upper()] - 
results['materials'] = { - 'title': 'Sources and Samples', - 'search_types': ['source', 'sample'], - 'items': self._get_search_materials( + r = PluginSearchResult( + category='materials', + title='Sources and Samples', + search_types=['source', 'sample'], + items=self._get_search_materials( search_terms, user, keywords, item_types ), - } + ) + ret.append(r) # iRODS files if irods_backend and (not search_type or search_type == 'file'): - results['files'] = { - 'title': 'Sample Files in iRODS', - 'search_types': ['file'], - 'items': self._get_search_files( - search_terms, user, irods_backend - ), - } - return results + r = PluginSearchResult( + category='files', + title='Sample Files in iRODS', + search_types=['file'], + items=self._get_search_files(search_terms, user, irods_backend), + ) + ret.append(r) + return ret def get_project_list_value(self, column_id, project, user): """ diff --git a/samplesheets/studyapps/cancer/urls.py b/samplesheets/studyapps/cancer/urls.py index 03e80db17..1684c39a5 100644 --- a/samplesheets/studyapps/cancer/urls.py +++ b/samplesheets/studyapps/cancer/urls.py @@ -1,4 +1,4 @@ -from django.conf.urls import url +from django.urls import path from . import views @@ -6,8 +6,8 @@ app_name = 'samplesheets.studyapps.cancer' urlpatterns = [ - url( - regex=r'^render/igv/(?P[0-9a-f-]+)(\..*)?$', + path( + route='render/igv/', view=views.IGVSessionFileRenderView.as_view(), name='igv', ) diff --git a/samplesheets/studyapps/germline/urls.py b/samplesheets/studyapps/germline/urls.py index 844e6dc12..1f7445a23 100644 --- a/samplesheets/studyapps/germline/urls.py +++ b/samplesheets/studyapps/germline/urls.py @@ -1,4 +1,4 @@ -from django.conf.urls import url +from django.urls import path from . 
import views @@ -6,8 +6,8 @@ app_name = 'samplesheets.studyapps.germline' urlpatterns = [ - url( - regex=r'^render/igv/(?P[0-9a-f-]+)(\..*)?$', + path( + route='render/igv/', view=views.IGVSessionFileRenderView.as_view(), name='igv', ) diff --git a/samplesheets/tests/test_commands.py b/samplesheets/tests/test_commands.py index 18835784d..72acad22b 100644 --- a/samplesheets/tests/test_commands.py +++ b/samplesheets/tests/test_commands.py @@ -19,7 +19,7 @@ from sodarcache.models import JSONCacheItem # Timeline dependency -from timeline.models import ProjectEvent +from timeline.models import TimelineEvent from samplesheets.management.commands.normalizesheets import ( LIB_NAME, @@ -80,7 +80,7 @@ def _assert_study_table_header(self, study_tables, assay, header, expected): def _assert_tl_event(self, expected): self.assertEqual( - ProjectEvent.objects.filter( + TimelineEvent.objects.filter( event_name='sheet_normalize', project=self.project ).count(), expected, diff --git a/samplesheets/tests/test_permissions.py b/samplesheets/tests/test_permissions.py index 9fdb4ac8b..68eb2c0b2 100644 --- a/samplesheets/tests/test_permissions.py +++ b/samplesheets/tests/test_permissions.py @@ -7,7 +7,7 @@ # Projectroles dependency from projectroles.app_settings import AppSettingAPI -from projectroles.tests.test_permissions import TestProjectPermissionBase +from projectroles.tests.test_permissions import ProjectPermissionTestBase from projectroles.utils import build_secret from samplesheets.models import ( @@ -37,7 +37,7 @@ class SamplesheetsPermissionTestBase( - SampleSheetIOMixin, TestProjectPermissionBase + SampleSheetIOMixin, ProjectPermissionTestBase ): """Base test class for samplesheets UI view permissions""" @@ -307,7 +307,7 @@ def test_get_sync(self): self.assert_response(self.url, bad_users, 302) -class TestSheetTemplateCreateView(TestProjectPermissionBase): +class TestSheetTemplateCreateView(ProjectPermissionTestBase): """Permission tests for SheetTemplateCreateView""" def 
setUp(self): diff --git a/samplesheets/tests/test_permissions_ajax.py b/samplesheets/tests/test_permissions_ajax.py index c8b76b718..593388733 100644 --- a/samplesheets/tests/test_permissions_ajax.py +++ b/samplesheets/tests/test_permissions_ajax.py @@ -5,7 +5,7 @@ # Projectroles dependency from projectroles.app_settings import AppSettingAPI -from projectroles.tests.test_permissions import TestProjectPermissionBase +from projectroles.tests.test_permissions import ProjectPermissionTestBase from projectroles.utils import build_secret from samplesheets.models import ( @@ -31,7 +31,7 @@ class SampleSheetsAjaxPermissionTestBase( - SampleSheetIOMixin, TestProjectPermissionBase + SampleSheetIOMixin, ProjectPermissionTestBase ): """Base test class for samplesheets Ajax view permissions""" diff --git a/samplesheets/tests/test_permissions_api.py b/samplesheets/tests/test_permissions_api.py index 96f65084c..fb815a7c2 100644 --- a/samplesheets/tests/test_permissions_api.py +++ b/samplesheets/tests/test_permissions_api.py @@ -11,8 +11,8 @@ # Projectroles dependency from projectroles.models import SODAR_CONSTANTS from projectroles.tests.test_models import RemoteSiteMixin, RemoteProjectMixin -from projectroles.tests.test_permissions import TestProjectPermissionBase -from projectroles.tests.test_permissions_api import TestProjectAPIPermissionBase +from projectroles.tests.test_permissions import ProjectPermissionTestBase +from projectroles.tests.test_permissions_api import ProjectAPIPermissionTestBase from samplesheets.models import ( Investigation, @@ -40,7 +40,7 @@ class TestInvestigationRetrieveAPIView( SampleSheetIOMixin, - TestProjectAPIPermissionBase, + ProjectAPIPermissionTestBase, ): """Tests for InvestigationRetrieveAPIView permissions""" @@ -120,7 +120,7 @@ def test_get_archive(self): self.assert_response_api(url, self.anonymous, 401) -class TestSheetImportAPIView(SampleSheetIOMixin, TestProjectAPIPermissionBase): +class TestSheetImportAPIView(SampleSheetIOMixin, 
ProjectAPIPermissionTestBase): """Tests for SheetImportAPIView permissions""" def _cleanup_import(self): @@ -291,7 +291,7 @@ def test_post_archive(self): class TestSheetISAExportAPIView( SampleSheetIOMixin, - TestProjectAPIPermissionBase, + ProjectAPIPermissionTestBase, ): """Tests for SheetISAExportAPIView permissions""" @@ -364,7 +364,7 @@ def test_get_archive(self): class TestIrodsAccessTicketListAPIView( - SampleSheetIOMixin, IrodsAccessTicketMixin, TestProjectAPIPermissionBase + SampleSheetIOMixin, IrodsAccessTicketMixin, ProjectAPIPermissionTestBase ): """Test permissions for IrodsAccessTicketListAPIView""" @@ -438,7 +438,7 @@ def test_get_archive(self): class TestIrodsAccessTicketRetrieveAPIView( - SampleSheetIOMixin, IrodsAccessTicketMixin, TestProjectAPIPermissionBase + SampleSheetIOMixin, IrodsAccessTicketMixin, ProjectAPIPermissionTestBase ): """Test permissions for IrodsAccessTicketRetrieveAPIView""" @@ -509,7 +509,7 @@ def test_get_archive(self): class TestIrodsDataRequestRetrieveAPIView( - IrodsDataRequestMixin, TestProjectAPIPermissionBase + IrodsDataRequestMixin, ProjectAPIPermissionTestBase ): """Tests for TestIrodsDataRequestRetrieveAPIView permissions""" @@ -581,7 +581,7 @@ def test_get_archive(self): self.assert_response_api(self.url, self.anonymous, 401) -class TestIrodsDataRequestListAPIView(TestProjectAPIPermissionBase): +class TestIrodsDataRequestListAPIView(ProjectAPIPermissionTestBase): """Tests for TestIrodsDataRequestListAPIView permissions""" def setUp(self): @@ -646,7 +646,7 @@ def test_get_archive(self): class TestIrodsDataRequestRejectAPIView( - IrodsDataRequestMixin, TestProjectAPIPermissionBase + IrodsDataRequestMixin, ProjectAPIPermissionTestBase ): """Test permissions for TestIrodsDataRequestRejectAPIView""" @@ -728,7 +728,7 @@ def test_reject_archive(self): class TestIrodsDataRequestDestroyAPIView( - SampleSheetIOMixin, IrodsDataRequestMixin, TestProjectAPIPermissionBase + SampleSheetIOMixin, IrodsDataRequestMixin, 
ProjectAPIPermissionTestBase ): """Test permissions for IrodsDataRequestDestroyAPIView""" @@ -816,7 +816,7 @@ class TestRemoteSheetGetAPIView( SampleSheetIOMixin, RemoteSiteMixin, RemoteProjectMixin, - TestProjectPermissionBase, + ProjectPermissionTestBase, ): """Tests for RemoteSheetGetAPIView permissions""" diff --git a/samplesheets/tests/test_tasks_celery_taskflow.py b/samplesheets/tests/test_tasks_celery_taskflow.py index ba97509ca..03ae778b6 100644 --- a/samplesheets/tests/test_tasks_celery_taskflow.py +++ b/samplesheets/tests/test_tasks_celery_taskflow.py @@ -18,7 +18,7 @@ from taskflowbackend.tests.base import TaskflowViewTestBase # Timeline dependency -from timeline.models import ProjectEvent +from timeline.models import TimelineEvent from samplesheets.models import ISATab from samplesheets.tasks_celery import ( @@ -79,7 +79,7 @@ def test_update_cache(self): self.assertEqual( AppAlert.objects.filter(alert_name=CACHE_UPDATE_EVENT).count(), 0 ) - self.assertEqual(ProjectEvent.objects.count(), 2) + self.assertEqual(TimelineEvent.objects.count(), 2) update_project_cache_task( self.project.sodar_uuid, @@ -109,7 +109,7 @@ def test_update_cache(self): ) alert = AppAlert.objects.order_by('-pk').first() self.assertTrue(alert.message.endswith(CACHE_ALERT_MESSAGE)) - self.assertEqual(ProjectEvent.objects.count(), 3) + self.assertEqual(TimelineEvent.objects.count(), 3) def test_update_cache_no_alert(self): """Test cache update with app alert disabled""" @@ -119,7 +119,7 @@ def test_update_cache_no_alert(self): self.assertEqual( AppAlert.objects.filter(alert_name=CACHE_UPDATE_EVENT).count(), 0 ) - self.assertEqual(ProjectEvent.objects.count(), 2) + self.assertEqual(TimelineEvent.objects.count(), 2) update_project_cache_task( self.project.sodar_uuid, self.user.sodar_uuid, add_alert=False @@ -131,7 +131,7 @@ def test_update_cache_no_alert(self): self.assertEqual( AppAlert.objects.filter(alert_name=CACHE_UPDATE_EVENT).count(), 0 ) - 
self.assertEqual(ProjectEvent.objects.count(), 3) + self.assertEqual(TimelineEvent.objects.count(), 3) def test_update_cache_no_user(self): """Test cache update with no user""" @@ -141,7 +141,7 @@ def test_update_cache_no_user(self): self.assertEqual( AppAlert.objects.filter(alert_name=CACHE_UPDATE_EVENT).count(), 0 ) - self.assertEqual(ProjectEvent.objects.count(), 2) + self.assertEqual(TimelineEvent.objects.count(), 2) update_project_cache_task(self.project.sodar_uuid, None, add_alert=True) @@ -151,7 +151,7 @@ def test_update_cache_no_user(self): self.assertEqual( AppAlert.objects.filter(alert_name=CACHE_UPDATE_EVENT).count(), 0 ) - self.assertEqual(ProjectEvent.objects.count(), 3) + self.assertEqual(TimelineEvent.objects.count(), 3) class TestSheetRemoteSyncTask(SheetRemoteSyncTestBase): diff --git a/samplesheets/tests/test_ui.py b/samplesheets/tests/test_ui.py index f3b5222ba..72b5b09bf 100644 --- a/samplesheets/tests/test_ui.py +++ b/samplesheets/tests/test_ui.py @@ -18,7 +18,7 @@ # Projectroles dependency from projectroles.app_settings import AppSettingAPI from projectroles.plugins import get_backend_api -from projectroles.tests.test_ui import TestUIBase +from projectroles.tests.test_ui import UITestBase from samplesheets.forms import TPL_DIR_FIELD, TPL_DIR_LABEL from samplesheets.models import ( @@ -59,7 +59,7 @@ CONFIG_DATA_UPDATED = json.load(fp) -class SamplesheetsUITestBase(SampleSheetIOMixin, SheetConfigMixin, TestUIBase): +class SamplesheetsUITestBase(SampleSheetIOMixin, SheetConfigMixin, UITestBase): """Base view samplesheets view UI tests""" def setup_investigation(self, config_data=None): @@ -609,7 +609,7 @@ def test_render_davrods_button(self): class TestSheetVersionCompareView( - SampleSheetIOMixin, SheetConfigMixin, TestUIBase + SampleSheetIOMixin, SheetConfigMixin, UITestBase ): """Tests for sheet version compare view UI""" @@ -647,7 +647,7 @@ def test_render(self): class TestSheetVersionCompareFileView( - SampleSheetIOMixin, SheetConfigMixin, 
TestUIBase + SampleSheetIOMixin, SheetConfigMixin, UITestBase ): """Tests for sheet version compare file view UI""" diff --git a/samplesheets/tests/test_views.py b/samplesheets/tests/test_views.py index 3c57c9adc..08c1e1259 100644 --- a/samplesheets/tests/test_views.py +++ b/samplesheets/tests/test_views.py @@ -1497,9 +1497,7 @@ class TestProjectSearchResultsView(SamplesheetsViewTestBase): """Tests for ProjectSearchResultsView view with sample sheet input""" def _get_items(self, response): - return response.context['app_results'][0]['results']['materials'][ - 'items' - ] + return response.context['app_results'][0]['results']['materials'].items def setUp(self): super().setUp() diff --git a/samplesheets/tests/test_views_ajax.py b/samplesheets/tests/test_views_ajax.py index 2e5e8e5b0..e17be5e18 100644 --- a/samplesheets/tests/test_views_ajax.py +++ b/samplesheets/tests/test_views_ajax.py @@ -17,7 +17,7 @@ from sodarcache.models import JSONCacheItem # Timeline dependency -from timeline.models import ProjectEvent +from timeline.models import TimelineEvent # Ontologyaccess dependency from ontologyaccess.io import OBOFormatOntologyIO @@ -1834,7 +1834,7 @@ def test_post_study_column(self): ) self.assertEqual(sheet_config, CONFIG_DATA_DEFAULT) self.assertEqual( - ProjectEvent.objects.filter( + TimelineEvent.objects.filter( project=self.project, app=APP_NAME, event_name='field_update', @@ -1871,7 +1871,7 @@ def test_post_study_column(self): expected, ) self.assertEqual( - ProjectEvent.objects.filter( + TimelineEvent.objects.filter( project=self.project, app=APP_NAME, event_name='field_update', diff --git a/samplesheets/tests/test_views_api.py b/samplesheets/tests/test_views_api.py index 061619aa7..0fd345db9 100644 --- a/samplesheets/tests/test_views_api.py +++ b/samplesheets/tests/test_views_api.py @@ -14,13 +14,13 @@ from projectroles.models import SODAR_CONSTANTS from projectroles.plugins import get_backend_api from projectroles.tests.test_models import RemoteSiteMixin, 
RemoteProjectMixin -from projectroles.tests.test_views_api import TestAPIViewsBase +from projectroles.tests.test_views_api import APIViewTestBase # Sodarcache dependency from sodarcache.models import JSONCacheItem # Timeline dependency -from timeline.models import ProjectEvent +from timeline.models import TimelineEvent # Landingzones dependency from landingzones.models import LandingZone @@ -93,7 +93,7 @@ # TODO: Add testing for study table cache updates -class SampleSheetAPIViewTestBase(SampleSheetIOMixin, TestAPIViewsBase): +class SampleSheetAPIViewTestBase(SampleSheetIOMixin, APIViewTestBase): """Base view for samplesheets API views tests""" @@ -923,9 +923,9 @@ class TestIrodsDataRequestDestroyAPIView( """Tests for IrodsDataRequestDestroyAPIView""" def _assert_tl_count(self, count): - """Assert timeline ProjectEvent count""" + """Assert timeline TimelineEvent count""" self.assertEqual( - ProjectEvent.objects.filter( + TimelineEvent.objects.filter( event_name='irods_request_delete' ).count(), count, diff --git a/samplesheets/tests/test_views_api_taskflow.py b/samplesheets/tests/test_views_api_taskflow.py index da68980ab..38e17b9d9 100644 --- a/samplesheets/tests/test_views_api_taskflow.py +++ b/samplesheets/tests/test_views_api_taskflow.py @@ -20,7 +20,7 @@ from projectroles.plugins import get_backend_api # Timeline dependency -from timeline.models import ProjectEvent +from timeline.models import TimelineEvent # Taskflowbackend dependency from taskflowbackend.tests.base import ( @@ -830,9 +830,9 @@ class TestIrodsDataRequestUpdateAPIView( """Tests for IrodsDataRequestUpdateAPIView""" def _assert_tl_count(self, count): - """Assert timeline ProjectEvent count""" + """Assert timeline TimelineEvent count""" self.assertEqual( - ProjectEvent.objects.filter( + TimelineEvent.objects.filter( event_name='irods_request_update' ).count(), count, diff --git a/samplesheets/tests/test_views_taskflow.py b/samplesheets/tests/test_views_taskflow.py index 179882775..07a82300f 
100644 --- a/samplesheets/tests/test_views_taskflow.py +++ b/samplesheets/tests/test_views_taskflow.py @@ -20,13 +20,13 @@ from projectroles.app_settings import AppSettingAPI from projectroles.models import SODAR_CONSTANTS from projectroles.plugins import get_backend_api -from projectroles.views import MSG_NO_AUTH +from projectroles.views import NO_AUTH_MSG # Appalerts dependency from appalerts.models import AppAlert # Timeline dependency -from timeline.models import ProjectEvent +from timeline.models import TimelineEvent # Taskflowbackend dependency from taskflowbackend.tests.base import TaskflowViewTestBase @@ -185,7 +185,7 @@ def get_tl_event_count(cls, action): :param action: "create", "update" or "delete" (string) :return: Integer """ - return ProjectEvent.objects.filter( + return TimelineEvent.objects.filter( event_name='irods_ticket_' + action ).count() @@ -1099,9 +1099,9 @@ def _assert_tl_count(self, event_name, count, **kwargs): :param kwargs: Extra kwargs for query (dict, optional) """ timeline = get_backend_api('timeline_backend') - ProjectEvent, _ = timeline.get_models() + TimelineEvent, _ = timeline.get_models() self.assertEqual( - ProjectEvent.objects.filter( + TimelineEvent.objects.filter( event_name=event_name, **kwargs ).count(), count, @@ -1196,7 +1196,7 @@ def test_post(self): self.assertEqual(obj.description, IRODS_REQUEST_DESC) self._assert_tl_count(EVENT_CREATE, 1) self.assertEqual( - ProjectEvent.objects.get(event_name=EVENT_CREATE).extra_data, + TimelineEvent.objects.get(event_name=EVENT_CREATE).extra_data, { 'action': IRODS_REQUEST_ACTION_DELETE, 'path': obj.path, @@ -1368,7 +1368,7 @@ def test_post(self): self.assertEqual(self.request.description, IRODS_REQUEST_DESC_UPDATE) self._assert_tl_count(EVENT_UPDATE, 1) self.assertEqual( - ProjectEvent.objects.get(event_name=EVENT_UPDATE).extra_data, + TimelineEvent.objects.get(event_name=EVENT_UPDATE).extra_data, { 'action': IRODS_REQUEST_ACTION_DELETE, 'path': self.request.path, @@ -1466,7 
+1466,7 @@ def test_post(self): self.assert_irods_obj(self.obj_path) self._assert_tl_count(EVENT_DELETE, 1) self.assertEqual( - ProjectEvent.objects.get(event_name=EVENT_DELETE).extra_data, {} + TimelineEvent.objects.get(event_name=EVENT_DELETE).extra_data, {} ) # Create alerts should be deleted self._assert_alert_count(EVENT_CREATE, self.user, 0) @@ -2268,7 +2268,7 @@ def test_get_contributor(self): self.assertRedirects(response, reverse('home')) self.assertEqual( - list(get_messages(response.wsgi_request))[-1].message, MSG_NO_AUTH + list(get_messages(response.wsgi_request))[-1].message, NO_AUTH_MSG ) request.refresh_from_db() self.assertEqual(request.status, IRODS_REQUEST_STATUS_ACTIVE) @@ -2552,13 +2552,13 @@ def test_search(self): self.assertEqual(response.status_code, 200) data = response.context['app_results'][0] self.assertEqual(len(data['results']), 2) - self.assertEqual(len(data['results']['materials']['items']), 1) + self.assertEqual(len(data['results']['materials'].items), 1) self.assertEqual( - data['results']['materials']['items'][0]['name'], SAMPLE_ID + data['results']['materials'].items[0]['name'], SAMPLE_ID ) - self.assertEqual(len(data['results']['files']['items']), 1) + self.assertEqual(len(data['results']['files'].items), 1) self.assertEqual( - data['results']['files']['items'][0]['name'], self.file_name + data['results']['files'].items[0]['name'], self.file_name ) def test_search_limit_source(self): @@ -2572,9 +2572,9 @@ def test_search_limit_source(self): self.assertEqual(response.status_code, 200) data = response.context['app_results'][0] self.assertEqual(len(data['results']), 1) - self.assertEqual(len(data['results']['materials']['items']), 1) + self.assertEqual(len(data['results']['materials'].items), 1) self.assertEqual( - data['results']['materials']['items'][0]['name'], SOURCE_ID + data['results']['materials'].items[0]['name'], SOURCE_ID ) def test_search_limit_sample(self): @@ -2588,9 +2588,9 @@ def test_search_limit_sample(self): 
self.assertEqual(response.status_code, 200) data = response.context['app_results'][0] self.assertEqual(len(data['results']), 1) - self.assertEqual(len(data['results']['materials']['items']), 1) + self.assertEqual(len(data['results']['materials'].items), 1) self.assertEqual( - data['results']['materials']['items'][0]['name'], SAMPLE_ID + data['results']['materials'].items[0]['name'], SAMPLE_ID ) def test_search_limit_file(self): @@ -2604,9 +2604,9 @@ def test_search_limit_file(self): self.assertEqual(response.status_code, 200) data = response.context['app_results'][0] self.assertEqual(len(data['results']), 1) - self.assertEqual(len(data['results']['files']['items']), 1) + self.assertEqual(len(data['results']['files'].items), 1) self.assertEqual( - data['results']['files']['items'][0]['name'], self.file_name + data['results']['files'].items[0]['name'], self.file_name ) diff --git a/samplesheets/views.py b/samplesheets/views.py index b69186297..72bcf4398 100644 --- a/samplesheets/views.py +++ b/samplesheets/views.py @@ -182,7 +182,7 @@ def add_tl_event(self, project, action, tpl_name=None): :param project: Project object :param action: "import", "create" or "replace" (string) :param tpl_name: Optional template name (string) - :return: ProjectEvent object + :return: TimelineEvent object """ if action not in ['create', 'import', 'replace']: raise ValueError('Invalid action "{}"'.format(action)) @@ -2326,13 +2326,7 @@ class IrodsAccessTicketDeleteView( slug_url_kwarg = 'irodsaccessticket' slug_field = 'sodar_uuid' - def get_success_url(self): - return reverse( - 'samplesheets:irods_tickets', - kwargs={'project': self.object.get_project().sodar_uuid}, - ) - - def delete(self, request, *args, **kwargs): + def form_valid(self, form): obj = self.get_object() irods_backend = get_backend_api('omics_irods') try: @@ -2340,7 +2334,8 @@ def delete(self, request, *args, **kwargs): irods_backend.delete_ticket(irods, obj.ticket) except Exception as ex: messages.error( - request, 
'Error deleting iRODS access ticket: {}'.format(ex) + self.request, + 'Error deleting iRODS access ticket: {}'.format(ex), ) return redirect( reverse( @@ -2349,12 +2344,18 @@ def delete(self, request, *args, **kwargs): ) ) self.add_tl_event(obj, 'delete') - self.create_app_alerts(obj, 'delete', request.user) + self.create_app_alerts(obj, 'delete', self.request.user) + obj.delete() messages.success( - request, + self.request, 'iRODS access ticket "{}" deleted.'.format(obj.get_display_name()), ) - return super().delete(request, *args, **kwargs) + return redirect( + reverse( + 'samplesheets:irods_tickets', + kwargs={'project': self.object.get_project().sodar_uuid}, + ) + ) class IrodsDataRequestListView( diff --git a/setup.cfg b/setup.cfg index 23abd30b6..b1cfce7d5 100644 --- a/setup.cfg +++ b/setup.cfg @@ -2,7 +2,7 @@ max-line-length = 80 exclude = .tox,.git,*/migrations/*,*/static/CACHE/*,docs_manual, node_modules,src/*,config/*,versioneer.py,env/*,.venv,_version.py -ignore = E203, E266, E501, F405, W503, W504, C901 +ignore = E203, E266, E501, F405, W503, W504, C901, E721 max-complexity = 18 select = B,C,E,F,W,T4,B9 diff --git a/setup.py b/setup.py index 414ed0cc8..357dae221 100644 --- a/setup.py +++ b/setup.py @@ -25,14 +25,14 @@ classifiers=[ 'Environment :: Web Environment', 'Framework :: Django', - 'Framework :: Django :: 3.2', + 'Framework :: Django :: 4.2', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: POSIX :: Linux', 'Programming Language :: Python', - 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', 'Programming Language :: Python :: 3.10', + 'Programming Language :: Python :: 3.11', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', ], diff --git a/sodar/_version.py b/sodar/_version.py index 462976ecd..c6c35e766 100644 --- a/sodar/_version.py +++ b/sodar/_version.py @@ -6,7 +6,7 @@ # that just contains the computed version number. 
# This file is released into the public domain. -# Generated by versioneer-0.28 +# Generated by versioneer-0.29 # https://github.com/python-versioneer/python-versioneer """Git implementation of _version.py.""" @@ -16,11 +16,11 @@ import re import subprocess import sys -from typing import Callable, Dict +from typing import Any, Callable, Dict, List, Optional, Tuple import functools -def get_keywords(): +def get_keywords() -> Dict[str, str]: """Get the keywords needed to look up the version information.""" # these strings will be replaced by git during git-archive. # setup.py/versioneer.py will grep for the variable names, so they must @@ -36,8 +36,15 @@ def get_keywords(): class VersioneerConfig: """Container for Versioneer configuration parameters.""" + VCS: str + style: str + tag_prefix: str + parentdir_prefix: str + versionfile_source: str + verbose: bool -def get_config(): + +def get_config() -> VersioneerConfig: """Create, populate and return the VersioneerConfig() object.""" # these strings are filled in when 'setup.py versioneer' creates # _version.py @@ -59,9 +66,9 @@ class NotThisMethod(Exception): HANDLERS: Dict[str, Dict[str, Callable]] = {} -def register_vcs_handler(vcs, method): # decorator +def register_vcs_handler(vcs: str, method: str) -> Callable: # decorator """Create decorator to mark a method as the handler of a VCS.""" - def decorate(f): + def decorate(f: Callable) -> Callable: """Store f in HANDLERS[vcs][method].""" if vcs not in HANDLERS: HANDLERS[vcs] = {} @@ -70,13 +77,19 @@ def decorate(f): return decorate -def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, - env=None): +def run_command( + commands: List[str], + args: List[str], + cwd: Optional[str] = None, + verbose: bool = False, + hide_stderr: bool = False, + env: Optional[Dict[str, str]] = None, +) -> Tuple[Optional[str], Optional[int]]: """Call the given command(s).""" assert isinstance(commands, list) process = None - popen_kwargs = {} + popen_kwargs: 
Dict[str, Any] = {} if sys.platform == "win32": # This hides the console window if pythonw.exe is used startupinfo = subprocess.STARTUPINFO() @@ -92,8 +105,7 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, stderr=(subprocess.PIPE if hide_stderr else None), **popen_kwargs) break - except OSError: - e = sys.exc_info()[1] + except OSError as e: if e.errno == errno.ENOENT: continue if verbose: @@ -113,7 +125,11 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, return stdout, process.returncode -def versions_from_parentdir(parentdir_prefix, root, verbose): +def versions_from_parentdir( + parentdir_prefix: str, + root: str, + verbose: bool, +) -> Dict[str, Any]: """Try to determine the version from the parent directory name. Source tarballs conventionally unpack into a directory that includes both @@ -138,13 +154,13 @@ def versions_from_parentdir(parentdir_prefix, root, verbose): @register_vcs_handler("git", "get_keywords") -def git_get_keywords(versionfile_abs): +def git_get_keywords(versionfile_abs: str) -> Dict[str, str]: """Extract version information from the given file.""" # the code embedded in _version.py can just fetch the value of these # keywords. When used from setup.py, we don't want to import _version.py, # so we do it with a regexp instead. This function is not used from # _version.py. 
- keywords = {} + keywords: Dict[str, str] = {} try: with open(versionfile_abs, "r") as fobj: for line in fobj: @@ -166,7 +182,11 @@ def git_get_keywords(versionfile_abs): @register_vcs_handler("git", "keywords") -def git_versions_from_keywords(keywords, tag_prefix, verbose): +def git_versions_from_keywords( + keywords: Dict[str, str], + tag_prefix: str, + verbose: bool, +) -> Dict[str, Any]: """Get version information from git keywords.""" if "refnames" not in keywords: raise NotThisMethod("Short version file found") @@ -230,7 +250,12 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): @register_vcs_handler("git", "pieces_from_vcs") -def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): +def git_pieces_from_vcs( + tag_prefix: str, + root: str, + verbose: bool, + runner: Callable = run_command +) -> Dict[str, Any]: """Get version from 'git describe' in the root of the source tree. This only gets called if the git-archive 'subst' keywords were *not* @@ -270,7 +295,7 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): raise NotThisMethod("'git rev-parse' failed") full_out = full_out.strip() - pieces = {} + pieces: Dict[str, Any] = {} pieces["long"] = full_out pieces["short"] = full_out[:7] # maybe improved later pieces["error"] = None @@ -362,14 +387,14 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): return pieces -def plus_or_dot(pieces): +def plus_or_dot(pieces: Dict[str, Any]) -> str: """Return a + if we don't already have one, else return a .""" if "+" in pieces.get("closest-tag", ""): return "." return "+" -def render_pep440(pieces): +def render_pep440(pieces: Dict[str, Any]) -> str: """Build up version string, with post-release "local version identifier". Our goal: TAG[+DISTANCE.gHEX[.dirty]] . 
Note that if you @@ -394,7 +419,7 @@ def render_pep440(pieces): return rendered -def render_pep440_branch(pieces): +def render_pep440_branch(pieces: Dict[str, Any]) -> str: """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . The ".dev0" means not master branch. Note that .dev0 sorts backwards @@ -424,7 +449,7 @@ def render_pep440_branch(pieces): return rendered -def pep440_split_post(ver): +def pep440_split_post(ver: str) -> Tuple[str, Optional[int]]: """Split pep440 version string at the post-release segment. Returns the release segments before the post-release and the @@ -434,7 +459,7 @@ def pep440_split_post(ver): return vc[0], int(vc[1] or 0) if len(vc) == 2 else None -def render_pep440_pre(pieces): +def render_pep440_pre(pieces: Dict[str, Any]) -> str: """TAG[.postN.devDISTANCE] -- No -dirty. Exceptions: @@ -458,7 +483,7 @@ def render_pep440_pre(pieces): return rendered -def render_pep440_post(pieces): +def render_pep440_post(pieces: Dict[str, Any]) -> str: """TAG[.postDISTANCE[.dev0]+gHEX] . The ".dev0" means dirty. Note that .dev0 sorts backwards @@ -485,7 +510,7 @@ def render_pep440_post(pieces): return rendered -def render_pep440_post_branch(pieces): +def render_pep440_post_branch(pieces: Dict[str, Any]) -> str: """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . The ".dev0" means not master branch. @@ -514,7 +539,7 @@ def render_pep440_post_branch(pieces): return rendered -def render_pep440_old(pieces): +def render_pep440_old(pieces: Dict[str, Any]) -> str: """TAG[.postDISTANCE[.dev0]] . The ".dev0" means dirty. @@ -536,7 +561,7 @@ def render_pep440_old(pieces): return rendered -def render_git_describe(pieces): +def render_git_describe(pieces: Dict[str, Any]) -> str: """TAG[-DISTANCE-gHEX][-dirty]. Like 'git describe --tags --dirty --always'. @@ -556,7 +581,7 @@ def render_git_describe(pieces): return rendered -def render_git_describe_long(pieces): +def render_git_describe_long(pieces: Dict[str, Any]) -> str: """TAG-DISTANCE-gHEX[-dirty]. 
Like 'git describe --tags --dirty --always -long'. @@ -576,7 +601,7 @@ def render_git_describe_long(pieces): return rendered -def render(pieces, style): +def render(pieces: Dict[str, Any], style: str) -> Dict[str, Any]: """Render the given version pieces into the requested style.""" if pieces["error"]: return {"version": "unknown", @@ -612,7 +637,7 @@ def render(pieces, style): "date": pieces.get("date")} -def get_versions(): +def get_versions() -> Dict[str, Any]: """Get version information or return default if unable to do so.""" # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have # __file__, we can work backwards from there to the root. Some diff --git a/taskflowbackend/api.py b/taskflowbackend/api.py index 990858ad5..89a4d0b3a 100644 --- a/taskflowbackend/api.py +++ b/taskflowbackend/api.py @@ -105,7 +105,7 @@ def get_flow( :param flow_name: Name of flow (string) :param flow_data: Flow parameters (dict) :param async_mode: Set up flow asynchronously if True (boolean) - :param tl_event: ProjectEvent object for timeline updating or None + :param tl_event: TimelineEvent object for timeline updating or None """ flow_cls = flows.get_flow(flow_name) if not flow_cls: @@ -142,8 +142,8 @@ def run_flow( :param project: Project object :param force_fail: Force failure (boolean, for testing) :param async_mode: Submit in async mode (boolean, default=False) - :param tl_event: Timeline ProjectEvent object or None. Event status will - be updated if the flow is run in async mode + :param tl_event: TimelineEvent object or None. 
Event status will be + updated if the flow is run in async mode :return: Dict """ flow_result = None @@ -235,7 +235,7 @@ def submit( :param flow_data: Input data for flow execution (dict, must be JSON serializable) :param async_mode: Run flow asynchronously (boolean, default False) - :param tl_event: Corresponding timeline ProjectEvent (optional) + :param tl_event: Corresponding TimelineEvent (optional) :param force_fail: Make flow fail on purpose (boolean, default False) :return: Boolean :raise: FlowSubmitException if submission fails diff --git a/taskflowbackend/tests/base.py b/taskflowbackend/tests/base.py index 2686ee9e9..c5cbf9b7c 100644 --- a/taskflowbackend/tests/base.py +++ b/taskflowbackend/tests/base.py @@ -37,10 +37,13 @@ RoleMixin, RoleAssignmentMixin, ) -from projectroles.tests.test_permissions import TestPermissionMixin +from projectroles.tests.test_permissions import PermissionTestMixin from projectroles.tests.test_permissions_api import SODARAPIPermissionTestMixin from projectroles.tests.test_views_api import SODARAPIViewTestMixin -from projectroles.views_api import CORE_API_MEDIA_TYPE, CORE_API_DEFAULT_VERSION +from projectroles.views_api import ( + PROJECTROLES_API_MEDIA_TYPE, + PROJECTROLES_API_DEFAULT_VERSION, +) app_settings = AppSettingAPI() @@ -433,8 +436,8 @@ def make_project_taskflow( reverse('projectroles:api_project_create'), method='POST', data=post_data, - media_type=CORE_API_MEDIA_TYPE, - version=CORE_API_DEFAULT_VERSION, + media_type=PROJECTROLES_API_MEDIA_TYPE, + version=PROJECTROLES_API_DEFAULT_VERSION, ) # Assert response and object status self.assertEqual(response.status_code, 201, msg=response.content) @@ -452,8 +455,8 @@ def make_assignment_taskflow(self, project, user, role): url, method='POST', data=request_data, - media_type=CORE_API_MEDIA_TYPE, - version=CORE_API_DEFAULT_VERSION, + media_type=PROJECTROLES_API_MEDIA_TYPE, + version=PROJECTROLES_API_DEFAULT_VERSION, ) self.assertEqual(response.status_code, 201, 
msg=response.content) return RoleAssignment.objects.get(project=project, user=user, role=role) @@ -485,7 +488,7 @@ def setUp(self): class TaskflowPermissionTestBase( TaskflowProjectTestMixin, TaskflowPermissionTestMixin, - TestPermissionMixin, + PermissionTestMixin, TestCase, ): """Base class for testing UI and Ajax view permissions with taskflow""" diff --git a/taskflowbackend/tests/test_plugins.py b/taskflowbackend/tests/test_plugins.py index d6a471008..c75a65f31 100644 --- a/taskflowbackend/tests/test_plugins.py +++ b/taskflowbackend/tests/test_plugins.py @@ -14,7 +14,7 @@ from irodsbackend.api import USER_GROUP_TEMPLATE # Timeline dependency -from timeline.models import ProjectEvent +from timeline.models import TimelineEvent from taskflowbackend.tests.base import TaskflowViewTestBase @@ -97,7 +97,7 @@ def test_create(self): self.irods.users.get(self.user_owner_cat.username), iRODSUser ) self.assert_group_member(project, self.user_owner_cat, True) - tl_events = ProjectEvent.objects.filter( + tl_events = TimelineEvent.objects.filter( project=project, plugin='taskflow', user=self.user, @@ -256,7 +256,7 @@ def test_create_category(self): with self.assertRaises(UserGroupDoesNotExist): self.irods.user_groups.get(group_name) self.assertEqual( - ProjectEvent.objects.filter( + TimelineEvent.objects.filter( project=category, plugin='taskflow', user=self.user, @@ -446,7 +446,7 @@ def test_revert_create(self): self.assert_irods_coll(self.project, expected=False) with self.assertRaises(UserGroupDoesNotExist): self.irods.user_groups.get(self.group_name) - tl_events = ProjectEvent.objects.filter( + tl_events = TimelineEvent.objects.filter( project=self.project, plugin='taskflow', user=self.user, @@ -485,7 +485,7 @@ def test_create(self): ) self.assert_group_member(self.project, self.user_new, True) - tl_events = ProjectEvent.objects.filter( + tl_events = TimelineEvent.objects.filter( project=self.project, plugin='taskflow', user=self.user, @@ -508,7 +508,7 @@ def 
test_create_parent(self): ) self.assert_group_member(self.project, self.user_new, True) - tl_events = ProjectEvent.objects.filter( + tl_events = TimelineEvent.objects.filter( project=self.category, plugin='taskflow', user=self.user, @@ -532,7 +532,7 @@ def test_create_parent_finder(self): # Should still be False self.assert_group_member(self.project, self.user_new, False) - tl_events = ProjectEvent.objects.filter( + tl_events = TimelineEvent.objects.filter( project=self.category, plugin='taskflow', user=self.user, @@ -595,7 +595,7 @@ def test_update(self): self.assert_group_member(self.project, self.user_new, True) self.assertEqual( - ProjectEvent.objects.filter( + TimelineEvent.objects.filter( project=self.project, plugin='taskflow', user=self.user, @@ -698,7 +698,7 @@ def test_revert_create(self): ) self.assert_group_member(self.project, self.user_new, False) - tl_events = ProjectEvent.objects.filter( + tl_events = TimelineEvent.objects.filter( project=self.project, plugin='taskflow', user=self.user, @@ -721,7 +721,7 @@ def test_revert_create_parent(self): ) self.assert_group_member(self.project, self.user_new, False) - tl_events = ProjectEvent.objects.filter( + tl_events = TimelineEvent.objects.filter( project=self.category, plugin='taskflow', user=self.user, @@ -816,7 +816,7 @@ def test_revert_update(self): ) self.assert_group_member(self.project, self.user_new, True) self.assertEqual( - ProjectEvent.objects.filter( + TimelineEvent.objects.filter( project=self.project, plugin='taskflow', user=self.user, @@ -900,7 +900,7 @@ def test_delete(self): self.assert_group_member(self.project, self.user_new, True) self.plugin.perform_role_delete(self.role_as, self.request) self.assert_group_member(self.project, self.user_new, False) - tl_events = ProjectEvent.objects.filter( + tl_events = TimelineEvent.objects.filter( project=self.project, plugin='taskflow', user=self.user, @@ -917,7 +917,7 @@ def test_delete_parent(self): self.assert_group_member(self.project, 
self.user_new, True) self.plugin.perform_role_delete(self.role_as, self.request) self.assert_group_member(self.project, self.user_new, False) - tl_events = ProjectEvent.objects.filter( + tl_events = TimelineEvent.objects.filter( project=self.category, plugin='taskflow', user=self.user, @@ -984,7 +984,7 @@ def test_revert(self): self.plugin.revert_role_delete(self.role_as, self.request) self.assert_group_member(self.project, self.user_new, True) - tl_events = ProjectEvent.objects.filter( + tl_events = TimelineEvent.objects.filter( project=self.project, plugin='taskflow', user=self.user, @@ -1004,7 +1004,7 @@ def test_revert_parent(self): self.plugin.revert_role_delete(self.role_as, self.request) self.assert_group_member(self.project, self.user_new, True) - tl_events = ProjectEvent.objects.filter( + tl_events = TimelineEvent.objects.filter( project=self.category, plugin='taskflow', user=self.user, @@ -1102,7 +1102,7 @@ def test_transfer_category(self): self.assert_group_member(self.project, self.user, True) self.assert_group_member(self.project, self.user_new, True) self.assert_group_member(self.project, self.user_owner_cat, True) - tl_events = ProjectEvent.objects.filter( + tl_events = TimelineEvent.objects.filter( project=self.category, plugin='taskflow', user=self.user, @@ -1241,7 +1241,7 @@ def test_sync_new_project(self): self.irods.users.get(self.user_owner_cat.username), iRODSUser ) self.assert_group_member(project, self.user_owner_cat, True) - tl_events = ProjectEvent.objects.filter( + tl_events = TimelineEvent.objects.filter( project=project, plugin='taskflow', user=None, @@ -1276,7 +1276,7 @@ def test_sync_existing(self): ) self.assert_group_member(project, self.user, True) self.assert_group_member(project, self.user_owner_cat, True) - tl_events = ProjectEvent.objects.filter( + tl_events = TimelineEvent.objects.filter( project=project, plugin='taskflow', user=None, diff --git a/taskflowbackend/tests/test_project_api_views.py 
b/taskflowbackend/tests/test_project_api_views.py index 06c63a500..a05e86aa6 100644 --- a/taskflowbackend/tests/test_project_api_views.py +++ b/taskflowbackend/tests/test_project_api_views.py @@ -12,7 +12,10 @@ from projectroles.app_settings import AppSettingAPI from projectroles.models import Project, RoleAssignment, SODAR_CONSTANTS -from projectroles.views_api import CORE_API_MEDIA_TYPE, CORE_API_DEFAULT_VERSION +from projectroles.views_api import ( + PROJECTROLES_API_MEDIA_TYPE, + PROJECTROLES_API_DEFAULT_VERSION, +) from taskflowbackend.tests.base import TaskflowAPIViewTestBase @@ -47,8 +50,8 @@ class CoreTaskflowAPITestBase(TaskflowAPIViewTestBase): """Override of TestTaskflowAPIBase for SODAR Core API views""" - media_type = CORE_API_MEDIA_TYPE - api_version = CORE_API_DEFAULT_VERSION + media_type = PROJECTROLES_API_MEDIA_TYPE + api_version = PROJECTROLES_API_DEFAULT_VERSION class TestProjectCreateAPIView(CoreTaskflowAPITestBase): diff --git a/taskflowbackend/tests/test_project_views.py b/taskflowbackend/tests/test_project_views.py index 0c9334c48..df8c16b55 100644 --- a/taskflowbackend/tests/test_project_views.py +++ b/taskflowbackend/tests/test_project_views.py @@ -20,7 +20,7 @@ from projectroles.tests.test_models import ProjectInviteMixin # Timeline dependency -from timeline.models import ProjectEvent +from timeline.models import TimelineEvent from taskflowbackend.tests.base import TaskflowViewTestBase @@ -137,7 +137,7 @@ def test_create_project(self): ) self.assertEqual(group.hasmember(self.user_owner_cat.username), True) # Assert timeline event - tl_events = ProjectEvent.objects.filter( + tl_events = TimelineEvent.objects.filter( project=project, plugin='taskflow', user=self.user, @@ -233,7 +233,7 @@ def test_update(self): ) self.assert_group_member(self.project, self.user, True) self.assert_group_member(self.project, self.user_owner_cat, True) - tl_events = ProjectEvent.objects.filter( + tl_events = TimelineEvent.objects.filter( project=self.project, 
plugin='taskflow', user=self.user, @@ -771,7 +771,7 @@ def test_accept_invite_ldap(self): [ ( reverse( - 'projectroles:invite_process_ldap', + 'projectroles:invite_process_login', kwargs={'secret': invite.secret}, ), 302, @@ -823,7 +823,7 @@ def test_accept_invite_ldap_category(self): [ ( reverse( - 'projectroles:invite_process_ldap', + 'projectroles:invite_process_login', kwargs={'secret': invite.secret}, ), 302, @@ -862,7 +862,7 @@ def test_accept_invite_local(self): [ ( reverse( - 'projectroles:invite_process_local', + 'projectroles:invite_process_new_user', kwargs={'secret': invite.secret}, ), 302, @@ -904,7 +904,7 @@ def test_accept_invite_local_category(self): [ ( reverse( - 'projectroles:invite_process_local', + 'projectroles:invite_process_new_user', kwargs={'secret': invite.secret}, ), 302, diff --git a/utility/install_postgres.sh b/utility/install_postgres.sh index b052f00e3..cf0f8188f 100755 --- a/utility/install_postgres.sh +++ b/utility/install_postgres.sh @@ -1,9 +1,9 @@ #!/usr/bin/env bash echo "***********************************************" -echo "Installing PostgreSQL v11" +echo "Installing PostgreSQL v16" echo "***********************************************" add-apt-repository -y "deb http://apt.postgresql.org/pub/repos/apt/ focal-pgdg main" wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add - apt-get -y update -apt-get -y install postgresql-11 +apt-get -y install postgresql-16 diff --git a/utility/install_python.sh b/utility/install_python.sh index d74cf5546..863ad020b 100755 --- a/utility/install_python.sh +++ b/utility/install_python.sh @@ -1,9 +1,9 @@ #!/usr/bin/env bash echo "***********************************************" -echo "Installing Python 3.8" +echo "Installing Python 3.11" echo "***********************************************" add-apt-repository -y ppa:deadsnakes/ppa apt-get -y update -apt-get -y install python3.8 python3.8-dev python3.8-venv -curl https://bootstrap.pypa.io/get-pip.py | sudo 
-H python3.8 +apt-get -y install python3.11 python3.11-dev python3.11-venv python3.11-gdbm +curl https://bootstrap.pypa.io/get-pip.py | sudo -H python3.11 diff --git a/utility/install_python_dependencies.sh b/utility/install_python_dependencies.sh index dfbd1f857..ceff0f44e 100755 --- a/utility/install_python_dependencies.sh +++ b/utility/install_python_dependencies.sh @@ -25,7 +25,7 @@ if [ -z "$VIRTUAL_ENV" ]; then exit 1; else - pip install wheel==0.40.0 + pip install "wheel>=0.42.0, <0.43" pip install -r $PROJECT_DIR/requirements/local.txt pip install -r $PROJECT_DIR/requirements/test.txt pip install -r $PROJECT_DIR/requirements.txt diff --git a/versioneer.py b/versioneer.py index 18e34c2f5..1e3753e63 100644 --- a/versioneer.py +++ b/versioneer.py @@ -1,5 +1,5 @@ -# Version: 0.28 +# Version: 0.29 """The Versioneer - like a rocketeer, but for versions. @@ -10,7 +10,7 @@ * https://github.com/python-versioneer/python-versioneer * Brian Warner * License: Public Domain (Unlicense) -* Compatible with: Python 3.7, 3.8, 3.9, 3.10 and pypy3 +* Compatible with: Python 3.7, 3.8, 3.9, 3.10, 3.11 and pypy3 * [![Latest Version][pypi-image]][pypi-url] * [![Build Status][travis-image]][travis-url] @@ -316,7 +316,8 @@ import subprocess import sys from pathlib import Path -from typing import Callable, Dict +from typing import Any, Callable, cast, Dict, List, Optional, Tuple, Union +from typing import NoReturn import functools have_tomllib = True @@ -332,8 +333,16 @@ class VersioneerConfig: """Container for Versioneer configuration parameters.""" + VCS: str + style: str + tag_prefix: str + versionfile_source: str + versionfile_build: Optional[str] + parentdir_prefix: Optional[str] + verbose: Optional[bool] -def get_root(): + +def get_root() -> str: """Get the project root directory. We require that all commands are run from the project root, i.e. 
the @@ -341,13 +350,23 @@ def get_root(): """ root = os.path.realpath(os.path.abspath(os.getcwd())) setup_py = os.path.join(root, "setup.py") + pyproject_toml = os.path.join(root, "pyproject.toml") versioneer_py = os.path.join(root, "versioneer.py") - if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): + if not ( + os.path.exists(setup_py) + or os.path.exists(pyproject_toml) + or os.path.exists(versioneer_py) + ): # allow 'python path/to/setup.py COMMAND' root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0]))) setup_py = os.path.join(root, "setup.py") + pyproject_toml = os.path.join(root, "pyproject.toml") versioneer_py = os.path.join(root, "versioneer.py") - if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): + if not ( + os.path.exists(setup_py) + or os.path.exists(pyproject_toml) + or os.path.exists(versioneer_py) + ): err = ("Versioneer was unable to run the project root directory. " "Versioneer requires setup.py to be executed from " "its immediate directory (like 'python setup.py COMMAND'), " @@ -372,23 +391,24 @@ def get_root(): return root -def get_config_from_root(root): +def get_config_from_root(root: str) -> VersioneerConfig: """Read the project setup.cfg file to determine Versioneer config.""" # This might raise OSError (if setup.cfg is missing), or # configparser.NoSectionError (if it lacks a [versioneer] section), or # configparser.NoOptionError (if it lacks "VCS="). See the docstring at # the top of versioneer.py for instructions on writing your setup.cfg . 
- root = Path(root) - pyproject_toml = root / "pyproject.toml" - setup_cfg = root / "setup.cfg" - section = None + root_pth = Path(root) + pyproject_toml = root_pth / "pyproject.toml" + setup_cfg = root_pth / "setup.cfg" + section: Union[Dict[str, Any], configparser.SectionProxy, None] = None if pyproject_toml.exists() and have_tomllib: try: with open(pyproject_toml, 'rb') as fobj: pp = tomllib.load(fobj) section = pp['tool']['versioneer'] - except (tomllib.TOMLDecodeError, KeyError): - pass + except (tomllib.TOMLDecodeError, KeyError) as e: + print(f"Failed to load config from {pyproject_toml}: {e}") + print("Try to load it from setup.cfg") if not section: parser = configparser.ConfigParser() with open(setup_cfg) as cfg_file: @@ -397,16 +417,25 @@ def get_config_from_root(root): section = parser["versioneer"] + # `cast`` really shouldn't be used, but its simplest for the + # common VersioneerConfig users at the moment. We verify against + # `None` values elsewhere where it matters + cfg = VersioneerConfig() cfg.VCS = section['VCS'] cfg.style = section.get("style", "") - cfg.versionfile_source = section.get("versionfile_source") + cfg.versionfile_source = cast(str, section.get("versionfile_source")) cfg.versionfile_build = section.get("versionfile_build") - cfg.tag_prefix = section.get("tag_prefix") + cfg.tag_prefix = cast(str, section.get("tag_prefix")) if cfg.tag_prefix in ("''", '""', None): cfg.tag_prefix = "" cfg.parentdir_prefix = section.get("parentdir_prefix") - cfg.verbose = section.get("verbose") + if isinstance(section, configparser.SectionProxy): + # Make sure configparser translates to bool + cfg.verbose = section.getboolean("verbose") + else: + cfg.verbose = section.get("verbose") + return cfg @@ -419,22 +448,28 @@ class NotThisMethod(Exception): HANDLERS: Dict[str, Dict[str, Callable]] = {} -def register_vcs_handler(vcs, method): # decorator +def register_vcs_handler(vcs: str, method: str) -> Callable: # decorator """Create decorator to mark a method 
as the handler of a VCS.""" - def decorate(f): + def decorate(f: Callable) -> Callable: """Store f in HANDLERS[vcs][method].""" HANDLERS.setdefault(vcs, {})[method] = f return f return decorate -def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, - env=None): +def run_command( + commands: List[str], + args: List[str], + cwd: Optional[str] = None, + verbose: bool = False, + hide_stderr: bool = False, + env: Optional[Dict[str, str]] = None, +) -> Tuple[Optional[str], Optional[int]]: """Call the given command(s).""" assert isinstance(commands, list) process = None - popen_kwargs = {} + popen_kwargs: Dict[str, Any] = {} if sys.platform == "win32": # This hides the console window if pythonw.exe is used startupinfo = subprocess.STARTUPINFO() @@ -450,8 +485,7 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, stderr=(subprocess.PIPE if hide_stderr else None), **popen_kwargs) break - except OSError: - e = sys.exc_info()[1] + except OSError as e: if e.errno == errno.ENOENT: continue if verbose: @@ -479,7 +513,7 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, # that just contains the computed version number. # This file is released into the public domain. -# Generated by versioneer-0.28 +# Generated by versioneer-0.29 # https://github.com/python-versioneer/python-versioneer """Git implementation of _version.py.""" @@ -489,11 +523,11 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, import re import subprocess import sys -from typing import Callable, Dict +from typing import Any, Callable, Dict, List, Optional, Tuple import functools -def get_keywords(): +def get_keywords() -> Dict[str, str]: """Get the keywords needed to look up the version information.""" # these strings will be replaced by git during git-archive. 
# setup.py/versioneer.py will grep for the variable names, so they must @@ -509,8 +543,15 @@ def get_keywords(): class VersioneerConfig: """Container for Versioneer configuration parameters.""" + VCS: str + style: str + tag_prefix: str + parentdir_prefix: str + versionfile_source: str + verbose: bool + -def get_config(): +def get_config() -> VersioneerConfig: """Create, populate and return the VersioneerConfig() object.""" # these strings are filled in when 'setup.py versioneer' creates # _version.py @@ -532,9 +573,9 @@ class NotThisMethod(Exception): HANDLERS: Dict[str, Dict[str, Callable]] = {} -def register_vcs_handler(vcs, method): # decorator +def register_vcs_handler(vcs: str, method: str) -> Callable: # decorator """Create decorator to mark a method as the handler of a VCS.""" - def decorate(f): + def decorate(f: Callable) -> Callable: """Store f in HANDLERS[vcs][method].""" if vcs not in HANDLERS: HANDLERS[vcs] = {} @@ -543,13 +584,19 @@ def decorate(f): return decorate -def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, - env=None): +def run_command( + commands: List[str], + args: List[str], + cwd: Optional[str] = None, + verbose: bool = False, + hide_stderr: bool = False, + env: Optional[Dict[str, str]] = None, +) -> Tuple[Optional[str], Optional[int]]: """Call the given command(s).""" assert isinstance(commands, list) process = None - popen_kwargs = {} + popen_kwargs: Dict[str, Any] = {} if sys.platform == "win32": # This hides the console window if pythonw.exe is used startupinfo = subprocess.STARTUPINFO() @@ -565,8 +612,7 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, stderr=(subprocess.PIPE if hide_stderr else None), **popen_kwargs) break - except OSError: - e = sys.exc_info()[1] + except OSError as e: if e.errno == errno.ENOENT: continue if verbose: @@ -586,7 +632,11 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, return stdout, process.returncode -def 
versions_from_parentdir(parentdir_prefix, root, verbose): +def versions_from_parentdir( + parentdir_prefix: str, + root: str, + verbose: bool, +) -> Dict[str, Any]: """Try to determine the version from the parent directory name. Source tarballs conventionally unpack into a directory that includes both @@ -611,13 +661,13 @@ def versions_from_parentdir(parentdir_prefix, root, verbose): @register_vcs_handler("git", "get_keywords") -def git_get_keywords(versionfile_abs): +def git_get_keywords(versionfile_abs: str) -> Dict[str, str]: """Extract version information from the given file.""" # the code embedded in _version.py can just fetch the value of these # keywords. When used from setup.py, we don't want to import _version.py, # so we do it with a regexp instead. This function is not used from # _version.py. - keywords = {} + keywords: Dict[str, str] = {} try: with open(versionfile_abs, "r") as fobj: for line in fobj: @@ -639,7 +689,11 @@ def git_get_keywords(versionfile_abs): @register_vcs_handler("git", "keywords") -def git_versions_from_keywords(keywords, tag_prefix, verbose): +def git_versions_from_keywords( + keywords: Dict[str, str], + tag_prefix: str, + verbose: bool, +) -> Dict[str, Any]: """Get version information from git keywords.""" if "refnames" not in keywords: raise NotThisMethod("Short version file found") @@ -703,7 +757,12 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): @register_vcs_handler("git", "pieces_from_vcs") -def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): +def git_pieces_from_vcs( + tag_prefix: str, + root: str, + verbose: bool, + runner: Callable = run_command +) -> Dict[str, Any]: """Get version from 'git describe' in the root of the source tree. 
This only gets called if the git-archive 'subst' keywords were *not* @@ -743,7 +802,7 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): raise NotThisMethod("'git rev-parse' failed") full_out = full_out.strip() - pieces = {} + pieces: Dict[str, Any] = {} pieces["long"] = full_out pieces["short"] = full_out[:7] # maybe improved later pieces["error"] = None @@ -835,14 +894,14 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): return pieces -def plus_or_dot(pieces): +def plus_or_dot(pieces: Dict[str, Any]) -> str: """Return a + if we don't already have one, else return a .""" if "+" in pieces.get("closest-tag", ""): return "." return "+" -def render_pep440(pieces): +def render_pep440(pieces: Dict[str, Any]) -> str: """Build up version string, with post-release "local version identifier". Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you @@ -867,7 +926,7 @@ def render_pep440(pieces): return rendered -def render_pep440_branch(pieces): +def render_pep440_branch(pieces: Dict[str, Any]) -> str: """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . The ".dev0" means not master branch. Note that .dev0 sorts backwards @@ -897,7 +956,7 @@ def render_pep440_branch(pieces): return rendered -def pep440_split_post(ver): +def pep440_split_post(ver: str) -> Tuple[str, Optional[int]]: """Split pep440 version string at the post-release segment. Returns the release segments before the post-release and the @@ -907,7 +966,7 @@ def pep440_split_post(ver): return vc[0], int(vc[1] or 0) if len(vc) == 2 else None -def render_pep440_pre(pieces): +def render_pep440_pre(pieces: Dict[str, Any]) -> str: """TAG[.postN.devDISTANCE] -- No -dirty. Exceptions: @@ -931,7 +990,7 @@ def render_pep440_pre(pieces): return rendered -def render_pep440_post(pieces): +def render_pep440_post(pieces: Dict[str, Any]) -> str: """TAG[.postDISTANCE[.dev0]+gHEX] . The ".dev0" means dirty. 
Note that .dev0 sorts backwards @@ -958,7 +1017,7 @@ def render_pep440_post(pieces): return rendered -def render_pep440_post_branch(pieces): +def render_pep440_post_branch(pieces: Dict[str, Any]) -> str: """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . The ".dev0" means not master branch. @@ -987,7 +1046,7 @@ def render_pep440_post_branch(pieces): return rendered -def render_pep440_old(pieces): +def render_pep440_old(pieces: Dict[str, Any]) -> str: """TAG[.postDISTANCE[.dev0]] . The ".dev0" means dirty. @@ -1009,7 +1068,7 @@ def render_pep440_old(pieces): return rendered -def render_git_describe(pieces): +def render_git_describe(pieces: Dict[str, Any]) -> str: """TAG[-DISTANCE-gHEX][-dirty]. Like 'git describe --tags --dirty --always'. @@ -1029,7 +1088,7 @@ def render_git_describe(pieces): return rendered -def render_git_describe_long(pieces): +def render_git_describe_long(pieces: Dict[str, Any]) -> str: """TAG-DISTANCE-gHEX[-dirty]. Like 'git describe --tags --dirty --always -long'. @@ -1049,7 +1108,7 @@ def render_git_describe_long(pieces): return rendered -def render(pieces, style): +def render(pieces: Dict[str, Any], style: str) -> Dict[str, Any]: """Render the given version pieces into the requested style.""" if pieces["error"]: return {"version": "unknown", @@ -1085,7 +1144,7 @@ def render(pieces, style): "date": pieces.get("date")} -def get_versions(): +def get_versions() -> Dict[str, Any]: """Get version information or return default if unable to do so.""" # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have # __file__, we can work backwards from there to the root. Some @@ -1133,13 +1192,13 @@ def get_versions(): @register_vcs_handler("git", "get_keywords") -def git_get_keywords(versionfile_abs): +def git_get_keywords(versionfile_abs: str) -> Dict[str, str]: """Extract version information from the given file.""" # the code embedded in _version.py can just fetch the value of these # keywords. 
When used from setup.py, we don't want to import _version.py, # so we do it with a regexp instead. This function is not used from # _version.py. - keywords = {} + keywords: Dict[str, str] = {} try: with open(versionfile_abs, "r") as fobj: for line in fobj: @@ -1161,7 +1220,11 @@ def git_get_keywords(versionfile_abs): @register_vcs_handler("git", "keywords") -def git_versions_from_keywords(keywords, tag_prefix, verbose): +def git_versions_from_keywords( + keywords: Dict[str, str], + tag_prefix: str, + verbose: bool, +) -> Dict[str, Any]: """Get version information from git keywords.""" if "refnames" not in keywords: raise NotThisMethod("Short version file found") @@ -1225,7 +1288,12 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): @register_vcs_handler("git", "pieces_from_vcs") -def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): +def git_pieces_from_vcs( + tag_prefix: str, + root: str, + verbose: bool, + runner: Callable = run_command +) -> Dict[str, Any]: """Get version from 'git describe' in the root of the source tree. This only gets called if the git-archive 'subst' keywords were *not* @@ -1265,7 +1333,7 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): raise NotThisMethod("'git rev-parse' failed") full_out = full_out.strip() - pieces = {} + pieces: Dict[str, Any] = {} pieces["long"] = full_out pieces["short"] = full_out[:7] # maybe improved later pieces["error"] = None @@ -1357,7 +1425,7 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): return pieces -def do_vcs_install(versionfile_source, ipy): +def do_vcs_install(versionfile_source: str, ipy: Optional[str]) -> None: """Git-specific installation logic for Versioneer. 
For Git, this means creating/changing .gitattributes to mark _version.py @@ -1395,7 +1463,11 @@ def do_vcs_install(versionfile_source, ipy): run_command(GITS, ["add", "--"] + files) -def versions_from_parentdir(parentdir_prefix, root, verbose): +def versions_from_parentdir( + parentdir_prefix: str, + root: str, + verbose: bool, +) -> Dict[str, Any]: """Try to determine the version from the parent directory name. Source tarballs conventionally unpack into a directory that includes both @@ -1420,7 +1492,7 @@ def versions_from_parentdir(parentdir_prefix, root, verbose): SHORT_VERSION_PY = """ -# This file was generated by 'versioneer.py' (0.28) from +# This file was generated by 'versioneer.py' (0.29) from # revision-control system data, or from the parent directory name of an # unpacked source archive. Distribution tarballs contain a pre-generated copy # of this file. @@ -1437,7 +1509,7 @@ def get_versions(): """ -def versions_from_file(filename): +def versions_from_file(filename: str) -> Dict[str, Any]: """Try to determine the version from _version.py if present.""" try: with open(filename) as f: @@ -1454,9 +1526,8 @@ def versions_from_file(filename): return json.loads(mo.group(1)) -def write_to_version_file(filename, versions): +def write_to_version_file(filename: str, versions: Dict[str, Any]) -> None: """Write the given version number to the given _version.py file.""" - os.unlink(filename) contents = json.dumps(versions, sort_keys=True, indent=1, separators=(",", ": ")) with open(filename, "w") as f: @@ -1465,14 +1536,14 @@ def write_to_version_file(filename, versions): print("set %s to '%s'" % (filename, versions["version"])) -def plus_or_dot(pieces): +def plus_or_dot(pieces: Dict[str, Any]) -> str: """Return a + if we don't already have one, else return a .""" if "+" in pieces.get("closest-tag", ""): return "." 
return "+" -def render_pep440(pieces): +def render_pep440(pieces: Dict[str, Any]) -> str: """Build up version string, with post-release "local version identifier". Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you @@ -1497,7 +1568,7 @@ def render_pep440(pieces): return rendered -def render_pep440_branch(pieces): +def render_pep440_branch(pieces: Dict[str, Any]) -> str: """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . The ".dev0" means not master branch. Note that .dev0 sorts backwards @@ -1527,7 +1598,7 @@ def render_pep440_branch(pieces): return rendered -def pep440_split_post(ver): +def pep440_split_post(ver: str) -> Tuple[str, Optional[int]]: """Split pep440 version string at the post-release segment. Returns the release segments before the post-release and the @@ -1537,7 +1608,7 @@ def pep440_split_post(ver): return vc[0], int(vc[1] or 0) if len(vc) == 2 else None -def render_pep440_pre(pieces): +def render_pep440_pre(pieces: Dict[str, Any]) -> str: """TAG[.postN.devDISTANCE] -- No -dirty. Exceptions: @@ -1561,7 +1632,7 @@ def render_pep440_pre(pieces): return rendered -def render_pep440_post(pieces): +def render_pep440_post(pieces: Dict[str, Any]) -> str: """TAG[.postDISTANCE[.dev0]+gHEX] . The ".dev0" means dirty. Note that .dev0 sorts backwards @@ -1588,7 +1659,7 @@ def render_pep440_post(pieces): return rendered -def render_pep440_post_branch(pieces): +def render_pep440_post_branch(pieces: Dict[str, Any]) -> str: """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . The ".dev0" means not master branch. @@ -1617,7 +1688,7 @@ def render_pep440_post_branch(pieces): return rendered -def render_pep440_old(pieces): +def render_pep440_old(pieces: Dict[str, Any]) -> str: """TAG[.postDISTANCE[.dev0]] . The ".dev0" means dirty. @@ -1639,7 +1710,7 @@ def render_pep440_old(pieces): return rendered -def render_git_describe(pieces): +def render_git_describe(pieces: Dict[str, Any]) -> str: """TAG[-DISTANCE-gHEX][-dirty]. Like 'git describe --tags --dirty --always'. 
@@ -1659,7 +1730,7 @@ def render_git_describe(pieces): return rendered -def render_git_describe_long(pieces): +def render_git_describe_long(pieces: Dict[str, Any]) -> str: """TAG-DISTANCE-gHEX[-dirty]. Like 'git describe --tags --dirty --always -long'. @@ -1679,7 +1750,7 @@ def render_git_describe_long(pieces): return rendered -def render(pieces, style): +def render(pieces: Dict[str, Any], style: str) -> Dict[str, Any]: """Render the given version pieces into the requested style.""" if pieces["error"]: return {"version": "unknown", @@ -1719,7 +1790,7 @@ class VersioneerBadRootError(Exception): """The project root directory is unknown or missing key files.""" -def get_versions(verbose=False): +def get_versions(verbose: bool = False) -> Dict[str, Any]: """Get the project version from whatever source is available. Returns dict with two keys: 'version' and 'full'. @@ -1734,7 +1805,7 @@ def get_versions(verbose=False): assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg" handlers = HANDLERS.get(cfg.VCS) assert handlers, "unrecognized VCS '%s'" % cfg.VCS - verbose = verbose or cfg.verbose + verbose = verbose or bool(cfg.verbose) # `bool()` used to avoid `None` assert cfg.versionfile_source is not None, \ "please set versioneer.versionfile_source" assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix" @@ -1795,12 +1866,12 @@ def get_versions(verbose=False): "date": None} -def get_version(): +def get_version() -> str: """Get the short version string for this project.""" return get_versions()["version"] -def get_cmdclass(cmdclass=None): +def get_cmdclass(cmdclass: Optional[Dict[str, Any]] = None): """Get the custom setuptools subclasses used by Versioneer. If the package uses a different cmdclass (e.g. 
one from numpy), it @@ -1828,16 +1899,16 @@ def get_cmdclass(cmdclass=None): class cmd_version(Command): description = "report generated version string" - user_options = [] - boolean_options = [] + user_options: List[Tuple[str, str, str]] = [] + boolean_options: List[str] = [] - def initialize_options(self): + def initialize_options(self) -> None: pass - def finalize_options(self): + def finalize_options(self) -> None: pass - def run(self): + def run(self) -> None: vers = get_versions(verbose=True) print("Version: %s" % vers["version"]) print(" full-revisionid: %s" % vers.get("full-revisionid")) @@ -1867,12 +1938,12 @@ def run(self): # we override different "build_py" commands for both environments if 'build_py' in cmds: - _build_py = cmds['build_py'] + _build_py: Any = cmds['build_py'] else: from setuptools.command.build_py import build_py as _build_py class cmd_build_py(_build_py): - def run(self): + def run(self) -> None: root = get_root() cfg = get_config_from_root(root) versions = get_versions() @@ -1891,12 +1962,12 @@ def run(self): cmds["build_py"] = cmd_build_py if 'build_ext' in cmds: - _build_ext = cmds['build_ext'] + _build_ext: Any = cmds['build_ext'] else: from setuptools.command.build_ext import build_ext as _build_ext class cmd_build_ext(_build_ext): - def run(self): + def run(self) -> None: root = get_root() cfg = get_config_from_root(root) versions = get_versions() @@ -1923,7 +1994,7 @@ def run(self): cmds["build_ext"] = cmd_build_ext if "cx_Freeze" in sys.modules: # cx_freeze enabled? - from cx_Freeze.dist import build_exe as _build_exe + from cx_Freeze.dist import build_exe as _build_exe # type: ignore # nczeczulin reports that py2exe won't like the pep440-style string # as FILEVERSION, but it can be used for PRODUCTVERSION, e.g. # setup(console=[{ @@ -1932,7 +2003,7 @@ def run(self): # ... 
class cmd_build_exe(_build_exe): - def run(self): + def run(self) -> None: root = get_root() cfg = get_config_from_root(root) versions = get_versions() @@ -1956,12 +2027,12 @@ def run(self): if 'py2exe' in sys.modules: # py2exe enabled? try: - from py2exe.setuptools_buildexe import py2exe as _py2exe + from py2exe.setuptools_buildexe import py2exe as _py2exe # type: ignore except ImportError: - from py2exe.distutils_buildexe import py2exe as _py2exe + from py2exe.distutils_buildexe import py2exe as _py2exe # type: ignore class cmd_py2exe(_py2exe): - def run(self): + def run(self) -> None: root = get_root() cfg = get_config_from_root(root) versions = get_versions() @@ -1984,12 +2055,12 @@ def run(self): # sdist farms its file list building out to egg_info if 'egg_info' in cmds: - _egg_info = cmds['egg_info'] + _egg_info: Any = cmds['egg_info'] else: from setuptools.command.egg_info import egg_info as _egg_info class cmd_egg_info(_egg_info): - def find_sources(self): + def find_sources(self) -> None: # egg_info.find_sources builds the manifest list and writes it # in one shot super().find_sources() @@ -2021,12 +2092,12 @@ def find_sources(self): # we override different "sdist" commands for both environments if 'sdist' in cmds: - _sdist = cmds['sdist'] + _sdist: Any = cmds['sdist'] else: from setuptools.command.sdist import sdist as _sdist class cmd_sdist(_sdist): - def run(self): + def run(self) -> None: versions = get_versions() self._versioneer_generated_versions = versions # unless we update this, the command will keep using the old @@ -2034,7 +2105,7 @@ def run(self): self.distribution.metadata.version = versions["version"] return _sdist.run(self) - def make_release_tree(self, base_dir, files): + def make_release_tree(self, base_dir: str, files: List[str]) -> None: root = get_root() cfg = get_config_from_root(root) _sdist.make_release_tree(self, base_dir, files) @@ -2099,7 +2170,7 @@ def make_release_tree(self, base_dir, files): """ -def do_setup(): +def 
do_setup() -> int: """Do main VCS-independent setup function for installing Versioneer.""" root = get_root() try: @@ -2126,6 +2197,7 @@ def do_setup(): ipy = os.path.join(os.path.dirname(cfg.versionfile_source), "__init__.py") + maybe_ipy: Optional[str] = ipy if os.path.exists(ipy): try: with open(ipy, "r") as f: @@ -2146,16 +2218,16 @@ def do_setup(): print(" %s unmodified" % ipy) else: print(" %s doesn't exist, ok" % ipy) - ipy = None + maybe_ipy = None # Make VCS-specific changes. For git, this means creating/changing # .gitattributes to mark _version.py for export-subst keyword # substitution. - do_vcs_install(cfg.versionfile_source, ipy) + do_vcs_install(cfg.versionfile_source, maybe_ipy) return 0 -def scan_setup_py(): +def scan_setup_py() -> int: """Validate the contents of setup.py against Versioneer's expectations.""" found = set() setters = False @@ -2192,7 +2264,7 @@ def scan_setup_py(): return errors -def setup_command(): +def setup_command() -> NoReturn: """Set up Versioneer and exit with appropriate error code.""" errors = do_setup() errors += scan_setup_py()